hexsha stringlengths 40 40 | size int64 2 1.02M | ext stringclasses 10
values | lang stringclasses 1
value | max_stars_repo_path stringlengths 4 245 | max_stars_repo_name stringlengths 6 130 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 245 | max_issues_repo_name stringlengths 6 130 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 245 | max_forks_repo_name stringlengths 6 130 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 2 1.02M | avg_line_length float64 1 417k | max_line_length int64 1 987k | alphanum_fraction float64 0 1 | content_no_comment stringlengths 0 1.01M | is_comment_constant_removed bool 1
class | is_sharp_comment_removed bool 1
class |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
f720cbcab58f05b66ace66127442ad6b2998f33d | 2,069 | py | Python | botnet/modules/lib/cache.py | admdev8/botnet-2 | 2fd43237e628869eb34d8e7a6747da6d71c1192c | [
"MIT"
] | 69 | 2015-02-24T19:24:23.000Z | 2022-02-23T08:04:53.000Z | botnet/modules/lib/cache.py | admdev8/botnet-2 | 2fd43237e628869eb34d8e7a6747da6d71c1192c | [
"MIT"
] | 10 | 2017-06-28T21:08:29.000Z | 2022-01-26T07:46:02.000Z | botnet/modules/lib/cache.py | admdev8/botnet-2 | 2fd43237e628869eb34d8e7a6747da6d71c1192c | [
"MIT"
] | 39 | 2015-11-19T10:07:21.000Z | 2022-03-30T10:56:24.000Z | """
Contains cache implementations which can be used by the modules, for example
to cache results acquired from various online APIs.
"""
import datetime
import hashlib
def get_md5(string):
    """Return the hexadecimal MD5 digest of *string* (UTF-8 encoded)."""
    digest = hashlib.md5(string.encode('utf-8'))
    return digest.hexdigest()
class BaseCache(object):
    """Abstract cache interface.

    Concrete caches override :meth:`set` and :meth:`get`; this base
    implementation stores nothing and always misses.
    """

    def __init__(self, default_timeout=300):
        # Timeout (seconds) applied by set() when no explicit timeout is given.
        self.default_timeout = default_timeout

    def set(self, key, value, timeout=None):
        """Store *value* under *key*.

        Returns True on success or False in case of errors.
        """
        return True

    def get(self, key):
        """Return the value stored under *key*, or None if the key is absent."""
        return None
class MemoryCache(BaseCache):
    """In-process dictionary cache.  100% thread unsafety guaranteed.

    default_timeout: lifetime (seconds) applied by set() when the caller
    does not supply an explicit timeout.
    """

    def __init__(self, default_timeout=300):
        super().__init__(default_timeout)
        # Maps hashed key -> (expiry datetime, value).
        self._data = {}

    def _prepare_key(self, key):
        """Normalize *key* to its stored form (an MD5 hex digest)."""
        return get_md5(key)

    def _clean(self):
        """Drop every entry whose expiry time has already passed."""
        now = datetime.datetime.now()
        for hashed in list(self._data):
            entry = self._data.get(hashed)
            if entry is not None and entry[0] < now:
                self._data.pop(hashed, None)

    def set(self, key, value, timeout=None):
        self._clean()
        lifetime = self.default_timeout if timeout is None else timeout
        expiry = datetime.datetime.now() + datetime.timedelta(seconds=lifetime)
        self._data[self._prepare_key(key)] = (expiry, value)
        return True

    def get(self, key):
        entry = self._data.get(self._prepare_key(key))
        if entry is None:
            return None
        expiry, value = entry
        return value if expiry > datetime.datetime.now() else None
| 26.87013 | 80 | 0.581924 |
import datetime
import hashlib
def get_md5(string):
m = hashlib.md5()
m.update(string.encode('utf-8'))
return m.hexdigest()
class BaseCache(object):
def __init__(self, default_timeout=300):
self.default_timeout = default_timeout
def set(self, key, value, timeout=None):
return True
def get(self, key):
return None
class MemoryCache(BaseCache):
def __init__(self, default_timeout=300):
super().__init__(default_timeout)
self._data = {}
def _prepare_key(self, key):
return get_md5(key)
def _clean(self):
for key in self._data.copy().keys():
try:
expires, value = self._data[key]
if expires < datetime.datetime.now():
self._data.pop(key)
except KeyError:
pass
def set(self, key, value, timeout=None):
self._clean()
key = self._prepare_key(key)
if timeout is None:
timeout = self.default_timeout
expires = datetime.datetime.now() + datetime.timedelta(seconds=timeout)
self._data[key] = (expires, value)
return True
def get(self, key):
try:
key = self._prepare_key(key)
expires, value = self._data[key]
if expires > datetime.datetime.now():
return value
else:
return None
except KeyError:
return None
| true | true |
f720cc9a775ee8a5289c1096d9e20c36d79908d3 | 15,229 | py | Python | src/main.py | Steffuu/tgMensaBotDD | 04bca6ce839d5fb040e0e6232163f4343bcb85fb | [
"MIT"
] | null | null | null | src/main.py | Steffuu/tgMensaBotDD | 04bca6ce839d5fb040e0e6232163f4343bcb85fb | [
"MIT"
] | null | null | null | src/main.py | Steffuu/tgMensaBotDD | 04bca6ce839d5fb040e0e6232163f4343bcb85fb | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# -*- encoding: utf-8 -*-
from telegram.ext import Updater, CommandHandler, MessageHandler, Filters, InlineQueryHandler
import telegram as tg
import requests
import json
import os
import io
import time
import logging
from datetime import timedelta
import translate
import random
import praw
REDDIT_BOT_ID = os.environ['REDDIT_BOT_ID']
REDDIT_BOT_SECRET = os.environ['REDDIT_BOT_SECRET']
REDDIT_USER_AGENT = os.environ['REDDIT_USER_AGENT']
USER_AGENT_BROWSER = 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.132 Safari/537.36'
royalTitles = ["Lé", "Baron", "König", "Archlord", "Genius", "Ritter", "Curry", "Burger", "Mc", "Doktor", "Gentoomaster", "Chef", "Lead Developer"]
firstFrag = ["Schm", "J", "Hans-J", "K", "G", "Gr", "B", "Str", "Kr", "Rask"]
secondFrag = ["oerg", "öck", "öhhhrk", "öhrp", "egor", "oeg", "ock"]
thirdFrag = ["inger", "erino", "aroni", "us", "sell", "topus", "thulu", "tain", "rid", "odil", "ette", "nikov"]
nobleAnnex = ["I.", "II.", "III.", "Royale", "dem Allmächtigen", "dem Weisen", "dem hochgradig Intelligenten", "dem Unendlichen", "dem Allwissenden", "dem Gentoobändiger", "dem Meisterinformatiker"]
wisdoms = ["Linux ist voll doof!", "Ich stehe immer um 7.00 Uhr auf!", "Tut schön viel Frischkäse in die Nudelsoße!", "Mensen um 11.00 Uhr ist eine super Sache!", "Ich habe WinRar gekauft!", "Für einen längeren XP-Supportzeitraum!", "Fasst meinen Laptopbildschirm an!", "Natürlich code ich dieses Feature für euch, ganz ohne Pull Request!", "Maxime ist ein toller Papa!", "Hirtenkäsepizza ist die beste!", "Sauerkraut ist doch ekelhaft!", "Mein Lieblingsbrowser ist ja der Internet Explorer!", "Rechtschreibfehler in Kommentaren? Voll okay!", "Party? Warum nicht bei mir zu Hause?", "Irgendwas mit dynamisch Parameter injecten!", "Wie war das mit den Speisezeiten?", "Ich kaufe nur bei Nvidia!", "Wer braucht schon Open Source...", "KöckOS? Kommt noch diese Woche raus!", "Die besten Witze sind Deine-Mutter-Witze!", "Mein Lieblings-OS ist iOS!", "Ein Halloumiburger ist eine eigenständige Mahlzeit!", "Ich kaufe mir ein MacBook!", "Ich fange wieder mit Medieninformatik an!", "Ich liebe Ubuntu!", "Verschlüsselung ist doch Unsinn!", "Machen wir alle ne gemeinsame WG auf?"]
haes = ["HÄ?", "VALORANT?", "WIE", "WANN", "WO", "Geller muss erst noch zu Ende essen!", "???", "*Random Katzenbild*", "Erstmal Valorant!", "ICH HASSE EUCH ALLE", "HÄÄÄ", "ICH ARBEITE", "ICH HASSE DEN", "FUCK YOU", "WIRKLICH", "BITTE", "Natürlich ist das gelb!", "Es gibt Kuchen!", "Wir haben wieder viel zu viel Lasagne!", "Oke", "WAS", "WAS MEINST DU", "WAS WILLST DU DENN JETZT SCHON WIEDER", "Alter", "Wirst schon sehen", "Denk nach du Schwamm", "Stop", "NICHT COOL", "TROLL NICHT RUM", "Uff", "AAAAARGH", "Kann den jemand kicken?", "DU HAST NUR ANGST VOR MIR", "EKELHAFT", "ICH HASSE ALLES", "WOFÜR", "ICH BIN IMMER SO", "KUCHEN", "LASAGNE", "SCHANDE", "WARUM ICH", "ICH LIEBE ARBEITEN", "ICH HASSE UNPÜNKTLICHKEIT", "IDIOT", "HEY", "WO SEID IHR", "WAS SONST", "KIBA", "HAHA", "VERSTEHT IHR DAS NICHT", "SEID IHR DUMM ODER WAS", "WTF", "RED DEUTSCH MIT MIR", "OMG", "LOL", ":)", "MIR IST LANGWEILIG", "ALS OB IHR ALLE SCHON SCHLAFT", "HALLO", "WEIß ICH NICHT", "WER DENKT SICH DAS AUS", "ICH SPRING LIEBER AUS DEM FENSTER", "NE"]
class NotifyUserException(Exception):
    """Raised whenever an error message needs to be propagated to the user."""
def start(update, context):
    """Handler for /start: greet the chat with the bot's motto."""
    context.bot.send_message(chat_id=update.message.chat_id,
                             text="Reichenbach is never an option!")


def echoText(update, context):
    """Echo any plain-text message back to the chat it came from."""
    chat = update.message.chat_id
    context.bot.send_message(chat_id=chat, text=update.message.text)


def echoSticker(update, context):
    """Echo any sticker back to the chat it came from."""
    chat = update.message.chat_id
    context.bot.send_sticker(chat_id=chat, sticker=update.message.sticker)
def mensa(update, context):
    """Handler for /mensa [days]: post the canteen menu via OpenMensa.

    The optional first argument is an integer day offset relative to the
    message date.  Vegetarian/vegan meals are sent in bold Markdown, all
    other meals in italics.
    """
    args = context.args
    offset_days = 0
    if args:
        try:
            offset_days = int(args[0])
        except ValueError:
            context.bot.send_message(
                chat_id=update.message.chat_id,
                text="The first and only parameter has to be an integer value. Aborting.")
            return
    day = update.message.date.date() + timedelta(days=offset_days)
    url = ("https://openmensa.org/api/v2/canteens/79/days/"
           + day.strftime("%Y-%m-%d") + "/meals")
    resp = requests.get(url)
    if not resp.ok:
        context.bot.send_message(chat_id=update.message.chat_id,
                                 text="I failed miserably. Disgrace!")
        return
    for meal in json.loads(resp.content):
        notes = meal["notes"]
        marker = "*" if ("vegetarisch" in notes or "vegan" in notes) else "_"
        context.bot.send_message(chat_id=update.message.chat_id,
                                 text=marker + meal["name"] + marker,
                                 parse_mode="Markdown")
def andre(update, context):
    """Handler for /andre: post the running gag."""
    context.bot.send_message(chat_id=update.message.chat_id,
                             text="Höhöhö Reichenbach!")


def leon(update, context):
    """Handler for /leon: post a random dad joke (English)."""
    context.bot.send_message(chat_id=update.message.chat_id, text=dadJoke())


def loen(update, context):
    """Handler for /loen: post a random dad joke translated to German."""
    translator = translate.Translator(from_lang='en', to_lang='de')
    context.bot.send_message(chat_id=update.message.chat_id,
                             text=translator.translate(dadJoke()))
def dadJoke():
    """Fetch a random joke from icanhazdadjoke.com as plain text.

    Returns a fallback error string when the HTTP request fails.
    """
    resp = requests.get("https://icanhazdadjoke.com/",
                        headers={'Accept': 'text/plain '})
    return resp.text if resp.ok else "I failed miserably. Disgrace!"
def georg(update, context):
    """Handler for /georg: link the Arch Linux installation guide."""
    context.bot.send_message(
        chat_id=update.message.chat_id,
        text="https://wiki.archlinux.org/index.php/Installation_guide")


def maxime(update, context):
    """Handler for /maxime: post a fixed sticker."""
    context.bot.send_sticker(chat_id=update.message.chat_id,
                             sticker="CAADBQADfAMAAukKyAPfAAFRgAuYdNoWBA")


def andrey(update, context):
    """Handler for /andrey: announce the 11.00 lunch call."""
    context.bot.send_message(chat_id=update.message.chat_id,
                             text="11.00 Bois. Yeef!")


def steffuu(update, context):
    """Handler for /steffuu: post a random exclamation from ``haes``."""
    context.bot.send_message(chat_id=update.message.chat_id,
                             text=random.choice(haes))
def getXkcd(id, rand):
    """Fetch metadata for an xkcd comic.

    id:   comic number to fetch (ignored when *rand* is true).
    rand: when true, pick a random comic between 1 and the newest one.

    Returns a tuple (id, image_url, title).  Raises NotifyUserException
    with a user-facing message on HTTP failure or an out-of-range id.
    """
    resp = requests.get("https://xkcd.com/info.0.json")
    if not resp.ok:
        raise NotifyUserException("I failed miserably. Disgrace!")
    upperLimit = json.loads(resp.content)["num"]
    if rand:
        id = random.randint(1, upperLimit)
    elif id > upperLimit:
        raise NotifyUserException(
            "Id not in range. Maximum id currently is " + str(upperLimit) + ".")
    resp = requests.get("https://xkcd.com/" + str(id) + "/info.0.json")
    if not resp.ok:
        raise NotifyUserException("I failed miserably. Disgrace!")
    data = json.loads(resp.content)
    return (id, data["img"], data["title"])
def xkcd(update, context):
    """Handler for /xkcd [id]: post an xkcd comic.

    Without an argument a random comic is chosen; with one, the argument
    must be a positive integer comic id.
    """
    params = context.args
    # Single copy of the previously duplicated validation message.
    error_text = ("The first and only parameter has to be a positive integer "
                  "value greater than 0. Aborting.")
    rand = not params
    comic_id = 0  # renamed from `id` to avoid shadowing the builtin
    if params:
        try:
            comic_id = int(params[0])
        except ValueError:
            context.bot.send_message(chat_id=update.message.chat_id,
                                     text=error_text)
            return
        if comic_id < 1:
            context.bot.send_message(chat_id=update.message.chat_id,
                                     text=error_text)
            return
    try:
        comic = getXkcd(comic_id, rand)
    except NotifyUserException as error:
        context.bot.send_message(chat_id=update.message.chat_id,
                                 text=str(error))
        return
    context.bot.send_photo(chat_id=update.message.chat_id, photo=comic[1],
                           caption=str(comic[0]) + " - " + comic[2])
def decision(update, context):
    """Handler for /decision: answer yes/no via yesno.wtf with a gif.

    On HTTP failure the user is notified directly (previously this raised
    NotifyUserException, which no dispatcher error handler catches, so the
    user got no feedback at all).
    """
    resp = requests.get("https://yesno.wtf/api/",
                        headers={'Accept': 'text/plain '})
    if not resp.ok:
        context.bot.send_message(chat_id=update.message.chat_id,
                                 text="I failed miserably. Disgrace!")
        return
    data = json.loads(resp.text)
    context.bot.send_animation(chat_id=update.message.chat_id,
                               animation=data["image"],
                               caption=data["answer"])
def subredditImg(subreddit, offset=0, count=5):
    """Return image URLs from the hot posts of *subreddit*.

    offset: number of leading hot posts to skip before collecting
            (previously this argument was silently ignored).
    count:  number of hot posts (after the offset) to inspect.

    Only direct links with a known image file extension are returned.
    """
    image_suffixes = (".png", ".jpg", ".jpeg", ".webp", ".gif")
    reddit = praw.Reddit(client_id=REDDIT_BOT_ID,
                         client_secret=REDDIT_BOT_SECRET,
                         user_agent=REDDIT_USER_AGENT)
    images = []
    # Fetch offset + count posts so the first `offset` can be skipped.
    for index, post in enumerate(
            reddit.subreddit(subreddit).hot(limit=offset + count)):
        if index < offset:
            continue
        if str(post.url).endswith(image_suffixes):
            images.append(post.url)
    return images
def r(update, context):
    """Handler for /r <subreddit> [offset]: post images from a subreddit.

    The optional second argument skips that many leading hot posts.
    """
    params = context.args
    offset = 0
    if len(params) < 1:
        context.bot.send_message(
            chat_id=update.message.chat_id,
            text="The first parameter has to be a string identifying the requested subreddit. Aborting.")
        return
    subreddit = params[0]
    offset_error = "The second parameter has to be a positive integer value. Aborting."
    if len(params) > 1:
        try:
            offset = int(params[1])
        except ValueError:
            context.bot.send_message(chat_id=update.message.chat_id,
                                     text=offset_error)
            return
        if offset < 0:
            context.bot.send_message(chat_id=update.message.chat_id,
                                     text=offset_error)
            return
    try:
        # Bug fix: the parsed offset was previously dropped, so the second
        # parameter had no effect.
        images = subredditImg(subreddit, offset=offset)
    except Exception:
        context.bot.send_message(chat_id=update.message.chat_id,
                                 text="Something went wrong internally. I am deeply sorry.")
        return
    if len(images) == 0:
        context.bot.send_message(chat_id=update.message.chat_id,
                                 text="There are no images in the top 5 posts.")
        return
    for image in images:
        context.bot.send_photo(chat_id=update.message.chat_id, photo=image)
def cat(update, context):
    """Handler for /cat: post a generated cat picture (cache-busted URL)."""
    cache_buster = str(time.time()) + str(random.randint(1, 1024))
    context.bot.send_photo(
        chat_id=update.message.chat_id,
        photo="https://thiscatdoesnotexist.com?time=" + cache_buster)


def horse(update, context):
    """Handler for /horse: post a generated horse picture (cache-busted URL)."""
    cache_buster = str(time.time()) + str(random.randint(1, 1024))
    context.bot.send_photo(
        chat_id=update.message.chat_id,
        photo="https://thishorsedoesnotexist.com?time=" + cache_buster)
def person(update, context):
    """Handler for /person: post a generated face from thispersondoesnotexist."""
    url = ("https://thispersondoesnotexist.com/image?time="
           + str(time.time()) + str(random.randint(1, 1024)))
    # Bug fix: the constant name was previously quoted, so the literal
    # string 'USER_AGENT_BROWSER' was sent as the User-Agent header.
    resp = requests.get(url, headers={'User-Agent': USER_AGENT_BROWSER})
    if not resp.ok:
        context.bot.send_message(chat_id=update.message.chat_id,
                                 text="Something went wrong internally. I am deeply sorry.")
        return
    with io.BytesIO(resp.content) as buf:
        context.bot.send_photo(chat_id=update.message.chat_id, photo=buf)
def wisdom(update, context):
    """Handler for /wisdom: post a randomly assembled 'wisdom' quote."""
    context.bot.send_message(chat_id=update.message.chat_id,
                             text=createWisdomString())
def createWisdomString():
    """Assemble a random 'wisdom of the day' sentence.

    A noble title, a third name fragment and a noble annex are each
    included with 50% probability around a mandatory two-fragment name.
    """
    # Keep the same random-call order as before: one coin flip (plus an
    # optional choice) per optional component, then the mandatory parts.
    noble = random.choice(royalTitles) if random.getrandbits(1) else None
    third = random.choice(thirdFrag) if random.getrandbits(1) else None
    annex = random.choice(nobleAnnex) if random.getrandbits(1) else None
    name = random.choice(firstFrag) + random.choice(secondFrag)
    if third:
        name += third
    parts = ["Die heutige Weisheit von"]
    if noble:
        parts.append(noble)
    parts.append(name)
    if annex:
        parts.append(annex)
    return " ".join(parts) + ": " + random.choice(wisdoms)
def choose(update, context):
    """Handler for /choose a b c ...: pick one of the given options at random."""
    params = context.args
    if len(params) < 1:
        context.bot.send_message(
            chat_id=update.message.chat_id,
            text="You know, I can't choose if there is nothing to choose from. Wise words!")
    elif len(params) == 1:
        # Fixed a missing word in the original message ("when only value").
        context.bot.send_message(
            chat_id=update.message.chat_id,
            text="How the hell am I supposed to choose when only one value is entered? Gosh.")
    else:
        context.bot.send_message(
            chat_id=update.message.chat_id,
            text=random.choice(params) + " shall be my answer!")
def inlineR(update, context):
    """Inline query handler: treat the query as a subreddit name and offer
    images from its hot posts as inline photo results."""
    query = update.inline_query.query
    results = []
    try:
        images = subredditImg(query, count=40)
    except Exception:
        images = []
    if not images:
        # Bug fix: the empty-result branch previously passed the bare string
        # "No!" where the Bot API expects an InputTextMessageContent (the
        # exception branch already did this correctly).
        results.append(tg.InlineQueryResultArticle(
            0, "No", tg.InputTextMessageContent("No!")))
    else:
        for img in images:
            results.append(tg.InlineQueryResultPhoto(img, img, img))
    update.inline_query.answer(results)
def main():
    """Configure logging, register all handlers and run the webhook bot."""
    logging.basicConfig(
        format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
        level=logging.INFO)
    API_TOKEN = os.environ['TELEGRAM_APITOKEN']
    APP_ADDR = os.environ['APP_ADDRESS']
    PORT = int(os.environ.get('PORT', '8443'))
    updater = Updater(token=API_TOKEN, use_context=True)

    # Register every command handler BEFORE the catch-all text echo handler.
    # Previously /cat, /horse, /person, /wisdom and /choose were added after
    # MessageHandler(Filters.text); depending on the python-telegram-bot
    # version that filter also matches command messages, so those commands
    # could be swallowed by the echo handler and never fire.
    commands = [
        ('start', start), ('mensa', mensa), ('andre', andre), ('leon', leon),
        ('georg', georg), ('loen', loen), ('maxime', maxime),
        ('andrey', andrey), ('steffuu', steffuu), ('xkcd', xkcd),
        ('decision', decision), ('r', r), ('cat', cat), ('horse', horse),
        ('person', person), ('wisdom', wisdom), ('choose', choose),
    ]
    for name, callback in commands:
        updater.dispatcher.add_handler(CommandHandler(name, callback))
    updater.dispatcher.add_handler(MessageHandler(Filters.text, echoText))
    updater.dispatcher.add_handler(MessageHandler(Filters.sticker, echoSticker))
    updater.dispatcher.add_handler(InlineQueryHandler(inlineR))

    updater.start_webhook(listen="0.0.0.0", port=PORT, url_path=API_TOKEN)
    updater.bot.set_webhook(APP_ADDR + API_TOKEN)
    updater.idle()


if __name__ == "__main__":
    main()
| 40.395225 | 1,074 | 0.690919 |
from telegram.ext import Updater, CommandHandler, MessageHandler, Filters, InlineQueryHandler
import telegram as tg
import requests
import json
import os
import io
import time
import logging
from datetime import timedelta
import translate
import random
import praw
REDDIT_BOT_ID = os.environ['REDDIT_BOT_ID']
REDDIT_BOT_SECRET = os.environ['REDDIT_BOT_SECRET']
REDDIT_USER_AGENT = os.environ['REDDIT_USER_AGENT']
USER_AGENT_BROWSER = 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.132 Safari/537.36'
royalTitles = ["Lé", "Baron", "König", "Archlord", "Genius", "Ritter", "Curry", "Burger", "Mc", "Doktor", "Gentoomaster", "Chef", "Lead Developer"]
firstFrag = ["Schm", "J", "Hans-J", "K", "G", "Gr", "B", "Str", "Kr", "Rask"]
secondFrag = ["oerg", "öck", "öhhhrk", "öhrp", "egor", "oeg", "ock"]
thirdFrag = ["inger", "erino", "aroni", "us", "sell", "topus", "thulu", "tain", "rid", "odil", "ette", "nikov"]
nobleAnnex = ["I.", "II.", "III.", "Royale", "dem Allmächtigen", "dem Weisen", "dem hochgradig Intelligenten", "dem Unendlichen", "dem Allwissenden", "dem Gentoobändiger", "dem Meisterinformatiker"]
wisdoms = ["Linux ist voll doof!", "Ich stehe immer um 7.00 Uhr auf!", "Tut schön viel Frischkäse in die Nudelsoße!", "Mensen um 11.00 Uhr ist eine super Sache!", "Ich habe WinRar gekauft!", "Für einen längeren XP-Supportzeitraum!", "Fasst meinen Laptopbildschirm an!", "Natürlich code ich dieses Feature für euch, ganz ohne Pull Request!", "Maxime ist ein toller Papa!", "Hirtenkäsepizza ist die beste!", "Sauerkraut ist doch ekelhaft!", "Mein Lieblingsbrowser ist ja der Internet Explorer!", "Rechtschreibfehler in Kommentaren? Voll okay!", "Party? Warum nicht bei mir zu Hause?", "Irgendwas mit dynamisch Parameter injecten!", "Wie war das mit den Speisezeiten?", "Ich kaufe nur bei Nvidia!", "Wer braucht schon Open Source...", "KöckOS? Kommt noch diese Woche raus!", "Die besten Witze sind Deine-Mutter-Witze!", "Mein Lieblings-OS ist iOS!", "Ein Halloumiburger ist eine eigenständige Mahlzeit!", "Ich kaufe mir ein MacBook!", "Ich fange wieder mit Medieninformatik an!", "Ich liebe Ubuntu!", "Verschlüsselung ist doch Unsinn!", "Machen wir alle ne gemeinsame WG auf?"]
haes = ["HÄ?", "VALORANT?", "WIE", "WANN", "WO", "Geller muss erst noch zu Ende essen!", "???", "*Random Katzenbild*", "Erstmal Valorant!", "ICH HASSE EUCH ALLE", "HÄÄÄ", "ICH ARBEITE", "ICH HASSE DEN", "FUCK YOU", "WIRKLICH", "BITTE", "Natürlich ist das gelb!", "Es gibt Kuchen!", "Wir haben wieder viel zu viel Lasagne!", "Oke", "WAS", "WAS MEINST DU", "WAS WILLST DU DENN JETZT SCHON WIEDER", "Alter", "Wirst schon sehen", "Denk nach du Schwamm", "Stop", "NICHT COOL", "TROLL NICHT RUM", "Uff", "AAAAARGH", "Kann den jemand kicken?", "DU HAST NUR ANGST VOR MIR", "EKELHAFT", "ICH HASSE ALLES", "WOFÜR", "ICH BIN IMMER SO", "KUCHEN", "LASAGNE", "SCHANDE", "WARUM ICH", "ICH LIEBE ARBEITEN", "ICH HASSE UNPÜNKTLICHKEIT", "IDIOT", "HEY", "WO SEID IHR", "WAS SONST", "KIBA", "HAHA", "VERSTEHT IHR DAS NICHT", "SEID IHR DUMM ODER WAS", "WTF", "RED DEUTSCH MIT MIR", "OMG", "LOL", ":)", "MIR IST LANGWEILIG", "ALS OB IHR ALLE SCHON SCHLAFT", "HALLO", "WEIß ICH NICHT", "WER DENKT SICH DAS AUS", "ICH SPRING LIEBER AUS DEM FENSTER", "NE"]
class NotifyUserException(Exception):
pass
def start(update, context):
context.bot.send_message(chat_id=update.message.chat_id, text="Reichenbach is never an option!")
def echoText(update, context):
context.bot.send_message(chat_id=update.message.chat_id, text=update.message.text)
def echoSticker(update, context):
sticker = update.message.sticker
context.bot.send_sticker(chat_id=update.message.chat_id, sticker=sticker)
def mensa(update, context):
params = context.args
if len(params) < 1:
daysToAdd = 0
else:
try:
daysToAdd = int(params[0])
except ValueError:
context.bot.send_message(chat_id=update.message.chat_id, text="The first and only parameter has to be an integer value. Aborting.")
return
day = update.message.date.date() + timedelta(days=daysToAdd)
url = "https://openmensa.org/api/v2/canteens/79/days/" + day.strftime("%Y-%m-%d") + "/meals"
resp = requests.get(url)
if not resp.ok:
context.bot.send_message(chat_id=update.message.chat_id, text="I failed miserably. Disgrace!")
return
jsonData = json.loads(resp.content)
for elem in jsonData:
mealNotes = elem["notes"]
if "vegetarisch" in mealNotes or "vegan" in mealNotes:
context.bot.send_message(chat_id=update.message.chat_id, text="*" + elem["name"] + "*", parse_mode="Markdown")
else:
context.bot.send_message(chat_id=update.message.chat_id, text="_" + elem["name"] + "_", parse_mode="Markdown")
def andre(update, context):
context.bot.send_message(chat_id=update.message.chat_id, text="Höhöhö Reichenbach!")
def leon(update, context):
joke = dadJoke()
context.bot.send_message(chat_id=update.message.chat_id, text=joke)
def loen(update, context):
joke = dadJoke()
translator = translate.Translator(from_lang='en', to_lang='de')
translatedJoke = translator.translate(joke)
context.bot.send_message(chat_id=update.message.chat_id, text=translatedJoke)
def dadJoke():
headers = {'Accept': 'text/plain '}
resp = requests.get("https://icanhazdadjoke.com/", headers=headers)
if not resp.ok:
return "I failed miserably. Disgrace!"
return resp.text
def georg(update, context):
context.bot.send_message(chat_id=update.message.chat_id, text="https://wiki.archlinux.org/index.php/Installation_guide")
def maxime(update, context):
context.bot.send_sticker(chat_id=update.message.chat_id, sticker="CAADBQADfAMAAukKyAPfAAFRgAuYdNoWBA")
def andrey(update, context):
context.bot.send_message(chat_id=update.message.chat_id, text="11.00 Bois. Yeef!")
def steffuu(update, context):
context.bot.send_message(chat_id=update.message.chat_id, text=random.choice(haes))
def getXkcd(id, rand):
resp = requests.get("https://xkcd.com/info.0.json")
if not resp.ok:
raise NotifyUserException("I failed miserably. Disgrace!")
jsonData = json.loads(resp.content)
upperLimit = jsonData["num"]
if rand:
id = random.randint(1, upperLimit)
elif id > upperLimit:
raise NotifyUserException("Id not in range. Maximum id currently is " + str(upperLimit) + ".")
resp = requests.get("https://xkcd.com/" + str(id) + "/info.0.json")
if not resp.ok:
raise NotifyUserException("I failed miserably. Disgrace!")
jsonData = json.loads(resp.content)
return (id, jsonData["img"], jsonData["title"])
def xkcd(update, context):
params = context.args
rand = False
id = 0
if len(params) < 1:
rand = True
else:
try:
id = int(params[0])
except ValueError:
context.bot.send_message(chat_id=update.message.chat_id, text="The first and only parameter has to be a positive integer value greater than 0. Aborting.")
return
if id < 1:
context.bot.send_message(chat_id=update.message.chat_id, text="The first and only parameter has to be a positive integer value greater than 0. Aborting.")
return
try:
xkcd = getXkcd(id, rand)
except NotifyUserException as error:
context.bot.send_message(chat_id=update.message.chat_id, text=str(error))
return
context.bot.send_photo(chat_id=update.message.chat_id, photo=xkcd[1], caption=str(xkcd[0]) + " - " + xkcd[2])
def decision(update, context):
headers = {'Accept': 'text/plain '}
resp = requests.get("https://yesno.wtf/api/", headers=headers)
if not resp.ok:
raise NotifyUserException("oof")
data = json.loads(resp.text)
context.bot.send_animation(chat_id=update.message.chat_id, animation=data["image"], caption=data["answer"])
def subredditImg(subreddit, offset=0, count=5):
imageFileEndings = [".png", ".jpg", ".jpeg", ".webp", ".gif"]
reddit = praw.Reddit(client_id=REDDIT_BOT_ID, client_secret=REDDIT_BOT_SECRET, user_agent=REDDIT_USER_AGENT)
images = []
for post in reddit.subreddit(subreddit).hot(limit=count):
for ending in imageFileEndings:
if str(post.url).endswith(ending):
images.append(post.url)
return images
def r(update, context):
params = context.args
offset = 0
if len(params) < 1:
context.bot.send_message(chat_id=update.message.chat_id, text="The first parameter has to be a string identifying the requested subreddit. Aborting.")
return
subreddit = params[0]
if len(params) > 1:
try:
offset = int(params[1])
except ValueError:
context.bot.send_message(chat_id=update.message.chat_id, text="The second parameter has to be a positive integer value. Aborting.")
return
if offset < 0:
context.bot.send_message(chat_id=update.message.chat_id, text="The second parameter has to be a positive integer value. Aborting.")
return
try:
images = subredditImg(subreddit)
except Exception:
context.bot.send_message(chat_id=update.message.chat_id, text="Something went wrong internally. I am deeply sorry.")
return
if len(images) == 0:
context.bot.send_message(chat_id=update.message.chat_id, text="There are no images in the top 5 posts.")
return
for image in images:
context.bot.send_photo(chat_id=update.message.chat_id, photo=image)
def cat(update, context):
context.bot.send_photo(
chat_id=update.message.chat_id,
photo="https://thiscatdoesnotexist.com?time=" + str(time.time()) + str(random.randint(1, 1024))
)
def horse(update, context):
context.bot.send_photo(
chat_id=update.message.chat_id,
photo="https://thishorsedoesnotexist.com?time=" + str(time.time()) + str(random.randint(1, 1024))
)
def person(update, context):
resp = requests.get("https://thispersondoesnotexist.com/image?time=" + str(time.time()) + str(random.randint(1, 1024)), headers={'User-Agent': 'USER_AGENT_BROWSER'})
if not resp.ok:
context.bot.send_message(chat_id=update.message.chat_id, text="Something went wrong internally. I am deeply sorry.")
return
with io.BytesIO(resp.content) as buf:
context.bot.send_photo(chat_id=update.message.chat_id, photo=buf)
def wisdom(update, context):
wisdom = createWisdomString()
context.bot.send_message(chat_id=update.message.chat_id, text=wisdom)
def createWisdomString():
optionalNoble = None
optionalThird = None
optionalAnnex = None
if bool(random.getrandbits(1)):
optionalNoble = random.choice(royalTitles)
if bool(random.getrandbits(1)):
optionalThird = random.choice(thirdFrag)
if bool(random.getrandbits(1)):
optionalAnnex = random.choice(nobleAnnex)
mainBody = random.choice(firstFrag) + random.choice(secondFrag)
output = "Die heutige Weisheit von "
if optionalNoble:
output += optionalNoble + " " + mainBody
else:
output += mainBody
if optionalThird:
output += optionalThird
if optionalAnnex:
output += " " + optionalAnnex
output += ": " + random.choice(wisdoms)
return output
def choose(update, context):
params = context.args
if len(params) < 1:
context.bot.send_message(chat_id=update.message.chat_id, text="You know, I can't choose if there is nothing to choose from. Wise words!")
return
elif len(params) == 1:
context.bot.send_message(chat_id=update.message.chat_id, text="How the hell am I supposed to choose when only value is entered? Gosh.")
return
else:
context.bot.send_message(chat_id=update.message.chat_id, text=random.choice(params) + " shall be my answer!")
def inlineR(update, context):
query = update.inline_query.query
results = []
try:
images = subredditImg(query, count=40)
except Exception:
results.append(tg.InlineQueryResultArticle(0, "No", tg.InputTextMessageContent("No!")))
else:
if len(images) == 0:
results.append(tg.InlineQueryResultArticle(0, "No", "No!", ))
else:
for img in images:
results.append(tg.InlineQueryResultPhoto(img, img, img))
finally:
update.inline_query.answer(results)
def main():
logging.basicConfig(format='%(asctime)s - %(name)s - %(levelname)s - %(message)s', level=logging.INFO)
API_TOKEN = os.environ['TELEGRAM_APITOKEN']
APP_ADDR = os.environ['APP_ADDRESS']
PORT = int(os.environ.get('PORT', '8443'))
updater = Updater(token=API_TOKEN, use_context=True)
startHandler = CommandHandler('start', start)
updater.dispatcher.add_handler(startHandler)
mensaHandler = CommandHandler('mensa', mensa)
updater.dispatcher.add_handler(mensaHandler)
andreHandler = CommandHandler('andre', andre)
updater.dispatcher.add_handler(andreHandler)
leonHandler = CommandHandler('leon', leon)
updater.dispatcher.add_handler(leonHandler)
georgHandler = CommandHandler('georg', georg)
updater.dispatcher.add_handler(georgHandler)
loenHandler = CommandHandler('loen', loen)
updater.dispatcher.add_handler(loenHandler)
maximeHandler = CommandHandler('maxime', maxime)
updater.dispatcher.add_handler(maximeHandler)
andreyHandler = CommandHandler('andrey', andrey)
updater.dispatcher.add_handler(andreyHandler)
steffuuHandler = CommandHandler('steffuu', steffuu)
updater.dispatcher.add_handler(steffuuHandler)
xkcdHandler = CommandHandler('xkcd', xkcd)
updater.dispatcher.add_handler(xkcdHandler)
decisionHandler = CommandHandler('decision', decision)
updater.dispatcher.add_handler(decisionHandler)
redditImgHandler = CommandHandler('r', r)
updater.dispatcher.add_handler(redditImgHandler)
echoHandlerText = MessageHandler(Filters.text, echoText)
updater.dispatcher.add_handler(echoHandlerText)
echoHandlerSticker = MessageHandler(Filters.sticker, echoSticker)
updater.dispatcher.add_handler(echoHandlerSticker)
catHandler = CommandHandler('cat', cat)
updater.dispatcher.add_handler(catHandler)
horseHandler = CommandHandler('horse', horse)
updater.dispatcher.add_handler(horseHandler)
personHandler = CommandHandler('person', person)
updater.dispatcher.add_handler(personHandler)
wisdomHandler = CommandHandler('wisdom', wisdom)
updater.dispatcher.add_handler(wisdomHandler)
chooseHandler = CommandHandler('choose', choose)
updater.dispatcher.add_handler(chooseHandler)
inlineRedditHandler = InlineQueryHandler(inlineR)
updater.dispatcher.add_handler(inlineRedditHandler)
updater.start_webhook(listen="0.0.0.0", port=PORT, url_path=API_TOKEN)
updater.bot.set_webhook(APP_ADDR + API_TOKEN)
updater.idle()
if __name__ == "__main__":
main()
| true | true |
f720ccd4ee2f6948386979975d4872da8241f475 | 232 | py | Python | handroll/i18n.py | mblayman/handroll | 42703cf5c969dccd0eb0715402ab84056ab65e22 | [
"BSD-2-Clause"
] | null | null | null | handroll/i18n.py | mblayman/handroll | 42703cf5c969dccd0eb0715402ab84056ab65e22 | [
"BSD-2-Clause"
] | null | null | null | handroll/i18n.py | mblayman/handroll | 42703cf5c969dccd0eb0715402ab84056ab65e22 | [
"BSD-2-Clause"
] | null | null | null | # Copyright (c) 2014, Matt Layman
import gettext
import os
localedir = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'locale')
translate = gettext.translation('handroll', localedir, fallback=True)
_ = translate.gettext
| 25.777778 | 78 | 0.762931 |
import gettext
import os
localedir = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'locale')
translate = gettext.translation('handroll', localedir, fallback=True)
_ = translate.gettext
| true | true |
f720cf1b4711518700b108a7d64fb57a175679e5 | 18,297 | py | Python | neutron/tests/functional/plugins/ml2/drivers/ovn/mech_driver/test_mech_driver.py | huiweics/neutron | 8c7ca776d8cbe967a8bbe773ab38c361414a7068 | [
"Apache-2.0"
] | null | null | null | neutron/tests/functional/plugins/ml2/drivers/ovn/mech_driver/test_mech_driver.py | huiweics/neutron | 8c7ca776d8cbe967a8bbe773ab38c361414a7068 | [
"Apache-2.0"
] | null | null | null | neutron/tests/functional/plugins/ml2/drivers/ovn/mech_driver/test_mech_driver.py | huiweics/neutron | 8c7ca776d8cbe967a8bbe773ab38c361414a7068 | [
"Apache-2.0"
] | null | null | null | # Copyright 2016 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import functools
import mock
from oslo_config import cfg
from oslo_utils import uuidutils
from neutron.common.ovn import constants as ovn_const
from neutron.common.ovn import utils
from neutron.common import utils as n_utils
from neutron.db import ovn_revision_numbers_db as db_rev
from neutron.tests.functional import base
class TestPortBinding(base.TestOVNFunctionalBase):
    """Checks binding:vif_type/vif_details produced by the OVN mech driver.

    Three fake chassis are registered: a plain OVS host, a DPDK host whose
    iface-types include 'dpdkvhostuser', and a netdev host without
    vhost-user support (expected to fall back to plain 'ovs' binding).
    """

    def setUp(self):
        super(TestPortBinding, self).setUp()
        self.ovs_host = 'ovs-host'
        self.dpdk_host = 'dpdk-host'
        self.invalid_dpdk_host = 'invalid-host'
        self.vhu_mode = 'server'
        self.add_fake_chassis(self.ovs_host)
        # netdev datapath advertising dpdkvhostuser -> vhostuser binding
        # (asserted in the tests below).
        self.add_fake_chassis(
            self.dpdk_host,
            external_ids={'datapath-type': 'netdev',
                          'iface-types': 'dummy,dummy-internal,dpdkvhostuser'})

        # netdev datapath without dpdkvhostuser -> plain 'ovs' binding.
        self.add_fake_chassis(
            self.invalid_dpdk_host,
            external_ids={'datapath-type': 'netdev',
                          'iface-types': 'dummy,dummy-internal,geneve,vxlan'})
        self.n1 = self._make_network(self.fmt, 'n1', True)
        res = self._create_subnet(self.fmt, self.n1['network']['id'],
                                  '10.0.0.0/24')
        self.deserialize(self.fmt, res)

    def _create_or_update_port(self, port_id=None, hostname=None):
        """Creates a port (port_id is None) or rebinds an existing one.

        When hostname is given, the port is bound to that host as a
        'compute:None' device.  Returns the port id.
        """
        if port_id is None:
            port_data = {
                'port': {'network_id': self.n1['network']['id'],
                         'tenant_id': self._tenant_id}}

            if hostname:
                port_data['port']['device_id'] = uuidutils.generate_uuid()
                port_data['port']['device_owner'] = 'compute:None'
                port_data['port']['binding:host_id'] = hostname

            port_req = self.new_create_request('ports', port_data, self.fmt)
            port_res = port_req.get_response(self.api)
            p = self.deserialize(self.fmt, port_res)
            port_id = p['port']['id']
        else:
            port_data = {
                'port': {'device_id': uuidutils.generate_uuid(),
                         'device_owner': 'compute:None',
                         'binding:host_id': hostname}}
            port_req = self.new_update_request('ports', port_data, port_id,
                                               self.fmt)
            port_res = port_req.get_response(self.api)
            self.deserialize(self.fmt, port_res)

        return port_id

    def _verify_vif_details(self, port_id, expected_host_name,
                            expected_vif_type, expected_vif_details):
        """Asserts the port's binding host, vif_type and vif_details."""
        port_req = self.new_show_request('ports', port_id)
        port_res = port_req.get_response(self.api)
        p = self.deserialize(self.fmt, port_res)
        self.assertEqual(expected_host_name, p['port']['binding:host_id'])
        self.assertEqual(expected_vif_type, p['port']['binding:vif_type'])
        self.assertEqual(expected_vif_details,
                         p['port']['binding:vif_details'])

    def test_port_binding_create_port(self):
        # Bind at creation time against each of the three chassis.
        port_id = self._create_or_update_port(hostname=self.ovs_host)
        self._verify_vif_details(port_id, self.ovs_host, 'ovs',
                                 {'port_filter': True})

        port_id = self._create_or_update_port(hostname=self.dpdk_host)
        expected_vif_details = {'port_filter': False,
                                'vhostuser_mode': self.vhu_mode,
                                'vhostuser_ovs_plug': True}
        expected_vif_details['vhostuser_socket'] = (
            utils.ovn_vhu_sockpath(cfg.CONF.ovn.vhost_sock_dir, port_id))
        self._verify_vif_details(port_id, self.dpdk_host, 'vhostuser',
                                 expected_vif_details)

        port_id = self._create_or_update_port(hostname=self.invalid_dpdk_host)
        self._verify_vif_details(port_id, self.invalid_dpdk_host, 'ovs',
                                 {'port_filter': True})

    def test_port_binding_update_port(self):
        # Create unbound first, then rebind the same port host by host.
        port_id = self._create_or_update_port()
        self._verify_vif_details(port_id, '', 'unbound', {})
        port_id = self._create_or_update_port(port_id=port_id,
                                              hostname=self.ovs_host)
        self._verify_vif_details(port_id, self.ovs_host, 'ovs',
                                 {'port_filter': True})

        port_id = self._create_or_update_port(port_id=port_id,
                                              hostname=self.dpdk_host)
        expected_vif_details = {'port_filter': False,
                                'vhostuser_mode': self.vhu_mode,
                                'vhostuser_ovs_plug': True}
        expected_vif_details['vhostuser_socket'] = (
            utils.ovn_vhu_sockpath(cfg.CONF.ovn.vhost_sock_dir, port_id))
        self._verify_vif_details(port_id, self.dpdk_host, 'vhostuser',
                                 expected_vif_details)

        port_id = self._create_or_update_port(port_id=port_id,
                                              hostname=self.invalid_dpdk_host)
        self._verify_vif_details(port_id, self.invalid_dpdk_host, 'ovs',
                                 {'port_filter': True})
class TestPortBindingOverTcp(TestPortBinding):
    """Runs the port binding tests over a TCP OVSDB connection."""

    def get_ovsdb_server_protocol(self):
        return 'tcp'
# TODO(mjozefcz): This test class hangs during execution.
class TestPortBindingOverSsl(TestPortBinding):
    """Runs the port binding tests over an SSL OVSDB connection."""

    def get_ovsdb_server_protocol(self):
        return 'ssl'
class TestNetworkMTUUpdate(base.TestOVNFunctionalBase):
    """Checks propagation of network MTU changes to subnet DHCP options."""

    def setUp(self):
        super(TestNetworkMTUUpdate, self).setUp()
        self._ovn_client = self.mech_driver._ovn_client
        self.n1 = self._make_network(self.fmt, 'n1', True)
        subnet_res = self._create_subnet(self.fmt, self.n1['network']['id'],
                                         '10.0.0.0/24')
        self.sub = self.deserialize(self.fmt, subnet_res)

    def _put_network_mtu(self, mtu):
        # Issue a PUT on the network carrying only the new MTU value.
        body = {'network': {'mtu': mtu}}
        request = self.new_update_request(
            'networks', body, self.n1['network']['id'], self.fmt)
        request.get_response(self.api)

    def _subnet_dhcp_mtu(self):
        # Read the 'mtu' DHCP option currently stored for the subnet in NB.
        opts = (
            self.mech_driver._ovn_client._nb_idl.get_subnet_dhcp_options(
                self.sub['subnet']['id'])
        )
        return int(opts['subnet']['options']['mtu'])

    def test_update_network_mtu(self):
        # Lowering the MTU must be reflected in the subnet DHCP options.
        new_mtu = self.n1['network']['mtu'] - 100
        self.assertNotEqual(self._subnet_dhcp_mtu(), new_mtu)
        self._put_network_mtu(new_mtu)
        self.assertEqual(self._subnet_dhcp_mtu(), new_mtu)

    def test_no_update_network_mtu(self):
        # Re-setting the same MTU must not bump the subnet's revision row.
        unchanged_mtu = self.n1['network']['mtu']
        before = db_rev.get_revision_row(
            self.context,
            self.sub['subnet']['id'])
        self._put_network_mtu(unchanged_mtu)
        after = db_rev.get_revision_row(
            self.context,
            self.sub['subnet']['id'])
        self.assertEqual(before.updated_at, after.updated_at)
@mock.patch('neutron.plugins.ml2.drivers.ovn.mech_driver.'
            'ovsdb.ovn_client.OVNClient._is_virtual_port_supported',
            lambda *args: True)
class TestVirtualPorts(base.TestOVNFunctionalBase):
    """Tests OVN 'virtual' ports derived from allowed_address_pairs.

    A port whose fixed IP appears in another port's allowed_address_pairs
    becomes an OVN Logical_Switch_Port of type 'virtual', with the owning
    ports recorded in its virtual-parents option.
    """

    def setUp(self):
        super(TestVirtualPorts, self).setUp()
        self._ovn_client = self.mech_driver._ovn_client
        self.n1 = self._make_network(self.fmt, 'n1', True)
        res = self._create_subnet(self.fmt, self.n1['network']['id'],
                                  '10.0.0.0/24')
        self.sub = self.deserialize(self.fmt, res)

    def _create_port(self, fixed_ip=None, allowed_address=None):
        """Creates a port on n1, optionally pinning its IP or address pair."""
        port_data = {
            'port': {'network_id': self.n1['network']['id'],
                     'tenant_id': self._tenant_id}}

        if fixed_ip:
            port_data['port']['fixed_ips'] = [{'ip_address': fixed_ip}]

        if allowed_address:
            port_data['port']['allowed_address_pairs'] = [
                {'ip_address': allowed_address}]

        port_req = self.new_create_request('ports', port_data, self.fmt)
        port_res = port_req.get_response(self.api)
        self.assertEqual(201, port_res.status_int)
        return self.deserialize(self.fmt, port_res)['port']

    def _update_allowed_address_pair(self, port_id, data):
        """PUTs the given allowed_address_pairs list onto the port."""
        port_data = {
            'port': {'allowed_address_pairs': data}}
        port_req = self.new_update_request('ports', port_data, port_id,
                                           self.fmt)
        port_res = port_req.get_response(self.api)
        self.assertEqual(200, port_res.status_int)
        return self.deserialize(self.fmt, port_res)['port']

    def _set_allowed_address_pair(self, port_id, ip):
        return self._update_allowed_address_pair(port_id, [{'ip_address': ip}])

    def _unset_allowed_address_pair(self, port_id):
        return self._update_allowed_address_pair(port_id, [])

    def _find_port_row(self, port_id):
        """Returns the NB Logical_Switch_Port row named port_id, or None."""
        cmd = self.nb_api.db_find_rows(
            'Logical_Switch_Port', ('name', '=', port_id))
        rows = cmd.execute(check_error=True)
        return rows[0] if rows else None

    def _is_ovn_port_type(self, port_id, port_type):
        ovn_vport = self._find_port_row(port_id)
        return port_type == ovn_vport.type

    def _check_port_type(self, port_id, port_type):
        # Renamed parameter from 'type' to avoid shadowing the builtin.
        check = functools.partial(self._is_ovn_port_type, port_id, port_type)
        n_utils.wait_until_true(check, timeout=10)

    def test_virtual_port_created_before(self):
        virt_port = self._create_port()
        virt_ip = virt_port['fixed_ips'][0]['ip_address']

        # Create the master port with the VIP address already set in
        # the allowed_address_pairs field
        master = self._create_port(allowed_address=virt_ip)

        # Assert the virt port has the type virtual and master is set
        # as parent
        self._check_port_type(virt_port['id'], ovn_const.LSP_TYPE_VIRTUAL)
        ovn_vport = self._find_port_row(virt_port['id'])
        self.assertEqual(
            virt_ip,
            ovn_vport.options[ovn_const.LSP_OPTIONS_VIRTUAL_IP_KEY])
        self.assertEqual(
            master['id'],
            ovn_vport.options[ovn_const.LSP_OPTIONS_VIRTUAL_PARENTS_KEY])

        # Create the backup parent port
        backup = self._create_port(allowed_address=virt_ip)

        # Assert the virt port now also includes the backup port as a parent
        self._check_port_type(virt_port['id'], ovn_const.LSP_TYPE_VIRTUAL)
        ovn_vport = self._find_port_row(virt_port['id'])
        self.assertEqual(
            virt_ip,
            ovn_vport.options[ovn_const.LSP_OPTIONS_VIRTUAL_IP_KEY])
        self.assertIn(
            master['id'],
            ovn_vport.options[ovn_const.LSP_OPTIONS_VIRTUAL_PARENTS_KEY])
        self.assertIn(
            backup['id'],
            ovn_vport.options[ovn_const.LSP_OPTIONS_VIRTUAL_PARENTS_KEY])

    def test_virtual_port_update_address_pairs(self):
        master = self._create_port()
        backup = self._create_port()
        virt_port = self._create_port()
        virt_ip = virt_port['fixed_ips'][0]['ip_address']

        # Assert the virt port does not yet have the type virtual (no
        # address pairs were set yet).  Stray trailing commas that built
        # throwaway tuples around these calls have been removed.
        self._check_port_type(virt_port['id'], '')
        ovn_vport = self._find_port_row(virt_port['id'])
        self.assertNotIn(ovn_const.LSP_OPTIONS_VIRTUAL_PARENTS_KEY,
                         ovn_vport.options)
        self.assertNotIn(ovn_const.LSP_OPTIONS_VIRTUAL_IP_KEY,
                         ovn_vport.options)

        # Set the virt IP to the allowed address pairs of the master port
        self._set_allowed_address_pair(master['id'], virt_ip)

        # Assert the virt port is now updated
        self._check_port_type(virt_port['id'], ovn_const.LSP_TYPE_VIRTUAL)
        ovn_vport = self._find_port_row(virt_port['id'])
        self.assertEqual(
            virt_ip,
            ovn_vport.options[ovn_const.LSP_OPTIONS_VIRTUAL_IP_KEY])
        self.assertEqual(
            master['id'],
            ovn_vport.options[ovn_const.LSP_OPTIONS_VIRTUAL_PARENTS_KEY])

        # Set the virt IP to the allowed address pairs of the backup port
        self._set_allowed_address_pair(backup['id'], virt_ip)

        # Assert the virt port now includes the backup port as a parent
        self._check_port_type(virt_port['id'], ovn_const.LSP_TYPE_VIRTUAL)
        ovn_vport = self._find_port_row(virt_port['id'])
        self.assertEqual(
            virt_ip,
            ovn_vport.options[ovn_const.LSP_OPTIONS_VIRTUAL_IP_KEY])
        self.assertIn(
            master['id'],
            ovn_vport.options[ovn_const.LSP_OPTIONS_VIRTUAL_PARENTS_KEY])
        self.assertIn(
            backup['id'],
            ovn_vport.options[ovn_const.LSP_OPTIONS_VIRTUAL_PARENTS_KEY])

        # Remove the address pairs from the master port
        self._unset_allowed_address_pair(master['id'])

        # Assert the virt port now only has the backup port as a parent
        self._check_port_type(virt_port['id'], ovn_const.LSP_TYPE_VIRTUAL)
        ovn_vport = self._find_port_row(virt_port['id'])
        self.assertEqual(
            virt_ip,
            ovn_vport.options[ovn_const.LSP_OPTIONS_VIRTUAL_IP_KEY])
        self.assertEqual(
            backup['id'],
            ovn_vport.options[ovn_const.LSP_OPTIONS_VIRTUAL_PARENTS_KEY])

        # Remove the address pairs from the backup port
        self._unset_allowed_address_pair(backup['id'])

        # Assert the virt port is not type virtual anymore and the virtual
        # port options are cleared
        self._check_port_type(virt_port['id'], '')
        ovn_vport = self._find_port_row(virt_port['id'])
        self.assertNotIn(ovn_const.LSP_OPTIONS_VIRTUAL_PARENTS_KEY,
                         ovn_vport.options)
        self.assertNotIn(ovn_const.LSP_OPTIONS_VIRTUAL_IP_KEY,
                         ovn_vport.options)

    def test_virtual_port_created_after(self):
        master = self._create_port(fixed_ip='10.0.0.11')
        backup = self._create_port(fixed_ip='10.0.0.12')
        virt_ip = '10.0.0.55'

        # Set the virt IP to the master and backup ports *before* creating
        # the virtual port
        self._set_allowed_address_pair(master['id'], virt_ip)
        self._set_allowed_address_pair(backup['id'], virt_ip)

        virt_port = self._create_port(fixed_ip=virt_ip)

        # Assert the virtual port has been created with the
        # right type and parents
        ovn_vport = self._find_port_row(virt_port['id'])
        self.assertEqual(ovn_const.LSP_TYPE_VIRTUAL, ovn_vport.type)
        self.assertEqual(
            virt_ip,
            ovn_vport.options[ovn_const.LSP_OPTIONS_VIRTUAL_IP_KEY])
        self.assertIn(
            master['id'],
            ovn_vport.options[ovn_const.LSP_OPTIONS_VIRTUAL_PARENTS_KEY])
        self.assertIn(
            backup['id'],
            ovn_vport.options[ovn_const.LSP_OPTIONS_VIRTUAL_PARENTS_KEY])

    def test_virtual_port_delete_parents(self):
        master = self._create_port()
        backup = self._create_port()
        virt_port = self._create_port()
        virt_ip = virt_port['fixed_ips'][0]['ip_address']

        # Assert the virt port does not yet have the type virtual (no
        # address pairs were set yet)
        ovn_vport = self._find_port_row(virt_port['id'])
        self.assertEqual("", ovn_vport.type)
        self.assertNotIn(ovn_const.LSP_OPTIONS_VIRTUAL_PARENTS_KEY,
                         ovn_vport.options)
        self.assertNotIn(ovn_const.LSP_OPTIONS_VIRTUAL_IP_KEY,
                         ovn_vport.options)

        # Set allowed address pairs to the master and backup ports
        self._set_allowed_address_pair(master['id'], virt_ip)
        self._set_allowed_address_pair(backup['id'], virt_ip)

        # Assert the virtual port is correct
        ovn_vport = self._find_port_row(virt_port['id'])
        self.assertEqual(ovn_const.LSP_TYPE_VIRTUAL, ovn_vport.type)
        self.assertEqual(
            virt_ip,
            ovn_vport.options[ovn_const.LSP_OPTIONS_VIRTUAL_IP_KEY])
        self.assertIn(
            master['id'],
            ovn_vport.options[ovn_const.LSP_OPTIONS_VIRTUAL_PARENTS_KEY])
        self.assertIn(
            backup['id'],
            ovn_vport.options[ovn_const.LSP_OPTIONS_VIRTUAL_PARENTS_KEY])

        # Delete the backup port
        self._delete('ports', backup['id'])

        # Assert the virt port now only has the master port as a parent
        ovn_vport = self._find_port_row(virt_port['id'])
        self.assertEqual(ovn_const.LSP_TYPE_VIRTUAL, ovn_vport.type)
        self.assertEqual(
            virt_ip,
            ovn_vport.options[ovn_const.LSP_OPTIONS_VIRTUAL_IP_KEY])
        self.assertEqual(
            master['id'],
            ovn_vport.options[ovn_const.LSP_OPTIONS_VIRTUAL_PARENTS_KEY])

        # Delete the master port
        self._delete('ports', master['id'])

        # Assert the virt port is not type virtual anymore and the virtual
        # port options are cleared
        ovn_vport = self._find_port_row(virt_port['id'])
        self.assertEqual("", ovn_vport.type)
        self.assertNotIn(ovn_const.LSP_OPTIONS_VIRTUAL_PARENTS_KEY,
                         ovn_vport.options)
        self.assertNotIn(ovn_const.LSP_OPTIONS_VIRTUAL_IP_KEY,
                         ovn_vport.options)
| 42.158986 | 79 | 0.6283 |
import functools
import mock
from oslo_config import cfg
from oslo_utils import uuidutils
from neutron.common.ovn import constants as ovn_const
from neutron.common.ovn import utils
from neutron.common import utils as n_utils
from neutron.db import ovn_revision_numbers_db as db_rev
from neutron.tests.functional import base
class TestPortBinding(base.TestOVNFunctionalBase):
    """Checks binding:vif_type/vif_details produced by the OVN mech driver.

    Three fake chassis are registered: a plain OVS host, a DPDK host whose
    iface-types include 'dpdkvhostuser', and a netdev host without
    vhost-user support (expected to fall back to plain 'ovs' binding).
    """

    def setUp(self):
        super(TestPortBinding, self).setUp()
        self.ovs_host = 'ovs-host'
        self.dpdk_host = 'dpdk-host'
        self.invalid_dpdk_host = 'invalid-host'
        self.vhu_mode = 'server'
        self.add_fake_chassis(self.ovs_host)
        # netdev datapath advertising dpdkvhostuser -> vhostuser binding
        # (asserted in the tests below).
        self.add_fake_chassis(
            self.dpdk_host,
            external_ids={'datapath-type': 'netdev',
                          'iface-types': 'dummy,dummy-internal,dpdkvhostuser'})

        # netdev datapath without dpdkvhostuser -> plain 'ovs' binding.
        self.add_fake_chassis(
            self.invalid_dpdk_host,
            external_ids={'datapath-type': 'netdev',
                          'iface-types': 'dummy,dummy-internal,geneve,vxlan'})
        self.n1 = self._make_network(self.fmt, 'n1', True)
        res = self._create_subnet(self.fmt, self.n1['network']['id'],
                                  '10.0.0.0/24')
        self.deserialize(self.fmt, res)

    def _create_or_update_port(self, port_id=None, hostname=None):
        """Creates a port (port_id is None) or rebinds an existing one.

        When hostname is given, the port is bound to that host as a
        'compute:None' device.  Returns the port id.
        """
        if port_id is None:
            port_data = {
                'port': {'network_id': self.n1['network']['id'],
                         'tenant_id': self._tenant_id}}

            if hostname:
                port_data['port']['device_id'] = uuidutils.generate_uuid()
                port_data['port']['device_owner'] = 'compute:None'
                port_data['port']['binding:host_id'] = hostname

            port_req = self.new_create_request('ports', port_data, self.fmt)
            port_res = port_req.get_response(self.api)
            p = self.deserialize(self.fmt, port_res)
            port_id = p['port']['id']
        else:
            port_data = {
                'port': {'device_id': uuidutils.generate_uuid(),
                         'device_owner': 'compute:None',
                         'binding:host_id': hostname}}
            port_req = self.new_update_request('ports', port_data, port_id,
                                               self.fmt)
            port_res = port_req.get_response(self.api)
            self.deserialize(self.fmt, port_res)

        return port_id

    def _verify_vif_details(self, port_id, expected_host_name,
                            expected_vif_type, expected_vif_details):
        """Asserts the port's binding host, vif_type and vif_details."""
        port_req = self.new_show_request('ports', port_id)
        port_res = port_req.get_response(self.api)
        p = self.deserialize(self.fmt, port_res)
        self.assertEqual(expected_host_name, p['port']['binding:host_id'])
        self.assertEqual(expected_vif_type, p['port']['binding:vif_type'])
        self.assertEqual(expected_vif_details,
                         p['port']['binding:vif_details'])

    def test_port_binding_create_port(self):
        # Bind at creation time against each of the three chassis.
        port_id = self._create_or_update_port(hostname=self.ovs_host)
        self._verify_vif_details(port_id, self.ovs_host, 'ovs',
                                 {'port_filter': True})

        port_id = self._create_or_update_port(hostname=self.dpdk_host)
        expected_vif_details = {'port_filter': False,
                                'vhostuser_mode': self.vhu_mode,
                                'vhostuser_ovs_plug': True}
        expected_vif_details['vhostuser_socket'] = (
            utils.ovn_vhu_sockpath(cfg.CONF.ovn.vhost_sock_dir, port_id))
        self._verify_vif_details(port_id, self.dpdk_host, 'vhostuser',
                                 expected_vif_details)

        port_id = self._create_or_update_port(hostname=self.invalid_dpdk_host)
        self._verify_vif_details(port_id, self.invalid_dpdk_host, 'ovs',
                                 {'port_filter': True})

    def test_port_binding_update_port(self):
        # Create unbound first, then rebind the same port host by host.
        port_id = self._create_or_update_port()
        self._verify_vif_details(port_id, '', 'unbound', {})
        port_id = self._create_or_update_port(port_id=port_id,
                                              hostname=self.ovs_host)
        self._verify_vif_details(port_id, self.ovs_host, 'ovs',
                                 {'port_filter': True})

        port_id = self._create_or_update_port(port_id=port_id,
                                              hostname=self.dpdk_host)
        expected_vif_details = {'port_filter': False,
                                'vhostuser_mode': self.vhu_mode,
                                'vhostuser_ovs_plug': True}
        expected_vif_details['vhostuser_socket'] = (
            utils.ovn_vhu_sockpath(cfg.CONF.ovn.vhost_sock_dir, port_id))
        self._verify_vif_details(port_id, self.dpdk_host, 'vhostuser',
                                 expected_vif_details)

        port_id = self._create_or_update_port(port_id=port_id,
                                              hostname=self.invalid_dpdk_host)
        self._verify_vif_details(port_id, self.invalid_dpdk_host, 'ovs',
                                 {'port_filter': True})
class TestPortBindingOverTcp(TestPortBinding):
    """Runs the port binding tests over a TCP OVSDB connection."""

    def get_ovsdb_server_protocol(self):
        return 'tcp'
class TestPortBindingOverSsl(TestPortBinding):
    """Runs the port binding tests over an SSL OVSDB connection."""

    def get_ovsdb_server_protocol(self):
        return 'ssl'
class TestNetworkMTUUpdate(base.TestOVNFunctionalBase):
    """Checks propagation of network MTU changes to subnet DHCP options."""

    def setUp(self):
        super(TestNetworkMTUUpdate, self).setUp()
        self._ovn_client = self.mech_driver._ovn_client
        self.n1 = self._make_network(self.fmt, 'n1', True)
        res = self._create_subnet(self.fmt, self.n1['network']['id'],
                                  '10.0.0.0/24')
        self.sub = self.deserialize(self.fmt, res)

    def test_update_network_mtu(self):
        """Lowering the network MTU must update the subnet's DHCP 'mtu'."""
        mtu_value = self.n1['network']['mtu'] - 100
        dhcp_options = (
            self.mech_driver._ovn_client._nb_idl.get_subnet_dhcp_options(
                self.sub['subnet']['id'])
        )
        self.assertNotEqual(
            int(dhcp_options['subnet']['options']['mtu']),
            mtu_value)
        data = {'network': {'mtu': mtu_value}}
        req = self.new_update_request(
            'networks', data, self.n1['network']['id'], self.fmt)
        req.get_response(self.api)
        dhcp_options = (
            self.mech_driver._ovn_client._nb_idl.get_subnet_dhcp_options(
                self.sub['subnet']['id'])
        )
        self.assertEqual(
            int(dhcp_options['subnet']['options']['mtu']),
            mtu_value)

    def test_no_update_network_mtu(self):
        """Re-setting the same MTU must not bump the subnet revision row."""
        mtu_value = self.n1['network']['mtu']
        base_revision = db_rev.get_revision_row(
            self.context,
            self.sub['subnet']['id'])
        data = {'network': {'mtu': mtu_value}}
        req = self.new_update_request(
            'networks', data, self.n1['network']['id'], self.fmt)
        req.get_response(self.api)
        second_revision = db_rev.get_revision_row(
            self.context,
            self.sub['subnet']['id'])
        self.assertEqual(
            base_revision.updated_at,
            second_revision.updated_at)
@mock.patch('neutron.plugins.ml2.drivers.ovn.mech_driver.'
            'ovsdb.ovn_client.OVNClient._is_virtual_port_supported',
            lambda *args: True)
class TestVirtualPorts(base.TestOVNFunctionalBase):
    """Tests OVN 'virtual' ports derived from allowed_address_pairs.

    A port whose fixed IP appears in another port's allowed_address_pairs
    becomes an OVN Logical_Switch_Port of type 'virtual', with the owning
    ports recorded in its virtual-parents option.
    """

    def setUp(self):
        super(TestVirtualPorts, self).setUp()
        self._ovn_client = self.mech_driver._ovn_client
        self.n1 = self._make_network(self.fmt, 'n1', True)
        res = self._create_subnet(self.fmt, self.n1['network']['id'],
                                  '10.0.0.0/24')
        self.sub = self.deserialize(self.fmt, res)

    def _create_port(self, fixed_ip=None, allowed_address=None):
        """Creates a port on n1, optionally pinning its IP or address pair."""
        port_data = {
            'port': {'network_id': self.n1['network']['id'],
                     'tenant_id': self._tenant_id}}

        if fixed_ip:
            port_data['port']['fixed_ips'] = [{'ip_address': fixed_ip}]

        if allowed_address:
            port_data['port']['allowed_address_pairs'] = [
                {'ip_address': allowed_address}]

        port_req = self.new_create_request('ports', port_data, self.fmt)
        port_res = port_req.get_response(self.api)
        self.assertEqual(201, port_res.status_int)
        return self.deserialize(self.fmt, port_res)['port']

    def _update_allowed_address_pair(self, port_id, data):
        """PUTs the given allowed_address_pairs list onto the port."""
        port_data = {
            'port': {'allowed_address_pairs': data}}
        port_req = self.new_update_request('ports', port_data, port_id,
                                           self.fmt)
        port_res = port_req.get_response(self.api)
        self.assertEqual(200, port_res.status_int)
        return self.deserialize(self.fmt, port_res)['port']

    def _set_allowed_address_pair(self, port_id, ip):
        return self._update_allowed_address_pair(port_id, [{'ip_address': ip}])

    def _unset_allowed_address_pair(self, port_id):
        return self._update_allowed_address_pair(port_id, [])

    def _find_port_row(self, port_id):
        """Returns the NB Logical_Switch_Port row named port_id, or None."""
        cmd = self.nb_api.db_find_rows(
            'Logical_Switch_Port', ('name', '=', port_id))
        rows = cmd.execute(check_error=True)
        return rows[0] if rows else None

    def _is_ovn_port_type(self, port_id, port_type):
        ovn_vport = self._find_port_row(port_id)
        return port_type == ovn_vport.type

    def _check_port_type(self, port_id, port_type):
        # Renamed parameter from 'type' to avoid shadowing the builtin.
        check = functools.partial(self._is_ovn_port_type, port_id, port_type)
        n_utils.wait_until_true(check, timeout=10)

    def test_virtual_port_created_before(self):
        virt_port = self._create_port()
        virt_ip = virt_port['fixed_ips'][0]['ip_address']

        # Create the master port with the VIP address already set in
        # the allowed_address_pairs field.
        master = self._create_port(allowed_address=virt_ip)

        # The virt port must become type 'virtual' with master as parent.
        self._check_port_type(virt_port['id'], ovn_const.LSP_TYPE_VIRTUAL)
        ovn_vport = self._find_port_row(virt_port['id'])
        self.assertEqual(
            virt_ip,
            ovn_vport.options[ovn_const.LSP_OPTIONS_VIRTUAL_IP_KEY])
        self.assertEqual(
            master['id'],
            ovn_vport.options[ovn_const.LSP_OPTIONS_VIRTUAL_PARENTS_KEY])

        # Create the backup parent port.
        backup = self._create_port(allowed_address=virt_ip)

        # The virt port must now also list the backup port as a parent.
        self._check_port_type(virt_port['id'], ovn_const.LSP_TYPE_VIRTUAL)
        ovn_vport = self._find_port_row(virt_port['id'])
        self.assertEqual(
            virt_ip,
            ovn_vport.options[ovn_const.LSP_OPTIONS_VIRTUAL_IP_KEY])
        self.assertIn(
            master['id'],
            ovn_vport.options[ovn_const.LSP_OPTIONS_VIRTUAL_PARENTS_KEY])
        self.assertIn(
            backup['id'],
            ovn_vport.options[ovn_const.LSP_OPTIONS_VIRTUAL_PARENTS_KEY])

    def test_virtual_port_update_address_pairs(self):
        master = self._create_port()
        backup = self._create_port()
        virt_port = self._create_port()
        virt_ip = virt_port['fixed_ips'][0]['ip_address']

        # No address pairs set yet, so the port must not be virtual.
        # (Stray trailing commas that built throwaway tuples around these
        # calls have been removed.)
        self._check_port_type(virt_port['id'], '')
        ovn_vport = self._find_port_row(virt_port['id'])
        self.assertNotIn(ovn_const.LSP_OPTIONS_VIRTUAL_PARENTS_KEY,
                         ovn_vport.options)
        self.assertNotIn(ovn_const.LSP_OPTIONS_VIRTUAL_IP_KEY,
                         ovn_vport.options)

        # Set the virt IP in the master port's allowed address pairs.
        self._set_allowed_address_pair(master['id'], virt_ip)
        self._check_port_type(virt_port['id'], ovn_const.LSP_TYPE_VIRTUAL)
        ovn_vport = self._find_port_row(virt_port['id'])
        self.assertEqual(
            virt_ip,
            ovn_vport.options[ovn_const.LSP_OPTIONS_VIRTUAL_IP_KEY])
        self.assertEqual(
            master['id'],
            ovn_vport.options[ovn_const.LSP_OPTIONS_VIRTUAL_PARENTS_KEY])

        # Set the virt IP in the backup port's allowed address pairs too.
        self._set_allowed_address_pair(backup['id'], virt_ip)
        self._check_port_type(virt_port['id'], ovn_const.LSP_TYPE_VIRTUAL)
        ovn_vport = self._find_port_row(virt_port['id'])
        self.assertEqual(
            virt_ip,
            ovn_vport.options[ovn_const.LSP_OPTIONS_VIRTUAL_IP_KEY])
        self.assertIn(
            master['id'],
            ovn_vport.options[ovn_const.LSP_OPTIONS_VIRTUAL_PARENTS_KEY])
        self.assertIn(
            backup['id'],
            ovn_vport.options[ovn_const.LSP_OPTIONS_VIRTUAL_PARENTS_KEY])

        # Removing master's pairs leaves only backup as parent.
        self._unset_allowed_address_pair(master['id'])
        self._check_port_type(virt_port['id'], ovn_const.LSP_TYPE_VIRTUAL)
        ovn_vport = self._find_port_row(virt_port['id'])
        self.assertEqual(
            virt_ip,
            ovn_vport.options[ovn_const.LSP_OPTIONS_VIRTUAL_IP_KEY])
        self.assertEqual(
            backup['id'],
            ovn_vport.options[ovn_const.LSP_OPTIONS_VIRTUAL_PARENTS_KEY])

        # Removing backup's pairs clears the virtual type and options.
        self._unset_allowed_address_pair(backup['id'])
        self._check_port_type(virt_port['id'], '')
        ovn_vport = self._find_port_row(virt_port['id'])
        self.assertNotIn(ovn_const.LSP_OPTIONS_VIRTUAL_PARENTS_KEY,
                         ovn_vport.options)
        self.assertNotIn(ovn_const.LSP_OPTIONS_VIRTUAL_IP_KEY,
                         ovn_vport.options)

    def test_virtual_port_created_after(self):
        master = self._create_port(fixed_ip='10.0.0.11')
        backup = self._create_port(fixed_ip='10.0.0.12')
        virt_ip = '10.0.0.55'

        # Set the virt IP on master and backup *before* creating the
        # virtual port.
        self._set_allowed_address_pair(master['id'], virt_ip)
        self._set_allowed_address_pair(backup['id'], virt_ip)

        virt_port = self._create_port(fixed_ip=virt_ip)

        # The virtual port must be created with the right type and parents.
        ovn_vport = self._find_port_row(virt_port['id'])
        self.assertEqual(ovn_const.LSP_TYPE_VIRTUAL, ovn_vport.type)
        self.assertEqual(
            virt_ip,
            ovn_vport.options[ovn_const.LSP_OPTIONS_VIRTUAL_IP_KEY])
        self.assertIn(
            master['id'],
            ovn_vport.options[ovn_const.LSP_OPTIONS_VIRTUAL_PARENTS_KEY])
        self.assertIn(
            backup['id'],
            ovn_vport.options[ovn_const.LSP_OPTIONS_VIRTUAL_PARENTS_KEY])

    def test_virtual_port_delete_parents(self):
        master = self._create_port()
        backup = self._create_port()
        virt_port = self._create_port()
        virt_ip = virt_port['fixed_ips'][0]['ip_address']

        # No address pairs set yet, so the port must not be virtual.
        ovn_vport = self._find_port_row(virt_port['id'])
        self.assertEqual("", ovn_vport.type)
        self.assertNotIn(ovn_const.LSP_OPTIONS_VIRTUAL_PARENTS_KEY,
                         ovn_vport.options)
        self.assertNotIn(ovn_const.LSP_OPTIONS_VIRTUAL_IP_KEY,
                         ovn_vport.options)

        # Set allowed address pairs on the master and backup ports.
        self._set_allowed_address_pair(master['id'], virt_ip)
        self._set_allowed_address_pair(backup['id'], virt_ip)
        ovn_vport = self._find_port_row(virt_port['id'])
        self.assertEqual(ovn_const.LSP_TYPE_VIRTUAL, ovn_vport.type)
        self.assertEqual(
            virt_ip,
            ovn_vport.options[ovn_const.LSP_OPTIONS_VIRTUAL_IP_KEY])
        self.assertIn(
            master['id'],
            ovn_vport.options[ovn_const.LSP_OPTIONS_VIRTUAL_PARENTS_KEY])
        self.assertIn(
            backup['id'],
            ovn_vport.options[ovn_const.LSP_OPTIONS_VIRTUAL_PARENTS_KEY])

        # Deleting backup leaves only master as parent.
        self._delete('ports', backup['id'])
        ovn_vport = self._find_port_row(virt_port['id'])
        self.assertEqual(ovn_const.LSP_TYPE_VIRTUAL, ovn_vport.type)
        self.assertEqual(
            virt_ip,
            ovn_vport.options[ovn_const.LSP_OPTIONS_VIRTUAL_IP_KEY])
        self.assertEqual(
            master['id'],
            ovn_vport.options[ovn_const.LSP_OPTIONS_VIRTUAL_PARENTS_KEY])

        # Deleting master clears the virtual type and options.
        self._delete('ports', master['id'])
        ovn_vport = self._find_port_row(virt_port['id'])
        self.assertEqual("", ovn_vport.type)
        self.assertNotIn(ovn_const.LSP_OPTIONS_VIRTUAL_PARENTS_KEY,
                         ovn_vport.options)
        self.assertNotIn(ovn_const.LSP_OPTIONS_VIRTUAL_IP_KEY,
                         ovn_vport.options)
| true | true |
f720cfcd78b89cb225ad9d77d9115e223033a0da | 8,174 | py | Python | tensorflow_federated/python/core/impl/value_utils.py | hieunq95/federated | 15402997ce7fb35d782d715758acf82767206916 | [
"Apache-2.0"
] | 5 | 2019-07-23T14:49:46.000Z | 2022-03-30T13:54:22.000Z | tensorflow_federated/python/core/impl/value_utils.py | hieunq95/federated | 15402997ce7fb35d782d715758acf82767206916 | [
"Apache-2.0"
] | null | null | null | tensorflow_federated/python/core/impl/value_utils.py | hieunq95/federated | 15402997ce7fb35d782d715758acf82767206916 | [
"Apache-2.0"
] | null | null | null | # Lint as: python3
# Copyright 2018, The TensorFlow Federated Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities file for functions with TFF `Value`s as inputs and outputs."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from six.moves import range
from tensorflow_federated.python.common_libs import anonymous_tuple
from tensorflow_federated.python.common_libs import py_typecheck
from tensorflow_federated.python.core.api import computation_types
from tensorflow_federated.python.core.api import placements
from tensorflow_federated.python.core.api import value_base
from tensorflow_federated.python.core.impl import computation_building_blocks
from tensorflow_federated.python.core.impl import intrinsic_defs
from tensorflow_federated.python.core.impl import type_utils
from tensorflow_federated.python.core.impl import value_impl
def zip_two_tuple(input_val, context_stack):
  """Zips a 2-tuple of federated values into a federated 2-tuple.

  Takes a 2-tuple of federated values (all placed the same way) and returns
  a single federated value whose member is a 2-tuple.

  Args:
    input_val: 2-tuple TFF `Value` of `NamedTuple` type, whose elements must
      be `FederatedTypes` with the same placement.
    context_stack: The context stack to use, as in `impl.value_impl.to_value`.

  Returns:
    TFF `Value` of `FederatedType` with member of 2-tuple `NamedTuple` type.
  """
  py_typecheck.check_type(input_val, value_base.Value)
  py_typecheck.check_type(input_val.type_signature,
                          computation_types.NamedTupleType)
  py_typecheck.check_type(input_val[0].type_signature,
                          computation_types.FederatedType)
  # Per-placement zip intrinsic URIs and the all_equal bit of the result.
  zip_uris = {
      placements.CLIENTS: intrinsic_defs.FEDERATED_ZIP_AT_CLIENTS.uri,
      placements.SERVER: intrinsic_defs.FEDERATED_ZIP_AT_SERVER.uri,
  }
  zip_all_equal = {
      placements.CLIENTS: False,
      placements.SERVER: True,
  }
  placement = input_val[0].type_signature.placement
  if placement not in zip_uris:
    raise TypeError('The argument must have components placed at SERVER or '
                    'CLIENTS')
  all_equal = zip_all_equal[placement]
  # Every component must share the placement determined above.
  for component in input_val:
    type_utils.check_federated_value_placement(component, placement)
  elements = anonymous_tuple.to_elements(input_val.type_signature)
  if len(elements) != 2:
    raise ValueError('The argument of zip_two_tuple must be a 2-tuple, '
                     'not an {}-tuple'.format(len(elements)))
  result_type = computation_types.FederatedType(
      [(name, elem.member) for name, elem in elements], placement, all_equal)

  def _adjust_all_equal_bit(x):
    return computation_types.FederatedType(x.member, x.placement, all_equal)

  # Normalize every component's all_equal bit to match the result placement.
  adjusted_input_type = computation_types.NamedTupleType([
      (k, _adjust_all_equal_bit(v)) if k else _adjust_all_equal_bit(v)
      for k, v in elements
  ])
  intrinsic = value_impl.ValueImpl(
      computation_building_blocks.Intrinsic(
          zip_uris[placement],
          computation_types.FunctionType(adjusted_input_type, result_type)),
      context_stack)
  return intrinsic(input_val)
def flatten_first_index(apply_fn, type_to_add, context_stack):
  """Returns a value `(arg -> APPEND(apply_fn(arg[0]), arg[1]))`.

  In the above, `APPEND(a,b)` refers to appending element b to tuple a.

  Constructs a Value of a TFF functional type that:
  1. Takes as argument a 2-element tuple `(x, y)` of TFF type
     `[apply_fn.type_signature.parameter, type_to_add]`.
  2. Transforms the 1st element `x` of this 2-tuple by applying `apply_fn`,
     producing a result `z` that must be a TFF tuple (e.g, as a result of
     flattening `x`).
  3. Leaves the 2nd element `y` of the argument 2-tuple unchanged.
  4. Returns the result of appending the unchanged `y` at the end of the
     tuple `z` returned by `apply_fn`.

  Args:
    apply_fn: TFF `Value` of type_signature `FunctionType`, a function taking
      TFF `Value`s to `Value`s of type `NamedTupleType`.
    type_to_add: 2-tuple specifying name and TFF type of arg[1]. Name can be
      `None` or `string`.
    context_stack: The context stack to use, as in `impl.value_impl.to_value`.

  Returns:
    TFF `Value` of `FunctionType`, taking 2-tuples to N-tuples, which calls
    `apply_fn` on the first index of its argument, appends the second
    index to the resulting (N-1)-tuple, then returns the N-tuple thus created.
  """
  # `apply_fn` must be a functional TFF value whose result is a named tuple,
  # since its elements are unpacked and re-packed below.
  py_typecheck.check_type(apply_fn, value_base.Value)
  py_typecheck.check_type(apply_fn.type_signature,
                          computation_types.FunctionType)
  py_typecheck.check_type(apply_fn.type_signature.result,
                          computation_types.NamedTupleType)
  py_typecheck.check_type(type_to_add, tuple)
  if len(type_to_add) != 2:
    raise ValueError('Please pass a 2-tuple as type_to_add to '
                     'flatten_first_index, with first index name or None '
                     'and second index instance of `computation_types.Type` '
                     'or something convertible to one by '
                     '`computationtypes.to_type`.')
  prev_param_type = apply_fn.type_signature.parameter
  # Symbolic reference to the (not yet bound) 2-tuple argument; it becomes the
  # parameter of the Lambda constructed at the end of this function.
  inputs = value_impl.to_value(
      computation_building_blocks.Reference(
          'inputs',
          computation_types.NamedTupleType([prev_param_type, type_to_add])),
      None, context_stack)
  # Apply `apply_fn` to the first element only; the result is an (N-1)-tuple.
  intermediate = apply_fn(inputs[0])
  # Element types of the final N-tuple: apply_fn's result followed by arg[1].
  full_type_spec = anonymous_tuple.to_elements(
      apply_fn.type_signature.result) + [type_to_add]
  # Re-associate each transformed element with its name, then append the
  # untouched second argument as the last element.
  named_values = [
      (full_type_spec[k][0], intermediate[k]) for k in range(len(intermediate))
  ] + [(full_type_spec[-1][0], inputs[1])]
  new_elements = value_impl.to_value(
      anonymous_tuple.AnonymousTuple(named_values),
      type_spec=full_type_spec,
      context_stack=context_stack)
  # Close over the symbolic 'inputs' reference to produce the function value.
  return value_impl.to_value(
      computation_building_blocks.Lambda(
          'inputs', inputs.type_signature,
          value_impl.ValueImpl.get_comp(new_elements)), None, context_stack)
def get_curried(fn):
  """Returns a curried version of function `fn` that takes a parameter tuple.

  For functions `fn` of types <T1,T2,....,Tn> -> U, the result is a function
  of the form T1 -> (T2 -> (T3 -> .... (Tn -> U) ... )).

  NOTE: No attempt is made at avoiding naming conflicts in cases where `fn`
  contains references. The arguments of the curried function are named `argN`
  with `N` starting at 0.

  Args:
    fn: A value of a functional TFF type.

  Returns:
    A value that represents the curried form of `fn`.
  """
  py_typecheck.check_type(fn, value_base.Value)
  py_typecheck.check_type(fn.type_signature, computation_types.FunctionType)
  py_typecheck.check_type(fn.type_signature.parameter,
                          computation_types.NamedTupleType)
  param_elements = anonymous_tuple.to_elements(fn.type_signature.parameter)
  # One reference per tuple element, named arg0, arg1, ...
  arg_refs = [
      computation_building_blocks.Reference('arg{}'.format(index), arg_type)
      for index, (_, arg_type) in enumerate(param_elements)
  ]
  # Invoke the original function on the re-assembled argument tuple.
  curried = computation_building_blocks.Call(
      value_impl.ValueImpl.get_comp(fn),
      computation_building_blocks.Tuple(arg_refs))
  # Wrap one Lambda per argument, innermost (last argument) first.
  for arg_ref in reversed(arg_refs):
    curried = computation_building_blocks.Lambda(
        arg_ref.name, arg_ref.type_signature, curried)
  return value_impl.ValueImpl(curried,
                              value_impl.ValueImpl.get_context_stack(fn))
| 42.572917 | 80 | 0.722535 |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from six.moves import range
from tensorflow_federated.python.common_libs import anonymous_tuple
from tensorflow_federated.python.common_libs import py_typecheck
from tensorflow_federated.python.core.api import computation_types
from tensorflow_federated.python.core.api import placements
from tensorflow_federated.python.core.api import value_base
from tensorflow_federated.python.core.impl import computation_building_blocks
from tensorflow_federated.python.core.impl import intrinsic_defs
from tensorflow_federated.python.core.impl import type_utils
from tensorflow_federated.python.core.impl import value_impl
def zip_two_tuple(input_val, context_stack):
py_typecheck.check_type(input_val, value_base.Value)
py_typecheck.check_type(input_val.type_signature,
computation_types.NamedTupleType)
py_typecheck.check_type(input_val[0].type_signature,
computation_types.FederatedType)
zip_uris = {
placements.CLIENTS: intrinsic_defs.FEDERATED_ZIP_AT_CLIENTS.uri,
placements.SERVER: intrinsic_defs.FEDERATED_ZIP_AT_SERVER.uri,
}
zip_all_equal = {
placements.CLIENTS: False,
placements.SERVER: True,
}
output_placement = input_val[0].type_signature.placement
if output_placement not in zip_uris:
raise TypeError('The argument must have components placed at SERVER or '
'CLIENTS')
output_all_equal_bit = zip_all_equal[output_placement]
for elem in input_val:
type_utils.check_federated_value_placement(elem, output_placement)
num_elements = len(anonymous_tuple.to_elements(input_val.type_signature))
if num_elements != 2:
raise ValueError('The argument of zip_two_tuple must be a 2-tuple, '
'not an {}-tuple'.format(num_elements))
result_type = computation_types.FederatedType(
[(name, e.member)
for name, e in anonymous_tuple.to_elements(input_val.type_signature)],
output_placement, output_all_equal_bit)
def _adjust_all_equal_bit(x):
return computation_types.FederatedType(x.member, x.placement,
output_all_equal_bit)
adjusted_input_type = computation_types.NamedTupleType([
(k, _adjust_all_equal_bit(v)) if k else _adjust_all_equal_bit(v)
for k, v in anonymous_tuple.to_elements(input_val.type_signature)
])
intrinsic = value_impl.ValueImpl(
computation_building_blocks.Intrinsic(
zip_uris[output_placement],
computation_types.FunctionType(adjusted_input_type, result_type)),
context_stack)
return intrinsic(input_val)
def flatten_first_index(apply_fn, type_to_add, context_stack):
py_typecheck.check_type(apply_fn, value_base.Value)
py_typecheck.check_type(apply_fn.type_signature,
computation_types.FunctionType)
py_typecheck.check_type(apply_fn.type_signature.result,
computation_types.NamedTupleType)
py_typecheck.check_type(type_to_add, tuple)
if len(type_to_add) != 2:
raise ValueError('Please pass a 2-tuple as type_to_add to '
'flatten_first_index, with first index name or None '
'and second index instance of `computation_types.Type` '
'or something convertible to one by '
'`computationtypes.to_type`.')
prev_param_type = apply_fn.type_signature.parameter
inputs = value_impl.to_value(
computation_building_blocks.Reference(
'inputs',
computation_types.NamedTupleType([prev_param_type, type_to_add])),
None, context_stack)
intermediate = apply_fn(inputs[0])
full_type_spec = anonymous_tuple.to_elements(
apply_fn.type_signature.result) + [type_to_add]
named_values = [
(full_type_spec[k][0], intermediate[k]) for k in range(len(intermediate))
] + [(full_type_spec[-1][0], inputs[1])]
new_elements = value_impl.to_value(
anonymous_tuple.AnonymousTuple(named_values),
type_spec=full_type_spec,
context_stack=context_stack)
return value_impl.to_value(
computation_building_blocks.Lambda(
'inputs', inputs.type_signature,
value_impl.ValueImpl.get_comp(new_elements)), None, context_stack)
def get_curried(fn):
py_typecheck.check_type(fn, value_base.Value)
py_typecheck.check_type(fn.type_signature, computation_types.FunctionType)
py_typecheck.check_type(fn.type_signature.parameter,
computation_types.NamedTupleType)
param_elements = anonymous_tuple.to_elements(fn.type_signature.parameter)
references = []
for idx, (_, elem_type) in enumerate(param_elements):
references.append(
computation_building_blocks.Reference('arg{}'.format(idx), elem_type))
result = computation_building_blocks.Call(
value_impl.ValueImpl.get_comp(fn),
computation_building_blocks.Tuple(references))
for ref in references[::-1]:
result = computation_building_blocks.Lambda(ref.name, ref.type_signature,
result)
return value_impl.ValueImpl(result,
value_impl.ValueImpl.get_context_stack(fn))
| true | true |
f720d050c37ee3d16536fe8dff1a9deb55d14284 | 5,304 | py | Python | backend/tests/baserow/contrib/database/field/test_number_field_type.py | jacklicn/baserow | 978d9462ededbaa96674a6653028ba19876ea273 | [
"MIT"
] | 1 | 2021-04-13T16:27:58.000Z | 2021-04-13T16:27:58.000Z | backend/tests/baserow/contrib/database/field/test_number_field_type.py | jacklicn/baserow | 978d9462ededbaa96674a6653028ba19876ea273 | [
"MIT"
] | null | null | null | backend/tests/baserow/contrib/database/field/test_number_field_type.py | jacklicn/baserow | 978d9462ededbaa96674a6653028ba19876ea273 | [
"MIT"
] | null | null | null | import pytest
from decimal import Decimal
from baserow.contrib.database.fields.handler import FieldHandler
from baserow.contrib.database.fields.registries import field_type_registry
@pytest.mark.django_db
@pytest.mark.parametrize(
"expected,field_kwargs",
[
(
[
9223372036854775807, 100, 100, 101, 0, 0, 0, 0, None, None, None, None,
None
],
{'number_type': 'INTEGER', 'number_negative': False}
),
(
[9223372036854775807, 100, 100, 101, -9223372036854775808, -100, -100, -101,
None, None, None, None, None],
{'number_type': 'INTEGER', 'number_negative': True}
),
(
[
Decimal('9223372036854775807.0'), Decimal('100.0'), Decimal('100.2'),
Decimal('100.6'), Decimal('0.0'), Decimal('0.0'), Decimal('0.0'),
Decimal('0.0'), None, None, None, None, None
],
{
'number_type': 'DECIMAL', 'number_negative': False,
'number_decimal_places': 1
}
),
(
[
Decimal('9223372036854775807.000'), Decimal('100.000'),
Decimal('100.220'), Decimal('100.600'),
Decimal('-9223372036854775808.0'), Decimal('-100.0'),
Decimal('-100.220'), Decimal('-100.600'), None, None, None, None, None
],
{
'number_type': 'DECIMAL', 'number_negative': True,
'number_decimal_places': 3
}
)
]
)
def test_alter_number_field_column_type(expected, field_kwargs, data_fixture):
user = data_fixture.create_user()
table = data_fixture.create_database_table(user=user)
field = data_fixture.create_text_field(table=table, order=1)
handler = FieldHandler()
field = handler.update_field(user=user, field=field, name='Text field')
model = table.get_model()
model.objects.create(**{f'field_{field.id}': '9223372036854775807'})
model.objects.create(**{f'field_{field.id}': '100'})
model.objects.create(**{f'field_{field.id}': '100.22'})
model.objects.create(**{f'field_{field.id}': '100.59999'})
model.objects.create(**{f'field_{field.id}': '-9223372036854775808'})
model.objects.create(**{f'field_{field.id}': '-100'})
model.objects.create(**{f'field_{field.id}': '-100.22'})
model.objects.create(**{f'field_{field.id}': '-100.5999'})
model.objects.create(**{f'field_{field.id}': '100.59.99'})
model.objects.create(**{f'field_{field.id}': '-100.59.99'})
model.objects.create(**{f'field_{field.id}': '100TEST100.10'})
model.objects.create(**{f'field_{field.id}': '!@#$%%^^&&^^%$$'})
model.objects.create(**{f'field_{field.id}': '!@#$%%^^5.2&&^^%$$'})
# Change the field type to a number and test if the values have been changed.
field = handler.update_field(user=user, field=field, new_type_name='number',
**field_kwargs)
model = table.get_model()
rows = model.objects.all()
for index, row in enumerate(rows):
assert getattr(row, f'field_{field.id}') == expected[index]
@pytest.mark.django_db
def test_alter_number_field_column_type_negative(data_fixture):
    """Disallowing negatives on a number field must clamp stored values to 0."""
    user = data_fixture.create_user()
    table = data_fixture.create_database_table(user=user)
    number_field = data_fixture.create_number_field(table=table, order=1,
                                                    number_negative=True)
    decimal_field = data_fixture.create_number_field(table=table, order=2,
                                                     number_type='DECIMAL',
                                                     number_negative=True,
                                                     number_decimal_places=2)
    model = table.get_model()
    # One row with a negative value in both columns.
    model.objects.create(**{
        f'field_{number_field.id}': -10,
        f'field_{decimal_field.id}': Decimal('-10.10')
    })
    handler = FieldHandler()
    # Switch both fields to non-negative; existing rows must be converted.
    number_field = handler.update_field(user=user, field=number_field,
                                        number_negative=False)
    decimal_field = handler.update_field(user=user, field=decimal_field,
                                         number_negative=False)
    model = table.get_model()
    rows = model.objects.all()
    assert getattr(rows[0], f'field_{number_field.id}') == 0
    assert getattr(rows[0], f'field_{decimal_field.id}') == 0.00
@pytest.mark.django_db
def test_import_export_number_field(data_fixture):
    """An exported number field must round-trip through import unchanged."""
    original = data_fixture.create_number_field(
        name='Number field',
        number_type='DECIMAL',
        number_negative=True,
        number_decimal_places=2
    )
    field_type = field_type_registry.get_by_model(original)
    serialized = field_type.export_serialized(original)
    imported = field_type.import_serialized(original.table, serialized, {})
    # Every number-specific setting must survive the round trip.
    assert original.number_type == imported.number_type
    assert original.number_negative == imported.number_negative
    assert original.number_decimal_places == imported.number_decimal_places
| 40.181818 | 88 | 0.601244 | import pytest
from decimal import Decimal
from baserow.contrib.database.fields.handler import FieldHandler
from baserow.contrib.database.fields.registries import field_type_registry
@pytest.mark.django_db
@pytest.mark.parametrize(
"expected,field_kwargs",
[
(
[
9223372036854775807, 100, 100, 101, 0, 0, 0, 0, None, None, None, None,
None
],
{'number_type': 'INTEGER', 'number_negative': False}
),
(
[9223372036854775807, 100, 100, 101, -9223372036854775808, -100, -100, -101,
None, None, None, None, None],
{'number_type': 'INTEGER', 'number_negative': True}
),
(
[
Decimal('9223372036854775807.0'), Decimal('100.0'), Decimal('100.2'),
Decimal('100.6'), Decimal('0.0'), Decimal('0.0'), Decimal('0.0'),
Decimal('0.0'), None, None, None, None, None
],
{
'number_type': 'DECIMAL', 'number_negative': False,
'number_decimal_places': 1
}
),
(
[
Decimal('9223372036854775807.000'), Decimal('100.000'),
Decimal('100.220'), Decimal('100.600'),
Decimal('-9223372036854775808.0'), Decimal('-100.0'),
Decimal('-100.220'), Decimal('-100.600'), None, None, None, None, None
],
{
'number_type': 'DECIMAL', 'number_negative': True,
'number_decimal_places': 3
}
)
]
)
def test_alter_number_field_column_type(expected, field_kwargs, data_fixture):
user = data_fixture.create_user()
table = data_fixture.create_database_table(user=user)
field = data_fixture.create_text_field(table=table, order=1)
handler = FieldHandler()
field = handler.update_field(user=user, field=field, name='Text field')
model = table.get_model()
model.objects.create(**{f'field_{field.id}': '9223372036854775807'})
model.objects.create(**{f'field_{field.id}': '100'})
model.objects.create(**{f'field_{field.id}': '100.22'})
model.objects.create(**{f'field_{field.id}': '100.59999'})
model.objects.create(**{f'field_{field.id}': '-9223372036854775808'})
model.objects.create(**{f'field_{field.id}': '-100'})
model.objects.create(**{f'field_{field.id}': '-100.22'})
model.objects.create(**{f'field_{field.id}': '-100.5999'})
model.objects.create(**{f'field_{field.id}': '100.59.99'})
model.objects.create(**{f'field_{field.id}': '-100.59.99'})
model.objects.create(**{f'field_{field.id}': '100TEST100.10'})
model.objects.create(**{f'field_{field.id}': '!@#$%%^^&&^^%$$'})
model.objects.create(**{f'field_{field.id}': '!@#$%%^^5.2&&^^%$$'})
field = handler.update_field(user=user, field=field, new_type_name='number',
**field_kwargs)
model = table.get_model()
rows = model.objects.all()
for index, row in enumerate(rows):
assert getattr(row, f'field_{field.id}') == expected[index]
@pytest.mark.django_db
def test_alter_number_field_column_type_negative(data_fixture):
user = data_fixture.create_user()
table = data_fixture.create_database_table(user=user)
number_field = data_fixture.create_number_field(table=table, order=1,
number_negative=True)
decimal_field = data_fixture.create_number_field(table=table, order=2,
number_type='DECIMAL',
number_negative=True,
number_decimal_places=2)
model = table.get_model()
model.objects.create(**{
f'field_{number_field.id}': -10,
f'field_{decimal_field.id}': Decimal('-10.10')
})
handler = FieldHandler()
number_field = handler.update_field(user=user, field=number_field,
number_negative=False)
decimal_field = handler.update_field(user=user, field=decimal_field,
number_negative=False)
model = table.get_model()
rows = model.objects.all()
assert getattr(rows[0], f'field_{number_field.id}') == 0
assert getattr(rows[0], f'field_{decimal_field.id}') == 0.00
@pytest.mark.django_db
def test_import_export_number_field(data_fixture):
number_field = data_fixture.create_number_field(
name='Number field',
number_type='DECIMAL',
number_negative=True,
number_decimal_places=2
)
number_field_type = field_type_registry.get_by_model(number_field)
number_serialized = number_field_type.export_serialized(number_field)
number_field_imported = number_field_type.import_serialized(
number_field.table,
number_serialized,
{}
)
assert number_field.number_type == number_field_imported.number_type
assert number_field.number_negative == number_field_imported.number_negative
assert number_field.number_decimal_places == (
number_field_imported.number_decimal_places
)
| true | true |
f720d05559826b7b3e8260bdfa239a1cb56c9a6c | 4,465 | py | Python | generated-libraries/python/netapp/iscsi/iscsi_received_stats_info.py | radekg/netapp-ontap-lib-get | 6445ebb071ec147ea82a486fbe9f094c56c5c40d | [
"MIT"
] | 2 | 2017-03-28T15:31:26.000Z | 2018-08-16T22:15:18.000Z | generated-libraries/python/netapp/iscsi/iscsi_received_stats_info.py | radekg/netapp-ontap-lib-get | 6445ebb071ec147ea82a486fbe9f094c56c5c40d | [
"MIT"
] | null | null | null | generated-libraries/python/netapp/iscsi/iscsi_received_stats_info.py | radekg/netapp-ontap-lib-get | 6445ebb071ec147ea82a486fbe9f094c56c5c40d | [
"MIT"
] | null | null | null | from netapp.netapp_object import NetAppObject
class IscsiReceivedStatsInfo(NetAppObject):
    """
    Counts for PDUs received.

    Each counter is exposed as a property whose setter validates non-None
    values via `NetAppObject.validate` before storing them.
    """
    # Fix: all setters previously compared with `val != None`; PEP 8 (E711)
    # requires identity comparison against the None singleton.
    _data_out = None

    @property
    def data_out(self):
        """
        Count of data out requests.
        """
        return self._data_out

    @data_out.setter
    def data_out(self, val):
        if val is not None:
            self.validate('data_out', val)
        self._data_out = val

    _scsi_task_mgt_cmd = None

    @property
    def scsi_task_mgt_cmd(self):
        """
        Count of SCSI task management commands.
        """
        return self._scsi_task_mgt_cmd

    @scsi_task_mgt_cmd.setter
    def scsi_task_mgt_cmd(self, val):
        if val is not None:
            self.validate('scsi_task_mgt_cmd', val)
        self._scsi_task_mgt_cmd = val

    _login_req = None

    @property
    def login_req(self):
        """
        Count of login requests.
        """
        return self._login_req

    @login_req.setter
    def login_req(self, val):
        if val is not None:
            self.validate('login_req', val)
        self._login_req = val

    _unknown = None

    @property
    def unknown(self):
        """
        Count of unknown PDUs.
        """
        return self._unknown

    @unknown.setter
    def unknown(self, val):
        if val is not None:
            self.validate('unknown', val)
        self._unknown = val

    _nop_out = None

    @property
    def nop_out(self):
        """
        Count of NOP Out.
        """
        return self._nop_out

    @nop_out.setter
    def nop_out(self, val):
        if val is not None:
            self.validate('nop_out', val)
        self._nop_out = val

    _scsi_cmd = None

    @property
    def scsi_cmd(self):
        """
        Count of SCSI commands.
        """
        return self._scsi_cmd

    @scsi_cmd.setter
    def scsi_cmd(self, val):
        if val is not None:
            self.validate('scsi_cmd', val)
        self._scsi_cmd = val

    _snack = None

    @property
    def snack(self):
        """
        Count of SNACK requests.
        """
        return self._snack

    @snack.setter
    def snack(self, val):
        if val is not None:
            self.validate('snack', val)
        self._snack = val

    _text_req = None

    @property
    def text_req(self):
        """
        Count of text requests.
        """
        return self._text_req

    @text_req.setter
    def text_req(self, val):
        if val is not None:
            self.validate('text_req', val)
        self._text_req = val

    _total = None

    @property
    def total(self):
        """
        Total PDUs received.
        """
        return self._total

    @total.setter
    def total(self, val):
        if val is not None:
            self.validate('total', val)
        self._total = val

    _logout_req = None

    @property
    def logout_req(self):
        """
        Count of logout requests.
        """
        return self._logout_req

    @logout_req.setter
    def logout_req(self, val):
        if val is not None:
            self.validate('logout_req', val)
        self._logout_req = val

    @staticmethod
    def get_api_name():
        """Return the ONTAP API element name for this object."""
        return "iscsi-received-stats-info"

    @staticmethod
    def get_desired_attrs():
        """Return the API attribute names to request for this object."""
        return [
            'data-out',
            'scsi-task-mgt-cmd',
            'login-req',
            'unknown',
            'nop-out',
            'scsi-cmd',
            'snack',
            'text-req',
            'total',
            'logout-req',
        ]

    def describe_properties(self):
        """Return the type/arity/requiredness metadata for each property."""
        return {
            'data_out': { 'class': int, 'is_list': False, 'required': 'required' },
            'scsi_task_mgt_cmd': { 'class': int, 'is_list': False, 'required': 'required' },
            'login_req': { 'class': int, 'is_list': False, 'required': 'required' },
            'unknown': { 'class': int, 'is_list': False, 'required': 'required' },
            'nop_out': { 'class': int, 'is_list': False, 'required': 'required' },
            'scsi_cmd': { 'class': int, 'is_list': False, 'required': 'required' },
            'snack': { 'class': int, 'is_list': False, 'required': 'required' },
            'text_req': { 'class': int, 'is_list': False, 'required': 'required' },
            'total': { 'class': int, 'is_list': False, 'required': 'required' },
        }
class IscsiReceivedStatsInfo(NetAppObject):
_data_out = None
@property
def data_out(self):
return self._data_out
@data_out.setter
def data_out(self, val):
if val != None:
self.validate('data_out', val)
self._data_out = val
_scsi_task_mgt_cmd = None
@property
def scsi_task_mgt_cmd(self):
return self._scsi_task_mgt_cmd
@scsi_task_mgt_cmd.setter
def scsi_task_mgt_cmd(self, val):
if val != None:
self.validate('scsi_task_mgt_cmd', val)
self._scsi_task_mgt_cmd = val
_login_req = None
@property
def login_req(self):
return self._login_req
@login_req.setter
def login_req(self, val):
if val != None:
self.validate('login_req', val)
self._login_req = val
_unknown = None
@property
def unknown(self):
return self._unknown
@unknown.setter
def unknown(self, val):
if val != None:
self.validate('unknown', val)
self._unknown = val
_nop_out = None
@property
def nop_out(self):
return self._nop_out
@nop_out.setter
def nop_out(self, val):
if val != None:
self.validate('nop_out', val)
self._nop_out = val
_scsi_cmd = None
@property
def scsi_cmd(self):
return self._scsi_cmd
@scsi_cmd.setter
def scsi_cmd(self, val):
if val != None:
self.validate('scsi_cmd', val)
self._scsi_cmd = val
_snack = None
@property
def snack(self):
return self._snack
@snack.setter
def snack(self, val):
if val != None:
self.validate('snack', val)
self._snack = val
_text_req = None
@property
def text_req(self):
return self._text_req
@text_req.setter
def text_req(self, val):
if val != None:
self.validate('text_req', val)
self._text_req = val
_total = None
@property
def total(self):
return self._total
@total.setter
def total(self, val):
if val != None:
self.validate('total', val)
self._total = val
_logout_req = None
@property
def logout_req(self):
return self._logout_req
@logout_req.setter
def logout_req(self, val):
if val != None:
self.validate('logout_req', val)
self._logout_req = val
@staticmethod
def get_api_name():
return "iscsi-received-stats-info"
@staticmethod
def get_desired_attrs():
return [
'data-out',
'scsi-task-mgt-cmd',
'login-req',
'unknown',
'nop-out',
'scsi-cmd',
'snack',
'text-req',
'total',
'logout-req',
]
def describe_properties(self):
return {
'data_out': { 'class': int, 'is_list': False, 'required': 'required' },
'scsi_task_mgt_cmd': { 'class': int, 'is_list': False, 'required': 'required' },
'login_req': { 'class': int, 'is_list': False, 'required': 'required' },
'unknown': { 'class': int, 'is_list': False, 'required': 'required' },
'nop_out': { 'class': int, 'is_list': False, 'required': 'required' },
'scsi_cmd': { 'class': int, 'is_list': False, 'required': 'required' },
'snack': { 'class': int, 'is_list': False, 'required': 'required' },
'text_req': { 'class': int, 'is_list': False, 'required': 'required' },
'total': { 'class': int, 'is_list': False, 'required': 'required' },
'logout_req': { 'class': int, 'is_list': False, 'required': 'required' },
}
| true | true |
f720d09b09639cf12c6d88a9b93e2140d324a4fc | 6,209 | py | Python | data-analysis/analyze_E017+020.py | JakobHavtorn/es-rl | 30d81ad908a30e78d03c83d37454dbe8e05d1452 | [
"MIT"
] | 1 | 2021-09-03T17:54:14.000Z | 2021-09-03T17:54:14.000Z | data-analysis/analyze_E017+020.py | JakobHavtorn/es-rl | 30d81ad908a30e78d03c83d37454dbe8e05d1452 | [
"MIT"
] | null | null | null | data-analysis/analyze_E017+020.py | JakobHavtorn/es-rl | 30d81ad908a30e78d03c83d37454dbe8e05d1452 | [
"MIT"
] | null | null | null | import os
from distutils.dir_util import copy_tree
import warnings
import IPython
import matplotlib
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import scipy as sp
import torch
from context import utils
import utils.filesystem as fs
import utils.plotting as plot
from utils.data_analysis import invert_signs, load_stats
from utils.misc import get_equal_dicts, length_of_longest
def create_plots(stats_list, keys_to_plot, groups, result_dir, include_val=True):
    """Plot grouped mean/sd time series for each monitored key.

    For every key, only the runs whose series reached the maximum length
    (i.e. finished runs) are plotted; the figure is saved as
    '<key>-all-series-mean-sd.pdf' in `result_dir`.

    Args:
        stats_list: List of pandas.DataFrame objects, one per run, each with
            a 'generations' column plus the monitored keys.
        keys_to_plot: Iterable of column names to plot.
        groups: numpy array of group labels, parallel to `stats_list`.
        result_dir: Directory where the PDF figures are written.
        include_val: If True, also plot the associated '*_val' validation
            series when present.
    """
    n_keys = len(keys_to_plot)
    n_chars = len(str(n_keys))
    f = ' {:' + str(n_chars) + 'd}/{:' + str(n_chars) + 'd} monitored keys plotted'
    groups_org = groups.copy()
    for i_key, k in enumerate(keys_to_plot):
        # Get data and subset only those series that are done (or the one that
        # is the longest)
        groups = groups_org.copy()
        list_of_series = [s[k].tolist() for s in stats_list if k in s]
        list_of_genera = [s['generations'].tolist() for s in stats_list if k in s]
        l = length_of_longest(list_of_series)
        indices = [i for i, series in enumerate(list_of_series) if len(series) == l]
        groups = groups[indices]
        list_of_series = [list_of_series[i] for i in indices]
        list_of_genera = [list_of_genera[i] for i in indices]
        # Validation series
        if include_val:
            # NOTE(review): assumes keys end in a 4-char suffix (e.g. '_unp',
            # '_avg') replaced by '_val' -- confirm when adding new keys.
            val_k = k[:-4] + '_val'
            list_of_series_val = [s[val_k].tolist() for i, s in enumerate(stats_list)
                                  if val_k in s and i in indices]
            if len(list_of_series_val) != 0:
                # Validation values are NaN except at evaluation generations.
                list_of_genera_val = [np.where(~np.isnan(l))[0].tolist()
                                      for l in list_of_series_val]
                list_of_genera.extend(list_of_genera_val)
                list_of_series_val = [np.array(l) for l in list_of_series_val]
                list_of_series_val = [l[~np.isnan(l)].tolist() for l in list_of_series_val]
                list_of_series.extend(list_of_series_val)
                groups_val = np.array([g + ', validation' for g in groups])
                groups = np.append(groups, groups_val)
        # Sort by group label only. Using an index sort with an explicit key
        # avoids sorted(zip(...)) falling through to comparing the data lists
        # on equal labels. Also removed a leftover debugging block that called
        # IPython.embed() guarded by `k is 'return_val'` (identity comparison
        # with a str literal is unreliable and a SyntaxWarning on CPython 3.8+).
        order = sorted(range(len(groups)), key=lambda i: groups[i])
        list_of_genera = [list_of_genera[i] for i in order]
        list_of_series = [list_of_series[i] for i in order]
        groups.sort()
        # Plot
        plot.timeseries_mean_grouped(list_of_genera, list_of_series, groups,
                                     xlabel='generations', ylabel=k,
                                     map_labels='supervised')
        if 'return' in k:
            plt.gca().set_ylim(0, 1.5)
        elif 'accuracy' in k:
            plt.gca().set_ylim(0.4, 1)
        plt.savefig(os.path.join(result_dir, k + '-all-series-mean-sd' + '.pdf'),
                    bbox_inches='tight')
        plt.close()
        # Progress
        if i_key + 1 == n_keys:
            print(f.format(i_key + 1, n_keys), end='\n')
        else:
            print(f.format(i_key + 1, n_keys), end='\r')
def get_directories(experiment_id):
    """Locate checkpoint directories for an experiment and prepare output dirs.

    Args:
        experiment_id: Experiment identifier, e.g. 'E017-bn-init'. The first
            four characters name the result/destination directories.

    Returns:
        Tuple of (run directories to analyze, local result directory prefix,
        Dropbox destination directory prefix).
    """
    # Resolve the experiment checkpoint root relative to this file.
    this_file_dir_local = os.path.dirname(os.path.abspath(__file__))
    package_root_this_file = fs.get_parent(this_file_dir_local, 'es-rl')
    d = os.path.join(package_root_this_file, 'experiments', 'checkpoints', experiment_id)
    directories = [os.path.join(d, di) for di in os.listdir(d)
                   if os.path.isdir(os.path.join(d, di))]
    # Skip auxiliary directories produced by earlier analyses.
    directories = [di for di in directories
                   if 'monitoring' not in di and 'analysis' not in di]
    # Create result directories. makedirs(exist_ok=True) replaces the racy
    # exists()+mkdir() pair (and drops a stray trailing comma that turned one
    # mkdir statement into a 1-tuple).
    result_dir = os.path.join(d, str(experiment_id[:4]))
    dst_dir = '/home/jakob/Dropbox/Apps/ShareLaTeX/Master\'s Thesis/graphics/' + experiment_id[:4]
    os.makedirs(result_dir + '-bn-analysis', exist_ok=True)
    os.makedirs(result_dir + '-init-analysis', exist_ok=True)
    return directories, result_dir, dst_dir
def load(experiment_id, optimizer):
    """Load per-run stats and assign each run to init/batchnorm groups.

    NOTE(review): iterates the module-level `directories` list created in the
    __main__ block rather than deriving it from `experiment_id`, which is
    currently unused -- confirm intent before refactoring.

    Args:
        experiment_id: Experiment identifier (unused, see note).
        optimizer: Label suffix appended to each group name (may be '').

    Returns:
        Tuple of (stats_init, stats_bn, groups_init, groups_bn) where the
        stats are lists of DataFrames and the groups are parallel label arrays.
    """
    stats_init = []
    stats_bn = []
    groups_init = np.array([])
    groups_bn = np.array([])
    for d in directories:
        try:
            st = pd.read_csv(os.path.join(d, 'stats.csv'))
            with open(os.path.join(d, 'init.log'), 'r') as f:
                s = f.read()
                if 'MNISTNetNoInit' in s:
                    groups_init = np.append(groups_init, 'Default init' + optimizer)  # Has BN
                    stats_init.append(st)
                elif 'MNISTNetNoBN' in s:
                    groups_bn = np.append(groups_bn, 'No Batchnorm' + optimizer)  # Has Xavier Glorot
                    stats_bn.append(st)
                else:
                    groups_bn = np.append(groups_bn, 'Batchnorm' + optimizer)  # Has Xavier Glorot
                    groups_init = np.append(groups_init, 'Xavier-Glorot' + optimizer)  # Has BN
                    stats_init.append(st)
                    stats_bn.append(st)
        except Exception:
            # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
            # propagate; incomplete or corrupt runs are reported and skipped.
            print("None in: " + d)
    return stats_init, stats_bn, groups_init, groups_bn
if __name__ == '__main__':
    # Ignore warnings from matplotlib
    warnings.filterwarnings("ignore", module="matplotlib")
    # Font setting
    matplotlib.rcParams.update({'font.size': 12})
    # Experiment IDs to analyze (batchnorm/initialization ablations)
    experiment_ids = ['E017-bn-init', 'E020-bn-init']
    # Optimizer labels appended to group names (kept empty here)
    # optimizers = [', SGD', ', ADAM']
    optimizers = ['', '']
    # Monitored stats.csv columns to plot
    keys_to_plot = {'return_unp', 'return_avg', 'accuracy_unp', 'accuracy_avg', 'sigma'}
    # Analyze each experiment in turn
    for experiment_id, optimizer in zip(experiment_ids, optimizers):
        # Get directories (also creates the *-analysis result directories)
        directories, result_dir, dst_dir = get_directories(experiment_id)
        if len(directories) == 0:
            print('No results for {}'.format(experiment_id))
            continue
        # Load data; `load` reads the module-level `directories` set above
        stats_init, stats_bn, groups_init, groups_bn = load(experiment_id, optimizer)
        # Plot (signs are inverted so higher is better in the figures)
        invert_signs(stats_init)
        invert_signs(stats_bn)
        create_plots(stats_init, keys_to_plot, groups_init, result_dir + '-init-analysis', include_val=True)
        create_plots(stats_bn, keys_to_plot, groups_bn, result_dir + '-bn-analysis', include_val=True)
        # Copy the figures into the thesis graphics folder
        copy_tree(result_dir + '-init-analysis', dst_dir + '-init-analysis')
        copy_tree(result_dir + '-bn-analysis', dst_dir + '-bn-analysis')
from distutils.dir_util import copy_tree
import warnings
import IPython
import matplotlib
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import scipy as sp
import torch
from context import utils
import utils.filesystem as fs
import utils.plotting as plot
from utils.data_analysis import invert_signs, load_stats
from utils.misc import get_equal_dicts, length_of_longest
def create_plots(stats_list, keys_to_plot, groups, result_dir, include_val=True):
n_keys = len(keys_to_plot)
n_chars = len(str(n_keys))
f = ' {:' + str(n_chars) + 'd}/{:' + str(n_chars) + 'd} monitored keys plotted'
groups_org = groups.copy()
for i_key, k in enumerate(keys_to_plot):
groups = groups_org.copy()
list_of_series = [s[k].tolist() for s in stats_list if k in s]
list_of_genera = [s['generations'].tolist() for s in stats_list if k in s]
l = length_of_longest(list_of_series)
indices = [i for i, series in enumerate(list_of_series) if len(series) == l]
groups = groups[indices]
list_of_series = [list_of_series[i] for i in indices]
list_of_genera = [list_of_genera[i] for i in indices]
if include_val:
val_k = k[:-4] + '_val'
list_of_series_val = [s[val_k].tolist() for i, s in enumerate(stats_list) if val_k in s and i in indices]
if include_val and not len(list_of_series_val) == 0:
list_of_genera_val = [np.where(~np.isnan(l))[0].tolist() for l in list_of_series_val]
list_of_genera.extend(list_of_genera_val)
list_of_series_val = [np.array(l) for l in list_of_series_val]
list_of_series_val = [l[~np.isnan(l)].tolist() for l in list_of_series_val]
list_of_series.extend(list_of_series_val)
groups_val = np.array([g + ', validation' for g in groups])
groups = np.append(groups, groups_val)
if k is 'return_val':
IPython.embed()
list_of_genera = [x for _,x in sorted(zip(groups.tolist(), list_of_genera))]
list_of_series = [x for _,x in sorted(zip(groups.tolist(), list_of_series))]
groups.sort()
plot.timeseries_mean_grouped(list_of_genera, list_of_series, groups, xlabel='generations', ylabel=k, map_labels='supervised')
if 'return' in k:
plt.gca().set_ylim(0, 1.5)
elif 'accuracy' in k:
plt.gca().set_ylim(0.4, 1)
plt.savefig(os.path.join(result_dir, k + '-all-series-mean-sd' + '.pdf'), bbox_inches='tight')
plt.close()
if i_key + 1 == n_keys:
print(f.format(i_key+1, n_keys), end='\n')
else:
print(f.format(i_key+1, n_keys), end='\r')
def get_directories(experiment_id):
this_file_dir_local = os.path.dirname(os.path.abspath(__file__))
package_root_this_file = fs.get_parent(this_file_dir_local, 'es-rl')
d = os.path.join(package_root_this_file, 'experiments', 'checkpoints', experiment_id)
directories = [os.path.join(d, di) for di in os.listdir(d) if os.path.isdir(os.path.join(d, di))]
directories = [d for d in directories if 'monitoring' not in d and 'analysis' not in d]
result_dir = os.path.join(d, str(experiment_id[:4]))
dst_dir = '/home/jakob/Dropbox/Apps/ShareLaTeX/Master\'s Thesis/graphics/' + experiment_id[:4]
if not os.path.exists(result_dir + '-bn-analysis'):
os.mkdir(result_dir + '-bn-analysis'),
if not os.path.exists(result_dir + '-init-analysis'):
os.mkdir(result_dir + '-init-analysis')
return directories, result_dir, dst_dir
def load(experiment_id, optimizer):
stats_init = []
stats_bn = []
groups_init = np.array([])
groups_bn = np.array([])
for d in directories:
try:
st = pd.read_csv(os.path.join(d, 'stats.csv'))
with open(os.path.join(d, 'init.log'), 'r') as f:
s = f.read()
if 'MNISTNetNoInit' in s:
groups_init = np.append(groups_init, 'Default init' + optimizer) # Has BN
stats_init.append(st)
elif 'MNISTNetNoBN' in s:
groups_bn = np.append(groups_bn, 'No Batchnorm' + optimizer) # Has Xavier Glorot
stats_bn.append(st)
else:
groups_bn = np.append(groups_bn, 'Batchnorm' + optimizer) # Has Xavier Glorot
groups_init = np.append(groups_init, 'Xavier-Glorot' + optimizer) # Has BN
stats_init.append(st)
stats_bn.append(st)
except:
print("None in: " + d)
return stats_init, stats_bn, groups_init, groups_bn
if __name__ == '__main__':
# Ignore warnings from matplotlib
warnings.filterwarnings("ignore", module="matplotlib")
# Font setting
matplotlib.rcParams.update({'font.size': 12})
# Experiment IDs
experiment_ids = ['E017-bn-init', 'E020-bn-init']
# Optimizer labels
# optimizers = [', SGD', ', ADAM']
optimizers = ['', '']
# Keys to analyze
keys_to_plot = {'return_unp', 'return_avg', 'accuracy_unp', 'accuracy_avg', 'sigma'}
# Analyze
for experiment_id, optimizer in zip(experiment_ids, optimizers):
# Get directories
directories, result_dir, dst_dir = get_directories(experiment_id)
if len(directories) == 0:
print('No results for {}'.format(experiment_id))
continue
# Load data
stats_init, stats_bn, groups_init, groups_bn = load(experiment_id, optimizer)
# Plot
invert_signs(stats_init)
invert_signs(stats_bn)
create_plots(stats_init, keys_to_plot, groups_init, result_dir + '-init-analysis', include_val=True)
create_plots(stats_bn, keys_to_plot, groups_bn, result_dir + '-bn-analysis', include_val=True)
copy_tree(result_dir + '-init-analysis', dst_dir + '-init-analysis')
copy_tree(result_dir + '-bn-analysis', dst_dir + '-bn-analysis')
| true | true |
f720d1f5708dbc5ccf4ce7f998568b7bcfcee378 | 686 | py | Python | test/test_relay.py | steinwurf/kodo-simulations-python | f9d9bcce70adf1666cf8bac9f352fbbf640ca783 | [
"BSD-3-Clause"
] | 2 | 2017-12-09T20:41:02.000Z | 2022-01-10T23:23:01.000Z | test/test_relay.py | steinwurf/kodo-simulations-python | f9d9bcce70adf1666cf8bac9f352fbbf640ca783 | [
"BSD-3-Clause"
] | null | null | null | test/test_relay.py | steinwurf/kodo-simulations-python | f9d9bcce70adf1666cf8bac9f352fbbf640ca783 | [
"BSD-3-Clause"
] | 5 | 2016-10-12T12:18:59.000Z | 2022-01-10T23:23:55.000Z | #! /usr/bin/env python
# encoding: utf-8
import sys
sys.path.append('..')
sys.path.append('mock')
import unittest
from mock import Mock
import simulator.relay
class TestPacket(unittest.TestCase):
"""Class for testing Relay."""
def test_instantiation(self):
"""Test instantiation."""
id = "test_id"
stats = {}
decoder = Mock(name="decoder_object")
decoder.block_size = Mock(return_value=100)
c = simulator.relay.Relay(id, stats, decoder)
self.assertEqual(c.sender.id, id)
self.assertEqual(c.receiver.id, id)
self.assertEqual(c.receiver.decoder, decoder)
if __name__ == '__main__':
unittest.main()
| 23.655172 | 53 | 0.650146 |
import sys
sys.path.append('..')
sys.path.append('mock')
import unittest
from mock import Mock
import simulator.relay
class TestPacket(unittest.TestCase):
def test_instantiation(self):
id = "test_id"
stats = {}
decoder = Mock(name="decoder_object")
decoder.block_size = Mock(return_value=100)
c = simulator.relay.Relay(id, stats, decoder)
self.assertEqual(c.sender.id, id)
self.assertEqual(c.receiver.id, id)
self.assertEqual(c.receiver.decoder, decoder)
if __name__ == '__main__':
unittest.main()
| true | true |
f720d23a79090927f1bcc5cdbf04f6da46a364cb | 10,513 | py | Python | ui_automation_tests/step_defs/test_open_application.py | uktrade/lite-exporter-frontend | cf42ac37a21236486aa303c8935c44a7eba91ef5 | [
"MIT"
] | 3 | 2019-05-31T06:36:17.000Z | 2020-02-12T16:02:24.000Z | ui_automation_tests/step_defs/test_open_application.py | uktrade/lite-exporter-frontend | cf42ac37a21236486aa303c8935c44a7eba91ef5 | [
"MIT"
] | 33 | 2019-03-28T10:20:14.000Z | 2020-07-16T15:12:43.000Z | ui_automation_tests/step_defs/test_open_application.py | uktrade/lite-exporter-frontend | cf42ac37a21236486aa303c8935c44a7eba91ef5 | [
"MIT"
] | 1 | 2019-05-01T15:52:02.000Z | 2019-05-01T15:52:02.000Z | from pytest_bdd import scenarios, when, then, parsers
import ui_automation_tests.shared.tools.helpers as utils
from ui_automation_tests.pages.generic_application.task_list import TaskListPage
from ui_automation_tests.pages.open_application.country_contract_types import OpenApplicationCountryContractTypes
from ui_automation_tests.pages.open_application.country_contract_types_summary import (
OpenApplicationCountryContractTypesSummaryPage,
)
from ui_automation_tests.pages.exporter_hub_page import ExporterHubPage
from ui_automation_tests.pages.generic_application.ultimate_end_users import GenericApplicationUltimateEndUsers
from ui_automation_tests.shared import functions
from ui_automation_tests.conftest import (
enter_type_of_application,
enter_application_name,
enter_permanent_or_temporary,
choose_open_licence_category,
answer_firearms_question,
)
from ui_automation_tests.pages.apply_for_a_licence_page import ApplyForALicencePage
from ui_automation_tests.pages.open_application.countries import OpenApplicationCountriesPage
from ui_automation_tests.pages.open_application.goods_countries_page import GoodsCountriesPage
from ui_automation_tests.pages.open_application.goods_types import OpenApplicationGoodsTypesPage
from ui_automation_tests.pages.standard_application.goods import StandardApplicationGoodsPage
scenarios(
"../features/submit_open_application.feature", "../features/edit_open_application.feature", strict_gherkin=False
)
@then(parsers.parse('I see my goods type added at position "{position}" with a description and a control code'))
def i_see_the_goods_types_list(driver, position, context):
goods_type_page = OpenApplicationGoodsTypesPage(driver)
good_type = goods_type_page.get_text_of_goods_type_info(int(position))
assert context.good_description in good_type
assert context.control_code in good_type
@then(parsers.parse("I see a list of the preselected media products"))
def i_see_the_goods_types_list_media_oiel(driver, context):
goods_type_page = OpenApplicationGoodsTypesPage(driver)
goods_types = goods_type_page.get_number_of_goods()
assert len(goods_types) == 7
@then(parsers.parse("I see a list of the preselected cryptographic products"))
def i_see_the_goods_types_list_cryptographic_oiel(driver, context):
goods_type_page = OpenApplicationGoodsTypesPage(driver)
goods_types = goods_type_page.get_number_of_goods()
assert len(goods_types) == 4
@then("I should see a list of countries")
def i_should_see_a_list_of_countries(driver):
application_countries_list = OpenApplicationCountriesPage(driver)
page_countries = application_countries_list.get_countries_names()
assert len(page_countries) == 273
assert "United Kingdom" not in page_countries
@then("I should see a list of all countries that have been preselected")
def i_should_see_a_list_of_countries(driver):
application_countries_list = OpenApplicationCountriesPage(driver)
page_countries = application_countries_list.get_static_destinations_list()
assert len(page_countries) == 273
assert "United Kingdom" not in page_countries
@then("I should see a list of the countries permitted for a cryptographic OIEL")
def i_should_see_a_list_of_countries_cryptographic_oiel(driver):
application_countries_list = OpenApplicationCountriesPage(driver)
page_countries = application_countries_list.get_static_destinations_list()
assert len(page_countries) == 213
assert "United Kingdom" not in page_countries
@then("I should see the UK Continental Shelf as the only permitted destination")
def i_should_see_a_list_of_countries_uk_continental_shelf_oiel(driver):
application_countries_list = OpenApplicationCountriesPage(driver)
page_countries = application_countries_list.get_static_destinations_list()
assert len(page_countries) == 1
assert page_countries[0] == "UK Continental Shelf"
@when(parsers.parse('I select "{country}" from the country list'))
def i_select_country_from_the_country_list(driver, country):
application_countries_list = OpenApplicationCountriesPage(driver)
application_countries_list.select_country(country)
assert utils.find_element_by_href(driver, "#" + country).is_displayed()
@when(parsers.parse('I search for country "{country}"'))
def search_for_country(driver, country):
OpenApplicationCountriesPage(driver).search_for_country(country)
@then(parsers.parse('only "{country}" is displayed in country list'))
def search_country_result(driver, country):
assert (
country == OpenApplicationCountriesPage(driver).get_text_of_countries_list()
), "Country not searched correctly"
@when("I click select all countries")
def select_all_countries(driver):
page = OpenApplicationCountriesPage(driver)
page.click_select_all()
@then("all checkboxes are selected")
def all_selected(driver):
page = OpenApplicationCountriesPage(driver)
assert page.get_number_of_checkboxes(checked=False) == page.get_number_of_checkboxes(checked=True)
@when("I select that I want to add the same sectors and contract types to all countries")
def select_yes_to_all_countries_with_the_same_contract_types(driver):
OpenApplicationCountryContractTypes(driver).select_same_contract_types_for_all_countries_radio_button()
@when("I select contract types for all countries")
def select_contract_types_for_all_countries(driver, context):
page = OpenApplicationCountryContractTypes(driver)
context.contract_types = [
{"id": "Navy", "value": "Navy"},
{
"id": "Aircraft-manufacturers,-maintainers-or-operators",
"value": "Aircraft manufacturers, maintainers or operators",
},
{"id": "Pharmaceutical-or-medical", "value": "Pharmaceutical or medical"},
]
page.select_contract_type(context.contract_types[0]["id"])
page.select_contract_type(context.contract_types[1]["id"])
page.select_contract_type(context.contract_types[2]["id"])
page.select_other_contract_type_and_fill_in_details()
functions.click_submit(driver)
@then("I should see all countries and the chosen contract types on the destination summary list")
def i_should_see_destinations_summary_countries_contract_types(driver, context):
page = OpenApplicationCountryContractTypesSummaryPage(driver)
countries_and_contract_types = page.get_countries_with_respective_contract_types()
assert len(countries_and_contract_types) == 273
assert "United Kingdom" not in countries_and_contract_types
for country_with_contract_types in countries_and_contract_types:
for contract_type in context.contract_types:
assert contract_type["value"] in country_with_contract_types[1]
@then(
"I should see the UK Continental Shelf as the only destination and the chosen contract types on the destination summary list"
)
def i_should_see_destinations_summary_uk_continental_shelf_contract_types(driver, context):
page = OpenApplicationCountryContractTypesSummaryPage(driver)
countries_and_contract_types = page.get_countries_with_respective_contract_types()
assert len(countries_and_contract_types) == 1
assert countries_and_contract_types[0][0] == "UK Continental Shelf"
for country_with_contract_types in countries_and_contract_types:
for contract_type in context.contract_types:
assert contract_type["value"] in country_with_contract_types[1]
@when(parsers.parse('I "{assign_or_unassign}" all countries to all goods with link'))
def assign_all_with_link(driver, assign_or_unassign):
countries_page = GoodsCountriesPage(driver)
if assign_or_unassign == "assign":
countries_page.select_all_link()
countries_page.click_save()
else:
countries_page.deselect_all_link()
@when("I click Add goods type button")
def click_goods_type_button(driver):
OpenApplicationGoodsTypesPage(driver).click_add_good_button()
@then(parsers.parse('I see all countries are "{assigned_or_unassigned}" to all goods'))
def see_all_or_no_selected(driver, assigned_or_unassigned):
countries_page = GoodsCountriesPage(driver)
if assigned_or_unassigned == "assigned":
assert countries_page.all_selected()
else:
assert countries_page.all_deselected()
@when(parsers.parse('I create an open application of a "{export_type}" export type')) # noqa
def create_open_app(driver, export_type, context): # noqa
ExporterHubPage(driver).click_apply_for_a_licence()
ApplyForALicencePage(driver).select_licence_type("export_licence")
functions.click_submit(driver)
enter_type_of_application(driver, "oiel", context)
choose_open_licence_category(driver, "military", context)
enter_permanent_or_temporary(driver, export_type, context)
enter_application_name(driver, context)
answer_firearms_question(driver)
@when(parsers.parse('I create an open application for an export licence of the "{licence_type}" licence type')) # noqa
def create_open_app_of_specific_type(driver, licence_type, context): # noqa
ExporterHubPage(driver).click_apply_for_a_licence()
ApplyForALicencePage(driver).select_licence_type("export_licence")
functions.click_submit(driver)
enter_type_of_application(driver, "oiel", context)
choose_open_licence_category(driver, licence_type, context)
if licence_type in ["military", "uk_continental_shelf"]:
enter_permanent_or_temporary(driver, "permanent", context)
enter_application_name(driver, context)
if licence_type in ["military", "uk_continental_shelf"]:
answer_firearms_question(driver)
@when("I click on the add button")
def i_click_on_the_add_button(driver):
GenericApplicationUltimateEndUsers(driver).click_add_ultimate_recipient_button()
@when("I remove a good type from the application")
def i_remove_a_good_from_the_application(driver):
remove_good_link = StandardApplicationGoodsPage(driver).find_remove_goods_type_link()
driver.execute_script("arguments[0].click();", remove_good_link)
@then("no goods types are left on the application")
def no_goods_types_are_left_on_the_application(driver):
assert (OpenApplicationGoodsTypesPage(driver).find_remove_goods_type_link(), None)
@then(parsers.parse('I cannot see the sections "{sections}"')) # noqa
def sections_did_not_appear_on_task_list(driver, sections): # noqa
sections = sections.split(", ")
for section in sections:
assert TaskListPage(driver).get_section(section) is None
| 44.54661 | 129 | 0.799106 | from pytest_bdd import scenarios, when, then, parsers
import ui_automation_tests.shared.tools.helpers as utils
from ui_automation_tests.pages.generic_application.task_list import TaskListPage
from ui_automation_tests.pages.open_application.country_contract_types import OpenApplicationCountryContractTypes
from ui_automation_tests.pages.open_application.country_contract_types_summary import (
OpenApplicationCountryContractTypesSummaryPage,
)
from ui_automation_tests.pages.exporter_hub_page import ExporterHubPage
from ui_automation_tests.pages.generic_application.ultimate_end_users import GenericApplicationUltimateEndUsers
from ui_automation_tests.shared import functions
from ui_automation_tests.conftest import (
enter_type_of_application,
enter_application_name,
enter_permanent_or_temporary,
choose_open_licence_category,
answer_firearms_question,
)
from ui_automation_tests.pages.apply_for_a_licence_page import ApplyForALicencePage
from ui_automation_tests.pages.open_application.countries import OpenApplicationCountriesPage
from ui_automation_tests.pages.open_application.goods_countries_page import GoodsCountriesPage
from ui_automation_tests.pages.open_application.goods_types import OpenApplicationGoodsTypesPage
from ui_automation_tests.pages.standard_application.goods import StandardApplicationGoodsPage
scenarios(
"../features/submit_open_application.feature", "../features/edit_open_application.feature", strict_gherkin=False
)
@then(parsers.parse('I see my goods type added at position "{position}" with a description and a control code'))
def i_see_the_goods_types_list(driver, position, context):
goods_type_page = OpenApplicationGoodsTypesPage(driver)
good_type = goods_type_page.get_text_of_goods_type_info(int(position))
assert context.good_description in good_type
assert context.control_code in good_type
@then(parsers.parse("I see a list of the preselected media products"))
def i_see_the_goods_types_list_media_oiel(driver, context):
goods_type_page = OpenApplicationGoodsTypesPage(driver)
goods_types = goods_type_page.get_number_of_goods()
assert len(goods_types) == 7
@then(parsers.parse("I see a list of the preselected cryptographic products"))
def i_see_the_goods_types_list_cryptographic_oiel(driver, context):
goods_type_page = OpenApplicationGoodsTypesPage(driver)
goods_types = goods_type_page.get_number_of_goods()
assert len(goods_types) == 4
@then("I should see a list of countries")
def i_should_see_a_list_of_countries(driver):
application_countries_list = OpenApplicationCountriesPage(driver)
page_countries = application_countries_list.get_countries_names()
assert len(page_countries) == 273
assert "United Kingdom" not in page_countries
@then("I should see a list of all countries that have been preselected")
def i_should_see_a_list_of_countries(driver):
application_countries_list = OpenApplicationCountriesPage(driver)
page_countries = application_countries_list.get_static_destinations_list()
assert len(page_countries) == 273
assert "United Kingdom" not in page_countries
@then("I should see a list of the countries permitted for a cryptographic OIEL")
def i_should_see_a_list_of_countries_cryptographic_oiel(driver):
application_countries_list = OpenApplicationCountriesPage(driver)
page_countries = application_countries_list.get_static_destinations_list()
assert len(page_countries) == 213
assert "United Kingdom" not in page_countries
@then("I should see the UK Continental Shelf as the only permitted destination")
def i_should_see_a_list_of_countries_uk_continental_shelf_oiel(driver):
application_countries_list = OpenApplicationCountriesPage(driver)
page_countries = application_countries_list.get_static_destinations_list()
assert len(page_countries) == 1
assert page_countries[0] == "UK Continental Shelf"
@when(parsers.parse('I select "{country}" from the country list'))
def i_select_country_from_the_country_list(driver, country):
application_countries_list = OpenApplicationCountriesPage(driver)
application_countries_list.select_country(country)
assert utils.find_element_by_href(driver, "#" + country).is_displayed()
@when(parsers.parse('I search for country "{country}"'))
def search_for_country(driver, country):
OpenApplicationCountriesPage(driver).search_for_country(country)
@then(parsers.parse('only "{country}" is displayed in country list'))
def search_country_result(driver, country):
assert (
country == OpenApplicationCountriesPage(driver).get_text_of_countries_list()
), "Country not searched correctly"
@when("I click select all countries")
def select_all_countries(driver):
page = OpenApplicationCountriesPage(driver)
page.click_select_all()
@then("all checkboxes are selected")
def all_selected(driver):
page = OpenApplicationCountriesPage(driver)
assert page.get_number_of_checkboxes(checked=False) == page.get_number_of_checkboxes(checked=True)
@when("I select that I want to add the same sectors and contract types to all countries")
def select_yes_to_all_countries_with_the_same_contract_types(driver):
OpenApplicationCountryContractTypes(driver).select_same_contract_types_for_all_countries_radio_button()
@when("I select contract types for all countries")
def select_contract_types_for_all_countries(driver, context):
page = OpenApplicationCountryContractTypes(driver)
context.contract_types = [
{"id": "Navy", "value": "Navy"},
{
"id": "Aircraft-manufacturers,-maintainers-or-operators",
"value": "Aircraft manufacturers, maintainers or operators",
},
{"id": "Pharmaceutical-or-medical", "value": "Pharmaceutical or medical"},
]
page.select_contract_type(context.contract_types[0]["id"])
page.select_contract_type(context.contract_types[1]["id"])
page.select_contract_type(context.contract_types[2]["id"])
page.select_other_contract_type_and_fill_in_details()
functions.click_submit(driver)
@then("I should see all countries and the chosen contract types on the destination summary list")
def i_should_see_destinations_summary_countries_contract_types(driver, context):
page = OpenApplicationCountryContractTypesSummaryPage(driver)
countries_and_contract_types = page.get_countries_with_respective_contract_types()
assert len(countries_and_contract_types) == 273
assert "United Kingdom" not in countries_and_contract_types
for country_with_contract_types in countries_and_contract_types:
for contract_type in context.contract_types:
assert contract_type["value"] in country_with_contract_types[1]
@then(
"I should see the UK Continental Shelf as the only destination and the chosen contract types on the destination summary list"
)
def i_should_see_destinations_summary_uk_continental_shelf_contract_types(driver, context):
page = OpenApplicationCountryContractTypesSummaryPage(driver)
countries_and_contract_types = page.get_countries_with_respective_contract_types()
assert len(countries_and_contract_types) == 1
assert countries_and_contract_types[0][0] == "UK Continental Shelf"
for country_with_contract_types in countries_and_contract_types:
for contract_type in context.contract_types:
assert contract_type["value"] in country_with_contract_types[1]
@when(parsers.parse('I "{assign_or_unassign}" all countries to all goods with link'))
def assign_all_with_link(driver, assign_or_unassign):
countries_page = GoodsCountriesPage(driver)
if assign_or_unassign == "assign":
countries_page.select_all_link()
countries_page.click_save()
else:
countries_page.deselect_all_link()
@when("I click Add goods type button")
def click_goods_type_button(driver):
OpenApplicationGoodsTypesPage(driver).click_add_good_button()
@then(parsers.parse('I see all countries are "{assigned_or_unassigned}" to all goods'))
def see_all_or_no_selected(driver, assigned_or_unassigned):
countries_page = GoodsCountriesPage(driver)
if assigned_or_unassigned == "assigned":
assert countries_page.all_selected()
else:
assert countries_page.all_deselected()
@when(parsers.parse('I create an open application of a "{export_type}" export type'))
def create_open_app(driver, export_type, context):
ExporterHubPage(driver).click_apply_for_a_licence()
ApplyForALicencePage(driver).select_licence_type("export_licence")
functions.click_submit(driver)
enter_type_of_application(driver, "oiel", context)
choose_open_licence_category(driver, "military", context)
enter_permanent_or_temporary(driver, export_type, context)
enter_application_name(driver, context)
answer_firearms_question(driver)
@when(parsers.parse('I create an open application for an export licence of the "{licence_type}" licence type'))
def create_open_app_of_specific_type(driver, licence_type, context):
ExporterHubPage(driver).click_apply_for_a_licence()
ApplyForALicencePage(driver).select_licence_type("export_licence")
functions.click_submit(driver)
enter_type_of_application(driver, "oiel", context)
choose_open_licence_category(driver, licence_type, context)
if licence_type in ["military", "uk_continental_shelf"]:
enter_permanent_or_temporary(driver, "permanent", context)
enter_application_name(driver, context)
if licence_type in ["military", "uk_continental_shelf"]:
answer_firearms_question(driver)
@when("I click on the add button")
def i_click_on_the_add_button(driver):
GenericApplicationUltimateEndUsers(driver).click_add_ultimate_recipient_button()
@when("I remove a good type from the application")
def i_remove_a_good_from_the_application(driver):
remove_good_link = StandardApplicationGoodsPage(driver).find_remove_goods_type_link()
driver.execute_script("arguments[0].click();", remove_good_link)
@then("no goods types are left on the application")
def no_goods_types_are_left_on_the_application(driver):
assert (OpenApplicationGoodsTypesPage(driver).find_remove_goods_type_link(), None)
@then(parsers.parse('I cannot see the sections "{sections}"'))
def sections_did_not_appear_on_task_list(driver, sections):
sections = sections.split(", ")
for section in sections:
assert TaskListPage(driver).get_section(section) is None
| true | true |
f720d28d694930288ecc3e99c146b144020f7a87 | 13,442 | py | Python | lib/redis_cache/rediscache.py | eapearson/kb_Metrics | f1c3c8457577060c9c695d6f4cbb7ec8f7fae17f | [
"MIT"
] | null | null | null | lib/redis_cache/rediscache.py | eapearson/kb_Metrics | f1c3c8457577060c9c695d6f4cbb7ec8f7fae17f | [
"MIT"
] | null | null | null | lib/redis_cache/rediscache.py | eapearson/kb_Metrics | f1c3c8457577060c9c695d6f4cbb7ec8f7fae17f | [
"MIT"
] | null | null | null | """
A simple redis-cache interface for storing python objects.
"""
from functools import wraps
import pickle
import json
import hashlib
import redis
import logging
from redis._compat import basestring, unicode
# Default time-to-live for cached entries: 24 hours, expressed in seconds.
DEFAULT_EXPIRY = 60 * 60 * 24
class RedisConnect(object):
    """
    A simple object to store and pass database connection information.
    This makes the Simple Cache class a little more flexible, for cases
    where redis connection configuration needs customizing.
    """
    def __init__(self, host=None, port=None, db=None, password=None):
        # Fall back to a local default redis instance when not configured.
        self.host = host if host else 'localhost'
        self.port = port if port else 6379
        self.db = db if db else 0
        self.password = password

    def connect(self):
        """
        We cannot assume that connection will succeed, as such we use a ping()
        method in the redis client library to validate ability to contact redis.
        RedisNoConnException is raised if we fail to ping.
        :return: redis.StrictRedis Connection Object
        """
        # Build the client once with the full configuration (the original
        # created a throwaway client just for the ping, which also ignored
        # the configured db) and verify reachability before returning it.
        client = redis.StrictRedis(host=self.host,
                                   port=self.port,
                                   db=self.db,
                                   password=self.password)
        try:
            client.ping()
        except redis.ConnectionError:
            raise RedisNoConnException("Failed to create connection to redis",
                                       (self.host,
                                        self.port)
                                       )
        return client
class CacheMissException(Exception):
    """Raised by get() when the requested key was never stored in the cache."""
    pass
class ExpiredKeyException(Exception):
    """Raised by get() when the key is tracked but its stored value has expired."""
    pass
class RedisNoConnException(Exception):
    """Raised when the configured redis server cannot be reached (ping failed)."""
    pass
class DoNotCache(Exception):
    """Escape hatch for cached functions: raise this exception carrying a
    value to hand that value back to the caller without storing it.
    """

    # Class-level default so `result` is well-defined even before __init__.
    _result = None

    def __init__(self, result):
        self._result = result
        super(DoNotCache, self).__init__()

    @property
    def result(self):
        """The wrapped return value that should bypass the cache."""
        return self._result
class SimpleCache(object):
    """
    A size-bounded cache of python values stored in redis.

    Each value lives under a namespaced redis key (see make_key()), and the
    bare keys are tracked in a companion redis set (see get_set_name()) so
    the cache can enforce `limit` by evicting arbitrary members and so bulk
    invalidation knows which keys it owns.
    """

    def __init__(self,
                 limit=10000,
                 expire=DEFAULT_EXPIRY,
                 hashkeys=False,
                 host=None,
                 port=None,
                 db=None,
                 password=None,
                 namespace="SimpleCache"):
        """
        :param limit: maximum number of keys tracked before eviction kicks in
        :param expire: default time-to-live (seconds) for stored values
        :param hashkeys: if True, get_hash() returns an md5 hexdigest of the
                         pickled arguments instead of the pickle itself
        :param host: redis host (defaults handled by RedisConnect)
        :param port: redis port
        :param db: redis database number
        :param password: redis password
        :param namespace: prefix used to isolate this cache's keys
        """
        self.limit = limit  # No of json encoded strings to cache
        self.expire = expire  # Time to keys to expire in seconds
        self.prefix = namespace
        self.host = host
        self.port = port
        self.db = db

        try:
            self.connection = RedisConnect(host=self.host,
                                           port=self.port,
                                           db=self.db,
                                           password=password).connect()
        except RedisNoConnException:
            # Degraded, connection-less mode: construction must not fail,
            # although most operations will error later.
            self.connection = None

        # Should we hash keys? There is a very small risk of collision involved.
        self.hashkeys = hashkeys

    def make_key(self, key):
        """Return the namespaced redis key under which a value is stored."""
        return "SimpleCache-{0}:{1}".format(self.prefix, key)

    def namespace_key(self, namespace):
        """Return a glob pattern matching every redis key in `namespace`."""
        return self.make_key(namespace + ':*')

    def get_set_name(self):
        """Return the name of the redis set that tracks this cache's keys."""
        return "SimpleCache-{0}-keys".format(self.prefix)

    def store(self, key, value, expire=None):
        """
        Method stores a value after checking for space constraints and
        freeing up space if required.
        :param key: key by which to reference datum being stored in Redis
        :param value: actual value being stored under this key
        :param expire: time-to-live (ttl) for this datum; 0 or a negative
                       int stores the key without an expiry
        """
        key = to_unicode(key)
        value = to_unicode(value)
        set_name = self.get_set_name()

        # Evict arbitrary tracked members until we are under the limit.
        while self.connection.scard(set_name) >= self.limit:
            del_key = self.connection.spop(set_name)
            self.connection.delete(self.make_key(del_key))

        pipe = self.connection.pipeline()
        if expire is None:
            expire = self.expire

        # expire can still be None here if self.expire was set to None.
        if (isinstance(expire, int) and expire <= 0) or (expire is None):
            pipe.set(self.make_key(key), value)
        else:
            pipe.setex(self.make_key(key), expire, value)

        pipe.sadd(set_name, key)
        pipe.execute()

    def expire_all_in_set(self):
        """
        Method expires all keys in the namespace of this object.

        At times there is a need to invalidate cache in bulk, because a
        single change may result in all data returned by a decorated function
        to be altered.
        Method returns a tuple where first value is total number of keys in
        the set of this object's namespace and second value is a number of
        keys successfully expired.
        :return: int, int
        """
        all_members = self.keys()
        keys = [self.make_key(k) for k in all_members]

        if keys:  # DEL with no arguments is a redis error
            with self.connection.pipeline() as pipe:
                pipe.delete(*keys)
                pipe.execute()

        return len(self), len(all_members)

    def expire_namespace(self, namespace):
        """
        Method expires all keys in the given sub-namespace of this object.

        Method returns a tuple where first value is total number of keys in
        the set of this object's namespace and second value is a number of
        keys successfully expired.
        :return: int, int
        """
        namespace = self.namespace_key(namespace)
        all_members = list(self.connection.keys(namespace))

        if all_members:  # DEL with no arguments is a redis error
            with self.connection.pipeline() as pipe:
                pipe.delete(*all_members)
                pipe.execute()

        return len(self), len(all_members)

    def isexpired(self, key):
        """
        Method determines whether a given key is already expired. If not expired,
        we expect to get back current ttl for the given key.
        :param key: key being looked-up in Redis
        :return: bool (True) if expired, or int representing current time-to-live (ttl) value
        """
        # redis pttl returns -2 for a missing key and -1 for a key with no expiry.
        ttl = self.connection.pttl("SimpleCache-{0}".format(key))
        if ttl == -2:  # not exist under the bare name; try the namespaced key
            ttl = self.connection.pttl(self.make_key(key))
        elif ttl == -1:
            return True
        if ttl is not None:
            return ttl
        else:
            return self.connection.pttl("{0}:{1}".format(self.prefix, key))

    def store_json(self, key, value, expire=None):
        """Store `value` serialized as JSON."""
        self.store(key, json.dumps(value), expire)

    def store_pickle(self, key, value, expire=None):
        """Store `value` serialized with pickle."""
        self.store(key, pickle.dumps(value), expire)

    def get(self, key):
        """
        Return the raw stored value for `key`.

        :raises CacheMissException: if the key was never stored
        :raises ExpiredKeyException: if the key was stored but has expired
        """
        key = to_unicode(key)
        if key:  # No need to validate membership, which is an O(1) operation, but seems we can do without.
            value = self.connection.get(self.make_key(key))
            if value is None:  # expired key
                if key not in self:  # If key does not exist at all, it is a straight miss.
                    raise CacheMissException

                # Key was tracked but its value expired: untrack it.
                self.connection.srem(self.get_set_name(), key)
                raise ExpiredKeyException
            else:
                return value

    def mget(self, keys):
        """
        Method returns a dict of key/values for found keys.
        :param keys: array of keys to look up in Redis
        :return: dict of found key/values
        """
        if keys:
            raw_keys = [to_unicode(key) for key in keys]
            cache_keys = [self.make_key(key) for key in raw_keys]
            values = self.connection.mget(cache_keys)

            if None in values:
                pipe = self.connection.pipeline()
                for raw_key, value in zip(raw_keys, values):
                    if value is None:  # non-existant or expired key
                        # The tracking set stores the bare key; the original
                        # removed the namespaced cache key, which is never a
                        # member of the set, so expired keys stayed tracked.
                        pipe.srem(self.get_set_name(), raw_key)
                pipe.execute()

            return {k: v for (k, v) in zip(keys, values) if v is not None}

    def get_json(self, key):
        """Return the value for `key`, parsed from JSON."""
        return json.loads(self.get(key))

    def get_pickle(self, key):
        """Return the value for `key`, deserialized with pickle."""
        return pickle.loads(self.get(key))

    def mget_json(self, keys):
        """
        Method returns a dict of key/values for found keys with each value
        parsed from JSON format.
        :param keys: array of keys to look up in Redis
        :return: dict of found key/values with values parsed from JSON format
        """
        d = self.mget(keys)
        if d:
            for key in d.keys():
                d[key] = json.loads(d[key]) if d[key] else None
            return d

    def invalidate(self, key):
        """
        Method removes (invalidates) an item from the cache.
        :param key: key to remove from Redis
        """
        key = to_unicode(key)
        pipe = self.connection.pipeline()
        pipe.srem(self.get_set_name(), key)
        pipe.delete(self.make_key(key))
        pipe.execute()

    def __contains__(self, key):
        return self.connection.sismember(self.get_set_name(), key)

    def __iter__(self):
        # NOTE(review): under Python 3 smembers() returns bytes, so the
        # formatted strings embed the b'...' repr -- confirm whether any
        # caller relies on the exact format before changing it.
        if not self.connection:
            return iter([])
        return iter(
            ["{0}:{1}".format(self.prefix, x)
             for x in self.connection.smembers(self.get_set_name())
             ])

    def __len__(self):
        return self.connection.scard(self.get_set_name())

    def keys(self):
        """Return the set of bare (un-namespaced) keys tracked by this cache."""
        return self.connection.smembers(self.get_set_name())

    def flush(self):
        """Delete every cached value and the tracking set itself."""
        # Values live under the namespaced key (make_key), while the set
        # stores bare keys -- the original deleted the bare names, which
        # left the actual cached values behind in redis.
        keys = [self.make_key(k) for k in self.keys()]
        keys.append(self.get_set_name())
        with self.connection.pipeline() as pipe:
            pipe.delete(*keys)
            pipe.execute()

    def flush_namespace(self, space):
        """
        Delete all cached values under sub-namespace `space` and untrack them.
        :param space: namespace prefix used when the values were stored
        """
        namespace = self.namespace_key(space)
        setname = self.get_set_name()
        keys = list(self.connection.keys(namespace))

        if keys:
            # Members of the tracking set are the bare keys, i.e. the redis
            # key minus the "SimpleCache-<prefix>:" part.  The original did
            # srem(setname, *space), which unpacked the namespace string
            # into individual characters and removed nothing useful.
            prefix_len = len(self.make_key(""))
            members = []
            for k in keys:
                k = k.decode('utf-8') if isinstance(k, bytes) else k
                members.append(k[prefix_len:])
            with self.connection.pipeline() as pipe:
                pipe.delete(*keys)
                pipe.srem(setname, *members)
                pipe.execute()

    def get_hash(self, args):
        """Return the cache-key form of pickled call arguments `args`:
        an md5 hexdigest when hashkeys is on, else the pickle bytes."""
        if self.hashkeys:
            key = hashlib.md5(args).hexdigest()
        else:
            key = pickle.dumps(args)
        return key
def cache_it(limit=10000, expire=DEFAULT_EXPIRY, cache=None,
             use_json=False, namespace=None):
    """Decorator that memoizes a function's result in Redis.

    Arguments and function result must be serializable: pickle by default,
    JSON when ``use_json`` is True.

    :param limit: maximum number of keys to maintain in the set
    :param expire: period after which an entry in cache is considered expired
    :param cache: SimpleCache object, if created separately
    :param use_json: serialize keys/values with JSON instead of pickle
    :param namespace: optional extra prefix for generated cache keys
    :return: decorated function
    """
    cache_ = cache ## Since python 2.x doesn't have the nonlocal keyword, we need to do this
    expire_ = expire ## Same here.
    def decorator(function):
        cache, expire = cache_, expire_
        if cache is None:
            cache = SimpleCache(limit, expire, hashkeys=True, namespace=function.__module__)
        elif expire == DEFAULT_EXPIRY:
            # If the expire arg value is the default, set it to None so we store
            # the expire value of the passed cache object
            expire = None
        @wraps(function)
        def func(*args, **kwargs):
            ## Handle cases where caching is down or otherwise not available.
            if cache.connection is None:
                result = function(*args, **kwargs)
                return result
            serializer = json if use_json else pickle
            fetcher = cache.get_json if use_json else cache.get_pickle
            storer = cache.store_json if use_json else cache.store_pickle
            ## Key will be either a md5 hash or just pickle object,
            ## in the form of `function name`:`key`
            key = cache.get_hash(serializer.dumps([args, kwargs]))
            cache_key = '{func_name}:{key}'.format(func_name=function.__name__,
                                                   key=key)
            if namespace:
                cache_key = '{namespace}:{key}'.format(namespace=namespace,
                                                       key=cache_key)
            try:
                return fetcher(cache_key)
            except (ExpiredKeyException, CacheMissException):
                ## Cache miss: fall through and compute the real result.
                pass
            except Exception:
                # Bug fix: this was a bare ``except:``, which also trapped
                # SystemExit and KeyboardInterrupt. Exception keeps the
                # intended best-effort behaviour without swallowing
                # interpreter-exit signals.
                logging.exception("Unknown redis-simple-cache error. Please check your Redis free space.")
            try:
                result = function(*args, **kwargs)
            except DoNotCache as e:
                result = e.result
            else:
                try:
                    storer(cache_key, result, expire)
                except redis.ConnectionError as e:
                    logging.exception(e)
            return result
        return func
    return decorator
def cache_it_json(limit=10000, expire=DEFAULT_EXPIRY, cache=None, namespace=None):
    """JSON variant of :func:`cache_it`.

    Arguments and function result must be convertible to JSON.

    :param limit: maximum number of keys to maintain in the set
    :param expire: period after which an entry in cache is considered expired
    :param cache: SimpleCache object, if created separately
    :param namespace: optional extra prefix for generated cache keys
    :return: decorated function
    """
    # Bug fix: the original forwarded namespace=None, silently discarding
    # the caller's namespace argument.
    return cache_it(limit=limit, expire=expire, use_json=True,
                    cache=cache, namespace=namespace)
def to_unicode(obj, encoding='utf-8'):
    """Decode byte strings to unicode text; pass every other object through.

    :param encoding: codec used when ``obj`` is a byte string
    """
    if not isinstance(obj, basestring):
        return obj
    if isinstance(obj, unicode):
        return obj
    return unicode(obj, encoding)
| 34.64433 | 107 | 0.588157 | from functools import wraps
import pickle
import json
import hashlib
import redis
import logging
from redis._compat import basestring, unicode
DEFAULT_EXPIRY = 60 * 60 * 24
class RedisConnect(object):
def __init__(self, host=None, port=None, db=None, password=None):
self.host = host if host else 'localhost'
self.port = port if port else 6379
self.db = db if db else 0
self.password = password
def connect(self):
try:
redis.StrictRedis(host=self.host, port=self.port, password=self.password).ping()
except redis.ConnectionError as e:
raise RedisNoConnException("Failed to create connection to redis",
(self.host,
self.port)
)
return redis.StrictRedis(host=self.host,
port=self.port,
db=self.db,
password=self.password)
class CacheMissException(Exception):
pass
class ExpiredKeyException(Exception):
pass
class RedisNoConnException(Exception):
pass
class DoNotCache(Exception):
_result = None
def __init__(self, result):
super(DoNotCache, self).__init__()
self._result = result
@property
def result(self):
return self._result
class SimpleCache(object):
def __init__(self,
limit=10000,
expire=DEFAULT_EXPIRY,
hashkeys=False,
host=None,
port=None,
db=None,
password=None,
namespace="SimpleCache"):
self.limit = limit
self.expire = expire
self.prefix = namespace
self.host = host
self.port = port
self.db = db
try:
self.connection = RedisConnect(host=self.host,
port=self.port,
db=self.db,
password=password).connect()
except RedisNoConnException as e:
self.connection = None
pass
self.hashkeys = hashkeys
def make_key(self, key):
return "SimpleCache-{0}:{1}".format(self.prefix, key)
def namespace_key(self, namespace):
return self.make_key(namespace + ':*')
def get_set_name(self):
return "SimpleCache-{0}-keys".format(self.prefix)
def store(self, key, value, expire=None):
key = to_unicode(key)
value = to_unicode(value)
set_name = self.get_set_name()
while self.connection.scard(set_name) >= self.limit:
del_key = self.connection.spop(set_name)
self.connection.delete(self.make_key(del_key))
pipe = self.connection.pipeline()
if expire is None:
expire = self.expire
if (isinstance(expire, int) and expire <= 0) or (expire is None):
pipe.set(self.make_key(key), value)
else:
pipe.setex(self.make_key(key), expire, value)
pipe.sadd(set_name, key)
pipe.execute()
def expire_all_in_set(self):
all_members = self.keys()
keys = [self.make_key(k) for k in all_members]
with self.connection.pipeline() as pipe:
pipe.delete(*keys)
pipe.execute()
return len(self), len(all_members)
def expire_namespace(self, namespace):
namespace = self.namespace_key(namespace)
all_members = list(self.connection.keys(namespace))
with self.connection.pipeline() as pipe:
pipe.delete(*all_members)
pipe.execute()
return len(self), len(all_members)
def isexpired(self, key):
ttl = self.connection.pttl("SimpleCache-{0}".format(key))
if ttl == -2:
ttl = self.connection.pttl(self.make_key(key))
elif ttl == -1:
return True
if not ttl is None:
return ttl
else:
return self.connection.pttl("{0}:{1}".format(self.prefix, key))
def store_json(self, key, value, expire=None):
self.store(key, json.dumps(value), expire)
def store_pickle(self, key, value, expire=None):
self.store(key, pickle.dumps(value), expire)
def get(self, key):
key = to_unicode(key)
if key:
value = self.connection.get(self.make_key(key))
if value is None:
if not key in self:
raise CacheMissException
self.connection.srem(self.get_set_name(), key)
raise ExpiredKeyException
else:
return value
def mget(self, keys):
if keys:
cache_keys = [self.make_key(to_unicode(key)) for key in keys]
values = self.connection.mget(cache_keys)
if None in values:
pipe = self.connection.pipeline()
for cache_key, value in zip(cache_keys, values):
if value is None:
pipe.srem(self.get_set_name(), cache_key)
pipe.execute()
return {k: v for (k, v) in zip(keys, values) if v is not None}
def get_json(self, key):
return json.loads(self.get(key))
def get_pickle(self, key):
return pickle.loads(self.get(key))
def mget_json(self, keys):
d = self.mget(keys)
if d:
for key in d.keys():
d[key] = json.loads(d[key]) if d[key] else None
return d
def invalidate(self, key):
key = to_unicode(key)
pipe = self.connection.pipeline()
pipe.srem(self.get_set_name(), key)
pipe.delete(self.make_key(key))
pipe.execute()
def __contains__(self, key):
return self.connection.sismember(self.get_set_name(), key)
def __iter__(self):
if not self.connection:
return iter([])
return iter(
["{0}:{1}".format(self.prefix, x)
for x in self.connection.smembers(self.get_set_name())
])
def __len__(self):
return self.connection.scard(self.get_set_name())
def keys(self):
return self.connection.smembers(self.get_set_name())
def flush(self):
keys = list(self.keys())
keys.append(self.get_set_name())
with self.connection.pipeline() as pipe:
pipe.delete(*keys)
pipe.execute()
def flush_namespace(self, space):
namespace = self.namespace_key(space)
setname = self.get_set_name()
keys = list(self.connection.keys(namespace))
with self.connection.pipeline() as pipe:
pipe.delete(*keys)
pipe.srem(setname, *space)
pipe.execute()
def get_hash(self, args):
if self.hashkeys:
key = hashlib.md5(args).hexdigest()
else:
key = pickle.dumps(args)
return key
def cache_it(limit=10000, expire=DEFAULT_EXPIRY, cache=None,
use_json=False, namespace=None):
cache_ = cache cache, expire = cache_, expire_
if cache is None:
cache = SimpleCache(limit, expire, hashkeys=True, namespace=function.__module__)
elif expire == DEFAULT_EXPIRY:
# If the expire arg value is the default, set it to None so we store
# the expire value of the passed cache object
expire = None
@wraps(function)
def func(*args, **kwargs):
## Handle cases where caching is down or otherwise not available.
if cache.connection is None:
result = function(*args, **kwargs)
return result
serializer = json if use_json else pickle
fetcher = cache.get_json if use_json else cache.get_pickle
storer = cache.store_json if use_json else cache.store_pickle
## Key will be either a md5 hash or just pickle object,
## in the form of `function name`:`key`
key = cache.get_hash(serializer.dumps([args, kwargs]))
cache_key = '{func_name}:{key}'.format(func_name=function.__name__,
key=key)
if namespace:
cache_key = '{namespace}:{key}'.format(namespace=namespace,
key=cache_key)
try:
return fetcher(cache_key)
except (ExpiredKeyException, CacheMissException) as e:
## Add some sort of cache miss handing here.
pass
except:
logging.exception("Unknown redis-simple-cache error. Please check your Redis free space.")
try:
result = function(*args, **kwargs)
except DoNotCache as e:
result = e.result
else:
try:
storer(cache_key, result, expire)
except redis.ConnectionError as e:
logging.exception(e)
return result
return func
return decorator
def cache_it_json(limit=10000, expire=DEFAULT_EXPIRY, cache=None, namespace=None):
return cache_it(limit=limit, expire=expire, use_json=True,
cache=cache, namespace=None)
def to_unicode(obj, encoding='utf-8'):
if isinstance(obj, basestring):
if not isinstance(obj, unicode):
obj = unicode(obj, encoding)
return obj
| true | true |
f720d329eaad65945f4c82bf41d8502618bb8cd8 | 892 | py | Python | setup.py | msaroufim/spektral | 6881e6650602b2f98b09516f490c185678075bc8 | [
"MIT"
] | 1 | 2020-07-28T09:11:57.000Z | 2020-07-28T09:11:57.000Z | setup.py | msaroufim/spektral | 6881e6650602b2f98b09516f490c185678075bc8 | [
"MIT"
] | null | null | null | setup.py | msaroufim/spektral | 6881e6650602b2f98b09516f490c185678075bc8 | [
"MIT"
] | null | null | null | from setuptools import setup, find_packages
# Bug fix: read the README with an explicit encoding. Without it, open()
# uses the platform default codec (e.g. cp1252 on Windows), which can
# mangle or reject a UTF-8 README. The package is py3-only (tensorflow>=2),
# so the keyword argument is always available.
with open("README.md", "r", encoding="utf-8") as fh:
    long_description = fh.read()
# Package metadata and runtime dependencies for spektral.
setup(
    name='spektral',
    version='0.6.0',
    packages=find_packages(),
    install_requires=['tensorflow>=2.1.0',
                      'networkx',
                      'pandas',
                      'lxml',
                      'joblib',
                      'numpy',
                      'scipy',
                      'requests',
                      'scikit-learn'],
    url='https://github.com/danielegrattarola/spektral',
    license='MIT',
    author='Daniele Grattarola',
    author_email='daniele.grattarola@gmail.com',
    description='Graph Neural Networks with Keras and Tensorflow 2.',
    long_description=long_description,
    long_description_content_type="text/markdown",
    classifiers=[
        "Programming Language :: Python :: 3.5"
    ],
)
| 29.733333 | 69 | 0.545964 | from setuptools import setup, find_packages
with open("README.md", "r") as fh:
long_description = fh.read()
setup(
name='spektral',
version='0.6.0',
packages=find_packages(),
install_requires=['tensorflow>=2.1.0',
'networkx',
'pandas',
'lxml',
'joblib',
'numpy',
'scipy',
'requests',
'scikit-learn'],
url='https://github.com/danielegrattarola/spektral',
license='MIT',
author='Daniele Grattarola',
author_email='daniele.grattarola@gmail.com',
description='Graph Neural Networks with Keras and Tensorflow 2.',
long_description=long_description,
long_description_content_type="text/markdown",
classifiers=[
"Programming Language :: Python :: 3.5"
],
)
| true | true |
f720d5217ca55aacc0922b9a609c312d27b6d596 | 3,175 | py | Python | tests/unit/test_subscribers.py | cclauss/s3transfer | 258c3c69416338f8df307621ec5cefa85c453150 | [
"Apache-2.0"
] | 1 | 2021-05-08T10:43:40.000Z | 2021-05-08T10:43:40.000Z | tests/unit/test_subscribers.py | Saiprasad16/s3transfer | 59e968d05288092948284001710c416677102266 | [
"Apache-2.0"
] | 1 | 2021-04-08T21:25:06.000Z | 2021-04-13T16:36:43.000Z | tests/unit/test_subscribers.py | Saiprasad16/s3transfer | 59e968d05288092948284001710c416677102266 | [
"Apache-2.0"
] | 1 | 2020-12-28T19:16:31.000Z | 2020-12-28T19:16:31.000Z | # Copyright 2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the 'License'). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the 'license' file accompanying this file. This file is
# distributed on an 'AS IS' BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from tests import unittest
from s3transfer.exceptions import InvalidSubscriberMethodError
from s3transfer.subscribers import BaseSubscriber
class ExtraMethodsSubscriber(BaseSubscriber):
    """Subscriber that adds a method beyond the standard callbacks."""

    def extra_method(self):
        message = 'called extra method'
        return message
class NotCallableSubscriber(BaseSubscriber):
    """Invalid subscriber: ``on_done`` is a plain string, not a callable."""
    on_done = 'foo'
class NoKwargsSubscriber(BaseSubscriber):
    """Invalid subscriber: ``on_done`` does not accept keyword arguments."""
    def on_done(self):
        pass
class OverrideMethodSubscriber(BaseSubscriber):
    """Subscriber that overrides ``on_queued`` and echoes its kwargs."""
    def on_queued(self, **kwargs):
        # Returning the kwargs lets tests assert the override was invoked
        # with the expected arguments.
        return kwargs
class OverrideConstructorSubscriber(BaseSubscriber):
    """Subscriber with a custom constructor taking two required arguments."""
    def __init__(self, arg1, arg2):
        self.arg1 = arg1
        self.arg2 = arg2
class TestSubscribers(unittest.TestCase):
    """Behavioural tests for BaseSubscriber and its subclassing contract.

    NOTE(review): ``assertRaisesRegexp`` is deprecated and removed in
    Python 3.12; switch to ``assertRaisesRegex`` once Python 2 support is
    confirmed dropped.
    """
    def test_can_instantiate_base_subscriber(self):
        # The base class itself must pass its own method validation.
        try:
            BaseSubscriber()
        except InvalidSubscriberMethodError:
            self.fail('BaseSubscriber should be instantiable')
    def test_can_call_base_subscriber_method(self):
        # Base-class hooks are no-ops but must be callable with kwargs.
        subscriber = BaseSubscriber()
        try:
            subscriber.on_done(future=None)
        except Exception as e:
            self.fail(
                'Should be able to call base class subscriber method. '
                'instead got: %s' % e)
    def test_subclass_can_have_and_call_additional_methods(self):
        # Extra, non-hook methods on subclasses are allowed.
        subscriber = ExtraMethodsSubscriber()
        self.assertEqual(subscriber.extra_method(), 'called extra method')
    def test_can_subclass_and_override_method_from_base_subscriber(self):
        subscriber = OverrideMethodSubscriber()
        # Make sure that the overriden method is called
        self.assertEqual(subscriber.on_queued(foo='bar'), {'foo': 'bar'})
    def test_can_subclass_and_override_constructor_from_base_class(self):
        subscriber = OverrideConstructorSubscriber('foo', arg2='bar')
        # Make sure you can create a custom constructor.
        self.assertEqual(subscriber.arg1, 'foo')
        self.assertEqual(subscriber.arg2, 'bar')
    def test_invalid_arguments_in_constructor_of_subclass_subscriber(self):
        # The override constructor should still have validation of
        # constructor args.
        with self.assertRaises(TypeError):
            OverrideConstructorSubscriber()
    def test_not_callable_in_subclass_subscriber_method(self):
        # A hook attribute that is not callable must be rejected.
        with self.assertRaisesRegexp(
                InvalidSubscriberMethodError, 'must be callable'):
            NotCallableSubscriber()
    def test_no_kwargs_in_subclass_subscriber_method(self):
        # A hook that cannot accept keyword arguments must be rejected.
        with self.assertRaisesRegexp(
                InvalidSubscriberMethodError, 'must accept keyword'):
            NoKwargsSubscriber()
| 35.674157 | 75 | 0.716535 |
from tests import unittest
from s3transfer.exceptions import InvalidSubscriberMethodError
from s3transfer.subscribers import BaseSubscriber
class ExtraMethodsSubscriber(BaseSubscriber):
def extra_method(self):
return 'called extra method'
class NotCallableSubscriber(BaseSubscriber):
on_done = 'foo'
class NoKwargsSubscriber(BaseSubscriber):
def on_done(self):
pass
class OverrideMethodSubscriber(BaseSubscriber):
def on_queued(self, **kwargs):
return kwargs
class OverrideConstructorSubscriber(BaseSubscriber):
def __init__(self, arg1, arg2):
self.arg1 = arg1
self.arg2 = arg2
class TestSubscribers(unittest.TestCase):
def test_can_instantiate_base_subscriber(self):
try:
BaseSubscriber()
except InvalidSubscriberMethodError:
self.fail('BaseSubscriber should be instantiable')
def test_can_call_base_subscriber_method(self):
subscriber = BaseSubscriber()
try:
subscriber.on_done(future=None)
except Exception as e:
self.fail(
'Should be able to call base class subscriber method. '
'instead got: %s' % e)
def test_subclass_can_have_and_call_additional_methods(self):
subscriber = ExtraMethodsSubscriber()
self.assertEqual(subscriber.extra_method(), 'called extra method')
def test_can_subclass_and_override_method_from_base_subscriber(self):
subscriber = OverrideMethodSubscriber()
self.assertEqual(subscriber.on_queued(foo='bar'), {'foo': 'bar'})
def test_can_subclass_and_override_constructor_from_base_class(self):
subscriber = OverrideConstructorSubscriber('foo', arg2='bar')
self.assertEqual(subscriber.arg1, 'foo')
self.assertEqual(subscriber.arg2, 'bar')
def test_invalid_arguments_in_constructor_of_subclass_subscriber(self):
with self.assertRaises(TypeError):
OverrideConstructorSubscriber()
def test_not_callable_in_subclass_subscriber_method(self):
with self.assertRaisesRegexp(
InvalidSubscriberMethodError, 'must be callable'):
NotCallableSubscriber()
def test_no_kwargs_in_subclass_subscriber_method(self):
with self.assertRaisesRegexp(
InvalidSubscriberMethodError, 'must accept keyword'):
NoKwargsSubscriber()
| true | true |
f720d5fe861a06e326fd1453b262a21ad8d73c63 | 233 | py | Python | encapsulation_exercise/restaurant/project/beverage/cold_beverage.py | Veselin-Stoilov/software-university-OOP | 452a77cabf2e7d93f30f629c67c6b22682eb255d | [
"MIT"
] | null | null | null | encapsulation_exercise/restaurant/project/beverage/cold_beverage.py | Veselin-Stoilov/software-university-OOP | 452a77cabf2e7d93f30f629c67c6b22682eb255d | [
"MIT"
] | null | null | null | encapsulation_exercise/restaurant/project/beverage/cold_beverage.py | Veselin-Stoilov/software-university-OOP | 452a77cabf2e7d93f30f629c67c6b22682eb255d | [
"MIT"
] | null | null | null | from encapsulation_exercise.restaurant.project.beverage.beverage import Beverage
class ColdBeverage(Beverage):
def __init__(self, name: str, price: float, milliliters: float):
super().__init__(name, price, milliliters)
| 33.285714 | 80 | 0.76824 | from encapsulation_exercise.restaurant.project.beverage.beverage import Beverage
class ColdBeverage(Beverage):
def __init__(self, name: str, price: float, milliliters: float):
super().__init__(name, price, milliliters)
| true | true |
f720d64ceba2868cd71f12c692ec517b850f2ae3 | 5,655 | py | Python | qiskit/providers/basicaer/statevector_simulator.py | biplab37/qiskit-aakash | e10b204887606f1f75bdfde182bb0c6d0a322c68 | [
"Apache-2.0"
] | 22 | 2019-08-15T04:39:15.000Z | 2022-03-06T05:17:04.000Z | qiskit/providers/basicaer/statevector_simulator.py | biplab37/qiskit-aakash | e10b204887606f1f75bdfde182bb0c6d0a322c68 | [
"Apache-2.0"
] | 2 | 2020-10-26T07:12:12.000Z | 2021-12-09T16:22:51.000Z | qiskit/providers/basicaer/statevector_simulator.py | biplab37/qiskit-aakash | e10b204887606f1f75bdfde182bb0c6d0a322c68 | [
"Apache-2.0"
] | 9 | 2019-09-05T05:33:00.000Z | 2021-10-09T16:04:53.000Z | # -*- coding: utf-8 -*-
# This code is part of Qiskit.
#
# (C) Copyright IBM 2017.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
"""Contains a (slow) python statevector simulator.
It simulates the statevector through a quantum circuit. It is exponential in
the number of qubits.
We advise using the c++ simulator or online simulator for larger size systems.
The input is a qobj dictionary and the output is a Result object.
The input qobj to this simulator has no shots, no measures, no reset, no noise.
"""
import logging
from math import log2
from qiskit.util import local_hardware_info
from qiskit.providers.basicaer.exceptions import BasicAerError
from qiskit.providers.models import QasmBackendConfiguration
from .qasm_simulator import QasmSimulatorPy
logger = logging.getLogger(__name__)
class StatevectorSimulatorPy(QasmSimulatorPy):
    """Python statevector simulator."""
    # A statevector of n qubits holds 2**n complex amplitudes at 16 bytes
    # each (complex128), so the largest register that fits in local RAM is
    # log2(memory_bytes / 16) qubits.
    MAX_QUBITS_MEMORY = int(log2(local_hardware_info()['memory'] * (1024 ** 3) / 16))
    # Static backend capabilities advertised to Terra; wrapped into a
    # QasmBackendConfiguration in __init__ unless a configuration is given.
    DEFAULT_CONFIGURATION = {
        'backend_name': 'statevector_simulator',
        'backend_version': '1.0.0',
        'n_qubits': min(24, MAX_QUBITS_MEMORY),
        'url': 'https://github.com/Qiskit/qiskit-terra',
        'simulator': True,
        'local': True,
        'conditional': True,
        'open_pulse': False,
        'memory': True,
        'max_shots': 65536,
        'coupling_map': None,
        'description': 'A Python statevector simulator for qobj files',
        'basis_gates': ['u1', 'u2', 'u3', 'cx', 'id', 'snapshot'],
        'gates': [
            {
                'name': 'u1',
                'parameters': ['lambda'],
                'qasm_def': 'gate u1(lambda) q { U(0,0,lambda) q; }'
            },
            {
                'name': 'u2',
                'parameters': ['phi', 'lambda'],
                'qasm_def': 'gate u2(phi,lambda) q { U(pi/2,phi,lambda) q; }'
            },
            {
                'name': 'u3',
                'parameters': ['theta', 'phi', 'lambda'],
                'qasm_def': 'gate u3(theta,phi,lambda) q { U(theta,phi,lambda) q; }'
            },
            {
                'name': 'cx',
                'parameters': ['c', 't'],
                'qasm_def': 'gate cx c,t { CX c,t; }'
            },
            {
                'name': 'id',
                'parameters': ['a'],
                'qasm_def': 'gate id a { U(0,0,0) a; }'
            },
            {
                'name': 'snapshot',
                'parameters': ['slot'],
                'qasm_def': 'gate snapshot(slot) q { TODO }'
            }
        ]
    }
    # Override base class value to return the final state vector
    SHOW_FINAL_STATE = True
    def __init__(self, configuration=None, provider=None):
        """Initialize with DEFAULT_CONFIGURATION unless one is supplied."""
        super().__init__(configuration=(
            configuration or QasmBackendConfiguration.from_dict(self.DEFAULT_CONFIGURATION)),
            provider=provider)
    def run(self, qobj, backend_options=None):
        """Run qobj asynchronously.

        Args:
            qobj (Qobj): payload of the experiment
            backend_options (dict): backend options

        Returns:
            BasicAerJob: derived from BaseJob

        Additional Information::

            backend_options: Is a dict of options for the backend. It may contain
                * "initial_statevector": vector_like
                * "chop_threshold": double

            The "initial_statevector" option specifies a custom initial
            initial statevector for the simulator to be used instead of the all
            zero state. This size of this vector must be correct for the number
            of qubits in all experiments in the qobj.

            The "chop_threshold" option specifies a truncation value for
            setting small values to zero in the output statevector. The default
            value is 1e-15.

            Example::

                backend_options = {
                    "initial_statevector": np.array([1, 0, 0, 1j]) / np.sqrt(2),
                    "chop_threshold": 1e-15
                }
        """
        # Delegates entirely to QasmSimulatorPy.run; only _validate differs.
        return super().run(qobj, backend_options=backend_options)
    def _validate(self, qobj):
        """Semantic validations of the qobj which cannot be done via schemas.
        Some of these may later move to backend schemas.
        1. No shots
        2. No measurements in the middle
        """
        n_qubits = qobj.config.n_qubits
        max_qubits = self.configuration().n_qubits
        # Refuse early rather than letting the simulation exhaust memory.
        if n_qubits > max_qubits:
            raise BasicAerError('Number of qubits {} '.format(n_qubits) +
                                'is greater than maximum ({}) '.format(max_qubits) +
                                'for "{}".'.format(self.name()))
        # A statevector result is deterministic, so multiple shots are
        # meaningless: force one shot globally and per experiment.
        if qobj.config.shots != 1:
            logger.info('"%s" only supports 1 shot. Setting shots=1.',
                        self.name())
            qobj.config.shots = 1
        for experiment in qobj.experiments:
            name = experiment.header.name
            if getattr(experiment.config, 'shots', 1) != 1:
                logger.info('"%s" only supports 1 shot. '
                            'Setting shots=1 for circuit "%s".',
                            self.name(), name)
                experiment.config.shots = 1
| 36.019108 | 93 | 0.567286 |
import logging
from math import log2
from qiskit.util import local_hardware_info
from qiskit.providers.basicaer.exceptions import BasicAerError
from qiskit.providers.models import QasmBackendConfiguration
from .qasm_simulator import QasmSimulatorPy
logger = logging.getLogger(__name__)
class StatevectorSimulatorPy(QasmSimulatorPy):
MAX_QUBITS_MEMORY = int(log2(local_hardware_info()['memory'] * (1024 ** 3) / 16))
DEFAULT_CONFIGURATION = {
'backend_name': 'statevector_simulator',
'backend_version': '1.0.0',
'n_qubits': min(24, MAX_QUBITS_MEMORY),
'url': 'https://github.com/Qiskit/qiskit-terra',
'simulator': True,
'local': True,
'conditional': True,
'open_pulse': False,
'memory': True,
'max_shots': 65536,
'coupling_map': None,
'description': 'A Python statevector simulator for qobj files',
'basis_gates': ['u1', 'u2', 'u3', 'cx', 'id', 'snapshot'],
'gates': [
{
'name': 'u1',
'parameters': ['lambda'],
'qasm_def': 'gate u1(lambda) q { U(0,0,lambda) q; }'
},
{
'name': 'u2',
'parameters': ['phi', 'lambda'],
'qasm_def': 'gate u2(phi,lambda) q { U(pi/2,phi,lambda) q; }'
},
{
'name': 'u3',
'parameters': ['theta', 'phi', 'lambda'],
'qasm_def': 'gate u3(theta,phi,lambda) q { U(theta,phi,lambda) q; }'
},
{
'name': 'cx',
'parameters': ['c', 't'],
'qasm_def': 'gate cx c,t { CX c,t; }'
},
{
'name': 'id',
'parameters': ['a'],
'qasm_def': 'gate id a { U(0,0,0) a; }'
},
{
'name': 'snapshot',
'parameters': ['slot'],
'qasm_def': 'gate snapshot(slot) q { TODO }'
}
]
}
SHOW_FINAL_STATE = True
def __init__(self, configuration=None, provider=None):
super().__init__(configuration=(
configuration or QasmBackendConfiguration.from_dict(self.DEFAULT_CONFIGURATION)),
provider=provider)
def run(self, qobj, backend_options=None):
return super().run(qobj, backend_options=backend_options)
def _validate(self, qobj):
n_qubits = qobj.config.n_qubits
max_qubits = self.configuration().n_qubits
if n_qubits > max_qubits:
raise BasicAerError('Number of qubits {} '.format(n_qubits) +
'is greater than maximum ({}) '.format(max_qubits) +
'for "{}".'.format(self.name()))
if qobj.config.shots != 1:
logger.info('"%s" only supports 1 shot. Setting shots=1.',
self.name())
qobj.config.shots = 1
for experiment in qobj.experiments:
name = experiment.header.name
if getattr(experiment.config, 'shots', 1) != 1:
logger.info('"%s" only supports 1 shot. '
'Setting shots=1 for circuit "%s".',
self.name(), name)
experiment.config.shots = 1
| true | true |
f720d6c78dc5035a3c9b881b6fc3670b51d08456 | 3,919 | py | Python | myprojectenv/lib/python3.5/site-packages/ansible/modules/windows/win_unzip.py | lancerenteria/doFlask | 2d4e242469b108c6c8316ee18a540307497bfb53 | [
"MIT"
] | null | null | null | myprojectenv/lib/python3.5/site-packages/ansible/modules/windows/win_unzip.py | lancerenteria/doFlask | 2d4e242469b108c6c8316ee18a540307497bfb53 | [
"MIT"
] | null | null | null | myprojectenv/lib/python3.5/site-packages/ansible/modules/windows/win_unzip.py | lancerenteria/doFlask | 2d4e242469b108c6c8316ee18a540307497bfb53 | [
"MIT"
] | null | null | null | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2015, Phil Schwartz <schwartzmx@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# this is a windows documentation stub. actual code lives in the .ps1
# file of the same name
# Metadata consumed by Ansible's plugin tooling (schema version, maturity
# status, and which group supports the module).
ANSIBLE_METADATA = {'metadata_version': '1.0',
                    'status': ['preview'],
                    'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: win_unzip
version_added: "2.0"
short_description: Unzips compressed files and archives on the Windows node
description:
- Unzips compressed files and archives.
- Supports .zip files natively
- Supports other formats supported by the Powershell Community Extensions (PSCX) module (basically everything 7zip supports)
requirements:
- PSCX
options:
src:
description:
- File to be unzipped (provide absolute path)
required: true
dest:
description:
- Destination of zip file (provide absolute path of directory). If it does not exist, the directory will be created.
required: true
rm:
description:
- Remove the zip file, after unzipping
required: no
choices:
- true
- false
- yes
- no
default: false
recurse:
description:
- Recursively expand zipped files within the src file.
required: no
default: false
choices:
- true
- false
- yes
- no
creates:
description:
- If this file or directory exists the specified src will not be extracted.
required: no
default: null
notes:
- For extracting any compression types other than .zip, the PowerShellCommunityExtensions (PSCX) Module is required. This module (in conjunction with PSCX)
has the ability to recursively unzip files within the src zip file provided and also functionality for many other compression types. If the destination
directory does not exist, it will be created before unzipping the file. Specifying rm parameter will force removal of the src file after extraction.
author: Phil Schwartz
'''
EXAMPLES = r'''
# This unzips a library that was downloaded with win_get_url, and removes the file after extraction
# $ ansible -i hosts -m win_unzip -a "src=C:\\LibraryToUnzip.zip dest=C:\\Lib rm=true" all
# Playbook example
# Simple unzip
---
- name: Unzip a bz2 (BZip) file
win_unzip:
src: C:\Users\Phil\Logs.bz2
dest: C:\Users\Phil\OldLogs
creates: C:\Users\Phil\OldLogs
# This playbook example unzips a .zip file and recursively decompresses the contained .gz files and removes all unneeded compressed files after completion.
- name: Unzip ApplicationLogs.zip and decompress all GZipped log files
hosts: all
gather_facts: false
tasks:
- name: Recursively decompress GZ files in ApplicationLogs.zip
win_unzip:
src: C:\Downloads\ApplicationLogs.zip
dest: C:\Application\Logs
recurse: yes
rm: true
# Install PSCX to use for extracting a gz file
- name: Grab PSCX msi
win_get_url:
url: http://download-codeplex.sec.s-msft.com/Download/Release?ProjectName=pscx&DownloadId=923562&FileTime=130585918034470000&Build=20959
dest: C:\pscx.msi
- name: Install PSCX
win_msi:
path: C:\pscx.msi
- name: Unzip gz log
win_unzip:
src: C:\Logs\application-error-logs.gz
dest: C:\ExtractedLogs\application-error-logs
'''
| 32.932773 | 156 | 0.713958 |
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: win_unzip
version_added: "2.0"
short_description: Unzips compressed files and archives on the Windows node
description:
- Unzips compressed files and archives.
- Supports .zip files natively
- Supports other formats supported by the Powershell Community Extensions (PSCX) module (basically everything 7zip supports)
requirements:
- PSCX
options:
src:
description:
- File to be unzipped (provide absolute path)
required: true
dest:
description:
- Destination of zip file (provide absolute path of directory). If it does not exist, the directory will be created.
required: true
rm:
description:
- Remove the zip file, after unzipping
required: no
choices:
- true
- false
- yes
- no
default: false
recurse:
description:
- Recursively expand zipped files within the src file.
required: no
default: false
choices:
- true
- false
- yes
- no
creates:
description:
- If this file or directory exists the specified src will not be extracted.
required: no
default: null
notes:
- For extracting any compression types other than .zip, the PowerShellCommunityExtensions (PSCX) Module is required. This module (in conjunction with PSCX)
has the ability to recursively unzip files within the src zip file provided and also functionality for many other compression types. If the destination
directory does not exist, it will be created before unzipping the file. Specifying rm parameter will force removal of the src file after extraction.
author: Phil Schwartz
'''
EXAMPLES = r'''
# This unzips a library that was downloaded with win_get_url, and removes the file after extraction
# $ ansible -i hosts -m win_unzip -a "src=C:\\LibraryToUnzip.zip dest=C:\\Lib rm=true" all
# Playbook example
# Simple unzip
---
- name: Unzip a bz2 (BZip) file
win_unzip:
src: C:\Users\Phil\Logs.bz2
dest: C:\Users\Phil\OldLogs
creates: C:\Users\Phil\OldLogs
# This playbook example unzips a .zip file and recursively decompresses the contained .gz files and removes all unneeded compressed files after completion.
- name: Unzip ApplicationLogs.zip and decompress all GZipped log files
hosts: all
gather_facts: false
tasks:
- name: Recursively decompress GZ files in ApplicationLogs.zip
win_unzip:
src: C:\Downloads\ApplicationLogs.zip
dest: C:\Application\Logs
recurse: yes
rm: true
# Install PSCX to use for extracting a gz file
- name: Grab PSCX msi
win_get_url:
url: http://download-codeplex.sec.s-msft.com/Download/Release?ProjectName=pscx&DownloadId=923562&FileTime=130585918034470000&Build=20959
dest: C:\pscx.msi
- name: Install PSCX
win_msi:
path: C:\pscx.msi
- name: Unzip gz log
win_unzip:
src: C:\Logs\application-error-logs.gz
dest: C:\ExtractedLogs\application-error-logs
'''
| true | true |
f720d7542161f6d3c83a81ed0d3c647a9030afd4 | 259 | py | Python | mmaction/apis/__init__.py | HypnosXC/mmaction2 | a26d5f981449445a5e22a0a60d8b285e06c3dd6e | [
"Apache-2.0"
] | 648 | 2021-06-24T19:33:09.000Z | 2022-03-31T06:27:24.000Z | mmaction/apis/__init__.py | xumingze0308/mmaction2 | 777546f27f8f5a3c83e10d966e2149be2fc9fa31 | [
"Apache-2.0"
] | 98 | 2020-01-21T09:41:30.000Z | 2022-03-12T00:53:06.000Z | mmaction/apis/__init__.py | xumingze0308/mmaction2 | 777546f27f8f5a3c83e10d966e2149be2fc9fa31 | [
"Apache-2.0"
] | 233 | 2020-01-18T03:46:27.000Z | 2022-03-19T03:17:47.000Z | from .inference import inference_recognizer, init_recognizer
from .test import multi_gpu_test, single_gpu_test
from .train import train_model
__all__ = [
'train_model', 'init_recognizer', 'inference_recognizer', 'multi_gpu_test',
'single_gpu_test'
]
| 28.777778 | 79 | 0.791506 | from .inference import inference_recognizer, init_recognizer
from .test import multi_gpu_test, single_gpu_test
from .train import train_model
__all__ = [
'train_model', 'init_recognizer', 'inference_recognizer', 'multi_gpu_test',
'single_gpu_test'
]
| true | true |
f720d77ecc540423a6a6545f9e50c117ad1c08db | 2,579 | py | Python | se3_transformer/model/layers/linear.py | RosettaCommons/RFDesign | b404b8b2c57f89c047529c30259aeeb8f6012b61 | [
"MIT"
] | 45 | 2022-01-12T04:39:36.000Z | 2022-03-25T12:33:36.000Z | se3_transformer/model/layers/linear.py | RosettaCommons/RFDesign | b404b8b2c57f89c047529c30259aeeb8f6012b61 | [
"MIT"
] | 6 | 2022-01-15T16:48:39.000Z | 2022-03-15T16:20:34.000Z | se3_transformer/model/layers/linear.py | RosettaCommons/RFDesign | b404b8b2c57f89c047529c30259aeeb8f6012b61 | [
"MIT"
] | 10 | 2022-01-12T11:28:03.000Z | 2022-03-30T11:36:41.000Z | # Copyright (c) 2021, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
#
# SPDX-FileCopyrightText: Copyright (c) 2021 NVIDIA CORPORATION & AFFILIATES
# SPDX-License-Identifier: MIT
from typing import Dict
import numpy as np
import torch
import torch.nn as nn
from torch import Tensor
from se3_transformer.model.fiber import Fiber
class LinearSE3(nn.Module):
    """
    Graph Linear SE(3)-equivariant layer, equivalent to a 1x1 convolution.
    Maps a fiber to a fiber with the same degrees (channels may be different).
    No interaction between degrees, but interaction between channels.

        type-0 features (C_0 channels) ────> Linear(bias=False) ────> type-0 features (C'_0 channels)
        type-1 features (C_1 channels) ────> Linear(bias=False) ────> type-1 features (C'_1 channels)
                                                     :
        type-k features (C_k channels) ────> Linear(bias=False) ────> type-k features (C'_k channels)
    """

    def __init__(self, fiber_in: 'Fiber', fiber_out: 'Fiber'):
        super().__init__()
        # One weight matrix per output degree. Scaling by 1/sqrt(fan_in)
        # keeps the output variance comparable to the input variance.
        self.weights = nn.ParameterDict({
            str(degree_out): nn.Parameter(
                torch.randn(channels_out, fiber_in[degree_out]) / np.sqrt(fiber_in[degree_out]))
            for degree_out, channels_out in fiber_out
        })

    def forward(self, features: Dict[str, Tensor], *args, **kwargs) -> Dict[str, Tensor]:
        # Use the weight yielded by items() directly; the original loop
        # variable was unused and the ParameterDict was re-indexed instead.
        return {
            degree: weight @ features[degree]
            for degree, weight in self.weights.items()
        }
| 42.983333 | 97 | 0.703761 |
from typing import Dict
import numpy as np
import torch
import torch.nn as nn
from torch import Tensor
from se3_transformer.model.fiber import Fiber
class LinearSE3(nn.Module):
    """Graph Linear SE(3)-equivariant layer (a per-degree 1x1 convolution).

    Maps a fiber to a fiber with the same degrees; channel counts may differ.
    Degrees do not interact, channels within a degree do.
    """
    def __init__(self, fiber_in: Fiber, fiber_out: Fiber):
        super().__init__()
        # One (channels_out x channels_in) weight per output degree,
        # scaled by 1/sqrt(fan_in) to preserve activation variance.
        self.weights = nn.ParameterDict({
            str(degree_out): nn.Parameter(
                torch.randn(channels_out, fiber_in[degree_out]) / np.sqrt(fiber_in[degree_out]))
            for degree_out, channels_out in fiber_out
        })
    def forward(self, features: Dict[str, Tensor], *args, **kwargs) -> Dict[str, Tensor]:
        """Apply the per-degree linear maps to the matching feature tensors."""
        return {
            degree: self.weights[degree] @ features[degree]
            for degree, weight in self.weights.items()
        }
| true | true |
f720d79b4d6d96c43d1bfceebd505df12ce179cf | 1,524 | py | Python | plotly/validators/streamtube/colorbar/_titlefont.py | gnestor/plotly.py | a8ae062795ddbf9867b8578fe6d9e244948c15ff | [
"MIT"
] | 12 | 2020-04-18T18:10:22.000Z | 2021-12-06T10:11:15.000Z | plotly/validators/streamtube/colorbar/_titlefont.py | gnestor/plotly.py | a8ae062795ddbf9867b8578fe6d9e244948c15ff | [
"MIT"
] | 1 | 2020-12-15T16:56:11.000Z | 2020-12-15T16:56:11.000Z | plotly/validators/streamtube/colorbar/_titlefont.py | gnestor/plotly.py | a8ae062795ddbf9867b8578fe6d9e244948c15ff | [
"MIT"
] | 6 | 2020-04-18T23:07:08.000Z | 2021-11-18T07:53:06.000Z | import _plotly_utils.basevalidators
class TitlefontValidator(_plotly_utils.basevalidators.CompoundValidator):
    """Compound validator for ``streamtube.colorbar.titlefont``."""

    def __init__(
        self,
        plotly_name='titlefont',
        parent_name='streamtube.colorbar',
        **kwargs
    ):
        # Pull overridable defaults out of **kwargs before delegating so the
        # remaining kwargs pass through to the base validator untouched.
        data_class_str = kwargs.pop('data_class_str', 'Titlefont')
        data_docs = kwargs.pop(
            'data_docs', """
            color
            family
                HTML font family - the typeface that will be
                applied by the web browser. The web browser
                will only be able to apply a font if it is
                available on the system which it operates.
                Provide multiple font families, separated by
                commas, to indicate the preference in which to
                apply fonts if they aren't available on the
                system. The plotly service (at https://plot.ly
                or on-premise) generates images on a server,
                where only a select number of fonts are
                installed and supported. These include "Arial",
                "Balto", "Courier New", "Droid Sans",, "Droid
                Serif", "Droid Sans Mono", "Gravitas One", "Old
                Standard TT", "Open Sans", "Overpass", "PT Sans
                Narrow", "Raleway", "Times New Roman".
            size
"""
        )
        super(TitlefontValidator, self).__init__(
            plotly_name=plotly_name,
            parent_name=parent_name,
            data_class_str=data_class_str,
            data_docs=data_docs,
            **kwargs
        )
| 36.285714 | 73 | 0.557743 | import _plotly_utils.basevalidators
class TitlefontValidator(_plotly_utils.basevalidators.CompoundValidator):
    """Compound validator for ``streamtube.colorbar.titlefont``.

    The ``data_docs`` default below is a runtime string consumed by the
    plotly validator machinery, not a docstring.
    """
    def __init__(
        self,
        plotly_name='titlefont',
        parent_name='streamtube.colorbar',
        **kwargs
    ):
        super(TitlefontValidator, self).__init__(
            plotly_name=plotly_name,
            parent_name=parent_name,
            data_class_str=kwargs.pop('data_class_str', 'Titlefont'),
            data_docs=kwargs.pop(
                'data_docs', """
            color
            family
                HTML font family - the typeface that will be
                applied by the web browser. The web browser
                will only be able to apply a font if it is
                available on the system which it operates.
                Provide multiple font families, separated by
                commas, to indicate the preference in which to
                apply fonts if they aren't available on the
                system. The plotly service (at https://plot.ly
                or on-premise) generates images on a server,
                where only a select number of fonts are
                installed and supported. These include "Arial",
                "Balto", "Courier New", "Droid Sans",, "Droid
                Serif", "Droid Sans Mono", "Gravitas One", "Old
                Standard TT", "Open Sans", "Overpass", "PT Sans
                Narrow", "Raleway", "Times New Roman".
            size
"""
            ),
            **kwargs
        )
| true | true |
f720d9caab26b0c898d32c3bc5d19d61e2797724 | 7,527 | py | Python | divvydata/historical_data.py | chrisluedtke/divvy-data-analysis | 441fa9028ed4bb77ad47e8109a8be749ea1d30b1 | [
"MIT"
] | 2 | 2019-02-09T12:54:02.000Z | 2019-02-11T23:02:35.000Z | divvydata/historical_data.py | chrisluedtke/divvy-data-analysis | 441fa9028ed4bb77ad47e8109a8be749ea1d30b1 | [
"MIT"
] | null | null | null | divvydata/historical_data.py | chrisluedtke/divvy-data-analysis | 441fa9028ed4bb77ad47e8109a8be749ea1d30b1 | [
"MIT"
] | null | null | null | """
Pulls data from:
https://www.divvybikes.com/system-data
https://s3.amazonaws.com/divvy-data/tripdata
"""
from io import BytesIO
import os
import re
import requests
from zipfile import ZipFile
from typing import List
from lxml import html
import pandas as pd
from .stations_feed import StationsFeed
STN_DT_FORM = {
'2013': "%m/%d/%Y", # Not labeled for quarters
'2014_Q1Q2': None, # xlsx file
'2014_Q3Q4': "%m/%d/%Y %H:%M",
'2015': None, # no date column and not labeled for quarters
'2016_Q1Q2': "%m/%d/%Y",
'2016_Q3': "%m/%d/%Y",
'2016_Q4': "%m/%d/%Y",
'2017_Q1Q2': "%m/%d/%Y %H:%M:%S",
'2017_Q3Q4': "%m/%d/%Y %H:%M",
}
STN_COL_MAP = {
'latitude': 'lat',
'longitude': 'lon',
'dateCreated': 'online_date',
'online date': 'online_date',
}
RD_DT_FORM = {
'2013': "%Y-%m-%d %H:%M", # Not labeled for quarters
'2014_Q1Q2': "%m/%d/%Y %H:%M",
'2014_Q3': "%m/%d/%Y %H:%M",
'2014_Q4': "%m/%d/%Y %H:%M",
'2015_Q1': "%m/%d/%Y %H:%M",
'2015_Q2': "%m/%d/%Y %H:%M",
'2015': "%m/%d/%Y %H:%M", # Q3 labeled as month integer
'2015_Q4': "%m/%d/%Y %H:%M",
'2016_Q1': "%m/%d/%Y %H:%M",
'2016': "%m/%d/%Y %H:%M", # Q2 labeled as month integer
'2016_Q3': "%m/%d/%Y %H:%M:%S",
'2016_Q4': "%m/%d/%Y %H:%M:%S",
'2017_Q1': "%m/%d/%Y %H:%M:%S",
'2017_Q2': "%m/%d/%Y %H:%M:%S",
'2017_Q3': "%m/%d/%Y %H:%M:%S",
'2017_Q4': "%m/%d/%Y %H:%M",
'2018_Q1': "%Y-%m-%d %H:%M:%S",
'2018_Q2': "%Y-%m-%d %H:%M:%S",
'2018_Q3': "%Y-%m-%d %H:%M:%S",
'2018_Q4': "%Y-%m-%d %H:%M:%S",
}
RD_COL_MAP = {
'01 - Rental Details Rental ID': 'trip_id',
'01 - Rental Details Local Start Time': 'start_time',
'01 - Rental Details Local End Time': 'end_time',
'01 - Rental Details Bike ID': 'bikeid',
'01 - Rental Details Duration In Seconds Uncapped': 'tripduration',
'03 - Rental Start Station ID': 'from_station_id',
'03 - Rental Start Station Name': 'from_station_name',
'02 - Rental End Station ID': 'to_station_id',
'02 - Rental End Station Name': 'to_station_name',
'User Type': 'usertype',
'Member Gender': 'gender',
'05 - Member Details Member Birthday Year': 'birthyear',
'stoptime': 'end_time',
'starttime': 'start_time',
'birthday': 'birthyear',
}
def parse_zip_urls_from_url(url):
    """Return the Divvy S3 trip-data ``.zip`` links found on *url*."""
    page = html.fromstring(requests.get(url).content)
    base_source = 'https://s3.amazonaws.com/divvy-data/tripdata/'
    return [link for link in set(page.xpath('//a/@href'))
            if base_source in link and link.endswith('.zip')]
def year_lookup_to_date(yr_lookup: str) -> str:
    """Map a year/quarter lookup key (e.g. ``'2016_Q3'``) to its quarter-end date.

    Keys without a recognised quarter suffix (e.g. ``'2013'``) map to Dec 31.
    Combined keys such as ``'2014_Q1Q2'`` resolve to the end of the *last*
    quarter in the suffix.
    """
    quarter_end = {
        'Q1': '03-31',
        'Q2': '06-30',
        'Q3': '09-30',
        'Q4': '12-31',
    }
    parts = yr_lookup.split('_')
    suffix = parts[-1][-2:]
    return f"{parts[0]}-{quarter_end.get(suffix, '12-31')}"
def get_current_stations():
    """Pull the most recent station data from the Divvy JSON feed.

    Necessary because Divvy did not provide 2018 station data in the
    historical dumps; the live feed serves as the 2018 snapshot.
    Returns a DataFrame with columns id, name, lat, lon, dpcapacity,
    as_of_date (renamed from the feed's camelCase names).
    """
    df = StationsFeed().get_current_data()
    # Keep only the columns also present in the historical station frames.
    cols = ['id', 'stationName', 'latitude', 'longitude',
            'totalDocks', 'lastCommunicationTime']
    df = df[cols].rename(columns={
        'stationName': 'name',
        'lastCommunicationTime': 'as_of_date',
        'totalDocks': 'dpcapacity'
    })
    # STN_COL_MAP maps latitude/longitude -> lat/lon.
    df = df.rename(columns=STN_COL_MAP)
    return df
def process_ride_df(z, fpath, year_lookup):
    """Read one ride CSV out of an open ZipFile and normalise its columns.

    Column names are unified via RD_COL_MAP and both timestamp columns are
    parsed with the per-release format in RD_DT_FORM (unparseable values
    become NaT).
    """
    rides = pd.read_csv(z.open(fpath)).rename(columns=RD_COL_MAP)
    fmt = RD_DT_FORM.get(year_lookup, None)
    for col in ('start_time', 'end_time'):
        rides[col] = pd.to_datetime(rides[col], format=fmt, errors='coerce')
    return rides
def process_station_df(z, fpath, year_lookup):
    """Read one station file out of an open ZipFile and stamp its snapshot date.

    CSV and XLSX members are both supported; columns are unified via
    STN_COL_MAP and an ``as_of_date`` column derived from *year_lookup*
    is added.
    """
    reader = pd.read_csv if fpath.endswith('.csv') else pd.read_excel
    stations = reader(z.open(fpath)).rename(columns=STN_COL_MAP)
    stations['as_of_date'] = pd.to_datetime(year_lookup_to_date(year_lookup))
    if 'online_date' in stations:
        stations['online_date'] = pd.to_datetime(
            stations['online_date'],
            format=STN_DT_FORM.get(year_lookup, None),
            errors='coerce'
        )
    return stations
def combine_ride_dfs(dfs: List[pd.DataFrame]) -> pd.DataFrame:
    """Concatenate per-quarter ride frames, sort chronologically, tidy columns."""
    rides = pd.concat(dfs, ignore_index=True, sort=True)
    rides = rides.sort_values('start_time').reset_index(drop=True)
    # Some exports carry thousands separators in the duration (e.g. '1,020').
    rides['tripduration'] = (rides.tripduration.astype(str)
                                  .str.replace(',', '')
                                  .astype(float))
    keep = ['trip_id', 'bikeid', 'start_time', 'end_time', 'tripduration',
            'from_station_id', 'from_station_name', 'to_station_id',
            'to_station_name', 'usertype', 'gender', 'birthyear']
    return rides[[c for c in keep if c in rides]]
def combine_station_dfs(dfs: List[pd.DataFrame]) -> pd.DataFrame:
    """Concatenate station snapshots, ordered by station id and snapshot date."""
    stations = (pd.concat(dfs, ignore_index=True, sort=True)
                  .sort_values(['id', 'as_of_date'])
                  .reset_index(drop=True))
    # Deliberately drops stray columns such as 'city' and 'Unnamed: 7'.
    keep = ['id', 'name', 'as_of_date', 'lat', 'lon', 'dpcapacity',
            'online_date', 'landmark']
    return stations[[c for c in keep if c in stations]]
def get_historical_data(years: List[str], write_to: str = '', rides=True,
                        stations=True):
    """Gathers and cleans historical Divvy data.

    years: list of 'YYYY' strings (a single string is also accepted)
    write_to: optional local folder path to extract zip files to
    rides/stations: toggles for which of the two datasets to download
    returns: (pandas.DataFrame of rides, pandas.DataFrame of stations)
    """
    if isinstance(years, str):
        years = [years]
    ride_dfs = []
    station_dfs = []
    if not (rides or stations):
        return ride_dfs, station_dfs
    urls = parse_zip_urls_from_url('https://www.divvybikes.com/system-data')
    for url in sorted(urls):
        z_fn = url.split('/')[-1]
        # First four-digit 20xx number in the archive name is taken as its year.
        z_year = re.findall(r'20\d{2}', z_fn)[0]
        if z_year not in years:
            continue
        print(url)
        r = requests.get(url)
        with ZipFile(BytesIO(r.content)) as z:
            if write_to:
                write_path = os.path.join(write_to, z_fn.replace('.zip', ''))
                z.extractall(write_path)
            for fpath in z.namelist():
                fn = fpath.split('/')[-1]
                if fn.endswith(('.csv', '.xlsx')) and not fn.startswith('.'):
                    # Build the date-format lookup key, e.g. '2016_Q1Q2' when
                    # quarter markers appear in the file name, else '2016'.
                    quarter = re.findall('Q[1-4]', fn)
                    if quarter:
                        year_lookup = f"{z_year}_{''.join(quarter)}"
                    else:
                        year_lookup = z_year
                else:
                    continue
                if rides and '_trips_' in fn.lower():
                    print(fn, year_lookup)
                    df = process_ride_df(z, fpath, year_lookup)
                    ride_dfs.append(df)
                elif stations and '_stations_' in fn.lower():
                    print(fn, year_lookup)
                    df = process_station_df(z, fpath, year_lookup)
                    station_dfs.append(df)
    if rides:
        ride_dfs = combine_ride_dfs(ride_dfs)
    if stations:
        # Divvy published no 2018 station dump; fall back to the live feed.
        if '2018' in years:
            df = get_current_stations()
            station_dfs.append(df)
        station_dfs = combine_station_dfs(station_dfs)
    return ride_dfs, station_dfs
| 29.287938 | 77 | 0.563571 | from io import BytesIO
import os
import re
import requests
from zipfile import ZipFile
from typing import List
from lxml import html
import pandas as pd
from .stations_feed import StationsFeed
STN_DT_FORM = {
'2013': "%m/%d/%Y",
'2014_Q1Q2': None,
'2014_Q3Q4': "%m/%d/%Y %H:%M",
'2015': None,
'2016_Q1Q2': "%m/%d/%Y",
'2016_Q3': "%m/%d/%Y",
'2016_Q4': "%m/%d/%Y",
'2017_Q1Q2': "%m/%d/%Y %H:%M:%S",
'2017_Q3Q4': "%m/%d/%Y %H:%M",
}
STN_COL_MAP = {
'latitude': 'lat',
'longitude': 'lon',
'dateCreated': 'online_date',
'online date': 'online_date',
}
RD_DT_FORM = {
'2013': "%Y-%m-%d %H:%M",
'2014_Q1Q2': "%m/%d/%Y %H:%M",
'2014_Q3': "%m/%d/%Y %H:%M",
'2014_Q4': "%m/%d/%Y %H:%M",
'2015_Q1': "%m/%d/%Y %H:%M",
'2015_Q2': "%m/%d/%Y %H:%M",
'2015': "%m/%d/%Y %H:%M",
'2015_Q4': "%m/%d/%Y %H:%M",
'2016_Q1': "%m/%d/%Y %H:%M",
'2016': "%m/%d/%Y %H:%M",
'2016_Q3': "%m/%d/%Y %H:%M:%S",
'2016_Q4': "%m/%d/%Y %H:%M:%S",
'2017_Q1': "%m/%d/%Y %H:%M:%S",
'2017_Q2': "%m/%d/%Y %H:%M:%S",
'2017_Q3': "%m/%d/%Y %H:%M:%S",
'2017_Q4': "%m/%d/%Y %H:%M",
'2018_Q1': "%Y-%m-%d %H:%M:%S",
'2018_Q2': "%Y-%m-%d %H:%M:%S",
'2018_Q3': "%Y-%m-%d %H:%M:%S",
'2018_Q4': "%Y-%m-%d %H:%M:%S",
}
RD_COL_MAP = {
'01 - Rental Details Rental ID': 'trip_id',
'01 - Rental Details Local Start Time': 'start_time',
'01 - Rental Details Local End Time': 'end_time',
'01 - Rental Details Bike ID': 'bikeid',
'01 - Rental Details Duration In Seconds Uncapped': 'tripduration',
'03 - Rental Start Station ID': 'from_station_id',
'03 - Rental Start Station Name': 'from_station_name',
'02 - Rental End Station ID': 'to_station_id',
'02 - Rental End Station Name': 'to_station_name',
'User Type': 'usertype',
'Member Gender': 'gender',
'05 - Member Details Member Birthday Year': 'birthyear',
'stoptime': 'end_time',
'starttime': 'start_time',
'birthday': 'birthyear',
}
def parse_zip_urls_from_url(url):
    """Scrape *url* and return the Divvy S3 trip-data ``.zip`` links on it."""
    r = requests.get(url)
    webpage = html.fromstring(r.content)
    base_source = 'https://s3.amazonaws.com/divvy-data/tripdata/'
    urls = [url for url in set(webpage.xpath('//a/@href'))
            if (base_source in url and url.endswith('.zip'))]
    return urls
def year_lookup_to_date(yr_lookup: str) -> str:
    """Translate a 'YYYY[_Qn]' lookup key into a 'YYYY-MM-DD' quarter-end date."""
    ends = {'Q1': '03-31', 'Q2': '06-30', 'Q3': '09-30', 'Q4': '12-31'}
    pieces = yr_lookup.split('_')
    year = pieces[0]
    # For combined suffixes like 'Q1Q2' only the final quarter counts;
    # unrecognised suffixes (plain years) default to year end.
    quarter = pieces[-1][-2:]
    return year + '-' + ends.get(quarter, '12-31')
def get_current_stations():
    """Pull the most recent station snapshot from the live Divvy JSON feed.

    Used as the stand-in for the missing 2018 historical station dump.
    """
    df = StationsFeed().get_current_data()
    # Restrict to the columns the historical station frames also carry.
    cols = ['id', 'stationName', 'latitude', 'longitude',
            'totalDocks', 'lastCommunicationTime']
    df = df[cols].rename(columns={
        'stationName': 'name',
        'lastCommunicationTime': 'as_of_date',
        'totalDocks': 'dpcapacity'
    })
    # STN_COL_MAP maps latitude/longitude -> lat/lon.
    df = df.rename(columns=STN_COL_MAP)
    return df
def process_ride_df(z, fpath, year_lookup):
    """Read one ride CSV from an open ZipFile, unify columns, parse timestamps.

    Timestamps that do not match the per-release format in RD_DT_FORM
    become NaT (``errors='coerce'``).
    """
    df = (pd.read_csv(z.open(fpath))
          .rename(columns=RD_COL_MAP))
    df['start_time'] = pd.to_datetime(
        df['start_time'],
        format=RD_DT_FORM.get(year_lookup, None),
        errors='coerce'
    )
    df['end_time'] = pd.to_datetime(
        df['end_time'],
        format=RD_DT_FORM.get(year_lookup, None),
        errors='coerce'
    )
    return df
def process_station_df(z, fpath, year_lookup):
    """Read one station file (CSV or XLSX) from an open ZipFile and clean it.

    Adds an ``as_of_date`` snapshot column derived from *year_lookup* and
    parses ``online_date`` when present.
    """
    if fpath.endswith('.csv'):
        df = pd.read_csv(z.open(fpath))
    else:
        # Non-CSV members are the .xlsx station sheets.
        df = pd.read_excel(z.open(fpath))
    df = df.rename(columns=STN_COL_MAP)
    df['as_of_date'] = year_lookup_to_date(year_lookup)
    df['as_of_date'] = pd.to_datetime(df['as_of_date'])
    if 'online_date' in df:
        df['online_date'] = pd.to_datetime(
            df['online_date'],
            format=STN_DT_FORM.get(year_lookup, None),
            errors='coerce'
        )
    return df
def combine_ride_dfs(dfs: List[pd.DataFrame]) -> pd.DataFrame:
    """Concatenate per-quarter ride frames, sort by start time, tidy columns."""
    dfs = (pd.concat(dfs, ignore_index=True, sort=True)
           .sort_values('start_time')
           .reset_index(drop=True))
    # Strip thousands separators some exports put in the duration column.
    dfs['tripduration'] = (
        dfs.tripduration.astype(str).str.replace(',', '').astype(float)
    )
    cols = ['trip_id', 'bikeid', 'start_time', 'end_time', 'tripduration',
            'from_station_id', 'from_station_name', 'to_station_id',
            'to_station_name', 'usertype', 'gender', 'birthyear']
    dfs = dfs[[col for col in cols if col in dfs]]
    return dfs
def combine_station_dfs(dfs: List[pd.DataFrame]) -> pd.DataFrame:
    """Concatenate station snapshots ordered by station id and snapshot date."""
    dfs = (pd.concat(dfs, ignore_index=True, sort=True)
           .sort_values(['id', 'as_of_date'])
           .reset_index(drop=True))
    # Drops stray columns (e.g. 'city', 'Unnamed: 7') not in this whitelist.
    cols = ['id', 'name', 'as_of_date', 'lat', 'lon', 'dpcapacity',
            'online_date', 'landmark']
    dfs = dfs[[col for col in cols if col in dfs]]
    return dfs
def get_historical_data(years: List[str], write_to: str = '', rides=True,
                        stations=True):
    """Download and clean historical Divvy data for the requested years.

    years: list of 'YYYY' strings (a single string is also accepted)
    write_to: optional local folder path to extract zip files to
    rides/stations: toggles for which of the two datasets to download
    returns: (pandas.DataFrame of rides, pandas.DataFrame of stations)
    """
    if isinstance(years, str):
        years = [years]
    ride_dfs = []
    station_dfs = []
    if not (rides or stations):
        return ride_dfs, station_dfs
    urls = parse_zip_urls_from_url('https://www.divvybikes.com/system-data')
    for url in sorted(urls):
        z_fn = url.split('/')[-1]
        # First four-digit 20xx number in the archive name is its year.
        z_year = re.findall(r'20\d{2}', z_fn)[0]
        if z_year not in years:
            continue
        print(url)
        r = requests.get(url)
        with ZipFile(BytesIO(r.content)) as z:
            if write_to:
                write_path = os.path.join(write_to, z_fn.replace('.zip', ''))
                z.extractall(write_path)
            for fpath in z.namelist():
                fn = fpath.split('/')[-1]
                if fn.endswith(('.csv', '.xlsx')) and not fn.startswith('.'):
                    # Date-format lookup key: '2016_Q1Q2' style when quarter
                    # markers appear in the file name, otherwise just '2016'.
                    quarter = re.findall('Q[1-4]', fn)
                    if quarter:
                        year_lookup = f"{z_year}_{''.join(quarter)}"
                    else:
                        year_lookup = z_year
                else:
                    continue
                if rides and '_trips_' in fn.lower():
                    print(fn, year_lookup)
                    df = process_ride_df(z, fpath, year_lookup)
                    ride_dfs.append(df)
                elif stations and '_stations_' in fn.lower():
                    print(fn, year_lookup)
                    df = process_station_df(z, fpath, year_lookup)
                    station_dfs.append(df)
    if rides:
        ride_dfs = combine_ride_dfs(ride_dfs)
    if stations:
        # No 2018 station dump was published; use the live feed instead.
        if '2018' in years:
            df = get_current_stations()
            station_dfs.append(df)
        station_dfs = combine_station_dfs(station_dfs)
    return ride_dfs, station_dfs
| true | true |
f720d9f5df4419371640fe5d3822b74acdb36bf0 | 35,757 | py | Python | incidentes/views.py | Alvaruz/ATMS | 962a1967e1654efe4d448891deb7881fa3addf85 | [
"MIT"
] | null | null | null | incidentes/views.py | Alvaruz/ATMS | 962a1967e1654efe4d448891deb7881fa3addf85 | [
"MIT"
] | null | null | null | incidentes/views.py | Alvaruz/ATMS | 962a1967e1654efe4d448891deb7881fa3addf85 | [
"MIT"
] | null | null | null | from django.shortcuts import render, redirect
from django.template import loader
from django.urls import reverse_lazy
from .models import *
from django.http import HttpResponse
from .forms import TicketForm
from django.views.generic import ListView, CreateView, UpdateView, DeleteView
from django.core.paginator import EmptyPage, PageNotAnInteger, Paginator
from django.db import connections
from django.db.models import Count
from django.http import JsonResponse
from django.core import serializers
from datetime import *
from django.utils import timezone
from django.utils.timezone import make_aware
# Create your views here.
def home(request):
    """Render the landing page."""
    return render(request, "index2.html", {})
def base(request):
    """Render the bare base template."""
    return render(request, "base.html", {})
def ticket_list(request):
    """Render the (empty-context) ticket list template."""
    return render(request, "ticket_list.html", {})
def ticket_home(request):
    """Render the tickets overview page."""
    return render(request, "tickets2.html", {})
def login(request):
    """Render the login page (no authentication is performed here)."""
    return render(request, "login.html", {})
def tickets(request):
    """Render the ticket list page with all tickets, newest first.

    Fix: the old body built a ``Paginator`` it never used and carried
    several blocks of commented-out pagination code; both removed.
    """
    ticket = Ticket.objects.order_by('-fecha')
    template = loader.get_template('ticket_list.html')
    # The template historically reads several keys; they all point at the
    # same queryset, so every key is kept for backward compatibility.
    context = {
        'ticket': ticket,
        'categoria': ticket,
        'grupo_destino': ticket,
        'fecha': ticket,
        'estado': ticket,
    }
    return HttpResponse(template.render(context, request))
def ticket_view(request):
    """Create a new Ticket from the submitted form, or show a blank one.

    On an invalid POST the bound form (with its errors) is re-rendered.
    """
    if request.method != 'POST':
        return render(request, 'ticket_form.html', {'form': TicketForm()})
    form = TicketForm(request.POST)
    if form.is_valid():
        form.save()
        print("formulario guardado")
        return redirect('tickets')
    return render(request, 'ticket_form.html', {'form': form})
# Trial class-based version of the ticket list view.
class TicketListView(ListView):
    """List tickets filtered by the ``author_id`` captured from the URL."""
    template_name = 'ticket_list.html'
    model = Ticket
    paginate_by = 25
    # NOTE(review): this class attribute appears unused; get_queryset()
    # below drives the listing. Confirm before removing.
    listado_tickets = Ticket.objects.all()
    def get_queryset(self):
        # Restrict the default queryset to the author_id URL kwarg.
        # Assumes the URLconf supplies 'author_id' — confirm.
        queryset = super(TicketListView, self).get_queryset()
        return queryset.filter(author_id=self.kwargs['author_id'])
class TicketAddView(CreateView):
    """Ticket creation view.

    Fix: the previous ``form_valid`` override called ``form.save()`` and
    then ``super().form_valid(form)``, which saves the form again —
    i.e. two database writes per submission. ``CreateView.form_valid``
    already saves the form, so the override is dropped.
    """
    model = Ticket
    template_name = 'ticket_form2.html'
    form_class = TicketForm
    success_url = reverse_lazy('ticket_list')
def ticket_edit(request, pk):
    """Edit an existing Ticket: GET shows the bound form, POST saves it.

    Fix: removed leftover debug code that opened 'wtf.txt' and called
    ``f.write(form)`` — writing a Form object raises TypeError, so every
    POST crashed before validation. Also removed the stray ``print``.
    """
    # NOTE(review): raises Ticket.DoesNotExist (HTTP 500) for an unknown pk;
    # consider get_object_or_404 if a 404 is preferred.
    ticket = Ticket.objects.get(id=pk)
    if request.method == 'GET':
        form = TicketForm(instance=ticket)
    else:
        form = TicketForm(request.POST, instance=ticket)
        if form.is_valid():
            form.save()
            return redirect('ticket_list')
    # Falls through here on GET and on invalid POST (form carries errors).
    return render(request, 'ticket_form2.html', {'form': form})
class TicketEditView(UpdateView):
    """Edit an existing Ticket via the standard model-form flow."""
    model = Ticket
    template_name = 'ticket_form2.html'
    form_class = TicketForm
    success_url = reverse_lazy('ticket_list')
    # NOTE(review): paginate_by belongs to list views and has no effect on
    # an UpdateView; left in place to avoid touching configuration.
    paginate_by = 25
class TicketDeleteView(DeleteView):
    """Confirm-and-delete view for a Ticket."""
    model = Ticket
    template_name = 'ticket_delete2.html'
    # NOTE(review): DeleteView only honours form_class on newer Django
    # versions — confirm it is needed here.
    form_class = TicketForm
    success_url = reverse_lazy('ticket_list')
def estadisticas_main(request):
    """Render the statistics landing page."""
    return render(request, 'estadisticas_main.html', {})
def apimes(request):
    """Return ticket counts grouped by month of ``fecha`` as a JSON list."""
    # NOTE(review): QuerySet.extra() is discouraged in modern Django;
    # annotating with django.db.models.functions.Trunc('fecha', 'month')
    # is the supported equivalent — confirm before migrating.
    data = Ticket.objects.all() \
        .extra(select={'month': connections[Ticket.objects.db].ops.date_trunc_sql('month', 'fecha')}) \
        .values('month') \
        .annotate(count_items=Count('id'))
    return JsonResponse(list(data), safe=False)
def estadisticas_total(request):
    """Render global ticket statistics: counts by category, group, state, user.

    Fix: the old body issued ~30 copy-pasted queries as individual
    statements (with a no-op ``.only()`` before each ``.count()``).
    The counts are now driven by name->id lookup tables; the rendered
    ``data`` dict carries exactly the same keys as before.
    """
    categoria_ids = {
        'mantenimiento': 1,
        'vehiculo_mal_estacionado': 2,
        'vehiculo_descompuesto': 3,
        'manifestacion': 4,
        'cierre_de_calle': 5,
        'accidente': 6,
        'obras': 7,
        'obstaculo': 8,
        'congestionamiento': 9,
        'sincronizacion': 10,
        'semaforo_apagado': 11,
        'infracciones': 12,
        'led_foco': 13,
    }
    grupo_ids = {
        'sistemas': 1,
        'redes': 2,
        'pmt_atms': 3,
        'pmt_otros': 4,
        'operadores': 5,
        'tecnicos': 6,
        'administrativa': 7,
        'jefatura': 8,
    }
    estado_ids = {'pendiente': 1, 'cerrado': 2, 'atendido': 3, 'vencido': 4}
    usuario_ids = {'atms': 1, 'jose': 2, 'emilio': 3, 'gustavo': 4, 'elias': 25}

    data = {}
    data.update({k: Ticket.objects.filter(categoria=v).count()
                 for k, v in categoria_ids.items()})
    data.update({k: Ticket.objects.filter(grupo_destino=v).count()
                 for k, v in grupo_ids.items()})
    data.update({k: Ticket.objects.filter(estado=v).count()
                 for k, v in estado_ids.items()})
    data.update({k: Ticket.objects.filter(usuario=v).count()
                 for k, v in usuario_ids.items()})
    # Aggregate the template also displays: total tickets across known users.
    data['usuario'] = sum(data[k] for k in usuario_ids)
    return render(request, 'estadisticas_global.html', {'data': data})
def estadisticas_mes(request):
    """Render current-month ticket statistics (by category, group, state, user).

    Fix: replaces ~30 copy-pasted ``filter(fecha__month=mes)`` queries with
    a shared base queryset plus name->id lookup tables. The rendered
    ``data`` dict carries exactly the same keys as before, including the
    ``categoria``/``grupo``/``estado``/``usuario`` aggregate totals.
    """
    hoy = datetime.now().day
    mes = datetime.now().month
    # All monthly counts share this base queryset.
    mes_qs = Ticket.objects.filter(fecha__month=mes)

    categoria_ids = {
        'mantenimiento': 1,
        'vehiculo_mal_estacionado': 2,
        'vehiculo_descompuesto': 3,
        'manifestacion': 4,
        'cierre_de_calle': 5,
        'accidente': 6,
        'obras': 7,
        'obstaculo': 8,
        'congestionamiento': 9,
        'sincronizacion': 10,
        'semaforo_apagado': 11,
        'infracciones': 12,
        'led_foco': 13,
    }
    grupo_ids = {
        'sistemas': 1,
        'redes': 2,
        'pmt_atms': 3,
        'pmt_otros': 4,
        'operadores': 5,
        'tecnicos': 6,
        'administrativa': 7,
        'jefatura': 8,
    }
    estado_ids = {'pendiente': 1, 'cerrado': 2, 'atendido': 3, 'vencido': 4}
    usuario_ids = {'atms': 1, 'jose': 2, 'emilio': 3, 'gustavo': 4, 'elias': 25}

    data = {}
    data.update({k: mes_qs.filter(categoria=v).count()
                 for k, v in categoria_ids.items()})
    data.update({k: mes_qs.filter(grupo_destino=v).count()
                 for k, v in grupo_ids.items()})
    data.update({k: mes_qs.filter(estado=v).count()
                 for k, v in estado_ids.items()})
    data.update({k: mes_qs.filter(usuario=v).count()
                 for k, v in usuario_ids.items()})
    # Aggregate totals the template also displays.
    data['categoria'] = sum(data[k] for k in categoria_ids)
    data['grupo'] = sum(data[k] for k in grupo_ids)
    data['estado'] = sum(data[k] for k in estado_ids)
    data['usuario'] = sum(data[k] for k in usuario_ids)
    data['hoy'] = hoy
    data['mes'] = mes
    return render(request, 'estadisticas_mes.html', {'data': data})
def estadisticas_dia(request):
    """Ticket counts for "today", broken down by categoria, grupo_destino,
    estado and usuario, rendered into estadisticas_dia.html.

    NOTE(review): the filter uses fecha__day + fecha__month only (no year),
    so it matches the same calendar date in *every* year. Add fecha__year
    if single-day counts are intended — kept as-is to preserve behavior.
    """
    hoy = datetime.now().day
    mes = datetime.now().month
    base = Ticket.objects.filter(fecha__day=hoy, fecha__month=mes)
    # Context-key -> primary-key maps; one COUNT query per entry, exactly
    # the queries the hand-written original issued.
    categorias = {
        "mantenimiento": 1,
        "vehiculo_mal_estacionado": 2,
        "vehiculo_descompuesto": 3,
        "manifestacion": 4,
        "cierre_de_calle": 5,
        "accidente": 6,
        "obras": 7,
        "obstaculo": 8,
        "congestionamiento": 9,
        "sincronizacion": 10,
        "semaforo_apagado": 11,
        "infracciones": 12,
        "led_foco": 13,
    }
    grupos = {
        "sistemas": 1,
        "redes": 2,
        "pmt_atms": 3,
        "pmt_otros": 4,
        "operadores": 5,
        "tecnicos": 6,
        "administrativa": 7,
        "jefatura": 8,
    }
    estados = {"pendiente": 1, "cerrado": 2, "atendido": 3, "vencido": 4}
    usuarios = {"atms": 1, "jose": 2, "emilio": 3, "gustavo": 4, "elias": 25}
    data = {}
    for nombre, pk in categorias.items():
        data[nombre] = base.filter(categoria=pk).count()
    for nombre, pk in grupos.items():
        data[nombre] = base.filter(grupo_destino=pk).count()
    for nombre, pk in estados.items():
        data[nombre] = base.filter(estado=pk).count()
    for nombre, pk in usuarios.items():
        data[nombre] = base.filter(usuario=pk).count()
    # Per-dimension totals — the same sums the original computed by hand.
    data["categoria"] = sum(data[n] for n in categorias)
    data["grupo"] = sum(data[n] for n in grupos)
    data["estado"] = sum(data[n] for n in estados)
    data["usuario"] = sum(data[n] for n in usuarios)
    data["hoy"] = hoy
    data["mes"] = mes
    return render(request, 'estadisticas_dia.html', {'data': data})
def comunicaciones_estadisticas_mes(request):
    """Current-month ticket counts by categoria, grupo_destino and estado
    for the communications dashboard (comunicaciones_estadisticas_mes.html).

    NOTE(review): fecha__month without fecha__year matches this month in
    every year — confirm whether that is intended; behavior preserved.
    """
    hoy = datetime.now().day
    mes = datetime.now().month
    base = Ticket.objects.filter(fecha__month=mes)
    # Context-key -> primary-key maps replacing ~25 copy-pasted queries.
    categorias = {
        "mantenimiento": 1,
        "vehiculo_mal_estacionado": 2,
        "vehiculo_descompuesto": 3,
        "manifestacion": 4,
        "cierre_de_calle": 5,
        "accidente": 6,
        "obras": 7,
        "obstaculo": 8,
        "congestionamiento": 9,
        "sincronizacion": 10,
        "semaforo_apagado": 11,
        "infracciones": 12,
        "led_foco": 13,
    }
    grupos = {
        "sistemas": 1,
        "redes": 2,
        "pmt_atms": 3,
        "pmt_otros": 4,
        "operadores": 5,
        "tecnicos": 6,
        "administrativa": 7,
        "jefatura": 8,
    }
    estados = {"pendiente": 1, "cerrado": 2, "atendido": 3, "vencido": 4}
    data = {}
    for nombre, pk in categorias.items():
        data[nombre] = base.filter(categoria=pk).count()
    for nombre, pk in grupos.items():
        data[nombre] = base.filter(grupo_destino=pk).count()
    for nombre, pk in estados.items():
        data[nombre] = base.filter(estado=pk).count()
    # Per-dimension totals, as in the original.
    data["categoria"] = sum(data[n] for n in categorias)
    data["grupo"] = sum(data[n] for n in grupos)
    data["estado"] = sum(data[n] for n in estados)
    data["hoy"] = hoy
    data["mes"] = mes
    return render(request, 'comunicaciones_estadisticas_mes.html', {'data': data})
def comunicaciones_estadisticas_dia(request):
    """Today's ticket counts by categoria, grupo_destino and estado for the
    communications dashboard (comunicaciones_estadisticas_hoy.html).

    NOTE(review): fecha__day + fecha__month without a year matches the same
    date in every year — confirm intent; behavior preserved.
    """
    hoy = datetime.now().day
    mes = datetime.now().month
    base = Ticket.objects.filter(fecha__day=hoy, fecha__month=mes)
    categorias = {
        "mantenimiento": 1,
        "vehiculo_mal_estacionado": 2,
        "vehiculo_descompuesto": 3,
        "manifestacion": 4,
        "cierre_de_calle": 5,
        "accidente": 6,
        "obras": 7,
        "obstaculo": 8,
        "congestionamiento": 9,
        "sincronizacion": 10,
        "semaforo_apagado": 11,
        "infracciones": 12,
        "led_foco": 13,
    }
    grupos = {
        "sistemas": 1,
        "redes": 2,
        "pmt_atms": 3,
        "pmt_otros": 4,
        "operadores": 5,
        "tecnicos": 6,
        "administrativa": 7,
        "jefatura": 8,
    }
    estados = {"pendiente": 1, "cerrado": 2, "atendido": 3, "vencido": 4}
    data = {}
    for nombre, pk in categorias.items():
        data[nombre] = base.filter(categoria=pk).count()
    for nombre, pk in grupos.items():
        data[nombre] = base.filter(grupo_destino=pk).count()
    for nombre, pk in estados.items():
        data[nombre] = base.filter(estado=pk).count()
    data["categoria"] = sum(data[n] for n in categorias)
    data["grupo"] = sum(data[n] for n in grupos)
    data["estado"] = sum(data[n] for n in estados)
    data["hoy"] = hoy
    data["mes"] = mes
    # Template name kept exactly as the original ("..._hoy.html").
    return render(request, 'comunicaciones_estadisticas_hoy.html', {'data': data})
def prensa_estadisticas_mes(request):
    """Current-month counts for the press dashboard: only the street-event
    categories (2-9), plus all grupo_destino and estado breakdowns.

    NOTE(review): fecha__month without fecha__year matches this month in
    every year — confirm intent; behavior preserved.
    """
    hoy = datetime.now().day
    mes = datetime.now().month
    base = Ticket.objects.filter(fecha__month=mes)
    # Press view deliberately excludes mantenimiento (1) and the
    # signal/infraction categories (10-13), matching the original queries.
    categorias = {
        "vehiculo_mal_estacionado": 2,
        "vehiculo_descompuesto": 3,
        "manifestacion": 4,
        "cierre_de_calle": 5,
        "accidente": 6,
        "obras": 7,
        "obstaculo": 8,
        "congestionamiento": 9,
    }
    grupos = {
        "sistemas": 1,
        "redes": 2,
        "pmt_atms": 3,
        "pmt_otros": 4,
        "operadores": 5,
        "tecnicos": 6,
        "administrativa": 7,
        "jefatura": 8,
    }
    estados = {"pendiente": 1, "cerrado": 2, "atendido": 3, "vencido": 4}
    data = {}
    for nombre, pk in categorias.items():
        data[nombre] = base.filter(categoria=pk).count()
    for nombre, pk in grupos.items():
        data[nombre] = base.filter(grupo_destino=pk).count()
    for nombre, pk in estados.items():
        data[nombre] = base.filter(estado=pk).count()
    data["categoria"] = sum(data[n] for n in categorias)
    data["grupo"] = sum(data[n] for n in grupos)
    data["estado"] = sum(data[n] for n in estados)
    data["hoy"] = hoy
    data["mes"] = mes
    return render(request, 'prensa_estadisticas_mes.html', {'data': data})
# Sin uso
def prensa_estadisticas_dia(request):
    """Today's counts for the press dashboard (street-event categories 2-9
    plus grupo_destino and estado breakdowns). Marked "Sin uso" (unused)
    in the original source.

    NOTE(review): fecha__day + fecha__month without a year matches the same
    date in every year — confirm intent; behavior preserved.
    """
    hoy = datetime.now().day
    mes = datetime.now().month
    base = Ticket.objects.filter(fecha__day=hoy, fecha__month=mes)
    categorias = {
        "vehiculo_mal_estacionado": 2,
        "vehiculo_descompuesto": 3,
        "manifestacion": 4,
        "cierre_de_calle": 5,
        "accidente": 6,
        "obras": 7,
        "obstaculo": 8,
        "congestionamiento": 9,
    }
    grupos = {
        "sistemas": 1,
        "redes": 2,
        "pmt_atms": 3,
        "pmt_otros": 4,
        "operadores": 5,
        "tecnicos": 6,
        "administrativa": 7,
        "jefatura": 8,
    }
    estados = {"pendiente": 1, "cerrado": 2, "atendido": 3, "vencido": 4}
    data = {}
    for nombre, pk in categorias.items():
        data[nombre] = base.filter(categoria=pk).count()
    for nombre, pk in grupos.items():
        data[nombre] = base.filter(grupo_destino=pk).count()
    for nombre, pk in estados.items():
        data[nombre] = base.filter(estado=pk).count()
    data["categoria"] = sum(data[n] for n in categorias)
    data["grupo"] = sum(data[n] for n in grupos)
    data["estado"] = sum(data[n] for n in estados)
    data["hoy"] = hoy
    data["mes"] = mes
    # Template name kept exactly as the original ("..._hoy.html").
    return render(request, 'prensa_estadisticas_hoy.html', {'data': data})
def global_versus(request):
    """Head-to-head comparison of PMT Atms (grupo_destino=3) vs PMT Otros
    (grupo_destino=4): totals, expired (estado=4) totals, and expired
    counts per street-event category.
    """
    data = {
        "pmt_atms_total": Ticket.objects.filter(grupo_destino=3).count(),
        "pmt_otros_total": Ticket.objects.filter(grupo_destino=4).count(),
        "pmt_atms_vencidos": Ticket.objects.filter(grupo_destino=3, estado=4).count(),
        "pmt_otros_vencidos": Ticket.objects.filter(grupo_destino=4, estado=4).count(),
    }
    # Category keys match the original context names; note the
    # "infracciones_varias" label maps to categoria 12.
    categorias = {
        "vehiculo_mal_estacionado": 2,
        "vehiculo_descompuesto": 3,
        "manifestacion": 4,
        "cierre_de_calle": 5,
        "accidente": 6,
        "obras": 7,
        "obstaculo": 8,
        "congestionamiento": 9,
        "infracciones_varias": 12,
    }
    # Per-category counts are restricted to expired tickets (estado=4),
    # exactly as the original queries were.
    for prefijo, grupo in (("pmt_atms", 3), ("pmt_otros", 4)):
        for nombre, pk in categorias.items():
            clave = prefijo + "_" + nombre
            data[clave] = Ticket.objects.filter(
                grupo_destino=grupo, estado=4, categoria=pk).count()
    return render(request, 'global_versus.html', {'data': data})
# Mcal. López
# NOTE(review): the statements below sit at module level and run at import
# time. `mcal_lopez` is a lazy queryset (no query is executed here) and is
# not referenced anywhere visible in this file.
mcal_lopez = Ticket.objects.filter(ubicacion__contains='cal')
# NOTE(review): these two .count() calls DO hit the database once, at import
# time, and their results are discarded — they look like leftover shell
# experiments; confirm before removing.
Ticket.objects.select_related('grupo_destino').filter(grupo_destino=3).count() # PMT Atms
Ticket.objects.select_related('grupo_destino').filter(grupo_destino=4).count() # PMT Otros
from django.template import loader
from django.urls import reverse_lazy
from .models import *
from django.http import HttpResponse
from .forms import TicketForm
from django.views.generic import ListView, CreateView, UpdateView, DeleteView
from django.core.paginator import EmptyPage, PageNotAnInteger, Paginator
from django.db import connections
from django.db.models import Count
from django.http import JsonResponse
from django.core import serializers
from datetime import *
from django.utils import timezone
from django.utils.timezone import make_aware
def home(request):
    """Render the site landing page."""
    context = {}
    return render(request, "index2.html", context)
def base(request):
    """Render the base layout template on its own."""
    context = {}
    return render(request, "base.html", context)
def ticket_list(request):
    """Render the ticket list template with an empty context."""
    context = {}
    return render(request, "ticket_list.html", context)
def ticket_home(request):
    """Render the tickets home page."""
    context = {}
    return render(request, "tickets2.html", context)
def login(request):
    """Render the login page (no authentication handling here)."""
    context = {}
    return render(request, "login.html", context)
def tickets(request):
    """Render the ticket list, newest first.

    NOTE(review): the original built a ``Paginator(ticket, 25)`` that was
    never used — it is dropped here (no observable behavior change). The
    same queryset is passed under five different context keys; that looks
    redundant but the keys are kept so existing templates keep working.
    """
    ticket = Ticket.objects.order_by('-fecha')
    template = loader.get_template('ticket_list.html')
    context = {
        'ticket': ticket,
        'categoria': ticket,
        'grupo_destino': ticket,
        'fecha': ticket,
        'estado': ticket,
    }
    return HttpResponse(template.render(context, request))
def ticket_view(request):
    """Create a new Ticket from a submitted TicketForm.

    POST + valid form: save and redirect to the ticket list.
    POST + invalid form: fall through and re-render with errors.
    GET: render an empty form.
    """
    if request.method == 'POST':
        form = TicketForm(request.POST)
        if form.is_valid():
            form.save()
            # Leftover debug print("formulario guardado") removed; the
            # redirect itself is the success signal.
            return redirect('tickets')
    else:
        form = TicketForm()
    return render(request, 'ticket_form.html', {'form': form})
class TicketListView(ListView):
    """Paginated list of Tickets, optionally narrowed by author.

    ``get_queryset`` reads ``author_id`` from the URL kwargs, so the URL
    pattern routing here must capture it.
    """
    template_name = 'ticket_list.html'
    model = Ticket
    paginate_by = 25
    # NOTE(review): class-level queryset; lazy (not evaluated at import)
    # and apparently unused by the view itself — confirm before removing.
    listado_tickets = Ticket.objects.all()

    def get_queryset(self):
        # Restored method header: the source copy was garbled to
        # "yset(self):" (the leading "def get_quer" was lost), which is a
        # syntax error. The body follows the ListView.get_queryset contract.
        queryset = super(TicketListView, self).get_queryset()
        return queryset.filter(author_id=self.kwargs['author_id'])
class TicketAddView(CreateView):
    """Create view for Ticket using TicketForm.

    NOTE(review): the original overrode ``form_valid`` to call
    ``form.save()`` and then ``super().form_valid(form)`` — CreateView's
    own ``form_valid`` saves the form as well, so the object was saved
    twice. The redundant override is removed; the parent implementation
    performs the single save and the redirect to ``success_url``.
    """
    model = Ticket
    template_name = 'ticket_form2.html'
    form_class = TicketForm
    success_url = reverse_lazy('ticket_list')
def ticket_edit(request, pk):
    """Edit an existing Ticket identified by primary key.

    GET renders the form pre-filled with the ticket; POST validates and
    saves, then redirects to the ticket list. An invalid POST falls
    through and re-renders the form with errors.

    NOTE(review): ``Ticket.objects.get`` raises DoesNotExist for a bad pk
    (500); ``get_object_or_404`` would give a 404 — left unchanged.
    """
    ticket = Ticket.objects.get(id=pk)
    if request.method == 'GET':
        form = TicketForm(instance=ticket)
    else:
        form = TicketForm(request.POST, instance=ticket)
        # Removed leftover debug code: the original opened 'wtf.txt' and
        # called f.write(form) — a TypeError, since write() needs a str,
        # not a form object — and printed the form to stdout on every POST.
        if form.is_valid():
            form.save()
            return redirect('ticket_list')
    return render(request, 'ticket_form2.html', {'form': form})
class TicketEditView(UpdateView):
    """Update view for an existing Ticket."""

    model = Ticket
    form_class = TicketForm
    template_name = 'ticket_form2.html'
    success_url = reverse_lazy('ticket_list')
    # NOTE(review): paginate_by has no effect on an UpdateView; kept to
    # preserve the original class attributes exactly.
    paginate_by = 25
class TicketDeleteView(DeleteView):
    """Confirmation-and-delete view for a Ticket."""

    model = Ticket
    form_class = TicketForm
    template_name = 'ticket_delete2.html'
    success_url = reverse_lazy('ticket_list')
def estadisticas_main(request):
    """Render the statistics landing page."""
    context = {}
    return render(request, 'estadisticas_main.html', context)
def apimes(request):
    """JSON endpoint: ticket counts grouped by month of ``fecha``.

    Returns a JSON array of ``{"month": ..., "count_items": ...}`` rows
    (``safe=False`` because the top-level value is a list, not a dict).

    NOTE(review): this relies on ``QuerySet.extra`` with the backend's
    ``date_trunc_sql`` — both deprecated/removed in newer Django; the
    modern equivalent is ``annotate(month=TruncMonth('fecha'))``. Confirm
    the project's Django version before migrating; left byte-identical.
    """
    data = Ticket.objects.all() \
        .extra(select={'month': connections[Ticket.objects.db].ops.date_trunc_sql('month', 'fecha')}) \
        .values('month') \
        .annotate(count_items=Count('id'))
    return JsonResponse(list(data), safe=False)
def estadisticas_total(request):
    """All-time ticket counts by categoria, grupo_destino, estado and
    usuario, rendered into estadisticas_global.html.

    The original's ``.only("categoria")`` / ``.only("grupo_destino")`` /
    ``.only("estado")`` before ``.count()`` were no-ops (COUNT fetches no
    columns) and are dropped; the emitted COUNT queries are unchanged.
    """
    categorias = {
        "mantenimiento": 1,
        "vehiculo_mal_estacionado": 2,
        "vehiculo_descompuesto": 3,
        "manifestacion": 4,
        "cierre_de_calle": 5,
        "accidente": 6,
        "obras": 7,
        "obstaculo": 8,
        "congestionamiento": 9,
        "sincronizacion": 10,
        "semaforo_apagado": 11,
        "infracciones": 12,
        "led_foco": 13,
    }
    grupos = {
        "sistemas": 1,
        "redes": 2,
        "pmt_atms": 3,
        "pmt_otros": 4,
        "operadores": 5,
        "tecnicos": 6,
        "administrativa": 7,
        "jefatura": 8,
    }
    estados = {"pendiente": 1, "cerrado": 2, "atendido": 3, "vencido": 4}
    usuarios = {"atms": 1, "jose": 2, "emilio": 3, "gustavo": 4, "elias": 25}
    data = {}
    for nombre, pk in categorias.items():
        data[nombre] = Ticket.objects.filter(categoria=pk).count()
    for nombre, pk in grupos.items():
        data[nombre] = Ticket.objects.filter(grupo_destino=pk).count()
    for nombre, pk in estados.items():
        data[nombre] = Ticket.objects.filter(estado=pk).count()
    for nombre, pk in usuarios.items():
        data[nombre] = Ticket.objects.filter(usuario=pk).count()
    # Only the per-user total existed in the original context; the other
    # dimensions had no aggregate here.
    data["usuario"] = sum(data[n] for n in usuarios)
    return render(request, 'estadisticas_global.html', {'data': data})
def estadisticas_mes(request):
    """Current-month ticket counts by categoria, grupo_destino, estado and
    usuario, rendered into estadisticas_mes.html.

    NOTE(review): fecha__month without fecha__year matches this month in
    every year — confirm intent; behavior preserved.
    """
    hoy = datetime.now().day
    mes = datetime.now().month
    base = Ticket.objects.filter(fecha__month=mes)
    # Context-key -> primary-key maps; one COUNT query per entry, exactly
    # the queries the hand-written original issued.
    categorias = {
        "mantenimiento": 1,
        "vehiculo_mal_estacionado": 2,
        "vehiculo_descompuesto": 3,
        "manifestacion": 4,
        "cierre_de_calle": 5,
        "accidente": 6,
        "obras": 7,
        "obstaculo": 8,
        "congestionamiento": 9,
        "sincronizacion": 10,
        "semaforo_apagado": 11,
        "infracciones": 12,
        "led_foco": 13,
    }
    grupos = {
        "sistemas": 1,
        "redes": 2,
        "pmt_atms": 3,
        "pmt_otros": 4,
        "operadores": 5,
        "tecnicos": 6,
        "administrativa": 7,
        "jefatura": 8,
    }
    estados = {"pendiente": 1, "cerrado": 2, "atendido": 3, "vencido": 4}
    usuarios = {"atms": 1, "jose": 2, "emilio": 3, "gustavo": 4, "elias": 25}
    data = {}
    for nombre, pk in categorias.items():
        data[nombre] = base.filter(categoria=pk).count()
    for nombre, pk in grupos.items():
        data[nombre] = base.filter(grupo_destino=pk).count()
    for nombre, pk in estados.items():
        data[nombre] = base.filter(estado=pk).count()
    for nombre, pk in usuarios.items():
        data[nombre] = base.filter(usuario=pk).count()
    data["categoria"] = sum(data[n] for n in categorias)
    data["grupo"] = sum(data[n] for n in grupos)
    data["estado"] = sum(data[n] for n in estados)
    data["usuario"] = sum(data[n] for n in usuarios)
    data["hoy"] = hoy
    data["mes"] = mes
    return render(request, 'estadisticas_mes.html', {'data': data})
def estadisticas_dia(request):
	"""Render today's ticket statistics.

	Counts tickets created today per category, destination group, state and
	assigned user, plus a total per dimension, and renders them into
	'estadisticas_dia.html'.  The ``data`` dict uses exactly the same keys
	as the previous copy-pasted implementation, so the template is unchanged.
	"""
	hoy = datetime.now().day
	mes = datetime.now().month
	# NOTE(review): the original filtered on day+month only (no year), so
	# tickets from the same calendar date of previous years are included;
	# behavior preserved here -- confirm whether fecha__year should be added.
	base = Ticket.objects.filter(fecha__day=hoy, fecha__month=mes)
	# Template-key -> database-pk tables; these replace ~35 near-identical
	# hand-written filter().count() lines (the defect being fixed).
	categorias = {
		"mantenimiento": 1,
		"vehiculo_mal_estacionado": 2,
		"vehiculo_descompuesto": 3,
		"manifestacion": 4,
		"cierre_de_calle": 5,
		"accidente": 6,
		"obras": 7,
		"obstaculo": 8,
		"congestionamiento": 9,
		"sincronizacion": 10,
		"semaforo_apagado": 11,
		"infracciones": 12,
		"led_foco": 13,
	}
	grupos = {
		"sistemas": 1,
		"redes": 2,
		"pmt_atms": 3,
		"pmt_otros": 4,
		"operadores": 5,
		"tecnicos": 6,
		"administrativa": 7,
		"jefatura": 8,
	}
	estados = {"pendiente": 1, "cerrado": 2, "atendido": 3, "vencido": 4}
	usuarios = {"atms": 1, "jose": 2, "emilio": 3, "gustavo": 4, "elias": 25}
	data = {}
	# Still one COUNT query per entry, exactly like the original.  If this
	# view ever gets slow, all counts can be collapsed into a single query
	# with django.db.models.Count(filter=Q(...)) conditional aggregation.
	for clave, pk in categorias.items():
		data[clave] = base.filter(categoria=pk).count()
	for clave, pk in grupos.items():
		data[clave] = base.filter(grupo_destino=pk).count()
	for clave, pk in estados.items():
		data[clave] = base.filter(estado=pk).count()
	for clave, pk in usuarios.items():
		data[clave] = base.filter(usuario=pk).count()
	# Per-dimension totals (same semantics as the old hand-written sums).
	data["categoria"] = sum(data[k] for k in categorias)
	data["grupo"] = sum(data[k] for k in grupos)
	data["estado"] = sum(data[k] for k in estados)
	data["usuario"] = sum(data[k] for k in usuarios)
	data["hoy"] = hoy
	data["mes"] = mes
	return render(request, 'estadisticas_dia.html', {'data': data})
def comunicaciones_estadisticas_mes(request):
	"""Render this month's ticket statistics for the communications view.

	Counts tickets of the current month per category, destination group and
	state, plus a total per dimension, and renders them into
	'comunicaciones_estadisticas_mes.html'.  The ``data`` keys match the
	previous copy-pasted implementation exactly, so the template is unchanged.
	"""
	hoy = datetime.now().day  # shown in the template header; not a filter here
	mes = datetime.now().month
	# NOTE(review): month-only filter (no year) -- same month of previous
	# years is also counted; behavior preserved, confirm intent.
	base = Ticket.objects.filter(fecha__month=mes)
	# Template-key -> database-pk tables; these replace ~25 near-identical
	# hand-written filter().count() lines (the defect being fixed).
	categorias = {
		"mantenimiento": 1,
		"vehiculo_mal_estacionado": 2,
		"vehiculo_descompuesto": 3,
		"manifestacion": 4,
		"cierre_de_calle": 5,
		"accidente": 6,
		"obras": 7,
		"obstaculo": 8,
		"congestionamiento": 9,
		"sincronizacion": 10,
		"semaforo_apagado": 11,
		"infracciones": 12,
		"led_foco": 13,
	}
	grupos = {
		"sistemas": 1,
		"redes": 2,
		"pmt_atms": 3,
		"pmt_otros": 4,
		"operadores": 5,
		"tecnicos": 6,
		"administrativa": 7,
		"jefatura": 8,
	}
	estados = {"pendiente": 1, "cerrado": 2, "atendido": 3, "vencido": 4}
	data = {}
	# One COUNT query per entry, exactly like the original.
	for clave, pk in categorias.items():
		data[clave] = base.filter(categoria=pk).count()
	for clave, pk in grupos.items():
		data[clave] = base.filter(grupo_destino=pk).count()
	for clave, pk in estados.items():
		data[clave] = base.filter(estado=pk).count()
	# Per-dimension totals (same semantics as the old hand-written sums).
	data["categoria"] = sum(data[k] for k in categorias)
	data["grupo"] = sum(data[k] for k in grupos)
	data["estado"] = sum(data[k] for k in estados)
	data["hoy"] = hoy
	data["mes"] = mes
	return render(request, 'comunicaciones_estadisticas_mes.html', {'data': data})
def comunicaciones_estadisticas_dia(request):
	"""Render today's ticket statistics for the communications view.

	Counts today's tickets per category, destination group and state, plus a
	total per dimension.  The ``data`` keys match the previous copy-pasted
	implementation exactly, so the template is unchanged.
	"""
	hoy = datetime.now().day
	mes = datetime.now().month
	# NOTE(review): day+month filter (no year) -- same date of previous
	# years is also counted; behavior preserved, confirm intent.
	base = Ticket.objects.filter(fecha__day=hoy, fecha__month=mes)
	# Template-key -> database-pk tables; these replace ~25 near-identical
	# hand-written filter().count() lines (the defect being fixed).
	categorias = {
		"mantenimiento": 1,
		"vehiculo_mal_estacionado": 2,
		"vehiculo_descompuesto": 3,
		"manifestacion": 4,
		"cierre_de_calle": 5,
		"accidente": 6,
		"obras": 7,
		"obstaculo": 8,
		"congestionamiento": 9,
		"sincronizacion": 10,
		"semaforo_apagado": 11,
		"infracciones": 12,
		"led_foco": 13,
	}
	grupos = {
		"sistemas": 1,
		"redes": 2,
		"pmt_atms": 3,
		"pmt_otros": 4,
		"operadores": 5,
		"tecnicos": 6,
		"administrativa": 7,
		"jefatura": 8,
	}
	estados = {"pendiente": 1, "cerrado": 2, "atendido": 3, "vencido": 4}
	data = {}
	# One COUNT query per entry, exactly like the original.
	for clave, pk in categorias.items():
		data[clave] = base.filter(categoria=pk).count()
	for clave, pk in grupos.items():
		data[clave] = base.filter(grupo_destino=pk).count()
	for clave, pk in estados.items():
		data[clave] = base.filter(estado=pk).count()
	# Per-dimension totals (same semantics as the old hand-written sums).
	data["categoria"] = sum(data[k] for k in categorias)
	data["grupo"] = sum(data[k] for k in grupos)
	data["estado"] = sum(data[k] for k in estados)
	data["hoy"] = hoy
	data["mes"] = mes
	# NOTE(review): template name says "hoy" while the view says "dia";
	# kept byte-identical because the template file presumably exists
	# under this name.
	return render(request, 'comunicaciones_estadisticas_hoy.html', {'data': data})
def prensa_estadisticas_mes(request):
	"""Render this month's ticket statistics for the press view.

	Same as the communications view but restricted to the street-incident
	categories (pks 2..9).  ``data`` keys match the previous copy-pasted
	implementation exactly, so the template is unchanged.
	"""
	hoy = datetime.now().day  # shown in the template header; not a filter here
	mes = datetime.now().month
	# NOTE(review): month-only filter (no year) -- behavior preserved.
	base = Ticket.objects.filter(fecha__month=mes)
	# Press only sees incident categories 2..9; maintenance/signal
	# categories (1, 10-13) are intentionally excluded, as before.
	categorias = {
		"vehiculo_mal_estacionado": 2,
		"vehiculo_descompuesto": 3,
		"manifestacion": 4,
		"cierre_de_calle": 5,
		"accidente": 6,
		"obras": 7,
		"obstaculo": 8,
		"congestionamiento": 9,
	}
	grupos = {
		"sistemas": 1,
		"redes": 2,
		"pmt_atms": 3,
		"pmt_otros": 4,
		"operadores": 5,
		"tecnicos": 6,
		"administrativa": 7,
		"jefatura": 8,
	}
	estados = {"pendiente": 1, "cerrado": 2, "atendido": 3, "vencido": 4}
	data = {}
	# One COUNT query per entry, exactly like the original.
	for clave, pk in categorias.items():
		data[clave] = base.filter(categoria=pk).count()
	for clave, pk in grupos.items():
		data[clave] = base.filter(grupo_destino=pk).count()
	for clave, pk in estados.items():
		data[clave] = base.filter(estado=pk).count()
	# Per-dimension totals (same semantics as the old hand-written sums).
	data["categoria"] = sum(data[k] for k in categorias)
	data["grupo"] = sum(data[k] for k in grupos)
	data["estado"] = sum(data[k] for k in estados)
	data["hoy"] = hoy
	data["mes"] = mes
	return render(request, 'prensa_estadisticas_mes.html', {'data': data})
def prensa_estadisticas_dia(request):
	"""Render today's ticket statistics for the press view.

	Same as :func:`prensa_estadisticas_mes` but filtered to today's date.
	``data`` keys match the previous copy-pasted implementation exactly,
	so the template is unchanged.
	"""
	hoy = datetime.now().day
	mes = datetime.now().month
	# NOTE(review): day+month filter (no year) -- behavior preserved.
	base = Ticket.objects.filter(fecha__day=hoy, fecha__month=mes)
	# Press only sees incident categories 2..9; maintenance/signal
	# categories (1, 10-13) are intentionally excluded, as before.
	categorias = {
		"vehiculo_mal_estacionado": 2,
		"vehiculo_descompuesto": 3,
		"manifestacion": 4,
		"cierre_de_calle": 5,
		"accidente": 6,
		"obras": 7,
		"obstaculo": 8,
		"congestionamiento": 9,
	}
	grupos = {
		"sistemas": 1,
		"redes": 2,
		"pmt_atms": 3,
		"pmt_otros": 4,
		"operadores": 5,
		"tecnicos": 6,
		"administrativa": 7,
		"jefatura": 8,
	}
	estados = {"pendiente": 1, "cerrado": 2, "atendido": 3, "vencido": 4}
	data = {}
	# One COUNT query per entry, exactly like the original.
	for clave, pk in categorias.items():
		data[clave] = base.filter(categoria=pk).count()
	for clave, pk in grupos.items():
		data[clave] = base.filter(grupo_destino=pk).count()
	for clave, pk in estados.items():
		data[clave] = base.filter(estado=pk).count()
	# Per-dimension totals (same semantics as the old hand-written sums).
	data["categoria"] = sum(data[k] for k in categorias)
	data["grupo"] = sum(data[k] for k in grupos)
	data["estado"] = sum(data[k] for k in estados)
	data["hoy"] = hoy
	data["mes"] = mes
	# NOTE(review): template name says "hoy" while the view says "dia";
	# kept byte-identical to match the existing template file.
	return render(request, 'prensa_estadisticas_hoy.html', {'data': data})
def global_versus(request):
	"""Compare PMT-ATMS (grupo_destino=3) against PMT-otros (grupo_destino=4).

	For each of the two groups: total tickets, expired tickets (estado=4),
	and expired tickets broken down by category.  Renders
	'global_versus.html' with exactly the same ``data`` keys as the
	previous implementation, which duplicated every query once per group
	(the defect being fixed).
	"""
	# Category breakdown used for the expired-ticket comparison; note the
	# original keyed pk 12 as "infracciones_varias" (not "infracciones").
	categorias = {
		"vehiculo_mal_estacionado": 2,
		"vehiculo_descompuesto": 3,
		"manifestacion": 4,
		"cierre_de_calle": 5,
		"accidente": 6,
		"obras": 7,
		"obstaculo": 8,
		"congestionamiento": 9,
		"infracciones_varias": 12,
	}
	data = {}
	for prefijo, grupo in (("pmt_atms", 3), ("pmt_otros", 4)):
		del_grupo = Ticket.objects.filter(grupo_destino=grupo)
		vencidos = del_grupo.filter(estado=4)
		data["%s_total" % prefijo] = del_grupo.count()
		data["%s_vencidos" % prefijo] = vencidos.count()
		# One COUNT query per (group, category), exactly as before.
		for nombre, categoria in categorias.items():
			data["%s_%s" % (prefijo, nombre)] = vencidos.filter(categoria=categoria).count()
	return render(request, 'global_versus.html', {'data': data})
# NOTE(review): module-level leftovers.  These three statements execute at
# import time.  The two `.count()` results are discarded outright, and the
# `mcal_lopez` queryset is never used anywhere in this chunk -- they look
# like debugging residue.  Confirm no other module imports `mcal_lopez`
# before deleting; the two discarded count() calls each hit the database
# once on import and are safe to remove.
mcal_lopez = Ticket.objects.filter(ubicacion__contains='cal')
Ticket.objects.select_related('grupo_destino').filter(grupo_destino=3).count()
Ticket.objects.select_related('grupo_destino').filter(grupo_destino=4).count()
f720da7486a07c56f32fcbde3e8956ad3ccbd326 | 1,830 | py | Python | doc/listings/interstore/webcal.py | jonathanj/mantissa | 53e5502aba23ce99be78b27f923a276593033fe8 | [
"MIT"
] | 6 | 2016-02-17T15:04:53.000Z | 2021-08-20T09:44:10.000Z | doc/listings/interstore/webcal.py | jonathanj/mantissa | 53e5502aba23ce99be78b27f923a276593033fe8 | [
"MIT"
] | 62 | 2015-02-04T23:40:55.000Z | 2021-02-18T19:56:02.000Z | doc/listings/interstore/webcal.py | jonathanj/mantissa | 53e5502aba23ce99be78b27f923a276593033fe8 | [
"MIT"
] | 8 | 2015-11-15T17:26:42.000Z | 2020-12-02T06:36:52.000Z |
from datetime import timedelta
from epsilon.extime import Time
from nevow.page import renderer
from nevow.loaders import stan
from nevow.tags import div
from nevow.athena import LiveElement
from xmantissa.liveform import TEXT_INPUT, LiveForm, Parameter
class CalendarElement(LiveElement):
    """Server side of a live calendar widget.

    Renders the backing calendar's appointments plus a LiveForm for
    requesting a new appointment with another user.
    """
    # Static stan template: the two inner <div>s are populated by the
    # `appointments` and `appointmentForm` renderers below.
    docFactory = stan(div[
        "It's a calendar!",
        div(render="appointments"),
        div(render="appointmentForm")])
    def __init__(self, calendar):
        """``calendar`` is the application object this element fronts for.

        It must provide ``getAppointments``, ``calendarIDFor`` and
        ``requestAppointmentWith`` (see usage below).
        """
        LiveElement.__init__(self)
        self.calendar = calendar
    @renderer
    def appointments(self, request, tag):
        """Render one <div> per appointment, annotating its status."""
        appointments = self.calendar.getAppointments()
        for appointment in appointments:
            appDiv = div[
                "Appointment with ",
                appointment.withWhomUsername, "@",
                appointment.withWhomDomain, " at ",
                appointment.when.asHumanly()]
            # stan tags accumulate children via __getitem__, so these
            # branches extend appDiv in place (the result is discarded
            # deliberately).
            if appointment.failed is not None:
                appDiv[" (Rejected: ", appointment.failed, ")"]
            elif appointment.remoteID is None:
                # No remote confirmation recorded yet.
                appDiv[" (Pending confirmation)"]
            tag[appDiv]
        return tag
    def _requestAppointment(self, whom):
        """LiveForm callback: request an appointment with ``user@domain``.

        The appointment time is hard-coded to two days from now.
        """
        local, domain = whom.split(u"@")
        target = self.calendar.calendarIDFor(local, domain)
        self.calendar.requestAppointmentWith(target, Time() + timedelta(days=2))
    @renderer
    def appointmentForm(self, request, tag):
        """Render the appointment-request LiveForm."""
        form = LiveForm(
            self._requestAppointment,
            [Parameter(u"whom", TEXT_INPUT, unicode, u"Whom:",
                       u"The username of the person with whom "
                       u"to create an appointment (user@domain).",
                       None)],
            "Request An Appointment")
        form.setFragmentParent(self)
        return form
| 29.516129 | 80 | 0.604918 |
from datetime import timedelta
from epsilon.extime import Time
from nevow.page import renderer
from nevow.loaders import stan
from nevow.tags import div
from nevow.athena import LiveElement
from xmantissa.liveform import TEXT_INPUT, LiveForm, Parameter
class CalendarElement(LiveElement):
docFactory = stan(div[
"It's a calendar!",
div(render="appointments"),
div(render="appointmentForm")])
def __init__(self, calendar):
LiveElement.__init__(self)
self.calendar = calendar
@renderer
def appointments(self, request, tag):
appointments = self.calendar.getAppointments()
for appointment in appointments:
appDiv = div[
"Appointment with ",
appointment.withWhomUsername, "@",
appointment.withWhomDomain, " at ",
appointment.when.asHumanly()]
if appointment.failed is not None:
appDiv[" (Rejected: ", appointment.failed, ")"]
elif appointment.remoteID is None:
appDiv[" (Pending confirmation)"]
tag[appDiv]
return tag
def _requestAppointment(self, whom):
local, domain = whom.split(u"@")
target = self.calendar.calendarIDFor(local, domain)
self.calendar.requestAppointmentWith(target, Time() + timedelta(days=2))
@renderer
def appointmentForm(self, request, tag):
form = LiveForm(
self._requestAppointment,
[Parameter(u"whom", TEXT_INPUT, unicode, u"Whom:",
u"The username of the person with whom "
u"to create an appointment (user@domain).",
None)],
"Request An Appointment")
form.setFragmentParent(self)
return form
| true | true |
f720da77bf370fc9b4db8eeeefff5308d08c418c | 197 | py | Python | robots/test/strategies/run_tests/tests/test_sharing/test_share/t1.py | memristor/mep2 | bc5cddacba3d740f791f3454b8cb51bda83ce202 | [
"MIT"
] | 5 | 2018-11-27T15:15:00.000Z | 2022-02-10T21:44:13.000Z | robots/test/strategies/run_tests/tests/test_sharing/test_share/t1.py | memristor/mep2 | bc5cddacba3d740f791f3454b8cb51bda83ce202 | [
"MIT"
] | 2 | 2018-10-20T15:48:40.000Z | 2018-11-20T05:11:33.000Z | robots/test/strategies/run_tests/tests/test_sharing/test_share/t1.py | memristor/mep2 | bc5cddacba3d740f791f3454b8cb51bda83ce202 | [
"MIT"
] | 1 | 2020-02-07T12:44:47.000Z | 2020-02-07T12:44:47.000Z | weight=1
# Shared strategy state: declares state slot 'a' exposed under the name
# 'var1' with shared=True -- presumably synchronized with other robots by
# the mep2 runtime (TODO confirm against _State's implementation).
a=_State('a', name='var1', shared=True)
def run():
	# NOTE(review): `_do` appears to register each decorated function as a
	# strategy step rather than executing it immediately, and `sleep` looks
	# like the framework's non-blocking wait -- confirm against the mep2
	# strategy DSL before relying on exact ordering.
	@_do
	def _():
		print(a.val)
	sleep(10)
	a.val = 5
	@_do
	def _():
		print(a.val)
	sleep(10)
	a.val = 8
	@_do
	def _():
		print(a.val)
| 11.588235 | 39 | 0.563452 | weight=1
a=_State('a', name='var1', shared=True)
def run():
@_do
def _():
print(a.val)
sleep(10)
a.val = 5
@_do
def _():
print(a.val)
sleep(10)
a.val = 8
@_do
def _():
print(a.val)
| true | true |
f720da93b083e8b08000df92605af508a5009d38 | 2,479 | py | Python | csympy/tests/test_arit.py | shipci/csympy | 6b5a1d7d8a3f9bbe0b983b78a44be90a70db0743 | [
"MIT"
] | null | null | null | csympy/tests/test_arit.py | shipci/csympy | 6b5a1d7d8a3f9bbe0b983b78a44be90a70db0743 | [
"MIT"
] | null | null | null | csympy/tests/test_arit.py | shipci/csympy | 6b5a1d7d8a3f9bbe0b983b78a44be90a70db0743 | [
"MIT"
] | null | null | null | from nose.tools import raises
from csympy import Symbol, Integer, Add, Pow
def test_arit1():
x = Symbol("x")
y = Symbol("y")
e = x + y
e = x * y
e = Integer(2)*x
e = 2*x
e = x + 1
e = 1 + x
def test_arit2():
x = Symbol("x")
y = Symbol("y")
assert x+x == Integer(2) * x
assert x+x != Integer(3) * x
assert x+y == y+x
assert x+x == 2*x
assert x+x == x*2
assert x+x+x == 3*x
assert x+y+x+x == 3*x+y
assert not x+x == 3*x
assert not x+x != 2*x
@raises(TypeError)
def test_arit3():
    """str * Symbol is undefined and must raise TypeError."""
    x = Symbol("x")
    # Fix: the unused local `y = Symbol("y")` and the dead `e =` binding
    # were removed; only the raising expression matters here.
    "x"*x
def test_arit4():
    """Repeated symbol products collapse into powers."""
    a = Symbol("x")
    b = Symbol("y")
    assert a * a == a ** 2
    assert a * b == b * a
    assert a * a * a == a ** 3
    assert a * b * a * a == a ** 3 * b
def test_arit5():
    """expand() turns (x+y)**2 into a 3-term Add; unexpanded it stays a Pow."""
    a = Symbol("x")
    b = Symbol("y")
    squared = (a + b) ** 2
    expanded = squared.expand()
    assert squared == (a + b) ** 2
    assert squared != a ** 2 + 2 * a * b + b ** 2
    assert isinstance(squared, Pow)
    assert expanded == a ** 2 + 2 * a * b + b ** 2
    assert isinstance(expanded, Add)
def test_arit6():
    """String printing of simple expressions.

    Bug fix: the original ``assert str(e) == "x + y" or "y + x"`` parsed as
    ``(str(e) == "x + y") or "y + x"`` -- always true, because a non-empty
    string literal is truthy -- so those two assertions tested nothing.
    A membership test accepts either term ordering and actually asserts.
    """
    x = Symbol("x")
    y = Symbol("y")
    e = x + y
    assert str(e) in ("x + y", "y + x")
    e = x * y
    assert str(e) in ("x*y", "y*x")
    e = Integer(2)*x
    assert str(e) == "2x"
    e = 2*x
    assert str(e) == "2x"
def test_arit7():
    """Subtraction cancels and collects like terms."""
    p = Symbol("x")
    q = Symbol("y")
    assert p - p == 0
    assert p - q != q - p
    assert 2 * p - p == p
    assert 3 * p - p == 2 * p
    assert 2 * p * q - p * q == p * q
def test_arit8():
    """Power laws: same-base products add exponents; division cancels."""
    u = Symbol("x")
    v = Symbol("y")
    w = Symbol("z")
    assert u ** v * u ** u == u ** (u + v)
    assert u ** v * u ** u * u ** w == u ** (u + v + w)
    assert u ** v - u ** v == 0
    assert u ** 2 / u == u
    assert v * u ** 2 / (u * v) == u
    assert (2 * u ** 3 * v ** 2 * w) ** 3 / 8 == u ** 9 * v ** 6 * w ** 3
    assert (2 * v ** (-2 * u ** 2)) * (3 * v ** (2 * u ** 2)) == 6
def test_expand1():
    """expand() of squared binomials, including rational coefficients."""
    x = Symbol("x")
    y = Symbol("y")
    # Fix: the unused local `z = Symbol("z")` was removed (no assertion
    # below references it).
    assert ((2*x + y)**2).expand() == 4*x**2 + 4*x*y + y**2
    assert (x**2)**3 == x**6
    assert ((2*x**2 + 3*y)**2).expand() == 4*x**4 + 12*x**2*y + 9*y**2
    assert ((2*x/3 + y/4)**2).expand() == 4*x**2/9 + x*y/3 + y**2/16
def test_arit9():
    """Reciprocals compare structurally, not by identity."""
    m = Symbol("x")
    n = Symbol("y")
    assert 1 / m == 1 / m
    assert 1 / m != 1 / n
def test_expand2():
    """(1/(y*z) - y*z) * y*z expands to 1 - (y*z)**2."""
    p = Symbol("y")
    q = Symbol("z")
    prod = p * q
    assert ((1 / prod - prod) * prod).expand() == 1 - prod ** 2
def test_expand3():
    """(1/(x*y) - x*y + 2)(1 + x*y) expands to 3 + 1/(x*y) + x*y - (x*y)**2."""
    s = Symbol("x")
    t = Symbol("y")
    st = s * t
    assert ((1 / st - st + 2) * (1 + st)).expand() == 3 + 1 / st + st - st ** 2
| 21.938053 | 79 | 0.449375 | from nose.tools import raises
from csympy import Symbol, Integer, Add, Pow
def test_arit1():
x = Symbol("x")
y = Symbol("y")
e = x + y
e = x * y
e = Integer(2)*x
e = 2*x
e = x + 1
e = 1 + x
def test_arit2():
x = Symbol("x")
y = Symbol("y")
assert x+x == Integer(2) * x
assert x+x != Integer(3) * x
assert x+y == y+x
assert x+x == 2*x
assert x+x == x*2
assert x+x+x == 3*x
assert x+y+x+x == 3*x+y
assert not x+x == 3*x
assert not x+x != 2*x
@raises(TypeError)
def test_arit3():
x = Symbol("x")
y = Symbol("y")
e = "x"*x
def test_arit4():
x = Symbol("x")
y = Symbol("y")
assert x*x == x**2
assert x*y == y*x
assert x*x*x == x**3
assert x*y*x*x == x**3*y
def test_arit5():
x = Symbol("x")
y = Symbol("y")
e = (x+y)**2
f = e.expand()
assert e == (x+y)**2
assert e != x**2 + 2*x*y + y**2
assert isinstance(e, Pow)
assert f == x**2 + 2*x*y + y**2
assert isinstance(f, Add)
def test_arit6():
x = Symbol("x")
y = Symbol("y")
e = x + y
assert str(e) == "x + y" or "y + x"
e = x * y
assert str(e) == "x*y" or "y*x"
e = Integer(2)*x
assert str(e) == "2x"
e = 2*x
assert str(e) == "2x"
def test_arit7():
x = Symbol("x")
y = Symbol("y")
assert x - x == 0
assert x - y != y - x
assert 2*x - x == x
assert 3*x - x == 2*x
assert 2*x*y - x*y == x*y
def test_arit8():
x = Symbol("x")
y = Symbol("y")
z = Symbol("z")
assert x**y * x**x == x**(x+y)
assert x**y * x**x * x**z == x**(x+y+z)
assert x**y - x**y == 0
assert x**2 / x == x
assert y*x**2 / (x*y) == x
assert (2 * x**3 * y**2 * z)**3 / 8 == x**9 * y**6 * z**3
assert (2*y**(-2*x**2)) * (3*y**(2*x**2)) == 6
def test_expand1():
x = Symbol("x")
y = Symbol("y")
z = Symbol("z")
assert ((2*x+y)**2).expand() == 4*x**2 + 4*x*y + y**2
assert (x**2)**3 == x**6
assert ((2*x**2+3*y)**2).expand() == 4*x**4 + 12*x**2*y + 9*y**2
assert ((2*x/3+y/4)**2).expand() == 4*x**2/9 + x*y/3 + y**2/16
def test_arit9():
x = Symbol("x")
y = Symbol("y")
assert 1/x == 1/x
assert 1/x != 1/y
def test_expand2():
y = Symbol("y")
z = Symbol("z")
assert ((1/(y*z) - y*z)*y*z).expand() == 1-(y*z)**2
def test_expand3():
x = Symbol("x")
y = Symbol("y")
assert ((1/(x*y) - x*y+2)*(1+x*y)).expand() == 3 + 1/(x*y) + x*y - (x*y)**2
| true | true |
f720db2bca4a842dab5f8a8604fb53fae21bea7f | 2,309 | py | Python | epytope/Data/pssms/smmpmbec/mat/B_07_02_9.py | christopher-mohr/epytope | 8ac9fe52c0b263bdb03235a5a6dffcb72012a4fd | [
"BSD-3-Clause"
] | 7 | 2021-02-01T18:11:28.000Z | 2022-01-31T19:14:07.000Z | epytope/Data/pssms/smmpmbec/mat/B_07_02_9.py | christopher-mohr/epytope | 8ac9fe52c0b263bdb03235a5a6dffcb72012a4fd | [
"BSD-3-Clause"
] | 22 | 2021-01-02T15:25:23.000Z | 2022-03-14T11:32:53.000Z | epytope/Data/pssms/smmpmbec/mat/B_07_02_9.py | christopher-mohr/epytope | 8ac9fe52c0b263bdb03235a5a6dffcb72012a4fd | [
"BSD-3-Clause"
] | 4 | 2021-05-28T08:50:38.000Z | 2022-03-14T11:45:32.000Z | B_07_02_9 = {0: {'A': -0.332, 'C': 0.186, 'E': 0.544, 'D': 0.788, 'G': 0.214, 'F': -0.118, 'I': -0.161, 'H': -0.257, 'K': -0.244, 'M': -0.332, 'L': -0.105, 'N': 0.105, 'Q': 0.294, 'P': 0.58, 'S': -0.286, 'R': -0.62, 'T': 0.187, 'W': -0.114, 'V': -0.03, 'Y': -0.3}, 1: {'A': -0.604, 'C': 0.467, 'E': 0.468, 'D': 0.371, 'G': 0.128, 'F': 0.243, 'I': -0.242, 'H': 0.497, 'K': 0.244, 'M': -0.104, 'L': -0.131, 'N': 0.214, 'Q': 0.225, 'P': -2.038, 'S': 0.048, 'R': 0.36, 'T': -0.158, 'W': 0.467, 'V': -0.685, 'Y': 0.228}, 2: {'A': -0.307, 'C': 0.286, 'E': 0.256, 'D': 0.166, 'G': 0.217, 'F': 0.278, 'I': -0.015, 'H': -0.187, 'K': 0.072, 'M': -0.472, 'L': 0.03, 'N': 0.139, 'Q': -0.062, 'P': 0.399, 'S': -0.001, 'R': -0.829, 'T': 0.069, 'W': -0.071, 'V': 0.113, 'Y': -0.081}, 3: {'A': -0.077, 'C': 0.126, 'E': 0.127, 'D': 0.16, 'G': -0.091, 'F': 0.053, 'I': 0.146, 'H': -0.09, 'K': -0.069, 'M': -0.051, 'L': 0.038, 'N': 0.037, 'Q': -0.16, 'P': -0.047, 'S': -0.026, 'R': -0.081, 'T': 0.094, 'W': -0.175, 'V': 0.079, 'Y': 0.006}, 4: {'A': -0.129, 'C': -0.105, 'E': 0.445, 'D': 0.273, 'G': -0.12, 'F': 0.172, 'I': 0.218, 'H': -0.303, 'K': 0.061, 'M': -0.098, 'L': 0.138, 'N': -0.076, 'Q': 0.002, 'P': -0.135, 'S': -0.123, 'R': -0.267, 'T': -0.098, 'W': 0.058, 'V': 0.082, 'Y': 0.006}, 5: {'A': 0.025, 'C': 0.217, 'E': 0.317, 'D': 0.199, 'G': -0.291, 'F': -0.017, 'I': 0.113, 'H': -0.156, 'K': -0.035, 'M': -0.068, 'L': 0.119, 'N': -0.059, 'Q': 0.093, 'P': 0.185, 'S': -0.085, 'R': -0.472, 'T': -0.283, 'W': -0.109, 'V': 0.128, 'Y': 0.178}, 6: {'A': -0.233, 'C': 0.164, 'E': 0.335, 'D': 0.37, 'G': -0.26, 'F': 0.046, 'I': -0.003, 'H': -0.073, 'K': 0.132, 'M': -0.124, 'L': -0.129, 'N': -0.154, 'Q': -0.006, 'P': 0.15, 'S': -0.292, 'R': -0.299, 'T': -0.136, 'W': 0.376, 'V': -0.059, 'Y': 0.196}, 7: {'A': -0.654, 'C': 0.213, 'E': -0.076, 'D': 0.111, 'G': 0.084, 'F': 0.191, 'I': 0.094, 'H': 0.284, 'K': 0.362, 'M': 0.048, 'L': 0.063, 'N': 0.223, 
'Q': -0.058, 'P': -0.543, 'S': -0.449, 'R': 0.158, 'T': -0.193, 'W': 0.222, 'V': -0.299, 'Y': 0.22}, 8: {'A': -0.341, 'C': 0.351, 'E': 0.445, 'D': 0.805, 'G': 0.754, 'F': -0.779, 'I': -0.736, 'H': 0.007, 'K': 0.417, 'M': -1.109, 'L': -1.214, 'N': 0.775, 'Q': 0.172, 'P': 0.786, 'S': 0.332, 'R': 0.306, 'T': -0.204, 'W': -0.245, 'V': -0.699, 'Y': 0.178}, -1: {'con': 5.45316}} | 2,309 | 2,309 | 0.395409 | B_07_02_9 = {0: {'A': -0.332, 'C': 0.186, 'E': 0.544, 'D': 0.788, 'G': 0.214, 'F': -0.118, 'I': -0.161, 'H': -0.257, 'K': -0.244, 'M': -0.332, 'L': -0.105, 'N': 0.105, 'Q': 0.294, 'P': 0.58, 'S': -0.286, 'R': -0.62, 'T': 0.187, 'W': -0.114, 'V': -0.03, 'Y': -0.3}, 1: {'A': -0.604, 'C': 0.467, 'E': 0.468, 'D': 0.371, 'G': 0.128, 'F': 0.243, 'I': -0.242, 'H': 0.497, 'K': 0.244, 'M': -0.104, 'L': -0.131, 'N': 0.214, 'Q': 0.225, 'P': -2.038, 'S': 0.048, 'R': 0.36, 'T': -0.158, 'W': 0.467, 'V': -0.685, 'Y': 0.228}, 2: {'A': -0.307, 'C': 0.286, 'E': 0.256, 'D': 0.166, 'G': 0.217, 'F': 0.278, 'I': -0.015, 'H': -0.187, 'K': 0.072, 'M': -0.472, 'L': 0.03, 'N': 0.139, 'Q': -0.062, 'P': 0.399, 'S': -0.001, 'R': -0.829, 'T': 0.069, 'W': -0.071, 'V': 0.113, 'Y': -0.081}, 3: {'A': -0.077, 'C': 0.126, 'E': 0.127, 'D': 0.16, 'G': -0.091, 'F': 0.053, 'I': 0.146, 'H': -0.09, 'K': -0.069, 'M': -0.051, 'L': 0.038, 'N': 0.037, 'Q': -0.16, 'P': -0.047, 'S': -0.026, 'R': -0.081, 'T': 0.094, 'W': -0.175, 'V': 0.079, 'Y': 0.006}, 4: {'A': -0.129, 'C': -0.105, 'E': 0.445, 'D': 0.273, 'G': -0.12, 'F': 0.172, 'I': 0.218, 'H': -0.303, 'K': 0.061, 'M': -0.098, 'L': 0.138, 'N': -0.076, 'Q': 0.002, 'P': -0.135, 'S': -0.123, 'R': -0.267, 'T': -0.098, 'W': 0.058, 'V': 0.082, 'Y': 0.006}, 5: {'A': 0.025, 'C': 0.217, 'E': 0.317, 'D': 0.199, 'G': -0.291, 'F': -0.017, 'I': 0.113, 'H': -0.156, 'K': -0.035, 'M': -0.068, 'L': 0.119, 'N': -0.059, 'Q': 0.093, 'P': 0.185, 'S': -0.085, 'R': -0.472, 'T': -0.283, 'W': -0.109, 'V': 0.128, 'Y': 0.178}, 6: {'A': -0.233, 'C': 0.164, 'E': 0.335, 'D': 0.37, 'G': -0.26, 
'F': 0.046, 'I': -0.003, 'H': -0.073, 'K': 0.132, 'M': -0.124, 'L': -0.129, 'N': -0.154, 'Q': -0.006, 'P': 0.15, 'S': -0.292, 'R': -0.299, 'T': -0.136, 'W': 0.376, 'V': -0.059, 'Y': 0.196}, 7: {'A': -0.654, 'C': 0.213, 'E': -0.076, 'D': 0.111, 'G': 0.084, 'F': 0.191, 'I': 0.094, 'H': 0.284, 'K': 0.362, 'M': 0.048, 'L': 0.063, 'N': 0.223, 'Q': -0.058, 'P': -0.543, 'S': -0.449, 'R': 0.158, 'T': -0.193, 'W': 0.222, 'V': -0.299, 'Y': 0.22}, 8: {'A': -0.341, 'C': 0.351, 'E': 0.445, 'D': 0.805, 'G': 0.754, 'F': -0.779, 'I': -0.736, 'H': 0.007, 'K': 0.417, 'M': -1.109, 'L': -1.214, 'N': 0.775, 'Q': 0.172, 'P': 0.786, 'S': 0.332, 'R': 0.306, 'T': -0.204, 'W': -0.245, 'V': -0.699, 'Y': 0.178}, -1: {'con': 5.45316}} | true | true |
f720dbb912a33f6df1fac7c953a783e5d94e86e3 | 13,329 | py | Python | SourceControlMgmt/SourceControlMgmt.py | tigelane/ACI-Simplified-GUI-Management | f2c3d27375421a75de0f5b9bbdc645c380549f05 | [
"MIT"
] | null | null | null | SourceControlMgmt/SourceControlMgmt.py | tigelane/ACI-Simplified-GUI-Management | f2c3d27375421a75de0f5b9bbdc645c380549f05 | [
"MIT"
] | 14 | 2020-02-14T23:47:50.000Z | 2020-03-04T20:16:29.000Z | SourceControlMgmt/SourceControlMgmt.py | IGNW/devnet-create-2020 | 1eea17891a6cd1fedc265605a7b06378542762bb | [
"MIT"
] | 1 | 2021-07-06T14:42:55.000Z | 2021-07-06T14:42:55.000Z | from pathlib import Path
from datetime import datetime
import shutil
import subprocess
import yaml
import requests
class SCMError(Exception):
    """Common base for all source-control-management errors.

    Every concrete error below still derives (indirectly) from Exception,
    so existing ``except SCM...Error`` call sites keep working, while new
    code can catch any SCM failure with a single ``except SCMError``.
    """


class SCMCredentialValidationError(SCMError):
    """The supplied credentials do not grant access to the repository."""


class SCMCloneRepoError(SCMError):
    """Cloning the remote repository failed."""


class SCMCreateBranchError(SCMError):
    """A local branch could not be created."""


class SCMWriteFileError(SCMError):
    """A file could not be written into the local working copy."""


class SCMPushDataError(SCMError):
    """Staging, committing, or pushing changes to the remote failed."""


class SCMDeleteRepoError(SCMError):
    """The local clone could not be removed."""


class SCMGraphQLError(SCMError):
    """The GitHub GraphQL API returned an error payload."""
class SourceControlMgmt():
    """Thin wrapper around the ``git`` CLI and the GitHub GraphQL API.

    Typical flow: validate_scm_creds -> clone_private_repo ->
    create_new_branch_in_repo -> write_data_to_file_in_repo ->
    push_data_to_remote_repo -> create_git_hub_pull_request ->
    delete_local_copy_of_repo.

    ``password`` is used both for HTTPS git operations and as the bearer
    token for the GraphQL endpoint, so it should be a GitHub personal
    access token rather than an account password.
    """

    def __init__(self, username=None, password=None, friendly_name=None, email=None, repo_name=None, repo_owner=None):
        self.username = username
        self.password = password
        self.friendly_name = friendly_name
        self.email = email
        self.repo_path = None            # Path of the local clone (set by clone_private_repo)
        self.repo_name = repo_name
        self.filename = None             # last file written by write_data_to_file_in_repo
        self.branch_name = None          # branch created by create_new_branch_in_repo
        self.full_file_path = None
        self.relative_file_path = None
        self.existing_branches = {}      # branch name -> GitHub ref id (see get_all_current_branches)
        self.git_hub_graphql_api = 'https://api.github.com/graphql'
        self.github_repo_id = None
        # Repos default to being owned by the authenticated user.
        self.repo_owner = self.username if not repo_owner else repo_owner
        self.get_github_repo_id()

        # These attributes legitimately start out empty, so they are
        # excluded from the required-argument check below.
        exceptions = ['repo_path', 'filename', 'branch_name', 'full_file_path', 'relative_file_path', 'existing_branches']
        if not all(vars(self).values()):
            missing_values = [k for k, v in vars(self).items() if not v and k not in exceptions]
            if missing_values:
                raise TypeError(f"All values must have data. The following attributes are empty: {missing_values}")

    def validate_scm_creds(self):
        """Return True if the credentials can read the remote repo.

        Runs ``git ls-remote <url> HEAD``; a zero exit status plus a HEAD
        ref in the output proves read access.

        Raises:
            SCMCredentialValidationError: if the repo is unreachable with
                the supplied credentials.
        """
        results = subprocess.run(['git', 'ls-remote', f'https://{self.username}:{self.password}@github.com/{self.repo_owner}/{self.repo_name}/', 'HEAD'],
                                 stdout=subprocess.PIPE,
                                 stderr=subprocess.PIPE,
                                 check=False)
        if results.returncode == 0 and b"HEAD" in results.stdout:
            return True
        raise SCMCredentialValidationError("The supplied credentials do not provide access to the given repo")

    def clone_private_repo(self, directory=None):
        """Clone the repo into ``directory/<repo_name>``, replacing any
        existing clone at that location.

        Args:
            directory: parent directory for the clone (str or pathlib.Path).

        Raises:
            TypeError: if ``directory`` is missing or of an unsupported type.
            SCMCloneRepoError: if ``git clone`` fails.
        """
        if directory is None:
            raise TypeError('Must pass a value for the directory into this function')

        # Normalise to a pathlib.Path.
        if isinstance(directory, str):
            d = Path(directory)
        elif isinstance(directory, Path):
            d = directory
        else:
            # Previously any other type fell through and produced a
            # confusing NameError ('d' unbound); fail loudly instead.
            raise TypeError('directory must be a str or pathlib.Path')

        self.repo_path = d / self.repo_name
        if self.repo_path.exists() is True and self.repo_path.is_dir() is True:
            # Start from a clean slate so stale state cannot leak in.
            print('Directory exists and is being deleted')
            shutil.rmtree(self.repo_path)

        results = subprocess.run(['git', 'clone', f'https://{self.username}:{self.password}@github.com/{self.repo_owner}/{self.repo_name}/', f'{self.repo_path}'],
                                 stdout=subprocess.PIPE,
                                 stderr=subprocess.PIPE,
                                 check=False)

        # git clone reports progress on stderr, not stdout.
        expected_string = f"Cloning into '{self.repo_path}'...\n"
        encoded_expected_string = expected_string.encode()
        if (results.returncode == 0 and
                encoded_expected_string == results.stderr and
                self.repo_path.exists() is True and
                self.repo_path.is_dir() is True):
            return True
        raise SCMCloneRepoError("The repo could not be cloned")

    def create_new_branch_in_repo(self, branch_name=None):
        """Create and check out a new local branch (``git checkout -b``).

        Raises:
            TypeError: if no branch name is given.
            SCMCreateBranchError: if no repo is cloned or git fails.
        """
        if not branch_name:
            raise TypeError('You must pass a branch name into this function')
        self.branch_name = branch_name

        if self.repo_path and self.repo_path.exists() is True and self.repo_path.is_dir() is True:
            results = subprocess.run(["git", "checkout", "-b", branch_name], cwd=self.repo_path, stdout=subprocess.PIPE, stderr=subprocess.PIPE, check=False)
        else:
            raise SCMCreateBranchError('You must have a repo cloned before trying to create a branch')

        # git checkout also reports its status message on stderr.
        expected_results = f"Switched to a new branch '{self.branch_name}'\n"
        if results.returncode == 0 and expected_results.encode() == results.stderr:
            return True
        raise SCMCreateBranchError("A new branch was not able to be created")

    def write_data_to_file_in_repo(self, data, file_path=None, file_name=None, append_timestamp=False, as_yaml=False):
        """Write ``data`` to a brand-new file inside the cloned repo.

        Args:
            data: text to write, or a dict when ``as_yaml`` is True.
            file_path: directory inside the repo (must already exist).
            file_name: base file name; a ``YYYYmmdd-HHMMSS`` stamp is
                inserted before the extension when ``append_timestamp``.
            as_yaml: serialise ``data`` with ``yaml.dump`` instead of
                writing it verbatim.

        Raises:
            TypeError: on missing ``file_path`` or a non-dict YAML payload.
            SCMWriteFileError: if the repo/target dir is missing, the file
                already exists, or the write fails.
        """
        if file_path is None:
            raise TypeError('Must pass a string with the folder name of where the file will be stored into this function')
        if as_yaml and not isinstance(data, dict):
            raise TypeError('Must pass a dictionary to this function')

        now = datetime.now()
        str_now = now.strftime("%Y%m%d-%H%M%S")
        if append_timestamp:
            # Keep the extension (if any) after the timestamp.
            file_parts = file_name.split('.')
            if len(file_parts) > 1:
                self.filename = f"{file_parts[0]}-{str_now}.{file_parts[1]}"
            else:
                self.filename = f"{file_name}-{str_now}"
        else:
            self.filename = f"{file_name}"

        if self.repo_path and self.repo_path.exists() is True and self.repo_path.is_dir() is True:
            self.full_dir_path = self.repo_path / f"{file_path}"
            self.full_file_path = self.full_dir_path / self.filename
            self.relative_file_path = f'{file_path}/{self.filename}' if file_path else f'{self.filename}'
            if self.full_file_path.exists():
                raise SCMWriteFileError(f'This file already exists in the repo: {self.full_file_path}')
            elif not self.full_dir_path.exists():
                raise SCMWriteFileError('The path provided to save the file in does not exist')
            else:
                if as_yaml:
                    with open(self.full_file_path, 'w') as outfile:
                        yaml.dump(data, outfile, explicit_start=True, explicit_end=True, default_flow_style=False)
                else:
                    with open(self.full_file_path, 'w') as outfile:
                        outfile.write(data)
        else:
            raise SCMWriteFileError('You must have a repo cloned before trying to create a file')

        if self.full_file_path.exists():
            return True
        raise SCMWriteFileError('Was not able to write the file to the filesystem')

    def push_data_to_remote_repo(self):
        """Stage the written file, commit it, and push the branch.

        Raises:
            SCMPushDataError: if no repo has been cloned, or any of the
                add/commit/push steps fail.
        """
        if not (self.repo_path and self.repo_path.exists() is True and self.repo_path.is_dir() is True):
            raise SCMPushDataError("An undefined error occured while attempting to push the data")

        results = subprocess.run(["git", "add", f"{self.relative_file_path}"],
                                 cwd=self.repo_path, stdout=subprocess.PIPE,
                                 stderr=subprocess.PIPE, check=False)
        if results.returncode != 0:
            raise SCMPushDataError(f"something bad happened while adding the file. returncode: {results.returncode} stderr: {results.stderr}")

        # Identity is supplied per-invocation so no global git config is needed.
        command = ["git", "-c", f"user.name='{self.username}'", "-c", f"user.email='{self.email}'", "commit", "-m", "Adding file to repo from python"]
        results = subprocess.run(command, cwd=self.repo_path, stdout=subprocess.PIPE, stderr=subprocess.PIPE, check=False)
        if results.returncode != 0:
            raise SCMPushDataError(f"something bad happened while commiting the changes. returncode: {results.returncode} stderr: {results.stderr}")

        dest = f'https://{self.username}:{self.password}@github.com/{self.repo_owner}/{self.repo_name}/'
        src = f'{self.branch_name}'
        results = subprocess.run(['git', 'push', dest, src], cwd=self.repo_path, stdout=subprocess.PIPE, stderr=subprocess.PIPE, check=False)
        if results.returncode != 0:
            # Do not echo ``dest`` verbatim -- it embeds the password.
            print('dest:', f'https://github.com/{self.repo_owner}/{self.repo_name}/')
            print('src:', src)
            raise SCMPushDataError(f"something bad happened while pushing the branch. "
                                   f"returncode: {results.returncode} stderr: {results.stderr} "
                                   f"repo: {self.repo_name} branch: {self.branch_name}")
        return True

    def delete_local_copy_of_repo(self):
        """Remove the local clone created by ``clone_private_repo``.

        Raises:
            SCMDeleteRepoError: if the directory cannot be removed.
        """
        try:
            shutil.rmtree(self.repo_path)
            return True
        except Exception as e:
            raise SCMDeleteRepoError(f"An error occured while attempting to delete the repo. {type(e)} {e}")

    def _gql_query(self, query=None, vars=None):
        """POST ``query`` (with ``vars`` as GraphQL variables) to GitHub.

        Note: the parameter name ``vars`` shadows the builtin but is kept
        for backward compatibility with existing keyword callers.

        Returns the decoded JSON response.

        Raises:
            TypeError: if no query is supplied.
            SCMGraphQLError: if the response is not JSON or carries a
                GraphQL error payload.
        """
        if query is None:
            raise TypeError("A GraphQL query is required to run this function")

        headers = {"Authorization": f"token {self.password}"}
        request = requests.post(self.git_hub_graphql_api, json={'query': query, 'variables': vars}, headers=headers)
        try:
            data = request.json()
        except ValueError as e:
            raise SCMGraphQLError(f"GitHub did not return JSON (HTTP status {request.status_code})") from e

        # Per the GraphQL spec, errors are reported in the TOP-LEVEL
        # 'errors' key. The previous code looked inside data['data'],
        # which is None when a query fails, so real errors crashed with
        # an AttributeError instead of raising SCMGraphQLError.
        if data.get('errors'):
            raise SCMGraphQLError(f"An error in GraphQL occured. See the following for more info: {data['errors']}")
        return data

    def get_github_repo_id(self):
        """Resolve and cache GitHub's internal node id for the repo."""
        query = """
        query RepoIDQuery($repo_name: String!, $owner: String!) {
            repository(name: $repo_name, owner: $owner) {
                id
            }
        }
        """
        variables = {
            "repo_name": self.repo_name,
            "owner": self.repo_owner
        }
        response = self._gql_query(query=query, vars=variables)
        self.github_repo_id = response['data']['repository']['id']

    def create_git_hub_pull_request(self, destination_branch=None, source_branch=None, title=None, body=None):
        """Open a pull request merging ``source_branch`` into ``destination_branch``.

        Returns the GraphQL response containing the new PR number and URL.

        Raises:
            TypeError: if either branch name is missing.
            SCMGraphQLError: if GitHub rejects the mutation.
        """
        if destination_branch is None or source_branch is None:
            raise TypeError("Must have a source and destination branch to create a Pull Request")
        mutation = """
        mutation MyMutation($repo_id: String!, $dest_branch: String!, $src_branch: String!, $title: String!, $body: String!) {
            __typename
            createPullRequest(input: {repositoryId: $repo_id,
                                      baseRefName: $dest_branch,
                                      headRefName: $src_branch,
                                      title: $title,
                                      body: $body}) {
                pullRequest {
                    number,
                    url
                }
            }
        }
        """
        variables = {
            "repo_id": self.github_repo_id,
            "dest_branch": destination_branch,
            "src_branch": source_branch,
            "title": title,
            "body": body
        }
        data = self._gql_query(query=mutation, vars=variables)
        return data

    def get_all_current_branches(self):
        """Cache the last 10 branch names and ref ids in ``existing_branches``."""
        query = """
        query BranchQuery($repo_name: String!, $owner: String!) {
            repository(name: $repo_name, owner: $owner) {
                name
                nameWithOwner
                refs(refPrefix: "refs/heads/", last: 10) {
                    totalCount
                    nodes {
                        id
                        name
                    }
                }
            }
        }
        """
        variables = {
            "owner": self.repo_owner,
            "repo_name": self.repo_name
        }
        data = self._gql_query(query=query, vars=variables)
        for ref in data['data']['repository']['refs']['nodes']:
            # 'ref_id' avoids shadowing the builtin ``id``.
            ref_id = ref['id']
            name = ref['name']
            self.existing_branches[name] = ref_id
| 38.082857 | 162 | 0.579038 | from pathlib import Path
from datetime import datetime
import shutil
import subprocess
import yaml
import requests
class SCMCredentialValidationError(Exception):
pass
class SCMCloneRepoError(Exception):
pass
class SCMCreateBranchError(Exception):
pass
class SCMWriteFileError(Exception):
pass
class SCMPushDataError(Exception):
pass
class SCMDeleteRepoError(Exception):
pass
class SCMGraphQLError(Exception):
pass
class SourceControlMgmt():
def __init__(self, username=None, password=None, friendly_name=None, email=None, repo_name=None, repo_owner=None):
self.username = username
self.password = password
self.friendly_name = friendly_name
self.email = email
self.repo_path = None
self.repo_name = repo_name
self.filename = None
self.branch_name = None
self.full_file_path = None
self.relative_file_path = None
self.existing_branches = {}
self.git_hub_graphql_api = 'https://api.github.com/graphql'
self.github_repo_id = None
self.repo_owner = self.username if not repo_owner else repo_owner
self.get_github_repo_id()
exceptions = ['repo_path', 'filename', 'branch_name', 'full_file_path', 'relative_file_path', 'existing_branches']
if not all(vars(self).values()):
missing_values = [k for k, v in vars(self).items() if not v and k not in exceptions]
if missing_values:
raise TypeError(f"All values must have data. The following attributes are empty: {missing_values}")
def validate_scm_creds(self):
results = subprocess.run(['git', 'ls-remote', f'https://{self.username}:{self.password}@github.com/{self.repo_owner}/{self.repo_name}/', 'HEAD'],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
check=False)
if results.returncode == 0 and b"HEAD" in results.stdout:
return True
raise SCMCredentialValidationError("The supplied credentials do not provide access to the given repo")
def clone_private_repo(self, directory=None):
if directory is None:
raise TypeError('Must pass a value for the directory into this function')
if isinstance(directory, str):
d = Path(directory)
elif isinstance(directory, Path):
d = directory
self.repo_path = d / self.repo_name
if self.repo_path.exists() is True and self.repo_path.is_dir() is True:
print('Directory exists and is being deleted')
shutil.rmtree(self.repo_path)
results = subprocess.run(['git', 'clone', f'https://{self.username}:{self.password}@github.com/{self.repo_owner}/{self.repo_name}/', f'{self.repo_path}'],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
check=False)
expected_string = f"Cloning into '{self.repo_path}'...\n"
encoded_expected_string = expected_string.encode()
if (results.returncode == 0 and
encoded_expected_string == results.stderr and
self.repo_path.exists() is True and
self.repo_path.is_dir() is True):
return True
else:
raise SCMCloneRepoError("The repo could not be cloned")
def create_new_branch_in_repo(self, branch_name=None):
if not branch_name:
raise TypeError('You must pass a branch name into this function')
else:
self.branch_name = branch_name
if self.repo_path and self.repo_path.exists() is True and self.repo_path.is_dir() is True:
results = subprocess.run(["git", "checkout", "-b", branch_name], cwd=self.repo_path, stdout=subprocess.PIPE, stderr=subprocess.PIPE, check=False)
else:
raise SCMCreateBranchError('You must have a repo cloned before trying to create a branch')
expected_results = f"Switched to a new branch '{self.branch_name}'\n"
if results.returncode == 0 and expected_results.encode() == results.stderr:
return True
else:
raise SCMCreateBranchError("A new branch was not able to be created")
def write_data_to_file_in_repo(self, data, file_path=None, file_name=None, append_timestamp=False, as_yaml=False):
if file_path is None:
raise TypeError('Must pass a string with the folder name of where the file will be stored into this function')
if as_yaml and not isinstance(data, dict):
raise TypeError('Must pass a dictionary to this function')
now = datetime.now()
str_now = now.strftime("%Y%m%d-%H%M%S")
if append_timestamp:
file_parts = file_name.split('.')
if len(file_parts) > 1:
self.filename = f"{file_parts[0]}-{str_now}.{file_parts[1]}"
else:
self.filename = f"{file_name}-{str_now}"
else:
self.filename = f"{file_name}"
if self.repo_path and self.repo_path.exists() is True and self.repo_path.is_dir() is True:
self.full_dir_path = self.repo_path / f"{file_path}"
self.full_file_path = self.full_dir_path / self.filename
self.relative_file_path = f'{file_path}/{self.filename}' if file_path else f'{self.filename}'
if self.full_file_path.exists():
raise SCMWriteFileError(f'This file already exists in the repo: {self.full_file_path}')
elif not self.full_dir_path.exists():
raise SCMWriteFileError('The path provided to save the file in does not exist')
else:
if as_yaml:
with open(self.full_file_path, 'w') as outfile:
yaml.dump(data, outfile, explicit_start=True, explicit_end=True, default_flow_style=False)
else:
with open(self.full_file_path, 'w') as outfile:
outfile.write(data)
else:
raise SCMWriteFileError('You must have a repo cloned before trying to create a file')
if self.full_file_path.exists():
return True
else:
raise SCMWriteFileError('Was not able to write the file to the filesystem')
def push_data_to_remote_repo(self):
if self.repo_path and self.repo_path.exists() is True and self.repo_path.is_dir() is True:
results = subprocess.run(["git", "add", f"{self.relative_file_path}"],
cwd=self.repo_path, stdout=subprocess.PIPE,
stderr=subprocess.PIPE, check=False)
if results.returncode != 0:
raise SCMPushDataError(f"something bad happened while adding the file. returncode: {results.returncode} stderr: {results.stderr}")
command = ["git", "-c", f"user.name='{self.username}'", "-c", f"user.email='{self.email}'", "commit", "-m", "Adding file to repo from python"]
results = subprocess.run(command, cwd=self.repo_path, stdout=subprocess.PIPE, stderr=subprocess.PIPE, check=False)
if results.returncode != 0:
raise SCMPushDataError(f"something bad happened while commiting the changes. returncode: {results.returncode} stderr: {results.stderr}")
dest = f'https://{self.username}:{self.password}@github.com/{self.repo_owner}/{self.repo_name}/'
src = f'{self.branch_name}'
results = subprocess.run(['git', 'push', dest, src], cwd=self.repo_path, stdout=subprocess.PIPE, stderr=subprocess.PIPE, check=False)
if results.returncode != 0:
print('dest:', dest)
print('src:', src)
raise SCMPushDataError(f"something bad happened while pushing the branch. "
f"returncode: {results.returncode} stderr: {results.stderr} "
f"repo: {self.repo_name} branch: {self.branch_name}")
else:
return True
else:
raise SCMPushDataError("An undefined error occured while attempting to push the data")
def delete_local_copy_of_repo(self):
try:
shutil.rmtree(self.repo_path)
return True
except Exception as e:
raise SCMDeleteRepoError(f"An error occured while attempting to delete the repo. {type(e)} {e}")
def _gql_query(self, query=None, vars=None):
if query is None:
raise TypeError("A GraphQL query is required to run this function")
headers = {"Authorization": f"token {self.password}"}
request = requests.post(self.git_hub_graphql_api, json={'query': query, 'variables': vars}, headers=headers)
try:
data = request.json()
if data['data'].get("errors"):
error = data['data']['errors']
raise SCMGraphQLError(f"An error in GraphQL occured. See the following for more info: {error}")
else:
return data
except Exception as e:
print(e)
print(type(e))
print(dir(e))
print(request)
raise
def get_github_repo_id(self):
query = """
query RepoIDQuery($repo_name: String!, $owner: String!) {
repository(name: $repo_name, owner: $owner) {
id
}
}
"""
variables = {
"repo_name": self.repo_name,
"owner": self.repo_owner
}
response = self._gql_query(query=query, vars=variables)
self.github_repo_id = response['data']['repository']['id']
def create_git_hub_pull_request(self, destination_branch=None, source_branch=None, title=None, body=None):
if destination_branch is None or source_branch is None:
raise TypeError("Must have a source and destination branch to create a Pull Request")
mutation = """
mutation MyMutation($repo_id: String!, $dest_branch: String!, $src_branch: String!, $title: String!, $body: String!) {
__typename
createPullRequest(input: {repositoryId: $repo_id,
baseRefName: $dest_branch,
headRefName: $src_branch,
title: $title,
body: $body}) {
pullRequest {
number,
url
}
}
}
"""
variables = {
"repo_id": self.github_repo_id,
"dest_branch": destination_branch,
"src_branch": source_branch,
"title": title,
"body": body
}
data = self._gql_query(query=mutation, vars=variables)
return data
def get_all_current_branches(self):
query = """
query BranchQuery($repo_name: String!, $owner: String!) {
repository(name: $repo_name, owner: $owner) {
name
nameWithOwner
refs(refPrefix: "refs/heads/", last: 10) {
totalCount
nodes {
id
name
}
}
}
}
"""
variables = {
"owner": self.repo_owner,
"repo_name": self.repo_name
}
data = self._gql_query(query=query, vars=variables)
for ref in data['data']['repository']['refs']['nodes']:
id = ref['id']
name = ref['name']
self.existing_branches[name] = id
| true | true |
f720dc83e899603cde1322429190880fb730dec1 | 682 | py | Python | recommendation/recommendation/apps/films/migrations/0003_auto_20200314_0357.py | WillionLei/recommendation | 49fd28a47574877a91458201b21ec2a80409bb5f | [
"MIT"
] | null | null | null | recommendation/recommendation/apps/films/migrations/0003_auto_20200314_0357.py | WillionLei/recommendation | 49fd28a47574877a91458201b21ec2a80409bb5f | [
"MIT"
] | null | null | null | recommendation/recommendation/apps/films/migrations/0003_auto_20200314_0357.py | WillionLei/recommendation | 49fd28a47574877a91458201b21ec2a80409bb5f | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# Generated by Django 1.11.11 on 2020-03-14 03:57
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Schema change for the films app: adds two columns to the existing
    # ``Film`` model introduced in migration 0002_film.
    dependencies = [
        ('films', '0002_film'),
    ]
    operations = [
        # 'charge': pricing tier -- 0 = free (免费), 1 = members (会员),
        # 2 = paid (付费); defaults to free, so existing rows need no backfill.
        migrations.AddField(
            model_name='film',
            name='charge',
            field=models.SmallIntegerField(choices=[(0, '免费'), (1, '会员'), (2, '付费')], default=0, verbose_name='费用'),
        ),
        # 'fcomment': optional free-text description (nullable, max 200 chars).
        migrations.AddField(
            model_name='film',
            name='fcomment',
            field=models.CharField(max_length=200, null=True, verbose_name='描述信息'),
        ),
    ]
| 26.230769 | 116 | 0.577713 |
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('films', '0002_film'),
]
operations = [
migrations.AddField(
model_name='film',
name='charge',
field=models.SmallIntegerField(choices=[(0, '免费'), (1, '会员'), (2, '付费')], default=0, verbose_name='费用'),
),
migrations.AddField(
model_name='film',
name='fcomment',
field=models.CharField(max_length=200, null=True, verbose_name='描述信息'),
),
]
| true | true |
f720dca24b37afd8444ce644acfa3b1e0c6ddc1c | 197 | py | Python | pola/tests/commands/test_send_ai_pics_stats.py | rodkiewicz/pola-backend | e26df1cea07b43c8b4272739234b7e78e2ce08c9 | [
"BSD-3-Clause"
] | 30 | 2015-08-13T01:05:36.000Z | 2022-01-22T03:02:50.000Z | pola/tests/commands/test_send_ai_pics_stats.py | rodkiewicz/pola-backend | e26df1cea07b43c8b4272739234b7e78e2ce08c9 | [
"BSD-3-Clause"
] | 1,428 | 2015-10-08T07:38:26.000Z | 2022-03-31T08:36:08.000Z | pola/tests/commands/test_send_ai_pics_stats.py | rodkiewicz/pola-backend | e26df1cea07b43c8b4272739234b7e78e2ce08c9 | [
"BSD-3-Clause"
] | 13 | 2015-12-27T22:35:25.000Z | 2022-02-01T15:55:58.000Z | from unittest import TestCase
from django.core.management import call_command
class SendAiPicsStatsTestCase(TestCase):
    """Smoke test: the ``send_ai_pics_stats`` management command is
    registered and runs to completion without raising."""
    def test_run_command(self):
        # Invoke the command exactly as ``manage.py send_ai_pics_stats``
        # would; any exception raised by the command fails the test.
        call_command('send_ai_pics_stats')
| 21.888889 | 47 | 0.796954 | from unittest import TestCase
from django.core.management import call_command
class SendAiPicsStatsTestCase(TestCase):
def test_run_command(self):
call_command('send_ai_pics_stats')
| true | true |
f720de11464a36f7cc26d40b9c9c173b3751a6c4 | 6,695 | py | Python | tests/kafkatest/tests/core/fetch_from_follower_test.py | heyingquan13/kafka | 620ada9888f82756d6ed0eabe96bb9b54518b378 | [
"Apache-2.0"
] | 35 | 2016-09-22T22:53:14.000Z | 2020-02-13T15:12:21.000Z | tests/kafkatest/tests/core/fetch_from_follower_test.py | heyingquan13/kafka | 620ada9888f82756d6ed0eabe96bb9b54518b378 | [
"Apache-2.0"
] | 27 | 2022-02-07T21:53:02.000Z | 2022-03-15T20:38:46.000Z | tests/kafkatest/tests/core/fetch_from_follower_test.py | heyingquan13/kafka | 620ada9888f82756d6ed0eabe96bb9b54518b378 | [
"Apache-2.0"
] | 88 | 2016-11-27T02:16:11.000Z | 2020-02-28T05:10:26.000Z | # Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import time
from collections import defaultdict
from ducktape.mark import matrix
from ducktape.mark.resource import cluster
from kafkatest.services.console_consumer import ConsoleConsumer
from kafkatest.services.kafka import KafkaService, quorum
from kafkatest.services.monitor.jmx import JmxTool
from kafkatest.services.verifiable_producer import VerifiableProducer
from kafkatest.services.zookeeper import ZookeeperService
from kafkatest.tests.produce_consume_validate import ProduceConsumeValidateTest
from kafkatest.utils import is_int
class FetchFromFollowerTest(ProduceConsumeValidateTest):
    """System test for rack-aware fetch-from-follower.

    Three brokers are started with distinct ``broker.rack`` values and the
    rack-aware replica selector; a consumer whose ``client.rack`` matches a
    follower's rack should be steered to that follower as its preferred
    read replica (verified via the consumer's JMX metrics).
    """
    # Broker-side selector that matches consumers to replicas by rack.
    RACK_AWARE_REPLICA_SELECTOR = "org.apache.kafka.common.replica.RackAwareReplicaSelector"
    # Consumer metadata refresh interval (ms); the test sleeps multiples of
    # this so the preferred-replica assignment has time to propagate.
    METADATA_MAX_AGE_MS = 3000
    def __init__(self, test_context):
        super(FetchFromFollowerTest, self).__init__(test_context=test_context)
        self.jmx_tool = JmxTool(test_context, jmx_poll_ms=100)
        self.topic = "test_topic"
        # ZooKeeper is only started when the test runs in ZK (non-KRaft) mode.
        self.zk = ZookeeperService(test_context, num_nodes=1) if quorum.for_test(test_context) == quorum.zk else None
        # Single partition replicated to all three brokers, each in its own rack.
        self.kafka = KafkaService(test_context,
                                  num_nodes=3,
                                  zk=self.zk,
                                  topics={
                                      self.topic: {
                                          "partitions": 1,
                                          "replication-factor": 3,
                                          "configs": {"min.insync.replicas": 1}},
                                  },
                                  server_prop_overrides=[
                                      ["replica.selector.class", self.RACK_AWARE_REPLICA_SELECTOR]
                                  ],
                                  per_node_server_prop_overrides={
                                      1: [("broker.rack", "rack-a")],
                                      2: [("broker.rack", "rack-b")],
                                      3: [("broker.rack", "rack-c")]
                                  },
                                  controller_num_nodes_override=1)
        self.producer_throughput = 1000
        self.num_producers = 1
        self.num_consumers = 1
    def min_cluster_size(self):
        # Reserve two extra nodes per producer/consumer on top of the base
        # requirement (factor used here; rationale inherited from the base class).
        return super(FetchFromFollowerTest, self).min_cluster_size() + self.num_producers * 2 + self.num_consumers * 2
    def setUp(self):
        if self.zk:
            self.zk.start()
        self.kafka.start()
    @cluster(num_nodes=9)
    @matrix(metadata_quorum=quorum.all_non_upgrade)
    def test_consumer_preferred_read_replica(self, metadata_quorum=quorum.zk):
        """
        This test starts up brokers with "broker.rack" and "replica.selector.class" configurations set. The replica
        selector is set to the rack-aware implementation. One of the brokers has a different rack than the other two.
        We then use a console consumer with the "client.rack" set to the same value as the differing broker. After
        producing some records, we verify that the client has been informed of the preferred replica and that all the
        records are properly consumed.
        """
        # Find the leader, configure consumer to be on a different rack
        leader_node = self.kafka.leader(self.topic, 0)
        leader_idx = self.kafka.idx(leader_node)
        # Pick a broker other than the leader and remember its rack
        # (broker 1 -> rack-a, broker 2 -> rack-b per the overrides above).
        non_leader_idx = 2 if leader_idx != 2 else 1
        non_leader_rack = "rack-b" if leader_idx != 2 else "rack-a"
        self.logger.debug("Leader %d %s" % (leader_idx, leader_node))
        self.logger.debug("Non-Leader %d %s" % (non_leader_idx, non_leader_rack))
        self.producer = VerifiableProducer(self.test_context, self.num_producers, self.kafka, self.topic,
                                           throughput=self.producer_throughput)
        # client.rack pins the consumer to the follower's rack so the
        # rack-aware selector should route its fetches to that follower.
        self.consumer = ConsoleConsumer(self.test_context, self.num_consumers, self.kafka, self.topic,
                                        client_id="console-consumer", group_id="test-consumer-group-1",
                                        consumer_timeout_ms=60000, message_validator=is_int,
                                        consumer_properties={"client.rack": non_leader_rack, "metadata.max.age.ms": self.METADATA_MAX_AGE_MS})
        # Start up and let some data get produced
        self.start_producer_and_consumer()
        time.sleep(self.METADATA_MAX_AGE_MS * 2. / 1000)
        consumer_node = self.consumer.nodes[0]
        consumer_idx = self.consumer.idx(consumer_node)
        # Poll the consumer's fetch-manager metric that exposes which
        # replica it currently prefers for this topic-partition.
        read_replica_attribute = "preferred-read-replica"
        read_replica_mbean = "kafka.consumer:type=consumer-fetch-manager-metrics,client-id=%s,topic=%s,partition=%d" % \
                             ("console-consumer", self.topic, 0)
        self.jmx_tool.jmx_object_names = [read_replica_mbean]
        self.jmx_tool.jmx_attributes = [read_replica_attribute]
        self.jmx_tool.start_jmx_tool(consumer_idx, consumer_node)
        # Wait for at least one interval of "metadata.max.age.ms"
        time.sleep(self.METADATA_MAX_AGE_MS * 2. / 1000)
        # Read the JMX output
        self.jmx_tool.read_jmx_output(consumer_idx, consumer_node)
        # Count how often each broker id was reported as the preferred replica.
        all_captured_preferred_read_replicas = defaultdict(int)
        self.logger.debug(self.jmx_tool.jmx_stats)
        for ts, data in self.jmx_tool.jmx_stats[0].items():
            for k, v in data.items():
                if k.endswith(read_replica_attribute):
                    all_captured_preferred_read_replicas[int(v)] += 1
        self.logger.debug("Saw the following preferred read replicas %s",
                          dict(all_captured_preferred_read_replicas.items()))
        # The chosen follower must have shown up at least once.
        assert all_captured_preferred_read_replicas[non_leader_idx] > 0, \
            "Expected to see broker %d (%s) as a preferred replica" % (non_leader_idx, non_leader_rack)
        # Validate consumed messages
        self.stop_producer_and_consumer()
        self.validate()
| 49.592593 | 142 | 0.64003 |
import time
from collections import defaultdict
from ducktape.mark import matrix
from ducktape.mark.resource import cluster
from kafkatest.services.console_consumer import ConsoleConsumer
from kafkatest.services.kafka import KafkaService, quorum
from kafkatest.services.monitor.jmx import JmxTool
from kafkatest.services.verifiable_producer import VerifiableProducer
from kafkatest.services.zookeeper import ZookeeperService
from kafkatest.tests.produce_consume_validate import ProduceConsumeValidateTest
from kafkatest.utils import is_int
class FetchFromFollowerTest(ProduceConsumeValidateTest):
    """System test for KIP-392-style fetch-from-follower.

    Sets up a 3-broker cluster with one broker per rack and a rack-aware
    replica selector, then verifies that a consumer configured with a
    ``client.rack`` matching a non-leader replica's rack reports that
    non-leader broker as its preferred read replica (observed via JMX).
    """

    # Broker-side replica selector that picks the replica in the same rack
    # as the fetching client.
    RACK_AWARE_REPLICA_SELECTOR = "org.apache.kafka.common.replica.RackAwareReplicaSelector"
    # Short metadata refresh interval so the consumer learns about its
    # preferred read replica quickly; the test sleeps multiples of this.
    METADATA_MAX_AGE_MS = 3000

    def __init__(self, test_context):
        super(FetchFromFollowerTest, self).__init__(test_context=test_context)
        self.jmx_tool = JmxTool(test_context, jmx_poll_ms=100)
        self.topic = "test_topic"
        # ZooKeeper is only started when the quorum mode for this test is ZK.
        self.zk = ZookeeperService(test_context, num_nodes=1) if quorum.for_test(test_context) == quorum.zk else None
        # Three brokers, one per rack, hosting a single partition replicated
        # to all three brokers so every rack holds a replica.
        self.kafka = KafkaService(test_context,
                                  num_nodes=3,
                                  zk=self.zk,
                                  topics={
                                      self.topic: {
                                          "partitions": 1,
                                          "replication-factor": 3,
                                          "configs": {"min.insync.replicas": 1}},
                                  },
                                  server_prop_overrides=[
                                      ["replica.selector.class", self.RACK_AWARE_REPLICA_SELECTOR]
                                  ],
                                  per_node_server_prop_overrides={
                                      1: [("broker.rack", "rack-a")],
                                      2: [("broker.rack", "rack-b")],
                                      3: [("broker.rack", "rack-c")]
                                  },
                                  controller_num_nodes_override=1)
        self.producer_throughput = 1000
        self.num_producers = 1
        self.num_consumers = 1

    def min_cluster_size(self):
        # Each producer/consumer service is budgeted two nodes on top of the
        # base cluster requirement.
        return super(FetchFromFollowerTest, self).min_cluster_size() + self.num_producers * 2 + self.num_consumers * 2

    def setUp(self):
        if self.zk:
            self.zk.start()
        self.kafka.start()

    @cluster(num_nodes=9)
    @matrix(metadata_quorum=quorum.all_non_upgrade)
    def test_consumer_preferred_read_replica(self, metadata_quorum=quorum.zk):
        """Run produce/consume and assert the consumer preferred a follower.

        The consumer's ``client.rack`` is set to the rack of a broker that is
        NOT the partition leader, so the rack-aware selector should steer its
        fetches to that broker.
        """
        # Find the leader and pick a broker/rack that is guaranteed to be a
        # follower for partition 0.
        leader_node = self.kafka.leader(self.topic, 0)
        leader_idx = self.kafka.idx(leader_node)
        non_leader_idx = 2 if leader_idx != 2 else 1
        non_leader_rack = "rack-b" if leader_idx != 2 else "rack-a"
        self.logger.debug("Leader %d %s" % (leader_idx, leader_node))
        self.logger.debug("Non-Leader %d %s" % (non_leader_idx, non_leader_rack))
        self.producer = VerifiableProducer(self.test_context, self.num_producers, self.kafka, self.topic,
                                           throughput=self.producer_throughput)
        # The consumer declares the follower's rack and a short metadata max
        # age so it re-discovers the preferred read replica promptly.
        self.consumer = ConsoleConsumer(self.test_context, self.num_consumers, self.kafka, self.topic,
                                        client_id="console-consumer", group_id="test-consumer-group-1",
                                        consumer_timeout_ms=60000, message_validator=is_int,
                                        consumer_properties={"client.rack": non_leader_rack, "metadata.max.age.ms": self.METADATA_MAX_AGE_MS})
        self.start_producer_and_consumer()
        # Wait at least two metadata refresh intervals so the preferred read
        # replica has been resolved before attaching JMX.
        time.sleep(self.METADATA_MAX_AGE_MS * 2. / 1000)
        # Monitor the "preferred-read-replica" attribute on the consumer's
        # fetch-manager MBean for partition 0.
        consumer_node = self.consumer.nodes[0]
        consumer_idx = self.consumer.idx(consumer_node)
        read_replica_attribute = "preferred-read-replica"
        read_replica_mbean = "kafka.consumer:type=consumer-fetch-manager-metrics,client-id=%s,topic=%s,partition=%d" % \
                             ("console-consumer", self.topic, 0)
        self.jmx_tool.jmx_object_names = [read_replica_mbean]
        self.jmx_tool.jmx_attributes = [read_replica_attribute]
        self.jmx_tool.start_jmx_tool(consumer_idx, consumer_node)
        # Let the JMX tool sample across at least one more refresh interval.
        time.sleep(self.METADATA_MAX_AGE_MS * 2. / 1000)
        # Read the JMX output and count how often each broker id was reported
        # as the preferred read replica.
        self.jmx_tool.read_jmx_output(consumer_idx, consumer_node)
        all_captured_preferred_read_replicas = defaultdict(int)
        self.logger.debug(self.jmx_tool.jmx_stats)
        for ts, data in self.jmx_tool.jmx_stats[0].items():
            for k, v in data.items():
                if k.endswith(read_replica_attribute):
                    all_captured_preferred_read_replicas[int(v)] += 1
        self.logger.debug("Saw the following preferred read replicas %s",
                          dict(all_captured_preferred_read_replicas.items()))
        # The follower in the consumer's rack must have been preferred at
        # least once.
        assert all_captured_preferred_read_replicas[non_leader_idx] > 0, \
            "Expected to see broker %d (%s) as a preferred replica" % (non_leader_idx, non_leader_rack)
        # Validate consumed messages against what was produced.
        self.stop_producer_and_consumer()
        self.validate()
| true | true |
f720def8adc18a066172259ff0e5e88e433e15c0 | 39,628 | py | Python | python/dgl/distributed/graph_partition_book.py | hoangdzung/dgl | f7ce267164118a0526dd2f42f3baf799bb59d6b7 | [
"Apache-2.0"
] | 1 | 2021-08-18T11:54:42.000Z | 2021-08-18T11:54:42.000Z | python/dgl/distributed/graph_partition_book.py | amorehead/dgl | 738b75f41e5d3229e5ccda52d76e1297d7b0520d | [
"Apache-2.0"
] | null | null | null | python/dgl/distributed/graph_partition_book.py | amorehead/dgl | 738b75f41e5d3229e5ccda52d76e1297d7b0520d | [
"Apache-2.0"
] | 1 | 2021-11-28T09:16:55.000Z | 2021-11-28T09:16:55.000Z | """Define graph partition book."""
import pickle
from abc import ABC
import numpy as np
from .. import backend as F
from ..base import NID, EID
from .. import utils
from .shared_mem_utils import _to_shared_mem, _get_ndata_path, _get_edata_path, DTYPE_DICT
from .._ffi.ndarray import empty_shared_mem
from ..ndarray import exist_shared_mem_array
from .id_map import IdMap
def _move_metadata_to_shared_mem(graph_name, num_nodes, num_edges, part_id,
                                 num_partitions, node_map, edge_map, is_range_part):
    ''' Copy the metadata of a graph partition book into shared memory.

    Another process can later reconstruct the partition book from the shared
    tensors created here (see ``_get_shared_mem_metadata``).

    Parameters
    ----------
    graph_name : str
        The name of the graph; used to derive the shared-memory names.
    num_nodes : int
        The total number of nodes.
    num_edges : int
        The total number of edges.
    part_id : int
        The partition ID.
    num_partitions : int
        The number of physical partitions generated for the graph.
    node_map : Tensor
        Mapping information from node IDs to partitions. With range
        partitioning, this holds the serialized partition ranges.
    edge_map : Tensor
        Mapping information from edge IDs to partitions. With range
        partitioning, this holds the serialized partition ranges.
    is_range_part : bool
        Whether this is a range partition; needed to deserialize node_map
        and edge_map on the reader side.

    Returns
    -------
    (Tensor, Tensor, Tensor)
        The serialized metadata, node map and edge map, all backed by
        shared memory.
    '''
    # Pack the scalar metadata into one int64 tensor. The field order must
    # stay in sync with the unpacking in _get_shared_mem_metadata.
    meta_fields = [int(is_range_part), num_nodes, num_edges,
                   num_partitions, part_id, len(node_map), len(edge_map)]
    shared_meta = _to_shared_mem(F.tensor(meta_fields),
                                 _get_ndata_path(graph_name, 'meta'))
    shared_node_map = _to_shared_mem(node_map, _get_ndata_path(graph_name, 'node_map'))
    shared_edge_map = _to_shared_mem(edge_map, _get_edata_path(graph_name, 'edge_map'))
    return shared_meta, shared_node_map, shared_edge_map
def _get_shared_mem_metadata(graph_name):
    ''' Read the metadata of the graph back from shared memory.

    The server serializes the metadata of a graph and stores it in shared
    memory; the client deserializes it here to recover the graph metadata.

    Parameters
    ----------
    graph_name : str
        The name of the graph; used to derive the shared-memory names.

    Returns
    -------
    (bool, int, int, Tensor, Tensor)
        Whether it is range partitioning, the partition ID, the number of
        partitions, the serialized node map and the serialized edge map.
    '''
    dtype = DTYPE_DICT[F.int64]

    def _load(shmem_name, length):
        # Attach to an existing int64 shared-memory array and expose it as
        # a zero-copy backend tensor.
        arr = empty_shared_mem(shmem_name, False, (length,), dtype)
        return F.zerocopy_from_dlpack(arr.to_dlpack())

    # The metadata has 7 fields: is_range_part, num_nodes, num_edges,
    # num_partitions, part_id, node-map length and edge-map length.
    meta = F.asnumpy(_load(_get_ndata_path(graph_name, 'meta'), 7))
    is_range_part, _, _, num_partitions, part_id, node_map_len, edge_map_len = meta
    node_map = _load(_get_ndata_path(graph_name, 'node_map'), node_map_len)
    edge_map = _load(_get_edata_path(graph_name, 'edge_map'), edge_map_len)
    return is_range_part, part_id, num_partitions, node_map, edge_map
def get_shared_mem_partition_book(graph_name, graph_part):
    '''Reconstruct a graph partition book from shared memory.

    A graph partition book of a specific graph can be serialized to shared
    memory; this rebuilds the corresponding partition book object.

    Parameters
    ----------
    graph_name : str
        The name of the graph.
    graph_part : DGLGraph
        The graph structure of a partition.

    Returns
    -------
    GraphPartitionBook or None
        The partition book for this partition, or None if no metadata has
        been placed in shared memory for this graph.
    '''
    if not exist_shared_mem_array(_get_ndata_path(graph_name, 'meta')):
        return None
    is_range_part, part_id, num_parts, node_map_data, edge_map_data = \
            _get_shared_mem_metadata(graph_name)
    if is_range_part != 1:
        return BasicPartitionBook(part_id, num_parts, node_map_data, edge_map_data, graph_part)
    # Range partitioning: the maps were pickled, turned into a list of bytes
    # and stored as an int tensor in shared memory. Reverse that process.
    # Node/edge ID ranges are serialized in the order of type IDs, so the
    # enumeration index recovers each type's ID.
    node_ranges = pickle.loads(bytes(F.asnumpy(node_map_data).tolist()))
    node_map = {ntype: nid_range for ntype, nid_range in node_ranges}
    ntypes = {ntype: i for i, (ntype, _) in enumerate(node_ranges)}
    edge_ranges = pickle.loads(bytes(F.asnumpy(edge_map_data).tolist()))
    edge_map = {etype: eid_range for etype, eid_range in edge_ranges}
    etypes = {etype: i for i, (etype, _) in enumerate(edge_ranges)}
    return RangePartitionBook(part_id, num_parts, node_map, edge_map, ntypes, etypes)
class GraphPartitionBook(ABC):
    """ The base class of the graph partition book.

    For distributed training, a graph is partitioned into multiple parts and is loaded
    in multiple machines. The partition book contains all necessary information to locate
    nodes and edges in the cluster.

    The partition book contains various partition information, including

    * the number of partitions,
    * the partition ID that a node or edge belongs to,
    * the node IDs and the edge IDs that a partition has.
    * the local IDs of nodes and edges in a partition.

    Currently, there are two classes that implement ``GraphPartitionBook``:
    ``BasicGraphPartitionBook`` and ``RangePartitionBook``. ``BasicGraphPartitionBook``
    stores the mappings between every individual node/edge ID and partition ID on
    every machine, which usually consumes a lot of memory, while ``RangePartitionBook``
    calculates the mapping between node/edge IDs and partition IDs based on some small
    metadata because nodes/edges have been relabeled to have IDs in the same partition
    fall in a contiguous ID range. ``RangePartitionBook`` is usually a preferred way to
    provide mappings between node/edge IDs and partition IDs.

    A graph partition book is constructed automatically when a graph is partitioned.
    When a graph partition is loaded, a graph partition book is loaded as well.
    Please see :py:meth:`~dgl.distributed.partition.partition_graph`,
    :py:meth:`~dgl.distributed.partition.load_partition` and
    :py:meth:`~dgl.distributed.partition.load_partition_book` for more details.
    """

    # NOTE: the methods below are interface stubs whose bodies are only
    # docstrings (they are not marked @abstractmethod, so they silently
    # return None if a subclass forgets to override them). Concrete
    # subclasses are expected to override every one of them.

    def shared_memory(self, graph_name):
        """Move the partition book to shared memory.

        Parameters
        ----------
        graph_name : str
            The graph name. This name will be used to read the partition book from shared
            memory in another process.
        """

    def num_partitions(self):
        """Return the number of partitions.

        Returns
        -------
        int
            number of partitions
        """

    def metadata(self):
        """Return the partition meta data.

        The meta data includes:

        * The machine ID.
        * Number of nodes and edges of each partition.

        Examples
        --------
        >>> print(g.get_partition_book().metadata())
        >>> [{'machine_id' : 0, 'num_nodes' : 3000, 'num_edges' : 5000},
        ...  {'machine_id' : 1, 'num_nodes' : 2000, 'num_edges' : 4888},
        ...  ...]

        Returns
        -------
        list[dict[str, any]]
            Meta data of each partition.
        """

    def nid2partid(self, nids, ntype):
        """From global node IDs to partition IDs

        Parameters
        ----------
        nids : tensor
            global node IDs
        ntype : str
            The node type

        Returns
        -------
        tensor
            partition IDs
        """

    def eid2partid(self, eids, etype):
        """From global edge IDs to partition IDs

        Parameters
        ----------
        eids : tensor
            global edge IDs
        etype : str
            The edge type

        Returns
        -------
        tensor
            partition IDs
        """

    def partid2nids(self, partid, ntype):
        """From partition id to global node IDs

        Parameters
        ----------
        partid : int
            partition id
        ntype : str
            The node type

        Returns
        -------
        tensor
            node IDs
        """

    def partid2eids(self, partid, etype):
        """From partition id to global edge IDs

        Parameters
        ----------
        partid : int
            partition id
        etype : str
            The edge type

        Returns
        -------
        tensor
            edge IDs
        """

    def nid2localnid(self, nids, partid, ntype):
        """Get local node IDs within the given partition.

        Parameters
        ----------
        nids : tensor
            global node IDs
        partid : int
            partition ID
        ntype : str
            The node type

        Returns
        -------
        tensor
            local node IDs
        """

    def eid2localeid(self, eids, partid, etype):
        """Get the local edge ids within the given partition.

        Parameters
        ----------
        eids : tensor
            global edge IDs
        partid : int
            partition ID
        etype : str
            The edge type

        Returns
        -------
        tensor
            local edge IDs
        """

    @property
    def partid(self):
        """Get the current partition ID

        Return
        ------
        int
            The partition ID of current machine
        """

    @property
    def ntypes(self):
        """Get the list of node types
        """

    @property
    def etypes(self):
        """Get the list of edge types
        """

    def map_to_per_ntype(self, ids):
        """Map homogeneous node IDs to type-wise IDs and node types.

        Parameters
        ----------
        ids : tensor
            Homogeneous node IDs.

        Returns
        -------
        (tensor, tensor)
            node type IDs and type-wise node IDs.
        """

    def map_to_per_etype(self, ids):
        """Map homogeneous edge IDs to type-wise IDs and edge types.

        Parameters
        ----------
        ids : tensor
            Homogeneous edge IDs.

        Returns
        -------
        (tensor, tensor)
            edge type IDs and type-wise edge IDs.
        """

    def map_to_homo_nid(self, ids, ntype):
        """Map type-wise node IDs and type IDs to homogeneous node IDs.

        Parameters
        ----------
        ids : tensor
            Type-wise node Ids
        ntype : str
            node type

        Returns
        -------
        Tensor
            Homogeneous node IDs.
        """

    def map_to_homo_eid(self, ids, etype):
        """Map type-wise edge IDs and type IDs to homogeneous edge IDs.

        Parameters
        ----------
        ids : tensor
            Type-wise edge Ids
        etype : str
            edge type

        Returns
        -------
        Tensor
            Homogeneous edge IDs.
        """
class BasicPartitionBook(GraphPartitionBook):
    """This provides the most flexible way to store partition information.

    The partition book maintains the mapping of every single node ID and edge ID to
    partition IDs. This is very flexible at the cost of large memory consumption.
    On a large graph, the mapping consumes significant memory and this partition book
    is not recommended.

    This class only supports homogeneous graphs: the only node type is '_N'
    and the only edge type is '_E'.

    Parameters
    ----------
    part_id : int
        partition ID of current partition book
    num_parts : int
        number of total partitions
    node_map : tensor
        global node ID mapping to partition ID
    edge_map : tensor
        global edge ID mapping to partition ID
    part_graph : DGLGraph
        The graph partition structure.
    """
    def __init__(self, part_id, num_parts, node_map, edge_map, part_graph):
        assert part_id >= 0, 'part_id cannot be a negative number.'
        assert num_parts > 0, 'num_parts must be greater than zero.'
        self._part_id = int(part_id)
        self._num_partitions = int(num_parts)
        self._nid2partid = F.tensor(node_map)
        assert F.dtype(self._nid2partid) == F.int64, \
                'the node map must be stored in an integer array'
        self._eid2partid = F.tensor(edge_map)
        assert F.dtype(self._eid2partid) == F.int64, \
                'the edge map must be stored in an integer array'
        # Get meta data of the partition book: per-partition node/edge counts.
        # np.unique returns counts ordered by partition ID.
        self._partition_meta_data = []
        _, nid_count = np.unique(F.asnumpy(self._nid2partid), return_counts=True)
        _, eid_count = np.unique(F.asnumpy(self._eid2partid), return_counts=True)
        for partid in range(self._num_partitions):
            part_info = {}
            part_info['machine_id'] = partid
            part_info['num_nodes'] = int(nid_count[partid])
            part_info['num_edges'] = int(eid_count[partid])
            self._partition_meta_data.append(part_info)
        # Get partid2nids: argsort by partition ID groups the global node IDs
        # of each partition into one contiguous slice.
        self._partid2nids = []
        sorted_nid = F.tensor(np.argsort(F.asnumpy(self._nid2partid)))
        start = 0
        for offset in nid_count:
            part_nids = sorted_nid[start:start+offset]
            start += offset
            self._partid2nids.append(part_nids)
        # Get partid2eids, same scheme as partid2nids.
        self._partid2eids = []
        sorted_eid = F.tensor(np.argsort(F.asnumpy(self._eid2partid)))
        start = 0
        for offset in eid_count:
            part_eids = sorted_eid[start:start+offset]
            start += offset
            self._partid2eids.append(part_eids)
        # Get nidg2l: global-to-local node ID lookup table, only materialized
        # for the local partition.
        self._nidg2l = [None] * self._num_partitions
        global_id = part_graph.ndata[NID]
        max_global_id = np.amax(F.asnumpy(global_id))
        # TODO(chao): support int32 index
        g2l = F.zeros((max_global_id+1), F.int64, F.context(global_id))
        g2l = F.scatter_row(g2l, global_id, F.arange(0, len(global_id)))
        self._nidg2l[self._part_id] = g2l
        # Get eidg2l: global-to-local edge ID lookup table, local partition only.
        self._eidg2l = [None] * self._num_partitions
        global_id = part_graph.edata[EID]
        max_global_id = np.amax(F.asnumpy(global_id))
        # TODO(chao): support int32 index
        g2l = F.zeros((max_global_id+1), F.int64, F.context(global_id))
        g2l = F.scatter_row(g2l, global_id, F.arange(0, len(global_id)))
        self._eidg2l[self._part_id] = g2l
        # node size and edge size of the local partition
        self._edge_size = len(self.partid2eids(self._part_id))
        self._node_size = len(self.partid2nids(self._part_id))

    def shared_memory(self, graph_name):
        """Move data to shared memory.
        """
        # Keep references to the shared tensors so the shared memory stays alive.
        self._meta, self._nid2partid, self._eid2partid = _move_metadata_to_shared_mem(
            graph_name, self._num_nodes(), self._num_edges(), self._part_id, self._num_partitions,
            self._nid2partid, self._eid2partid, False)

    def num_partitions(self):
        """Return the number of partitions.
        """
        return self._num_partitions

    def metadata(self):
        """Return the partition meta data.
        """
        return self._partition_meta_data

    def _num_nodes(self, ntype='_N'):
        """ The total number of nodes
        """
        assert ntype == '_N', 'Base partition book only supports homogeneous graph.'
        return len(self._nid2partid)

    def _num_edges(self, etype='_E'):
        """ The total number of edges
        """
        assert etype == '_E', 'Base partition book only supports homogeneous graph.'
        return len(self._eid2partid)

    def map_to_per_ntype(self, ids):
        """Map global homogeneous node IDs to node type IDs.

        With a homogeneous graph there is only one node type, so the type IDs
        are all zero and the per-type IDs equal the input IDs.

        Returns
            type_ids, per_type_ids
        """
        return F.zeros((len(ids),), F.int32, F.cpu()), ids

    def map_to_per_etype(self, ids):
        """Map global homogeneous edge IDs to edge type IDs.

        With a homogeneous graph there is only one edge type, so the type IDs
        are all zero and the per-type IDs equal the input IDs.

        Returns
            type_ids, per_type_ids
        """
        return F.zeros((len(ids),), F.int32, F.cpu()), ids

    def map_to_homo_nid(self, ids, ntype):
        """Map per-node-type IDs to global node IDs in the homogeneous format.
        """
        assert ntype == '_N', 'Base partition book only supports homogeneous graph.'
        return ids

    def map_to_homo_eid(self, ids, etype):
        """Map per-edge-type IDs to global edge IDs in the homogeneous format.
        """
        assert etype == '_E', 'Base partition book only supports homogeneous graph.'
        return ids

    def nid2partid(self, nids, ntype='_N'):
        """From global node IDs to partition IDs
        """
        assert ntype == '_N', 'Base partition book only supports homogeneous graph.'
        return F.gather_row(self._nid2partid, nids)

    def eid2partid(self, eids, etype='_E'):
        """From global edge IDs to partition IDs
        """
        assert etype == '_E', 'Base partition book only supports homogeneous graph.'
        return F.gather_row(self._eid2partid, eids)

    def partid2nids(self, partid, ntype='_N'):
        """From partition id to global node IDs
        """
        assert ntype == '_N', 'Base partition book only supports homogeneous graph.'
        return self._partid2nids[partid]

    def partid2eids(self, partid, etype='_E'):
        """From partition id to global edge IDs
        """
        assert etype == '_E', 'Base partition book only supports homogeneous graph.'
        return self._partid2eids[partid]

    def nid2localnid(self, nids, partid, ntype='_N'):
        """Get local node IDs within the given partition.
        """
        assert ntype == '_N', 'Base partition book only supports homogeneous graph.'
        # The global-to-local table is only built for the local partition.
        if partid != self._part_id:
            raise RuntimeError('Now GraphPartitionBook does not support \
                getting remote tensor of nid2localnid.')
        return F.gather_row(self._nidg2l[partid], nids)

    def eid2localeid(self, eids, partid, etype='_E'):
        """Get the local edge ids within the given partition.
        """
        assert etype == '_E', 'Base partition book only supports homogeneous graph.'
        # The global-to-local table is only built for the local partition.
        if partid != self._part_id:
            raise RuntimeError('Now GraphPartitionBook does not support \
                getting remote tensor of eid2localeid.')
        return F.gather_row(self._eidg2l[partid], eids)

    @property
    def partid(self):
        """Get the current partition ID
        """
        return self._part_id

    @property
    def ntypes(self):
        """Get the list of node types
        """
        return ['_N']

    @property
    def etypes(self):
        """Get the list of edge types
        """
        return ['_E']
class RangePartitionBook(GraphPartitionBook):
    """This partition book supports more efficient storage of partition information.

    This partition book is used if the nodes and edges of a graph partition are assigned
    with contiguous IDs. It uses very small amount of memory to store the partition
    information.

    Parameters
    ----------
    part_id : int
        partition ID of current partition book
    num_parts : int
        number of total partitions
    node_map : dict[str, Tensor]
        Global node ID ranges within partitions for each node type. The key is the node type
        name in string. The value is a tensor of shape :math:`(K, 2)`, where :math:`K` is
        the number of partitions. Each row has two integers: the starting and the ending IDs
        for a particular node type in a partition. For example, all nodes of type ``"T"`` in
        partition ``i`` has ID range ``node_map["T"][i][0]`` to ``node_map["T"][i][1]``.
    edge_map : dict[str, Tensor]
        Global edge ID ranges within partitions for each edge type. The key is the edge type
        name in string. The value is a tensor of shape :math:`(K, 2)`, where :math:`K` is
        the number of partitions. Each row has two integers: the starting and the ending IDs
        for a particular edge type in a partition. For example, all edges of type ``"T"`` in
        partition ``i`` has ID range ``edge_map["T"][i][0]`` to ``edge_map["T"][i][1]``.
    ntypes : dict[str, int]
        map ntype strings to ntype IDs.
    etypes : dict[str, int]
        map etype strings to etype IDs.
    """
    def __init__(self, part_id, num_parts, node_map, edge_map, ntypes, etypes):
        assert part_id >= 0, 'part_id cannot be a negative number.'
        assert num_parts > 0, 'num_parts must be greater than zero.'
        self._partid = part_id
        self._num_partitions = num_parts
        # Invert the name->ID dicts into ID-indexed lists of type names.
        self._ntypes = [None] * len(ntypes)
        self._etypes = [None] * len(etypes)
        for ntype in ntypes:
            ntype_id = ntypes[ntype]
            self._ntypes[ntype_id] = ntype
        assert all([ntype is not None for ntype in self._ntypes]), \
                "The node types have invalid IDs."
        for etype in etypes:
            etype_id = etypes[etype]
            self._etypes[etype_id] = etype
        assert all([etype is not None for etype in self._etypes]), \
                "The edge types have invalid IDs."
        # This stores the node ID ranges for each node type in each partition.
        # The key is the node type, the value is a NumPy matrix with two columns, in which
        # each row indicates the start and the end of the node ID range in a partition.
        # The node IDs are global node IDs in the homogeneous representation.
        self._typed_nid_range = {}
        # This stores the node ID map for per-node-type IDs in each partition.
        # The key is the node type, the value is a NumPy vector which indicates
        # the last node ID in a partition.
        self._typed_max_node_ids = {}
        max_node_map = np.zeros((num_parts,), dtype=np.int64)
        for key in node_map:
            if not isinstance(node_map[key], np.ndarray):
                node_map[key] = F.asnumpy(node_map[key])
            assert node_map[key].shape == (num_parts, 2)
            self._typed_nid_range[key] = node_map[key]
            # This is used for per-node-type lookup.
            # cumsum of per-partition range sizes gives the exclusive upper
            # bound of each partition's per-type IDs.
            self._typed_max_node_ids[key] = np.cumsum(self._typed_nid_range[key][:, 1]
                                                      - self._typed_nid_range[key][:, 0])
            # This is used for homogeneous node ID lookup.
            max_node_map = np.maximum(self._typed_nid_range[key][:, 1], max_node_map)
        # This is a vector that indicates the last node ID in each partition.
        # The ID is the global ID in the homogeneous representation.
        self._max_node_ids = max_node_map
        # Similar to _typed_nid_range.
        self._typed_eid_range = {}
        # similar to _typed_max_node_ids.
        self._typed_max_edge_ids = {}
        max_edge_map = np.zeros((num_parts,), dtype=np.int64)
        for key in edge_map:
            if not isinstance(edge_map[key], np.ndarray):
                edge_map[key] = F.asnumpy(edge_map[key])
            assert edge_map[key].shape == (num_parts, 2)
            self._typed_eid_range[key] = edge_map[key]
            # This is used for per-edge-type lookup.
            self._typed_max_edge_ids[key] = np.cumsum(self._typed_eid_range[key][:, 1]
                                                      - self._typed_eid_range[key][:, 0])
            # This is used for homogeneous edge ID lookup.
            max_edge_map = np.maximum(self._typed_eid_range[key][:, 1], max_edge_map)
        # Similar to _max_node_ids
        self._max_edge_ids = max_edge_map
        # These two are map functions that map node/edge IDs to node/edge type IDs.
        self._nid_map = IdMap(self._typed_nid_range)
        self._eid_map = IdMap(self._typed_eid_range)
        # Get meta data of the partition book: node/edge counts per partition
        # derived from the homogeneous ID boundaries.
        self._partition_meta_data = []
        for partid in range(self._num_partitions):
            nrange_start = max_node_map[partid - 1] if partid > 0 else 0
            nrange_end = max_node_map[partid]
            num_nodes = nrange_end - nrange_start
            erange_start = max_edge_map[partid - 1] if partid > 0 else 0
            erange_end = max_edge_map[partid]
            num_edges = erange_end - erange_start
            part_info = {}
            part_info['machine_id'] = partid
            part_info['num_nodes'] = int(num_nodes)
            part_info['num_edges'] = int(num_edges)
            self._partition_meta_data.append(part_info)

    def shared_memory(self, graph_name):
        """Move data to shared memory.
        """
        # we need to store the nid ranges and eid ranges of different types in the order defined
        # by type IDs.
        nid_range = [None] * len(self.ntypes)
        for i, ntype in enumerate(self.ntypes):
            nid_range[i] = (ntype, self._typed_nid_range[ntype])
        # pickle the ranges and convert the byte string into a list of ints so
        # it can be stored as an int64 tensor in shared memory.
        nid_range_pickle = pickle.dumps(nid_range)
        nid_range_pickle = [e for e in nid_range_pickle]
        eid_range = [None] * len(self.etypes)
        for i, etype in enumerate(self.etypes):
            eid_range[i] = (etype, self._typed_eid_range[etype])
        eid_range_pickle = pickle.dumps(eid_range)
        eid_range_pickle = [e for e in eid_range_pickle]
        # Keep a reference to the shared tensors so the shared memory stays alive.
        self._meta = _move_metadata_to_shared_mem(graph_name,
                                                  0, # We don't need to provide the number of nodes
                                                  0, # We don't need to provide the number of edges
                                                  self._partid, self._num_partitions,
                                                  F.tensor(nid_range_pickle),
                                                  F.tensor(eid_range_pickle),
                                                  True)

    def num_partitions(self):
        """Return the number of partitions.
        """
        return self._num_partitions

    def _num_nodes(self, ntype='_N'):
        """ The total number of nodes
        """
        if ntype == '_N':
            return int(self._max_node_ids[-1])
        else:
            return int(self._typed_max_node_ids[ntype][-1])

    def _num_edges(self, etype='_E'):
        """ The total number of edges
        """
        if etype == '_E':
            return int(self._max_edge_ids[-1])
        else:
            return int(self._typed_max_edge_ids[etype][-1])

    def metadata(self):
        """Return the partition meta data.
        """
        return self._partition_meta_data

    def map_to_per_ntype(self, ids):
        """Map global homogeneous node IDs to node type IDs.

        Returns
            type_ids, per_type_ids
        """
        return self._nid_map(ids)

    def map_to_per_etype(self, ids):
        """Map global homogeneous edge IDs to edge type IDs.

        Returns
            type_ids, per_type_ids
        """
        return self._eid_map(ids)

    def map_to_homo_nid(self, ids, ntype):
        """Map per-node-type IDs to global node IDs in the homogeneous format.
        """
        ids = utils.toindex(ids).tousertensor()
        partids = self.nid2partid(ids, ntype)
        # Distance from the end of the per-type range translates directly to
        # distance from the end of the homogeneous range in the same partition.
        typed_max_nids = F.zerocopy_from_numpy(self._typed_max_node_ids[ntype])
        end_diff = F.gather_row(typed_max_nids, partids) - ids
        typed_nid_range = F.zerocopy_from_numpy(self._typed_nid_range[ntype][:, 1])
        return F.gather_row(typed_nid_range, partids) - end_diff

    def map_to_homo_eid(self, ids, etype):
        """Map per-edge-type IDs to global edge IDs in the homogeneous format.
        """
        ids = utils.toindex(ids).tousertensor()
        partids = self.eid2partid(ids, etype)
        # Same end-distance translation as in map_to_homo_nid.
        typed_max_eids = F.zerocopy_from_numpy(self._typed_max_edge_ids[etype])
        end_diff = F.gather_row(typed_max_eids, partids) - ids
        typed_eid_range = F.zerocopy_from_numpy(self._typed_eid_range[etype][:, 1])
        return F.gather_row(typed_eid_range, partids) - end_diff

    def nid2partid(self, nids, ntype='_N'):
        """From global node IDs to partition IDs
        """
        # Binary search over the per-partition upper bounds.
        nids = utils.toindex(nids)
        if ntype == '_N':
            ret = np.searchsorted(self._max_node_ids, nids.tonumpy(), side='right')
        else:
            ret = np.searchsorted(self._typed_max_node_ids[ntype], nids.tonumpy(), side='right')
        ret = utils.toindex(ret)
        return ret.tousertensor()

    def eid2partid(self, eids, etype='_E'):
        """From global edge IDs to partition IDs
        """
        # Binary search over the per-partition upper bounds.
        eids = utils.toindex(eids)
        if etype == '_E':
            ret = np.searchsorted(self._max_edge_ids, eids.tonumpy(), side='right')
        else:
            ret = np.searchsorted(self._typed_max_edge_ids[etype], eids.tonumpy(), side='right')
        ret = utils.toindex(ret)
        return ret.tousertensor()

    def partid2nids(self, partid, ntype='_N'):
        """From partition ID to global node IDs
        """
        # TODO do we need to cache it?
        if ntype == '_N':
            start = self._max_node_ids[partid - 1] if partid > 0 else 0
            end = self._max_node_ids[partid]
            return F.arange(start, end)
        else:
            start = self._typed_max_node_ids[ntype][partid - 1] if partid > 0 else 0
            end = self._typed_max_node_ids[ntype][partid]
            return F.arange(start, end)

    def partid2eids(self, partid, etype='_E'):
        """From partition ID to global edge IDs
        """
        # TODO do we need to cache it?
        if etype == '_E':
            start = self._max_edge_ids[partid - 1] if partid > 0 else 0
            end = self._max_edge_ids[partid]
            return F.arange(start, end)
        else:
            start = self._typed_max_edge_ids[etype][partid - 1] if partid > 0 else 0
            end = self._typed_max_edge_ids[etype][partid]
            return F.arange(start, end)

    def nid2localnid(self, nids, partid, ntype='_N'):
        """Get local node IDs within the given partition.
        """
        if partid != self._partid:
            raise RuntimeError('Now RangePartitionBook does not support \
                getting remote tensor of nid2localnid.')
        nids = utils.toindex(nids)
        nids = nids.tousertensor()
        # Local IDs are just the offset from the partition's start boundary.
        if ntype == '_N':
            start = self._max_node_ids[partid - 1] if partid > 0 else 0
        else:
            start = self._typed_max_node_ids[ntype][partid - 1] if partid > 0 else 0
        return nids - int(start)

    def eid2localeid(self, eids, partid, etype='_E'):
        """Get the local edge IDs within the given partition.
        """
        if partid != self._partid:
            raise RuntimeError('Now RangePartitionBook does not support \
                getting remote tensor of eid2localeid.')
        eids = utils.toindex(eids)
        eids = eids.tousertensor()
        # Local IDs are just the offset from the partition's start boundary.
        if etype == '_E':
            start = self._max_edge_ids[partid - 1] if partid > 0 else 0
        else:
            start = self._typed_max_edge_ids[etype][partid - 1] if partid > 0 else 0
        return eids - int(start)

    @property
    def partid(self):
        """Get the current partition ID.
        """
        return self._partid

    @property
    def ntypes(self):
        """Get the list of node types
        """
        return self._ntypes

    @property
    def etypes(self):
        """Get the list of edge types
        """
        return self._etypes
NODE_PART_POLICY = 'node'
EDGE_PART_POLICY = 'edge'


class PartitionPolicy(object):
    """This defines a partition policy for a distributed tensor or distributed embedding.

    When DGL shards tensors and stores them in a cluster of machines, it requires
    partition policies that map rows of the tensors to machines in the cluster.

    Although an arbitrary partition policy can be defined, DGL currently supports
    two partition policies for mapping nodes and edges to machines. To define a partition
    policy from a graph partition book, users need to specify the policy name ('node' or 'edge').

    Parameters
    ----------
    policy_str : str
        Partition policy name, e.g., 'edge:_E' or 'node:_N'.
    partition_book : GraphPartitionBook
        A graph partition book
    """
    def __init__(self, policy_str, partition_book):
        splits = policy_str.split(':')
        if len(splits) == 1:
            # A bare 'node'/'edge' policy defaults to the homogeneous type.
            assert policy_str in (EDGE_PART_POLICY, NODE_PART_POLICY), \
                    'policy_str must contain \'edge\' or \'node\'.'
            if NODE_PART_POLICY == policy_str:
                policy_str = NODE_PART_POLICY + ":_N"
            else:
                policy_str = EDGE_PART_POLICY + ":_E"
        self._policy_str = policy_str
        self._part_id = partition_book.partid
        self._partition_book = partition_book

    @property
    def policy_str(self):
        """Get the policy name

        Returns
        -------
        str
            The name of the partition policy.
        """
        return self._policy_str

    @property
    def part_id(self):
        """Get partition ID

        Returns
        -------
        int
            The partition ID
        """
        return self._part_id

    @property
    def partition_book(self):
        """Get partition book

        Returns
        -------
        GraphPartitionBook
            The graph partition book
        """
        return self._partition_book

    def get_data_name(self, name):
        """Get HeteroDataName
        """
        # BUG FIX: the policy kind must be determined by the prefix, not by a
        # substring test; a node type whose name contains 'edge' (or an edge
        # type containing 'node') would otherwise be misclassified.
        is_node = self._policy_str.startswith(NODE_PART_POLICY)
        # Both 'node:' and 'edge:' prefixes are 5 characters long, so [5:]
        # strips the policy kind and leaves the type name.
        return HeteroDataName(is_node, self._policy_str[5:], name)

    def to_local(self, id_tensor):
        """Mapping global ID to local ID.

        Parameters
        ----------
        id_tensor : tensor
            Global ID tensor

        Return
        ------
        tensor
            local ID tensor
        """
        if self._policy_str.startswith(EDGE_PART_POLICY):
            return self._partition_book.eid2localeid(id_tensor, self._part_id, self._policy_str[5:])
        elif self._policy_str.startswith(NODE_PART_POLICY):
            return self._partition_book.nid2localnid(id_tensor, self._part_id, self._policy_str[5:])
        else:
            raise RuntimeError('Cannot support policy: %s ' % self._policy_str)

    def to_partid(self, id_tensor):
        """Mapping global ID to partition ID.

        Parameters
        ----------
        id_tensor : tensor
            Global ID tensor

        Return
        ------
        tensor
            partition ID
        """
        if self._policy_str.startswith(EDGE_PART_POLICY):
            return self._partition_book.eid2partid(id_tensor, self._policy_str[5:])
        elif self._policy_str.startswith(NODE_PART_POLICY):
            return self._partition_book.nid2partid(id_tensor, self._policy_str[5:])
        else:
            raise RuntimeError('Cannot support policy: %s ' % self._policy_str)

    def get_part_size(self):
        """Get data size of current partition.

        Returns
        -------
        int
            data size
        """
        if self._policy_str.startswith(EDGE_PART_POLICY):
            return len(self._partition_book.partid2eids(self._part_id, self._policy_str[5:]))
        elif self._policy_str.startswith(NODE_PART_POLICY):
            return len(self._partition_book.partid2nids(self._part_id, self._policy_str[5:]))
        else:
            raise RuntimeError('Cannot support policy: %s ' % self._policy_str)

    def get_size(self):
        """Get the full size of the data.

        Returns
        -------
        int
            data size
        """
        if self._policy_str.startswith(EDGE_PART_POLICY):
            return self._partition_book._num_edges(self._policy_str[5:])
        elif self._policy_str.startswith(NODE_PART_POLICY):
            return self._partition_book._num_nodes(self._policy_str[5:])
        else:
            raise RuntimeError('Cannot support policy: %s ' % self._policy_str)
class NodePartitionPolicy(PartitionPolicy):
    """Convenience :class:`PartitionPolicy` for node data of type *ntype*."""

    def __init__(self, partition_book, ntype='_N'):
        policy = '{}:{}'.format(NODE_PART_POLICY, ntype)
        super(NodePartitionPolicy, self).__init__(policy, partition_book)
class EdgePartitionPolicy(PartitionPolicy):
    """Convenience :class:`PartitionPolicy` for edge data of type *etype*."""

    def __init__(self, partition_book, etype='_E'):
        policy = '{}:{}'.format(EDGE_PART_POLICY, etype)
        super(EdgePartitionPolicy, self).__init__(policy, partition_book)
class HeteroDataName(object):
    """Unique name of a piece of node/edge data in a heterogeneous graph.

    The name combines three components: whether the data lives on nodes
    or on edges, the node/edge type it is attached to, and the
    user-visible name of the data.

    Parameters
    ----------
    is_node : bool
        True for node data, False for edge data.
    entity_type : str
        The node/edge type.
    data_name : str
        The name of the data itself.
    """
    def __init__(self, is_node, entity_type, data_name):
        prefix = NODE_PART_POLICY if is_node else EDGE_PART_POLICY
        self.policy_str = '{}:{}'.format(prefix, entity_type)
        self.data_name = data_name

    def is_node(self):
        """Return whether this names node data."""
        return NODE_PART_POLICY in self.policy_str

    def is_edge(self):
        """Return whether this names edge data."""
        return EDGE_PART_POLICY in self.policy_str

    def get_type(self):
        """Return the node/edge type.

        Only meaningful in a heterogeneous graph; a homogeneous graph
        uses '_N' for nodes and '_E' for edges.
        """
        return self.policy_str[5:]

    def get_name(self):
        """Return the plain data name."""
        return self.data_name

    def __str__(self):
        """Return the full name, used as the key in the KVStore."""
        return self.policy_str + ':' + self.data_name
def parse_hetero_data_name(name):
    """Parse a full data name into a :class:`HeteroDataName`.

    The name has three ':'-separated fields: the node/edge indicator,
    the node/edge type, and the plain data name.

    Parameters
    ----------
    name : str
        The full data name.

    Returns
    -------
    HeteroDataName
    """
    fields = name.split(':')
    assert len(fields) == 3, '{} is not a valid heterograph data name'.format(name)
    assert fields[0] in (NODE_PART_POLICY, EDGE_PART_POLICY), \
            '{} is not a valid heterograph data name'.format(name)
    return HeteroDataName(fields[0] == NODE_PART_POLICY, fields[1], fields[2])
| 35.540807 | 100 | 0.609266 |
import pickle
from abc import ABC
import numpy as np
from .. import backend as F
from ..base import NID, EID
from .. import utils
from .shared_mem_utils import _to_shared_mem, _get_ndata_path, _get_edata_path, DTYPE_DICT
from .._ffi.ndarray import empty_shared_mem
from ..ndarray import exist_shared_mem_array
from .id_map import IdMap
def _move_metadata_to_shared_mem(graph_name, num_nodes, num_edges, part_id,
                                 num_partitions, node_map, edge_map, is_range_part):
    """Move the partition-book metadata of *graph_name* into shared memory.

    Returns the shared-memory copies of (meta tensor, node_map, edge_map).
    The 7-element meta tensor layout must match _get_shared_mem_metadata().
    """
    meta = _to_shared_mem(F.tensor([int(is_range_part), num_nodes, num_edges,
                                    num_partitions, part_id,
                                    len(node_map), len(edge_map)]),
                          _get_ndata_path(graph_name, 'meta'))
    node_map = _to_shared_mem(node_map, _get_ndata_path(graph_name, 'node_map'))
    edge_map = _to_shared_mem(edge_map, _get_edata_path(graph_name, 'edge_map'))
    return meta, node_map, edge_map
def _get_shared_mem_metadata(graph_name):
    """Load the partition-book metadata of *graph_name* from shared memory.

    Counterpart of _move_metadata_to_shared_mem(); returns
    (is_range_part, part_id, num_partitions, node_map, edge_map).
    """
    # The meta array has a fixed layout of 7 int64 fields.
    shape = (7,)
    dtype = F.int64
    dtype = DTYPE_DICT[dtype]
    data = empty_shared_mem(_get_ndata_path(graph_name, 'meta'), False, shape, dtype)
    dlpack = data.to_dlpack()
    meta = F.asnumpy(F.zerocopy_from_dlpack(dlpack))
    is_range_part, _, _, num_partitions, part_id, node_map_len, edge_map_len = meta
    # Attach to the node/edge map arrays whose lengths the meta block records.
    data = empty_shared_mem(_get_ndata_path(graph_name, 'node_map'), False, (node_map_len,), dtype)
    dlpack = data.to_dlpack()
    node_map = F.zerocopy_from_dlpack(dlpack)
    data = empty_shared_mem(_get_edata_path(graph_name, 'edge_map'), False, (edge_map_len,), dtype)
    dlpack = data.to_dlpack()
    edge_map = F.zerocopy_from_dlpack(dlpack)
    return is_range_part, part_id, num_partitions, node_map, edge_map
def get_shared_mem_partition_book(graph_name, graph_part):
    """Reconstruct a graph partition book from shared memory.

    Returns None when no partition book for *graph_name* exists in shared
    memory; otherwise a RangePartitionBook or a BasicPartitionBook,
    depending on how the book was serialized.
    """
    if not exist_shared_mem_array(_get_ndata_path(graph_name, 'meta')):
        return None
    is_range_part, part_id, num_parts, node_map_data, edge_map_data = \
        _get_shared_mem_metadata(graph_name)
    if is_range_part == 1:
        # Range books pickle per-type ID ranges into int tensors; restore them.
        node_map = {}
        ntypes = {}
        node_map_data = pickle.loads(bytes(F.asnumpy(node_map_data).tolist()))
        for i, (ntype, nid_range) in enumerate(node_map_data):
            ntypes[ntype] = i
            node_map[ntype] = nid_range
        edge_map = {}
        etypes = {}
        edge_map_data = pickle.loads(bytes(F.asnumpy(edge_map_data).tolist()))
        for i, (etype, eid_range) in enumerate(edge_map_data):
            etypes[etype] = i
            edge_map[etype] = eid_range
        return RangePartitionBook(part_id, num_parts, node_map, edge_map, ntypes, etypes)
    else:
        return BasicPartitionBook(part_id, num_parts, node_map_data, edge_map_data, graph_part)
class GraphPartitionBook(ABC):
    """Base class of graph partition books.

    A partition book records how the nodes and edges of a graph are split
    across partitions and provides mappings between global IDs, partition
    IDs and partition-local IDs.  Subclasses implement the methods below.

    NOTE: the stripped source left every ``def`` without a body, which is
    a SyntaxError; docstring bodies are restored here.
    """
    def shared_memory(self, graph_name):
        """Move the partition book to shared memory under *graph_name*."""
    def num_partitions(self):
        """Return the number of partitions."""
    def metadata(self):
        """Return the partition metadata (one dict per partition)."""
    def nid2partid(self, nids, ntype):
        """Map global node IDs to partition IDs."""
    def eid2partid(self, eids, etype):
        """Map global edge IDs to partition IDs."""
    def partid2nids(self, partid, ntype):
        """Return the global node IDs that belong to partition *partid*."""
    def partid2eids(self, partid, etype):
        """Return the global edge IDs that belong to partition *partid*."""
    def nid2localnid(self, nids, partid, ntype):
        """Map global node IDs to local node IDs within *partid*."""
    def eid2localeid(self, eids, partid, etype):
        """Map global edge IDs to local edge IDs within *partid*."""
    @property
    def partid(self):
        """The partition ID of the local machine."""
    @property
    def ntypes(self):
        """The list of node types."""
    @property
    def etypes(self):
        """The list of edge types."""
    def map_to_per_ntype(self, ids):
        """Convert homogeneous node IDs to (type ID, per-type ID) pairs."""
    def map_to_per_etype(self, ids):
        """Convert homogeneous edge IDs to (type ID, per-type ID) pairs."""
    def map_to_homo_nid(self, ids, ntype):
        """Convert per-type node IDs of *ntype* to homogeneous node IDs."""
    def map_to_homo_eid(self, ids, etype):
        """Convert per-type edge IDs of *etype* to homogeneous edge IDs."""
class BasicPartitionBook(GraphPartitionBook):
    """Partition book backed by explicit per-ID partition maps.

    *node_map*/*edge_map* give, for every global node/edge ID, the ID of
    the partition that owns it.  Only IDs of the local partition
    (*part_id*) can be translated to local IDs.
    """
    def __init__(self, part_id, num_parts, node_map, edge_map, part_graph):
        assert part_id >= 0, 'part_id cannot be a negative number.'
        assert num_parts > 0, 'num_parts must be greater than zero.'
        self._part_id = int(part_id)
        self._num_partitions = int(num_parts)
        self._nid2partid = F.tensor(node_map)
        assert F.dtype(self._nid2partid) == F.int64, \
            'the node map must be stored in an integer array'
        self._eid2partid = F.tensor(edge_map)
        assert F.dtype(self._eid2partid) == F.int64, \
            'the edge map must be stored in an integer array'
        # Per-partition metadata: node/edge counts owned by each machine.
        self._partition_meta_data = []
        _, nid_count = np.unique(F.asnumpy(self._nid2partid), return_counts=True)
        _, eid_count = np.unique(F.asnumpy(self._eid2partid), return_counts=True)
        for partid in range(self._num_partitions):
            part_info = {}
            part_info['machine_id'] = partid
            part_info['num_nodes'] = int(nid_count[partid])
            part_info['num_edges'] = int(eid_count[partid])
            self._partition_meta_data.append(part_info)
        # Sort global IDs by owning partition so each partition's IDs form
        # a contiguous slice of the sorted array.
        self._partid2nids = []
        sorted_nid = F.tensor(np.argsort(F.asnumpy(self._nid2partid)))
        start = 0
        for offset in nid_count:
            part_nids = sorted_nid[start:start+offset]
            start += offset
            self._partid2nids.append(part_nids)
        self._partid2eids = []
        sorted_eid = F.tensor(np.argsort(F.asnumpy(self._eid2partid)))
        start = 0
        for offset in eid_count:
            part_eids = sorted_eid[start:start+offset]
            start += offset
            self._partid2eids.append(part_eids)
        # Global-to-local lookup tables, built only for the local partition.
        self._nidg2l = [None] * self._num_partitions
        global_id = part_graph.ndata[NID]
        max_global_id = np.amax(F.asnumpy(global_id))
        g2l = F.zeros((max_global_id+1), F.int64, F.context(global_id))
        g2l = F.scatter_row(g2l, global_id, F.arange(0, len(global_id)))
        self._nidg2l[self._part_id] = g2l
        self._eidg2l = [None] * self._num_partitions
        global_id = part_graph.edata[EID]
        max_global_id = np.amax(F.asnumpy(global_id))
        g2l = F.zeros((max_global_id+1), F.int64, F.context(global_id))
        g2l = F.scatter_row(g2l, global_id, F.arange(0, len(global_id)))
        self._eidg2l[self._part_id] = g2l
        self._edge_size = len(self.partid2eids(self._part_id))
        self._node_size = len(self.partid2nids(self._part_id))
    def shared_memory(self, graph_name):
        """Move the mapping tensors into shared memory under *graph_name*."""
        self._meta, self._nid2partid, self._eid2partid = _move_metadata_to_shared_mem(
            graph_name, self._num_nodes(), self._num_edges(), self._part_id, self._num_partitions,
            self._nid2partid, self._eid2partid, False)
    def num_partitions(self):
        """Return the number of partitions."""
        return self._num_partitions
    def metadata(self):
        """Return the per-partition metadata dicts."""
        return self._partition_meta_data
    def _num_nodes(self, ntype='_N'):
        """Return the total number of nodes in the whole graph."""
        assert ntype == '_N', 'Base partition book only supports homogeneous graph.'
        return len(self._nid2partid)
    def _num_edges(self, etype='_E'):
        """Return the total number of edges in the whole graph."""
        assert etype == '_E', 'Base partition book only supports homogeneous graph.'
        return len(self._eid2partid)
    def map_to_per_ntype(self, ids):
        """Return (type IDs, per-type IDs); trivial for a homogeneous graph."""
        return F.zeros((len(ids),), F.int32, F.cpu()), ids
    def map_to_per_etype(self, ids):
        """Return (type IDs, per-type IDs); trivial for a homogeneous graph."""
        return F.zeros((len(ids),), F.int32, F.cpu()), ids
    def map_to_homo_nid(self, ids, ntype):
        """Identity mapping; the graph is homogeneous."""
        assert ntype == '_N', 'Base partition book only supports homogeneous graph.'
        return ids
    def map_to_homo_eid(self, ids, etype):
        """Identity mapping; the graph is homogeneous."""
        assert etype == '_E', 'Base partition book only supports homogeneous graph.'
        return ids
    def nid2partid(self, nids, ntype='_N'):
        """Map global node IDs to the IDs of their owning partitions."""
        assert ntype == '_N', 'Base partition book only supports homogeneous graph.'
        return F.gather_row(self._nid2partid, nids)
    def eid2partid(self, eids, etype='_E'):
        """Map global edge IDs to the IDs of their owning partitions."""
        assert etype == '_E', 'Base partition book only supports homogeneous graph.'
        return F.gather_row(self._eid2partid, eids)
    def partid2nids(self, partid, ntype='_N'):
        """Return the global node IDs owned by partition *partid*."""
        assert ntype == '_N', 'Base partition book only supports homogeneous graph.'
        return self._partid2nids[partid]
    def partid2eids(self, partid, etype='_E'):
        """Return the global edge IDs owned by partition *partid*."""
        assert etype == '_E', 'Base partition book only supports homogeneous graph.'
        return self._partid2eids[partid]
    def nid2localnid(self, nids, partid, ntype='_N'):
        """Map global node IDs to local IDs; only the local partition is supported."""
        assert ntype == '_N', 'Base partition book only supports homogeneous graph.'
        if partid != self._part_id:
            raise RuntimeError('Now GraphPartitionBook does not support \
                getting remote tensor of nid2localnid.')
        return F.gather_row(self._nidg2l[partid], nids)
    def eid2localeid(self, eids, partid, etype='_E'):
        """Map global edge IDs to local IDs; only the local partition is supported."""
        assert etype == '_E', 'Base partition book only supports homogeneous graph.'
        if partid != self._part_id:
            raise RuntimeError('Now GraphPartitionBook does not support \
                getting remote tensor of eid2localeid.')
        return F.gather_row(self._eidg2l[partid], eids)
    @property
    def partid(self):
        """int: The local partition ID."""
        return self._part_id
    @property
    def ntypes(self):
        """list[str]: Node types; always ['_N'] for this book."""
        return ['_N']
    @property
    def etypes(self):
        """list[str]: Edge types; always ['_E'] for this book."""
        return ['_E']
class RangePartitionBook(GraphPartitionBook):
    """Partition book where each partition owns a contiguous ID range.

    *node_map*/*edge_map* map each node/edge type to a (num_parts, 2)
    array of [start, end) homogeneous ID ranges, one row per partition.
    *ntypes*/*etypes* map type names to integer type IDs.
    """
    def __init__(self, part_id, num_parts, node_map, edge_map, ntypes, etypes):
        assert part_id >= 0, 'part_id cannot be a negative number.'
        assert num_parts > 0, 'num_parts must be greater than zero.'
        self._partid = part_id
        self._num_partitions = num_parts
        # Invert the name->ID dicts into ID-indexed name lists.
        self._ntypes = [None] * len(ntypes)
        self._etypes = [None] * len(etypes)
        for ntype in ntypes:
            ntype_id = ntypes[ntype]
            self._ntypes[ntype_id] = ntype
        assert all([ntype is not None for ntype in self._ntypes]), \
            "The node types have invalid IDs."
        for etype in etypes:
            etype_id = etypes[etype]
            self._etypes[etype_id] = etype
        assert all([etype is not None for etype in self._etypes]), \
            "The edge types have invalid IDs."
        # Per-type ID ranges plus cumulative per-type counts for searchsorted.
        self._typed_nid_range = {}
        self._typed_max_node_ids = {}
        max_node_map = np.zeros((num_parts,), dtype=np.int64)
        for key in node_map:
            if not isinstance(node_map[key], np.ndarray):
                node_map[key] = F.asnumpy(node_map[key])
            assert node_map[key].shape == (num_parts, 2)
            self._typed_nid_range[key] = node_map[key]
            self._typed_max_node_ids[key] = np.cumsum(self._typed_nid_range[key][:, 1]
                                                      - self._typed_nid_range[key][:, 0])
            max_node_map = np.maximum(self._typed_nid_range[key][:, 1], max_node_map)
        self._max_node_ids = max_node_map
        self._typed_eid_range = {}
        self._typed_max_edge_ids = {}
        max_edge_map = np.zeros((num_parts,), dtype=np.int64)
        for key in edge_map:
            if not isinstance(edge_map[key], np.ndarray):
                edge_map[key] = F.asnumpy(edge_map[key])
            assert edge_map[key].shape == (num_parts, 2)
            self._typed_eid_range[key] = edge_map[key]
            self._typed_max_edge_ids[key] = np.cumsum(self._typed_eid_range[key][:, 1]
                                                      - self._typed_eid_range[key][:, 0])
            max_edge_map = np.maximum(self._typed_eid_range[key][:, 1], max_edge_map)
        self._max_edge_ids = max_edge_map
        self._nid_map = IdMap(self._typed_nid_range)
        self._eid_map = IdMap(self._typed_eid_range)
        # Per-partition node/edge counts derived from the range boundaries.
        self._partition_meta_data = []
        for partid in range(self._num_partitions):
            nrange_start = max_node_map[partid - 1] if partid > 0 else 0
            nrange_end = max_node_map[partid]
            num_nodes = nrange_end - nrange_start
            erange_start = max_edge_map[partid - 1] if partid > 0 else 0
            erange_end = max_edge_map[partid]
            num_edges = erange_end - erange_start
            part_info = {}
            part_info['machine_id'] = partid
            part_info['num_nodes'] = int(num_nodes)
            part_info['num_edges'] = int(num_edges)
            self._partition_meta_data.append(part_info)
    def shared_memory(self, graph_name):
        """Serialize the typed ranges and move them into shared memory."""
        # Pickle the (type, range) lists into byte lists so they can be
        # stored as int tensors in shared memory.
        nid_range = [None] * len(self.ntypes)
        for i, ntype in enumerate(self.ntypes):
            nid_range[i] = (ntype, self._typed_nid_range[ntype])
        nid_range_pickle = pickle.dumps(nid_range)
        nid_range_pickle = [e for e in nid_range_pickle]
        eid_range = [None] * len(self.etypes)
        for i, etype in enumerate(self.etypes):
            eid_range[i] = (etype, self._typed_eid_range[etype])
        eid_range_pickle = pickle.dumps(eid_range)
        eid_range_pickle = [e for e in eid_range_pickle]
        self._meta = _move_metadata_to_shared_mem(graph_name,
                                                  0,
                                                  0, # We don't need to provide the number of edges
                                                  self._partid, self._num_partitions,
                                                  F.tensor(nid_range_pickle),
                                                  F.tensor(eid_range_pickle),
                                                  True)
    def num_partitions(self):
        """Return the number of partitions."""
        return self._num_partitions
    def _num_nodes(self, ntype='_N'):
        """Return the number of nodes of *ntype* ('_N' means all nodes)."""
        if ntype == '_N':
            return int(self._max_node_ids[-1])
        else:
            return int(self._typed_max_node_ids[ntype][-1])
    def _num_edges(self, etype='_E'):
        """Return the number of edges of *etype* ('_E' means all edges)."""
        if etype == '_E':
            return int(self._max_edge_ids[-1])
        else:
            return int(self._typed_max_edge_ids[etype][-1])
    def metadata(self):
        """Return the per-partition metadata dicts."""
        return self._partition_meta_data
    def map_to_per_ntype(self, ids):
        """Convert homogeneous node IDs to (type IDs, per-type IDs)."""
        return self._nid_map(ids)
    def map_to_per_etype(self, ids):
        """Convert homogeneous edge IDs to (type IDs, per-type IDs)."""
        return self._eid_map(ids)
    def map_to_homo_nid(self, ids, ntype):
        """Convert per-type node IDs of *ntype* to homogeneous node IDs."""
        ids = utils.toindex(ids).tousertensor()
        partids = self.nid2partid(ids, ntype)
        # Offset from the end of the owning partition's per-type range,
        # re-applied to the end of that partition's homogeneous range.
        typed_max_nids = F.zerocopy_from_numpy(self._typed_max_node_ids[ntype])
        end_diff = F.gather_row(typed_max_nids, partids) - ids
        typed_nid_range = F.zerocopy_from_numpy(self._typed_nid_range[ntype][:, 1])
        return F.gather_row(typed_nid_range, partids) - end_diff
    def map_to_homo_eid(self, ids, etype):
        """Convert per-type edge IDs of *etype* to homogeneous edge IDs."""
        ids = utils.toindex(ids).tousertensor()
        partids = self.eid2partid(ids, etype)
        typed_max_eids = F.zerocopy_from_numpy(self._typed_max_edge_ids[etype])
        end_diff = F.gather_row(typed_max_eids, partids) - ids
        typed_eid_range = F.zerocopy_from_numpy(self._typed_eid_range[etype][:, 1])
        return F.gather_row(typed_eid_range, partids) - end_diff
    def nid2partid(self, nids, ntype='_N'):
        """Map node IDs to partition IDs via binary search on range ends."""
        nids = utils.toindex(nids)
        if ntype == '_N':
            ret = np.searchsorted(self._max_node_ids, nids.tonumpy(), side='right')
        else:
            ret = np.searchsorted(self._typed_max_node_ids[ntype], nids.tonumpy(), side='right')
        ret = utils.toindex(ret)
        return ret.tousertensor()
    def eid2partid(self, eids, etype='_E'):
        """Map edge IDs to partition IDs via binary search on range ends."""
        eids = utils.toindex(eids)
        if etype == '_E':
            ret = np.searchsorted(self._max_edge_ids, eids.tonumpy(), side='right')
        else:
            ret = np.searchsorted(self._typed_max_edge_ids[etype], eids.tonumpy(), side='right')
        ret = utils.toindex(ret)
        return ret.tousertensor()
    def partid2nids(self, partid, ntype='_N'):
        """Return the contiguous node-ID range owned by partition *partid*."""
        if ntype == '_N':
            start = self._max_node_ids[partid - 1] if partid > 0 else 0
            end = self._max_node_ids[partid]
            return F.arange(start, end)
        else:
            start = self._typed_max_node_ids[ntype][partid - 1] if partid > 0 else 0
            end = self._typed_max_node_ids[ntype][partid]
            return F.arange(start, end)
    def partid2eids(self, partid, etype='_E'):
        """Return the contiguous edge-ID range owned by partition *partid*."""
        if etype == '_E':
            start = self._max_edge_ids[partid - 1] if partid > 0 else 0
            end = self._max_edge_ids[partid]
            return F.arange(start, end)
        else:
            start = self._typed_max_edge_ids[etype][partid - 1] if partid > 0 else 0
            end = self._typed_max_edge_ids[etype][partid]
            return F.arange(start, end)
    def nid2localnid(self, nids, partid, ntype='_N'):
        """Map global node IDs to local IDs; only the local partition is supported."""
        if partid != self._partid:
            raise RuntimeError('Now RangePartitionBook does not support \
                getting remote tensor of nid2localnid.')
        nids = utils.toindex(nids)
        nids = nids.tousertensor()
        if ntype == '_N':
            start = self._max_node_ids[partid - 1] if partid > 0 else 0
        else:
            start = self._typed_max_node_ids[ntype][partid - 1] if partid > 0 else 0
        return nids - int(start)
    def eid2localeid(self, eids, partid, etype='_E'):
        """Map global edge IDs to local IDs; only the local partition is supported."""
        if partid != self._partid:
            raise RuntimeError('Now RangePartitionBook does not support \
                getting remote tensor of eid2localeid.')
        eids = utils.toindex(eids)
        eids = eids.tousertensor()
        if etype == '_E':
            start = self._max_edge_ids[partid - 1] if partid > 0 else 0
        else:
            start = self._typed_max_edge_ids[etype][partid - 1] if partid > 0 else 0
        return eids - int(start)
    @property
    def partid(self):
        """int: The local partition ID."""
        return self._partid
    @property
    def ntypes(self):
        """list[str]: Node type names indexed by type ID."""
        return self._ntypes
    @property
    def etypes(self):
        """list[str]: Edge type names indexed by type ID."""
        return self._etypes
NODE_PART_POLICY = 'node'
EDGE_PART_POLICY = 'edge'
class PartitionPolicy(object):
    """Maps rows of partitioned tensors to machines.

    The policy name is 'node:&lt;ntype&gt;' or 'edge:&lt;etype&gt;'; a bare 'node'
    or 'edge' is expanded to the default homogeneous type.
    """
    def __init__(self, policy_str, partition_book):
        splits = policy_str.split(':')
        if len(splits) == 1:
            # Bare policy name: append the default homogeneous entity type.
            assert policy_str in (EDGE_PART_POLICY, NODE_PART_POLICY), \
                'policy_str must contain \'edge\' or \'node\'.'
            if NODE_PART_POLICY == policy_str:
                policy_str = NODE_PART_POLICY + ":_N"
            else:
                policy_str = EDGE_PART_POLICY + ":_E"
        self._policy_str = policy_str
        self._part_id = partition_book.partid
        self._partition_book = partition_book
    @property
    def policy_str(self):
        """str: The name of the partition policy."""
        return self._policy_str
    @property
    def part_id(self):
        """int: The local partition ID."""
        return self._part_id
    @property
    def partition_book(self):
        """GraphPartitionBook: The backing partition book."""
        return self._partition_book
    def get_data_name(self, name):
        """Wrap *name* into a HeteroDataName under this policy."""
        is_node = NODE_PART_POLICY in self._policy_str
        # The entity type follows the 5-char 'node:'/'edge:' prefix.
        return HeteroDataName(is_node, self._policy_str[5:], name)
    def to_local(self, id_tensor):
        """Map global IDs to partition-local IDs."""
        if EDGE_PART_POLICY in self._policy_str:
            return self._partition_book.eid2localeid(id_tensor, self._part_id, self._policy_str[5:])
        elif NODE_PART_POLICY in self._policy_str:
            return self._partition_book.nid2localnid(id_tensor, self._part_id, self._policy_str[5:])
        else:
            raise RuntimeError('Cannot support policy: %s ' % self._policy_str)
    def to_partid(self, id_tensor):
        """Map global IDs to partition IDs."""
        if EDGE_PART_POLICY in self._policy_str:
            return self._partition_book.eid2partid(id_tensor, self._policy_str[5:])
        elif NODE_PART_POLICY in self._policy_str:
            return self._partition_book.nid2partid(id_tensor, self._policy_str[5:])
        else:
            raise RuntimeError('Cannot support policy: %s ' % self._policy_str)
    def get_part_size(self):
        """Return the data size of the local partition."""
        if EDGE_PART_POLICY in self._policy_str:
            return len(self._partition_book.partid2eids(self._part_id, self._policy_str[5:]))
        elif NODE_PART_POLICY in self._policy_str:
            return len(self._partition_book.partid2nids(self._part_id, self._policy_str[5:]))
        else:
            raise RuntimeError('Cannot support policy: %s ' % self._policy_str)
    def get_size(self):
        """Return the full data size across all partitions."""
        if EDGE_PART_POLICY in self._policy_str:
            return self._partition_book._num_edges(self._policy_str[5:])
        elif NODE_PART_POLICY in self._policy_str:
            return self._partition_book._num_nodes(self._policy_str[5:])
        else:
            raise RuntimeError('Cannot support policy: %s ' % self._policy_str)
class NodePartitionPolicy(PartitionPolicy):
    """Partition policy for node data of type *ntype*."""
    def __init__(self, partition_book, ntype='_N'):
        super(NodePartitionPolicy, self).__init__(NODE_PART_POLICY + ':' + ntype, partition_book)
class EdgePartitionPolicy(PartitionPolicy):
    """Partition policy for edge data of type *etype*."""
    def __init__(self, partition_book, etype='_E'):
        super(EdgePartitionPolicy, self).__init__(EDGE_PART_POLICY + ':' + etype, partition_book)
class HeteroDataName(object):
    """Unique name of node/edge data in a heterogeneous graph.

    Combines a node/edge indicator, the entity type and the data name.
    """
    def __init__(self, is_node, entity_type, data_name):
        self.policy_str = NODE_PART_POLICY if is_node else EDGE_PART_POLICY
        self.policy_str = self.policy_str + ':' + entity_type
        self.data_name = data_name
    def is_node(self):
        """Return whether this names node data."""
        return NODE_PART_POLICY in self.policy_str
    def is_edge(self):
        """Return whether this names edge data."""
        return EDGE_PART_POLICY in self.policy_str
    def get_type(self):
        """Return the node/edge type; '_N'/'_E' in a homogeneous graph."""
        return self.policy_str[5:]
    def get_name(self):
        """Return the plain data name."""
        return self.data_name
    def __str__(self):
        """Return the full name; used as the KVStore key."""
        return self.policy_str + ':' + self.data_name
def parse_hetero_data_name(name):
    """Parse a ':'-separated full data name into a HeteroDataName."""
    names = name.split(':')
    assert len(names) == 3, '{} is not a valid heterograph data name'.format(name)
    assert names[0] in (NODE_PART_POLICY, EDGE_PART_POLICY), \
        '{} is not a valid heterograph data name'.format(name)
    return HeteroDataName(names[0] == NODE_PART_POLICY, names[1], names[2])
| true | true |
f720df0b58abbc375a8a7a17d5d8da4f91638bcc | 53,237 | py | Python | ecl/tests/unit/test_resource.py | keiichi-hikita/eclsdk | c43afb982fd54eb1875cdc22d46044644d804c4a | [
"Apache-2.0"
] | 5 | 2017-04-07T06:23:04.000Z | 2019-11-19T00:52:34.000Z | ecl/tests/unit/test_resource.py | keiichi-hikita/eclsdk | c43afb982fd54eb1875cdc22d46044644d804c4a | [
"Apache-2.0"
] | 16 | 2018-09-12T11:14:40.000Z | 2021-04-19T09:02:44.000Z | ecl/tests/unit/test_resource.py | keiichi-hikita/eclsdk | c43afb982fd54eb1875cdc22d46044644d804c4a | [
"Apache-2.0"
] | 14 | 2017-05-11T14:26:26.000Z | 2021-07-14T14:00:06.000Z | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import json
import os
from keystoneauth1 import session
import mock
import requests
from testtools import matchers
from ecl import exceptions
from ecl import format
from ecl import resource
from ecl.tests.unit import base
from ecl import utils
fake_parent = 'robert'
fake_name = 'rey'
fake_id = 99
fake_attr1 = 'lana'
fake_attr2 = 'del'
fake_resource = 'fake'
fake_resources = 'fakes'
fake_arguments = {'parent_name': fake_parent}
fake_base_path = '/fakes/%(parent_name)s/data'
fake_path = '/fakes/rey/data'
fake_data = {'id': fake_id,
'enabled': True,
'name': fake_name,
'parent': fake_parent,
'attr1': fake_attr1,
'attr2': fake_attr2,
'status': None}
fake_body = {fake_resource: fake_data}
class FakeParent(resource.Resource):
    """Minimal parent resource whose identifying attribute is its name."""
    id_attribute = "name"
    name = resource.prop('name')
class FakeResource(resource.Resource):
    """Fully-featured fake resource used throughout these tests.

    Enables every CRUD operation and maps props onto the fake_* fixture
    values defined at module level.
    """
    resource_key = fake_resource
    resources_key = fake_resources
    base_path = fake_base_path
    allow_create = allow_retrieve = allow_update = True
    allow_delete = allow_list = allow_head = True
    enabled = resource.prop('enabled', type=format.BoolStr)
    name = resource.prop('name')
    parent = resource.prop('parent_name')
    first = resource.prop('attr1')
    second = resource.prop('attr2')
    third = resource.prop('attr3', alias='attr_three')
    status = resource.prop('status')
class FakeResourceNoKeys(FakeResource):
    """FakeResource variant without resource/resources body keys."""
    resource_key = None
    resources_key = None
class PropTests(base.TestCase):
    """Unit tests for the resource.prop descriptor."""
    def test_with_alias_and_type(self):
        class Test(resource.Resource):
            attr = resource.prop("attr1", alias="attr2", type=bool)
        t = Test(attrs={"attr2": 500})
        # Don't test with assertTrue because 500 evaluates to True.
        # Need to test that bool(500) happened and attr2 *is* True.
        self.assertIs(t.attr, True)
    def test_defaults(self):
        new_default = "new_default"
        class Test(resource.Resource):
            attr1 = resource.prop("attr1")
            attr2 = resource.prop("attr2", default=new_default)
        t = Test()
        self.assertIsNone(t.attr1)
        self.assertEqual(new_default, t.attr2)
        # When the default value is passed in, it is left untouched.
        # Check that attr2 is literally the same object we set as default.
        t.attr2 = new_default
        self.assertIs(new_default, t.attr2)
        not_default = 'not default'
        t2 = Test({'attr2': not_default})
        self.assertEqual(not_default, t2.attr2)
        # Assert that if the default is passed in, it overrides the previously
        # set value (bug #1425996)
        t2.attr2 = new_default
        self.assertEqual(new_default, t2.attr2)
    def test_get_without_instance(self):
        # Class-level access must not raise; it returns None.
        self.assertIsNone(FakeResource.name)
    def test_set_ValueError(self):
        class Test(resource.Resource):
            attr = resource.prop("attr", type=int)
        t = Test()
        def should_raise():
            t.attr = "this is not an int"
        self.assertThat(should_raise, matchers.raises(ValueError))
    def test_set_TypeError(self):
        class Type(object):
            def __init__(self):
                pass
        class Test(resource.Resource):
            attr = resource.prop("attr", type=Type)
        t = Test()
        def should_raise():
            t.attr = "this type takes no args"
        self.assertThat(should_raise, matchers.raises(TypeError))
    def test_resource_type(self):
        # Props typed as a Resource subclass coerce plain IDs into instances.
        class FakestResource(resource.Resource):
            shortstop = resource.prop("shortstop", type=FakeResource)
            third_base = resource.prop("third_base", type=FakeResource)
        sot = FakestResource()
        id1 = "Ernie Banks"
        id2 = "Ron Santo"
        sot.shortstop = id1
        sot.third_base = id2
        resource1 = FakeResource.new(id=id1)
        self.assertEqual(resource1, sot.shortstop)
        self.assertEqual(id1, sot.shortstop.id)
        self.assertEqual(FakeResource, type(sot.shortstop))
        resource2 = FakeResource.new(id=id2)
        self.assertEqual(resource2, sot.third_base)
        self.assertEqual(id2, sot.third_base.id)
        self.assertEqual(FakeResource, type(sot.third_base))
        sot2 = FakestResource()
        sot2.shortstop = resource1
        sot2.third_base = resource2
        self.assertEqual(resource1, sot2.shortstop)
        self.assertEqual(id1, sot2.shortstop.id)
        self.assertEqual(FakeResource, type(sot2.shortstop))
        self.assertEqual(resource2, sot2.third_base)
        self.assertEqual(id2, sot2.third_base.id)
        self.assertEqual(FakeResource, type(sot2.third_base))
        body = {
            "shortstop": id1,
            "third_base": id2
        }
        sot3 = FakestResource(body)
        self.assertEqual(FakeResource({"id": id1}), sot3.shortstop)
        self.assertEqual(FakeResource({"id": id2}), sot3.third_base)
    def test_set_alias_same_name(self):
        class Test(resource.Resource):
            attr = resource.prop("something", alias="attr")
        val = "hey"
        args = {"something": val}
        sot = Test(args)
        self.assertEqual(val, sot._attrs["something"])
        self.assertEqual(val, sot.attr)
    def test_property_is_none(self):
        # An explicit None must not be coerced by the prop's type.
        class Test(resource.Resource):
            attr = resource.prop("something", type=dict)
        args = {"something": None}
        sot = Test(args)
        self.assertIsNone(sot._attrs["something"])
        self.assertIsNone(sot.attr)
class HeaderTests(base.TestCase):
    """Unit tests for the resource.header descriptor."""
    class Test(resource.Resource):
        # Fake resource whose props map onto HTTP headers.
        base_path = "/ramones"
        service = "punk"
        allow_create = True
        allow_update = True
        hey = resource.header("vocals")
        ho = resource.header("guitar")
        letsgo = resource.header("bass")
    def test_get(self):
        val = "joey"
        args = {"vocals": val}
        sot = HeaderTests.Test({'headers': args})
        self.assertEqual(val, sot.hey)
        self.assertIsNone(sot.ho)
        self.assertIsNone(sot.letsgo)
    def test_set_new(self):
        args = {"vocals": "joey", "bass": "deedee"}
        sot = HeaderTests.Test({'headers': args})
        sot._reset_dirty()
        sot.ho = "johnny"
        self.assertEqual("johnny", sot.ho)
        self.assertTrue(sot.is_dirty)
    def test_set_old(self):
        args = {"vocals": "joey", "bass": "deedee"}
        sot = HeaderTests.Test({'headers': args})
        sot._reset_dirty()
        sot.letsgo = "cj"
        self.assertEqual("cj", sot.letsgo)
        self.assertTrue(sot.is_dirty)
    def test_set_brand_new(self):
        sot = HeaderTests.Test({'headers': {}})
        sot._reset_dirty()
        sot.ho = "johnny"
        self.assertEqual("johnny", sot.ho)
        self.assertTrue(sot.is_dirty)
        self.assertEqual({'headers': {"guitar": "johnny"}}, sot)
    def test_1428342(self):
        # Regression test: case-insensitive header dicts must not break gets.
        sot = HeaderTests.Test({'headers':
                                requests.structures.CaseInsensitiveDict()})
        self.assertIsNone(sot.hey)
    def test_create_update_headers(self):
        sot = HeaderTests.Test()
        sot._reset_dirty()
        sot.ho = "johnny"
        sot.letsgo = "deedee"
        response = mock.Mock()
        response_body = {'id': 1}
        response.json = mock.Mock(return_value=response_body)
        response.headers = None
        sess = mock.Mock()
        sess.post = mock.Mock(return_value=response)
        sess.put = mock.Mock(return_value=response)
        sot.create(sess)
        headers = {'guitar': 'johnny', 'bass': 'deedee'}
        sess.post.assert_called_with(HeaderTests.Test.base_path,
                                     endpoint_filter=HeaderTests.Test.service,
                                     headers=headers,
                                     json={})
        sot['id'] = 1
        sot.letsgo = "cj"
        headers = {'guitar': 'johnny', 'bass': 'cj'}
        sot.update(sess)
        sess.put.assert_called_with('ramones/1',
                                    endpoint_filter=HeaderTests.Test.service,
                                    headers=headers,
                                    json={})
class ResourceTests(base.TestCase):
def setUp(self):
super(ResourceTests, self).setUp()
self.session = mock.Mock(spec=session.Session)
self.session.get_filter = mock.Mock(return_value={})
    def assertCalledURL(self, method, url):
        """Assert the mocked *method* was last called with *url* as first arg."""
        # call_args gives a tuple of *args and tuple of **kwargs.
        # Check that the first arg in *args (the URL) has our url.
        self.assertEqual(method.call_args[0][0], url)
    def test_empty_id(self):
        """get() on a resource without an ID populates it from the body."""
        resp = mock.Mock()
        resp.json = mock.Mock(return_value=fake_body)
        self.session.get.return_value = resp
        obj = FakeResource.new(**fake_arguments)
        self.assertEqual(obj, obj.get(self.session))
        self.assertEqual(fake_id, obj.id)
        self.assertEqual(fake_name, obj['name'])
        self.assertEqual(fake_attr1, obj['attr1'])
        self.assertEqual(fake_attr2, obj['attr2'])
        self.assertEqual(fake_name, obj.name)
        self.assertEqual(fake_attr1, obj.first)
        self.assertEqual(fake_attr2, obj.second)
    def test_not_allowed(self):
        """Every CRUD entry point raises MethodNotSupported when disabled."""
        class Nope(resource.Resource):
            allow_create = allow_retrieve = allow_update = False
            allow_delete = allow_list = allow_head = False
        nope = Nope()
        def cant_create():
            nope.create_by_id(1, 2)
        def cant_retrieve():
            nope.get_data_by_id(1, 2)
        def cant_update():
            nope.update_by_id(1, 2, 3)
        def cant_delete():
            nope.delete_by_id(1, 2)
        def cant_list():
            # list() is a generator; it must be iterated to raise.
            for i in nope.list(1):
                pass
        def cant_head():
            nope.head_data_by_id(1, 2)
        self.assertThat(cant_create,
                        matchers.raises(exceptions.MethodNotSupported))
        self.assertThat(cant_retrieve,
                        matchers.raises(exceptions.MethodNotSupported))
        self.assertThat(cant_update,
                        matchers.raises(exceptions.MethodNotSupported))
        self.assertThat(cant_delete,
                        matchers.raises(exceptions.MethodNotSupported))
        self.assertThat(cant_list,
                        matchers.raises(exceptions.MethodNotSupported))
        self.assertThat(cant_head,
                        matchers.raises(exceptions.MethodNotSupported))
    def _test_create_by_id(self, key, response_value, response_body,
                           attrs, json_body, response_headers=None):
        """Exercise create_by_id with/without resource_id and path_args.

        POST is expected when no resource_id is given, PUT otherwise.
        """
        class FakeResource2(FakeResource):
            resource_key = key
            service = "my_service"
        response = mock.Mock()
        response.json = mock.Mock(return_value=response_body)
        response.headers = response_headers
        expected_resp = response_value.copy()
        if response_headers:
            expected_resp.update({'headers': response_headers})
        sess = mock.Mock()
        sess.put = mock.Mock(return_value=response)
        sess.post = mock.Mock(return_value=response)
        resp = FakeResource2.create_by_id(sess, attrs)
        self.assertEqual(expected_resp, resp)
        sess.post.assert_called_with(FakeResource2.base_path,
                                     endpoint_filter=FakeResource2.service,
                                     json=json_body)
        r_id = "my_id"
        resp = FakeResource2.create_by_id(sess, attrs, resource_id=r_id)
        self.assertEqual(response_value, resp)
        sess.put.assert_called_with(
            utils.urljoin(FakeResource2.base_path, r_id),
            endpoint_filter=FakeResource2.service,
            json=json_body)
        path_args = {"parent_name": "my_name"}
        resp = FakeResource2.create_by_id(sess, attrs, path_args=path_args)
        self.assertEqual(response_value, resp)
        sess.post.assert_called_with(FakeResource2.base_path % path_args,
                                     endpoint_filter=FakeResource2.service,
                                     json=json_body)
        resp = FakeResource2.create_by_id(sess, attrs, resource_id=r_id,
                                          path_args=path_args)
        self.assertEqual(response_value, resp)
        sess.put.assert_called_with(
            utils.urljoin(FakeResource2.base_path % path_args, r_id),
            endpoint_filter=FakeResource2.service,
            json=json_body)
def test_create_without_resource_key(self):
key = None
response_value = {"a": 1, "b": 2, "c": 3}
response_body = response_value
attrs = response_value
json_body = attrs
self._test_create_by_id(key, response_value, response_body,
attrs, json_body)
def test_create_with_response_headers(self):
key = None
response_value = {"a": 1, "b": 2, "c": 3}
response_body = response_value
response_headers = {'location': 'foo'}
attrs = response_value.copy()
json_body = attrs
self._test_create_by_id(key, response_value, response_body,
attrs, json_body,
response_headers=response_headers)
    def test_create_with_resource_key(self):
        key = "my_key"
        response_value = {"a": 1, "b": 2, "c": 3}
        response_body = {key: response_value}
        # NOTE(review): unlike test_update_with_resource_key, ``attrs`` here
        # is the already-keyed body, so ``json_body`` ends up nesting the key
        # twice ({key: {key: ...}}).  Presumably attrs should be
        # response_value -- confirm against create_by_id's wrapping behavior.
        attrs = response_body
        json_body = {key: attrs}
        self._test_create_by_id(key, response_value, response_body,
                                attrs, json_body)
    def _test_get_data_by_id(self, key, response_value, response_body):
        """Check get_data_by_id URL construction and body unwrapping.

        Covers both the plain member URL and the case where base_path
        contains %-style arguments filled from ``path_args``.
        """
        class FakeResource2(FakeResource):
            resource_key = key
            service = "my_service"
        response = mock.Mock()
        response.json = mock.Mock(return_value=response_body)
        sess = mock.Mock()
        sess.get = mock.Mock(return_value=response)
        r_id = "my_id"
        resp = FakeResource2.get_data_by_id(sess, resource_id=r_id)
        self.assertEqual(response_value, resp)
        sess.get.assert_called_with(
            utils.urljoin(FakeResource2.base_path, r_id),
            endpoint_filter=FakeResource2.service)
        # With path_args the base path is interpolated before joining the id.
        path_args = {"parent_name": "my_name"}
        resp = FakeResource2.get_data_by_id(sess, resource_id=r_id,
                                            path_args=path_args)
        self.assertEqual(response_value, resp)
        sess.get.assert_called_with(
            utils.urljoin(FakeResource2.base_path % path_args, r_id),
            endpoint_filter=FakeResource2.service)
def test_get_data_without_resource_key(self):
key = None
response_value = {"a": 1, "b": 2, "c": 3}
response_body = response_value
self._test_get_data_by_id(key, response_value, response_body)
def test_get_data_with_resource_key(self):
key = "my_key"
response_value = {"a": 1, "b": 2, "c": 3}
response_body = {key: response_value}
self._test_get_data_by_id(key, response_value, response_body)
    def _test_head_data_by_id(self, key, response_value):
        """Check head_data_by_id URL construction and header wrapping.

        HEAD has no body, so the result is the response headers wrapped
        under a 'headers' key; an empty Accept header is sent to suppress
        content negotiation.
        """
        class FakeResource2(FakeResource):
            resource_key = key
            service = "my_service"
        response = mock.Mock()
        response.headers = response_value
        sess = mock.Mock()
        sess.head = mock.Mock(return_value=response)
        r_id = "my_id"
        resp = FakeResource2.head_data_by_id(sess, resource_id=r_id)
        self.assertEqual({'headers': response_value}, resp)
        headers = {'Accept': ''}
        sess.head.assert_called_with(
            utils.urljoin(FakeResource2.base_path, r_id),
            endpoint_filter=FakeResource2.service,
            headers=headers)
        # Same check with path_args interpolated into base_path.
        path_args = {"parent_name": "my_name"}
        resp = FakeResource2.head_data_by_id(sess, resource_id=r_id,
                                             path_args=path_args)
        self.assertEqual({'headers': response_value}, resp)
        headers = {'Accept': ''}
        sess.head.assert_called_with(
            utils.urljoin(FakeResource2.base_path % path_args, r_id),
            endpoint_filter=FakeResource2.service,
            headers=headers)
def test_head_data_without_resource_key(self):
key = None
response_value = {"key1": "value1", "key2": "value2"}
self._test_head_data_by_id(key, response_value)
def test_head_data_with_resource_key(self):
key = "my_key"
response_value = {"key1": "value1", "key2": "value2"}
self._test_head_data_by_id(key, response_value)
    def _test_update_by_id(self, key, response_value, response_body,
                           attrs, json_body, response_headers=None):
        """Exercise update_by_id over PATCH.

        ``patch_update = True`` forces updates to use HTTP PATCH; the test
        verifies the member URL, the json payload, and that response
        headers (when present) are merged into the returned dict under a
        'headers' key.
        """
        class FakeResource2(FakeResource):
            patch_update = True
            resource_key = key
            service = "my_service"
        response = mock.Mock()
        response.json = mock.Mock(return_value=response_body)
        response.headers = response_headers
        expected_resp = response_value.copy()
        if response_headers:
            expected_resp.update({'headers': response_headers})
        sess = mock.Mock()
        sess.patch = mock.Mock(return_value=response)
        r_id = "my_id"
        resp = FakeResource2.update_by_id(sess, r_id, attrs)
        self.assertEqual(expected_resp, resp)
        sess.patch.assert_called_with(
            utils.urljoin(FakeResource2.base_path, r_id),
            endpoint_filter=FakeResource2.service,
            json=json_body)
        # Same check with path_args interpolated into base_path.
        path_args = {"parent_name": "my_name"}
        resp = FakeResource2.update_by_id(sess, r_id, attrs,
                                          path_args=path_args)
        self.assertEqual(expected_resp, resp)
        sess.patch.assert_called_with(
            utils.urljoin(FakeResource2.base_path % path_args, r_id),
            endpoint_filter=FakeResource2.service,
            json=json_body)
def test_update_without_resource_key(self):
key = None
response_value = {"a": 1, "b": 2, "c": 3}
response_body = response_value
attrs = response_value
json_body = attrs
self._test_update_by_id(key, response_value, response_body,
attrs, json_body)
def test_update_with_resource_key(self):
key = "my_key"
response_value = {"a": 1, "b": 2, "c": 3}
response_body = {key: response_value}
attrs = response_value
json_body = {key: attrs}
self._test_update_by_id(key, response_value, response_body,
attrs, json_body)
def test_update_with_response_headers(self):
key = "my_key"
response_value = {"a": 1, "b": 2, "c": 3}
response_body = {key: response_value}
response_headers = {'location': 'foo'}
attrs = response_value.copy()
json_body = {key: attrs}
self._test_update_by_id(key, response_value, response_body,
attrs, json_body,
response_headers=response_headers)
def test_delete_by_id(self):
class FakeResource2(FakeResource):
service = "my_service"
sess = mock.Mock()
sess.delete = mock.Mock(return_value=None)
r_id = "my_id"
resp = FakeResource2.delete_by_id(sess, r_id)
self.assertIsNone(resp)
headers = {'Accept': ''}
sess.delete.assert_called_with(
utils.urljoin(FakeResource2.base_path, r_id),
endpoint_filter=FakeResource2.service,
headers=headers)
path_args = {"parent_name": "my_name"}
resp = FakeResource2.delete_by_id(sess, r_id, path_args=path_args)
self.assertIsNone(resp)
headers = {'Accept': ''}
sess.delete.assert_called_with(
utils.urljoin(FakeResource2.base_path % path_args, r_id),
endpoint_filter=FakeResource2.service,
headers=headers)
    def test_create(self):
        """create() POSTs only dirty attrs and refreshes from the response.

        The resource is constructed with a subset of attributes; after
        create() it must expose every attribute from the fake response
        body plus the 'location' response header.
        """
        resp = mock.Mock()
        resp.json = mock.Mock(return_value=fake_body)
        resp.headers = {'location': 'foo'}
        self.session.post = mock.Mock(return_value=resp)
        # Create resource with subset of attributes in order to
        # verify create refreshes all attributes from response.
        obj = FakeResource.new(parent_name=fake_parent,
                               name=fake_name,
                               enabled=True,
                               attr1=fake_attr1)
        self.assertEqual(obj, obj.create(self.session))
        # A successful create clears the dirty tracking.
        self.assertFalse(obj.is_dirty)
        # Only the four attributes we set should have been sent.
        last_req = self.session.post.call_args[1]["json"][
            FakeResource.resource_key]
        self.assertEqual(4, len(last_req))
        self.assertTrue(last_req['enabled'])
        self.assertEqual(fake_parent, last_req['parent_name'])
        self.assertEqual(fake_name, last_req['name'])
        self.assertEqual(fake_attr1, last_req['attr1'])
        # Mapping access reflects the full server-side representation.
        self.assertTrue(obj['enabled'])
        self.assertEqual(fake_name, obj['name'])
        self.assertEqual(fake_parent, obj['parent_name'])
        self.assertEqual(fake_attr1, obj['attr1'])
        self.assertEqual(fake_attr2, obj['attr2'])
        self.assertIsNone(obj['status'])
        # Prop (attribute) access, including aliases like parent/first.
        self.assertTrue(obj.enabled)
        self.assertEqual(fake_id, obj.id)
        self.assertEqual(fake_name, obj.name)
        self.assertEqual(fake_parent, obj.parent_name)
        self.assertEqual(fake_parent, obj.parent)
        self.assertEqual(fake_attr1, obj.first)
        self.assertEqual(fake_attr1, obj.attr1)
        self.assertEqual(fake_attr2, obj.second)
        self.assertEqual(fake_attr2, obj.attr2)
        self.assertIsNone(obj.status)
        # The location response header is surfaced as an attribute.
        self.assertEqual('foo', obj.location)
def test_get(self):
resp = mock.Mock()
resp.json = mock.Mock(return_value=fake_body)
resp.headers = {'location': 'foo'}
self.session.get = mock.Mock(return_value=resp)
# Create resource with subset of attributes in order to
# verify get refreshes all attributes from response.
obj = FakeResource.from_id(str(fake_id))
obj['parent_name'] = fake_parent
self.assertEqual(obj, obj.get(self.session))
# Check that the proper URL is being built.
self.assertCalledURL(self.session.get,
os.path.join(fake_base_path % fake_arguments,
str(fake_id))[1:])
self.assertTrue(obj['enabled'])
self.assertEqual(fake_name, obj['name'])
self.assertEqual(fake_parent, obj['parent_name'])
self.assertEqual(fake_attr1, obj['attr1'])
self.assertEqual(fake_attr2, obj['attr2'])
self.assertIsNone(obj['status'])
self.assertTrue(obj.enabled)
self.assertEqual(fake_id, obj.id)
self.assertEqual(fake_name, obj.name)
self.assertEqual(fake_parent, obj.parent_name)
self.assertEqual(fake_parent, obj.parent)
self.assertEqual(fake_attr1, obj.first)
self.assertEqual(fake_attr1, obj.attr1)
self.assertEqual(fake_attr2, obj.second)
self.assertEqual(fake_attr2, obj.attr2)
self.assertIsNone(obj.status)
self.assertIsNone(obj.location)
def test_get_by_id(self):
resp = mock.Mock()
resp.json = mock.Mock(return_value=fake_body)
self.session.get = mock.Mock(return_value=resp)
obj = FakeResource.get_by_id(self.session, fake_id,
path_args=fake_arguments)
# Check that the proper URL is being built.
self.assertCalledURL(self.session.get,
os.path.join(fake_base_path % fake_arguments,
str(fake_id))[1:])
self.assertEqual(fake_id, obj.id)
self.assertEqual(fake_name, obj['name'])
self.assertEqual(fake_attr1, obj['attr1'])
self.assertEqual(fake_attr2, obj['attr2'])
self.assertEqual(fake_name, obj.name)
self.assertEqual(fake_attr1, obj.first)
self.assertEqual(fake_attr2, obj.second)
def test_get_by_id_with_headers(self):
header1 = "fake-value1"
header2 = "fake-value2"
headers = {"header1": header1,
"header2": header2}
resp = mock.Mock(headers=headers)
resp.json = mock.Mock(return_value=fake_body)
self.session.get = mock.Mock(return_value=resp)
class FakeResource2(FakeResource):
header1 = resource.header("header1")
header2 = resource.header("header2")
obj = FakeResource2.get_by_id(self.session, fake_id,
path_args=fake_arguments,
include_headers=True)
self.assertCalledURL(self.session.get,
os.path.join(fake_base_path % fake_arguments,
str(fake_id))[1:])
self.assertEqual(fake_id, obj.id)
self.assertEqual(fake_name, obj['name'])
self.assertEqual(fake_attr1, obj['attr1'])
self.assertEqual(fake_attr2, obj['attr2'])
self.assertEqual(header1, obj['headers']['header1'])
self.assertEqual(header2, obj['headers']['header2'])
self.assertEqual(fake_name, obj.name)
self.assertEqual(fake_attr1, obj.first)
self.assertEqual(fake_attr2, obj.second)
self.assertEqual(header1, obj.header1)
self.assertEqual(header2, obj.header2)
def test_head_by_id(self):
class FakeResource2(FakeResource):
header1 = resource.header("header1")
header2 = resource.header("header2")
resp = mock.Mock(headers={"header1": "one", "header2": "two"})
self.session.head = mock.Mock(return_value=resp)
obj = FakeResource2.head_by_id(self.session, fake_id,
path_args=fake_arguments)
self.assertCalledURL(self.session.head,
os.path.join(fake_base_path % fake_arguments,
str(fake_id))[1:])
self.assertEqual('one', obj['headers']['header1'])
self.assertEqual('two', obj['headers']['header2'])
self.assertEqual('one', obj.header1)
self.assertEqual('two', obj.header2)
def test_patch_update(self):
class FakeResourcePatch(FakeResource):
patch_update = True
resp = mock.Mock()
resp.json = mock.Mock(return_value=fake_body)
resp.headers = {'location': 'foo'}
self.session.patch = mock.Mock(return_value=resp)
# Create resource with subset of attributes in order to
# verify update refreshes all attributes from response.
obj = FakeResourcePatch.new(id=fake_id, parent_name=fake_parent,
name=fake_name, attr1=fake_attr1)
self.assertTrue(obj.is_dirty)
self.assertEqual(obj, obj.update(self.session))
self.assertFalse(obj.is_dirty)
self.assertCalledURL(self.session.patch,
os.path.join(fake_base_path % fake_arguments,
str(fake_id))[1:])
last_req = self.session.patch.call_args[1]["json"][
FakeResource.resource_key]
self.assertEqual(3, len(last_req))
self.assertEqual(fake_parent, last_req['parent_name'])
self.assertEqual(fake_name, last_req['name'])
self.assertEqual(fake_attr1, last_req['attr1'])
self.assertTrue(obj['enabled'])
self.assertEqual(fake_name, obj['name'])
self.assertEqual(fake_parent, obj['parent_name'])
self.assertEqual(fake_attr1, obj['attr1'])
self.assertEqual(fake_attr2, obj['attr2'])
self.assertIsNone(obj['status'])
self.assertTrue(obj.enabled)
self.assertEqual(fake_id, obj.id)
self.assertEqual(fake_name, obj.name)
self.assertEqual(fake_parent, obj.parent_name)
self.assertEqual(fake_parent, obj.parent)
self.assertEqual(fake_attr1, obj.first)
self.assertEqual(fake_attr1, obj.attr1)
self.assertEqual(fake_attr2, obj.second)
self.assertEqual(fake_attr2, obj.attr2)
self.assertIsNone(obj.status)
self.assertEqual('foo', obj.location)
def test_put_update(self):
class FakeResourcePut(FakeResource):
# This is False by default, but explicit for this test.
patch_update = False
resp = mock.Mock()
resp.json = mock.Mock(return_value=fake_body)
resp.headers = {'location': 'foo'}
self.session.put = mock.Mock(return_value=resp)
# Create resource with subset of attributes in order to
# verify update refreshes all attributes from response.
obj = FakeResourcePut.new(id=fake_id, parent_name=fake_parent,
name=fake_name, attr1=fake_attr1)
self.assertTrue(obj.is_dirty)
self.assertEqual(obj, obj.update(self.session))
self.assertFalse(obj.is_dirty)
self.assertCalledURL(self.session.put,
os.path.join(fake_base_path % fake_arguments,
str(fake_id))[1:])
last_req = self.session.put.call_args[1]["json"][
FakeResource.resource_key]
self.assertEqual(3, len(last_req))
self.assertEqual(fake_parent, last_req['parent_name'])
self.assertEqual(fake_name, last_req['name'])
self.assertEqual(fake_attr1, last_req['attr1'])
self.assertTrue(obj['enabled'])
self.assertEqual(fake_name, obj['name'])
self.assertEqual(fake_parent, obj['parent_name'])
self.assertEqual(fake_attr1, obj['attr1'])
self.assertEqual(fake_attr2, obj['attr2'])
self.assertIsNone(obj['status'])
self.assertTrue(obj.enabled)
self.assertEqual(fake_id, obj.id)
self.assertEqual(fake_name, obj.name)
self.assertEqual(fake_parent, obj.parent_name)
self.assertEqual(fake_parent, obj.parent)
self.assertEqual(fake_attr1, obj.first)
self.assertEqual(fake_attr1, obj.attr1)
self.assertEqual(fake_attr2, obj.second)
self.assertEqual(fake_attr2, obj.attr2)
self.assertIsNone(obj.status)
self.assertEqual('foo', obj.location)
def test_update_early_exit(self):
obj = FakeResource()
obj._dirty = [] # Bail out early if there's nothing to update.
self.assertIsNone(obj.update("session"))
def test_update_no_id_attribute(self):
obj = FakeResource.existing(id=1, attr="value1",
parent_name=fake_parent)
obj.first = "value2" # Make it dirty
obj.update_by_id = mock.Mock(return_value=dict())
# If no id_attribute is returned in the update response, make sure
# we handle the resulting KeyError.
self.assertEqual(obj, obj.update("session"))
def test_delete(self):
obj = FakeResource({"id": fake_id, "parent_name": fake_parent})
obj.delete(self.session)
self.assertCalledURL(self.session.delete,
os.path.join(fake_base_path % fake_arguments,
str(fake_id))[1:])
def _test_list(self, resource_class):
results = [fake_data.copy(), fake_data.copy(), fake_data.copy()]
for i in range(len(results)):
results[i]['id'] = fake_id + i
if resource_class.resources_key is not None:
body = {resource_class.resources_key:
self._get_expected_results()}
sentinel = {resource_class.resources_key: []}
else:
body = self._get_expected_results()
sentinel = []
resp1 = mock.Mock()
resp1.json = mock.Mock(return_value=body)
resp2 = mock.Mock()
resp2.json = mock.Mock(return_value=sentinel)
self.session.get.side_effect = [resp1, resp2]
objs = list(resource_class.list(self.session, path_args=fake_arguments,
paginated=True))
params = {'limit': 3, 'marker': results[-1]['id']}
self.assertEqual(params, self.session.get.call_args[1]['params'])
self.assertEqual(3, len(objs))
for obj in objs:
self.assertIn(obj.id, range(fake_id, fake_id + 3))
self.assertEqual(fake_name, obj['name'])
self.assertEqual(fake_name, obj.name)
self.assertIsInstance(obj, FakeResource)
def _get_expected_results(self):
results = [fake_data.copy(), fake_data.copy(), fake_data.copy()]
for i in range(len(results)):
results[i]['id'] = fake_id + i
return results
def test_list_keyed_resource(self):
self._test_list(FakeResource)
def test_list_non_keyed_resource(self):
self._test_list(FakeResourceNoKeys)
def _test_list_call_count(self, paginated):
# Test that we've only made one call to receive all data
results = [fake_data.copy(), fake_data.copy(), fake_data.copy()]
resp = mock.Mock()
resp.json = mock.Mock(return_value={fake_resources: results})
attrs = {"get.return_value": resp}
session = mock.Mock(**attrs)
list(FakeResource.list(session, params={'limit': len(results) + 1},
path_args=fake_arguments,
paginated=paginated))
# Ensure we only made one call to complete this.
self.assertEqual(1, session.get.call_count)
def test_list_bail_out(self):
# When we get less data than limit, make sure we made one call
self._test_list_call_count(True)
def test_list_nonpaginated(self):
# When we call with paginated=False, make sure we made one call
self._test_list_call_count(False)
def test_determine_limit(self):
full_page = [fake_data.copy(), fake_data.copy(), fake_data.copy()]
last_page = [fake_data.copy()]
session = mock.Mock()
session.get = mock.Mock()
full_response = mock.Mock()
response_body = {FakeResource.resources_key: full_page}
full_response.json = mock.Mock(return_value=response_body)
last_response = mock.Mock()
response_body = {FakeResource.resources_key: last_page}
last_response.json = mock.Mock(return_value=response_body)
pages = [full_response, full_response, last_response]
session.get.side_effect = pages
# Don't specify a limit. Resource.list will determine the limit
# is 3 based on the first `full_page`.
results = list(FakeResource.list(session, path_args=fake_arguments,
paginated=True))
self.assertEqual(session.get.call_count, len(pages))
self.assertEqual(len(full_page + full_page + last_page), len(results))
def test_empty_list(self):
page = []
session = mock.Mock()
session.get = mock.Mock()
full_response = mock.Mock()
response_body = {FakeResource.resources_key: page}
full_response.json = mock.Mock(return_value=response_body)
pages = [full_response]
session.get.side_effect = pages
results = list(FakeResource.list(session, path_args=fake_arguments,
paginated=True))
self.assertEqual(session.get.call_count, len(pages))
self.assertEqual(len(page), len(results))
def test_attrs_name(self):
obj = FakeResource()
self.assertIsNone(obj.name)
del obj.name
def test_to_dict(self):
kwargs = {
'enabled': True,
'name': 'FOO',
'parent': 'dad',
'attr1': 'BAR',
'attr2': ['ZOO', 'BAZ'],
'status': 'Active',
'headers': {
'key': 'value'
}
}
obj = FakeResource(kwargs)
res = obj.to_dict()
self.assertIsInstance(res, dict)
self.assertTrue(res['enabled'])
self.assertEqual('FOO', res['name'])
self.assertEqual('dad', res['parent'])
self.assertEqual('BAR', res['attr1'])
self.assertEqual(['ZOO', 'BAZ'], res['attr2'])
self.assertEqual('Active', res['status'])
self.assertNotIn('headers', res)
def test_composite_attr_happy(self):
obj = FakeResource.existing(**{'attr3': '3'})
try:
self.assertEqual('3', obj.third)
except AttributeError:
self.fail("third was not found as expected")
def test_composite_attr_fallback(self):
obj = FakeResource.existing(**{'attr_three': '3'})
try:
self.assertEqual('3', obj.third)
except AttributeError:
self.fail("third was not found in fallback as expected")
def test_id_del(self):
class Test(resource.Resource):
id_attribute = "my_id"
attrs = {"my_id": 100}
t = Test(attrs=attrs)
self.assertEqual(attrs["my_id"], t.id)
del t.id
self.assertTrue(Test.id_attribute not in t._attrs)
def test_from_name_with_name(self):
name = "Ernie Banks"
obj = FakeResource.from_name(name)
self.assertEqual(name, obj.name)
def test_from_id_with_name(self):
name = "Sandy Koufax"
obj = FakeResource.from_id(name)
self.assertEqual(name, obj.id)
def test_from_id_with_object(self):
name = "Mickey Mantle"
obj = FakeResource.new(name=name)
new_obj = FakeResource.from_id(obj)
self.assertIs(new_obj, obj)
self.assertEqual(obj.name, new_obj.name)
def test_from_id_with_bad_value(self):
def should_raise():
FakeResource.from_id(3.14)
self.assertThat(should_raise, matchers.raises(ValueError))
def test_dirty_list(self):
class Test(resource.Resource):
attr = resource.prop("attr")
# Check if dirty after setting by prop
sot1 = Test()
self.assertFalse(sot1.is_dirty)
sot1.attr = 1
self.assertTrue(sot1.is_dirty)
# Check if dirty after setting by mapping
sot2 = Test()
sot2["attr"] = 1
self.assertTrue(sot1.is_dirty)
# Check if dirty after creation
sot3 = Test({"attr": 1})
self.assertTrue(sot3.is_dirty)
def test_update_attrs(self):
class Test(resource.Resource):
moe = resource.prop("the-attr")
larry = resource.prop("the-attr2")
curly = resource.prop("the-attr3", type=int)
shemp = resource.prop("the-attr4")
value1 = "one"
value2 = "two"
value3 = "3"
value4 = "fore"
value5 = "fiver"
sot = Test({"the-attr": value1})
sot.update_attrs({"the-attr2": value2, "notprop": value4})
self.assertTrue(sot.is_dirty)
self.assertEqual(value1, sot.moe)
self.assertEqual(value1, sot["the-attr"])
self.assertEqual(value2, sot.larry)
self.assertEqual(value4, sot.notprop)
sot._reset_dirty()
sot.update_attrs(curly=value3)
self.assertTrue(sot.is_dirty)
self.assertEqual(int, type(sot.curly))
self.assertEqual(int(value3), sot.curly)
sot._reset_dirty()
sot.update_attrs(**{"the-attr4": value5})
self.assertTrue(sot.is_dirty)
self.assertEqual(value5, sot.shemp)
def test_get_id(self):
class Test(resource.Resource):
pass
ID = "an id"
res = Test({"id": ID})
self.assertEqual(ID, resource.Resource.get_id(ID))
self.assertEqual(ID, resource.Resource.get_id(res))
def test_convert_ids(self):
class TestResourceFoo(resource.Resource):
pass
class TestResourceBar(resource.Resource):
pass
resfoo = TestResourceFoo({'id': 'FAKEFOO'})
resbar = TestResourceBar({'id': 'FAKEBAR'})
self.assertIsNone(resource.Resource.convert_ids(None))
attrs = {
'key1': 'value1'
}
self.assertEqual(attrs, resource.Resource.convert_ids(attrs))
attrs = {
'foo': resfoo,
'bar': resbar,
'other': 'whatever',
}
res = resource.Resource.convert_ids(attrs)
self.assertEqual('FAKEFOO', res['foo'])
self.assertEqual('FAKEBAR', res['bar'])
self.assertEqual('whatever', res['other'])
def test_repr(self):
fr = FakeResource()
fr._loaded = False
fr.first = "hey"
fr.second = "hi"
fr.third = "nah"
the_repr = repr(fr)
the_repr = the_repr.replace('ecl.tests.unit.test_resource.', '')
result = eval(the_repr)
self.assertEqual(fr._loaded, result._loaded)
self.assertEqual(fr.first, result.first)
self.assertEqual(fr.second, result.second)
self.assertEqual(fr.third, result.third)
def test_id_attribute(self):
faker = FakeResource(fake_data)
self.assertEqual(fake_id, faker.id)
faker.id_attribute = 'name'
self.assertEqual(fake_name, faker.id)
faker.id_attribute = 'attr1'
self.assertEqual(fake_attr1, faker.id)
faker.id_attribute = 'attr2'
self.assertEqual(fake_attr2, faker.id)
faker.id_attribute = 'id'
self.assertEqual(fake_id, faker.id)
def test_name_attribute(self):
class Person_ES(resource.Resource):
name_attribute = "nombre"
nombre = resource.prop('nombre')
name = "Brian"
args = {'nombre': name}
person = Person_ES(args)
self.assertEqual(name, person.nombre)
self.assertEqual(name, person.name)
new_name = "Julien"
person.name = new_name
self.assertEqual(new_name, person.nombre)
self.assertEqual(new_name, person.name)
def test_boolstr_prop(self):
faker = FakeResource(fake_data)
self.assertTrue(faker.enabled)
self.assertTrue(faker['enabled'])
faker._attrs['enabled'] = False
self.assertFalse(faker.enabled)
self.assertFalse(faker['enabled'])
# should fail fast
def set_invalid():
faker.enabled = 'INVALID'
self.assertRaises(ValueError, set_invalid)
class ResourceMapping(base.TestCase):
    """Tests for the dict/mapping protocol implemented by Resource."""
    def test__getitem(self):
        # Item access reads straight from the underlying attribute store.
        value = 10
        class Test(resource.Resource):
            attr = resource.prop("attr")
        t = Test(attrs={"attr": value})
        self.assertEqual(value, t["attr"])
    def test__setitem__existing_item_changed(self):
        # Assigning a new value stores it and records the key as dirty.
        class Test(resource.Resource):
            pass
        t = Test()
        key = "attr"
        value = 1
        t[key] = value
        self.assertEqual(value, t._attrs[key])
        self.assertTrue(key in t._dirty)
    def test__setitem__existing_item_unchanged(self):
        # Re-assigning an identical value must not mark the key dirty.
        class Test(resource.Resource):
            pass
        key = "attr"
        value = 1
        t = Test(attrs={key: value})
        t._reset_dirty()  # Clear dirty list so this checks as unchanged.
        t[key] = value
        self.assertEqual(value, t._attrs[key])
        self.assertTrue(key not in t._dirty)
    def test__setitem__new_item(self):
        # A brand-new key is stored and marked dirty.
        class Test(resource.Resource):
            pass
        t = Test()
        key = "attr"
        value = 1
        t[key] = value
        self.assertEqual(value, t._attrs[key])
        self.assertTrue(key in t._dirty)
    def test__delitem__(self):
        # Deletion removes the key but records it as dirty so the removal
        # can be propagated to the server.
        class Test(resource.Resource):
            pass
        key = "attr"
        value = 1
        t = Test(attrs={key: value})
        del t[key]
        self.assertTrue(key not in t._attrs)
        self.assertTrue(key in t._dirty)
    def test__len__(self):
        # len() reflects the number of stored attributes.
        class Test(resource.Resource):
            pass
        attrs = {"a": 1, "b": 2, "c": 3}
        t = Test(attrs=attrs)
        self.assertEqual(len(attrs.keys()), len(t))
    def test__iter__(self):
        # Iteration yields attribute keys, matching dict semantics.
        class Test(resource.Resource):
            pass
        attrs = {"a": 1, "b": 2, "c": 3}
        t = Test(attrs=attrs)
        for attr in t:
            self.assertEqual(attrs[attr], t[attr])
    def _test_resource_serialization(self, session_method, resource_method):
        # Intent: Resource-typed attribute values must be reduced to ids so
        # the outgoing json payload stays serializable.
        attr_type = resource.Resource
        class Test(resource.Resource):
            allow_create = True
            attr = resource.prop("attr", type=attr_type)
        the_id = 123
        sot = Test()
        sot.attr = resource.Resource({"id": the_id})
        self.assertEqual(attr_type, type(sot.attr))
        def fake_call(*args, **kwargs):
            # Fails the test if the outgoing json payload is unserializable.
            attrs = kwargs["json"]
            try:
                json.dumps(attrs)
            except TypeError as e:
                self.fail("Unable to serialize _attrs: %s" % e)
            resp = mock.Mock()
            resp.json = mock.Mock(return_value=attrs)
            return resp
        session = mock.Mock()
        setattr(session, session_method, mock.Mock(side_effect=fake_call))
        # NOTE(review): these call create_by_id/update_by_id on the *mock*
        # session object, which is an auto-created no-op Mock, so fake_call
        # never executes and nothing is actually verified.  Presumably they
        # should invoke the classmethods on Test, e.g.
        # Test.create_by_id(session, sot._attrs) -- confirm and fix.
        if resource_method == "create_by_id":
            session.create_by_id(session, sot._attrs)
        elif resource_method == "update_by_id":
            session.update_by_id(session, None, sot._attrs)
    def test_create_serializes_resource_types(self):
        self._test_resource_serialization("post", "create_by_id")
    def test_update_serializes_resource_types(self):
        self._test_resource_serialization("patch", "update_by_id")
class FakeResponse(object):
    """Minimal stand-in for an HTTP response exposing only json().

    Stores the given payload verbatim on ``body`` and hands it back
    untouched from json().
    """

    def __init__(self, response):
        self.body = response

    def json(self):
        """Return the payload this response was constructed with."""
        return self.body
class TestFind(base.TestCase):
    """Tests for Resource.find: lookup by id, by name, and failure modes."""

    NAME = 'matrix'
    ID = 'Fishburne'
    PROP = 'attribute2'

    def setUp(self):
        super(TestFind, self).setUp()
        self.mock_session = mock.Mock()
        self.mock_get = mock.Mock()
        self.mock_session.get = self.mock_get
        self.matrix = {'id': self.ID, 'name': self.NAME, 'prop': self.PROP}

    def test_name(self):
        # The id lookup 404s first, then the name lookup matches.
        self.mock_get.side_effect = [
            exceptions.NotFoundException(),
            FakeResponse({FakeResource.resources_key: [self.matrix]})
        ]
        result = FakeResource.find(self.mock_session, self.NAME,
                                   path_args=fake_arguments)
        self.assertEqual(self.NAME, result.name)
        self.assertEqual(self.PROP, result.prop)

    def test_id(self):
        # A direct id lookup succeeds on the first GET.
        self.mock_get.side_effect = [
            FakeResponse({FakeResource.resource_key: self.matrix})
        ]
        result = FakeResource.find(self.mock_session, self.ID,
                                   path_args=fake_arguments)
        self.assertEqual(self.ID, result.id)
        self.assertEqual(self.PROP, result.prop)
        path = "fakes/" + fake_parent + "/data/" + self.ID
        self.mock_get.assert_any_call(path, endpoint_filter=None)

    def test_id_no_retrieve(self):
        # Without allow_retrieve the id is found through the list path.
        self.mock_get.side_effect = [
            FakeResponse({FakeResource.resources_key: [self.matrix]})
        ]

        class NoRetrieveResource(FakeResource):
            allow_retrieve = False

        result = NoRetrieveResource.find(self.mock_session, self.ID,
                                         path_args=fake_arguments)
        self.assertEqual(self.ID, result.id)
        self.assertEqual(self.PROP, result.prop)

    def test_dups(self):
        # Two different resources matching the same name is an error.
        dupe = self.matrix.copy()
        dupe['id'] = 'different'
        self.mock_get.side_effect = [
            # Raise a 404 first so we get out of the ID search and into name.
            exceptions.NotFoundException(),
            FakeResponse({FakeResource.resources_key: [self.matrix, dupe]})
        ]
        self.assertRaises(exceptions.DuplicateResource, FakeResource.find,
                          self.mock_session, self.NAME)

    def test_id_attribute_find(self):
        floater = {'ip_address': "127.0.0.1", 'prop': self.PROP}
        self.mock_get.side_effect = [
            FakeResponse({FakeResource.resource_key: floater})
        ]
        # Point the id attribute at ip_address for this test only.
        # addCleanup restores it even if the test fails midway (the
        # original assigned 'ip_address' twice and relied on an inline
        # reset that was skipped on failure).
        self.addCleanup(setattr, FakeResource, 'id_attribute', 'id')
        FakeResource.id_attribute = 'ip_address'
        result = FakeResource.find(self.mock_session, "127.0.0.1",
                                   path_args=fake_arguments)
        self.assertEqual("127.0.0.1", result.id)
        self.assertEqual(self.PROP, result.prop)
        p = {'ip_address': "127.0.0.1"}
        path = fake_path + "?limit=2"
        # NOTE(review): mock's called_once_with is not an assertion -- it
        # just creates a child mock, so this line verifies nothing.  Left
        # as-is because the expected request cannot be confirmed from here;
        # it should likely become assert_called_once_with after checking
        # the actual call shape.
        self.mock_get.called_once_with(path, params=p, endpoint_filter=None)

    def test_nada(self):
        # No match anywhere: find() returns None when misses are ignored.
        self.mock_get.side_effect = [
            exceptions.NotFoundException(),
            FakeResponse({FakeResource.resources_key: []})
        ]
        self.assertIsNone(FakeResource.find(self.mock_session, self.NAME))

    def test_no_name(self):
        self.mock_get.side_effect = [
            exceptions.NotFoundException(),
            FakeResponse({FakeResource.resources_key: [self.matrix]})
        ]
        # Disable name lookup; delete the class attribute afterwards so the
        # mutation does not leak into other tests (the original leaked it).
        self.addCleanup(delattr, FakeResource, 'name_attribute')
        FakeResource.name_attribute = None
        self.assertIsNone(FakeResource.find(self.mock_session, self.NAME))

    def test_nada_not_ignored(self):
        # With ignore_missing=False a miss raises instead of returning None.
        self.mock_get.side_effect = [
            exceptions.NotFoundException(),
            FakeResponse({FakeResource.resources_key: []})
        ]
        self.assertRaises(exceptions.ResourceNotFound, FakeResource.find,
                          self.mock_session, self.NAME, ignore_missing=False)
class TestWaitForStatus(base.TestCase):
    """Tests for resource.wait_for_status polling."""
    def __init__(self, *args, **kwargs):
        super(TestWaitForStatus, self).__init__(*args, **kwargs)
        # Canned GET responses for the three server states the resource
        # may report while polling.
        self.build = FakeResponse(self.body_with_status(fake_body, 'BUILD'))
        self.active = FakeResponse(self.body_with_status(fake_body, 'ACTIVE'))
        self.error = FakeResponse(self.body_with_status(fake_body, 'ERROR'))
    def setUp(self):
        super(TestWaitForStatus, self).setUp()
        self.sess = mock.Mock()
    def body_with_status(self, body, status):
        # Deep copy so the shared fake_body template is never mutated.
        body_copy = copy.deepcopy(body)
        body_copy[fake_resource]['status'] = status
        return body_copy
    def test_wait_for_status_nothing(self):
        # Already in the desired status: no GET requests are issued.
        self.sess.get = mock.Mock()
        sot = FakeResource.new(**fake_data)
        sot.status = 'ACTIVE'
        self.assertEqual(sot, resource.wait_for_status(
            self.sess, sot, 'ACTIVE', [], 1, 2))
        self.assertEqual([], self.sess.get.call_args_list)
    def test_wait_for_status(self):
        # Transitions BUILD -> ACTIVE within the allowed attempts.
        self.sess.get = mock.Mock()
        self.sess.get.side_effect = [self.build, self.active]
        sot = FakeResource.new(**fake_data)
        self.assertEqual(sot, resource.wait_for_status(
            self.sess, sot, 'ACTIVE', [], 1, 2))
    def test_wait_for_status_timeout(self):
        # Never leaves BUILD: the wait gives up with ResourceTimeout.
        self.sess.get = mock.Mock()
        self.sess.get.side_effect = [self.build, self.build]
        sot = FakeResource.new(**fake_data)
        self.assertRaises(exceptions.ResourceTimeout, resource.wait_for_status,
                          self.sess, sot, 'ACTIVE', ['ERROR'], 1, 2)
    def test_wait_for_status_failures(self):
        # Entering a listed failure status raises ResourceFailure.
        self.sess.get = mock.Mock()
        self.sess.get.side_effect = [self.build, self.error]
        sot = FakeResource.new(**fake_data)
        self.assertRaises(exceptions.ResourceFailure, resource.wait_for_status,
                          self.sess, sot, 'ACTIVE', ['ERROR'], 1, 2)
    def test_wait_for_status_no_status(self):
        # A resource without a status attribute cannot be waited on.
        class FakeResourceNoStatus(resource.Resource):
            allow_retrieve = True
        sot = FakeResourceNoStatus.new(id=123)
        self.assertRaises(AttributeError, resource.wait_for_status,
                          self.sess, sot, 'ACTIVE', ['ERROR'], 1, 2)
class TestWaitForDelete(base.TestCase):
    """Tests for resource.wait_for_delete."""

    def test_wait_for_delete(self):
        # Found once, then gone (404): the wait succeeds.
        fake_session = mock.Mock()
        res = FakeResource.new(**fake_data)
        res.get = mock.Mock(
            side_effect=[res, exceptions.NotFoundException()])
        result = resource.wait_for_delete(fake_session, res, 1, 2)
        self.assertEqual(res, result)

    def test_wait_for_delete_fail(self):
        # The resource never disappears, so the wait times out.
        fake_session = mock.Mock()
        res = FakeResource.new(**fake_data)
        res.get = mock.Mock(return_value=res)
        self.assertRaises(exceptions.ResourceTimeout,
                          resource.wait_for_delete, fake_session, res, 1, 2)
| 34.704694 | 79 | 0.608148 |
import copy
import json
import os
from keystoneauth1 import session
import mock
import requests
from testtools import matchers
from ecl import exceptions
from ecl import format
from ecl import resource
from ecl.tests.unit import base
from ecl import utils
# Canned identifiers and attribute values shared by every test in this module.
fake_parent = 'robert'
fake_name = 'rey'
fake_id = 99
fake_attr1 = 'lana'
fake_attr2 = 'del'
# Singular/plural body keys used by FakeResource below.
fake_resource = 'fake'
fake_resources = 'fakes'
# Arguments used to fill the %-style base_path template.
fake_arguments = {'parent_name': fake_parent}
fake_base_path = '/fakes/%(parent_name)s/data'
# NOTE(review): fake_path hard-codes 'rey' (fake_name) rather than
# fake_base_path % fake_arguments, which would yield '/fakes/robert/data'
# -- confirm which is intended where fake_path is used.
fake_path = '/fakes/rey/data'
# A full server-side representation of one FakeResource instance.
fake_data = {'id': fake_id,
             'enabled': True,
             'name': fake_name,
             'parent': fake_parent,
             'attr1': fake_attr1,
             'attr2': fake_attr2,
             'status': None}
fake_body = {fake_resource: fake_data}
class FakeParent(resource.Resource):
    """Minimal resource whose identifier is its ``name`` attribute."""
    id_attribute = "name"
    name = resource.prop('name')
class FakeResource(resource.Resource):
    """Fully-featured fake resource used throughout this module.

    Enables every CRUD/list/head operation and declares a mix of props:
    a typed one (``enabled``), plain renames, and an aliased one
    (``third`` maps ``attr3`` with fallback alias ``attr_three``).
    """
    resource_key = fake_resource
    resources_key = fake_resources
    base_path = fake_base_path
    allow_create = allow_retrieve = allow_update = True
    allow_delete = allow_list = allow_head = True
    enabled = resource.prop('enabled', type=format.BoolStr)
    name = resource.prop('name')
    parent = resource.prop('parent_name')
    first = resource.prop('attr1')
    second = resource.prop('attr2')
    third = resource.prop('attr3', alias='attr_three')
    status = resource.prop('status')
class FakeResourceNoKeys(FakeResource):
    """Variant of :class:`FakeResource` whose bodies are not wrapped in keys."""
    resource_key = None
    resources_key = None
class PropTests(base.TestCase):
    """Tests for the :func:`resource.prop` descriptor."""
    def test_with_alias_and_type(self):
        class Test(resource.Resource):
            attr = resource.prop("attr1", alias="attr2", type=bool)
        t = Test(attrs={"attr2": 500})
        # Need to test that bool(500) happened and attr2 *is* True.
        self.assertIs(t.attr, True)
    def test_defaults(self):
        new_default = "new_default"
        class Test(resource.Resource):
            attr1 = resource.prop("attr1")
            attr2 = resource.prop("attr2", default=new_default)
        t = Test()
        self.assertIsNone(t.attr1)
        self.assertEqual(new_default, t.attr2)
        # When the default value is passed in, it is left untouched.
        # Check that attr2 is literally the same object we set as default.
        t.attr2 = new_default
        self.assertIs(new_default, t.attr2)
        not_default = 'not default'
        t2 = Test({'attr2': not_default})
        self.assertEqual(not_default, t2.attr2)
        # Assert that if the default is passed in, it overrides the previously
        # set value (bug #1425996)
        t2.attr2 = new_default
        self.assertEqual(new_default, t2.attr2)
    def test_get_without_instance(self):
        # Accessing a prop on the class itself (no instance) yields None.
        self.assertIsNone(FakeResource.name)
    def test_set_ValueError(self):
        # An int-typed prop rejects a value int() cannot convert.
        class Test(resource.Resource):
            attr = resource.prop("attr", type=int)
        t = Test()
        def should_raise():
            t.attr = "this is not an int"
        self.assertThat(should_raise, matchers.raises(ValueError))
    def test_set_TypeError(self):
        # A prop typed with a zero-arg constructor raises TypeError on set.
        class Type(object):
            def __init__(self):
                pass
        class Test(resource.Resource):
            attr = resource.prop("attr", type=Type)
        t = Test()
        def should_raise():
            t.attr = "this type takes no args"
        self.assertThat(should_raise, matchers.raises(TypeError))
    def test_resource_type(self):
        # A prop typed as a Resource subclass wraps plain ids in that
        # class and accepts already-constructed instances unchanged.
        class FakestResource(resource.Resource):
            shortstop = resource.prop("shortstop", type=FakeResource)
            third_base = resource.prop("third_base", type=FakeResource)
        sot = FakestResource()
        id1 = "Ernie Banks"
        id2 = "Ron Santo"
        sot.shortstop = id1
        sot.third_base = id2
        resource1 = FakeResource.new(id=id1)
        self.assertEqual(resource1, sot.shortstop)
        self.assertEqual(id1, sot.shortstop.id)
        self.assertEqual(FakeResource, type(sot.shortstop))
        resource2 = FakeResource.new(id=id2)
        self.assertEqual(resource2, sot.third_base)
        self.assertEqual(id2, sot.third_base.id)
        self.assertEqual(FakeResource, type(sot.third_base))
        sot2 = FakestResource()
        sot2.shortstop = resource1
        sot2.third_base = resource2
        self.assertEqual(resource1, sot2.shortstop)
        self.assertEqual(id1, sot2.shortstop.id)
        self.assertEqual(FakeResource, type(sot2.shortstop))
        self.assertEqual(resource2, sot2.third_base)
        self.assertEqual(id2, sot2.third_base.id)
        self.assertEqual(FakeResource, type(sot2.third_base))
        body = {
            "shortstop": id1,
            "third_base": id2
        }
        sot3 = FakestResource(body)
        self.assertEqual(FakeResource({"id": id1}), sot3.shortstop)
        self.assertEqual(FakeResource({"id": id2}), sot3.third_base)
    def test_set_alias_same_name(self):
        # An alias equal to the attribute name must not shadow the
        # underlying key the prop maps to.
        class Test(resource.Resource):
            attr = resource.prop("something", alias="attr")
        val = "hey"
        args = {"something": val}
        sot = Test(args)
        self.assertEqual(val, sot._attrs["something"])
        self.assertEqual(val, sot.attr)
    def test_property_is_none(self):
        # An explicit None stays None; the type (dict) is not applied.
        class Test(resource.Resource):
            attr = resource.prop("something", type=dict)
        args = {"something": None}
        sot = Test(args)
        self.assertIsNone(sot._attrs["something"])
        self.assertIsNone(sot.attr)
class HeaderTests(base.TestCase):
    """Tests for the :func:`resource.header` descriptor."""
    class Test(resource.Resource):
        # Shared fixture: three props backed by HTTP headers.
        base_path = "/ramones"
        service = "punk"
        allow_create = True
        allow_update = True
        hey = resource.header("vocals")
        ho = resource.header("guitar")
        letsgo = resource.header("bass")
    def test_get(self):
        # Only headers present in the mapping resolve; others are None.
        val = "joey"
        args = {"vocals": val}
        sot = HeaderTests.Test({'headers': args})
        self.assertEqual(val, sot.hey)
        self.assertIsNone(sot.ho)
        self.assertIsNone(sot.letsgo)
    def test_set_new(self):
        # Setting a previously-absent header marks the resource dirty.
        args = {"vocals": "joey", "bass": "deedee"}
        sot = HeaderTests.Test({'headers': args})
        sot._reset_dirty()
        sot.ho = "johnny"
        self.assertEqual("johnny", sot.ho)
        self.assertTrue(sot.is_dirty)
    def test_set_old(self):
        # Overwriting an existing header also marks the resource dirty.
        args = {"vocals": "joey", "bass": "deedee"}
        sot = HeaderTests.Test({'headers': args})
        sot._reset_dirty()
        sot.letsgo = "cj"
        self.assertEqual("cj", sot.letsgo)
        self.assertTrue(sot.is_dirty)
    def test_set_brand_new(self):
        # Setting a header on an empty mapping creates the entry.
        sot = HeaderTests.Test({'headers': {}})
        sot._reset_dirty()
        sot.ho = "johnny"
        self.assertEqual("johnny", sot.ho)
        self.assertTrue(sot.is_dirty)
        self.assertEqual({'headers': {"guitar": "johnny"}}, sot)
    def test_1428342(self):
        # Regression test (see test name for the bug number): header
        # access must work when headers is a requests CaseInsensitiveDict.
        sot = HeaderTests.Test({'headers':
                                requests.structures.CaseInsensitiveDict()})
        self.assertIsNone(sot.hey)
    def test_create_update_headers(self):
        # Dirty header props are sent as HTTP headers on create (POST)
        # and update (PUT), separate from the JSON body.
        sot = HeaderTests.Test()
        sot._reset_dirty()
        sot.ho = "johnny"
        sot.letsgo = "deedee"
        response = mock.Mock()
        response_body = {'id': 1}
        response.json = mock.Mock(return_value=response_body)
        response.headers = None
        sess = mock.Mock()
        sess.post = mock.Mock(return_value=response)
        sess.put = mock.Mock(return_value=response)
        sot.create(sess)
        headers = {'guitar': 'johnny', 'bass': 'deedee'}
        sess.post.assert_called_with(HeaderTests.Test.base_path,
                                     endpoint_filter=HeaderTests.Test.service,
                                     headers=headers,
                                     json={})
        sot['id'] = 1
        sot.letsgo = "cj"
        headers = {'guitar': 'johnny', 'bass': 'cj'}
        sot.update(sess)
        sess.put.assert_called_with('ramones/1',
                                    endpoint_filter=HeaderTests.Test.service,
                                    headers=headers,
                                    json={})
class ResourceTests(base.TestCase):
    """CRUD, list, and attribute-handling tests for :class:`resource.Resource`.

    Fix: ``test_dirty_list`` previously re-asserted ``sot1.is_dirty``
    (already dirty) after mutating ``sot2`` via item assignment, so the
    item-assignment dirty-tracking path was never actually verified; it
    now asserts ``sot2.is_dirty``.
    """
    def setUp(self):
        super(ResourceTests, self).setUp()
        self.session = mock.Mock(spec=session.Session)
        self.session.get_filter = mock.Mock(return_value={})
    def assertCalledURL(self, method, url):
        # call_args gives a tuple of *args and tuple of **kwargs.
        # Check that the first arg in *args (the URL) has our url.
        self.assertEqual(method.call_args[0][0], url)
    def test_empty_id(self):
        resp = mock.Mock()
        resp.json = mock.Mock(return_value=fake_body)
        self.session.get.return_value = resp
        obj = FakeResource.new(**fake_arguments)
        self.assertEqual(obj, obj.get(self.session))
        self.assertEqual(fake_id, obj.id)
        self.assertEqual(fake_name, obj['name'])
        self.assertEqual(fake_attr1, obj['attr1'])
        self.assertEqual(fake_attr2, obj['attr2'])
        self.assertEqual(fake_name, obj.name)
        self.assertEqual(fake_attr1, obj.first)
        self.assertEqual(fake_attr2, obj.second)
    def test_not_allowed(self):
        # Every operation must raise MethodNotSupported when its
        # corresponding allow_* flag is False.
        class Nope(resource.Resource):
            allow_create = allow_retrieve = allow_update = False
            allow_delete = allow_list = allow_head = False
        nope = Nope()
        def cant_create():
            nope.create_by_id(1, 2)
        def cant_retrieve():
            nope.get_data_by_id(1, 2)
        def cant_update():
            nope.update_by_id(1, 2, 3)
        def cant_delete():
            nope.delete_by_id(1, 2)
        def cant_list():
            for i in nope.list(1):
                pass
        def cant_head():
            nope.head_data_by_id(1, 2)
        self.assertThat(cant_create,
                        matchers.raises(exceptions.MethodNotSupported))
        self.assertThat(cant_retrieve,
                        matchers.raises(exceptions.MethodNotSupported))
        self.assertThat(cant_update,
                        matchers.raises(exceptions.MethodNotSupported))
        self.assertThat(cant_delete,
                        matchers.raises(exceptions.MethodNotSupported))
        self.assertThat(cant_list,
                        matchers.raises(exceptions.MethodNotSupported))
        self.assertThat(cant_head,
                        matchers.raises(exceptions.MethodNotSupported))
    def _test_create_by_id(self, key, response_value, response_body,
                           attrs, json_body, response_headers=None):
        # Exercises create_by_id over POST (no id) and PUT (with id),
        # with and without path_args substitution.
        class FakeResource2(FakeResource):
            resource_key = key
            service = "my_service"
        response = mock.Mock()
        response.json = mock.Mock(return_value=response_body)
        response.headers = response_headers
        expected_resp = response_value.copy()
        if response_headers:
            expected_resp.update({'headers': response_headers})
        sess = mock.Mock()
        sess.put = mock.Mock(return_value=response)
        sess.post = mock.Mock(return_value=response)
        resp = FakeResource2.create_by_id(sess, attrs)
        self.assertEqual(expected_resp, resp)
        sess.post.assert_called_with(FakeResource2.base_path,
                                     endpoint_filter=FakeResource2.service,
                                     json=json_body)
        r_id = "my_id"
        resp = FakeResource2.create_by_id(sess, attrs, resource_id=r_id)
        self.assertEqual(response_value, resp)
        sess.put.assert_called_with(
            utils.urljoin(FakeResource2.base_path, r_id),
            endpoint_filter=FakeResource2.service,
            json=json_body)
        path_args = {"parent_name": "my_name"}
        resp = FakeResource2.create_by_id(sess, attrs, path_args=path_args)
        self.assertEqual(response_value, resp)
        sess.post.assert_called_with(FakeResource2.base_path % path_args,
                                     endpoint_filter=FakeResource2.service,
                                     json=json_body)
        resp = FakeResource2.create_by_id(sess, attrs, resource_id=r_id,
                                          path_args=path_args)
        self.assertEqual(response_value, resp)
        sess.put.assert_called_with(
            utils.urljoin(FakeResource2.base_path % path_args, r_id),
            endpoint_filter=FakeResource2.service,
            json=json_body)
    def test_create_without_resource_key(self):
        key = None
        response_value = {"a": 1, "b": 2, "c": 3}
        response_body = response_value
        attrs = response_value
        json_body = attrs
        self._test_create_by_id(key, response_value, response_body,
                                attrs, json_body)
    def test_create_with_response_headers(self):
        key = None
        response_value = {"a": 1, "b": 2, "c": 3}
        response_body = response_value
        response_headers = {'location': 'foo'}
        attrs = response_value.copy()
        json_body = attrs
        self._test_create_by_id(key, response_value, response_body,
                                attrs, json_body,
                                response_headers=response_headers)
    def test_create_with_resource_key(self):
        key = "my_key"
        response_value = {"a": 1, "b": 2, "c": 3}
        response_body = {key: response_value}
        attrs = response_body
        json_body = {key: attrs}
        self._test_create_by_id(key, response_value, response_body,
                                attrs, json_body)
    def _test_get_data_by_id(self, key, response_value, response_body):
        class FakeResource2(FakeResource):
            resource_key = key
            service = "my_service"
        response = mock.Mock()
        response.json = mock.Mock(return_value=response_body)
        sess = mock.Mock()
        sess.get = mock.Mock(return_value=response)
        r_id = "my_id"
        resp = FakeResource2.get_data_by_id(sess, resource_id=r_id)
        self.assertEqual(response_value, resp)
        sess.get.assert_called_with(
            utils.urljoin(FakeResource2.base_path, r_id),
            endpoint_filter=FakeResource2.service)
        path_args = {"parent_name": "my_name"}
        resp = FakeResource2.get_data_by_id(sess, resource_id=r_id,
                                            path_args=path_args)
        self.assertEqual(response_value, resp)
        sess.get.assert_called_with(
            utils.urljoin(FakeResource2.base_path % path_args, r_id),
            endpoint_filter=FakeResource2.service)
    def test_get_data_without_resource_key(self):
        key = None
        response_value = {"a": 1, "b": 2, "c": 3}
        response_body = response_value
        self._test_get_data_by_id(key, response_value, response_body)
    def test_get_data_with_resource_key(self):
        key = "my_key"
        response_value = {"a": 1, "b": 2, "c": 3}
        response_body = {key: response_value}
        self._test_get_data_by_id(key, response_value, response_body)
    def _test_head_data_by_id(self, key, response_value):
        class FakeResource2(FakeResource):
            resource_key = key
            service = "my_service"
        response = mock.Mock()
        response.headers = response_value
        sess = mock.Mock()
        sess.head = mock.Mock(return_value=response)
        r_id = "my_id"
        resp = FakeResource2.head_data_by_id(sess, resource_id=r_id)
        self.assertEqual({'headers': response_value}, resp)
        headers = {'Accept': ''}
        sess.head.assert_called_with(
            utils.urljoin(FakeResource2.base_path, r_id),
            endpoint_filter=FakeResource2.service,
            headers=headers)
        path_args = {"parent_name": "my_name"}
        resp = FakeResource2.head_data_by_id(sess, resource_id=r_id,
                                             path_args=path_args)
        self.assertEqual({'headers': response_value}, resp)
        headers = {'Accept': ''}
        sess.head.assert_called_with(
            utils.urljoin(FakeResource2.base_path % path_args, r_id),
            endpoint_filter=FakeResource2.service,
            headers=headers)
    def test_head_data_without_resource_key(self):
        key = None
        response_value = {"key1": "value1", "key2": "value2"}
        self._test_head_data_by_id(key, response_value)
    def test_head_data_with_resource_key(self):
        key = "my_key"
        response_value = {"key1": "value1", "key2": "value2"}
        self._test_head_data_by_id(key, response_value)
    def _test_update_by_id(self, key, response_value, response_body,
                           attrs, json_body, response_headers=None):
        class FakeResource2(FakeResource):
            patch_update = True
            resource_key = key
            service = "my_service"
        response = mock.Mock()
        response.json = mock.Mock(return_value=response_body)
        response.headers = response_headers
        expected_resp = response_value.copy()
        if response_headers:
            expected_resp.update({'headers': response_headers})
        sess = mock.Mock()
        sess.patch = mock.Mock(return_value=response)
        r_id = "my_id"
        resp = FakeResource2.update_by_id(sess, r_id, attrs)
        self.assertEqual(expected_resp, resp)
        sess.patch.assert_called_with(
            utils.urljoin(FakeResource2.base_path, r_id),
            endpoint_filter=FakeResource2.service,
            json=json_body)
        path_args = {"parent_name": "my_name"}
        resp = FakeResource2.update_by_id(sess, r_id, attrs,
                                          path_args=path_args)
        self.assertEqual(expected_resp, resp)
        sess.patch.assert_called_with(
            utils.urljoin(FakeResource2.base_path % path_args, r_id),
            endpoint_filter=FakeResource2.service,
            json=json_body)
    def test_update_without_resource_key(self):
        key = None
        response_value = {"a": 1, "b": 2, "c": 3}
        response_body = response_value
        attrs = response_value
        json_body = attrs
        self._test_update_by_id(key, response_value, response_body,
                                attrs, json_body)
    def test_update_with_resource_key(self):
        key = "my_key"
        response_value = {"a": 1, "b": 2, "c": 3}
        response_body = {key: response_value}
        attrs = response_value
        json_body = {key: attrs}
        self._test_update_by_id(key, response_value, response_body,
                                attrs, json_body)
    def test_update_with_response_headers(self):
        key = "my_key"
        response_value = {"a": 1, "b": 2, "c": 3}
        response_body = {key: response_value}
        response_headers = {'location': 'foo'}
        attrs = response_value.copy()
        json_body = {key: attrs}
        self._test_update_by_id(key, response_value, response_body,
                                attrs, json_body,
                                response_headers=response_headers)
    def test_delete_by_id(self):
        class FakeResource2(FakeResource):
            service = "my_service"
        sess = mock.Mock()
        sess.delete = mock.Mock(return_value=None)
        r_id = "my_id"
        resp = FakeResource2.delete_by_id(sess, r_id)
        self.assertIsNone(resp)
        headers = {'Accept': ''}
        sess.delete.assert_called_with(
            utils.urljoin(FakeResource2.base_path, r_id),
            endpoint_filter=FakeResource2.service,
            headers=headers)
        path_args = {"parent_name": "my_name"}
        resp = FakeResource2.delete_by_id(sess, r_id, path_args=path_args)
        self.assertIsNone(resp)
        headers = {'Accept': ''}
        sess.delete.assert_called_with(
            utils.urljoin(FakeResource2.base_path % path_args, r_id),
            endpoint_filter=FakeResource2.service,
            headers=headers)
    def test_create(self):
        resp = mock.Mock()
        resp.json = mock.Mock(return_value=fake_body)
        resp.headers = {'location': 'foo'}
        self.session.post = mock.Mock(return_value=resp)
        # Create resource with subset of attributes in order to
        # verify create refreshes all attributes from response.
        obj = FakeResource.new(parent_name=fake_parent,
                               name=fake_name,
                               enabled=True,
                               attr1=fake_attr1)
        self.assertEqual(obj, obj.create(self.session))
        self.assertFalse(obj.is_dirty)
        last_req = self.session.post.call_args[1]["json"][
            FakeResource.resource_key]
        self.assertEqual(4, len(last_req))
        self.assertTrue(last_req['enabled'])
        self.assertEqual(fake_parent, last_req['parent_name'])
        self.assertEqual(fake_name, last_req['name'])
        self.assertEqual(fake_attr1, last_req['attr1'])
        self.assertTrue(obj['enabled'])
        self.assertEqual(fake_name, obj['name'])
        self.assertEqual(fake_parent, obj['parent_name'])
        self.assertEqual(fake_attr1, obj['attr1'])
        self.assertEqual(fake_attr2, obj['attr2'])
        self.assertIsNone(obj['status'])
        self.assertTrue(obj.enabled)
        self.assertEqual(fake_id, obj.id)
        self.assertEqual(fake_name, obj.name)
        self.assertEqual(fake_parent, obj.parent_name)
        self.assertEqual(fake_parent, obj.parent)
        self.assertEqual(fake_attr1, obj.first)
        self.assertEqual(fake_attr1, obj.attr1)
        self.assertEqual(fake_attr2, obj.second)
        self.assertEqual(fake_attr2, obj.attr2)
        self.assertIsNone(obj.status)
        self.assertEqual('foo', obj.location)
    def test_get(self):
        resp = mock.Mock()
        resp.json = mock.Mock(return_value=fake_body)
        resp.headers = {'location': 'foo'}
        self.session.get = mock.Mock(return_value=resp)
        # Create resource with subset of attributes in order to
        # verify get refreshes all attributes from response.
        obj = FakeResource.from_id(str(fake_id))
        obj['parent_name'] = fake_parent
        self.assertEqual(obj, obj.get(self.session))
        # Check that the proper URL is being built.
        self.assertCalledURL(self.session.get,
                             os.path.join(fake_base_path % fake_arguments,
                                          str(fake_id))[1:])
        self.assertTrue(obj['enabled'])
        self.assertEqual(fake_name, obj['name'])
        self.assertEqual(fake_parent, obj['parent_name'])
        self.assertEqual(fake_attr1, obj['attr1'])
        self.assertEqual(fake_attr2, obj['attr2'])
        self.assertIsNone(obj['status'])
        self.assertTrue(obj.enabled)
        self.assertEqual(fake_id, obj.id)
        self.assertEqual(fake_name, obj.name)
        self.assertEqual(fake_parent, obj.parent_name)
        self.assertEqual(fake_parent, obj.parent)
        self.assertEqual(fake_attr1, obj.first)
        self.assertEqual(fake_attr1, obj.attr1)
        self.assertEqual(fake_attr2, obj.second)
        self.assertEqual(fake_attr2, obj.attr2)
        self.assertIsNone(obj.status)
        self.assertIsNone(obj.location)
    def test_get_by_id(self):
        resp = mock.Mock()
        resp.json = mock.Mock(return_value=fake_body)
        self.session.get = mock.Mock(return_value=resp)
        obj = FakeResource.get_by_id(self.session, fake_id,
                                     path_args=fake_arguments)
        # Check that the proper URL is being built.
        self.assertCalledURL(self.session.get,
                             os.path.join(fake_base_path % fake_arguments,
                                          str(fake_id))[1:])
        self.assertEqual(fake_id, obj.id)
        self.assertEqual(fake_name, obj['name'])
        self.assertEqual(fake_attr1, obj['attr1'])
        self.assertEqual(fake_attr2, obj['attr2'])
        self.assertEqual(fake_name, obj.name)
        self.assertEqual(fake_attr1, obj.first)
        self.assertEqual(fake_attr2, obj.second)
    def test_get_by_id_with_headers(self):
        header1 = "fake-value1"
        header2 = "fake-value2"
        headers = {"header1": header1,
                   "header2": header2}
        resp = mock.Mock(headers=headers)
        resp.json = mock.Mock(return_value=fake_body)
        self.session.get = mock.Mock(return_value=resp)
        class FakeResource2(FakeResource):
            header1 = resource.header("header1")
            header2 = resource.header("header2")
        obj = FakeResource2.get_by_id(self.session, fake_id,
                                      path_args=fake_arguments,
                                      include_headers=True)
        self.assertCalledURL(self.session.get,
                             os.path.join(fake_base_path % fake_arguments,
                                          str(fake_id))[1:])
        self.assertEqual(fake_id, obj.id)
        self.assertEqual(fake_name, obj['name'])
        self.assertEqual(fake_attr1, obj['attr1'])
        self.assertEqual(fake_attr2, obj['attr2'])
        self.assertEqual(header1, obj['headers']['header1'])
        self.assertEqual(header2, obj['headers']['header2'])
        self.assertEqual(fake_name, obj.name)
        self.assertEqual(fake_attr1, obj.first)
        self.assertEqual(fake_attr2, obj.second)
        self.assertEqual(header1, obj.header1)
        self.assertEqual(header2, obj.header2)
    def test_head_by_id(self):
        class FakeResource2(FakeResource):
            header1 = resource.header("header1")
            header2 = resource.header("header2")
        resp = mock.Mock(headers={"header1": "one", "header2": "two"})
        self.session.head = mock.Mock(return_value=resp)
        obj = FakeResource2.head_by_id(self.session, fake_id,
                                       path_args=fake_arguments)
        self.assertCalledURL(self.session.head,
                             os.path.join(fake_base_path % fake_arguments,
                                          str(fake_id))[1:])
        self.assertEqual('one', obj['headers']['header1'])
        self.assertEqual('two', obj['headers']['header2'])
        self.assertEqual('one', obj.header1)
        self.assertEqual('two', obj.header2)
    def test_patch_update(self):
        class FakeResourcePatch(FakeResource):
            patch_update = True
        resp = mock.Mock()
        resp.json = mock.Mock(return_value=fake_body)
        resp.headers = {'location': 'foo'}
        self.session.patch = mock.Mock(return_value=resp)
        # Create resource with subset of attributes in order to
        # verify update refreshes all attributes from response.
        obj = FakeResourcePatch.new(id=fake_id, parent_name=fake_parent,
                                    name=fake_name, attr1=fake_attr1)
        self.assertTrue(obj.is_dirty)
        self.assertEqual(obj, obj.update(self.session))
        self.assertFalse(obj.is_dirty)
        self.assertCalledURL(self.session.patch,
                             os.path.join(fake_base_path % fake_arguments,
                                          str(fake_id))[1:])
        last_req = self.session.patch.call_args[1]["json"][
            FakeResource.resource_key]
        self.assertEqual(3, len(last_req))
        self.assertEqual(fake_parent, last_req['parent_name'])
        self.assertEqual(fake_name, last_req['name'])
        self.assertEqual(fake_attr1, last_req['attr1'])
        self.assertTrue(obj['enabled'])
        self.assertEqual(fake_name, obj['name'])
        self.assertEqual(fake_parent, obj['parent_name'])
        self.assertEqual(fake_attr1, obj['attr1'])
        self.assertEqual(fake_attr2, obj['attr2'])
        self.assertIsNone(obj['status'])
        self.assertTrue(obj.enabled)
        self.assertEqual(fake_id, obj.id)
        self.assertEqual(fake_name, obj.name)
        self.assertEqual(fake_parent, obj.parent_name)
        self.assertEqual(fake_parent, obj.parent)
        self.assertEqual(fake_attr1, obj.first)
        self.assertEqual(fake_attr1, obj.attr1)
        self.assertEqual(fake_attr2, obj.second)
        self.assertEqual(fake_attr2, obj.attr2)
        self.assertIsNone(obj.status)
        self.assertEqual('foo', obj.location)
    def test_put_update(self):
        class FakeResourcePut(FakeResource):
            # This is False by default, but explicit for this test.
            patch_update = False
        resp = mock.Mock()
        resp.json = mock.Mock(return_value=fake_body)
        resp.headers = {'location': 'foo'}
        self.session.put = mock.Mock(return_value=resp)
        # Create resource with subset of attributes in order to
        # verify update refreshes all attributes from response.
        obj = FakeResourcePut.new(id=fake_id, parent_name=fake_parent,
                                  name=fake_name, attr1=fake_attr1)
        self.assertTrue(obj.is_dirty)
        self.assertEqual(obj, obj.update(self.session))
        self.assertFalse(obj.is_dirty)
        self.assertCalledURL(self.session.put,
                             os.path.join(fake_base_path % fake_arguments,
                                          str(fake_id))[1:])
        last_req = self.session.put.call_args[1]["json"][
            FakeResource.resource_key]
        self.assertEqual(3, len(last_req))
        self.assertEqual(fake_parent, last_req['parent_name'])
        self.assertEqual(fake_name, last_req['name'])
        self.assertEqual(fake_attr1, last_req['attr1'])
        self.assertTrue(obj['enabled'])
        self.assertEqual(fake_name, obj['name'])
        self.assertEqual(fake_parent, obj['parent_name'])
        self.assertEqual(fake_attr1, obj['attr1'])
        self.assertEqual(fake_attr2, obj['attr2'])
        self.assertIsNone(obj['status'])
        self.assertTrue(obj.enabled)
        self.assertEqual(fake_id, obj.id)
        self.assertEqual(fake_name, obj.name)
        self.assertEqual(fake_parent, obj.parent_name)
        self.assertEqual(fake_parent, obj.parent)
        self.assertEqual(fake_attr1, obj.first)
        self.assertEqual(fake_attr1, obj.attr1)
        self.assertEqual(fake_attr2, obj.second)
        self.assertEqual(fake_attr2, obj.attr2)
        self.assertIsNone(obj.status)
        self.assertEqual('foo', obj.location)
    def test_update_early_exit(self):
        obj = FakeResource()
        obj._dirty = []  # Bail out early if there's nothing to update.
        self.assertIsNone(obj.update("session"))
    def test_update_no_id_attribute(self):
        obj = FakeResource.existing(id=1, attr="value1",
                                    parent_name=fake_parent)
        obj.first = "value2"
        obj.update_by_id = mock.Mock(return_value=dict())
        self.assertEqual(obj, obj.update("session"))
    def test_delete(self):
        obj = FakeResource({"id": fake_id, "parent_name": fake_parent})
        obj.delete(self.session)
        self.assertCalledURL(self.session.delete,
                             os.path.join(fake_base_path % fake_arguments,
                                          str(fake_id))[1:])
    def _test_list(self, resource_class):
        # Reuse the shared helper instead of duplicating its id-stamping
        # loop inline (behavior is identical).
        results = self._get_expected_results()
        if resource_class.resources_key is not None:
            body = {resource_class.resources_key:
                    self._get_expected_results()}
            sentinel = {resource_class.resources_key: []}
        else:
            body = self._get_expected_results()
            sentinel = []
        resp1 = mock.Mock()
        resp1.json = mock.Mock(return_value=body)
        resp2 = mock.Mock()
        resp2.json = mock.Mock(return_value=sentinel)
        self.session.get.side_effect = [resp1, resp2]
        objs = list(resource_class.list(self.session, path_args=fake_arguments,
                                        paginated=True))
        params = {'limit': 3, 'marker': results[-1]['id']}
        self.assertEqual(params, self.session.get.call_args[1]['params'])
        self.assertEqual(3, len(objs))
        for obj in objs:
            self.assertIn(obj.id, range(fake_id, fake_id + 3))
            self.assertEqual(fake_name, obj['name'])
            self.assertEqual(fake_name, obj.name)
            self.assertIsInstance(obj, FakeResource)
    def _get_expected_results(self):
        # Three copies of fake_data with sequential, distinct ids.
        results = [fake_data.copy(), fake_data.copy(), fake_data.copy()]
        for i in range(len(results)):
            results[i]['id'] = fake_id + i
        return results
    def test_list_keyed_resource(self):
        self._test_list(FakeResource)
    def test_list_non_keyed_resource(self):
        self._test_list(FakeResourceNoKeys)
    def _test_list_call_count(self, paginated):
        results = [fake_data.copy(), fake_data.copy(), fake_data.copy()]
        resp = mock.Mock()
        resp.json = mock.Mock(return_value={fake_resources: results})
        attrs = {"get.return_value": resp}
        session = mock.Mock(**attrs)
        list(FakeResource.list(session, params={'limit': len(results) + 1},
                               path_args=fake_arguments,
                               paginated=paginated))
        # Ensure we only made one call to complete this.
        self.assertEqual(1, session.get.call_count)
    def test_list_bail_out(self):
        # When we get less data than limit, make sure we made one call
        self._test_list_call_count(True)
    def test_list_nonpaginated(self):
        # When we call with paginated=False, make sure we made one call
        self._test_list_call_count(False)
    def test_determine_limit(self):
        full_page = [fake_data.copy(), fake_data.copy(), fake_data.copy()]
        last_page = [fake_data.copy()]
        session = mock.Mock()
        session.get = mock.Mock()
        full_response = mock.Mock()
        response_body = {FakeResource.resources_key: full_page}
        full_response.json = mock.Mock(return_value=response_body)
        last_response = mock.Mock()
        response_body = {FakeResource.resources_key: last_page}
        last_response.json = mock.Mock(return_value=response_body)
        pages = [full_response, full_response, last_response]
        session.get.side_effect = pages
        # Don't specify a limit. Resource.list will determine the limit
        results = list(FakeResource.list(session, path_args=fake_arguments,
                                         paginated=True))
        self.assertEqual(session.get.call_count, len(pages))
        self.assertEqual(len(full_page + full_page + last_page), len(results))
    def test_empty_list(self):
        page = []
        session = mock.Mock()
        session.get = mock.Mock()
        full_response = mock.Mock()
        response_body = {FakeResource.resources_key: page}
        full_response.json = mock.Mock(return_value=response_body)
        pages = [full_response]
        session.get.side_effect = pages
        results = list(FakeResource.list(session, path_args=fake_arguments,
                                         paginated=True))
        self.assertEqual(session.get.call_count, len(pages))
        self.assertEqual(len(page), len(results))
    def test_attrs_name(self):
        obj = FakeResource()
        self.assertIsNone(obj.name)
        del obj.name
    def test_to_dict(self):
        kwargs = {
            'enabled': True,
            'name': 'FOO',
            'parent': 'dad',
            'attr1': 'BAR',
            'attr2': ['ZOO', 'BAZ'],
            'status': 'Active',
            'headers': {
                'key': 'value'
            }
        }
        obj = FakeResource(kwargs)
        res = obj.to_dict()
        self.assertIsInstance(res, dict)
        self.assertTrue(res['enabled'])
        self.assertEqual('FOO', res['name'])
        self.assertEqual('dad', res['parent'])
        self.assertEqual('BAR', res['attr1'])
        self.assertEqual(['ZOO', 'BAZ'], res['attr2'])
        self.assertEqual('Active', res['status'])
        self.assertNotIn('headers', res)
    def test_composite_attr_happy(self):
        obj = FakeResource.existing(**{'attr3': '3'})
        try:
            self.assertEqual('3', obj.third)
        except AttributeError:
            self.fail("third was not found as expected")
    def test_composite_attr_fallback(self):
        obj = FakeResource.existing(**{'attr_three': '3'})
        try:
            self.assertEqual('3', obj.third)
        except AttributeError:
            self.fail("third was not found in fallback as expected")
    def test_id_del(self):
        class Test(resource.Resource):
            id_attribute = "my_id"
        attrs = {"my_id": 100}
        t = Test(attrs=attrs)
        self.assertEqual(attrs["my_id"], t.id)
        del t.id
        self.assertTrue(Test.id_attribute not in t._attrs)
    def test_from_name_with_name(self):
        name = "Ernie Banks"
        obj = FakeResource.from_name(name)
        self.assertEqual(name, obj.name)
    def test_from_id_with_name(self):
        name = "Sandy Koufax"
        obj = FakeResource.from_id(name)
        self.assertEqual(name, obj.id)
    def test_from_id_with_object(self):
        name = "Mickey Mantle"
        obj = FakeResource.new(name=name)
        new_obj = FakeResource.from_id(obj)
        self.assertIs(new_obj, obj)
        self.assertEqual(obj.name, new_obj.name)
    def test_from_id_with_bad_value(self):
        def should_raise():
            FakeResource.from_id(3.14)
        self.assertThat(should_raise, matchers.raises(ValueError))
    def test_dirty_list(self):
        class Test(resource.Resource):
            attr = resource.prop("attr")
        sot1 = Test()
        self.assertFalse(sot1.is_dirty)
        sot1.attr = 1
        self.assertTrue(sot1.is_dirty)
        sot2 = Test()
        sot2["attr"] = 1
        # Fixed: previously re-asserted sot1 (already dirty), leaving the
        # item-assignment path unverified. Assert the object just mutated.
        self.assertTrue(sot2.is_dirty)
        sot3 = Test({"attr": 1})
        self.assertTrue(sot3.is_dirty)
    def test_update_attrs(self):
        class Test(resource.Resource):
            moe = resource.prop("the-attr")
            larry = resource.prop("the-attr2")
            curly = resource.prop("the-attr3", type=int)
            shemp = resource.prop("the-attr4")
        value1 = "one"
        value2 = "two"
        value3 = "3"
        value4 = "fore"
        value5 = "fiver"
        sot = Test({"the-attr": value1})
        sot.update_attrs({"the-attr2": value2, "notprop": value4})
        self.assertTrue(sot.is_dirty)
        self.assertEqual(value1, sot.moe)
        self.assertEqual(value1, sot["the-attr"])
        self.assertEqual(value2, sot.larry)
        self.assertEqual(value4, sot.notprop)
        sot._reset_dirty()
        sot.update_attrs(curly=value3)
        self.assertTrue(sot.is_dirty)
        self.assertEqual(int, type(sot.curly))
        self.assertEqual(int(value3), sot.curly)
        sot._reset_dirty()
        sot.update_attrs(**{"the-attr4": value5})
        self.assertTrue(sot.is_dirty)
        self.assertEqual(value5, sot.shemp)
    def test_get_id(self):
        class Test(resource.Resource):
            pass
        ID = "an id"
        res = Test({"id": ID})
        self.assertEqual(ID, resource.Resource.get_id(ID))
        self.assertEqual(ID, resource.Resource.get_id(res))
    def test_convert_ids(self):
        class TestResourceFoo(resource.Resource):
            pass
        class TestResourceBar(resource.Resource):
            pass
        resfoo = TestResourceFoo({'id': 'FAKEFOO'})
        resbar = TestResourceBar({'id': 'FAKEBAR'})
        self.assertIsNone(resource.Resource.convert_ids(None))
        attrs = {
            'key1': 'value1'
        }
        self.assertEqual(attrs, resource.Resource.convert_ids(attrs))
        attrs = {
            'foo': resfoo,
            'bar': resbar,
            'other': 'whatever',
        }
        res = resource.Resource.convert_ids(attrs)
        self.assertEqual('FAKEFOO', res['foo'])
        self.assertEqual('FAKEBAR', res['bar'])
        self.assertEqual('whatever', res['other'])
    def test_repr(self):
        # A repr stripped of its module prefix must eval back into an
        # equivalent object.
        fr = FakeResource()
        fr._loaded = False
        fr.first = "hey"
        fr.second = "hi"
        fr.third = "nah"
        the_repr = repr(fr)
        the_repr = the_repr.replace('ecl.tests.unit.test_resource.', '')
        result = eval(the_repr)
        self.assertEqual(fr._loaded, result._loaded)
        self.assertEqual(fr.first, result.first)
        self.assertEqual(fr.second, result.second)
        self.assertEqual(fr.third, result.third)
    def test_id_attribute(self):
        faker = FakeResource(fake_data)
        self.assertEqual(fake_id, faker.id)
        faker.id_attribute = 'name'
        self.assertEqual(fake_name, faker.id)
        faker.id_attribute = 'attr1'
        self.assertEqual(fake_attr1, faker.id)
        faker.id_attribute = 'attr2'
        self.assertEqual(fake_attr2, faker.id)
        faker.id_attribute = 'id'
        self.assertEqual(fake_id, faker.id)
    def test_name_attribute(self):
        class Person_ES(resource.Resource):
            name_attribute = "nombre"
            nombre = resource.prop('nombre')
        name = "Brian"
        args = {'nombre': name}
        person = Person_ES(args)
        self.assertEqual(name, person.nombre)
        self.assertEqual(name, person.name)
        new_name = "Julien"
        person.name = new_name
        self.assertEqual(new_name, person.nombre)
        self.assertEqual(new_name, person.name)
    def test_boolstr_prop(self):
        faker = FakeResource(fake_data)
        self.assertTrue(faker.enabled)
        self.assertTrue(faker['enabled'])
        faker._attrs['enabled'] = False
        self.assertFalse(faker.enabled)
        self.assertFalse(faker['enabled'])
        def set_invalid():
            faker.enabled = 'INVALID'
        self.assertRaises(ValueError, set_invalid)
class ResourceMapping(base.TestCase):
    """Exercise the mapping protocol (getitem/setitem/delitem/len/iter)
    implemented by Resource, including dirty-attribute tracking."""

    def test__getitem(self):
        value = 10

        class Test(resource.Resource):
            attr = resource.prop("attr")

        t = Test(attrs={"attr": value})
        self.assertEqual(value, t["attr"])

    def test__setitem__existing_item_changed(self):
        # Storing a new value marks the key as dirty.
        class Test(resource.Resource):
            pass

        t = Test()
        key = "attr"
        value = 1
        t[key] = value
        self.assertEqual(value, t._attrs[key])
        self.assertTrue(key in t._dirty)

    def test__setitem__existing_item_unchanged(self):
        # Re-assigning the same value must NOT mark the key dirty.
        class Test(resource.Resource):
            pass

        key = "attr"
        value = 1
        t = Test(attrs={key: value})
        t._reset_dirty()
        t[key] = value
        self.assertEqual(value, t._attrs[key])
        self.assertTrue(key not in t._dirty)

    def test__setitem__new_item(self):
        # A brand-new key is stored and marked dirty.
        class Test(resource.Resource):
            pass

        t = Test()
        key = "attr"
        value = 1
        t[key] = value
        self.assertEqual(value, t._attrs[key])
        self.assertTrue(key in t._dirty)

    def test__delitem__(self):
        # Deletion removes the attr but still records the key as dirty.
        class Test(resource.Resource):
            pass

        key = "attr"
        value = 1
        t = Test(attrs={key: value})
        del t[key]
        self.assertTrue(key not in t._attrs)
        self.assertTrue(key in t._dirty)

    def test__len__(self):
        class Test(resource.Resource):
            pass

        attrs = {"a": 1, "b": 2, "c": 3}
        t = Test(attrs=attrs)
        self.assertEqual(len(attrs.keys()), len(t))

    def test__iter__(self):
        class Test(resource.Resource):
            pass

        attrs = {"a": 1, "b": 2, "c": 3}
        t = Test(attrs=attrs)
        for attr in t:
            self.assertEqual(attrs[attr], t[attr])

    def _test_resource_serialization(self, session_method, resource_method):
        """Shared body: a Resource-typed attr must be JSON-serializable when
        sent through the session.

        :param session_method: name of the mocked HTTP verb ("post"/"patch")
        :param resource_method: the Resource call driven through that verb
        """
        attr_type = resource.Resource

        class Test(resource.Resource):
            allow_create = True
            attr = resource.prop("attr", type=attr_type)

        the_id = 123
        sot = Test()
        sot.attr = resource.Resource({"id": the_id})
        self.assertEqual(attr_type, type(sot.attr))

        def fake_call(*args, **kwargs):
            # Fail the test if the request body cannot be JSON-serialized.
            attrs = kwargs["json"]
            try:
                json.dumps(attrs)
            except TypeError as e:
                self.fail("Unable to serialize _attrs: %s" % e)
            resp = mock.Mock()
            resp.json = mock.Mock(return_value=attrs)
            return resp

        session = mock.Mock()
        setattr(session, session_method, mock.Mock(side_effect=fake_call))

        if resource_method == "create_by_id":
            session.create_by_id(session, sot._attrs)
        elif resource_method == "update_by_id":
            session.update_by_id(session, None, sot._attrs)

    def test_create_serializes_resource_types(self):
        self._test_resource_serialization("post", "create_by_id")

    def test_update_serializes_resource_types(self):
        self._test_resource_serialization("patch", "update_by_id")
class FakeResponse(object):
    """Minimal stand-in for an HTTP response: ``json()`` echoes the body.

    The body is stored verbatim; no decoding or validation is performed.
    """

    def __init__(self, response):
        self.body = response

    def json(self):
        # Mirrors the requests API used by the code under test.
        return self.body
class TestFind(base.TestCase):
    """Tests for Resource.find(): lookup by id, by name, and edge cases.

    ``find`` first tries retrieval by id; on NotFoundException it falls
    back to a filtered list query (visible in the side_effect sequences
    each test queues on the mocked session.get).
    """

    NAME = 'matrix'
    ID = 'Fishburne'
    PROP = 'attribute2'

    def setUp(self):
        super(TestFind, self).setUp()
        self.mock_session = mock.Mock()
        self.mock_get = mock.Mock()
        self.mock_session.get = self.mock_get
        self.matrix = {'id': self.ID, 'name': self.NAME, 'prop': self.PROP}

    def test_name(self):
        # Retrieval by id 404s first, so find() falls back to a name query.
        self.mock_get.side_effect = [
            exceptions.NotFoundException(),
            FakeResponse({FakeResource.resources_key: [self.matrix]})
        ]

        result = FakeResource.find(self.mock_session, self.NAME,
                                   path_args=fake_arguments)

        self.assertEqual(self.NAME, result.name)
        self.assertEqual(self.PROP, result.prop)

    def test_id(self):
        # Direct retrieval by id succeeds on the first GET.
        self.mock_get.side_effect = [
            FakeResponse({FakeResource.resource_key: self.matrix})
        ]

        result = FakeResource.find(self.mock_session, self.ID,
                                   path_args=fake_arguments)

        self.assertEqual(self.ID, result.id)
        self.assertEqual(self.PROP, result.prop)

        path = "fakes/" + fake_parent + "/data/" + self.ID
        self.mock_get.assert_any_call(path, endpoint_filter=None)

    def test_id_no_retrieve(self):
        # With allow_retrieve=False, find() goes straight to the list query.
        self.mock_get.side_effect = [
            FakeResponse({FakeResource.resources_key: [self.matrix]})
        ]

        class NoRetrieveResource(FakeResource):
            allow_retrieve = False

        result = NoRetrieveResource.find(self.mock_session, self.ID,
                                         path_args=fake_arguments)

        self.assertEqual(self.ID, result.id)
        self.assertEqual(self.PROP, result.prop)

    def test_dups(self):
        # Two distinct matches for one name must raise DuplicateResource.
        dupe = self.matrix.copy()
        dupe['id'] = 'different'
        self.mock_get.side_effect = [
            exceptions.NotFoundException(),
            FakeResponse({FakeResource.resources_key: [self.matrix, dupe]})
        ]

        self.assertRaises(exceptions.DuplicateResource, FakeResource.find,
                          self.mock_session, self.NAME)

    def test_id_attribute_find(self):
        # A custom id_attribute ('ip_address') is used for the lookup.
        floater = {'ip_address': "127.0.0.1", 'prop': self.PROP}
        self.mock_get.side_effect = [
            FakeResponse({FakeResource.resource_key: floater})
        ]

        # FIX: the original assigned id_attribute twice in a row; the
        # duplicate statement has been removed.
        FakeResource.id_attribute = 'ip_address'
        result = FakeResource.find(self.mock_session, "127.0.0.1",
                                   path_args=fake_arguments)
        self.assertEqual("127.0.0.1", result.id)
        self.assertEqual(self.PROP, result.prop)
        # Restore the class attribute so other tests are unaffected.
        FakeResource.id_attribute = 'id'

        p = {'ip_address': "127.0.0.1"}
        path = fake_path + "?limit=2"
        # NOTE(review): ``called_once_with`` is not a Mock assertion -- it
        # silently creates a child mock and checks nothing.  It is kept
        # inert here because converting it to ``assert_called_once_with``
        # would fail: the retrieve-by-id path above succeeds, so this
        # "?limit=2" query is never issued.  TODO: decide what this line
        # should actually assert.
        self.mock_get.called_once_with(path, params=p, endpoint_filter=None)

    def test_nada(self):
        # No match at all: find() returns None by default.
        self.mock_get.side_effect = [
            exceptions.NotFoundException(),
            FakeResponse({FakeResource.resources_key: []})
        ]

        self.assertIsNone(FakeResource.find(self.mock_session, self.NAME))

    def test_no_name(self):
        # Without a name_attribute there is nothing to query by, so no match.
        self.mock_get.side_effect = [
            exceptions.NotFoundException(),
            FakeResponse({FakeResource.resources_key: [self.matrix]})
        ]
        # NOTE(review): this mutates the class attribute and never restores
        # it; tests running after this one see name_attribute=None.
        FakeResource.name_attribute = None

        self.assertIsNone(FakeResource.find(self.mock_session, self.NAME))

    def test_nada_not_ignored(self):
        # With ignore_missing=False a miss raises instead of returning None.
        self.mock_get.side_effect = [
            exceptions.NotFoundException(),
            FakeResponse({FakeResource.resources_key: []})
        ]

        self.assertRaises(exceptions.ResourceNotFound, FakeResource.find,
                          self.mock_session, self.NAME, ignore_missing=False)
class TestWaitForStatus(base.TestCase):
    """Tests for resource.wait_for_status() polling behaviour."""

    def __init__(self, *args, **kwargs):
        super(TestWaitForStatus, self).__init__(*args, **kwargs)
        # Canned GET responses shared by the tests.  Built in __init__
        # rather than setUp because they are never mutated.
        self.build = FakeResponse(self.body_with_status(fake_body, 'BUILD'))
        self.active = FakeResponse(self.body_with_status(fake_body, 'ACTIVE'))
        self.error = FakeResponse(self.body_with_status(fake_body, 'ERROR'))

    def setUp(self):
        super(TestWaitForStatus, self).setUp()
        self.sess = mock.Mock()

    def body_with_status(self, body, status):
        # Deep copy so the shared ``fake_body`` fixture is never mutated.
        body_copy = copy.deepcopy(body)
        body_copy[fake_resource]['status'] = status
        return body_copy

    def test_wait_for_status_nothing(self):
        # Already in the desired status: no GET may be issued at all.
        self.sess.get = mock.Mock()
        sot = FakeResource.new(**fake_data)
        sot.status = 'ACTIVE'

        self.assertEqual(sot, resource.wait_for_status(
            self.sess, sot, 'ACTIVE', [], 1, 2))
        self.assertEqual([], self.sess.get.call_args_list)

    def test_wait_for_status(self):
        # BUILD then ACTIVE: polling stops once the status matches.
        self.sess.get = mock.Mock()
        self.sess.get.side_effect = [self.build, self.active]
        sot = FakeResource.new(**fake_data)

        self.assertEqual(sot, resource.wait_for_status(
            self.sess, sot, 'ACTIVE', [], 1, 2))

    def test_wait_for_status_timeout(self):
        # Status never leaves BUILD within the poll budget: timeout.
        self.sess.get = mock.Mock()
        self.sess.get.side_effect = [self.build, self.build]
        sot = FakeResource.new(**fake_data)

        self.assertRaises(exceptions.ResourceTimeout, resource.wait_for_status,
                          self.sess, sot, 'ACTIVE', ['ERROR'], 1, 2)

    def test_wait_for_status_failures(self):
        # Reaching a status listed in ``failures`` raises ResourceFailure.
        self.sess.get = mock.Mock()
        self.sess.get.side_effect = [self.build, self.error]
        sot = FakeResource.new(**fake_data)

        self.assertRaises(exceptions.ResourceFailure, resource.wait_for_status,
                          self.sess, sot, 'ACTIVE', ['ERROR'], 1, 2)

    def test_wait_for_status_no_status(self):
        # A resource without a ``status`` attribute cannot be waited on.
        class FakeResourceNoStatus(resource.Resource):
            allow_retrieve = True

        sot = FakeResourceNoStatus.new(id=123)

        self.assertRaises(AttributeError, resource.wait_for_status,
                          self.sess, sot, 'ACTIVE', ['ERROR'], 1, 2)
class TestWaitForDelete(base.TestCase):
    """Tests for resource.wait_for_delete() polling behaviour."""

    def test_wait_for_delete(self):
        # One poll still finds the resource, the next one 404s: success.
        sess = mock.Mock()
        sot = FakeResource.new(**fake_data)
        sot.get = mock.Mock(
            side_effect=[sot, exceptions.NotFoundException()])

        self.assertEqual(sot, resource.wait_for_delete(sess, sot, 1, 2))

    def test_wait_for_delete_fail(self):
        # The resource never disappears, so the wait times out.
        sess = mock.Mock()
        sot = FakeResource.new(**fake_data)
        sot.get = mock.Mock(return_value=sot)

        self.assertRaises(exceptions.ResourceTimeout, resource.wait_for_delete,
                          sess, sot, 1, 2)
| true | true |
f720df1f8976d6666a660d614734f5c3010f2b3d | 5,980 | py | Python | deep-learning-for-image-processing-master/pytorch_object_detection/train_coco_dataset/network_files/boxes.py | zpwithme/zzzzpppp | 0f5df647f1e9d6cb8c01b3fc7df25ee543714af3 | [
"MIT"
] | null | null | null | deep-learning-for-image-processing-master/pytorch_object_detection/train_coco_dataset/network_files/boxes.py | zpwithme/zzzzpppp | 0f5df647f1e9d6cb8c01b3fc7df25ee543714af3 | [
"MIT"
] | null | null | null | deep-learning-for-image-processing-master/pytorch_object_detection/train_coco_dataset/network_files/boxes.py | zpwithme/zzzzpppp | 0f5df647f1e9d6cb8c01b3fc7df25ee543714af3 | [
"MIT"
] | 2 | 2021-06-26T16:53:38.000Z | 2021-08-29T22:16:20.000Z | import torch
from typing import Tuple
from torch import Tensor
import torchvision
def nms(boxes, scores, iou_threshold):
    # type: (Tensor, Tensor, float) -> Tensor
    """
    Performs non-maximum suppression (NMS) on the boxes according
    to their intersection-over-union (IoU).

    NMS iteratively removes lower scoring boxes which have an
    IoU greater than iou_threshold with another (higher scoring)
    box.

    Parameters
    ----------
    boxes : Tensor[N, 4])
        boxes to perform NMS on. They
        are expected to be in (x1, y1, x2, y2) format
    scores : Tensor[N]
        scores for each one of the boxes
    iou_threshold : float
        discards all overlapping
        boxes with IoU greater than iou_threshold

    Returns
    -------
    keep : Tensor
        int64 tensor with the indices
        of the elements that have been kept
        by NMS, sorted in decreasing order of scores
    """
    # Dispatch to torchvision's registered nms operator.
    return torch.ops.torchvision.nms(boxes, scores, iou_threshold)
def batched_nms(boxes, scores, idxs, iou_threshold):
    # type: (Tensor, Tensor, Tensor, float) -> Tensor
    """
    Performs non-maximum suppression in a batched fashion.

    Each index value correspond to a category, and NMS
    will not be applied between elements of different categories.

    Parameters
    ----------
    boxes : Tensor[N, 4]
        boxes where NMS will be performed. They
        are expected to be in (x1, y1, x2, y2) format
    scores : Tensor[N]
        scores for each one of the boxes
    idxs : Tensor[N]
        indices of the categories for each one of the boxes.
    iou_threshold : float
        discards all overlapping boxes
        with IoU greater than iou_threshold

    Returns
    -------
    keep : Tensor
        int64 tensor with the indices of
        the elements that have been kept by NMS, sorted
        in decreasing order of scores
    """
    if boxes.numel() == 0:
        return torch.empty((0,), dtype=torch.int64, device=boxes.device)

    # strategy: in order to perform NMS independently per class.
    # we add an offset to all the boxes. The offset is dependent
    # only on the class idx, and is large enough so that boxes
    # from different classes do not overlap
    # Largest coordinate value over all boxes (xmin, ymin, xmax, ymax).
    max_coordinate = boxes.max()

    # to(): Performs Tensor dtype and/or device conversion.
    # Build one large offset per category/level; ``to(boxes)`` only makes
    # the offsets match the dtype and device of ``boxes``.
    offsets = idxs.to(boxes) * (max_coordinate + 1)
    # After adding its offset, a box can no longer overlap a box from a
    # different category/level, so a single NMS call handles all of them.
    boxes_for_nms = boxes + offsets[:, None]
    keep = nms(boxes_for_nms, scores, iou_threshold)
    return keep
def remove_small_boxes(boxes, min_size):
    # type: (Tensor, float) -> Tensor
    """
    Return the indices of boxes whose width AND height both reach
    ``min_size``; boxes with any side smaller than that are dropped.

    Arguments:
        boxes (Tensor[N, 4]): boxes in (x1, y1, x2, y2) format
        min_size (float): minimum size

    Returns:
        keep (Tensor[K]): indices of the boxes that have both sides
            larger than min_size
    """
    widths = boxes[:, 2] - boxes[:, 0]
    heights = boxes[:, 3] - boxes[:, 1]
    # Boolean mask: True where both sides reach the threshold.
    big_enough = (widths >= min_size) & (heights >= min_size)
    # where() on a bool tensor yields the indices of the True entries.
    return torch.where(big_enough)[0]
def clip_boxes_to_image(boxes, size):
    # type: (Tensor, Tuple[int, int]) -> Tensor
    """
    Clip boxes so that they lie inside an image of size `size`.
    Coordinates that fall outside the image are moved onto its border.

    Arguments:
        boxes (Tensor[N, 4]): boxes in (x1, y1, x2, y2) format
        size (Tuple[height, width]): size of the image

    Returns:
        clipped_boxes (Tensor[N, 4])
    """
    dim = boxes.dim()
    boxes_x = boxes[..., 0::2]  # x1, x2
    boxes_y = boxes[..., 1::2]  # y1, y2
    height, width = size

    if torchvision._is_tracing():
        # Tracing path uses tensor-valued min/max instead of scalar
        # clamp() -- presumably for export/tracing compatibility; TODO confirm.
        boxes_x = torch.max(boxes_x, torch.tensor(0, dtype=boxes.dtype, device=boxes.device))
        boxes_x = torch.min(boxes_x, torch.tensor(width, dtype=boxes.dtype, device=boxes.device))
        boxes_y = torch.max(boxes_y, torch.tensor(0, dtype=boxes.dtype, device=boxes.device))
        boxes_y = torch.min(boxes_y, torch.tensor(height, dtype=boxes.dtype, device=boxes.device))
    else:
        boxes_x = boxes_x.clamp(min=0, max=width)   # keep x within [0, width]
        boxes_y = boxes_y.clamp(min=0, max=height)  # keep y within [0, height]

    # Re-interleave the clipped x and y coordinates back into (x1,y1,x2,y2).
    clipped_boxes = torch.stack((boxes_x, boxes_y), dim=dim)
    return clipped_boxes.reshape(boxes.shape)
def box_area(boxes):
    """
    Computes the area of a set of bounding boxes, which are specified by
    their (x1, y1, x2, y2) coordinates.

    Arguments:
        boxes (Tensor[N, 4]): boxes for which the area will be computed. They
            are expected to be in (x1, y1, x2, y2) format

    Returns:
        area (Tensor[N]): area for each box
    """
    widths = boxes[:, 2] - boxes[:, 0]
    heights = boxes[:, 3] - boxes[:, 1]
    return widths * heights
def box_iou(boxes1, boxes2):
    """
    Return intersection-over-union (Jaccard index) of boxes.

    Both sets of boxes are expected to be in (x1, y1, x2, y2) format.

    Arguments:
        boxes1 (Tensor[N, 4])
        boxes2 (Tensor[M, 4])

    Returns:
        iou (Tensor[N, M]): the NxM matrix containing the pairwise
            IoU values for every element in boxes1 and boxes2
    """
    area1 = box_area(boxes1)
    area2 = box_area(boxes2)

    # Broadcast boxes1 against boxes2 to form every N x M pairing.
    top_left = torch.max(boxes1[:, None, :2], boxes2[:, :2])      # [N, M, 2]
    bottom_right = torch.min(boxes1[:, None, 2:], boxes2[:, 2:])  # [N, M, 2]

    # Negative extents mean "no overlap"; clamp them to zero.
    extents = (bottom_right - top_left).clamp(min=0)  # [N, M, 2]
    inter = extents[:, :, 0] * extents[:, :, 1]       # [N, M]

    union = area1[:, None] + area2 - inter
    return inter / union
| 32.857143 | 98 | 0.634783 | import torch
from typing import Tuple
from torch import Tensor
import torchvision
def nms(boxes, scores, iou_threshold):
return torch.ops.torchvision.nms(boxes, scores, iou_threshold)
def batched_nms(boxes, scores, idxs, iou_threshold):
if boxes.numel() == 0:
return torch.empty((0,), dtype=torch.int64, device=boxes.device)
max_coordinate = boxes.max()
offsets = idxs.to(boxes) * (max_coordinate + 1)
boxes_for_nms = boxes + offsets[:, None]
keep = nms(boxes_for_nms, scores, iou_threshold)
return keep
def remove_small_boxes(boxes, min_size):
ws, hs = boxes[:, 2] - boxes[:, 0], boxes[:, 3] - boxes[:, 1]
keep = torch.logical_and(torch.ge(ws, min_size), torch.ge(hs, min_size))
keep = torch.where(keep)[0]
return keep
def clip_boxes_to_image(boxes, size):
dim = boxes.dim()
boxes_x = boxes[..., 0::2]
boxes_y = boxes[..., 1::2]
height, width = size
if torchvision._is_tracing():
boxes_x = torch.max(boxes_x, torch.tensor(0, dtype=boxes.dtype, device=boxes.device))
boxes_x = torch.min(boxes_x, torch.tensor(width, dtype=boxes.dtype, device=boxes.device))
boxes_y = torch.max(boxes_y, torch.tensor(0, dtype=boxes.dtype, device=boxes.device))
boxes_y = torch.min(boxes_y, torch.tensor(height, dtype=boxes.dtype, device=boxes.device))
else:
boxes_x = boxes_x.clamp(min=0, max=width)
boxes_y = boxes_y.clamp(min=0, max=height)
clipped_boxes = torch.stack((boxes_x, boxes_y), dim=dim)
return clipped_boxes.reshape(boxes.shape)
def box_area(boxes):
return (boxes[:, 2] - boxes[:, 0]) * (boxes[:, 3] - boxes[:, 1])
def box_iou(boxes1, boxes2):
area1 = box_area(boxes1)
area2 = box_area(boxes2)
lt = torch.max(boxes1[:, None, :2], boxes2[:, :2])
rb = torch.min(boxes1[:, None, 2:], boxes2[:, 2:])
wh = (rb - lt).clamp(min=0)
inter = wh[:, :, 0] * wh[:, :, 1]
iou = inter / (area1[:, None] + area2 - inter)
return iou
| true | true |
f720dfa2212e24646fbef26faa5e5bdf2d802ce4 | 14,811 | py | Python | PyObjCTest/test_nsgraphics.py | linuxfood/pyobjc-framework-Cocoa-test | 3475890f165ab26a740f13d5afe4c62b4423a140 | [
"MIT"
] | null | null | null | PyObjCTest/test_nsgraphics.py | linuxfood/pyobjc-framework-Cocoa-test | 3475890f165ab26a740f13d5afe4c62b4423a140 | [
"MIT"
] | null | null | null | PyObjCTest/test_nsgraphics.py | linuxfood/pyobjc-framework-Cocoa-test | 3475890f165ab26a740f13d5afe4c62b4423a140 | [
"MIT"
] | null | null | null | import AppKit
import objc
from PyObjCTools.TestSupport import TestCase, min_os_level
class TestNSGraphics(TestCase):
    """Validate the values of AppKit graphics constants and smoke-test the
    global drawing functions together with their bridged argument metadata
    (in/out pointers, array-size relations, BOOL flags)."""

    def testConstants(self):
        """AppKit graphics constants must be exposed with the expected
        values and types."""
        # Legacy NSComposite* compositing-operation values.
        self.assertEqual(AppKit.NSCompositeClear, 0)
        self.assertEqual(AppKit.NSCompositeCopy, 1)
        self.assertEqual(AppKit.NSCompositeSourceOver, 2)
        self.assertEqual(AppKit.NSCompositeSourceIn, 3)
        self.assertEqual(AppKit.NSCompositeSourceOut, 4)
        self.assertEqual(AppKit.NSCompositeSourceAtop, 5)
        self.assertEqual(AppKit.NSCompositeDestinationOver, 6)
        self.assertEqual(AppKit.NSCompositeDestinationIn, 7)
        self.assertEqual(AppKit.NSCompositeDestinationOut, 8)
        self.assertEqual(AppKit.NSCompositeDestinationAtop, 9)
        self.assertEqual(AppKit.NSCompositeXOR, 10)
        self.assertEqual(AppKit.NSCompositePlusDarker, 11)
        self.assertEqual(AppKit.NSCompositeHighlight, 12)
        self.assertEqual(AppKit.NSCompositePlusLighter, 13)
        self.assertEqual(AppKit.NSCompositeMultiply, 14)
        self.assertEqual(AppKit.NSCompositeScreen, 15)
        self.assertEqual(AppKit.NSCompositeOverlay, 16)
        self.assertEqual(AppKit.NSCompositeDarken, 17)
        self.assertEqual(AppKit.NSCompositeLighten, 18)
        self.assertEqual(AppKit.NSCompositeColorDodge, 19)
        self.assertEqual(AppKit.NSCompositeColorBurn, 20)
        self.assertEqual(AppKit.NSCompositeSoftLight, 21)
        self.assertEqual(AppKit.NSCompositeHardLight, 22)
        self.assertEqual(AppKit.NSCompositeDifference, 23)
        self.assertEqual(AppKit.NSCompositeExclusion, 24)
        self.assertEqual(AppKit.NSCompositeHue, 25)
        self.assertEqual(AppKit.NSCompositeSaturation, 26)
        self.assertEqual(AppKit.NSCompositeColor, 27)
        self.assertEqual(AppKit.NSCompositeLuminosity, 28)

        # Modern NSCompositingOperation* names mirror the same values.
        self.assertEqual(AppKit.NSCompositingOperationClear, 0)
        self.assertEqual(AppKit.NSCompositingOperationCopy, 1)
        self.assertEqual(AppKit.NSCompositingOperationSourceOver, 2)
        self.assertEqual(AppKit.NSCompositingOperationSourceIn, 3)
        self.assertEqual(AppKit.NSCompositingOperationSourceOut, 4)
        self.assertEqual(AppKit.NSCompositingOperationSourceAtop, 5)
        self.assertEqual(AppKit.NSCompositingOperationDestinationOver, 6)
        self.assertEqual(AppKit.NSCompositingOperationDestinationIn, 7)
        self.assertEqual(AppKit.NSCompositingOperationDestinationOut, 8)
        self.assertEqual(AppKit.NSCompositingOperationDestinationAtop, 9)
        self.assertEqual(AppKit.NSCompositingOperationXOR, 10)
        self.assertEqual(AppKit.NSCompositingOperationPlusDarker, 11)
        self.assertEqual(AppKit.NSCompositingOperationHighlight, 12)
        self.assertEqual(AppKit.NSCompositingOperationPlusLighter, 13)
        self.assertEqual(AppKit.NSCompositingOperationMultiply, 14)
        self.assertEqual(AppKit.NSCompositingOperationScreen, 15)
        self.assertEqual(AppKit.NSCompositingOperationOverlay, 16)
        self.assertEqual(AppKit.NSCompositingOperationDarken, 17)
        self.assertEqual(AppKit.NSCompositingOperationLighten, 18)
        self.assertEqual(AppKit.NSCompositingOperationColorDodge, 19)
        self.assertEqual(AppKit.NSCompositingOperationColorBurn, 20)
        self.assertEqual(AppKit.NSCompositingOperationSoftLight, 21)
        self.assertEqual(AppKit.NSCompositingOperationHardLight, 22)
        self.assertEqual(AppKit.NSCompositingOperationDifference, 23)
        self.assertEqual(AppKit.NSCompositingOperationExclusion, 24)
        self.assertEqual(AppKit.NSCompositingOperationHue, 25)
        self.assertEqual(AppKit.NSCompositingOperationSaturation, 26)
        self.assertEqual(AppKit.NSCompositingOperationColor, 27)
        self.assertEqual(AppKit.NSCompositingOperationLuminosity, 28)

        # Backing store, window ordering, and focus-ring enums.
        self.assertEqual(AppKit.NSBackingStoreRetained, 0)
        self.assertEqual(AppKit.NSBackingStoreNonretained, 1)
        self.assertEqual(AppKit.NSBackingStoreBuffered, 2)

        self.assertEqual(AppKit.NSWindowAbove, 1)
        self.assertEqual(AppKit.NSWindowBelow, -1)
        self.assertEqual(AppKit.NSWindowOut, 0)

        self.assertEqual(AppKit.NSFocusRingOnly, 0)
        self.assertEqual(AppKit.NSFocusRingBelow, 1)
        self.assertEqual(AppKit.NSFocusRingAbove, 2)

        self.assertEqual(AppKit.NSFocusRingTypeDefault, 0)
        self.assertEqual(AppKit.NSFocusRingTypeNone, 1)
        self.assertEqual(AppKit.NSFocusRingTypeExterior, 2)

        # Color-space name constants are strings.
        self.assertIsInstance(AppKit.NSCalibratedWhiteColorSpace, str)
        self.assertIsInstance(AppKit.NSCalibratedBlackColorSpace, str)
        self.assertIsInstance(AppKit.NSCalibratedRGBColorSpace, str)
        self.assertIsInstance(AppKit.NSDeviceWhiteColorSpace, str)
        self.assertIsInstance(AppKit.NSDeviceBlackColorSpace, str)
        self.assertIsInstance(AppKit.NSDeviceRGBColorSpace, str)
        self.assertIsInstance(AppKit.NSDeviceCMYKColorSpace, str)
        self.assertIsInstance(AppKit.NSNamedColorSpace, str)
        self.assertIsInstance(AppKit.NSPatternColorSpace, str)
        self.assertIsInstance(AppKit.NSCustomColorSpace, str)

        # Gray-level constants are floats.
        self.assertIsInstance(AppKit.NSWhite, float)
        self.assertIsInstance(AppKit.NSLightGray, float)
        self.assertIsInstance(AppKit.NSDarkGray, float)
        self.assertIsInstance(AppKit.NSBlack, float)

        # Device-description dictionary keys are strings.
        self.assertIsInstance(AppKit.NSDeviceResolution, str)
        self.assertIsInstance(AppKit.NSDeviceColorSpaceName, str)
        self.assertIsInstance(AppKit.NSDeviceBitsPerSample, str)
        self.assertIsInstance(AppKit.NSDeviceIsScreen, str)
        self.assertIsInstance(AppKit.NSDeviceIsPrinter, str)
        self.assertIsInstance(AppKit.NSDeviceSize, str)

        self.assertEqual(AppKit.NSAnimationEffectDisappearingItemDefault, 0)
        self.assertEqual(AppKit.NSAnimationEffectPoof, 10)

        self.assertEqual(AppKit.NSDisplayGamutSRGB, 1)
        self.assertEqual(AppKit.NSDisplayGamutP3, 2)

    def testFunctions(self):
        """Smoke-test the global drawing helpers and their bridged
        argument metadata."""
        app = AppKit.NSApplication.sharedApplication()  # noqa: F841

        # NSBestDepth returns a depth plus an "exact match" BOOL by reference.
        self.assertArgHasType(AppKit.NSBestDepth, 4, b"o^" + objc._C_NSBOOL)
        self.assertArgIsBOOL(AppKit.NSBestDepth, 3)
        d, e = AppKit.NSBestDepth(AppKit.NSDeviceRGBColorSpace, 8, 32, False, None)
        self.assertIsInstance(d, int)
        self.assertIsInstance(e, bool)

        self.assertResultIsBOOL(AppKit.NSPlanarFromDepth)
        self.assertIsInstance(AppKit.NSPlanarFromDepth(0), bool)
        self.assertIsInstance(AppKit.NSColorSpaceFromDepth(0), str)
        self.assertIsInstance(AppKit.NSBitsPerSampleFromDepth(0), int)
        self.assertIsInstance(AppKit.NSBitsPerPixelFromDepth(0), int)
        self.assertIsInstance(
            AppKit.NSNumberOfColorComponents(AppKit.NSDeviceRGBColorSpace), int
        )

        v = AppKit.NSAvailableWindowDepths()
        self.assertIsInstance(v, tuple)
        self.assertNotEqual(len(v), 0)
        self.assertIsInstance(v[0], int)

        # The drawing functions need a current graphics context; draw into
        # an offscreen bitmap so no window is required.
        img = AppKit.NSBitmapImageRep.alloc().initWithBitmapDataPlanes_pixelsWide_pixelsHigh_bitsPerSample_samplesPerPixel_hasAlpha_isPlanar_colorSpaceName_bitmapFormat_bytesPerRow_bitsPerPixel_(  # noqa: B950
            None, 255, 255, 8, 4, True, False, AppKit.NSCalibratedRGBColorSpace, 0, 0, 0
        )
        context = AppKit.NSGraphicsContext.graphicsContextWithBitmapImageRep_(img)
        current = AppKit.NSGraphicsContext.currentContext()
        try:
            AppKit.NSGraphicsContext.setCurrentContext_(context)

            AppKit.NSRectFill(((0, 0), (1, 2)))

            self.assertArgSizeInArg(AppKit.NSRectFillList, 0, 1)
            AppKit.NSRectFillList([((0, 0), (1, 2)), ((10, 50), (9, 9))], 2)

            self.assertArgSizeInArg(AppKit.NSRectFillListWithGrays, 0, 2)
            self.assertArgSizeInArg(AppKit.NSRectFillListWithGrays, 1, 2)
            AppKit.NSRectFillListWithGrays(
                [((0, 0), (1, 2)), ((10, 50), (9, 9))], (0.5, 0.6), 2
            )

            self.assertArgSizeInArg(AppKit.NSRectFillListWithColors, 0, 2)
            self.assertArgSizeInArg(AppKit.NSRectFillListWithColors, 1, 2)
            AppKit.NSRectFillListWithColors(
                [((0, 0), (1, 2)), ((10, 50), (9, 9))],
                (AppKit.NSColor.blueColor(), AppKit.NSColor.redColor()),
                2,
            )

            AppKit.NSRectFillUsingOperation(
                ((0, 0), (1, 2)), AppKit.NSCompositeSourceOver
            )

            self.assertArgSizeInArg(AppKit.NSRectFillListUsingOperation, 0, 1)
            AppKit.NSRectFillListUsingOperation(
                [((0, 0), (1, 2)), ((10, 50), (9, 9))], 2, AppKit.NSCompositeSourceOver
            )

            self.assertArgSizeInArg(AppKit.NSRectFillListWithColorsUsingOperation, 0, 2)
            self.assertArgSizeInArg(AppKit.NSRectFillListWithColorsUsingOperation, 1, 2)
            AppKit.NSRectFillListWithColorsUsingOperation(
                [((0, 0), (1, 2)), ((10, 50), (9, 9))],
                (AppKit.NSColor.blueColor(), AppKit.NSColor.redColor()),
                2,
                AppKit.NSCompositeSourceOver,
            )

            AppKit.NSFrameRect(((5, 5), (20, 30)))
            AppKit.NSFrameRectWithWidth(((5, 5), (20, 30)), 4)
            AppKit.NSFrameRectWithWidthUsingOperation(
                ((5, 5), (20, 30)), 4, AppKit.NSCompositeSourceOver
            )
            AppKit.NSRectClip(((5, 5), (200, 200)))

            self.assertArgSizeInArg(AppKit.NSRectClipList, 0, 1)
            AppKit.NSRectClipList([((5, 5), (200, 200)), ((50, 50), (90, 100))], 2)

            color = AppKit.NSReadPixel((5, 5))
            self.assertIsInstance(color, AppKit.NSColor)

            self.assertArgSizeInArg(AppKit.NSDrawTiledRects, 2, 4)
            self.assertArgSizeInArg(AppKit.NSDrawTiledRects, 3, 4)
            self.assertArgIsIn(AppKit.NSDrawTiledRects, 2)
            self.assertArgIsIn(AppKit.NSDrawTiledRects, 3)
            AppKit.NSDrawTiledRects(
                ((10, 10), (50, 50)),
                ((15, 15), (10, 10)),
                [AppKit.NSMinXEdge, AppKit.NSMaxXEdge],
                [0.8, 0.9],
                2,
            )

            AppKit.NSDrawGrayBezel(((0, 0), (10, 10)), ((0, 0), (50, 50)))
            AppKit.NSDrawGroove(((0, 0), (10, 10)), ((0, 0), (50, 50)))
            AppKit.NSDrawWhiteBezel(((0, 0), (10, 10)), ((0, 0), (50, 50)))
            AppKit.NSDrawButton(((0, 0), (10, 10)), ((0, 0), (50, 50)))
            AppKit.NSEraseRect(((0, 0), (10, 10)))
            AppKit.NSCopyBits(0, ((10, 10), (50, 50)), (50, 50))
            AppKit.NSHighlightRect(((10, 10), (50, 50)))
            AppKit.NSDrawDarkBezel(((0, 0), (10, 10)), ((0, 0), (50, 50)))
            AppKit.NSDrawLightBezel(((0, 0), (10, 10)), ((0, 0), (50, 50)))
            AppKit.NSDottedFrameRect(((10, 10), (50, 50)))
            AppKit.NSDrawWindowBackground(((10, 10), (50, 50)))
        finally:
            # Always restore the previous context so later tests are not
            # left drawing into our offscreen bitmap.
            AppKit.NSGraphicsContext.setCurrentContext_(current)

        AppKit.NSSetFocusRingStyle(AppKit.NSFocusRingAbove)

        self.assertArgIsOut(AppKit.NSGetWindowServerMemory, 1)
        self.assertArgIsOut(AppKit.NSGetWindowServerMemory, 2)
        self.assertArgIsOut(AppKit.NSGetWindowServerMemory, 3)
        r = AppKit.NSGetWindowServerMemory(0, None, None, None)
        self.assertIsInstance(r[0], int)
        self.assertIsInstance(r[1], int)
        self.assertIsInstance(r[2], int)

        self.assertArgSizeInArg(AppKit.NSDrawColorTiledRects, 2, 4)
        self.assertArgSizeInArg(AppKit.NSDrawColorTiledRects, 3, 4)
        self.assertArgIsIn(AppKit.NSDrawColorTiledRects, 2)
        self.assertArgIsIn(AppKit.NSDrawColorTiledRects, 3)
        AppKit.NSDrawColorTiledRects(
            ((10, 10), (50, 50)),
            ((15, 15), (10, 10)),
            [AppKit.NSMinXEdge, AppKit.NSMaxXEdge],
            [AppKit.NSColor.redColor(), AppKit.NSColor.blueColor()],
            2,
        )

        # NSDrawBitmap is not exercised here (left disabled by the author):
        # self.assertArgIsBOOL(AppKit.NSDrawBitmap, 7)
        # self.assertArgIsBOOL(AppKit.NSDrawBitmap, 8)
        # AppKit.NSDrawBitmap(((0, 0), (10, 10)), 10, 20, 8, 4, 32, 40, False, True,
        #     AppKit.NSDeviceRGBColorSpace, [' '*4*10*20, '', '', '', ''])

        self.assertArgSizeInArg(AppKit.NSWindowList, 1, 0)
        self.assertArgIsOut(AppKit.NSWindowList, 1)
        v = AppKit.NSWindowList(5, None)
        self.assertIsInstance(v, tuple)
        self.assertEqual(len(v), 5)
        self.assertIsInstance(v[0], int)

        self.assertArgIsOut(AppKit.NSCountWindowsForContext, 1)
        v = AppKit.NSCountWindowsForContext(1, None)
        self.assertIsInstance(v, int)

        self.assertArgIsOut(AppKit.NSWindowListForContext, 2)
        self.assertArgSizeInArg(AppKit.NSWindowListForContext, 2, 1)
        v = AppKit.NSWindowListForContext(0, 5, None)
        self.assertIsInstance(v, tuple)
        self.assertEqual(len(v), 5)
        self.assertIsInstance(v[0], int)

        AppKit.NSBeep()

        count = AppKit.NSCountWindows(None)
        self.assertIsInstance(count, int)

        # These calls are allowed to fail with objc.error in some
        # environments; the test only checks they are callable.
        try:
            AppKit.NSDisableScreenUpdates()
        except objc.error:
            pass

        try:
            AppKit.NSEnableScreenUpdates()
        except objc.error:
            pass

        self.assertArgIsSEL(AppKit.NSShowAnimationEffect, 4, b"v@:^v")
        self.assertArgHasType(AppKit.NSShowAnimationEffect, 5, b"^v")
        try:
            AppKit.NSShowAnimationEffect(
                AppKit.NSAnimationEffectPoof, (10, 10), (20, 30), None, None, None
            )
        except objc.error:
            pass

    @min_os_level("10.5")
    def testConstants10_5(self):
        """Constants introduced in macOS 10.5."""
        self.assertEqual(AppKit.NSColorRenderingIntentDefault, 0)
        self.assertEqual(AppKit.NSColorRenderingIntentAbsoluteColorimetric, 1)
        self.assertEqual(AppKit.NSColorRenderingIntentRelativeColorimetric, 2)
        self.assertEqual(AppKit.NSColorRenderingIntentPerceptual, 3)
        self.assertEqual(AppKit.NSColorRenderingIntentSaturation, 4)

        self.assertEqual(AppKit.NSImageInterpolationDefault, 0)
        self.assertEqual(AppKit.NSImageInterpolationNone, 1)
        self.assertEqual(AppKit.NSImageInterpolationLow, 2)
        self.assertEqual(AppKit.NSImageInterpolationHigh, 3)

    @min_os_level("10.6")
    def testConstants10_6(self):
        """Constants introduced in macOS 10.6."""
        self.assertEqual(AppKit.NSWindowDepthTwentyfourBitRGB, 0x208)
        self.assertEqual(AppKit.NSWindowDepthSixtyfourBitRGB, 0x210)
        self.assertEqual(AppKit.NSWindowDepthOnehundredtwentyeightBitRGB, 0x220)
        self.assertEqual(AppKit.NSImageInterpolationMedium, 4)
AppKit.NSApplication.sharedApplication()
| 47.932039 | 209 | 0.667207 | import AppKit
import objc
from PyObjCTools.TestSupport import TestCase, min_os_level
class TestNSGraphics(TestCase):
def testConstants(self):
self.assertEqual(AppKit.NSCompositeClear, 0)
self.assertEqual(AppKit.NSCompositeCopy, 1)
self.assertEqual(AppKit.NSCompositeSourceOver, 2)
self.assertEqual(AppKit.NSCompositeSourceIn, 3)
self.assertEqual(AppKit.NSCompositeSourceOut, 4)
self.assertEqual(AppKit.NSCompositeSourceAtop, 5)
self.assertEqual(AppKit.NSCompositeDestinationOver, 6)
self.assertEqual(AppKit.NSCompositeDestinationIn, 7)
self.assertEqual(AppKit.NSCompositeDestinationOut, 8)
self.assertEqual(AppKit.NSCompositeDestinationAtop, 9)
self.assertEqual(AppKit.NSCompositeXOR, 10)
self.assertEqual(AppKit.NSCompositePlusDarker, 11)
self.assertEqual(AppKit.NSCompositeHighlight, 12)
self.assertEqual(AppKit.NSCompositePlusLighter, 13)
self.assertEqual(AppKit.NSCompositeMultiply, 14)
self.assertEqual(AppKit.NSCompositeScreen, 15)
self.assertEqual(AppKit.NSCompositeOverlay, 16)
self.assertEqual(AppKit.NSCompositeDarken, 17)
self.assertEqual(AppKit.NSCompositeLighten, 18)
self.assertEqual(AppKit.NSCompositeColorDodge, 19)
self.assertEqual(AppKit.NSCompositeColorBurn, 20)
self.assertEqual(AppKit.NSCompositeSoftLight, 21)
self.assertEqual(AppKit.NSCompositeHardLight, 22)
self.assertEqual(AppKit.NSCompositeDifference, 23)
self.assertEqual(AppKit.NSCompositeExclusion, 24)
self.assertEqual(AppKit.NSCompositeHue, 25)
self.assertEqual(AppKit.NSCompositeSaturation, 26)
self.assertEqual(AppKit.NSCompositeColor, 27)
self.assertEqual(AppKit.NSCompositeLuminosity, 28)
self.assertEqual(AppKit.NSCompositingOperationClear, 0)
self.assertEqual(AppKit.NSCompositingOperationCopy, 1)
self.assertEqual(AppKit.NSCompositingOperationSourceOver, 2)
self.assertEqual(AppKit.NSCompositingOperationSourceIn, 3)
self.assertEqual(AppKit.NSCompositingOperationSourceOut, 4)
self.assertEqual(AppKit.NSCompositingOperationSourceAtop, 5)
self.assertEqual(AppKit.NSCompositingOperationDestinationOver, 6)
self.assertEqual(AppKit.NSCompositingOperationDestinationIn, 7)
self.assertEqual(AppKit.NSCompositingOperationDestinationOut, 8)
self.assertEqual(AppKit.NSCompositingOperationDestinationAtop, 9)
self.assertEqual(AppKit.NSCompositingOperationXOR, 10)
self.assertEqual(AppKit.NSCompositingOperationPlusDarker, 11)
self.assertEqual(AppKit.NSCompositingOperationHighlight, 12)
self.assertEqual(AppKit.NSCompositingOperationPlusLighter, 13)
self.assertEqual(AppKit.NSCompositingOperationMultiply, 14)
self.assertEqual(AppKit.NSCompositingOperationScreen, 15)
self.assertEqual(AppKit.NSCompositingOperationOverlay, 16)
self.assertEqual(AppKit.NSCompositingOperationDarken, 17)
self.assertEqual(AppKit.NSCompositingOperationLighten, 18)
self.assertEqual(AppKit.NSCompositingOperationColorDodge, 19)
self.assertEqual(AppKit.NSCompositingOperationColorBurn, 20)
self.assertEqual(AppKit.NSCompositingOperationSoftLight, 21)
self.assertEqual(AppKit.NSCompositingOperationHardLight, 22)
self.assertEqual(AppKit.NSCompositingOperationDifference, 23)
self.assertEqual(AppKit.NSCompositingOperationExclusion, 24)
self.assertEqual(AppKit.NSCompositingOperationHue, 25)
self.assertEqual(AppKit.NSCompositingOperationSaturation, 26)
self.assertEqual(AppKit.NSCompositingOperationColor, 27)
self.assertEqual(AppKit.NSCompositingOperationLuminosity, 28)
self.assertEqual(AppKit.NSBackingStoreRetained, 0)
self.assertEqual(AppKit.NSBackingStoreNonretained, 1)
self.assertEqual(AppKit.NSBackingStoreBuffered, 2)
self.assertEqual(AppKit.NSWindowAbove, 1)
self.assertEqual(AppKit.NSWindowBelow, -1)
self.assertEqual(AppKit.NSWindowOut, 0)
self.assertEqual(AppKit.NSFocusRingOnly, 0)
self.assertEqual(AppKit.NSFocusRingBelow, 1)
self.assertEqual(AppKit.NSFocusRingAbove, 2)
self.assertEqual(AppKit.NSFocusRingTypeDefault, 0)
self.assertEqual(AppKit.NSFocusRingTypeNone, 1)
self.assertEqual(AppKit.NSFocusRingTypeExterior, 2)
self.assertIsInstance(AppKit.NSCalibratedWhiteColorSpace, str)
self.assertIsInstance(AppKit.NSCalibratedBlackColorSpace, str)
self.assertIsInstance(AppKit.NSCalibratedRGBColorSpace, str)
self.assertIsInstance(AppKit.NSDeviceWhiteColorSpace, str)
self.assertIsInstance(AppKit.NSDeviceBlackColorSpace, str)
self.assertIsInstance(AppKit.NSDeviceRGBColorSpace, str)
self.assertIsInstance(AppKit.NSDeviceCMYKColorSpace, str)
self.assertIsInstance(AppKit.NSNamedColorSpace, str)
self.assertIsInstance(AppKit.NSPatternColorSpace, str)
self.assertIsInstance(AppKit.NSCustomColorSpace, str)
self.assertIsInstance(AppKit.NSWhite, float)
self.assertIsInstance(AppKit.NSLightGray, float)
self.assertIsInstance(AppKit.NSDarkGray, float)
self.assertIsInstance(AppKit.NSBlack, float)
self.assertIsInstance(AppKit.NSDeviceResolution, str)
self.assertIsInstance(AppKit.NSDeviceColorSpaceName, str)
self.assertIsInstance(AppKit.NSDeviceBitsPerSample, str)
self.assertIsInstance(AppKit.NSDeviceIsScreen, str)
self.assertIsInstance(AppKit.NSDeviceIsPrinter, str)
self.assertIsInstance(AppKit.NSDeviceSize, str)
self.assertEqual(AppKit.NSAnimationEffectDisappearingItemDefault, 0)
self.assertEqual(AppKit.NSAnimationEffectPoof, 10)
self.assertEqual(AppKit.NSDisplayGamutSRGB, 1)
self.assertEqual(AppKit.NSDisplayGamutP3, 2)
    def testFunctions(self):
        """Exercise AppKit's global drawing/window functions and verify the
        PyObjC bridge metadata (BOOL args, out-params, array-size relations)
        declared for each of them."""
        # Kept for its side effect: some graphics calls require the shared
        # NSApplication instance to exist.
        app = AppKit.NSApplication.sharedApplication()
        self.assertArgHasType(AppKit.NSBestDepth, 4, b"o^" + objc._C_NSBOOL)
        self.assertArgIsBOOL(AppKit.NSBestDepth, 3)
        d, e = AppKit.NSBestDepth(AppKit.NSDeviceRGBColorSpace, 8, 32, False, None)
        self.assertIsInstance(d, int)
        self.assertIsInstance(e, bool)
        self.assertResultIsBOOL(AppKit.NSPlanarFromDepth)
        self.assertIsInstance(AppKit.NSPlanarFromDepth(0), bool)
        self.assertIsInstance(AppKit.NSColorSpaceFromDepth(0), str)
        self.assertIsInstance(AppKit.NSBitsPerSampleFromDepth(0), int)
        self.assertIsInstance(AppKit.NSBitsPerPixelFromDepth(0), int)
        self.assertIsInstance(
            AppKit.NSNumberOfColorComponents(AppKit.NSDeviceRGBColorSpace), int
        )
        v = AppKit.NSAvailableWindowDepths()
        self.assertIsInstance(v, tuple)
        self.assertNotEqual(len(v), 0)
        self.assertIsInstance(v[0], int)
        # Build an offscreen bitmap and make it the current graphics context so
        # the drawing functions below have something to render into.
        img = AppKit.NSBitmapImageRep.alloc().initWithBitmapDataPlanes_pixelsWide_pixelsHigh_bitsPerSample_samplesPerPixel_hasAlpha_isPlanar_colorSpaceName_bitmapFormat_bytesPerRow_bitsPerPixel_(
            None, 255, 255, 8, 4, True, False, AppKit.NSCalibratedRGBColorSpace, 0, 0, 0
        )
        context = AppKit.NSGraphicsContext.graphicsContextWithBitmapImageRep_(img)
        current = AppKit.NSGraphicsContext.currentContext()
        try:
            AppKit.NSGraphicsContext.setCurrentContext_(context)
            # Rect fill variants; the *List functions take a C array whose
            # length is passed in a separate argument (assertArgSizeInArg).
            AppKit.NSRectFill(((0, 0), (1, 2)))
            self.assertArgSizeInArg(AppKit.NSRectFillList, 0, 1)
            AppKit.NSRectFillList([((0, 0), (1, 2)), ((10, 50), (9, 9))], 2)
            self.assertArgSizeInArg(AppKit.NSRectFillListWithGrays, 0, 2)
            self.assertArgSizeInArg(AppKit.NSRectFillListWithGrays, 1, 2)
            AppKit.NSRectFillListWithGrays(
                [((0, 0), (1, 2)), ((10, 50), (9, 9))], (0.5, 0.6), 2
            )
            self.assertArgSizeInArg(AppKit.NSRectFillListWithColors, 0, 2)
            self.assertArgSizeInArg(AppKit.NSRectFillListWithColors, 1, 2)
            AppKit.NSRectFillListWithColors(
                [((0, 0), (1, 2)), ((10, 50), (9, 9))],
                (AppKit.NSColor.blueColor(), AppKit.NSColor.redColor()),
                2,
            )
            AppKit.NSRectFillUsingOperation(
                ((0, 0), (1, 2)), AppKit.NSCompositeSourceOver
            )
            self.assertArgSizeInArg(AppKit.NSRectFillListUsingOperation, 0, 1)
            AppKit.NSRectFillListUsingOperation(
                [((0, 0), (1, 2)), ((10, 50), (9, 9))], 2, AppKit.NSCompositeSourceOver
            )
            self.assertArgSizeInArg(AppKit.NSRectFillListWithColorsUsingOperation, 0, 2)
            self.assertArgSizeInArg(AppKit.NSRectFillListWithColorsUsingOperation, 1, 2)
            AppKit.NSRectFillListWithColorsUsingOperation(
                [((0, 0), (1, 2)), ((10, 50), (9, 9))],
                (AppKit.NSColor.blueColor(), AppKit.NSColor.redColor()),
                2,
                AppKit.NSCompositeSourceOver,
            )
            # Frame / clip / read-back helpers.
            AppKit.NSFrameRect(((5, 5), (20, 30)))
            AppKit.NSFrameRectWithWidth(((5, 5), (20, 30)), 4)
            AppKit.NSFrameRectWithWidthUsingOperation(
                ((5, 5), (20, 30)), 4, AppKit.NSCompositeSourceOver
            )
            AppKit.NSRectClip(((5, 5), (200, 200)))
            self.assertArgSizeInArg(AppKit.NSRectClipList, 0, 1)
            AppKit.NSRectClipList([((5, 5), (200, 200)), ((50, 50), (90, 100))], 2)
            color = AppKit.NSReadPixel((5, 5))
            self.assertIsInstance(color, AppKit.NSColor)
            # Tiled-rect drawing: edge and gray arrays are input arrays of the
            # length given by the last argument.
            self.assertArgSizeInArg(AppKit.NSDrawTiledRects, 2, 4)
            self.assertArgSizeInArg(AppKit.NSDrawTiledRects, 3, 4)
            self.assertArgIsIn(AppKit.NSDrawTiledRects, 2)
            self.assertArgIsIn(AppKit.NSDrawTiledRects, 3)
            AppKit.NSDrawTiledRects(
                ((10, 10), (50, 50)),
                ((15, 15), (10, 10)),
                [AppKit.NSMinXEdge, AppKit.NSMaxXEdge],
                [0.8, 0.9],
                2,
            )
            # Legacy bezel/groove/button drawing helpers.
            AppKit.NSDrawGrayBezel(((0, 0), (10, 10)), ((0, 0), (50, 50)))
            AppKit.NSDrawGroove(((0, 0), (10, 10)), ((0, 0), (50, 50)))
            AppKit.NSDrawWhiteBezel(((0, 0), (10, 10)), ((0, 0), (50, 50)))
            AppKit.NSDrawButton(((0, 0), (10, 10)), ((0, 0), (50, 50)))
            AppKit.NSEraseRect(((0, 0), (10, 10)))
            AppKit.NSCopyBits(0, ((10, 10), (50, 50)), (50, 50))
            AppKit.NSHighlightRect(((10, 10), (50, 50)))
            AppKit.NSDrawDarkBezel(((0, 0), (10, 10)), ((0, 0), (50, 50)))
            AppKit.NSDrawLightBezel(((0, 0), (10, 10)), ((0, 0), (50, 50)))
            AppKit.NSDottedFrameRect(((10, 10), (50, 50)))
            AppKit.NSDrawWindowBackground(((10, 10), (50, 50)))
        finally:
            # Always restore the previous graphics context, even if a drawing
            # call above raised.
            AppKit.NSGraphicsContext.setCurrentContext_(current)
        AppKit.NSSetFocusRingStyle(AppKit.NSFocusRingAbove)
        # NSGetWindowServerMemory fills three by-reference out-parameters.
        self.assertArgIsOut(AppKit.NSGetWindowServerMemory, 1)
        self.assertArgIsOut(AppKit.NSGetWindowServerMemory, 2)
        self.assertArgIsOut(AppKit.NSGetWindowServerMemory, 3)
        r = AppKit.NSGetWindowServerMemory(0, None, None, None)
        self.assertIsInstance(r[0], int)
        self.assertIsInstance(r[1], int)
        self.assertIsInstance(r[2], int)
        self.assertArgSizeInArg(AppKit.NSDrawColorTiledRects, 2, 4)
        self.assertArgSizeInArg(AppKit.NSDrawColorTiledRects, 3, 4)
        self.assertArgIsIn(AppKit.NSDrawColorTiledRects, 2)
        self.assertArgIsIn(AppKit.NSDrawColorTiledRects, 3)
        AppKit.NSDrawColorTiledRects(
            ((10, 10), (50, 50)),
            ((15, 15), (10, 10)),
            [AppKit.NSMinXEdge, AppKit.NSMaxXEdge],
            [AppKit.NSColor.redColor(), AppKit.NSColor.blueColor()],
            2,
        )
        # Window enumeration: output buffer length comes from the first arg.
        self.assertArgSizeInArg(AppKit.NSWindowList, 1, 0)
        self.assertArgIsOut(AppKit.NSWindowList, 1)
        v = AppKit.NSWindowList(5, None)
        self.assertIsInstance(v, tuple)
        self.assertEqual(len(v), 5)
        self.assertIsInstance(v[0], int)
        self.assertArgIsOut(AppKit.NSCountWindowsForContext, 1)
        v = AppKit.NSCountWindowsForContext(1, None)
        self.assertIsInstance(v, int)
        self.assertArgIsOut(AppKit.NSWindowListForContext, 2)
        self.assertArgSizeInArg(AppKit.NSWindowListForContext, 2, 1)
        v = AppKit.NSWindowListForContext(0, 5, None)
        self.assertIsInstance(v, tuple)
        self.assertEqual(len(v), 5)
        self.assertIsInstance(v[0], int)
        AppKit.NSBeep()
        count = AppKit.NSCountWindows(None)
        self.assertIsInstance(count, int)
        # These can raise depending on the window-server session; only their
        # callability is being tested here.
        try:
            AppKit.NSDisableScreenUpdates()
        except objc.error:
            pass
        try:
            AppKit.NSEnableScreenUpdates()
        except objc.error:
            pass
        self.assertArgIsSEL(AppKit.NSShowAnimationEffect, 4, b"v@:^v")
        self.assertArgHasType(AppKit.NSShowAnimationEffect, 5, b"^v")
        try:
            AppKit.NSShowAnimationEffect(
                AppKit.NSAnimationEffectPoof, (10, 10), (20, 30), None, None, None
            )
        except objc.error:
            pass
@min_os_level("10.5")
def testConstants10_5(self):
self.assertEqual(AppKit.NSColorRenderingIntentDefault, 0)
self.assertEqual(AppKit.NSColorRenderingIntentAbsoluteColorimetric, 1)
self.assertEqual(AppKit.NSColorRenderingIntentRelativeColorimetric, 2)
self.assertEqual(AppKit.NSColorRenderingIntentPerceptual, 3)
self.assertEqual(AppKit.NSColorRenderingIntentSaturation, 4)
self.assertEqual(AppKit.NSImageInterpolationDefault, 0)
self.assertEqual(AppKit.NSImageInterpolationNone, 1)
self.assertEqual(AppKit.NSImageInterpolationLow, 2)
self.assertEqual(AppKit.NSImageInterpolationHigh, 3)
    @min_os_level("10.6")
    def testConstants10_6(self):
        """Check constants introduced in macOS 10.6: deep window depths and
        the medium image-interpolation mode."""
        self.assertEqual(AppKit.NSWindowDepthTwentyfourBitRGB, 0x208)
        self.assertEqual(AppKit.NSWindowDepthSixtyfourBitRGB, 0x210)
        self.assertEqual(AppKit.NSWindowDepthOnehundredtwentyeightBitRGB, 0x220)
        self.assertEqual(AppKit.NSImageInterpolationMedium, 4)
        # NOTE(review): this asserts nothing; it only forces the shared
        # application object to exist. Looks like a stray leftover — confirm
        # it is intentional.
        AppKit.NSApplication.sharedApplication()
| true | true |
f720dfbd8a87908f745dd0e7e519b11314b25551 | 2,649 | py | Python | zExtraLearning/MLPrep/tf2.0/NbExtracts/23tf2_0_mirrored_strategy.py | talk2sunil83/UpgradLearning | 70c4f993c68ce5030e9df0edd15004bbb9fc71e7 | [
"Apache-2.0"
] | null | null | null | zExtraLearning/MLPrep/tf2.0/NbExtracts/23tf2_0_mirrored_strategy.py | talk2sunil83/UpgradLearning | 70c4f993c68ce5030e9df0edd15004bbb9fc71e7 | [
"Apache-2.0"
] | null | null | null | zExtraLearning/MLPrep/tf2.0/NbExtracts/23tf2_0_mirrored_strategy.py | talk2sunil83/UpgradLearning | 70c4f993c68ce5030e9df0edd15004bbb9fc71e7 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
"""TF2.0 Mirrored Strategy.ipynb
Automatically generated by Colaboratory.
Original file is located at
https://colab.research.google.com/drive/1e7_N_vVQGyfa3Wz9ND0smWnnsHsQUs_k
"""
# Commented out IPython magic to ensure Python compatibility.
from tensorflow.keras.models import Model
from tensorflow.keras.layers import Input, Conv2D, Dense, Flatten, Dropout, GlobalMaxPooling2D, MaxPooling2D, BatchNormalization
import matplotlib.pyplot as plt
import numpy as np
import tensorflow as tf
print(tf.__version__)
# additional imports
# Load in the data
cifar10 = tf.keras.datasets.cifar10
(x_train, y_train), (x_test, y_test) = cifar10.load_data()
x_train, x_test = x_train / 255.0, x_test / 255.0
y_train, y_test = y_train.flatten(), y_test.flatten()
print("x_train.shape:", x_train.shape)
print("y_train.shape", y_train.shape)
# number of classes
K = len(set(y_train))
print("number of classes:", K)
# Build the model using the functional API
def create_model():
    """Build the CIFAR-10 CNN with the Keras functional API.

    Three stages of (Conv-BN, Conv-BN, MaxPool) with 32/64/128 filters,
    followed by a dropout-regularized dense head with a K-way softmax.
    Reads the module-level ``x_train`` (input shape) and ``K`` (class count).
    """
    inputs = Input(shape=x_train[0].shape)
    h = inputs
    # Each stage doubles the filter count and halves the spatial resolution.
    for filters in (32, 64, 128):
        for _ in range(2):
            h = Conv2D(filters, (3, 3), activation='relu', padding='same')(h)
            h = BatchNormalization()(h)
        h = MaxPooling2D((2, 2))(h)
    h = Flatten()(h)
    h = Dropout(0.2)(h)
    h = Dense(1024, activation='relu')(h)
    h = Dropout(0.2)(h)
    outputs = Dense(K, activation='softmax')(h)
    return Model(inputs, outputs)
strategy = tf.distribute.MirroredStrategy()
# strategy = tf.distribute.experimental.CentralStorageStrategy()
print(f'Number of devices: {strategy.num_replicas_in_sync}')
# Model creation and compilation must happen inside the strategy scope so
# variables are mirrored across replicas.
with strategy.scope():
  model = create_model()
  model.compile(loss='sparse_categorical_crossentropy',
                optimizer='adam',
                metrics=['accuracy'])
# Fit
r = model.fit(x_train, y_train, validation_data=(x_test, y_test), epochs=5)
# NOTE(review): the next two bare expressions are no-ops — leftover notebook
# cells (presumably samples-per-step checks: 50000 train samples / 391 steps,
# 10000 test samples / 79 steps). Safe to delete.
50000/391
10000/79
# Compare this to non-distributed training
model2 = create_model()
model2.compile(loss='sparse_categorical_crossentropy',
               optimizer='adam',
               metrics=['accuracy'])
r = model2.fit(x_train, y_train, validation_data=(x_test, y_test), epochs=5)
| 29.10989 | 128 | 0.678369 |
from tensorflow.keras.models import Model
from tensorflow.keras.layers import Input, Conv2D, Dense, Flatten, Dropout, GlobalMaxPooling2D, MaxPooling2D, BatchNormalization
import matplotlib.pyplot as plt
import numpy as np
import tensorflow as tf
print(tf.__version__)
cifar10 = tf.keras.datasets.cifar10
(x_train, y_train), (x_test, y_test) = cifar10.load_data()
x_train, x_test = x_train / 255.0, x_test / 255.0
y_train, y_test = y_train.flatten(), y_test.flatten()
print("x_train.shape:", x_train.shape)
print("y_train.shape", y_train.shape)
K = len(set(y_train))
print("number of classes:", K)
def create_model():
i = Input(shape=x_train[0].shape)
x = Conv2D(32, (3, 3), activation='relu', padding='same')(i)
x = BatchNormalization()(x)
x = Conv2D(32, (3, 3), activation='relu', padding='same')(x)
x = BatchNormalization()(x)
x = MaxPooling2D((2, 2))(x)
x = Conv2D(64, (3, 3), activation='relu', padding='same')(x)
x = BatchNormalization()(x)
x = Conv2D(64, (3, 3), activation='relu', padding='same')(x)
x = BatchNormalization()(x)
x = MaxPooling2D((2, 2))(x)
x = Conv2D(128, (3, 3), activation='relu', padding='same')(x)
x = BatchNormalization()(x)
x = Conv2D(128, (3, 3), activation='relu', padding='same')(x)
x = BatchNormalization()(x)
x = MaxPooling2D((2, 2))(x)
x = Flatten()(x)
x = Dropout(0.2)(x)
x = Dense(1024, activation='relu')(x)
x = Dropout(0.2)(x)
x = Dense(K, activation='softmax')(x)
model = Model(i, x)
return model
strategy = tf.distribute.MirroredStrategy()
print(f'Number of devices: {strategy.num_replicas_in_sync}')
with strategy.scope():
model = create_model()
model.compile(loss='sparse_categorical_crossentropy',
optimizer='adam',
metrics=['accuracy'])
r = model.fit(x_train, y_train, validation_data=(x_test, y_test), epochs=5)
50000/391
10000/79
model2 = create_model()
model2.compile(loss='sparse_categorical_crossentropy',
optimizer='adam',
metrics=['accuracy'])
r = model2.fit(x_train, y_train, validation_data=(x_test, y_test), epochs=5)
| true | true |
f720e314a25973213209e088a8ac815f6b5568fc | 20,043 | py | Python | src/pregame.py | the5thEmperor/lykos | 62cc7694ec24eb0c177dfd25db79725a092a57fa | [
"BSD-2-Clause"
] | null | null | null | src/pregame.py | the5thEmperor/lykos | 62cc7694ec24eb0c177dfd25db79725a092a57fa | [
"BSD-2-Clause"
] | null | null | null | src/pregame.py | the5thEmperor/lykos | 62cc7694ec24eb0c177dfd25db79725a092a57fa | [
"BSD-2-Clause"
] | null | null | null | from collections import defaultdict, Counter
from datetime import datetime, timedelta
import threading
import itertools
import random
import time
import math
import re
from src.containers import UserDict, UserSet
from src.decorators import COMMANDS, command, event_listener, handle_error
from src.functions import get_players
from src.warnings import decrement_stasis
from src.messages import messages
from src.events import Event
from src.cats import Wolfchat, All
from src import channels
import botconfig
# Global token bucket rate-limiting !wait: WAIT_TOKENS refills at one token
# per var.WAIT_TB_DELAY seconds, capped at var.WAIT_TB_BURST.
WAIT_LOCK = threading.RLock()
WAIT_TOKENS = 0
WAIT_LAST = 0
# Per-user !start rate limiting: maps user -> [last attempt time, attempt count].
LAST_START = UserDict() # type: UserDict[users.User, List[datetime, int]]
# Per-user !wait cooldown timestamps.
LAST_WAIT = UserDict() # type: UserDict[users.User, datetime]
# Users who have voted to start the current lobby.
START_VOTES = UserSet() # type: UserSet[users.User]
# Automatic-restart bookkeeping: counter vs. its fixed cap.
RESTART_TRIES = 0 # type: int
MAX_RETRIES = 3 # constant: not a setting
@command("wait", playing=True, phases=("join",))
def wait(var, wrapper, message):
    """Increase the wait time until !start can be used.

    Rate-limited two ways: a per-user cooldown (var.WAIT_RATE_LIMIT seconds
    between uses) and a global token bucket (var.WAIT_TB_BURST tokens,
    refilled at one token per var.WAIT_TB_DELAY seconds).
    """
    # Fix: removed the unused ``pl = get_players()`` local — a dead call.
    if wrapper.target is not channels.Main:
        return

    with WAIT_LOCK:
        global WAIT_TOKENS, WAIT_LAST
        wait_check_time = time.time()
        # Refill the token bucket from elapsed time, capped at the burst size.
        WAIT_TOKENS += (wait_check_time - WAIT_LAST) / var.WAIT_TB_DELAY
        WAIT_LAST = wait_check_time
        WAIT_TOKENS = min(WAIT_TOKENS, var.WAIT_TB_BURST)

        now = datetime.now()
        if ((LAST_WAIT and wrapper.source in LAST_WAIT and LAST_WAIT[wrapper.source] +
                timedelta(seconds=var.WAIT_RATE_LIMIT) > now) or WAIT_TOKENS < 1):
            wrapper.pm(messages["command_ratelimited"])
            return

        LAST_WAIT[wrapper.source] = now
        WAIT_TOKENS -= 1
        # Push the earliest allowed start time back by EXTRA_WAIT seconds,
        # anchored at "now" if the old deadline has already passed.
        if now > var.CAN_START_TIME:
            var.CAN_START_TIME = now + timedelta(seconds=var.EXTRA_WAIT)
        else:
            var.CAN_START_TIME += timedelta(seconds=var.EXTRA_WAIT)
        wrapper.send(messages["wait_time_increase"].format(wrapper.source, var.EXTRA_WAIT))
@command("fwait", flag="w", phases=("join",))
def fwait(var, wrapper, message):
    """Force an increase (or decrease) in wait time. Can be used with a number of seconds to wait.

    With no (or a non-numeric) argument, defaults to var.EXTRA_WAIT seconds.
    The adjustment is clamped to +/- 900 seconds (15 minutes).
    """
    # Fix: removed the unused ``pl = get_players()`` local — a dead call.
    msg = re.split(" +", message.strip(), 1)[0]

    # Accept an optional signed integer argument.
    if msg and (msg.isdigit() or (msg[0] == "-" and msg[1:].isdigit())):
        extra = int(msg)
    else:
        extra = var.EXTRA_WAIT

    now = datetime.now()
    extra = max(-900, min(900, extra))  # clamp to ±15 minutes

    # Anchor at "now" if the old deadline has already passed.
    if now > var.CAN_START_TIME:
        var.CAN_START_TIME = now + timedelta(seconds=extra)
    else:
        var.CAN_START_TIME += timedelta(seconds=extra)

    if extra >= 0:
        wrapper.send(messages["forced_wait_time_increase"].format(wrapper.source, abs(extra)))
    else:
        wrapper.send(messages["forced_wait_time_decrease"].format(wrapper.source, abs(extra)))
@command("start", phases=("none", "join"))
def start_cmd(var, wrapper, message):
    """Start a game of Werewolf."""
    # Only honor !start in the main channel.
    if wrapper.target is not channels.Main:
        return
    start(var, wrapper)
@command("fstart", flag="S", phases=("join",))
def fstart(var, wrapper, message):
    """Force the game to start immediately."""
    # Announce who forced the start, then run the normal start path in
    # forced mode against the main channel.
    announcement = messages["fstart_success"].format(wrapper.source)
    channels.Main.send(announcement)
    wrapper.target = channels.Main
    start(var, wrapper, forced=True)
@command("retract", phases=("day", "join"))
def retract(var, wrapper, message):
    """Take back your vote during the day (for whom to lynch)."""
    if wrapper.source not in get_players() or wrapper.source in var.DISCONNECTED:
        return

    with var.GRAVEYARD_LOCK, var.WARNING_LOCK:
        # During the join phase this retracts a start vote; day-phase
        # retraction is handled elsewhere.
        if var.PHASE != "join":
            return

        if wrapper.source not in START_VOTES:
            wrapper.pm(messages["start_novote"])
            return

        START_VOTES.discard(wrapper.source)
        wrapper.send(messages["start_retract"].format(wrapper.source))

        # Last vote retracted: stop the pending expiry timer.
        if not START_VOTES:
            var.TIMERS["start_votes"][0].cancel()
            del var.TIMERS["start_votes"]
@event_listener("del_player")
def on_del_player(evt, var, player, all_roles, death_triggers):
    """Drop a departing player's start vote while the lobby is forming."""
    if var.PHASE != "join":
        return

    with var.WARNING_LOCK:
        START_VOTES.discard(player)
        # Cancel the start vote timer if there are no votes left
        if not START_VOTES and "start_votes" in var.TIMERS:
            timer_entry = var.TIMERS.pop("start_votes")
            timer_entry[0].cancel()
def start(var, wrapper, *, forced=False, restart=""):
    """Start (or restart) a game: pick a mode, assign roles, kick off night/day.

    Args:
        var: the game state module.
        wrapper: message wrapper whose source requested the start.
        forced: skip vote/wait/rate-limit checks (admin !fstart).
        restart: if non-empty, the gamemode name to restart into.

    Fixes vs. original:
    * ``wrapper.send.send(...)`` on the MAX_PLAYERS branch raised
      AttributeError instead of telling the channel the lobby was too big;
      now ``wrapper.send(...)``.
    * removed the dead ``roleset_roles`` accumulator (written, never read).
    """
    # Per-user rate limit on manual !start attempts.
    if (not forced and LAST_START and wrapper.source in LAST_START and
            LAST_START[wrapper.source][0] + timedelta(seconds=var.START_RATE_LIMIT) >
            datetime.now() and not restart):
        LAST_START[wrapper.source][1] += 1
        wrapper.source.send(messages["command_ratelimited"])
        return

    if restart:
        global RESTART_TRIES
        RESTART_TRIES += 1
    # Give up after too many automatic restart attempts.
    if RESTART_TRIES > MAX_RETRIES:
        from src.wolfgame import stop_game
        stop_game(var, abort=True)
        return

    if not restart:
        LAST_START[wrapper.source] = [datetime.now(), 1]

    villagers = get_players()
    vils = set(get_players())

    if not restart:
        if var.PHASE == "none":
            wrapper.source.send(messages["no_game_running"])
            return
        if var.PHASE != "join":
            wrapper.source.send(messages["werewolf_already_running"])
            return
        if wrapper.source not in villagers and not forced:
            return

        now = datetime.now()
        var.GAME_START_TIME = now  # Only used for the idler checker
        dur = int((var.CAN_START_TIME - now).total_seconds())
        if dur > 0 and not forced:
            wrapper.send(messages["please_wait"].format(dur))
            return

        if len(villagers) < var.MIN_PLAYERS:
            wrapper.send(messages["not_enough_players"].format(wrapper.source, var.MIN_PLAYERS))
            return

        if len(villagers) > var.MAX_PLAYERS:
            # Bug fix: was ``wrapper.send.send(...)`` which raised
            # AttributeError whenever the lobby was over the cap.
            wrapper.send(messages["max_players"].format(wrapper.source, var.MAX_PLAYERS))
            return

        with var.WARNING_LOCK:
            if not forced and wrapper.source in START_VOTES:
                wrapper.pm(messages["start_already_voted"])
                return

            start_votes_required = min(math.ceil(len(villagers) * var.START_VOTES_SCALE), var.START_VOTES_MAX)
            if not forced and len(START_VOTES) < start_votes_required:
                # If there's only one more vote required, start the game immediately.
                # Checked here to make sure that a player that has already voted can't
                # vote again for the final start.
                if len(START_VOTES) < start_votes_required - 1:
                    START_VOTES.add(wrapper.source)
                    remaining_votes = start_votes_required - len(START_VOTES)
                    wrapper.send(messages["start_voted"].format(wrapper.source, remaining_votes))

                    # If this was the first vote, set a timer to expire the votes.
                    if len(START_VOTES) == 1:
                        t = threading.Timer(60, expire_start_votes, (var, wrapper.target))
                        var.TIMERS["start_votes"] = (t, time.time(), 60)
                        t.daemon = True
                        t.start()
                    return

        # Choose a gamemode unless an admin already forced one (!fgame).
        if not var.FGAMED:
            votes = {}  # key = gamemode, not hostmask
            for gamemode in var.GAMEMODE_VOTES.values():
                if len(villagers) >= var.GAME_MODES[gamemode][1] and len(villagers) <= var.GAME_MODES[gamemode][2]:
                    votes[gamemode] = votes.get(gamemode, 0) + 1
            # A mode voted by at least half the lobby wins outright.
            voted = [gamemode for gamemode in votes if votes[gamemode] == max(votes.values()) and votes[gamemode] >= len(villagers)/2]
            if voted:
                from src.wolfgame import cgamemode
                cgamemode(random.choice(voted))
            else:
                # Weighted draw: each vote is a ticket, plus None tickets for
                # non-voters (one per two non-voters) meaning "random mode".
                possiblegamemodes = []
                numvotes = 0
                for gamemode, num in votes.items():
                    if len(villagers) < var.GAME_MODES[gamemode][1] or len(villagers) > var.GAME_MODES[gamemode][2] or var.GAME_MODES[gamemode][3] == 0:
                        continue
                    possiblegamemodes += [gamemode] * num
                    numvotes += num
                if len(villagers) - numvotes > 0:
                    possiblegamemodes += [None] * ((len(villagers) - numvotes) // 2)
                # check if we go with a voted mode or a random mode
                gamemode = random.choice(possiblegamemodes)
                if gamemode is None:
                    # Random mode, weighted by each mode's configured likelihood.
                    possiblegamemodes = []
                    for gamemode in var.GAME_MODES.keys() - var.DISABLED_GAMEMODES:
                        if len(villagers) >= var.GAME_MODES[gamemode][1] and len(villagers) <= var.GAME_MODES[gamemode][2] and var.GAME_MODES[gamemode][3] > 0:
                            possiblegamemodes += [gamemode] * var.GAME_MODES[gamemode][3]
                    gamemode = random.choice(possiblegamemodes)
                from src.wolfgame import cgamemode
                cgamemode(gamemode)
    else:
        # Restarting: reuse the mode we were told to restart into.
        from src.wolfgame import cgamemode
        cgamemode(restart)

    var.GAME_ID = time.time()  # restart reaper timer

    # Determine the role counts for this player count, unless an event
    # (e.g. a special mode) supplies them itself.
    from src.wolfgame import chk_win_conditions  # TODO: Move that into its own postgame module
    event = Event("role_attribution", {"addroles": Counter()})
    if event.dispatch(var, chk_win_conditions, villagers):
        addroles = event.data["addroles"]
        strip = lambda x: re.sub(r"\(.*\)", "", x)
        lv = len(villagers)
        roles = []
        for num, rolelist in var.CURRENT_GAMEMODE.ROLE_GUIDE.items():
            if num <= lv:
                roles.extend(rolelist)
        defroles = Counter(strip(x) for x in roles)
        # "-role" entries in the guide subtract previously-granted roles.
        for role, count in list(defroles.items()):
            if role[0] == "-":
                srole = role[1:]
                defroles[srole] -= count
                del defroles[role]
                if defroles[srole] == 0:
                    del defroles[srole]
        if not defroles:
            wrapper.send(messages["no_settings_defined"].format(wrapper.source, lv))
            return
        for role, num in defroles.items():
            # Forced roles (!frole) can push a count above the guide's value.
            addroles[role] = max(addroles.get(role, num), len(var.FORCE_ROLES.get(role, ())))
        if sum(addroles[r] for r in addroles if r not in var.CURRENT_GAMEMODE.SECONDARY_ROLES) > lv:
            wrapper.send(messages["too_many_roles"])
            return
        for role in All:
            addroles.setdefault(role, 0)
    else:
        addroles = event.data["addroles"]

    # convert roleset aliases into the appropriate roles
    possible_rolesets = [Counter()]
    var.CURRENT_GAMEMODE.ACTIVE_ROLE_SETS = {}
    for role, amt in list(addroles.items()):
        # not a roleset? add a fixed amount of them
        if role not in var.CURRENT_GAMEMODE.ROLE_SETS:
            for pr in possible_rolesets:
                pr[role] += amt
            continue
        # if a roleset, ensure we don't try to expose the roleset name in !stats or future attribution
        # but do keep track of the sets in use so we can have !stats reflect proper information
        var.CURRENT_GAMEMODE.ACTIVE_ROLE_SETS[role] = amt
        del addroles[role]
        # init !stats with all 0s so that it can number things properly; the keys need to exist in the Counter
        # across every possible roleset so that !stats works right
        rs = Counter(var.CURRENT_GAMEMODE.ROLE_SETS[role])
        for r in rs:
            for pr in possible_rolesets:
                pr[r] += 0
        toadd = random.sample(list(rs.elements()), amt)
        for r in toadd:
            addroles[r] += 1
        # Expand every combination the roleset could have produced so !stats
        # can report accurate possibilities.
        add_rolesets = []
        temp_rolesets = []
        for c in itertools.combinations(rs.elements(), amt):
            add_rolesets.append(Counter(c))
        for pr in possible_rolesets:
            for ar in add_rolesets:
                temp = Counter(pr)
                temp.update(ar)
                temp_rolesets.append(temp)
        possible_rolesets = temp_rolesets

    if var.ORIGINAL_SETTINGS and not restart:  # Custom settings
        # Sanity-check custom settings; revert to defaults if unplayable.
        need_reset = True
        wvs = sum(addroles[r] for r in Wolfchat)
        if len(villagers) < (sum(addroles.values()) - sum(addroles[r] for r in var.CURRENT_GAMEMODE.SECONDARY_ROLES)):
            wrapper.send(messages["too_few_players_custom"])
        elif not wvs and var.CURRENT_GAMEMODE.name != "villagergame":
            wrapper.send(messages["need_one_wolf"])
        elif wvs > (len(villagers) / 2):
            wrapper.send(messages["too_many_wolves"])
        else:
            need_reset = False

        if need_reset:
            from src.wolfgame import reset_settings
            reset_settings()
            wrapper.send(messages["default_reset"])
            var.PHASE = "join"
            return

    if var.ADMIN_TO_PING is not None and not restart:
        # Lock out !join/!start while an admin ping is pending.
        for decor in (COMMANDS["join"] + COMMANDS["start"]):
            decor(_command_disabled)

    # Wipe all per-game state before assigning anything.
    var.ROLES.clear()
    var.MAIN_ROLES.clear()
    var.NIGHT_COUNT = 0
    var.DAY_COUNT = 0
    var.FINAL_ROLES.clear()
    var.EXTRA_WOLVES = 0
    var.DEADCHAT_PLAYERS.clear()
    var.SPECTATING_WOLFCHAT.clear()
    var.SPECTATING_DEADCHAT.clear()

    for role in All:
        var.ROLES[role] = UserSet()
    var.ROLES[var.DEFAULT_ROLE] = UserSet()
    # Players with a forced main role are not part of the random pool.
    for role, ps in var.FORCE_ROLES.items():
        if role not in var.CURRENT_GAMEMODE.SECONDARY_ROLES.keys():
            vils.difference_update(ps)

    # Assign main roles: forced players first, then a random draw.
    for role, count in addroles.items():
        if role in var.CURRENT_GAMEMODE.SECONDARY_ROLES:
            var.ROLES[role] = (None,) * count
            continue  # We deal with those later, see below
        to_add = set()
        if role in var.FORCE_ROLES:
            if len(var.FORCE_ROLES[role]) > count:
                channels.Main.send(messages["error_frole_too_many"].format(role))
                return
            for user in var.FORCE_ROLES[role]:
                # If multiple main roles were forced, only first one is put in MAIN_ROLES
                if user not in var.MAIN_ROLES:
                    var.MAIN_ROLES[user] = role
                    var.ORIGINAL_MAIN_ROLES[user] = role
                    to_add.add(user)
                    count -= 1

        selected = random.sample(vils, count)
        for x in selected:
            var.MAIN_ROLES[x] = role
            var.ORIGINAL_MAIN_ROLES[x] = role
            vils.remove(x)
        var.ROLES[role].update(selected)
        var.ROLES[role].update(to_add)

    # Everyone left over gets the default role.
    var.ROLES[var.DEFAULT_ROLE].update(vils)
    for x in vils:
        var.MAIN_ROLES[x] = var.DEFAULT_ROLE
        var.ORIGINAL_MAIN_ROLES[x] = var.DEFAULT_ROLE
    if vils:
        for pr in possible_rolesets:
            pr[var.DEFAULT_ROLE] += len(vils)

    # Collapse possible_rolesets into var.ROLE_STATS
    # which is a FrozenSet[FrozenSet[Tuple[str, int]]]
    possible_rolesets_set = set()
    event = Event("reconfigure_stats", {"new": []})
    for pr in possible_rolesets:
        event.data["new"] = [pr]
        event.dispatch(var, pr, "start")
        for v in event.data["new"]:
            if min(v.values()) >= 0:
                possible_rolesets_set.add(frozenset(v.items()))
    var.ROLE_STATS = frozenset(possible_rolesets_set)

    # Now for the secondary roles
    for role, dfn in var.CURRENT_GAMEMODE.SECONDARY_ROLES.items():
        count = len(var.ROLES[role])
        var.ROLES[role] = UserSet()
        if role in var.FORCE_ROLES:
            ps = var.FORCE_ROLES[role]
            var.ROLES[role].update(ps)
            count -= len(ps)
        # Don't do anything further if this secondary role was forced on enough players already
        if count <= 0:
            continue
        possible = get_players(dfn)
        if len(possible) < count:
            wrapper.send(messages["not_enough_targets"].format(role))
            if var.ORIGINAL_SETTINGS:
                # Custom settings are unplayable; revert to defaults.
                from src.wolfgame import reset_settings
                var.ROLES.clear()
                var.ROLES["person"] = UserSet(var.ALL_PLAYERS)
                reset_settings()
                wrapper.send(messages["default_reset"])
                var.PHASE = "join"
                return
            else:
                wrapper.send(messages["role_skipped"])
                continue
        var.ROLES[role].update(random.sample(possible, count))

    with var.WARNING_LOCK:  # cancel timers
        for name in ("join", "join_pinger", "start_votes"):
            if name in var.TIMERS:
                var.TIMERS[name][0].cancel()
                del var.TIMERS[name]

    var.LAST_STATS = None
    var.LAST_TIME = None

    # Notify role code that each player has received their role.
    for role, players in var.ROLES.items():
        for player in players:
            evt = Event("new_role", {"messages": [], "role": role, "in_wolfchat": False}, inherit_from=None)
            evt.dispatch(var, player, None)

    if not restart:
        gamemode = var.CURRENT_GAMEMODE.name
        if gamemode == "villagergame":
            gamemode = "default"

        # Alert the players to option changes they may not be aware of
        # All keys begin with gso_* (game start options)
        options = []
        if var.ORIGINAL_SETTINGS.get("ROLE_REVEAL") is not None:
            # Keys used here: gso_rr_on, gso_rr_team, gso_rr_off
            options.append(messages["gso_rr_{0}".format(var.ROLE_REVEAL)])
        if var.ORIGINAL_SETTINGS.get("STATS_TYPE") is not None:
            # Keys used here: gso_st_default, gso_st_accurate, gso_st_team, gso_st_disabled
            options.append(messages["gso_st_{0}".format(var.STATS_TYPE)])
        if var.ORIGINAL_SETTINGS.get("ABSTAIN_ENABLED") is not None or var.ORIGINAL_SETTINGS.get("LIMIT_ABSTAIN") is not None:
            if var.ABSTAIN_ENABLED and var.LIMIT_ABSTAIN:
                options.append(messages["gso_abs_rest"])
            elif var.ABSTAIN_ENABLED:
                options.append(messages["gso_abs_unrest"])
            else:
                options.append(messages["gso_abs_none"])

        key = "welcome_simple"
        if options:
            key = "welcome_options"
        wrapper.send(messages[key].format(villagers, gamemode, options))
        wrapper.target.mode("+m")

    var.ORIGINAL_ROLES.clear()
    for role, players in var.ROLES.items():
        var.ORIGINAL_ROLES[role] = players.copy()

    var.DAY_TIMEDELTA = timedelta(0)
    var.NIGHT_TIMEDELTA = timedelta(0)
    var.DAY_START_TIME = datetime.now()
    var.NIGHT_START_TIME = datetime.now()
    var.LAST_PING = None

    if restart:
        var.PHASE = "join"  # allow transition_* to run properly if game was restarted on first night
    if not var.START_WITH_DAY:
        from src.wolfgame import transition_night
        var.GAMEPHASE = "day"  # gamephase needs to be the thing we're transitioning from
        transition_night()
    else:
        from src.wolfgame import transition_day
        var.FIRST_DAY = True
        var.GAMEPHASE = "night"
        transition_day()

    decrement_stasis()

    if not (botconfig.DEBUG_MODE and var.DISABLE_DEBUG_MODE_REAPER):
        # DEATH TO IDLERS!
        from src.wolfgame import reaper
        reapertimer = threading.Thread(None, reaper, args=(wrapper.client, var.GAME_ID))
        reapertimer.daemon = True
        reapertimer.start()
def _command_disabled(var, wrapper, message):
    """Replacement handler installed over !join/!start while an admin ping is
    pending; tells the user the command is temporarily disabled."""
    wrapper.send(messages["command_disabled_admin"])
@handle_error
def expire_start_votes(var, channel):
    """Timer callback: discard all pending start votes after the timeout and
    announce it in *channel*."""
    # Should never happen as the timer is removed on game start, but just to be safe
    if var.PHASE != "join":
        return

    with var.WARNING_LOCK:
        START_VOTES.clear()
        channel.send(messages["start_expired"])
@event_listener("reset")
def on_reset(evt, var):
    """Clear all pregame state when the game state is reset.

    Bug fix: the original declared ``global MAX_RETRIES`` and set it to 0,
    clobbering the restart cap (declared above as "constant: not a setting",
    value 3) instead of resetting the RESTART_TRIES counter. After a reset,
    any automatic restart would then abort immediately because
    RESTART_TRIES (1) exceeded MAX_RETRIES (0).
    """
    global RESTART_TRIES, WAIT_TOKENS, WAIT_LAST
    LAST_START.clear()
    LAST_WAIT.clear()
    START_VOTES.clear()
    RESTART_TRIES = 0  # reset the attempt counter, never the MAX_RETRIES cap
    WAIT_TOKENS = 0
    WAIT_LAST = 0
| 39.3 | 159 | 0.612533 | from collections import defaultdict, Counter
from datetime import datetime, timedelta
import threading
import itertools
import random
import time
import math
import re
from src.containers import UserDict, UserSet
from src.decorators import COMMANDS, command, event_listener, handle_error
from src.functions import get_players
from src.warnings import decrement_stasis
from src.messages import messages
from src.events import Event
from src.cats import Wolfchat, All
from src import channels
import botconfig
WAIT_LOCK = threading.RLock()
WAIT_TOKENS = 0
WAIT_LAST = 0
LAST_START = UserDict()
LAST_WAIT = UserDict()
START_VOTES = UserSet()
RESTART_TRIES = 0
MAX_RETRIES = 3
@command("wait", playing=True, phases=("join",))
def wait(var, wrapper, message):
if wrapper.target is not channels.Main:
return
pl = get_players()
with WAIT_LOCK:
global WAIT_TOKENS, WAIT_LAST
wait_check_time = time.time()
WAIT_TOKENS += (wait_check_time - WAIT_LAST) / var.WAIT_TB_DELAY
WAIT_LAST = wait_check_time
WAIT_TOKENS = min(WAIT_TOKENS, var.WAIT_TB_BURST)
now = datetime.now()
if ((LAST_WAIT and wrapper.source in LAST_WAIT and LAST_WAIT[wrapper.source] +
timedelta(seconds=var.WAIT_RATE_LIMIT) > now) or WAIT_TOKENS < 1):
wrapper.pm(messages["command_ratelimited"])
return
LAST_WAIT[wrapper.source] = now
WAIT_TOKENS -= 1
if now > var.CAN_START_TIME:
var.CAN_START_TIME = now + timedelta(seconds=var.EXTRA_WAIT)
else:
var.CAN_START_TIME += timedelta(seconds=var.EXTRA_WAIT)
wrapper.send(messages["wait_time_increase"].format(wrapper.source, var.EXTRA_WAIT))
@command("fwait", flag="w", phases=("join",))
def fwait(var, wrapper, message):
pl = get_players()
msg = re.split(" +", message.strip(), 1)[0]
if msg and (msg.isdigit() or (msg[0] == "-" and msg[1:].isdigit())):
extra = int(msg)
else:
extra = var.EXTRA_WAIT
now = datetime.now()
extra = max(-900, min(900, extra))
if now > var.CAN_START_TIME:
var.CAN_START_TIME = now + timedelta(seconds=extra)
else:
var.CAN_START_TIME += timedelta(seconds=extra)
if extra >= 0:
wrapper.send(messages["forced_wait_time_increase"].format(wrapper.source, abs(extra)))
else:
wrapper.send(messages["forced_wait_time_decrease"].format(wrapper.source, abs(extra)))
@command("start", phases=("none", "join"))
def start_cmd(var, wrapper, message):
if wrapper.target is channels.Main:
start(var, wrapper)
@command("fstart", flag="S", phases=("join",))
def fstart(var, wrapper, message):
channels.Main.send(messages["fstart_success"].format(wrapper.source))
wrapper.target = channels.Main
start(var, wrapper, forced=True)
@command("retract", phases=("day", "join"))
def retract(var, wrapper, message):
if wrapper.source not in get_players() or wrapper.source in var.DISCONNECTED:
return
with var.GRAVEYARD_LOCK, var.WARNING_LOCK:
if var.PHASE == "join":
if wrapper.source not in START_VOTES:
wrapper.pm(messages["start_novote"])
else:
START_VOTES.discard(wrapper.source)
wrapper.send(messages["start_retract"].format(wrapper.source))
if not START_VOTES:
var.TIMERS["start_votes"][0].cancel()
del var.TIMERS["start_votes"]
@event_listener("del_player")
def on_del_player(evt, var, player, all_roles, death_triggers):
if var.PHASE == "join":
with var.WARNING_LOCK:
START_VOTES.discard(player)
if not START_VOTES and "start_votes" in var.TIMERS:
var.TIMERS["start_votes"][0].cancel()
del var.TIMERS["start_votes"]
def start(var, wrapper, *, forced=False, restart=""):
    """Attempt to start (or restart) a game.

    Handles start-command rate limiting, start-vote accounting, gamemode
    selection, role attribution and the transition into the first
    day/night phase.

    Args:
        var: global game-state object.
        wrapper: message wrapper whose ``source`` is the requesting user.
        forced: skip rate limits and vote requirements (admin !fstart).
        restart: gamemode name when restarting a crashed game, else "".

    Fixes vs. previous revision: the max-players branch called
    ``wrapper.send.send(...)`` (AttributeError at runtime); it now calls
    ``wrapper.send(...)``.
    """
    # Rate-limit repeated !start attempts from the same user.
    if (not forced and LAST_START and wrapper.source in LAST_START and
            LAST_START[wrapper.source][0] + timedelta(seconds=var.START_RATE_LIMIT) >
            datetime.now() and not restart):
        LAST_START[wrapper.source][1] += 1
        wrapper.source.send(messages["command_ratelimited"])
        return

    if restart:
        global RESTART_TRIES
        RESTART_TRIES += 1
        # Too many consecutive restart attempts: give up and abort the game.
        if RESTART_TRIES > MAX_RETRIES:
            from src.wolfgame import stop_game
            stop_game(var, abort=True)
            return

    if not restart:
        LAST_START[wrapper.source] = [datetime.now(), 1]

    villagers = get_players()
    vils = set(get_players())

    if not restart:
        if var.PHASE == "none":
            wrapper.source.send(messages["no_game_running"])
            return
        if var.PHASE != "join":
            wrapper.source.send(messages["werewolf_already_running"])
            return
        if wrapper.source not in villagers and not forced:
            return

        now = datetime.now()
        var.GAME_START_TIME = now
        dur = int((var.CAN_START_TIME - now).total_seconds())
        if dur > 0 and not forced:
            wrapper.send(messages["please_wait"].format(dur))
            return

        if len(villagers) < var.MIN_PLAYERS:
            wrapper.send(messages["not_enough_players"].format(wrapper.source, var.MIN_PLAYERS))
            return

        if len(villagers) > var.MAX_PLAYERS:
            # Bugfix: was wrapper.send.send(...), which raised AttributeError.
            wrapper.send(messages["max_players"].format(wrapper.source, var.MAX_PLAYERS))
            return

        with var.WARNING_LOCK:
            if not forced and wrapper.source in START_VOTES:
                wrapper.pm(messages["start_already_voted"])
                return

            start_votes_required = min(math.ceil(len(villagers) * var.START_VOTES_SCALE), var.START_VOTES_MAX)
            if not forced and len(START_VOTES) < start_votes_required:
                # Checked here to make sure that a player that has already voted can't
                # count twice toward the final, game-starting vote.
                if len(START_VOTES) < start_votes_required - 1:
                    START_VOTES.add(wrapper.source)
                    remaining_votes = start_votes_required - len(START_VOTES)
                    wrapper.send(messages["start_voted"].format(wrapper.source, remaining_votes))

                    # First vote cast: start the expiry timer.
                    if len(START_VOTES) == 1:
                        t = threading.Timer(60, expire_start_votes, (var, wrapper.target))
                        var.TIMERS["start_votes"] = (t, time.time(), 60)
                        t.daemon = True
                        t.start()
                    return

        # Pick a gamemode: honor player votes when a majority voted for a
        # fitting mode, otherwise draw a weighted-random mode.
        if not var.FGAMED:
            votes = {}
            for gamemode in var.GAMEMODE_VOTES.values():
                if len(villagers) >= var.GAME_MODES[gamemode][1] and len(villagers) <= var.GAME_MODES[gamemode][2]:
                    votes[gamemode] = votes.get(gamemode, 0) + 1
            voted = [gamemode for gamemode in votes if votes[gamemode] == max(votes.values()) and votes[gamemode] >= len(villagers)/2]
            if voted:
                from src.wolfgame import cgamemode
                cgamemode(random.choice(voted))
            else:
                possiblegamemodes = []
                numvotes = 0
                for gamemode, num in votes.items():
                    if len(villagers) < var.GAME_MODES[gamemode][1] or len(villagers) > var.GAME_MODES[gamemode][2] or var.GAME_MODES[gamemode][3] == 0:
                        continue
                    possiblegamemodes += [gamemode] * num
                    numvotes += num
                # Non-voters count as half a "no preference" ballot each.
                if len(villagers) - numvotes > 0:
                    possiblegamemodes += [None] * ((len(villagers) - numvotes) // 2)
                gamemode = random.choice(possiblegamemodes)
                if gamemode is None:
                    # "No preference" won: pick among all enabled modes,
                    # weighted by each mode's configured likelihood.
                    possiblegamemodes = []
                    for gamemode in var.GAME_MODES.keys() - var.DISABLED_GAMEMODES:
                        if len(villagers) >= var.GAME_MODES[gamemode][1] and len(villagers) <= var.GAME_MODES[gamemode][2] and var.GAME_MODES[gamemode][3] > 0:
                            possiblegamemodes += [gamemode] * var.GAME_MODES[gamemode][3]
                    gamemode = random.choice(possiblegamemodes)
                from src.wolfgame import cgamemode
                cgamemode(gamemode)
    else:
        # Restart: re-apply the mode the crashed game was using.
        from src.wolfgame import cgamemode
        cgamemode(restart)

    var.GAME_ID = time.time()

    # === Role attribution ===
    from src.wolfgame import chk_win_conditions
    event = Event("role_attribution", {"addroles": Counter()})
    if event.dispatch(var, chk_win_conditions, villagers):
        addroles = event.data["addroles"]
        strip = lambda x: re.sub(r"\(.*\)", "", x)
        lv = len(villagers)
        roles = []
        for num, rolelist in var.CURRENT_GAMEMODE.ROLE_GUIDE.items():
            if num <= lv:
                roles.extend(rolelist)
        defroles = Counter(strip(x) for x in roles)
        # Entries prefixed with "-" remove previously granted roles.
        for role, count in list(defroles.items()):
            if role[0] == "-":
                srole = role[1:]
                defroles[srole] -= count
                del defroles[role]
                if defroles[srole] == 0:
                    del defroles[srole]
        if not defroles:
            wrapper.send(messages["no_settings_defined"].format(wrapper.source, lv))
            return
        for role, num in defroles.items():
            addroles[role] = max(addroles.get(role, num), len(var.FORCE_ROLES.get(role, ())))
        if sum(addroles[r] for r in addroles if r not in var.CURRENT_GAMEMODE.SECONDARY_ROLES) > lv:
            wrapper.send(messages["too_many_roles"])
            return
        for role in All:
            addroles.setdefault(role, 0)
    else:
        addroles = event.data["addroles"]

    # Expand roleset aliases into concrete roles while tracking every
    # combination the set could have produced (for accurate !stats).
    possible_rolesets = [Counter()]
    roleset_roles = defaultdict(int)
    var.CURRENT_GAMEMODE.ACTIVE_ROLE_SETS = {}
    for role, amt in list(addroles.items()):
        if role not in var.CURRENT_GAMEMODE.ROLE_SETS:
            for pr in possible_rolesets:
                pr[role] += amt
            continue
        # but do keep track of the sets in use so we can have !stats reflect proper information
        var.CURRENT_GAMEMODE.ACTIVE_ROLE_SETS[role] = amt
        del addroles[role]
        # init !stats with all 0s so that it can number things properly; the keys need to exist in the Counter
        # across every possible roleset so that !stats works right
        rs = Counter(var.CURRENT_GAMEMODE.ROLE_SETS[role])
        for r in rs:
            for pr in possible_rolesets:
                pr[r] += 0
        toadd = random.sample(list(rs.elements()), amt)
        for r in toadd:
            addroles[r] += 1
            roleset_roles[r] += 1
        add_rolesets = []
        temp_rolesets = []
        for c in itertools.combinations(rs.elements(), amt):
            add_rolesets.append(Counter(c))
        for pr in possible_rolesets:
            for ar in add_rolesets:
                temp = Counter(pr)
                temp.update(ar)
                temp_rolesets.append(temp)
        possible_rolesets = temp_rolesets

    if var.ORIGINAL_SETTINGS and not restart:  # Custom settings
        need_reset = True
        wvs = sum(addroles[r] for r in Wolfchat)
        if len(villagers) < (sum(addroles.values()) - sum(addroles[r] for r in var.CURRENT_GAMEMODE.SECONDARY_ROLES)):
            wrapper.send(messages["too_few_players_custom"])
        elif not wvs and var.CURRENT_GAMEMODE.name != "villagergame":
            wrapper.send(messages["need_one_wolf"])
        elif wvs > (len(villagers) / 2):
            wrapper.send(messages["too_many_wolves"])
        else:
            need_reset = False

        if need_reset:
            from src.wolfgame import reset_settings
            reset_settings()
            wrapper.send(messages["default_reset"])
            var.PHASE = "join"
            return

    # Temporarily disable !join/!start while the game is running.
    if var.ADMIN_TO_PING is not None and not restart:
        for decor in (COMMANDS["join"] + COMMANDS["start"]):
            decor(_command_disabled)

    var.ROLES.clear()
    var.MAIN_ROLES.clear()
    var.NIGHT_COUNT = 0
    var.DAY_COUNT = 0
    var.FINAL_ROLES.clear()
    var.EXTRA_WOLVES = 0
    var.DEADCHAT_PLAYERS.clear()
    var.SPECTATING_WOLFCHAT.clear()
    var.SPECTATING_DEADCHAT.clear()

    for role in All:
        var.ROLES[role] = UserSet()
    var.ROLES[var.DEFAULT_ROLE] = UserSet()
    # Players with a forced main role are removed from the random pool.
    for role, ps in var.FORCE_ROLES.items():
        if role not in var.CURRENT_GAMEMODE.SECONDARY_ROLES.keys():
            vils.difference_update(ps)

    for role, count in addroles.items():
        if role in var.CURRENT_GAMEMODE.SECONDARY_ROLES:
            var.ROLES[role] = (None,) * count
            continue  # We deal with those later, see below
        to_add = set()

        if role in var.FORCE_ROLES:
            if len(var.FORCE_ROLES[role]) > count:
                channels.Main.send(messages["error_frole_too_many"].format(role))
                return
            for user in var.FORCE_ROLES[role]:
                # If multiple main roles were forced, only first one is put in MAIN_ROLES
                if user not in var.MAIN_ROLES:
                    var.MAIN_ROLES[user] = role
                    var.ORIGINAL_MAIN_ROLES[user] = role
                to_add.add(user)
                count -= 1

        # list() because random.sample on a set is deprecated (3.9) and an
        # error on 3.11+.
        selected = random.sample(list(vils), count)
        for x in selected:
            var.MAIN_ROLES[x] = role
            var.ORIGINAL_MAIN_ROLES[x] = role
            vils.remove(x)
        var.ROLES[role].update(selected)
        var.ROLES[role].update(to_add)

    var.ROLES[var.DEFAULT_ROLE].update(vils)
    for x in vils:
        var.MAIN_ROLES[x] = var.DEFAULT_ROLE
        var.ORIGINAL_MAIN_ROLES[x] = var.DEFAULT_ROLE
    if vils:
        for pr in possible_rolesets:
            pr[var.DEFAULT_ROLE] += len(vils)

    # Collapse possible_rolesets into var.ROLE_STATS
    # which is a FrozenSet[FrozenSet[Tuple[str, int]]]
    possible_rolesets_set = set()
    event = Event("reconfigure_stats", {"new": []})
    for pr in possible_rolesets:
        event.data["new"] = [pr]
        event.dispatch(var, pr, "start")
        for v in event.data["new"]:
            if min(v.values()) >= 0:
                possible_rolesets_set.add(frozenset(v.items()))
    var.ROLE_STATS = frozenset(possible_rolesets_set)

    # Now for the secondary roles
    for role, dfn in var.CURRENT_GAMEMODE.SECONDARY_ROLES.items():
        count = len(var.ROLES[role])
        var.ROLES[role] = UserSet()
        if role in var.FORCE_ROLES:
            ps = var.FORCE_ROLES[role]
            var.ROLES[role].update(ps)
            count -= len(ps)
        # Don't do anything further if this secondary role was forced on enough players already
        if count <= 0:
            continue
        possible = get_players(dfn)
        if len(possible) < count:
            wrapper.send(messages["not_enough_targets"].format(role))
            if var.ORIGINAL_SETTINGS:
                from src.wolfgame import reset_settings
                var.ROLES.clear()
                var.ROLES["person"] = UserSet(var.ALL_PLAYERS)
                reset_settings()
                wrapper.send(messages["default_reset"])
                var.PHASE = "join"
                return
            else:
                wrapper.send(messages["role_skipped"])
                continue
        var.ROLES[role].update(random.sample(possible, count))

    with var.WARNING_LOCK:
        for name in ("join", "join_pinger", "start_votes"):
            if name in var.TIMERS:
                var.TIMERS[name][0].cancel()
                del var.TIMERS[name]

    var.LAST_STATS = None
    var.LAST_TIME = None

    # Notify every player of their role.
    for role, players in var.ROLES.items():
        for player in players:
            evt = Event("new_role", {"messages": [], "role": role, "in_wolfchat": False}, inherit_from=None)
            evt.dispatch(var, player, None)

    if not restart:
        gamemode = var.CURRENT_GAMEMODE.name
        if gamemode == "villagergame":
            gamemode = "default"

        # Advertise any non-default game options in the welcome message.
        options = []
        if var.ORIGINAL_SETTINGS.get("ROLE_REVEAL") is not None:
            options.append(messages["gso_rr_{0}".format(var.ROLE_REVEAL)])
        if var.ORIGINAL_SETTINGS.get("STATS_TYPE") is not None:
            options.append(messages["gso_st_{0}".format(var.STATS_TYPE)])
        if var.ORIGINAL_SETTINGS.get("ABSTAIN_ENABLED") is not None or var.ORIGINAL_SETTINGS.get("LIMIT_ABSTAIN") is not None:
            if var.ABSTAIN_ENABLED and var.LIMIT_ABSTAIN:
                options.append(messages["gso_abs_rest"])
            elif var.ABSTAIN_ENABLED:
                options.append(messages["gso_abs_unrest"])
            else:
                options.append(messages["gso_abs_none"])

        key = "welcome_simple"
        if options:
            key = "welcome_options"
        wrapper.send(messages[key].format(villagers, gamemode, options))
        wrapper.target.mode("+m")

    var.ORIGINAL_ROLES.clear()
    for role, players in var.ROLES.items():
        var.ORIGINAL_ROLES[role] = players.copy()

    var.DAY_TIMEDELTA = timedelta(0)
    var.NIGHT_TIMEDELTA = timedelta(0)
    var.DAY_START_TIME = datetime.now()
    var.NIGHT_START_TIME = datetime.now()
    var.LAST_PING = None

    if restart:
        # Allow transition_* below to run properly after a restart.
        var.PHASE = "join"
    if not var.START_WITH_DAY:
        from src.wolfgame import transition_night
        var.GAMEPHASE = "day"
        transition_night()
    else:
        from src.wolfgame import transition_day
        var.FIRST_DAY = True
        var.GAMEPHASE = "night"
        transition_day()

    decrement_stasis()

    if not (botconfig.DEBUG_MODE and var.DISABLE_DEBUG_MODE_REAPER):
        # DEATH TO IDLERS!
        from src.wolfgame import reaper
        reapertimer = threading.Thread(None, reaper, args=(wrapper.client, var.GAME_ID))
        reapertimer.daemon = True
        reapertimer.start()
def _command_disabled(var, wrapper, message):
    """Replacement handler installed over commands disabled mid-game.

    Tells the caller the command was disabled by an admin instead of
    executing the original command.
    """
    wrapper.send(messages["command_disabled_admin"])
@handle_error
def expire_start_votes(var, channel):
    """Timer callback: discard all pending start votes once they expire.

    Announces the expiry in ``channel``. The phase check guards against a
    race with game start.
    """
    # Should never happen as the timer is removed on game start, but just to be safe
    if var.PHASE != "join":
        return
    with var.WARNING_LOCK:
        START_VOTES.clear()
        channel.send(messages["start_expired"])
@event_listener("reset")
def on_reset(evt, var):
    """Clear per-game start/wait bookkeeping when game state is reset."""
    global MAX_RETRIES, WAIT_TOKENS, WAIT_LAST
    for tracker in (LAST_START, LAST_WAIT, START_VOTES):
        tracker.clear()
    MAX_RETRIES = 0
    WAIT_TOKENS = 0
    WAIT_LAST = 0
| true | true |
f720e349ea77eb354bef27e43be8e0b0f558fa43 | 3,840 | py | Python | wes_service/util.py | SamarthVP/workflow-service | a4a557ca17a38c1e8642983c2d3af6b6325da0f8 | [
"Apache-2.0"
] | 2 | 2020-02-14T18:41:08.000Z | 2020-02-17T06:56:10.000Z | wes_service/util.py | Sage-Bionetworks/workflow-service | 8b5dc0afe9ea0972014cdf48a693ee6f893cfe5e | [
"Apache-2.0"
] | 9 | 2021-03-31T19:32:52.000Z | 2022-02-26T23:21:38.000Z | wes_service/util.py | Sage-Bionetworks/workflow-service | 8b5dc0afe9ea0972014cdf48a693ee6f893cfe5e | [
"Apache-2.0"
] | 2 | 2020-02-12T23:21:35.000Z | 2020-06-02T14:50:31.000Z | import tempfile
import json
import os
import logging
from six import itervalues, iterlists
import connexion
from werkzeug.utils import secure_filename
def visit(d, op):
    """Recursively call op(d) for all list subelements and dictionary 'values' that d may have."""
    op(d)
    if isinstance(d, dict):
        for value in itervalues(d):
            visit(value, op)
    elif isinstance(d, list):
        for item in d:
            visit(item, op)
class WESBackend(object):
    """Stores and retrieves options. Intended to be inherited."""

    def __init__(self, opts):
        """Parse and store options as a list of tuples.

        Each entry of ``opts`` is a ``"key=value"`` string; duplicate keys
        are kept (see getoptlist).
        """
        self.pairs = []
        for o in opts if opts else []:
            k, v = o.split("=", 1)
            self.pairs.append((k, v))

    def getopt(self, p, default=None):
        """Returns the first option value stored that matches p or default."""
        for k, v in self.pairs:
            if k == p:
                return v
        return default

    def getoptlist(self, p):
        """Returns all option values stored that match p as a list."""
        optlist = []
        for k, v in self.pairs:
            if k == p:
                optlist.append(v)
        return optlist

    def log_for_run(self, run_id, message):
        """Log ``message`` at INFO level, tagged with the workflow run id."""
        logging.info("Workflow %s: %s", run_id, message)

    def collect_attachments(self, run_id=None):
        """Stage uploaded files and parse form fields from the current request.

        Reads ``connexion.request`` (must be called inside a request
        context). Workflow attachments are saved under a fresh temp
        directory with sanitized relative paths; JSON-valued fields are
        decoded. Returns ``(tempdir, body)`` where ``body`` is the parsed
        submission dict. Raises ValueError on unreadable parameters or a
        missing 'workflow_url'/'workflow_params'.
        """
        tempdir = tempfile.mkdtemp()
        body = {}
        has_attachments = False
        for k, ls in iterlists(connexion.request.files):
            try:
                for v in ls:
                    if k == "workflow_attachment":
                        # Sanitize each path component; drop "", "." and "..".
                        sp = v.filename.split("/")
                        fn = []
                        for p in sp:
                            if p not in ("", ".", ".."):
                                fn.append(secure_filename(p))
                        dest = os.path.join(tempdir, *fn)
                        if not os.path.isdir(os.path.dirname(dest)):
                            os.makedirs(os.path.dirname(dest))
                        self.log_for_run(run_id, "Staging attachment '%s' to '%s'" % (v.filename, dest))
                        v.save(dest)
                        has_attachments = True
                        body[k] = "file://%s" % tempdir  # Reference to temp working dir.
                    elif k in ("workflow_params", "tags", "workflow_engine_parameters"):
                        content = v.read()
                        body[k] = json.loads(content.decode("utf-8"))
                    else:
                        body[k] = v.read().decode()
            except Exception as e:
                raise ValueError("Error reading parameter '%s': %s" % (k, e))
        for k, ls in iterlists(connexion.request.form):
            try:
                for v in ls:
                    if not v:
                        continue
                    if k in ("workflow_params", "tags", "workflow_engine_parameters"):
                        body[k] = json.loads(v)
                    else:
                        body[k] = v
            except Exception as e:
                raise ValueError("Error reading parameter '%s': %s" % (k, e))

        if "workflow_url" in body:
            if ":" not in body["workflow_url"]:
                # Relative URL: must resolve against a staged attachment.
                if not has_attachments:
                    raise ValueError("Relative 'workflow_url' but missing 'workflow_attachment'")
                body["workflow_url"] = "file://%s" % os.path.join(tempdir, secure_filename(body["workflow_url"]))
            self.log_for_run(run_id, "Using workflow_url '%s'" % body.get("workflow_url"))
        else:
            raise ValueError("Missing 'workflow_url' in submission")

        if "workflow_params" not in body:
            raise ValueError("Missing 'workflow_params' in submission")

        return tempdir, body
| 38.019802 | 113 | 0.507552 | import tempfile
import json
import os
import logging
from six import itervalues, iterlists
import connexion
from werkzeug.utils import secure_filename
def visit(d, op):
    """Apply op to d, then recurse into list items and dict values."""
    op(d)
    children = ()
    if isinstance(d, list):
        children = d
    elif isinstance(d, dict):
        children = itervalues(d)
    for child in children:
        visit(child, op)
class WESBackend(object):
    """Stores and retrieves options. Intended to be inherited by backends."""

    def __init__(self, opts):
        """Parse "key=value" strings in ``opts`` into a list of tuples."""
        self.pairs = []
        for o in opts if opts else []:
            k, v = o.split("=", 1)
            self.pairs.append((k, v))

    def getopt(self, p, default=None):
        """Return the first stored option value matching ``p``, or ``default``."""
        for k, v in self.pairs:
            if k == p:
                return v
        return default

    def getoptlist(self, p):
        """Return all stored option values matching ``p`` as a list."""
        optlist = []
        for k, v in self.pairs:
            if k == p:
                optlist.append(v)
        return optlist

    def log_for_run(self, run_id, message):
        """Log ``message`` at INFO level, tagged with the workflow run id."""
        logging.info("Workflow %s: %s", run_id, message)

    def collect_attachments(self, run_id=None):
        """Stage uploaded files and parse form fields from the current request.

        Reads ``connexion.request`` (must be called inside a request
        context) and returns ``(tempdir, body)``. Raises ValueError on
        unreadable parameters or a missing 'workflow_url'/'workflow_params'.
        """
        tempdir = tempfile.mkdtemp()
        body = {}
        has_attachments = False
        for k, ls in iterlists(connexion.request.files):
            try:
                for v in ls:
                    if k == "workflow_attachment":
                        # Sanitize each path component; drop "", "." and "..".
                        sp = v.filename.split("/")
                        fn = []
                        for p in sp:
                            if p not in ("", ".", ".."):
                                fn.append(secure_filename(p))
                        dest = os.path.join(tempdir, *fn)
                        if not os.path.isdir(os.path.dirname(dest)):
                            os.makedirs(os.path.dirname(dest))
                        self.log_for_run(run_id, "Staging attachment '%s' to '%s'" % (v.filename, dest))
                        v.save(dest)
                        has_attachments = True
                        # body entry references the temp working dir.
                        body[k] = "file://%s" % tempdir
                    elif k in ("workflow_params", "tags", "workflow_engine_parameters"):
                        content = v.read()
                        body[k] = json.loads(content.decode("utf-8"))
                    else:
                        body[k] = v.read().decode()
            except Exception as e:
                raise ValueError("Error reading parameter '%s': %s" % (k, e))
        for k, ls in iterlists(connexion.request.form):
            try:
                for v in ls:
                    if not v:
                        continue
                    if k in ("workflow_params", "tags", "workflow_engine_parameters"):
                        body[k] = json.loads(v)
                    else:
                        body[k] = v
            except Exception as e:
                raise ValueError("Error reading parameter '%s': %s" % (k, e))

        if "workflow_url" in body:
            if ":" not in body["workflow_url"]:
                # Relative URL: must resolve against a staged attachment.
                if not has_attachments:
                    raise ValueError("Relative 'workflow_url' but missing 'workflow_attachment'")
                body["workflow_url"] = "file://%s" % os.path.join(tempdir, secure_filename(body["workflow_url"]))
            self.log_for_run(run_id, "Using workflow_url '%s'" % body.get("workflow_url"))
        else:
            raise ValueError("Missing 'workflow_url' in submission")

        if "workflow_params" not in body:
            raise ValueError("Missing 'workflow_params' in submission")

        return tempdir, body
| true | true |
f720e41f86ef851d3645b1502f4b7c42729748ba | 27,550 | py | Python | autosklearn/smbo.py | a1rb4Ck/auto-sklearn | cdf48b82632927ec56c8c14258c0bfc4c6b2e7d1 | [
"BSD-3-Clause"
] | null | null | null | autosklearn/smbo.py | a1rb4Ck/auto-sklearn | cdf48b82632927ec56c8c14258c0bfc4c6b2e7d1 | [
"BSD-3-Clause"
] | null | null | null | autosklearn/smbo.py | a1rb4Ck/auto-sklearn | cdf48b82632927ec56c8c14258c0bfc4c6b2e7d1 | [
"BSD-3-Clause"
] | null | null | null | import json
import os
import time
import traceback
import warnings
import numpy as np
import pynisher
from smac.facade.smac_facade import SMAC
from smac.optimizer.objective import average_cost
from smac.runhistory.runhistory import RunHistory
from smac.runhistory.runhistory2epm import RunHistory2EPM4Cost
from smac.scenario.scenario import Scenario
from smac.tae.execute_ta_run import StatusType
from smac.optimizer import pSMAC
import autosklearn.metalearning
from autosklearn.constants import MULTILABEL_CLASSIFICATION, \
BINARY_CLASSIFICATION, TASK_TYPES_TO_STRING, CLASSIFICATION_TASKS, \
REGRESSION_TASKS, MULTICLASS_CLASSIFICATION, REGRESSION
from autosklearn.metalearning.mismbo import suggest_via_metalearning
from autosklearn.data.abstract_data_manager import AbstractDataManager
from autosklearn.data.competition_data_manager import CompetitionDataManager
from autosklearn.evaluation import ExecuteTaFuncWithQueue, WORST_POSSIBLE_RESULT
from autosklearn.util import get_logger
from autosklearn.metalearning.metalearning.meta_base import MetaBase
from autosklearn.metalearning.metafeatures.metafeatures import \
calculate_all_metafeatures_with_labels, calculate_all_metafeatures_encoded_labels
# Metafeature names passed as ``dont_calculate`` to the metafeature
# calculators below for classification tasks (landmarking and PCA-based
# features are skipped).
EXCLUDE_META_FEATURES_CLASSIFICATION = {
    'Landmark1NN',
    'LandmarkDecisionNodeLearner',
    'LandmarkDecisionTree',
    'LandmarkLDA',
    'LandmarkNaiveBayes',
    'PCAFractionOfComponentsFor95PercentVariance',
    'PCAKurtosisFirstPC',
    'PCASkewnessFirstPC',
    'PCA'
}

# Same exclusions for regression tasks, plus the class-based statistics,
# which presumably do not apply without class labels.
EXCLUDE_META_FEATURES_REGRESSION = {
    'Landmark1NN',
    'LandmarkDecisionNodeLearner',
    'LandmarkDecisionTree',
    'LandmarkLDA',
    'LandmarkNaiveBayes',
    'PCAFractionOfComponentsFor95PercentVariance',
    'PCAKurtosisFirstPC',
    'PCASkewnessFirstPC',
    'NumberOfClasses',
    'ClassOccurences',
    'ClassProbabilityMin',
    'ClassProbabilityMax',
    'ClassProbabilityMean',
    'ClassProbabilitySTD',
    'ClassEntropy',
    'LandmarkRandomNodeLearner',
    'PCA',
}
# dataset helpers
def load_data(dataset_info, backend, max_mem=None):
    """Return the backend's stored datamanager, or build a fresh one.

    Falls back to constructing a CompetitionDataManager from
    ``dataset_info`` (optionally memory-capped) when the backend has no
    datamanager persisted yet.
    """
    D = None
    try:
        D = backend.load_datamanager()
    except IOError:
        # Datamanager probably doesn't exist yet; build it below.
        pass
    if D is not None:
        return D
    if max_mem is None:
        return CompetitionDataManager(dataset_info)
    return CompetitionDataManager(dataset_info, max_memory_in_mb=max_mem)
# metalearning helpers
def _calculate_metafeatures(data_feat_type, data_info_task, basename,
                            x_train, y_train, watcher, logger):
    """Compute dataset metafeatures on the raw training data.

    Returns the metafeature result object (pruned to entries of type
    'METAFEATURE'), or None when the task type is unsupported. Timing is
    recorded via ``watcher``.
    """
    task_name = 'CalculateMetafeatures'
    watcher.start_task(task_name)
    categorical = [feat_type.lower() == 'categorical'
                   for feat_type in data_feat_type]

    if data_info_task in CLASSIFICATION_TASKS:
        excluded = EXCLUDE_META_FEATURES_CLASSIFICATION
    else:
        excluded = EXCLUDE_META_FEATURES_REGRESSION

    supported = (MULTICLASS_CLASSIFICATION, BINARY_CLASSIFICATION,
                 MULTILABEL_CLASSIFICATION, REGRESSION)
    result = None
    if data_info_task in supported:
        logger.info('Start calculating metafeatures for %s', basename)
        result = calculate_all_metafeatures_with_labels(
            x_train, y_train, categorical=categorical,
            dataset_name=basename,
            dont_calculate=excluded, )
        # Keep only plain metafeature entries; drop helper values.
        values = result.metafeature_values
        for key in list(values.keys()):
            if values[key].type_ != 'METAFEATURE':
                del values[key]
    else:
        logger.info('Metafeatures not calculated')
    watcher.stop_task(task_name)
    logger.info(
        'Calculating Metafeatures (categorical attributes) took %5.2f',
        watcher.wall_elapsed(task_name))
    return result
def _calculate_metafeatures_encoded(basename, x_train, y_train, watcher,
                                    task, logger):
    """Compute metafeatures on the one-hot-encoded training data.

    Returns the result object pruned to 'METAFEATURE' entries; timing is
    recorded via ``watcher``.
    """
    if task in CLASSIFICATION_TASKS:
        excluded = EXCLUDE_META_FEATURES_CLASSIFICATION
    else:
        excluded = EXCLUDE_META_FEATURES_REGRESSION

    task_name = 'CalculateMetafeaturesEncoded'
    watcher.start_task(task_name)
    # After encoding, every column is treated as numerical.
    result = calculate_all_metafeatures_encoded_labels(
        x_train, y_train, categorical=[False] * x_train.shape[1],
        dataset_name=basename, dont_calculate=excluded)
    values = result.metafeature_values
    for key in list(values.keys()):
        if values[key].type_ != 'METAFEATURE':
            del values[key]
    watcher.stop_task(task_name)
    logger.info(
        'Calculating Metafeatures (encoded attributes) took %5.2fsec',
        watcher.wall_elapsed(task_name))
    return result
def _get_metalearning_configurations(meta_base, basename, metric,
                                     configuration_space,
                                     task,
                                     initial_configurations_via_metalearning,
                                     is_sparse,
                                     watcher, logger):
    """Query the metalearning database for promising initial configurations.

    Returns an empty list (after logging the traceback) if the lookup
    fails for any reason.
    """
    task_name = 'InitialConfigurations'
    watcher.start_task(task_name)
    metalearning_configurations = []
    try:
        metalearning_configurations = suggest_via_metalearning(
            meta_base, basename, metric,
            task,
            is_sparse == 1,
            initial_configurations_via_metalearning
        )
    except Exception as e:
        logger.error("Error getting metalearning configurations!")
        logger.error(str(e))
        logger.error(traceback.format_exc())
    watcher.stop_task(task_name)
    return metalearning_configurations
def _print_debug_info_of_init_configuration(initial_configurations, basename,
time_for_task, logger, watcher):
logger.debug('Initial Configurations: (%d)' % len(initial_configurations))
for initial_configuration in initial_configurations:
logger.debug(initial_configuration)
logger.debug('Looking for initial configurations took %5.2fsec',
watcher.wall_elapsed('InitialConfigurations'))
logger.info(
'Time left for %s after finding initial configurations: %5.2fsec',
basename, time_for_task - watcher.wall_elapsed(basename))
def get_smac_object(
    scenario_dict,
    seed,
    ta,
    backend,
    metalearning_configurations,
    runhistory,
):
    """Construct the SMAC facade used to drive hyperparameter optimization.

    Wires the scenario, the target-algorithm runner ``ta``, the shared run
    history and (when available) metalearning warm-start configurations.
    """
    # In shared-model mode every parallel run's output directory is read.
    if scenario_dict['shared-model']:
        smac_run_id = '*'
    else:
        smac_run_id = seed
    scenario_dict['input_psmac_dirs'] = backend.get_smac_output_glob(
        smac_run_id=smac_run_id,
    )
    scenario = Scenario(scenario_dict)

    initial_configurations = None
    if metalearning_configurations:
        # Evaluate the space's default configuration first, then the
        # metalearning suggestions.
        default_config = scenario.cs.get_default_configuration()
        initial_configurations = [default_config] + metalearning_configurations

    rh2EPM = RunHistory2EPM4Cost(
        num_params=len(scenario.cs.get_hyperparameters()),
        scenario=scenario,
        success_states=[
            StatusType.SUCCESS,
            StatusType.MEMOUT,
            StatusType.TIMEOUT,
            # As long as we don't have a model for crashes yet!
            StatusType.CRASHED,
        ],
        impute_censored_data=False,
        impute_state=None,
    )
    return SMAC(
        scenario=scenario,
        rng=seed,
        runhistory2epm=rh2EPM,
        tae_runner=ta,
        initial_configurations=initial_configurations,
        runhistory=runhistory,
        run_id=seed,
    )
class AutoMLSMBO(object):
    def __init__(self, config_space, dataset_name,
                 backend,
                 total_walltime_limit,
                 func_eval_time_limit,
                 memory_limit,
                 metric,
                 watcher, start_num_run=1,
                 data_memory_limit=None,
                 num_metalearning_cfgs=25,
                 config_file=None,
                 seed=1,
                 metadata_directory=None,
                 resampling_strategy='holdout',
                 resampling_strategy_args=None,
                 shared_mode=False,
                 include_estimators=None,
                 exclude_estimators=None,
                 include_preprocessors=None,
                 exclude_preprocessors=None,
                 disable_file_output=False,
                 std_scores=False,
                 smac_scenario_args=None,
                 get_smac_object_callback=None):
        """Store configuration for a Bayesian-optimization (SMBO) run.

        No work is done here beyond recording arguments and creating a
        logger; the optimization itself happens in ``run_smbo``.
        """
        super(AutoMLSMBO, self).__init__()
        # data related
        self.dataset_name = dataset_name
        self.datamanager = None  # populated lazily by reset_data_manager()
        self.metric = metric
        self.task = None  # set from the datamanager's info in reset_data_manager()
        self.backend = backend

        # the configuration space
        self.config_space = config_space

        # Evaluation
        self.resampling_strategy = resampling_strategy
        if resampling_strategy_args is None:
            resampling_strategy_args = {}
        self.resampling_strategy_args = resampling_strategy_args

        # and a bunch of useful limits
        self.total_walltime_limit = int(total_walltime_limit)
        self.func_eval_time_limit = int(func_eval_time_limit)
        self.memory_limit = memory_limit
        self.data_memory_limit = data_memory_limit
        self.watcher = watcher
        self.num_metalearning_cfgs = num_metalearning_cfgs
        self.config_file = config_file
        self.seed = seed
        self.metadata_directory = metadata_directory
        self.start_num_run = start_num_run
        self.shared_mode = shared_mode
        self.include_estimators = include_estimators
        self.exclude_estimators = exclude_estimators
        self.include_preprocessors = include_preprocessors
        self.exclude_preprocessors = exclude_preprocessors
        self.disable_file_output = disable_file_output
        self.std_scores = std_scores
        self.smac_scenario_args = smac_scenario_args
        self.get_smac_object_callback = get_smac_object_callback

        # Logger name embeds class, seed and dataset for disambiguation in
        # parallel runs.
        logger_name = '%s(%d):%s' % (self.__class__.__name__, self.seed,
                                     ":" + dataset_name if dataset_name is
                                                           not None else "")
        self.logger = get_logger(logger_name)
    def _send_warnings_to_log(self, message, category, filename, lineno,
                              file=None, line=None):
        """``warnings.showwarning``-compatible hook routing warnings to our logger."""
        self.logger.debug('%s:%s: %s:%s', filename, lineno, category.__name__,
                          message)
def reset_data_manager(self, max_mem=None):
if max_mem is None:
max_mem = self.data_memory_limit
if self.datamanager is not None:
del self.datamanager
if isinstance(self.dataset_name, AbstractDataManager):
self.datamanager = self.dataset_name
else:
self.datamanager = load_data(self.dataset_name,
self.backend,
max_mem=max_mem)
self.task = self.datamanager.info['task']
def collect_metalearning_suggestions(self, meta_base):
metalearning_configurations = _get_metalearning_configurations(
meta_base=meta_base,
basename=self.dataset_name,
metric=self.metric,
configuration_space=self.config_space,
task=self.task,
is_sparse=self.datamanager.info['is_sparse'],
initial_configurations_via_metalearning=self.num_metalearning_cfgs,
watcher=self.watcher,
logger=self.logger)
_print_debug_info_of_init_configuration(
metalearning_configurations,
self.dataset_name,
self.total_walltime_limit,
self.logger,
self.watcher)
return metalearning_configurations
def _calculate_metafeatures(self):
with warnings.catch_warnings():
warnings.showwarning = self._send_warnings_to_log
meta_features = _calculate_metafeatures(
data_feat_type=self.datamanager.feat_type,
data_info_task=self.datamanager.info['task'],
x_train=self.datamanager.data['X_train'],
y_train=self.datamanager.data['Y_train'],
basename=self.dataset_name,
watcher=self.watcher,
logger=self.logger)
return meta_features
def _calculate_metafeatures_with_limits(self, time_limit):
res = None
time_limit = max(time_limit, 1)
try:
safe_mf = pynisher.enforce_limits(mem_in_mb=self.memory_limit,
wall_time_in_s=int(time_limit),
grace_period_in_s=30,
logger=self.logger)(
self._calculate_metafeatures)
res = safe_mf()
except Exception as e:
self.logger.error('Error getting metafeatures: %s', str(e))
return res
def _calculate_metafeatures_encoded(self):
with warnings.catch_warnings():
warnings.showwarning = self._send_warnings_to_log
meta_features_encoded = _calculate_metafeatures_encoded(
self.dataset_name,
self.datamanager.data['X_train'],
self.datamanager.data['Y_train'],
self.watcher,
self.datamanager.info['task'],
self.logger)
return meta_features_encoded
def _calculate_metafeatures_encoded_with_limits(self, time_limit):
res = None
time_limit = max(time_limit, 1)
try:
safe_mf = pynisher.enforce_limits(mem_in_mb=self.memory_limit,
wall_time_in_s=int(time_limit),
grace_period_in_s=30,
logger=self.logger)(
self._calculate_metafeatures_encoded)
res = safe_mf()
except Exception as e:
self.logger.error('Error getting metafeatures (encoded) : %s',
str(e))
return res
    def run_smbo(self):
        """Run the full SMBO loop and return ``(runhistory, trajectory)``.

        Loads the data, collects metalearning warm starts, builds the
        target-algorithm runner and SMAC scenario, then optimizes until
        the wallclock budget is spent.
        """
        self.watcher.start_task('SMBO')

        # == first things first: load the datamanager
        self.reset_data_manager()

        # == Initialize non-SMBO stuff
        # first create a scenario
        seed = self.seed
        self.config_space.seed(seed)
        num_params = len(self.config_space.get_hyperparameters())
        # allocate a run history
        num_run = self.start_num_run

        # Initialize some SMAC dependencies
        metalearning_configurations = self.get_metalearning_suggestions()

        # partial-cv evaluates each fold as a separate SMAC "instance".
        if self.resampling_strategy in ['partial-cv',
                                        'partial-cv-iterative-fit']:
            num_folds = self.resampling_strategy_args['folds']
            instances = [[json.dumps({'task_id': self.dataset_name,
                                      'fold': fold_number})]
                         for fold_number in range(num_folds)]
        else:
            instances = [[json.dumps({'task_id': self.dataset_name})]]

        # TODO rebuild target algorithm to be it's own target algorithm
        # evaluator, which takes into account that a run can be killed prior
        # to the model being fully fitted; thus putting intermediate results
        # into a queue and querying them once the time is over
        exclude = dict()
        include = dict()
        # include/exclude lists are mutually exclusive per component type.
        if self.include_preprocessors is not None and \
                self.exclude_preprocessors is not None:
            raise ValueError('Cannot specify include_preprocessors and '
                             'exclude_preprocessors.')
        elif self.include_preprocessors is not None:
            include['preprocessor'] = self.include_preprocessors
        elif self.exclude_preprocessors is not None:
            exclude['preprocessor'] = self.exclude_preprocessors

        if self.include_estimators is not None and \
                self.exclude_estimators is not None:
            raise ValueError('Cannot specify include_estimators and '
                             'exclude_estimators.')
        elif self.include_estimators is not None:
            if self.task in CLASSIFICATION_TASKS:
                include['classifier'] = self.include_estimators
            elif self.task in REGRESSION_TASKS:
                include['regressor'] = self.include_estimators
            else:
                raise ValueError(self.task)
        elif self.exclude_estimators is not None:
            if self.task in CLASSIFICATION_TASKS:
                exclude['classifier'] = self.exclude_estimators
            elif self.task in REGRESSION_TASKS:
                exclude['regressor'] = self.exclude_estimators
            else:
                raise ValueError(self.task)

        # The target-algorithm runner SMAC calls for each configuration.
        ta = ExecuteTaFuncWithQueue(backend=self.backend,
                                    autosklearn_seed=seed,
                                    resampling_strategy=self.resampling_strategy,
                                    initial_num_run=num_run,
                                    logger=self.logger,
                                    include=include,
                                    exclude=exclude,
                                    metric=self.metric,
                                    memory_limit=self.memory_limit,
                                    disable_file_output=self.disable_file_output,
                                    std_scores=self.std_scores,
                                    **self.resampling_strategy_args)

        # Budget left after startup work (5s safety margin).
        startup_time = self.watcher.wall_elapsed(self.dataset_name)
        total_walltime_limit = self.total_walltime_limit - startup_time - 5
        scenario_dict = {
            'abort_on_first_run_crash': False,
            'cs': self.config_space,
            'cutoff_time': self.func_eval_time_limit,
            'deterministic': 'true',
            'instances': instances,
            'memory_limit': self.memory_limit,
            'output-dir':
                self.backend.get_smac_output_directory(),
            'run_obj': 'quality',
            'shared-model': self.shared_mode,
            'wallclock_limit': total_walltime_limit,
            'cost_for_crash': WORST_POSSIBLE_RESULT,
        }
        if self.smac_scenario_args is not None:
            # These keys are owned by auto-sklearn and may not be overridden.
            for arg in [
                'abort_on_first_run_crash',
                'cs',
                'deterministic',
                'instances',
                'output-dir',
                'run_obj',
                'shared-model',
                'cost_for_crash',
            ]:
                if arg in self.smac_scenario_args:
                    self.logger.warning('Cannot override scenario argument %s, '
                                        'will ignore this.', arg)
                    del self.smac_scenario_args[arg]
            # These may be overridden, but we warn about it.
            for arg in [
                'cutoff_time',
                'memory_limit',
                'wallclock_limit',
            ]:
                if arg in self.smac_scenario_args:
                    self.logger.warning(
                        'Overriding scenario argument %s: %s with value %s',
                        arg,
                        scenario_dict[arg],
                        self.smac_scenario_args[arg]
                    )
            scenario_dict.update(self.smac_scenario_args)

        runhistory = RunHistory(aggregate_func=average_cost)
        smac_args = {
            'scenario_dict': scenario_dict,
            'seed': seed,
            'ta': ta,
            'backend': self.backend,
            'metalearning_configurations': metalearning_configurations,
            'runhistory': runhistory,
        }
        if self.get_smac_object_callback is not None:
            smac = self.get_smac_object_callback(**smac_args)
        else:
            smac = get_smac_object(**smac_args)

        smac.optimize()

        # Patch SMAC to read in data from parallel runs after the last
        # function evaluation
        if self.shared_mode:
            pSMAC.read(
                run_history=smac.solver.runhistory,
                output_dirs=smac.solver.scenario.input_psmac_dirs,
                configuration_space=smac.solver.config_space,
                logger=smac.solver.logger,
            )

        self.runhistory = smac.solver.runhistory
        self.trajectory = smac.solver.intensifier.traj_logger.trajectory

        return self.runhistory, self.trajectory
def get_metalearning_suggestions(self):
    """Collect warm-start configurations from the meta-learning database.

    Resolves the metadata directory (bundled default or user-supplied),
    computes meta-features for the current dataset within a time budget,
    and queries the meta-learning base for promising configurations.

    Returns a (possibly empty) list of configurations; empty when
    meta-learning is disabled, the metadata directory is missing, or
    meta-feature calculation failed.

    Raises ValueError if a user-supplied metadata directory does not
    exist or lacks the subdirectory for this metric/task combination.
    """
    # == METALEARNING suggestions
    # we start by evaluating the defaults on the full dataset again
    # and add the suggestions from metalearning behind it
    if self.num_metalearning_cfgs > 0:
        # If metadata directory is None, use default
        if self.metadata_directory is None:
            metalearning_directory = os.path.dirname(
                autosklearn.metalearning.__file__)
            # There is no multilabel data in OpenML
            if self.task == MULTILABEL_CLASSIFICATION:
                meta_task = BINARY_CLASSIFICATION
            else:
                meta_task = self.task
            # Subdirectory name encodes metric, task type and sparsity,
            # e.g. 'accuracy_binary.classification_dense'.
            metadata_directory = os.path.join(
                metalearning_directory, 'files',
                '%s_%s_%s' % (self.metric, TASK_TYPES_TO_STRING[meta_task],
                              'sparse' if self.datamanager.info['is_sparse']
                              else 'dense'))
            self.metadata_directory = metadata_directory
        # If metadata directory is specified by user,
        # then verify that it exists.
        else:
            if not os.path.exists(self.metadata_directory):
                raise ValueError('The specified metadata directory \'%s\' '
                                 'does not exist!' % self.metadata_directory)
            else:
                # There is no multilabel data in OpenML
                if self.task == MULTILABEL_CLASSIFICATION:
                    meta_task = BINARY_CLASSIFICATION
                else:
                    meta_task = self.task
                metadata_directory = os.path.join(
                    self.metadata_directory,
                    '%s_%s_%s' % (self.metric, TASK_TYPES_TO_STRING[meta_task],
                                  'sparse' if self.datamanager.info['is_sparse']
                                  else 'dense'))
                # Check that the metadata directory has the correct
                # subdirectory needed for this dataset.
                if os.path.basename(metadata_directory) not in \
                        os.listdir(self.metadata_directory):
                    raise ValueError('The specified metadata directory '
                                     '\'%s\' does not have the correct '
                                     'subdirectory \'%s\'' %
                                     (self.metadata_directory,
                                      os.path.basename(metadata_directory))
                                     )
            # Point directly at the metric/task-specific subdirectory.
            self.metadata_directory = metadata_directory

        if os.path.exists(self.metadata_directory):
            self.logger.info('Metadata directory: %s',
                             self.metadata_directory)
            meta_base = MetaBase(self.config_space, self.metadata_directory)

            # Budget a quarter of the total wallclock time for
            # meta-feature calculation (raw + encoded together).
            metafeature_calculation_time_limit = int(
                self.total_walltime_limit / 4)
            metafeature_calculation_start_time = time.time()
            meta_features = self._calculate_metafeatures_with_limits(
                metafeature_calculation_time_limit)
            metafeature_calculation_end_time = time.time()
            # Whatever is left of the budget goes to the encoded pass.
            metafeature_calculation_time_limit = \
                metafeature_calculation_time_limit - (
                    metafeature_calculation_end_time -
                    metafeature_calculation_start_time)

            if metafeature_calculation_time_limit < 1:
                self.logger.warning(
                    'Time limit for metafeature calculation less '
                    'than 1 seconds (%f). Skipping calculation '
                    'of metafeatures for encoded dataset.',
                    metafeature_calculation_time_limit)
                meta_features_encoded = None
            else:
                with warnings.catch_warnings():
                    warnings.showwarning = self._send_warnings_to_log
                    self.datamanager.perform1HotEncoding()
                meta_features_encoded = \
                    self._calculate_metafeatures_encoded_with_limits(
                        metafeature_calculation_time_limit)

            # In case there is a problem calculating the encoded meta-features
            if meta_features is None:
                if meta_features_encoded is not None:
                    meta_features = meta_features_encoded
            else:
                if meta_features_encoded is not None:
                    # Merge encoded values into the raw meta-features.
                    meta_features.metafeature_values.update(
                        meta_features_encoded.metafeature_values)

            if meta_features is not None:
                meta_base.add_dataset(self.dataset_name, meta_features)
                # Do mean imputation of the meta-features - should be done specific
                # for each prediction model!
                all_metafeatures = meta_base.get_metafeatures(
                    features=list(meta_features.keys()))
                all_metafeatures.fillna(all_metafeatures.mean(),
                                        inplace=True)

                with warnings.catch_warnings():
                    warnings.showwarning = self._send_warnings_to_log
                    metalearning_configurations = self.collect_metalearning_suggestions(
                        meta_base)
                if metalearning_configurations is None:
                    metalearning_configurations = []
                self.reset_data_manager()

                self.logger.info('%s', meta_features)

                # Convert meta-features into a dictionary because the scenario
                # expects a dictionary
                meta_features_dict = {}
                for dataset, series in all_metafeatures.iterrows():
                    meta_features_dict[dataset] = series.values
                # NOTE(review): meta_features_list is built but not used
                # further in this method — presumably legacy; confirm.
                meta_features_list = []
                for meta_feature_name in all_metafeatures.columns:
                    meta_features_list.append(
                        meta_features[meta_feature_name].value)
                meta_features_list = np.array(meta_features_list).reshape(
                    (1, -1))
                self.logger.info(list(meta_features_dict.keys()))
        else:
            meta_features = None
            self.logger.warning('Could not find meta-data directory %s' %
                                metadata_directory)
    else:
        meta_features = None

    # Fall back to an empty suggestion list whenever meta-features could
    # not be produced (also covers num_metalearning_cfgs <= 0).
    if meta_features is None:
        meta_features_list = []
        metalearning_configurations = []
    return metalearning_configurations
| 41.742424 | 92 | 0.59147 | import json
import os
import time
import traceback
import warnings
import numpy as np
import pynisher
from smac.facade.smac_facade import SMAC
from smac.optimizer.objective import average_cost
from smac.runhistory.runhistory import RunHistory
from smac.runhistory.runhistory2epm import RunHistory2EPM4Cost
from smac.scenario.scenario import Scenario
from smac.tae.execute_ta_run import StatusType
from smac.optimizer import pSMAC
import autosklearn.metalearning
from autosklearn.constants import MULTILABEL_CLASSIFICATION, \
BINARY_CLASSIFICATION, TASK_TYPES_TO_STRING, CLASSIFICATION_TASKS, \
REGRESSION_TASKS, MULTICLASS_CLASSIFICATION, REGRESSION
from autosklearn.metalearning.mismbo import suggest_via_metalearning
from autosklearn.data.abstract_data_manager import AbstractDataManager
from autosklearn.data.competition_data_manager import CompetitionDataManager
from autosklearn.evaluation import ExecuteTaFuncWithQueue, WORST_POSSIBLE_RESULT
from autosklearn.util import get_logger
from autosklearn.metalearning.metalearning.meta_base import MetaBase
from autosklearn.metalearning.metafeatures.metafeatures import \
calculate_all_metafeatures_with_labels, calculate_all_metafeatures_encoded_labels
# Meta-features that are too expensive or not meaningful to compute for the
# respective task family; passed as ``dont_calculate`` to the calculators.
EXCLUDE_META_FEATURES_CLASSIFICATION = {
    'Landmark1NN',
    'LandmarkDecisionNodeLearner',
    'LandmarkDecisionTree',
    'LandmarkLDA',
    'LandmarkNaiveBayes',
    'PCAFractionOfComponentsFor95PercentVariance',
    'PCAKurtosisFirstPC',
    'PCASkewnessFirstPC',
    'PCA'
}

# Regression additionally excludes every class-label-based meta-feature.
EXCLUDE_META_FEATURES_REGRESSION = {
    'Landmark1NN',
    'LandmarkDecisionNodeLearner',
    'LandmarkDecisionTree',
    'LandmarkLDA',
    'LandmarkNaiveBayes',
    'PCAFractionOfComponentsFor95PercentVariance',
    'PCAKurtosisFirstPC',
    'PCASkewnessFirstPC',
    'NumberOfClasses',
    'ClassOccurences',
    'ClassProbabilityMin',
    'ClassProbabilityMax',
    'ClassProbabilityMean',
    'ClassProbabilitySTD',
    'ClassEntropy',
    'LandmarkRandomNodeLearner',
    'PCA',
}
def load_data(dataset_info, backend, max_mem=None):
    """Return a data manager, preferring the one cached by *backend*.

    Falls back to parsing the competition-format dataset described by
    *dataset_info* when no cached manager exists (or loading it fails
    with an IOError). *max_mem* optionally caps parsing memory in MB.
    """
    try:
        manager = backend.load_datamanager()
    except IOError:
        manager = None

    if manager is not None:
        return manager

    kwargs = {} if max_mem is None else {'max_memory_in_mb': max_mem}
    return CompetitionDataManager(dataset_info, **kwargs)
# metalearning helpers
def _calculate_metafeatures(data_feat_type, data_info_task, basename,
                            x_train, y_train, watcher, logger):
    """Compute label-based meta-features for the raw training data.

    Returns the result object with only 'METAFEATURE'-typed entries kept,
    or ``None`` for unsupported task types. Timing is always recorded on
    *watcher* under 'CalculateMetafeatures'.
    """
    task_name = 'CalculateMetafeatures'
    watcher.start_task(task_name)

    # One boolean per input column: is it declared categorical?
    categorical = [feat_type.lower() in ['categorical']
                   for feat_type in data_feat_type]

    if data_info_task in CLASSIFICATION_TASKS:
        exclude = EXCLUDE_META_FEATURES_CLASSIFICATION
    else:
        exclude = EXCLUDE_META_FEATURES_REGRESSION

    supported_tasks = [MULTICLASS_CLASSIFICATION, BINARY_CLASSIFICATION,
                       MULTILABEL_CLASSIFICATION, REGRESSION]
    if data_info_task in supported_tasks:
        logger.info('Start calculating metafeatures for %s', basename)
        result = calculate_all_metafeatures_with_labels(
            x_train, y_train, categorical=categorical,
            dataset_name=basename,
            dont_calculate=exclude, )
        # Drop helper entries; keep only genuine meta-feature values.
        for key, entry in list(result.metafeature_values.items()):
            if entry.type_ != 'METAFEATURE':
                del result.metafeature_values[key]
    else:
        result = None
        logger.info('Metafeatures not calculated')

    watcher.stop_task(task_name)
    logger.info(
        'Calculating Metafeatures (categorical attributes) took %5.2f',
        watcher.wall_elapsed(task_name))
    return result
def _calculate_metafeatures_encoded(basename, x_train, y_train, watcher,
                                    task, logger):
    """Compute meta-features on the one-hot-encoded training data.

    Mirrors :func:`_calculate_metafeatures` but treats every column as
    numerical (post-encoding) and always returns a result object.
    """
    if task in CLASSIFICATION_TASKS:
        exclude = EXCLUDE_META_FEATURES_CLASSIFICATION
    else:
        exclude = EXCLUDE_META_FEATURES_REGRESSION

    task_name = 'CalculateMetafeaturesEncoded'
    watcher.start_task(task_name)
    # After one-hot encoding no column is categorical any more.
    result = calculate_all_metafeatures_encoded_labels(
        x_train, y_train, categorical=[False] * x_train.shape[1],
        dataset_name=basename, dont_calculate=exclude)

    for key, entry in list(result.metafeature_values.items()):
        if entry.type_ != 'METAFEATURE':
            del result.metafeature_values[key]

    watcher.stop_task(task_name)
    logger.info(
        'Calculating Metafeatures (encoded attributes) took %5.2fsec',
        watcher.wall_elapsed(task_name))
    return result
def _get_metalearning_configurations(meta_base, basename, metric,
                                     configuration_space,
                                     task,
                                     initial_configurations_via_metalearning,
                                     is_sparse,
                                     watcher, logger):
    """Ask the meta-learning base for promising starting configurations.

    Any failure is logged (message, exception, traceback) and turned
    into an empty list, so the caller never has to handle it.
    """
    task_name = 'InitialConfigurations'
    watcher.start_task(task_name)
    try:
        suggestions = suggest_via_metalearning(
            meta_base,
            basename,
            metric,
            task,
            is_sparse == 1,
            initial_configurations_via_metalearning,
        )
    except Exception as e:
        logger.error("Error getting metalearning configurations!")
        logger.error(str(e))
        logger.error(traceback.format_exc())
        suggestions = []
    watcher.stop_task(task_name)
    return suggestions
def _print_debug_info_of_init_configuration(initial_configurations, basename,
                                            time_for_task, logger, watcher):
    """Log the initial configuration set and the remaining time budget."""
    logger.debug('Initial Configurations: (%d)' % len(initial_configurations))
    for configuration in initial_configurations:
        logger.debug(configuration)

    search_time = watcher.wall_elapsed('InitialConfigurations')
    logger.debug('Looking for initial configurations took %5.2fsec',
                 search_time)

    elapsed_on_dataset = watcher.wall_elapsed(basename)
    logger.info(
        'Time left for %s after finding initial configurations: %5.2fsec',
        basename, time_for_task - elapsed_on_dataset)
def get_smac_object(
        scenario_dict,
        seed,
        ta,
        backend,
        metalearning_configurations,
        runhistory,
):
    """Build the default SMAC optimizer for auto-sklearn.

    Mutates *scenario_dict* (adds 'input_psmac_dirs'), prepends the
    config-space default to the metalearning configurations, and wires a
    RunHistory2EPM4Cost that treats MEMOUT/TIMEOUT/CRASHED runs as
    successes for model building.
    """
    # In shared mode every SMAC run's output directory is globbed ('*');
    # otherwise only this seed's own directory is read back.
    scenario_dict['input_psmac_dirs'] = backend.get_smac_output_glob(
        smac_run_id=seed if not scenario_dict['shared-model'] else '*',
    )
    scenario = Scenario(scenario_dict)
    if len(metalearning_configurations) > 0:
        # Always evaluate the config-space default first.
        default_config = scenario.cs.get_default_configuration()
        initial_configurations = [default_config] + metalearning_configurations
    else:
        initial_configurations = None
    rh2EPM = RunHistory2EPM4Cost(
        num_params=len(scenario.cs.get_hyperparameters()),
        scenario=scenario,
        success_states=[
            StatusType.SUCCESS,
            StatusType.MEMOUT,
            StatusType.TIMEOUT,
            # As long as we don't have a model for crashes yet!
            StatusType.CRASHED,
        ],
        impute_censored_data=False,
        impute_state=None,
    )
    return SMAC(
        scenario=scenario,
        rng=seed,
        runhistory2epm=rh2EPM,
        tae_runner=ta,
        initial_configurations=initial_configurations,
        runhistory=runhistory,
        run_id=seed,
    )
class AutoMLSMBO(object):
def __init__(self, config_space, dataset_name,
             backend,
             total_walltime_limit,
             func_eval_time_limit,
             memory_limit,
             metric,
             watcher, start_num_run=1,
             data_memory_limit=None,
             num_metalearning_cfgs=25,
             config_file=None,
             seed=1,
             metadata_directory=None,
             resampling_strategy='holdout',
             resampling_strategy_args=None,
             shared_mode=False,
             include_estimators=None,
             exclude_estimators=None,
             include_preprocessors=None,
             exclude_preprocessors=None,
             disable_file_output=False,
             std_scores=False,
             smac_scenario_args=None,
             get_smac_object_callback=None):
    """Store all SMBO configuration; no work is performed here.

    The data manager is loaded lazily by :meth:`reset_data_manager`, so
    ``self.datamanager`` and ``self.task`` start out as ``None``.
    ``num_metalearning_cfgs=0`` disables meta-learning warm starts.
    """
    super(AutoMLSMBO, self).__init__()

    # Dataset / evaluation inputs.
    self.dataset_name = dataset_name
    self.datamanager = None
    self.metric = metric
    self.task = None
    self.backend = backend

    self.config_space = config_space

    self.resampling_strategy = resampling_strategy
    if resampling_strategy_args is None:
        resampling_strategy_args = {}
    self.resampling_strategy_args = resampling_strategy_args

    # Resource limits (wallclock/per-evaluation limits coerced to int
    # because SMAC's scenario expects integral seconds).
    self.total_walltime_limit = int(total_walltime_limit)
    self.func_eval_time_limit = int(func_eval_time_limit)
    self.memory_limit = memory_limit
    self.data_memory_limit = data_memory_limit
    self.watcher = watcher
    self.num_metalearning_cfgs = num_metalearning_cfgs
    self.config_file = config_file
    self.seed = seed
    self.metadata_directory = metadata_directory
    self.start_num_run = start_num_run
    self.shared_mode = shared_mode
    self.include_estimators = include_estimators
    self.exclude_estimators = exclude_estimators
    self.include_preprocessors = include_preprocessors
    self.exclude_preprocessors = exclude_preprocessors
    self.disable_file_output = disable_file_output
    self.std_scores = std_scores
    self.smac_scenario_args = smac_scenario_args
    self.get_smac_object_callback = get_smac_object_callback

    # Logger name includes class, seed and (optionally) the dataset.
    logger_name = '%s(%d):%s' % (self.__class__.__name__, self.seed,
                                 ":" + dataset_name if dataset_name is
                                 not None else "")
    self.logger = get_logger(logger_name)
def _send_warnings_to_log(self, message, category, filename, lineno,
                          file=None, line=None):
    """Adapter with the ``warnings.showwarning`` signature that routes
    captured warnings into this object's logger at DEBUG level."""
    self.logger.debug(
        '%s:%s: %s:%s', filename, lineno, category.__name__, message)
def reset_data_manager(self, max_mem=None):
    """Drop any cached data manager and (re)load a fresh one.

    ``self.dataset_name`` may itself already be a data manager, in which
    case it is used directly; ``self.task`` is refreshed either way.
    """
    if max_mem is None:
        max_mem = self.data_memory_limit

    if self.datamanager is not None:
        del self.datamanager

    dataset = self.dataset_name
    if isinstance(dataset, AbstractDataManager):
        self.datamanager = dataset
    else:
        self.datamanager = load_data(dataset, self.backend,
                                     max_mem=max_mem)

    self.task = self.datamanager.info['task']
def collect_metalearning_suggestions(self, meta_base):
    """Query *meta_base* for warm-start configurations and log them."""
    suggestions = _get_metalearning_configurations(
        meta_base=meta_base,
        basename=self.dataset_name,
        metric=self.metric,
        configuration_space=self.config_space,
        task=self.task,
        is_sparse=self.datamanager.info['is_sparse'],
        initial_configurations_via_metalearning=self.num_metalearning_cfgs,
        watcher=self.watcher,
        logger=self.logger,
    )
    _print_debug_info_of_init_configuration(
        suggestions,
        self.dataset_name,
        self.total_walltime_limit,
        self.logger,
        self.watcher,
    )
    return suggestions
def _calculate_metafeatures(self):
    """Compute raw-data meta-features, logging any warnings instead of
    printing them."""
    dm = self.datamanager
    with warnings.catch_warnings():
        # Redirect warnings raised during the computation to our logger.
        warnings.showwarning = self._send_warnings_to_log
        result = _calculate_metafeatures(
            data_feat_type=dm.feat_type,
            data_info_task=dm.info['task'],
            x_train=dm.data['X_train'],
            y_train=dm.data['Y_train'],
            basename=self.dataset_name,
            watcher=self.watcher,
            logger=self.logger)
    return result
def _calculate_metafeatures_with_limits(self, time_limit):
    """Run :meth:`_calculate_metafeatures` under memory/wallclock limits.

    Returns ``None`` when the limited call fails for any reason; the
    error is logged rather than propagated.
    """
    result = None
    # Enforce at least one second of wallclock time.
    wall_time = max(time_limit, 1)
    try:
        limited_call = pynisher.enforce_limits(
            mem_in_mb=self.memory_limit,
            wall_time_in_s=int(wall_time),
            grace_period_in_s=30,
            logger=self.logger,
        )(self._calculate_metafeatures)
        result = limited_call()
    except Exception as e:
        self.logger.error('Error getting metafeatures: %s', str(e))
    return result
def _calculate_metafeatures_encoded(self):
    """Compute meta-features on the one-hot-encoded data, logging any
    warnings instead of printing them."""
    with warnings.catch_warnings():
        warnings.showwarning = self._send_warnings_to_log
        result = _calculate_metafeatures_encoded(
            basename=self.dataset_name,
            x_train=self.datamanager.data['X_train'],
            y_train=self.datamanager.data['Y_train'],
            watcher=self.watcher,
            task=self.datamanager.info['task'],
            logger=self.logger)
    return result
def _calculate_metafeatures_encoded_with_limits(self, time_limit):
    """Run :meth:`_calculate_metafeatures_encoded` under memory and
    wallclock limits; returns ``None`` on any failure (logged)."""
    result = None
    wall_time = max(time_limit, 1)
    try:
        limited_call = pynisher.enforce_limits(
            mem_in_mb=self.memory_limit,
            wall_time_in_s=int(wall_time),
            grace_period_in_s=30,
            logger=self.logger,
        )(self._calculate_metafeatures_encoded)
        result = limited_call()
    except Exception as e:
        self.logger.error('Error getting metafeatures (encoded) : %s',
                          str(e))
    return result
def run_smbo(self):
    """Run the full SMAC-based Bayesian optimization loop.

    Loads the data, gathers meta-learning warm starts, builds the
    target-algorithm evaluator and SMAC scenario, optimizes, and stores
    the resulting run history and trajectory on ``self``.

    Returns the tuple ``(runhistory, trajectory)``.
    """
    self.watcher.start_task('SMBO')

    # Load or refresh the data manager before anything else.
    self.reset_data_manager()

    seed = self.seed
    self.config_space.seed(seed)
    # NOTE(review): num_params is computed but not referenced below.
    num_params = len(self.config_space.get_hyperparameters())
    num_run = self.start_num_run

    metalearning_configurations = self.get_metalearning_suggestions()

    # SMAC "instances": one per CV fold for partial-cv strategies,
    # otherwise a single instance for the whole task.
    if self.resampling_strategy in ['partial-cv',
                                    'partial-cv-iterative-fit']:
        num_folds = self.resampling_strategy_args['folds']
        instances = [[json.dumps({'task_id': self.dataset_name,
                                  'fold': fold_number})]
                     for fold_number in range(num_folds)]
    else:
        instances = [[json.dumps({'task_id': self.dataset_name})]]

    # evaluator, which takes into account that a run can be killed prior
    # to the model being fully fitted; thus putting intermediate results
    # into a queue and querying them once the time is over
    exclude = dict()
    include = dict()
    if self.include_preprocessors is not None and \
            self.exclude_preprocessors is not None:
        raise ValueError('Cannot specify include_preprocessors and '
                         'exclude_preprocessors.')
    elif self.include_preprocessors is not None:
        include['preprocessor'] = self.include_preprocessors
    elif self.exclude_preprocessors is not None:
        exclude['preprocessor'] = self.exclude_preprocessors

    if self.include_estimators is not None and \
            self.exclude_estimators is not None:
        raise ValueError('Cannot specify include_estimators and '
                         'exclude_estimators.')
    elif self.include_estimators is not None:
        if self.task in CLASSIFICATION_TASKS:
            include['classifier'] = self.include_estimators
        elif self.task in REGRESSION_TASKS:
            include['regressor'] = self.include_estimators
        else:
            raise ValueError(self.task)
    elif self.exclude_estimators is not None:
        if self.task in CLASSIFICATION_TASKS:
            exclude['classifier'] = self.exclude_estimators
        elif self.task in REGRESSION_TASKS:
            exclude['regressor'] = self.exclude_estimators
        else:
            raise ValueError(self.task)

    ta = ExecuteTaFuncWithQueue(backend=self.backend,
                                autosklearn_seed=seed,
                                resampling_strategy=self.resampling_strategy,
                                initial_num_run=num_run,
                                logger=self.logger,
                                include=include,
                                exclude=exclude,
                                metric=self.metric,
                                memory_limit=self.memory_limit,
                                disable_file_output=self.disable_file_output,
                                std_scores=self.std_scores,
                                **self.resampling_strategy_args)

    # Subtract startup cost (plus a 5s safety margin) from the budget.
    startup_time = self.watcher.wall_elapsed(self.dataset_name)
    total_walltime_limit = self.total_walltime_limit - startup_time - 5
    scenario_dict = {
        'abort_on_first_run_crash': False,
        'cs': self.config_space,
        'cutoff_time': self.func_eval_time_limit,
        # 'deterministic' is passed as the string 'true' — presumably
        # SMAC's scenario parser expects string booleans here; confirm.
        'deterministic': 'true',
        'instances': instances,
        'memory_limit': self.memory_limit,
        'output-dir':
            self.backend.get_smac_output_directory(),
        'run_obj': 'quality',
        'shared-model': self.shared_mode,
        'wallclock_limit': total_walltime_limit,
        'cost_for_crash': WORST_POSSIBLE_RESULT,
    }
    if self.smac_scenario_args is not None:
        # These keys are owned by auto-sklearn and silently dropped from
        # the user's overrides (with a warning).
        for arg in [
            'abort_on_first_run_crash',
            'cs',
            'deterministic',
            'instances',
            'output-dir',
            'run_obj',
            'shared-model',
            'cost_for_crash',
        ]:
            if arg in self.smac_scenario_args:
                self.logger.warning('Cannot override scenario argument %s, '
                                    'will ignore this.', arg)
                del self.smac_scenario_args[arg]
        # These keys may be overridden, but we warn about it.
        for arg in [
            'cutoff_time',
            'memory_limit',
            'wallclock_limit',
        ]:
            if arg in self.smac_scenario_args:
                self.logger.warning(
                    'Overriding scenario argument %s: %s with value %s',
                    arg,
                    scenario_dict[arg],
                    self.smac_scenario_args[arg]
                )
        scenario_dict.update(self.smac_scenario_args)

    runhistory = RunHistory(aggregate_func=average_cost)
    smac_args = {
        'scenario_dict': scenario_dict,
        'seed': seed,
        'ta': ta,
        'backend': self.backend,
        'metalearning_configurations': metalearning_configurations,
        'runhistory': runhistory,
    }
    # A user-supplied callback may replace the default SMAC factory.
    if self.get_smac_object_callback is not None:
        smac = self.get_smac_object_callback(**smac_args)
    else:
        smac = get_smac_object(**smac_args)

    smac.optimize()

    # Patch SMAC to read in data from parallel runs after the last
    # function evaluation
    if self.shared_mode:
        pSMAC.read(
            run_history=smac.solver.runhistory,
            output_dirs=smac.solver.scenario.input_psmac_dirs,
            configuration_space=smac.solver.config_space,
            logger=smac.solver.logger,
        )

    self.runhistory = smac.solver.runhistory
    self.trajectory = smac.solver.intensifier.traj_logger.trajectory

    return self.runhistory, self.trajectory
def get_metalearning_suggestions(self):
# == METALEARNING suggestions
# we start by evaluating the defaults on the full dataset again
# and add the suggestions from metalearning behind it
if self.num_metalearning_cfgs > 0:
# If metadata directory is None, use default
if self.metadata_directory is None:
metalearning_directory = os.path.dirname(
autosklearn.metalearning.__file__)
# There is no multilabel data in OpenML
if self.task == MULTILABEL_CLASSIFICATION:
meta_task = BINARY_CLASSIFICATION
else:
meta_task = self.task
metadata_directory = os.path.join(
metalearning_directory, 'files',
'%s_%s_%s' % (self.metric, TASK_TYPES_TO_STRING[meta_task],
'sparse' if self.datamanager.info['is_sparse']
else 'dense'))
self.metadata_directory = metadata_directory
# If metadata directory is specified by user,
# then verify that it exists.
else:
if not os.path.exists(self.metadata_directory):
raise ValueError('The specified metadata directory \'%s\' '
'does not exist!' % self.metadata_directory)
else:
# There is no multilabel data in OpenML
if self.task == MULTILABEL_CLASSIFICATION:
meta_task = BINARY_CLASSIFICATION
else:
meta_task = self.task
metadata_directory = os.path.join(
self.metadata_directory,
'%s_%s_%s' % (self.metric, TASK_TYPES_TO_STRING[meta_task],
'sparse' if self.datamanager.info['is_sparse']
else 'dense'))
# Check that the metadata directory has the correct
# subdirectory needed for this dataset.
if os.path.basename(metadata_directory) not in \
os.listdir(self.metadata_directory):
raise ValueError('The specified metadata directory '
'\'%s\' does not have the correct '
'subdirectory \'%s\'' %
(self.metadata_directory,
os.path.basename(metadata_directory))
)
self.metadata_directory = metadata_directory
if os.path.exists(self.metadata_directory):
self.logger.info('Metadata directory: %s',
self.metadata_directory)
meta_base = MetaBase(self.config_space, self.metadata_directory)
metafeature_calculation_time_limit = int(
self.total_walltime_limit / 4)
metafeature_calculation_start_time = time.time()
meta_features = self._calculate_metafeatures_with_limits(
metafeature_calculation_time_limit)
metafeature_calculation_end_time = time.time()
metafeature_calculation_time_limit = \
metafeature_calculation_time_limit - (
metafeature_calculation_end_time -
metafeature_calculation_start_time)
if metafeature_calculation_time_limit < 1:
self.logger.warning(
'Time limit for metafeature calculation less '
'than 1 seconds (%f). Skipping calculation '
'of metafeatures for encoded dataset.',
metafeature_calculation_time_limit)
meta_features_encoded = None
else:
with warnings.catch_warnings():
warnings.showwarning = self._send_warnings_to_log
self.datamanager.perform1HotEncoding()
meta_features_encoded = \
self._calculate_metafeatures_encoded_with_limits(
metafeature_calculation_time_limit)
# In case there is a problem calculating the encoded meta-features
if meta_features is None:
if meta_features_encoded is not None:
meta_features = meta_features_encoded
else:
if meta_features_encoded is not None:
meta_features.metafeature_values.update(
meta_features_encoded.metafeature_values)
if meta_features is not None:
meta_base.add_dataset(self.dataset_name, meta_features)
# Do mean imputation of the meta-features - should be done specific
# for each prediction model!
all_metafeatures = meta_base.get_metafeatures(
features=list(meta_features.keys()))
all_metafeatures.fillna(all_metafeatures.mean(),
inplace=True)
with warnings.catch_warnings():
warnings.showwarning = self._send_warnings_to_log
metalearning_configurations = self.collect_metalearning_suggestions(
meta_base)
if metalearning_configurations is None:
metalearning_configurations = []
self.reset_data_manager()
self.logger.info('%s', meta_features)
# Convert meta-features into a dictionary because the scenario
# expects a dictionary
meta_features_dict = {}
for dataset, series in all_metafeatures.iterrows():
meta_features_dict[dataset] = series.values
meta_features_list = []
for meta_feature_name in all_metafeatures.columns:
meta_features_list.append(
meta_features[meta_feature_name].value)
meta_features_list = np.array(meta_features_list).reshape(
(1, -1))
self.logger.info(list(meta_features_dict.keys()))
else:
meta_features = None
self.logger.warning('Could not find meta-data directory %s' %
metadata_directory)
else:
meta_features = None
if meta_features is None:
meta_features_list = []
metalearning_configurations = []
return metalearning_configurations
| true | true |
f720e4b13eef675ed79b1d8f5021f8b090a3e097 | 3,223 | py | Python | harbor/datadog_checks/harbor/config_models/defaults.py | codylerum/integrations-core | aee18148cebf5026099abde7bc218d3ba8d2e75c | [
"BSD-3-Clause"
] | null | null | null | harbor/datadog_checks/harbor/config_models/defaults.py | codylerum/integrations-core | aee18148cebf5026099abde7bc218d3ba8d2e75c | [
"BSD-3-Clause"
] | null | null | null | harbor/datadog_checks/harbor/config_models/defaults.py | codylerum/integrations-core | aee18148cebf5026099abde7bc218d3ba8d2e75c | [
"BSD-3-Clause"
] | null | null | null | # (C) Datadog, Inc. 2021-present
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
from datadog_checks.base.utils.models.fields import get_default_field_value
# Default-value providers for the Harbor check's config model. Each function
# is looked up by name ('shared_<option>' / 'instance_<option>') and returns
# the default for that option; get_default_field_value defers to the field's
# own declared default. NOTE(review): this file looks auto-generated —
# presumably it should not be edited by hand; confirm before changing.

# --- defaults shared across all instances -------------------------------
def shared_proxy(field, value):
    return get_default_field_value(field, value)


def shared_service(field, value):
    return get_default_field_value(field, value)


def shared_skip_proxy(field, value):
    return False


def shared_timeout(field, value):
    return 10


# --- per-instance defaults ----------------------------------------------
def instance_allow_redirects(field, value):
    return True


def instance_auth_token(field, value):
    return get_default_field_value(field, value)


def instance_auth_type(field, value):
    return 'basic'


def instance_aws_host(field, value):
    return get_default_field_value(field, value)


def instance_aws_region(field, value):
    return get_default_field_value(field, value)


def instance_aws_service(field, value):
    return get_default_field_value(field, value)


def instance_connect_timeout(field, value):
    return get_default_field_value(field, value)


def instance_disable_generic_tags(field, value):
    return False


def instance_empty_default_hostname(field, value):
    return False


def instance_extra_headers(field, value):
    return get_default_field_value(field, value)


def instance_headers(field, value):
    return get_default_field_value(field, value)


def instance_kerberos_auth(field, value):
    return 'disabled'


def instance_kerberos_cache(field, value):
    return get_default_field_value(field, value)


def instance_kerberos_delegate(field, value):
    return False


def instance_kerberos_force_initiate(field, value):
    return False


def instance_kerberos_hostname(field, value):
    return get_default_field_value(field, value)


def instance_kerberos_keytab(field, value):
    return get_default_field_value(field, value)


def instance_kerberos_principal(field, value):
    return get_default_field_value(field, value)


def instance_log_requests(field, value):
    return False


def instance_min_collection_interval(field, value):
    return 15


def instance_ntlm_domain(field, value):
    return get_default_field_value(field, value)


def instance_persist_connections(field, value):
    return False


def instance_proxy(field, value):
    return get_default_field_value(field, value)


def instance_read_timeout(field, value):
    return get_default_field_value(field, value)


def instance_service(field, value):
    return get_default_field_value(field, value)


def instance_skip_proxy(field, value):
    return False


def instance_tags(field, value):
    return get_default_field_value(field, value)


def instance_timeout(field, value):
    return 10


def instance_tls_ca_cert(field, value):
    return get_default_field_value(field, value)


def instance_tls_cert(field, value):
    return get_default_field_value(field, value)


def instance_tls_ignore_warning(field, value):
    return False


def instance_tls_private_key(field, value):
    return get_default_field_value(field, value)


def instance_tls_use_host_header(field, value):
    return False


def instance_tls_verify(field, value):
    return True


def instance_use_legacy_auth_encoding(field, value):
    return True
| 20.018634 | 75 | 0.779398 |
from datadog_checks.base.utils.models.fields import get_default_field_value
def shared_proxy(field, value):
return get_default_field_value(field, value)
def shared_service(field, value):
return get_default_field_value(field, value)
def shared_skip_proxy(field, value):
return False
def shared_timeout(field, value):
return 10
def instance_allow_redirects(field, value):
return True
def instance_auth_token(field, value):
return get_default_field_value(field, value)
def instance_auth_type(field, value):
return 'basic'
def instance_aws_host(field, value):
return get_default_field_value(field, value)
def instance_aws_region(field, value):
return get_default_field_value(field, value)
def instance_aws_service(field, value):
return get_default_field_value(field, value)
def instance_connect_timeout(field, value):
return get_default_field_value(field, value)
def instance_disable_generic_tags(field, value):
return False
def instance_empty_default_hostname(field, value):
return False
def instance_extra_headers(field, value):
return get_default_field_value(field, value)
def instance_headers(field, value):
return get_default_field_value(field, value)
def instance_kerberos_auth(field, value):
return 'disabled'
def instance_kerberos_cache(field, value):
return get_default_field_value(field, value)
def instance_kerberos_delegate(field, value):
return False
def instance_kerberos_force_initiate(field, value):
return False
def instance_kerberos_hostname(field, value):
return get_default_field_value(field, value)
def instance_kerberos_keytab(field, value):
return get_default_field_value(field, value)
def instance_kerberos_principal(field, value):
return get_default_field_value(field, value)
def instance_log_requests(field, value):
return False
def instance_min_collection_interval(field, value):
return 15
def instance_ntlm_domain(field, value):
return get_default_field_value(field, value)
def instance_persist_connections(field, value):
return False
def instance_proxy(field, value):
return get_default_field_value(field, value)
def instance_read_timeout(field, value):
return get_default_field_value(field, value)
def instance_service(field, value):
return get_default_field_value(field, value)
def instance_skip_proxy(field, value):
return False
def instance_tags(field, value):
return get_default_field_value(field, value)
def instance_timeout(field, value):
return 10
def instance_tls_ca_cert(field, value):
return get_default_field_value(field, value)
def instance_tls_cert(field, value):
return get_default_field_value(field, value)
def instance_tls_ignore_warning(field, value):
return False
def instance_tls_private_key(field, value):
return get_default_field_value(field, value)
def instance_tls_use_host_header(field, value):
return False
def instance_tls_verify(field, value):
return True
def instance_use_legacy_auth_encoding(field, value):
return True
| true | true |
f720e54b8a4add55c8bb4945dbfdd8f7cd946e00 | 790 | py | Python | st2common/st2common/exceptions/ssh.py | kkkanil/st2 | 07cd195d7a6e177a37dd019e5c9ab8329259d0fa | [
"Apache-2.0"
] | null | null | null | st2common/st2common/exceptions/ssh.py | kkkanil/st2 | 07cd195d7a6e177a37dd019e5c9ab8329259d0fa | [
"Apache-2.0"
] | 15 | 2021-02-11T22:58:54.000Z | 2021-08-06T18:03:47.000Z | st2common/st2common/exceptions/ssh.py | kkkanil/st2 | 07cd195d7a6e177a37dd019e5c9ab8329259d0fa | [
"Apache-2.0"
] | 1 | 2021-07-10T15:02:29.000Z | 2021-07-10T15:02:29.000Z | # Copyright 2020 The StackStorm Authors.
# Copyright 2019 Extreme Networks, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Declare the module's public API. Fix: NoHostsConnectedToException was
# defined below but missing from __all__, so star-imports silently omitted
# a public exception.
__all__ = [
    'InvalidCredentialsException',
    'NoHostsConnectedToException',
]


class InvalidCredentialsException(Exception):
    """Raised when SSH authentication fails because the supplied
    credentials are invalid."""
    pass


class NoHostsConnectedToException(Exception):
    """Raised when a connection could not be established to any of the
    requested hosts."""
    pass
| 29.259259 | 74 | 0.764557 |
__all__ = [
'InvalidCredentialsException'
]
class InvalidCredentialsException(Exception):
pass
class NoHostsConnectedToException(Exception):
pass
| true | true |
f720e5c38c523665abca1c94ba91d51a3d76168c | 18,992 | py | Python | flytekit/common/launch_plan.py | tnsetting/flytekit | 4782264ffbc4bfdbaabe7a789a9ad76cb7e5499e | [
"Apache-2.0"
] | null | null | null | flytekit/common/launch_plan.py | tnsetting/flytekit | 4782264ffbc4bfdbaabe7a789a9ad76cb7e5499e | [
"Apache-2.0"
] | null | null | null | flytekit/common/launch_plan.py | tnsetting/flytekit | 4782264ffbc4bfdbaabe7a789a9ad76cb7e5499e | [
"Apache-2.0"
] | null | null | null | import datetime as _datetime
import logging as _logging
import uuid as _uuid
import six as _six
from deprecated import deprecated as _deprecated
from flytekit.common import interface as _interface
from flytekit.common import nodes as _nodes
from flytekit.common import promise as _promises
from flytekit.common import sdk_bases as _sdk_bases
from flytekit.common import workflow_execution as _workflow_execution
from flytekit.common.core import identifier as _identifier
from flytekit.common.exceptions import scopes as _exception_scopes
from flytekit.common.exceptions import user as _user_exceptions
from flytekit.common.mixins import hash as _hash_mixin
from flytekit.common.mixins import launchable as _launchable_mixin
from flytekit.common.mixins import registerable as _registerable
from flytekit.common.types import helpers as _type_helpers
from flytekit.configuration import auth as _auth_config
from flytekit.configuration import sdk as _sdk_config
from flytekit.engines.flyte import engine as _flyte_engine
from flytekit.models import common as _common_models
from flytekit.models import execution as _execution_models
from flytekit.models import interface as _interface_models
from flytekit.models import launch_plan as _launch_plan_models
from flytekit.models import literals as _literal_models
from flytekit.models import schedule as _schedule_model
from flytekit.models.core import identifier as _identifier_model
from flytekit.models.core import workflow as _workflow_models
class SdkLaunchPlan(
    _launchable_mixin.LaunchableEntity,
    _registerable.HasDependencies,
    _registerable.RegisterableEntity,
    _launch_plan_models.LaunchPlanSpec,
    metaclass=_sdk_bases.ExtendedSdkType,
):
    """SDK-side wrapper around a Flyte launch plan specification.

    Instances are typically obtained via :meth:`fetch` (hydrated from Flyte Admin)
    or :meth:`promote_from_model`.  On top of the raw ``LaunchPlanSpec`` model this
    adds registration, execution launching, and workflow-node composition
    (``__call__``) so a launch plan can be used as a node inside another workflow.
    """

    def __init__(self, *args, **kwargs):
        super(SdkLaunchPlan, self).__init__(*args, **kwargs)
        # Set all the attributes we expect this class to have
        self._id = None
        # The interface is not set explicitly unless fetched in an engine context
        self._interface = None

    @classmethod
    def promote_from_model(cls, model) -> "SdkLaunchPlan":
        """
        Wraps a raw IDL launch plan spec in this SDK class.

        Note: the typed interface is NOT populated here; see :meth:`fetch`.

        :param flytekit.models.launch_plan.LaunchPlanSpec model:
        :rtype: SdkLaunchPlan
        """
        return cls(
            workflow_id=_identifier.Identifier.promote_from_model(model.workflow_id),
            default_inputs=_interface_models.ParameterMap(
                {
                    k: _promises.Input.promote_from_model(v).rename_and_return_reference(k)
                    for k, v in _six.iteritems(model.default_inputs.parameters)
                }
            ),
            fixed_inputs=model.fixed_inputs,
            entity_metadata=model.entity_metadata,
            labels=model.labels,
            annotations=model.annotations,
            auth_role=model.auth_role,
            raw_output_data_config=model.raw_output_data_config,
        )

    @_exception_scopes.system_entry_point
    def register(self, project, domain, name, version):
        """
        Registers this launch plan with Flyte Admin under the given identifier and
        records that identifier on this object.  Registration is idempotent: an
        "already exists" response from Admin is treated as success.

        :param Text project:
        :param Text domain:
        :param Text name:
        :param Text version:
        """
        self.validate()
        id_to_register = _identifier.Identifier(
            _identifier_model.ResourceType.LAUNCH_PLAN, project, domain, name, version
        )
        client = _flyte_engine.get_client()
        try:
            client.create_launch_plan(id_to_register, self)
        except _user_exceptions.FlyteEntityAlreadyExistsException:
            # Already registered under this exact identifier -- treat as success.
            pass
        self._id = id_to_register
        return str(self.id)

    @classmethod
    @_exception_scopes.system_entry_point
    def fetch(cls, project, domain, name, version=None):
        """
        This function uses the engine loader to call create a hydrated task from Admin.
        :param Text project:
        :param Text domain:
        :param Text name:
        :param Text version: [Optional] If not set, the SDK will fetch the active launch plan for the given project,
            domain, and name.
        :rtype: SdkLaunchPlan
        """
        # Imported locally to avoid a circular import with the workflow module.
        from flytekit.common import workflow as _workflow

        launch_plan_id = _identifier.Identifier(
            _identifier_model.ResourceType.LAUNCH_PLAN, project, domain, name, version
        )

        if launch_plan_id.version:
            lp = _flyte_engine.get_client().get_launch_plan(launch_plan_id)
        else:
            # No version supplied: resolve whichever version Admin marks as active.
            named_entity_id = _common_models.NamedEntityIdentifier(
                launch_plan_id.project, launch_plan_id.domain, launch_plan_id.name
            )
            lp = _flyte_engine.get_client().get_active_launch_plan(named_entity_id)

        sdk_lp = cls.promote_from_model(lp.spec)
        sdk_lp._id = lp.id

        # TODO: Add a test for this, and this function as a whole
        # The spec alone does not carry a typed interface, so fetch the backing
        # workflow and borrow its interface to enable __call__ composition.
        wf_id = sdk_lp.workflow_id
        lp_wf = _workflow.SdkWorkflow.fetch(wf_id.project, wf_id.domain, wf_id.name, wf_id.version)
        sdk_lp._interface = lp_wf.interface
        sdk_lp._has_registered = True
        return sdk_lp

    @_exception_scopes.system_entry_point
    def serialize(self):
        """
        Unlike the SdkWorkflow serialize call, nothing special needs to be done here.
        :rtype: flyteidl.admin.launch_plan_pb2.LaunchPlanSpec
        """
        return self.to_flyte_idl()

    @property
    def id(self):
        """
        Identifier assigned at registration or fetch time; None before that.
        :rtype: flytekit.common.core.identifier.Identifier
        """
        return self._id

    @property
    def is_scheduled(self):
        """
        True if either a cron expression or a fixed-rate schedule is configured.
        :rtype: bool
        """
        if self.entity_metadata.schedule.cron_expression:
            return True
        elif self.entity_metadata.schedule.rate and self.entity_metadata.schedule.rate.value:
            return True
        else:
            return False

    @property
    def auth_role(self):
        """
        Resolution order: an auth role explicitly set on the spec, then the auth
        config values, then the deprecated `role` config entry as a last resort.
        :rtype: flytekit.models.common.AuthRole
        """
        fixed_auth = super(SdkLaunchPlan, self).auth_role
        if fixed_auth is not None and (
            fixed_auth.assumable_iam_role is not None or fixed_auth.kubernetes_service_account is not None
        ):
            return fixed_auth

        assumable_iam_role = _auth_config.ASSUMABLE_IAM_ROLE.get()
        kubernetes_service_account = _auth_config.KUBERNETES_SERVICE_ACCOUNT.get()

        if not (assumable_iam_role or kubernetes_service_account):
            _logging.warning(
                "Using deprecated `role` from config. Please update your config to use `assumable_iam_role` instead"
            )
            assumable_iam_role = _sdk_config.ROLE.get()
        return _common_models.AuthRole(
            assumable_iam_role=assumable_iam_role, kubernetes_service_account=kubernetes_service_account,
        )

    @property
    def workflow_id(self):
        """
        :rtype: flytekit.common.core.identifier.Identifier
        """
        return self._workflow_id

    @property
    def interface(self):
        """
        The interface is not technically part of the admin.LaunchPlanSpec in the IDL, however the workflow ID is, and
        from the workflow ID, fetch will fill in the interface. This is nice because then you can __call__ the=
        object and get a node.
        :rtype: flytekit.common.interface.TypedInterface
        """
        return self._interface

    @property
    def resource_type(self):
        """
        Integer from _identifier.ResourceType enum
        :rtype: int
        """
        return _identifier_model.ResourceType.LAUNCH_PLAN

    @property
    def entity_type_text(self):
        """
        Human-readable entity type used in messages and registration output.
        :rtype: Text
        """
        return "Launch Plan"

    @property
    def raw_output_data_config(self):
        """
        Offloaded-data location; an explicit value on the spec wins over config.
        :rtype: flytekit.models.common.RawOutputDataConfig
        """
        raw_output_data_config = super(SdkLaunchPlan, self).raw_output_data_config
        if raw_output_data_config is not None and raw_output_data_config.output_location_prefix != "":
            return raw_output_data_config

        # If it was not set explicitly then let's use the value found in the configuration.
        return _common_models.RawOutputDataConfig(_auth_config.RAW_OUTPUT_DATA_PREFIX.get())

    @_exception_scopes.system_entry_point
    def validate(self):
        # TODO: Validate workflow is satisfied
        pass

    @_exception_scopes.system_entry_point
    def update(self, state):
        """
        Activates or archives this launch plan on Admin.

        :param int state: Enum value from flytekit.models.launch_plan.LaunchPlanState
        :raises flytekit.common.exceptions.user.FlyteAssertion: if this object has no ID yet.
        """
        if not self.id:
            raise _user_exceptions.FlyteAssertion(
                "Failed to update launch plan because the launch plan's ID is not set. Please call register to fetch "
                "or register the identifier first"
            )
        return _flyte_engine.get_client().update_launch_plan(self.id, state)

    def _python_std_input_map_to_literal_map(self, inputs):
        """
        :param dict[Text,Any] inputs: A dictionary of Python standard inputs that will be type-checked and compiled
            to a LiteralMap
        :rtype: flytekit.models.literals.LiteralMap
        """
        return _type_helpers.pack_python_std_map_to_literal_map(
            inputs,
            {k: user_input.sdk_type for k, user_input in _six.iteritems(self.default_inputs.parameters) if k in inputs},
        )

    @_deprecated(reason="Use launch_with_literals instead", version="0.9.0")
    def execute_with_literals(
        self,
        project,
        domain,
        literal_inputs,
        name=None,
        notification_overrides=None,
        label_overrides=None,
        annotation_overrides=None,
    ):
        """
        Deprecated. Thin alias kept for backward compatibility; see launch_with_literals.
        """
        return self.launch_with_literals(
            project, domain, literal_inputs, name, notification_overrides, label_overrides, annotation_overrides,
        )

    @_exception_scopes.system_entry_point
    def launch_with_literals(
        self,
        project,
        domain,
        literal_inputs,
        name=None,
        notification_overrides=None,
        label_overrides=None,
        annotation_overrides=None,
    ):
        """
        Executes the launch plan and returns the execution identifier. This version of execution is meant for when
        you already have a LiteralMap of inputs.

        :param Text project:
        :param Text domain:
        :param flytekit.models.literals.LiteralMap literal_inputs: Inputs to the execution.
        :param Text name: [Optional] If specified, an execution will be created with this name. Note: the name must
            be unique within the context of the project and domain.
        :param list[flytekit.common.notifications.Notification] notification_overrides: [Optional] If specified, these
            are the notifications that will be honored for this execution. An empty list signals to disable all
            notifications.
        :param flytekit.models.common.Labels label_overrides:
        :param flytekit.models.common.Annotations annotation_overrides:
        :rtype: flytekit.common.workflow_execution.SdkWorkflowExecution
        """
        # Kubernetes requires names starting with an alphabet for some resources.
        name = name or "f" + _uuid.uuid4().hex[:19]
        # An explicitly-empty list (as opposed to None) means "disable all notifications".
        disable_all = notification_overrides == []
        if disable_all:
            notification_overrides = None
        else:
            notification_overrides = _execution_models.NotificationList(notification_overrides or [])
            disable_all = None

        client = _flyte_engine.get_client()
        try:
            exec_id = client.create_execution(
                project,
                domain,
                name,
                _execution_models.ExecutionSpec(
                    self.id,
                    _execution_models.ExecutionMetadata(
                        _execution_models.ExecutionMetadata.ExecutionMode.MANUAL,
                        "sdk",  # TODO: get principle
                        0,  # TODO: Detect nesting
                    ),
                    notifications=notification_overrides,
                    disable_all=disable_all,
                    labels=label_overrides,
                    annotations=annotation_overrides,
                ),
                literal_inputs,
            )
        except _user_exceptions.FlyteEntityAlreadyExistsException:
            # Name collision: assume this is the same (idempotent) launch and reuse it.
            exec_id = _identifier.WorkflowExecutionIdentifier(project, domain, name)
        execution = client.get_execution(exec_id)
        return _workflow_execution.SdkWorkflowExecution.promote_from_model(execution)

    @_exception_scopes.system_entry_point
    def __call__(self, *args, **input_map):
        """
        :param list[T] args: Do not specify. Kwargs only are supported for this function.
        :param dict[Text,T] input_map: Map of inputs. Can be statically defined or OutputReference links.
        :rtype: flytekit.common.nodes.SdkNode
        """
        if len(args) > 0:
            raise _user_exceptions.FlyteAssertion(
                "When adding a launchplan as a node in a workflow, all inputs must be specified with kwargs only. We "
                "detected {} positional args.".format(len(args))
            )

        # Take the default values from the launch plan
        default_inputs = {k: v.sdk_default for k, v in _six.iteritems(self.default_inputs.parameters) if not v.required}
        default_inputs.update(input_map)

        bindings, upstream_nodes = self.interface.create_bindings_for_inputs(default_inputs)

        return _nodes.SdkNode(
            id=None,
            metadata=_workflow_models.NodeMetadata("", _datetime.timedelta(), _literal_models.RetryStrategy(0)),
            bindings=sorted(bindings, key=lambda b: b.var),
            upstream_nodes=upstream_nodes,
            sdk_launch_plan=self,
        )

    def __repr__(self):
        """
        :rtype: Text
        """
        return "SdkLaunchPlan(ID: {} Interface: {} WF ID: {})".format(self.id, self.interface, self.workflow_id)
# The difference between this and the SdkLaunchPlan class is that this runnable class is supposed to only be used for
# launch plans loaded alongside the current Python interpreter.
class SdkRunnableLaunchPlan(_hash_mixin.HashOnReferenceMixin, SdkLaunchPlan):
    """Launch plan built from a workflow defined in the current Python process.

    Unlike :class:`SdkLaunchPlan`, which is hydrated from Admin, this variant is
    constructed directly from a local ``SdkRunnableWorkflow``; it therefore cannot
    be fetched from Admin nor promoted from serialized models.
    """

    def __init__(
        self,
        sdk_workflow,
        default_inputs=None,
        fixed_inputs=None,
        role=None,
        schedule=None,
        notifications=None,
        labels=None,
        annotations=None,
        auth_role=None,
        raw_output_data_config=None,
    ):
        """
        :param flytekit.common.local_workflow.SdkRunnableWorkflow sdk_workflow:
        :param dict[Text,flytekit.common.promise.Input] default_inputs:
        :param dict[Text,Any] fixed_inputs: These inputs will be fixed and not need to be set when executing this
            launch plan.
        :param Text role: Deprecated. IAM role to execute this launch plan with.
        :param flytekit.models.schedule.Schedule schedule: Schedule to apply to this workflow.
        :param list[flytekit.models.common.Notification] notifications: List of notifications to apply to this
            launch plan.
        :param flytekit.models.common.Labels labels: Any custom kubernetes labels to apply to workflows executed by this
            launch plan.
        :param flytekit.models.common.Annotations annotations: Any custom kubernetes annotations to apply to workflows
            executed by this launch plan.
        :param flytekit.models.common.AuthRole auth_role: The auth method with which to execute the workflow.
        :param flytekit.models.common.RawOutputDataConfig raw_output_data_config: Config for offloading data
        """
        if role and auth_role:
            raise ValueError("Cannot set both role and auth. Role is deprecated, use auth instead.")

        fixed_inputs = fixed_inputs or {}
        default_inputs = default_inputs or {}

        if role:
            # Backwards compatibility: fold the deprecated `role` into an AuthRole.
            auth_role = _common_models.AuthRole(assumable_iam_role=role)

        # The constructor for SdkLaunchPlan sets the id to None anyways so we don't bother passing in an ID. The ID
        # should be set in one of three places,
        # 1) When the object is registered (in the code above)
        # 2) By the dynamic task code after this runnable object has already been __call__'ed. The SdkNode produced
        #    maintains a link to this object and will set the ID according to the configuration variables present.
        # 3) When SdkLaunchPlan.fetch() is run
        super(SdkRunnableLaunchPlan, self).__init__(
            None,
            _launch_plan_models.LaunchPlanMetadata(
                schedule=schedule or _schedule_model.Schedule(""), notifications=notifications or [],
            ),
            _interface_models.ParameterMap(default_inputs),
            _type_helpers.pack_python_std_map_to_literal_map(
                fixed_inputs,
                {
                    k: _type_helpers.get_sdk_type_from_literal_type(var.type)
                    for k, var in _six.iteritems(sdk_workflow.interface.inputs)
                    if k in fixed_inputs
                },
            ),
            labels or _common_models.Labels({}),
            annotations or _common_models.Annotations({}),
            auth_role,
            raw_output_data_config or _common_models.RawOutputDataConfig(""),
        )
        # The launch plan's inputs are the non-fixed defaults; outputs mirror the workflow's.
        self._interface = _interface.TypedInterface(
            {k: v.var for k, v in _six.iteritems(default_inputs)}, sdk_workflow.interface.outputs,
        )
        self._upstream_entities = {sdk_workflow}
        self._sdk_workflow = sdk_workflow

    @classmethod
    def from_flyte_idl(cls, _):
        # Deserialization is intentionally unsupported for locally-defined launch plans.
        raise _user_exceptions.FlyteAssertion(
            "An SdkRunnableLaunchPlan must be created from a reference to local Python code only."
        )

    @classmethod
    def promote_from_model(cls, model):
        # Promotion from IDL models is intentionally unsupported; see class docstring.
        raise _user_exceptions.FlyteAssertion(
            "An SdkRunnableLaunchPlan must be created from a reference to local Python code only."
        )

    @classmethod
    @_exception_scopes.system_entry_point
    def fetch(cls, project, domain, name, version=None):
        """
        Intentionally unsupported: runnable launch plans only exist for local code.
        Use SdkLaunchPlan.fetch to retrieve a launch plan from Admin.

        :param Text project:
        :param Text domain:
        :param Text name:
        :param Text version:
        :rtype: SdkRunnableLaunchPlan
        """
        raise _user_exceptions.FlyteAssertion(
            "An SdkRunnableLaunchPlan must be created from a reference to local Python code only."
        )

    @property
    def workflow_id(self):
        """
        :rtype: flytekit.common.core.identifier.Identifier
        """
        return self._sdk_workflow.id

    def __repr__(self):
        """
        :rtype: Text
        """
        return "SdkRunnableLaunchPlan(ID: {} Interface: {} WF ID: {})".format(self.id, self.interface, self.workflow_id)
| 40.15222 | 120 | 0.662647 | import datetime as _datetime
import logging as _logging
import uuid as _uuid
import six as _six
from deprecated import deprecated as _deprecated
from flytekit.common import interface as _interface
from flytekit.common import nodes as _nodes
from flytekit.common import promise as _promises
from flytekit.common import sdk_bases as _sdk_bases
from flytekit.common import workflow_execution as _workflow_execution
from flytekit.common.core import identifier as _identifier
from flytekit.common.exceptions import scopes as _exception_scopes
from flytekit.common.exceptions import user as _user_exceptions
from flytekit.common.mixins import hash as _hash_mixin
from flytekit.common.mixins import launchable as _launchable_mixin
from flytekit.common.mixins import registerable as _registerable
from flytekit.common.types import helpers as _type_helpers
from flytekit.configuration import auth as _auth_config
from flytekit.configuration import sdk as _sdk_config
from flytekit.engines.flyte import engine as _flyte_engine
from flytekit.models import common as _common_models
from flytekit.models import execution as _execution_models
from flytekit.models import interface as _interface_models
from flytekit.models import launch_plan as _launch_plan_models
from flytekit.models import literals as _literal_models
from flytekit.models import schedule as _schedule_model
from flytekit.models.core import identifier as _identifier_model
from flytekit.models.core import workflow as _workflow_models
class SdkLaunchPlan(
_launchable_mixin.LaunchableEntity,
_registerable.HasDependencies,
_registerable.RegisterableEntity,
_launch_plan_models.LaunchPlanSpec,
metaclass=_sdk_bases.ExtendedSdkType,
):
def __init__(self, *args, **kwargs):
super(SdkLaunchPlan, self).__init__(*args, **kwargs)
self._id = None
self._interface = None
@classmethod
def promote_from_model(cls, model) -> "SdkLaunchPlan":
return cls(
workflow_id=_identifier.Identifier.promote_from_model(model.workflow_id),
default_inputs=_interface_models.ParameterMap(
{
k: _promises.Input.promote_from_model(v).rename_and_return_reference(k)
for k, v in _six.iteritems(model.default_inputs.parameters)
}
),
fixed_inputs=model.fixed_inputs,
entity_metadata=model.entity_metadata,
labels=model.labels,
annotations=model.annotations,
auth_role=model.auth_role,
raw_output_data_config=model.raw_output_data_config,
)
@_exception_scopes.system_entry_point
def register(self, project, domain, name, version):
self.validate()
id_to_register = _identifier.Identifier(
_identifier_model.ResourceType.LAUNCH_PLAN, project, domain, name, version
)
client = _flyte_engine.get_client()
try:
client.create_launch_plan(id_to_register, self)
except _user_exceptions.FlyteEntityAlreadyExistsException:
pass
self._id = id_to_register
return str(self.id)
@classmethod
@_exception_scopes.system_entry_point
def fetch(cls, project, domain, name, version=None):
from flytekit.common import workflow as _workflow
launch_plan_id = _identifier.Identifier(
_identifier_model.ResourceType.LAUNCH_PLAN, project, domain, name, version
)
if launch_plan_id.version:
lp = _flyte_engine.get_client().get_launch_plan(launch_plan_id)
else:
named_entity_id = _common_models.NamedEntityIdentifier(
launch_plan_id.project, launch_plan_id.domain, launch_plan_id.name
)
lp = _flyte_engine.get_client().get_active_launch_plan(named_entity_id)
sdk_lp = cls.promote_from_model(lp.spec)
sdk_lp._id = lp.id
wf_id = sdk_lp.workflow_id
lp_wf = _workflow.SdkWorkflow.fetch(wf_id.project, wf_id.domain, wf_id.name, wf_id.version)
sdk_lp._interface = lp_wf.interface
sdk_lp._has_registered = True
return sdk_lp
@_exception_scopes.system_entry_point
def serialize(self):
return self.to_flyte_idl()
@property
def id(self):
return self._id
@property
def is_scheduled(self):
if self.entity_metadata.schedule.cron_expression:
return True
elif self.entity_metadata.schedule.rate and self.entity_metadata.schedule.rate.value:
return True
else:
return False
@property
def auth_role(self):
fixed_auth = super(SdkLaunchPlan, self).auth_role
if fixed_auth is not None and (
fixed_auth.assumable_iam_role is not None or fixed_auth.kubernetes_service_account is not None
):
return fixed_auth
assumable_iam_role = _auth_config.ASSUMABLE_IAM_ROLE.get()
kubernetes_service_account = _auth_config.KUBERNETES_SERVICE_ACCOUNT.get()
if not (assumable_iam_role or kubernetes_service_account):
_logging.warning(
"Using deprecated `role` from config. Please update your config to use `assumable_iam_role` instead"
)
assumable_iam_role = _sdk_config.ROLE.get()
return _common_models.AuthRole(
assumable_iam_role=assumable_iam_role, kubernetes_service_account=kubernetes_service_account,
)
@property
def workflow_id(self):
return self._workflow_id
@property
def interface(self):
return self._interface
@property
def resource_type(self):
return _identifier_model.ResourceType.LAUNCH_PLAN
@property
def entity_type_text(self):
return "Launch Plan"
@property
def raw_output_data_config(self):
raw_output_data_config = super(SdkLaunchPlan, self).raw_output_data_config
if raw_output_data_config is not None and raw_output_data_config.output_location_prefix != "":
return raw_output_data_config
return _common_models.RawOutputDataConfig(_auth_config.RAW_OUTPUT_DATA_PREFIX.get())
@_exception_scopes.system_entry_point
def validate(self):
# TODO: Validate workflow is satisfied
pass
@_exception_scopes.system_entry_point
def update(self, state):
if not self.id:
raise _user_exceptions.FlyteAssertion(
"Failed to update launch plan because the launch plan's ID is not set. Please call register to fetch "
"or register the identifier first"
)
return _flyte_engine.get_client().update_launch_plan(self.id, state)
def _python_std_input_map_to_literal_map(self, inputs):
return _type_helpers.pack_python_std_map_to_literal_map(
inputs,
{k: user_input.sdk_type for k, user_input in _six.iteritems(self.default_inputs.parameters) if k in inputs},
)
@_deprecated(reason="Use launch_with_literals instead", version="0.9.0")
def execute_with_literals(
self,
project,
domain,
literal_inputs,
name=None,
notification_overrides=None,
label_overrides=None,
annotation_overrides=None,
):
return self.launch_with_literals(
project, domain, literal_inputs, name, notification_overrides, label_overrides, annotation_overrides,
)
@_exception_scopes.system_entry_point
def launch_with_literals(
self,
project,
domain,
literal_inputs,
name=None,
notification_overrides=None,
label_overrides=None,
annotation_overrides=None,
):
name = name or "f" + _uuid.uuid4().hex[:19]
disable_all = notification_overrides == []
if disable_all:
notification_overrides = None
else:
notification_overrides = _execution_models.NotificationList(notification_overrides or [])
disable_all = None
client = _flyte_engine.get_client()
try:
exec_id = client.create_execution(
project,
domain,
name,
_execution_models.ExecutionSpec(
self.id,
_execution_models.ExecutionMetadata(
_execution_models.ExecutionMetadata.ExecutionMode.MANUAL,
"sdk",
0,
),
notifications=notification_overrides,
disable_all=disable_all,
labels=label_overrides,
annotations=annotation_overrides,
),
literal_inputs,
)
except _user_exceptions.FlyteEntityAlreadyExistsException:
exec_id = _identifier.WorkflowExecutionIdentifier(project, domain, name)
execution = client.get_execution(exec_id)
return _workflow_execution.SdkWorkflowExecution.promote_from_model(execution)
@_exception_scopes.system_entry_point
def __call__(self, *args, **input_map):
if len(args) > 0:
raise _user_exceptions.FlyteAssertion(
"When adding a launchplan as a node in a workflow, all inputs must be specified with kwargs only. We "
"detected {} positional args.".format(len(args))
)
default_inputs = {k: v.sdk_default for k, v in _six.iteritems(self.default_inputs.parameters) if not v.required}
default_inputs.update(input_map)
bindings, upstream_nodes = self.interface.create_bindings_for_inputs(default_inputs)
return _nodes.SdkNode(
id=None,
metadata=_workflow_models.NodeMetadata("", _datetime.timedelta(), _literal_models.RetryStrategy(0)),
bindings=sorted(bindings, key=lambda b: b.var),
upstream_nodes=upstream_nodes,
sdk_launch_plan=self,
)
def __repr__(self):
return "SdkLaunchPlan(ID: {} Interface: {} WF ID: {})".format(self.id, self.interface, self.workflow_id)
class SdkRunnableLaunchPlan(_hash_mixin.HashOnReferenceMixin, SdkLaunchPlan):
def __init__(
self,
sdk_workflow,
default_inputs=None,
fixed_inputs=None,
role=None,
schedule=None,
notifications=None,
labels=None,
annotations=None,
auth_role=None,
raw_output_data_config=None,
):
if role and auth_role:
raise ValueError("Cannot set both role and auth. Role is deprecated, use auth instead.")
fixed_inputs = fixed_inputs or {}
default_inputs = default_inputs or {}
if role:
auth_role = _common_models.AuthRole(assumable_iam_role=role)
# should be set in one of three places,
# 1) When the object is registered (in the code above)
# 2) By the dynamic task code after this runnable object has already been __call__'ed. The SdkNode produced
super(SdkRunnableLaunchPlan, self).__init__(
None,
_launch_plan_models.LaunchPlanMetadata(
schedule=schedule or _schedule_model.Schedule(""), notifications=notifications or [],
),
_interface_models.ParameterMap(default_inputs),
_type_helpers.pack_python_std_map_to_literal_map(
fixed_inputs,
{
k: _type_helpers.get_sdk_type_from_literal_type(var.type)
for k, var in _six.iteritems(sdk_workflow.interface.inputs)
if k in fixed_inputs
},
),
labels or _common_models.Labels({}),
annotations or _common_models.Annotations({}),
auth_role,
raw_output_data_config or _common_models.RawOutputDataConfig(""),
)
self._interface = _interface.TypedInterface(
{k: v.var for k, v in _six.iteritems(default_inputs)}, sdk_workflow.interface.outputs,
)
self._upstream_entities = {sdk_workflow}
self._sdk_workflow = sdk_workflow
@classmethod
def from_flyte_idl(cls, _):
raise _user_exceptions.FlyteAssertion(
"An SdkRunnableLaunchPlan must be created from a reference to local Python code only."
)
@classmethod
def promote_from_model(cls, model):
raise _user_exceptions.FlyteAssertion(
"An SdkRunnableLaunchPlan must be created from a reference to local Python code only."
)
@classmethod
@_exception_scopes.system_entry_point
def fetch(cls, project, domain, name, version=None):
raise _user_exceptions.FlyteAssertion(
"An SdkRunnableLaunchPlan must be created from a reference to local Python code only."
)
@property
def workflow_id(self):
return self._sdk_workflow.id
def __repr__(self):
return "SdkRunnableLaunchPlan(ID: {} Interface: {} WF ID: {})".format(self.id, self.interface, self.workflow_id)
| true | true |
f720e5c62f21e8d5ff58e6fa829b2e05a1daba2e | 3,614 | py | Python | model_v2/synthetic_data.py | suchir/passenger_screening_algorithm_challenge | 65e3e3ce1889e9a100f6b9b6a53fe5c785a84612 | [
"MIT"
] | 7 | 2018-02-05T01:57:30.000Z | 2019-06-25T08:00:40.000Z | model_v2/synthetic_data.py | suchir/passenger_screening_algorithm_challenge | 65e3e3ce1889e9a100f6b9b6a53fe5c785a84612 | [
"MIT"
] | 1 | 2018-05-07T15:28:29.000Z | 2018-05-07T15:28:29.000Z | model_v2/synthetic_data.py | suchir/passenger_screening_algorithm_challenge | 65e3e3ce1889e9a100f6b9b6a53fe5c785a84612 | [
"MIT"
] | 3 | 2018-05-16T03:50:44.000Z | 2018-08-20T12:40:58.000Z | from common.caching import read_input_dir, cached, read_log_dir
from common.dataio import get_aps_data_hdf5, get_passenger_clusters, get_data
from . import dataio
from collections import defaultdict
import numpy as np
import skimage.transform
import skimage.io
import skimage.color
import glob
import os
import tqdm
import h5py
import pickle
import imageio
import math
import time
import subprocess
import json
@cached(version=0)
def generate_random_models(n_models):
with read_input_dir('makehuman/passengers'):
ranges = defaultdict(lambda: [float('inf'), float('-inf')])
for file in glob.glob('*.mhm'):
with open(file, 'r') as f:
modifiers = f.readlines()[4:-5]
for modifier in modifiers:
_, m, x = modifier.split(' ')
x = float(x)
r = ranges[m]
r[0], r[1] = min(r[0], x), max(r[1], x)
np.random.seed(0)
for i in range(n_models):
lines = ['version v1.1.1']
for modifier in ranges:
val = np.random.uniform(*ranges[modifier])
lines.append('modifier %s %s' % (modifier, val))
lines.append('skeleton game_engine.mhskel')
with open('%s.mhm' % i, 'w') as f:
f.write('\n'.join(lines))
BODY_ZONE_COLORS = np.array([
[255, 255, 255],
[255, 115, 35],
[55, 64, 197],
[32, 168, 67],
[116, 116, 116],
[255, 193, 17],
[255, 164, 194],
[172, 226, 28],
[193, 183, 227],
[142, 212, 231],
[255, 240, 3],
[234, 25, 33],
[176, 110, 77],
[232, 219, 164],
[101, 135, 182],
[255, 3, 255],
[125, 0, 21],
[153, 64, 154]
])
def _convert_colors_to_label(image):
highlight = lambda color: np.sum(np.abs(image-color), axis=-1)
dist = np.stack([highlight(color) for color in BODY_ZONE_COLORS], axis=-1)
return np.argmin(dist, axis=-1)
@cached(generate_random_models, subdir='ssd', version=0)
def render_synthetic_zone_data(mode):
    """Render depth and body-zone-label images for the generated MakeHuman meshes.

    Runs Blender in background mode over each mesh at 16 evenly spaced angles
    (configured via config.json), then gathers the rendered PNGs into an HDF5
    dataset of shape (n_meshes, n_angles, 330, 256, 2): channel 0 is the
    grayscale depth render, channel 1 the per-pixel zone label.

    :param mode: 'all', 'sample_large' (first 100 meshes), or 'sample' (first 10).
    :return: the h5py dataset described above (backed by data.hdf5).
    """
    assert mode in ('all', 'sample_large', 'sample')
    if not os.path.exists('done'):
        with read_input_dir('makehuman/generated'):
            mesh_paths = sorted(['%s/%s' % (os.getcwd(), x) for x in glob.glob('*.mhx2')])
        if mode == 'sample_large':
            mesh_paths = mesh_paths[:100]
        elif mode == 'sample':
            mesh_paths = mesh_paths[:10]
        with read_input_dir('hand_labeling/blender'):
            texture_path = os.getcwd() + '/zones.png'
        with read_input_dir('scripts/blender'):
            script_path = os.getcwd() + '/render_synthetic_data.py'

        angles = 16
        # The Blender script reads its parameters from config.json in the cwd.
        with open('config.json', 'w') as f:
            json.dump({
                'num_angles': angles,
                'texture_path': texture_path,
                'mesh_paths': mesh_paths
            }, f)
        subprocess.check_call(['blender', '--python', script_path, '--background'])

        f = h5py.File('data.hdf5', 'w')
        dset = f.create_dataset('dset', (len(mesh_paths), angles, 330, 256, 2))

        # NOTE(review): i//angles assumes glob.glob returns exactly `angles` depth
        # files per mesh, grouped per mesh and in the same order as mesh_paths --
        # glob order is filesystem-dependent, so verify this holds in practice.
        for i, file in enumerate(tqdm.tqdm(glob.glob('*_depth.png'))):
            zones_file = file.replace('depth', 'zones')
            angle = int(file.split('_')[-2])
            dset[i//angles, angle, ..., 0] = skimage.color.rgb2gray(skimage.io.imread(file))
            zones = skimage.io.imread(zones_file)
            labels = _convert_colors_to_label(zones[..., :3])
            dset[i//angles, angle, ..., 1] = labels

        # Sentinel file: its presence means a previous run completed successfully.
        open('done', 'w').close()
    else:
        f = h5py.File('data.hdf5', 'r')
        dset = f['dset']
    return dset
from common.dataio import get_aps_data_hdf5, get_passenger_clusters, get_data
from . import dataio
from collections import defaultdict
import numpy as np
import skimage.transform
import skimage.io
import skimage.color
import glob
import os
import tqdm
import h5py
import pickle
import imageio
import math
import time
import subprocess
import json
@cached(version=0)
def generate_random_models(n_models):
with read_input_dir('makehuman/passengers'):
ranges = defaultdict(lambda: [float('inf'), float('-inf')])
for file in glob.glob('*.mhm'):
with open(file, 'r') as f:
modifiers = f.readlines()[4:-5]
for modifier in modifiers:
_, m, x = modifier.split(' ')
x = float(x)
r = ranges[m]
r[0], r[1] = min(r[0], x), max(r[1], x)
np.random.seed(0)
for i in range(n_models):
lines = ['version v1.1.1']
for modifier in ranges:
val = np.random.uniform(*ranges[modifier])
lines.append('modifier %s %s' % (modifier, val))
lines.append('skeleton game_engine.mhskel')
with open('%s.mhm' % i, 'w') as f:
f.write('\n'.join(lines))
BODY_ZONE_COLORS = np.array([
[255, 255, 255],
[255, 115, 35],
[55, 64, 197],
[32, 168, 67],
[116, 116, 116],
[255, 193, 17],
[255, 164, 194],
[172, 226, 28],
[193, 183, 227],
[142, 212, 231],
[255, 240, 3],
[234, 25, 33],
[176, 110, 77],
[232, 219, 164],
[101, 135, 182],
[255, 3, 255],
[125, 0, 21],
[153, 64, 154]
])
def _convert_colors_to_label(image):
highlight = lambda color: np.sum(np.abs(image-color), axis=-1)
dist = np.stack([highlight(color) for color in BODY_ZONE_COLORS], axis=-1)
return np.argmin(dist, axis=-1)
@cached(generate_random_models, subdir='ssd', version=0)
def render_synthetic_zone_data(mode):
    # Render depth + body-zone-label images for the generated MakeHuman meshes
    # by driving Blender in the background, caching everything in data.hdf5.
    # The dataset is shaped (mesh, angle, 330, 256, 2): channel 0 holds the
    # grayscale conversion of the *_depth.png render, channel 1 the zone
    # labels decoded from the *_zones.png render.
    assert mode in ('all', 'sample_large', 'sample')
    if not os.path.exists('done'):
        with read_input_dir('makehuman/generated'):
            mesh_paths = sorted(['%s/%s' % (os.getcwd(), x) for x in glob.glob('*.mhx2')])
            if mode == 'sample_large':
                mesh_paths = mesh_paths[:100]
            elif mode == 'sample':
                mesh_paths = mesh_paths[:10]
        with read_input_dir('hand_labeling/blender'):
            texture_path = os.getcwd() + '/zones.png'
        with read_input_dir('scripts/blender'):
            script_path = os.getcwd() + '/render_synthetic_data.py'
        angles = 16
        # The Blender script reads its parameters from config.json.
        with open('config.json', 'w') as f:
            json.dump({
                'num_angles': angles,
                'texture_path': texture_path,
                'mesh_paths': mesh_paths
            }, f)
        subprocess.check_call(['blender', '--python', script_path, '--background'])
        # The h5py handle is deliberately kept open: `dset` stays backed by it.
        f = h5py.File('data.hdf5', 'w')
        dset = f.create_dataset('dset', (len(mesh_paths), angles, 330, 256, 2))
        for i, file in enumerate(tqdm.tqdm(glob.glob('*_depth.png'))):
            # Renders come in pairs: <mesh>_<angle>_depth.png / _zones.png.
            zones_file = file.replace('depth', 'zones')
            angle = int(file.split('_')[-2])
            dset[i//angles, angle, ..., 0] = skimage.color.rgb2gray(skimage.io.imread(file))
            zones = skimage.io.imread(zones_file)
            labels = _convert_colors_to_label(zones[..., :3])
            dset[i//angles, angle, ..., 1] = labels
        # Marker file so subsequent calls reuse the cached HDF5 below.
        open('done', 'w').close()
    else:
        f = h5py.File('data.hdf5', 'r')
        dset = f['dset']
return dset | true | true |
f720e6032cfc7932950462b55a729037d787591f | 404 | py | Python | AboutModel/migrations/0006_person_upload.py | jinjinanan/HelloDjango1 | d1174b72341946f0575df37236d85983facc1bc6 | [
"MIT"
] | null | null | null | AboutModel/migrations/0006_person_upload.py | jinjinanan/HelloDjango1 | d1174b72341946f0575df37236d85983facc1bc6 | [
"MIT"
] | null | null | null | AboutModel/migrations/0006_person_upload.py | jinjinanan/HelloDjango1 | d1174b72341946f0575df37236d85983facc1bc6 | [
"MIT"
] | null | null | null | # Generated by Django 2.1.1 on 2018-09-26 09:08
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated Django migration: adds the `upload` file field to Person.
    dependencies = [
        ('AboutModel', '0005_auto_20180926_1639'),
    ]
    operations = [
        migrations.AddField(
            model_name='person',
            name='upload',
            # default='' keeps existing rows valid; files are stored under media/.
            field=models.FileField(default='', upload_to='media/'),
        ),
    ]
| 21.263158 | 67 | 0.596535 |
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('AboutModel', '0005_auto_20180926_1639'),
]
operations = [
migrations.AddField(
model_name='person',
name='upload',
field=models.FileField(default='', upload_to='media/'),
),
]
| true | true |
f720e775b9e53621d7ef0b929530a0e01f683291 | 216 | py | Python | display/display/handlers/calendar/calendar.py | owlsn/h_crawl | c0431ee6484e61d9339553c3350962ea517749d6 | [
"MIT"
] | null | null | null | display/display/handlers/calendar/calendar.py | owlsn/h_crawl | c0431ee6484e61d9339553c3350962ea517749d6 | [
"MIT"
] | 8 | 2021-03-18T20:33:29.000Z | 2022-03-11T23:21:04.000Z | display/display/handlers/calendar/calendar.py | owlsn/h_crawl | c0431ee6484e61d9339553c3350962ea517749d6 | [
"MIT"
] | null | null | null | from display.handlers.base import BaseHandler
class CalendarHandler(BaseHandler):
def get(self):
title = 'CalendarHandler'
self.render('calendar/calendar.html', title = title, **self.render_dict) | 36 | 80 | 0.722222 | from display.handlers.base import BaseHandler
class CalendarHandler(BaseHandler):
def get(self):
title = 'CalendarHandler'
self.render('calendar/calendar.html', title = title, **self.render_dict) | true | true |
f720e782756412b8e32b05c6b3b8cd42bb215506 | 298 | py | Python | 1.py | lorenaEscobar0014/TALLER-DE-FOR | a448358b336d6e240ff3017a9c44d7df67bf173e | [
"MIT"
] | null | null | null | 1.py | lorenaEscobar0014/TALLER-DE-FOR | a448358b336d6e240ff3017a9c44d7df67bf173e | [
"MIT"
] | null | null | null | 1.py | lorenaEscobar0014/TALLER-DE-FOR | a448358b336d6e240ff3017a9c44d7df67bf173e | [
"MIT"
] | null | null | null | archivo = open('paises.txt', 'r')
# Collect the text after "key: " on every line of paises.txt, then print and
# count the city names that start with "M".  The original built each name
# character by character and kept the trailing newline, which double-spaced
# the output; slicing + rstrip fixes both.
ciudad = []
for linea in archivo:
    sep = linea.index(":")
    # Skip ": " and drop the line terminator the old copy accidentally kept.
    ciudad.append(linea[sep + 2:].rstrip('\n'))
lista = []
for nombre in ciudad:
    # startswith is safe even if a name came out empty (nombre[0] would raise).
    if nombre.startswith("M"):
        print(nombre)
        lista.append(nombre)
print(len(lista))
archivo.close() | 18.625 | 33 | 0.57047 | archivo = open('paises.txt', 'r')
lista = []
ciudad = []
for i in archivo:
a = i.index(":")
for r in range(a+2, len(i)):
lista.append(i[r])
a = "".join(lista)
ciudad.append(a)
lista = []
for i in ciudad:
if(i[0] == "M"):
print(i)
lista.append(i)
print(len(lista))
archivo.close() | true | true |
f720e79407295f9aac9a3426d1cae24917442d5c | 2,720 | py | Python | src/pipelines/vaccinations/se_authority.py | chrismayemba/covid-19-open-data | cacecb05cd8277f8e61b6e7932915826f41af24b | [
"Apache-2.0"
] | 1 | 2021-10-21T15:24:08.000Z | 2021-10-21T15:24:08.000Z | src/pipelines/vaccinations/se_authority.py | chrismayemba/covid-19-open-data | cacecb05cd8277f8e61b6e7932915826f41af24b | [
"Apache-2.0"
] | null | null | null | src/pipelines/vaccinations/se_authority.py | chrismayemba/covid-19-open-data | cacecb05cd8277f8e61b6e7932915826f41af24b | [
"Apache-2.0"
] | null | null | null | # Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import datetime
from typing import Any, Dict
from pandas import DataFrame, concat
from lib.data_source import DataSource
from lib.time import datetime_isoformat
from lib.utils import aggregate_admin_level, table_merge, table_rename
from pipelines.epidemiology.it_authority import _subregion1_code_converter
# Swedish source-column names mapped to the canonical names used below.
_column_adapter = {
    "Vecka": "week",  # week number
    "År": "year",
    "Region": "match_string",
    "Antal vaccinerade": "_total_doses",  # count of vaccinated persons
    # "Andel vaccinerade": "",  # share of vaccinated persons (unused)
    "Dosnummer": "_dose_type",  # dose number ("...1" or "...2", see below)
}
class SwedenDataSource(DataSource):
    """Parses Sweden's weekly vaccination table into canonical records."""
    def parse_dataframes(
        self, dataframes: Dict[Any, DataFrame], aux: Dict[str, DataFrame], **parse_opts
    ) -> DataFrame:
        # Rename Swedish columns to canonical names, dropping everything else.
        data = table_rename(dataframes[0], _column_adapter, drop=True)
        # Convert (year, week) into an ISO date string, then drop the helpers.
        data["date"] = data["year"].apply(lambda x: datetime.datetime.strptime(str(x), "%Y"))
        data["date"] = data["date"] + data["week"].apply(lambda x: datetime.timedelta(weeks=x))
        data["date"] = data["date"].apply(lambda x: x.date().isoformat())
        data = data.drop(columns=["week", "year"])
        # Split 1-dose and 2-dose rows (dose type ends in "1"/"2"), rename the
        # count column accordingly, then outer-merge them back together.
        data_1_dose = data[data["_dose_type"].str.slice(-1) == "1"].drop(columns=["_dose_type"])
        data_2_dose = data[data["_dose_type"].str.slice(-1) == "2"].drop(columns=["_dose_type"])
        data_1_dose = data_1_dose.rename(columns={"_total_doses": "total_persons_vaccinated"})
        data_2_dose = data_2_dose.rename(columns={"_total_doses": "total_persons_fully_vaccinated"})
        data = table_merge([data_1_dose, data_2_dose], how="outer")
        # Make sure only subregion1 matches during downstream key resolution.
        data["key"] = None
        data["country_code"] = "SE"
        data["subregion2_code"] = None
        data["locality_code"] = None
        # Country totals are reported under a special region name.
        data.loc[data["match_string"] == "| Sverige |", "key"] = "SE"
        # Estimate total administered doses from the per-person counts.
        data["total_vaccine_doses_administered"] = (
            data["total_persons_vaccinated"] + data["total_persons_fully_vaccinated"]
        )
        return data
| 40 | 100 | 0.683088 |
import datetime
from typing import Any, Dict
from pandas import DataFrame, concat
from lib.data_source import DataSource
from lib.time import datetime_isoformat
from lib.utils import aggregate_admin_level, table_merge, table_rename
from pipelines.epidemiology.it_authority import _subregion1_code_converter
_column_adapter = {
"Vecka": "week",
"År": "year",
"Region": "match_string",
"Antal vaccinerade": "_total_doses",
"Dosnummer": "_dose_type",
}
class SwedenDataSource(DataSource):
def parse_dataframes(
self, dataframes: Dict[Any, DataFrame], aux: Dict[str, DataFrame], **parse_opts
) -> DataFrame:
data = table_rename(dataframes[0], _column_adapter, drop=True)
data["date"] = data["year"].apply(lambda x: datetime.datetime.strptime(str(x), "%Y"))
data["date"] = data["date"] + data["week"].apply(lambda x: datetime.timedelta(weeks=x))
data["date"] = data["date"].apply(lambda x: x.date().isoformat())
data = data.drop(columns=["week", "year"])
data_1_dose = data[data["_dose_type"].str.slice(-1) == "1"].drop(columns=["_dose_type"])
data_2_dose = data[data["_dose_type"].str.slice(-1) == "2"].drop(columns=["_dose_type"])
data_1_dose = data_1_dose.rename(columns={"_total_doses": "total_persons_vaccinated"})
data_2_dose = data_2_dose.rename(columns={"_total_doses": "total_persons_fully_vaccinated"})
data = table_merge([data_1_dose, data_2_dose], how="outer")
data["key"] = None
data["country_code"] = "SE"
data["subregion2_code"] = None
data["locality_code"] = None
data.loc[data["match_string"] == "| Sverige |", "key"] = "SE"
data["total_vaccine_doses_administered"] = (
data["total_persons_vaccinated"] + data["total_persons_fully_vaccinated"]
)
return data
| true | true |
f720e7b3881bb7f2ca7c123f52d4f902222b4dac | 2,385 | py | Python | imblearn/under_sampling/_prototype_selection/tests/test_instance_hardness_threshold.py | laurallu/imbalanced-learn | 321b751f90ef8faaec6b39218f8c531893e9e79f | [
"MIT"
] | null | null | null | imblearn/under_sampling/_prototype_selection/tests/test_instance_hardness_threshold.py | laurallu/imbalanced-learn | 321b751f90ef8faaec6b39218f8c531893e9e79f | [
"MIT"
] | null | null | null | imblearn/under_sampling/_prototype_selection/tests/test_instance_hardness_threshold.py | laurallu/imbalanced-learn | 321b751f90ef8faaec6b39218f8c531893e9e79f | [
"MIT"
] | null | null | null | """Test the module ."""
# Authors: Guillaume Lemaitre <g.lemaitre58@gmail.com>
# Christos Aridas
# License: MIT
import pytest
import numpy as np
from sklearn.ensemble import GradientBoostingClassifier
from imblearn.under_sampling import InstanceHardnessThreshold
RND_SEED = 0
X = np.array(
[
[-0.3879569, 0.6894251],
[-0.09322739, 1.28177189],
[-0.77740357, 0.74097941],
[0.91542919, -0.65453327],
[-0.03852113, 0.40910479],
[-0.43877303, 1.07366684],
[-0.85795321, 0.82980738],
[-0.18430329, 0.52328473],
[-0.30126957, -0.66268378],
[-0.65571327, 0.42412021],
[-0.28305528, 0.30284991],
[0.20246714, -0.34727125],
[1.06446472, -1.09279772],
[0.30543283, -0.02589502],
[-0.00717161, 0.00318087],
]
)
Y = np.array([0, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 0, 0, 0])
ESTIMATOR = GradientBoostingClassifier(random_state=RND_SEED)
def test_iht_init():
    """The constructor must store the sampling strategy and random state."""
    strategy = "auto"
    undersampler = InstanceHardnessThreshold(
        ESTIMATOR, sampling_strategy=strategy, random_state=RND_SEED
    )
    assert undersampler.sampling_strategy == strategy
    assert undersampler.random_state == RND_SEED
def test_iht_fit_resample():
    """Default resampling of the toy dataset keeps exactly 12 samples."""
    X_res, y_res = InstanceHardnessThreshold(
        ESTIMATOR, random_state=RND_SEED
    ).fit_resample(X, Y)
    assert X_res.shape == (12, 2)
    assert y_res.shape == (12,)
def test_iht_fit_resample_half():
    # Explicit per-class targets: keep 6 samples of class 0 and 8 of class 1.
    sampling_strategy = {0: 6, 1: 8}
    iht = InstanceHardnessThreshold(
        ESTIMATOR, sampling_strategy=sampling_strategy, random_state=RND_SEED
    )
    X_resampled, y_resampled = iht.fit_resample(X, Y)
    # 6 + 8 = 14 samples must remain.
    assert X_resampled.shape == (14, 2)
    assert y_resampled.shape == (14,)
def test_iht_fit_resample_class_obj():
    # The estimator can be passed as an instantiated classifier object.
    est = GradientBoostingClassifier(random_state=RND_SEED)
    iht = InstanceHardnessThreshold(estimator=est, random_state=RND_SEED)
    X_resampled, y_resampled = iht.fit_resample(X, Y)
    assert X_resampled.shape == (12, 2)
    assert y_resampled.shape == (12,)
def test_iht_fit_resample_wrong_class_obj():
    # A non-classifier estimator (KMeans) must be rejected with a clear error.
    from sklearn.cluster import KMeans
    est = KMeans()
    iht = InstanceHardnessThreshold(estimator=est, random_state=RND_SEED)
    with pytest.raises(ValueError, match="Invalid parameter `estimator`"):
        iht.fit_resample(X, Y)
| 30.189873 | 77 | 0.678826 |
import pytest
import numpy as np
from sklearn.ensemble import GradientBoostingClassifier
from imblearn.under_sampling import InstanceHardnessThreshold
RND_SEED = 0
X = np.array(
[
[-0.3879569, 0.6894251],
[-0.09322739, 1.28177189],
[-0.77740357, 0.74097941],
[0.91542919, -0.65453327],
[-0.03852113, 0.40910479],
[-0.43877303, 1.07366684],
[-0.85795321, 0.82980738],
[-0.18430329, 0.52328473],
[-0.30126957, -0.66268378],
[-0.65571327, 0.42412021],
[-0.28305528, 0.30284991],
[0.20246714, -0.34727125],
[1.06446472, -1.09279772],
[0.30543283, -0.02589502],
[-0.00717161, 0.00318087],
]
)
Y = np.array([0, 1, 1, 0, 1, 1, 1, 1, 1, 0, 1, 1, 0, 0, 0])
ESTIMATOR = GradientBoostingClassifier(random_state=RND_SEED)
def test_iht_init():
sampling_strategy = "auto"
iht = InstanceHardnessThreshold(
ESTIMATOR, sampling_strategy=sampling_strategy, random_state=RND_SEED
)
assert iht.sampling_strategy == sampling_strategy
assert iht.random_state == RND_SEED
def test_iht_fit_resample():
iht = InstanceHardnessThreshold(ESTIMATOR, random_state=RND_SEED)
X_resampled, y_resampled = iht.fit_resample(X, Y)
assert X_resampled.shape == (12, 2)
assert y_resampled.shape == (12,)
def test_iht_fit_resample_half():
sampling_strategy = {0: 6, 1: 8}
iht = InstanceHardnessThreshold(
ESTIMATOR, sampling_strategy=sampling_strategy, random_state=RND_SEED
)
X_resampled, y_resampled = iht.fit_resample(X, Y)
assert X_resampled.shape == (14, 2)
assert y_resampled.shape == (14,)
def test_iht_fit_resample_class_obj():
est = GradientBoostingClassifier(random_state=RND_SEED)
iht = InstanceHardnessThreshold(estimator=est, random_state=RND_SEED)
X_resampled, y_resampled = iht.fit_resample(X, Y)
assert X_resampled.shape == (12, 2)
assert y_resampled.shape == (12,)
def test_iht_fit_resample_wrong_class_obj():
from sklearn.cluster import KMeans
est = KMeans()
iht = InstanceHardnessThreshold(estimator=est, random_state=RND_SEED)
with pytest.raises(ValueError, match="Invalid parameter `estimator`"):
iht.fit_resample(X, Y)
| true | true |
f720e859b033940aead6b8c6f677e377794adbc7 | 798 | py | Python | piton/lib/readchar/readchar.py | piton-package-manager/PPM | 19015b76184befe1e2daa63189a13b039787868d | [
"MIT"
] | 19 | 2016-04-08T04:00:07.000Z | 2021-11-12T19:36:56.000Z | piton/lib/readchar/readchar.py | LookLikeAPro/PPM | 19015b76184befe1e2daa63189a13b039787868d | [
"MIT"
] | 9 | 2017-01-03T13:39:47.000Z | 2022-01-15T20:38:20.000Z | piton/lib/readchar/readchar.py | LookLikeAPro/PPM | 19015b76184befe1e2daa63189a13b039787868d | [
"MIT"
] | 6 | 2017-04-01T03:38:45.000Z | 2021-05-06T11:25:31.000Z | # -*- coding: utf-8 -*-
# This file is based on this gist:
# http://code.activestate.com/recipes/134892/
# So real authors are DannyYoo and company.
import sys
# Pick the platform-specific single-character reader.  macOS uses the same
# termios-based implementation as Linux, so the two branches are merged.
if sys.platform.startswith('linux') or sys.platform == 'darwin':
    from .readchar_linux import readchar
elif sys.platform in ('win32', 'cygwin'):
    from .readchar_windows import readchar
else:
    # The original raised `NotImplemented` -- a constant, not an exception --
    # which fails with a confusing TypeError and hides this message.
    raise NotImplementedError('The platform %s is not supported yet' % sys.platform)
def readkey(getchar_fn=None):
    """Read one key press, consuming a whole escape sequence when present.

    Returns the raw characters: one char for ordinary keys, three chars for
    an ``ESC [ x`` sequence, and four for the ``ESC [ 3 x`` family.
    `getchar_fn` overrides the platform reader (useful for testing).
    """
    read = getchar_fn or readchar
    chars = read()
    if ord(chars) != 0x1b:  # not ESC: a plain key press
        return chars
    chars += read()
    if ord(chars[1]) != 0x5b:  # ESC not followed by '[' (CSI)
        return chars
    chars += read()
    if ord(chars[2]) != 0x33:  # not the '3' of an "ESC [ 3 ~"-style sequence
        return chars
    return chars + read()
| 25.741935 | 79 | 0.645363 |
import sys
if sys.platform.startswith('linux'):
from .readchar_linux import readchar
elif sys.platform == 'darwin':
from .readchar_linux import readchar
elif sys.platform in ('win32', 'cygwin'):
from .readchar_windows import readchar
else:
raise NotImplemented('The platform %s is not supported yet' % sys.platform)
def readkey(getchar_fn=None):
getchar = getchar_fn or readchar
c1 = getchar()
if ord(c1) != 0x1b:
return c1
c2 = getchar()
if ord(c2) != 0x5b:
return c1 + c2
c3 = getchar()
if ord(c3) != 0x33:
return c1 + c2 + c3
c4 = getchar()
return c1 + c2 + c3 + c4
| true | true |
f720e8b77258c01a05c510ec80e3283dcdbe46b3 | 1,698 | py | Python | leetcode/combination_sum_III.py | sci-c0/python-misc-problems | a0827cc9cd290ca142bba3b7dda307234da63c3c | [
"BSD-3-Clause"
] | null | null | null | leetcode/combination_sum_III.py | sci-c0/python-misc-problems | a0827cc9cd290ca142bba3b7dda307234da63c3c | [
"BSD-3-Clause"
] | null | null | null | leetcode/combination_sum_III.py | sci-c0/python-misc-problems | a0827cc9cd290ca142bba3b7dda307234da63c3c | [
"BSD-3-Clause"
] | null | null | null | """
https://leetcode.com/problems/combination-sum-iii/
Tags: Practice; Concepts; Algorithms; Recursion/BackTracking; Medium
"""
from typing import List
class Solution:
    def combinationSum3(self, k: int, n: int) -> List[List[int]]:
        """Return all combinations of k distinct digits 1-9 summing to n.

        Each combination is listed in increasing digit order; the result is
        [] when no combination exists (the original returned None for k == 1
        with no single-digit solution, because the helper's base case leaked
        None out of this method).
        """
        return self.combinations(list(range(1, 10)), [], n, k)

    def combinations(self, nums: List[int], combi: List[int], s: int, k: int) -> List[List[int]]:
        """Backtracking helper.

        `nums` holds the still-available candidates (ascending), `combi` the
        digits chosen so far, `s` the remaining sum, and `k` the target size.
        Always returns a list of complete combinations (possibly empty).
        """
        ans = []
        # One slot remaining: the leftover sum itself must be available.
        if len(combi) == k - 1:
            return [combi + [s]] if s in nums else ans
        for i, v in enumerate(nums):
            remaining_sum = s - v
            # Candidates are ascending, so once the remainder can no longer
            # exceed the current digit, no later digit can work either.
            if remaining_sum <= v:
                break
            # Only digits after position i may follow, which rules out both
            # duplicates and permutations of already-found combinations.
            ans.extend(self.combinations(nums[i + 1:], combi + [v], remaining_sum, k))
        return ans
| 32.653846 | 114 | 0.579505 |
from typing import List
class Solution:
def combinationSum3(self, k: int, n: int) -> List[List[int]]:
return self.combinations(list(range(1, 10)), [], n, k)
def combinations(self, nums: List[int], combi: List[int], s: int, k: int):
ans = []
if len(combi) == k - 1:
if s in nums:
return [combi + [s]]
else:
return None
for i, v in enumerate(nums):
remaining_sum = s - v
if remaining_sum > v:
# We will pass only the remaining array to the next recursion.
remaining_list_to_chose_from = nums[i + 1:]
# Append the current value to the combination
new_combi = combi + [v]
final_combi = self.combinations(remaining_list_to_chose_from, new_combi, remaining_sum, k)
if final_combi is not None:
ans.extend(final_combi)
else:
break
return ans
| true | true |
f720e94c7b98eefd4db2a78ffdc2366c09186edd | 942 | py | Python | hypernet/src/thermophysicalModels/chemistry/reactions/reactionRate/arrhenius.py | christian-jacobsen/hypernet | 9f62e1531eb152cc08af0b0c6b09d6fde8d42400 | [
"Apache-2.0"
] | null | null | null | hypernet/src/thermophysicalModels/chemistry/reactions/reactionRate/arrhenius.py | christian-jacobsen/hypernet | 9f62e1531eb152cc08af0b0c6b09d6fde8d42400 | [
"Apache-2.0"
] | null | null | null | hypernet/src/thermophysicalModels/chemistry/reactions/reactionRate/arrhenius.py | christian-jacobsen/hypernet | 9f62e1531eb152cc08af0b0c6b09d6fde8d42400 | [
"Apache-2.0"
] | null | null | null | import numpy as np
from hypernet.src.thermophysicalModels.chemistry.reactions.reactionRate import Basic
class Arrhenius(Basic):
    """Arrhenius reaction-rate model: k(T) = A * T**beta * exp(-Ta / T)."""
    # Initialization
    ###########################################################################
    def __init__(
        self,
        reactionsDatabase,
        *args,
        **kwargs
    ):
        # Basic.__init__ is expected to expose the database as `self.reacDB`
        # (presumably a pandas DataFrame, given .to_numpy()) -- TODO confirm.
        super(Arrhenius, self).__init__(
            reactionsDatabase,
            *args,
            **kwargs
        )
        # Per-reaction Arrhenius parameters as numpy arrays.
        self.A = self.reacDB['A'].to_numpy()        # pre-exponential factor
        self.beta = self.reacDB['beta'].to_numpy()  # temperature exponent
        self.Ta = self.reacDB['Ta'].to_numpy()      # activation temperature
    # Methods
    ###########################################################################
    # Forward reaction rates --------------------------------------------------
    def k_(self, T):
        # Arrhenius law evaluated element-wise for every reaction.
        return self.A * np.power(T, self.beta) * np.exp(-self.Ta / T)
    def dkdT_(self, T):
        # Analytic derivative: dk/dT = (beta + Ta/T) * k / T.
        # NOTE(review): uses `self.k`, not `self.k_(T)` -- presumably a cached
        # value/property supplied by Basic; confirm it corresponds to this T.
        return (self.beta + self.Ta / T) * self.k / T
| 28.545455 | 84 | 0.440552 | import numpy as np
from hypernet.src.thermophysicalModels.chemistry.reactions.reactionRate import Basic
class Arrhenius(Basic):
| true | true |
f720eaa230ec470ea6eabf1b1bc884458772e552 | 9,670 | py | Python | qpth/qp.py | lopa23/flim_optcrf | 2d9a1dba37a7e5e6beae66c536b07bb7ae4bdfe9 | [
"Apache-2.0"
] | null | null | null | qpth/qp.py | lopa23/flim_optcrf | 2d9a1dba37a7e5e6beae66c536b07bb7ae4bdfe9 | [
"Apache-2.0"
] | null | null | null | qpth/qp.py | lopa23/flim_optcrf | 2d9a1dba37a7e5e6beae66c536b07bb7ae4bdfe9 | [
"Apache-2.0"
] | null | null | null | import torch
from torch.autograd import Function
from .util import bger, expandParam, extract_nBatch
from . import solvers
from .solvers.pdipm import batch as pdipm_b
from .solvers.pdipm import spbatch as pdipm_spb
# from .solvers.pdipm import single as pdipm_s
from enum import Enum
class QPSolvers(Enum):
    # Back-ends for QPFunction: the batched primal-dual interior-point solver
    # shipped with this package, or a per-batch-element CVXPY reference solve.
    PDIPM_BATCHED = 1
    CVXPY = 2
def QPFunction(eps=1e-12, verbose=1, notImprovedLim=3,
               maxIter=20, solver=QPSolvers.PDIPM_BATCHED,
               check_Q_spd=False):
    """Build an autograd Function that solves (and differentiates) batched QPs.

    Parameters:
        eps, notImprovedLim, maxIter: termination controls for the solver.
        verbose: verbosity level passed through to the PDIPM back-end.
        solver: back-end to use, one of `QPSolvers`.
        check_Q_spd: if True, verify every Q in the batch is SPD up front.
    """
    class QPFunctionFn(Function):
        @staticmethod
        def forward(ctx, Q_, p_, G_, h_, A_, b_):
            r"""Solve a batch of QPs.

            This function solves a batch of QPs, each optimizing over
            `nz` variables and having `nineq` inequality constraints
            and `neq` equality constraints.
            The optimization problem for each instance in the batch
            (dropping indexing from the notation) is of the form

                \hat z =   argmin_z 1/2 z^T Q z + p^T z
                         subject to Gz <= h
                                    Az  = b

            where Q \in S^{nz,nz},
                  S^{nz,nz} is the set of all positive semi-definite matrices,
                  p \in R^{nz}
                  G \in R^{nineq,nz}
                  h \in R^{nineq}
                  A \in R^{neq,nz}
                  b \in R^{neq}

            These parameters should all be passed to this function as
            Variable- or Parameter-wrapped Tensors.
            (See torch.autograd.Variable and torch.nn.parameter.Parameter)

            If you want to solve a batch of QPs where `nz`, `nineq` and `neq`
            are the same, but some of the contents differ across the
            minibatch, you can pass in tensors in the standard way
            where the first dimension indicates the batch example.
            This can be done with some or all of the coefficients.

            You do not need to add an extra dimension to coefficients
            that will not change across all of the minibatch examples.
            This function is able to infer such cases.

            If you don't want to use any equality or inequality constraints,
            you can set the appropriate values to:

                e = Variable(torch.Tensor())

            Parameters:
              Q:  A (nBatch, nz, nz) or (nz, nz) Tensor.
              p:  A (nBatch, nz) or (nz) Tensor.
              G:  A (nBatch, nineq, nz) or (nineq, nz) Tensor.
              h:  A (nBatch, nineq) or (nineq) Tensor.
              A:  A (nBatch, neq, nz) or (neq, nz) Tensor.
              b:  A (nBatch, neq) or (neq) Tensor.

            Returns: \hat z: a (nBatch, nz) Tensor.
            """
            nBatch = extract_nBatch(Q_, p_, G_, h_, A_, b_)
            # Broadcast unbatched coefficients across the batch (whether each
            # input was shared is re-derived in backward for the gradients).
            Q, _ = expandParam(Q_, nBatch, 3)
            p, _ = expandParam(p_, nBatch, 2)
            G, _ = expandParam(G_, nBatch, 3)
            h, _ = expandParam(h_, nBatch, 2)
            A, _ = expandParam(A_, nBatch, 3)
            b, _ = expandParam(b_, nBatch, 2)

            if check_Q_spd:
                # NOTE(review): torch.eig is deprecated/removed in recent
                # PyTorch releases; this path only runs when check_Q_spd=True.
                for i in range(nBatch):
                    e, _ = torch.eig(Q[i])
                    if not torch.all(e[:, 0] > 0):
                        raise RuntimeError('Q is not SPD.')

            _, nineq, nz = G.size()
            neq = A.size(1) if A.nelement() > 0 else 0
            assert(neq > 0 or nineq > 0)
            ctx.neq, ctx.nineq, ctx.nz = neq, nineq, nz

            if solver == QPSolvers.PDIPM_BATCHED:
                # Pre-factor the KKT system once, then run the batched solver.
                ctx.Q_LU, ctx.S_LU, ctx.R = pdipm_b.pre_factor_kkt(Q, G, A)
                zhats, ctx.nus, ctx.lams, ctx.slacks = pdipm_b.forward(
                    Q, p, G, h, A, b, ctx.Q_LU, ctx.S_LU, ctx.R,
                    eps, verbose, notImprovedLim, maxIter)
            elif solver == QPSolvers.CVXPY:
                # Reference path: one CVXPY solve per batch element on the CPU.
                vals = torch.Tensor(nBatch).type_as(Q)
                zhats = torch.Tensor(nBatch, ctx.nz).type_as(Q)
                lams = torch.Tensor(nBatch, ctx.nineq).type_as(Q)
                nus = torch.Tensor(nBatch, ctx.neq).type_as(Q) \
                    if ctx.neq > 0 else torch.Tensor()
                slacks = torch.Tensor(nBatch, ctx.nineq).type_as(Q)
                for i in range(nBatch):
                    Ai, bi = (A[i], b[i]) if neq > 0 else (None, None)
                    vals[i], zhati, nui, lami, si = solvers.cvxpy.forward_single_np(
                        *[x.cpu().numpy() if x is not None else None
                          for x in (Q[i], p[i], G[i], h[i], Ai, bi)])
                    zhats[i] = torch.Tensor(zhati)
                    lams[i] = torch.Tensor(lami)
                    slacks[i] = torch.Tensor(si)
                    if neq > 0:
                        nus[i] = torch.Tensor(nui)

                ctx.vals = vals
                ctx.lams = lams
                ctx.nus = nus
                ctx.slacks = slacks
            else:
                assert False

            ctx.save_for_backward(zhats, Q_, p_, G_, h_, A_, b_)
            return zhats

        @staticmethod
        def backward(ctx, dl_dzhat):
            """Differentiate the argmin w.r.t. all six QP coefficients."""
            zhats, Q, p, G, h, A, b = ctx.saved_tensors
            nBatch = extract_nBatch(Q, p, G, h, A, b)
            # The *_e flags mark coefficients that were shared across the
            # batch; their gradients are reduced with a mean below.
            Q, Q_e = expandParam(Q, nBatch, 3)
            p, p_e = expandParam(p, nBatch, 2)
            G, G_e = expandParam(G, nBatch, 3)
            h, h_e = expandParam(h, nBatch, 2)
            A, A_e = expandParam(A, nBatch, 3)
            b, b_e = expandParam(b, nBatch, 2)

            neq, nineq = ctx.neq, ctx.nineq

            if solver == QPSolvers.CVXPY:
                # The CVXPY path never factored the KKT system; do it now.
                ctx.Q_LU, ctx.S_LU, ctx.R = pdipm_b.pre_factor_kkt(Q, G, A)

            # Clamp here to avoid issues coming up when the slacks are too small.
            # TODO: A better fix would be to get lams and slacks from the
            # solver that don't have this issue.
            d = torch.clamp(ctx.lams, min=1e-8) / torch.clamp(ctx.slacks, min=1e-8)

            pdipm_b.factor_kkt(ctx.S_LU, ctx.R, d)
            dx, _, dlam, dnu = pdipm_b.solve_kkt(
                ctx.Q_LU, d, G, A, ctx.S_LU,
                dl_dzhat, torch.zeros(nBatch, nineq).type_as(G),
                torch.zeros(nBatch, nineq).type_as(G),
                torch.zeros(nBatch, neq).type_as(G) if neq > 0 else torch.Tensor())

            dps = dx
            dGs = bger(dlam, zhats) + bger(ctx.lams, dx)
            if G_e:
                dGs = dGs.mean(0)
            dhs = -dlam
            if h_e:
                dhs = dhs.mean(0)
            if neq > 0:
                dAs = bger(dnu, zhats) + bger(ctx.nus, dx)
                dbs = -dnu
                if A_e:
                    dAs = dAs.mean(0)
                if b_e:
                    dbs = dbs.mean(0)
            else:
                dAs, dbs = None, None
            dQs = 0.5 * (bger(dx, zhats) + bger(zhats, dx))
            if Q_e:
                dQs = dQs.mean(0)
            if p_e:
                dps = dps.mean(0)
            grads = (dQs, dps, dGs, dhs, dAs, dbs)

            return grads
    return QPFunctionFn.apply
class SpQPFunction(Function):
    # Sparse variant of QPFunction: the sparsity patterns (Qi/Gi/Ai index
    # tensors with Qsz/Gsz/Asz dense sizes) are fixed at construction; only
    # the nonzero values (Qv/Gv/Av) vary per call.
    # NOTE(review): uses the legacy stateful autograd.Function API
    # (__init__ state + self.save_for_backward), which modern PyTorch
    # rejects -- confirm the supported torch version.
    def __init__(self, Qi, Qsz, Gi, Gsz, Ai, Asz,
                 eps=1e-12, verbose=0, notImprovedLim=3, maxIter=20):
        self.Qi, self.Qsz = Qi, Qsz
        self.Gi, self.Gsz = Gi, Gsz
        self.Ai, self.Asz = Ai, Asz
        # PDIPM termination controls.
        self.eps = eps
        self.verbose = verbose
        self.notImprovedLim = notImprovedLim
        self.maxIter = maxIter
        self.nineq, self.nz = Gsz
        self.neq, _ = Asz
    def forward(self, Qv, p, Gv, h, Av, b):
        # Solve the batch of sparse QPs with the sparse PDIPM solver.
        self.nBatch = Qv.size(0)
        zhats, self.nus, self.lams, self.slacks = pdipm_spb.forward(
            self.Qi, Qv, self.Qsz, p, self.Gi, Gv, self.Gsz, h,
            self.Ai, Av, self.Asz, b, self.eps, self.verbose,
            self.notImprovedLim, self.maxIter)
        self.save_for_backward(zhats, Qv, p, Gv, h, Av, b)
        return zhats
    def backward(self, dl_dzhat):
        zhats, Qv, p, Gv, h, Av, b = self.saved_tensors
        # Diagonal scaling D = lam / slack for the KKT system, in COO form.
        Di = type(self.Qi)([range(self.nineq), range(self.nineq)])
        Dv = self.lams / self.slacks
        Dsz = torch.Size([self.nineq, self.nineq])
        dx, _, dlam, dnu = pdipm_spb.solve_kkt(
            self.Qi, Qv, self.Qsz, Di, Dv, Dsz,
            self.Gi, Gv, self.Gsz,
            self.Ai, Av, self.Asz, dl_dzhat,
            type(p)(self.nBatch, self.nineq).zero_(),
            type(p)(self.nBatch, self.nineq).zero_(),
            type(p)(self.nBatch, self.neq).zero_())
        dps = dx
        # Dense outer-product gradients are masked back onto each sparse
        # layout: a ones-valued sparse tensor densified into a boolean mask.
        dGs = bger(dlam, zhats) + bger(self.lams, dx)
        GM = torch.cuda.sparse.DoubleTensor(
            self.Gi, Gv[0].clone().fill_(1.0), self.Gsz
        ).to_dense().byte().expand_as(dGs)
        dGs = dGs[GM].view_as(Gv)
        dhs = -dlam
        dAs = bger(dnu, zhats) + bger(self.nus, dx)
        AM = torch.cuda.sparse.DoubleTensor(
            self.Ai, Av[0].clone().fill_(1.0), self.Asz
        ).to_dense().byte().expand_as(dAs)
        dAs = dAs[AM].view_as(Av)
        dbs = -dnu
        dQs = 0.5 * (bger(dx, zhats) + bger(zhats, dx))
        QM = torch.cuda.sparse.DoubleTensor(
            self.Qi, Qv[0].clone().fill_(1.0), self.Qsz
        ).to_dense().byte().expand_as(dQs)
        dQs = dQs[QM].view_as(Qv)
        grads = (dQs, dps, dGs, dhs, dAs, dbs)
        return grads
| 37.773438 | 84 | 0.512099 | import torch
from torch.autograd import Function
from .util import bger, expandParam, extract_nBatch
from . import solvers
from .solvers.pdipm import batch as pdipm_b
from .solvers.pdipm import spbatch as pdipm_spb
from enum import Enum
class QPSolvers(Enum):
PDIPM_BATCHED = 1
CVXPY = 2
def QPFunction(eps=1e-12, verbose=1, notImprovedLim=3,
maxIter=20, solver=QPSolvers.PDIPM_BATCHED,
check_Q_spd=False):
class QPFunctionFn(Function):
@staticmethod
def forward(ctx, Q_, p_, G_, h_, A_, b_):
nBatch = extract_nBatch(Q_, p_, G_, h_, A_, b_)
Q, _ = expandParam(Q_, nBatch, 3)
p, _ = expandParam(p_, nBatch, 2)
G, _ = expandParam(G_, nBatch, 3)
h, _ = expandParam(h_, nBatch, 2)
A, _ = expandParam(A_, nBatch, 3)
b, _ = expandParam(b_, nBatch, 2)
if check_Q_spd:
for i in range(nBatch):
e, _ = torch.eig(Q[i])
if not torch.all(e[:,0] > 0):
raise RuntimeError('Q is not SPD.')
_, nineq, nz = G.size()
print("In constructor QP", G.size())
neq = A.size(1) if A.nelement() > 0 else 0
assert(neq > 0 or nineq > 0)
ctx.neq, ctx.nineq, ctx.nz = neq, nineq, nz
if solver == QPSolvers.PDIPM_BATCHED:
ctx.Q_LU, ctx.S_LU, ctx.R = pdipm_b.pre_factor_kkt(Q, G, A)
zhats, ctx.nus, ctx.lams, ctx.slacks = pdipm_b.forward(
Q, p, G, h, A, b, ctx.Q_LU, ctx.S_LU, ctx.R,
eps, verbose, notImprovedLim, maxIter)
elif solver == QPSolvers.CVXPY:
vals = torch.Tensor(nBatch).type_as(Q)
zhats = torch.Tensor(nBatch, ctx.nz).type_as(Q)
lams = torch.Tensor(nBatch, ctx.nineq).type_as(Q)
nus = torch.Tensor(nBatch, ctx.neq).type_as(Q) \
if ctx.neq > 0 else torch.Tensor()
slacks = torch.Tensor(nBatch, ctx.nineq).type_as(Q)
for i in range(nBatch):
Ai, bi = (A[i], b[i]) if neq > 0 else (None, None)
vals[i], zhati, nui, lami, si = solvers.cvxpy.forward_single_np(
*[x.cpu().numpy() if x is not None else None
for x in (Q[i], p[i], G[i], h[i], Ai, bi)])
zhats[i] = torch.Tensor(zhati)
lams[i] = torch.Tensor(lami)
slacks[i] = torch.Tensor(si)
if neq > 0:
nus[i] = torch.Tensor(nui)
ctx.vals = vals
ctx.lams = lams
ctx.nus = nus
ctx.slacks = slacks
else:
assert False
ctx.save_for_backward(zhats, Q_, p_, G_, h_, A_, b_)
return zhats
@staticmethod
def backward(ctx, dl_dzhat):
zhats, Q, p, G, h, A, b = ctx.saved_tensors
nBatch = extract_nBatch(Q, p, G, h, A, b)
Q, Q_e = expandParam(Q, nBatch, 3)
p, p_e = expandParam(p, nBatch, 2)
G, G_e = expandParam(G, nBatch, 3)
h, h_e = expandParam(h, nBatch, 2)
A, A_e = expandParam(A, nBatch, 3)
b, b_e = expandParam(b, nBatch, 2)
neq, nineq = ctx.neq, ctx.nineq
if solver == QPSolvers.CVXPY:
ctx.Q_LU, ctx.S_LU, ctx.R = pdipm_b.pre_factor_kkt(Q, G, A)
d = torch.clamp(ctx.lams, min=1e-8) / torch.clamp(ctx.slacks, min=1e-8)
pdipm_b.factor_kkt(ctx.S_LU, ctx.R, d)
dx, _, dlam, dnu = pdipm_b.solve_kkt(
ctx.Q_LU, d, G, A, ctx.S_LU,
dl_dzhat, torch.zeros(nBatch, nineq).type_as(G),
torch.zeros(nBatch, nineq).type_as(G),
torch.zeros(nBatch, neq).type_as(G) if neq > 0 else torch.Tensor())
print("In backwards,aftersolve_kkt")
dps = dx
dGs = bger(dlam, zhats) + bger(ctx.lams, dx)
if G_e:
dGs = dGs.mean(0)
dhs = -dlam
if h_e:
dhs = dhs.mean(0)
if neq > 0:
dAs = bger(dnu, zhats) + bger(ctx.nus, dx)
dbs = -dnu
if A_e:
dAs = dAs.mean(0)
if b_e:
dbs = dbs.mean(0)
else:
dAs, dbs = None, None
dQs = 0.5 * (bger(dx, zhats) + bger(zhats, dx))
if Q_e:
dQs = dQs.mean(0)
if p_e:
dps = dps.mean(0)
grads = (dQs, dps, dGs, dhs, dAs, dbs)
return grads
return QPFunctionFn.apply
class SpQPFunction(Function):
    """Sparse batched QP layer (legacy stateful ``autograd.Function`` API).

    Solves, per batch element, a quadratic program with Q, G and A given
    in sparse form as (index tensor, values, dense size) triples; the
    values tensors are the differentiable inputs.

    NOTE(review): this uses the pre-0.4 instance-based Function API
    (``self.save_for_backward`` inside plain methods) and builds masks
    with ``torch.cuda.sparse.DoubleTensor`` + ``.byte()`` in ``backward``,
    which hard-wires CUDA double tensors -- confirm against the torch
    version this package supports.
    """

    def __init__(self, Qi, Qsz, Gi, Gsz, Ai, Asz,
                 eps=1e-12, verbose=0, notImprovedLim=3, maxIter=20):
        # Sparse structure: *i are index tensors, *sz the dense shapes.
        self.Qi, self.Qsz = Qi, Qsz
        self.Gi, self.Gsz = Gi, Gsz
        self.Ai, self.Asz = Ai, Asz
        # Interior-point solver knobs (tolerance, logging, iteration caps).
        self.eps = eps
        self.verbose = verbose
        self.notImprovedLim = notImprovedLim
        self.maxIter = maxIter

        # Problem dimensions derived from the dense shapes of G and A.
        self.nineq, self.nz = Gsz
        self.neq, _ = Asz

    def forward(self, Qv, p, Gv, h, Av, b):
        """Run the sparse PDIPM solver; returns the optimal z per batch.

        Also stashes the duals (nus, lams) and slacks on ``self`` for
        reuse in ``backward``.
        """
        self.nBatch = Qv.size(0)

        zhats, self.nus, self.lams, self.slacks = pdipm_spb.forward(
            self.Qi, Qv, self.Qsz, p, self.Gi, Gv, self.Gsz, h,
            self.Ai, Av, self.Asz, b, self.eps, self.verbose,
            self.notImprovedLim, self.maxIter)

        self.save_for_backward(zhats, Qv, p, Gv, h, Av, b)
        return zhats

    def backward(self, dl_dzhat):
        """Gradients w.r.t. (Qv, p, Gv, h, Av, b) via one extra KKT solve.

        Dense outer-product gradients are masked back onto each matrix's
        sparsity pattern so the returned gradients match the shape of the
        sparse value tensors.
        """
        zhats, Qv, p, Gv, h, Av, b = self.saved_tensors

        # Diagonal matrix D = lams / slacks, in the same sparse format.
        Di = type(self.Qi)([range(self.nineq), range(self.nineq)])
        Dv = self.lams / self.slacks
        Dsz = torch.Size([self.nineq, self.nineq])
        dx, _, dlam, dnu = pdipm_spb.solve_kkt(
            self.Qi, Qv, self.Qsz, Di, Dv, Dsz,
            self.Gi, Gv, self.Gsz,
            self.Ai, Av, self.Asz, dl_dzhat,
            type(p)(self.nBatch, self.nineq).zero_(),
            type(p)(self.nBatch, self.nineq).zero_(),
            type(p)(self.nBatch, self.neq).zero_())

        dps = dx

        # dG, masked to G's sparsity pattern.
        dGs = bger(dlam, zhats) + bger(self.lams, dx)
        GM = torch.cuda.sparse.DoubleTensor(
            self.Gi, Gv[0].clone().fill_(1.0), self.Gsz
        ).to_dense().byte().expand_as(dGs)
        dGs = dGs[GM].view_as(Gv)
        dhs = -dlam

        # dA / db, masked to A's sparsity pattern.
        dAs = bger(dnu, zhats) + bger(self.nus, dx)
        AM = torch.cuda.sparse.DoubleTensor(
            self.Ai, Av[0].clone().fill_(1.0), self.Asz
        ).to_dense().byte().expand_as(dAs)
        dAs = dAs[AM].view_as(Av)
        dbs = -dnu

        # dQ (symmetrized), masked to Q's sparsity pattern.
        dQs = 0.5 * (bger(dx, zhats) + bger(zhats, dx))
        QM = torch.cuda.sparse.DoubleTensor(
            self.Qi, Qv[0].clone().fill_(1.0), self.Qsz
        ).to_dense().byte().expand_as(dQs)
        dQs = dQs[QM].view_as(Qv)

        grads = (dQs, dps, dGs, dhs, dAs, dbs)

        return grads
| true | true |
f720ef19782f7092c0e07d4d635eb810543e0ea4 | 9,608 | py | Python | tests/functional/tests/management/test_add_remove.py | beef9999/ocf | 4d1b086956e3019456fa86c33954eeb53cfeab9e | [
"BSD-3-Clause-Clear"
] | null | null | null | tests/functional/tests/management/test_add_remove.py | beef9999/ocf | 4d1b086956e3019456fa86c33954eeb53cfeab9e | [
"BSD-3-Clause-Clear"
] | null | null | null | tests/functional/tests/management/test_add_remove.py | beef9999/ocf | 4d1b086956e3019456fa86c33954eeb53cfeab9e | [
"BSD-3-Clause-Clear"
] | null | null | null | # Copyright(c) 2019-2021 Intel Corporation
# SPDX-License-Identifier: BSD-3-Clause-Clear
#
import pytest
from ctypes import c_int
from random import randint
from pyocf.types.cache import Cache, CacheMode
from pyocf.types.core import Core
from pyocf.types.volume import Volume
from pyocf.types.data import Data
from pyocf.types.io import IoDir
from pyocf.utils import Size as S
from pyocf.types.shared import OcfError, OcfCompletion, CacheLineSize
@pytest.mark.parametrize("cache_mode", CacheMode)
@pytest.mark.parametrize("cls", CacheLineSize)
def test_adding_core(pyocf_ctx, cache_mode, cls):
    """Attaching a core must raise the cache's core_count from 0 to 1."""
    cache = Cache.start_on_device(
        Volume(S.from_MiB(30)), cache_mode=cache_mode, cache_line_size=cls
    )
    core = Core.using_device(Volume(S.from_MiB(10)))

    # Fresh cache: nothing attached yet.
    assert cache.get_stats()["conf"]["core_count"] == 0

    cache.add_core(core)

    # Exactly one core after the attach.
    assert cache.get_stats()["conf"]["core_count"] == 1
@pytest.mark.parametrize("cache_mode", CacheMode)
@pytest.mark.parametrize("cls", CacheLineSize)
def test_removing_core(pyocf_ctx, cache_mode, cls):
    """After add_core followed by remove_core the cache reports no cores."""
    backing = Volume(S.from_MiB(30))
    cache = Cache.start_on_device(
        backing, cache_mode=cache_mode, cache_line_size=cls
    )
    core = Core.using_device(Volume(S.from_MiB(10)))

    # Attach then immediately detach the same core.
    cache.add_core(core)
    cache.remove_core(core)

    # The cache must be back to zero attached cores.
    assert cache.get_stats()["conf"]["core_count"] == 0
@pytest.mark.parametrize("cache_mode", [CacheMode.WB])
@pytest.mark.parametrize("cls", CacheLineSize)
def test_remove_dirty_no_flush(pyocf_ctx, cache_mode, cls):
    """Removing a core holding dirty write-back data, with no prior flush,
    must not fail."""
    # Start cache device in write-back mode (the only parametrized mode here)
    cache_device = Volume(S.from_MiB(30))
    cache = Cache.start_on_device(
        cache_device, cache_mode=cache_mode, cache_line_size=cls
    )

    # Create core device
    core_device = Volume(S.from_MiB(10))
    core = Core.using_device(core_device)
    cache.add_core(core)

    # Prepare data: dirty the core by writing its full size through the cache
    core_size = core.get_stats()["size"]
    data = Data(core_size.B)
    _io_to_core(core, data)

    # Remove core from cache without issuing an explicit flush first
    cache.remove_core(core)
def test_30add_remove(pyocf_ctx):
    """Repeatedly attach and detach the same core, checking stats each time."""
    # Start cache device
    cache_device = Volume(S.from_MiB(30))
    cache = Cache.start_on_device(cache_device)

    # Create core device
    core_device = Volume(S.from_MiB(10))
    core = Core.using_device(core_device)

    # Add and remove core device in a loop 30 times
    # Check statistics after every operation
    for i in range(0, 30):
        cache.add_core(core)
        stats = cache.get_stats()
        assert stats["conf"]["core_count"] == 1

        cache.remove_core(core)
        stats = cache.get_stats()
        assert stats["conf"]["core_count"] == 0
def test_10add_remove_with_io(pyocf_ctx):
    """Ten attach/detach cycles, each performing one write I/O in between."""
    cache = Cache.start_on_device(Volume(S.from_MiB(30)))
    core = Core.using_device(Volume(S.from_MiB(10)))

    for _ in range(10):
        cache.add_core(core)
        assert cache.get_stats()["conf"]["core_count"] == 1

        # Issue a single synchronous write at sector 1 of the exported object.
        payload = Data.from_string("Test data")
        io = core.new_io(
            cache.get_default_queue(), S.from_sector(1).B, payload.size,
            IoDir.WRITE, 0, 0
        )
        io.set_data(payload)
        completion = OcfCompletion([("err", c_int)])
        io.callback = completion.callback
        io.submit()
        completion.wait()

        cache.remove_core(core)
        assert cache.get_stats()["conf"]["core_count"] == 0
def test_add_remove_30core(pyocf_ctx):
    """Attach 30 cores one by one, then detach them all, verifying counts."""
    # Start cache device
    cache_device = Volume(S.from_MiB(30))
    cache = Cache.start_on_device(cache_device)
    core_devices = []
    core_amount = 30

    # Add 30 cores and check stats after each addition
    for i in range(0, core_amount):
        stats = cache.get_stats()
        assert stats["conf"]["core_count"] == i
        core_device = Volume(S.from_MiB(10))
        core = Core.using_device(core_device, name=f"core{i}")
        core_devices.append(core)
        cache.add_core(core)

    # Remove 30 cores and check stats before each removal
    for i in range(0, core_amount):
        stats = cache.get_stats()
        assert stats["conf"]["core_count"] == core_amount - i
        cache.remove_core(core_devices[i])

    # Check statistics: everything detached again
    stats = cache.get_stats()
    assert stats["conf"]["core_count"] == 0
def test_adding_to_random_cache(pyocf_ctx):
    """Spread cores over randomly chosen caches; per-cache counts must match."""
    cache_devices = []
    core_devices = {}
    cache_amount = 5
    core_amount = 30

    # Create 5 cache devices
    for i in range(0, cache_amount):
        cache_device = Volume(S.from_MiB(30))
        cache = Cache.start_on_device(cache_device, name=f"cache{i}")
        cache_devices.append(cache)

    # Create 30 core devices and add each to a randomly picked cache
    for i in range(0, core_amount):
        core_device = Volume(S.from_MiB(10))
        core = Core.using_device(core_device, name=f"core{i}")
        core_devices[core] = randint(0, cache_amount - 1)
        cache_devices[core_devices[core]].add_core(core)

    # Count expected number of cores per cache
    count_dict = {}
    for i in range(0, cache_amount):
        count_dict[i] = sum(k == i for k in core_devices.values())

    # Check if cache statistics are as expected
    for i in range(0, cache_amount):
        stats = cache_devices[i].get_stats()
        assert stats["conf"]["core_count"] == count_dict[i]
@pytest.mark.parametrize("cache_mode", CacheMode)
@pytest.mark.parametrize("cls", CacheLineSize)
def test_adding_core_twice(pyocf_ctx, cache_mode, cls):
    """A core that is already attached cannot be attached a second time."""
    cache = Cache.start_on_device(
        Volume(S.from_MiB(30)), cache_mode=cache_mode, cache_line_size=cls
    )
    core = Core.using_device(Volume(S.from_MiB(10)))

    cache.add_core(core)

    # The second attach of the very same core must be rejected.
    with pytest.raises(OcfError):
        cache.add_core(core)

    # The failed attempt must not have changed the core count.
    assert cache.get_stats()["conf"]["core_count"] == 1
@pytest.mark.parametrize("cache_mode", CacheMode)
@pytest.mark.parametrize("cls", CacheLineSize)
def test_adding_core_already_used(pyocf_ctx, cache_mode, cls):
    """A core attached to one cache cannot be attached to a second cache."""
    # Start first cache device
    cache_device1 = Volume(S.from_MiB(30))
    cache1 = Cache.start_on_device(
        cache_device1, cache_mode=cache_mode, cache_line_size=cls, name="cache1"
    )

    # Start second cache device
    cache_device2 = Volume(S.from_MiB(30))
    cache2 = Cache.start_on_device(
        cache_device2, cache_mode=cache_mode, cache_line_size=cls, name="cache2"
    )

    # Create core device
    core_device = Volume(S.from_MiB(10))
    core = Core.using_device(core_device)

    # Add core to first cache
    cache1.add_core(core)

    # Check that it is not possible to add the same core to the second cache
    with pytest.raises(OcfError):
        cache2.add_core(core)

    # Check that core count is as expected: only cache1 owns the core
    stats = cache1.get_stats()
    assert stats["conf"]["core_count"] == 1

    stats = cache2.get_stats()
    assert stats["conf"]["core_count"] == 0
@pytest.mark.parametrize("cache_mode", CacheMode)
@pytest.mark.parametrize("cls", CacheLineSize)
def test_add_remove_incrementally(pyocf_ctx, cache_mode, cls):
    """Interleave partial removals and re-additions; the count must track
    every step (5 -> 2 -> 4 -> 3 -> 5)."""
    # Start cache device
    cache_device = Volume(S.from_MiB(30))
    cache = Cache.start_on_device(
        cache_device, cache_mode=cache_mode, cache_line_size=cls
    )
    core_devices = []
    core_amount = 5

    # Create 5 core devices and add to cache
    for i in range(0, core_amount):
        core_device = Volume(S.from_MiB(10))
        core = Core.using_device(core_device, name=f"core{i}")
        core_devices.append(core)
        cache.add_core(core)

    # Check that core count is as expected
    stats = cache.get_stats()
    assert stats["conf"]["core_count"] == core_amount

    # Remove 3 cores
    cache.remove_core(core_devices[0])
    cache.remove_core(core_devices[1])
    cache.remove_core(core_devices[2])

    # Add 2 of them back: count should be core_amount - 1
    cache.add_core(core_devices[0])
    cache.add_core(core_devices[1])
    stats = cache.get_stats()
    assert stats["conf"]["core_count"] == core_amount - 1

    # Remove 1 core: count should drop to core_amount - 2
    cache.remove_core(core_devices[1])
    stats = cache.get_stats()
    assert stats["conf"]["core_count"] == core_amount - 2

    # Add 2 cores: all core_amount cores attached again
    cache.add_core(core_devices[1])
    cache.add_core(core_devices[2])
    stats = cache.get_stats()
    assert stats["conf"]["core_count"] == core_amount
def _io_to_core(exported_obj: Core, data: Data):
    """Synchronously write *data* at offset 0 of the exported object and
    assert that the I/O completed without error."""
    queue = exported_obj.cache.get_default_queue()
    io = exported_obj.new_io(queue, 0, data.size, IoDir.WRITE, 0, 0)
    io.set_data(data)

    cmpl = OcfCompletion([("err", c_int)])
    io.callback = cmpl.callback
    io.submit()
    cmpl.wait()

    assert cmpl.results["err"] == 0, "IO to exported object completion"
| 30.405063 | 82 | 0.679954 |
import pytest
from ctypes import c_int
from random import randint
from pyocf.types.cache import Cache, CacheMode
from pyocf.types.core import Core
from pyocf.types.volume import Volume
from pyocf.types.data import Data
from pyocf.types.io import IoDir
from pyocf.utils import Size as S
from pyocf.types.shared import OcfError, OcfCompletion, CacheLineSize
@pytest.mark.parametrize("cache_mode", CacheMode)
@pytest.mark.parametrize("cls", CacheLineSize)
def test_adding_core(pyocf_ctx, cache_mode, cls):
cache_device = Volume(S.from_MiB(30))
cache = Cache.start_on_device(
cache_device, cache_mode=cache_mode, cache_line_size=cls
)
core_device = Volume(S.from_MiB(10))
core = Core.using_device(core_device)
stats = cache.get_stats()
assert stats["conf"]["core_count"] == 0
cache.add_core(core)
stats = cache.get_stats()
assert stats["conf"]["core_count"] == 1
@pytest.mark.parametrize("cache_mode", CacheMode)
@pytest.mark.parametrize("cls", CacheLineSize)
def test_removing_core(pyocf_ctx, cache_mode, cls):
cache_device = Volume(S.from_MiB(30))
cache = Cache.start_on_device(
cache_device, cache_mode=cache_mode, cache_line_size=cls
)
core_device = Volume(S.from_MiB(10))
core = Core.using_device(core_device)
cache.add_core(core)
cache.remove_core(core)
stats = cache.get_stats()
assert stats["conf"]["core_count"] == 0
@pytest.mark.parametrize("cache_mode", [CacheMode.WB])
@pytest.mark.parametrize("cls", CacheLineSize)
def test_remove_dirty_no_flush(pyocf_ctx, cache_mode, cls):
cache_device = Volume(S.from_MiB(30))
cache = Cache.start_on_device(
cache_device, cache_mode=cache_mode, cache_line_size=cls
)
core_device = Volume(S.from_MiB(10))
core = Core.using_device(core_device)
cache.add_core(core)
core_size = core.get_stats()["size"]
data = Data(core_size.B)
_io_to_core(core, data)
cache.remove_core(core)
def test_30add_remove(pyocf_ctx):
cache_device = Volume(S.from_MiB(30))
cache = Cache.start_on_device(cache_device)
core_device = Volume(S.from_MiB(10))
core = Core.using_device(core_device)
for i in range(0, 30):
cache.add_core(core)
stats = cache.get_stats()
assert stats["conf"]["core_count"] == 1
cache.remove_core(core)
stats = cache.get_stats()
assert stats["conf"]["core_count"] == 0
def test_10add_remove_with_io(pyocf_ctx):
cache_device = Volume(S.from_MiB(30))
cache = Cache.start_on_device(cache_device)
core_device = Volume(S.from_MiB(10))
core = Core.using_device(core_device)
for i in range(0, 10):
cache.add_core(core)
stats = cache.get_stats()
assert stats["conf"]["core_count"] == 1
write_data = Data.from_string("Test data")
io = core.new_io(
cache.get_default_queue(), S.from_sector(1).B, write_data.size,
IoDir.WRITE, 0, 0
)
io.set_data(write_data)
cmpl = OcfCompletion([("err", c_int)])
io.callback = cmpl.callback
io.submit()
cmpl.wait()
cache.remove_core(core)
stats = cache.get_stats()
assert stats["conf"]["core_count"] == 0
def test_add_remove_30core(pyocf_ctx):
cache_device = Volume(S.from_MiB(30))
cache = Cache.start_on_device(cache_device)
core_devices = []
core_amount = 30
for i in range(0, core_amount):
stats = cache.get_stats()
assert stats["conf"]["core_count"] == i
core_device = Volume(S.from_MiB(10))
core = Core.using_device(core_device, name=f"core{i}")
core_devices.append(core)
cache.add_core(core)
for i in range(0, core_amount):
stats = cache.get_stats()
assert stats["conf"]["core_count"] == core_amount - i
cache.remove_core(core_devices[i])
stats = cache.get_stats()
assert stats["conf"]["core_count"] == 0
def test_adding_to_random_cache(pyocf_ctx):
cache_devices = []
core_devices = {}
cache_amount = 5
core_amount = 30
for i in range(0, cache_amount):
cache_device = Volume(S.from_MiB(30))
cache = Cache.start_on_device(cache_device, name=f"cache{i}")
cache_devices.append(cache)
for i in range(0, core_amount):
core_device = Volume(S.from_MiB(10))
core = Core.using_device(core_device, name=f"core{i}")
core_devices[core] = randint(0, cache_amount - 1)
cache_devices[core_devices[core]].add_core(core)
count_dict = {}
for i in range(0, cache_amount):
count_dict[i] = sum(k == i for k in core_devices.values())
for i in range(0, cache_amount):
stats = cache_devices[i].get_stats()
assert stats["conf"]["core_count"] == count_dict[i]
@pytest.mark.parametrize("cache_mode", CacheMode)
@pytest.mark.parametrize("cls", CacheLineSize)
def test_adding_core_twice(pyocf_ctx, cache_mode, cls):
cache_device = Volume(S.from_MiB(30))
cache = Cache.start_on_device(
cache_device, cache_mode=cache_mode, cache_line_size=cls
)
core_device = Volume(S.from_MiB(10))
core = Core.using_device(core_device)
cache.add_core(core)
with pytest.raises(OcfError):
cache.add_core(core)
stats = cache.get_stats()
assert stats["conf"]["core_count"] == 1
@pytest.mark.parametrize("cache_mode", CacheMode)
@pytest.mark.parametrize("cls", CacheLineSize)
def test_adding_core_already_used(pyocf_ctx, cache_mode, cls):
cache_device1 = Volume(S.from_MiB(30))
cache1 = Cache.start_on_device(
cache_device1, cache_mode=cache_mode, cache_line_size=cls, name="cache1"
)
cache_device2 = Volume(S.from_MiB(30))
cache2 = Cache.start_on_device(
cache_device2, cache_mode=cache_mode, cache_line_size=cls, name="cache2"
)
core_device = Volume(S.from_MiB(10))
core = Core.using_device(core_device)
cache1.add_core(core)
with pytest.raises(OcfError):
cache2.add_core(core)
stats = cache1.get_stats()
assert stats["conf"]["core_count"] == 1
stats = cache2.get_stats()
assert stats["conf"]["core_count"] == 0
@pytest.mark.parametrize("cache_mode", CacheMode)
@pytest.mark.parametrize("cls", CacheLineSize)
def test_add_remove_incrementally(pyocf_ctx, cache_mode, cls):
cache_device = Volume(S.from_MiB(30))
cache = Cache.start_on_device(
cache_device, cache_mode=cache_mode, cache_line_size=cls
)
core_devices = []
core_amount = 5
for i in range(0, core_amount):
core_device = Volume(S.from_MiB(10))
core = Core.using_device(core_device, name=f"core{i}")
core_devices.append(core)
cache.add_core(core)
stats = cache.get_stats()
assert stats["conf"]["core_count"] == core_amount
cache.remove_core(core_devices[0])
cache.remove_core(core_devices[1])
cache.remove_core(core_devices[2])
cache.add_core(core_devices[0])
cache.add_core(core_devices[1])
stats = cache.get_stats()
assert stats["conf"]["core_count"] == core_amount - 1
cache.remove_core(core_devices[1])
stats = cache.get_stats()
assert stats["conf"]["core_count"] == core_amount - 2
cache.add_core(core_devices[1])
cache.add_core(core_devices[2])
stats = cache.get_stats()
assert stats["conf"]["core_count"] == core_amount
def _io_to_core(exported_obj: Core, data: Data):
io = exported_obj.new_io(exported_obj.cache.get_default_queue(), 0, data.size,
IoDir.WRITE, 0, 0)
io.set_data(data)
completion = OcfCompletion([("err", c_int)])
io.callback = completion.callback
io.submit()
completion.wait()
assert completion.results["err"] == 0, "IO to exported object completion"
| true | true |
f720efc3c7a943431ee1490b8c525586b3496e7e | 98 | py | Python | game/forms.py | mingaleg/yakubovich | 95398c78eaffbd6ff69f8fdbedfc847531219d8a | [
"MIT"
] | 5 | 2018-12-12T16:24:42.000Z | 2020-02-29T18:45:30.000Z | game/forms.py | mingaleg/yakubovich | 95398c78eaffbd6ff69f8fdbedfc847531219d8a | [
"MIT"
] | 3 | 2020-06-05T17:47:13.000Z | 2022-02-11T03:39:54.000Z | game/forms.py | mingaleg/yakubovich | 95398c78eaffbd6ff69f8fdbedfc847531219d8a | [
"MIT"
] | null | null | null | from django import forms
class GuessForm(forms.Form):
    """Form with a single text input for submitting a player's guess."""

    # Free-text guess; Django rejects values longer than 32 characters.
    guess = forms.CharField(max_length=32)
class GuessForm(forms.Form):
guess = forms.CharField(max_length=32) | true | true |
f720f0cdfccab7e5f9e79ca3a814fc670b37f244 | 7,403 | py | Python | packages/syft/src/syft/core/node/network.py | Noob-can-Compile/PySyft | 156cf93489b16dd0205b0058d4d23d56b3a91ab8 | [
"Apache-2.0"
] | null | null | null | packages/syft/src/syft/core/node/network.py | Noob-can-Compile/PySyft | 156cf93489b16dd0205b0058d4d23d56b3a91ab8 | [
"Apache-2.0"
] | null | null | null | packages/syft/src/syft/core/node/network.py | Noob-can-Compile/PySyft | 156cf93489b16dd0205b0058d4d23d56b3a91ab8 | [
"Apache-2.0"
] | null | null | null | # future
from __future__ import annotations
# stdlib
import os
from typing import Any
from typing import Dict
from typing import List
from typing import Optional
from typing import Union
# third party
import ascii_magic
from nacl.signing import SigningKey
from nacl.signing import VerifyKey
from pydantic import BaseSettings
# relative
from ...lib.python import String
from ...logger import error
from ..common.message import SignedImmediateSyftMessageWithReply
from ..common.message import SignedMessage
from ..common.message import SyftMessage
from ..common.uid import UID
from ..io.location import Location
from ..io.location import SpecificLocation
from .common.node import Node
from .common.node_manager.association_request_manager import AssociationRequestManager
from .common.node_manager.node_manager import NodeManager
from .common.node_manager.node_route_manager import NodeRouteManager
from .common.node_manager.role_manager import RoleManager
from .common.node_manager.user_manager import UserManager
from .common.node_service.association_request.association_request_service import (
AssociationRequestService,
)
from .common.node_service.association_request.association_request_service import (
AssociationRequestWithoutReplyService,
)
from .common.node_service.network_search.network_search_service import (
NetworkSearchService,
)
from .common.node_service.node_setup.node_setup_messages import (
CreateInitialSetUpMessage,
)
from .common.node_service.node_setup.node_setup_service import NodeSetupService
from .common.node_service.peer_discovery.peer_discovery_service import (
PeerDiscoveryService,
)
from .common.node_service.ping.ping_service import PingService
from .common.node_service.request_receiver.request_receiver_messages import (
RequestMessage,
)
from .common.node_service.role_manager.role_manager_service import RoleManagerService
from .common.node_service.user_manager.user_manager_service import UserManagerService
from .common.node_service.vpn.vpn_service import VPNConnectService
from .common.node_service.vpn.vpn_service import VPNJoinSelfService
from .common.node_service.vpn.vpn_service import VPNJoinService
from .common.node_service.vpn.vpn_service import VPNRegisterService
from .common.node_service.vpn.vpn_service import VPNStatusService
from .domain import Domain
from .domain_client import DomainClient
from .network_client import NetworkClient
class Network(Node):
    """A Network node: the top-level Syft node that Domain nodes attach to.

    Wires up the database managers and the message services a network
    exposes (association requests, VPN management, search, ping, setup).
    """

    # This node's location in the Syft address hierarchy.
    network: SpecificLocation

    # Hooks consumed by the generic Node machinery.
    child_type = Domain
    client_type = NetworkClient
    child_type_client_type = DomainClient

    def __init__(
        self,
        name: Optional[str],
        # NOTE(review): default is evaluated once at class-definition time
        # and shared across calls -- harmless here since __init__ replaces
        # self.network below, but confirm that is intended.
        network: SpecificLocation = SpecificLocation(),
        domain: Optional[Location] = None,
        device: Optional[Location] = None,
        vm: Optional[Location] = None,
        signing_key: Optional[SigningKey] = None,
        verify_key: Optional[VerifyKey] = None,
        root_key: Optional[VerifyKey] = None,
        db_engine: Any = None,
        settings: Optional[BaseSettings] = None,
    ):
        super().__init__(
            name=name,
            network=network,
            domain=domain,
            device=device,
            vm=vm,
            signing_key=signing_key,
            verify_key=verify_key,
            db_engine=db_engine,
            settings=settings,
        )

        # share settings with the FastAPI application level
        self.settings = settings

        # specific location with name (overrides the constructor argument)
        self.network = SpecificLocation(name=self.name)
        self.root_key = root_key

        # Database Management Instances (all share the same engine)
        self.users = UserManager(db_engine)
        self.roles = RoleManager(db_engine)
        self.node = NodeManager(db_engine)
        self.node_route = NodeRouteManager(db_engine)
        self.association_requests = AssociationRequestManager(db_engine)

        # Grid Network Services: request/reply handlers routed by message type
        self.immediate_services_with_reply.append(AssociationRequestService)
        self.immediate_services_with_reply.append(NodeSetupService)
        self.immediate_services_with_reply.append(RoleManagerService)
        self.immediate_services_with_reply.append(UserManagerService)
        self.immediate_services_with_reply.append(VPNConnectService)
        self.immediate_services_with_reply.append(VPNJoinService)
        self.immediate_services_with_reply.append(VPNRegisterService)
        self.immediate_services_with_reply.append(VPNStatusService)
        self.immediate_services_with_reply.append(VPNJoinSelfService)
        self.immediate_services_with_reply.append(PingService)
        self.immediate_services_with_reply.append(NetworkSearchService)
        self.immediate_services_with_reply.append(PeerDiscoveryService)

        # Fire-and-forget handlers (no reply message produced)
        self.immediate_services_without_reply.append(
            AssociationRequestWithoutReplyService
        )

        self.requests: List[RequestMessage] = list()
        # available_device_types = set()
        # TODO: add available compute types

        # default_device = None
        # TODO: add default compute type

        # Must run after the service lists above are fully populated.
        self._register_services()
        self.request_handlers: List[Dict[Union[str, String], Any]] = []
        self.handled_requests: Dict[Any, float] = {}

        self.post_init()

    def initial_setup(  # nosec
        self,
        first_superuser_name: str = "Jane Doe",
        first_superuser_email: str = "info@openmined.org",
        first_superuser_password: str = "changethis",
        first_superuser_budget: float = 5.55,
        domain_name: str = "BigHospital",
    ) -> Network:
        """Bootstrap the node by creating the first superuser account.

        Sends a self-addressed, self-signed CreateInitialSetUpMessage
        through the node's own message pipeline. Returns ``self`` so the
        call can be chained. Defaults are development placeholders
        (hence the ``# nosec`` above).
        """
        # Build Syft Message
        msg: SignedImmediateSyftMessageWithReply = CreateInitialSetUpMessage(
            address=self.address,
            name=first_superuser_name,
            email=first_superuser_email,
            password=first_superuser_password,
            domain_name=domain_name,
            budget=first_superuser_budget,
            reply_to=self.address,
        ).sign(signing_key=self.signing_key)

        # Process syft message locally (reply is discarded)
        _ = self.recv_immediate_msg_with_reply(msg=msg).message

        return self

    def post_init(self) -> None:
        """Extend the base post-init by persisting this node's UID."""
        super().post_init()
        self.set_node_uid()

    def loud_print(self) -> None:
        """Print the PyGrid ASCII-art banner; falls back to plain text if
        the image or ascii_magic rendering is unavailable."""
        try:
            install_path = os.path.abspath(
                os.path.join(os.path.realpath(__file__), "../../../img/")
            )
            ascii_magic.to_terminal(
                ascii_magic.from_image_file(
                    img_path=install_path + "/pygrid.png", columns=83
                )
            )
            print(
                r"""
|\ | _ |_ _ _ |
| \| (- |_ \)/ (_) | |(
"""
            )
        except Exception:
            # NOTE(review): "NETOWRK" typo is in the original runtime string;
            # left untouched here since this edit changes comments only.
            print("NETOWRK NODE (print fail backup)")

    @property
    def icon(self) -> str:
        return "🔗"

    @property
    def id(self) -> UID:
        # The node's identity is its network location's UID.
        return self.network.id

    def message_is_for_me(self, msg: Union[SyftMessage, SignedMessage]) -> bool:
        # this needs to be defensive by checking network_id NOT network.id or it breaks
        try:
            return msg.address.network_id == self.id and msg.address.domain is None
        except Exception as e:
            error(f"Error checking if {msg.pprint} is for me on {self.pprint}. {e}")
            return False
| 35.763285 | 87 | 0.694178 |
from __future__ import annotations
import os
from typing import Any
from typing import Dict
from typing import List
from typing import Optional
from typing import Union
import ascii_magic
from nacl.signing import SigningKey
from nacl.signing import VerifyKey
from pydantic import BaseSettings
from ...lib.python import String
from ...logger import error
from ..common.message import SignedImmediateSyftMessageWithReply
from ..common.message import SignedMessage
from ..common.message import SyftMessage
from ..common.uid import UID
from ..io.location import Location
from ..io.location import SpecificLocation
from .common.node import Node
from .common.node_manager.association_request_manager import AssociationRequestManager
from .common.node_manager.node_manager import NodeManager
from .common.node_manager.node_route_manager import NodeRouteManager
from .common.node_manager.role_manager import RoleManager
from .common.node_manager.user_manager import UserManager
from .common.node_service.association_request.association_request_service import (
AssociationRequestService,
)
from .common.node_service.association_request.association_request_service import (
AssociationRequestWithoutReplyService,
)
from .common.node_service.network_search.network_search_service import (
NetworkSearchService,
)
from .common.node_service.node_setup.node_setup_messages import (
CreateInitialSetUpMessage,
)
from .common.node_service.node_setup.node_setup_service import NodeSetupService
from .common.node_service.peer_discovery.peer_discovery_service import (
PeerDiscoveryService,
)
from .common.node_service.ping.ping_service import PingService
from .common.node_service.request_receiver.request_receiver_messages import (
RequestMessage,
)
from .common.node_service.role_manager.role_manager_service import RoleManagerService
from .common.node_service.user_manager.user_manager_service import UserManagerService
from .common.node_service.vpn.vpn_service import VPNConnectService
from .common.node_service.vpn.vpn_service import VPNJoinSelfService
from .common.node_service.vpn.vpn_service import VPNJoinService
from .common.node_service.vpn.vpn_service import VPNRegisterService
from .common.node_service.vpn.vpn_service import VPNStatusService
from .domain import Domain
from .domain_client import DomainClient
from .network_client import NetworkClient
class Network(Node):
network: SpecificLocation
child_type = Domain
client_type = NetworkClient
child_type_client_type = DomainClient
def __init__(
self,
name: Optional[str],
network: SpecificLocation = SpecificLocation(),
domain: Optional[Location] = None,
device: Optional[Location] = None,
vm: Optional[Location] = None,
signing_key: Optional[SigningKey] = None,
verify_key: Optional[VerifyKey] = None,
root_key: Optional[VerifyKey] = None,
db_engine: Any = None,
settings: Optional[BaseSettings] = None,
):
super().__init__(
name=name,
network=network,
domain=domain,
device=device,
vm=vm,
signing_key=signing_key,
verify_key=verify_key,
db_engine=db_engine,
settings=settings,
)
self.settings = settings
self.network = SpecificLocation(name=self.name)
self.root_key = root_key
self.users = UserManager(db_engine)
self.roles = RoleManager(db_engine)
self.node = NodeManager(db_engine)
self.node_route = NodeRouteManager(db_engine)
self.association_requests = AssociationRequestManager(db_engine)
self.immediate_services_with_reply.append(AssociationRequestService)
self.immediate_services_with_reply.append(NodeSetupService)
self.immediate_services_with_reply.append(RoleManagerService)
self.immediate_services_with_reply.append(UserManagerService)
self.immediate_services_with_reply.append(VPNConnectService)
self.immediate_services_with_reply.append(VPNJoinService)
self.immediate_services_with_reply.append(VPNRegisterService)
self.immediate_services_with_reply.append(VPNStatusService)
self.immediate_services_with_reply.append(VPNJoinSelfService)
self.immediate_services_with_reply.append(PingService)
self.immediate_services_with_reply.append(NetworkSearchService)
self.immediate_services_with_reply.append(PeerDiscoveryService)
self.immediate_services_without_reply.append(
AssociationRequestWithoutReplyService
)
self.requests: List[RequestMessage] = list()
self._register_services()
self.request_handlers: List[Dict[Union[str, String], Any]] = []
self.handled_requests: Dict[Any, float] = {}
self.post_init()
def initial_setup(
self,
first_superuser_name: str = "Jane Doe",
first_superuser_email: str = "info@openmined.org",
first_superuser_password: str = "changethis",
first_superuser_budget: float = 5.55,
domain_name: str = "BigHospital",
) -> Network:
msg: SignedImmediateSyftMessageWithReply = CreateInitialSetUpMessage(
address=self.address,
name=first_superuser_name,
email=first_superuser_email,
password=first_superuser_password,
domain_name=domain_name,
budget=first_superuser_budget,
reply_to=self.address,
).sign(signing_key=self.signing_key)
_ = self.recv_immediate_msg_with_reply(msg=msg).message
return self
def post_init(self) -> None:
super().post_init()
self.set_node_uid()
def loud_print(self) -> None:
try:
install_path = os.path.abspath(
os.path.join(os.path.realpath(__file__), "../../../img/")
)
ascii_magic.to_terminal(
ascii_magic.from_image_file(
img_path=install_path + "/pygrid.png", columns=83
)
)
print(
r"""
|\ | _ |_ _ _ |
| \| (- |_ \)/ (_) | |(
"""
)
except Exception:
print("NETOWRK NODE (print fail backup)")
@property
def icon(self) -> str:
return "🔗"
@property
def id(self) -> UID:
return self.network.id
def message_is_for_me(self, msg: Union[SyftMessage, SignedMessage]) -> bool:
try:
return msg.address.network_id == self.id and msg.address.domain is None
except Exception as e:
error(f"Error checking if {msg.pprint} is for me on {self.pprint}. {e}")
return False
| true | true |
f720f0e6e33f0328fc6c7ca0e2c409dffe494e2d | 469 | py | Python | rest/taskrouter/activities/list/get/example-1/example-1.5.x.py | azaddeveloper/api-snippets | f88b153cd7186fa70b33733b205886502db0d1f2 | [
"MIT"
] | 2 | 2017-11-23T11:31:20.000Z | 2018-01-22T04:14:02.000Z | rest/taskrouter/activities/list/get/example-1/example-1.5.x.py | azaddeveloper/api-snippets | f88b153cd7186fa70b33733b205886502db0d1f2 | [
"MIT"
] | null | null | null | rest/taskrouter/activities/list/get/example-1/example-1.5.x.py | azaddeveloper/api-snippets | f88b153cd7186fa70b33733b205886502db0d1f2 | [
"MIT"
] | 2 | 2020-05-22T23:31:21.000Z | 2021-06-10T18:33:45.000Z | # Download the Python helper library from twilio.com/docs/python/install
from twilio.rest import TwilioTaskRouterClient
# Your Account Sid and Auth Token from twilio.com/user/account
account_sid = "ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX"
auth_token = "your_auth_token"
workspace_sid = "WSXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX"
client = TwilioTaskRouterClient(account_sid, auth_token)
for activity in client.activities(workspace_sid).list():
print(activity.friendly_name)
| 36.076923 | 72 | 0.831557 |
from twilio.rest import TwilioTaskRouterClient
account_sid = "ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX"
auth_token = "your_auth_token"
workspace_sid = "WSXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX"
client = TwilioTaskRouterClient(account_sid, auth_token)
for activity in client.activities(workspace_sid).list():
print(activity.friendly_name)
| true | true |
f720f0e9572244aa93d948eff6a96fb8c4142ebe | 26,980 | py | Python | lang/python/github/com/metaprov/modelaapi/services/modelautobuilder/v1/modelautobuilder_pb2.py | metaprov/modelaapi | 64ab493dd73329196235e15776e5177c72281990 | [
"Apache-2.0"
] | 5 | 2022-02-18T03:40:10.000Z | 2022-03-01T16:11:24.000Z | lang/python/github/com/metaprov/modelaapi/services/modelautobuilder/v1/modelautobuilder_pb2.py | metaprov/modelaapi | 64ab493dd73329196235e15776e5177c72281990 | [
"Apache-2.0"
] | 1 | 2022-01-07T19:59:25.000Z | 2022-02-04T01:21:14.000Z | lang/python/github/com/metaprov/modelaapi/services/modelautobuilder/v1/modelautobuilder_pb2.py | metaprov/modelaapi | 64ab493dd73329196235e15776e5177c72281990 | [
"Apache-2.0"
] | 1 | 2022-03-25T10:21:43.000Z | 2022-03-25T10:21:43.000Z | # -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: github.com/metaprov/modelaapi/services/modelautobuilder/v1/modelautobuilder.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
from github.com.metaprov.modelaapi.pkg.apis.training.v1alpha1 import generated_pb2 as github_dot_com_dot_metaprov_dot_modelaapi_dot_pkg_dot_apis_dot_training_dot_v1alpha1_dot_generated__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='github.com/metaprov/modelaapi/services/modelautobuilder/v1/modelautobuilder.proto',
package='github.com.metaprov.modelaapi.services.modelautobuilder.v1',
syntax='proto3',
serialized_options=b'Z:github.com/metaprov/modelaapi/services/modelautobuilder/v1',
create_key=_descriptor._internal_create_key,
serialized_pb=b'\nQgithub.com/metaprov/modelaapi/services/modelautobuilder/v1/modelautobuilder.proto\x12:github.com.metaprov.modelaapi.services.modelautobuilder.v1\x1a\x1cgoogle/api/annotations.proto\x1aHgithub.com/metaprov/modelaapi/pkg/apis/training/v1alpha1/generated.proto\"\xd6\x01\n\x1cListModelAutobuildersRequest\x12\x11\n\tnamespace\x18\x01 \x01(\t\x12t\n\x06labels\x18\x02 \x03(\x0b\x32\x64.github.com.metaprov.modelaapi.services.modelautobuilder.v1.ListModelAutobuildersRequest.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"~\n\x1dListModelAutobuildersResponse\x12]\n\x05items\x18\x01 \x01(\x0b\x32N.github.com.metaprov.modelaapi.pkg.apis.training.v1alpha1.ModelAutobuilderList\"y\n\x1d\x43reateModelAutobuilderRequest\x12X\n\x04item\x18\x01 \x01(\x0b\x32J.github.com.metaprov.modelaapi.pkg.apis.training.v1alpha1.ModelAutobuilder\" \n\x1e\x43reateModelAutobuilderResponse\"y\n\x1dUpdateModelAutobuilderRequest\x12X\n\x04item\x18\x01 \x01(\x0b\x32J.github.com.metaprov.modelaapi.pkg.apis.training.v1alpha1.ModelAutobuilder\" \n\x1eUpdateModelAutobuilderResponse\"=\n\x1aGetModelAutobuilderRequest\x12\x11\n\tnamespace\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\"\x85\x01\n\x1bGetModelAutobuilderResponse\x12X\n\x04item\x18\x01 \x01(\x0b\x32J.github.com.metaprov.modelaapi.pkg.apis.training.v1alpha1.ModelAutobuilder\x12\x0c\n\x04yaml\x18\x02 \x01(\t\"@\n\x1d\x44\x65leteModelAutobuilderRequest\x12\x11\n\tnamespace\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\" 
\n\x1e\x44\x65leteModelAutobuilderResponse2\xa9\n\n\x17ModelAutobuilderService\x12\xf7\x01\n\x15ListModelAutobuilders\x12X.github.com.metaprov.modelaapi.services.modelautobuilder.v1.ListModelAutobuildersRequest\x1aY.github.com.metaprov.modelaapi.services.modelautobuilder.v1.ListModelAutobuildersResponse\")\x82\xd3\xe4\x93\x02#\x12!/v1/modelautobuilders/{namespace}\x12\xf1\x01\n\x16\x43reateModelAutobuilder\x12Y.github.com.metaprov.modelaapi.services.modelautobuilder.v1.CreateModelAutobuilderRequest\x1aZ.github.com.metaprov.modelaapi.services.modelautobuilder.v1.CreateModelAutobuilderResponse\" \x82\xd3\xe4\x93\x02\x1a\"\x15/v1/modelautobuilders:\x01*\x12\xf8\x01\n\x13GetModelAutobuilder\x12V.github.com.metaprov.modelaapi.services.modelautobuilder.v1.GetModelAutobuilderRequest\x1aW.github.com.metaprov.modelaapi.services.modelautobuilder.v1.GetModelAutobuilderResponse\"0\x82\xd3\xe4\x93\x02*\x12(/v1/modelautobuilders/{namespace}/{name}\x12\xa0\x02\n\x16UpdateModelAutobuilder\x12Y.github.com.metaprov.modelaapi.services.modelautobuilder.v1.UpdateModelAutobuilderRequest\x1aZ.github.com.metaprov.modelaapi.services.modelautobuilder.v1.UpdateModelAutobuilderResponse\"O\x82\xd3\xe4\x93\x02I\x1a\x44/v1/modelautobuilders/{item.metadata.namespace}/{item.metadata.name}:\x01*\x12\x81\x02\n\x16\x44\x65leteModelAutobuilder\x12Y.github.com.metaprov.modelaapi.services.modelautobuilder.v1.DeleteModelAutobuilderRequest\x1aZ.github.com.metaprov.modelaapi.services.modelautobuilder.v1.DeleteModelAutobuilderResponse\"0\x82\xd3\xe4\x93\x02**(/v1/modelautobuilders/{namespace}/{name}B<Z:github.com/metaprov/modelaapi/services/modelautobuilder/v1b\x06proto3'
,
dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,github_dot_com_dot_metaprov_dot_modelaapi_dot_pkg_dot_apis_dot_training_dot_v1alpha1_dot_generated__pb2.DESCRIPTOR,])
_LISTMODELAUTOBUILDERSREQUEST_LABELSENTRY = _descriptor.Descriptor(
name='LabelsEntry',
full_name='github.com.metaprov.modelaapi.services.modelautobuilder.v1.ListModelAutobuildersRequest.LabelsEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='github.com.metaprov.modelaapi.services.modelautobuilder.v1.ListModelAutobuildersRequest.LabelsEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='value', full_name='github.com.metaprov.modelaapi.services.modelautobuilder.v1.ListModelAutobuildersRequest.LabelsEntry.value', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=b'8\001',
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=419,
serialized_end=464,
)
_LISTMODELAUTOBUILDERSREQUEST = _descriptor.Descriptor(
name='ListModelAutobuildersRequest',
full_name='github.com.metaprov.modelaapi.services.modelautobuilder.v1.ListModelAutobuildersRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='namespace', full_name='github.com.metaprov.modelaapi.services.modelautobuilder.v1.ListModelAutobuildersRequest.namespace', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='labels', full_name='github.com.metaprov.modelaapi.services.modelautobuilder.v1.ListModelAutobuildersRequest.labels', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[_LISTMODELAUTOBUILDERSREQUEST_LABELSENTRY, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=250,
serialized_end=464,
)
_LISTMODELAUTOBUILDERSRESPONSE = _descriptor.Descriptor(
name='ListModelAutobuildersResponse',
full_name='github.com.metaprov.modelaapi.services.modelautobuilder.v1.ListModelAutobuildersResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='items', full_name='github.com.metaprov.modelaapi.services.modelautobuilder.v1.ListModelAutobuildersResponse.items', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=466,
serialized_end=592,
)
_CREATEMODELAUTOBUILDERREQUEST = _descriptor.Descriptor(
name='CreateModelAutobuilderRequest',
full_name='github.com.metaprov.modelaapi.services.modelautobuilder.v1.CreateModelAutobuilderRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='item', full_name='github.com.metaprov.modelaapi.services.modelautobuilder.v1.CreateModelAutobuilderRequest.item', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=594,
serialized_end=715,
)
_CREATEMODELAUTOBUILDERRESPONSE = _descriptor.Descriptor(
name='CreateModelAutobuilderResponse',
full_name='github.com.metaprov.modelaapi.services.modelautobuilder.v1.CreateModelAutobuilderResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=717,
serialized_end=749,
)
_UPDATEMODELAUTOBUILDERREQUEST = _descriptor.Descriptor(
name='UpdateModelAutobuilderRequest',
full_name='github.com.metaprov.modelaapi.services.modelautobuilder.v1.UpdateModelAutobuilderRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='item', full_name='github.com.metaprov.modelaapi.services.modelautobuilder.v1.UpdateModelAutobuilderRequest.item', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=751,
serialized_end=872,
)
_UPDATEMODELAUTOBUILDERRESPONSE = _descriptor.Descriptor(
name='UpdateModelAutobuilderResponse',
full_name='github.com.metaprov.modelaapi.services.modelautobuilder.v1.UpdateModelAutobuilderResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=874,
serialized_end=906,
)
_GETMODELAUTOBUILDERREQUEST = _descriptor.Descriptor(
name='GetModelAutobuilderRequest',
full_name='github.com.metaprov.modelaapi.services.modelautobuilder.v1.GetModelAutobuilderRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='namespace', full_name='github.com.metaprov.modelaapi.services.modelautobuilder.v1.GetModelAutobuilderRequest.namespace', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='name', full_name='github.com.metaprov.modelaapi.services.modelautobuilder.v1.GetModelAutobuilderRequest.name', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=908,
serialized_end=969,
)
_GETMODELAUTOBUILDERRESPONSE = _descriptor.Descriptor(
name='GetModelAutobuilderResponse',
full_name='github.com.metaprov.modelaapi.services.modelautobuilder.v1.GetModelAutobuilderResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='item', full_name='github.com.metaprov.modelaapi.services.modelautobuilder.v1.GetModelAutobuilderResponse.item', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='yaml', full_name='github.com.metaprov.modelaapi.services.modelautobuilder.v1.GetModelAutobuilderResponse.yaml', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=972,
serialized_end=1105,
)
_DELETEMODELAUTOBUILDERREQUEST = _descriptor.Descriptor(
name='DeleteModelAutobuilderRequest',
full_name='github.com.metaprov.modelaapi.services.modelautobuilder.v1.DeleteModelAutobuilderRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='namespace', full_name='github.com.metaprov.modelaapi.services.modelautobuilder.v1.DeleteModelAutobuilderRequest.namespace', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='name', full_name='github.com.metaprov.modelaapi.services.modelautobuilder.v1.DeleteModelAutobuilderRequest.name', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1107,
serialized_end=1171,
)
_DELETEMODELAUTOBUILDERRESPONSE = _descriptor.Descriptor(
name='DeleteModelAutobuilderResponse',
full_name='github.com.metaprov.modelaapi.services.modelautobuilder.v1.DeleteModelAutobuilderResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1173,
serialized_end=1205,
)
_LISTMODELAUTOBUILDERSREQUEST_LABELSENTRY.containing_type = _LISTMODELAUTOBUILDERSREQUEST
_LISTMODELAUTOBUILDERSREQUEST.fields_by_name['labels'].message_type = _LISTMODELAUTOBUILDERSREQUEST_LABELSENTRY
_LISTMODELAUTOBUILDERSRESPONSE.fields_by_name['items'].message_type = github_dot_com_dot_metaprov_dot_modelaapi_dot_pkg_dot_apis_dot_training_dot_v1alpha1_dot_generated__pb2._MODELAUTOBUILDERLIST
_CREATEMODELAUTOBUILDERREQUEST.fields_by_name['item'].message_type = github_dot_com_dot_metaprov_dot_modelaapi_dot_pkg_dot_apis_dot_training_dot_v1alpha1_dot_generated__pb2._MODELAUTOBUILDER
_UPDATEMODELAUTOBUILDERREQUEST.fields_by_name['item'].message_type = github_dot_com_dot_metaprov_dot_modelaapi_dot_pkg_dot_apis_dot_training_dot_v1alpha1_dot_generated__pb2._MODELAUTOBUILDER
_GETMODELAUTOBUILDERRESPONSE.fields_by_name['item'].message_type = github_dot_com_dot_metaprov_dot_modelaapi_dot_pkg_dot_apis_dot_training_dot_v1alpha1_dot_generated__pb2._MODELAUTOBUILDER
DESCRIPTOR.message_types_by_name['ListModelAutobuildersRequest'] = _LISTMODELAUTOBUILDERSREQUEST
DESCRIPTOR.message_types_by_name['ListModelAutobuildersResponse'] = _LISTMODELAUTOBUILDERSRESPONSE
DESCRIPTOR.message_types_by_name['CreateModelAutobuilderRequest'] = _CREATEMODELAUTOBUILDERREQUEST
DESCRIPTOR.message_types_by_name['CreateModelAutobuilderResponse'] = _CREATEMODELAUTOBUILDERRESPONSE
DESCRIPTOR.message_types_by_name['UpdateModelAutobuilderRequest'] = _UPDATEMODELAUTOBUILDERREQUEST
DESCRIPTOR.message_types_by_name['UpdateModelAutobuilderResponse'] = _UPDATEMODELAUTOBUILDERRESPONSE
DESCRIPTOR.message_types_by_name['GetModelAutobuilderRequest'] = _GETMODELAUTOBUILDERREQUEST
DESCRIPTOR.message_types_by_name['GetModelAutobuilderResponse'] = _GETMODELAUTOBUILDERRESPONSE
DESCRIPTOR.message_types_by_name['DeleteModelAutobuilderRequest'] = _DELETEMODELAUTOBUILDERREQUEST
DESCRIPTOR.message_types_by_name['DeleteModelAutobuilderResponse'] = _DELETEMODELAUTOBUILDERRESPONSE
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
ListModelAutobuildersRequest = _reflection.GeneratedProtocolMessageType('ListModelAutobuildersRequest', (_message.Message,), {
'LabelsEntry' : _reflection.GeneratedProtocolMessageType('LabelsEntry', (_message.Message,), {
'DESCRIPTOR' : _LISTMODELAUTOBUILDERSREQUEST_LABELSENTRY,
'__module__' : 'github.com.metaprov.modelaapi.services.modelautobuilder.v1.modelautobuilder_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.services.modelautobuilder.v1.ListModelAutobuildersRequest.LabelsEntry)
})
,
'DESCRIPTOR' : _LISTMODELAUTOBUILDERSREQUEST,
'__module__' : 'github.com.metaprov.modelaapi.services.modelautobuilder.v1.modelautobuilder_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.services.modelautobuilder.v1.ListModelAutobuildersRequest)
})
_sym_db.RegisterMessage(ListModelAutobuildersRequest)
_sym_db.RegisterMessage(ListModelAutobuildersRequest.LabelsEntry)
ListModelAutobuildersResponse = _reflection.GeneratedProtocolMessageType('ListModelAutobuildersResponse', (_message.Message,), {
'DESCRIPTOR' : _LISTMODELAUTOBUILDERSRESPONSE,
'__module__' : 'github.com.metaprov.modelaapi.services.modelautobuilder.v1.modelautobuilder_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.services.modelautobuilder.v1.ListModelAutobuildersResponse)
})
_sym_db.RegisterMessage(ListModelAutobuildersResponse)
CreateModelAutobuilderRequest = _reflection.GeneratedProtocolMessageType('CreateModelAutobuilderRequest', (_message.Message,), {
'DESCRIPTOR' : _CREATEMODELAUTOBUILDERREQUEST,
'__module__' : 'github.com.metaprov.modelaapi.services.modelautobuilder.v1.modelautobuilder_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.services.modelautobuilder.v1.CreateModelAutobuilderRequest)
})
_sym_db.RegisterMessage(CreateModelAutobuilderRequest)
CreateModelAutobuilderResponse = _reflection.GeneratedProtocolMessageType('CreateModelAutobuilderResponse', (_message.Message,), {
'DESCRIPTOR' : _CREATEMODELAUTOBUILDERRESPONSE,
'__module__' : 'github.com.metaprov.modelaapi.services.modelautobuilder.v1.modelautobuilder_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.services.modelautobuilder.v1.CreateModelAutobuilderResponse)
})
_sym_db.RegisterMessage(CreateModelAutobuilderResponse)
UpdateModelAutobuilderRequest = _reflection.GeneratedProtocolMessageType('UpdateModelAutobuilderRequest', (_message.Message,), {
'DESCRIPTOR' : _UPDATEMODELAUTOBUILDERREQUEST,
'__module__' : 'github.com.metaprov.modelaapi.services.modelautobuilder.v1.modelautobuilder_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.services.modelautobuilder.v1.UpdateModelAutobuilderRequest)
})
_sym_db.RegisterMessage(UpdateModelAutobuilderRequest)
UpdateModelAutobuilderResponse = _reflection.GeneratedProtocolMessageType('UpdateModelAutobuilderResponse', (_message.Message,), {
'DESCRIPTOR' : _UPDATEMODELAUTOBUILDERRESPONSE,
'__module__' : 'github.com.metaprov.modelaapi.services.modelautobuilder.v1.modelautobuilder_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.services.modelautobuilder.v1.UpdateModelAutobuilderResponse)
})
_sym_db.RegisterMessage(UpdateModelAutobuilderResponse)
GetModelAutobuilderRequest = _reflection.GeneratedProtocolMessageType('GetModelAutobuilderRequest', (_message.Message,), {
'DESCRIPTOR' : _GETMODELAUTOBUILDERREQUEST,
'__module__' : 'github.com.metaprov.modelaapi.services.modelautobuilder.v1.modelautobuilder_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.services.modelautobuilder.v1.GetModelAutobuilderRequest)
})
_sym_db.RegisterMessage(GetModelAutobuilderRequest)
GetModelAutobuilderResponse = _reflection.GeneratedProtocolMessageType('GetModelAutobuilderResponse', (_message.Message,), {
'DESCRIPTOR' : _GETMODELAUTOBUILDERRESPONSE,
'__module__' : 'github.com.metaprov.modelaapi.services.modelautobuilder.v1.modelautobuilder_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.services.modelautobuilder.v1.GetModelAutobuilderResponse)
})
_sym_db.RegisterMessage(GetModelAutobuilderResponse)
DeleteModelAutobuilderRequest = _reflection.GeneratedProtocolMessageType('DeleteModelAutobuilderRequest', (_message.Message,), {
'DESCRIPTOR' : _DELETEMODELAUTOBUILDERREQUEST,
'__module__' : 'github.com.metaprov.modelaapi.services.modelautobuilder.v1.modelautobuilder_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.services.modelautobuilder.v1.DeleteModelAutobuilderRequest)
})
_sym_db.RegisterMessage(DeleteModelAutobuilderRequest)
DeleteModelAutobuilderResponse = _reflection.GeneratedProtocolMessageType('DeleteModelAutobuilderResponse', (_message.Message,), {
'DESCRIPTOR' : _DELETEMODELAUTOBUILDERRESPONSE,
'__module__' : 'github.com.metaprov.modelaapi.services.modelautobuilder.v1.modelautobuilder_pb2'
# @@protoc_insertion_point(class_scope:github.com.metaprov.modelaapi.services.modelautobuilder.v1.DeleteModelAutobuilderResponse)
})
_sym_db.RegisterMessage(DeleteModelAutobuilderResponse)
DESCRIPTOR._options = None
_LISTMODELAUTOBUILDERSREQUEST_LABELSENTRY._options = None
_MODELAUTOBUILDERSERVICE = _descriptor.ServiceDescriptor(
name='ModelAutobuilderService',
full_name='github.com.metaprov.modelaapi.services.modelautobuilder.v1.ModelAutobuilderService',
file=DESCRIPTOR,
index=0,
serialized_options=None,
create_key=_descriptor._internal_create_key,
serialized_start=1208,
serialized_end=2529,
methods=[
_descriptor.MethodDescriptor(
name='ListModelAutobuilders',
full_name='github.com.metaprov.modelaapi.services.modelautobuilder.v1.ModelAutobuilderService.ListModelAutobuilders',
index=0,
containing_service=None,
input_type=_LISTMODELAUTOBUILDERSREQUEST,
output_type=_LISTMODELAUTOBUILDERSRESPONSE,
serialized_options=b'\202\323\344\223\002#\022!/v1/modelautobuilders/{namespace}',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='CreateModelAutobuilder',
full_name='github.com.metaprov.modelaapi.services.modelautobuilder.v1.ModelAutobuilderService.CreateModelAutobuilder',
index=1,
containing_service=None,
input_type=_CREATEMODELAUTOBUILDERREQUEST,
output_type=_CREATEMODELAUTOBUILDERRESPONSE,
serialized_options=b'\202\323\344\223\002\032\"\025/v1/modelautobuilders:\001*',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='GetModelAutobuilder',
full_name='github.com.metaprov.modelaapi.services.modelautobuilder.v1.ModelAutobuilderService.GetModelAutobuilder',
index=2,
containing_service=None,
input_type=_GETMODELAUTOBUILDERREQUEST,
output_type=_GETMODELAUTOBUILDERRESPONSE,
serialized_options=b'\202\323\344\223\002*\022(/v1/modelautobuilders/{namespace}/{name}',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='UpdateModelAutobuilder',
full_name='github.com.metaprov.modelaapi.services.modelautobuilder.v1.ModelAutobuilderService.UpdateModelAutobuilder',
index=3,
containing_service=None,
input_type=_UPDATEMODELAUTOBUILDERREQUEST,
output_type=_UPDATEMODELAUTOBUILDERRESPONSE,
serialized_options=b'\202\323\344\223\002I\032D/v1/modelautobuilders/{item.metadata.namespace}/{item.metadata.name}:\001*',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='DeleteModelAutobuilder',
full_name='github.com.metaprov.modelaapi.services.modelautobuilder.v1.ModelAutobuilderService.DeleteModelAutobuilder',
index=4,
containing_service=None,
input_type=_DELETEMODELAUTOBUILDERREQUEST,
output_type=_DELETEMODELAUTOBUILDERRESPONSE,
serialized_options=b'\202\323\344\223\002**(/v1/modelautobuilders/{namespace}/{name}',
create_key=_descriptor._internal_create_key,
),
])
_sym_db.RegisterServiceDescriptor(_MODELAUTOBUILDERSERVICE)
DESCRIPTOR.services_by_name['ModelAutobuilderService'] = _MODELAUTOBUILDERSERVICE
# @@protoc_insertion_point(module_scope)
| 48.092692 | 3,212 | 0.807969 |
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
_sym_db = _symbol_database.Default()
from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
from github.com.metaprov.modelaapi.pkg.apis.training.v1alpha1 import generated_pb2 as github_dot_com_dot_metaprov_dot_modelaapi_dot_pkg_dot_apis_dot_training_dot_v1alpha1_dot_generated__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='github.com/metaprov/modelaapi/services/modelautobuilder/v1/modelautobuilder.proto',
package='github.com.metaprov.modelaapi.services.modelautobuilder.v1',
syntax='proto3',
serialized_options=b'Z:github.com/metaprov/modelaapi/services/modelautobuilder/v1',
create_key=_descriptor._internal_create_key,
serialized_pb=b'\nQgithub.com/metaprov/modelaapi/services/modelautobuilder/v1/modelautobuilder.proto\x12:github.com.metaprov.modelaapi.services.modelautobuilder.v1\x1a\x1cgoogle/api/annotations.proto\x1aHgithub.com/metaprov/modelaapi/pkg/apis/training/v1alpha1/generated.proto\"\xd6\x01\n\x1cListModelAutobuildersRequest\x12\x11\n\tnamespace\x18\x01 \x01(\t\x12t\n\x06labels\x18\x02 \x03(\x0b\x32\x64.github.com.metaprov.modelaapi.services.modelautobuilder.v1.ListModelAutobuildersRequest.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"~\n\x1dListModelAutobuildersResponse\x12]\n\x05items\x18\x01 \x01(\x0b\x32N.github.com.metaprov.modelaapi.pkg.apis.training.v1alpha1.ModelAutobuilderList\"y\n\x1d\x43reateModelAutobuilderRequest\x12X\n\x04item\x18\x01 \x01(\x0b\x32J.github.com.metaprov.modelaapi.pkg.apis.training.v1alpha1.ModelAutobuilder\" \n\x1e\x43reateModelAutobuilderResponse\"y\n\x1dUpdateModelAutobuilderRequest\x12X\n\x04item\x18\x01 \x01(\x0b\x32J.github.com.metaprov.modelaapi.pkg.apis.training.v1alpha1.ModelAutobuilder\" \n\x1eUpdateModelAutobuilderResponse\"=\n\x1aGetModelAutobuilderRequest\x12\x11\n\tnamespace\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\"\x85\x01\n\x1bGetModelAutobuilderResponse\x12X\n\x04item\x18\x01 \x01(\x0b\x32J.github.com.metaprov.modelaapi.pkg.apis.training.v1alpha1.ModelAutobuilder\x12\x0c\n\x04yaml\x18\x02 \x01(\t\"@\n\x1d\x44\x65leteModelAutobuilderRequest\x12\x11\n\tnamespace\x18\x01 \x01(\t\x12\x0c\n\x04name\x18\x02 \x01(\t\" 
\n\x1e\x44\x65leteModelAutobuilderResponse2\xa9\n\n\x17ModelAutobuilderService\x12\xf7\x01\n\x15ListModelAutobuilders\x12X.github.com.metaprov.modelaapi.services.modelautobuilder.v1.ListModelAutobuildersRequest\x1aY.github.com.metaprov.modelaapi.services.modelautobuilder.v1.ListModelAutobuildersResponse\")\x82\xd3\xe4\x93\x02#\x12!/v1/modelautobuilders/{namespace}\x12\xf1\x01\n\x16\x43reateModelAutobuilder\x12Y.github.com.metaprov.modelaapi.services.modelautobuilder.v1.CreateModelAutobuilderRequest\x1aZ.github.com.metaprov.modelaapi.services.modelautobuilder.v1.CreateModelAutobuilderResponse\" \x82\xd3\xe4\x93\x02\x1a\"\x15/v1/modelautobuilders:\x01*\x12\xf8\x01\n\x13GetModelAutobuilder\x12V.github.com.metaprov.modelaapi.services.modelautobuilder.v1.GetModelAutobuilderRequest\x1aW.github.com.metaprov.modelaapi.services.modelautobuilder.v1.GetModelAutobuilderResponse\"0\x82\xd3\xe4\x93\x02*\x12(/v1/modelautobuilders/{namespace}/{name}\x12\xa0\x02\n\x16UpdateModelAutobuilder\x12Y.github.com.metaprov.modelaapi.services.modelautobuilder.v1.UpdateModelAutobuilderRequest\x1aZ.github.com.metaprov.modelaapi.services.modelautobuilder.v1.UpdateModelAutobuilderResponse\"O\x82\xd3\xe4\x93\x02I\x1a\x44/v1/modelautobuilders/{item.metadata.namespace}/{item.metadata.name}:\x01*\x12\x81\x02\n\x16\x44\x65leteModelAutobuilder\x12Y.github.com.metaprov.modelaapi.services.modelautobuilder.v1.DeleteModelAutobuilderRequest\x1aZ.github.com.metaprov.modelaapi.services.modelautobuilder.v1.DeleteModelAutobuilderResponse\"0\x82\xd3\xe4\x93\x02**(/v1/modelautobuilders/{namespace}/{name}B<Z:github.com/metaprov/modelaapi/services/modelautobuilder/v1b\x06proto3'
,
dependencies=[google_dot_api_dot_annotations__pb2.DESCRIPTOR,github_dot_com_dot_metaprov_dot_modelaapi_dot_pkg_dot_apis_dot_training_dot_v1alpha1_dot_generated__pb2.DESCRIPTOR,])
_LISTMODELAUTOBUILDERSREQUEST_LABELSENTRY = _descriptor.Descriptor(
name='LabelsEntry',
full_name='github.com.metaprov.modelaapi.services.modelautobuilder.v1.ListModelAutobuildersRequest.LabelsEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='github.com.metaprov.modelaapi.services.modelautobuilder.v1.ListModelAutobuildersRequest.LabelsEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='value', full_name='github.com.metaprov.modelaapi.services.modelautobuilder.v1.ListModelAutobuildersRequest.LabelsEntry.value', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=b'8\001',
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=419,
serialized_end=464,
)
_LISTMODELAUTOBUILDERSREQUEST = _descriptor.Descriptor(
name='ListModelAutobuildersRequest',
full_name='github.com.metaprov.modelaapi.services.modelautobuilder.v1.ListModelAutobuildersRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='namespace', full_name='github.com.metaprov.modelaapi.services.modelautobuilder.v1.ListModelAutobuildersRequest.namespace', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='labels', full_name='github.com.metaprov.modelaapi.services.modelautobuilder.v1.ListModelAutobuildersRequest.labels', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[_LISTMODELAUTOBUILDERSREQUEST_LABELSENTRY, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=250,
serialized_end=464,
)
_LISTMODELAUTOBUILDERSRESPONSE = _descriptor.Descriptor(
name='ListModelAutobuildersResponse',
full_name='github.com.metaprov.modelaapi.services.modelautobuilder.v1.ListModelAutobuildersResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='items', full_name='github.com.metaprov.modelaapi.services.modelautobuilder.v1.ListModelAutobuildersResponse.items', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=466,
serialized_end=592,
)
_CREATEMODELAUTOBUILDERREQUEST = _descriptor.Descriptor(
name='CreateModelAutobuilderRequest',
full_name='github.com.metaprov.modelaapi.services.modelautobuilder.v1.CreateModelAutobuilderRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='item', full_name='github.com.metaprov.modelaapi.services.modelautobuilder.v1.CreateModelAutobuilderRequest.item', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=594,
serialized_end=715,
)
_CREATEMODELAUTOBUILDERRESPONSE = _descriptor.Descriptor(
name='CreateModelAutobuilderResponse',
full_name='github.com.metaprov.modelaapi.services.modelautobuilder.v1.CreateModelAutobuilderResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=717,
serialized_end=749,
)
_UPDATEMODELAUTOBUILDERREQUEST = _descriptor.Descriptor(
name='UpdateModelAutobuilderRequest',
full_name='github.com.metaprov.modelaapi.services.modelautobuilder.v1.UpdateModelAutobuilderRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='item', full_name='github.com.metaprov.modelaapi.services.modelautobuilder.v1.UpdateModelAutobuilderRequest.item', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=751,
serialized_end=872,
)
_UPDATEMODELAUTOBUILDERRESPONSE = _descriptor.Descriptor(
name='UpdateModelAutobuilderResponse',
full_name='github.com.metaprov.modelaapi.services.modelautobuilder.v1.UpdateModelAutobuilderResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=874,
serialized_end=906,
)
_GETMODELAUTOBUILDERREQUEST = _descriptor.Descriptor(
name='GetModelAutobuilderRequest',
full_name='github.com.metaprov.modelaapi.services.modelautobuilder.v1.GetModelAutobuilderRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='namespace', full_name='github.com.metaprov.modelaapi.services.modelautobuilder.v1.GetModelAutobuilderRequest.namespace', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='name', full_name='github.com.metaprov.modelaapi.services.modelautobuilder.v1.GetModelAutobuilderRequest.name', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=908,
serialized_end=969,
)
_GETMODELAUTOBUILDERRESPONSE = _descriptor.Descriptor(
name='GetModelAutobuilderResponse',
full_name='github.com.metaprov.modelaapi.services.modelautobuilder.v1.GetModelAutobuilderResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='item', full_name='github.com.metaprov.modelaapi.services.modelautobuilder.v1.GetModelAutobuilderResponse.item', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='yaml', full_name='github.com.metaprov.modelaapi.services.modelautobuilder.v1.GetModelAutobuilderResponse.yaml', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=972,
serialized_end=1105,
)
_DELETEMODELAUTOBUILDERREQUEST = _descriptor.Descriptor(
name='DeleteModelAutobuilderRequest',
full_name='github.com.metaprov.modelaapi.services.modelautobuilder.v1.DeleteModelAutobuilderRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='namespace', full_name='github.com.metaprov.modelaapi.services.modelautobuilder.v1.DeleteModelAutobuilderRequest.namespace', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='name', full_name='github.com.metaprov.modelaapi.services.modelautobuilder.v1.DeleteModelAutobuilderRequest.name', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1107,
serialized_end=1171,
)
_DELETEMODELAUTOBUILDERRESPONSE = _descriptor.Descriptor(
name='DeleteModelAutobuilderResponse',
full_name='github.com.metaprov.modelaapi.services.modelautobuilder.v1.DeleteModelAutobuilderResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1173,
serialized_end=1205,
)
_LISTMODELAUTOBUILDERSREQUEST_LABELSENTRY.containing_type = _LISTMODELAUTOBUILDERSREQUEST
_LISTMODELAUTOBUILDERSREQUEST.fields_by_name['labels'].message_type = _LISTMODELAUTOBUILDERSREQUEST_LABELSENTRY
_LISTMODELAUTOBUILDERSRESPONSE.fields_by_name['items'].message_type = github_dot_com_dot_metaprov_dot_modelaapi_dot_pkg_dot_apis_dot_training_dot_v1alpha1_dot_generated__pb2._MODELAUTOBUILDERLIST
_CREATEMODELAUTOBUILDERREQUEST.fields_by_name['item'].message_type = github_dot_com_dot_metaprov_dot_modelaapi_dot_pkg_dot_apis_dot_training_dot_v1alpha1_dot_generated__pb2._MODELAUTOBUILDER
_UPDATEMODELAUTOBUILDERREQUEST.fields_by_name['item'].message_type = github_dot_com_dot_metaprov_dot_modelaapi_dot_pkg_dot_apis_dot_training_dot_v1alpha1_dot_generated__pb2._MODELAUTOBUILDER
_GETMODELAUTOBUILDERRESPONSE.fields_by_name['item'].message_type = github_dot_com_dot_metaprov_dot_modelaapi_dot_pkg_dot_apis_dot_training_dot_v1alpha1_dot_generated__pb2._MODELAUTOBUILDER
DESCRIPTOR.message_types_by_name['ListModelAutobuildersRequest'] = _LISTMODELAUTOBUILDERSREQUEST
DESCRIPTOR.message_types_by_name['ListModelAutobuildersResponse'] = _LISTMODELAUTOBUILDERSRESPONSE
DESCRIPTOR.message_types_by_name['CreateModelAutobuilderRequest'] = _CREATEMODELAUTOBUILDERREQUEST
DESCRIPTOR.message_types_by_name['CreateModelAutobuilderResponse'] = _CREATEMODELAUTOBUILDERRESPONSE
DESCRIPTOR.message_types_by_name['UpdateModelAutobuilderRequest'] = _UPDATEMODELAUTOBUILDERREQUEST
DESCRIPTOR.message_types_by_name['UpdateModelAutobuilderResponse'] = _UPDATEMODELAUTOBUILDERRESPONSE
DESCRIPTOR.message_types_by_name['GetModelAutobuilderRequest'] = _GETMODELAUTOBUILDERREQUEST
DESCRIPTOR.message_types_by_name['GetModelAutobuilderResponse'] = _GETMODELAUTOBUILDERRESPONSE
DESCRIPTOR.message_types_by_name['DeleteModelAutobuilderRequest'] = _DELETEMODELAUTOBUILDERREQUEST
DESCRIPTOR.message_types_by_name['DeleteModelAutobuilderResponse'] = _DELETEMODELAUTOBUILDERRESPONSE
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
ListModelAutobuildersRequest = _reflection.GeneratedProtocolMessageType('ListModelAutobuildersRequest', (_message.Message,), {
'LabelsEntry' : _reflection.GeneratedProtocolMessageType('LabelsEntry', (_message.Message,), {
'DESCRIPTOR' : _LISTMODELAUTOBUILDERSREQUEST_LABELSENTRY,
'__module__' : 'github.com.metaprov.modelaapi.services.modelautobuilder.v1.modelautobuilder_pb2'
})
,
'DESCRIPTOR' : _LISTMODELAUTOBUILDERSREQUEST,
'__module__' : 'github.com.metaprov.modelaapi.services.modelautobuilder.v1.modelautobuilder_pb2'
})
_sym_db.RegisterMessage(ListModelAutobuildersRequest)
_sym_db.RegisterMessage(ListModelAutobuildersRequest.LabelsEntry)
ListModelAutobuildersResponse = _reflection.GeneratedProtocolMessageType('ListModelAutobuildersResponse', (_message.Message,), {
'DESCRIPTOR' : _LISTMODELAUTOBUILDERSRESPONSE,
'__module__' : 'github.com.metaprov.modelaapi.services.modelautobuilder.v1.modelautobuilder_pb2'
})
_sym_db.RegisterMessage(ListModelAutobuildersResponse)
CreateModelAutobuilderRequest = _reflection.GeneratedProtocolMessageType('CreateModelAutobuilderRequest', (_message.Message,), {
'DESCRIPTOR' : _CREATEMODELAUTOBUILDERREQUEST,
'__module__' : 'github.com.metaprov.modelaapi.services.modelautobuilder.v1.modelautobuilder_pb2'
})
_sym_db.RegisterMessage(CreateModelAutobuilderRequest)
CreateModelAutobuilderResponse = _reflection.GeneratedProtocolMessageType('CreateModelAutobuilderResponse', (_message.Message,), {
'DESCRIPTOR' : _CREATEMODELAUTOBUILDERRESPONSE,
'__module__' : 'github.com.metaprov.modelaapi.services.modelautobuilder.v1.modelautobuilder_pb2'
})
_sym_db.RegisterMessage(CreateModelAutobuilderResponse)
UpdateModelAutobuilderRequest = _reflection.GeneratedProtocolMessageType('UpdateModelAutobuilderRequest', (_message.Message,), {
'DESCRIPTOR' : _UPDATEMODELAUTOBUILDERREQUEST,
'__module__' : 'github.com.metaprov.modelaapi.services.modelautobuilder.v1.modelautobuilder_pb2'
})
_sym_db.RegisterMessage(UpdateModelAutobuilderRequest)
UpdateModelAutobuilderResponse = _reflection.GeneratedProtocolMessageType('UpdateModelAutobuilderResponse', (_message.Message,), {
'DESCRIPTOR' : _UPDATEMODELAUTOBUILDERRESPONSE,
'__module__' : 'github.com.metaprov.modelaapi.services.modelautobuilder.v1.modelautobuilder_pb2'
})
_sym_db.RegisterMessage(UpdateModelAutobuilderResponse)
GetModelAutobuilderRequest = _reflection.GeneratedProtocolMessageType('GetModelAutobuilderRequest', (_message.Message,), {
'DESCRIPTOR' : _GETMODELAUTOBUILDERREQUEST,
'__module__' : 'github.com.metaprov.modelaapi.services.modelautobuilder.v1.modelautobuilder_pb2'
})
_sym_db.RegisterMessage(GetModelAutobuilderRequest)
GetModelAutobuilderResponse = _reflection.GeneratedProtocolMessageType('GetModelAutobuilderResponse', (_message.Message,), {
'DESCRIPTOR' : _GETMODELAUTOBUILDERRESPONSE,
'__module__' : 'github.com.metaprov.modelaapi.services.modelautobuilder.v1.modelautobuilder_pb2'
})
_sym_db.RegisterMessage(GetModelAutobuilderResponse)
DeleteModelAutobuilderRequest = _reflection.GeneratedProtocolMessageType('DeleteModelAutobuilderRequest', (_message.Message,), {
'DESCRIPTOR' : _DELETEMODELAUTOBUILDERREQUEST,
'__module__' : 'github.com.metaprov.modelaapi.services.modelautobuilder.v1.modelautobuilder_pb2'
})
_sym_db.RegisterMessage(DeleteModelAutobuilderRequest)
DeleteModelAutobuilderResponse = _reflection.GeneratedProtocolMessageType('DeleteModelAutobuilderResponse', (_message.Message,), {
'DESCRIPTOR' : _DELETEMODELAUTOBUILDERRESPONSE,
'__module__' : 'github.com.metaprov.modelaapi.services.modelautobuilder.v1.modelautobuilder_pb2'
})
_sym_db.RegisterMessage(DeleteModelAutobuilderResponse)
DESCRIPTOR._options = None
_LISTMODELAUTOBUILDERSREQUEST_LABELSENTRY._options = None
_MODELAUTOBUILDERSERVICE = _descriptor.ServiceDescriptor(
name='ModelAutobuilderService',
full_name='github.com.metaprov.modelaapi.services.modelautobuilder.v1.ModelAutobuilderService',
file=DESCRIPTOR,
index=0,
serialized_options=None,
create_key=_descriptor._internal_create_key,
serialized_start=1208,
serialized_end=2529,
methods=[
_descriptor.MethodDescriptor(
name='ListModelAutobuilders',
full_name='github.com.metaprov.modelaapi.services.modelautobuilder.v1.ModelAutobuilderService.ListModelAutobuilders',
index=0,
containing_service=None,
input_type=_LISTMODELAUTOBUILDERSREQUEST,
output_type=_LISTMODELAUTOBUILDERSRESPONSE,
serialized_options=b'\202\323\344\223\002#\022!/v1/modelautobuilders/{namespace}',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='CreateModelAutobuilder',
full_name='github.com.metaprov.modelaapi.services.modelautobuilder.v1.ModelAutobuilderService.CreateModelAutobuilder',
index=1,
containing_service=None,
input_type=_CREATEMODELAUTOBUILDERREQUEST,
output_type=_CREATEMODELAUTOBUILDERRESPONSE,
serialized_options=b'\202\323\344\223\002\032\"\025/v1/modelautobuilders:\001*',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='GetModelAutobuilder',
full_name='github.com.metaprov.modelaapi.services.modelautobuilder.v1.ModelAutobuilderService.GetModelAutobuilder',
index=2,
containing_service=None,
input_type=_GETMODELAUTOBUILDERREQUEST,
output_type=_GETMODELAUTOBUILDERRESPONSE,
serialized_options=b'\202\323\344\223\002*\022(/v1/modelautobuilders/{namespace}/{name}',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='UpdateModelAutobuilder',
full_name='github.com.metaprov.modelaapi.services.modelautobuilder.v1.ModelAutobuilderService.UpdateModelAutobuilder',
index=3,
containing_service=None,
input_type=_UPDATEMODELAUTOBUILDERREQUEST,
output_type=_UPDATEMODELAUTOBUILDERRESPONSE,
serialized_options=b'\202\323\344\223\002I\032D/v1/modelautobuilders/{item.metadata.namespace}/{item.metadata.name}:\001*',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='DeleteModelAutobuilder',
full_name='github.com.metaprov.modelaapi.services.modelautobuilder.v1.ModelAutobuilderService.DeleteModelAutobuilder',
index=4,
containing_service=None,
input_type=_DELETEMODELAUTOBUILDERREQUEST,
output_type=_DELETEMODELAUTOBUILDERRESPONSE,
serialized_options=b'\202\323\344\223\002**(/v1/modelautobuilders/{namespace}/{name}',
create_key=_descriptor._internal_create_key,
),
])
_sym_db.RegisterServiceDescriptor(_MODELAUTOBUILDERSERVICE)
DESCRIPTOR.services_by_name['ModelAutobuilderService'] = _MODELAUTOBUILDERSERVICE
# @@protoc_insertion_point(module_scope)
| true | true |
f720f1e95b326e40c9aeac42acdf9e1f3addaa58 | 753 | py | Python | tests/instructions/test_tfr.py | rob-smallshire/asm68 | a9bbb99e7a7fbbe7656815df488c74606d08b252 | [
"X11"
] | null | null | null | tests/instructions/test_tfr.py | rob-smallshire/asm68 | a9bbb99e7a7fbbe7656815df488c74606d08b252 | [
"X11"
] | null | null | null | tests/instructions/test_tfr.py | rob-smallshire/asm68 | a9bbb99e7a7fbbe7656815df488c74606d08b252 | [
"X11"
] | 1 | 2018-05-08T11:03:22.000Z | 2018-05-08T11:03:22.000Z | from asm68.registers import *
from asm68.mnemonics import TFR
from asm68.asmdsl import AsmDsl, statements
from asm68.assembler import assemble, InterRegisterError
from helpers.code import check_object_code
from pytest import raises
def test_tfr_a_a():
check_object_code('1F 88', TFR, (A, A))
def test_tfr_a_b():
check_object_code('1F 89', TFR, (A, B))
def test_tfr_x_y():
check_object_code('1F 12', TFR, (X, Y))
def test_tfr_md_a_raises_inter_register_error():
asm = AsmDsl()
asm(TFR, (MD, A))
with raises(InterRegisterError):
assemble(statements(asm))
def test_tfr_s_z_raises_inter_register_error():
asm = AsmDsl()
asm(TFR, (S, Q))
with raises(InterRegisterError):
assemble(statements(asm)) | 23.53125 | 56 | 0.718459 | from asm68.registers import *
from asm68.mnemonics import TFR
from asm68.asmdsl import AsmDsl, statements
from asm68.assembler import assemble, InterRegisterError
from helpers.code import check_object_code
from pytest import raises
def test_tfr_a_a():
check_object_code('1F 88', TFR, (A, A))
def test_tfr_a_b():
check_object_code('1F 89', TFR, (A, B))
def test_tfr_x_y():
check_object_code('1F 12', TFR, (X, Y))
def test_tfr_md_a_raises_inter_register_error():
asm = AsmDsl()
asm(TFR, (MD, A))
with raises(InterRegisterError):
assemble(statements(asm))
def test_tfr_s_z_raises_inter_register_error():
asm = AsmDsl()
asm(TFR, (S, Q))
with raises(InterRegisterError):
assemble(statements(asm)) | true | true |
f720f269f987186e910ee271a51453fc316eb7d7 | 4,231 | py | Python | tests/sender/cli.py | OvidiuMM/python-sdk | 8e5c4e5b00de1269f75d44e7614d2d8d5c934b3b | [
"MIT"
] | 2 | 2020-07-20T09:07:12.000Z | 2020-07-20T09:56:21.000Z | tests/sender/cli.py | OvidiuMM/python-sdk | 8e5c4e5b00de1269f75d44e7614d2d8d5c934b3b | [
"MIT"
] | null | null | null | tests/sender/cli.py | OvidiuMM/python-sdk | 8e5c4e5b00de1269f75d44e7614d2d8d5c934b3b | [
"MIT"
] | null | null | null | import unittest
import socket
from click.testing import CliRunner
from devo.common import Configuration
from devo.sender.scripts.sender_cli import data
from devo.sender import DevoSenderException
try:
from .load_certs import *
except ImportError:
from load_certs import *
class TestSender(unittest.TestCase):
    """End-to-end tests for the ``devo.sender`` CLI ``data`` command.

    Connection parameters can be overridden via ``DEVO_SENDER_*``
    environment variables; otherwise the test certificates from
    ``load_certs`` and a relay on 127.0.0.1 are used.  Most tests drive
    a live socket, so a reachable relay endpoint is required.
    """

    def setUp(self):
        # Target relay endpoints (SSL and plain TCP), env-overridable.
        self.address = os.getenv('DEVO_SENDER_SERVER', "127.0.0.1")
        self.port = int(os.getenv('DEVO_SENDER_PORT', 4488))
        self.tcp_address = os.getenv('DEVO_SENDER_TCP_SERVER', "127.0.0.1")
        self.tcp_port = int(os.getenv('DEVO_SENDER_TCP_PORT', 4489))
        # Client-side TLS material (defaults come from load_certs).
        self.key = os.getenv('DEVO_SENDER_KEY', CLIENT_KEY)
        self.cert = os.getenv('DEVO_SENDER_CERT', CLIENT_CERT)
        self.chain = os.getenv('DEVO_SENDER_CHAIN', CLIENT_CHAIN)
        # NOTE(review): this looks up an environment variable whose *name*
        # is the CLIENT_KEY value, so it is almost always None -- probably
        # meant os.getenv('DEVO_SENDER_LOCAL_KEY', CLIENT_KEY); confirm.
        self.local_key = os.getenv(CLIENT_KEY)
        self.test_tcp = os.getenv('DEVO_TEST_TCP', "True")
        # Tags and payload fixtures used by the send tests.
        self.my_app = 'test.drop.free'
        self.my_bapp = b'test.drop.free'
        self.my_date = 'my.date.test.sender'
        self.test_file = "".join((os.path.dirname(os.path.abspath(__file__)),
                                  os.sep, "testfile_multiline.txt"))
        self.test_msg = 'Test send msg\n'
        self.localhost = socket.gethostname()
        # change this value if you want to send another number of test string
        self.default_numbers_sendings = 10
        # Persist a sender configuration file so test_with_config_file can
        # drive the CLI purely from --config.
        configuration = Configuration()
        configuration.set("sender", {
            "key": self.key, "cert": self.cert, "chain": self.chain,
            "address": self.address, "port": self.port,
            "verify_mode": 0, "check_hostname": False
        })
        self.config_path = "/tmp/devo_sender_tests_config.json"
        configuration.save(path=self.config_path)

    def test_sender_args(self):
        """Invoking the CLI with no arguments must report a missing address."""
        runner = CliRunner()
        result = runner.invoke(data, [])
        self.assertIn('No address', result.stdout)

    def test_bad_address(self):
        """An unresolvable host must surface as a DevoSenderException."""
        runner = CliRunner()
        result = runner.invoke(data, ["--debug",
                                      "--address", self.address + "asd"])
        self.assertIsInstance(result.exception, DevoSenderException)
        self.assertIn("Name or service not known",
                      result.exception.args[0])

    def test_bad_certs(self):
        """Bad client key material against a real collector must fail TLS."""
        runner = CliRunner()
        result = runner.invoke(data, ["--debug",
                                      "--address",
                                      "collector-us.devo.io",
                                      "--port", "443",
                                      "--key", self.local_key,
                                      "--cert", self.cert,
                                      "--chain", self.chain,
                                      "--verify_mode", 0,
                                      '--check_hostname', False])
        self.assertIsInstance(result.exception, DevoSenderException)
        self.assertIn("SSL conn establishment socket error",
                      result.exception.args[0])

    def test_normal_send(self):
        """A single --line send must succeed and report a positive count."""
        runner = CliRunner()
        result = runner.invoke(data, ["--debug",
                                      "--address", self.address,
                                      "--port", self.port,
                                      "--key", self.key,
                                      "--cert", self.cert,
                                      "--chain", self.chain,
                                      "--tag", self.my_app,
                                      "--verify_mode", 0,
                                      '--check_hostname', False,
                                      "--line", "Test line"])
        self.assertIsNone(result.exception)
        # "Sended: N" (sic) is the CLI's debug summary line.
        self.assertGreater(int(result.output.split("Sended: ")[-1]), 0)

    def test_with_config_file(self):
        """The config file written in setUp must be enough to send data."""
        if self.config_path:
            runner = CliRunner()
            result = runner.invoke(data, ["--debug",
                                          "--config", self.config_path])
            self.assertIsNone(result.exception)
            self.assertGreater(int(result.output.split("Sended: ")[-1]), 0)
if __name__ == '__main__':
unittest.main()
| 40.295238 | 77 | 0.523044 | import unittest
import socket
from click.testing import CliRunner
from devo.common import Configuration
from devo.sender.scripts.sender_cli import data
from devo.sender import DevoSenderException
try:
from .load_certs import *
except ImportError:
from load_certs import *
class TestSender(unittest.TestCase):
def setUp(self):
self.address = os.getenv('DEVO_SENDER_SERVER', "127.0.0.1")
self.port = int(os.getenv('DEVO_SENDER_PORT', 4488))
self.tcp_address = os.getenv('DEVO_SENDER_TCP_SERVER', "127.0.0.1")
self.tcp_port = int(os.getenv('DEVO_SENDER_TCP_PORT', 4489))
self.key = os.getenv('DEVO_SENDER_KEY', CLIENT_KEY)
self.cert = os.getenv('DEVO_SENDER_CERT', CLIENT_CERT)
self.chain = os.getenv('DEVO_SENDER_CHAIN', CLIENT_CHAIN)
self.local_key = os.getenv(CLIENT_KEY)
self.test_tcp = os.getenv('DEVO_TEST_TCP', "True")
self.my_app = 'test.drop.free'
self.my_bapp = b'test.drop.free'
self.my_date = 'my.date.test.sender'
self.test_file = "".join((os.path.dirname(os.path.abspath(__file__)),
os.sep, "testfile_multiline.txt"))
self.test_msg = 'Test send msg\n'
self.localhost = socket.gethostname()
self.default_numbers_sendings = 10
configuration = Configuration()
configuration.set("sender", {
"key": self.key, "cert": self.cert, "chain": self.chain,
"address": self.address, "port": self.port,
"verify_mode": 0, "check_hostname": False
})
self.config_path = "/tmp/devo_sender_tests_config.json"
configuration.save(path=self.config_path)
def test_sender_args(self):
runner = CliRunner()
result = runner.invoke(data, [])
self.assertIn('No address', result.stdout)
def test_bad_address(self):
runner = CliRunner()
result = runner.invoke(data, ["--debug",
"--address", self.address + "asd"])
self.assertIsInstance(result.exception, DevoSenderException)
self.assertIn("Name or service not known",
result.exception.args[0])
def test_bad_certs(self):
runner = CliRunner()
result = runner.invoke(data, ["--debug",
"--address",
"collector-us.devo.io",
"--port", "443",
"--key", self.local_key,
"--cert", self.cert,
"--chain", self.chain,
"--verify_mode", 0,
'--check_hostname', False])
self.assertIsInstance(result.exception, DevoSenderException)
self.assertIn("SSL conn establishment socket error",
result.exception.args[0])
def test_normal_send(self):
runner = CliRunner()
result = runner.invoke(data, ["--debug",
"--address", self.address,
"--port", self.port,
"--key", self.key,
"--cert", self.cert,
"--chain", self.chain,
"--tag", self.my_app,
"--verify_mode", 0,
'--check_hostname', False,
"--line", "Test line"])
self.assertIsNone(result.exception)
self.assertGreater(int(result.output.split("Sended: ")[-1]), 0)
def test_with_config_file(self):
if self.config_path:
runner = CliRunner()
result = runner.invoke(data, ["--debug",
"--config", self.config_path])
self.assertIsNone(result.exception)
self.assertGreater(int(result.output.split("Sended: ")[-1]), 0)
if __name__ == '__main__':
unittest.main()
| true | true |
f720f373767dfe318e91d21f618da8dedddfa285 | 3,700 | py | Python | examples/poisson_test.py | intact-solutions/pysparse | f3dca3ae9d02ab3f49486fbae5d9d68059a318ab | [
"BSD-2-Clause-FreeBSD"
] | null | null | null | examples/poisson_test.py | intact-solutions/pysparse | f3dca3ae9d02ab3f49486fbae5d9d68059a318ab | [
"BSD-2-Clause-FreeBSD"
] | null | null | null | examples/poisson_test.py | intact-solutions/pysparse | f3dca3ae9d02ab3f49486fbae5d9d68059a318ab | [
"BSD-2-Clause-FreeBSD"
] | null | null | null | import numpy as np
import math
from pysparse.sparse import spmatrix
from pysparse.itsolvers.krylov import pcg, qmrs
from pysparse.precon import precon
import time
def poisson2d(n):
    """Assemble the 2-D Poisson operator on an n-by-n grid as an
    n^2 x n^2 general ll_mat using the standard 5-point stencil."""
    A = spmatrix.ll_mat(n*n, n*n)
    for k in range(n * n):
        i, j = k % n, k // n
        A[k, k] = 4          # stencil centre
        if i > 0:
            A[k, k - 1] = -1     # west neighbour
        if i < n - 1:
            A[k, k + 1] = -1     # east neighbour
        if j > 0:
            A[k, k - n] = -1     # south neighbour
        if j < n - 1:
            A[k, k + n] = -1     # north neighbour
    return A
def poisson2d_sym(n):
    """Assemble the 2-D Poisson operator in symmetric storage
    (only the lower triangle of the 5-point stencil is stored)."""
    A = spmatrix.ll_mat_sym(n*n)
    for k in range(n * n):
        i, j = k % n, k // n
        A[k, k] = 4          # diagonal entry
        if i > 0:
            A[k, k - 1] = -1     # west neighbour (lower triangle)
        if j > 0:
            A[k, k - n] = -1     # south neighbour (lower triangle)
    return A
def poisson2d_sym_blk(n):
    """Assemble the symmetric 2-D Poisson operator block-wise: a tridiagonal
    block on each diagonal position and a -identity block on each
    sub-diagonal position, inserted via slice assignment."""
    A = spmatrix.ll_mat_sym(n*n)
    neg_eye = spmatrix.ll_mat_sym(n)
    tridiag = spmatrix.ll_mat_sym(n)
    # Build the two n-by-n template blocks in a single pass.
    for d in range(n):
        neg_eye[d, d] = -1
        tridiag[d, d] = 4
        if d > 0:
            tridiag[d, d - 1] = -1
    # Tile the templates down the block diagonal / sub-diagonal.
    for start in range(0, n * n, n):
        A[start:start + n, start:start + n] = tridiag
        if start > 0:
            A[start:start + n, start - n:start] = neg_eye
    return A
# Problem size and PCG convergence tolerance.
tol = 1e-8
n = 100

# Fix: time.clock() was deprecated in Python 3.3 and removed in 3.8;
# time.perf_counter() is the documented replacement for benchmarking.
t1 = time.perf_counter()
L = poisson2d_sym_blk(n)
print('Time for constructing the matrix using poisson2d_sym_blk: %8.2f sec' % (time.perf_counter() - t1, ))

t1 = time.perf_counter()
L = poisson2d_sym(n)
print('Time for constructing the matrix using poisson2d_sym    : %8.2f sec' % (time.perf_counter() - t1, ))

t1 = time.perf_counter()
L = poisson2d(n)
print('Time for constructing the matrix using poisson2d        : %8.2f sec' % (time.perf_counter() - t1, ))

# Convert the linked-list matrix to the two compressed formats.
A = L.to_csr()
S = L.to_sss()
print(L.nnz)
print(S.nnz)
print(A.nnz)

b = np.ones(n*n, 'd')

# ---- PCG with SSS storage --------------------------------------------------
t1 = time.perf_counter()
x = np.empty(n*n, 'd')
# 'iter' renamed to num_iter to avoid shadowing the builtin.
info, num_iter, relres = pcg(S, b, x, tol, 2000)
print('info=%d, iter=%d, relres=%e' % (info, num_iter, relres))
print('Solve time using SSS matrix: %8.2f s' % (time.perf_counter() - t1))
print('norm(x) = %g' % np.linalg.norm(x))
r = np.empty(n*n, 'd')
S.matvec(x, r)
r = b - r
print('norm(b - A*x) = %g' % np.linalg.norm(r))
print(x[0:10])

# ---- PCG with CSR storage --------------------------------------------------
t1 = time.perf_counter()
x = np.empty(n*n, 'd')
info, num_iter, relres = pcg(A, b, x, tol, 2000)
print('info=%d, iter=%d, relres=%e' % (info, num_iter, relres))
print('Solve time using CSR matrix: %8.2f sec' % (time.perf_counter() - t1))
print('norm(x) = %g' % np.linalg.norm(x))
r = np.empty(n*n, 'd')
A.matvec(x, r)
r = b - r
print('norm(b - A*x) = %g' % np.linalg.norm(r))

# ---- PCG with LL (linked-list) storage -------------------------------------
t1 = time.perf_counter()
x = np.empty(n*n, 'd')
info, num_iter, relres = pcg(L, b, x, tol, 2000)
print('info=%d, iter=%d, relres=%e' % (info, num_iter, relres))
print('Solve time using LL matrix: %8.2f sec' % (time.perf_counter() - t1))
print('norm(x) = %g' % np.linalg.norm(x))
r = np.empty(n*n, 'd')
A.matvec(x, r)
r = b - r
print('norm(b - A*x) = %g' % np.linalg.norm(r))

# ---- PCG with SSS storage and SSOR preconditioning -------------------------
K_ssor = precon.ssor(S, 1.9)
t1 = time.perf_counter()
x = np.empty(n*n, 'd')
info, num_iter, relres = pcg(S, b, x, tol, 2000, K_ssor)
print('info=%d, iter=%d, relres=%e' % (info, num_iter, relres))
print('Solve time using SSS matrix and SSOR preconditioner: %8.2f sec' % (time.perf_counter() - t1))
print('norm(x) = %g' % np.linalg.norm(x))
r = np.empty(n*n, 'd')
S.matvec(x, r)
r = b - r
print('norm(b - A*x) = %g' % np.linalg.norm(r))

# ---- Smallest eigenpairs via Jacobi-Davidson -------------------------------
from pysparse.eigen import jdsym
jdsym.jdsym(S, None, None, 5, 0.0, 1e-8, 100, qmrs, clvl=1)
| 25 | 100 | 0.481081 | import numpy as np
import math
from pysparse.sparse import spmatrix
from pysparse.itsolvers.krylov import pcg, qmrs
from pysparse.precon import precon
import time
def poisson2d(n):
L = spmatrix.ll_mat(n*n, n*n)
for i in range(n):
for j in range(n):
k = i + n*j
L[k,k] = 4
if i > 0:
L[k,k-1] = -1
if i < n-1:
L[k,k+1] = -1
if j > 0:
L[k,k-n] = -1
if j < n-1:
L[k,k+n] = -1
return L
def poisson2d_sym(n):
L = spmatrix.ll_mat_sym(n*n)
for i in range(n):
for j in range(n):
k = i + n*j
L[k,k] = 4
if i > 0:
L[k,k-1] = -1
if j > 0:
L[k,k-n] = -1
return L
def poisson2d_sym_blk(n):
L = spmatrix.ll_mat_sym(n*n)
I = spmatrix.ll_mat_sym(n)
P = spmatrix.ll_mat_sym(n)
for i in range(n):
I[i,i] = -1
for i in range(n):
P[i,i] = 4
if i > 0: P[i,i-1] = -1
for i in range(0, n*n, n):
L[i:i+n,i:i+n] = P
if i > 0: L[i:i+n,i-n:i] = I
return L
tol = 1e-8
n = 100
t1 = time.clock()
L = poisson2d_sym_blk(n)
print('Time for constructing the matrix using poisson2d_sym_blk: %8.2f sec' % (time.clock() - t1, ))
t1 = time.clock()
L = poisson2d_sym(n)
print('Time for constructing the matrix using poisson2d_sym : %8.2f sec' % (time.clock() - t1, ))
t1 = time.clock()
L = poisson2d(n)
print('Time for constructing the matrix using poisson2d : %8.2f sec' % (time.clock() - t1, ))
A = L.to_csr()
S = L.to_sss()
print(L.nnz)
print(S.nnz)
print(A.nnz)
b = np.ones(n*n, 'd')
t1 = time.clock()
x = np.empty(n*n, 'd')
info, iter, relres = pcg(S, b, x, tol, 2000)
print('info=%d, iter=%d, relres=%e' % (info, iter, relres))
print('Solve time using SSS matrix: %8.2f s' % (time.clock() - t1))
print('norm(x) = %g' % np.linalg.norm(x))
r = np.empty(n*n, 'd')
S.matvec(x, r)
r = b - r
print('norm(b - A*x) = %g' % np.linalg.norm(r))
print(x[0:10])
t1 = time.clock()
x = np.empty(n*n, 'd')
info, iter, relres = pcg(A, b, x, tol, 2000)
print('info=%d, iter=%d, relres=%e' % (info, iter, relres))
print('Solve time using CSR matrix: %8.2f sec' % (time.clock() - t1))
print('norm(x) = %g' % np.linalg.norm(x))
r = np.empty(n*n, 'd')
A.matvec(x, r)
r = b - r
print('norm(b - A*x) = %g' % np.linalg.norm(r))
t1 = time.clock()
x = np.empty(n*n, 'd')
info, iter, relres = pcg(L, b, x, tol, 2000)
print('info=%d, iter=%d, relres=%e' % (info, iter, relres))
print('Solve time using LL matrix: %8.2f sec' % (time.clock() - t1))
print('norm(x) = %g' % np.linalg.norm(x))
r = np.empty(n*n, 'd')
A.matvec(x, r)
r = b - r
print('norm(b - A*x) = %g' % np.linalg.norm(r))
K_ssor = precon.ssor(S, 1.9)
t1 = time.clock()
x = np.empty(n*n, 'd')
info, iter, relres = pcg(S, b, x, tol, 2000, K_ssor)
print('info=%d, iter=%d, relres=%e' % (info, iter, relres))
print('Solve time using SSS matrix and SSOR preconditioner: %8.2f sec' % (time.clock() - t1))
print('norm(x) = %g' % np.linalg.norm(x))
r = np.empty(n*n, 'd')
S.matvec(x, r)
r = b - r
print('norm(b - A*x) = %g' % np.linalg.norm(r))
from pysparse.eigen import jdsym
jdsym.jdsym(S, None, None, 5, 0.0, 1e-8, 100, qmrs, clvl=1)
| true | true |
f720f3ad35136c86211956b945ba2de3bd65784c | 170 | py | Python | scripts/item/consume_2432355.py | Snewmy/swordie | ae01ed4ec0eb20a18730e8cd209eea0b84a8dd17 | [
"MIT"
] | null | null | null | scripts/item/consume_2432355.py | Snewmy/swordie | ae01ed4ec0eb20a18730e8cd209eea0b84a8dd17 | [
"MIT"
] | null | null | null | scripts/item/consume_2432355.py | Snewmy/swordie | ae01ed4ec0eb20a18730e8cd209eea0b84a8dd17 | [
"MIT"
] | null | null | null | # Snowflake Damage Skin
success = sm.addDamageSkin(2432355)
if success:
sm.chat("The Snowflake Damage Skin has been added to your account's damage skin collection.")
| 34 | 97 | 0.770588 |
success = sm.addDamageSkin(2432355)
if success:
sm.chat("The Snowflake Damage Skin has been added to your account's damage skin collection.")
| true | true |
f720f5d9454e5ea4b2e9262d909e29b9ee507501 | 1,314 | py | Python | app/core/tests/test_admin.py | royandri/recipe-app-api | 5eb7fd433946f6c25fb84d063a46173ee595adf5 | [
"MIT"
] | null | null | null | app/core/tests/test_admin.py | royandri/recipe-app-api | 5eb7fd433946f6c25fb84d063a46173ee595adf5 | [
"MIT"
] | null | null | null | app/core/tests/test_admin.py | royandri/recipe-app-api | 5eb7fd433946f6c25fb84d063a46173ee595adf5 | [
"MIT"
] | null | null | null | from django.test import TestCase, Client
from django.contrib.auth import get_user_model
from django.urls import reverse
class AdminSiteTests(TestCase):
    """Smoke-test the Django admin pages for the custom user model."""

    def setUp(self):
        # Client logged in as a superuser so every admin URL is reachable.
        self.client = Client()
        self.admin_user = get_user_model().objects.create_superuser(
            email='royandri.dev@gmail.com',
            password='admin'
        )
        self.client.force_login(self.admin_user)
        # Regular user the admin pages are inspected against.
        # Fix: the fixture email previously contained a stray leading space
        # (' test@mail.com'), which is not a valid address.
        self.user = get_user_model().objects.create_user(
            email='test@mail.com',
            password='admin',
            name='Test User'
        )

    def test_users_listed(self):
        # Test that users are listed on user page
        url = reverse('admin:core_user_changelist')
        res = self.client.get(url)
        self.assertContains(res, self.user.name)
        self.assertContains(res, self.user.email)

    def test_user_change_page(self):
        # Test that user edit pages works
        url = reverse('admin:core_user_change', args=[self.user.id])
        # /admin/core/user/1
        res = self.client.get(url)
        self.assertEqual(res.status_code, 200)

    def test_create_user_page(self):
        # Test that the create user page works
        url = reverse('admin:core_user_add')
        res = self.client.get(url)
        self.assertEqual(res.status_code, 200)
| 31.285714 | 68 | 0.637747 | from django.test import TestCase, Client
from django.contrib.auth import get_user_model
from django.urls import reverse
class AdminSiteTests(TestCase):
def setUp(self):
self.client = Client()
self.admin_user = get_user_model().objects.create_superuser(
email='royandri.dev@gmail.com',
password='admin'
)
self.client.force_login(self.admin_user)
self.user = get_user_model().objects.create_user(
email=' test@mail.com',
password='admin',
name='Test User'
)
def test_users_listed(self):
url = reverse('admin:core_user_changelist')
res = self.client.get(url)
self.assertContains(res, self.user.name)
self.assertContains(res, self.user.email)
def test_user_change_page(self):
url = reverse('admin:core_user_change', args=[self.user.id])
res = self.client.get(url)
self.assertEqual(res.status_code, 200)
def test_create_user_page(self):
url = reverse('admin:core_user_add')
res = self.client.get(url)
self.assertEqual(res.status_code, 200)
| true | true |
f720f7d7aa6b5c6b8450862f0abd2256a26a8136 | 58 | py | Python | www/speed/benchmarks/function_call.py | olemis/brython | 3ef4a602eed5a75130e507707579ad9aa2dc3e5c | [
"BSD-3-Clause"
] | 2 | 2018-06-09T15:29:48.000Z | 2019-11-13T09:15:08.000Z | www/speed/benchmarks/function_call.py | olemis/brython | 3ef4a602eed5a75130e507707579ad9aa2dc3e5c | [
"BSD-3-Clause"
] | 2 | 2017-04-14T03:52:41.000Z | 2017-04-14T04:02:06.000Z | client/components/ide/brython/www/speed/benchmarks/function_call.py | pascualy/coding_blind | 420947c61ec3cd0169d5a25f7b01ae6df9541607 | [
"MIT"
] | 2 | 2018-02-22T09:48:18.000Z | 2020-06-04T17:00:09.000Z | def f(x):
return x
# Hot loop: one million calls to measure pure function-call overhead.
for i in range(1000000):
    f(i)
| 9.666667 | 24 | 0.551724 | def f(x):
return x
for i in range(1000000):
f(i)
| true | true |
f720f8eccc250efd8c3d430ddb9ee9afde19d1ec | 4,224 | py | Python | lmctl/cli/commands/targets/behaviour_projects.py | manojn97/lmctl | 844925cb414722351efac90cb97f10c1185eef7a | [
"Apache-2.0"
] | 3 | 2021-07-19T09:46:01.000Z | 2022-03-07T13:51:25.000Z | lmctl/cli/commands/targets/behaviour_projects.py | manojn97/lmctl | 844925cb414722351efac90cb97f10c1185eef7a | [
"Apache-2.0"
] | 43 | 2019-08-27T12:36:29.000Z | 2020-08-27T14:50:40.000Z | lmctl/cli/commands/targets/behaviour_projects.py | manojn97/lmctl | 844925cb414722351efac90cb97f10c1185eef7a | [
"Apache-2.0"
] | 7 | 2020-09-22T20:32:17.000Z | 2022-03-29T12:25:51.000Z | import click
from typing import Dict
from lmctl.client import TNCOClient, TNCOClientHttpError
from lmctl.cli.arguments import common_output_format_handler
from lmctl.cli.format import Table, Column
from .tnco_target import TNCOTarget, LmGet, LmCreate, LmUpdate, LmDelete, LmGen
class ProjectTable(Table):
    """Column layout used when rendering Behaviour Projects as a table."""
    columns = [
        Column('name', header='Name'),
        Column('description', header='Description')
    ]
# Shared --output format handler (table/json/yaml) for this CLI target.
output_formats = common_output_format_handler(table=ProjectTable())
class Projects(TNCOTarget):
    """CLI target implementing CRUD and file scaffolding for TNCO Behaviour Projects."""
    # Names used by the CLI framework to register the command group.
    name = 'behaviourproject'
    plural = 'behaviourprojects'
    display_name = 'Behaviour Project'

    @LmGen()
    def genfile(self, ctx: click.Context, name: str):
        """Scaffold minimal file content for a new Behaviour Project."""
        # Behaviour Projects share names with their Assembly descriptor.
        return {
            'name': f'assembly::{name}::1.0',
        }

    @LmGet(output_formats=output_formats, help=f'''\
Get all {display_name}s or get one by name\
\n\nUse NAME argument to get by one by name\
\n\nOmit NAME argument get all projects\
\n\nNote: all Assembly descriptors have a Behaviour Project associated with them so can be found using their name e.g. assembly::example::1.0''')
    @click.argument('name', required=False)
    def get(self, tnco_client: TNCOClient, ctx: click.Context, name: str = None):
        """Return one project by NAME, or all projects when NAME is omitted."""
        api = tnco_client.behaviour_projects
        if name is not None:
            return api.get(name)
        else:
            return api.all()

    @LmCreate()
    def create(self, tnco_client: TNCOClient, ctx: click.Context, file_content: Dict = None, set_values: Dict = None):
        """Create a project from a file (-f) or from --set values; returns its name."""
        api = tnco_client.behaviour_projects
        if file_content is not None:
            # File input and --set values are mutually exclusive for create.
            if set_values is not None and len(set_values) > 0:
                raise click.BadArgumentUsage(message='Do not use "--set" option when using "-f, --file" option', ctx=ctx)
            project = file_content
        else:
            project = set_values
        result = api.create(project)
        return result.get('name')

    @LmUpdate()
    @click.argument('name', required=False)
    def update(self, tnco_client: TNCOClient, ctx: click.Context, file_content: Dict = None, name: str = None, set_values: Dict = None):
        """Update a project from a file (-f) or by NAME, applying --set overrides."""
        api = tnco_client.behaviour_projects
        if file_content is not None:
            if name is not None:
                raise click.BadArgumentUsage(message='Do not use "NAME" argument when using "-f, --file" option', ctx=ctx)
            project = file_content
        else:
            if name is None:
                raise click.BadArgumentUsage(message='Must set "NAME" argument when no "-f, --file" option specified', ctx=ctx)
            # Fetch current server-side state, then apply --set overrides on top.
            project = api.get(name)
        project.update(set_values)
        # NOTE(review): the API response is unused; the input name is returned.
        result = api.update(project)
        return project.get('name')

    @LmDelete()
    @click.argument('name', required=False)
    def delete(self, tnco_client: TNCOClient, ctx: click.Context, file_content: Dict = None, name: str = None, ignore_missing: bool = None):
        """Delete a project identified by NAME or by the name/id inside a file (-f)."""
        api = tnco_client.behaviour_projects
        if file_content is not None:
            if name is not None:
                raise click.BadArgumentUsage(message='Do not use "NAME" argument when using "-f, --file" option', ctx=ctx)
            project = file_content
            # Prefer an explicit id, falling back to the project name.
            project_id = project.get('id', project.get('name', None))
            if project_id is None:
                raise click.BadArgumentUsage(message='Object from file does not contain an "name" (or "id") attribute', ctx=ctx)
        else:
            if name is None:
                raise click.BadArgumentUsage(message='Must set "NAME" argument when no "-f, --file" option specified', ctx=ctx)
            project_id = name
        try:
            result = api.delete(project_id)
        except TNCOClientHttpError as e:
            if e.status_code == 404:
                # Not found
                if ignore_missing:
                    # Report and treat as success when ignoring missing objects.
                    ctl = self._get_controller()
                    ctl.io.print(f'No {self.display_name} found with name (ID) {project_id} (ignoring)')
                    return
            # Re-raise anything that was not an ignorable 404.
            raise
return project_id | 44.93617 | 189 | 0.60535 | import click
from typing import Dict
from lmctl.client import TNCOClient, TNCOClientHttpError
from lmctl.cli.arguments import common_output_format_handler
from lmctl.cli.format import Table, Column
from .tnco_target import TNCOTarget, LmGet, LmCreate, LmUpdate, LmDelete, LmGen
class ProjectTable(Table):
columns = [
Column('name', header='Name'),
Column('description', header='Description')
]
output_formats = common_output_format_handler(table=ProjectTable())
class Projects(TNCOTarget):
name = 'behaviourproject'
plural = 'behaviourprojects'
display_name = 'Behaviour Project'
@LmGen()
def genfile(self, ctx: click.Context, name: str):
return {
'name': f'assembly::{name}::1.0',
}
@LmGet(output_formats=output_formats, help=f'''\
Get all {display_name}s or get one by name\
\n\nUse NAME argument to get by one by name\
\n\nOmit NAME argument get all projects\
\n\nNote: all Assembly descriptors have a Behaviour Project associated with them so can be found using their name e.g. assembly::example::1.0''')
@click.argument('name', required=False)
def get(self, tnco_client: TNCOClient, ctx: click.Context, name: str = None):
api = tnco_client.behaviour_projects
if name is not None:
return api.get(name)
else:
return api.all()
@LmCreate()
def create(self, tnco_client: TNCOClient, ctx: click.Context, file_content: Dict = None, set_values: Dict = None):
api = tnco_client.behaviour_projects
if file_content is not None:
if set_values is not None and len(set_values) > 0:
raise click.BadArgumentUsage(message='Do not use "--set" option when using "-f, --file" option', ctx=ctx)
project = file_content
else:
project = set_values
result = api.create(project)
return result.get('name')
@LmUpdate()
@click.argument('name', required=False)
def update(self, tnco_client: TNCOClient, ctx: click.Context, file_content: Dict = None, name: str = None, set_values: Dict = None):
api = tnco_client.behaviour_projects
if file_content is not None:
if name is not None:
raise click.BadArgumentUsage(message='Do not use "NAME" argument when using "-f, --file" option', ctx=ctx)
project = file_content
else:
if name is None:
raise click.BadArgumentUsage(message='Must set "NAME" argument when no "-f, --file" option specified', ctx=ctx)
project = api.get(name)
project.update(set_values)
result = api.update(project)
return project.get('name')
@LmDelete()
@click.argument('name', required=False)
def delete(self, tnco_client: TNCOClient, ctx: click.Context, file_content: Dict = None, name: str = None, ignore_missing: bool = None):
api = tnco_client.behaviour_projects
if file_content is not None:
if name is not None:
raise click.BadArgumentUsage(message='Do not use "NAME" argument when using "-f, --file" option', ctx=ctx)
project = file_content
project_id = project.get('id', project.get('name', None))
if project_id is None:
raise click.BadArgumentUsage(message='Object from file does not contain an "name" (or "id") attribute', ctx=ctx)
else:
if name is None:
raise click.BadArgumentUsage(message='Must set "NAME" argument when no "-f, --file" option specified', ctx=ctx)
project_id = name
try:
result = api.delete(project_id)
except TNCOClientHttpError as e:
if e.status_code == 404:
if ignore_missing:
ctl = self._get_controller()
ctl.io.print(f'No {self.display_name} found with name (ID) {project_id} (ignoring)')
return
raise
return project_id | true | true |
f720f9e7fd9b231b60cfa0de9c50219e99364bef | 2,516 | py | Python | api/serializers.py | NiklasMerz/shoppinglist | 38c494b2a2f80a0c543beaf0d9d9a75870bdbb22 | [
"MIT"
] | null | null | null | api/serializers.py | NiklasMerz/shoppinglist | 38c494b2a2f80a0c543beaf0d9d9a75870bdbb22 | [
"MIT"
] | 45 | 2021-11-03T20:48:50.000Z | 2021-12-14T21:22:12.000Z | api/serializers.py | NiklasMerz/shoppinglist | 38c494b2a2f80a0c543beaf0d9d9a75870bdbb22 | [
"MIT"
] | null | null | null | from list.models import *
from rest_framework import serializers
class CatalogItemSerializer(serializers.ModelSerializer):
    """Flat representation of a CatalogItem (id and description only)."""
    class Meta:
        model = CatalogItem
        fields = ['id', 'description']
class ItemSerializer(serializers.ModelSerializer):
    """Item plus read-only summary fields derived from its purchase history.

    Each ``last_*`` field inspects the most recent Checkout / LineItem
    related to the item (via its SKUs) and yields None when no history
    exists or a related object is missing.
    """
    last_checkout = serializers.SerializerMethodField()
    last_line_item_date = serializers.SerializerMethodField()
    last_line_item_total = serializers.SerializerMethodField()
    last_line_item_store = serializers.SerializerMethodField()

    def get_last_checkout(self, obj):
        """Timestamp of the most recent checkout of this item, or None."""
        try:
            return obj.checkouts.latest().time
        except Exception:  # fix: was a bare except (also caught SystemExit/KeyboardInterrupt)
            return None

    def get_last_line_item_date(self, obj):
        """Receipt time of the most recent line item for any of the item's SKUs."""
        try:
            return LineItem.objects.filter(sku__in=obj.skus.all()).latest().receipt.time
        except Exception:  # fix: was a bare except
            return None

    def get_last_line_item_total(self, obj):
        """Monetary amount of the most recent line item, or None."""
        try:
            return LineItem.objects.filter(sku__in=obj.skus.all()).latest().total.amount
        except Exception:  # fix: was a bare except
            return None

    def get_last_line_item_store(self, obj):
        """Store name on the receipt of the most recent line item, or None."""
        try:
            return LineItem.objects.filter(sku__in=obj.skus.all()).latest().receipt.store.name
        except Exception:  # fix: was a bare except
            return None

    class Meta:
        model = Item
        fields = ['id', 'description', 'note', 'buy', 'list', 'last_checkout', 'last_line_item_date', 'last_line_item_total', 'last_line_item_store', 'catalog_item']
class ListSerializer(serializers.ModelSerializer):
    """Shopping list: id and display name only."""
    class Meta:
        model = List
        fields = ['id', 'name']
class StoreSerializer(serializers.ModelSerializer):
    """Store with its free-text note and location."""
    class Meta:
        model = Store
        fields = ['id', 'name', 'note', 'location']
class TripSerializer(serializers.ModelSerializer):
    """Shopping trip: links a list to a store with start/finish times and notes."""
    class Meta:
        model = Trip
        fields = ['id', 'time', 'store', 'list', 'finish_time', 'label', 'notes']
class CheckoutSerializer(serializers.ModelSerializer):
    """Checkout event: an item bought on a trip, with quantity."""
    class Meta:
        model = Checkout
        fields = ['id', 'time', 'trip', 'item', 'count']
class LineItemSerializer(serializers.ModelSerializer):
    """Line item on a receipt, flattened with its parent receipt's timestamp."""
    # Read-only link to the Item this line was matched to (if any).
    item = serializers.PrimaryKeyRelatedField(read_only=True)
    # Surface the parent receipt's timestamp as a flat 'date' field.
    date = serializers.CharField(source='receipt.time')
    class Meta:
        model = LineItem
        fields = ('id', 'description', 'total', 'quantity', 'item', 'date')
class ReceiptSerializer(serializers.ModelSerializer):
    """Receipt with its nested line items (read-only)."""
    line_items = LineItemSerializer(many=True, read_only=True)
    class Meta:
        model = Receipt
fields = ['id', 'time', 'trip', 'total', 'line_items'] | 33.546667 | 165 | 0.661367 | from list.models import *
from rest_framework import serializers
class CatalogItemSerializer(serializers.ModelSerializer):
class Meta:
model = CatalogItem
fields = ['id', 'description']
class ItemSerializer(serializers.ModelSerializer):
last_checkout = serializers.SerializerMethodField()
last_line_item_date = serializers.SerializerMethodField()
last_line_item_total = serializers.SerializerMethodField()
last_line_item_store = serializers.SerializerMethodField()
def get_last_checkout(self, obj):
try:
return obj.checkouts.latest().time
except:
return None
def get_last_line_item_date(self, obj):
try:
return LineItem.objects.filter(sku__in=obj.skus.all()).latest().receipt.time
except:
return None
def get_last_line_item_total(self, obj):
try:
return LineItem.objects.filter(sku__in=obj.skus.all()).latest().total.amount
except:
return None
def get_last_line_item_store(self, obj):
try:
return LineItem.objects.filter(sku__in=obj.skus.all()).latest().receipt.store.name
except:
return None
class Meta:
model = Item
fields = ['id', 'description', 'note', 'buy', 'list', 'last_checkout', 'last_line_item_date', 'last_line_item_total', 'last_line_item_store', 'catalog_item']
class ListSerializer(serializers.ModelSerializer):
class Meta:
model = List
fields = ['id', 'name']
class StoreSerializer(serializers.ModelSerializer):
class Meta:
model = Store
fields = ['id', 'name', 'note', 'location']
class TripSerializer(serializers.ModelSerializer):
class Meta:
model = Trip
fields = ['id', 'time', 'store', 'list', 'finish_time', 'label', 'notes']
class CheckoutSerializer(serializers.ModelSerializer):
class Meta:
model = Checkout
fields = ['id', 'time', 'trip', 'item', 'count']
class LineItemSerializer(serializers.ModelSerializer):
item = serializers.PrimaryKeyRelatedField(read_only=True)
date = serializers.CharField(source='receipt.time')
class Meta:
model = LineItem
fields = ('id', 'description', 'total', 'quantity', 'item', 'date')
class ReceiptSerializer(serializers.ModelSerializer):
line_items = LineItemSerializer(many=True, read_only=True)
class Meta:
model = Receipt
fields = ['id', 'time', 'trip', 'total', 'line_items'] | true | true |
f720fb43dcf64ffc735cf5c4010db34b4ad229a7 | 8,091 | py | Python | tests/test_cli_exiftool.py | oPromessa/osxphotos | 0d7e324f0262093727147b9f22ed275e962e8725 | [
"MIT"
] | null | null | null | tests/test_cli_exiftool.py | oPromessa/osxphotos | 0d7e324f0262093727147b9f22ed275e962e8725 | [
"MIT"
] | null | null | null | tests/test_cli_exiftool.py | oPromessa/osxphotos | 0d7e324f0262093727147b9f22ed275e962e8725 | [
"MIT"
] | null | null | null | """Tests for `osxphotos exiftool` command."""
import glob
import json
import os
import pytest
from click.testing import CliRunner
from osxphotos.cli.exiftool_cli import exiftool
from osxphotos.cli.export import export
from osxphotos.exiftool import ExifTool, get_exiftool_path
from .test_cli import CLI_EXIFTOOL, PHOTOS_DB_15_7
# determine if exiftool installed so exiftool tests can be skipped
try:
    exiftool_path = get_exiftool_path()
except FileNotFoundError:
    # exiftool binary not on PATH; every test below is skipped via skipif.
    exiftool_path = None
@pytest.mark.skipif(exiftool_path is None, reason="exiftool not installed")
def test_export_exiftool():
    """Export without --exiftool, write metadata via `osxphotos exiftool`,
    then re-export with --exiftool --update and verify nothing changes.

    Fix: metadata verification previously ran only for the *last* uuid left
    over from the option-building loop; every uuid is now checked.
    """
    runner = CliRunner()
    cwd = os.getcwd()
    with runner.isolated_filesystem() as temp_dir:
        uuid_option = []
        for uuid in CLI_EXIFTOOL:
            uuid_option.extend(("--uuid", uuid))
        # first, export without --exiftool
        result = runner.invoke(
            export,
            [
                "--db",
                os.path.join(cwd, PHOTOS_DB_15_7),
                temp_dir,
                "-V",
                *uuid_option,
            ],
        )
        assert result.exit_code == 0
        files = glob.glob("*")
        assert sorted(files) == sorted(
            [CLI_EXIFTOOL[uuid]["File:FileName"] for uuid in CLI_EXIFTOOL]
        )
        # now, run exiftool command to update exiftool metadata
        result = runner.invoke(
            exiftool,
            ["--db", os.path.join(cwd, PHOTOS_DB_15_7), "-V", "--db-config", temp_dir],
        )
        assert result.exit_code == 0
        # verify the written metadata for every exported photo
        for uuid in CLI_EXIFTOOL:
            exif = ExifTool(CLI_EXIFTOOL[uuid]["File:FileName"]).asdict()
            for key in CLI_EXIFTOOL[uuid]:
                if type(exif[key]) == list:
                    assert sorted(exif[key]) == sorted(CLI_EXIFTOOL[uuid][key])
                else:
                    assert exif[key] == CLI_EXIFTOOL[uuid][key]
        # now, export with --exiftool --update, no files should be updated
        result = runner.invoke(
            export,
            [
                "--db",
                os.path.join(cwd, PHOTOS_DB_15_7),
                temp_dir,
                "-V",
                "--exiftool",
                "--update",
                *uuid_option,
            ],
        )
        assert result.exit_code == 0
        assert f"exported: 0, updated: 0, skipped: {len(CLI_EXIFTOOL)}" in result.output
@pytest.mark.skipif(exiftool_path is None, reason="exiftool not installed")
def test_export_exiftool_album_keyword():
    """Test `osxphotos exiftool` with --album-keyword: album names must be
    written into the photos' IPTC keywords.

    Fixes: the report file handle is now closed (was `json.load(open(...))`),
    and the final assert no longer uses a pointless f-string prefix.
    """
    runner = CliRunner()
    cwd = os.getcwd()
    with runner.isolated_filesystem() as temp_dir:
        # first, export without --exiftool
        result = runner.invoke(
            export,
            [
                "--db",
                os.path.join(cwd, PHOTOS_DB_15_7),
                temp_dir,
                "-V",
                "--album",
                "Pumpkin Farm",
            ],
        )
        assert result.exit_code == 0
        files = glob.glob("*")
        assert len(files) == 3
        # now, run exiftool command to update exiftool metadata
        result = runner.invoke(
            exiftool,
            [
                "--db",
                os.path.join(cwd, PHOTOS_DB_15_7),
                "-V",
                "--db-config",
                "--report",
                "exiftool.json",
                "--album-keyword",
                temp_dir,
            ],
        )
        assert result.exit_code == 0
        with open("exiftool.json", "r") as report_file:
            report = json.load(report_file)
        assert len(report) == 3
        # verify exiftool metadata was updated
        for file in report:
            exif = ExifTool(file["filename"]).asdict()
            assert "Pumpkin Farm" in exif["IPTC:Keywords"]
        # now, export with --exiftool --update, no files should be updated
        result = runner.invoke(
            export,
            [
                "--db",
                os.path.join(cwd, PHOTOS_DB_15_7),
                temp_dir,
                "-V",
                "--exiftool",
                "--update",
                "--album",
                "Pumpkin Farm",
                "--album-keyword",
            ],
        )
        assert result.exit_code == 0
        assert "exported: 0, updated: 0, skipped: 3" in result.output
@pytest.mark.skipif(exiftool_path is None, reason="exiftool not installed")
def test_export_exiftool_keyword_template():
    """Test `osxphotos exiftool` with --keyword-template: the rendered
    template value must be written into the photos' IPTC keywords.

    Fix: the report file handle is now closed (was `json.load(open(...))`).
    """
    runner = CliRunner()
    cwd = os.getcwd()
    with runner.isolated_filesystem() as temp_dir:
        uuid_option = []
        for uuid in CLI_EXIFTOOL:
            uuid_option.extend(("--uuid", uuid))
        # first, export without --exiftool
        result = runner.invoke(
            export,
            [
                "--db",
                os.path.join(cwd, PHOTOS_DB_15_7),
                temp_dir,
                "-V",
                *uuid_option,
            ],
        )
        assert result.exit_code == 0
        # now, run exiftool command to update exiftool metadata
        result = runner.invoke(
            exiftool,
            [
                "--db",
                os.path.join(cwd, PHOTOS_DB_15_7),
                "-V",
                "--db-config",
                "--keyword-template",
                "FOO",
                temp_dir,
                "--report",
                "exiftool.json",
            ],
        )
        assert result.exit_code == 0
        with open("exiftool.json", "r") as report_file:
            report = json.load(report_file)
        for file in report:
            exif = ExifTool(file["filename"]).asdict()
            assert "FOO" in exif["IPTC:Keywords"]
        # now, export with --exiftool --update, no files should be updated
        result = runner.invoke(
            export,
            [
                "--db",
                os.path.join(cwd, PHOTOS_DB_15_7),
                temp_dir,
                "-V",
                "--exiftool",
                "--keyword-template",
                "FOO",
                "--update",
                *uuid_option,
            ],
        )
        assert result.exit_code == 0
        assert f"exported: 0, updated: 0, skipped: {len(CLI_EXIFTOOL)}" in result.output
@pytest.mark.skipif(exiftool_path is None, reason="exiftool not installed")
def test_export_exiftool_load_config():
    """Test `osxphotos exiftool` with --load-config.

    Fix: metadata verification previously ran only for the *last* uuid left
    over from the option-building loop; every uuid is now checked.
    """
    runner = CliRunner()
    cwd = os.getcwd()
    with runner.isolated_filesystem() as temp_dir:
        uuid_option = []
        for uuid in CLI_EXIFTOOL:
            uuid_option.extend(("--uuid", uuid))
        # first, export without --exiftool, saving the options for later reuse
        result = runner.invoke(
            export,
            [
                "--db",
                os.path.join(cwd, PHOTOS_DB_15_7),
                temp_dir,
                "-V",
                "--save-config",
                "config.toml",
                *uuid_option,
            ],
        )
        assert result.exit_code == 0
        # now, run exiftool command to update exiftool metadata
        result = runner.invoke(
            exiftool,
            ["-V", "--load-config", "config.toml", temp_dir],
        )
        assert result.exit_code == 0
        # verify the written metadata for every exported photo
        for uuid in CLI_EXIFTOOL:
            exif = ExifTool(CLI_EXIFTOOL[uuid]["File:FileName"]).asdict()
            for key in CLI_EXIFTOOL[uuid]:
                if type(exif[key]) == list:
                    assert sorted(exif[key]) == sorted(CLI_EXIFTOOL[uuid][key])
                else:
                    assert exif[key] == CLI_EXIFTOOL[uuid][key]
        # now, export with --exiftool --update, no files should be updated
        result = runner.invoke(
            export,
            [
                "--db",
                os.path.join(cwd, PHOTOS_DB_15_7),
                temp_dir,
                "-V",
                "--exiftool",
                "--update",
                *uuid_option,
            ],
        )
        assert result.exit_code == 0
        assert f"exported: 0, updated: 0, skipped: {len(CLI_EXIFTOOL)}" in result.output
| 30.303371 | 88 | 0.502163 |
import glob
import json
import os
import pytest
from click.testing import CliRunner
from osxphotos.cli.exiftool_cli import exiftool
from osxphotos.cli.export import export
from osxphotos.exiftool import ExifTool, get_exiftool_path
from .test_cli import CLI_EXIFTOOL, PHOTOS_DB_15_7
try:
exiftool_path = get_exiftool_path()
except FileNotFoundError:
exiftool_path = None
@pytest.mark.skipif(exiftool_path is None, reason="exiftool not installed")
def test_export_exiftool():
runner = CliRunner()
cwd = os.getcwd()
with runner.isolated_filesystem() as temp_dir:
uuid_option = []
for uuid in CLI_EXIFTOOL:
uuid_option.extend(("--uuid", uuid))
result = runner.invoke(
export,
[
"--db",
os.path.join(cwd, PHOTOS_DB_15_7),
temp_dir,
"-V",
*uuid_option,
],
)
assert result.exit_code == 0
files = glob.glob("*")
assert sorted(files) == sorted(
[CLI_EXIFTOOL[uuid]["File:FileName"] for uuid in CLI_EXIFTOOL]
)
result = runner.invoke(
exiftool,
["--db", os.path.join(cwd, PHOTOS_DB_15_7), "-V", "--db-config", temp_dir],
)
assert result.exit_code == 0
exif = ExifTool(CLI_EXIFTOOL[uuid]["File:FileName"]).asdict()
for key in CLI_EXIFTOOL[uuid]:
if type(exif[key]) == list:
assert sorted(exif[key]) == sorted(CLI_EXIFTOOL[uuid][key])
else:
assert exif[key] == CLI_EXIFTOOL[uuid][key]
result = runner.invoke(
export,
[
"--db",
os.path.join(cwd, PHOTOS_DB_15_7),
temp_dir,
"-V",
"--exiftool",
"--update",
*uuid_option,
],
)
assert result.exit_code == 0
assert f"exported: 0, updated: 0, skipped: {len(CLI_EXIFTOOL)}" in result.output
@pytest.mark.skipif(exiftool_path is None, reason="exiftool not installed")
def test_export_exiftool_album_keyword():
runner = CliRunner()
cwd = os.getcwd()
with runner.isolated_filesystem() as temp_dir:
result = runner.invoke(
export,
[
"--db",
os.path.join(cwd, PHOTOS_DB_15_7),
temp_dir,
"-V",
"--album",
"Pumpkin Farm",
],
)
assert result.exit_code == 0
files = glob.glob("*")
assert len(files) == 3
result = runner.invoke(
exiftool,
[
"--db",
os.path.join(cwd, PHOTOS_DB_15_7),
"-V",
"--db-config",
"--report",
"exiftool.json",
"--album-keyword",
temp_dir,
],
)
assert result.exit_code == 0
report = json.load(open("exiftool.json", "r"))
assert len(report) == 3
for file in report:
exif = ExifTool(file["filename"]).asdict()
assert "Pumpkin Farm" in exif["IPTC:Keywords"]
result = runner.invoke(
export,
[
"--db",
os.path.join(cwd, PHOTOS_DB_15_7),
temp_dir,
"-V",
"--exiftool",
"--update",
"--album",
"Pumpkin Farm",
"--album-keyword",
],
)
assert result.exit_code == 0
assert f"exported: 0, updated: 0, skipped: 3" in result.output
@pytest.mark.skipif(exiftool_path is None, reason="exiftool not installed")
def test_export_exiftool_keyword_template():
runner = CliRunner()
cwd = os.getcwd()
with runner.isolated_filesystem() as temp_dir:
uuid_option = []
for uuid in CLI_EXIFTOOL:
uuid_option.extend(("--uuid", uuid))
result = runner.invoke(
export,
[
"--db",
os.path.join(cwd, PHOTOS_DB_15_7),
temp_dir,
"-V",
*uuid_option,
],
)
assert result.exit_code == 0
result = runner.invoke(
exiftool,
[
"--db",
os.path.join(cwd, PHOTOS_DB_15_7),
"-V",
"--db-config",
"--keyword-template",
"FOO",
temp_dir,
"--report",
"exiftool.json",
],
)
assert result.exit_code == 0
report = json.load(open("exiftool.json", "r"))
for file in report:
exif = ExifTool(file["filename"]).asdict()
assert "FOO" in exif["IPTC:Keywords"]
result = runner.invoke(
export,
[
"--db",
os.path.join(cwd, PHOTOS_DB_15_7),
temp_dir,
"-V",
"--exiftool",
"--keyword-template",
"FOO",
"--update",
*uuid_option,
],
)
assert result.exit_code == 0
assert f"exported: 0, updated: 0, skipped: {len(CLI_EXIFTOOL)}" in result.output
@pytest.mark.skipif(exiftool_path is None, reason="exiftool not installed")
def test_export_exiftool_load_config():
    """Test that `exiftool --load-config` applies the options saved by export.

    Exports the CLI_EXIFTOOL test photos with --save-config, re-runs the
    `exiftool` command from the saved config file, and checks the expected
    EXIF metadata on every exported photo. The original version only checked
    the photo belonging to the last `uuid` left over from the option-building
    loop; this verifies all of them. A final `export --exiftool --update`
    must skip every file.
    """
    runner = CliRunner()
    cwd = os.getcwd()
    # pylint: disable=not-context-manager
    with runner.isolated_filesystem() as temp_dir:
        # build repeated --uuid options for every test photo
        uuid_option = []
        for uuid in CLI_EXIFTOOL:
            uuid_option.extend(("--uuid", uuid))
        # export with --save-config so the exiftool run can reuse the options
        result = runner.invoke(
            export,
            [
                "--db",
                os.path.join(cwd, PHOTOS_DB_15_7),
                temp_dir,
                "-V",
                "--save-config",
                "config.toml",
                *uuid_option,
            ],
        )
        assert result.exit_code == 0
        result = runner.invoke(
            exiftool,
            ["-V", "--load-config", "config.toml", temp_dir],
        )
        assert result.exit_code == 0
        # verify EXIF data was written for every exported photo, not just the
        # last uuid bound by the loop above
        for uuid in CLI_EXIFTOOL:
            exif = ExifTool(CLI_EXIFTOOL[uuid]["File:FileName"]).asdict()
            for key in CLI_EXIFTOOL[uuid]:
                if type(exif[key]) == list:
                    assert sorted(exif[key]) == sorted(CLI_EXIFTOOL[uuid][key])
                else:
                    assert exif[key] == CLI_EXIFTOOL[uuid][key]
        # a subsequent --exiftool --update export should skip everything
        result = runner.invoke(
            export,
            [
                "--db",
                os.path.join(cwd, PHOTOS_DB_15_7),
                temp_dir,
                "-V",
                "--exiftool",
                "--update",
                *uuid_option,
            ],
        )
        assert result.exit_code == 0
        assert f"exported: 0, updated: 0, skipped: {len(CLI_EXIFTOOL)}" in result.output
| true | true |
f720fb57cc3918cd168d86f2c7f319f139afdefb | 1,488 | py | Python | datasets/raman_tablets/__init__.py | ryuzakyl/data-bloodhound | ae0413e748e55a0d2dbae35bbe96a672f313a64b | [
"Apache-2.0"
] | 3 | 2019-03-18T03:22:06.000Z | 2021-04-06T07:53:51.000Z | datasets/raman_tablets/__init__.py | ryuzakyl/data-bloodhound | ae0413e748e55a0d2dbae35bbe96a672f313a64b | [
"Apache-2.0"
] | null | null | null | datasets/raman_tablets/__init__.py | ryuzakyl/data-bloodhound | ae0413e748e55a0d2dbae35bbe96a672f313a64b | [
"Apache-2.0"
] | 2 | 2020-10-05T08:22:25.000Z | 2020-10-05T08:24:02.000Z | #!/usr/bin/env
# -*- coding: utf-8 -*-
# Copyright (C) Victor M. Mendiola Lau - All Rights Reserved
# Unauthorized copying of this file, via any medium is strictly prohibited
# Proprietary and confidential
# Written by Victor M. Mendiola Lau <ryuzakyl@gmail.com>, February 2017
import os
import scipy.io as sio
import utils.datasets as utils
# ---------------------------------------------------------------
# data set paths
__data_set_path = "{}/data/Ramandata_tablets.mat".format(os.path.split(__file__)[0])
__pickle_path = "{}/cache/raman_tablets.pickle".format(os.path.split(__file__)[0])
# ---------------------------------------------------------------
# TODO: Add docstring with usage examples (see 'uv_fuel' data set)
@utils.load_data_from_pickle(__pickle_path)
def load_raman_tablets():
    """Load the Raman tablets data set from the bundled MATLAB file.

    Builds the data set via ``utils.build_data_set`` from the spectral
    matrix, sample/feature labels and two extra metadata columns
    ('active (% w/w)' and 'Type'). The decorator caches the result to a
    pickle file on first load.
    """
    # parse the bundled MATLAB file
    mat = sio.loadmat(__data_set_path)

    # row (sample) identifiers
    sample_ids = mat['ObjLabels'].tolist()

    # the first two variable labels belong to the metadata columns below;
    # the remainder are numeric feature labels
    feature_ids = [float(v) for v in mat['VarLabels'].tolist()[2:]]

    # split the raw matrix into spectra and metadata columns
    matrix = mat['Matrix']
    spectra = matrix[:, 2:]
    extra = {
        'active (% w/w)': matrix[:, 0].tolist(),
        'Type': matrix[:, 1].astype(int).tolist(),
    }

    return utils.build_data_set(spectra, sample_ids, feature_ids, extra_cols=extra)
| 29.76 | 93 | 0.635753 |
import os
import scipy.io as sio
import utils.datasets as utils
__data_set_path = "{}/data/Ramandata_tablets.mat".format(os.path.split(__file__)[0])
__pickle_path = "{}/cache/raman_tablets.pickle".format(os.path.split(__file__)[0])
@utils.load_data_from_pickle(__pickle_path)
def load_raman_tablets():
    """Load the Raman tablets data set (cached to pickle by the decorator)."""
    # parse the bundled MATLAB file
    raw_data = sio.loadmat(__data_set_path)
    # sample (row) labels
    samples_labels = raw_data['ObjLabels'].tolist()
    # the first two variable labels belong to the metadata columns split off
    # below; the remainder are numeric feature labels
    raw_features = raw_data['VarLabels'].tolist()
    features_labels = list(map(float, raw_features[2:]))
    # split the raw matrix into data and metadata columns
    raw_data = raw_data['Matrix']
    data = raw_data[:, 2:]
    other_cols = {
        'active (% w/w)': raw_data[:, 0].tolist(),
        'Type': raw_data[:, 1].astype(int).tolist(),
    }
    return utils.build_data_set(data, samples_labels, features_labels, extra_cols=other_cols)
| true | true |
f720fb60277344026d5780ac04e0013b225304fb | 4,616 | py | Python | homeassistant/components/climate/homekit_controller.py | dauden1184/home-assistant | f4c6d389b77d0efa86644e76604eaea5d21abdb5 | [
"Apache-2.0"
] | 4 | 2019-01-10T14:47:54.000Z | 2021-04-22T02:06:27.000Z | homeassistant/components/climate/homekit_controller.py | dauden1184/home-assistant | f4c6d389b77d0efa86644e76604eaea5d21abdb5 | [
"Apache-2.0"
] | 6 | 2021-02-08T20:25:50.000Z | 2022-03-11T23:27:53.000Z | homeassistant/components/climate/homekit_controller.py | dauden1184/home-assistant | f4c6d389b77d0efa86644e76604eaea5d21abdb5 | [
"Apache-2.0"
] | 3 | 2018-09-14T07:34:09.000Z | 2018-09-29T12:57:10.000Z | """
Support for Homekit climate devices.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/climate.homekit_controller/
"""
import logging
from homeassistant.components.homekit_controller import (
HomeKitEntity, KNOWN_ACCESSORIES)
from homeassistant.components.climate import (
ClimateDevice, STATE_HEAT, STATE_COOL, STATE_IDLE,
SUPPORT_TARGET_TEMPERATURE, SUPPORT_OPERATION_MODE)
from homeassistant.const import TEMP_CELSIUS, STATE_OFF, ATTR_TEMPERATURE
DEPENDENCIES = ['homekit_controller']
_LOGGER = logging.getLogger(__name__)
# Map of Homekit operation modes to hass modes
MODE_HOMEKIT_TO_HASS = {
0: STATE_OFF,
1: STATE_HEAT,
2: STATE_COOL,
}
# Map of hass operation modes to homekit modes
MODE_HASS_TO_HOMEKIT = {v: k for k, v in MODE_HOMEKIT_TO_HASS.items()}
def setup_platform(hass, config, add_entities, discovery_info=None):
    """Set up Homekit climate."""
    if discovery_info is None:
        return
    serial = discovery_info['serial']
    accessory = hass.data[KNOWN_ACCESSORIES][serial]
    add_entities([HomeKitClimateDevice(accessory, discovery_info)], True)
class HomeKitClimateDevice(HomeKitEntity, ClimateDevice):
    """Representation of a Homekit climate device."""

    def __init__(self, *args):
        """Initialise the device."""
        super().__init__(*args)
        self._state = None
        self._current_mode = None
        self._valid_modes = []
        self._current_temp = None
        self._target_temp = None

    def update_characteristics(self, characteristics):
        """Synchronise device state with Home Assistant.

        Each characteristic dict has a single 'type', so the branches below
        are mutually exclusive; the original code used a bare ``if`` for the
        HEATING_COOLING_TARGET branch, now folded into one if/elif ladder
        for consistency (behavior is unchanged).
        """
        # pylint: disable=import-error
        from homekit import CharacteristicsTypes as ctypes

        for characteristic in characteristics:
            ctype = characteristic['type']
            if ctype == ctypes.HEATING_COOLING_CURRENT:
                self._state = MODE_HOMEKIT_TO_HASS.get(
                    characteristic['value'])
            elif ctype == ctypes.HEATING_COOLING_TARGET:
                # NOTE(review): self._chars / self._features are presumably
                # initialised by HomeKitEntity — confirm in the base class.
                self._chars['target_mode'] = characteristic['iid']
                self._features |= SUPPORT_OPERATION_MODE
                self._current_mode = MODE_HOMEKIT_TO_HASS.get(
                    characteristic['value'])
                self._valid_modes = [MODE_HOMEKIT_TO_HASS.get(
                    mode) for mode in characteristic['valid-values']]
            elif ctype == ctypes.TEMPERATURE_CURRENT:
                self._current_temp = characteristic['value']
            elif ctype == ctypes.TEMPERATURE_TARGET:
                self._chars['target_temp'] = characteristic['iid']
                self._features |= SUPPORT_TARGET_TEMPERATURE
                self._target_temp = characteristic['value']

    def set_temperature(self, **kwargs):
        """Set new target temperature."""
        temp = kwargs.get(ATTR_TEMPERATURE)
        characteristics = [{'aid': self._aid,
                            'iid': self._chars['target_temp'],
                            'value': temp}]
        self.put_characteristics(characteristics)

    def set_operation_mode(self, operation_mode):
        """Set new target operation mode."""
        characteristics = [{'aid': self._aid,
                            'iid': self._chars['target_mode'],
                            'value': MODE_HASS_TO_HOMEKIT[operation_mode]}]
        self.put_characteristics(characteristics)

    @property
    def state(self):
        """Return the current state."""
        # If the device reports its operating mode as off, it sometimes
        # doesn't report a new current state — trust the target mode.
        if self._current_mode == STATE_OFF:
            return STATE_OFF
        # Device off but target mode active: report idle rather than off.
        if self._state == STATE_OFF and self._current_mode != STATE_OFF:
            return STATE_IDLE
        return self._state

    @property
    def current_temperature(self):
        """Return the current temperature."""
        return self._current_temp

    @property
    def target_temperature(self):
        """Return the temperature we try to reach."""
        return self._target_temp

    @property
    def current_operation(self):
        """Return current operation ie. heat, cool, idle."""
        return self._current_mode

    @property
    def operation_list(self):
        """Return the list of available operation modes."""
        return self._valid_modes

    @property
    def supported_features(self):
        """Return the list of supported features."""
        return self._features

    @property
    def temperature_unit(self):
        """Return the unit of measurement."""
        return TEMP_CELSIUS
| 35.236641 | 79 | 0.649697 | import logging
from homeassistant.components.homekit_controller import (
HomeKitEntity, KNOWN_ACCESSORIES)
from homeassistant.components.climate import (
ClimateDevice, STATE_HEAT, STATE_COOL, STATE_IDLE,
SUPPORT_TARGET_TEMPERATURE, SUPPORT_OPERATION_MODE)
from homeassistant.const import TEMP_CELSIUS, STATE_OFF, ATTR_TEMPERATURE
DEPENDENCIES = ['homekit_controller']
_LOGGER = logging.getLogger(__name__)
MODE_HOMEKIT_TO_HASS = {
0: STATE_OFF,
1: STATE_HEAT,
2: STATE_COOL,
}
MODE_HASS_TO_HOMEKIT = {v: k for k, v in MODE_HOMEKIT_TO_HASS.items()}
def setup_platform(hass, config, add_entities, discovery_info=None):
    """Set up Homekit climate from a discovered accessory."""
    if discovery_info is not None:
        accessory = hass.data[KNOWN_ACCESSORIES][discovery_info['serial']]
        add_entities([HomeKitClimateDevice(accessory, discovery_info)], True)
class HomeKitClimateDevice(HomeKitEntity, ClimateDevice):
    """Representation of a Homekit climate device."""
    def __init__(self, *args):
        """Initialise the device."""
        super().__init__(*args)
        self._state = None
        self._current_mode = None
        self._valid_modes = []
        self._current_temp = None
        self._target_temp = None
    def update_characteristics(self, characteristics):
        """Synchronise device state with Home Assistant."""
        from homekit import CharacteristicsTypes as ctypes
        for characteristic in characteristics:
            ctype = characteristic['type']
            if ctype == ctypes.HEATING_COOLING_CURRENT:
                self._state = MODE_HOMEKIT_TO_HASS.get(
                    characteristic['value'])
            # bare `if` below, but the ctype constants are distinct so it
            # behaves like an elif here
            if ctype == ctypes.HEATING_COOLING_TARGET:
                # NOTE(review): self._chars / self._features are presumably
                # initialised by HomeKitEntity — confirm in the base class.
                self._chars['target_mode'] = characteristic['iid']
                self._features |= SUPPORT_OPERATION_MODE
                self._current_mode = MODE_HOMEKIT_TO_HASS.get(
                    characteristic['value'])
                self._valid_modes = [MODE_HOMEKIT_TO_HASS.get(
                    mode) for mode in characteristic['valid-values']]
            elif ctype == ctypes.TEMPERATURE_CURRENT:
                self._current_temp = characteristic['value']
            elif ctype == ctypes.TEMPERATURE_TARGET:
                self._chars['target_temp'] = characteristic['iid']
                self._features |= SUPPORT_TARGET_TEMPERATURE
                self._target_temp = characteristic['value']
    def set_temperature(self, **kwargs):
        """Set new target temperature."""
        temp = kwargs.get(ATTR_TEMPERATURE)
        characteristics = [{'aid': self._aid,
                            'iid': self._chars['target_temp'],
                            'value': temp}]
        self.put_characteristics(characteristics)
    def set_operation_mode(self, operation_mode):
        """Set new target operation mode."""
        characteristics = [{'aid': self._aid,
                            'iid': self._chars['target_mode'],
                            'value': MODE_HASS_TO_HOMEKIT[operation_mode]}]
        self.put_characteristics(characteristics)
    @property
    def state(self):
        """Return the current state."""
        # if the mode is off the device sometimes doesn't
        # report a new state.
        if self._current_mode == STATE_OFF:
            return STATE_OFF
        if self._state == STATE_OFF and self._current_mode != STATE_OFF:
            return STATE_IDLE
        return self._state
    @property
    def current_temperature(self):
        """Return the current temperature."""
        return self._current_temp
    @property
    def target_temperature(self):
        """Return the temperature we try to reach."""
        return self._target_temp
    @property
    def current_operation(self):
        """Return current operation ie. heat, cool, idle."""
        return self._current_mode
    @property
    def operation_list(self):
        """Return the list of available operation modes."""
        return self._valid_modes
    @property
    def supported_features(self):
        """Return the list of supported features."""
        return self._features
    @property
    def temperature_unit(self):
        """Return the unit of measurement."""
        return TEMP_CELSIUS
| true | true |
f720fb753855fb74cefd74341a9ca1be69022a34 | 247 | py | Python | frappe/patches/v5_3/rename_chinese_languages.py | Nxweb-in/frappe | 56b3eb52bf56dd71bee29fde3ed28ed9c6d15947 | [
"MIT"
] | 1 | 2021-06-03T07:04:48.000Z | 2021-06-03T07:04:48.000Z | frappe/patches/v5_3/rename_chinese_languages.py | Nxweb-in/frappe | 56b3eb52bf56dd71bee29fde3ed28ed9c6d15947 | [
"MIT"
] | null | null | null | frappe/patches/v5_3/rename_chinese_languages.py | Nxweb-in/frappe | 56b3eb52bf56dd71bee29fde3ed28ed9c6d15947 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
import frappe
from frappe.translate import rename_language
def execute():
	"""Rename the Chinese language records to their updated display names."""
	renames = (
		("中国(简体)", "簡體中文"),
		("中國(繁體)", "正體中文"),
	)
	for old_name, new_name in renames:
		rename_language(old_name, new_name)
| 19 | 48 | 0.684211 |
import frappe
from frappe.translate import rename_language
def execute():
	"""Rename the Chinese language records to their updated display names."""
	language_map = {
		"中国(简体)": "簡體中文",
		"中國(繁體)": "正體中文"
	}
	for old_name, new_name in language_map.items():
		rename_language(old_name, new_name)
| true | true |
f720fbff40e522e9a078688ae64f8333f985dc4f | 110 | py | Python | video.py | KazukiChiyo/lane-keeping | 46ac1ce2cb96eb32a0da4946433c8d0ecbf4dc53 | [
"MIT"
] | 1 | 2018-10-09T12:59:30.000Z | 2018-10-09T12:59:30.000Z | video.py | KazukiChiyo/lane-keeping | 46ac1ce2cb96eb32a0da4946433c8d0ecbf4dc53 | [
"MIT"
] | null | null | null | video.py | KazukiChiyo/lane-keeping | 46ac1ce2cb96eb32a0da4946433c8d0ecbf4dc53 | [
"MIT"
] | 1 | 2020-05-22T05:57:29.000Z | 2020-05-22T05:57:29.000Z | from moviepy.editor import VideoFileClip
clip = VideoFileClip("output_images/out_video.mp4")  # load the rendered output video
print(clip.fps)  # report its frame rate
| 22 | 51 | 0.818182 | from moviepy.editor import VideoFileClip
clip = VideoFileClip("output_images/out_video.mp4")
print(clip.fps)
| true | true |
f720fc48a7b225366d7031ba6afe3845468b78f8 | 5,354 | py | Python | tests/test_node_licenses.py | gaybro8777/osf.io | 30408511510a40bc393565817b343ef5fd76ab14 | [
"Apache-2.0"
] | 628 | 2015-01-15T04:33:22.000Z | 2022-03-30T06:40:10.000Z | tests/test_node_licenses.py | gaybro8777/osf.io | 30408511510a40bc393565817b343ef5fd76ab14 | [
"Apache-2.0"
] | 4,712 | 2015-01-02T01:41:53.000Z | 2022-03-30T14:18:40.000Z | tests/test_node_licenses.py | Johnetordoff/osf.io | de10bf249c46cede04c78f7e6f7e352c69e6e6b5 | [
"Apache-2.0"
] | 371 | 2015-01-12T16:14:08.000Z | 2022-03-31T18:58:29.000Z | # -*- coding: utf-8 -*-
import builtins
import json
import unittest
import mock
import pytest
from django.core.exceptions import ValidationError
from nose.tools import * # noqa: F403 (PEP8 asserts)
from framework.auth import Auth
from osf_tests.factories import (AuthUserFactory, NodeLicenseRecordFactory,
ProjectFactory)
from tests.base import OsfTestCase
from osf.utils.migrations import ensure_licenses
from tests.utils import assert_logs, assert_not_logs
from website import settings
from osf.models.licenses import NodeLicense, serialize_node_license_record, serialize_node_license
from osf.models import NodeLog
from osf.exceptions import NodeStateError
CHANGED_NAME = 'FOO BAR'
CHANGED_TEXT = 'Some good new text'
CHANGED_PROPERTIES = ['foo', 'bar']
LICENSE_TEXT = json.dumps({
'MIT': {
'name': CHANGED_NAME,
'text': CHANGED_TEXT,
'properties': CHANGED_PROPERTIES
}
})
class TestNodeLicenses(OsfTestCase):
    """Tests for NodeLicense serialization, the ensure_licenses migration
    helper, and setting a license on a node."""
    def setUp(self):
        # create a user-owned project carrying an MIT license record
        super(TestNodeLicenses, self).setUp()
        self.user = AuthUserFactory()
        self.node = ProjectFactory(creator=self.user)
        self.LICENSE_NAME = 'MIT License'
        self.node_license = NodeLicense.objects.get(name=self.LICENSE_NAME)
        self.YEAR = '2105'
        self.COPYRIGHT_HOLDERS = ['Foo', 'Bar']
        self.node.node_license = NodeLicenseRecordFactory(
            node_license=self.node_license,
            year=self.YEAR,
            copyright_holders=self.COPYRIGHT_HOLDERS
        )
        self.node.save()
    def test_serialize_node_license(self):
        """serialize_node_license exposes name, id and text."""
        serialized = serialize_node_license(self.node_license)
        assert_equal(serialized['name'], self.LICENSE_NAME)
        assert_equal(serialized['id'], self.node_license.license_id)
        assert_equal(serialized['text'], self.node_license.text)
    def test_serialize_node_license_record(self):
        """A record additionally serializes year and copyright holders."""
        serialized = serialize_node_license_record(self.node.node_license)
        assert_equal(serialized['name'], self.LICENSE_NAME)
        assert_equal(serialized['id'], self.node_license.license_id)
        assert_equal(serialized['text'], self.node_license.text)
        assert_equal(serialized['year'], self.YEAR)
        assert_equal(serialized['copyright_holders'], self.COPYRIGHT_HOLDERS)
    def test_serialize_node_license_record_None(self):
        """A missing license record serializes to an empty dict."""
        self.node.node_license = None
        serialized = serialize_node_license_record(self.node.node_license)
        assert_equal(serialized, {})
    def test_copy_node_license_record(self):
        """copy() yields a new record with the same license fields."""
        record = self.node.node_license
        copied = record.copy()
        assert_is_not_none(copied._id)
        assert_not_equal(record._id, copied._id)
        for prop in ('license_id', 'name', 'node_license'):
            assert_equal(getattr(record, prop), getattr(copied, prop))
    @pytest.mark.enable_implicit_clean
    def test_license_uniqueness_on_id_is_enforced_in_the_database(self):
        """A duplicate license_id must fail validation on save."""
        NodeLicense(license_id='foo', name='bar', text='baz').save()
        assert_raises(ValidationError, NodeLicense(license_id='foo', name='buz', text='boo').save)
    def test_ensure_licenses_updates_existing_licenses(self):
        # fixture licenses already exist: 0 created, 18 updated
        assert_equal(ensure_licenses(), (0, 18))
    def test_ensure_licenses_no_licenses(self):
        """ensure_licenses recreates all licenses after a full delete."""
        before_count = NodeLicense.objects.all().count()
        NodeLicense.objects.all().delete()
        assert_false(NodeLicense.objects.all().count())
        ensure_licenses()
        assert_equal(before_count, NodeLicense.objects.all().count())
    def test_ensure_licenses_some_missing(self):
        """ensure_licenses restores an individually deleted license."""
        NodeLicense.objects.get(license_id='LGPL3').delete()
        with assert_raises(NodeLicense.DoesNotExist):
            NodeLicense.objects.get(license_id='LGPL3')
        ensure_licenses()
        found = NodeLicense.objects.get(license_id='LGPL3')
        assert_is_not_none(found)
    def test_ensure_licenses_updates_existing(self):
        """ensure_licenses applies changed fields read from the JSON file."""
        # patch open() so ensure_licenses reads our modified MIT definition
        with mock.patch.object(builtins, 'open', mock.mock_open(read_data=LICENSE_TEXT)):
            ensure_licenses()
        MIT = NodeLicense.objects.get(license_id='MIT')
        assert_equal(MIT.name, CHANGED_NAME)
        assert_equal(MIT.text, CHANGED_TEXT)
        assert_equal(MIT.properties, CHANGED_PROPERTIES)
    @assert_logs(NodeLog.CHANGED_LICENSE, 'node')
    def test_Node_set_node_license(self):
        """set_node_license updates the record and logs the change."""
        GPL3 = NodeLicense.objects.get(license_id='GPL3')
        NEW_YEAR = '2014'
        COPYLEFT_HOLDERS = ['Richard Stallman']
        self.node.set_node_license(
            {
                'id': GPL3.license_id,
                'year': NEW_YEAR,
                'copyrightHolders': COPYLEFT_HOLDERS
            },
            auth=Auth(self.user),
            save=True
        )
        assert_equal(self.node.node_license.license_id, GPL3.license_id)
        assert_equal(self.node.node_license.name, GPL3.name)
        assert_equal(self.node.node_license.copyright_holders, COPYLEFT_HOLDERS)
    @assert_not_logs(NodeLog.CHANGED_LICENSE, 'node')
    def test_Node_set_node_license_invalid(self):
        """An unknown license id raises NodeStateError and logs nothing."""
        with assert_raises(NodeStateError):
            self.node.set_node_license(
                {
                    'id': 'SOME ID',
                    'year': 'foo',
                    'copyrightHolders': []
                },
                auth=Auth(self.user)
            )
| 37.704225 | 98 | 0.678371 |
import builtins
import json
import unittest
import mock
import pytest
from django.core.exceptions import ValidationError
from nose.tools import *
from framework.auth import Auth
from osf_tests.factories import (AuthUserFactory, NodeLicenseRecordFactory,
ProjectFactory)
from tests.base import OsfTestCase
from osf.utils.migrations import ensure_licenses
from tests.utils import assert_logs, assert_not_logs
from website import settings
from osf.models.licenses import NodeLicense, serialize_node_license_record, serialize_node_license
from osf.models import NodeLog
from osf.exceptions import NodeStateError
CHANGED_NAME = 'FOO BAR'
CHANGED_TEXT = 'Some good new text'
CHANGED_PROPERTIES = ['foo', 'bar']
LICENSE_TEXT = json.dumps({
'MIT': {
'name': CHANGED_NAME,
'text': CHANGED_TEXT,
'properties': CHANGED_PROPERTIES
}
})
class TestNodeLicenses(OsfTestCase):
def setUp(self):
super(TestNodeLicenses, self).setUp()
self.user = AuthUserFactory()
self.node = ProjectFactory(creator=self.user)
self.LICENSE_NAME = 'MIT License'
self.node_license = NodeLicense.objects.get(name=self.LICENSE_NAME)
self.YEAR = '2105'
self.COPYRIGHT_HOLDERS = ['Foo', 'Bar']
self.node.node_license = NodeLicenseRecordFactory(
node_license=self.node_license,
year=self.YEAR,
copyright_holders=self.COPYRIGHT_HOLDERS
)
self.node.save()
def test_serialize_node_license(self):
serialized = serialize_node_license(self.node_license)
assert_equal(serialized['name'], self.LICENSE_NAME)
assert_equal(serialized['id'], self.node_license.license_id)
assert_equal(serialized['text'], self.node_license.text)
def test_serialize_node_license_record(self):
serialized = serialize_node_license_record(self.node.node_license)
assert_equal(serialized['name'], self.LICENSE_NAME)
assert_equal(serialized['id'], self.node_license.license_id)
assert_equal(serialized['text'], self.node_license.text)
assert_equal(serialized['year'], self.YEAR)
assert_equal(serialized['copyright_holders'], self.COPYRIGHT_HOLDERS)
def test_serialize_node_license_record_None(self):
self.node.node_license = None
serialized = serialize_node_license_record(self.node.node_license)
assert_equal(serialized, {})
def test_copy_node_license_record(self):
record = self.node.node_license
copied = record.copy()
assert_is_not_none(copied._id)
assert_not_equal(record._id, copied._id)
for prop in ('license_id', 'name', 'node_license'):
assert_equal(getattr(record, prop), getattr(copied, prop))
@pytest.mark.enable_implicit_clean
def test_license_uniqueness_on_id_is_enforced_in_the_database(self):
NodeLicense(license_id='foo', name='bar', text='baz').save()
assert_raises(ValidationError, NodeLicense(license_id='foo', name='buz', text='boo').save)
def test_ensure_licenses_updates_existing_licenses(self):
assert_equal(ensure_licenses(), (0, 18))
def test_ensure_licenses_no_licenses(self):
before_count = NodeLicense.objects.all().count()
NodeLicense.objects.all().delete()
assert_false(NodeLicense.objects.all().count())
ensure_licenses()
assert_equal(before_count, NodeLicense.objects.all().count())
def test_ensure_licenses_some_missing(self):
NodeLicense.objects.get(license_id='LGPL3').delete()
with assert_raises(NodeLicense.DoesNotExist):
NodeLicense.objects.get(license_id='LGPL3')
ensure_licenses()
found = NodeLicense.objects.get(license_id='LGPL3')
assert_is_not_none(found)
def test_ensure_licenses_updates_existing(self):
with mock.patch.object(builtins, 'open', mock.mock_open(read_data=LICENSE_TEXT)):
ensure_licenses()
MIT = NodeLicense.objects.get(license_id='MIT')
assert_equal(MIT.name, CHANGED_NAME)
assert_equal(MIT.text, CHANGED_TEXT)
assert_equal(MIT.properties, CHANGED_PROPERTIES)
@assert_logs(NodeLog.CHANGED_LICENSE, 'node')
def test_Node_set_node_license(self):
GPL3 = NodeLicense.objects.get(license_id='GPL3')
NEW_YEAR = '2014'
COPYLEFT_HOLDERS = ['Richard Stallman']
self.node.set_node_license(
{
'id': GPL3.license_id,
'year': NEW_YEAR,
'copyrightHolders': COPYLEFT_HOLDERS
},
auth=Auth(self.user),
save=True
)
assert_equal(self.node.node_license.license_id, GPL3.license_id)
assert_equal(self.node.node_license.name, GPL3.name)
assert_equal(self.node.node_license.copyright_holders, COPYLEFT_HOLDERS)
@assert_not_logs(NodeLog.CHANGED_LICENSE, 'node')
def test_Node_set_node_license_invalid(self):
with assert_raises(NodeStateError):
self.node.set_node_license(
{
'id': 'SOME ID',
'year': 'foo',
'copyrightHolders': []
},
auth=Auth(self.user)
)
| true | true |
f720fc870a26f0386b206c00d49fa2c271f5ac7a | 6,675 | py | Python | cavalgada_do_mar/src/webapps/website.py | ProfessionalIT/customers | 3dbc1989bb3494fb6de7edad67dc59b7b0385ac3 | [
"MIT"
] | null | null | null | cavalgada_do_mar/src/webapps/website.py | ProfessionalIT/customers | 3dbc1989bb3494fb6de7edad67dc59b7b0385ac3 | [
"MIT"
] | 1 | 2015-11-08T11:49:35.000Z | 2015-11-08T11:49:43.000Z | cavalgada_do_mar/src/webapps/website.py | ProfessionalIT/customers | 3dbc1989bb3494fb6de7edad67dc59b7b0385ac3 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
import web
from web.contrib import PyRSS2Gen
import render_website as render
import model
import forms
import logging
from paginator import Paginator, PaginatorSearch, PaginatorPublicacao
from datetime import datetime
from configuration import WEBSITE_URL
from utils import break_string
# web.py route table: pairs of (URL path pattern, handler class name).
# Regex groups such as '(.+)' are passed to the handler's GET as arguments.
urls = (
    '', 'Index',
    '/', 'Index',
    '/index', 'Index',
    '/quem-somos', 'QuemSomos',
    '/historico', 'Historico',
    '/projetos-sociais', 'ProjetosSociais',
    '/percurso', 'Percurso',
    '/atividades', 'Atividades',
    '/comenda', 'Comenda',
    '/premiacoes', 'Premiacoes',
    '/dicas', 'Dicas',
    '/albuns', 'Albuns',
    '/fotos', 'Fotos',
    '/videos', 'Videos',
    '/depoimentos', 'Depoimentos',
    '/patrocinadores', 'Patrocionadores',
    '/inscricao', 'Inscricao',
    '/noticias', 'Noticias',
    '/noticia/(.+)', 'Noticia',
    '/boletins', 'Boletins',
    '/boletim/(.+)', 'Boletim',
    '/fale-conosco', 'Contato',
    '/agradece-contato', 'Agradecimento',
    '/rss', 'RSS'
)
# Static page handlers. Each one renders
# render.layout(active_menu_id, page_title, body) for a fixed template.
class Index:
    def GET(self):
        return render.layout('menu_home', 'Página Inicial do Site', render.index())
class QuemSomos:
    def GET(self):
        return render.layout('menu_quem_somos', 'Fundação Cavalgada do Mar', render.pagina('quem-somos'))
class Historico:
    def GET(self):
        return render.layout('menu_historico', 'Nosso Histórico', render.pagina('historico'))
class ProjetosSociais:
    def GET(self):
        return render.layout('menu_projetos_sociais', 'Nossos Projetos Sociais', render.pagina('projetos-sociais'))
class Percurso:
    def GET(self):
        return render.layout('menu_percurso', 'O Percurso da Cavalgada', render.pagina('percurso'))
class Atividades:
    def GET(self):
        return render.layout('menu_atividades', 'As Atividades', render.pagina('atividades'))
class Comenda:
    def GET(self):
        return render.layout('menu_comenda', 'A Comenda e os Comendadores', render.pagina('comenda'))
class Premiacoes:
    def GET(self):
        return render.layout('menu_premiacoes', 'As Premiações', render.pagina('premiacoes'))
class Dicas:
    def GET(self):
        return render.layout('menu_dicas', 'Dicas da Cavalgada do Mar', render.pagina('dicas'))
class Albuns:
    def GET(self):
        return render.layout('menu_albuns', 'Os Albúns', render.pagina('albuns'))
class Fotos:
    def GET(self):
        return render.layout('menu_albuns', 'As Fotos', render.pagina('fotos'))
class Videos:
    def GET(self):
        return render.layout('menu_albuns', 'Os Videos', render.pagina('videos'))
class Depoimentos:
    def GET(self):
        return render.layout('menu_depoimentos', 'Os Depoímentos', render.pagina('depoimentos'))
class Patrocionadores:
    def GET(self):
        return render.layout('menu_patrocinadores', 'Os Patrocinadores', render.pagina('patrocinadores'))
class Inscricao:
    def GET(self):
        return render.layout('menu_inscricao', 'Faça sua Inscrição', render.pagina('inscricao'))
class Noticias:
    """Paginated news listing. GET and POST (pagination form submits)
    are handled identically, so POST is an alias of GET rather than a
    copy-pasted duplicate body."""
    def GET(self):
        pagination = PaginatorPublicacao(web.input(), 'noticias', order='data_hora desc')
        return render.layout('menu_noticias', 'Notícias', render.noticias(pagination))
    POST = GET
class Noticia:
    """Detail page for a single news item, addressed by its slug."""
    def GET(self, slug_noticia):
        return render.layout('menu_noticias', 'Notícias', render.noticia(slug_noticia))
class Boletins:
    """Paginated bulletin listing. GET and POST (pagination form submits)
    are handled identically, so POST is an alias of GET rather than a
    copy-pasted duplicate body."""
    def GET(self):
        pagination = PaginatorPublicacao(web.input(), 'boletins', order='data_hora desc')
        return render.layout('menu_home', 'Boletins', render.boletins(pagination))
    POST = GET
class Boletim:
    """Detail page for a single bulletin, addressed by its slug."""
    def GET(self, slug_boletim):
        return render.layout('menu_home', 'Boletins', render.boletim(slug_boletim))
class Contato:
    """Contact page: renders the form (GET) and e-mails submissions (POST)."""

    def GET(self):
        return render.layout('menu_fale_conosco', 'Contatos', render.contato())

    def POST(self):
        """E-mail the submitted message, then redirect to the thank-you page.

        The original wrapped the whole body in ``try/except Exception: raise``,
        a no-op handler that also caught and re-raised the seeother redirect;
        plain propagation has identical behavior.
        """
        i = web.input()
        assunto = 'Assunto: ' + break_string(i.assunto)
        nome = 'O visitante ' + break_string(i.nome)
        telefone = ' com o telefone: ' + break_string(i.telefone)
        email = ' com o E-mail: ' + break_string(i.email)
        mensagem = 'Deixou a seguinte mensagem: ' + '\n\t' + break_string(i.texto)
        mensagem_completa = nome + telefone + email + mensagem
        to_email = 'henrique@equineclinic.com.br'
        # subject/body were already strings, so the '%s' % wrappers were redundant
        web.sendmail(email, to_email, assunto, mensagem_completa)
        raise web.seeother('/agradece-contato')
class Agradecimento:
    """Thank-you page shown after a successful contact-form submission."""
    def GET(self):
        return render.layout('menu_fale_conosco', 'Contatos', render.pagina('agradece-contato'))
class RSS:
    """RSS 2.0 feed combining the latest news items and bulletins.

    The two item-building loops in the original were verbatim copies
    differing only in query name and URL prefix; they now share a helper.
    """

    _AUTHOR = 'Fundação Cultural Cavalgada do Mar em Viamão - RS'

    @staticmethod
    def _to_rss_items(entries, path_template):
        """Convert publication rows into RSSItem objects (empty if no rows)."""
        items = []
        if entries:
            for entry in entries:
                link = WEBSITE_URL + path_template % entry.slug
                items.append(PyRSS2Gen.RSSItem(title=entry.titulo,
                                               link=link,
                                               description=entry.intro,
                                               author=RSS._AUTHOR,
                                               guid=PyRSS2Gen.Guid(link),
                                               pubDate=entry.data_hora))
        return items

    def GET(self):
        items = []
        items.extend(self._to_rss_items(model.get_publicacoes_rss('Notícia'), '/noticia/%s'))
        items.extend(self._to_rss_items(model.get_publicacoes_rss('Boletim'), '/boletim/%s'))
        titulo = 'RSS da Cavalgada do Mar'
        descricao = 'Últimas publicações da Fundação Cultural Cavalgada do Mar em Porto Alegre - RS.'
        rss = PyRSS2Gen.RSS2(title=titulo,
                             link=WEBSITE_URL + '/rss',
                             description=descricao,
                             lastBuildDate=datetime.now(),
                             items=items)
        web.header('Content-Type', 'application/rss+xml; charset=utf-8')
        return rss.to_xml()
# web.py application object mapping the route table onto this module's classes
app = web.application(urls, globals())
def main():
    """Placeholder entry point; the app is normally served via web.py tooling."""
    pass
| 34.585492 | 115 | 0.625019 |
import web
from web.contrib import PyRSS2Gen
import render_website as render
import model
import forms
import logging
from paginator import Paginator, PaginatorSearch, PaginatorPublicacao
from datetime import datetime
from configuration import WEBSITE_URL
from utils import break_string
urls = (
'', 'Index',
'/', 'Index',
'/index', 'Index',
'/quem-somos', 'QuemSomos',
'/historico', 'Historico',
'/projetos-sociais', 'ProjetosSociais',
'/percurso', 'Percurso',
'/atividades', 'Atividades',
'/comenda', 'Comenda',
'/premiacoes', 'Premiacoes',
'/dicas', 'Dicas',
'/albuns', 'Albuns',
'/fotos', 'Fotos',
'/videos', 'Videos',
'/depoimentos', 'Depoimentos',
'/patrocinadores', 'Patrocionadores',
'/inscricao', 'Inscricao',
'/noticias', 'Noticias',
'/noticia/(.+)', 'Noticia',
'/boletins', 'Boletins',
'/boletim/(.+)', 'Boletim',
'/fale-conosco', 'Contato',
'/agradece-contato', 'Agradecimento',
'/rss', 'RSS'
)
class Index:
def GET(self):
return render.layout('menu_home', 'Página Inicial do Site', render.index())
class QuemSomos:
def GET(self):
return render.layout('menu_quem_somos', 'Fundação Cavalgada do Mar', render.pagina('quem-somos'))
class Historico:
def GET(self):
return render.layout('menu_historico', 'Nosso Histórico', render.pagina('historico'))
class ProjetosSociais:
def GET(self):
return render.layout('menu_projetos_sociais', 'Nossos Projetos Sociais', render.pagina('projetos-sociais'))
class Percurso:
def GET(self):
return render.layout('menu_percurso', 'O Percurso da Cavalgada', render.pagina('percurso'))
class Atividades:
def GET(self):
return render.layout('menu_atividades', 'As Atividades', render.pagina('atividades'))
class Comenda:
def GET(self):
return render.layout('menu_comenda', 'A Comenda e os Comendadores', render.pagina('comenda'))
class Premiacoes:
def GET(self):
return render.layout('menu_premiacoes', 'As Premiações', render.pagina('premiacoes'))
class Dicas:
def GET(self):
return render.layout('menu_dicas', 'Dicas da Cavalgada do Mar', render.pagina('dicas'))
class Albuns:
def GET(self):
return render.layout('menu_albuns', 'Os Albúns', render.pagina('albuns'))
class Fotos:
def GET(self):
return render.layout('menu_albuns', 'As Fotos', render.pagina('fotos'))
class Videos:
def GET(self):
return render.layout('menu_albuns', 'Os Videos', render.pagina('videos'))
class Depoimentos:
def GET(self):
return render.layout('menu_depoimentos', 'Os Depoímentos', render.pagina('depoimentos'))
class Patrocionadores:
def GET(self):
return render.layout('menu_patrocinadores', 'Os Patrocinadores', render.pagina('patrocinadores'))
class Inscricao:
def GET(self):
return render.layout('menu_inscricao', 'Faça sua Inscrição', render.pagina('inscricao'))
class Noticias:
def GET(self):
pagination = PaginatorPublicacao(web.input(), 'noticias', order='data_hora desc')
return render.layout('menu_noticias', 'Notícias', render.noticias(pagination))
def POST(self):
pagination = PaginatorPublicacao(web.input(), 'noticias', order='data_hora desc')
return render.layout('menu_noticias', 'Notícias', render.noticias(pagination))
class Noticia:
def GET(self, slug_noticia):
return render.layout('menu_noticias', 'Notícias', render.noticia(slug_noticia))
class Boletins:
def GET(self):
pagination = PaginatorPublicacao(web.input(), 'boletins', order='data_hora desc')
return render.layout('menu_home', 'Boletins', render.boletins(pagination))
def POST(self):
pagination = PaginatorPublicacao(web.input(), 'boletins', order='data_hora desc')
return render.layout('menu_home', 'Boletins', render.boletins(pagination))
class Boletim:
    """Detail page for a single bulletin, looked up by its slug."""

    def GET(self, slug_boletim):
        body = render.boletim(slug_boletim)
        return render.layout('menu_home', 'Boletins', body)
class Contato:
    """Contact page: GET shows the form, POST e-mails the message.

    The original POST wrapped everything in ``try/except Exception: raise``,
    which is a no-op that only obscured tracebacks (it also caught and
    re-raised the ``web.seeother`` redirect, which web.py raises as an
    exception).  Removing it is behavior-identical.
    """

    def GET(self):
        return render.layout('menu_fale_conosco', 'Contatos', render.contato())

    def POST(self):
        i = web.input()
        # Build the e-mail subject and body from the submitted form fields.
        assunto = 'Assunto: ' + break_string(i.assunto)
        nome = 'O visitante ' + break_string(i.nome)
        telefone = ' com o telefone: ' + break_string(i.telefone)
        email = ' com o E-mail: ' + break_string(i.email)
        mensagem = 'Deixou a seguinte mensagem: ' + '\n\t' + break_string(i.texto)
        mensagem_completa = nome + telefone + email + mensagem
        to_email = 'henrique@equineclinic.com.br'
        web.sendmail(email, to_email, '%s' % assunto, '%s' % mensagem_completa)
        # web.seeother raises an HTTP redirect to the thank-you page.
        raise web.seeother('/agradece-contato')
class Agradecimento:
    """Thank-you page shown after a contact form submission."""

    def GET(self):
        body = render.pagina('agradece-contato')
        return render.layout('menu_fale_conosco', 'Contatos', body)
class RSS:
    """RSS 2.0 feed combining the latest news and bulletin publications.

    The original GET built news items and bulletin items with two
    byte-identical loops; the shared logic is extracted into a local helper.
    """

    def GET(self):
        items = []

        def append_items(entries, link_fmt):
            # Convert each publication row into an RSS item; empty/None
            # result sets are skipped (same as the original `if entries:`).
            if not entries:
                return
            for entry in entries:
                link = WEBSITE_URL + link_fmt % entry.slug
                items.append(PyRSS2Gen.RSSItem(title=entry.titulo,
                                               link=link,
                                               description=entry.intro,
                                               author='Fundação Cultural Cavalgada do Mar em Viamão - RS',
                                               guid=PyRSS2Gen.Guid(link),
                                               pubDate=entry.data_hora))

        append_items(model.get_publicacoes_rss('Notícia'), '/noticia/%s')
        append_items(model.get_publicacoes_rss('Boletim'), '/boletim/%s')

        titulo = 'RSS da Cavalgada do Mar'
        descricao = 'Últimas publicações da Fundação Cultural Cavalgada do Mar em Porto Alegre - RS.'
        rss = PyRSS2Gen.RSS2(title=titulo,
                             link=WEBSITE_URL + '/rss',
                             description=descricao,
                             lastBuildDate=datetime.now(),
                             items=items)
        web.header('Content-Type', 'application/rss+xml; charset=utf-8')
        return rss.to_xml()
# Wire the URL patterns (defined earlier in this module) to the handler
# classes above; `app` is the WSGI application a server actually runs.
app = web.application(urls, globals())
def main():
    # Placeholder entry point; the web.py application object drives the site.
    pass
| true | true |
f720fd62a5d1381a1365405380ceac93188e3ca0 | 11,640 | py | Python | clients/client/python/ory_client/model/project_revisions.py | ALTELMA/sdk | a04d56edd0431382dda8a9d10229b8479174aa8e | [
"Apache-2.0"
] | null | null | null | clients/client/python/ory_client/model/project_revisions.py | ALTELMA/sdk | a04d56edd0431382dda8a9d10229b8479174aa8e | [
"Apache-2.0"
] | null | null | null | clients/client/python/ory_client/model/project_revisions.py | ALTELMA/sdk | a04d56edd0431382dda8a9d10229b8479174aa8e | [
"Apache-2.0"
] | null | null | null | """
Ory APIs
Documentation for all public and administrative Ory APIs. Administrative APIs can only be accessed with a valid Personal Access Token. Public APIs are mostly used in browsers. # noqa: E501
The version of the OpenAPI document: v0.0.1-alpha.93
Contact: support@ory.sh
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from ory_client.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
OpenApiModel
)
from ory_client.exceptions import ApiAttributeError
def lazy_import():
    """Import ProjectRevision on first use to break a circular-import cycle."""
    from ory_client.model.project_revision import ProjectRevision as _ProjectRevision
    globals()['ProjectRevision'] = _ProjectRevision
class ProjectRevisions(ModelSimple):
    """A list of ``ProjectRevision`` objects wrapped as an OpenAPI model.

    NOTE: This class is auto generated by OpenAPI Generator
    (https://openapi-generator.tech).  Do not edit the class manually.

    Attributes:
        allowed_values (dict): The key is the tuple path to the attribute
            and for var_name this is (var_name,).  The value is a dict with
            a capitalized key describing the allowed value and an allowed
            value.  These dicts store the allowed enum values.
        validations (dict): The key is the tuple path to the attribute
            and for var_name this is (var_name,).  The value is a dict that
            stores validations for max_length, min_length, max_items,
            min_items, exclusive_maximum, inclusive_maximum,
            exclusive_minimum, inclusive_minimum, and regex.
        additional_properties_type (tuple): A tuple of classes accepted
            as additional properties values.
    """
    # No enum-restricted values for this model.
    allowed_values = {
    }
    # No extra validation rules (length, range, regex) for this model.
    validations = {
    }
    # None disallows undeclared (additional) properties on this model.
    additional_properties_type = None
    _nullable = False
    @cached_property
    def openapi_types():
        """Return the attribute-name -> attribute-type mapping.

        This must be a method because a model may have properties that are
        of type self; it must run after the class is loaded.  The referenced
        ProjectRevision class is imported lazily to avoid circular imports.

        Returns:
            openapi_types (dict): The key is attribute name and the value
            is attribute type.
        """
        lazy_import()
        return {
            'value': ([ProjectRevision],),
        }
    @cached_property
    def discriminator():
        # Not a polymorphic model, so there is no discriminator.
        return None
    attribute_map = {}
    read_only_vars = set()
    _composed_schemas = None
    # Internal attributes managed by the model machinery itself (not schema
    # properties); they are assigned directly in __init__/_from_openapi_data.
    required_properties = set([
        '_data_store',
        '_check_type',
        '_spec_property_naming',
        '_path_to_item',
        '_configuration',
        '_visited_composed_classes',
    ])
    @convert_js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """ProjectRevisions - a model defined in OpenAPI

        Note that value can be passed either in args or in kwargs, but not
        in both.

        Args:
            args[0] ([ProjectRevision]):  # noqa: E501

        Keyword Args:
            value ([ProjectRevision]):  # noqa: E501
            _check_type (bool): if True, values for parameters in
                openapi_types will be type checked and a TypeError will be
                raised if the wrong type is input.  Defaults to True.
            _path_to_item (tuple/list): a list of keys or values used to
                drill down to the model in received_data when deserializing
                a response.
            _spec_property_naming (bool): True if the variable names in the
                input data are serialized names as specified in the OpenAPI
                document; False if they are pythonic names (default).
            _configuration (Configuration): the instance to use when
                deserializing a file_type parameter.  If omitted no type
                conversion is done.
            _visited_composed_classes (tuple): classes already traveled
                through while resolving discriminators, so a class is never
                used for discrimination twice.
        """
        # required up here when default value is not given
        _path_to_item = kwargs.pop('_path_to_item', ())
        # 'value' may arrive positionally or as a keyword, but is required.
        if 'value' in kwargs:
            value = kwargs.pop('value')
        elif args:
            args = list(args)
            value = args.pop(0)
        else:
            raise ApiTypeError(
                "value is required, but not passed in args or kwargs and doesn't have default",
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        # Any positional arguments left over after consuming 'value' are invalid.
        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
        self.value = value
        # Leftover keyword arguments are invalid: additional properties are
        # not allowed on this model (additional_properties_type is None).
        if kwargs:
            raise ApiTypeError(
                "Invalid named arguments=%s passed to %s. Remove those invalid named arguments." % (
                    kwargs,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )
    @classmethod
    @convert_js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        """ProjectRevisions - a model defined in OpenAPI (deserialization path).

        Same contract as ``__init__`` but used when building the model from
        received API data; it bypasses ``__init__`` via ``__new__``.

        Note that value can be passed either in args or in kwargs, but not
        in both.

        Args:
            args[0] ([ProjectRevision]):  # noqa: E501

        Keyword Args:
            value ([ProjectRevision]):  # noqa: E501
            _check_type (bool): if True, values for parameters in
                openapi_types will be type checked and a TypeError will be
                raised if the wrong type is input.  Defaults to True.
            _path_to_item (tuple/list): a list of keys or values used to
                drill down to the model in received_data when deserializing
                a response.
            _spec_property_naming (bool): True if the variable names in the
                input data are serialized names as specified in the OpenAPI
                document; False if they are pythonic names (default).
            _configuration (Configuration): the instance to use when
                deserializing a file_type parameter.  If omitted no type
                conversion is done.
            _visited_composed_classes (tuple): classes already traveled
                through while resolving discriminators, so a class is never
                used for discrimination twice.
        """
        # required up here when default value is not given
        _path_to_item = kwargs.pop('_path_to_item', ())
        # Create the instance without running __init__.
        self = super(OpenApiModel, cls).__new__(cls)
        if 'value' in kwargs:
            value = kwargs.pop('value')
        elif args:
            args = list(args)
            value = args.pop(0)
        else:
            raise ApiTypeError(
                "value is required, but not passed in args or kwargs and doesn't have default",
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
        self.value = value
        if kwargs:
            raise ApiTypeError(
                "Invalid named arguments=%s passed to %s. Remove those invalid named arguments." % (
                    kwargs,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )
        return self
| 40.842105 | 194 | 0.563574 |
import re
import sys
from ory_client.model_utils import (
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
OpenApiModel
)
from ory_client.exceptions import ApiAttributeError
def lazy_import():
    """Import ProjectRevision on first use to break a circular-import cycle."""
    from ory_client.model.project_revision import ProjectRevision as _ProjectRevision
    globals()['ProjectRevision'] = _ProjectRevision
class ProjectRevisions(ModelSimple):
    """A list of ``ProjectRevision`` objects wrapped as an OpenAPI model.

    NOTE(review): this looks like OpenAPI-Generator output with its
    comments stripped; the code must not be edited manually.
    """
    # No enum-restricted values for this model.
    allowed_values = {
    }
    # No extra validation rules (length, range, regex) for this model.
    validations = {
    }
    # None disallows undeclared (additional) properties on this model.
    additional_properties_type = None
    _nullable = False
    @cached_property
    def openapi_types():
        """Return the attribute-name -> attribute-type mapping.

        ProjectRevision is imported lazily (see lazy_import) to avoid
        circular imports.
        """
        lazy_import()
        return {
            'value': ([ProjectRevision],),
        }
    @cached_property
    def discriminator():
        # Not a polymorphic model, so there is no discriminator.
        return None
    attribute_map = {}
    read_only_vars = set()
    _composed_schemas = None
    # Internal attributes managed by the model machinery itself (not schema
    # properties); they are assigned directly in __init__/_from_openapi_data.
    required_properties = set([
        '_data_store',
        '_check_type',
        '_spec_property_naming',
        '_path_to_item',
        '_configuration',
        '_visited_composed_classes',
    ])
    @convert_js_args_to_python_args
    def __init__(self, *args, **kwargs):
        """Construct the model; ``value`` may be positional or keyword, not both.

        Keyword Args:
            value ([ProjectRevision]): the wrapped list (required).
            _check_type (bool): type-check values against openapi_types.
            _path_to_item (tuple/list): path used in deserialization errors.
            _spec_property_naming (bool): True if input uses serialized names.
            _configuration (Configuration): used when deserializing file_type.
            _visited_composed_classes (tuple): discriminator-resolution trail.
        """
        _path_to_item = kwargs.pop('_path_to_item', ())
        # 'value' may arrive positionally or as a keyword, but is required.
        if 'value' in kwargs:
            value = kwargs.pop('value')
        elif args:
            args = list(args)
            value = args.pop(0)
        else:
            raise ApiTypeError(
                "value is required, but not passed in args or kwargs and doesn't have default",
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        # Any positional arguments left over after consuming 'value' are invalid.
        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
        self.value = value
        # Leftover keyword arguments are invalid: additional properties are
        # not allowed on this model (additional_properties_type is None).
        if kwargs:
            raise ApiTypeError(
                "Invalid named arguments=%s passed to %s. Remove those invalid named arguments." % (
                    kwargs,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )
    @classmethod
    @convert_js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):
        """Build the model from received API data (bypasses __init__).

        Same contract as ``__init__``; returns the constructed instance.
        """
        # required up here when default value is not given
        _path_to_item = kwargs.pop('_path_to_item', ())
        # Create the instance without running __init__.
        self = super(OpenApiModel, cls).__new__(cls)
        if 'value' in kwargs:
            value = kwargs.pop('value')
        elif args:
            args = list(args)
            value = args.pop(0)
        else:
            raise ApiTypeError(
                "value is required, but not passed in args or kwargs and doesn't have default",
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )
        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )
        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
        self.value = value
        if kwargs:
            raise ApiTypeError(
                "Invalid named arguments=%s passed to %s. Remove those invalid named arguments." % (
                    kwargs,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )
        return self
| true | true |
f720fe1037c1d4bf5fae4c4643726fa3e26e29a5 | 2,400 | py | Python | rlkit/core/eval_util.py | ethanabrooks/oyster | 08b758b15ca19c50c43a137cba733b79be55654a | [
"MIT"
] | null | null | null | rlkit/core/eval_util.py | ethanabrooks/oyster | 08b758b15ca19c50c43a137cba733b79be55654a | [
"MIT"
] | null | null | null | rlkit/core/eval_util.py | ethanabrooks/oyster | 08b758b15ca19c50c43a137cba733b79be55654a | [
"MIT"
] | null | null | null | """
Common evaluation utilities.
"""
from collections import OrderedDict
from numbers import Number
import os
import numpy as np
def dprint(*args):
    """Print ``args`` (as a tuple) only when the DEBUG env var equals 1.

    hacky, but will do for now.  A missing DEBUG variable now defaults to
    "0", so the helper no longer raises KeyError when DEBUG is unset.
    """
    if int(os.environ.get("DEBUG", "0")) == 1:
        print(args)
def get_generic_path_information(paths, stat_prefix=""):
    """
    Get an OrderedDict with a bunch of statistic names and values.
    """
    reward_arrays = [path["rewards"] for path in paths]
    episode_returns = [sum(rewards) for rewards in reward_arrays]
    stacked_rewards = np.vstack(reward_arrays)

    # 1-D action arrays are concatenated end to end; 2-D ones are stacked
    # row-wise (same distinction as the original hstack/vstack branches).
    action_arrays = [path["actions"] for path in paths]
    stack_fn = np.hstack if len(action_arrays[0].shape) == 1 else np.vstack
    stacked_actions = stack_fn(action_arrays)

    statistics = OrderedDict()
    for stat_name, values in (("Rewards", stacked_rewards),
                              ("Returns", episode_returns),
                              ("Actions", stacked_actions)):
        statistics.update(
            create_stats_ordered_dict(stat_name, values, stat_prefix=stat_prefix)
        )
    statistics["Num Paths"] = len(paths)
    return statistics
def get_average_returns(paths):
    """Return the mean total (undiscounted) return across the given paths."""
    return np.mean([sum(path["rewards"]) for path in paths])
def create_stats_ordered_dict(
    name, data, stat_prefix=None, always_show_all_stats=True, exclude_max_min=False,
):
    """Build an OrderedDict of summary statistics (mean/std/max/min) for data.

    Scalars map to a single entry; tuples are summarized element-wise with a
    ``name_i`` suffix; lists of iterables are concatenated first.
    """
    if stat_prefix is not None:
        name = "{} {}".format(stat_prefix, name)
    if isinstance(data, Number):
        return OrderedDict({name: data})
    if len(data) == 0:
        return OrderedDict()
    if isinstance(data, tuple):
        # Summarize each tuple element separately and merge the results.
        combined = OrderedDict()
        for idx, element in enumerate(data):
            combined.update(
                create_stats_ordered_dict("{0}_{1}".format(name, idx), element)
            )
        return combined
    if isinstance(data, list):
        # A list of iterables is flattened into one array; a flat list of
        # scalars (first element not iterable) is left as-is.
        try:
            iter(data[0])
        except TypeError:
            pass
        else:
            data = np.concatenate(data)
    if isinstance(data, np.ndarray) and data.size == 1 and not always_show_all_stats:
        return OrderedDict({name: float(data)})
    stats = OrderedDict([
        (name + " Mean", np.mean(data)),
        (name + " Std", np.std(data)),
    ])
    if not exclude_max_min:
        stats[name + " Max"] = np.max(data)
        stats[name + " Min"] = np.min(data)
    return stats
| 28.235294 | 85 | 0.635 |
from collections import OrderedDict
from numbers import Number
import os
import numpy as np
def dprint(*args):
    """Print ``args`` (as a tuple) only when the DEBUG env var equals 1.

    A missing DEBUG variable now defaults to "0", so the helper no longer
    raises KeyError when DEBUG is unset.
    """
    if int(os.environ.get("DEBUG", "0")) == 1:
        print(args)
def get_generic_path_information(paths, stat_prefix=""):
    """Collect summary statistics for a batch of rollout paths."""
    reward_arrays = [path["rewards"] for path in paths]
    episode_returns = [sum(rewards) for rewards in reward_arrays]
    stacked_rewards = np.vstack(reward_arrays)

    # 1-D action arrays are concatenated; 2-D ones are stacked row-wise.
    action_arrays = [path["actions"] for path in paths]
    stack_fn = np.hstack if len(action_arrays[0].shape) == 1 else np.vstack
    stacked_actions = stack_fn(action_arrays)

    statistics = OrderedDict()
    for stat_name, values in (("Rewards", stacked_rewards),
                              ("Returns", episode_returns),
                              ("Actions", stacked_actions)):
        statistics.update(
            create_stats_ordered_dict(stat_name, values, stat_prefix=stat_prefix)
        )
    statistics["Num Paths"] = len(paths)
    return statistics
def get_average_returns(paths):
    """Return the mean total (undiscounted) return across the given paths."""
    return np.mean([sum(path["rewards"]) for path in paths])
def create_stats_ordered_dict(
    name, data, stat_prefix=None, always_show_all_stats=True, exclude_max_min=False,
):
    """Build an OrderedDict of summary statistics (mean/std/max/min) for data."""
    if stat_prefix is not None:
        name = "{} {}".format(stat_prefix, name)
    if isinstance(data, Number):
        return OrderedDict({name: data})
    if len(data) == 0:
        return OrderedDict()
    if isinstance(data, tuple):
        # Summarize each tuple element separately and merge the results.
        combined = OrderedDict()
        for idx, element in enumerate(data):
            combined.update(
                create_stats_ordered_dict("{0}_{1}".format(name, idx), element)
            )
        return combined
    if isinstance(data, list):
        # A list of iterables is flattened; a flat scalar list is left as-is.
        try:
            iter(data[0])
        except TypeError:
            pass
        else:
            data = np.concatenate(data)
    if isinstance(data, np.ndarray) and data.size == 1 and not always_show_all_stats:
        return OrderedDict({name: float(data)})
    stats = OrderedDict([
        (name + " Mean", np.mean(data)),
        (name + " Std", np.std(data)),
    ])
    if not exclude_max_min:
        stats[name + " Max"] = np.max(data)
        stats[name + " Min"] = np.min(data)
    return stats
| true | true |
f720ff6a241c7d87d8b54a04ab91ce4d35a8ee45 | 55,439 | py | Python | dlpy/timeseries.py | qzlvyh/sassoftware-python-dlpy | 9bf8cc4ffd5ae235e377004644ef70398431e09c | [
"Apache-2.0"
] | 1 | 2019-04-02T14:36:55.000Z | 2019-04-02T14:36:55.000Z | dlpy/timeseries.py | qzlvyh/sassoftware-python-dlpy | 9bf8cc4ffd5ae235e377004644ef70398431e09c | [
"Apache-2.0"
] | null | null | null | dlpy/timeseries.py | qzlvyh/sassoftware-python-dlpy | 9bf8cc4ffd5ae235e377004644ef70398431e09c | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
# encoding: utf-8
#
# Copyright SAS Institute
#
# Licensed under the Apache License, Version 2.0 (the License);
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
''' Timeseries related classes and functions '''
from __future__ import (print_function, division, absolute_import, unicode_literals)
from swat.cas.table import CASTable
from .utils import random_name, get_cas_host_type, char_to_double, int_to_double
from dlpy.utils import DLPyError
from swat.cas import datamsghandlers
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import warnings
import datetime
import numbers
import re
import swat
def plot_timeseries(tbl, timeid, timeseries, figure=None,
                    groupid=None, start_time=None, end_time=None, xlim=None,
                    ylim=None, xlabel=None, ylabel=None, xdate_format=None,
                    title=None, figsize=None,
                    fontsize_spec=None, **kwargs):
    '''
    Create a timeseries line plot from a CASTable or pandas DataFrame

    Parameters
    ----------
    tbl : :class:`CASTable` or :class:`pandas.DataFrame` or :class:`pandas.Series`
        The input table for the plot. If it is CASTable, it will be fetched to
        the client. If it is pandas.Series, the index name will become timeid,
        the series name will become timeseries.
    timeid : str
        The name of the timeid variable. It will be the value to be used in the
        x-axis.
    timeseries : str
        The name of the column contains the timeseries value. It will be the
        value to be used in the y-axis.
    figure : two-element-tuple, optional
        The tuple must be in the form (:class:`matplotlib.figure.Figure`,
        :class:`matplotlib.axes.Axes`). These are the figure and axes that the
        user wants to plot on. It can be used to plot new timeseries plot on
        pre-existing figures.
        Default: None
    groupid : dict, optional
        It is in the format {column1 : value1, column2 : value2, ...}.
        It is used to plot subset of the data where column1 = value1 and
        column2 = value2, etc.
        Default: None, which means do not subset the data.
    start_time : :class:`datetime.datetime` or :class:`datetime.date`, optional
        The start time of the plotted timeseries.
        Default: None, which means the plot starts at the beginning of the
        timeseries.
    end_time : :class:`datetime.datetime` or :class:`datetime.date`, optional
        The end time of the plotted timeseries.
        Default: None, which means the plot ends at the end of the timeseries.
    xlim : tuple, optional
        Set the data limits for the x-axis.
        Default: None
    ylim : tuple, optional
        Set the data limits for the y-axis.
        Default: None
    xlabel : string, optional
        Set the label for the x-axis.
    ylabel : string, optional
        Set the label for the y-axis.
    xdate_format : string, optional
        If the x-axis represents date or datetime, this is the date or datetime
        format string. (e.g. '%Y-%m-%d' is the format of 2000-03-10,
        refer to documentation for :meth:`datetime.datetime.strftime`)
        Default: None
    title : string, optional
        Set the title of the figure.
        Default: None
    figsize : tuple, optional
        The size of the figure.
        Default: None
    fontsize_spec : dict, optional
        It specifies the fontsize for 'xlabel', 'ylabel', 'xtick', 'ytick',
        'legend' and 'title'. (e.g. {'xlabel':14, 'ylabel':14}).
        If None, and figure is specified, then it will take from provided
        figure object. Otherwise, it will take the default fontsize, which are
        {'xlabel':16, 'ylabel':16, 'xtick':14, 'ytick':14, 'legend':14, 'title':20}
        Default: None
    `**kwargs` : keyword arguments, optional
        Options to pass to matplotlib plotting method.

    Returns
    -------
    (:class:`matplotlib.figure.Figure`, :class:`matplotlib.axes.Axes`)
    '''
    default_fontsize_spec = {'xlabel':16, 'ylabel':16, 'xtick':14,
                             'ytick':14, 'legend':14, 'title':20}

    # On a fresh figure, merge user font sizes over the defaults; on a
    # pre-existing figure, only the legend fontsize gets a default.
    if figure is None:
        fig, ax = plt.subplots(1, 1, figsize=figsize)
        if fontsize_spec is not None:
            default_fontsize_spec.update(fontsize_spec)
        fontsize_spec = default_fontsize_spec
    else:
        fig, ax = figure
        if fontsize_spec is None:
            fontsize_spec = {}
        if 'legend' not in fontsize_spec.keys():
            fontsize_spec['legend'] = default_fontsize_spec['legend']

    if isinstance(tbl, CASTable):
        # Fetch the (optionally group-filtered) CAS table to the client.
        if groupid is None:
            tbl = tbl.to_frame()
        else:
            where_clause_list = []
            for gid in groupid.keys():
                where_clause_list.append(gid + '=' + str(groupid[gid]))
            where_clause = ' and '.join(where_clause_list)
            tbl = tbl.query(where_clause)
            tbl = tbl.to_frame()
    else:
        if isinstance(tbl, pd.Series):
            # A Series supplies both names: index -> timeid, values -> timeseries.
            timeseries = tbl.name
            tbl = tbl.reset_index()
            timeid = [colname for colname in tbl.columns if colname != timeseries][0]
        if groupid is not None:
            for gid in groupid.keys():
                tbl = tbl.loc[tbl[gid]==groupid[gid]]

    # Non-numeric timeid values are treated as dates/datetimes.
    if not (np.issubdtype(tbl[timeid].dtype, np.integer) or
            np.issubdtype(tbl[timeid].dtype, np.floating)):
        tbl[timeid] = pd.to_datetime(tbl[timeid])
        fig.autofmt_xdate()
        if xdate_format is not None:
            import matplotlib.dates as mdates
            xfmt = mdates.DateFormatter(xdate_format)
            ax.xaxis.set_major_formatter(xfmt)

    if start_time is not None:
        if isinstance(start_time, datetime.date):
            start_time = pd.Timestamp(start_time)
        tbl = tbl.loc[tbl[timeid]>=start_time]

    if end_time is not None:
        # BUG FIX: this previously tested `start_time`, so a datetime.date
        # end_time was never converted to a pandas Timestamp before the
        # comparison below.
        if isinstance(end_time, datetime.date):
            end_time = pd.Timestamp(end_time)
        tbl = tbl.loc[tbl[timeid]<=end_time]

    tbl = tbl.sort_values(timeid)

    ax.plot(tbl[timeid], tbl[timeseries], **kwargs)

    if xlabel is not None:
        if 'xlabel' in fontsize_spec.keys():
            ax.set_xlabel(xlabel, fontsize=fontsize_spec['xlabel'])
        else:
            ax.set_xlabel(xlabel)
    elif figure is not None:
        # Keep the existing label, only restyle the fontsize if requested.
        if 'xlabel' in fontsize_spec.keys():
            ax.set_xlabel(ax.get_xlabel(), fontsize=fontsize_spec['xlabel'])
    else:
        ax.set_xlabel(timeid, fontsize=fontsize_spec['xlabel'])

    if ylabel is not None:
        if 'ylabel' in fontsize_spec.keys():
            ax.set_ylabel(ylabel, fontsize=fontsize_spec['ylabel'])
        else:
            ax.set_ylabel(ylabel)
    elif figure is not None:
        if 'ylabel' in fontsize_spec.keys():
            ax.set_ylabel(ax.get_ylabel(), fontsize=fontsize_spec['ylabel'])
    else:
        ax.set_ylabel(timeseries, fontsize=fontsize_spec['ylabel'])

    if xlim is not None:
        ax.set_xlim(xlim)

    if ylim is not None:
        ax.set_ylim(ylim)

    if title is not None:
        if 'title' in fontsize_spec.keys():
            ax.set_title(title, fontsize=fontsize_spec['title'])
        else:
            ax.set_title(title)
    elif figure is not None:
        if 'title' in fontsize_spec.keys():
            ax.set_title(ax.get_title(), fontsize=fontsize_spec['title'])

    ax.legend(loc='best', bbox_to_anchor=(1, 1), prop={'size': fontsize_spec['legend']})
    if 'xtick' in fontsize_spec.keys():
        ax.get_xaxis().set_tick_params(direction='out', labelsize=fontsize_spec['xtick'])
    else:
        ax.get_xaxis().set_tick_params(direction='out')
    if 'ytick' in fontsize_spec.keys():
        ax.get_yaxis().set_tick_params(direction='out', labelsize=fontsize_spec['ytick'])
    else:
        ax.get_yaxis().set_tick_params(direction='out')

    return (fig, ax)
class TimeseriesTable(CASTable):
'''
Table for preprocessing timeseries
It creates an instance of :class:`TimeseriesTable` by loading from
files on the server side, or files on the client side, or in
memory :class:`CASTable`, :class:`pandas.DataFrame` or
:class:`pandas.Series. It then performs inplace timeseries formatting,
timeseries accumulation, timeseries subsequence generation, and
timeseries partitioning to prepare the timeseries into a format that
can be followed by subsequent deep learning models.
Parameters
----------
name : string, optional
Name of the CAS table
timeid : string, optional
Specifies the column name for the timeid.
Default: None
groupby_var : string or list-of-strings, optional
The groupby variables.
Default: None.
sequence_opt : dict, optional
Dictionary with keys: 'input_length', 'target_length' and 'token_size'.
It will be created by the prepare_subsequences method.
Default: None
inputs_target : dict, optional
Dictionary with keys: 'inputs', 'target'.
It will be created by the prepare_subsequences method.
Default: None
Returns
-------
:class:`TimeseriesTable`
'''
running_caslib = None
def __init__(self, name, timeid=None, groupby_var=None,
sequence_opt=None, inputs_target=None, **table_params):
CASTable.__init__(self, name, **table_params)
self.timeid = timeid
self.groupby_var = groupby_var
self.sequence_opt = sequence_opt
self.inputs_target = inputs_target
@classmethod
def from_table(cls, tbl, columns=None, casout=None):
'''
Create an TimeseriesTable from a CASTable
Parameters
----------
tbl : :class:`CASTable`
The CASTable object to use as the source.
columns : list-of-strings, optional
Columns to keep when loading the data.
None means it will include all the columns from the source.
Empty list means include no column, which will generate empty data.
Default: None
casout : dict or :class:`CASTable`, optional
if it is dict, it specifies the output CASTable parameters.
if it is CASTable, it is the CASTable that will be overwritten.
None means a new CASTable with random name will be generated.
Default: None
Returns
-------
:class:`TimeseriesTable`
'''
input_tbl_params = tbl.to_outtable_params()
input_tbl_name = input_tbl_params['name']
conn = tbl.get_connection()
if casout is None:
casout_params = {}
elif isinstance(casout, CASTable):
casout_params = casout.to_outtable_params()
elif isinstance(casout, dict):
casout_params = casout
if 'name' not in casout_params:
casout_params['name'] = random_name('Timeseries', 6)
output_tbl_name = casout_params['name']
if columns is None:
keep_col_sascode = '''
data {0};
set {1};
run;
'''.format(output_tbl_name, input_tbl_name)
conn.retrieve('dataStep.runCode', _messagelevel='error',
code=keep_col_sascode)
else:
if not isinstance(columns, list):
columns = [columns]
keepcol = ' '.join(columns)
keep_col_sascode = '''
data {0};
set {1};
keep {2};
run;
'''.format(output_tbl_name, input_tbl_name, keepcol)
conn.retrieve('dataStep.runCode', _messagelevel='error',
code=keep_col_sascode)
out = cls(**casout_params)
out.set_connection(conn)
return out
@classmethod
def from_pandas(cls, conn, pandas_df, casout=None):
'''
Create an TimeseriesTable from a pandas DataFrame or Series
Parameters
----------
conn : CAS
The CAS connection object
pandas_df : :class:`pandas.DataFrame` or :class:`pandas.Series`
The pandas dataframe or series to use as the source.
casout : dict or :class:`CASTable`, optional
if it is dict, it specifies the output CASTable parameters.
if it is CASTable, it is the CASTable that will be overwritten.
None means a new CASTable with random name will be generated.
Default: None
Returns
-------
:class:`TimeseriesTable`
'''
if isinstance(pandas_df, pd.Series):
pandas_df = pandas_df.reset_index()
if casout is None:
casout_params = {}
elif isinstance(casout, CASTable):
casout_params = casout.to_outtable_params()
elif isinstance(casout, dict):
casout_params = casout
if 'name' not in casout_params:
casout_params['name'] = random_name('Timeseries', 6)
output_tbl_name = casout_params['name']
handler = datamsghandlers.PandasDataFrame(pandas_df)
conn.addtable(table=output_tbl_name, replace=True, **handler.args.addtable)
tbl = conn.CASTable(name=output_tbl_name)
return cls.from_table(tbl, columns=None, casout=casout_params)
@classmethod
def from_localfile(cls, conn, path, columns=None, importoptions=None,
casout=None):
'''
Create an TimeseriesTable from a file on the client side.
Parameters
----------
conn : CAS
The CAS connection object
path : string
The full path to the local file that will be uploaded to the server.
columns : list-of-strings, optional
Columns to keep when loading the data.
None means it will include all the columns from the source.
Empty list means to include no column, which will generate empty data.
Default: None
importoptions : dict, optional
Options to import data and upload to the server, such as filetype,
delimiter, etc. None means use the default 'auto' method in the
importoptions from CAS.upload.
Default: None
casout : dict or :class:`CASTable`, optional
If it is dict, it specifies the output CASTable parameters.
If it is CASTable, it is the CASTable that will be overwritten.
None means a new CASTable with random name will be generated.
Default: None
Returns
-------
:class:`TimeseriesTable`
'''
if casout is None:
casout_params = {}
elif isinstance(casout, CASTable):
casout_params = casout.to_outtable_params()
elif isinstance(casout, dict):
casout_params = casout
if 'name' not in casout_params:
casout_params['name'] = random_name('Timeseries', 6)
if importoptions is None:
importoptions = {}
upload_result = conn.upload(path,
importoptions=importoptions,
casout=casout_params)
tbl = conn.CASTable(**casout_params)
return cls.from_table(tbl, columns=columns, casout=casout_params)
@classmethod
def from_serverfile(cls, conn, path, columns=None, caslib=None,
                    importoptions=None, casout=None):
    '''
    Create a TimeseriesTable from a file on the server side.

    Parameters
    ----------
    conn : CAS
        The CAS connection object
    path : string
        The path that the server can access. If the caslib is specified,
        it is relative path to the file with respect to the caslib.
        otherwise, it is the full path to the file.
    columns : list-of-strings, optional
        columns to keep when loading the data.
        None means it will include all the columns from the source.
        Empty list means include no column, which will generate empty data.
        Default: None
    caslib : string, optional
        The name of the caslib which contains the file to be uploaded.
        Default: None
    importoptions : dict, optional
        Options to import data and upload to the server, such as filetype,
        delimiter, etc. None means use the default 'auto' method in the
        importoptions from CAS.upload.
        Default: None
    casout : dict or :class:`CASTable`, optional
        If it is dict, it specifies the output CASTable parameters.
        If it is CASTable, it is the CASTable that will be overwritten.
        None means a new CASTable with random name will be generated.
        Default: None

    Returns
    -------
    :class:`TimeseriesTable`
    '''
    # Normalize the casout argument into a plain parameter dict.
    # NOTE(review): a casout of any other type leaves casout_params
    # unbound and raises NameError below.
    if casout is None:
        casout_params = {}
    elif isinstance(casout, CASTable):
        casout_params = casout.to_outtable_params()
    elif isinstance(casout, dict):
        casout_params = casout

    if 'name' not in casout_params:
        casout_params['name'] = random_name('Timeseries', 6)

    if importoptions is None:
        importoptions = {}

    if caslib is None:
        # No caslib given: look for an existing caslib whose root
        # already covers the file path.
        caslib, rest_path = cls.find_file_caslib(conn, path)
        if caslib is None:
            # No covering caslib: create a temporary one over the file's
            # parent directory. The path separator depends on the
            # server's OS, not the client's.
            server_type = get_cas_host_type(conn).lower()
            if server_type.startswith("lin") or server_type.startswith("osx"):
                path_split = path.rsplit("/", 1)
            else:
                path_split = path.rsplit("\\", 1)
            caslib = random_name('Caslib', 6)
            rt1 = conn.retrieve('addcaslib', _messagelevel='error',
                                name=caslib, path=path_split[0],
                                activeonadd=False, subdirectories=False,
                                datasource={'srctype':'path'})
            # severity < 2 means the caslib was created without errors.
            if rt1.severity < 2:
                rt2 = conn.retrieve('table.loadTable',
                                    _messagelevel='error',
                                    casout=casout_params,
                                    caslib=caslib,
                                    importoptions=importoptions,
                                    path=path_split[1])
                if rt2.severity > 1:
                    for msg in rt2.messages:
                        print(msg)
                    raise DLPyError('cannot load files, something is wrong!')
            else:
                for msg in rt1.messages:
                    print(msg)
                raise DLPyError('''cannot create caslib with path:{},
                something is wrong!'''.format(path_split[0]))
        else:
            # Found a covering caslib: load using the path relative to it.
            rt3 = conn.retrieve('table.loadTable',
                                _messagelevel='error',
                                casout=casout_params,
                                caslib=caslib,
                                importoptions=importoptions,
                                path=rest_path)
            if rt3.severity > 1:
                for msg in rt3.messages:
                    print(msg)
                raise DLPyError('cannot load files, something is wrong!')
    else:
        # Caller supplied the caslib: path is interpreted relative to it.
        rt4 = conn.retrieve('table.loadTable',
                            _messagelevel='error',
                            casout=casout_params,
                            caslib=caslib,
                            importoptions=importoptions,
                            path=path)
        if rt4.severity > 1:
            for msg in rt4.messages:
                print(msg)
            raise DLPyError('cannot load files, something is wrong!')

    tbl = conn.CASTable(**casout_params)
    return cls.from_table(tbl, columns=columns, casout=casout_params)
def timeseries_formatting(self, timeid, timeseries,
                          timeid_informat=None, timeid_format=None,
                          extra_columns=None):
    '''
    Format the TimeseriesTable.

    Format timeid into appropriate format and check and format
    timeseries columns into numeric columns.

    Parameters
    ----------
    timeid : string
        Specifies the column name for the timeid.
    timeseries : string or list-of-strings
        Specifies the column name for the timeseries, that will be part of
        the input or output of the RNN. If str, then it is univariate
        time series. If list of strings, then it is multivariate timeseries.
    timeid_informat : string, optional
        if timeid is in the string format, this is required to parse the
        timeid column.
        Default: None
    timeid_format : string, optional
        Specifies the SAS format that the timeid column will be stored in
        after parsing.
        None means it will be stored in numeric form, not a specific date or datetime format.
        Default: None
    extra_columns : string or list-of-strings, optional
        Specifies the addtional columns to be included.
        Empty list means to include no extra columns other than timeid and timeseries.
        if None, all columns are included.
        Default: None
    '''
    # Record the formatting metadata on the instance for later stages
    # (accumulation, subsequence preparation).
    self.timeid = timeid
    self.timeseries = timeseries
    self.timeid_format = timeid_format
    self.timeid_informat = timeid_informat
    self.extra_columns = extra_columns

    input_tbl_params = self.to_outtable_params()
    input_tbl_name = input_tbl_params['name']
    conn = self.get_connection()
    tbl_colinfo = self.columninfo().ColumnInfo

    # Derive a display format from the informat when none was given.
    if self.timeid_format is None:
        if self.timeid_informat is None:
            # Both unset: this assignment is a no-op (None -> None),
            # kept for symmetry with the branches below.
            self.timeid_format = self.timeid_informat
        elif self.timeid_informat.lower().startswith('anydtdtm'):
            # ANYDTDTM* informats parse to datetime values.
            self.timeid_format = 'DATETIME19.'
        else:
            self.timeid_format = self.timeid_informat

    # Case 1: timeid is a character column (not double/date/datetime/int*)
    # and an informat is available -> parse it into a numeric column.
    # NOTE: each access of self.timeid_type re-queries column info.
    if (((self.timeid_type not in ['double', 'date', 'datetime'])
            and (not self.timeid_type.startswith('int')))
            and (self.timeid_informat is not None)):
        # Rename the original column to c_<timeid>, parse it with the
        # informat, and attach the chosen display format.
        fmt_code = '''
        data {0};
        set {0}(rename=({1}=c_{1}));
        {1} = input(c_{1},{2});
        drop c_{1};
        format {1} {3};
        run;
        '''.format(input_tbl_name, self.timeid,
                   self.timeid_informat, self.timeid_format)
        conn.retrieve('dataStep.runCode', _messagelevel='error', code=fmt_code)
    # Case 2: character timeid but no informat -> cannot parse.
    elif (((self.timeid_type not in ['double', 'date', 'datetime'])
            and (not self.timeid_type.startswith('int')))
            and (self.timeid_informat is None)):
        raise ValueError('''timeid variable is not in the numeric format,
        so timeid_informat is required for parsing the timeid variable.
        ''')
    # Case 3: already numeric; just attach the requested display format.
    elif (self.timeid_format is not None):
        fmt_code = '''
        data {0};
        set {0};
        format {1} {2};
        run;
        '''.format(input_tbl_name, self.timeid, self.timeid_format)
        conn.retrieve('dataStep.runCode', _messagelevel='error', code=fmt_code)
    # Case 4: numeric timeid, no format requested; pass-through data step.
    else:
        fmt_code = '''
        data {0};
        set {0};
        run;
        '''.format(input_tbl_name)
        conn.retrieve('dataStep.runCode', _messagelevel='error', code=fmt_code)

    # Refresh column info after the data step may have changed types.
    tbl_colinfo = self.columninfo().ColumnInfo

    if not isinstance(self.timeseries, list):
        self.timeseries = [self.timeseries]

    if set(self.timeseries).issubset(tbl_colinfo.Column):
        # Presumably converts character series columns to double;
        # see utils.char_to_double -- TODO confirm.
        char_to_double(conn, tbl_colinfo, input_tbl_name,
                       input_tbl_name, self.timeseries)
    else:
        raise ValueError('''One or more variables specified in 'timeseries'
        do not exist in the input table.
        ''')

    # Optionally project the table down to timeid + timeseries + extras.
    if self.extra_columns is not None:
        if not isinstance(self.extra_columns, list):
            self.extra_columns = [self.extra_columns]

        keepcol = [self.timeid]
        keepcol.extend(self.timeseries + self.extra_columns)
        keepcol = ' '.join(keepcol)
        keep_col_sascode = '''
        data {0};
        set {0};
        keep {1};
        run;
        '''.format(input_tbl_name, keepcol)
        conn.retrieve('dataStep.runCode', _messagelevel='error', code=keep_col_sascode)

    print('NOTE: Timeseries formatting is completed.')
def timeseries_accumlation(self, acc_interval='day',timeid=None,
                           timeseries=None, groupby=None,
                           extra_num_columns=None, default_ts_acc='sum',
                           default_col_acc = 'avg',
                           acc_method_byvar=None):
    '''
    Accumulate the TimeseriesTable into regular consecutive intervals.

    Parameters
    ----------
    acc_interval : string, optional
        The accumulation interval, such as 'year', 'qtr', 'month', 'week',
        'day', 'hour', 'minute', 'second'.
    timeid : string, optional
        Specifies the column name for the timeid.
        If None, it will take the timeid specified in timeseries_formatting.
        Default: None
    timeseries : string or list-of-strings, optional
        Specifies the column name for the timeseries, that will be part of
        the input or output of the RNN. If str, then it is univariate
        time series. If list of strings, then it is multivariate timeseries.
        If None, it will take the timeseries specified in timeseries_formatting.
        Default: None
    groupby : string or list-of-strings, optional
        The groupby variables.
        Default: None
    extra_num_columns : string or list-of-strings, optional
        Specifies the addtional numeric columns to be included for
        accumulation. These columns can include static feature, and might
        be accumulated differently than the timeseries that will be used
        in RNN. if None, it means no additional numeric columns will be
        accumulated for later processing and modeling.
        Default: None
    default_ts_acc : string, optional
        Default accumulation method for timeseries.
        Default: sum
    default_col_acc : string, optional
        Default accumulation method for additional numeric columns
        Default: avg
    acc_method_byvar : dict, optional
        It specifies specific accumulation method for individual columns,
        if the method is different from the default.
        It has following structure: {'column1 name': 'accumulation method1',
        'column2 name': 'accumulation method2', ...}
        Default: None
    '''
    # Resolve timeid: a new value that differs from the stored one is
    # accepted with a warning (it has not been through formatting).
    if (timeid is None) and (self.timeid is None):
        raise DLPyError('''timeid is not specified, consider specifying
        and formatting it with timeseries_formatting''')
    elif (timeid is not None) and (timeid != self.timeid):
        warnings.warn('''timeid has not been formatted by timeseries_formatting,
        consider reload the data and use timeseries_formatting to format the data,
        unless the data has already been pre-formatted.''')
        self.timeid = timeid

    # Resolve the timeseries columns, warning when they were not
    # prepared by timeseries_formatting.
    if timeseries is None:
        if ((hasattr(self, 'timeseries') and self.timeseries is None) or
                (not hasattr(self, 'timeseries'))):
            raise DLPyError('''timeseries is not specified, consider specifying
            and formatting it with timeseries_formatting''')
    else:
        if not isinstance(timeseries, list):
            timeseries = [timeseries]

        if ((hasattr(self, 'timeseries') and (self.timeseries is None)) or
                (not hasattr(self, 'timeseries'))):
            warnings.warn('''timeseries has not been formatted by timeseries_formatting,
            consider reload the data and use timeseries_formatting to format the data,
            unless the data has already been pre-formatted.''')
        elif not set(timeseries).issubset(self.timeseries):
            warnings.warn('''timeseries contains variable(s) that has not been
            formatted by timeseries_formatting, consider reload the data and use
            timeseries_formatting to format the data,
            unless the data has already been pre-formatted.''')

        self.timeseries = timeseries

    self.groupby_var = groupby
    self.extra_num_columns = extra_num_columns

    input_tbl_params = self.to_outtable_params()
    input_tbl_name = input_tbl_params['name']

    conn = self.get_connection()
    conn.loadactionset('timeData')

    tbl_colinfo = self.columninfo().ColumnInfo

    # Normalize groupby into a list of existing column names.
    if self.groupby_var is None:
        self.groupby_var = []
    elif not isinstance(self.groupby_var, list):
        self.groupby_var = [self.groupby_var]

    if set(self.groupby_var).issubset(tbl_colinfo.Column):
        # Presumably converts integer group columns to double;
        # see utils.int_to_double -- TODO confirm.
        int_to_double(conn, tbl_colinfo, input_tbl_name,
                      input_tbl_name, self.groupby_var)
    else:
        raise ValueError('''One or more variables specified in 'groupby'
        do not exist in the input table.
        ''')

    # Refresh after the possible type conversion above.
    tbl_colinfo = self.columninfo().ColumnInfo

    #Check timeid is in the input columns
    if self.timeid not in tbl_colinfo.Column.values:
        raise ValueError('''variable 'timeid' does not exist in input table.
        ''')

    #Check timeseries is in the input columns
    if not isinstance(self.timeseries, list):
        self.timeseries = [self.timeseries]

    if not set(self.timeseries).issubset(tbl_colinfo.Column):
        raise ValueError('''One or more variables specified in 'timeseries'
        do not exist in the input table.
        ''')

    #Check extra_num_columns is in the input columns
    if self.extra_num_columns is None:
        self.extra_num_columns = []
    elif not isinstance(self.extra_num_columns, list):
        self.extra_num_columns = [self.extra_num_columns]

    if not set(self.extra_num_columns).issubset(tbl_colinfo.Column):
        raise ValueError('''One or more variables specified in 'extra_num_columns'
        do not exist in the input table.
        ''')

    # Datetime timeids need the 'dt'-prefixed interval names; a plain
    # date column cannot be accumulated at sub-day frequency.
    if self.timeid_type == 'datetime':
        acc_interval = 'dt' + acc_interval
    elif ((self.timeid_type == 'date')
            and (acc_interval.lower() in ['hour', 'minute', 'second'])):
        raise ValueError('''the acc_interval has higher frequency than day,
        yet the timeid variable is in the date format.
        ''')

    if acc_method_byvar is None:
        acc_method_byvar = {}

    # Build the per-series accumulation spec, falling back to the
    # default method when no per-column override exists.
    serieslist = []
    for ts in self.timeseries:
        if ts in acc_method_byvar.keys():
            method_dict = {'acc':acc_method_byvar[ts],'name':ts}
            serieslist.append(method_dict)
        else:
            method_dict = {'acc':default_ts_acc,'name':ts}
            serieslist.append(method_dict)

    for extra_col in self.extra_num_columns:
        # Skip columns already handled as timeseries above.
        if extra_col in self.timeseries:
            warnings.warn('''
            columns in extra_num_columns are also found in
            timeseries, and will be ignored.
            ''')
            continue
        elif extra_col in acc_method_byvar.keys():
            method_dict = {'acc':acc_method_byvar[extra_col],'name':extra_col}
            serieslist.append(method_dict)
        else:
            method_dict = {'acc':default_col_acc,'name':extra_col}
            serieslist.append(method_dict)

    # Run the timeData.timeseries action in-place, overwriting the input
    # table and writing a '<name>_summary' side table.
    acc_result = conn.retrieve('timedata.timeseries', _messagelevel='error',
                               table={'groupby':self.groupby_var,'name': input_tbl_name},
                               series=serieslist,
                               timeid=self.timeid,
                               interval=acc_interval,
                               trimid='BOTH',
                               sumout=dict(name=input_tbl_name + '_summary', replace=True),
                               casout=dict(name=input_tbl_name, replace=True))

    # Strip the internal 'dt' prefix when reporting the frequency.
    if acc_interval.startswith('dt'):
        print('NOTE: Timeseries are accumulated to the frequency of {}'.format(acc_interval[2:]))
    else:
        print('NOTE: Timeseries are accumulated to the frequency of {}'.format(acc_interval))
def prepare_subsequences(self, seq_len, target, predictor_timeseries=None,
                         timeid=None, groupby=None,
                         input_length_name='xlen', target_length_name='ylen',
                         missing_handling='drop'):
    '''
    Prepare the subsequences that will be pass into RNN.

    Parameters
    ----------
    seq_len : int
        subsequence length that will be passed onto RNN.
    target : string
        the target variable for RNN. Currenly only support univariate target,
        so only string is accepted here, not list of strings.
    predictor_timeseries : string or list-of-strings, optional
        Timeseries that will be used to predict target. They will be preprocessed
        into subsequences as well. If None, it will take the target timeseries
        as the predictor, which corresponds to auto-regressive models.
        Default: None
    timeid : string, optional
        Specifies the column name for the timeid.
        If None, it will take the timeid specified in timeseries_accumlation.
        Default: None
    groupby : string or list-of-strings, optional
        The groupby variables. if None, it will take the groupby specified
        in timeseries_accumlation.
        Default: None
    input_length_name : string, optional
        The column name in the CASTable specifying input sequence length.
        Default: xlen
    target_length_name : string, optional
        The column name in the CASTable specifying target sequence length.
        currently target length only support length 1 for numeric sequence.
        Default: ylen
    missing_handling : string, optional
        How to handle missing value in the subsequences.
        default: drop
    '''
    tbl_colinfo = self.columninfo().ColumnInfo
    input_tbl_params = self.to_outtable_params()
    input_tbl_name = input_tbl_params['name']
    conn = self.get_connection()

    # Resolve timeid: explicit argument wins over the stored attribute.
    if timeid is not None:
        self.timeid = timeid
    elif self.timeid is None:
        raise ValueError('''timeid is not specified''')

    if self.timeid not in tbl_colinfo.Column.values:
        raise ValueError('''timeid does not exist in the input table''')

    # Resolve/normalize groupby variables into a list of column names.
    if groupby is not None:
        self.groupby_var = groupby

    if self.groupby_var is None:
        self.groupby_var = []
    elif not isinstance(self.groupby_var, list):
        self.groupby_var = [self.groupby_var]

    if set(self.groupby_var).issubset(tbl_colinfo.Column):
        # Presumably converts integer group columns to double;
        # see utils.int_to_double -- TODO confirm.
        int_to_double(conn, tbl_colinfo, input_tbl_name,
                      input_tbl_name, self.groupby_var)
    else:
        raise ValueError('''One or more variables specified in 'groupby'
        do not exist in the input table.
        ''')

    # Only a single (univariate) target is supported.
    if isinstance(target, list):
        if len(target) > 1:
            raise DLPyError('''currently only support univariate target''')
    else:
        target = [target]

    # Default predictors to the target itself (auto-regressive setup).
    if predictor_timeseries is None:
        predictor_timeseries = target
    elif not isinstance(predictor_timeseries, list):
        predictor_timeseries = [predictor_timeseries]

    # If the target appears among the predictors, split it out and flag
    # the run as auto-regressive; the rest are independent predictors.
    if set(target).issubset(predictor_timeseries):
        independent_pred = [var for var in predictor_timeseries
                            if var not in target]
        self.auto_regressive = True
    else:
        independent_pred = predictor_timeseries
        self.auto_regressive = False

    if not set(target).issubset(tbl_colinfo.Column):
        raise ValueError('''invalid target variable''')

    if len(independent_pred) > 0:
        if not set(independent_pred).issubset(tbl_colinfo.Column):
            raise ValueError('''columns in predictor_timeseries are absent from
            the accumulated timeseriest table.''')

    # Warn (without failing) when the requested series were not prepared
    # by the earlier formatting/accumulation stages.
    if self.timeseries is None:
        warnings.warn('''timeseries has not been formatted by timeseries_formatting,
        consider reload the data and use timeseries_formatting to format the data,
        unless the data has already been pre-formatted.''')
    else:
        if not set(target).issubset(self.timeseries):
            warnings.warn('''target is not in pre-formatted timeseries,
            consider reload the data and use timeseries_formatting to format the data,
            unless the data has already been pre-formatted.''')

        if len(independent_pred) > 0:
            if not set(independent_pred).issubset(self.timeseries):
                warnings.warn('''
                some of predictor_timeseries are not in pre-accumulated timeseries,\n
                consider reload the data and use timeseries_accumulation to accumulate the data,\n
                unless the data has already been pre-formatted.
                ''')

    self.target = target[0]
    self.independent_pred = independent_pred
    self.seq_len = seq_len

    if self.seq_len < 1:
        raise ValueError('''RNN sequence length at least need to be 1''')

    # Build a single data step that creates lagged copies of each series,
    # adds the fixed sequence-length columns, and (optionally) drops rows
    # containing missing values.
    sasCode = 'data {0}; set {0}; by {1} {2};'.format(
        input_tbl_name, ' '.join(self.groupby_var), self.timeid)

    if self.seq_len > 1:
        for var in self.independent_pred:
            sasCode += self.create_lags(var, self.seq_len - 1, self.groupby_var)

    if self.auto_regressive:
        # Target lags go one step further so the current target value is
        # never used as an input to predict itself.
        sasCode += self.create_lags(self.target, self.seq_len, self.groupby_var)

    sasCode += '{0} = {1};'.format(input_length_name, self.seq_len)
    sasCode += '{} = 1;'.format(target_length_name) # Currently only support one timestep numeric output.

    if missing_handling == 'drop':
        sasCode += 'if not cmiss(of _all_) then output {};'.format(input_tbl_name)
    sasCode += 'run;'

    if len(self.groupby_var) == 0:
        # single='Yes' runs the data step on one thread -- presumably to
        # keep lag() ordering deterministic without BY groups; confirm.
        conn.retrieve('dataStep.runCode', _messagelevel='error', code=sasCode,
                      single='Yes')
    else:
        conn.retrieve('dataStep.runCode', _messagelevel='error', code=sasCode)

    # Assemble the model input column names; reversed so the oldest
    # timestep comes first.
    self.input_vars = []

    for i in range(self.seq_len):
        if self.auto_regressive:
            self.input_vars.append('{0}_lag{1}'.format(self.target, i+1))

        for var in self.independent_pred:
            if i == 0:
                self.input_vars.append(var)
            else:
                self.input_vars.append('{0}_lag{1}'.format(var, i))

    self.input_vars.reverse()

    self.tokensize = len(predictor_timeseries)

    # Metadata consumed by downstream sequence models.
    self.sequence_opt = dict(input_length=input_length_name,
                             target_length=target_length_name,
                             token_size=self.tokensize)

    self.inputs_target = dict(inputs=self.input_vars,
                              target=self.target)

    print('NOTE: timeseries subsequences are prepared with subsequence length = {}'.format(seq_len))
@property
def timeid_type(self):
    """Column type of the timeid: 'date', 'datetime', or the raw CAS type."""
    column_info = self.columninfo().ColumnInfo
    return self.identify_coltype(self.timeid, column_info)
@staticmethod
def identify_coltype(col, tbl_colinfo):
if col not in tbl_colinfo.Column.values:
raise ValueError('''variable {} does not exist in input table.
'''.format(col))
if 'Format' in tbl_colinfo.columns:
cas_timeid_fmt = tbl_colinfo.Format[tbl_colinfo.Column == col].values[0]
else:
cas_timeid_fmt = None
col_type = tbl_colinfo.Type[tbl_colinfo.Column == col].values[0]
if cas_timeid_fmt:
for pattern in swat.options.cas.dataset.date_formats:
if re.match(r'{}\Z'.format(pattern), cas_timeid_fmt):
col_type = 'date'
break
for pattern in swat.options.cas.dataset.datetime_formats:
if re.match(r'{}\Z'.format(pattern), cas_timeid_fmt):
if col_type == 'date':
raise DLPyError('''{} format in CASTable is ambiguous,
and can match both sas date and sas datetime format'''.format(col))
else:
col_type = 'datetime'
break
return col_type
def timeseries_partition(self, training_start=None, validation_start=None,
                         testing_start=None, end_time=None,
                         partition_var_name='split_id',
                         traintbl_suffix='train',
                         validtbl_suffix='valid',
                         testtbl_suffix='test'):
    '''
    Split the dataset into training, validation and testing set.

    Parameters
    ----------
    training_start : float or :class:`datetime.datetime` or :class:`datetime.date`, optional
        The training set starting time stamp. if None, the training set
        start at the earliest observation record in the table.
        Default: None
    validation_start : float or :class:`datetime.datetime` or :class:`datetime.date`, optional
        The validation set starting time stamp. The training set
        ends right before it. If None, there is no validation set,
        and the training set ends right before the start of
        testing set.
        Default: None
    testing_start : float or :class:`datetime.datetime` or :class:`datetime.date`, optional
        The testing set starting time stamp. The validation set
        (or training set if validation set is not specified) ends
        right before it. If None, there is no testing set, and
        the validation set (or training set if validation set is
        not set) ends at the end_time.
        Default: None
    end_time : float or :class:`datetime.datetime` or :class:`datetime.date`, optional
        The end time for the table.
    partition_var_name : string, optional
        The name of the indicator column that indicates training,
        testing and validation.
        Default: 'split_id'.
    traintbl_suffix : string, optional
        The suffix name of the CASTable for the training set.
        Default: 'train'
    validtbl_suffix : string, optional
        The suffix name of the CASTable for the validation set.
        Default: 'valid'
    testtbl_suffix : string, optional
        The suffix name of the CASTable for the testing set.
        Default: 'test'

    Returns
    -------
    ( training TimeseriesTable, validation TimeseriesTable, testing TimeseriesTable )
    '''
    self.partition_var_name = partition_var_name

    conn = self.get_connection()

    # Convert every boundary into a SAS expression string (or None),
    # matching the timeid's date/datetime/double representation.
    training_start = self.convert_to_sas_time_format(training_start, self.timeid_type)
    validation_start = self.convert_to_sas_time_format(validation_start, self.timeid_type)
    testing_start = self.convert_to_sas_time_format(testing_start, self.timeid_type)
    end_time = self.convert_to_sas_time_format(end_time, self.timeid_type)

    # Build one IF statement per partition. An unset boundary collapses
    # that partition (empty statement ';') and hands its range to the
    # preceding set; the last non-empty range is right-inclusive.
    if testing_start is None:
        testing_start = end_time
        test_statement = ';'
    else:
        test_statement = self.generate_splitting_code(
            self.timeid, testing_start, end_time,
            True, self.partition_var_name, 'test')

    if validation_start is None:
        validation_start = testing_start
        valid_statement = ';'
    else:
        if testing_start == end_time:
            valid_statement = self.generate_splitting_code(
                self.timeid, validation_start, testing_start,
                True, self.partition_var_name, 'valid')
        else:
            valid_statement = self.generate_splitting_code(
                self.timeid, validation_start, testing_start,
                False, self.partition_var_name, 'valid')

    if validation_start == end_time:
        train_statement = self.generate_splitting_code(
            self.timeid, training_start, validation_start,
            True, self.partition_var_name, 'train')
    else:
        train_statement = self.generate_splitting_code(
            self.timeid, training_start, validation_start,
            False, self.partition_var_name, 'train')

    input_tbl_params = self.to_outtable_params()
    input_tbl_name = input_tbl_params['name']

    traintbl_name = '_'.join([input_tbl_name, traintbl_suffix])
    validtbl_name = '_'.join([input_tbl_name, validtbl_suffix])
    testtbl_name = '_'.join([input_tbl_name, testtbl_suffix])

    # One data step tags each row with the partition id and routes it to
    # the corresponding output table.
    splitting_code = '''
    data {4} {5} {6};
    set {0};
    {1}
    {2}
    {3}
    if {7} = 'train' then output {4};
    if {7} = 'valid' then output {5};
    if {7} = 'test' then output {6};
    run;
    '''.format(input_tbl_name, train_statement, valid_statement, test_statement,
               traintbl_name, validtbl_name, testtbl_name, self.partition_var_name)

    conn.retrieve('dataStep.runCode', _messagelevel='error', code=splitting_code)

    # Each split keeps the timeseries metadata so it can feed models directly.
    train_out = dict(name=traintbl_name, timeid=self.timeid, groupby_var=self.groupby_var,
                     sequence_opt=self.sequence_opt, inputs_target=self.inputs_target)
    valid_out = dict(name=validtbl_name, timeid=self.timeid, groupby_var=self.groupby_var,
                     sequence_opt=self.sequence_opt, inputs_target=self.inputs_target)
    test_out = dict(name=testtbl_name, timeid=self.timeid, groupby_var=self.groupby_var,
                    sequence_opt=self.sequence_opt, inputs_target=self.inputs_target)

    train_out_tbl = TimeseriesTable(**train_out)
    train_out_tbl.set_connection(conn)
    valid_out_tbl = TimeseriesTable(**valid_out)
    valid_out_tbl.set_connection(conn)
    test_out_tbl = TimeseriesTable(**test_out)
    test_out_tbl.set_connection(conn)

    print('NOTE: Training set has {} observations'.format(train_out_tbl.shape[0]))
    print('NOTE: Validation set has {} observations'.format(valid_out_tbl.shape[0]))
    print('NOTE: Testing set has {} observations'.format(test_out_tbl.shape[0]))

    return train_out_tbl, valid_out_tbl, test_out_tbl
@staticmethod
def generate_splitting_code(timeid, start, end, right_inclusive,
partition_var_name, partition_val):
if (start is None) and (end is not None):
if right_inclusive:
statement = '''if {0} <= {1} then {2} = '{3}';'''.format(
timeid, end, partition_var_name, partition_val)
else:
statement = '''if {0} < {1} then {2} = '{3}';'''.format(
timeid, end, partition_var_name, partition_val)
elif (start is not None) and (end is None):
statement = '''if {0} >= {1} then {2} = '{3}';'''.format(
timeid, start, partition_var_name, partition_val)
elif (start is not None) and (end is not None):
if right_inclusive:
statement = '''if {0} >= {1} and {0} <= {2} then {3} = '{4}';'''.format(
timeid, start, end, partition_var_name, partition_val)
else:
statement = '''if {0} >= {1} and {0} < {2} then {3} = '{4}';'''.format(
timeid, start, end, partition_var_name, partition_val)
else:
statement = '''{0} = '{1}';'''.format(partition_var_name, partition_val)
return statement
@staticmethod
def convert_to_sas_time_format(python_time, sas_format_type):
if sas_format_type == 'date':
if isinstance(python_time, datetime.date):
sas_time_str = 'mdy({0},{1},{2})'.format(python_time.month,
python_time.day, python_time.year)
return sas_time_str
elif python_time is None:
return None
else:
raise ValueError('''The timeid type is date format, so the input
python time variable should be date or datetime format''')
elif sas_format_type == 'datetime':
if isinstance(python_time, datetime.datetime):
sas_time_str = 'dhms(mdy({0},{1},{2}), {3}, {4}, {5})'.format(
python_time.month, python_time.day, python_time.year,
python_time.hour, python_time.minute, python_time.second)
return sas_time_str
elif isinstance(python_time, datetime.date):
sas_time_str = 'dhms(mdy({0},{1},{2}), 0, 0, 0)'.format(
python_time.month, python_time.day, python_time.year)
return sas_time_str
elif python_time is None:
return None
else:
raise ValueError('''The timeid type is datetime format, so the input
python time variable should be date or datetime format''')
elif sas_format_type == 'double':
if isinstance(python_time, numbers.Real):
return python_time
elif python_time is None:
return None
else:
raise ValueError('''The timeid type is double, so the input
python time variable should be int or float''')
else:
raise DLPyError('''timeid format in CASTable is wrong, consider reload
the table and formatting it with timeseries_formatting''')
@staticmethod
def create_lags(varname, nlags, byvar):
if not isinstance(byvar, list):
byvar = [byvar]
byvar_strlist = ['first.{}'.format(var) for var in byvar]
sasCode = ''
for i in range(nlags):
if i == 0:
sasCode += '{0}_lag{1} = lag({0});'.format(varname, i+1)
else:
sasCode += '{0}_lag{1} = lag({0}_lag{2});'.format(varname, i+1, i)
if len(byvar) > 0:
sasCode += 'if ' + ' or '.join(byvar_strlist)
sasCode += ' then {0}_lag{1} = .;'.format(varname, i+1)
return sasCode
@staticmethod
def find_file_caslib(conn, path):
    '''
    Check whether the specified path is covered by a caslib of the
    current session.

    Parameters
    ----------
    conn : CAS
        Specifies the CAS connection object
    path : string
        Specifies the full server-side path of the file.

    Returns
    -------
    ( caslib_name, rest_path )
        caslib_name is the name of the caslib whose root contains the
        file, or None when no caslib matches. rest_path is the remainder
        of the path relative to that caslib root (None when no caslib
        matches). (The previous docstring incorrectly described the
        return value as a flag.)
    '''
    # Fetch the caslib metadata once instead of issuing three separate
    # server requests for Path, Name and Subdirs.
    caslib_info = conn.caslibinfo().CASLibInfo
    paths = caslib_info.Path.tolist()
    caslibs = caslib_info.Name.tolist()
    subdirs = caslib_info.Subdirs.tolist()

    server_type = get_cas_host_type(conn).lower()
    # The path separator depends on the server's OS, not the client's.
    if server_type.startswith("lin") or server_type.startswith("osx"):
        sep = '/'
    else:
        sep = '\\'

    for i, directory in enumerate(paths):
        if path.startswith(directory) and (subdirs[i] == 1):
            # Caslib allows subdirectories: any file under the root matches.
            rest_path = path[len(directory):]
            return (caslibs[i], rest_path)
        elif path.startswith(directory) and (subdirs[i] == 0):
            rest_path = path[len(directory):]
            # Without subdirectory access, the file must sit directly in
            # the caslib root.
            if sep in rest_path:
                continue
            return (caslibs[i], rest_path)

    return (None, None)
| 41.840755 | 116 | 0.570771 |
from __future__ import (print_function, division, absolute_import, unicode_literals)
from swat.cas.table import CASTable
from .utils import random_name, get_cas_host_type, char_to_double, int_to_double
from dlpy.utils import DLPyError
from swat.cas import datamsghandlers
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import warnings
import datetime
import numbers
import re
import swat
def plot_timeseries(tbl, timeid, timeseries, figure=None,
                    groupid=None, start_time=None, end_time=None, xlim=None,
                    ylim=None, xlabel=None, ylabel=None, xdate_format=None,
                    title=None, figsize=None,
                    fontsize_spec=None, **kwargs):
    '''
    Plot a timeseries column against its timeid column.

    Parameters
    ----------
    tbl : CASTable or pandas.DataFrame or pandas.Series
        Data to plot. A CASTable is fetched to the client; a Series is
        converted to a DataFrame (its name becomes the series column,
        its index the timeid).
    timeid : string
        Column containing the time index.
    timeseries : string
        Column containing the values to plot.
    figure : tuple of (Figure, Axes), optional
        Existing matplotlib figure/axes to draw into. None creates new ones.
    groupid : dict, optional
        Mapping of column name -> value used to filter to one BY group.
    start_time, end_time : float or datetime.date / datetime.datetime, optional
        Inclusive time-range filter applied before plotting.
    xlim, ylim : tuple, optional
        Axis limits forwarded to matplotlib.
    xlabel, ylabel, title : string, optional
        Axis labels and title; new figures default to timeid/timeseries.
    xdate_format : string, optional
        Date format for the x-axis when timeid is datetime-like.
    figsize : tuple, optional
        Size of a newly created figure.
    fontsize_spec : dict, optional
        Font sizes keyed by 'xlabel', 'ylabel', 'xtick', 'ytick',
        'legend', 'title'.
    **kwargs
        Forwarded to ``Axes.plot``.

    Returns
    -------
    ( matplotlib.figure.Figure, matplotlib.axes.Axes )
    '''
    default_fontsize_spec = {'xlabel':16, 'ylabel':16, 'xtick':14,
                             'ytick':14, 'legend':14, 'title':20}

    if figure is None:
        fig, ax = plt.subplots(1, 1, figsize=figsize)
        # Fresh figure: merge user overrides into the full default set.
        if fontsize_spec is not None:
            default_fontsize_spec.update(fontsize_spec)
        fontsize_spec = default_fontsize_spec
    else:
        fig, ax = figure
        if fontsize_spec is None:
            fontsize_spec = {}
        # The legend size is always consumed below, so backfill only it.
        if 'legend' not in fontsize_spec.keys():
            fontsize_spec['legend'] = default_fontsize_spec['legend']

    if isinstance(tbl, CASTable):
        if groupid is None:
            tbl = tbl.to_frame()
        else:
            # Filter server-side to the requested BY group before download.
            where_clause_list = []
            for gid in groupid.keys():
                where_clause_list.append(gid + '=' + str(groupid[gid]))
            where_clause = ' and '.join(where_clause_list)
            tbl = tbl.query(where_clause)
            tbl = tbl.to_frame()
    else:
        if isinstance(tbl, pd.Series):
            # Series: its name is the value column, its index the timeid.
            timeseries = tbl.name
            tbl = tbl.reset_index()
            timeid = [colname for colname in tbl.columns if colname != timeseries][0]
        if groupid is not None:
            for gid in groupid.keys():
                tbl = tbl.loc[tbl[gid]==groupid[gid]]

    # A non-numeric timeid is treated as datetime for plotting.
    if not (np.issubdtype(tbl[timeid].dtype, np.integer) or
            np.issubdtype(tbl[timeid].dtype, np.floating)):
        tbl[timeid] = pd.to_datetime(tbl[timeid])
        fig.autofmt_xdate()
        if xdate_format is not None:
            import matplotlib.dates as mdates
            xfmt = mdates.DateFormatter(xdate_format)
            ax.xaxis.set_major_formatter(xfmt)

    if start_time is not None:
        if isinstance(start_time, datetime.date):
            start_time = pd.Timestamp(start_time)
        tbl = tbl.loc[tbl[timeid]>=start_time]

    if end_time is not None:
        # Bug fix: this previously tested isinstance(start_time, ...), so
        # a date-typed end_time was never converted to a Timestamp.
        if isinstance(end_time, datetime.date):
            end_time = pd.Timestamp(end_time)
        tbl = tbl.loc[tbl[timeid]<=end_time]

    tbl = tbl.sort_values(timeid)

    ax.plot(tbl[timeid], tbl[timeseries], **kwargs)

    if xlabel is not None:
        if 'xlabel' in fontsize_spec.keys():
            ax.set_xlabel(xlabel, fontsize=fontsize_spec['xlabel'])
        else:
            ax.set_xlabel(xlabel)
    elif figure is not None:
        # Reusing an existing axes: keep its label, optionally resize it.
        if 'xlabel' in fontsize_spec.keys():
            ax.set_xlabel(ax.get_xlabel(), fontsize=fontsize_spec['xlabel'])
    else:
        ax.set_xlabel(timeid, fontsize=fontsize_spec['xlabel'])

    if ylabel is not None:
        if 'ylabel' in fontsize_spec.keys():
            ax.set_ylabel(ylabel, fontsize=fontsize_spec['ylabel'])
        else:
            ax.set_ylabel(ylabel)
    elif figure is not None:
        if 'ylabel' in fontsize_spec.keys():
            ax.set_ylabel(ax.get_ylabel(), fontsize=fontsize_spec['ylabel'])
    else:
        ax.set_ylabel(timeseries, fontsize=fontsize_spec['ylabel'])

    if xlim is not None:
        ax.set_xlim(xlim)

    if ylim is not None:
        ax.set_ylim(ylim)

    if title is not None:
        if 'title' in fontsize_spec.keys():
            ax.set_title(title, fontsize=fontsize_spec['title'])
        else:
            ax.set_title(title)
    elif figure is not None:
        if 'title' in fontsize_spec.keys():
            ax.set_title(ax.get_title(), fontsize=fontsize_spec['title'])

    ax.legend(loc='best', bbox_to_anchor=(1, 1), prop={'size': fontsize_spec['legend']})
    if 'xtick' in fontsize_spec.keys():
        ax.get_xaxis().set_tick_params(direction='out', labelsize=fontsize_spec['xtick'])
    else:
        ax.get_xaxis().set_tick_params(direction='out')
    if 'ytick' in fontsize_spec.keys():
        ax.get_yaxis().set_tick_params(direction='out', labelsize=fontsize_spec['ytick'])
    else:
        ax.get_yaxis().set_tick_params(direction='out')

    return (fig, ax)
class TimeseriesTable(CASTable):
    '''
    CAS table specialized for preprocessing timeseries data.

    Extends the SWAT ``CASTable`` with the bookkeeping needed to format,
    accumulate, subsequence and partition timeseries for sequence models.

    Parameters
    ----------
    name : string
        Name of the CAS table on the server.
    timeid : string, optional
        Name of the time-id (timestamp) column.
    groupby_var : string or list-of-strings, optional
        Column(s) identifying individual series (BY groups).
    sequence_opt : dict, optional
        Sequence options (input/target length columns, token size) consumed
        by downstream sequence models.
    inputs_target : dict, optional
        Dictionary holding the model input column names and the target column.
    **table_params : keyword arguments, optional
        Extra parameters forwarded to the ``CASTable`` constructor.
    '''
    # Class-level slot for a caslib name; not assigned anywhere in this
    # chunk — presumably set by server-file loading code. TODO confirm usage.
    running_caslib = None
    def __init__(self, name, timeid=None, groupby_var=None,
                 sequence_opt=None, inputs_target=None, **table_params):
        CASTable.__init__(self, name, **table_params)
        self.timeid = timeid
        self.groupby_var = groupby_var
        self.sequence_opt = sequence_opt
        self.inputs_target = inputs_target
    @classmethod
    def from_table(cls, tbl, columns=None, casout=None):
        '''
        Create a TimeseriesTable by copying (a subset of the columns of) an
        existing CASTable.

        Parameters
        ----------
        tbl : CASTable
            Source table; its connection is reused for the output table.
        columns : string or list-of-strings, optional
            Columns to keep. When None, all columns are copied.
        casout : CASTable, dict or None, optional
            Output table specification. A random ``Timeseries``-prefixed
            name is generated when none is supplied.

        Returns
        -------
        :class:`TimeseriesTable`
        '''
        input_tbl_params = tbl.to_outtable_params()
        input_tbl_name = input_tbl_params['name']
        conn = tbl.get_connection()
        # Normalize casout into a plain parameter dict.
        # NOTE(review): any other casout type leaves casout_params unbound
        # and raises NameError below — confirm whether that is intended.
        if casout is None:
            casout_params = {}
        elif isinstance(casout, CASTable):
            casout_params = casout.to_outtable_params()
        elif isinstance(casout, dict):
            casout_params = casout
        if 'name' not in casout_params:
            casout_params['name'] = random_name('Timeseries', 6)
        output_tbl_name = casout_params['name']
        if columns is None:
            # Copy the whole table server-side with a plain DATA step.
            keep_col_sascode = '''
            data {0};
            set {1};
            run;
            '''.format(output_tbl_name, input_tbl_name)
            conn.retrieve('dataStep.runCode', _messagelevel='error',
                          code=keep_col_sascode)
        else:
            if not isinstance(columns, list):
                columns = [columns]
            keepcol = ' '.join(columns)
            # Copy only the requested columns via a KEEP statement.
            keep_col_sascode = '''
            data {0};
            set {1};
            keep {2};
            run;
            '''.format(output_tbl_name, input_tbl_name, keepcol)
            conn.retrieve('dataStep.runCode', _messagelevel='error',
                          code=keep_col_sascode)
        out = cls(**casout_params)
        out.set_connection(conn)
        return out
@classmethod
def from_pandas(cls, conn, pandas_df, casout=None):
if isinstance(pandas_df, pd.Series):
pandas_df = pandas_df.reset_index()
if casout is None:
casout_params = {}
elif isinstance(casout, CASTable):
casout_params = casout.to_outtable_params()
elif isinstance(casout, dict):
casout_params = casout
if 'name' not in casout_params:
casout_params['name'] = random_name('Timeseries', 6)
output_tbl_name = casout_params['name']
handler = datamsghandlers.PandasDataFrame(pandas_df)
conn.addtable(table=output_tbl_name, replace=True, **handler.args.addtable)
tbl = conn.CASTable(name=output_tbl_name)
return cls.from_table(tbl, columns=None, casout=casout_params)
@classmethod
def from_localfile(cls, conn, path, columns=None, importoptions=None,
casout=None):
if casout is None:
casout_params = {}
elif isinstance(casout, CASTable):
casout_params = casout.to_outtable_params()
elif isinstance(casout, dict):
casout_params = casout
if 'name' not in casout_params:
casout_params['name'] = random_name('Timeseries', 6)
if importoptions is None:
importoptions = {}
upload_result = conn.upload(path,
importoptions=importoptions,
casout=casout_params)
tbl = conn.CASTable(**casout_params)
return cls.from_table(tbl, columns=columns, casout=casout_params)
    @classmethod
    def from_serverfile(cls, conn, path, columns=None, caslib=None,
                        importoptions=None, casout=None):
        '''
        Load a server-side file into a TimeseriesTable.

        When ``caslib`` is not supplied, an existing caslib whose path covers
        ``path`` is looked up first; failing that, a temporary caslib is
        created over the file's directory.

        Parameters
        ----------
        conn : CAS
            CAS connection to use.
        path : string
            Path of the file on the CAS server.
        columns : string or list-of-strings, optional
            Columns to keep; None keeps all columns.
        caslib : string, optional
            Caslib containing the file; resolved automatically when None.
        importoptions : dict, optional
            Options forwarded to ``table.loadTable``.
        casout : CASTable, dict or None, optional
            Output table specification; a random name is generated if absent.

        Returns
        -------
        :class:`TimeseriesTable`

        Raises
        ------
        DLPyError
            If the caslib cannot be created or the table cannot be loaded.
        '''
        # Normalize casout into a plain parameter dict.
        if casout is None:
            casout_params = {}
        elif isinstance(casout, CASTable):
            casout_params = casout.to_outtable_params()
        elif isinstance(casout, dict):
            casout_params = casout
        if 'name' not in casout_params:
            casout_params['name'] = random_name('Timeseries', 6)
        if importoptions is None:
            importoptions = {}
        if caslib is None:
            # Try to find an existing caslib whose path covers the file.
            caslib, rest_path = cls.find_file_caslib(conn, path)
            if caslib is None:
                # None found: create a temporary caslib over the file's
                # directory. Path separator depends on the server OS.
                server_type = get_cas_host_type(conn).lower()
                if server_type.startswith("lin") or server_type.startswith("osx"):
                    path_split = path.rsplit("/", 1)
                else:
                    path_split = path.rsplit("\\", 1)
                caslib = random_name('Caslib', 6)
                rt1 = conn.retrieve('addcaslib', _messagelevel='error',
                                    name=caslib, path=path_split[0],
                                    activeonadd=False, subdirectories=False,
                                    datasource={'srctype':'path'})
                if rt1.severity < 2:
                    rt2 = conn.retrieve('table.loadTable',
                                        _messagelevel='error',
                                        casout=casout_params,
                                        caslib=caslib,
                                        importoptions=importoptions,
                                        path=path_split[1])
                    if rt2.severity > 1:
                        for msg in rt2.messages:
                            print(msg)
                        raise DLPyError('cannot load files, something is wrong!')
                else:
                    for msg in rt1.messages:
                        print(msg)
                    raise DLPyError('''cannot create caslib with path:{},
                    something is wrong!'''.format(path_split[0]))
            else:
                # Existing caslib found: load with the path relative to it.
                rt3 = conn.retrieve('table.loadTable',
                                    _messagelevel='error',
                                    casout=casout_params,
                                    caslib=caslib,
                                    importoptions=importoptions,
                                    path=rest_path)
                if rt3.severity > 1:
                    for msg in rt3.messages:
                        print(msg)
                    raise DLPyError('cannot load files, something is wrong!')
        else:
            # Caller supplied the caslib: path is assumed relative to it.
            rt4 = conn.retrieve('table.loadTable',
                                _messagelevel='error',
                                casout=casout_params,
                                caslib=caslib,
                                importoptions=importoptions,
                                path=path)
            if rt4.severity > 1:
                for msg in rt4.messages:
                    print(msg)
                raise DLPyError('cannot load files, something is wrong!')
        tbl = conn.CASTable(**casout_params)
        return cls.from_table(tbl, columns=columns, casout=casout_params)
    def timeseries_formatting(self, timeid, timeseries,
                              timeid_informat=None, timeid_format=None,
                              extra_columns=None):
        '''
        Parse/format the timeid column and cast the timeseries columns to
        numeric, in place on the server table.

        Parameters
        ----------
        timeid : string
            Name of the timeid column.
        timeseries : string or list-of-strings
            Timeseries column(s); character columns are converted to double.
        timeid_informat : string, optional
            SAS informat used to parse a character timeid column.
        timeid_format : string, optional
            SAS display format for the timeid column; derived from
            ``timeid_informat`` when omitted.
        extra_columns : string or list-of-strings, optional
            Additional columns to keep; when given, all other columns are
            dropped.

        Raises
        ------
        ValueError
            If a character timeid has no informat, or timeseries columns
            are missing from the table.
        '''
        self.timeid = timeid
        self.timeseries = timeseries
        self.timeid_format = timeid_format
        self.timeid_informat = timeid_informat
        self.extra_columns = extra_columns
        input_tbl_params = self.to_outtable_params()
        input_tbl_name = input_tbl_params['name']
        conn = self.get_connection()
        tbl_colinfo = self.columninfo().ColumnInfo
        # Derive a display format from the informat when none was given.
        if self.timeid_format is None:
            if self.timeid_informat is None:
                self.timeid_format = self.timeid_informat
            elif self.timeid_informat.lower().startswith('anydtdtm'):
                self.timeid_format = 'DATETIME19.'
            else:
                self.timeid_format = self.timeid_informat
        # Character timeid + informat: parse it into a numeric column
        # (rename to c_<timeid>, INPUT, drop the original).
        if (((self.timeid_type not in ['double', 'date', 'datetime'])
             and (not self.timeid_type.startswith('int')))
                and (self.timeid_informat is not None)):
            fmt_code = '''
            data {0};
            set {0}(rename=({1}=c_{1}));
            {1} = input(c_{1},{2});
            drop c_{1};
            format {1} {3};
            run;
            '''.format(input_tbl_name, self.timeid,
                       self.timeid_informat, self.timeid_format)
            conn.retrieve('dataStep.runCode', _messagelevel='error', code=fmt_code)
        elif (((self.timeid_type not in ['double', 'date', 'datetime'])
               and (not self.timeid_type.startswith('int')))
                and (self.timeid_informat is None)):
            # Character timeid but no informat: cannot parse.
            raise ValueError('''timeid variable is not in the numeric format,
            so timeid_informat is required for parsing the timeid variable.
            ''')
        elif (self.timeid_format is not None):
            # Numeric timeid: only (re)apply the display format.
            fmt_code = '''
            data {0};
            set {0};
            format {1} {2};
            run;
            '''.format(input_tbl_name, self.timeid, self.timeid_format)
            conn.retrieve('dataStep.runCode', _messagelevel='error', code=fmt_code)
        else:
            # Nothing to format; rewrite the table as-is.
            fmt_code = '''
            data {0};
            set {0};
            run;
            '''.format(input_tbl_name)
            conn.retrieve('dataStep.runCode', _messagelevel='error', code=fmt_code)
        # Re-read column info after the DATA step, then coerce the
        # timeseries columns to double.
        tbl_colinfo = self.columninfo().ColumnInfo
        if not isinstance(self.timeseries, list):
            self.timeseries = [self.timeseries]
        if set(self.timeseries).issubset(tbl_colinfo.Column):
            char_to_double(conn, tbl_colinfo, input_tbl_name,
                           input_tbl_name, self.timeseries)
        else:
            raise ValueError('''One or more variables specified in 'timeseries'
            do not exist in the input table.
            ''')
        # Optionally restrict the table to timeid + timeseries + extras.
        if self.extra_columns is not None:
            if not isinstance(self.extra_columns, list):
                self.extra_columns = [self.extra_columns]
            keepcol = [self.timeid]
            keepcol.extend(self.timeseries + self.extra_columns)
            keepcol = ' '.join(keepcol)
            keep_col_sascode = '''
            data {0};
            set {0};
            keep {1};
            run;
            '''.format(input_tbl_name, keepcol)
            conn.retrieve('dataStep.runCode', _messagelevel='error', code=keep_col_sascode)
        print('NOTE: Timeseries formatting is completed.')
    def timeseries_accumlation(self, acc_interval='day', timeid=None,
                               timeseries=None, groupby=None,
                               extra_num_columns=None, default_ts_acc='sum',
                               default_col_acc='avg',
                               acc_method_byvar=None):
        '''
        Accumulate the timeseries to a regular frequency with the CAS
        ``timedata.timeseries`` action, in place on the server table.

        Parameters
        ----------
        acc_interval : string, optional
            Target frequency (e.g. 'day', 'hour'); prefixed with 'dt' when
            the timeid is a datetime.
        timeid : string, optional
            Timeid column; defaults to the one set by timeseries_formatting.
        timeseries : string or list-of-strings, optional
            Series column(s) to accumulate.
        groupby : string or list-of-strings, optional
            BY-group column(s); integer columns are cast to double.
        extra_num_columns : string or list-of-strings, optional
            Additional numeric columns to accumulate alongside the series.
        default_ts_acc : string, optional
            Default accumulation method for series columns.
        default_col_acc : string, optional
            Default accumulation method for extra numeric columns.
        acc_method_byvar : dict, optional
            Per-column accumulation method overrides.

        Raises
        ------
        DLPyError, ValueError
            On missing/unknown columns or an interval finer than the timeid
            resolution supports.
        '''
        if (timeid is None) and (self.timeid is None):
            raise DLPyError('''timeid is not specified, consider specifying
            and formatting it with timeseries_formatting''')
        elif (timeid is not None) and (timeid != self.timeid):
            warnings.warn('''timeid has not been formatted by timeseries_formatting,
            consider reload the data and use timeseries_formatting to format the data,
            unless the data has already been pre-formatted.''')
            self.timeid = timeid
        if timeseries is None:
            if ((hasattr(self, 'timeseries') and self.timeseries is None) or
                    (not hasattr(self, 'timeseries'))):
                raise DLPyError('''timeseries is not specified, consider specifying
                and formatting it with timeseries_formatting''')
        else:
            if not isinstance(timeseries, list):
                timeseries = [timeseries]
            # Warn when the requested series were not pre-formatted.
            if ((hasattr(self, 'timeseries') and (self.timeseries is None)) or
                    (not hasattr(self, 'timeseries'))):
                warnings.warn('''timeseries has not been formatted by timeseries_formatting,
                consider reload the data and use timeseries_formatting to format the data,
                unless the data has already been pre-formatted.''')
            elif not set(timeseries).issubset(self.timeseries):
                warnings.warn('''timeseries contains variable(s) that has not been
                formatted by timeseries_formatting, consider reload the data and use
                timeseries_formatting to format the data,
                unless the data has already been pre-formatted.''')
            self.timeseries = timeseries
        self.groupby_var = groupby
        self.extra_num_columns = extra_num_columns
        input_tbl_params = self.to_outtable_params()
        input_tbl_name = input_tbl_params['name']
        conn = self.get_connection()
        conn.loadactionset('timeData')
        tbl_colinfo = self.columninfo().ColumnInfo
        # Normalize the groupby list and cast int group columns to double.
        if self.groupby_var is None:
            self.groupby_var = []
        elif not isinstance(self.groupby_var, list):
            self.groupby_var = [self.groupby_var]
        if set(self.groupby_var).issubset(tbl_colinfo.Column):
            int_to_double(conn, tbl_colinfo, input_tbl_name,
                          input_tbl_name, self.groupby_var)
        else:
            raise ValueError('''One or more variables specified in 'groupby'
            do not exist in the input table.
            ''')
        # Re-read column info after the cast, then validate columns.
        tbl_colinfo = self.columninfo().ColumnInfo
        if self.timeid not in tbl_colinfo.Column.values:
            raise ValueError('''variable 'timeid' does not exist in input table.
            ''')
        if not isinstance(self.timeseries, list):
            self.timeseries = [self.timeseries]
        if not set(self.timeseries).issubset(tbl_colinfo.Column):
            raise ValueError('''One or more variables specified in 'timeseries'
            do not exist in the input table.
            ''')
        if self.extra_num_columns is None:
            self.extra_num_columns = []
        elif not isinstance(self.extra_num_columns, list):
            self.extra_num_columns = [self.extra_num_columns]
        if not set(self.extra_num_columns).issubset(tbl_colinfo.Column):
            raise ValueError('''One or more variables specified in 'extra_num_columns'
            do not exist in the input table.
            ''')
        # Datetime timeids use the 'dt'-prefixed interval names; a plain
        # date cannot be accumulated to sub-day frequency.
        if self.timeid_type == 'datetime':
            acc_interval = 'dt' + acc_interval
        elif ((self.timeid_type == 'date')
              and (acc_interval.lower() in ['hour', 'minute', 'second'])):
            raise ValueError('''the acc_interval has higher frequency than day,
            yet the timeid variable is in the date format.
            ''')
        if acc_method_byvar is None:
            acc_method_byvar = {}
        # Build the per-series accumulation spec, applying overrides first.
        serieslist = []
        for ts in self.timeseries:
            if ts in acc_method_byvar.keys():
                method_dict = {'acc':acc_method_byvar[ts],'name':ts}
                serieslist.append(method_dict)
            else:
                method_dict = {'acc':default_ts_acc,'name':ts}
                serieslist.append(method_dict)
        for extra_col in self.extra_num_columns:
            if extra_col in self.timeseries:
                warnings.warn('''
                columns in extra_num_columns are also found in
                timeseries, and will be ignored.
                ''')
                continue
            elif extra_col in acc_method_byvar.keys():
                method_dict = {'acc':acc_method_byvar[extra_col],'name':extra_col}
                serieslist.append(method_dict)
            else:
                method_dict = {'acc':default_col_acc,'name':extra_col}
                serieslist.append(method_dict)
        # Run the accumulation; the input table is replaced in place and a
        # <name>_summary table is produced.
        acc_result = conn.retrieve('timedata.timeseries', _messagelevel='error',
                                   table={'groupby':self.groupby_var,'name': input_tbl_name},
                                   series=serieslist,
                                   timeid=self.timeid,
                                   interval=acc_interval,
                                   trimid='BOTH',
                                   sumout=dict(name=input_tbl_name + '_summary', replace=True),
                                   casout=dict(name=input_tbl_name, replace=True))
        if acc_interval.startswith('dt'):
            print('NOTE: Timeseries are accumulated to the frequency of {}'.format(acc_interval[2:]))
        else:
            print('NOTE: Timeseries are accumulated to the frequency of {}'.format(acc_interval))
    def prepare_subsequences(self, seq_len, target, predictor_timeseries=None,
                             timeid=None, groupby=None,
                             input_length_name='xlen', target_length_name='ylen',
                             missing_handling='drop'):
        '''
        Create lagged subsequence columns for sequence (RNN) modeling.

        Adds ``<var>_lagK`` columns per predictor (and per target when the
        model is auto-regressive), plus fixed input/target length columns,
        via a server-side DATA step.

        Parameters
        ----------
        seq_len : int
            Subsequence (lookback window) length; must be >= 1.
        target : string or single-element list
            Target column (univariate only).
        predictor_timeseries : string or list-of-strings, optional
            Predictor series; defaults to the target (auto-regressive).
        timeid : string, optional
            Timeid column; defaults to the previously configured one.
        groupby : string or list-of-strings, optional
            BY-group column(s).
        input_length_name, target_length_name : string, optional
            Names of the generated sequence-length columns.
        missing_handling : string, optional
            'drop' removes rows with any missing value (incomplete windows).

        Raises
        ------
        ValueError, DLPyError
            On missing timeid/columns or a multivariate target.
        '''
        # NOTE(review): reads self.timeseries below, so this assumes
        # timeseries_formatting/accumulation ran first — confirm.
        tbl_colinfo = self.columninfo().ColumnInfo
        input_tbl_params = self.to_outtable_params()
        input_tbl_name = input_tbl_params['name']
        conn = self.get_connection()
        if timeid is not None:
            self.timeid = timeid
        elif self.timeid is None:
            raise ValueError('''timeid is not specified''')
        if self.timeid not in tbl_colinfo.Column.values:
            raise ValueError('''timeid does not exist in the input table''')
        if groupby is not None:
            self.groupby_var = groupby
        if self.groupby_var is None:
            self.groupby_var = []
        elif not isinstance(self.groupby_var, list):
            self.groupby_var = [self.groupby_var]
        if set(self.groupby_var).issubset(tbl_colinfo.Column):
            int_to_double(conn, tbl_colinfo, input_tbl_name,
                          input_tbl_name, self.groupby_var)
        else:
            raise ValueError('''One or more variables specified in 'groupby'
            do not exist in the input table.
            ''')
        if isinstance(target, list):
            if len(target) > 1:
                raise DLPyError('''currently only support univariate target''')
        else:
            target = [target]
        if predictor_timeseries is None:
            predictor_timeseries = target
        elif not isinstance(predictor_timeseries, list):
            predictor_timeseries = [predictor_timeseries]
        # Auto-regressive when the target itself is among the predictors.
        if set(target).issubset(predictor_timeseries):
            independent_pred = [var for var in predictor_timeseries
                                if var not in target]
            self.auto_regressive = True
        else:
            independent_pred = predictor_timeseries
            self.auto_regressive = False
        if not set(target).issubset(tbl_colinfo.Column):
            raise ValueError('''invalid target variable''')
        if len(independent_pred) > 0:
            if not set(independent_pred).issubset(tbl_colinfo.Column):
                raise ValueError('''columns in predictor_timeseries are absent from
                the accumulated timeseriest table.''')
        if self.timeseries is None:
            warnings.warn('''timeseries has not been formatted by timeseries_formatting,
            consider reload the data and use timeseries_formatting to format the data,
            unless the data has already been pre-formatted.''')
        else:
            if not set(target).issubset(self.timeseries):
                warnings.warn('''target is not in pre-formatted timeseries,
                consider reload the data and use timeseries_formatting to format the data,
                unless the data has already been pre-formatted.''')
            if len(independent_pred) > 0:
                if not set(independent_pred).issubset(self.timeseries):
                    warnings.warn('''
                    some of predictor_timeseries are not in pre-accumulated timeseries,\n
                    consider reload the data and use timeseries_accumulation to accumulate the data,\n
                    unless the data has already been pre-formatted.
                    ''')
        self.target = target[0]
        self.independent_pred = independent_pred
        self.seq_len = seq_len
        if self.seq_len < 1:
            raise ValueError('''RNN sequence length at least need to be 1''')
        # Build one DATA step that adds all lag columns, sorted BY group
        # and timeid so LAG() picks up the previous observation in-series.
        sasCode = 'data {0}; set {0}; by {1} {2};'.format(
            input_tbl_name, ' '.join(self.groupby_var), self.timeid)
        if self.seq_len > 1:
            for var in self.independent_pred:
                sasCode += self.create_lags(var, self.seq_len - 1, self.groupby_var)
        if self.auto_regressive:
            # Target lags go one step further back to avoid leakage.
            sasCode += self.create_lags(self.target, self.seq_len, self.groupby_var)
        sasCode += '{0} = {1};'.format(input_length_name, self.seq_len)
        sasCode += '{} = 1;'.format(target_length_name)
        if missing_handling == 'drop':
            # Drop incomplete windows (any missing value in the row).
            sasCode += 'if not cmiss(of _all_) then output {};'.format(input_tbl_name)
        sasCode += 'run;'
        # LAG() is order-dependent: without BY groups, force a single
        # machine/thread so row order is preserved.
        if len(self.groupby_var) == 0:
            conn.retrieve('dataStep.runCode', _messagelevel='error', code=sasCode,
                          single='Yes')
        else:
            conn.retrieve('dataStep.runCode', _messagelevel='error', code=sasCode)
        # Collect the model input columns, oldest timestep first.
        self.input_vars = []
        for i in range(self.seq_len):
            if self.auto_regressive:
                self.input_vars.append('{0}_lag{1}'.format(self.target, i+1))
            for var in self.independent_pred:
                if i == 0:
                    self.input_vars.append(var)
                else:
                    self.input_vars.append('{0}_lag{1}'.format(var, i))
        self.input_vars.reverse()
        self.tokensize = len(predictor_timeseries)
        self.sequence_opt = dict(input_length=input_length_name,
                                 target_length=target_length_name,
                                 token_size=self.tokensize)
        self.inputs_target = dict(inputs=self.input_vars,
                                  target=self.target)
        print('NOTE: timeseries subsequences are prepared with subsequence length = {}'.format(seq_len))
@property
def timeid_type(self):
tbl_colinfo = self.columninfo().ColumnInfo
timeid_type = self.identify_coltype(self.timeid, tbl_colinfo)
return timeid_type
    @staticmethod
    def identify_coltype(col, tbl_colinfo):
        '''
        Classify a column as 'date', 'datetime', or its raw CAS type, by
        matching its display format against the SWAT date/datetime format
        lists.

        Parameters
        ----------
        col : string
            Column name to classify.
        tbl_colinfo : DataFrame
            ColumnInfo table from ``columninfo()``.

        Raises
        ------
        ValueError
            If ``col`` is not in the table.
        DLPyError
            If the format matches both date and datetime patterns.
        '''
        if col not in tbl_colinfo.Column.values:
            raise ValueError('''variable {} does not exist in input table.
            '''.format(col))
        if 'Format' in tbl_colinfo.columns:
            cas_timeid_fmt = tbl_colinfo.Format[tbl_colinfo.Column == col].values[0]
        else:
            cas_timeid_fmt = None
        col_type = tbl_colinfo.Type[tbl_colinfo.Column == col].values[0]
        if cas_timeid_fmt:
            # Anchored match against the known SAS date formats first...
            for pattern in swat.options.cas.dataset.date_formats:
                if re.match(r'{}\Z'.format(pattern), cas_timeid_fmt):
                    col_type = 'date'
                    break
            # ...then datetime formats; matching both is ambiguous.
            for pattern in swat.options.cas.dataset.datetime_formats:
                if re.match(r'{}\Z'.format(pattern), cas_timeid_fmt):
                    if col_type == 'date':
                        raise DLPyError('''{} format in CASTable is ambiguous,
                        and can match both sas date and sas datetime format'''.format(col))
                    else:
                        col_type = 'datetime'
                        break
        return col_type
    def timeseries_partition(self, training_start=None, validation_start=None,
                             testing_start=None, end_time=None,
                             partition_var_name='split_id',
                             traintbl_suffix='train',
                             validtbl_suffix='valid',
                             testtbl_suffix='test'):
        '''
        Split the table chronologically into train/validation/test tables.

        Boundaries are half-open on the right except for the last non-empty
        split, which includes its end point. An omitted validation or test
        start yields an empty corresponding split.

        Parameters
        ----------
        training_start, validation_start, testing_start, end_time :
            datetime.date, datetime.datetime or numeric, optional
            Split boundaries, converted to SAS time expressions according
            to the timeid type.
        partition_var_name : string, optional
            Name of the generated split-label column.
        traintbl_suffix, validtbl_suffix, testtbl_suffix : string, optional
            Suffixes appended to the input table name for the output tables.

        Returns
        -------
        (train, valid, test) : tuple of :class:`TimeseriesTable`
        '''
        self.partition_var_name = partition_var_name
        conn = self.get_connection()
        # Convert Python boundary values into SAS time expressions.
        training_start = self.convert_to_sas_time_format(training_start, self.timeid_type)
        validation_start = self.convert_to_sas_time_format(validation_start, self.timeid_type)
        testing_start = self.convert_to_sas_time_format(testing_start, self.timeid_type)
        end_time = self.convert_to_sas_time_format(end_time, self.timeid_type)
        # Missing splits collapse onto the next boundary; the last real
        # split is right-inclusive.
        if testing_start is None:
            testing_start = end_time
            test_statement = ';'
        else:
            test_statement = self.generate_splitting_code(
                self.timeid, testing_start, end_time,
                True, self.partition_var_name, 'test')
        if validation_start is None:
            validation_start = testing_start
            valid_statement = ';'
        else:
            if testing_start == end_time:
                valid_statement = self.generate_splitting_code(
                    self.timeid, validation_start, testing_start,
                    True, self.partition_var_name, 'valid')
            else:
                valid_statement = self.generate_splitting_code(
                    self.timeid, validation_start, testing_start,
                    False, self.partition_var_name, 'valid')
        if validation_start == end_time:
            train_statement = self.generate_splitting_code(
                self.timeid, training_start, validation_start,
                True, self.partition_var_name, 'train')
        else:
            train_statement = self.generate_splitting_code(
                self.timeid, training_start, validation_start,
                False, self.partition_var_name, 'train')
        input_tbl_params = self.to_outtable_params()
        input_tbl_name = input_tbl_params['name']
        traintbl_name = '_'.join([input_tbl_name, traintbl_suffix])
        validtbl_name = '_'.join([input_tbl_name, validtbl_suffix])
        testtbl_name = '_'.join([input_tbl_name, testtbl_suffix])
        # One DATA step labels each row and routes it to its output table.
        splitting_code = '''
        data {4} {5} {6};
        set {0};
        {1}
        {2}
        {3}
        if {7} = 'train' then output {4};
        if {7} = 'valid' then output {5};
        if {7} = 'test' then output {6};
        run;
        '''.format(input_tbl_name, train_statement, valid_statement, test_statement,
                   traintbl_name, validtbl_name, testtbl_name, self.partition_var_name)
        conn.retrieve('dataStep.runCode', _messagelevel='error', code=splitting_code)
        # Wrap each output table, carrying over the timeseries metadata.
        train_out = dict(name=traintbl_name, timeid=self.timeid, groupby_var=self.groupby_var,
                         sequence_opt=self.sequence_opt, inputs_target=self.inputs_target)
        valid_out = dict(name=validtbl_name, timeid=self.timeid, groupby_var=self.groupby_var,
                         sequence_opt=self.sequence_opt, inputs_target=self.inputs_target)
        test_out = dict(name=testtbl_name, timeid=self.timeid, groupby_var=self.groupby_var,
                        sequence_opt=self.sequence_opt, inputs_target=self.inputs_target)
        train_out_tbl = TimeseriesTable(**train_out)
        train_out_tbl.set_connection(conn)
        valid_out_tbl = TimeseriesTable(**valid_out)
        valid_out_tbl.set_connection(conn)
        test_out_tbl = TimeseriesTable(**test_out)
        test_out_tbl.set_connection(conn)
        print('NOTE: Training set has {} observations'.format(train_out_tbl.shape[0]))
        print('NOTE: Validation set has {} observations'.format(valid_out_tbl.shape[0]))
        print('NOTE: Testing set has {} observations'.format(test_out_tbl.shape[0]))
        return train_out_tbl, valid_out_tbl, test_out_tbl
@staticmethod
def generate_splitting_code(timeid, start, end, right_inclusive,
partition_var_name, partition_val):
if (start is None) and (end is not None):
if right_inclusive:
statement = '''if {0} <= {1} then {2} = '{3}';'''.format(
timeid, end, partition_var_name, partition_val)
else:
statement = '''if {0} < {1} then {2} = '{3}';'''.format(
timeid, end, partition_var_name, partition_val)
elif (start is not None) and (end is None):
statement = '''if {0} >= {1} then {2} = '{3}';'''.format(
timeid, start, partition_var_name, partition_val)
elif (start is not None) and (end is not None):
if right_inclusive:
statement = '''if {0} >= {1} and {0} <= {2} then {3} = '{4}';'''.format(
timeid, start, end, partition_var_name, partition_val)
else:
statement = '''if {0} >= {1} and {0} < {2} then {3} = '{4}';'''.format(
timeid, start, end, partition_var_name, partition_val)
else:
statement = '''{0} = '{1}';'''.format(partition_var_name, partition_val)
return statement
    @staticmethod
    def convert_to_sas_time_format(python_time, sas_format_type):
        '''
        Convert a Python date/datetime/number into a SAS time expression
        string matching the timeid type.

        Parameters
        ----------
        python_time : datetime.date, datetime.datetime, numeric or None
            Value to convert; None passes through as None.
        sas_format_type : string
            Timeid type: 'date', 'datetime' or 'double'.

        Returns
        -------
        string, numeric or None
            SAS expression (e.g. ``mdy(m,d,y)``) or the numeric value.

        Raises
        ------
        ValueError
            If the Python value does not match the timeid type.
        DLPyError
            If the timeid type is not one of the supported kinds.
        '''
        if sas_format_type == 'date':
            if isinstance(python_time, datetime.date):
                sas_time_str = 'mdy({0},{1},{2})'.format(python_time.month,
                                                         python_time.day, python_time.year)
                return sas_time_str
            elif python_time is None:
                return None
            else:
                raise ValueError('''The timeid type is date format, so the input
                python time variable should be date or datetime format''')
        elif sas_format_type == 'datetime':
            if isinstance(python_time, datetime.datetime):
                sas_time_str = 'dhms(mdy({0},{1},{2}), {3}, {4}, {5})'.format(
                    python_time.month, python_time.day, python_time.year,
                    python_time.hour, python_time.minute, python_time.second)
                return sas_time_str
            elif isinstance(python_time, datetime.date):
                # A plain date maps to midnight of that day.
                sas_time_str = 'dhms(mdy({0},{1},{2}), 0, 0, 0)'.format(
                    python_time.month, python_time.day, python_time.year)
                return sas_time_str
            elif python_time is None:
                return None
            else:
                raise ValueError('''The timeid type is datetime format, so the input
                python time variable should be date or datetime format''')
        elif sas_format_type == 'double':
            if isinstance(python_time, numbers.Real):
                return python_time
            elif python_time is None:
                return None
            else:
                raise ValueError('''The timeid type is double, so the input
                python time variable should be int or float''')
        else:
            raise DLPyError('''timeid format in CASTable is wrong, consider reload
            the table and formatting it with timeseries_formatting''')
@staticmethod
def create_lags(varname, nlags, byvar):
if not isinstance(byvar, list):
byvar = [byvar]
byvar_strlist = ['first.{}'.format(var) for var in byvar]
sasCode = ''
for i in range(nlags):
if i == 0:
sasCode += '{0}_lag{1} = lag({0});'.format(varname, i+1)
else:
sasCode += '{0}_lag{1} = lag({0}_lag{2});'.format(varname, i+1, i)
if len(byvar) > 0:
sasCode += 'if ' + ' or '.join(byvar_strlist)
sasCode += ' then {0}_lag{1} = .;'.format(varname, i+1)
return sasCode
@staticmethod
def find_file_caslib(conn, path):
paths = conn.caslibinfo().CASLibInfo.Path.tolist()
caslibs = conn.caslibinfo().CASLibInfo.Name.tolist()
subdirs = conn.caslibinfo().CASLibInfo.Subdirs.tolist()
server_type = get_cas_host_type(conn).lower()
if server_type.startswith("lin") or server_type.startswith("osx"):
sep = '/'
else:
sep = '\\'
for i, directory in enumerate(paths):
if path.startswith(directory) and (subdirs[i]==1):
rest_path = path[len(directory):]
caslibname = caslibs[i]
return (caslibname, rest_path)
elif path.startswith(directory) and (subdirs[i]==0):
rest_path = path[len(directory):]
if sep in rest_path:
continue
else:
caslibname = caslibs[i]
return (caslibname, rest_path)
return (None, None)
| true | true |
f720ffac3d7e28046fdffc89dc587da7ce834892 | 9,152 | py | Python | tests/utils_tests/test_functional.py | Lord-Elrond/django | 178109c1734ccc16386c3e3cbae1465c7a1b8ed8 | [
"BSD-3-Clause",
"0BSD"
] | 61,676 | 2015-01-01T00:05:13.000Z | 2022-03-31T20:37:54.000Z | tests/utils_tests/test_functional.py | Lord-Elrond/django | 178109c1734ccc16386c3e3cbae1465c7a1b8ed8 | [
"BSD-3-Clause",
"0BSD"
] | 8,884 | 2015-01-01T00:12:05.000Z | 2022-03-31T19:53:11.000Z | tests/utils_tests/test_functional.py | Lord-Elrond/django | 178109c1734ccc16386c3e3cbae1465c7a1b8ed8 | [
"BSD-3-Clause",
"0BSD"
] | 33,143 | 2015-01-01T02:04:52.000Z | 2022-03-31T19:42:46.000Z | from unittest import mock
from django.test import SimpleTestCase
from django.test.utils import ignore_warnings
from django.utils.deprecation import RemovedInDjango50Warning
from django.utils.functional import cached_property, classproperty, lazy
class FunctionalTests(SimpleTestCase):
    """Tests for django.utils.functional: lazy(), cached_property and
    classproperty."""
    def test_lazy(self):
        t = lazy(lambda: tuple(range(3)), list, tuple)
        for a, b in zip(t(), range(3)):
            self.assertEqual(a, b)
    def test_lazy_base_class(self):
        """lazy also finds base class methods in the proxy object"""
        class Base:
            def base_method(self):
                pass
        class Klazz(Base):
            pass
        t = lazy(lambda: Klazz(), Klazz)()
        self.assertIn('base_method', dir(t))
    def test_lazy_base_class_override(self):
        """lazy finds the correct (overridden) method implementation"""
        class Base:
            def method(self):
                return 'Base'
        class Klazz(Base):
            def method(self):
                return 'Klazz'
        t = lazy(lambda: Klazz(), Base)()
        self.assertEqual(t.method(), 'Klazz')
    def test_lazy_object_to_string(self):
        # Non-ASCII str and bytes conversions are proxied correctly.
        class Klazz:
            def __str__(self):
                return "Î am ā Ǩlâzz."
            def __bytes__(self):
                return b"\xc3\x8e am \xc4\x81 binary \xc7\xa8l\xc3\xa2zz."
        t = lazy(lambda: Klazz(), Klazz)()
        self.assertEqual(str(t), "Î am ā Ǩlâzz.")
        self.assertEqual(bytes(t), b"\xc3\x8e am \xc4\x81 binary \xc7\xa8l\xc3\xa2zz.")
    def assertCachedPropertyWorks(self, attr, Class):
        # Shared helper: checks caching, docstring preservation and
        # per-instance state for the cached_property named `attr` on `Class`.
        with self.subTest(attr=attr):
            def get(source):
                return getattr(source, attr)
            obj = Class()
            class SubClass(Class):
                pass
            subobj = SubClass()
            # Docstring is preserved.
            self.assertEqual(get(Class).__doc__, 'Here is the docstring...')
            self.assertEqual(get(SubClass).__doc__, 'Here is the docstring...')
            # It's cached.
            self.assertEqual(get(obj), get(obj))
            self.assertEqual(get(subobj), get(subobj))
            # The correct value is returned.
            self.assertEqual(get(obj)[0], 1)
            self.assertEqual(get(subobj)[0], 1)
            # State isn't shared between instances.
            obj2 = Class()
            subobj2 = SubClass()
            self.assertNotEqual(get(obj), get(obj2))
            self.assertNotEqual(get(subobj), get(subobj2))
            # It behaves like a property when there's no instance.
            self.assertIsInstance(get(Class), cached_property)
            self.assertIsInstance(get(SubClass), cached_property)
            # 'other_value' doesn't become a property.
            self.assertTrue(callable(obj.other_value))
            self.assertTrue(callable(subobj.other_value))
    def test_cached_property(self):
        """cached_property caches its value and behaves like a property."""
        class Class:
            @cached_property
            def value(self):
                """Here is the docstring..."""
                return 1, object()
            @cached_property
            def __foo__(self):
                """Here is the docstring..."""
                return 1, object()
            def other_value(self):
                """Here is the docstring..."""
                return 1, object()
            other = cached_property(other_value)
        attrs = ['value', 'other', '__foo__']
        for attr in attrs:
            self.assertCachedPropertyWorks(attr, Class)
    @ignore_warnings(category=RemovedInDjango50Warning)
    def test_cached_property_name(self):
        class Class:
            def other_value(self):
                """Here is the docstring..."""
                return 1, object()
            other = cached_property(other_value, name='other')
            other2 = cached_property(other_value, name='different_name')
        self.assertCachedPropertyWorks('other', Class)
        # An explicit name is ignored.
        obj = Class()
        obj.other2
        self.assertFalse(hasattr(obj, 'different_name'))
    def test_cached_property_name_deprecation_warning(self):
        def value(self):
            return 1
        msg = "The name argument is deprecated as it's unnecessary as of Python 3.6."
        with self.assertWarnsMessage(RemovedInDjango50Warning, msg):
            cached_property(value, name='other_name')
    def test_cached_property_auto_name(self):
        """
        cached_property caches its value and behaves like a property
        on mangled methods or when the name kwarg isn't set.
        """
        class Class:
            @cached_property
            def __value(self):
                """Here is the docstring..."""
                return 1, object()
            def other_value(self):
                """Here is the docstring..."""
                return 1, object()
            other = cached_property(other_value)
        attrs = ['_Class__value', 'other']
        for attr in attrs:
            self.assertCachedPropertyWorks(attr, Class)
    def test_cached_property_reuse_different_names(self):
        """Disallow this case because the decorated function wouldn't be cached."""
        with self.assertRaises(RuntimeError) as ctx:
            class ReusedCachedProperty:
                @cached_property
                def a(self):
                    pass
                b = a
        self.assertEqual(
            str(ctx.exception.__context__),
            str(TypeError(
                "Cannot assign the same cached_property to two different "
                "names ('a' and 'b')."
            ))
        )
    def test_cached_property_reuse_same_name(self):
        """
        Reusing a cached_property on different classes under the same name is
        allowed.
        """
        counter = 0
        @cached_property
        def _cp(_self):
            nonlocal counter
            counter += 1
            return counter
        class A:
            cp = _cp
        class B:
            cp = _cp
        a = A()
        b = B()
        self.assertEqual(a.cp, 1)
        self.assertEqual(b.cp, 2)
        self.assertEqual(a.cp, 1)
    def test_cached_property_set_name_not_called(self):
        cp = cached_property(lambda s: None)
        class Foo:
            pass
        Foo.cp = cp
        msg = 'Cannot use cached_property instance without calling __set_name__() on it.'
        with self.assertRaisesMessage(TypeError, msg):
            Foo().cp
    def test_lazy_add(self):
        lazy_4 = lazy(lambda: 4, int)
        lazy_5 = lazy(lambda: 5, int)
        self.assertEqual(lazy_4() + lazy_5(), 9)
    def test_lazy_equality(self):
        """
        == and != work correctly for Promises.
        """
        lazy_a = lazy(lambda: 4, int)
        lazy_b = lazy(lambda: 4, int)
        lazy_c = lazy(lambda: 5, int)
        self.assertEqual(lazy_a(), lazy_b())
        self.assertNotEqual(lazy_b(), lazy_c())
    def test_lazy_repr_text(self):
        original_object = 'Lazy translation text'
        lazy_obj = lazy(lambda: original_object, str)
        self.assertEqual(repr(original_object), repr(lazy_obj()))
    def test_lazy_repr_int(self):
        original_object = 15
        lazy_obj = lazy(lambda: original_object, int)
        self.assertEqual(repr(original_object), repr(lazy_obj()))
    def test_lazy_repr_bytes(self):
        original_object = b'J\xc3\xbcst a str\xc3\xadng'
        lazy_obj = lazy(lambda: original_object, bytes)
        self.assertEqual(repr(original_object), repr(lazy_obj()))
    def test_lazy_class_preparation_caching(self):
        # lazy() should prepare the proxy class only once i.e. the first time
        # it's used.
        lazified = lazy(lambda: 0, int)
        __proxy__ = lazified().__class__
        with mock.patch.object(__proxy__, '__prepare_class__') as mocked:
            lazified()
            mocked.assert_not_called()
    def test_lazy_bytes_and_str_result_classes(self):
        lazy_obj = lazy(lambda: 'test', str, bytes)
        msg = 'Cannot call lazy() with both bytes and text return types.'
        with self.assertRaisesMessage(ValueError, msg):
            lazy_obj()
    def test_classproperty_getter(self):
        class Foo:
            foo_attr = 123
            def __init__(self):
                self.foo_attr = 456
            @classproperty
            def foo(cls):
                return cls.foo_attr
        class Bar:
            bar = classproperty()
            @bar.getter
            def bar(cls):
                return 123
        # The class attribute wins over the instance attribute.
        self.assertEqual(Foo.foo, 123)
        self.assertEqual(Foo().foo, 123)
        self.assertEqual(Bar.bar, 123)
        self.assertEqual(Bar().bar, 123)
    def test_classproperty_override_getter(self):
        class Foo:
            @classproperty
            def foo(cls):
                return 123
            @foo.getter
            def foo(cls):
                return 456
        self.assertEqual(Foo.foo, 456)
        self.assertEqual(Foo().foo, 456)
| 31.777778 | 89 | 0.573864 | from unittest import mock
from django.test import SimpleTestCase
from django.test.utils import ignore_warnings
from django.utils.deprecation import RemovedInDjango50Warning
from django.utils.functional import cached_property, classproperty, lazy
class FunctionalTests(SimpleTestCase):
def test_lazy(self):
t = lazy(lambda: tuple(range(3)), list, tuple)
for a, b in zip(t(), range(3)):
self.assertEqual(a, b)
def test_lazy_base_class(self):
class Base:
def base_method(self):
pass
class Klazz(Base):
pass
t = lazy(lambda: Klazz(), Klazz)()
self.assertIn('base_method', dir(t))
def test_lazy_base_class_override(self):
class Base:
def method(self):
return 'Base'
class Klazz(Base):
def method(self):
return 'Klazz'
t = lazy(lambda: Klazz(), Base)()
self.assertEqual(t.method(), 'Klazz')
def test_lazy_object_to_string(self):
class Klazz:
def __str__(self):
return "Î am ā Ǩlâzz."
def __bytes__(self):
return b"\xc3\x8e am \xc4\x81 binary \xc7\xa8l\xc3\xa2zz."
t = lazy(lambda: Klazz(), Klazz)()
self.assertEqual(str(t), "Î am ā Ǩlâzz.")
self.assertEqual(bytes(t), b"\xc3\x8e am \xc4\x81 binary \xc7\xa8l\xc3\xa2zz.")
def assertCachedPropertyWorks(self, attr, Class):
with self.subTest(attr=attr):
def get(source):
return getattr(source, attr)
obj = Class()
class SubClass(Class):
pass
subobj = SubClass()
self.assertEqual(get(Class).__doc__, 'Here is the docstring...')
self.assertEqual(get(SubClass).__doc__, 'Here is the docstring...')
self.assertEqual(get(obj), get(obj))
self.assertEqual(get(subobj), get(subobj))
# The correct value is returned.
self.assertEqual(get(obj)[0], 1)
self.assertEqual(get(subobj)[0], 1)
# State isn't shared between instances.
obj2 = Class()
subobj2 = SubClass()
self.assertNotEqual(get(obj), get(obj2))
self.assertNotEqual(get(subobj), get(subobj2))
self.assertIsInstance(get(Class), cached_property)
self.assertIsInstance(get(SubClass), cached_property)
# 'other_value' doesn't become a property.
self.assertTrue(callable(obj.other_value))
self.assertTrue(callable(subobj.other_value))
def test_cached_property(self):
class Class:
@cached_property
def value(self):
return 1, object()
@cached_property
def __foo__(self):
return 1, object()
def other_value(self):
return 1, object()
other = cached_property(other_value)
attrs = ['value', 'other', '__foo__']
for attr in attrs:
self.assertCachedPropertyWorks(attr, Class)
@ignore_warnings(category=RemovedInDjango50Warning)
def test_cached_property_name(self):
class Class:
def other_value(self):
return 1, object()
other = cached_property(other_value, name='other')
other2 = cached_property(other_value, name='different_name')
self.assertCachedPropertyWorks('other', Class)
obj = Class()
obj.other2
self.assertFalse(hasattr(obj, 'different_name'))
def test_cached_property_name_deprecation_warning(self):
def value(self):
return 1
msg = "The name argument is deprecated as it's unnecessary as of Python 3.6."
with self.assertWarnsMessage(RemovedInDjango50Warning, msg):
cached_property(value, name='other_name')
def test_cached_property_auto_name(self):
class Class:
@cached_property
def __value(self):
return 1, object()
def other_value(self):
return 1, object()
other = cached_property(other_value)
attrs = ['_Class__value', 'other']
for attr in attrs:
self.assertCachedPropertyWorks(attr, Class)
def test_cached_property_reuse_different_names(self):
with self.assertRaises(RuntimeError) as ctx:
class ReusedCachedProperty:
@cached_property
def a(self):
pass
b = a
self.assertEqual(
str(ctx.exception.__context__),
str(TypeError(
"Cannot assign the same cached_property to two different "
"names ('a' and 'b')."
))
)
def test_cached_property_reuse_same_name(self):
counter = 0
@cached_property
def _cp(_self):
nonlocal counter
counter += 1
return counter
class A:
cp = _cp
class B:
cp = _cp
a = A()
b = B()
self.assertEqual(a.cp, 1)
self.assertEqual(b.cp, 2)
self.assertEqual(a.cp, 1)
def test_cached_property_set_name_not_called(self):
cp = cached_property(lambda s: None)
class Foo:
pass
Foo.cp = cp
msg = 'Cannot use cached_property instance without calling __set_name__() on it.'
with self.assertRaisesMessage(TypeError, msg):
Foo().cp
def test_lazy_add(self):
lazy_4 = lazy(lambda: 4, int)
lazy_5 = lazy(lambda: 5, int)
self.assertEqual(lazy_4() + lazy_5(), 9)
def test_lazy_equality(self):
lazy_a = lazy(lambda: 4, int)
lazy_b = lazy(lambda: 4, int)
lazy_c = lazy(lambda: 5, int)
self.assertEqual(lazy_a(), lazy_b())
self.assertNotEqual(lazy_b(), lazy_c())
def test_lazy_repr_text(self):
original_object = 'Lazy translation text'
lazy_obj = lazy(lambda: original_object, str)
self.assertEqual(repr(original_object), repr(lazy_obj()))
def test_lazy_repr_int(self):
original_object = 15
lazy_obj = lazy(lambda: original_object, int)
self.assertEqual(repr(original_object), repr(lazy_obj()))
def test_lazy_repr_bytes(self):
original_object = b'J\xc3\xbcst a str\xc3\xadng'
lazy_obj = lazy(lambda: original_object, bytes)
self.assertEqual(repr(original_object), repr(lazy_obj()))
def test_lazy_class_preparation_caching(self):
# lazy() should prepare the proxy class only once i.e. the first time
# it's used.
lazified = lazy(lambda: 0, int)
__proxy__ = lazified().__class__
with mock.patch.object(__proxy__, '__prepare_class__') as mocked:
lazified()
mocked.assert_not_called()
def test_lazy_bytes_and_str_result_classes(self):
lazy_obj = lazy(lambda: 'test', str, bytes)
msg = 'Cannot call lazy() with both bytes and text return types.'
with self.assertRaisesMessage(ValueError, msg):
lazy_obj()
def test_classproperty_getter(self):
class Foo:
foo_attr = 123
def __init__(self):
self.foo_attr = 456
@classproperty
def foo(cls):
return cls.foo_attr
class Bar:
bar = classproperty()
@bar.getter
def bar(cls):
return 123
self.assertEqual(Foo.foo, 123)
self.assertEqual(Foo().foo, 123)
self.assertEqual(Bar.bar, 123)
self.assertEqual(Bar().bar, 123)
def test_classproperty_override_getter(self):
class Foo:
@classproperty
def foo(cls):
return 123
@foo.getter
def foo(cls):
return 456
self.assertEqual(Foo.foo, 456)
self.assertEqual(Foo().foo, 456)
| true | true |
f7210110e7084f60ae5367f63c7dbd932a3b569e | 4,446 | py | Python | examples/batch_mode/14-burning_ship-deeper_DEM.py | GBillotey/Fractalshades | e100b12db031f016bf1a8a1f4fad9ca1c64a0302 | [
"MIT"
] | null | null | null | examples/batch_mode/14-burning_ship-deeper_DEM.py | GBillotey/Fractalshades | e100b12db031f016bf1a8a1f4fad9ca1c64a0302 | [
"MIT"
] | 1 | 2021-11-01T14:55:57.000Z | 2021-11-01T14:55:57.000Z | examples/batch_mode/14-burning_ship-deeper_DEM.py | GBillotey/Fractalshades | e100b12db031f016bf1a8a1f4fad9ca1c64a0302 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""
============================
14 - Burning ship deeper DEM
============================
Plotting of a distance estimation for the Burning ship (power-2).
This zoom is deeper, featuring a miniship at 1.e-101
Reference:
`fractalshades.models.Perturbation_burning_ship`
"""
import os
import numpy as np
import fractalshades as fs
import fractalshades.models as fsm
import fractalshades.colors as fscolors
from fractalshades.postproc import (
Postproc_batch,
Continuous_iter_pp,
DEM_normal_pp,
DEM_pp,
Raw_pp,
)
from fractalshades.colors.layers import (
Color_layer,
Bool_layer,
Normal_map_layer,
Virtual_layer,
Blinn_lighting,
)
def plot(plot_dir):
fs.settings.enable_multithreading = True
fs.settings.inspect_calc = True
# A simple showcase using perturbation technique
x = '0.533551593577038561769721161491702555962775680136595415306315189524970818968817900068355227861158570104764433694'
y = '1.26175074578870311547721223871955368990255513054155186351034363459852900933566891849764050954410207620093433856'
dx = '7.072814368784043e-101'
precision = 150
nx = 2400
xy_ratio = 1.8
sign = 1.0
DEM_min = 5.e-5
zmin = 0.0
zmax = 1.0
# As this formula is non-analytic, we will 'unskew' based on the
# influencing miniship "size estimate" matrix.
has_skew = True
skew_00 = 1.3141410612942215
skew_01 = 0.8651590600810832
skew_10 = 0.6372176654581702
skew_11 = 1.1804627997751416
calc_name="Burning_ship"
colormap = fscolors.cmap_register["dawn"]
# Run the calculation
f = fsm.Perturbation_burning_ship(plot_dir)
# f.clean_up()
f.zoom(
precision=precision,
x=x,
y=y,
dx=dx,
nx=nx,
xy_ratio=xy_ratio,
theta_deg=-2.,
projection="cartesian",
antialiasing=False,
has_skew=has_skew,
skew_00=skew_00,
skew_01=skew_01,
skew_10=skew_10,
skew_11=skew_11
)
f.calc_std_div(
calc_name=calc_name,
subset=None,
max_iter=50000,
M_divergence=1.e3,
BLA_params={"eps": 1.e-6},
)
f.run()
print("has been run")
# Plot the image
pp = Postproc_batch(f, calc_name)
pp.add_postproc("continuous_iter", Continuous_iter_pp())
pp.add_postproc("distance_estimation", DEM_pp())
pp.add_postproc("interior", Raw_pp("stop_reason", func="x != 1."))
pp.add_postproc("DEM_map", DEM_normal_pp(kind="potential"))
plotter = fs.Fractal_plotter(pp)
plotter.add_layer(Bool_layer("interior", output=False))
plotter.add_layer(Normal_map_layer("DEM_map", max_slope=50, output=False))
plotter.add_layer(
Virtual_layer("continuous_iter", func=None, output=False)
)
cmap_func = lambda x: sign * np.where(
np.isinf(x),
np.log(DEM_min),
np.log(np.clip(x, DEM_min, None))
)
plotter.add_layer(Color_layer(
"distance_estimation",
func=cmap_func,
colormap=colormap,
probes_z=[zmin, zmax],
probes_kind="relative",
output=True
))
plotter["distance_estimation"].set_mask(plotter["interior"],
mask_color=(0.0, 0.22745098173618317, 0.9803921580314636))
plotter["DEM_map"].set_mask(plotter["interior"], mask_color=(0., 0., 0.))
# define the lighting and apply the shading
light = Blinn_lighting(0.4, np.array([1., 1., 1.]))
light.add_light_source(
k_diffuse=0.4,
k_specular=3.,
shininess=100.,
angles=(45., 40.),
coords=None,
color=np.array([1.0, 1.0, 0.98]))
# light.add_light_source(
# k_diffuse=0.8,
# k_specular=1.,
# shininess=40.,
# angles=(90., 20.),
# coords=None,
# color=np.array([1., 1., 1.]))
plotter["distance_estimation"].shade(plotter["DEM_map"], light)
plotter.plot()
if __name__ == "__main__":
    # When run as a script, plot into a directory named after this file.
    # __file__ is not defined when the example is exec'd by the documentation
    # build, so in that case fall back to a temporary directory and run the
    # plotting with its output suppressed.
    try:
        realpath = os.path.realpath(__file__)
        plot_dir = os.path.splitext(realpath)[0]
        plot(plot_dir)
    except NameError:
        import tempfile
        with tempfile.TemporaryDirectory() as plot_dir:
            fs.utils.exec_no_output(plot, plot_dir)
| 27.7875 | 123 | 0.639226 |
import os
import numpy as np
import fractalshades as fs
import fractalshades.models as fsm
import fractalshades.colors as fscolors
from fractalshades.postproc import (
Postproc_batch,
Continuous_iter_pp,
DEM_normal_pp,
DEM_pp,
Raw_pp,
)
from fractalshades.colors.layers import (
Color_layer,
Bool_layer,
Normal_map_layer,
Virtual_layer,
Blinn_lighting,
)
def plot(plot_dir):
fs.settings.enable_multithreading = True
fs.settings.inspect_calc = True
x = '0.533551593577038561769721161491702555962775680136595415306315189524970818968817900068355227861158570104764433694'
y = '1.26175074578870311547721223871955368990255513054155186351034363459852900933566891849764050954410207620093433856'
dx = '7.072814368784043e-101'
precision = 150
nx = 2400
xy_ratio = 1.8
sign = 1.0
DEM_min = 5.e-5
zmin = 0.0
zmax = 1.0
has_skew = True
skew_00 = 1.3141410612942215
skew_01 = 0.8651590600810832
skew_10 = 0.6372176654581702
skew_11 = 1.1804627997751416
calc_name="Burning_ship"
colormap = fscolors.cmap_register["dawn"]
f = fsm.Perturbation_burning_ship(plot_dir)
f.zoom(
precision=precision,
x=x,
y=y,
dx=dx,
nx=nx,
xy_ratio=xy_ratio,
theta_deg=-2.,
projection="cartesian",
antialiasing=False,
has_skew=has_skew,
skew_00=skew_00,
skew_01=skew_01,
skew_10=skew_10,
skew_11=skew_11
)
f.calc_std_div(
calc_name=calc_name,
subset=None,
max_iter=50000,
M_divergence=1.e3,
BLA_params={"eps": 1.e-6},
)
f.run()
print("has been run")
pp = Postproc_batch(f, calc_name)
pp.add_postproc("continuous_iter", Continuous_iter_pp())
pp.add_postproc("distance_estimation", DEM_pp())
pp.add_postproc("interior", Raw_pp("stop_reason", func="x != 1."))
pp.add_postproc("DEM_map", DEM_normal_pp(kind="potential"))
plotter = fs.Fractal_plotter(pp)
plotter.add_layer(Bool_layer("interior", output=False))
plotter.add_layer(Normal_map_layer("DEM_map", max_slope=50, output=False))
plotter.add_layer(
Virtual_layer("continuous_iter", func=None, output=False)
)
cmap_func = lambda x: sign * np.where(
np.isinf(x),
np.log(DEM_min),
np.log(np.clip(x, DEM_min, None))
)
plotter.add_layer(Color_layer(
"distance_estimation",
func=cmap_func,
colormap=colormap,
probes_z=[zmin, zmax],
probes_kind="relative",
output=True
))
plotter["distance_estimation"].set_mask(plotter["interior"],
mask_color=(0.0, 0.22745098173618317, 0.9803921580314636))
plotter["DEM_map"].set_mask(plotter["interior"], mask_color=(0., 0., 0.))
light = Blinn_lighting(0.4, np.array([1., 1., 1.]))
light.add_light_source(
k_diffuse=0.4,
k_specular=3.,
shininess=100.,
angles=(45., 40.),
coords=None,
color=np.array([1.0, 1.0, 0.98]))
plotter["distance_estimation"].shade(plotter["DEM_map"], light)
plotter.plot()
if __name__ == "__main__":
try:
realpath = os.path.realpath(__file__)
plot_dir = os.path.splitext(realpath)[0]
plot(plot_dir)
except NameError:
import tempfile
with tempfile.TemporaryDirectory() as plot_dir:
fs.utils.exec_no_output(plot, plot_dir)
| true | true |
f721011b4e470373ce2d983fc11e2f51ebcc9318 | 2,154 | py | Python | mean_var_std.py | jmacdonald2010/mean-variance-standard-deviation-calculator | badae42c099081610fd55ea5a788867c352da6c0 | [
"MIT"
] | null | null | null | mean_var_std.py | jmacdonald2010/mean-variance-standard-deviation-calculator | badae42c099081610fd55ea5a788867c352da6c0 | [
"MIT"
] | null | null | null | mean_var_std.py | jmacdonald2010/mean-variance-standard-deviation-calculator | badae42c099081610fd55ea5a788867c352da6c0 | [
"MIT"
] | null | null | null | import numpy as np
def calculate(list):
    """Compute summary statistics of nine numbers arranged as a 3x3 matrix.

    Parameters
    ----------
    list : sequence of exactly nine numbers, reshaped row-major into 3x3.

    Returns
    -------
    dict
        Maps 'mean', 'variance', 'standard deviation', 'max', 'min' and
        'sum' to ``[per-column, per-row, flattened]`` results, where
        columns are axis 0 and rows are axis 1.

    Raises
    ------
    ValueError
        If the input does not contain exactly nine numbers.
    """
    if len(list) != 9:
        raise ValueError('List must contain nine numbers.')
    matrix = np.array(list).reshape(3, 3)
    # Preserved debug output from the original implementation.
    print(matrix)
    # Table of (name, reducer) pairs replaces six near-identical blocks.
    stats = [
        ('mean', np.mean),
        ('variance', np.var),
        ('standard deviation', np.std),
        ('max', np.amax),
        ('min', np.amin),
        ('sum', np.sum),
    ]
    calculations = {
        name: [
            func(matrix, axis=0).tolist(),
            func(matrix, axis=1).tolist(),
            func(matrix).tolist(),  # tolist() yields plain Python scalars
        ]
        for name, func in stats
    }
    return calculations
# this code below is for testing the function, and what the dict should look like when outputting data
# test calculations
print(calculate([0,1,2,3,4,5,6,7,8]))
# should return:
'''
{
'mean': [[3.0, 4.0, 5.0], [1.0, 4.0, 7.0], 4.0],
'variance': [[6.0, 6.0, 6.0], [0.6666666666666666, 0.6666666666666666, 0.6666666666666666], 6.666666666666667],
'standard deviation': [[2.449489742783178, 2.449489742783178, 2.449489742783178], [0.816496580927726, 0.816496580927726, 0.816496580927726], 2.581988897471611],
'max': [[6, 7, 8], [2, 5, 8], 8],
'min': [[0, 1, 2], [0, 3, 6], 0],
'sum': [[9, 12, 15], [3, 12, 21], 36]
}''' | 35.9 | 162 | 0.633705 | import numpy as np
def calculate(list):
if len(list) != 9:
raise ValueError('List must contain nine numbers.')
input_array = np.array([[list[0], list[1], list[2]], [list[3], list[4], list[5]], [list[6], list[7], list[8]]])
calculations = dict()
print(input_array)
c_mean = np.mean(input_array, axis=0)
r_mean = np.mean(input_array, axis=1)
f_mean = np.mean(input_array)
calculations['mean'] = [c_mean.tolist(), r_mean.tolist(), f_mean]
c_var = np.var(input_array, axis=0)
r_var = np.var(input_array, axis=1)
f_var = np.var(input_array)
calculations['variance'] = [c_var.tolist(), r_var.tolist(), f_var]
c_std = np.std(input_array, axis=0)
r_std = np.std(input_array, axis=1)
f_std = np.std(input_array)
calculations['standard deviation'] = [c_std.tolist(), r_std.tolist(), f_std]
c_max = np.amax(input_array, axis=0)
r_max = np.amax(input_array, axis=1)
f_max = np.amax(input_array)
calculations['max'] = [c_max.tolist(), r_max.tolist(), f_max]
c_min = np.amin(input_array, axis=0)
r_min = np.amin(input_array, axis=1)
f_min = np.amin(input_array)
calculations['min'] = [c_min.tolist(), r_min.tolist(), f_min]
c_sum = np.sum(input_array, axis=0)
r_sum = np.sum(input_array, axis=1)
f_sum = np.sum(input_array)
calculations['sum'] = [c_sum.tolist(), r_sum.tolist(), f_sum]
return calculations
print(calculate([0,1,2,3,4,5,6,7,8]))
| true | true |
f7210156036c5232eb883f6a274abc49ea56fb3e | 154 | py | Python | src/wsgi.py | mononobi/charma-server | ed90f5ec0b5ff3996232d5fe49a4f77f96d82ced | [
"BSD-3-Clause"
] | 1 | 2020-01-16T23:36:10.000Z | 2020-01-16T23:36:10.000Z | src/wsgi.py | mononobi/imovie-server | ed90f5ec0b5ff3996232d5fe49a4f77f96d82ced | [
"BSD-3-Clause"
] | 24 | 2020-06-08T18:27:04.000Z | 2021-06-06T12:01:39.000Z | src/wsgi.py | mononobi/charma-server | ed90f5ec0b5ff3996232d5fe49a4f77f96d82ced | [
"BSD-3-Clause"
] | 1 | 2020-12-20T05:29:04.000Z | 2020-12-20T05:29:04.000Z | # -*- coding: utf-8 -*-
"""
wsgi module.
"""
from charma import CharmaApplication
app = CharmaApplication()
if __name__ == '__main__':
app.run()
| 11 | 36 | 0.62987 |
from charma import CharmaApplication
app = CharmaApplication()
if __name__ == '__main__':
app.run()
| true | true |
f721018bc2069beaa9e6763bc79cdfced921521d | 667 | py | Python | examples/pipelayer_microservice/src/service/api/__init__.py | greater-than/PipeLayer | 569f43b65992f8a32079835585b864d5fe0bb251 | [
"BSD-2-Clause"
] | 61 | 2021-02-03T02:54:18.000Z | 2021-12-26T11:38:51.000Z | examples/pipelayer_microservice/src/service/api/__init__.py | greater-than/PipeLayer | 569f43b65992f8a32079835585b864d5fe0bb251 | [
"BSD-2-Clause"
] | 1 | 2021-02-16T13:58:33.000Z | 2021-02-18T12:56:32.000Z | examples/pipelayer_microservice/src/service/api/__init__.py | greater-than/PipeLayer | 569f43b65992f8a32079835585b864d5fe0bb251 | [
"BSD-2-Clause"
] | null | null | null | from logging import Logger
from typing import cast
from service.exception import ResponseException
def handle_exception(e: Exception, log: Logger = Logger("Error Logger")) -> dict:
    """Translate an exception into an API error-response payload.

    A ``ResponseException`` maps to its own HTTP status code; any other
    exception is logged and reported as a generic 500.

    Args:
        e: The exception to translate.
        log: Logger used for error reporting (a shared default is used when
            the caller does not provide one).

    Returns:
        A dict with ``statusCode`` and ``message`` keys.
    """
    log.error("Error")
    # isinstance() requires a class or tuple of classes; the original passed
    # a list ([ResponseException]), which raises TypeError at runtime.
    if isinstance(e, ResponseException):
        response_exc = cast(ResponseException, e)
        # The original logged the literal string "{str(e)}" (missing f-prefix)
        # and passed http_status_code= as a bare kwarg, which Logger.error()
        # rejects; the status code is now forwarded via ``extra``.
        log.error(
            str(response_exc),
            exc_info=response_exc,
            extra={"http_status_code": response_exc.http_status_code},
        )
        return {
            "statusCode": response_exc.http_status_code,
            "message": str(response_exc)
        }
    else:
        log.error("Unhandled Exception", exc_info=e)
        return {
            "statusCode": 500,
            # NOTE(review): "occured" typo preserved — it may be part of the
            # API contract asserted by clients/tests.
            "message": "An unhandled exception occured"
        }
| 30.318182 | 81 | 0.626687 | from logging import Logger
from typing import cast
from service.exception import ResponseException
def handle_exception(e: Exception, log: Logger = Logger("Error Logger")) -> dict:
log.error("Error")
if isinstance(e, [ResponseException]):
e: ResponseException = cast(ResponseException, e)
log.error("{str(e)}", exc_info=e, http_status_code=e.http_status_code)
return {
"statusCode": e.http_status_code,
"message": str(e)
}
else:
log.error("Unhandled Exception", exc_info=e)
return {
"statusCode": 500,
"message": "An unhandled exception occured"
}
| true | true |
f7210264f1cece9dc5803d333f7cdf0b48ec3e1d | 68,178 | py | Python | pymc3/tests/test_distributions.py | semohr/pymc3 | 198d13e2ed6f32b33fd8f4b591a47dc8dd8fe2df | [
"Apache-2.0"
] | null | null | null | pymc3/tests/test_distributions.py | semohr/pymc3 | 198d13e2ed6f32b33fd8f4b591a47dc8dd8fe2df | [
"Apache-2.0"
] | null | null | null | pymc3/tests/test_distributions.py | semohr/pymc3 | 198d13e2ed6f32b33fd8f4b591a47dc8dd8fe2df | [
"Apache-2.0"
] | null | null | null | # Copyright 2020 The PyMC Developers
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import itertools
import sys
from .helpers import SeededTest, select_by_precision
from ..vartypes import continuous_types
from ..model import Model, Point, Deterministic
from ..blocking import DictToVarBijection
from ..distributions import (
DensityDist,
Categorical,
Multinomial,
VonMises,
Dirichlet,
MvStudentT,
MvNormal,
MatrixNormal,
ZeroInflatedPoisson,
ZeroInflatedNegativeBinomial,
Constant,
Poisson,
Bernoulli,
Beta,
BetaBinomial,
HalfStudentT,
StudentT,
Weibull,
Pareto,
InverseGamma,
Gamma,
Cauchy,
HalfCauchy,
Lognormal,
Laplace,
NegativeBinomial,
Geometric,
Exponential,
ExGaussian,
Normal,
TruncatedNormal,
Flat,
LKJCorr,
Wald,
ChiSquared,
HalfNormal,
DiscreteUniform,
Bound,
Uniform,
Triangular,
Binomial,
SkewNormal,
DiscreteWeibull,
Gumbel,
Logistic,
OrderedLogistic,
LogitNormal,
Interpolated,
ZeroInflatedBinomial,
HalfFlat,
AR1,
KroneckerNormal,
Rice,
Kumaraswamy,
Moyal,
HyperGeometric,
)
from ..distributions import continuous
from pymc3.theanof import floatX
import pymc3 as pm
from numpy import array, inf, log, exp
from numpy.testing import assert_almost_equal, assert_allclose, assert_equal
import numpy.random as nr
import numpy as np
import pytest
from scipy import integrate
import scipy.stats.distributions as sp
import scipy.stats
from scipy.special import logit
import theano
import theano.tensor as tt
from ..math import kronecker
def get_lkj_cases():
    """
    Log probabilities calculated using the formulas in:
    http://www.sciencedirect.com/science/article/pii/S0047259X09000876
    """
    valid_tri = np.array([0.7, 0.0, -0.7])
    first_out_of_range = np.array([1.1, 0.0, -0.7])
    last_out_of_range = np.array([0.7, 0.0, -1.1])
    # Each case is (triangular correlations, eta, n, expected log probability).
    return [
        (valid_tri, 1, 3, 1.5963125911388549),
        (valid_tri, 3, 3, -7.7963493376312742),
        (valid_tri, 0, 3, -np.inf),
        (first_out_of_range, 1, 3, -np.inf),
        (last_out_of_range, 1, 3, -np.inf),
    ]
LKJ_CASES = get_lkj_cases()
class Domain:
    """A set of test points for a distribution value or parameter.

    ``vals`` holds the interior test points, ``lower``/``upper`` the support
    edges, ``shape`` the shape of one point and ``dtype`` their common dtype.
    """

    def __init__(self, vals, dtype=None, edges=None, shape=None):
        casted = array(vals, dtype=dtype)
        # With no explicit dtype, promote non-integer data to theano's
        # configured float precision.
        if dtype is None and not str(casted.dtype).startswith("int"):
            casted = casted.astype(theano.config.floatX)
        points = [array(v, dtype=casted.dtype) for v in vals]
        if edges is None:
            # The first and last points act as the support boundaries and
            # are stripped from the interior test points.
            edges = array(points[0]), array(points[-1])
            points = points[1:-1]
        self.vals = points
        self.shape = casted[0].shape if shape is None else shape
        self.lower, self.upper = edges
        self.dtype = casted.dtype

    def __add__(self, other):
        """Shift every test point and both edges by ``other``."""
        shifted = [v + other for v in self.vals]
        new_edges = (self.lower + other, self.upper + other)
        return Domain(shifted, self.dtype, new_edges, self.shape)

    def __mul__(self, other):
        """Scale every test point by ``other``; edges are scaled when possible."""
        scaled = [v * other for v in self.vals]
        try:
            return Domain(scaled, self.dtype, (self.lower * other, self.upper * other), self.shape)
        except TypeError:
            # Edges that cannot be scaled (e.g. ``None``) are kept as-is.
            return Domain(scaled, self.dtype, (self.lower, self.upper), self.shape)

    def __neg__(self):
        """Negate every test point and both edges."""
        return Domain([-v for v in self.vals], self.dtype, (-self.lower, -self.upper), self.shape)
def product(domains, n_samples=-1):
    """Get an iterator over a product of domains.

    Args:
        domains: a dictionary of (name, object) pairs, where the objects
            must be "domain-like", as in, have a `.vals` property
        n_samples: int, maximum samples to return. -1 to return whole product

    Returns:
        list of the cartesian product of the domains
    """
    if not domains:
        # Empty input: a single empty point.
        return [{}]
    names, doms = zip(*domains.items())
    grids = itertools.product(*(d.vals for d in doms))
    all_vals = [zip(names, values) for values in grids]
    if 0 < n_samples < len(all_vals):
        # Subsample the full product without replacement.
        chosen = nr.choice(len(all_vals), n_samples, replace=False)
        return (all_vals[j] for j in chosen)
    return all_vals
# Standard test domains.  For each Domain, the first and last entries act as
# the support edges and are stripped from the interior test points.
R = Domain([-inf, -2.1, -1, -0.01, 0.0, 0.01, 1, 2.1, inf])  # whole real line
Rplus = Domain([0, 0.01, 0.1, 0.9, 0.99, 1, 1.5, 2, 100, inf])  # non-negative reals
Rplusbig = Domain([0, 0.5, 0.9, 0.99, 1, 1.5, 2, 20, inf])  # non-negative, larger values
Rminusbig = Domain([-inf, -2, -1.5, -1, -0.99, -0.9, -0.5, -0.01, 0])  # non-positive reals
Unit = Domain([0, 0.001, 0.1, 0.5, 0.75, 0.99, 1])  # unit interval [0, 1]
Circ = Domain([-np.pi, -2.1, -1, -0.01, 0.0, 0.01, 1, 2.1, np.pi])  # circular support
Runif = Domain([-1, -0.4, 0, 0.4, 1])
Rdunif = Domain([-10, 0, 10.0])
Rplusunif = Domain([0, 0.5, inf])
Rplusdunif = Domain([2, 10, 100], "int64")
I = Domain([-1000, -3, -2, -1, 0, 1, 2, 3, 1000], "int64")  # integers
NatSmall = Domain([0, 3, 4, 5, 1000], "int64")
Nat = Domain([0, 1, 2, 3, 2000], "int64")  # natural numbers incl. zero
NatBig = Domain([0, 1, 2, 3, 5000, 50000], "int64")
PosNat = Domain([1, 2, 3, 2000], "int64")  # strictly positive integers
Bool = Domain([0, 0, 1, 1], "int64")  # booleans (edges 0 and 1)
def build_model(distfam, valuedomain, vardomains, extra_args=None):
    """Build a model with Flat priors over ``vardomains`` feeding ``distfam``."""
    overrides = {} if extra_args is None else extra_args
    with Model() as m:
        # One Flat variable per parameter domain, seeded at its first value.
        params = {
            name: Flat(name, dtype=dom.dtype, shape=dom.shape, testval=dom.vals[0])
            for name, dom in vardomains.items()
        }
        params.update(overrides)
        distfam("value", shape=valuedomain.shape, transform=None, **params)
    return m
def integrate_nd(f, domain, shape, dtype):
    """Integrate (or sum) ``f`` over ``domain`` for scalar, 2-d or 3-d shapes."""
    if shape in ((), (1,)):
        if dtype in continuous_types:
            # Scalar continuous case: quadrature over the whole support.
            return integrate.quad(f, domain.lower, domain.upper, epsabs=1e-8)[0]
        # Scalar discrete case: sum over the integer support.
        return sum(f(j) for j in range(domain.lower, domain.upper + 1))
    if shape == (2,):
        return integrate.dblquad(
            lambda a, b: f([a, b]),
            domain.lower[0],
            domain.upper[0],
            lambda _: domain.lower[1],
            lambda _: domain.upper[1],
        )[0]
    if shape == (3,):
        return integrate.tplquad(
            lambda a, b, c: f([a, b, c]),
            domain.lower[0],
            domain.upper[0],
            lambda _: domain.lower[1],
            lambda _: domain.upper[1],
            lambda _, __: domain.lower[2],
            lambda _, __: domain.upper[2],
        )[0]
    raise ValueError("Dont know how to integrate shape: " + str(shape))
def multinomial_logpdf(value, n, p):
    """Reference multinomial log-pmf; -inf outside the support."""
    in_support = value.sum() == n and (0 <= value).all() and (value <= n).all()
    if not in_support:
        return -inf
    logpdf = scipy.special.gammaln(n + 1)
    logpdf -= scipy.special.gammaln(value + 1).sum()
    logpdf += logpow(p, value).sum()
    return logpdf
def beta_mu_sigma(value, mu, sigma):
    """Beta log-pdf under the (mu, sigma) parametrization; -inf when invalid."""
    kappa = mu * (1 - mu) / sigma ** 2 - 1
    if kappa <= 0:
        # No valid (alpha, beta) pair exists for this mean/sd combination.
        return -inf
    alpha = mu * kappa
    beta = (1 - mu) * kappa
    return sp.beta.logpdf(value, alpha, beta)
class ProductDomain:
    """Cartesian product of several identically-shaped component domains."""

    def __init__(self, domains):
        value_lists = [d.vals for d in domains]
        self.vals = list(itertools.product(*value_lists))
        self.shape = (len(domains),) + domains[0].shape
        self.lower = [d.lower for d in domains]
        self.upper = [d.upper for d in domains]
        self.dtype = domains[0].dtype
def Vector(D, n):
    """Domain of length-``n`` vectors whose entries each range over ``D``."""
    copies = [D for _ in range(n)]
    return ProductDomain(copies)
def SortedVector(n):
    """Domain of ten sorted standard-normal vectors of length ``n`` (no edges)."""
    np.random.seed(42)
    draws = [np.sort(np.random.randn(n)) for _ in range(10)]
    return Domain(draws, edges=(None, None))
def UnitSortedVector(n):
    """Domain of ten sorted uniform(0, 1) vectors of length ``n`` (no edges)."""
    np.random.seed(42)
    draws = [np.sort(np.random.rand(n)) for _ in range(10)]
    return Domain(draws, edges=(None, None))
def RealMatrix(n, m):
    """Domain of ten random ``n x m`` standard-normal matrices (no edges)."""
    np.random.seed(42)
    draws = [np.random.randn(n, m) for _ in range(10)]
    return Domain(draws, edges=(None, None))
def simplex_values(n):
    """Recursively yield length-``n`` simplex points built from ``Unit`` values."""
    if n == 1:
        yield array([1.0])
        return
    for head in Unit.vals:
        # Prepend ``head`` and scale the remaining mass over the sub-simplex.
        for tail in simplex_values(n - 1):
            yield np.concatenate([[head], (1 - head) * tail])
def normal_logpdf_tau(value, mu, tau):
    """Reference MV-normal log-pdf for the precision (tau) parametrization."""
    cov = np.linalg.inv(tau)
    return normal_logpdf_cov(value, mu, cov).sum()
def normal_logpdf_cov(value, mu, cov):
    """Reference MV-normal log-pdf (covariance parametrization), summed."""
    logps = scipy.stats.multivariate_normal.logpdf(value, mu, cov)
    return logps.sum()
def normal_logpdf_chol(value, mu, chol):
    """MV-normal log-pdf given a lower-triangular Cholesky factor."""
    cov = np.dot(chol, chol.T)
    return normal_logpdf_cov(value, mu, cov)
def normal_logpdf_chol_upper(value, mu, chol):
    """MV-normal log-pdf given an upper-triangular Cholesky factor."""
    cov = np.dot(chol.T, chol)
    return normal_logpdf_cov(value, mu, cov)
def matrix_normal_logpdf_cov(value, mu, rowcov, colcov):
    """Reference matrix-normal log-pdf with explicit row/column covariances."""
    dist = scipy.stats.matrix_normal
    return dist.logpdf(value, mu, rowcov, colcov)
def matrix_normal_logpdf_chol(value, mu, rowchol, colchol):
    """Matrix-normal log-pdf with row/column Cholesky factors supplied."""
    rowcov = np.dot(rowchol, rowchol.T)
    colcov = np.dot(colchol, colchol.T)
    return matrix_normal_logpdf_cov(value, mu, rowcov, colcov)
def kron_normal_logpdf_cov(value, mu, covs, sigma):
    """MV-normal log-pdf whose covariance is a Kronecker product plus noise."""
    cov = kronecker(*covs).eval()
    if sigma is not None:
        # Add independent observation noise on the diagonal.
        cov = cov + sigma ** 2 * np.eye(*cov.shape)
    return scipy.stats.multivariate_normal.logpdf(value, mu, cov).sum()
def kron_normal_logpdf_chol(value, mu, chols, sigma):
    """Same as kron_normal_logpdf_cov, but with Cholesky factors supplied."""
    covs = [np.dot(factor, factor.T) for factor in chols]
    return kron_normal_logpdf_cov(value, mu, covs, sigma=sigma)
def kron_normal_logpdf_evd(value, mu, evds, sigma):
    """Same as kron_normal_logpdf_cov, but with (eigenvalues, Q) pairs supplied."""
    def _as_numpy(x):
        # Symbolic (theano) inputs expose .eval(); plain arrays do not.
        try:
            return x.eval()
        except AttributeError:
            return x
    covs = []
    for eigs, Q in evds:
        eigs = _as_numpy(eigs)
        Q = _as_numpy(Q)
        # Reassemble the covariance from its eigendecomposition.
        covs.append(np.dot(Q, np.dot(np.diag(eigs), Q.T)))
    return kron_normal_logpdf_cov(value, mu, covs, sigma)
def betafn(a):
    """Log multivariate beta function along the last axis of ``a``."""
    log_num = scipy.special.gammaln(a).sum(-1)
    log_den = scipy.special.gammaln(a.sum(-1))
    return floatX(log_num - log_den)
def logpow(v, p):
    """Elementwise ``p * log(v)``, with the result defined as 0 where v == 0."""
    return np.choose(v != 0, [0, p * np.log(v)])
def discrete_weibull_logpmf(value, q, beta):
    """Reference discrete-Weibull log-pmf via survival-function differences."""
    surv_at_value = np.power(floatX(q), np.power(floatX(value), floatX(beta)))
    surv_at_next = np.power(floatX(q), np.power(floatX(value + 1), floatX(beta)))
    return floatX(np.log(surv_at_value - surv_at_next))
def dirichlet_logpdf(value, a):
    """Reference Dirichlet log-pdf, summed over all rows."""
    per_row = -betafn(a) + logpow(value, a - 1).sum(-1)
    return floatX(per_row.sum())
def categorical_logpdf(value, p):
    """Reference categorical log-pmf; -inf for out-of-range categories."""
    if not (0 <= value <= len(p)):
        return -inf
    return floatX(np.log(np.moveaxis(p, -1, 0)[value]))
def mvt_logpdf(value, nu, Sigma, mu=0):
    """Reference multivariate Student-t log-pdf, summed over observations."""
    d = len(Sigma)
    resid = np.atleast_2d(value) - mu
    chol = np.linalg.cholesky(Sigma)
    # Whiten the residuals with the Cholesky factor of Sigma.
    trafo = np.linalg.solve(chol, resid.T).T
    logdet = np.log(np.diag(chol)).sum()
    lgamma = scipy.special.gammaln
    norm = lgamma((nu + d) / 2.0) - 0.5 * d * np.log(nu * np.pi) - lgamma(nu / 2.0)
    quad = (trafo * trafo).sum(-1)
    logp = norm - logdet - (nu + d) / 2.0 * np.log1p(quad / nu)
    return logp.sum()
def AR1_logpdf(value, k, tau_e):
    """Reference log-density of an AR(1) process with coefficient ``k``."""
    tau = tau_e * (1 - k ** 2)  # stationary precision of the first observation
    init_logp = sp.norm(loc=0, scale=1 / np.sqrt(tau)).logpdf(value[0])
    innov_logp = sp.norm(loc=k * value[:-1], scale=1 / np.sqrt(tau_e)).logpdf(value[1:]).sum()
    return init_logp + innov_logp
def invlogit(x, eps=sys.float_info.epsilon):
    """Logistic sigmoid squeezed into (eps, 1 - eps) for numerical safety."""
    denom = 1.0 + np.exp(-x)
    return (1.0 - 2.0 * eps) / denom + eps
def orderedlogistic_logpdf(value, eta, cutpoints):
    """Reference ordered-logistic log-pmf for a single observation."""
    edges = np.concatenate(([-np.inf], cutpoints, [np.inf]))
    ps = np.array(
        [invlogit(eta - lo) - invlogit(eta - hi) for lo, hi in zip(edges[:-1], edges[1:])]
    )
    prob = ps[value]
    # Any negative category probability marks an invalid cutpoint ordering.
    return np.where(np.all(ps >= 0), np.log(prob), -np.inf)
class Simplex:
    """Domain over test points of the ``n``-simplex."""

    def __init__(self, n):
        self.shape = (n,)
        self.dtype = Unit.dtype
        self.vals = list(simplex_values(n))
class MultiSimplex:
    """Domain of stacked simplexes: ``n_independent`` rows on the ``n_dependent``-simplex."""

    def __init__(self, n_dependent, n_independent):
        combos = itertools.product(simplex_values(n_dependent), repeat=n_independent)
        self.vals = [np.vstack(combo) for combo in combos]
        self.shape = (n_independent, n_dependent)
        self.dtype = Unit.dtype
def PdMatrix(n):
    """Return the positive-definite test-matrix domain for size ``n`` (1-3)."""
    if n == 1:
        return PdMatrix1
    if n == 2:
        return PdMatrix2
    if n == 3:
        return PdMatrix3
    raise ValueError("n out of bounds")
PdMatrix1 = Domain([np.eye(1), [[0.5]]], edges=(None, None))
PdMatrix2 = Domain([np.eye(2), [[0.5, 0.05], [0.05, 4.5]]], edges=(None, None))
PdMatrix3 = Domain([np.eye(3), [[0.5, 0.1, 0], [0.1, 1, 0], [0, 0, 2.5]]], edges=(None, None))
PdMatrixChol1 = Domain([np.eye(1), [[0.001]]], edges=(None, None))
PdMatrixChol2 = Domain([np.eye(2), [[0.1, 0], [10, 1]]], edges=(None, None))
PdMatrixChol3 = Domain([np.eye(3), [[0.1, 0, 0], [10, 100, 0], [0, 1, 10]]], edges=(None, None))
def PdMatrixChol(n):
    """Return the lower-Cholesky test-matrix domain for size ``n`` (1-3)."""
    if n == 1:
        return PdMatrixChol1
    if n == 2:
        return PdMatrixChol2
    if n == 3:
        return PdMatrixChol3
    raise ValueError("n out of bounds")
PdMatrixCholUpper1 = Domain([np.eye(1), [[0.001]]], edges=(None, None))
PdMatrixCholUpper2 = Domain([np.eye(2), [[0.1, 10], [0, 1]]], edges=(None, None))
PdMatrixCholUpper3 = Domain(
[np.eye(3), [[0.1, 10, 0], [0, 100, 1], [0, 0, 10]]], edges=(None, None)
)
def PdMatrixCholUpper(n):
    """Return the upper-Cholesky test-matrix domain for size ``n`` (1-3)."""
    if n == 1:
        return PdMatrixCholUpper1
    if n == 2:
        return PdMatrixCholUpper2
    if n == 3:
        return PdMatrixCholUpper3
    raise ValueError("n out of bounds")
def RandomPdMatrix(n):
    """Draw a random symmetric positive-definite ``n x n`` matrix."""
    A = np.random.rand(n, n)
    # A @ A.T is positive semi-definite; adding n * I makes it safely PD.
    return np.dot(A, A.T) + n * np.identity(n)
class TestMatchesScipy(SeededTest):
    def pymc3_matches_scipy(
        self,
        pymc3_dist,
        domain,
        paramdomains,
        scipy_dist,
        decimal=None,
        extra_args=None,
        scipy_args=None,
    ):
        """Compare a pymc3 distribution's logp against a scipy reference.

        Builds a model with ``pymc3_dist`` over ``domain``/``paramdomains``
        and checks its logp against ``scipy_dist`` at sampled points.
        ``extra_args`` go to the pymc3 distribution; ``scipy_args`` are merged
        into every call of the scipy reference.
        """
        if extra_args is None:
            extra_args = {}
        if scipy_args is None:
            scipy_args = {}
        model = build_model(pymc3_dist, domain, paramdomains, extra_args)
        value = model.named_vars["value"]
        def logp(args):
            # Fold the fixed scipy-only keyword arguments into the point.
            args.update(scipy_args)
            return scipy_dist(**args)
        self.check_logp(model, value, domain, paramdomains, logp, decimal=decimal)
    def check_logp(self, model, value, domain, paramdomains, logp_reference, decimal=None):
        """Assert the model logp matches ``logp_reference`` over sampled points."""
        domains = paramdomains.copy()
        domains["value"] = domain
        logp = model.fastlogp
        for pt in product(domains, n_samples=100):
            pt = Point(pt, model=model)
            # Looser tolerance under float32.
            if decimal is None:
                decimal = select_by_precision(float64=6, float32=3)
            assert_almost_equal(logp(pt), logp_reference(pt), decimal=decimal, err_msg=str(pt))
    def check_logcdf(
        self,
        pymc3_dist,
        domain,
        paramdomains,
        scipy_logcdf,
        decimal=None,
        n_samples=100,
    ):
        """Check that the distribution's ``logcdf`` matches a scipy reference.

        Unlike ``check_logp`` this instantiates the distribution directly via
        ``pymc3_dist.dist(...)`` instead of building a full model.
        """
        # Sample jointly over the value domain and the parameter domains.
        domains = paramdomains.copy()
        domains["value"] = domain
        if decimal is None:
            decimal = select_by_precision(float64=6, float32=3)
        for pt in product(domains, n_samples=n_samples):
            params = dict(pt)
            # The reference is evaluated on the full point (including "value").
            scipy_cdf = scipy_logcdf(**params)
            value = params.pop("value")
            dist = pymc3_dist.dist(**params)
            assert_almost_equal(
                dist.logcdf(value).tag.test_value,
                scipy_cdf,
                decimal=decimal,
                err_msg=str(pt),
            )
    def check_int_to_1(self, model, value, domain, paramdomains):
        """Check that the density integrates to 1 over ``domain`` at sampled parameter points."""
        # exp(logp) gives the density to integrate numerically.
        pdf = model.fastfn(exp(model.logpt))
        for pt in product(paramdomains, n_samples=10):
            pt = Point(pt, value=value.tag.test_value, model=model)
            # Fix the parameters at ``pt`` so only the value variable remains free.
            bij = DictToVarBijection(value, (), pt)
            pdfx = bij.mapf(pdf)
            area = integrate_nd(pdfx, domain, value.dshape, value.dtype)
            assert_almost_equal(area, 1, err_msg=str(pt))
def checkd(self, distfam, valuedomain, vardomains, checks=None, extra_args=None):
if checks is None:
checks = (self.check_int_to_1,)
if extra_args is None:
extra_args = {}
m = build_model(distfam, valuedomain, vardomains, extra_args=extra_args)
for check in checks:
check(m, m.named_vars["value"], valuedomain, vardomains)
def test_uniform(self):
self.pymc3_matches_scipy(
Uniform,
Runif,
{"lower": -Rplusunif, "upper": Rplusunif},
lambda value, lower, upper: sp.uniform.logpdf(value, lower, upper - lower),
)
self.check_logcdf(
Uniform,
Runif,
{"lower": -Rplusunif, "upper": Rplusunif},
lambda value, lower, upper: sp.uniform.logcdf(value, lower, upper - lower),
)
def test_triangular(self):
self.pymc3_matches_scipy(
Triangular,
Runif,
{"lower": -Rplusunif, "c": Runif, "upper": Rplusunif},
lambda value, c, lower, upper: sp.triang.logpdf(value, c - lower, lower, upper - lower),
)
self.check_logcdf(
Triangular,
Runif,
{"lower": -Rplusunif, "c": Runif, "upper": Rplusunif},
lambda value, c, lower, upper: sp.triang.logcdf(value, c - lower, lower, upper - lower),
)
def test_bound_normal(self):
PositiveNormal = Bound(Normal, lower=0.0)
self.pymc3_matches_scipy(
PositiveNormal,
Rplus,
{"mu": Rplus, "sigma": Rplus},
lambda value, mu, sigma: sp.norm.logpdf(value, mu, sigma),
decimal=select_by_precision(float64=6, float32=-1),
)
with Model():
x = PositiveNormal("x", mu=0, sigma=1, transform=None)
assert np.isinf(x.logp({"x": -1}))
def test_discrete_unif(self):
self.pymc3_matches_scipy(
DiscreteUniform,
Rdunif,
{"lower": -Rplusdunif, "upper": Rplusdunif},
lambda value, lower, upper: sp.randint.logpmf(value, lower, upper + 1),
)
def test_flat(self):
self.pymc3_matches_scipy(Flat, Runif, {}, lambda value: 0)
with Model():
x = Flat("a")
assert_allclose(x.tag.test_value, 0)
self.check_logcdf(Flat, Runif, {}, lambda value: np.log(0.5))
# Check infinite cases individually.
assert 0.0 == Flat.dist().logcdf(np.inf).tag.test_value
assert -np.inf == Flat.dist().logcdf(-np.inf).tag.test_value
def test_half_flat(self):
self.pymc3_matches_scipy(HalfFlat, Rplus, {}, lambda value: 0)
with Model():
x = HalfFlat("a", shape=2)
assert_allclose(x.tag.test_value, 1)
assert x.tag.test_value.shape == (2,)
self.check_logcdf(HalfFlat, Runif, {}, lambda value: -np.inf)
# Check infinite cases individually.
assert 0.0 == HalfFlat.dist().logcdf(np.inf).tag.test_value
assert -np.inf == HalfFlat.dist().logcdf(-np.inf).tag.test_value
def test_normal(self):
self.pymc3_matches_scipy(
Normal,
R,
{"mu": R, "sigma": Rplus},
lambda value, mu, sigma: sp.norm.logpdf(value, mu, sigma),
decimal=select_by_precision(float64=6, float32=1),
)
self.check_logcdf(
Normal,
R,
{"mu": R, "sigma": Rplus},
lambda value, mu, sigma: sp.norm.logcdf(value, mu, sigma),
)
def test_truncated_normal(self):
def scipy_logp(value, mu, sigma, lower, upper):
return sp.truncnorm.logpdf(
value, (lower - mu) / sigma, (upper - mu) / sigma, loc=mu, scale=sigma
)
self.pymc3_matches_scipy(
TruncatedNormal,
R,
{"mu": R, "sigma": Rplusbig, "lower": -Rplusbig, "upper": Rplusbig},
scipy_logp,
decimal=select_by_precision(float64=6, float32=1),
)
def test_half_normal(self):
self.pymc3_matches_scipy(
HalfNormal,
Rplus,
{"sigma": Rplus},
lambda value, sigma: sp.halfnorm.logpdf(value, scale=sigma),
decimal=select_by_precision(float64=6, float32=-1),
)
self.check_logcdf(
HalfNormal,
Rplus,
{"sigma": Rplus},
lambda value, sigma: sp.halfnorm.logcdf(value, scale=sigma),
)
def test_chi_squared(self):
self.pymc3_matches_scipy(
ChiSquared,
Rplus,
{"nu": Rplusdunif},
lambda value, nu: sp.chi2.logpdf(value, df=nu),
)
@pytest.mark.xfail(reason="Poor CDF in SciPy. See scipy/scipy#869 for details.")
def test_wald_scipy(self):
self.pymc3_matches_scipy(
Wald,
Rplus,
{"mu": Rplus, "alpha": Rplus},
lambda value, mu, alpha: sp.invgauss.logpdf(value, mu=mu, loc=alpha),
decimal=select_by_precision(float64=6, float32=1),
)
self.check_logcdf(
Wald,
Rplus,
{"mu": Rplus, "alpha": Rplus},
lambda value, mu, alpha: sp.invgauss.logcdf(value, mu=mu, loc=alpha),
)
@pytest.mark.parametrize(
"value,mu,lam,phi,alpha,logp",
[
(0.5, 0.001, 0.5, None, 0.0, -124500.7257914),
(1.0, 0.5, 0.001, None, 0.0, -4.3733162),
(2.0, 1.0, None, None, 0.0, -2.2086593),
(5.0, 2.0, 2.5, None, 0.0, -3.4374500),
(7.5, 5.0, None, 1.0, 0.0, -3.2199074),
(15.0, 10.0, None, 0.75, 0.0, -4.0360623),
(50.0, 15.0, None, 0.66666, 0.0, -6.1801249),
(0.5, 0.001, 0.5, None, 0.0, -124500.7257914),
(1.0, 0.5, 0.001, None, 0.5, -3.3330954),
(2.0, 1.0, None, None, 1.0, -0.9189385),
(5.0, 2.0, 2.5, None, 2.0, -2.2128783),
(7.5, 5.0, None, 1.0, 2.5, -2.5283764),
(15.0, 10.0, None, 0.75, 5.0, -3.3653647),
(50.0, 15.0, None, 0.666666, 10.0, -5.6481874),
],
)
def test_wald(self, value, mu, lam, phi, alpha, logp):
# Log probabilities calculated using the dIG function from the R package gamlss.
# See e.g., doi: 10.1111/j.1467-9876.2005.00510.x, or
# http://www.gamlss.org/.
with Model() as model:
Wald("wald", mu=mu, lam=lam, phi=phi, alpha=alpha, transform=None)
pt = {"wald": value}
decimals = select_by_precision(float64=6, float32=1)
assert_almost_equal(model.fastlogp(pt), logp, decimal=decimals, err_msg=str(pt))
def test_beta(self):
self.pymc3_matches_scipy(
Beta,
Unit,
{"alpha": Rplus, "beta": Rplus},
lambda value, alpha, beta: sp.beta.logpdf(value, alpha, beta),
)
self.pymc3_matches_scipy(Beta, Unit, {"mu": Unit, "sigma": Rplus}, beta_mu_sigma)
self.check_logcdf(
Beta,
Unit,
{"alpha": Rplus, "beta": Rplus},
lambda value, alpha, beta: sp.beta.logcdf(value, alpha, beta),
)
def test_kumaraswamy(self):
# Scipy does not have a built-in Kumaraswamy pdf
def scipy_log_pdf(value, a, b):
return (
np.log(a) + np.log(b) + (a - 1) * np.log(value) + (b - 1) * np.log(1 - value ** a)
)
self.pymc3_matches_scipy(Kumaraswamy, Unit, {"a": Rplus, "b": Rplus}, scipy_log_pdf)
def test_exponential(self):
self.pymc3_matches_scipy(
Exponential,
Rplus,
{"lam": Rplus},
lambda value, lam: sp.expon.logpdf(value, 0, 1 / lam),
)
self.check_logcdf(
Exponential,
Rplus,
{"lam": Rplus},
lambda value, lam: sp.expon.logcdf(value, 0, 1 / lam),
)
def test_geometric(self):
self.pymc3_matches_scipy(
Geometric, Nat, {"p": Unit}, lambda value, p: np.log(sp.geom.pmf(value, p))
)
def test_hypergeometric(self):
self.pymc3_matches_scipy(
HyperGeometric,
Nat,
{"N": NatSmall, "k": NatSmall, "n": NatSmall},
lambda value, N, k, n: sp.hypergeom.logpmf(value, N, k, n),
)
def test_negative_binomial(self):
def test_fun(value, mu, alpha):
return sp.nbinom.logpmf(value, alpha, 1 - mu / (mu + alpha))
self.pymc3_matches_scipy(NegativeBinomial, Nat, {"mu": Rplus, "alpha": Rplus}, test_fun)
self.pymc3_matches_scipy(
NegativeBinomial,
Nat,
{"p": Unit, "n": Rplus},
lambda value, p, n: sp.nbinom.logpmf(value, n, p),
)
@pytest.mark.parametrize(
"mu, p, alpha, n, expected",
[
(5, None, None, None, "Must specify either alpha or n."),
(None, 0.5, None, None, "Must specify either alpha or n."),
(None, None, None, None, "Must specify either alpha or n."),
(5, None, 2, 2, "Can't specify both alpha and n."),
(None, 0.5, 2, 2, "Can't specify both alpha and n."),
(None, None, 2, 2, "Can't specify both alpha and n."),
(None, None, 2, None, "Must specify either mu or p."),
(None, None, None, 2, "Must specify either mu or p."),
(5, 0.5, 2, None, "Can't specify both mu and p."),
(5, 0.5, None, 2, "Can't specify both mu and p."),
],
)
def test_negative_binomial_init_fail(self, mu, p, alpha, n, expected):
with Model():
with pytest.raises(ValueError, match=f"Incompatible parametrization. {expected}"):
NegativeBinomial("x", mu=mu, p=p, alpha=alpha, n=n)
def test_laplace(self):
self.pymc3_matches_scipy(
Laplace,
R,
{"mu": R, "b": Rplus},
lambda value, mu, b: sp.laplace.logpdf(value, mu, b),
)
self.check_logcdf(
Laplace,
R,
{"mu": R, "b": Rplus},
lambda value, mu, b: sp.laplace.logcdf(value, mu, b),
)
def test_lognormal(self):
self.pymc3_matches_scipy(
Lognormal,
Rplus,
{"mu": R, "tau": Rplusbig},
lambda value, mu, tau: floatX(sp.lognorm.logpdf(value, tau ** -0.5, 0, np.exp(mu))),
)
self.check_logcdf(
Lognormal,
Rplus,
{"mu": R, "tau": Rplusbig},
lambda value, mu, tau: sp.lognorm.logcdf(value, tau ** -0.5, 0, np.exp(mu)),
)
def test_t(self):
self.pymc3_matches_scipy(
StudentT,
R,
{"nu": Rplus, "mu": R, "lam": Rplus},
lambda value, nu, mu, lam: sp.t.logpdf(value, nu, mu, lam ** -0.5),
)
self.check_logcdf(
StudentT,
R,
{"nu": Rplus, "mu": R, "lam": Rplus},
lambda value, nu, mu, lam: sp.t.logcdf(value, nu, mu, lam ** -0.5),
n_samples=10,
)
def test_cauchy(self):
self.pymc3_matches_scipy(
Cauchy,
R,
{"alpha": R, "beta": Rplusbig},
lambda value, alpha, beta: sp.cauchy.logpdf(value, alpha, beta),
)
self.check_logcdf(
Cauchy,
R,
{"alpha": R, "beta": Rplusbig},
lambda value, alpha, beta: sp.cauchy.logcdf(value, alpha, beta),
)
def test_half_cauchy(self):
self.pymc3_matches_scipy(
HalfCauchy,
Rplus,
{"beta": Rplusbig},
lambda value, beta: sp.halfcauchy.logpdf(value, scale=beta),
)
self.check_logcdf(
HalfCauchy,
Rplus,
{"beta": Rplusbig},
lambda value, beta: sp.halfcauchy.logcdf(value, scale=beta),
)
def test_gamma(self):
self.pymc3_matches_scipy(
Gamma,
Rplus,
{"alpha": Rplusbig, "beta": Rplusbig},
lambda value, alpha, beta: sp.gamma.logpdf(value, alpha, scale=1.0 / beta),
)
def test_fun(value, mu, sigma):
return sp.gamma.logpdf(value, mu ** 2 / sigma ** 2, scale=1.0 / (mu / sigma ** 2))
self.pymc3_matches_scipy(Gamma, Rplus, {"mu": Rplusbig, "sigma": Rplusbig}, test_fun)
self.check_logcdf(
Gamma,
Rplus,
{"alpha": Rplusbig, "beta": Rplusbig},
lambda value, alpha, beta: sp.gamma.logcdf(value, alpha, scale=1.0 / beta),
)
@pytest.mark.xfail(
condition=(theano.config.floatX == "float32"),
reason="Fails on float32 due to numerical issues",
)
def test_inverse_gamma(self):
self.pymc3_matches_scipy(
InverseGamma,
Rplus,
{"alpha": Rplus, "beta": Rplus},
lambda value, alpha, beta: sp.invgamma.logpdf(value, alpha, scale=beta),
)
self.check_logcdf(
InverseGamma,
Rplus,
{"alpha": Rplus, "beta": Rplus},
lambda value, alpha, beta: sp.invgamma.logcdf(value, alpha, scale=beta),
)
@pytest.mark.xfail(
condition=(theano.config.floatX == "float32"),
reason="Fails on float32 due to scaling issues",
)
def test_inverse_gamma_alt_params(self):
def test_fun(value, mu, sigma):
alpha, beta = InverseGamma._get_alpha_beta(None, None, mu, sigma)
return sp.invgamma.logpdf(value, alpha, scale=beta)
self.pymc3_matches_scipy(InverseGamma, Rplus, {"mu": Rplus, "sigma": Rplus}, test_fun)
def test_pareto(self):
self.pymc3_matches_scipy(
Pareto,
Rplus,
{"alpha": Rplusbig, "m": Rplusbig},
lambda value, alpha, m: sp.pareto.logpdf(value, alpha, scale=m),
)
self.check_logcdf(
Pareto,
Rplus,
{"alpha": Rplusbig, "m": Rplusbig},
lambda value, alpha, m: sp.pareto.logcdf(value, alpha, scale=m),
)
@pytest.mark.xfail(
condition=(theano.config.floatX == "float32"),
reason="Fails on float32 due to inf issues",
)
def test_weibull(self):
self.pymc3_matches_scipy(
Weibull,
Rplus,
{"alpha": Rplusbig, "beta": Rplusbig},
lambda value, alpha, beta: sp.exponweib.logpdf(value, 1, alpha, scale=beta),
)
self.check_logcdf(
Weibull,
Rplus,
{"alpha": Rplusbig, "beta": Rplusbig},
lambda value, alpha, beta: sp.exponweib.logcdf(value, 1, alpha, scale=beta),
)
def test_half_studentt(self):
# this is only testing for nu=1 (halfcauchy)
self.pymc3_matches_scipy(
HalfStudentT,
Rplus,
{"sigma": Rplus},
lambda value, sigma: sp.halfcauchy.logpdf(value, 0, sigma),
)
def test_skew_normal(self):
self.pymc3_matches_scipy(
SkewNormal,
R,
{"mu": R, "sigma": Rplusbig, "alpha": R},
lambda value, alpha, mu, sigma: sp.skewnorm.logpdf(value, alpha, mu, sigma),
)
def test_binomial(self):
self.pymc3_matches_scipy(
Binomial,
Nat,
{"n": NatSmall, "p": Unit},
lambda value, n, p: sp.binom.logpmf(value, n, p),
)
# Too lazy to propagate decimal parameter through the whole chain of deps
@pytest.mark.xfail(condition=(theano.config.floatX == "float32"), reason="Fails on float32")
def test_beta_binomial(self):
self.checkd(BetaBinomial, Nat, {"alpha": Rplus, "beta": Rplus, "n": NatSmall})
def test_bernoulli(self):
self.pymc3_matches_scipy(
Bernoulli,
Bool,
{"logit_p": R},
lambda value, logit_p: sp.bernoulli.logpmf(value, scipy.special.expit(logit_p)),
)
self.pymc3_matches_scipy(
Bernoulli, Bool, {"p": Unit}, lambda value, p: sp.bernoulli.logpmf(value, p)
)
def test_discrete_weibull(self):
self.pymc3_matches_scipy(
DiscreteWeibull,
Nat,
{"q": Unit, "beta": Rplusdunif},
discrete_weibull_logpmf,
)
def test_poisson(self):
self.pymc3_matches_scipy(
Poisson, Nat, {"mu": Rplus}, lambda value, mu: sp.poisson.logpmf(value, mu)
)
def test_bound_poisson(self):
NonZeroPoisson = Bound(Poisson, lower=1.0)
self.pymc3_matches_scipy(
NonZeroPoisson,
PosNat,
{"mu": Rplus},
lambda value, mu: sp.poisson.logpmf(value, mu),
)
with Model():
x = NonZeroPoisson("x", mu=4)
assert np.isinf(x.logp({"x": 0}))
def test_constantdist(self):
self.pymc3_matches_scipy(Constant, I, {"c": I}, lambda value, c: np.log(c == value))
# Too lazy to propagate decimal parameter through the whole chain of deps
@pytest.mark.xfail(condition=(theano.config.floatX == "float32"), reason="Fails on float32")
def test_zeroinflatedpoisson(self):
self.checkd(ZeroInflatedPoisson, Nat, {"theta": Rplus, "psi": Unit})
# Too lazy to propagate decimal parameter through the whole chain of deps
@pytest.mark.xfail(condition=(theano.config.floatX == "float32"), reason="Fails on float32")
def test_zeroinflatednegativebinomial(self):
self.checkd(
ZeroInflatedNegativeBinomial,
Nat,
{"mu": Rplusbig, "alpha": Rplusbig, "psi": Unit},
)
# Too lazy to propagate decimal parameter through the whole chain of deps
@pytest.mark.xfail(condition=(theano.config.floatX == "float32"), reason="Fails on float32")
def test_zeroinflatedbinomial(self):
self.checkd(ZeroInflatedBinomial, Nat, {"n": NatSmall, "p": Unit, "psi": Unit})
@pytest.mark.parametrize("n", [1, 2, 3])
def test_mvnormal(self, n):
self.pymc3_matches_scipy(
MvNormal,
RealMatrix(5, n),
{"mu": Vector(R, n), "tau": PdMatrix(n)},
normal_logpdf_tau,
)
self.pymc3_matches_scipy(
MvNormal,
Vector(R, n),
{"mu": Vector(R, n), "tau": PdMatrix(n)},
normal_logpdf_tau,
)
self.pymc3_matches_scipy(
MvNormal,
RealMatrix(5, n),
{"mu": Vector(R, n), "cov": PdMatrix(n)},
normal_logpdf_cov,
)
self.pymc3_matches_scipy(
MvNormal,
Vector(R, n),
{"mu": Vector(R, n), "cov": PdMatrix(n)},
normal_logpdf_cov,
)
self.pymc3_matches_scipy(
MvNormal,
RealMatrix(5, n),
{"mu": Vector(R, n), "chol": PdMatrixChol(n)},
normal_logpdf_chol,
decimal=select_by_precision(float64=6, float32=-1),
)
self.pymc3_matches_scipy(
MvNormal,
Vector(R, n),
{"mu": Vector(R, n), "chol": PdMatrixChol(n)},
normal_logpdf_chol,
decimal=select_by_precision(float64=6, float32=0),
)
def MvNormalUpper(*args, **kwargs):
return MvNormal(lower=False, *args, **kwargs)
self.pymc3_matches_scipy(
MvNormalUpper,
Vector(R, n),
{"mu": Vector(R, n), "chol": PdMatrixCholUpper(n)},
normal_logpdf_chol_upper,
decimal=select_by_precision(float64=6, float32=0),
)
@pytest.mark.xfail(
condition=(theano.config.floatX == "float32"),
reason="Fails on float32 due to inf issues",
)
def test_mvnormal_indef(self):
cov_val = np.array([[1, 0.5], [0.5, -2]])
cov = tt.matrix("cov")
cov.tag.test_value = np.eye(2)
mu = floatX(np.zeros(2))
x = tt.vector("x")
x.tag.test_value = np.zeros(2)
logp = MvNormal.dist(mu=mu, cov=cov).logp(x)
f_logp = theano.function([cov, x], logp)
assert f_logp(cov_val, np.ones(2)) == -np.inf
dlogp = tt.grad(logp, cov)
f_dlogp = theano.function([cov, x], dlogp)
assert not np.all(np.isfinite(f_dlogp(cov_val, np.ones(2))))
logp = MvNormal.dist(mu=mu, tau=cov).logp(x)
f_logp = theano.function([cov, x], logp)
assert f_logp(cov_val, np.ones(2)) == -np.inf
dlogp = tt.grad(logp, cov)
f_dlogp = theano.function([cov, x], dlogp)
assert not np.all(np.isfinite(f_dlogp(cov_val, np.ones(2))))
def test_mvnormal_init_fail(self):
with Model():
with pytest.raises(ValueError):
x = MvNormal("x", mu=np.zeros(3), shape=3)
with pytest.raises(ValueError):
x = MvNormal("x", mu=np.zeros(3), cov=np.eye(3), tau=np.eye(3), shape=3)
@pytest.mark.parametrize("n", [1, 2, 3])
def test_matrixnormal(self, n):
mat_scale = 1e3 # To reduce logp magnitude
mean_scale = 0.1
self.pymc3_matches_scipy(
MatrixNormal,
RealMatrix(n, n),
{
"mu": RealMatrix(n, n) * mean_scale,
"rowcov": PdMatrix(n) * mat_scale,
"colcov": PdMatrix(n) * mat_scale,
},
matrix_normal_logpdf_cov,
)
self.pymc3_matches_scipy(
MatrixNormal,
RealMatrix(2, n),
{
"mu": RealMatrix(2, n) * mean_scale,
"rowcov": PdMatrix(2) * mat_scale,
"colcov": PdMatrix(n) * mat_scale,
},
matrix_normal_logpdf_cov,
)
self.pymc3_matches_scipy(
MatrixNormal,
RealMatrix(3, n),
{
"mu": RealMatrix(3, n) * mean_scale,
"rowchol": PdMatrixChol(3) * mat_scale,
"colchol": PdMatrixChol(n) * mat_scale,
},
matrix_normal_logpdf_chol,
decimal=select_by_precision(float64=6, float32=-1),
)
self.pymc3_matches_scipy(
MatrixNormal,
RealMatrix(n, 3),
{
"mu": RealMatrix(n, 3) * mean_scale,
"rowchol": PdMatrixChol(n) * mat_scale,
"colchol": PdMatrixChol(3) * mat_scale,
},
matrix_normal_logpdf_chol,
decimal=select_by_precision(float64=6, float32=0),
)
@pytest.mark.parametrize("n", [2, 3])
@pytest.mark.parametrize("m", [3])
@pytest.mark.parametrize("sigma", [None, 1.0])
def test_kroneckernormal(self, n, m, sigma):
np.random.seed(5)
N = n * m
covs = [RandomPdMatrix(n), RandomPdMatrix(m)]
chols = list(map(np.linalg.cholesky, covs))
evds = list(map(np.linalg.eigh, covs))
dom = Domain([np.random.randn(N) * 0.1], edges=(None, None), shape=N)
mu = Domain([np.random.randn(N) * 0.1], edges=(None, None), shape=N)
std_args = {"mu": mu}
cov_args = {"covs": covs}
chol_args = {"chols": chols}
evd_args = {"evds": evds}
if sigma is not None and sigma != 0:
std_args["sigma"] = Domain([sigma], edges=(None, None))
else:
for args in [cov_args, chol_args, evd_args]:
args["sigma"] = sigma
self.pymc3_matches_scipy(
KroneckerNormal,
dom,
std_args,
kron_normal_logpdf_cov,
extra_args=cov_args,
scipy_args=cov_args,
)
self.pymc3_matches_scipy(
KroneckerNormal,
dom,
std_args,
kron_normal_logpdf_chol,
extra_args=chol_args,
scipy_args=chol_args,
)
self.pymc3_matches_scipy(
KroneckerNormal,
dom,
std_args,
kron_normal_logpdf_evd,
extra_args=evd_args,
scipy_args=evd_args,
)
dom = Domain([np.random.randn(2, N) * 0.1], edges=(None, None), shape=(2, N))
self.pymc3_matches_scipy(
KroneckerNormal,
dom,
std_args,
kron_normal_logpdf_cov,
extra_args=cov_args,
scipy_args=cov_args,
)
self.pymc3_matches_scipy(
KroneckerNormal,
dom,
std_args,
kron_normal_logpdf_chol,
extra_args=chol_args,
scipy_args=chol_args,
)
self.pymc3_matches_scipy(
KroneckerNormal,
dom,
std_args,
kron_normal_logpdf_evd,
extra_args=evd_args,
scipy_args=evd_args,
)
@pytest.mark.parametrize("n", [1, 2])
def test_mvt(self, n):
self.pymc3_matches_scipy(
MvStudentT,
Vector(R, n),
{"nu": Rplus, "Sigma": PdMatrix(n), "mu": Vector(R, n)},
mvt_logpdf,
)
self.pymc3_matches_scipy(
MvStudentT,
RealMatrix(2, n),
{"nu": Rplus, "Sigma": PdMatrix(n), "mu": Vector(R, n)},
mvt_logpdf,
)
@pytest.mark.parametrize("n", [2, 3, 4])
def test_AR1(self, n):
self.pymc3_matches_scipy(AR1, Vector(R, n), {"k": Unit, "tau_e": Rplus}, AR1_logpdf)
@pytest.mark.parametrize("n", [2, 3])
def test_wishart(self, n):
# This check compares the autodiff gradient to the numdiff gradient.
# However, due to the strict constraints of the wishart,
# it is impossible to numerically determine the gradient as a small
# pertubation breaks the symmetry. Thus disabling. Also, numdifftools was
# removed in June 2019, so an alternative would be needed.
#
# self.checkd(Wishart, PdMatrix(n), {'n': Domain([2, 3, 4, 2000]), 'V': PdMatrix(n)},
# checks=[self.check_dlogp])
pass
@pytest.mark.parametrize("x,eta,n,lp", LKJ_CASES)
def test_lkj(self, x, eta, n, lp):
with Model() as model:
LKJCorr("lkj", eta=eta, n=n, transform=None)
pt = {"lkj": x}
decimals = select_by_precision(float64=6, float32=4)
assert_almost_equal(model.fastlogp(pt), lp, decimal=decimals, err_msg=str(pt))
@pytest.mark.parametrize("n", [2, 3])
def test_dirichlet(self, n):
self.pymc3_matches_scipy(Dirichlet, Simplex(n), {"a": Vector(Rplus, n)}, dirichlet_logpdf)
def test_dirichlet_shape(self):
a = tt.as_tensor_variable(np.r_[1, 2])
with pytest.warns(DeprecationWarning):
dir_rv = Dirichlet.dist(a)
assert dir_rv.shape == (2,)
with pytest.warns(DeprecationWarning), theano.change_flags(compute_test_value="ignore"):
dir_rv = Dirichlet.dist(tt.vector())
def test_dirichlet_2D(self):
self.pymc3_matches_scipy(
Dirichlet,
MultiSimplex(2, 2),
{"a": Vector(Vector(Rplus, 2), 2)},
dirichlet_logpdf,
)
@pytest.mark.parametrize("n", [2, 3])
def test_multinomial(self, n):
self.pymc3_matches_scipy(
Multinomial, Vector(Nat, n), {"p": Simplex(n), "n": Nat}, multinomial_logpdf
)
@pytest.mark.parametrize(
"p,n",
[
[[0.25, 0.25, 0.25, 0.25], 1],
[[0.3, 0.6, 0.05, 0.05], 2],
[[0.3, 0.6, 0.05, 0.05], 10],
],
)
def test_multinomial_mode(self, p, n):
_p = np.array(p)
with Model() as model:
m = Multinomial("m", n, _p, _p.shape)
assert_allclose(m.distribution.mode.eval().sum(), n)
_p = np.array([p, p])
with Model() as model:
m = Multinomial("m", n, _p, _p.shape)
assert_allclose(m.distribution.mode.eval().sum(axis=-1), n)
@pytest.mark.parametrize(
"p, shape, n",
[
[[0.25, 0.25, 0.25, 0.25], 4, 2],
[[0.25, 0.25, 0.25, 0.25], (1, 4), 3],
# 3: expect to fail
# [[.25, .25, .25, .25], (10, 4)],
[[0.25, 0.25, 0.25, 0.25], (10, 1, 4), 5],
# 5: expect to fail
# [[[.25, .25, .25, .25]], (2, 4), [7, 11]],
[[[0.25, 0.25, 0.25, 0.25], [0.25, 0.25, 0.25, 0.25]], (2, 4), 13],
[[[0.25, 0.25, 0.25, 0.25], [0.25, 0.25, 0.25, 0.25]], (1, 2, 4), [23, 29]],
[
[[0.25, 0.25, 0.25, 0.25], [0.25, 0.25, 0.25, 0.25]],
(10, 2, 4),
[31, 37],
],
[[[0.25, 0.25, 0.25, 0.25], [0.25, 0.25, 0.25, 0.25]], (2, 4), [17, 19]],
],
)
def test_multinomial_random(self, p, shape, n):
p = np.asarray(p)
with Model() as model:
m = Multinomial("m", n=n, p=p, shape=shape)
m.random()
def test_multinomial_mode_with_shape(self):
n = [1, 10]
p = np.asarray([[0.25, 0.25, 0.25, 0.25], [0.26, 0.26, 0.26, 0.22]])
with Model() as model:
m = Multinomial("m", n=n, p=p, shape=(2, 4))
assert_allclose(m.distribution.mode.eval().sum(axis=-1), n)
def test_multinomial_vec(self):
vals = np.array([[2, 4, 4], [3, 3, 4]])
p = np.array([0.2, 0.3, 0.5])
n = 10
with Model() as model_single:
Multinomial("m", n=n, p=p, shape=len(p))
with Model() as model_many:
Multinomial("m", n=n, p=p, shape=vals.shape)
assert_almost_equal(
scipy.stats.multinomial.logpmf(vals, n, p),
np.asarray([model_single.fastlogp({"m": val}) for val in vals]),
decimal=4,
)
assert_almost_equal(
scipy.stats.multinomial.logpmf(vals, n, p),
model_many.free_RVs[0].logp_elemwise({"m": vals}).squeeze(),
decimal=4,
)
assert_almost_equal(
sum([model_single.fastlogp({"m": val}) for val in vals]),
model_many.fastlogp({"m": vals}),
decimal=4,
)
def test_multinomial_vec_1d_n(self):
vals = np.array([[2, 4, 4], [4, 3, 4]])
p = np.array([0.2, 0.3, 0.5])
ns = np.array([10, 11])
with Model() as model:
Multinomial("m", n=ns, p=p, shape=vals.shape)
assert_almost_equal(
sum([multinomial_logpdf(val, n, p) for val, n in zip(vals, ns)]),
model.fastlogp({"m": vals}),
decimal=4,
)
def test_multinomial_vec_1d_n_2d_p(self):
vals = np.array([[2, 4, 4], [4, 3, 4]])
ps = np.array([[0.2, 0.3, 0.5], [0.9, 0.09, 0.01]])
ns = np.array([10, 11])
with Model() as model:
Multinomial("m", n=ns, p=ps, shape=vals.shape)
assert_almost_equal(
sum([multinomial_logpdf(val, n, p) for val, n, p in zip(vals, ns, ps)]),
model.fastlogp({"m": vals}),
decimal=4,
)
def test_multinomial_vec_2d_p(self):
vals = np.array([[2, 4, 4], [3, 3, 4]])
ps = np.array([[0.2, 0.3, 0.5], [0.3, 0.3, 0.4]])
n = 10
with Model() as model:
Multinomial("m", n=n, p=ps, shape=vals.shape)
assert_almost_equal(
sum([multinomial_logpdf(val, n, p) for val, p in zip(vals, ps)]),
model.fastlogp({"m": vals}),
decimal=4,
)
def test_batch_multinomial(self):
n = 10
vals = np.zeros((4, 5, 3), dtype="int32")
p = np.zeros_like(vals, dtype=theano.config.floatX)
inds = np.random.randint(vals.shape[-1], size=vals.shape[:-1])[..., None]
np.put_along_axis(vals, inds, n, axis=-1)
np.put_along_axis(p, inds, 1, axis=-1)
dist = Multinomial.dist(n=n, p=p, shape=vals.shape)
value = tt.tensor3(dtype="int32")
value.tag.test_value = np.zeros_like(vals, dtype="int32")
logp = tt.exp(dist.logp(value))
f = theano.function(inputs=[value], outputs=logp)
assert_almost_equal(
f(vals),
np.ones(vals.shape[:-1] + (1,)),
decimal=select_by_precision(float64=6, float32=3),
)
sample = dist.random(size=2)
assert_allclose(sample, np.stack([vals, vals], axis=0))
def test_categorical_bounds(self):
with Model():
x = Categorical("x", p=np.array([0.2, 0.3, 0.5]))
assert np.isinf(x.logp({"x": -1}))
assert np.isinf(x.logp({"x": 3}))
def test_categorical_valid_p(self):
with Model():
x = Categorical("x", p=np.array([-0.2, 0.3, 0.5]))
assert np.isinf(x.logp({"x": 0}))
assert np.isinf(x.logp({"x": 1}))
assert np.isinf(x.logp({"x": 2}))
with Model():
# A model where p sums to 1 but contains negative values
x = Categorical("x", p=np.array([-0.2, 0.7, 0.5]))
assert np.isinf(x.logp({"x": 0}))
assert np.isinf(x.logp({"x": 1}))
assert np.isinf(x.logp({"x": 2}))
with Model():
# Hard edge case from #2082
# Early automatic normalization of p's sum would hide the negative
# entries if there is a single or pair number of negative values
# and the rest are zero
x = Categorical("x", p=np.array([-1, -1, 0, 0]))
assert np.isinf(x.logp({"x": 0}))
assert np.isinf(x.logp({"x": 1}))
assert np.isinf(x.logp({"x": 2}))
assert np.isinf(x.logp({"x": 3}))
@pytest.mark.parametrize("n", [2, 3, 4])
def test_categorical(self, n):
self.pymc3_matches_scipy(
Categorical,
Domain(range(n), "int64"),
{"p": Simplex(n)},
lambda value, p: categorical_logpdf(value, p),
)
@pytest.mark.parametrize("n", [2, 3, 4])
def test_orderedlogistic(self, n):
self.pymc3_matches_scipy(
OrderedLogistic,
Domain(range(n), "int64"),
{"eta": R, "cutpoints": Vector(R, n - 1)},
lambda value, eta, cutpoints: orderedlogistic_logpdf(value, eta, cutpoints),
)
def test_densitydist(self):
def logp(x):
return -log(2 * 0.5) - abs(x - 0.5) / 0.5
self.checkd(DensityDist, R, {}, extra_args={"logp": logp})
def test_get_tau_sigma(self):
sigma = np.array([2])
assert_almost_equal(continuous.get_tau_sigma(sigma=sigma), [1.0 / sigma ** 2, sigma])
@pytest.mark.parametrize(
"value,mu,sigma,nu,logp",
[
(0.5, -50.000, 0.500, 0.500, -99.8068528),
(1.0, -1.000, 0.001, 0.001, -1992.5922447),
(2.0, 0.001, 1.000, 1.000, -1.6720416),
(5.0, 0.500, 2.500, 2.500, -2.4543644),
(7.5, 2.000, 5.000, 5.000, -2.8259429),
(15.0, 5.000, 7.500, 7.500, -3.3093854),
(50.0, 50.000, 10.000, 10.000, -3.6436067),
(1000.0, 500.000, 10.000, 20.000, -27.8707323),
],
)
def test_ex_gaussian(self, value, mu, sigma, nu, logp):
"""Log probabilities calculated using the dexGAUS function from the R package gamlss.
See e.g., doi: 10.1111/j.1467-9876.2005.00510.x, or http://www.gamlss.org/."""
with Model() as model:
ExGaussian("eg", mu=mu, sigma=sigma, nu=nu)
pt = {"eg": value}
assert_almost_equal(
model.fastlogp(pt),
logp,
decimal=select_by_precision(float64=6, float32=2),
err_msg=str(pt),
)
@pytest.mark.parametrize(
"value,mu,sigma,nu,logcdf",
[
(0.5, -50.000, 0.500, 0.500, 0.0000000),
(1.0, -1.000, 0.001, 0.001, 0.0000000),
(2.0, 0.001, 1.000, 1.000, -0.2365674),
(5.0, 0.500, 2.500, 2.500, -0.2886489),
(7.5, 2.000, 5.000, 5.000, -0.5655104),
(15.0, 5.000, 7.500, 7.500, -0.4545255),
(50.0, 50.000, 10.000, 10.000, -1.433714),
(1000.0, 500.000, 10.000, 20.000, -1.573708e-11),
],
)
def test_ex_gaussian_cdf(self, value, mu, sigma, nu, logcdf):
"""Log probabilities calculated using the pexGAUS function from the R package gamlss.
See e.g., doi: 10.1111/j.1467-9876.2005.00510.x, or http://www.gamlss.org/."""
assert_almost_equal(
ExGaussian.dist(mu=mu, sigma=sigma, nu=nu).logcdf(value).tag.test_value,
logcdf,
decimal=select_by_precision(float64=6, float32=2),
err_msg=str((value, mu, sigma, nu, logcdf)),
)
@pytest.mark.xfail(condition=(theano.config.floatX == "float32"), reason="Fails on float32")
def test_vonmises(self):
self.pymc3_matches_scipy(
VonMises,
R,
{"mu": Circ, "kappa": Rplus},
lambda value, mu, kappa: floatX(sp.vonmises.logpdf(value, kappa, loc=mu)),
)
def test_gumbel(self):
def gumbel(value, mu, beta):
return floatX(sp.gumbel_r.logpdf(value, loc=mu, scale=beta))
self.pymc3_matches_scipy(Gumbel, R, {"mu": R, "beta": Rplusbig}, gumbel)
def gumbellcdf(value, mu, beta):
return floatX(sp.gumbel_r.logcdf(value, loc=mu, scale=beta))
self.check_logcdf(Gumbel, R, {"mu": R, "beta": Rplusbig}, gumbellcdf)
def test_logistic(self):
self.pymc3_matches_scipy(
Logistic,
R,
{"mu": R, "s": Rplus},
lambda value, mu, s: sp.logistic.logpdf(value, mu, s),
decimal=select_by_precision(float64=6, float32=1),
)
self.check_logcdf(
Logistic,
R,
{"mu": R, "s": Rplus},
lambda value, mu, s: sp.logistic.logcdf(value, mu, s),
decimal=select_by_precision(float64=6, float32=1),
)
def test_logitnormal(self):
self.pymc3_matches_scipy(
LogitNormal,
Unit,
{"mu": R, "sigma": Rplus},
lambda value, mu, sigma: (
sp.norm.logpdf(logit(value), mu, sigma) - (np.log(value) + np.log1p(-value))
),
decimal=select_by_precision(float64=6, float32=1),
)
    def test_multidimensional_beta_construction(self):
        """Constructing a (10, 20)-shaped Beta RV should not raise."""
        with Model():
            Beta("beta", alpha=1.0, beta=1.0, shape=(10, 20))
def test_rice(self):
self.pymc3_matches_scipy(
Rice,
Rplus,
{"nu": Rplus, "sigma": Rplusbig},
lambda value, nu, sigma: sp.rice.logpdf(value, b=nu / sigma, loc=0, scale=sigma),
)
self.pymc3_matches_scipy(
Rice,
Rplus,
{"b": Rplus, "sigma": Rplusbig},
lambda value, b, sigma: sp.rice.logpdf(value, b=b, loc=0, scale=sigma),
)
@pytest.mark.xfail(condition=(theano.config.floatX == "float32"), reason="Fails on float32")
def test_moyal(self):
self.pymc3_matches_scipy(
Moyal,
R,
{"mu": R, "sigma": Rplusbig},
lambda value, mu, sigma: floatX(sp.moyal.logpdf(value, mu, sigma)),
)
self.check_logcdf(
Moyal,
R,
{"mu": R, "sigma": Rplusbig},
lambda value, mu, sigma: floatX(sp.moyal.logcdf(value, mu, sigma)),
)
@pytest.mark.xfail(condition=(theano.config.floatX == "float32"), reason="Fails on float32")
def test_interpolated(self):
for mu in R.vals:
for sigma in Rplus.vals:
# pylint: disable=cell-var-from-loop
xmin = mu - 5 * sigma
xmax = mu + 5 * sigma
class TestedInterpolated(Interpolated):
def __init__(self, **kwargs):
x_points = np.linspace(xmin, xmax, 100000)
pdf_points = sp.norm.pdf(x_points, loc=mu, scale=sigma)
super().__init__(x_points=x_points, pdf_points=pdf_points, **kwargs)
def ref_pdf(value):
return np.where(
np.logical_and(value >= xmin, value <= xmax),
sp.norm.logpdf(value, mu, sigma),
-np.inf * np.ones(value.shape),
)
self.pymc3_matches_scipy(TestedInterpolated, R, {}, ref_pdf)
def test_bound():
    """Exercise Bound() wrappers: no bounds, scalar bounds, array bounds,
    symbolic bounds, discrete distributions, and named/positional args."""
    np.random.seed(42)
    # Unbounded: behaves like the base Normal, no transform applied.
    UnboundNormal = Bound(Normal)
    dist = UnboundNormal.dist(mu=0, sigma=1)
    assert dist.transform is None
    assert dist.default() == 0.0
    assert isinstance(dist.random(), np.ndarray)
    # Lower bound only: logp is -inf below the bound, samples stay above it.
    LowerNormal = Bound(Normal, lower=1)
    dist = LowerNormal.dist(mu=0, sigma=1)
    assert dist.logp(0).eval() == -np.inf
    assert dist.default() > 1
    assert dist.transform is not None
    assert np.all(dist.random() > 1)
    # Upper bound only: mirror of the above.
    UpperNormal = Bound(Normal, upper=-1)
    dist = UpperNormal.dist(mu=0, sigma=1)
    assert dist.logp(-0.5).eval() == -np.inf
    assert dist.default() < -1
    assert dist.transform is not None
    assert np.all(dist.random() < -1)
    # Array-valued bounds: defaults are bound midpoints; random() is unsupported.
    ArrayNormal = Bound(Normal, lower=[1, 2], upper=[2, 3])
    dist = ArrayNormal.dist(mu=0, sigma=1, shape=2)
    assert_equal(dist.logp([0.5, 3.5]).eval(), -np.array([np.inf, np.inf]))
    assert_equal(dist.default(), np.array([1.5, 2.5]))
    assert dist.transform is not None
    with pytest.raises(ValueError) as err:
        dist.random()
    err.match("Drawing samples from distributions with array-valued")
    with Model():
        a = ArrayNormal("c", shape=2)
        assert_equal(a.tag.test_value, np.array([1.5, 2.5]))
    # Symbolic (theano vector) lower bound with scalar upper bound.
    lower = tt.vector("lower")
    lower.tag.test_value = np.array([1, 2]).astype(theano.config.floatX)
    upper = 3
    ArrayNormal = Bound(Normal, lower=lower, upper=upper)
    dist = ArrayNormal.dist(mu=0, sigma=1, shape=2)
    logp = dist.logp([0.5, 3.5]).eval({lower: lower.tag.test_value})
    assert_equal(logp, -np.array([np.inf, np.inf]))
    assert_equal(dist.default(), np.array([2, 2.5]))
    assert dist.transform is not None
    with Model():
        a = ArrayNormal("c", shape=2)
        assert_equal(a.tag.test_value, np.array([2, 2.5]))
    # Bounded discrete distributions keep an integer dtype and honor bounds.
    rand = Bound(Binomial, lower=10).dist(n=20, p=0.3).random()
    assert rand.dtype in [np.int16, np.int32, np.int64]
    assert rand >= 10
    rand = Bound(Binomial, upper=10).dist(n=20, p=0.8).random()
    assert rand.dtype in [np.int16, np.int32, np.int64]
    assert rand <= 10
    rand = Bound(Binomial, lower=5, upper=8).dist(n=10, p=0.6).random()
    assert rand.dtype in [np.int16, np.int32, np.int64]
    assert rand >= 5 and rand <= 8
    # Construction smoke tests: named and positional arguments.
    with Model():
        BoundPoisson = Bound(Poisson, upper=6)
        BoundPoisson(name="y", mu=1)
    with Model():
        BoundNormalNamedArgs = Bound(Normal, upper=6)("y", mu=2.0, sd=1.0)
        BoundNormalPositionalArgs = Bound(Normal, upper=6)("x", 2.0, 1.0)
    with Model():
        BoundPoissonNamedArgs = Bound(Poisson, upper=6)("y", mu=2.0)
        BoundPoissonPositionalArgs = Bound(Poisson, upper=6)("x", 2.0)
class TestStrAndLatexRepr:
    """Check the plain-text and LaTeX representations of model variables.

    A model containing one of each interesting variable kind is built once
    in setup_class; each test compares the repr output with the literal
    expected strings stored in ``self.expected``.
    """

    def setup_class(self):
        # True parameter values
        alpha, sigma = 1, 1
        beta = [1, 2.5]
        # Size of dataset
        size = 100
        # Predictor variable
        X = np.random.normal(size=(size, 2)).dot(np.array([[1, 0], [0, 0.2]]))
        # Simulate outcome variable
        Y = alpha + X.dot(beta) + np.random.randn(size) * sigma
        with Model() as self.model:
            # Priors for unknown model parameters
            alpha = Normal("alpha", mu=0, sigma=10)
            b = Normal("beta", mu=0, sigma=10, shape=(2,), observed=beta)
            sigma = HalfNormal("sigma", sigma=1)
            # Test Cholesky parameterization
            Z = MvNormal("Z", mu=np.zeros(2), chol=np.eye(2), shape=(2,))
            # NegativeBinomial representations to test issue 4186
            nb1 = pm.NegativeBinomial(
                "nb_with_mu_alpha", mu=pm.Normal("nbmu"), alpha=pm.Gamma("nbalpha", mu=6, sigma=1)
            )
            nb2 = pm.NegativeBinomial("nb_with_p_n", p=pm.Uniform("nbp"), n=10)
            # Expected value of outcome
            mu = Deterministic("mu", floatX(alpha + tt.dot(X, b)))
            # add a bounded variable as well
            bound_var = Bound(Normal, lower=1.0)("bound_var", mu=0, sigma=10)
            # KroneckerNormal
            n, m = 3, 4
            covs = [np.eye(n), np.eye(m)]
            kron_normal = KroneckerNormal("kron_normal", mu=np.zeros(n * m), covs=covs, shape=n * m)
            # MatrixNormal
            matrix_normal = MatrixNormal(
                "mat_normal",
                mu=np.random.normal(size=n),
                rowcov=np.eye(n),
                colchol=np.linalg.cholesky(np.eye(n)),
                shape=(n, n),
            )
            # Likelihood (sampling distribution) of observations
            Y_obs = Normal("Y_obs", mu=mu, sigma=sigma, observed=Y)
        self.distributions = [alpha, sigma, mu, b, Z, nb1, nb2, Y_obs, bound_var]
        # Literal expected reprs; these must match pymc3's output exactly.
        self.expected = {
            "latex": (
                r"$\text{alpha} \sim \text{Normal}$",
                r"$\text{sigma} \sim \text{HalfNormal}$",
                r"$\text{mu} \sim \text{Deterministic}$",
                r"$\text{beta} \sim \text{Normal}$",
                r"$\text{Z} \sim \text{MvNormal}$",
                r"$\text{nb_with_mu_alpha} \sim \text{NegativeBinomial}$",
                r"$\text{nb_with_p_n} \sim \text{NegativeBinomial}$",
                r"$\text{Y_obs} \sim \text{Normal}$",
                r"$\text{bound_var} \sim \text{Bound}$ -- \text{Normal}$",
                r"$\text{kron_normal} \sim \text{KroneckerNormal}$",
                r"$\text{mat_normal} \sim \text{MatrixNormal}$",
            ),
            "plain": (
                r"alpha ~ Normal",
                r"sigma ~ HalfNormal",
                r"mu ~ Deterministic",
                r"beta ~ Normal",
                r"Z ~ MvNormal",
                r"nb_with_mu_alpha ~ NegativeBinomial",
                r"nb_with_p_n ~ NegativeBinomial",
                r"Y_obs ~ Normal",
                r"bound_var ~ Bound-Normal",
                r"kron_normal ~ KroneckerNormal",
                r"mat_normal ~ MatrixNormal",
            ),
            "latex_with_params": (
                r"$\text{alpha} \sim \text{Normal}(\mathit{mu}=0.0,~\mathit{sigma}=10.0)$",
                r"$\text{sigma} \sim \text{HalfNormal}(\mathit{sigma}=1.0)$",
                r"$\text{mu} \sim \text{Deterministic}(\text{alpha},~\text{Constant},~\text{beta})$",
                r"$\text{beta} \sim \text{Normal}(\mathit{mu}=0.0,~\mathit{sigma}=10.0)$",
                r"$\text{Z} \sim \text{MvNormal}(\mathit{mu}=array,~\mathit{chol_cov}=array)$",
                r"$\text{nb_with_mu_alpha} \sim \text{NegativeBinomial}(\mathit{mu}=\text{nbmu},~\mathit{alpha}=\text{nbalpha})$",
                r"$\text{nb_with_p_n} \sim \text{NegativeBinomial}(\mathit{p}=\text{nbp},~\mathit{n}=10)$",
                r"$\text{Y_obs} \sim \text{Normal}(\mathit{mu}=\text{mu},~\mathit{sigma}=f(\text{sigma}))$",
                r"$\text{bound_var} \sim \text{Bound}(\mathit{lower}=1.0,~\mathit{upper}=\text{None})$ -- \text{Normal}(\mathit{mu}=0.0,~\mathit{sigma}=10.0)$",
                r"$\text{kron_normal} \sim \text{KroneckerNormal}(\mathit{mu}=array)$",
                r"$\text{mat_normal} \sim \text{MatrixNormal}(\mathit{mu}=array,~\mathit{rowcov}=array,~\mathit{colchol_cov}=array)$",
            ),
            "plain_with_params": (
                r"alpha ~ Normal(mu=0.0, sigma=10.0)",
                r"sigma ~ HalfNormal(sigma=1.0)",
                r"mu ~ Deterministic(alpha, Constant, beta)",
                r"beta ~ Normal(mu=0.0, sigma=10.0)",
                r"Z ~ MvNormal(mu=array, chol_cov=array)",
                r"nb_with_mu_alpha ~ NegativeBinomial(mu=nbmu, alpha=nbalpha)",
                r"nb_with_p_n ~ NegativeBinomial(p=nbp, n=10)",
                r"Y_obs ~ Normal(mu=mu, sigma=f(sigma))",
                r"bound_var ~ Bound(lower=1.0, upper=None)-Normal(mu=0.0, sigma=10.0)",
                r"kron_normal ~ KroneckerNormal(mu=array)",
                r"mat_normal ~ MatrixNormal(mu=array, rowcov=array, colchol_cov=array)",
            ),
        }
    def test__repr_latex_(self):
        # Per-variable LaTeX repr, then every variable appears in the model repr.
        for distribution, tex in zip(self.distributions, self.expected["latex_with_params"]):
            assert distribution._repr_latex_() == tex
        model_tex = self.model._repr_latex_()
        # make sure each variable is in the model
        for tex in self.expected["latex"]:
            for segment in tex.strip("$").split(r"\sim"):
                assert segment in model_tex
    def test___latex__(self):
        # __latex__ must be an alias of _repr_latex_.
        for distribution, tex in zip(self.distributions, self.expected["latex_with_params"]):
            assert distribution._repr_latex_() == distribution.__latex__()
        assert self.model._repr_latex_() == self.model.__latex__()
    def test___str__(self):
        # Plain-text repr via the __str__ dunder.
        for distribution, str_repr in zip(self.distributions, self.expected["plain"]):
            assert distribution.__str__() == str_repr
        model_str = self.model.__str__()
        for str_repr in self.expected["plain"]:
            assert str_repr in model_str
    def test_str(self):
        # Plain-text repr via the str() builtin (same expectations).
        for distribution, str_repr in zip(self.distributions, self.expected["plain"]):
            assert str(distribution) == str_repr
        model_str = str(self.model)
        for str_repr in self.expected["plain"]:
            assert str_repr in model_str
def test_discrete_trafo():
    # Requesting a transform on a discrete distribution must raise,
    # both via .dist() and inside a model context.
    with pytest.raises(ValueError) as err:
        Binomial.dist(n=5, p=0.5, transform="log")
    err.match("Transformations for discrete distributions")
    with Model():
        with pytest.raises(ValueError) as err:
            Binomial("a", n=5, p=0.5, transform="log")
        err.match("Transformations for discrete distributions")
@pytest.mark.parametrize("shape", [tuple(), (1,), (3, 1), (3, 2)], ids=str)
def test_orderedlogistic_dimensions(shape):
    # Test for issue #3535
    # With equal category probabilities, OrderedLogistic and Categorical
    # must agree on logp for every broadcastable eta shape.
    loge = np.log10(np.exp(1))
    size = 7
    p = np.ones(shape + (10,)) / 10
    cutpoints = np.tile(logit(np.linspace(0, 1, 11)[1:-1]), shape + (1,))
    obs = np.random.randint(0, 1, size=(size,) + shape)
    with Model():
        ol = OrderedLogistic(
            "ol", eta=np.zeros(shape), cutpoints=cutpoints, shape=shape, observed=obs
        )
        c = Categorical("c", p=p, shape=shape, observed=obs)
    ologp = ol.logp({"ol": 1}) * loge
    clogp = c.logp({"c": 1}) * loge
    # Uniform over 10 categories: total log10-prob is -(number of observations).
    expected = -np.prod((size,) + shape)
    assert c.distribution.p.ndim == (len(shape) + 1)
    assert np.allclose(clogp, expected)
    assert ol.distribution.p.ndim == (len(shape) + 1)
    assert np.allclose(ologp, expected)
class TestBugfixes:
    """Regression tests for previously reported bugs."""

    @pytest.mark.parametrize(
        "dist_cls,kwargs", [(MvNormal, dict(mu=0)), (MvStudentT, dict(mu=0, nu=2))]
    )
    @pytest.mark.parametrize("dims", [1, 2, 4])
    def test_issue_3051(self, dims, dist_cls, kwargs):
        """Multivariate logp of a (20, dims) input must return one value per
        row (issue #3051), as a symbolic tensor that evaluates to an ndarray."""
        d = dist_cls.dist(**kwargs, cov=np.eye(dims), shape=(dims,))
        X = np.random.normal(size=(20, dims))
        actual_t = d.logp(X)
        assert isinstance(actual_t, tt.TensorVariable)
        actual_a = actual_t.eval()
        assert isinstance(actual_a, np.ndarray)
        assert actual_a.shape == (X.shape[0],)
        # NOTE: removed a dead trailing `pass` statement that followed the asserts.
def test_serialize_density_dist():
    # A DensityDist built from a plain function must survive pickling,
    # which spawn-based multiprocessing sampling relies on.
    def func(x):
        return -2 * (x ** 2).sum()
    with pm.Model():
        pm.Normal("x")
        y = pm.DensityDist("y", func)
        pm.sample(draws=5, tune=1, mp_ctx="spawn")
    import pickle
    pickle.loads(pickle.dumps(y))
| 34.46815 | 160 | 0.551014 |
import itertools
import sys
from .helpers import SeededTest, select_by_precision
from ..vartypes import continuous_types
from ..model import Model, Point, Deterministic
from ..blocking import DictToVarBijection
from ..distributions import (
DensityDist,
Categorical,
Multinomial,
VonMises,
Dirichlet,
MvStudentT,
MvNormal,
MatrixNormal,
ZeroInflatedPoisson,
ZeroInflatedNegativeBinomial,
Constant,
Poisson,
Bernoulli,
Beta,
BetaBinomial,
HalfStudentT,
StudentT,
Weibull,
Pareto,
InverseGamma,
Gamma,
Cauchy,
HalfCauchy,
Lognormal,
Laplace,
NegativeBinomial,
Geometric,
Exponential,
ExGaussian,
Normal,
TruncatedNormal,
Flat,
LKJCorr,
Wald,
ChiSquared,
HalfNormal,
DiscreteUniform,
Bound,
Uniform,
Triangular,
Binomial,
SkewNormal,
DiscreteWeibull,
Gumbel,
Logistic,
OrderedLogistic,
LogitNormal,
Interpolated,
ZeroInflatedBinomial,
HalfFlat,
AR1,
KroneckerNormal,
Rice,
Kumaraswamy,
Moyal,
HyperGeometric,
)
from ..distributions import continuous
from pymc3.theanof import floatX
import pymc3 as pm
from numpy import array, inf, log, exp
from numpy.testing import assert_almost_equal, assert_allclose, assert_equal
import numpy.random as nr
import numpy as np
import pytest
from scipy import integrate
import scipy.stats.distributions as sp
import scipy.stats
from scipy.special import logit
import theano
import theano.tensor as tt
from ..math import kronecker
def get_lkj_cases():
    """Return (tri, eta, n, expected_logp) fixtures for LKJCorr tests.

    Each tuple holds a flattened correlation-matrix triangle, the eta
    parameter, the matrix dimension, and the expected log-density
    (-inf marks eta == 0 or entries outside the valid [-1, 1] range).
    """
    valid_tri = np.array([0.7, 0.0, -0.7])
    cases = [
        (valid_tri, 1, 3, 1.5963125911388549),
        (valid_tri, 3, 3, -7.7963493376312742),
        (valid_tri, 0, 3, -np.inf),
    ]
    # Correlations outside [-1, 1] are infeasible -> zero density.
    cases.append((np.array([1.1, 0.0, -0.7]), 1, 3, -np.inf))
    cases.append((np.array([0.7, 0.0, -1.1]), 1, 3, -np.inf))
    return cases
# Materialized once at import time; consumed by the LKJCorr logp tests.
LKJ_CASES = get_lkj_cases()
class Domain:
    """A set of representative test points for one distribution parameter.

    ``vals`` holds the interior points. Unless explicit ``edges`` are
    given, the first and last input values become the ``lower``/``upper``
    edges and are dropped from ``vals``.
    """

    def __init__(self, vals, dtype=None, edges=None, shape=None):
        as_array = array(vals, dtype=dtype)
        # Non-integer domains are cast to the active theano float width.
        if dtype is None and not str(as_array.dtype).startswith("int"):
            as_array = as_array.astype(theano.config.floatX)
        points = [array(v, dtype=as_array.dtype) for v in vals]
        if edges is None:
            # First/last points double as the integration edges.
            edges = (array(points[0]), array(points[-1]))
            points = points[1:-1]
        if shape is None:
            shape = as_array[0].shape
        self.vals = points
        self.shape = shape
        self.lower, self.upper = edges
        self.dtype = as_array.dtype

    def __add__(self, other):
        # Shifting moves both the interior points and the edges.
        return Domain(
            [v + other for v in self.vals],
            self.dtype,
            (self.lower + other, self.upper + other),
            self.shape,
        )

    def __mul__(self, other):
        # Edges such as None cannot be scaled; keep them unchanged then.
        try:
            return Domain(
                [v * other for v in self.vals],
                self.dtype,
                (self.lower * other, self.upper * other),
                self.shape,
            )
        except TypeError:
            return Domain(
                [v * other for v in self.vals],
                self.dtype,
                (self.lower, self.upper),
                self.shape,
            )

    def __neg__(self):
        return Domain([-v for v in self.vals], self.dtype, (-self.lower, -self.upper), self.shape)
def product(domains, n_samples=-1):
    """Cartesian product of a dict of Domains.

    Returns a list of ``zip(names, values)`` assignments, one per
    combination of the domains' test points. If ``n_samples > 0`` and
    there are more combinations than that, a generator over a random
    subsample is returned instead. An empty dict yields ``[{}]``.
    """
    if not domains:
        # No free parameters: a single empty assignment.
        return [{}]
    names = tuple(domains)
    value_grids = [dom.vals for dom in domains.values()]
    combos = [zip(names, values) for values in itertools.product(*value_grids)]
    if 0 < n_samples < len(combos):
        chosen = nr.choice(len(combos), n_samples, replace=False)
        return (combos[idx] for idx in chosen)
    return combos
# Standard test-point domains shared by all distribution tests.
# Continuous domains (edges become integration bounds):
R = Domain([-inf, -2.1, -1, -0.01, 0.0, 0.01, 1, 2.1, inf])  # whole real line
Rplus = Domain([0, 0.01, 0.1, 0.9, 0.99, 1, 1.5, 2, 100, inf])  # non-negative reals
Rplusbig = Domain([0, 0.5, 0.9, 0.99, 1, 1.5, 2, 20, inf])  # positive, away from 0
Rminusbig = Domain([-inf, -2, -1.5, -1, -0.99, -0.9, -0.5, -0.01, 0])  # negative mirror
Unit = Domain([0, 0.001, 0.1, 0.5, 0.75, 0.99, 1])  # the unit interval
Circ = Domain([-np.pi, -2.1, -1, -0.01, 0.0, 0.01, 1, 2.1, np.pi])  # circular support
Runif = Domain([-1, -0.4, 0, 0.4, 1])
Rdunif = Domain([-10, 0, 10.0])
Rplusunif = Domain([0, 0.5, inf])
Rplusdunif = Domain([2, 10, 100], "int64")
# Integer domains:
I = Domain([-1000, -3, -2, -1, 0, 1, 2, 3, 1000], "int64")
NatSmall = Domain([0, 3, 4, 5, 1000], "int64")
Nat = Domain([0, 1, 2, 3, 2000], "int64")
NatBig = Domain([0, 1, 2, 3, 5000, 50000], "int64")
PosNat = Domain([1, 2, 3, 2000], "int64")
Bool = Domain([0, 0, 1, 1], "int64")  # duplicated endpoints so vals == [0, 1]
def build_model(distfam, valuedomain, vardomains, extra_args=None):
    """Build a Model with one Flat variable per entry of `vardomains` and a
    `value` variable following `distfam` (untransformed) over `valuedomain`.

    `extra_args` are forwarded as additional keyword arguments to `distfam`.
    """
    if extra_args is None:
        extra_args = {}
    with Model() as m:
        vals = {}
        for v, dom in vardomains.items():
            # Flat priors so fastlogp reduces to the tested distribution's logp.
            vals[v] = Flat(v, dtype=dom.dtype, shape=dom.shape, testval=dom.vals[0])
        vals.update(extra_args)
        distfam("value", shape=valuedomain.shape, transform=None, **vals)
    return m
def integrate_nd(f, domain, shape, dtype):
    """Numerically integrate (continuous) or sum (discrete) `f` over `domain`.

    Scalar continuous domains use adaptive quadrature; scalar discrete
    domains are summed over the inclusive integer range; 2- and 3-vector
    domains use scipy's dblquad/tplquad.

    Raises
    ------
    ValueError
        For shapes other than (), (1,), (2,) or (3,).
    """
    if shape == () or shape == (1,):
        if dtype in continuous_types:
            return integrate.quad(f, domain.lower, domain.upper, epsabs=1e-8)[0]
        else:
            # Discrete support: sum the pmf over the inclusive range.
            return sum(f(j) for j in range(domain.lower, domain.upper + 1))
    elif shape == (2,):

        def f2(a, b):
            return f([a, b])

        return integrate.dblquad(
            f2,
            domain.lower[0],
            domain.upper[0],
            lambda _: domain.lower[1],
            lambda _: domain.upper[1],
        )[0]
    elif shape == (3,):

        def f3(a, b, c):
            return f([a, b, c])

        return integrate.tplquad(
            f3,
            domain.lower[0],
            domain.upper[0],
            lambda _: domain.lower[1],
            lambda _: domain.upper[1],
            lambda _, __: domain.lower[2],
            lambda _, __: domain.upper[2],
        )[0]
    else:
        # Fixed typo in the error message ("Dont" -> "Don't").
        raise ValueError("Don't know how to integrate shape: " + str(shape))
def multinomial_logpdf(value, n, p):
    """Reference multinomial log-pmf; -inf for infeasible count vectors."""
    feasible = value.sum() == n and (0 <= value).all() and (value <= n).all()
    if not feasible:
        return -inf
    # log n! - sum(log x_i!) + sum(x_i * log p_i)
    logpdf = scipy.special.gammaln(n + 1)
    logpdf -= scipy.special.gammaln(value + 1).sum()
    logpdf += logpow(p, value).sum()
    return logpdf
def beta_mu_sigma(value, mu, sigma):
    """Beta log-density under the (mu, sigma) parametrization.

    Converts to shape parameters alpha = mu * kappa, beta = (1 - mu) * kappa
    via the concentration kappa; non-positive kappa means the (mu, sigma)
    pair is infeasible, giving log-density -inf.
    """
    concentration = mu * (1 - mu) / sigma ** 2 - 1
    if concentration > 0:
        return sp.beta.logpdf(value, mu * concentration, (1 - mu) * concentration)
    return -inf
class ProductDomain:
    """Cartesian product of several Domains, treated as one vector domain."""

    def __init__(self, domains):
        # One test point per combination of the component domains' points.
        self.vals = list(itertools.product(*(dom.vals for dom in domains)))
        self.shape = (len(domains),) + domains[0].shape
        lowers, uppers = [], []
        for dom in domains:
            lowers.append(dom.lower)
            uppers.append(dom.upper)
        self.lower = lowers
        self.upper = uppers
        self.dtype = domains[0].dtype
def Vector(D, n):
    # n independent copies of domain D combined into one vector-valued domain.
    return ProductDomain([D] * n)
def SortedVector(n):
    """Domain of ten reproducible sorted standard-normal vectors of length n."""
    np.random.seed(42)
    draws = [np.sort(np.random.randn(n)) for _ in range(10)]
    return Domain(draws, edges=(None, None))
def UnitSortedVector(n):
    """Domain of ten reproducible sorted uniform(0, 1) vectors of length n."""
    np.random.seed(42)
    draws = [np.sort(np.random.rand(n)) for _ in range(10)]
    return Domain(draws, edges=(None, None))
def RealMatrix(n, m):
    """Domain of ten reproducible n x m standard-normal matrices."""
    np.random.seed(42)
    draws = [np.random.randn(n, m) for _ in range(10)]
    return Domain(draws, edges=(None, None))
def simplex_values(n):
    # Recursively yield grid points on the n-simplex (entries sum to 1),
    # built from the Unit domain's values via stick-breaking.
    if n == 1:
        yield array([1.0])
    else:
        for v in Unit.vals:
            for vals in simplex_values(n - 1):
                yield np.concatenate([[v], (1 - v) * vals])
def normal_logpdf_tau(value, mu, tau):
    """Multivariate normal log-density under a precision (tau) parametrization."""
    return normal_logpdf_cov(value, mu, np.linalg.inv(tau)).sum()


def normal_logpdf_cov(value, mu, cov):
    """Reference multivariate normal log-density given a covariance matrix."""
    return scipy.stats.multivariate_normal.logpdf(value, mu, cov).sum()


def normal_logpdf_chol(value, mu, chol):
    """Log-density given a lower-triangular Cholesky factor: cov = L @ L.T."""
    return normal_logpdf_cov(value, mu, chol @ chol.T).sum()


def normal_logpdf_chol_upper(value, mu, chol):
    """Log-density given an upper-triangular factor: cov = U.T @ U."""
    return normal_logpdf_cov(value, mu, chol.T @ chol).sum()
def matrix_normal_logpdf_cov(value, mu, rowcov, colcov):
    """Reference matrix-normal log-density with full row/column covariances."""
    return scipy.stats.matrix_normal.logpdf(value, mu, rowcov, colcov)


def matrix_normal_logpdf_chol(value, mu, rowchol, colchol):
    """Matrix-normal log-density given Cholesky factors of both covariances."""
    rowcov = np.dot(rowchol, rowchol.T)
    colcov = np.dot(colchol, colchol.T)
    return matrix_normal_logpdf_cov(value, mu, rowcov, colcov)
def kron_normal_logpdf_cov(value, mu, covs, sigma):
    """Reference KroneckerNormal log-density: dense kron of `covs` plus
    optional white-noise term sigma**2 * I."""
    cov = kronecker(*covs).eval()
    if sigma is not None:
        cov += sigma ** 2 * np.eye(*cov.shape)
    return scipy.stats.multivariate_normal.logpdf(value, mu, cov).sum()
def kron_normal_logpdf_chol(value, mu, chols, sigma):
    """Same density given Cholesky factors of the Kronecker components."""
    covs = [np.dot(chol, chol.T) for chol in chols]
    return kron_normal_logpdf_cov(value, mu, covs, sigma=sigma)
def kron_normal_logpdf_evd(value, mu, evds, sigma):
    """Same density given (eigenvalues, Q) eigendecompositions; the inputs
    may be numpy arrays or theano expressions (hence the .eval() probes)."""
    covs = []
    for eigs, Q in evds:
        # Accept either symbolic theano values (which have .eval()) or ndarrays.
        try:
            eigs = eigs.eval()
        except AttributeError:
            pass
        try:
            Q = Q.eval()
        except AttributeError:
            pass
        covs.append(np.dot(Q, np.dot(np.diag(eigs), Q.T)))
    return kron_normal_logpdf_cov(value, mu, covs, sigma)
def betafn(a):
    # Log multivariate beta function: sum(gammaln(a)) - gammaln(sum(a)),
    # reduced over the last axis.
    return floatX(scipy.special.gammaln(a).sum(-1) - scipy.special.gammaln(a.sum(-1)))
def logpow(v, p):
    """Elementwise log(v ** p) = p * log(v), with 0 wherever v == 0.

    The v == 0 entries are defined as 0 (the 0 * log(0) convention used
    by the reference pmfs); np.log(0) may still emit a runtime warning,
    matching the original np.choose-based implementation.
    """
    return np.where(v == 0, 0, p * np.log(v))
def discrete_weibull_logpmf(value, q, beta):
    # Discrete Weibull pmf: P(X = x) = q**(x**beta) - q**((x+1)**beta),
    # computed at the active theano float precision via floatX.
    return floatX(
        np.log(
            np.power(floatX(q), np.power(floatX(value), floatX(beta)))
            - np.power(floatX(q), np.power(floatX(value + 1), floatX(beta)))
        )
    )
def dirichlet_logpdf(value, a):
    # Dirichlet log-density: -log B(a) + sum((a - 1) * log(value)).
    return floatX((-betafn(a) + logpow(value, a - 1).sum(-1)).sum())
def categorical_logpdf(value, p):
    """Reference categorical log-pmf: log p[value] for in-range categories,
    -inf otherwise.

    Fix: valid categories are 0 .. len(p) - 1, so the upper bound must be
    strict. The original `value <= len(p)` admitted value == len(p), which
    raised an IndexError instead of returning -inf.
    """
    if value >= 0 and value < len(p):
        return floatX(np.log(np.moveaxis(p, -1, 0)[value]))
    else:
        return -inf
def mvt_logpdf(value, nu, Sigma, mu=0):
    """Reference multivariate Student-t log-density, summed over the rows
    of `value` (a single vector is promoted to one row)."""
    dim = len(Sigma)
    deviations = np.atleast_2d(value) - mu
    chol_factor = np.linalg.cholesky(Sigma)
    # Whiten the deviations with the Cholesky factor instead of inverting Sigma.
    whitened = np.linalg.solve(chol_factor, deviations.T).T
    half_logdet = np.log(np.diag(chol_factor)).sum()
    lgamma = scipy.special.gammaln
    norm_const = lgamma((nu + dim) / 2.0) - 0.5 * dim * np.log(nu * np.pi) - lgamma(nu / 2.0)
    quad = (whitened * whitened).sum(-1)
    logp = norm_const - half_logdet - (nu + dim) / 2.0 * np.log1p(quad / nu)
    return logp.sum()
def AR1_logpdf(value, k, tau_e):
    """Reference log-density of a stationary AR(1) series.

    The first observation follows the stationary distribution with
    precision tau = tau_e * (1 - k**2); each later observation is normal
    around k times its predecessor with innovation precision tau_e.
    """
    stationary_tau = tau_e * (1 - k ** 2)
    first = sp.norm(loc=0, scale=1 / np.sqrt(stationary_tau)).logpdf(value[0])
    rest = sp.norm(loc=k * value[:-1], scale=1 / np.sqrt(tau_e)).logpdf(value[1:]).sum()
    return first + rest
def invlogit(x, eps=sys.float_info.epsilon):
    """Logistic sigmoid squashed into (eps, 1 - eps) for numerical safety."""
    squashed = (1.0 - 2.0 * eps) / (1.0 + np.exp(-x))
    return squashed + eps
def orderedlogistic_logpdf(value, eta, cutpoints):
    """Reference ordered-logistic log-pmf at category index `value`."""
    bounds = np.concatenate(([-np.inf], cutpoints, [np.inf]))
    # Category probabilities are differences of adjacent sigmoid values.
    probs = np.array(
        [invlogit(eta - lo) - invlogit(eta - hi) for lo, hi in zip(bounds[:-1], bounds[1:])]
    )
    # Any negative probability (unsorted cutpoints) makes the pmf invalid.
    return np.where(np.all(probs >= 0), np.log(probs[value]), -np.inf)
class Simplex:
    # Domain over points of the n-simplex (no edge trimming).
    def __init__(self, n):
        self.vals = list(simplex_values(n))
        self.shape = (n,)
        self.dtype = Unit.dtype
class MultiSimplex:
    # Domain over stacks of independent simplex points: each value is an
    # (n_independent, n_dependent) matrix whose rows each sum to 1.
    def __init__(self, n_dependent, n_independent):
        self.vals = []
        for simplex_value in itertools.product(simplex_values(n_dependent), repeat=n_independent):
            self.vals.append(np.vstack(simplex_value))
        self.shape = (n_independent, n_dependent)
        self.dtype = Unit.dtype
def PdMatrix(n):
    """Return the fixed positive-definite matrix domain of size n (1..3)."""
    domains = {1: PdMatrix1, 2: PdMatrix2, 3: PdMatrix3}
    if n in domains:
        return domains[n]
    raise ValueError("n out of bounds")
# Fixed positive-definite matrices (and lower-triangular Cholesky factors)
# used as covariance test points; edges=(None, None) disables edge trimming.
PdMatrix1 = Domain([np.eye(1), [[0.5]]], edges=(None, None))
PdMatrix2 = Domain([np.eye(2), [[0.5, 0.05], [0.05, 4.5]]], edges=(None, None))
PdMatrix3 = Domain([np.eye(3), [[0.5, 0.1, 0], [0.1, 1, 0], [0, 0, 2.5]]], edges=(None, None))
PdMatrixChol1 = Domain([np.eye(1), [[0.001]]], edges=(None, None))
PdMatrixChol2 = Domain([np.eye(2), [[0.1, 0], [10, 1]]], edges=(None, None))
PdMatrixChol3 = Domain([np.eye(3), [[0.1, 0, 0], [10, 100, 0], [0, 1, 10]]], edges=(None, None))
def PdMatrixChol(n):
    """Return the lower-triangular Cholesky-factor domain of size n (1..3)."""
    domains = {1: PdMatrixChol1, 2: PdMatrixChol2, 3: PdMatrixChol3}
    if n in domains:
        return domains[n]
    raise ValueError("n out of bounds")
# Upper-triangular Cholesky-factor test points (transposes of the lower set).
PdMatrixCholUpper1 = Domain([np.eye(1), [[0.001]]], edges=(None, None))
PdMatrixCholUpper2 = Domain([np.eye(2), [[0.1, 10], [0, 1]]], edges=(None, None))
PdMatrixCholUpper3 = Domain(
    [np.eye(3), [[0.1, 10, 0], [0, 100, 1], [0, 0, 10]]], edges=(None, None)
)
def PdMatrixCholUpper(n):
    """Return the upper-triangular Cholesky-factor domain of size n (1..3)."""
    domains = {1: PdMatrixCholUpper1, 2: PdMatrixCholUpper2, 3: PdMatrixCholUpper3}
    if n in domains:
        return domains[n]
    raise ValueError("n out of bounds")
def RandomPdMatrix(n):
    """Draw a random symmetric positive-definite n x n matrix."""
    factor = np.random.rand(n, n)
    # factor @ factor.T is PSD; adding n * I makes the eigenvalues strictly positive.
    return np.dot(factor, factor.T) + n * np.identity(n)
class TestMatchesScipy(SeededTest):
    def pymc3_matches_scipy(
        self,
        pymc3_dist,
        domain,
        paramdomains,
        scipy_dist,
        decimal=None,
        extra_args=None,
        scipy_args=None,
    ):
        # Build a model around `pymc3_dist` and compare its logp with the
        # scipy reference `scipy_dist` over the product of parameter domains.
        # `scipy_args` are fixed keyword arguments merged into every call.
        if extra_args is None:
            extra_args = {}
        if scipy_args is None:
            scipy_args = {}
        model = build_model(pymc3_dist, domain, paramdomains, extra_args)
        value = model.named_vars["value"]
        def logp(args):
            args.update(scipy_args)
            return scipy_dist(**args)
        self.check_logp(model, value, domain, paramdomains, logp, decimal=decimal)
    def check_logp(self, model, value, domain, paramdomains, logp_reference, decimal=None):
        # Compare model.fastlogp with the reference at up to 100 sampled
        # points from the product of the value and parameter domains.
        domains = paramdomains.copy()
        domains["value"] = domain
        logp = model.fastlogp
        for pt in product(domains, n_samples=100):
            pt = Point(pt, model=model)
            if decimal is None:
                decimal = select_by_precision(float64=6, float32=3)
            assert_almost_equal(logp(pt), logp_reference(pt), decimal=decimal, err_msg=str(pt))
    def check_logcdf(
        self,
        pymc3_dist,
        domain,
        paramdomains,
        scipy_logcdf,
        decimal=None,
        n_samples=100,
    ):
        # Compare the distribution's symbolic logcdf (via its test value)
        # with the scipy reference over sampled parameter/value points.
        domains = paramdomains.copy()
        domains["value"] = domain
        if decimal is None:
            decimal = select_by_precision(float64=6, float32=3)
        for pt in product(domains, n_samples=n_samples):
            params = dict(pt)
            scipy_cdf = scipy_logcdf(**params)
            value = params.pop("value")
            dist = pymc3_dist.dist(**params)
            assert_almost_equal(
                dist.logcdf(value).tag.test_value,
                scipy_cdf,
                decimal=decimal,
                err_msg=str(pt),
            )
    def check_int_to_1(self, model, value, domain, paramdomains):
        # The density must integrate (or sum) to 1 over its support for
        # every sampled parameter combination.
        pdf = model.fastfn(exp(model.logpt))
        for pt in product(paramdomains, n_samples=10):
            pt = Point(pt, value=value.tag.test_value, model=model)
            bij = DictToVarBijection(value, (), pt)
            pdfx = bij.mapf(pdf)
            area = integrate_nd(pdfx, domain, value.dshape, value.dtype)
            assert_almost_equal(area, 1, err_msg=str(pt))
    def checkd(self, distfam, valuedomain, vardomains, checks=None, extra_args=None):
        # Run the given check functions (default: normalization) against a
        # model built from `distfam` over the supplied domains.
        if checks is None:
            checks = (self.check_int_to_1,)
        if extra_args is None:
            extra_args = {}
        m = build_model(distfam, valuedomain, vardomains, extra_args=extra_args)
        for check in checks:
            check(m, m.named_vars["value"], valuedomain, vardomains)
    # Uniform logp/logcdf vs scipy (scipy takes loc and scale = upper - lower).
    def test_uniform(self):
        self.pymc3_matches_scipy(
            Uniform,
            Runif,
            {"lower": -Rplusunif, "upper": Rplusunif},
            lambda value, lower, upper: sp.uniform.logpdf(value, lower, upper - lower),
        )
        self.check_logcdf(
            Uniform,
            Runif,
            {"lower": -Rplusunif, "upper": Rplusunif},
            lambda value, lower, upper: sp.uniform.logcdf(value, lower, upper - lower),
        )
    # Triangular logp/logcdf vs scipy's triang (c rescaled to [0, 1]).
    def test_triangular(self):
        self.pymc3_matches_scipy(
            Triangular,
            Runif,
            {"lower": -Rplusunif, "c": Runif, "upper": Rplusunif},
            lambda value, c, lower, upper: sp.triang.logpdf(value, c - lower, lower, upper - lower),
        )
        self.check_logcdf(
            Triangular,
            Runif,
            {"lower": -Rplusunif, "c": Runif, "upper": Rplusunif},
            lambda value, c, lower, upper: sp.triang.logcdf(value, c - lower, lower, upper - lower),
        )
    # Bound(Normal, lower=0) matches the unbounded normal density on the
    # positive axis and is -inf below the bound.
    def test_bound_normal(self):
        PositiveNormal = Bound(Normal, lower=0.0)
        self.pymc3_matches_scipy(
            PositiveNormal,
            Rplus,
            {"mu": Rplus, "sigma": Rplus},
            lambda value, mu, sigma: sp.norm.logpdf(value, mu, sigma),
            decimal=select_by_precision(float64=6, float32=-1),
        )
        with Model():
            x = PositiveNormal("x", mu=0, sigma=1, transform=None)
        assert np.isinf(x.logp({"x": -1}))
    # DiscreteUniform vs scipy's randint (whose upper bound is exclusive).
    def test_discrete_unif(self):
        self.pymc3_matches_scipy(
            DiscreteUniform,
            Rdunif,
            {"lower": -Rplusdunif, "upper": Rplusdunif},
            lambda value, lower, upper: sp.randint.logpmf(value, lower, upper + 1),
        )
    # Flat: constant zero logp, symmetric logcdf convention, zero test value.
    def test_flat(self):
        self.pymc3_matches_scipy(Flat, Runif, {}, lambda value: 0)
        with Model():
            x = Flat("a")
            assert_allclose(x.tag.test_value, 0)
        self.check_logcdf(Flat, Runif, {}, lambda value: np.log(0.5))
        # Check fails because mpmath derivative function raises a
        assert 0.0 == Flat.dist().logcdf(np.inf).tag.test_value
        assert -np.inf == Flat.dist().logcdf(-np.inf).tag.test_value
    # HalfFlat: zero logp on the positive axis, unit test value, -inf logcdf.
    def test_half_flat(self):
        self.pymc3_matches_scipy(HalfFlat, Rplus, {}, lambda value: 0)
        with Model():
            x = HalfFlat("a", shape=2)
            assert_allclose(x.tag.test_value, 1)
            assert x.tag.test_value.shape == (2,)
        self.check_logcdf(HalfFlat, Runif, {}, lambda value: -np.inf)
        assert 0.0 == HalfFlat.dist().logcdf(np.inf).tag.test_value
        assert -np.inf == HalfFlat.dist().logcdf(-np.inf).tag.test_value
    # Normal logp/logcdf vs scipy.stats.norm.
    def test_normal(self):
        self.pymc3_matches_scipy(
            Normal,
            R,
            {"mu": R, "sigma": Rplus},
            lambda value, mu, sigma: sp.norm.logpdf(value, mu, sigma),
            decimal=select_by_precision(float64=6, float32=1),
        )
        self.check_logcdf(
            Normal,
            R,
            {"mu": R, "sigma": Rplus},
            lambda value, mu, sigma: sp.norm.logcdf(value, mu, sigma),
        )
    # TruncatedNormal vs scipy.truncnorm (bounds standardized by mu/sigma).
    def test_truncated_normal(self):
        def scipy_logp(value, mu, sigma, lower, upper):
            return sp.truncnorm.logpdf(
                value, (lower - mu) / sigma, (upper - mu) / sigma, loc=mu, scale=sigma
            )
        self.pymc3_matches_scipy(
            TruncatedNormal,
            R,
            {"mu": R, "sigma": Rplusbig, "lower": -Rplusbig, "upper": Rplusbig},
            scipy_logp,
            decimal=select_by_precision(float64=6, float32=1),
        )
    # HalfNormal logp/logcdf vs scipy.halfnorm.
    def test_half_normal(self):
        self.pymc3_matches_scipy(
            HalfNormal,
            Rplus,
            {"sigma": Rplus},
            lambda value, sigma: sp.halfnorm.logpdf(value, scale=sigma),
            decimal=select_by_precision(float64=6, float32=-1),
        )
        self.check_logcdf(
            HalfNormal,
            Rplus,
            {"sigma": Rplus},
            lambda value, sigma: sp.halfnorm.logcdf(value, scale=sigma),
        )
    # ChiSquared logp vs scipy.chi2 (integer degrees of freedom only).
    def test_chi_squared(self):
        self.pymc3_matches_scipy(
            ChiSquared,
            Rplus,
            {"nu": Rplusdunif},
            lambda value, nu: sp.chi2.logpdf(value, df=nu),
        )
    # Wald vs scipy.invgauss; expected to fail until scipy/scipy#869 is fixed.
    @pytest.mark.xfail(reason="Poor CDF in SciPy. See scipy/scipy#869 for details.")
    def test_wald_scipy(self):
        self.pymc3_matches_scipy(
            Wald,
            Rplus,
            {"mu": Rplus, "alpha": Rplus},
            lambda value, mu, alpha: sp.invgauss.logpdf(value, mu=mu, loc=alpha),
            decimal=select_by_precision(float64=6, float32=1),
        )
        self.check_logcdf(
            Wald,
            Rplus,
            {"mu": Rplus, "alpha": Rplus},
            lambda value, mu, alpha: sp.invgauss.logcdf(value, mu=mu, loc=alpha),
        )
    # Wald against precomputed reference logp values for the alternative
    # (mu, lam), (mu, phi) and shifted (alpha) parametrizations.
    @pytest.mark.parametrize(
        "value,mu,lam,phi,alpha,logp",
        [
            (0.5, 0.001, 0.5, None, 0.0, -124500.7257914),
            (1.0, 0.5, 0.001, None, 0.0, -4.3733162),
            (2.0, 1.0, None, None, 0.0, -2.2086593),
            (5.0, 2.0, 2.5, None, 0.0, -3.4374500),
            (7.5, 5.0, None, 1.0, 0.0, -3.2199074),
            (15.0, 10.0, None, 0.75, 0.0, -4.0360623),
            (50.0, 15.0, None, 0.66666, 0.0, -6.1801249),
            (0.5, 0.001, 0.5, None, 0.0, -124500.7257914),
            (1.0, 0.5, 0.001, None, 0.5, -3.3330954),
            (2.0, 1.0, None, None, 1.0, -0.9189385),
            (5.0, 2.0, 2.5, None, 2.0, -2.2128783),
            (7.5, 5.0, None, 1.0, 2.5, -2.5283764),
            (15.0, 10.0, None, 0.75, 5.0, -3.3653647),
            (50.0, 15.0, None, 0.666666, 10.0, -5.6481874),
        ],
    )
    def test_wald(self, value, mu, lam, phi, alpha, logp):
        # Reference values computed by R's statmod-style density.
        with Model() as model:
            Wald("wald", mu=mu, lam=lam, phi=phi, alpha=alpha, transform=None)
        pt = {"wald": value}
        decimals = select_by_precision(float64=6, float32=1)
        assert_almost_equal(model.fastlogp(pt), logp, decimal=decimals, err_msg=str(pt))
    # Beta under both the (alpha, beta) and (mu, sigma) parametrizations.
    def test_beta(self):
        self.pymc3_matches_scipy(
            Beta,
            Unit,
            {"alpha": Rplus, "beta": Rplus},
            lambda value, alpha, beta: sp.beta.logpdf(value, alpha, beta),
        )
        self.pymc3_matches_scipy(Beta, Unit, {"mu": Unit, "sigma": Rplus}, beta_mu_sigma)
        self.check_logcdf(
            Beta,
            Unit,
            {"alpha": Rplus, "beta": Rplus},
            lambda value, alpha, beta: sp.beta.logcdf(value, alpha, beta),
        )
    # Kumaraswamy against its closed-form log-density (not in scipy).
    def test_kumaraswamy(self):
        # Scipy does not have a built-in Kumaraswamy
        def scipy_log_pdf(value, a, b):
            return (
                np.log(a) + np.log(b) + (a - 1) * np.log(value) + (b - 1) * np.log(1 - value ** a)
            )
        self.pymc3_matches_scipy(Kumaraswamy, Unit, {"a": Rplus, "b": Rplus}, scipy_log_pdf)
    # Exponential vs scipy.expon (scale = 1 / lam).
    def test_exponential(self):
        self.pymc3_matches_scipy(
            Exponential,
            Rplus,
            {"lam": Rplus},
            lambda value, lam: sp.expon.logpdf(value, 0, 1 / lam),
        )
        self.check_logcdf(
            Exponential,
            Rplus,
            {"lam": Rplus},
            lambda value, lam: sp.expon.logcdf(value, 0, 1 / lam),
        )
    # Geometric vs scipy.geom.
    def test_geometric(self):
        self.pymc3_matches_scipy(
            Geometric, Nat, {"p": Unit}, lambda value, p: np.log(sp.geom.pmf(value, p))
        )
    # HyperGeometric vs scipy.hypergeom.
    def test_hypergeometric(self):
        self.pymc3_matches_scipy(
            HyperGeometric,
            Nat,
            {"N": NatSmall, "k": NatSmall, "n": NatSmall},
            lambda value, N, k, n: sp.hypergeom.logpmf(value, N, k, n),
        )
    # NegativeBinomial under both (mu, alpha) and (p, n) parametrizations.
    def test_negative_binomial(self):
        def test_fun(value, mu, alpha):
            return sp.nbinom.logpmf(value, alpha, 1 - mu / (mu + alpha))
        self.pymc3_matches_scipy(NegativeBinomial, Nat, {"mu": Rplus, "alpha": Rplus}, test_fun)
        self.pymc3_matches_scipy(
            NegativeBinomial,
            Nat,
            {"p": Unit, "n": Rplus},
            lambda value, p, n: sp.nbinom.logpmf(value, n, p),
        )
    # Every invalid parameter combination must raise with a precise message.
    @pytest.mark.parametrize(
        "mu, p, alpha, n, expected",
        [
            (5, None, None, None, "Must specify either alpha or n."),
            (None, 0.5, None, None, "Must specify either alpha or n."),
            (None, None, None, None, "Must specify either alpha or n."),
            (5, None, 2, 2, "Can't specify both alpha and n."),
            (None, 0.5, 2, 2, "Can't specify both alpha and n."),
            (None, None, 2, 2, "Can't specify both alpha and n."),
            (None, None, 2, None, "Must specify either mu or p."),
            (None, None, None, 2, "Must specify either mu or p."),
            (5, 0.5, 2, None, "Can't specify both mu and p."),
            (5, 0.5, None, 2, "Can't specify both mu and p."),
        ],
    )
    def test_negative_binomial_init_fail(self, mu, p, alpha, n, expected):
        with Model():
            with pytest.raises(ValueError, match=f"Incompatible parametrization. {expected}"):
                NegativeBinomial("x", mu=mu, p=p, alpha=alpha, n=n)
    # Laplace logp/logcdf vs scipy.laplace.
    def test_laplace(self):
        self.pymc3_matches_scipy(
            Laplace,
            R,
            {"mu": R, "b": Rplus},
            lambda value, mu, b: sp.laplace.logpdf(value, mu, b),
        )
        self.check_logcdf(
            Laplace,
            R,
            {"mu": R, "b": Rplus},
            lambda value, mu, b: sp.laplace.logcdf(value, mu, b),
        )
    # Lognormal vs scipy.lognorm: shape = tau**-0.5, scale = exp(mu).
    def test_lognormal(self):
        self.pymc3_matches_scipy(
            Lognormal,
            Rplus,
            {"mu": R, "tau": Rplusbig},
            lambda value, mu, tau: floatX(sp.lognorm.logpdf(value, tau ** -0.5, 0, np.exp(mu))),
        )
        self.check_logcdf(
            Lognormal,
            Rplus,
            {"mu": R, "tau": Rplusbig},
            lambda value, mu, tau: sp.lognorm.logcdf(value, tau ** -0.5, 0, np.exp(mu)),
        )
    # StudentT vs scipy.t: scale = lam**-0.5 (precision parametrization).
    def test_t(self):
        self.pymc3_matches_scipy(
            StudentT,
            R,
            {"nu": Rplus, "mu": R, "lam": Rplus},
            lambda value, nu, mu, lam: sp.t.logpdf(value, nu, mu, lam ** -0.5),
        )
        self.check_logcdf(
            StudentT,
            R,
            {"nu": Rplus, "mu": R, "lam": Rplus},
            lambda value, nu, mu, lam: sp.t.logcdf(value, nu, mu, lam ** -0.5),
            n_samples=10,
        )
    # Cauchy logp/logcdf vs scipy.cauchy.
    def test_cauchy(self):
        self.pymc3_matches_scipy(
            Cauchy,
            R,
            {"alpha": R, "beta": Rplusbig},
            lambda value, alpha, beta: sp.cauchy.logpdf(value, alpha, beta),
        )
        self.check_logcdf(
            Cauchy,
            R,
            {"alpha": R, "beta": Rplusbig},
            lambda value, alpha, beta: sp.cauchy.logcdf(value, alpha, beta),
        )
    # HalfCauchy logp/logcdf vs scipy.halfcauchy.
    def test_half_cauchy(self):
        self.pymc3_matches_scipy(
            HalfCauchy,
            Rplus,
            {"beta": Rplusbig},
            lambda value, beta: sp.halfcauchy.logpdf(value, scale=beta),
        )
        self.check_logcdf(
            HalfCauchy,
            Rplus,
            {"beta": Rplusbig},
            lambda value, beta: sp.halfcauchy.logcdf(value, scale=beta),
        )
def test_gamma(self):
self.pymc3_matches_scipy(
Gamma,
Rplus,
{"alpha": Rplusbig, "beta": Rplusbig},
lambda value, alpha, beta: sp.gamma.logpdf(value, alpha, scale=1.0 / beta),
)
def test_fun(value, mu, sigma):
return sp.gamma.logpdf(value, mu ** 2 / sigma ** 2, scale=1.0 / (mu / sigma ** 2))
self.pymc3_matches_scipy(Gamma, Rplus, {"mu": Rplusbig, "sigma": Rplusbig}, test_fun)
self.check_logcdf(
Gamma,
Rplus,
{"alpha": Rplusbig, "beta": Rplusbig},
lambda value, alpha, beta: sp.gamma.logcdf(value, alpha, scale=1.0 / beta),
)
@pytest.mark.xfail(
condition=(theano.config.floatX == "float32"),
reason="Fails on float32 due to numerical issues",
)
def test_inverse_gamma(self):
self.pymc3_matches_scipy(
InverseGamma,
Rplus,
{"alpha": Rplus, "beta": Rplus},
lambda value, alpha, beta: sp.invgamma.logpdf(value, alpha, scale=beta),
)
self.check_logcdf(
InverseGamma,
Rplus,
{"alpha": Rplus, "beta": Rplus},
lambda value, alpha, beta: sp.invgamma.logcdf(value, alpha, scale=beta),
)
@pytest.mark.xfail(
condition=(theano.config.floatX == "float32"),
reason="Fails on float32 due to scaling issues",
)
def test_inverse_gamma_alt_params(self):
def test_fun(value, mu, sigma):
alpha, beta = InverseGamma._get_alpha_beta(None, None, mu, sigma)
return sp.invgamma.logpdf(value, alpha, scale=beta)
self.pymc3_matches_scipy(InverseGamma, Rplus, {"mu": Rplus, "sigma": Rplus}, test_fun)
    def test_pareto(self):
        """Check Pareto logp and logcdf against scipy.stats.pareto."""
        self.pymc3_matches_scipy(
            Pareto,
            Rplus,
            {"alpha": Rplusbig, "m": Rplusbig},
            lambda value, alpha, m: sp.pareto.logpdf(value, alpha, scale=m),
        )
        self.check_logcdf(
            Pareto,
            Rplus,
            {"alpha": Rplusbig, "m": Rplusbig},
            lambda value, alpha, m: sp.pareto.logcdf(value, alpha, scale=m),
        )
    @pytest.mark.xfail(
        condition=(theano.config.floatX == "float32"),
        reason="Fails on float32 due to inf issues",
    )
    def test_weibull(self):
        """Check Weibull logp and logcdf against scipy.stats.exponweib (with a=1)."""
        self.pymc3_matches_scipy(
            Weibull,
            Rplus,
            {"alpha": Rplusbig, "beta": Rplusbig},
            lambda value, alpha, beta: sp.exponweib.logpdf(value, 1, alpha, scale=beta),
        )
        self.check_logcdf(
            Weibull,
            Rplus,
            {"alpha": Rplusbig, "beta": Rplusbig},
            lambda value, alpha, beta: sp.exponweib.logcdf(value, 1, alpha, scale=beta),
        )
    def test_half_studentt(self):
        """Check HalfStudentT logp against scipy.stats.halfcauchy."""
        # this is only testing for nu=1 (halfcauchy)
        self.pymc3_matches_scipy(
            HalfStudentT,
            Rplus,
            {"sigma": Rplus},
            lambda value, sigma: sp.halfcauchy.logpdf(value, 0, sigma),
        )
    def test_skew_normal(self):
        """Check SkewNormal logp against scipy.stats.skewnorm."""
        self.pymc3_matches_scipy(
            SkewNormal,
            R,
            {"mu": R, "sigma": Rplusbig, "alpha": R},
            lambda value, alpha, mu, sigma: sp.skewnorm.logpdf(value, alpha, mu, sigma),
        )
    def test_binomial(self):
        """Check Binomial logp against scipy.stats.binom."""
        self.pymc3_matches_scipy(
            Binomial,
            Nat,
            {"n": NatSmall, "p": Unit},
            lambda value, n, p: sp.binom.logpmf(value, n, p),
        )
    # Too lazy to propagate decimal parameter through the whole chain of deps
    @pytest.mark.xfail(condition=(theano.config.floatX == "float32"), reason="Fails on float32")
    def test_beta_binomial(self):
        """Sanity-check BetaBinomial logp/gradient via the generic checkd helper."""
        self.checkd(BetaBinomial, Nat, {"alpha": Rplus, "beta": Rplus, "n": NatSmall})
    def test_bernoulli(self):
        """Check Bernoulli logp for both the logit_p and p parametrizations."""
        self.pymc3_matches_scipy(
            Bernoulli,
            Bool,
            {"logit_p": R},
            lambda value, logit_p: sp.bernoulli.logpmf(value, scipy.special.expit(logit_p)),
        )
        self.pymc3_matches_scipy(
            Bernoulli, Bool, {"p": Unit}, lambda value, p: sp.bernoulli.logpmf(value, p)
        )
    def test_discrete_weibull(self):
        """Check DiscreteWeibull logp against the reference discrete_weibull_logpmf."""
        self.pymc3_matches_scipy(
            DiscreteWeibull,
            Nat,
            {"q": Unit, "beta": Rplusdunif},
            discrete_weibull_logpmf,
        )
    def test_poisson(self):
        """Check Poisson logp against scipy.stats.poisson."""
        self.pymc3_matches_scipy(
            Poisson, Nat, {"mu": Rplus}, lambda value, mu: sp.poisson.logpmf(value, mu)
        )
    def test_bound_poisson(self):
        """A Poisson bounded below at 1 matches scipy on its support and is -inf at 0."""
        NonZeroPoisson = Bound(Poisson, lower=1.0)
        self.pymc3_matches_scipy(
            NonZeroPoisson,
            PosNat,
            {"mu": Rplus},
            lambda value, mu: sp.poisson.logpmf(value, mu),
        )
        with Model():
            x = NonZeroPoisson("x", mu=4)
            # Values below the bound must have zero probability.
            assert np.isinf(x.logp({"x": 0}))
    def test_constantdist(self):
        """Constant distribution puts all mass on c: log(1) there, log(0) elsewhere."""
        self.pymc3_matches_scipy(Constant, I, {"c": I}, lambda value, c: np.log(c == value))
    # Too lazy to propagate decimal parameter through the whole chain of deps
    @pytest.mark.xfail(condition=(theano.config.floatX == "float32"), reason="Fails on float32")
    def test_zeroinflatedpoisson(self):
        """Sanity-check ZeroInflatedPoisson logp/gradient via the generic checkd helper."""
        self.checkd(ZeroInflatedPoisson, Nat, {"theta": Rplus, "psi": Unit})
    # Too lazy to propagate decimal parameter through the whole chain of deps
    @pytest.mark.xfail(condition=(theano.config.floatX == "float32"), reason="Fails on float32")
    def test_zeroinflatednegativebinomial(self):
        """Sanity-check ZeroInflatedNegativeBinomial via the generic checkd helper."""
        self.checkd(
            ZeroInflatedNegativeBinomial,
            Nat,
            {"mu": Rplusbig, "alpha": Rplusbig, "psi": Unit},
        )
    # Too lazy to propagate decimal parameter through the whole chain of deps
    @pytest.mark.xfail(condition=(theano.config.floatX == "float32"), reason="Fails on float32")
    def test_zeroinflatedbinomial(self):
        """Sanity-check ZeroInflatedBinomial via the generic checkd helper."""
        self.checkd(ZeroInflatedBinomial, Nat, {"n": NatSmall, "p": Unit, "psi": Unit})
    @pytest.mark.parametrize("n", [1, 2, 3])
    def test_mvnormal(self, n):
        """Check MvNormal logp for vector and matrix values under the tau, cov,
        lower-Cholesky and upper-Cholesky parametrizations."""
        self.pymc3_matches_scipy(
            MvNormal,
            RealMatrix(5, n),
            {"mu": Vector(R, n), "tau": PdMatrix(n)},
            normal_logpdf_tau,
        )
        self.pymc3_matches_scipy(
            MvNormal,
            Vector(R, n),
            {"mu": Vector(R, n), "tau": PdMatrix(n)},
            normal_logpdf_tau,
        )
        self.pymc3_matches_scipy(
            MvNormal,
            RealMatrix(5, n),
            {"mu": Vector(R, n), "cov": PdMatrix(n)},
            normal_logpdf_cov,
        )
        self.pymc3_matches_scipy(
            MvNormal,
            Vector(R, n),
            {"mu": Vector(R, n), "cov": PdMatrix(n)},
            normal_logpdf_cov,
        )
        self.pymc3_matches_scipy(
            MvNormal,
            RealMatrix(5, n),
            {"mu": Vector(R, n), "chol": PdMatrixChol(n)},
            normal_logpdf_chol,
            decimal=select_by_precision(float64=6, float32=-1),
        )
        self.pymc3_matches_scipy(
            MvNormal,
            Vector(R, n),
            {"mu": Vector(R, n), "chol": PdMatrixChol(n)},
            normal_logpdf_chol,
            decimal=select_by_precision(float64=6, float32=0),
        )
        def MvNormalUpper(*args, **kwargs):
            # Wrapper that fixes lower=False to exercise the upper-triangular path.
            return MvNormal(lower=False, *args, **kwargs)
        self.pymc3_matches_scipy(
            MvNormalUpper,
            Vector(R, n),
            {"mu": Vector(R, n), "chol": PdMatrixCholUpper(n)},
            normal_logpdf_chol_upper,
            decimal=select_by_precision(float64=6, float32=0),
        )
    @pytest.mark.xfail(
        condition=(theano.config.floatX == "float32"),
        reason="Fails on float32 due to inf issues",
    )
    def test_mvnormal_indef(self):
        """Indefinite cov/tau must give -inf logp and a non-finite gradient."""
        cov_val = np.array([[1, 0.5], [0.5, -2]])
        cov = tt.matrix("cov")
        cov.tag.test_value = np.eye(2)
        mu = floatX(np.zeros(2))
        x = tt.vector("x")
        x.tag.test_value = np.zeros(2)
        logp = MvNormal.dist(mu=mu, cov=cov).logp(x)
        f_logp = theano.function([cov, x], logp)
        assert f_logp(cov_val, np.ones(2)) == -np.inf
        dlogp = tt.grad(logp, cov)
        f_dlogp = theano.function([cov, x], dlogp)
        assert not np.all(np.isfinite(f_dlogp(cov_val, np.ones(2))))
        # Same checks for the precision-matrix (tau) parametrization.
        logp = MvNormal.dist(mu=mu, tau=cov).logp(x)
        f_logp = theano.function([cov, x], logp)
        assert f_logp(cov_val, np.ones(2)) == -np.inf
        dlogp = tt.grad(logp, cov)
        f_dlogp = theano.function([cov, x], dlogp)
        assert not np.all(np.isfinite(f_dlogp(cov_val, np.ones(2))))
    def test_mvnormal_init_fail(self):
        """MvNormal must reject construction with zero or multiple scale parameters."""
        with Model():
            with pytest.raises(ValueError):
                x = MvNormal("x", mu=np.zeros(3), shape=3)
            with pytest.raises(ValueError):
                x = MvNormal("x", mu=np.zeros(3), cov=np.eye(3), tau=np.eye(3), shape=3)
    @pytest.mark.parametrize("n", [1, 2, 3])
    def test_matrixnormal(self, n):
        """Check MatrixNormal logp for square and rectangular values under the
        covariance and Cholesky parametrizations."""
        mat_scale = 1e3  # To reduce logp magnitude
        mean_scale = 0.1
        self.pymc3_matches_scipy(
            MatrixNormal,
            RealMatrix(n, n),
            {
                "mu": RealMatrix(n, n) * mean_scale,
                "rowcov": PdMatrix(n) * mat_scale,
                "colcov": PdMatrix(n) * mat_scale,
            },
            matrix_normal_logpdf_cov,
        )
        self.pymc3_matches_scipy(
            MatrixNormal,
            RealMatrix(2, n),
            {
                "mu": RealMatrix(2, n) * mean_scale,
                "rowcov": PdMatrix(2) * mat_scale,
                "colcov": PdMatrix(n) * mat_scale,
            },
            matrix_normal_logpdf_cov,
        )
        self.pymc3_matches_scipy(
            MatrixNormal,
            RealMatrix(3, n),
            {
                "mu": RealMatrix(3, n) * mean_scale,
                "rowchol": PdMatrixChol(3) * mat_scale,
                "colchol": PdMatrixChol(n) * mat_scale,
            },
            matrix_normal_logpdf_chol,
            decimal=select_by_precision(float64=6, float32=-1),
        )
        self.pymc3_matches_scipy(
            MatrixNormal,
            RealMatrix(n, 3),
            {
                "mu": RealMatrix(n, 3) * mean_scale,
                "rowchol": PdMatrixChol(n) * mat_scale,
                "colchol": PdMatrixChol(3) * mat_scale,
            },
            matrix_normal_logpdf_chol,
            decimal=select_by_precision(float64=6, float32=0),
        )
    @pytest.mark.parametrize("n", [2, 3])
    @pytest.mark.parametrize("m", [3])
    @pytest.mark.parametrize("sigma", [None, 1.0])
    def test_kroneckernormal(self, n, m, sigma):
        """Check KroneckerNormal logp for the covariance, Cholesky and
        eigendecomposition parametrizations, for 1D and batched 2D values."""
        np.random.seed(5)
        N = n * m
        covs = [RandomPdMatrix(n), RandomPdMatrix(m)]
        chols = list(map(np.linalg.cholesky, covs))
        evds = list(map(np.linalg.eigh, covs))
        dom = Domain([np.random.randn(N) * 0.1], edges=(None, None), shape=N)
        mu = Domain([np.random.randn(N) * 0.1], edges=(None, None), shape=N)
        std_args = {"mu": mu}
        cov_args = {"covs": covs}
        chol_args = {"chols": chols}
        evd_args = {"evds": evds}
        if sigma is not None and sigma != 0:
            std_args["sigma"] = Domain([sigma], edges=(None, None))
        else:
            for args in [cov_args, chol_args, evd_args]:
                args["sigma"] = sigma
        self.pymc3_matches_scipy(
            KroneckerNormal,
            dom,
            std_args,
            kron_normal_logpdf_cov,
            extra_args=cov_args,
            scipy_args=cov_args,
        )
        self.pymc3_matches_scipy(
            KroneckerNormal,
            dom,
            std_args,
            kron_normal_logpdf_chol,
            extra_args=chol_args,
            scipy_args=chol_args,
        )
        self.pymc3_matches_scipy(
            KroneckerNormal,
            dom,
            std_args,
            kron_normal_logpdf_evd,
            extra_args=evd_args,
            scipy_args=evd_args,
        )
        # Repeat with a batched (2, N) value domain.
        dom = Domain([np.random.randn(2, N) * 0.1], edges=(None, None), shape=(2, N))
        self.pymc3_matches_scipy(
            KroneckerNormal,
            dom,
            std_args,
            kron_normal_logpdf_cov,
            extra_args=cov_args,
            scipy_args=cov_args,
        )
        self.pymc3_matches_scipy(
            KroneckerNormal,
            dom,
            std_args,
            kron_normal_logpdf_chol,
            extra_args=chol_args,
            scipy_args=chol_args,
        )
        self.pymc3_matches_scipy(
            KroneckerNormal,
            dom,
            std_args,
            kron_normal_logpdf_evd,
            extra_args=evd_args,
            scipy_args=evd_args,
        )
    @pytest.mark.parametrize("n", [1, 2])
    def test_mvt(self, n):
        """Check MvStudentT logp for vector and matrix values."""
        self.pymc3_matches_scipy(
            MvStudentT,
            Vector(R, n),
            {"nu": Rplus, "Sigma": PdMatrix(n), "mu": Vector(R, n)},
            mvt_logpdf,
        )
        self.pymc3_matches_scipy(
            MvStudentT,
            RealMatrix(2, n),
            {"nu": Rplus, "Sigma": PdMatrix(n), "mu": Vector(R, n)},
            mvt_logpdf,
        )
    @pytest.mark.parametrize("n", [2, 3, 4])
    def test_AR1(self, n):
        """Check AR1 logp against the reference AR1_logpdf."""
        self.pymc3_matches_scipy(AR1, Vector(R, n), {"k": Unit, "tau_e": Rplus}, AR1_logpdf)
    @pytest.mark.parametrize("n", [2, 3])
    def test_wishart(self, n):
        """Placeholder: the Wishart gradient check is intentionally disabled."""
        # This check compares the autodiff gradient to the numdiff gradient.
        # However, due to the strict constraints of the wishart,
        # it is impossible to numerically determine the gradient as a small
        # pertubation breaks the symmetry. Thus disabling. Also, numdifftools was
        # removed in June 2019, so an alternative would be needed.
        #
        # self.checkd(Wishart, PdMatrix(n), {'n': Domain([2, 3, 4, 2000]), 'V': PdMatrix(n)},
        #             checks=[self.check_dlogp])
        pass
    @pytest.mark.parametrize("x,eta,n,lp", LKJ_CASES)
    def test_lkj(self, x, eta, n, lp):
        """Check LKJCorr logp against precomputed reference values (LKJ_CASES)."""
        with Model() as model:
            LKJCorr("lkj", eta=eta, n=n, transform=None)
        pt = {"lkj": x}
        decimals = select_by_precision(float64=6, float32=4)
        assert_almost_equal(model.fastlogp(pt), lp, decimal=decimals, err_msg=str(pt))
    @pytest.mark.parametrize("n", [2, 3])
    def test_dirichlet(self, n):
        """Check Dirichlet logp on the simplex against the reference dirichlet_logpdf."""
        self.pymc3_matches_scipy(Dirichlet, Simplex(n), {"a": Vector(Rplus, n)}, dirichlet_logpdf)
    def test_dirichlet_shape(self):
        """Dirichlet infers its shape from `a` and warns about the deprecated implicit shape."""
        a = tt.as_tensor_variable(np.r_[1, 2])
        with pytest.warns(DeprecationWarning):
            dir_rv = Dirichlet.dist(a)
            assert dir_rv.shape == (2,)
        # Symbolic `a` without a test value should still construct (only warn).
        with pytest.warns(DeprecationWarning), theano.change_flags(compute_test_value="ignore"):
            dir_rv = Dirichlet.dist(tt.vector())
    def test_dirichlet_2D(self):
        """Check Dirichlet logp with a 2D concentration parameter."""
        self.pymc3_matches_scipy(
            Dirichlet,
            MultiSimplex(2, 2),
            {"a": Vector(Vector(Rplus, 2), 2)},
            dirichlet_logpdf,
        )
    @pytest.mark.parametrize("n", [2, 3])
    def test_multinomial(self, n):
        """Check Multinomial logp against the reference multinomial_logpdf."""
        self.pymc3_matches_scipy(
            Multinomial, Vector(Nat, n), {"p": Simplex(n), "n": Nat}, multinomial_logpdf
        )
    @pytest.mark.parametrize(
        "p,n",
        [
            [[0.25, 0.25, 0.25, 0.25], 1],
            [[0.3, 0.6, 0.05, 0.05], 2],
            [[0.3, 0.6, 0.05, 0.05], 10],
        ],
    )
    def test_multinomial_mode(self, p, n):
        """The multinomial mode must sum to n, for both 1D and stacked 2D p."""
        _p = np.array(p)
        with Model() as model:
            m = Multinomial("m", n, _p, _p.shape)
        assert_allclose(m.distribution.mode.eval().sum(), n)
        _p = np.array([p, p])
        with Model() as model:
            m = Multinomial("m", n, _p, _p.shape)
        assert_allclose(m.distribution.mode.eval().sum(axis=-1), n)
    @pytest.mark.parametrize(
        "p, shape, n",
        [
            [[0.25, 0.25, 0.25, 0.25], 4, 2],
            [[0.25, 0.25, 0.25, 0.25], (1, 4), 3],
            # 3: expect to fail
            # [[.25, .25, .25, .25], (10, 4)],
            [[0.25, 0.25, 0.25, 0.25], (10, 1, 4), 5],
            # 5: expect to fail
            # [[[.25, .25, .25, .25]], (2, 4), [7, 11]],
            [[[0.25, 0.25, 0.25, 0.25], [0.25, 0.25, 0.25, 0.25]], (2, 4), 13],
            [[[0.25, 0.25, 0.25, 0.25], [0.25, 0.25, 0.25, 0.25]], (1, 2, 4), [23, 29]],
            [
                [[0.25, 0.25, 0.25, 0.25], [0.25, 0.25, 0.25, 0.25]],
                (10, 2, 4),
                [31, 37],
            ],
            [[[0.25, 0.25, 0.25, 0.25], [0.25, 0.25, 0.25, 0.25]], (2, 4), [17, 19]],
        ],
    )
    def test_multinomial_random(self, p, shape, n):
        """Multinomial.random() must not raise for various p/shape/n broadcasts."""
        p = np.asarray(p)
        with Model() as model:
            m = Multinomial("m", n=n, p=p, shape=shape)
        m.random()
    def test_multinomial_mode_with_shape(self):
        """Mode of a batched Multinomial must sum to the per-row n."""
        n = [1, 10]
        p = np.asarray([[0.25, 0.25, 0.25, 0.25], [0.26, 0.26, 0.26, 0.22]])
        with Model() as model:
            m = Multinomial("m", n=n, p=p, shape=(2, 4))
        assert_allclose(m.distribution.mode.eval().sum(axis=-1), n)
    def test_multinomial_vec(self):
        """Vectorized Multinomial logp must agree with scipy and with single-row models."""
        vals = np.array([[2, 4, 4], [3, 3, 4]])
        p = np.array([0.2, 0.3, 0.5])
        n = 10
        with Model() as model_single:
            Multinomial("m", n=n, p=p, shape=len(p))
        with Model() as model_many:
            Multinomial("m", n=n, p=p, shape=vals.shape)
        assert_almost_equal(
            scipy.stats.multinomial.logpmf(vals, n, p),
            np.asarray([model_single.fastlogp({"m": val}) for val in vals]),
            decimal=4,
        )
        assert_almost_equal(
            scipy.stats.multinomial.logpmf(vals, n, p),
            model_many.free_RVs[0].logp_elemwise({"m": vals}).squeeze(),
            decimal=4,
        )
        assert_almost_equal(
            sum([model_single.fastlogp({"m": val}) for val in vals]),
            model_many.fastlogp({"m": vals}),
            decimal=4,
        )
    def test_multinomial_vec_1d_n(self):
        """Vectorized Multinomial with per-row n must sum the per-row logps."""
        vals = np.array([[2, 4, 4], [4, 3, 4]])
        p = np.array([0.2, 0.3, 0.5])
        ns = np.array([10, 11])
        with Model() as model:
            Multinomial("m", n=ns, p=p, shape=vals.shape)
        assert_almost_equal(
            sum([multinomial_logpdf(val, n, p) for val, n in zip(vals, ns)]),
            model.fastlogp({"m": vals}),
            decimal=4,
        )
    def test_multinomial_vec_1d_n_2d_p(self):
        """Vectorized Multinomial with per-row n and per-row p must sum the per-row logps."""
        vals = np.array([[2, 4, 4], [4, 3, 4]])
        ps = np.array([[0.2, 0.3, 0.5], [0.9, 0.09, 0.01]])
        ns = np.array([10, 11])
        with Model() as model:
            Multinomial("m", n=ns, p=ps, shape=vals.shape)
        assert_almost_equal(
            sum([multinomial_logpdf(val, n, p) for val, n, p in zip(vals, ns, ps)]),
            model.fastlogp({"m": vals}),
            decimal=4,
        )
    def test_multinomial_vec_2d_p(self):
        """Vectorized Multinomial with scalar n and per-row p must sum the per-row logps."""
        vals = np.array([[2, 4, 4], [3, 3, 4]])
        ps = np.array([[0.2, 0.3, 0.5], [0.3, 0.3, 0.4]])
        n = 10
        with Model() as model:
            Multinomial("m", n=n, p=ps, shape=vals.shape)
        assert_almost_equal(
            sum([multinomial_logpdf(val, n, p) for val, p in zip(vals, ps)]),
            model.fastlogp({"m": vals}),
            decimal=4,
        )
    def test_batch_multinomial(self):
        """Degenerate batched Multinomial (all mass on one category) has probability 1
        at the matching value, and random() reproduces that value."""
        n = 10
        vals = np.zeros((4, 5, 3), dtype="int32")
        p = np.zeros_like(vals, dtype=theano.config.floatX)
        # Put all n counts (and all probability) on one random category per batch entry.
        inds = np.random.randint(vals.shape[-1], size=vals.shape[:-1])[..., None]
        np.put_along_axis(vals, inds, n, axis=-1)
        np.put_along_axis(p, inds, 1, axis=-1)
        dist = Multinomial.dist(n=n, p=p, shape=vals.shape)
        value = tt.tensor3(dtype="int32")
        value.tag.test_value = np.zeros_like(vals, dtype="int32")
        logp = tt.exp(dist.logp(value))
        f = theano.function(inputs=[value], outputs=logp)
        assert_almost_equal(
            f(vals),
            np.ones(vals.shape[:-1] + (1,)),
            decimal=select_by_precision(float64=6, float32=3),
        )
        sample = dist.random(size=2)
        assert_allclose(sample, np.stack([vals, vals], axis=0))
    def test_categorical_bounds(self):
        """Categorical logp is -inf for values outside [0, len(p))."""
        with Model():
            x = Categorical("x", p=np.array([0.2, 0.3, 0.5]))
            assert np.isinf(x.logp({"x": -1}))
            assert np.isinf(x.logp({"x": 3}))
    def test_categorical_valid_p(self):
        """Categorical logp is -inf whenever p contains negative entries."""
        with Model():
            x = Categorical("x", p=np.array([-0.2, 0.3, 0.5]))
            assert np.isinf(x.logp({"x": 0}))
            assert np.isinf(x.logp({"x": 1}))
            assert np.isinf(x.logp({"x": 2}))
        with Model():
            # A model where p sums to 1 but contains negative values
            x = Categorical("x", p=np.array([-0.2, 0.7, 0.5]))
            assert np.isinf(x.logp({"x": 0}))
            assert np.isinf(x.logp({"x": 1}))
            assert np.isinf(x.logp({"x": 2}))
        with Model():
            # Hard edge case from #2082
            # Early automatic normalization of p's sum would hide the negative
            x = Categorical("x", p=np.array([-1, -1, 0, 0]))
            assert np.isinf(x.logp({"x": 0}))
            assert np.isinf(x.logp({"x": 1}))
            assert np.isinf(x.logp({"x": 2}))
            assert np.isinf(x.logp({"x": 3}))
    @pytest.mark.parametrize("n", [2, 3, 4])
    def test_categorical(self, n):
        """Check Categorical logp against the reference categorical_logpdf."""
        self.pymc3_matches_scipy(
            Categorical,
            Domain(range(n), "int64"),
            {"p": Simplex(n)},
            lambda value, p: categorical_logpdf(value, p),
        )
    @pytest.mark.parametrize("n", [2, 3, 4])
    def test_orderedlogistic(self, n):
        """Check OrderedLogistic logp against the reference orderedlogistic_logpdf."""
        self.pymc3_matches_scipy(
            OrderedLogistic,
            Domain(range(n), "int64"),
            {"eta": R, "cutpoints": Vector(R, n - 1)},
            lambda value, eta, cutpoints: orderedlogistic_logpdf(value, eta, cutpoints),
        )
    def test_densitydist(self):
        """Sanity-check DensityDist with a Laplace(0.5, 0.5) logp via checkd."""
        def logp(x):
            return -log(2 * 0.5) - abs(x - 0.5) / 0.5
        self.checkd(DensityDist, R, {}, extra_args={"logp": logp})
    def test_get_tau_sigma(self):
        """get_tau_sigma must return (1/sigma**2, sigma) when given sigma."""
        sigma = np.array([2])
        assert_almost_equal(continuous.get_tau_sigma(sigma=sigma), [1.0 / sigma ** 2, sigma])
    @pytest.mark.parametrize(
        "value,mu,sigma,nu,logp",
        [
            (0.5, -50.000, 0.500, 0.500, -99.8068528),
            (1.0, -1.000, 0.001, 0.001, -1992.5922447),
            (2.0, 0.001, 1.000, 1.000, -1.6720416),
            (5.0, 0.500, 2.500, 2.500, -2.4543644),
            (7.5, 2.000, 5.000, 5.000, -2.8259429),
            (15.0, 5.000, 7.500, 7.500, -3.3093854),
            (50.0, 50.000, 10.000, 10.000, -3.6436067),
            (1000.0, 500.000, 10.000, 20.000, -27.8707323),
        ],
    )
    def test_ex_gaussian(self, value, mu, sigma, nu, logp):
        """Check ExGaussian logp against precomputed reference values."""
        with Model() as model:
            ExGaussian("eg", mu=mu, sigma=sigma, nu=nu)
        pt = {"eg": value}
        assert_almost_equal(
            model.fastlogp(pt),
            logp,
            decimal=select_by_precision(float64=6, float32=2),
            err_msg=str(pt),
        )
    @pytest.mark.parametrize(
        "value,mu,sigma,nu,logcdf",
        [
            (0.5, -50.000, 0.500, 0.500, 0.0000000),
            (1.0, -1.000, 0.001, 0.001, 0.0000000),
            (2.0, 0.001, 1.000, 1.000, -0.2365674),
            (5.0, 0.500, 2.500, 2.500, -0.2886489),
            (7.5, 2.000, 5.000, 5.000, -0.5655104),
            (15.0, 5.000, 7.500, 7.500, -0.4545255),
            (50.0, 50.000, 10.000, 10.000, -1.433714),
            (1000.0, 500.000, 10.000, 20.000, -1.573708e-11),
        ],
    )
    def test_ex_gaussian_cdf(self, value, mu, sigma, nu, logcdf):
        """Check ExGaussian logcdf against precomputed reference values."""
        assert_almost_equal(
            ExGaussian.dist(mu=mu, sigma=sigma, nu=nu).logcdf(value).tag.test_value,
            logcdf,
            decimal=select_by_precision(float64=6, float32=2),
            err_msg=str((value, mu, sigma, nu, logcdf)),
        )
    @pytest.mark.xfail(condition=(theano.config.floatX == "float32"), reason="Fails on float32")
    def test_vonmises(self):
        """Check VonMises logp against scipy.stats.vonmises."""
        self.pymc3_matches_scipy(
            VonMises,
            R,
            {"mu": Circ, "kappa": Rplus},
            lambda value, mu, kappa: floatX(sp.vonmises.logpdf(value, kappa, loc=mu)),
        )
    def test_gumbel(self):
        """Check Gumbel logp and logcdf against scipy.stats.gumbel_r."""
        def gumbel(value, mu, beta):
            return floatX(sp.gumbel_r.logpdf(value, loc=mu, scale=beta))
        self.pymc3_matches_scipy(Gumbel, R, {"mu": R, "beta": Rplusbig}, gumbel)
        def gumbellcdf(value, mu, beta):
            return floatX(sp.gumbel_r.logcdf(value, loc=mu, scale=beta))
        self.check_logcdf(Gumbel, R, {"mu": R, "beta": Rplusbig}, gumbellcdf)
    def test_logistic(self):
        """Check Logistic logp and logcdf against scipy.stats.logistic."""
        self.pymc3_matches_scipy(
            Logistic,
            R,
            {"mu": R, "s": Rplus},
            lambda value, mu, s: sp.logistic.logpdf(value, mu, s),
            decimal=select_by_precision(float64=6, float32=1),
        )
        self.check_logcdf(
            Logistic,
            R,
            {"mu": R, "s": Rplus},
            lambda value, mu, s: sp.logistic.logcdf(value, mu, s),
            decimal=select_by_precision(float64=6, float32=1),
        )
    def test_logitnormal(self):
        """Check LogitNormal logp: normal density of logit(value) plus the Jacobian term."""
        self.pymc3_matches_scipy(
            LogitNormal,
            Unit,
            {"mu": R, "sigma": Rplus},
            lambda value, mu, sigma: (
                sp.norm.logpdf(logit(value), mu, sigma) - (np.log(value) + np.log1p(-value))
            ),
            decimal=select_by_precision(float64=6, float32=1),
        )
    def test_multidimensional_beta_construction(self):
        """Constructing a Beta with a 2D shape must not raise."""
        with Model():
            Beta("beta", alpha=1.0, beta=1.0, shape=(10, 20))
    def test_rice(self):
        """Check Rice logp for both the nu and b parametrizations against scipy.stats.rice."""
        self.pymc3_matches_scipy(
            Rice,
            Rplus,
            {"nu": Rplus, "sigma": Rplusbig},
            lambda value, nu, sigma: sp.rice.logpdf(value, b=nu / sigma, loc=0, scale=sigma),
        )
        self.pymc3_matches_scipy(
            Rice,
            Rplus,
            {"b": Rplus, "sigma": Rplusbig},
            lambda value, b, sigma: sp.rice.logpdf(value, b=b, loc=0, scale=sigma),
        )
    @pytest.mark.xfail(condition=(theano.config.floatX == "float32"), reason="Fails on float32")
    def test_moyal(self):
        """Check Moyal logp and logcdf against scipy.stats.moyal."""
        self.pymc3_matches_scipy(
            Moyal,
            R,
            {"mu": R, "sigma": Rplusbig},
            lambda value, mu, sigma: floatX(sp.moyal.logpdf(value, mu, sigma)),
        )
        self.check_logcdf(
            Moyal,
            R,
            {"mu": R, "sigma": Rplusbig},
            lambda value, mu, sigma: floatX(sp.moyal.logcdf(value, mu, sigma)),
        )
    @pytest.mark.xfail(condition=(theano.config.floatX == "float32"), reason="Fails on float32")
    def test_interpolated(self):
        """An Interpolated built from a normal pdf grid must match scipy's normal logpdf."""
        for mu in R.vals:
            for sigma in Rplus.vals:
                # Interpolate over a +/- 5 sigma grid around mu.
                xmin = mu - 5 * sigma
                xmax = mu + 5 * sigma
                class TestedInterpolated(Interpolated):
                    def __init__(self, **kwargs):
                        x_points = np.linspace(xmin, xmax, 100000)
                        pdf_points = sp.norm.pdf(x_points, loc=mu, scale=sigma)
                        super().__init__(x_points=x_points, pdf_points=pdf_points, **kwargs)
                def ref_pdf(value):
                    # Reference: normal logpdf inside the grid, -inf outside it.
                    return np.where(
                        np.logical_and(value >= xmin, value <= xmax),
                        sp.norm.logpdf(value, mu, sigma),
                        -np.inf * np.ones(value.shape),
                    )
                self.pymc3_matches_scipy(TestedInterpolated, R, {}, ref_pdf)
def test_bound():
    """Exercise the Bound factory: transforms, defaults, logp at out-of-bound
    values, random() for scalar/array bounds, and named/positional construction."""
    np.random.seed(42)
    # Unbounded: behaves like the wrapped distribution, no transform.
    UnboundNormal = Bound(Normal)
    dist = UnboundNormal.dist(mu=0, sigma=1)
    assert dist.transform is None
    assert dist.default() == 0.0
    assert isinstance(dist.random(), np.ndarray)
    # Lower bound only.
    LowerNormal = Bound(Normal, lower=1)
    dist = LowerNormal.dist(mu=0, sigma=1)
    assert dist.logp(0).eval() == -np.inf
    assert dist.default() > 1
    assert dist.transform is not None
    assert np.all(dist.random() > 1)
    # Upper bound only.
    UpperNormal = Bound(Normal, upper=-1)
    dist = UpperNormal.dist(mu=0, sigma=1)
    assert dist.logp(-0.5).eval() == -np.inf
    assert dist.default() < -1
    assert dist.transform is not None
    assert np.all(dist.random() < -1)
    # Array-valued bounds: elementwise logp/default; random() is unsupported.
    ArrayNormal = Bound(Normal, lower=[1, 2], upper=[2, 3])
    dist = ArrayNormal.dist(mu=0, sigma=1, shape=2)
    assert_equal(dist.logp([0.5, 3.5]).eval(), -np.array([np.inf, np.inf]))
    assert_equal(dist.default(), np.array([1.5, 2.5]))
    assert dist.transform is not None
    with pytest.raises(ValueError) as err:
        dist.random()
    err.match("Drawing samples from distributions with array-valued")
    with Model():
        a = ArrayNormal("c", shape=2)
        assert_equal(a.tag.test_value, np.array([1.5, 2.5]))
    # Symbolic lower bound.
    lower = tt.vector("lower")
    lower.tag.test_value = np.array([1, 2]).astype(theano.config.floatX)
    upper = 3
    ArrayNormal = Bound(Normal, lower=lower, upper=upper)
    dist = ArrayNormal.dist(mu=0, sigma=1, shape=2)
    logp = dist.logp([0.5, 3.5]).eval({lower: lower.tag.test_value})
    assert_equal(logp, -np.array([np.inf, np.inf]))
    assert_equal(dist.default(), np.array([2, 2.5]))
    assert dist.transform is not None
    with Model():
        a = ArrayNormal("c", shape=2)
        assert_equal(a.tag.test_value, np.array([2, 2.5]))
    # random() on bounded discrete distributions respects the bounds.
    rand = Bound(Binomial, lower=10).dist(n=20, p=0.3).random()
    assert rand.dtype in [np.int16, np.int32, np.int64]
    assert rand >= 10
    rand = Bound(Binomial, upper=10).dist(n=20, p=0.8).random()
    assert rand.dtype in [np.int16, np.int32, np.int64]
    assert rand <= 10
    rand = Bound(Binomial, lower=5, upper=8).dist(n=10, p=0.6).random()
    assert rand.dtype in [np.int16, np.int32, np.int64]
    assert rand >= 5 and rand <= 8
    # Construction inside a model, with named and positional arguments.
    with Model():
        BoundPoisson = Bound(Poisson, upper=6)
        BoundPoisson(name="y", mu=1)
    with Model():
        BoundNormalNamedArgs = Bound(Normal, upper=6)("y", mu=2.0, sd=1.0)
        BoundNormalPositionalArgs = Bound(Normal, upper=6)("x", 2.0, 1.0)
    with Model():
        BoundPoissonNamedArgs = Bound(Poisson, upper=6)("y", mu=2.0)
        BoundPoissonPositionalArgs = Bound(Poisson, upper=6)("x", 2.0)
class TestStrAndLatexRepr:
    """Check the plain-text and LaTeX representations of variables and models."""
    def setup_class(self):
        """Build a reference model covering many distribution kinds and record
        the expected latex/plain representations, with and without parameters."""
        # True parameter values
        alpha, sigma = 1, 1
        beta = [1, 2.5]
        # Size of dataset
        size = 100
        # Predictor variable
        X = np.random.normal(size=(size, 2)).dot(np.array([[1, 0], [0, 0.2]]))
        # Simulate outcome variable
        Y = alpha + X.dot(beta) + np.random.randn(size) * sigma
        with Model() as self.model:
            # TODO: some variables commented out here as they're not working properly
            # in v4 yet (9-jul-2021), so doesn't make sense to test str/latex for them
            # Priors for unknown model parameters
            alpha = Normal("alpha", mu=0, sigma=10)
            b = Normal("beta", mu=0, sigma=10, shape=(2,), observed=beta)
            sigma = HalfNormal("sigma", sigma=1)
            # Test Cholesky parameterization
            Z = MvNormal("Z", mu=np.zeros(2), chol=np.eye(2), shape=(2,))
            # NegativeBinomial representations to test issue 4186
            nb1 = pm.NegativeBinomial(
                "nb_with_mu_alpha", mu=pm.Normal("nbmu"), alpha=pm.Gamma("nbalpha", mu=6, sigma=1)
            )
            nb2 = pm.NegativeBinomial("nb_with_p_n", p=pm.Uniform("nbp"), n=10)
            # Expected value of outcome
            mu = Deterministic("mu", floatX(alpha + tt.dot(X, b)))
            # add a bounded variable as well
            bound_var = Bound(Normal, lower=1.0)("bound_var", mu=0, sigma=10)
            # KroneckerNormal
            n, m = 3, 4
            covs = [np.eye(n), np.eye(m)]
            kron_normal = KroneckerNormal("kron_normal", mu=np.zeros(n * m), covs=covs, shape=n * m)
            # MatrixNormal
            matrix_normal = MatrixNormal(
                "mat_normal",
                mu=np.random.normal(size=n),
                rowcov=np.eye(n),
                colchol=np.linalg.cholesky(np.eye(n)),
                shape=(n, n),
            )
            # Likelihood (sampling distribution) of observations
            Y_obs = Normal("Y_obs", mu=mu, sigma=sigma, observed=Y)
        self.distributions = [alpha, sigma, mu, b, Z, nb1, nb2, Y_obs, bound_var]
        self.expected = {
            "latex": (
                r"$\text{alpha} \sim \text{Normal}$",
                r"$\text{sigma} \sim \text{HalfNormal}$",
                r"$\text{mu} \sim \text{Deterministic}$",
                r"$\text{beta} \sim \text{Normal}$",
                r"$\text{Z} \sim \text{MvNormal}$",
                r"$\text{nb_with_mu_alpha} \sim \text{NegativeBinomial}$",
                r"$\text{nb_with_p_n} \sim \text{NegativeBinomial}$",
                r"$\text{Y_obs} \sim \text{Normal}$",
                r"$\text{bound_var} \sim \text{Bound}$ -- \text{Normal}$",
                r"$\text{kron_normal} \sim \text{KroneckerNormal}$",
                r"$\text{mat_normal} \sim \text{MatrixNormal}$",
            ),
            "plain": (
                r"alpha ~ Normal",
                r"sigma ~ HalfNormal",
                r"mu ~ Deterministic",
                r"beta ~ Normal",
                r"Z ~ MvNormal",
                r"nb_with_mu_alpha ~ NegativeBinomial",
                r"nb_with_p_n ~ NegativeBinomial",
                r"Y_obs ~ Normal",
                r"bound_var ~ Bound-Normal",
                r"kron_normal ~ KroneckerNormal",
                r"mat_normal ~ MatrixNormal",
            ),
            "latex_with_params": (
                r"$\text{alpha} \sim \text{Normal}(\mathit{mu}=0.0,~\mathit{sigma}=10.0)$",
                r"$\text{sigma} \sim \text{HalfNormal}(\mathit{sigma}=1.0)$",
                r"$\text{mu} \sim \text{Deterministic}(\text{alpha},~\text{Constant},~\text{beta})$",
                r"$\text{beta} \sim \text{Normal}(\mathit{mu}=0.0,~\mathit{sigma}=10.0)$",
                r"$\text{Z} \sim \text{MvNormal}(\mathit{mu}=array,~\mathit{chol_cov}=array)$",
                r"$\text{nb_with_mu_alpha} \sim \text{NegativeBinomial}(\mathit{mu}=\text{nbmu},~\mathit{alpha}=\text{nbalpha})$",
                r"$\text{nb_with_p_n} \sim \text{NegativeBinomial}(\mathit{p}=\text{nbp},~\mathit{n}=10)$",
                r"$\text{Y_obs} \sim \text{Normal}(\mathit{mu}=\text{mu},~\mathit{sigma}=f(\text{sigma}))$",
                r"$\text{bound_var} \sim \text{Bound}(\mathit{lower}=1.0,~\mathit{upper}=\text{None})$ -- \text{Normal}(\mathit{mu}=0.0,~\mathit{sigma}=10.0)$",
                r"$\text{kron_normal} \sim \text{KroneckerNormal}(\mathit{mu}=array)$",
                r"$\text{mat_normal} \sim \text{MatrixNormal}(\mathit{mu}=array,~\mathit{rowcov}=array,~\mathit{colchol_cov}=array)$",
            ),
            "plain_with_params": (
                r"alpha ~ Normal(mu=0.0, sigma=10.0)",
                r"sigma ~ HalfNormal(sigma=1.0)",
                r"mu ~ Deterministic(alpha, Constant, beta)",
                r"beta ~ Normal(mu=0.0, sigma=10.0)",
                r"Z ~ MvNormal(mu=array, chol_cov=array)",
                r"nb_with_mu_alpha ~ NegativeBinomial(mu=nbmu, alpha=nbalpha)",
                r"nb_with_p_n ~ NegativeBinomial(p=nbp, n=10)",
                r"Y_obs ~ Normal(mu=mu, sigma=f(sigma))",
                r"bound_var ~ Bound(lower=1.0, upper=None)-Normal(mu=0.0, sigma=10.0)",
                r"kron_normal ~ KroneckerNormal(mu=array)",
                r"mat_normal ~ MatrixNormal(mu=array, rowcov=array, colchol_cov=array)",
            ),
        }
    def test__repr_latex_(self):
        """_repr_latex_ must match the expected parametrized latex, and the model's
        latex must contain every variable's name and distribution."""
        for distribution, tex in zip(self.distributions, self.expected["latex_with_params"]):
            assert distribution._repr_latex_() == tex
        model_tex = self.model._repr_latex_()
        for tex in self.expected["latex"]:  # make sure each variable is in the model
            for segment in tex.strip("$").split(r"\sim"):
                assert segment in model_tex
    def test___latex__(self):
        """__latex__ must be an alias for _repr_latex_ on variables and models."""
        for distribution, tex in zip(self.distributions, self.expected["latex_with_params"]):
            assert distribution._repr_latex_() == distribution.__latex__()
        assert self.model._repr_latex_() == self.model.__latex__()
    def test___str__(self):
        """__str__ must match the expected plain representations."""
        for distribution, str_repr in zip(self.distributions, self.expected["plain"]):
            assert distribution.__str__() == str_repr
        model_str = self.model.__str__()
        for str_repr in self.expected["plain"]:
            assert str_repr in model_str
    def test_str(self):
        """str() must match the expected plain representations."""
        for distribution, str_repr in zip(self.distributions, self.expected["plain"]):
            assert str(distribution) == str_repr
        model_str = str(self.model)
        for str_repr in self.expected["plain"]:
            assert str_repr in model_str
def test_discrete_trafo():
    """Discrete distributions must reject a transform, with and without a model."""
    with pytest.raises(ValueError) as err:
        Binomial.dist(n=5, p=0.5, transform="log")
    err.match("Transformations for discrete distributions")
    with Model():
        with pytest.raises(ValueError) as err:
            Binomial("a", n=5, p=0.5, transform="log")
        err.match("Transformations for discrete distributions")
@pytest.mark.parametrize("shape", [tuple(), (1,), (3, 1), (3, 2)], ids=str)
def test_orderedlogistic_dimensions(shape):
    """OrderedLogistic with uniform cutpoints must match an equivalent uniform
    Categorical in parameter dimensionality and total logp."""
    # Test for a several time the 2020-08-04 OrderedLogistic bug
    loge = np.log10(np.exp(1))
    size = 7
    p = np.ones(shape + (10,)) / 10
    cutpoints = np.tile(logit(np.linspace(0, 1, 11)[1:-1]), shape + (1,))
    obs = np.random.randint(0, 1, size=(size,) + shape)
    with Model():
        ol = OrderedLogistic(
            "ol", eta=np.zeros(shape), cutpoints=cutpoints, shape=shape, observed=obs
        )
        c = Categorical("c", p=p, shape=shape, observed=obs)
    ologp = ol.logp({"ol": 1}) * loge
    clogp = c.logp({"c": 1}) * loge
    expected = -np.prod((size,) + shape)
    assert c.distribution.p.ndim == (len(shape) + 1)
    assert np.allclose(clogp, expected)
    assert ol.distribution.p.ndim == (len(shape) + 1)
    assert np.allclose(ologp, expected)
class TestBugfixes:
    """Regression tests for previously reported bugs."""

    @pytest.mark.parametrize(
        "dist_cls,kwargs", [(MvNormal, dict(mu=0)), (MvStudentT, dict(mu=0, nu=2))]
    )
    @pytest.mark.parametrize("dims", [1, 2, 4])
    def test_issue_3051(self, dims, dist_cls, kwargs):
        """logp of a (20, dims) batch of points must return one value per row.

        Regression test for pymc3 issue #3051.
        """
        d = dist_cls.dist(**kwargs, cov=np.eye(dims), shape=(dims,))
        X = np.random.normal(size=(20, dims))
        actual_t = d.logp(X)
        assert isinstance(actual_t, tt.TensorVariable)
        actual_a = actual_t.eval()
        assert isinstance(actual_a, np.ndarray)
        assert actual_a.shape == (X.shape[0],)
def test_serialize_density_dist():
    """A DensityDist with a closure logp must survive spawn-based sampling and pickling."""
    def func(x):
        return -2 * (x ** 2).sum()
    with pm.Model():
        pm.Normal("x")
        y = pm.DensityDist("y", func)
        pm.sample(draws=5, tune=1, mp_ctx="spawn")
    import pickle
    pickle.loads(pickle.dumps(y))
| true | true |
f72103e31fd52dd21e230b7d278470e15c333340 | 4,056 | py | Python | volttron/platform/agent/math_utils.py | Entek-Technical-Services/BEMOSS3.5 | 581a205b4129530474a5ceee93cb36ef62992d4c | [
"BSD-3-Clause"
] | 73 | 2017-07-11T21:46:41.000Z | 2022-03-11T03:35:25.000Z | volttron/platform/agent/math_utils.py | Entek-Technical-Services/BEMOSS3.5 | 581a205b4129530474a5ceee93cb36ef62992d4c | [
"BSD-3-Clause"
] | 19 | 2017-10-10T22:06:15.000Z | 2022-03-28T21:03:33.000Z | volttron/platform/agent/math_utils.py | Entek-Technical-Services/BEMOSS3.5 | 581a205b4129530474a5ceee93cb36ef62992d4c | [
"BSD-3-Clause"
] | 36 | 2017-06-24T00:17:03.000Z | 2022-03-31T13:58:36.000Z | # -*- coding: utf-8 -*- {{{
# vim: set fenc=utf-8 ft=python sw=4 ts=4 sts=4 et:
# Copyright (c) 2015, Battelle Memorial Institute
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# The views and conclusions contained in the software and documentation
# are those of the authors and should not be interpreted as representing
# official policies, either expressed or implied, of the FreeBSD
# Project.
#
# This material was prepared as an account of work sponsored by an
# agency of the United States Government. Neither the United States
# Government nor the United States Department of Energy, nor Battelle,
# nor any of their employees, nor any jurisdiction or organization that
# has cooperated in the development of these materials, makes any
# warranty, express or implied, or assumes any legal liability or
# responsibility for the accuracy, completeness, or usefulness or any
# information, apparatus, product, software, or process disclosed, or
# represents that its use would not infringe privately owned rights.
#
# Reference herein to any specific commercial product, process, or
# service by trade name, trademark, manufacturer, or otherwise does not
# necessarily constitute or imply its endorsement, recommendation, or
# favoring by the United States Government or any agency thereof, or
# Battelle Memorial Institute. The views and opinions of authors
# expressed herein do not necessarily state or reflect those of the
# United States Government or any agency thereof.
#
# PACIFIC NORTHWEST NATIONAL LABORATORY
# operated by BATTELLE for the UNITED STATES DEPARTMENT OF ENERGY
# under Contract DE-AC05-76RL01830
#}}}
'''Dumping ground for VOLTTRON platform™ agent math helper functions.
Not meant to replace numpy in all cases. A basic set common math
routines to remove the need for numpy in simple cases.
This module should NEVER import numpy as that would defeat the
purpose.'''
def mean(data):
"""Return the sample arithmetic mean of data."""
n = len(data)
if n < 1:
raise ValueError('mean requires at least one data point')
return sum(data)/n # in Python 2 use sum(data)/float(n)
def _ss(data):
"""Return sum of square deviations of sequence data."""
c = mean(data)
ss = sum((x-c)**2 for x in data)
return ss
def pstdev(data):
"""Calculates the population standard deviation."""
n = len(data)
if n < 2:
raise ValueError('variance requires at least two data points')
ss = _ss(data)
pvar = ss/n # the population variance
return pvar**0.5
def stdev(data):
"""Calculates the sample standard deviation."""
n = len(data)
if n < 2:
raise ValueError('variance requires at least two data points')
ss = _ss(data)
pvar = ss/(n-1) # sample variance
return pvar**0.5
| 41.387755 | 72 | 0.747288 |
def mean(data):
n = len(data)
if n < 1:
raise ValueError('mean requires at least one data point')
return sum(data)/n
def _ss(data):
c = mean(data)
ss = sum((x-c)**2 for x in data)
return ss
def pstdev(data):
n = len(data)
if n < 2:
raise ValueError('variance requires at least two data points')
ss = _ss(data)
pvar = ss/n
return pvar**0.5
def stdev(data):
n = len(data)
if n < 2:
raise ValueError('variance requires at least two data points')
ss = _ss(data)
pvar = ss/(n-1)
return pvar**0.5
| true | true |
f721053f1c2b0366de64431ea3ca1a8eaac1c75f | 9,874 | py | Python | tests/conftest.py | dobixu/elastalert2 | 2d403918514d7c6e8aa24658c4c1f683dd143d89 | [
"Apache-2.0"
] | 250 | 2021-04-24T18:06:30.000Z | 2022-03-31T04:37:47.000Z | tests/conftest.py | dobixu/elastalert2 | 2d403918514d7c6e8aa24658c4c1f683dd143d89 | [
"Apache-2.0"
] | 129 | 2021-04-24T17:09:50.000Z | 2022-03-29T08:52:14.000Z | tests/conftest.py | dobixu/elastalert2 | 2d403918514d7c6e8aa24658c4c1f683dd143d89 | [
"Apache-2.0"
] | 128 | 2021-04-25T15:20:34.000Z | 2022-03-31T04:37:49.000Z | # -*- coding: utf-8 -*-
import datetime
import logging
import os
from unittest import mock
import pytest
import elastalert.elastalert
import elastalert.util
from elastalert.util import dt_to_ts
from elastalert.util import ts_to_dt
writeback_index = 'wb'
def pytest_addoption(parser):
parser.addoption(
"--runelasticsearch", action="store_true", default=False, help="run elasticsearch tests"
)
def pytest_collection_modifyitems(config, items):
if config.getoption("--runelasticsearch"):
# --runelasticsearch given in cli: run elasticsearch tests, skip ordinary unit tests
skip_unit_tests = pytest.mark.skip(reason="not running when --runelasticsearch option is used to run")
for item in items:
if "elasticsearch" not in item.keywords:
item.add_marker(skip_unit_tests)
else:
# skip elasticsearch tests
skip_elasticsearch = pytest.mark.skip(reason="need --runelasticsearch option to run")
for item in items:
if "elasticsearch" in item.keywords:
item.add_marker(skip_elasticsearch)
@pytest.fixture(scope='function', autouse=True)
def reset_loggers():
"""Prevent logging handlers from capturing temporary file handles.
For example, a test that uses the `capsys` fixture and calls
`logging.exception()` will initialize logging with a default handler that
captures `sys.stderr`. When the test ends, the file handles will be closed
and `sys.stderr` will be returned to its original handle, but the logging
will have a dangling reference to the temporary handle used in the `capsys`
fixture.
"""
logger = logging.getLogger()
for handler in logger.handlers:
logger.removeHandler(handler)
class mock_es_indices_client(object):
def __init__(self):
self.exists = mock.Mock(return_value=True)
class mock_es_client(object):
def __init__(self, host='es', port=14900):
self.host = host
self.port = port
self.return_hits = []
self.search = mock.Mock()
self.deprecated_search = mock.Mock()
self.create = mock.Mock()
self.index = mock.Mock()
self.delete = mock.Mock()
self.info = mock.Mock(return_value={'status': 200, 'name': 'foo', 'version': {'number': '2.0'}})
self.ping = mock.Mock(return_value=True)
self.indices = mock_es_indices_client()
self.es_version = mock.Mock(return_value='2.0')
self.is_atleastfive = mock.Mock(return_value=False)
self.is_atleastsix = mock.Mock(return_value=False)
self.is_atleastsixtwo = mock.Mock(return_value=False)
self.is_atleastsixsix = mock.Mock(return_value=False)
self.is_atleastseven = mock.Mock(return_value=False)
self.resolve_writeback_index = mock.Mock(return_value=writeback_index)
class mock_es_sixsix_client(object):
def __init__(self, host='es', port=14900):
self.host = host
self.port = port
self.return_hits = []
self.search = mock.Mock()
self.deprecated_search = mock.Mock()
self.create = mock.Mock()
self.index = mock.Mock()
self.delete = mock.Mock()
self.info = mock.Mock(return_value={'status': 200, 'name': 'foo', 'version': {'number': '6.6.0'}})
self.ping = mock.Mock(return_value=True)
self.indices = mock_es_indices_client()
self.es_version = mock.Mock(return_value='6.6.0')
self.is_atleastfive = mock.Mock(return_value=True)
self.is_atleastsix = mock.Mock(return_value=True)
self.is_atleastsixtwo = mock.Mock(return_value=False)
self.is_atleastsixsix = mock.Mock(return_value=True)
self.is_atleastseven = mock.Mock(return_value=False)
def writeback_index_side_effect(index, doc_type):
if doc_type == 'silence':
return index + '_silence'
elif doc_type == 'past_elastalert':
return index + '_past'
elif doc_type == 'elastalert_status':
return index + '_status'
elif doc_type == 'elastalert_error':
return index + '_error'
return index
self.resolve_writeback_index = mock.Mock(side_effect=writeback_index_side_effect)
class mock_rule_loader(object):
def __init__(self, conf):
self.base_config = conf
self.load = mock.Mock()
self.get_hashes = mock.Mock()
self.load_configuration = mock.Mock()
class mock_ruletype(object):
def __init__(self):
self.add_data = mock.Mock()
self.add_count_data = mock.Mock()
self.add_terms_data = mock.Mock()
self.matches = []
self.get_match_data = lambda x: x
self.get_match_str = lambda x: "some stuff happened"
self.garbage_collect = mock.Mock()
class mock_alert(object):
def __init__(self):
self.alert = mock.Mock()
def get_info(self):
return {'type': 'mock'}
@pytest.fixture
def ea():
rules = [{'es_host': '',
'es_port': 14900,
'name': 'anytest',
'index': 'idx',
'filter': [],
'include': ['@timestamp'],
'aggregation': datetime.timedelta(0),
'realert': datetime.timedelta(0),
'processed_hits': {},
'timestamp_field': '@timestamp',
'match_enhancements': [],
'rule_file': 'blah.yaml',
'max_query_size': 10000,
'ts_to_dt': ts_to_dt,
'dt_to_ts': dt_to_ts,
'_source_enabled': True,
'run_every': datetime.timedelta(seconds=15)}]
conf = {'rules_folder': 'rules',
'run_every': datetime.timedelta(minutes=10),
'buffer_time': datetime.timedelta(minutes=5),
'alert_time_limit': datetime.timedelta(hours=24),
'es_host': 'es',
'es_port': 14900,
'writeback_index': 'wb',
'rules': rules,
'max_query_size': 10000,
'old_query_limit': datetime.timedelta(weeks=1),
'disable_rules_on_error': False,
'scroll_keepalive': '30s',
'custom_pretty_ts_format': '%Y-%m-%d %H:%M'}
elastalert.util.elasticsearch_client = mock_es_client
conf['rules_loader'] = mock_rule_loader(conf)
elastalert.elastalert.elasticsearch_client = mock_es_client
with mock.patch('elastalert.elastalert.load_conf') as load_conf:
with mock.patch('elastalert.elastalert.BackgroundScheduler'):
load_conf.return_value = conf
conf['rules_loader'].load.return_value = rules
conf['rules_loader'].get_hashes.return_value = {}
ea = elastalert.elastalert.ElastAlerter(['--pin_rules'])
ea.rules[0]['type'] = mock_ruletype()
ea.rules[0]['alert'] = [mock_alert()]
ea.writeback_es = mock_es_client()
ea.writeback_es.search.return_value = {'hits': {'hits': []}, 'total': 0}
ea.writeback_es.deprecated_search.return_value = {'hits': {'hits': []}}
ea.writeback_es.index.return_value = {'_id': 'ABCD', 'created': True}
ea.current_es = mock_es_client('', '')
ea.thread_data.current_es = ea.current_es
ea.thread_data.num_hits = 0
ea.thread_data.num_dupes = 0
return ea
@pytest.fixture
def ea_sixsix():
rules = [{'es_host': '',
'es_port': 14900,
'name': 'anytest',
'index': 'idx',
'filter': [],
'include': ['@timestamp'],
'run_every': datetime.timedelta(seconds=1),
'aggregation': datetime.timedelta(0),
'realert': datetime.timedelta(0),
'processed_hits': {},
'timestamp_field': '@timestamp',
'match_enhancements': [],
'rule_file': 'blah.yaml',
'max_query_size': 10000,
'ts_to_dt': ts_to_dt,
'dt_to_ts': dt_to_ts,
'_source_enabled': True}]
conf = {'rules_folder': 'rules',
'run_every': datetime.timedelta(minutes=10),
'buffer_time': datetime.timedelta(minutes=5),
'alert_time_limit': datetime.timedelta(hours=24),
'es_host': 'es',
'es_port': 14900,
'writeback_index': writeback_index,
'rules': rules,
'max_query_size': 10000,
'old_query_limit': datetime.timedelta(weeks=1),
'disable_rules_on_error': False,
'scroll_keepalive': '30s',
'custom_pretty_ts_format': '%Y-%m-%d %H:%M'}
conf['rules_loader'] = mock_rule_loader(conf)
elastalert.elastalert.elasticsearch_client = mock_es_sixsix_client
elastalert.util.elasticsearch_client = mock_es_sixsix_client
with mock.patch('elastalert.elastalert.load_conf') as load_conf:
with mock.patch('elastalert.elastalert.BackgroundScheduler'):
load_conf.return_value = conf
conf['rules_loader'].load.return_value = rules
conf['rules_loader'].get_hashes.return_value = {}
ea_sixsix = elastalert.elastalert.ElastAlerter(['--pin_rules'])
ea_sixsix.rules[0]['type'] = mock_ruletype()
ea_sixsix.rules[0]['alert'] = [mock_alert()]
ea_sixsix.writeback_es = mock_es_sixsix_client()
ea_sixsix.writeback_es.search.return_value = {'hits': {'hits': []}}
ea_sixsix.writeback_es.deprecated_search.return_value = {'hits': {'hits': []}}
ea_sixsix.writeback_es.index.return_value = {'_id': 'ABCD'}
ea_sixsix.current_es = mock_es_sixsix_client('', -1)
return ea_sixsix
@pytest.fixture(scope='function')
def environ():
"""py.test fixture to get a fresh mutable environment."""
old_env = os.environ
new_env = dict(list(old_env.items()))
os.environ = new_env
yield os.environ
os.environ = old_env
| 38.570313 | 110 | 0.623962 |
import datetime
import logging
import os
from unittest import mock
import pytest
import elastalert.elastalert
import elastalert.util
from elastalert.util import dt_to_ts
from elastalert.util import ts_to_dt
writeback_index = 'wb'
def pytest_addoption(parser):
parser.addoption(
"--runelasticsearch", action="store_true", default=False, help="run elasticsearch tests"
)
def pytest_collection_modifyitems(config, items):
if config.getoption("--runelasticsearch"):
skip_unit_tests = pytest.mark.skip(reason="not running when --runelasticsearch option is used to run")
for item in items:
if "elasticsearch" not in item.keywords:
item.add_marker(skip_unit_tests)
else:
skip_elasticsearch = pytest.mark.skip(reason="need --runelasticsearch option to run")
for item in items:
if "elasticsearch" in item.keywords:
item.add_marker(skip_elasticsearch)
@pytest.fixture(scope='function', autouse=True)
def reset_loggers():
logger = logging.getLogger()
for handler in logger.handlers:
logger.removeHandler(handler)
class mock_es_indices_client(object):
def __init__(self):
self.exists = mock.Mock(return_value=True)
class mock_es_client(object):
def __init__(self, host='es', port=14900):
self.host = host
self.port = port
self.return_hits = []
self.search = mock.Mock()
self.deprecated_search = mock.Mock()
self.create = mock.Mock()
self.index = mock.Mock()
self.delete = mock.Mock()
self.info = mock.Mock(return_value={'status': 200, 'name': 'foo', 'version': {'number': '2.0'}})
self.ping = mock.Mock(return_value=True)
self.indices = mock_es_indices_client()
self.es_version = mock.Mock(return_value='2.0')
self.is_atleastfive = mock.Mock(return_value=False)
self.is_atleastsix = mock.Mock(return_value=False)
self.is_atleastsixtwo = mock.Mock(return_value=False)
self.is_atleastsixsix = mock.Mock(return_value=False)
self.is_atleastseven = mock.Mock(return_value=False)
self.resolve_writeback_index = mock.Mock(return_value=writeback_index)
class mock_es_sixsix_client(object):
def __init__(self, host='es', port=14900):
self.host = host
self.port = port
self.return_hits = []
self.search = mock.Mock()
self.deprecated_search = mock.Mock()
self.create = mock.Mock()
self.index = mock.Mock()
self.delete = mock.Mock()
self.info = mock.Mock(return_value={'status': 200, 'name': 'foo', 'version': {'number': '6.6.0'}})
self.ping = mock.Mock(return_value=True)
self.indices = mock_es_indices_client()
self.es_version = mock.Mock(return_value='6.6.0')
self.is_atleastfive = mock.Mock(return_value=True)
self.is_atleastsix = mock.Mock(return_value=True)
self.is_atleastsixtwo = mock.Mock(return_value=False)
self.is_atleastsixsix = mock.Mock(return_value=True)
self.is_atleastseven = mock.Mock(return_value=False)
def writeback_index_side_effect(index, doc_type):
if doc_type == 'silence':
return index + '_silence'
elif doc_type == 'past_elastalert':
return index + '_past'
elif doc_type == 'elastalert_status':
return index + '_status'
elif doc_type == 'elastalert_error':
return index + '_error'
return index
self.resolve_writeback_index = mock.Mock(side_effect=writeback_index_side_effect)
class mock_rule_loader(object):
def __init__(self, conf):
self.base_config = conf
self.load = mock.Mock()
self.get_hashes = mock.Mock()
self.load_configuration = mock.Mock()
class mock_ruletype(object):
def __init__(self):
self.add_data = mock.Mock()
self.add_count_data = mock.Mock()
self.add_terms_data = mock.Mock()
self.matches = []
self.get_match_data = lambda x: x
self.get_match_str = lambda x: "some stuff happened"
self.garbage_collect = mock.Mock()
class mock_alert(object):
def __init__(self):
self.alert = mock.Mock()
def get_info(self):
return {'type': 'mock'}
@pytest.fixture
def ea():
rules = [{'es_host': '',
'es_port': 14900,
'name': 'anytest',
'index': 'idx',
'filter': [],
'include': ['@timestamp'],
'aggregation': datetime.timedelta(0),
'realert': datetime.timedelta(0),
'processed_hits': {},
'timestamp_field': '@timestamp',
'match_enhancements': [],
'rule_file': 'blah.yaml',
'max_query_size': 10000,
'ts_to_dt': ts_to_dt,
'dt_to_ts': dt_to_ts,
'_source_enabled': True,
'run_every': datetime.timedelta(seconds=15)}]
conf = {'rules_folder': 'rules',
'run_every': datetime.timedelta(minutes=10),
'buffer_time': datetime.timedelta(minutes=5),
'alert_time_limit': datetime.timedelta(hours=24),
'es_host': 'es',
'es_port': 14900,
'writeback_index': 'wb',
'rules': rules,
'max_query_size': 10000,
'old_query_limit': datetime.timedelta(weeks=1),
'disable_rules_on_error': False,
'scroll_keepalive': '30s',
'custom_pretty_ts_format': '%Y-%m-%d %H:%M'}
elastalert.util.elasticsearch_client = mock_es_client
conf['rules_loader'] = mock_rule_loader(conf)
elastalert.elastalert.elasticsearch_client = mock_es_client
with mock.patch('elastalert.elastalert.load_conf') as load_conf:
with mock.patch('elastalert.elastalert.BackgroundScheduler'):
load_conf.return_value = conf
conf['rules_loader'].load.return_value = rules
conf['rules_loader'].get_hashes.return_value = {}
ea = elastalert.elastalert.ElastAlerter(['--pin_rules'])
ea.rules[0]['type'] = mock_ruletype()
ea.rules[0]['alert'] = [mock_alert()]
ea.writeback_es = mock_es_client()
ea.writeback_es.search.return_value = {'hits': {'hits': []}, 'total': 0}
ea.writeback_es.deprecated_search.return_value = {'hits': {'hits': []}}
ea.writeback_es.index.return_value = {'_id': 'ABCD', 'created': True}
ea.current_es = mock_es_client('', '')
ea.thread_data.current_es = ea.current_es
ea.thread_data.num_hits = 0
ea.thread_data.num_dupes = 0
return ea
@pytest.fixture
def ea_sixsix():
rules = [{'es_host': '',
'es_port': 14900,
'name': 'anytest',
'index': 'idx',
'filter': [],
'include': ['@timestamp'],
'run_every': datetime.timedelta(seconds=1),
'aggregation': datetime.timedelta(0),
'realert': datetime.timedelta(0),
'processed_hits': {},
'timestamp_field': '@timestamp',
'match_enhancements': [],
'rule_file': 'blah.yaml',
'max_query_size': 10000,
'ts_to_dt': ts_to_dt,
'dt_to_ts': dt_to_ts,
'_source_enabled': True}]
conf = {'rules_folder': 'rules',
'run_every': datetime.timedelta(minutes=10),
'buffer_time': datetime.timedelta(minutes=5),
'alert_time_limit': datetime.timedelta(hours=24),
'es_host': 'es',
'es_port': 14900,
'writeback_index': writeback_index,
'rules': rules,
'max_query_size': 10000,
'old_query_limit': datetime.timedelta(weeks=1),
'disable_rules_on_error': False,
'scroll_keepalive': '30s',
'custom_pretty_ts_format': '%Y-%m-%d %H:%M'}
conf['rules_loader'] = mock_rule_loader(conf)
elastalert.elastalert.elasticsearch_client = mock_es_sixsix_client
elastalert.util.elasticsearch_client = mock_es_sixsix_client
with mock.patch('elastalert.elastalert.load_conf') as load_conf:
with mock.patch('elastalert.elastalert.BackgroundScheduler'):
load_conf.return_value = conf
conf['rules_loader'].load.return_value = rules
conf['rules_loader'].get_hashes.return_value = {}
ea_sixsix = elastalert.elastalert.ElastAlerter(['--pin_rules'])
ea_sixsix.rules[0]['type'] = mock_ruletype()
ea_sixsix.rules[0]['alert'] = [mock_alert()]
ea_sixsix.writeback_es = mock_es_sixsix_client()
ea_sixsix.writeback_es.search.return_value = {'hits': {'hits': []}}
ea_sixsix.writeback_es.deprecated_search.return_value = {'hits': {'hits': []}}
ea_sixsix.writeback_es.index.return_value = {'_id': 'ABCD'}
ea_sixsix.current_es = mock_es_sixsix_client('', -1)
return ea_sixsix
@pytest.fixture(scope='function')
def environ():
old_env = os.environ
new_env = dict(list(old_env.items()))
os.environ = new_env
yield os.environ
os.environ = old_env
| true | true |
f721054ced7239cd366b9a4117dc04473f5453e9 | 310 | py | Python | allauth/app_settings.py | tobiasgoecke/django-allauth | 5e80865e521a6ec7b4e0bf4aa62ba470a8376e28 | [
"MIT"
] | 2 | 2016-05-24T21:13:32.000Z | 2017-12-27T13:43:26.000Z | allauth/app_settings.py | tobiasgoecke/django-allauth | 5e80865e521a6ec7b4e0bf4aa62ba470a8376e28 | [
"MIT"
] | null | null | null | allauth/app_settings.py | tobiasgoecke/django-allauth | 5e80865e521a6ec7b4e0bf4aa62ba470a8376e28 | [
"MIT"
] | null | null | null | from django.conf import settings
SOCIALACCOUNT_ENABLED = 'allauth.socialaccount' in settings.INSTALLED_APPS
LOGIN_REDIRECT_URL = getattr(settings, 'LOGIN_REDIRECT_URL', '/')
USER_MODEL = getattr(settings, 'AUTH_USER_MODEL', 'auth.User')
REGISTRATION_OPEN = getattr(settings, 'REGISTRATION_OPEN', 'True')
| 25.833333 | 74 | 0.790323 | from django.conf import settings
SOCIALACCOUNT_ENABLED = 'allauth.socialaccount' in settings.INSTALLED_APPS
LOGIN_REDIRECT_URL = getattr(settings, 'LOGIN_REDIRECT_URL', '/')
USER_MODEL = getattr(settings, 'AUTH_USER_MODEL', 'auth.User')
REGISTRATION_OPEN = getattr(settings, 'REGISTRATION_OPEN', 'True')
| true | true |
f721060bb454c8f7e5e8d09071be951a7eff3765 | 13,013 | py | Python | tests/p2p/discv5/test_enr.py | AndreMiras/trinity | 6c20e2b63a698d345c282db8ab0cd426f4329ff5 | [
"MIT"
] | null | null | null | tests/p2p/discv5/test_enr.py | AndreMiras/trinity | 6c20e2b63a698d345c282db8ab0cd426f4329ff5 | [
"MIT"
] | null | null | null | tests/p2p/discv5/test_enr.py | AndreMiras/trinity | 6c20e2b63a698d345c282db8ab0cd426f4329ff5 | [
"MIT"
] | null | null | null | import base64
import pytest
import rlp
from eth_utils import (
decode_hex,
to_bytes,
ValidationError,
)
from eth_utils.toolz import (
assoc,
assoc_in,
)
from p2p.discv5.enr import (
ENR,
ENRSedes,
UnsignedENR,
)
from p2p.discv5.identity_schemes import (
IdentityScheme,
V4IdentityScheme,
IdentitySchemeRegistry,
)
from p2p.forkid import ForkID
# Source: https://github.com/fjl/EIPs/blob/0acb5939555cbd0efcdd04da0d3acb0cc81d049a/EIPS/eip-778.md
OFFICIAL_TEST_DATA = {
"repr": (
"enr:-IS4QHCYrYZbAKWCBRlAy5zzaDZXJBGkcnh4MHcBFZntXNFrdvJjX04jRzjzCBOonrkT"
"fj499SZuOh8R33Ls8RRcy5wBgmlkgnY0gmlwhH8AAAGJc2VjcDI1NmsxoQPKY0yuDUmstAHY"
"pMa2_oxVtw0RW_QAdpzBQA8yWM0xOIN1ZHCCdl8"
),
"private_key": decode_hex("b71c71a67e1177ad4e901695e1b4b9ee17ae16c6668d313eac2f96dbcda3f291"),
"public_key": decode_hex("03ca634cae0d49acb401d8a4c6b6fe8c55b70d115bf400769cc1400f3258cd3138"),
"node_id": decode_hex("a448f24c6d18e575453db13171562b71999873db5b286df957af199ec94617f7"),
"identity_scheme": V4IdentityScheme,
"sequence_number": 1,
"kv_pairs": {
b"id": b"v4",
b"ip": decode_hex("7f000001"),
b"secp256k1": decode_hex(
"03ca634cae0d49acb401d8a4c6b6fe8c55b70d115bf400769cc1400f3258cd3138",
),
b"udp": 0x765f,
}
}
# This is an ENR sent by geth and it includes a fork ID (https://eips.ethereum.org/EIPS/eip-2124)
# kv pair as well.
REAL_LIFE_TEST_DATA = {
"repr": (
"enr:-Jq4QO5zEyIBU5lSa9iaen0A2xUB5_IVrCi1DbyASTTnLV5RJan6aGPr8kU0p0MYKU5YezZgdSUE"
"-GOBEio6Ultyf1Aog2V0aMrJhGN2AZCDGfCggmlkgnY0gmlwhF4_wLuJc2VjcDI1NmsxoQOt7cA_B_Kg"
"nQ5RmwyA6ji8M1Y0jfINItRGbOOwy7XgbIN0Y3CCdl-DdWRwgnZf"
),
"public_key": decode_hex("03adedc03f07f2a09d0e519b0c80ea38bc3356348df20d22d4466ce3b0cbb5e06c"),
"node_id": decode_hex("dc8542768b457753669bebfe215d5f9ef4adb7d7df84beabddbe98350869165f"),
"identity_scheme": V4IdentityScheme,
"sequence_number": 40,
"kv_pairs": {
b"eth": (ForkID(hash=to_bytes(hexstr='0x63760190'), next=1700000), ),
b"id": b"v4",
b"ip": decode_hex("5e3fc0bb"),
b"secp256k1": decode_hex(
"03adedc03f07f2a09d0e519b0c80ea38bc3356348df20d22d4466ce3b0cbb5e06c",
),
b"tcp": 30303,
b"udp": 30303,
}
}
class MockIdentityScheme(IdentityScheme):
id = b"mock"
private_key_size = 32
@classmethod
def create_enr_signature(cls, enr, private_key: bytes) -> bytes:
if len(private_key) != cls.private_key_size:
raise ValidationError("Invalid private key")
return private_key + enr.get_signing_message()
@classmethod
def validate_enr_structure(cls, enr) -> None:
pass
@classmethod
def validate_enr_signature(cls, enr) -> None:
if not enr.signature == enr.node_id + enr.get_signing_message():
raise ValidationError("Invalid signature")
@classmethod
def extract_public_key(cls, enr) -> bytes:
return b""
@classmethod
def extract_node_id(cls, enr) -> bytes:
return enr.signature[:cls.private_key_size]
@pytest.fixture
def mock_identity_scheme():
return MockIdentityScheme
@pytest.fixture
def identity_scheme_registry(mock_identity_scheme):
registry = IdentitySchemeRegistry()
registry.register(V4IdentityScheme)
registry.register(mock_identity_scheme)
return registry
def test_mapping_interface(identity_scheme_registry):
kv_pairs = {
b"id": b"mock",
b"key1": b"value1",
b"key2": b"value2",
}
enr = ENR(
signature=b"",
sequence_number=0,
kv_pairs=kv_pairs,
identity_scheme_registry=identity_scheme_registry,
)
for key, value in kv_pairs.items():
assert key in enr
assert enr[key] == value
assert enr.get(key) == value
not_a_key = b"key3"
assert not_a_key not in kv_pairs
assert not_a_key not in enr
enr.get(not_a_key) is None
assert enr.get(not_a_key, b"default") == b"default"
assert tuple(enr.keys()) == tuple(kv_pairs.keys())
assert tuple(enr.values()) == tuple(kv_pairs.values())
assert tuple(enr.items()) == tuple(kv_pairs.items())
assert len(enr) == len(kv_pairs)
assert tuple(iter(enr)) == tuple(iter(kv_pairs))
def test_inititialization(identity_scheme_registry):
valid_sequence_number = 0
valid_kv_pairs = {b"id": b"mock"}
valid_signature = b"" # signature is not validated during initialization
assert UnsignedENR(
sequence_number=valid_sequence_number,
kv_pairs=valid_kv_pairs,
identity_scheme_registry=identity_scheme_registry,
)
assert ENR(
sequence_number=valid_sequence_number,
kv_pairs=valid_kv_pairs,
signature=valid_signature,
identity_scheme_registry=identity_scheme_registry,
)
with pytest.raises(ValidationError):
UnsignedENR(
sequence_number=valid_sequence_number,
kv_pairs={b"no-id": b""},
identity_scheme_registry=identity_scheme_registry,
)
with pytest.raises(ValidationError):
ENR(
sequence_number=valid_sequence_number,
kv_pairs={b"no-id": b""},
signature=valid_signature,
identity_scheme_registry=identity_scheme_registry,
)
with pytest.raises(ValidationError):
UnsignedENR(
sequence_number=-1,
kv_pairs=valid_kv_pairs,
identity_scheme_registry=identity_scheme_registry,
)
with pytest.raises(ValidationError):
ENR(
sequence_number=-1,
kv_pairs=valid_kv_pairs,
signature=valid_signature,
identity_scheme_registry=identity_scheme_registry,
)
def test_signing(mock_identity_scheme, identity_scheme_registry):
unsigned_enr = UnsignedENR(
sequence_number=0,
kv_pairs={b"id": b"mock"},
identity_scheme_registry=identity_scheme_registry
)
private_key = b"\x00" * 32
enr = unsigned_enr.to_signed_enr(private_key)
assert enr.signature == mock_identity_scheme.create_enr_signature(enr, private_key)
def test_signature_validation(mock_identity_scheme, identity_scheme_registry):
unsigned_enr = UnsignedENR(0, {b"id": b"mock"}, identity_scheme_registry)
private_key = b"\x00" * 32
enr = unsigned_enr.to_signed_enr(private_key)
enr.validate_signature()
invalid_signature = b"\xff" * 64
invalid_enr = ENR(
enr.sequence_number,
dict(enr),
invalid_signature,
identity_scheme_registry=identity_scheme_registry
)
with pytest.raises(ValidationError):
invalid_enr.validate_signature()
with pytest.raises(ValidationError):
ENR(
0,
{b"id": b"unknown"},
b"",
identity_scheme_registry=identity_scheme_registry,
)
def test_public_key(mock_identity_scheme, identity_scheme_registry):
unsigned_enr = UnsignedENR(0, {b"id": b"mock"}, identity_scheme_registry)
private_key = b"\x00" * 32
enr = unsigned_enr.to_signed_enr(private_key)
assert enr.public_key == mock_identity_scheme.extract_public_key(enr)
def test_node_id(mock_identity_scheme, identity_scheme_registry):
unsigned_enr = UnsignedENR(0, {b"id": b"mock"}, identity_scheme_registry)
private_key = b"\x00" * 32
enr = unsigned_enr.to_signed_enr(private_key)
assert enr.node_id == private_key
def test_signature_scheme_selection(mock_identity_scheme, identity_scheme_registry):
mock_enr = ENR(0, {b"id": b"mock"}, b"", identity_scheme_registry)
assert mock_enr.identity_scheme is mock_identity_scheme
v4_enr = ENR(0, {b"id": b"v4", b"secp256k1": b"\x02" * 33}, b"", identity_scheme_registry)
assert v4_enr.identity_scheme is V4IdentityScheme
with pytest.raises(ValidationError):
ENR(0, {b"id": b"other"}, b"", identity_scheme_registry)
def test_repr(mock_identity_scheme, identity_scheme_registry):
unsigned_enr = UnsignedENR(0, {b"id": b"mock"}, identity_scheme_registry)
enr = unsigned_enr.to_signed_enr(b"\x00" * 32)
base64_encoded_enr = base64.urlsafe_b64encode(rlp.encode(enr))
represented_enr = repr(enr)
assert represented_enr.startswith("enr:")
assert base64_encoded_enr.rstrip(b"=").decode() == represented_enr[4:]
assert ENR.from_repr(represented_enr, identity_scheme_registry) == enr
def test_deserialization_key_order_validation(identity_scheme_registry):
serialized_enr = rlp.encode([
b"signature",
0,
b"key1",
b"value1",
b"id",
b"",
b"key2",
b"value2",
])
with pytest.raises(rlp.DeserializationError):
rlp.decode(
serialized_enr,
ENRSedes,
identity_scheme_registry=identity_scheme_registry,
)
def test_deserialization_key_uniqueness_validation(identity_scheme_registry):
serialized_enr = rlp.encode([
b"signature",
0,
b"key1",
b"value1",
b"id",
b"",
b"key1",
b"value2",
])
with pytest.raises(rlp.DeserializationError):
rlp.decode(
serialized_enr,
ENRSedes,
identity_scheme_registry=identity_scheme_registry,
)
@pytest.mark.parametrize("incomplete_enr", (
(),
(b"signature",),
(b"signature", 0, b"key1"),
(b"signature", 0, b"key1", b"value1", b"id"),
))
def test_deserialization_completeness_validation(incomplete_enr, identity_scheme_registry):
incomplete_enr_rlp = rlp.encode(incomplete_enr)
with pytest.raises(rlp.DeserializationError):
rlp.decode(
incomplete_enr_rlp,
ENRSedes,
identity_scheme_registry=identity_scheme_registry,
)
def test_equality(identity_scheme_registry):
base_kwargs = {
"sequence_number": 0,
"kv_pairs": {
b"id": b"mock",
b"key1": b"value1",
b"key2": b"value2",
},
"signature": b"signature",
"identity_scheme_registry": identity_scheme_registry,
}
base_enr = ENR(**base_kwargs)
equal_enr = ENR(**base_kwargs)
enr_different_sequence_number = ENR(
**assoc(base_kwargs, "sequence_number", 1)
)
enr_different_kv_pairs = ENR(
**assoc_in(base_kwargs, ("kv_pairs", b"key1"), b"value2"),
)
enr_different_signature = ENR(
**assoc(base_kwargs, "signature", b"different-signature")
)
assert base_enr == base_enr
assert equal_enr == base_enr
assert enr_different_sequence_number != base_enr
assert enr_different_kv_pairs != base_enr
assert enr_different_signature != base_enr
def test_serialization_roundtrip(identity_scheme_registry):
original_enr = ENR(
sequence_number=0,
kv_pairs={
b"id": b"mock",
b"key2": b"value2", # wrong order so that serialization is forced to fix this
b"key1": b"value1",
},
signature=b"",
identity_scheme_registry=identity_scheme_registry,
)
encoded = rlp.encode(original_enr)
recovered_enr = rlp.decode(
encoded,
ENR,
identity_scheme_registry=identity_scheme_registry,
)
assert recovered_enr == original_enr
@pytest.mark.parametrize("invalid_kv_pairs", (
{b"id": b"v4"}, # missing public key
{b"id": b"v4", b"secp256k1": b"\x00"}, # invalid public key
))
def test_v4_structure_validation(invalid_kv_pairs, identity_scheme_registry):
with pytest.raises(ValidationError):
UnsignedENR(
sequence_number=0,
kv_pairs=invalid_kv_pairs,
identity_scheme_registry=identity_scheme_registry,
)
def test_official_test_vector():
enr = ENR.from_repr(OFFICIAL_TEST_DATA["repr"]) # use default identity scheme registry
assert enr.sequence_number == OFFICIAL_TEST_DATA["sequence_number"]
assert dict(enr) == OFFICIAL_TEST_DATA["kv_pairs"]
assert enr.public_key == OFFICIAL_TEST_DATA["public_key"]
assert enr.node_id == OFFICIAL_TEST_DATA["node_id"]
assert enr.identity_scheme is OFFICIAL_TEST_DATA["identity_scheme"]
assert repr(enr) == OFFICIAL_TEST_DATA["repr"]
unsigned_enr = UnsignedENR(enr.sequence_number, dict(enr))
reconstructed_enr = unsigned_enr.to_signed_enr(OFFICIAL_TEST_DATA["private_key"])
assert reconstructed_enr == enr
def test_real_life_test_vector():
enr = ENR.from_repr(REAL_LIFE_TEST_DATA["repr"])
assert enr.sequence_number == REAL_LIFE_TEST_DATA["sequence_number"]
assert enr.public_key == REAL_LIFE_TEST_DATA["public_key"]
assert enr.node_id == REAL_LIFE_TEST_DATA["node_id"]
assert enr.identity_scheme is REAL_LIFE_TEST_DATA["identity_scheme"]
assert dict(enr) == REAL_LIFE_TEST_DATA["kv_pairs"]
assert repr(enr) == REAL_LIFE_TEST_DATA["repr"]
| 31.508475 | 99 | 0.683394 | import base64
import pytest
import rlp
from eth_utils import (
decode_hex,
to_bytes,
ValidationError,
)
from eth_utils.toolz import (
assoc,
assoc_in,
)
from p2p.discv5.enr import (
ENR,
ENRSedes,
UnsignedENR,
)
from p2p.discv5.identity_schemes import (
IdentityScheme,
V4IdentityScheme,
IdentitySchemeRegistry,
)
from p2p.forkid import ForkID
OFFICIAL_TEST_DATA = {
"repr": (
"enr:-IS4QHCYrYZbAKWCBRlAy5zzaDZXJBGkcnh4MHcBFZntXNFrdvJjX04jRzjzCBOonrkT"
"fj499SZuOh8R33Ls8RRcy5wBgmlkgnY0gmlwhH8AAAGJc2VjcDI1NmsxoQPKY0yuDUmstAHY"
"pMa2_oxVtw0RW_QAdpzBQA8yWM0xOIN1ZHCCdl8"
),
"private_key": decode_hex("b71c71a67e1177ad4e901695e1b4b9ee17ae16c6668d313eac2f96dbcda3f291"),
"public_key": decode_hex("03ca634cae0d49acb401d8a4c6b6fe8c55b70d115bf400769cc1400f3258cd3138"),
"node_id": decode_hex("a448f24c6d18e575453db13171562b71999873db5b286df957af199ec94617f7"),
"identity_scheme": V4IdentityScheme,
"sequence_number": 1,
"kv_pairs": {
b"id": b"v4",
b"ip": decode_hex("7f000001"),
b"secp256k1": decode_hex(
"03ca634cae0d49acb401d8a4c6b6fe8c55b70d115bf400769cc1400f3258cd3138",
),
b"udp": 0x765f,
}
}
REAL_LIFE_TEST_DATA = {
"repr": (
"enr:-Jq4QO5zEyIBU5lSa9iaen0A2xUB5_IVrCi1DbyASTTnLV5RJan6aGPr8kU0p0MYKU5YezZgdSUE"
"-GOBEio6Ultyf1Aog2V0aMrJhGN2AZCDGfCggmlkgnY0gmlwhF4_wLuJc2VjcDI1NmsxoQOt7cA_B_Kg"
"nQ5RmwyA6ji8M1Y0jfINItRGbOOwy7XgbIN0Y3CCdl-DdWRwgnZf"
),
"public_key": decode_hex("03adedc03f07f2a09d0e519b0c80ea38bc3356348df20d22d4466ce3b0cbb5e06c"),
"node_id": decode_hex("dc8542768b457753669bebfe215d5f9ef4adb7d7df84beabddbe98350869165f"),
"identity_scheme": V4IdentityScheme,
"sequence_number": 40,
"kv_pairs": {
b"eth": (ForkID(hash=to_bytes(hexstr='0x63760190'), next=1700000), ),
b"id": b"v4",
b"ip": decode_hex("5e3fc0bb"),
b"secp256k1": decode_hex(
"03adedc03f07f2a09d0e519b0c80ea38bc3356348df20d22d4466ce3b0cbb5e06c",
),
b"tcp": 30303,
b"udp": 30303,
}
}
class MockIdentityScheme(IdentityScheme):
id = b"mock"
private_key_size = 32
@classmethod
def create_enr_signature(cls, enr, private_key: bytes) -> bytes:
if len(private_key) != cls.private_key_size:
raise ValidationError("Invalid private key")
return private_key + enr.get_signing_message()
@classmethod
def validate_enr_structure(cls, enr) -> None:
pass
@classmethod
def validate_enr_signature(cls, enr) -> None:
if not enr.signature == enr.node_id + enr.get_signing_message():
raise ValidationError("Invalid signature")
@classmethod
def extract_public_key(cls, enr) -> bytes:
return b""
@classmethod
def extract_node_id(cls, enr) -> bytes:
return enr.signature[:cls.private_key_size]
@pytest.fixture
def mock_identity_scheme():
return MockIdentityScheme
@pytest.fixture
def identity_scheme_registry(mock_identity_scheme):
registry = IdentitySchemeRegistry()
registry.register(V4IdentityScheme)
registry.register(mock_identity_scheme)
return registry
def test_mapping_interface(identity_scheme_registry):
kv_pairs = {
b"id": b"mock",
b"key1": b"value1",
b"key2": b"value2",
}
enr = ENR(
signature=b"",
sequence_number=0,
kv_pairs=kv_pairs,
identity_scheme_registry=identity_scheme_registry,
)
for key, value in kv_pairs.items():
assert key in enr
assert enr[key] == value
assert enr.get(key) == value
not_a_key = b"key3"
assert not_a_key not in kv_pairs
assert not_a_key not in enr
enr.get(not_a_key) is None
assert enr.get(not_a_key, b"default") == b"default"
assert tuple(enr.keys()) == tuple(kv_pairs.keys())
assert tuple(enr.values()) == tuple(kv_pairs.values())
assert tuple(enr.items()) == tuple(kv_pairs.items())
assert len(enr) == len(kv_pairs)
assert tuple(iter(enr)) == tuple(iter(kv_pairs))
def test_inititialization(identity_scheme_registry):
valid_sequence_number = 0
valid_kv_pairs = {b"id": b"mock"}
valid_signature = b""
assert UnsignedENR(
sequence_number=valid_sequence_number,
kv_pairs=valid_kv_pairs,
identity_scheme_registry=identity_scheme_registry,
)
assert ENR(
sequence_number=valid_sequence_number,
kv_pairs=valid_kv_pairs,
signature=valid_signature,
identity_scheme_registry=identity_scheme_registry,
)
with pytest.raises(ValidationError):
UnsignedENR(
sequence_number=valid_sequence_number,
kv_pairs={b"no-id": b""},
identity_scheme_registry=identity_scheme_registry,
)
with pytest.raises(ValidationError):
ENR(
sequence_number=valid_sequence_number,
kv_pairs={b"no-id": b""},
signature=valid_signature,
identity_scheme_registry=identity_scheme_registry,
)
with pytest.raises(ValidationError):
UnsignedENR(
sequence_number=-1,
kv_pairs=valid_kv_pairs,
identity_scheme_registry=identity_scheme_registry,
)
with pytest.raises(ValidationError):
ENR(
sequence_number=-1,
kv_pairs=valid_kv_pairs,
signature=valid_signature,
identity_scheme_registry=identity_scheme_registry,
)
def test_signing(mock_identity_scheme, identity_scheme_registry):
unsigned_enr = UnsignedENR(
sequence_number=0,
kv_pairs={b"id": b"mock"},
identity_scheme_registry=identity_scheme_registry
)
private_key = b"\x00" * 32
enr = unsigned_enr.to_signed_enr(private_key)
assert enr.signature == mock_identity_scheme.create_enr_signature(enr, private_key)
def test_signature_validation(mock_identity_scheme, identity_scheme_registry):
unsigned_enr = UnsignedENR(0, {b"id": b"mock"}, identity_scheme_registry)
private_key = b"\x00" * 32
enr = unsigned_enr.to_signed_enr(private_key)
enr.validate_signature()
invalid_signature = b"\xff" * 64
invalid_enr = ENR(
enr.sequence_number,
dict(enr),
invalid_signature,
identity_scheme_registry=identity_scheme_registry
)
with pytest.raises(ValidationError):
invalid_enr.validate_signature()
with pytest.raises(ValidationError):
ENR(
0,
{b"id": b"unknown"},
b"",
identity_scheme_registry=identity_scheme_registry,
)
def test_public_key(mock_identity_scheme, identity_scheme_registry):
unsigned_enr = UnsignedENR(0, {b"id": b"mock"}, identity_scheme_registry)
private_key = b"\x00" * 32
enr = unsigned_enr.to_signed_enr(private_key)
assert enr.public_key == mock_identity_scheme.extract_public_key(enr)
def test_node_id(mock_identity_scheme, identity_scheme_registry):
unsigned_enr = UnsignedENR(0, {b"id": b"mock"}, identity_scheme_registry)
private_key = b"\x00" * 32
enr = unsigned_enr.to_signed_enr(private_key)
assert enr.node_id == private_key
def test_signature_scheme_selection(mock_identity_scheme, identity_scheme_registry):
mock_enr = ENR(0, {b"id": b"mock"}, b"", identity_scheme_registry)
assert mock_enr.identity_scheme is mock_identity_scheme
v4_enr = ENR(0, {b"id": b"v4", b"secp256k1": b"\x02" * 33}, b"", identity_scheme_registry)
assert v4_enr.identity_scheme is V4IdentityScheme
with pytest.raises(ValidationError):
ENR(0, {b"id": b"other"}, b"", identity_scheme_registry)
def test_repr(mock_identity_scheme, identity_scheme_registry):
unsigned_enr = UnsignedENR(0, {b"id": b"mock"}, identity_scheme_registry)
enr = unsigned_enr.to_signed_enr(b"\x00" * 32)
base64_encoded_enr = base64.urlsafe_b64encode(rlp.encode(enr))
represented_enr = repr(enr)
assert represented_enr.startswith("enr:")
assert base64_encoded_enr.rstrip(b"=").decode() == represented_enr[4:]
assert ENR.from_repr(represented_enr, identity_scheme_registry) == enr
def test_deserialization_key_order_validation(identity_scheme_registry):
serialized_enr = rlp.encode([
b"signature",
0,
b"key1",
b"value1",
b"id",
b"",
b"key2",
b"value2",
])
with pytest.raises(rlp.DeserializationError):
rlp.decode(
serialized_enr,
ENRSedes,
identity_scheme_registry=identity_scheme_registry,
)
def test_deserialization_key_uniqueness_validation(identity_scheme_registry):
serialized_enr = rlp.encode([
b"signature",
0,
b"key1",
b"value1",
b"id",
b"",
b"key1",
b"value2",
])
with pytest.raises(rlp.DeserializationError):
rlp.decode(
serialized_enr,
ENRSedes,
identity_scheme_registry=identity_scheme_registry,
)
@pytest.mark.parametrize("incomplete_enr", (
(),
(b"signature",),
(b"signature", 0, b"key1"),
(b"signature", 0, b"key1", b"value1", b"id"),
))
def test_deserialization_completeness_validation(incomplete_enr, identity_scheme_registry):
incomplete_enr_rlp = rlp.encode(incomplete_enr)
with pytest.raises(rlp.DeserializationError):
rlp.decode(
incomplete_enr_rlp,
ENRSedes,
identity_scheme_registry=identity_scheme_registry,
)
def test_equality(identity_scheme_registry):
base_kwargs = {
"sequence_number": 0,
"kv_pairs": {
b"id": b"mock",
b"key1": b"value1",
b"key2": b"value2",
},
"signature": b"signature",
"identity_scheme_registry": identity_scheme_registry,
}
base_enr = ENR(**base_kwargs)
equal_enr = ENR(**base_kwargs)
enr_different_sequence_number = ENR(
**assoc(base_kwargs, "sequence_number", 1)
)
enr_different_kv_pairs = ENR(
**assoc_in(base_kwargs, ("kv_pairs", b"key1"), b"value2"),
)
enr_different_signature = ENR(
**assoc(base_kwargs, "signature", b"different-signature")
)
assert base_enr == base_enr
assert equal_enr == base_enr
assert enr_different_sequence_number != base_enr
assert enr_different_kv_pairs != base_enr
assert enr_different_signature != base_enr
def test_serialization_roundtrip(identity_scheme_registry):
original_enr = ENR(
sequence_number=0,
kv_pairs={
b"id": b"mock",
b"key2": b"value2",
b"key1": b"value1",
},
signature=b"",
identity_scheme_registry=identity_scheme_registry,
)
encoded = rlp.encode(original_enr)
recovered_enr = rlp.decode(
encoded,
ENR,
identity_scheme_registry=identity_scheme_registry,
)
assert recovered_enr == original_enr
@pytest.mark.parametrize("invalid_kv_pairs", (
{b"id": b"v4"},
{b"id": b"v4", b"secp256k1": b"\x00"},
))
def test_v4_structure_validation(invalid_kv_pairs, identity_scheme_registry):
with pytest.raises(ValidationError):
UnsignedENR(
sequence_number=0,
kv_pairs=invalid_kv_pairs,
identity_scheme_registry=identity_scheme_registry,
)
def test_official_test_vector():
enr = ENR.from_repr(OFFICIAL_TEST_DATA["repr"])
assert enr.sequence_number == OFFICIAL_TEST_DATA["sequence_number"]
assert dict(enr) == OFFICIAL_TEST_DATA["kv_pairs"]
assert enr.public_key == OFFICIAL_TEST_DATA["public_key"]
assert enr.node_id == OFFICIAL_TEST_DATA["node_id"]
assert enr.identity_scheme is OFFICIAL_TEST_DATA["identity_scheme"]
assert repr(enr) == OFFICIAL_TEST_DATA["repr"]
unsigned_enr = UnsignedENR(enr.sequence_number, dict(enr))
reconstructed_enr = unsigned_enr.to_signed_enr(OFFICIAL_TEST_DATA["private_key"])
assert reconstructed_enr == enr
def test_real_life_test_vector():
enr = ENR.from_repr(REAL_LIFE_TEST_DATA["repr"])
assert enr.sequence_number == REAL_LIFE_TEST_DATA["sequence_number"]
assert enr.public_key == REAL_LIFE_TEST_DATA["public_key"]
assert enr.node_id == REAL_LIFE_TEST_DATA["node_id"]
assert enr.identity_scheme is REAL_LIFE_TEST_DATA["identity_scheme"]
assert dict(enr) == REAL_LIFE_TEST_DATA["kv_pairs"]
assert repr(enr) == REAL_LIFE_TEST_DATA["repr"]
| true | true |
f72107e0ab86bdefce931d0993f38f0d3db29c26 | 12,483 | py | Python | mypy/test/testpep561.py | chubbymaggie/mypy | 50c3dfcdca94726130e8cfdb6bde02b3eeca4e09 | [
"PSF-2.0"
] | 1 | 2019-06-15T08:26:28.000Z | 2019-06-15T08:26:28.000Z | mypy/test/testpep561.py | chubbymaggie/mypy | 50c3dfcdca94726130e8cfdb6bde02b3eeca4e09 | [
"PSF-2.0"
] | 1 | 2021-03-31T20:22:11.000Z | 2021-03-31T20:22:11.000Z | mypy/test/testpep561.py | chubbymaggie/mypy | 50c3dfcdca94726130e8cfdb6bde02b3eeca4e09 | [
"PSF-2.0"
] | null | null | null | from contextlib import contextmanager
from enum import Enum
import os
import sys
import tempfile
from typing import Tuple, List, Generator, Optional
from unittest import TestCase, main
import mypy.api
from mypy.modulefinder import get_site_packages_dirs
from mypy.test.config import package_path
from mypy.test.helpers import run_command
from mypy.util import try_find_python2_interpreter
# NOTE: options.use_builtins_fixtures should not be set in these
# tests, otherwise mypy will ignore installed third-party packages.
SIMPLE_PROGRAM = """
from typedpkg.sample import ex
from typedpkg import dne
a = ex([''])
reveal_type(a)
"""
_NAMESPACE_PROGRAM = """
{import_style}
from typedpkg_ns.ns.dne import dne
af("abc")
bf(False)
dne(123)
af(False)
bf(2)
dne("abc")
"""
class NSImportStyle(Enum):
# These should all be on exactly two lines because NamespaceMsg
# uses line numbers which expect the imports to be exactly two lines
from_import = """\
from typedpkg.pkg.aaa import af
from typedpkg_ns.ns.bbb import bf"""
import_as = """\
import typedpkg.pkg.aaa as nm; af = nm.af
import typedpkg_ns.ns.bbb as am; bf = am.bf"""
reg_import = """\
import typedpkg.pkg.aaa; af = typedpkg.pkg.aaa.af
import typedpkg_ns.ns.bbb; bf = typedpkg_ns.ns.bbb.bf"""
class SimpleMsg(Enum):
msg_dne = "{tempfile}:3: error: Module 'typedpkg' has no attribute 'dne'"
msg_list = "{tempfile}:5: error: Revealed type is 'builtins.list[builtins.str]'"
msg_tuple = "{tempfile}:5: error: Revealed type is 'builtins.tuple[builtins.str]'"
class NamespaceMsg(Enum):
cfm_beta = ("{tempfile}:4: error: Cannot find module named "
"'typedpkg_ns.ns.dne'")
help_note = ('{tempfile}:4: note: (Perhaps setting MYPYPATH or using the '
'"--ignore-missing-imports" flag would help)')
bool_str = ('{tempfile}:10: error: Argument 1 has incompatible type '
'"bool"; expected "str"')
int_bool = ('{tempfile}:11: error: Argument 1 has incompatible type '
'"int"; expected "bool"')
to_bool_str = ('{tempfile}:10: error: Argument 1 to "af" has incompatible type '
'"bool"; expected "str"')
to_int_bool = ('{tempfile}:11: error: Argument 1 to "bf" has incompatible type '
'"int"; expected "bool"')
def create_ns_program_src(import_style: NSImportStyle) -> str:
return _NAMESPACE_PROGRAM.format(import_style=import_style.value)
class ExampleProg(object):
_fname = 'test_program.py'
def __init__(self, source_code: str) -> None:
self._source_code = source_code
self._temp_dir = None # type: Optional[tempfile.TemporaryDirectory[str]]
self._full_fname = ''
def create(self) -> None:
self._temp_dir = tempfile.TemporaryDirectory()
self._full_fname = os.path.join(self._temp_dir.name, self._fname)
with open(self._full_fname, 'w+') as f:
f.write(self._source_code)
def cleanup(self) -> None:
if self._temp_dir:
self._temp_dir.cleanup()
def build_msg(self, *msgs: Enum) -> str:
return '\n'.join(
msg.value.format(tempfile=self._full_fname)
for msg in msgs
) + '\n'
def check_mypy_run(self,
python_executable: str,
expected_out: List[Enum],
expected_err: str = '',
expected_returncode: int = 1,
venv_dir: Optional[str] = None) -> None:
"""Helper to run mypy and check the output."""
cmd_line = [self._full_fname]
if venv_dir is not None:
old_dir = os.getcwd()
os.chdir(venv_dir)
try:
if python_executable != sys.executable:
cmd_line.append('--python-executable={}'.format(python_executable))
out, err, returncode = mypy.api.run(cmd_line)
assert out == self.build_msg(*expected_out), err
assert err == expected_err, out
assert returncode == expected_returncode, returncode
finally:
if venv_dir is not None:
os.chdir(old_dir)
class TestPEP561(TestCase):
@contextmanager
def virtualenv(self,
python_executable: str = sys.executable
) -> Generator[Tuple[str, str], None, None]:
"""Context manager that creates a virtualenv in a temporary directory
returns the path to the created Python executable"""
# Sadly, we need virtualenv, as the Python 3 venv module does not support creating a venv
# for Python 2, and Python 2 does not have its own venv.
with tempfile.TemporaryDirectory() as venv_dir:
returncode, lines = run_command([sys.executable,
'-m',
'virtualenv',
'-p{}'.format(python_executable),
venv_dir], cwd=os.getcwd())
if returncode != 0:
err = '\n'.join(lines)
self.fail("Failed to create venv. Do you have virtualenv installed?\n" + err)
if sys.platform == 'win32':
yield venv_dir, os.path.abspath(os.path.join(venv_dir, 'Scripts', 'python'))
else:
yield venv_dir, os.path.abspath(os.path.join(venv_dir, 'bin', 'python'))
def install_package(self, pkg: str,
python_executable: str = sys.executable,
use_pip: bool = True,
editable: bool = False) -> None:
"""Context manager to temporarily install a package from test-data/packages/pkg/"""
working_dir = os.path.join(package_path, pkg)
if use_pip:
install_cmd = [python_executable, '-m', 'pip', 'install']
if editable:
install_cmd.append('-e')
install_cmd.append('.')
else:
install_cmd = [python_executable, 'setup.py']
if editable:
install_cmd.append('develop')
else:
install_cmd.append('install')
returncode, lines = run_command(install_cmd, cwd=working_dir)
if returncode != 0:
self.fail('\n'.join(lines))
def setUp(self) -> None:
self.simple_prog = ExampleProg(SIMPLE_PROGRAM)
self.from_ns_prog = ExampleProg(create_ns_program_src(NSImportStyle.from_import))
self.import_as_ns_prog = ExampleProg(create_ns_program_src(NSImportStyle.import_as))
self.regular_import_ns_prog = ExampleProg(create_ns_program_src(NSImportStyle.reg_import))
def tearDown(self) -> None:
self.simple_prog.cleanup()
self.from_ns_prog.cleanup()
self.import_as_ns_prog.cleanup()
self.regular_import_ns_prog.cleanup()
def test_get_pkg_dirs(self) -> None:
"""Check that get_package_dirs works."""
dirs = get_site_packages_dirs(sys.executable)
assert dirs
def test_typedpkg_stub_package(self) -> None:
self.simple_prog.create()
with self.virtualenv() as venv:
venv_dir, python_executable = venv
self.install_package('typedpkg-stubs', python_executable)
self.simple_prog.check_mypy_run(
python_executable,
[SimpleMsg.msg_dne, SimpleMsg.msg_list],
venv_dir=venv_dir,
)
def test_typedpkg(self) -> None:
self.simple_prog.create()
with self.virtualenv() as venv:
venv_dir, python_executable = venv
self.install_package('typedpkg', python_executable)
self.simple_prog.check_mypy_run(
python_executable,
[SimpleMsg.msg_tuple],
venv_dir=venv_dir,
)
def test_stub_and_typed_pkg(self) -> None:
self.simple_prog.create()
with self.virtualenv() as venv:
venv_dir, python_executable = venv
self.install_package('typedpkg', python_executable)
self.install_package('typedpkg-stubs', python_executable)
self.simple_prog.check_mypy_run(
python_executable,
[SimpleMsg.msg_list],
venv_dir=venv_dir,
)
def test_typedpkg_stubs_python2(self) -> None:
self.simple_prog.create()
python2 = try_find_python2_interpreter()
if python2:
with self.virtualenv(python2) as venv:
venv_dir, py2 = venv
self.install_package('typedpkg-stubs', py2)
self.simple_prog.check_mypy_run(
py2,
[SimpleMsg.msg_dne, SimpleMsg.msg_list],
venv_dir=venv_dir,
)
def test_typedpkg_python2(self) -> None:
self.simple_prog.create()
python2 = try_find_python2_interpreter()
if python2:
with self.virtualenv(python2) as venv:
venv_dir, py2 = venv
self.install_package('typedpkg', py2)
self.simple_prog.check_mypy_run(
py2,
[SimpleMsg.msg_tuple],
venv_dir=venv_dir,
)
def test_typedpkg_egg(self) -> None:
self.simple_prog.create()
with self.virtualenv() as venv:
venv_dir, python_executable = venv
self.install_package('typedpkg', python_executable, use_pip=False)
self.simple_prog.check_mypy_run(
python_executable,
[SimpleMsg.msg_tuple],
venv_dir=venv_dir,
)
def test_typedpkg_editable(self) -> None:
self.simple_prog.create()
with self.virtualenv() as venv:
venv_dir, python_executable = venv
self.install_package('typedpkg', python_executable, editable=True)
self.simple_prog.check_mypy_run(
python_executable,
[SimpleMsg.msg_tuple],
venv_dir=venv_dir,
)
def test_typedpkg_egg_editable(self) -> None:
self.simple_prog.create()
with self.virtualenv() as venv:
venv_dir, python_executable = venv
self.install_package('typedpkg', python_executable, use_pip=False, editable=True)
self.simple_prog.check_mypy_run(
python_executable,
[SimpleMsg.msg_tuple],
venv_dir=venv_dir,
)
def test_nested_and_namespace_from_import(self) -> None:
self.from_ns_prog.create()
with self.virtualenv() as venv:
venv_dir, python_executable = venv
self.install_package('typedpkg', python_executable)
self.install_package('typedpkg_ns', python_executable)
self.from_ns_prog.check_mypy_run(
python_executable,
[NamespaceMsg.cfm_beta,
NamespaceMsg.help_note,
NamespaceMsg.to_bool_str,
NamespaceMsg.to_int_bool],
venv_dir=venv_dir,
)
def test_nested_and_namespace_import_as(self) -> None:
self.import_as_ns_prog.create()
with self.virtualenv() as venv:
venv_dir, python_executable = venv
self.install_package('typedpkg', python_executable)
self.install_package('typedpkg_ns', python_executable)
self.import_as_ns_prog.check_mypy_run(
python_executable,
[NamespaceMsg.cfm_beta,
NamespaceMsg.help_note,
NamespaceMsg.bool_str,
NamespaceMsg.int_bool],
venv_dir=venv_dir,
)
def test_nested_and_namespace_regular_import(self) -> None:
self.regular_import_ns_prog.create()
with self.virtualenv() as venv:
venv_dir, python_executable = venv
self.install_package('typedpkg', python_executable)
self.install_package('typedpkg_ns', python_executable)
self.regular_import_ns_prog.check_mypy_run(
python_executable,
[NamespaceMsg.cfm_beta,
NamespaceMsg.help_note,
NamespaceMsg.bool_str,
NamespaceMsg.int_bool],
venv_dir=venv_dir,
)
if __name__ == '__main__':
main()
| 37.827273 | 98 | 0.599295 | from contextlib import contextmanager
from enum import Enum
import os
import sys
import tempfile
from typing import Tuple, List, Generator, Optional
from unittest import TestCase, main
import mypy.api
from mypy.modulefinder import get_site_packages_dirs
from mypy.test.config import package_path
from mypy.test.helpers import run_command
from mypy.util import try_find_python2_interpreter
SIMPLE_PROGRAM = """
from typedpkg.sample import ex
from typedpkg import dne
a = ex([''])
reveal_type(a)
"""
_NAMESPACE_PROGRAM = """
{import_style}
from typedpkg_ns.ns.dne import dne
af("abc")
bf(False)
dne(123)
af(False)
bf(2)
dne("abc")
"""
class NSImportStyle(Enum):
from_import = """\
from typedpkg.pkg.aaa import af
from typedpkg_ns.ns.bbb import bf"""
import_as = """\
import typedpkg.pkg.aaa as nm; af = nm.af
import typedpkg_ns.ns.bbb as am; bf = am.bf"""
reg_import = """\
import typedpkg.pkg.aaa; af = typedpkg.pkg.aaa.af
import typedpkg_ns.ns.bbb; bf = typedpkg_ns.ns.bbb.bf"""
class SimpleMsg(Enum):
msg_dne = "{tempfile}:3: error: Module 'typedpkg' has no attribute 'dne'"
msg_list = "{tempfile}:5: error: Revealed type is 'builtins.list[builtins.str]'"
msg_tuple = "{tempfile}:5: error: Revealed type is 'builtins.tuple[builtins.str]'"
class NamespaceMsg(Enum):
cfm_beta = ("{tempfile}:4: error: Cannot find module named "
"'typedpkg_ns.ns.dne'")
help_note = ('{tempfile}:4: note: (Perhaps setting MYPYPATH or using the '
'"--ignore-missing-imports" flag would help)')
bool_str = ('{tempfile}:10: error: Argument 1 has incompatible type '
'"bool"; expected "str"')
int_bool = ('{tempfile}:11: error: Argument 1 has incompatible type '
'"int"; expected "bool"')
to_bool_str = ('{tempfile}:10: error: Argument 1 to "af" has incompatible type '
'"bool"; expected "str"')
to_int_bool = ('{tempfile}:11: error: Argument 1 to "bf" has incompatible type '
'"int"; expected "bool"')
def create_ns_program_src(import_style: NSImportStyle) -> str:
return _NAMESPACE_PROGRAM.format(import_style=import_style.value)
class ExampleProg(object):
_fname = 'test_program.py'
def __init__(self, source_code: str) -> None:
self._source_code = source_code
self._temp_dir = None
self._full_fname = ''
def create(self) -> None:
self._temp_dir = tempfile.TemporaryDirectory()
self._full_fname = os.path.join(self._temp_dir.name, self._fname)
with open(self._full_fname, 'w+') as f:
f.write(self._source_code)
def cleanup(self) -> None:
if self._temp_dir:
self._temp_dir.cleanup()
def build_msg(self, *msgs: Enum) -> str:
return '\n'.join(
msg.value.format(tempfile=self._full_fname)
for msg in msgs
) + '\n'
def check_mypy_run(self,
python_executable: str,
expected_out: List[Enum],
expected_err: str = '',
expected_returncode: int = 1,
venv_dir: Optional[str] = None) -> None:
cmd_line = [self._full_fname]
if venv_dir is not None:
old_dir = os.getcwd()
os.chdir(venv_dir)
try:
if python_executable != sys.executable:
cmd_line.append('--python-executable={}'.format(python_executable))
out, err, returncode = mypy.api.run(cmd_line)
assert out == self.build_msg(*expected_out), err
assert err == expected_err, out
assert returncode == expected_returncode, returncode
finally:
if venv_dir is not None:
os.chdir(old_dir)
class TestPEP561(TestCase):
@contextmanager
def virtualenv(self,
python_executable: str = sys.executable
) -> Generator[Tuple[str, str], None, None]:
with tempfile.TemporaryDirectory() as venv_dir:
returncode, lines = run_command([sys.executable,
'-m',
'virtualenv',
'-p{}'.format(python_executable),
venv_dir], cwd=os.getcwd())
if returncode != 0:
err = '\n'.join(lines)
self.fail("Failed to create venv. Do you have virtualenv installed?\n" + err)
if sys.platform == 'win32':
yield venv_dir, os.path.abspath(os.path.join(venv_dir, 'Scripts', 'python'))
else:
yield venv_dir, os.path.abspath(os.path.join(venv_dir, 'bin', 'python'))
def install_package(self, pkg: str,
python_executable: str = sys.executable,
use_pip: bool = True,
editable: bool = False) -> None:
working_dir = os.path.join(package_path, pkg)
if use_pip:
install_cmd = [python_executable, '-m', 'pip', 'install']
if editable:
install_cmd.append('-e')
install_cmd.append('.')
else:
install_cmd = [python_executable, 'setup.py']
if editable:
install_cmd.append('develop')
else:
install_cmd.append('install')
returncode, lines = run_command(install_cmd, cwd=working_dir)
if returncode != 0:
self.fail('\n'.join(lines))
def setUp(self) -> None:
self.simple_prog = ExampleProg(SIMPLE_PROGRAM)
self.from_ns_prog = ExampleProg(create_ns_program_src(NSImportStyle.from_import))
self.import_as_ns_prog = ExampleProg(create_ns_program_src(NSImportStyle.import_as))
self.regular_import_ns_prog = ExampleProg(create_ns_program_src(NSImportStyle.reg_import))
def tearDown(self) -> None:
self.simple_prog.cleanup()
self.from_ns_prog.cleanup()
self.import_as_ns_prog.cleanup()
self.regular_import_ns_prog.cleanup()
def test_get_pkg_dirs(self) -> None:
dirs = get_site_packages_dirs(sys.executable)
assert dirs
def test_typedpkg_stub_package(self) -> None:
self.simple_prog.create()
with self.virtualenv() as venv:
venv_dir, python_executable = venv
self.install_package('typedpkg-stubs', python_executable)
self.simple_prog.check_mypy_run(
python_executable,
[SimpleMsg.msg_dne, SimpleMsg.msg_list],
venv_dir=venv_dir,
)
def test_typedpkg(self) -> None:
self.simple_prog.create()
with self.virtualenv() as venv:
venv_dir, python_executable = venv
self.install_package('typedpkg', python_executable)
self.simple_prog.check_mypy_run(
python_executable,
[SimpleMsg.msg_tuple],
venv_dir=venv_dir,
)
def test_stub_and_typed_pkg(self) -> None:
self.simple_prog.create()
with self.virtualenv() as venv:
venv_dir, python_executable = venv
self.install_package('typedpkg', python_executable)
self.install_package('typedpkg-stubs', python_executable)
self.simple_prog.check_mypy_run(
python_executable,
[SimpleMsg.msg_list],
venv_dir=venv_dir,
)
def test_typedpkg_stubs_python2(self) -> None:
self.simple_prog.create()
python2 = try_find_python2_interpreter()
if python2:
with self.virtualenv(python2) as venv:
venv_dir, py2 = venv
self.install_package('typedpkg-stubs', py2)
self.simple_prog.check_mypy_run(
py2,
[SimpleMsg.msg_dne, SimpleMsg.msg_list],
venv_dir=venv_dir,
)
def test_typedpkg_python2(self) -> None:
self.simple_prog.create()
python2 = try_find_python2_interpreter()
if python2:
with self.virtualenv(python2) as venv:
venv_dir, py2 = venv
self.install_package('typedpkg', py2)
self.simple_prog.check_mypy_run(
py2,
[SimpleMsg.msg_tuple],
venv_dir=venv_dir,
)
def test_typedpkg_egg(self) -> None:
self.simple_prog.create()
with self.virtualenv() as venv:
venv_dir, python_executable = venv
self.install_package('typedpkg', python_executable, use_pip=False)
self.simple_prog.check_mypy_run(
python_executable,
[SimpleMsg.msg_tuple],
venv_dir=venv_dir,
)
def test_typedpkg_editable(self) -> None:
self.simple_prog.create()
with self.virtualenv() as venv:
venv_dir, python_executable = venv
self.install_package('typedpkg', python_executable, editable=True)
self.simple_prog.check_mypy_run(
python_executable,
[SimpleMsg.msg_tuple],
venv_dir=venv_dir,
)
def test_typedpkg_egg_editable(self) -> None:
self.simple_prog.create()
with self.virtualenv() as venv:
venv_dir, python_executable = venv
self.install_package('typedpkg', python_executable, use_pip=False, editable=True)
self.simple_prog.check_mypy_run(
python_executable,
[SimpleMsg.msg_tuple],
venv_dir=venv_dir,
)
def test_nested_and_namespace_from_import(self) -> None:
self.from_ns_prog.create()
with self.virtualenv() as venv:
venv_dir, python_executable = venv
self.install_package('typedpkg', python_executable)
self.install_package('typedpkg_ns', python_executable)
self.from_ns_prog.check_mypy_run(
python_executable,
[NamespaceMsg.cfm_beta,
NamespaceMsg.help_note,
NamespaceMsg.to_bool_str,
NamespaceMsg.to_int_bool],
venv_dir=venv_dir,
)
def test_nested_and_namespace_import_as(self) -> None:
self.import_as_ns_prog.create()
with self.virtualenv() as venv:
venv_dir, python_executable = venv
self.install_package('typedpkg', python_executable)
self.install_package('typedpkg_ns', python_executable)
self.import_as_ns_prog.check_mypy_run(
python_executable,
[NamespaceMsg.cfm_beta,
NamespaceMsg.help_note,
NamespaceMsg.bool_str,
NamespaceMsg.int_bool],
venv_dir=venv_dir,
)
def test_nested_and_namespace_regular_import(self) -> None:
self.regular_import_ns_prog.create()
with self.virtualenv() as venv:
venv_dir, python_executable = venv
self.install_package('typedpkg', python_executable)
self.install_package('typedpkg_ns', python_executable)
self.regular_import_ns_prog.check_mypy_run(
python_executable,
[NamespaceMsg.cfm_beta,
NamespaceMsg.help_note,
NamespaceMsg.bool_str,
NamespaceMsg.int_bool],
venv_dir=venv_dir,
)
if __name__ == '__main__':
main()
| true | true |
f72108b9bfb35d1a7e2ad22f95c5ce9bc663f987 | 14,680 | py | Python | scripts/cluster/agent.py | nobusugi246/microk8s | 797720e2d1e74030fc3d8df5d291469c6082aaac | [
"Apache-2.0"
] | null | null | null | scripts/cluster/agent.py | nobusugi246/microk8s | 797720e2d1e74030fc3d8df5d291469c6082aaac | [
"Apache-2.0"
] | null | null | null | scripts/cluster/agent.py | nobusugi246/microk8s | 797720e2d1e74030fc3d8df5d291469c6082aaac | [
"Apache-2.0"
] | null | null | null | #!flask/bin/python
import getopt
import json
import os
import shutil
import socket
import string
import random
import subprocess
import sys
from .common.utils import try_set_file_permissions
from flask import Flask, jsonify, request, abort, Response
# Flask application that serves the cluster join / certificate endpoints.
app = Flask(__name__)

# URL prefix for all cluster agent endpoints.
CLUSTER_API="cluster/api/v1.0"
# Writable data directory of the snap; all credentials/certs live under it.
snapdata_path = os.environ.get('SNAP_DATA')
# NOTE(review): snap_path also reads SNAP_DATA, making it identical to
# snapdata_path — presumably it was meant to read SNAP; confirm before use.
snap_path = os.environ.get('SNAP_DATA')
# Bootstrap tokens that authorize a node to join the cluster.
cluster_tokens_file = "{}/credentials/cluster-tokens.txt".format(snapdata_path)
# Per-node callback tokens stored by this agent (node name -> token).
callback_tokens_file = "{}/credentials/callback-tokens.txt".format(snapdata_path)
callback_token_file = "{}/credentials/callback-token.txt".format(snapdata_path)
# Tokens for which we expect a certificate signing request.
certs_request_tokens_file = "{}/credentials/certs-request-tokens.txt".format(snapdata_path)
# Defaults for the agent's listening socket.
default_port = 25000
default_listen_interface = "0.0.0.0"
def get_service_name(service):
    """
    Returns the service name from its configuration file name.

    :param service: the name of the service configuration file
    :returns: the service name
    """
    if service in ["kube-proxy", "kube-apiserver", "kube-scheduler", "kube-controller-manager"]:
        # Strip the "kube-" prefix. The original used a comma instead of a
        # colon (service[len("kube-"), :]), which indexes the string with a
        # tuple and raises TypeError for every kube-* service.
        return service[len("kube-"):]
    else:
        return service
def update_service_argument(service, key, val):
    """
    Rewrite the arguments file of a service so that ``key`` carries ``val``.

    :param service: the service whose arguments file is updated
    :param key: the argument to add or replace
    :param val: the value for the argument; ``None`` drops the argument
    """
    args_path = "{}/args/{}".format(snapdata_path, service)
    staging_path = "{}/args/{}.tmp".format(snapdata_path, service)

    replaced = False
    # Stream the current file into a staging copy, substituting (or
    # skipping) any line that starts with the requested key.
    with open(staging_path, "w+") as out_fp:
        with open(args_path, "r+") as in_fp:
            for entry in in_fp:
                if not entry.startswith(key):
                    out_fp.write("{}\n".format(entry.rstrip()))
                    continue
                replaced = True
                if val is not None:
                    out_fp.write("{}={}\n".format(key, val))
        if not replaced and val is not None:
            # Key was absent: append it at the end.
            out_fp.write("{}={}\n".format(key, val))

    try_set_file_permissions(staging_path)
    shutil.move(staging_path, args_path)
def store_callback_token(node, callback_token):
    """
    Persist the callback token of a node, replacing any previous entry.

    :param node: the node name
    :param callback_token: the token to store for that node
    """
    staging = "{}.tmp".format(callback_tokens_file)
    if not os.path.isfile(callback_tokens_file):
        # Make sure the tokens file exists and is only owner-readable.
        open(callback_tokens_file, 'a+')
        os.chmod(callback_tokens_file, 0o600)

    with open(staging, "w") as out_fp:
        os.chmod(staging, 0o600)
        replaced = False
        # Copy every entry across, swapping in the fresh token for this node.
        with open(callback_tokens_file, 'r+') as in_fp:
            for entry in in_fp:
                if entry.startswith(node):
                    out_fp.write("{} {}\n".format(node, callback_token))
                    replaced = True
                else:
                    out_fp.write(entry)
        if not replaced:
            out_fp.write("{} {}\n".format(node, callback_token))

    try_set_file_permissions(staging)
    shutil.move(staging, callback_tokens_file)
def sign_client_cert(cert_request, token):
    """
    Sign a certificate signing request with the cluster CA.

    :param cert_request: the PEM encoded certificate signing request
    :param token: a token acting as a request uuid
    :returns: the signed certificate (PEM)
    """
    req_file = "{}/certs/request.{}.csr".format(snapdata_path, token)
    cert_file = "{}/certs/server.{}.crt".format(snapdata_path, token)
    # Build the command as an argument list instead of a single string that
    # is later .split(): splitting on whitespace breaks the invocation if
    # SNAP_DATA ever contains a space, and the list form avoids that class
    # of quoting bugs entirely.
    sign_cmd = [
        "openssl", "x509", "-req",
        "-in", req_file,
        "-CA", "{}/certs/ca.crt".format(snapdata_path),
        "-CAkey", "{}/certs/ca.key".format(snapdata_path),
        "-CAcreateserial",
        "-out", cert_file,
        "-days", "100000",
    ]

    with open(req_file, 'w') as fp:
        fp.write(cert_request)
    subprocess.check_call(sign_cmd)
    with open(cert_file) as fp:
        cert = fp.read()
    return cert
def add_token_to_certs_request(token):
    """
    Record a token for which we expect a certificate signing request.

    :param token: the token to record
    """
    with open(certs_request_tokens_file, "a+") as tokens_fp:
        tokens_fp.write(token + "\n")
def remove_token_from_file(token, file):
    """
    Drop every entry starting with ``token`` from a tokens file.

    :param token: the token to be removed
    :param file: the file holding the tokens
    """
    backup_file = "{}.backup".format(file)
    # Critical section: flask currently serves one request at a time, which
    # is what keeps this read/rewrite safe without an explicit lock.
    with open(file, 'r') as fp:
        kept = [entry for entry in fp if not entry.startswith(token)]
    with open(backup_file, 'w') as back_fp:
        back_fp.writelines(kept)
    shutil.copyfile(backup_file, file)
def get_token(name):
    """
    Get token from known_tokens file

    :param name: the name of the node
    :returns: the token or None(if name doesn't exist)
    """
    file = "{}/credentials/known_tokens.csv".format(snapdata_path)
    with open(file) as fp:
        # Scan every line: the previous implementation called readline() once
        # and therefore only ever matched the first entry, missing tokens
        # appended later (e.g. per-node kubelet tokens).
        for line in fp:
            if name in line:
                parts = line.split(',')
                return parts[0].rstrip()
    return None
def add_kubelet_token(hostname):
    """
    Add a token for a node in the known tokens

    :param hostname: the name of the node
    :returns: the token added
    """
    tokens_file = "{}/credentials/known_tokens.csv".format(snapdata_path)
    existing_token = get_token("system:node:{}".format(hostname))
    if existing_token:
        return existing_token.rstrip()
    rng = random.SystemRandom()
    alphabet = string.ascii_letters + string.digits
    token = ''.join(rng.choice(alphabet) for _ in range(32))
    uid = ''.join(rng.choice(string.digits) for _ in range(8))
    # TODO double check this format. Why is userid unique?
    entry = "{},system:node:{},kubelet,kubelet-{},\"system:nodes\"".format(token, hostname, uid)
    with open(tokens_file, 'a') as tokens_fp:
        tokens_fp.write(entry + os.linesep)
    return token.rstrip()
def getCA():
    """
    Return the CA

    :returns: the CA file contents
    """
    with open("{}/certs/ca.crt".format(snapdata_path)) as ca_fp:
        return ca_fp.read()
def get_arg(key, file):
    """
    Get an argument from an arguments file

    :param key: the argument we look for
    :param file: the arguments file to search in
    :returns: the value of the argument or None(if the key doesn't exist)
    """
    filename = "{}/args/{}".format(snapdata_path, file)
    with open(filename) as args_fp:
        for line in args_fp:
            if not line.startswith(key):
                continue
            # Accept both "--key value" and "--key=value" forms: take the
            # last space-separated token, then whatever follows '='.
            last_token = line.split(' ')[-1]
            return last_token.split('=')[-1].rstrip()
    return None
def is_valid(token, token_type=cluster_tokens_file):
    """
    Check whether a token is valid

    :param token: token to be checked
    :param token_type: the type of token (bootstrap or signature)
    :returns: True for a valid token, False otherwise
    """
    # any() short-circuits on the first matching line, mirroring the
    # early-return of the explicit loop.
    with open(token_type) as tokens_fp:
        return any(line.startswith(token) for line in tokens_fp)
def read_kubelet_args_file(node=None):
    """
    Return the contents of the kubelet arguments file

    :param node: node to add a host override (defaults to None)
    :returns: the kubelet args file, with a --hostname-override appended
        when a node is provided
    """
    filename = "{}/args/kubelet".format(snapdata_path)
    with open(filename) as fp:
        args = fp.read()
    if node:
        # Make sure the override lands on its own line even when the args
        # file does not end with a trailing newline; otherwise the flag
        # would be glued onto the last existing argument.
        if args and not args.endswith("\n"):
            args += "\n"
        args = "{}--hostname-override {}".format(args, node)
    return args
def get_node_ep(hostname, remote_addr):
    """
    Return the endpoint to be used for the node based by trying to resolve the hostname provided

    :param hostname: the provided hostname
    :param remote_addr: the address the request came from
    :returns: the node's location
    """
    try:
        # If the name resolves we can advertise it directly; otherwise fall
        # back to the address the join request came from.
        socket.gethostbyname(hostname)
        return hostname
    except socket.gaierror:
        return remote_addr
    # The trailing duplicate "return remote_addr" was unreachable and has
    # been removed.
@app.route('/{}/join'.format(CLUSTER_API), methods=['POST'])
def join_node():
    """
    Web call to join a node to the cluster

    Accepts token, hostname, port and callback either as a JSON body or as
    form fields, and replies with everything the joining node needs: the
    cluster CA, the etcd and apiserver endpoints, tokens and kubelet args.
    """
    if request.headers['Content-Type'] == 'application/json':
        token = request.json['token']
        hostname = request.json['hostname']
        port = request.json['port']
        callback_token = request.json['callback']
    else:
        token = request.form['token']
        hostname = request.form['hostname']
        port = request.form['port']
        callback_token = request.form['callback']
    if not is_valid(token):
        error_msg={"error": "Invalid token"}
        return Response(json.dumps(error_msg), mimetype='application/json', status=500)
    # The bootstrap token is single-use: register that we now expect a
    # certificate request signed with it, and retire it from the pool.
    add_token_to_certs_request(token)
    remove_token_from_file(token, cluster_tokens_file)
    node_addr = get_node_ep(hostname, request.remote_addr)
    node_ep = "{}:{}".format(node_addr, port)
    store_callback_token(node_ep, callback_token)
    ca = getCA()
    etcd_ep = get_arg('--listen-client-urls', 'etcd')
    api_port = get_arg('--secure-port', 'kube-apiserver')
    proxy_token = get_token('kube-proxy')
    kubelet_token = add_kubelet_token(hostname)
    # Restart the apiserver so the freshly added kubelet token is picked up.
    subprocess.check_call("systemctl restart snap.microk8s.daemon-apiserver.service".split())
    if node_addr != hostname:
        kubelet_args = read_kubelet_args_file(node_addr)
    else:
        kubelet_args = read_kubelet_args_file()
    return jsonify(ca=ca,
                   etcd=etcd_ep,
                   kubeproxy=proxy_token,
                   apiport=api_port,
                   kubelet=kubelet_token,
                   kubelet_args=kubelet_args,
                   hostname_override=node_addr)
@app.route('/{}/sign-cert'.format(CLUSTER_API), methods=['POST'])
def sign_cert():
    """
    Web call to sign a certificate

    The request must carry a one-time token previously registered by
    join_node; the token is retired before the CSR is signed.
    """
    if request.headers['Content-Type'] == 'application/json':
        token = request.json['token']
        cert_request = request.json['request']
    else:
        token = request.form['token']
        cert_request = request.form['request']
    if not is_valid(token, certs_request_tokens_file):
        error_msg={"error": "Invalid token"}
        return Response(json.dumps(error_msg), mimetype='application/json', status=500)
    # Single-use: consume the token before signing.
    remove_token_from_file(token, certs_request_tokens_file)
    signed_cert = sign_client_cert(cert_request, token)
    return jsonify(certificate=signed_cert)
@app.route('/{}/configure'.format(CLUSTER_API), methods=['POST'])
def configure():
    """
    Web call to configure the node

    Authenticated with the callback token.  Applies, in order: service
    argument updates and removals, optional service restarts, and addon
    enable/disable operations.
    """
    if request.headers['Content-Type'] == 'application/json':
        callback_token = request.json['callback']
        configuration = request.json
    else:
        callback_token = request.form['callback']
        configuration = json.loads(request.form['configuration'])
    if not is_valid(callback_token, callback_token_file):
        error_msg={"error": "Invalid token"}
        return Response(json.dumps(error_msg), mimetype='application/json', status=500)
    # We expect something like this:
    '''
    {
        "callback": "xyztoken"
        "service":
        [
            {
                "name": "kubelet",
                "arguments_remove":
                [
                    "myoldarg"
                ],
                "arguments_update":
                [
                    {"myarg": "myvalue"},
                    {"myarg2": "myvalue2"},
                    {"myarg3": "myvalue3"}
                ],
                "restart": False
            },
            {
                "name": "kube-proxy",
                "restart": True
            }
        ],
        "addon":
        [
            {
                "name": "gpu",
                "enable": True
            },
            {
                "name": "gpu",
                "disable": True
            }
        ]
    }
    '''
    if "service" in configuration:
        for service in configuration["service"]:
            print("{}".format(service["name"]))
            if "arguments_update" in service:
                print("Updating arguments")
                # Each entry is a {key: value} mapping to set in the args file.
                for argument in service["arguments_update"]:
                    for key, val in argument.items():
                        print("{} is {}".format(key, val))
                        update_service_argument(service["name"], key, val)
            if "arguments_remove" in service:
                print("Removing arguments")
                # A value of None removes the argument from the args file.
                for argument in service["arguments_remove"]:
                    print("{}".format(argument))
                    update_service_argument(service["name"], argument, None)
            if "restart" in service and service["restart"]:
                service_name = get_service_name(service["name"])
                print("restarting {}".format(service["name"]))
                subprocess.check_call("systemctl restart snap.microk8s.daemon-{}.service".format(service_name).split())
    if "addon" in configuration:
        for addon in configuration["addon"]:
            print("{}".format(addon["name"]))
            if "enable" in addon and addon["enable"]:
                print("Enabling {}".format(addon["name"]))
                subprocess.check_call("{}/microk8s-enable.wrapper {}".format(snap_path, addon["name"]).split())
            if "disable" in addon and addon["disable"]:
                print("Disabling {}".format(addon["name"]))
                subprocess.check_call("{}/microk8s-disable.wrapper {}".format(snap_path, addon["name"]).split())
    resp_date = {"result": "ok"}
    resp = Response(json.dumps(resp_date), status=200, mimetype='application/json')
    return resp
def usage():
    """Print command line usage help for the agent."""
    help_lines = (
        "Agent responsible for setting up a cluster. Arguments:",
        "-l, --listen: interfaces to listen to (defaults to {})".format(default_listen_interface),
        "-p, --port: port to listen to (default {})".format(default_port),
    )
    for help_line in help_lines:
        print(help_line)
if __name__ == '__main__':
    server_cert = "{SNAP_DATA}/certs/server.crt".format(SNAP_DATA=snapdata_path)
    server_key = "{SNAP_DATA}/certs/server.key".format(SNAP_DATA=snapdata_path)
    try:
        opts, args = getopt.gnu_getopt(sys.argv[1:], "hl:p:", ["help", "listen=", "port="])
    except getopt.GetoptError as err:
        print(err)  # will print something like "option -a not recognized"
        usage()
        sys.exit(2)
    port = default_port
    listen = default_listen_interface
    for o, a in opts:
        if o in ("-l", "--listen"):
            listen = a
        # BUGFIX: this branch was a separate "if", so a "-l" option fell
        # through to the final "else: assert False" and crashed the agent.
        elif o in ("-p", "--port"):
            port = a
        elif o in ("-h", "--help"):
            usage()
            sys.exit(1)
        else:
            # getopt only yields options from the spec above, so reaching
            # this is a programming error rather than bad user input.
            assert False, "unhandled option"
    app.run(host=listen, port=port, ssl_context=(server_cert, server_key))
| 32.767857 | 119 | 0.611512 |
import getopt
import json
import os
import shutil
import socket
import string
import random
import subprocess
import sys
from .common.utils import try_set_file_permissions
from flask import Flask, jsonify, request, abort, Response
app = Flask(__name__)
CLUSTER_API="cluster/api/v1.0"
snapdata_path = os.environ.get('SNAP_DATA')
snap_path = os.environ.get('SNAP_DATA')
cluster_tokens_file = "{}/credentials/cluster-tokens.txt".format(snapdata_path)
callback_tokens_file = "{}/credentials/callback-tokens.txt".format(snapdata_path)
callback_token_file = "{}/credentials/callback-token.txt".format(snapdata_path)
certs_request_tokens_file = "{}/credentials/certs-request-tokens.txt".format(snapdata_path)
default_port = 25000
default_listen_interface = "0.0.0.0"
def get_service_name(service):
    """Map a Kubernetes service name to its snap daemon name.

    The kube-* services are packaged as daemons without the "kube-" prefix
    (e.g. kube-proxy -> proxy); any other service keeps its own name.
    """
    if service in ["kube-proxy", "kube-apiserver", "kube-scheduler", "kube-controller-manager"]:
        # BUGFIX: the original indexed with a tuple (service[len("kube-"), :])
        # which raises TypeError on str; a plain slice is what was intended.
        return service[len("kube-"):]
    else:
        return service
def update_service_argument(service, key, val):
    """Set, update or delete an argument in a service's args file.

    Rewrites the file via a temp file and an atomic move.

    :param service: the service whose args file is edited
    :param key: the argument name (e.g. --hostname-override)
    :param val: the new value, or None to remove the argument
    """
    args_file = "{}/args/{}".format(snapdata_path, service)
    args_file_tmp = "{}/args/{}.tmp".format(snapdata_path, service)
    found = False
    with open(args_file_tmp, "w+") as bfp:
        with open(args_file, "r+") as fp:
            for _, line in enumerate(fp):
                if line.startswith(key):
                    # Replace the existing entry (or drop it when val is None).
                    if val is not None:
                        bfp.write("{}={}\n".format(key, val))
                    found = True
                else:
                    bfp.write("{}\n".format(line.rstrip()))
        if not found and val is not None:
            bfp.write("{}={}\n".format(key, val))
    try_set_file_permissions(args_file_tmp)
    shutil.move(args_file_tmp, args_file)
def store_callback_token(node, callback_token):
    """Store (or replace) the callback token for *node* in the
    callback-tokens file, writing through a 0600 temp file and an atomic move.

    :param node: the node endpoint ("host:port") the token belongs to
    :param callback_token: the token the node presents on callbacks
    """
    tmp_file = "{}.tmp".format(callback_tokens_file)
    if not os.path.isfile(callback_tokens_file):
        # Ensure the file exists with restrictive permissions before reading.
        open(callback_tokens_file, 'a+')
        os.chmod(callback_tokens_file, 0o600)
    with open(tmp_file, "w") as backup_fp:
        os.chmod(tmp_file, 0o600)
        found = False
        with open(callback_tokens_file, 'r+') as callback_fp:
            for _, line in enumerate(callback_fp):
                if line.startswith(node):
                    # Replace the existing entry for this node.
                    backup_fp.write("{} {}\n".format(node, callback_token))
                    found = True
                else:
                    backup_fp.write(line)
        if not found:
            backup_fp.write("{} {}\n".format(node, callback_token))
    try_set_file_permissions(tmp_file)
    shutil.move(tmp_file, callback_tokens_file)
def sign_client_cert(cert_request, token):
req_file = "{}/certs/request.{}.csr".format(snapdata_path, token)
sign_cmd = "openssl x509 -req -in {csr} -CA {SNAP_DATA}/certs/ca.crt -CAkey" \
" {SNAP_DATA}/certs/ca.key -CAcreateserial -out {SNAP_DATA}/certs/server.{token}.crt" \
" -days 100000".format(csr=req_file, SNAP_DATA=snapdata_path, token=token)
with open(req_file, 'w') as fp:
fp.write(cert_request)
subprocess.check_call(sign_cmd.split())
with open("{SNAP_DATA}/certs/server.{token}.crt".format(SNAP_DATA=snapdata_path, token=token)) as fp:
cert = fp.read()
return cert
def add_token_to_certs_request(token):
with open(certs_request_tokens_file, "a+") as fp:
fp.write("{}\n".format(token))
def remove_token_from_file(token, file):
backup_file = "{}.backup".format(file)
with open(backup_file, 'w') as back_fp:
with open(file, 'r') as fp:
for _, line in enumerate(fp):
if line.startswith(token):
continue
back_fp.write("{}".format(line))
shutil.copyfile(backup_file, file)
def get_token(name):
    """Return the token for *name* from the known_tokens file, or None.

    :param name: the name of the node
    :returns: the token or None(if name doesn't exist)
    """
    file = "{}/credentials/known_tokens.csv".format(snapdata_path)
    with open(file) as fp:
        # Scan every line: the previous implementation called readline() once
        # and therefore only ever matched the first entry in the file.
        for line in fp:
            if name in line:
                parts = line.split(',')
                return parts[0].rstrip()
    return None
def add_kubelet_token(hostname):
file = "{}/credentials/known_tokens.csv".format(snapdata_path)
old_token = get_token("system:node:{}".format(hostname))
if old_token:
return old_token.rstrip()
alpha = string.ascii_letters + string.digits
token = ''.join(random.SystemRandom().choice(alpha) for _ in range(32))
uid = ''.join(random.SystemRandom().choice(string.digits) for _ in range(8))
with open(file, 'a') as fp:
line = "{},system:node:{},kubelet,kubelet-{},\"system:nodes\"".format(token, hostname, uid)
fp.write(line + os.linesep)
return token.rstrip()
def getCA():
ca_file = "{}/certs/ca.crt".format(snapdata_path)
with open(ca_file) as fp:
ca = fp.read()
return ca
def get_arg(key, file):
filename = "{}/args/{}".format(snapdata_path, file)
with open(filename) as fp:
for _, line in enumerate(fp):
if line.startswith(key):
args = line.split(' ')
args = args[-1].split('=')
return args[-1].rstrip()
return None
def is_valid(token, token_type=cluster_tokens_file):
with open(token_type) as fp:
for _, line in enumerate(fp):
if line.startswith(token):
return True
return False
def read_kubelet_args_file(node=None):
filename = "{}/args/kubelet".format(snapdata_path)
with open(filename) as fp:
args = fp.read()
if node:
args = "{}--hostname-override {}".format(args, node)
return args
def get_node_ep(hostname, remote_addr):
try:
socket.gethostbyname(hostname)
return hostname
except socket.gaierror:
return remote_addr
return remote_addr
@app.route('/{}/join'.format(CLUSTER_API), methods=['POST'])
def join_node():
if request.headers['Content-Type'] == 'application/json':
token = request.json['token']
hostname = request.json['hostname']
port = request.json['port']
callback_token = request.json['callback']
else:
token = request.form['token']
hostname = request.form['hostname']
port = request.form['port']
callback_token = request.form['callback']
if not is_valid(token):
error_msg={"error": "Invalid token"}
return Response(json.dumps(error_msg), mimetype='application/json', status=500)
add_token_to_certs_request(token)
remove_token_from_file(token, cluster_tokens_file)
node_addr = get_node_ep(hostname, request.remote_addr)
node_ep = "{}:{}".format(node_addr, port)
store_callback_token(node_ep, callback_token)
ca = getCA()
etcd_ep = get_arg('--listen-client-urls', 'etcd')
api_port = get_arg('--secure-port', 'kube-apiserver')
proxy_token = get_token('kube-proxy')
kubelet_token = add_kubelet_token(hostname)
subprocess.check_call("systemctl restart snap.microk8s.daemon-apiserver.service".split())
if node_addr != hostname:
kubelet_args = read_kubelet_args_file(node_addr)
else:
kubelet_args = read_kubelet_args_file()
return jsonify(ca=ca,
etcd=etcd_ep,
kubeproxy=proxy_token,
apiport=api_port,
kubelet=kubelet_token,
kubelet_args=kubelet_args,
hostname_override=node_addr)
@app.route('/{}/sign-cert'.format(CLUSTER_API), methods=['POST'])
def sign_cert():
if request.headers['Content-Type'] == 'application/json':
token = request.json['token']
cert_request = request.json['request']
else:
token = request.form['token']
cert_request = request.form['request']
if not is_valid(token, certs_request_tokens_file):
error_msg={"error": "Invalid token"}
return Response(json.dumps(error_msg), mimetype='application/json', status=500)
remove_token_from_file(token, certs_request_tokens_file)
signed_cert = sign_client_cert(cert_request, token)
return jsonify(certificate=signed_cert)
@app.route('/{}/configure'.format(CLUSTER_API), methods=['POST'])
def configure():
if request.headers['Content-Type'] == 'application/json':
callback_token = request.json['callback']
configuration = request.json
else:
callback_token = request.form['callback']
configuration = json.loads(request.form['configuration'])
if not is_valid(callback_token, callback_token_file):
error_msg={"error": "Invalid token"}
return Response(json.dumps(error_msg), mimetype='application/json', status=500)
if "service" in configuration:
for service in configuration["service"]:
print("{}".format(service["name"]))
if "arguments_update" in service:
print("Updating arguments")
for argument in service["arguments_update"]:
for key, val in argument.items():
print("{} is {}".format(key, val))
update_service_argument(service["name"], key, val)
if "arguments_remove" in service:
print("Removing arguments")
for argument in service["arguments_remove"]:
print("{}".format(argument))
update_service_argument(service["name"], argument, None)
if "restart" in service and service["restart"]:
service_name = get_service_name(service["name"])
print("restarting {}".format(service["name"]))
subprocess.check_call("systemctl restart snap.microk8s.daemon-{}.service".format(service_name).split())
if "addon" in configuration:
for addon in configuration["addon"]:
print("{}".format(addon["name"]))
if "enable" in addon and addon["enable"]:
print("Enabling {}".format(addon["name"]))
subprocess.check_call("{}/microk8s-enable.wrapper {}".format(snap_path, addon["name"]).split())
if "disable" in addon and addon["disable"]:
print("Disabling {}".format(addon["name"]))
subprocess.check_call("{}/microk8s-disable.wrapper {}".format(snap_path, addon["name"]).split())
resp_date = {"result": "ok"}
resp = Response(json.dumps(resp_date), status=200, mimetype='application/json')
return resp
def usage():
print("Agent responsible for setting up a cluster. Arguments:")
print("-l, --listen: interfaces to listen to (defaults to {})".format(default_listen_interface))
print("-p, --port: port to listen to (default {})".format(default_port))
if __name__ == '__main__':
    server_cert = "{SNAP_DATA}/certs/server.crt".format(SNAP_DATA=snapdata_path)
    server_key = "{SNAP_DATA}/certs/server.key".format(SNAP_DATA=snapdata_path)
    try:
        opts, args = getopt.gnu_getopt(sys.argv[1:], "hl:p:", ["help", "listen=", "port="])
    except getopt.GetoptError as err:
        print(err)
        usage()
        sys.exit(2)
    port = default_port
    listen = default_listen_interface
    for o, a in opts:
        if o in ("-l", "--listen"):
            listen = a
        # BUGFIX: this branch was a separate "if", so a "-l" option fell
        # through to the final "else: assert False" and crashed the agent.
        elif o in ("-p", "--port"):
            port = a
        elif o in ("-h", "--help"):
            usage()
            sys.exit(1)
        else:
            # Unreachable for the option spec above; guards future edits.
            assert False, "unhandled option"
    app.run(host=listen, port=port, ssl_context=(server_cert, server_key))
| true | true |
f721099fd7f552499a35dce11281e52eec0ef465 | 887 | py | Python | OpenCV/Glyph/fontReplacePixel.py | GaryMK/Machine-Learning | 0eb89ed4c6ea712f518741fdcc63f1b2109b4212 | [
"MIT"
] | 1 | 2021-03-12T07:46:00.000Z | 2021-03-12T07:46:00.000Z | OpenCV/Glyph/fontReplacePixel.py | GaryMK/Kaggle | 0eb89ed4c6ea712f518741fdcc63f1b2109b4212 | [
"MIT"
] | null | null | null | OpenCV/Glyph/fontReplacePixel.py | GaryMK/Kaggle | 0eb89ed4c6ea712f518741fdcc63f1b2109b4212 | [
"MIT"
] | null | null | null | # @author: GaryMK
# @EMAIL: chenxingmk@gmail.com
# @Date: 2021/2/14 0:28
# @Version: 1.0
# @Description:
from PIL import Image, ImageDraw, ImageFont
import cv2
import os
def draw(pic):
    """Render source/<pic> as a mosaic of glyphs and save it to
    replaced/replaced_<pic>: each n x n pixel block is replaced by one
    character drawn in that block's sampled colour.
    """
    img = cv2.imread('source/' + pic)
    # OpenCV loads images as BGR; reorder the channels to RGB for PIL.
    img = img[:, :, (2, 1, 0)]
    blank = Image.new("RGB", [len(img[0]), len(img)], "white")
    drawObj = ImageDraw.Draw(blank)
    n = 10  # glyph cell size in pixels
    font = ImageFont.truetype('C:/Windows/Fonts/Microsoft YaHei UI/msyhbd.ttc', size=n - 1)
    for i in range(0, len(img), n):
        for j in range(0, len(img[i]), n):
            text = '晨星'
            # Packs the sampled pixel into an int and pokes it into the
            # drawer's ink attribute directly instead of passing fill=.
            # NOTE(review): assumes PIL interprets the int with channel 0 in
            # the low byte -- confirm the colour mapping is as intended.
            drawObj.ink = img[i][j][0] + img[i][j][1] * 256 + img[i][j][2] * 256 * 256
            # Cycle through the characters of `text` column by column.
            drawObj.text([j, i], text[int(j / n) % len(text)], font=font)
            print('完成处理——', i, j)
    blank.save('replaced/replaced_' + pic, 'jpeg')
# Process every image found in the source directory.
filelist = os.listdir('source')
for source_file in filelist:
    draw(source_file)
| 25.342857 | 91 | 0.563698 |
from PIL import Image, ImageDraw, ImageFont
import cv2
import os
def draw(pic):
img = cv2.imread('source/' + pic)
img = img[:, :, (2, 1, 0)]
blank = Image.new("RGB", [len(img[0]), len(img)], "white")
drawObj = ImageDraw.Draw(blank)
n = 10
font = ImageFont.truetype('C:/Windows/Fonts/Microsoft YaHei UI/msyhbd.ttc', size=n - 1)
for i in range(0, len(img), n):
for j in range(0, len(img[i]), n):
text = '晨星'
drawObj.ink = img[i][j][0] + img[i][j][1] * 256 + img[i][j][2] * 256 * 256
drawObj.text([j, i], text[int(j / n) % len(text)], font=font)
print('完成处理——', i, j)
blank.save('replaced/replaced_' + pic, 'jpeg')
filelist = os.listdir('source')
for file in filelist:
draw(file)
| true | true |
f7210a163a4280e095d1c9a4bc619202c8d534a1 | 29 | py | Python | nlpblock/model/__init__.py | graykode/nlpblock | d7cd9e6d7a0ee401b8fecdbbf3a0ac60bdb3c0d7 | [
"MIT"
] | 3 | 2019-02-27T13:41:26.000Z | 2021-05-13T07:02:39.000Z | nlpblock/model/__init__.py | graykode/nlpblock | d7cd9e6d7a0ee401b8fecdbbf3a0ac60bdb3c0d7 | [
"MIT"
] | null | null | null | nlpblock/model/__init__.py | graykode/nlpblock | d7cd9e6d7a0ee401b8fecdbbf3a0ac60bdb3c0d7 | [
"MIT"
] | 3 | 2019-03-02T02:19:46.000Z | 2021-10-03T18:46:52.000Z | from nlpblock.model import *
| 14.5 | 28 | 0.793103 | from nlpblock.model import *
| true | true |
f7210a7be7a7a9686e849af8805af4b5236ca87c | 1,558 | py | Python | Code/finance.py | Naghipourfar/TraderBot | 2604c9df7af7394dfab6a54ea9a65a1b0df6a0ce | [
"MIT"
] | 3 | 2019-02-06T09:45:39.000Z | 2022-01-15T04:48:07.000Z | Code/finance.py | Naghipourfar/TraderBot | 2604c9df7af7394dfab6a54ea9a65a1b0df6a0ce | [
"MIT"
] | null | null | null | Code/finance.py | Naghipourfar/TraderBot | 2604c9df7af7394dfab6a54ea9a65a1b0df6a0ce | [
"MIT"
] | 1 | 2020-01-07T05:20:24.000Z | 2020-01-07T05:20:24.000Z | import numpy as np
import pandas as pd
from pandas_datareader import data
import tensorflow as tf
import matplotlib.pyplot as plt
import keras
from keras.layers import Input, Dense, Dropout, BatchNormalization
from keras.models import Model
from keras.callbacks import History, CSVLogger
"""
Created by Mohsen Naghipourfar on 7/23/18.
Email : mn7697np@gmail.com or naghipourfar@ce.sharif.edu
Website: http://ce.sharif.edu/~naghipourfar
Github: https://github.com/naghipourfar
Skype: mn7697np
"""
tickers = ['AAPL', 'MSFT', '^GSPC'] # Apple, Microsoft and S&P500 index
# We would like all available data from 01/01/2000 until 12/31/2016.
start_date = '2010-01-01'
end_date = '2016-12-31'
panel_data = data.DataReader('INPX', 'google', start_date, end_date)
''' returns a panel object (3D Object)
1st dim: various fields of finance -> open, close, high, low, ...
2nd dim: date
3rd dim: instrument identifiers
'''
# df_data = panel_data.to_frame()
all_weekdays = pd.date_range(start_date, end_date, freq='B')
close = panel_data['close']
close = close.reindex(all_weekdays)
close = close.fillna(method='ffill')
short_rolling = close.rolling(window=20).mean()
long_rolling = close.rolling(window=100).mean()
fig, ax = plt.subplots(figsize=(16,9))
ax.plot(close.index, close, label='close')
ax.plot(short_rolling.index, short_rolling, label='20 days rolling')
ax.plot(long_rolling.index, long_rolling, label='100 days rolling')
ax.set_xlabel('Date')
ax.set_ylabel('Adjusted closing price ($)')
ax.legend()
plt.show()
| 28.327273 | 72 | 0.734275 | import numpy as np
import pandas as pd
from pandas_datareader import data
import tensorflow as tf
import matplotlib.pyplot as plt
import keras
from keras.layers import Input, Dense, Dropout, BatchNormalization
from keras.models import Model
from keras.callbacks import History, CSVLogger
tickers = ['AAPL', 'MSFT', '^GSPC']
start_date = '2010-01-01'
end_date = '2016-12-31'
panel_data = data.DataReader('INPX', 'google', start_date, end_date)
all_weekdays = pd.date_range(start_date, end_date, freq='B')
close = panel_data['close']
close = close.reindex(all_weekdays)
close = close.fillna(method='ffill')
short_rolling = close.rolling(window=20).mean()
long_rolling = close.rolling(window=100).mean()
fig, ax = plt.subplots(figsize=(16,9))
ax.plot(close.index, close, label='close')
ax.plot(short_rolling.index, short_rolling, label='20 days rolling')
ax.plot(long_rolling.index, long_rolling, label='100 days rolling')
ax.set_xlabel('Date')
ax.set_ylabel('Adjusted closing price ($)')
ax.legend()
plt.show()
| true | true |
f7210a7f9de0f160b00a0a52aaf0e082c37d647d | 1,685 | py | Python | lib/lib_apscheduler.py | ZhaoUncle/skstack | 9e00305f50fdd60125ec37884247b94b70a9020c | [
"Apache-2.0"
] | null | null | null | lib/lib_apscheduler.py | ZhaoUncle/skstack | 9e00305f50fdd60125ec37884247b94b70a9020c | [
"Apache-2.0"
] | null | null | null | lib/lib_apscheduler.py | ZhaoUncle/skstack | 9e00305f50fdd60125ec37884247b94b70a9020c | [
"Apache-2.0"
] | null | null | null | #! /usr/bin/env python
# -*- coding: utf-8 -*-
'''
Created on 2018年6月19日 @author: encodingl
'''
import time
import datetime
from apscheduler.schedulers.blocking import BlockingScheduler
from apscheduler.schedulers.background import BackgroundScheduler
def job1(f):
print(time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time())), f)
def job2(args1, args2, f):
print(f, args1, args2)
def job3(**args):
print(args)
'''
APScheduler支持以下三种定时任务:
cron: crontab类型任务
interval: 固定时间间隔任务
date: 基于日期时间的一次性任务
'''
if __name__ == "__main__":
scheduler = BlockingScheduler()
#循环任务示例
scheduler.add_job(job1, 'interval', seconds=3, args=('循环',), id='test_job1')
#定时任务示例
scheduler.add_job(job1, 'cron', second='*/4', args=('定时',), id='test_job2')
#一次性任务示例
scheduler.add_job(job1, next_run_time=(datetime.datetime.now() + datetime.timedelta(seconds=5)), args=('一次',), id='test_job3')
'''
传递参数的方式有元组(tuple)、列表(list)、字典(dict)
注意:不过需要注意采用元组传递参数时后边需要多加一个逗号
'''
# #基于list
# scheduler.add_job(job2, 'interval', seconds=5, args=['a','b','list'], id='test_job4')
# #基于tuple
# scheduler.add_job(job2, 'interval', seconds=5, args=('a','b','tuple',), id='test_job5')
# #基于dict
# scheduler.add_job(job3, 'interval', seconds=5, kwargs={'f':'dict', 'a':1,'b':2}, id='test_job6')
#带有参数的示例
# scheduler.add_job(job2, 'interval', seconds=5, args=['a','b'], id='test_job7')
# scheduler.add_job(job2, 'interval', seconds=5, args=('a','b',), id='test_job8')
# scheduler.add_job(job3, 'interval', seconds=5, kwargs={'a':1,'b':2}, id='test_job9')
print(scheduler.get_jobs())
scheduler.start()
| 29.051724 | 130 | 0.645697 |
import time
import datetime
from apscheduler.schedulers.blocking import BlockingScheduler
from apscheduler.schedulers.background import BackgroundScheduler
def job1(f):
print(time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(time.time())), f)
def job2(args1, args2, f):
print(f, args1, args2)
def job3(**args):
print(args)
if __name__ == "__main__":
scheduler = BlockingScheduler()
scheduler.add_job(job1, 'interval', seconds=3, args=('循环',), id='test_job1')
scheduler.add_job(job1, 'cron', second='*/4', args=('定时',), id='test_job2')
scheduler.add_job(job1, next_run_time=(datetime.datetime.now() + datetime.timedelta(seconds=5)), args=('一次',), id='test_job3')
print(scheduler.get_jobs())
scheduler.start()
| true | true |
f7210b036da2023fc30a4f620fdbe6743b369a69 | 4,058 | py | Python | movienightbot/db/models.py | squirrelo/MovieNightBot | 53fad77d533f13587d47d64fe7583db55529184a | [
"WTFPL"
] | 3 | 2020-02-22T14:22:21.000Z | 2021-02-04T19:44:38.000Z | movienightbot/db/models.py | squirrelo/MovieNightBot | 53fad77d533f13587d47d64fe7583db55529184a | [
"WTFPL"
] | 42 | 2020-02-10T03:42:29.000Z | 2022-02-12T23:43:43.000Z | movienightbot/db/models.py | squirrelo/MovieNightBot | 53fad77d533f13587d47d64fe7583db55529184a | [
"WTFPL"
] | 3 | 2020-02-14T23:22:24.000Z | 2020-06-06T21:00:14.000Z | import datetime
import peewee as pw
from . import BaseModel
class Server(BaseModel):
    """Per-server bot configuration and vote settings.

    Presumably one row per Discord guild (id is the guild id) -- confirm
    against the bot's setup code.
    """
    id = pw.IntegerField(primary_key=True)
    channel = pw.IntegerField(null=False)  # channel id the bot posts in
    movie_time = pw.TimeField(null=False, formats="%H:%M", default="12:00")  # HH:MM
    admin_role = pw.TextField(null=False, default="Movie Master")
    tie_option = pw.TextField(null=False, default="breaker")  # tie-resolution strategy
    num_movies_per_vote = pw.SmallIntegerField(null=False, default=8)
    num_votes_per_user = pw.SmallIntegerField(null=False, default=4)
    block_suggestions = pw.BooleanField(null=False, default=False)
    check_movie_names = pw.BooleanField(null=False, default=False)
    message_timeout = pw.SmallIntegerField(null=False, default=10)
    allow_tv_shows = pw.BooleanField(null=False, default=False)
    class Meta:
        table_name = "servers"
class IMDBInfo(BaseModel):
    """Cached IMDB metadata for a title, keyed by its IMDB id."""
    imdb_id = pw.TextField(primary_key=True)
    title = pw.TextField(null=False)
    canonical_title = pw.TextField()
    year = pw.IntegerField()
    thumbnail_poster_url = pw.TextField()
    full_size_poster_url = pw.TextField()
    class Meta:
        table_name = "imdb_info"
class Movie(BaseModel):
    """A movie suggestion on a server, with cumulative voting statistics."""
    id = pw.AutoField(primary_key=True)
    server = pw.ForeignKeyField(Server, backref="movies")
    movie_name = pw.TextField(null=False)
    suggested_by = pw.TextField(null=False)
    last_score = pw.FloatField(null=True)  # score from the most recent vote, if any
    num_votes_entered = pw.IntegerField(null=False, default=0)
    total_score = pw.FloatField(null=False, default=0.0)
    total_votes = pw.IntegerField(null=False, default=0)
    suggested_on = pw.TimestampField(
        utc=True, null=False, default=datetime.datetime.utcnow
    )
    watched_on = pw.TimestampField(utc=True, null=True, default=None)  # None until watched
    imdb_id = pw.ForeignKeyField(IMDBInfo, backref="movie_suggestions", null=True)
    class Meta:
        table_name = "movies"
        indexes = (
            # create a unique index on server and movie name
            (("server", "movie_name"), True),
        )
# Genre linked to Movie and not IMDBInfo because this allows non-IMDB servers to still manually add genres to movies
# and do votes by genre
class MovieGenre(BaseModel):
    """Genre tag attached to a movie suggestion."""
    movie_id = pw.ForeignKeyField(Movie, backref="movie_genres")
    genre = pw.TextField(null=False, index=True)
    class Meta:
        table_name = "movie_genre"
        indexes = (
            # create a unique index on movie and genre
            (("movie_id", "genre"), True),
        )
class Vote(BaseModel):
    """Tracks the actual vote going on in a server"""
    # Primary key on the server FK means at most one active vote per server.
    server_id = pw.ForeignKeyField(Server, backref="vote", primary_key=True)
    message_id = pw.IntegerField(
        null=True, help_text="The message ID holding the vote message on the server"
    )
    channel_id = pw.IntegerField(
        null=True, help_text="The channel ID holding the vote channel on the server"
    )
    class Meta:
        table_name = "votes"
class MovieVote(BaseModel):
    """Tracks the movies selected for voting on"""
    id = pw.AutoField(primary_key=True)
    vote = pw.ForeignKeyField(Vote, backref="movie_votes")
    movie = pw.ForeignKeyField(Movie, backref="+")  # "+" disables the reverse accessor
    score = pw.FloatField(null=False, default=0)
    emoji = pw.TextField(null=False)  # emoji assigned to this movie in the vote message
    class Meta:
        # NOTE(review): peewee reads Meta.table_name, not "tablename", so this
        # setting is silently ignored and the table is created under the
        # default name. Fixing the spelling would rename the live table and
        # requires a data migration -- confirm before changing.
        tablename = "movie_votes"
        indexes = (
            # create a unique index on vote and movie
            (("vote", "movie"), True),
        )
class UserVote(BaseModel):
    """Tracks the ranked votes of a user"""
    id = pw.AutoField(primary_key=True)
    movie_vote = pw.ForeignKeyField(MovieVote, backref="user_votes")
    user_id = pw.IntegerField(null=False)
    user_name = pw.TextField(null=False)
    vote_rank = pw.SmallIntegerField(
        null=False,
        help_text="The numbered vote for the user, 1 is highest rank. Useful for ranked-choice voting",
    )
    class Meta:
        # NOTE(review): peewee reads Meta.table_name, not "tablename", so this
        # setting is silently ignored (same issue as MovieVote). Fixing it
        # renames the live table and requires a migration -- confirm first.
        tablename = "user_votes"
        indexes = (
            # create a unique index on movie, user, and rank
            (("movie_vote", "user_id", "vote_rank"), True),
        )
| 32.99187 | 116 | 0.673238 | import datetime
import peewee as pw
from . import BaseModel
class Server(BaseModel):
id = pw.IntegerField(primary_key=True)
channel = pw.IntegerField(null=False)
movie_time = pw.TimeField(null=False, formats="%H:%M", default="12:00")
admin_role = pw.TextField(null=False, default="Movie Master")
tie_option = pw.TextField(null=False, default="breaker")
num_movies_per_vote = pw.SmallIntegerField(null=False, default=8)
num_votes_per_user = pw.SmallIntegerField(null=False, default=4)
block_suggestions = pw.BooleanField(null=False, default=False)
check_movie_names = pw.BooleanField(null=False, default=False)
message_timeout = pw.SmallIntegerField(null=False, default=10)
allow_tv_shows = pw.BooleanField(null=False, default=False)
class Meta:
table_name = "servers"
class IMDBInfo(BaseModel):
imdb_id = pw.TextField(primary_key=True)
title = pw.TextField(null=False)
canonical_title = pw.TextField()
year = pw.IntegerField()
thumbnail_poster_url = pw.TextField()
full_size_poster_url = pw.TextField()
class Meta:
table_name = "imdb_info"
class Movie(BaseModel):
id = pw.AutoField(primary_key=True)
server = pw.ForeignKeyField(Server, backref="movies")
movie_name = pw.TextField(null=False)
suggested_by = pw.TextField(null=False)
last_score = pw.FloatField(null=True)
num_votes_entered = pw.IntegerField(null=False, default=0)
total_score = pw.FloatField(null=False, default=0.0)
total_votes = pw.IntegerField(null=False, default=0)
suggested_on = pw.TimestampField(
utc=True, null=False, default=datetime.datetime.utcnow
)
watched_on = pw.TimestampField(utc=True, null=True, default=None)
imdb_id = pw.ForeignKeyField(IMDBInfo, backref="movie_suggestions", null=True)
class Meta:
table_name = "movies"
indexes = (
(("server", "movie_name"), True),
)
class MovieGenre(BaseModel):
    """Many-to-many style rows pairing a movie with one of its genres."""
    movie_id = pw.ForeignKeyField(Movie, backref="movie_genres")
    genre = pw.TextField(null=False, index=True)
    class Meta:
        table_name = "movie_genre"
        indexes = (
            # each (movie, genre) pair is stored at most once
            (("movie_id", "genre"), True),
        )
class Vote(BaseModel):
    """The single active vote for a server (server ID doubles as the PK)."""
    server_id = pw.ForeignKeyField(Server, backref="vote", primary_key=True)
    message_id = pw.IntegerField(
        null=True, help_text="The message ID holding the vote message on the server"
    )
    channel_id = pw.IntegerField(
        null=True, help_text="The channel ID holding the vote channel on the server"
    )
    class Meta:
        table_name = "votes"
class MovieVote(BaseModel):
    """One movie option that appears in a server's active vote."""
    id = pw.AutoField(primary_key=True)
    vote = pw.ForeignKeyField(Vote, backref="movie_votes")
    movie = pw.ForeignKeyField(Movie, backref="+")  # "+" disables the reverse accessor
    score = pw.FloatField(null=False, default=0)
    emoji = pw.TextField(null=False)  # reaction emoji used to select this entry
    class Meta:
        # Fixed: the option is spelled ``table_name`` in peewee; the previous
        # ``tablename`` was ignored, so the table was created as "movievote".
        table_name = "movie_votes"
        indexes = (
            # a movie can appear at most once in a given vote
            (("vote", "movie"), True),
        )
class UserVote(BaseModel):
    """A single ranked choice cast by one user for one MovieVote entry."""
    id = pw.AutoField(primary_key=True)
    movie_vote = pw.ForeignKeyField(MovieVote, backref="user_votes")
    user_id = pw.IntegerField(null=False)
    user_name = pw.TextField(null=False)
    vote_rank = pw.SmallIntegerField(
        null=False,
        help_text="The numbered vote for the user, 1 is highest rank. Useful for ranked-choice voting",
    )
    class Meta:
        # Fixed: peewee recognises ``table_name``, not ``tablename``; without
        # this the table was created under the default name "uservote".
        table_name = "user_votes"
        indexes = (
            # a user may assign a given rank to a given movie option only once
            (("movie_vote", "user_id", "vote_rank"), True),
        )
| true | true |
f7210b6d933a1774a42b9590a91353ac70a354f7 | 5,252 | py | Python | euler/large_sum.py | lsbardel/mathfun | 98e7c210409c2b5777e91059c3651cef4f3045dd | [
"BSD-3-Clause"
] | null | null | null | euler/large_sum.py | lsbardel/mathfun | 98e7c210409c2b5777e91059c3651cef4f3045dd | [
"BSD-3-Clause"
] | null | null | null | euler/large_sum.py | lsbardel/mathfun | 98e7c210409c2b5777e91059c3651cef4f3045dd | [
"BSD-3-Clause"
] | null | null | null | example = '''
37107287533902102798797998220837590246510135740250
46376937677490009712648124896970078050417018260538
74324986199524741059474233309513058123726617309629
91942213363574161572522430563301811072406154908250
23067588207539346171171980310421047513778063246676
89261670696623633820136378418383684178734361726757
28112879812849979408065481931592621691275889832738
44274228917432520321923589422876796487670272189318
47451445736001306439091167216856844588711603153276
70386486105843025439939619828917593665686757934951
62176457141856560629502157223196586755079324193331
64906352462741904929101432445813822663347944758178
92575867718337217661963751590579239728245598838407
58203565325359399008402633568948830189458628227828
80181199384826282014278194139940567587151170094390
35398664372827112653829987240784473053190104293586
86515506006295864861532075273371959191420517255829
71693888707715466499115593487603532921714970056938
54370070576826684624621495650076471787294438377604
53282654108756828443191190634694037855217779295145
36123272525000296071075082563815656710885258350721
45876576172410976447339110607218265236877223636045
17423706905851860660448207621209813287860733969412
81142660418086830619328460811191061556940512689692
51934325451728388641918047049293215058642563049483
62467221648435076201727918039944693004732956340691
15732444386908125794514089057706229429197107928209
55037687525678773091862540744969844508330393682126
18336384825330154686196124348767681297534375946515
80386287592878490201521685554828717201219257766954
78182833757993103614740356856449095527097864797581
16726320100436897842553539920931837441497806860984
48403098129077791799088218795327364475675590848030
87086987551392711854517078544161852424320693150332
59959406895756536782107074926966537676326235447210
69793950679652694742597709739166693763042633987085
41052684708299085211399427365734116182760315001271
65378607361501080857009149939512557028198746004375
35829035317434717326932123578154982629742552737307
94953759765105305946966067683156574377167401875275
88902802571733229619176668713819931811048770190271
25267680276078003013678680992525463401061632866526
36270218540497705585629946580636237993140746255962
24074486908231174977792365466257246923322810917141
91430288197103288597806669760892938638285025333403
34413065578016127815921815005561868836468420090470
23053081172816430487623791969842487255036638784583
11487696932154902810424020138335124462181441773470
63783299490636259666498587618221225225512486764533
67720186971698544312419572409913959008952310058822
95548255300263520781532296796249481641953868218774
76085327132285723110424803456124867697064507995236
37774242535411291684276865538926205024910326572967
23701913275725675285653248258265463092207058596522
29798860272258331913126375147341994889534765745501
18495701454879288984856827726077713721403798879715
38298203783031473527721580348144513491373226651381
34829543829199918180278916522431027392251122869539
40957953066405232632538044100059654939159879593635
29746152185502371307642255121183693803580388584903
41698116222072977186158236678424689157993532961922
62467957194401269043877107275048102390895523597457
23189706772547915061505504953922979530901129967519
86188088225875314529584099251203829009407770775672
11306739708304724483816533873502340845647058077308
82959174767140363198008187129011875491310547126581
97623331044818386269515456334926366572897563400500
42846280183517070527831839425882145521227251250327
55121603546981200581762165212827652751691296897789
32238195734329339946437501907836945765883352399886
75506164965184775180738168837861091527357929701337
62177842752192623401942399639168044983993173312731
32924185707147349566916674687634660915035914677504
99518671430235219628894890102423325116913619626622
73267460800591547471830798392868535206946944540724
76841822524674417161514036427982273348055556214818
97142617910342598647204516893989422179826088076852
87783646182799346313767754307809363333018982642090
10848802521674670883215120185883543223812876952786
71329612474782464538636993009049310363619763878039
62184073572399794223406235393808339651327408011116
66627891981488087797941876876144230030984490851411
60661826293682836764744779239180335110989069790714
85786944089552990653640447425576083659976645795096
66024396409905389607120198219976047599490197230297
64913982680032973156037120041377903785566085089252
16730939319872750275468906903707539413042652315011
94809377245048795150954100921645863754710598436791
78639167021187492431995700641917969777599028300699
15368713711936614952811305876380278410754449733078
40789923115535562561142322423255033685442488917353
44889911501440648020369068063960672322193204149535
41503128880339536053299340368006977710650566631954
81234880673210146739058568557934581403627822703280
82616570773948327592232845941706525094512325230608
22918802058777319719839450180888072429661980811197
77158542502016545090413245809786882778948721859617
72107838435069186155435662884062257473692284509516
20849603980134001723930671666823555245252804609722
53503534226472524250874054075591789781264330331690'''
if __name__ == '__main__':
numbers = example.split('\n')
v = sum((int(n) for n in numbers if n))
print(int(str(v)[:10]))
| 48.62963 | 53 | 0.967822 | example = '''
37107287533902102798797998220837590246510135740250
46376937677490009712648124896970078050417018260538
74324986199524741059474233309513058123726617309629
91942213363574161572522430563301811072406154908250
23067588207539346171171980310421047513778063246676
89261670696623633820136378418383684178734361726757
28112879812849979408065481931592621691275889832738
44274228917432520321923589422876796487670272189318
47451445736001306439091167216856844588711603153276
70386486105843025439939619828917593665686757934951
62176457141856560629502157223196586755079324193331
64906352462741904929101432445813822663347944758178
92575867718337217661963751590579239728245598838407
58203565325359399008402633568948830189458628227828
80181199384826282014278194139940567587151170094390
35398664372827112653829987240784473053190104293586
86515506006295864861532075273371959191420517255829
71693888707715466499115593487603532921714970056938
54370070576826684624621495650076471787294438377604
53282654108756828443191190634694037855217779295145
36123272525000296071075082563815656710885258350721
45876576172410976447339110607218265236877223636045
17423706905851860660448207621209813287860733969412
81142660418086830619328460811191061556940512689692
51934325451728388641918047049293215058642563049483
62467221648435076201727918039944693004732956340691
15732444386908125794514089057706229429197107928209
55037687525678773091862540744969844508330393682126
18336384825330154686196124348767681297534375946515
80386287592878490201521685554828717201219257766954
78182833757993103614740356856449095527097864797581
16726320100436897842553539920931837441497806860984
48403098129077791799088218795327364475675590848030
87086987551392711854517078544161852424320693150332
59959406895756536782107074926966537676326235447210
69793950679652694742597709739166693763042633987085
41052684708299085211399427365734116182760315001271
65378607361501080857009149939512557028198746004375
35829035317434717326932123578154982629742552737307
94953759765105305946966067683156574377167401875275
88902802571733229619176668713819931811048770190271
25267680276078003013678680992525463401061632866526
36270218540497705585629946580636237993140746255962
24074486908231174977792365466257246923322810917141
91430288197103288597806669760892938638285025333403
34413065578016127815921815005561868836468420090470
23053081172816430487623791969842487255036638784583
11487696932154902810424020138335124462181441773470
63783299490636259666498587618221225225512486764533
67720186971698544312419572409913959008952310058822
95548255300263520781532296796249481641953868218774
76085327132285723110424803456124867697064507995236
37774242535411291684276865538926205024910326572967
23701913275725675285653248258265463092207058596522
29798860272258331913126375147341994889534765745501
18495701454879288984856827726077713721403798879715
38298203783031473527721580348144513491373226651381
34829543829199918180278916522431027392251122869539
40957953066405232632538044100059654939159879593635
29746152185502371307642255121183693803580388584903
41698116222072977186158236678424689157993532961922
62467957194401269043877107275048102390895523597457
23189706772547915061505504953922979530901129967519
86188088225875314529584099251203829009407770775672
11306739708304724483816533873502340845647058077308
82959174767140363198008187129011875491310547126581
97623331044818386269515456334926366572897563400500
42846280183517070527831839425882145521227251250327
55121603546981200581762165212827652751691296897789
32238195734329339946437501907836945765883352399886
75506164965184775180738168837861091527357929701337
62177842752192623401942399639168044983993173312731
32924185707147349566916674687634660915035914677504
99518671430235219628894890102423325116913619626622
73267460800591547471830798392868535206946944540724
76841822524674417161514036427982273348055556214818
97142617910342598647204516893989422179826088076852
87783646182799346313767754307809363333018982642090
10848802521674670883215120185883543223812876952786
71329612474782464538636993009049310363619763878039
62184073572399794223406235393808339651327408011116
66627891981488087797941876876144230030984490851411
60661826293682836764744779239180335110989069790714
85786944089552990653640447425576083659976645795096
66024396409905389607120198219976047599490197230297
64913982680032973156037120041377903785566085089252
16730939319872750275468906903707539413042652315011
94809377245048795150954100921645863754710598436791
78639167021187492431995700641917969777599028300699
15368713711936614952811305876380278410754449733078
40789923115535562561142322423255033685442488917353
44889911501440648020369068063960672322193204149535
41503128880339536053299340368006977710650566631954
81234880673210146739058568557934581403627822703280
82616570773948327592232845941706525094512325230608
22918802058777319719839450180888072429661980811197
77158542502016545090413245809786882778948721859617
72107838435069186155435662884062257473692284509516
20849603980134001723930671666823555245252804609722
53503534226472524250874054075591789781264330331690'''
if __name__ == '__main__':
    # Project Euler #13: sum the 100 fifty-digit numbers above and print the
    # first ten digits of the total (exact thanks to Python's big ints).
    total = 0
    for line in example.split('\n'):
        if line:
            total += int(line)
    print(int(str(total)[:10]))
| true | true |
f7210c49de22ec515aedef5c7f5415db79dc84ea | 21,828 | py | Python | recipes/openscenegraph/all/conanfile.py | rockandsalt/conan-center-index | d739adcec3e4dd4c250eff559ceb738e420673dd | [
"MIT"
] | 2 | 2021-08-12T06:17:58.000Z | 2021-09-07T23:12:25.000Z | recipes/openscenegraph/all/conanfile.py | rockandsalt/conan-center-index | d739adcec3e4dd4c250eff559ceb738e420673dd | [
"MIT"
] | 9 | 2020-01-21T08:27:51.000Z | 2021-01-23T19:21:46.000Z | recipes/openscenegraph/all/conanfile.py | rockandsalt/conan-center-index | d739adcec3e4dd4c250eff559ceb738e420673dd | [
"MIT"
] | 2 | 2021-05-12T10:37:57.000Z | 2021-12-15T13:38:16.000Z | from conans import CMake, ConanFile, tools
from conans.errors import ConanInvalidConfiguration
import os
# Minimum Conan client version this recipe supports.
required_conan_version = ">=1.29.1"
class OpenSceneGraphConanFile(ConanFile):
    """Conan recipe for OpenSceneGraph (OSG), a 3D graphics toolkit.

    Each optional third-party integration is exposed as a ``with_*`` option,
    and every OSG library and osgdb plugin is modelled as its own cpp_info
    component in :meth:`package_info`.
    """
    name = "openscenegraph"
    description = "OpenSceneGraph is an open source high performance 3D graphics toolkit"
    topics = ("openscenegraph", "graphics")
    url = "https://github.com/conan-io/conan-center-index"
    homepage = "http://www.openscenegraph.org"
    license = "LGPL-2.1-only", "WxWindows-exception-3.1"
    settings = "os", "arch", "compiler", "build_type"
    options = {
        "shared": [True, False],
        "fPIC": [True, False],
        "build_applications": [True, False],
        "enable_notify": [True, False],
        "enable_deprecated_api": [True, False],
        "enable_readfile": [True, False],
        "enable_ref_ptr_implicit_output_conversion": [True, False],
        "enable_ref_ptr_safe_dereference": [True, False],
        "enable_envvar_support": [True, False],
        "enable_windowing_system": [True, False],
        "enable_deprecated_serializers": [True, False],
        "use_fontconfig": [True, False],
        "with_asio": [True, False],
        "with_curl": [True, False],
        "with_dcmtk": [True, False],
        "with_freetype": [True, False],
        "with_gdal": [True, False],
        "with_gif": [True, False],
        "with_gta": [True, False],
        "with_jasper": [True, False],
        "with_jpeg": [True, False],
        "with_openexr": [True, False],
        "with_png": [True, False],
        "with_tiff": [True, False],
        "with_zlib": [True, False],
    }
    default_options = {
        "shared": False,
        "fPIC": True,
        "build_applications": False,
        "enable_notify": True,
        "enable_deprecated_api": False,
        "enable_readfile": True,
        "enable_ref_ptr_implicit_output_conversion": True,
        "enable_ref_ptr_safe_dereference": True,
        "enable_envvar_support": True,
        "enable_windowing_system": True,
        "enable_deprecated_serializers": False,
        "use_fontconfig": True,
        "with_asio": False,
        "with_curl": False,
        "with_dcmtk": False,
        "with_freetype": True,
        "with_gdal": False,
        "with_gif": True,
        "with_gta": False,
        "with_jasper": False,
        "with_jpeg": True,
        "with_openexr": False,
        "with_png": True,
        "with_tiff": True,
        "with_zlib": True,
    }
    # short_paths works around MAX_PATH issues on Windows for OSG's deep tree.
    short_paths = True
    exports_sources = "CMakeLists.txt", "patches/*.patch"
    generators = "cmake", "cmake_find_package"
    @property
    def _source_subfolder(self):
        # Folder the upstream sources are extracted into by source().
        return "source_subfolder"
    def config_options(self):
        """Drop or re-default options that do not apply to the target OS."""
        if self.settings.os == "Windows":
            del self.options.fPIC
            del self.options.with_asio
            # Default to false with fontconfig until it is supported on Windows
            self.options.use_fontconfig = False
        if tools.is_apple_os(self.settings.os):
            # osg uses imageio on Apple platforms
            del self.options.with_gif
            del self.options.with_jpeg
            del self.options.with_png
            # imageio supports tiff files so the tiff plugin isn't needed on Apple platforms
            self.options.with_tiff = False
    def configure(self):
        """Remove options made irrelevant by other option values."""
        if self.options.shared:
            del self.options.fPIC
        if not self.options.with_zlib:
            # These require zlib support
            del self.options.with_openexr
            del self.options.with_png
            del self.options.with_dcmtk
    def validate(self):
        """Reject configurations known not to build."""
        if self.options.get_safe("with_asio", False):
            raise ConanInvalidConfiguration("ASIO support in OSG is broken, see https://github.com/openscenegraph/OpenSceneGraph/issues/921")
        if hasattr(self, "settings_build") and tools.cross_building(self):
            raise ConanInvalidConfiguration("openscenegraph recipe cannot be cross-built yet. Contributions are welcome.")
    def requirements(self):
        """Declare dependencies; most are gated on the with_* options."""
        if self.options.enable_windowing_system and self.settings.os == "Linux":
            self.requires("xorg/system")
        self.requires("opengl/system")
        if self.options.use_fontconfig:
            self.requires("fontconfig/2.13.93")
        if self.options.get_safe("with_asio", False):
            # Should these be private requires?
            self.requires("asio/1.18.1")
            self.requires("boost/1.75.0")
        if self.options.with_curl:
            self.requires("libcurl/7.74.0")
        if self.options.get_safe("with_dcmtk"):
            self.requires("dcmtk/3.6.5")
        if self.options.with_freetype:
            self.requires("freetype/2.10.4")
        if self.options.with_gdal:
            self.requires("gdal/3.1.4")
        if self.options.get_safe("with_gif"):
            self.requires("giflib/5.2.1")
        if self.options.with_gta:
            self.requires("libgta/1.2.1")
        if self.options.with_jasper:
            self.requires("jasper/2.0.24")
        if self.options.get_safe("with_jpeg"):
            self.requires("libjpeg/9d")
        if self.options.get_safe("with_openexr"):
            self.requires("openexr/2.5.3")
        if self.options.get_safe("with_png"):
            self.requires("libpng/1.6.37")
        if self.options.with_tiff:
            self.requires("libtiff/4.2.0")
        if self.options.with_zlib:
            self.requires("zlib/1.2.11")
    def source(self):
        """Download and extract the upstream release into _source_subfolder."""
        tools.get(**self.conan_data["sources"][self.version],
                  strip_root=True, destination=self._source_subfolder)
    def _patch_sources(self):
        """Apply conandata patches and drop find-modules Conan replaces."""
        for patch in self.conan_data["patches"].get(self.version, []):
            tools.patch(**patch)
        for package in ("Fontconfig", "Freetype", "GDAL", "GIFLIB", "GTA", "Jasper", "OpenEXR"):
            # Prefer conan's find package scripts over osg's
            os.unlink(os.path.join(self._source_subfolder, "CMakeModules", "Find{}.cmake".format(package)))
    def _configured_cmake(self):
        """Lazily create, configure, and cache the CMake helper."""
        if hasattr(self, "_cmake"):
            return self._cmake
        self._cmake = cmake = CMake(self)
        cmake.definitions["USE_3RDPARTY_BIN"] = False
        cmake.definitions["DYNAMIC_OPENSCENEGRAPH"] = self.options.shared
        cmake.definitions["DYNAMIC_OPENTHREADS"] = self.options.shared
        cmake.definitions["BUILD_OSG_APPLICATIONS"] = self.options.build_applications
        cmake.definitions["BUILD_OSG_EXAMPLES"] = False
        cmake.definitions["OSG_NOTIFY_DISABLED"] = not self.options.enable_notify
        cmake.definitions["OSG_USE_DEPRECATED_API"] = self.options.enable_deprecated_api
        cmake.definitions["OSG_PROVIDE_READFILE"] = self.options.enable_readfile
        cmake.definitions["OSG_USE_REF_PTR_IMPLICIT_OUTPUT_CONVERSION"] = self.options.enable_ref_ptr_implicit_output_conversion
        cmake.definitions["OSG_USE_REF_PTR_SAFE_DEREFERENCE"] = self.options.enable_ref_ptr_safe_dereference
        cmake.definitions["OSG_ENVVAR_SUPPORTED"] = self.options.enable_envvar_support
        if not self.options.enable_windowing_system:
            cmake.definitions["OSG_WINDOWING_SYSTEM"] = None
        cmake.definitions["BUILD_OSG_DEPRECATED_SERIALIZERS"] = self.options.enable_deprecated_serializers
        cmake.definitions["OSG_TEXT_USE_FONTCONFIG"] = self.options.use_fontconfig
        # Disable option dependencies unless we have a package for them
        cmake.definitions["OSG_WITH_FREETYPE"] = self.options.with_freetype
        cmake.definitions["OSG_WITH_OPENEXR"] = self.options.get_safe("with_openexr", False)
        cmake.definitions["OSG_WITH_INVENTOR"] = False
        cmake.definitions["OSG_WITH_JASPER"] = self.options.with_jasper
        cmake.definitions["OSG_WITH_OPENCASCADE"] = False
        cmake.definitions["OSG_WITH_FBX"] = False
        cmake.definitions["OSG_WITH_ZLIB"] = self.options.with_zlib
        cmake.definitions["OSG_WITH_GDAL"] = self.options.with_gdal
        cmake.definitions["OSG_WITH_GTA"] = self.options.with_gta
        cmake.definitions["OSG_WITH_CURL"] = self.options.with_curl
        cmake.definitions["OSG_WITH_LIBVNCSERVER"] = False
        cmake.definitions["OSG_WITH_DCMTK"] = self.options.get_safe("with_dcmtk", False)
        cmake.definitions["OSG_WITH_FFMPEG"] = False
        cmake.definitions["OSG_WITH_DIRECTSHOW"] = False
        cmake.definitions["OSG_WITH_SDL"] = False
        cmake.definitions["OSG_WITH_POPPLER"] = False
        cmake.definitions["OSG_WITH_RSVG"] = False
        cmake.definitions["OSG_WITH_NVTT"] = False
        cmake.definitions["OSG_WITH_ASIO"] = self.options.get_safe("with_asio", False)
        cmake.definitions["OSG_WITH_ZEROCONF"] = False
        cmake.definitions["OSG_WITH_LIBLAS"] = False
        cmake.definitions["OSG_WITH_GIF"] = self.options.get_safe("with_gif", False)
        cmake.definitions["OSG_WITH_JPEG"] = self.options.get_safe("with_jpeg", False)
        cmake.definitions["OSG_WITH_PNG"] = self.options.get_safe("with_png", False)
        cmake.definitions["OSG_WITH_TIFF"] = self.options.with_tiff
        if self.settings.os == "Windows":
            # osg has optional quicktime support on Windows
            cmake.definitions["CMAKE_DISABLE_FIND_PACKAGE_QuickTime"] = True
            cmake.definitions["OSG_MSVC_VERSIONED_DLL"] = False
        cmake.configure()
        return cmake
    def build(self):
        """Patch sources then drive the CMake build."""
        self._patch_sources()
        self._configured_cmake().build()
    def package(self):
        """Install build outputs and prune files CCI does not ship."""
        self._configured_cmake().install()
        self.copy(pattern="LICENSE.txt", dst="licenses", src=self._source_subfolder)
        tools.rmdir(os.path.join(self.package_folder, "lib", "pkgconfig"))
        tools.remove_files_by_mask(self.package_folder, "*.pdb")
    def package_info(self):
        """Declare one cpp_info component per OSG library and osgdb plugin."""
        # FindOpenSceneGraph.cmake is shipped with cmake and is a traditional cmake script
        # It doesn't setup targets and only provides a few variables:
        # - OPENSCENEGRAPH_FOUND
        # - OPENSCENEGRAPH_VERSION
        # - OPENSCENEGRAPH_INCLUDE_DIRS
        # - OPENSCENEGRAPH_LIBRARIES
        # Unfortunately, the cmake_find_package generators don't currently allow directly setting variables,
        # but it will set the last three of these if the name of the package is OPENSCENEGRAPH (it uses
        # the filename for the first, so OpenSceneGraph_FOUND gets set, not OPENSCENEGRAPH_FOUND)
        # TODO: set OPENSCENEGRAPH_FOUND in cmake_find_package and cmake_find_package_multi
        self.cpp_info.filenames["cmake_find_package"] = "OpenSceneGraph"
        self.cpp_info.filenames["cmake_find_package_multi"] = "OpenSceneGraph"
        self.cpp_info.names["cmake_find_package"] = "OPENSCENEGRAPH"
        self.cpp_info.names["cmake_find_package_multi"] = "OPENSCENEGRAPH"
        # OSG appends a per-build-type postfix to library file names.
        if self.settings.build_type == "Debug":
            postfix = "d"
        elif self.settings.build_type == "RelWithDebInfo":
            postfix = "rd"
        elif self.settings.build_type == "MinSizeRel":
            postfix = "s"
        else:
            postfix = ""
        def setup_plugin(plugin):
            # Register an osgdb plugin component; static builds link the lib
            # from the versioned osgPlugins-<version> directory.
            lib = "osgdb_" + plugin
            plugin_library = self.cpp_info.components[lib]
            plugin_library.libs = [] if self.options.shared else [lib + postfix]
            plugin_library.requires = ["OpenThreads", "osg", "osgDB", "osgUtil"]
            if not self.options.shared:
                plugin_library.libdirs = [os.path.join("lib", "osgPlugins-{}".format(self.version))]
            return plugin_library
        def setup_serializers(lib):
            # Register the (deprecated) serializer plugins generated for a library.
            plugins = []
            if lib not in ("osgDB", "osgWidget", "osgPresentation"):
                plugins.append("serializers_{}".format(lib.lower()))
            if self.options.enable_deprecated_serializers:
                if lib not in ("osgUtil", "osgDB", "osgGA", "osgManipulator", "osgUI", "osgPresentation"):
                    plugins.append("deprecated_{}".format(lib.lower()))
            for plugin in plugins:
                setup_plugin(plugin).requires.append(lib)
        def setup_library(lib):
            # Register a core OSG library component plus its serializers.
            library = self.cpp_info.components[lib]
            library.libs = [lib + postfix]
            library.names["pkg_config"] = "openscenegraph-{}".format(lib)
            setup_serializers(lib)
            return library
        # Core libraries
        # requires obtained from osg's source code
        # TODO: FindOpenThreads.cmake is shipped with CMake, so we should generate separate
        # files for it with cmake_find_package and cmake_find_package_multi
        library = self.cpp_info.components["OpenThreads"]
        library.libs = ["OpenThreads" + postfix]
        library.names["pkg_config"] = "openthreads"
        if self.settings.os == "Linux":
            library.system_libs = ["pthread"]
        library = setup_library("osg")
        library.requires = ["OpenThreads", "opengl::opengl"]
        if self.settings.os == "Linux":
            library.system_libs = ["m", "rt", "dl"]
        if not self.options.shared:
            library.defines.append("OSG_LIBRARY_STATIC")
        library = setup_library("osgDB")
        library.requires = ["osg", "osgUtil", "OpenThreads"]
        if self.settings.os == "Linux":
            library.system_libs = ["dl"]
        elif self.settings.os == "Macos":
            library.frameworks = ["Carbon", "Cocoa"]
        if self.options.with_zlib:
            library.requires.append("zlib::zlib")
        setup_library("osgUtil").requires = ["osg", "OpenThreads"]
        setup_library("osgGA").requires = ["osgDB", "osgUtil", "osg", "OpenThreads"]
        library = setup_library("osgText")
        library.requires = ["osgDB", "osg", "osgUtil", "OpenThreads"]
        if self.options.use_fontconfig:
            library.requires.append("fontconfig::fontconfig")
        library = setup_library("osgViewer")
        library.requires = ["osgGA", "osgText", "osgDB", "osgUtil", "osg"]
        if self.options.enable_windowing_system:
            if self.settings.os == "Linux":
                library.requires.append("xorg::xorg")
            elif tools.is_apple_os(self.settings.os):
                library.frameworks = ["Cocoa"]
        if self.settings.os == "Windows":
            library.system_libs = ["gdi32"]
        setup_library("osgAnimation").requires = ["osg", "osgText", "osgGA", "osgViewer", "OpenThreads"]
        setup_library("osgFX").requires = ["osgUtil", "osgDB", "osg", "OpenThreads"]
        setup_library("osgManipulator").requires = ["osgViewer", "osgGA", "osgUtil", "osg", "OpenThreads"]
        setup_library("osgParticle").requires = ["osgUtil", "osgDB", "osg", "OpenThreads"]
        setup_library("osgUI").requires = ["osgDB", "osgGA", "osgUtil", "osgText", "osgViewer", "osg", "OpenThreads"]
        setup_library("osgVolume").requires = ["osgGA", "osgDB", "osgUtil", "osg", "OpenThreads"]
        setup_library("osgShadow").requires = ["osgUtil", "osgDB", "osg", "OpenThreads"]
        setup_library("osgSim").requires = ["osgText", "osgUtil", "osgDB", "osg", "OpenThreads"]
        setup_library("osgTerrain").requires = ["osgUtil", "osgDB", "osg", "OpenThreads"]
        setup_library("osgWidget").requires = ["osgText", "osgViewer", "osgDB", "osg", "OpenThreads"]
        setup_library("osgPresentation").requires = ["osgViewer", "osgUI", "osgWidget", "osgManipulator", "osgVolume", "osgFX", "osgText", "osgGA", "osgUtil", "osgDB", "osg", "OpenThreads"]
        # Start of plugins
        # NodeKit/Psudo loader plugins
        setup_plugin("osga")
        setup_plugin("rot")
        setup_plugin("scale")
        setup_plugin("trans")
        setup_plugin("normals")
        setup_plugin("revisions")
        setup_plugin("osgviewer").requires.append("osgViewer")
        setup_plugin("osgshadow").requires.append("osgShadow")
        setup_plugin("osgterrain").requires.append("osgTerrain")
        # Main native plugins
        setup_plugin("osg")
        plugin = setup_plugin("ive")
        plugin.requires.extend(("osgSim", "osgFX", "osgText", "osgTerrain", "osgVolume"))
        if self.options.with_zlib:
            plugin.requires.append("zlib::zlib")
        # Viewer plugins
        setup_plugin("cfg").requires.append("osgViewer")
        # Shader plugins
        setup_plugin("glsl")
        # Image plugins
        setup_plugin("rgb")
        setup_plugin("bmp")
        setup_plugin("pnm")
        setup_plugin("dds")
        setup_plugin("tga")
        setup_plugin("hdr")
        setup_plugin("dot")
        setup_plugin("vtf")
        setup_plugin("ktx")
        if self.options.get_safe("with_jpeg"):
            setup_plugin("jpeg").requires.append("libjpeg::libjpeg")
        if self.options.with_jasper:
            setup_plugin("jp2").requires.append("jasper::jasper")
        if self.options.get_safe("with_openexr"):
            setup_plugin("exr").requires.append("openexr::openexr")
        if self.options.get_safe("with_gif"):
            setup_plugin("gif").requires.append("giflib::giflib")
        if self.options.get_safe("with_png"):
            setup_plugin("png").requires.extend(("libpng::libpng", "zlib::zlib"))
        if self.options.with_tiff:
            setup_plugin("tiff").requires.append("libtiff::libtiff")
        if self.options.with_gdal:
            setup_plugin("gdal").requires.extend(("osgTerrain", "gdal::gdal"))
            setup_plugin("ogr").requires.append("gdal::gdal")
        if self.options.with_gta:
            setup_plugin("gta").requires.append("libgta::libgta")
        # 3D Image plugins
        if self.options.get_safe("with_dcmtk"):
            plugin = setup_plugin("dicom")
            plugin.requires.extend(("osgVolume", "dcmtk::dcmtk"))
            if self.settings.os == "Windows":
                plugin.system_libs = ["wsock32", "ws2_32"]
        # 3rd party 3d plugins
        setup_plugin("3dc")
        setup_plugin("p3d").requires.extend(("osgGA", "osgText", "osgVolume", "osgFX", "osgViewer", "osgPresentation"))
        if self.options.with_curl:
            plugin = setup_plugin("curl")
            plugin.requires.append("libcurl::libcurl")
            if self.options.with_zlib:
                plugin.requires.append("zlib::zlib")
        if self.options.with_zlib:
            setup_plugin("gz").requires.append("zlib::zlib")
        # with_inventor
        # setup_plugin("iv")
        # with_collada
        # setup_plugin("dae")
        # with_fbx
        # setup_plugin("fbx")
        # with_opencascade
        # setup_plugin("opencascade")
        setup_plugin("bvh").requires.append("osgAnimation")
        setup_plugin("x")
        setup_plugin("dxf").requires.append("osgText")
        setup_plugin("openflight").requires.append("osgSim")
        setup_plugin("obj")
        setup_plugin("pic")
        setup_plugin("stl")
        setup_plugin("3ds")
        setup_plugin("ac")
        setup_plugin("pov")
        setup_plugin("logo")
        setup_plugin("lws")
        setup_plugin("md2")
        setup_plugin("osgtgz")
        setup_plugin("tgz")
        setup_plugin("shp").requires.extend(("osgSim", "osgTerrain"))
        setup_plugin("txf").requires.append("osgText")
        setup_plugin("bsp")
        setup_plugin("mdl")
        setup_plugin("gles").requires.extend(("osgUtil", "osgAnimation"))
        setup_plugin("osgjs").requires.extend(("osgAnimation", "osgSim"))
        setup_plugin("lwo").requires.append("osgFX")
        setup_plugin("ply")
        setup_plugin("txp").requires.extend(("osgSim", "osgText"))
        # with_ffmpeg
        # setup_plugin("ffmpeg")
        # with_gstreamer
        # setup_plugin("gstreamer")
        # with_directshow
        # setup_plugin("directshow")
        if tools.is_apple_os(self.settings.os):
            setup_plugin("imageio").frameworks = ["Accelerate"]
        if ((self.settings.os == "Macos" and self.settings.os.version and tools.Version(self.settings.os.version) >= "10.8")
                or (self.settings.os == "iOS" and tools.Version(self.settings.os.version) >= "6.0")):
            plugin = setup_plugin("avfoundation")
            plugin.requires.append("osgViewer")
            plugin.frameworks = ["AVFoundation", "Cocoa", "CoreVideo", "CoreMedia", "QuartzCore"]
        if self.settings.os == "Macos" and self.settings.os.version and tools.Version(self.settings.os.version) <= "10.6" and self.settings.arch == "x86":
            setup_plugin("qt").frameworks = ["QuickTime"]
        if self.settings.os == "Macos" and self.settings.arch == "x86":
            plugin = setup_plugin("QTKit")
            plugin.requires.append("osgViewer")
            plugin.frameworks = ["QTKit", "Cocoa", "QuickTime", "CoreVideo"]
        # with_nvtt
        # setup_plugin("nvtt")
        if self.options.with_freetype:
            setup_plugin("freetype").requires.extend(("osgText", "freetype::freetype"))
        if self.options.with_zlib:
            setup_plugin("zip")
        # with_svg
        # setup_plugin("svg")
        # with_pdf/poppler
        # setup_plugin("pdf")
        # with_vnc
        # setup_plugin("vnc")
        setup_plugin("pvr")
        plugin = setup_plugin("osc")
        plugin.requires.append("osgGA")
        if self.settings.os == "Windows":
            plugin.system_libs = ["ws2_32", "winmm"]
        setup_plugin("trk")
        setup_plugin("tf")
        # with_blas
        # setup_plugin("las")
        setup_plugin("lua")
        # with_sdl
        # setup_plugin("sdl")
        if self.options.get_safe("with_asio", False):
            setup_plugin("resthttp").requires.extend(("osgPresentation", "asio::asio", "boost::boost"))
        # with_zeroconf
        # setup_plugin("zeroconf")
| 40.8 | 189 | 0.624748 | from conans import CMake, ConanFile, tools
from conans.errors import ConanInvalidConfiguration
import os
required_conan_version = ">=1.29.1"
class OpenSceneGraphConanFile(ConanFile):
name = "openscenegraph"
description = "OpenSceneGraph is an open source high performance 3D graphics toolkit"
topics = ("openscenegraph", "graphics")
url = "https://github.com/conan-io/conan-center-index"
homepage = "http://www.openscenegraph.org"
license = "LGPL-2.1-only", "WxWindows-exception-3.1"
settings = "os", "arch", "compiler", "build_type"
options = {
"shared": [True, False],
"fPIC": [True, False],
"build_applications": [True, False],
"enable_notify": [True, False],
"enable_deprecated_api": [True, False],
"enable_readfile": [True, False],
"enable_ref_ptr_implicit_output_conversion": [True, False],
"enable_ref_ptr_safe_dereference": [True, False],
"enable_envvar_support": [True, False],
"enable_windowing_system": [True, False],
"enable_deprecated_serializers": [True, False],
"use_fontconfig": [True, False],
"with_asio": [True, False],
"with_curl": [True, False],
"with_dcmtk": [True, False],
"with_freetype": [True, False],
"with_gdal": [True, False],
"with_gif": [True, False],
"with_gta": [True, False],
"with_jasper": [True, False],
"with_jpeg": [True, False],
"with_openexr": [True, False],
"with_png": [True, False],
"with_tiff": [True, False],
"with_zlib": [True, False],
}
default_options = {
"shared": False,
"fPIC": True,
"build_applications": False,
"enable_notify": True,
"enable_deprecated_api": False,
"enable_readfile": True,
"enable_ref_ptr_implicit_output_conversion": True,
"enable_ref_ptr_safe_dereference": True,
"enable_envvar_support": True,
"enable_windowing_system": True,
"enable_deprecated_serializers": False,
"use_fontconfig": True,
"with_asio": False,
"with_curl": False,
"with_dcmtk": False,
"with_freetype": True,
"with_gdal": False,
"with_gif": True,
"with_gta": False,
"with_jasper": False,
"with_jpeg": True,
"with_openexr": False,
"with_png": True,
"with_tiff": True,
"with_zlib": True,
}
short_paths = True
exports_sources = "CMakeLists.txt", "patches/*.patch"
generators = "cmake", "cmake_find_package"
@property
def _source_subfolder(self):
return "source_subfolder"
def config_options(self):
if self.settings.os == "Windows":
del self.options.fPIC
del self.options.with_asio
self.options.use_fontconfig = False
if tools.is_apple_os(self.settings.os):
del self.options.with_gif
del self.options.with_jpeg
del self.options.with_png
self.options.with_tiff = False
def configure(self):
if self.options.shared:
del self.options.fPIC
if not self.options.with_zlib:
# These require zlib support
del self.options.with_openexr
del self.options.with_png
del self.options.with_dcmtk
def validate(self):
if self.options.get_safe("with_asio", False):
raise ConanInvalidConfiguration("ASIO support in OSG is broken, see https://github.com/openscenegraph/OpenSceneGraph/issues/921")
if hasattr(self, "settings_build") and tools.cross_building(self):
raise ConanInvalidConfiguration("openscenegraph recipe cannot be cross-built yet. Contributions are welcome.")
def requirements(self):
if self.options.enable_windowing_system and self.settings.os == "Linux":
self.requires("xorg/system")
self.requires("opengl/system")
if self.options.use_fontconfig:
self.requires("fontconfig/2.13.93")
if self.options.get_safe("with_asio", False):
# Should these be private requires?
self.requires("asio/1.18.1")
self.requires("boost/1.75.0")
if self.options.with_curl:
self.requires("libcurl/7.74.0")
if self.options.get_safe("with_dcmtk"):
self.requires("dcmtk/3.6.5")
if self.options.with_freetype:
self.requires("freetype/2.10.4")
if self.options.with_gdal:
self.requires("gdal/3.1.4")
if self.options.get_safe("with_gif"):
self.requires("giflib/5.2.1")
if self.options.with_gta:
self.requires("libgta/1.2.1")
if self.options.with_jasper:
self.requires("jasper/2.0.24")
if self.options.get_safe("with_jpeg"):
self.requires("libjpeg/9d")
if self.options.get_safe("with_openexr"):
self.requires("openexr/2.5.3")
if self.options.get_safe("with_png"):
self.requires("libpng/1.6.37")
if self.options.with_tiff:
self.requires("libtiff/4.2.0")
if self.options.with_zlib:
self.requires("zlib/1.2.11")
def source(self):
tools.get(**self.conan_data["sources"][self.version],
strip_root=True, destination=self._source_subfolder)
def _patch_sources(self):
for patch in self.conan_data["patches"].get(self.version, []):
tools.patch(**patch)
for package in ("Fontconfig", "Freetype", "GDAL", "GIFLIB", "GTA", "Jasper", "OpenEXR"):
# Prefer conan's find package scripts over osg's
os.unlink(os.path.join(self._source_subfolder, "CMakeModules", "Find{}.cmake".format(package)))
def _configured_cmake(self):
if hasattr(self, "_cmake"):
return self._cmake
self._cmake = cmake = CMake(self)
cmake.definitions["USE_3RDPARTY_BIN"] = False
cmake.definitions["DYNAMIC_OPENSCENEGRAPH"] = self.options.shared
cmake.definitions["DYNAMIC_OPENTHREADS"] = self.options.shared
cmake.definitions["BUILD_OSG_APPLICATIONS"] = self.options.build_applications
cmake.definitions["BUILD_OSG_EXAMPLES"] = False
cmake.definitions["OSG_NOTIFY_DISABLED"] = not self.options.enable_notify
cmake.definitions["OSG_USE_DEPRECATED_API"] = self.options.enable_deprecated_api
cmake.definitions["OSG_PROVIDE_READFILE"] = self.options.enable_readfile
cmake.definitions["OSG_USE_REF_PTR_IMPLICIT_OUTPUT_CONVERSION"] = self.options.enable_ref_ptr_implicit_output_conversion
cmake.definitions["OSG_USE_REF_PTR_SAFE_DEREFERENCE"] = self.options.enable_ref_ptr_safe_dereference
cmake.definitions["OSG_ENVVAR_SUPPORTED"] = self.options.enable_envvar_support
if not self.options.enable_windowing_system:
cmake.definitions["OSG_WINDOWING_SYSTEM"] = None
cmake.definitions["BUILD_OSG_DEPRECATED_SERIALIZERS"] = self.options.enable_deprecated_serializers
cmake.definitions["OSG_TEXT_USE_FONTCONFIG"] = self.options.use_fontconfig
# Disable option dependencies unless we have a package for them
cmake.definitions["OSG_WITH_FREETYPE"] = self.options.with_freetype
cmake.definitions["OSG_WITH_OPENEXR"] = self.options.get_safe("with_openexr", False)
cmake.definitions["OSG_WITH_INVENTOR"] = False
cmake.definitions["OSG_WITH_JASPER"] = self.options.with_jasper
cmake.definitions["OSG_WITH_OPENCASCADE"] = False
cmake.definitions["OSG_WITH_FBX"] = False
cmake.definitions["OSG_WITH_ZLIB"] = self.options.with_zlib
cmake.definitions["OSG_WITH_GDAL"] = self.options.with_gdal
cmake.definitions["OSG_WITH_GTA"] = self.options.with_gta
cmake.definitions["OSG_WITH_CURL"] = self.options.with_curl
cmake.definitions["OSG_WITH_LIBVNCSERVER"] = False
cmake.definitions["OSG_WITH_DCMTK"] = self.options.get_safe("with_dcmtk", False)
cmake.definitions["OSG_WITH_FFMPEG"] = False
cmake.definitions["OSG_WITH_DIRECTSHOW"] = False
cmake.definitions["OSG_WITH_SDL"] = False
cmake.definitions["OSG_WITH_POPPLER"] = False
cmake.definitions["OSG_WITH_RSVG"] = False
cmake.definitions["OSG_WITH_NVTT"] = False
cmake.definitions["OSG_WITH_ASIO"] = self.options.get_safe("with_asio", False)
cmake.definitions["OSG_WITH_ZEROCONF"] = False
cmake.definitions["OSG_WITH_LIBLAS"] = False
cmake.definitions["OSG_WITH_GIF"] = self.options.get_safe("with_gif", False)
cmake.definitions["OSG_WITH_JPEG"] = self.options.get_safe("with_jpeg", False)
cmake.definitions["OSG_WITH_PNG"] = self.options.get_safe("with_png", False)
cmake.definitions["OSG_WITH_TIFF"] = self.options.with_tiff
if self.settings.os == "Windows":
# osg has optional quicktime support on Windows
cmake.definitions["CMAKE_DISABLE_FIND_PACKAGE_QuickTime"] = True
cmake.definitions["OSG_MSVC_VERSIONED_DLL"] = False
cmake.configure()
return cmake
def build(self):
self._patch_sources()
self._configured_cmake().build()
def package(self):
self._configured_cmake().install()
self.copy(pattern="LICENSE.txt", dst="licenses", src=self._source_subfolder)
tools.rmdir(os.path.join(self.package_folder, "lib", "pkgconfig"))
tools.remove_files_by_mask(self.package_folder, "*.pdb")
def package_info(self):
# FindOpenSceneGraph.cmake is shipped with cmake and is a traditional cmake script
# It doesn't setup targets and only provides a few variables:
# but it will set the last three of these if the name of the package is OPENSCENEGRAPH (it uses
# the filename for the first, so OpenSceneGraph_FOUND gets set, not OPENSCENEGRAPH_FOUND)
# TODO: set OPENSCENEGRAPH_FOUND in cmake_find_package and cmake_find_package_multi
self.cpp_info.filenames["cmake_find_package"] = "OpenSceneGraph"
self.cpp_info.filenames["cmake_find_package_multi"] = "OpenSceneGraph"
self.cpp_info.names["cmake_find_package"] = "OPENSCENEGRAPH"
self.cpp_info.names["cmake_find_package_multi"] = "OPENSCENEGRAPH"
if self.settings.build_type == "Debug":
postfix = "d"
elif self.settings.build_type == "RelWithDebInfo":
postfix = "rd"
elif self.settings.build_type == "MinSizeRel":
postfix = "s"
else:
postfix = ""
def setup_plugin(plugin):
lib = "osgdb_" + plugin
plugin_library = self.cpp_info.components[lib]
plugin_library.libs = [] if self.options.shared else [lib + postfix]
plugin_library.requires = ["OpenThreads", "osg", "osgDB", "osgUtil"]
if not self.options.shared:
plugin_library.libdirs = [os.path.join("lib", "osgPlugins-{}".format(self.version))]
return plugin_library
def setup_serializers(lib):
plugins = []
if lib not in ("osgDB", "osgWidget", "osgPresentation"):
plugins.append("serializers_{}".format(lib.lower()))
if self.options.enable_deprecated_serializers:
if lib not in ("osgUtil", "osgDB", "osgGA", "osgManipulator", "osgUI", "osgPresentation"):
plugins.append("deprecated_{}".format(lib.lower()))
for plugin in plugins:
setup_plugin(plugin).requires.append(lib)
def setup_library(lib):
library = self.cpp_info.components[lib]
library.libs = [lib + postfix]
library.names["pkg_config"] = "openscenegraph-{}".format(lib)
setup_serializers(lib)
return library
# Core libraries
# requires obtained from osg's source code
library = self.cpp_info.components["OpenThreads"]
library.libs = ["OpenThreads" + postfix]
library.names["pkg_config"] = "openthreads"
if self.settings.os == "Linux":
library.system_libs = ["pthread"]
library = setup_library("osg")
library.requires = ["OpenThreads", "opengl::opengl"]
if self.settings.os == "Linux":
library.system_libs = ["m", "rt", "dl"]
if not self.options.shared:
library.defines.append("OSG_LIBRARY_STATIC")
library = setup_library("osgDB")
library.requires = ["osg", "osgUtil", "OpenThreads"]
if self.settings.os == "Linux":
library.system_libs = ["dl"]
elif self.settings.os == "Macos":
library.frameworks = ["Carbon", "Cocoa"]
if self.options.with_zlib:
library.requires.append("zlib::zlib")
setup_library("osgUtil").requires = ["osg", "OpenThreads"]
setup_library("osgGA").requires = ["osgDB", "osgUtil", "osg", "OpenThreads"]
library = setup_library("osgText")
library.requires = ["osgDB", "osg", "osgUtil", "OpenThreads"]
if self.options.use_fontconfig:
library.requires.append("fontconfig::fontconfig")
library = setup_library("osgViewer")
library.requires = ["osgGA", "osgText", "osgDB", "osgUtil", "osg"]
if self.options.enable_windowing_system:
if self.settings.os == "Linux":
library.requires.append("xorg::xorg")
elif tools.is_apple_os(self.settings.os):
library.frameworks = ["Cocoa"]
if self.settings.os == "Windows":
library.system_libs = ["gdi32"]
setup_library("osgAnimation").requires = ["osg", "osgText", "osgGA", "osgViewer", "OpenThreads"]
setup_library("osgFX").requires = ["osgUtil", "osgDB", "osg", "OpenThreads"]
setup_library("osgManipulator").requires = ["osgViewer", "osgGA", "osgUtil", "osg", "OpenThreads"]
setup_library("osgParticle").requires = ["osgUtil", "osgDB", "osg", "OpenThreads"]
setup_library("osgUI").requires = ["osgDB", "osgGA", "osgUtil", "osgText", "osgViewer", "osg", "OpenThreads"]
setup_library("osgVolume").requires = ["osgGA", "osgDB", "osgUtil", "osg", "OpenThreads"]
setup_library("osgShadow").requires = ["osgUtil", "osgDB", "osg", "OpenThreads"]
setup_library("osgSim").requires = ["osgText", "osgUtil", "osgDB", "osg", "OpenThreads"]
setup_library("osgTerrain").requires = ["osgUtil", "osgDB", "osg", "OpenThreads"]
setup_library("osgWidget").requires = ["osgText", "osgViewer", "osgDB", "osg", "OpenThreads"]
setup_library("osgPresentation").requires = ["osgViewer", "osgUI", "osgWidget", "osgManipulator", "osgVolume", "osgFX", "osgText", "osgGA", "osgUtil", "osgDB", "osg", "OpenThreads"]
setup_plugin("osga")
setup_plugin("rot")
setup_plugin("scale")
setup_plugin("trans")
setup_plugin("normals")
setup_plugin("revisions")
setup_plugin("osgviewer").requires.append("osgViewer")
setup_plugin("osgshadow").requires.append("osgShadow")
setup_plugin("osgterrain").requires.append("osgTerrain")
setup_plugin("osg")
plugin = setup_plugin("ive")
plugin.requires.extend(("osgSim", "osgFX", "osgText", "osgTerrain", "osgVolume"))
if self.options.with_zlib:
plugin.requires.append("zlib::zlib")
setup_plugin("cfg").requires.append("osgViewer")
setup_plugin("glsl")
setup_plugin("rgb")
setup_plugin("bmp")
setup_plugin("pnm")
setup_plugin("dds")
setup_plugin("tga")
setup_plugin("hdr")
setup_plugin("dot")
setup_plugin("vtf")
setup_plugin("ktx")
if self.options.get_safe("with_jpeg"):
setup_plugin("jpeg").requires.append("libjpeg::libjpeg")
if self.options.with_jasper:
setup_plugin("jp2").requires.append("jasper::jasper")
if self.options.get_safe("with_openexr"):
setup_plugin("exr").requires.append("openexr::openexr")
if self.options.get_safe("with_gif"):
setup_plugin("gif").requires.append("giflib::giflib")
if self.options.get_safe("with_png"):
setup_plugin("png").requires.extend(("libpng::libpng", "zlib::zlib"))
if self.options.with_tiff:
setup_plugin("tiff").requires.append("libtiff::libtiff")
if self.options.with_gdal:
setup_plugin("gdal").requires.extend(("osgTerrain", "gdal::gdal"))
setup_plugin("ogr").requires.append("gdal::gdal")
if self.options.with_gta:
setup_plugin("gta").requires.append("libgta::libgta")
if self.options.get_safe("with_dcmtk"):
plugin = setup_plugin("dicom")
plugin.requires.extend(("osgVolume", "dcmtk::dcmtk"))
if self.settings.os == "Windows":
plugin.system_libs = ["wsock32", "ws2_32"]
setup_plugin("3dc")
setup_plugin("p3d").requires.extend(("osgGA", "osgText", "osgVolume", "osgFX", "osgViewer", "osgPresentation"))
if self.options.with_curl:
plugin = setup_plugin("curl")
plugin.requires.append("libcurl::libcurl")
if self.options.with_zlib:
plugin.requires.append("zlib::zlib")
if self.options.with_zlib:
setup_plugin("gz").requires.append("zlib::zlib")
setup_plugin("bvh").requires.append("osgAnimation")
setup_plugin("x")
setup_plugin("dxf").requires.append("osgText")
setup_plugin("openflight").requires.append("osgSim")
setup_plugin("obj")
setup_plugin("pic")
setup_plugin("stl")
setup_plugin("3ds")
setup_plugin("ac")
setup_plugin("pov")
setup_plugin("logo")
setup_plugin("lws")
setup_plugin("md2")
setup_plugin("osgtgz")
setup_plugin("tgz")
setup_plugin("shp").requires.extend(("osgSim", "osgTerrain"))
setup_plugin("txf").requires.append("osgText")
setup_plugin("bsp")
setup_plugin("mdl")
setup_plugin("gles").requires.extend(("osgUtil", "osgAnimation"))
setup_plugin("osgjs").requires.extend(("osgAnimation", "osgSim"))
setup_plugin("lwo").requires.append("osgFX")
setup_plugin("ply")
setup_plugin("txp").requires.extend(("osgSim", "osgText"))
if tools.is_apple_os(self.settings.os):
setup_plugin("imageio").frameworks = ["Accelerate"]
if ((self.settings.os == "Macos" and self.settings.os.version and tools.Version(self.settings.os.version) >= "10.8")
or (self.settings.os == "iOS" and tools.Version(self.settings.os.version) >= "6.0")):
plugin = setup_plugin("avfoundation")
plugin.requires.append("osgViewer")
plugin.frameworks = ["AVFoundation", "Cocoa", "CoreVideo", "CoreMedia", "QuartzCore"]
if self.settings.os == "Macos" and self.settings.os.version and tools.Version(self.settings.os.version) <= "10.6" and self.settings.arch == "x86":
setup_plugin("qt").frameworks = ["QuickTime"]
if self.settings.os == "Macos" and self.settings.arch == "x86":
plugin = setup_plugin("QTKit")
plugin.requires.append("osgViewer")
plugin.frameworks = ["QTKit", "Cocoa", "QuickTime", "CoreVideo"]
if self.options.with_freetype:
setup_plugin("freetype").requires.extend(("osgText", "freetype::freetype"))
if self.options.with_zlib:
setup_plugin("zip")
setup_plugin("pvr")
plugin = setup_plugin("osc")
plugin.requires.append("osgGA")
if self.settings.os == "Windows":
plugin.system_libs = ["ws2_32", "winmm"]
setup_plugin("trk")
setup_plugin("tf")
setup_plugin("lua")
if self.options.get_safe("with_asio", False):
setup_plugin("resthttp").requires.extend(("osgPresentation", "asio::asio", "boost::boost"))
| true | true |
f7210dc85edd4d0b6ad091c50f23892394528a1e | 1,558 | py | Python | examples/aws_lambda/aws_lambda_oauth.py | korymath/bolt-python | 67e0286d756ba92510315d044303f43b03380b52 | [
"MIT"
] | 1 | 2021-05-02T16:06:44.000Z | 2021-05-02T16:06:44.000Z | examples/aws_lambda/aws_lambda_oauth.py | korymath/bolt-python | 67e0286d756ba92510315d044303f43b03380b52 | [
"MIT"
] | 1 | 2021-02-23T21:05:57.000Z | 2021-02-23T21:05:57.000Z | examples/aws_lambda/aws_lambda_oauth.py | korymath/bolt-python | 67e0286d756ba92510315d044303f43b03380b52 | [
"MIT"
] | null | null | null | # ------------------------------------------------
# instead of slack_bolt in requirements.txt
import sys
sys.path.insert(1, "vendor")
# ------------------------------------------------
import logging
from slack_bolt import App
from slack_bolt.adapter.aws_lambda import SlackRequestHandler
from slack_bolt.adapter.aws_lambda.lambda_s3_oauth_flow import LambdaS3OAuthFlow
# process_before_response must be True when running on FaaS
app = App(process_before_response=True, oauth_flow=LambdaS3OAuthFlow(),)
@app.event("app_mention")
def handle_app_mentions(body, say, logger):
logger.info(body)
say("What's up?")
@app.command("/hello-bolt-python-lambda")
def respond_to_slack_within_3_seconds(ack):
# This method is for synchronous communication with the Slack API server
ack("Thanks!")
SlackRequestHandler.clear_all_log_handlers()
logging.basicConfig(format="%(asctime)s %(message)s", level=logging.DEBUG)
def handler(event, context):
slack_handler = SlackRequestHandler(app=app)
return slack_handler.handle(event, context)
# # -- OAuth flow -- #
# export SLACK_SIGNING_SECRET=***
# export SLACK_BOT_TOKEN=xoxb-***
# export SLACK_CLIENT_ID=111.111
# export SLACK_CLIENT_SECRET=***
# export SLACK_SCOPES=app_mentions:read,chat:write
# AWS IAM Role: bolt_python_s3_storage
# - AmazonS3FullAccess
# - AWSLambdaBasicExecutionRole
# rm -rf latest_slack_bolt && cp -pr ../../src latest_slack_bolt
# pip install python-lambda
# lambda deploy --config-file aws_lambda_oauth_config.yaml --requirements requirements_oauth.txt
| 29.396226 | 96 | 0.727856 |
import sys
sys.path.insert(1, "vendor")
import logging
from slack_bolt import App
from slack_bolt.adapter.aws_lambda import SlackRequestHandler
from slack_bolt.adapter.aws_lambda.lambda_s3_oauth_flow import LambdaS3OAuthFlow
app = App(process_before_response=True, oauth_flow=LambdaS3OAuthFlow(),)
@app.event("app_mention")
def handle_app_mentions(body, say, logger):
logger.info(body)
say("What's up?")
@app.command("/hello-bolt-python-lambda")
def respond_to_slack_within_3_seconds(ack):
# This method is for synchronous communication with the Slack API server
ack("Thanks!")
SlackRequestHandler.clear_all_log_handlers()
logging.basicConfig(format="%(asctime)s %(message)s", level=logging.DEBUG)
def handler(event, context):
slack_handler = SlackRequestHandler(app=app)
return slack_handler.handle(event, context)
# # -- OAuth flow -- #
# export SLACK_SIGNING_SECRET=***
# export SLACK_BOT_TOKEN=xoxb-***
# export SLACK_CLIENT_ID=111.111
# export SLACK_CLIENT_SECRET=***
# export SLACK_SCOPES=app_mentions:read,chat:write
# AWS IAM Role: bolt_python_s3_storage
# - AmazonS3FullAccess
# - AWSLambdaBasicExecutionRole
# rm -rf latest_slack_bolt && cp -pr ../../src latest_slack_bolt
# pip install python-lambda
# lambda deploy --config-file aws_lambda_oauth_config.yaml --requirements requirements_oauth.txt
| true | true |
f7210e74f4ea154ad8e0c98314be558c787c9440 | 483 | py | Python | app/settings.py | rchapman83/sticks-clothing | dfdb5283b00c9209f854648e50f30140a0bb3004 | [
"MIT"
] | null | null | null | app/settings.py | rchapman83/sticks-clothing | dfdb5283b00c9209f854648e50f30140a0bb3004 | [
"MIT"
] | null | null | null | app/settings.py | rchapman83/sticks-clothing | dfdb5283b00c9209f854648e50f30140a0bb3004 | [
"MIT"
] | null | null | null | # -*- settings:utf-8 -*-
# Flask settings
import logging
import os
proj_name = os.environ.get('PROJECT_NAME')
debug_mode = os.environ.get('FLASK_DEBUG')
secret_code = os.environ.get('FLASK_SECRET')
DEBUG = debug_mode
TESTING = False
USE_X_SENDFILE = False
CSRF_ENABLED = True
SECRET_KEY = secret_code
# LOGGING
LOGGER_NAME = '%s_log' % proj_name
LOG_FILENAME = '/var/tmp/app.%s.log' % proj_name
LOG_LEVEL = logging.INFO
LOG_FORMAT = '%(asctime)s %(levelname)s\t: %(message)s'
| 21.954545 | 55 | 0.730849 |
import logging
import os
proj_name = os.environ.get('PROJECT_NAME')
debug_mode = os.environ.get('FLASK_DEBUG')
secret_code = os.environ.get('FLASK_SECRET')
DEBUG = debug_mode
TESTING = False
USE_X_SENDFILE = False
CSRF_ENABLED = True
SECRET_KEY = secret_code
LOGGER_NAME = '%s_log' % proj_name
LOG_FILENAME = '/var/tmp/app.%s.log' % proj_name
LOG_LEVEL = logging.INFO
LOG_FORMAT = '%(asctime)s %(levelname)s\t: %(message)s'
| true | true |
f7210f83b40555129d292b05eb3bd12a490ff744 | 1,857 | py | Python | samplers.py | linkserendipity/deep-person-reid | 564ccf307336af1b3343fa42c55f9d53df0fa20a | [
"MIT"
] | null | null | null | samplers.py | linkserendipity/deep-person-reid | 564ccf307336af1b3343fa42c55f9d53df0fa20a | [
"MIT"
] | null | null | null | samplers.py | linkserendipity/deep-person-reid | 564ccf307336af1b3343fa42c55f9d53df0fa20a | [
"MIT"
] | null | null | null | from __future__ import absolute_import
from collections import defaultdict
import numpy as np
import torch
from torch.utils.data.sampler import Sampler
class RandomIdentitySampler(Sampler):
"""
Randomly sample N identities, then for each identity,
randomly sample K instances, therefore batch size is N*K.
Code imported from https://github.com/Cysu/open-reid/blob/master/reid/utils/data/sampler.py.
Args:
data_source (Dataset): dataset to sample from.
num_instances (int): number of instances per identity.
"""
def __init__(self, data_source, num_instances=4):
self.data_source = data_source
self.num_instances = num_instances
self.index_dic = defaultdict(list)
for index, (_, pid, _) in enumerate(data_source):
self.index_dic[pid].append(index)
self.pids = list(self.index_dic.keys())
self.num_identities = len(self.pids)
def __iter__(self):
# 3004 pictures list 32 batch_size [aaaaaaaaaaaaaaaaaa]
indices = torch.randperm(self.num_identities) # shuffle for 751 ids
ret = [] # [1111 2222 3333 4444 5555 6666 7777 ... 751 751 751 751] len(ret)=3004
for i in indices:
pid = self.pids[i]
t = self.index_dic[pid]
replace = False if len(t) >= self.num_instances else True
t = np.random.choice(t, size=self.num_instances, replace=replace) # choose 4 pictures from t pictures
ret.extend(t)
# from IPython import embed
# embed()
return iter(ret)
def __len__(self):
return self.num_identities * self.num_instances
# if __name__ == "__main__":
# from util.data_manager import Market1501
# dataset = Market1501(root='/home/ls')
# sampler = RandomIdentitySampler(dataset.train)
# a = sampler.__iter__() | 37.14 | 113 | 0.662897 | from __future__ import absolute_import
from collections import defaultdict
import numpy as np
import torch
from torch.utils.data.sampler import Sampler
class RandomIdentitySampler(Sampler):
def __init__(self, data_source, num_instances=4):
self.data_source = data_source
self.num_instances = num_instances
self.index_dic = defaultdict(list)
for index, (_, pid, _) in enumerate(data_source):
self.index_dic[pid].append(index)
self.pids = list(self.index_dic.keys())
self.num_identities = len(self.pids)
def __iter__(self):
indices = torch.randperm(self.num_identities)
ret = []
for i in indices:
pid = self.pids[i]
t = self.index_dic[pid]
replace = False if len(t) >= self.num_instances else True
t = np.random.choice(t, size=self.num_instances, replace=replace)
ret.extend(t)
return iter(ret)
def __len__(self):
return self.num_identities * self.num_instances
| true | true |
f7210fbfe983a9e81665dcac17e1a9498a07d28d | 5,545 | py | Python | examples/pwr_run/ml_regression/new_speedup_def/knn_k80.py | boringlee24/keras_old | 1e1176c45c4952ba1b9b9e58e9cc4df027ab111d | [
"MIT"
] | null | null | null | examples/pwr_run/ml_regression/new_speedup_def/knn_k80.py | boringlee24/keras_old | 1e1176c45c4952ba1b9b9e58e9cc4df027ab111d | [
"MIT"
] | null | null | null | examples/pwr_run/ml_regression/new_speedup_def/knn_k80.py | boringlee24/keras_old | 1e1176c45c4952ba1b9b9e58e9cc4df027ab111d | [
"MIT"
] | null | null | null | import pandas
import pdb
from datetime import datetime
import matplotlib
import numpy as np
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import glob
import sys
from matplotlib.ticker import MultipleLocator
from scipy.stats import pearsonr, spearmanr
from sklearn import neighbors
from sklearn.metrics import mean_squared_error
from math import sqrt
from sklearn.model_selection import train_test_split
from sklearn.linear_model import LinearRegression
import json
log_dir = '/scratch/li.baol/GPU_pwr_meas/tensorflow/round1/regression/pwr/*'
dirs = glob.glob(log_dir)
dirs.sort()
# store everything in a dict
all_pwr = {} # {densenet121_32:{K80:a, K100:b}...}
for tc in dirs:
test = tc.split('/')[6+1+1].split('.')[0]
gpu = test.split('_')[0]
model = test.replace(gpu + '_', '')
# read tc.csv into a list
data = pandas.read_csv(tc)
pwr = np.asarray(data[data.columns[0]].tolist())
if model in all_pwr:
all_pwr[model][gpu] = pwr
else:
all_pwr[model] = {gpu: pwr}
log_dir = '/scratch/li.baol/GPU_pwr_meas/tensorflow/round1/regression/util/*'
dirs = glob.glob(log_dir)
dirs.sort()
# store everything in a dict
all_util = {} # {densenet121_32:{K80:a, K100:b}...}
for tc in dirs:
test = tc.split('/')[6+1+1].split('.')[0]
gpu = test.split('_')[0]
model = test.replace(gpu + '_', '')
# read tc.csv into a list
data = pandas.read_csv(tc)
util = np.asarray(data[data.columns[0]].tolist())
if model in all_util:
all_util[model][gpu] = util
else:
all_util[model] = {gpu: util}
log_dir = '/scratch/li.baol/GPU_pwr_meas/tensorflow/round1/regression/mem_util/*'
dirs = glob.glob(log_dir)
dirs.sort()
# store everything in a dict
all_mem_util = {} # {densenet121_32:{K80:a, K100:b}...}
for tc in dirs:
test = tc.split('/')[6+1+1].split('.')[0]
gpu = test.split('_')[0]
model = test.replace(gpu + '_', '')
# read tc.csv into a list
data = pandas.read_csv(tc)
mem_util = np.asarray(data[data.columns[0]].tolist())
if model in all_mem_util:
all_mem_util[model][gpu] = mem_util
else:
all_mem_util[model] = {gpu: mem_util}
log_dir = '/scratch/li.baol/GPU_time_meas/tensorflow/round1/csv/*'
dirs = glob.glob(log_dir)
dirs.sort()
# store everything in a dict
all_time = {} # {densenet121_32:{K80:a, K100:b}...}
for tc in dirs:
test = tc.split('/')[6+1].split('.')[0]
gpu = test.split('_')[0]
model = test.replace(gpu + '_', '')
# read tc.csv into a list
data = pandas.read_csv(tc)
time = np.asarray(data[data.columns[0]].tolist())
if model in all_time:
all_time[model][gpu] = time
else:
all_time[model] = {gpu: time}
# Now plot V100 power save ratio (%) vs K80 power(W)
x1_data = [] # power
x2_data = [] # speed
x3_data = [] # utilization
x4_data = [] # mem util
y_data = []
for key in all_pwr:
# if ('mnasnet' not in key and 'mobilenet' not in key):
for i in all_pwr[key]['K80'].tolist(): # power
x1_data.append(i)
for i in (1 / all_time[key]['K80']).tolist(): # speed
x2_data.append(i)
for i in (all_util[key]['K80']).tolist(): # utilization
x3_data.append(i)
for i in (all_mem_util[key]['K80']).tolist(): # mem util
x4_data.append(i)
for i in (all_time[key]['K80'] / all_time[key]['V100']).tolist(): # speed up
y_data.append(i)
x1_norm = [(i - min(x1_data)) / (max(x1_data) - min(x1_data)) for i in x1_data]
x2_norm = [(i - min(x2_data)) / (max(x2_data) - min(x2_data)) for i in x2_data]
x3_norm = [(i - min(x3_data)) / (max(x3_data) - min(x3_data)) for i in x3_data]
x4_norm = [(i - min(x4_data)) / (max(x4_data) - min(x4_data)) for i in x4_data]
# create training data
x_data = []
for i in range(len(x1_norm)):
x_data.append([x1_norm[i], x2_norm[i], x3_norm[i], x4_norm[i]])
x_train, x_test, y_train, y_test = train_test_split(x_data, y_data, test_size=0.3)
with open('x1_data.json', 'w') as outfile:
json.dump(x1_data, outfile)
with open('x2_data.json', 'w') as outfile:
json.dump(x2_data, outfile)
with open('x3_data.json', 'w') as outfile:
json.dump(x3_data, outfile)
with open('x4_data.json', 'w') as outfile:
json.dump(x4_data, outfile)
with open('y_data.json', 'w') as outfile:
json.dump(y_data, outfile)
#with open('x_data.json') as f:
# x_data = json.load(f)
#with open('y_data.json') as f:
# y_data = json.load(f)
#x_train, x_test, y_train, y_test = train_test_split(x_data, y_data, test_size=0.3)
rmse_val = [] #to store rmse values for different k
for K in range(20):
K = K+1
model = neighbors.KNeighborsRegressor(n_neighbors = K, weights='distance')
model.fit(x_train, y_train) #fit the model
pred = model.predict(x_test) #make prediction on test set
# model.predict(np.array(x_test[0]).reshape((1, -1)))
err = sqrt(mean_squared_error(y_test, pred)) #calculate rmse
rmse_val.append(err) #store rmse values
err_pct = abs(y_test-pred) / y_test * 100
print('RMSE value for k= ' , K , 'is:', err)
print('error (%) is', np.mean(err_pct))
xx_data = []
for i in range(len(x1_norm)):
xx_data.append([x1_norm[i]])
# now compare with liear regression
x_train, x_test, y_train, y_test = train_test_split(xx_data, y_data, test_size=0.3)
model2 = LinearRegression().fit(x_train, y_train)
pred = model2.predict(x_test) #make prediction on test set
err = sqrt(mean_squared_error(y_test,pred)) #calculate rmse
print('RMSE value for linear regression is ', err)
| 31.327684 | 83 | 0.658431 | import pandas
import pdb
from datetime import datetime
import matplotlib
import numpy as np
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import glob
import sys
from matplotlib.ticker import MultipleLocator
from scipy.stats import pearsonr, spearmanr
from sklearn import neighbors
from sklearn.metrics import mean_squared_error
from math import sqrt
from sklearn.model_selection import train_test_split
from sklearn.linear_model import LinearRegression
import json
log_dir = '/scratch/li.baol/GPU_pwr_meas/tensorflow/round1/regression/pwr/*'
dirs = glob.glob(log_dir)
dirs.sort()
all_pwr = {}
for tc in dirs:
test = tc.split('/')[6+1+1].split('.')[0]
gpu = test.split('_')[0]
model = test.replace(gpu + '_', '')
data = pandas.read_csv(tc)
pwr = np.asarray(data[data.columns[0]].tolist())
if model in all_pwr:
all_pwr[model][gpu] = pwr
else:
all_pwr[model] = {gpu: pwr}
log_dir = '/scratch/li.baol/GPU_pwr_meas/tensorflow/round1/regression/util/*'
dirs = glob.glob(log_dir)
dirs.sort()
all_util = {}
for tc in dirs:
test = tc.split('/')[6+1+1].split('.')[0]
gpu = test.split('_')[0]
model = test.replace(gpu + '_', '')
data = pandas.read_csv(tc)
util = np.asarray(data[data.columns[0]].tolist())
if model in all_util:
all_util[model][gpu] = util
else:
all_util[model] = {gpu: util}
log_dir = '/scratch/li.baol/GPU_pwr_meas/tensorflow/round1/regression/mem_util/*'
dirs = glob.glob(log_dir)
dirs.sort()
all_mem_util = {}
for tc in dirs:
test = tc.split('/')[6+1+1].split('.')[0]
gpu = test.split('_')[0]
model = test.replace(gpu + '_', '')
data = pandas.read_csv(tc)
mem_util = np.asarray(data[data.columns[0]].tolist())
if model in all_mem_util:
all_mem_util[model][gpu] = mem_util
else:
all_mem_util[model] = {gpu: mem_util}
log_dir = '/scratch/li.baol/GPU_time_meas/tensorflow/round1/csv/*'
dirs = glob.glob(log_dir)
dirs.sort()
all_time = {}
for tc in dirs:
test = tc.split('/')[6+1].split('.')[0]
gpu = test.split('_')[0]
model = test.replace(gpu + '_', '')
data = pandas.read_csv(tc)
time = np.asarray(data[data.columns[0]].tolist())
if model in all_time:
all_time[model][gpu] = time
else:
all_time[model] = {gpu: time}
x1_data = []
x2_data = []
x3_data = []
x4_data = []
y_data = []
for key in all_pwr:
for i in all_pwr[key]['K80'].tolist():
x1_data.append(i)
for i in (1 / all_time[key]['K80']).tolist():
x2_data.append(i)
for i in (all_util[key]['K80']).tolist():
x3_data.append(i)
for i in (all_mem_util[key]['K80']).tolist():
x4_data.append(i)
for i in (all_time[key]['K80'] / all_time[key]['V100']).tolist():
y_data.append(i)
x1_norm = [(i - min(x1_data)) / (max(x1_data) - min(x1_data)) for i in x1_data]
x2_norm = [(i - min(x2_data)) / (max(x2_data) - min(x2_data)) for i in x2_data]
x3_norm = [(i - min(x3_data)) / (max(x3_data) - min(x3_data)) for i in x3_data]
x4_norm = [(i - min(x4_data)) / (max(x4_data) - min(x4_data)) for i in x4_data]
x_data = []
for i in range(len(x1_norm)):
x_data.append([x1_norm[i], x2_norm[i], x3_norm[i], x4_norm[i]])
x_train, x_test, y_train, y_test = train_test_split(x_data, y_data, test_size=0.3)
with open('x1_data.json', 'w') as outfile:
json.dump(x1_data, outfile)
with open('x2_data.json', 'w') as outfile:
json.dump(x2_data, outfile)
with open('x3_data.json', 'w') as outfile:
json.dump(x3_data, outfile)
with open('x4_data.json', 'w') as outfile:
json.dump(x4_data, outfile)
with open('y_data.json', 'w') as outfile:
json.dump(y_data, outfile)
rmse_val = []
for K in range(20):
K = K+1
model = neighbors.KNeighborsRegressor(n_neighbors = K, weights='distance')
model.fit(x_train, y_train)
pred = model.predict(x_test)
err = sqrt(mean_squared_error(y_test, pred))
rmse_val.append(err)
err_pct = abs(y_test-pred) / y_test * 100
print('RMSE value for k= ' , K , 'is:', err)
print('error (%) is', np.mean(err_pct))
xx_data = []
for i in range(len(x1_norm)):
xx_data.append([x1_norm[i]])
x_train, x_test, y_train, y_test = train_test_split(xx_data, y_data, test_size=0.3)
model2 = LinearRegression().fit(x_train, y_train)
pred = model2.predict(x_test)
err = sqrt(mean_squared_error(y_test,pred))
print('RMSE value for linear regression is ', err)
| true | true |
f7210feadbc98c8ee9e14ec28cba851c6e06e25b | 1,367 | py | Python | ssseg/cfgs/fcn/cfgs_voc_resnest101os8.py | nianjiuhuiyi/sssegmentation | 4fc12ea7b80fe83170b6d3da0826e53a99ef5325 | [
"MIT"
] | 411 | 2020-10-22T02:24:57.000Z | 2022-03-31T11:19:17.000Z | ssseg/cfgs/fcn/cfgs_voc_resnest101os8.py | nianjiuhuiyi/sssegmentation | 4fc12ea7b80fe83170b6d3da0826e53a99ef5325 | [
"MIT"
] | 24 | 2020-12-21T03:53:54.000Z | 2022-03-17T06:50:00.000Z | ssseg/cfgs/fcn/cfgs_voc_resnest101os8.py | nianjiuhuiyi/sssegmentation | 4fc12ea7b80fe83170b6d3da0826e53a99ef5325 | [
"MIT"
] | 59 | 2020-12-04T03:40:12.000Z | 2022-03-30T09:12:47.000Z | '''define the config file for voc and resnest101os8'''
import os
from .base_cfg import *
# modify dataset config
DATASET_CFG = DATASET_CFG.copy()
DATASET_CFG.update({
'type': 'voc',
'rootdir': os.path.join(os.getcwd(), 'VOCdevkit/VOC2012'),
})
DATASET_CFG['train']['set'] = 'trainaug'
# modify dataloader config
DATALOADER_CFG = DATALOADER_CFG.copy()
# modify optimizer config
OPTIMIZER_CFG = OPTIMIZER_CFG.copy()
OPTIMIZER_CFG.update(
{
'max_epochs': 60,
}
)
# modify losses config
LOSSES_CFG = LOSSES_CFG.copy()
# modify model config
MODEL_CFG = MODEL_CFG.copy()
MODEL_CFG.update(
{
'num_classes': 21,
'backbone': {
'type': 'resnest101',
'series': 'resnest',
'pretrained': True,
'outstride': 8,
'selected_indices': (2, 3),
},
}
)
# modify inference config
INFERENCE_CFG = INFERENCE_CFG.copy()
# modify common config
COMMON_CFG = COMMON_CFG.copy()
COMMON_CFG['train'].update(
{
'backupdir': 'fcn_resnest101os8_voc_train',
'logfilepath': 'fcn_resnest101os8_voc_train/train.log',
}
)
COMMON_CFG['test'].update(
{
'backupdir': 'fcn_resnest101os8_voc_test',
'logfilepath': 'fcn_resnest101os8_voc_test/test.log',
'resultsavepath': 'fcn_resnest101os8_voc_test/fcn_resnest101os8_voc_results.pkl'
}
) | 25.314815 | 88 | 0.653255 | import os
from .base_cfg import *
DATASET_CFG = DATASET_CFG.copy()
DATASET_CFG.update({
'type': 'voc',
'rootdir': os.path.join(os.getcwd(), 'VOCdevkit/VOC2012'),
})
DATASET_CFG['train']['set'] = 'trainaug'
DATALOADER_CFG = DATALOADER_CFG.copy()
OPTIMIZER_CFG = OPTIMIZER_CFG.copy()
OPTIMIZER_CFG.update(
{
'max_epochs': 60,
}
)
LOSSES_CFG = LOSSES_CFG.copy()
MODEL_CFG = MODEL_CFG.copy()
MODEL_CFG.update(
{
'num_classes': 21,
'backbone': {
'type': 'resnest101',
'series': 'resnest',
'pretrained': True,
'outstride': 8,
'selected_indices': (2, 3),
},
}
)
INFERENCE_CFG = INFERENCE_CFG.copy()
COMMON_CFG = COMMON_CFG.copy()
COMMON_CFG['train'].update(
{
'backupdir': 'fcn_resnest101os8_voc_train',
'logfilepath': 'fcn_resnest101os8_voc_train/train.log',
}
)
COMMON_CFG['test'].update(
{
'backupdir': 'fcn_resnest101os8_voc_test',
'logfilepath': 'fcn_resnest101os8_voc_test/test.log',
'resultsavepath': 'fcn_resnest101os8_voc_test/fcn_resnest101os8_voc_results.pkl'
}
) | true | true |
f721104366206bc775401b5c4d6634e901a2440d | 495 | py | Python | skype2.py | tullowhurler/GMIT-project-submissions | 5c75d5303bbdf75068b2b874debccf3531c7b80b | [
"Apache-2.0"
] | null | null | null | skype2.py | tullowhurler/GMIT-project-submissions | 5c75d5303bbdf75068b2b874debccf3531c7b80b | [
"Apache-2.0"
] | null | null | null | skype2.py | tullowhurler/GMIT-project-submissions | 5c75d5303bbdf75068b2b874debccf3531c7b80b | [
"Apache-2.0"
] | null | null | null | #Solution 2
#16/3/18 Ian's Solution
def ispalindrome(s):
    """Return True if string s reads the same forwards and backwards.

    The original compared every index pair and kept scanning even after a
    mismatch was found.  Comparing against the reversed string is
    equivalent, runs at C speed, and handles the empty string (which is
    vacuously a palindrome).
    """
    return s == s[::-1]
print(ispalindrome("eye"))
print(ispalindrome("eyes")) | 35.357143 | 137 | 0.640404 |
def ispalindrome(s):
    """Return True when s equals its own reversal (i.e. is a palindrome)."""
    last = len(s) - 1
    # Only the first half needs checking; each char is compared to its mirror.
    for i in range(len(s) // 2):
        if s[i] != s[last - i]:
            return False
    return True
print(ispalindrome("eye"))
print(ispalindrome("eyes")) | true | true |
f7211163c547410a5d37c79cba8d58a47a6c46de | 7,205 | py | Python | final-exam/tic_toc_toe_messy.py | Tanner-York-Make-School/SPD-2.31-Testing-and-Architecture | 623537a05cf5a9d50370a414a5056a78f95288eb | [
"MIT"
] | null | null | null | final-exam/tic_toc_toe_messy.py | Tanner-York-Make-School/SPD-2.31-Testing-and-Architecture | 623537a05cf5a9d50370a414a5056a78f95288eb | [
"MIT"
] | null | null | null | final-exam/tic_toc_toe_messy.py | Tanner-York-Make-School/SPD-2.31-Testing-and-Architecture | 623537a05cf5a9d50370a414a5056a78f95288eb | [
"MIT"
] | null | null | null | """
Tic Tac Toe
Reference: With modification from http://inventwithpython.com/chapter10.html.
# TODOs:
# 1. Find all TODO items and see whether you can improve the code.
# In most cases (if not all), you can make them more readable/modular.
# 2. Add/fix function's docstrings
"""
import random
# I didn't refactor the draw and is_winner, that uses the magic number 10,
# function because that would be drastically changing how the
# code works. Instead of creating a normal tic tac toe game like intended,
# it would add a new feature for creating larger boards, no longer making this
# refactoring but adding a new feature.
def draw_board(board):
    """Print the current 3x3 board to stdout.

    board: list of 10 one-character strings; index 0 is unused so the cell
    numbers 1-9 match what the player types in.
    """
    # "board" is a list of 10 strings representing the board (ignore index 0)
    print(' | |')
    print(' ' + board[1] + ' | ' + board[2] + ' | ' + board[3])
    print(' | |')
    print('-----------')
    print(' | |')
    print(' ' + board[4] + ' | ' + board[5] + ' | ' + board[6])
    print(' | |')
    print('-----------')
    print(' | |')
    print(' ' + board[7] + ' | ' + board[8] + ' | ' + board[9])
    print(' | |')
def input_player_letter():
    """Lets the player type which letter they want to be. Returns a list with the
    player’s letter as the first item, and the computer's letter as the second."""
    letter = ''
    # Loop until a valid choice; input is upper-cased so 'x'/'o' also work.
    while letter not in ('X', 'O'):
        print('Do you want to be X or O?')
        letter = input().upper()
    # the first element in the list is the player’s letter, the second is the computer's letter.
    if letter == 'X':
        return ['X', 'O']
    return ['O', 'X']
def who_goes_first():
    """Randomly choose the player who goes first."""
    # One fair coin flip decides between the computer and the player.
    return 'computer' if random.randint(0, 1) == 0 else 'player'
def play_again():
    """Returns True if the player wants to play again, otherwise it returns False."""
    print('Do you want to play again? (yes or no)')
    # Any answer starting with 'y' or 'Y' counts as yes.
    return input().lower().startswith('y')
def make_move(board, letter, move):
    """Place letter ('X' or 'O') at index move; mutates board in place."""
    board[move] = letter
def is_winner(board, letter):
    """Given a board and a player’s letter, this function returns True if
    that player has won."""
    winning_lines = (
        (1, 2, 3), (4, 5, 6), (7, 8, 9),   # rows: top, middle, bottom
        (1, 4, 7), (2, 5, 8), (3, 6, 9),   # columns: left, middle, right
        (3, 5, 7), (1, 5, 9),              # diagonals
    )
    return any(board[a] == board[b] == board[c] == letter
               for a, b, c in winning_lines)
def get_board_copy(board):
    """Return a shallow copy of the board so moves can be simulated safely."""
    return board[:]
def is_space_free(board, move):
    """Return True if square `move` on `board` is still unoccupied."""
    return board[move] == ' '
def get_player_move(board):
    """Prompt until the player names a free square; return it as an int.

    Valid inputs are the digit strings '1'..'9' (board indices); the chosen
    square must currently be free.
    """
    player_move = ' '
    options = set(str(i) for i in range(1, len(board)))
    # Re-prompt on anything that is not a digit in range or names a taken square.
    while (player_move not in options or
           not is_space_free(board, int(player_move))):
        print('What is your next move? (1-9)')
        player_move = input()
    return int(player_move)
def choose_random_move_from_list(board, moves_list):
    """Return a random valid (free) move out of moves_list on the given board,
    or None if none of those squares is free.

    The original fell off the end of the function to return None implicitly
    and accumulated candidates with a manual append loop; this makes both
    explicit and idiomatic without changing behavior.
    """
    possible_moves = [m for m in moves_list if is_space_free(board, m)]
    if possible_moves:
        return random.choice(possible_moves)
    return None  # explicit: no candidate square is free
def is_next_move_win(board, letter):
    """Return the square (1-9) where `letter` would win on its next move,
    or None if no single move wins.

    Each free square is tried on a copy of the board so the real board is
    never mutated.  The hard-coded range(1, 10) is generalized to the
    board's actual length, and the implicit None fall-through is made
    explicit.
    """
    for move in range(1, len(board)):
        copy = get_board_copy(board)
        if is_space_free(copy, move):
            make_move(copy, letter, move)
            if is_winner(copy, letter):
                return move
    return None  # no immediate winning move exists
def get_computer_move(board, temp_computer_letter):
    """Given a board and the computer's letter, determine where to move and
    return that move (a board index 1-9).

    Strategy, in priority order: win now, block the player's win, take a
    corner, take the center, take a side.
    """
    if temp_computer_letter == 'X':
        temp_player_letter = 'O'
    else:
        temp_player_letter = 'X'
    # Here is our algorithm for our Tic Tac Toe AI:
    # First, check if we can win in the next move
    is_ai_winner = is_next_move_win(board, temp_computer_letter)
    if is_ai_winner:
        return is_ai_winner
    # Check if the player could win on their next move, and block them.
    is_player_winner = is_next_move_win(board, temp_player_letter)
    if is_player_winner:
        return is_player_winner
    # Try to take one of the corners, if they are free.
    move = choose_random_move_from_list(board, [1, 3, 7, 9])
    if move is not None:
        return move
    # Try to take the center, if it is free.
    if is_space_free(board, 5):
        return 5
    # Move on one of the sides.
    return choose_random_move_from_list(board, [2, 4, 6, 8])
def is_board_full(board):
    """Return True if every playable square (indices 1..end) has been taken.
    Otherwise return False."""
    # Index 0 is unused by convention; a remaining ' ' anywhere else means
    # the board still has a free square.
    return ' ' not in board[1:]
def start_new_round(board, temp_player_letter, temp_computer_letter, temp_turn):
    """Play one full round on `board`, alternating turns until a win or a tie.

    temp_turn is 'player' or 'computer' and names who moves first.
    """
    while True:
        if temp_turn == 'player':
            # Player’s turn.
            draw_board(board)
            move = get_player_move(board)
            make_move(board, temp_player_letter, move)
            if is_winner(board, temp_player_letter):
                draw_board(board)
                print('Hooray! You have won the game!')
                break
            temp_turn = 'computer'
        else:
            # Computer’s turn.
            move = get_computer_move(board, temp_computer_letter)
            make_move(board, temp_computer_letter, move)
            if is_winner(board, temp_computer_letter):
                draw_board(board)
                print('The computer has beaten you! You lose.')
                break
            temp_turn = 'player'
        # Checked after either side moves: a full board with no winner is a tie.
        if is_board_full(board):
            draw_board(board)
            print('The game is a tie!')
            break
def start_session(board_size=10):
    """Run an interactive session of repeated games against the bot.

    board_size is the backing list length (default 10: index 0 unused plus
    squares 1-9).
    """
    print('Welcome to Tic Tac Toe!')
    while True:
        # Reset the board
        the_board = [' '] * board_size
        player_letter, computer_letter = input_player_letter()
        turn = who_goes_first()
        print('The ' + turn + ' will go first.')
        start_new_round(the_board, player_letter, computer_letter, turn)
        if not play_again():
            break
if __name__ == '__main__':
    # Run an interactive session when executed as a script.
    start_session()
| 36.025 | 98 | 0.624427 |
import random
# function because that would be drastically changing how the
# code works. Instead of creating a normal tic tac toe game like intended,
# it would add a new feature for creating larger boards, no longer making this
# refactoring but adding a new feature.
def draw_board(board):
# "board" is a list of 10 strings representing the board (ignore index 0)
print(' | |')
print(' ' + board[1] + ' | ' + board[2] + ' | ' + board[3])
print(' | |')
print('-----------')
print(' | |')
print(' ' + board[4] + ' | ' + board[5] + ' | ' + board[6])
print(' | |')
print('-----------')
print(' | |')
print(' ' + board[7] + ' | ' + board[8] + ' | ' + board[9])
print(' | |')
def input_player_letter():
letter = ''
while letter not in ('X', 'O'):
print('Do you want to be X or O?')
letter = input().upper()
# the first element in the list is the player’s letter, the second is the computer's letter.
if letter == 'X':
return ['X', 'O']
return ['O', 'X']
def who_goes_first():
if random.randint(0, 1) == 0:
return 'computer'
return 'player'
def play_again():
print('Do you want to play again? (yes or no)')
return input().lower().startswith('y')
def make_move(board, letter, move):
board[move] = letter
def is_winner(board, letter):
return ((board[1] == letter and board[2] == letter and board[3] == letter) or
(board[4] == letter and board[5] == letter and board[6] == letter) or
(board[7] == letter and board[8] == letter and board[9] == letter) or
(board[1] == letter and board[4] == letter and board[7] == letter) or
(board[2] == letter and board[5] == letter and board[8] == letter) or
(board[3] == letter and board[6] == letter and board[9] == letter) or
(board[3] == letter and board[5] == letter and board[7] == letter) or
(board[1] == letter and board[5] == letter and board[9] == letter))
def get_board_copy(board):
return list(board)
def is_space_free(board, move):
return board[move] == ' '
def get_player_move(board):
player_move = ' '
options = set(str(i) for i in range(1, len(board)))
while (player_move not in options or
not is_space_free(board, int(player_move))):
print('What is your next move? (1-9)')
player_move = input()
return int(player_move)
def choose_random_move_from_list(board, moves_list):
possible_moves = []
for i in moves_list:
if is_space_free(board, i):
possible_moves.append(i)
if possible_moves:
return random.choice(possible_moves)
def is_next_move_win(board, letter):
for i in range(1, 10):
copy = get_board_copy(board)
if is_space_free(copy, i):
make_move(copy, letter, i)
if is_winner(copy, letter):
return i
def get_computer_move(board, temp_computer_letter):
if temp_computer_letter == 'X':
temp_player_letter = 'O'
else:
temp_player_letter = 'X'
is_ai_winner = is_next_move_win(board, temp_computer_letter)
if is_ai_winner:
return is_ai_winner
is_player_winner = is_next_move_win(board, temp_player_letter)
if is_player_winner:
return is_player_winner
move = choose_random_move_from_list(board, [1, 3, 7, 9])
if move is not None:
return move
if is_space_free(board, 5):
return 5
return choose_random_move_from_list(board, [2, 4, 6, 8])
def is_board_full(board):
for i in range(1, len(board)):
if is_space_free(board, i):
return False
return True
def start_new_round(board, temp_player_letter, temp_computer_letter, temp_turn):
while True:
if temp_turn == 'player':
draw_board(board)
move = get_player_move(board)
make_move(board, temp_player_letter, move)
if is_winner(board, temp_player_letter):
draw_board(board)
print('Hooray! You have won the game!')
break
temp_turn = 'computer'
else:
move = get_computer_move(board, temp_computer_letter)
make_move(board, temp_computer_letter, move)
if is_winner(board, temp_computer_letter):
draw_board(board)
print('The computer has beaten you! You lose.')
break
temp_turn = 'player'
if is_board_full(board):
draw_board(board)
print('The game is a tie!')
break
def start_session(board_size=10):
print('Welcome to Tic Tac Toe!')
while True:
the_board = [' '] * board_size
player_letter, computer_letter = input_player_letter()
turn = who_goes_first()
print('The ' + turn + ' will go first.')
start_new_round(the_board, player_letter, computer_letter, turn)
if not play_again():
break
if __name__ == '__main__':
start_session()
| true | true |
f721131d0c71c26b6d07fafc53e439f251dd92fe | 18,055 | py | Python | test/test_l2bd_arp_term.py | snergfdio/vppclone | a288f8a1020eb74687eeb0a0a771977ce9b0c01d | [
"Apache-2.0"
] | null | null | null | test/test_l2bd_arp_term.py | snergfdio/vppclone | a288f8a1020eb74687eeb0a0a771977ce9b0c01d | [
"Apache-2.0"
] | 1 | 2021-06-01T23:30:08.000Z | 2021-06-01T23:30:08.000Z | test/test_l2bd_arp_term.py | snergfdio/vppclone | a288f8a1020eb74687eeb0a0a771977ce9b0c01d | [
"Apache-2.0"
] | 1 | 2019-03-11T19:28:31.000Z | 2019-03-11T19:28:31.000Z | #!/usr/bin/env python
""" L2BD ARP term Test """
import unittest
import random
import copy
from socket import AF_INET, AF_INET6
from scapy.packet import Raw
from scapy.layers.l2 import Ether, ARP
from scapy.layers.inet import IP
from scapy.utils import inet_pton, inet_ntop
from scapy.utils6 import in6_getnsma, in6_getnsmac, in6_ptop, in6_islladdr, \
in6_mactoifaceid, in6_ismaddr
from scapy.layers.inet6 import IPv6, UDP, ICMPv6ND_NS, ICMPv6ND_RS, \
ICMPv6ND_RA, ICMPv6NDOptSrcLLAddr, getmacbyip6, ICMPv6MRD_Solicitation, \
ICMPv6NDOptMTU, ICMPv6NDOptSrcLLAddr, ICMPv6NDOptPrefixInfo, \
ICMPv6ND_NA, ICMPv6NDOptDstLLAddr, ICMPv6DestUnreach, icmp6types
from framework import VppTestCase, VppTestRunner
from util import Host, ppp
class TestL2bdArpTerm(VppTestCase):
""" L2BD arp termination Test Case """
    @classmethod
    def setUpClass(cls):
        """
        Perform standard class setup (defined by class method setUpClass in
        class VppTestCase) before running the test case, set test case related
        variables and configure VPP.
        """
        super(TestL2bdArpTerm, cls).setUpClass()
        try:
            # Create pg interfaces: one bridge domain with 3 pg interfaces.
            n_bd = 1
            cls.ifs_per_bd = ifs_per_bd = 3
            n_ifs = n_bd * ifs_per_bd
            cls.create_pg_interfaces(range(n_ifs))
            # Set up all interfaces
            for i in cls.pg_interfaces:
                i.admin_up()
            # Set of arp-term Host entries, shared across the test methods.
            cls.hosts = set()
        except Exception:
            # Undo partial setup before re-raising so later classes start clean.
            super(TestL2bdArpTerm, cls).tearDownClass()
            raise
    def setUp(self):
        """
        Clear trace and packet infos before running each test.
        """
        self.reset_packet_infos()
        super(TestL2bdArpTerm, self).setUp()
    def tearDown(self):
        """
        Show various debug prints after each test.
        """
        super(TestL2bdArpTerm, self).tearDown()
        if not self.vpp_dead:
            # Dump L2 FIB and bridge-domain state for post-mortem debugging.
            self.logger.info(self.vapi.ppcli("show l2fib verbose"))
            self.logger.info(self.vapi.ppcli("show bridge-domain 1 detail"))
    def add_del_arp_term_hosts(self, entries, bd_id=1, is_add=1, is_ipv6=0):
        """Add (is_add=1) or remove (is_add=0) ARP/ND termination entries.

        :param entries: iterable of Host objects (mac plus ip4/ip6).
        :param int bd_id: bridge domain the entries belong to.
        :param int is_ipv6: 0 -> use e.ip4, non-zero -> use e.ip6.
        """
        for e in entries:
            ip = e.ip4 if is_ipv6 == 0 else e.ip6
            self.vapi.bd_ip_mac_add_del(bd_id=bd_id, is_add=is_add, ip=ip,
                                        mac=e.mac)
@classmethod
def mac_list(cls, b6_range):
return ["00:00:ca:fe:00:%02x" % b6 for b6 in b6_range]
    @classmethod
    def ip4_host(cls, subnet, host, mac):
        """Return a Host with IPv4 address 172.17.1<subnet>.<host>."""
        return Host(mac=mac,
                    ip4="172.17.1%02u.%u" % (subnet, host))
    @classmethod
    def ip4_hosts(cls, subnet, start, mac_list):
        """Return a set of IPv4 Hosts: consecutive host numbers from `start`
        paired positionally with the given MACs."""
        return {cls.ip4_host(subnet, start + j, mac_list[j])
                for j in range(len(mac_list))}
    @classmethod
    def ip6_host(cls, subnet, host, mac):
        """Return a Host with IPv6 address fd01:<subnet>::<host>."""
        return Host(mac=mac,
                    ip6="fd01:%x::%x" % (subnet, host))
    @classmethod
    def ip6_hosts(cls, subnet, start, mac_list):
        """IPv6 counterpart of ip4_hosts."""
        return {cls.ip6_host(subnet, start + j, mac_list[j])
                for j in range(len(mac_list))}
    @classmethod
    def bd_swifs(cls, b):
        """Return the pg interfaces belonging to bridge domain number b."""
        n = cls.ifs_per_bd
        start = (b - 1) * n
        return [cls.pg_interfaces[j] for j in range(start, start + n)]
    def bd_add_del(self, bd_id=1, is_add=1):
        """Create (is_add=1) or delete (is_add=0) a bridge domain, attaching
        or detaching its pg interfaces in L2 bridge mode."""
        if is_add:
            self.vapi.bridge_domain_add_del(bd_id=bd_id, is_add=is_add)
        for swif in self.bd_swifs(bd_id):
            swif_idx = swif.sw_if_index
            self.vapi.sw_interface_set_l2_bridge(
                swif_idx, bd_id=bd_id, enable=is_add)
        if not is_add:
            # Interfaces must leave the BD before the BD itself is removed.
            self.vapi.bridge_domain_add_del(bd_id=bd_id, is_add=is_add)
    @classmethod
    def arp_req(cls, src_host, host):
        """Build a broadcast ARP who-has request from src_host for host.ip4."""
        return (Ether(dst="ff:ff:ff:ff:ff:ff", src=src_host.mac) /
                ARP(op="who-has",
                    hwsrc=src_host.bin_mac,
                    pdst=host.ip4,
                    psrc=src_host.ip4))
    @classmethod
    def arp_reqs(cls, src_host, entries):
        """Build one ARP request per entry, all originated by src_host."""
        return [cls.arp_req(src_host, e) for e in entries]
    @classmethod
    def garp_req(cls, host):
        """Build a gratuitous ARP (sender == target) for host."""
        return cls.arp_req(host, host)
    @classmethod
    def garp_reqs(cls, entries):
        """Build one gratuitous ARP per entry."""
        return [cls.garp_req(e) for e in entries]
    def arp_resp_host(self, src_host, arp_resp):
        """Validate an ARP reply addressed to src_host; return the responding
        (mac, ip4) pair as a Host."""
        ether = arp_resp[Ether]
        self.assertEqual(ether.dst, src_host.mac)
        arp = arp_resp[ARP]
        self.assertEqual(arp.hwtype, 1)     # hardware type: Ethernet
        self.assertEqual(arp.ptype, 0x800)  # protocol type: IPv4
        self.assertEqual(arp.hwlen, 6)
        self.assertEqual(arp.plen, 4)
        arp_opts = {"who-has": 1, "is-at": 2}
        self.assertEqual(arp.op, arp_opts["is-at"])
        self.assertEqual(arp.hwdst, src_host.mac)
        self.assertEqual(arp.pdst, src_host.ip4)
        return Host(mac=arp.hwsrc, ip4=arp.psrc)
    def arp_resp_hosts(self, src_host, pkts):
        """Validate a batch of ARP replies; return the responders as a set."""
        return {self.arp_resp_host(src_host, p) for p in pkts}
@staticmethod
def inttoip4(ip):
o1 = int(ip / 16777216) % 256
o2 = int(ip / 65536) % 256
o3 = int(ip / 256) % 256
o4 = int(ip) % 256
return '%s.%s.%s.%s' % (o1, o2, o3, o4)
    def arp_event_host(self, e):
        """Convert an ip4_arp_event API message into a Host(mac, ip4)."""
        return Host(str(e.mac), ip4=str(e.ip))
    def arp_event_hosts(self, evs):
        """Convert a list of ip4_arp_event messages into a set of Hosts."""
        return {self.arp_event_host(e) for e in evs}
    def nd_event_host(self, e):
        """Convert an ip6_nd_event API message into a Host(mac, ip6)."""
        return Host(str(e.mac), ip6=str(e.ip))
    def nd_event_hosts(self, evs):
        """Convert a list of ip6_nd_event messages into a set of Hosts."""
        return {self.nd_event_host(e) for e in evs}
    @classmethod
    def ns_req(cls, src_host, host):
        """Build an ICMPv6 Neighbor Solicitation from src_host for host.ip6.

        The IPv6 destination is the solicited-node multicast address
        derived from "fd10::ffff"; the Ethernet destination is broadcast.
        """
        nsma = in6_getnsma(inet_pton(AF_INET6, "fd10::ffff"))
        d = inet_ntop(AF_INET6, nsma)
        return (Ether(dst="ff:ff:ff:ff:ff:ff", src=src_host.mac) /
                IPv6(dst=d, src=src_host.ip6) /
                ICMPv6ND_NS(tgt=host.ip6) /
                ICMPv6NDOptSrcLLAddr(lladdr=src_host.mac))
    @classmethod
    def ns_reqs_dst(cls, entries, dst_host):
        """One NS per entry, each asking about dst_host."""
        return [cls.ns_req(e, dst_host) for e in entries]
    @classmethod
    def ns_reqs_src(cls, src_host, entries):
        """One NS per entry, all sent by src_host."""
        return [cls.ns_req(src_host, e) for e in entries]
    def na_resp_host(self, src_host, rx):
        """Validate a Neighbor Advertisement sent to src_host; return the
        advertised (mac, ip6) pair as a Host."""
        self.assertEqual(rx[Ether].dst, src_host.mac)
        self.assertEqual(in6_ptop(rx[IPv6].dst),
                         in6_ptop(src_host.ip6))
        self.assertTrue(rx.haslayer(ICMPv6ND_NA))
        self.assertTrue(rx.haslayer(ICMPv6NDOptDstLLAddr))
        na = rx[ICMPv6ND_NA]
        return Host(mac=na.lladdr, ip6=na.tgt)
    def na_resp_hosts(self, src_host, pkts):
        """Validate a batch of NAs; return the advertised hosts as a set."""
        return {self.na_resp_host(src_host, p) for p in pkts}
def set_bd_flags(self, bd_id, **args):
"""
Enable/disable defined feature(s) of the bridge domain.
:param int bd_id: Bridge domain ID.
:param list args: List of feature/status pairs. Allowed features: \
learn, forward, flood, uu_flood and arp_term. Status False means \
disable, status True means enable the feature.
:raise: ValueError in case of unknown feature in the input.
"""
for flag in args:
if flag == "learn":
feature_bitmap = 1 << 0
elif flag == "forward":
feature_bitmap = 1 << 1
elif flag == "flood":
feature_bitmap = 1 << 2
elif flag == "uu_flood":
feature_bitmap = 1 << 3
elif flag == "arp_term":
feature_bitmap = 1 << 4
else:
raise ValueError("Unknown feature used: %s" % flag)
is_set = 1 if args[flag] else 0
self.vapi.bridge_flags(bd_id, is_set, feature_bitmap)
self.logger.info("Bridge domain ID %d updated" % bd_id)
    def verify_arp(self, src_host, req_hosts, resp_hosts, bd_id=1):
        """Send ARP requests for req_hosts into every interface of the BD and
        check that exactly resp_hosts are answered on each of them."""
        reqs = self.arp_reqs(src_host, req_hosts)
        for swif in self.bd_swifs(bd_id):
            swif.add_stream(reqs)
        self.pg_enable_capture(self.pg_interfaces)
        self.pg_start()
        for swif in self.bd_swifs(bd_id):
            resp_pkts = swif.get_capture(len(resp_hosts))
            resps = self.arp_resp_hosts(src_host, resp_pkts)
            # Empty symmetric difference <=> responses match exactly.
            self.assertEqual(len(resps ^ resp_hosts), 0)
    def verify_nd(self, src_host, req_hosts, resp_hosts, bd_id=1):
        """IPv6 counterpart of verify_arp, using NS requests / NA replies."""
        reqs = self.ns_reqs_src(src_host, req_hosts)
        for swif in self.bd_swifs(bd_id):
            swif.add_stream(reqs)
        self.pg_enable_capture(self.pg_interfaces)
        self.pg_start()
        for swif in self.bd_swifs(bd_id):
            resp_pkts = swif.get_capture(len(resp_hosts))
            resps = self.na_resp_hosts(src_host, resp_pkts)
            self.assertEqual(len(resps ^ resp_hosts), 0)
    def test_l2bd_arp_term_01(self):
        """ L2BD arp term - add 5 hosts, verify arp responses
        """
        src_host = self.ip4_host(50, 50, "00:00:11:22:33:44")
        self.bd_add_del(1, is_add=1)
        # Disable flooding/learning so replies can only come from arp-term.
        self.set_bd_flags(1, arp_term=True, flood=False,
                          uu_flood=False, learn=False)
        # NOTE(review): range(1, 5) yields 4 MACs, not the 5 the title claims.
        macs = self.mac_list(range(1, 5))
        hosts = self.ip4_hosts(4, 1, macs)
        self.add_del_arp_term_hosts(hosts, is_add=1)
        self.verify_arp(src_host, hosts, hosts)
        # Stash on the class: tests 02-04 build on these entries.
        type(self).hosts = hosts
    def test_l2bd_arp_term_02(self):
        """ L2BD arp term - delete 3 hosts, verify arp responses
        """
        src_host = self.ip4_host(50, 50, "00:00:11:22:33:44")
        # NOTE(review): range(1, 3) deletes 2 hosts, not 3 as the title says.
        macs = self.mac_list(range(1, 3))
        deleted = self.ip4_hosts(4, 1, macs)
        self.add_del_arp_term_hosts(deleted, is_add=0)
        remaining = self.hosts - deleted
        # Request all original hosts; only the remaining ones may answer.
        self.verify_arp(src_host, self.hosts, remaining)
        type(self).hosts = remaining
        self.bd_add_del(1, is_add=0)
    def test_l2bd_arp_term_03(self):
        """ L2BD arp term - recreate BD1, readd 3 hosts, verify arp responses
        """
        src_host = self.ip4_host(50, 50, "00:00:11:22:33:44")
        self.bd_add_del(1, is_add=1)
        self.set_bd_flags(1, arp_term=True, flood=False,
                          uu_flood=False, learn=False)
        macs = self.mac_list(range(1, 3))
        readded = self.ip4_hosts(4, 1, macs)
        self.add_del_arp_term_hosts(readded, is_add=1)
        # Only the re-added entries survive the BD re-creation.
        self.verify_arp(src_host, self.hosts | readded, readded)
        type(self).hosts = readded
    def test_l2bd_arp_term_04(self):
        """ L2BD arp term - 2 IP4 addrs per host
        """
        src_host = self.ip4_host(50, 50, "00:00:11:22:33:44")
        macs = self.mac_list(range(1, 3))
        # Same MACs as test 03 but in subnet 5 -> a second IP per MAC.
        sub5_hosts = self.ip4_hosts(5, 1, macs)
        self.add_del_arp_term_hosts(sub5_hosts, is_add=1)
        hosts = self.hosts | sub5_hosts
        self.verify_arp(src_host, hosts, hosts)
        type(self).hosts = hosts
        self.bd_add_del(1, is_add=0)
    def test_l2bd_arp_term_05(self):
        """ L2BD arp term - create and update 10 IP4-mac pairs
        """
        src_host = self.ip4_host(50, 50, "00:00:11:22:33:44")
        self.bd_add_del(1, is_add=1)
        self.set_bd_flags(1, arp_term=True, flood=False,
                          uu_flood=False, learn=False)
        macs1 = self.mac_list(range(10, 20))
        hosts1 = self.ip4_hosts(5, 1, macs1)
        self.add_del_arp_term_hosts(hosts1, is_add=1)
        self.verify_arp(src_host, hosts1, hosts1)
        # Re-add the same 10 IPs with different MACs: entries must update.
        macs2 = self.mac_list(range(20, 30))
        hosts2 = self.ip4_hosts(5, 1, macs2)
        self.add_del_arp_term_hosts(hosts2, is_add=1)
        # Asking for the old hosts must now yield the updated MACs.
        self.verify_arp(src_host, hosts1, hosts2)
        self.bd_add_del(1, is_add=0)
    def test_l2bd_arp_term_06(self):
        """ L2BD arp/ND term - hosts with both ip4/ip6
        """
        src_host4 = self.ip4_host(50, 50, "00:00:11:22:33:44")
        src_host6 = self.ip6_host(50, 50, "00:00:11:22:33:44")
        self.bd_add_del(1, is_add=1)
        # enable flood to make sure requests are not flooded
        # NOTE(review): the comment and the flag disagree — flood=True is set
        # here (unlike the other tests); confirm the intended behavior.
        self.set_bd_flags(1, arp_term=True, flood=True,
                          uu_flood=False, learn=False)
        macs = self.mac_list(range(10, 20))
        hosts6 = self.ip6_hosts(5, 1, macs)
        hosts4 = self.ip4_hosts(5, 1, macs)
        self.add_del_arp_term_hosts(hosts4, is_add=1)
        self.add_del_arp_term_hosts(hosts6, is_add=1, is_ipv6=1)
        self.verify_arp(src_host4, hosts4, hosts4)
        self.verify_nd(src_host6, hosts6, hosts6)
        self.bd_add_del(1, is_add=0)
    def test_l2bd_arp_term_07(self):
        """ L2BD ND term - Add and Del hosts, verify ND replies
        """
        src_host6 = self.ip6_host(50, 50, "00:00:11:22:33:44")
        self.bd_add_del(1, is_add=1)
        self.set_bd_flags(1, arp_term=True, flood=False,
                          uu_flood=False, learn=False)
        macs = self.mac_list(range(10, 20))
        hosts6 = self.ip6_hosts(5, 1, macs)
        self.add_del_arp_term_hosts(hosts6, is_add=1, is_ipv6=1)
        self.verify_nd(src_host6, hosts6, hosts6)
        # Delete the first half of the entries; only the rest may answer.
        del_macs = self.mac_list(range(10, 15))
        deleted = self.ip6_hosts(5, 1, del_macs)
        self.add_del_arp_term_hosts(deleted, is_add=0, is_ipv6=1)
        self.verify_nd(src_host6, hosts6, hosts6 - deleted)
        self.bd_add_del(1, is_add=0)
    def test_l2bd_arp_term_08(self):
        """ L2BD ND term - Add and update IP+mac, verify ND replies
        """
        src_host = self.ip6_host(50, 50, "00:00:11:22:33:44")
        self.bd_add_del(1, is_add=1)
        self.set_bd_flags(1, arp_term=True, flood=False,
                          uu_flood=False, learn=False)
        macs1 = self.mac_list(range(10, 20))
        hosts = self.ip6_hosts(5, 1, macs1)
        self.add_del_arp_term_hosts(hosts, is_add=1, is_ipv6=1)
        self.verify_nd(src_host, hosts, hosts)
        # Same IPv6 addresses, new MACs: NA replies must carry the new MACs.
        macs2 = self.mac_list(range(20, 30))
        updated = self.ip6_hosts(5, 1, macs2)
        self.add_del_arp_term_hosts(updated, is_add=1, is_ipv6=1)
        self.verify_nd(src_host, hosts, updated)
        self.bd_add_del(1, is_add=0)
    def test_l2bd_arp_term_09(self):
        """ L2BD arp term - send garps, verify arp event reports
        """
        self.vapi.want_ip4_arp_events()
        self.bd_add_del(1, is_add=1)
        self.set_bd_flags(1, arp_term=True, flood=False,
                          uu_flood=False, learn=False)
        macs = self.mac_list(range(90, 95))
        hosts = self.ip4_hosts(5, 1, macs)
        garps = self.garp_reqs(hosts)
        self.bd_swifs(1)[0].add_stream(garps)
        self.pg_enable_capture(self.pg_interfaces)
        self.pg_start()
        # Expect exactly one ip4_arp_event per gratuitous ARP sent.
        evs = [self.vapi.wait_for_event(1, "ip4_arp_event")
               for i in range(len(hosts))]
        ev_hosts = self.arp_event_hosts(evs)
        self.assertEqual(len(ev_hosts ^ hosts), 0)
def test_l2bd_arp_term_10(self):
""" L2BD arp term - send duplicate garps, verify suppression
"""
macs = self.mac_list(range(70, 71))
hosts = self.ip4_hosts(6, 1, macs)
""" send the packet 5 times expect one event
"""
garps = self.garp_reqs(hosts) * 5
self.bd_swifs(1)[0].add_stream(garps)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
evs = [self.vapi.wait_for_event(1, "ip4_arp_event")
for i in range(len(hosts))]
ev_hosts = self.arp_event_hosts(evs)
self.assertEqual(len(ev_hosts ^ hosts), 0)
def test_l2bd_arp_term_11(self):
""" L2BD arp term - disable ip4 arp events,send garps, verify no events
"""
self.vapi.want_ip4_arp_events(enable_disable=0)
macs = self.mac_list(range(90, 95))
hosts = self.ip4_hosts(5, 1, macs)
garps = self.garp_reqs(hosts)
self.bd_swifs(1)[0].add_stream(garps)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
self.sleep(1)
self.assertEqual(len(self.vapi.collect_events()), 0)
self.bd_add_del(1, is_add=0)
    def test_l2bd_arp_term_12(self):
        """ L2BD ND term - send NS packets verify reports
        """
        # Subscribe to IPv6 ND events for any address ("::" wildcard).
        self.vapi.want_ip6_nd_events(ip="::")
        dst_host = self.ip6_host(50, 50, "00:00:11:22:33:44")
        self.bd_add_del(1, is_add=1)
        self.set_bd_flags(1, arp_term=True, flood=False,
                          uu_flood=False, learn=False)
        macs = self.mac_list(range(10, 15))
        hosts = self.ip6_hosts(5, 1, macs)
        # One NS sourced from each host, all soliciting dst_host.
        reqs = self.ns_reqs_dst(hosts, dst_host)
        self.bd_swifs(1)[0].add_stream(reqs)
        self.pg_enable_capture(self.pg_interfaces)
        self.pg_start()
        # Expect one ip6_nd_event per sending host (2 s timeout each).
        evs = [self.vapi.wait_for_event(2, "ip6_nd_event")
               for i in range(len(hosts))]
        ev_hosts = self.nd_event_hosts(evs)
        # Empty symmetric difference <=> reported hosts == sending hosts.
        self.assertEqual(len(ev_hosts ^ hosts), 0)
def test_l2bd_arp_term_13(self):
""" L2BD ND term - send duplicate ns, verify suppression
"""
dst_host = self.ip6_host(50, 50, "00:00:11:22:33:44")
macs = self.mac_list(range(10, 11))
hosts = self.ip6_hosts(5, 1, macs)
reqs = self.ns_reqs_dst(hosts, dst_host) * 5
self.bd_swifs(1)[0].add_stream(reqs)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
evs = [self.vapi.wait_for_event(2, "ip6_nd_event")
for i in range(len(hosts))]
ev_hosts = self.nd_event_hosts(evs)
self.assertEqual(len(ev_hosts ^ hosts), 0)
    def test_l2bd_arp_term_14(self):
        """ L2BD ND term - disable ip6 ND events, send ns, verify no events
        """
        # Unsubscribe from IPv6 ND events; traffic identical in shape to
        # test_12 must now produce no event reports at all.
        self.vapi.want_ip6_nd_events(enable_disable=0, ip="::")
        dst_host = self.ip6_host(50, 50, "00:00:11:22:33:44")
        macs = self.mac_list(range(10, 15))
        hosts = self.ip6_hosts(5, 1, macs)
        reqs = self.ns_reqs_dst(hosts, dst_host)
        self.bd_swifs(1)[0].add_stream(reqs)
        self.pg_enable_capture(self.pg_interfaces)
        self.pg_start()
        # Allow a grace period for any (unexpected) events to arrive.
        self.sleep(1)
        self.assertEqual(len(self.vapi.collect_events()), 0)
        self.bd_add_del(1, is_add=0)
# Run the suite under VPP's test runner when executed as a script.
if __name__ == '__main__':
    unittest.main(testRunner=VppTestRunner)
| 36.92229 | 79 | 0.608419 |
import unittest
import random
import copy
from socket import AF_INET, AF_INET6
from scapy.packet import Raw
from scapy.layers.l2 import Ether, ARP
from scapy.layers.inet import IP
from scapy.utils import inet_pton, inet_ntop
from scapy.utils6 import in6_getnsma, in6_getnsmac, in6_ptop, in6_islladdr, \
in6_mactoifaceid, in6_ismaddr
from scapy.layers.inet6 import IPv6, UDP, ICMPv6ND_NS, ICMPv6ND_RS, \
ICMPv6ND_RA, ICMPv6NDOptSrcLLAddr, getmacbyip6, ICMPv6MRD_Solicitation, \
ICMPv6NDOptMTU, ICMPv6NDOptSrcLLAddr, ICMPv6NDOptPrefixInfo, \
ICMPv6ND_NA, ICMPv6NDOptDstLLAddr, ICMPv6DestUnreach, icmp6types
from framework import VppTestCase, VppTestRunner
from util import Host, ppp
class TestL2bdArpTerm(VppTestCase):
    """L2 bridge-domain ARP/ND termination test case.

    IP4/IP6 host entries are installed in a bridge domain via
    ``bd_ip_mac_add_del``; the BD is then expected to answer ARP requests
    and IPv6 neighbor solicitations for those hosts itself, and to report
    (G)ARP/NS activity through the VPP event API.
    """

    @classmethod
    def setUpClass(cls):
        """Create and admin-up the pg interfaces (1 BD x 3 interfaces)."""
        super(TestL2bdArpTerm, cls).setUpClass()
        try:
            n_bd = 1
            cls.ifs_per_bd = ifs_per_bd = 3
            n_ifs = n_bd * ifs_per_bd
            cls.create_pg_interfaces(range(n_ifs))
            for i in cls.pg_interfaces:
                i.admin_up()
            # Host entries shared across the ordered test methods.
            cls.hosts = set()
        except Exception:
            super(TestL2bdArpTerm, cls).tearDownClass()
            raise

    def setUp(self):
        """Reset per-test packet bookkeeping before each test."""
        self.reset_packet_infos()
        super(TestL2bdArpTerm, self).setUp()

    def tearDown(self):
        """Log L2 FIB and BD state after each test for diagnostics."""
        super(TestL2bdArpTerm, self).tearDown()
        if not self.vpp_dead:
            self.logger.info(self.vapi.ppcli("show l2fib verbose"))
            self.logger.info(self.vapi.ppcli("show bridge-domain 1 detail"))

    def add_del_arp_term_hosts(self, entries, bd_id=1, is_add=1, is_ipv6=0):
        """Add or delete IP-to-MAC termination entries in a bridge domain.

        :param entries: iterable of Host objects (ip4/ip6 + mac).
        :param bd_id: target bridge-domain id.
        :param is_add: 1 to add, 0 to delete.
        :param is_ipv6: select the host's ip6 address instead of ip4.
        """
        for e in entries:
            ip = e.ip4 if is_ipv6 == 0 else e.ip6
            self.vapi.bd_ip_mac_add_del(bd_id=bd_id, is_add=is_add, ip=ip,
                                        mac=e.mac)

    @classmethod
    def mac_list(cls, b6_range):
        """Return MAC strings differing only in the last octet."""
        return ["00:00:ca:fe:00:%02x" % b6 for b6 in b6_range]

    @classmethod
    def ip4_host(cls, subnet, host, mac):
        """Build a Host with an address in 172.17.1<subnet>.0/24."""
        return Host(mac=mac,
                    ip4="172.17.1%02u.%u" % (subnet, host))

    @classmethod
    def ip4_hosts(cls, subnet, start, mac_list):
        """Return a set of consecutive IP4 hosts, one per MAC."""
        return {cls.ip4_host(subnet, start + j, mac_list[j])
                for j in range(len(mac_list))}

    @classmethod
    def ip6_host(cls, subnet, host, mac):
        """Build a Host with an address in fd01:<subnet>::/64."""
        return Host(mac=mac,
                    ip6="fd01:%x::%x" % (subnet, host))

    @classmethod
    def ip6_hosts(cls, subnet, start, mac_list):
        """Return a set of consecutive IP6 hosts, one per MAC."""
        return {cls.ip6_host(subnet, start + j, mac_list[j])
                for j in range(len(mac_list))}

    @classmethod
    def bd_swifs(cls, b):
        """Return the pg interfaces belonging to bridge domain *b*."""
        n = cls.ifs_per_bd
        start = (b - 1) * n
        return [cls.pg_interfaces[j] for j in range(start, start + n)]

    def bd_add_del(self, bd_id=1, is_add=1):
        """Create/delete a bridge domain and (un)bind its interfaces.

        On add, the BD is created before binding; on delete, interfaces
        are unbound first and the BD removed last.
        """
        if is_add:
            self.vapi.bridge_domain_add_del(bd_id=bd_id, is_add=is_add)
        for swif in self.bd_swifs(bd_id):
            swif_idx = swif.sw_if_index
            self.vapi.sw_interface_set_l2_bridge(
                swif_idx, bd_id=bd_id, enable=is_add)
        if not is_add:
            self.vapi.bridge_domain_add_del(bd_id=bd_id, is_add=is_add)

    @classmethod
    def arp_req(cls, src_host, host):
        """Build a broadcast ARP who-has for *host* from *src_host*."""
        return (Ether(dst="ff:ff:ff:ff:ff:ff", src=src_host.mac) /
                ARP(op="who-has",
                    hwsrc=src_host.bin_mac,
                    pdst=host.ip4,
                    psrc=src_host.ip4))

    @classmethod
    def arp_reqs(cls, src_host, entries):
        """Build one ARP request per entry."""
        return [cls.arp_req(src_host, e) for e in entries]

    @classmethod
    def garp_req(cls, host):
        """Gratuitous ARP: host asks for its own address."""
        return cls.arp_req(host, host)

    @classmethod
    def garp_reqs(cls, entries):
        """Build one gratuitous ARP per entry."""
        return [cls.garp_req(e) for e in entries]

    def arp_resp_host(self, src_host, arp_resp):
        """Validate one ARP reply addressed to *src_host*; return the
        responding Host (hwsrc/psrc) it advertises."""
        ether = arp_resp[Ether]
        self.assertEqual(ether.dst, src_host.mac)

        arp = arp_resp[ARP]
        # Ethernet/IPv4 ARP: hwtype 1, ptype 0x800, 6-byte MAC, 4-byte IP.
        self.assertEqual(arp.hwtype, 1)
        self.assertEqual(arp.ptype, 0x800)
        self.assertEqual(arp.hwlen, 6)
        self.assertEqual(arp.plen, 4)
        arp_opts = {"who-has": 1, "is-at": 2}
        self.assertEqual(arp.op, arp_opts["is-at"])
        self.assertEqual(arp.hwdst, src_host.mac)
        self.assertEqual(arp.pdst, src_host.ip4)
        return Host(mac=arp.hwsrc, ip4=arp.psrc)

    def arp_resp_hosts(self, src_host, pkts):
        """Validate a batch of ARP replies; return the responding hosts."""
        return {self.arp_resp_host(src_host, p) for p in pkts}

    @staticmethod
    def inttoip4(ip):
        """Convert a 32-bit integer to dotted-quad IPv4 notation."""
        o1 = int(ip / 16777216) % 256
        o2 = int(ip / 65536) % 256
        o3 = int(ip / 256) % 256
        o4 = int(ip) % 256
        return '%s.%s.%s.%s' % (o1, o2, o3, o4)

    def arp_event_host(self, e):
        """Convert an ip4_arp_event into a Host for set comparison."""
        return Host(str(e.mac), ip4=str(e.ip))

    def arp_event_hosts(self, evs):
        """Convert a list of ip4_arp_events into a set of Hosts."""
        return {self.arp_event_host(e) for e in evs}

    def nd_event_host(self, e):
        """Convert an ip6_nd_event into a Host for set comparison."""
        return Host(str(e.mac), ip6=str(e.ip))

    def nd_event_hosts(self, evs):
        """Convert a list of ip6_nd_events into a set of Hosts."""
        return {self.nd_event_host(e) for e in evs}

    @classmethod
    def ns_req(cls, src_host, host):
        """Build an IPv6 Neighbor Solicitation for *host* from *src_host*.

        The IPv6 destination is the solicited-node multicast address
        derived from fd10::ffff.
        """
        nsma = in6_getnsma(inet_pton(AF_INET6, "fd10::ffff"))
        d = inet_ntop(AF_INET6, nsma)
        return (Ether(dst="ff:ff:ff:ff:ff:ff", src=src_host.mac) /
                IPv6(dst=d, src=src_host.ip6) /
                ICMPv6ND_NS(tgt=host.ip6) /
                ICMPv6NDOptSrcLLAddr(lladdr=src_host.mac))

    @classmethod
    def ns_reqs_dst(cls, entries, dst_host):
        """One NS per entry, each entry soliciting the same dst_host."""
        return [cls.ns_req(e, dst_host) for e in entries]

    @classmethod
    def ns_reqs_src(cls, src_host, entries):
        """One NS from src_host per solicited entry."""
        return [cls.ns_req(src_host, e) for e in entries]

    def na_resp_host(self, src_host, rx):
        """Validate one Neighbor Advertisement sent back to *src_host*;
        return the advertised Host (lladdr/tgt)."""
        self.assertEqual(rx[Ether].dst, src_host.mac)
        self.assertEqual(in6_ptop(rx[IPv6].dst),
                         in6_ptop(src_host.ip6))

        self.assertTrue(rx.haslayer(ICMPv6ND_NA))
        self.assertTrue(rx.haslayer(ICMPv6NDOptDstLLAddr))

        na = rx[ICMPv6ND_NA]
        return Host(mac=na.lladdr, ip6=na.tgt)

    def na_resp_hosts(self, src_host, pkts):
        """Validate a batch of NAs; return the advertised hosts."""
        return {self.na_resp_host(src_host, p) for p in pkts}

    def set_bd_flags(self, bd_id, **args):
        """Set/clear bridge-domain feature flags by keyword.

        Accepted keywords: learn, forward, flood, uu_flood, arp_term;
        each maps to one bit of the BD feature bitmap.
        """
        for flag in args:
            if flag == "learn":
                feature_bitmap = 1 << 0
            elif flag == "forward":
                feature_bitmap = 1 << 1
            elif flag == "flood":
                feature_bitmap = 1 << 2
            elif flag == "uu_flood":
                feature_bitmap = 1 << 3
            elif flag == "arp_term":
                feature_bitmap = 1 << 4
            else:
                raise ValueError("Unknown feature used: %s" % flag)
            is_set = 1 if args[flag] else 0
            self.vapi.bridge_flags(bd_id, is_set, feature_bitmap)
        self.logger.info("Bridge domain ID %d updated" % bd_id)

    def verify_arp(self, src_host, req_hosts, resp_hosts, bd_id=1):
        """Send ARP requests for req_hosts into every BD interface and
        check each interface gets replies for exactly resp_hosts."""
        reqs = self.arp_reqs(src_host, req_hosts)

        for swif in self.bd_swifs(bd_id):
            swif.add_stream(reqs)

        self.pg_enable_capture(self.pg_interfaces)
        self.pg_start()

        for swif in self.bd_swifs(bd_id):
            resp_pkts = swif.get_capture(len(resp_hosts))
            resps = self.arp_resp_hosts(src_host, resp_pkts)
            # Empty symmetric difference <=> responders == expected set.
            self.assertEqual(len(resps ^ resp_hosts), 0)

    def verify_nd(self, src_host, req_hosts, resp_hosts, bd_id=1):
        """Send NS packets for req_hosts into every BD interface and
        check each interface gets NAs for exactly resp_hosts."""
        reqs = self.ns_reqs_src(src_host, req_hosts)

        for swif in self.bd_swifs(bd_id):
            swif.add_stream(reqs)

        self.pg_enable_capture(self.pg_interfaces)
        self.pg_start()

        for swif in self.bd_swifs(bd_id):
            resp_pkts = swif.get_capture(len(resp_hosts))
            resps = self.na_resp_hosts(src_host, resp_pkts)
            self.assertEqual(len(resps ^ resp_hosts), 0)

    def test_l2bd_arp_term_01(self):
        """ L2BD arp term - add IP4-MAC entries, verify ARP replies """
        src_host = self.ip4_host(50, 50, "00:00:11:22:33:44")
        self.bd_add_del(1, is_add=1)
        self.set_bd_flags(1, arp_term=True, flood=False,
                          uu_flood=False, learn=False)
        macs = self.mac_list(range(1, 5))
        hosts = self.ip4_hosts(4, 1, macs)
        self.add_del_arp_term_hosts(hosts, is_add=1)
        self.verify_arp(src_host, hosts, hosts)
        # Stash the installed hosts on the class for the next tests.
        type(self).hosts = hosts

    def test_l2bd_arp_term_02(self):
        """ L2BD arp term - delete a subset, verify replies for the rest """
        src_host = self.ip4_host(50, 50, "00:00:11:22:33:44")
        macs = self.mac_list(range(1, 3))
        deleted = self.ip4_hosts(4, 1, macs)
        self.add_del_arp_term_hosts(deleted, is_add=0)
        remaining = self.hosts - deleted
        self.verify_arp(src_host, self.hosts, remaining)
        type(self).hosts = remaining
        self.bd_add_del(1, is_add=0)

    def test_l2bd_arp_term_03(self):
        """ L2BD arp term - recreate BD, re-add deleted hosts, verify """
        src_host = self.ip4_host(50, 50, "00:00:11:22:33:44")
        self.bd_add_del(1, is_add=1)
        self.set_bd_flags(1, arp_term=True, flood=False,
                          uu_flood=False, learn=False)
        macs = self.mac_list(range(1, 3))
        readded = self.ip4_hosts(4, 1, macs)
        self.add_del_arp_term_hosts(readded, is_add=1)
        # Only the re-added hosts are expected to answer in the fresh BD.
        self.verify_arp(src_host, self.hosts | readded, readded)
        type(self).hosts = readded

    def test_l2bd_arp_term_04(self):
        """ L2BD arp term - add a second subnet of hosts, verify all """
        src_host = self.ip4_host(50, 50, "00:00:11:22:33:44")
        macs = self.mac_list(range(1, 3))
        sub5_hosts = self.ip4_hosts(5, 1, macs)
        self.add_del_arp_term_hosts(sub5_hosts, is_add=1)
        hosts = self.hosts | sub5_hosts
        self.verify_arp(src_host, hosts, hosts)
        type(self).hosts = hosts
        self.bd_add_del(1, is_add=0)

    def test_l2bd_arp_term_05(self):
        """ L2BD arp term - update MACs of existing IPs, verify replies """
        src_host = self.ip4_host(50, 50, "00:00:11:22:33:44")
        self.bd_add_del(1, is_add=1)
        self.set_bd_flags(1, arp_term=True, flood=False,
                          uu_flood=False, learn=False)
        macs1 = self.mac_list(range(10, 20))
        hosts1 = self.ip4_hosts(5, 1, macs1)
        self.add_del_arp_term_hosts(hosts1, is_add=1)
        self.verify_arp(src_host, hosts1, hosts1)
        # Same IPs, new MACs: replies must carry the updated bindings.
        macs2 = self.mac_list(range(20, 30))
        hosts2 = self.ip4_hosts(5, 1, macs2)
        self.add_del_arp_term_hosts(hosts2, is_add=1)
        self.verify_arp(src_host, hosts1, hosts2)
        self.bd_add_del(1, is_add=0)

    def test_l2bd_arp_term_06(self):
        """ L2BD arp/ND term - dual IP4/IP6 hosts, verify ARP and ND """
        src_host4 = self.ip4_host(50, 50, "00:00:11:22:33:44")
        src_host6 = self.ip6_host(50, 50, "00:00:11:22:33:44")
        self.bd_add_del(1, is_add=1)
        self.set_bd_flags(1, arp_term=True, flood=True,
                          uu_flood=False, learn=False)
        macs = self.mac_list(range(10, 20))
        hosts6 = self.ip6_hosts(5, 1, macs)
        hosts4 = self.ip4_hosts(5, 1, macs)
        self.add_del_arp_term_hosts(hosts4, is_add=1)
        self.add_del_arp_term_hosts(hosts6, is_add=1, is_ipv6=1)
        self.verify_arp(src_host4, hosts4, hosts4)
        self.verify_nd(src_host6, hosts6, hosts6)
        self.bd_add_del(1, is_add=0)

    def test_l2bd_arp_term_07(self):
        """ L2BD ND term - add and delete IP6 hosts, verify ND replies """
        src_host6 = self.ip6_host(50, 50, "00:00:11:22:33:44")
        self.bd_add_del(1, is_add=1)
        self.set_bd_flags(1, arp_term=True, flood=False,
                          uu_flood=False, learn=False)
        macs = self.mac_list(range(10, 20))
        hosts6 = self.ip6_hosts(5, 1, macs)
        self.add_del_arp_term_hosts(hosts6, is_add=1, is_ipv6=1)
        self.verify_nd(src_host6, hosts6, hosts6)
        del_macs = self.mac_list(range(10, 15))
        deleted = self.ip6_hosts(5, 1, del_macs)
        self.add_del_arp_term_hosts(deleted, is_add=0, is_ipv6=1)
        self.verify_nd(src_host6, hosts6, hosts6 - deleted)
        self.bd_add_del(1, is_add=0)

    def test_l2bd_arp_term_08(self):
        """ L2BD ND term - Add and update IP+mac, verify ND replies """
        src_host = self.ip6_host(50, 50, "00:00:11:22:33:44")
        self.bd_add_del(1, is_add=1)
        self.set_bd_flags(1, arp_term=True, flood=False,
                          uu_flood=False, learn=False)
        macs1 = self.mac_list(range(10, 20))
        hosts = self.ip6_hosts(5, 1, macs1)
        self.add_del_arp_term_hosts(hosts, is_add=1, is_ipv6=1)
        self.verify_nd(src_host, hosts, hosts)
        # Re-add the same IPs with new MACs; NAs must reflect the update.
        macs2 = self.mac_list(range(20, 30))
        updated = self.ip6_hosts(5, 1, macs2)
        self.add_del_arp_term_hosts(updated, is_add=1, is_ipv6=1)
        self.verify_nd(src_host, hosts, updated)
        self.bd_add_del(1, is_add=0)

    def test_l2bd_arp_term_09(self):
        """ L2BD arp term - send garps, verify arp event reports """
        self.vapi.want_ip4_arp_events()
        self.bd_add_del(1, is_add=1)
        self.set_bd_flags(1, arp_term=True, flood=False,
                          uu_flood=False, learn=False)
        macs = self.mac_list(range(90, 95))
        hosts = self.ip4_hosts(5, 1, macs)
        garps = self.garp_reqs(hosts)
        self.bd_swifs(1)[0].add_stream(garps)
        self.pg_enable_capture(self.pg_interfaces)
        self.pg_start()
        # One ip4_arp_event expected per announced host.
        evs = [self.vapi.wait_for_event(1, "ip4_arp_event")
               for i in range(len(hosts))]
        ev_hosts = self.arp_event_hosts(evs)
        self.assertEqual(len(ev_hosts ^ hosts), 0)

    def test_l2bd_arp_term_10(self):
        """ L2BD arp term - send duplicate garps, verify suppression """
        macs = self.mac_list(range(70, 71))
        hosts = self.ip4_hosts(6, 1, macs)
        # Send the GARP 5 times; expect a single event for the host.
        garps = self.garp_reqs(hosts) * 5
        self.bd_swifs(1)[0].add_stream(garps)
        self.pg_enable_capture(self.pg_interfaces)
        self.pg_start()
        evs = [self.vapi.wait_for_event(1, "ip4_arp_event")
               for i in range(len(hosts))]
        ev_hosts = self.arp_event_hosts(evs)
        self.assertEqual(len(ev_hosts ^ hosts), 0)

    def test_l2bd_arp_term_11(self):
        """ L2BD arp term - disable ip4 arp events, verify no events """
        self.vapi.want_ip4_arp_events(enable_disable=0)
        macs = self.mac_list(range(90, 95))
        hosts = self.ip4_hosts(5, 1, macs)
        garps = self.garp_reqs(hosts)
        self.bd_swifs(1)[0].add_stream(garps)
        self.pg_enable_capture(self.pg_interfaces)
        self.pg_start()
        self.sleep(1)
        self.assertEqual(len(self.vapi.collect_events()), 0)
        self.bd_add_del(1, is_add=0)

    def test_l2bd_arp_term_12(self):
        """ L2BD ND term - send NS packets verify reports """
        self.vapi.want_ip6_nd_events(ip="::")
        dst_host = self.ip6_host(50, 50, "00:00:11:22:33:44")
        self.bd_add_del(1, is_add=1)
        self.set_bd_flags(1, arp_term=True, flood=False,
                          uu_flood=False, learn=False)
        macs = self.mac_list(range(10, 15))
        hosts = self.ip6_hosts(5, 1, macs)
        reqs = self.ns_reqs_dst(hosts, dst_host)
        self.bd_swifs(1)[0].add_stream(reqs)
        self.pg_enable_capture(self.pg_interfaces)
        self.pg_start()
        # One ip6_nd_event expected per sending host.
        evs = [self.vapi.wait_for_event(2, "ip6_nd_event")
               for i in range(len(hosts))]
        ev_hosts = self.nd_event_hosts(evs)
        self.assertEqual(len(ev_hosts ^ hosts), 0)

    def test_l2bd_arp_term_13(self):
        """ L2BD ND term - send duplicate ns, verify suppression """
        dst_host = self.ip6_host(50, 50, "00:00:11:22:33:44")
        macs = self.mac_list(range(10, 11))
        hosts = self.ip6_hosts(5, 1, macs)
        # Duplicate solicitations must collapse into one event per host.
        reqs = self.ns_reqs_dst(hosts, dst_host) * 5
        self.bd_swifs(1)[0].add_stream(reqs)
        self.pg_enable_capture(self.pg_interfaces)
        self.pg_start()
        evs = [self.vapi.wait_for_event(2, "ip6_nd_event")
               for i in range(len(hosts))]
        ev_hosts = self.nd_event_hosts(evs)
        self.assertEqual(len(ev_hosts ^ hosts), 0)

    def test_l2bd_arp_term_14(self):
        """ L2BD ND term - disable ip6 ND events, send ns, verify no events """
        self.vapi.want_ip6_nd_events(enable_disable=0, ip="::")
        dst_host = self.ip6_host(50, 50, "00:00:11:22:33:44")
        macs = self.mac_list(range(10, 15))
        hosts = self.ip6_hosts(5, 1, macs)
        reqs = self.ns_reqs_dst(hosts, dst_host)
        self.bd_swifs(1)[0].add_stream(reqs)
        self.pg_enable_capture(self.pg_interfaces)
        self.pg_start()
        self.sleep(1)
        self.assertEqual(len(self.vapi.collect_events()), 0)
        self.bd_add_del(1, is_add=0)
# Run the suite under VPP's test runner when executed as a script.
if __name__ == '__main__':
    unittest.main(testRunner=VppTestRunner)
| true | true |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.