hexsha stringlengths 40 40 | size int64 3 1.03M | ext stringclasses 10
values | lang stringclasses 1
value | max_stars_repo_path stringlengths 3 972 | max_stars_repo_name stringlengths 6 130 | max_stars_repo_head_hexsha stringlengths 40 78 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 3 972 | max_issues_repo_name stringlengths 6 130 | max_issues_repo_head_hexsha stringlengths 40 78 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 116k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 3 972 | max_forks_repo_name stringlengths 6 130 | max_forks_repo_head_hexsha stringlengths 40 78 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 3 1.03M | avg_line_length float64 1.13 941k | max_line_length int64 2 941k | alphanum_fraction float64 0 1 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
842ec14c66c0643829e7c656d1a5e1e232009092 | 20,753 | py | Python | ext/modular.py | lun-4/avabot | f9fe22a497291773588734f7d3a33f7d42d65251 | [
"MIT"
] | null | null | null | ext/modular.py | lun-4/avabot | f9fe22a497291773588734f7d3a33f7d42d65251 | [
"MIT"
] | null | null | null | ext/modular.py | lun-4/avabot | f9fe22a497291773588734f7d3a33f7d42d65251 | [
"MIT"
] | null | null | null | # https://killsixbilliondemons.com/feed/
# http://twokinds.keenspot.com/feed.xml
# http://mylifewithfel.smackjeeves.com/rss/
import asyncio
import json
import re
import os
import math
import time
import discord
import aiohttp
# Oops, my finger slipped
import lxml.html
# import rethinkdb as r
import dateutil.parser
import datetime
import lxml.etree
import lxml.html
import urllib.parse
from discord.ext import commands
from .common import Cog
# Matches a 3-5 character run of digits (and/or dashes) -- page numbers in RSS titles
page_num_regex = r"((?:-|\d){3,5})" # Used to match page #s in RSS feed titles
# Some newer comics just seem to work better this way
# Matches a trailing /comic/<slug>/ (optionally /dnwcomic/) path segment
comic_link_regex = r"\/(?:dnw)?comic\/([a-z0-9_\-]+)(?:\/)?$"
# Matches a trailing comic=<number> query value
comic_link_num_regex = r"comic=((?:-|\d){3,5})$"
# Shared lxml parsers pinned to UTF-8 so mis-declared feeds still decode
parser = lxml.etree.XMLParser(encoding="utf-8")
html_parser = lxml.html.HTMLParser(encoding="utf-8")
class BadPage(Exception):
    """Raised when a fetched feed or page is malformed or missing expected data."""
    pass
async def http_req(url, headers=None, body=None):
    """Perform one HTTP request and return ``{"text": bytes, "resp": response}``.

    Sends a POST when *body* is truthy, otherwise a GET.  A fresh aiohttp
    session is created per call; the body is pre-read into ``"text"``
    because the response object is closed once the session exits.

    Bug fix: the original used a mutable default argument ``headers={}``.
    """
    async with aiohttp.ClientSession() as session:
        chosen_req = session.post if body else session.get
        async with chosen_req(url, headers=headers or {}, data=body) as resp:
            return {
                "text": await resp.read(),
                "resp": resp
            }
async def status_page(comic, bot):
    """Fetch the latest incident from a statuspage.io ``history.json`` feed.

    The base host is ``<statuspage_slug>.statuspage.io`` when the slug is
    configured, otherwise the explicit ``statuspage_url``.  Returns the
    standard ``latest_post`` dict; raises BadPage when the feed is
    malformed or has no incidents.

    Bug fix: the original indexed ``months[0]`` before checking the list
    was non-empty, so an empty ``months`` array raised IndexError instead
    of BadPage.  Also replaced ``== None`` / ``!= None`` with identity
    checks per PEP 8.
    """
    if comic.get("statuspage_slug") is not None:
        base_url = comic["statuspage_slug"] + ".statuspage.io"
    else:
        base_url = comic["statuspage_url"]
    resp = await http_req("https://" + base_url + "/history.json")
    if resp["resp"].status != 200:
        raise BadPage("Non-200 status code: " + str(resp["resp"].status))
    parsed = json.loads(resp["text"])
    months = parsed.get("months")
    if months is None:
        raise BadPage("No months prop")
    if not months or months[0] is None:
        raise BadPage("No months listed")
    month = months[0]
    incidents = month.get("incidents")
    if incidents is None:
        raise BadPage("No incidents prop")
    if not incidents or incidents[0] is None:
        raise BadPage("No incidents listed")
    incident = incidents[0]
    return {
        "latest_post": {
            "unique_id": incident["code"],
            "url": f"https://{base_url}/incidents/{incident['code']}",
            "title": incident["name"],
            "time": bot.r.now(),
        }
    }
async def common_rss(comic, bot):
    """Return the newest item of the comic's RSS feed as a latest_post dict.

    Tries several strategies, in order, to derive a stable unique ID for
    the newest <item>: a page number in the title, a comic slug in the
    link, a comic=<num> value in the link, and finally the whole URL.

    NOTE(review): on a non-200 response this falls through and implicitly
    returns None; callers appear to assume a dict -- confirm upstream.
    """
    resp = await http_req(comic["rss_url"])
    text = resp["text"]
    if resp["resp"].status == 200:
        parsed = lxml.etree.fromstring(text, parser=parser)
        # The first <item> in the channel is assumed to be the newest post
        post = parsed.cssselect("rss channel item")[0]
        title = post.cssselect("title")[0].text
        url = post.cssselect("link")[0].text
        # Unique-ID fallback chain (see docstring)
        page_num_search = re.search(page_num_regex, title)
        if not page_num_search:
            page_num_search = re.search(comic_link_regex, url)
        if not page_num_search:
            page_num_search = re.search(comic_link_num_regex, url)
        if not page_num_search:
            # Last resort: use the entire URL as the unique ID
            page_num_search = re.search(r"(.*)", url)
            # raise BadPage(f"No unique ID found for page title: '{title}' or url: '{url}'")
        page_num = page_num_search.group(1)
        found_pubdate = post.cssselect("pubDate")
        if found_pubdate:
            # Normalize the feed timestamp to UTC for storage
            time = dateutil.parser.parse(found_pubdate[0].text).astimezone(bot.r.make_timezone("0:00"))
        else:
            # Feed has no pubDate; fall back to "now" (server time)
            time = bot.r.now()
        return {
            "latest_post": {
                "unique_id": page_num,
                "url": url,
                "title": title,
                "time": time
            }
        }
async def egs_scrape(comic, bot):
    """Scrape the El Goonish Shive front page for the newest comic.

    The comic <img>'s title attribute doubles as the unique ID (and URL
    suffix); the date element's raw HTML is used as the post title.
    """
    resp = await http_req(comic["base_url"])
    text = resp["text"]
    xml_document = lxml.html.fromstring(text, parser=html_parser)
    comic_date_element = xml_document.cssselect('#leftarea div[style*="font-family"]')[0]
    comic_img_element = xml_document.cssselect('#cc-comic')[0]
    # tostring() yields the element's serialized markup, used verbatim as the title
    comic_date = lxml.html.tostring(comic_date_element)
    comic_name = comic_img_element.attrib["title"]
    return {
        "latest_post": {
            "unique_id": comic_name,
            "url": f'{comic["base_url"]}{comic_name}',
            "title": comic_date,
            "time": bot.r.now()
        }
    }
async def twokinds_scrape(comic, bot):
    """Scrape the Two Kinds front page and return the newest page info.

    The permalink under the comic article yields the numeric page ID;
    BadPage is raised when no number can be parsed out of it.

    Improvement: the BadPage raise now chains the original ValueError
    (``from err``) so the root cause stays visible in tracebacks.
    """
    resp = await http_req(comic["base_url"])
    text = resp["text"]
    xml_document = lxml.html.fromstring(text, parser=html_parser)
    # Grab the newest page from the 'latest' button
    article_obj = xml_document.cssselect("article.comic")[0]
    permalink_page = article_obj.cssselect("div.below-nav p.permalink a[href^=\"/comic\/\"]")[0]
    permalink_url = permalink_page.attrib["href"]
    title = article_obj.cssselect("img[alt=\"Comic Page\"]")[0].attrib["title"]
    try:
        # Permalinks look like /comic/<num>/ -- drop the trailing component,
        # then take the basename to isolate the page number
        page_num = int(os.path.basename(os.path.split(permalink_url)[0]))
    except ValueError as err:
        raise BadPage(f"No unique ID found for page URL: '{permalink_url}'") from err
    return {
        "latest_post": {
            "unique_id": page_num,
            "url": f'{comic["base_url"]}{permalink_url}',
            "title": title,
            "time": bot.r.now()
        }
    }
# Ava's Demon scraper because the she doesn't update RSS as soon...
async def avasdemon_scrape(comic, bot):
    """Scrape Ava's Demon via its comicQuickLinks.js blob.

    The JS file embeds a hex-encoded string; every two hex characters
    decode to one character of JS source, from which the latest page
    number is extracted.  A timestamp query param busts HTTP caches.
    """
    resp = await http_req(f'{comic["base_url"]}/js/comicQuickLinks.js?v=' + str(math.floor(time.time())))
    blob = re.search(r'var ad_cql="(.*)";$', resp["text"].decode()).group(1)
    # Decode pairs of hex digits back into the embedded JS source
    comic_data = ''.join([chr(int(chars, 16)) for chars in re.findall(r".{1,2}", blob)])
    page_num = re.search(r"var latestComicLinkHtml=(\d+);", comic_data).group(1)
    return {
        "latest_post": {
            "unique_id": page_num,
            "url": f'{comic["base_url"]}/pages.php#{page_num}',
            "title": f"Page {page_num}",
            "time": bot.r.now()
        }
    }
async def xkcd_fetch(comic, bot):
    """Pull the newest XKCD strip via the official ``info.0.json`` API."""
    base = comic["base_url"]
    response = await http_req(f"{base}/info.0.json")
    page = json.loads(response["text"])
    number = page["num"]
    posted = bot.r.time(int(page["year"]), int(page["month"]), int(page["day"]), "Z")
    return {
        "latest_post": {
            "unique_id": number,
            "url": f"{base}/{number}",
            "title": f'{page["title"]} ({page["alt"]})',
            "time": posted,
        }
    }
async def twitter_listener(user, bot):
    """Stub for a Twitter update source; currently only resolves the handle.

    Bug fix: the original read an undefined name ``comic`` (the parameter
    is ``user``), raising NameError on every call.  The resolved handle is
    returned so callers can observe it.
    """
    handle = user["handle"]  # User's Twitter handle
    return handle
webcomics = [
{
"slug": "discordstatus",
"friendly": "Discord Status",
"check_updates": status_page,
"statuspage_slug": "discord"
},
{
"slug": "cloudflarestatus",
"friendly": "Cloutflare Status",
"check_updates": status_page,
"statuspage_slug": "cloudflare"
},
{
"slug": "githubstatus",
"friendly": "Github Status",
"check_updates": status_page,
"statuspage_url": "www.githubstatus.com"
},
{
"slug": "redditstatus",
"friendly": "Reddit Status",
"check_updates": status_page,
"statuspage_slug": "reddit"
},
{
"slug": "dostatus",
"friendly": "Digital Ocean Status",
"check_updates": status_page,
"statuspage_url": "status.digitalocean.com"
},
{
"slug": "gitlabstatus",
"friendly": "GitLab Status",
"check_updates": common_rss,
"rss_url": "https://status.gitlab.com/pages/5b36dc6502d06804c08349f7/rss"
},
{
"slug": "webplatformnews",
"friendly": "Web Platform News",
"check_updates": common_rss,
"rss_url": "https://webplatform.news/feed.xml"
},
{
"slug": "questionablecontent",
"friendly": "Questionable Content",
"check_updates": common_rss,
"rss_url": "https://www.questionablecontent.net/QCRSS.xml"
},
{
"slug": "overthehedge",
"friendly": "Over the Hedge",
"check_updates": common_rss,
"rss_url": "https://overthehedgeblog.wordpress.com/feed"
},
{
"slug": "pv02",
"friendly": "A robot named Pivot",
"check_updates": common_rss,
"rss_url": "https://www.pv02comic.com/feed/"
},
{
"slug": "bodies",
"friendly": "Bodies",
"check_updates": common_rss,
"rss_url": "https://www.webtoons.com/en/challenge/bodies/rss?title_no=313877"
},
{
"slug": "mageanddemonqueen",
"friendly": "Mage & Demon Queen",
"check_updates": common_rss,
"rss_url": "https://www.webtoons.com/en/comedy/mage-and-demon-queen/rss?title_no=1438"
},
{
"slug": "ixbr",
"friendly": "ix.br status page",
"check_updates": common_rss,
"rss_url": "https://status.ix.br/rss"
},
{
"slug": "sourcehut",
"friendly": "sourcehut blog",
"check_updates": common_rss,
"rss_url": "https://sourcehut.org/blog/index.xml"
},
{
"slug": "rubenerd",
"friendly": "Rubenerd",
"check_updates": common_rss,
"rss_url": "https://rubenerd.com/feed/"
},
# {
# "slug": "smbc",
# "friendly": "Saturday Morning Breakfast Cereal",
# "check_updates": common_rss,
# "rss_url": "http://www.smbc-comics.com/comic/rss"
# },
# {
# "slug": "back",
# "friendly": "BACK",
# "check_updates": common_rss,
# "rss_url": "http://backcomic.com/rss.xml"
# },
{
"slug": "tove",
"friendly": "TOVE",
"check_updates": common_rss,
"rss_url": "http://www.tovecomic.com/comic/rss"
},
{
"slug": "drugsandwires",
"friendly": "DRUGS & WIRES",
"check_updates": common_rss,
"rss_url": "https://www.drugsandwires.fail/feed/"
},
{
"slug": "twokinds",
"friendly": "Two Kinds",
"check_updates": twokinds_scrape,
"base_url": "http://twokinds.keenspot.com"
},
{
"slug": "egs",
"friendly": "El Goonish Shive",
"check_updates": egs_scrape,
"base_url": "https://egscomics.com/comic/"
},
{
"base_url": "https://avasdemon.com",
"friendly": "Ava's Demon",
"check_updates": avasdemon_scrape,
"slug": "avasdemon"
},
{
"base_url": "https://xkcd.com",
"friendly": "XKCD",
"check_updates": xkcd_fetch,
"slug": "xkcd"
},
{
"slug": "mylifewithfel",
"friendly": "My Life With Fel",
"check_updates": common_rss,
"rss_url": "http://www.mylifewithfel.com/rss/"
},
{
"slug": "killsixbilliondemons",
"friendly": "Kill Six Billion Demons",
"check_updates": common_rss,
"rss_url": "https://killsixbilliondemons.com/feed/"
},
{
"slug": "garfield",
"friendly": "Garfield",
"check_updates": common_rss,
"rss_url": "https://www.comicsrss.com/rss/garfield.rss"
} # ,
# {
# "slug": "avasdemon",
# "friendly": "Ava's Demon",
# "check_updates": common_rss,
# "rss_url": "http://feeds.feedburner.com/AvasDemon?format=xml"
# }
]
class Modular(Cog):
"""Updates users when new webcomics are released!"""
def __init__(self, bot):
super().__init__(bot)
self.ready = False
if self.bot.is_ready():
self.check_updates()
# We cache this because we're intellectuals who want efficiency
self.comic_slugs = [comic["slug"] for comic in webcomics]
self.comic_dict = {}
for comic in webcomics:
self.comic_dict[comic["slug"]] = comic
async def run_check():
await self.bot.db_connect_task
while True:
self.bot.logger.info("Checking RSS automatically...")
try:
await self.check_updates()
except Exception as err:
self.bot.logger.exception("penid")
self.bot.logger.exception(err)
await asyncio.sleep(10 * 60) # Check RSS every 10 min
self.check_loop = self.bot.loop.create_task(run_check())
    def cog_unload(self):
        """Stop the background polling task when the cog is unloaded."""
        self.check_loop.cancel()
async def check_updates(self):
for comic in webcomics:
# haha yes we have the comics now we do their update hook!
friendly_name = comic["friendly"]
self.bot.logger.info(f"Fetching {friendly_name}")
try:
results = await comic["check_updates"](comic, self.bot)
except lxml.etree.XMLSyntaxError as err:
self.bot.logger.error(f"Error occurred while fetching {friendly_name}: {err}")
except aiohttp.client_exceptions.ClientConnectionError as err:
self.bot.logger.error(f"Error occurred while fetching {friendly_name}: {err}")
continue
except BadPage as err:
self.bot.logger.error(f"Error occurred while fetching {friendly_name}: {err}")
continue
except Exception as err:
self.bot.logger.error(f"VERY bad, this should never happen! {friendly_name}: {err}")
self.bot.logger.exception(err)
continue
self.bot.logger.info(f"Checked for updates on {friendly_name}")
announced_post = await self.bot.r.table("updates").get(comic["slug"]).run(self.bot.r_connection)
if announced_post and results["latest_post"]["unique_id"] == announced_post["unique_id"]:
self.bot.logger.info(f"No updates for {friendly_name}")
continue
self.bot.logger.info(f'Found update for {friendly_name}, unique_id: {results["latest_post"]["unique_id"]}')
await self.bot.r.table("updates").insert({
"id": comic["slug"],
"unique_id": results["latest_post"]["unique_id"],
"url": results["latest_post"]["url"],
"title": results["latest_post"]["title"],
"time": results["latest_post"]["time"]
}, conflict="update").run(self.bot.r_connection)
await self.announce_comic(comic, results)
    async def announce_comic(self, comic, results):
        """Broadcast a new panel to Mastodon and all subscribed channels.

        For channels with a ping role, the role is briefly made
        mentionable (production only), the announcement is sent, then the
        role is locked again.  ``discord.Forbidden`` is swallowed at each
        step so one misconfigured channel cannot block the rest.
        """
        channels = await self.get_channels(comic["slug"])
        friendly_name = comic["friendly"]
        post_title = results["latest_post"]["title"]
        url = results["latest_post"]["url"]
        response = f"New panels for {friendly_name}!\nLatest panel:\n{post_title}\n{url}"
        # Cross-post to Mastodon, but only from the production bot and only
        # when both a token and an instance URL are configured
        if self.bot.prod and self.bot.config.mastodon and self.bot.config.mastodon["token"] and self.bot.config.mastodon["instance_url"]:
            await http_req(f"{self.bot.config.mastodon['instance_url']}/api/v1/statuses",
                           {"Authorization":
                            f"Bearer {self.bot.config.mastodon['token']}"},
                           {
                               "status": f"{response}\n\n #avabot_update #avabot_update_{comic['slug']}",
                               "visibility": "unlisted"
                           })
        for channel in channels:
            if channel["role"]:
                new_page_role = channel["role"]
                try:
                    if self.bot.prod:
                        # Temporarily allow pinging the subscriber role
                        await new_page_role.edit(
                            mentionable=True,
                            reason=f"New panels for {friendly_name} ({post_title})")
                    else:  # Safety precaution
                        await new_page_role.edit(
                            mentionable=False,
                            reason="Local bot, new page without ping")
                except discord.Forbidden:
                    pass
                try:
                    await channel["channel"].send(channel["role"].mention + ": " + response)
                except discord.Forbidden:
                    pass
                try:
                    # Lock the role again after the announcement
                    await new_page_role.edit(
                        mentionable=False,
                        reason=f"New panels for {friendly_name} ({post_title})")
                except discord.Forbidden:
                    pass
            else:
                await channel["channel"].send(response)
    async def get_channels(self, comic_slug):
        """Resolve subscriptions for *comic_slug* into channel/role pairs.

        Channels the bot can no longer see are filtered out; ``"role"`` is
        None when the subscription has no ping role or the role no longer
        exists in the guild.
        """
        subscriptions = await self.bot.r.table("subscriptions").get_all(comic_slug, index="slug").run(self.bot.r_connection)
        return [{
            "channel": self.bot.get_channel(int(subscription["channel_id"])),
            "role": (discord.utils.get(
                self.bot.get_channel(int(subscription["channel_id"])).guild.roles,
                id=int(subscription["role_id"])) if subscription["role_id"] else None) or None
        } async for subscription in subscriptions
            if self.bot.get_channel(int(subscription["channel_id"]))]
@commands.command()
async def latest(self, ctx, *, comic_slug: str):
"""Gets latest panel of a webcomic"""
if not comic_slug in self.comic_dict:
return await ctx.send("Comic doesn't exist")
update = await self.bot.r.table("updates").get(comic_slug).run(self.bot.r_connection)
await ctx.send(f"Latest panel for {comic_slug}: {update['title']} - {update['url']}")
@commands.command(aliases=["unsubscribe", "unsub", "sub"])
async def subscribe(self, ctx, *, role: discord.Role=None):
"""Toggles your subscription to a webcomic"""
if not role:
subscriptions = self.bot.r \
.table("subscriptions") \
.get_all(str(ctx.guild.id), index="guild_id").run(self.bot.r_connection)
role_list = "\n".join([
f'{self.bot.get_channel(int(subscription["channel_id"])).mention} '
f'**{self.comic_dict[subscription["slug"]]["friendly"]}**: '
f'`{discord.utils.get(ctx.guild.roles, id=int(subscription["role_id"])).name}`'
async for subscription in subscriptions
if subscription["role_id"] and
discord.utils.get(ctx.guild.roles,
id=int(subscription["role_id"]))
])
return await ctx.send(f"Available roles:\n"
f"{role_list}")
if role.guild.id != ctx.guild.id:
return await ctx.send("Role not found")
allowed = await self.bot.r \
.table("subscriptions") \
.get_all(str(role.id), index="role_id") \
.count() \
.gt(0) \
.run(self.bot.r_connection)
if not allowed:
return await ctx.send("Role not found")
if role in ctx.author.roles:
await ctx.author.remove_roles(role)
return await ctx.send("Unsubscribed!")
else:
await ctx.author.add_roles(role)
return await ctx.send("Subscribed!")
    @commands.group(invoke_without_command=True)
    async def subscriptions(self, ctx):
        """Manage subscriptions"""
        # Bare `subscriptions` invocation: show this group's help text
        return await ctx.invoke(self.bot.get_command("help"), ctx.invoked_with)
    @subscriptions.command(name="list")
    async def subscriptions_list(self, ctx, channel: discord.TextChannel=None):
        """Shows a list of subscriptions in the server (or just the specified channel)."""
        filter_dict = {
            "guild_id": str(ctx.guild.id)
        }
        if channel:
            filter_dict["channel_id"] = str(channel.id)
            header = f"Subscriptions in {channel.mention}"
        else:
            header = "Subscriptions in this server"
        subscriptions = await self.bot.r \
            .table("subscriptions") \
            .filter(filter_dict) \
            .run(self.bot.r_connection)
        # One line per subscription that still points at a visible channel;
        # the role name is appended only when the role still exists.
        subscription_list = "\n".join([f'**{self.comic_dict[subscription["slug"]]["friendly"]}** {self.bot.get_channel(int(subscription["channel_id"])).mention}' +
                                       ((" " +
                                         discord.utils.get(
                                             self.bot.get_channel(int(subscription["channel_id"])).guild.roles,
                                             id=int(subscription["role_id"])).name)
                                        if subscription["role_id"] and discord.utils.get(
                                            self.bot.get_channel(int(subscription["channel_id"])).guild.roles,
                                            id=int(subscription["role_id"])) else "")
                                       async for subscription in subscriptions if self.bot.get_channel(int(subscription["channel_id"]))])
        await ctx.send(f"**{header}**\n"
                       f"{subscription_list}")
@subscriptions.command()
@commands.has_permissions(administrator=True)
async def remove(self, ctx, slug: str, channel: discord.TextChannel):
"""Removes subscription for a channel."""
if slug not in self.comic_slugs:
return await ctx.send("Comic not found!")
if channel.guild.id != ctx.guild.id:
return await ctx.send("Channel not found!")
sub_dict = {
"channel_id": str(channel.id),
"guild_id": str(ctx.guild.id),
"slug": slug
}
results = await self.bot.r.table("subscriptions").filter(sub_dict).delete().run(self.bot.r_connection)
if results["deleted"] <= 0:
return await ctx.send("No subscriptions deleted")
elif results["deleted"] > 0:
return await ctx.send("Removed!")
    @subscriptions.command()
    @commands.has_permissions(administrator=True)
    async def add(self, ctx, slug: str, channel: discord.TextChannel, role: discord.Role=None):
        """Adds a subscription for a channel"""
        if not channel or channel.guild.id != ctx.guild.id:
            return await ctx.send("Channel not found!")
        if role and role.guild.id != ctx.guild.id:
            return await ctx.send("Role not found!")
        if slug not in self.comic_slugs:
            return await ctx.send("Comic not found!")
        sub_dict = {
            "channel_id": str(channel.id),
            "guild_id": str(ctx.guild.id),
            "slug": slug,
        }
        # Delete any existing matching subscription first, so re-adding
        # acts as an upsert (e.g. to change the ping role)
        await self.bot.r.table("subscriptions").filter(sub_dict).delete().run(self.bot.r_connection)
        sub_dict["role_id"] = str(role.id) if role else None
        await self.bot.r.table("subscriptions").insert(sub_dict).run(self.bot.r_connection)
        return await ctx.send(f'Done! {channel.mention} has a new subscription to {self.comic_dict[slug]["friendly"]}!')
@commands.command()
async def list(self, ctx):
"""Shows list of available webcomics and their slugs"""
res = "\n".join(
[f'**{webcomic["friendly"]}**: {webcomic["slug"]}' for webcomic in webcomics])
return await ctx.send(res)
    @commands.command()
    @commands.is_owner()
    async def recheck_all(self, ctx):
        """Checks for updates to webcomics"""
        # Owner-only manual trigger for the periodic poll
        await self.check_updates()
        await ctx.send("triple gay")
def setup(bot):
    """Standard discord.py extension entry point: register the Modular cog."""
    bot.add_cog(Modular(bot))
| 33.41868 | 159 | 0.630945 |
d2ad70957920471751e9f65cbf2c39fa14560414 | 6,051 | py | Python | flaskblog/users/routes.py | oecorrechag/Block | b65152dc768ece64ea93535ade5e8c66c3ab9de4 | [
"MIT"
] | null | null | null | flaskblog/users/routes.py | oecorrechag/Block | b65152dc768ece64ea93535ade5e8c66c3ab9de4 | [
"MIT"
] | null | null | null | flaskblog/users/routes.py | oecorrechag/Block | b65152dc768ece64ea93535ade5e8c66c3ab9de4 | [
"MIT"
] | 1 | 2021-01-04T20:13:06.000Z | 2021-01-04T20:13:06.000Z | from flask import render_template, url_for, flash, redirect, request, Blueprint
from flask_login import login_user, current_user, logout_user, login_required
from flaskblog import db, bcrypt
from flaskblog.models import User, Post, Todo
from flaskblog.users.forms import (RegistrationForm, LoginForm, UpdateAccountForm,
RequestResetForm, ResetPasswordForm)
from flaskblog.users.utils import save_picture, send_reset_email
users = Blueprint('users', __name__)
@users.route("/register", methods=['GET', 'POST'])
def register():
    """Create a new user account.

    Logged-in users are bounced to the home page.  On a valid POST the
    password is bcrypt-hashed before the User row is stored.
    """
    if current_user.is_authenticated:
        return redirect(url_for('main.home'))
    form = RegistrationForm()
    if form.validate_on_submit():
        hashed_password = bcrypt.generate_password_hash(form.password.data).decode('utf-8')
        user = User(username=form.username.data, email=form.email.data, password=hashed_password)
        db.session.add(user)
        db.session.commit()
        flash('Your account has been created! You are now able to log in', 'success')
        return redirect(url_for('users.login'))
    return render_template('register.html', title='Register', form=form)
@users.route("/login", methods=['GET', 'POST'])
def login():
    """Authenticate a user by email and bcrypt-checked password."""
    if current_user.is_authenticated:
        return redirect(url_for('main.home'))
    form = LoginForm()
    if form.validate_on_submit():
        user = User.query.filter_by(email=form.email.data).first()
        if user and bcrypt.check_password_hash(user.password, form.password.data):
            login_user(user, remember=form.remember.data)
            next_page = request.args.get('next')
            # NOTE(review): `next` is redirected to unvalidated -- a crafted
            # external URL here is an open-redirect risk; confirm/constrain.
            return redirect(next_page) if next_page else redirect(url_for('main.home'))
        else:
            flash('Login Unsuccessful. Please check email and password', 'danger')
    return render_template('login.html', title='Login', form=form)
@users.route("/logout")
def logout():
    """End the current session and return to the home page."""
    logout_user()
    return redirect(url_for('main.home'))
@users.route("/account", methods=['GET', 'POST'])
@login_required
def account():
    """Show and update the logged-in user's profile.

    POST saves username/email/bio/social links (and an optional new
    avatar); GET pre-fills the form with the current values.
    """
    form = UpdateAccountForm()
    if form.validate_on_submit():
        if form.picture.data:
            # Persist the upload via users.utils.save_picture and keep
            # only the returned filename on the user row
            picture_file = save_picture(form.picture.data)
            current_user.image_file = picture_file
        current_user.username = form.username.data
        current_user.email = form.email.data
        current_user.bio = form.bio.data
        current_user.facebook = form.facebook.data
        current_user.twitter = form.twitter.data
        db.session.commit()
        flash('Your account has been updated!', 'success')
        # POST/redirect/GET so a refresh does not resubmit the form
        return redirect(url_for('users.account'))
    elif request.method == 'GET':
        form.username.data = current_user.username
        form.email.data = current_user.email
        form.bio.data = current_user.bio
        form.facebook.data = current_user.facebook
        form.twitter.data = current_user.twitter
    image_file = url_for('static', filename='profile_pics/' + current_user.image_file)
    return render_template('account.html', title='Account',
                           image_file=image_file, form=form)
@users.route("/user/<string:username>")
def user_posts(username):
    """Show a paginated (6 per page) list of one user's posts, newest first."""
    page = request.args.get('page', 1, type=int)
    user = User.query.filter_by(username=username).first_or_404()
    posts = Post.query.filter_by(author=user)\
        .order_by(Post.date_posted.desc())\
        .paginate(page=page, per_page=6)
    return render_template('user_posts.html', posts=posts, user=user)
@users.route("/reset_password", methods=['GET', 'POST'])
def reset_request():
    """Email a password-reset link to the given address."""
    if current_user.is_authenticated:
        return redirect(url_for('main.home'))
    form = RequestResetForm()
    if form.validate_on_submit():
        user = User.query.filter_by(email=form.email.data).first()
        # NOTE(review): assumes RequestResetForm validates that the email
        # exists; otherwise `user` is None here and send_reset_email would
        # fail -- confirm in users.forms.
        send_reset_email(user)
        flash('An email has been sent with instructions to reset your password.', 'info')
        return redirect(url_for('users.login'))
    return render_template('reset_request.html', title='Reset Password', form=form)
@users.route("/reset_password/<token>", methods=['GET', 'POST'])
def reset_token(token):
    """Validate a password-reset token and let the user set a new password."""
    if current_user.is_authenticated:
        return redirect(url_for('main.home'))
    user = User.verify_reset_token(token)
    if user is None:
        flash('That is an invalid or expired token', 'warning')
        return redirect(url_for('users.reset_request'))
    form = ResetPasswordForm()
    if form.validate_on_submit():
        hashed_password = bcrypt.generate_password_hash(form.password.data).decode('utf-8')
        user.password = hashed_password
        db.session.commit()
        flash('Your password has been updated! You are now able to log in', 'success')
        return redirect(url_for('users.login'))
    return render_template('reset_token.html', title='Reset Password', form=form)
@users.route('/agenda', methods=['POST', 'GET'])
def agenda():
    """Todo-list page: POST adds a task, GET lists tasks oldest-first."""
    if request.method == 'POST':
        task_content = request.form['content']
        new_task = Todo(content = task_content)
        db.session.add(new_task)
        db.session.commit()
        return redirect(url_for('users.agenda'))
    else:
        tasks = Todo.query.order_by(Todo.date_created).all()
        return render_template('agenda.html', tasks = tasks)
@users.route('/delete/<int:id>')
def delete(id):
    """Delete a todo item by primary key; 404s when it does not exist."""
    task_to_delete = Todo.query.get_or_404(id)
    try:
        db.session.delete(task_to_delete)
        db.session.commit()
        return redirect(url_for('users.agenda'))
    except Exception:
        # Narrowed from a bare `except:` (which also swallowed SystemExit/
        # KeyboardInterrupt); roll back so the session remains usable.
        db.session.rollback()
        return 'There was a problem deleting that task'
@users.route('/update_task/<int:id>', methods=['GET', 'POST'])
def update_task(id):
    """Edit a todo item's content: GET renders the form, POST saves it."""
    task = Todo.query.get_or_404(id)
    if request.method == 'POST':
        task.content = request.form['content']
        try:
            db.session.commit()
            return redirect(url_for('users.agenda'))
        except Exception:
            # Narrowed from a bare `except:`; roll back the failed commit
            db.session.rollback()
            return 'There was a issue updating your task'
    else:
        return render_template('update_task.html', task = task)
972d345195a25c000efe9c33b1dcdc3d1bcf361a | 4,030 | py | Python | src/main.py | kikei/vivaldiUpdater | c53315a1542f29d929ea627ff5f297023fa8fb13 | [
"MIT"
] | null | null | null | src/main.py | kikei/vivaldiUpdater | c53315a1542f29d929ea627ff5f297023fa8fb13 | [
"MIT"
] | null | null | null | src/main.py | kikei/vivaldiUpdater | c53315a1542f29d929ea627ff5f297023fa8fb13 | [
"MIT"
] | null | null | null | import requests
import os
from pyquery import PyQuery
from urllib.parse import urlparse, urljoin
import posixpath
import zipfile
USER_AGENT = 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/55.0.2883.98 Safari/537.36 Vivaldi/1.6.689.46'
VIVALDI_COM_URL = 'https://vivaldi.com/download/'
LIBFFMPEG_URL = 'https://github.com/iteufel/nwjs-ffmpeg-prebuilt/releases/latest'
LIBFFMPEG = '/opt/vivaldi/lib/libffmpeg.so'
def http_get(url):
    """GET *url* with the spoofed Vivaldi desktop User-Agent; return the response.

    Bug fix: the headers dict was built but never passed to requests.get,
    so every request went out with the library's default User-Agent.
    """
    headers = {
        'User-Agent': USER_AGENT
    }
    res = requests.get(url, headers=headers)
    return res
def make_filename(url, dest_dir):
    """Map *url*'s basename into *dest_dir* and return the joined local path."""
    basename = posixpath.basename(urlparse(url).path)
    return os.path.join(dest_dir, basename)
def download_to(url, dest_file):
    """Download *url* and write the response body to *dest_file*.

    Prints simple progress messages.  The whole body is buffered in
    memory before writing (fine for browser-package-sized downloads).
    """
    r = urlparse(url)
    name = posixpath.basename(r.path)
    print('Downloading {}...'.format(name))
    res = http_get(url)
    # `with` closes the file; the original's explicit close() was redundant
    with open(dest_file, 'wb') as wb:
        wb.write(res.content)
    print('done.')
def download_to_dir(url, dest_dir):
    """Download *url* into *dest_dir*, keeping the remote basename."""
    destination = make_filename(url, dest_dir)
    download_to(url, destination)
class VivaldiClawler(object):
    """Scrapes vivaldi.com's download page for package links."""

    def get_vivaldi_com(self):
        """Fetch the download page and return it as a PyQuery document."""
        res = http_get(VIVALDI_COM_URL)
        return PyQuery(res.text)

    def get_download_links(self):
        """Yield every anchor href on the page that points at a download."""
        dom = self.get_vivaldi_com()
        anchors = dom('a')
        for a in anchors.items():
            href = a.attr['href']
            if href.find('downloads') > 0:
                yield href

    def get_download_links_for(self, parts):
        """Yield download links whose URL contains every substring in *parts*."""
        links = self.get_download_links()
        for link in links:
            matched = list(filter(lambda p:link.find(p) > -1, parts))
            if len(matched) != len(parts):
                continue
            yield link

    def get_download_link_for(self, parts):
        """Return the first link matching *parts* (raises StopIteration if none)."""
        links = self.get_download_links_for(parts)
        link = next(links)
        return link
class LibFFmpegClawler(object):
    """Scrapes a GitHub releases page for prebuilt libffmpeg download links."""

    def __init__(self, url):
        # URL of the "latest release" page to scrape
        self.url = url

    def get_libffmpeg_releases(self):
        """Fetch the releases page and return it as a PyQuery document."""
        res = http_get(self.url)
        return PyQuery(res.text)

    def get_download_links(self):
        """Yield every anchor href on the page that points at a download."""
        dom = self.get_libffmpeg_releases()
        anchors = dom('a')
        for a in anchors.items():
            href = a.attr['href']
            if href.find('download') > 0:
                yield href

    def get_download_links_for(self, parts):
        """Yield download links whose URL contains every substring in *parts*."""
        links = self.get_download_links()
        for link in links:
            matched = list(filter(lambda p:link.find(p) > -1, parts))
            if len(matched) != len(parts):
                continue
            yield link

    def get_download_link_for(self, parts):
        """Return the first matching link, absolutized against the page URL."""
        links = self.get_download_links_for(parts)
        link = next(links)
        if link is not None:
            # GitHub hrefs are site-relative; resolve against the page URL
            link = urljoin(self.url, link)
        return link
def download_vivaldi(dest_dir):
    """Download the latest x86_64 RPM into *dest_dir*.

    Returns the local filename, or None when that version was already
    downloaded (file exists), which callers treat as "no update".
    """
    clawler = VivaldiClawler()
    url = clawler.get_download_link_for(['x86_64', 'rpm'])
    filename = make_filename(url, dest_dir)
    if os.path.isfile(filename):
        return None
    download_to(url, filename)
    return filename
def download_libffmpeg(dest_dir):
    """Fetch the latest prebuilt libffmpeg zip and extract libffmpeg.so.

    Returns the path to the extracted library, or None when that archive
    was already downloaded (treated as "no update").
    """
    clawler = LibFFmpegClawler(LIBFFMPEG_URL)
    url = clawler.get_download_link_for(['linux', 'x64'])
    filename = make_filename(url, dest_dir)
    if os.path.isfile(filename):
        return None
    download_to(url, filename)
    # Context manager guarantees the archive handle is closed even when
    # extraction fails (the original leaked it on error)
    with zipfile.ZipFile(filename) as zip_file:
        zip_file.extract('libffmpeg.so', dest_dir)
    return os.path.join(dest_dir, 'libffmpeg.so')
def main():
    """Check for Vivaldi and libffmpeg updates and print the install command."""
    pwd = os.path.dirname(os.path.abspath(__file__))
    download_dir = os.path.join(pwd, '..', 'data')
    # Bug fix: the original passed mode=755 (decimal); directory modes are
    # octal, so 0o755 is the intended permission set.
    os.makedirs(download_dir, mode=0o755, exist_ok=True)
    vivaldi_file = download_vivaldi(download_dir)
    libffmpeg_file = download_libffmpeg(download_dir)
    commands = []
    if vivaldi_file is not None:
        commands.append('dnf install {src}'.format(src=vivaldi_file))
    if libffmpeg_file is not None:
        commands.append('install {src} {dest}'.format(src=libffmpeg_file, dest=LIBFFMPEG))
    if not commands:
        print('Not updated.')
    else:
        print('Update found, run following command:')
        script = '''
#!/bin/sh
sudo -- sh -c '{command}'
'''.strip()
        script = script.format(command='; '.join(commands))
        print(script)


# Guarded so importing this module no longer triggers the download run
if __name__ == "__main__":
    main()
| 26.866667 | 138 | 0.6866 |
ec38afcaa153d1541d578cb150decef1b0c66d04 | 3,863 | py | Python | mythx_models/response/analysis.py | s0b0lev/mythx-models | 0fc14fef9e41a68a7d97e0bb170fd0eca5693d9a | [
"MIT"
] | null | null | null | mythx_models/response/analysis.py | s0b0lev/mythx-models | 0fc14fef9e41a68a7d97e0bb170fd0eca5693d9a | [
"MIT"
] | null | null | null | mythx_models/response/analysis.py | s0b0lev/mythx-models | 0fc14fef9e41a68a7d97e0bb170fd0eca5693d9a | [
"MIT"
] | null | null | null | """This module contains domain models regarding analysis jobs"""
from enum import Enum
from inflection import underscore
from mythx_models.response.base import BaseResponse
from mythx_models.util import deserialize_api_timestamp, serialize_api_timestamp
class AnalysisStatus(str, Enum):
    """An Enum describing the status an analysis job can be in."""

    # str mixin: members compare equal to their plain-string values
    QUEUED = "Queued"
    IN_PROGRESS = "In Progress"
    ERROR = "Error"
    FINISHED = "Finished"
class Analysis(BaseResponse):
    """An object describing an analysis job.

    Such a model was built, because many other API responses deliver the same data when it comes
    to analysis jobs. This makes the code more DRY, validation easier, and allows for recursive
    SerDe (e.g. mapping :code:`from_dict` to a deserialized JSON list of job objects.
    """

    def __init__(
        self,
        uuid: str,
        api_version: str,
        mythril_version: str,
        harvey_version: str,
        maru_version: str,
        queue_time: int,
        status: AnalysisStatus,
        submitted_at: str,
        submitted_by: str,
        run_time: int = 0,
        client_tool_name: str = None,
        error: str = None,
        info: str = None,
    ):
        self.uuid = uuid
        self.api_version = api_version
        self.mythril_version = mythril_version
        self.harvey_version = harvey_version
        self.maru_version = maru_version
        self.queue_time = queue_time
        self.run_time = run_time
        # Accept enum members or raw strings in any case ("finished", "FINISHED", ...)
        self.status = AnalysisStatus(status.title())
        self.submitted_at = deserialize_api_timestamp(submitted_at)
        self.submitted_by = submitted_by
        self.client_tool_name = client_tool_name
        self.error = error
        self.info = info

    @classmethod
    def from_dict(cls, d):
        """Create the response domain model from a dict.

        :param d: The dict to deserialize from
        :return: The domain model with the data from :code:`d` filled in
        """
        # The API sends camelCase keys; the constructor expects snake_case
        d = {underscore(k): v for k, v in d.items()}
        return cls(**d)

    def to_dict(self):
        """Serialize the response model to a Python dict.

        :return: A dict holding the request model data
        """
        d = {
            "uuid": self.uuid,
            "apiVersion": self.api_version,
            "mythrilVersion": self.mythril_version,
            "harveyVersion": self.harvey_version,
            "maruVersion": self.maru_version,
            "queueTime": self.queue_time,
            "runTime": self.run_time,
            "status": self.status.title(),
            "submittedAt": serialize_api_timestamp(self.submitted_at),
            "submittedBy": self.submitted_by,
            "clientToolName": self.client_tool_name,
        }
        if self.error is not None:
            d.update({"error": self.error})
        if self.info is not None:
            # Bug fix: the original serialized self.error under "info",
            # dropping the actual info message from the payload.
            d.update({"info": self.info})
        return d

    def __eq__(self, candidate):
        """Field-by-field equality against another Analysis instance."""
        return all(
            (
                self.uuid == candidate.uuid,
                self.api_version == candidate.api_version,
                self.mythril_version == candidate.mythril_version,
                self.harvey_version == candidate.harvey_version,
                self.maru_version == candidate.maru_version,
                self.queue_time == candidate.queue_time,
                self.run_time == candidate.run_time,
                self.status == candidate.status,
                self.submitted_at == candidate.submitted_at,
                self.submitted_by == candidate.submitted_by,
                self.client_tool_name == candidate.client_tool_name,
                self.error == candidate.error,
                self.info == candidate.info,
            )
        )

    def __repr__(self):
        return "<Analysis uuid={} status={}>".format(self.uuid, self.status)
| 33.885965 | 96 | 0.609371 |
5843d53d1f764be53feee69786346a108a4348c8 | 792 | py | Python | profiles-rest-api/profiles_api/migrations/0002_profilesfeeditem.py | Naoya-abe/django_REST_framework_basic | dcdb7efcb4b75550bd01cbe18619f2d0b75e0421 | [
"MIT"
] | 1 | 2021-08-28T08:40:23.000Z | 2021-08-28T08:40:23.000Z | profiles-rest-api/profiles_api/migrations/0002_profilesfeeditem.py | Naoya-abe/django_REST_framework_basic | dcdb7efcb4b75550bd01cbe18619f2d0b75e0421 | [
"MIT"
] | null | null | null | profiles-rest-api/profiles_api/migrations/0002_profilesfeeditem.py | Naoya-abe/django_REST_framework_basic | dcdb7efcb4b75550bd01cbe18619f2d0b75e0421 | [
"MIT"
] | null | null | null | # Generated by Django 2.2.12 on 2020-05-24 22:47
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated by Django 2.2.12: adds the ProfilesFeedItem model."""

    dependencies = [
        ('profiles_api', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='ProfilesFeedItem',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('status_text', models.CharField(max_length=255)),
                ('created_on', models.DateTimeField(auto_now_add=True)),
                # Each feed item belongs to one user; deleting the user
                # cascades and removes their feed items.
                ('user_profile', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
    ]
| 31.68 | 126 | 0.633838 |
46d422a64062e4b0d13297ead64a301f0d186202 | 3,108 | py | Python | add_outline.py | azeemba/blender-add-outline | 36c6b92a10834f2e0ff108a7c4b18cd61daf5010 | [
"MIT"
] | 2 | 2020-12-08T13:10:05.000Z | 2022-01-30T02:10:38.000Z | add_outline.py | azeemba/blender-add-outline | 36c6b92a10834f2e0ff108a7c4b18cd61daf5010 | [
"MIT"
] | null | null | null | add_outline.py | azeemba/blender-add-outline | 36c6b92a10834f2e0ff108a7c4b18cd61daf5010 | [
"MIT"
] | 1 | 2021-08-28T11:04:30.000Z | 2021-08-28T11:04:30.000Z |
# Add-on metadata consumed by Blender's add-on manager.
bl_info = {
    "name": "Outline",
    "description": "Add NPR/cartoon outline to an object that works in eevee and cycles",
    "author": "Azeem Bande-Ali",
    "version": (0, 1),
    "blender": (2, 90),  # minimum Blender version this add-on targets
    "category": "Material"
}
import bpy
from bpy.props import StringProperty
from bpy.types import Operator, AddonPreferences
def get_path_to_blendfile(context):
    """Return the user-configured path to the .blend file that holds the outline material.

    Reads the ``source`` field from this add-on's preferences entry.
    """
    addon_prefs = context.preferences.addons[__name__].preferences
    return addon_prefs.source
def main(context):
    """Apply the toon-outline setup to every selected object.

    For each selection this adds the solidify modifier and appends the
    "ToonOutline" material loaded from the configured library file.
    """
    blend_path = get_path_to_blendfile(context)
    for target in context.selected_objects:
        add_modifier(target)
        add_outline_material(target, blend_path, "ToonOutline")
def add_modifier(obj):
    """Attach a solidify modifier configured for an inverted-hull outline.

    The modifier extrudes flipped-normal geometry outward; the outline
    material is expected to occupy the slot after the existing materials.
    """
    obj.modifiers.new("Outline", "SOLIDIFY")
    solidify = obj.modifiers["Outline"]
    solidify.thickness = 0.01
    solidify.offset = 1
    solidify.use_rim = False
    solidify.use_flip_normals = True
    solidify.use_quality_normals = True

    # The outline material goes in the slot *after* the last existing one;
    # guarantee at least one material exists so that offset is meaningful.
    if not obj.data.materials:
        obj.data.materials.append(bpy.data.materials.new(name="DefaultMaterial"))
    solidify.material_offset = len(obj.data.materials)
def add_outline_material(obj, filepath, material_name):
    """Append the named outline material to *obj*, importing it on first use.

    The material is looked up among the current file's data-blocks; when it
    is absent it is appended (link=False) from the library at *filepath*.
    """
    material = bpy.data.materials.get(material_name)
    if material is None:
        # Pull the material data-block out of the library .blend file.
        with bpy.data.libraries.load(filepath, link=False) as (data_from, data_to):
            idx = data_from.materials.index(material_name)
            data_to.materials.append(data_from.materials[idx])
        material = bpy.data.materials.get(material_name)
    obj.data.materials.append(material)
class AddOutlineOperator(Operator):
    """
    Add NPR/cartoon outline to an object that works in eevee and cycles
    """
    # Registration identifiers used by Blender; referenced by menu_func.
    bl_idname = "object.add_outline"
    bl_label = "Add Outline"
    bl_options = {'REGISTER', 'UNDO'}

    def execute(self, context):
        # Delegate all work to main(); always report success to Blender.
        main(context)
        return {'FINISHED'}
def menu_func(self, context):
    """Draw callback: append the Add Outline operator to the Object menu."""
    layout = self.layout
    layout.operator(AddOutlineOperator.bl_idname)
class OutlinePreferences(AddonPreferences):
    """Add-on preferences holding the path to the .blend file that
    contains the ToonOutline material."""

    bl_idname = __name__

    # Blender 2.80+ requires properties on registered classes to be declared
    # as class annotations; the old assignment form is deprecated and warns.
    source: StringProperty(
        name="Blend file",
        subtype="FILE_PATH")

    def draw(self, context):
        """Draw the preferences UI: a hint label plus the file-path field."""
        layout = self.layout
        layout.label(text="Select blend file containing ToonOutline material")
        layout.prop(self, "source")
# All classes this add-on registers with Blender.
classes = (AddOutlineOperator, OutlinePreferences)


def register():
    """Register the add-on classes and extend the 3D View object menu."""
    for cls in classes:
        bpy.utils.register_class(cls)
    bpy.types.VIEW3D_MT_object.append(menu_func)


def unregister():
    """Remove the menu entry and unregister the add-on classes."""
    for cls in classes:
        bpy.utils.unregister_class(cls)
    bpy.types.VIEW3D_MT_object.remove(menu_func)


if __name__ == "__main__":
    register()
| 29.320755 | 89 | 0.694659 |
96f0d84f6ab3812d8c490f393857d39c9d05b657 | 1,996 | py | Python | notifications/migrations/0001_initial.py | bizeasy17/investtrack | 3840948896573f3906a5df80ea80859a492f4133 | [
"MIT"
] | null | null | null | notifications/migrations/0001_initial.py | bizeasy17/investtrack | 3840948896573f3906a5df80ea80859a492f4133 | [
"MIT"
] | 3 | 2021-07-15T13:23:28.000Z | 2021-12-09T03:32:16.000Z | notifications/migrations/0001_initial.py | bizeasy17/investtrack | 3840948896573f3906a5df80ea80859a492f4133 | [
"MIT"
] | 1 | 2021-08-19T14:42:59.000Z | 2021-08-19T14:42:59.000Z | # Generated by Django 3.0.2 on 2020-02-13 09:29
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import uuid
class Migration(migrations.Migration):
    """Auto-generated by Django 3.0.2: creates the Notification model."""

    initial = True

    dependencies = [
        ('contenttypes', '0002_remove_content_type_name'),
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='Notification',
            fields=[
                ('unread', models.BooleanField(db_index=True, default=True)),
                ('timestamp', models.DateTimeField(auto_now_add=True)),
                ('uuid_id', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),
                ('slug', models.SlugField(blank=True, max_length=210, null=True)),
                # Single-character verb codes mapped to human-readable actions.
                ('verb', models.CharField(choices=[('L', 'liked'), ('C', 'commented'), ('F', 'cavorited'), ('A', 'answered'), ('W', 'accepted'), ('E', 'edited'), ('K', 'also commented'), ('I', 'logged in'), ('O', 'logged out'), ('V', 'voted on'), ('S', 'shared'), ('U', 'created an account'), ('R', 'replied to')], max_length=1)),
                # Generic-FK pair pointing at the object the action affected.
                ('action_object_object_id', models.CharField(blank=True, max_length=50, null=True)),
                ('action_object_content_type', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='notify_action_object', to='contenttypes.ContentType')),
                ('actor', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='notify_actor', to=settings.AUTH_USER_MODEL)),
                ('recipient', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='notifications', to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'verbose_name': 'Notification',
                'verbose_name_plural': 'Notifications',
                'ordering': ('-timestamp',),
            },
        ),
    ]
| 51.179487 | 330 | 0.622244 |
0eab2bb54e85a8fbdd97595fda895f75c24205ba | 5,955 | py | Python | m2-modified/ims/common/agentless-system-crawler/tests/unit/test_osinfo.py | CCI-MOC/ABMI | 955c12ae9d2dc7afe7323f6c25f2af120f5b281a | [
"Apache-2.0"
] | null | null | null | m2-modified/ims/common/agentless-system-crawler/tests/unit/test_osinfo.py | CCI-MOC/ABMI | 955c12ae9d2dc7afe7323f6c25f2af120f5b281a | [
"Apache-2.0"
] | 1 | 2017-05-12T14:33:23.000Z | 2017-05-12T14:33:23.000Z | m2-modified/ims/common/agentless-system-crawler/tests/unit/test_osinfo.py | CCI-MOC/ABMI | 955c12ae9d2dc7afe7323f6c25f2af120f5b281a | [
"Apache-2.0"
] | null | null | null | import unittest
from unittest import TestCase
import mock
from utils.osinfo import (_get_file_name,
parse_lsb_release,
parse_os_release,
parse_redhat_release,
parse_centos_release,
get_osinfo_from_lsb_release,
get_osinfo_from_os_release,
get_osinfo_from_redhat_centos
)
class Test_osinfo(TestCase):
    """Unit tests for the utils.osinfo parsing helpers.

    The get_osinfo_* tests patch ``__builtin__.open`` — this suite targets
    Python 2; on Python 3 the patch target would be ``builtins.open``.
    """

    def test_get_file_name(self):
        self.assertEqual(_get_file_name('/', 'xyz'), '/xyz')
        self.assertEqual(_get_file_name('/abc/def', 'xyz'), '/abc/def/xyz')

    def test_parse_lsb_release(self):
        data = ['DISTRIB_ID=Ubuntu', 'DISTRIB_RELEASE=15.10',
                'DISTRIB_CODENAME=wily' 'DISTRIB_DESCRIPTION="Ubuntu 15.10"']
        result = parse_lsb_release(data)
        self.assertEqual(result['os'], 'ubuntu')
        self.assertEqual(result['version'], '15.10')

    def test_parse_os_release(self):
        data = ['NAME="Ubuntu"', 'VERSION="14.04.4 LTS, Trusty Tahr"',
                'ID=ubuntu', 'ID_LIKE=debian',
                'PRETTY_NAME="Ubuntu 14.04.4 LTS"', 'VERSION_ID="14.04"',
                'HOME_URL="http://www.ubuntu.com/"',
                'SUPPORT_URL="http://help.ubuntu.com/"',
                'BUG_REPORT_URL="http://bugs.launchpad.net/ubuntu/"'
                ]
        result = parse_os_release(data)
        self.assertEqual(result['os'], 'ubuntu')
        self.assertEqual(result['version'], '14.04')

    def test_alpine_parse_os_release(self):
        data = ['NAME="Alpine Linux"',
                'ID=alpine',
                'VERSION_ID=3.4.0',
                'PRETTY_NAME="Alpine Linux v3.4"',
                'HOME_URL="http://alpinelinux.org"',
                'BUG_REPORT_URL="http://bugs.alpinelinux.org"'
                ]
        result = parse_os_release(data)
        self.assertEqual(result['os'], 'alpine')
        self.assertEqual(result['version'], '3.4.0')

    def test_parse_redhat_release(self):
        data = ['Red Hat Enterprise Linux Server release 7.2 (Maipo)']
        result = parse_redhat_release(data)
        self.assertEqual(result['os'], 'rhel')
        self.assertEqual(result['version'], '7.2')

    def test2_parse_redhat_release(self):
        # Major-only version string (no minor component).
        data = ['Red Hat Enterprise Linux Server release 7 (Maipo)']
        result = parse_redhat_release(data)
        self.assertEqual(result['os'], 'rhel')
        self.assertEqual(result['version'], '7')

    def test_parse_centos_release(self):
        data = ['CentOS release 6.8 (Final)']
        result = parse_centos_release(data)
        self.assertEqual(result['os'], 'centos')
        self.assertEqual(result['version'], '6.8')

    def test2_parse_centos_release(self):
        data = ['CentOS Linux release 6.8 (Final)']
        result = parse_centos_release(data)
        self.assertEqual(result['os'], 'centos')
        self.assertEqual(result['version'], '6.8')

    def test3_parse_centos_release(self):
        data = ['CentOS release 6 (Final)']
        result = parse_centos_release(data)
        self.assertEqual(result['os'], 'centos')
        self.assertEqual(result['version'], '6')

    def test_get_osinfo_from_lsb_release(self):
        data = ['DISTRIB_ID=Ubuntu', 'DISTRIB_RELEASE=15.10',
                'DISTRIB_CODENAME=wily' 'DISTRIB_DESCRIPTION="Ubuntu 15.10"']
        with mock.patch(
                '__builtin__.open', mock.mock_open(read_data="\n".join(data)),
                create=True) as m:
            m.return_value.__iter__.return_value = data
            result = get_osinfo_from_lsb_release()
            self.assertEqual(result['os'], 'ubuntu')
            self.assertEqual(result['version'], '15.10')

    def test1_get_osinfo_from_lsb_release(self):
        # A missing /etc/lsb-release file yields an empty result.
        with mock.patch(
                '__builtin__.open', mock.mock_open(), create=True) as m:
            m.side_effect = IOError()
            result = get_osinfo_from_lsb_release()
            self.assertFalse(result)

    def test_get_osinfo_from_os_release(self):
        data = ['NAME="Ubuntu"', 'VERSION="14.04.4 LTS, Trusty Tahr"',
                'ID=ubuntu', 'ID_LIKE=debian',
                'PRETTY_NAME="Ubuntu 14.04.4 LTS"', 'VERSION_ID="14.04"',
                'HOME_URL="http://www.ubuntu.com/"',
                'SUPPORT_URL="http://help.ubuntu.com/"',
                'BUG_REPORT_URL="http://bugs.launchpad.net/ubuntu/"'
                ]
        with mock.patch(
                '__builtin__.open', mock.mock_open(read_data="\n".join(data)),
                create=True) as m:
            m.return_value.__iter__.return_value = data
            result = get_osinfo_from_os_release()
            self.assertEqual(result['os'], 'ubuntu')
            self.assertEqual(result['version'], '14.04')

    def test1_get_osinfo_from_os_release(self):
        with mock.patch(
                '__builtin__.open', mock.mock_open(), create=True) as m:
            m.side_effect = IOError()
            result = get_osinfo_from_os_release()
            self.assertFalse(result)

    def test_get_osinfo_from_redhat_centos(self):
        data = ['Red Hat Enterprise Linux Server release 7.2 (Maipo)']
        with mock.patch(
                '__builtin__.open', mock.mock_open(read_data="\n".join(data)),
                create=True) as m:
            m.return_value.__iter__.return_value = data
            result = get_osinfo_from_redhat_centos()
            self.assertEqual(result['os'], 'rhel')
            self.assertEqual(result['version'], '7.2')

    def test1_get_osinfo_from_redhat_centos(self):
        # Bug fix: this was named ``mtest1_...`` so neither unittest nor
        # pytest ever discovered or ran it.
        with mock.patch(
                '__builtin__.open', mock.mock_open(), create=True) as m:
            m.side_effect = IOError()
            result = get_osinfo_from_redhat_centos()
            self.assertFalse(result)
if __name__ == '__main__':
    # Allow running this test module directly with the stdlib runner.
    unittest.main()
| 38.173077 | 78 | 0.584215 |
440a6bbcc93e62c390a0ce8c7ae95fc784c3b3e2 | 1,301 | py | Python | src/zope/app/server/accesslog.py | zopefoundation/zope.app.server | e0734fdc7327a1b41542b664eb745fa4299c2a57 | [
"ZPL-2.1"
] | null | null | null | src/zope/app/server/accesslog.py | zopefoundation/zope.app.server | e0734fdc7327a1b41542b664eb745fa4299c2a57 | [
"ZPL-2.1"
] | 6 | 2017-10-30T14:56:41.000Z | 2020-11-11T14:08:19.000Z | src/zope/app/server/accesslog.py | zopefoundation/zope.app.server | e0734fdc7327a1b41542b664eb745fa4299c2a57 | [
"ZPL-2.1"
] | 1 | 2015-04-03T08:06:09.000Z | 2015-04-03T08:06:09.000Z | ##############################################################################
#
# Copyright (c) 2004 Zope Foundation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""Configuration support for the access log.
This assumes that access logging is being performed through the logger
object returned by logging.getLogger('accesslog').
"""
import logging
from ZConfig.components.logger.logger import LoggerFactoryBase
class AccessLogFactory(LoggerFactoryBase):
    """Logger factory that returns the shared 'accesslog' logger.

    The logger is pinned to INFO level, does not propagate to the root
    logger, and gets a bare Formatter on every handler so access-log
    records are emitted verbatim.
    """

    name = "accesslog"

    def create(self):
        """Build and configure the access logger."""
        access_logger = LoggerFactoryBase.create(self)
        access_logger.setLevel(logging.INFO)
        access_logger.propagate = False
        plain_formatter = logging.Formatter()
        for log_handler in access_logger.handlers:
            log_handler.setFormatter(plain_formatter)
        return access_logger
| 35.162162 | 78 | 0.648732 |
3fcee7147cfddbf915b7867580fea2e638a12e20 | 5,732 | py | Python | kubernetes/client/models/v1_env_var.py | carloscastrojumo/python | f461dd42d48650a4ae1b41d630875cad9fcb68ad | [
"Apache-2.0"
] | 2 | 2021-03-09T12:42:05.000Z | 2021-03-09T13:27:50.000Z | kubernetes/client/models/v1_env_var.py | carloscastrojumo/python | f461dd42d48650a4ae1b41d630875cad9fcb68ad | [
"Apache-2.0"
] | 7 | 2021-04-13T03:04:42.000Z | 2022-03-02T03:10:18.000Z | kubernetes/client/models/v1_env_var.py | carloscastrojumo/python | f461dd42d48650a4ae1b41d630875cad9fcb68ad | [
"Apache-2.0"
] | 1 | 2021-06-13T09:21:37.000Z | 2021-06-13T09:21:37.000Z | # coding: utf-8
"""
Kubernetes
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
The version of the OpenAPI document: release-1.17
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from kubernetes.client.configuration import Configuration
class V1EnvVar(object):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech

    Do not edit the class manually.
    """

    """
    Attributes:
      openapi_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """
    openapi_types = {
        'name': 'str',
        'value': 'str',
        'value_from': 'V1EnvVarSource'
    }

    attribute_map = {
        'name': 'name',
        'value': 'value',
        'value_from': 'valueFrom'
    }

    def __init__(self, name=None, value=None, value_from=None, local_vars_configuration=None):  # noqa: E501
        """V1EnvVar - a model defined in OpenAPI"""  # noqa: E501
        if local_vars_configuration is None:
            local_vars_configuration = Configuration()
        self.local_vars_configuration = local_vars_configuration

        self._name = None
        self._value = None
        self._value_from = None
        self.discriminator = None

        # 'name' is the only required field; the setter validates it.
        self.name = name
        if value is not None:
            self.value = value
        if value_from is not None:
            self.value_from = value_from

    @property
    def name(self):
        """Gets the name of this V1EnvVar.  # noqa: E501

        Name of the environment variable. Must be a C_IDENTIFIER.  # noqa: E501

        :return: The name of this V1EnvVar.  # noqa: E501
        :rtype: str
        """
        return self._name

    @name.setter
    def name(self, name):
        """Sets the name of this V1EnvVar.

        Name of the environment variable. Must be a C_IDENTIFIER.  # noqa: E501

        :param name: The name of this V1EnvVar.  # noqa: E501
        :type: str
        """
        if self.local_vars_configuration.client_side_validation and name is None:  # noqa: E501
            raise ValueError("Invalid value for `name`, must not be `None`")  # noqa: E501

        self._name = name

    @property
    def value(self):
        """Gets the value of this V1EnvVar.  # noqa: E501

        Variable references $(VAR_NAME) are expanded using the previous defined environment variables in the container and any service environment variables. If a variable cannot be resolved, the reference in the input string will be unchanged. The $(VAR_NAME) syntax can be escaped with a double $$, ie: $$(VAR_NAME). Escaped references will never be expanded, regardless of whether the variable exists or not. Defaults to \"\".  # noqa: E501

        :return: The value of this V1EnvVar.  # noqa: E501
        :rtype: str
        """
        return self._value

    @value.setter
    def value(self, value):
        """Sets the value of this V1EnvVar.

        Variable references $(VAR_NAME) are expanded using the previous defined environment variables in the container and any service environment variables. If a variable cannot be resolved, the reference in the input string will be unchanged. The $(VAR_NAME) syntax can be escaped with a double $$, ie: $$(VAR_NAME). Escaped references will never be expanded, regardless of whether the variable exists or not. Defaults to \"\".  # noqa: E501

        :param value: The value of this V1EnvVar.  # noqa: E501
        :type: str
        """

        self._value = value

    @property
    def value_from(self):
        """Gets the value_from of this V1EnvVar.  # noqa: E501


        :return: The value_from of this V1EnvVar.  # noqa: E501
        :rtype: V1EnvVarSource
        """
        return self._value_from

    @value_from.setter
    def value_from(self, value_from):
        """Sets the value_from of this V1EnvVar.


        :param value_from: The value_from of this V1EnvVar.  # noqa: E501
        :type: V1EnvVarSource
        """

        self._value_from = value_from

    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}

        # Recursively serialize nested models, lists, and dicts.
        for attr, _ in six.iteritems(self.openapi_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value

        return result

    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, V1EnvVar):
            return False

        return self.to_dict() == other.to_dict()

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        if not isinstance(other, V1EnvVar):
            return True

        return self.to_dict() != other.to_dict()
| 32.202247 | 443 | 0.604676 |
52bee7b395f739dcaa8b3c6ff04686fa4266ee70 | 25,324 | py | Python | tests/hazmat/backends/test_openssl.py | maqp/cryptography | d824f6005a8520e3b98fb349a899c33f300b7a08 | [
"Apache-2.0",
"BSD-3-Clause"
] | 2 | 2020-02-05T04:57:55.000Z | 2021-03-03T23:29:30.000Z | tests/hazmat/backends/test_openssl.py | maqp/cryptography | d824f6005a8520e3b98fb349a899c33f300b7a08 | [
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null | tests/hazmat/backends/test_openssl.py | maqp/cryptography | d824f6005a8520e3b98fb349a899c33f300b7a08 | [
"Apache-2.0",
"BSD-3-Clause"
] | 1 | 2021-03-03T23:31:18.000Z | 2021-03-03T23:31:18.000Z | # This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
import datetime
import os
import subprocess
import sys
import textwrap
import pytest
from cryptography import utils, x509
from cryptography.exceptions import InternalError, _Reasons
from cryptography.hazmat.backends.interfaces import RSABackend
from cryptography.hazmat.backends.openssl.backend import (
Backend, backend
)
from cryptography.hazmat.backends.openssl.ec import _sn_to_elliptic_curve
from cryptography.hazmat.primitives import hashes, serialization
from cryptography.hazmat.primitives.asymmetric import dsa, ec, padding
from cryptography.hazmat.primitives.ciphers import Cipher
from cryptography.hazmat.primitives.ciphers.algorithms import AES
from cryptography.hazmat.primitives.ciphers.modes import CBC, CTR
from ..primitives.fixtures_dsa import DSA_KEY_2048
from ..primitives.fixtures_rsa import RSA_KEY_2048, RSA_KEY_512
from ..primitives.test_ec import _skip_curve_unsupported
from ...doubles import (
DummyAsymmetricPadding, DummyCipherAlgorithm, DummyHashAlgorithm, DummyMode
)
from ...test_x509 import _load_cert
from ...utils import load_vectors_from_file, raises_unsupported_algorithm
def skip_if_libre_ssl(openssl_version):
    """Skip the calling test when the backend reports LibreSSL.

    LibreSSL's RAND_bytes cannot be redirected, so engine-based RNG
    tests are meaningless there. Returns None for OpenSSL versions.
    """
    if u"LibreSSL" in openssl_version:
        pytest.skip("LibreSSL hard-codes RAND_bytes to use arc4random.")
class TestLibreSkip(object):
    """Self-tests for the skip_if_libre_ssl helper above."""

    def test_skip_no(self):
        # OpenSSL version strings must not trigger a skip.
        assert skip_if_libre_ssl(u"OpenSSL 0.9.8zf 19 Mar 2015") is None

    def test_skip_yes(self):
        # LibreSSL version strings must raise pytest's skip exception.
        with pytest.raises(pytest.skip.Exception):
            skip_if_libre_ssl(u"LibreSSL 2.1.6")
class DummyMGF(object):
    """Stand-in mask generation function used to exercise the
    unsupported-padding code paths; exposes only a zero salt length."""

    _salt_length = 0
class TestOpenSSL(object):
    """Sanity checks for the OpenSSL backend's low-level helpers
    (cipher registry, error queue, bignum conversion, RNG hook)."""

    def test_backend_exists(self):
        assert backend

    def test_openssl_version_text(self):
        """
        This test checks the value of OPENSSL_VERSION_TEXT.

        Unfortunately, this define does not appear to have a
        formal content definition, so for now we'll test to see
        if it starts with OpenSSL or LibreSSL as that appears
        to be true for every OpenSSL-alike.
        """
        assert (
            backend.openssl_version_text().startswith("OpenSSL") or
            backend.openssl_version_text().startswith("LibreSSL")
        )

    def test_supports_cipher(self):
        assert backend.cipher_supported(None, None) is False

    def test_aes_ctr_always_available(self):
        # AES CTR should always be available in both 0.9.8 and 1.0.0+
        assert backend.cipher_supported(AES(b"\x00" * 16),
                                        CTR(b"\x00" * 16)) is True

    def test_register_duplicate_cipher_adapter(self):
        assert backend.cipher_supported(None, None) is False or True  # see below
        with pytest.raises(ValueError):
            backend.register_cipher_adapter(AES, CBC, None)

    @pytest.mark.parametrize("mode", [DummyMode(), None])
    def test_nonexistent_cipher(self, mode):
        # Register an adapter that yields a NULL EVP cipher so encryptor()
        # hits the unsupported-cipher branch.
        b = Backend()
        b.register_cipher_adapter(
            DummyCipherAlgorithm,
            type(mode),
            lambda backend, cipher, mode: backend._ffi.NULL
        )
        cipher = Cipher(
            DummyCipherAlgorithm(), mode, backend=b,
        )
        with raises_unsupported_algorithm(_Reasons.UNSUPPORTED_CIPHER):
            cipher.encryptor()

    def test_openssl_assert(self):
        backend.openssl_assert(True)
        with pytest.raises(InternalError):
            backend.openssl_assert(False)

    def test_consume_errors(self):
        # Seed the OpenSSL error queue, then verify _consume_errors drains it.
        for i in range(10):
            backend._lib.ERR_put_error(backend._lib.ERR_LIB_EVP, 0, 0,
                                       b"test_openssl.py", -1)

        assert backend._lib.ERR_peek_error() != 0

        errors = backend._consume_errors()

        assert backend._lib.ERR_peek_error() == 0
        assert len(errors) == 10

    def test_ssl_ciphers_registered(self):
        meth = backend._lib.TLSv1_method()
        ctx = backend._lib.SSL_CTX_new(meth)
        assert ctx != backend._ffi.NULL
        backend._lib.SSL_CTX_free(ctx)

    def test_evp_ciphers_registered(self):
        cipher = backend._lib.EVP_get_cipherbyname(b"aes-256-cbc")
        assert cipher != backend._ffi.NULL

    def test_error_strings_loaded(self):
        # returns a value in a static buffer
        err = backend._lib.ERR_error_string(101183626, backend._ffi.NULL)
        assert backend._ffi.string(err) == (
            b"error:0607F08A:digital envelope routines:EVP_EncryptFinal_ex:"
            b"data not multiple of block length"
        )

    def test_unknown_error_in_cipher_finalize(self):
        # An unrelated entry on the error queue must surface as InternalError.
        cipher = Cipher(AES(b"\0" * 16), CBC(b"\0" * 16), backend=backend)
        enc = cipher.encryptor()
        enc.update(b"\0")
        backend._lib.ERR_put_error(0, 0, 1,
                                   b"test_openssl.py", -1)
        with pytest.raises(InternalError):
            enc.finalize()

    def test_derive_pbkdf2_raises_unsupported_on_old_openssl(self):
        if backend.pbkdf2_hmac_supported(hashes.SHA256()):
            pytest.skip("Requires an older OpenSSL")
        with raises_unsupported_algorithm(_Reasons.UNSUPPORTED_HASH):
            backend.derive_pbkdf2_hmac(hashes.SHA256(), 10, b"", 1000, b"")

    @pytest.mark.skipif(
        backend._lib.OPENSSL_VERSION_NUMBER >= 0x1000000f,
        reason="Requires an older OpenSSL. Must be < 1.0.0"
    )
    def test_large_key_size_on_old_openssl(self):
        with pytest.raises(ValueError):
            dsa.generate_parameters(2048, backend=backend)

        with pytest.raises(ValueError):
            dsa.generate_parameters(3072, backend=backend)

    @pytest.mark.skipif(
        backend._lib.OPENSSL_VERSION_NUMBER < 0x1000000f,
        reason="Requires a newer OpenSSL. Must be >= 1.0.0"
    )
    def test_large_key_size_on_new_openssl(self):
        parameters = dsa.generate_parameters(2048, backend)
        param_num = parameters.parameter_numbers()
        assert utils.bit_length(param_num.p) == 2048
        parameters = dsa.generate_parameters(3072, backend)
        param_num = parameters.parameter_numbers()
        assert utils.bit_length(param_num.p) == 3072

    def test_int_to_bn(self):
        value = (2 ** 4242) - 4242
        bn = backend._int_to_bn(value)
        assert bn != backend._ffi.NULL
        bn = backend._ffi.gc(bn, backend._lib.BN_free)

        assert bn
        assert backend._bn_to_int(bn) == value

    def test_int_to_bn_inplace(self):
        value = (2 ** 4242) - 4242
        bn_ptr = backend._lib.BN_new()
        assert bn_ptr != backend._ffi.NULL
        bn_ptr = backend._ffi.gc(bn_ptr, backend._lib.BN_free)
        bn = backend._int_to_bn(value, bn_ptr)

        # In-place conversion must reuse the caller-supplied BIGNUM.
        assert bn == bn_ptr
        assert backend._bn_to_int(bn_ptr) == value

    def test_bn_to_int(self):
        bn = backend._int_to_bn(0)
        assert backend._bn_to_int(bn) == 0

    def test_actual_osrandom_bytes(self, monkeypatch):
        skip_if_libre_ssl(backend.openssl_version_text())
        sample_data = (b"\x01\x02\x03\x04" * 4)
        length = len(sample_data)

        def notrandom(size):
            assert size == length
            return sample_data
        # With os.urandom patched, RAND_bytes output proves the osrandom
        # engine is really the active RNG source.
        monkeypatch.setattr(os, "urandom", notrandom)

        buf = backend._ffi.new("char[]", length)
        backend._lib.RAND_bytes(buf, length)
        assert backend._ffi.buffer(buf)[0:length] == sample_data
class TestOpenSSLRandomEngine(object):
    """Tests switching between the osrandom engine and OpenSSL's builtin RNG.

    These mutate global backend state, so teardown restores the osrandom
    engine after every test.
    """

    def teardown_method(self, method):
        # we need to reset state to being default. backend is a shared global
        # for all these tests.
        backend.activate_osrandom_engine()
        current_default = backend._lib.ENGINE_get_default_RAND()
        name = backend._lib.ENGINE_get_name(current_default)
        assert name == backend._binding._osrandom_engine_name

    def test_osrandom_engine_is_default(self, tmpdir):
        # Run a fresh interpreter so we observe the default engine at import
        # time, unaffected by this process's engine toggling.
        engine_printer = textwrap.dedent(
            """
            import sys
            from cryptography.hazmat.backends.openssl.backend import backend
            e = backend._lib.ENGINE_get_default_RAND()
            name = backend._lib.ENGINE_get_name(e)
            sys.stdout.write(backend._ffi.string(name).decode('ascii'))
            res = backend._lib.ENGINE_free(e)
            assert res == 1
            """
        )
        engine_name = tmpdir.join('engine_name')

        # If we're running tests via ``python setup.py test`` in a clean
        # environment then all of our dependencies are going to be installed
        # into either the current directory or the .eggs directory. However the
        # subprocess won't know to activate these dependencies, so we'll get it
        # to do so by passing our entire sys.path into the subprocess via the
        # PYTHONPATH environment variable.
        env = os.environ.copy()
        env["PYTHONPATH"] = os.pathsep.join(sys.path)

        with engine_name.open('w') as out:
            subprocess.check_call(
                [sys.executable, "-c", engine_printer],
                env=env,
                stdout=out,
                stderr=subprocess.PIPE,
            )

        osrandom_engine_name = backend._ffi.string(
            backend._binding._osrandom_engine_name
        )

        assert engine_name.read().encode('ascii') == osrandom_engine_name

    def test_osrandom_sanity_check(self):
        # This test serves as a check against catastrophic failure.
        # NOTE(review): the final compare is against a str, not bytes —
        # looks Python-2 specific; on Python 3 it is always unequal. Confirm.
        buf = backend._ffi.new("char[]", 500)
        res = backend._lib.RAND_bytes(buf, 500)
        assert res == 1
        assert backend._ffi.buffer(buf)[:] != "\x00" * 500

    def test_activate_osrandom_no_default(self):
        backend.activate_builtin_random()
        e = backend._lib.ENGINE_get_default_RAND()
        assert e == backend._ffi.NULL
        backend.activate_osrandom_engine()
        e = backend._lib.ENGINE_get_default_RAND()
        name = backend._lib.ENGINE_get_name(e)
        assert name == backend._binding._osrandom_engine_name
        res = backend._lib.ENGINE_free(e)
        assert res == 1

    def test_activate_builtin_random(self):
        e = backend._lib.ENGINE_get_default_RAND()
        assert e != backend._ffi.NULL
        name = backend._lib.ENGINE_get_name(e)
        assert name == backend._binding._osrandom_engine_name
        res = backend._lib.ENGINE_free(e)
        assert res == 1
        backend.activate_builtin_random()
        e = backend._lib.ENGINE_get_default_RAND()
        assert e == backend._ffi.NULL

    def test_activate_builtin_random_already_active(self):
        # Activating twice must be a no-op, not an error.
        backend.activate_builtin_random()
        e = backend._lib.ENGINE_get_default_RAND()
        assert e == backend._ffi.NULL
        backend.activate_builtin_random()
        e = backend._lib.ENGINE_get_default_RAND()
        assert e == backend._ffi.NULL

    def test_activate_osrandom_already_default(self):
        e = backend._lib.ENGINE_get_default_RAND()
        name = backend._lib.ENGINE_get_name(e)
        assert name == backend._binding._osrandom_engine_name
        res = backend._lib.ENGINE_free(e)
        assert res == 1
        backend.activate_osrandom_engine()
        e = backend._lib.ENGINE_get_default_RAND()
        name = backend._lib.ENGINE_get_name(e)
        assert name == backend._binding._osrandom_engine_name
        res = backend._lib.ENGINE_free(e)
        assert res == 1
class TestOpenSSLRSA(object):
def test_generate_rsa_parameters_supported(self):
assert backend.generate_rsa_parameters_supported(1, 1024) is False
assert backend.generate_rsa_parameters_supported(4, 1024) is False
assert backend.generate_rsa_parameters_supported(3, 1024) is True
assert backend.generate_rsa_parameters_supported(3, 511) is False
def test_generate_bad_public_exponent(self):
with pytest.raises(ValueError):
backend.generate_rsa_private_key(public_exponent=1, key_size=2048)
with pytest.raises(ValueError):
backend.generate_rsa_private_key(public_exponent=4, key_size=2048)
def test_cant_generate_insecure_tiny_key(self):
with pytest.raises(ValueError):
backend.generate_rsa_private_key(public_exponent=65537,
key_size=511)
with pytest.raises(ValueError):
backend.generate_rsa_private_key(public_exponent=65537,
key_size=256)
@pytest.mark.skipif(
backend._lib.OPENSSL_VERSION_NUMBER >= 0x1000100f,
reason="Requires an older OpenSSL. Must be < 1.0.1"
)
def test_non_sha1_pss_mgf1_hash_algorithm_on_old_openssl(self):
private_key = RSA_KEY_512.private_key(backend)
with raises_unsupported_algorithm(_Reasons.UNSUPPORTED_HASH):
private_key.signer(
padding.PSS(
mgf=padding.MGF1(
algorithm=hashes.SHA256(),
),
salt_length=padding.PSS.MAX_LENGTH
),
hashes.SHA1()
)
public_key = private_key.public_key()
with raises_unsupported_algorithm(_Reasons.UNSUPPORTED_HASH):
public_key.verifier(
b"sig",
padding.PSS(
mgf=padding.MGF1(
algorithm=hashes.SHA256(),
),
salt_length=padding.PSS.MAX_LENGTH
),
hashes.SHA1()
)
def test_rsa_padding_unsupported_pss_mgf1_hash(self):
assert backend.rsa_padding_supported(
padding.PSS(mgf=padding.MGF1(DummyHashAlgorithm()), salt_length=0)
) is False
def test_rsa_padding_unsupported(self):
assert backend.rsa_padding_supported(DummyAsymmetricPadding()) is False
def test_rsa_padding_supported_pkcs1v15(self):
assert backend.rsa_padding_supported(padding.PKCS1v15()) is True
def test_rsa_padding_supported_pss(self):
assert backend.rsa_padding_supported(
padding.PSS(mgf=padding.MGF1(hashes.SHA1()), salt_length=0)
) is True
def test_rsa_padding_supported_oaep(self):
assert backend.rsa_padding_supported(
padding.OAEP(
mgf=padding.MGF1(algorithm=hashes.SHA1()),
algorithm=hashes.SHA1(),
label=None
),
) is True
def test_rsa_padding_unsupported_mgf(self):
assert backend.rsa_padding_supported(
padding.OAEP(
mgf=DummyMGF(),
algorithm=hashes.SHA1(),
label=None
),
) is False
assert backend.rsa_padding_supported(
padding.PSS(mgf=DummyMGF(), salt_length=0)
) is False
def test_unsupported_mgf1_hash_algorithm_decrypt(self):
private_key = RSA_KEY_512.private_key(backend)
with raises_unsupported_algorithm(_Reasons.UNSUPPORTED_HASH):
private_key.decrypt(
b"0" * 64,
padding.OAEP(
mgf=padding.MGF1(algorithm=hashes.SHA256()),
algorithm=hashes.SHA1(),
label=None
)
)
def test_unsupported_oaep_hash_algorithm_decrypt(self):
private_key = RSA_KEY_512.private_key(backend)
with raises_unsupported_algorithm(_Reasons.UNSUPPORTED_HASH):
private_key.decrypt(
b"0" * 64,
padding.OAEP(
mgf=padding.MGF1(algorithm=hashes.SHA1()),
algorithm=hashes.SHA256(),
label=None
)
)
def test_unsupported_oaep_label_decrypt(self):
private_key = RSA_KEY_512.private_key(backend)
with pytest.raises(ValueError):
private_key.decrypt(
b"0" * 64,
padding.OAEP(
mgf=padding.MGF1(algorithm=hashes.SHA1()),
algorithm=hashes.SHA1(),
label=b"label"
)
)
@pytest.mark.skipif(
backend._lib.OPENSSL_VERSION_NUMBER <= 0x10001000,
reason="Requires an OpenSSL version >= 1.0.1"
)
class TestOpenSSLCMAC(object):
def test_unsupported_cipher(self):
with raises_unsupported_algorithm(_Reasons.UNSUPPORTED_CIPHER):
backend.create_cmac_ctx(DummyCipherAlgorithm())
class TestOpenSSLCreateX509CSR(object):
@pytest.mark.skipif(
backend._lib.OPENSSL_VERSION_NUMBER >= 0x10001000,
reason="Requires an older OpenSSL. Must be < 1.0.1"
)
def test_unsupported_dsa_keys(self):
private_key = DSA_KEY_2048.private_key(backend)
with pytest.raises(NotImplementedError):
backend.create_x509_csr(object(), private_key, hashes.SHA1())
@pytest.mark.skipif(
backend._lib.OPENSSL_VERSION_NUMBER >= 0x10001000,
reason="Requires an older OpenSSL. Must be < 1.0.1"
)
def test_unsupported_ec_keys(self):
_skip_curve_unsupported(backend, ec.SECP256R1())
private_key = ec.generate_private_key(ec.SECP256R1(), backend)
with pytest.raises(NotImplementedError):
backend.create_x509_csr(object(), private_key, hashes.SHA1())
class TestOpenSSLSignX509Certificate(object):
def test_requires_certificate_builder(self):
private_key = RSA_KEY_2048.private_key(backend)
with pytest.raises(TypeError):
backend.create_x509_certificate(
object(), private_key, DummyHashAlgorithm()
)
@pytest.mark.skipif(
backend._lib.OPENSSL_VERSION_NUMBER >= 0x10001000,
reason="Requires an older OpenSSL. Must be < 1.0.1"
)
def test_sign_with_dsa_private_key_is_unsupported(self):
private_key = DSA_KEY_2048.private_key(backend)
builder = x509.CertificateBuilder()
builder = builder.subject_name(
x509.Name([x509.NameAttribute(x509.NameOID.COUNTRY_NAME, u'US')])
).issuer_name(
x509.Name([x509.NameAttribute(x509.NameOID.COUNTRY_NAME, u'US')])
).serial_number(
1
).public_key(
private_key.public_key()
).not_valid_before(
datetime.datetime(2002, 1, 1, 12, 1)
).not_valid_after(
datetime.datetime(2032, 1, 1, 12, 1)
)
with pytest.raises(NotImplementedError):
builder.sign(private_key, hashes.SHA512(), backend)
@pytest.mark.skipif(
backend._lib.OPENSSL_VERSION_NUMBER >= 0x10001000,
reason="Requires an older OpenSSL. Must be < 1.0.1"
)
def test_sign_with_ec_private_key_is_unsupported(self):
_skip_curve_unsupported(backend, ec.SECP256R1())
private_key = ec.generate_private_key(ec.SECP256R1(), backend)
builder = x509.CertificateBuilder()
builder = builder.subject_name(
x509.Name([x509.NameAttribute(x509.NameOID.COUNTRY_NAME, u'US')])
).issuer_name(
x509.Name([x509.NameAttribute(x509.NameOID.COUNTRY_NAME, u'US')])
).serial_number(
1
).public_key(
private_key.public_key()
).not_valid_before(
datetime.datetime(2002, 1, 1, 12, 1)
).not_valid_after(
datetime.datetime(2032, 1, 1, 12, 1)
)
with pytest.raises(NotImplementedError):
builder.sign(private_key, hashes.SHA512(), backend)
class TestOpenSSLSignX509CertificateRevocationList(object):
def test_invalid_builder(self):
private_key = RSA_KEY_2048.private_key(backend)
with pytest.raises(TypeError):
backend.create_x509_crl(object(), private_key, hashes.SHA256())
@pytest.mark.skipif(
backend._lib.OPENSSL_VERSION_NUMBER >= 0x10001000,
reason="Requires an older OpenSSL. Must be < 1.0.1"
)
def test_sign_with_dsa_private_key_is_unsupported(self):
private_key = DSA_KEY_2048.private_key(backend)
builder = x509.CertificateRevocationListBuilder()
builder = builder.issuer_name(
x509.Name([x509.NameAttribute(x509.NameOID.COUNTRY_NAME, u'US')])
).last_update(
datetime.datetime(2002, 1, 1, 12, 1)
).next_update(
datetime.datetime(2032, 1, 1, 12, 1)
)
with pytest.raises(NotImplementedError):
builder.sign(private_key, hashes.SHA1(), backend)
@pytest.mark.skipif(
backend._lib.OPENSSL_VERSION_NUMBER >= 0x10001000,
reason="Requires an older OpenSSL. Must be < 1.0.1"
)
def test_sign_with_ec_private_key_is_unsupported(self):
_skip_curve_unsupported(backend, ec.SECP256R1())
private_key = ec.generate_private_key(ec.SECP256R1(), backend)
builder = x509.CertificateRevocationListBuilder()
builder = builder.issuer_name(
x509.Name([x509.NameAttribute(x509.NameOID.COUNTRY_NAME, u'US')])
).last_update(
datetime.datetime(2002, 1, 1, 12, 1)
).next_update(
datetime.datetime(2032, 1, 1, 12, 1)
)
with pytest.raises(NotImplementedError):
builder.sign(private_key, hashes.SHA512(), backend)
class TestOpenSSLCreateRevokedCertificate(object):
def test_invalid_builder(self):
with pytest.raises(TypeError):
backend.create_x509_revoked_certificate(object())
class TestOpenSSLSerializationWithOpenSSL(object):
def test_pem_password_cb_buffer_too_small(self):
ffi_cb, userdata = backend._pem_password_cb(b"aa")
handle = backend._ffi.new_handle(userdata)
buf = backend._ffi.new('char *')
assert ffi_cb(buf, 1, False, handle) == 0
assert userdata.called == 1
assert isinstance(userdata.exception, ValueError)
def test_pem_password_cb(self):
password = b'abcdefg'
buf_size = len(password) + 1
ffi_cb, userdata = backend._pem_password_cb(password)
handle = backend._ffi.new_handle(userdata)
buf = backend._ffi.new('char[]', buf_size)
assert ffi_cb(buf, buf_size, False, handle) == len(password)
assert userdata.called == 1
assert backend._ffi.string(buf, len(password)) == password
def test_unsupported_evp_pkey_type(self):
key = backend._create_evp_pkey_gc()
with raises_unsupported_algorithm(None):
backend._evp_pkey_to_private_key(key)
with raises_unsupported_algorithm(None):
backend._evp_pkey_to_public_key(key)
def test_very_long_pem_serialization_password(self):
password = "x" * 1024
with pytest.raises(ValueError):
load_vectors_from_file(
os.path.join(
"asymmetric", "Traditional_OpenSSL_Serialization",
"key1.pem"
),
lambda pemfile: (
backend.load_pem_private_key(
pemfile.read().encode(), password
)
)
)
class DummyLibrary(object):
Cryptography_HAS_EC = 0
class TestOpenSSLEllipticCurve(object):
def test_elliptic_curve_supported(self, monkeypatch):
monkeypatch.setattr(backend, "_lib", DummyLibrary())
assert backend.elliptic_curve_supported(None) is False
def test_elliptic_curve_signature_algorithm_supported(self, monkeypatch):
monkeypatch.setattr(backend, "_lib", DummyLibrary())
assert backend.elliptic_curve_signature_algorithm_supported(
None, None
) is False
def test_sn_to_elliptic_curve_not_supported(self):
with raises_unsupported_algorithm(_Reasons.UNSUPPORTED_ELLIPTIC_CURVE):
_sn_to_elliptic_curve(backend, b"fake")
def test_elliptic_curve_exchange_algorithm_supported(self, monkeypatch):
monkeypatch.setattr(backend, "_lib", DummyLibrary())
assert not backend.elliptic_curve_exchange_algorithm_supported(
ec.ECDH(), ec.SECP256R1()
)
@pytest.mark.requires_backend_interface(interface=RSABackend)
class TestRSAPEMSerialization(object):
def test_password_length_limit(self):
password = b"x" * 1024
key = RSA_KEY_2048.private_key(backend)
with pytest.raises(ValueError):
key.private_bytes(
serialization.Encoding.PEM,
serialization.PrivateFormat.PKCS8,
serialization.BestAvailableEncryption(password)
)
class TestGOSTCertificate(object):
@pytest.mark.skipif(
backend._lib.OPENSSL_VERSION_NUMBER < 0x1000000f,
reason="Requires a newer OpenSSL. Must be >= 1.0.0"
)
def test_numeric_string_x509_name_entry(self):
cert = _load_cert(
os.path.join("x509", "e-trust.ru.der"),
x509.load_der_x509_certificate,
backend
)
with pytest.raises(ValueError) as exc:
cert.subject
# We assert on the message in this case because if the certificate
# fails to load it will also raise a ValueError and this test could
# erroneously pass.
assert str(exc.value) == "Unsupported ASN1 string type. Type: 18"
| 37.241176 | 79 | 0.649147 |
67906fcc568b07043cf8238642c44cb213e613ec | 2,483 | py | Python | sdk/identity/azure-identity/azure/identity/_internal/auth_code_redirect_handler.py | vbarbaresi/azure-sdk-for-python | 397ba46c51d001ff89c66b170f5576cf8f49c05f | [
"MIT"
] | 8 | 2021-01-13T23:44:08.000Z | 2021-03-17T10:13:36.000Z | sdk/identity/azure-identity/azure/identity/_internal/auth_code_redirect_handler.py | vbarbaresi/azure-sdk-for-python | 397ba46c51d001ff89c66b170f5576cf8f49c05f | [
"MIT"
] | null | null | null | sdk/identity/azure-identity/azure/identity/_internal/auth_code_redirect_handler.py | vbarbaresi/azure-sdk-for-python | 397ba46c51d001ff89c66b170f5576cf8f49c05f | [
"MIT"
] | 2 | 2020-05-21T22:51:22.000Z | 2020-05-26T20:53:01.000Z | # ------------------------------------
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
# ------------------------------------
from typing import TYPE_CHECKING
from six.moves.urllib_parse import parse_qs, urlparse
try:
from http.server import HTTPServer, BaseHTTPRequestHandler
except ImportError:
from BaseHTTPServer import HTTPServer, BaseHTTPRequestHandler # type: ignore
if TYPE_CHECKING:
# pylint:disable=ungrouped-imports
from typing import Any, Mapping
class AuthCodeRedirectHandler(BaseHTTPRequestHandler):
"""HTTP request handler to capture the authentication server's response.
Mostly from the Azure CLI: https://github.com/Azure/azure-cli/blob/dev/src/azure-cli-core/azure/cli/core/_profile.py
"""
def do_GET(self):
if self.path.endswith("/favicon.ico"): # deal with legacy IE
self.send_response(204)
return
query = self.path.split("?", 1)[-1]
query = parse_qs(query, keep_blank_values=True)
self.server.query_params = query
self.send_response(200)
self.send_header("Content-Type", "text/html")
self.end_headers()
self.wfile.write(b"Authentication complete. You can close this window.")
def log_message(self, format, *args): # pylint: disable=redefined-builtin,unused-argument,no-self-use
pass # this prevents server dumping messages to stdout
class AuthCodeRedirectServer(HTTPServer):
"""HTTP server that listens for the redirect request following an authorization code authentication"""
query_params = {} # type: Mapping[str, Any]
def __init__(self, uri, timeout):
# type: (str, int) -> None
parsed = urlparse(uri)
HTTPServer.__init__(self, (parsed.hostname, parsed.port), AuthCodeRedirectHandler)
self.timeout = timeout
def wait_for_redirect(self):
# type: () -> Mapping[str, Any]
while not self.query_params:
try:
self.handle_request()
except ValueError:
# socket has been closed, probably by handle_timeout
break
# ensure the underlying socket is closed (a no-op when the socket is already closed)
self.server_close()
# if we timed out, this returns an empty dict
return self.query_params
def handle_timeout(self):
"""Break the request-handling loop by tearing down the server"""
self.server_close()
| 34.486111 | 120 | 0.655658 |
faca6bf58dccd65c12d7254a5c68e623f59161dc | 5,096 | py | Python | pymc3/distributions/__init__.py | HVoltBb/pymc3 | da482cd5ba08e421e8a62d879822cc41e5b91420 | [
"Apache-2.0"
] | null | null | null | pymc3/distributions/__init__.py | HVoltBb/pymc3 | da482cd5ba08e421e8a62d879822cc41e5b91420 | [
"Apache-2.0"
] | null | null | null | pymc3/distributions/__init__.py | HVoltBb/pymc3 | da482cd5ba08e421e8a62d879822cc41e5b91420 | [
"Apache-2.0"
] | null | null | null | # Copyright 2020 The PyMC Developers
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from . import timeseries
from . import transforms
from . import shape_utils
from .continuous import Uniform
from .continuous import Flat
from .continuous import HalfFlat
from .continuous import TruncatedNormal
from .continuous import Normal
from .continuous import Beta
from .continuous import Kumaraswamy
from .continuous import Exponential
from .continuous import Laplace
from .continuous import StudentT
from .continuous import Cauchy
from .continuous import HalfCauchy
from .continuous import Gamma
from .continuous import Weibull
from .continuous import HalfStudentT
from .continuous import Lognormal
from .continuous import ChiSquared
from .continuous import HalfNormal
from .continuous import Wald
from .continuous import Pareto
from .continuous import InverseGamma
from .continuous import ExGaussian
from .continuous import VonMises
from .continuous import SkewNormal
from .continuous import Triangular
from .continuous import Gumbel
from .continuous import Logistic
from .continuous import LogitNormal
from .continuous import Interpolated
from .continuous import Rice
from .discrete import Binomial
from .discrete import BetaBinomial
from .discrete import Bernoulli
from .discrete import DiscreteWeibull
from .discrete import Poisson
from .discrete import NegativeBinomial
from .discrete import ConstantDist
from .discrete import Constant
from .discrete import ZeroInflatedPoisson
from .discrete import ZeroInflatedNegativeBinomial
from .discrete import ZeroInflatedBinomial
from .discrete import DiscreteUniform
from .discrete import Geometric
from .discrete import Categorical
from .discrete import OrderedLogistic
from .distribution import DensityDist
from .distribution import Distribution
from .distribution import Continuous
from .distribution import Discrete
from .distribution import NoDistribution
from .distribution import TensorType
from .distribution import draw_values
from .distribution import generate_samples
from .simulator import Simulator
from .mixture import Mixture
from .mixture import NormalMixture
from .multivariate import MvNormal
from .multivariate import MatrixNormal
from .multivariate import KroneckerNormal
from .multivariate import MvStudentT
from .multivariate import Dirichlet
from .multivariate import Multinomial
from .multivariate import Wishart
from .multivariate import WishartBartlett
from .multivariate import LKJCholeskyCov
from .multivariate import LKJCorr
from .timeseries import AR1
from .timeseries import AR
from .timeseries import GaussianRandomWalk
from .timeseries import GARCH11
from .timeseries import MvGaussianRandomWalk
from .timeseries import MvStudentTRandomWalk
from .bound import Bound
__all__ = ['Uniform',
'Flat',
'HalfFlat',
'TruncatedNormal',
'Normal',
'Beta',
'Kumaraswamy',
'Exponential',
'Laplace',
'StudentT',
'Cauchy',
'HalfCauchy',
'Gamma',
'Weibull',
'Bound',
'Lognormal',
'HalfStudentT',
'ChiSquared',
'HalfNormal',
'Wald',
'Pareto',
'InverseGamma',
'ExGaussian',
'VonMises',
'Binomial',
'BetaBinomial',
'Bernoulli',
'Poisson',
'NegativeBinomial',
'ConstantDist',
'Constant',
'ZeroInflatedPoisson',
'ZeroInflatedNegativeBinomial',
'ZeroInflatedBinomial',
'DiscreteUniform',
'Geometric',
'Categorical',
'OrderedLogistic',
'DensityDist',
'Distribution',
'Continuous',
'Discrete',
'NoDistribution',
'TensorType',
'MvNormal',
'MatrixNormal',
'KroneckerNormal',
'MvStudentT',
'Dirichlet',
'Multinomial',
'Wishart',
'WishartBartlett',
'LKJCholeskyCov',
'LKJCorr',
'AR1',
'AR',
'GaussianRandomWalk',
'MvGaussianRandomWalk',
'MvStudentTRandomWalk',
'GARCH11',
'SkewNormal',
'Mixture',
'NormalMixture',
'Triangular',
'DiscreteWeibull',
'Gumbel',
'Logistic',
'LogitNormal',
'Interpolated',
'Bound',
'Rice',
'Simulator'
]
| 29.456647 | 76 | 0.679749 |
8deabbe11489b65ac1755055fe2b86b0724f3e20 | 5,855 | py | Python | reconcile/openshift_clusterrolebindings.py | janboll/qontract-reconcile | 20d8136dfaec76700aa2b0487e9f7a02adae566c | [
"Apache-2.0"
] | null | null | null | reconcile/openshift_clusterrolebindings.py | janboll/qontract-reconcile | 20d8136dfaec76700aa2b0487e9f7a02adae566c | [
"Apache-2.0"
] | null | null | null | reconcile/openshift_clusterrolebindings.py | janboll/qontract-reconcile | 20d8136dfaec76700aa2b0487e9f7a02adae566c | [
"Apache-2.0"
] | null | null | null | import sys
from reconcile.utils import gql
import reconcile.openshift_base as ob
from reconcile import queries
from reconcile.utils.semver_helper import make_semver
from reconcile.utils.openshift_resource import (OpenshiftResource as OR,
ResourceKeyExistsError)
from reconcile.utils.defer import defer
from reconcile.utils import expiration
ROLES_QUERY = """
{
roles: roles_v1 {
name
users {
github_username
}
bots {
github_username
openshift_serviceaccount
}
access {
cluster {
name
}
clusterRole
}
expirationDate
}
}
"""
QONTRACT_INTEGRATION = 'openshift-clusterrolebindings'
QONTRACT_INTEGRATION_VERSION = make_semver(0, 1, 0)
def construct_user_oc_resource(role, user):
name = f"{role}-{user}"
# Note: In OpenShift 4.x this resource is in rbac.authorization.k8s.io/v1
body = {
"apiVersion": "rbac.authorization.k8s.io/v1",
"kind": "ClusterRoleBinding",
"metadata": {
"name": name
},
"roleRef": {
"name": role,
"kind": "ClusterRole"
},
"subjects": [
{"kind": "User",
"name": user}
]
}
return OR(body, QONTRACT_INTEGRATION, QONTRACT_INTEGRATION_VERSION,
error_details=name), name
def construct_sa_oc_resource(role, namespace, sa_name):
name = f"{role}-{namespace}-{sa_name}"
# Note: In OpenShift 4.x this resource is in rbac.authorization.k8s.io/v1
body = {
"apiVersion": "rbac.authorization.k8s.io/v1",
"kind": "ClusterRoleBinding",
"metadata": {
"name": name
},
"roleRef": {
"name": role,
"kind": "ClusterRole"
},
"subjects": [
{"kind": "ServiceAccount",
"name": sa_name,
"namespace": namespace}
],
"userNames": [
f"system:serviceaccount:{namespace}:{sa_name}"
]
}
return OR(body, QONTRACT_INTEGRATION, QONTRACT_INTEGRATION_VERSION,
error_details=name), name
def fetch_desired_state(ri, oc_map):
gqlapi = gql.get_api()
roles = expiration.filter(gqlapi.query(ROLES_QUERY)['roles'])
users_desired_state = []
# set namespace to something indicative
namepsace = 'cluster'
for role in roles:
permissions = [{'cluster': a['cluster']['name'],
'cluster_role': a['clusterRole']}
for a in role['access'] or []
if None not in [a['cluster'], a['clusterRole']]]
if not permissions:
continue
users = [user['github_username']
for user in role['users']]
bot_users = [bot['github_username']
for bot in role['bots']
if bot.get('github_username')]
users.extend(bot_users)
service_accounts = [bot['openshift_serviceaccount']
for bot in role['bots']
if bot.get('openshift_serviceaccount')]
for permission in permissions:
cluster = permission['cluster']
if not oc_map.get(cluster):
continue
for user in users:
# used by openshift-users and github integrations
# this is just to simplify things a bit on the their side
users_desired_state.append({
'cluster': cluster,
'user': user
})
if ri is None:
continue
oc_resource, resource_name = \
construct_user_oc_resource(
permission['cluster_role'], user)
try:
ri.add_desired(
cluster,
namepsace,
'ClusterRoleBinding',
resource_name,
oc_resource
)
except ResourceKeyExistsError:
# a user may have a Role assigned to them
# from multiple app-interface roles
pass
for sa in service_accounts:
if ri is None:
continue
namespace, sa_name = sa.split('/')
oc_resource, resource_name = \
construct_sa_oc_resource(
permission['cluster_role'], namespace, sa_name)
try:
ri.add_desired(
permission['cluster'],
namepsace,
'ClusterRoleBinding',
resource_name,
oc_resource
)
except ResourceKeyExistsError:
# a ServiceAccount may have a Role assigned to it
# from multiple app-interface roles
pass
return users_desired_state
@defer
def run(dry_run, thread_pool_size=10, internal=None,
use_jump_host=True, defer=None):
clusters = [cluster_info for cluster_info
in queries.get_clusters()
if cluster_info.get('managedClusterRoles')]
ri, oc_map = ob.fetch_current_state(
clusters=clusters,
thread_pool_size=thread_pool_size,
integration=QONTRACT_INTEGRATION,
integration_version=QONTRACT_INTEGRATION_VERSION,
override_managed_types=['ClusterRoleBinding'],
internal=internal,
use_jump_host=use_jump_host)
defer(oc_map.cleanup)
fetch_desired_state(ri, oc_map)
ob.realize_data(dry_run, oc_map, ri, thread_pool_size)
if ri.has_error_registered():
sys.exit(1)
| 31.820652 | 77 | 0.537148 |
05bb45ef652e1dd1462dc876f616152a0202ac6f | 512 | py | Python | Python/Logic-2/make_chocolate.py | LucasHenrique-dev/exerc-cios-codingbat | ff92db10387757b9a2e3f72be6b7e51824b1ffa6 | [
"MIT"
] | 2 | 2020-12-09T13:36:44.000Z | 2021-08-16T01:17:16.000Z | Python/Logic-2/make_chocolate.py | LucasHenrique-dev/exerc-cios-codingbat | ff92db10387757b9a2e3f72be6b7e51824b1ffa6 | [
"MIT"
] | null | null | null | Python/Logic-2/make_chocolate.py | LucasHenrique-dev/exerc-cios-codingbat | ff92db10387757b9a2e3f72be6b7e51824b1ffa6 | [
"MIT"
] | null | null | null | """
Sabendo que cada unidade de "small" vale 1 e que cada unidade de "big" vale 5,
retorne a quantidade de "small" necessária para se atingir o "goal", sabendo que
deverá se usar todas as "big" possíveis antes. Caso não dê para atingir o "goal",
retorne -1.
Ex.:((4, 1, 9) → 4; (4, 1, 10) → -1; (4, 1, 7) → 2).
"""
def make_chocolate(small, big, goal):
goal -= int(goal/5)*5 if big*5 > goal else big * 5
if goal > small:
return -1
else:
return goal
print(make_chocolate(4, 1, 9))
| 26.947368 | 81 | 0.623047 |
8b68bf55ef486673d897ede1bff8c6e6d7c1bcc0 | 36,764 | py | Python | tests/test_sdc_resource_properties.py | devfest-bugbust/python-onapsdk | 90c03dfbd3cf8e47978315ab04694b7f7a5032f8 | [
"Apache-2.0"
] | null | null | null | tests/test_sdc_resource_properties.py | devfest-bugbust/python-onapsdk | 90c03dfbd3cf8e47978315ab04694b7f7a5032f8 | [
"Apache-2.0"
] | 10 | 2021-09-20T15:42:47.000Z | 2021-09-23T12:49:51.000Z | tests/test_sdc_resource_properties.py | devfest-bugbust/python-onapsdk | 90c03dfbd3cf8e47978315ab04694b7f7a5032f8 | [
"Apache-2.0"
] | 2 | 2021-09-20T13:53:12.000Z | 2021-09-21T08:05:58.000Z | from unittest import mock
import pytest
from onapsdk.exceptions import ParameterError
from onapsdk.sdc.properties import Input, Property
from onapsdk.sdc.sdc_resource import SdcResource
from onapsdk.sdc.service import Service
from onapsdk.sdc.vf import Vf
from onapsdk.sdc.vl import Vl
INPUTS = {
'inputs': [
{
'uniqueId': '9ee5fb23-4c4a-46bd-8682-68698559ee9c.skip_post_instantiation_configuration',
'type': 'boolean',
'required': False,
'definition': False,
'defaultValue': 'true',
'description': None,
'schema': None,
'password': False,
'name': 'skip_post_instantiation_configuration',
'value': None,
'label': None,
'hidden': False,
'immutable': False,
'inputPath': None,
'status': None,
'inputId': None,
'instanceUniqueId': None,
'propertyId': None,
'parentPropertyType': None,
'subPropertyInputPath': None,
'annotations': None,
'parentUniqueId': '9ee5fb23-4c4a-46bd-8682-68698559ee9c',
'getInputValues': None,
'isDeclaredListInput': False,
'getPolicyValues': None,
'propertyConstraints': None,
'constraints': None,
'inputs': None,
'properties': None,
'schemaType': None,
'schemaProperty': None,
'getInputProperty': False,
'version': None,
'ownerId': '9ee5fb23-4c4a-46bd-8682-68698559ee9c',
'empty': False
},
{
'uniqueId': '4a84415b-4580-4a78-aa33-501f0cd3d079.test',
'type': 'string',
'required': False,
'definition': False,
'defaultValue': None,
'description': None,
'schema': {
'derivedFrom': None,
'constraints': None,
'properties': None,
'property': {
'uniqueId': None,
'type': '',
'required': False,
'definition': False,
'defaultValue': None,
'description': None,
'schema': None,
'password': False,
'name': None,
'value': None,
'label': None,
'hidden': False,
'immutable': False,
'inputPath': None,
'status': None,
'inputId': None,
'instanceUniqueId': None,
'propertyId': None,
'parentPropertyType': None,
'subPropertyInputPath': None,
'annotations': None,
'parentUniqueId': None,
'getInputValues': None,
'isDeclaredListInput': False,
'getPolicyValues': None,
'propertyConstraints': None,
'schemaType': None,
'schemaProperty': None,
'getInputProperty': False,
'version': None,
'ownerId': None,
'empty': False
},
'version': None,
'ownerId': None,
'empty': False,
'type': None
},
'password': False,
'name': 'test',
'value': None,
'label': None,
'hidden': False,
'immutable': False,
'inputPath': None,
'status': None,
'inputId': None,
'instanceUniqueId': '4a84415b-4580-4a78-aa33-501f0cd3d079',
'propertyId': '4a84415b-4580-4a78-aa33-501f0cd3d079.sraka',
'parentPropertyType': 'string',
'subPropertyInputPath': None,
'annotations': None,
'parentUniqueId': 'cs0008',
'getInputValues': None,
'isDeclaredListInput': False,
'getPolicyValues': None,
'propertyConstraints': None,
'constraints': None,
'inputs': None,
'properties': None,
'schemaType': '',
'schemaProperty': {
'uniqueId': None,
'type': '',
'required': False,
'definition': False,
'defaultValue': None,
'description': None,
'schema': None,
'password': False,
'name': None,
'value': None,
'label': None,
'hidden': False,
'immutable': False,
'inputPath': None,
'status': None,
'inputId': None,
'instanceUniqueId': None,
'propertyId': None,
'parentPropertyType': None,
'subPropertyInputPath': None,
'annotations': None,
'parentUniqueId': None,
'getInputValues': None,
'isDeclaredListInput': False,
'getPolicyValues': None,
'propertyConstraints': None,
'schemaType': None,
'schemaProperty': None,
'getInputProperty': False,
'version': None,
'ownerId': None,
'empty': False
},
'getInputProperty': False,
'version': None,
'ownerId': 'cs0008',
'empty': False
},
{
'uniqueId': '9ee5fb23-4c4a-46bd-8682-68698559ee9c.controller_actor',
'type': 'string',
'required': False,
'definition': False,
'defaultValue': 'SO-REF-DATA',
'description': None,
'schema': None,
'password': False,
'name': 'controller_actor',
'value': None,
'label': None,
'hidden': False,
'immutable': False,
'inputPath': None,
'status': None,
'inputId': None,
'instanceUniqueId': None,
'propertyId': None,
'parentPropertyType': None,
'subPropertyInputPath': None,
'annotations': None,
'parentUniqueId': '9ee5fb23-4c4a-46bd-8682-68698559ee9c',
'getInputValues': None,
'isDeclaredListInput': False,
'getPolicyValues': None,
'propertyConstraints': None,
'constraints': None,
'inputs': None,
'properties': None,
'schemaType': None,
'schemaProperty': None,
'getInputProperty': False,
'version': None,
'ownerId': '9ee5fb23-4c4a-46bd-8682-68698559ee9c',
'empty': False
},
{
'uniqueId': '4a84415b-4580-4a78-aa33-501f0cd3d079.lililili',
'type': 'list',
'required': False,
'definition': False,
'defaultValue': None,
'description': None,
'schema': {
'derivedFrom': None,
'constraints': None,
'properties': None,
'property': {
'uniqueId': None,
'type': 'abc',
'required': False,
'definition': False,
'defaultValue': None,
'description': None,
'schema': None,
'password': False,
'name': None,
'value': None,
'label': None,
'hidden': False,
'immutable': False,
'inputPath': None,
'status': None,
'inputId': None,
'instanceUniqueId': None,
'propertyId': None,
'parentPropertyType': None,
'subPropertyInputPath': None,
'annotations': None,
'parentUniqueId': None,
'getInputValues': None,
'isDeclaredListInput': False,
'getPolicyValues': None,
'propertyConstraints': None,
'schemaType': None,
'schemaProperty': None,
'getInputProperty': False,
'version': None,
'ownerId': None,
'empty': False
},
'version': None,
'ownerId': None,
'empty': False,
'type': None
},
'password': False,
'name': 'lililili',
'value': None,
'label': None,
'hidden': False,
'immutable': False,
'inputPath': None,
'status': None,
'inputId': None,
'instanceUniqueId': '4a84415b-4580-4a78-aa33-501f0cd3d079',
'propertyId': None,
'parentPropertyType': None,
'subPropertyInputPath': None,
'annotations': None,
'parentUniqueId': None,
'getInputValues': None,
'isDeclaredListInput': True,
'getPolicyValues': None,
'propertyConstraints': None,
'constraints': None,
'inputs': None,
'properties': None,
'schemaType': 'abc',
'schemaProperty': {
'uniqueId': None,
'type': 'abc',
'required': False,
'definition': False,
'defaultValue': None,
'description': None,
'schema': None,
'password': False,
'name': None,
'value': None,
'label': None,
'hidden': False,
'immutable': False,
'inputPath': None,
'status': None,
'inputId': None,
'instanceUniqueId': None,
'propertyId': None,
'parentPropertyType': None,
'subPropertyInputPath': None,
'annotations': None,
'parentUniqueId': None,
'getInputValues': None,
'isDeclaredListInput': False,
'getPolicyValues': None,
'propertyConstraints': None,
'schemaType': None,
'schemaProperty': None,
'getInputProperty': False,
'version': None,
'ownerId': None,
'empty': False
},
'getInputProperty': False,
'version': None,
'ownerId': None,
'empty': False
}
]
}
PROPERTIES = {
"properties": [{
'uniqueId': '4a84415b-4580-4a78-aa33-501f0cd3d079.llllll',
'type': 'integer',
'required': False,
'definition': False,
'defaultValue': None,
'description': None,
'schema': {
'derivedFrom': None,
'constraints': None,
'properties': None,
'property': {
'uniqueId': None,
'type': '',
'required': False,
'definition': False,
'defaultValue': None,
'description': None,
'schema': None,
'password': False,
'name': None,
'value': None,
'label': None,
'hidden': False,
'immutable': False,
'inputPath': None,
'status': None,
'inputId': None,
'instanceUniqueId': None,
'propertyId': None,
'parentPropertyType': None,
'subPropertyInputPath': None,
'annotations': None,
'parentUniqueId': None,
'getInputValues': None,
'isDeclaredListInput': False,
'getPolicyValues': None,
'propertyConstraints': None,
'schemaType': None,
'schemaProperty': None,
'getInputProperty': False,
'version': None,
'ownerId': None,
'empty': False
},
'version': None,
'ownerId': None,
'empty': False,
'type': None
},
'password': False,
'name': 'llllll',
'value': '{"get_input":["lililili","INDEX","llllll"]}',
'label': None,
'hidden': False,
'immutable': False,
'inputPath': None,
'status': None,
'inputId': None,
'instanceUniqueId': None,
'propertyId': None,
'parentPropertyType': None,
'subPropertyInputPath': None,
'annotations': None,
'parentUniqueId': '4a84415b-4580-4a78-aa33-501f0cd3d079',
'getInputValues': [
{
'propName': None,
'inputName': 'lililili',
'inputId': '4a84415b-4580-4a78-aa33-501f0cd3d079.lililili',
'indexValue': None,
'getInputIndex': None,
'list': False,
'version': None,
'ownerId': None,
'empty': False,
'type': None
}
],
'isDeclaredListInput': False,
'getPolicyValues': None,
'propertyConstraints': None,
'constraints': None,
'schemaType': '',
'schemaProperty': {
'uniqueId': None,
'type': '',
'required': False,
'definition': False,
'defaultValue': None,
'description': None,
'schema': None,
'password': False,
'name': None,
'value': None,
'label': None,
'hidden': False,
'immutable': False,
'inputPath': None,
'status': None,
'inputId': None,
'instanceUniqueId': None,
'propertyId': None,
'parentPropertyType': None,
'subPropertyInputPath': None,
'annotations': None,
'parentUniqueId': None,
'getInputValues': None,
'isDeclaredListInput': False,
'getPolicyValues': None,
'propertyConstraints': None,
'schemaType': None,
'schemaProperty': None,
'getInputProperty': False,
'version': None,
'ownerId': None,
'empty': False
},
'getInputProperty': True,
'version': None,
'ownerId': '4a84415b-4580-4a78-aa33-501f0cd3d079',
'empty': False
},
{
'uniqueId': '4a84415b-4580-4a78-aa33-501f0cd3d079.test',
'type': 'string',
'required': False,
'definition': False,
'defaultValue': None,
'description': None,
'schema': {
'derivedFrom': None,
'constraints': None,
'properties': None,
'property': {
'uniqueId': None,
'type': '',
'required': False,
'definition': False,
'defaultValue': None,
'description': None,
'schema': None,
'password': False,
'name': None,
'value': None,
'label': None,
'hidden': False,
'immutable': False,
'inputPath': None,
'status': None,
'inputId': None,
'instanceUniqueId': None,
'propertyId': None,
'parentPropertyType': None,
'subPropertyInputPath': None,
'annotations': None,
'parentUniqueId': None,
'getInputValues': None,
'isDeclaredListInput': False,
'getPolicyValues': None,
'propertyConstraints': None,
'schemaType': None,
'schemaProperty': None,
'getInputProperty': False,
'version': None,
'ownerId': None,
'empty': False
},
'version': None,
'ownerId': None,
'empty': False,
'type': None
},
'password': False,
'name': 'test',
'value': None,
'label': None,
'hidden': False,
'immutable': False,
'inputPath': None,
'status': None,
'inputId': None,
'instanceUniqueId': None,
'propertyId': None,
'parentPropertyType': None,
'subPropertyInputPath': None,
'annotations': None,
'parentUniqueId': '4a84415b-4580-4a78-aa33-501f0cd3d079',
'getInputValues': [],
'isDeclaredListInput': False,
'getPolicyValues': None,
'propertyConstraints': None,
'constraints': None,
'schemaType': '',
'schemaProperty': {
'uniqueId': None,
'type': '',
'required': False,
'definition': False,
'defaultValue': None,
'description': None,
'schema': None,
'password': False,
'name': None,
'value': None,
'label': None,
'hidden': False,
'immutable': False,
'inputPath': None,
'status': None,
'inputId': None,
'instanceUniqueId': None,
'propertyId': None,
'parentPropertyType': None,
'subPropertyInputPath': None,
'annotations': None,
'parentUniqueId': None,
'getInputValues': None,
'isDeclaredListInput': False,
'getPolicyValues': None,
'propertyConstraints': None,
'schemaType': None,
'schemaProperty': None,
'getInputProperty': False,
'version': None,
'ownerId': None,
'empty': False
},
'getInputProperty': True,
'version': None,
'ownerId': '4a84415b-4580-4a78-aa33-501f0cd3d079',
'empty': False
},
{
'uniqueId': '4a84415b-4580-4a78-aa33-501f0cd3d079.yyy',
'type': 'string',
'required': False,
'definition': False,
'defaultValue': None,
'description': None,
'schema': {
'derivedFrom': None,
'constraints': None,
'properties': None,
'property': {
'uniqueId': None,
'type': '',
'required': False,
'definition': False,
'defaultValue': None,
'description': None,
'schema': None,
'password': False,
'name': None,
'value': None,
'label': None,
'hidden': False,
'immutable': False,
'inputPath': None,
'status': None,
'inputId': None,
'instanceUniqueId': None,
'propertyId': None,
'parentPropertyType': None,
'subPropertyInputPath': None,
'annotations': None,
'parentUniqueId': None,
'getInputValues': None,
'isDeclaredListInput': False,
'getPolicyValues': None,
'propertyConstraints': None,
'schemaType': None,
'schemaProperty': None,
'getInputProperty': False,
'version': None,
'ownerId': None,
'empty': False
},
'version': None,
'ownerId': None,
'empty': False,
'type': None
},
'password': False,
'name': 'yyy',
'value': 'lalala',
'label': None,
'hidden': False,
'immutable': False,
'inputPath': None,
'status': None,
'inputId': None,
'instanceUniqueId': None,
'propertyId': None,
'parentPropertyType': None,
'subPropertyInputPath': None,
'annotations': None,
'parentUniqueId': '4a84415b-4580-4a78-aa33-501f0cd3d079',
'getInputValues': None,
'isDeclaredListInput': False,
'getPolicyValues': None,
'propertyConstraints': None,
'constraints': None,
'schemaType': '',
'schemaProperty': {
'uniqueId': None,
'type': '',
'required': False,
'definition': False,
'defaultValue': None,
'description': None,
'schema': None,
'password': False,
'name': None,
'value': None,
'label': None,
'hidden': False,
'immutable': False,
'inputPath': None,
'status': None,
'inputId': None,
'instanceUniqueId': None,
'propertyId': None,
'parentPropertyType': None,
'subPropertyInputPath': None,
'annotations': None,
'parentUniqueId': None,
'getInputValues': None,
'isDeclaredListInput': False,
'getPolicyValues': None,
'propertyConstraints': None,
'schemaType': None,
'schemaProperty': None,
'getInputProperty': False,
'version': None,
'ownerId': None,
'empty': False
},
'getInputProperty': False,
'version': None,
'ownerId': '4a84415b-4580-4a78-aa33-501f0cd3d079',
'empty': False
},
{
'uniqueId': '4a84415b-4580-4a78-aa33-501f0cd3d079.test2',
'type': 'boolean',
'required': False,
'definition': False,
'defaultValue': None,
'description': 'test2',
'schema': {
'derivedFrom': None,
'constraints': None,
'properties': None,
'property': {
'uniqueId': None,
'type': '',
'required': False,
'definition': False,
'defaultValue': None,
'description': None,
'schema': None,
'password': False,
'name': None,
'value': None,
'label': None,
'hidden': False,
'immutable': False,
'inputPath': None,
'status': None,
'inputId': None,
'instanceUniqueId': None,
'propertyId': None,
'parentPropertyType': None,
'subPropertyInputPath': None,
'annotations': None,
'parentUniqueId': None,
'getInputValues': None,
'isDeclaredListInput': False,
'getPolicyValues': None,
'propertyConstraints': None,
'schemaType': None,
'schemaProperty': None,
'getInputProperty': False,
'version': None,
'ownerId': None,
'empty': False
},
'version': None,
'ownerId': None,
'empty': False,
'type': None
},
'password': False,
'name': 'test2',
'value': '{"get_input":"test2"}',
'label': None,
'hidden': False,
'immutable': False,
'inputPath': None,
'status': None,
'inputId': None,
'instanceUniqueId': None,
'propertyId': None,
'parentPropertyType': None,
'subPropertyInputPath': None,
'annotations': None,
'parentUniqueId': '4a84415b-4580-4a78-aa33-501f0cd3d079',
'getInputValues': [
{
'propName': None,
'inputName': 'test2',
'inputId': '4a84415b-4580-4a78-aa33-501f0cd3d079.test2',
'indexValue': None,
'getInputIndex': None,
'list': False,
'version': None,
'ownerId': None,
'empty': False,
'type': None
}
],
'isDeclaredListInput': False,
'getPolicyValues': None,
'propertyConstraints': None,
'constraints': None,
'schemaType': '',
'schemaProperty': {
'uniqueId': None,
'type': '',
'required': False,
'definition': False,
'defaultValue': None,
'description': None,
'schema': None,
'password': False,
'name': None,
'value': None,
'label': None,
'hidden': False,
'immutable': False,
'inputPath': None,
'status': None,
'inputId': None,
'instanceUniqueId': None,
'propertyId': None,
'parentPropertyType': None,
'subPropertyInputPath': None,
'annotations': None,
'parentUniqueId': None,
'getInputValues': None,
'isDeclaredListInput': False,
'getPolicyValues': None,
'propertyConstraints': None,
'schemaType': None,
'schemaProperty': None,
'getInputProperty': False,
'version': None,
'ownerId': None,
'empty': False
},
'getInputProperty': True,
'version': None,
'ownerId': '4a84415b-4580-4a78-aa33-501f0cd3d079',
'empty': False
}]
}
# Fixture: SDC "properties" payload for a Vl (virtual link) resource.
# Contains a single string property ("network_role") with no schema and no
# get_input mapping; used by test_vl_properties below.
VL_PROPERTIES = {
    "properties": [{
        'uniqueId': 'd37cd65e-9842-4490-9343-a1a874e6b52a.network_role',
        'type': 'string',
        'required': False,
        'definition': False,
        'defaultValue': None,
        'description': 'Unique label that defines the role that this network performs. example: vce oam network, vnat sr-iov1 network\n',
        'schema': None,
        'password': False,
        'name': 'network_role',
        'value': None,
        'label': None,
        'hidden': False,
        'immutable': False,
        'inputPath': None,
        'status': None,
        'inputId': None,
        'instanceUniqueId': None,
        'propertyId': None,
        'parentPropertyType': None,
        'subPropertyInputPath': None,
        'annotations': None,
        'parentUniqueId': '1af9771b-0f79-4e98-8747-30fd06da85cb',
        'getInputValues': None,
        'isDeclaredListInput': False,
        'getPolicyValues': None,
        'propertyConstraints': None,
        'constraints': None,
        'schemaType': None,
        'schemaProperty': None,
        'getInputProperty': False,
        'version': None,
        'ownerId': '1af9771b-0f79-4e98-8747-30fd06da85cb',
        'empty': False
    }]
}
@mock.patch.object(Service, "send_message_json")
@mock.patch.object(Service, "send_message")
def test_service_properties(mock_send, mock_send_json):
    """Service.properties should be parsed from the PROPERTIES payload.

    Checks: empty payload yields no properties; the four fixture properties
    are exposed with their ids/types/values; get_input mappings resolve to
    Input objects, and an unresolvable mapping raises ParameterError.
    """
    service = Service(name="test")
    service.unique_identifier = "toto"
    mock_send_json.return_value = {}
    # BUG FIX: the original line was `assert if not list(service.properties):`
    # which is a SyntaxError; assert the empty case like the sibling tests do.
    assert len(list(service.properties)) == 0
    mock_send_json.return_value = PROPERTIES
    properties_list = list(service.properties)
    assert len(properties_list) == 4
    prop1, prop2, prop3, prop4 = properties_list
    # Subsequent .input lookups fetch the INPUTS payload.
    mock_send_json.return_value = INPUTS
    # prop1: integer property bound to the "lililili" list input.
    assert prop1.sdc_resource == service
    assert prop1.unique_id == "4a84415b-4580-4a78-aa33-501f0cd3d079.llllll"
    assert prop1.name == "llllll"
    assert prop1.property_type == "integer"
    assert prop1.parent_unique_id == "4a84415b-4580-4a78-aa33-501f0cd3d079"
    assert prop1.value == '{"get_input":["lililili","INDEX","llllll"]}'
    assert prop1.description is None
    assert prop1.get_input_values
    prop1_input = prop1.input
    assert prop1_input.unique_id == "4a84415b-4580-4a78-aa33-501f0cd3d079.lililili"
    assert prop1_input.input_type == "list"
    assert prop1_input.name == "lililili"
    assert prop1_input.default_value is None
    # prop2: string property with an empty get_input list -> no input.
    assert prop2.sdc_resource == service
    assert prop2.unique_id == "4a84415b-4580-4a78-aa33-501f0cd3d079.test"
    assert prop2.name == "test"
    assert prop2.property_type == "string"
    assert prop2.parent_unique_id == "4a84415b-4580-4a78-aa33-501f0cd3d079"
    assert prop2.value is None
    assert prop2.description is None
    assert prop2.get_input_values == []
    assert prop2.input is None
    # prop3: plain string property with a literal value.
    assert prop3.sdc_resource == service
    assert prop3.unique_id == "4a84415b-4580-4a78-aa33-501f0cd3d079.yyy"
    assert prop3.name == "yyy"
    assert prop3.property_type == "string"
    assert prop3.parent_unique_id == "4a84415b-4580-4a78-aa33-501f0cd3d079"
    assert prop3.value == "lalala"
    assert prop3.description is None
    assert prop3.get_input_values is None
    assert prop3.input is None
    # prop4: boolean property whose get_input target is not in INPUTS ->
    # resolving .input must raise ParameterError.
    assert prop4.sdc_resource == service
    assert prop4.unique_id == "4a84415b-4580-4a78-aa33-501f0cd3d079.test2"
    assert prop4.name == "test2"
    assert prop4.property_type == "boolean"
    assert prop4.parent_unique_id == "4a84415b-4580-4a78-aa33-501f0cd3d079"
    assert prop4.value == '{"get_input":"test2"}'
    assert prop4.description == "test2"
    assert prop4.get_input_values
    with pytest.raises(ParameterError):
        prop4.input
@mock.patch.object(Service, "send_message_json")
def test_service_inputs(mock_send_json):
    """Service.inputs should expose the four inputs from the INPUTS payload."""
    service = Service(name="test")
    service.unique_identifier = "toto"
    mock_send_json.return_value = {}
    assert not list(service.inputs)
    mock_send_json.return_value = INPUTS
    inputs_list = list(service.inputs)
    assert len(inputs_list) == 4
    expected = [
        ("9ee5fb23-4c4a-46bd-8682-68698559ee9c.skip_post_instantiation_configuration",
         "boolean", "skip_post_instantiation_configuration", "true"),
        ("4a84415b-4580-4a78-aa33-501f0cd3d079.test",
         "string", "test", None),
        ("9ee5fb23-4c4a-46bd-8682-68698559ee9c.controller_actor",
         "string", "controller_actor", "SO-REF-DATA"),
        ("4a84415b-4580-4a78-aa33-501f0cd3d079.lililili",
         "list", "lililili", None),
    ]
    for input_obj, (uid, itype, name, default) in zip(inputs_list, expected):
        assert input_obj.unique_id == uid
        assert input_obj.input_type == itype
        assert input_obj.name == name
        assert input_obj.default_value == default
@mock.patch.object(Vf, "send_message_json")
def test_vf_properties(mock_send_json):
    """Vf.properties should be parsed from the PROPERTIES payload."""
    vf = Vf(name="test")
    vf.unique_identifier = "toto"
    mock_send_json.return_value = {}
    assert not list(vf.properties)
    mock_send_json.return_value = PROPERTIES
    props = list(vf.properties)
    assert len(props) == 4
    parent_uid = "4a84415b-4580-4a78-aa33-501f0cd3d079"
    # Subsequent .input lookups fetch the INPUTS payload.
    mock_send_json.return_value = INPUTS
    # Shared fields, table-driven: (name, type, value, description).
    expected = [
        ("llllll", "integer", '{"get_input":["lililili","INDEX","llllll"]}', None),
        ("test", "string", None, None),
        ("yyy", "string", "lalala", None),
        ("test2", "boolean", '{"get_input":"test2"}', "test2"),
    ]
    for prop, (name, ptype, value, description) in zip(props, expected):
        assert prop.sdc_resource == vf
        assert prop.unique_id == "%s.%s" % (parent_uid, name)
        assert prop.name == name
        assert prop.property_type == ptype
        assert prop.parent_unique_id == parent_uid
        assert prop.value == value
        assert prop.description == description
    prop1, prop2, prop3, prop4 = props
    # prop1 resolves to the "lililili" list input.
    assert prop1.get_input_values
    prop1_input = prop1.input
    assert prop1_input.unique_id == parent_uid + ".lililili"
    assert prop1_input.input_type == "list"
    assert prop1_input.name == "lililili"
    assert prop1_input.default_value is None
    # prop2/prop3 have no resolvable input.
    assert prop2.get_input_values == []
    assert prop2.input is None
    assert prop3.get_input_values is None
    assert prop3.input is None
    # prop4's get_input target is missing from INPUTS.
    assert prop4.get_input_values
    with pytest.raises(ParameterError):
        prop4.input
@mock.patch.object(Vl, "send_message_json")
@mock.patch.object(Vl, "exists")
def test_vl_properties(mock_exists, mock_send_json):
    """A Vl exposes the single property described by VL_PROPERTIES."""
    mock_exists.return_value = True
    vl = Vl(name="test")
    vl.unique_identifier = "toto"
    mock_send_json.return_value = {}
    assert not list(vl.properties)
    mock_send_json.return_value = VL_PROPERTIES
    props = list(vl.properties)
    assert len(props) == 1
    (prop,) = props
    assert prop.sdc_resource == vl
    assert prop.unique_id == "d37cd65e-9842-4490-9343-a1a874e6b52a.network_role"
    assert prop.name == "network_role"
    assert prop.property_type == "string"
    assert prop.parent_unique_id == "1af9771b-0f79-4e98-8747-30fd06da85cb"
    assert prop.value is None
    assert prop.description == ("Unique label that defines the role that this "
                                "network performs. example: vce oam network, "
                                "vnat sr-iov1 network\n")
    assert prop.get_input_values is None
    assert prop.input is None
@mock.patch.object(SdcResource, "send_message_json")
def test_sdc_resource_is_own_property(mock_send_json):
    """is_own_property matches name AND type against the PROPERTIES payload."""
    resource = SdcResource(name="test")
    resource.unique_identifier = "toto"
    mock_send_json.return_value = PROPERTIES
    # "llllll"/integer exists in the fixture; "test2" exists only as boolean,
    # so a string-typed property with the same name must not match.
    owned = Property(name="llllll", property_type="integer")
    foreign = Property(name="test2", property_type="string")
    assert resource.is_own_property(owned)
    assert not resource.is_own_property(foreign)
@mock.patch.object(SdcResource, "properties", new_callable=mock.PropertyMock)
@mock.patch.object(SdcResource, "send_message_json")
def test_sdc_resource_set_property_value(mock_send_message_json, mock_sdc_resource_properties):
    """set_property_value rejects foreign properties and updates own ones."""
    resource = SdcResource(name="test")
    resource.unique_identifier = "toto"
    mock_sdc_resource_properties.return_value = [
        Property(name="test", property_type="string", sdc_resource=resource)
    ]
    # A property the resource does not own must be rejected.
    unknown = Property(name="test2",
                       property_type="integer",
                       sdc_resource=resource)
    with pytest.raises(ParameterError):
        resource.set_property_value(unknown, value="lalala")
    # Setting the value of an owned property triggers exactly one SDC call.
    prop = resource.get_property(property_name="test")
    assert prop.name == "test"
    assert prop.property_type == "string"
    assert not prop.value
    prop.value = "test"
    mock_send_message_json.assert_called_once()
    assert prop.value == "test"
@mock.patch.object(SdcResource, "inputs", new_callable=mock.PropertyMock)
@mock.patch.object(SdcResource, "send_message_json")
def test_sdc_resource_input_default_value(mock_send_message_json, mock_inputs):
    """Assigning an input's default_value posts exactly one update to SDC."""
    resource = SdcResource(name="test")
    resource.unique_identifier = "toto"
    mock_inputs.return_value = [
        Input(unique_id="123",
              input_type="integer",
              name="test",
              sdc_resource=resource)
    ]
    assert resource.get_input("test")
    input_obj = resource.get_input("test")
    assert not input_obj.default_value
    input_obj.default_value = "123"
    mock_send_message_json.assert_called_once()
    assert input_obj.default_value == "123"
| 34.423221 | 144 | 0.51205 |
9e8b768b5a3c0bbc16ae186764146f4ec70d3261 | 506 | py | Python | common/tests/test_sequence_field.py | Jenks18/mfl_api | ecbb8954053be06bbcac7e1132811d73534c78d9 | [
"MIT"
] | 19 | 2015-04-16T09:37:08.000Z | 2022-02-10T11:50:30.000Z | common/tests/test_sequence_field.py | Jenks18/mfl_api | ecbb8954053be06bbcac7e1132811d73534c78d9 | [
"MIT"
] | 125 | 2015-03-26T14:05:49.000Z | 2020-05-14T08:16:50.000Z | common/tests/test_sequence_field.py | Jenks18/mfl_api | ecbb8954053be06bbcac7e1132811d73534c78d9 | [
"MIT"
] | 39 | 2015-04-15T09:17:33.000Z | 2022-03-28T18:08:16.000Z | from django.contrib.auth import get_user_model
from django.test import TestCase
from model_mommy import mommy
from common.fields import SequenceField
from common.models import County
class SequenceFieldTest(TestCase):
def setUp(self):
self.user = mommy.make(get_user_model())
self.test_model = County(name='test county')
self.test_model.save()
def test_get_prepared_value(self):
seq = SequenceField()
self.assertEqual(seq.get_prep_value(value=''), None)
| 28.111111 | 60 | 0.731225 |
ac711b6d710130781fe03ae1e658fd081df5b410 | 29,880 | py | Python | hplip-3.20.3/base/LedmWifi.py | Deril-Pana/wikiBlackcoinNL | 9633307f0b485c27feae5da242944adf450e8963 | [
"MIT"
] | null | null | null | hplip-3.20.3/base/LedmWifi.py | Deril-Pana/wikiBlackcoinNL | 9633307f0b485c27feae5da242944adf450e8963 | [
"MIT"
] | 1 | 2021-11-20T16:33:39.000Z | 2021-11-20T16:33:39.000Z | hplip-3.20.3/base/LedmWifi.py | Deril-Pana/wikiBlackcoinNL | 9633307f0b485c27feae5da242944adf450e8963 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
#
# (c) Copyright 2003-2015 HP Development Company, L.P.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# Author: Shunmugaraj.K
#
# StdLib
import time
import io
import binascii
import xml.parsers.expat
from string import *
# Local
from .g import *
from . import device, utils
from .sixext import to_bytes_utf8
http_result_pat = re.compile("""HTTP/\d.\d\s(\d+)""", re.I)
HTTP_OK = 200
HTTP_ACCEPTED = 202
HTTP_NOCONTENT = 204
HTTP_ERROR = 500
MAX_RETRIES = 2
LEDM_WIFI_BASE_URI = "/IoMgmt/Adapters/"
# This payload is working for LaserJet Devices
adapterPowerXml_payload2 ="""<?xml version="1.0" encoding="UTF-8" ?><io:Adapter xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:io="http://www.hp.com/schemas/imaging/con/ledm/iomgmt/2008/11/30" xmlns:dd="http://www.hp.com/schemas/imaging/con/dictionaries/1.0/" xmlns:wifi="http://www.hp.com/schemas/imaging/con/wifi/2009/06/26"> <io:HardwareConfig> <dd:Power>%s</dd:Power> </io:HardwareConfig> </io:Adapter>"""
# This payload is working for OfficeJet and Photosmart Devices
adapterPowerXml_payload1 = """<?xml version="1.0" encoding="UTF-8"?><io:Adapters xmlns:io="http://www.hp.com/schemas/imaging/con/ledm/iomgmt/2008/11/30" xmlns:dd="http://www.hp.com/schemas/imaging/con/dictionaries/1.0/"><io:Adapter><io:HardwareConfig><dd:Power>%s</dd:Power></io:HardwareConfig></io:Adapter></io:Adapters>"""
passPhraseXml="""<io:Profile xmlns:io="http://www.hp.com/schemas/imaging/con/ledm/iomgmt/2008/11/30" xmlns:dd="http://www.hp.com/schemas/imaging/con/dictionaries/1.0/" xmlns:wifi="http://www.hp.com/schemas/imaging/con/wifi/2009/06/26" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://www.hp.com/schemas/imaging/con/ledm/iomgmt/2008/11/30 ../../schemas/IoMgmt.xsd http://www.hp.com/schemas/imaging/con/dictionaries/1.0/ ../../schemas/dd/DataDictionaryMasterLEDM.xsd"><io:AdapterProfile><io:WifiProfile><wifi:SSID>%s</wifi:SSID><wifi:CommunicationMode>%s</wifi:CommunicationMode><wifi:EncryptionType>%s</wifi:EncryptionType><wifi:AuthenticationMode>%s</wifi:AuthenticationMode></io:WifiProfile></io:AdapterProfile></io:Profile>"""
keyInfoXml = """<io:KeyInfo><io:WpaPassPhraseInfo><wifi:RsnEncryption>AESOrTKIP</wifi:RsnEncryption><wifi:RsnAuthorization>autoWPA</wifi:RsnAuthorization><wifi:PassPhrase>%s</wifi:PassPhrase></io:WpaPassPhraseInfo></io:KeyInfo>"""
def getAdaptorList(dev):
    """Query the device's LEDM adapter collection and flatten it into a dict.

    Returns a dict with 'adaptorlistlength' and, for every adapter index i,
    'adaptorid-i', 'adaptorname-i', 'adaptortype-i' (each "" when the key is
    absent from the response) plus empty 'adaptorpresence-i'/'adaptorstate-i'
    placeholders.  Returns an empty dict when the request never succeeds.
    """
    ret = {}
    code = HTTP_ERROR
    paramsList = []
    # Drop the trailing '/' so the collection URI itself is requested.
    URI = LEDM_WIFI_BASE_URI[:-1]
    attempt = 0
    while attempt < MAX_RETRIES:
        attempt += 1
        paramsList, code = readXmlTagDataFromURI(dev, URI, '<io:Adapters', '<io:Adapter>')
        if code == HTTP_OK:
            break
    if code != HTTP_OK:
        log.error("Request Failed With Response Code %d" % code)
        return ret

    ret['adaptorlistlength'] = len(paramsList)
    # (output key, LEDM response key) pairs copied per adapter.
    key_map = (
        ('adaptorid', 'io:adapter-map:resourcenode-map:resourcelink-dd:resourceuri'),
        ('adaptorname', 'io:adapter-io:hardwareconfig-dd:name'),
        ('adaptortype', 'io:adapter-io:hardwareconfig-dd:deviceconnectivityporttype'),
    )
    for idx, params in enumerate(paramsList):
        ret['adaptorpresence-%d' % idx] = ''
        ret['adaptorstate-%d' % idx] = ''
        for out_name, src_key in key_map:
            try:
                ret['%s-%d' % (out_name, idx)] = params[src_key]
            except KeyError as e:
                log.debug("Missing response key: %s" % e)
                ret['%s-%d' % (out_name, idx)] = ""
    return ret
def getWifiAdaptorID(dev):
    """Return [[id, name, state, presence], ...] for each wifi adapter.

    Filters the result of getAdaptorList() down to adapters whose type is
    'wifiembedded' or 'wifiaccessory'.  Missing fields default to -1 for the
    id and 'Unknown' for the rest.
    """
    wifi_adapters = []
    info = getAdaptorList(dev)
    count = info.get('adaptorlistlength', 0)
    for idx in range(count):
        if info.get('adaptortype-%d' % idx, '').lower() not in ('wifiembedded', 'wifiaccessory'):
            continue
        entry = []
        for field in ('adaptorid', 'adaptorname', 'adaptorstate', 'adaptorpresence'):
            key = '%s-%d' % (field, idx)
            if key in info:
                entry.append(info[key])
            else:
                entry.append(-1 if field == 'adaptorid' else 'Unknown')
        wifi_adapters.append(entry)
    return wifi_adapters
def setAdaptorPower(dev, adapterList, power_state='on'):
    """Set the power state of a wifi adapter via LEDM.

    adapterList: list of [id, name, state, presence] entries as returned by
    getWifiAdaptorID().  Tries the OfficeJet/Photosmart payload first and
    falls back to the LaserJet payload on failure.

    Returns (adaptor_id, adaptorName, state, presence) for the adapter that
    was handled, or (-1, "", "", "") when adapterList is empty.

    NOTE(review): the return statement sits inside the for loop, so only the
    FIRST adapter in adapterList is ever powered on -- confirm intended.
    """
    adaptor_id=-1
    adaptorName =""
    for a in adapterList:
        adaptor_id = a[0]
        adaptorName = a[1]
        ret,powerXml,URI,code = {},'','',HTTP_ERROR
        URI = LEDM_WIFI_BASE_URI + adaptorName
        # First attempt: payload format used by OfficeJet/Photosmart devices.
        powerXml = adapterPowerXml_payload1 %(power_state)
        ret['errorreturn'] = writeXmlDataToURI(dev,URI,powerXml,10)
        if not(ret['errorreturn'] == HTTP_OK or ret['errorreturn'] == HTTP_NOCONTENT):
            log.debug("Wifi Adapter turn ON request Failed. ResponseCode=%s AdaptorId=%s AdaptorName=%s. Trying another interface" %(ret['errorreturn'],adaptor_id,adaptorName))
            # Fallback: payload format used by LaserJet devices.
            powerXml = adapterPowerXml_payload2 %(power_state)
            ret['errorreturn'] = writeXmlDataToURI(dev,URI,powerXml,10)
            if not(ret['errorreturn'] == HTTP_OK or ret['errorreturn'] == HTTP_NOCONTENT):
                log.error("Wifi Adapter turn ON request Failed. ResponseCode=%s AdaptorId=%s AdaptorName=%s" %(ret['errorreturn'],adaptor_id,adaptorName))
            else:
                log.debug("Wifi Adapter turn ON request is Success. AdaptorId=%s AdaptorName=%s" %(adaptor_id,adaptorName))
        # adapaterState = a[2], adapterPresence= a[3]
        return adaptor_id, adaptorName, a[2], a[3]
    return -1 ,"","",""
def performScan(dev, adapterName, ssid=None):
    """Read the wifi network scan results of the given adapter via LEDM.

    When 'ssid' is given only that network is queried, otherwise the full
    scan list is fetched.  Returns a flattened dict containing
    'numberofscanentries' and, per network index i: 'ssid-i', 'bssid-i',
    'channel-i', 'communicationmode-i', 'dbm-i', 'encryptiontype-i',
    'signalstrength-i', plus 'signalstrengthmax'/'signalstrengthmin'.
    """
    ret ={}
    if ssid is None:
        URI = LEDM_WIFI_BASE_URI + adapterName + "/WifiNetworks"
    else:
        URI = LEDM_WIFI_BASE_URI + adapterName + "/WifiNetworks/SSID="+ssid
    # Poll until the device stops answering 202 Accepted (scan in progress).
    # NOTE(review): there is no retry cap here, so a device that keeps
    # returning 202 would loop forever -- confirm acceptable.
    while True:
        params,code,elementCount = readXmlDataFromURI(dev,URI,'<io:WifiNetworks', '<io:WifiNetwork>',10)
        if code == HTTP_ACCEPTED:
            continue
        else:
            break
    ret['numberofscanentries'] = elementCount
    if code != HTTP_OK:
        log.error("Request Failed With Response Code %d"%code)
        return ret
    if params is not None:
        # Single-result responses use unsuffixed keys; multi-result responses
        # use '-<index>' suffixed keys, hence the two branches below.
        if elementCount == 1:
            try:
                # SSIDs arrive hex-encoded; decode to a UTF-8 string.
                ssid = binascii.unhexlify(str(params['io:wifinetworks-io:wifinetwork-wifi:ssid']).encode('utf-8')).decode("utf-8")
                if not ssid:
                    ret['ssid-0'] = to_unicode('(unknown)')
                else:
                    ret['ssid-0'] = ssid
                try:
                    ret['bssid-0'] = binascii.unhexlify(str(params['io:wifinetworks-io:wifinetwork-wifi:bssid']).encode('utf-8')).decode("utf-8")
                except:
                    # Fall back to the raw value when the BSSID is not hex.
                    ret['bssid-0'] = params['io:wifinetworks-io:wifinetwork-wifi:bssid']
                ret['channel-0'] = params['io:wifinetworks-io:wifinetwork-wifi:channel']
                ret['communicationmode-0'] = params['io:wifinetworks-io:wifinetwork-wifi:communicationmode']
                ret['dbm-0'] = params['io:wifinetworks-io:wifinetwork-io:signalinfo-wifi:dbm']
                ret['encryptiontype-0'] = params['io:wifinetworks-io:wifinetwork-wifi:encryptiontype']
                ret['signalstrength-0'] = params['io:wifinetworks-io:wifinetwork-io:signalinfo-wifi:signalstrength']
            except KeyError as e:
                log.debug("Missing response key: %s" % e)
        else:
            for a in range(elementCount):
                try:
                    try:
                        ssid = binascii.unhexlify(str(params['io:wifinetworks-io:wifinetwork-wifi:ssid-%d' % a]).encode('utf-8')).decode('utf-8')
                    except TypeError:
                        # Some devices returns one invalid SSID (i.e. 0) along with valid SSIDs. e.g. Epic.
                        ssid = params['io:wifinetworks-io:wifinetwork-wifi:ssid-%d' % a]
                    if not ssid:
                        ret['ssid-%d' % a] = to_unicode('(unknown)')
                    else:
                        ret['ssid-%d' % a] = ssid
                    try:
                        ret['bssid-%d' % a] = binascii.unhexlify(str(params['io:wifinetworks-io:wifinetwork-wifi:bssid-%d' % a]).encode('utf-8')).decode("utf-8")
                    except:
                        ret['bssid-%d' % a] = params['io:wifinetworks-io:wifinetwork-wifi:bssid-%d' % a]
                    ret['channel-%d' % a] = params['io:wifinetworks-io:wifinetwork-wifi:channel-%d' % a]
                    ret['communicationmode-%d' % a] = params['io:wifinetworks-io:wifinetwork-wifi:communicationmode-%d' % a]
                    ret['dbm-%d' % a] = params['io:wifinetworks-io:wifinetwork-io:signalinfo-wifi:dbm-%d' % a]
                    ret['encryptiontype-%d' % a] = params['io:wifinetworks-io:wifinetwork-wifi:encryptiontype-%d' % a]
                    ret['signalstrength-%d' % a] = params['io:wifinetworks-io:wifinetwork-io:signalinfo-wifi:signalstrength-%d' % a]
                except KeyError as e:
                    log.debug("Missing response key: %s" % e)
    # Fixed signal-strength scale reported to callers.
    # NOTE(review): plain assignments cannot raise KeyError; this try/except
    # is effectively dead code.
    try:
        ret['signalstrengthmax'] = 5
        ret['signalstrengthmin'] = 0
    except KeyError as e:
        log.debug("Missing response key: %s" % e)
    return ret
def getIPConfiguration(dev, adapterName):
    """Read the adapter's IPv4 configuration via LEDM.

    Tries the older '<adapter>/Protocols' endpoint first and falls back to
    '/DevMgmt/IOConfigDyn.xml' (newer firmware) when that fails.

    Returns (ip, hostname, addressmode, subnetmask, gateway, pridns, sec_dns);
    defaults are returned on failure.  'hostname' is never filled in by this
    implementation and always stays 'Unknown'.
    """
    ip, hostname, addressmode, subnetmask, gateway, pridns, sec_dns = \
        '0.0.0.0', 'Unknown', 'Unknown', '0.0.0.0', '0.0.0.0', '0.0.0.0', '0.0.0.0'
    # 'old' = <adapter>/Protocols schema, 'new' = IOConfigDyn schema.
    protocol = 'old'
    URI = LEDM_WIFI_BASE_URI + adapterName + "/Protocols"
    #URI = "/DevMgmt/IOConfigDyn.xml"
    params,code,elementCount = {},HTTP_ERROR,0
    max_tries = 0
    while max_tries < MAX_RETRIES:
        max_tries +=1
        params,code,elementCount = readXmlDataFromURI(dev,URI,'<io:Protocol', '<io:Protocol')
        if code == HTTP_OK:
            break
    if code != HTTP_OK:
        # Older endpoint failed -- retry against the newer IOConfigDyn URI.
        max_tries = 0
        URI = "/DevMgmt/IOConfigDyn.xml"
        while max_tries < MAX_RETRIES:
            max_tries +=1
            params,code,elementCount = readXmlDataFromURI(dev,URI,'<iocfgdyn2:IOConfigDyn', '<dd3:IOAdaptorConfig')
            if code == HTTP_OK:
                protocol = 'new'
                break
    if code != HTTP_OK:
        log.error("Request Failed With Response Code %d" %code)
        return ip, hostname, addressmode, subnetmask, gateway, pridns, sec_dns
    if protocol == 'old':
        if params is not None and code == HTTP_OK:
            try:
                ip = params['io:protocols-io:protocol-io:addresses-io:ipv4addresses-io:ipv4address-dd:ipv4address']
                subnetmask = params['io:protocols-io:protocol-io:addresses-io:ipv4addresses-io:ipv4address-dd:subnetmask']
                gateway = params['io:protocols-io:protocol-io:addresses-io:ipv4addresses-io:ipv4address-dd:defaultgateway']
                if 'DHCP' in params['io:protocols-io:protocol-io:addresses-io:ipv4addresses-io:ipv4address-dd:configmethod']:
                    addressmode = 'dhcp'
                else:
                    addressmode = 'autoip'
                # Single-element responses use unsuffixed DNS keys ...
                if elementCount ==1:
                    pridns = params['io:protocols-io:protocol-dd:dnsserveripaddress']
                    sec_dns = params['io:protocols-io:protocol-dd:secondarydnsserveripaddress']
                # ... multi-element responses use '-<index>' suffixes; take
                # the first non-"::" (non-empty IPv6) DNS entry.
                for a in range(elementCount):
                    if params['io:protocols-io:protocol-dd:dnsserveripaddress-%d' %a] !="::":
                        pridns = params['io:protocols-io:protocol-dd:dnsserveripaddress-%d' %a]
                        sec_dns = params['io:protocols-io:protocol-dd:secondarydnsserveripaddress-%d' %a]
                        break
            except KeyError as e:
                log.error("Missing response key: %s" % str(e))
    else:
        if params is not None and code == HTTP_OK:
            try:
                # Each field may come back unsuffixed or '-0' suffixed
                # depending on firmware, hence the nested try/excepts.
                #ip = params['io:protocols-io:protocol-io:addresses-io:ipv4addresses-io:ipv4address-dd:ipv4address']
                try:
                    ip = params['iocfgdyn2:ioconfigdyn-dd3:ioadaptorconfig-dd3:networkadaptorconfig-dd3:ipversionconfig-dd3:ipconfig-dd:ipaddress']
                except:
                    ip = params['iocfgdyn2:ioconfigdyn-dd3:ioadaptorconfig-dd3:networkadaptorconfig-dd3:ipversionconfig-dd3:ipconfig-dd:ipaddress-0']
                #subnetmask = params['io:protocols-io:protocol-io:addresses-io:ipv4addresses-io:ipv4address-dd:subnetmask']
                try:
                    subnetmask = params['iocfgdyn2:ioconfigdyn-dd3:ioadaptorconfig-dd3:networkadaptorconfig-dd3:ipversionconfig-dd3:ipconfig-dd:subnetmask']
                except:
                    subnetmask = params['iocfgdyn2:ioconfigdyn-dd3:ioadaptorconfig-dd3:networkadaptorconfig-dd3:ipversionconfig-dd3:ipconfig-dd:subnetmask-0']
                #gateway = params['io:protocols-io:protocol-io:addresses-io:ipv4addresses-io:ipv4address-dd:defaultgateway']
                try:
                    gateway = params['iocfgdyn2:ioconfigdyn-dd3:ioadaptorconfig-dd3:networkadaptorconfig-dd3:ipversionconfig-dd3:ipconfig-dd:defaultgateway']
                except:
                    gateway = params['iocfgdyn2:ioconfigdyn-dd3:ioadaptorconfig-dd3:networkadaptorconfig-dd3:ipversionconfig-dd3:ipconfig-dd:defaultgateway-0']
                #if 'DHCP' in params['io:protocols-io:protocol-io:addresses-io:ipv4addresses-io:ipv4address-dd:configmethod']:
                try:
                    addressmode = params['iocfgdyn2:ioconfigdyn-dd3:ioadaptorconfig-dd3:networkadaptorconfig-dd3:ipversionconfig-dd3:ipconfig-dd:ipconfigmethod']
                except:
                    addressmode = params['iocfgdyn2:ioconfigdyn-dd3:ioadaptorconfig-dd3:networkadaptorconfig-dd3:ipversionconfig-dd3:ipconfig-dd:ipconfigmethod-0']
                if 'dhcp' in addressmode.lower():
                    addressmode = 'dhcp'
                else:
                    addressmode = 'autoip'
                # NOTE(review): DNS parsing is not implemented for the new
                # protocol -- pridns/sec_dns keep their defaults here.
                #if elementCount ==1:
                #    pridns = params['io:protocols-io:protocol-dd:dnsserveripaddress']
                #    sec_dns = params['io:protocols-io:protocol-dd:secondarydnsserveripaddress']
                #for a in xrange(elementCount):
                #    if params['io:protocols-io:protocol-dd:dnsserveripaddress-%d' %a] !="::":
                #        pridns = params['io:protocols-io:protocol-dd:dnsserveripaddress-%d' %a]
                #        sec_dns = params['io:protocols-io:protocol-dd:secondarydnsserveripaddress-%d' %a]
                #        break
            except KeyError as e:
                log.error("Missing response key: %s" % str(e))
    log.debug("ip=%s, hostname=%s, addressmode=%s, subnetmask=%s, gateway=%s, pridns=%s, sec_dns=%s"%(ip, hostname, addressmode, subnetmask, gateway, pridns, sec_dns))
    return ip, hostname, addressmode, subnetmask, gateway, pridns, sec_dns
# TODO: Temporary Function. To be removed after refactoring.
def getwifiotherdetails(dev, adapterName):
    """Read IPv4/DNS settings for a Wi-Fi adapter's active profile.

    Queries two LEDM URIs (the adapter's active profile and the global
    IoConfig document) with up to MAX_RETRIES attempts and extracts the
    address, subnet mask, gateway, DNS servers and the domain-name
    configuration method.

    Returns a 6-tuple (ip, subnet, gateway, pri_dns, sec_dns, mode);
    any field that could not be read is an empty string.
    """
    ip, subnet, gateway, pri_dns, sec_dns, mode = '', '', '', '', '', ''
    params1, params2, code1, code2, elementCount = {}, {}, HTTP_ERROR, HTTP_ERROR, 0
    URI1 = LEDM_WIFI_BASE_URI + adapterName + "/Profiles/Active"
    URI2 = "/IoMgmt/IoConfig.xml"
    max_tries = 0
    while max_tries < MAX_RETRIES:
        max_tries += 1
        params1, code1, elementCount = readXmlDataFromURI(dev, URI1, '<io:Profile', '<io:Profile')
        params2, code2, elementCount = readXmlDataFromURI(dev, URI2, '<io:IoConfig', '<io:IoConfig')
        if code1 == HTTP_OK and code2 == HTTP_OK:
            break
    if code1 != HTTP_OK and code2 != HTTP_OK:
        # BUG FIX: the original logged an undefined name `code` here (a
        # NameError on the failure path) and returned only 5 values while
        # the success path returns 6; report both codes and keep the
        # return shape consistent.
        log.error("Request Failed With Response Codes %d, %d" % (code1, code2))
        return ip, subnet, gateway, pri_dns, sec_dns, mode
    if params1 is not None and params2 is not None:
        try:
            ip = params1['io:profile-io:networkprofile-io:ipv4network-dd:ipaddress']
            subnet = params1['io:profile-io:networkprofile-io:ipv4network-dd:subnetmask']
            gateway = params1['io:profile-io:networkprofile-io:ipv4network-dd:defaultgateway']
            pri_dns = params1['io:profile-io:networkprofile-io:ipv4network-dd:dnsserveripaddress']
            sec_dns = params1['io:profile-io:networkprofile-io:ipv4network-dd:secondarydnsserveripaddress']
            mode = params2['io:ioconfig-io:iodeviceprotocolconfig-io:ipv4domainname-dd:domainnameconfig-dd:configmethod']
        except KeyError as e:
            log.debug("Missing response key: %s" % str(e))
    return ip, subnet, gateway, pri_dns, sec_dns, mode
def getCryptoSuite(dev, adapterName):
    """Return (encryption type, communication mode, BSSID) for the active
    Wi-Fi profile of *adapterName*; all empty strings on failure."""
    alg = mode = secretid = ''
    parms, code, element_count = {}, HTTP_ERROR, 0
    uri = LEDM_WIFI_BASE_URI + adapterName + "/Profiles/Active"
    # Retry the profile read until it succeeds or attempts run out.
    for _attempt in range(MAX_RETRIES):
        parms, code, element_count = readXmlDataFromURI(dev, uri, '<io:Profile', '<io:Profile')
        if code == HTTP_OK:
            break
    if code != HTTP_OK:
        log.error("Request Failed With Response Code %d" % code)
        return alg, mode, secretid
    if parms is not None:
        try:
            mode = parms['io:profile-io:adapterprofile-io:wifiprofile-wifi:communicationmode']
            alg = parms['io:profile-io:adapterprofile-io:wifiprofile-wifi:encryptiontype']
            secretid = parms['io:profile-io:adapterprofile-io:wifiprofile-wifi:bssid']
        except KeyError as e:
            # A missing element just leaves that field empty.
            log.debug("Missing response key: %s" % str(e))
    return alg, mode, secretid
def associate(dev, adapterName, ssid, communication_mode, encryption_type, key):
    """Write the active Wi-Fi profile (SSID / mode / encryption / key).

    Returns a dict with key 'errorreturn' holding the HTTP response code
    from the PUT of the profile XML.
    """
    ret = {}
    URI = LEDM_WIFI_BASE_URI + adapterName + "/Profiles/Active"
    if encryption_type == 'none':
        authMode = 'open'
        ppXml = passPhraseXml % (binascii.hexlify(to_bytes_utf8(ssid)).decode('utf-8'),
                                 communication_mode, encryption_type, authMode)
    else:
        authMode = encryption_type
        # Splice the key element into the profile XML just before the
        # closing </io:WifiProfile> tag.
        pos = passPhraseXml.find("</io:WifiProfile>", 0, len(passPhraseXml))
        ppXml = (passPhraseXml[:pos] + keyInfoXml + passPhraseXml[pos:]) % (
            binascii.hexlify(to_bytes_utf8(ssid)).decode('utf-8'), communication_mode,
            encryption_type, authMode, binascii.hexlify(to_bytes_utf8(key)).decode('utf-8'))
    code = writeXmlDataToURI(dev, URI, ppXml, 10)
    ret['errorreturn'] = code
    # BUG FIX: the original tested `not (code == HTTP_OK or HTTP_NOCONTENT)`,
    # which is always False because HTTP_NOCONTENT is a truthy constant,
    # so failures were never logged.
    if code not in (HTTP_OK, HTTP_NOCONTENT):
        log.error("Request Failed With Response Code %d" % ret['errorreturn'])
    return ret
def getVSACodes(dev, adapterName):
    """Return a list of (rule, severity) VSA code tuples for the adapter.

    An HTTP failure returns an empty list; a missing element in the
    response yields an empty string for that field.
    """
    ret = []
    params, code, elementCount = {}, HTTP_ERROR, 0
    URI = LEDM_WIFI_BASE_URI + adapterName + "/VsaCodes.xml"
    max_tries = 0
    while max_tries < MAX_RETRIES:
        max_tries += 1
        params, code, elementCount = readXmlDataFromURI(dev, URI, "<io:VsaCodes", "<io:VsaCodes", 10)
        if code == HTTP_OK:
            break
    if code != HTTP_OK:
        log.warn("Request Failed With Response Code %d" % code)
        return ret
    if params is not None:
        # Only a missing key is expected here; the original bare
        # `except:` clauses also swallowed unrelated errors.
        try:
            severity = params['io:vsacodes-wifi:vsacode-dd:severity']
        except KeyError:
            severity = ""
        try:
            rule = params['io:vsacodes-wifi:vsacode-wifi:rulenumber']
        except KeyError:
            rule = ""
        ret.append((rule, severity))
    return ret
def getHostname(dev):
    """Return the device host name from /IoMgmt/IoConfig.xml ('' on failure)."""
    host_name = ''
    uri = "/IoMgmt/IoConfig.xml"
    # Retry until the IoConfig document is served or attempts run out.
    for _attempt in range(MAX_RETRIES):
        params, code, _count = readXmlDataFromURI(dev, uri, '<io:IoConfig', '<io:IoConfig')
        if code == HTTP_OK:
            break
    if code != HTTP_OK:
        log.warn("Request failed with Response code %d. HostName not found." % code)
        return host_name
    if params is not None:
        try:
            host_name = params['io:ioconfig-io:iodeviceconfig-dd3:hostname']
        except KeyError as e:
            log.debug("Missing response key: %s" % e)
    return host_name
def getSignalStrength(dev, adapterName, ssid, adaptor_id=0):
    """Return (ss_max, ss_min, ss_val, ss_dbm) for the network *ssid*.

    Defaults (5, 0, 0, -200) are returned when ssid is None, the request
    fails, or the response does not contain exactly one network element.
    NOTE(review): the retry loop below has no attempt limit -- a device
    that keeps answering HTTP_ACCEPTED would spin forever; confirm whether
    a MAX_RETRIES bound (as in the sibling functions) is intended.
    """
    ss_max, ss_min, ss_val, ss_dbm = 5, 0, 0, -200
    params,code,elementCount = {},HTTP_ERROR,0
    if ssid is not None:
        URI = LEDM_WIFI_BASE_URI + adapterName + "/WifiNetworks/SSID="+ssid
    else:
        # No SSID to query -- return the sentinel defaults.
        return ss_max, ss_min, ss_val, ss_dbm
    while True:
        params,code,elementCount = readXmlDataFromURI(dev,URI,'<io:WifiNetworks', '<io:WifiNetwork>',10)
        # HTTP_ACCEPTED means the scan result is not ready yet; poll again.
        if code == HTTP_ACCEPTED:
            log.info("Got Response as HTTP_ACCEPTED, so retrying to get the actual result")
            continue
        else:
            break
    if code != HTTP_OK:
        log.error("Request Failed With Response Code %d"%code)
        return ss_max, ss_min, ss_val, ss_dbm
    if params is not None:
        # Only trust the reading when exactly one network element came back.
        if elementCount == 1:
            try:
                ss_dbm = params['io:wifinetworks-io:wifinetwork-io:signalinfo-wifi:dbm']
                ss_val = params['io:wifinetworks-io:wifinetwork-io:signalinfo-wifi:signalstrength']
            except KeyError as e:
                log.error("Missing response key: %s" % e)
    return ss_max, ss_min, ss_val, ss_dbm
def readXmlTagDataFromURI(dev,URI,xmlRootNode,xmlReqDataNode,timeout=5):
    """HTTP-GET *URI* over the device channel and parse repeated elements.

    Unlike readXmlDataFromURI, this collects EVERY occurrence of
    *xmlReqDataNode* under *xmlRootNode*, parsing each one into its own
    dict. Returns (paramsList, code) where code is the HTTP status
    (HTTP_ERROR when nothing usable was read).
    """
    paramsList,code =[],HTTP_ERROR
    data = format_http_get(URI,0,"")
    log.info(data)
    response = io.BytesIO()
    # Channel fallback chain: LEDM -> EWS_LEDM -> Marvell_EWS.
    if dev.openLEDM() == -1:
        dev.closeLEDM()
        if dev.openEWS_LEDM() == -1:
            dev.openMarvell_EWS()
            dev.writeMarvell_EWS(data)
            try:
                # Marvell channel is drained in fixed-size chunks.
                while dev.readMarvell_EWS(1024, response, timeout):
                    pass
            except Error:
                dev.closeMarvell_EWS()
                log.error("Unable to read Marvell_EWS Channel")
        else:
            dev.writeEWS_LEDM(data)
            try:
                dev.readLEDMData(dev.readEWS_LEDM, response, timeout)
            except Error:
                dev.closeEWS_LEDM()
                log.error("Unable to read EWS_LEDM Channel")
    else:
        dev.writeLEDM(data)
        try:
            dev.readLEDMData(dev.readLEDM, response, timeout)
        except Error:
            dev.closeLEDM()
            log.error("Unable to read LEDM Channel")
    strResp = response.getvalue().decode('utf-8')
    if strResp is not None:
        code = get_error_code(strResp)
        if code == HTTP_OK:
            strResp = utils.unchunck_xml_data(strResp)
            # Drop everything before the root node (HTTP headers etc.).
            pos = strResp.find(xmlRootNode,0,len(strResp))
            repstr = strResp[pos:].strip()
            repstr = repstr.replace('\r',' ').replace('\t',' ').replace('\n',' ') # To remove formating characters from the received xml
            repstr = repstr.rstrip('0') # To remove trailing zero from the received xml
            try:
                parser_object = utils.extendedExpat()
                root_element = parser_object.Parse(repstr)
                xmlReqDataNode = ''.join(l for l in filter(lambda x: x not in '<>', xmlReqDataNode)) # [c for c in xmlReqDataNode if c not in "<>"] # To remove '<' and '>' characters
                reqDataElementList = root_element.getElementsByTagName(xmlReqDataNode)
                # Parse each matching element independently into a dict.
                for node in reqDataElementList:
                    repstr = node.toString()
                    repstr = repstr.replace('\r',' ').replace('\t',' ').replace('\n',' ') # To remove formating characters from the received xml
                    params = utils.XMLToDictParser().parseXML(to_bytes_utf8(repstr))
                    paramsList.append(params)
            except xml.parsers.expat.ExpatError as e:
                log.debug("XML parser failed: %s" % e) #changed from error to debug
        else:
            log.debug("HTTP Responce failed with %s code"%code)
    return paramsList,code
def readXmlDataFromURI(dev,URI,xmlRootNode,xmlChildNode,timeout=5):
    """HTTP-GET *URI* over the device channel and parse the XML payload.

    Returns (params, code, elementCount): a flattened key->value dict of
    the document, the HTTP status code (HTTP_ERROR when nothing usable
    was read), and the number of *xmlChildNode* occurrences.
    """
    params,code,elementCount ={},HTTP_ERROR,0
    data = format_http_get(URI,0,"")
    log.info(data)
    response = io.BytesIO()
    # Channel fallback chain: LEDM -> EWS_LEDM -> Marvell_EWS.
    if dev.openLEDM() == -1:
        dev.closeLEDM()
        if dev.openEWS_LEDM() == -1:
            dev.openMarvell_EWS()
            dev.writeMarvell_EWS(data)
            try:
                # Marvell channel is drained in fixed-size chunks.
                while dev.readMarvell_EWS(1024, response, timeout):
                    pass
            except Error:
                dev.closeMarvell_EWS()
                log.error("Unable to read Marvell_EWS Channel")
        else:
            dev.writeEWS_LEDM(data)
            try:
                dev.readLEDMData(dev.readEWS_LEDM, response,timeout)
            except Error:
                dev.closeEWS_LEDM()
                log.error("Unable to read EWS_LEDM Channel")
    else:
        dev.writeLEDM(data)
        try:
            dev.readLEDMData(dev.readLEDM, response,timeout)
        except Error:
            dev.closeLEDM()
            log.error("Unable to read LEDM Channel")
    #dev.closeEWS_LEDM()
    strResp = response.getvalue().decode('utf-8')
    if strResp is not None:
        code = get_error_code(strResp)
        if code == HTTP_OK:
            #strResp = utils.unchunck_xml_data(strResp)
            strResp = utils.extract_xml_chunk(strResp)
            # Drop everything before the root node (HTTP headers etc.).
            pos = strResp.find(xmlRootNode,0,len(strResp))
            repstr = strResp[pos:].strip()
            repstr = repstr.replace('\r',' ').replace('\t',' ').replace('\n',' ') # To remove formating characters from the received xml
            repstr = repstr.rstrip('0') # To remove trailing zero from the received xml
            elementCount = repstr.count(xmlChildNode)
            try:
                params = utils.XMLToDictParser().parseXML(repstr)
            except xml.parsers.expat.ExpatError as e:
                log.debug("XML parser failed: %s" % e) #changed from error to debug
        else:
            log.debug(" HTTP Responce failed with %s code"%code)
    return params,code,elementCount
def writeXmlDataToURI(dev,URI,xml,timeout=5):
    """HTTP-PUT the *xml* payload to *URI* over the device channel.

    Returns the HTTP status code parsed from the device's response, or
    HTTP_ERROR when no response could be read.
    NOTE(review): unlike readXmlDataFromURI, the LEDM channel is not
    closed before falling back to EWS_LEDM -- confirm whether that is
    intentional.
    """
    code = HTTP_ERROR
    data = format_http_put(URI,len(xml),xml)
    response = io.BytesIO()
    # Channel fallback chain: LEDM -> EWS_LEDM -> Marvell_EWS.
    if dev.openLEDM() == -1:
        if dev.openEWS_LEDM() == -1:
            dev.openMarvell_EWS()
            dev.writeMarvell_EWS(data)
            try:
                # Marvell channel is drained in fixed-size chunks.
                while dev.readMarvell_EWS(1000, response, timeout):
                    pass
            except Error:
                dev.closeMarvell_EWS()
                log.error("Unable to read Marvell_EWS Channel")
        else:
            dev.writeEWS_LEDM(data)
            try:
                dev.readLEDMData(dev.readEWS_LEDM, response, timeout)
            except Error:
                dev.closeEWS_LEDM()
                log.error("Unable to read EWS_LEDM Channel")
    else:
        dev.writeLEDM(data)
        try:
            dev.readLEDMData(dev.readLEDM, response,timeout )
        except Error:
            dev.closeLEDM()
            log.error("Unable to read LEDM Channel")
    strResp = response.getvalue().decode('utf-8')
    if strResp is not None:
        code = get_error_code(strResp)
    return code
def get_error_code(ret):
    """Extract the HTTP status code from a raw response string.

    Returns HTTP_ERROR when the response is empty, does not match the
    status-line pattern, or the captured group is not an integer.
    """
    if not ret:
        return HTTP_ERROR
    match = http_result_pat.match(ret)
    if match is None:
        return HTTP_ERROR
    try:
        return int(match.group(1))
    except (ValueError, TypeError):
        return HTTP_ERROR
def format_http_get(requst, ledmlen, xmldata, content_type="text/xml; charset=utf-8"):
    """Build a raw HTTP GET request string for *requst*.

    NOTE(review): utils.cat appears to substitute the $-placeholders
    ($requst, $host, $content_type, $ledmlen, $xmldata) from the caller's
    local variables -- confirm against the utils module.
    """
    host = 'localhost'
    return utils.cat(
"""GET $requst HTTP/1.1\r
Host: $host\r
User-Agent: hplip/3.0\r
Content-Type: $content_type\r
Content-Length: $ledmlen\r
\r
$xmldata""")
def format_http_put(requst, ledmlen, xmldata, content_type="text/xml; charset=utf-8"):
    """Build a raw HTTP PUT request string carrying *xmldata*.

    NOTE(review): utils.cat appears to substitute the $-placeholders
    from the caller's local variables -- confirm against the utils module.
    """
    host = 'localhost'
    return utils.cat(
"""PUT $requst HTTP/1.1\r
Host: $host\r
User-Agent: hplip/3.0\r
Content-Type: $content_type\r
Content-Length: $ledmlen\r
\r
$xmldata""")
| 44.332344 | 760 | 0.594344 |
3a7183871e525fed1826259a3e2158dd2c2f7258 | 1,237 | py | Python | Python/Python20/22b.py | sapieninja/AdventOfCode | 8190c11e3eb2e4292a0cf66a6ef9261dee880f2e | [
"MIT"
] | null | null | null | Python/Python20/22b.py | sapieninja/AdventOfCode | 8190c11e3eb2e4292a0cf66a6ef9261dee880f2e | [
"MIT"
] | 1 | 2021-03-30T12:31:38.000Z | 2021-03-30T12:31:38.000Z | Python/Python20/22b.py | sapieninja/AdventOfCode | 8190c11e3eb2e4292a0cf66a6ef9261dee880f2e | [
"MIT"
] | null | null | null | from collections import *
import itertools
import random
import re
import sys
import aoc_utils
import queue
from operator import *
import math
import functools
from copy import deepcopy
# Parse the two players' decks from the puzzle input. Header lines
# ("Player 1:" / "Player 2:") are the only lines containing a colon;
# `count` tracks how many headers have been seen so far.
# NOTE(review): assumes aoc_utils.readlines() strips blank lines,
# otherwise int(line) would raise on the deck separator -- confirm.
lines = aoc_utils.readlines()
p1 = []
p2 = []
count = 0
for line in lines:
    # Number lines before the second header belong to player 1.
    if line.count(":") == 0 and count != 2:
        p1.append(int(line))
    elif line.count(":") == 1:
        count+=1
    # Number lines after the second header belong to player 2.
    if count == 2 and line.count(":") == 0:
        p2.append(int(line))
def game(p1, p2):
    """Play one game of Recursive Combat (AoC 2020 day 22 part 2).

    Mutates both decks in place and returns (p1, p2, winner) where
    winner is 1 or 2. If a deck configuration repeats within a game,
    player 1 wins that game immediately (the loop-prevention rule).
    """
    seen = set()
    while p1 and p2:
        state = (tuple(p1), tuple(p2))
        if state in seen:
            return (p1, p2, 1)
        seen.add(state)
        top1, top2 = p1.pop(0), p2.pop(0)
        if len(p1) >= top1 and len(p2) >= top2:
            # Both players can recurse: play a sub-game on COPIES of the
            # next top1/top2 cards of each deck.
            round_winner = game(p1[:top1], p2[:top2])[2]
        else:
            # Ordinary round: higher card wins.
            round_winner = 1 if top1 >= top2 else 2
        if round_winner == 1:
            p1 += [top1, top2]
        else:
            p2 += [top2, top1]
    return (p1, p2, round_winner)
# Play the full game, then score the winning deck: the bottom card is
# worth 1, the next 2, ..., the top card len(deck).
p1,p2,_ = game(p1,p2)
# If the game ended via the repeat rule both decks are non-empty and
# player 1 wins, which this check also handles (p1 != []).
if p1 == []:
    winner = p2
else:
    winner = p1
multi = len(winner)
score = 0
for x in winner:
    score += x*multi
    multi -= 1
print(score)
| 22.089286 | 45 | 0.522231 |
ed383ef30f3ba233d559e4451a496c9b05d256a3 | 2,754 | py | Python | src/training/lambertw_torch.py | kungfuai/d3m-segmentation-research | 5bc44ddd0e8522fb2b369866ad47aa62a24a8f63 | [
"MIT"
] | 1 | 2020-12-07T02:25:53.000Z | 2020-12-07T02:25:53.000Z | src/training/lambertw_torch.py | kungfuai/d3m-segmentation-research | 5bc44ddd0e8522fb2b369866ad47aa62a24a8f63 | [
"MIT"
] | null | null | null | src/training/lambertw_torch.py | kungfuai/d3m-segmentation-research | 5bc44ddd0e8522fb2b369866ad47aa62a24a8f63 | [
"MIT"
] | null | null | null | # Based off of https://github.com/locuslab/projected_sinkhorn
# Reimplementation of scipy version of lambertw for branching factor = 0
import torch
import math
import warnings
OMEGA = 0.56714329040978387299997 # W(1, 0)
EXPN1 = 0.36787944117144232159553 # exp(-1)
def evalpoly(coeff, degree, z):
    """Evaluate a polynomial elementwise over the tensor ``z``.

    ``coeff`` holds the coefficients ordered from the highest power
    (degree) down to the constant term.
    """
    exponents = torch.arange(degree, -1, -1).float().to(z.device)
    terms = coeff * z.unsqueeze(-1) ** exponents
    return terms.sum(-1)
def lambertw(z0, tol=1e-5):
    # this is a direct port of the scipy version for the k=0 branch
    """Principal branch (k=0) of the Lambert W function, elementwise.

    Mirrors scipy.special.lambertw for real inputs: an initial guess is
    chosen per region, then refined with Halley's iteration to relative
    tolerance *tol*. Inputs must lie on the real k=0 branch (z >= -1/e).
    """
    w0 = z0.new(z0.shape)
    # Partition the inputs into three initial-guess regions.
    I_branchpt = torch.abs(z0 + EXPN1) < 0.3
    I_pade0 = (-1.0 < z0)*(z0 < 1.5)
    I_asy = ~(I_branchpt | I_pade0)
    if I_branchpt.any():
        # Near the branch point z = -1/e: series in sqrt(2(e*z + 1)).
        z = z0[I_branchpt]
        coeffs = torch.Tensor([-1.0/3.0, 1.0, -1.0]).to(z.device)
        p = torch.sqrt(2*(math.e*z + 1))
        w0[I_branchpt] = evalpoly(coeffs, 2, p)
    if I_pade0.any():
        # Moderate z: (2,2) Pade approximant.
        z = z0[I_pade0]
        num = torch.Tensor([
            12.85106382978723404255,
            12.34042553191489361902,
            1.0
        ]).to(z.device)
        denom = torch.Tensor([
            32.53191489361702127660,
            14.34042553191489361702,
            1.0
        ]).to(z.device)
        w0[I_pade0] = z*evalpoly(num,2,z)/evalpoly(denom,2,z)
    if I_asy.any():
        # Large z: asymptotic expansion W ~ log(z) - log(log(z)).
        z = z0[I_asy]
        w = torch.log(z)
        w0[I_asy] = w - torch.log(w)
    # split on positive and negative,
    # and ignore the divergent series case (z=1)
    w0[z0 == 1] = OMEGA
    I_pos = (w0 >= 0)*(z0 != 1)
    I_neg = (w0 < 0)*(z0 != 1)
    if I_pos.any():
        w = w0[I_pos]
        z = z0[I_pos]
        for i in range(100):
            # positive case
            # Halley iteration written with exp(-w) for stability.
            ew = torch.exp(-w)
            wewz = w - z*ew
            wn = w - wewz/(w + 1 - (w + 2)*wewz/(2*w + 2))
            if (torch.abs(wn - w) < tol*torch.abs(wn)).all():
                break
            else:
                w = wn
        w0[I_pos] = w
    if I_neg.any():
        w = w0[I_neg]
        z = z0[I_neg]
        for i in range(100):
            # Halley iteration written with exp(w) for the negative case.
            ew = torch.exp(w)
            wew = w*ew
            wewz = wew - z
            wn = w - wewz/(wew + ew - (w + 2)*wewz/(2*w + 2))
            if (torch.abs(wn - w) < tol*torch.abs(wn)).all():
                break
            else:
                w = wn
        w0[I_neg] = wn
    return w0
if __name__ == '__main__':
    # Sanity check against scipy's reference implementation.
    from scipy.special import lambertw as sp_lamw
    import numpy as np
    torch.random.manual_seed(0)
    x = torch.randn(1000)
    # Clamp inputs slightly above the branch point -1/e so that the
    # real principal branch (k=0) is defined everywhere.
    x = torch.clamp(x, -EXPN1+0.08, torch.max(x))
    torch_lamw = lambertw(x)
    # scipy returns complex values; take the real part for comparison.
    scipy_lamw = torch.from_numpy(np.real(sp_lamw(x.numpy()))).float()
    print((torch_lamw - scipy_lamw).abs().max())
    print(lambertw(torch.ones(1)*1e-8), sp_lamw(1e-8))
3f176a13f0e0cd09650524b787ffc3c944ec9fd5 | 1,342 | py | Python | server/models/AlexNet.py | dcsgfl/acceleratefl | 9c928ff06dd4dd02eb27cb71d7d539ba4527ec58 | [
"MIT"
] | null | null | null | server/models/AlexNet.py | dcsgfl/acceleratefl | 9c928ff06dd4dd02eb27cb71d7d539ba4527ec58 | [
"MIT"
] | null | null | null | server/models/AlexNet.py | dcsgfl/acceleratefl | 9c928ff06dd4dd02eb27cb71d7d539ba4527ec58 | [
"MIT"
] | null | null | null | import torch.nn as nn
#CIFAR10 => 10 classes
NUM_CLASSES = 10


class AlexNet(nn.Module):
    """AlexNet-style CNN scaled down for 32x32 inputs (e.g. CIFAR-10).

    The convolutional trunk reduces a 3x32x32 image to a 256x2x2 feature
    map, which the fully connected head maps to ``num_classes`` logits.
    """

    def __init__(self, num_classes=NUM_CLASSES):
        super(AlexNet, self).__init__()
        # Layer order is kept identical to the original definition so the
        # state-dict keys (features.0, ..., classifier.6) are unchanged.
        conv_layers = [
            nn.Conv2d(3, 64, kernel_size=3, stride=2, padding=1),
            nn.ReLU(inplace=True),
            nn.MaxPool2d(kernel_size=2),
            nn.Conv2d(64, 192, kernel_size=3, padding=1),
            nn.ReLU(inplace=True),
            nn.MaxPool2d(kernel_size=2),
            nn.Conv2d(192, 384, kernel_size=3, padding=1),
            nn.ReLU(inplace=True),
            nn.Conv2d(384, 256, kernel_size=3, padding=1),
            nn.ReLU(inplace=True),
            nn.Conv2d(256, 256, kernel_size=3, padding=1),
            nn.ReLU(inplace=True),
            nn.MaxPool2d(kernel_size=2),
        ]
        head_layers = [
            nn.Dropout(),
            nn.Linear(256 * 2 * 2, 4096),
            nn.ReLU(inplace=True),
            nn.Dropout(),
            nn.Linear(4096, 4096),
            nn.ReLU(inplace=True),
            nn.Linear(4096, num_classes),
        ]
        self.features = nn.Sequential(*conv_layers)
        self.classifier = nn.Sequential(*head_layers)

    def forward(self, x):
        # Flatten the 256x2x2 feature map before the fully connected head.
        feats = self.features(x)
        flat = feats.view(feats.size(0), 256 * 2 * 2)
        return self.classifier(flat)
class Factory:
    """Provides new AlexNet instances.

    NOTE(review): presumably looked up dynamically by a model loader that
    expects a `Factory` class exposing `get()` -- confirm against caller.
    """
    def get(self):
        # Fresh network with the default class count (NUM_CLASSES).
        return AlexNet()
0f4b448992daf91bd2a9143d074e07de2b2051a7 | 1,294 | py | Python | xlsxwriter/test/comparison/test_chart_column05.py | adgear/XlsxWriter | 79bcaad28d57ac29038b1c74bccc6d611b7a385e | [
"BSD-2-Clause-FreeBSD"
] | 2 | 2019-07-25T06:08:09.000Z | 2019-11-01T02:33:56.000Z | xlsxwriter/test/comparison/test_chart_column05.py | adgear/XlsxWriter | 79bcaad28d57ac29038b1c74bccc6d611b7a385e | [
"BSD-2-Clause-FreeBSD"
] | 13 | 2019-07-14T00:29:05.000Z | 2019-11-26T06:16:46.000Z | xlsxwriter/test/comparison/test_chart_column05.py | adgear/XlsxWriter | 79bcaad28d57ac29038b1c74bccc6d611b7a385e | [
"BSD-2-Clause-FreeBSD"
] | null | null | null | ###############################################################################
#
# Tests for XlsxWriter.
#
# Copyright (c), 2013-2019, John McNamara, jmcnamara@cpan.org
#
from ..excel_comparsion_test import ExcelComparisonTest
from ...workbook import Workbook
class TestCompareXLSXFiles(ExcelComparisonTest):
    """
    Test file created by XlsxWriter against a file created by Excel.

    """
    def setUp(self):
        # Reference workbook that the generated file is compared against.
        self.set_filename('chart_column05.xlsx')
    def test_create_file(self):
        """Test the creation of a simple XlsxWriter file."""
        workbook = Workbook(self.got_filename)
        worksheet = workbook.add_worksheet('Foo')
        chart = workbook.add_chart({'type': 'column'})
        # NOTE(review): axis ids appear to be pinned so the generated XML
        # matches the reference file byte-for-byte -- confirm convention.
        chart.axis_ids = [47292800, 47295104]
        data = [
            [1, 2, 3, 4, 5],
            [2, 4, 6, 8, 10],
            [3, 6, 9, 12, 15],
        ]
        worksheet.write_column('A1', data[0])
        worksheet.write_column('B1', data[1])
        worksheet.write_column('C1', data[2])
        # One column series per data column, referenced by sheet range.
        chart.add_series({'values': '=Foo!$A$1:$A$5'})
        chart.add_series({'values': '=Foo!$B$1:$B$5'})
        chart.add_series({'values': '=Foo!$C$1:$C$5'})
        worksheet.insert_chart('E9', chart)
        workbook.close()
        self.assertExcelEqual()
| 24.884615 | 79 | 0.557187 |
f0c06c430eb22c455dab60dd9236793a31fff12e | 290 | py | Python | mmcv/visualization/__init__.py | XinYangDong/mmcv-0.2.10 | 527388ea7c5daf7149a88b3dc833373d5a5fb850 | [
"Apache-2.0"
] | 54 | 2021-11-05T02:15:15.000Z | 2022-03-23T13:40:43.000Z | mmcv/visualization/__init__.py | XinYangDong/mmcv-0.2.10 | 527388ea7c5daf7149a88b3dc833373d5a5fb850 | [
"Apache-2.0"
] | 8 | 2019-06-13T06:00:08.000Z | 2021-07-24T05:25:33.000Z | mmcv/visualization/__init__.py | XinYangDong/mmcv-0.2.10 | 527388ea7c5daf7149a88b3dc833373d5a5fb850 | [
"Apache-2.0"
] | 6 | 2019-07-30T06:36:27.000Z | 2021-06-03T11:57:36.000Z | from .color import Color, color_val
from .image import imshow, imshow_bboxes, imshow_det_bboxes
from .optflow import flowshow, flow2rgb, make_color_wheel
__all__ = [
'Color', 'color_val', 'imshow', 'imshow_bboxes', 'imshow_det_bboxes',
'flowshow', 'flow2rgb', 'make_color_wheel'
]
| 32.222222 | 73 | 0.751724 |
aec12f4cedceaf503d37e6bbfff16b50b27033bd | 33,982 | py | Python | pandas/tests/indexing/test_indexing.py | rlukevie/pandas | a7402c1140116661dbba98f3a6ca923f9a957875 | [
"PSF-2.0",
"Apache-2.0",
"BSD-3-Clause-No-Nuclear-License-2014",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | 1 | 2021-05-13T17:34:58.000Z | 2021-05-13T17:34:58.000Z | pandas/tests/indexing/test_indexing.py | rlukevie/pandas | a7402c1140116661dbba98f3a6ca923f9a957875 | [
"PSF-2.0",
"Apache-2.0",
"BSD-3-Clause-No-Nuclear-License-2014",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | null | null | null | pandas/tests/indexing/test_indexing.py | rlukevie/pandas | a7402c1140116661dbba98f3a6ca923f9a957875 | [
"PSF-2.0",
"Apache-2.0",
"BSD-3-Clause-No-Nuclear-License-2014",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | null | null | null | """ test fancy indexing & misc """
from datetime import datetime
import re
import weakref
import numpy as np
import pytest
from pandas.core.dtypes.common import (
is_float_dtype,
is_integer_dtype,
)
import pandas as pd
from pandas import (
DataFrame,
Index,
NaT,
Series,
date_range,
offsets,
timedelta_range,
)
import pandas._testing as tm
from pandas.tests.indexing.common import _mklbl
from pandas.tests.indexing.test_floats import gen_obj
# ------------------------------------------------------------------------
# Indexing test cases
class TestFancy:
""" pure get/set item & fancy indexing """
    def test_setitem_ndarray_1d(self):
        # GH5508
        # len of indexer vs length of the 1d ndarray
        df = DataFrame(index=Index(np.arange(1, 11)))
        df["foo"] = np.zeros(10, dtype=np.float64)
        df["bar"] = np.zeros(10, dtype=complex)
        # invalid: a 3-label slice cannot take a 4-element value
        msg = "Must have equal len keys and value when setting with an iterable"
        with pytest.raises(ValueError, match=msg):
            df.loc[df.index[2:5], "bar"] = np.array([2.33j, 1.23 + 0.1j, 2.2, 1.0])
        # valid: 4 labels, 4 values -- assignment must round-trip exactly
        df.loc[df.index[2:6], "bar"] = np.array([2.33j, 1.23 + 0.1j, 2.2, 1.0])
        result = df.loc[df.index[2:6], "bar"]
        expected = Series(
            [2.33j, 1.23 + 0.1j, 2.2, 1.0], index=[3, 4, 5, 6], name="bar"
        )
        tm.assert_series_equal(result, expected)
def test_setitem_ndarray_1d_2(self):
# GH5508
# dtype getting changed?
df = DataFrame(index=Index(np.arange(1, 11)))
df["foo"] = np.zeros(10, dtype=np.float64)
df["bar"] = np.zeros(10, dtype=complex)
msg = "Must have equal len keys and value when setting with an iterable"
with pytest.raises(ValueError, match=msg):
df[2:5] = np.arange(1, 4) * 1j
def test_getitem_ndarray_3d(self, index, frame_or_series, indexer_sli):
# GH 25567
obj = gen_obj(frame_or_series, index)
idxr = indexer_sli(obj)
nd3 = np.random.randint(5, size=(2, 2, 2))
msgs = []
if frame_or_series is Series and indexer_sli in [tm.setitem, tm.iloc]:
msgs.append(r"Wrong number of dimensions. values.ndim != ndim \[3 != 1\]")
if frame_or_series is Series or indexer_sli is tm.iloc:
msgs.append(r"Buffer has wrong number of dimensions \(expected 1, got 3\)")
if indexer_sli is tm.loc or (
frame_or_series is Series and indexer_sli is tm.setitem
):
msgs.append("Cannot index with multidimensional key")
if frame_or_series is DataFrame and indexer_sli is tm.setitem:
msgs.append("Index data must be 1-dimensional")
if isinstance(index, pd.IntervalIndex) and indexer_sli is tm.iloc:
msgs.append("Index data must be 1-dimensional")
if len(index) == 0 or isinstance(index, pd.MultiIndex):
msgs.append("positional indexers are out-of-bounds")
msg = "|".join(msgs)
potential_errors = (IndexError, ValueError, NotImplementedError)
with pytest.raises(potential_errors, match=msg):
with tm.assert_produces_warning(DeprecationWarning):
idxr[nd3]
def test_setitem_ndarray_3d(self, index, frame_or_series, indexer_sli):
# GH 25567
obj = gen_obj(frame_or_series, index)
idxr = indexer_sli(obj)
nd3 = np.random.randint(5, size=(2, 2, 2))
if indexer_sli.__name__ == "iloc":
err = ValueError
msg = f"Cannot set values with ndim > {obj.ndim}"
elif (
isinstance(index, pd.IntervalIndex)
and indexer_sli.__name__ == "setitem"
and obj.ndim == 1
):
err = AttributeError
msg = (
"'pandas._libs.interval.IntervalTree' object has no attribute 'get_loc'"
)
else:
err = ValueError
msg = r"Buffer has wrong number of dimensions \(expected 1, got 3\)|"
with pytest.raises(err, match=msg):
idxr[nd3] = 0
    def test_inf_upcast(self):
        # GH 16957
        # We should be able to use np.inf as a key
        # np.inf should cause an index to convert to float
        # Test with np.inf in rows
        df = DataFrame(columns=[0])
        df.loc[1] = 1
        df.loc[2] = 2
        # setting-with-enlargement on an inf label upcasts the row index
        df.loc[np.inf] = 3
        # make sure we can look up the value
        assert df.loc[np.inf, 0] == 3
        result = df.index
        expected = pd.Float64Index([1, 2, np.inf])
        tm.assert_index_equal(result, expected)
    def test_loc_setitem_with_expasnion_inf_upcast_empty(self):
        # Test with np.inf in columns: enlarging an empty frame with an
        # inf column label must upcast the columns Index to float64.
        # NOTE(review): "expasnion" is a typo for "expansion" in the test
        # name; left as-is here to avoid renaming the collected test.
        df = DataFrame()
        df.loc[0, 0] = 1
        df.loc[1, 1] = 2
        df.loc[0, np.inf] = 3
        result = df.columns
        expected = pd.Float64Index([0, 1, np.inf])
        tm.assert_index_equal(result, expected)
def test_setitem_dtype_upcast(self):
# GH3216
df = DataFrame([{"a": 1}, {"a": 3, "b": 2}])
df["c"] = np.nan
assert df["c"].dtype == np.float64
df.loc[0, "c"] = "foo"
expected = DataFrame(
[{"a": 1, "b": np.nan, "c": "foo"}, {"a": 3, "b": 2, "c": np.nan}]
)
tm.assert_frame_equal(df, expected)
@pytest.mark.parametrize("val", [3.14, "wxyz"])
def test_setitem_dtype_upcast2(self, val):
# GH10280
df = DataFrame(
np.arange(6, dtype="int64").reshape(2, 3),
index=list("ab"),
columns=["foo", "bar", "baz"],
)
left = df.copy()
left.loc["a", "bar"] = val
right = DataFrame(
[[0, val, 2], [3, 4, 5]],
index=list("ab"),
columns=["foo", "bar", "baz"],
)
tm.assert_frame_equal(left, right)
assert is_integer_dtype(left["foo"])
assert is_integer_dtype(left["baz"])
def test_setitem_dtype_upcast3(self):
left = DataFrame(
np.arange(6, dtype="int64").reshape(2, 3) / 10.0,
index=list("ab"),
columns=["foo", "bar", "baz"],
)
left.loc["a", "bar"] = "wxyz"
right = DataFrame(
[[0, "wxyz", 0.2], [0.3, 0.4, 0.5]],
index=list("ab"),
columns=["foo", "bar", "baz"],
)
tm.assert_frame_equal(left, right)
assert is_float_dtype(left["foo"])
assert is_float_dtype(left["baz"])
def test_dups_fancy_indexing(self):
# GH 3455
df = tm.makeCustomDataframe(10, 3)
df.columns = ["a", "a", "b"]
result = df[["b", "a"]].columns
expected = Index(["b", "a", "a"])
tm.assert_index_equal(result, expected)
def test_dups_fancy_indexing_across_dtypes(self):
# across dtypes
df = DataFrame([[1, 2, 1.0, 2.0, 3.0, "foo", "bar"]], columns=list("aaaaaaa"))
df.head()
str(df)
result = DataFrame([[1, 2, 1.0, 2.0, 3.0, "foo", "bar"]])
result.columns = list("aaaaaaa")
# TODO(wesm): unused?
df_v = df.iloc[:, 4] # noqa
res_v = result.iloc[:, 4] # noqa
tm.assert_frame_equal(df, result)
def test_dups_fancy_indexing_not_in_order(self):
# GH 3561, dups not in selected order
df = DataFrame(
{"test": [5, 7, 9, 11], "test1": [4.0, 5, 6, 7], "other": list("abcd")},
index=["A", "A", "B", "C"],
)
rows = ["C", "B"]
expected = DataFrame(
{"test": [11, 9], "test1": [7.0, 6], "other": ["d", "c"]}, index=rows
)
result = df.loc[rows]
tm.assert_frame_equal(result, expected)
result = df.loc[Index(rows)]
tm.assert_frame_equal(result, expected)
rows = ["C", "B", "E"]
with pytest.raises(KeyError, match="with any missing labels"):
df.loc[rows]
# see GH5553, make sure we use the right indexer
rows = ["F", "G", "H", "C", "B", "E"]
with pytest.raises(KeyError, match="with any missing labels"):
df.loc[rows]
def test_dups_fancy_indexing_only_missing_label(self):
# List containing only missing label
dfnu = DataFrame(np.random.randn(5, 3), index=list("AABCD"))
with pytest.raises(
KeyError,
match=re.escape(
"\"None of [Index(['E'], dtype='object')] are in the [index]\""
),
):
dfnu.loc[["E"]]
# ToDo: check_index_type can be True after GH 11497
def test_dups_fancy_indexing_missing_label(self):
# GH 4619; duplicate indexer with missing label
df = DataFrame({"A": [0, 1, 2]})
with pytest.raises(KeyError, match="with any missing labels"):
df.loc[[0, 8, 0]]
df = DataFrame({"A": list("abc")})
with pytest.raises(KeyError, match="with any missing labels"):
df.loc[[0, 8, 0]]
def test_dups_fancy_indexing_non_unique(self):
# non unique with non unique selector
df = DataFrame({"test": [5, 7, 9, 11]}, index=["A", "A", "B", "C"])
with pytest.raises(KeyError, match="with any missing labels"):
df.loc[["A", "A", "E"]]
def test_dups_fancy_indexing2(self):
# GH 5835
# dups on index and missing values
df = DataFrame(np.random.randn(5, 5), columns=["A", "B", "B", "B", "A"])
with pytest.raises(KeyError, match="with any missing labels"):
df.loc[:, ["A", "B", "C"]]
def test_dups_fancy_indexing3(self):
# GH 6504, multi-axis indexing
df = DataFrame(
np.random.randn(9, 2), index=[1, 1, 1, 2, 2, 2, 3, 3, 3], columns=["a", "b"]
)
expected = df.iloc[0:6]
result = df.loc[[1, 2]]
tm.assert_frame_equal(result, expected)
expected = df
result = df.loc[:, ["a", "b"]]
tm.assert_frame_equal(result, expected)
expected = df.iloc[0:6, :]
result = df.loc[[1, 2], ["a", "b"]]
tm.assert_frame_equal(result, expected)
    @pytest.mark.parametrize("case", [tm.getitem, tm.loc])
    def test_duplicate_int_indexing(self, case):
        # GH 17347: a single-element list indexer on a duplicated integer
        # label must return all matching rows, same as the scalar label.
        s = Series(range(3), index=[1, 1, 3])
        expected = s[1]
        result = case(s)[[1]]
        tm.assert_series_equal(result, expected)
def test_indexing_mixed_frame_bug(self):
# GH3492
df = DataFrame(
{"a": {1: "aaa", 2: "bbb", 3: "ccc"}, "b": {1: 111, 2: 222, 3: 333}}
)
# this works, new column is created correctly
df["test"] = df["a"].apply(lambda x: "_" if x == "aaa" else x)
# this does not work, ie column test is not changed
idx = df["test"] == "_"
temp = df.loc[idx, "a"].apply(lambda x: "-----" if x == "aaa" else x)
df.loc[idx, "test"] = temp
assert df.iloc[0, 2] == "-----"
def test_multitype_list_index_access(self):
# GH 10610
df = DataFrame(np.random.random((10, 5)), columns=["a"] + [20, 21, 22, 23])
with pytest.raises(KeyError, match=re.escape("'[-8, 26] not in index'")):
df[[22, 26, -8]]
assert df[21].shape[0] == df.shape[0]
def test_set_index_nan(self):
# GH 3586
df = DataFrame(
{
"PRuid": {
17: "nonQC",
18: "nonQC",
19: "nonQC",
20: "10",
21: "11",
22: "12",
23: "13",
24: "24",
25: "35",
26: "46",
27: "47",
28: "48",
29: "59",
30: "10",
},
"QC": {
17: 0.0,
18: 0.0,
19: 0.0,
20: np.nan,
21: np.nan,
22: np.nan,
23: np.nan,
24: 1.0,
25: np.nan,
26: np.nan,
27: np.nan,
28: np.nan,
29: np.nan,
30: np.nan,
},
"data": {
17: 7.9544899999999998,
18: 8.0142609999999994,
19: 7.8591520000000008,
20: 0.86140349999999999,
21: 0.87853110000000001,
22: 0.8427041999999999,
23: 0.78587700000000005,
24: 0.73062459999999996,
25: 0.81668560000000001,
26: 0.81927080000000008,
27: 0.80705009999999999,
28: 0.81440240000000008,
29: 0.80140849999999997,
30: 0.81307740000000006,
},
"year": {
17: 2006,
18: 2007,
19: 2008,
20: 1985,
21: 1985,
22: 1985,
23: 1985,
24: 1985,
25: 1985,
26: 1985,
27: 1985,
28: 1985,
29: 1985,
30: 1986,
},
}
).reset_index()
result = (
df.set_index(["year", "PRuid", "QC"])
.reset_index()
.reindex(columns=df.columns)
)
tm.assert_frame_equal(result, df)
def test_multi_assign(self):
# GH 3626, an assignment of a sub-df to a df
df = DataFrame(
{
"FC": ["a", "b", "a", "b", "a", "b"],
"PF": [0, 0, 0, 0, 1, 1],
"col1": list(range(6)),
"col2": list(range(6, 12)),
}
)
df.iloc[1, 0] = np.nan
df2 = df.copy()
mask = ~df2.FC.isna()
cols = ["col1", "col2"]
dft = df2 * 2
dft.iloc[3, 3] = np.nan
expected = DataFrame(
{
"FC": ["a", np.nan, "a", "b", "a", "b"],
"PF": [0, 0, 0, 0, 1, 1],
"col1": Series([0, 1, 4, 6, 8, 10]),
"col2": [12, 7, 16, np.nan, 20, 22],
}
)
# frame on rhs
df2.loc[mask, cols] = dft.loc[mask, cols]
tm.assert_frame_equal(df2, expected)
df2.loc[mask, cols] = dft.loc[mask, cols]
tm.assert_frame_equal(df2, expected)
# with an ndarray on rhs
# coerces to float64 because values has float64 dtype
# GH 14001
expected = DataFrame(
{
"FC": ["a", np.nan, "a", "b", "a", "b"],
"PF": [0, 0, 0, 0, 1, 1],
"col1": [0.0, 1.0, 4.0, 6.0, 8.0, 10.0],
"col2": [12, 7, 16, np.nan, 20, 22],
}
)
df2 = df.copy()
df2.loc[mask, cols] = dft.loc[mask, cols].values
tm.assert_frame_equal(df2, expected)
df2.loc[mask, cols] = dft.loc[mask, cols].values
tm.assert_frame_equal(df2, expected)
def test_multi_assign_broadcasting_rhs(self):
# broadcasting on the rhs is required
df = DataFrame(
{
"A": [1, 2, 0, 0, 0],
"B": [0, 0, 0, 10, 11],
"C": [0, 0, 0, 10, 11],
"D": [3, 4, 5, 6, 7],
}
)
expected = df.copy()
mask = expected["A"] == 0
for col in ["A", "B"]:
expected.loc[mask, col] = df["D"]
df.loc[df["A"] == 0, ["A", "B"]] = df["D"]
tm.assert_frame_equal(df, expected)
def test_setitem_list(self):
# GH 6043
# iloc with a list
df = DataFrame(index=[0, 1], columns=[0])
df.iloc[1, 0] = [1, 2, 3]
df.iloc[1, 0] = [1, 2]
result = DataFrame(index=[0, 1], columns=[0])
result.iloc[1, 0] = [1, 2]
tm.assert_frame_equal(result, df)
    def test_iloc_setitem_custom_object(self):
        # iloc with an object
        # Minimal user-defined type: comparable, printable, and exposing
        # view() so the block code treats it like an array-ish scalar.
        class TO:
            def __init__(self, value):
                self.value = value
            def __str__(self) -> str:
                return f"[{self.value}]"
            __repr__ = __str__
            def __eq__(self, other) -> bool:
                return self.value == other.value
            def view(self):
                return self
        df = DataFrame(index=[0, 1], columns=[0])
        df.iloc[1, 0] = TO(1)
        # overwriting the cell with another custom object must work
        df.iloc[1, 0] = TO(2)
        result = DataFrame(index=[0, 1], columns=[0])
        result.iloc[1, 0] = TO(2)
        tm.assert_frame_equal(result, df)
        # remains object dtype even after setting it back
        df = DataFrame(index=[0, 1], columns=[0])
        df.iloc[1, 0] = TO(1)
        df.iloc[1, 0] = np.nan
        result = DataFrame(index=[0, 1], columns=[0])
        tm.assert_frame_equal(result, df)
    def test_string_slice(self):
        # GH 14424
        # String indexing against a datetimelike index stored with *object*
        # dtype should properly raise KeyError — partial-string matching like
        # "2011" only applies to real datetime indexes.
        df = DataFrame([1], Index([pd.Timestamp("2011-01-01")], dtype=object))
        assert df.index._is_all_dates
        with pytest.raises(KeyError, match="'2011'"):
            df["2011"]
        with pytest.raises(KeyError, match="'2011'"):
            df.loc["2011", 0]
    def test_string_slice_empty(self):
        # GH 14424
        # Same contract as test_string_slice, but on an empty frame: the
        # default empty index is not datetimelike, so string lookups raise.
        df = DataFrame()
        assert not df.index._is_all_dates
        with pytest.raises(KeyError, match="'2011'"):
            df["2011"]
        with pytest.raises(KeyError, match="'2011'"):
            df.loc["2011", 0]
    def test_astype_assignment(self):
        # Assigning an astype()'d selection back through iloc/loc is expected
        # to replace the affected columns' dtype with the casted dtype.
        # NOTE(review): this relies on setitem *replacing* columns rather than
        # writing in place — behavior that is pandas-version-sensitive.
        # GH4312 (iloc)
        df_orig = DataFrame(
            [["1", "2", "3", ".4", 5, 6.0, "foo"]], columns=list("ABCDEFG")
        )
        df = df_orig.copy()
        df.iloc[:, 0:2] = df.iloc[:, 0:2].astype(np.int64)
        expected = DataFrame(
            [[1, 2, "3", ".4", 5, 6.0, "foo"]], columns=list("ABCDEFG")
        )
        tm.assert_frame_equal(df, expected)
        # _convert is a private pandas helper that infers better dtypes.
        df = df_orig.copy()
        df.iloc[:, 0:2] = df.iloc[:, 0:2]._convert(datetime=True, numeric=True)
        expected = DataFrame(
            [[1, 2, "3", ".4", 5, 6.0, "foo"]], columns=list("ABCDEFG")
        )
        tm.assert_frame_equal(df, expected)
        # GH5702 (loc)
        df = df_orig.copy()
        df.loc[:, "A"] = df.loc[:, "A"].astype(np.int64)
        expected = DataFrame(
            [[1, "2", "3", ".4", 5, 6.0, "foo"]], columns=list("ABCDEFG")
        )
        tm.assert_frame_equal(df, expected)
        df = df_orig.copy()
        df.loc[:, ["B", "C"]] = df.loc[:, ["B", "C"]].astype(np.int64)
        expected = DataFrame(
            [["1", 2, 3, ".4", 5, 6.0, "foo"]], columns=list("ABCDEFG")
        )
        tm.assert_frame_equal(df, expected)
    def test_astype_assignment_full_replacements(self):
        # full replacements / no nans
        # NOTE(review): expects the full-column assignment to change the
        # dtype from float64 to int64 (i.e. column replacement rather than
        # in-place write) — pandas-version-sensitive behavior.
        df = DataFrame({"A": [1.0, 2.0, 3.0, 4.0]})
        df.iloc[:, 0] = df["A"].astype(np.int64)
        expected = DataFrame({"A": [1, 2, 3, 4]})
        tm.assert_frame_equal(df, expected)
        df = DataFrame({"A": [1.0, 2.0, 3.0, 4.0]})
        df.loc[:, "A"] = df["A"].astype(np.int64)
        expected = DataFrame({"A": [1, 2, 3, 4]})
        tm.assert_frame_equal(df, expected)
    @pytest.mark.parametrize("indexer", [tm.getitem, tm.loc])
    def test_index_type_coercion(self, indexer):
        # GH 11836
        # if we have an index type and set it with something that looks
        # to numpy like the same, but is actually, not
        # (e.g. setting with a float or string '0')
        # then we need to coerce to object
        # integer indexes
        for s in [Series(range(5)), Series(range(5), index=range(1, 6))]:
            assert s.index.is_integer()
            # setting a float key upcasts an integer index to float
            s2 = s.copy()
            indexer(s2)[0.1] = 0
            assert s2.index.is_floating()
            assert indexer(s2)[0.1] == 0
            # 0.0 is treated as the integer 0: existing label stays, missing
            # label is appended as 0
            s2 = s.copy()
            indexer(s2)[0.0] = 0
            exp = s.index
            if 0 not in s:
                exp = Index(s.index.tolist() + [0])
            tm.assert_index_equal(s2.index, exp)
            # setting a string key falls back to an object index
            s2 = s.copy()
            indexer(s2)["0"] = 0
            assert s2.index.is_object()
        # float indexes: float and 0.0 keys need no coercion at all
        for s in [Series(range(5), index=np.arange(5.0))]:
            assert s.index.is_floating()
            s2 = s.copy()
            indexer(s2)[0.1] = 0
            assert s2.index.is_floating()
            assert indexer(s2)[0.1] == 0
            s2 = s.copy()
            indexer(s2)[0.0] = 0
            tm.assert_index_equal(s2.index, s.index)
            s2 = s.copy()
            indexer(s2)["0"] = 0
            assert s2.index.is_object()
class TestMisc:
    """Assorted indexing regression tests (float indexes, rhs alignment,
    negative-step slicing, dict assignment, reference cycles)."""
    def test_float_index_to_mixed(self):
        # Adding a string column to a float-labelled frame yields a mixed
        # (object) column index without disturbing the existing columns.
        df = DataFrame({0.0: np.random.rand(10), 1.0: np.random.rand(10)})
        df["a"] = 10
        expected = DataFrame({0.0: df[0.0], 1.0: df[1.0], "a": [10] * 10})
        tm.assert_frame_equal(expected, df)
    def test_float_index_non_scalar_assignment(self):
        # Row-wise scalar assignment through a float index slice.
        df = DataFrame({"a": [1, 2, 3], "b": [3, 4, 5]}, index=[1.0, 2.0, 3.0])
        df.loc[df.index[:2]] = 1
        expected = DataFrame({"a": [1, 1, 3], "b": [1, 1, 5]}, index=df.index)
        tm.assert_frame_equal(expected, df)
        # Self-assignment over the whole index is a no-op.
        df = DataFrame({"a": [1, 2, 3], "b": [3, 4, 5]}, index=[1.0, 2.0, 3.0])
        df2 = df.copy()
        df.loc[df.index] = df.loc[df.index]
        tm.assert_frame_equal(df, df2)
    def test_float_index_at_iat(self):
        # at (label-based) and iat (position-based) on a float index.
        s = Series([1, 2, 3], index=[0.1, 0.2, 0.3])
        for el, item in s.items():
            assert s.at[el] == item
        for i in range(len(s)):
            assert s.iat[i] == i + 1
    def test_rhs_alignment(self):
        # GH8258, tests that both rows & columns are aligned to what is
        # assigned to. covers both uniform data-type & multi-type cases
        def run_tests(df, rhs, right_loc, right_iloc):
            # label, index, slice
            lbl_one, idx_one, slice_one = list("bcd"), [1, 2, 3], slice(1, 4)
            lbl_two, idx_two, slice_two = ["joe", "jolie"], [1, 2], slice(1, 3)
            left = df.copy()
            left.loc[lbl_one, lbl_two] = rhs
            tm.assert_frame_equal(left, right_loc)
            left = df.copy()
            left.iloc[idx_one, idx_two] = rhs
            tm.assert_frame_equal(left, right_iloc)
            left = df.copy()
            left.iloc[slice_one, slice_two] = rhs
            tm.assert_frame_equal(left, right_iloc)
        xs = np.arange(20).reshape(5, 4)
        cols = ["jim", "joe", "jolie", "joline"]
        df = DataFrame(xs, columns=cols, index=list("abcde"), dtype="int64")
        # right hand side; permute the indices and multiply by -2
        rhs = -2 * df.iloc[3:0:-1, 2:0:-1]
        # expected `right` result; just multiply by -2
        right_iloc = df.copy()
        right_iloc["joe"] = [1, 14, 10, 6, 17]
        right_iloc["jolie"] = [2, 13, 9, 5, 18]
        right_iloc.iloc[1:4, 1:3] *= -2
        right_loc = df.copy()
        right_loc.iloc[1:4, 1:3] *= -2
        # run tests with uniform dtypes
        run_tests(df, rhs, right_loc, right_iloc)
        # make frames multi-type & re-run tests
        for frame in [df, rhs, right_loc, right_iloc]:
            frame["joe"] = frame["joe"].astype("float64")
            frame["jolie"] = frame["jolie"].map("@{}".format)
        right_iloc["joe"] = [1.0, "@-28", "@-20", "@-12", 17.0]
        right_iloc["jolie"] = ["@2", -26.0, -18.0, -10.0, "@18"]
        run_tests(df, rhs, right_loc, right_iloc)
    def test_str_label_slicing_with_negative_step(self):
        # Negative-step label slices must match the equivalent iloc slice.
        SLC = pd.IndexSlice
        def assert_slices_equivalent(l_slc, i_slc):
            tm.assert_series_equal(s.loc[l_slc], s.iloc[i_slc])
            # NOTE(review): `idx.is_integer` is a bound method, so
            # `not idx.is_integer` is always False and this branch never
            # executes — likely intended `not idx.is_integer()`.  Confirm
            # before enabling, since for integer indexes plain getitem is
            # position-based and the assertion would not hold.
            if not idx.is_integer:
                # For integer indices, .loc and plain getitem are position-based.
                tm.assert_series_equal(s[l_slc], s.iloc[i_slc])
                tm.assert_series_equal(s.loc[l_slc], s.iloc[i_slc])
        # string labels, shifted ints, and floats exercise distinct paths
        for idx in [_mklbl("A", 20), np.arange(20) + 100, np.linspace(100, 150, 20)]:
            idx = Index(idx)
            s = Series(np.arange(20), index=idx)
            assert_slices_equivalent(SLC[idx[9] :: -1], SLC[9::-1])
            assert_slices_equivalent(SLC[: idx[9] : -1], SLC[:8:-1])
            assert_slices_equivalent(SLC[idx[13] : idx[9] : -1], SLC[13:8:-1])
            assert_slices_equivalent(SLC[idx[9] : idx[13] : -1], SLC[:0])
    def test_slice_with_zero_step_raises(self, indexer_sl):
        # A zero slice step is invalid for every indexer flavor.
        ser = Series(np.arange(20), index=_mklbl("A", 20))
        with pytest.raises(ValueError, match="slice step cannot be zero"):
            indexer_sl(ser)[::0]
    def test_indexing_assignment_dict_already_exists(self):
        # Assigning a dict to an existing row label maps keys to columns.
        index = Index([-5, 0, 5], name="z")
        df = DataFrame({"x": [1, 2, 6], "y": [2, 2, 8]}, index=index)
        expected = df.copy()
        rhs = {"x": 9, "y": 99}
        df.loc[5] = rhs
        expected.loc[5] = [9, 99]
        tm.assert_frame_equal(df, expected)
        # GH#38335 same thing, mixed dtypes
        df = DataFrame({"x": [1, 2, 6], "y": [2.0, 2.0, 8.0]}, index=index)
        df.loc[5] = rhs
        expected = DataFrame({"x": [1, 2, 9], "y": [2.0, 2.0, 99.0]}, index=index)
        tm.assert_frame_equal(df, expected)
    def test_indexing_dtypes_on_empty(self):
        # Check that .iloc returns correct dtypes GH9983
        df = DataFrame({"a": [1, 2, 3], "b": ["b", "b2", "b3"]})
        df2 = df.iloc[[], :]
        assert df2.loc[:, "a"].dtype == np.int64
        tm.assert_series_equal(df2.loc[:, "a"], df2.iloc[:, 0])
    @pytest.mark.parametrize("size", [5, 999999, 1000000])
    def test_range_in_series_indexing(self, size):
        # range can cause an indexing error
        # GH 11652
        s = Series(index=range(size), dtype=np.float64)
        s.loc[range(1)] = 42
        tm.assert_series_equal(s.loc[range(1)], Series(42.0, index=[0]))
        s.loc[range(2)] = 43
        tm.assert_series_equal(s.loc[range(2)], Series(43.0, index=[0, 1]))
    def test_partial_boolean_frame_indexing(self):
        # GH 17170
        # A boolean mask frame smaller than the target: unmasked cells
        # become NaN.
        df = DataFrame(
            np.arange(9.0).reshape(3, 3), index=list("abc"), columns=list("ABC")
        )
        index_df = DataFrame(1, index=list("ab"), columns=list("AB"))
        result = df[index_df.notnull()]
        expected = DataFrame(
            np.array([[0.0, 1.0, np.nan], [3.0, 4.0, np.nan], [np.nan] * 3]),
            index=list("abc"),
            columns=list("ABC"),
        )
        tm.assert_frame_equal(result, expected)
    def test_no_reference_cycle(self):
        # Touching the indexer accessors must not create reference cycles
        # that keep the frame alive after `del`.
        df = DataFrame({"a": [0, 1], "b": [2, 3]})
        for name in ("loc", "iloc", "at", "iat"):
            getattr(df, name)
        wr = weakref.ref(df)
        del df
        assert wr() is None
    def test_label_indexing_on_nan(self):
        # GH 32431
        # NaN must be usable as a lookup label in value_counts output.
        df = Series([1, "{1,2}", 1, None])
        vc = df.value_counts(dropna=False)
        result1 = vc.loc[np.nan]
        result2 = vc[np.nan]
        expected = 1
        assert result1 == expected
        assert result2 == expected
class TestDataframeNoneCoercion:
    """Assigning ``None`` into a DataFrame must coerce per column dtype:
    NaN for numeric, NaT for datetime, and a preserved ``None`` for object."""
    # (input column values, values expected after setting row 0 to None)
    EXPECTED_SINGLE_ROW_RESULTS = [
        # For numeric series, we should coerce to NaN.
        ([1, 2, 3], [np.nan, 2, 3]),
        ([1.0, 2.0, 3.0], [np.nan, 2.0, 3.0]),
        # For datetime series, we should coerce to NaT.
        (
            [datetime(2000, 1, 1), datetime(2000, 1, 2), datetime(2000, 1, 3)],
            [NaT, datetime(2000, 1, 2), datetime(2000, 1, 3)],
        ),
        # For objects, we should preserve the None value.
        (["foo", "bar", "baz"], [None, "bar", "baz"]),
    ]
    @pytest.mark.parametrize("expected", EXPECTED_SINGLE_ROW_RESULTS)
    def test_coercion_with_loc(self, expected):
        # loc with an explicit row/column pair
        start_data, expected_result = expected
        start_dataframe = DataFrame({"foo": start_data})
        start_dataframe.loc[0, ["foo"]] = None
        expected_dataframe = DataFrame({"foo": expected_result})
        tm.assert_frame_equal(start_dataframe, expected_dataframe)
    @pytest.mark.parametrize("expected", EXPECTED_SINGLE_ROW_RESULTS)
    def test_coercion_with_setitem_and_dataframe(self, expected):
        # plain __setitem__ with a boolean mask selecting row 0
        start_data, expected_result = expected
        start_dataframe = DataFrame({"foo": start_data})
        start_dataframe[start_dataframe["foo"] == start_dataframe["foo"][0]] = None
        expected_dataframe = DataFrame({"foo": expected_result})
        tm.assert_frame_equal(start_dataframe, expected_dataframe)
    @pytest.mark.parametrize("expected", EXPECTED_SINGLE_ROW_RESULTS)
    def test_none_coercion_loc_and_dataframe(self, expected):
        # loc with a boolean mask selecting row 0
        start_data, expected_result = expected
        start_dataframe = DataFrame({"foo": start_data})
        start_dataframe.loc[start_dataframe["foo"] == start_dataframe["foo"][0]] = None
        expected_dataframe = DataFrame({"foo": expected_result})
        tm.assert_frame_equal(start_dataframe, expected_dataframe)
    def test_none_coercion_mixed_dtypes(self):
        # Setting a whole row to None applies the per-dtype coercion above
        # column by column.
        start_dataframe = DataFrame(
            {
                "a": [1, 2, 3],
                "b": [1.0, 2.0, 3.0],
                "c": [datetime(2000, 1, 1), datetime(2000, 1, 2), datetime(2000, 1, 3)],
                "d": ["a", "b", "c"],
            }
        )
        start_dataframe.iloc[0] = None
        exp = DataFrame(
            {
                "a": [np.nan, 2, 3],
                "b": [np.nan, 2.0, 3.0],
                "c": [NaT, datetime(2000, 1, 2), datetime(2000, 1, 3)],
                "d": [None, "b", "c"],
            }
        )
        tm.assert_frame_equal(start_dataframe, exp)
class TestDatetimelikeCoercion:
    """Setting string values into datetime64/timedelta64 Series must keep the
    dtype and (where possible) write into the existing backing array."""
    def test_setitem_dt64_string_scalar(self, tz_naive_fixture, indexer_sli):
        # dispatching _can_hold_element to underlying DatetimeArray
        tz = tz_naive_fixture
        dti = date_range("2016-01-01", periods=3, tz=tz)
        ser = Series(dti)
        values = ser._values
        newval = "2018-01-01"
        # sanity check: the array itself accepts the string scalar
        values._validate_setitem_value(newval)
        indexer_sli(ser)[0] = newval
        if tz is None:
            # TODO(EA2D): we can make this no-copy in tz-naive case too
            assert ser.dtype == dti.dtype
            assert ser._values._data is values._data
        else:
            assert ser._values is values
    @pytest.mark.parametrize("box", [list, np.array, pd.array])
    @pytest.mark.parametrize(
        "key", [[0, 1], slice(0, 2), np.array([True, True, False])]
    )
    def test_setitem_dt64_string_values(self, tz_naive_fixture, indexer_sli, key, box):
        # dispatching _can_hold_element to underlying DatetimeArray
        tz = tz_naive_fixture
        if isinstance(key, slice) and indexer_sli is tm.loc:
            # loc slices are label-inclusive; shrink to cover the same rows
            key = slice(0, 1)
        dti = date_range("2016-01-01", periods=3, tz=tz)
        ser = Series(dti)
        values = ser._values
        newvals = box(["2019-01-01", "2010-01-02"])
        values._validate_setitem_value(newvals)
        indexer_sli(ser)[key] = newvals
        if tz is None:
            # TODO(EA2D): we can make this no-copy in tz-naive case too
            assert ser.dtype == dti.dtype
            assert ser._values._data is values._data
        else:
            assert ser._values is values
    @pytest.mark.parametrize("scalar", ["3 Days", offsets.Hour(4)])
    def test_setitem_td64_scalar(self, indexer_sli, scalar):
        # dispatching _can_hold_element to underlying TimedeltaArray
        tdi = timedelta_range("1 Day", periods=3)
        ser = Series(tdi)
        values = ser._values
        values._validate_setitem_value(scalar)
        indexer_sli(ser)[0] = scalar
        assert ser._values._data is values._data
    @pytest.mark.parametrize("box", [list, np.array, pd.array])
    @pytest.mark.parametrize(
        "key", [[0, 1], slice(0, 2), np.array([True, True, False])]
    )
    def test_setitem_td64_string_values(self, indexer_sli, key, box):
        # dispatching _can_hold_element to underlying TimedeltaArray
        if isinstance(key, slice) and indexer_sli is tm.loc:
            # loc slices are label-inclusive; shrink to cover the same rows
            key = slice(0, 1)
        tdi = timedelta_range("1 Day", periods=3)
        ser = Series(tdi)
        values = ser._values
        newvals = box(["10 Days", "44 hours"])
        values._validate_setitem_value(newvals)
        indexer_sli(ser)[key] = newvals
        assert ser._values._data is values._data
def test_extension_array_cross_section():
    # A row cross-section of a frame whose columns all share one extension
    # dtype must come back as a Series of that same extension dtype.
    frame = DataFrame(
        {
            "A": pd.array([1, 2], dtype="Int64"),
            "B": pd.array([3, 4], dtype="Int64"),
        },
        index=["a", "b"],
    )
    expected = Series(pd.array([1, 3], dtype="Int64"), index=["A", "B"], name="a")
    # Label-based and positional row selection must agree.
    for row in (frame.loc["a"], frame.iloc[0]):
        tm.assert_series_equal(row, expected)
def test_extension_array_cross_section_converts():
    # Cross-section of all-numeric columns (extension + numpy) -> numeric series.
    frame = DataFrame(
        {"A": pd.array([1, 2], dtype="Int64"), "B": np.array([1, 2])}, index=["a", "b"]
    )
    expected = Series([1, 1], dtype="Int64", index=["A", "B"], name="a")
    for row in (frame.loc["a"], frame.iloc[0]):
        tm.assert_series_equal(row, expected)
    # Mixed numeric/string columns fall back to an object-dtype series.
    frame = DataFrame(
        {"A": pd.array([1, 2], dtype="Int64"), "B": np.array(["a", "b"])},
        index=["a", "b"],
    )
    expected = Series([1, "a"], dtype=object, index=["A", "B"], name="a")
    for row in (frame.loc["a"], frame.iloc[0]):
        tm.assert_series_equal(row, expected)
| 33.44685 | 88 | 0.522806 |
3b22be3797b589f63b4849be485034dde8a632f5 | 2,036 | py | Python | python/fedml/simulation/mpi_p2p_mp/fedseg/FedSegTrainer.py | NCLPhD/FedML | ffa15262ee963b9c856f34f0b2202f4dfeb3a76b | [
"Apache-2.0"
] | null | null | null | python/fedml/simulation/mpi_p2p_mp/fedseg/FedSegTrainer.py | NCLPhD/FedML | ffa15262ee963b9c856f34f0b2202f4dfeb3a76b | [
"Apache-2.0"
] | null | null | null | python/fedml/simulation/mpi_p2p_mp/fedseg/FedSegTrainer.py | NCLPhD/FedML | ffa15262ee963b9c856f34f0b2202f4dfeb3a76b | [
"Apache-2.0"
] | null | null | null | from .utils import transform_tensor_to_list
class FedSegTrainer(object):
    """Per-client training wrapper for federated segmentation (FedSeg).

    Holds references to every client's local train/test partitions and
    delegates the actual optimization and evaluation to ``model_trainer``.
    ``update_dataset`` re-points the trainer at a different client between
    rounds, so one instance can serve many simulated clients.
    """

    def __init__(
        self,
        client_index,
        train_data_local_dict,
        train_data_local_num_dict,
        train_data_num,
        test_data_local_dict,
        device,
        model,
        args,
        model_trainer,
    ):
        # ``model_trainer`` performs the actual train/test; ``model`` itself
        # is not stored here (kept in the signature for interface
        # compatibility with the other simulation trainers).
        self.trainer = model_trainer
        self.train_data_local_dict = train_data_local_dict
        self.train_data_local_num_dict = train_data_local_num_dict
        self.test_data_local_dict = test_data_local_dict
        self.all_train_data_num = train_data_num
        self.device = device
        self.args = args
        self.round_idx = 0
        # Select the initial client's partitions; reuses update_dataset so the
        # selection logic lives in exactly one place.
        self.update_dataset(client_index)

    def update_model(self, weights):
        """Load aggregated global ``weights`` into the local model."""
        self.trainer.set_model_params(weights)

    def update_dataset(self, client_index):
        """Point this trainer at the data partitions of ``client_index``."""
        self.client_index = client_index
        self.train_local = self.train_data_local_dict[client_index]
        self.local_sample_number = self.train_data_local_num_dict[client_index]
        self.test_local = self.test_data_local_dict[client_index]

    def train(self):
        """Run one round of local training on the current client's data.

        Returns:
            tuple: ``(weights, local_sample_number)`` — the updated model
            parameters and the client's training-sample count, used by the
            server for weighted aggregation.
        """
        self.trainer.train(self.train_local, self.device)
        weights = self.trainer.get_model_params()
        if self.args.is_mobile == 1:
            # Mobile clients need plain lists instead of tensors for
            # serialization.
            weights = transform_tensor_to_list(weights)
        return weights, self.local_sample_number

    def test(self):
        """Evaluate on the local test split (and periodically on train).

        Train-set metrics are computed only every
        ``args.evaluation_frequency`` rounds — and never on round 0 —
        otherwise ``None`` is returned in their place.  Increments
        ``round_idx`` as a side effect.

        Returns:
            tuple: ``(train_evaluation_metrics, test_evaluation_metrics)``
        """
        train_evaluation_metrics = None
        if self.round_idx and self.round_idx % self.args.evaluation_frequency == 0:
            train_evaluation_metrics = self.trainer.test(self.train_local, self.device)
        test_evaluation_metrics = self.trainer.test(self.test_local, self.device)
        self.round_idx += 1
        return train_evaluation_metrics, test_evaluation_metrics
| 33.377049 | 87 | 0.696955 |
a4bf2969f4a77f7f8700add5cdf2a3e91af8e11a | 45,471 | py | Python | pytorch_lightning/trainer/__init__.py | wdmwhh/pytorch-lightning | 5d10a36762776c4b6f6a9c55b4e6bf7bd258137f | [
"Apache-2.0"
] | null | null | null | pytorch_lightning/trainer/__init__.py | wdmwhh/pytorch-lightning | 5d10a36762776c4b6f6a9c55b4e6bf7bd258137f | [
"Apache-2.0"
] | null | null | null | pytorch_lightning/trainer/__init__.py | wdmwhh/pytorch-lightning | 5d10a36762776c4b6f6a9c55b4e6bf7bd258137f | [
"Apache-2.0"
] | null | null | null | # Copyright The PyTorch Lightning team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
.. testsetup:: *
import os
from pytorch_lightning.trainer.trainer import Trainer
from pytorch_lightning.core.lightning import LightningModule
from pytorch_lightning.utilities.seed import seed_everything
Once you've organized your PyTorch code into a LightningModule,
the Trainer automates everything else.
.. raw:: html
<video width="100%" max-width="800px" controls autoplay
src="https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/pl_docs/pt_trainer_mov.m4v"></video>
|
This abstraction achieves the following:
1. You maintain control over all aspects via PyTorch code without an added abstraction.
2. The trainer uses best practices embedded by contributors and users
from top AI labs such as Facebook AI Research, NYU, MIT, Stanford, etc...
3. The trainer allows overriding any key part that you don't want automated.
|
-----------
Basic use
---------
This is the basic use of the trainer:
.. code-block:: python
model = MyLightningModule()
trainer = Trainer()
trainer.fit(model, train_dataloader, val_dataloader)
--------
Trainer in Python scripts
-------------------------
In Python scripts, it's recommended you use a main function to call the Trainer.
.. code-block:: python
from argparse import ArgumentParser
def main(hparams):
model = LightningModule()
trainer = Trainer(gpus=hparams.gpus)
trainer.fit(model)
if __name__ == '__main__':
parser = ArgumentParser()
parser.add_argument('--gpus', default=None)
args = parser.parse_args()
main(args)
So you can run it like so:
.. code-block:: bash
python main.py --gpus 2
.. note::
Pro-tip: You don't need to define all flags manually. Lightning can add them automatically
.. code-block:: python
from argparse import ArgumentParser
def main(args):
model = LightningModule()
trainer = Trainer.from_argparse_args(args)
trainer.fit(model)
if __name__ == '__main__':
parser = ArgumentParser()
parser = Trainer.add_argparse_args(parser)
args = parser.parse_args()
main(args)
So you can run it like so:
.. code-block:: bash
python main.py --gpus 2 --max_steps 10 --limit_train_batches 10 --any_trainer_arg x
.. note::
If you want to stop a training run early, you can press "Ctrl + C" on your keyboard.
The trainer will catch the `KeyboardInterrupt` and attempt a graceful shutdown, including
running callbacks such as `on_train_end`. The trainer object will also set an attribute
`interrupted` to `True` in such cases. If you have a callback which shuts down compute
resources, for example, you can conditionally run the shutdown logic for only uninterrupted runs.
------------
Testing
-------
Once you're done training, feel free to run the test set!
(Only right before publishing your paper or pushing to production)
.. code-block:: python
trainer.test(test_dataloader=test_dataloader)
------------
Deployment / prediction
-----------------------
You just trained a LightningModule which is also just a torch.nn.Module.
Use it to do whatever!
.. code-block:: python
# load model
pretrained_model = LightningModule.load_from_checkpoint(PATH)
pretrained_model.freeze()
# use it for finetuning
def forward(self, x):
features = pretrained_model(x)
classes = classifier(features)
# or for prediction
out = pretrained_model(x)
    api_write({'response': out})
You may wish to run the model on a variety of devices. Instead of moving the data
manually to the correct device, decorate the forward method (or any other method you use for inference)
with :func:`~pytorch_lightning.core.decorators.auto_move_data` and Lightning will take care of the rest.
------------
Reproducibility
---------------
To ensure full reproducibility from run to run you need to set seeds for pseudo-random generators,
and set ``deterministic`` flag in ``Trainer``.
Example::
from pytorch_lightning import Trainer, seed_everything
seed_everything(42)
# sets seeds for numpy, torch, python.random and PYTHONHASHSEED.
model = Model()
trainer = Trainer(deterministic=True)
-------
Trainer flags
-------------
accelerator
^^^^^^^^^^^
.. raw:: html
<video width="50%" max-width="400px" controls
poster="https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/pl_docs/trainer_flags/thumb/distributed_backend.jpg"
src="https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/pl_docs/trainer_flags/distributed_backend.mp4"></video>
|
The accelerator backend to use (previously known as distributed_backend).
- (```dp```) is DataParallel (split batch among GPUs of same machine)
- (```ddp```) is DistributedDataParallel (each gpu on each node trains, and syncs grads)
- (```ddp_cpu```) is DistributedDataParallel on CPU (same as `ddp`, but does not use GPUs.
Useful for multi-node CPU training or single-node debugging. Note that this will **not** give
a speedup on a single node, since Torch already makes effient use of multiple CPUs on a single
machine.)
- (```ddp2```) dp on node, ddp across nodes. Useful for things like increasing
the number of negative samples
.. testcode::
# default used by the Trainer
trainer = Trainer(distributed_backend=None)
Example::
# dp = DataParallel
trainer = Trainer(gpus=2, distributed_backend='dp')
# ddp = DistributedDataParallel
trainer = Trainer(gpus=2, num_nodes=2, distributed_backend='ddp')
# ddp2 = DistributedDataParallel + dp
trainer = Trainer(gpus=2, num_nodes=2, distributed_backend='ddp2')
.. note:: this option does not apply to TPU. TPUs use ```ddp``` by default (over each core)
You can also modify hardware behavior by subclassing an existing accelerator to adjust for your needs.
Example::
class MyOwnDDP(DDPAccelerator):
...
Trainer(accelerator=MyOwnDDP())
.. warning:: Passing in custom accelerators is experimental but work is in progress to enable full compatibility.
accumulate_grad_batches
^^^^^^^^^^^^^^^^^^^^^^^
.. raw:: html
<video width="50%" max-width="400px" controls
poster="https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/pl_docs/trainer_flags/thumb/accumulate_grad_batches.jpg"
src="https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/pl_docs/trainer_flags/accumulate_grad_batches.mp4"></video>
|
Accumulates grads every k batches or as set up in the dict.
Trainer also calls ``optimizer.step()`` for the last indivisible step number.
.. testcode::
# default used by the Trainer (no accumulation)
trainer = Trainer(accumulate_grad_batches=1)
Example::
# accumulate every 4 batches (effective batch size is batch*4)
trainer = Trainer(accumulate_grad_batches=4)
# no accumulation for epochs 1-4. accumulate 3 for epochs 5-10. accumulate 20 after that
trainer = Trainer(accumulate_grad_batches={5: 3, 10: 20})
amp_backend
^^^^^^^^^^^
.. raw:: html
<video width="50%" max-width="400px" controls
poster="https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/pl_docs/trainer_flags/thumb/amp_backend.jpg"
src="https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/pl_docs/trainer_flags/amp_backend.mp4"></video>
|
Use PyTorch AMP ('native') (available PyTorch 1.6+), or NVIDIA apex ('apex').
.. testcode::
# using PyTorch built-in AMP, default used by the Trainer
trainer = Trainer(amp_backend='native')
# using NVIDIA Apex
trainer = Trainer(amp_backend='apex')
amp_level
^^^^^^^^^
.. raw:: html
<video width="50%" max-width="400px" controls
poster="https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/pl_docs/trainer_flags/thumb/amp_level.jpg"
src="https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/pl_docs/trainer_flags/amp_level.mp4"></video>
|
The optimization level to use (O1, O2, etc...)
for 16-bit GPU precision (using NVIDIA apex under the hood).
Check `NVIDIA apex docs <https://nvidia.github.io/apex/amp.html#opt-levels>`_ for level
Example::
# default used by the Trainer
trainer = Trainer(amp_level='O2')
automatic_optimization
^^^^^^^^^^^^^^^^^^^^^^
When set to False, Lightning does not automate the optimization process. This means you are responsible for your own
optimizer behavior
Example::
def training_step(self, batch, batch_idx):
opt = self.optimizers()
loss = ...
self.manual_backward(loss, opt)
opt.step()
opt.zero_grad()
This is not recommended when using a single optimizer, instead it's recommended when using 2+ optimizers
AND you are an expert user. Most useful for research like RL, sparse coding and GAN research.
In the multi-optimizer case, ignore the optimizer_idx flag and use the optimizers directly
Example::
def training_step(self, batch, batch_idx, optimizer_idx):
(opt_a, opt_b) = self.optimizers()
gen_loss = ...
self.manual_backward(gen_loss, opt_a)
opt_a.step()
opt_a.zero_grad()
disc_loss = ...
self.manual_backward(disc_loss, opt_b)
opt_b.step()
opt_b.zero_grad()
auto_scale_batch_size
^^^^^^^^^^^^^^^^^^^^^
.. raw:: html
<video width="50%" max-width="400px" controls
poster="https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/pl_docs/trainer_flags/thumb/auto_scale%E2%80%A8_batch_size.jpg"
src="https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/pl_docs/trainer_flags/auto_scale_batch_size.mp4"></video>
|
Automatically tries to find the largest batch size that fits into memory,
before any training.
.. code-block::
# default used by the Trainer (no scaling of batch size)
trainer = Trainer(auto_scale_batch_size=None)
# run batch size scaling, result overrides hparams.batch_size
trainer = Trainer(auto_scale_batch_size='binsearch')
# call tune to find the batch size
trainer.tune(model)
auto_select_gpus
^^^^^^^^^^^^^^^^
.. raw:: html
<video width="50%" max-width="400px" controls
poster="https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/pl_docs/trainer_flags/thumb/auto_select+_gpus.jpg"
src="https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/pl_docs/trainer_flags/auto_select_gpus.mp4"></video>
|
If enabled and `gpus` is an integer, pick available gpus automatically.
This is especially useful when GPUs are configured to be in "exclusive mode",
such that only one process at a time can access them.
Example::
# no auto selection (picks first 2 gpus on system, may fail if other process is occupying)
trainer = Trainer(gpus=2, auto_select_gpus=False)
# enable auto selection (will find two available gpus on system)
trainer = Trainer(gpus=2, auto_select_gpus=True)
auto_lr_find
^^^^^^^^^^^^
.. raw:: html
<video width="50%" max-width="400px" controls
poster="https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/pl_docs/trainer_flags/thumb/auto_lr_find.jpg"
src="https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/pl_docs/trainer_flags/auto_lr_find.mp4"></video>
|
Runs a learning rate finder algorithm (see this `paper <https://arxiv.org/abs/1506.01186>`_)
when calling trainer.tune(), to find optimal initial learning rate.
.. code-block:: python
# default used by the Trainer (no learning rate finder)
trainer = Trainer(auto_lr_find=False)
Example::
# run learning rate finder, results override hparams.learning_rate
trainer = Trainer(auto_lr_find=True)
# call tune to find the lr
trainer.tune(model)
Example::
# run learning rate finder, results override hparams.my_lr_arg
trainer = Trainer(auto_lr_find='my_lr_arg')
# call tune to find the lr
trainer.tune(model)
.. note::
See the :ref:`learning rate finder guide <lr_finder>`.
benchmark
^^^^^^^^^
.. raw:: html
<video width="50%" max-width="400px" controls
poster="https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/pl_docs/trainer_flags/thumb/benchmark.jpg"
src="https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/pl_docs/trainer_flags/benchmark.mp4"></video>
|
If true enables cudnn.benchmark.
This flag is likely to increase the speed of your system if your
input sizes don't change. However, if it does, then it will likely
make your system slower.
The speedup comes from allowing the cudnn auto-tuner to find the best
algorithm for the hardware `[see discussion here]
<https://discuss.pytorch.org/t/what-does-torch-backends-cudnn-benchmark-do/5936>`_.
Example::
# default used by the Trainer
trainer = Trainer(benchmark=False)
deterministic
^^^^^^^^^^^^^
.. raw:: html
<video width="50%" max-width="400px" controls
poster="https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/pl_docs/trainer_flags/thumb/deterministic.jpg"
src="https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/pl_docs/trainer_flags/deterministic.mp4"></video>
|
If true enables cudnn.deterministic.
Might make your system slower, but ensures reproducibility.
Also sets ``$HOROVOD_FUSION_THRESHOLD=0``.
For more info check `[pytorch docs]
<https://pytorch.org/docs/stable/notes/randomness.html>`_.
Example::
# default used by the Trainer
trainer = Trainer(deterministic=False)
callbacks
^^^^^^^^^
.. raw:: html
<video width="50%" max-width="400px" controls
poster="https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/pl_docs/trainer_flags/thumb/callbacks.jpg"
src="https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/pl_docs/trainer_flags/callbacks.mp4"></video>
|
Add a list of :class:`~pytorch_lightning.callbacks.Callback`. These callbacks DO NOT replace the explicit callbacks
(loggers or :class:`~pytorch_lightning.callbacks.ModelCheckpoint`).
.. note:: Only user defined callbacks (ie: Not :class:`~pytorch_lightning.callbacks.ModelCheckpoint`)
.. code-block:: python
# a list of callbacks
callbacks = [PrintCallback()]
trainer = Trainer(callbacks=callbacks)
Example::
from pytorch_lightning.callbacks import Callback
class PrintCallback(Callback):
def on_train_start(self, trainer, pl_module):
print("Training is started!")
def on_train_end(self, trainer, pl_module):
print("Training is done.")
check_val_every_n_epoch
^^^^^^^^^^^^^^^^^^^^^^^
.. raw:: html
<video width="50%" max-width="400px" controls
poster="https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/pl_docs/trainer_flags/thumb/check_val_every_n_epoch.jpg"
src="https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/pl_docs/trainer_flags/check_val_every_n_epoch.mp4"></video>
|
Check val every n train epochs.
Example::
# default used by the Trainer
trainer = Trainer(check_val_every_n_epoch=1)
# run val loop every 10 training epochs
trainer = Trainer(check_val_every_n_epoch=10)
checkpoint_callback
^^^^^^^^^^^^^^^^^^^
.. raw:: html
<video width="50%" max-width="400px" controls
poster="https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/pl_docs/trainer_flags/thumb/checkpoint_callback.jpg"
src="https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/pl_docs/trainer_flags/checkpoint_callback.mp4"></video>
|
Pass in a callback for checkpointing. Checkpoints capture the exact value of all parameters used by a model.
By default Lightning saves a checkpoint for you in your current working directory, with the state of your last training epoch,
but you can override the default behavior by Initializing the :class:`~pytorch_lightning.callbacks.ModelCheckpoint` callback,
and passing it to :class:`~pytorch_lightning.trainer.Trainer` `checkpoint_callback` flag.
.. code-block:: python
from pytorch_lightning.callbacks import ModelCheckpoint
# default used by the Trainer
checkpoint_callback = ModelCheckpoint(
dirpath=os.getcwd(),
save_top_k=True,
verbose=True,
monitor='checkpoint_on',
mode='min',
prefix=''
)
trainer = Trainer(checkpoint_callback=checkpoint_callback)
To disable automatic checkpointing, set this to `False`.
.. code-block:: python
trainer = Trainer(checkpoint_callback=False)
See also :ref:`Saving and Loading Weights <weights_loading>`.
default_root_dir
^^^^^^^^^^^^^^^^
.. raw:: html
<video width="50%" max-width="400px" controls
poster="https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/pl_docs/trainer_flags/thumb/default%E2%80%A8_root_dir.jpg"
src="https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/pl_docs/trainer_flags/default_root_dir.mp4"></video>
|
Default path for logs and weights when no logger or
:class:`pytorch_lightning.callbacks.ModelCheckpoint` callback passed. On
certain clusters you might want to separate where logs and checkpoints are
stored. If you don't then use this argument for convenience. Paths can be local
paths or remote paths such as `s3://bucket/path` or 'hdfs://path/'. Credentials
will need to be set up to use remote filepaths.
Example::
# default used by the Trainer
    trainer = Trainer(default_root_dir=os.getcwd())
distributed_backend
^^^^^^^^^^^^^^^^^^^
This has been renamed "accelerator".
fast_dev_run
^^^^^^^^^^^^
.. raw:: html
<video width="50%" max-width="400px" controls
poster="https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/pl_docs/trainer_flags/thumb/fast_dev_run.jpg"
src="https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/pl_docs/trainer_flags/fast_dev_run.mp4"></video>
|
.. raw:: html
<video width="50%" max-width="400px" controls
poster="https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/pl_docs/trainer_flags/thumb/fast_dev_run.jpg"
src="https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/pl_docs/trainer_flags/fast_dev_run.mp4"></video>
|
Runs 1 batch of train, test and val to find any bugs (ie: a sort of unit test).
Under the hood the pseudocode looks like this:
.. code-block:: python
# loading
__init__()
prepare_data
# test training step
training_batch = next(train_dataloader)
training_step(training_batch)
# test val step
val_batch = next(val_dataloader)
out = validation_step(val_batch)
validation_epoch_end([out])
.. testcode::
# default used by the Trainer
trainer = Trainer(fast_dev_run=False)
# runs 1 train, val, test batch and program ends
trainer = Trainer(fast_dev_run=True)
gpus
^^^^
.. raw:: html
<video width="50%" max-width="400px" controls
poster="https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/pl_docs/trainer_flags/thumb/gpus.jpg"
src="https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/pl_docs/trainer_flags/gpus.mp4"></video>
|
- Number of GPUs to train on (int)
- or which GPUs to train on (list)
- can handle strings
.. testcode::
# default used by the Trainer (ie: train on CPU)
trainer = Trainer(gpus=None)
# equivalent
trainer = Trainer(gpus=0)
Example::
# int: train on 2 gpus
trainer = Trainer(gpus=2)
# list: train on GPUs 1, 4 (by bus ordering)
trainer = Trainer(gpus=[1, 4])
trainer = Trainer(gpus='1, 4') # equivalent
# -1: train on all gpus
trainer = Trainer(gpus=-1)
trainer = Trainer(gpus='-1') # equivalent
# combine with num_nodes to train on multiple GPUs across nodes
# uses 8 gpus in total
trainer = Trainer(gpus=2, num_nodes=4)
# train only on GPUs 1 and 4 across nodes
trainer = Trainer(gpus=[1, 4], num_nodes=4)
See Also:
- :ref:`Multi-GPU training guide <multi_gpu>`.
gradient_clip_val
^^^^^^^^^^^^^^^^^
.. raw:: html
<video width="50%" max-width="400px" controls
poster="https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/pl_docs/trainer_flags/thumb/gradient+_clip_val.jpg"
src="https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/pl_docs/trainer_flags/gradient_clip_val.mp4"></video>
|
Gradient clipping value
- 0 means don't clip.
.. testcode::
# default used by the Trainer
trainer = Trainer(gradient_clip_val=0.0)
limit_test_batches
^^^^^^^^^^^^^^^^^^
.. raw:: html
<video width="50%" max-width="400px" controls
poster="https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/pl_docs/trainer_flags/thumb/limit_test_batches.jpg"
src="https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/pl_docs/trainer_flags/limit_batches.mp4"></video>
|
How much of test dataset to check.
.. testcode::
# default used by the Trainer
trainer = Trainer(limit_test_batches=1.0)
# run through only 25% of the test set each epoch
trainer = Trainer(limit_test_batches=0.25)
# run for only 10 batches
trainer = Trainer(limit_test_batches=10)
In the case of multiple test dataloaders, the limit applies to each dataloader individually.
limit_val_batches
^^^^^^^^^^^^^^^^^
.. raw:: html
<video width="50%" max-width="400px" controls
poster="https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/pl_docs/trainer_flags/thumb/limit_val_batches.jpg"
src="https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/pl_docs/trainer_flags/limit_batches.mp4"></video>
|
How much of validation dataset to check.
Useful when debugging or testing something that happens at the end of an epoch.
.. testcode::
# default used by the Trainer
trainer = Trainer(limit_val_batches=1.0)
# run through only 25% of the validation set each epoch
trainer = Trainer(limit_val_batches=0.25)
# run for only 10 batches
trainer = Trainer(limit_val_batches=10)
In the case of multiple validation dataloaders, the limit applies to each dataloader individually.
log_gpu_memory
^^^^^^^^^^^^^^
.. raw:: html
<video width="50%" max-width="400px" controls
poster="https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/pl_docs/trainer_flags/thumb/log_gpu_memory.jpg"
src="https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/pl_docs/trainer_flags/log_gpu_memory.mp4"></video>
|
Options:
- None
- 'min_max'
- 'all'
.. testcode::
# default used by the Trainer
trainer = Trainer(log_gpu_memory=None)
# log all the GPUs (on master node only)
trainer = Trainer(log_gpu_memory='all')
# log only the min and max memory on the master node
trainer = Trainer(log_gpu_memory='min_max')
.. note:: Might slow performance because it uses the output of nvidia-smi.
flush_logs_every_n_steps
^^^^^^^^^^^^^^^^^^^^^^^^
.. raw:: html
<video width="50%" max-width="400px" controls
poster="https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/pl_docs/trainer_flags/thumb/flush_logs%E2%80%A8_every_n_steps.jpg"
src="https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/pl_docs/trainer_flags/flush_logs_every_n_steps.mp4"></video>
|
Writes logs to disk this often.
.. testcode::
# default used by the Trainer
trainer = Trainer(flush_logs_every_n_steps=100)
See Also:
- :ref:`logging`
logger
^^^^^^
.. raw:: html
<video width="50%" max-width="400px" controls
poster="https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/pl_docs/trainer_flags/thumb/logger.jpg"
src="https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/pl_docs/trainer_flags/logger.mp4"></video>
|
:ref:`Logger <loggers>` (or iterable collection of loggers) for experiment tracking.
.. testcode::
from pytorch_lightning.loggers import TensorBoardLogger
# default logger used by trainer
logger = TensorBoardLogger(
save_dir=os.getcwd(),
version=1,
name='lightning_logs'
)
Trainer(logger=logger)
max_epochs
^^^^^^^^^^
.. raw:: html
<video width="50%" max-width="400px" controls
poster="https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/pl_docs/trainer_flags/thumb/max_epochs.jpg"
src="https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/pl_docs/trainer_flags/min_max_epochs.mp4"></video>
|
Stop training once this number of epochs is reached
.. testcode::
# default used by the Trainer
trainer = Trainer(max_epochs=1000)
min_epochs
^^^^^^^^^^
.. raw:: html
<video width="50%" max-width="400px" controls
poster="https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/pl_docs/trainer_flags/thumb/min_epochs.jpg"
src="https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/pl_docs/trainer_flags/min_max_epochs.mp4"></video>
|
Force training for at least this many epochs
.. testcode::
# default used by the Trainer
trainer = Trainer(min_epochs=1)
max_steps
^^^^^^^^^
.. raw:: html
<video width="50%" max-width="400px" controls
poster="https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/pl_docs/trainer_flags/thumb/max_steps.jpg"
src="https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/pl_docs/trainer_flags/min_max_steps.mp4"></video>
|
Stop training after this number of steps
Training will stop when either max_steps or max_epochs is reached (whichever comes first).
.. testcode::
# Default (disabled)
trainer = Trainer(max_steps=None)
# Stop after 100 steps
trainer = Trainer(max_steps=100)
min_steps
^^^^^^^^^
.. raw:: html
<video width="50%" max-width="400px" controls
poster="https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/pl_docs/trainer_flags/thumb/min_steps.jpg"
src="https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/pl_docs/trainer_flags/min_max_steps.mp4"></video>
|
Force training for at least this number of steps.
Trainer will train model for at least min_steps or min_epochs (latest).
.. testcode::
# Default (disabled)
trainer = Trainer(min_steps=None)
# Run at least for 100 steps (disable min_epochs)
trainer = Trainer(min_steps=100, min_epochs=0)
num_nodes
^^^^^^^^^
.. raw:: html
<video width="50%" max-width="400px" controls
poster="https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/pl_docs/trainer_flags/thumb/num_nodes.jpg"
src="https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/pl_docs/trainer_flags/num_nodes.mp4"></video>
|
Number of GPU nodes for distributed training.
.. testcode::
# default used by the Trainer
trainer = Trainer(num_nodes=1)
# to train on 8 nodes
trainer = Trainer(num_nodes=8)
num_processes
^^^^^^^^^^^^^
.. raw:: html
<video width="50%" max-width="400px" controls
poster="https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/pl_docs/trainer_flags/thumb/num_processes.jpg"
src="https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/pl_docs/trainer_flags/num_processes.mp4"></video>
|
Number of processes to train with. Automatically set to the number of GPUs
when using ``distributed_backend="ddp"``. Set to a number greater than 1 when
using ``distributed_backend="ddp_cpu"`` to mimic distributed training on a
machine without GPUs. This is useful for debugging, but **will not** provide
any speedup, since single-process Torch already makes efficient use of multiple
CPUs.
.. testcode::
# Simulate DDP for debugging on your GPU-less laptop
trainer = Trainer(distributed_backend="ddp_cpu", num_processes=2)
num_sanity_val_steps
^^^^^^^^^^^^^^^^^^^^
.. raw:: html
<video width="50%" max-width="400px" controls
poster="https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/pl_docs/trainer_flags/thumb/num_sanity%E2%80%A8_val_steps.jpg"
src="https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/pl_docs/trainer_flags/num_sanity_val_steps.mp4"></video>
|
Sanity check runs n batches of val before starting the training routine.
This catches any bugs in your validation without having to wait for the first validation check.
The Trainer uses 2 steps by default. Turn it off or modify it here.
.. testcode::
# default used by the Trainer
trainer = Trainer(num_sanity_val_steps=2)
# turn it off
trainer = Trainer(num_sanity_val_steps=0)
# check all validation data
trainer = Trainer(num_sanity_val_steps=-1)
This option will reset the validation dataloader unless ``num_sanity_val_steps=0``.
plugins
^^^^^^^
.. raw:: html
<video width="50%" max-width="400px" controls
poster="https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/pl_docs/trainer_flags/thumb/cluster_environment.jpg"
src="https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/pl_docs/trainer_flags/cluster_environment.mp4"></video>
|
Plugins allow you to connect arbitrary backends, precision libraries, SLURM, etc... For example:
- DDP
- SLURM
- TorchElastic
- Apex
To define your own behavior, subclass the relevant class and pass it in. Here's an example linking up your own cluster.
.. code-block:: python
from pytorch_lightning.cluster_environments import cluster_environment
class MyCluster(ClusterEnvironment):
def master_address(self):
return your_master_address
def master_port(self):
return your_master_port
def world_size(self):
return the_world_size
trainer = Trainer(cluster_environment=cluster_environment())
prepare_data_per_node
^^^^^^^^^^^^^^^^^^^^^
.. raw:: html
<video width="50%" max-width="400px" controls
poster="https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/pl_docs/trainer_flags/thumb/prepare_data_per_node.jpg"
src="https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/pl_docs/trainer_flags/prepare_data_per_node.mp4"></video>
|
If True will call `prepare_data()` on LOCAL_RANK=0 for every node.
If False will only call from NODE_RANK=0, LOCAL_RANK=0
.. testcode::
# default
Trainer(prepare_data_per_node=True)
# use only NODE_RANK=0, LOCAL_RANK=0
Trainer(prepare_data_per_node=False)
tpu_cores
^^^^^^^^^
.. raw:: html
<video width="50%" max-width="400px" controls
poster="https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/pl_docs/trainer_flags/thumb/tpu_cores.jpg"
src="https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/pl_docs/trainer_flags/tpu_cores.mp4"></video>
|
- How many TPU cores to train on (1 or 8).
- Which TPU core to train on [1-8]
A single TPU v2 or v3 has 8 cores. A TPU pod has
up to 2048 cores. A slice of a POD means you get as many cores
as you request.
Your effective batch size is batch_size * total tpu cores.
.. note:: No need to add a DistributedDataSampler, Lightning automatically does it for you.
This parameter can be either 1 or 8.
.. testcode::
# your_trainer_file.py
# default used by the Trainer (ie: train on CPU)
trainer = Trainer(tpu_cores=None)
# int: train on a single core
trainer = Trainer(tpu_cores=1)
# list: train on a single selected core
trainer = Trainer(tpu_cores=[2])
# int: train on all 8 cores
trainer = Trainer(tpu_cores=8)
# for 8+ cores must submit via xla script with
# a max of 8 cores specified. The XLA script
# will duplicate script onto each TPU in the POD
trainer = Trainer(tpu_cores=8)
To train on more than 8 cores (ie: a POD),
submit this script using the xla_dist script.
Example::
python -m torch_xla.distributed.xla_dist
--tpu=$TPU_POD_NAME
--conda-env=torch-xla-nightly
--env=XLA_USE_BF16=1
-- python your_trainer_file.py
overfit_batches
^^^^^^^^^^^^^^^
.. raw:: html
<video width="50%" max-width="400px" controls
poster="https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/pl_docs/trainer_flags/thumb/overfit_batches.jpg"
src="https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/pl_docs/trainer_flags/overfit_batches.mp4"></video>
|
Uses this much data of the training set. If nonzero, will use the same training set for validation and testing.
If the training dataloaders have `shuffle=True`, Lightning will automatically disable it.
Useful for quickly debugging or trying to overfit on purpose.
.. testcode::
# default used by the Trainer
trainer = Trainer(overfit_batches=0.0)
# use only 1% of the train set (and use the train set for val and test)
trainer = Trainer(overfit_batches=0.01)
# overfit on 10 of the same batches
trainer = Trainer(overfit_batches=10)
precision
^^^^^^^^^
.. raw:: html
<video width="50%" max-width="400px" controls
poster="https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/pl_docs/trainer_flags/thumb/precision.jpg"
src="https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/pl_docs/trainer_flags/precision.mp4"></video>
|
Full precision (32), half precision (16).
Can be used on CPU, GPU or TPUs.
If used on TPU will use torch.bfloat16 but tensor printing
will still show torch.float32.
.. testcode::
:skipif: not APEX_AVAILABLE and not NATIVE_AMP_AVALAIBLE
# default used by the Trainer
trainer = Trainer(precision=32)
# 16-bit precision
trainer = Trainer(precision=16)
Example::
# one day
trainer = Trainer(precision=8|4|2)
process_position
^^^^^^^^^^^^^^^^
.. raw:: html
<video width="50%" max-width="400px" controls
poster="https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/pl_docs/trainer_flags/thumb/process_position.jpg"
src="https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/pl_docs/trainer_flags/process_position.mp4"></video>
|
Orders the progress bar. Useful when running multiple trainers on the same node.
.. testcode::
# default used by the Trainer
trainer = Trainer(process_position=0)
Note:
This argument is ignored if a custom callback is passed to :paramref:`~Trainer.callbacks`.
profiler
^^^^^^^^
.. raw:: html
<video width="50%" max-width="400px" controls
poster="https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/pl_docs/trainer_flags/thumb/profiler.jpg"
src="https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/pl_docs/trainer_flags/profiler.mp4"></video>
|
To profile individual steps during training and assist in identifying bottlenecks.
See the :ref:`profiler documentation <profiler>`. for more details.
.. testcode::
from pytorch_lightning.profiler import SimpleProfiler, AdvancedProfiler
# default used by the Trainer
trainer = Trainer(profiler=None)
# to profile standard training events, equivalent to `profiler=SimpleProfiler()`
trainer = Trainer(profiler="simple")
# advanced profiler for function-level stats, equivalent to `profiler=AdvancedProfiler()`
trainer = Trainer(profiler="advanced")
progress_bar_refresh_rate
^^^^^^^^^^^^^^^^^^^^^^^^^
.. raw:: html
<video width="50%" max-width="400px" controls
poster="https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/pl_docs/trainer_flags/thumb/progress_bar%E2%80%A8_refresh_rate.jpg"
src="https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/pl_docs/trainer_flags/progress_bar_refresh_rate.mp4"></video>
|
How often to refresh progress bar (in steps).
In notebooks, faster refresh rates (lower number) is known to crash them
because of their screen refresh rates, so raise it to 50 or more.
.. testcode::
# default used by the Trainer
trainer = Trainer(progress_bar_refresh_rate=1)
# disable progress bar
trainer = Trainer(progress_bar_refresh_rate=0)
Note:
This argument is ignored if a custom callback is passed to :paramref:`~Trainer.callbacks`.
reload_dataloaders_every_epoch
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
.. raw:: html
<video width="50%" max-width="400px" controls
poster="https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/pl_docs/trainer_flags/thumb/reload_%E2%80%A8dataloaders_%E2%80%A8every_epoch.jpg"
src="https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/pl_docs/trainer_flags/reload_dataloaders_every_epoch.mp4"></video>
|
Set to True to reload dataloaders every epoch.
.. code-block:: python
# if False (default)
train_loader = model.train_dataloader()
for epoch in epochs:
for batch in train_loader:
...
# if True
for epoch in epochs:
train_loader = model.train_dataloader()
for batch in train_loader:
replace_sampler_ddp
^^^^^^^^^^^^^^^^^^^
.. raw:: html
<video width="50%" max-width="400px" controls
poster="https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/pl_docs/trainer_flags/thumb/replace_sampler_ddp.jpg"
src="https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/pl_docs/trainer_flags/replace_sampler_ddp.mp4"></video>
|
Enables auto adding of distributed sampler. By default it will add ``shuffle=True``
for train sampler and ``shuffle=False`` for val/test sampler. If you want to customize
it, you can set ``replace_sampler_ddp=False`` and add your own distributed sampler.
If ``replace_sampler_ddp=True`` and a distributed sampler was already added,
Lightning will not replace the existing one.
.. testcode::
# default used by the Trainer
trainer = Trainer(replace_sampler_ddp=True)
By setting to False, you have to add your own distributed sampler:
.. code-block:: python
# default used by the Trainer
sampler = torch.utils.data.distributed.DistributedSampler(dataset, shuffle=True)
dataloader = DataLoader(dataset, batch_size=32, sampler=sampler)
resume_from_checkpoint
^^^^^^^^^^^^^^^^^^^^^^
.. raw:: html
<video width="50%" max-width="400px" controls
poster="https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/pl_docs/trainer_flags/thumb/resume_from_checkpoint.jpg"
src="https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/pl_docs/trainer_flags/resume_from_checkpoint.mp4"></video>
|
To resume training from a specific checkpoint pass in the path here.
.. testcode::
# default used by the Trainer
trainer = Trainer(resume_from_checkpoint=None)
# resume from a specific checkpoint
trainer = Trainer(resume_from_checkpoint='some/path/to/my_checkpoint.ckpt')
log_every_n_steps
^^^^^^^^^^^^^^^^^
.. raw:: html
<video width="50%" max-width="400px" controls
poster="https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/pl_docs/trainer_flags/thumb/log_every_n_steps.jpg"
src="https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/pl_docs/trainer_flags/log_every_n_steps.mp4"></video>
|
How often to add logging rows (does not write to disk)
.. testcode::
# default used by the Trainer
trainer = Trainer(log_every_n_steps=50)
See Also:
- :ref:`logging`
sync_batchnorm
^^^^^^^^^^^^^^
.. raw:: html
<video width="50%" max-width="400px" controls
poster="https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/pl_docs/trainer_flags/thumb/sync_batchnorm.jpg"
src="https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/pl_docs/trainer_flags/sync_batchnorm.mp4"></video>
|
Enable synchronization between batchnorm layers across all GPUs.
.. testcode::
trainer = Trainer(sync_batchnorm=True)
track_grad_norm
^^^^^^^^^^^^^^^
.. raw:: html
<video width="50%" max-width="400px" controls
poster="https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/pl_docs/trainer_flags/thumb/track_grad_norm.jpg"
src="https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/pl_docs/trainer_flags/track_grad_norm.mp4"></video>
|
- no tracking (-1)
- Otherwise tracks that norm (2 for 2-norm)
.. testcode::
# default used by the Trainer
trainer = Trainer(track_grad_norm=-1)
# track the 2-norm
trainer = Trainer(track_grad_norm=2)
limit_train_batches
^^^^^^^^^^^^^^^^^^^
.. raw:: html
<video width="50%" max-width="400px" controls
poster="https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/pl_docs/trainer_flags/thumb/limit_train_batches.jpg"
src="https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/pl_docs/trainer_flags/limit_batches.mp4"></video>
|
How much of training dataset to check.
Useful when debugging or testing something that happens at the end of an epoch.
.. testcode::
# default used by the Trainer
trainer = Trainer(limit_train_batches=1.0)
Example::
# default used by the Trainer
trainer = Trainer(limit_train_batches=1.0)
# run through only 25% of the training set each epoch
trainer = Trainer(limit_train_batches=0.25)
# run through only 10 batches of the training set each epoch
trainer = Trainer(limit_train_batches=10)
truncated_bptt_steps
^^^^^^^^^^^^^^^^^^^^
.. raw:: html
<video width="50%" max-width="400px" controls
poster="https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/pl_docs/trainer_flags/thumb/truncated_bptt_steps.jpg"
src="https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/pl_docs/trainer_flags/truncated_bptt_steps.mp4"></video>
|
Truncated backpropagation through time (TBPTT) performs backprop every k
steps of a much longer sequence.
If this is enabled, your batches will automatically get truncated
and the trainer will apply Truncated Backprop to it.
(`Williams et al. "An efficient gradient-based algorithm for on-line training of
recurrent network trajectories."
<http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.56.7941&rep=rep1&type=pdf>`_)
.. testcode::
# default used by the Trainer (ie: disabled)
trainer = Trainer(truncated_bptt_steps=None)
# backprop every 5 steps in a batch
trainer = Trainer(truncated_bptt_steps=5)
.. note:: Make sure your batches have a sequence dimension.
Lightning takes care to split your batch along the time-dimension.
.. code-block:: python
# we use the second as the time dimension
# (batch, time, ...)
sub_batch = batch[0, 0:t, ...]
Using this feature requires updating your LightningModule's
:meth:`pytorch_lightning.core.LightningModule.training_step` to include a `hiddens` arg
with the hidden
.. code-block:: python
# Truncated back-propagation through time
def training_step(self, batch, batch_idx, hiddens):
# hiddens are the hiddens from the previous truncated backprop step
out, hiddens = self.lstm(data, hiddens)
return {
"loss": ...,
"hiddens": hiddens # remember to detach() this
}
To modify how the batch is split,
override :meth:`pytorch_lightning.core.LightningModule.tbptt_split_batch`:
.. testcode::
class LitMNIST(LightningModule):
def tbptt_split_batch(self, batch, split_size):
# do your own splitting on the batch
return splits
val_check_interval
^^^^^^^^^^^^^^^^^^
.. raw:: html
<video width="50%" max-width="400px" controls
poster="https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/pl_docs/trainer_flags/thumb/val_check_interval.jpg"
src="https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/pl_docs/trainer_flags/val_check_interval.mp4"></video>
|
How often within one training epoch to check the validation set.
Can specify as float or int.
- use (float) to check within a training epoch
- use (int) to check every n steps (batches)
.. testcode::
# default used by the Trainer
trainer = Trainer(val_check_interval=1.0)
# check validation set 4 times during a training epoch
trainer = Trainer(val_check_interval=0.25)
# check validation set every 1000 training batches
# use this when using iterableDataset and your dataset has no length
# (ie: production cases with streaming data)
trainer = Trainer(val_check_interval=1000)
weights_save_path
^^^^^^^^^^^^^^^^^
.. raw:: html
<video width="50%" max-width="400px" controls
poster="https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/pl_docs/trainer_flags/thumb/weights_save_path.jpg"
src="https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/pl_docs/trainer_flags/weights_save_path.mp4"></video>
|
Directory of where to save weights if specified.
.. testcode::
# default used by the Trainer
trainer = Trainer(weights_save_path=os.getcwd())
# save to your custom path
trainer = Trainer(weights_save_path='my/path')
Example::
# if checkpoint callback used, then overrides the weights path
# **NOTE: this saves weights to some/path NOT my/path
checkpoint = ModelCheckpoint(dirpath='some/path')
trainer = Trainer(
checkpoint_callback=checkpoint,
weights_save_path='my/path'
)
weights_summary
^^^^^^^^^^^^^^^
.. raw:: html
<video width="50%" max-width="400px" controls
poster="https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/pl_docs/trainer_flags/thumb/weights_summary.jpg"
src="https://pl-bolts-doc-images.s3.us-east-2.amazonaws.com/pl_docs/trainer_flags/weights_summary.mp4"></video>
|
Prints a summary of the weights when training begins.
Options: 'full', 'top', None.
.. testcode::
# default used by the Trainer (ie: print summary of top level modules)
trainer = Trainer(weights_summary='top')
# print full summary of all modules and submodules
trainer = Trainer(weights_summary='full')
# don't print a summary
trainer = Trainer(weights_summary=None)
Trainer class API
-----------------
"""
from pytorch_lightning.trainer.trainer import Trainer
from pytorch_lightning.utilities.seed import seed_everything
__all__ = ["Trainer", "seed_everything"]
| 29.073529 | 148 | 0.710343 |
1a18e3a1d0c48aacb9b65fbfa63da8440e22b2c4 | 15,674 | py | Python | libcloud/dns/drivers/godaddy.py | dineshbhoopathy/libcloud | fec8af6a4d43ef45eafa3631fb1a91ff33a6440e | [
"Apache-2.0"
] | null | null | null | libcloud/dns/drivers/godaddy.py | dineshbhoopathy/libcloud | fec8af6a4d43ef45eafa3631fb1a91ff33a6440e | [
"Apache-2.0"
] | null | null | null | libcloud/dns/drivers/godaddy.py | dineshbhoopathy/libcloud | fec8af6a4d43ef45eafa3631fb1a91ff33a6440e | [
"Apache-2.0"
] | null | null | null | # Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Public API of this module: only the driver class is exported.
__all__ = [
    'GoDaddyDNSDriver'
]
# Prefer simplejson (historically faster / more featureful) and fall back to
# the stdlib json module. Catch ImportError specifically -- the original bare
# ``except:`` would also have swallowed unrelated failures such as
# KeyboardInterrupt or SystemExit raised during import.
try:
    import simplejson as json
except ImportError:
    import json
from libcloud.common.base import ConnectionKey, JsonResponse
from libcloud.common.types import LibcloudError
from libcloud.utils.py3 import httplib
from libcloud.dns.types import Provider, RecordType, RecordDoesNotExistError
from libcloud.dns.base import DNSDriver, Zone, Record
# Hostname of the GoDaddy REST API endpoint.
API_HOST = 'api.godaddy.com'
# Keys accepted in the ``extra`` dict passed to record create/update calls.
VALID_RECORD_EXTRA_PARAMS = ['prio', 'ttl']
class GoDaddyDNSException(LibcloudError):
    """Error reported by the GoDaddy API.

    Carries the API-supplied error ``code`` and human-readable ``message``.
    """

    def __init__(self, code, message):
        self.code = code
        self.message = message
        # Mirror BaseException.args so the exception prints and pickles
        # with both pieces of information.
        self.args = (code, message)

    def __str__(self):
        # Delegate to repr so both renderings stay in sync.
        return repr(self)

    def __repr__(self):
        return '<GoDaddyDNSException in {}: {}>'.format(self.code,
                                                        self.message)
class GoDaddyDNSResponse(JsonResponse):
    """JSON response wrapper that understands GoDaddy's error envelope."""

    # HTTP statuses the GoDaddy API uses for successful calls.
    valid_response_codes = [httplib.OK, httplib.ACCEPTED, httplib.CREATED,
                            httplib.NO_CONTENT]

    def parse_body(self):
        """Deserialize the response body.

        :return: the parsed JSON document, or ``None`` for an empty body.
        """
        if not self.body:
            return None
        # GoDaddy embeds regex patterns (e.g. ``\.``) in its schema
        # documents, which are not valid JSON escapes; double the
        # backslashes so json.loads accepts them. Work on a local copy:
        # the previous implementation mutated self.body, so calling
        # parse_body() twice re-escaped and corrupted the payload.
        body = self.body.replace('\\.', '\\\\.')
        return json.loads(body)

    def parse_error(self):
        """Raise a :class:`GoDaddyDNSException` describing the failure."""
        data = self.parse_body()
        if not data:
            # Empty error body (e.g. a bare 5xx): fall back to the HTTP
            # status so callers still get a meaningful exception instead
            # of a TypeError from subscripting None.
            raise GoDaddyDNSException(code=self.status,
                                      message='Empty error response')
        raise GoDaddyDNSException(code=data['code'], message=data['message'])

    def success(self):
        """Return True if the HTTP status denotes a successful call."""
        return self.status in self.valid_response_codes
class GoDaddyDNSConnection(ConnectionKey):
    """Connection to the GoDaddy API.

    Authenticates every request with the customer (shopper) id plus an
    ``sso-key`` Authorization header built from the API key and secret.
    """

    responseCls = GoDaddyDNSResponse
    host = API_HOST
    allow_insecure = False

    def __init__(self, key, secret, shopper_id, secure=True, host=None,
                 port=None, url=None, timeout=None,
                 proxy_url=None, backoff=None, retry_delay=None):
        # Let ConnectionKey handle all transport setup; this class only
        # needs to keep the credentials used to sign each request.
        super(GoDaddyDNSConnection, self).__init__(key, secure=secure,
                                                   host=host, port=port,
                                                   url=url, timeout=timeout,
                                                   proxy_url=proxy_url,
                                                   backoff=backoff,
                                                   retry_delay=retry_delay)
        self.key = key
        self.secret = secret
        self.shopper_id = shopper_id

    def add_default_headers(self, headers):
        """Attach the GoDaddy authentication headers to a request."""
        headers.update({
            'X-Shopper-Id': self.shopper_id,
            'Authorization': "sso-key %s:%s" % (self.key, self.secret),
        })
        return headers
class GoDaddyDNSDriver(DNSDriver):
"""
A driver for GoDaddy DNS.
This is for customers of GoDaddy
who wish to purchase, update existing domains
and manage records for DNS zones owned by GoDaddy NS servers.
"""
type = Provider.GODADDY
name = 'GoDaddy DNS'
website = 'https://www.godaddy.com/'
connectionCls = GoDaddyDNSConnection

# Map libcloud record types to the type names the GoDaddy API expects.
RECORD_TYPE_MAP = {
    RecordType.A: 'A',
    RecordType.AAAA: 'AAAA',
    RecordType.CNAME: 'CNAME',
    RecordType.MX: 'MX',
    # Bug fix: NS was previously mapped to 'SPF', which made NS records
    # be created and translated as SPF records.
    RecordType.NS: 'NS',
    RecordType.SRV: 'SRV',
    RecordType.TXT: 'TXT',
}
def __init__(self, shopper_id, key, secret,
             secure=True, host=None, port=None):
    """
    Instantiate a new `GoDaddyDNSDriver`

    :param shopper_id: Your customer ID or shopper ID with GoDaddy
    :type shopper_id: ``str``

    :param key: Your access key from developer.godaddy.com
    :type key: ``str``

    :param secret: Your access key secret
    :type secret: ``str``
    """
    kwargs = {
        'key': key,
        'secret': secret,
        'secure': secure,
        'host': host,
        'port': port,
        # The API identifies the customer by shopper id; normalize to a
        # string so integer ids work as well.
        'shopper_id': str(shopper_id),
    }
    super(GoDaddyDNSDriver, self).__init__(**kwargs)
def list_zones(self):
    """
    Return a list of zones (purchased domains)

    :return: ``list`` of :class:`Zone`
    """
    # Every domain owned by the shopper is exposed as a zone.
    response = self.connection.request('/v1/domains/')
    return self._to_zones(response.object)
def list_records(self, zone):
    """
    Return a list of records for the provided zone.

    :param zone: Zone to list records for.
    :type zone: :class:`Zone`

    :return: ``list`` of :class:`Record`
    """
    response = self.connection.request(
        '/v1/domains/%s/records' % (zone.domain))
    return self._to_records(items=response.object, zone=zone)
def create_record(self, name, zone, type, data, extra=None):
"""
Create a new record.
:param name: Record name without the domain name (e.g. www).
Note: If you want to create a record for a base domain
name, you should specify empty string ('') for this
argument.
:type name: ``str``
:param zone: Zone where the requested record is created.
:type zone: :class:`Zone`
:param type: DNS record type (A, AAAA, ...).
:type type: :class:`RecordType`
:param data: Data for the record (depends on the record type).
:type data: ``str``
:param extra: Extra attributes (driver specific). (optional)
:type extra: ``dict``
:rtype: :class:`Record`
"""
new_record = self._format_record(name, type, data, extra)
self.connection.request(
'/v1/domains/%s/records' % (zone.domain), method='PATCH',
data=[new_record])
id = self._get_id_of_record(name, type)
return Record(
id=id, name=name,
type=type, data=data,
zone=zone, driver=self,
ttl=new_record['ttl'],
extra=extra)
def update_record(self, record, name, type, data, extra=None):
"""
Update an existing record.
:param record: Record to update.
:type record: :class:`Record`
:param name: Record name without the domain name (e.g. www).
Note: If you want to create a record for a base domain
name, you should specify empty string ('') for this
argument.
:type name: ``str``
:param type: DNS record type (A, AAAA, ...).
:type type: :class:`RecordType`
:param data: Data for the record (depends on the record type).
:type data: ``str``
:param extra: (optional) Extra attributes (driver specific).
:type extra: ``dict``
:rtype: :class:`Record`
"""
new_record = self._format_record(name, type, data, extra)
self.connection.request(
'/v1/domains/%s/records/%s/%s' % (record.zone.domain,
record.type,
record.name),
method='PUT',
data=[new_record])
id = self._get_id_of_record(name, type)
return Record(
id=id, name=name,
type=type, data=data,
zone=record.zone, driver=self,
ttl=new_record['ttl'],
extra=extra)
def get_record(self, zone_id, record_id):
"""
Return a Record instance.
:param zone_id: ID of the required zone
:type zone_id: ``str``
:param record_id: ID of the required record
:type record_id: ``str``
:rtype: :class:`Record`
"""
parts = record_id.split(':')
result = self.connection.request(
'/v1/domains/%s/records/%s/%s' % (
zone_id,
parts[1],
parts[0])).object
if len(result) == 0:
raise RecordDoesNotExistError(record_id,
driver=self,
record_id=record_id)
return self._to_record(result[0],
self.get_zone(zone_id))
def get_zone(self, zone_id):
"""
Get a zone (by domain)
:param zone_id: The domain, not the ID
:type zone_id: ``str``
:rtype: :class:`Zone`
"""
result = self.connection.request(
'/v1/domains/%s/' % zone_id).object
zone = self._to_zone(result)
return zone
def delete_zone(self, zone):
"""
Delete a zone.
Note: This will CANCEL a purchased domain
:param zone: Zone to delete.
:type zone: :class:`Zone`
:rtype: ``bool``
"""
self.connection.request(
'/v1/domains/%s' % (zone.domain),
method='DELETE')
# no error means ok
return True
def ex_check_availability(self, domain, for_transfer=False):
"""
Check the availability of the domain
:param domain: the domain name e.g. wazzlewobbleflooble.com
:type domain: ``str``
:param for_transfer: Check if domain is available for transfer
:type for_transfer: ``bool``
:rtype: `list` of :class:`GoDaddyAvailability`
"""
result = self.connection.request(
'/v1/domains/available',
method='GET',
params={
'domain': domain,
'forTransfer': str(for_transfer)
}
).object
return GoDaddyAvailability(
domain=result['domain'],
available=result['available'],
price=result['price'],
currency=result['currency'],
period=result['period']
)
def ex_list_tlds(self):
"""
List available TLDs for sale
:rtype: ``list`` of :class:`GoDaddyTLD`
"""
result = self.connection.request(
'/v1/domains/tlds',
method='GET'
).object
return self._to_tlds(result)
def ex_get_purchase_schema(self, tld):
"""
Get the schema that needs completing to purchase a new domain
Use this in conjunction with ex_purchase_domain
:param tld: The top level domain e.g com, eu, uk
:type tld: ``str``
:rtype: `dict` the JSON Schema
"""
result = self.connection.request(
'/v1/domains/purchase/schema/%s' % tld,
method='GET'
).object
return result
def ex_get_agreements(self, tld, privacy=True):
"""
Get the legal agreements for a tld
Use this in conjunction with ex_purchase_domain
:param tld: The top level domain e.g com, eu, uk
:type tld: ``str``
:rtype: `dict` the JSON Schema
"""
result = self.connection.request(
'/v1/domains/agreements',
params={
'tlds': tld,
'privacy': str(privacy)
},
method='GET'
).object
agreements = []
for item in result:
agreements.append(
GoDaddyLegalAgreement(
agreement_key=item['agreementKey'],
title=item['title'],
url=item['url'],
content=item['content']))
return agreements
def ex_purchase_domain(self, purchase_request):
"""
Purchase a domain with GoDaddy
:param purchase_request: The completed document
from ex_get_purchase_schema
:type purchase_request: ``dict``
:rtype: :class:`GoDaddyDomainPurchaseResponse` Your order
"""
result = self.connection.request(
'/v1/domains/purchase',
data=purchase_request,
method='POST'
).object
return GoDaddyDomainPurchaseResponse(
order_id=result['orderId'],
item_count=result['itemCount'],
total=result['total'],
currency=result['currency']
)
def _format_record(self, name, type, data, extra):
if extra is None:
extra = {}
new_record = {}
if type == RecordType.SRV:
new_record = {
'type': type,
'name': name,
'data': data,
'priority': 1,
'ttl': extra.get('ttl', 5),
'service': extra.get('service', ''),
'protocol': extra.get('protocol', ''),
'port': extra.get('port', ''),
'weight': extra.get('weight', '1')
}
else:
new_record = {
'type': type,
'name': name,
'data': data,
'priority': 1,
'ttl': extra.get('ttl', 5)
}
return new_record
def _to_zones(self, items):
zones = []
for item in items:
zones.append(self._to_zone(item))
return zones
def _to_zone(self, item):
extra = {"expires": item['expires']}
zone = Zone(id=item['domainId'], domain=item['domain'],
type='master', ttl=None,
driver=self, extra=extra)
return zone
def _to_records(self, items, zone=None):
records = []
for item in items:
records.append(self._to_record(item=item, zone=zone))
return records
def _to_record(self, item, zone=None):
ttl = item['ttl']
type = self._string_to_record_type(item['type'])
name = item['name']
id = self._get_id_of_record(name, type)
record = Record(id=id, name=name,
type=type, data=item['data'],
zone=zone, driver=self,
ttl=ttl)
return record
def _to_tlds(self, items):
tlds = []
for item in items:
tlds.append(self._to_tld(item))
return tlds
def _to_tld(self, item):
return GoDaddyTLD(
name=item['name'],
tld_type=item['type']
)
def _get_id_of_record(self, name, type):
return '%s:%s' % (name, type)
class GoDaddyAvailability(object):
    """Result of a GoDaddy domain-availability lookup."""

    def __init__(self, domain, available, price, currency, period):
        self.domain = domain
        self.currency = currency
        self.available = bool(available)
        self.period = int(period)
        # The API reports prices in micro-units of the currency;
        # convert to whole currency units (e.g. dollars).
        self.price = float(price) / 1000000
class GoDaddyTLD(object):
    """A top-level domain offered for sale by GoDaddy."""

    def __init__(self, name, tld_type):
        # e.g. name='com', tld_type as reported by the tlds endpoint.
        self.type = tld_type
        self.name = name
class GoDaddyDomainPurchaseResponse(object):
    """Order confirmation returned by ``ex_purchase_domain``.

    :param order_id: GoDaddy order identifier.
    :param item_count: Number of items in the order.
    :param total: Order total (in the given currency).
    :param currency: Currency code of the total, e.g. ``USD``.
    """
    def __init__(self, order_id, item_count, total, currency):
        self.order_id = order_id
        self.item_count = item_count
        self.total = total
        # Bug fix: the currency used to be stored only under the misspelled
        # attribute ``current``.  Store it under the intended name and keep
        # the old attribute as a backward-compatible alias.
        self.currency = currency
        self.current = currency
class GoDaddyLegalAgreement(object):
    """A legal agreement that must be accepted when purchasing a domain."""

    def __init__(self, agreement_key, title, url, content):
        self.content = content
        self.url = url
        self.title = title
        self.agreement_key = agreement_key
| 31.161034 | 78 | 0.555378 |
f167c960979c680b993ac2182bdcdb02c5c95b24 | 168 | py | Python | app/utils/__init__.py | kryvokhyzha/lyrics-generation | 981bbc35c6bd6fe134e3a800a96f2c5825427139 | [
"MIT"
] | 1 | 2021-02-22T20:02:20.000Z | 2021-02-22T20:02:20.000Z | app/utils/__init__.py | kryvokhyzha/lyrics-generation | 981bbc35c6bd6fe134e3a800a96f2c5825427139 | [
"MIT"
] | null | null | null | app/utils/__init__.py | kryvokhyzha/lyrics-generation | 981bbc35c6bd6fe134e3a800a96f2c5825427139 | [
"MIT"
] | null | null | null | import yaml
def get_config(file_name='config.yaml'):
    """Load and return the YAML configuration stored in *file_name*."""
    with open(file_name, 'r') as stream:
        return yaml.load(stream, Loader=yaml.FullLoader)
8ec73a3d2aa9813c4ffb1b2b0d04a4c9358598c8 | 340 | py | Python | whimsical-woodpeckers/mysite/routing.py | Vthechamp22/summer-code-jam-2021 | 0a8bf1f22f6c73300891fd779da36efd8e1304c1 | [
"MIT"
] | 40 | 2020-08-02T07:38:22.000Z | 2021-07-26T01:46:50.000Z | whimsical-woodpeckers/mysite/routing.py | Vthechamp22/summer-code-jam-2021 | 0a8bf1f22f6c73300891fd779da36efd8e1304c1 | [
"MIT"
] | 134 | 2020-07-31T12:15:45.000Z | 2020-12-13T04:42:19.000Z | whimsical-woodpeckers/mysite/routing.py | Vthechamp22/summer-code-jam-2021 | 0a8bf1f22f6c73300891fd779da36efd8e1304c1 | [
"MIT"
] | 101 | 2020-07-31T12:00:47.000Z | 2021-11-01T09:06:58.000Z | from channels.auth import AuthMiddlewareStack
from channels.routing import ProtocolTypeRouter, URLRouter
import messages.routing
application = ProtocolTypeRouter({
# (http->django views is added by default)
'websocket': AuthMiddlewareStack(
URLRouter(
messages.routing.websocket_urlpatterns
)
),
})
| 26.153846 | 58 | 0.726471 |
b9f915bc4ba31343271d28ecf2882aeae79165e0 | 9,795 | py | Python | magenta/models/pianoroll_rnn_nade/pianoroll_rnn_nade_generate.py | workproduct/magenta | ba43c3e1a2b3b6a5731fa10a5a6bddd0c821eb84 | [
"Apache-2.0"
] | null | null | null | magenta/models/pianoroll_rnn_nade/pianoroll_rnn_nade_generate.py | workproduct/magenta | ba43c3e1a2b3b6a5731fa10a5a6bddd0c821eb84 | [
"Apache-2.0"
] | null | null | null | magenta/models/pianoroll_rnn_nade/pianoroll_rnn_nade_generate.py | workproduct/magenta | ba43c3e1a2b3b6a5731fa10a5a6bddd0c821eb84 | [
"Apache-2.0"
] | null | null | null | # Copyright 2020 The Magenta Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Generate pianoroll tracks from a trained RNN-NADE checkpoint.
Uses flags to define operation.
"""
import ast
import os
import time
import magenta
from magenta.models.pianoroll_rnn_nade import pianoroll_rnn_nade_model
from magenta.models.pianoroll_rnn_nade.pianoroll_rnn_nade_sequence_generator import PianorollRnnNadeSequenceGenerator
from magenta.models.shared import sequence_generator
from magenta.models.shared import sequence_generator_bundle
from magenta.music import constants
from magenta.music.protobuf import generator_pb2
from magenta.music.protobuf import music_pb2
import tensorflow as tf
FLAGS = tf.app.flags.FLAGS
# Command-line flags: model loading (run_dir / bundle_file), bundle saving,
# priming options, generation length/tempo, beam-search knobs and logging.
tf.app.flags.DEFINE_string(
    'run_dir', None,
    'Path to the directory where the latest checkpoint will be loaded from.')
tf.app.flags.DEFINE_string(
    'bundle_file', None,
    'Path to the bundle file. If specified, this will take priority over '
    'run_dir, unless save_generator_bundle is True, in which case both this '
    'flag and run_dir are required')
tf.app.flags.DEFINE_boolean(
    'save_generator_bundle', False,
    'If true, instead of generating a sequence, will save this generator as a '
    'bundle file in the location specified by the bundle_file flag')
tf.app.flags.DEFINE_string(
    'bundle_description', None,
    'A short, human-readable text description of the bundle (e.g., training '
    'data, hyper parameters, etc.).')
tf.app.flags.DEFINE_string(
    'config', 'rnn-nade', 'Config to use. Ignored if bundle is provided.')
tf.app.flags.DEFINE_string(
    'output_dir', '/tmp/pianoroll_rnn_nade/generated',
    'The directory where MIDI files will be saved to.')
tf.app.flags.DEFINE_integer(
    'num_outputs', 10,
    'The number of tracks to generate. One MIDI file will be created for '
    'each.')
tf.app.flags.DEFINE_integer(
    'num_steps', 128,
    'The total number of steps the generated track should be, priming '
    'track length + generated steps. Each step is a 16th of a bar.')
tf.app.flags.DEFINE_string(
    'primer_pitches', '',
    'A string representation of a Python list of pitches that will be used as '
    'a starting chord with a quarter note duration. For example: '
    '"[60, 64, 67]"')
tf.app.flags.DEFINE_string(
    'primer_pianoroll', '',
    'A string representation of a Python list of '
    '`magenta.music.PianorollSequence` event values (tuples of active MIDI'
    'pitches for a sequence of steps). For example: '
    '"[(55,), (54,), (55, 53), (50,), (62, 52), (), (63, 55)]".')
tf.app.flags.DEFINE_string(
    'primer_midi', '',
    'The path to a MIDI file containing a polyphonic track that will be used '
    'as a priming track.')
tf.app.flags.DEFINE_float(
    'qpm', None,
    'The quarters per minute to play generated output at. If a primer MIDI is '
    'given, the qpm from that will override this flag. If qpm is None, qpm '
    'will default to 60.')
tf.app.flags.DEFINE_integer(
    'beam_size', 1,
    'The beam size to use for beam search when generating tracks.')
tf.app.flags.DEFINE_integer(
    'branch_factor', 1,
    'The branch factor to use for beam search when generating tracks.')
tf.app.flags.DEFINE_string(
    'log', 'INFO',
    'The threshold for what messages will be logged DEBUG, INFO, WARN, ERROR, '
    'or FATAL.')
tf.app.flags.DEFINE_string(
    'hparams', '',
    'Comma-separated list of `name=value` pairs. For each pair, the value of '
    'the hyperparameter named `name` is set to `value`. This mapping is merged '
    'with the default hyperparameters.')
def get_checkpoint():
  """Return the training directory the model should load from, or None.

  Raises:
    sequence_generator.SequenceGeneratorError: when both --run_dir and
      --bundle_file are given without --save_generator_bundle.
  """
  if FLAGS.run_dir and FLAGS.bundle_file and not FLAGS.save_generator_bundle:
    raise sequence_generator.SequenceGeneratorError(
        'Cannot specify both bundle_file and run_dir')
  if not FLAGS.run_dir:
    return None
  return os.path.join(os.path.expanduser(FLAGS.run_dir), 'train')
def get_bundle():
  """Read the generator bundle named by --bundle_file, if any.

  Returns:
    A generator_pb2.GeneratorBundle, or None when no bundle should be
    loaded (no --bundle_file given, or --save_generator_bundle set).
  """
  if FLAGS.save_generator_bundle or FLAGS.bundle_file is None:
    return None
  path = os.path.expanduser(FLAGS.bundle_file)
  return sequence_generator_bundle.read_bundle_file(path)
def run_with_flags(generator):
  """Generates pianoroll tracks and saves them as MIDI files.
  Uses the options specified by the flags defined in this module.
  Args:
    generator: The PianorollRnnNadeSequenceGenerator to use for generation.
  """
  if not FLAGS.output_dir:
    tf.logging.fatal('--output_dir required')
    return
  output_dir = os.path.expanduser(FLAGS.output_dir)
  primer_midi = None
  if FLAGS.primer_midi:
    primer_midi = os.path.expanduser(FLAGS.primer_midi)
  if not tf.gfile.Exists(output_dir):
    tf.gfile.MakeDirs(output_dir)
  primer_sequence = None
  qpm = FLAGS.qpm if FLAGS.qpm else 60
  # Priming precedence: explicit pitches > pianoroll events > MIDI file.
  if FLAGS.primer_pitches:
    # Build a one-chord NoteSequence: every pitch held for a quarter note.
    primer_sequence = music_pb2.NoteSequence()
    primer_sequence.tempos.add().qpm = qpm
    primer_sequence.ticks_per_quarter = constants.STANDARD_PPQ
    for pitch in ast.literal_eval(FLAGS.primer_pitches):
      note = primer_sequence.notes.add()
      note.start_time = 0
      note.end_time = 60.0 / qpm
      note.pitch = pitch
      note.velocity = 100
    primer_sequence.total_time = primer_sequence.notes[-1].end_time
  elif FLAGS.primer_pianoroll:
    primer_pianoroll = magenta.music.PianorollSequence(
        events_list=ast.literal_eval(FLAGS.primer_pianoroll),
        steps_per_quarter=4, shift_range=True)
    primer_sequence = primer_pianoroll.to_sequence(qpm=qpm)
  elif primer_midi:
    primer_sequence = magenta.music.midi_file_to_sequence_proto(primer_midi)
    # A tempo embedded in the primer MIDI overrides the --qpm flag.
    if primer_sequence.tempos and primer_sequence.tempos[0].qpm:
      qpm = primer_sequence.tempos[0].qpm
  else:
    tf.logging.warning(
        'No priming sequence specified. Defaulting to empty sequence.')
    primer_sequence = music_pb2.NoteSequence()
    primer_sequence.tempos.add().qpm = qpm
    primer_sequence.ticks_per_quarter = constants.STANDARD_PPQ
  # Derive the total number of seconds to generate.
  seconds_per_step = 60.0 / qpm / generator.steps_per_quarter
  generate_end_time = FLAGS.num_steps * seconds_per_step
  # Specify start/stop time for generation based on starting generation at the
  # end of the priming sequence and continuing until the sequence is num_steps
  # long.
  generator_options = generator_pb2.GeneratorOptions()
  # Set the start time to begin when the last note ends.
  generate_section = generator_options.generate_sections.add(
      start_time=primer_sequence.total_time,
      end_time=generate_end_time)
  if generate_section.start_time >= generate_section.end_time:
    tf.logging.fatal(
        'Priming sequence is longer than the total number of steps '
        'requested: Priming sequence length: %s, Total length '
        'requested: %s',
        generate_section.start_time, generate_end_time)
    return
  generator_options.args['beam_size'].int_value = FLAGS.beam_size
  generator_options.args['branch_factor'].int_value = FLAGS.branch_factor
  tf.logging.info('primer_sequence: %s', primer_sequence)
  tf.logging.info('generator_options: %s', generator_options)
  # Make the generate request num_outputs times and save the output as midi
  # files.
  date_and_time = time.strftime('%Y-%m-%d_%H%M%S')
  digits = len(str(FLAGS.num_outputs))
  for i in range(FLAGS.num_outputs):
    generated_sequence = generator.generate(primer_sequence, generator_options)
    midi_filename = '%s_%s.mid' % (date_and_time, str(i + 1).zfill(digits))
    midi_path = os.path.join(output_dir, midi_filename)
    magenta.music.sequence_proto_to_midi_file(generated_sequence, midi_path)
  tf.logging.info('Wrote %d MIDI files to %s',
                  FLAGS.num_outputs, output_dir)
def main(unused_argv):
  """Saves bundle or runs generator based on flags."""
  tf.logging.set_verbosity(FLAGS.log)
  bundle = get_bundle()
  # A bundle, when present, dictates which model config to use.
  config_id = bundle.generator_details.id if bundle else FLAGS.config
  config = pianoroll_rnn_nade_model.default_configs[config_id]
  config.hparams.parse(FLAGS.hparams)
  # Having too large of a batch size will slow generation down unnecessarily.
  config.hparams.batch_size = min(
      config.hparams.batch_size, FLAGS.beam_size * FLAGS.branch_factor)
  generator = PianorollRnnNadeSequenceGenerator(
      model=pianoroll_rnn_nade_model.PianorollRnnNadeModel(config),
      details=config.details,
      steps_per_quarter=config.steps_per_quarter,
      checkpoint=get_checkpoint(),
      bundle=bundle)
  if FLAGS.save_generator_bundle:
    # Bundle-save mode: package the generator instead of generating MIDI.
    bundle_filename = os.path.expanduser(FLAGS.bundle_file)
    if FLAGS.bundle_description is None:
      tf.logging.warning('No bundle description provided.')
    tf.logging.info('Saving generator bundle to %s', bundle_filename)
    generator.create_bundle_file(bundle_filename, FLAGS.bundle_description)
  else:
    run_with_flags(generator)
def console_entry_point():
  """Entry point used by the installed console script."""
  tf.app.run(main)
if __name__ == '__main__':
  console_entry_point()
| 38.411765 | 117 | 0.741194 |
2dcea46078c483f955e3116bd8c6faf672e0ce9b | 3,328 | py | Python | ordertype.py | msfzq3/zqbt | 277bc95d6b42b0cd17ae890a165f95fbafc95477 | [
"Apache-2.0"
] | null | null | null | ordertype.py | msfzq3/zqbt | 277bc95d6b42b0cd17ae890a165f95fbafc95477 | [
"Apache-2.0"
] | null | null | null | ordertype.py | msfzq3/zqbt | 277bc95d6b42b0cd17ae890a165f95fbafc95477 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
from event import OrderEvent
# 设计一些委托单类型函数,方便策略中的下单操作
# 仅在Strategy中使用,不参与到具体的事件循环
# 优化:使用order类,直接加载bars、symbol、events,减少交易函数输入
class Order(object):
    """Order-placement helpers used from Strategy code.

    Wraps the event queue, bar data handler and portfolio so strategies can
    place orders without building OrderEvents by hand.  Used only inside a
    Strategy; it takes no part in the event loop itself.
    """

    def __init__(self, events, bars, portfolio):
        self.events = events
        self.bars = bars
        self.portfolio = portfolio

    def _market_price(self, symbol):
        # Market orders are priced at the latest closing price.
        return self.bars.get_latest_bar(symbol)['close']

    def _submit(self, symbol, qty, price):
        # Lot-size rule: buys must be at least one board lot (100 shares);
        # any negative quantity (a sell) is always allowed.
        if qty >= 100 or qty < 0:
            self.events.put(OrderEvent(symbol, qty, price))

    def order_share(self, symbol, qty, price='MARKET'):
        """Place an order for an explicit share quantity."""
        if price == 'MARKET':
            price = self._market_price(symbol)
        self._submit(symbol, qty, price)

    def order_value(self, symbol, value, price='MARKET'):
        """Place an order sized by total notional value."""
        if price == 'MARKET':
            price = self._market_price(symbol)
        self._submit(symbol, value / price, price)

    def order_target_value(self, symbol, target_value, price='MARKET'):
        """Trade towards a target market value for *symbol*."""
        # Difference between the desired and current market value.
        value_gap = target_value - self.portfolio.holdings[symbol]
        if price == 'MARKET':
            price = self._market_price(symbol)
        self._submit(symbol, value_gap / price, price)

    def order_target_share(self, symbol, target_share, price='MARKET'):
        """Trade towards a target share count for *symbol*."""
        share_gap = target_share - self.portfolio.positions[symbol]
        if price == 'MARKET':
            price = self._market_price(symbol)
        self._submit(symbol, share_gap, price)
'''
# 按委托数量下单,不填价格则默认为市价单
def order_share(events, bars, symbol, qty, price='MARKET'):
if price == 'MARKET': # 市价单则委托价为收盘价
price = bars.get_latest_bar(symbol)['close']
my_order = OrderEvent(symbol, qty, price)
events.put(my_order)
# 按委托总额下单,不填价格则默认为市价单
def order_value(events, bars, symbol, value, price='MARKET'):
if price == 'MARKET': # 市价单则委托价为收盘价
price = bars.get_latest_bar(symbol)['close']
qty = value/price # 计算委托数量
my_order = OrderEvent(symbol, qty, price)
events.put(my_order)
# 按委托的目标金额下单,需要提供持仓信息,不填价格则默认为市价单
def order_target_value(events, bars, portfolio, symbol, target_value, price='MARKET'):
cur_hold = portfolio.holdings[symbol] # 获取当前市值
value = target_value-cur_hold # 计算市值差额
if price == 'MARKET': # 市价单则委托价为收盘价
price = bars.get_latest_bar(symbol)['close']
qty = value/price # 计算委托数量
my_order = OrderEvent(symbol, qty, price)
events.put(my_order)
# 按委托的目标数量下单,需要提供持仓信息,不填价格则默认为市价单
def order_target_share(events, bars, portfolio, symbol, target_share, price='MARKET'):
cur_pos = portfolio.positions[symbol] # 获取当前持仓
if price == 'MARKET': # 市价单则委托价为收盘价
price = bars.get_latest_bar(symbol)['close']
qty = target_share-cur_pos # 计算委托数量
my_order = OrderEvent(symbol, qty, price)
events.put(my_order)
''' | 38.252874 | 87 | 0.635216 |
b504185d3e365e2622b5cd097194c0852fb29e2f | 6,984 | py | Python | tests/x509/test_x509_revokedcertbuilder.py | wdscxsj/cryptography | 94590a9aecc9e5ef6fc8eda52bae43643a4c44bd | [
"PSF-2.0",
"Apache-2.0",
"BSD-3-Clause"
] | 4,492 | 2015-01-02T23:02:52.000Z | 2022-03-31T12:59:57.000Z | tests/x509/test_x509_revokedcertbuilder.py | wdscxsj/cryptography | 94590a9aecc9e5ef6fc8eda52bae43643a4c44bd | [
"PSF-2.0",
"Apache-2.0",
"BSD-3-Clause"
] | 3,692 | 2015-01-01T03:16:56.000Z | 2022-03-31T19:20:25.000Z | tests/x509/test_x509_revokedcertbuilder.py | wdscxsj/cryptography | 94590a9aecc9e5ef6fc8eda52bae43643a4c44bd | [
"PSF-2.0",
"Apache-2.0",
"BSD-3-Clause"
] | 1,155 | 2015-01-09T00:48:05.000Z | 2022-03-31T23:46:43.000Z | # This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
import datetime
import pytest
import pytz
from cryptography import x509
class TestRevokedCertificateBuilder(object):
    """Tests for x509.RevokedCertificateBuilder: serial-number and
    revocation-date validation, extension handling, and build()."""
    def test_serial_number_must_be_integer(self):
        with pytest.raises(TypeError):
            x509.RevokedCertificateBuilder().serial_number(
                "notanx509name"  # type: ignore[arg-type]
            )
    def test_serial_number_must_be_non_negative(self):
        with pytest.raises(ValueError):
            x509.RevokedCertificateBuilder().serial_number(-1)
    def test_serial_number_must_be_positive(self):
        with pytest.raises(ValueError):
            x509.RevokedCertificateBuilder().serial_number(0)
    def test_minimal_serial_number(self, backend):
        # Smallest accepted serial number is 1.
        revocation_date = datetime.datetime(2002, 1, 1, 12, 1)
        builder = (
            x509.RevokedCertificateBuilder()
            .serial_number(1)
            .revocation_date(revocation_date)
        )
        revoked_certificate = builder.build(backend)
        assert revoked_certificate.serial_number == 1
    def test_biggest_serial_number(self, backend):
        # Serial numbers are limited to 160 bits; test the maximum value.
        revocation_date = datetime.datetime(2002, 1, 1, 12, 1)
        builder = (
            x509.RevokedCertificateBuilder()
            .serial_number((1 << 159) - 1)
            .revocation_date(revocation_date)
        )
        revoked_certificate = builder.build(backend)
        assert revoked_certificate.serial_number == (1 << 159) - 1
    def test_serial_number_must_be_less_than_160_bits_long(self):
        with pytest.raises(ValueError):
            x509.RevokedCertificateBuilder().serial_number(1 << 159)
    def test_set_serial_number_twice(self):
        builder = x509.RevokedCertificateBuilder().serial_number(3)
        with pytest.raises(ValueError):
            builder.serial_number(4)
    def test_aware_revocation_date(self, backend):
        # Timezone-aware dates must be normalized to UTC by the builder.
        time = datetime.datetime(2012, 1, 16, 22, 43)
        tz = pytz.timezone("US/Pacific")
        time = tz.localize(time)
        utc_time = datetime.datetime(2012, 1, 17, 6, 43)
        serial_number = 333
        builder = (
            x509.RevokedCertificateBuilder()
            .serial_number(serial_number)
            .revocation_date(time)
        )
        revoked_certificate = builder.build(backend)
        assert revoked_certificate.revocation_date == utc_time
    def test_revocation_date_invalid(self):
        with pytest.raises(TypeError):
            x509.RevokedCertificateBuilder().revocation_date(
                "notadatetime"  # type: ignore[arg-type]
            )
    def test_revocation_date_before_1950(self):
        with pytest.raises(ValueError):
            x509.RevokedCertificateBuilder().revocation_date(
                datetime.datetime(1940, 8, 10)
            )
    def test_set_revocation_date_twice(self):
        builder = x509.RevokedCertificateBuilder().revocation_date(
            datetime.datetime(2002, 1, 1, 12, 1)
        )
        with pytest.raises(ValueError):
            builder.revocation_date(datetime.datetime(2002, 1, 1, 12, 1))
    def test_add_extension_checks_for_duplicates(self):
        builder = x509.RevokedCertificateBuilder().add_extension(
            x509.CRLReason(x509.ReasonFlags.ca_compromise), False
        )
        with pytest.raises(ValueError):
            builder.add_extension(
                x509.CRLReason(x509.ReasonFlags.ca_compromise), False
            )
    def test_add_invalid_extension(self):
        with pytest.raises(TypeError):
            x509.RevokedCertificateBuilder().add_extension(
                "notanextension", False  # type: ignore[arg-type]
            )
    def test_no_serial_number(self, backend):
        # build() requires both serial number and revocation date.
        builder = x509.RevokedCertificateBuilder().revocation_date(
            datetime.datetime(2002, 1, 1, 12, 1)
        )
        with pytest.raises(ValueError):
            builder.build(backend)
    def test_no_revocation_date(self, backend):
        builder = x509.RevokedCertificateBuilder().serial_number(3)
        with pytest.raises(ValueError):
            builder.build(backend)
    def test_create_revoked(self, backend):
        serial_number = 333
        revocation_date = datetime.datetime(2002, 1, 1, 12, 1)
        builder = (
            x509.RevokedCertificateBuilder()
            .serial_number(serial_number)
            .revocation_date(revocation_date)
        )
        revoked_certificate = builder.build(backend)
        assert revoked_certificate.serial_number == serial_number
        assert revoked_certificate.revocation_date == revocation_date
        assert len(revoked_certificate.extensions) == 0
    @pytest.mark.parametrize(
        "extension",
        [
            x509.InvalidityDate(datetime.datetime(2015, 1, 1, 0, 0)),
            x509.CRLReason(x509.ReasonFlags.ca_compromise),
            x509.CertificateIssuer([x509.DNSName("cryptography.io")]),
        ],
    )
    def test_add_extensions(self, backend, extension):
        serial_number = 333
        revocation_date = datetime.datetime(2002, 1, 1, 12, 1)
        builder = (
            x509.RevokedCertificateBuilder()
            .serial_number(serial_number)
            .revocation_date(revocation_date)
            .add_extension(extension, False)
        )
        revoked_certificate = builder.build(backend)
        assert revoked_certificate.serial_number == serial_number
        assert revoked_certificate.revocation_date == revocation_date
        assert len(revoked_certificate.extensions) == 1
        ext = revoked_certificate.extensions.get_extension_for_class(
            type(extension)
        )
        assert ext.critical is False
        assert ext.value == extension
    def test_add_multiple_extensions(self, backend):
        serial_number = 333
        revocation_date = datetime.datetime(2002, 1, 1, 12, 1)
        invalidity_date = x509.InvalidityDate(
            datetime.datetime(2015, 1, 1, 0, 0)
        )
        certificate_issuer = x509.CertificateIssuer(
            [x509.DNSName("cryptography.io")]
        )
        crl_reason = x509.CRLReason(x509.ReasonFlags.aa_compromise)
        builder = (
            x509.RevokedCertificateBuilder()
            .serial_number(serial_number)
            .revocation_date(revocation_date)
            .add_extension(invalidity_date, True)
            .add_extension(crl_reason, True)
            .add_extension(certificate_issuer, True)
        )
        revoked_certificate = builder.build(backend)
        assert len(revoked_certificate.extensions) == 3
        for ext_data in [invalidity_date, certificate_issuer, crl_reason]:
            ext = revoked_certificate.extensions.get_extension_for_class(
                type(ext_data)
            )
            assert ext.critical is True
            assert ext.value == ext_data
| 36 | 79 | 0.647766 |
571896e5a7bad3f2e01ca47232b179ede79e7220 | 98,145 | py | Python | neutron/tests/functional/agent/l3/test_dvr_router.py | weiqiLee/neutron | ddc72ebd41a0e7804b33a21583d3add008191229 | [
"Apache-2.0"
] | 1 | 2018-11-19T15:09:53.000Z | 2018-11-19T15:09:53.000Z | neutron/tests/functional/agent/l3/test_dvr_router.py | weiqiLee/neutron | ddc72ebd41a0e7804b33a21583d3add008191229 | [
"Apache-2.0"
] | null | null | null | neutron/tests/functional/agent/l3/test_dvr_router.py | weiqiLee/neutron | ddc72ebd41a0e7804b33a21583d3add008191229 | [
"Apache-2.0"
] | null | null | null | # Copyright (c) 2014 Red Hat, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import functools
import mock
import netaddr
from neutron_lib.api.definitions import portbindings
from neutron_lib import constants as lib_constants
import six
import testtools
from neutron.agent.l3 import agent as neutron_l3_agent
from neutron.agent.l3 import dvr_edge_ha_router as dvr_ha_router
from neutron.agent.l3 import dvr_edge_router
from neutron.agent.l3 import dvr_fip_ns
from neutron.agent.l3 import dvr_local_router
from neutron.agent.l3 import dvr_snat_ns
from neutron.agent.l3 import namespaces
from neutron.agent.linux import ip_lib
from neutron.agent.linux import iptables_manager
from neutron.common import constants as n_const
from neutron.common import exceptions as n_exc
from neutron.common import utils
from neutron.tests.common import l3_test_common
from neutron.tests.common import machine_fixtures
from neutron.tests.common import net_helpers
from neutron.tests.functional.agent.l3 import framework
DEVICE_OWNER_COMPUTE = lib_constants.DEVICE_OWNER_COMPUTE_PREFIX + 'fake'
class TestDvrRouter(framework.L3AgentTestFramework):
def manage_router(self, agent, router):
def _safe_fipnamespace_delete_on_ext_net(ext_net_id):
try:
agent.fipnamespace_delete_on_ext_net(None, ext_net_id)
except RuntimeError:
pass
if router['gw_port']:
self.addCleanup(
_safe_fipnamespace_delete_on_ext_net,
router['gw_port']['network_id'])
return super(TestDvrRouter, self).manage_router(agent, router)
    def test_dvr_update_floatingip_statuses(self):
        # Run the shared floating-IP status-update check in plain DVR mode.
        self.agent.conf.agent_mode = 'dvr'
        self._test_update_floatingip_statuses(self.generate_dvr_router_info())
    def test_dvr_router_lifecycle_without_ha_without_snat_with_fips(self):
        # Lifecycle with no HA and no SNAT; floating IPs enabled.
        self._dvr_router_lifecycle(enable_ha=False, enable_snat=False)
    def test_dvr_router_lifecycle_without_ha_with_snat_with_fips(self):
        # Lifecycle with SNAT but no HA; floating IPs enabled.
        self._dvr_router_lifecycle(enable_ha=False, enable_snat=True)
    def test_dvr_router_lifecycle_ha_with_snat_with_fips(self):
        # Lifecycle with both HA and SNAT; floating IPs enabled.
        self._dvr_router_lifecycle(enable_ha=True, enable_snat=True)
    def test_dvr_lifecycle_no_ha_with_snat_with_fips_with_cent_fips(self):
        # SNAT, no HA, with a centralized (SNAT-bound) floating IP.
        self._dvr_router_lifecycle(enable_ha=False, enable_snat=True,
                                   snat_bound_fip=True)
    def test_dvr_lifecycle_ha_with_snat_with_fips_with_cent_fips(self):
        # SNAT and HA, with a centralized (SNAT-bound) floating IP.
        self._dvr_router_lifecycle(enable_ha=True, enable_snat=True,
                                   snat_bound_fip=True)
    def _helper_create_dvr_router_fips_for_ext_network(
            self, agent_mode, **dvr_router_kwargs):
        """Create a dvr router with a FIP; return (router, fip_ns_name).

        *dvr_router_kwargs* are forwarded to generate_dvr_router_info so
        callers can point the router at a specific external network.
        """
        self.agent.conf.agent_mode = agent_mode
        router_info = self.generate_dvr_router_info(**dvr_router_kwargs)
        self.mock_plugin_api.get_external_network_id.return_value = (
            router_info['_floatingips'][0]['floating_network_id'])
        router = self.manage_router(self.agent, router_info)
        fip_ns = router.fip_ns.get_name()
        return router, fip_ns
def _validate_fips_for_external_network(self, router, fip_ns):
self.assertTrue(self._namespace_exists(router.ns_name))
self.assertTrue(self._namespace_exists(fip_ns))
self._assert_dvr_floating_ips(router)
self._assert_snat_namespace_does_not_exist(router)
    def test_dvr_gateway_move_does_not_remove_redirect_rules(self):
        """Test to validate snat redirect rules not cleared with snat move."""
        self.agent.conf.agent_mode = 'dvr_snat'
        router_info = self.generate_dvr_router_info(enable_snat=True)
        router1 = self.manage_router(self.agent, router_info)
        # Simulate the gateway being hosted on another node, then push the
        # update through the agent.
        router1.router['gw_port_host'] = ""
        self.agent._process_updated_router(router1.router)
        router_updated = self.agent.router_info[router1.router['id']]
        self.assertTrue(self._namespace_exists(router_updated.ns_name))
        ns_ipr = ip_lib.IPRule(namespace=router1.ns_name)
        ip4_rules_list = ns_ipr.rule.list_rules(lib_constants.IP_VERSION_4)
        self.assertEqual(6, len(ip4_rules_list))
        # IPRule list should have 6 entries.
        # Three entries from 'default', 'main' and 'local' table.
        # One rule for the floatingip.
        # The remaining 2 is for the two router interfaces(csnat ports).
        default_rules_list_count = 0
        interface_rules_list_count = 0
        for ip_rule in ip4_rules_list:
            tbl_index = ip_rule['table']
            if tbl_index in ['local', 'default', 'main',
                             str(dvr_fip_ns.FIP_RT_TBL)]:
                default_rules_list_count = default_rules_list_count + 1
            else:
                interface_rules_list_count = interface_rules_list_count + 1
        self.assertEqual(4, default_rules_list_count)
        self.assertEqual(2, interface_rules_list_count)
    def test_dvr_update_gateway_port_no_fip_fg_port_recovers_itself_with_fpr(
            self):
        """Router update recreates a missing fg- device and downed fpr- peer.

        After unplugging the fg- device and setting the fpr- peer down,
        the first _process_updated_router call with a new gateway IP is
        expected to raise; the retry must restore both devices and install
        the new default route.
        """
        self.agent.conf.agent_mode = 'dvr'
        # Create the router with external net
        router_info = self.generate_dvr_router_info()
        external_gw_port = router_info['gw_port']
        ext_net_id = router_info['_floatingips'][0]['floating_network_id']
        self.mock_plugin_api.get_external_network_id.return_value = ext_net_id
        router = self.manage_router(self.agent, router_info)
        fg_port = router.fip_ns.agent_gateway_port
        fg_port_name = router.fip_ns.get_ext_device_name(fg_port['id'])
        fg_device = ip_lib.IPDevice(fg_port_name,
                                    namespace=router.fip_ns.name)
        fip_2_rtr_name = router.fip_ns.get_int_device_name(router.router_id)
        fpr_device = ip_lib.IPDevice(fip_2_rtr_name,
                                     namespace=router.fip_ns.name)
        # Now validate if the gateway is properly configured.
        rtr_2_fip, fip_2_rtr = router.rtr_fip_subnet.get_pair()
        tbl_index = router._get_snat_idx(fip_2_rtr)
        tbl_filter = ['table', tbl_index]
        self.assertIn('gateway', fg_device.route.get_gateway(
            filters=tbl_filter))
        self._validate_fips_for_external_network(
            router, router.fip_ns.get_name())
        # Now delete the fg- port that was created
        ext_net_bridge = self.agent.conf.external_network_bridge
        router.fip_ns.driver.unplug(fg_port_name,
                                    bridge=ext_net_bridge,
                                    namespace=router.fip_ns.name,
                                    prefix=dvr_fip_ns.FIP_EXT_DEV_PREFIX)
        # Now check if the fg- port is missing.
        self.assertFalse(fg_device.exists())
        fpr_device.link.set_down()
        # Now change the gateway ip for the router and do an update.
        router.ex_gw_port = copy.deepcopy(router.ex_gw_port)
        new_fg_port = copy.deepcopy(fg_port)
        for subnet in new_fg_port['subnets']:
            subnet['gateway_ip'] = '19.4.4.2'
        router.router[n_const.FLOATINGIP_AGENT_INTF_KEY] = [new_fg_port]
        # First update fails because the fg- device is gone ...
        self.assertRaises(n_exc.FloatingIpSetupException,
                          self.agent._process_updated_router,
                          router.router)
        # ... the retry recreates the devices and installs the new route.
        self.agent._process_updated_router(router.router)
        self.assertTrue(fg_device.exists())
        self.assertTrue(fpr_device.exists())
        updated_route = fg_device.route.list_routes(
            ip_version=lib_constants.IP_VERSION_4,
            table=tbl_index)
        expected_route = [{'cidr': '0.0.0.0/0',
                           'dev': fg_port_name,
                           'table': tbl_index,
                           u'via': u'19.4.4.2'}]
        self.assertEqual(expected_route, updated_route)
        self._validate_fips_for_external_network(
            router, router.fip_ns.get_name())
        self._delete_router(self.agent, router.router_id)
        self._assert_fip_namespace_deleted(external_gw_port)
    def test_dvr_update_gateway_port_with_no_gw_port_in_namespace(self):
        """manage_router recreates a missing fg- device after a failure.

        The fg- device is unplugged and the gateway IP changed; the first
        re-manage attempt is expected to raise, and the retry must restore
        the device and install the new default route.
        """
        self.agent.conf.agent_mode = 'dvr'
        # Create the router with external net
        router_info = self.generate_dvr_router_info()
        external_gw_port = router_info['gw_port']
        ext_net_id = router_info['_floatingips'][0]['floating_network_id']
        self.mock_plugin_api.get_external_network_id.return_value = ext_net_id
        router = self.manage_router(self.agent, router_info)
        fg_port = router.fip_ns.agent_gateway_port
        fg_port_name = router.fip_ns.get_ext_device_name(fg_port['id'])
        fg_device = ip_lib.IPDevice(fg_port_name,
                                    namespace=router.fip_ns.name)
        # Now validate if the gateway is properly configured.
        rtr_2_fip, fip_2_rtr = router.rtr_fip_subnet.get_pair()
        tbl_index = router._get_snat_idx(fip_2_rtr)
        tbl_filter = ['table', tbl_index]
        self.assertIn('gateway', fg_device.route.get_gateway(
            filters=tbl_filter))
        self._validate_fips_for_external_network(
            router, router.fip_ns.get_name())
        # Now delete the fg- port that was created
        ext_net_bridge = self.agent.conf.external_network_bridge
        router.fip_ns.driver.unplug(fg_port_name,
                                    bridge=ext_net_bridge,
                                    namespace=router.fip_ns.name,
                                    prefix=dvr_fip_ns.FIP_EXT_DEV_PREFIX)
        # Now check if the fg- port is missing.
        self.assertFalse(fg_device.exists())
        # Now change the gateway ip for the router and do an update.
        router.ex_gw_port = copy.deepcopy(router.ex_gw_port)
        new_fg_port = copy.deepcopy(fg_port)
        for subnet in new_fg_port['subnets']:
            subnet['gateway_ip'] = '19.4.4.2'
        router.router[n_const.FLOATINGIP_AGENT_INTF_KEY] = [new_fg_port]
        # First manage attempt fails on the missing fg- device; the retry
        # below must succeed and recreate it.
        self.assertRaises(n_exc.FloatingIpSetupException,
                          self.manage_router,
                          self.agent,
                          router.router)
        router = self.manage_router(self.agent, router.router)
        self.assertTrue(fg_device.exists())
        updated_route = fg_device.route.list_routes(
            ip_version=lib_constants.IP_VERSION_4,
            table=tbl_index)
        expected_route = [{'cidr': '0.0.0.0/0',
                           'dev': fg_port_name,
                           'table': tbl_index,
                           u'via': u'19.4.4.2'}]
        self.assertEqual(expected_route, updated_route)
        self._validate_fips_for_external_network(
            router, router.fip_ns.get_name())
        self._delete_router(self.agent, router.router_id)
        self._assert_fip_namespace_deleted(external_gw_port)
    @mock.patch.object(dvr_fip_ns.FipNamespace, 'subscribe')
    def test_dvr_process_fips_with_no_gw_port_in_namespace(
            self, fip_subscribe):
        """FIP processing recovers when the fg- port is initially missing.

        With FipNamespace.subscribe mocked to return False the first
        manage_router call raises; once subscribe returns True the router
        is set up and the gateway route must be in place.
        """
        self.agent.conf.agent_mode = 'dvr'
        # Create the router with external net
        router_info = self.generate_dvr_router_info()
        external_gw_port = router_info['gw_port']
        ext_net_id = router_info['_floatingips'][0]['floating_network_id']
        self.mock_plugin_api.get_external_network_id.return_value = ext_net_id
        # Create the fip namespace up front
        fip_ns = dvr_fip_ns.FipNamespace(ext_net_id,
                                         self.agent.conf,
                                         self.agent.driver,
                                         self.agent.use_ipv6)
        fip_ns.create()
        # Create the router with the fip, this shouldn't allow the
        # update_gateway_port to be called without the fg- port
        fip_subscribe.return_value = False
        fip_ns.agent_gateway_port = (
            router_info[n_const.FLOATINGIP_AGENT_INTF_KEY])
        # This will raise the exception and will also clear
        # subscription for the ext_net_id
        self.assertRaises(n_exc.FloatingIpSetupException,
                          self.manage_router,
                          self.agent,
                          router_info)
        fip_subscribe.return_value = True
        self.manage_router(self.agent, router_info)
        # Now update the router again
        router = self.manage_router(self.agent, router_info)
        fg_port = router.fip_ns.agent_gateway_port
        fg_port_name = router.fip_ns.get_ext_device_name(fg_port['id'])
        fg_device = ip_lib.IPDevice(fg_port_name,
                                    namespace=router.fip_ns.name)
        rtr_2_fip, fip_2_rtr = router.rtr_fip_subnet.get_pair()
        tbl_index = router._get_snat_idx(fip_2_rtr)
        tbl_filter = ['table', tbl_index]
        # Now validate if the gateway is properly configured.
        self.assertIn('gateway', fg_device.route.get_gateway(
            filters=tbl_filter))
        self._validate_fips_for_external_network(
            router, router.fip_ns.get_name())
        self._delete_router(self.agent, router.router_id)
        self._assert_fip_namespace_deleted(external_gw_port)
def test_dvr_router_fips_stale_gw_port(self):
self.agent.conf.agent_mode = 'dvr'
# Create the router with external net
dvr_router_kwargs = {'ip_address': '19.4.4.3',
'subnet_cidr': '19.4.4.0/24',
'gateway_ip': '19.4.4.1',
'gateway_mac': 'ca:fe:de:ab:cd:ef'}
router_info = self.generate_dvr_router_info(**dvr_router_kwargs)
external_gw_port = router_info['gw_port']
ext_net_id = router_info['_floatingips'][0]['floating_network_id']
self.mock_plugin_api.get_external_network_id.return_value(ext_net_id)
# Create the fip namespace up front
stale_fip_ns = dvr_fip_ns.FipNamespace(ext_net_id,
self.agent.conf,
self.agent.driver,
self.agent.use_ipv6)
stale_fip_ns.create()
# Add a stale fg port to the namespace
fixed_ip = external_gw_port['fixed_ips'][0]
float_subnet = external_gw_port['subnets'][0]
fip_gw_port_ip = str(netaddr.IPAddress(fixed_ip['ip_address']) + 10)
prefixlen = netaddr.IPNetwork(float_subnet['cidr']).prefixlen
stale_agent_gw_port = {
'subnets': [{'cidr': float_subnet['cidr'],
'gateway_ip': float_subnet['gateway_ip'],
'id': fixed_ip['subnet_id']}],
'network_id': external_gw_port['network_id'],
'device_owner': lib_constants.DEVICE_OWNER_AGENT_GW,
'mac_address': 'fa:16:3e:80:8f:89',
portbindings.HOST_ID: self.agent.conf.host,
'fixed_ips': [{'subnet_id': fixed_ip['subnet_id'],
'ip_address': fip_gw_port_ip,
'prefixlen': prefixlen}],
'id': framework._uuid(),
'device_id': framework._uuid()}
stale_fip_ns.create_or_update_gateway_port(stale_agent_gw_port)
stale_dev_exists = self.device_exists_with_ips_and_mac(
stale_agent_gw_port,
stale_fip_ns.get_ext_device_name,
stale_fip_ns.get_name())
self.assertTrue(stale_dev_exists)
# Create the router, this shouldn't allow the duplicate port to stay
router = self.manage_router(self.agent, router_info)
# Assert the device no longer exists
stale_dev_exists = self.device_exists_with_ips_and_mac(
stale_agent_gw_port,
stale_fip_ns.get_ext_device_name,
stale_fip_ns.get_name())
self.assertFalse(stale_dev_exists)
# Validate things are looking good and clean up
self._validate_fips_for_external_network(
router, router.fip_ns.get_name())
ext_gateway_port = router_info['gw_port']
self._delete_router(self.agent, router.router_id)
self._assert_fip_namespace_deleted(ext_gateway_port)
    def test_dvr_router_gateway_redirect_cleanup_on_agent_restart(self):
        """Test to validate the router namespace gateway redirect rule cleanup.
        This test checks for the non existence of the gateway redirect
        rules in the router namespace after the agent restarts while the
        gateway is removed for the router.
        """
        self.agent.conf.agent_mode = 'dvr_snat'
        router_info = self.generate_dvr_router_info()
        router1 = self.manage_router(self.agent, router_info)
        self._assert_snat_namespace_exists(router1)
        self.assertTrue(self._namespace_exists(router1.ns_name))
        # Restart the agent with the gateway stripped from the router dict.
        restarted_agent = neutron_l3_agent.L3NATAgentWithStateReport(
            self.agent.host, self.agent.conf)
        router1.router['gw_port'] = ""
        router1.router['gw_port_host'] = ""
        router1.router['external_gateway_info'] = ""
        restarted_router = self.manage_router(restarted_agent, router1.router)
        self.assertTrue(self._namespace_exists(restarted_router.ns_name))
        ns_ipr = ip_lib.IPRule(namespace=router1.ns_name)
        ip4_rules_list = ns_ipr.rule.list_rules(lib_constants.IP_VERSION_4)
        ip6_rules_list = ns_ipr.rule.list_rules(lib_constants.IP_VERSION_6)
        # Just make sure the basic set of rules are there in the router
        # namespace
        self.assertEqual(3, len(ip4_rules_list))
        self.assertEqual(2, len(ip6_rules_list))
    def test_dvr_unused_snat_ns_deleted_when_agent_restarts_after_move(self):
        """Test to validate the stale snat namespace delete with snat move.
        This test validates the stale snat namespace cleanup when
        the agent restarts after the gateway port has been moved
        from the agent.
        """
        self.agent.conf.agent_mode = 'dvr_snat'
        router_info = self.generate_dvr_router_info()
        router1 = self.manage_router(self.agent, router_info)
        self._assert_snat_namespace_exists(router1)
        restarted_agent = neutron_l3_agent.L3NATAgentWithStateReport(
            self.agent.host, self.agent.conf)
        # Point the gateway at another host before the restarted agent
        # resyncs; the local snat namespace must then be removed.
        router1.router['gw_port_host'] = "my-new-host"
        restarted_router = self.manage_router(restarted_agent, router1.router)
        self._assert_snat_namespace_does_not_exist(restarted_router)
    def test_dvr_router_fips_for_multiple_ext_networks(self):
        """Each external network gets its own FIP namespace and wiring."""
        agent_mode = 'dvr'
        # Create the first router fip with external net1
        dvr_router1_kwargs = {'ip_address': '19.4.4.3',
                              'subnet_cidr': '19.4.4.0/24',
                              'gateway_ip': '19.4.4.1',
                              'gateway_mac': 'ca:fe:de:ab:cd:ef'}
        router1, fip1_ns = (
            self._helper_create_dvr_router_fips_for_ext_network(
                agent_mode, **dvr_router1_kwargs))
        # Validate the fip with external net1
        self._validate_fips_for_external_network(router1, fip1_ns)
        # Create the second router fip with external net2
        dvr_router2_kwargs = {'ip_address': '19.4.5.3',
                              'subnet_cidr': '19.4.5.0/24',
                              'gateway_ip': '19.4.5.1',
                              'gateway_mac': 'ca:fe:de:ab:cd:fe'}
        router2, fip2_ns = (
            self._helper_create_dvr_router_fips_for_ext_network(
                agent_mode, **dvr_router2_kwargs))
        # Validate the fip with external net2
        self._validate_fips_for_external_network(router2, fip2_ns)
    def _dvr_router_lifecycle(self, enable_ha=False, enable_snat=False,
                              custom_mtu=2000,
                              ip_version=lib_constants.IP_VERSION_4,
                              dual_stack=False,
                              snat_bound_fip=False):
        '''Test dvr router lifecycle
        :param enable_ha: sets the ha value for the router.
        :param enable_snat: the value of enable_snat is used
        to set the agent_mode.
        :param custom_mtu: MTU applied to every router port.
        :param ip_version: IP version used for the snat route checks.
        :param dual_stack: when True, check IPv4 and IPv6 snat routes.
        :param snat_bound_fip: also wire up a centralized (snat bound) FIP.
        '''
        # The value of agent_mode can be dvr, dvr_snat, or legacy.
        # Since by definition this is a dvr (distributed = true)
        # only dvr and dvr_snat are applicable
        self.agent.conf.agent_mode = 'dvr_snat' if enable_snat else 'dvr'
        # We get the router info particular to a dvr router
        router_info = self.generate_dvr_router_info(
            enable_ha, enable_snat, extra_routes=True,
            snat_bound_fip=snat_bound_fip)
        for key in ('_interfaces', '_snat_router_interfaces',
                    '_floatingip_agent_interfaces'):
            for port in router_info[key]:
                port['mtu'] = custom_mtu
        router_info['gw_port']['mtu'] = custom_mtu
        if enable_ha:
            router_info['_ha_interface']['mtu'] = custom_mtu
        # We need to mock the get_agent_gateway_port return value
        # because the whole L3PluginApi is mocked and we need the port
        # gateway_port information before the l3_agent will create it.
        # The port returned needs to have the same information as
        # router_info['gw_port']
        fip_agent_gw_port = self._get_fip_agent_gw_port_for_router(
            router_info['gw_port'])
        self.mock_plugin_api.get_agent_gateway_port.return_value = (
            fip_agent_gw_port)
        # We also need to mock the get_external_network_id method to
        # get the correct fip namespace.
        self.mock_plugin_api.get_external_network_id.return_value = (
            router_info['_floatingips'][0]['floating_network_id'])
        # With all that set we can now ask the l3_agent to
        # manage the router (create it, create namespaces,
        # attach interfaces, etc...)
        router = self.manage_router(self.agent, router_info)
        if enable_ha:
            port = router.get_ex_gw_port()
            interface_name = router.get_external_device_name(port['id'])
            self._assert_no_ip_addresses_on_interface(router.ha_namespace,
                                                      interface_name)
            utils.wait_until_true(lambda: router.ha_state == 'master')
            # Keepalived notifies of a state transition when it starts,
            # not when it ends. Thus, we have to wait until keepalived finishes
            # configuring everything. We verify this by waiting until the last
            # device has an IP address.
            device = router.router[lib_constants.INTERFACE_KEY][-1]
            device_exists = functools.partial(
                self.device_exists_with_ips_and_mac,
                device,
                router.get_internal_device_name,
                router.ns_name)
            utils.wait_until_true(device_exists)
            name = router.get_internal_device_name(device['id'])
            self.assertEqual(custom_mtu,
                             ip_lib.IPDevice(name, router.ns_name).link.mtu)
        ext_gateway_port = router_info['gw_port']
        self.assertTrue(self._namespace_exists(router.ns_name))
        utils.wait_until_true(
            lambda: self._metadata_proxy_exists(self.agent.conf, router))
        self._assert_internal_devices(router)
        self._assert_dvr_external_device(router)
        self._assert_dvr_gateway(router)
        self._assert_dvr_floating_ips(router, snat_bound_fip=snat_bound_fip)
        self._assert_snat_chains(router)
        self._assert_floating_ip_chains(router, snat_bound_fip=snat_bound_fip)
        self._assert_metadata_chains(router)
        self._assert_rfp_fpr_mtu(router, custom_mtu)
        if enable_snat:
            if (ip_version == lib_constants.IP_VERSION_6 or dual_stack):
                ip_versions = [lib_constants.IP_VERSION_4,
                               lib_constants.IP_VERSION_6]
            else:
                ip_versions = [lib_constants.IP_VERSION_4]
            snat_ns_name = dvr_snat_ns.SnatNamespace.get_snat_ns_name(
                router.router_id)
            self._assert_onlink_subnet_routes(
                router, ip_versions, snat_ns_name)
            self._assert_extra_routes(router, namespace=snat_ns_name)
        # During normal operation, a router-gateway-clear followed by
        # a router delete results in two notifications to the agent. This
        # code flow simulates the exceptional case where the notification of
        # the clearing of the gateway hast been missed, so we are checking
        # that the L3 agent is robust enough to handle that case and delete
        # the router correctly.
        self._delete_router(self.agent, router.router_id)
        self._assert_fip_namespace_deleted(ext_gateway_port)
        self._assert_router_does_not_exist(router)
        self._assert_snat_namespace_does_not_exist(router)
    def generate_dvr_router_info(self,
                                 enable_ha=False,
                                 enable_snat=False,
                                 enable_gw=True,
                                 snat_bound_fip=False,
                                 agent=None,
                                 extra_routes=False,
                                 enable_floating_ip=True,
                                 enable_centralized_fip=False,
                                 **kwargs):
        """Build a dvr router dict for the requested feature combination.

        Starts from the common l3_test_common fixture and adds the
        dvr-specific pieces: host bindings, csnat port info and the fip
        agent gateway port. Extra *kwargs* are forwarded to
        prepare_router_data.
        """
        if not agent:
            agent = self.agent
        router = l3_test_common.prepare_router_data(
            enable_snat=enable_snat,
            enable_floating_ip=enable_floating_ip,
            enable_ha=enable_ha,
            extra_routes=extra_routes,
            num_internal_ports=2,
            enable_gw=enable_gw,
            snat_bound_fip=snat_bound_fip,
            **kwargs)
        internal_ports = router.get(lib_constants.INTERFACE_KEY, [])
        router['distributed'] = True
        router['gw_port_host'] = agent.conf.host
        if enable_floating_ip:
            for floating_ip in router[lib_constants.FLOATINGIP_KEY]:
                floating_ip['host'] = agent.conf.host
        if enable_floating_ip and enable_centralized_fip:
            # For centralizing the fip, we are emulating the legacy
            # router behavior were the fip dict does not contain any
            # host information.
            router[lib_constants.FLOATINGIP_KEY][0]['host'] = None
        # In order to test the mixed dvr_snat and compute scenario, we create
        # two floating IPs, one is distributed, another is centralized.
        # The distributed floating IP should have the host, which was
        # just set to None above, then we set it back. The centralized
        # floating IP has host None, and this IP will be used to test
        # migration from centralized to distributed.
        if snat_bound_fip:
            router[lib_constants.FLOATINGIP_KEY][0]['host'] = agent.conf.host
            router[lib_constants.FLOATINGIP_KEY][1][
                lib_constants.DVR_SNAT_BOUND] = True
            router[lib_constants.FLOATINGIP_KEY][1]['host'] = None
        if enable_gw:
            external_gw_port = router['gw_port']
            router['gw_port'][portbindings.HOST_ID] = agent.conf.host
            self._add_snat_port_info_to_router(router, internal_ports)
            # FIP has a dependency on external gateway. So we need to create
            # the snat_port info and fip_agent_gw_port_info irrespective of
            # the agent type the dvr supports. The namespace creation is
            # dependent on the agent_type.
            if enable_floating_ip:
                for index, floating_ip in enumerate(router['_floatingips']):
                    floating_ip['floating_network_id'] = (
                        external_gw_port['network_id'])
                    floating_ip['port_id'] = internal_ports[index]['id']
                    floating_ip['status'] = 'ACTIVE'
                self._add_fip_agent_gw_port_info_to_router(router,
                                                           external_gw_port)
        return router
    def _get_fip_agent_gw_port_for_router(
            self, external_gw_port):
        """Return a fip agent gateway port dict derived from the gw port.

        The port's address is the gateway port's fixed IP plus 5 so the
        two never clash on the external subnet. Falls through (returns
        None) when no external gateway port is given.
        """
        # Add fip agent gateway port information to the router_info
        if external_gw_port:
            # Get values from external gateway port
            fixed_ip = external_gw_port['fixed_ips'][0]
            float_subnet = external_gw_port['subnets'][0]
            port_ip = fixed_ip['ip_address']
            # Pick an ip address which is not the same as port_ip
            fip_gw_port_ip = str(netaddr.IPAddress(port_ip) + 5)
            # Add floatingip agent gateway port info to router
            prefixlen = netaddr.IPNetwork(float_subnet['cidr']).prefixlen
            fip_agent_gw_port_info = {
                'subnets': [
                    {'cidr': float_subnet['cidr'],
                     'gateway_ip': float_subnet['gateway_ip'],
                     'id': fixed_ip['subnet_id']}],
                'network_id': external_gw_port['network_id'],
                'device_owner': lib_constants.DEVICE_OWNER_AGENT_GW,
                'mac_address': 'fa:16:3e:80:8d:89',
                portbindings.HOST_ID: self.agent.conf.host,
                'fixed_ips': [{'subnet_id': fixed_ip['subnet_id'],
                               'ip_address': fip_gw_port_ip,
                               'prefixlen': prefixlen}],
                'id': framework._uuid(),
                'device_id': framework._uuid()
            }
            return fip_agent_gw_port_info
    def _add_fip_agent_gw_port_info_to_router(self, router, external_gw_port):
        """Attach a synthetic fip agent gateway port to *router* in place.

        No-op when the router already carries one, or when there is no
        external gateway port to derive the values from.
        """
        # Add fip agent gateway port information to the router_info
        fip_gw_port_list = router.get(
            n_const.FLOATINGIP_AGENT_INTF_KEY, [])
        if not fip_gw_port_list and external_gw_port:
            # Get values from external gateway port
            fixed_ip = external_gw_port['fixed_ips'][0]
            float_subnet = external_gw_port['subnets'][0]
            port_ip = fixed_ip['ip_address']
            # Pick an ip address which is not the same as port_ip
            fip_gw_port_ip = str(netaddr.IPAddress(port_ip) + 5)
            # Add floatingip agent gateway port info to router
            prefixlen = netaddr.IPNetwork(float_subnet['cidr']).prefixlen
            router[n_const.FLOATINGIP_AGENT_INTF_KEY] = [
                {'subnets': [
                    {'cidr': float_subnet['cidr'],
                     'gateway_ip': float_subnet['gateway_ip'],
                     'id': fixed_ip['subnet_id']}],
                 'network_id': external_gw_port['network_id'],
                 'device_owner': lib_constants.DEVICE_OWNER_AGENT_GW,
                 'mac_address': 'fa:16:3e:80:8d:89',
                 portbindings.HOST_ID: self.agent.conf.host,
                 'fixed_ips': [{'subnet_id': fixed_ip['subnet_id'],
                                'ip_address': fip_gw_port_ip,
                                'prefixlen': prefixlen}],
                 'id': framework._uuid(),
                 'device_id': framework._uuid()}
            ]
    def _add_snat_port_info_to_router(self, router, internal_ports):
        """Derive csnat port entries from *internal_ports* onto *router*.

        Skipped when the router already has snat port info or there are
        no internal ports to mirror.
        """
        # Add snat port information to the router
        snat_port_list = router.get(n_const.SNAT_ROUTER_INTF_KEY, [])
        if not snat_port_list and internal_ports:
            router[n_const.SNAT_ROUTER_INTF_KEY] = []
            for port in internal_ports:
                # Get values from internal port
                fixed_ip = port['fixed_ips'][0]
                snat_subnet = port['subnets'][0]
                port_ip = fixed_ip['ip_address']
                # Pick an ip address which is not the same as port_ip
                snat_ip = str(netaddr.IPAddress(port_ip) + 5)
                # Add the info to router as the first snat port
                # in the list of snat ports
                prefixlen = netaddr.IPNetwork(snat_subnet['cidr']).prefixlen
                snat_router_port = {
                    'subnets': [
                        {'cidr': snat_subnet['cidr'],
                         'gateway_ip': snat_subnet['gateway_ip'],
                         'id': fixed_ip['subnet_id']}],
                    'network_id': port['network_id'],
                    'device_owner': lib_constants.DEVICE_OWNER_ROUTER_SNAT,
                    'mac_address': 'fa:16:3e:80:8d:89',
                    'fixed_ips': [{'subnet_id': fixed_ip['subnet_id'],
                                   'ip_address': snat_ip,
                                   'prefixlen': prefixlen}],
                    'id': framework._uuid(),
                    'device_id': framework._uuid()}
                # Get the address scope if there is any
                if 'address_scopes' in port:
                    snat_router_port['address_scopes'] = port['address_scopes']
                router[n_const.SNAT_ROUTER_INTF_KEY].append(
                    snat_router_port)
    def _assert_dvr_external_device(self, router):
        """Check external device placement according to the agent mode."""
        external_port = router.get_ex_gw_port()
        snat_ns_name = dvr_snat_ns.SnatNamespace.get_snat_ns_name(
            router.router_id)
        # if the agent is in dvr_snat mode, then we have to check
        # that the correct ports and ip addresses exist in the
        # snat_ns_name namespace
        if self.agent.conf.agent_mode == 'dvr_snat':
            device_exists = functools.partial(
                self.device_exists_with_ips_and_mac,
                external_port,
                router.get_external_device_name,
                snat_ns_name)
            utils.wait_until_true(device_exists)
        # if the agent is in dvr mode then the snat_ns_name namespace
        # should not be present at all:
        elif self.agent.conf.agent_mode == 'dvr':
            self.assertFalse(
                self._namespace_exists(snat_ns_name),
                "namespace %s was found but agent is in dvr mode not dvr_snat"
                % (str(snat_ns_name))
            )
        # if the agent is anything else the test is misconfigured
        # we force a test failure with message
        else:
            self.fail("Agent not configured for dvr or dvr_snat")
def _assert_dvr_gateway(self, router):
gateway_expected_in_snat_namespace = (
self.agent.conf.agent_mode == 'dvr_snat'
)
if gateway_expected_in_snat_namespace:
self._assert_dvr_snat_gateway(router)
self._assert_removal_of_already_deleted_gateway_device(router)
snat_namespace_should_not_exist = (
self.agent.conf.agent_mode == 'dvr'
)
if snat_namespace_should_not_exist:
self._assert_snat_namespace_does_not_exist(router)
def _assert_dvr_snat_gateway(self, router):
namespace = dvr_snat_ns.SnatNamespace.get_snat_ns_name(
router.router_id)
external_port = router.get_ex_gw_port()
external_device_name = router.get_external_device_name(
external_port['id'])
external_device = ip_lib.IPDevice(external_device_name,
namespace=namespace)
existing_gateway = (
external_device.route.get_gateway().get('gateway'))
expected_gateway = external_port['subnets'][0]['gateway_ip']
self.assertEqual(expected_gateway, existing_gateway)
def _assert_removal_of_already_deleted_gateway_device(self, router):
namespace = dvr_snat_ns.SnatNamespace.get_snat_ns_name(
router.router_id)
device = ip_lib.IPDevice("fakedevice",
namespace=namespace)
# Assert that no exception is thrown for this case
self.assertIsNone(router._delete_gateway_device_if_exists(
device, "192.168.0.1", 0))
def _assert_snat_namespace_does_not_exist(self, router):
namespace = dvr_snat_ns.SnatNamespace.get_snat_ns_name(
router.router_id)
self.assertFalse(self._namespace_exists(namespace))
    def _assert_dvr_floating_ips(self, router, snat_bound_fip=False):
        """Verify FIP plumbing in the fip and router namespaces."""
        # NOTE(review): snat_bound_fip is currently unused in this body;
        # the per-fip DVR_SNAT_BOUND flag below drives the manager choice.
        # in the fip namespace:
        # Check that the fg-<port-id> (floatingip_agent_gateway)
        # is created with the ip address of the external gateway port
        floating_ips = router.router[lib_constants.FLOATINGIP_KEY]
        self.assertTrue(floating_ips)
        # We need to fetch the floatingip agent gateway port info
        # from the router_info
        floating_agent_gw_port = (
            router.router[n_const.FLOATINGIP_AGENT_INTF_KEY])
        self.assertTrue(floating_agent_gw_port)
        external_gw_port = floating_agent_gw_port[0]
        fip_ns = self.agent.get_fip_ns(floating_ips[0]['floating_network_id'])
        fip_ns_name = fip_ns.get_name()
        fg_port_created_successfully = ip_lib.device_exists_with_ips_and_mac(
            fip_ns.get_ext_device_name(external_gw_port['id']),
            [self._port_first_ip_cidr(external_gw_port)],
            external_gw_port['mac_address'],
            namespace=fip_ns_name)
        self.assertTrue(fg_port_created_successfully)
        # Check fpr-router device has been created
        device_name = fip_ns.get_int_device_name(router.router_id)
        fpr_router_device_created_successfully = ip_lib.device_exists(
            device_name, namespace=fip_ns_name)
        self.assertTrue(fpr_router_device_created_successfully)
        # In the router namespace
        # Check rfp-<router-id> is created correctly
        for fip in floating_ips:
            device_name = fip_ns.get_rtr_ext_device_name(router.router_id)
            self.assertTrue(ip_lib.device_exists(
                device_name, namespace=router.ns_name))
        # In the router namespace, check the iptables rules are set
        # correctly
        for fip in floating_ips:
            expected_rules = router.floating_forward_rules(fip)
            if fip.get(lib_constants.DVR_SNAT_BOUND):
                iptables_mgr = router.snat_iptables_manager
            else:
                iptables_mgr = router.iptables_manager
            self._assert_iptables_rules_exist(
                iptables_mgr, 'nat', expected_rules)
def test_dvr_router_with_ha_for_fip_disassociation(self):
"""Test to validate the fip rules are deleted in dvr_snat_ha router.
This test validates the fip rules are getting deleted in
a router namespace when the router has ha and snat enabled after
the floatingip is disassociated.
"""
self.agent.conf.agent_mode = 'dvr_snat'
router_info = self.generate_dvr_router_info(
enable_snat=True, enable_ha=True, enable_gw=True)
fip_agent_gw_port = router_info[n_const.FLOATINGIP_AGENT_INTF_KEY]
self.mock_plugin_api.get_agent_gateway_port.return_value = (
fip_agent_gw_port[0])
router1 = self.manage_router(self.agent, router_info)
fip_ns_name = router1.fip_ns.get_name()
self.assertTrue(self._namespace_exists(router1.ns_name))
self.assertTrue(self._namespace_exists(fip_ns_name))
self._assert_snat_namespace_exists(router1)
ns_ipr = ip_lib.IPRule(namespace=router1.ns_name)
ip4_rules_list_with_fip = ns_ipr.rule.list_rules(
lib_constants.IP_VERSION_4)
# The rules_list should have 6 entries:
# 3 default rules (local, main and default)
# 1 Fip forward rule
# 2 interface rules to redirect to snat
self.assertEqual(6, len(ip4_rules_list_with_fip))
rfp_device_name = router1.fip_ns.get_rtr_ext_device_name(
router1.router_id)
rfp_device = ip_lib.IPDevice(rfp_device_name,
namespace=router1.ns_name)
rtr_2_fip, fip_2_rtr = router1.rtr_fip_subnet.get_pair()
self._assert_default_gateway(
fip_2_rtr, rfp_device, rfp_device_name)
router1.router[lib_constants.FLOATINGIP_KEY] = []
self.agent._process_updated_router(router1.router)
router_updated = self.agent.router_info[router1.router['id']]
self.assertTrue(self._namespace_exists(router_updated.ns_name))
self._assert_snat_namespace_exists(router1)
ip4_rules_list = ns_ipr.rule.list_rules(lib_constants.IP_VERSION_4)
self.assertEqual(5, len(ip4_rules_list))
interface_rules_list_count = 0
fip_rule_count = 0
for ip_rule in ip4_rules_list:
tbl_index = ip_rule['table']
if tbl_index not in ['local', 'default', 'main']:
interface_rules_list_count += 1
if tbl_index == dvr_fip_ns.FIP_RT_TBL:
fip_rule_count += 1
self.assertEqual(2, interface_rules_list_count)
self.assertEqual(0, fip_rule_count)
def _assert_default_gateway(self, fip_2_rtr, rfp_device, device_name):
expected_gateway = [{'dev': device_name,
'cidr': '0.0.0.0/0',
'via': str(fip_2_rtr.ip),
'table': dvr_fip_ns.FIP_RT_TBL}]
self.assertEqual(expected_gateway, rfp_device.route.list_routes(
ip_version=lib_constants.IP_VERSION_4,
table=dvr_fip_ns.FIP_RT_TBL,
via=str(fip_2_rtr.ip)))
    def test_dvr_router_rem_fips_on_restarted_agent(self):
        """FIP removal at restart keeps snat gateway and fip namespace."""
        self.agent.conf.agent_mode = 'dvr_snat'
        router_info = self.generate_dvr_router_info()
        router1 = self.manage_router(self.agent, router_info)
        fip_ns = router1.fip_ns.get_name()
        self.assertTrue(self._namespace_exists(fip_ns))
        restarted_agent = neutron_l3_agent.L3NATAgentWithStateReport(
            self.agent.host, self.agent.conf)
        # Drop all floating IPs before the restarted agent resyncs.
        router1.router[lib_constants.FLOATINGIP_KEY] = []
        self.manage_router(restarted_agent, router1.router)
        self._assert_dvr_snat_gateway(router1)
        self.assertTrue(self._namespace_exists(fip_ns))
    def test_dvr_router_update_on_restarted_agent_sets_rtr_fip_connect(self):
        """A restarted agent must re-mark the router/fip link as connected."""
        self.agent.conf.agent_mode = 'dvr_snat'
        router_info = self.generate_dvr_router_info()
        router1 = self.manage_router(self.agent, router_info)
        self.assertTrue(router1.rtr_fip_connect)
        fip_ns = router1.fip_ns.get_name()
        self.assertTrue(self._namespace_exists(fip_ns))
        restarted_agent = neutron_l3_agent.L3NATAgentWithStateReport(
            self.agent.host, self.agent.conf)
        router_updated = self.manage_router(restarted_agent, router1.router)
        self.assertTrue(router_updated.rtr_fip_connect)
    def test_dvr_router_add_fips_on_restarted_agent(self):
        """A changed fixed/floating IP pair gets a fresh ip-rule priority."""
        self.agent.conf.agent_mode = 'dvr'
        router_info = self.generate_dvr_router_info()
        router = self.manage_router(self.agent, router_info)
        floating_ips = router.router[lib_constants.FLOATINGIP_KEY]
        router_ns = router.ns_name
        fip_rule_prio_1 = self._get_fixed_ip_rule_priority(
            router_ns, floating_ips[0]['fixed_ip_address'])
        restarted_agent = neutron_l3_agent.L3NATAgent(
            self.agent.host, self.agent.conf)
        # Re-associate the FIP with a different pair of addresses.
        floating_ips[0]['floating_ip_address'] = '21.4.4.2'
        floating_ips[0]['fixed_ip_address'] = '10.0.0.2'
        self.manage_router(restarted_agent, router_info)
        fip_rule_prio_2 = self._get_fixed_ip_rule_priority(
            router_ns, floating_ips[0]['fixed_ip_address'])
        self.assertNotEqual(fip_rule_prio_1, fip_rule_prio_2)
    def test_dvr_router_floating_ip_moved(self):
        """Moving a FIP to a new fixed IP replaces the source-ip rule."""
        self.agent.conf.agent_mode = 'dvr'
        router_info = self.generate_dvr_router_info()
        router = self.manage_router(self.agent, router_info)
        floating_ips = router.router[lib_constants.FLOATINGIP_KEY]
        router_ns = router.ns_name
        fixed_ip = floating_ips[0]['fixed_ip_address']
        self.assertTrue(self._fixed_ip_rule_exists(router_ns, fixed_ip))
        # Floating IP reassigned to another fixed IP
        new_fixed_ip = '10.0.0.2'
        self.assertNotEqual(new_fixed_ip, fixed_ip)
        floating_ips[0]['fixed_ip_address'] = new_fixed_ip
        self.agent._process_updated_router(router.router)
        self.assertFalse(self._fixed_ip_rule_exists(router_ns, fixed_ip))
        self.assertTrue(self._fixed_ip_rule_exists(router_ns, new_fixed_ip))
def _assert_iptables_rules_exist(
        self, router_iptables_manager, table_name, expected_rules):
    """Assert each (chain, rule) pair appears in the given table."""
    table_rules = router_iptables_manager.get_rules_for_table(table_name)
    for chain, rule_spec in expected_rules:
        rendered = str(iptables_manager.IptablesRule(chain, rule_spec))
        self.assertIn(rendered, table_rules)
    return True
def _assert_iptables_rules_not_exist(
        self, router_iptables_manager, table_name, expected_rules):
    """Assert each (chain, rule) pair is absent from the given table."""
    table_rules = router_iptables_manager.get_rules_for_table(table_name)
    for chain, rule_spec in expected_rules:
        rendered = str(iptables_manager.IptablesRule(chain, rule_spec))
        self.assertNotIn(rendered, table_rules)
    return True
def test_prevent_snat_rule_exist_on_restarted_agent(self):
    """The no-SNAT-for-internal-traffic rule survives an agent restart."""
    self.agent.conf.agent_mode = 'dvr_snat'
    router_info = self.generate_dvr_router_info()
    router = self.manage_router(self.agent, router_info)
    ext_port = router.get_ex_gw_port()
    rfp_devicename = router.get_external_device_interface_name(ext_port)
    prevent_snat_rule = router._prevent_snat_for_internal_traffic_rule(
        rfp_devicename)
    self._assert_iptables_rules_exist(
        router.iptables_manager, 'nat', [prevent_snat_rule])
    # Re-process the same router with a brand-new agent instance and
    # check the rule is reprogrammed, not lost.
    restarted_agent = neutron_l3_agent.L3NATAgentWithStateReport(
        self.agent.host, self.agent.conf)
    restarted_router = self.manage_router(restarted_agent, router_info)
    self._assert_iptables_rules_exist(
        restarted_router.iptables_manager, 'nat', [prevent_snat_rule])
def _get_fixed_ip_rule_priority(self, namespace, fip):
    """Return the priority of the first ip-rule mentioning fip.

    Returns None implicitly when no rule in the namespace matches.
    """
    iprule = ip_lib.IPRule(namespace)
    for line in iprule.rule._as_root([4], ['show']).splitlines():
        if fip not in line:
            continue
        parsed = iprule.rule._parse_line(4, line)
        return parsed['priority']
def _fixed_ip_rule_exists(self, namespace, ip):
    """Return True if an ip-rule with 'from' == ip exists in namespace."""
    iprule = ip_lib.IPRule(namespace)
    rule_lines = iprule.rule._as_root([4], ['show']).splitlines()
    # Only parse lines that mention the address at all; the parsed
    # 'from' field is the authoritative match.
    return any(
        iprule.rule._parse_line(4, line)['from'] == ip
        for line in rule_lines
        if ip in line)
def test_dvr_router_add_internal_network_set_arp_cache(self):
    # Check that, when the router is set up and there are
    # existing ports on the uplinked subnet, the ARP
    # cache is properly populated.
    self.agent.conf.agent_mode = 'dvr_snat'
    router_info = self.generate_dvr_router_info(enable_snat=True)
    expected_neighbor = '35.4.1.10'
    port_data = {
        'fixed_ips': [{'ip_address': expected_neighbor}],
        'mac_address': 'fa:3e:aa:bb:cc:dd',
        'device_owner': DEVICE_OWNER_COMPUTE
    }
    # The mocked RPC makes the plugin report one pre-existing compute
    # port on the subnet; the agent should seed an ARP entry for it.
    self.agent.plugin_rpc.get_ports_by_subnet.return_value = [port_data]
    router1 = self.manage_router(self.agent, router_info)
    internal_device = router1.get_internal_device_name(
        router_info['_interfaces'][0]['id'])
    neighbor = ip_lib.dump_neigh_entries(4, internal_device,
                                         router1.ns_name,
                                         dst=expected_neighbor)
    self.assertNotEqual([], neighbor)
    self.assertEqual(expected_neighbor, neighbor[0]['dst'])
def _assert_rfp_fpr_mtu(self, router, expected_mtu=1500):
    """Check the MTU on both ends of the rtr/fip veth pair."""
    checks = (
        (router.fip_ns.get_rtr_ext_device_name, router.ns_name),
        (router.fip_ns.get_int_device_name, router.fip_ns.get_name()),
    )
    for name_getter, namespace in checks:
        actual_mtu = self.get_device_mtu(
            router.router_id, name_getter, namespace)
        self.assertEqual(expected_mtu, actual_mtu)
def test_dvr_router_fip_agent_mismatch(self):
    """Test to validate the floatingip agent mismatch.

    This test validates the condition where floatingip agent
    gateway port host mismatches with the agent and so the
    binding will not be there.
    """
    self.agent.conf.agent_mode = 'dvr'
    router_info = self.generate_dvr_router_info()
    floating_ip = router_info['_floatingips'][0]
    # Point the fip binding at a host other than this agent's.
    floating_ip['host'] = 'my_new_host'
    # In this case the floatingip binding is different and so it
    # should not create the floatingip namespace on the given agent.
    # This is also like there is no current binding.
    router1 = self.manage_router(self.agent, router_info)
    fip_ns = router1.fip_ns.get_name()
    self.assertTrue(self._namespace_exists(router1.ns_name))
    # FIP Namespace creation does not depend on the floatingip's
    # anymore and will be created on each agent when there is
    # a valid gateway.
    self.assertTrue(self._namespace_exists(fip_ns))
    self._assert_snat_namespace_does_not_exist(router1)
def test_dvr_router_fip_create_for_migrating_port(self):
    """Test to validate the floatingip create on port migrate.

    This test validates the condition where floatingip host
    mismatches with the agent, but the 'dest_host' variable
    matches with the agent host, due to port pre-migrate
    phase.
    """
    self.agent.conf.agent_mode = 'dvr'
    router_info = self.generate_dvr_router_info()
    floating_ip = router_info['_floatingips'][0]
    floating_ip['host'] = 'my_new_host'
    floating_ip['dest_host'] = self.agent.host
    # Now we have the floatingip 'host' pointing to host that
    # does not match to the 'agent.host' and the floatingip
    # 'dest_host' matches with the agent.host in the case
    # of live migration due to the port_profile update from
    # nova.
    router1 = self.manage_router(self.agent, router_info)
    fip_ns = router1.fip_ns.get_name()
    # Both the qrouter and the fip namespaces must exist on the
    # destination host ahead of the migration completing.
    self.assertTrue(self._namespace_exists(router1.ns_name))
    self.assertTrue(self._namespace_exists(fip_ns))
def test_dvr_router_fip_late_binding(self):
    """Test to validate the floatingip migration or latebinding.

    This test validates the condition where floatingip private
    port changes while migration or when the private port host
    binding is done later after floatingip association.
    """
    self.agent.conf.agent_mode = 'dvr'
    router_info = self.generate_dvr_router_info()
    fip_agent_gw_port = router_info[n_const.FLOATINGIP_AGENT_INTF_KEY]
    # Now let us not pass the FLOATINGIP_AGENT_INTF_KEY, to emulate
    # that the server did not create the port, since there was no valid
    # host binding.
    router_info[n_const.FLOATINGIP_AGENT_INTF_KEY] = []
    # The agent is expected to fetch the gateway port via RPC instead.
    self.mock_plugin_api.get_agent_gateway_port.return_value = (
        fip_agent_gw_port[0])
    router1 = self.manage_router(self.agent, router_info)
    fip_ns = router1.fip_ns.get_name()
    self.assertTrue(self._namespace_exists(router1.ns_name))
    self.assertTrue(self._namespace_exists(fip_ns))
    self._assert_snat_namespace_does_not_exist(router1)
def test_dvr_router_fip_namespace_create_without_floatingip(self):
    """Test to validate the floatingip namespace creation without fip.

    This test validates the condition where floatingip namespace gets
    created on the agent when the gateway is added and without floatingip
    configured for the router.
    """
    self.agent.conf.agent_mode = 'dvr'
    router_info = self.generate_dvr_router_info(enable_floating_ip=False)
    fip_agent_gw_port = self._get_fip_agent_gw_port_for_router(
        router_info['gw_port'])
    self.mock_plugin_api.get_agent_gateway_port.return_value = (
        fip_agent_gw_port)
    router1 = self.manage_router(self.agent, router_info)
    fip_ns = router1.fip_ns.get_name()
    self.assertTrue(self._namespace_exists(router1.ns_name))
    # Even with no floating ip configured, a valid gateway is enough
    # for the fip namespace and the rtr<->fip link to come up.
    self.assertTrue(self._namespace_exists(fip_ns))
    self.assertTrue(router1.rtr_fip_connect)
    self._assert_snat_namespace_does_not_exist(router1)
def _assert_snat_namespace_exists(self, router):
    """Assert the router's SNAT namespace is present on this host."""
    snat_ns_name = dvr_snat_ns.SnatNamespace.get_snat_ns_name(
        router.router_id)
    self.assertTrue(self._namespace_exists(snat_ns_name))
def _get_dvr_snat_namespace_device_status(
        self, router, internal_dev_name=None):
    """Return (qg_exists, sg_exists) for the router's SNAT namespace."""
    snat_ns_name = dvr_snat_ns.SnatNamespace.get_snat_ns_name(
        router.router_id)
    gw_port = router.get_ex_gw_port()
    qg_name = router.get_external_device_name(gw_port['id'])
    qg_exists = ip_lib.device_exists(qg_name, namespace=snat_ns_name)
    sg_exists = ip_lib.device_exists(
        internal_dev_name, namespace=snat_ns_name)
    return qg_exists, sg_exists
def test_snat_bound_floating_ip(self):
    """Test to validate the snat bound floatingip lifecycle."""
    self.agent.conf.agent_mode = lib_constants.L3_AGENT_MODE_DVR_SNAT
    router_info = self.generate_dvr_router_info(snat_bound_fip=True)
    router1 = self.manage_router(self.agent, router_info)
    snat_bound_floatingips = router_info[lib_constants.FLOATINGIP_KEY]
    self._assert_snat_namespace_exists(router1)
    # In the snat namespace, check the iptables rules are set correctly
    for fip in snat_bound_floatingips:
        expected_rules = router1.floating_forward_rules(fip)
        # Only fips flagged DVR_SNAT_BOUND are centralized in the SNAT
        # namespace; others are handled in the qrouter namespace.
        if fip.get(lib_constants.DVR_SNAT_BOUND):
            self._assert_iptables_rules_exist(
                router1.snat_iptables_manager, 'nat', expected_rules)
def test_floating_ip_migrate_when_unbound_port_is_bound_to_a_host(self):
    """Test to check floating ips migrate from unbound to bound host."""
    self.agent.conf.agent_mode = lib_constants.L3_AGENT_MODE_DVR_SNAT
    router_info = self.generate_dvr_router_info(
        enable_floating_ip=True, enable_centralized_fip=True,
        enable_snat=True, snat_bound_fip=True)
    router1 = self.manage_router(self.agent, router_info)
    floatingips = router_info[lib_constants.FLOATINGIP_KEY]
    # fip[0] follows the distributed path, fip[1] the centralized one.
    distributed_fip = floatingips[0]
    centralized_floatingip = floatingips[1]
    # For private ports hosted in dvr_no_fip agent, the floatingip
    # dict will contain the fip['host'] key, but the value will always
    # be None to emulate the legacy router.
    self.assertIsNone(centralized_floatingip['host'])
    self.assertTrue(self._namespace_exists(router1.ns_name))
    fip_ns = router1.fip_ns.get_name()
    self.assertTrue(self._namespace_exists(fip_ns))
    self._assert_snat_namespace_exists(router1)
    # If fips are centralized then, the DNAT rules are only
    # configured in the SNAT Namespace and not in the router-ns.
    expected_rules = router1.floating_forward_rules(distributed_fip)
    self.assertTrue(self._assert_iptables_rules_exist(
        router1.iptables_manager, 'nat', expected_rules))
    expected_rules = router1._centralized_floating_forward_rules(
        centralized_floatingip['floating_ip_address'],
        centralized_floatingip['fixed_ip_address'])
    self.assertTrue(self._assert_iptables_rules_exist(
        router1.snat_iptables_manager, 'nat', expected_rules))
    qrouter_ns = router1.ns_name
    fixed_ip_dist = distributed_fip['fixed_ip_address']
    snat_ns = router1.snat_namespace.name
    fixed_ip_cent = centralized_floatingip['fixed_ip_address']
    # Before migration: only the distributed fip has an ip rule, and it
    # lives in the qrouter namespace; the SNAT namespace holds none.
    self.assertFalse(self._fixed_ip_rule_exists(qrouter_ns, fixed_ip_cent))
    self.assertTrue(self._fixed_ip_rule_exists(qrouter_ns, fixed_ip_dist))
    self.assertFalse(self._fixed_ip_rule_exists(snat_ns, fixed_ip_dist))
    self.assertFalse(self._fixed_ip_rule_exists(snat_ns, fixed_ip_cent))
    # Now let us edit the centralized floatingIP info with 'host'
    # and remove the 'dvr_snat_bound'
    router1.router[lib_constants.FLOATINGIP_KEY][1]['host'] = (
        self.agent.conf.host)
    del router1.router[lib_constants.FLOATINGIP_KEY][1]['dvr_snat_bound']
    self.agent._process_updated_router(router1.router)
    router_updated = self.agent.router_info[router_info['id']]
    qrouter_ns = router_updated.ns_name
    fixed_ip_dist = distributed_fip['fixed_ip_address']
    self._assert_snat_namespace_exists(router_updated)
    snat_ns = router_updated.snat_namespace.name
    fixed_ip_cent = centralized_floatingip['fixed_ip_address']
    # Stub the centralized-cidr lookup so the cleanup path below sees
    # the previously-centralized fip address to remove.
    router_updated.get_centralized_fip_cidr_set = mock.Mock(
        return_value=set(["19.4.4.3/32"]))
    self.assertTrue(self._assert_iptables_rules_not_exist(
        router_updated.snat_iptables_manager, 'nat', expected_rules))
    port = router_updated.get_ex_gw_port()
    interface_name = router_updated.get_external_device_name(port['id'])
    self._assert_ip_address_not_on_interface(
        snat_ns, interface_name,
        centralized_floatingip['floating_ip_address'])
    # After migration: both fixed ips are handled in the qrouter
    # namespace; nothing remains in the SNAT namespace.
    self.assertTrue(self._fixed_ip_rule_exists(qrouter_ns, fixed_ip_dist))
    self.assertFalse(self._fixed_ip_rule_exists(snat_ns, fixed_ip_dist))
    self.assertTrue(self._fixed_ip_rule_exists(qrouter_ns, fixed_ip_cent))
    self.assertFalse(self._fixed_ip_rule_exists(snat_ns, fixed_ip_cent))
    self.assertTrue(self._namespace_exists(fip_ns))
def test_floating_ip_not_deployed_on_dvr_no_external_agent(self):
    """Test to check floating ips not configured for dvr_no_external."""
    self.agent.conf.agent_mode = (
        lib_constants.L3_AGENT_MODE_DVR_NO_EXTERNAL)
    router_info = self.generate_dvr_router_info(
        enable_floating_ip=True, enable_centralized_fip=True)
    router1 = self.manage_router(self.agent, router_info)
    centralized_floatingips = router_info[lib_constants.FLOATINGIP_KEY]
    # For private ports hosted in dvr_no_fip agent, the floatingip
    # dict will contain the fip['host'] key, but the value will always
    # be None to emulate the legacy router.
    self.assertIsNone(centralized_floatingips[0]['host'])
    self.assertTrue(self._namespace_exists(router1.ns_name))
    # A dvr_no_external agent must never create the fip namespace.
    fip_ns = router1.fip_ns.get_name()
    self.assertFalse(self._namespace_exists(fip_ns))
    # If fips are centralized then, the DNAT rules are only
    # configured in the SNAT Namespace and not in the router-ns.
    for fip in centralized_floatingips:
        expected_rules = router1.floating_forward_rules(fip)
        self.assertEqual(0, len(expected_rules))
def test_floating_ip_create_does_not_raise_keyerror_on_missing_host(self):
    """Test to check floating ips configure does not raise Keyerror."""
    self.agent.conf.agent_mode = 'dvr'
    router_info = self.generate_dvr_router_info(
        enable_floating_ip=True)
    # Drop the 'host' key entirely to emulate a server that omits it.
    del router_info[lib_constants.FLOATINGIP_KEY][0]['host']
    centralized_floatingips = router_info[lib_constants.FLOATINGIP_KEY][0]
    self.assertIsNone(centralized_floatingips.get('host'))
    # No Keyerror should be raised when calling manage_router
    self.manage_router(self.agent, router_info)
def test_dvr_router_snat_namespace_with_interface_remove(self):
    """Test to validate the snat namespace with interface remove.

    This test validates the snat namespace for all the external
    and internal devices. It also validates if the internal
    device corresponding to the router interface is removed
    when the router interface is deleted.
    """
    self.agent.conf.agent_mode = 'dvr_snat'
    router_info = self.generate_dvr_router_info()
    snat_internal_port = router_info[n_const.SNAT_ROUTER_INTF_KEY]
    router1 = self.manage_router(self.agent, router_info)
    csnat_internal_port = (
        router1.router[n_const.SNAT_ROUTER_INTF_KEY])
    # Now save the internal device name to verify later
    internal_device_name = router1._get_snat_int_device_name(
        csnat_internal_port[0]['id'])
    self._assert_snat_namespace_exists(router1)
    qg_device, sg_device = self._get_dvr_snat_namespace_device_status(
        router1, internal_dev_name=internal_device_name)
    self.assertTrue(qg_device)
    self.assertTrue(sg_device)
    self.assertEqual(router1.snat_ports, snat_internal_port)
    # Now let us not pass INTERFACE_KEY, to emulate
    # the interface has been removed.
    router1.router[lib_constants.INTERFACE_KEY] = []
    # Now let us not pass the SNAT_ROUTER_INTF_KEY, to emulate
    # that the server did not send it, since the interface has been
    # removed.
    router1.router[n_const.SNAT_ROUTER_INTF_KEY] = []
    self.agent._process_updated_router(router1.router)
    router_updated = self.agent.router_info[router_info['id']]
    self._assert_snat_namespace_exists(router_updated)
    # The sg- device must be gone while the qg- device remains.
    qg_device, sg_device = self._get_dvr_snat_namespace_device_status(
        router_updated, internal_dev_name=internal_device_name)
    self.assertFalse(sg_device)
    self.assertTrue(qg_device)
def _mocked_dvr_ha_router(self, agent, enable_ha=True, enable_gw=True,
                          enable_centralized_fip=False,
                          snat_bound_fip=False):
    """Build router info plus per-agent unique namespace names.

    The '@<host>' suffix keeps the qrouter and SNAT namespace names
    distinct when two agents process the same router on one machine.
    """
    r_info = self.generate_dvr_router_info(
        enable_ha=enable_ha,
        enable_snat=True,
        agent=agent,
        enable_gw=enable_gw,
        enable_centralized_fip=enable_centralized_fip,
        snat_bound_fip=snat_bound_fip)
    host_suffix = '@' + agent.host
    snat_ns_name = namespaces.build_ns_name(
        dvr_snat_ns.SNAT_NS_PREFIX, r_info['id']) + host_suffix
    qrouter_ns_name = namespaces.build_ns_name(
        namespaces.NS_PREFIX, r_info['id']) + host_suffix
    return r_info, qrouter_ns_name, snat_ns_name
def _setup_dvr_ha_agents(self):
    """Configure self.agent plus a second 'failover' agent, both dvr_snat."""
    self.agent.conf.agent_mode = 'dvr_snat'
    conf = self._configure_agent('agent2')
    self.failover_agent = neutron_l3_agent.L3NATAgentWithStateReport(
        'agent2', conf)
    self.failover_agent.conf.agent_mode = 'dvr_snat'
def _setup_dvr_ha_bridges(self):
    """Wire the two agents' integration bridges together with a veth pair.

    This gives the HA (VRRP) traffic a path between the two agents.
    """
    br_int_1 = self._get_agent_ovs_integration_bridge(self.agent)
    br_int_2 = self._get_agent_ovs_integration_bridge(self.failover_agent)
    veth1, veth2 = self.useFixture(net_helpers.VethFixture()).ports
    br_int_1.add_port(veth1.name)
    br_int_2.add_port(veth2.name)
def _create_dvr_ha_router(self, agent, enable_gw=True,
                          enable_centralized_fip=False,
                          snat_bound_fip=False, ha_interface=True):
    """Create a DVR-HA router on the given agent with mocked ns names.

    Namespace-name lookups are patched so each agent gets its own
    '@host'-suffixed namespaces (see _mocked_dvr_ha_router).
    NOTE(review): the mock.patch(...).start() calls have no visible
    corresponding stop/addCleanup here — presumably handled by the
    base test class; confirm.
    """
    get_ns_name = mock.patch.object(namespaces.RouterNamespace,
                                    '_get_ns_name').start()
    get_snat_ns_name = mock.patch.object(dvr_snat_ns.SnatNamespace,
                                         'get_snat_ns_name').start()
    (r_info,
     mocked_r_ns_name,
     mocked_r_snat_ns_name) = self._mocked_dvr_ha_router(
        agent, ha_interface, enable_gw, enable_centralized_fip,
        snat_bound_fip)

    # With no HA interface, still mark the router as HA so the agent
    # exercises the non-HA-interface update path.
    if not ha_interface:
        r_info['ha'] = True

    get_ns_name.return_value = mocked_r_ns_name
    get_snat_ns_name.return_value = mocked_r_snat_ns_name
    router = self.manage_router(agent, r_info)
    return router
def _assert_ip_addresses_in_dvr_ha_snat_namespace_with_fip(self, router):
    """Assert gw/sg device addresses exist in the HA SNAT namespace.

    On the master instance, additionally check that the centralized
    fip forward rules are NOT in the snat iptables (keepalived owns
    the address there).
    """
    namespace = router.ha_namespace
    ex_gw_port = router.get_ex_gw_port()
    snat_ports = router.get_snat_interfaces()
    # Nothing to verify for a router without SNAT interfaces.
    if not snat_ports:
        return
    if router.is_router_master():
        centralized_floatingips = (
            router.router[lib_constants.FLOATINGIP_KEY])
        for fip in centralized_floatingips:
            expected_rules = router.floating_forward_rules(fip)
            self.assertFalse(self._assert_iptables_rules_exist(
                router.snat_iptables_manager, 'nat', expected_rules))
    snat_port = snat_ports[0]
    ex_gw_port_name = router.get_external_device_name(
        ex_gw_port['id'])
    snat_port_name = router._get_snat_int_device_name(
        snat_port['id'])
    # All fixed-ip CIDRs of each device must be plumbed in the ns.
    ex_gw_port_cidrs = utils.fixed_ip_cidrs(ex_gw_port["fixed_ips"])
    snat_port_cidrs = utils.fixed_ip_cidrs(snat_port["fixed_ips"])
    self._assert_ip_addresses_on_interface(namespace,
                                           ex_gw_port_name,
                                           ex_gw_port_cidrs)
    self._assert_ip_addresses_on_interface(namespace,
                                           snat_port_name,
                                           snat_port_cidrs)
def _assert_no_ip_addresses_in_dvr_ha_snat_namespace_with_fip(self,
                                                              router):
    """Assert the HA SNAT namespace devices carry no IP addresses.

    The centralized-fip variant needs no extra checks for the
    "no addresses" (backup) case, so delegate to the common helper
    instead of duplicating its body line for line.
    """
    self._assert_no_ip_addresses_in_dvr_ha_snat_namespace(router)
def _assert_ip_addresses_in_dvr_ha_snat_namespace(self, router):
    """Assert the first fixed-ip CIDR of the qg- and sg- devices is set."""
    namespace = router.ha_namespace
    ex_gw_port = router.get_ex_gw_port()
    snat_ports = router.get_snat_interfaces()
    # A router without SNAT interfaces has nothing to check.
    if not snat_ports:
        return
    snat_port = snat_ports[0]

    def first_fixed_cidr(port):
        fixed = port["fixed_ips"][0]
        return '%s/%s' % (fixed['ip_address'], fixed['prefixlen'])

    self._assert_ip_address_on_interface(
        namespace,
        router.get_external_device_name(ex_gw_port['id']),
        first_fixed_cidr(ex_gw_port))
    self._assert_ip_address_on_interface(
        namespace,
        router._get_snat_int_device_name(snat_port['id']),
        first_fixed_cidr(snat_port))
def _assert_no_ip_addresses_in_dvr_ha_snat_namespace(self, router):
    """Assert no IPs are configured on the sg-/qg- devices in the ns."""
    ha_ns = router.ha_namespace
    ex_gw_port = router.get_ex_gw_port()
    snat_ports = router.get_snat_interfaces()
    # Without SNAT interfaces there are no devices to inspect.
    if not snat_ports:
        return
    device_names = (
        router._get_snat_int_device_name(snat_ports[0]['id']),
        router.get_external_device_name(ex_gw_port['id']),
    )
    for device_name in device_names:
        self._assert_no_ip_addresses_on_interface(ha_ns, device_name)
@mock.patch.object(dvr_local_router.DvrLocalRouter, 'connect_rtr_2_fip')
@mock.patch.object(
    dvr_ha_router.DvrEdgeHaRouter, 'get_centralized_fip_cidr_set')
def test_dvr_ha_router_with_centralized_fip_calls_keepalived_cidr(
        self, fip_cidr_centralized_mock, connect_rtr_2_fip_mock):
    """Centralized-fip HA router must query the centralized fip cidrs.

    Stacked mock.patch decorators inject mocks bottom-up, so the FIRST
    parameter corresponds to the bottom decorator
    (get_centralized_fip_cidr_set).  The parameter names were swapped,
    which made the assertions check the connect_rtr_2_fip mock instead;
    fixed by naming them in injection order.
    """
    self._setup_dvr_ha_agents()
    self._setup_dvr_ha_bridges()
    router1 = self._create_dvr_ha_router(
        self.agent, enable_gw=True,
        enable_centralized_fip=True,
        snat_bound_fip=True)
    self.assertTrue(fip_cidr_centralized_mock.called)
    # The lookup must also happen when a restarted agent re-processes
    # the same router.
    restarted_agent = neutron_l3_agent.L3NATAgentWithStateReport(
        self.agent.host, self.agent.conf)
    self.manage_router(restarted_agent, router1.router)
    self.assertTrue(fip_cidr_centralized_mock.called)
@mock.patch.object(dvr_local_router.DvrLocalRouter, 'connect_rtr_2_fip')
@mock.patch.object(
    dvr_edge_router.DvrEdgeRouter, 'get_centralized_fip_cidr_set')
def test_dvr_router_with_centralized_fip_calls_keepalived_cidr(
        self, fip_cidr_centralized_mock, connect_rtr_2_fip_mock):
    """Centralized-fip edge router must query the centralized fip cidrs.

    Mock parameters are named in injection order (bottom decorator
    first); the previous names were swapped, so the assertions were
    actually checking the connect_rtr_2_fip mock.
    """
    router_info = self.generate_dvr_router_info(
        enable_gw=True, enable_centralized_fip=True, snat_bound_fip=True)
    router1 = self.manage_router(self.agent, router_info)
    self.assertTrue(fip_cidr_centralized_mock.called)
    # The lookup must also happen when a restarted agent re-processes
    # the same router.
    restarted_agent = neutron_l3_agent.L3NATAgentWithStateReport(
        self.agent.host, self.agent.conf)
    self.manage_router(restarted_agent, router1.router)
    self.assertTrue(fip_cidr_centralized_mock.called)
def _test_dvr_ha_router_failover_with_gw_and_fip(self, enable_gw,
                                                 enable_centralized_fip,
                                                 snat_bound_fip):
    """Fail over a DVR-HA router (fip variant) and verify address moves.

    The master's SNAT namespace must hold the gw/sg addresses and the
    backup's must not, both before and after the failover.
    """
    self._setup_dvr_ha_agents()
    self._setup_dvr_ha_bridges()
    router1 = self._create_dvr_ha_router(
        self.agent, enable_gw=enable_gw,
        enable_centralized_fip=enable_centralized_fip,
        snat_bound_fip=snat_bound_fip)
    router2 = self._create_dvr_ha_router(
        self.failover_agent, enable_gw=enable_gw,
        enable_centralized_fip=enable_centralized_fip,
        snat_bound_fip=snat_bound_fip)
    utils.wait_until_true(lambda: router1.ha_state == 'master')
    utils.wait_until_true(lambda: router2.ha_state == 'backup')
    self._assert_ip_addresses_in_dvr_ha_snat_namespace_with_fip(router1)
    self._assert_no_ip_addresses_in_dvr_ha_snat_namespace_with_fip(router2)
    # Kill HA on router1 and verify the roles (and addresses) swap.
    self.fail_ha_router(router1)
    utils.wait_until_true(lambda: router2.ha_state == 'master')
    utils.wait_until_true(lambda: router1.ha_state == 'backup')
    self._assert_ip_addresses_in_dvr_ha_snat_namespace_with_fip(router2)
    self._assert_no_ip_addresses_in_dvr_ha_snat_namespace_with_fip(router1)
def _test_dvr_ha_router_failover(self, enable_gw):
    """Fail over a DVR-HA router and verify SNAT-namespace addresses move.

    The master must hold the gw/sg addresses, the backup must not, both
    before and after the induced failover.
    """
    self._setup_dvr_ha_agents()
    self._setup_dvr_ha_bridges()
    router1 = self._create_dvr_ha_router(self.agent, enable_gw=enable_gw)
    # Pass enable_gw as a keyword for consistency with the router1 call
    # (same positional slot, identical behavior).
    router2 = self._create_dvr_ha_router(self.failover_agent,
                                         enable_gw=enable_gw)
    utils.wait_until_true(lambda: router1.ha_state == 'master')
    utils.wait_until_true(lambda: router2.ha_state == 'backup')
    self._assert_ip_addresses_in_dvr_ha_snat_namespace(router1)
    self._assert_no_ip_addresses_in_dvr_ha_snat_namespace(router2)
    # Kill HA on router1 and verify the roles (and addresses) swap.
    self.fail_ha_router(router1)
    utils.wait_until_true(lambda: router2.ha_state == 'master')
    utils.wait_until_true(lambda: router1.ha_state == 'backup')
    self._assert_ip_addresses_in_dvr_ha_snat_namespace(router2)
    self._assert_no_ip_addresses_in_dvr_ha_snat_namespace(router1)
def test_dvr_ha_router_failover_with_gw(self):
    """DVR-HA failover with an external gateway configured."""
    self._test_dvr_ha_router_failover(enable_gw=True)
def test_dvr_ha_router_failover_with_gw_and_floatingip(self):
    """DVR-HA failover with gateway plus centralized/snat-bound fips."""
    self._test_dvr_ha_router_failover_with_gw_and_fip(
        enable_gw=True, enable_centralized_fip=True, snat_bound_fip=True)
def test_dvr_ha_router_failover_without_gw(self):
    """DVR-HA failover with no external gateway configured."""
    self._test_dvr_ha_router_failover(enable_gw=False)
def test_dvr_non_ha_router_update(self):
    """HA-state check runs only for routers with an HA interface."""
    self._setup_dvr_ha_agents()
    self._setup_dvr_ha_bridges()
    router1 = self._create_dvr_ha_router(self.agent)
    # router2 is flagged HA but has no HA interface (late binding case).
    router2 = self._create_dvr_ha_router(self.failover_agent,
                                         ha_interface=False)
    r1_chsfr = mock.patch.object(self.agent,
                                 'check_ha_state_for_router').start()
    r2_chsfr = mock.patch.object(self.failover_agent,
                                 'check_ha_state_for_router').start()
    utils.wait_until_true(lambda: router1.ha_state == 'master')
    # The fully-HA router triggers the HA-state check on update...
    self.agent._process_updated_router(router1.router)
    self.assertTrue(r1_chsfr.called)
    # ...the HA-without-interface router must not.
    self.failover_agent._process_updated_router(router2.router)
    self.assertFalse(r2_chsfr.called)
def _setup_dvr_router_static_routes(
        self, router_namespace=True,
        check_fpr_int_rule_delete=False, enable_ha=False):
    """Test to validate the extra routes on dvr routers.

    :param router_namespace: when True the extra route's nexthop lives
        on an internal subnet (route lands in qrouter + snat ns);
        when False it lives on the external net (fip-ns fast path).
    :param check_fpr_int_rule_delete: additionally remove the gateway
        afterwards and verify fip-ns rules/routes are cleaned up.
    :param enable_ha: create the router as HA.
    """
    self.agent.conf.agent_mode = 'dvr_snat'
    router_info = self.generate_dvr_router_info(
        enable_snat=True, enable_ha=enable_ha)
    router1 = self.manage_router(self.agent, router_info)
    self.assertTrue(self._namespace_exists(router1.ns_name))
    self._assert_snat_namespace_exists(router1)
    fip_ns_name = router1.fip_ns.get_name()
    self.assertTrue(self._namespace_exists(fip_ns_name))
    snat_ns_name = dvr_snat_ns.SnatNamespace.get_snat_ns_name(
        router1.router_id)
    # Choose the nexthop: internal-subnet address vs external address.
    if router_namespace:
        router1.router['routes'] = [{'destination': '8.8.4.0/24',
                                     'nexthop': '35.4.0.20'}]
    else:
        router1.router['routes'] = [{'destination': '8.8.4.0/24',
                                     'nexthop': '19.4.4.10'}]
    self.agent._process_updated_router(router1.router)
    router_updated = self.agent.router_info[router_info['id']]
    if router_namespace:
        self._assert_extra_routes(router_updated)
        if not enable_ha:
            self._assert_extra_routes(router_updated,
                                      namespace=snat_ns_name)
    else:
        rtr_2_fip, fip_2_rtr = router_updated.rtr_fip_subnet.get_pair()
        # Now get the table index based on the fpr-interface ip.
        router_fip_table_idx = router_updated._get_snat_idx(fip_2_rtr)
        self._assert_extra_routes_for_fipns(
            router_updated, router_fip_table_idx)
        self._assert_extra_routes(router_updated, namespace=snat_ns_name)
        if check_fpr_int_rule_delete:
            # Strip fips and the gateway, then re-process: the fip ns
            # must survive but its per-router rules/routes must go.
            router_updated.router[lib_constants.FLOATINGIP_KEY] = []
            router_updated.router['gw_port'] = ""
            router_updated.router['gw_port_host'] = ""
            router_updated.router['external_gateway_info'] = ""
            self.agent._process_updated_router(router_updated.router)
            new_router_info = self.agent.router_info[router_updated.router_id]
            self.assertTrue(self._namespace_exists(fip_ns_name))
            self._assert_extra_routes_for_fipns(
                new_router_info, router_fip_table_idx,
                check_fpr_int_rule_delete=check_fpr_int_rule_delete)
def _assert_extra_routes_for_fipns(self, router, router_fip_table_idx,
                                   check_fpr_int_rule_delete=False):
    """Verify fip-namespace rules/routes for the router's route table.

    With check_fpr_int_rule_delete=False, the per-router table must
    hold a default gateway, the matching ip rule, and the extra route;
    with True, all of those must have been removed.
    """
    fip_ns_name = router.fip_ns.get_name()
    self.assertTrue(self._namespace_exists(fip_ns_name))
    fg_port = router.fip_ns.agent_gateway_port
    fg_port_name = router.fip_ns.get_ext_device_name(fg_port['id'])
    fip_ns_int_name = router.fip_ns.get_int_device_name(router.router_id)
    fg_device = ip_lib.IPDevice(fg_port_name,
                                namespace=fip_ns_name)
    tbl_filter = ['table', router_fip_table_idx]
    # Default gateway in the per-router table: present vs removed.
    if not check_fpr_int_rule_delete:
        self.assertIn('gateway', fg_device.route.get_gateway(
            filters=tbl_filter))
    else:
        self.assertIsNone(fg_device.route.get_gateway(filters=tbl_filter))
    ip_rule = ip_lib.IPRule(namespace=fip_ns_name)
    ext_net_fw_rules_list = ip_rule.rule.list_rules(
        lib_constants.IP_VERSION_4)
    if not check_fpr_int_rule_delete:
        # When floatingip are associated, make sure that the
        # corresponding rules and routes in route table are created
        # for the router.
        expected_rule = {u'from': '0.0.0.0/0',
                         u'iif': fip_ns_int_name,
                         'priority': str(router_fip_table_idx),
                         'table': str(router_fip_table_idx),
                         'type': 'unicast'}
        for rule in ext_net_fw_rules_list:
            rule_tbl = rule['table']
            # Skip the kernel's built-in tables.
            if rule_tbl in ['default', 'local', 'main']:
                continue
            if rule_tbl == str(router_fip_table_idx):
                self.assertEqual(expected_rule, rule)
        # Now check the routes in the table.
        destination = router.router['routes'][0]['destination']
        next_hop = router.router['routes'][0]['nexthop']
        actual_routes = fg_device.route.list_routes(
            ip_version=lib_constants.IP_VERSION_4,
            table=router_fip_table_idx,
            via=str(next_hop))
        expected_extra_route = [{'cidr': six.u(destination),
                                 'dev': fg_port_name,
                                 'table': router_fip_table_idx,
                                 'via': next_hop}]
        self.assertEqual(expected_extra_route, actual_routes)
    else:
        # When floatingip are deleted or disassociated, make sure that the
        # corresponding rules and routes are cleared from the table
        # corresponding to the router.
        # Only the three built-in rules (default/local/main) may remain.
        self.assertEqual(3, len(ext_net_fw_rules_list))
        rule_exist = False
        for rule in ext_net_fw_rules_list:
            rule_tbl = rule['table']
            if rule_tbl not in ['default', 'local', 'main']:
                rule_exist = True
        self.assertFalse(rule_exist)
        tbl_routes = fg_device.route.list_routes(
            ip_version=lib_constants.IP_VERSION_4,
            table=router_fip_table_idx)
        self.assertEqual([], tbl_routes)
def test_dvr_router_static_routes_in_fip_and_snat_namespace(self):
    """Extra route with external nexthop lands in fip + snat namespaces."""
    self._setup_dvr_router_static_routes(router_namespace=False)
def test_dvr_router_static_routes_in_snat_namespace_and_router_namespace(
        self):
    """Extra route with internal nexthop lands in qrouter + snat ns."""
    self._setup_dvr_router_static_routes()
def test_dvr_ha_rtr_static_routes_in_rtr_namespace(self):
    """Extra routes on an HA DVR router appear in the router namespace."""
    self._setup_dvr_router_static_routes(enable_ha=True)
def test_dvr_router_rule_and_route_table_cleared_when_fip_removed(
        self):
    """fip-namespace rules/routes are cleaned up when fips+gw are removed."""
    self._setup_dvr_router_static_routes(
        router_namespace=False, check_fpr_int_rule_delete=True)
def _assert_fip_namespace_interface_static_routes(
        self, address_scopes, fpr_device,
        router_info, rtr_2_fip, fpr_device_name):
    """Check the fast-path static routes in the fip namespace.

    When internal and external networks share an address scope, the
    fip namespace must carry one static route per internal subnet
    pointing back at the router over the fpr device; otherwise no such
    routes may exist.  (The "no routes" assertion used to be duplicated
    in two separate branches; collapsed to a single early return.)
    """
    fixed_ips_1 = router_info[lib_constants.INTERFACE_KEY][0]['fixed_ips']
    fixed_ips_2 = router_info[lib_constants.INTERFACE_KEY][1]['fixed_ips']
    actual_routes = fpr_device.route.list_routes(
        ip_version=lib_constants.IP_VERSION_4, table='main',
        via=str(rtr_2_fip.ip))
    if not address_scopes:
        # No scope match: the fast-path routes must be absent.
        self.assertEqual([], actual_routes)
        return
    cidr1 = (
        str(fixed_ips_1[0]['ip_address']) +
        '/' + str(fixed_ips_1[0]['prefixlen']))
    cidr2 = (
        str(fixed_ips_2[0]['ip_address']) +
        '/' + str(fixed_ips_2[0]['prefixlen']))
    # The installed routes use the subnet's network address, not the
    # port's own address.
    net_addr_1 = netaddr.IPNetwork(cidr1).network
    net_addr_2 = netaddr.IPNetwork(cidr2).network
    route_cidr_1 = (
        str(net_addr_1) + '/' +
        str(fixed_ips_1[0]['prefixlen']))
    route_cidr_2 = (
        str(net_addr_2) + '/' +
        str(fixed_ips_2[0]['prefixlen']))
    expected_routes = [{'dev': fpr_device_name,
                        'cidr': six.u(route_cidr_1),
                        'via': str(rtr_2_fip.ip),
                        'table': 'main'},
                       {'dev': fpr_device_name,
                        'cidr': six.u(route_cidr_2),
                        'via': str(rtr_2_fip.ip),
                        'table': 'main'}]
    # Comparing the static routes for both internal interfaces on the
    # main table.
    self.assertEqual(expected_routes, actual_routes)
def _assert_interface_rules_on_gateway_remove(
        self, router, agent, address_scopes, agent_gw_port,
        rfp_device, fpr_device, no_external=False):
    """Remove the gateway and verify fip devices/rules are torn down.

    NOTE(review): the 'agent' parameter is accepted but self.agent is
    used below — presumably intentional for these single-agent tests;
    confirm before relying on it with a second agent.
    """
    # Emulate the server removing the gateway from the router dict.
    router.router[n_const.SNAT_ROUTER_INTF_KEY] = []
    router.router['gw_port'] = ""
    router.router['gw_port_host'] = ""
    self.agent._process_updated_router(router.router)
    router_updated = self.agent.router_info[router.router['id']]
    self.assertTrue(self._namespace_exists(router_updated.ns_name))
    if not no_external:
        # The rtr<->fip veth pair and the fip namespace must be gone.
        self.assertFalse(rfp_device.exists())
        self.assertFalse(fpr_device.exists())
        self._assert_fip_namespace_deleted(
            agent_gw_port, assert_ovs_interface=False)
    if not address_scopes or no_external:
        # Only the kernel's base rule set should remain in the qrouter ns.
        ns_ipr = ip_lib.IPRule(namespace=router_updated.ns_name)
        ip4_rules_list = ns_ipr.rule.list_rules(lib_constants.IP_VERSION_4)
        ip6_rules_list = ns_ipr.rule.list_rules(lib_constants.IP_VERSION_6)
        self.assertEqual(3, len(ip4_rules_list))
        self.assertEqual(2, len(ip6_rules_list))
def _setup_dvr_router_for_fast_path_exit(self, address_scopes=True):
    """Test to validate the fip and router namespace routes.

    This test validates the fip and router namespace routes
    that are based on the address scopes.
    If the address scopes of internal network and external network
    matches, the traffic will be forwarded to the fip namespace and
    the reverse traffic to the private network is forwarded to the
    router namespace.
    """
    self.agent.conf.agent_mode = 'dvr'
    router_info = self.generate_dvr_router_info(
        enable_snat=True, enable_gw=True, enable_floating_ip=True)
    router_info[lib_constants.FLOATINGIP_KEY] = []
    # The gateway port is always in 'scope1'; the internal interfaces
    # either match it (fast path) or not, depending on the flag.
    if address_scopes:
        address_scope1 = {
            str(lib_constants.IP_VERSION_4): 'scope1'}
        address_scope2 = {
            str(lib_constants.IP_VERSION_4): 'scope1'}
    else:
        address_scope1 = {
            str(lib_constants.IP_VERSION_4): 'scope2'}
        address_scope2 = {
            str(lib_constants.IP_VERSION_4): 'scope2'}
    router_info['gw_port']['address_scopes'] = {
        str(lib_constants.IP_VERSION_4): 'scope1'}
    router_info[lib_constants.INTERFACE_KEY][0]['address_scopes'] = (
        address_scope1)
    router_info[lib_constants.INTERFACE_KEY][1]['address_scopes'] = (
        address_scope2)
    # should have the same address_scopes as gw_port
    fip_agent_gw_ports = router_info[n_const.FLOATINGIP_AGENT_INTF_KEY]
    fip_agent_gw_ports[0]['address_scopes'] = (
        router_info['gw_port']['address_scopes'])
    self.mock_plugin_api.get_agent_gateway_port.return_value = (
        fip_agent_gw_ports[0])
    router1 = self.manage_router(self.agent, router_info)
    fip_ns_name = router1.fip_ns.get_name()
    self.assertTrue(self._namespace_exists(router1.ns_name))
    self.assertTrue(self._namespace_exists(fip_ns_name))
    # Check the router namespace for default route.
    rfp_device_name = router1.fip_ns.get_rtr_ext_device_name(
        router1.router_id)
    rfp_device = ip_lib.IPDevice(rfp_device_name,
                                 namespace=router1.ns_name)
    fpr_device_name = router1.fip_ns.get_int_device_name(router1.router_id)
    fpr_device = ip_lib.IPDevice(fpr_device_name,
                                 namespace=fip_ns_name)
    rtr_2_fip, fip_2_rtr = router1.rtr_fip_subnet.get_pair()
    self._assert_default_gateway(
        fip_2_rtr, rfp_device, rfp_device_name)
    # Check if any snat redirect rules in the router namespace exist.
    ns_ipr = ip_lib.IPRule(namespace=router1.ns_name)
    ip4_rules_list = ns_ipr.rule.list_rules(lib_constants.IP_VERSION_4)
    ip6_rules_list = ns_ipr.rule.list_rules(lib_constants.IP_VERSION_6)
    # Just make sure the basic set of rules are there in the router
    # namespace
    self.assertEqual(5, len(ip4_rules_list))
    self.assertEqual(2, len(ip6_rules_list))
    # Now check the fip namespace static routes for reaching the private
    # network.
    self._assert_fip_namespace_interface_static_routes(
        address_scopes, fpr_device,
        router_info, rtr_2_fip, fpr_device_name)
    # Now remove the gateway and validate if the respective interface
    # routes in router namespace is deleted respectively.
    self._assert_interface_rules_on_gateway_remove(
        router1, self.agent, address_scopes, fip_agent_gw_ports[0],
        rfp_device, fpr_device)
    def test_dvr_fip_and_router_namespace_rules_with_address_scopes_match(
            self):
        # Fast-path-exit setup with matching internal/external address
        # scopes; all assertions live in the shared helper.
        self._setup_dvr_router_for_fast_path_exit(address_scopes=True)
    def test_dvr_fip_and_router_namespace_rules_with_address_scopes_mismatch(
            self):
        # Same helper, but with internal/external address scopes differing.
        self._setup_dvr_router_for_fast_path_exit(address_scopes=False)
    @mock.patch.object(dvr_local_router.DvrLocalRouter,
                       '_add_interface_routing_rule_to_router_ns')
    @mock.patch.object(dvr_local_router.DvrLocalRouter,
                       '_add_interface_route_to_fip_ns')
    def test_dvr_no_external_router_namespace_rules_with_address_scopes_match(
            self, mock_add_interface_route_rule,
            mock_add_fip_interface_route_rule):
        """Test to validate the router namespace routes.

        This test validates the router namespace routes
        that are based on the address scopes.
        If the address scopes of internal network and external network
        matches, the traffic will be forwarded to SNAT namespace
        for agents that don't have external connectivity or configured
        as DVR_NO_EXTERNAL.
        """
        self.agent.conf.agent_mode = (
            lib_constants.L3_AGENT_MODE_DVR_NO_EXTERNAL)
        router_info = self.generate_dvr_router_info(
            enable_snat=True, enable_gw=True, enable_floating_ip=True)
        router_info[lib_constants.FLOATINGIP_KEY] = []
        # Both internal interfaces and the gateway share IPv4 scope1.
        address_scope1 = {
            str(lib_constants.IP_VERSION_4): 'scope1'}
        address_scope2 = {
            str(lib_constants.IP_VERSION_4): 'scope1'}
        router_info['gw_port']['address_scopes'] = {
            str(lib_constants.IP_VERSION_4): 'scope1'}
        router_info[lib_constants.INTERFACE_KEY][0]['address_scopes'] = (
            address_scope1)
        router_info[lib_constants.INTERFACE_KEY][1]['address_scopes'] = (
            address_scope2)
        router1 = self.manage_router(self.agent, router_info)
        self.assertTrue(self._namespace_exists(router1.ns_name))
        # In DVR_NO_EXTERNAL mode no fip-namespace routes may be added.
        self.assertFalse(mock_add_interface_route_rule.called)
        self.assertFalse(mock_add_fip_interface_route_rule.called)
        # Check if any snat redirect rules in the router namespace exist.
        ns_ipr = ip_lib.IPRule(namespace=router1.ns_name)
        ip4_rules_list = ns_ipr.rule.list_rules(lib_constants.IP_VERSION_4)
        ip6_rules_list = ns_ipr.rule.list_rules(lib_constants.IP_VERSION_6)
        # Just make sure the basic set of rules are there in the router
        # namespace
        self.assertEqual(5, len(ip4_rules_list))
        self.assertEqual(2, len(ip6_rules_list))
        # Now remove the gateway and validate if the respective interface
        # routes in router namespace is deleted respectively.
        self._assert_interface_rules_on_gateway_remove(
            router1, self.agent, True, mock.ANY,
            mock.ANY, mock.ANY, True)
    def test_dvr_router_gateway_update_to_none(self):
        # Clearing every subnet's gateway_ip must remove the default routes
        # from both the SNAT namespace and the fip namespace devices.
        self.agent.conf.agent_mode = 'dvr_snat'
        router_info = self.generate_dvr_router_info(enable_snat=True)
        router = self.manage_router(self.agent, router_info)
        gw_port = router.get_ex_gw_port()
        ex_gw_port_name = router.get_external_device_name(gw_port['id'])
        ex_gw_device = ip_lib.IPDevice(ex_gw_port_name,
                                       namespace=router.snat_namespace.name)
        fg_port = router.fip_ns.agent_gateway_port
        fg_port_name = router.fip_ns.get_ext_device_name(fg_port['id'])
        fg_device = ip_lib.IPDevice(fg_port_name,
                                    namespace=router.fip_ns.name)
        rtr_2_fip, fip_2_rtr = router.rtr_fip_subnet.get_pair()
        tbl_index = router._get_snat_idx(fip_2_rtr)

        # Sanity: both devices start with a default gateway configured.
        self.assertIn('gateway', ex_gw_device.route.get_gateway())
        tbl_filter = ['table', tbl_index]
        self.assertIn('gateway', fg_device.route.get_gateway(
            filters=tbl_filter))

        # Make this copy to make agent think gw_port changed.
        router.ex_gw_port = copy.deepcopy(router.ex_gw_port)
        for subnet in gw_port['subnets']:
            subnet['gateway_ip'] = None
        new_fg_port = copy.deepcopy(fg_port)
        for subnet in new_fg_port['subnets']:
            subnet['gateway_ip'] = None
        router.router[n_const.FLOATINGIP_AGENT_INTF_KEY] = [new_fg_port]
        router.process()
        self.assertIsNone(ex_gw_device.route.get_gateway())
        self.assertIsNone(fg_device.route.get_gateway())
    def _assert_fip_namespace_deleted(
            self, ext_gateway_port, assert_ovs_interface=True):
        # Helper: trigger fip-namespace removal for the given external net
        # and assert the namespace, its OVS ports (optionally) and its
        # subscription are all cleaned up.
        ext_net_id = ext_gateway_port['network_id']
        fip_ns = self.agent.get_fip_ns(ext_net_id)
        fip_ns.unsubscribe = mock.Mock()
        self.agent.fipnamespace_delete_on_ext_net(
            self.agent.context, ext_net_id)
        if assert_ovs_interface:
            self._assert_interfaces_deleted_from_ovs()
        fip_ns_name = fip_ns.get_name()
        self.assertFalse(self._namespace_exists(fip_ns_name))
        self.assertTrue(fip_ns.destroyed)
        self.assertTrue(fip_ns.unsubscribe.called)
    def _setup_address_scope(self, internal_address_scope1,
                             internal_address_scope2, gw_address_scope=None):
        # Helper: create a DVR router whose two internal interfaces carry
        # the given IPv4 address scopes (optionally a scoped gateway too),
        # then plug one fake machine into each internal network.  Returns
        # (machine_on_net1, machine_on_net2, router).
        router_info = self.generate_dvr_router_info(enable_snat=True)
        address_scope1 = {
            str(lib_constants.IP_VERSION_4): internal_address_scope1}
        address_scope2 = {
            str(lib_constants.IP_VERSION_4): internal_address_scope2}
        if gw_address_scope:
            router_info['gw_port']['address_scopes'] = {
                str(lib_constants.IP_VERSION_4): gw_address_scope}
        # The fip agent gateway port mirrors the gateway port's scopes.
        fip_agent_gw_ports = router_info[
            n_const.FLOATINGIP_AGENT_INTF_KEY]
        fip_agent_gw_ports[0]['address_scopes'] = router_info['gw_port'][
            'address_scopes']
        router_info[lib_constants.INTERFACE_KEY][0]['address_scopes'] = (
            address_scope1)
        router_info[lib_constants.INTERFACE_KEY][1]['address_scopes'] = (
            address_scope2)
        # Renew the address scope
        router_info[n_const.SNAT_ROUTER_INTF_KEY] = []
        self._add_snat_port_info_to_router(
            router_info, router_info[lib_constants.INTERFACE_KEY])

        router = self.manage_router(self.agent, router_info)
        router_ip_cidr1 = self._port_first_ip_cidr(router.internal_ports[0])
        router_ip1 = router_ip_cidr1.partition('/')[0]
        router_ip_cidr2 = self._port_first_ip_cidr(router.internal_ports[1])
        router_ip2 = router_ip_cidr2.partition('/')[0]

        br_int = framework.get_ovs_bridge(
            self.agent.conf.ovs_integration_bridge)
        # Each fake machine gets an address 10 above the router port and the
        # router port as its default gateway.
        test_machine1 = self.useFixture(
            machine_fixtures.FakeMachine(
                br_int,
                net_helpers.increment_ip_cidr(router_ip_cidr1, 10),
                router_ip1))
        test_machine2 = self.useFixture(
            machine_fixtures.FakeMachine(
                br_int,
                net_helpers.increment_ip_cidr(router_ip_cidr2, 10),
                router_ip2))
        return test_machine1, test_machine2, router
    def test_connection_from_same_address_scope(self):
        self.agent.conf.agent_mode = 'dvr_snat'
        test_machine1, test_machine2, _ = self._setup_address_scope(
            'scope1', 'scope1')
        # Internal networks that are in the same address scope can connected
        # each other
        net_helpers.assert_ping(test_machine1.namespace, test_machine2.ip)
        net_helpers.assert_ping(test_machine2.namespace, test_machine1.ip)
    def test_connection_from_diff_address_scope(self):
        self.agent.conf.agent_mode = 'dvr_snat'
        test_machine1, test_machine2, _ = self._setup_address_scope(
            'scope1', 'scope2')
        # Internal networks that are not in the same address scope should
        # not reach each other
        test_machine1.assert_no_ping(test_machine2.ip)
        test_machine2.assert_no_ping(test_machine1.ip)
    @testtools.skip('bug/1543885')
    def test_fip_connection_for_address_scope(self):
        # Floating IPs must be reachable regardless of whether the fixed IP
        # is in the same address scope as the external network.
        self.agent.conf.agent_mode = 'dvr_snat'
        (machine_same_scope, machine_diff_scope,
         router) = self._setup_address_scope('scope1', 'scope2', 'scope1')
        router.router[lib_constants.FLOATINGIP_KEY] = []
        fip_same_scope = '19.4.4.10'
        self._add_fip(router, fip_same_scope,
                      fixed_address=machine_same_scope.ip,
                      host=self.agent.conf.host,
                      fixed_ip_address_scope='scope1')
        fip_diff_scope = '19.4.4.11'
        self._add_fip(router, fip_diff_scope,
                      fixed_address=machine_diff_scope.ip,
                      host=self.agent.conf.host,
                      fixed_ip_address_scope='scope2')
        router.process()

        br_ex = framework.get_ovs_bridge(
            self.agent.conf.external_network_bridge)
        src_machine = self.useFixture(
            machine_fixtures.FakeMachine(br_ex, '19.4.4.12/24'))
        # Floating ip should work no matter of address scope
        net_helpers.assert_ping(src_machine.namespace, fip_same_scope)
        net_helpers.assert_ping(src_machine.namespace, fip_diff_scope)
    def test_direct_route_for_address_scope(self):
        self.agent.conf.agent_mode = 'dvr_snat'
        (machine_same_scope, machine_diff_scope,
         router) = self._setup_address_scope('scope1', 'scope2', 'scope1')

        gw_port = router.get_ex_gw_port()
        gw_ip = self._port_first_ip_cidr(gw_port).partition('/')[0]
        br_ex = framework.get_ovs_bridge(
            self.agent.conf.external_network_bridge)
        # External machine uses the router's gateway IP as its next hop.
        src_machine = self.useFixture(
            machine_fixtures.FakeMachine(br_ex, '19.4.4.12/24', gw_ip))
        # For the internal networks that are in the same address scope as
        # external network, they can directly route to external network
        net_helpers.assert_ping(src_machine.namespace, machine_same_scope.ip)
        # For the internal networks that are not in the same address scope as
        # external networks. SNAT will be used. Direct route will not work
        # here.
        src_machine.assert_no_ping(machine_diff_scope.ip)
    def test_dvr_snat_namespace_has_ip_nonlocal_bind_disabled(self):
        # HA SNAT namespaces must have ip_nonlocal_bind switched off; skip
        # gracefully on kernels without per-namespace support for the knob.
        self.agent.conf.agent_mode = 'dvr_snat'
        router_info = self.generate_dvr_router_info(
            enable_ha=True, enable_snat=True)
        router = self.manage_router(self.agent, router_info)
        try:
            ip_nonlocal_bind_value = ip_lib.get_ip_nonlocal_bind(
                router.snat_namespace.name)
        except RuntimeError as rte:
            stat_message = 'cannot stat /proc/sys/net/ipv4/ip_nonlocal_bind'
            if stat_message in str(rte):
                raise self.skipException(
                    "This kernel doesn't support %s in network namespaces." % (
                        ip_lib.IP_NONLOCAL_BIND))
            raise
        self.assertEqual(0, ip_nonlocal_bind_value)
| 48.804078 | 79 | 0.653665 |
850a6a04d88a2b3863a9105b9b4c55b53193fb0b | 3,983 | py | Python | src/train.py | Throvn/WordDetectorNN | 087d028519c7a3cd148adf7907978c8cc1b4f032 | [
"MIT"
] | null | null | null | src/train.py | Throvn/WordDetectorNN | 087d028519c7a3cd148adf7907978c8cc1b4f032 | [
"MIT"
] | null | null | null | src/train.py | Throvn/WordDetectorNN | 087d028519c7a3cd148adf7907978c8cc1b4f032 | [
"MIT"
] | null | null | null | import argparse
import json
import torch
from path import Path
from torch.utils.tensorboard import SummaryWriter
from dataloader import DataLoaderIAM
from dataset import DatasetIAM, DatasetIAMSplit
from eval import evaluate
from loss import compute_loss
from net import WordDetectorNet
from visualization import visualize
# Number of optimizer steps taken so far; shared by train() and validate()
# as the tensorboard x-axis.
global_step = 0
def validate(net, loader, writer):
    """Evaluate the net on the validation loader, log images and metrics
    to tensorboard, and return the F1 score for model selection."""
    global global_step
    net.eval()
    loader.reset()
    res = evaluate(net, loader, max_aabbs=1000)

    # Log a visualization of the detected boxes for each validation image.
    for i, (img, aabbs) in enumerate(zip(res.batch_imgs, res.batch_aabbs)):
        vis = visualize(img, aabbs)
        # transpose HWC -> CHW, the layout add_image expects
        writer.add_image(f"img{i}", vis.transpose((2, 0, 1)), global_step)

    writer.add_scalar("val_loss", res.loss, global_step)
    writer.add_scalar("val_recall", res.metrics.recall(), global_step)
    writer.add_scalar("val_precision", res.metrics.precision(), global_step)
    writer.add_scalar("val_f1", res.metrics.f1(), global_step)

    return res.metrics.f1()
def train(net, optimizer, loader, writer):
    """Run one training epoch over the loader, logging per-batch loss."""
    global global_step
    net.train()
    loader.reset()
    loader.random()
    num_batches = len(loader)
    for batch_idx in range(num_batches):
        batch = loader[batch_idx]

        # Forward pass on the batch images.
        optimizer.zero_grad()
        pred = net(batch.batch_imgs)
        loss = compute_loss(pred, batch.batch_gt_maps)

        # Backward pass and parameter update.
        loss.backward()
        optimizer.step()

        # Progress output and tensorboard logging.
        print(f"{batch_idx + 1}/{num_batches}: {loss}")
        writer.add_scalar("loss", loss, global_step)
        global_step += 1
def main():
    """Train the word detector with early stopping on validation F1."""
    parser = argparse.ArgumentParser()
    parser.add_argument("--batch_size", type=int, default=10)
    parser.add_argument("--caching", action="store_true")
    parser.add_argument("--data_dir", type=Path, required=True)
    parser.add_argument("--pretrained", action="store_true")
    parser.add_argument("--val_freq", type=int, default=1)
    parser.add_argument("--early_stopping", type=int, default=50)
    args = parser.parse_args()

    writer = SummaryWriter("../log")

    net = WordDetectorNet()
    if args.pretrained:
        net.load_state_dict(torch.load("../model/weights"))
    # NOTE(review): training runs on CPU only here -- confirm this is
    # intentional rather than a leftover from debugging.
    net.to("cpu")

    # dataset that actually holds the data and 2 views for training and validation set
    dataset = DatasetIAM(
        args.data_dir, net.input_size, net.output_size, caching=args.caching
    )
    # First 2*batch_size samples form the validation view, the rest train.
    dataset_train = DatasetIAMSplit(dataset, 2 * args.batch_size, len(dataset))
    dataset_val = DatasetIAMSplit(dataset, 0, 2 * args.batch_size)

    # loaders
    loader_train = DataLoaderIAM(
        dataset_train, args.batch_size, net.input_size, net.output_size
    )
    loader_val = DataLoaderIAM(
        dataset_val, args.batch_size, net.input_size, net.output_size
    )

    # optimizer
    optimizer = torch.optim.Adam(net.parameters())

    # main training loop
    epoch = 0
    best_val_f1 = 0
    no_improvement_since = 0
    while True:
        epoch += 1
        print(f"Epoch: {epoch}")
        train(net, optimizer, loader_train, writer)

        if epoch % args.val_freq == 0:
            val_f1 = validate(net, loader_val, writer)
            if val_f1 > best_val_f1:
                print(
                    f"Improved on validation set (f1: {best_val_f1}->{val_f1}), save model"
                )
                no_improvement_since = 0
                best_val_f1 = val_f1
                # Persist best weights and a small metadata record.
                torch.save(net.state_dict(), "../model/weights")
                with open("../model/metadata.json", "w") as f:
                    json.dump({"epoch": epoch, "val_f1": val_f1}, f)
            else:
                no_improvement_since += 1

            # stop training if there were too many validation steps without improvement
            if no_improvement_since >= args.early_stopping:
                print(
                    f"No improvement for {no_improvement_since} validation steps, stop training"
                )
                break
# CLI entry point.
if __name__ == "__main__":
    main()
| 30.875969 | 92 | 0.640221 |
4b758c44cf65f1f26c98fdce1dccafbec022c327 | 4,035 | py | Python | service/common/common_service.py | youjiajia/loonflow | 0542e543ffea49b2eda864397b9875b6bf107dd5 | [
"MIT"
] | null | null | null | service/common/common_service.py | youjiajia/loonflow | 0542e543ffea49b2eda864397b9875b6bf107dd5 | [
"MIT"
] | null | null | null | service/common/common_service.py | youjiajia/loonflow | 0542e543ffea49b2eda864397b9875b6bf107dd5 | [
"MIT"
] | null | null | null | import time
import hashlib
from service.base_service import BaseService
from service.common.log_service import auto_log
class CommonService(BaseService):
    """Assorted signature-generation/validation and dict helpers."""

    def __init__(self):
        pass
@classmethod
@auto_log
def signature_check(cls, timestamp: str, signature: str, md5_key: str) -> tuple:
"""
signature check
:param timestamp:
:param signature:
:param md5_key:
:return:
"""
ori_str = timestamp + md5_key
tar_str = hashlib.md5(ori_str.encode(encoding='utf-8')).hexdigest()
if tar_str == signature:
# The validity of the signature: 120s
time_now_int = int(time.time())
# if abs(time_now_int - int(timestamp)) <= 120:
# todo for test
if abs(time_now_int - int(timestamp)) <= 12000000000000000:
return True, ''
else:
msg = 'The signature you provide in request header is expire, please ensure in 120s'
else:
msg = 'The signature you provide in request header is invalid'
return False, msg
    @classmethod
    @auto_log
    def gen_signature(cls, app_name: str) -> tuple:
        """
        gen signature info for the app's stored token
        :param app_name: name of a registered application
        :return: (True, {'signature': md5 hex digest, 'timestamp': unix time string})
        """
        from apps.account.models import AppToken
        # NOTE(review): assumes a matching non-deleted AppToken row exists;
        # otherwise app_obj is None and the attribute access below raises.
        app_obj = AppToken.objects.filter(app_name=app_name, is_deleted=0).first()
        md5_key = app_obj.token
        timestamp = str(int(time.time()))
        ori_str = timestamp + md5_key
        tar_str = hashlib.md5(ori_str.encode(encoding='utf-8')).hexdigest()
        return True, dict(signature=tar_str, timestamp=timestamp)
@classmethod
@auto_log
def gen_signature_by_token(cls, token: str) -> tuple:
md5_key = token
timestamp = str(int(time.time()))
ori_str = timestamp + md5_key
tar_str = hashlib.md5(ori_str.encode(encoding='utf-8')).hexdigest()
return True, dict(signature=tar_str, timestamp=timestamp)
@classmethod
@auto_log
def gen_hook_signature(cls, token: str) -> tuple:
"""
gen hook token signature
:param token:
:return:
"""
timestamp = str(int(time.time()))
ori_str = timestamp + token
tar_str = hashlib.md5(ori_str.encode(encoding='utf-8')).hexdigest()
return True, dict(signature=tar_str, timestamp=timestamp)
    @classmethod
    @auto_log
    def get_model_field(cls, app_name: str, model_name: str) -> tuple:
        """
        get model's field list
        :param app_name: django application label
        :param model_name: model class name within that application
        :return: (True, {'field_dict': {field name: verbose name, ...}})
        """
        from django.apps import apps
        model_obj = apps.get_model(app_name, model_name)
        fields = model_obj._meta.fields
        field_dict = {}
        for field0 in fields:
            field_dict[field0.name] = field0.verbose_name
        return True, dict(field_dict=field_dict)
@classmethod
@auto_log
def get_dict_blank_or_false_value_key_list(cls, dict_obj: object) -> tuple:
"""
get blank item value's key list in dict
:param dict_obj:
:return:
"""
result_list = []
for key, value in dict_obj.items():
if not value:
result_list.append(key)
return True, dict(result_list=result_list)
@classmethod
@auto_log
def check_dict_has_all_same_value(cls, dict_obj: object) -> tuple:
"""
check whether all key are equal in a dict
:param dict_obj:
:return:
"""
value_list = []
for key, value in dict_obj.items():
value_list.append(value)
value_0 = value_list[0]
for value in value_list:
if value_0 != value:
return False, 'not all dict value is same'
return True, ''
# Module-level singleton used by importers of this service.
common_service_ins = CommonService()


if __name__ == '__main__':
    # Ad-hoc smoke check: two equal nested-dict values -> (True, '')
    print(common_service_ins.check_dict_has_all_same_value({'a': {'a': 1, 'b': 2}, 'b': {'a': 1, 'b': 2}}))
| 31.523438 | 107 | 0.596035 |
3f416e12353887116df7c6799e0ae142067007b8 | 13,130 | py | Python | nova/tests/api/openstack/compute/contrib/test_flavors_extra_specs.py | vasart/nova | bca5004d367e0418e35f8a72fe0f2e106e977ab0 | [
"Apache-2.0"
] | 1 | 2021-09-10T15:29:02.000Z | 2021-09-10T15:29:02.000Z | nova/tests/api/openstack/compute/contrib/test_flavors_extra_specs.py | PFZheng/nova | 84be8abbccb5ddc2d7c5a7db59019ed1edb19e7f | [
"Apache-2.0"
] | null | null | null | nova/tests/api/openstack/compute/contrib/test_flavors_extra_specs.py | PFZheng/nova | 84be8abbccb5ddc2d7c5a7db59019ed1edb19e7f | [
"Apache-2.0"
] | null | null | null | # Copyright 2011 University of Southern California
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
import webob
from nova.api.openstack.compute.contrib import flavorextraspecs
import nova.db
from nova import exception
from nova import test
from nova.tests.api.openstack import fakes
from nova.tests.objects import test_flavor
def return_create_flavor_extra_specs(context, flavor_id, extra_specs):
    # DB-layer stub: ignore the arguments and return the canned spec dict.
    return stub_flavor_extra_specs()
def return_flavor_extra_specs(context, flavor_id):
    # DB-layer stub for flavor_extra_specs_get.
    return stub_flavor_extra_specs()
def return_flavor_extra_specs_item(context, flavor_id, key):
    # DB-layer stub: return only the requested key from the canned specs.
    return {key: stub_flavor_extra_specs()[key]}
def return_empty_flavor_extra_specs(context, flavor_id):
    """DB-layer stub simulating a flavor with no extra specs."""
    return dict()
def delete_flavor_extra_specs(context, flavor_id, key):
    """DB-layer stub: pretend deletion always succeeds silently."""
    return None
def stub_flavor_extra_specs():
    """Canned extra-specs dict shared by the DB stubs above (key1..key5)."""
    return {"key%d" % index: "value%d" % index for index in range(1, 6)}
class FlavorsExtraSpecsTest(test.TestCase):
    """Unit tests for the os-extra_specs flavor extension controller.

    The DB layer is replaced either with the module-level stubs above
    (via self.stubs.Set) or with mock.patch on nova.db functions.
    """

    def setUp(self):
        super(FlavorsExtraSpecsTest, self).setUp()
        fakes.stub_out_key_pair_funcs(self.stubs)
        self.controller = flavorextraspecs.FlavorExtraSpecsController()

    def test_index(self):
        flavor = dict(test_flavor.fake_flavor,
                      extra_specs={'key1': 'value1'})
        req = fakes.HTTPRequest.blank('/v2/fake/flavors/1/os-extra_specs')
        with mock.patch('nova.db.flavor_get_by_flavor_id') as mock_get:
            mock_get.return_value = flavor
            res_dict = self.controller.index(req, 1)
        self.assertEqual('value1', res_dict['extra_specs']['key1'])

    def test_index_no_data(self):
        self.stubs.Set(nova.db, 'flavor_extra_specs_get',
                       return_empty_flavor_extra_specs)
        req = fakes.HTTPRequest.blank('/v2/fake/flavors/1/os-extra_specs')
        res_dict = self.controller.index(req, 1)
        self.assertEqual(0, len(res_dict['extra_specs']))

    def test_show(self):
        flavor = dict(test_flavor.fake_flavor,
                      extra_specs={'key5': 'value5'})
        req = fakes.HTTPRequest.blank('/v2/fake/flavors/1/os-extra_specs' +
                                      '/key5')
        with mock.patch('nova.db.flavor_get_by_flavor_id') as mock_get:
            mock_get.return_value = flavor
            res_dict = self.controller.show(req, 1, 'key5')
        self.assertEqual('value5', res_dict['key5'])

    def test_show_spec_not_found(self):
        self.stubs.Set(nova.db, 'flavor_extra_specs_get',
                       return_empty_flavor_extra_specs)
        req = fakes.HTTPRequest.blank('/v2/fake/flavors/1/os-extra_specs' +
                                      '/key6')
        self.assertRaises(webob.exc.HTTPNotFound, self.controller.show,
                          req, 1, 'key6')

    def test_not_found_because_flavor(self):
        # A missing flavor must surface as 404 on every controller action.
        req = fakes.HTTPRequestV3.blank('/flavors/1/extra-specs/key5',
                                        use_admin_context=True)
        with mock.patch('nova.db.flavor_get_by_flavor_id') as mock_get:
            mock_get.side_effect = exception.FlavorNotFound(flavor_id='1')
            self.assertRaises(webob.exc.HTTPNotFound, self.controller.show,
                              req, 1, 'key5')
            self.assertRaises(webob.exc.HTTPNotFound, self.controller.update,
                              req, 1, 'key5', {'key5': 'value5'})
            self.assertRaises(webob.exc.HTTPNotFound, self.controller.delete,
                              req, 1, 'key5')
        req = fakes.HTTPRequestV3.blank('/flavors/1/extra-specs',
                                        use_admin_context=True)
        with mock.patch('nova.db.flavor_get_by_flavor_id') as mock_get:
            mock_get.side_effect = exception.FlavorNotFound(flavor_id='1')
            self.assertRaises(webob.exc.HTTPNotFound, self.controller.create,
                              req, 1, {'extra_specs': {'key5': 'value5'}})

    def test_delete(self):
        flavor = dict(test_flavor.fake_flavor,
                      extra_specs={'key5': 'value5'})
        self.stubs.Set(nova.db, 'flavor_extra_specs_delete',
                       delete_flavor_extra_specs)
        req = fakes.HTTPRequest.blank('/v2/fake/flavors/1/os-extra_specs' +
                                      '/key5', use_admin_context=True)
        with mock.patch('nova.db.flavor_get_by_flavor_id') as mock_get:
            mock_get.return_value = flavor
            self.controller.delete(req, 1, 'key5')

    def test_delete_no_admin(self):
        self.stubs.Set(nova.db, 'flavor_extra_specs_delete',
                       delete_flavor_extra_specs)
        req = fakes.HTTPRequest.blank('/v2/fake/flavors/1/os-extra_specs' +
                                      '/key5')
        self.assertRaises(exception.Forbidden, self.controller.delete,
                          req, 1, 'key 5')

    def test_delete_spec_not_found(self):
        req = fakes.HTTPRequest.blank('/v2/fake/flavors/1/os-extra_specs' +
                                      '/key6', use_admin_context=True)
        self.assertRaises(webob.exc.HTTPNotFound, self.controller.delete,
                          req, 1, 'key6')

    def test_create(self):
        self.stubs.Set(nova.db,
                       'flavor_extra_specs_update_or_create',
                       return_create_flavor_extra_specs)
        body = {"extra_specs": {"key1": "value1"}}
        req = fakes.HTTPRequest.blank('/v2/fake/flavors/1/os-extra_specs',
                                      use_admin_context=True)
        res_dict = self.controller.create(req, 1, body)
        self.assertEqual('value1', res_dict['extra_specs']['key1'])

    def test_create_no_admin(self):
        self.stubs.Set(nova.db,
                       'flavor_extra_specs_update_or_create',
                       return_create_flavor_extra_specs)
        body = {"extra_specs": {"key1": "value1"}}
        req = fakes.HTTPRequest.blank('/v2/fake/flavors/1/os-extra_specs')
        self.assertRaises(exception.Forbidden, self.controller.create,
                          req, 1, body)

    def _test_create_bad_request(self, body):
        # Shared helper: any malformed create body must yield 400.
        self.stubs.Set(nova.db,
                       'flavor_extra_specs_update_or_create',
                       return_create_flavor_extra_specs)
        req = fakes.HTTPRequest.blank('/v2/fake/flavors/1/os-extra_specs',
                                      use_admin_context=True)
        self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
                          req, 1, body)

    def test_create_empty_body(self):
        self._test_create_bad_request('')

    def test_create_non_dict_extra_specs(self):
        self._test_create_bad_request({"extra_specs": "non_dict"})

    def test_create_non_string_value(self):
        self._test_create_bad_request({"extra_specs": {"key1": None}})

    def test_create_zero_length_key(self):
        self._test_create_bad_request({"extra_specs": {"": "value1"}})

    def test_create_long_key(self):
        key = "a" * 256
        self._test_create_bad_request({"extra_specs": {key: "value1"}})

    def test_create_long_value(self):
        value = "a" * 256
        self._test_create_bad_request({"extra_specs": {"key1": value}})

    # NOTE(review): "side_effects" is not a Mock attribute; "side_effect"
    # (singular) was probably intended here and below, so the stub is never
    # actually wired in -- verify before relying on the mocked return value.
    @mock.patch('nova.db.flavor_extra_specs_update_or_create')
    def test_create_invalid_specs_key(self, mock_flavor_extra_specs):
        invalid_keys = ("key1/", "<key>", "$$akey$", "!akey", "")
        mock_flavor_extra_specs.side_effects = return_create_flavor_extra_specs
        for key in invalid_keys:
            body = {"extra_specs": {key: "value1"}}
            req = fakes.HTTPRequest.blank('/v2/fake/flavors/1/os-extra_specs',
                                          use_admin_context=True)
            self.assertRaises(webob.exc.HTTPBadRequest, self.controller.create,
                              req, 1, body)

    @mock.patch('nova.db.flavor_extra_specs_update_or_create')
    def test_create_valid_specs_key(self, mock_flavor_extra_specs):
        valid_keys = ("key1", "month.price", "I_am-a Key", "finance:g2")
        mock_flavor_extra_specs.side_effects = return_create_flavor_extra_specs
        for key in valid_keys:
            body = {"extra_specs": {key: "value1"}}
            req = fakes.HTTPRequest.blank('/v2/fake/flavors/1/os-extra_specs',
                                          use_admin_context=True)
            res_dict = self.controller.create(req, 1, body)
            self.assertEqual('value1', res_dict['extra_specs'][key])

    def test_update_item(self):
        self.stubs.Set(nova.db,
                       'flavor_extra_specs_update_or_create',
                       return_create_flavor_extra_specs)
        body = {"key1": "value1"}
        req = fakes.HTTPRequest.blank('/v2/fake/flavors/1/os-extra_specs' +
                                      '/key1', use_admin_context=True)
        res_dict = self.controller.update(req, 1, 'key1', body)
        self.assertEqual('value1', res_dict['key1'])

    def test_update_item_no_admin(self):
        self.stubs.Set(nova.db,
                       'flavor_extra_specs_update_or_create',
                       return_create_flavor_extra_specs)
        body = {"key1": "value1"}
        req = fakes.HTTPRequest.blank('/v2/fake/flavors/1/os-extra_specs' +
                                      '/key1')
        self.assertRaises(exception.Forbidden, self.controller.update,
                          req, 1, 'key1', body)

    def _test_update_item_bad_request(self, body):
        # Shared helper: any malformed update body must yield 400.
        self.stubs.Set(nova.db,
                       'flavor_extra_specs_update_or_create',
                       return_create_flavor_extra_specs)
        req = fakes.HTTPRequest.blank('/v2/fake/flavors/1/os-extra_specs' +
                                      '/key1', use_admin_context=True)
        self.assertRaises(webob.exc.HTTPBadRequest, self.controller.update,
                          req, 1, 'key1', body)

    def test_update_item_empty_body(self):
        self._test_update_item_bad_request('')

    def test_update_item_too_many_keys(self):
        body = {"key1": "value1", "key2": "value2"}
        self._test_update_item_bad_request(body)

    def test_update_item_non_dict_extra_specs(self):
        self._test_update_item_bad_request("non_dict")

    def test_update_item_non_string_value(self):
        self._test_update_item_bad_request({"key1": None})

    def test_update_item_zero_length_key(self):
        self._test_update_item_bad_request({"": "value1"})

    def test_update_item_long_key(self):
        key = "a" * 256
        self._test_update_item_bad_request({key: "value1"})

    def test_update_item_long_value(self):
        value = "a" * 256
        self._test_update_item_bad_request({"key1": value})

    def test_update_item_body_uri_mismatch(self):
        self.stubs.Set(nova.db,
                       'flavor_extra_specs_update_or_create',
                       return_create_flavor_extra_specs)
        body = {"key1": "value1"}
        req = fakes.HTTPRequest.blank('/v2/fake/flavors/1/os-extra_specs/bad',
                                      use_admin_context=True)
        self.assertRaises(webob.exc.HTTPBadRequest, self.controller.update,
                          req, 1, 'bad', body)
class FlavorsExtraSpecsXMLSerializerTest(test.TestCase):
    """XML serialization tests for the extra-specs templates."""

    def test_serializer(self):
        serializer = flavorextraspecs.ExtraSpecsTemplate()
        expected = ("<?xml version='1.0' encoding='UTF-8'?>\n"
                    '<extra_specs><key1>value1</key1></extra_specs>')
        text = serializer.serialize(dict(extra_specs={"key1": "value1"}))
        self.assertEqual(text, expected)

    def test_show_update_serializer(self):
        serializer = flavorextraspecs.ExtraSpecTemplate()
        expected = ("<?xml version='1.0' encoding='UTF-8'?>\n"
                    '<extra_spec key="key1">value1</extra_spec>')
        text = serializer.serialize(dict({"key1": "value1"}))
        self.assertEqual(text, expected)

    def test_serializer_with_colon_tagname(self):
        # Our test object to serialize
        obj = {'extra_specs': {'foo:bar': '999'}}
        serializer = flavorextraspecs.ExtraSpecsTemplate()
        # Colon in the key becomes a namespaced tag with an xmlns declaration.
        expected_xml = (("<?xml version='1.0' encoding='UTF-8'?>\n"
                         '<extra_specs><foo:bar xmlns:foo="foo">999</foo:bar>'
                         '</extra_specs>'))
        result = serializer.serialize(obj)
        self.assertEqual(expected_xml, result)
| 41.159875 | 79 | 0.61706 |
89f324bc963b3e80a28cc87c398a40101bb900b6 | 12,264 | py | Python | spar_python/report_generation/common/graphing.py | nathanawmk/SPARTA | 6eeb28b2dd147088b6e851876b36eeba3e700f16 | [
"BSD-2-Clause"
] | 37 | 2017-06-09T13:55:23.000Z | 2022-01-28T12:51:17.000Z | spar_python/report_generation/common/graphing.py | nathanawmk/SPARTA | 6eeb28b2dd147088b6e851876b36eeba3e700f16 | [
"BSD-2-Clause"
] | null | null | null | spar_python/report_generation/common/graphing.py | nathanawmk/SPARTA | 6eeb28b2dd147088b6e851876b36eeba3e700f16 | [
"BSD-2-Clause"
] | 5 | 2017-06-09T13:55:26.000Z | 2021-11-11T03:51:56.000Z | # *****************************************************************
# Copyright 2013 MIT Lincoln Laboratory
# Project: SPAR
# Authors: MZ
# Description: Class for graphing results from other analytics ocde
#
#
# Modifications:
# Date Name Modification
# ---- ---- ------------
# Aug 2 MZ Original Version
# Aug 12 SY Refactored into classes
#
# **************************************************************
# general imports:
import sys
import os
import matplotlib
# Force matplotlib to not use any Xwindows backend.
matplotlib.use('Agg')
import matplotlib.markers as mrk
import matplotlib.pyplot as plt
import numpy as np
from mpl_toolkits.mplot3d import Axes3D
import colorsys
import StringIO
DETAIL_2D = 300
# The number of input points to use when graphing the best-fit line
DETAIL_3D = 12
# The number of input points to use when graphing the best-fit surface
VALID_SCALES = ["log", "linear", "symlog"]
GOOD_POINTS = ['o','D','d','s','x','^','h','+','*','1','p','3','4','H']
ALL_POINTS = [i for i in mrk.MarkerStyle().markers]
BAD_POINTS = [val for val in ALL_POINTS if val not in GOOD_POINTS]
BIGNUMBER = 1000
class BadGraphingInputs(Exception):
    """Raised when graphing helpers receive inconsistent inputs."""
    pass
def get_colors(num_colors):
    """
    Yields num_colors evenly-spaced "#rrggbb" strings for 2-d graphing.
    """
    for index in xrange(num_colors):
        # Walk the hue wheel in equal steps at full saturation, value 230,
        # and render each resulting rgb triple as a hex color string.
        hue = 1. * index / num_colors
        red, green, blue = [int(channel) for channel in
                            colorsys.hsv_to_rgb(hue, 1.0, 230)]
        yield "#{0:02x}{1:02x}{2:02x}".format(red, green, blue)
def comparison_percentile_graph(percentile_getter, y_label, y_scale='linear'):
    """
    Graph performer vs. baseline percentiles on a single set of axes.

    Args:
        percentile_getter: a PercentileGetter object
        y_label: a label for the y-axis
        y_scale: The scale to use when graphing the output values
            possible values are 'linear','log', and 'symlog'
    Returns:
        A string representation of the graph, created by StringIO
    """
    performer_percentiles = percentile_getter.get_performer_percentiles()
    baseline_percentiles = percentile_getter.get_baseline_percentiles()
    # Delegate to the generic grapher with fixed series labels.
    return general_percentile_graph(
        datasets=[(performer_percentiles, "Performer Percentiles"),
                  (baseline_percentiles, "Baseline Percentiles")],
        y_label=y_label, y_scale=y_scale)
def general_percentile_graph(datasets, y_label, y_scale='linear'):
    """
    Args:
        datasets: A list of (y value list, data_set_name) tuples, where each
            y value list holds exactly 100 percentile values.
        y_label: a label for the y-axis
        y_scale: The scale to use when graphing the output values
            possible values are 'linear','log', and 'symlog'
    Returns:
        A string representation of the graph, created by StringIO
    """
    plt.close()
    fig = plt.figure()
    perf_plot = fig.add_subplot(211)  # The regular 111 orientation cuts off
                                      # the line information.
    perf_plot.set_ylabel(y_label)
    perf_plot.set_yscale(y_scale)
    # BUG FIX: datasets holds (values, label) tuples (see the docstring and
    # the plotting loop below), but the original unpacked (label, values)
    # here, so max() ran over the label *strings* instead of the y values.
    if ((max([max(values) for (values, label) in datasets]) > BIGNUMBER)
            and (y_scale == 'linear')):
        # Switch to scientific notation for large y values.
        plt.ticklabel_format(style='sci', axis='y', scilimits=(0, 0))
    plt.xlim((0, 100))
    colors = get_colors(len(datasets))
    for (color, (values, label)) in zip(colors, datasets):
        assert len(values) == 100
        perf_plot.plot(xrange(100), values, 'o', color=color, label=label)
    perf_plot.legend(loc='upper center', bbox_to_anchor=(0.5, -0.10),
                     fancybox=True, shadow=True)
    string_fig = StringIO.StringIO()
    plt.savefig(string_fig, bbox_inches='tight')
    return string_fig.getvalue()
def graph2d(plot_name, datasets, x_label, y_label,
            x_scale='linear', y_scale='linear', drawlines=False):
    """
    Plot one or more 2D datasets (scatter or line), optionally with a
    best-fit curve per dataset.

    Args:
        plot_name: The name of the plot.
        datasets: A list of (x value list, y value list, data_set_name,
            best_fit_function) tuples. best_fit_function may be falsy,
            in which case no fit curve is drawn for that dataset.
        x_label: The x-axis label
        y_label: The y-axis label
        x_scale: The scale to use when graphing the x_value values;
            possible values are 'linear', 'log', and 'symlog'
        y_scale: The scale to use when graphing the output values;
            possible values are 'linear', 'log', and 'symlog'
        drawlines: Boolean dictating whether lines are drawn between
            adjacent points (default=False)
    Returns:
        A string representation of the graph, created by StringIO
    """
    assert x_scale in VALID_SCALES, 'Invalid x_scale argument'
    assert y_scale in VALID_SCALES, 'Invalid y_scale argument'
    plt.close()
    fig = plt.figure()
    # 211 leaves empty space below the axes so the legend fits.
    plot = fig.add_subplot(211)
    plot.set_title(plot_name)
    plot.set_ylabel(y_label)
    plot.set_xlabel(x_label)
    plot.set_xscale(x_scale)
    plot.set_yscale(y_scale)
    num_datasets = len(datasets)
    colors = get_colors(num_datasets)
    mark_id = 0
    for (this_color, (x_value, y_value, name, best_fit_function)
        ) in zip(colors, datasets):
        if (drawlines):
            plot.plot(x_value, y_value, c=this_color, label=name,
                     marker=GOOD_POINTS[mark_id])
        else:
            plot.scatter(x_value, y_value, c=this_color, label=name,
                        marker=GOOD_POINTS[mark_id])
        # NOTE(review): these extrema span *all* datasets and are
        # recomputed on every loop iteration; loop-invariant, could be
        # hoisted.
        max_x_val = max([max(i) for i in [datasets[j][0]
                                          for j in xrange(len(datasets))]])
        min_x_val = min([min(i) for i in [datasets[j][0]
                                          for j in xrange(len(datasets))]])
        max_y_val = max([max(i) for i in [datasets[j][1]
                                          for j in xrange(len(datasets))]])
        # Large magnitudes get scientific-notation tick labels.
        if(max_x_val > BIGNUMBER):
            plt.ticklabel_format(style='sci', axis='x', scilimits=(0,0))
        if(max_y_val > BIGNUMBER):
            plt.ticklabel_format(style='sci', axis='y', scilimits=(0,0))
        if best_fit_function:
            # Label the fit curve with its r-squared against this dataset.
            best_fit_label = "%s (rsquared = %s)" % (
                best_fit_function.string,
                str(best_fit_function.get_rsquared(
                    inputs=[x_value], outputs=y_value)))
            plot.plot(
                np.linspace(min_x_val, max_x_val, DETAIL_2D),
                [best_fit_function.function([i]) for i in np.linspace(
                    min_x_val, max_x_val, DETAIL_2D)],
                color=this_color,
                label=best_fit_label)
        else:
            # for legend allignment, add an empty plot with a blank label:
            plot.plot([0], [0], c="w", linewidth=0, label=" ")
        # Only advance the marker while every dataset can get a distinct
        # one; with more datasets than markers, reuse GOOD_POINTS[0].
        if len(datasets) <= len(GOOD_POINTS):
            mark_id += 1
    plot.legend(
        loc='upper center', bbox_to_anchor=(0.5, -.20),
        fancybox=True, shadow=True, ncol=2)
    string_fig = StringIO.StringIO()
    plt.savefig(string_fig, bbox_inches='tight')
    return string_fig.getvalue()
def graph3d(plot_name,
            x_values, y_values, z_values,
            x_label, y_label, z_label,
            x_scale='linear', y_scale='linear', z_scale='linear',
            best_fit_function=None):
    """
    Scatter-plot a single 3D dataset, optionally with a best-fit surface.

    Args:
        plot_name: The name of the plot.
        x_values: List of values of the first independant variable
        y_values: List of values of the second independant variable
        z_values: List of values of the dependant variable
        x_label: The x-axis label
        y_label: The y-axis label
        z_label: The z-axis label
        x_scale: The scale to use when graphing x_value's values;
            possible values are 'linear', 'log', and 'symlog'
        y_scale: The scale to use when graphing y_value's values;
            possible values are 'linear', 'log', and 'symlog'
        z_scale: The scale to use when graphing z_value's values;
            possible values are 'linear', 'log', and 'symlog'
        best_fit_function (optional): The function to use to create
            a best-fit-surface. to compare to the value of the data.
    Returns:
        A string representation of the graph, created by StringIO
    Raises:
        BadGraphingInputs: if any of the three value lists is empty.
    """
    if ((not x_values) or (not y_values) or (not z_values)):
        raise BadGraphingInputs
    assert x_scale in VALID_SCALES, 'Invalid x_value_scale argument'
    assert y_scale in VALID_SCALES, 'Invalid y_value_scale argument'
    assert z_scale in VALID_SCALES, 'Invalid z_value_value_scale argument'
    plt.close()
    [color, best_fit_color] = get_colors(2)
    fig = plt.figure()
    regress_plot = fig.add_subplot(111, projection='3d')
    regress_plot.set_title(plot_name)
    # Keep the label artists: they are passed to savefig() as
    # bbox_extra_artists so they are not clipped from the output.
    xlabel = regress_plot.set_xlabel(x_label)
    ylabel = regress_plot.set_ylabel(y_label)
    zlabel = regress_plot.set_zlabel(z_label)
    regress_plot.set_xscale(x_scale)
    regress_plot.set_yscale(y_scale)
    regress_plot.set_zscale(z_scale)
    regress_plot.scatter(x_values, y_values, z_values,
                         c=color, label=plot_name)
    # Large magnitudes get scientific-notation tick labels.
    if(max(x_values) > BIGNUMBER):
        plt.ticklabel_format(style='sci', axis='x', scilimits=(0,0))
    if(max(y_values) > BIGNUMBER):
        plt.ticklabel_format(style='sci', axis='y', scilimits=(0,0))
    if(max(z_values) > BIGNUMBER):
        plt.ticklabel_format(style='sci', axis='z', scilimits=(0,0))
    # Only draw the fit surface when every axis has at least two distinct
    # values: plot_trisurf cannot triangulate degenerate (flat) data.
    if (best_fit_function) and all(
            [len(list(set(values))) > 1
             for values in [x_values, y_values, z_values]]):
        best_fit_label = "%s (rsquared = %s)" % (
            best_fit_function.string,
            str(best_fit_function.get_rsquared(
                inputs=[x_values, y_values], outputs=z_values)))
        # Sample the fit on a DETAIL_3D x DETAIL_3D grid over the data range.
        sample_x_values = []
        sample_y_values = []
        for i in np.linspace(min(x_values), max(x_values), DETAIL_3D):
            for j in np.linspace(min(y_values), max(y_values), DETAIL_3D):
                sample_x_values.append(i)
                sample_y_values.append(j)
        sample_z_values = []
        for i in xrange(len(sample_x_values)):
            sample_z_values.append(best_fit_function.function(
                [sample_x_values[i], sample_y_values[i]]))
        regress_plot.plot_trisurf(sample_x_values, sample_y_values,
                                  sample_z_values,
                                  color=best_fit_color,
                                  label=best_fit_label)
    #regress_plot.legend(loc='upper center', bbox_to_anchor=(0.5, .20),
    #                    fancybox=True, shadow=True)
    string_fig = StringIO.StringIO()
    plt.savefig(string_fig, bbox_extra_artists=[xlabel, ylabel, zlabel],
                bbox_inches='tight')
    return string_fig.getvalue()
def box_plot(plot_name, inputs, y_label=None, y_scale='linear'):
    """
    Draw one box-and-whisker per labelled data series.

    Args:
        plot_name: The name of the plot
        inputs: A list of tuples of the following form: (label, data), where
            label is the string corresponding to the list of data points, and
            data is the list of data points.
        y_label: The y-axis label
        y_scale: The scale to use when graphing y_value's values;
            possible values are 'linear', 'log', and 'symlog'
    Returns:
        A string representation of the graph, created by StringIO
    """
    num_boxes = len(inputs)
    plt.close()
    # Figure width scales with the number of boxes so labels stay readable.
    fig, ax1 = plt.subplots(1, figsize=(num_boxes,6))
    plot = fig.add_subplot(111)
    plot.set_title(plot_name)
    if y_label: plot.set_ylabel(y_label)
    plot.set_yscale(y_scale)
    plot.boxplot([data for (label, data) in inputs])
    # set the x-tick names (rotated vertically to avoid overlap):
    x_tick_names = plt.setp(
        ax1, xticklabels=[label for (label, data) in inputs])
    plt.setp(x_tick_names, rotation=90, fontsize=8)
    # save the box plot:
    string_fig = StringIO.StringIO()
    plt.savefig(string_fig, bbox_inches='tight')
    return string_fig.getvalue()
def write_graph(path, graph):
"""
Args:
path: The path to write the graph to. Should be something like
"<filename>.png".
graph: The string representing a graph, recieved from one of the
graphing functions.
"""
graph_file = open(path,'w')
graph_file.write(graph)
graph_file.close()
| 41.292929 | 78 | 0.617335 |
b9d191f1ae8baa638208ff3434519d62148b33b9 | 8,671 | py | Python | config/pydPiper/pages_lcd_20x4 (original).py | DennisB66/MoodeAudio-Scripting | 9534fdadc9989e3b5cf9c15efd011fcbefd5820c | [
"MIT"
] | 72 | 2017-03-13T11:01:01.000Z | 2021-11-29T20:53:53.000Z | config/pydPiper/pages_lcd_20x4 (original).py | DennisB66/MoodeAudio-Scripting | 9534fdadc9989e3b5cf9c15efd011fcbefd5820c | [
"MIT"
] | 126 | 2017-03-13T16:06:59.000Z | 2022-03-27T14:14:49.000Z | config/pydPiper/pages_lcd_20x4 (original).py | DennisB66/MoodeAudio-Scripting | 9534fdadc9989e3b5cf9c15efd011fcbefd5820c | [
"MIT"
] | 41 | 2017-10-11T18:37:50.000Z | 2021-06-18T17:02:45.000Z | #!/usr/bin/python.pydPiper
# coding: UTF-8
from __future__ import unicode_literals
# Page Definitions
# See Page Format.txt for instructions and examples on how to modify your display settings
# Load the fonts needed for this system
# Maps a logical font name to its bitmap font file and (width, height)
# cell size in pixels. 'small' is the fallback font ('default': True).
FONTS = {
    'small': { 'default':True, 'file':'latin1_5x8_lcd.fnt','size':(5,8) },
    'large': { 'file':'BigFont_10x16_fixed.fnt', 'size':(10,16) },
    'tiny': { 'file':'upperasciiwide_3x5_fixed.fnt', 'size':(5,5) },
}
# Image assets referenced by widgets (e.g. the volume progress bar).
IMAGES = {
    'progbar': {'file':'progressbar_100x8.png' },
}
# Load the Widgets that will be used to produce the display pages
# Widget definitions. Each entry maps a widget name to its type ('text',
# 'progressbar', 'progressimagebar'), a format string, the data variables
# it renders (with '|' transformation filters), a font, and optional
# layout ('size', 'just', 'varwidth') and animation ('effect') settings.
# The \ue00x characters are icon glyphs in the large bitmap font.
WIDGETS = {
    'splash': { 'type':'text', 'format':'pydPiper\nStarting...', 'font':'small' },
    # Now-playing header and track-position counter:
    'nowplaying': { 'type':'text', 'format':'{0}', 'variables':['actPlayer|upper'], 'font':'small', 'varwidth':True},
    'nowplayingdata': { 'type':'text', 'format':'{0} OF {1}', 'variables':['playlist_position', 'playlist_length'], 'font':'small', 'just':'right','size':(40,8),'varwidth':True},
    # Track metadata (scrolling variants loop when the text overflows):
    'title': { 'type':'text', 'format':'{0}', 'variables':['title'], 'font':'small','varwidth':True,'effect':('scroll','left',5,1,20,'onloop',3,100) },
    'artist': { 'type':'text', 'format':'{0}', 'variables':['artist'], 'font':'small','varwidth':True,'effect':('scroll','left',5,1,20,'onloop',3,100)},
    'artistradio': { 'type':'text', 'format':'{0}', 'variables':['artist'], 'font':'small','varwidth':True},
    'album': { 'type':'text', 'format':'{0}', 'variables':['album'], 'font':'small','varwidth':True,'effect':('scroll','left',5,1,20,'onloop',3,100)},
    # Clock widgets:
    'time': { 'type':'text', 'format':'{0}', 'variables':['localtime|strftime+%-I:%M'], 'font':'large', 'just':'right', 'varwidth':True, 'size':(65,16) },
    'timesmall': { 'type':'text', 'format':'{0}', 'variables':['localtime|strftime+%-I:%M'], 'font':'small', 'just':'right', 'varwidth':True, 'size':(40,8) },
    'ampm': { 'type':'text', 'format':'{0}', 'variables':['localtime|strftime+%p'], 'font':'small', 'varwidth':True },
    # Weather widgets:
    'temp': { 'type':'text', 'format':'{0}', 'variables':['outside_temp_formatted'], 'font':'small', 'just':'right', 'size':(25,8) },
    'temphilow': { 'type':'text', 'format':'H {0}\nL {1}', 'variables':['outside_temp_max|int', 'outside_temp_min|int'], 'font':'small', 'just':'right', 'size':(25,16) },
    'conditions': { 'type':'text', 'format':'{0}', 'variables':['outside_conditions|capitalize'], 'font':'small','varwidth':True, 'size':(55,16), 'effect':('scroll','left',5,1,20,'onloop',3,55)},
    'conditions2': { 'type':'text', 'format':'{0}', 'variables':['outside_conditions|capitalize'], 'font':'small','varwidth':True, 'size':(75,8), 'effect':('scroll','left',5,1,20,'onloop',3,75)},
    'radio': { 'type':'text', 'format':"RADIO", 'font':'small', 'varwidth':True, 'size':(25,8), 'just':'right' },
    # Volume and progress indicators:
    'volume': { 'type':'text', 'format':'VOLUME ({0})', 'variables':['volume'], 'font':'small', 'varwidth':True, 'just':'left', 'size':(95,8)},
    'volumebar': { 'type':'progressimagebar', 'image':'progbar','value':'volume', 'rangeval':(0,100) },
    'songprogresstext': { 'type':'text', 'format':'SONG', 'font':'small' },
    'songprogress': { 'type':'progressbar', 'value':'elapsed', 'rangeval':(0,'length'), 'size':(40,8) },
    'trkprogresstext': { 'type':'text', 'format':'TRK', 'font':'small' },
    'trkprogress': { 'type':'progressbar', 'value':'playlist_position', 'rangeval':(1,'playlist_length'), 'size':(25,8) },
    'elapsed': { 'type':'text', 'format':'{0}', 'variables':['elapsed_formatted'], 'font':'small' },
    # Player-state symbols and mode announcements:
    'playstopsymbol': { 'type':'text', 'format':'{0}', 'variables':['state|select+play+\ue000+stop+\ue001'], 'font':'small', 'just':'left' },
    'randomsymbol': { 'type':'text', 'format':'\ue002 ', 'font':'large', 'varwidth':True, 'size':(10,16) },
    'random': { 'type':'text', 'format':'Random\n{0}', 'variables':['random|onoff|Capitalize'], 'font':'small', 'varwidth':True, 'size':(65,16) },
    'repeatoncesymbol': { 'type':'text', 'format':'\ue003 ', 'font':'large', 'varwidth':True, 'size':(10,16) },
    'repeatonce': { 'type':'text', 'format':'Repeat Once\n{0}', 'variables':['single|onoff|Capitalize'], 'font':'small', 'varwidth':True, 'just':'center', 'size':(65,16) },
    'repeatallsymbol': { 'type':'text', 'format':'\ue004 ', 'font':'large', 'varwidth':True, 'size':(10,16) },
    'repeatall': { 'type':'text', 'format':'Repeat All\n{0}', 'variables':['repeat|onoff|Capitalize'], 'font':'small', 'varwidth':True, 'size':(65,16) },
    # Overheating alert (scrolls once on display):
    'temptoohigh': { 'type':'text', 'format':'\ue005 Warning System Too Hot ({0})', 'variables':['system_temp_formatted'], 'font':'large', 'varwidth':True, 'effect':('scroll','left',5,1,20,'onstart',3,80) }
}
# Assemble the widgets into canvases. Only needed if you need to combine multiple widgets together so you can produce effects on them as a group.
# Canvases compose widgets at (x, y) pixel offsets into a single drawable
# surface of the given 'size', so effects can apply to the group.
CANVASES = {
    'playartist': { 'widgets': [ ('artist',0,8), ('nowplaying',0,0), ('nowplayingdata',60,0), ('playstopsymbol', 50,16), ('songprogresstext',0,24), ('songprogress',20,24), ('trkprogresstext',60,24), ('trkprogress',75,24) ], 'size':(100,32) },
    'playalbum': { 'widgets': [ ('album',0,8), ('nowplaying',0,0), ('nowplayingdata',60,0), ('playstopsymbol', 50,16), ('songprogresstext',0,24), ('songprogress',20,24), ('trkprogresstext',60,24), ('trkprogress',75,24) ], 'size':(100,32) },
    'playtitle': { 'widgets': [ ('title',0,8), ('nowplaying',0,0), ('nowplayingdata',60,0), ('playstopsymbol', 50,16), ('songprogresstext',0,24), ('songprogress',20,24), ('trkprogresstext',60,24), ('trkprogress',75,24) ], 'size':(100,32) },
    'play_radio': { 'widgets': [ ('artistradio',0,0), ('title',0,8), ('radio',75,0), ('playstopsymbol', 50,16), ('elapsed',0,24), ('timesmall',60,24) ], 'size':(100,32) },
    # Mode-change announcements:
    'showrandom': { 'widgets': [ ('randomsymbol',0,0), ('random', 15,0) ], 'size':(80,16) },
    'showrepeatonce': { 'widgets': [ ('repeatoncesymbol',0,0), ('repeatonce', 15,0) ], 'size':(100,16) },
    'showrepeatall': { 'widgets': [ ('repeatallsymbol',0,0), ('repeatall', 15,0) ], 'size':(100,16) },
    'blank': { 'widgets': [], 'size':(80,16) },
    # Idle (stopped/paused) screens:
    'stoptime': { 'widgets': [ ('time',10,8), ('ampm',75,8) ], 'size':(100,32) },
    'stoptimeweather': { 'widgets': [ ('time',0,0), ('ampm',65,0), ('temphilow',75,0), ('conditions2',0,24), ('temp',75,24) ], 'size':(100,32) },
    'weather': { 'widgets': [ ('temp',0,0), ('conditions',0,8), ('temphilow', 55,0) ], 'size':(100,16) },
    'volume_changed': { 'widgets': [ ('volume',5,0), ('volumebar',0,8) ], 'size':(100,16) },
}
# Place the canvases into sequences to display when their condition is met
# More than one sequence can be active at the same time to allow for alert messages
# You are allowed to include a widget in the sequence without placing it on a canvas
# Note about Conditionals
# Conditionals must evaluate to a True or False resulting
# To access system variables, refer to them within the db dictionary (e.g. db['title'])
# To access the most recent previous state of a variable, refer to them within the dbp dictionary (e.g. dbp['title'])
# Display sequences: each entry shows its canvases (in order, for
# 'duration' seconds each) while its 'conditional' expression holds.
# BUGFIX: stray extraction residue fused after the closing bracket made
# this statement a syntax error; it has been removed.
SEQUENCES = [
    { 'name': 'seqSplash', 'canvases': [ { 'name':'splash', 'duration':4 } ], 'conditional':"db['state']=='starting'" },
    {
        'name': 'seqPlay',
        'canvases': [
            { 'name':'playartist', 'duration':8, 'conditional':"not db['actPlayer']=='webradio'" },
            { 'name':'playalbum', 'duration':8, 'conditional':"not db['stream']=='webradio'" },
            { 'name':'playtitle', 'duration':8, 'conditional':"not db['stream']=='webradio'" },
            { 'name':'play_radio', 'duration':9999, 'conditional':"db['stream']=='webradio'" },
        ],
        'conditional': "db['state']=='play'"
    },
    {
        'name': 'seqStop',
        'canvases': [
            { 'name':'stoptime', 'duration':9999, 'conditional':"db['outside_conditions']=='No data'" },
            { 'name':'stoptimeweather', 'duration':9999, 'conditional':"not db['outside_conditions']=='No data'" }
        ],
        'conditional': "db['state']=='stop' or db['state']=='pause'"
    },
    # Volume overlay is drawn at the lower half of the screen (y=16).
    {
        'name':'seqVolume',
        'coordinates':(0,16),
        'canvases': [ { 'name':'volume_changed', 'duration':2 } ],
        'conditional': "db['volume'] != dbp['volume'] and db['state']=='play'",
        'minimum':2,
    },
    # Mode-change announcements trigger on state transitions (db vs dbp).
    {
        'name':'seqAnnounceRandom',
        'canvases': [ { 'name':'showrandom', 'duration':2 } ],
        'conditional': "db['random'] != dbp['random']",
        'minimum':2,
    },
    {
        'name':'seqAnnounceSingle',
        'canvases': [ { 'name':'showrepeatonce', 'duration':2 } ],
        'conditional': "db['single'] != dbp['single']",
        'minimum':2,
    },
    {
        'name':'seqAnnounceRepeat',
        'canvases': [ { 'name':'showrepeatall', 'duration':2 } ],
        'conditional': "db['repeat'] != dbp['repeat']",
        'minimum':2,
    },
    # Overheat alert; 'coolingperiod' throttles re-display to every 30s.
    {
        'name':'seqAnnounceTooHot',
        'canvases': [ { 'name':'temptoohigh', 'duration':5 } ],
        'conditional': "db['system_tempc'] > 85",
        'minimum':5,
        'coolingperiod':30
    }
]
5f33d0fc59f508ff570b523a239f6b4bd646ad43 | 28,024 | py | Python | bzt/swagger2yaml.py | Avi-Labs/taurus | 3aa9bc294778d99be545575467fb5897dc815330 | [
"Apache-2.0"
] | 1,743 | 2015-03-30T20:56:03.000Z | 2022-03-31T09:08:37.000Z | bzt/swagger2yaml.py | Avi-Labs/taurus | 3aa9bc294778d99be545575467fb5897dc815330 | [
"Apache-2.0"
] | 1,159 | 2015-04-01T08:25:53.000Z | 2022-03-29T08:15:31.000Z | bzt/swagger2yaml.py | Avi-Labs/taurus | 3aa9bc294778d99be545575467fb5897dc815330 | [
"Apache-2.0"
] | 497 | 2015-03-31T21:05:18.000Z | 2022-03-17T12:45:21.000Z | """
Swagger to YAML converter for Taurus
Copyright 2017 BlazeMeter Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import copy
import json
import logging
import os
import re
import sys
import traceback
from collections import namedtuple, OrderedDict
from optparse import OptionParser
from urllib import parse
from urllib.parse import urlencode
import yaml
from bzt import TaurusInternalException, TaurusConfigError
from bzt.cli import CLI
from bzt.engine import Configuration
from bzt.utils import iteritems, BetterDict
def yaml_ordered_load(stream, Loader=yaml.SafeLoader, object_pairs_hook=OrderedDict):
    """Parse YAML from `stream`, preserving each mapping's key order.

    A throwaway subclass of `Loader` gets a mapping constructor that
    funnels every mapping's key/value pairs through `object_pairs_hook`
    (OrderedDict by default) instead of a plain dict.
    """
    class _KeyOrderLoader(Loader):
        pass

    def _build_mapping(loader, node):
        loader.flatten_mapping(node)
        return object_pairs_hook(loader.construct_pairs(node))

    _KeyOrderLoader.add_constructor(
        yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG, _build_mapping)
    return yaml.load(stream, _KeyOrderLoader)
class Swagger(object):
    """Parser/model for a Swagger 2.0 (OpenAPI) specification.

    Loads a spec from YAML or JSON and exposes its definitions,
    parameters, responses, paths and security definitions as lightweight
    namedtuples, preserving the spec's path declaration order.
    """
    # HTTP methods a Swagger path item may define operations for.
    METHODS = ["get", "put", "post", "delete", "options", "head", "patch"]

    # Strategies for filling in templated path parameters.
    INTERPOLATE_WITH_VALUES = 'values'
    INTERPOLATE_WITH_JMETER_VARS = 'variables'
    INTERPOLATE_DISABLE = 'none'

    Definition = namedtuple("Definition", "name, schema")
    Parameter = namedtuple("Parameter", "name, location, description, required, schema, type, format")
    Response = namedtuple("Response", "name, description, schema, headers")
    Path = namedtuple("Path", "ref, get, put, post, delete, options, head, patch, parameters")
    Operation = namedtuple("Operation",
                           "summary, description, operation_id, consumes, produces, parameters, responses, security")
    SecurityDef = namedtuple("SecurityDef", "type, description, name, location")

    def __init__(self, parent_log=None):
        self.log = (parent_log or logging.getLogger('')).getChild(self.__class__.__name__)
        self.swagger = None          # raw parsed spec (dict); set by parse()
        self.info = None             # spec's `info` object
        self.definitions = {}        # name -> Definition
        self.parameters = {}         # top-level name -> Parameter
        self.responses = {}          # top-level name -> Response
        self.paths = OrderedDict()   # path string -> Path, in spec order
        self.security_defs = {}      # name -> SecurityDef
        self.default_security = []

    def _load(self, swagger_spec_fd):
        """Parse the spec file object, trying YAML first and JSON second.

        Raises:
            TaurusConfigError: if the content is neither valid YAML nor JSON.
        """
        content = swagger_spec_fd.read()
        try:
            self.log.debug("Loading Swagger spec as YAML")
            self.swagger = yaml_ordered_load(content, yaml.SafeLoader)
            self.log.info("Loaded Swagger spec %s", swagger_spec_fd)
        except BaseException as exc:
            self.log.debug("Can't parse Swagger spec as YAML")
            try:
                self.log.debug("Loading Swagger spec as JSON")
                self.swagger = json.loads(content)
                self.log.info("Loaded Swagger spec %s", swagger_spec_fd)
            except BaseException:
                # Report the original (YAML) parsing error.
                raise TaurusConfigError("Error when parsing Swagger file '%s': %s" % (swagger_spec_fd, exc))

    def _validate_swagger_version(self):
        """Raise ValueError unless the spec declares version 2.0."""
        swagger_version = self.swagger.get("swagger", self.swagger.get("openapi"))
        if swagger_version != "2.0":
            raise ValueError("Only Swagger 2.0 specs are supported, got %s" % swagger_version)

    def _extract_toplevel_definitions(self):
        """Populate info, definitions, responses, parameters and security
        definitions from the spec's top-level sections."""
        self.info = self.swagger.get("info", {})

        for name, schema in iteritems(self.swagger.get("definitions", {})):
            self.definitions[name] = Swagger.Definition(name=name, schema=schema)

        for name, response in iteritems(self.swagger.get("responses", {})):
            self.responses[name] = Swagger.Response(name=name, description=response.get("description"),
                                                    schema=response.get("schema"), headers=response.get("headers"))

        for name, param in iteritems(self.swagger.get("parameters", {})):
            parameter = Swagger.Parameter(name=name, location=param.get("in"), description=param.get("description"),
                                          required=param.get("required"), schema=param.get("schema"),
                                          type=param.get("type"), format=param.get("format"))
            self.parameters[name] = parameter

        for name, secdef in iteritems(self.swagger.get("securityDefinitions", {})):
            self.security_defs[name] = Swagger.SecurityDef(type=secdef.get('type'),
                                                           description=secdef.get('description'),
                                                           name=secdef.get('name'),
                                                           location=secdef.get('in'))

    def _lookup_reference(self, reference):
        """Resolve a local JSON reference ('#/a/b/c') against the spec.

        Returns None for non-local references; raises IndexError when a
        path component is missing.
        """
        if not reference.startswith("#/"):
            return
        path = reference[2:].split('/')
        pointer = self.swagger
        for component in path:
            if component not in pointer:
                raise IndexError("Can't find location by reference %r at part %r" % (reference, component))
            pointer = pointer[component]
        self.log.debug("Found by reference %r: %r", reference, pointer)
        return pointer

    def _extract_operation(self, operation):
        """Convert one raw operation dict into an Operation namedtuple,
        resolving any $ref parameters."""
        parameters = OrderedDict()
        for param in operation.get("parameters", []):
            if "$ref" in param:
                param = self._lookup_reference(param["$ref"])
            param_name = param["name"]
            parameter = Swagger.Parameter(name=param_name, location=param.get("in"),
                                          description=param.get("description"), required=param.get("required"),
                                          schema=param.get("schema"), type=param.get("type"),
                                          format=param.get("format"))
            parameters[param_name] = parameter

        responses = OrderedDict()
        for name, resp in iteritems(operation.get("responses", {})):
            response = Swagger.Response(name=name, description=resp.get("description"), schema=resp.get("schema"),
                                        headers=resp.get("headers"))
            responses[name] = response

        return Swagger.Operation(summary=operation.get("summary"), description=operation.get("description"),
                                 operation_id=operation.get("operationId"), consumes=operation.get("consumes"),
                                 produces=operation.get("produces"), parameters=parameters, responses=responses,
                                 security=operation.get("security"))

    def _extract_paths(self):
        """Build self.paths: one Path namedtuple per spec path, holding an
        Operation per declared HTTP method plus path-level parameters."""
        for name, path_item in iteritems(self.swagger["paths"]):
            path = {"ref": None, "get": None, "put": None, "post": None, "delete": None, "options": None, "head": None,
                    "patch": None, "parameters": {}}
            for method in Swagger.METHODS:
                if method in path_item:
                    operation = path_item[method]
                    path[method] = self._extract_operation(operation)

            for param in path_item.get("parameters", []):
                if "$ref" in param:
                    param = self._lookup_reference(param["$ref"])
                param_name = param["name"]
                parameter = Swagger.Parameter(name=param_name, location=param.get("in"),
                                              description=param.get("description"), required=param.get("required"),
                                              schema=param.get("schema"), type=param.get("type"),
                                              format=param.get("format"))
                path["parameters"][param_name] = parameter

            self.paths[name] = Swagger.Path(**path)

    def parse(self, swagger_spec_fd):
        """Load, validate and index a Swagger spec from a file object."""
        self._load(swagger_spec_fd)
        self._validate_swagger_version()
        self._extract_toplevel_definitions()
        self._extract_paths()

    def get_definitions(self):
        return self.definitions

    def get_responses(self):
        return self.responses

    def get_parameters(self):
        return self.parameters

    def get_paths(self):
        return self.paths

    def get_interpolated_paths(self, parameter_interpolation=INTERPOLATE_WITH_VALUES):
        """Return self.paths with '{param}' path templates substituted.

        Depending on `parameter_interpolation`, templates are replaced by
        dummy values, by '${param}' JMeter variables, or left untouched.
        Already-interpolated '${param}' markers are never re-substituted.
        """
        paths = OrderedDict()
        replacer_regex = lambda name: r'(?<!\$)(\{' + name + r'\})'  # replace '{name}', but skip '${name}'
        for path, path_obj in iteritems(self.paths):
            new_path = path
            # Substitute path parameters declared per operation...
            for method in Swagger.METHODS:
                operation = getattr(path_obj, method)
                if operation is not None:
                    for _, param in iteritems(operation.parameters):
                        if param.location == "path":
                            name = param.name
                            if parameter_interpolation == Swagger.INTERPOLATE_WITH_VALUES:
                                value = str(Swagger.get_data_for_type(param.type, param.format))
                            elif parameter_interpolation == Swagger.INTERPOLATE_WITH_JMETER_VARS:
                                value = "${" + param.name + "}"
                            else:
                                value = None
                            if value is not None:
                                new_path = re.sub(replacer_regex(name), value, new_path)
            # ...and those declared at the path-item level.
            for _, param in iteritems(path_obj.parameters):
                if param.location == "path":
                    name = param.name
                    if parameter_interpolation == Swagger.INTERPOLATE_WITH_VALUES:
                        value = str(Swagger.get_data_for_type(param.type, param.format))
                    elif parameter_interpolation == Swagger.INTERPOLATE_WITH_JMETER_VARS:
                        value = "${" + param.name + "}"
                    else:
                        value = None
                    if value is not None:
                        new_path = re.sub(replacer_regex(name), value, new_path)
            # Deep copy so callers can't mutate the parsed model.
            path_obj = copy.deepcopy(path_obj)
            paths[new_path] = path_obj
        return paths

    def get_info(self):
        return copy.deepcopy(self.info)

    def get_host(self):
        """Return the spec's `host`, or the literal 'HOST' placeholder."""
        host = self.swagger.get("host", "")
        if not host:
            self.log.warning("Warning: no `host` declared, using HOST placeholder")
            host = "HOST"
        return host

    def get_base_path(self):
        return self.swagger.get("basePath")

    @staticmethod
    def get_data_for_type(data_type, data_format):
        """Return a dummy value for a primitive Swagger type.

        `data_format` is currently ignored; raises ValueError for types
        without a dummy (e.g. 'object', 'file').
        """
        del data_format
        if data_type == "string":
            return "some_string"
        elif data_type == "number":
            return 1
        elif data_type == "integer":
            return 1
        elif data_type == "boolean":
            return True
        elif data_type == "array":
            return [1, 2, 3]
        else:
            raise ValueError("Can't generate dummy data for type %s" % data_type)

    @staticmethod
    def get_data_for_schema(schema):
        """Placeholder: always returns None (no JSONSchema-based dummies yet)."""
        del schema
        # TODO: generate dummy data from JSONSchema
        return None
class SwaggerConverter(object):
def __init__(
self,
parent_log,
scenarios_from_paths=False,
parameter_interpolation=Swagger.INTERPOLATE_WITH_VALUES,
):
self.scenarios_from_paths = scenarios_from_paths
self.parameter_interpolation = parameter_interpolation
self.log = parent_log.getChild(self.__class__.__name__)
self.swagger = Swagger(self.log)
def _interpolate_parameter(self, param):
if self.parameter_interpolation == Swagger.INTERPOLATE_WITH_VALUES:
return Swagger.get_data_for_type(param.type, param.format)
elif self.parameter_interpolation == Swagger.INTERPOLATE_WITH_JMETER_VARS:
return '${' + param.name + '}'
else:
return None
def _interpolate_body(self, param):
if self.parameter_interpolation == Swagger.INTERPOLATE_WITH_VALUES:
return Swagger.get_data_for_schema(param.schema)
elif self.parameter_interpolation == Swagger.INTERPOLATE_WITH_JMETER_VARS:
return '${body}'
else:
return None
def _handle_parameters(self, parameters):
query_params = OrderedDict()
form_data = {}
request_body = None
headers = {}
for _, param in iteritems(parameters):
if not param.required:
continue
if param.location == "header":
name = param.name
value = self._interpolate_parameter(param)
headers[name] = value
elif param.location == "query":
name = param.name
value = self._interpolate_parameter(param)
query_params[name] = value
elif param.location == "formData":
name = param.name
value = self._interpolate_parameter(param)
form_data[name] = value
elif param.location == "body":
request_body = self._interpolate_body(param)
elif param.location == "path":
pass # path parameters are resolved at a different level
else:
self.log.warning("Unsupported parameter location (%s). Skipping", param.location)
return query_params, form_data, request_body, headers
def _embed_query_in_path(self, path, query_dict):
self.log.debug("Query dict: %s", query_dict)
parts = parse.urlparse(path)
query = urlencode(query_dict)
replaced = parts._replace(query=query)
return parse.urlunparse(replaced)
def _extract_request(self, path, path_obj, method, operation):
request = {}
if method != "get":
request["method"] = method.upper()
if operation.operation_id is not None:
request["label"] = operation.operation_id
parameters = BetterDict()
if path_obj.parameters:
parameters.merge(path_obj.parameters)
if operation.parameters:
parameters.merge(operation.parameters)
query_params, form_data, request_body, headers = self._handle_parameters(parameters)
if headers:
request["headers"] = headers
if form_data and request_body:
self.log.warning("Both form data and request body are specified. Omitting form data")
if request_body:
request["body"] = request_body
elif form_data:
request["body"] = form_data
if query_params:
url = self._embed_query_in_path(path, query_params)
else:
url = path
request["url"] = url
return request
    def _extract_requests_from_paths(self, paths, scenario_name, default_address, global_security):
        """Build one Taurus scenario containing a flat request list for
        every operation in `paths`.

        Emits ${default-address}/${default-path} env-var indirection so the
        generated YAML is easy to retarget, and applies spec-level security
        to the scenario (basic auth handled globally, apiKey per request).
        """
        base_path = self.swagger.get_base_path()
        requests = []
        scenario = {
            "default-address": "${default-address}",
            "variables": {},
        }

        global_vars = {
            "default-address": default_address,
        }
        if base_path:
            global_vars["default-path"] = base_path

        if global_security:
            self._add_global_security(scenario, global_security, global_vars)

        for path, path_obj in iteritems(paths):
            self.log.debug("Handling path %s", path)
            for method in Swagger.METHODS:
                operation = getattr(path_obj, method)
                if operation is not None:
                    self.log.debug("Handling method %s", method.upper())
                    if base_path:
                        route = "${default-path}" + path
                    else:
                        route = path
                    request = self._extract_request(route, path_obj, method, operation)
                    # TODO: Swagger responses -> JMeter assertions?
                    if request is not None:
                        if operation.security:
                            self._add_local_security(request, operation.security, scenario)
                        elif global_security:
                            # basic auth is already attached globally above
                            self._add_local_security(request, global_security, scenario, disable_basic=True)
                        requests.append(request)

        if not scenario["variables"]:
            scenario.pop("variables")
        scenario["requests"] = requests

        config = {
            "scenarios": {
                scenario_name: scenario
            },
            "execution": [{
                "concurrency": 1,
                "scenario": scenario_name,
                "hold-for": "1m",
            }]
        }

        if global_vars:
            config["settings"] = {"env": global_vars}

        return config
    def _extract_scenarios_from_paths(self, paths, default_address, global_security):
        """Build one Taurus scenario (and one execution entry) per spec
        path, named after the path itself.

        Paths whose path item declares no operations produce no scenario.
        """
        base_path = self.swagger.get_base_path()
        scenarios = OrderedDict()

        global_vars = {
            "default-address": default_address
        }
        if base_path:
            global_vars["default-path"] = base_path

        for path, path_obj in iteritems(paths):
            self.log.info("Handling path %s", path)

            scenario_name = path
            scenario = {
                "default-address": "${default-address}",
                "variables": {},
            }

            if base_path:
                route = "${default-path}" + path
            else:
                route = path

            requests = []
            for method in Swagger.METHODS:
                operation = getattr(path_obj, method)
                if operation is not None:
                    self.log.debug("Handling method %s", method.upper())
                    request = self._extract_request(route, path_obj, method, operation)
                    if operation.security:
                        self._add_local_security(request, operation.security, scenario)
                    elif global_security:
                        self._add_local_security(request, global_security, scenario)
                    requests.append(request)
                    # TODO: Swagger responses -> assertions?

            if not requests:
                continue

            scenario["requests"] = requests

            if global_security:
                self._add_global_security(scenario, global_security, global_vars)

            if not scenario["variables"]:
                scenario.pop("variables")
            scenarios[scenario_name] = scenario

        config = {
            "scenarios": scenarios,
            "execution": [{
                "concurrency": 1,
                "scenario": scenario_name,
                "hold-for": "1m",
            } for scenario_name, scenario in iteritems(scenarios)]
        }

        if global_vars:
            config["settings"] = {"env": global_vars}

        return config
def _insert_global_basic_auth(self, scenario, global_vars):
headers = scenario.get('headers', {})
headers['Authorization'] = 'Basic ${__base64Encode(${auth})}'
global_vars['auth'] = 'USER:PASSWORD'
scenario['headers'] = headers
def _insert_local_basic_auth(self, request, scenario):
headers = request.get('headers', {})
variables = scenario.get('variables', {})
headers['Authorization'] = 'Basic ${__base64Encode(${auth})}'
variables['auth'] = 'USER:PASSWORD'
request['headers'] = headers
scenario['variables'] = variables
def _insert_global_apikey_auth(self, scenario, sec_name, param_name, location, global_vars):
# location == 'query' is deliberately ignored
if location == 'header':
header_name = sec_name
var_name = param_name
headers = scenario.get('headers', {})
headers[header_name] = '${' + var_name + '}'
global_vars[var_name] = 'TOKEN'
scenario['headers'] = headers
def _insert_local_apikey_auth(self, request, scenario, sec_name, param_name, location):
# location == 'header' is deliberately ignored
if location == 'query':
query_name = sec_name
var_name = param_name
body = request.get('body', {})
variables = scenario.get('variables', {})
body[query_name] = '${' + var_name + '}'
variables[var_name] = 'TOKEN'
request['body'] = body
scenario['variables'] = variables
def _add_global_security(self, scenario, global_security, global_vars):
if not global_security:
return
security = global_security[0]
for sec_name, _ in iteritems(security):
secdef = self.swagger.security_defs.get(sec_name)
if not secdef:
self.log.warning("Security definition %r not found, skipping" % sec_name)
continue
if secdef.type == 'basic':
self._insert_global_basic_auth(scenario, global_vars)
elif secdef.type == 'apiKey':
if secdef.name is None:
self.log.warning("apiKey security definition has no header name, skipping")
continue
if secdef.location is None:
self.log.warning("apiKey location (`in`) is not given, assuming header")
secdef.location = 'header'
self._insert_global_apikey_auth(scenario, secdef.name, sec_name, secdef.location, global_vars)
elif secdef.type == 'oauth2':
self.log.warning("OAuth2 security is not yet supported, skipping")
continue
def _add_local_security(self, request, securities, scenario, disable_basic=False):
if not securities:
return # TODO: disable global security for request
security = securities[0]
for sec_name, _ in iteritems(security):
secdef = self.swagger.security_defs.get(sec_name)
if not secdef:
self.log.warning("Security definition %r not found, skipping" % sec_name)
continue
if secdef.type == 'basic':
if not disable_basic:
self._insert_local_basic_auth(request, scenario)
elif secdef.type == 'apiKey':
if secdef.name is None:
self.log.warning("apiKey security definition has no header name, skipping")
continue
if secdef.location is None:
self.log.warning("apiKey location (`in`) is not given, assuming header")
secdef.location = 'header'
self._insert_local_apikey_auth(request, scenario, secdef.name, sec_name, secdef.location)
elif secdef.type == 'oauth2':
self.log.warning("OAuth2 security is not yet supported, skipping")
continue
@staticmethod
def join_base_with_endpoint_url(*path):
return '/'.join(s.strip('/') for s in (('',) + path))
def convert_path(self, swagger_path):
    """Open the Swagger spec at ``swagger_path`` and convert it.

    :raises ValueError: if the file does not exist
    """
    if not os.path.exists(swagger_path):
        raise ValueError("Swagger file %s doesn't exist" % swagger_path)
    with open(swagger_path) as handle:
        return self.convert(handle)
def convert(self, swagger_fd):
    """Convert an open Swagger spec file into a Taurus config dict.

    Depending on ``self.scenarios_from_paths``, either one scenario per
    path or a single scenario holding all requests is produced.
    """
    self.swagger.parse(swagger_fd)
    info = self.swagger.get_info()
    # Scenario names cannot contain spaces, so dashes replace them.
    scenario_name = info.get("title", "Swagger").replace(' ', '-')
    host = self.swagger.get_host()
    paths = self.swagger.get_interpolated_paths(self.parameter_interpolation)
    # Only the first declared scheme is used for the default address.
    scheme = self.swagger.swagger.get("schemes", ["http"])[0]
    security = self.swagger.swagger.get("security", [])
    default_address = "%s://%s" % (scheme, host)
    if self.scenarios_from_paths:
        return self._extract_scenarios_from_paths(paths, default_address, security)
    return self._extract_requests_from_paths(paths, scenario_name, default_address, security)
class Swagger2YAML(object):
    """Command-line tool object that converts a Swagger spec into a Taurus config file."""

    def __init__(self, options, file_name):
        self.log = logging.getLogger(self.__class__.__name__)
        self.options = options
        self.setup_logging()
        self.converter = None
        self.file_to_convert = file_name

    def setup_logging(self):
        """Delegate logger setup to the Taurus CLI helpers; honor ``--quiet``."""
        CLI.setup_logging(self.options)
        if self.options.quiet:
            logging.disable(logging.WARNING)

    def process(self):
        """Convert ``self.file_to_convert`` and dump the result to disk.

        :raises TaurusInternalException: if the input file does not exist
        """
        fmt = Configuration.JSON if self.options.json else Configuration.YAML
        self.log.info('Loading Swagger spec %s', self.file_to_convert)
        self.file_to_convert = os.path.abspath(os.path.expanduser(self.file_to_convert))
        if not os.path.exists(self.file_to_convert):
            raise TaurusInternalException("File does not exist: %s" % self.file_to_convert)
        self.converter = SwaggerConverter(
            self.log,
            scenarios_from_paths=self.options.scenarios_from_paths,
            parameter_interpolation=self.options.parameter_interpolation,
        )
        try:
            result = self.converter.convert_path(self.file_to_convert)
        except BaseException:
            self.log.error("Error while processing Swagger spec: %s", self.file_to_convert)
            raise
        exporter = Configuration.from_dict(result)
        # Fall back to "<input>.yml"/"<input>.json" when no -o was given.
        target = self.options.file_name or (self.file_to_convert + "." + fmt.lower())
        exporter.dump(target, fmt)
        self.log.info("Done processing, result saved in %s", target)
def process(parsed_options, args):
    """Convert the first positional argument using the parsed CLI options."""
    Swagger2YAML(parsed_options, args[0]).process()
def main():
    """Parse command-line options and run the Swagger-to-YAML conversion.

    Exits with status 1 on conversion errors, 0 on success; prints usage
    when no input file is given.
    """
    usage = "Usage: swagger2yaml [input Swagger spec] [options]"
    opt_parser = OptionParser(usage=usage, prog="swagger2yaml")
    opt_parser.add_option('-v', '--verbose', action='store_true', default=False,
                          help="Prints all logging messages to console")
    opt_parser.add_option('-o', '--out', dest="file_name",
                          help="Set output .yml file name, by default input file name + .yml is used")
    opt_parser.add_option('-q', '--quiet', action='store_true', default=False, dest='quiet',
                          help="Do not display any log messages")
    opt_parser.add_option('-j', '--json', action='store_true', default=False, dest='json',
                          help="Use JSON format for results")
    opt_parser.add_option('-l', '--log', action='store', default=False, help="Log file location")
    opt_parser.add_option('--scenarios-from-paths', action='store_true', default=False,
                          help="Generate one scenario per path (disabled by default)")
    opt_parser.add_option('--parameter-interpolation', action='store', default='values',
                          help="Templated parameters interpolation. Valid values are 'variables', 'values', 'none'")
    options, positional = opt_parser.parse_args()
    if not positional:
        # No input file: show usage and return without an explicit exit code.
        sys.stdout.write(usage + "\n")
        return
    try:
        process(options, positional)
    except BaseException as exc:
        logging.error("Exception during conversion: %s: %s", type(exc).__name__, str(exc))
        if not options.verbose:
            logging.error("Rerun with --verbose to see the stack trace")
        logging.debug("Exception: %s", traceback.format_exc())
        sys.exit(1)
    sys.exit(0)
# Allow the module to be executed directly as a command-line script.
if __name__ == "__main__":
    main()
| 40.555716 | 119 | 0.588139 |
60c928cc5ccbd5c0317099368c4c2c4f16278f52 | 413 | py | Python | proj_spam_detector/proj_spam_detector/wsgi.py | rizcodes/aiml-nlp-spam-detector-app | e894aacc0d76ad2eb20258d358edd28bad9c5170 | [
"MIT"
] | null | null | null | proj_spam_detector/proj_spam_detector/wsgi.py | rizcodes/aiml-nlp-spam-detector-app | e894aacc0d76ad2eb20258d358edd28bad9c5170 | [
"MIT"
] | null | null | null | proj_spam_detector/proj_spam_detector/wsgi.py | rizcodes/aiml-nlp-spam-detector-app | e894aacc0d76ad2eb20258d358edd28bad9c5170 | [
"MIT"
] | null | null | null | """
WSGI config for proj_spam_detector project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'proj_spam_detector.settings')
application = get_wsgi_application()
| 24.294118 | 78 | 0.79661 |
5fa25187124d329c2301d8dfd5f87e1d0ef7aebd | 1,251 | py | Python | theory/fit.py | akononovicius/anomalous-diffusion-in-nonlinear-transformations-of-the-noisy-voter-model | 471fcf1bbc7cbf3865144e4b1c1bbfcd7be182f6 | [
"Unlicense"
] | null | null | null | theory/fit.py | akononovicius/anomalous-diffusion-in-nonlinear-transformations-of-the-noisy-voter-model | 471fcf1bbc7cbf3865144e4b1c1bbfcd7be182f6 | [
"Unlicense"
] | null | null | null | theory/fit.py | akononovicius/anomalous-diffusion-in-nonlinear-transformations-of-the-noisy-voter-model | 471fcf1bbc7cbf3865144e4b1c1bbfcd7be182f6 | [
"Unlicense"
] | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
from numpy import exp
from scipy.special import hyp1f1
from scipy.special import gamma
def yStationaryMoment(k, alpha, epsi1, epsi2):
    """Return the k-th stationary moment of y as a ratio of Gamma functions."""
    numerator = gamma(epsi2 - k / alpha) * gamma(epsi1 + k / alpha)
    return numerator / (gamma(epsi1) * gamma(epsi2))
def yMoment(k, T, x0, alpha, epsi1, epsi2):
    """Return the k-th time-dependent moment of y started from x0 at time T.

    Combines Gamma-function prefactors with Kummer's confluent
    hypergeometric function 1F1.
    """
    y0 = (x0 / (1 - x0)) ** (1 / alpha)
    sigma_sq = 2 / alpha ** 2
    # Branch on which side of y = 1 the initial condition falls.
    if y0 > 1:
        eta = 1 + alpha / 2
        lamb = 1 + alpha * epsi2
    else:
        eta = 1 - alpha / 2
        lamb = 1 - alpha * epsi1
    shift = eta - 1
    half_inv = 1 / (2 * shift)           # 1/(2*(eta-1))
    a_par = (lamb - 1) * half_inv        # (lambda-1)/(2*(eta-1))
    b_par = k * half_inv                 # k/(2*(eta-1))
    scale = 2 * shift * shift * sigma_sq * T  # 2*((eta-1)^2)*(sigma^2)*t
    result = gamma(a_par - b_par) / gamma(a_par)
    result *= scale ** (-b_par)
    result *= hyp1f1(b_par, a_par, -1 / (scale * y0 ** (2 * shift)))
    return result
def yMean(T, x0, alpha, epsi1, epsi2):
    """First moment of y: thin wrapper around yMoment with k = 1."""
    return yMoment(1, T, x0, alpha, epsi1, epsi2)
def ySqMean(T, x0, alpha, epsi1, epsi2):
    """Second moment of y: thin wrapper around yMoment with k = 2."""
    return yMoment(2, T, x0, alpha, epsi1, epsi2)
def yVariance(T, x0, alpha, epsi1, epsi2):
    """Variance of y at time T: <y^2> - <y>^2."""
    first = yMean(T, x0, alpha, epsi1, epsi2)
    second = ySqMean(T, x0, alpha, epsi1, epsi2)
    return second - first ** 2
| 27.195652 | 80 | 0.565947 |
9642c4ee58c4ee2d219afb195d8e0c0d0a3e0739 | 98,861 | py | Python | src/sage/algebras/clifford_algebra.py | bopopescu/sage | 2d495be78e0bdc7a0a635454290b27bb4f5f70f0 | [
"BSL-1.0"
] | 3 | 2019-07-15T13:48:24.000Z | 2019-11-08T12:31:43.000Z | src/sage/algebras/clifford_algebra.py | bopopescu/sage | 2d495be78e0bdc7a0a635454290b27bb4f5f70f0 | [
"BSL-1.0"
] | 2 | 2018-10-30T13:40:20.000Z | 2020-07-23T12:13:30.000Z | src/sage/algebras/clifford_algebra.py | bopopescu/sage | 2d495be78e0bdc7a0a635454290b27bb4f5f70f0 | [
"BSL-1.0"
] | 1 | 2019-06-02T03:16:55.000Z | 2019-06-02T03:16:55.000Z | r"""
Clifford Algebras
AUTHORS:
- Travis Scrimshaw (2013-09-06): Initial version
"""
#*****************************************************************************
# Copyright (C) 2013 Travis Scrimshaw <tscrim at ucdavis.edu>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
# http://www.gnu.org/licenses/
#*****************************************************************************
from six import iteritems
from sage.misc.six import with_metaclass
from sage.misc.cachefunc import cached_method
from sage.structure.unique_representation import UniqueRepresentation
from copy import copy
from sage.categories.algebras_with_basis import AlgebrasWithBasis
from sage.categories.hopf_algebras_with_basis import HopfAlgebrasWithBasis
from sage.modules.with_basis.morphism import ModuleMorphismByLinearity
from sage.categories.poor_man_map import PoorManMap
from sage.rings.all import ZZ
from sage.modules.free_module import FreeModule, FreeModule_generic
from sage.matrix.constructor import Matrix
from sage.matrix.args import MatrixArgs
from sage.sets.family import Family
from sage.combinat.free_module import CombinatorialFreeModule
from sage.combinat.subset import SubsetsSorted
from sage.quadratic_forms.quadratic_form import QuadraticForm
from sage.algebras.weyl_algebra import repr_from_monomials
from sage.misc.inherit_comparison import InheritComparisonClasscallMetaclass
class CliffordAlgebraElement(CombinatorialFreeModule.Element):
    """
    An element in a Clifford algebra.

    TESTS::

        sage: Q = QuadraticForm(ZZ, 3, [1, 2, 3, 4, 5, 6])
        sage: Cl.<x,y,z> = CliffordAlgebra(Q)
        sage: elt = ((x^3-z)*x + y)^2
        sage: TestSuite(elt).run()
    """
    def _repr_(self):
        """
        Return a string representation of ``self``.

        TESTS::

            sage: Q = QuadraticForm(ZZ, 3, [1,2,3,4,5,6])
            sage: Cl.<x,y,z> = CliffordAlgebra(Q)
            sage: ((x^3-z)*x + y)^2
            -2*x*y*z - x*z + 5*x - 4*y + 2*z + 2
            sage: Cl.zero()
            0
        """
        return repr_from_monomials(self.list(), self.parent()._repr_term)

    def _latex_(self):
        r"""
        Return a `\LaTeX` representation of ``self``.

        TESTS::

            sage: Q = QuadraticForm(ZZ, 3, [1,2,3,4,5,6])
            sage: Cl.<x,y,z> = CliffordAlgebra(Q)
            sage: latex( ((x^3-z)*x + y)^2 )
            -2  x y z -  x z + 5  x - 4  y + 2  z + 2
            sage: Cl.<x0,x1,x2> = CliffordAlgebra(Q)
            sage: latex( (x1 - x2)*x0 + 5*x0*x1*x2 )
            5  x_{0} x_{1} x_{2} -  x_{0} x_{1} +  x_{0} x_{2} - 1
        """
        return repr_from_monomials(self.list(), self.parent()._latex_term, True)

    def _mul_(self, other):
        """
        Return ``self`` multiplied by ``other``.

        INPUT:

        - ``other`` -- element of the same Clifford algebra as ``self``

        EXAMPLES::

            sage: Q = QuadraticForm(ZZ, 3, [1,2,3,4,5,6])
            sage: Cl.<x,y,z> = CliffordAlgebra(Q)
            sage: (x^3 - z*y)*x*(y*z + x*y*z)
            x*y*z + y*z - 24*x + 12*y + 2*z - 24
            sage: y*x
            -x*y + 2
            sage: z*x
            -x*z + 3
            sage: z*z
            6
            sage: x*0
            0
            sage: 0*x
            0
        """
        Q = self.parent()._quadratic_form
        zero = self.parent().base_ring().zero()
        # ``d`` accumulates the monomial -> coefficient dict of the product.
        d = {}
        for ml,cl in self:
            # Distribute the current term ``cl`` * ``ml`` over ``other``.
            cur = copy(other._monomial_coefficients) # The current distribution of the term
            for i in reversed(ml):
                # Distribute the current factor ``e[i]`` (the ``i``-th
                # element of the standard basis).
                next = {}
                # At the end of the following for-loop, ``next`` will be
                # the dictionary describing the element
                # ``e[i]`` * (the element described by the dictionary ``cur``)
                # (where ``e[i]`` is the ``i``-th standard basis vector).
                for mr,cr in iteritems(cur):
                    # Commute the factor as necessary until we are in order
                    pos = 0
                    for j in mr:
                        if i <= j:
                            break
                        # Add the additional term from the commutation
                        t = list(mr)
                        t.pop(pos)
                        t = tuple(t)
                        next[t] = next.get(t, zero) + cr * Q[i,j]
                        # Note: ``Q[i,j] == Q(e[i]+e[j]) - Q(e[i]) - Q(e[j])`` for
                        # ``i != j``, where ``e[k]`` is the ``k``-th standard
                        # basis vector.
                        cr = -cr
                        if next[t] == zero:
                            del next[t]
                        pos += 1
                    # Check to see if we have a squared term or not
                    t = list(mr)
                    if i in t:
                        t.remove(i)
                        cr *= Q[i,i]
                        # Note: ``Q[i,i] == Q(e[i])`` where ``e[i]`` is the
                        # ``i``-th standard basis vector.
                    else:
                        t.insert(pos, i)
                        # Note that ``t`` is now sorted.
                    t = tuple(t)
                    next[t] = next.get(t, zero) + cr
                    if next[t] == zero:
                        del next[t]
                cur = next
            # Add the distributed terms to the total
            for index,coeff in iteritems(cur):
                d[index] = d.get(index, zero) + cl * coeff
                if d[index] == zero:
                    del d[index]
        return self.__class__(self.parent(), d)

    def list(self):
        """
        Return the list of monomials and their coefficients in ``self``
        (as a list of `2`-tuples, each of which has the form
        ``(monomial, coefficient)``).

        EXAMPLES::

            sage: Q = QuadraticForm(ZZ, 3, [1,2,3,4,5,6])
            sage: Cl.<x,y,z> = CliffordAlgebra(Q)
            sage: elt = 5*x + y
            sage: elt.list()
            [((0,), 5), ((1,), 1)]
        """
        # Sort by decreasing monomial length, then lexicographically.
        return sorted(self._monomial_coefficients.items(), key=lambda m_c : (-len(m_c[0]), m_c[0]))

    def support(self):
        """
        Return the support of ``self``.

        This is the list of all monomials which appear with nonzero
        coefficient in ``self``.

        EXAMPLES::

            sage: Q = QuadraticForm(ZZ, 3, [1,2,3,4,5,6])
            sage: Cl.<x,y,z> = CliffordAlgebra(Q)
            sage: elt = 5*x + y
            sage: elt.support()
            [(0,), (1,)]
        """
        # Same ordering convention as :meth:`list`.
        return sorted(self._monomial_coefficients.keys(), key=lambda x: (-len(x), x))

    def reflection(self):
        r"""
        Return the image of the reflection automorphism on ``self``.

        The *reflection automorphism* of a Clifford algebra is defined
        as the linear endomorphism of this algebra which maps

        .. MATH::

            x_1 \wedge x_2 \wedge \cdots \wedge x_m \mapsto
            (-1)^m x_1 \wedge x_2 \wedge \cdots \wedge x_m.

        It is an algebra automorphism of the Clifford algebra.

        :meth:`degree_negation` is an alias for :meth:`reflection`.

        EXAMPLES::

            sage: Q = QuadraticForm(ZZ, 3, [1,2,3,4,5,6])
            sage: Cl.<x,y,z> = CliffordAlgebra(Q)
            sage: elt = 5*x + y + x*z
            sage: r = elt.reflection(); r
            x*z - 5*x - y
            sage: r.reflection() == elt
            True

        TESTS:

        We check that the reflection is an involution::

            sage: Q = QuadraticForm(ZZ, 3, [1,2,3,4,5,6])
            sage: Cl.<x,y,z> = CliffordAlgebra(Q)
            sage: all(x.reflection().reflection() == x for x in Cl.basis())
            True
        """
        # Each monomial of length m is scaled by (-1)^m.
        return self.__class__(self.parent(), {m: (-1)**len(m) * c for m,c in self})

    degree_negation = reflection

    def transpose(self):
        r"""
        Return the transpose of ``self``.

        The transpose is an anti-algebra involution of a Clifford algebra
        and is defined (using linearity) by

        .. MATH::

            x_1 \wedge x_2 \wedge \cdots \wedge x_m \mapsto
            x_m \wedge \cdots \wedge x_2 \wedge x_1.

        EXAMPLES::

            sage: Q = QuadraticForm(ZZ, 3, [1,2,3,4,5,6])
            sage: Cl.<x,y,z> = CliffordAlgebra(Q)
            sage: elt = 5*x + y + x*z
            sage: t = elt.transpose(); t
            -x*z + 5*x + y + 3
            sage: t.transpose() == elt
            True
            sage: Cl.one().transpose()
            1

        TESTS:

        We check that the transpose is an involution::

            sage: Q = QuadraticForm(ZZ, 3, [1,2,3,4,5,6])
            sage: Cl.<x,y,z> = CliffordAlgebra(Q)
            sage: all(x.transpose().transpose() == x for x in Cl.basis())
            True

        Zero is sent to zero::

            sage: Cl.zero().transpose() == Cl.zero()
            True
        """
        P = self.parent()
        if not self._monomial_coefficients:
            return P.zero()
        g = P.gens()
        # Rebuild each monomial with its factors multiplied in reverse order.
        return P.sum(c * P.prod(g[i] for i in reversed(m)) for m,c in self)

    def conjugate(self):
        r"""
        Return the Clifford conjugate of ``self``.

        The Clifford conjugate of an element `x` of a Clifford algebra is
        defined as

        .. MATH::

            \bar{x} := \alpha(x^t) = \alpha(x)^t

        where `\alpha` denotes the :meth:`reflection <reflection>`
        automorphism and `t` the :meth:`transposition <transpose>`.

        EXAMPLES::

            sage: Q = QuadraticForm(ZZ, 3, [1,2,3,4,5,6])
            sage: Cl.<x,y,z> = CliffordAlgebra(Q)
            sage: elt = 5*x + y + x*z
            sage: c = elt.conjugate(); c
            -x*z - 5*x - y + 3
            sage: c.conjugate() == elt
            True

        TESTS:

        We check that the conjugate is an involution::

            sage: Q = QuadraticForm(ZZ, 3, [1,2,3,4,5,6])
            sage: Cl.<x,y,z> = CliffordAlgebra(Q)
            sage: all(x.conjugate().conjugate() == x for x in Cl.basis())
            True
        """
        return self.reflection().transpose()

    clifford_conjugate = conjugate

    # TODO: This is a general function which should be moved to a
    # superalgebras category when one is implemented.
    def supercommutator(self, x):
        r"""
        Return the supercommutator of ``self`` and ``x``.

        Let `A` be a superalgebra. The *supercommutator* of homogeneous
        elements `x, y \in A` is defined by

        .. MATH::

            [x, y\} = x y - (-1)^{|x| |y|} y x

        and extended to all elements by linearity.

        EXAMPLES::

            sage: Q = QuadraticForm(ZZ, 3, [1,2,3,4,5,6])
            sage: Cl.<x,y,z> = CliffordAlgebra(Q)
            sage: a = x*y - z
            sage: b = x - y + y*z
            sage: a.supercommutator(b)
            -5*x*y + 8*x*z - 2*y*z - 6*x + 12*y - 5*z
            sage: a.supercommutator(Cl.one())
            0
            sage: Cl.one().supercommutator(a)
            0
            sage: Cl.zero().supercommutator(a)
            0
            sage: a.supercommutator(Cl.zero())
            0

            sage: Q = QuadraticForm(ZZ, 2, [-1,1,-3])
            sage: Cl.<x,y> = CliffordAlgebra(Q)
            sage: [a.supercommutator(b) for a in Cl.basis() for b in Cl.basis()]
            [0, 0, 0, 0, 0, -2, 1, -x - 2*y, 0, 1,
             -6, 6*x + y, 0, x + 2*y, -6*x - y, 0]
            sage: [a*b-b*a for a in Cl.basis() for b in Cl.basis()]
            [0, 0, 0, 0, 0, 0, 2*x*y - 1, -x - 2*y, 0,
             -2*x*y + 1, 0, 6*x + y, 0, x + 2*y, -6*x - y, 0]

        Exterior algebras inherit from Clifford algebras, so
        supercommutators work as well. We verify the exterior algebra
        is supercommutative::

            sage: E.<x,y,z,w> = ExteriorAlgebra(QQ)
            sage: all(b1.supercommutator(b2) == 0
            ....:     for b1 in E.basis() for b2 in E.basis())
            True
        """
        P = self.parent()
        ret = P.zero()
        # Bilinear extension: handle each pair of basis terms separately,
        # with the sign determined by the product of their parities.
        for ms,cs in self:
            for mx,cx in x:
                ret += P.term(ms, cs) * P.term(mx, cx)
                s = (-1)**(P.degree_on_basis(ms) * P.degree_on_basis(mx))
                ret -= s * P.term(mx, cx) * P.term(ms, cs)
        return ret
class CliffordAlgebra(CombinatorialFreeModule):
r"""
The Clifford algebra of a quadratic form.
Let `Q : V \to \mathbf{k}` denote a quadratic form on a vector space `V`
over a field `\mathbf{k}`. The Clifford algebra `Cl(V, Q)` is defined as
`T(V) / I_Q` where `T(V)` is the tensor algebra of `V` and `I_Q` is the
two-sided ideal generated by all elements of the form `v \otimes v - Q(v)`
for all `v \in V`.
We abuse notation to denote the projection of a pure tensor
`x_1 \otimes x_2 \otimes \cdots \otimes x_m \in T(V)` onto
`T(V) / I_Q = Cl(V, Q)` by `x_1 \wedge x_2 \wedge \cdots \wedge x_m`.
This is motivated by the fact that `Cl(V, Q)` is the exterior algebra
`\wedge V` when `Q = 0` (one can also think of a Clifford algebra as
a quantization of the exterior algebra). See :class:`ExteriorAlgebra`
for the concept of an exterior algebra.
From the definition, a basis of `Cl(V, Q)` is given by monomials of
the form
.. MATH::
\{ e_{i_1} \wedge \cdots \wedge e_{i_k} \mid 1 \leq i_1 < \cdots <
i_k \leq n \},
where `n = \dim(V)` and where `\{ e_1, e_2, \cdots, e_n \}` is any
fixed basis of `V`. Hence
.. MATH::
\dim(Cl(V, Q)) = \sum_{k=0}^n \binom{n}{k} = 2^n.
.. NOTE::
The algebra `Cl(V, Q)` is a `\ZZ / 2\ZZ`-graded algebra, but not
(in general) `\ZZ`-graded (in a reasonable way).
This construction satisfies the following universal property. Let
`i : V \to Cl(V, Q)` denote the natural inclusion (which is an
embedding). Then for every associative `\mathbf{k}`-algebra `A`
and any `\mathbf{k}`-linear map `j : V \to A` satisfying
.. MATH::
j(v)^2 = Q(v) \cdot 1_A
for all `v \in V`, there exists a unique `\mathbf{k}`-algebra
homomorphism `f : Cl(V, Q) \to A` such that `f \circ i = j`.
This property determines the Clifford algebra uniquely up to
canonical isomorphism. The inclusion `i` is commonly used to
identify `V` with a vector subspace of `Cl(V)`.
The Clifford algebra `Cl(V, Q)` is a `\ZZ_2`-graded algebra
(where `\ZZ_2 = \ZZ / 2 \ZZ`); this grading is determined by
placing all elements of `V` in degree `1`. It is also an
`\NN`-filtered algebra, with the filtration too being defined
by placing all elements of `V` in degree `1`. The :meth:`degree` gives
the `\NN`-*filtration* degree, and to get the super degree use instead
:meth:`~sage.categories.super_modules.SuperModules.ElementMethods.is_even_odd`.
The Clifford algebra also can be considered as a covariant functor
from the category of vector spaces equipped with quadratic forms
to the category of algebras. In fact, if `(V, Q)` and `(W, R)`
are two vector spaces endowed with quadratic forms, and if
`g : W \to V` is a linear map preserving the quadratic form,
then we can define an algebra morphism
`Cl(g) : Cl(W, R) \to Cl(V, Q)` by requiring that it send every
`w \in W` to `g(w) \in V`. Since the quadratic form `R` on `W`
is uniquely determined by the quadratic form `Q` on `V` (due to
the assumption that `g` preserves the quadratic form), this fact
can be rewritten as follows: If `(V, Q)` is a vector space with a
quadratic form, and `W` is another vector space, and
`\phi : W \to V` is any linear map, then we obtain an algebra
morphism `Cl(\phi) : Cl(W, \phi(Q)) \to Cl(V, Q)` where
`\phi(Q) = \phi^T \cdot Q \cdot \phi` (we consider `\phi` as a
matrix) is the quadratic form `Q` pulled back to `W`. In fact, the
map `\phi` preserves the quadratic form because of
.. MATH::
\phi(Q)(x) = x^T \cdot \phi^T \cdot Q \cdot \phi \cdot x
= (\phi \cdot x)^T \cdot Q \cdot (\phi \cdot x) = Q(\phi(x)).
Hence we have `\phi(w)^2 = Q(\phi(w)) = \phi(Q)(w)` for all `w \in W`.
REFERENCES:
- :wikipedia:`Clifford_algebra`
INPUT:
- ``Q`` -- a quadratic form
- ``names`` -- (default: ``'e'``) the generator names
EXAMPLES:
To create a Clifford algebra, all one needs to do is specify a
quadratic form::
sage: Q = QuadraticForm(ZZ, 3, [1,2,3,4,5,6])
sage: Cl = CliffordAlgebra(Q)
sage: Cl
The Clifford algebra of the Quadratic form in 3 variables
over Integer Ring with coefficients:
[ 1 2 3 ]
[ * 4 5 ]
[ * * 6 ]
We can also explicitly name the generators. In this example, the
Clifford algebra we construct is an exterior algebra (since we
choose the quadratic form to be zero)::
sage: Q = QuadraticForm(ZZ, 4, [0]*10)
sage: Cl.<a,b,c,d> = CliffordAlgebra(Q)
sage: a*d
a*d
sage: d*c*b*a + a + 4*b*c
a*b*c*d + 4*b*c + a
"""
@staticmethod
def __classcall_private__(cls, Q, names=None):
    """
    Normalize arguments to ensure a unique representation.

    EXAMPLES::

        sage: Q = QuadraticForm(ZZ, 3, [1,2,3,4,5,6])
        sage: Cl1.<e0,e1,e2> = CliffordAlgebra(Q)
        sage: Cl2 = CliffordAlgebra(Q)
        sage: Cl3 = CliffordAlgebra(Q, ['e0','e1','e2'])
        sage: Cl1 is Cl2 and Cl2 is Cl3
        True
    """
    if not isinstance(Q, QuadraticForm):
        raise ValueError("{} is not a quadratic form".format(Q))
    if names is None:
        names = 'e'
    names = tuple(names)
    if len(names) != Q.dim():
        if len(names) == 1:
            # A single name acts as a prefix: 'e' -> ('e0', ..., 'e<n-1>').
            names = tuple( '{}{}'.format(names[0], i) for i in range(Q.dim()) )
        else:
            raise ValueError("the number of variables does not match the number of generators")
    return super(CliffordAlgebra, cls).__classcall__(cls, Q, names)
def __init__(self, Q, names, category=None):
    r"""
    Initialize ``self``.

    EXAMPLES::

        sage: Q = QuadraticForm(ZZ, 3, [1,2,3,4,5,6])
        sage: Cl = CliffordAlgebra(Q)
        sage: Cl.category()
        Category of finite dimensional super algebras with basis over
         (euclidean domains and infinite enumerated sets and metric spaces)
        sage: TestSuite(Cl).run()

    TESTS:

    We check that the basis elements are indeed indexed by
    *strictly increasing* tuples::

        sage: Q = QuadraticForm(ZZ, 9)
        sage: Cl = CliffordAlgebra(Q)
        sage: ba = Cl.basis().keys()
        sage: all( tuple(sorted(S)) in ba
        ....:      for S in Subsets(range(9)) )
        True
    """
    self._quadratic_form = Q
    R = Q.base_ring()
    # A Clifford algebra is a finite-dimensional filtered super algebra
    # over the base ring of its quadratic form.
    category = AlgebrasWithBasis(R.category()).Super().Filtered().FiniteDimensional().or_subcategory(category)
    # Basis monomials are indexed by sorted subsets of {0, ..., dim-1}.
    indices = SubsetsSorted(range(Q.dim()))
    CombinatorialFreeModule.__init__(self, R, indices, category=category)
    self._assign_names(names)
def _repr_(self):
r"""
Return a string representation of ``self``.
EXAMPLES::
sage: Q = QuadraticForm(ZZ, 3, [1,2,3,4,5,6])
sage: CliffordAlgebra(Q)
The Clifford algebra of the Quadratic form in 3 variables
over Integer Ring with coefficients:
[ 1 2 3 ]
[ * 4 5 ]
[ * * 6 ]
"""
return "The Clifford algebra of the {}".format(self._quadratic_form)
def _repr_term(self, m):
"""
Return a string representation of the basis element indexed by ``m``.
EXAMPLES::
sage: Q = QuadraticForm(ZZ, 3, [1,2,3,4,5,6])
sage: Cl.<x,y,z> = CliffordAlgebra(Q)
sage: Cl._repr_term((0,2))
'x*z'
sage: Cl._repr_term(())
'1'
sage: Cl._repr_term((1,))
'y'
"""
if not m:
return '1'
term = ''
for i in m:
if term:
term += '*'
term += self.variable_names()[i]
return term
def _latex_term(self, m):
r"""
Return a `\LaTeX` representation of the basis element indexed
by ``m``.
EXAMPLES::
sage: Q = QuadraticForm(ZZ, 3, [1,2,3,4,5,6])
sage: Cl.<x,y,z> = CliffordAlgebra(Q)
sage: Cl._latex_term((0,2))
' x z'
"""
if not m:
return '1'
term = ''
for i in m:
term += ' ' + self.latex_variable_names()[i]
return term
def _coerce_map_from_(self, V):
    """
    Return if there is a coerce map from ``V`` into ``self``.

    The things which coerce into ``self`` are:

    - Clifford algebras with the same generator names and an equal
      quadratic form over a ring which coerces into the base
      ring of ``self``.
    - The underlying free module of ``self``.
    - The base ring of ``self``.

    EXAMPLES::

        sage: Q = QuadraticForm(ZZ, 3, [1,2,3,4,5,6])
        sage: Qp = QuadraticForm(QQ, 3, [1,2,3,4,5,6])
        sage: Cl = CliffordAlgebra(Q)
        sage: Clp = CliffordAlgebra(Qp)
        sage: Cl.has_coerce_map_from(Clp)
        False
        sage: Clp.has_coerce_map_from(Cl)
        True

    Check that we preserve the multiplicative structure::

        sage: all(Clp(b)*Clp(b) == Clp(b*b) for b in Cl.basis())
        True

    Check from the underlying free module::

        sage: M = ZZ^3
        sage: Mp = QQ^3
        sage: Cl.has_coerce_map_from(M)
        True
        sage: Cl.has_coerce_map_from(Mp)
        False
        sage: Clp.has_coerce_map_from(M)
        True
        sage: Clp.has_coerce_map_from(Mp)
        True

    Names matter::

        sage: Cln = CliffordAlgebra(Q, names=['x','y','z'])
        sage: Cln.has_coerce_map_from(Cl)
        False
        sage: Cl.has_coerce_map_from(Cln)
        False

    Non-injective homomorphisms of base rings don't cause zero
    values in the coordinate dictionary (this had to be manually
    ensured)::

        sage: Q = QuadraticForm(ZZ, 3, [1,2,3,4,5,6])
        sage: Qp = QuadraticForm(Integers(3), 3, [1,2,3,4,5,6])
        sage: Cl = CliffordAlgebra(Q)
        sage: Clp = CliffordAlgebra(Qp)
        sage: a = Cl.basis()[(1,2)]
        sage: a
        e1*e2
        sage: Clp(a) # so far so good
        e1*e2
        sage: Clp(3*a) # but now
        0
        sage: Clp(3*a) == 0
        True
        sage: b = Cl.basis()[(0,2)]
        sage: Clp(3*a-4*b)
        2*e0*e2
    """
    if isinstance(V, CliffordAlgebra):
        Q = self._quadratic_form
        try:
            # The base change itself can fail; treat any failure as
            # "no coercion" rather than propagating the error.
            return (V.variable_names() == self.variable_names() and
                    V._quadratic_form.base_change_to(self.base_ring()) == Q)
        except Exception:
            return False
    # Vectors of the underlying free module lift into degree one.
    if self.free_module().has_coerce_map_from(V):
        return True
    return super(CliffordAlgebra, self)._coerce_map_from_(V)
def _element_constructor_(self, x):
    """
    Construct an element of ``self`` from ``x``.

    EXAMPLES::

        sage: Q = QuadraticForm(ZZ, 3, [1,2,3,4,5,6])
        sage: Qp = QuadraticForm(QQ, 3, [1,2,3,4,5,6])
        sage: Cl.<x,y,z> = CliffordAlgebra(Q)
        sage: Clp = CliffordAlgebra(Qp, names=['x','y','z'])
        sage: M = ZZ^3
        sage: Mp = QQ^3
        sage: Cl(2/3)
        Traceback (most recent call last):
        ...
        TypeError: do not know how to make x (= 2/3) an element of self ...
        sage: Clp(2/3)
        2/3
        sage: Clp(x)
        x
        sage: M = ZZ^3
        sage: Clp( M((1,-3,2)) )
        x - 3*y + 2*z

    Zero coordinates are handled appropriately::

        sage: Q3 = QuadraticForm(Integers(3), 3, [1,2,3,4,5,6])
        sage: Cl3 = CliffordAlgebra(Q3, names='xyz') # different syntax for a change
        sage: Cl3( M((1,-3,2)) )
        x + 2*z
    """
    # This is the natural lift morphism of the underlying free module
    if x in self.free_module():
        R = self.base_ring()
        if x.parent().base_ring() is R:
            return self.element_class(self, {(i,): c for i,c in iteritems(x)})
        # Filter out coordinates mapped to zero by the (possibly
        # non-injective) base-ring conversion.
        return self.element_class(self, {(i,): R(c) for i,c in iteritems(x) if R(c) != R.zero()})
    if isinstance(x, CliffordAlgebraElement):
        if x.parent() is self:
            return x
        if self.has_coerce_map_from(x.parent()):
            R = self.base_ring()
            return self.element_class(self, {i: R(c) for i,c in x if R(c) != R.zero()})
    return super(CliffordAlgebra, self)._element_constructor_(x)
def gen(self, i):
    """
    Return the ``i``-th standard generator of the algebra ``self``,
    i.e. the ``i``-th basis vector of `V` regarded as an element.

    EXAMPLES::

        sage: Q = QuadraticForm(ZZ, 3, [1,2,3,4,5,6])
        sage: Cl.<x,y,z> = CliffordAlgebra(Q)
        sage: [Cl.gen(i) for i in range(3)]
        [x, y, z]
    """
    one = self.base_ring().one()
    return self._from_dict({(i,): one}, remove_zeros=False)
def algebra_generators(self):
    """
    Return the family of algebra generators of ``self``, keyed by
    variable name.

    EXAMPLES::

        sage: Q = QuadraticForm(ZZ, 3, [1,2,3,4,5,6])
        sage: Cl.<x,y,z> = CliffordAlgebra(Q)
        sage: Cl.algebra_generators()
        Finite family {'x': x, 'y': y, 'z': z}
    """
    names = self.variable_names()
    by_name = {name: self.gen(pos) for pos, name in enumerate(names)}
    return Family(names, lambda name: by_name[name])
def gens(self):
    r"""
    Return the generators of ``self`` (as an algebra), as a tuple.

    EXAMPLES::

        sage: Q = QuadraticForm(ZZ, 3, [1,2,3,4,5,6])
        sage: Cl.<x,y,z> = CliffordAlgebra(Q)
        sage: Cl.gens()
        (x, y, z)
    """
    return tuple(self.algebra_generators())
def ngens(self):
    """
    Return the number of algebra generators of ``self``, which equals
    the dimension of the underlying quadratic form.

    EXAMPLES::

        sage: Q = QuadraticForm(ZZ, 3, [1,2,3,4,5,6])
        sage: Cl.<x,y,z> = CliffordAlgebra(Q)
        sage: Cl.ngens()
        3
    """
    return self._quadratic_form.dim()
@cached_method
def one_basis(self):
    """
    Return the basis index of the element `1`: the empty monomial.

    EXAMPLES::

        sage: Q = QuadraticForm(ZZ, 3, [1,2,3,4,5,6])
        sage: Cl.<x,y,z> = CliffordAlgebra(Q)
        sage: Cl.one_basis()
        ()
    """
    return ()
def is_commutative(self):
    """
    Check if ``self`` is a commutative algebra; this holds exactly when
    there are fewer than two generators.

    EXAMPLES::

        sage: Q = QuadraticForm(ZZ, 3, [1,2,3,4,5,6])
        sage: Cl.<x,y,z> = CliffordAlgebra(Q)
        sage: Cl.is_commutative()
        False
    """
    return self._quadratic_form.dim() <= 1
def quadratic_form(self):
    """
    Return the quadratic form that was used to define ``self``.
    (The quadratic form *on* ``self`` is yet to be implemented.)

    EXAMPLES::

        sage: Q = QuadraticForm(ZZ, 3, [1,2,3,4,5,6])
        sage: Cl.<x,y,z> = CliffordAlgebra(Q)
        sage: Cl.quadratic_form()
        Quadratic form in 3 variables over Integer Ring with coefficients:
        [ 1 2 3 ]
        [ * 4 5 ]
        [ * * 6 ]
    """
    return self._quadratic_form
def degree_on_basis(self, m):
    r"""
    Return the `\NN`-filtration degree of the monomial indexed by ``m``,
    which is simply the length of ``m``.

    EXAMPLES::

        sage: Q = QuadraticForm(ZZ, 3, [1,2,3,4,5,6])
        sage: Cl.<x,y,z> = CliffordAlgebra(Q)
        sage: Cl.degree_on_basis((0,))
        1
        sage: Cl.degree_on_basis((0,1))
        2
    """
    return ZZ(len(m))
def graded_algebra(self):
    """
    Return the associated graded algebra of ``self``: the exterior
    algebra on the same variables over the same base ring.

    EXAMPLES::

        sage: Q = QuadraticForm(ZZ, 3, [1,2,3,4,5,6])
        sage: Cl.<x,y,z> = CliffordAlgebra(Q)
        sage: Cl.graded_algebra()
        The exterior algebra of rank 3 over Integer Ring
    """
    return ExteriorAlgebra(self.base_ring(), self.variable_names())
@cached_method
def free_module(self):
    """
    Return the underlying free module `V` of ``self``, on which the
    defining quadratic form lives.

    EXAMPLES::

        sage: Q = QuadraticForm(ZZ, 3, [1,2,3,4,5,6])
        sage: Cl.<x,y,z> = CliffordAlgebra(Q)
        sage: Cl.free_module()
        Ambient free module of rank 3 over the principal ideal domain Integer Ring
    """
    return FreeModule(self.base_ring(), self._quadratic_form.dim())
def dimension(self):
    """
    Return the rank of ``self`` as a free module.

    Let `V` be a free `R`-module of rank `n`; then, `Cl(V, Q)` is a
    free `R`-module of rank `2^n`.

    EXAMPLES::

        sage: Q = QuadraticForm(ZZ, 3, [1,2,3,4,5,6])
        sage: Cl.<x,y,z> = CliffordAlgebra(Q)
        sage: Cl.dimension()
        8
    """
    # The basis is indexed by subsets of {0, ..., n-1}.
    return ZZ(2)**self._quadratic_form.dim()
def pseudoscalar(self):
    r"""
    Return the unit pseudoscalar of ``self``.

    Given the basis `e_1, e_2, \ldots, e_n` of the underlying
    `R`-module, the unit pseudoscalar is the product
    `e_1 \cdot e_2 \cdots e_n`. It depends on the choice of basis.

    EXAMPLES::

        sage: Q = QuadraticForm(ZZ, 3, [1,2,3,4,5,6])
        sage: Cl.<x,y,z> = CliffordAlgebra(Q)
        sage: Cl.pseudoscalar()
        x*y*z

        sage: Q = QuadraticForm(ZZ, 0, [])
        sage: Cl = CliffordAlgebra(Q)
        sage: Cl.pseudoscalar()
        1

    REFERENCES:

    - :wikipedia:`Classification_of_Clifford_algebras#Unit_pseudoscalar`
    """
    # The basis key for e_1 * e_2 * ... * e_n is the full index tuple.
    top = tuple(range(self._quadratic_form.dim()))
    return self.element_class(self, {top: self.base_ring().one()})
def lift_module_morphism(self, m, names=None):
    r"""
    Lift the matrix ``m`` to an algebra morphism of Clifford algebras.

    Given a linear map `m : W \to V` (here represented by a matrix
    acting on column vectors), this method returns the algebra
    morphism `Cl(m) : Cl(W, m(Q)) \to Cl(V, Q)`, where `Cl(V, Q)`
    is the Clifford algebra ``self`` and where `m(Q)` is the pullback
    of the quadratic form `Q` to `W`. See the documentation of
    :class:`CliffordAlgebra` for how this pullback and the morphism
    `Cl(m)` are defined.

    .. NOTE::

        This is a map into ``self``.

    INPUT:

    - ``m`` -- a matrix
    - ``names`` -- (default: ``'e'``) the names of the generators of
      the Clifford algebra of the domain of (the map represented by)
      ``m``

    OUTPUT:

    The algebra morphism `Cl(m)` from `Cl(W, m(Q))` to ``self``.

    EXAMPLES::

        sage: Q = QuadraticForm(ZZ, 3, [1,2,3,4,5,6])
        sage: Cl.<x,y,z> = CliffordAlgebra(Q)
        sage: m = matrix([[1,-1,-1],[0,1,-1],[1,1,1]])
        sage: phi = Cl.lift_module_morphism(m, 'abc')
        sage: a,b,c = phi.domain().gens()
        sage: phi(a)
        x + z
        sage: phi(a*b)
        x*y + 2*x*z - y*z + 7
        sage: phi((a + b)*(a + c)) == phi(a + b) * phi(a + c)
        True

    Arbitrary (non-square) linear maps can be lifted too::

        sage: m = matrix([[1,1],[0,1],[1,1]])
        sage: phi = Cl.lift_module_morphism(m, 'ab')
        sage: a,b = phi.domain().gens()
        sage: phi(a*b)
        x*y - y*z + 15
        sage: phi(a)*phi(b)
        x*y - y*z + 15

    TESTS:

    Check that the resulting morphism knows it is for
    finite-dimensional algebras (:trac:`25339`)::

        sage: phi = Cl.lift_module_morphism(matrix([[1,-1,-1],[0,1,-1],[1,1,1]]), 'abc')
        sage: phi.category_for()
        Category of finite dimensional super algebras with basis over
        (euclidean domains and infinite enumerated sets and metric spaces)
    """
    Q = self._quadratic_form(m)
    # If R is a quadratic form and m is a matrix, then R(m) returns
    # the quadratic form m^t R m (the pullback of R along m).
    if Q == self._quadratic_form and names is None:
        # m is an endomorphism preserving Q: the domain is ``self`` itself.
        Cl = self
    else:
        Cl = CliffordAlgebra(Q, names)
    n = self._quadratic_form.dim()
    # Image of a domain basis monomial: the (ordered) product of the
    # images of its generators; generator e_i maps to column i of m.
    f = lambda x: self.prod(self._from_dict( {(j,): m[j,i] for j in range(n)},
                                             remove_zeros=True )
                            for i in x)
    # Mark the morphism as one of finite-dimensional super algebras.
    cat = AlgebrasWithBasis(self.category().base_ring()).Super().FiniteDimensional()
    return Cl.module_morphism(on_basis=f, codomain=self, category=cat)
def lift_isometry(self, m, names=None):
    r"""
    Lift an invertible isometry ``m`` of the quadratic form of
    ``self`` to a Clifford algebra morphism.

    Given an invertible linear map `m : V \to W` (here represented by
    a matrix acting on column vectors), this method returns the
    algebra morphism `Cl(m)` from `Cl(V, Q)` to `Cl(W, m^{-1}(Q))`,
    where `Cl(V, Q)` is the Clifford algebra ``self`` and where
    `m^{-1}(Q)` is the pullback of the quadratic form `Q` to `W` along
    the inverse map `m^{-1} : W \to V`. See the documentation of
    :class:`CliffordAlgebra` for how this pullback and the morphism
    `Cl(m)` are defined.

    INPUT:

    - ``m`` -- an isometry of the quadratic form of ``self``
    - ``names`` -- (default: ``'e'``) the names of the generators of
      the Clifford algebra of the codomain of (the map represented by)
      ``m``

    OUTPUT:

    The algebra morphism `Cl(m)` from ``self`` to `Cl(W, m^{-1}(Q))`.

    EXAMPLES::

        sage: Q = QuadraticForm(ZZ, 3, [1,2,3,4,5,6])
        sage: Cl.<x,y,z> = CliffordAlgebra(Q)
        sage: m = matrix([[1,1,2],[0,1,1],[0,0,1]])
        sage: phi = Cl.lift_isometry(m, 'abc')
        sage: phi(x)
        a
        sage: phi(x*y)
        a*b + 1
        sage: phi(x + z) * phi(y + z) == phi((x + z) * (y + z))
        True

    TESTS:

    Check that the resulting morphism knows it is for
    finite-dimensional algebras (:trac:`25339`)::

        sage: phi.category_for()
        Category of finite dimensional super algebras with basis over
        (euclidean domains and infinite enumerated sets and metric spaces)
    """
    MS = m.parent()
    if not m.is_invertible():
        # Bug fix: the original raised ``ValueError('{} is not
        # invertible')`` without ever calling ``.format``, so the
        # literal placeholder was shown instead of the matrix.
        raise ValueError('{} is not invertible'.format(m))
    # Pull the quadratic form back along m^{-1}: Q(m^{-1}) is the form
    # (m^{-1})^t Q m^{-1} on the codomain W.
    Q = self._quadratic_form(MS(m.inverse()))
    if Q == self._quadratic_form and names is None:
        # m is an automorphism of (V, Q); map back into ``self``.
        Cl = self
    else:
        if names is None:
            names = 'e'
        Cl = CliffordAlgebra(Q, names)
    n = Q.dim()
    # Image of a basis monomial of ``self``: the product of the images
    # of its generators, where generator e_i maps to column i of m.
    f = lambda x: Cl.prod(Cl._from_dict( {(j,): m[j,i] for j in range(n)},
                                         remove_zeros=True )
                          for i in x)
    # Mark the morphism as one of finite-dimensional super algebras.
    cat = AlgebrasWithBasis(self.category().base_ring()).Super().FiniteDimensional()
    return self.module_morphism(on_basis=f, codomain=Cl, category=cat)
# This is a general method for finite dimensional algebras with bases
# and should be moved to the corresponding category once there is
# a category level method for getting the indexing set of the basis;
# similar to #15289 but on a category level.
@cached_method
def center_basis(self):
    """
    Return a list of elements which correspond to a basis for the center
    of ``self``.

    This assumes that the ground ring can be used to compute the
    kernel of a matrix.

    .. SEEALSO::

        :meth:`supercenter_basis`,
        http://math.stackexchange.com/questions/129183/center-of-clifford-algebra-depending-on-the-parity-of-dim-v

    .. TODO::

        Deprecate this in favor of a method called `center()` once
        subalgebras are properly implemented in Sage.

    EXAMPLES::

        sage: Q = QuadraticForm(QQ, 3, [1,2,3,4,5,6])
        sage: Cl.<x,y,z> = CliffordAlgebra(Q)
        sage: Z = Cl.center_basis(); Z
        (1, -2/5*x*y*z + x - 3/5*y + 2/5*z)
        sage: all(z*b - b*z == 0 for z in Z for b in Cl.basis())
        True

    A degenerate case::

        sage: Q = QuadraticForm(QQ, 3, [4,4,-4,1,-2,1])
        sage: Cl.<x,y,z> = CliffordAlgebra(Q)
        sage: Cl.center_basis()
        (1, x*y*z + x - 2*y - 2*z, x*y + x*z - 2*y*z)

    The most degenerate case (the exterior algebra)::

        sage: Q = QuadraticForm(QQ, 3)
        sage: Cl.<x,y,z> = CliffordAlgebra(Q)
        sage: Cl.center_basis()
        (1, x*y, x*z, y*z, x*y*z)
    """
    R = self.base_ring()
    B = self.basis()
    K = list(B.keys())
    k = len(K)
    # Build (sparsely) the k x k^2 matrix of the linear system
    # "x commutes with every basis element":  row a holds, for each
    # basis element B[j], the coefficients of the commutator
    # [B[i], B[j]] expanded in the basis.
    d = {}
    for a,i in enumerate(K):
        Bi = B[i]
        for b,j in enumerate(K):
            Bj = B[j]
            # Iterate over the (monomial, coefficient) pairs of the
            # commutator; column index packs (monomial, j) as idx + k*b.
            for m,c in (Bi*Bj - Bj*Bi):
                d[(a, K.index(m)+k*b)] = c
    m = Matrix(R, d, nrows=k, ncols=k*k, sparse=True)
    # Central elements = left kernel vectors, converted back to algebra
    # elements with basis key K[i] and coefficient c.
    from_vector = lambda x: self.sum_of_terms(((K[i], c) for i,c in iteritems(x)),
                                              distinct=True)
    return tuple(map( from_vector, m.kernel().basis() ))
    # NOTE: a dense variant (building ``eqns`` row lists and a dense
    # Matrix) is possible but the sparse construction above scales better.
# Same as center except for superalgebras
@cached_method
def supercenter_basis(self):
    """
    Return a list of elements which correspond to a basis for the
    supercenter of ``self``.

    This assumes that the ground ring can be used to compute the
    kernel of a matrix.

    .. SEEALSO::

        :meth:`center_basis`,
        http://math.stackexchange.com/questions/129183/center-of-clifford-algebra-depending-on-the-parity-of-dim-v

    .. TODO::

        Deprecate this in favor of a method called `supercenter()` once
        subalgebras are properly implemented in Sage.

    EXAMPLES::

        sage: Q = QuadraticForm(QQ, 3, [1,2,3,4,5,6])
        sage: Cl.<x,y,z> = CliffordAlgebra(Q)
        sage: SZ = Cl.supercenter_basis(); SZ
        (1,)
        sage: all(z.supercommutator(b) == 0 for z in SZ for b in Cl.basis())
        True

    Singular vectors of a quadratic form generate in the supercenter::

        sage: Q = QuadraticForm(QQ, 3, [4,4,-4,1,-2,1])
        sage: Cl.<x,y,z> = CliffordAlgebra(Q)
        sage: Cl.supercenter_basis()
        (1, x + 2*z, y + z, x*y + x*z - 2*y*z)

    The most degenerate case::

        sage: Q = QuadraticForm(QQ, 3)
        sage: Cl.<x,y,z> = CliffordAlgebra(Q)
        sage: Cl.supercenter_basis()
        (1, x, y, z, x*y, x*z, y*z, x*y*z)
    """
    R = self.base_ring()
    B = self.basis()
    K = list(B.keys())
    k = len(K)
    # Same linear-system construction as in :meth:`center_basis`, but
    # using the supercommutator: an anticommutator when both basis
    # monomials are odd (odd length), a commutator otherwise.
    d = {}
    for a,i in enumerate(K):
        Bi = B[i]
        for b,j in enumerate(K):
            Bj = B[j]
            if len(i) % 2 and len(j) % 2:
                supercommutator = Bi * Bj + Bj * Bi
            else:
                supercommutator = Bi * Bj - Bj * Bi
            for m,c in supercommutator:
                d[(a, K.index(m)+k*b)] = c
    m = Matrix(R, d, nrows=k, ncols=k*k, sparse=True)
    # Supercentral elements = kernel vectors mapped back to the algebra.
    from_vector = lambda x: self.sum_of_terms(((K[i], c) for i,c in iteritems(x)),
                                              distinct=True)
    return tuple(map( from_vector, m.kernel().basis() ))
    # NOTE: a dense variant using ``supercommutator`` rows is possible
    # but the sparse construction above scales better.
# Elements of a generic Clifford algebra use the shared element class.
Element = CliffordAlgebraElement
class ExteriorAlgebra(CliffordAlgebra):
r"""
An exterior algebra of a free module over a commutative ring.
Let `V` be a module over a commutative ring `R`. The exterior algebra
(or Grassmann algebra) `\Lambda(V)` of `V` is defined as the quotient
of the tensor algebra `T(V)` of `V` modulo the two-sided ideal
generated by all tensors of the form `x \otimes x` with `x \in V`. The
multiplication on `\Lambda(V)` is denoted by `\wedge` (so
`v_1 \wedge v_2 \wedge \cdots \wedge v_n` is the projection of
`v_1 \otimes v_2 \otimes \cdots \otimes v_n` onto `\Lambda(V)`) and
called the "exterior product" or "wedge product".
If `V` is a rank-`n` free `R`-module with a basis
`\{e_1, \ldots, e_n\}`, then `\Lambda(V)` is the `R`-algebra
noncommutatively generated by the `n` generators `e_1, \ldots, e_n`
subject to the relations `e_i^2 = 0` for all `i`, and
`e_i e_j = - e_j e_i` for all `i < j`. As an `R`-module,
`\Lambda(V)` then has a basis `(\bigwedge_{i \in I} e_i)` with `I`
ranging over the subsets of `\{1, 2, \ldots, n\}` (where
`\bigwedge_{i \in I} e_i` is the wedge product of `e_i` for `i`
running through all elements of `I` from smallest to largest), and
hence is free of rank `2^n`.
The exterior algebra of an `R`-module `V` can also be realized
as the Clifford algebra of `V` for the quadratic form `Q` given by
`Q(v) = 0` for all vectors `v \in V`. See :class:`CliffordAlgebra`
for the notion of a Clifford algebra.
The exterior algebra of an `R`-module `V` is a connected `\ZZ`-graded
Hopf superalgebra. It is commutative in the super sense (i.e., the
odd elements anticommute and square to `0`).
This class implements the exterior algebra `\Lambda(R^n)` for
`n` a nonnegative integer.
.. WARNING::
We initialize the exterior algebra as an object of the category
of Hopf algebras, but this is not really correct, since it is a
Hopf superalgebra with the odd-degree components forming the odd
part. So use Hopf-algebraic methods with care!
INPUT:
- ``R`` -- the base ring, *or* the free module whose exterior algebra
is to be computed
- ``names`` -- a list of strings to name the generators of the
exterior algebra; this list can either have one entry only (in which
case the generators will be called ``e + '0'``, ``e + '1'``, ...,
``e + 'n-1'``, with ``e`` being said entry), or have ``n`` entries
(in which case these entries will be used directly as names for the
generators)
- ``n`` -- the number of generators, i.e., the rank of the free
module whose exterior algebra is to be computed (this doesn't have
to be provided if it can be inferred from the rest of the input)
REFERENCES:
- :wikipedia:`Exterior_algebra`
"""
@staticmethod
def __classcall_private__(cls, R, names=None, n=None):
    """
    Normalize arguments to ensure a unique representation.

    Accepts the base ring or a free module for ``R``, and either a
    generator count or generator names for ``names``, reducing every
    combination to the canonical pair ``(R, names)``.

    EXAMPLES::

        sage: E1.<e0,e1,e2> = ExteriorAlgebra(QQ)
        sage: E2 = ExteriorAlgebra(QQ, 3)
        sage: E3 = ExteriorAlgebra(QQ, ['e0','e1','e2'])
        sage: E1 is E2 and E2 is E3
        True
    """
    if names is None:
        names = 'e'
    elif names in ZZ:
        # An integer in the ``names`` slot is really the rank.
        n = names
        names = 'e'
    if isinstance(R, FreeModule_generic):
        # A free module was passed: take its base ring and rank.
        if n is not None and n != R.dimension():
            raise ValueError("the number of variables does not match the dimension")
        n = R.dimension()
        R = R.base_ring()
    names = tuple(names)
    if n is not None and len(names) != n:
        if len(names) != 1:
            raise ValueError("the number of variables does not match the number of generators")
        # A single stem name: expand to stem0, stem1, ..., stem(n-1).
        stem = names[0]
        names = tuple('{}{}'.format(stem, i) for i in range(n))
    return super(ExteriorAlgebra, cls).__classcall__(cls, R, names)
def __init__(self, R, names):
    """
    Initialize ``self``.

    The exterior algebra is constructed as the Clifford algebra of
    the zero quadratic form on `R^n` with `n = len(names)`, in the
    category of finite-dimensional super Hopf algebras with basis.

    EXAMPLES::

        sage: E.<x,y,z> = ExteriorAlgebra(QQ)
        sage: E.category()
        Category of finite dimensional super hopf algebras with basis
        over Rational Field
        sage: TestSuite(E).run()
    """
    cat = HopfAlgebrasWithBasis(R).Super().FiniteDimensional()
    # QuadraticForm(R, n) with no coefficients is the zero form.
    CliffordAlgebra.__init__(self, QuadraticForm(R, len(names)), names, category=cat)
    # TestSuite will fail if the HopfAlgebra classes will ever have tests for
    # the coproduct being an algebra morphism -- since this is really a
    # Hopf superalgebra, not a Hopf algebra.
def _repr_(self):
r"""
Return a string representation of ``self``.
EXAMPLES::
sage: ExteriorAlgebra(QQ, 3)
The exterior algebra of rank 3 over Rational Field
"""
return "The exterior algebra of rank {} over {}".format(self.ngens(), self.base_ring())
def _repr_term(self, m):
"""
Return a string representation of the basis element indexed by
``m``.
EXAMPLES::
sage: E.<x,y,z> = ExteriorAlgebra(QQ)
sage: E._repr_term((0,1,2))
'x^y^z'
"""
if len(m) == 0:
return '1'
term = ''
for i in m:
if len(term) != 0:
term += '^'
term += self.variable_names()[i]
return term
def _latex_term(self, m):
r"""
Return a `\LaTeX` representation of the basis element indexed
by ``m``.
EXAMPLES::
sage: E.<x,y,z> = ExteriorAlgebra(QQ)
sage: E._latex_term((0,1,2))
' x \\wedge y \\wedge z'
sage: E.<x0,x1,x2> = ExteriorAlgebra(QQ)
sage: E._latex_term((0,1,2))
' x_{0} \\wedge x_{1} \\wedge x_{2}'
sage: E._latex_term(())
'1'
sage: E._latex_term((0,))
' x_{0}'
"""
if len(m) == 0:
return '1'
term = ''
for i in m:
if len(term) != 0:
term += ' \\wedge'
term += ' ' + self.latex_variable_names()[i]
return term
def lift_morphism(self, phi, names=None):
    r"""
    Lift the matrix ``phi`` to an algebra morphism of exterior algebras.

    Given a linear map `\phi : V \to W` (here represented by a matrix
    acting on column vectors over the base ring of `V`), this method
    returns the algebra morphism
    `\Lambda(\phi) : \Lambda(V) \to \Lambda(W)`. This morphism is defined
    on generators `v_i \in \Lambda(V)` by `v_i \mapsto \phi(v_i)`.

    .. NOTE::

        This is the map going out of ``self`` as opposed to
        :meth:`~sage.algebras.clifford_algebra.CliffordAlgebraElement.lift_module_morphism()`
        for general Clifford algebras.

    INPUT:

    - ``phi`` -- a linear map `\phi` from `V` to `W`, encoded as a
      matrix
    - ``names`` -- (default: ``'e'``) the names of the generators of
      the Clifford algebra of the domain of (the map represented by)
      ``phi``

    OUTPUT:

    The algebra morphism `\Lambda(\phi)` from ``self`` to `\Lambda(W)`.

    EXAMPLES::

        sage: E.<x,y> = ExteriorAlgebra(QQ)
        sage: phi = matrix([[0,1],[1,1],[1,2]])
        sage: L = E.lift_morphism(phi, ['a','b','c']); L
        Generic morphism:
          From: The exterior algebra of rank 2 over Rational Field
          To:   The exterior algebra of rank 3 over Rational Field
        sage: L(x)
        b + c
        sage: L(y)
        a + b + 2*c
        sage: L(x*y)
        -a^b - a^c + b^c
        sage: L(x)*L(y)
        -a^b - a^c + b^c
        sage: L(1/2*x + 2)
        1/2*b + 1/2*c + 2

    TESTS:

    Check that the resulting morphism knows it is for
    finite-dimensional algebras (:trac:`25339`)::

        sage: E = ExteriorAlgebra(ZZ, 'e', 3)
        sage: T = jordan_block(0, 2).block_sum(jordan_block(0, 1))
        sage: phi = E.lift_morphism(T)
        sage: phi.category_for()
        Category of finite dimensional super algebras with basis over Integer Ring
    """
    # The codomain rank is the number of rows of phi.
    n = phi.nrows()
    R = self.base_ring()
    E = ExteriorAlgebra(R, names, n)
    # Image of a basis monomial: wedge product of the images of its
    # generators; generator e_i maps to column i of phi.
    f = lambda x: E.prod(E._from_dict( {(j,): phi[j,i] for j in range(n)},
                                       remove_zeros=True )
                         for i in x)
    # Mark the morphism as one of finite-dimensional super algebras.
    cat = AlgebrasWithBasis(R).Super().FiniteDimensional()
    return self.module_morphism(on_basis=f, codomain=E, category=cat)
def volume_form(self):
    r"""
    Return the volume form of ``self``.

    Given the basis `e_1, e_2, \ldots, e_n` of the underlying
    `R`-module, the volume form is `e_1 \wedge e_2 \wedge \cdots
    \wedge e_n`. It depends on the choice of basis.

    EXAMPLES::

        sage: E.<x,y,z> = ExteriorAlgebra(QQ)
        sage: E.volume_form()
        x^y^z
    """
    # The top-degree basis key is the full tuple of generator indices.
    top = tuple(range(self._quadratic_form.dim()))
    return self.element_class(self, {top: self.base_ring().one()})
def boundary(self, s_coeff):
    r"""
    Return the boundary operator `\partial` defined by the structure
    coefficients ``s_coeff`` of a Lie algebra.

    For more on the boundary operator, see
    :class:`ExteriorAlgebraBoundary`.

    INPUT:

    - ``s_coeff`` -- a dictionary whose keys are in `I \times I`, where
      `I` is the index set of the underlying vector space `V`, and whose
      values can be coerced into 1-forms (degree 1 elements) in ``E``
      (usually, these values will just be elements of `V`)

    EXAMPLES::

        sage: E.<x,y,z> = ExteriorAlgebra(QQ)
        sage: E.boundary({(0,1): z, (1,2): x, (2,0): y})
        Boundary endomorphism of The exterior algebra of rank 3 over Rational Field
    """
    # Construction of the operator is delegated entirely to the helper class.
    return ExteriorAlgebraBoundary(self, s_coeff)
def coboundary(self, s_coeff):
    r"""
    Return the coboundary operator `d` defined by the structure
    coefficients ``s_coeff`` of a Lie algebra.

    For more on the coboundary operator, see
    :class:`ExteriorAlgebraCoboundary`.

    INPUT:

    - ``s_coeff`` -- a dictionary whose keys are in `I \times I`, where
      `I` is the index set of the underlying vector space `V`, and whose
      values can be coerced into 1-forms (degree 1 elements) in ``E``
      (usually, these values will just be elements of `V`)

    EXAMPLES::

        sage: E.<x,y,z> = ExteriorAlgebra(QQ)
        sage: E.coboundary({(0,1): z, (1,2): x, (2,0): y})
        Coboundary endomorphism of The exterior algebra of rank 3 over Rational Field
    """
    # Construction of the operator is delegated entirely to the helper class.
    return ExteriorAlgebraCoboundary(self, s_coeff)
def degree_on_basis(self, m):
    r"""
    Return the degree of the monomial indexed by ``m``.

    The degree of ``m`` in the `\ZZ`-grading of ``self`` is defined
    to be the length of ``m``.

    EXAMPLES::

        sage: E.<x,y,z> = ExteriorAlgebra(QQ)
        sage: E.degree_on_basis(())
        0
        sage: E.degree_on_basis((0,))
        1
        sage: E.degree_on_basis((0,1))
        2
    """
    # A basis key is the tuple of generator indices in the wedge product.
    return ZZ(len(m))
def coproduct_on_basis(self, a):
    r"""
    Return the coproduct on the basis element indexed by ``a``.

    The coproduct is defined by

    .. MATH::

        \Delta(e_{i_1} \wedge \cdots \wedge e_{i_m}) = \sum_{k=0}^m
        \sum_{\sigma \in Ush_{k,m-k}} (-1)^{\sigma}
        (e_{i_{\sigma(1)}} \wedge \cdots \wedge e_{i_{\sigma(k)}}) \otimes
        (e_{i_{\sigma(k+1)}} \wedge \cdots \wedge e_{i_{\sigma(m)}}),

    where `Ush_{k,m-k}` denotes the set of all `(k,m-k)`-unshuffles
    (i.e., permutations in `S_m` which are increasing on the interval
    `\{1, 2, \ldots, k\}` and on the interval
    `\{k+1, k+2, \ldots, k+m\}`).

    .. WARNING::

        This coproduct is a homomorphism of superalgebras, not a
        homomorphism of algebras!

    EXAMPLES::

        sage: E.<x,y,z> = ExteriorAlgebra(QQ)
        sage: E.coproduct_on_basis((0,))
        1 # x + x # 1
        sage: E.coproduct_on_basis((0,1))
        1 # x^y + x # y + x^y # 1 - y # x
    """
    # ``unshuffle_iterator`` yields ((left, right), signed coefficient)
    # pairs for every (k, m-k)-unshuffle of ``a``.
    from sage.combinat.combinat import unshuffle_iterator
    one = self.base_ring().one()
    return self.tensor_square().sum_of_terms(unshuffle_iterator(a, one),
                                             distinct=True)
def antipode_on_basis(self, m):
    r"""
    Return the antipode on the basis element indexed by ``m``.

    Given a basis element `\omega`, the antipode is defined by
    `S(\omega) = (-1)^{\deg(\omega)} \omega`.

    EXAMPLES::

        sage: E.<x,y,z> = ExteriorAlgebra(QQ)
        sage: E.antipode_on_basis(())
        1
        sage: E.antipode_on_basis((1,))
        -y
        sage: E.antipode_on_basis((1,2))
        2
    """
    one = self.base_ring().one()
    # (-1)^deg: negate exactly when the degree (= length of m) is odd.
    coeff = -one if len(m) % 2 else one
    return self.term(m, coeff)
def counit(self, x):
    r"""
    Return the counit of ``x``.

    The counit of an element `\omega` of the exterior algebra
    is its constant coefficient.

    EXAMPLES::

        sage: E.<x,y,z> = ExteriorAlgebra(QQ)
        sage: elt = x*y - 2*x + 3
        sage: E.counit(elt)
        3
    """
    # Projection onto the degree-0 component.
    return x.constant_coefficient()
def interior_product_on_basis(self, a, b):
    r"""
    Return the interior product `\iota_b a` of ``a`` with respect to
    ``b``.

    Here ``a`` and ``b`` are basis elements given by their keys
    (tuples of generator indices); see
    :meth:`~sage.algebras.clifford_algebra.CliffordAlgebra.Element.interior_product`
    for the interior product of arbitrary elements. The result depends
    on the choice of basis of the underlying vector space.

    EXAMPLES::

        sage: E.<x,y,z> = ExteriorAlgebra(QQ)
        sage: E.interior_product_on_basis((0,), (0,))
        1
        sage: E.interior_product_on_basis((0,2), (0,))
        z
        sage: E.interior_product_on_basis((1,), (0,2))
        0
        sage: E.interior_product_on_basis((0,1,2), (0,2))
        -y
    """
    remaining = list(a)
    negate = False
    for idx in b:
        # Contracting by a generator not present in ``a`` kills the term.
        try:
            pos = remaining.index(idx)
        except ValueError:
            return self.zero()
        # Moving the generator past an odd number of factors flips the sign.
        if pos % 2:
            negate = not negate
        del remaining[pos]
    R = self.base_ring()
    coeff = -R.one() if negate else R.one()
    return self.term(tuple(remaining), coeff)
def lifted_bilinear_form(self, M):
    r"""
    Return the bilinear form on the exterior algebra ``self``
    `= \Lambda(V)` which is obtained by lifting the bilinear
    form `f` on `V` given by the matrix ``M``.

    For `v_1, \ldots, v_n, w_1, \ldots, w_m \in V` the lifted form is

    .. MATH::

        \Lambda(f)
        ( v_1 \wedge \cdots \wedge v_n ,
          w_1 \wedge \cdots \wedge w_m )
        := \begin{cases}
        0, &\mbox{if } n \neq m ; \\
        \det G, & \mbox{if } n = m \end{cases} ,

    where `G` is the `n \times m`-matrix whose `(i, j)`-th entry is
    `f(v_i, w_j)`. Its restriction to `V` is `f`, and `\Lambda(f)`
    is symmetric if `f` is.

    INPUT:

    - ``M`` -- a matrix over the same base ring as ``self``,
      whose `(i, j)`-th entry is `f(e_i, e_j)`, where
      `(e_1, e_2, \ldots, e_N)` is the standard basis of the
      module `V` for which ``self`` `= \Lambda(V)` (so that
      `N = \dim(V)`), and where `f` is the bilinear form
      which is to be lifted.

    OUTPUT:

    A bivariate function which takes two elements `p` and
    `q` of ``self`` to `\Lambda(f)(p, q)`.

    .. NOTE::

        The bilinear form is returned as a function of two arguments,
        not as a matrix, since the matrix can be huge and one often
        needs just a particular value.

    .. TODO::

        Implement a class for bilinear forms and rewrite this
        method to use that class.

    EXAMPLES::

        sage: E.<x,y,z> = ExteriorAlgebra(QQ)
        sage: M = Matrix(QQ, [[1, 2, 3], [2, 3, 4], [3, 4, 5]])
        sage: Eform = E.lifted_bilinear_form(M)
        sage: Eform
        Bilinear Form from The exterior algebra of rank 3 over Rational
        Field (+) The exterior algebra of rank 3 over Rational Field to
        Rational Field
        sage: Eform(x*y, y*z)
        -1
        sage: Eform(x*y, y)
        0
        sage: Eform(x*(y+z), y*z)
        -3

    TESTS:

    Exterior algebra over a zero space (a border case)::

        sage: E = ExteriorAlgebra(QQ, 0)
        sage: M = Matrix(QQ, [])
        sage: Eform = E.lifted_bilinear_form(M)
        sage: Eform(E.one(), E.one())
        1
        sage: Eform(E.zero(), E.one())
        0

    .. TODO::

        Another way to compute this bilinear form seems to be to
        map `x` and `y` to the appropriate Clifford algebra and
        there compute `x^t y`, then send the result back to the
        exterior algebra and return its constant coefficient. Or
        something like this. Once the maps to the Clifford and
        back are implemented, check if this is faster.
    """
    R = self.base_ring()
    def lifted_form(x, y):
        # Bilinearity: expand over the (monomial, coefficient) pairs
        # of both arguments.
        result = R.zero()
        for mx, cx in x:
            for my, cy in y:
                n = len(mx)
                m = len(my)
                # Monomials of different degrees pair to zero.
                if m != n:
                    continue
                # Gram matrix entries f(e_{mx[i]}, e_{my[j]}), row-major.
                matrix_list = [M[mx[i], my[j]]
                               for i in range(n)
                               for j in range(n)]
                MA = MatrixArgs(R, n, matrix_list)
                del matrix_list
                # matrix(False): do not copy the just-built data.
                result += cx * cy * MA.matrix(False).determinant()
        return result
    from sage.categories.cartesian_product import cartesian_product
    return PoorManMap(lifted_form, domain=cartesian_product([self, self]),
                      codomain=self.base_ring(),
                      name="Bilinear Form")
class Element(CliffordAlgebraElement):
"""
An element of an exterior algebra.
"""
def _mul_(self, other):
    """
    Return ``self`` multiplied by ``other``.

    Implements the wedge product: each monomial of ``other`` is
    merged with each monomial of ``self``, keeping the generator
    indices sorted, picking up a sign for every transposition and
    dropping terms with a repeated generator.

    INPUT:

    - ``other`` -- element of the same exterior algebra as ``self``

    EXAMPLES::

        sage: E.<x,y,z> = ExteriorAlgebra(QQ)
        sage: x*y
        x^y
        sage: y*x
        -x^y
        sage: z*y*x
        -x^y^z
        sage: (3*x + y)^2
        0
        sage: (x+y) * (y+z)
        x^y + x^z + y^z
    """
    zero = self.parent().base_ring().zero()
    d = {}
    # Distribute over the (monomial, coefficient) pairs of both factors.
    for ml,cl in self:
        for mr,cr in other:
            # Create the next term: insert the left indices, from the
            # largest down, into the sorted right key.
            t = list(mr)
            for i in reversed(ml):
                pos = 0
                for j in t:
                    if i == j:
                        # Repeated generator: e_i ^ e_i = 0.
                        pos = None
                        break
                    if i < j:
                        break
                    pos += 1
                    # Each factor jumped over contributes a sign flip.
                    cr = -cr
                if pos is None:
                    t = None
                    break
                t.insert(pos, i)
            if t is None: # The next term is 0, move along
                continue
            t = tuple(t)
            d[t] = d.get(t, zero) + cl * cr
            # Drop coefficients that cancelled to zero.
            if d[t] == zero:
                del d[t]
    return self.__class__(self.parent(), d)
def interior_product(self, x):
    r"""
    Return the interior product (also known as antiderivation) of
    ``self`` with respect to ``x`` (that is, the element
    `\iota_{x}(\text{self})` of the exterior algebra).

    If `V` is an `R`-module, and if `\alpha` is a fixed element of
    `V^*`, then the *interior product* with respect to `\alpha` is
    an `R`-linear map
    `i_{\alpha} \colon \Lambda(V) \to \Lambda(V)`, determined by
    the following requirements:

    - `i_{\alpha}(v) = \alpha(v)` for all `v \in V = \Lambda^1(V)`,
    - it is a graded derivation of degree `-1`: all `x` and `y`
      in `\Lambda(V)` satisfy

    .. MATH::

        i_{\alpha}(x \wedge y) = (i_{\alpha} x) \wedge y
        + (-1)^{\deg x} x \wedge (i_{\alpha} y).

    Since Sage is only dealing with exterior powers of modules of
    the form `R^d`, the element `\alpha \in V^*` is passed as an
    element of `V` (identifying the standard basis with its dual
    basis). The interior product is extended to all
    `\alpha \in \Lambda(V^*)` by
    `i_{\beta \wedge \gamma} = i_{\gamma} \circ i_{\beta}`.

    INPUT:

    - ``x`` -- element of (or coercing into) `\Lambda^1(V)`
      (for example, an element of `V`); this plays the role of
      `\alpha` in the above definition

    EXAMPLES::

        sage: E.<x,y,z> = ExteriorAlgebra(QQ)
        sage: x.interior_product(x)
        1
        sage: (x + x*y).interior_product(2*y)
        -2*x
        sage: (x*z + x*y*z).interior_product(2*y - x)
        -2*x^z - y^z - z
        sage: x.interior_product(E.one())
        x
        sage: E.one().interior_product(x)
        0

    REFERENCES:

    - :wikipedia:`Exterior_algebra#Interior_product`
    """
    P = self.parent()
    # Bilinear extension of the basis-level contraction.
    return P.sum([c * cx * P.interior_product_on_basis(m, mx)
                  for m,c in self for mx,cx in x])

# Standard synonym for the interior product.
antiderivation = interior_product
def hodge_dual(self):
    r"""
    Return the Hodge dual of ``self``.

    For an element `\alpha` of the exterior algebra, the Hodge dual is
    `i_{\alpha} \sigma`, where `\sigma` is the volume form
    (:meth:`~sage.algebras.clifford_algebra.ExteriorAlgebra.volume_form`)
    and `i_{\alpha}` is the antiderivation with respect to `\alpha`
    (see :meth:`interior_product` for its definition).

    .. NOTE::

        Taking the Hodge dual twice of a homogeneous element `p` of
        `\Lambda(V)` yields `(-1)^{k(n-k)} p`, where `n = \dim V` and
        `k = \deg(p) = |p|`.

    EXAMPLES::

        sage: E.<x,y,z> = ExteriorAlgebra(QQ)
        sage: x.hodge_dual()
        y^z
        sage: (x*z).hodge_dual()
        -y
        sage: (x*y*z).hodge_dual()
        1
        sage: [a.hodge_dual().hodge_dual() for a in E.basis()]
        [1, x, y, z, x^y, x^z, y^z, x^y^z]
        sage: (x + x*y).hodge_dual()
        y^z + z
        sage: (x*z + x*y*z).hodge_dual()
        -y + 1
        sage: E = ExteriorAlgebra(QQ, 'wxyz')
        sage: [a.hodge_dual().hodge_dual() for a in E.basis()]
        [1, -w, -x, -y, -z, w^x, w^y, w^z, x^y, x^z, y^z,
         -w^x^y, -w^x^z, -w^y^z, -x^y^z, w^x^y^z]
    """
    # Contract ``self`` into the volume form of the parent algebra.
    sigma = self.parent().volume_form()
    return sigma.interior_product(self)
def constant_coefficient(self):
    """
    Return the degree-0 coefficient of ``self`` (the coefficient of
    the empty wedge monomial).

    .. TODO::

        Define a similar method for general Clifford algebras once
        the morphism to exterior algebras is implemented.

    EXAMPLES::

        sage: E.<x,y,z> = ExteriorAlgebra(QQ)
        sage: elt = 5*x + y + x*z + 10
        sage: elt.constant_coefficient()
        10
        sage: x.constant_coefficient()
        0
    """
    # The basis index of the algebra's one carries the constant term;
    # fall back to the base ring's zero when that key is absent.
    empty_monomial = self.parent().one_basis()
    zero = self.base_ring().zero()
    return self._monomial_coefficients.get(empty_monomial, zero)
def scalar(self, other):
    r"""
    Return the standard scalar product of ``self`` with ``other``.

    For `x, y \in \Lambda(V)` this is
    `\langle x, y \rangle = \langle x^t y \rangle`, where
    `\langle a \rangle` is the degree-0 term of `a` and `x^t` is the
    transpose
    (:meth:`~sage.algebras.clifford_algebra.CliffordAlgebraElement.transpose`)
    of `x`.

    .. TODO::

        Define a similar method for general Clifford algebras once
        the morphism to exterior algebras is implemented.

    EXAMPLES::

        sage: E.<x,y,z> = ExteriorAlgebra(QQ)
        sage: elt = 5*x + y + x*z
        sage: elt.scalar(z + 2*x)
        0
        sage: elt.transpose() * (z + 2*x)
        -2*x^y + 5*x^z + y^z
    """
    # Multiply the transpose against ``other``, then extract the constant part.
    product = self.transpose() * other
    return product.constant_coefficient()
#####################################################################
## Differentials
class ExteriorAlgebraDifferential(with_metaclass(
        InheritComparisonClasscallMetaclass,
        ModuleMorphismByLinearity, UniqueRepresentation
    )):
    r"""
    Internal class to store the data of a boundary or coboundary of
    an exterior algebra `\Lambda(L)` defined by the structure
    coefficients of a Lie algebra `L`.

    See :class:`ExteriorAlgebraBoundary` and
    :class:`ExteriorAlgebraCoboundary` for the actual classes, which
    inherit from this.

    .. WARNING::

        This is not a general class for differentials on the exterior
        algebra.
    """
    @staticmethod
    def __classcall__(cls, E, s_coeff):
        """
        Standardize the structure coefficients to ensure a unique
        representation.

        EXAMPLES::

            sage: from sage.algebras.clifford_algebra import ExteriorAlgebraDifferential
            sage: E.<x,y,z> = ExteriorAlgebra(QQ)
            sage: par1 = ExteriorAlgebraDifferential(E, {(0,1): z, (1,2): x, (2,0): y})
            sage: par2 = ExteriorAlgebraDifferential(E, {(0,1): z, (1,2): x, (0,2): -y})
            sage: par3 = ExteriorAlgebraDifferential(E, {(1,0): {2:-1}, (1,2): {0:1}, (2,0):{1:1}})
            sage: par1 is par2 and par2 is par3
            True

            sage: par4 = ExteriorAlgebraDifferential(E, {})
            sage: par5 = ExteriorAlgebraDifferential(E, {(1,0): 0, (1,2): {}, (0,2): E.zero()})
            sage: par6 = ExteriorAlgebraDifferential(E, {(1,0): 0, (1,2): 0, (0,2): 0})
            sage: par4 is par5 and par5 is par6
            True
        """
        d = {}
        for k,v in iteritems(dict(s_coeff)):
            if not v:  # Strip terms with 0
                continue

            if isinstance(v, dict):
                # Coordinate dictionary: convert it into a 1-form of ``E``.
                R = E.base_ring()
                v = E._from_dict({(i,): R(c) for i,c in iteritems(v)})
            else:
                # Make sure v is in ``E``
                v = E(v)
                # It's okay if v.degree results in an error
                # (we'd throw a similar error) unless v == 0 (which
                # is what v.list() is testing for)
                if v.list() and v.degree() != 1:
                    raise ValueError("elements must be degree 1")

            # Store keys with indices sorted increasingly, negating the
            # value when the pair had to be swapped (antisymmetry).
            if k[0] < k[1]:
                d[tuple(k)] = v
            else:
                d[(k[1], k[0])] = -v

        from sage.sets.family import Family
        return super(ExteriorAlgebraDifferential, cls).__classcall__(cls, E, Family(d))

    def __init__(self, E, s_coeff):
        """
        Initialize ``self``.

        EXAMPLES::

            sage: E.<x,y,z> = ExteriorAlgebra(QQ)
            sage: par = E.boundary({(0,1): z, (1,2):x, (2,0):y})

        We skip the pickling test as there is an infinite recursion when
        doing equality checks::

            sage: TestSuite(par).run(skip="_test_pickling")

        Check that it knows it is a finite-dimensional algebra
        morphism (:trac:`25339`)::

            sage: par.category_for()
            Category of finite dimensional algebras with basis over Rational Field
            sage: par.matrix()
            [ 0 0 0 0 0 0 0 0]
            [ 0 0 0 0 0 0 1 0]
            [ 0 0 0 0 0 -1 0 0]
            [ 0 0 0 0 1 0 0 0]
            [ 0 0 0 0 0 0 0 0]
            [ 0 0 0 0 0 0 0 0]
            [ 0 0 0 0 0 0 0 0]
            [ 0 0 0 0 0 0 0 0]
        """
        self._s_coeff = s_coeff

        # Technically this preserves the grading but with a shift of -1
        cat = AlgebrasWithBasis(E.base_ring()).FiniteDimensional()
        ModuleMorphismByLinearity.__init__(self, domain=E, codomain=E, category=cat)

    def homology(self, deg=None, **kwds):
        """
        Return the homology determined by ``self``.

        EXAMPLES::

            sage: E.<x,y,z> = ExteriorAlgebra(QQ)
            sage: par = E.boundary({(0,1): z, (1,2): x, (2,0): y})
            sage: par.homology()
            {0: Vector space of dimension 1 over Rational Field,
             1: Vector space of dimension 0 over Rational Field,
             2: Vector space of dimension 0 over Rational Field,
             3: Vector space of dimension 1 over Rational Field}
            sage: d = E.coboundary({(0,1): z, (1,2): x, (2,0): y})
            sage: d.homology()
            {0: Vector space of dimension 1 over Rational Field,
             1: Vector space of dimension 0 over Rational Field,
             2: Vector space of dimension 0 over Rational Field,
             3: Vector space of dimension 1 over Rational Field}
        """
        # Delegate to the chain complex built by the concrete subclass.
        return self.chain_complex().homology(deg, **kwds)
class ExteriorAlgebraBoundary(ExteriorAlgebraDifferential):
    r"""
    The boundary `\partial` of an exterior algebra `\Lambda(L)` defined
    by the structure coefficients of `L`.

    Let `L` be a Lie algebra. We give the exterior algebra
    `E = \Lambda(L)` a chain complex structure by considering a
    differential `\partial : \Lambda^{k+1}(L) \to \Lambda^k(L)` defined by

    .. MATH::

        \partial(x_1 \wedge x_2 \wedge \cdots \wedge x_{k+1})
        = \sum_{i < j} (-1)^{i+j+1}
        [x_i, x_j] \wedge x_1 \wedge \cdots \wedge \hat{x}_i \wedge \cdots
        \wedge \hat{x}_j \wedge \cdots \wedge x_{k+1}

    where `\hat{x}_i` denotes a missing index. The corresponding homology is
    the Lie algebra homology.

    INPUT:

    - ``E`` -- an exterior algebra of a vector space `L`
    - ``s_coeff`` -- a dictionary whose keys are in `I \times I`, where
      `I` is the index set of the basis of the vector space `L`, and whose
      values can be coerced into 1-forms (degree 1 elements) in ``E``;
      this dictionary will be used to define the Lie algebra structure
      on `L` (indeed, the `i`-th coordinate of the Lie bracket of the
      `j`-th and `k`-th basis vectors of `L` for `j < k` is set to be
      the value at the key `(j, k)` if this key appears in ``s_coeff``,
      or otherwise the negated of the value at the key `(k, j)`)

    .. WARNING::

        The values of ``s_coeff`` are supposed to be coercible into
        1-forms in ``E``; but they can also be dictionaries themselves
        (in which case they are interpreted as giving the coordinates of
        vectors in ``L``). In the interest of speed, these dictionaries
        are not sanitized or checked.

    .. WARNING::

        For any two distinct elements `i` and `j` of `I`, the dictionary
        ``s_coeff`` must have only one of the pairs `(i, j)` and
        `(j, i)` as a key. This is not checked.

    EXAMPLES:

    We consider the differential given by Lie algebra given by the cross
    product `\times` of `\RR^3`::

        sage: E.<x,y,z> = ExteriorAlgebra(QQ)
        sage: par = E.boundary({(0,1): z, (1,2): x, (2,0): y})
        sage: par(x)
        0
        sage: par(x*y)
        z
        sage: par(x*y*z)
        0
        sage: par(x+y-y*z+x*y)
        -x + z
        sage: par(E.zero())
        0

    We check that `\partial \circ \partial = 0`::

        sage: p2 = par * par
        sage: all(p2(b) == 0 for b in E.basis())
        True

    Another example: the Lie algebra `\mathfrak{sl}_2`, which has a
    basis `e,f,h` satisfying `[h,e] = 2e`, `[h,f] = -2f`, and `[e,f] = h`::

        sage: E.<e,f,h> = ExteriorAlgebra(QQ)
        sage: par = E.boundary({(0,1): h, (2,1): -2*f, (2,0): 2*e})
        sage: par(E.zero())
        0
        sage: par(e)
        0
        sage: par(e*f)
        h
        sage: par(f*h)
        2*f
        sage: par(h*f)
        -2*f
        sage: C = par.chain_complex(); C
        Chain complex with at most 4 nonzero terms over Rational Field
        sage: ascii_art(C)
        [ 0 -2 0] [0]
        [ 0 0 2] [0]
        [0 0 0] [ 1 0 0] [0]
        0 <-- C_0 <-------- C_1 <----------- C_2 <---- C_3 <-- 0
        sage: C.homology()
        {0: Vector space of dimension 1 over Rational Field,
         1: Vector space of dimension 0 over Rational Field,
         2: Vector space of dimension 0 over Rational Field,
         3: Vector space of dimension 1 over Rational Field}

    Over the integers::

        sage: C = par.chain_complex(R=ZZ); C
        Chain complex with at most 4 nonzero terms over Integer Ring
        sage: ascii_art(C)
        [ 0 -2 0] [0]
        [ 0 0 2] [0]
        [0 0 0] [ 1 0 0] [0]
        0 <-- C_0 <-------- C_1 <----------- C_2 <---- C_3 <-- 0
        sage: C.homology()
        {0: Z, 1: C2 x C2, 2: 0, 3: Z}

    REFERENCES:

    - :wikipedia:`Exterior_algebra#Lie_algebra_homology`
    """
    def _repr_type(self):
        """
        TESTS::

            sage: E.<x,y,z> = ExteriorAlgebra(QQ)
            sage: par = E.boundary({(0,1): z, (1,2): x, (2,0): y})
            sage: par._repr_type()
            'Boundary'
        """
        return "Boundary"

    def _on_basis(self, m):
        """
        Return the differential on the basis element indexed by ``m``.

        EXAMPLES::

            sage: E.<x,y,z> = ExteriorAlgebra(QQ)
            sage: par = E.boundary({(0,1): z, (1,2): x, (2,0): y})
            sage: par._on_basis(())
            0
            sage: par._on_basis((0,))
            0
            sage: par._on_basis((0,1))
            z
            sage: par._on_basis((0,2))
            -y
            sage: par._on_basis((0,1,2))
            0
        """
        E = self.domain()
        sc = self._s_coeff
        keys = sc.keys()
        # Sum over all index pairs (i, j) of ``m`` with a stored bracket:
        # drop both indices from the wedge, multiply by [x_i, x_j], and
        # attach the sign (-1)**b coming from moving x_j past the indices
        # between position a and position a+b+1.
        return E.sum((-1)**b * sc[(i,j)]
                     * E.monomial(m[:a] + m[a+1:a+b+1] + m[a+b+2:])
                     for a,i in enumerate(m) for b,j in enumerate(m[a+1:]) if (i,j) in keys)

    @cached_method
    def chain_complex(self, R=None):
        """
        Return the chain complex over ``R`` determined by ``self``.

        INPUT:

        - ``R`` -- the base ring; the default is the base ring of
          the exterior algebra

        EXAMPLES::

            sage: E.<x,y,z> = ExteriorAlgebra(QQ)
            sage: par = E.boundary({(0,1): z, (1,2): x, (2,0): y})
            sage: C = par.chain_complex(); C
            Chain complex with at most 4 nonzero terms over Rational Field
            sage: ascii_art(C)
            [ 0 0 1] [0]
            [ 0 -1 0] [0]
            [0 0 0] [ 1 0 0] [0]
            0 <-- C_0 <-------- C_1 <----------- C_2 <---- C_3 <-- 0

        TESTS:

        This still works in degree `1`::

            sage: E.<x> = ExteriorAlgebra(QQ)
            sage: par = E.boundary({})
            sage: C = par.chain_complex(); C
            Chain complex with at most 2 nonzero terms over Rational Field
            sage: ascii_art(C)
            [0]
            0 <-- C_0 <---- C_1 <-- 0

        Also in degree `0`::

            sage: E = ExteriorAlgebra(QQ, 0)
            sage: par = E.boundary({})
            sage: C = par.chain_complex(); C
            Chain complex with at most 1 nonzero terms over Rational Field
            sage: ascii_art(C)
            0 <-- C_0 <-- 0
        """
        from sage.homology.chain_complex import ChainComplex
        from sage.matrix.constructor import Matrix
        E = self.domain()
        n = E.ngens()
        if R is None:
            R = E.base_ring()

        if n == 0:
            # Special case because there are no matrices and thus the
            # ChainComplex constructor needs the dimension of the
            # 0th degree space explicitly given.
            return ChainComplex({1: Matrix(R, [[]])}, degree=-1)
            # If you are reading this because you changed something about
            # the ChainComplex constructor and the doctests are failing:
            # This should return a chain complex with degree -1 and
            # only one nontrivial module, namely a free module of rank 1,
            # situated in degree 0.

        # Group the basis into degrees
        basis_by_deg = {deg: [] for deg in range(n+1)}
        for b in E.basis().keys():
            basis_by_deg[len(b)].append(b)

        # Construct the transition matrices
        data = {}
        prev_basis = basis_by_deg[0]
        for deg in range(1,n+1):
            # Make sure within each basis we're sorted by lex
            basis = sorted(basis_by_deg[deg])
            mat = []
            for b in basis:
                ret = self._on_basis(b)
                mat.append([ret[p] for p in prev_basis])
            data[deg] = Matrix(mat).transpose().change_ring(R)
            prev_basis = basis

        return ChainComplex(data, degree=-1)
class ExteriorAlgebraCoboundary(ExteriorAlgebraDifferential):
    r"""
    The coboundary `d` of an exterior algebra `\Lambda(L)` defined
    by the structure coefficients of a Lie algebra `L`.

    Let `L` be a Lie algebra. We endow its exterior algebra
    `E = \Lambda(L)` with a cochain complex structure by considering a
    differential `d : \Lambda^k(L) \to \Lambda^{k+1}(L)` defined by

    .. MATH::

        d x_i = \sum_{j < k} s_{jk}^i x_j x_k,

    where `(x_1, x_2, \ldots, x_n)` is a basis of `L`, and where
    `s_{jk}^i` is the `x_i`-coordinate of the Lie bracket `[x_j, x_k]`.

    The corresponding cohomology is the Lie algebra cohomology of `L`.

    This can also be thought of as the exterior derivative, in which case
    the resulting cohomology is the de Rham cohomology of a manifold whose
    exterior algebra of differential forms is ``E``.

    INPUT:

    - ``E`` -- an exterior algebra of a vector space `L`
    - ``s_coeff`` -- a dictionary whose keys are in `I \times I`, where
      `I` is the index set of the basis of the vector space `L`, and whose
      values can be coerced into 1-forms (degree 1 elements) in ``E``;
      this dictionary will be used to define the Lie algebra structure
      on `L` (indeed, the `i`-th coordinate of the Lie bracket of the
      `j`-th and `k`-th basis vectors of `L` for `j < k` is set to be
      the value at the key `(j, k)` if this key appears in ``s_coeff``,
      or otherwise the negated of the value at the key `(k, j)`)

    .. WARNING::

        For any two distinct elements `i` and `j` of `I`, the dictionary
        ``s_coeff`` must have only one of the pairs `(i, j)` and
        `(j, i)` as a key. This is not checked.

    EXAMPLES:

    We consider the differential coming from the Lie algebra given by the
    cross product `\times` of `\RR^3`::

        sage: E.<x,y,z> = ExteriorAlgebra(QQ)
        sage: d = E.coboundary({(0,1): z, (1,2): x, (2,0): y})
        sage: d(x)
        y^z
        sage: d(y)
        -x^z
        sage: d(x+y-y*z)
        -x^z + y^z
        sage: d(x*y)
        0
        sage: d(E.one())
        0
        sage: d(E.zero())
        0

    We check that `d \circ d = 0`::

        sage: d2 = d * d
        sage: all(d2(b) == 0 for b in E.basis())
        True

    Another example: the Lie algebra `\mathfrak{sl}_2`, which has a
    basis `e,f,h` satisfying `[h,e] = 2e`, `[h,f] = -2f`, and `[e,f] = h`::

        sage: E.<e,f,h> = ExteriorAlgebra(QQ)
        sage: d = E.coboundary({(0,1): h, (2,1): -2*f, (2,0): 2*e})
        sage: d(E.zero())
        0
        sage: d(e)
        -2*e^h
        sage: d(f)
        2*f^h
        sage: d(h)
        e^f
        sage: d(e*f)
        0
        sage: d(f*h)
        0
        sage: d(e*h)
        0
        sage: C = d.chain_complex(); C
        Chain complex with at most 4 nonzero terms over Rational Field
        sage: ascii_art(C)
        [ 0 0 1] [0]
        [-2 0 0] [0]
        [0 0 0] [ 0 2 0] [0]
        0 <-- C_3 <-------- C_2 <----------- C_1 <---- C_0 <-- 0
        sage: C.homology()
        {0: Vector space of dimension 1 over Rational Field,
         1: Vector space of dimension 0 over Rational Field,
         2: Vector space of dimension 0 over Rational Field,
         3: Vector space of dimension 1 over Rational Field}

    Over the integers::

        sage: C = d.chain_complex(R=ZZ); C
        Chain complex with at most 4 nonzero terms over Integer Ring
        sage: ascii_art(C)
        [ 0 0 1] [0]
        [-2 0 0] [0]
        [0 0 0] [ 0 2 0] [0]
        0 <-- C_3 <-------- C_2 <----------- C_1 <---- C_0 <-- 0
        sage: C.homology()
        {0: Z, 1: 0, 2: C2 x C2, 3: Z}

    REFERENCES:

    - :wikipedia:`Exterior_algebra#Differential_geometry`
    """
    def __init__(self, E, s_coeff):
        """
        Initialize ``self``.

        EXAMPLES::

            sage: E.<x,y,z> = ExteriorAlgebra(QQ)
            sage: d = E.coboundary({(0,1): z, (1,2):x, (2,0):y})
            sage: TestSuite(d).run() # known bug - morphisms are properly in a category
        """
        # Construct the dictionary of costructure coefficients, i.e. given
        # [x_j, x_k] = \sum_i s_{jk}^i x_i, we get x^i |-> \sum_{j<k} s_{jk}^i x^j x^k.
        # This dictionary might contain 0 values and might also be missing
        # some keys (both times meaning that the respective `s_{jk}^i` are
        # zero for all `j` and `k`).
        self._cos_coeff = {}
        zero = E.zero()
        B = E.basis()
        for k, v in iteritems(dict(s_coeff)):
            k = B[k]
            # Accumulate c * x^j x^k into the entry for each monomial m of v.
            for m,c in v:
                self._cos_coeff[m] = self._cos_coeff.get(m, zero) + c * k
        ExteriorAlgebraDifferential.__init__(self, E, s_coeff)

    def _repr_type(self):
        """
        TESTS::

            sage: E.<x,y,z> = ExteriorAlgebra(QQ)
            sage: d = E.coboundary({(0,1): z, (1,2): x, (2,0): y})
            sage: d._repr_type()
            'Coboundary'
        """
        return "Coboundary"

    def _on_basis(self, m):
        r"""
        Return the differential on the basis element indexed by ``m``.

        EXAMPLES:

        The vector space `\RR^3` made into a Lie algebra using the
        cross product::

            sage: E.<x,y,z> = ExteriorAlgebra(QQ)
            sage: d = E.coboundary({(0,1): z, (1,2): x, (2,0): y})
            sage: d._on_basis(())
            0
            sage: d._on_basis((0,))
            y^z
            sage: d._on_basis((1,))
            -x^z
            sage: d._on_basis((2,))
            x^y
            sage: d._on_basis((0,1))
            0
            sage: d._on_basis((0,2))
            0
            sage: d._on_basis((0,1,2))
            0
        """
        E = self.domain()
        cc = self._cos_coeff
        keys = cc.keys()
        # Apply the graded Leibniz rule: replace each factor x_i of the
        # wedge with its coboundary cc[(i,)], with sign (-1)**a for the
        # factors passed over.
        return E.sum((-1)**a * E.monomial(m[:a]) * cc[(i,)] * E.monomial(m[a+1:])
                     for a,i in enumerate(m) if (i,) in keys)

    @cached_method
    def chain_complex(self, R=None):
        """
        Return the chain complex over ``R`` determined by ``self``.

        INPUT:

        - ``R`` -- the base ring; the default is the base ring of
          the exterior algebra

        EXAMPLES::

            sage: E.<x,y,z> = ExteriorAlgebra(QQ)
            sage: d = E.coboundary({(0,1): z, (1,2): x, (2,0): y})
            sage: C = d.chain_complex(); C
            Chain complex with at most 4 nonzero terms over Rational Field
            sage: ascii_art(C)
            [ 0 0 1] [0]
            [ 0 -1 0] [0]
            [0 0 0] [ 1 0 0] [0]
            0 <-- C_3 <-------- C_2 <----------- C_1 <---- C_0 <-- 0

        TESTS:

        This still works in degree `1`::

            sage: E.<x> = ExteriorAlgebra(QQ)
            sage: d = E.coboundary({})
            sage: C = d.chain_complex(); C
            Chain complex with at most 2 nonzero terms over Rational Field
            sage: ascii_art(C)
            [0]
            0 <-- C_1 <---- C_0 <-- 0

        Also in degree `0`::

            sage: E = ExteriorAlgebra(QQ, 0)
            sage: d = E.coboundary({})
            sage: C = d.chain_complex(); C
            Chain complex with at most 1 nonzero terms over Rational Field
            sage: ascii_art(C)
            0 <-- C_0 <-- 0
        """
        from sage.homology.chain_complex import ChainComplex
        from sage.matrix.constructor import Matrix
        E = self.domain()
        n = E.ngens()
        if R is None:
            R = E.base_ring()

        if n == 0:
            # Special case because there are no matrices and thus the
            # ChainComplex constructor needs the dimension of the
            # 0th degree space explicitly given.
            return ChainComplex({-1: Matrix(R, [[]])}, degree=1)
            # If you are reading this because you changed something about
            # the ChainComplex constructor and the doctests are failing:
            # This should return a chain complex with degree 1 and
            # only one nontrivial module, namely a free module of rank 1,
            # situated in degree 0.

        # Group the basis into degrees
        basis_by_deg = {deg: [] for deg in range(n+1)}
        for b in E.basis().keys():
            basis_by_deg[len(b)].append(b)

        # Construct the transition matrices
        data = {}
        basis = basis_by_deg[0]
        for deg in range(n):
            # Make sure within each basis we're sorted by lex
            next_basis = sorted(basis_by_deg[deg+1])
            mat = []
            for b in basis:
                ret = self._on_basis(b)
                mat.append([ret[p] for p in next_basis])
            data[deg] = Matrix(mat).transpose().change_ring(R)
            basis = next_basis

        return ChainComplex(data, degree=1)
| 34.797958 | 118 | 0.502048 |
e4271a7efffe2a2de2bd150e26e18d8488d80775 | 1,538 | py | Python | jdcloud_sdk/services/ipanti/apis/ModifyForwardRuleRequest.py | lidaobing/jdcloud-sdk-python | f305e8ddd74ab4ad445477744534e7299d4d93fb | [
"Apache-2.0"
] | null | null | null | jdcloud_sdk/services/ipanti/apis/ModifyForwardRuleRequest.py | lidaobing/jdcloud-sdk-python | f305e8ddd74ab4ad445477744534e7299d4d93fb | [
"Apache-2.0"
] | null | null | null | jdcloud_sdk/services/ipanti/apis/ModifyForwardRuleRequest.py | lidaobing/jdcloud-sdk-python | f305e8ddd74ab4ad445477744534e7299d4d93fb | [
"Apache-2.0"
] | 1 | 2019-03-01T08:44:37.000Z | 2019-03-01T08:44:37.000Z | # coding=utf8
# Copyright 2018-2025 JDCLOUD.COM
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http:#www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# NOTE: This class is auto generated by the jdcloud code generator program.
from jdcloud_sdk.core.jdcloudrequest import JDCloudRequest
class ModifyForwardRuleRequest(JDCloudRequest):
    """
    Update a non-website (port forwarding) rule.
    """

    def __init__(self, parameters, header=None, version="v1"):
        # PATCH against the per-instance forward-rule resource; path
        # placeholders are filled from ``parameters`` by the SDK core.
        super(ModifyForwardRuleRequest, self).__init__(
            '/regions/{regionId}/instances/{instanceId}/forwardRules/{forwardRuleId}', 'PATCH', header, version)
        self.parameters = parameters
class ModifyForwardRuleParameters(object):
    """Parameter holder for :class:`ModifyForwardRuleRequest`.

    Improvement over the generated original: documentation translated
    from Chinese to English and type annotations added; attribute names
    and behavior are unchanged.
    """

    def __init__(self, regionId: str, instanceId: str, forwardRuleId: str,
                 forwardRuleSpec) -> None:
        """
        :param regionId: Region ID
        :param instanceId: Instance ID
        :param forwardRuleId: Forwarding rule ID
        :param forwardRuleSpec: Non-website (port forwarding) rule spec
        """
        self.regionId = regionId
        self.instanceId = instanceId
        self.forwardRuleId = forwardRuleId
        self.forwardRuleSpec = forwardRuleSpec
| 32.041667 | 112 | 0.717815 |
99cba8667e8139a7dd425caa5b8e0ea8e230e675 | 1,914 | py | Python | handlers/private.py | AftahBagas/AlphaMusik | c8c3804a26ad393b6f666fecd4d3464727ce2544 | [
"MIT"
] | null | null | null | handlers/private.py | AftahBagas/AlphaMusik | c8c3804a26ad393b6f666fecd4d3464727ce2544 | [
"MIT"
] | null | null | null | handlers/private.py | AftahBagas/AlphaMusik | c8c3804a26ad393b6f666fecd4d3464727ce2544 | [
"MIT"
] | 1 | 2021-06-22T08:08:43.000Z | 2021-06-22T08:08:43.000Z | from telethon import Client, filters
from telethon.types import Message, InlineKeyboardMarkup, InlineKeyboardButton
from config import BOT_NAME as bn
from helpers.filters import other_filters2
# NOTE(review): these handlers use a Pyrogram-style API
# (``Client.on_message`` decorator, ``filters``, ``InlineKeyboardMarkup``)
# although the file imports from ``telethon`` -- confirm the intended library.
@Client.on_message(other_filters2)
async def start(_, message: Message):
    # Greet the user with a sticker, then an intro message carrying
    # contact/social links and an "add to your group" deep link.
    await message.reply_sticker("CAACAgUAAxkBAAIVxGC-q27I1NoiCYx7UtrijASqOaycAAKJAgACXbvgVaglLqi7A0crHwQ")
    await message.reply_text(
        f"""**Hey, I'm {bn} 🎵
I can play music in your group's voice call. Developed by [Alfareza](https://t.me/kanjengingsun).
Add me to your group and play music freely!**
""",
        reply_markup=InlineKeyboardMarkup(
            [
                [
                    InlineKeyboardButton(
                        "📩 Contact Me 📩", url="https://t.me/kanjengingsun")
                ],[
                    InlineKeyboardButton(
                        "🌀 Instagram", url="https://www.instagram.com/aftahbagas"
                    ),
                    InlineKeyboardButton(
                        "💬 Group", url="https://t.me/VcgMusicGroup"
                    ),
                    InlineKeyboardButton(
                        "🔊 Channel", url="https://t.me/VcgChannelSupport"
                    )
                ],[
                    InlineKeyboardButton(
                        "➕ Add To Your Group ➕", url="https://t.me/AlphaaMusicBot?startgroup=true"
                    )]
            ]
        ),
        disable_web_page_preview=True
    )
@Client.on_message(filters.command("start") & ~filters.private & ~filters.channel)
async def gstart(_, message: Message):
    # Reply to /start issued inside a group (private chats and channels
    # are excluded by the filter) with a short status and a channel link.
    await message.reply_text("""**Group Music Player Online ✅**""",
        reply_markup=InlineKeyboardMarkup(
            [
                [
                    InlineKeyboardButton(
                        "🔊 Channel", url="https://t.me/VcgChannelSupport")
                ]
            ]
        )
    )
| 34.178571 | 106 | 0.539707 |
e9e7df0468fc9d3a9e4c3d9f38099edb0214a9bc | 1,598 | py | Python | setup.py | tranquochuy/data-science-utilities | 578b05cce40ae410868c79e7427c401bf0531e80 | [
"MIT"
] | 1 | 2018-05-21T03:30:44.000Z | 2018-05-21T03:30:44.000Z | setup.py | tranquochuy/data-science-utilities | 578b05cce40ae410868c79e7427c401bf0531e80 | [
"MIT"
] | null | null | null | setup.py | tranquochuy/data-science-utilities | 578b05cce40ae410868c79e7427c401bf0531e80 | [
"MIT"
] | 1 | 2018-09-21T08:51:18.000Z | 2018-09-21T08:51:18.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-

"""The setup script."""

from setuptools import setup, find_packages

# Long description for PyPI is assembled from the README and the changelog.
# NOTE(review): files are opened with the platform default encoding --
# confirm README.rst/HISTORY.rst are ASCII/UTF-8 safe on all build hosts.
with open('README.rst') as readme_file:
    readme = readme_file.read()

with open('HISTORY.rst') as history_file:
    history = history_file.read()

# Runtime dependencies; setup/test requirement lists are currently empty.
requirements = ['Click>=6.0']

setup_requirements = []

test_requirements = []

setup(
    author="Truoc Pham",
    author_email='truoc.phamkhac@asnet.com.vn',
    classifiers=[
        'Development Status :: 2 - Pre-Alpha',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: MIT License',
        'Natural Language :: English',
        "Programming Language :: Python :: 2",
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
    ],
    description="Data Science utilities in python.",
    entry_points={
        'console_scripts': [
            'data_science_utilities=data_science_utilities.cli:main',
        ],
    },
    install_requires=requirements,
    license='MIT license',
    long_description=readme + '\n\n' + history,
    include_package_data=True,
    keywords='data_science_utilities',
    name='data_science_utilities',
    packages=find_packages(include=['data_science_utilities']),
    setup_requires=setup_requirements,
    test_suite='tests',
    tests_require=test_requirements,
    url='https://github.com/truocphamkhac/data-science-utilities',
    version='0.2.4',
    zip_safe=False
)
| 29.054545 | 69 | 0.649562 |
14eaedc33d9e4fcf9d0cb5baedd0e5b70465969d | 358 | py | Python | main.py | znkd/kysc-service | dcb17d3ecf376dcbb2a54cbcf3e1e1b7fc907fd2 | [
"MIT"
] | 1 | 2019-08-24T14:07:29.000Z | 2019-08-24T14:07:29.000Z | main.py | znkd/kysc-service | dcb17d3ecf376dcbb2a54cbcf3e1e1b7fc907fd2 | [
"MIT"
] | null | null | null | main.py | znkd/kysc-service | dcb17d3ecf376dcbb2a54cbcf3e1e1b7fc907fd2 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
from flask import Flask
from flask_restful import Api

# Project resources: one class per REST endpoint.
from api.user import User
from api.login import Tel
from api.good import List

# Flask application wrapped with the Flask-RESTful extension.
app = Flask(__name__)
api = Api(app)

# Route registration.
api.add_resource(Tel, '/login/tel')
api.add_resource(User, '/user')
api.add_resource(List, '/good/list')

if __name__ == "__main__":
    # Development server only; debug mode must not be used in production.
    app.run(debug=True)
| 17.047619 | 36 | 0.706704 |
19eb4e30334abfefd3f77272e4936e215f2d6484 | 13,327 | py | Python | LeagueClient/api/utils_data.py | CharmingDays/League-Wrapper | 0f388f25e3be5cee62b36cd5ed9520b96e56a562 | [
"MIT"
] | 1 | 2022-02-01T12:18:08.000Z | 2022-02-01T12:18:08.000Z | LeagueClient/api/utils_data.py | CharmingDays/League-Wrapper | 0f388f25e3be5cee62b36cd5ed9520b96e56a562 | [
"MIT"
] | null | null | null | LeagueClient/api/utils_data.py | CharmingDays/League-Wrapper | 0f388f25e3be5cee62b36cd5ed9520b96e56a562 | [
"MIT"
# Mapping of Riot queue id -> human-readable queue description, plus a
# "queue_ids" list of all known ids.
#
# Fixes over the original data (checked against Riot's queues constants):
# - queue ids 1000 ("PROJECT: Hunters"), 1010 ("Snow ARURF") and
#   1020 ("One for All") were mistyped as 100/101/102; the duplicate
#   key 100 silently overwrote the real 100 ("5v5 ARAM games") entry.
# - "queue_ids" listed 100 twice and omitted the Teamfight Tactics and
#   tutorial queues that the mapping itself contains.
queue_types = {
    "queue_ids": [72, 73, 75, 76, 78, 83, 98, 100, 310, 313, 317, 325,
                  400, 420, 430, 440, 450, 600, 610, 700, 820, 830, 840,
                  850, 900, 910, 920, 940, 950, 960, 980, 990, 1000, 1010,
                  1020, 1090, 1100, 1110, 2000, 2010, 2020],
    72: "1v1 Snowdown Showdown games",
    73: "2v2 Snowdown Showdown games",
    75: "6v6 Hexakill games",
    76: "Ultra Rapid Fire games",
    78: "One For All: Mirror Mode games",
    83: "Co-op vs AI Ultra Rapid Fire games",
    98: "6v6 Hexakill games",
    100: "5v5 ARAM games",
    310: "Nemesis games",
    313: "Black Market Brawlers games",
    317: "Definitely Not Dominion games",
    325: "All Random games",
    400: "5v5 Draft Pick games",
    420: "5v5 Ranked Solo games",
    430: "5v5 Blind Pick games",
    440: "5v5 Ranked Flex games",
    450: "5v5 ARAM games",
    600: "Blood Hunt Assassin games",
    610: "Dark Star: Singularity games",
    700: "Clash games",
    820: "Co-op vs. AI Beginner Bot games",
    830: "Co-op vs. AI Intro Bot games",
    840: "Co-op vs. AI Beginner Bot games",
    850: "Co-op vs. AI Intermediate Bot games",
    900: "URF games",
    910: "Ascension games",
    920: "Legend of the Poro King games",
    940: "Nexus Siege games",
    950: "Doom Bots Voting games",
    960: "Doom Bots Standard games",
    980: "Star Guardian Invasion: Normal games",
    990: "Star Guardian Invasion: Onslaught games",
    1000: "PROJECT: Hunters games",
    1010: "Snow ARURF games",
    1020: "One for All games",
    1090: "Teamfight Tactics games",
    1100: "Ranked Teamfight Tactics games",
    1110: "Teamfight Tactics Tutorial games",
    2000: "Tutorial 1",
    2010: "Tutorial 2",
    2020: "Tutorial 3"
}
# Match-history season constants (id + display name).
# Fix over the original data: "season_ids" omitted id 0 ("PRESEASON 3")
# even though it is present in "season_data"; the two views now agree.
seasons = {
    "season_ids": [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13],
    "season_data": [
        {"id": 0, "season": "PRESEASON 3"},
        {"id": 1, "season": "SEASON 3"},
        {"id": 2, "season": "PRESEASON 2014"},
        {"id": 3, "season": "SEASON 2014"},
        {"id": 4, "season": "PRESEASON 2015"},
        {"id": 5, "season": "SEASON 2015"},
        {"id": 6, "season": "PRESEASON 2016"},
        {"id": 7, "season": "SEASON 2016"},
        {"id": 8, "season": "PRESEASON 2017"},
        {"id": 9, "season": "SEASON 2017"},
        {"id": 10, "season": "PRESEASON 2018"},
        {"id": 11, "season": "SEASON 2018"},
        {"id": 12, "season": "PRESEASON 2019"},
        {"id": 13, "season": "SEASON 2019"},
    ],
}
champs={
"version":"10.11.1",
"Aatrox": 266,
"Ahri": 103,
"Akali": 84,
"Alistar": 12,
"Amumu": 32,
"Anivia": 34,
"Annie": 1,
"Aphelios": 523,
"Ashe": 22,
"AurelionSol": 136,
"Azir": 268,
"Bard": 432,
"Blitzcrank": 53,
"Brand": 63,
"Braum": 201,
"Caitlyn": 51,
"Camille": 164,
"Cassiopeia": 69,
"Chogath": 31,
"Corki": 42,
"Darius": 122,
"Diana": 131,
"Draven": 119,
"DrMundo": 36,
"Ekko": 245,
"Elise": 60,
"Evelynn": 28,
"Ezreal": 81,
"Fiddlesticks": 9,
"Fiora": 114,
"Fizz": 105,
"Galio": 3,
"Gangplank": 41,
"Garen": 86,
"Gnar": 150,
"Gragas": 79,
"Graves": 104,
"Hecarim": 120,
"Heimerdinger": 74,
"Illaoi": 420,
"Irelia": 39,
"Ivern": 427,
"Janna": 40,
"JarvanIV": 59,
"Jax": 24,
"Jayce": 126,
"Jhin": 202,
"Jinx": 222,
"Kaisa": 145,
"Kalista": 429,
"Karma": 43,
"Karthus": 30,
"Kassadin": 38,
"Katarina": 55,
"Kayle": 10,
"Kayn": 141,
"Kennen": 85,
"Khazix": 121,
"Kindred": 203,
"Kled": 240,
"KogMaw": 96,
"Leblanc": 7,
"LeeSin": 64,
"Leona": 89,
"Lissandra": 127,
"Lucian": 236,
"Lulu": 117,
"Lux": 99,
"Malphite": 54,
"Malzahar": 90,
"Maokai": 57,
"MasterYi": 11,
"MissFortune": 21,
"MonkeyKing": 62,
"Mordekaiser": 82,
"Morgana": 25,
"Nami": 267,
"Nasus": 75,
"Nautilus": 111,
"Neeko": 518,
"Nidalee": 76,
"Nocturne": 56,
"Nunu": 20,
"Olaf": 2,
"Orianna": 61,
"Ornn": 516,
"Pantheon": 80,
"Poppy": 78,
"Pyke": 555,
"Qiyana": 246,
"Quinn": 133,
"Rakan": 497,
"Rammus": 33,
"RekSai": 421,
"Renekton": 58,
"Rengar": 107,
"Riven": 92,
"Rumble": 68,
"Ryze": 13,
"Sejuani": 113,
"Senna": 235,
"Sett": 875,
"Shaco": 35,
"Shen": 98,
"Shyvana": 102,
"Singed": 27,
"Sion": 14,
"Sivir": 15,
"Skarner": 72,
"Sona": 37,
"Soraka": 16,
"Swain": 50,
"Sylas": 517,
"Syndra": 134,
"TahmKench": 223,
"Taliyah": 163,
"Talon": 91,
"Taric": 44,
"Teemo": 17,
"Thresh": 412,
"Tristana": 18,
"Trundle": 48,
"Tryndamere": 23,
"TwistedFate": 4,
"Twitch": 29,
"Udyr": 77,
"Urgot": 6,
"Varus": 110,
"Vayne": 67,
"Veigar": 45,
"Velkoz": 161,
"Vi": 254,
"Viktor": 112,
"Vladimir": 8,
"Volibear": 106,
"Warwick": 19,
"Xayah": 498,
"Xerath": 101,
"XinZhao": 5,
"Yasuo": 157,
"Yorick": 83,
"Yuumi": 350,
"Zac": 154,
"Zed": 238,
"Ziggs": 115,
"Zilean": 26,
"Zoe": 142,
"Zyra": 143,
"champion_ids": [
266,
103,
84,
12,
32,
34,
1,
523,
22,
136,
268,
432,
53,
63,
201,
51,
164,
69,
31,
42,
122,
131,
119,
36,
245,
60,
28,
81,
9,
114,
105,
3,
41,
86,
150,
79,
104,
120,
74,
420,
39,
427,
40,
59,
24,
126,
202,
222,
145,
429,
43,
30,
38,
55,
10,
141,
85,
121,
203,
240,
96,
7,
64,
89,
127,
236,
117,
99,
54,
90,
57,
11,
21,
62,
82,
25,
267,
75,
111,
518,
76,
56,
20,
2,
61,
516,
80,
78,
555,
246,
133,
497,
33,
421,
58,
107,
92,
68,
13,
113,
235,
875,
35,
98,
102,
27,
14,
15,
72,
37,
16,
50,
517,
134,
223,
163,
91,
44,
17,
412,
18,
48,
23,
4,
29,
77,
6,
110,
67,
45,
161,
254,
112,
8,
106,
19,
498,
101,
5,
157,
83,
350,
154,
238,
115,
26,
142,
143
],
"champion_names": [
"aatrox",
"ahri",
"akali",
"alistar",
"amumu",
"anivia",
"annie",
"aphelios",
"ashe",
"aurelionsol",
"azir",
"bard",
"blitzcrank",
"brand",
"braum",
"caitlyn",
"camille",
"cassiopeia",
"chogath",
"corki",
"darius",
"diana",
"draven",
"drmundo",
"ekko",
"elise",
"evelynn",
"ezreal",
"fiddlesticks",
"fiora",
"fizz",
"galio",
"gangplank",
"garen",
"gnar",
"gragas",
"graves",
"hecarim",
"heimerdinger",
"illaoi",
"irelia",
"ivern",
"janna",
"jarvaniv",
"jax",
"jayce",
"jhin",
"jinx",
"kaisa",
"kalista",
"karma",
"karthus",
"kassadin",
"katarina",
"kayle",
"kayn",
"kennen",
"khazix",
"kindred",
"kled",
"kogmaw",
"leblanc",
"leesin",
"leona",
"lissandra",
"lucian",
"lulu",
"lux",
"malphite",
"malzahar",
"maokai",
"masteryi",
"missfortune",
"monkeyking",
"mordekaiser",
"morgana",
"nami",
"nasus",
"nautilus",
"neeko",
"nidalee",
"nocturne",
"nunu",
"olaf",
"orianna",
"ornn",
"pantheon",
"poppy",
"pyke",
"qiyana",
"quinn",
"rakan",
"rammus",
"reksai",
"renekton",
"rengar",
"riven",
"rumble",
"ryze",
"sejuani",
"senna",
"sett",
"shaco",
"shen",
"shyvana",
"singed",
"sion",
"sivir",
"skarner",
"sona",
"soraka",
"swain",
"sylas",
"syndra",
"tahmkench",
"taliyah",
"talon",
"taric",
"teemo",
"thresh",
"tristana",
"trundle",
"tryndamere",
"twistedfate",
"twitch",
"udyr",
"urgot",
"varus",
"vayne",
"veigar",
"velkoz",
"vi",
"viktor",
"vladimir",
"volibear",
"warwick",
"xayah",
"xerath",
"xinzhao",
"yasuo",
"yorick",
"yuumi",
"zac",
"zed",
"ziggs",
"zilean",
"zoe",
"zyra"
],
"reversed_order":
{
266:"Aatrox",
103:"Ahri",
84:"Akali",
12:"Alistar",
32:"Amumu",
34:"Anivia",
1:"Annie",
523:"Aphelios",
22:"Ashe",
136:"AurelionSol",
268:"Azir",
432:"Bard",
53:"Blitzcrank",
63:"Brand",
201:"Braum",
51:"Caitlyn",
164:"Camille",
69:"Cassiopeia",
31:"Chogath",
42:"Corki",
122:"Darius",
131:"Diana",
119:"Draven",
36:"DrMundo",
245:"Ekko",
60:"Elise",
28:"Evelynn",
81:"Ezreal",
9:"Fiddlesticks",
114:"Fiora",
105:"Fizz",
3:"Galio",
41:"Gangplank",
86:"Garen",
150:"Gnar",
79:"Gragas",
104:"Graves",
120:"Hecarim",
74:"Heimerdinger",
420:"Illaoi",
39:"Irelia",
427:"Ivern",
40:"Janna",
59:"JarvanIV",
24:"Jax",
126:"Jayce",
202:"Jhin",
222:"Jinx",
145:"Kaisa",
429:"Kalista",
43:"Karma",
30:"Karthus",
38:"Kassadin",
55:"Katarina",
10:"Kayle",
141:"Kayn",
85:"Kennen",
121:"Khazix",
203:"Kindred",
240:"Kled",
96:"KogMaw",
7:"Leblanc",
64:"LeeSin",
89:"Leona",
127:"Lissandra",
236:"Lucian",
117:"Lulu",
99:"Lux",
54:"Malphite",
90:"Malzahar",
57:"Maokai",
11:"MasterYi",
21:"MissFortune",
62:"MonkeyKing",
82:"Mordekaiser",
25:"Morgana",
267:"Nami",
75:"Nasus",
111:"Nautilus",
518:"Neeko",
76:"Nidalee",
56:"Nocturne",
20:"Nunu",
2:"Olaf",
61:"Orianna",
516:"Ornn",
80:"Pantheon",
78:"Poppy",
555:"Pyke",
246:"Qiyana",
133:"Quinn",
497:"Rakan",
33:"Rammus",
421:"RekSai",
58:"Renekton",
107:"Rengar",
92:"Riven",
68:"Rumble",
13:"Ryze",
113:"Sejuani",
235:"Senna",
875:"Sett",
35:"Shaco",
98:"Shen",
102:"Shyvana",
27:"Singed",
14:"Sion",
15:"Sivir",
72:"Skarner",
37:"Sona",
16:"Soraka",
50:"Swain",
517:"Sylas",
134:"Syndra",
223:"TahmKench",
163:"Taliyah",
91:"Talon",
44:"Taric",
17:"Teemo",
412:"Thresh",
18:"Tristana",
48:"Trundle",
23:"Tryndamere",
4:"TwistedFate",
29:"Twitch",
77:"Udyr",
6:"Urgot",
110:"Varus",
67:"Vayne",
45:"Veigar",
161:"Velkoz",
254:"Vi",
112:"Viktor",
8:"Vladimir",
106:"Volibear",
19:"Warwick",
498:"Xayah",
101:"Xerath",
5:"XinZhao",
157:"Yasuo",
83:"Yorick",
350:"Yuumi",
154:"Zac",
238:"Zed",
115:"Ziggs",
26:"Zilean",
142:"Zoe",
143:"Zyra"
}
} | 18.796897 | 151 | 0.385308 |
3e1b90f91dedd4a960cd891d3cb93aed75c01164 | 3,734 | py | Python | pyqc/backends/simulator/fullAmplitude/full_amplitude_sim.py | shunzgim/PyQC | 8bcbb5b6c5990cac578b2645c558a1fdac29bc1f | [
"MIT"
] | null | null | null | pyqc/backends/simulator/fullAmplitude/full_amplitude_sim.py | shunzgim/PyQC | 8bcbb5b6c5990cac578b2645c558a1fdac29bc1f | [
"MIT"
] | null | null | null | pyqc/backends/simulator/fullAmplitude/full_amplitude_sim.py | shunzgim/PyQC | 8bcbb5b6c5990cac578b2645c558a1fdac29bc1f | [
"MIT"
] | null | null | null | from pyqc.gates import *
from pyqc.backends.simulator.libs import fullAlib
class FullAmplitudeSimulator:
"""
"""
def __init__(self):
self.fullASim = fullAlib.fullASim()
def exec_circ(self, circ):
self.fullASim.flush(circ.qubit_nums)
for tup in circ.qgate_list:
gate, target, control = tup
if isinstance(gate, OneGate):
to_array = gate.matrix.A.reshape(4)
self.fullASim.applyOneGate(to_array,target[0], 0)
elif gate.name=="CNOT" or gate.name=="CNOT.dag" or gate.name=="CZ" or gate.name=="CZ.dag":
to_array = gate.cmatrix.A.reshape(4)
self.fullASim.applyControlOneGate(to_array, target[0], control[0], 0)
elif isinstance(gate ,CR) or isinstance(gate, CRDag):
to_array = gate.cmatrix.A.reshape(4)
self.fullASim.applyControlOneGate(to_array, target[0], control[0], 0)
elif isinstance(gate, CUOne):
to_array = gate.cmatrix.A.reshape(4)
self.fullASim.applyControlOneGate(to_array, target[0], control[0], 0)
elif gate.name == "Toffili":
h_array = H.matrix.A.reshape(4)
s_array = S.matrix.A.reshape(4)
s_d_array = SDag.matrix.A.reshape(4)
x_array = X.matrix.A.reshape(4)
self.fullASim.applyOneGate(h_array,target[0], 0)
self.fullASim.applyControlOneGate(s_array, target[0], control[1], 0)
self.fullASim.applyControlOneGate(x_array, control[1], control[0], 0)
self.fullASim.applyControlOneGate(s_d_array, target[0], control[1], 0)
self.fullASim.applyControlOneGate(x_array, control[1], control[0], 0)
self.fullASim.applyControlOneGate(s_array, target[0], control[0], 0)
self.fullASim.applyOneGate(h_array,target[0], 0)
elif gate.name == "Toffili.dag":
h_array = H.matrix.A.reshape(4)
s_array = S.matrix.A.reshape(4)
s_d_array = SDag.matrix.A.reshape(4)
x_array = X.matrix.A.reshape(4)
self.fullASim.applyOneGate(h_array,target[0], 0)
self.fullASim.applyControlOneGate(s_d_array, target[0], control[0], 0)
self.fullASim.applyControlOneGate(x_array, control[1], control[0], 0)
self.fullASim.applyControlOneGate(s_array, target[0], control[1], 0)
self.fullASim.applyControlOneGate(x_array, control[1], control[0], 0)
self.fullASim.applyControlOneGate(s_d_array, target[0], control[1], 0)
self.fullASim.applyOneGate(h_array,target[0], 0)
elif gate.name=="Swap" or gate.name=="Swap.dag":
to_array = X.matrix.A.reshape(4)
self.fullASim.applyControlOneGate(to_array, target[1], target[0], 0)
self.fullASim.applyControlOneGate(to_array, target[0], target[1], 0)
self.fullASim.applyControlOneGate(to_array, target[1], target[0], 0)
elif gate.name == "CMExp":
self.fullASim.applyConstantModExp(gate.a, gate.N, len(control))
else:
print(gate.name)
raise RuntimeError('error')
def getOneAmplitudeFromBinstring(self,binstring):
return self.fullASim.getOneAmplitudeFromBinstring(binstring)
def getExpectation(self, target):
return self.fullASim.getExpectation(target, len(target))
def getMeasureResultHandle(self, target):
return self.fullASim.getMeasureResultHandle(len(target))
| 51.150685 | 103 | 0.592394 |
de016b4c767ec8aba4458997a1432c68e557066d | 11,907 | py | Python | src/utils/python/arc/control/Services.py | fnevgeny/arc | e09294554745b33fccec0705b13c2e2488acd020 | [
"Apache-2.0"
] | null | null | null | src/utils/python/arc/control/Services.py | fnevgeny/arc | e09294554745b33fccec0705b13c2e2488acd020 | [
"Apache-2.0"
] | null | null | null | src/utils/python/arc/control/Services.py | fnevgeny/arc | e09294554745b33fccec0705b13c2e2488acd020 | [
"Apache-2.0"
] | null | null | null | from __future__ import print_function
from __future__ import absolute_import
from .ControlCommon import *
import sys
from .OSService import OSServiceManagement
from .OSPackage import OSPackageManagement
def complete_service_name(prefix, parsed_args, **kwargs):
arcconf = get_parsed_arcconf(parsed_args.config)
return ServicesControl(arcconf).get_all_services()
def add_services_to_parser(parser):
services_list = parser.add_mutually_exclusive_group(required=True)
services_list.add_argument('-a', '--as-configured', action='store_true',
help='Use information from arc.conf to get services list')
services_list.add_argument('-s', '--service', action='append',
help='Service name').completer = complete_service_name
class ServicesControl(ComponentControl):
__blocks_map = {
'arex': {
'package': 'arex',
'service': 'arc-arex'
},
'gridftpd': {
'package': 'gridftpd',
'service': 'arc-gridftpd'
},
'infosys/ldap': {
'package': 'infosys-ldap',
'service': 'arc-infosys-ldap'
},
'datadelivery-service': {
'package': 'datadelivery-service',
'service': 'arc-datadelivery-service'
},
'acix-scanner': {
'package': 'acix-scanner',
'service': 'arc-acix-scanner'
},
'acix-index': {
'package': 'acix-index',
'service': 'arc-acix-index'
},
'nordugridmap': {
'package': 'nordugridmap',
'service': None
}
}
def __init__(self, arcconfig):
self.logger = logging.getLogger('ARCCTL.Services')
if arcconfig is None:
self.logger.info('Controlling ARC CE Services is not possible without arc.conf.')
sys.exit(1)
self.arcconfig = arcconfig
self.sm = None
self.pm = None
self.package_base = 'nordugrid-arc'
def __get_pm_sm(self):
if self.sm is None:
self.pm = OSPackageManagement()
# check is arcctl-service package (that contains service control modules) installed via packet manager
if self.pm.is_installed(self.package_base + '-arcctl-service'):
self.sm = OSServiceManagement()
# epel6 and epel7 contains 'nordugrid-arc6' base to coexist with ARC5 release
elif self.pm.is_installed(self.package_base + '6-arcctl-service'):
self.package_base += '6'
self.sm = OSServiceManagement()
# ARC installed without known packet manager
else:
self.pm = None
self.sm = OSServiceManagement(ARC_LOCATION + '/etc/rc.d/init.d/')
return self.pm, self.sm
def __get_configured(self):
packages_needed = set()
services_needed = set()
services_all = set()
for block in self.__blocks_map:
bservice = self.__blocks_map[block]['service']
if bservice is not None:
services_all.add(bservice)
if self.arcconfig.check_blocks(block):
packages_needed.add(self.package_base + '-' + self.__blocks_map[block]['package'])
if bservice is not None:
services_needed.add(bservice)
return packages_needed, services_all, services_needed
def __packages_install(self, packages_needed):
pm, _ = self.__get_pm_sm()
if pm is None:
self.logger.info('ARC is installed from sources. Skipping OS packages management.')
return
install_list = []
for p in packages_needed:
if not pm.is_installed(p):
install_list.append(p)
if install_list:
self.logger.info('Installing the following needed packages: %s', ','.join(install_list))
pm.install(install_list)
def __services_stop(self, services_stop, sm):
for ds in services_stop:
self.logger.debug('Checking %s service is already stopped', ds)
if sm.is_active(ds): # if service not installed is_active also returns False
self.logger.info('Stopping %s service in accordance to arc.conf configuration', ds)
sm.stop(ds)
def __services_start(self, services_start, sm):
for ss in services_start:
self.logger.debug('Checking %s service is already started', ss)
if not sm.is_active(ss):
self.logger.info('Starting %s service in accordance to arc.conf configuration', ss)
sm.start(ss)
def __services_enable(self, services, sm, now=False):
for es in services:
self.logger.debug('Checking %s service is already enabled', es)
if not sm.is_enabled(es):
self.logger.info('Enabling %s service in accordance to arc.conf configuration', es)
sm.enable(es)
if now:
# start services as configured in current arc.conf
self.__services_start(services, sm)
def __services_disable(self, services, sm, now=False):
for ds in services:
self.logger.debug('Checking %s service is already disabled', ds)
if sm.is_enabled(ds): # if service not installed is_enabled also returns False
self.logger.info('Disabling %s service in accordance to arc.conf configuration', ds)
sm.disable(ds)
if now:
# stop services not configured in current arc.conf
self.__services_stop(services, sm)
def start_as_configured(self):
pm, sm = self.__get_pm_sm()
packages_needed, services_all, services_needed = self.__get_configured()
# ensure packages are installed
self.__packages_install(packages_needed)
# stop services not configured in current arc.conf
self.__services_stop(list(services_all - services_needed), sm)
# start services as configured in current arc.conf
self.__services_start(services_needed, sm)
def enable_as_configured(self, now=False):
pm, sm = self.__get_pm_sm()
packages_needed, services_all, services_needed = self.__get_configured()
# ensure packages are installed
self.__packages_install(packages_needed)
# disable services not configured in arc.conf
self.__services_disable(list(services_all - services_needed), sm, now)
# enable necessary services
self.__services_enable(services_needed, sm, now)
def list_services(self, args):
pm, sm = self.__get_pm_sm()
services = {}
for s in self.__blocks_map.values():
sname = s['service']
if sname is None:
continue
if sname in services:
continue
if pm is None:
installed = sm.is_installed(s['service'])
installed_str = 'Built from source' if installed else 'Not built'
else:
installed = pm.is_installed(self.package_base + '-' + s['package'])
installed_str = 'Installed' if installed else 'Not installed'
active = sm.is_active(s['service'])
enabled = sm.is_enabled(s['service'])
services[sname] = {
'name': sname,
'installed': installed,
'installed_str': installed_str,
'active': active,
'active_str': 'Running' if active else 'Stopped',
'enabled': enabled,
'enabled_str': 'Enabled' if enabled else 'Disabled'
}
if args.installed:
print(' '.join(sorted([s['name'] for s in [s for s in services.values() if s['installed']]])))
elif args.enabled:
print(' '.join(sorted([s['name'] for s in [s for s in services.values() if s['enabled']]])))
elif args.active:
print(' '.join(sorted([s['name'] for s in [s for s in services.values() if s['active']]])))
else:
for ss in sorted(list(services.values()), key=lambda k: k['name']):
print('{name:32} ({installed_str}, {enabled_str}, {active_str})'.format(**ss))
def control(self, args):
_, sm = self.__get_pm_sm()
if args.action == 'enable':
if args.as_configured:
self.enable_as_configured(args.now)
else:
self.__services_enable(args.service, sm, args.now)
elif args.action == 'disable':
if args.as_configured:
services = self.get_all_services()
else:
services = args.service
self.__services_disable(services, sm, args.now)
elif args.action == 'start':
if args.as_configured:
self.start_as_configured()
else:
self.__services_start(args.service, sm)
elif args.action == 'stop':
if args.as_configured:
services = self.get_all_services()
else:
services = args.service
self.__services_stop(services, sm)
elif args.action == 'restart':
if args.as_configured:
self.__services_stop(self.get_all_services(), sm)
self.start_as_configured()
else:
self.__services_stop(args.service, sm)
self.__services_start(args.service, sm)
pass
elif args.action == 'list':
self.list_services(args)
else:
self.logger.critical('Unsupported ARC services control action %s', args.action)
sys.exit(1)
def get_all_services(self):
services = set()
for s in self.__blocks_map.values():
if s['service'] is not None:
services.add(s['service'])
return list(services)
@staticmethod
def register_parser(root_parser):
services_ctl = root_parser.add_parser('service', help='ARC CE services control')
services_ctl.set_defaults(handler_class=ServicesControl)
services_actions = services_ctl.add_subparsers(title='Services Actions', dest='action',
metavar='ACTION', help='DESCRIPTION')
services_actions.required = True
services_enable = services_actions.add_parser('enable', help='Enable ARC CE services')
services_enable.add_argument('--now', help='Start the services just after enable', action='store_true')
add_services_to_parser(services_enable)
services_disable = services_actions.add_parser('disable', help='Disable ARC CE services')
services_disable.add_argument('--now', help='Stop the services just after disable', action='store_true')
add_services_to_parser(services_disable)
services_start = services_actions.add_parser('start', help='Start ARC CE services')
add_services_to_parser(services_start)
services_restart = services_actions.add_parser('restart', help='Restart ARC CE services')
add_services_to_parser(services_restart)
services_stop = services_actions.add_parser('stop', help='Stop ARC CE services')
add_services_to_parser(services_stop)
services_list = services_actions.add_parser('list', help='List ARC CE services and their states')
services_filter = services_list.add_mutually_exclusive_group(required=False)
services_filter.add_argument('-i', '--installed', help='Show only installed services', action='store_true')
services_filter.add_argument('-e', '--enabled', help='Show only enabled services', action='store_true')
services_filter.add_argument('-a', '--active', help='Show only running services', action='store_true')
| 43.298182 | 115 | 0.606282 |
563f0f2ae228157639dcc150f299ecebc65e4e5d | 885 | py | Python | src/sdk/python/test/test_schema_error_model.py | mstest123/self-managed-osdu_from_Daniel | 10a0c1d25804caa920bf18c6c7c1d8e711c63756 | [
"MIT"
] | 3 | 2021-11-05T20:52:54.000Z | 2021-11-23T23:02:29.000Z | src/sdk/python/test/test_schema_error_model.py | mstest123/self-managed-osdu_from_Daniel | 10a0c1d25804caa920bf18c6c7c1d8e711c63756 | [
"MIT"
] | 4 | 2021-11-05T19:57:08.000Z | 2021-12-14T13:59:04.000Z | src/sdk/python/test/test_schema_error_model.py | mstest123/self-managed-osdu_from_Daniel | 10a0c1d25804caa920bf18c6c7c1d8e711c63756 | [
"MIT"
] | 36 | 2021-08-31T20:58:25.000Z | 2022-03-30T17:02:57.000Z | # coding: utf-8
"""
self-managed-osdu
Rest API Documentation for Self Managed OSDU # noqa: E501
OpenAPI spec version: 0.11.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import OsduClient
from OsduClient.models.schema_error_model import SchemaErrorModel # noqa: E501
from OsduClient.rest import ApiException
class TestSchemaErrorModel(unittest.TestCase):
"""SchemaErrorModel unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testSchemaErrorModel(self):
"""Test SchemaErrorModel"""
# FIXME: construct object with mandatory attributes with example values
# model = OsduClient.models.schema_error_model.SchemaErrorModel() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
| 21.585366 | 87 | 0.703955 |
6efc453b256cf0f9828d72022e9cfc280ee76474 | 555 | py | Python | tests/base/views_test.py | Dafov/portfolio | fb3cb3721b944624c092d6046b0d9b005b7d9019 | [
"MIT"
] | null | null | null | tests/base/views_test.py | Dafov/portfolio | fb3cb3721b944624c092d6046b0d9b005b7d9019 | [
"MIT"
] | null | null | null | tests/base/views_test.py | Dafov/portfolio | fb3cb3721b944624c092d6046b0d9b005b7d9019 | [
"MIT"
] | null | null | null | import django
import os
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'portfolio.settings')
django.setup()
from django.test import TestCase, Client
from django.urls.base import reverse
class TestView(TestCase):
def test_home_view_template(self):
client = Client()
response = client.get(reverse('hero'))
self.assertEqual(response.status_code, 200)
def test_resume_view_template(self):
client = Client()
response = client.get(reverse('resume'))
self.assertEqual(response.status_code, 200) | 25.227273 | 69 | 0.708108 |
54ea112649ba175257d148e038ce21ecd0ecbdff | 6,537 | py | Python | algorithm/binary_search_examples.py | ganeshskudva/Algorithm_Templates | 8d180acb1fe885b2585c611f7edec14f26e45479 | [
"MIT"
] | 190 | 2019-05-08T06:50:30.000Z | 2022-03-31T15:09:26.000Z | algorithm/binary_search_examples.py | sm2774us/Python_Algorithm_Templates | d493139e0ed03695ff6a111ddcffef44b2cfc353 | [
"MIT"
] | null | null | null | algorithm/binary_search_examples.py | sm2774us/Python_Algorithm_Templates | d493139e0ed03695ff6a111ddcffef44b2cfc353 | [
"MIT"
] | 61 | 2019-06-25T11:07:42.000Z | 2022-03-25T07:39:31.000Z | import bisect
# [367] https://leetcode.com/problems/valid-perfect-square/
# Given a positive integer num, write a function which returns True if num is a perfect square else False.
#
# standard scenario
def is_perfect_square(num: 'int') -> 'bool':
low, high = 1, num // 2
while low <= high:
mid = low + (high - low) // 2
if mid * mid == num:
return True
elif mid * mid < num:
low = mid + 1
else:
high = mid - 1
return False
# [33] https://leetcode.com/problems/search-in-rotated-sorted-array/
# an array sorted in ascending order is rotated at some pivot unknown,
# given a target value to search. If found in the array return its index,
#
# variation with rotated sort
# other clever way: https://leetcode.com/problems/search-in-rotated-sorted-array/discuss/14435/Clever-idea-making-it-simple
def search_in_rotated_sorted_array(nums: 'List[int]', target: int) -> int:
lo, hi = 0, len(nums) - 1
while lo <= hi:
if nums[lo] == target:
return lo
if nums[hi] == target:
return hi
mid = lo + (hi - lo) // 2
if nums[mid] == target:
return mid
# find the in-order side, and compare in this side
if nums[lo] < nums[mid]:
if nums[lo] < target < nums[mid]:
hi = mid - 1
else:
lo = mid + 1
else:
if nums[mid] < target < nums[hi]:
lo = mid + 1
else:
hi = mid - 1
return -1
# https://leetcode.com/problems/search-in-rotated-sorted-array/discuss/14419/Pretty-short-C%2B%2BJavaRubyPython
# So I have the three checks (nums[0] <= target), (target <= nums[i]) and (nums[i] < nums[0]), and I want to know
# whether exactly two of them are true. They can't all be true or all be false (check it), so I just need to
# distinguish between "two true" and "one true". Parity is enough for that, so instead of adding them I xor them
#
# variation with xor
def search_in_rotated_sorted_array2(nums, target):
lo, hi = 0, len(nums) - 1
while lo < hi:
mid = (lo + hi) / 2
if (nums[0] > target) ^ (nums[0] > nums[mid]) ^ (target > nums[mid]):
lo = mid + 1
else:
hi = mid
return lo if target in nums[lo:lo + 1] else -1
# [374] https://leetcode.com/problems/guess-number-higher-or-lower/
# You have to guess which number I picked, I'll tell you whether the number is higher or lower.
#
# variation with tri-partition search
def guessNumber(n):
# fake API
def guess(num):
return 0
low, high = 1, n
while low <= high:
mid1 = low + (high - low) // 3
mid2 = high - (high - low) // 3
res1, res2 = guess(mid1), guess(mid2)
if res1 == 0:
return mid1
if res2 == 0:
return mid2
elif res1 < 0:
high = mid1 - 1
elif res2 > 0:
low = mid2 + 1
else:
low, high = mid1 + 1, mid2 - 1
return -1
# [374] https://leetcode.com/problems/guess-number-higher-or-lower/
# You have to guess which number I picked, I'll tell you whether the number is higher or lower.
#
# variation with construct a sorted iterator
def guessNumber1(n):
# fake API
def guess(num):
return 0
# construct a sorted iterator
class C: __getitem__ = lambda _, i: -guess(i)
return bisect.bisect_right(C(), -1, 1, n)
# [683] https://leetcode.com/problems/k-empty-slots/
# Also given an integer k, you need to output in which day there exists two flowers in the status of blooming
def kEmptySlots(flowers: 'List[int]', k: int) -> int:
blooms = []
for i, f in enumerate(flowers):
idx = bisect.bisect_left(blooms, f)
if idx > 0:
if f - blooms[idx - 1] - 1 == k:
return i + 1
if idx < len(blooms):
if blooms[idx] - f - 1 == k:
return i + 1
# has already done binary search
blooms.insert(idx, f)
return -1
# [635] https://leetcode.com/problems/design-log-storage-system
# Design a log storage system to implement the following functions:
# 1. Given a log's unique id and timestamp, store the log in your storage system.
# 2. Return the id of logs whose timestamps are within the range from start to end.
#
# variation with application design
class LogSystem:
def __init__(self):
self.logs = []
self.start = '2000:01:01:00:00:00'
self.end = '2017:12:31:23:59:59'
self.gra_idx = {"Year": 4, "Month": 7, "Day": 10, "Hour": 13, "Minute": 16, "Second": 19}
# O(log(n)) to binary search, O(n) to insert, so skip list or black-red tree is a better solution
def put(self, id: int, timestamp: str) -> None:
bisect.insort_left(self.logs, (timestamp, id))
# O(log(n)) to binary search
def retrieve(self, s: str, e: str, gra: str) -> 'List[int]':
idx = self.gra_idx[gra]
lo = bisect.bisect_left(self.logs, (s[:idx] + self.start[idx:], 0))
hi = bisect.bisect_right(self.logs, (e[:idx] + self.end[idx:], 300))
return [log[1] for log in self.logs[lo:hi]]
# [240] https://leetcode.com/problems/search-a-2d-matrix-ii/
# Write an efficient algorithm that searches for a value in matrix which sorted in each row and column
#
# variation with in matrix, not the most efficient solution
def searchMatrix(matrix, target):
if not matrix:
return False
def binary_search(start, vertical):
lo = start
hi = len(matrix[0]) - 1 if vertical else len(matrix) - 1
while lo <= hi:
mid = (lo + hi) // 2
if vertical: # searching a column
if matrix[start][mid] < target:
lo = mid + 1
elif matrix[start][mid] > target:
hi = mid - 1
else:
return True
else: # searching a row
if matrix[mid][start] < target:
lo = mid + 1
elif matrix[mid][start] > target:
hi = mid - 1
else:
return True
return False
# iterate over matrix diagonals starting in bottom left.
for i in range(min(len(matrix), len(matrix[0]))):
vertical_found = binary_search(i, True)
horizontal_found = binary_search(i, False)
if vertical_found or horizontal_found:
return True
return False
| 34.225131 | 123 | 0.579165 |
124a8ec4f2b1d70a9bd559a43e1203e86c99b706 | 18,948 | py | Python | dldp/patch_extract/Patch_Extractor.py | 3dimaging/DeepLearningCamelyon_II | 1d48032fd6e229190a3f95264f7871cb348cc294 | [
"CC0-1.0"
] | 1 | 2022-01-06T18:35:52.000Z | 2022-01-06T18:35:52.000Z | dldp/patch_extract/Patch_Extractor.py | DIDSR/dldp | 2749f86f98280220617b4ffcd2597f8a087c9332 | [
"CC0-1.0"
] | null | null | null | dldp/patch_extract/Patch_Extractor.py | DIDSR/dldp | 2749f86f98280220617b4ffcd2597f8a087c9332 | [
"CC0-1.0"
] | 2 | 2021-01-16T02:29:40.000Z | 2022-01-07T14:15:39.000Z | #!/home/wli/env python3
# -*- coding: utf-8 -*-
"""
Title: patch extractor
======================
Created: 10-31-2019
Python-Version: 3.5, 3.6
Description:
------------
This libray module provides functions for patch extraction.
"""
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from PIL import Image
import os.path as osp
import openslide
from pathlib import Path
# scipy.misc.imsave is deprecated! imsave is deprecated in SciPy 1.0.0,
# and will be removed in 1.2.0. Use imageio.imwrite instead.
#from scipy.misc import imsave as saveim
from imageio import imwrite as saveim
from skimage.filters import threshold_otsu
import glob
# before importing HDFStore, make sure 'tables' is installed by pip3 install tables
from pandas import HDFStore
from openslide.deepzoom import DeepZoomGenerator
import cv2 as cv2
from skimage import io as skio
import xml.etree.ElementTree as et
import math
import os
# go through all the file
def slides_for_patch_extraction(slide_folder, file_ext):
"""
This function is to generate a list for all the slides in a folder.
:param slide_folder: a folder storing WSI images.
:type slide_folder: string
:param file_ext: file type, for exaple, "tif"
:type file_ext: string
:return: slide_paths
:rtype: a list including all the obsolute paths of the slides from a
folder.
"""
slide_paths = glob.glob(osp.join(slide_folder, '*.%s' % file_ext))
slide_paths.sort()
return slide_paths
def hsv_thumbnail(slide):
"""
generate a HSV thumbnail image for WSI image with downsample of 32.
The ratio of length and width of the image is still the same as the
level 0 image.
:param slide: the initialized slide oject from openslide
:type slide: object
:return: hsv image
:rtype: array
"""
thumbnail = slide.get_thumbnail(
(slide.dimensions[0] / 32, slide.dimensions[1] / 32))
thum = np.array(thumbnail)
hsv_image = cv2.cvtColor(thum, cv2.COLOR_RGB2HSV)
return hsv_image
def tissue_patch_threshold(slide):
"""
get a threshold for tissue region
:param slide: the initialized slide oject from openslide
:type slide: objec
:returns: threshold
:rtype: list
"""
hsv_image = hsv_thumbnail(slide)
h, s, v = cv2.split(hsv_image)
hthresh = threshold_otsu(h)
sthresh = threshold_otsu(s)
vthresh = threshold_otsu(v)
# be min value for v can be changed later
minhsv = np.array([hthresh, sthresh, 70], np.uint8)
maxhsv = np.array([180, 255, vthresh], np.uint8)
thresh = [minhsv, maxhsv]
return thresh
def bbox_generation_tissue(slide):
"""
generate a bounding box for tissue region in a WSI image
:param slide: the initialized slide oject from openslide
:type slide: object
:returns: bbox_tissue, the coordinates for the four corners of
the tissue region.
:rtype: tuple
"""
hsv_image = hsv_thumbnail(slide)
# h, s, v = cv2.split(hsv_image)
# hthresh = threshold_otsu(h)
# sthresh = threshold_otsu(s)
# vthresh = threshold_otsu(v)
# be min value for v can be changed later
# minhsv = np.array([hthresh, sthresh, 70], np.uint8)
# maxhsv = np.array([180, 255, vthresh], np.uint8)
# thresh = [minhsv, maxhsv]
thresh = tissue_patch_threshold(slide)
print(thresh)
# extraction the countor for tissue
rgbbinary = cv2.inRange(hsv_image, thresh[0], thresh[1])
print(rgbbinary.shape)
# old version of cv2.findContours gives three returns
_, contours, _ = cv2.findContours(
rgbbinary, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
bboxtcols = ['xmin', 'xmax', 'ymin', 'ymax']
bboxt = pd.DataFrame(columns=bboxtcols)
for c in contours:
(x, y, w, h) = cv2.boundingRect(c)
bboxt = bboxt.append(
pd.Series([x, x + w, y, y + h], index=bboxtcols),
ignore_index=True)
bboxt = pd.DataFrame(bboxt)
xxmin = list(bboxt['xmin'].get_values())
xxmax = list(bboxt['xmax'].get_values())
yymin = list(bboxt['ymin'].get_values())
yymax = list(bboxt['ymax'].get_values())
bbox_tissue = (math.floor(np.min(xxmin) * 32), math.floor(
np.max(xxmax) * 32), math.floor(
np.min(yymin) * 32), math.floor(np.max(yymax) * 32))
print(str(bbox_tissue))
return bbox_tissue
def bbox_generation_tumor(single_slide_for_patch_extraction, anno_dir):
"""
generate a bounding box for tumor region. If several regions exist,
a big bounding box will be generated to include all the regions.
:param single_slide_for_patch_extraction: a slide for path extraction, a path
:type single_slide_for_patch_extraction: string
:param anno_dir: annotations files
:type anno_dir: list
:returns: bbox_tumor, the coordinates for the four corners of
the tumor region.
:rtype: tuple
"""
Anno_pathxml = osp.join(anno_dir, osp.basename(
single_slide_for_patch_extraction).replace('.tif', '.xml'))
# slide = openslide.open_slide(single_slide_for_patch_extraction)
annotations = convert_xml_df(str(Anno_pathxml))
x_values = list(annotations['X'].get_values())
y_values = list(annotations['Y'].get_values())
bbox_tumor = (math.floor(np.min(x_values)), math.floor(np.max(x_values)),
math.floor(
np.min(y_values)), math.floor(
np.max(y_values)))
return bbox_tumor
def convert_xml_df(file):
"""
convert the xml file to a list of coordinates
:param file: path for an xml file
:returns: coordinates
:rtype: tuple including all the coordinates
"""
parseXML = et.parse(file)
root = parseXML.getroot()
dfcols = ['Name', 'Order', 'X', 'Y']
df_xml = pd.DataFrame(columns=dfcols)
for child in root.iter('Annotation'):
for coordinate in child.iter('Coordinate'):
Name = child.attrib.get('Name')
Order = coordinate.attrib.get('Order')
X_coord = float(coordinate.attrib.get('X'))
# X_coord = X_coord - 30000
# X_coord = ((X_coord)*dims[0])/Ximageorg
Y_coord = float(coordinate.attrib.get('Y'))
# Y_coord = Y_coord - 155000
# Y_coord = ((Y_coord)*dims[1])/Yimageorg
df_xml = df_xml.append(
pd.Series([Name, Order, X_coord, Y_coord], index=dfcols), ignore_index=True)
df_xml = pd.DataFrame(df_xml)
return (df_xml)
def random_crop_tumor(slide, truth, thresh, crop_size, bbox):
"""
The major function to extract image patches from tumor WSI images together
with ground truth. This function is used for normal or tumor patch
extraction with its ground truth.
:param slide: slide object created by openslide
:type slide: object
:param truth: ground truth object created by openslide
:type param: object
:param thresh: threshold for tissue region
:type thresh: list
:param crop_size: the size of image patch to be generated
:type crop_size: list
:param bbox: the coordinates of a bounding box
:type bbox: tuple
:returns: rgb_image, rgb_binary, rgb_mask, index
:rtype: tuple
:note: The "bbox" will the bbox for tissue region if extract normal
patches; The "bbox" will be the bbox for tumor region if
extract tumor patches.
"""
# width, height = slide.level_dimensions[0]
dy, dx = crop_size
x = np.random.randint(bbox[0], bbox[1] - dx + 1)
y = np.random.randint(bbox[2], bbox[3] - dy + 1)
# x = np.random.choice(range(width - dx + 1), replace = False)
# y = np.random.choice(range(height - dy +1), replace = False)
index = [x, y]
# print(index)
# cropped_img = (image[x:(x+dx), y:(y+dy),:], rgb_binary[x:(x+dx), y:(y+dy)], mask[x:(x+dx), y:(y+dy)])
rgb_image = slide.read_region((x, y), 0, crop_size)
rgb_mask = truth.read_region((x, y), 0, crop_size)
rgb_mask = (cv2.cvtColor(np.array(rgb_mask),
cv2.COLOR_RGB2GRAY) > 0).astype(int)
rgb_array = np.array(rgb_image)
hsv_rgbimage = cv2.cvtColor(rgb_array, cv2.COLOR_RGB2HSV)
rgb_binary = cv2.inRange(hsv_rgbimage, thresh[0], thresh[1])
# cropped_img = image[x:(x+dx), y:(y+dy),:]
# cropped_binary = rgb_binary[x:(x+dx), y:(y+dy)]
# cropped_mask = mask[x:(x+dx), y:(y+dy)]
# print(index)
return (rgb_image, rgb_binary, rgb_mask, index)
# random_crop2 is not nessary.
def random_crop2(slide, truth, thresh, crop_size, bboxt):
    """Sample one random patch plus ground truth from the tissue bbox.

    Near-duplicate of :func:`random_crop_tumor` kept for compatibility;
    the only behavioural difference is that it prints the sampled
    coordinates before returning.

    :param slide: WSI object generated using openslide
    :param truth: ground-truth object generated using openslide
    :param thresh: lower/upper HSV bounds delimiting tissue
    :param crop_size: (height, width) of the patch to extract
    :param bboxt: bounding box (x_min, x_max, y_min, y_max) of the tissue
    :returns: (rgb_image, rgb_binary, rgb_mask, index)
    :rtype: tuple
    """
    patch_h, patch_w = crop_size
    # Choose a top-left corner keeping the whole patch inside the bbox.
    left = np.random.randint(bboxt[0], bboxt[1] - patch_w + 1)
    top = np.random.randint(bboxt[2], bboxt[3] - patch_h + 1)
    location = [left, top]
    rgb_image = slide.read_region((left, top), 0, crop_size)
    mask_patch = truth.read_region((left, top), 0, crop_size)
    # Any non-zero ground-truth pixel counts as tumor.
    rgb_mask = (cv2.cvtColor(np.array(mask_patch),
                             cv2.COLOR_RGB2GRAY) > 0).astype(int)
    hsv_patch = cv2.cvtColor(np.array(rgb_image), cv2.COLOR_RGB2HSV)
    rgb_binary = cv2.inRange(hsv_patch, thresh[0], thresh[1])
    # Debug output retained from the original implementation.
    print(location)
    return (rgb_image, rgb_binary, rgb_mask, location)
def random_crop_normal(slide, thresh, crop_size, bbox_tissue):
    """Sample one random patch from a normal (tumor-free) WSI.

    :param slide: WSI object generated by openslide
    :param thresh: lower/upper HSV bounds delimiting tissue
    :param crop_size: (height, width) of the patch to extract
    :param bbox_tissue: bounding box (x_min, x_max, y_min, y_max) of tissue
    :returns: (rgb_image, rgb_binary, index) where index is the sampled
        [x, y] top-left coordinate at level 0
    :rtype: tuple
    """
    patch_h, patch_w = crop_size
    # Choose a top-left corner keeping the whole patch inside the bbox.
    left = np.random.randint(bbox_tissue[0], bbox_tissue[1] - patch_w + 1)
    top = np.random.randint(bbox_tissue[2], bbox_tissue[3] - patch_h + 1)
    location = [left, top]
    rgb_image = slide.read_region((left, top), 0, crop_size)
    # Tissue mask obtained by thresholding the patch in HSV space.
    hsv_patch = cv2.cvtColor(np.array(rgb_image), cv2.COLOR_RGB2HSV)
    rgb_binary = cv2.inRange(hsv_patch, thresh[0], thresh[1])
    return (rgb_image, rgb_binary, location)
def testduplicates(list):
"""
get rid of duplicate entries
:param list: the list for duplication checking
:type list: list
:returns: the list with no duplication
:rtype: list
"""
for each in list:
count = list.count(each)
if count > 1:
z = 0
else:
z = 1
return z
def create_folder(single_slide_for_patch_extraction, destination_folder):
    """Create (if needed) the per-slide folder that will hold its patches.

    All image patches extracted from one slide are stored together in a
    folder named after the slide file with its extension stripped.

    :param single_slide_for_patch_extraction: path of the slide whose
        patches are being extracted
    :type single_slide_for_patch_extraction: str
    :param destination_folder: root directory for all extracted patches
    :returns: path of the per-slide folder
    :rtype: str
    """
    print(single_slide_for_patch_extraction)
    print(osp.splitext(osp.basename(single_slide_for_patch_extraction)))
    # Folder name = slide file name without extension.
    slide_stem = osp.splitext(
        osp.basename(single_slide_for_patch_extraction))[0]
    print(slide_stem)
    slide_folder = osp.join(destination_folder, slide_stem)
    print(slide_folder)
    try:
        os.makedirs(slide_folder)
    except Exception:
        # Folder already exists (or cannot be created); reuse it.
        print('folder exist, skipped')
    return slide_folder
# sampletotal = pd.DataFrame([])
def extract_normal_patches_from_normal_slide(slide, thresh, crop_size,
                                             bbox_tissue,
                                             des_folder_normal_patches,
                                             single_slide_for_patch_extraction):
    """Draw 1000 random crops from a normal slide and save the tissue ones.

    A crop is kept only if more than 10% of its pixels fall inside the
    HSV tissue threshold; kept crops are written to
    ``des_folder_normal_patches`` with an ``_N`` suffix.

    :param slide: WSI object generated by openslide
    :param thresh: lower/upper HSV bounds delimiting tissue
    :param crop_size: (height, width) of the patches to extract
    :param bbox_tissue: bounding box of the tissue region
    :param des_folder_normal_patches: folder receiving the saved patches
    :type des_folder_normal_patches: str
    :param single_slide_for_patch_extraction: path of the WSI slide
    :type single_slide_for_patch_extraction: str
    :returns: None (patches are saved to disk as a side effect)
    """
    slide_stem = osp.splitext(
        osp.basename(single_slide_for_patch_extraction))[0]
    tissue_min = crop_size[0] * crop_size[1] * 0.1
    for _ in range(0, 1000):
        nr = random_crop_normal(slide, thresh, crop_size, bbox_tissue)
        if cv2.countNonZero(nr[1]) > tissue_min:
            # nmask is unused; kept from the original implementation where
            # a commented-out call saved an all-zero mask alongside.
            nmask = np.zeros((256, 256))
            saveim('%s/%s_%d_%d_N.png' % (des_folder_normal_patches,
                                          slide_stem, nr[2][0], nr[2][1]),
                   nr[0])
def extract_tumor_patches_from_tumor_slide(slide, ground_truth, crop_size,
                                           thresh, bbox_tumor,
                                           des_folder_tumor_patches,
                                           des_folder_tumor_patches_mask,
                                           single_slide_for_patch_extraction):
    """Draw 1000 random crops from a tumor slide and save the tumor ones.

    A crop is kept only if more than 50% of its ground-truth pixels are
    tumor; kept crops are written with a ``_T`` suffix and their masks
    with a ``_T_mask`` suffix.

    :param slide: WSI object generated by openslide
    :param ground_truth: ground-truth object generated by openslide
    :param crop_size: (height, width) of the patches to extract
    :param thresh: lower/upper HSV bounds delimiting tissue
    :param bbox_tumor: bounding box of the tumor region
    :param des_folder_tumor_patches: folder receiving the saved patches
    :param des_folder_tumor_patches_mask: folder receiving the saved masks
    :param single_slide_for_patch_extraction: path of the WSI slide
    :type single_slide_for_patch_extraction: str
    :returns: None (patches and masks are saved to disk as a side effect)
    """
    slide_stem = osp.splitext(
        osp.basename(single_slide_for_patch_extraction))[0]
    tumor_min = crop_size[0] * crop_size[1] * 0.5
    for _ in range(0, 1000):
        r = random_crop_tumor(slide, ground_truth,
                              thresh, crop_size, bbox_tumor)
        if cv2.countNonZero(r[2]) > tumor_min:
            saveim('%s/%s_%d_%d_T.png' % (des_folder_tumor_patches,
                                          slide_stem, r[3][0], r[3][1]),
                   r[0])
            skio.imsave('%s/%s_%d_%d_T_mask.png' % (
                des_folder_tumor_patches_mask, slide_stem,
                r[3][0], r[3][1]), r[2])
def extract_normal_patches_from_tumor_slide(slide, ground_truth, crop_size, thresh, bbox_tissue, des_folder_normal_patches, single_slide_for_patch_extraction):
    """Draw 1000 random crops from a tumor slide and save the normal ones.

    A crop is kept only if more than 10% of its pixels are tissue AND its
    ground-truth mask is entirely tumor-free; kept crops are written with
    an ``_N`` suffix.

    :param slide: WSI object generated by openslide
    :param ground_truth: ground-truth object generated by openslide
    :param crop_size: (height, width) of the patches to extract
    :param thresh: lower/upper HSV bounds delimiting tissue
    :param bbox_tissue: bounding box of the tissue region
    :param des_folder_normal_patches: folder receiving the saved patches
    :type des_folder_normal_patches: str
    :param single_slide_for_patch_extraction: path of the WSI slide
    :type single_slide_for_patch_extraction: str
    :returns: None (patches are saved to disk as a side effect)
    """
    slide_stem = osp.splitext(
        osp.basename(single_slide_for_patch_extraction))[0]
    tissue_min = crop_size[0] * crop_size[1] * 0.1
    for _ in range(0, 1000):
        r = random_crop_tumor(slide, ground_truth, thresh,
                              crop_size, bbox_tissue)
        # Keep only tissue-rich crops with zero tumor pixels in the mask.
        if cv2.countNonZero(r[1]) > tissue_min and cv2.countNonZero(r[2]) == 0:
            saveim('%s/%s_%d_%d_N.png' % (des_folder_normal_patches,
                                          slide_stem, r[3][0], r[3][1]),
                   r[0])
| 36.368522 | 159 | 0.642337 |
8ed9d2d00987414cb0cd50cef4667f9616de2dd4 | 156 | py | Python | CONFIG/SavedPostAPP/apps.py | Brktrlw/Instagram-Clone-Django-and-React | 6390db2133d3beae2097a680097e170bd4fbcabe | [
"MIT",
"PostgreSQL",
"Unlicense"
] | null | null | null | CONFIG/SavedPostAPP/apps.py | Brktrlw/Instagram-Clone-Django-and-React | 6390db2133d3beae2097a680097e170bd4fbcabe | [
"MIT",
"PostgreSQL",
"Unlicense"
] | null | null | null | CONFIG/SavedPostAPP/apps.py | Brktrlw/Instagram-Clone-Django-and-React | 6390db2133d3beae2097a680097e170bd4fbcabe | [
"MIT",
"PostgreSQL",
"Unlicense"
] | null | null | null | from django.apps import AppConfig
class SavedpostappConfig(AppConfig):
    """Django application configuration for the SavedPostAPP app."""
    # Primary-key field type used for models that don't declare one.
    default_auto_field = 'django.db.models.BigAutoField'
    # Importable name of the application package.
    name = 'SavedPostAPP'
| 22.285714 | 56 | 0.775641 |
6c9f863391c22d584a34243766731ca5e8d95055 | 720 | py | Python | src/Python/car_detection.py | codemaniac011/graph_algos | 845f8387d0f26873138df7e27c4d0752926de4a4 | [
"MIT"
] | 1 | 2022-01-19T03:26:53.000Z | 2022-01-19T03:26:53.000Z | src/Python/car_detection.py | mahimonga/graph_algos | 845f8387d0f26873138df7e27c4d0752926de4a4 | [
"MIT"
] | null | null | null | src/Python/car_detection.py | mahimonga/graph_algos | 845f8387d0f26873138df7e27c4d0752926de4a4 | [
"MIT"
] | 2 | 2021-10-21T23:26:23.000Z | 2021-10-30T18:48:14.000Z | import cv2
# image (NOTE(review): img_file is assigned but never used below)
img_file = 'Car Image.jpg'
# Alternative input videos, kept commented for quick switching:
#video = cv2.VideoCapture('Cars_video.mp4')
#video = cv2.VideoCapture('Traffic.mp4')
video = cv2.VideoCapture('Real_time_Video.mp4')
# pre-trained car classifier (Haar cascade definition file)
classifier_file = 'cars.xml'
car_tracker = cv2.CascadeClassifier(classifier_file)
# Process the video frame by frame until it runs out of frames.
while True:
    (read_succ, frame) = video.read() # reading the current frame
    if read_succ:
        # Haar cascades operate on grayscale images.
        gray_frame = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
    else:
        break
    cars = car_tracker.detectMultiScale(gray_frame)
    # Draw a red (BGR (0, 0, 255)) rectangle around each detection.
    for (x, y, w, h) in cars:
        cv2.rectangle(frame, (x, y), (x+w, y+h), (0, 0, 255), 2)
    #display
    cv2.imshow("Car Detector", frame)
    cv2.waitKey(1) # don't autoclose
53e3ea89d095de29f73c6a1baf69a1dcdf8473c0 | 13 | py | Python | projects/isort/test.py | quinn-dougherty/python-on-nix | 910d3f6554acd4a4ef0425ebccd31104dccb283c | [
"Unlicense"
] | 25 | 2021-10-30T19:54:59.000Z | 2022-03-29T06:11:02.000Z | projects/isort/test.py | quinn-dougherty/python-on-nix | 910d3f6554acd4a4ef0425ebccd31104dccb283c | [
"Unlicense"
] | 21 | 2021-10-19T01:09:38.000Z | 2022-03-24T16:08:53.000Z | projects/isort/test.py | quinn-dougherty/python-on-nix | 910d3f6554acd4a4ef0425ebccd31104dccb283c | [
"Unlicense"
] | 3 | 2022-01-25T20:25:13.000Z | 2022-03-08T02:58:50.000Z | import isort
| 6.5 | 12 | 0.846154 |
db74a4b64e0b57490914e743219260669f950338 | 1,072 | py | Python | ArmRuntime/generateReplacements.py | daeken/GrinningSoul | d35ac197dade35d8a831ffb10fffd2ec93a791ea | [
"Apache-2.0"
] | 94 | 2020-11-22T00:35:08.000Z | 2022-02-22T15:45:41.000Z | ArmRuntime/generateReplacements.py | CrackerCat/GrinningSoul | d35ac197dade35d8a831ffb10fffd2ec93a791ea | [
"Apache-2.0"
] | 1 | 2021-05-14T14:18:23.000Z | 2021-05-15T16:03:24.000Z | ArmRuntime/generateReplacements.py | CrackerCat/GrinningSoul | d35ac197dade35d8a831ffb10fffd2ec93a791ea | [
"Apache-2.0"
] | 8 | 2020-11-22T10:23:26.000Z | 2022-02-22T08:34:46.000Z | import glob
# NOTE: Python 2 code (uses the `file()` builtin and `print` statements).
# Scans the ARM runtime sources for "/// REPLACE <name>" markers and
# generates C++ lookup tables mapping original symbols to replacements.
with file('../Runtime/replacements.generated.h', 'w') as fp:
    nfuncs = []  # plain C function names marked for replacement
    sels = []    # (objc selector, selector with ':' -> '_') pairs
    for fn in glob.glob('*.cpp') + glob.glob('*.m') + glob.glob('*.mm') + glob.glob('*.h'):
        source = file(fn, 'r').read().split('\n')
        for line in source:
            line = line.strip()
            if not line.startswith('/// REPLACE'):
                continue
            # Everything after the marker; a single token is expected.
            args = line[11:].strip().split(' ')
            if len(args) == 1:
                if ':' not in args[0]:
                    nfuncs.append(args[0])
                else:
                    # Objective-C selector: colons are not valid in C
                    # identifiers, so map them to underscores.
                    sels.append((args[0], args[0].replace(':', '_')))
            else:
                print 'Unhandled replacement:', args
    # Emit the generated C++ tables consumed by the runtime.
    print >>fp, 'vector<tuple<string, string>> armReplacements = {'
    for func in nfuncs:
        print >>fp, '{ "_%s", "replace_%s" }, ' % (func, func)
    print >>fp, '};'
    print >>fp, 'vector<tuple<const char*, const char*>> armSelReplacements = {'
    for sel, func in sels:
        print >>fp, '{ "%s", "replace_%s" }, ' % (sel, func)
    print >>fp, '};'
9026f941d105d33639451db1553a5526747557d0 | 1,214 | py | Python | example/first_example/model/userModel.py | suuperhu/Pyside2MVCFramework | c28bd0fbb6b53bb2cdad8d0723b4251fc93319d6 | [
"MIT"
] | 1 | 2021-03-11T13:34:41.000Z | 2021-03-11T13:34:41.000Z | example/first_example/model/userModel.py | suuperhu/Pyside2MVCFramework | c28bd0fbb6b53bb2cdad8d0723b4251fc93319d6 | [
"MIT"
] | null | null | null | example/first_example/model/userModel.py | suuperhu/Pyside2MVCFramework | c28bd0fbb6b53bb2cdad8d0723b4251fc93319d6 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""
# @SoftwareIDE : PyCharm2020Pro
# @ProjectName : PySide2MVCFramework
# @FileName : userModel.py
# @Author : 胡守杰
# @Email : 2839414139@qq.com
# @ZhFileDescription :
# @EnFileDescription :
"""
from PySide2.QtCore import Signal, QObject
from pyside2mvcframework.core.model import Model
class UserModel(Model):
    """Model holding a user's name and age, emitting Qt signals on change."""
    # Current attribute values; updated only through the setters below.
    name = None
    age = None
    # Qt signals emitted after the corresponding attribute is assigned.
    # NOTE(review): the `QObject` annotations look inaccurate (Signal() is
    # not a QObject), but annotations have no runtime effect here.
    nameChanged: QObject = Signal()
    ageChanged: QObject = Signal()
    def getName(self):
        """Return the current name."""
        return self.name
    def getAge(self):
        """Return the current age."""
        return self.age
    def setName(self, value):
        """Store a new name, then emit ``nameChanged``."""
        print("设置name, 发送信号")
        self.name = value
        self.nameChanged.emit()
    def setAge(self, value):
        """Store a new age, then emit ``ageChanged``."""
        print("设置age, 发送信号")
        self.age = value
        self.ageChanged.emit()
# Manual smoke test: wire callbacks to the change signals, then trigger them.
if __name__ == '__main__':
    # NOTE(review): the string has no "{filename}" placeholder (it reads
    # "(unknown)"), so .format(filename=...) is a no-op — likely mangled.
    print("unit test from (unknown)".format(filename=__file__))
    def showName(name):
        print(name)
    def showAge(age):
        print(age)
    model = UserModel()
    # NOTE(review): both callbacks are connected to ageChanged; the first
    # was presumably meant for nameChanged — confirm intent.
    model.ageChanged.connect(lambda: showName(model.name))
    model.ageChanged.connect(lambda: showAge(model.age))
    model.setName("hushoujie")
    model.setAge("24")
81db2ed40c7b8229615eaf0791c50362dc103ee2 | 27 | py | Python | test/plugins/blog/__init__.py | Kokemomo/Kokemomo | 614504dc49b2f509b25c9ec2229f4438db73bab7 | [
"MIT"
] | 4 | 2016-06-12T13:19:23.000Z | 2020-01-29T09:46:15.000Z | test/plugins/blog/__init__.py | Kokemomo/Kokemomo | 614504dc49b2f509b25c9ec2229f4438db73bab7 | [
"MIT"
] | 67 | 2015-09-10T04:28:33.000Z | 2019-09-19T09:08:11.000Z | test/plugins/blog/__init__.py | Kokemomo/Kokemomo | 614504dc49b2f509b25c9ec2229f4438db73bab7 | [
"MIT"
] | 2 | 2016-06-13T11:20:42.000Z | 2016-07-22T07:44:31.000Z |
__author__ = 'hiroki-m'
| 5.4 | 23 | 0.62963 |
03e9819f0ef73ce7909ccd2eeeb92d76c8f0127e | 2,863 | py | Python | build_scripts/CompileCurl-Linux.py | tahussle/bebbang | 93be7665680d454f2f5a8c1124b1f1fa83f95960 | [
"MIT"
] | null | null | null | build_scripts/CompileCurl-Linux.py | tahussle/bebbang | 93be7665680d454f2f5a8c1124b1f1fa83f95960 | [
"MIT"
] | 1 | 2019-04-19T03:20:52.000Z | 2019-04-19T03:20:52.000Z | build_scripts/CompileCurl-Linux.py | tahussle/bebbang | 93be7665680d454f2f5a8c1124b1f1fa83f95960 | [
"MIT"
] | null | null | null | import os
from subprocess import call
import sys
import re
import multiprocessing as mp
import string
import urllib
import shutil
version = "7.61.1"
def get_curl_filename(ver):
    """Return the curl source tarball name for version string *ver*.

    :param ver: curl version string, e.g. "7.61.1"
    :returns: tarball file name, e.g. "curl-7.61.1.tar.gz"
    """
    return "".join(("curl-", ver, ".tar.gz"))
def get_curl_link(ver):
    """Return the download URL of the curl source tarball for *ver*.

    :param ver: curl version string
    :returns: full download URL on the curl.haxx.se mirror
    """
    tarball = get_curl_filename(ver)
    return "https://curl.haxx.se/download/" + tarball
def download_file(filelink, target):
    """Download *filelink* to the local path *target*.

    Any pre-existing file at *target* is removed first so the retrieval
    starts from a clean slate.

    :param filelink: URL to fetch
    :param target: local destination path
    :returns: True on success, False if the download failed
    """
    try:
        # NOTE: urllib.URLopener is Python-2 era API (removed in 3.9);
        # this script targets older interpreters.
        testfile = urllib.URLopener()
        try:
            os.remove(target)
            print("Found file " + target + ", which is now deleted.")
        except OSError:
            # Target did not exist (or could not be removed) — nothing to
            # clean up. Previously a bare `except:` hid even Ctrl-C here.
            pass
        testfile.retrieve(filelink, target)
        return True
    except Exception:
        # Bug fix: the former bare `except:` also swallowed SystemExit and
        # KeyboardInterrupt; catch only genuine errors and report failure.
        return False
def download_curl():
    """Locate and download the newest patch release of curl ``version``.

    Tries version suffixes from 'z' down to 'a' and finally the bare
    version string, downloading the first tarball that exists on the
    mirror (the module-level ``version`` holds the base version).

    :returns: the downloaded tarball's file name, or "" if nothing could
        be downloaded (e.g. no network connection)
    """
    curl_version_found = False
    filename_ = ""
    # Reverse alphabetical order so the most recent patch suffix wins;
    # the bare version ("") is tried last.
    for ver_suffix in list(reversed(string.ascii_lowercase))+[""]:
        version_str = version + ver_suffix
        if(download_file(get_curl_link(version_str), get_curl_filename(version_str))):
            curl_version_found = True
            filename_ = get_curl_filename(version_str)
            print("Found latest Curl version to be " + version_str)
            break
    if curl_version_found == False:
        print("Could not find the latest Curl version. Probably you're not connected to the internet.")
        print("If you have already downloaded Curl, put the file name in the first argument of the script.")
    return filename_
# --- Main script ---
# Use a tarball name passed as the first CLI argument, or download one.
if len(sys.argv) < 2:
    filename = download_curl()
else:
    filename = sys.argv[1]
dirname = filename.replace(".tar.gz","")
# Remove leftovers from previous runs before extracting.
try:
    shutil.rmtree(dirname)
except:
    pass
working_dir = os.getcwd()
call("tar -xf " + filename, shell=True) #extract the .tar.gz file
dirname_bin = dirname + "_build"
final_dirname = "curl_build"
try:
    shutil.rmtree(dirname_bin)
except:
    pass
try:
    shutil.rmtree(final_dirname)
except:
    pass
#Go back to base dir
os.chdir(working_dir)
################
os.chdir(dirname)
# prepend ccache to the path, necessary since prior steps prepend things to the path
os.environ['PATH'] = '/usr/lib/ccache:' + os.environ['PATH']
# Static build against the locally-built OpenSSL (./openssl_build), with
# optional protocols/libraries disabled.
call("./configure --disable-shared --prefix=" + os.path.join(working_dir,dirname_bin) + " --with-ssl=" +os.path.join(working_dir,"openssl_build") + " --without-libidn2 --without-librtmp --disable-ldap --without-zlib",shell=True)
call(r"make -j" + str(mp.cpu_count()), shell=True)
call(r"make install", shell=True)
print("Compilation complete.")
#Go back to base dir
os.chdir(working_dir)
################
# Publish the versioned build directory under the stable name "curl_build".
call(r"ln -s " + dirname_bin + " " + final_dirname,shell=True)
print("")
print("Curl compiled to \"" + os.path.join(working_dir,final_dirname) + "\" with a soft link to \"" + os.path.join(working_dir,dirname_bin) + "\"")
print("")
# NOTE(review): "CurlL" below is a typo in a user-visible message.
print("CurlL lib path: " + os.path.join(working_dir,final_dirname,"lib"))
print("Curl include path: " + os.path.join(working_dir,final_dirname,"include"))
| 27.796117 | 228 | 0.667132 |
1b91a0c736fd73899baeae1280d226967c509c78 | 3,876 | py | Python | YOLOv5/models/export.py | Danthe-GH/Danzas | 4bad7589c8fd548dfed5ebf66c680372a8f1e6d3 | [
"MIT"
] | 5 | 2020-10-18T05:08:07.000Z | 2022-02-09T07:41:47.000Z | YOLOv5/models/export.py | Danthe-GH/Danzas | 4bad7589c8fd548dfed5ebf66c680372a8f1e6d3 | [
"MIT"
] | null | null | null | YOLOv5/models/export.py | Danthe-GH/Danzas | 4bad7589c8fd548dfed5ebf66c680372a8f1e6d3 | [
"MIT"
] | 7 | 2020-10-16T02:04:22.000Z | 2021-01-29T17:58:33.000Z | """Exports a YOLOv5 *.pt model to ONNX and TorchScript formats
Usage:
$ export PYTHONPATH="$PWD" && python models/export.py --weights ./weights/yolov5s.pt --img 640 --batch 1
"""
import argparse
import sys
import time
import os
#sys.path.append('./../../yolo-v5') # to run '$ python *.py' files in subdirectories
sys.path.insert(1, './../yolo-v5') # correct path
import torch
import torch.nn as nn
import models
from models.experimental import attempt_load
from utils.activations import Hardswish
from utils.general import set_logging, check_img_size
# Entry point: export the given YOLOv5 *.pt checkpoint to TorchScript,
# ONNX and CoreML. Each export is attempted independently; a failure in
# one format does not stop the others.
if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('--weights', type=str, default='./yolov5s.pt', help='weights path') # from yolov5/models/
    parser.add_argument('--img-size', nargs='+', type=int, default=[1280, 736], help='image size') # height, width default=[640, 640]
    parser.add_argument('--batch-size', type=int, default=1, help='batch size')
    opt = parser.parse_args()
    opt.img_size *= 2 if len(opt.img_size) == 1 else 1 # duplicate a single value to (h, w)
    print(opt)
    set_logging()
    t = time.time()
    # Load PyTorch model
    model = attempt_load(opt.weights, map_location=torch.device('cpu')) # load FP32 model
    labels = model.names
    # Checks: image sizes must be multiples of the model's max stride.
    gs = int(max(model.stride)) # grid size (max stride)
    opt.img_size = [check_img_size(x, gs) for x in opt.img_size] # verify img_size are gs-multiples
    # Input: all-zeros dummy image used to trace/export the graph.
    img = torch.zeros(opt.batch_size, 3, *opt.img_size) # image size(1,3,320,192) iDetection
    # Update model: make modules export-friendly.
    for k, m in model.named_modules():
        m._non_persistent_buffers_set = set() # pytorch 1.6.0 compatibility
        if isinstance(m, models.common.Conv) and isinstance(m.act, nn.Hardswish):
            m.act = Hardswish() # swap in the export-compatible Hardswish
        # if isinstance(m, models.yolo.Detect):
        # m.forward = m.forward_export # assign forward (optional)
    #model.model[-1].export = True # set Detect() layer export=True
    model.model[-1].export = False # Correction
    y = model(img) # dry run
    # TorchScript export
    try:
        print('\nStarting TorchScript export with torch %s...' % torch.__version__)
        f = opt.weights.replace('.pt', '.torchscript.pt') # filename
        ts = torch.jit.trace(model, img)
        ts.save(f)
        print('TorchScript export success, saved as %s' % f)
    except Exception as e:
        print('TorchScript export failure: %s' % e)
    # ONNX export
    try:
        import onnx
        print('\nStarting ONNX export with onnx %s...' % onnx.__version__)
        f = opt.weights.replace('.pt', '.onnx') # filename
        torch.onnx.export(model, img, f, verbose=False, opset_version=12, input_names=['images'],
                          output_names=['classes', 'boxes'] if y is None else ['output'])
        # Checks: re-load and validate the exported graph.
        onnx_model = onnx.load(f) # load onnx model
        onnx.checker.check_model(onnx_model) # check onnx model
        # print(onnx.helper.printable_graph(onnx_model.graph)) # print a human readable model
        print('ONNX export success, saved as %s' % f)
    except Exception as e:
        print('ONNX export failure: %s' % e)
    # CoreML export
    # NOTE(review): this reuses `ts` from the TorchScript step; if that
    # step failed, the NameError lands in the except below as a
    # "CoreML export failure".
    try:
        import coremltools as ct
        print('\nStarting CoreML export with coremltools %s...' % ct.__version__)
        # convert model from torchscript and apply pixel scaling as per detect.py
        model = ct.convert(ts, inputs=[ct.ImageType(name='image', shape=img.shape, scale=1 / 255.0, bias=[0, 0, 0])])
        f = opt.weights.replace('.pt', '.mlmodel') # filename
        model.save(f)
        print('CoreML export success, saved as %s' % f)
    except Exception as e:
        print('CoreML export failure: %s' % e)
    # Finish
    print('\nExport complete (%.2fs). Visualize with https://github.com/lutzroeder/netron.' % (time.time() - t))
| 39.55102 | 134 | 0.643447 |
268a23586fe5b550891356e25674cafc6c27b848 | 10,394 | py | Python | models/vit.py | Jokererer/VIT-ResNet-CIFAR10 | a6cc2f1c6bc720e1d899020da6573f56686db109 | [
"MIT"
] | null | null | null | models/vit.py | Jokererer/VIT-ResNet-CIFAR10 | a6cc2f1c6bc720e1d899020da6573f56686db109 | [
"MIT"
] | null | null | null | models/vit.py | Jokererer/VIT-ResNet-CIFAR10 | a6cc2f1c6bc720e1d899020da6573f56686db109 | [
"MIT"
] | null | null | null | # https://github.com/lucidrains/vit-pytorch/blob/main/vit_pytorch/vit_pytorch.py
import torch
import torch.nn.functional as F
from einops import rearrange
from torch import nn
MIN_NUM_PATCHES = 16
class Residual(nn.Module):
    """Wraps a module so its output is added to its input (skip connection)."""
    def __init__(self, fn):
        super().__init__()
        self.fn = fn
    def forward(self, x, **kwargs):
        out = self.fn(x, **kwargs)
        return out + x
class PreNorm(nn.Module):
    """Applies LayerNorm to the input before delegating to the wrapped module."""
    def __init__(self, dim, fn):
        super().__init__()
        self.norm = nn.LayerNorm(dim)
        self.fn = fn
    def forward(self, x, **kwargs):
        normed = self.norm(x)
        return self.fn(normed, **kwargs)
class FeedForward(nn.Module):
    """Two-layer MLP (dim -> hidden_dim -> dim) with GELU and dropout."""
    def __init__(self, dim, hidden_dim, dropout = 0.):
        super().__init__()
        layers = [
            nn.Linear(dim, hidden_dim),
            nn.GELU(),
            nn.Dropout(dropout),
            nn.Linear(hidden_dim, dim),
            nn.Dropout(dropout),
        ]
        self.net = nn.Sequential(*layers)
    def forward(self, x):
        return self.net(x)
class Attention(nn.Module):
    """Multi-head self-attention over a sequence of token embeddings.

    Projects the input to queries/keys/values with one fused linear
    layer, computes scaled dot-product attention per head, then projects
    the concatenated heads back to ``dim``.
    """
    def __init__(self, dim, heads = 8, dropout = 0.):
        super().__init__()
        self.heads = heads
        # Scaling factor applied to the dot products before softmax.
        # NOTE(review): this uses dim ** -0.5 (full embedding dim) rather
        # than the per-head (dim/heads) ** -0.5 — confirm intended variant.
        self.scale = dim ** -0.5
        # Single projection producing Q, K and V (split in forward()).
        self.to_qkv = nn.Linear(dim, dim * 3, bias = False)
        self.to_out = nn.Sequential(
            nn.Linear(dim, dim),
            nn.Dropout(dropout)
        )
    def forward(self, x, mask = None):
        # x: (batch, tokens, dim); mask (optional): boolean token mask.
        b, n, _, h = *x.shape, self.heads
        qkv = self.to_qkv(x).chunk(3, dim = -1)
        # Split the embedding dimension into h heads: each is (b, h, n, d).
        q, k, v = map(lambda t: rearrange(t, 'b n (h d) -> b h n d', h = h), qkv)
        # Pairwise token similarities, scaled: (b, h, n, n).
        dots = torch.einsum('bhid,bhjd->bhij', q, k) * self.scale
        if mask is not None:
            # Prepend True for the class token, build a 2-D attendable mask
            # via an outer product, and set masked entries to -inf so they
            # vanish after softmax.
            mask = F.pad(mask.flatten(1), (1, 0), value = True)
            assert mask.shape[-1] == dots.shape[-1], 'mask has incorrect dimensions'
            mask = mask[:, None, :] * mask[:, :, None]
            dots.masked_fill_(~mask, float('-inf'))
            del mask
        attn = dots.softmax(dim=-1)
        # Weighted sum of values, then merge heads back to (b, n, dim).
        out = torch.einsum('bhij,bhjd->bhid', attn, v)
        out = rearrange(out, 'b h n d -> b n (h d)')
        out = self.to_out(out)
        return out
class Transformer(nn.Module):
    """Stack of `depth` pre-norm encoder layers with residual connections."""
    def __init__(self, dim, depth, heads, mlp_dim, dropout):
        super().__init__()
        self.layers = nn.ModuleList([])
        for _ in range(depth):
            attn_block = Residual(PreNorm(dim, Attention(dim, heads = heads, dropout = dropout)))
            ff_block = Residual(PreNorm(dim, FeedForward(dim, mlp_dim, dropout = dropout)))
            self.layers.append(nn.ModuleList([attn_block, ff_block]))
    def forward(self, x, mask = None):
        # Alternate attention and feed-forward sub-layers, depth times.
        for attn_block, ff_block in self.layers:
            x = attn_block(x, mask = mask)
            x = ff_block(x)
        return x
class ViT(nn.Module):
    """Vision Transformer classifier.

    Splits an image into fixed-size patches, linearly embeds them,
    prepends a learnable class token, adds learned position embeddings,
    runs a Transformer encoder and classifies from the class token.
    """
    def __init__(self, *, image_size, patch_size, num_classes, dim, depth, heads, mlp_dim, channels = 3, dropout = 0., emb_dropout = 0.):
        super().__init__()
        assert image_size % patch_size == 0, 'image dimensions must be divisible by the patch size'
        num_patches = (image_size // patch_size) ** 2
        # Flattened pixel count of one patch (channels * p * p).
        patch_dim = channels * patch_size ** 2
        assert num_patches > MIN_NUM_PATCHES, f'your number of patches ({num_patches}) is way too small for attention to be effective. try decreasing your patch size'
        self.patch_size = patch_size
        # +1 position for the class token.
        self.pos_embedding = nn.Parameter(torch.randn(1, num_patches + 1, dim))
        self.patch_to_embedding = nn.Linear(patch_dim, dim)
        self.cls_token = nn.Parameter(torch.randn(1, 1, dim))
        self.dropout = nn.Dropout(emb_dropout)
        self.transformer = Transformer(dim, depth, heads, mlp_dim, dropout)
        self.to_cls_token = nn.Identity()
        # Classification head applied to the class-token embedding only.
        self.mlp_head = nn.Sequential(
            nn.LayerNorm(dim),
            nn.Linear(dim, mlp_dim),
            nn.GELU(),
            nn.Dropout(dropout),
            nn.Linear(mlp_dim, num_classes)
        )
    def forward(self, img, mask = None):
        p = self.patch_size
        # (b, c, H, W) -> (b, num_patches, patch_dim): cut into p x p
        # patches and flatten each one.
        x = rearrange(img, 'b c (h p1) (w p2) -> b (h w) (p1 p2 c)', p1 = p, p2 = p)
        x = self.patch_to_embedding(x)
        b, n, _ = x.shape
        # One class token per batch element, prepended to the sequence.
        cls_tokens = self.cls_token.expand(b, -1, -1)
        x = torch.cat((cls_tokens, x), dim=1)
        x += self.pos_embedding[:, :(n + 1)]
        x = self.dropout(x)
        x = self.transformer(x, mask)
        # Classify from the class-token position only.
        x = self.to_cls_token(x[:, 0])
        return self.mlp_head(x)
# #辅助函数,生成元组
#
# def pair(t):
# return t if isinstance(t,tuple) else (t,t)
#
# #规范化层的类封装
# class PreNorm(nn.Module):
# def __init__(self,dim,fn):
# super().__init__()
# self.norm = nn.LayerNorm(dim) #正则化
# self.fn = fn #具体的操作
# def forward(self,x,**kwargs):
# return self.fn(self.norm(x),**kwargs)
#
#
# #FFN 前向传播
# class FeedForward(nn.Module):
# def __init__(self,dim,hidden_dim,dropout=0.):
# super().__init__()
# #前向传播
# self.net = nn.Sequential(
# nn.Linear(dim,hidden_dim),
# nn.GELU(),
# nn.Dropout(dropout),
# nn.Linear(hidden_dim,dim),
# nn.Dropout(dropout)
# )
# def forward(self,x):
# return self.net(x)
#
#
# # Attention
#
# class Attention(nn.Module):
# def __init__(self, dim, heads=8, dim_head=64, dropout=0.):
# super().__init__()
# inner_dim = dim_head * heads # 计算最终进行全连接操作时输入神经元的个数
# project_out = not (heads == 1 and dim_head == dim) # 多头注意力并且输入和输出维度相同时为True
#
# self.heads = heads # 多头注意力中 头的个数
# self.scale = dim_head ** -0.5 # 缩放操作,论文 Attention is all you need 中有介绍
#
# self.attend = nn.Softmax(dim=-1) # 初始化一个Softmax操作
# self.to_qkv = nn.Linear(dim, inner_dim * 3, bias=False) # 对 Q,K,V三组向量进行线性操作
#
# # 线性全连接,如果不是多头或者输入输出维度不相等,进行空操作
# self.to_out = nn.Sequential(
# nn.Linear(inner_dim, dim),
# nn.Dropout(dropout)
# ) if project_out else nn.Identity()
#
# def forward(self, x):
# b, n, _, h = *x.shape, self.heads # 获得输入x的维度和多头注意力的"头"数
# qkv = self.to_qkv(x).chunk(3, dim=-1) # 先对Q,K,V进行线性操作,然后chunk乘3份
# q, k, v = map(lambda t: rearrange(t, 'b n (h d) -> b h n d', h=h), qkv) # 整理维度,获得 Q,K,V
#
# dots = einsum('b h i d,b h j d -> b h i j', q, k) * self.scale # Q,K向量先做点乘,计算相关性,然后除以缩放因子
#
# attn = self.attend(dots) # 做Softmax运算
#
# out = einsum('b h i j,b h j d -> b h i d', attn, v) # Softmax运算结果与Value向量相乘,得到最终结果
# out = rearrange(out, 'b h n d -> b n (h d)') # 重新整理维度
# return self.to_out(out) # 做线性的全连接操作或者空操作(空操作直接输出out)
#
#
# class Transformer(nn.Module):
# def __init__(self, dim, depth, heads, dim_head, mlp_dim, dropout=0.):
# super().__init__()
# self.layers = nn.ModuleList([]) # Transformer 包含多个编码器的叠加
# for _ in range(depth):
# # Transformer包含两大块:自注意力模块和前向传播模块
# self.layers.append(nn.ModuleList([
# PreNorm(dim, Attention(dim, heads=heads, dim_head=dim_head, dropout=dropout)), # 多头自注意力模块
# PreNorm(dim, FeedForward(dim, mlp_dim, dropout=dropout)) # 前向传播模块
#
# ]))
#
# def forward(self, x):
# for attn, ff in self.layers:
# # 自注意力模块和前向传播模块都使用了残差的模式
# x = attn(x) + x
# x = ff(x) + x
# return x
#
#
# class ViT(nn.Module):
# def __init__(self, *, image_size, patch_size, num_classes, dim, depth, heads, mlp_dim, pool='cls', channels=3,
# dim_head=64, dropout=0., emb_dropout=0.):
# super().__init__()
# image_height, image_width = pair(image_size) # 原图大小 比如说 256 图块大小 32
# patch_height, patch_width = pair(patch_size) # 图块大小
# assert image_height % patch_height == 0 and image_width % patch_width == 0, 'Image dimensions must be divisible by the patch size.' # 保证一定能够完整切块
# # patch数量
# num_patches = (image_height // patch_height) * (
# image_width // patch_width) # 获取图像切块的个数 # (256/32)*(256/32)也就是64块
#
# # patch维度
# patch_dim = channels * patch_height * patch_width # 线性变换时的输入大小,即每一个图像宽,高和通道的乘积 图块拉成 3 * 32 * 32 变成一维的长度
# assert pool in {'cls',
# 'mean'}, 'pool type must be either cls(cls token) or mean(mean pooling)' # 池化方法必须为cls或者mean
#
# # 定义块嵌入,将高维向量转化为低维向量
# self.to_patch_embedding = nn.Sequential(
#
# # 展平,是将 3 维图像 reshape 为2维之后进行切分
# Rearrange('b c (h p1)(w p2) -> b (h w)(p1 p2 c)', p1=patch_height, p2=patch_width),
# # 将批量为b通道为c高为h*p1宽为w*p2的图像转化为批量为b个数为h*w维度为p1*p2*c的图像块
# # 即,把b张c通道的图像分割成b*(h*w)张大小为p1*p2*c的图像块
# # 线性变换,即全连接层,降维后维度为D,通过线性函数 把32*32*3 -> 1024 # 例如:patch_size为16 (8, 3, 48, 48)->(8, 9, 768)
# nn.Linear(patch_dim, dim), # 对分割好的图像块进行线性处理(全连接),输入维度为每一个小块的所有像素个数,输出为dim(函数传入的参数)
#
# )
#
# self.pos_embedding = nn.Parameter(torch.randn(1, num_patches + 1, dim)) # 位置编码,获取一组正太分布的数据用于训练
# # 定义类别向量
# self.cls_token = nn.Parameter(torch.randn(1, 1, dim)) # 分类令牌,可训练
# self.dropout = nn.Dropout(emb_dropout)
#
# self.transformer = Transformer(dim, depth, heads, dim_head, mlp_dim, dropout) # Transformer模块
#
# self.pool = pool
# self.to_latent = nn.Identity() # 占位操作
#
# self.mlp_head = nn.Sequential(
# nn.LayerNorm(dim), # 正则化
# nn.Linear(dim, num_classes) # 线性输出
# )
#
# def forward(self, img):
# # 块嵌入
# x = self.to_patch_embedding(img) # 切块操作,shape(b,n,dim),b为批量,n为切块数目,dim为最终线性操作时输入的神经元个数
# b, n, _ = x.shape # shape(b,n,1024)
#
# # 追加类别向量,可学习的嵌入向量,最后取该向量作为类别预测结果
# cls_tokens = repeat(self.cls_token, '() n d ->b n d',
# b=b) # 分类令牌,将self.cls_token(形状为1, 1, dim)赋值为shape (b, 1, dim)
# x = torch.cat((cls_tokens, x), dim=1) # 将分类令牌拼接到输入中,x的shape(b.n+1,1024)
#
# # 追加位置编码,ViT的位置编码没有使用更新的2D位置嵌入方法,而是直接用的一维可学习的位置嵌入变量,原先是论文作者发现实际使用时2D并没有展现出比1D更好的效果
# x += self.pos_embedding[:, :(n + 1)] # 进行位置编码,shape (b, n+1, 1024)
#
# # dropout
# x = self.dropout(x)
#
# # 输入到Transformer中
# x = self.transformer(x) # transformer操作
#
# x = x.mean(dim=1) if self.pool == 'mean' else x[:, 0]
#
# x = self.to_latent(x)
#
# # MLP
# return self.mlp_head(x) # 线性输出 | 36.858156 | 166 | 0.567154 |
ed44d7b99f7b78d9f9d66cb7712a4f1a71e17444 | 17,169 | py | Python | rllib/policy/rnn_sequencing.py | jacobowitz/ray | a69f2c7bf759b35fa6573329ec244a60f4d56a2a | [
"Apache-2.0"
] | 1 | 2021-11-30T15:06:24.000Z | 2021-11-30T15:06:24.000Z | rllib/policy/rnn_sequencing.py | jacobowitz/ray | a69f2c7bf759b35fa6573329ec244a60f4d56a2a | [
"Apache-2.0"
] | 84 | 2021-03-06T08:02:56.000Z | 2022-03-05T08:07:19.000Z | rllib/policy/rnn_sequencing.py | jacobowitz/ray | a69f2c7bf759b35fa6573329ec244a60f4d56a2a | [
"Apache-2.0"
] | null | null | null | """RNN utils for RLlib.
The main trick here is that we add the time dimension at the last moment.
The non-LSTM layers of the model see their inputs as one flat batch. Before
the LSTM cell, we reshape the input to add the expected time dimension. During
postprocessing, we dynamically pad the experience batches so that this
reshaping is possible.
Note that this padding strategy only works out if we assume zero inputs don't
meaningfully affect the loss function. This happens to be true for all the
current algorithms: https://github.com/ray-project/ray/issues/2992
"""
import logging
import numpy as np
from typing import List, Optional
from ray.rllib.policy.sample_batch import SampleBatch
from ray.rllib.utils.annotations import DeveloperAPI
from ray.rllib.utils.debug import summarize
from ray.rllib.utils.framework import try_import_tf, try_import_torch
from ray.rllib.utils.typing import TensorType, ViewRequirementsDict
from ray.util import log_once
tf1, tf, tfv = try_import_tf()
torch, _ = try_import_torch()
logger = logging.getLogger(__name__)
@DeveloperAPI
def pad_batch_to_sequences_of_same_size(
        batch: SampleBatch,
        max_seq_len: int,
        shuffle: bool = False,
        batch_divisibility_req: int = 1,
        feature_keys: Optional[List[str]] = None,
        view_requirements: Optional[ViewRequirementsDict] = None,
):
    """Applies padding to `batch` so it's choppable into same-size sequences.
    Shuffles `batch` (if desired), makes sure divisibility requirement is met,
    then pads the batch ([B, ...]) into same-size chunks ([B, ...]) w/o
    adding a time dimension (yet).
    Padding depends on episodes found in batch and `max_seq_len`.
    NOTE: `batch` is modified in-place; nothing is returned (returns None
    implicitly in all branches).
    Args:
        batch (SampleBatch): The SampleBatch object. All values in here have
            the shape [B, ...].
        max_seq_len (int): The max. sequence length to use for chopping.
        shuffle (bool): Whether to shuffle batch sequences. Shuffle may
            be done in-place. This only makes sense if you're further
            applying minibatch SGD after getting the outputs.
        batch_divisibility_req (int): The int by which the batch dimension
            must be dividable.
        feature_keys (Optional[List[str]]): An optional list of keys to apply
            sequence-chopping to. If None, use all keys in batch that are not
            "state_in/out_"-type keys.
        view_requirements (Optional[ViewRequirementsDict]): An optional
            Policy ViewRequirements dict to be able to infer whether
            e.g. dynamic max'ing should be applied over the seq_lens.
    """
    # A batch meets the divisibility requirement if its row count divides
    # evenly AND it is single-agent (all agent indices are 0).
    if batch_divisibility_req > 1:
        meets_divisibility_reqs = (
            len(batch[SampleBatch.CUR_OBS]) % batch_divisibility_req == 0
            # not multiagent
            and max(batch[SampleBatch.AGENT_INDEX]) == 0)
    else:
        meets_divisibility_reqs = True
    states_already_reduced_to_init = False
    # RNN/attention net case. Figure out whether we should apply dynamic
    # max'ing over the list of sequence lengths.
    if "state_in_0" in batch or "state_out_0" in batch:
        # Check, whether the state inputs have already been reduced to their
        # init values at the beginning of each max_seq_len chunk.
        if batch.seq_lens is not None and \
                len(batch["state_in_0"]) == len(batch.seq_lens):
            states_already_reduced_to_init = True
        # RNN (or single timestep state-in): Set the max dynamically.
        if view_requirements["state_in_0"].shift_from is None:
            dynamic_max = True
        # Attention Nets (state inputs are over some range): No dynamic maxing
        # possible.
        else:
            dynamic_max = False
    # Multi-agent case.
    elif not meets_divisibility_reqs:
        max_seq_len = batch_divisibility_req
        dynamic_max = False
    # Simple case: No RNN/attention net, nor do we need to pad.
    else:
        if shuffle:
            batch.shuffle()
        return
    # RNN, attention net, or multi-agent case.
    # Split the batch's keys into state columns (RNN hidden states) and
    # feature columns (everything to be chopped/padded).
    state_keys = []
    feature_keys_ = feature_keys or []
    for k, v in batch.items():
        if k.startswith("state_in_"):
            state_keys.append(k)
        elif not feature_keys and not k.startswith("state_out_") and \
                k not in ["infos", "seq_lens"] and isinstance(v, np.ndarray):
            feature_keys_.append(k)
    # Chop/zero-pad all feature columns into same-size sequences and reduce
    # state columns to one initial value per sequence.
    feature_sequences, initial_states, seq_lens = \
        chop_into_sequences(
            feature_columns=[batch[k] for k in feature_keys_],
            state_columns=[batch[k] for k in state_keys],
            episode_ids=batch.get(SampleBatch.EPS_ID),
            unroll_ids=batch.get(SampleBatch.UNROLL_ID),
            agent_indices=batch.get(SampleBatch.AGENT_INDEX),
            seq_lens=getattr(batch, "seq_lens", batch.get("seq_lens")),
            max_seq_len=max_seq_len,
            dynamic_max=dynamic_max,
            states_already_reduced_to_init=states_already_reduced_to_init,
            shuffle=shuffle)
    # Write the chopped/padded columns back into `batch` in-place.
    for i, k in enumerate(feature_keys_):
        batch[k] = feature_sequences[i]
    for i, k in enumerate(state_keys):
        batch[k] = initial_states[i]
    batch["seq_lens"] = np.array(seq_lens)
    if log_once("rnn_ma_feed_dict"):
        logger.info("Padded input for RNN/Attn.Nets/MA:\n\n{}\n".format(
            summarize({
                "features": feature_sequences,
                "initial_states": initial_states,
                "seq_lens": seq_lens,
                "max_seq_len": max_seq_len,
            })))
@DeveloperAPI
def add_time_dimension(padded_inputs: TensorType,
                       *,
                       max_seq_len: int,
                       framework: str = "tf",
                       time_major: bool = False):
    """Reshape a flat padded batch [B*T, ...] into one with a time axis.

    The input is assumed to be padded so that its leading dimension is an
    exact multiple of `max_seq_len` (batch_size == num_seqs * max_seq_len).

    Args:
        padded_inputs (TensorType): A padded batch of sequences, e.g. for
            seq_lens=[1, 2, 2]: inputs=[A, *, B, B, C, C] where * is padding.
        max_seq_len (int): The max. sequence length in padded_inputs.
        framework (str): The framework string ("tf2", "tf", "tfe", "torch").
        time_major (bool): Whether data should be returned in time-major (TxB)
            format or not (BxT). Only supported for torch.

    Returns:
        TensorType: Reshaped tensor of shape [B, T, ...] or [T, B, ...].
    """
    if framework not in ["tf2", "tf", "tfe"]:
        # Torch path: shapes are concrete tuples, so the target shape can be
        # assembled directly.
        assert framework == "torch", "`framework` must be either tf or torch!"
        num_seqs = padded_inputs.shape[0] // max_seq_len
        trailing_dims = padded_inputs.shape[1:]
        if time_major:
            target_shape = (max_seq_len, num_seqs) + trailing_dims
        else:
            target_shape = (num_seqs, max_seq_len) + trailing_dims
        return torch.reshape(padded_inputs, target_shape)
    # TF path: batch size is only known dynamically -> use tf.shape().
    assert time_major is False, "time-major not supported yet for tf!"
    num_seqs = tf.shape(padded_inputs)[0] // max_seq_len
    target_shape = ([num_seqs, max_seq_len] +
                    padded_inputs.get_shape().as_list()[1:])
    return tf.reshape(padded_inputs, target_shape)
@DeveloperAPI
def chop_into_sequences(*,
                        feature_columns,
                        state_columns,
                        max_seq_len,
                        episode_ids=None,
                        unroll_ids=None,
                        agent_indices=None,
                        dynamic_max=True,
                        shuffle=False,
                        seq_lens=None,
                        states_already_reduced_to_init=False,
                        _extra_padding=0):
    """Truncate and pad experiences into fixed-length sequences.
    Args:
        feature_columns (list): List of arrays containing features.
        state_columns (list): List of arrays containing LSTM state values.
        max_seq_len (int): Max length of sequences before truncation.
        episode_ids (List[EpisodeID]): List of episode ids for each step.
        unroll_ids (List[UnrollID]): List of identifiers for the sample batch.
            This is used to make sure sequences are cut between sample batches.
        agent_indices (List[AgentID]): List of agent ids for each step. Note
            that this has to be combined with episode_ids for uniqueness.
        dynamic_max (bool): Whether to dynamically shrink the max seq len.
            For example, if max len is 20 and the actual max seq len in the
            data is 7, it will be shrunk to 7.
        shuffle (bool): Whether to shuffle the sequence outputs.
        seq_lens (Optional[np.ndarray]): Pre-computed sequence lengths; if
            None (or empty), they are derived from episode/agent/unroll ids.
        states_already_reduced_to_init (bool): If True, `state_columns` are
            already one-row-per-sequence and are passed through unchanged.
        _extra_padding (int): Add extra padding to the end of sequences.
    Returns:
        f_pad (list): Padded feature columns. These will be of shape
            [NUM_SEQUENCES * MAX_SEQ_LEN, ...].
        s_init (list): Initial states for each sequence, of shape
            [NUM_SEQUENCES, ...].
        seq_lens (list): List of sequence lengths, of shape [NUM_SEQUENCES].
    Examples:
        >>> f_pad, s_init, seq_lens = chop_into_sequences(
                episode_ids=[1, 1, 5, 5, 5, 5],
                unroll_ids=[4, 4, 4, 4, 4, 4],
                agent_indices=[0, 0, 0, 0, 0, 0],
                feature_columns=[[4, 4, 8, 8, 8, 8],
                                 [1, 1, 0, 1, 1, 0]],
                state_columns=[[4, 5, 4, 5, 5, 5]],
                max_seq_len=3)
        >>> print(f_pad)
        [[4, 4, 0, 8, 8, 8, 8, 0, 0],
         [1, 1, 0, 0, 1, 1, 0, 0, 0]]
        >>> print(s_init)
        [[4, 4, 5]]
        >>> print(seq_lens)
        [2, 3, 1]
    """
    # Derive sequence lengths if not provided: a sequence ends whenever the
    # (episode, agent, unroll) identity changes or max_seq_len is reached.
    if seq_lens is None or len(seq_lens) == 0:
        prev_id = None
        seq_lens = []
        seq_len = 0
        # Combine the three id columns into a single comparable value; the
        # unroll id is shifted into the high bits to avoid collisions.
        unique_ids = np.add(
            np.add(episode_ids, agent_indices),
            np.array(unroll_ids, dtype=np.int64) << 32)
        for uid in unique_ids:
            if (prev_id is not None and uid != prev_id) or \
                    seq_len >= max_seq_len:
                seq_lens.append(seq_len)
                seq_len = 0
            seq_len += 1
            prev_id = uid
        if seq_len:
            seq_lens.append(seq_len)
        seq_lens = np.array(seq_lens, dtype=np.int32)
        assert sum(seq_lens) == len(feature_columns[0])
    # Dynamically shrink max len as needed to optimize memory usage
    if dynamic_max:
        max_seq_len = max(seq_lens) + _extra_padding
    feature_sequences = []
    for f in feature_columns:
        # Save unnecessary copy.
        if not isinstance(f, np.ndarray):
            f = np.array(f)
        length = len(seq_lens) * max_seq_len
        # NOTE: `np.object` was a deprecated alias removed in NumPy 1.24;
        # compare against the builtin `object`, which is equivalent here.
        if f.dtype == object or f.dtype.type is np.str_:
            f_pad = [None] * length
        else:
            # Make sure type doesn't change.
            f_pad = np.zeros((length, ) + np.shape(f)[1:], dtype=f.dtype)
        # Copy each sequence into its max_seq_len-sized slot; the tail of
        # each slot keeps the zero/None padding.
        seq_base = 0
        i = 0
        for len_ in seq_lens:
            for seq_offset in range(len_):
                f_pad[seq_base + seq_offset] = f[i]
                i += 1
            seq_base += max_seq_len
        assert i == len(f), f
        feature_sequences.append(f_pad)
    if states_already_reduced_to_init:
        initial_states = state_columns
    else:
        # Keep only the first state value of each sequence.
        initial_states = []
        for s in state_columns:
            # Skip unnecessary copy.
            if not isinstance(s, np.ndarray):
                s = np.array(s)
            s_init = []
            i = 0
            for len_ in seq_lens:
                s_init.append(s[i])
                i += len_
            initial_states.append(np.array(s_init))
    if shuffle:
        # Permute whole sequences (never individual timesteps) so that
        # features, states, and lengths stay aligned.
        permutation = np.random.permutation(len(seq_lens))
        for i, f in enumerate(feature_sequences):
            orig_shape = f.shape
            f = np.reshape(f, (len(seq_lens), -1) + f.shape[1:])
            f = f[permutation]
            f = np.reshape(f, orig_shape)
            feature_sequences[i] = f
        for i, s in enumerate(initial_states):
            s = s[permutation]
            initial_states[i] = s
        seq_lens = seq_lens[permutation]
    return feature_sequences, initial_states, seq_lens
def timeslice_along_seq_lens_with_overlap(
        sample_batch,
        seq_lens=None,
        zero_pad_max_seq_len=0,
        pre_overlap=0,
        zero_init_states=True) -> List["SampleBatch"]:
    """Slices batch along `seq_lens` (each seq-len item produces one batch).
    Asserts that seq_lens is given or sample_batch.seq_lens is not None.
    Args:
        sample_batch (SampleBatch): The SampleBatch to timeslice.
        seq_lens (Optional[List[int]]): An optional list of seq_lens to slice
            at. If None, use `sample_batch.seq_lens`.
        zero_pad_max_seq_len (int): If >0, already zero-pad the resulting
            slices up to this length. NOTE: This max-len will include the
            additional timesteps gained via setting pre_overlap or
            post_overlap > 0 (see Example).
        pre_overlap (int): If >0, will overlap each two consecutive slices by
            this many timesteps (toward the left side). This will cause
            zero-padding at the very beginning of the batch.
        zero_init_states (bool): Whether initial states should always be
            zero'd. If False, will use the state_outs of the batch to
            populate state_in values.
    Returns:
        List[SampleBatch]: The list of (new) SampleBatches.
    Examples:
        assert seq_lens == [5, 5, 2]
        assert sample_batch.count == 12
        # self = 0 1 2 3 4 | 5 6 7 8 9 | 10 11 <- timesteps
        slices = timeslices_along_seq_lens(
            zero_pad_max_seq_len=10,
            pre_overlap=3)
        # Z = zero padding (at beginning or end).
        # |pre (3)| seq | max-seq-len (up to 10)
        # slices[0] = | Z Z Z | 0 1 2 3 4 | Z Z
        # slices[1] = | 2 3 4 | 5 6 7 8 9 | Z Z
        # slices[2] = | 7 8 9 | 10 11 Z Z Z | Z Z
        # Note that `zero_pad_max_seq_len=10` includes the 3 pre-overlaps
        # count (makes sure each slice has exactly length 10).
    """
    if seq_lens is None:
        seq_lens = sample_batch.seq_lens
    assert seq_lens is not None and seq_lens != [], \
        "Cannot timeslice along `seq_lens` when `seq_lens` is empty or None!"
    # Generate n slices based on self.seq_lens.
    # Each slice is a (begin, end) row-index pair into the flat batch; begin
    # is shifted left by `pre_overlap` and may therefore be negative.
    start = 0
    slices = []
    for seq_len in seq_lens:
        begin = start - pre_overlap
        end = start + seq_len  # + post_overlap
        slices.append((begin, end))
        start += seq_len
    timeslices = []
    for begin, end in slices:
        # zero_length: number of zero rows to prepend (None -> no padding).
        zero_length = None
        data_begin = 0
        zero_init_states_ = zero_init_states
        if begin < 0:
            # Slice reaches before the start of the batch: left-pad with
            # zeros and force zero'd initial states.
            zero_length = -begin
            data_begin = 0
            zero_init_states_ = True
        else:
            # If the overlap region crosses an episode boundary, zero out the
            # timesteps that belong to the previous episode.
            eps_ids = sample_batch[SampleBatch.EPS_ID][begin if begin >= 0 else
                                                       0:end]
            is_last_episode_ids = eps_ids == eps_ids[-1]
            # NOTE(review): `is not True` relies on numpy returning np.bool_
            # elements (identity differs from the builtin True) — confirm
            # this comparison is intended rather than `!= True` / `not ...`.
            if is_last_episode_ids[0] is not True:
                zero_length = int(sum(1.0 - is_last_episode_ids))
                data_begin = begin + zero_length
                zero_init_states_ = True
        if zero_length is not None:
            # Prepend `zero_length` zero rows to every column.
            data = {
                k: np.concatenate([
                    np.zeros(
                        shape=(zero_length, ) + v.shape[1:], dtype=v.dtype),
                    v[data_begin:end]
                ])
                for k, v in sample_batch.data.items()
            }
        else:
            data = {k: v[begin:end] for k, v in sample_batch.data.items()}
        if zero_init_states_:
            # Replace every state_in_N column with a single zero row and drop
            # the corresponding state_out_N column.
            i = 0
            key = "state_in_{}".format(i)
            while key in data:
                data[key] = np.zeros_like(sample_batch.data[key][0:1])
                del data["state_out_{}".format(i)]
                i += 1
                key = "state_in_{}".format(i)
        # TODO: This will not work with attention nets as their state_outs are
        # not compatible with state_ins.
        else:
            # Seed state_in_N from the previous timestep's state_out_N.
            i = 0
            key = "state_in_{}".format(i)
            while key in data:
                data[key] = sample_batch.data["state_out_{}".format(i)][
                    begin - 1:begin]
                del data["state_out_{}".format(i)]
                i += 1
                key = "state_in_{}".format(i)
        timeslices.append(
            SampleBatch(data, _seq_lens=[end - begin], _dont_check_lens=True))
    # Zero-pad each slice if necessary.
    if zero_pad_max_seq_len > 0:
        for ts in timeslices:
            ts.zero_pad(max_seq_len=zero_pad_max_seq_len, exclude_states=True)
    return timeslices
| 40.114486 | 79 | 0.595434 |
9e552333f3083e771d4e854a55c29e88d0694747 | 3,781 | py | Python | docs/conf.py | empymod/emg3d-gallery | 7de6ba58546a819e80c957c312bb1cdfeba0bd9e | [
"Apache-2.0"
] | 3 | 2020-01-12T00:34:33.000Z | 2020-09-03T08:56:47.000Z | docs/conf.py | empymod/emg3d-gallery | 7de6ba58546a819e80c957c312bb1cdfeba0bd9e | [
"Apache-2.0"
] | 14 | 2020-02-11T14:25:43.000Z | 2020-12-06T16:54:23.000Z | docs/conf.py | emsig/emg3d-gallery | cb83d0f57c3157ef89c912ab4ae8baa0d5b08d0e | [
"Apache-2.0"
] | null | null | null | import time
import warnings
from emg3d import __version__
from sphinx_gallery.sorting import ExampleTitleSortKey
# ==== 1. Extensions ====
# Load extensions
extensions = [
    'numpydoc',
    'sphinx_panels',
    'sphinx.ext.intersphinx',
    'sphinx.ext.mathjax',
    'sphinx_gallery.gen_gallery',
]
panels_add_bootstrap_css = False
# Numpydoc settings
numpydoc_show_class_members = False
# Todo settings
todo_include_todos = True
# Sphinx gallery configuration
sphinx_gallery_conf = {
    'examples_dirs': [
        '../examples/tutorials',
        '../examples/comparisons',
        '../examples/models',
    ],
    'gallery_dirs': [
        'gallery/tutorials',
        'gallery/comparisons',
        'gallery/models',
    ],
    'capture_repr': ('_repr_html_', '__repr__'),
    # Pattern to search for example files
    "filename_pattern": r"\.py",
    # Sort gallery examples by title (instead of by number of lines, the
    # sphinx-gallery default)
    "within_subsection_order": ExampleTitleSortKey,
    # Remove sphinx-gallery config comments from the rendered examples
    # (e.g., sphinx_gallery_thumbnail_number)
    'remove_config_comments': True,
    # Show memory
    'show_memory': True,
    # Custom first notebook cell
    'first_notebook_cell': '%matplotlib notebook',
    'image_scrapers': ('matplotlib', ),
}
# https://github.com/sphinx-gallery/sphinx-gallery/pull/521/files
# Remove matplotlib agg warnings from generated doc when using plt.show
warnings.filterwarnings("ignore", category=UserWarning,
                        message='Matplotlib is currently using agg, which is a'
                        ' non-GUI backend, so cannot show the figure.')
# Intersphinx configuration (cross-links into the docs of these projects)
intersphinx_mapping = {
    "numpy": ("https://numpy.org/doc/stable", None),
    "scipy": ("https://docs.scipy.org/doc/scipy/reference", None),
    "discretize": ("https://discretize.simpeg.xyz/en/main", None),
    "empymod": ("https://empymod.emsig.xyz/en/stable", None),
    "xarray": ("https://xarray.pydata.org/en/stable", None),
    "numba": ("https://numba.readthedocs.io/en/stable", None),
    "emg3d": ("https://emg3d.emsig.xyz/en/stable", None),
}
# ==== 2. General Settings ====
description = 'A multigrid solver for 3D electromagnetic diffusion.'
# The templates path.
# templates_path = ['_templates']
# The suffix(es) of source filenames.
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'emg3d-gallery'
author = 'The emsig community'
copyright = f'2018-{time.strftime("%Y")}, {author}'
# |version| and |today| tags (|release|-tag is not used).
version = __version__
release = __version__
today_fmt = '%d %B %Y'
# List of patterns to ignore, relative to source directory.
exclude_patterns = ['_build', ]
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'friendly'
# ==== 3. HTML settings ====
html_theme = 'pydata_sphinx_theme'
html_static_path = ['_static']
html_logo = '_static/emg3d-logo.svg'
html_favicon = '_static/favicon.ico'
html_theme_options = {
    "github_url": "https://github.com/emsig/emg3d",
    "external_links": [
        {"name": "Documentation", "url": "https://emg3d.emsig.xyz"},
        {"name": "emsig", "url": "https://emsig.xyz"},
    ],
    # "use_edit_page_button": True,
}
html_context = {
    "github_user": "emsig",
    "github_repo": "emg3d-gallery",
    "github_version": "main",
    "doc_path": "docs",
}
html_use_modindex = True
html_file_suffix = '.html'
htmlhelp_basename = 'emg3d-gallery'
html_css_files = [
    "style.css",
    "https://cdnjs.cloudflare.com/ajax/libs/font-awesome/4.7.0/" +
    "css/font-awesome.min.css"
]
# ==== 4. linkcheck ====
# Papers from academic.oup results in a 104 error
linkcheck_ignore = [
    'https://doi.org/10.1093/gji/ggab171',
]
| 28.643939 | 79 | 0.67178 |
db2e5cf6fc9d62bb9f37ff0b50d896ba15aac97e | 2,932 | py | Python | metaspace/engine/migrations/r1_5_20190812_ion_formulas.py | richardgoater/metaspace | 5db6b2fd0170b8d90aabe04b887a2a7c6adefdc4 | [
"Apache-2.0"
] | null | null | null | metaspace/engine/migrations/r1_5_20190812_ion_formulas.py | richardgoater/metaspace | 5db6b2fd0170b8d90aabe04b887a2a7c6adefdc4 | [
"Apache-2.0"
] | null | null | null | metaspace/engine/migrations/r1_5_20190812_ion_formulas.py | richardgoater/metaspace | 5db6b2fd0170b8d90aabe04b887a2a7c6adefdc4 | [
"Apache-2.0"
] | null | null | null | import argparse
import logging
from sm.engine.formula_parser import safe_generate_ion_formula
from sm.engine.ion_mapping import get_ion_id_mapping
from sm.engine.util import init_loggers, SMConfig
from sm.engine.db import DB, ConnectionPool
BATCH_SIZE = 10000
logger = logging.getLogger('engine')
def populate_ion_formula(db):
    """Backfill `graphql.ion.ion_formula` for rows where it is still empty.

    Selects all ions with an empty `ion_formula`, computes the formula from
    (formula, chem_mod, neutral_loss, adduct) in batches of BATCH_SIZE, and
    writes the results back with a single UPDATE per batch.

    :param db: DB instance providing `select` and `alter`.
    """
    logger.info("Adding ion_formula to existing ions")
    ion_tuples = db.select(
        "SELECT id, formula, chem_mod, neutral_loss, adduct FROM graphql.ion WHERE ion_formula = ''"
    )
    for i in range(0, len(ion_tuples), BATCH_SIZE):
        # Use the module logger (not print) so progress ends up in the
        # configured logs; lazy %-args avoid eager string formatting.
        logger.info('Processing %s out of %s', i, len(ion_tuples))
        # Slice the batch once (the original sliced it twice) and avoid
        # shadowing the builtin `id`.
        batch = ion_tuples[i:i + BATCH_SIZE]
        ids = [ion_id for ion_id, *parts in batch]
        ion_formulas = [safe_generate_ion_formula(*parts) for _, *parts in batch]
        db.alter(
            'WITH ion_formulas AS (SELECT UNNEST(%s::int[]) as id, '
            ' UNNEST(%s::text[]) as new_ion_formula) '
            'UPDATE graphql.ion SET ion_formula = new_ion_formula '
            'FROM ion_formulas WHERE ion.id = ion_formulas.id',
            [ids, ion_formulas],
        )
def populate_ions(db):
    """Ensure a graphql.ion row exists for every distinct annotated ion.

    Gathers every distinct (formula, chem_mod, neutral_loss, adduct, charge)
    combination from the annotation table and inserts missing ion rows via
    `get_ion_id_mapping`, once per polarity.

    :param db: DB instance providing `select`.
    """
    logger.info("Adding missing ions")
    ion_tuples = db.select(
        "SELECT DISTINCT formula, chem_mod, neutral_loss, adduct, "
        "(d.config->'isotope_generation'->>'charge')::int as charge "
        "FROM annotation "
        "JOIN job ON annotation.job_id = job.id "
        "JOIN dataset d on job.ds_id = d.id"
    )
    # Positive ions first, then negative — same order as before.
    for polarity in (1, -1):
        subset = [
            (sf, cm, nl, ad)
            for sf, cm, nl, ad, charge in ion_tuples
            if charge == polarity
        ]
        if subset:
            get_ion_id_mapping(db, subset, polarity)
def populate_ion_id(db):
    """Point `annotation.ion_id` at the matching `graphql.ion` row.

    Matches on formula, chem_mod, neutral_loss, adduct, and the dataset's
    configured polarity/charge.

    :param db: DB instance providing `alter`.
    """
    logger.info("Linking ions to annotation table")
    link_sql = (
        "UPDATE annotation SET ion_id = ion.id "
        "FROM graphql.ion, job, dataset "
        "WHERE annotation.job_id = job.id "
        "AND job.ds_id = dataset.id "
        "AND annotation.formula = ion.formula "
        "AND annotation.chem_mod = ion.chem_mod "
        "AND annotation.neutral_loss = ion.neutral_loss "
        "AND annotation.adduct = ion.adduct "
        "AND (dataset.config->'isotope_generation'->>'charge')::int = ion.charge"
    )
    db.alter(link_sql)
def main():
    """Migration entry point: parse CLI args, set up config/logging, then
    populate ion formulas, missing ions, and annotation->ion links."""
    arg_parser = argparse.ArgumentParser(
        description='Merge mol_dbs and adducts into config')
    arg_parser.add_argument(
        '--config', default='conf/config.json', help='SM config path')
    parsed_args = arg_parser.parse_args()
    SMConfig.set_path(parsed_args.config)
    conf = SMConfig.get_conf()
    init_loggers(conf['logs'])
    with ConnectionPool(conf['db']):
        database = DB()
        populate_ion_formula(database)
        populate_ions(database)
        populate_ion_id(database)
if __name__ == '__main__':
main()
| 34.494118 | 100 | 0.641201 |
7f633b2d11e6bd248c3cbc7e6a367e9fe231c37d | 9,182 | py | Python | hawk/client.py | pyGrowler/PyHawk | 3199b77c04b22ec89e2e6377a48940e64a438a4a | [
"BSD-3-Clause"
] | 5 | 2015-01-15T17:21:29.000Z | 2022-03-26T20:31:59.000Z | hawk/client.py | pyGrowler/PyHawk | 3199b77c04b22ec89e2e6377a48940e64a438a4a | [
"BSD-3-Clause"
] | 15 | 2015-10-15T14:10:11.000Z | 2021-07-10T05:34:51.000Z | hawk/client.py | pyGrowler/PyHawk | 3199b77c04b22ec89e2e6377a48940e64a438a4a | [
"BSD-3-Clause"
] | 5 | 2015-05-14T07:20:45.000Z | 2020-07-18T23:31:51.000Z | # -*- coding: utf-8 -*-
"""
Server APIs for HAWK Authentication.
"""
import logging
import math
import pprint
import time
import hawk.hcrypto as hcrypto
import hawk.util as util
from hawk.server import BadRequest
log = logging.getLogger(__name__)
def header(url, method, options=None):
    """
    Build the value of a client HAWK ``Authorization`` header.

    :param url: Request URI, e.g. 'http://example.com/resource?a=b'
    :param method: HTTP verb ('GET', 'POST', etc)
    :param options:
        Required Options:
            credentials (id, key, algorithm)
        Optional:
            ext:
                Application specific data (string)
            timestamp:
                A pre-calculated timestamp
            nonce:
                '2334f34f': A pre-generated nonce
            localtimeOffsetMsec:
                Time offset to sync with server time (ignored if timestamp
                provided) (Example 400)
            payload:
                UTF-8 encoded string for body hash generation (ignored if hash
                provided) (Example '{"some":"payload"}')
            contentType:
                Payload content-type (ignored if hash provided) (Example
                'application/json')
            hash:
                Pre-calculated payload hash (Example 'U4MKKSmiVxk37JCCrAVIjV=')
            app:
                Oz application id ('24s23423f34dx')
            dlg:
                Oz delegated-by application id - '234sz34tww3sd'
    :return: dict with 'field' (the header string; '' on invalid input) and
        'artifacts' (the values that went into the MAC).
    """
    result = {'field': '', 'artifacts': {}}
    # Fail soft (empty result) instead of raising on malformed input.
    if url is None or len(url) == 0:
        log.info("Bad URL skipping")
        return result
    if method is None or len(method) == 0:
        log.info("Bad method skipping")
        return result
    if not isinstance(options, dict):
        log.info("Bad options skipping")
        return result
    if 'credentials' not in options:
        log.info("Bad credentials skipping")
        return result
    cred = options['credentials']
    if 'id' not in cred or 'key' not in cred or 'algorithm' not in cred:
        log.info("Bad credential elements skipping")
        return result
    # Use the caller's timestamp (plus optional clock offset) or "now".
    timestamp = math.floor(time.time())
    if 'timestamp' in options:
        offset = 0
        if 'localtimeOffsetMsec' in options:
            offset = int(options['localtimeOffsetMsec'])
        timestamp = math.floor(options['timestamp'] + offset)
    if 'nonce' not in options:
        options['nonce'] = hcrypto.random_string(6)
    url_parts = util.parse_normalized_url(url)
    # Default all optional artifact inputs to None so they are always present.
    for optional_key in ('hash', 'ext', 'app', 'dlg'):
        if optional_key not in options:
            options[optional_key] = None
    resource = url_parts['resource']
    log.debug('parsed URL parts: %s' % pprint.pformat(url_parts))
    artifacts = {
        'ts': int(timestamp),
        'nonce': options['nonce'],
        'method': method,
        'resource': resource,
        'host': url_parts['hostname'],
        'port': url_parts['port'],
        'hash': options['hash'],
        'ext': options['ext'],
        'app': options['app'],
        'dlg': options['dlg']
    }
    result['artifacts'] = artifacts
    # Compute the payload hash only if a payload (but no pre-computed hash)
    # was supplied.
    if artifacts['hash'] is None and 'payload' in options:
        if 'contentType' not in options:
            options['contentType'] = 'text/plain'
        log.debug('about to hash payload: %s' % options['payload'])
        log.debug('algorithm=%s, contentType=%s'
                  % (cred['algorithm'], options['contentType']))
        artifacts['hash'] = hcrypto.calculate_payload_hash(
            options['payload'], cred['algorithm'], options['contentType'])
    log.debug('artifacts=%s' % pprint.pformat(artifacts))
    mac = hcrypto.calculate_mac('header', cred, artifacts)
    _header = ''.join([
        'Hawk id="', cred['id'], '"',
        ', ts="', str(artifacts['ts']), '"',
        ', nonce="', artifacts['nonce'], '"',
    ])
    # BUG FIX: artifacts['hash'] is None when neither a 'hash' nor a
    # 'payload' option was given; the previous len(None) raised a TypeError.
    if artifacts['hash'] is not None and len(artifacts['hash']) > 0:
        _header += ', hash="' + artifacts['hash'] + '"'
    if artifacts['ext'] is not None and len(artifacts['ext']) > 0:
        util.check_header_attribute(artifacts['ext'])
        # Escape backslashes and newlines for safe header transport.
        h_ext = artifacts['ext'].replace('\\', '\\\\').replace('\n', '\\n')
        _header += ', ext="' + h_ext + '"'
    _header += ', mac="' + mac + '"'
    if artifacts['app'] is not None:
        _header += ', app="' + artifacts['app'] + '"'
    if artifacts['dlg'] is not None:
        _header += ', dlg="' + artifacts['dlg'] + '"'
    result['field'] = _header
    return result
def authenticate(response, credentials, artifacts, options=None):
    """Validate a server's response to a HAWK-signed request.

    :param response: dict with a 'headers' dict of the server response
    :param credentials: dict with 'id', 'key', 'algorithm'
    :param artifacts: object received from header()['artifacts']
    :param options: {
        payload: optional payload received
        required: specifies if a Server-Authorization header is required.
            Defaults to 'false'
    }
    :return: True if the response authenticates, else False.
    """
    if not isinstance(response, dict) or 'headers' not in response:
        return False
    if 'content-type' not in response['headers']:
        log.warn("response lacked content-type")
        response['headers']['content-type'] = 'text/plain'
    if options is None:
        options = {}
    if 'required' not in options:
        options['required'] = False
    # If the server sent a timestamp via WWW-Authenticate, verify its MAC.
    if 'www-authenticate' in response['headers']:
        www_auth_attrs = util.parse_authorization_header(
            response['headers']['www-authenticate'],
            ['ts', 'tsm', 'error'])
        if 'ts' in www_auth_attrs:
            ts_mac = hcrypto.calculate_ts_mac(www_auth_attrs['ts'],
                                              credentials)
            if not util.compare(ts_mac, www_auth_attrs['ts']):
                log.info(ts_mac + " didn't match " + www_auth_attrs['ts'])
                return False
    # Missing Server-Authorization is only fatal when the caller requires it.
    if 'server-authorization' not in response['headers'] and \
            False == options['required']:
        return True
    if 'server-authorization' not in response['headers']:
        log.info("Unable to verify, no server-authorization header")
        return False
    s_auth_attrs = util.parse_authorization_header(
        response['headers']['server-authorization'],
        ['mac', 'ext', 'hash'])
    if 'ext' in s_auth_attrs:
        artifacts['ext'] = s_auth_attrs['ext']
    else:
        artifacts['ext'] = ''
    # NOTE(review): this indexes s_auth_attrs['hash'] unconditionally; if
    # parse_authorization_header can omit 'hash', this raises KeyError before
    # the `'hash' not in s_auth_attrs` check below — confirm its contract.
    artifacts['hash'] = s_auth_attrs['hash']
    mac = hcrypto.calculate_mac('response', credentials, artifacts)
    if not util.compare(mac, s_auth_attrs['mac']):
        log.info("server mac mismatch " + mac + " != " + s_auth_attrs['mac'])
        return False
    # Without a payload to check, the MAC match above is sufficient.
    if 'payload' not in options:
        return True
    if 'hash' not in s_auth_attrs:
        return False
    content_type = response['headers']['content-type']
    p_mac = hcrypto.calculate_payload_hash(options['payload'],
                                           credentials['algorithm'],
                                           content_type)
    # Compare once (the original ran the constant-time compare twice).
    hashes_match = util.compare(p_mac, s_auth_attrs['hash'])
    if not hashes_match:
        log.info("p_mac " + p_mac + " != " + s_auth_attrs['hash'])
    return hashes_match
def get_bewit(uri, options=None):
    """
    Generate a bewit value for a given URI.

    Compatibility Note: HAWK exposes this as hawk.uri.getBewit

    :param uri: 'http://example.com/resource?a=b' or object from Url.parse()
    :param options: dict with required keys 'credentials' (itself a dict with
        'id', 'key', 'algorithm') and 'ttl_sec' (TTL in seconds), plus
        optional 'ext' (application specific data) and
        'localtime_offset_msec' (offset to sync with server time).
    :return: the bewit string, or '' when the arguments are invalid.
    :raises BadRequest: if uri/options are None or credentials incomplete.
    """
    # valid_bewit_args also normalizes the optional keys in-place.
    if not valid_bewit_args(uri, options):
        return ''
    credentials = options['credentials']
    for required_key in ('id', 'key', 'algorithm'):
        if required_key not in credentials:
            raise BadRequest
    # Expiration = offset-corrected "now" plus the requested TTL.
    offset_now = time.time() + int(options['localtime_offset_msec'])
    expiration = offset_now + int(options['ttl_sec'])
    parsed = util.parse_normalized_url(uri)
    resource = parsed['path']
    if len(parsed['query']) > 0:
        resource = resource + '?' + parsed['query']
    artifacts = {
        'ts': int(expiration),
        'nonce': '',
        'method': 'GET',
        'resource': resource,
        'host': parsed['hostname'],
        'port': str(parsed['port']),
        'ext': options['ext']
    }
    return hcrypto.calculate_bewit(credentials, artifacts, expiration)
def valid_bewit_args(uri, options):
    """Validate and normalize the arguments for get_bewit.

    Raises BadRequest on None inputs; returns False on wrong types or a
    missing 'ttl_sec'; otherwise fills in defaults for 'ext' ('') and
    'localtime_offset_msec' (0) in-place and returns True.
    """
    if uri is None or options is None:
        raise BadRequest
    if not isinstance(uri, basestring) or not isinstance(options, dict):
        return False
    if 'ttl_sec' not in options:
        return False
    # Normalize optional keys (missing or explicit None -> default).
    if options.get('ext') is None:
        options['ext'] = ''
    if options.get('localtime_offset_msec') is None:
        options['localtime_offset_msec'] = 0
    return True
| 29.619355 | 77 | 0.596275 |
2264e6b9662a0e7f52e9ba9f853bb7161aa06324 | 10,705 | py | Python | peregrinearb/tests/test_bellmannx.py | Stakedllc/peregrine-1 | e84971bae80aa2c03242a54f8451af0c1a3c4aa5 | [
"MIT"
] | 15 | 2018-06-23T22:15:49.000Z | 2022-01-24T16:04:55.000Z | peregrinearb/tests/test_bellmannx.py | Stakedllc/peregrine-1 | e84971bae80aa2c03242a54f8451af0c1a3c4aa5 | [
"MIT"
] | null | null | null | peregrinearb/tests/test_bellmannx.py | Stakedllc/peregrine-1 | e84971bae80aa2c03242a54f8451af0c1a3c4aa5 | [
"MIT"
] | 2 | 2018-11-12T15:34:54.000Z | 2020-06-05T09:28:13.000Z | from unittest import TestCase
from peregrinearb import bellman_ford_multi, multi_digraph_from_json, multi_digraph_from_dict, \
calculate_profit_ratio_for_path, bellman_ford, NegativeWeightFinder, NegativeWeightDepthFinder, \
print_profit_opportunity_for_path
import json
import networkx as nx
import math
import random
from ..utils import wss_add_market, wss_update_graph
def graph_from_dict(graph_dict):
    """Reconstruct a networkx graph from a serialized dict.

    :param graph_dict: dict with keys 'graph_type' (one of 'MultiDiGraph',
        'MultiGraph', 'DiGraph', 'Graph', 'other') and 'graph_dict'
        (the dict-of-dicts adjacency data).
    :raises ValueError: if 'graph_type' is missing or not an accepted value.
    """
    if 'graph_type' not in graph_dict:
        raise ValueError('graph_dict must contain key "graph_type"')
    graph_type = graph_dict['graph_type']
    if graph_type == 'MultiDiGraph':
        return multi_digraph_from_dict(graph_dict['graph_dict'])
    if graph_type == 'MultiGraph':
        return nx.from_dict_of_dicts(graph_dict['graph_dict'],
                                     multigraph_input=True)
    # The remaining accepted types all deserialized identically before; keep
    # that behavior but collapse the duplicated branches.
    # NOTE(review): 'DiGraph' therefore yields an undirected nx.Graph (no
    # create_using=nx.DiGraph) — preserved as-is; confirm whether intended.
    if graph_type in ('DiGraph', 'Graph', 'other'):
        return nx.from_dict_of_dicts(graph_dict['graph_dict'])
    raise ValueError("the value for 'graph_type' in graph_dict is not of the accepted values.")
def digraph_from_multi_graph_json(file_name):
    """
    Build a DiGraph from a JSON file representing a MultiDiGraph in which
    each pair of nodes has at most two edges (one in each direction).

    :param file_name: path to the JSON adjacency file
    """
    with open(file_name) as json_file:
        adjacency = json.load(json_file)
    graph = nx.DiGraph()
    for tail, neighbors in adjacency.items():
        for head, edge_dict in neighbors.items():
            # edge_dict maps multi-edge keys to attribute dicts; with at most
            # one edge per direction, each attribute dict becomes one edge.
            for _edge_key, attributes in edge_dict.items():
                graph.add_edge(tail, head, **attributes)
    return graph
def build_graph_from_edge_list(edges, fee):
    """Build a DiGraph from (tail, head, rate, depth, trade_type) tuples.

    Edge weights/depths are negative logs (Bellman-Ford convention); the
    market name and no-fee rate depend on whether the edge is a SELL or BUY.
    """
    graph = nx.DiGraph()
    for tail, head, rate, depth, trade_type in edges:
        if trade_type == 'SELL':
            market_name = '{}/{}'.format(tail, head)
            no_fee_rate = rate
        else:
            market_name = '{}/{}'.format(head, tail)
            no_fee_rate = 1 / rate
        graph.add_edge(
            tail,
            head,
            weight=-math.log(rate * (1 - fee)),
            depth=-math.log(depth),
            trade_type=trade_type,
            fee=fee,
            no_fee_rate=no_fee_rate,
            market_name=market_name,
        )
    return graph
class TestBellmanFordMultiGraph(TestCase):
    # Sanity checks for bellman_ford_multi against the fixture graph in
    # 'test_multigraph.json' (must exist in the working directory).
    def test_path_beginning_equals_end(self):
        """Every reported negative cycle must start and end on the same node."""
        graph = multi_digraph_from_json('test_multigraph.json')
        for node in graph:
            new_graph, paths = bellman_ford_multi(graph, node)
            for path in paths:
                if path:
                    self.assertEqual(path[0], path[-1])
    def test_positive_ratio(self):
        """Every reported cycle must yield a profit ratio >= 1.0."""
        graph = multi_digraph_from_json('test_multigraph.json')
        for node in graph:
            new_graph, paths = bellman_ford_multi(graph, node)
            for path in paths:
                if path:
                    # assert that the path is a negative weight cycle
                    ratio = calculate_profit_ratio_for_path(new_graph, path)
                    # python float precision may round some numbers to 1.0.
                    self.assertGreaterEqual(ratio, 1.0)
class TestBellmannx(TestCase):
def test_negative_weight_depth_finder(self):
"""
Tests NegativeWeightDepthFinder
"""
final_edge_weight = 0.25
edges = [
# tail node, head node, no_fee_rate, depth (in terms of profited currency), trade_type
['A', 'B', 2, 3, 'SELL'],
['B', 'C', 3, 4, 'SELL'],
['C', 'D', 1 / 7, 14, 'BUY'],
['D', 'E', 0.2, 3 / 2, 'BUY'],
['E', 'F', 4, 3, 'SELL'],
['F', 'G', 6, 0.8, 'BUY'],
['G', 'H', 0.75, 6, 'BUY'],
['H', 'A', final_edge_weight, 20, 'BUY'],
]
fee = 0.01
# ratio for the rates from A -> H
def get_edge_ratio():
constant_ratio = 1
for edge in edges:
constant_ratio *= edge[2] * (1 - fee)
return constant_ratio
for i in range(10):
edges[-1][2] = final_edge_weight * (i + 1)
graph = build_graph_from_edge_list(edges, fee)
finder = NegativeWeightDepthFinder(graph)
paths = finder.bellman_ford('A')
edge_ratio = get_edge_ratio()
if edge_ratio <= 1:
with self.assertRaises(StopIteration):
paths.__next__()
for path in paths:
# assert that if a path is found, only one is found.
with self.assertRaises(StopIteration):
paths.__next__()
ratio = calculate_profit_ratio_for_path(graph, path['loop'], depth=True,
starting_amount=math.exp(-path['minimum']))
self.assertAlmostEqual(ratio, edge_ratio)
def test_negative_weight_depth_finder_b(self):
"""
Another test for NegativeWeightDepthFinder
"""
node_count = 30
complete_graph = nx.complete_graph(node_count)
graph = nx.DiGraph()
for edge in complete_graph.edges():
# Only use 1 / 3 of the edges, but use all edges connected to 0 to ensure all nodes reachable
if random.random() < 2 / 3 and not (edge[0] == 0 or edge[1] == 0):
continue
random_weight = random.uniform(-10, 6)
random_depth = random.uniform(0, 15)
random_depth_b = random.uniform(-15, 0)
if random_weight < 0:
random_depth *= -1
random_depth_b *= -1
graph.add_edge(edge[0], edge[1], weight=random_weight, depth=random_depth)
graph.add_edge(edge[1], edge[0], weight=-random_weight, depth=-random_depth_b)
finder = NegativeWeightDepthFinder(graph)
# does not matter which source is used, can be any number from 0 to 49. we use 0.
paths = finder.bellman_ford(0)
def calculate_ratio(found_path):
total = 0
for i in range(len(found_path) - 1):
start = found_path[i]
end = found_path[i + 1]
total += graph[start][end]['weight']
return total
for path in paths:
ratio = calculate_ratio(path['loop'])
self.assertLess(ratio, 0.0)
def test_negative_weight_depth_finder_c(self):
"""Tests NegativeWeightDepthFinder as it is used in arbitrag"""
symbols = ['BTC/USD', 'ETH/USD', 'ETH/BTC', 'LTC/BTC', 'LTC/USD', 'ETH/LTC', 'DRC/BTC', 'DRC/ETH']
markets = {symbol: {
'volume_increment': 10 ** -8,
'price_increment': 10 ** -8,
'min_market_funds': 10 ** -16,
'taker_fee': 0.001,
'maker_fee': 0,
} for symbol in symbols}
graph = nx.DiGraph()
[wss_add_market(graph, k, v) for k, v in markets.items()]
wss_update_graph(graph, 'BTC/USD', 'asks', 5000, 0.5)
wss_update_graph(graph, 'ETH/USD', 'bids', 500, 6)
wss_update_graph(graph, 'ETH/BTC', 'asks', 0.14, 8)
nwdf = NegativeWeightDepthFinder(graph)
paths = nwdf.bellman_ford('BTC')
for p in paths:
print(p)
def test_ratio(self):
G = nx.DiGraph()
G.add_edge('A', 'B', weight=-math.log(2))
G.add_edge('B', 'C', weight=-math.log(3))
G.add_edge('C', 'A', weight=-math.log(1 / 4))
paths = bellman_ford(G, 'A', unique_paths=True)
path_count = 0
for path in paths:
path_count += 1
self.assertAlmostEqual(calculate_profit_ratio_for_path(G, path), 1.5)
# assert that unique_paths allows for only one path
self.assertEqual(path_count, 1)
class TestCalculateProfitRatioForPath(TestCase):
def test_calculate_profit_ratio_for_path(self):
graph = nx.DiGraph()
edges = [
# tail node, head node, no_fee_rate, depth (in terms of currency traded), trade_type
['A', 'B', 2, 3, 'SELL'],
['B', 'C', 3, 4, 'SELL'],
['C', 'D', 1 / 7, 14, 'BUY'],
['D', 'E', 0.2, 3 / 2, 'BUY'],
['E', 'F', 4, 3, 'SELL'],
['F', 'G', 6, 0.8, 'BUY'],
['G', 'H', 0.75, 6, 'BUY'],
['H', 'A', 3, 20, 'BUY'],
]
fee = 0.01
for edge in edges:
sell = edge[4] == 'SELL'
graph.add_edge(
edge[0], edge[1], weight=-math.log(edge[2] * (1 - fee)), depth=-math.log(edge[3]), trade_type=edge[4],
fee=fee, no_fee_rate=edge[2] if sell else 1 / edge[2],
market_name='{}/{}'.format(edge[0], edge[1]) if sell else '{}/{}'.format(edge[1], edge[0])
)
path = ['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'A']
starting_amount = 3
ratio, path_data = calculate_profit_ratio_for_path(graph, path, depth=True,
starting_amount=starting_amount, gather_path_data=True)
self.assertEqual(path_data[0]['rate'], 2)
self.assertEqual(path_data[0]['volume'], 3)
self.assertEqual(path_data[0]['order'], 'SELL')
self.assertEqual(path_data[1]['rate'], 3)
self.assertEqual(path_data[1]['volume'], 4)
self.assertEqual(path_data[1]['order'], 'SELL')
self.assertEqual(path_data[2]['rate'], 7)
# AlmostEqual, because of math.log, path_data[2]['volume'] == 1.697142857142857. 11.88 / 7 == 1.6971428571428573
self.assertAlmostEqual(path_data[2]['volume'], 11.88 / 7)
self.assertEqual(path_data[2]['order'], 'BUY')
self.assertEqual(path_data[3]['rate'], 5)
self.assertEqual(path_data[3]['volume'], 0.3)
self.assertEqual(path_data[3]['order'], 'BUY')
self.assertEqual(path_data[4]['rate'], 4)
self.assertEqual(path_data[4]['volume'], 0.297)
self.assertEqual(path_data[4]['order'], 'SELL')
self.assertEqual(path_data[5]['rate'], 1 / 6)
# If Equal instead of AlmostEqual, will raise 4.800000000000001 != 4.8
self.assertAlmostEqual(path_data[5]['volume'], 4.8)
self.assertEqual(path_data[5]['order'], 'BUY')
self.assertEqual(path_data[6]['rate'], 4 / 3)
self.assertAlmostEqual(path_data[6]['volume'], 4.8 * 0.99 * 0.75)
self.assertEqual(path_data[6]['order'], 'BUY')
self.assertEqual(path_data[7]['rate'], 1 / 3)
self.assertAlmostEqual(path_data[7]['volume'], 3.564 * 0.99 * 3)
self.assertEqual(path_data[7]['order'], 'BUY')
self.assertAlmostEqual(ratio, 3.564 * 0.99 * 3 * 0.99 / starting_amount)
| 39.356618 | 120 | 0.572723 |
9264dc833bdf706cf1cbd788484a431d4a3a4eae | 1,351 | py | Python | tests/test_tools_get_platform.py | lucatrv/hooks4git | a1cac75d4119d82ce26dfde72ca3404c1064c3de | [
"MIT"
] | 32 | 2018-07-09T19:45:56.000Z | 2022-02-11T19:38:46.000Z | tests/test_tools_get_platform.py | lucatrv/hooks4git | a1cac75d4119d82ce26dfde72ca3404c1064c3de | [
"MIT"
] | 63 | 2018-07-06T19:09:24.000Z | 2020-12-14T19:54:00.000Z | tests/test_tools_get_platform.py | lucatrv/hooks4git | a1cac75d4119d82ce26dfde72ca3404c1064c3de | [
"MIT"
] | 3 | 2020-03-14T21:28:40.000Z | 2021-11-18T22:00:53.000Z | # -*- coding: utf-8 -*-
from tests import BaseTestCase
from hooks4git.tools import get_platform
import mock
class GetPlatformTestCase(BaseTestCase):
    """Unit tests for ``hooks4git.tools.get_platform``.

    Each test patches ``sys.platform`` (and, where relevant, the process
    environment) and checks the normalized platform name that is returned.
    ``assertEqual`` is used instead of ``assertTrue(a == b)`` so a failure
    reports both the actual and the expected value.
    """

    @mock.patch("sys.platform", "FakeOS")
    def test_get_platform_invalid(self):
        # unrecognized platforms are passed through unchanged
        self.assertEqual(get_platform(), "FakeOS")

    @mock.patch("sys.platform", "linux")
    def test_get_platform_linux(self):
        self.assertEqual(get_platform(), "Linux")

    @mock.patch("sys.platform", "linux1")
    def test_get_platform_linux1(self):
        self.assertEqual(get_platform(), "Linux")

    @mock.patch("sys.platform", "linux2")
    def test_get_platform_linux2(self):
        self.assertEqual(get_platform(), "Linux")

    @mock.patch("sys.platform", "darwin")
    def test_get_platform_darwin(self):
        self.assertEqual(get_platform(), "Mac")

    @mock.patch("sys.platform", "win32")
    def test_get_platform_win32(self):
        self.assertEqual(get_platform(), "Windows")

    @mock.patch("sys.platform", "win32")
    @mock.patch.dict("os.environ", {"MSYSTEM": "MINGW64"})
    def test_get_platform_win32git(self):
        # MSYSTEM marks an MSYS/MinGW (Git Bash) shell on Windows
        self.assertEqual(get_platform(), "WindowsGitBash")
| 31.418605 | 58 | 0.659511 |
cf4da0d07a3889b9876a3f30b1dacf638143c5d5 | 2,539 | py | Python | tests/models.py | pawnhearts/django-ordered-model | ed4cb3d65d918459fbf227dfae06ccc7ee1e4d1c | [
"BSD-3-Clause"
] | 2 | 2018-11-24T11:16:01.000Z | 2019-02-25T18:27:00.000Z | tests/models.py | pawnhearts/django-ordered-model | ed4cb3d65d918459fbf227dfae06ccc7ee1e4d1c | [
"BSD-3-Clause"
] | null | null | null | tests/models.py | pawnhearts/django-ordered-model | ed4cb3d65d918459fbf227dfae06ccc7ee1e4d1c | [
"BSD-3-Clause"
] | 1 | 2022-03-10T15:11:00.000Z | 2022-03-10T15:11:00.000Z | from django.db import models
from ordered_model.models import OrderedModel, OrderedModelBase
class Item(OrderedModel):
name = models.CharField(max_length=100)
class Question(models.Model):
pass
class TestUser(models.Model):
pass
class Answer(OrderedModel):
    """An ordered answer; ordering restarts for each (question, user) pair."""

    question = models.ForeignKey(
        Question, on_delete=models.CASCADE, related_name="answers"
    )
    user = models.ForeignKey(TestUser, on_delete=models.CASCADE, related_name="answers")
    # the `order` value is scoped per distinct (question, user) combination
    order_with_respect_to = ("question", "user")

    class Meta:
        ordering = ("question", "user", "order")

    def __str__(self):
        # Python 3 / modern Django call __str__; the original __unicode__
        # (a Python 2 protocol method) was never invoked on Python 3.
        return "Answer #{0:d} of question #{1:d} for user #{2:d}".format(
            self.order, self.question_id, self.user_id
        )

    # backward-compatible alias for any caller still using the old name
    __unicode__ = __str__
class CustomItem(OrderedModel):
id = models.CharField(max_length=100, primary_key=True)
name = models.CharField(max_length=100)
modified = models.DateTimeField(null=True, blank=True)
class CustomOrderFieldModel(OrderedModelBase):
sort_order = models.PositiveIntegerField(editable=False, db_index=True)
name = models.CharField(max_length=100)
order_field_name = "sort_order"
class Meta:
ordering = ("sort_order",)
class Topping(models.Model):
name = models.CharField(max_length=100)
class Pizza(models.Model):
name = models.CharField(max_length=100)
toppings = models.ManyToManyField(Topping, through="PizzaToppingsThroughModel")
class PizzaToppingsThroughModel(OrderedModel):
pizza = models.ForeignKey(Pizza, on_delete=models.CASCADE)
topping = models.ForeignKey(Topping, on_delete=models.CASCADE)
order_with_respect_to = "pizza"
class Meta:
ordering = ("pizza", "order")
class BaseQuestion(OrderedModel):
order_class_path = __module__ + ".BaseQuestion"
question = models.TextField(max_length=100)
class Meta:
ordering = ("order",)
class MultipleChoiceQuestion(BaseQuestion):
good_answer = models.TextField(max_length=100)
wrong_answer1 = models.TextField(max_length=100)
wrong_answer2 = models.TextField(max_length=100)
wrong_answer3 = models.TextField(max_length=100)
class OpenQuestion(BaseQuestion):
answer = models.TextField(max_length=100)
class ItemGroup(models.Model):
user = models.ForeignKey(
TestUser, on_delete=models.CASCADE, related_name="item_groups"
)
class GroupedItem(OrderedModel):
group = models.ForeignKey(ItemGroup, on_delete=models.CASCADE, related_name="items")
order_with_respect_to = "group__user"
| 26.726316 | 88 | 0.724301 |
b7afc9ff165c9a974c1c5e14d36f4cba5e63eea1 | 110 | py | Python | lib/IPCE/Lib/fepy/fileobject.py | AustralianDisabilityLimited/MultiversePlatform | 7e1aad33d48b9e47f3db2ca638cb57592336ddb7 | [
"MIT"
] | 33 | 2015-02-16T02:52:08.000Z | 2022-02-18T08:46:32.000Z | lib/IPCE/Lib/fepy/fileobject.py | bensku/MultiversePlatform | 7e1aad33d48b9e47f3db2ca638cb57592336ddb7 | [
"MIT"
] | 1 | 2017-09-09T18:50:23.000Z | 2020-12-29T18:13:56.000Z | lib/IPCE/Lib/fepy/fileobject.py | bensku/MultiversePlatform | 7e1aad33d48b9e47f3db2ca638cb57592336ddb7 | [
"MIT"
] | 31 | 2015-02-07T16:20:24.000Z | 2022-02-23T15:02:43.000Z | def install():
import socket
from _fileobject import _fileobject
socket._fileobject = _fileobject
| 22 | 39 | 0.745455 |
05540894bca42dcfc861ed6c058d90ecfd588441 | 7,244 | py | Python | kaggle/mobike/track_predict.py | uptonking/code-playing | 30a7c80f5a160a24b49574635e31682e0922bb7d | [
"MIT"
] | 2 | 2017-11-15T07:21:48.000Z | 2017-11-24T00:17:55.000Z | kaggle/mobike/track_predict.py | uptonking/code-playing | 30a7c80f5a160a24b49574635e31682e0922bb7d | [
"MIT"
] | null | null | null | kaggle/mobike/track_predict.py | uptonking/code-playing | 30a7c80f5a160a24b49574635e31682e0922bb7d | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Created on Thu Jul 6 13:41:45 2017
@author: ZHILANGTAOSHA
"""
import pandas as pd
import geohash
from math import radians, cos, sin, asin, sqrt
# import itertools
import numpy as np
import math
import xgboost as xgb
def distance_haver(lon1, lat1, lon2, lat2):
    """Approximate ground distance in meters between two lon/lat points.

    Despite the name this is not the haversine formula: it is an
    equirectangular (flat-earth) approximation that scales the longitude
    difference by the cosine of the mean latitude.  57.2958 is degrees per
    radian and 6371004.0 is an earth radius in meters.  Works elementwise
    on numpy arrays as well as on scalars.
    """
    deg_per_rad = 57.2958
    earth_radius = 6371004.0
    lon_diff = np.absolute(lon1 - lon2)   # longitude difference (degrees)
    lat_diff = np.absolute(lat1 - lat2)   # latitude difference (degrees)
    mean_lat = (lat1 + lat2) / 2.0
    east_west = (lon_diff / deg_per_rad) * earth_radius * np.cos(mean_lat / deg_per_rad)
    north_south = earth_radius * (lat_diff / deg_per_rad)
    return (east_west ** 2 + north_south ** 2) ** 0.5
def train_tztq(train):
    """Build a per-location profile frame from trip records in *train*.

    Trips are grouped by the unordered (start, end) geohash pair, the
    geohashes are decoded to coordinates, pairwise distances are computed,
    and per-location aggregates are produced: pair count (``gs``), total
    trip count (``zz_gs``) and max/min/median trip distance.  The result is
    the concatenation of both keyings (pair max and pair min), indexed by
    location geohash.

    NOTE(review): ``.agg`` with a renaming dict on a Series is deprecated in
    newer pandas -- confirm the pinned pandas version before upgrading.
    """
    zh = train[['geohashed_start_loc', 'geohashed_end_loc']]
    zh.loc[:, 'pl'] = 1
    zh = zh.groupby(['geohashed_start_loc', 'geohashed_end_loc'])['pl'].count().reset_index()
    # unordered pair key: lexicographic max/min of the two geohashes
    zh['max_dd'] = zh[['geohashed_start_loc', 'geohashed_end_loc']].max(axis=1)
    zh['min_dd'] = zh[['geohashed_start_loc', 'geohashed_end_loc']].min(axis=1)
    # count the links between the same two points (both directions folded together)
    zh = zh.groupby(['max_dd', 'min_dd'])['pl'].agg({'zz': np.sum, 'gs': np.size}).reset_index()
    print(zh.shape)
    # decode the geohashes back to longitude/latitude
    zh.loc[:, 'start_jw'] = zh.max_dd.map(geohash.decode)
    zh.loc[:, 'start_j'] = zh.start_jw.map(lambda x: x[0])
    zh.loc[:, 'start_w'] = zh.start_jw.map(lambda x: x[1])
    zh.loc[:, 'end_jw'] = zh.min_dd.map(geohash.decode)
    zh.loc[:, 'end_j'] = zh.end_jw.map(lambda x: x[0])
    zh.loc[:, 'end_w'] = zh.end_jw.map(lambda x: x[1])
    del zh['start_jw'], zh['end_jw']
    # compute the concrete distance between the coordinate pairs
    zh.loc[:, 'juli'] = zh.apply(lambda x: distance_haver(x['start_j'], x['start_w'], x['end_j'], x['end_w']), axis=1)
    # build the final location profile
    # number of distinct partner locations, keyed by the pair maximum
    max_dd_tz = zh.groupby('max_dd').agg({'gs': np.size})
    # total number of trips touching the location in the training set
    max_dd_tz['zz_gs'] = zh.groupby('max_dd')['zz'].agg({'zz_gs': np.sum})
    # distance statistics:
    # longest trip from the location
    max_dd_tz['juli_max'] = zh.groupby('max_dd')['juli'].max()
    # shortest trip from the location
    max_dd_tz['juli_min'] = zh.groupby('max_dd')['juli'].min()
    # median trip distance
    max_dd_tz['juli_median'] = zh.groupby('max_dd')['juli'].median()
    # same aggregates, keyed by the pair minimum
    min_dd_tz = zh.groupby('min_dd').agg({'gs': np.size})
    # total number of trips touching the location in the training set
    min_dd_tz['zz_gs'] = zh.groupby('min_dd')['zz'].agg({'zz_gs': np.sum})
    # distance statistics:
    # longest trip from the location
    min_dd_tz['juli_max'] = zh.groupby('min_dd')['juli'].max()
    # shortest trip from the location
    min_dd_tz['juli_min'] = zh.groupby('min_dd')['juli'].min()
    # median trip distance
    min_dd_tz['juli_median'] = zh.groupby('min_dd')['juli'].median()
    # stack both keyings into one profile frame
    dd_tz = pd.concat([max_dd_tz, min_dd_tz])
    return dd_tz
def datateime_slice(train):
hour_sx = {0: 1, 1: 1, 2: 1, 3: 1, 4: 1, 5: 1, 6: 2, 7: 4, 8: 4, 9: 4, 10: 2, 11: 2, 12: 5, 13: 5, 14: 2, 15: 3,
16: 3, 17: 6, 18: 6, 19: 6, 20: 3, 21: 3, 22: 1, 23: 1}
week_sx = {0: 1, 1: 1, 2: 1, 3: 1, 4: 1, 5: 2, 6: 2}
train.loc[:, 'starttime'] = pd.to_datetime(train.starttime)
train['weekday_time'] = train.starttime.dt.weekday
train['hour_time'] = train.starttime.dt.hour
train['hour_sx'] = train.hour_time.map(hour_sx)
train['week_sx'] = train.weekday_time.map(week_sx)
return (train)
def cal_distance_start_end(zh):
    """Compute the distance between start and candidate-end geohash pairs.

    Rows of *zh* whose candidate column ``tj_dd`` is null are dropped, the
    remaining pairs are de-duplicated, both geohashes are decoded to
    coordinates, and the distance is stored in a new ``juli`` column.
    Assumes column 0 is the start geohash and column 1 is the candidate
    end geohash (matches how callers build ``zh``).
    """
    js = zh[~zh.tj_dd.isnull()].drop_duplicates()
    # decode the geohashes to longitude/latitude
    js.loc[:, 'start_jw'] = js.iloc[:, 0].map(geohash.decode)
    js.loc[:, 'start_j'] = js.start_jw.map(lambda x: x[0])
    js.loc[:, 'start_w'] = js.start_jw.map(lambda x: x[1])
    js.loc[:, 'end_jw'] = js.iloc[:, 1].map(geohash.decode)
    js.loc[:, 'end_j'] = js.end_jw.map(lambda x: x[0])
    js.loc[:, 'end_w'] = js.end_jw.map(lambda x: x[1])
    del js['start_jw'], js['end_jw']
    # compute the concrete distance between the coordinate pairs
    js.loc[:, 'juli'] = js.apply(lambda x: distance_haver(x['start_j'], x['start_w'], x['end_j'], x['end_w']), axis=1)
    return js
train = pd.read_csv('/root/Documents/play/mobike/source/train.csv')
# test = pd.read_csv('../input/test.csv')
# example = pd.read_csv('../input/sample_submission.csv',header=None)
print(train.shape)
train1 = ['2017-05-14',
'2017-05-15',
'2017-05-16',
'2017-05-12',
'2017-05-13',
'2017-05-10',
'2017-05-11']
train2 = ['2017-05-18',
'2017-05-19',
'2017-05-23',
'2017-05-24',
'2017-05-20',
'2017-05-22',
'2017-05-21']
train2 = train[train.starttime.map(lambda x: x[:10]).isin(train2)]
train = train[train.starttime.map(lambda x: x[:10]).isin(train1)]
train2 = datateime_slice(train2)
train = datateime_slice(train)
pj1 = train[['geohashed_start_loc', 'geohashed_end_loc', 'hour_sx', 'week_sx']]
pj1 = pj1.rename(columns={'geohashed_end_loc': 'tj_dd'})
pj1 = pj1.drop_duplicates()
z1 = pd.merge(train2, pj1, on=['geohashed_start_loc', 'hour_sx', 'week_sx'], how='left')
del z1['starttime']
z1_not_null = z1[z1.tj_dd.isnull()]
z1 = z1[~z1.tj_dd.isnull()]
zh = z1.loc[:, ('geohashed_start_loc', 'tj_dd')]
js = cal_distance_start_end(zh)
z1 = pd.merge(z1, js[['geohashed_start_loc', 'tj_dd', 'juli']], on=['geohashed_start_loc', 'tj_dd'], how='left')
train_dd_tz = train_tztq(train)
train_dd_tz = train_dd_tz.reset_index()
train_dd_tz = train_dd_tz.rename(columns={'index': 'geohashed_start_loc'})
wpj = train_dd_tz[train_dd_tz.zz_gs > 100]
z1_all_more = z1[z1.geohashed_start_loc.isin(wpj.geohashed_start_loc)]
z1 = z1[~z1.geohashed_start_loc.isin(wpj.geohashed_start_loc)]
z1 = pd.merge(z1, train_dd_tz, on='geohashed_start_loc')
train_dd_tz = train_dd_tz.rename(columns={'geohashed_start_loc': 'tj_dd'})
z1 = pd.merge(z1, train_dd_tz, on='tj_dd')
z1['label'] = z1.geohashed_end_loc == z1.tj_dd
z1.label = z1.label.map(int)
featurelist = [i for i in z1.columns if
i not in ['orderid', 'geohashed_start_loc', 'geohashed_end_loc', 'tj_dd', 'label']]
params = {
'objective ': 'binary:logistic',
'eval_metric': 'map',
'max_depth': 11,
'min_child_weight': 0.9,
'max_delta_step': 10,
'eta': 0.3
}
del z1['tj_dd']
from sklearn.cross_validation import train_test_split
x, y = train_test_split(z1, test_size=0.3)
x = xgb.DMatrix(x[featurelist], label=x.label)
y = xgb.DMatrix(y[featurelist], label=y.label)
test = pd.read_csv('/root/Documents/play/mobike/source/test.csv')
test = datateime_slice(test)
pj1 = train2[['geohashed_start_loc', 'geohashed_end_loc', 'hour_sx', 'week_sx']]
pj1 = pj1.rename(columns={'geohashed_end_loc': 'tj_dd'})
pj1 = pj1.drop_duplicates()
z2 = pd.merge(test, pj1, on=['geohashed_start_loc', 'hour_sx', 'week_sx'], how='left')
del z2['starttime']
zh = z2.loc[:, ('geohashed_start_loc', 'tj_dd')]
js = cal_distance_start_end(zh)
z2 = pd.merge(z2, js[['geohashed_start_loc', 'tj_dd', 'juli']], on=['geohashed_start_loc', 'tj_dd'], how='left')
train_dd_tz = train_tztq(train2)
train_dd_tz = train_dd_tz.reset_index()
train_dd_tz = train_dd_tz.rename(columns={'index': 'geohashed_start_loc'})
z2 = pd.merge(z2, train_dd_tz, on='geohashed_start_loc')
train_dd_tz = train_dd_tz.rename(columns={'geohashed_start_loc': 'tj_dd'})
z2 = pd.merge(z2, train_dd_tz, on='tj_dd')
del z2['geohashed_start_loc']
# z2 = xgb.DMatrix(z2[featurelist])
watchlist = [(x, 'train'), (y, 'eval')]
evals_result = {}
num_round = 1000
bst = xgb.train(params, x, num_round, watchlist, evals_result=evals_result, early_stopping_rounds=20, maximize=True)
| 33.229358 | 118 | 0.637079 |
2a089a363f0c91f4fe876a4511a5d974255d51b5 | 592 | py | Python | backend/api/apps/work/views.py | Jeffallan/timekeeper | ecad4dba84781a9f5b592cd639ad43dc35708e11 | [
"Apache-2.0"
] | 1 | 2022-02-13T21:13:10.000Z | 2022-02-13T21:13:10.000Z | backend/api/apps/work/views.py | Jeffallan/timekeeper | ecad4dba84781a9f5b592cd639ad43dc35708e11 | [
"Apache-2.0"
] | 8 | 2021-11-14T20:00:25.000Z | 2021-12-04T18:52:18.000Z | backend/api/apps/work/views.py | Jeffallan/timekeeper | ecad4dba84781a9f5b592cd639ad43dc35708e11 | [
"Apache-2.0"
] | null | null | null | from rest_framework import viewsets
from .models import WorkPerformed
from .serializers import WorkPerformedSerializer
from api.apps.users.models import User
from dry_rest_permissions.generics import DRYPermissions
class WorkPerformedViewSet(viewsets.ModelViewSet):
    """CRUD endpoints for WorkPerformed entries.

    Access control is delegated to DRYPermissions (per-model permission
    hooks); non-privileged users only ever see their own records.
    """
    queryset = WorkPerformed.objects.all()
    serializer_class = WorkPerformedSerializer
    permission_classes = [DRYPermissions,]
    def get_queryset(self):
        # role == 1 appears to denote a privileged role that may list every
        # record -- TODO confirm against the User model's role choices
        if self.request.user.role == 1:
            return WorkPerformed.objects.all()
        # everyone else is restricted to work they provided themselves
        return WorkPerformed.objects.filter(provider=self.request.user.id)
| 34.823529 | 74 | 0.782095 |
69e02209d200ba02b88f9b757dd5f6d2b398b386 | 1,363 | py | Python | ssguan/ignitor/web/app.py | samuelbaizg/ssguan | 97def0609d61e40472554464470758b5fb9eca35 | [
"Apache-2.0"
] | 1 | 2015-07-14T14:24:05.000Z | 2015-07-14T14:24:05.000Z | ssguan/ignitor/web/app.py | samuelbaizg/ssguan | 97def0609d61e40472554464470758b5fb9eca35 | [
"Apache-2.0"
] | null | null | null | ssguan/ignitor/web/app.py | samuelbaizg/ssguan | 97def0609d61e40472554464470758b5fb9eca35 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
# Copyright 2015 www.suishouguan.com
#
# Licensed under the Private License (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://github.com/samuelbaizg/ssguan/blob/master/LICENSE
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from tornado.httpserver import HTTPServer
from tornado.ioloop import IOLoop
from tornado.web import Application
from ssguan.ignitor.web import config as web_config, logger
class WebApp(Application):
def __init__(self, host, handlers, **settings):
super(WebApp, self).__init__(
handlers=handlers, default_host=host, **settings)
def startup(host, port, *args, **kwargs):
"""
Start web server with host:port
"""
webapp=WebApp(host, web_config.get_handlers(),
**web_config.get_settings())
http_server=HTTPServer(webapp)
http_server.listen(port)
logger.info("web is running on %s:%s", host, port)
IOLoop.current().start()
| 34.075 | 76 | 0.69259 |
d45f756107edb45343f831a4d1d6782c25a2454c | 526 | py | Python | examples/spam/example_spam.py | Sowul/fc | bc4f42a555a3db78fb733761bf9443108e88f32a | [
"MIT"
] | 1 | 2017-10-12T11:38:47.000Z | 2017-10-12T11:38:47.000Z | examples/spam/example_spam.py | Sowul/fc | bc4f42a555a3db78fb733761bf9443108e88f32a | [
"MIT"
] | null | null | null | examples/spam/example_spam.py | Sowul/fc | bc4f42a555a3db78fb733761bf9443108e88f32a | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# data source: https://archive.ics.uci.edu/ml/datasets/Spambase
from __future__ import print_function
import numpy as np
import pandas as pd
from sklearn.ensemble import RandomForestClassifier
from fc import FeatureConstructor
clf = RandomForestClassifier(max_depth=3)
fc = FeatureConstructor(clf, 5, 10)
df = pd.read_csv('data.csv')
data = df.as_matrix()
X = np.array(data[:, :-1])
y = np.array(data[:, -1])
fc.fit(X, y)
fc.get_params('most_freq')
fc.get_params()
fc.plot() | 21.916667 | 63 | 0.728137 |
ae77d0a29c9bee2ca258a92a948ef4a04790263f | 2,365 | py | Python | tests/ongoing.py | jakkso/pytermgui | 039a25a29679a78a721b4ecc3925aadfda6771af | [
"MIT"
] | 771 | 2021-04-16T02:52:13.000Z | 2022-03-31T20:34:29.000Z | tests/ongoing.py | jakkso/pytermgui | 039a25a29679a78a721b4ecc3925aadfda6771af | [
"MIT"
] | 29 | 2021-09-29T01:14:25.000Z | 2022-03-12T18:11:13.000Z | tests/ongoing.py | jakkso/pytermgui | 039a25a29679a78a721b4ecc3925aadfda6771af | [
"MIT"
] | 22 | 2021-08-14T05:05:40.000Z | 2022-03-30T08:18:33.000Z | from __future__ import annotations
import sys
from typing import Any
from random import randint
from pytermgui import (
InputField,
get_widget,
MarkupFormatter,
MouseTarget,
Container,
keys,
markup,
Widget,
real_length,
Slider,
Label,
)
from pytermgui.ansi_interface import MouseAction
from pytermgui.window_manager import WindowManager, Window
from pytermgui.cmd import MarkupApplication
def main() -> None:
"""Main method"""
style = MarkupFormatter("[60]{item}")
for obj in [Window, Container]:
obj.set_style("corner", style)
obj.set_style("border", style)
manager = WindowManager()
manager.bind("*", lambda *_: manager.show_targets())
manager.bind(
keys.CTRL_H,
lambda *_: {
markup.alias("wm-title", str(randint(0, 255))),
markup.alias("wm-title", str(randint(0, 255))),
},
)
app = MarkupApplication(manager)
# manager.add(app.construct_window())
field: InputField
slider = Slider()
window = (
Window(width=50, title="root", is_modal=True)
+ f"[wm-title]This is a test window"
""
+ {"Button": ["label"]}
+ {"Toggle": [("one", "two")]}
+ {"Checkbox": [False]}
+ {
"Lock slider": [
slider.locked,
lambda checked: setattr(slider, "locked", checked),
]
}
+ {
"Show counter": [
slider.show_counter,
lambda checked: setattr(slider, "show_counter", checked),
]
}
# + {"Container test": Container(["one"], ["two"])}
+ ""
+ slider
+ ""
+ (
["Submit", lambda *_: manager.alert(field.value)],
["Reset", lambda *_: setattr(field, "value", "")],
["Exit", lambda *_: manager.exit()],
)
+ ["Hello", lambda *_: manager.exit()]
+ [
("Set Fullscreen", "Set Floating"),
lambda value: window.set_fullscreen("Floating" in value),
]
+ (Container() + "test" + ["other"])
).center()
manager.add(window)
manager.add(app.construct_window())
manager.bind(keys.CTRL_T, lambda manager, _: manager.add(window.copy()))
manager.run()
if __name__ == "__main__":
main()
| 24.894737 | 76 | 0.542072 |
47cbf3b58e4a19d914a680a7f595edb17fbb9e87 | 319 | py | Python | moto/redshift/__init__.py | jonnangle/moto-1 | 40b4e299abb732aad7f56cc0f680c0a272a46594 | [
"Apache-2.0"
] | 3 | 2020-08-04T20:29:41.000Z | 2020-11-09T09:28:19.000Z | moto/redshift/__init__.py | jonnangle/moto-1 | 40b4e299abb732aad7f56cc0f680c0a272a46594 | [
"Apache-2.0"
] | 17 | 2020-08-28T12:53:56.000Z | 2020-11-10T01:04:46.000Z | moto/redshift/__init__.py | jonnangle/moto-1 | 40b4e299abb732aad7f56cc0f680c0a272a46594 | [
"Apache-2.0"
] | 2 | 2017-03-02T05:59:52.000Z | 2020-09-03T13:25:44.000Z | from __future__ import unicode_literals
from .models import redshift_backends
from ..core.models import base_decorator, deprecated_base_decorator
redshift_backend = redshift_backends["us-east-1"]
mock_redshift = base_decorator(redshift_backends)
mock_redshift_deprecated = deprecated_base_decorator(redshift_backends)
| 39.875 | 71 | 0.871473 |
2e0684d94f4e373bc5a4e35b2b28ea85877b402c | 1,035 | py | Python | tests/dm_control/test_dm_control_tf_policy.py | shadiakiki1986/garage | 095bb5d25b32df1d44b47e99a78a9b01796941d9 | [
"MIT"
] | 3 | 2019-08-11T22:26:55.000Z | 2020-11-28T10:23:50.000Z | tests/dm_control/test_dm_control_tf_policy.py | shadiakiki1986/garage | 095bb5d25b32df1d44b47e99a78a9b01796941d9 | [
"MIT"
] | null | null | null | tests/dm_control/test_dm_control_tf_policy.py | shadiakiki1986/garage | 095bb5d25b32df1d44b47e99a78a9b01796941d9 | [
"MIT"
] | 2 | 2019-08-11T22:30:14.000Z | 2021-03-25T02:57:50.000Z | from dm_control.suite import ALL_TASKS
from garage.baselines import LinearFeatureBaseline
from garage.envs.dm_control import DmControlEnv
from garage.tf.algos import TRPO
from garage.tf.envs import TfEnv
from garage.tf.policies import GaussianMLPPolicy
from tests.fixtures import TfGraphTestCase
class TestDmControlTfPolicy(TfGraphTestCase):
def test_dm_control_tf_policy(self):
task = ALL_TASKS[0]
with self.graph.as_default():
env = TfEnv(DmControlEnv(domain_name=task[0], task_name=task[1]))
policy = GaussianMLPPolicy(
env_spec=env.spec,
hidden_sizes=(32, 32),
)
baseline = LinearFeatureBaseline(env_spec=env.spec)
algo = TRPO(
env=env,
policy=policy,
baseline=baseline,
batch_size=10,
max_path_length=5,
n_itr=1,
discount=0.99,
step_size=0.01,
)
algo.train()
| 28.75 | 77 | 0.601932 |
4fdf147a593d066f7147ef70a0baaf6c57e57c95 | 5,453 | py | Python | pymongo/daemon.py | blink1073/mongo-python-driver | 98d393336411b7cd5ad4e184ca45192f76fb48e8 | [
"Apache-2.0"
] | null | null | null | pymongo/daemon.py | blink1073/mongo-python-driver | 98d393336411b7cd5ad4e184ca45192f76fb48e8 | [
"Apache-2.0"
] | null | null | null | pymongo/daemon.py | blink1073/mongo-python-driver | 98d393336411b7cd5ad4e184ca45192f76fb48e8 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019-present MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Support for spawning a daemon process.
PyMongo only attempts to spawn the mongocryptd daemon process when automatic
client-side field level encryption is enabled. See
:ref:`automatic-client-side-encryption` for more info.
"""
import os
import subprocess
import sys
import warnings
# The maximum amount of time to wait for the intermediate subprocess.
_WAIT_TIMEOUT = 10
_THIS_FILE = os.path.realpath(__file__)
def _popen_wait(popen, timeout):
    """Wait on *popen* for at most *timeout* seconds.

    Returns the process exit code, or ``None`` when the wait timed out
    (``TimeoutExpired`` is deliberately swallowed -- a still-running
    daemon is not an error for our callers).
    """
    try:
        returncode = popen.wait(timeout=timeout)
    except subprocess.TimeoutExpired:
        # "still running" is treated as a non-error
        return None
    return returncode
def _silence_resource_warning(popen):
    """Suppress Popen's "subprocess is still running" ResourceWarning.

    Only valid for processes deliberately left running as daemons.  Faking
    a returncode stops Popen.__del__ from warning when *popen* is garbage
    collected (see https://bugs.python.org/issue38890 and
    https://bugs.python.org/issue26741).  *popen* may be ``None`` when
    spawning failed; that case is a no-op.
    """
    if popen is None:
        return
    popen.returncode = 0
if sys.platform == "win32":
# On Windows we spawn the daemon process simply by using DETACHED_PROCESS.
_DETACHED_PROCESS = getattr(subprocess, "DETACHED_PROCESS", 0x00000008)
    def _spawn_daemon(args):
        """Spawn a daemon process (Windows).

        DETACHED_PROCESS starts the child without a console and detached
        from the parent, with stdin/stdout/stderr redirected to devnull.
        A missing executable is reported as a RuntimeWarning instead of
        raising, so daemon spawn failure never crashes the caller.
        """
        try:
            with open(os.devnull, "r+b") as devnull:
                popen = subprocess.Popen(
                    args,
                    creationflags=_DETACHED_PROCESS,
                    stdin=devnull,
                    stderr=devnull,
                    stdout=devnull,
                )
            # the child keeps running detached; suppress the GC-time
            # "subprocess is still running" ResourceWarning
            _silence_resource_warning(popen)
        except FileNotFoundError as exc:
            warnings.warn(
                f"Failed to start {args[0]}: is it on your $PATH?\nOriginal exception: {exc}",
                RuntimeWarning,
                stacklevel=2,
            )
else:
# On Unix we spawn the daemon process with a double Popen.
# 1) The first Popen runs this file as a Python script using the current
# interpreter.
# 2) The script then decouples itself and performs the second Popen to
# spawn the daemon process.
# 3) The original process waits up to 10 seconds for the script to exit.
#
# Note that we do not call fork() directly because we want this procedure
# to be safe to call from any thread. Using Popen instead of fork also
# avoids triggering the application's os.register_at_fork() callbacks when
# we spawn the mongocryptd daemon process.
    def _spawn(args):
        """Spawn the process and silence stdout/stderr.

        Returns the Popen object, or ``None`` when the executable could
        not be found (a RuntimeWarning is emitted instead of raising).
        """
        try:
            with open(os.devnull, "r+b") as devnull:
                return subprocess.Popen(
                    args, close_fds=True, stdin=devnull, stderr=devnull, stdout=devnull
                )
        except FileNotFoundError as exc:
            warnings.warn(
                f"Failed to start {args[0]}: is it on your $PATH?\nOriginal exception: {exc}",
                RuntimeWarning,
                stacklevel=2,
            )
    def _spawn_daemon_double_popen(args):
        """Spawn a daemon process using a double subprocess.Popen.

        Re-runs this module as a script (see the ``__main__`` block below),
        which decouples itself and performs the second Popen that launches
        the actual daemon.
        """
        spawner_args = [sys.executable, _THIS_FILE]
        spawner_args.extend(args)
        temp_proc = subprocess.Popen(spawner_args, close_fds=True)
        # Reap the intermediate child process to avoid creating zombie
        # processes.
        _popen_wait(temp_proc, _WAIT_TIMEOUT)
    def _spawn_daemon(args):
        """Spawn a daemon process (Unix).

        Uses the double-Popen strategy when ``sys.executable`` is known;
        otherwise falls back to a plain (non-daemon) spawn.
        """
        # "If Python is unable to retrieve the real path to its executable,
        # sys.executable will be an empty string or None".
        if sys.executable:
            _spawn_daemon_double_popen(args)
        else:
            # Fallback to spawn a non-daemon process without silencing the
            # resource warning. We do not use fork here because it is not
            # safe to call from a thread on all systems.
            # Unfortunately, this means that:
            # 1) If the parent application is killed via Ctrl-C, the
            #    non-daemon process will also be killed.
            # 2) Each non-daemon process will hang around as a zombie process
            #    until the main application exits.
            _spawn(args)
if __name__ == "__main__":
# Attempt to start a new session to decouple from the parent.
if hasattr(os, "setsid"):
try:
os.setsid()
except OSError:
pass
# We are performing a double fork (Popen) to spawn the process as a
# daemon so it is safe to ignore the resource warning.
_silence_resource_warning(_spawn(sys.argv[1:]))
os._exit(0)
| 38.401408 | 94 | 0.644233 |
6fab9c1690a45746322163eb969d1a2d5e40215a | 14,044 | py | Python | edk2toollib/uefi/edk2/parsers/base_parser.py | joschock/edk2-pytool-library | 7281a7c5cff7b0ed273b89717cd1304c3db73e50 | [
"BSD-2-Clause-Patent"
] | null | null | null | edk2toollib/uefi/edk2/parsers/base_parser.py | joschock/edk2-pytool-library | 7281a7c5cff7b0ed273b89717cd1304c3db73e50 | [
"BSD-2-Clause-Patent"
] | null | null | null | edk2toollib/uefi/edk2/parsers/base_parser.py | joschock/edk2-pytool-library | 7281a7c5cff7b0ed273b89717cd1304c3db73e50 | [
"BSD-2-Clause-Patent"
] | null | null | null | # @file BaseParser.py
# Code to support parsing EDK2 files
#
# Copyright (c) Microsoft Corporation
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
##
import os
import logging
class BaseParser(object):
    """Base class with shared infrastructure for parsing EDK2 meta files.

    Provides macro substitution ($(TOKEN) replacement), conditional
    (!if/!ifdef/!ifndef/!else/!endif) evaluation, GUID parsing and path
    resolution that the concrete file parsers build on.
    """

    def __init__(self, log=""):
        # Named logger so individual sub-parsers can be filtered.
        self.Logger = logging.getLogger(log)
        self.Lines = []                    # raw lines of the file being parsed
        self.LocalVars = {}                # values discovered while parsing
        self.InputVars = {}                # caller-supplied macro values
        self.CurrentSection = ""
        self.CurrentFullSection = ""
        self.Parsed = False
        self.ConditionalStack = []         # nested conditional states (True = active)
        self.RootPath = ""                 # workspace root for FindPath
        self.PPs = []                      # package paths for FindPath
        self.TargetFile = None
        self.TargetFilePath = None
        self.CurrentLine = -1              # line number for error reporting
        self._MacroNotDefinedValue = "0"   # value used for an undefined macro

    #
    # For include files set the base root path
    #
def SetBaseAbsPath(self, path):
"""
Args:
path:
Returns:
"""
self.RootPath = path
return self
def SetPackagePaths(self, pps=[]):
"""
Args:
pps: (Default value = [])
Returns:
"""
self.PPs = pps
return self
def SetInputVars(self, inputdict):
"""
Args:
inputdict:
Returns:
"""
self.InputVars = inputdict
return self
def FindPath(self, *p):
"""
Args:
*p:
Returns:
"""
# NOTE: Some of this logic should be replaced
# with the path resolution from Edk2Module code.
# If the absolute path exists, return it.
Path = os.path.join(self.RootPath, *p)
if os.path.exists(Path):
return Path
# If that fails, check a path relative to the target file.
if self.TargetFilePath is not None:
Path = os.path.join(self.TargetFilePath, *p)
if os.path.exists(Path):
return Path
# If that fails, check in every possible Pkg path.
for Pkg in self.PPs:
Path = os.path.join(self.RootPath, Pkg, *p)
if os.path.exists(Path):
return Path
# log invalid file path
Path = os.path.join(self.RootPath, *p)
self.Logger.error("Invalid file path %s" % Path)
return Path
def WriteLinesToFile(self, filepath):
"""
Args:
filepath:
Returns:
"""
self.Logger.debug("Writing all lines to file: %s" % filepath)
f = open(filepath, "w")
for l in self.Lines:
f.write(l + "\n")
f.close()
    #
    # do logical comparisons
    #
    def ComputeResult(self, value, cond, value2):
        """Evaluate ``value <cond> value2`` for conditional processing.

        Args:
            value: left operand (token string from the file)
            cond: one of '==', '!=', 'in', '<', '<=', '>', '>='
            value2: right operand (token string from the file)

        Returns:
            bool result of the comparison.

        Raises:
            ValueError: for ordering comparisons on non-decimal operands.
            RuntimeError: if ``cond`` is not a recognized operator.
        """
        ivalue = value
        ivalue2 = value2
        # convert it to interpretted value
        # (best effort: tokens that are not integer-like stay as strings)
        try:
            ivalue = self.ConvertToInt(ivalue)
        except ValueError:
            pass
        try:
            ivalue2 = self.ConvertToInt(ivalue2)
        except ValueError:
            pass
        # check our truthyness
        if(cond == "=="):
            # equal -- true when either the converted or the raw forms match
            return (ivalue == ivalue2) or (value == value2)
        elif (cond == "!="):
            # not equal -- both the converted and the raw forms must differ
            return (ivalue != ivalue2) and (value != value2)
        elif (cond == "in"):
            # contains
            return value in value2
        # check to make sure we only have digits from here on out
        # (ordering comparisons are only supported for decimal integers)
        if not str.isdigit(value):
            self.Logger.error(f"{self.__class__}: Unknown value: {value} {ivalue.__class__}")
            self.Logger.debug(f"{self.__class__}: Conditional: {value} {cond}{value2}")
            raise ValueError("Unknown value")
        if not str.isdigit(value2):
            self.Logger.error(f"{self.__class__}: Unknown value: {value2} {ivalue2}")
            self.Logger.debug(f"{self.__class__}: Conditional: {value} {cond} {value2}")
            raise ValueError("Unknown value")
        if (cond == "<"):
            return (ivalue < ivalue2)
        elif (cond == "<="):
            return (ivalue <= ivalue2)
        elif (cond == ">"):
            return (ivalue > ivalue2)
        elif (cond == ">="):
            return (ivalue >= ivalue2)
        else:
            self.Logger.error(f"{self.__class__}: Unknown conditional: {cond}")
            raise RuntimeError("Unknown conditional")
#
# convert to int based on prefix
#
def ConvertToInt(self, value):
"""
Args:
value: must be str or int
Returns:
"""
if isinstance(value, str) and value.upper() == "TRUE":
return 1
elif isinstance(value, str) and value.upper() == "FALSE":
return 0
elif isinstance(value, str) and value.upper().startswith("0X"):
return int(value, 16)
else:
return int(value, 10)
#
# Push new value on stack
#
def PushConditional(self, v):
"""
Args:
v:
Returns:
"""
self.ConditionalStack.append(v)
#
# Pop conditional and return the value
#
def PopConditional(self):
""" """
if(len(self.ConditionalStack) > 0):
return self.ConditionalStack.pop()
else:
self.Logger.critical("Tried to pop an empty conditional stack. Line Number %d" % self.CurrentLine)
return self.ConditionalStack.pop() # this should cause a crash but will give trace.
def _FindReplacementForToken(self, token, replace_if_not_found=False):
v = self.LocalVars.get(token)
if(v is None):
v = self.InputVars.get(token)
if(v is None and replace_if_not_found):
v = self._MacroNotDefinedValue
elif(v is None):
return None
if (type(v) is bool):
v = "true" if v else "false"
if(type(v) is str and (v.upper() == "TRUE" or v.upper() == "FALSE")):
v = v.upper()
return str(v)
    #
    # Method to replace variables
    # in a line with their value from input dict or local dict
    #
    def ReplaceVariables(self, line):
        """Substitute macros in *line* with values from the variable dicts.

        Handles both the bare-token form used by !ifdef/!ifndef and the
        $(TOKEN) form used everywhere else.

        Args:
            line: raw text line from the file

        Returns:
            The line with every resolvable macro replaced.
        """
        # first tokenize and look for tokens require special macro
        # handling without $. This must be done first otherwise
        # both syntax options can not be supported.
        result = line
        tokens = result.split()
        # inside conditional directives an undefined macro collapses to the
        # "macro not defined" value rather than being left in place
        replace = len(tokens) > 1 and tokens[0].lower() in ["!ifdef", "!ifndef", "!if", "!elseif"]
        if len(tokens) > 1 and tokens[0].lower() in ["!ifdef", "!ifndef"]:
            # only the bare (non-$()) form is handled here
            if not tokens[1].startswith("$("):
                v = self._FindReplacementForToken(tokens[1], replace)
                if v is not None:
                    result = result.replace(tokens[1], v, 1)
        # use line to avoid change by handling above
        rep = line.count("$")
        index = 0
        while(rep > 0):
            # scan the ORIGINAL line for $(TOKEN) spans; replacements are
            # applied one at a time to `result`
            start = line.find("$(", index)
            end = line.find(")", start)
            token = line[start + 2:end]
            replacement_token = line[start:end + 1]
            self.Logger.debug("Token is %s" % token)
            v = self._FindReplacementForToken(token, replace)
            if v is not None:
                result = result.replace(replacement_token, v, 1)
            index = end + 1
            rep = rep - 1
        return result
    #
    # Process Conditional
    # return true if line is a conditional otherwise false
    #
    def ProcessConditional(self, text):
        """Evaluate a conditional directive line and update the stack.

        Args:
            text: the (already macro-substituted) line to inspect

        Returns:
            True when the line was a conditional directive (!if/!ifdef/
            !ifndef/!else/!endif) and was consumed; False otherwise.

        Raises:
            RuntimeError: when a directive has the wrong number of tokens.
        """
        tokens = text.split()
        if(tokens[0].lower() == "!if"):
            # need to add support for OR/AND
            if (len(tokens) == 2):
                # bare-value form: !if <value>
                value = self.ConvertToInt(tokens[1].strip())
                self.PushConditional(value == 1)  # if the value is true
            # we can have tokens in 4, 8, 12 etc
            elif len(tokens) >= 4 and len(tokens) % 4 == 0:
                # comparison form: !if <lhs> <op> <rhs> ...
                con = self.ComputeResult(tokens[1].strip(), tokens[2].strip(), tokens[3].strip())
                self.PushConditional(con)
            else:
                self.Logger.error("!if conditionals need to be formatted correctly (spaces between each token)")
                raise RuntimeError("Invalid conditional", text)
            return True
        elif(tokens[0].lower() == "!ifdef"):
            if len(tokens) != 2:
                self.Logger.error("!ifdef conditionals need to be formatted correctly (spaces between each token)")
                raise RuntimeError("Invalid conditional", text)
            # macro substitution already happened; the token is "defined"
            # when it did not collapse to the "macro not defined" value
            self.PushConditional((tokens[1] != self._MacroNotDefinedValue))
            return True
        elif(tokens[0].lower() == "!ifndef"):
            if len(tokens) != 2:
                self.Logger.error("!ifdef conditionals need to be formatted correctly (spaces between each token)")
                raise RuntimeError("Invalid conditional", text)
            self.PushConditional((tokens[1] == self._MacroNotDefinedValue))
            return True
        elif(tokens[0].lower() == "!else"):
            if len(tokens) != 1:
                self.Logger.error("!ifdef conditionals need to be formatted correctly (spaces between each token)")
                raise RuntimeError("Invalid conditional", text)
            # invert the innermost conditional state
            v = self.PopConditional()
            # TODO make sure we can't do multiple else statements
            self.PushConditional(not v)
            return True
        elif(tokens[0].lower() == "!endif"):
            if len(tokens) != 1:
                self.Logger.error("!ifdef conditionals need to be formatted correctly (spaces between each token)")
                raise RuntimeError("Invalid conditional", text)
            self.PopConditional()
            return True
        return False
#
# returns true or false depending on what state of conditional you are currently in
#
def InActiveCode(self):
""" """
ret = True
for a in self.ConditionalStack:
if not a:
ret = False
break
return ret
def IsGuidString(self, l):
"""
will return true if the the line has
= { 0xD3B36F2C, 0xD551, 0x11D4, { 0x9A, 0x46, 0x00, 0x90, 0x27, 0x3F, 0xC1, 0x4D }}
Args:
l:
Returns:
"""
if(l.count("{") == 2 and l.count("}") == 2 and l.count(",") == 10 and l.count("=") == 1):
return True
return False
def ParseGuid(self, l):
"""
parse a guid into a different format
Will throw exception if missing any of the 11 parts of isn't long enough
Args:
l: the guid to parse ex: { 0xD3B36F2C, 0xD551, 0x11D4, { 0x9A, 0x46, 0x00, 0x90, 0x27, 0x3F, 0xC1, 0x4D }}
Returns: a string of the guid. ex: D3B36F2C-D551-11D4-9A46-0090273FC14D
"""
entries = l.lstrip(' {').rstrip(' }').split(',')
if len(entries) != 11:
raise RuntimeError(f"Invalid GUID found {l}. We are missing some parts since we only found: {len(entries)}")
gu = entries[0].lstrip(' 0').lstrip('x').strip()
# pad front until 8 chars
while(len(gu) < 8):
gu = "0" + gu
gut = entries[1].lstrip(' 0').lstrip('x').strip()
while(len(gut) < 4):
gut = "0" + gut
gu = gu + "-" + gut
gut = entries[2].lstrip(' 0').lstrip('x').strip()
while(len(gut) < 4):
gut = "0" + gut
gu = gu + "-" + gut
# strip off extra {
gut = entries[3].lstrip(' { 0').lstrip('x').strip()
while(len(gut) < 2):
gut = "0" + gut
gu = gu + "-" + gut
gut = entries[4].lstrip(' 0').lstrip('x').strip()
while(len(gut) < 2):
gut = "0" + gut
gu = gu + gut
gut = entries[5].lstrip(' 0').lstrip('x').strip()
while(len(gut) < 2):
gut = "0" + gut
gu = gu + "-" + gut
gut = entries[6].lstrip(' 0').lstrip('x').strip()
while(len(gut) < 2):
gut = "0" + gut
gu = gu + gut
gut = entries[7].lstrip(' 0').lstrip('x').strip()
while(len(gut) < 2):
gut = "0" + gut
gu = gu + gut
gut = entries[8].lstrip(' 0').lstrip('x').strip()
while(len(gut) < 2):
gut = "0" + gut
gu = gu + gut
gut = entries[9].lstrip(' 0').lstrip('x').strip()
while(len(gut) < 2):
gut = "0" + gut
gu = gu + gut
gut = entries[10].split()[0].lstrip(' 0').lstrip('x').rstrip(' } ').strip()
while(len(gut) < 2):
gut = "0" + gut
gu = gu + gut
proper_guid_length = 36
if len(gu) > proper_guid_length:
raise RuntimeError(f"The guid we parsed was too long: {gu}")
if len(gu) < proper_guid_length:
raise RuntimeError(f"The guid we parsed was too short: {gu}")
return gu.upper()
def ResetParserState(self):
""" """
self.ConditionalStack = []
self.CurrentSection = ''
self.CurrentFullSection = ''
self.Parsed = False
#
# Base Class for Edk2 build files that use # for comments
#
class HashFileParser(BaseParser):
    """Base parser for EDK2 build files that use '#' as the comment marker."""

    def __init__(self, log):
        BaseParser.__init__(self, log)

    def StripComment(self, l):
        """Return *l* with any trailing '#' comment and whitespace removed.

        Args:
            l: raw line

        Returns:
            The code portion of the line, stripped.
        """
        return l.split('#', 1)[0].strip()

    def ParseNewSection(self, l):
        """Detect a section header such as ``[Defines]`` or ``[Sources.X64]``.

        Args:
            l: raw line

        Returns:
            Tuple (True, section_name) for a header line, otherwise
            (False, "").  Also records the full section name (including any
            qualifier after the '.') in ``self.CurrentFullSection``.
        """
        if not (l.count("[") == 1 and l.count("]") == 1):
            return (False, "")
        inner = l.strip().lstrip("[")
        self.CurrentFullSection = inner.split(",")[0].rstrip("]").strip()
        section = inner.split(".")[0].split(",")[0].rstrip("]").strip()
        return (True, section)
| 27.645669 | 120 | 0.51538 |
ec1a8f877703d9ff60bbadc936d76cd8e06f2926 | 79,811 | py | Python | statsmodels/tsa/statespace/sarimax.py | rdhyee/statsmodels | e45c1b8be327807c58e7c21bc985bce609907a3f | [
"BSD-3-Clause"
] | 1 | 2019-04-20T09:42:23.000Z | 2019-04-20T09:42:23.000Z | statsmodels/tsa/statespace/sarimax.py | christianjauregui/statsmodels | e45c1b8be327807c58e7c21bc985bce609907a3f | [
"BSD-3-Clause"
] | null | null | null | statsmodels/tsa/statespace/sarimax.py | christianjauregui/statsmodels | e45c1b8be327807c58e7c21bc985bce609907a3f | [
"BSD-3-Clause"
] | 1 | 2019-04-20T09:42:12.000Z | 2019-04-20T09:42:12.000Z | """
SARIMAX Model
Author: Chad Fulton
License: Simplified-BSD
"""
from __future__ import division, absolute_import, print_function
from statsmodels.compat.python import long
from warnings import warn
import numpy as np
from .initialization import Initialization
from .mlemodel import MLEModel, MLEResults, MLEResultsWrapper
from .tools import (
companion_matrix, diff, is_invertible, constrain_stationary_univariate,
unconstrain_stationary_univariate, solve_discrete_lyapunov,
prepare_exog
)
from statsmodels.tools.tools import Bunch
from statsmodels.tools.data import _is_using_pandas
from statsmodels.tsa.tsatools import lagmat
from statsmodels.tools.decorators import cache_readonly
from statsmodels.tools.sm_exceptions import ValueWarning
import statsmodels.base.wrapper as wrap
class SARIMAX(MLEModel):
r"""
Seasonal AutoRegressive Integrated Moving Average with eXogenous regressors
model
Parameters
----------
endog : array_like
The observed time-series process :math:`y`
exog : array_like, optional
Array of exogenous regressors, shaped nobs x k.
order : iterable or iterable of iterables, optional
The (p,d,q) order of the model for the number of AR parameters,
differences, and MA parameters. `d` must be an integer
indicating the integration order of the process, while
`p` and `q` may either be an integers indicating the AR and MA
orders (so that all lags up to those orders are included) or else
iterables giving specific AR and / or MA lags to include. Default is
an AR(1) model: (1,0,0).
seasonal_order : iterable, optional
The (P,D,Q,s) order of the seasonal component of the model for the
AR parameters, differences, MA parameters, and periodicity.
`d` must be an integer indicating the integration order of the process,
while `p` and `q` may either be an integers indicating the AR and MA
orders (so that all lags up to those orders are included) or else
iterables giving specific AR and / or MA lags to include. `s` is an
integer giving the periodicity (number of periods in season), often it
is 4 for quarterly data or 12 for monthly data. Default is no seasonal
effect.
trend : str{'n','c','t','ct'} or iterable, optional
Parameter controlling the deterministic trend polynomial :math:`A(t)`.
Can be specified as a string where 'c' indicates a constant (i.e. a
degree zero component of the trend polynomial), 't' indicates a
linear trend with time, and 'ct' is both. Can also be specified as an
iterable defining the polynomial as in `numpy.poly1d`, where
`[1,1,0,1]` would denote :math:`a + bt + ct^3`. Default is to not
include a trend component.
measurement_error : boolean, optional
Whether or not to assume the endogenous observations `endog` were
measured with error. Default is False.
time_varying_regression : boolean, optional
Used when an explanatory variables, `exog`, are provided provided
to select whether or not coefficients on the exogenous regressors are
allowed to vary over time. Default is False.
mle_regression : boolean, optional
Whether or not to use estimate the regression coefficients for the
exogenous variables as part of maximum likelihood estimation or through
the Kalman filter (i.e. recursive least squares). If
`time_varying_regression` is True, this must be set to False. Default
is True.
simple_differencing : boolean, optional
Whether or not to use partially conditional maximum likelihood
estimation. If True, differencing is performed prior to estimation,
which discards the first :math:`s D + d` initial rows but results in a
smaller state-space formulation. If False, the full SARIMAX model is
put in state-space form so that all datapoints can be used in
estimation. Default is False.
enforce_stationarity : boolean, optional
Whether or not to transform the AR parameters to enforce stationarity
in the autoregressive component of the model. Default is True.
enforce_invertibility : boolean, optional
Whether or not to transform the MA parameters to enforce invertibility
in the moving average component of the model. Default is True.
hamilton_representation : boolean, optional
Whether or not to use the Hamilton representation of an ARMA process
(if True) or the Harvey representation (if False). Default is False.
**kwargs
Keyword arguments may be used to provide default values for state space
matrices or for Kalman filtering options. See `Representation`, and
`KalmanFilter` for more details.
Attributes
----------
measurement_error : boolean
Whether or not to assume the endogenous
observations `endog` were measured with error.
state_error : boolean
Whether or not the transition equation has an error component.
mle_regression : boolean
Whether or not the regression coefficients for
the exogenous variables were estimated via maximum
likelihood estimation.
state_regression : boolean
Whether or not the regression coefficients for
the exogenous variables are included as elements
of the state space and estimated via the Kalman
filter.
time_varying_regression : boolean
Whether or not coefficients on the exogenous
regressors are allowed to vary over time.
simple_differencing : boolean
Whether or not to use partially conditional maximum likelihood
estimation.
enforce_stationarity : boolean
Whether or not to transform the AR parameters
to enforce stationarity in the autoregressive
component of the model.
enforce_invertibility : boolean
Whether or not to transform the MA parameters
to enforce invertibility in the moving average
component of the model.
hamilton_representation : boolean
Whether or not to use the Hamilton representation of an ARMA process.
trend : str{'n','c','t','ct'} or iterable
Parameter controlling the deterministic
trend polynomial :math:`A(t)`. See the class
parameter documentation for more information.
polynomial_ar : array
Array containing autoregressive lag polynomial
coefficients, ordered from lowest degree to highest.
Initialized with ones, unless a coefficient is
constrained to be zero (in which case it is zero).
polynomial_ma : array
Array containing moving average lag polynomial
coefficients, ordered from lowest degree to highest.
Initialized with ones, unless a coefficient is
constrained to be zero (in which case it is zero).
polynomial_seasonal_ar : array
Array containing seasonal moving average lag
polynomial coefficients, ordered from lowest degree
to highest. Initialized with ones, unless a
coefficient is constrained to be zero (in which
case it is zero).
polynomial_seasonal_ma : array
Array containing seasonal moving average lag
polynomial coefficients, ordered from lowest degree
to highest. Initialized with ones, unless a
coefficient is constrained to be zero (in which
case it is zero).
polynomial_trend : array
Array containing trend polynomial coefficients,
ordered from lowest degree to highest. Initialized
with ones, unless a coefficient is constrained to be
zero (in which case it is zero).
k_ar : int
Highest autoregressive order in the model, zero-indexed.
k_ar_params : int
Number of autoregressive parameters to be estimated.
k_diff : int
Order of intergration.
k_ma : int
Highest moving average order in the model, zero-indexed.
k_ma_params : int
Number of moving average parameters to be estimated.
seasonal_periods : int
Number of periods in a season.
k_seasonal_ar : int
Highest seasonal autoregressive order in the model, zero-indexed.
k_seasonal_ar_params : int
Number of seasonal autoregressive parameters to be estimated.
k_seasonal_diff : int
Order of seasonal intergration.
k_seasonal_ma : int
Highest seasonal moving average order in the model, zero-indexed.
k_seasonal_ma_params : int
Number of seasonal moving average parameters to be estimated.
k_trend : int
Order of the trend polynomial plus one (i.e. the constant polynomial
would have `k_trend=1`).
k_exog : int
Number of exogenous regressors.
Notes
-----
The SARIMA model is specified :math:`(p, d, q) \times (P, D, Q)_s`.
.. math::
\phi_p (L) \tilde \phi_P (L^s) \Delta^d \Delta_s^D y_t = A(t) +
\theta_q (L) \tilde \theta_Q (L^s) \zeta_t
In terms of a univariate structural model, this can be represented as
.. math::
y_t & = u_t + \eta_t \\
\phi_p (L) \tilde \phi_P (L^s) \Delta^d \Delta_s^D u_t & = A(t) +
\theta_q (L) \tilde \theta_Q (L^s) \zeta_t
where :math:`\eta_t` is only applicable in the case of measurement error
(although it is also used in the case of a pure regression model, i.e. if
p=q=0).
In terms of this model, regression with SARIMA errors can be represented
easily as
.. math::
y_t & = \beta_t x_t + u_t \\
\phi_p (L) \tilde \phi_P (L^s) \Delta^d \Delta_s^D u_t & = A(t) +
\theta_q (L) \tilde \theta_Q (L^s) \zeta_t
this model is the one used when exogenous regressors are provided.
Note that the reduced form lag polynomials will be written as:
.. math::
\Phi (L) \equiv \phi_p (L) \tilde \phi_P (L^s) \\
\Theta (L) \equiv \theta_q (L) \tilde \theta_Q (L^s)
If `mle_regression` is True, regression coefficients are treated as
additional parameters to be estimated via maximum likelihood. Otherwise
they are included as part of the state with a diffuse initialization.
In this case, however, with approximate diffuse initialization, results
can be sensitive to the initial variance.
This class allows two different underlying representations of ARMA models
as state space models: that of Hamilton and that of Harvey. Both are
equivalent in the sense that they are analytical representations of the
ARMA model, but the state vectors of each have different meanings. For
this reason, maximum likelihood does not result in identical parameter
estimates and even the same set of parameters will result in different
loglikelihoods.
The Harvey representation is convenient because it allows integrating
differencing into the state vector to allow using all observations for
estimation.
In this implementation of differenced models, the Hamilton representation
is not able to accomodate differencing in the state vector, so
`simple_differencing` (which performs differencing prior to estimation so
that the first d + sD observations are lost) must be used.
Many other packages use the Hamilton representation, so that tests against
Stata and R require using it along with simple differencing (as Stata
does).
Detailed information about state space models can be found in [1]_. Some
specific references are:
- Chapter 3.4 describes ARMA and ARIMA models in state space form (using
the Harvey representation), and gives references for basic seasonal
models and models with a multiplicative form (for example the airline
model). It also shows a state space model for a full ARIMA process (this
is what is done here if `simple_differencing=False`).
- Chapter 3.6 describes estimating regression effects via the Kalman filter
(this is performed if `mle_regression` is False), regression with
time-varying coefficients, and regression with ARMA errors (recall from
above that if regression effects are present, the model estimated by this
class is regression with SARIMA errors).
- Chapter 8.4 describes the application of an ARMA model to an example
dataset. A replication of this section is available in an example
IPython notebook in the documentation.
References
----------
.. [1] Durbin, James, and Siem Jan Koopman. 2012.
Time Series Analysis by State Space Methods: Second Edition.
Oxford University Press.
"""
def __init__(self, endog, exog=None, order=(1, 0, 0),
seasonal_order=(0, 0, 0, 0), trend=None,
measurement_error=False, time_varying_regression=False,
mle_regression=True, simple_differencing=False,
enforce_stationarity=True, enforce_invertibility=True,
hamilton_representation=False, **kwargs):
# Model parameters
self.seasonal_periods = seasonal_order[3]
self.measurement_error = measurement_error
self.time_varying_regression = time_varying_regression
self.mle_regression = mle_regression
self.simple_differencing = simple_differencing
self.enforce_stationarity = enforce_stationarity
self.enforce_invertibility = enforce_invertibility
self.hamilton_representation = hamilton_representation
# Save given orders
self.order = order
self.seasonal_order = seasonal_order
# Enforce non-MLE coefficients if time varying coefficients is
# specified
if self.time_varying_regression and self.mle_regression:
raise ValueError('Models with time-varying regression coefficients'
' must integrate the coefficients as part of the'
' state vector, so that `mle_regression` must'
' be set to False.')
# Lag polynomials
# Assume that they are given from lowest degree to highest, that all
# degrees except for the constant are included, and that they are
# boolean vectors (0 for not included, 1 for included).
if isinstance(order[0], (int, long, np.integer)):
self.polynomial_ar = np.r_[1., np.ones(order[0])]
else:
self.polynomial_ar = np.r_[1., order[0]]
if isinstance(order[2], (int, long, np.integer)):
self.polynomial_ma = np.r_[1., np.ones(order[2])]
else:
self.polynomial_ma = np.r_[1., order[2]]
# Assume that they are given from lowest degree to highest, that the
# degrees correspond to (1*s, 2*s, ..., P*s), and that they are
# boolean vectors (0 for not included, 1 for included).
if isinstance(seasonal_order[0], (int, long, np.integer)):
self.polynomial_seasonal_ar = np.r_[
1., # constant
([0] * (self.seasonal_periods - 1) + [1]) * seasonal_order[0]
]
else:
self.polynomial_seasonal_ar = np.r_[
1., [0] * self.seasonal_periods * len(seasonal_order[0])
]
for i in range(len(seasonal_order[0])):
tmp = (i + 1) * self.seasonal_periods
self.polynomial_seasonal_ar[tmp] = seasonal_order[0][i]
if isinstance(seasonal_order[2], (int, long, np.integer)):
self.polynomial_seasonal_ma = np.r_[
1., # constant
([0] * (self.seasonal_periods - 1) + [1]) * seasonal_order[2]
]
else:
self.polynomial_seasonal_ma = np.r_[
1., [0] * self.seasonal_periods * len(seasonal_order[2])
]
for i in range(len(seasonal_order[2])):
tmp = (i + 1) * self.seasonal_periods
self.polynomial_seasonal_ma[tmp] = seasonal_order[2][i]
# Deterministic trend polynomial
self.trend = trend
if trend is None or trend == 'n':
self.polynomial_trend = np.ones((0))
elif trend == 'c':
self.polynomial_trend = np.r_[1]
elif trend == 't':
self.polynomial_trend = np.r_[0, 1]
elif trend == 'ct':
self.polynomial_trend = np.r_[1, 1]
else:
self.polynomial_trend = (np.array(trend) > 0).astype(int)
# Model orders
# Note: k_ar, k_ma, k_seasonal_ar, k_seasonal_ma do not include the
# constant term, so they may be zero.
# Note: for a typical ARMA(p,q) model, p = k_ar_params = k_ar - 1 and
# q = k_ma_params = k_ma - 1, although this may not be true for models
# with arbitrary log polynomials.
self.k_ar = int(self.polynomial_ar.shape[0] - 1)
self.k_ar_params = int(np.sum(self.polynomial_ar) - 1)
self.k_diff = int(order[1])
self.k_ma = int(self.polynomial_ma.shape[0] - 1)
self.k_ma_params = int(np.sum(self.polynomial_ma) - 1)
self.k_seasonal_ar = int(self.polynomial_seasonal_ar.shape[0] - 1)
self.k_seasonal_ar_params = (
int(np.sum(self.polynomial_seasonal_ar) - 1)
)
self.k_seasonal_diff = int(seasonal_order[1])
self.k_seasonal_ma = int(self.polynomial_seasonal_ma.shape[0] - 1)
self.k_seasonal_ma_params = (
int(np.sum(self.polynomial_seasonal_ma) - 1)
)
# Make internal copies of the differencing orders because if we use
# simple differencing, then we will need to internally use zeros after
# the simple differencing has been performed
self._k_diff = self.k_diff
self._k_seasonal_diff = self.k_seasonal_diff
# We can only use the Hamilton representation if differencing is not
# performed as a part of the state space
if (self.hamilton_representation and not (self.simple_differencing or
self._k_diff == self._k_seasonal_diff == 0)):
raise ValueError('The Hamilton representation is only available'
' for models in which there is no differencing'
' integrated into the state vector. Set'
' `simple_differencing` to True or set'
' `hamilton_representation` to False')
# Note: k_trend is not the degree of the trend polynomial, because e.g.
# k_trend = 1 corresponds to the degree zero polynomial (with only a
# constant term).
self.k_trend = int(np.sum(self.polynomial_trend))
# Model order
# (this is used internally in a number of locations)
self._k_order = max(self.k_ar + self.k_seasonal_ar,
self.k_ma + self.k_seasonal_ma + 1)
if self._k_order == 1 and self.k_ar + self.k_seasonal_ar == 0:
# Handle time-varying regression
if self.time_varying_regression:
self._k_order = 0
# Exogenous data
(self.k_exog, exog) = prepare_exog(exog)
# Redefine mle_regression to be true only if it was previously set to
# true and there are exogenous regressors
self.mle_regression = (
self.mle_regression and exog is not None and self.k_exog > 0
)
# State regression is regression with coefficients estiamted within
# the state vector
self.state_regression = (
not self.mle_regression and exog is not None and self.k_exog > 0
)
# If all we have is a regression (so k_ar = k_ma = 0), then put the
# error term as measurement error
if self.state_regression and self._k_order == 0:
self.measurement_error = True
# Number of states
k_states = self._k_order
if not self.simple_differencing:
k_states += (self.seasonal_periods * self._k_seasonal_diff +
self._k_diff)
if self.state_regression:
k_states += self.k_exog
# Number of diffuse states
k_diffuse_states = k_states
if self.enforce_stationarity:
k_diffuse_states -= self._k_order
# Number of positive definite elements of the state covariance matrix
k_posdef = int(self._k_order > 0)
# Only have an error component to the states if k_posdef > 0
self.state_error = k_posdef > 0
if self.state_regression and self.time_varying_regression:
k_posdef += self.k_exog
# Diffuse initialization can be more sensistive to the variance value
# in the case of state regression, so set a higher than usual default
# variance
if self.state_regression:
kwargs.setdefault('initial_variance', 1e10)
# Number of parameters
self.k_params = (
self.k_ar_params + self.k_ma_params +
self.k_seasonal_ar_params + self.k_seasonal_ar_params +
self.k_trend +
self.measurement_error + 1
)
if self.mle_regression:
self.k_params += self.k_exog
# We need to have an array or pandas at this point
self.orig_endog = endog
self.orig_exog = exog
if not _is_using_pandas(endog, None):
endog = np.asanyarray(endog)
# Update the differencing dimensions if simple differencing is applied
self.orig_k_diff = self._k_diff
self.orig_k_seasonal_diff = self._k_seasonal_diff
if (self.simple_differencing and
(self._k_diff > 0 or self._k_seasonal_diff > 0)):
self._k_diff = 0
self._k_seasonal_diff = 0
# Internally used in several locations
self._k_states_diff = (
self._k_diff + self.seasonal_periods * self._k_seasonal_diff
)
# Set some model variables now so they will be available for the
# initialize() method, below
self.nobs = len(endog)
self.k_states = k_states
self.k_posdef = k_posdef
# By default, do not calculate likelihood while it is controlled by
# diffuse initial conditions.
kwargs.setdefault('loglikelihood_burn', k_diffuse_states)
# Initialize the statespace
super(SARIMAX, self).__init__(
endog, exog=exog, k_states=k_states, k_posdef=k_posdef, **kwargs
)
# Set as time-varying model if we have time-trend or exog
if self.k_exog > 0 or len(self.polynomial_trend) > 1:
self.ssm._time_invariant = False
# Initialize the fixed components of the statespace model
self.ssm['design'] = self.initial_design
self.ssm['state_intercept'] = self.initial_state_intercept
self.ssm['transition'] = self.initial_transition
self.ssm['selection'] = self.initial_selection
# update _init_keys attached by super
self._init_keys += ['order', 'seasonal_order', 'trend',
'measurement_error', 'time_varying_regression',
'mle_regression', 'simple_differencing',
'enforce_stationarity', 'enforce_invertibility',
'hamilton_representation'] + list(kwargs.keys())
# TODO: I think the kwargs or not attached, need to recover from ???
# Initialize the state
if self.ssm.initialization is None:
self.initialize_default()
def _get_init_kwds(self):
kwds = super(SARIMAX, self)._get_init_kwds()
for key, value in kwds.items():
if value is None and hasattr(self.ssm, key):
kwds[key] = getattr(self.ssm, key)
return kwds
    def prepare_data(self):
        """Prepare data for the Kalman filter, applying simple differencing
        up front when requested, and cache the deterministic trend data.

        Returns:
            Tuple (endog, exog); when simple differencing is active these
            are shortened by the observations lost to differencing.
        """
        endog, exog = super(SARIMAX, self).prepare_data()

        # Perform simple differencing if requested
        if (self.simple_differencing and
           (self.orig_k_diff > 0 or self.orig_k_seasonal_diff > 0)):
            # Save the original length
            orig_length = endog.shape[0]
            # Perform simple differencing
            endog = diff(endog.copy(), self.orig_k_diff,
                         self.orig_k_seasonal_diff, self.seasonal_periods)
            if exog is not None:
                exog = diff(exog.copy(), self.orig_k_diff,
                            self.orig_k_seasonal_diff, self.seasonal_periods)

            # Reset the ModelData datasets and cache
            self.data.endog, self.data.exog = (
                self.data._convert_endog_exog(endog, exog))

            # Reset indexes, if provided
            # (drop the initial rows consumed by differencing; a generated
            # index is trimmed from the end instead so it stays contiguous)
            new_length = self.data.endog.shape[0]
            if self.data.row_labels is not None:
                self.data._cache['row_labels'] = (
                    self.data.row_labels[orig_length - new_length:])
            if self._index is not None:
                if self._index_generated:
                    self._index = self._index[:-(orig_length - new_length)]
                else:
                    self._index = self._index[orig_length - new_length:]

            # Reset the nobs
            self.nobs = endog.shape[0]

        # Cache the arrays for calculating the intercept from the trend
        # components
        time_trend = np.arange(1, self.nobs + 1)
        self._trend_data = np.zeros((self.nobs, self.k_trend))
        i = 0
        for k in self.polynomial_trend.nonzero()[0]:
            if k == 0:
                # degree-zero term: constant column
                self._trend_data[:, i] = np.ones(self.nobs,)
            else:
                self._trend_data[:, i] = time_trend**k
            i += 1

        return endog, exog
def initialize(self):
    """
    Initialize the SARIMAX model.

    Notes
    -----
    These initialization steps must occur following the parent class
    __init__ function calls.  This caches index arrays and `np.s_`
    slices used by `update` to write the lag-polynomial parameters into
    the system matrices without recomputing positions on every call.
    """
    super(SARIMAX, self).initialize()

    # Cache the indexes of included polynomial orders (for update below)
    # (but we do not want the index of the constant term, so exclude the
    # first index)
    self._polynomial_ar_idx = np.nonzero(self.polynomial_ar)[0][1:]
    self._polynomial_ma_idx = np.nonzero(self.polynomial_ma)[0][1:]
    self._polynomial_seasonal_ar_idx = np.nonzero(
        self.polynomial_seasonal_ar
    )[0][1:]
    self._polynomial_seasonal_ma_idx = np.nonzero(
        self.polynomial_seasonal_ma
    )[0][1:]

    # Save the indices corresponding to the reduced form lag polynomial
    # parameters in the transition and selection matrices so that they
    # don't have to be recalculated for each update()
    start_row = self._k_states_diff
    end_row = start_row + self.k_ar + self.k_seasonal_ar
    col = self._k_states_diff
    if not self.hamilton_representation:
        # Harvey representation: AR coefficients fill a column
        self.transition_ar_params_idx = (
            np.s_['transition', start_row:end_row, col]
        )
    else:
        # Hamilton representation: AR coefficients fill a row
        self.transition_ar_params_idx = (
            np.s_['transition', col, start_row:end_row]
        )

    start_row += 1
    end_row = start_row + self.k_ma + self.k_seasonal_ma
    col = 0
    if not self.hamilton_representation:
        # Harvey representation: MA coefficients enter via selection
        self.selection_ma_params_idx = (
            np.s_['selection', start_row:end_row, col]
        )
    else:
        # Hamilton representation: MA coefficients enter via design
        self.design_ma_params_idx = (
            np.s_['design', col, start_row:end_row]
        )

    # Cache indices for exog variances in the state covariance matrix
    # (the exog variances occupy the trailing diagonal entries)
    if self.state_regression and self.time_varying_regression:
        idx = np.diag_indices(self.k_posdef)
        self._exog_variance_idx = ('state_cov', idx[0][-self.k_exog:],
                                   idx[1][-self.k_exog:])
def initialize_default(self, approximate_diffuse_variance=None):
    """Set the default state initialization on the ssm representation.

    Differencing and regression states receive an approximate diffuse
    initialization; when `enforce_stationarity` is set, the ARMA block
    of the state vector is initialized from its stationary
    distribution, otherwise all states are approximate diffuse.

    Parameters
    ----------
    approximate_diffuse_variance : float, optional
        Variance for the approximate diffuse elements.  Defaults to
        ``self.ssm.initial_variance``.
    """
    if approximate_diffuse_variance is None:
        approximate_diffuse_variance = self.ssm.initial_variance
    init = Initialization(
        self.k_states,
        approximate_diffuse_variance=approximate_diffuse_variance)
    if self.enforce_stationarity:
        # Differencing operators are at the beginning
        init.set((0, self._k_states_diff), 'approximate_diffuse')
        # Stationary component in the middle
        init.set((self._k_states_diff, self._k_states_diff + self._k_order),
                 'stationary')
        # Regression components at the end
        init.set((self._k_states_diff + self._k_order,
                  self._k_states_diff + self._k_order + self.k_exog),
                 'approximate_diffuse')
    # If we're not enforcing a stationarity, then we can't initialize a
    # stationary component
    else:
        init.set(None, 'approximate_diffuse')
    self.ssm.initialization = init
@property
def initial_design(self):
    """Initial design matrix"""
    # Basic design matrix: select the ordinary differencing states, the
    # last element of each seasonal-differencing block, and (if there is
    # a state error) the first element of the ARMA block; the remaining
    # ARMA states get zeros.
    design = np.r_[
        [1] * self._k_diff,
        ([0] * (self.seasonal_periods - 1) + [1]) * self._k_seasonal_diff,
        [1] * self.state_error, [0] * (self._k_order - 1)
    ]

    # Guard: if no states were selected above, fall back to a single 0
    if len(design) == 0:
        design = np.r_[0]

    # If we have exogenous regressors included as part of the state vector
    # then the exogenous data is incorporated as a time-varying component
    # of the design matrix
    if self.state_regression:
        if self._k_order > 0:
            # Tile the static part across time and append the exog data,
            # yielding shape (1, k_states, nobs)
            design = np.c_[
                np.reshape(
                    np.repeat(design, self.nobs),
                    (design.shape[0], self.nobs)
                ).T,
                self.exog
            ].T[None, :, :]
        else:
            design = self.exog.T[None, :, :]
    return design
@property
def initial_state_intercept(self):
    """Initial state intercept vector (all zeros).

    Time-varying (shape ``(k_states, nobs)``) when a trend is present,
    since the trend enters through this intercept; otherwise a constant
    zero vector of shape ``(k_states,)``.
    """
    # TODO make this self.k_trend > 1 and adjust the update to take
    # into account that if the trend is a constant, it is not time-varying
    shape = ((self.k_states, self.nobs) if self.k_trend > 0
             else (self.k_states,))
    return np.zeros(shape)
@property
def initial_transition(self):
    """Initial transition matrix"""
    transition = np.zeros((self.k_states, self.k_states))

    # Exogenous regressors component
    if self.state_regression:
        start = -self.k_exog
        # T_\beta: regression coefficients evolve as identities
        transition[start:, start:] = np.eye(self.k_exog)

        # Autoregressive component
        start = -(self.k_exog + self._k_order)
        end = -self.k_exog if self.k_exog > 0 else None
    else:
        # Autoregressive component
        start = -self._k_order
        end = None

    # T_c: companion block for the ARMA states (transposed for the
    # Hamilton representation)
    if self._k_order > 0:
        transition[start:end, start:end] = companion_matrix(self._k_order)
        if self.hamilton_representation:
            transition[start:end, start:end] = np.transpose(
                companion_matrix(self._k_order)
            )

    # Seasonal differencing component
    # T^*
    if self._k_seasonal_diff > 0:
        seasonal_companion = companion_matrix(self.seasonal_periods).T
        seasonal_companion[0, -1] = 1
        for d in range(self._k_seasonal_diff):
            start = self._k_diff + d * self.seasonal_periods
            end = self._k_diff + (d + 1) * self.seasonal_periods

            # T_c^*
            transition[start:end, start:end] = seasonal_companion

            # i
            # NOTE(review): the loop variable `i` is unused in the body,
            # so the same cell is written on every iteration — confirm
            # whether the column index was meant to depend on `i`.
            for i in range(d + 1, self._k_seasonal_diff):
                transition[start, end + self.seasonal_periods - 1] = 1

            # \iota: link to the first stationary state
            transition[start, self._k_states_diff] = 1

    # Differencing component
    if self._k_diff > 0:
        idx = np.triu_indices(self._k_diff)
        # T^**: upper-triangular ones accumulate the differences
        transition[idx] = 1
        # [0 1]
        if self.seasonal_periods > 0:
            start = self._k_diff
            end = self._k_states_diff
            transition[:self._k_diff, start:end] = (
                ([0] * (self.seasonal_periods - 1) + [1]) *
                self._k_seasonal_diff)
        # [1 0]
        column = self._k_states_diff
        transition[:self._k_diff, column] = 1

    return transition
@property
def initial_selection(self):
    """Initial selection matrix"""
    if not (self.state_regression and self.time_varying_regression):
        if self.k_posdef > 0:
            # Single disturbance loading onto the first ARMA state;
            # differencing states and (MLE) regression states get none
            selection = np.r_[
                [0] * (self._k_states_diff),
                [1] * (self._k_order > 0), [0] * (self._k_order - 1),
                [0] * ((1 - self.mle_regression) * self.k_exog)
            ][:, None]

            # Guard against an empty construction
            if len(selection) == 0:
                selection = np.zeros((self.k_states, self.k_posdef))
        else:
            selection = np.zeros((self.k_states, 0))
    else:
        selection = np.zeros((self.k_states, self.k_posdef))

        # Typical state variance
        if self._k_order > 0:
            selection[0, 0] = 1

        # Time-varying regression coefficient variances occupy the
        # trailing diagonal entries
        for i in range(self.k_exog, 0, -1):
            selection[-i, -i] = 1
    return selection
@property
def _res_classes(self):
    # Results class (and wrapper) that `fit` should construct for this
    # model.
    return {'fit': (SARIMAXResults, SARIMAXResultsWrapper)}
@staticmethod
def _conditional_sum_squares(endog, k_ar, polynomial_ar, k_ma,
                             polynomial_ma, k_trend=0, trend_data=None):
    """Conditional sum-of-squares starting parameter estimates.

    If MA terms are present, first fits an AR(2 * k_ma) model by OLS
    and uses its residuals as proxies for the disturbances; then
    regresses endog on the trend data, AR lags of endog, and MA lags of
    those residuals.

    Returns
    -------
    tuple
        ``(params_trend, params_ar, params_ma, params_variance)``;
        each entry is an empty list when the corresponding component is
        absent, and ``params_variance`` is the mean squared residual of
        the final regression.
    """
    # `k` lags are used for the preliminary AR fit; `r` observations
    # are dropped from the front of the sample in the main regression
    k = 2 * k_ma
    r = max(k + k_ma, k_ar)

    # Number of free (nonzero, non-constant) coefficients per polynomial
    k_params_ar = 0 if k_ar == 0 else len(polynomial_ar.nonzero()[0]) - 1
    k_params_ma = 0 if k_ma == 0 else len(polynomial_ma.nonzero()[0]) - 1

    residuals = None
    if k_ar + k_ma + k_trend > 0:
        # If we have MA terms, get residuals from an AR(k) model to use
        # as data for conditional sum of squares estimates of the MA
        # parameters
        if k_ma > 0:
            Y = endog[k:]
            X = lagmat(endog, k, trim='both')
            params_ar = np.linalg.pinv(X).dot(Y)
            residuals = Y - np.dot(X, params_ar)

        # Run an ARMA(p,q) model using the just computed residuals as data
        Y = endog[r:]

        X = np.empty((Y.shape[0], 0))
        if k_trend > 0:
            if trend_data is None:
                raise ValueError('Trend data must be provided if'
                                 ' `k_trend` > 0.')
            X = np.c_[X, trend_data[:(-r if r > 0 else None), :]]
        if k_ar > 0:
            # Only the lags with nonzero (free) coefficients enter
            cols = polynomial_ar.nonzero()[0][1:] - 1
            X = np.c_[X, lagmat(endog, k_ar)[r:, cols]]
        if k_ma > 0:
            cols = polynomial_ma.nonzero()[0][1:] - 1
            X = np.c_[X, lagmat(residuals, k_ma)[r-k:, cols]]

        # Get the array of [ar_params, ma_params]
        params = np.linalg.pinv(X).dot(Y)
        residuals = Y - np.dot(X, params)

    # Default output
    params_trend = []
    params_ar = []
    params_ma = []
    params_variance = []

    # Get the params: slices of `params` in [trend, ar, ma] order
    offset = 0
    if k_trend > 0:
        params_trend = params[offset:k_trend + offset]
        offset += k_trend
    if k_ar > 0:
        params_ar = params[offset:k_params_ar + offset]
        offset += k_params_ar
    if k_ma > 0:
        params_ma = params[offset:k_params_ma + offset]
        offset += k_params_ma
    if residuals is not None:
        params_variance = (residuals[k_params_ma:]**2).mean()

    return (params_trend, params_ar, params_ma,
            params_variance)
@property
def start_params(self):
    """
    Starting parameters for maximum likelihood estimation.

    Built via OLS for the regression effects and conditional sum of
    squares for the ARMA / seasonal ARMA components; non-stationary or
    non-invertible estimates are replaced by zeros (with a warning)
    when the corresponding enforcement option is set.
    """
    # Perform differencing if necessary (i.e. if simple differencing is
    # false so that the state-space model will use the entire dataset)
    trend_data = self._trend_data
    if not self.simple_differencing and (
            self._k_diff > 0 or self._k_seasonal_diff > 0):
        endog = diff(self.endog, self._k_diff,
                     self._k_seasonal_diff, self.seasonal_periods)
        if self.exog is not None:
            exog = diff(self.exog, self._k_diff,
                        self._k_seasonal_diff, self.seasonal_periods)
        else:
            exog = None
        trend_data = trend_data[:endog.shape[0], :]
    else:
        endog = self.endog.copy()
        exog = self.exog.copy() if self.exog is not None else None
    endog = endog.squeeze()

    # Although the Kalman filter can deal with missing values in endog,
    # conditional sum of squares cannot
    if np.any(np.isnan(endog)):
        mask = ~np.isnan(endog).squeeze()
        endog = endog[mask]
        if exog is not None:
            exog = exog[mask]
        if trend_data is not None:
            trend_data = trend_data[mask]

    # Regression effects via OLS
    params_exog = []
    if self.k_exog > 0:
        params_exog = np.linalg.pinv(exog).dot(endog)
        endog = endog - np.dot(exog, params_exog)
    if self.state_regression:
        # Regression coefficients live in the state vector, so no MLE
        # starting values for them
        params_exog = []

    # Non-seasonal ARMA component and trend
    (params_trend, params_ar, params_ma,
     params_variance) = self._conditional_sum_squares(
        endog, self.k_ar, self.polynomial_ar, self.k_ma,
        self.polynomial_ma, self.k_trend, trend_data
    )

    # If we estimated non-stationary start parameters but enforce
    # stationarity is on, warn and fall back to zeros
    invalid_ar = (
        self.k_ar > 0 and
        self.enforce_stationarity and
        not is_invertible(np.r_[1, -params_ar])
    )
    if invalid_ar:
        warn('Non-stationary starting autoregressive parameters'
             ' found. Using zeros as starting parameters.')
        params_ar *= 0

    # If we estimated non-invertible start parameters but enforce
    # invertibility is on, warn and fall back to zeros
    invalid_ma = (
        self.k_ma > 0 and
        self.enforce_invertibility and
        not is_invertible(np.r_[1, params_ma])
    )
    if invalid_ma:
        warn('Non-invertible starting MA parameters found.'
             ' Using zeros as starting parameters.')
        params_ma *= 0

    # Seasonal Parameters
    _, params_seasonal_ar, params_seasonal_ma, params_seasonal_variance = (
        self._conditional_sum_squares(
            endog, self.k_seasonal_ar, self.polynomial_seasonal_ar,
            self.k_seasonal_ma, self.polynomial_seasonal_ma
        )
    )

    invalid_seasonal_ar = (
        self.k_seasonal_ar > 0 and
        self.enforce_stationarity and
        not is_invertible(np.r_[1, -params_seasonal_ar])
    )
    if invalid_seasonal_ar:
        # BUG FIX: message previously read '...seasonal autoregressive
        # Using zeros...' (the words ' parameters found.' were missing)
        warn('Non-stationary starting seasonal autoregressive'
             ' parameters found. Using zeros as starting parameters.')
        params_seasonal_ar *= 0

    invalid_seasonal_ma = (
        self.k_seasonal_ma > 0 and
        self.enforce_invertibility and
        not is_invertible(np.r_[1, params_seasonal_ma])
    )
    if invalid_seasonal_ma:
        # BUG FIX: same message truncation as above
        warn('Non-invertible starting seasonal moving average'
             ' parameters found. Using zeros as starting parameters.')
        params_seasonal_ma *= 0

    # Variances
    params_exog_variance = []
    if self.state_regression and self.time_varying_regression:
        # TODO how to set the initial variance parameters?
        params_exog_variance = [1] * self.k_exog
    if (self.state_error and type(params_variance) == list and
            len(params_variance) == 0):
        # BUG FIX: the inner condition previously re-tested
        # `params_variance` (already known here to be an empty list)
        # instead of `params_seasonal_variance`; use the seasonal
        # variance estimate when one is available, otherwise fall back
        # to an inner-product-based estimate.
        if not (type(params_seasonal_variance) == list and
                len(params_seasonal_variance) == 0):
            params_variance = params_seasonal_variance
        elif self.k_exog > 0:
            params_variance = np.inner(endog, endog)
        else:
            params_variance = np.inner(endog, endog) / self.nobs
    params_measurement_variance = 1 if self.measurement_error else []

    # Combine all parameters in canonical ordering
    return np.r_[
        params_trend,
        params_exog,
        params_ar,
        params_ma,
        params_seasonal_ar,
        params_seasonal_ma,
        params_exog_variance,
        params_measurement_variance,
        params_variance
    ]
@property
def endog_names(self, latex=False):
    """Names of endogenous variables.

    The name reflects any simple differencing, e.g. ``D.y`` or
    ``D2S12.y``.  NOTE: because this is a property, the ``latex``
    argument can never actually be supplied through attribute access,
    so the plain-text spellings are always used in practice.
    """
    # Fixed: '\D' is an invalid escape sequence in a non-raw string
    # (SyntaxWarning on modern Python); raw strings produce identical
    # values.
    diff = ''
    if self.k_diff > 0:
        if self.k_diff == 1:
            diff = r'\Delta' if latex else 'D'
        else:
            diff = (r'\Delta^%d' if latex else 'D%d') % self.k_diff

    seasonal_diff = ''
    if self.k_seasonal_diff > 0:
        if self.k_seasonal_diff == 1:
            seasonal_diff = ((r'\Delta_%d' if latex else 'DS%d') %
                             (self.seasonal_periods))
        else:
            seasonal_diff = ((r'\Delta_%d^%d' if latex else 'D%dS%d') %
                             (self.k_seasonal_diff, self.seasonal_periods))
    endog_diff = self.simple_differencing
    if endog_diff and self.k_diff > 0 and self.k_seasonal_diff > 0:
        return (('%s%s %s' if latex else '%s.%s.%s') %
                (diff, seasonal_diff, self.data.ynames))
    elif endog_diff and self.k_diff > 0:
        return (('%s %s' if latex else '%s.%s') %
                (diff, self.data.ynames))
    elif endog_diff and self.k_seasonal_diff > 0:
        return (('%s %s' if latex else '%s.%s') %
                (seasonal_diff, self.data.ynames))
    else:
        return self.data.ynames
# Canonical ordering of every possible parameter group; `param_terms`
# filters this list down to the groups actually present in the model.
params_complete = [
    'trend', 'exog', 'ar', 'ma', 'seasonal_ar', 'seasonal_ma',
    'exog_variance', 'measurement_variance', 'variance'
]
@property
def param_terms(self):
    """
    List of parameters actually included in the model, in sorted order.

    TODO Make this an OrderedDict with slice or indices as the values.
    """
    orders = self.model_orders
    # Keep only the groups with a positive order, preserving the
    # canonical ordering of `params_complete`
    terms = [term for term in self.params_complete if orders[term] > 0]
    # k_exog may be positive without associated parameters if the
    # regression coefficients live in the state vector instead of
    # being estimated by MLE
    if not self.mle_regression and 'exog' in terms:
        terms.remove('exog')
    return terms
@property
def param_names(self):
    """
    List of human readable parameter names (for parameters actually
    included in the model).
    """
    model_names = self.model_names
    names = []
    # Concatenate the per-group name lists in canonical term order
    for term in self.param_terms:
        names.extend(model_names[term])
    return names
@property
def model_orders(self):
    """
    The orders of each of the polynomials in the model.
    """
    # Exogenous variance parameters exist only when the regression
    # coefficients are time-varying states
    tv_regression = self.state_regression and self.time_varying_regression
    orders = {
        'trend': self.k_trend,
        'exog': self.k_exog,
        'ar': self.k_ar,
        'ma': self.k_ma,
        'seasonal_ar': self.k_seasonal_ar,
        'seasonal_ma': self.k_seasonal_ma,
        'reduced_ar': self.k_ar + self.k_seasonal_ar,
        'reduced_ma': self.k_ma + self.k_seasonal_ma,
        'exog_variance': self.k_exog if tv_regression else 0,
        'measurement_variance': int(self.measurement_error),
        'variance': int(self.state_error),
    }
    return orders
@property
def model_names(self):
    """
    The plain text names of all possible model parameters.
    """
    # Delegates to the shared name builder with latex formatting off.
    return self._get_model_names(latex=False)
@property
def model_latex_names(self):
    """
    The latex names of all possible model parameters.
    """
    # Delegates to the shared name builder with latex formatting on.
    return self._get_model_names(latex=True)
def _get_model_names(self, latex=False):
    """Build the parameter-name lists for every parameter group.

    Parameters
    ----------
    latex : bool, optional
        If True, produce latex-formatted names; otherwise plain text.

    Returns
    -------
    dict
        Maps each group name (the entries of `params_complete` plus the
        reduced-form groups) to a list of names, or None for groups not
        present in the model.
    """
    names = {
        'trend': None,
        'exog': None,
        'ar': None,
        'ma': None,
        'seasonal_ar': None,
        'seasonal_ma': None,
        'reduced_ar': None,
        'reduced_ma': None,
        'exog_variance': None,
        'measurement_variance': None,
        'variance': None,
    }

    # Trend
    if self.k_trend > 0:
        trend_template = 't_%d' if latex else 'trend.%d'
        names['trend'] = []
        for i in self.polynomial_trend.nonzero()[0]:
            if i == 0:
                names['trend'].append('intercept')
            elif i == 1:
                names['trend'].append('drift')
            else:
                names['trend'].append(trend_template % i)

    # Exogenous coefficients
    if self.k_exog > 0:
        names['exog'] = self.exog_names

    # Autoregressive
    if self.k_ar > 0:
        ar_template = '$\\phi_%d$' if latex else 'ar.L%d'
        names['ar'] = []
        for i in self.polynomial_ar.nonzero()[0][1:]:
            names['ar'].append(ar_template % i)

    # Moving Average
    if self.k_ma > 0:
        ma_template = '$\\theta_%d$' if latex else 'ma.L%d'
        names['ma'] = []
        for i in self.polynomial_ma.nonzero()[0][1:]:
            names['ma'].append(ma_template % i)

    # Seasonal Autoregressive
    if self.k_seasonal_ar > 0:
        seasonal_ar_template = (
            '$\\tilde \\phi_%d$' if latex else 'ar.S.L%d'
        )
        names['seasonal_ar'] = []
        for i in self.polynomial_seasonal_ar.nonzero()[0][1:]:
            names['seasonal_ar'].append(seasonal_ar_template % i)

    # Seasonal Moving Average
    if self.k_seasonal_ma > 0:
        seasonal_ma_template = (
            '$\\tilde \\theta_%d$' if latex else 'ma.S.L%d'
        )
        names['seasonal_ma'] = []
        for i in self.polynomial_seasonal_ma.nonzero()[0][1:]:
            names['seasonal_ma'].append(seasonal_ma_template % i)

    # Reduced Form Autoregressive
    if self.k_ar > 0 or self.k_seasonal_ar > 0:
        # Fixed: removed an accidental duplicated assignment
        # (`reduced_polynomial_ar = reduced_polynomial_ar = ...`)
        reduced_polynomial_ar = -np.polymul(
            self.polynomial_ar, self.polynomial_seasonal_ar
        )
        ar_template = '$\\Phi_%d$' if latex else 'ar.R.L%d'
        names['reduced_ar'] = []
        for i in reduced_polynomial_ar.nonzero()[0][1:]:
            names['reduced_ar'].append(ar_template % i)

    # Reduced Form Moving Average
    if self.k_ma > 0 or self.k_seasonal_ma > 0:
        reduced_polynomial_ma = np.polymul(
            self.polynomial_ma, self.polynomial_seasonal_ma
        )
        ma_template = '$\\Theta_%d$' if latex else 'ma.R.L%d'
        names['reduced_ma'] = []
        for i in reduced_polynomial_ma.nonzero()[0][1:]:
            names['reduced_ma'].append(ma_template % i)

    # Exogenous variances
    if self.state_regression and self.time_varying_regression:
        exog_var_template = '$\\sigma_\\text{%s}^2$' if latex else 'var.%s'
        names['exog_variance'] = [
            exog_var_template % exog_name for exog_name in self.exog_names
        ]

    # Measurement error variance
    if self.measurement_error:
        meas_var_tpl = (
            '$\\sigma_\\eta^2$' if latex else 'var.measurement_error'
        )
        names['measurement_variance'] = [meas_var_tpl]

    # State variance
    if self.state_error:
        var_tpl = '$\\sigma_\\zeta^2$' if latex else 'sigma2'
        names['variance'] = [var_tpl]

    return names
def transform_params(self, unconstrained):
    """
    Transform unconstrained parameters used by the optimizer to constrained
    parameters used in likelihood evaluation.

    Used primarily to enforce stationarity of the autoregressive lag
    polynomial, invertibility of the moving average lag polynomial, and
    positive variance parameters.

    Parameters
    ----------
    unconstrained : array_like
        Unconstrained parameters used by the optimizer.

    Returns
    -------
    constrained : array_like
        Constrained parameters used in likelihood evaluation.

    Notes
    -----
    If the lag polynomial has non-consecutive powers (so that the
    coefficient is zero on some element of the polynomial), then the
    constraint function is not onto the entire space of invertible
    polynomials, although it only excludes a very small portion very close
    to the invertibility boundary.
    """
    unconstrained = np.array(unconstrained, ndmin=1)
    constrained = np.zeros(unconstrained.shape, unconstrained.dtype)

    # `start`/`end` walk the parameter vector in canonical order:
    # trend, exog, AR, MA, seasonal AR, seasonal MA, exog variances,
    # measurement variance, state variance.
    start = end = 0

    # Retain the trend parameters
    if self.k_trend > 0:
        end += self.k_trend
        constrained[start:end] = unconstrained[start:end]
        start += self.k_trend

    # Retain any MLE regression coefficients
    if self.mle_regression:
        end += self.k_exog
        constrained[start:end] = unconstrained[start:end]
        start += self.k_exog

    # Transform the AR parameters (phi) to be stationary
    if self.k_ar_params > 0:
        end += self.k_ar_params
        if self.enforce_stationarity:
            constrained[start:end] = (
                constrain_stationary_univariate(unconstrained[start:end])
            )
        else:
            constrained[start:end] = unconstrained[start:end]
        start += self.k_ar_params

    # Transform the MA parameters (theta) to be invertible
    # (note the sign flip relative to the AR transform)
    if self.k_ma_params > 0:
        end += self.k_ma_params
        if self.enforce_invertibility:
            constrained[start:end] = (
                -constrain_stationary_univariate(unconstrained[start:end])
            )
        else:
            constrained[start:end] = unconstrained[start:end]
        start += self.k_ma_params

    # Transform the seasonal AR parameters (\tilde phi) to be stationary
    if self.k_seasonal_ar > 0:
        end += self.k_seasonal_ar_params
        if self.enforce_stationarity:
            constrained[start:end] = (
                constrain_stationary_univariate(unconstrained[start:end])
            )
        else:
            constrained[start:end] = unconstrained[start:end]
        start += self.k_seasonal_ar_params

    # Transform the seasonal MA parameters (\tilde theta) to be invertible
    if self.k_seasonal_ma_params > 0:
        end += self.k_seasonal_ma_params
        if self.enforce_invertibility:
            constrained[start:end] = (
                -constrain_stationary_univariate(unconstrained[start:end])
            )
        else:
            constrained[start:end] = unconstrained[start:end]
        start += self.k_seasonal_ma_params

    # Transform the standard deviation parameters to be positive
    # (variances are parameterized as squared standard deviations)
    if self.state_regression and self.time_varying_regression:
        end += self.k_exog
        constrained[start:end] = unconstrained[start:end]**2
        start += self.k_exog
    if self.measurement_error:
        constrained[start] = unconstrained[start]**2
        start += 1
        end += 1
    if self.state_error:
        constrained[start] = unconstrained[start]**2
        # start += 1
        # end += 1

    return constrained
def untransform_params(self, constrained):
    """
    Transform constrained parameters used in likelihood evaluation
    to unconstrained parameters used by the optimizer

    Used primarily to reverse enforcement of stationarity of the
    autoregressive lag polynomial and invertibility of the moving average
    lag polynomial.

    Parameters
    ----------
    constrained : array_like
        Constrained parameters used in likelihood evaluation.

    Returns
    -------
    unconstrained : array_like
        Unconstrained parameters used by the optimizer.

    Notes
    -----
    If the lag polynomial has non-consecutive powers (so that the
    coefficient is zero on some element of the polynomial), then the
    constraint function is not onto the entire space of invertible
    polynomials, although it only excludes a very small portion very close
    to the invertibility boundary.
    """
    constrained = np.array(constrained, ndmin=1)
    unconstrained = np.zeros(constrained.shape, constrained.dtype)

    # `start`/`end` walk the parameter vector in the same canonical
    # order as `transform_params`; each section inverts the
    # corresponding transform there.
    start = end = 0

    # Retain the trend parameters
    if self.k_trend > 0:
        end += self.k_trend
        unconstrained[start:end] = constrained[start:end]
        start += self.k_trend

    # Retain any MLE regression coefficients
    if self.mle_regression:
        end += self.k_exog
        unconstrained[start:end] = constrained[start:end]
        start += self.k_exog

    # Transform the AR parameters (phi) to be stationary
    if self.k_ar_params > 0:
        end += self.k_ar_params
        if self.enforce_stationarity:
            unconstrained[start:end] = (
                unconstrain_stationary_univariate(constrained[start:end])
            )
        else:
            unconstrained[start:end] = constrained[start:end]
        start += self.k_ar_params

    # Transform the MA parameters (theta) to be invertible
    # (sign flip mirrors the one applied in `transform_params`)
    if self.k_ma_params > 0:
        end += self.k_ma_params
        if self.enforce_invertibility:
            unconstrained[start:end] = (
                unconstrain_stationary_univariate(-constrained[start:end])
            )
        else:
            unconstrained[start:end] = constrained[start:end]
        start += self.k_ma_params

    # Transform the seasonal AR parameters (\tilde phi) to be stationary
    if self.k_seasonal_ar > 0:
        end += self.k_seasonal_ar_params
        if self.enforce_stationarity:
            unconstrained[start:end] = (
                unconstrain_stationary_univariate(constrained[start:end])
            )
        else:
            unconstrained[start:end] = constrained[start:end]
        start += self.k_seasonal_ar_params

    # Transform the seasonal MA parameters (\tilde theta) to be invertible
    if self.k_seasonal_ma_params > 0:
        end += self.k_seasonal_ma_params
        if self.enforce_invertibility:
            unconstrained[start:end] = (
                unconstrain_stationary_univariate(-constrained[start:end])
            )
        else:
            unconstrained[start:end] = constrained[start:end]
        start += self.k_seasonal_ma_params

    # Untransform the standard deviation (square root inverts the
    # squaring applied in `transform_params`)
    if self.state_regression and self.time_varying_regression:
        end += self.k_exog
        unconstrained[start:end] = constrained[start:end]**0.5
        start += self.k_exog
    if self.measurement_error:
        unconstrained[start] = constrained[start]**0.5
        start += 1
        end += 1
    if self.state_error:
        unconstrained[start] = constrained[start]**0.5
        # start += 1
        # end += 1

    return unconstrained
def update(self, params, transformed=True, complex_step=False):
    """
    Update the parameters of the model

    Updates the representation matrices to fill in the new parameter
    values.

    Parameters
    ----------
    params : array_like
        Array of new parameters.
    transformed : boolean, optional
        Whether or not `params` is already transformed. If set to False,
        `transform_params` is called. Default is True.
    complex_step : boolean, optional
        Whether the update is part of complex-step differentiation;
        passed through to the parent class `update`.

    Returns
    -------
    params : array_like
        Array of parameters.
    """
    # BUG FIX: `complex_step=False` was previously hard-coded here,
    # silently ignoring the caller's `complex_step` argument.
    params = super(SARIMAX, self).update(params, transformed=transformed,
                                         complex_step=complex_step)

    params_trend = None
    params_exog = None
    params_ar = None
    params_ma = None
    params_seasonal_ar = None
    params_seasonal_ma = None
    params_exog_variance = None
    params_measurement_variance = None
    params_variance = None

    # Extract the parameters, walking the vector in the canonical
    # ordering of `params_complete`
    start = end = 0
    end += self.k_trend
    params_trend = params[start:end]
    start += self.k_trend
    if self.mle_regression:
        end += self.k_exog
        params_exog = params[start:end]
        start += self.k_exog
    end += self.k_ar_params
    params_ar = params[start:end]
    start += self.k_ar_params
    end += self.k_ma_params
    params_ma = params[start:end]
    start += self.k_ma_params
    end += self.k_seasonal_ar_params
    params_seasonal_ar = params[start:end]
    start += self.k_seasonal_ar_params
    end += self.k_seasonal_ma_params
    params_seasonal_ma = params[start:end]
    start += self.k_seasonal_ma_params
    if self.state_regression and self.time_varying_regression:
        end += self.k_exog
        params_exog_variance = params[start:end]
        start += self.k_exog
    if self.measurement_error:
        params_measurement_variance = params[start]
        start += 1
        end += 1
    if self.state_error:
        params_variance = params[start]
    # start += 1
    # end += 1

    # Update lag polynomials.  If the dtype changed (e.g. complex
    # parameters during complex-step differentiation), rebuild the
    # polynomial arrays in the new dtype before writing into them.
    if self.k_ar > 0:
        if self.polynomial_ar.dtype == params.dtype:
            self.polynomial_ar[self._polynomial_ar_idx] = -params_ar
        else:
            polynomial_ar = self.polynomial_ar.real.astype(params.dtype)
            polynomial_ar[self._polynomial_ar_idx] = -params_ar
            self.polynomial_ar = polynomial_ar

    if self.k_ma > 0:
        if self.polynomial_ma.dtype == params.dtype:
            self.polynomial_ma[self._polynomial_ma_idx] = params_ma
        else:
            polynomial_ma = self.polynomial_ma.real.astype(params.dtype)
            polynomial_ma[self._polynomial_ma_idx] = params_ma
            self.polynomial_ma = polynomial_ma

    if self.k_seasonal_ar > 0:
        idx = self._polynomial_seasonal_ar_idx
        if self.polynomial_seasonal_ar.dtype == params.dtype:
            self.polynomial_seasonal_ar[idx] = -params_seasonal_ar
        else:
            polynomial_seasonal_ar = (
                self.polynomial_seasonal_ar.real.astype(params.dtype)
            )
            polynomial_seasonal_ar[idx] = -params_seasonal_ar
            self.polynomial_seasonal_ar = polynomial_seasonal_ar

    if self.k_seasonal_ma > 0:
        idx = self._polynomial_seasonal_ma_idx
        if self.polynomial_seasonal_ma.dtype == params.dtype:
            self.polynomial_seasonal_ma[idx] = params_seasonal_ma
        else:
            polynomial_seasonal_ma = (
                self.polynomial_seasonal_ma.real.astype(params.dtype)
            )
            polynomial_seasonal_ma[idx] = params_seasonal_ma
            self.polynomial_seasonal_ma = polynomial_seasonal_ma

    # Get the reduced form lag polynomial terms by multiplying the regular
    # and seasonal lag polynomials
    # Note: that although the numpy np.polymul examples assume that they
    # are ordered from highest degree to lowest, whereas our are from
    # lowest to highest, it does not matter.
    if self.k_seasonal_ar > 0:
        reduced_polynomial_ar = -np.polymul(
            self.polynomial_ar, self.polynomial_seasonal_ar
        )
    else:
        reduced_polynomial_ar = -self.polynomial_ar
    if self.k_seasonal_ma > 0:
        reduced_polynomial_ma = np.polymul(
            self.polynomial_ma, self.polynomial_seasonal_ma
        )
    else:
        reduced_polynomial_ma = self.polynomial_ma

    # Observation intercept
    # Exogenous data with MLE estimation of parameters enters through a
    # time-varying observation intercept (is equivalent to simply
    # subtracting it out of the endogenous variable first)
    if self.mle_regression:
        self.ssm['obs_intercept'] = np.dot(self.exog, params_exog)[None, :]

    # State intercept (Harvey) or additional observation intercept
    # (Hamilton)
    # SARIMA trend enters through the a time-varying state intercept,
    # associated with the first row of the stationary component of the
    # state vector (i.e. the first element of the state vector following
    # any differencing elements)
    if self.k_trend > 0:
        data = np.dot(self._trend_data, params_trend).astype(params.dtype)
        if not self.hamilton_representation:
            self.ssm['state_intercept', self._k_states_diff, :] = data
        else:
            # The way the trend enters in the Hamilton representation means
            # that the parameter is not an ``intercept'' but instead the
            # mean of the process. The trend values in `data` are meant for
            # an intercept, and so must be transformed to represent the
            # mean instead
            # (this inner check is always true inside this branch; kept
            # for parity with the original structure)
            if self.hamilton_representation:
                data /= np.sum(-reduced_polynomial_ar)

            # If we already set the observation intercept for MLE
            # regression, just add to it
            if self.mle_regression:
                self.ssm.obs_intercept += data[None, :]
            # Otherwise set it directly
            else:
                self.ssm['obs_intercept'] = data[None, :]

    # Observation covariance matrix
    if self.measurement_error:
        self.ssm['obs_cov', 0, 0] = params_measurement_variance

    # Transition matrix
    if self.k_ar > 0 or self.k_seasonal_ar > 0:
        self.ssm[self.transition_ar_params_idx] = reduced_polynomial_ar[1:]
    elif not self.ssm.transition.dtype == params.dtype:
        # This is required if the transition matrix is not really in use
        # (e.g. for an MA(q) process) so that it's dtype never changes as
        # the parameters' dtype changes. This changes the dtype manually.
        self.ssm['transition'] = self.ssm['transition'].real.astype(
            params.dtype)

    # Selection matrix (Harvey) or Design matrix (Hamilton)
    if self.k_ma > 0 or self.k_seasonal_ma > 0:
        if not self.hamilton_representation:
            self.ssm[self.selection_ma_params_idx] = (
                reduced_polynomial_ma[1:]
            )
        else:
            self.ssm[self.design_ma_params_idx] = reduced_polynomial_ma[1:]

    # State covariance matrix
    if self.k_posdef > 0:
        self.ssm['state_cov', 0, 0] = params_variance
        if self.state_regression and self.time_varying_regression:
            self.ssm[self._exog_variance_idx] = params_exog_variance

    return params
class SARIMAXResults(MLEResults):
"""
Class to hold results from fitting an SARIMAX model.
Parameters
----------
model : SARIMAX instance
The fitted model instance
Attributes
----------
specification : dictionary
Dictionary including all attributes from the SARIMAX model instance.
polynomial_ar : array
Array containing autoregressive lag polynomial coefficients,
ordered from lowest degree to highest. Initialized with ones, unless
a coefficient is constrained to be zero (in which case it is zero).
polynomial_ma : array
Array containing moving average lag polynomial coefficients,
ordered from lowest degree to highest. Initialized with ones, unless
a coefficient is constrained to be zero (in which case it is zero).
polynomial_seasonal_ar : array
Array containing seasonal autoregressive lag polynomial coefficients,
ordered from lowest degree to highest. Initialized with ones, unless
a coefficient is constrained to be zero (in which case it is zero).
polynomial_seasonal_ma : array
Array containing seasonal moving average lag polynomial coefficients,
ordered from lowest degree to highest. Initialized with ones, unless
a coefficient is constrained to be zero (in which case it is zero).
polynomial_trend : array
Array containing trend polynomial coefficients, ordered from lowest
degree to highest. Initialized with ones, unless a coefficient is
constrained to be zero (in which case it is zero).
model_orders : list of int
The orders of each of the polynomials in the model.
param_terms : list of str
List of parameters actually included in the model, in sorted order.
See Also
--------
statsmodels.tsa.statespace.kalman_filter.FilterResults
statsmodels.tsa.statespace.mlemodel.MLEResults
"""
def __init__(self, model, params, filter_results, cov_type='opg',
             **kwargs):
    """Store fitted-model results and cache SARIMAX-specific metadata.

    Saves the model specification as a Bunch, the lag polynomials, the
    reduced-form polynomials, and per-group parameter slices
    (``_params_ar``, ``_params_ma``, ...) used by the cached parameter
    properties on this class.
    """
    super(SARIMAXResults, self).__init__(model, params, filter_results,
                                         cov_type, **kwargs)

    self.df_resid = np.inf  # attribute required for wald tests

    # Save _init_kwds
    self._init_kwds = self.model._get_init_kwds()

    # Save model specification
    self.specification = Bunch(**{
        # Set additional model parameters
        'seasonal_periods': self.model.seasonal_periods,
        'measurement_error': self.model.measurement_error,
        'time_varying_regression': self.model.time_varying_regression,
        'simple_differencing': self.model.simple_differencing,
        'enforce_stationarity': self.model.enforce_stationarity,
        'enforce_invertibility': self.model.enforce_invertibility,
        'hamilton_representation': self.model.hamilton_representation,

        'order': self.model.order,
        'seasonal_order': self.model.seasonal_order,

        # Model order
        'k_diff': self.model.k_diff,
        'k_seasonal_diff': self.model.k_seasonal_diff,
        'k_ar': self.model.k_ar,
        'k_ma': self.model.k_ma,
        'k_seasonal_ar': self.model.k_seasonal_ar,
        'k_seasonal_ma': self.model.k_seasonal_ma,

        # Param Numbers
        'k_ar_params': self.model.k_ar_params,
        'k_ma_params': self.model.k_ma_params,

        # Trend / Regression
        'trend': self.model.trend,
        'k_trend': self.model.k_trend,
        'k_exog': self.model.k_exog,

        'mle_regression': self.model.mle_regression,
        'state_regression': self.model.state_regression,
    })

    # Polynomials
    self.polynomial_trend = self.model.polynomial_trend
    self.polynomial_ar = self.model.polynomial_ar
    self.polynomial_ma = self.model.polynomial_ma
    self.polynomial_seasonal_ar = self.model.polynomial_seasonal_ar
    self.polynomial_seasonal_ma = self.model.polynomial_seasonal_ma
    self.polynomial_reduced_ar = np.polymul(
        self.polynomial_ar, self.polynomial_seasonal_ar
    )
    self.polynomial_reduced_ma = np.polymul(
        self.polynomial_ma, self.polynomial_seasonal_ma
    )

    # Distinguish parameters: slice self.params into one attribute per
    # included parameter group, in canonical term order
    self.model_orders = self.model.model_orders
    self.param_terms = self.model.param_terms
    start = end = 0
    for name in self.param_terms:
        # AR/MA groups count only the free (non-constrained)
        # coefficients; other groups use the model order directly
        if name == 'ar':
            k = self.model.k_ar_params
        elif name == 'ma':
            k = self.model.k_ma_params
        elif name == 'seasonal_ar':
            k = self.model.k_seasonal_ar_params
        elif name == 'seasonal_ma':
            k = self.model.k_seasonal_ma_params
        else:
            k = self.model_orders[name]
        end += k
        setattr(self, '_params_%s' % name, self.params[start:end])
        start += k

    # Handle removing data
    self._data_attr_model.extend(['orig_endog', 'orig_exog'])
@cache_readonly
def arroots(self):
"""
(array) Roots of the reduced form autoregressive lag polynomial
"""
return np.roots(self.polynomial_reduced_ar)**-1
@cache_readonly
def maroots(self):
"""
(array) Roots of the reduced form moving average lag polynomial
"""
return np.roots(self.polynomial_reduced_ma)**-1
@cache_readonly
def arfreq(self):
"""
(array) Frequency of the roots of the reduced form autoregressive
lag polynomial
"""
z = self.arroots
if not z.size:
return
return np.arctan2(z.imag, z.real) / (2 * np.pi)
@cache_readonly
def mafreq(self):
"""
(array) Frequency of the roots of the reduced form moving average
lag polynomial
"""
z = self.maroots
if not z.size:
return
return np.arctan2(z.imag, z.real) / (2 * np.pi)
    @cache_readonly
    def arparams(self):
        """
        (array) Autoregressive parameters actually estimated in the model.
        Does not include seasonal autoregressive parameters (see
        `seasonalarparams`) or parameters whose values are constrained to be
        zero.
        """
        # `_params_ar` is the slice of `self.params` carved out in __init__
        # via the `_params_<name>` loop over `param_terms`.
        return self._params_ar
    @cache_readonly
    def seasonalarparams(self):
        """
        (array) Seasonal autoregressive parameters actually estimated in the
        model. Does not include nonseasonal autoregressive parameters (see
        `arparams`) or parameters whose values are constrained to be zero.
        """
        # Slice of `self.params` created in __init__ (see `param_terms` loop).
        return self._params_seasonal_ar
    @cache_readonly
    def maparams(self):
        """
        (array) Moving average parameters actually estimated in the model.
        Does not include seasonal moving average parameters (see
        `seasonalmaparams`) or parameters whose values are constrained to be
        zero.
        """
        # Slice of `self.params` created in __init__ (see `param_terms` loop).
        return self._params_ma
    @cache_readonly
    def seasonalmaparams(self):
        """
        (array) Seasonal moving average parameters actually estimated in the
        model. Does not include nonseasonal moving average parameters (see
        `maparams`) or parameters whose values are constrained to be zero.
        """
        # Slice of `self.params` created in __init__ (see `param_terms` loop).
        return self._params_seasonal_ma
    def get_prediction(self, start=None, end=None, dynamic=False, index=None,
                       exog=None, **kwargs):
        """
        In-sample prediction and out-of-sample forecasting

        Parameters
        ----------
        start : int, str, or datetime, optional
            Zero-indexed observation number at which to start forecasting, ie.,
            the first forecast is start. Can also be a date string to
            parse or a datetime type. Default is the zeroth observation.
        end : int, str, or datetime, optional
            Zero-indexed observation number at which to end forecasting, ie.,
            the first forecast is start. Can also be a date string to
            parse or a datetime type. However, if the dates index does not
            have a fixed frequency, end must be an integer index if you
            want out of sample prediction. Default is the last observation in
            the sample.
        exog : array_like, optional
            If the model includes exogenous regressors, you must provide
            exactly enough out-of-sample values for the exogenous variables if
            end is beyond the last observation in the sample.
        dynamic : boolean, int, str, or datetime, optional
            Integer offset relative to `start` at which to begin dynamic
            prediction. Can also be an absolute date string to parse or a
            datetime type (these are not interpreted as offsets).
            Prior to this observation, true endogenous values will be used for
            prediction; starting with this observation and continuing through
            the end of prediction, forecasted endogenous values will be used
            instead.
        full_results : boolean, optional
            If True, returns a FilterResults instance; if False returns a
            tuple with forecasts, the forecast errors, and the forecast error
            covariance matrices. Default is False.
        **kwargs
            Additional arguments may required for forecasting beyond the end
            of the sample. See `FilterResults.predict` for more details.

        Returns
        -------
        forecast : array
            Array of out of sample forecasts.
        """
        if start is None:
            start = self.model._index[0]

        # Handle start, end, dynamic
        _start, _end, _out_of_sample, prediction_index = (
            self.model._get_prediction_index(start, end, index, silent=True))

        # Handle exogenous parameters
        if _out_of_sample and (self.model.k_exog + self.model.k_trend > 0):
            # Create a new faux SARIMAX model for the extended dataset
            # (endog is all zeros: only the state-space matrices are needed).
            nobs = self.model.data.orig_endog.shape[0] + _out_of_sample
            endog = np.zeros((nobs, self.model.k_endog))

            if self.model.k_exog > 0:
                if exog is None:
                    raise ValueError('Out-of-sample forecasting in a model'
                                     ' with a regression component requires'
                                     ' additional exogenous values via the'
                                     ' `exog` argument.')
                exog = np.array(exog)
                required_exog_shape = (_out_of_sample, self.model.k_exog)
                if not exog.shape == required_exog_shape:
                    raise ValueError('Provided exogenous values are not of the'
                                     ' appropriate shape. Required %s, got %s.'
                                     % (str(required_exog_shape),
                                        str(exog.shape)))
                # Stack the out-of-sample exog rows under the in-sample ones.
                exog = np.c_[self.model.data.orig_exog.T, exog.T].T

            model_kwargs = self._init_kwds.copy()
            model_kwargs['exog'] = exog
            model = SARIMAX(endog, **model_kwargs)
            model.update(self.params)

            # Set the kwargs with the update time-varying state space
            # representation matrices
            for name in self.filter_results.shapes.keys():
                if name == 'obs':
                    continue
                mat = getattr(model.ssm, name)
                if mat.shape[-1] > 1:
                    # Time-varying matrix: forward only the out-of-sample tail.
                    if len(mat.shape) == 2:
                        kwargs[name] = mat[:, -_out_of_sample:]
                    else:
                        kwargs[name] = mat[:, :, -_out_of_sample:]
        elif self.model.k_exog == 0 and exog is not None:
            warn('Exogenous array provided to predict, but additional data not'
                 ' required. `exog` argument ignored.', ValueWarning)

        return super(SARIMAXResults, self).get_prediction(
            start=start, end=end, dynamic=dynamic, index=index, exog=exog,
            **kwargs)
    def summary(self, alpha=.05, start=None):
        # Create the model name, e.g. "SARIMAX(1, 0, 1)x(1, 0, 1, 12)".

        # See if we have an ARIMA component
        order = ''
        if self.model.k_ar + self.model.k_diff + self.model.k_ma > 0:
            if self.model.k_ar == self.model.k_ar_params:
                order_ar = self.model.k_ar
            else:
                # Gaps in the lag polynomial: report the explicit nonzero lags
                # instead of a single maximum order.
                order_ar = tuple(self.polynomial_ar.nonzero()[0][1:])
            if self.model.k_ma == self.model.k_ma_params:
                order_ma = self.model.k_ma
            else:
                order_ma = tuple(self.polynomial_ma.nonzero()[0][1:])
            # If there is simple differencing, then that is reflected in the
            # dependent variable name
            k_diff = 0 if self.model.simple_differencing else self.model.k_diff
            order = '(%s, %d, %s)' % (order_ar, k_diff, order_ma)
        # See if we have an SARIMA component
        seasonal_order = ''
        has_seasonal = (
            self.model.k_seasonal_ar +
            self.model.k_seasonal_diff +
            self.model.k_seasonal_ma
        ) > 0
        if has_seasonal:
            # NOTE(review): the two checks below compare the *non-seasonal*
            # k_ar/k_ma counts; presumably the seasonal counts were intended —
            # confirm against upstream before changing.
            if self.model.k_ar == self.model.k_ar_params:
                order_seasonal_ar = (
                    int(self.model.k_seasonal_ar / self.model.seasonal_periods)
                )
            else:
                order_seasonal_ar = (
                    tuple(self.polynomial_seasonal_ar.nonzero()[0][1:])
                )
            if self.model.k_ma == self.model.k_ma_params:
                order_seasonal_ma = (
                    int(self.model.k_seasonal_ma / self.model.seasonal_periods)
                )
            else:
                order_seasonal_ma = (
                    tuple(self.polynomial_seasonal_ma.nonzero()[0][1:])
                )
            # If there is simple differencing, then that is reflected in the
            # dependent variable name
            k_seasonal_diff = self.model.k_seasonal_diff
            if self.model.simple_differencing:
                k_seasonal_diff = 0
            seasonal_order = ('(%s, %d, %s, %d)' %
                              (str(order_seasonal_ar), k_seasonal_diff,
                               str(order_seasonal_ma),
                               self.model.seasonal_periods))
            if not order == '':
                order += 'x'
        model_name = (
            '%s%s%s' % (self.model.__class__.__name__, order, seasonal_order)
        )
        return super(SARIMAXResults, self).summary(
            alpha=alpha, start=start, model_name=model_name
        )
    # Inherit the full docstring from the base results class.
    summary.__doc__ = MLEResults.summary.__doc__
class SARIMAXResultsWrapper(MLEResultsWrapper):
    # No SARIMAX-specific attributes or methods need wrapping beyond what
    # MLEResultsWrapper already provides; the empty dicts keep the wrapper
    # machinery's interface satisfied.
    _attrs = {}
    _wrap_attrs = wrap.union_dicts(MLEResultsWrapper._wrap_attrs,
                                   _attrs)
    _methods = {}
    _wrap_methods = wrap.union_dicts(MLEResultsWrapper._wrap_methods,
                                     _methods)
# Register the wrapper for SARIMAXResults instances.
wrap.populate_wrapper(SARIMAXResultsWrapper, SARIMAXResults)
| 40.761491 | 80 | 0.600368 |
10f712e601281931519f0041296953f302e4ca8b | 1,068 | py | Python | kubernetes/test/test_apps_v1beta1_deployment_strategy.py | woqer/python | 3a6fe8231cefe1fa39a0a69d4b2f33044ab32745 | [
"Apache-2.0"
] | 1 | 2019-07-12T05:38:06.000Z | 2019-07-12T05:38:06.000Z | kubernetes/test/test_apps_v1beta1_deployment_strategy.py | woqer/python | 3a6fe8231cefe1fa39a0a69d4b2f33044ab32745 | [
"Apache-2.0"
] | null | null | null | kubernetes/test/test_apps_v1beta1_deployment_strategy.py | woqer/python | 3a6fe8231cefe1fa39a0a69d4b2f33044ab32745 | [
"Apache-2.0"
] | 1 | 2021-05-18T12:25:56.000Z | 2021-05-18T12:25:56.000Z | # coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.11.3
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import os
import sys
import unittest
import kubernetes.client
from kubernetes.client.rest import ApiException
from kubernetes.client.models.apps_v1beta1_deployment_strategy import AppsV1beta1DeploymentStrategy
class TestAppsV1beta1DeploymentStrategy(unittest.TestCase):
    """ AppsV1beta1DeploymentStrategy unit test stubs """

    # Auto-generated stub (Swagger Codegen): no fixtures are required yet, so
    # setUp/tearDown are intentionally empty.
    def setUp(self):
        pass

    def tearDown(self):
        pass

    def testAppsV1beta1DeploymentStrategy(self):
        """
        Test AppsV1beta1DeploymentStrategy
        """
        # FIXME: construct object with mandatory attributes with example values
        #model = kubernetes.client.models.apps_v1beta1_deployment_strategy.AppsV1beta1DeploymentStrategy()
        pass
if __name__ == '__main__':
unittest.main()
| 23.733333 | 106 | 0.737828 |
7245021b90a853ba763f54e0c76f0b63a6f899a2 | 359 | py | Python | src/sessions/ses-friends-s2.py | eddyfortier/task_stimuli | b3e0c477775d42b0efa4389531042a80a848fe86 | [
"MIT"
] | 4 | 2019-09-10T13:21:23.000Z | 2021-11-17T11:37:54.000Z | src/sessions/ses-friends-s2.py | eddyfortier/task_stimuli | b3e0c477775d42b0efa4389531042a80a848fe86 | [
"MIT"
] | 14 | 2019-11-14T16:41:42.000Z | 2022-03-25T14:55:40.000Z | src/sessions/ses-friends-s2.py | eddyfortier/task_stimuli | b3e0c477775d42b0efa4389531042a80a848fe86 | [
"MIT"
] | 9 | 2019-08-19T19:08:11.000Z | 2021-09-16T15:45:43.000Z | from ..tasks import video
TASKS = []
for episode in range(1, 25):
for segment in "ab":
TASKS.append(
video.SingleVideo(
"data/videos/friends/s2/friends_s2e%02d%s.mkv" % (episode, segment),
aspect_ratio=4 / 3.0,
name="task-friends-s2e%d%s" % (episode, segment),
)
)
| 25.642857 | 84 | 0.518106 |
ad7bc6799ee46ecd099879d4daafe4e3717d48b6 | 596 | py | Python | src/PyMIPS/__main__.py | shenganzhang/Py-MI-PS | 2d22327c75bac1b58a4804a61e7a703ecc5ba978 | [
"MIT"
] | 3 | 2019-05-14T21:24:59.000Z | 2021-08-04T01:43:22.000Z | src/PyMIPS/__main__.py | shenganzhang/Py-MI-PS | 2d22327c75bac1b58a4804a61e7a703ecc5ba978 | [
"MIT"
] | null | null | null | src/PyMIPS/__main__.py | shenganzhang/Py-MI-PS | 2d22327c75bac1b58a4804a61e7a703ecc5ba978 | [
"MIT"
] | 2 | 2021-08-04T01:43:25.000Z | 2021-11-23T06:54:17.000Z | #!/usr/local/bin/python
import sys
from PyMIPS.lexer import lex
from PyMIPS.AST.ast import parse
from PyMIPS.Datastructure.execution_stack import run_program
def main(args=None):
    """CLI entry point: lex, parse and run the MIPS file named on the
    command line."""
    argv = sys.argv
    if len(argv) == 1:
        print("Usage: pymips <path to file>")
        return
    if len(argv) > 2:
        print("Too many arguments")
        return
    with open(argv[1]) as file:
        characters = file.read()
    tokens = lex(characters)
    res = parse(tokens)
    run_program(res.value)
if __name__ == "__main__":
main()
| 22.074074 | 60 | 0.61745 |
e3f8cebfd3c8193ca0ae882ef9bc4307e2b9ce1f | 7,707 | py | Python | simulator/simuDropout.py | neevor/brie | 42873ade2ed4e11288c7626182d6ef732aa1bd09 | [
"Apache-2.0"
] | 38 | 2017-01-06T00:18:46.000Z | 2022-01-25T19:44:10.000Z | simulator/simuDropout.py | neevor/brie | 42873ade2ed4e11288c7626182d6ef732aa1bd09 | [
"Apache-2.0"
] | 28 | 2017-01-11T09:12:57.000Z | 2022-02-14T14:53:48.000Z | simulator/simuDropout.py | neevor/brie | 42873ade2ed4e11288c7626182d6ef732aa1bd09 | [
"Apache-2.0"
] | 12 | 2018-02-13T20:23:00.000Z | 2022-01-05T18:39:19.000Z | # This file to simulate single cell RNA-seq reads based on real bulk RNA-seq
# expression profile and input dropout rate and number of reads.
import os
import sys
import time
import subprocess
import numpy as np
from optparse import OptionParser, OptionGroup
# import pyximport; pyximport.install()
from utils import id_mapping
START_TIME = time.time()
def logistic(x):
    """
    Numerically stable logistic function, mapping (-inf, inf) to (0, 1).

    Parameters
    ----------
    x: float, int, array, list
        input variable

    Returns
    -------
    val: float, array
        logistic(x); a Python float for scalar input, an ndarray otherwise
    """
    x = np.asarray(x, dtype=float)
    # exp(-|x|) never overflows, so both branches below are finite; the
    # previous exp(x)/(1+exp(x)) form returned nan for large positive x.
    z = np.exp(-np.abs(x))
    val = np.where(x >= 0, 1.0, z) / (1.0 + z)
    # Preserve scalar-in/scalar-out behaviour.
    return val.item() if np.ndim(val) == 0 else val
def logit(x, minval=0.001):
    """
    Logit function, mapping (0, 1) to (-inf, inf).

    Inputs are clipped to [minval, 1 - minval] before the transform. Unlike
    the previous implementation, array inputs are NOT modified in place; the
    clipping happens on a copy.

    Parameters
    ----------
    x: float, int, array, list
        input variable
    minval: float (optional, default=0.001)
        minimum value of input x, and maximum of 1-x

    Returns
    -------
    val: float, array
        logit(x)
    """
    if isinstance(x, (list, tuple, np.ndarray)):
        # np.clip on a float copy: the original mutated the caller's array
        # in place, a surprising side effect for callers.
        x = np.clip(np.asarray(x, dtype=float), minval, 1 - minval)
    else:
        x = min(max(x, minval), 1 - minval)
    val = np.log(x / (1 - x))
    return val
def adjust_drop_prob(drop_prob, rate_new=0.3):
    """
    Shift a drop-out probability profile so its mean matches ``rate_new``.

    The profile is moved by a constant offset in logit space; the offset is
    chosen from a fixed grid as the one whose resulting mean drop-out rate
    is closest to ``rate_new``.

    Parameters:
    -----------
    drop_prob: array like
        the drop-out probability distribution
    rate_new: float
        the target average drop-out rate for the output

    Returns
    -------
    drop_prob_new: array like
        the updated drop-out probabilities with average rate close to
        rate_new
    """
    offsets = np.arange(-10, 10, 0.05)
    base_logit = logit(drop_prob)
    # Mean drop-out rate produced by each candidate offset.
    mean_rates = np.array([np.mean(logistic(base_logit + off))
                           for off in offsets])
    best = np.argmin(np.abs(mean_rates - rate_new))
    return logistic(base_logit + offsets[best])
def main():
    """Apply drop-outs to a bulk RNA-seq expression profile and run the
    spanki simulator to produce single-cell-like reads.

    Reads a diceseq output file, zeroes out transcripts/genes according to
    the drop-out profile, writes per-transcript RPK values, then shells out
    to ``spankisim_transcripts`` and compresses/cleans its output.
    """
    # part 0. parse command line options
    parser = OptionParser()
    parser.add_option("--anno_file", "-a", dest="anno_file", default=None,
                      help="Annotation file for genes and transcripts.")
    parser.add_option("--ref_file", "-f", dest="ref_file", default=None,
                      help="Reference genome in fasta formate.")
    parser.add_option("--out_dir", "-o", dest="out_dir", default=None,
                      help="Directory of the output files.")
    parser.add_option("--dice_file", "-d", dest="dice_file", default=None,
                      help="diceseq output file from bulk RNA-seq.")

    group = OptionGroup(parser, "Optional arguments")
    group.add_option("--tranLevel", action="store_true", dest="tran_level",
                     default=False,
                     help="Dropout at transcript level; otherwise gene level")
    group.add_option("--dropoutRate", "-r", dest="dropout_rate", type="float",
                     default=None, help="Dropout rate on average.")
    group.add_option("--dropoutProb", dest="dropout_prob", default=None,
                     help="Dropout probability of transcript. This will ignore the "
                          "dropoutRate argument. File formate (tsv with header): gene,tran,prob.")
    group.add_option("--num-reads", "-N", dest="num_reads", type="int",
                     default=1000000,
                     help="Number of reads in total. [default: %default]")
    parser.add_option_group(group)

    group = OptionGroup(parser, "Spanki arguments")
    group.add_option("-m", dest="mismatch_mode", default="random",
                     help="Error mode: random, errorfree, NIST, dm3, flyheads, or custom."
                          " [default: %default]")
    group.add_option("--bp", dest="read_len", type="int", default=76,
                     help="Length of each read. [default: %default]")
    group.add_option("--frag", dest="frag_len", type="int", default=200,
                     help="Length of fragments. [default: %default]")
    group.add_option("--ends", dest="ends_num", type="int", default=2,
                     help="Number of reads ends: 1 or 2. [default: %default]")
    parser.add_option_group(group)

    (options, args) = parser.parse_args()
    if len(sys.argv[1:]) == 0:
        print("Welcome to dice-simulator for single-cell RNA-seq!\n")
        print("use -h or --help for help on argument.")
        sys.exit(1)

    # Required arguments.
    if options.anno_file is None:
        print("[dice-simu] Error: need --anno_file for annotation.")
        sys.exit(1)
    else:
        anno_file = options.anno_file
    if options.ref_file is None:
        print("[dice-simu] Error: need --ref_file for reference genome seq.")
        sys.exit(1)
    else:
        ref_file = options.ref_file
    if options.dice_file is None:
        print("[dice-simu] Error: need --dice_file for DICEseq output file.")
        sys.exit(1)
    else:
        # Columns: 0=transcript id, 1=gene id, 3=length, 4=FPKM.
        dice_data = np.genfromtxt(options.dice_file, skip_header=1, dtype="str")
        tran_ids = dice_data[:, 0]
        gene_ids = dice_data[:, 1]
        tran_len = dice_data[:, 3].astype(float)
        FPKM_all = dice_data[:, 4].astype(float)

    # Drop-out grouping: per transcript or per gene.
    if options.tran_level:
        flag_ids = tran_ids
    else:
        flag_ids = gene_ids

    num_reads = options.num_reads
    if options.dropout_prob is None:
        dropout_prob = np.ones(len(dice_data)) * 0.001
    else:
        temp = np.genfromtxt(options.dropout_prob, skip_header=1, dtype="str")
        idx = id_mapping(tran_ids, temp[:, 0])
        dropout_prob = temp[idx, 2].astype(float)
        dropout_prob[dropout_prob < 0.001] = 0.001
        dropout_prob[dropout_prob > 0.999] = 0.999

    # Expressed transcripts. Defined unconditionally: `idx_drop` is needed
    # for the drop-out rate report below even when --dropoutRate is absent
    # (it was previously only bound inside this branch, raising NameError).
    idx_drop = FPKM_all > 0
    if options.dropout_rate is not None:
        dropout_prob[idx_drop] = adjust_drop_prob(dropout_prob[idx_drop],
                                                  options.dropout_rate)

    if options.out_dir is None:
        out_dir = os.path.join(os.path.dirname(options.dice_file), "simuRNA")
    else:
        out_dir = options.out_dir
    if not os.path.exists(out_dir):
        os.makedirs(out_dir)
    rpk_file = os.path.join(out_dir, "tran_rpk.txt")

    # Apply drop-outs: one Bernoulli draw per group (rows of the same group
    # are assumed contiguous), fixed seed for reproducibility.
    np.random.seed(0)
    flag = flag_ids[0]
    keep = np.random.binomial(1, 1 - dropout_prob[0])
    FPKM = np.zeros(len(FPKM_all))
    for i in range(len(FPKM_all)):
        if flag != flag_ids[i]:
            flag = flag_ids[i]
            keep = np.random.binomial(1, 1 - dropout_prob[i])
        FPKM[i] = keep * FPKM_all[i]
    # Convert surviving FPKM to reads-per-kilobase totalling num_reads.
    rpk = FPKM * num_reads * 1000.0 / (np.sum(FPKM * tran_len))
    print("Drop-out rate: %.3f" % np.mean(rpk[idx_drop] == 0))

    with open(rpk_file, "w") as fid:
        fid.writelines("txid\trpk\n")
        for i in range(len(tran_ids)):
            fid.writelines("%s\t%.4f\n" % (tran_ids[i], rpk[i]))

    # Run the spanki read simulator on the rpk table.
    bashCommand = "spankisim_transcripts -o %s -g %s -f %s -t %s " % (
        out_dir, anno_file, ref_file, rpk_file)
    bashCommand += "-bp %d -frag %d -ends %d -m %s" % (
        options.read_len, options.frag_len, options.ends_num,
        options.mismatch_mode)
    print(bashCommand)
    pro = subprocess.Popen(bashCommand.split(), stdout=subprocess.PIPE)
    output = pro.communicate()[0]

    # Compress the simulated fastq files and remove intermediates.
    bashCommand = "gzip %s/sim_1.fastq %s/sim_2.fastq" % (out_dir, out_dir)
    pro = subprocess.Popen(bashCommand.split(), stdout=subprocess.PIPE)
    output = pro.communicate()[0]

    bashCommand = "rm -rf %s/tmp %s/log %s/sim.*" % (out_dir, out_dir, out_dir)
    pro = subprocess.Popen(bashCommand.split(), stdout=subprocess.PIPE)
    output = pro.communicate()[0]
main()
| 35.353211 | 82 | 0.626314 |
2db932d4583a97e4ad371ebfd07d0768b4f3a8d1 | 8,175 | py | Python | sal/decorators.py | lfaraone/sal | d0dff90cebcbc87f18c2c6957264f21566d52000 | [
"Apache-2.0"
] | 1 | 2019-11-01T20:54:47.000Z | 2019-11-01T20:54:47.000Z | sal/decorators.py | grahamgilbert/sal | d247ec1ea8855e65e5855b0dd63eae93b40f86ca | [
"Apache-2.0"
] | null | null | null | sal/decorators.py | grahamgilbert/sal | d247ec1ea8855e65e5855b0dd63eae93b40f86ca | [
"Apache-2.0"
] | null | null | null | """Decorators for class based views."""
import base64
import logging
from functools import wraps
from django.conf import settings
from django.contrib.auth.decorators import login_required
from django.core.exceptions import PermissionDenied
from django.http import HttpResponse
from django.http.response import Http404, HttpResponseServerError
from django.shortcuts import get_object_or_404, redirect
from django.urls import reverse
from django.utils.decorators import method_decorator
from django.views.generic import View
from server.models import BusinessUnit, Machine, MachineGroup, ProfileLevel
def class_login_required(cls):
    """Class decorator for View subclasses to restrict to logged in."""
    cls.dispatch = method_decorator(login_required)(cls.dispatch)
    return cls
def class_ga_required(cls):
    """Class decorator for View subclasses to restrict to GA."""
    cls.dispatch = method_decorator(ga_required)(cls.dispatch)
    return cls
def class_staff_required(cls):
    """Class decorator for View subclasses to restrict to staff."""
    cls.dispatch = method_decorator(staff_required)(cls.dispatch)
    return cls
def class_access_required(cls):
    """Decorator for View subclasses to restrict by business unit.

    Class must declare a classmethod `get_business_unit` that returns
    the BusinessUnit object that applies to the query in question.

    Args:
        cls: Class to decorate.

    Returns:
        Decorated class.

    Raises:
        403 Pemission Denied if current user does not have access.
        404 if requested group doesn't exist.
    """
    def check_access(view_func):
        def wrapped(*args, **kwargs):
            # The request object is the first arg to a view.
            request = args[0]
            business_unit = cls.get_business_unit(**kwargs)
            if not has_access(request.user, business_unit):
                raise PermissionDenied()
            return view_func(*args, **kwargs)
        return wrapped

    cls.dispatch = method_decorator(check_access)(cls.dispatch)
    return cls
def access_required(model):
    """Decorator for view functions to restrict by business unit.

    This decorator requires the view to have a parameter whose name
    ends with '_id'. If there is more than one parameter that meets that
    criteria, who knows what will happen!

    Args:
        model (BusinessUnit, MachineGroup, Machine): The model class
            that will be retrieved by URL parameter.

    Returns:
        Decorated view function.

    Raises:
        403 Pemission Denied if current user does not have access.
        404 if requested group doesn't exist.
    """
    def decorator(view_func):
        @wraps(view_func)
        def wrapped(*args, **kwargs):
            # The request object is the first arg to a view.
            request = args[0]
            instance, business_unit = get_business_unit_by(model, **kwargs)
            if not has_access(request.user, business_unit):
                # Hide the 404 response from users without perms.
                raise PermissionDenied()
            # Stash the business unit and instance to minimize later DB
            # queries.
            kwargs['business_unit'] = business_unit
            kwargs['instance'] = instance
            return view_func(*args, **kwargs)
        return wrapped
    return decorator
def get_business_unit_by(model, **kwargs):
    """Resolve a model instance and its owning BusinessUnit from view kwargs.

    Args:
        model: BusinessUnit, MachineGroup or Machine class.
        **kwargs: View kwargs; exactly one key ending in '_id' supplies the
            primary key (or, for Machine, possibly a serial number).

    Returns:
        (instance, business_unit) tuple; for a BusinessUnit the instance is
        the business unit itself.

    Raises:
        ValueError: if no '_id' kwarg is present, or the pk is non-numeric
            for a model with no serial fallback.
        Http404: if the object does not exist.
    """
    try:
        pk = [v for k, v in kwargs.items() if k.endswith('_id')].pop()
    except IndexError:
        raise ValueError('View lacks an ID parameter!')

    try:
        instance = get_object_or_404(model, pk=pk)
    except ValueError:
        # Sal allows machine serials instead of machine ID in URLs.
        # Handle that special case.
        if model is Machine:
            instance = get_object_or_404(model, serial=pk)
        else:
            # Previously this fell through with `instance` unbound,
            # raising a confusing NameError; propagate the lookup error.
            raise

    if isinstance(instance, MachineGroup):
        return (instance, instance.business_unit)
    elif isinstance(instance, Machine):
        return (instance, instance.machine_group.business_unit)
    else:
        return (instance, instance)
def is_global_admin(user):
    # True when the user's profile level grants global-admin (GA) rights.
    return user.userprofile.level == ProfileLevel.global_admin
def key_auth_required(function):
    """View decorator enforcing HTTP Basic auth against machine group keys.

    The expected credentials are username ``sal`` and a MachineGroup key as
    the password. Auth is skipped entirely when ``settings.BASIC_AUTH`` is
    False. On failure, a 401 with a ``WWW-Authenticate`` challenge is
    returned.
    """
    @wraps(function)
    def wrap(request, *args, **kwargs):
        # Check for valid basic auth header
        if hasattr(settings, 'BASIC_AUTH'):
            use_auth = settings.BASIC_AUTH
        else:
            use_auth = True

        if use_auth is False:
            # Auth disabled site-wide. (Previously called the undefined
            # name `view`, which raised NameError.)
            return function(request, *args, **kwargs)

        if 'HTTP_AUTHORIZATION' in request.META:
            auth = request.META['HTTP_AUTHORIZATION'].split()
            if len(auth) == 2:
                if auth[0].lower() == "basic":
                    # b64decode returns bytes on Python 3; decode before
                    # splitting so `uname == 'sal'` can ever be True.
                    # maxsplit=1 allows keys containing ':' per RFC 7617.
                    decoded = base64.b64decode(auth[1]).decode('utf-8')
                    uname, key = decoded.split(':', 1)
                    try:
                        machine_group = MachineGroup.objects.get(key=key)
                    except MachineGroup.DoesNotExist:
                        machine_group = None
                    if machine_group is not None and uname == 'sal':
                        return function(request, *args, **kwargs)

        # Either they did not provide an authorization header or
        # something in the authorization attempt failed. Send a 401
        # back to them to ask them to authenticate.
        response = HttpResponse()
        response.status_code = 401
        response['WWW-Authenticate'] = 'Basic realm=Sal'
        return response
    return wrap
def has_access(user, business_unit):
    """Return True when `user` may view data scoped to `business_unit`.

    A falsy `business_unit` means the "all business units" scope.
    """
    if is_global_admin(user):
        return True
    if not business_unit:
        # Special case: if a user is in ALL business units, they don't
        # need GA to see the global scope.
        return user.businessunit_set.count() == BusinessUnit.objects.count()
    return user.businessunit_set.filter(pk=business_unit.pk).exists()
def ga_required(function):
    """View decorator to redirect non GA users.

    Wrapped function must have the request object as the first argument.
    """
    # TODO: This can be removed once a class_required_level decorator exists.
    @wraps(function)
    def wrapper(*args, **kwargs):
        request = args[0]
        if request.user.userprofile.level == ProfileLevel.global_admin:
            return function(*args, **kwargs)
        return redirect(reverse('home'))
    return wrapper
def required_level(*decorator_args):
    """View decorator to redirect users without an acceptable profile level.

    Wrapped function must have the request object as the first argument.

    Args:
        *decorator_args (server.model.UserProfile.LEVEL_CHOICES): Any number
            of user profile level choices that should be permitted access.
    """
    def decorator(function):
        @wraps(function)
        def wrapper(*args, **kwargs):
            request = args[0]
            if request.user.userprofile.level in decorator_args:
                return function(*args, **kwargs)
            return redirect(reverse('home'))
        return wrapper
    return decorator
def staff_required(function):
    """View decorator to redirect non staff users.

    Wrapped function must have the request object as the first argument.
    """
    @wraps(function)
    def wrapper(*args, **kwargs):
        request = args[0]
        if request.user.is_staff:
            return function(*args, **kwargs)
        return redirect(reverse('home'))
    return wrapper
def handle_access(request, group_type, group_id):
    """Raise Http404 unless request.user may access the given group.

    `group_type` is one of 'all', 'machine_group', 'business_unit' or
    'machine'; `group_id` identifies the object for the non-'all' types.
    """
    model_map = {
        'machine_group': MachineGroup,
        'business_unit': BusinessUnit,
        'machine': Machine,
    }
    business_unit = None
    if group_type != 'all':
        _, business_unit = get_business_unit_by(model_map[group_type],
                                                group_id=group_id)
    if not has_access(request.user, business_unit):
        logging.warning("%s attempted to access %s for which they have no permissions.",
                        request.user, group_type)
        raise Http404
| 30.965909 | 88 | 0.648196 |
5d5510adef4947a959954e8e8752aac9623ddb6f | 126 | py | Python | eds/openmtc-gevent/common/openmtc/src/openmtc/mapper/exc.py | piyush82/elastest-device-emulator-service | b4d6b393d6042c54a7b3dfb5f58cad5efd00f0e7 | [
"Apache-2.0"
] | null | null | null | eds/openmtc-gevent/common/openmtc/src/openmtc/mapper/exc.py | piyush82/elastest-device-emulator-service | b4d6b393d6042c54a7b3dfb5f58cad5efd00f0e7 | [
"Apache-2.0"
] | null | null | null | eds/openmtc-gevent/common/openmtc/src/openmtc/mapper/exc.py | piyush82/elastest-device-emulator-service | b4d6b393d6042c54a7b3dfb5f58cad5efd00f0e7 | [
"Apache-2.0"
] | null | null | null | """
Created on 02.06.2013
@author: kca
"""
from openmtc.exc import OpenMTCError
class MapperError(OpenMTCError):
    """Raised for errors originating in the OpenMTC mapper subsystem."""
| 10.5 | 36 | 0.714286 |
a6ab814ec7a556b8306acea6227cc1635f7b6d89 | 1,641 | py | Python | Leetcode-cn/1002.查找共用字符.py | joey66666/Codeyard | 08fc599baf1d99e39f878386124af854006a3602 | [
"MIT"
] | null | null | null | Leetcode-cn/1002.查找共用字符.py | joey66666/Codeyard | 08fc599baf1d99e39f878386124af854006a3602 | [
"MIT"
] | 3 | 2020-08-11T10:18:23.000Z | 2021-05-18T15:25:42.000Z | Leetcode-cn/1002.查找共用字符.py | joey66666/Codeyard | 08fc599baf1d99e39f878386124af854006a3602 | [
"MIT"
] | null | null | null | #
# @lc app=leetcode.cn id=1002 lang=python3
#
# [1002] 查找共用字符
#
# https://leetcode-cn.com/problems/find-common-characters/description/
#
# algorithms
# Easy (73.68%)
# Likes: 240
# Dislikes: 0
# Total Accepted: 56.8K
# Total Submissions: 77.2K
# Testcase Example: '["bella","label","roller"]'
#
# 给你一个字符串数组 words ,请你找出所有在 words 的每个字符串中都出现的共用字符( 包括重复字符),并以数组形式返回。你可以按 任意顺序
# 返回答案。
#
#
# 示例 1:
#
#
# 输入:words = ["bella","label","roller"]
# 输出:["e","l","l"]
#
#
# 示例 2:
#
#
# 输入:words = ["cool","lock","cook"]
# 输出:["c","o"]
#
#
#
#
# 提示:
#
#
# 1 <= words.length <= 100
# 1 <= words[i].length <= 100
# words[i] 由小写英文字母组成
#
#
#
# @lc code=start
"""
1. Solution1, Hash统计次数, Time: O(n), Space: O(1), Runtime: 53%
- https://leetcode-cn.com/problems/find-common-characters/solution/1002-cha-zhao-chang-yong-zi-fu-ha-xi-fa-jing-dian-/
- 
"""
class Solution:
    def commonChars(self, words: List[str]) -> List[str]:
        """Return every character (with multiplicity) common to all words.

        Uses Counter intersection (`&`), which keeps the minimum count of
        each character across words — replacing the hand-rolled 26-slot
        frequency arrays. Time O(total chars), space O(1) (alphabet-bounded).
        The problem accepts the result in any order.
        """
        from collections import Counter

        if not words:
            return []
        common = Counter(words[0])
        for word in words[1:]:
            # Intersection keeps min(count in common, count in word).
            common &= Counter(word)
        return list(common.elements())
# @lc code=end
| 21.88 | 122 | 0.540524 |
a60f631c5d15f6f6d5d22a5941df4cd24d54bfc3 | 2,263 | py | Python | colour/models/rgb/datasets/xtreme_rgb.py | aurelienpierre/colour | 3ac45c12fbc0493e49ba4d4b2cb253df9fe14c47 | [
"BSD-3-Clause"
] | null | null | null | colour/models/rgb/datasets/xtreme_rgb.py | aurelienpierre/colour | 3ac45c12fbc0493e49ba4d4b2cb253df9fe14c47 | [
"BSD-3-Clause"
] | null | null | null | colour/models/rgb/datasets/xtreme_rgb.py | aurelienpierre/colour | 3ac45c12fbc0493e49ba4d4b2cb253df9fe14c47 | [
"BSD-3-Clause"
] | null | null | null | """
Xtreme RGB Colourspace
======================
Defines the *Xtreme RGB* colourspace:
- :attr:`colour.models.RGB_COLOURSPACE_XTREME_RGB`.
References
----------
- :cite:`HutchColore` : HutchColor. (n.d.). XtremeRGB (4 K).
http://www.hutchcolor.com/profiles/XtremeRGB.zip
"""
from __future__ import annotations
import numpy as np
from functools import partial
from colour.colorimetry import CCS_ILLUMINANTS
from colour.hints import NDArray
from colour.models.rgb import (
RGB_Colourspace,
gamma_function,
normalised_primary_matrix,
)
__author__ = "Colour Developers"
__copyright__ = "Copyright 2013 Colour Developers"
__license__ = "New BSD License - https://opensource.org/licenses/BSD-3-Clause"
__maintainer__ = "Colour Developers"
__email__ = "colour-developers@colour-science.org"
__status__ = "Production"
__all__ = [
"PRIMARIES_XTREME_RGB",
"WHITEPOINT_NAME_XTREME_RGB",
"CCS_WHITEPOINT_XTREME_RGB",
"MATRIX_XTREME_RGB_TO_XYZ",
"MATRIX_XYZ_TO_XTREME_RGB",
"RGB_COLOURSPACE_XTREME_RGB",
]
PRIMARIES_XTREME_RGB: NDArray = np.array(
[
[1.0, 0.0],
[0.0, 1.0],
[0.0, 0.0],
]
)
"""*Xtreme RGB* colourspace primaries."""
WHITEPOINT_NAME_XTREME_RGB: str = "D50"
"""*Xtreme RGB* colourspace whitepoint name."""
CCS_WHITEPOINT_XTREME_RGB: NDArray = CCS_ILLUMINANTS[
"CIE 1931 2 Degree Standard Observer"
][WHITEPOINT_NAME_XTREME_RGB]
"""*Xtreme RGB* colourspace whitepoint chromaticity coordinates."""
MATRIX_XTREME_RGB_TO_XYZ: NDArray = normalised_primary_matrix(
PRIMARIES_XTREME_RGB, CCS_WHITEPOINT_XTREME_RGB
)
"""*Xtreme RGB* colourspace to *CIE XYZ* tristimulus values matrix."""
MATRIX_XYZ_TO_XTREME_RGB: NDArray = np.linalg.inv(MATRIX_XTREME_RGB_TO_XYZ)
"""*CIE XYZ* tristimulus values to *Xtreme RGB* colourspace matrix."""
RGB_COLOURSPACE_XTREME_RGB: RGB_Colourspace = RGB_Colourspace(
"Xtreme RGB",
PRIMARIES_XTREME_RGB,
CCS_WHITEPOINT_XTREME_RGB,
WHITEPOINT_NAME_XTREME_RGB,
MATRIX_XTREME_RGB_TO_XYZ,
MATRIX_XYZ_TO_XTREME_RGB,
partial(gamma_function, exponent=1 / 2.2),
partial(gamma_function, exponent=2.2),
)
RGB_COLOURSPACE_XTREME_RGB.__doc__ = """
*Xtreme RGB* colourspace.
References
----------
:cite:`HutchColore`
"""
| 26.313953 | 78 | 0.734865 |
34ff4422b94dd3f26749bef600870681ff75f8c5 | 5,384 | py | Python | pychron/entry/editors/irradiation_editor.py | WiscAr/pychron | 8d335d53ba7a5fc70760d9a7cb60540ad169ae84 | [
"Apache-2.0"
] | null | null | null | pychron/entry/editors/irradiation_editor.py | WiscAr/pychron | 8d335d53ba7a5fc70760d9a7cb60540ad169ae84 | [
"Apache-2.0"
] | 80 | 2018-07-17T20:10:20.000Z | 2021-08-17T15:38:24.000Z | pychron/entry/editors/irradiation_editor.py | UManPychron/pychron | b84c9fd70072f9cbda30abe2c471e64fe3dd75d8 | [
"Apache-2.0"
] | null | null | null | # ===============================================================================
# Copyright 2014 Jake Ross
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===============================================================================
# ============= enthought library imports =======================
import json
import os
from traits.api import Instance, Dict, List, Str
from traitsui.api import Item, UItem, VGroup, EnumEditor
# ============= standard library imports ========================
# ============= local library imports ==========================
from pychron.core.helpers.traitsui_shortcuts import okcancel_view
from pychron.core.pychron_traits import BorderVGroup, BorderHGroup
from pychron.core.ui.strings import SpacelessStr
from pychron.entry.editors.base_editor import ModelView
from pychron.entry.editors.chronology import IrradiationChronology
from pychron.entry.editors.production import IrradiationProduction
from pychron.loggable import Loggable
from pychron.paths import paths
class AddView(ModelView):
    """Modal dialog presented when creating a new irradiation."""

    def traits_view(self):
        # Name field stacked above the chronology editor.
        name_and_chronology = VGroup(
            Item('name'),
            BorderVGroup(UItem('chronology', style='custom'),
                         label='Chronology'))
        # Reactor picker backed by the model's reactor_names list.
        reactor_row = BorderHGroup(
            UItem('selected_reactor_name',
                  editor=EnumEditor(name='reactor_names')),
            label='Reactor')
        return okcancel_view(VGroup(name_and_chronology, reactor_row),
                             title='Add Irradiation',
                             width=500)
class EditView(ModelView):
    """Modal dialog presented when editing an existing irradiation."""

    def traits_view(self):
        # Name is read-only while editing; only the chronology may change.
        chronology_group = BorderVGroup(UItem('chronology', style='custom'),
                                        label='Chronology')
        return okcancel_view(VGroup(Item('name', style='readonly'),
                                    chronology_group),
                             title='Edit Irradiation',
                             width=500)
class IrradiationEditor(Loggable):
    """
    class used to create/edit an irradiation
    """
    # Traits attributes (UI model state):
    # chronology: dose schedule being edited.
    # dvc: data-version-control backend used for persistence.
    # reactors / reactor_names: available reactor definitions and their names.
    # selected_reactor_name: reactor currently chosen in the dialog.
    # name: irradiation identifier (spaces disallowed by SpacelessStr).
    chronology = Instance(IrradiationChronology, ())
    dvc = Instance('pychron.dvc.dvc.DVC')
    reactors = Dict
    reactor_names = List
    selected_reactor_name = Str
    name = SpacelessStr

    def add(self):
        """Show the Add dialog in a loop until the user supplies a valid,
        unique name and a reactor, then persist the new irradiation.

        Returns the new irradiation name on success, None if cancelled.
        """
        self._load_reactors()
        v = AddView(model=self)
        info = v.edit_traits()
        # Re-open the dialog until valid input or explicit cancellation.
        while 1:
            if info.result:
                name = self.name
                if not name:
                    # NOTE(review): message text left as-is ("No name enter")
                    # — fixing the typo would change runtime behavior.
                    if self.confirmation_dialog('No name enter. Would you like to enter one?'):
                        info = v.edit_traits()
                        continue
                    else:
                        break

                if not self.dvc.get_irradiation(name):
                    # Name is unique; a reactor must also be selected.
                    if not self.selected_reactor_name:
                        self.information_dialog('Please select a reactor')
                        info = v.edit_traits()
                        continue

                    self._add_irradiation()
                    return name
                else:
                    # Name collision: offer a retry.
                    if self.confirmation_dialog('Irradiation "{}" already exists. '
                                                'Would you like to try again ?'.format(name)):
                        info = v.edit_traits()
                        continue
                    else:
                        break
            else:
                break

    def edit(self):
        """Show the Edit dialog for the current irradiation and, if accepted,
        persist the (possibly modified) chronology and production.

        Returns the irradiation name.
        """
        self._load_reactors()
        # Seed the chronology editor with the stored dose schedule.
        chronology = self.dvc.get_chronology(self.name)
        self.chronology.set_dosages(chronology.get_doses())
        v = EditView(model=self)
        info = v.edit_traits()
        if info.result:
            self._add_irradiation()
            if self.selected_reactor_name:
                self.dvc.add_production_to_irradiation(self.name, self.reactor.name, self.reactor.get_params())
            self.dvc.update_chronology(self.name, self.chronology.get_doses())
        return self.name

    def _add_irradiation(self):
        # Persist the irradiation and, when a reactor is selected, attach
        # that reactor's production parameters.
        self.debug('add irradiation={}'.format(self.name))
        self.dvc.add_irradiation(self.name, self.chronology.get_doses(), verbose=False)
        if self.selected_reactor_name:
            self.dvc.add_production_to_irradiation(self.name, self.reactor.name, self.reactor.get_params())

    def _load_reactors(self):
        # Load reactor definitions from meta_root/reactors.json (if present)
        # and wrap each entry in an IrradiationProduction.
        p = os.path.join(paths.meta_root, 'reactors.json')
        reactors = {}
        if os.path.isfile(p):
            with open(p, 'r') as rfile:
                reactors = json.load(rfile)
            for k, v in reactors.items():
                reactors[k] = IrradiationProduction(k, v)
        self.reactors = reactors
        self.reactor_names = list(reactors.keys())

    @property
    def reactor(self):
        # Convenience accessor for the currently selected reactor object.
        return self.reactors[self.selected_reactor_name]
# ============= EOF =============================================
| 36.378378 | 118 | 0.549034 |
f09e658d8141b9aef3b461719e8d452596ce338d | 21,058 | py | Python | conda_concourse_ci/concourse_config.py | mingwandroid/conda-concourse-ci | 06a581aff81a0b3df59471d49955f0b9708fd04a | [
"BSD-3-Clause"
] | null | null | null | conda_concourse_ci/concourse_config.py | mingwandroid/conda-concourse-ci | 06a581aff81a0b3df59471d49955f0b9708fd04a | [
"BSD-3-Clause"
] | null | null | null | conda_concourse_ci/concourse_config.py | mingwandroid/conda-concourse-ci | 06a581aff81a0b3df59471d49955f0b9708fd04a | [
"BSD-3-Clause"
] | null | null | null | """
Classes for representing Concourse pipeline configuration items
These map to the schema's in https://concourse-ci.org/docs.html
"""
import os
# Map a conda platform subdir (e.g. 'linux-64') to the platform string that
# Concourse expects in a task config's `platform` field.
CONDA_SUBDIR_TO_CONCOURSE_PLATFORM = {
    'win-64': 'windows',
    'win-32': 'windows',
    'osx-64': 'darwin',
    'linux-64': 'linux',
    'linux-32': 'linux',
    'linux-ppc64le': 'linux-ppc64le'
}
class PipelineConfig:
    """ configuration for a concourse pipeline. """
    # https://concourse-ci.org/pipelines.html

    def __init__(self):
        # BUG FIX: these were previously class-level lists, so every
        # PipelineConfig instance shared (and kept appending to) the same
        # five lists.  Create them per-instance instead.
        self.jobs = []
        self.resources = []
        self.resource_types = []
        self.var_sources = []
        self.groups = []

    def add_job(self, name, plan=None, **kwargs):
        """Append a job; extra kwargs pass through to the job schema."""
        if plan is None:
            plan = []
        job = {"name": name, "plan": plan, **kwargs}
        self.jobs.append(job)

    def add_resource(self, name, type_, source, **kwargs):
        """Append a resource definition."""
        resource = {'name': name, 'type': type_, "source": source, **kwargs}
        self.resources.append(resource)

    def add_resource_type(self, name, type_, source, **kwargs):
        """Append a custom resource-type definition."""
        rtype = {'name': name, 'type': type_, "source": source, **kwargs}
        self.resource_types.append(rtype)

    def to_dict(self):
        """Return the serializable pipeline config; empty sections are omitted."""
        out = {}
        attrs = ['jobs', 'resources', 'resource_types', 'var_sources', 'groups']
        for attr in attrs:
            items = getattr(self, attr)
            if not items:
                continue
            # Items are either plain dicts or objects exposing to_dict().
            out[attr] = [v if isinstance(v, dict) else v.to_dict() for v in items]
        return out

    def add_rsync_resource_type(self, docker_user=None, docker_pass=None):
        """Register the custom rsync resource type (docker image)."""
        _source = {
            'repository': 'conda/concourse-rsync-resource',
            'tag': 'latest'
        }
        if docker_user and docker_pass:
            _source.update({'username': docker_user, 'password': docker_pass})
        self.add_resource_type(
            name='rsync-resource',
            type_='docker-image',
            source=_source,
        )

    def add_rsync_recipes(self, config_vars, recipe_folder):
        """Add the rsync resource that delivers recipes to workers."""
        self.add_resource(
            name='rsync-recipes',
            type_='rsync-resource',
            source={
                'server': config_vars['intermediate-server'],
                'base_dir': recipe_folder,
                'user': config_vars['intermediate-user'],
                'private_key': config_vars['intermediate-private-key-job'],
                'disable_version_path': True,
            },
        )

    def add_rsync_source(self, config_vars):
        """Add the rsync resource for the shared source cache."""
        self.add_resource(
            name='rsync-source',
            type_='rsync-resource',
            source={
                'server': config_vars['intermediate-server'],
                'base_dir': os.path.join(config_vars['intermediate-base-folder'], 'source'),
                'user': config_vars['intermediate-user'],
                'private_key': config_vars['intermediate-private-key-job'],
                'disable_version_path': True,
            },
        )

    def add_rsync_stats(self, config_vars):
        """Add the rsync resource where build statistics are published."""
        self.add_resource(
            name='rsync-stats',
            type_='rsync-resource',
            source={
                'server': config_vars['intermediate-server'],
                'base_dir': os.path.join(config_vars['intermediate-base-folder'], 'stats'),
                'user': config_vars['intermediate-user'],
                'private_key': config_vars['intermediate-private-key-job'],
                'disable_version_path': True,
            },
        )

    def add_rsync_build_pack(self, config_vars):
        """Add the rsync resource holding build-environment packages."""
        self.add_resource(
            name='rsync-build-pack',
            type_='rsync-resource',
            source={
                'server': config_vars['intermediate-server'],
                'base_dir': config_vars['build_env_pkgs'],
                'user': config_vars['intermediate-user'],
                'private_key': config_vars['intermediate-private-key-job'],
                'disable_version_path': True,
            },
        )

    def add_rsync_packages(self, resource_name, config_vars):
        """Add a per-pipeline rsync resource for built artifacts."""
        source = {
            'server': config_vars['intermediate-server'],
            'base_dir': os.path.join(
                config_vars['intermediate-base-folder'],
                config_vars['base-name'], 'artifacts'),
            'user': config_vars['intermediate-user'],
            'private_key': config_vars['intermediate-private-key-job'],
            'disable_version_path': True,
        }
        self.add_resource(resource_name, 'rsync-resource', source=source)

    def add_anaconda_upload(self, all_rsync, config_vars):
        """Add the anaconda.org upload job plus its resource type/resource."""
        # BUG FIX: this previously called the non-existent ``self.add_jobs``
        # (AttributeError at runtime); ``add_job`` is the correct method,
        # mirroring add_repo_v6_upload below.
        self.add_job(
            name='anaconda_upload',
            plan=all_rsync + [{'put': 'anaconda_upload_resource'}]
        )
        _source = {
            'repository': 'conda/concourse-anaconda_org-resource',
            'tag': 'latest'
        }
        if config_vars.get('docker-user', None) and config_vars.get('docker-pass', None):
            _source.update({'username': config_vars.get('docker-user'),
                            'password': config_vars.get('docker-pass')})
        self.add_resource_type(
            name='anacondaorg-resource',
            type_='docker-image',
            source=_source,
        )
        self.add_resource(
            name='anaconda_upload_resource',
            type_='anacondaorg-resource',
            source={'token': config_vars['anaconda-upload-token']}
        )

    def add_repo_v6_upload(self, all_rsync, config_vars):
        """Add the repo (v6) upload job plus its resource type/resource."""
        self.add_job(
            name='repo_v6_upload',
            plan=all_rsync + [{'put': 'repo_resource'}]
        )
        _source = {
            'repository': 'condatest/repo_cli',
            'tag': 'latest'}
        if config_vars.get('docker-user', None) and config_vars.get('docker-pass', None):
            _source.update({'username': config_vars.get('docker-user'),
                            'password': config_vars.get('docker-pass')})
        self.add_resource_type(
            name='repo-resource-type',
            type_='docker-image',
            source=_source,
        )
        self.add_resource(
            name='repo_resource',
            type_='repo-resource-type',
            source={
                'token': config_vars['repo-token'],
                'user': config_vars['repo-username'],
                'password': config_vars['repo-password'],
                'channel': config_vars['repo-channel'],
            },
        )

    def add_pr_merged_resource(self, pr_repo, pr_file):
        """Add a git resource that fires when *pr_file* changes on master."""
        self.add_resource(
            name="pr-merged",
            type_="git",
            source={
                "uri": pr_repo,
                "branch": "master",
                "paths": [pr_file],
            },
        )

    def add_upload_job(self, config_vars, commit_msg, pr_merged_resource):
        """ Adds the upload job and a resource (if needed) to the pipeline. """
        plan = []
        if pr_merged_resource:
            plan.append({'get': 'pr-merged', 'trigger': True})
        # add a git resource if specified in the configuration file
        # this resource should be added as an input to the stage-for-upload-config
        # if it is needed in the upload job
        if "stage-for-upload-repo" in config_vars:
            self.add_resource(
                name="stage-packages-scripts",
                type_="git",
                source={
                    "uri": config_vars["stage-for-upload-repo"],
                    "branch": config_vars.get("stage-for-upload-branch", "master"),
                },
            )
            plan.append({'get': 'stage-packages-scripts', 'trigger': False})
        # NOTE: this mutates the nested dict inside config_vars; the job is
        # only added once per pipeline so no cross-job aliasing occurs.
        config = config_vars.get('stage-for-upload-config')
        # add PIPELINE and GIT_COMMIT_MSG to params
        params = config.get('params', {})
        params['PIPELINE'] = config_vars['base-name']
        params['GIT_COMMIT_MSG'] = commit_msg
        config['params'] = params
        plan.append({
            'task': 'stage-packages',
            'trigger': False,
            'config': config,
        })
        self.add_job('stage_for_upload', plan)

    def add_push_branch_job(
            self,
            config_vars,
            folders,
            branches,
            pr_merged_resource,
            stage_job_name):
        """Add one ``push_branch_to_<folder>`` job per feedstock folder.

        Raises:
            Exception: when the number of branches is neither one nor equal
                to the number of folders.
        """
        base_plan = []
        if pr_merged_resource:
            # The branch push causes a version change in the pull-recipes-<branch>
            # resource(s) which causes the artifacts to be removed. To avoid a
            # race condition between these jobs the packages need to be uploaded
            # before pushing branch(es).
            if stage_job_name:
                base_plan.append({'get': 'pr-merged', 'trigger': True,
                                  'passed': ['stage_for_upload']})
            else:
                base_plan.append({'get': 'pr-merged', 'trigger': True})
        if branches is None:
            branches = ['automated-build']
        for n, folder in enumerate(folders):
            if len(branches) == 1:
                branch = branches[0]
            elif len(folders) == len(branches):
                branch = branches[n]
            else:
                raise Exception(
                    "The number of branches either needs to be exactly one or "
                    "equal to the number of feedstocks submitted. Exiting.")
            # BUG FIX: the original reused one accumulating ``plan`` list and
            # one shared config/params dict across iterations, so every job
            # contained every folder's task and all tasks aliased the last
            # folder's params.  Copy per iteration so each job pushes only
            # its own folder with its own params.
            config = dict(config_vars.get('push-branch-config'))
            params = dict(config.get('params', {}))
            params['BRANCH'] = branch
            params['FEEDSTOCK'] = folder
            config['params'] = params
            plan = base_plan + [{
                'task': 'push-branch',
                'trigger': False,
                'config': config,
            }]
            self.add_job(f'push_branch_to_{folder}', plan)

    def add_destroy_pipeline_job(self, config_vars, folders):
        """
        Adds a destroy pipeline job to the pipeline.
        """
        # Only runs after the upload job and every branch-push job passed.
        passed_jobs = [f'push_branch_to_{folder}' for folder in folders]
        passed_jobs.append('stage_for_upload')
        config = config_vars.get("destroy-pipeline-config")
        params = config.get("params", {})
        params['PIPELINE'] = config_vars['base-name']
        config['params'] = params
        plan = [{
            'get': 'pr-merged',
            'trigger': True,
            'passed': passed_jobs
        }, {
            'task': 'destroy-pipeline',
            'trigger': False,
            'config': config
        }]
        self.add_job('destroy_pipeline', plan)
class JobConfig:
    """ configuration for a concourse job. """
    # https://concourse-ci.org/jobs.html

    def __init__(self, name="placeholder", plan=None):
        # name: job name shown in the Concourse UI.
        # plan: ordered list of step dicts; a fresh list is created when
        # omitted so instances never share a default list.
        self.name = name
        self.plan = plan
        if plan is None:
            self.plan = []

    def to_dict(self):
        # Serializable form matching the Concourse job schema.
        return {"name": self.name, "plan": self.plan}

    def add_rsync_recipes(self):
        # Trigger this job whenever the recipes rsync resource changes.
        self.plan.append({
            'get': 'rsync-recipes',
            'trigger': True
        })

    def add_rsync_source(self):
        # Publish the cached source tree, excluding json metadata files.
        self.plan.append({
            'put': 'rsync-source',
            'params': {
                'sync_dir': 'output-source',
                'rsync_opts': [
                    "--archive",
                    "--no-perms",
                    "--omit-dir-times",
                    "--verbose",
                    "--exclude",
                    '"*.json*"']
            },
            'get_params': {'skip_download': True}
        })

    def add_rsync_stats(self):
        # Publish build statistics to the stats rsync resource.
        self.plan.append({
            'put': 'rsync-stats',
            'params': {
                'sync_dir': 'stats',
                'rsync_opts': [
                    "--archive",
                    "--no-perms",
                    "--omit-dir-times",
                    "--verbose"]},
            'get_params': {'skip_download': True}
        })

    def add_rsync_build_pack_win(self):
        # Fetch only the Windows standalone conda executable.
        self.plan.append({
            'get': 'rsync-build-pack',
            'params': {
                'rsync_opts': [
                    '--include',
                    'loner_conda_windows.exe',
                    '--exclude', '*',
                    '-v'
                ]
            },
        })

    def add_rsync_build_pack_osx(self):
        # Fetch only the macOS standalone conda executable.
        self.plan.append({
            'get': 'rsync-build-pack',
            'params': {
                'rsync_opts': [
                    '--include',
                    'loner_conda_osx.exe',
                    '--exclude',
                    '*',
                    '-v'
                ]
            }
        })

    def add_rsync_prereq(self, prereq):
        # Pull artifacts from a prerequisite job; 'passed' enforces ordering
        # (this job only sees versions that passed the prereq job).
        self.plan.append({
            'get': 'rsync_' + prereq,
            'trigger': False,
            'passed': [prereq]}
        )

    def add_put_artifacts(self, resource_name):
        # Upload converted artifacts, skipping index/markup files and the
        # conda index cache.
        self.plan.append({
            'put': resource_name,
            'params': {
                'sync_dir': 'converted-artifacts',
                'rsync_opts': [
                    "--archive",
                    "--no-perms",
                    "--omit-dir-times",
                    "--verbose",
                    "--exclude", '"**/*.json*"',
                    # html and xml files
                    "--exclude", '"**/*.*ml"',
                    # conda index cache
                    "--exclude", '"**/.cache"',
                ]
            },
            'get_params': {'skip_download': True}
        })

    def add_consolidate_task(self, inputs, subdir, docker_user=None, docker_pass=None):
        """Add a task gathering prerequisite packages into one indexed
        local channel (indexed-artifacts) usable as a conda channel."""
        _source = {
            'repository': 'conda/c3i-linux-64',
            'tag': 'latest',
        }
        if docker_user and docker_pass:
            _source.update({
                'username': docker_user,
                'password': docker_pass
            })
        config = {
            # we can always do this on linux, so prefer it for speed.
            'platform': 'linux',
            'image_resource': {
                'type': 'docker-image',
                'source': _source,
            },
            'inputs': [{'name': 'rsync_' + req} for req in inputs],
            'outputs': [{'name': 'indexed-artifacts'}],
            'run': {
                'path': 'sh',
                'args': ['-exc', (
                    'mkdir -p indexed-artifacts/{subdir}\n'
                    'mkdir -p indexed-artifacts/noarch \n'
                    'find . -name "indexed-artifacts" -prune -o -path "*/{subdir}/*.tar.bz2" -print0 | xargs -0 -I file mv file indexed-artifacts/{subdir}\n'  # NOQA
                    'find . -name "indexed-artifacts" -prune -o -path "*/noarch/*.tar.bz2" -print0 | xargs -0 -I file mv file indexed-artifacts/noarch\n'  # NOQA
                    'conda-index indexed-artifacts\n'.format(subdir=subdir))
                ]
            }
        }
        self.plan.append({'task': 'update-artifact-index', 'config': config})

    def add_convert_task(self, subdir, docker_user=None, docker_pass=None):
        """Add a task transmuting built .tar.bz2 packages into .conda
        format (via cph) under converted-artifacts/."""
        inputs = [{'name': 'output-artifacts'}]
        outputs = [{'name': 'converted-artifacts'}]
        _source = {
            'repository': 'conda/c3i-linux-64',
            'tag': 'latest',
        }
        if docker_user and docker_pass:
            _source.update({
                'username': docker_user,
                'password': docker_pass
            })
        config = {
            # we can always do this on linux, so prefer it for speed.
            'platform': 'linux',
            'inputs': inputs,
            'outputs': outputs,
            'image_resource': {
                'type': 'docker-image',
                'source': _source,
            },
            'run': {
                'path': 'sh',
                'args': [
                    '-exc',
                    'mkdir -p converted-artifacts/{subdir}\n'
                    'mkdir -p converted-artifacts/noarch\n'
                    'find . -name "converted-artifacts" -prune -o -path "*/{subdir}/*.tar.bz2" -print0 | xargs -0 -I file mv file converted-artifacts/{subdir}\n'  # NOQA
                    'find . -name "converted-artifacts" -prune -o -path "*/noarch/*.tar.bz2" -print0 | xargs -0 -I file mv file converted-artifacts/noarch\n'  # NOQA
                    'pushd converted-artifacts/{subdir} && cph t "*.tar.bz2" .conda && popd\n'
                    'pushd converted-artifacts/noarch && cph t "*.tar.bz2" .conda && popd\n'
                    .format(subdir=subdir)
                ],
            }
        }
        self.plan.append({'task': 'convert .tar.bz2 to .conda', 'config': config})
class BuildStepConfig:
    """ Class for creating a Concourse step for package build jobs. """

    def __init__(self, test_only, platform, worker_tags):
        # test_only: when True the step only tests existing packages.
        self.task_name = 'test' if test_only else 'build'
        self.platform = platform  # 'win', 'osx' or 'linux'
        self.worker_tags = worker_tags  # Concourse worker tags; may be falsy
        self.config = {}  # task config, filled in by set_config_* methods
        self.cb_args = []  # list of arguments to pass to conda build
        self.cmds = ''  # full command line, assembled incrementally

    def set_config_inputs(self, artifact_input):
        """ Add inputs to the task config. """
        inputs = [{'name': 'rsync-recipes'}]
        if self.platform in ['win', 'osx']:
            # win/osx workers also need the standalone conda executable.
            inputs.append({'name': 'rsync-build-pack'})
        if artifact_input:
            inputs.append({'name': 'indexed-artifacts'})
        self.config["inputs"] = inputs

    def set_config_outputs(self):
        # Standard outputs: built packages, source cache, build stats.
        self.config["outputs"] = [
            {'name': 'output-artifacts'},
            {'name': 'output-source'},
            {'name': 'stats'}
        ]

    def set_config_platform(self, arch):
        # Translate the conda subdir into Concourse's platform string.
        subdir = f"{self.platform}-{arch}"
        self.config["platform"] = CONDA_SUBDIR_TO_CONCOURSE_PLATFORM[subdir]

    def set_config_init_run(self):
        # Shell entry point; the actual command is appended later as args.
        if self.platform == 'win':
            self.config["run"] = {'path': 'cmd.exe', 'args': ['/d', '/c']}
        else:
            self.config["run"] = {'path': 'sh', 'args': ['-exc']}

    def set_initial_cb_args(self):
        # Baseline conda-build flags shared by all build steps.
        self.cb_args = [
            '--no-anaconda-upload',
            '--error-overlinking',
            '--error-overdepending',
            '--output-folder=output-artifacts',
            '--cache-dir=output-source',
        ]

    def create_build_cmds(self, build_prefix_cmds, build_suffix_cmds):
        # Assemble: <prefix> conda-build <args> <suffix>.
        build_cmd = " conda-build " + " ".join(self.cb_args) + " "
        prefix = " ".join(build_prefix_cmds)
        suffix = " ".join(build_suffix_cmds)
        self.cmds = prefix + build_cmd + suffix

    def add_autobuild_cmds(self, recipe_path, cbc_path):
        # combine the recipe from recipe_path with the conda_build_config.yaml
        # file in the cbc_path directory into a combined_recipe directory
        if self.platform == 'win':
            win_cbc_path = cbc_path.replace("/", "\\")
            win_recipe_path = recipe_path.replace("/", "\\")
            # no need to mkdir, xcopy /i creates the directory
            cmd = (
                f"xcopy /i /s /e /f /y {win_recipe_path} combined_recipe&&"
                f"copy /y {win_cbc_path} combined_recipe\\conda_build_config.yaml&&"
                "dir combined_recipe&&"
            )
        else:
            cmd = (
                "mkdir -p combined_recipe && "
                f"cp -r {recipe_path}/* combined_recipe/ && "
                f"cp {cbc_path} combined_recipe/ && "
                "ls -lh combined_recipe/* && "
            )
        self.cmds = cmd + self.cmds

    def add_prefix_cmds(self, prefix_cmds):
        # Prepend setup commands (joined with &&) before the build command.
        prefix = "&& ".join(prefix_cmds)
        if prefix:
            self.cmds = prefix + "&& " + self.cmds

    def add_repo_access(self, github_user, github_token):
        # Expose credentials as task params and write a netrc so git can
        # reach private GitHub repositories non-interactively.
        self.config['params'] = {
            'GITHUB_USER': github_user,
            'GITHUB_TOKEN': github_token,
        }
        if self.platform == 'win':
            creds_cmds = [
                '(echo machine github.com '
                'login %GITHUB_USER% '
                'password %GITHUB_TOKEN% '
                'protocol https > %USERPROFILE%\\_netrc || exit 0)'
            ]
        else:
            # set +x / set -x avoid echoing the token into the build log.
            creds_cmds = [
                'set +x',
                'echo machine github.com '
                'login $GITHUB_USER '
                'password $GITHUB_TOKEN '
                'protocol https > ~/.netrc',
                'set -x'
            ]
        cmds = "&& ".join(creds_cmds)
        self.cmds = cmds + '&& ' + self.cmds

    def add_suffix_cmds(self, suffix_cmds):
        # Append follow-up commands (joined with &&) after the build command.
        suffix = "&& ".join(suffix_cmds)
        if suffix:
            self.cmds = self.cmds + "&& " + suffix

    def add_staging_channel_cmd(self, channel):
        # todo: add proper source package path
        # NOTE(review): appended without a '&&' separator — presumably a
        # suffix/prefix method supplies it; confirm call order at call sites.
        path = "*.tar.bz2"
        cmd = f"anaconda upload --skip-existing --force -u {channel} {path}"
        self.cmds += cmd

    def to_dict(self):
        # Serializable Concourse step; tags only included when non-empty.
        step = {'task': self.task_name, 'config': self.config}
        if self.worker_tags:
            step['tags'] = self.worker_tags
        return step
| 35.935154 | 173 | 0.507266 |
acdcc3437578126c1b60767c9a023679a58d1cf3 | 615 | py | Python | sales_register/adapters/repositories/postgres/settings.py | tamercuba/purchase-system | cfd3e4fecbd96c130f620d11491fa14979c0d996 | [
"MIT"
] | null | null | null | sales_register/adapters/repositories/postgres/settings.py | tamercuba/purchase-system | cfd3e4fecbd96c130f620d11491fa14979c0d996 | [
"MIT"
] | 6 | 2021-05-15T21:44:19.000Z | 2021-05-23T22:20:13.000Z | sales_register/adapters/repositories/postgres/settings.py | tamercuba/sales-register | cfd3e4fecbd96c130f620d11491fa14979c0d996 | [
"MIT"
] | null | null | null | from decouple import config
from sqlalchemy import create_engine, orm, pool
from sqlalchemy.engine import URL
# Host: prefer the address injected by Docker's legacy container links
# (POSTGRES_PORT_5432_TCP_ADDR); fall back to the DB_HOST setting.
_DB_HOST = config('POSTGRES_PORT_5432_TCP_ADDR', None) or config('DB_HOST')
_DB_PORT = config('DB_PORT')
_DB_USER = config('DB_USER')
_DB_PW = config('DB_PW')
_DB_NAME = config('DB_NAME')

# SQLAlchemy connection URL assembled from the individual settings.
# NOTE(review): newer SQLAlchemy deprecates calling URL(...) directly in
# favour of URL.create(...) — confirm against the pinned version.
DB_URI = URL(
    drivername='postgresql',
    username=_DB_USER,
    password=_DB_PW,
    host=_DB_HOST,
    port=_DB_PORT,
    database=_DB_NAME,
)

# NullPool: a real connection is opened/closed per checkout (no pooling);
# future=True opts in to SQLAlchemy 2.0-style behaviour.
engine = create_engine(DB_URI, poolclass=pool.NullPool, future=True)

# Session factory bound to the engine; commits/flushes must be explicit.
Session = orm.sessionmaker(
    autocommit=False, autoflush=False, bind=engine, future=True
)
1fc2cf84056520014399ffb7a4c4b9b731bade23 | 3,161 | py | Python | nearface/detectors/SsdWrapper.py | palmtrey/nearface | 9274f13b2924a3ad9f97446772eb63bc7c482bff | [
"MIT"
] | null | null | null | nearface/detectors/SsdWrapper.py | palmtrey/nearface | 9274f13b2924a3ad9f97446772eb63bc7c482bff | [
"MIT"
] | null | null | null | nearface/detectors/SsdWrapper.py | palmtrey/nearface | 9274f13b2924a3ad9f97446772eb63bc7c482bff | [
"MIT"
] | null | null | null | import gdown
from pathlib import Path
import os
import cv2
import pandas as pd
from nearface.detectors import OpenCvWrapper
from nearface.commons import functions
def build_model():
    """Build the SSD face detector, downloading model files on first use.

    Returns:
        dict with 'face_detector' (cv2.dnn net loaded from the Caffe
        prototxt/weights) and 'eye_detector' (OpenCV Haar eye cascade,
        used later for face alignment).
    """
    home = functions.get_deepface_home()

    # model structure (prototxt describing the network topology)
    if os.path.isfile(home+'/.deepface/weights/deploy.prototxt') != True:
        print("deploy.prototxt will be downloaded...")
        url = "https://github.com/opencv/opencv/raw/3.4.0/samples/dnn/face_detector/deploy.prototxt"
        output = home+'/.deepface/weights/deploy.prototxt'
        gdown.download(url, output, quiet=False)

    # pre-trained weights
    if os.path.isfile(home+'/.deepface/weights/res10_300x300_ssd_iter_140000.caffemodel') != True:
        print("res10_300x300_ssd_iter_140000.caffemodel will be downloaded...")
        url = "https://github.com/opencv/opencv_3rdparty/raw/dnn_samples_face_detector_20170830/res10_300x300_ssd_iter_140000.caffemodel"
        output = home+'/.deepface/weights/res10_300x300_ssd_iter_140000.caffemodel'
        gdown.download(url, output, quiet=False)

    face_detector = cv2.dnn.readNetFromCaffe(
        home+"/.deepface/weights/deploy.prototxt",
        home+"/.deepface/weights/res10_300x300_ssd_iter_140000.caffemodel"
    )

    eye_detector = OpenCvWrapper.build_cascade("haarcascade_eye")

    detector = {}
    detector["face_detector"] = face_detector
    detector["eye_detector"] = eye_detector

    return detector
def detect_face(detector, img, align = True):
    """Run the SSD face detector on *img* and return the detected faces.

    Args:
        detector: dict produced by build_model() containing the
            'face_detector' dnn net and the 'eye_detector' cascade.
        img: image as a numpy array (H x W x channels).
        align: when True, each crop is eye-aligned via OpenCvWrapper.

    Returns:
        list of (face_crop, [x, y, w, h]) tuples, one per detection with
        confidence >= 0.90, expressed in original-image coordinates.
    """
    ssd_columns = ["img_id", "is_face", "confidence",
                   "left", "top", "right", "bottom"]
    target_size = (300, 300)

    base_img = img.copy()  # keep full-resolution pixels for cropping
    orig_h, orig_w = img.shape[0], img.shape[1]
    # Factors mapping 300x300 detector coordinates back to the original.
    aspect_ratio_x = orig_w / target_size[1]
    aspect_ratio_y = orig_h / target_size[0]

    net = detector["face_detector"]
    net.setInput(cv2.dnn.blobFromImage(image=cv2.resize(img, target_size)))
    detections = net.forward()

    boxes = pd.DataFrame(detections[0][0], columns=ssd_columns)
    # 0: background, 1: face; keep only confident face rows.
    boxes = boxes[(boxes['is_face'] == 1) & (boxes['confidence'] >= 0.90)]
    for column in ('left', 'bottom', 'right', 'top'):
        boxes[column] = (boxes[column] * 300).astype(int)

    faces = []
    for box in boxes.itertuples():
        x1 = int(box.left * aspect_ratio_x)
        x2 = int(box.right * aspect_ratio_x)
        y1 = int(box.top * aspect_ratio_y)
        y2 = int(box.bottom * aspect_ratio_y)
        face_crop = base_img[y1:y2, x1:x2]
        region = [x1, y1, x2 - x1, y2 - y1]
        if align:
            face_crop = OpenCvWrapper.align_face(detector["eye_detector"],
                                                 face_crop)
        faces.append((face_crop, region))
    return faces
| 30.68932 | 175 | 0.746599 |
048dfa6888e739aeb6ee0a0fd720a724b3b8f1e3 | 8,971 | py | Python | assets/src/ba_data/python/bastd/activity/coopjoin.py | Awesome-Logic/ballistica | 233a4a4f7840c9c666a1809626b6993a4b145349 | [
"MIT"
] | 1 | 2020-04-04T01:32:29.000Z | 2020-04-04T01:32:29.000Z | assets/src/ba_data/python/bastd/activity/coopjoin.py | Awesome-Logic/ballistica | 233a4a4f7840c9c666a1809626b6993a4b145349 | [
"MIT"
] | null | null | null | assets/src/ba_data/python/bastd/activity/coopjoin.py | Awesome-Logic/ballistica | 233a4a4f7840c9c666a1809626b6993a4b145349 | [
"MIT"
] | null | null | null | # Copyright (c) 2011-2020 Eric Froemling
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# -----------------------------------------------------------------------------
"""Functionality related to the co-op join screen."""
from __future__ import annotations
from typing import TYPE_CHECKING
import _ba
import ba
from ba.internal import JoinActivity
if TYPE_CHECKING:
from typing import Any, Dict, List, Optional, Sequence, Union
class CoopJoinActivity(JoinActivity):
    """Join-screen for co-op mode."""

    def __init__(self, settings: Dict[str, Any]):
        super().__init__(settings)
        session = ba.getsession()

        # Let's show a list of scores-to-beat for 1 player at least.
        assert session.campaign is not None
        level_name_full = (session.campaign.name + ':' +
                           session.campaign_state['level'])
        # Score-version string is embedded so stale scores don't show after
        # a level's scoring rules change.
        config_str = (
            '1p' + session.campaign.get_level(session.campaign_state['level']).
            get_score_version_string().replace(' ', '_'))
        # Async fetch; the weak callback is dropped if we die first.
        _ba.get_scores_to_beat(level_name_full, config_str,
                               ba.WeakCall(self._on_got_scores_to_beat))

    def on_transition_in(self) -> None:
        """Show the level title and the controls guide as we fade in."""
        from bastd.actor.controlsguide import ControlsGuide
        from bastd.actor.text import Text
        super().on_transition_in()
        assert self.session.campaign
        Text(self.session.campaign.get_level(
            self.session.campaign_state['level']).displayname,
             scale=1.3,
             h_attach=Text.HAttach.CENTER,
             h_align=Text.HAlign.CENTER,
             v_attach=Text.VAttach.TOP,
             transition=Text.Transition.FADE_IN,
             transition_delay=4.0,
             color=(1, 1, 1, 0.6),
             position=(0, -95)).autoretain()
        ControlsGuide(delay=1.0).autoretain()

    def _on_got_scores_to_beat(self,
                               scores: Optional[List[Dict[str, Any]]]) -> None:
        """Lay out score/achievement challenges and remaining achievements.

        Called asynchronously once the scores-to-beat query completes;
        *scores* is None on failure.
        """
        # pylint: disable=too-many-locals
        # pylint: disable=too-many-statements
        from bastd.actor.text import Text
        from ba.internal import get_achievements_for_coop_level

        # Sort by originating date so that the most recent is first.
        if scores is not None:
            scores.sort(reverse=True, key=lambda score: score['time'])

        # We only show achievements and challenges for CoopGameActivities.
        session = self.session
        assert isinstance(session, ba.CoopSession)
        gameinstance = session.get_current_game_instance()
        if isinstance(gameinstance, ba.CoopGameActivity):
            score_type = gameinstance.get_score_type()
            if scores is not None:
                achievement_challenges = [
                    a for a in scores if a['type'] == 'achievement_challenge'
                ]
                score_challenges = [
                    a for a in scores if a['type'] == 'score_challenge'
                ]
            else:
                achievement_challenges = score_challenges = []

            # Layout cursor state shared by the _add_t helper below:
            # entries fade in one after another (delay) moving downward (vpos).
            delay = 1.0
            vpos = -140.0
            spacing = 25
            delay_inc = 0.1

            def _add_t(
                    text: Union[str, ba.Lstr],
                    h_offs: float = 0.0,
                    scale: float = 1.0,
                    color: Sequence[float] = (1.0, 1.0, 1.0, 0.46)
            ) -> None:
                # Spawn one left-attached text line at the current cursor.
                Text(text,
                     scale=scale * 0.76,
                     h_align=Text.HAlign.LEFT,
                     h_attach=Text.HAttach.LEFT,
                     v_attach=Text.VAttach.TOP,
                     transition=Text.Transition.FADE_IN,
                     transition_delay=delay,
                     color=color,
                     position=(60 + h_offs, vpos)).autoretain()

            if score_challenges:
                _add_t(ba.Lstr(value='${A}:',
                               subs=[('${A}',
                                      ba.Lstr(resource='scoreChallengesText'))
                                     ]),
                       scale=1.1)
                delay += delay_inc
                vpos -= spacing
                for chal in score_challenges:
                    # Time-based scores are stored in tenths of ms units;
                    # points are shown verbatim.
                    _add_t(str(chal['value'] if score_type == 'points' else ba.
                               timestring(int(chal['value']) * 10,
                                          timeformat=ba.TimeFormat.MILLISECONDS
                                          ).evaluate()) + ' (1 player)',
                           h_offs=30,
                           color=(0.9, 0.7, 1.0, 0.8))
                    delay += delay_inc
                    vpos -= 0.6 * spacing
                    _add_t(chal['player'],
                           h_offs=40,
                           color=(0.8, 1, 0.8, 0.6),
                           scale=0.8)
                    delay += delay_inc
                    vpos -= 1.2 * spacing
                vpos -= 0.5 * spacing

            if achievement_challenges:
                _add_t(ba.Lstr(
                    value='${A}:',
                    subs=[('${A}',
                           ba.Lstr(resource='achievementChallengesText'))]),
                       scale=1.1)
                delay += delay_inc
                vpos -= spacing
                for chal in achievement_challenges:
                    _add_t(str(chal['value']),
                           h_offs=30,
                           color=(0.9, 0.7, 1.0, 0.8))
                    delay += delay_inc
                    vpos -= 0.6 * spacing
                    _add_t(chal['player'],
                           h_offs=40,
                           color=(0.8, 1, 0.8, 0.6),
                           scale=0.8)
                    delay += delay_inc
                    vpos -= 1.2 * spacing
                vpos -= 0.5 * spacing

            # Now list our remaining achievements for this level.
            assert self.session.campaign is not None
            levelname = (self.session.campaign.name + ':' +
                         self.session.campaign_state['level'])
            ts_h_offs = 60

            if not ba.app.kiosk_mode:
                achievements = [
                    a for a in get_achievements_for_coop_level(levelname)
                    if not a.complete
                ]
                have_achievements = bool(achievements)
                # NOTE(review): this second filter is redundant (the list is
                # already incomplete-only); left as-is intentionally.
                achievements = [a for a in achievements if not a.complete]
                vrmode = ba.app.vr_mode

                if have_achievements:
                    Text(ba.Lstr(resource='achievementsRemainingText'),
                         host_only=True,
                         position=(ts_h_offs - 10, vpos),
                         transition=Text.Transition.FADE_IN,
                         scale=1.1 * 0.76,
                         h_attach=Text.HAttach.LEFT,
                         v_attach=Text.VAttach.TOP,
                         color=(1, 1, 1.2, 1) if vrmode else (0.8, 0.8, 1, 1),
                         shadow=1.0,
                         flatness=1.0 if vrmode else 0.6,
                         transition_delay=delay).autoretain()

                hval = ts_h_offs + 50
                vpos -= 35
                for ach in achievements:
                    delay += 0.05
                    ach.create_display(hval, vpos, delay, style='in_game')
                    vpos -= 55

                if not achievements:
                    Text(ba.Lstr(resource='noAchievementsRemainingText'),
                         host_only=True,
                         position=(ts_h_offs + 15, vpos + 10),
                         transition=Text.Transition.FADE_IN,
                         scale=0.7,
                         h_attach=Text.HAttach.LEFT,
                         v_attach=Text.VAttach.TOP,
                         color=(1, 1, 1, 0.5),
                         transition_delay=delay + 0.5).autoretain()
| 43.548544 | 79 | 0.50496 |
8970c9aa2c103d01792fbf180ce3b4ca55137936 | 1,912 | py | Python | code.py | karthik-charan/The-Stable-Marriage-Problem | f1af816a967ce740eabe971c6e4c8c944005b21a | [
"MIT"
] | null | null | null | code.py | karthik-charan/The-Stable-Marriage-Problem | f1af816a967ce740eabe971c6e4c8c944005b21a | [
"MIT"
] | null | null | null | code.py | karthik-charan/The-Stable-Marriage-Problem | f1af816a967ce740eabe971c6e4c8c944005b21a | [
"MIT"
] | null | null | null | #### Complete code for Stable Marriage Problem
## Declaring the required datastructures
# Example instance: each person maps to a complete preference list over the
# other group, most-preferred first.
guy_preferences = {
    'andrew': ['caroline', 'abigail', 'betty'],
    'bill': ['caroline', 'betty', 'abigail'],
    'chester': ['betty', 'caroline', 'abigail'],
}
gal_preferences = {
    'abigail': ['andrew', 'bill', 'chester'],
    'betty': ['bill', 'andrew', 'chester'],
    'caroline': ['bill', 'chester', 'andrew']
}
## Defining required methods
def new_over_old(woman, man_old, man_new, preferences=None):
    """Return True if *woman* ranks *man_new* strictly above *man_old*.

    Scans the woman's preference list in order; whichever of the two men
    appears first wins.  ``man_old`` is checked first on each entry, so a
    tie (``man_old == man_new``) does not count as an improvement.

    Args:
        woman: key into the preference table.
        man_old: the woman's current partner ('' when she is free; the
            empty string never matches a list entry, so any suitor wins).
        man_new: the prospective partner.
        preferences: optional table mapping each woman to her ordered list
            of men; defaults to the module-level ``gal_preferences``
            (backward compatible with the original 3-argument signature).

    Returns:
        bool: True if man_new is preferred.  The original returned an
        implicit None when neither man appeared in the list; this now
        returns an explicit False (both are falsy, so callers using the
        result in a boolean context behave identically).
    """
    prefs = gal_preferences if preferences is None else preferences
    for candidate in prefs[woman]:
        if candidate == man_old:
            return False
        if candidate == man_new:
            return True
    return False
def stable_marriage_algo(guy_preferences, gal_preferences):
    """Compute a stable matching with the Gale-Shapley algorithm.

    Men propose in preference order; each woman tentatively keeps the best
    suitor seen so far.  This replaces the original ad-hoc loop, which
    never reset a man's "engaged" flag (or his recorded partner) when his
    fiancee dumped him and could therefore terminate early with a stale,
    inconsistent pairing.

    Args:
        guy_preferences: dict mapping each man to his ordered list of
            women (most preferred first).
        gal_preferences: dict mapping each woman to her ordered list of
            men.  Both tables must be complete (everyone ranks everyone
            in the other group) and the groups must be the same size.

    Returns:
        dict mapping every person to their partner — men first, then
        women, matching the original function's key ordering and its
        both-directions return shape.
    """
    # Precompute each woman's ranking so preference comparisons are O(1).
    rank = {woman: {man: i for i, man in enumerate(prefs)}
            for woman, prefs in gal_preferences.items()}
    next_proposal = {man: 0 for man in guy_preferences}  # next index to try
    fiance = {}  # woman -> man she is currently engaged to
    free_men = list(guy_preferences)

    while free_men:
        man = free_men.pop()
        # Propose to the highest-ranked woman he has not tried yet.
        woman = guy_preferences[man][next_proposal[man]]
        next_proposal[man] += 1
        current = fiance.get(woman)
        if current is None:
            fiance[woman] = man
        elif rank[woman][man] < rank[woman][current]:
            # She trades up; her previous partner becomes free again.
            fiance[woman] = man
            free_men.append(current)
        else:
            free_men.append(man)  # rejected; he will try his next choice

    # Emit both directions, men first, to match the original return shape.
    wife = {man: woman for woman, man in fiance.items()}
    matching = {man: wife[man] for man in guy_preferences}
    for woman in gal_preferences:
        matching[woman] = fiance[woman]
    return matching
## main function
if __name__ == "__main__":
    # Demo: solve the module-level example instance and print the pairing.
    print("The most stable arrangement of couples: ")
    print(stable_marriage_algo(guy_preferences, gal_preferences))
dcdc6fa4fcaf2cc3ee6c3f72ab7cc22feb6e39bd | 38,637 | py | Python | detectron2/utils/visualizer.py | sayef/detectron2 | 2d0f20d5862e114f9efa0e09efa93a9f6d756377 | [
"Apache-2.0"
] | null | null | null | detectron2/utils/visualizer.py | sayef/detectron2 | 2d0f20d5862e114f9efa0e09efa93a9f6d756377 | [
"Apache-2.0"
] | null | null | null | detectron2/utils/visualizer.py | sayef/detectron2 | 2d0f20d5862e114f9efa0e09efa93a9f6d756377 | [
"Apache-2.0"
] | null | null | null | # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
import colorsys
import numpy as np
from enum import Enum, unique
import cv2
import matplotlib as mpl
import matplotlib.colors as mplc
import matplotlib.figure as mplfigure
import pycocotools.mask as mask_util
import torch
from matplotlib.backends.backend_agg import FigureCanvasAgg
from detectron2.structures import BitMasks, Boxes, BoxMode, Keypoints, PolygonMasks
from .colormap import random_color
__all__ = ["ColorMode", "VisImage", "Visualizer"]
_SMALL_OBJECT_AREA_THRESH = 1000
_LARGE_MASK_AREA_THRESH = 120000
_OFF_WHITE = (1.0, 1.0, 240.0 / 255)
_BLACK = (0, 0, 0)
_RED = (1.0, 0, 0)
_KEYPOINT_THRESHOLD = 0.05
@unique
class ColorMode(Enum):
"""
Enum of different color modes to use for instance visualizations.
Attributes:
IMAGE: Picks a random color for every instance and overlay segmentations with low opacity.
SEGMENTATION: Let instances of the same category have similar colors, and overlay them with
high opacity. This provides more attention on the quality of segmentation.
IMAGE_BW: same as IMAGE, but convert all areas without masks to gray-scale.
"""
IMAGE = 0
SEGMENTATION = 1
IMAGE_BW = 2
class GenericMask:
"""
Attribute:
polygons (list[ndarray]): list[ndarray]: polygons for this mask.
Each ndarray has format [x, y, x, y, ...]
mask (ndarray): a binary mask
"""
def __init__(self, mask_or_polygons, height, width):
self._mask = self._polygons = self._has_holes = None
self.height = height
self.width = width
m = mask_or_polygons
if isinstance(m, dict):
# RLEs
assert "counts" in m and "size" in m
if isinstance(m["counts"], list): # uncompressed RLEs
h, w = m["size"]
assert h == height and w == width
m = mask_util.frPyObjects(m, h, w)
self._mask = mask_util.decode(m)[:, :]
return
if isinstance(m, list): # list[ndarray]
self._polygons = [np.asarray(x).reshape(-1) for x in m]
return
if isinstance(m, np.ndarray): # assumed to be a binary mask
assert m.shape[1] != 2, m.shape
assert m.shape == (height, width), m.shape
self._mask = m.astype("uint8")
return
raise ValueError("GenericMask cannot handle object {} of type '{}'".format(m, type(m)))
@property
def mask(self):
if self._mask is None:
self._mask = self.polygons_to_mask(self._polygons)
return self._mask
@property
def polygons(self):
if self._polygons is None:
self._polygons, self._has_holes = self.mask_to_polygons(self._mask)
return self._polygons
@property
def has_holes(self):
if self._has_holes is None:
if self._mask is not None:
self._polygons, self._has_holes = self.mask_to_polygons(self._mask)
else:
self._has_holes = False # if original format is polygon, does not have holes
return self._has_holes
def mask_to_polygons(self, mask):
# cv2.RETR_CCOMP flag retrieves all the contours and arranges them to a 2-level
# hierarchy. External contours (boundary) of the object are placed in hierarchy-1.
# Internal contours (holes) are placed in hierarchy-2.
# cv2.CHAIN_APPROX_NONE flag gets vertices of polygons from contours.
res = cv2.findContours(mask.astype("uint8"), cv2.RETR_CCOMP, cv2.CHAIN_APPROX_NONE)
hierarchy = res[-1]
has_holes = (hierarchy.reshape(-1, 4)[:, 3] >= 0).sum() > 0
res = res[-2]
res = [x.flatten() for x in res]
res = [x for x in res if len(x) >= 6]
return res, has_holes
def polygons_to_mask(self, polygons):
rle = mask_util.frPyObjects(polygons, self.height, self.width)
rle = mask_util.merge(rle)
return mask_util.decode(rle)[:, :]
def area(self):
return self.mask.sum()
def bbox(self):
p = mask_util.frPyObjects(self.polygons, self.height, self.width)
p = mask_util.merge(p)
bbox = mask_util.toBbox(p)
bbox[2] += bbox[0]
bbox[3] += bbox[1]
return bbox
class _PanopticPrediction:
def __init__(self, panoptic_seg, segments_info):
self._seg = panoptic_seg
self._sinfo = {s["id"]: s for s in segments_info} # seg id -> seg info
segment_ids, areas = torch.unique(panoptic_seg, sorted=True, return_counts=True)
areas = areas.numpy()
sorted_idxs = np.argsort(-areas)
self._seg_ids, self._seg_areas = segment_ids[sorted_idxs], areas[sorted_idxs]
self._seg_ids = self._seg_ids.tolist()
for sid, area in zip(self._seg_ids, self._seg_areas):
if sid in self._sinfo:
self._sinfo[sid]["area"] = float(area)
def non_empty_mask(self):
"""
Returns:
(H, W) array, a mask for all pixels that have a prediction
"""
empty_ids = []
for id in self._seg_ids:
if id not in self._sinfo:
empty_ids.append(id)
if len(empty_ids) == 0:
return np.zeros(self._seg.shape, dtype=np.uint8)
assert (
len(empty_ids) == 1
), ">1 ids corresponds to no labels. This is currently not supported"
return (self._seg != empty_ids[0]).numpy().astype(np.bool)
def semantic_masks(self):
for sid in self._seg_ids:
sinfo = self._sinfo.get(sid)
if sinfo is None or sinfo["isthing"]:
# Some pixels (e.g. id 0 in PanopticFPN) have no instance or semantic predictions.
continue
yield (self._seg == sid).numpy().astype(np.bool), sinfo
def instance_masks(self):
for sid in self._seg_ids:
sinfo = self._sinfo.get(sid)
if sinfo is None or not sinfo["isthing"]:
continue
mask = (self._seg == sid).numpy().astype(np.bool)
if mask.sum() > 0:
yield mask, sinfo
def _create_text_labels(classes, scores, class_names):
"""
Args:
classes (list[int] or None):
scores (list[float] or None):
class_names (list[str] or None):
Returns:
list[str] or None
"""
labels = None
if class_names is not None and len(class_names) > 1:
labels = [class_names[i] for i in classes]
if scores is not None:
if labels is None:
labels = ["{:.0f}%".format(s * 100) for s in scores]
else:
labels = ["{} {:.0f}%".format(l, s * 100) for l, s in zip(labels, scores)]
return labels
class VisImage:
def __init__(self, img, scale=1.0):
"""
Args:
img (ndarray): an RGB image of shape (H, W, 3).
scale (float): scale the input image
"""
self.img = img
self.scale = scale
self.width, self.height = img.shape[1], img.shape[0]
self._setup_figure(img)
def _setup_figure(self, img):
"""
Args:
Same as in :meth:`__init__()`.
Returns:
fig (matplotlib.pyplot.figure): top level container for all the image plot elements.
ax (matplotlib.pyplot.Axes): contains figure elements and sets the coordinate system.
"""
fig = mplfigure.Figure(frameon=False)
self.dpi = fig.get_dpi()
# add a small 1e-2 to avoid precision lost due to matplotlib's truncation
# (https://github.com/matplotlib/matplotlib/issues/15363)
fig.set_size_inches(
(self.width * self.scale + 1e-2) / self.dpi,
(self.height * self.scale + 1e-2) / self.dpi,
)
self.canvas = FigureCanvasAgg(fig)
# self.canvas = mpl.backends.backend_cairo.FigureCanvasCairo(fig)
ax = fig.add_axes([0.0, 0.0, 1.0, 1.0])
ax.axis("off")
ax.set_xlim(0.0, self.width)
ax.set_ylim(self.height)
self.fig = fig
self.ax = ax
def save(self, filepath):
"""
Args:
filepath (str): a string that contains the absolute path, including the file name, where
the visualized image will be saved.
"""
cv2.imwrite(filepath, self.get_image()[:, :, ::-1])
def get_image(self):
"""
Returns:
ndarray: the visualized image of shape (H, W, 3) (RGB) in uint8 type.
The shape is scaled w.r.t the input image using the given `scale` argument.
"""
canvas = self.canvas
s, (width, height) = canvas.print_to_buffer()
if (self.width, self.height) != (width, height):
img = cv2.resize(self.img, (width, height))
else:
img = self.img
# buf = io.BytesIO() # works for cairo backend
# canvas.print_rgba(buf)
# width, height = self.width, self.height
# s = buf.getvalue()
buffer = np.frombuffer(s, dtype="uint8")
# imshow is slow. blend manually (still quite slow)
img_rgba = buffer.reshape(height, width, 4)
rgb, alpha = np.split(img_rgba, [3], axis=2)
try:
import numexpr as ne # fuse them with numexpr
visualized_image = ne.evaluate("img * (1 - alpha / 255.0) + rgb * (alpha / 255.0)")
except ImportError:
alpha = alpha.astype("float32") / 255.0
visualized_image = img * (1 - alpha) + rgb * alpha
visualized_image = visualized_image.astype("uint8")
return visualized_image
class Visualizer:
def __init__(self, img_rgb, metadata, scale=1.0, instance_mode=ColorMode.IMAGE):
"""
Args:
img_rgb: a numpy array of shape (H, W, C), where H and W correspond to
the height and width of the image respectively. C is the number of
color channels. The image is required to be in RGB format since that
is a requirement of the Matplotlib library. The image is also expected
to be in the range [0, 255].
metadata (MetadataCatalog): image metadata.
"""
self.img = np.asarray(img_rgb).clip(0, 255).astype(np.uint8)
self.metadata = metadata
self.output = VisImage(self.img, scale=scale)
self.outputs = []
self.cpu_device = torch.device("cpu")
# too small texts are useless, therefore clamp to 9
self._default_font_size = max(
np.sqrt(self.output.height * self.output.width) // 90, 10 // scale
)
self._instance_mode = instance_mode
def draw_instance_predictions(self, predictions):
"""
Draw instance-level prediction results on an image.
Args:
predictions (Instances): the output of an instance detection/segmentation
model. Following fields will be used to draw:
"pred_boxes", "pred_classes", "scores", "pred_masks" (or "pred_masks_rle").
Returns:
output (VisImage): image object with visualizations.
"""
boxes = predictions.pred_boxes if predictions.has("pred_boxes") else None
scores = predictions.scores if predictions.has("scores") else None
classes = predictions.pred_classes if predictions.has("pred_classes") else None
labels = _create_text_labels(classes, scores, self.metadata.get("thing_classes", None))
keypoints = predictions.pred_keypoints if predictions.has("pred_keypoints") else None
if predictions.has("pred_masks"):
masks = predictions.pred_masks.numpy()
masks = [GenericMask(x, self.output.height, self.output.width) for x in masks]
else:
masks = None
if self._instance_mode == ColorMode.SEGMENTATION and self.metadata.get("thing_colors"):
colors = [
self._jitter([x / 255 for x in self.metadata.thing_colors[c]]) for c in classes
]
alpha = 0.8
else:
colors = None
alpha = 0.5
if self._instance_mode == ColorMode.IMAGE_BW:
self.output.img = self._create_grayscale_image(
(predictions.pred_masks.any(dim=0) > 0).numpy()
)
alpha = 0.3
self.overlay_instances(
masks=masks,
boxes=boxes,
labels=labels,
keypoints=keypoints,
assigned_colors=colors,
alpha=alpha,
)
return self.outputs
def draw_sem_seg(self, sem_seg, area_threshold=None, alpha=0.8):
"""
Draw semantic segmentation predictions/labels.
Args:
sem_seg (Tensor or ndarray): the segmentation of shape (H, W).
area_threshold (int): segments with less than `area_threshold` are not drawn.
alpha (float): the larger it is, the more opaque the segmentations are.
Returns:
output (VisImage): image object with visualizations.
"""
if isinstance(sem_seg, torch.Tensor):
sem_seg = sem_seg.numpy()
labels, areas = np.unique(sem_seg, return_counts=True)
sorted_idxs = np.argsort(-areas).tolist()
labels = labels[sorted_idxs]
for label in labels:
try:
mask_color = [x / 255 for x in self.metadata.stuff_colors[label]]
except (AttributeError, IndexError):
mask_color = None
binary_mask = (sem_seg == label).astype(np.uint8)
text = self.metadata.stuff_classes[label]
self.draw_binary_mask(
binary_mask,
color=mask_color,
edge_color=_OFF_WHITE,
text=text,
alpha=alpha,
area_threshold=area_threshold,
)
return self.output
def draw_panoptic_seg_predictions(
self, panoptic_seg, segments_info, area_threshold=None, alpha=0.7
):
"""
Draw panoptic prediction results on an image.
Args:
panoptic_seg (Tensor): of shape (height, width) where the values are ids for each
segment.
segments_info (list[dict]): Describe each segment in `panoptic_seg`.
Each dict contains keys "id", "category_id", "isthing".
area_threshold (int): stuff segments with less than `area_threshold` are not drawn.
Returns:
output (VisImage): image object with visualizations.
"""
pred = _PanopticPrediction(panoptic_seg, segments_info)
if self._instance_mode == ColorMode.IMAGE_BW:
self.output.img = self._create_grayscale_image(pred.non_empty_mask())
# draw mask for all semantic segments first i.e. "stuff"
for mask, sinfo in pred.semantic_masks():
category_idx = sinfo["category_id"]
try:
mask_color = [x / 255 for x in self.metadata.stuff_colors[category_idx]]
except AttributeError:
mask_color = None
text = self.metadata.stuff_classes[category_idx]
self.draw_binary_mask(
mask,
color=mask_color,
edge_color=_OFF_WHITE,
text=text,
alpha=alpha,
area_threshold=area_threshold,
)
# draw mask for all instances second
all_instances = list(pred.instance_masks())
if len(all_instances) == 0:
return self.output
masks, sinfo = list(zip(*all_instances))
category_ids = [x["category_id"] for x in sinfo]
try:
scores = [x["score"] for x in sinfo]
except KeyError:
scores = None
labels = _create_text_labels(category_ids, scores, self.metadata.thing_classes)
try:
colors = [random_color(rgb=True, maximum=1) for k in category_ids]
except AttributeError:
colors = None
self.overlay_instances(masks=masks, labels=labels, assigned_colors=colors)
return self.output
def draw_dataset_dict(self, dic):
annos = dic.get("annotations", None)
if annos:
if "segmentation" in annos[0]:
masks = [x["segmentation"] for x in annos]
else:
masks = None
if "keypoints" in annos[0]:
keypts = [x["keypoints"] for x in annos]
keypts = np.array(keypts).reshape(len(annos), -1, 3)
else:
keypts = None
boxes = [BoxMode.convert(x["bbox"], x["bbox_mode"], BoxMode.XYXY_ABS) for x in annos]
labels = [x["category_id"] for x in annos]
names = self.metadata.get("thing_classes", None)
if names:
labels = [names[i] for i in labels]
labels = [i + ("|crowd" if a.get("iscrowd", 0) else "") for i, a in zip(labels, annos)]
self.overlay_instances(labels=labels, boxes=boxes, masks=masks, keypoints=keypts)
sem_seg = dic.get("sem_seg", None)
if sem_seg is None and "sem_seg_file_name" in dic:
sem_seg = cv2.imread(dic["sem_seg_file_name"], cv2.IMREAD_GRAYSCALE)
if sem_seg is not None:
self.draw_sem_seg(sem_seg, area_threshold=0, alpha=0.5)
return self.output
def overlay_instances(
self,
*,
boxes=None,
labels=None,
masks=None,
keypoints=None,
assigned_colors=None,
alpha=0.5
):
"""
Args:
boxes (Boxes or ndarray): either a :class:`Boxes` or a Nx4 numpy array
of XYXY_ABS format for the N objects in a single image.
labels (list[str]): the text to be displayed for each instance.
masks (masks-like object): Supported types are:
* `structures.masks.PolygonMasks`, `structures.masks.BitMasks`.
* list[list[ndarray]]: contains the segmentation masks for all objects in one image.
The first level of the list corresponds to individual instances. The second
level to all the polygon that compose the instance, and the third level
to the polygon coordinates. The third level should have the format of
[x0, y0, x1, y1, ..., xn, yn] (n >= 3).
* list[ndarray]: each ndarray is a binary mask of shape (H, W).
* list[dict]: each dict is a COCO-style RLE.
keypoints (Keypoint or array like): an array-like object of shape (N, K, 3),
where the N is the number of instances and K is the number of keypoints.
The last dimension corresponds to (x, y, visibility or score).
assigned_colors (list[matplotlib.colors]): a list of colors, where each color
corresponds to each mask or box in the image. Refer to 'matplotlib.colors'
for full list of formats that the colors are accepted in.
Returns:
output (VisImage): image object with visualizations.
"""
num_instances = None
if boxes is not None:
boxes = self._convert_boxes(boxes)
num_instances = len(boxes)
if masks is not None:
masks = self._convert_masks(masks)
if num_instances:
assert len(masks) == num_instances
else:
num_instances = len(masks)
if keypoints is not None:
if num_instances:
assert len(keypoints) == num_instances
else:
num_instances = len(keypoints)
keypoints = self._convert_keypoints(keypoints)
if labels is not None:
assert len(labels) == num_instances
if assigned_colors is None:
assigned_colors = [random_color(rgb=True, maximum=1) for _ in range(num_instances)]
if num_instances == 0:
return self.output
# Display in largest to smallest order to reduce occlusion.
areas = None
if boxes is not None:
areas = np.prod(boxes[:, 2:] - boxes[:, :2], axis=1)
elif masks is not None:
areas = np.asarray([x.area() for x in masks])
if areas is not None:
sorted_idxs = np.argsort(-areas).tolist()
# Re-order overlapped instances in descending order.
boxes = boxes[sorted_idxs] if boxes is not None else None
labels = [labels[k] for k in sorted_idxs] if labels is not None else None
masks = [masks[idx] for idx in sorted_idxs] if masks is not None else None
assigned_colors = [assigned_colors[idx] for idx in sorted_idxs]
keypoints = keypoints[sorted_idxs] if keypoints is not None else None
for i in range(num_instances):
color = assigned_colors[i]
if masks is not None:
self.output = VisImage(self.img, self.output.scale)
for segment in masks[i].polygons:
self.draw_polygon(segment.reshape(-1, 2), color, alpha=alpha)
self.outputs.append((self.output.get_image(), labels[i].split(' ')[0]))
continue
if boxes is not None:
self.draw_box(boxes[i], edge_color=color)
if labels is not None:
# first get a box
if boxes is not None:
x0, y0, x1, y1 = boxes[i]
text_pos = (x0, y0) # if drawing boxes, put text on the box corner.
horiz_align = "left"
elif masks is not None:
x0, y0, x1, y1 = masks[i].bbox()
# draw text in the center (defined by median) when box is not drawn
# median is less sensitive to outliers.
text_pos = np.median(masks[i].mask.nonzero(), axis=1)[::-1]
horiz_align = "center"
else:
continue # drawing the box confidence for keypoints isn't very useful.
# for small objects, draw text at the side to avoid occlusion
instance_area = (y1 - y0) * (x1 - x0)
if (
instance_area < _SMALL_OBJECT_AREA_THRESH * self.output.scale
or y1 - y0 < 40 * self.output.scale
):
if y1 >= self.output.height - 5:
text_pos = (x1, y0)
else:
text_pos = (x0, y1)
height_ratio = (y1 - y0) / np.sqrt(self.output.height * self.output.width)
lighter_color = self._change_color_brightness(color, brightness_factor=0.7)
font_size = (
np.clip((height_ratio - 0.02) / 0.08 + 1, 1.2, 2)
* 0.5
* self._default_font_size
)
self.draw_text(
labels[i],
text_pos,
color=lighter_color,
horizontal_alignment=horiz_align,
font_size=font_size,
)
# draw keypoints
if keypoints is not None:
for keypoints_per_instance in keypoints:
self.draw_and_connect_keypoints(keypoints_per_instance)
return self.output
def draw_and_connect_keypoints(self, keypoints):
"""
Draws keypoints of an instance and follows the rules for keypoint connections
to draw lines between appropriate keypoints. This follows color heuristics for
line color.
Args:
keypoints (Tensor): a tensor of shape (K, 3), where K is the number of keypoints
and the last dimension corresponds to (x, y, probability).
Returns:
output (VisImage): image object with visualizations.
"""
visible = {}
for idx, keypoint in enumerate(keypoints):
# draw keypoint
x, y, prob = keypoint
if prob > _KEYPOINT_THRESHOLD:
self.draw_circle((x, y), color=_RED)
keypoint_name = self.metadata.keypoint_names[idx]
visible[keypoint_name] = (x, y)
for kp0, kp1, color in self.metadata.keypoint_connection_rules:
if kp0 in visible and kp1 in visible:
x0, y0 = visible[kp0]
x1, y1 = visible[kp1]
color = tuple(x / 255.0 for x in color)
self.draw_line([x0, x1], [y0, y1], color=color)
# draw lines from nose to mid-shoulder and mid-shoulder to mid-hip
# Note that this strategy is specific to person keypoints.
# For other keypoints, it should just do nothing
try:
ls_x, ls_y = visible["left_shoulder"]
rs_x, rs_y = visible["right_shoulder"]
mid_shoulder_x, mid_shoulder_y = (ls_x + rs_x) / 2, (ls_y + rs_y) / 2
except KeyError:
pass
else:
# draw line from nose to mid-shoulder
nose_x, nose_y = visible.get("nose", (None, None))
if nose_x is not None:
self.draw_line([nose_x, mid_shoulder_x], [nose_y, mid_shoulder_y], color=_RED)
try:
# draw line from mid-shoulder to mid-hip
lh_x, lh_y = visible["left_hip"]
rh_x, rh_y = visible["right_hip"]
except KeyError:
pass
else:
mid_hip_x, mid_hip_y = (lh_x + rh_x) / 2, (lh_y + rh_y) / 2
self.draw_line([mid_hip_x, mid_shoulder_x], [mid_hip_y, mid_shoulder_y], color=_RED)
return self.output
"""
Primitive drawing functions:
"""
def draw_text(
self, text, position, *, font_size=None, color="g", horizontal_alignment="center"
):
"""
Args:
text (str): class label
position (tuple): a tuple of the x and y coordinates to place text on image.
font_size (int, optional): font of the text. If not provided, a font size
proportional to the image width is calculated and used.
color: color of the text. Refer to `matplotlib.colors` for full list
of formats that are accepted.
horizontal_alignment (str): see `matplotlib.text.Text`
Returns:
output (VisImage): image object with text drawn.
"""
if not font_size:
font_size = self._default_font_size
# since the text background is dark, we don't want the text to be dark
color = np.maximum(list(mplc.to_rgb(color)), 0.2)
color[np.argmax(color)] = max(0.8, np.max(color))
x, y = position
self.output.ax.text(
x,
y,
text,
size=font_size * self.output.scale,
family="sans-serif",
bbox={"facecolor": "black", "alpha": 0.8, "pad": 0.7, "edgecolor": "none"},
verticalalignment="top",
horizontalalignment=horizontal_alignment,
color=color,
zorder=10,
)
return self.output
def draw_box(self, box_coord, alpha=0.5, edge_color="g", line_style="-"):
"""
Args:
box_coord (tuple): a tuple containing x0, y0, x1, y1 coordinates, where x0 and y0
are the coordinates of the image's top left corner. x1 and y1 are the
coordinates of the image's bottom right corner.
alpha (float): blending efficient. Smaller values lead to more transparent masks.
edge_color: color of the outline of the box. Refer to `matplotlib.colors`
for full list of formats that are accepted.
line_style (string): the string to use to create the outline of the boxes.
Returns:
output (VisImage): image object with box drawn.
"""
x0, y0, x1, y1 = box_coord
width = x1 - x0
height = y1 - y0
linewidth = max(self._default_font_size / 4, 1)
self.output.ax.add_patch(
mpl.patches.Rectangle(
(x0, y0),
width,
height,
fill=False,
edgecolor=edge_color,
linewidth=linewidth * self.output.scale,
alpha=alpha,
linestyle=line_style,
)
)
return self.output
def draw_circle(self, circle_coord, color, radius=3):
"""
Args:
circle_coord (list(int) or tuple(int)): contains the x and y coordinates
of the center of the circle.
color: color of the polygon. Refer to `matplotlib.colors` for a full list of
formats that are accepted.
radius (int): radius of the circle.
Returns:
output (VisImage): image object with box drawn.
"""
x, y = circle_coord
self.output.ax.add_patch(
mpl.patches.Circle(circle_coord, radius=radius, fill=True, color=color)
)
return self.output
def draw_line(self, x_data, y_data, color):
"""
Args:
x_data (list[int]): a list containing x values of all the points being drawn.
Length of list should match the length of y_data.
y_data (list[int]): a list containing y values of all the points being drawn.
Length of list should match the length of x_data.
color: color of the line. Refer to `matplotlib.colors` for a full list of
formats that are accepted.
Returns:
output (VisImage): image object with line drawn.
"""
linewidth = max(self._default_font_size / 3, 1)
self.output.ax.add_line(
mpl.lines.Line2D(x_data, y_data, linewidth=linewidth * self.output.scale, color=color)
)
return self.output
def draw_binary_mask(
self, binary_mask, color=None, *, edge_color=None, text=None, alpha=0.5, area_threshold=4096
):
"""
Args:
binary_mask (ndarray): numpy array of shape (H, W), where H is the image height and
W is the image width. Each value in the array is either a 0 or 1 value of uint8
type.
color: color of the mask. Refer to `matplotlib.colors` for a full list of
formats that are accepted. If None, will pick a random color.
edge_color: color of the polygon edges. Refer to `matplotlib.colors` for a
full list of formats that are accepted.
text (str): if None, will be drawn in the object's center of mass.
alpha (float): blending efficient. Smaller values lead to more transparent masks.
area_threshold (float): a connected component small than this will not be shown.
Returns:
output (VisImage): image object with mask drawn.
"""
if color is None:
color = random_color(rgb=True, maximum=1)
if area_threshold is None:
area_threshold = 4096
has_valid_segment = False
binary_mask = binary_mask.astype("uint8") # opencv needs uint8
mask = GenericMask(binary_mask, self.output.height, self.output.width)
shape2d = (binary_mask.shape[0], binary_mask.shape[1])
if not mask.has_holes:
# draw polygons for regular masks
for segment in mask.polygons:
area = mask_util.area(mask_util.frPyObjects([segment], shape2d[0], shape2d[1]))
if area < area_threshold:
continue
has_valid_segment = True
segment = segment.reshape(-1, 2)
self.draw_polygon(segment, color=color, edge_color=edge_color, alpha=alpha)
else:
rgba = np.zeros(shape2d + (4,), dtype="float32")
rgba[:, :, :3] = color
rgba[:, :, 3] = (mask.mask == 1).astype("float32") * alpha
has_valid_segment = True
self.output.ax.imshow(rgba)
if text is not None and has_valid_segment:
# TODO sometimes drawn on wrong objects. the heuristics here can improve.
lighter_color = self._change_color_brightness(color, brightness_factor=0.7)
_num_cc, cc_labels, stats, centroids = cv2.connectedComponentsWithStats(binary_mask, 8)
largest_component_id = np.argmax(stats[1:, -1]) + 1
# draw text on the largest component, as well as other very large components.
for cid in range(1, _num_cc):
if cid == largest_component_id or stats[cid, -1] > _LARGE_MASK_AREA_THRESH:
# median is more stable than centroid
# center = centroids[largest_component_id]
center = np.median((cc_labels == cid).nonzero(), axis=1)[::-1]
self.draw_text(text, center, color=lighter_color)
return self.output
def draw_polygon(self, segment, color, edge_color=None, alpha=0.5):
"""
Args:
segment: numpy array of shape Nx2, containing all the points in the polygon.
color: color of the polygon. Refer to `matplotlib.colors` for a full list of
formats that are accepted.
edge_color: color of the polygon edges. Refer to `matplotlib.colors` for a
full list of formats that are accepted. If not provided, a darker shade
of the polygon color will be used instead.
alpha (float): blending efficient. Smaller values lead to more transparent masks.
Returns:
output (VisImage): image object with polygon drawn.
"""
if edge_color is None:
# make edge color darker than the polygon color
if alpha > 0.8:
edge_color = self._change_color_brightness(color, brightness_factor=-0.7)
else:
edge_color = color
edge_color = mplc.to_rgb(edge_color) + (1,)
polygon = mpl.patches.Polygon(
segment,
fill=True,
facecolor=mplc.to_rgb(color) + (alpha,),
edgecolor=edge_color,
linewidth=max(self._default_font_size // 15 * self.output.scale, 1),
)
self.output.ax.add_patch(polygon)
return self.output
"""
Internal methods:
"""
def _jitter(self, color):
"""
Randomly modifies given color to produce a slightly different color than the color given.
Args:
color (tuple[double]): a tuple of 3 elements, containing the RGB values of the color
picked. The values in the list are in the [0.0, 1.0] range.
Returns:
jittered_color (tuple[double]): a tuple of 3 elements, containing the RGB values of the
color after being jittered. The values in the list are in the [0.0, 1.0] range.
"""
color = mplc.to_rgb(color)
vec = np.random.rand(3)
# better to do it in another color space
vec = vec / np.linalg.norm(vec) * 0.5
res = np.clip(vec + color, 0, 1)
return tuple(res)
def _create_grayscale_image(self, mask=None):
"""
Create a grayscale version of the original image.
The colors in masked area, if given, will be kept.
"""
img_bw = self.img.astype("f4").mean(axis=2)
img_bw = np.stack([img_bw] * 3, axis=2)
if mask is not None:
img_bw[mask] = self.img[mask]
return img_bw
def _change_color_brightness(self, color, brightness_factor):
"""
Depending on the brightness_factor, gives a lighter or darker color i.e. a color with
less or more saturation than the original color.
Args:
color: color of the polygon. Refer to `matplotlib.colors` for a full list of
formats that are accepted.
brightness_factor (float): a value in [-1.0, 1.0] range. A lightness factor of
0 will correspond to no change, a factor in [-1.0, 0) range will result in
a darker color and a factor in (0, 1.0] range will result in a lighter color.
Returns:
modified_color (tuple[double]): a tuple containing the RGB values of the
modified color. Each value in the tuple is in the [0.0, 1.0] range.
"""
assert brightness_factor >= -1.0 and brightness_factor <= 1.0
color = mplc.to_rgb(color)
polygon_color = colorsys.rgb_to_hls(*mplc.to_rgb(color))
modified_lightness = polygon_color[1] + (brightness_factor * polygon_color[1])
modified_lightness = 0.0 if modified_lightness < 0.0 else modified_lightness
modified_lightness = 1.0 if modified_lightness > 1.0 else modified_lightness
modified_color = colorsys.hls_to_rgb(polygon_color[0], modified_lightness, polygon_color[2])
return modified_color
def _convert_boxes(self, boxes):
"""
Convert different format of boxes to a Nx4 array.
"""
if isinstance(boxes, Boxes):
return boxes.tensor.numpy()
else:
return np.asarray(boxes)
def _convert_masks(self, masks_or_polygons):
"""
Convert different format of masks or polygons to a tuple of masks and polygons.
Returns:
list[GenericMask]:
"""
m = masks_or_polygons
if isinstance(m, PolygonMasks):
m = m.polygons
if isinstance(m, BitMasks):
m = m.tensor.numpy()
if isinstance(m, torch.Tensor):
m = m.numpy()
ret = []
for x in m:
if isinstance(x, GenericMask):
ret.append(x)
else:
ret.append(GenericMask(x, self.output.height, self.output.width))
return ret
def _convert_keypoints(self, keypoints):
if isinstance(keypoints, Keypoints):
keypoints = keypoints.tensor
keypoints = np.asarray(keypoints)
return keypoints
def get_output(self):
"""
Returns:
output (VisImage): the image output containing the visualizations added
to the image.
"""
return self.output
| 39.345214 | 100 | 0.579186 |
1e788a64c4cd2e41e2d001fb4877392bf61e30b1 | 17,097 | py | Python | venv/lib/python3.8/site-packages/matplotlib/texmanager.py | willBear/willBear-Fundamental_Analysis | bc67eb1e69dcf6765c0b77314d37f7f165a7318f | [
"MIT"
] | 15 | 2020-06-29T08:33:39.000Z | 2022-02-12T00:28:51.000Z | venv/lib/python3.8/site-packages/matplotlib/texmanager.py | willBear/willBear-Fundamental_Analysis | bc67eb1e69dcf6765c0b77314d37f7f165a7318f | [
"MIT"
] | 30 | 2020-04-15T19:37:40.000Z | 2020-04-22T21:19:35.000Z | venv/lib/python3.8/site-packages/matplotlib/texmanager.py | willBear/willBear-Fundamental_Analysis | bc67eb1e69dcf6765c0b77314d37f7f165a7318f | [
"MIT"
] | 11 | 2020-06-29T08:40:24.000Z | 2022-02-24T17:39:16.000Z | r"""
This module supports embedded TeX expressions in matplotlib via dvipng
and dvips for the raster and postscript backends. The tex and
dvipng/dvips information is cached in ~/.matplotlib/tex.cache for reuse between
sessions
Requirements:
* latex
* \*Agg backends: dvipng>=1.6
* PS backend: psfrag, dvips, and Ghostscript>=8.60
Backends:
* \*Agg
* PS
* PDF
For raster output, you can get RGBA numpy arrays from TeX expressions
as follows::
texmanager = TexManager()
s = ('\TeX\ is Number '
'$\displaystyle\sum_{n=1}^\infty\frac{-e^{i\pi}}{2^n}$!')
Z = texmanager.get_rgba(s, fontsize=12, dpi=80, rgb=(1, 0, 0))
To enable tex rendering of all text in your matplotlib figure, set
:rc:`text.usetex` to True.
"""
import copy
import functools
import glob
import hashlib
import logging
import os
from pathlib import Path
import re
import subprocess
import numpy as np
import matplotlib as mpl
from matplotlib import cbook, dviread, rcParams
_log = logging.getLogger(__name__)
class TexManager:
    """
    Convert strings to dvi files using TeX, caching the results to a directory.
    Repeated calls to this constructor always return the same instance.
    """
    # Cache directory for generated .tex/.dvi/.png artifacts; created eagerly
    # at class-definition time so instances can assume it exists.
    cachedir = mpl.get_cachedir()
    if cachedir is not None:
        texcache = os.path.join(cachedir, 'tex.cache')
        Path(texcache).mkdir(parents=True, exist_ok=True)
    else:
        # Should only happen in a restricted environment (such as Google App
        # Engine). Deal with this gracefully by not creating a cache directory.
        texcache = None
    # Caches.
    rgba_arrayd = {}
    grey_arrayd = {}
    # Each font entry is a (dvi font name, latex preamble snippet) pair.
    serif = ('cmr', '')
    sans_serif = ('cmss', '')
    monospace = ('cmtt', '')
    cursive = ('pzc', r'\usepackage{chancery}')
    font_family = 'serif'
    font_families = ('serif', 'sans-serif', 'cursive', 'monospace')
    font_info = {
        'new century schoolbook': ('pnc', r'\renewcommand{\rmdefault}{pnc}'),
        'bookman': ('pbk', r'\renewcommand{\rmdefault}{pbk}'),
        'times': ('ptm', r'\usepackage{mathptmx}'),
        'palatino': ('ppl', r'\usepackage{mathpazo}'),
        'zapf chancery': ('pzc', r'\usepackage{chancery}'),
        'cursive': ('pzc', r'\usepackage{chancery}'),
        'charter': ('pch', r'\usepackage{charter}'),
        'serif': ('cmr', ''),
        'sans-serif': ('cmss', ''),
        'helvetica': ('phv', r'\usepackage{helvet}'),
        'avant garde': ('pag', r'\usepackage{avant}'),
        'courier': ('pcr', r'\usepackage{courier}'),
        # Loading the type1ec package ensures that cm-super is installed, which
        # is necessary for unicode computer modern. (It also allows the use of
        # computer modern at arbitrary sizes, but that's just a side effect.)
        'monospace': ('cmtt', r'\usepackage{type1ec}'),
        'computer modern roman': ('cmr', r'\usepackage{type1ec}'),
        'computer modern sans serif': ('cmss', r'\usepackage{type1ec}'),
        'computer modern typewriter': ('cmtt', r'\usepackage{type1ec}')}
    # Snapshot of the rcParams values that affect tex output, used by
    # get_font_config() to detect when a re-init is required.
    _rc_cache = None
    _rc_cache_keys = (
        ('text.latex.preamble', 'text.latex.unicode', 'text.latex.preview',
         'font.family') + tuple('font.' + n for n in font_families))
    @functools.lru_cache()  # Always return the same instance.
    def __new__(cls):
        self = object.__new__(cls)
        self._reinit()
        return self
    def _reinit(self):
        """Rebuild the cached font configuration from the current rcParams."""
        if self.texcache is None:
            raise RuntimeError('Cannot create TexManager, as there is no '
                               'cache directory available')
        Path(self.texcache).mkdir(parents=True, exist_ok=True)
        ff = rcParams['font.family']
        if len(ff) == 1 and ff[0].lower() in self.font_families:
            self.font_family = ff[0].lower()
        elif isinstance(ff, str) and ff.lower() in self.font_families:
            self.font_family = ff.lower()
        else:
            _log.info('font.family must be one of (%s) when text.usetex is '
                      'True. serif will be used by default.',
                      ', '.join(self.font_families))
            self.font_family = 'serif'
        fontconfig = [self.font_family]
        # For each generic family, pick the first rcParams font that latex
        # supports; fall back to the family's own default entry otherwise.
        for font_family in self.font_families:
            font_family_attr = font_family.replace('-', '_')
            for font in rcParams['font.' + font_family]:
                if font.lower() in self.font_info:
                    setattr(self, font_family_attr,
                            self.font_info[font.lower()])
                    _log.debug('family: %s, font: %s, info: %s',
                               font_family, font, self.font_info[font.lower()])
                    break
                else:
                    _log.debug('%s font is not compatible with usetex.',
                               font_family)
            else:
                _log.info('No LaTeX-compatible font found for the %s font '
                          'family in rcParams. Using default.', font_family)
                setattr(self, font_family_attr, self.font_info[font_family])
            fontconfig.append(getattr(self, font_family_attr)[0])
        # Add a hash of the latex preamble to self._fontconfig so that the
        # correct png is selected for strings rendered with same font and dpi
        # even if the latex preamble changes within the session
        preamble_bytes = self.get_custom_preamble().encode('utf-8')
        fontconfig.append(hashlib.md5(preamble_bytes).hexdigest())
        self._fontconfig = ''.join(fontconfig)
        # The following packages and commands need to be included in the latex
        # file's preamble:
        cmd = [self.serif[1], self.sans_serif[1], self.monospace[1]]
        if self.font_family == 'cursive':
            cmd.append(self.cursive[1])
        self._font_preamble = '\n'.join(
            [r'\usepackage{type1cm}'] + cmd + [r'\usepackage{textcomp}'])
    def get_basefile(self, tex, fontsize, dpi=None):
        """
        Return a filename based on a hash of the string, fontsize, and dpi.
        """
        s = ''.join([tex, self.get_font_config(), '%f' % fontsize,
                     self.get_custom_preamble(), str(dpi or '')])
        return os.path.join(
            self.texcache, hashlib.md5(s.encode('utf-8')).hexdigest())
    def get_font_config(self):
        """Reinitialize self if relevant rcParams have changed; return fontconfig."""
        if self._rc_cache is None:
            self._rc_cache = dict.fromkeys(self._rc_cache_keys)
        changed = [par for par in self._rc_cache_keys
                   if rcParams[par] != self._rc_cache[par]]
        if changed:
            _log.debug('following keys changed: %s', changed)
            for k in changed:
                _log.debug('%-20s: %-10s -> %-10s',
                           k, self._rc_cache[k], rcParams[k])
                # deepcopy may not be necessary, but feels more future-proof
                self._rc_cache[k] = copy.deepcopy(rcParams[k])
            _log.debug('RE-INIT\nold fontconfig: %s', self._fontconfig)
            self._reinit()
        _log.debug('fontconfig: %s', self._fontconfig)
        return self._fontconfig
    def get_font_preamble(self):
        """
        Return a string containing font configuration for the tex preamble.
        """
        return self._font_preamble
    def get_custom_preamble(self):
        """Return a string containing user additions to the tex preamble."""
        return rcParams['text.latex.preamble']
    def make_tex(self, tex, fontsize):
        """
        Generate a tex file to render the tex string at a specific font size.
        Return the file name.
        """
        basefile = self.get_basefile(tex, fontsize)
        texfile = '%s.tex' % basefile
        custom_preamble = self.get_custom_preamble()
        # Wrap the user string in the family-selection command for the
        # configured font family.
        fontcmd = {'sans-serif': r'{\sffamily %s}',
                   'monospace': r'{\ttfamily %s}'}.get(self.font_family,
                                                       r'{\rmfamily %s}')
        tex = fontcmd % tex
        unicode_preamble = "\n".join([
            r"\usepackage[utf8]{inputenc}",
            r"\DeclareUnicodeCharacter{2212}{\ensuremath{-}}",
        ]) if rcParams["text.latex.unicode"] else ""
        s = r"""
\documentclass{article}
%s
%s
%s
\usepackage[papersize={72in,72in},body={70in,70in},margin={1in,1in}]{geometry}
\pagestyle{empty}
\begin{document}
\fontsize{%f}{%f}%s
\end{document}
""" % (self._font_preamble, unicode_preamble, custom_preamble,
       fontsize, fontsize * 1.25, tex)
        with open(texfile, 'wb') as fh:
            if rcParams['text.latex.unicode']:
                fh.write(s.encode('utf8'))
            else:
                try:
                    fh.write(s.encode('ascii'))
                except UnicodeEncodeError:
                    _log.info("You are using unicode and latex, but have not "
                              "enabled the 'text.latex.unicode' rcParam.")
                    raise
        return texfile
    # Extracts (height, depth, width) in pt from the \showbox line that the
    # preview-mode latex run writes to its log (see make_tex_preview).
    _re_vbox = re.compile(
        r"MatplotlibBox:\(([\d.]+)pt\+([\d.]+)pt\)x([\d.]+)pt")
    def make_tex_preview(self, tex, fontsize):
        """
        Generate a tex file to render the tex string at a specific font size.
        It uses preview.sty to determine the dimension (width, height,
        descent) of the output.
        Return the file name.
        """
        basefile = self.get_basefile(tex, fontsize)
        texfile = '%s.tex' % basefile
        custom_preamble = self.get_custom_preamble()
        fontcmd = {'sans-serif': r'{\sffamily %s}',
                   'monospace': r'{\ttfamily %s}'}.get(self.font_family,
                                                       r'{\rmfamily %s}')
        tex = fontcmd % tex
        unicode_preamble = "\n".join([
            r"\usepackage[utf8]{inputenc}",
            r"\DeclareUnicodeCharacter{2212}{\ensuremath{-}}",
        ]) if rcParams["text.latex.unicode"] else ""
        # newbox, setbox, immediate, etc. are used to find the box
        # extent of the rendered text.
        s = r"""
\documentclass{article}
%s
%s
%s
\usepackage[active,showbox,tightpage]{preview}
\usepackage[papersize={72in,72in},body={70in,70in},margin={1in,1in}]{geometry}
%% we override the default showbox as it is treated as an error and makes
%% the exit status not zero
\def\showbox#1%%
{\immediate\write16{MatplotlibBox:(\the\ht#1+\the\dp#1)x\the\wd#1}}
\begin{document}
\begin{preview}
{\fontsize{%f}{%f}%s}
\end{preview}
\end{document}
""" % (self._font_preamble, unicode_preamble, custom_preamble,
       fontsize, fontsize * 1.25, tex)
        with open(texfile, 'wb') as fh:
            if rcParams['text.latex.unicode']:
                fh.write(s.encode('utf8'))
            else:
                try:
                    fh.write(s.encode('ascii'))
                except UnicodeEncodeError:
                    _log.info("You are using unicode and latex, but have not "
                              "enabled the 'text.latex.unicode' rcParam.")
                    raise
        return texfile
    def _run_checked_subprocess(self, command, tex):
        """Run *command* in the tex cache dir; return its output, raising
        RuntimeError (with the tool's report) if the tool is missing or fails.
        """
        _log.debug(cbook._pformat_subprocess(command))
        try:
            report = subprocess.check_output(command,
                                             cwd=self.texcache,
                                             stderr=subprocess.STDOUT)
        except FileNotFoundError as exc:
            raise RuntimeError(
                'Failed to process string with tex because {} could not be '
                'found'.format(command[0])) from exc
        except subprocess.CalledProcessError as exc:
            raise RuntimeError(
                '{prog} was not able to process the following string:\n'
                '{tex!r}\n\n'
                'Here is the full report generated by {prog}:\n'
                '{exc}\n\n'.format(
                    prog=command[0],
                    tex=tex.encode('unicode_escape'),
                    exc=exc.output.decode('utf-8'))) from exc
        _log.debug(report)
        return report
    def make_dvi(self, tex, fontsize):
        """
        Generate a dvi file containing latex's layout of tex string.
        Return the file name.
        """
        if rcParams['text.latex.preview']:
            return self.make_dvi_preview(tex, fontsize)
        basefile = self.get_basefile(tex, fontsize)
        dvifile = '%s.dvi' % basefile
        if not os.path.exists(dvifile):
            texfile = self.make_tex(tex, fontsize)
            # Lock the .tex file so concurrent processes do not race on the
            # same cache entry.
            with cbook._lock_path(texfile):
                self._run_checked_subprocess(
                    ["latex", "-interaction=nonstopmode", "--halt-on-error",
                     texfile], tex)
            # Remove latex by-products (.log, .aux, ...), keeping only the
            # cached .dvi and .tex.
            for fname in glob.glob(basefile + '*'):
                if not fname.endswith(('dvi', 'tex')):
                    try:
                        os.remove(fname)
                    except OSError:
                        pass
        return dvifile
    def make_dvi_preview(self, tex, fontsize):
        """
        Generate a dvi file containing latex's layout of tex string.
        It calls the make_tex_preview() method and stores the size information
        (width, height, descent) in a separate file.
        Return the file name.
        """
        basefile = self.get_basefile(tex, fontsize)
        dvifile = '%s.dvi' % basefile
        baselinefile = '%s.baseline' % basefile
        if not os.path.exists(dvifile) or not os.path.exists(baselinefile):
            texfile = self.make_tex_preview(tex, fontsize)
            report = self._run_checked_subprocess(
                ["latex", "-interaction=nonstopmode", "--halt-on-error",
                 texfile], tex)
            # find the box extent information in the latex output
            # file and store them in ".baseline" file
            m = TexManager._re_vbox.search(report.decode("utf-8"))
            with open(basefile + '.baseline', "w") as fh:
                fh.write(" ".join(m.groups()))
            for fname in glob.glob(basefile + '*'):
                if not fname.endswith(('dvi', 'tex', 'baseline')):
                    try:
                        os.remove(fname)
                    except OSError:
                        pass
        return dvifile
    def make_png(self, tex, fontsize, dpi):
        """
        Generate a png file containing latex's rendering of tex string.
        Return the file name.
        """
        basefile = self.get_basefile(tex, fontsize, dpi)
        pngfile = '%s.png' % basefile
        # see get_rgba for a discussion of the background
        if not os.path.exists(pngfile):
            dvifile = self.make_dvi(tex, fontsize)
            self._run_checked_subprocess(
                ["dvipng", "-bg", "Transparent", "-D", str(dpi),
                 "-T", "tight", "-o", pngfile, dvifile], tex)
        return pngfile
    def get_grey(self, tex, fontsize=None, dpi=None):
        """Return the alpha channel."""
        from matplotlib import _png
        key = tex, self.get_font_config(), fontsize, dpi
        alpha = self.grey_arrayd.get(key)
        if alpha is None:
            pngfile = self.make_png(tex, fontsize, dpi)
            with open(os.path.join(self.texcache, pngfile), "rb") as file:
                X = _png.read_png(file)
            # dvipng renders black-on-transparent, so the alpha channel alone
            # carries the glyph coverage.
            self.grey_arrayd[key] = alpha = X[:, :, -1]
        return alpha
    def get_rgba(self, tex, fontsize=None, dpi=None, rgb=(0, 0, 0)):
        """Return latex's rendering of the tex string as an rgba array."""
        if not fontsize:
            fontsize = rcParams['font.size']
        if not dpi:
            dpi = rcParams['savefig.dpi']
        r, g, b = rgb
        key = tex, self.get_font_config(), fontsize, dpi, tuple(rgb)
        Z = self.rgba_arrayd.get(key)
        if Z is None:
            alpha = self.get_grey(tex, fontsize, dpi)
            # NOTE(review): stacking scalar r/g/b against the 2-D alpha relies
            # on np.dstack's handling of mixed shapes -- confirm this produces
            # an (h, w, 4) array for non-trivial images.
            Z = np.dstack([r, g, b, alpha])
            self.rgba_arrayd[key] = Z
        return Z
    def get_text_width_height_descent(self, tex, fontsize, renderer=None):
        """Return width, height and descent of the text."""
        if tex.strip() == '':
            return 0, 0, 0
        dpi_fraction = renderer.points_to_pixels(1.) if renderer else 1
        if rcParams['text.latex.preview']:
            # use preview.sty
            basefile = self.get_basefile(tex, fontsize)
            baselinefile = '%s.baseline' % basefile
            if not os.path.exists(baselinefile):
                dvifile = self.make_dvi_preview(tex, fontsize)
            with open(baselinefile) as fh:
                l = fh.read().split()
            height, depth, width = [float(l1) * dpi_fraction for l1 in l]
            return width, height + depth, depth
        else:
            # use dviread. It sometimes returns a wrong descent.
            dvifile = self.make_dvi(tex, fontsize)
            with dviread.Dvi(dvifile, 72 * dpi_fraction) as dvi:
                page, = dvi
            # A total height (including the descent) needs to be returned.
            return page.width, page.height + page.descent, page.descent
| 37.575824 | 79 | 0.570451 |
e797de3f3cf89a187c6faff9031c9a319654c1c6 | 508 | py | Python | 300-/560.py | yshshadow/Leetcode | 5097f69bb0050d963c784d6bc0e88a7e871568ed | [
"MIT"
] | null | null | null | 300-/560.py | yshshadow/Leetcode | 5097f69bb0050d963c784d6bc0e88a7e871568ed | [
"MIT"
] | null | null | null | 300-/560.py | yshshadow/Leetcode | 5097f69bb0050d963c784d6bc0e88a7e871568ed | [
"MIT"
] | null | null | null | # Given an array of integers and an integer k, you need to find the total number of continuous subarrays whose sum equals to k.
#
# Example 1:
# Input:nums = [1,1,1], k = 2
# Output: 2
# Note:
# The length of the array is in range [1, 20,000].
# The range of numbers in the array is [-1000, 1000] and the range of the integer k is [-1e7, 1e7].
class Solution(object):
    def subarraySum(self, nums, k):
        """
        Count contiguous subarrays of nums whose elements sum to k.

        Uses the prefix-sum + hashmap technique: a subarray (i, j] sums to k
        exactly when prefix[j] - prefix[i] == k, so for each running prefix
        sum we add how many earlier prefixes equal (running - k).  O(n) time,
        O(n) extra space; handles negative numbers and an empty list.

        :type nums: List[int]
        :type k: int
        :rtype: int
        """
        # Maps prefix-sum value -> number of times it has occurred so far.
        # Seed with {0: 1} so subarrays starting at index 0 are counted.
        prefix_counts = {0: 1}
        running = 0
        total = 0
        for num in nums:
            running += num
            total += prefix_counts.get(running - k, 0)
            prefix_counts[running] = prefix_counts.get(running, 0) + 1
        return total
847e209d58138ed2c5972141c7d82f50d4419f3f | 145 | py | Python | result/admin.py | ShwethaRGowda/FADB | c6f29701b884ea69fe9a357c6a6b8e5672669222 | [
"MIT"
] | 149 | 2019-09-29T08:56:12.000Z | 2022-03-27T00:52:07.000Z | result/admin.py | ShwethaRGowda/FADB | c6f29701b884ea69fe9a357c6a6b8e5672669222 | [
"MIT"
] | 8 | 2021-03-30T13:59:50.000Z | 2022-03-12T00:41:22.000Z | result/admin.py | Krittin-Khanueng/school_management_system | b34e40da452cf87d2acddd183f1e5af59ec7b3ce | [
"MIT"
] | 75 | 2020-01-24T13:00:10.000Z | 2022-02-17T06:19:29.000Z | from django.contrib import admin
from .models import SubjectRegistration
# Register your models here.
admin.site.register(SubjectRegistration)
| 20.714286 | 40 | 0.834483 |
248d966668e9a598ef33c277337d6abb2e592cda | 10,648 | py | Python | tensor2tensor/utils/cloud_mlengine.py | xueeinstein/tensor2tensor | b42e7bae72044916d465b7e298569b2823fe9bc0 | [
"Apache-2.0"
] | 1 | 2019-03-02T15:59:06.000Z | 2019-03-02T15:59:06.000Z | tensor2tensor/utils/cloud_mlengine.py | xueeinstein/tensor2tensor | b42e7bae72044916d465b7e298569b2823fe9bc0 | [
"Apache-2.0"
] | null | null | null | tensor2tensor/utils/cloud_mlengine.py | xueeinstein/tensor2tensor | b42e7bae72044916d465b7e298569b2823fe9bc0 | [
"Apache-2.0"
] | null | null | null | # coding=utf-8
# Copyright 2018 The Tensor2Tensor Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Launch on GCP's ML Engine."""
import datetime
import os
import shutil
import sys
import tempfile
from googleapiclient import discovery
from oauth2client.client import GoogleCredentials
from tensor2tensor.data_generators import text_encoder
from tensor2tensor.layers import common_hparams
from tensor2tensor.utils import cloud_tpu as cloud
from tensor2tensor.utils import registry
from tensor2tensor.utils import usr_dir as usr_dir_lib
import tensorflow as tf
FLAGS = tf.flags.FLAGS
CONSOLE_URL = "https://console.cloud.google.com/mlengine/jobs/"
# TODO(rsepassi):
# * Enable multi-machine sync/async training
def get_setup_file(name, packages=None):
  """Render a minimal setup.py declaring *name* with the given requirements."""
  pypi_packages = list(packages) if packages else []
  return """
from setuptools import find_packages
from setuptools import setup
setup(
    name="{name}",
    version="0.1",
    packages=find_packages(),
    install_requires={pypi_packages}
)
""".format(name=name, pypi_packages=str(pypi_packages))
def job_dir():
  """Return the job directory flag value, whichever spelling is present."""
  # The flag --job-dir is parsed differently before and after the switch to
  # absl, so check the hyphenated attribute first, then the underscored one.
  hyphenated = getattr(FLAGS, "job-dir", "")
  underscored = getattr(FLAGS, "job_dir", "")
  return hyphenated or underscored
def get_requirements(usr_dir):
  """Return pypi requirements listed in usr_dir, excluding tensor2tensor."""
  req_path = os.path.join(usr_dir, "requirements.txt")
  if not tf.gfile.Exists(req_path):
    return []
  with tf.gfile.Open(req_path) as f:
    lines = f.readlines()
  # tensor2tensor itself is staged separately, so drop it here.
  return [line.strip() for line in lines if "tensor2tensor" not in line]
def flags_as_args():
  """Convert FLAGS to list of args suitable for passing on cmd line."""
  if hasattr(FLAGS, "flag_values_dict"):
    flag_map = FLAGS.flag_values_dict()
  else:
    flag_map = dict(FLAGS.__dict__["__flags"])
  # These two are consumed by the launcher itself, never forwarded; a
  # missing key here is a programming error, so let `del` raise.
  del flag_map["cloud_mlengine"]
  del flag_map["t2t_usr_dir"]
  # Help-related flags are meaningless on the remote worker.
  for help_flag in ("h", "helpfull", "helpshort", "help"):
    flag_map.pop(help_flag, None)
  cmdline = []
  for name, val in flag_map.items():
    # Skip unset flags and the autotune ones (encoded in hyperparameters).
    if val is None or name.startswith("autotune"):
      continue
    cmdline.append("--%s" % name)
    cmdline.append(str(val))
  return cmdline
def get_default_master_type(num_gpus=1, use_tpu=False):
  """Returns master_type for trainingInput."""
  if use_tpu:
    return "standard_tpu"
  if num_gpus <= 0:
    return "standard"
  # Only these GPU counts map onto ML Engine machine types.
  gpus_to_machine = {
      1: "standard_p100",
      4: "complex_model_m_p100",
      8: "complex_model_l_gpu",
  }
  assert num_gpus in gpus_to_machine
  return gpus_to_machine[num_gpus]
def configure_job():
  """Construct jobSpec for ML Engine job."""
  # Spec format reference:
  # https://cloud.google.com/ml-engine/reference/rest/v1/projects.jobs#traininginput
  master_type = FLAGS.cloud_mlengine_master_type or get_default_master_type(
      num_gpus=FLAGS.worker_gpu,
      use_tpu=FLAGS.use_tpu)
  py_version = "3.5" if sys.version_info.major == 3 else "2.7"
  training_input = {
      "pythonModule": "tensor2tensor.bin.t2t_trainer",
      "args": flags_as_args(),
      "region": text_encoder.native_to_unicode(cloud.default_region()),
      "runtimeVersion": "1.5",
      "pythonVersion": py_version,
      "jobDir": FLAGS.output_dir,
      "scaleTier": "CUSTOM",
      "masterType": master_type,
  }
  if FLAGS.hparams_range:
    tf.logging.info("Configuring hyperparameter tuning.")
    training_input["hyperparameters"] = configure_autotune(
        FLAGS.hparams_range,
        FLAGS.autotune_objective,
        FLAGS.autotune_maximize,
        FLAGS.autotune_max_trials,
        FLAGS.autotune_parallel_trials,
    )
  # Job ids must be unique, so suffix with a timestamp.
  timestamp = datetime.datetime.now().strftime("%Y%m%d_%H%M%S")
  job_name = "%s_%s_t2t_%s" % (FLAGS.model, FLAGS.problem, timestamp)
  return {"jobId": job_name, "trainingInput": training_input}
def launch_job(job_spec):
  """Submit job_spec to Cloud ML Engine under the default project."""
  parent = "projects/{}".format(
      text_encoder.native_to_unicode(cloud.default_project()))
  creds = GoogleCredentials.get_application_default()
  # cache_discovery=False avoids oauth2client file-cache warnings.
  api_client = discovery.build("ml", "v1", credentials=creds,
                               cache_discovery=False)
  jobs = api_client.projects().jobs()
  jobs.create(body=job_spec, parent=parent).execute()
def _tar_and_copy(src_dir, target_dir):
  """Tar and gzip src_dir and copy to GCS target_dir; return the GCS path."""
  src_dir = src_dir.rstrip("/")
  target_dir = target_dir.rstrip("/")
  base_name = os.path.basename(src_dir)
  scratch_dir = tempfile.gettempdir().rstrip("/")
  dest = "%s/%s.tar.gz" % (target_dir, base_name)
  # Build the archive in the local temp dir first, then push it to GCS.
  cloud.shell_run(
      "tar -zcf {tmp_dir}/{src_base}.tar.gz -C {src_dir} .",
      src_dir=src_dir,
      src_base=base_name,
      tmp_dir=scratch_dir)
  cloud.shell_run(
      ("gsutil cp {tmp_dir}/{src_base}.tar.gz "
       "{final_destination}"),
      tmp_dir=scratch_dir,
      src_base=base_name,
      final_destination=dest)
  return dest
def tar_and_copy_t2t(train_dir):
  """Tar Tensor2Tensor and cp to train_dir.

  Returns the GCS path of the uploaded tarball.  For a local (GitHub clone)
  install, the whole source tree is shipped; for a PyPI install, only a stub
  setup.py pinning the installed version is shipped.
  """
  tf.logging.info("Tarring and pushing local Tensor2Tensor package.")
  # NOTE(review): this relies on `pip show` printing Version on line 2 and
  # Location on line 8 — fragile against pip output-format changes; the
  # asserts below guard that assumption.
  output = text_encoder.native_to_unicode(cloud.shell_output(
      "pip show tensor2tensor")).split("\n")
  assert output[1].startswith("Version")
  assert output[7].startswith("Location")
  t2t_version = output[1].split(":")[1].strip()
  t2t_dir = output[7].split(":")[1].strip()
  # A local installation cloned from GitHub will have a setup.py file and a docs
  # folder
  is_local_t2t = all([
      tf.gfile.Exists(os.path.join(t2t_dir, fname))
      for fname in ["setup.py", "docs/cloud_mlengine.md"]
  ])
  if is_local_t2t:
    tf.logging.info("Found local T2T installation. Tarring directory %s",
                    t2t_dir)
  else:
    # PyPI installation
    # Create a folder with just a setup.py file pointing to the right version
    tf.logging.info("Found PyPI T2T installation. Launching tensor2tensor==%s",
                    t2t_version)
    t2t_dir = os.path.join(tempfile.gettempdir(), "tensor2tensor_tmp")
    shutil.rmtree(t2t_dir, ignore_errors=True)
    os.mkdir(t2t_dir)
    setup_fname = os.path.join(t2t_dir, "setup.py")
    setup_file_str = get_setup_file(
        name="DummyT2TPackage",
        packages=["tensor2tensor==%s" % t2t_version]
    )
    with tf.gfile.Open(setup_fname, "w") as f:
      f.write(setup_file_str)
  t2t_tar = _tar_and_copy(t2t_dir, train_dir)
  return t2t_tar
def tar_and_copy_usr_dir(usr_dir, train_dir):
  """Package, tar, and copy usr_dir to GCS train_dir; return the GCS path."""
  tf.logging.info("Tarring and pushing t2t_usr_dir.")
  resolved_usr_dir = os.path.abspath(os.path.expanduser(usr_dir))
  # Stage a fresh copy of the user dir inside a well-known temp container.
  container_dir = os.path.join(tempfile.gettempdir(), "t2t_usr_container")
  shutil.rmtree(container_dir, ignore_errors=True)
  staged_usr_dir = os.path.join(container_dir,
                                usr_dir_lib.INTERNAL_USR_DIR_PACKAGE)
  shutil.copytree(resolved_usr_dir, staged_usr_dir)
  # Write a setup.py so the container is installable as a package.
  setup_body = get_setup_file(
      name="DummyUsrDirPackage",
      packages=get_requirements(resolved_usr_dir))
  with tf.gfile.Open(os.path.join(container_dir, "setup.py"), "w") as f:
    f.write(setup_body)
  return _tar_and_copy(container_dir, train_dir)
def autotune_paramspecs(hparams_range):
  """Expand a registered RangedHParams set into ML Engine parameter specs."""
  ranged_hparams = common_hparams.RangedHParams()
  registry.ranged_hparams(hparams_range)(ranged_hparams)
  return ranged_hparams.to_parameter_specs(name_prefix="hp_")
def configure_autotune(hparams_range,
                       objective,
                       maximize=True,
                       max_trials=10,
                       parallel_trials=1):
  """Build the hyperparameter-tuning stanza of an ML Engine trainingInput."""
  param_specs = autotune_paramspecs(hparams_range)
  direction = "MAXIMIZE" if maximize else "MINIMIZE"
  return {
      "goal": direction,
      "params": param_specs,
      "maxTrials": max_trials,
      "maxParallelTrials": parallel_trials,
      "hyperparameterMetricTag": objective,
  }
def configure_trainer_package(job_spec, t2t_tar):
  """Register the T2T package tarball (must be on GCS) in job_spec, in place."""
  assert t2t_tar.startswith("gs://")
  training_input = job_spec["trainingInput"]
  training_input["packageUris"] = [t2t_tar]
def configure_usr_dir(job_spec, usr_tar):
  """Register the usr-dir tarball (must be on GCS) in job_spec, in place."""
  assert usr_tar.startswith("gs://")
  training_input = job_spec["trainingInput"]
  training_input["packageUris"].append(usr_tar)
  # Point the remote trainer at the staged usr-dir package name.
  training_input["args"].extend(
      ["--t2t_usr_dir", usr_dir_lib.INTERNAL_USR_DIR_PACKAGE])
def validate_flags():
  """Validates flags are set to acceptable values for CloudML Engine runs."""
  # ML Engine manages its own workers; mutually exclusive with cloud_tpu
  # launching and an externally supplied job dir.
  assert not FLAGS.cloud_tpu
  assert not job_dir()
  # All paths must live on GCS so the remote workers can reach them.
  assert FLAGS.output_dir.startswith("gs://")
  assert FLAGS.data_dir.startswith("gs://")
  # Multi-machine sync/async training is not supported yet (see module TODO).
  assert FLAGS.worker_replicas <= 1
  assert FLAGS.ps_replicas <= 0
  if FLAGS.hparams_range:
    assert FLAGS.autotune_objective
  if FLAGS.worker_gpu:
    assert FLAGS.worker_gpu in [1, 4, 8]
  # When the machine type is given explicitly it must agree with the
  # requested accelerators.
  if FLAGS.cloud_mlengine_master_type:
    if FLAGS.use_tpu:
      assert FLAGS.cloud_mlengine_master_type == "standard_tpu"
    elif FLAGS.worker_gpu:
      if FLAGS.worker_gpu == 1:
        assert FLAGS.cloud_mlengine_master_type in ["standard_gpu",
                                                    "standard_p100"]
      elif FLAGS.worker_gpu == 4:
        assert FLAGS.cloud_mlengine_master_type in ["complex_model_m_gpu",
                                                    "complex_model_m_p100"]
      else:
        assert FLAGS.cloud_mlengine_master_type == "complex_model_l_gpu"
    else:
      assert FLAGS.cloud_mlengine_master_type in ["standard", "large_model",
                                                  "complex_model_s",
                                                  "complex_model_m",
                                                  "complex_model_l"]
def launch():
  """Launch t2t_trainer on Cloud ML Engine."""
  validate_flags()
  job_spec = configure_job()
  job_name = job_spec["jobId"]
  tf.logging.info("Launching job %s with ML Engine spec:\n%s", job_name,
                  job_spec)
  # Require interactive confirmation before spending cloud resources.
  assert cloud.confirm()
  train_dir = FLAGS.output_dir
  # Stage the trainer package (and, if present, the usr dir) on GCS and
  # record the resulting tarball URIs in the job spec.
  configure_trainer_package(job_spec, tar_and_copy_t2t(train_dir))
  if FLAGS.t2t_usr_dir:
    configure_usr_dir(job_spec,
                      tar_and_copy_usr_dir(FLAGS.t2t_usr_dir, train_dir))
  launch_job(job_spec)
  tf.logging.info("Launched %s. See console to track: %s.", job_name,
                  CONSOLE_URL)
| 34.019169 | 84 | 0.692524 |
c850ea0fc27659ec72837d1d1e8f7c6e787a495d | 273,324 | py | Python | Ch15/s15_04/ex15_04_rc.py | Deteriorator/PyQt5-Guide | d34a9659894d279b17369528359844a4ae4aeb20 | [
"CC0-1.0"
] | 1 | 2022-02-23T09:58:11.000Z | 2022-02-23T09:58:11.000Z | Ch15/s15_04/ex15_04_rc.py | Deteriorator/PyQt5-Guide | d34a9659894d279b17369528359844a4ae4aeb20 | [
"CC0-1.0"
] | null | null | null | Ch15/s15_04/ex15_04_rc.py | Deteriorator/PyQt5-Guide | d34a9659894d279b17369528359844a4ae4aeb20 | [
"CC0-1.0"
] | null | null | null | # -*- coding: utf-8 -*-
# Resource object code
#
# Created by: The Resource Compiler for PyQt5 (Qt v5.9.7)
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore
qt_resource_data = b"\
\x00\x00\x66\x07\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\xc8\x00\x00\x00\xf8\x08\x06\x00\x00\x01\xde\x25\x99\xa5\
\x00\x00\x00\x01\x73\x52\x47\x42\x00\xae\xce\x1c\xe9\x00\x00\x00\
\x04\x67\x41\x4d\x41\x00\x00\xb1\x8f\x0b\xfc\x61\x05\x00\x00\x00\
\x09\x70\x48\x59\x73\x00\x00\x0b\x3a\x00\x00\x0b\x3a\x01\x64\x7f\
\x57\x0d\x00\x00\x65\x9c\x49\x44\x41\x54\x78\x5e\xed\x9d\x05\x9c\
\x1d\xd5\xf5\xc7\x4f\x7c\xe3\xee\x4e\x94\xb8\xbb\xbb\x12\x57\xe2\
\x42\x70\x0f\xee\x4e\x8b\x53\x9c\x16\xb7\xd2\x16\x0a\x34\x44\x89\
\x21\x21\x38\x0d\xae\x45\xca\xbf\x85\x52\xa0\x68\xa1\x7b\xff\xe7\
\x7b\x67\xee\x66\x76\x32\xef\xed\x7b\x6f\x25\xbb\xc9\xfc\x3e\x9f\
\xdf\xbe\x7d\xf3\x46\xee\x5c\x3d\xe7\xdc\x73\xcf\x95\x62\x09\xb3\
\x59\xb2\xcc\xf3\x52\xce\xff\x5a\xf0\x30\x1b\xa4\x7a\xf6\x06\x39\
\x58\x1f\x34\x55\x59\xd6\x3f\x5c\xb0\xc8\xde\x24\x1d\xcd\x5b\xb2\
\x28\xfb\x09\x59\xae\x0f\xab\xed\x1f\x2e\x38\x18\x23\xa5\xb2\x37\
\xcb\xa4\x52\x95\x6a\x98\x9b\x8f\x97\x8b\xb2\xb7\x48\x77\xff\xa7\
\xfc\xc1\xfc\x5e\xca\x67\x3f\x2d\x15\xa1\xd9\x2e\x35\xf5\x0d\x16\
\xcf\x7a\xc3\x98\xe6\x53\x97\x19\x7d\xc8\xb4\xec\x97\xa5\x72\xce\
\xef\x99\x64\x9f\xcd\xff\x4d\x32\xcf\x6c\x94\x25\x50\xff\x5f\x6a\
\xb6\xc9\xca\x19\xaf\x1a\x03\xf5\xfb\x4a\xf7\x9b\xfd\x7d\x83\x4c\
\x31\x37\xa5\x59\x21\x34\x65\x35\xb8\x71\xf9\x9a\xf5\xec\x4d\x93\
\xb1\x76\x55\xf9\x58\xcf\x9d\x9d\x76\xad\xa3\x0c\xf4\x6d\x0e\xd0\
\x8b\x17\x75\x6e\x21\x3b\xa3\x6e\x0e\x7f\x78\x5c\x0e\xd5\xb2\x9a\
\x99\xaf\x8a\xd0\xb0\x96\x3c\x31\x67\xb8\x7c\x6f\x9e\x94\x15\xc1\
\x9b\xcf\x7e\xcb\x18\xf3\x84\x1c\x32\xb6\xb7\x98\xfa\xb5\x64\x89\
\x7f\x7a\x46\x30\xfc\xc9\xde\x28\x7d\xce\x5c\x24\xd7\x04\x1f\x02\
\x29\x97\xec\xb5\xd2\xd4\x9e\x29\xf2\x9d\xff\x99\x16\xfe\xc8\x1f\
\xad\x61\x65\xf4\x66\xd3\x3b\x1c\x71\x9e\xbd\x71\xff\x0e\xf2\x84\
\x94\xaf\x6c\xff\xef\xa4\xd9\xa8\x59\x3a\xc8\x9e\x2d\x52\x4a\xd9\
\xdc\xfb\x37\x75\x78\x6f\xb1\x43\xaa\x69\x9e\x2f\xe5\xa6\xe6\x69\
\x59\x6e\xb6\xc8\x2c\xfd\xbe\x60\xf6\x30\x79\x60\xc4\x7d\x3b\x8d\
\x56\xeb\x39\x81\xea\x6b\xaf\x49\x15\x39\xb5\xc4\xbc\x2a\xf5\xd7\
\xfe\x4a\xae\x3e\x6a\x9a\x7c\x31\xa2\x9b\x98\xac\xf2\x62\x6a\x56\
\x11\x33\x75\x90\x98\x93\xe6\xca\x87\x0f\x9f\x2f\x97\x65\x3f\x29\
\x55\xfd\xd3\xd3\xc2\x37\x4a\x52\xf5\x1f\xbe\x68\xcd\xe9\xaf\xa9\
\x6d\x4f\xd6\xf1\x1d\xe8\xf7\x2c\x3d\xde\x5b\xb3\x72\xc0\x98\x7e\
\x52\x8b\x43\x3e\x7f\xcb\xef\xa9\xe0\xbf\xfe\xa7\x43\x4b\xe5\x00\
\x9f\x41\x0c\xf1\x3f\x83\x78\xd5\xff\xcc\x13\xeb\xfd\xcf\x20\xae\
\x50\x5e\xa4\xbc\xd3\x7e\xcb\x8d\x3b\xfc\xcf\xb4\xd1\xda\xff\x04\
\x87\x29\xd7\x79\xff\xca\x07\xfe\x27\xa8\xa9\xac\xa4\xfc\xa7\xfd\
\x26\xf2\xb8\xff\x99\x32\x5c\x4d\x69\xe5\x7f\x82\x87\x95\x2f\x7a\
\xff\xca\x6f\x94\xd5\x95\x0d\x95\x7f\xe5\x80\x22\xad\xda\xe5\xc0\
\x45\xdd\xbc\x7f\x73\xe1\x25\xe5\xf5\xde\xbf\x39\xc8\xe8\x01\x0e\
\xa9\x5c\x9c\xaf\x07\x38\xdc\xaf\xe4\x46\x93\xed\x37\x0f\x94\x13\
\xc7\x0e\xb7\xdf\x4a\x12\x1a\xfb\x9f\x05\x82\xaf\x94\x77\xf9\x0c\
\xe2\x68\x65\x7b\xef\xdf\x1c\xac\x52\x1e\xea\xfd\x6b\xff\x5f\xa0\
\x3c\xc4\xff\x3f\x29\xa8\xf7\x89\x30\xcc\xff\x74\xe0\xa1\x1d\xbd\
\x7f\xd3\x83\xeb\xec\xf2\x1a\x4a\x39\x8f\x82\x9f\x65\xbf\xa5\x89\
\x6b\x94\x74\x21\x7c\x5e\xad\x3c\x40\x19\xc4\x81\xfe\xe7\xd3\xfe\
\xa7\xc3\xcc\x10\x63\xec\x2b\xb0\x42\x05\x22\xe9\xd9\x52\xda\x3f\
\x54\xf0\xd0\x21\x77\x10\xf2\x70\xf6\x9a\x5c\xdd\x7f\xc1\xc1\x0a\
\xdf\x9b\x64\xbe\x48\x19\x93\xbd\x55\xc6\x20\x61\xfa\x3f\x15\x1c\
\xcc\x63\xd2\x40\x45\xa1\x15\xb3\xdf\xb6\x42\xdd\x42\x04\x09\xff\
\xa7\xcc\xa1\x29\x2d\x9d\xfd\x80\xaf\x16\x3c\x22\x95\x90\x4a\x0e\
\x9d\x2c\xbf\x73\x92\xa3\xaa\x12\x07\x04\xd4\x86\xac\xb4\xdf\xcc\
\x0a\xdb\x7e\xfe\xe7\xa8\x06\x1b\x65\x45\x87\x23\xce\xb7\x92\xe3\
\x81\x4d\xe4\x79\x7d\xd0\xb2\x9c\xdf\x54\x28\xd7\xb7\x0c\xca\x04\
\x79\x83\x87\xa8\x92\x33\x1c\x81\x9a\x9b\x26\xe3\xcc\xbf\xea\x9b\
\x69\x36\x66\x3f\x2e\xed\xfc\xcb\x53\x87\xcd\x8a\x4d\x32\x86\xac\
\x99\xb2\xe3\xdb\xc8\x07\x64\xd5\x6f\x8a\x98\xba\x5c\x1f\xd2\x2b\
\x28\xf8\xa5\x05\xcd\xb2\xb2\x3d\xda\x8a\x21\xab\x2a\xea\x0d\x83\
\x0f\xe8\xd0\x4c\x9e\xfb\xe4\xf7\x72\xf8\x01\x8d\xf4\xc5\xf3\x59\
\xd3\x8c\x2d\xf4\x2d\xb2\x68\xd6\x9b\xb9\xdf\xa2\xc9\xb8\xb9\x3c\
\x7c\x9a\xff\x06\x19\x0b\x12\xf6\x42\xad\x45\xcd\xc8\x32\x6e\x3c\
\x61\xe3\x27\xc6\xec\x94\x65\xf6\x41\x5a\x16\xfa\xa6\x4b\xb2\x1f\
\xce\x19\x7f\xd2\x7e\xd0\xc7\xfc\xb1\x15\x60\x9b\x0c\x6d\x52\x53\
\xde\x92\x0a\x55\xa9\xba\xcb\xf4\xc6\x53\x7f\xd1\x87\x0e\xbf\xf7\
\x59\x63\x5e\xd2\x87\x6c\x91\x36\xf6\x0a\x0f\xda\x58\x53\x87\xf7\
\x16\xda\x56\xcc\x8b\x32\xbe\xef\x81\xf2\xe3\x82\x51\x62\x9a\xd5\
\x13\x53\xa6\x8c\x98\xae\xad\xc4\x2c\x1d\x6f\xcf\xf9\xd9\xfc\x53\
\x7a\x04\xca\x24\xad\xb7\xf9\x4c\xc9\x05\xcb\xcd\x33\xd2\x42\x53\
\x8b\xea\x50\xc3\xfe\xa2\xb0\x6f\xb8\x59\x9a\x64\xaf\x97\x91\xe6\
\x03\x69\xa0\x87\xce\xe0\xb0\xcf\x94\x81\xd4\x11\x84\x53\x1b\x9c\
\x7e\x98\x08\x69\x3d\x24\x78\xf2\x31\xca\x93\x95\x8c\xfb\x41\xfc\
\xda\xff\x0c\x22\xe3\x87\xfc\x4d\xb9\x42\xf9\x98\xfd\xe6\x01\x61\
\x02\xed\x0a\xd5\x21\x88\x9f\xfc\xcf\x94\x31\xca\xff\x04\x0f\xf9\
\x9f\x7f\xf0\x3f\xff\xa5\x44\xa2\xbc\x4d\xb9\x9d\x03\x8a\xe7\xfc\
\xcf\xb4\x40\xe1\x47\xe1\x58\xff\xd3\xa1\xa2\xb2\x85\xf2\x14\xfb\
\x2d\x03\xbc\xa3\x1c\xef\xfd\x9b\x10\x48\xfc\xe7\x79\xff\xe6\x0f\
\x94\x51\xb6\xb2\xae\xfd\xe6\x19\x05\x38\x96\x56\x41\x97\x28\x74\
\xf0\x3f\xf3\x84\x53\x1b\xac\xa1\x20\x00\x24\x78\xa7\x3a\x50\x4e\
\x68\xba\x9b\x94\x8f\x70\x40\x81\xba\x40\xf6\xbd\xa2\x5c\xc8\x81\
\xbc\x70\xb3\xff\x09\x9a\xf9\x9f\x41\x60\x75\x40\x65\x40\xa7\xa7\
\x66\x81\x2d\xfe\x27\xd6\x8c\xa4\x20\x25\xa8\x0c\x14\x6c\x15\x0e\
\x28\xa2\xf4\x95\xcb\xfd\xcf\x5f\xf9\x9f\x0e\x29\x55\x04\xd4\x05\
\x48\x4a\xf9\x04\x43\x95\x98\x41\xb0\x67\x0d\xe6\x80\xc2\x35\x40\
\xa7\x66\xf3\xb6\xa8\x0b\x3c\x84\xcf\x69\xca\x18\x31\x0a\x07\x88\
\x6c\xca\x06\x2a\x58\xf6\xc0\x18\xab\x32\x48\x3b\xfd\x5e\xa5\x50\
\x55\x9b\x82\x86\xd5\xc7\x76\x48\x13\x15\xc5\xa7\x5a\x89\x6d\x88\
\x3c\x68\x55\xa7\x27\xa4\x2f\xa2\xa2\x7f\x5a\xf1\x86\x95\x95\x9e\
\x94\xaa\x9a\xfb\x83\x94\x4b\x54\x60\xdb\xd2\xeb\x82\xdb\x8d\x79\
\x4a\x96\xa9\x0c\x35\x33\x7b\x9d\xb4\xd4\x73\x32\x93\xcc\x8b\x12\
\x9a\xc8\xb2\xd9\x1b\xa4\xad\xe6\xfe\x1c\xf3\x82\x2c\x1d\x7c\xf3\
\x06\x2b\x9c\xab\xde\xf7\x25\x22\xae\x72\x84\xbe\x60\x8d\x80\xc4\
\xb9\x77\xc0\x44\x4f\xf6\x33\x52\x5f\x73\xb6\x93\xad\xfb\xdb\xa4\
\x6f\x0e\x37\x4b\x3f\x95\x66\x07\xdb\x9c\x57\xc5\x4b\x4f\xcf\x76\
\x5a\x06\x33\x54\x28\x03\xfa\x92\x8b\x95\xe3\x90\x7a\x73\x5d\x4b\
\xb5\x7b\x42\x7a\xa2\x86\x6a\xd5\x2c\xdc\xb6\xc4\xcd\xb3\x9f\x92\
\x7a\xfa\xd0\x89\x6f\xdc\x29\x47\x57\xa9\x28\xff\xaa\x5c\x3e\x21\
\xbf\x98\xf3\x4e\x6e\x95\xa9\x72\x93\x56\x26\xe2\xbc\x1c\x56\xac\
\x20\x5f\x6b\x89\xa1\xb8\x77\xcb\x5e\x23\x15\xfc\xc7\x16\x0e\xcc\
\x2e\x6b\x92\x68\xa5\x39\x3e\x89\x36\xc0\xfc\x13\x87\xc3\x7a\x5e\
\xaa\xec\x7d\xf1\xdd\xe6\xba\x63\xe4\x62\x4a\x50\xef\x3b\xcf\xea\
\x27\xdb\xa5\x66\x91\xf4\x6e\xd4\x6f\x6b\x63\x79\x52\x1a\x69\xf5\
\x1a\x6c\x5e\x93\x29\xab\x26\xc9\x0f\xf3\x47\x8a\x69\xbf\xf2\xf4\
\xc8\x04\x07\xc9\x4b\x73\x9b\x55\x93\xc5\x1c\x36\x45\x3e\x33\xdb\
\x64\x92\xbe\x44\x47\x2d\x85\x6a\xfa\xcb\x5e\xe9\x9e\x4d\xe9\x52\
\x62\xee\x3d\x55\xdb\x0c\x75\x1e\xeb\xc8\x0e\x59\x3e\xe8\xa6\xf5\
\x91\x2f\xe0\x58\xad\xbc\x7c\xa6\x09\x5f\xa9\xd7\x4c\xfb\x61\x8d\
\xb4\x6a\x5a\x57\x06\x95\x2a\x65\x45\xec\x0f\xbd\xdb\x16\x1d\x6c\
\x8e\xda\x7f\xe8\x6e\x9f\x96\x5a\x5a\x32\xa3\x95\xcb\xb5\x5f\xfd\
\x36\x98\xe8\x39\xef\x1a\xd3\x62\xda\xb2\x5c\x2f\x32\xed\xf9\x1f\
\x8d\xf9\x50\x16\x68\x75\x9a\xa7\xd7\x74\xa0\xba\x72\x2f\x1f\xdc\
\xf7\x65\xef\xdf\xc2\x05\x0f\x42\x49\xb4\x30\xb7\x49\x96\xe6\x6e\
\x57\x2d\x8d\x83\x31\x25\xb9\xb6\xd2\x7a\xfe\x51\x66\xdb\xd5\x72\
\xb2\xcd\x79\xad\xff\xda\x9e\x56\x66\x95\x96\xaf\xe6\xbc\xef\xfd\
\x5e\xaa\x72\x0d\xcf\xe2\xb3\x5e\x26\xea\xcb\xd4\x0f\xb5\x09\x34\
\x60\x9b\x51\x85\x05\x6e\xee\x34\xac\x5c\xbd\x18\x89\xad\xd5\x63\
\xa8\xd1\xac\xfd\x8f\xd9\xaa\x0d\x97\x91\x7c\x93\x4c\xd0\x84\x1e\
\xa8\x9f\xcd\xfd\xaa\x37\x47\x13\xbd\xe2\xc3\x7b\xe5\xe8\x46\x23\
\xa7\x99\xe5\xe3\xe4\x4e\xfd\x0d\x23\x5c\x2f\x2c\x83\xfe\x6d\x1d\
\x4e\x52\x16\xca\xcb\x30\x2f\xe9\x6e\xcc\xc8\xbc\x86\xf6\x51\xa7\
\xba\x98\xd1\x3d\xc5\x2c\x1e\xeb\x51\x1b\xef\x73\xc7\xce\x94\xb3\
\x07\x75\x95\x05\x0d\xeb\x48\x4f\x3d\x8f\x49\xbb\x9a\xd5\x2a\x48\
\xab\x3e\xed\xa5\xdf\xa2\xd1\xb2\xea\x8c\x45\x72\xdd\x51\xd3\xe4\
\x8d\xe5\xe3\xc5\x2c\x1c\x2d\x66\x40\x47\x31\xda\x95\x73\x6f\x78\
\xb7\xd2\x81\xef\x61\xf3\x40\xbe\xf1\x8b\xf2\x07\x65\x0f\xfb\xcd\
\x87\xed\xc1\x9e\xd7\x41\x72\x9d\x54\x86\xf4\x66\x1c\xf3\x7f\x8e\
\x04\xb2\x18\x32\x17\x23\x3c\x16\xb2\x04\xde\x14\x07\x29\x79\x11\
\x14\xfb\x02\x05\xb6\x0d\x67\xe7\x08\x23\x99\x8d\xe3\x6d\xff\x33\
\x88\xeb\xfc\xcf\x47\x95\x18\x69\xe0\x46\x0e\x84\xc0\x8b\x84\x75\
\xc4\x02\x01\x37\x8e\xc2\x51\xfe\xe7\xb5\xca\x35\x3e\x6f\xe0\x80\
\x22\x38\xcb\xcd\x4c\x1f\x93\x6d\xcc\x0f\x83\xe1\x4a\xf4\x4e\x74\
\xca\xd1\x1c\x08\x80\x52\x4d\xf4\xbc\x7c\xe3\x09\x65\xd4\xcd\x97\
\xf9\x9f\x79\xc1\x19\x28\x78\x51\xfc\x3a\xa0\x9b\xea\xff\x87\xff\
\xe9\xc0\x73\x0a\x67\xc6\xc4\x07\xb3\x88\xc1\x97\x21\xe7\xb0\xcf\
\x61\x99\xa7\x61\x3b\xf0\x3f\x9c\x62\xbf\xed\x39\x71\xd8\xdf\xff\
\x5c\xea\x7f\xba\x49\x5e\x2c\x00\x85\x56\x12\x51\xf8\x51\xe9\x1e\
\xf8\xa5\xf2\x8b\x04\xe4\x37\x7a\x39\xe8\xdc\x0d\x3e\x57\xbe\xa6\
\x3c\x47\x89\xc5\xf4\x42\x25\xd5\x8e\xfb\xd1\x6e\xf6\x0a\xa8\xe7\
\x24\x00\xf6\xe1\x40\x1a\xc0\xa9\xc9\x5d\x5b\xec\xc0\xc0\xf6\x82\
\xd2\x25\x30\x8a\xce\x13\x25\x46\x26\xa0\x8e\x63\x8a\x0b\xf2\xdf\
\xca\x44\x98\xe3\x7f\x86\x71\xa4\xd2\x4d\xd5\xd1\x35\x33\x37\x78\
\xbb\x92\xd9\x9c\xa0\xc1\x1f\x50\x6a\xc1\x4f\xec\xf2\xc1\x8e\x24\
\x23\x44\xb9\x4c\xbd\xe5\x7f\x3a\x50\x9d\xe8\x6d\x18\x1f\x30\xd1\
\xf2\x7f\xd0\xda\x3a\x50\x89\x02\xb6\xd2\x7e\x13\x39\xdf\xff\x74\
\xd3\x22\xce\xbc\x0b\x92\x79\x1b\x86\x9f\x9b\x16\x82\x75\x3b\xc8\
\xa0\x1d\x39\x98\x68\x07\x8c\xad\x61\x30\x60\xfe\x5e\xf9\xb3\x12\
\xfd\x83\xfb\x20\xfa\xf0\x19\xc4\x93\x4a\xba\x6b\x7c\xd4\xf8\xcc\
\xc8\x01\x31\x19\xea\xf8\x9f\x61\xd0\x95\x32\x9e\x50\x05\xc0\xc1\
\xfe\x67\x38\x81\x37\x29\x19\x57\x9e\xb7\xdf\x44\xae\x54\x72\x4f\
\x26\x41\x1c\xfe\xa4\x44\x52\x80\x5c\xef\xfe\xbf\x57\x19\x23\x46\
\x8c\x18\x31\x92\x03\xc3\x84\x55\x81\x9d\x2b\x51\xa6\xbe\x31\x7b\
\x13\xf6\x25\x36\x4b\x95\xec\x4d\xd2\x2e\x1b\x43\xf7\xe3\xd2\x43\
\xbf\x37\x50\x16\xce\x8a\x94\xc2\x02\x09\xc6\x1f\x4e\x3f\xa7\x5a\
\xb3\x90\xb7\xb4\x66\x10\xcb\x3c\xfc\x53\x4a\x06\xb2\xef\x94\xca\
\xfa\x02\x4c\x15\x2c\x3e\x64\xa2\xdc\x96\xbd\xde\xae\x43\x99\x6a\
\x67\xb1\x4a\x4a\x15\xb3\x26\xd4\x0d\x52\x1b\xe7\x41\x7d\x91\xe5\
\xf6\x10\xd6\xc6\x4d\x32\x5f\xd9\x11\x73\x91\x77\x66\x31\x87\xbe\
\x48\x19\x7d\x89\x96\xd9\x9b\x65\xa6\xd9\x2e\xcb\x3b\x1c\x7e\xae\
\x69\xdf\x58\x9e\xd7\xaa\xb5\x84\xea\x65\xa7\xe5\xf2\xb0\x7b\x15\
\x0b\x68\x62\xb3\xb2\xb7\x48\x77\xcd\xfd\x85\x6b\x2e\x92\xd3\x31\
\x62\x57\x6a\xd1\xc1\x73\x1d\x64\x4e\xe5\x31\x6d\xf4\xc5\xfd\x45\
\x48\xa0\xbe\x08\x4b\xaa\x46\x28\x97\xd5\xad\x22\x1f\x60\xb0\x1e\
\xfd\xd0\x2e\x63\x76\xd9\x59\xdd\x39\xcc\x2f\xea\x79\x7b\xbf\xf7\
\xf2\x13\xcb\x1c\x7a\x96\x73\x39\xcd\xe1\xcb\x52\xd9\xba\xa2\xb2\
\x92\x4d\xdb\x45\xbd\x01\xe3\xac\xe5\x7d\xe6\x6b\xc6\xcc\x19\x26\
\x0f\xd8\x1e\x6c\x93\x0c\xb0\x33\x52\x78\x34\x06\xaf\x7d\x40\xc7\
\x1b\xcf\xdc\x5a\x04\x33\x55\x8c\x0f\x4c\x54\x32\x61\xf9\x84\xf4\
\xb4\x3d\x53\x70\x42\x13\x8b\x3b\x13\x9d\x1b\x64\x31\xf3\x1f\x07\
\x3d\xf5\xef\x9c\x39\x11\x2e\xb7\xd5\x6b\xa3\xcc\xd4\xf3\x98\x30\
\xed\x97\xeb\x5a\xc6\x1b\x6f\x82\xb5\x7e\xda\x2b\xeb\xd2\x85\x9d\
\xd1\xdd\x2c\xad\x35\xa1\x53\x34\x67\x17\x39\xff\xdc\x20\xa9\x52\
\x94\x06\xf3\xeb\xb8\x72\xba\x17\xe9\x79\xde\x6d\x86\xf9\x46\xa6\
\x15\xf4\xf7\xa5\x7b\x5c\xe7\x95\xd6\x3c\x7d\xa9\x81\x8c\x37\x94\
\xbc\xff\xd8\x82\x07\x33\xad\x5a\xa5\xba\xe9\x03\x17\x56\xce\x92\
\x7f\x57\xae\x10\x3d\x3b\x0b\x4b\x95\x2b\x9f\xf3\x12\x10\x57\xeb\
\x4a\xe5\xe4\xcb\xa8\x73\x21\x33\xc4\x7f\xb9\x44\x4e\xd5\x17\x9a\
\xc8\x9c\x4b\x68\xe2\xa7\x60\x61\x65\xa7\xc7\x55\xec\xd8\x24\xb3\
\xa9\x26\x2d\xea\xca\x6b\xd3\x5e\xfc\x6f\xae\x04\x67\x42\xa6\xad\
\xbf\x79\x54\x0e\xa3\xa4\xb4\xda\x0d\xa7\xc3\x28\xd4\x12\xb1\x6d\
\x84\x25\xaa\xb4\x05\xaa\x81\xbe\xcc\x23\x17\xca\x19\x5d\x4e\xf8\
\x75\x64\x02\xf3\xe2\xcc\xd7\xbd\xb6\x43\x95\xd3\xc4\x2f\xd1\xfb\
\x31\xbb\xdb\x2a\x34\xa7\x58\x38\x40\xc4\xb0\x23\x37\xd3\x65\x78\
\x38\xe8\xe8\xcd\xdc\x61\xb9\x1a\x75\x23\x13\x9b\x88\x94\x64\xd3\
\xba\xf2\x86\x26\x9c\x51\x7f\x91\x72\x8c\x5d\xa4\xaa\x3d\x58\xa1\
\x96\x46\x10\xfa\xa0\xd2\xcc\x89\xeb\xc3\x3b\x6a\xe3\x9d\xfc\xab\
\x55\xf2\xe7\x43\x26\x59\x8b\x87\x6d\x0b\x51\x09\x0f\xb2\xcb\x89\
\x97\x9b\xe9\x83\xc5\xac\x98\x28\x66\xdb\x55\x72\x92\xed\xc5\x9e\
\x94\x46\x7e\xf7\x5b\xb4\x83\xa5\x7b\x60\xcb\x06\xde\x1c\xe2\xc3\
\x17\xc8\xa5\x5a\x3d\x56\xf6\x69\x27\x5b\x92\xbd\x4c\xf5\x03\x7b\
\xd8\x95\xb9\xe6\x29\x59\x7a\xee\x52\xb9\xaf\x57\x3b\x3b\x0f\x89\
\x6d\x6b\xaf\x61\xae\xd2\x64\x95\x93\x4d\xfa\x02\x2d\x94\x53\x69\
\x33\x4d\x6a\xc8\x5b\x51\x2f\xe0\x38\xfc\xee\x67\x8c\x79\xc5\xef\
\x6e\x9f\xd0\xae\xd6\x48\x95\xd2\xa5\xbd\xd2\xb4\x77\x2d\x62\x9c\
\xa5\xe4\xc1\x3d\xe9\x00\xb4\x7a\x35\xd6\x44\x31\xb6\xac\xac\xd5\
\x6d\x70\xe4\x0b\x38\xce\xd2\x06\xbe\x68\xb4\xdc\x6d\x5f\x64\xbd\
\xf4\xd3\x9e\xd0\x39\xa2\x5d\xa5\xe4\x9e\x45\x26\xe6\x33\xcf\xce\
\x03\xad\xbf\xbc\xed\x92\x37\x4a\x07\x7d\x89\x79\xe6\x1d\x59\x38\
\x71\xd3\xa7\xb9\x12\xde\x76\xc9\x89\x66\xae\xef\x24\xe0\xc8\x65\
\x7a\xcd\x72\x1a\x38\x1d\x47\xa0\x5d\x5c\xac\x2c\xb2\x92\xe1\x41\
\x2c\x53\xb4\xb0\xd3\xd2\xc8\x4f\x3a\x3a\x0f\xe9\x2a\x1b\x66\xaa\
\x7c\xe8\x12\x5c\xba\x92\x0e\xd0\xda\xb5\x4e\xee\x27\x0f\x8d\x79\
\xe4\xcd\x9c\xe3\x3d\xce\xb9\xd5\x98\xad\xfa\x22\x2a\xae\xd8\x6a\
\x99\x5b\xd9\x62\x96\xab\xd0\x5f\xe6\xef\xca\x9c\x87\xd8\x6a\xb5\
\x4e\xea\xe9\x8b\x4c\xa0\x1b\xe6\x10\x09\xa5\x04\x74\x58\xfe\x91\
\xc6\x4f\xbb\xa1\xca\xbd\x79\x87\x1c\xdb\x74\xfc\x3c\xfb\x3b\x9d\
\xc1\xb6\x2b\xe5\x64\xfd\x6d\x81\x59\x17\xe9\x9b\xc5\x33\x16\x7b\
\xff\x16\x0e\x78\x40\xce\x6a\x2e\xad\x12\x65\xb2\xd7\xaa\xec\x45\
\x74\x83\x1d\xb2\x6c\xd0\x8d\xeb\xcc\xd8\xc7\xde\x31\x23\x7b\xc8\
\x1a\x56\xb4\xd8\x84\x12\x06\x83\xc5\xf1\xac\x74\xd1\x73\x44\xca\
\xd9\xa5\x4d\x35\xb3\x6c\x54\x84\x65\x76\x24\xdf\x53\xb6\x1a\xab\
\x2c\xb4\x52\xd9\xa0\xcc\x75\xf3\xa0\xec\x75\xe1\x32\xb9\xf2\x80\
\xd9\x87\x9a\x1d\xd7\xcb\x6a\xad\x32\xf8\xa2\xcc\xf1\x25\x80\xe6\
\xbe\x3f\x0a\xa5\x86\x60\xb8\xb2\x6a\x39\xf9\x67\xd3\xc9\x8b\x3d\
\x69\x78\x93\x1c\xa4\x1a\x65\xc3\x08\xd9\x8a\x67\x15\xca\x08\xcf\
\x8d\x27\x78\xff\x7a\xd0\x97\xc8\x32\x1f\x49\xa7\x97\x6f\x95\x13\
\xcb\x95\x91\xef\xce\x5c\x28\xaf\x2d\x1d\x27\xff\x23\x62\x00\xcb\
\x7f\x88\x1e\xc0\x52\x20\x22\x09\xb0\x2c\x88\xa8\x02\x2c\x11\x3a\
\xec\x20\xf9\x76\x44\x77\xf9\xe6\xee\xd3\xe4\xde\x37\x6f\x57\xf1\
\xe4\xe3\x48\xa3\x04\x73\x28\x9f\x78\xff\x16\x2c\x82\xa5\xc1\x94\
\xf2\x3f\xcb\x97\x15\xd3\xa9\xa5\x8e\xe6\xc3\xbc\x35\x4b\x2b\x27\
\xca\xff\x1d\x33\x5d\xee\x5b\x3c\x46\x0e\xe9\xde\x42\x86\xa8\xb4\
\x8b\xdf\x0a\x4e\x31\x35\x71\xb0\x99\x3e\x54\x46\x9d\xbe\x40\x4e\
\x5c\x3d\x5b\xd6\xaf\x9c\x24\x5f\x2f\x9f\xe0\x85\x49\x68\x5e\x5f\
\x5f\xd8\x1b\x4b\x58\xa6\xcf\x2c\x31\xc0\xb9\x20\x57\x0d\x28\x28\
\x70\xd3\x20\xef\xaf\x5c\x4e\x3a\x31\xa0\x69\xf5\xe8\xaa\xa5\x33\
\x48\xab\x14\x5e\xd7\x8d\x6d\x49\x45\x88\x1a\xe4\xba\xfe\x56\x43\
\xab\x1c\x55\x6d\x88\x72\x84\xfe\xdf\x99\xb6\xa6\x25\x47\xbb\x60\
\xa6\x2a\xfc\x9c\x02\x07\x37\x65\xd0\xca\x05\x4d\x04\x56\xc5\x2c\
\x65\x15\x5f\x5d\x4d\x3a\xa0\x59\x09\x1a\x07\x4f\xfc\x17\x69\xe4\
\x5c\xbb\x67\xfb\x00\xcc\x2b\x16\xda\x8b\x24\x03\x13\x9e\x6e\x71\
\x19\xe4\x7b\x14\x28\x29\x1c\x69\x52\x59\x97\xbf\x57\x5e\x24\x55\
\x74\x56\xf2\x32\x74\xe3\xce\xc5\x89\x45\x3a\x61\x60\x5e\x2d\xb4\
\x17\x89\x5a\x11\x1c\x8c\xc4\xc2\x74\xb3\xa3\x43\x78\x02\x35\x2a\
\x6a\x48\x14\x38\xaf\x50\x5e\x84\xe9\xe4\xd5\xde\xbf\xb9\xe0\x56\
\x27\x01\xe7\xab\x05\x1d\xdc\x8b\xb0\x38\x12\x77\x28\xd7\x2b\x01\
\xaa\x18\xdc\x69\xbf\xe5\x06\x2f\x51\x28\xde\xa6\x4c\xf2\x27\xca\
\x21\x02\x8f\x30\xe5\xcc\x1a\x2f\x24\xe3\xe0\x32\x2d\x07\x57\x4a\
\x34\x6c\xdb\x25\xfb\x4c\x84\x42\x29\x0d\x07\x6e\x1e\x76\x0e\x38\
\xdb\xff\x4c\x04\xe7\xda\xe4\x56\x6d\x51\x2a\xce\xe9\xcc\x06\x15\
\x8a\x00\x0b\x43\x0b\xf5\x45\x36\x2b\xc3\x0f\x70\xd5\x8d\xea\xe4\
\xdc\xfe\xe8\xb5\xf0\x7f\x84\xac\xb5\x05\xb8\x76\x20\xe6\x80\x49\
\x4a\xaa\x1a\xde\x73\x53\x95\xee\x65\x1d\x78\x46\x81\x7b\x97\x86\
\xc1\x43\x82\x75\x1a\x1f\x45\x1c\x2b\x9d\xc7\x1c\x60\x2c\x71\xdf\
\x6f\xe5\x80\xe2\x41\xff\x13\x17\x0e\x80\xd7\x03\x20\xb0\x0b\x70\
\x6b\xe9\xb8\xff\xf7\xde\xbf\x85\x0f\x1e\xe6\xaa\x54\x5e\xe2\xb6\
\x5b\xf4\xe7\xe0\x4a\xf4\x0d\xff\xd3\x7d\x3f\x42\x99\x4b\x4d\x28\
\x0a\x54\x53\xf2\x40\x1a\x35\xd5\x25\xec\xfe\x17\xa4\x0b\x76\x42\
\xa9\x6d\x53\xd2\x8b\xd1\x39\x10\x44\xc8\xf5\x4a\xdc\x83\x85\xa2\
\x45\xfa\x12\x41\xf0\xe0\xfc\x3e\x9c\x6a\x58\x10\xf7\xc9\x37\x70\
\x41\x22\x11\xe1\x30\x33\x79\x01\xad\xd0\xb9\x3a\x21\x2c\xee\x21\
\x64\xee\x0d\x30\x36\xe0\x32\xeb\x72\x96\x35\xc5\xe1\x08\x40\x28\
\x49\xfd\x94\x3b\x94\xee\x3c\x96\xcc\x17\x5b\x5c\xa2\xc4\x55\xd0\
\x25\x36\x4c\x12\x9f\x51\x04\xa2\x18\xfb\x33\xd2\x8e\x49\x98\x04\
\x4e\x26\x2b\x10\x4c\x57\xb2\x44\x22\xe8\x2a\x4b\x44\x3a\x1a\x6d\
\x14\x08\x08\x90\x08\xb4\x0f\xc0\x48\x8f\xdb\x2c\xab\xe6\xf9\x0c\
\xcb\x6a\x8c\x2f\x84\xb6\x08\xf6\x62\xff\xf3\x3f\x33\x06\x31\xe2\
\x90\x9f\x5c\x74\x03\x48\x60\xc0\x5c\x8b\x61\x02\x60\x35\x83\x8b\
\x78\x00\xa3\xa2\x7b\xe1\x69\x4a\xe0\x34\x06\x41\x3e\x83\x4e\x97\
\x38\x61\x22\x93\xf1\x72\xfc\x86\x27\xeb\xbb\xfe\xff\x79\x06\x56\
\x4b\x86\x60\xb0\xbb\x20\xc2\xd1\x5a\xe9\x6a\x91\x8a\x9f\xf2\x3f\
\xa1\x0b\xdf\x00\x48\x84\x5b\x9a\x41\x89\x82\x4b\xfd\x4f\xa2\xbf\
\x39\xb8\x52\x8b\x82\x0b\xab\x99\x11\x82\x13\xfb\x5f\xfb\x9f\x51\
\x88\x4a\x40\x78\x49\x91\x93\x92\xf1\xe5\xe5\xc5\x7f\xe7\x7f\x86\
\xc7\x1b\x10\x2e\xc9\x70\xa4\xba\xb4\x81\x14\xea\xfa\xff\x30\x83\
\x08\x7f\x07\x89\x5e\x84\x73\x9d\xd3\x32\xd1\x24\x30\xc4\xb9\x10\
\x93\x0e\x27\x28\x71\x5c\x86\x74\x20\x89\xda\x64\x4a\xa0\x21\xd6\
\x53\x32\x22\x33\x81\x8f\x50\xc7\x27\x0c\xbb\x60\x44\x85\xe4\x09\
\x57\x3f\x57\x95\x00\xbd\xd1\x05\xca\x64\xa5\x0c\x12\x59\x62\xf2\
\x85\x64\x91\x42\xc8\x61\x94\x2e\xe0\x12\x47\x42\x83\xe0\x45\x5c\
\xc9\xe1\x79\xed\xc2\x03\x71\xcc\x79\x3a\xb0\x50\x13\xfd\xc4\x79\
\x5f\x53\x32\x7c\xb2\x40\x80\xa5\x80\x19\x01\xf7\x70\x17\x60\x04\
\x52\x0d\x82\xdf\xe9\x7d\x40\xb0\x5a\x39\x0b\xc9\x3c\xff\x33\xf8\
\x1b\xba\x06\xf8\x8b\xff\x19\x5c\xcd\xe6\x56\x38\xcc\x50\xd2\x3e\
\x1c\x09\xca\x14\xfc\x1e\x23\x46\x8c\x18\x31\x62\xc4\xd8\x17\xc1\
\x14\xa5\x25\x33\x79\x41\xfa\xc7\xfd\xd3\x62\x14\x36\x6c\x86\xdf\
\x24\xe5\xf0\x29\xca\x5e\x2b\xb5\x0c\x11\xbc\xf0\x4d\xf2\xe6\x92\
\x1b\xd8\x63\xfa\x9b\x3d\x27\x2e\x98\xc2\x05\x2d\xc0\xba\x6c\x6f\
\x94\xfa\xd9\xf8\x20\x32\x51\xbf\x5e\x26\x9a\x8d\x32\x15\xf2\xbf\
\x3f\x79\x8f\x7f\x62\x7d\xeb\x1c\xa9\xd7\xf8\x97\xc7\x28\x48\x50\
\xdb\x71\xe0\xc9\xde\x26\x75\x35\xb3\xbb\x64\x6f\x96\x71\x9a\xf1\
\xac\x47\x59\xf6\xca\x2d\x72\xdc\x9f\xce\x95\x33\xf8\xdf\x1e\xe3\
\x37\xce\xe1\x5c\x9c\x7e\xe2\x96\x52\xb0\xb0\xdd\x94\xb7\x64\xab\
\x1a\xae\x61\x66\x83\x0c\xd3\x8c\xc7\x11\x7c\x29\xee\x5f\x6d\x1a\
\xc9\xab\x8d\xeb\xc8\xbb\x44\x89\xe3\x98\xfd\xed\x09\x19\x6a\xdd\
\xc8\xf0\x12\x61\x61\x45\x5c\x28\x05\x87\x9c\xae\x6a\xbd\x34\x32\
\xdb\xa5\xa7\xd6\xfe\xc9\x9a\xe9\x84\x4d\x5a\x71\xd9\x2a\xb9\xac\
\x6a\xbb\x1e\xa6\xc1\xb0\x29\xe6\xb8\x19\x72\x13\xc7\xec\x6f\x7a\
\x8e\x3d\x57\xaf\x89\xbb\xae\x02\x84\x11\x6d\x1d\xde\xd2\xd2\x5a\
\x38\x78\x69\x6d\x1f\xa5\x19\x4e\x64\xb1\xe5\xe6\x75\x59\x54\x41\
\xe4\x6b\xd6\x3c\x4d\x79\xf6\x5b\x4c\x35\xc6\xbc\x27\x0b\xf8\x8d\
\x73\xec\xb9\x38\x85\xe9\xb5\xdc\x83\x7b\x79\x77\x8d\x91\x31\x70\
\x60\xb3\x61\xae\xbc\xc0\xef\x03\x94\xd3\xf1\x2d\xc6\x7b\x75\xf2\
\x00\xf9\x53\xcb\x99\xab\x8c\xf5\x31\xfe\xab\x31\xec\x27\x35\xa0\
\xa3\x75\x6d\x5f\x61\x57\x07\xb1\xc6\x6b\x8b\x8d\x2e\xd8\xcc\x0f\
\x95\x95\xd4\x19\x6e\xbf\x83\x1d\x0b\x20\xfa\x02\xc4\x7b\x30\x19\
\x29\x0c\xf6\x78\xf0\xa4\xaa\xae\x9a\xb9\xe3\x75\x6c\x38\x18\x1f\
\x69\xa2\xc2\x71\xcb\x60\x58\x48\x96\x3a\x4a\xb9\xca\x66\xd7\x6f\
\xe5\x38\xce\xb1\xe7\x72\x0d\xd7\x72\x0f\xdf\x43\x71\x8f\xe7\x84\
\xb9\x3b\x7d\xfb\x9e\x4e\x63\x5f\x8c\x01\x99\x7e\x1c\x17\xcc\x47\
\xa4\x8e\x7e\x6f\x90\xfd\xa8\xea\x0e\x6b\xa4\x49\x42\x6e\x96\x26\
\x7a\x7e\x33\xfd\x6c\x9d\xbd\x55\x7a\x68\xe6\x8e\xa5\x1b\x52\x2e\
\x63\xc3\xb3\xb6\x0d\xe5\xc5\xde\x17\xdd\x95\x53\x18\x8e\x83\x6e\
\x78\xdc\x34\xa8\xea\x0f\xf0\x9e\xd4\x35\xcf\x5e\xcb\x3d\xf4\x5e\
\xfe\x3d\xa3\x9f\xe9\xa8\x69\x23\x8d\x36\xad\x9a\x66\x9b\x76\x04\
\x03\x7d\x17\xff\xb5\x4a\x26\xa8\x6d\xbc\x8c\x0d\x41\xcb\x72\xe9\
\x0d\x76\xc3\x91\xb1\x76\xc0\x65\xb5\x8c\xd7\xa5\xb0\x74\x74\x7a\
\x24\xb7\x28\x59\x75\x49\xa6\x6e\xd1\x81\x9a\x5a\xaf\x5d\xd5\x8d\
\xc7\xc9\xc5\x15\x9b\xb6\x8d\x5c\xfb\x44\xe8\xda\xba\xfd\xc7\x9a\
\x13\xe7\xc8\x0d\x9c\x6b\xaf\xe1\x5a\x6f\xdc\x61\x05\x67\xf4\xb3\
\xa0\x9f\x1e\x9b\x36\x04\x07\x4d\xab\x4d\x33\x6b\x49\x9c\x5e\xa3\
\xef\xe4\xbf\x5e\xc9\x02\x4d\x5d\x6b\x1b\x8e\xff\x75\x54\xd2\xe9\
\x6c\x0b\x62\xbd\xcc\xd7\xae\x66\x89\x79\x46\xc5\xd5\xa7\xb4\xa6\
\xa7\x4a\xb6\x00\x84\x2c\xed\x7b\x5f\x16\x56\x2a\x2d\x5f\x8c\x7e\
\xe8\xb5\x3d\x0a\xc3\x71\xf2\xf6\x2f\x19\xe0\x7f\x36\xff\xa7\xcf\
\xe3\x1a\x77\x7d\xd4\xbd\x13\x91\x34\x6e\xb7\x2b\x8d\xe6\xfb\x69\
\xef\x6c\xdf\xa5\xa4\xea\x35\x34\x6f\x12\xaf\xac\xab\x0a\x5a\x17\
\xad\x61\xe3\x95\x0b\x8c\xd6\xd8\x4f\x1f\x90\x23\xcb\x97\x95\xef\
\x6b\x74\xe8\x69\x26\x3e\xf1\x77\x33\x7e\xed\x07\x69\x71\xe2\x13\
\x9f\xd9\x41\x3c\xaa\x30\x1c\x27\x6d\xfb\x3c\xf2\xda\x64\x9c\xb4\
\xe5\x1f\xa6\xc1\xa0\xf1\xac\x31\xf8\xf9\xd5\x5b\xe5\x78\xd2\x4a\
\x9a\x6d\xda\x79\x07\xde\x85\x02\x29\xa9\x5d\x97\x93\x90\x54\x41\
\x6b\xaa\xfd\x76\x2f\xed\x0e\x26\x68\xd7\x80\x76\x8d\x02\x67\x6b\
\xee\xd2\x31\x72\xa7\x94\x2e\x67\x97\xeb\xe7\x95\xc9\x85\xc5\x09\
\x1b\x3f\x55\x61\xa0\xa2\x19\xd7\x4b\x1e\xb5\x6b\xba\x3d\x5d\x86\
\x31\x88\xb4\x4e\xb0\x69\xe7\x1d\xf6\x05\x49\xcd\x4a\x35\xde\x3a\
\x0c\xba\xae\x36\xfa\x72\xfd\xec\x4b\xb2\x56\x89\xb5\x49\xde\x82\
\xf6\x95\x9f\xfd\x41\x8e\xac\x58\x56\xbe\xaa\xd3\x73\x88\x99\x9d\
\x61\x40\xed\x74\x38\xfb\x1d\x63\x9a\x8c\x9d\x6d\x34\x77\x7f\x7c\
\xf7\x1e\x39\x46\xd3\xe7\x2d\xae\xf7\x56\x76\xce\xf1\x0b\xa2\x1f\
\x69\x26\xed\xf6\x1d\x4a\x7a\x61\x38\xd0\xe7\x06\x0a\xa6\x46\xf6\
\x63\xd2\xdc\x2e\xd3\x63\x05\xcf\x46\x1d\x48\xe9\x16\x90\x88\x68\
\x35\x1f\xc9\xdc\x59\x43\x65\x87\x94\x2a\x67\x86\xde\xb6\xc5\xd3\
\x31\x22\x32\x34\x23\x6a\x0b\x64\xec\x91\x32\x15\xd0\x57\xbe\xff\
\xe8\x3e\x59\xad\xe3\x85\x27\x8d\x91\x06\x4f\xc0\x18\x61\xd3\x46\
\x1a\x59\x47\xef\x17\x44\x89\x1c\x37\x52\x81\x15\x83\xd1\x9a\x59\
\xb9\x44\x04\x84\xe7\xa4\xd5\xa5\x87\xc8\xa1\x93\xfa\xcb\xd7\xac\
\x31\x23\x68\xf3\x8a\x09\x76\x9d\x99\xb9\xed\x24\xf9\x6d\x8d\x4a\
\xf2\x69\xed\x6e\x03\xec\x92\xde\xc8\x4c\x4e\x81\xe8\x27\xf5\x07\
\x8c\x31\x15\x4a\xcb\x37\x9b\x2f\x97\x73\xce\x5c\x28\x2f\xf3\x8c\
\x25\xfa\xac\x39\xc3\xc5\x4c\xe8\x2b\x66\x4c\x4f\x79\xd1\x7c\x28\
\x2d\xbf\x25\x4c\x0b\x9b\x70\x90\xc6\x92\x2e\xe6\xa6\x81\x52\x15\
\x2a\x48\x6b\x1d\x3c\xff\x53\xa1\x9c\x98\x1a\x55\xc4\x34\xad\x27\
\xe6\xb0\xc9\x72\x92\xd9\x22\xa3\xfd\x2e\x83\x9a\xbb\x52\x07\xd6\
\x43\x16\x8e\x96\xbb\x9b\x4f\x5f\x61\xa3\xa9\x44\x65\x78\x32\xd2\
\xc2\xda\x2d\x3f\xd5\x0c\xef\x2e\x6b\xb9\x17\xf7\xd4\x96\xc8\x18\
\x36\x57\xa5\xa8\xd1\xcf\x5d\x2b\xc3\x3a\x36\x97\x4d\x35\xab\x7a\
\xab\x23\x59\x34\x58\xaa\x54\x64\xec\xe7\x7d\x16\x67\x2a\xad\xdd\
\x09\x96\x2e\x6d\x57\xf3\xcb\xa7\x84\xa9\xc1\x78\x48\x94\x98\x2d\
\xbb\x8d\x87\x44\x85\xe1\xbc\xfc\x0c\xf8\x6e\xbf\x83\xff\x6e\x94\
\x55\xdc\x53\xef\x8f\x4e\x33\x45\x3f\x7b\xeb\xf7\x26\x76\x42\x4b\
\xbb\xa5\xd6\xad\xad\x4f\x3b\xab\x0e\x5c\xfa\xf0\x8d\xdf\x27\xcd\
\x2d\xf4\xbf\x84\x87\xce\x29\x08\xa5\xb7\x88\x9f\xf1\x05\x4d\x9e\
\xd0\x54\x44\xdc\xd8\xe8\xed\x55\xa2\xc7\x96\x68\x0d\x5e\xde\xa1\
\x99\x3c\xcf\xee\x31\x51\x19\xed\x48\x2b\xc8\xab\xf5\x0c\xb8\xe6\
\x11\xd3\xb4\x96\xbc\x81\xb9\x9e\xd6\x67\x36\xc9\x2c\xd5\xde\x87\
\xe8\x33\x0e\xf0\xc3\x62\x85\xc7\x09\xdc\xe0\x5d\x5a\x89\x04\xbd\
\x4f\x01\x4f\x40\xf7\x72\xbd\x38\xe0\xe0\x17\x48\x96\xdd\xf0\x94\
\x65\xbd\x5e\x3c\x97\x85\xd4\xe4\xcd\x97\xc9\x29\xe5\xea\x34\xb6\
\x12\x51\x54\x26\xb3\xad\x7e\xed\x6e\x03\x4d\xbd\x6a\xf2\x41\xeb\
\x06\xf2\x4a\xb5\xb6\x5d\xfd\xdf\xb2\x73\x9d\xe7\xce\xad\xd6\xbe\
\xa7\x61\x65\xba\x6d\x25\x3c\x43\x5b\xa2\x6d\x91\x5b\xa5\xa1\xb5\
\x73\x45\x6b\xe1\x7f\x56\xba\x95\x20\x2e\xbe\x6c\x89\x45\x6f\xa5\
\x2b\x88\x5d\x1c\x08\xc2\x16\x86\xb1\x51\xb9\x6a\x64\xb3\x15\x11\
\x52\x97\x37\x7e\x2c\x63\x57\xcd\x1a\xe5\xe5\xd3\x91\x0f\xbc\x90\
\x2b\x63\x09\xc8\xd3\xef\x8a\x3f\xda\x7b\x9e\x30\x47\x6e\x64\x87\
\x1f\xc6\x06\x4b\xd5\x6b\xae\x3c\xdc\x5b\xe4\x47\xab\x0a\xef\x47\
\x33\x7e\xdd\x87\xa6\x9c\xc8\x37\x84\xd3\xe0\x19\x4a\xe2\x44\x8d\
\x34\x6b\xa5\xbd\x56\x88\x9a\x76\x20\x8f\x96\xa6\x5c\x68\x0e\x98\
\x68\x3b\xc5\x62\x8f\x91\x4a\xf7\x12\xf8\xb5\xee\x01\xfa\x6d\x1b\
\xf3\x70\x87\xb0\x19\x68\x3f\x2d\x18\x2f\x80\xa3\x0e\xbc\x0b\x46\
\xcb\x3d\x4d\x27\x2f\xb1\x05\x60\x0b\x42\xbb\x25\x5a\x40\xc3\x9a\
\xf2\xce\x37\x6b\x6c\x88\x27\x6b\xa7\xa2\x6b\x23\x02\x19\xb4\xff\
\xfb\xf6\x2e\x0a\xa8\x6d\x63\x79\xa9\x52\x93\x56\x66\xfa\xcb\xff\
\x33\x33\x5e\xc9\xb6\xf7\x68\x7f\xc8\x99\x66\x58\x77\x59\xe7\x5f\
\x8f\x1e\x34\x4d\x5b\x4c\x3f\x6d\x25\x79\x29\x7e\xb4\x1e\xf7\x3e\
\x04\x04\x2f\x51\xc0\xe1\xd8\x25\xfe\x38\x0e\x84\x61\x5b\x87\x9b\
\x78\xda\x66\x03\xc1\xb0\x37\x30\x76\xa3\xe5\xfe\x40\x9e\x3d\x5d\
\x33\xb1\xd7\xf9\xb7\xd9\xfb\x5c\xb0\x4c\xae\xb0\xfd\x3f\x03\xbd\
\x17\xaa\x0a\xcb\xef\x41\xca\x11\x36\x43\x3d\x85\x93\x60\x7d\x07\
\xf1\x9b\x7f\xce\x0a\x34\xef\xdb\x4e\xf1\xd6\x13\x77\x59\x7d\x45\
\xf6\xdc\xf7\xf4\xc9\x3c\x5e\x5b\x93\x2d\x3c\x4c\xf4\xcc\xc1\xfb\
\xb6\x2a\x82\x03\x24\x11\x77\x83\x85\xf2\x11\x07\x4a\x0a\xdc\x9a\
\xb0\x3d\xba\x29\x07\xab\x8f\xe8\xd8\x91\xfd\xb8\xf6\xdf\xeb\xa5\
\x97\x9f\xb9\x8b\x08\x54\xd3\xa1\xb9\x3c\x2f\xa5\xcb\x9a\x96\xf5\
\xe4\x65\x14\x37\xf3\xaa\x1c\x6c\xde\x97\x59\x6f\xdd\x29\x47\xef\
\xba\x4d\x56\xde\x73\xa6\x1c\x7e\xfa\x02\x39\x62\xd1\x38\x59\x70\
\x60\x33\x39\xa3\x71\x6d\x39\x05\xaa\x00\x70\xe6\xcc\x61\xb2\x8a\
\xdf\xae\x39\x5a\x4e\xd9\x76\x8d\x1c\xfd\xce\x5d\x72\xd4\x07\xf7\
\xcb\xb1\xe6\x75\x2d\xa4\xd7\x64\x41\xe7\x16\xac\x98\x2d\x4d\x08\
\xaf\xb7\xad\x11\x91\x89\x2c\xcf\xd2\xdb\xdf\x9a\xe6\xf3\x36\x8f\
\xb0\x2c\xc0\x15\x8a\x5b\x8d\x5b\xac\xe1\x56\xb7\x43\xd6\x41\x47\
\xa2\x5d\x6d\xa9\x5a\xb7\xba\x2c\x6d\xd5\x50\x1e\xec\xdf\x41\x3e\
\x98\xdc\xdf\xdb\x95\x67\xde\x08\x31\xe3\xfa\x88\x39\x64\xb2\x18\
\x82\x8a\x2c\x19\xe7\x1d\x9b\x3c\x40\xcc\xe0\x2e\x62\x3a\xb6\x10\
\xd3\xa8\x8e\x98\x6a\x95\xc4\x94\x57\x1d\x86\xc0\x55\xaa\x37\x58\
\xf2\x3f\xc7\xf8\x8d\x73\x38\x97\x6b\xb8\x96\x7b\x70\x2f\x14\x42\
\x42\x97\x11\x8e\x6c\x8a\x1e\x5f\xaa\xc7\x50\x48\x87\x75\x95\x1f\
\x3b\xb5\x90\x9d\xf5\x6b\xca\x55\x55\xb2\xf2\x8c\x75\x70\xae\x92\
\xf7\x63\x61\x48\xb1\x07\x21\x64\x48\xac\x5b\xed\xc2\xd2\x67\xd6\
\x86\xbb\x42\xb2\x24\x03\xcb\x96\xf1\x32\xaf\x5d\x53\x79\x71\xfe\
\x28\xb9\xee\xf2\x23\xe4\xac\xb5\x97\xc8\xb1\x7f\x7f\x40\x16\x9a\
\x37\x65\x9c\x0e\xec\x63\xb4\x9b\x1a\xac\x35\xb9\xbb\x15\x89\x09\
\x70\xe7\x26\x8b\xa2\x07\x5f\x0b\xdb\x1d\xfa\x93\x62\x36\xb6\x95\
\x5e\x8b\x34\xa5\x22\xee\x30\x15\x02\xc6\x9b\x17\xe4\x20\xed\x9a\
\x0e\xfa\xf0\x3e\x59\xfe\xd0\xf9\x72\xe2\xe9\x0b\xe5\x92\xe1\xdd\
\xe4\xb1\x06\x35\xe5\x6b\x94\xc3\x40\x74\xb2\x20\x91\xb4\x58\x84\
\x3f\x4e\x89\xbe\xe2\x8e\x17\x6a\x44\xa6\x82\x00\xdb\x40\x05\x5f\
\x04\xb2\xe4\x9b\x35\x21\x6c\x4f\x9d\x03\x9b\x71\xda\x3d\xd0\x75\
\x69\xa6\x55\x67\x22\x8b\x6e\x43\xbf\xb7\xb0\xdd\x07\xdd\x19\x63\
\x0c\xdd\x88\x6f\xca\x48\x56\x10\x61\xd8\xfb\x73\xcd\xef\xa5\x3c\
\xb1\x54\x6d\x81\xe2\xe1\x48\xe1\x6e\x53\xa9\x4e\x25\x3b\x9e\xa5\
\xc7\xea\x5b\x5d\x44\xcf\x4b\x30\x7e\xb0\x30\xce\x05\x95\x09\xd3\
\x6d\x1b\x5c\x6c\xe1\x12\x9a\xd6\x62\xb4\x40\xe6\x95\xc9\x61\x9a\
\x05\x90\x17\x72\x3d\x83\x56\xe6\xb5\xb4\x4c\x9f\xe3\x16\x84\xbf\
\x67\xbf\x15\x63\x7c\xab\x24\xa1\x2c\xe5\x4a\x07\x2c\x03\x23\xde\
\x06\x11\x52\xd8\x27\x29\x2f\xb2\x8a\x8b\x7d\xc9\x52\xd9\x31\x9b\
\xe5\x66\x64\x20\x5b\xac\xd3\x85\xb2\xd8\xd6\x6d\xdb\x92\x09\x28\
\x3c\x57\xf1\xdc\xaa\xb0\x62\x0b\xb6\x91\x24\xa1\xe9\xec\x92\xcf\
\x9a\x57\x36\x39\x0c\xa2\xad\xb2\x6f\x04\x59\x3a\x1a\x04\x8b\x83\
\x73\xf6\x2e\x4c\x00\x06\x69\xb7\x1e\x30\x2f\x3c\xe0\x7f\x26\x03\
\x71\x84\x5c\x81\x84\xf7\x6e\x2b\x76\xb0\xa1\x3f\x7d\x06\x03\x20\
\x25\x03\xcb\x54\xc3\x05\x92\x2a\x58\xbd\x1d\xb5\xa5\x4e\x65\x25\
\x51\x6e\x68\x41\x8c\x5f\x6e\x47\x77\xc0\xca\xd5\x8b\x94\xac\xc2\
\x76\xc4\xe8\xc9\xe7\x9b\xca\xbc\xc0\xee\x66\xbc\x5f\x9e\xdb\xcd\
\x17\x17\xb8\x80\x32\xe9\xac\x97\x67\x31\x29\xcb\x7e\xdd\x5e\x75\
\x41\x44\x2d\xf5\x75\xb8\x4f\x19\x6e\x21\x74\x97\x48\x43\xe9\x82\
\x2d\xfa\xf3\xda\x41\x0a\xc1\xc4\x55\x38\xb7\x67\x52\x89\x80\x5b\
\xc8\x9d\xea\x5e\x79\x28\x91\xc1\x8c\x45\x9f\x09\xcf\x4b\x4c\x54\
\x12\xa8\x8a\x4d\xa2\x6e\x54\x72\xef\x8f\x95\x1c\x0f\x82\xc2\x75\
\x3b\x2b\x82\x63\x95\x6c\x1a\xc5\x35\x41\x12\x94\x81\x8d\xa9\x82\
\x08\xef\xba\x18\x84\x0b\x24\x04\xd3\x0d\x57\xb3\xd7\x11\x34\x33\
\x10\x1f\x21\x19\x9e\xf5\x3f\xa3\x90\x97\xf4\x43\xeb\x09\x6f\x3a\
\x8a\x06\xcd\x46\x71\x99\x20\x1c\x01\xc0\x21\x58\x18\x04\x99\x28\
\xb1\x08\xc6\x0b\x0a\x6f\xf0\xeb\x80\x32\xc9\x0e\x7b\x6c\x46\x47\
\x14\x8f\x60\xed\xce\x0b\x8c\x07\xd4\xfe\x20\x88\xac\xe6\x36\x4f\
\x65\x4f\x51\x9e\x7d\x8f\x92\x35\xf4\xb4\xa8\x60\xfc\x53\x7e\xc3\
\xc4\xee\x40\x28\xc5\xf0\xa6\x78\xe8\x21\xee\x1d\x82\x41\x00\x4b\
\x2c\x78\x09\xf7\x42\xc1\xc1\xd5\xe1\x53\x25\x05\x41\xc6\x3e\x14\
\x60\x78\x8b\x5e\xe2\x53\xd0\x4d\x05\xcf\x21\xa3\xc9\x64\x37\xb0\
\xb3\xb1\x65\x7d\xef\x5f\x0b\x62\x5b\x84\x03\x42\x05\x5b\x14\xd7\
\x85\xbb\x2d\xd7\x1d\x51\xd8\x2e\xdd\x30\x1c\x98\xad\x44\x83\x01\
\x3b\xd8\x5a\x88\xd4\x08\xc8\x30\xf7\x7f\xa6\xc0\x7c\xe1\x62\xd4\
\xb0\xc5\xa4\xeb\xae\x68\x75\x98\x6d\x4e\x54\x3a\x45\x95\x81\x9b\
\xe7\x33\x0e\x61\x02\x21\x60\x10\xe9\x72\xe2\x2e\x5d\xed\x5a\xe5\
\xfb\x4a\x37\x31\xb5\x4f\x6f\x4e\x48\x2c\x43\x17\xc7\x0d\xb2\x95\
\x6b\x54\xf0\xd3\xbd\x01\xb6\xcb\x74\xe9\x82\xe1\xae\x70\x9f\x07\
\x7b\xac\x07\x33\x80\xae\x23\x97\xbd\xab\x08\x80\x9e\xc2\xb3\x5d\
\x6b\x60\x67\xe6\x4c\x44\xe6\x7d\x0e\x88\xa1\x2e\x53\x1c\x99\x99\
\x63\xb6\x31\x99\x1e\x92\x0e\x88\xc6\x83\x22\x19\x7c\x06\x64\x5a\
\x36\x6a\x33\xd9\x18\x01\xa0\x87\x30\xf8\x86\x33\x2f\x11\x31\xf7\
\x47\x1d\x0f\x93\xc8\x46\x0c\xda\xf9\x0a\x27\x16\x63\x37\x98\xcd\
\xa3\x36\x13\x59\xff\x71\x25\xc1\xec\x88\x0b\x4a\xd4\x56\x8c\x86\
\xc4\xbc\x62\xa3\x6a\x8c\x87\x08\x0c\x31\x62\xc4\x88\xe1\xa3\xab\
\x12\x93\x38\xce\x6f\x79\x91\x73\x1b\x29\xd3\xc1\x56\x65\x32\xb3\
\x4a\x14\x18\x27\xc2\x42\x00\x0e\x6e\x28\x90\x18\x22\x21\xd2\x1d\
\x74\xdf\x51\x40\x51\x4c\x93\x01\x8d\xdf\x81\xf9\x15\x9e\xc3\x9e\
\x1b\x0e\xe8\x59\x05\x25\x7c\x64\x0c\x4c\x0a\xe1\x20\xee\x89\xc0\
\x00\x9d\x28\xa8\x69\x22\x60\x5c\x44\x02\xc2\x76\x14\x45\x14\xb8\
\x28\x30\x09\x45\x86\x39\x13\x0c\x71\x44\x83\xf1\x13\x31\xc7\x07\
\x37\x74\x40\xa9\x64\x8a\x36\x19\xd0\xd2\xb9\xa7\x9b\x43\xe7\x7f\
\x87\x26\xca\xe0\xf7\xbd\x06\x0a\x84\x8c\x26\x6a\x26\xfa\x43\x22\
\xa2\x31\x33\x71\x95\x6e\x81\x30\x60\x13\x8b\x31\xbf\x60\x76\x11\
\x93\x0b\x81\x5e\x97\x28\x11\x06\xd8\x15\x80\xef\xf8\x1c\x33\xc9\
\x15\x55\x20\x64\x32\x31\x54\x6f\x49\x93\xd8\xc6\x30\x07\x15\x39\
\x98\x54\x22\xd1\xa9\x10\x73\x7c\xb2\x50\xa3\x48\x44\x9c\x87\xf8\
\xea\x5a\x00\x73\x13\xc1\xef\x90\xb9\x16\x17\x54\x33\x08\x26\xa3\
\xd0\x31\xc8\x40\xcc\x25\x6e\x27\x03\x40\xb7\x84\x4e\xc3\xe4\x13\
\xfc\x57\xe8\x3b\x26\x77\xb7\x8d\x43\x10\xa4\xa7\x93\xf7\xaf\x75\
\xf9\x61\x8a\x80\x34\x45\x11\x47\x0e\x5a\x26\xc0\x88\xb9\x57\x1c\
\xe9\xe8\x52\x48\xb4\x0b\x91\xea\x80\x73\x75\x2a\x3b\xea\x04\xc1\
\x34\x2e\xf7\xca\x0b\xd8\xa4\xc2\x01\x75\xc3\x58\xad\x24\xf0\x34\
\x60\xe6\x10\x9b\x15\x7d\x3c\x44\x6c\xa6\xbb\x72\x13\x62\x41\xba\
\x2d\x2b\xa2\xc0\x32\x85\x45\xde\xbf\x91\x20\x96\x2e\x9b\xf0\xec\
\x35\xb0\xaf\x53\xa2\x79\x73\xa6\x36\xab\x7a\xff\xe6\x02\x2f\x5d\
\xc3\xfb\x77\x0f\x30\x57\x4e\x81\x30\x26\x61\x42\x49\x44\xba\xbe\
\x0f\x94\xc9\x80\x13\x44\x94\x55\x19\x10\x2c\x9b\xe7\x60\x15\x08\
\x06\x14\x76\x48\x34\xff\xc2\x2e\x15\x74\x7b\x4e\x18\xa0\xc5\x63\
\xc5\xe6\x3b\x02\x01\xc6\x48\x17\xf2\xb6\xc8\x41\xbf\xeb\x06\x54\
\xdc\x46\x79\x39\xa6\x6b\x1d\x79\xe1\xe0\x77\x77\x2c\xd9\xb6\x02\
\x98\xcc\x5d\x0d\xa5\xc5\x51\x78\x0e\x78\x40\x06\xfd\xa5\xd8\x7c\
\x28\xd9\xbd\xc8\xe8\xcb\x94\x38\x37\x04\xbb\xb7\xcf\x95\xec\x3d\
\x87\xc3\x03\x93\x58\xcc\xcf\x90\xae\x5c\xfb\xd1\x05\x40\xe5\x61\
\x9b\x11\x9e\x45\x37\xe4\x38\x5f\xc9\x75\xe4\x43\xf0\x38\xfe\x04\
\xb4\xbc\xbd\xda\x52\xa2\x40\xf3\x75\x13\x44\xa9\x00\xc9\x85\x17\
\xc4\x34\x0e\xdc\x32\x06\x07\x32\x80\x42\x0d\x82\x96\x72\x83\xf7\
\xef\x1e\x40\x53\xe7\x7a\x5a\x0a\xdb\x8b\x52\x7b\x99\x78\x72\x70\
\x05\x12\x04\xe3\x00\xd7\x9c\x66\xbf\x25\x87\x73\x03\x72\xe9\xdd\
\xeb\x20\x74\x36\xf3\xd4\x99\x72\xb6\xd2\x81\xc1\x8f\x2e\x20\x0c\
\xe6\x20\x82\x33\x87\x51\xee\x36\x48\x4a\xd4\xd2\x30\x68\x4d\x4c\
\x2e\x25\x02\x63\x48\xa2\x4d\xa4\x1c\x88\xa6\x8d\x10\x11\xdc\x74\
\x2a\x55\x32\x71\xb6\x57\xa4\xac\x18\x31\x62\xc4\x88\x11\x23\x46\
\x8c\x18\x31\x62\xc4\x88\x11\x23\x46\x0c\x85\x5d\x2f\xe8\xd6\x0c\
\x3a\xfa\xc7\xfc\x53\x62\x14\x15\x6c\xe6\xb3\xec\xd9\x8b\x7a\x9a\
\x65\x1e\x91\x4a\x7c\xfa\xdf\x4b\x7e\x7c\xdd\x92\x04\x56\xd0\x92\
\xf1\xca\x6a\xd9\x6c\x47\x41\xb0\x7d\x02\x23\xf3\xe9\x6d\x4f\x51\
\xcd\x16\x8c\x9e\xe7\x5f\x12\xa3\x30\x90\xd3\x3d\xdd\xa6\x2d\xc1\
\xdb\x59\xa7\x05\xf1\x49\xb4\x30\x7a\xe9\x27\xb1\x4e\xf8\x24\x5e\
\x49\x0b\xfb\xbb\x9e\x17\xb7\x94\x42\x84\x2d\x10\xba\x23\x02\x0c\
\xac\x93\x96\x36\x1a\xdc\x46\x19\x4d\x8c\x14\x3d\x3e\xd5\xc6\x4a\
\xe1\x3b\xc7\xf5\x77\xce\xb3\xe7\xe7\xbd\x02\x2b\x46\x26\xf0\xc7\
\x0d\x1b\xe8\xcc\x46\x0d\x25\xda\x34\x1b\xb7\x10\xb0\xc6\x0b\x20\
\x43\x38\x71\x36\x72\x19\xcb\xef\xf6\x3c\xce\x8f\x5b\x49\xc1\xc3\
\x97\x9e\x88\xbe\x50\x25\x7b\xab\xb6\x8e\x27\x64\xa0\x66\xfe\x74\
\xfd\x5c\x6c\xb6\xcb\xb2\x8b\x97\xcb\xe5\x84\x06\xe7\xbb\x7f\x7c\
\x20\xe7\x71\xbe\xbd\x2e\x96\xbc\x0a\x16\x74\x3b\x39\xb1\xb5\xa2\
\x36\x75\x29\x27\xdf\x46\x6e\xe4\xe2\xed\xd8\x16\x6f\xe4\x52\xd0\
\xf0\xc7\x0f\x2f\x2e\xe3\x16\xe9\xae\x03\x78\x4e\x5c\xc6\x35\x17\
\xc9\xe9\x9c\x72\xf3\xf1\x72\x11\xdf\xfd\xe3\x93\x38\x2f\xa7\xdb\
\x8a\x5b\x48\xc1\xc2\x8a\xba\x44\x04\x62\xa7\x9c\x2d\xd2\x5f\x39\
\x8d\x71\x43\x33\x7b\xc5\xf8\x3e\xf2\x88\x94\x2a\x6d\x06\x77\x92\
\x8d\x6c\xf2\x12\xef\xac\x53\x04\xd0\x8c\x4f\x18\x28\xb3\x4e\x25\
\xf9\x5b\xd7\x93\xae\x32\x59\x22\x5f\x9a\x5d\x76\x0c\x59\xe6\xff\
\x3e\xc2\x0f\xbf\x44\xb8\xf0\x64\x0e\x0f\x31\xd2\x81\xed\xae\xd0\
\x3d\xd6\x49\x3d\x55\xfc\xba\x68\xcd\xcf\xd9\xf6\xe8\xbb\xbf\x78\
\x4e\x6f\xf3\x3e\x36\xa6\x54\xa5\x1a\xe6\xe9\xdf\xc8\x6a\x8e\xdb\
\xdf\x39\x8f\xf3\xb9\x0e\x45\x31\xee\xb6\x0a\x06\xb6\xbb\x0a\x46\
\x2e\x65\x47\x4f\x3f\x72\xe9\x11\x53\xe4\xb7\xec\xa6\xc3\x4e\x3b\
\x2d\x66\xac\x34\x33\x87\xca\xef\x39\xce\xef\xfe\x79\x7d\xb9\xce\
\x9a\x56\xe2\x6e\x2b\xff\xf0\x07\xf3\xb2\x9a\xa9\x91\x1b\x4a\xb6\
\xaa\x2f\xaf\xf6\xbe\xf0\x0e\x1b\x46\x76\xd0\x4d\xeb\x77\xef\x3d\
\xe5\x6f\x2e\x69\xcf\x8f\x37\x97\x2c\x38\xd8\x02\x21\x3c\x1f\xa1\
\xf9\xd8\xdf\x76\x6b\x20\x94\xec\x47\x36\x12\x8f\xdd\xbf\x90\x02\
\x71\xf1\xdd\x35\xe3\xbd\x7d\xa7\x38\xcf\x3b\xbf\x23\xd7\x73\x9f\
\xb8\x40\xf2\x09\xb4\x6c\x1b\xc8\xd2\xed\xf0\xb9\xde\x0b\x25\x8b\
\x78\x7b\xf5\x51\x72\x69\xa5\x96\x1d\x73\x36\x06\xb3\x9b\x81\xf5\
\x1b\x63\x8e\x9e\x2e\x37\xfb\xe2\x6f\xbc\xdb\x67\x32\xd8\xda\xee\
\x88\x19\x04\xba\x7d\x03\x13\x11\x65\x90\xee\x86\x80\xf9\xc1\x20\
\xfd\x9a\xe1\xfd\x0f\x94\xcd\x1d\x8e\xbc\x20\xd7\xae\x09\x3d\xcf\
\xbd\xd5\x74\x68\x26\xcf\xd9\x78\xbc\x5e\x20\xe5\x99\x5c\x67\xaf\
\xe7\x3e\x28\x89\x51\xcf\x09\x72\x77\xda\x72\xd2\xeb\xbf\xc2\xbe\
\x81\x9c\x17\xd3\x41\x95\x3d\xce\x91\x78\xec\x66\x91\x90\xda\x9f\
\x88\xaa\x3f\xd8\x48\xa1\xde\x7e\xe9\x5d\x72\xba\xab\x5c\x5b\xae\
\xe6\xde\xa9\x6d\xf2\x93\xfe\xee\x6c\xbb\xb5\x76\xaf\xdb\xd2\xeb\
\xed\x7d\xf4\x7e\xf6\xbe\x51\xcf\x73\x74\x69\x43\x32\x63\x4f\x76\
\xd2\xbd\xaf\x14\x8c\x7d\x11\xad\x6d\xf6\xe5\xb0\x43\xad\xb5\x21\
\x5f\xeb\x69\x26\x35\xd4\x4c\xca\x73\x63\x49\x3d\xb7\xa5\x9e\xd7\
\x41\xaf\x1d\xa4\xc4\xa2\x6b\xbb\xab\x1d\xd7\xcb\xea\x52\x15\xab\
\xdb\x9d\x73\x82\x05\x42\xf7\xc5\xfe\x86\x01\xad\x7d\x11\xd7\x71\
\x3d\xf7\xb1\xf7\xcb\x6b\x53\x49\xd2\x45\xfa\x48\xa7\x67\xe6\xaf\
\xe2\xc2\x8f\x97\xe8\x42\xf1\x6b\x55\xe9\xec\xb7\xb5\x30\xb6\x4b\
\x4d\x7d\xc9\xa6\xfa\x72\xed\xad\xa5\x76\x83\xf4\xb6\x22\xe9\xb6\
\x24\x24\x96\xbb\x67\x44\xf4\xe2\xb9\x53\xdb\x55\xe9\xfb\x65\x83\
\x1c\x32\x65\x90\xfc\xb1\xc5\x74\x15\x73\xc3\x7b\x86\x68\xf7\xd5\
\xe1\xf0\x73\xcc\x40\xb4\x76\x06\x77\x6f\x07\x04\xae\xf3\x62\xc4\
\x73\x3f\xee\x1b\xf5\x3c\x47\xd2\xa5\xe9\xb3\x16\x63\x4d\xaf\x4d\
\x37\xe9\xe7\x3d\xbc\x6e\xad\x64\x16\x8a\x2d\x10\xfa\xec\xe7\xfd\
\xf9\x0b\x26\x90\xc8\x14\xb6\xce\x63\x77\x1b\x76\x25\xf0\x36\xe1\
\x8a\xde\x75\x13\x3e\x21\x33\xf4\x73\xb6\x9e\xcb\xa6\x61\xde\x56\
\x7b\x2a\xd6\xaa\x78\xfb\xde\xa0\x1b\xd7\xe6\x2e\x0c\x9f\x6c\xbf\
\x57\xa9\xb4\x7c\x6e\xb5\x76\xaf\x95\x2c\xf5\xaf\xc7\x34\xcf\xfd\
\xa2\x9f\x05\x49\x0f\xe9\xf2\x76\xdf\x61\x8b\x3f\x02\xfd\xf7\x22\
\xfd\xbc\x87\x3f\x06\x95\xd8\x02\x29\x6d\x3e\x10\x76\x65\xc3\xc0\
\xc7\x8e\x6c\x63\xec\xae\x36\x9b\x35\x73\xd8\x5e\xc2\x53\xde\x96\
\xe4\x45\xbd\x0e\xb3\xfa\x72\x5b\x18\xec\x11\xf2\xa4\xbf\x29\xb1\
\x4a\x55\x51\x05\x32\xe7\x1d\x7d\x72\xb9\x4a\x66\xd7\xed\x72\x1c\
\xe7\xdb\x42\xf1\xc4\xe0\xa5\x51\xf7\x0f\x93\x74\xd9\xf4\x69\x3a\
\x6d\x7a\x49\xb7\xa6\xdf\xbe\x07\xef\xa3\xef\xe5\xbd\x61\x09\x83\
\x1d\x0c\x19\x37\x36\x49\x73\x7d\x49\x3b\x7f\xf1\x0b\x85\xc0\x6e\
\x07\xc1\x2d\x4e\xf3\xa2\xdb\x36\x15\x6a\x61\x9c\x38\x5b\x6e\x40\
\x3b\x4f\x54\x20\x74\x63\xcd\xa7\xaf\x30\xb3\x54\x6b\xcf\xb5\xed\
\x2a\x8c\xba\x7f\x62\x2e\x25\xbd\xa4\xdb\x4f\x3f\xdb\xe8\x55\xe1\
\xbd\xfc\x57\x2c\x39\xb0\xdd\x95\x26\xdc\x4a\x35\x9b\xa4\x95\xf6\
\xc3\x43\xf4\x73\x36\xfb\xe0\x76\x6f\x2d\x4f\x56\xce\x92\x2f\xab\
\x54\x94\x7f\x55\x2e\xa7\x2c\x9f\x1e\x35\x37\x7e\xe8\x75\x7e\xf2\
\x3d\xa9\x06\xdd\xb8\xce\x68\xbf\xf2\x53\xd4\xf5\x49\xa9\xe9\x21\
\x5d\xca\x2f\x5b\x35\x92\x57\xcd\x0e\x7f\x16\xd2\x4b\x7f\x2b\x3f\
\x3e\x7c\xc9\x9c\xf0\xd2\x44\x97\xc9\x7e\x58\x0b\xc4\x9b\xb9\x43\
\xca\xf1\x74\x08\xad\x7d\xf3\x47\xda\xe5\x6a\x66\xf0\x2d\x1b\xec\
\x36\x44\x51\xfb\xd2\x26\xe2\xb8\xc7\xdf\x37\xd3\x5e\xf8\x29\xb2\
\x20\x1c\xa7\xbf\xfc\x8b\x3d\x2f\xea\xfa\x64\x1c\xf9\xfb\x17\xad\
\xb6\x3f\xac\x8b\xac\xd3\x81\xdc\xdb\xbd\xc7\x4b\xf7\x20\xde\x83\
\xf7\x29\x91\x2d\x04\xd0\xd7\x5a\xb9\xfe\x49\xeb\xa6\xd3\x4b\x5f\
\x6c\xb2\x1d\x3f\x30\x7b\xe8\xcb\x3e\x70\xa6\x9c\xc3\x69\xdd\x4e\
\xbb\x36\x61\xf7\x53\x54\xa4\x9b\xeb\x7f\xe5\x9f\x6c\x61\x5c\x7e\
\xa8\xfc\x9a\xf4\xd9\x71\x87\xf4\x32\xd9\xe5\x79\xb4\x78\xda\x7e\
\x89\x1d\x43\x90\xdb\x7d\x3b\x94\xd6\xb0\xf6\xca\x61\xfa\x92\x48\
\x39\x39\xfb\xa2\x7f\x74\xbf\x1c\x95\x55\x56\xbe\xae\xd7\x6f\x94\
\xdd\x45\x2d\x2a\xb3\x0a\x9b\x3c\xb7\xc5\xd4\x25\xec\x89\xfb\xd3\
\xbb\x77\xf9\x7b\xe2\x7a\xe9\x23\x9d\x33\x48\x37\xe9\xb7\xef\x81\
\x3d\x4c\xdf\xcb\x7f\xc5\x92\x05\x37\x8e\xa8\xa8\x58\x49\x9b\x3b\
\x8a\x60\x17\xb3\x41\x46\x85\x0b\xc5\xf8\x9b\x77\x95\xaf\x51\xcf\
\x4c\x7b\xfe\x87\xc8\x4c\x2b\x4c\x66\xd5\x6f\xca\x46\x63\xef\xfd\
\xf0\xb8\x1c\x4a\x7a\x72\x15\x06\xe9\xf5\xac\x04\x0d\x79\x8f\x12\
\x3b\x7e\x38\xd8\x42\xf1\xed\x51\xda\xec\x9b\xa8\xe2\xc5\x04\xd3\
\x70\x7d\xc9\xdc\x1b\x12\xab\x04\xb4\x6c\x9c\xdc\xc1\x36\xde\x23\
\xee\xdb\x69\x66\x16\xf6\x36\xde\x7a\xff\x71\x6b\xde\xb7\x5b\x76\
\x8f\xef\x2d\x8f\xf0\x7c\xd2\x61\xd3\x43\xba\x3c\x1d\x69\xb8\x4d\
\xaf\xa7\xd9\x3b\x3b\x58\xc9\x2d\x0c\x07\xdb\x75\x51\x28\x48\x5c\
\xf4\xc3\x98\xc3\xb7\xc8\x60\xfd\xb4\xda\xb7\x76\x07\x39\xbb\xa9\
\xad\xbd\xc4\x73\x58\xe8\x78\xd4\x85\xd9\x85\xd5\x85\x31\x5e\x75\
\x3f\xe3\x7a\x3b\x5e\xdc\x7c\xa2\x5c\xc4\x73\xed\xf3\x49\x87\xd3\
\xea\xbd\xf4\x75\xb4\xe9\x45\xb2\xa2\x30\x4a\x6a\x57\x15\x05\x57\
\x28\xfa\xd2\x55\xb4\xf6\xd5\xd7\x9a\xc7\x76\x43\xfd\xf5\xa5\x77\
\xef\xab\xae\x5d\x05\x3b\xb4\xbd\x76\x9b\xac\xae\x52\x41\x7e\xb6\
\x3b\x44\x17\xf0\xbe\xea\xd8\xba\x1a\x8d\x98\x6a\xca\x95\x12\x73\
\xce\x22\xd9\x6c\xde\x92\xb9\x9a\x9e\xdc\x7b\xa8\x93\x2e\xd2\xa7\
\xe9\x24\xbd\xfb\x5c\x61\x38\xd8\xee\x8b\xd9\x3f\x24\x15\xf6\x0b\
\xc4\x24\x8e\xf4\xb2\x45\x46\x9b\x2d\x32\xfd\x83\x7b\xe4\xf0\xa3\
\xa7\xcb\x47\x6c\xdb\xcd\x2e\x6a\x6d\x1a\x8b\x29\x53\xb9\x86\x99\
\xf2\xf4\xd7\x91\x99\x9b\x1e\xb3\xcd\x54\x1d\x9f\xca\xd7\xaa\x6f\
\x1a\xd7\x11\xb3\x52\xef\xcf\x73\x56\x4c\x94\xff\x9e\x7e\xb0\x6c\
\x31\x9f\x5a\x4b\xf2\x18\xd2\x43\xba\x48\x9f\x9f\xce\x7d\x7b\x96\
\xd1\x16\x0a\x03\xa3\xe7\x63\x55\x53\x07\xcd\x66\xe6\x6b\x69\x33\
\x63\x88\x6c\x9f\x3a\x50\xcc\xc1\xa3\xc4\x2c\x1b\xef\x65\xd6\xa9\
\x07\xcb\xdb\x5a\x40\xbf\x15\x3d\x7d\xf0\xcd\x1b\x32\xda\xba\x1b\
\x32\x1e\xb1\x7d\x2b\x1b\xe6\xcf\x19\x26\x0f\xfc\xf1\x1c\xb9\x49\
\x0b\xfc\x67\x9e\xb3\x60\xb4\x98\x69\x83\xc5\x8c\xea\x21\xdf\xaf\
\x18\x2f\xa7\x99\x0f\x75\xf0\xde\x1f\x36\xb5\x0f\xc2\x16\x8a\x67\
\x35\x2d\xdb\xa9\xb9\x4c\x6b\x54\x5b\x7e\xe8\xd0\x5c\xcc\xa0\x4e\
\x62\xd8\xcb\xf0\x8c\x05\xb2\xee\x9d\xbb\xe4\x78\x4c\x17\x66\x9b\
\xac\x5c\x7f\xa9\x9c\xa6\xfd\xc5\x0f\x89\x8c\x89\x79\x71\xd8\x9d\
\x4f\xd9\x39\x92\x3b\x4f\x95\xf3\xac\x29\x85\xf1\x62\xab\x2c\x59\
\x7b\xa9\x9c\x35\x7b\x98\xfc\x63\x48\x60\x6f\xc4\x9a\x55\xbc\xf8\
\x8e\x7e\xfa\xf6\xfd\xc2\x08\xa2\x4c\x29\xb9\x9f\x7d\x0c\x2b\x67\
\x89\xa9\x57\x43\x4c\xd7\x56\xf2\xe2\xd3\x57\xdb\x29\x57\xf6\x52\
\xb7\x5e\x89\x88\xc4\xe6\x7d\x59\xa8\x3a\xc2\x7f\x26\x6d\xf9\x47\
\x64\x86\xe7\x45\xb4\x7a\x7d\x9c\x61\x3b\x57\xee\x67\xc7\x0c\x37\
\x4f\xb2\x4d\x06\x6a\xb7\x75\x48\x83\x9a\x62\xaa\x54\x94\xec\x72\
\x65\xc5\x94\x2e\x65\x03\x2d\x13\xfe\x69\xbf\x02\x41\x8f\x73\x36\
\x97\x6c\x5e\x4f\xa6\x99\xaf\xa4\x96\x16\x42\x27\xc6\x14\xcd\x30\
\xf6\xb9\xb5\xbb\x40\x3f\x72\xbe\x9c\x51\xbe\x5e\xb3\x84\x5b\x78\
\xe7\x45\xb7\x6d\xf7\x99\x8b\xe4\x1a\xbf\x40\xd0\x33\x70\x94\x18\
\x8b\x58\xab\x05\xd3\x60\xc7\xd5\x52\xad\x7c\x59\xb9\x9d\xf4\x90\
\x2e\x9f\xec\xa8\xb0\x5f\x80\x20\x62\xbc\x30\x41\xcf\xde\xee\xd9\
\x50\x15\xae\xe0\x16\xde\x1b\x55\xfe\x77\x9e\x89\x9a\x81\x83\xbb\
\xc8\x86\x4e\xc7\x5e\x9a\x6b\xee\x3c\x5d\x76\x3f\xeb\x26\xd3\xb9\
\xa5\xec\xc0\xd5\x94\xfb\x6a\xc1\xcf\xd5\x67\x8c\xd4\x67\xb6\xd7\
\x16\x59\x13\x0d\x9c\x84\x29\x08\xc2\x1f\x2c\x94\x60\xe4\xd2\x7d\
\x12\x04\x9f\x74\x2f\xfb\x20\x07\xf4\x3f\x6f\x22\x8b\x41\x9e\x7d\
\xcd\xd1\x8e\x5d\x0b\x79\x45\x96\x54\x2b\x27\x9f\x61\xf4\x8b\xca\
\xe8\x54\x99\x6b\x1f\x75\x5a\x08\x3b\x4d\x7b\x8b\x7b\x76\x7b\xc8\
\xef\x1e\x33\x5c\x30\x4e\xc7\x12\xb1\x01\x71\x26\x20\xfe\xae\x7b\
\x49\x36\x53\xb1\xb0\x3a\x0a\xae\xa2\x4f\x49\x3d\x2d\x84\xae\xca\
\x09\x9a\x59\xd6\x08\xf9\xcd\xa3\x76\xb3\xc9\x3c\xed\x5c\x79\x19\
\x27\xd1\x3f\x4a\x57\xa9\x65\x36\x5f\x26\xa7\x70\x5f\x7d\xc6\x42\
\x55\xfa\x26\x6a\x0b\xe9\xa6\xcf\xaa\xaf\x9a\x78\xd8\xd5\x34\xb8\
\xfd\x38\x24\xa8\xe6\x3e\x05\x42\xb0\xba\x97\x63\x5b\x8a\x1c\x20\
\x5e\x26\xda\xe4\x7e\xfe\x70\xb9\xaf\xd9\x41\xbb\x37\xb9\x8f\xe2\
\xe4\x6d\x9f\x9b\x2a\xcd\x5a\x9b\xf1\x6b\x3f\x8c\xfc\x1d\xb2\xb1\
\x7d\xbb\x95\xa7\x9b\xa1\x5d\x65\x3d\xf7\xe5\xfe\xf6\x39\xc9\x37\
\xb6\x67\x2b\xa4\x60\xa1\x10\xc1\x7b\x9f\xc0\xf1\x4a\xf7\x52\xcf\
\x73\xc0\x81\x5a\xa9\x19\x93\xd0\x55\xb4\x49\x4d\x79\x73\xf0\xcd\
\x1b\xa3\x33\x59\xf5\x92\x41\x37\xac\xd1\x7b\x96\x36\xb3\x47\x94\
\x62\xbb\x22\xd3\xe7\x92\x7b\x13\xb6\x96\xe1\xf7\xec\x30\x35\x2a\
\xc8\xa7\x78\xcc\xeb\x33\x3c\x0f\x79\xec\x55\xeb\xa5\x8d\xa6\xc1\
\x7a\xc8\x47\x88\xba\xc1\xee\x8b\x28\xa4\x25\x1e\xd4\x3a\x17\x91\
\x14\x06\x23\x8d\x3a\x45\xd1\x73\x15\x5d\xa7\x12\x96\xb7\x6e\xd0\
\x73\x15\x55\x31\x95\x53\xa6\xbf\xf4\xf3\x1e\x99\x3b\xe7\x3d\x63\
\x5a\xcf\x3f\x8a\x59\xc1\xff\xa2\xa7\xa0\x5f\xbc\x7a\xab\x1c\xaf\
\xe3\xc4\xb7\xcd\xa7\x2c\xd9\xc3\x3d\x08\x52\x50\xdc\xef\x17\x24\
\x2d\xba\x2d\x3c\xe4\x37\xcb\x38\xed\xb2\xba\xa8\xb4\xc5\x72\xea\
\x0a\x09\x4c\x24\x07\x29\x5d\xfa\x09\xd0\x5c\xa2\xf1\x86\xd2\xbd\
\x4c\x77\x0e\x04\xa1\x59\x94\xe3\x2a\xaa\x99\xc3\x24\x50\x8e\xab\
\xe8\x85\xcb\xe4\xca\x1a\x9d\xfb\xef\x61\xcf\x9a\xa5\xdd\x57\xcd\
\x8e\xbd\x4d\x9d\xaa\xf2\xe1\x97\x8f\xc8\xe1\x56\x57\xf1\xf9\xdf\
\x8d\xb2\xaa\x65\x3d\xd9\x55\xa5\xe5\x81\xbe\x54\x96\x9d\xeb\xba\
\x26\xe3\xe6\xb1\x89\xfe\xdd\xb6\xdb\xe2\x39\x5b\xad\xf7\x4b\x1f\
\x65\xe3\x3c\x26\x9f\x08\xe4\xef\xde\xa3\x20\x42\x9f\xef\x15\x8c\
\x54\xf2\x02\x88\xb7\x39\x83\xb8\x83\x6d\x1d\x74\x57\x58\x81\x99\
\x1e\x0d\xba\x8a\x6e\x95\xe5\x1d\x9b\xcb\x73\xbd\xce\xbf\x2d\x27\
\x43\xe1\x84\x0d\x1f\xdb\x75\x20\xc3\xbb\xca\x5a\xed\xd2\x96\xe3\
\xa3\x45\xe1\x69\x41\xae\xe4\x93\xef\x66\xa7\x2c\x53\x0d\xfc\x01\
\x29\x93\x65\x46\x3d\xf8\x72\x2e\x71\x79\xc0\x6f\x1e\x35\x4d\x6b\
\xc9\x9b\x74\x87\x5a\x20\xb8\x15\xcd\xd4\xcf\x21\xd6\xae\x86\x55\
\x37\xb1\xed\x2a\xb8\xa1\x3d\x21\xca\x4b\x24\xdc\x7e\xb8\x30\x57\
\x57\x05\x6c\x81\xa8\xb8\xa9\x35\xb3\x96\x16\x46\x07\xa5\xa7\x10\
\xe2\x2a\xaa\xe2\x69\x59\x15\x53\x27\x6c\xfc\xc4\x66\x24\xdd\x0d\
\xe3\x03\x97\x59\x05\x8f\x0c\xa5\x20\xbc\xb1\x00\x01\x80\x09\x25\
\x6f\xf5\x14\x1a\xfe\xd3\xb2\xfc\x86\xe3\xe4\x62\xce\xef\x7e\xc6\
\xf5\x39\xa6\xfc\xa9\xcf\xfd\xc0\x3d\xfe\x97\x4b\xfc\x45\x11\xe5\
\xf9\xcc\x08\xe2\xa1\x18\x5d\x20\x80\xd0\xb6\xee\x7d\xdc\x0e\x09\
\x25\x06\x7d\x94\x2e\xf1\x2e\x40\x7d\x2e\x58\x71\xd7\x2d\xe4\xdc\
\x28\x3d\x34\x73\x72\x16\x72\x6e\xbb\x52\x4e\x2e\x53\xbd\x9e\x15\
\x57\xe7\x7e\x60\x4c\xcb\x99\x87\x18\x2d\xa0\xef\xb6\x5d\x2d\x27\
\x5b\xe5\xce\x99\xcc\x29\xc0\x2d\xd6\x4d\x07\xc7\x36\x9c\xdd\xe6\
\xd9\xe3\xfa\xbb\xde\x7b\xc5\x9b\x77\xc8\xb1\x15\x4b\xcb\xbf\x1b\
\x8f\x9a\x6e\xc7\x95\xd9\xda\xfd\x55\x55\xad\xfd\xaa\xc3\xe4\x57\
\x7e\x81\x2e\xd4\xeb\x26\x6b\x77\xd7\xd3\xce\x0a\x92\x9e\xc4\xa6\
\x76\x0a\xca\xed\xef\x8b\x3e\x55\xa2\x40\x8c\x76\x57\x20\x91\xdb\
\xad\x5a\x6b\xaa\x37\xbd\xdb\xd4\x9f\x1b\x99\x6e\x1d\xd5\xb4\x7f\
\x57\xed\x7c\x63\xfb\x55\x67\x59\x4b\x6d\xb5\xd6\x9d\x4d\xd3\x3a\
\xf2\x7a\xce\x14\xeb\x16\xdb\xdd\x30\xce\xcc\xd4\xeb\xc6\x69\x26\
\x0e\xd2\x42\xe8\x6b\x3f\x19\xa4\xbd\x6e\x68\x91\x7f\xde\x4a\x06\
\x7c\x3c\xe2\xb3\x1a\xb6\x30\x53\x77\x7e\x6f\x7a\x9c\x79\xa3\xe9\
\xd4\x42\x76\x52\x60\x4a\x1c\xe3\x66\x28\x07\x9a\xc7\xa5\x85\x7e\
\xcf\xcb\xef\x6a\x92\xd2\xbd\x57\x17\x0e\x94\x14\xb8\xed\xef\x08\
\x54\xbf\x07\x6c\x77\x85\xfe\x41\xbf\xbd\x41\xfb\x6f\xed\xc7\x95\
\x9e\xb8\xab\x62\x69\xf5\xf2\xf2\xf7\x5e\x17\xdc\x6e\x3d\x10\xc7\
\xf6\x92\x47\x19\x17\x7e\xf1\x06\x62\x6f\x8a\xd5\x73\x41\xc5\x2d\
\xb5\xbb\x7e\xb6\xc2\x8c\xcf\xa7\xff\x9d\xe3\xbb\xa7\x88\x29\x14\
\xbd\x7e\xc1\x48\x06\xe6\xb2\x86\x71\x49\x07\x84\xaf\xcc\x5b\x5a\
\x68\xde\xfd\xe6\xea\xf9\xc3\x31\xdb\xe8\x7d\xaa\x5b\x8f\xf7\xc4\
\xdd\x16\x3b\x2d\xb8\x02\x61\x43\xcc\x12\x01\xf6\x1d\x74\x89\x8e\
\xdc\x25\xcd\x16\x08\x03\x28\x71\x4b\x54\x0f\xb0\x19\x82\x7d\x49\
\x33\x08\x69\x89\x53\xe0\xa5\x87\xc8\xa5\xe6\x0d\x39\xd8\xbc\x2e\
\xf3\xbe\xf9\x8b\xac\x7c\xf3\x4e\x39\xec\xcd\xdb\x65\xc1\x45\x2b\
\xe5\xc4\xa3\xa6\xc9\x8a\xc1\x5d\xe5\x88\x96\x0d\xe5\xb4\x86\xb5\
\xe5\x64\xfd\x3c\xb5\x7f\x07\x39\xf9\xe8\xe9\xb2\xfc\x92\x43\xe4\
\xf8\xbb\x4e\x95\xd3\xde\xbe\x5b\x0e\x7d\xfb\x2e\x39\x56\xef\xbd\
\xc2\xbc\x2d\xf3\xef\x3f\x5b\xce\xd0\xfb\x62\xcd\x35\x3b\xae\xf1\
\x17\x88\x3a\x43\xe3\x76\x7f\xa5\x55\x6e\x33\x4a\x14\xd8\x97\x84\
\xf4\xb1\x5b\x69\x89\x00\x1b\xa4\xb8\x02\x89\x7c\xb1\x60\x81\x98\
\xcf\xb4\xab\xf8\x4c\xfa\x7e\xf2\xa0\x2c\xdb\xf0\x6b\xb9\x70\xda\
\x40\xd9\x96\x55\x5e\xcc\x31\xd3\xe5\xad\xe3\x67\xc9\x87\x87\x4e\
\x96\xff\x2c\x1e\x2b\x66\xce\x70\x31\x13\xfa\x8a\xd1\x4c\x37\x6d\
\x9b\x88\xa9\xef\x99\xcc\x0d\x26\xf3\x32\x65\xbc\x4f\xbe\x73\x9c\
\xdf\x39\x8f\xf3\xb9\x8e\xeb\x57\x4c\x10\x73\xe4\x54\xf9\x17\xb3\
\x92\x35\xaa\x88\xe9\xd7\x41\x76\xfd\xee\x24\xb9\x6d\xcd\xc5\x72\
\x9e\x79\x51\x46\x9b\xcf\xa5\x9d\x16\x46\x1d\xb7\xf4\xc0\x4f\x6a\
\x14\xd8\x4b\xcb\xbd\x1f\xbb\x07\x15\x7b\xb0\x33\x02\x89\x7d\xd8\
\x7e\x4b\x80\xaa\x55\xa5\x5f\xa3\x5a\x72\x71\xe7\x03\x64\xc7\x88\
\x6e\xf2\xdd\x6c\xcd\xb8\xe5\xe3\x55\xdb\x6e\xef\x4d\xb1\xae\x98\
\x28\x66\x99\x66\xa2\xea\x0d\xac\xae\x35\xa3\x7b\x89\xe9\xd5\x4e\
\xcc\x01\x8d\xc4\xd4\xa9\x26\xa6\x62\x05\xcf\x74\x5f\xda\x37\x99\
\xf3\xc9\x77\x8e\xf3\x3b\xe7\x71\x3e\xd7\x71\x3d\xf7\xe1\x7e\xcc\
\x46\xae\x9a\x6c\x0b\xc4\x4e\x15\x73\x7c\x52\x3f\x31\x7d\xdb\xcb\
\x47\x2d\x1b\xc8\xc3\x35\xab\xcb\x8a\xda\x15\x93\x6e\x5e\x16\xdc\
\x57\x3d\xbc\x9b\x75\xb1\x84\x4b\xec\x08\xfb\x6d\x37\x06\x28\xd9\
\xe1\xdf\xce\x85\x94\x29\xed\xd5\xe8\xe6\xf5\xb5\x36\x77\x14\x33\
\xc5\x9f\xc2\x5d\x3a\x4e\xcc\x12\xa5\x7e\xfe\xa0\xad\xe3\xed\x23\
\xa6\xca\x86\x63\x67\xc8\xb5\x47\x4e\x93\xa3\x0f\xea\x2f\x4b\x7b\
\xb4\x91\x69\xed\x9a\x4a\xaf\xea\x95\xa4\x7b\x85\x0a\xd2\x42\xef\
\x55\xd3\xb1\x5a\x35\xa9\xa5\xc7\x7b\x6a\x0b\xe9\x3d\xaa\xa7\x0c\
\x9a\x3d\x54\xe6\x6a\x8b\x38\xf2\x8c\x05\x72\xe1\x69\x0b\xe4\xde\
\xe3\x67\xca\xd3\x7a\xbf\x8f\x96\x4f\x90\xff\x2a\xcd\x12\x6d\x39\
\x73\x47\x88\x19\xa3\x85\xd6\xa9\xa5\x16\x64\x75\x31\xe5\xbd\x49\
\x2a\xd2\xcf\x16\x47\x6c\xcc\xcf\x06\x96\x8c\x1d\x41\xb8\x31\xf2\
\x03\xfb\xad\x18\xa3\x9b\xd2\x15\x08\xc6\x39\x36\x01\x43\x29\xfc\
\xde\x3f\x96\xc3\xd2\x5a\x20\x74\x4d\xf5\x6b\xc8\xd7\x2a\x55\xfd\
\xf9\xf4\x05\x72\xde\x03\x67\xca\x89\xaf\xdc\xaa\x83\xf0\x53\x32\
\xd5\xbc\xac\x12\xd4\xf3\x32\xc2\x3c\x23\x7d\x75\xe0\xed\x60\xb5\
\x79\xfc\xa3\x3c\x5d\x21\xe1\x14\xab\xed\x0e\xf9\x5d\xcf\xb3\xfe\
\x60\x5c\xc7\x2a\xac\xad\x32\x40\xef\x35\x5a\xef\x39\x49\xbb\xa8\
\x29\xaa\x0c\xce\x7e\xf5\x16\x59\x75\xeb\x6a\x39\x6d\xd1\x68\xb9\
\xb9\x7d\x33\x79\x55\x0b\xd3\xb6\xb2\xd0\x44\x95\x23\x92\x23\x7b\
\x8b\x50\xf8\xb4\x7e\x8e\x15\x7b\xf1\xf7\x54\xa5\x7b\x81\x6f\x03\
\xff\x07\x89\x81\xf1\x22\xed\xf7\xc7\x68\xb7\x51\xcb\x66\xe0\xbb\
\x52\x4f\x33\xee\x40\x1d\x54\x59\x59\x35\xd0\x72\xab\xf4\xd1\xc1\
\xbe\xb3\xd5\xe2\xb1\x35\x61\x91\x4d\xc3\x25\xc7\xea\x39\x28\x9e\
\xac\x29\xf4\x4c\xfb\xad\x94\x98\xf7\x07\x68\x01\x31\x09\x36\x52\
\x39\x94\x67\xea\x78\xd6\x5a\xd3\x61\xb7\x68\xaa\x5c\xd9\x6e\x02\
\xc6\x4e\x6e\x48\x88\x41\xe5\x36\xc8\x7f\xfa\x9f\xec\x2c\x5d\xac\
\x71\x9d\x32\x9c\x78\x88\xf9\x9d\x0d\x52\x72\x21\x57\xa6\x3d\x2b\
\xb5\x35\xa3\x1a\x6b\x86\x35\x47\x27\x50\xc9\x08\x4f\xc7\xba\x58\
\x82\x35\xc3\x58\xf8\x93\xb6\x4b\x8e\x2d\x6c\xae\xe3\x7a\x24\x3a\
\xe6\x3e\x58\xaf\x82\x64\x87\xef\x15\x85\xc4\x73\x58\xfb\x88\x2d\
\xeb\xec\xc8\x67\xb0\xe5\x1f\x15\xed\x63\x65\xf8\xbd\xe8\xba\x8a\
\x35\xd8\x0b\xd0\x25\x76\xbb\x92\x9a\x96\xb4\x46\xdb\x4c\xd3\x8c\
\xb0\xd6\x56\x2f\xba\x68\x15\x4b\x94\x46\x6f\xb1\x28\x99\x94\xb1\
\x17\x88\xbd\x3f\xd7\xa3\x5f\x78\x05\x5b\xc5\x4a\x77\x4c\x19\x53\
\xd8\xf8\xec\xd2\x0d\xa2\xa8\xe6\xfd\x0c\x0a\x87\x6e\x8b\xed\x96\
\xdc\x7b\x16\x6b\xb0\x55\x6a\x46\x09\xb5\x19\x47\xa6\x38\x92\x89\
\xb4\xa0\x02\x0a\x3e\xe6\x17\x8c\xf3\x0b\xa3\xd5\x78\x4c\xad\x20\
\xc2\x40\x98\x70\xef\x99\xc5\x81\xe2\x0a\xbb\x10\xc7\x67\x78\x6f\
\xc3\x54\x50\x45\xca\x4a\x6f\x25\x12\x59\x32\x62\xca\x0f\x4b\x3e\
\x79\x01\x9d\x61\xb0\xbd\xbf\x87\xfc\x14\x74\x50\x17\x29\xd6\xb8\
\x44\xe9\x12\x9a\x6e\x86\x21\xc5\x20\xd7\xb3\xb3\x5a\x2a\x64\xe3\
\xad\xd7\x95\xa9\x64\xec\x59\x4a\xba\x53\xf6\xbc\xbd\x48\xf9\x7f\
\xca\xfc\xc0\x79\xa7\x14\xfb\x59\xc4\xe5\x4a\x57\x20\xec\x4b\x98\
\x2a\x28\x88\x8e\xde\xbf\x69\xa1\xbf\x32\xd1\x06\x92\x41\x6c\xf1\
\x3f\x1d\xd8\x73\x90\x6d\xf4\x82\x64\x5f\x46\xb6\x60\x65\x13\xe2\
\xbc\x40\xa1\xf2\x8e\x48\x92\xc5\x1a\xb5\x95\xae\x40\xda\x72\x20\
\x45\x60\x6e\x09\xd7\x74\x32\x1b\xc7\x82\x20\xe9\xae\x82\xd1\xe1\
\xb8\x86\x96\x92\x17\x5e\xf1\x3f\xf3\x02\xba\xd3\x93\xde\xbf\x49\
\xc1\x9e\xb8\xbc\x63\x7e\x5b\x5a\x91\xc0\x69\xb1\x44\x7b\x4b\x15\
\xe1\x02\x69\xaf\xbc\x5a\x89\xa3\x5a\x90\x6c\xf7\x8a\xf2\xe9\x80\
\x04\xf7\x27\xef\xdf\xa4\xc0\x3a\x90\x0a\x28\x90\xe7\xbc\x7f\x93\
\xc2\x55\x3a\x36\xa3\x2c\xf6\x70\x9e\x89\x6c\xca\x98\x2a\xa2\x5a\
\x48\x2a\x48\x56\x20\x4c\x92\x2d\x54\xb2\x3f\x6d\x58\xa3\x1e\xaa\
\x3c\x3b\xc4\x53\x94\x6c\x54\x49\x77\x96\x0c\xec\x60\xea\x0a\x84\
\x85\xab\xc5\x1e\x41\x1f\xac\x54\x07\x76\x6a\x5a\xb0\x40\xd8\x9f\
\x9d\xfb\xb0\xf9\x6f\x90\x1c\x0b\x4e\x0c\x25\x2a\x10\xb4\x6c\xfc\
\xbf\xf0\x18\xc1\xa6\xd6\x4f\x19\x04\x3b\x59\x33\x56\x38\xe2\x5c\
\x8d\xcd\x8a\x2d\x5a\xf3\xc2\x44\xa5\x7b\xbf\x12\x11\x5c\x93\xbe\
\xdf\x25\x98\xcd\x81\x53\xc1\x33\x4a\xe6\xdd\xd9\xd4\xb7\x15\x07\
\x42\x48\x24\x20\xa0\x03\x44\x8d\x21\xa9\x64\x6c\x14\xd8\xd0\xb8\
\x9e\xf7\x6f\x42\x38\x0f\x4c\xf6\x4f\x2f\x31\x70\x26\xf8\x54\xad\
\xa1\x88\xa3\x5c\x83\xf4\x42\xa6\xe0\xff\xc4\xce\xd1\x64\x36\x35\
\x1d\xff\x5a\x06\x66\xc6\x96\x20\x70\xc2\x43\xb2\x0b\x23\x97\x43\
\x9e\x82\x96\x9a\x63\x15\x0e\x10\x97\xd1\x20\x98\xa2\x1d\xe4\xfd\
\x1b\x89\x60\x77\x75\x07\x07\x4a\x0a\x9c\x35\x14\xe6\x55\xe3\x00\
\x2d\xc3\x6d\x48\x0f\x28\x0c\xb4\x7e\xa4\xaa\x20\x1e\x57\xb2\x43\
\x34\x16\x64\x76\x84\xa6\xe0\x36\x29\x83\x5d\x07\xdd\x58\xb8\x40\
\x48\x07\x76\x36\x0a\xd7\x91\xfb\x30\x76\xa1\x75\x3b\xd0\xc5\x85\
\x9f\x19\x04\xbb\x4c\xbb\xf7\x2a\x51\xc0\x51\xc0\x49\x5b\x64\x6c\
\x5e\xf8\x44\x99\xcc\xb9\x20\x19\x90\xe6\xa6\x7b\xff\x5a\x44\x15\
\x08\x82\x06\xba\x4e\x78\x57\xe7\x97\x94\xc1\x6d\xc6\x93\x15\x08\
\x95\xc4\x15\xc6\xdb\x1c\x28\x69\x78\x42\xe9\x5e\x20\x6a\x5c\x70\
\x40\x29\x4b\x65\x2a\x34\x4a\x0a\x23\xf3\x6f\x53\x8e\xb5\xdf\x76\
\x23\x28\xba\xa6\x23\xbd\x71\x9f\x44\x05\x72\xbd\xd2\xbd\x4f\x3a\
\x3a\x56\xb1\x01\x99\xe5\x5e\x80\x1a\x9a\x08\xef\x29\x5d\x97\x43\
\xe6\x31\x4e\xa0\x37\x20\x3d\x31\xc1\x15\x04\xdd\x0c\x35\xdd\x91\
\x35\x26\xdc\x3f\x0c\x8e\x21\x28\x50\x29\x10\x71\x01\xc2\x06\x04\
\x2b\x95\x48\x6d\x88\xad\xd8\xdc\x7e\xa7\x44\x21\xa4\x55\x87\xc7\
\x29\x80\xd4\xe7\xde\xe5\x45\x0e\x94\x54\x5c\xab\x74\x2f\x82\xc7\
\x47\x14\xd0\x15\x50\xfa\x98\xab\x26\xc3\xf0\xea\x48\x07\x88\xb8\
\xc1\xc1\x99\xd6\xb8\xce\xfb\xd7\x82\xf1\x0c\x89\xe8\x04\x25\x02\
\x03\x69\x71\x73\x33\xce\x33\xdf\x5d\xdf\x44\x49\x41\x85\xf1\x8d\
\x92\xf3\x28\xb0\x3d\xbc\x30\x4b\x1a\x30\x2f\xb8\x42\xe9\xc9\x81\
\x08\xa0\x44\xe2\x98\x8d\xd1\x10\xc7\x3a\xfa\x6b\x18\xb6\x18\x57\
\x51\xba\xdf\x20\xfd\x3f\x66\x8c\x60\x26\xd1\x2a\x82\x26\xf1\x45\
\x4a\xf4\x0e\x87\xbf\xfa\x9f\x0e\xff\xf6\x3f\x1d\xc2\x7a\x0d\x63\
\x8d\x1b\x0f\x51\x20\x4b\x3c\xd0\x21\x5c\x81\xc0\x1a\xca\x20\xce\
\x57\xa2\x21\xd3\x32\x10\x73\x1d\xa9\xd9\x61\x85\x8e\x2e\x2b\x78\
\x0e\x99\xc5\xe7\x2d\x4a\xd0\x52\x89\x24\x16\x04\x63\x14\xd6\x59\
\x07\x5c\x5b\x07\x7a\xff\x5a\x20\xb1\xd1\x32\x1c\x98\xc2\x75\x06\
\xcb\x63\x94\x2e\xdd\xaf\x72\x60\x5f\x81\x0d\x0b\xee\x93\xda\x46\
\xf7\xe4\xf0\x91\xff\x99\x1f\x38\x83\x20\x35\x38\xd8\x0a\x69\x45\
\x3c\x33\xb8\x4e\x90\xf9\x90\xbf\x78\xff\x5a\x8c\x51\x62\xe9\x75\
\x60\x3c\x7b\x47\x89\x09\xc5\xa5\x99\x2e\x6b\x9f\x43\x70\x3c\x81\
\xd6\xa9\x40\x91\x8e\xcd\x2b\x11\xc8\x40\x30\x5a\x79\x9f\xf7\xaf\
\x05\xe3\x01\x3a\x4e\xf0\x19\x4c\xc1\xf2\x7c\x0a\xab\xb9\x12\x3d\
\xe6\x53\xa5\xc3\x30\xa5\x1b\x33\x20\x0b\x8e\x1a\x28\xf7\x49\x04\
\x45\x47\xc8\x1c\x35\x3a\x04\xe6\x08\x06\xde\x4c\x48\x66\xb2\xda\
\xca\x81\x05\x35\x28\x7c\x70\x2e\x07\x14\x4c\x50\xe1\x29\x42\xc1\
\xe1\x1b\x56\x49\x89\x68\x4c\x77\x86\xbb\x2b\x85\xc3\x04\x19\xd6\
\x00\xe7\xe1\xee\x48\xa1\x65\x62\xf8\x2c\x31\xc0\xac\x1e\x7c\xe1\
\xf9\xca\xe2\x00\x32\xdd\x89\xd1\x10\x47\x39\x0c\x99\xfb\x74\x61\
\x38\x1c\xae\x0c\x16\x0a\x35\x35\xdd\x29\xdf\x82\x04\x46\x50\xd2\
\xe1\xa4\x29\x5a\x13\x12\xde\x7e\x51\x18\x0e\x28\x7d\xce\x08\xe9\
\x32\xe2\x28\x65\x51\x66\x02\x3e\xbc\x08\x04\x3c\xdb\x31\x4a\x0f\
\xd9\xaf\x10\xf4\xe5\x72\x44\xdc\x74\x83\x7e\x61\x80\xf9\x7b\x94\
\xc7\xe0\x33\x19\x3b\x50\x52\x63\x28\xe8\xab\x99\xa4\x0a\x66\x10\
\x44\x49\x43\x67\x28\x88\x89\x20\x94\x43\x32\xfc\x6f\xca\xf0\x73\
\x9c\x64\xb6\x5f\x75\x51\xa9\x00\x6b\x2b\x96\xd4\x70\x86\x41\x34\
\x6c\xcc\x1f\x28\x8b\x48\x46\xac\x8c\x4d\x04\xe6\xc6\x31\x58\x22\
\x06\xdf\xac\x64\x80\x8e\xba\x27\xdd\x53\x78\x4e\x24\x46\x04\xe8\
\x52\x98\xae\xfd\x41\x19\x95\x91\x61\xe2\x77\x8b\xe3\x73\xd4\x6f\
\x61\xd2\x42\xae\x51\xc6\xc8\x10\xcc\x61\x23\x85\x61\x15\x8e\xca\
\xe0\xbc\x88\x92\x87\xf5\x18\x6d\x9d\x59\xc2\x18\x05\x0c\x37\x16\
\x30\xc3\xb8\x59\x89\x31\x11\x93\x38\x8b\x6b\xf8\x9f\x96\x75\xa6\
\x12\xc7\x85\x12\x6f\x99\x8d\x11\x23\x46\x8c\x18\x31\xf6\x05\x30\
\xeb\x87\xbe\x11\x19\x9a\x63\x2f\x03\xa7\xf2\xd6\xde\xbf\xc5\x0b\
\x28\x72\xbd\x52\x24\x11\x1a\xd2\x01\x66\x6f\xc4\xd6\xb0\xa3\x43\
\x32\x20\x5d\x11\x5d\x2e\x08\x4c\xfc\x48\x61\x2c\x26\x42\x23\xe7\
\xf3\x4e\xff\x7f\xf7\x1d\x77\xa0\x64\x12\x59\x70\x42\x0d\x30\xe1\
\x16\x76\xdc\x48\xd5\x4b\xb3\x50\xf1\xb5\x12\x37\x1c\xe6\x39\x92\
\xd1\x05\x00\x4b\x07\x88\xb7\xe9\x5e\x03\xbe\x52\x06\x9d\x1c\x70\
\xd4\xe3\x3e\xcc\xbf\xe3\x3d\xc2\x8c\x62\xf0\x3b\x5e\x26\x79\x3d\
\x87\x35\x85\x28\x94\xce\x77\x6c\xbc\x12\x1f\x00\x07\xf4\x1b\xee\
\xc1\x64\xd7\x5e\x05\x4a\x17\x66\x8a\xbc\x80\x27\x08\xe6\x8b\x74\
\xe0\x0a\x04\x53\x0a\x33\x7e\x61\x92\x49\x89\xdc\x3e\x71\x5c\x60\
\xd1\x29\x60\x0e\x3f\x98\xe1\x18\x2d\xc3\x05\x90\x57\x81\x00\x26\
\xbc\x5c\x7c\x13\xe6\xe8\x99\xd0\x72\xc0\x19\x30\x15\x87\xc0\x42\
\x07\xb5\x91\x49\x9d\xbb\xf2\x20\x8e\x08\xe9\xae\xe9\x76\x05\x42\
\xd7\x45\x2d\x0e\x92\x9a\x3d\x59\x89\xf9\x3c\x0a\xcc\x65\xd0\x85\
\x60\x2c\x74\x05\x42\xd0\x31\xbc\x1c\x8f\x0b\x7d\xb7\xa1\x68\x95\
\x61\xe0\x30\x41\x37\x8b\xdf\x31\x5c\xa6\xc4\xd7\x8b\xff\x99\x8e\
\x66\x96\x93\xff\x57\x2b\xb9\x9e\x39\x78\x77\x2e\xdc\x2b\x4a\x29\
\x5d\x16\xb5\x14\x43\x5e\x32\x52\xa3\x88\xc2\x93\x0e\x32\xed\xb2\
\xc2\x70\x05\xc2\x74\x2e\x99\x8a\xd7\x49\xf0\xbb\x5b\x86\x17\x06\
\x1e\x8c\x4c\x11\x63\x9c\xc4\xab\x25\x1d\x16\x44\xba\x33\x02\x5d\
\x16\x12\x47\x5e\xa0\xb6\xe5\xd5\x65\x21\x51\xc1\xa6\xca\x66\x4a\
\xbc\x43\x78\x31\xba\x44\xbe\x43\x7e\x63\x90\x66\x0e\x3c\x0c\x16\
\xdc\x90\xb9\xec\x04\x4a\xdc\x76\x0a\x14\x84\xbb\x2c\x7c\xbb\xc2\
\x19\x16\x95\x81\x2c\xa1\x4b\x75\xe5\x55\x10\xce\x63\x73\xaf\x20\
\x55\x6b\xac\x63\x32\xec\x50\xe2\xb0\x10\x1c\x27\xe8\xb7\x83\xdf\
\x71\x52\x60\x76\x71\x8a\x32\x0c\xe6\x4e\xb0\x6f\xe1\xd9\xce\xb3\
\x5c\x45\xa1\x20\xf9\x4e\x9f\xcf\xfd\x9c\x79\x9f\x15\xbc\xce\x19\
\x22\x2a\x6d\x2c\xf4\x71\x6b\x13\xdd\x32\x68\xe6\x6c\xa2\xe8\xee\
\x09\xe8\x2e\xa3\xee\x57\x24\x88\xb2\x9e\xb2\x12\x89\x04\xa5\xab\
\x3f\x50\x20\x61\xa7\xe9\x28\x90\xb1\xe1\x48\x43\x61\xf0\x7c\x27\
\x11\xa1\xcf\xe0\xfc\x86\x49\x1f\x52\xf3\xf9\x9d\xd6\xe6\x8e\xf1\
\x7b\xb2\xc9\x29\xc6\xb1\x64\x81\x66\x68\xb1\xc5\x22\x10\x0d\xb5\
\x35\x1c\xe4\x92\x70\x1a\x99\x78\xf9\x15\x46\x81\xd0\x7d\xa0\x43\
\x30\xc0\x3a\xd2\x05\xba\x1a\x1c\x3c\x4e\x05\x4a\x34\xe9\xc5\x1a\
\x92\xb0\xcb\x69\x10\x14\x2e\x8e\x11\x7b\x15\x17\x28\x79\xb1\x74\
\x99\xa8\x26\x52\x20\xcc\xf0\xe5\x85\x54\x0b\x04\x30\x76\xb9\xe7\
\x32\x67\xee\x66\x0e\x9d\x53\x05\xdf\x39\xee\xa2\x6f\x47\x46\x50\
\x55\x50\x20\x08\x30\x48\x75\x6e\xac\x0b\x92\x69\xe6\x64\x1e\xfe\
\x45\x02\xa4\x26\x12\x13\x06\x7e\x56\x41\xd7\xcc\x20\x3a\xf8\x9f\
\x51\x60\xbb\x21\x94\x2f\x32\x3c\x11\x99\xc6\x25\xe3\x50\xcc\x92\
\x81\x73\x12\xc1\x15\x48\x3a\xee\xa1\xac\x07\x61\x0c\x43\xc3\xc7\
\xdb\x9e\xb4\xb2\x28\xc8\x7d\x47\xf4\x0f\x3b\x71\x17\x29\x90\xc1\
\x19\x80\xe9\x12\xf0\xa7\x62\x20\x73\x44\x76\x47\x63\xe5\x37\x77\
\x8c\x2e\x21\x6c\x7e\x08\x03\xfd\x22\x15\xe0\xd6\x99\x6c\xe6\x2f\
\x28\xe5\x84\x03\xc3\x60\x35\x60\xe0\xe7\x77\x6a\x35\x2b\xb7\x32\
\x01\x62\x7c\xa6\xd7\x16\x38\xe8\x6b\x11\x07\xd1\x78\x51\xce\x78\
\x39\x9a\xbc\x23\xb5\x0f\x86\x8f\x71\x5e\xb2\x81\x13\x6d\xde\xfd\
\x1e\x2c\x3c\xb7\x3c\xc1\x81\x0c\x67\x7d\x61\x22\x04\x0b\x84\xa5\
\x0a\x53\xbd\x7f\xad\xcb\x2a\xab\x73\x79\x86\xfb\x1d\x51\x19\x8d\
\x3f\x11\xd0\x53\xce\x55\xa2\xf4\x05\x79\xa9\x92\x4a\x89\x2e\x13\
\x3c\x8e\xb2\xc9\x1a\x94\x62\x05\x12\xc5\xf2\x82\x74\xe0\x96\x52\
\xd3\xba\xdc\x66\x2a\x6e\xed\xdf\xd1\xca\x70\x90\x17\x7e\xc7\x69\
\x3a\x0a\xae\x40\x30\x34\xd2\xcd\x00\xd6\xa2\x73\x0c\x9d\x84\xc2\
\xe5\x7f\x07\x1c\x1e\xf8\x8e\x7e\x13\x06\xfe\x61\x2c\x16\xc5\x81\
\xcf\x91\x30\xb8\xb4\x34\x06\x71\xfe\x0f\xfe\x86\x57\x0c\xb6\xbd\
\x62\x05\x94\xb3\x8d\xde\xbf\x29\x83\xee\x0f\x87\x68\x07\x82\x14\
\x07\x07\x58\x7e\xc7\xc2\xea\x80\x43\x35\x99\x18\xd5\x0d\x3a\x63\
\x22\xad\x12\xd8\x6d\x2f\x94\x6e\xbc\x0b\x17\x08\x60\x49\x02\xc7\
\x88\x0d\x99\x0a\x18\xc3\x9c\xc7\xfd\x5e\xc7\x6a\x25\xb6\x1c\x6a\
\x56\x14\x29\x0c\xdc\x72\x88\x4b\x12\xf5\x3b\x35\x28\xb8\x14\x99\
\xc1\x31\xbc\xe3\x00\x85\x83\x97\x89\x03\x4b\xcf\xc2\xa6\x17\xbc\
\xd7\xa3\x84\x07\x57\x58\x98\xed\x91\xda\x82\x85\x01\xa2\x0a\x04\
\x30\x36\x05\xad\x0e\x74\x47\x2c\x7d\x0b\x93\xfb\xb3\xd2\x8b\x2e\
\x8b\x6e\x30\xea\x1c\x58\x64\xa1\xc8\x31\xda\x1d\x9b\x0f\xd2\xac\
\xe9\x3a\x00\x26\x0e\x32\x07\x39\x3e\x08\x26\x7f\xc2\x4d\x3f\x2c\
\xa1\xa1\xd0\xd1\xff\x23\x30\x84\x91\xcc\x89\xce\x75\x89\xc9\xc0\
\x38\x83\x48\x7c\xbf\x32\xb8\x8c\x3a\x55\xb2\x7e\xbe\x44\xc4\x41\
\x89\x11\x23\x46\x8c\x18\x31\x62\xc4\x88\x11\x23\x46\x8c\x18\x31\
\x62\xc4\x88\x11\x23\x46\x8c\x18\x31\x62\xc4\x88\x11\x23\x46\x8c\
\x18\x01\xe4\x6c\x57\x16\xa0\xff\x53\x8c\x18\xfb\x07\x72\x2a\x3f\
\xfb\x00\x42\x6f\x4f\xc0\xdd\x7b\xf7\x41\xb6\x31\x0d\x7e\xf7\xf6\
\xf3\xf3\xce\xf7\xae\x89\x1b\x4f\x8c\x7d\x0f\x39\x15\xdc\xab\xf4\
\xe5\xf5\x33\xcb\xee\x62\xfa\xb0\x54\xb5\x9b\x2d\xfb\xbb\x8e\x9a\
\xed\x52\xd3\x7e\xb2\x45\x2c\x9b\x2a\xeb\xef\x76\x17\x52\xce\xf7\
\xae\x2b\xeb\x1a\x8a\x7f\xeb\x18\x31\x4a\x2e\x6c\x8f\xcf\x28\xc0\
\x16\xbc\xb7\xf9\x5b\xf0\x6a\x23\xc8\x5e\x27\xf5\xd8\x83\x59\xbf\
\xb3\xff\x72\xeb\xec\x4d\xd2\x4e\xff\x6f\x6f\x9e\x52\xea\x27\xdf\
\x39\x6e\x7f\x67\xaf\x66\x3d\xdf\x6f\x3c\x55\xec\x7d\xb8\x9f\x37\
\xba\xc4\x0d\x25\x46\xc9\x83\x13\x85\x6c\x25\xd6\x9e\xdf\xee\x02\
\xff\x98\x36\x8c\xc7\xa5\xa1\x8e\x0e\x07\x64\x6f\x93\x0e\xe6\x09\
\xe9\xa9\x0d\x61\x80\xdd\xcd\x7d\xab\x8c\xd0\xca\x3f\x4a\x1b\xc1\
\x28\x3e\xf9\x6e\x8f\xeb\xef\xf6\x3c\xce\xe7\x3a\xae\xe7\x3e\xdc\
\x8f\x11\x25\x6e\x24\x31\x4a\x1a\x72\x35\x0e\x36\x68\xd7\x5e\x3f\
\x7b\x83\xd4\xd6\x4a\xdf\x54\xff\x6f\x9f\xbd\x5e\x7a\xd9\xca\xbf\
\x59\xc6\xe9\xe7\x14\x1d\x21\x66\xea\xe7\x1c\x6d\x0c\xf3\x94\xf3\
\xed\x27\xdf\xbd\xe3\x53\xfc\xf3\x86\x72\x9d\xbd\x5e\xef\xc3\xfd\
\xb8\x6f\x1a\x1b\xc0\xc7\x88\xb1\xf7\x91\xab\x71\xf8\x23\x47\xf6\
\x5a\xa9\xa5\x95\xbc\x89\x56\xf0\x03\xb5\xd2\xf7\xb1\xa3\xc3\x16\
\x99\xac\x0d\x61\xb6\x7e\x5f\x60\x36\xca\x12\xfd\x7f\xa9\xd9\x22\
\xcb\x7e\x5a\x2f\x2b\x7e\x50\x1a\xbe\x73\x5c\x7f\xb7\xe7\x71\x3e\
\xd7\x71\x3d\xf7\xe1\x7e\xdc\x37\x1e\x49\x62\x94\x34\xe4\x28\xe3\
\x28\xd6\xaa\x70\x6b\xc5\x6e\xa8\x15\xbc\x0d\xa2\x92\x7e\x1f\xa6\
\x95\x7b\x92\xad\xf4\x9b\x64\x21\x0d\x43\x47\x87\xe5\xaa\x88\x2f\
\xb9\xea\x08\xf9\x55\xaf\x76\xb2\xfd\xe4\xb9\x72\x9d\x79\x45\x16\
\x73\xdc\xfe\xee\x9d\x37\x9b\xeb\xb8\xde\x8a\x5c\xeb\xa5\x0d\xf7\
\xf5\x95\x7b\x9e\x63\x95\x77\x3f\x09\x31\x62\x14\x4f\x04\x45\x2b\
\x6b\xa1\xda\x26\x75\xb5\xf2\xb6\xd0\x06\xd2\x59\x2b\xfc\x40\x5f\
\x5c\x9a\xa1\x3c\x58\x8f\x2f\xb1\x8d\x63\xb3\x8e\x18\x4f\xcb\xf2\
\xa1\xdd\xbc\x3d\x1a\x7a\xb5\x91\x27\x3f\xba\x5f\x8e\x52\x7d\x64\
\xb9\xff\x3b\xe7\x1d\x6c\xaf\xf3\xae\x1f\xc8\xfd\xec\x7d\xbd\xfb\
\xc7\xa2\x56\x8c\x92\x01\x3b\x7a\xdc\x24\xe5\xb2\x9f\x96\x8a\x5a\
\x71\x6b\xa8\x48\xd4\x58\x7b\xff\x76\x5a\xa1\x7b\x6b\xc5\x1e\xa9\
\x9c\xa2\xdf\xd1\x35\x16\x29\x97\x29\x57\x6a\xe5\x5e\x7c\xd2\x1c\
\xb9\xa1\x5a\x25\x1b\x20\xc5\x94\x2f\x23\xdf\x2f\x1f\x27\x77\x98\
\x77\xec\xc8\xb1\xd2\x3f\x6f\x91\x36\x8e\xb9\xf6\x7a\xee\xc3\xfd\
\xb8\xaf\x67\x09\xab\x61\x9f\x87\x65\x2b\x1e\x45\x62\x14\x47\xe4\
\xe8\x1e\x46\xca\x64\xaf\x91\x0a\xd9\x4f\xea\xe8\xb1\x51\xea\xeb\
\x28\x70\x80\x56\xe4\xae\xfa\xff\x60\x15\x8b\x26\x6a\x25\x9f\xa9\
\x44\xaf\xf0\x44\x2b\x1d\x39\x5e\xbc\x45\x4e\xe8\xd0\x5c\x76\x4a\
\xc5\xea\xa6\x6e\xef\xe1\xa6\x52\x8b\x8e\xa6\x45\x1d\xd9\xf5\xa7\
\x73\xe5\x2c\xf3\x82\x77\x9e\x3d\x9f\xeb\xb8\x9e\xfb\x70\x3f\xbd\
\xaf\xbd\xbf\x3e\xc7\x3e\x4f\x9f\xcb\xf3\x5d\x5a\xfc\xa4\xc5\x88\
\xb1\xf7\x61\x2b\xa5\x13\xad\x9c\x62\x8e\xb5\x69\xa3\x74\xd0\x4a\
\xdd\x4f\x7b\xfc\xd1\xda\xd3\x4f\xd5\x4a\x8d\x85\x6a\xb1\x1e\xa7\
\xd2\xaf\x34\xef\xc9\x82\x79\xc3\x6d\x3c\x82\x5f\x9a\x8c\x9f\x6f\
\x26\x6d\xfd\xa7\x69\xb7\xec\x64\xe2\x18\x98\xf1\x7d\xe5\x11\xf3\
\x96\x2c\xd2\xeb\x56\xd8\xf3\xb9\x4e\xaf\xb7\xf7\xd1\xfb\xd9\xfb\
\x72\x7f\x9e\x13\x2b\xec\x31\x8a\x33\x92\x29\xe6\x5a\x89\x87\x2b\
\x3d\xab\x95\xa7\x70\x2f\xd3\xca\xbd\x82\xd1\xe1\xb6\x93\xe4\xfc\
\xc6\x35\xe4\xad\xf2\x75\x9b\x99\x7e\x57\xfc\xd1\xcc\xfb\xc8\x98\
\xe1\x77\x3f\x63\x6a\x74\xea\x67\xaa\x97\x97\xbf\x9f\xbb\x54\xae\
\x32\x2f\xa9\x0e\x42\x23\xf1\x44\x2d\x4f\x61\xe7\x7e\x7a\xdf\x5c\
\x0a\x3b\x33\xf0\xb1\xc2\x1e\xa3\xb8\x21\xd7\xe8\x81\xa8\x13\x50\
\xcc\xcd\x06\x19\xa4\x95\x3b\x5a\x31\xd7\x8a\x3f\xcc\x57\xcc\xdb\
\x2e\x59\x6d\x66\xbe\x66\xcc\xcc\xbf\x1a\x33\xe7\x5d\x63\xba\xac\
\xbe\xc2\x8e\x22\x3d\x5a\xcb\x53\xe9\x28\xec\xd6\x35\x25\x56\xd8\
\x63\x14\x27\xd8\xd1\xe3\x79\x5f\x31\x67\x96\x3b\x45\xc5\xfc\xd4\
\xb9\x72\x5d\xd5\xb2\xf2\x8f\x1a\x9d\xfb\xeb\xa8\xb1\xc3\xcc\x7e\
\xcb\x98\x19\xaf\x6a\x23\x79\xdd\x98\x89\x9b\xfe\x6e\x1a\x8f\x9b\
\x67\x74\x18\xf8\x7e\xe9\x38\xb9\x73\x0f\x85\xdd\x13\xd5\x62\x85\
\x3d\x46\xf1\x85\x1d\x39\x3c\x46\x2b\xe6\xeb\x64\x48\x22\xc5\xfc\
\xdd\xbb\xe4\x98\xce\x2d\x6d\x58\x62\xd3\xf5\xa4\xab\xec\xa8\x41\
\xe3\x70\x9c\xf3\x8e\x31\x7d\x2f\x7b\xc0\x94\xad\xd9\xc0\x2a\xec\
\x0f\x9d\x27\x67\x9a\x9d\xb1\xc2\x1e\xa3\x10\xe1\x2a\x4b\x81\x91\
\x91\x03\x51\xc6\xcd\x98\xe3\xfe\xb1\x5d\x9a\x69\x65\xed\xa8\x95\
\x37\xb1\x62\xae\xa3\xc1\xfc\x91\x76\x53\xa5\x1f\x51\xcc\x19\x2d\
\x18\x35\x82\x0d\x64\xe6\x2e\x63\xa6\xbf\xfc\x3f\xd3\x7a\xc1\xb1\
\x56\xd4\x1a\xd3\x5b\x1e\xcb\x53\x61\x7f\x5c\x3a\xda\xe7\xe3\xd6\
\x12\x54\xd8\x49\x67\x54\xfa\xf3\x41\x9b\xa1\x31\x4a\x1e\x72\x0a\
\xd1\xab\x14\x1e\xbd\x4a\x4c\x6f\x5a\x46\x2b\x54\x59\xfd\xf4\xd6\
\x5a\xf0\x99\x09\x59\xb3\x81\x08\x83\xac\xff\x81\x55\x8a\x3d\x5f\
\x2b\xdc\x3f\x10\x75\x36\x58\x91\x07\xb7\x10\x4f\x31\xdf\x1c\x50\
\xcc\x77\xca\xb2\xfb\xce\x94\x73\x9a\xd6\x94\x37\xca\xd7\x6d\x6a\
\xfa\x5f\xf5\x90\x99\xad\xa3\x45\xb0\x71\x38\x22\x72\x0d\xbd\x63\
\xbb\xa9\x76\x60\x2f\x53\xb3\x82\x7c\x72\xd1\x32\xb9\x22\x97\xc2\
\xee\xdd\xd7\x53\xd8\x79\x9e\xf7\x5c\xbc\x80\x9b\xd8\xc6\xca\x04\
\x22\xe9\x23\x9d\xa4\x97\x74\x47\xbd\x4f\x2a\xcc\x9d\x6f\x5e\x7e\
\x7a\x79\xea\xe5\x71\xa0\x11\xfa\x45\x11\xa3\xb8\xc0\x16\x8c\x6b\
\x10\x14\x1a\x13\x75\x88\x3c\x0f\xa8\x2c\xce\x9a\x8b\x97\xb5\x77\
\x47\xec\xd8\x21\xd5\xf4\xbb\xb7\xee\x02\xbe\x94\x01\x71\x39\xc7\
\x5d\x7d\xad\xd4\x32\x8f\x48\x1d\xab\x18\xaf\x97\x46\x7a\x3f\x94\
\x72\x7c\xa4\x70\x28\x1c\xa6\x3d\xfc\x44\xb3\x49\x66\xe9\xf7\xdd\
\xa2\x95\xaf\x98\x8f\xe8\x6e\xe3\x0a\x5b\x73\x2e\x4a\xf9\x0c\x18\
\xd1\x40\x20\xa2\x56\xd7\x93\xaf\xb2\xa3\x48\xaf\x36\xb2\xfd\xab\
\x47\xe5\x30\x7d\xbe\x55\xd8\xed\x7d\xf5\xfe\xf6\x39\x3c\xcf\x73\
\x63\xe9\x45\x3a\x6c\x7a\x34\x5d\xd6\x60\x40\x3a\x49\xaf\x9f\xf6\
\x3d\xde\x29\x15\xba\x3c\xd3\xfc\x23\x1f\x6d\x7e\x6a\xbe\xda\xfc\
\xd5\x7c\xb6\x22\x1d\x8d\xd0\x35\x1a\xbf\xc1\xd8\x02\x8a\xb1\x77\
\x90\xd3\x30\x20\x8d\xe2\x6d\x6d\x14\x14\x1a\x8b\x8d\x9e\xd5\x1e\
\xfd\x19\x95\xc7\x9f\xb6\x0a\x73\x73\xeb\x2a\xce\xba\x8b\x6d\xd2\
\x56\x0b\xd7\x5b\x7b\xe1\x79\xd6\x1e\xa8\x85\xdb\x21\x65\xd2\x08\
\x20\xf3\x0f\x88\x52\xeb\xa5\xb3\xf6\xd6\xdd\xf5\x7b\x1f\xe5\x60\
\xe5\x68\xfd\x7d\xb2\x5f\x69\x73\xac\x56\x7a\xee\x4a\xad\x68\x4b\
\xce\x59\x22\x57\x63\xbe\xb5\x8a\xf9\x3d\xcf\xe6\x28\xe6\x89\x38\
\x4b\x45\xaf\xf1\xeb\xfe\x66\x1a\x8d\x9e\x65\xca\x8a\xfc\x67\xc5\
\x04\xb9\x1d\x51\xeb\x17\x14\x76\xaf\xd1\x59\xab\x96\x7d\x1e\x0e\
\x8d\x3c\xdf\x4b\x47\x1f\x9b\x2e\xd2\x47\x3a\x49\xaf\x4b\x7b\xd4\
\x7b\x25\xa2\xe6\x4f\x4e\x5e\x69\xbe\xd9\xfc\x63\x9d\x0a\xf9\xa9\
\xf9\x6a\xf3\x97\x7c\xd6\xfc\xb6\xf9\x8e\xa8\xa9\xe5\x60\x1b\x8b\
\x2b\x9b\xb8\xa1\x14\x3d\x6c\xe3\x70\x23\x86\x67\xd6\xb4\x0b\x91\
\xb4\x20\x1b\x28\x5b\xd8\x02\x45\x51\xf6\xc4\x8e\x01\xca\x21\x5a\
\x49\x86\x6b\x65\x1a\xa9\xbf\x8d\xf2\x2b\xd2\x68\x3d\x3e\x26\x63\
\x3e\x21\x63\xb5\xb2\x8c\xd3\x4a\x37\xc1\xaf\x9c\xd3\xcc\x16\x3b\
\x6a\x38\x8b\x95\x53\xaa\x57\x9a\xad\xb2\xe2\xd3\xdf\xcb\x51\x5d\
\x5b\xda\x1d\x0f\x4d\xd7\x53\xae\x49\x28\x5a\x85\xc9\x28\xd2\xef\
\xf2\x07\x4d\x99\x1a\xf5\x4d\xcb\xba\xf2\xd7\xf5\x17\xcb\x69\x66\
\x87\x27\xb2\xf9\xf7\xc7\xa1\x11\x57\x94\x79\xf6\xf9\x9a\x0e\x9b\
\x1e\xd2\xe5\x99\x82\xc7\xea\xef\xd1\xef\x90\x0a\xfd\xbc\xb2\xf9\
\x86\xd5\x8c\x7c\x24\x3f\xc9\x57\x2f\x7f\xbb\x92\xdf\x7e\xbe\x37\
\xb0\xe5\x40\x79\xec\x36\x37\xc7\x8d\xa4\x28\x61\x1b\x07\x3d\x93\
\x66\xbe\xf6\x72\x15\xb4\x50\xaa\x28\xeb\x68\x0f\xd6\xcc\xf6\x90\
\x88\x19\x5e\x01\x52\xb8\x54\x94\x69\x7e\x8f\x3e\x47\x7f\x9b\x4b\
\x45\xd2\xdf\x58\x77\x31\x5f\x0b\xf8\xe0\x8c\xe8\xcd\x45\x78\xf4\
\xc4\x28\xf4\x01\xaf\x51\xa0\x1f\x50\x71\x3d\x5d\x61\xa5\x79\x42\
\x0e\x41\x31\x5f\x34\x5a\xee\x2e\x23\xf2\x5d\xe3\x31\xb3\xcd\xf8\
\xf5\x7f\xb3\xa3\x43\x54\x83\x08\xd3\x2a\xec\x2f\xfd\x62\xda\x2c\
\x3a\xc1\x29\xec\x7f\xb1\x0a\xbb\xde\x97\xfb\xdb\xe7\x78\x0d\x05\
\x13\xb0\xd7\x58\x48\x0f\xe9\x0a\xa6\x33\xea\x3d\x52\xa1\xcb\x2b\
\xf2\xcd\xf3\x07\x23\x1f\x67\x92\xaf\x36\x7f\xbd\x86\x34\x44\x8f\
\x79\xe2\x9d\x96\x83\x2d\x8f\xa0\x13\x65\x3c\x92\x14\x1d\x6c\x03\
\xa1\x57\x42\xe9\x44\x06\xc6\xcd\x02\xc5\x94\xc6\xb1\x55\x45\x0b\
\xaf\x97\x9b\x6c\x7b\x53\x0a\x78\x8b\x5f\x71\x37\x07\x2a\x2f\xdc\
\x68\x2d\x42\xf9\x23\x95\x33\x48\x2a\xac\x4f\xdb\x30\xa0\x2a\xe6\
\x98\x69\x9b\xd6\x96\xd7\x19\x05\xfa\x5d\xf1\x07\x3b\x2a\x44\x35\
\x86\x44\x44\x14\x1b\x76\xe7\x93\x39\x0a\xfb\x85\xcb\xe4\x4a\xf3\
\x8a\x2c\x71\xcf\x08\x3e\x77\x8f\x34\x45\xa5\x3b\x5d\xba\x3c\xf3\
\x8c\x03\xe4\xe3\x52\x9b\xaf\x9a\xbf\xfe\xa8\x35\xd9\xe6\x3b\xf9\
\xef\x75\x52\x76\xbd\x0a\xe5\x63\xcb\x29\x1e\x45\x8a\x06\xb6\x71\
\x04\x46\x0f\xab\x2c\xb2\x5e\xdb\x93\x89\xbb\x6a\xe1\x0c\xd6\x82\
\xca\x99\x7b\xd0\xe1\x7e\xf1\xc6\xcb\xe4\xa4\x7b\x4e\x95\x73\x7e\
\x7f\x9a\x9c\xfd\x87\xb3\xe5\xac\x3f\x9c\x5b\xb4\xdc\x76\x8d\x9c\
\xd4\xb7\x83\x6c\x21\xf9\x6d\x96\xac\xb6\xa3\x01\xa3\x42\x54\x43\
\x48\xc6\xd9\x6f\x1b\xd3\xed\xd4\x6b\xed\x28\xd2\xbe\x89\xbc\x78\
\xef\x69\x72\x2e\x0d\x2f\xea\x99\x85\x46\xf2\xef\x4c\x39\x8b\xfc\
\x7c\xf4\x02\x39\x95\xfc\xd5\xc6\xe1\xcd\xc9\x78\xa2\xa6\x9d\x93\
\xd1\x86\xd2\x92\x72\xf1\x83\x52\x54\x88\x47\x91\x22\x42\xc2\x06\
\xb2\x5d\x0e\xd0\x21\xbd\x9b\x16\xce\x10\x1a\x88\xaf\x0b\x58\x0b\
\xd2\x2f\xda\x03\x3e\x7b\xbd\x9c\xd0\xab\xad\x6c\xe3\x16\x8e\x95\
\x9b\xb7\x33\xd5\xda\x74\x31\x55\x5a\xb6\x57\x1e\x58\xa8\xac\xdc\
\xa4\x95\x69\x3c\x72\xba\x19\x71\xff\xf3\x66\xd6\x9b\xd1\x0d\x20\
\x2f\x5a\x85\x7d\xfd\x47\xa6\xd5\xbc\x23\x4d\xa5\x46\x2d\x22\x9f\
\x53\xa0\x6c\xd1\xde\x54\x6d\xdd\xd9\xfe\x5f\xaa\x5c\x56\x4e\xbe\
\xb5\x6c\x28\x7f\xfd\xd3\xf9\x72\xc6\x77\xeb\x74\xe4\x62\x34\xa1\
\x23\x22\xbf\xe9\x98\x34\xff\x29\x07\xdb\x61\xd1\x40\xdc\xa4\x65\
\xdc\x40\x8a\x0e\xb6\x91\x90\xe1\x51\x13\x74\x1b\xa4\xbf\x7e\x8e\
\xd1\x9e\x6c\x9a\x7e\xce\xd3\xef\xde\x04\x9d\x8a\x1a\x7a\xce\xf2\
\x6f\x1e\x95\xc3\xce\x57\xf1\xa4\x7a\x25\xf9\x87\x94\x2e\x6f\x6a\
\x74\xe8\x69\xe7\x1a\x10\x61\x66\xbd\xa1\xf2\x3e\xfe\x50\x85\x45\
\x46\x8d\x24\x26\xdd\x54\xc9\x7d\x22\xef\x5f\x40\xa4\x21\x62\x40\
\x18\xf5\xc7\x57\x4d\xdd\x3e\x23\x8c\x94\xab\x68\xca\x97\x95\x7f\
\xe3\xf2\xf2\x8f\x3f\xca\x11\xe4\x23\xf9\x69\xf3\x95\xfc\x25\x9f\
\xbd\xfc\x1e\xe3\xe7\x7f\x47\xab\x0f\x86\x27\x2c\xe3\xc6\x51\x34\
\xc8\x19\x45\x36\x4b\x59\x3b\x8a\x10\x3f\xea\x29\x5f\xcc\xda\x26\
\x5d\xb4\x27\x1b\xa8\x1c\xab\x05\x38\x4d\x0b\x72\xae\x16\x58\x6e\
\x8b\x12\xdc\x2a\x2b\x5e\xbe\x45\x4e\xe8\xd7\x5e\x36\xeb\x2d\xff\
\x57\xaa\x42\x65\xd3\x72\xd6\x2a\x33\x79\xfb\xbf\xac\x28\x13\x55\
\x31\xf7\x75\x32\xb2\x4d\xdd\xf9\x9d\xe9\x74\xf4\x45\xa6\x5c\xf5\
\xda\x76\xb4\x68\xdb\x58\x5e\x7a\xec\x12\x39\xdd\xea\x3a\x9a\x67\
\xe4\x9f\x9f\x8f\xce\x72\x36\xd7\xe6\x33\xf9\x4d\xbe\x6b\xfe\xdb\
\x72\xd0\xf2\xb0\xe5\xc2\xe8\xc1\xe4\x62\x3c\x7a\x14\x2d\x6c\x23\
\xf1\x15\x75\x9c\xf3\xec\xe4\x15\x7e\x48\xeb\xac\xec\xdb\x49\x0b\
\xab\xaf\xca\xc1\x23\xb4\x00\x27\x29\x67\x6a\x61\x62\x89\xd9\xdd\
\x50\xe8\x01\x69\x28\x4f\x7b\xa3\xca\x65\xab\xe4\xb2\x3a\x55\xe5\
\x23\x6e\x5d\xf5\x80\x0e\x66\xe0\x75\x7f\xc9\xe9\x51\xa3\x2a\xd3\
\xbe\x42\x46\x23\x46\x4e\x0c\x00\xb5\xba\xf4\xd3\x5c\x2d\x67\xb2\
\xca\xca\xe7\xc7\x4c\x97\x5b\x3e\x67\xb4\xd0\xfc\x21\x9f\x6c\x7e\
\x05\x1b\x86\x67\xd5\x42\xcf\x63\x7d\x3d\x21\x8a\xfa\xda\x7c\x27\
\xff\x29\x07\x26\x65\x99\xc5\x8f\x15\xf4\xbd\x07\xdb\x48\xfc\x91\
\x44\x99\x65\xe5\x5d\x26\x08\x3d\x8b\x56\x1b\x2d\x38\x94\xf6\xfe\
\x5a\x78\x78\xd4\x22\x1f\x4f\xd7\xdf\x18\x51\x16\xea\xf9\x44\x12\
\x59\xa6\x85\xe9\x35\x14\xbf\xb1\xb0\xba\x6f\x44\x37\x59\x5b\x5a\
\xe4\xbf\x8c\x2a\x2d\xa6\x2d\x35\x13\x36\x7c\x9c\xe7\x84\x5e\x49\
\x23\xef\x33\x65\xc7\x7f\xcc\x81\x87\x9c\x61\xca\x54\xaa\x66\x47\
\x8b\xce\x2d\x64\xe7\xe6\x2b\xe4\x14\xcd\x1b\x3b\xc2\x92\x2f\x7e\
\xfe\x2c\xf3\xf3\x0b\xd3\x31\xf9\x37\xdd\xe6\x27\xf9\x4a\xfe\x7a\
\xf9\xdc\xc6\xe6\x3b\xf9\x4f\x39\x30\x59\x18\x8f\x1c\x7b\x1f\x39\
\x8d\x04\x19\xd7\x8d\x26\x84\xf2\xdc\x2c\x75\xb4\x70\x1b\x6b\xa1\
\xb5\xb6\xb3\xc9\x5b\xa5\x8f\x0e\xfd\xb8\x62\x30\x71\x86\xfb\x39\
\x8b\x97\xf2\x1c\x55\xea\x56\x93\xbf\xf1\x98\xca\xcd\xda\xd8\xc9\
\x3a\x5c\x43\xc2\x8e\x85\x25\x85\x4e\xbf\x18\x7c\xd3\x7a\xab\x7b\
\x89\x66\x59\xe5\x0a\xf2\xc5\xd1\x3a\x5a\x7c\xf6\x07\x39\x12\x73\
\x34\xef\x6f\xf3\x21\x3c\x5a\x90\x5f\xe4\x9b\xe6\x9f\xcd\x47\xcc\
\xb9\xe4\x2b\xf9\xeb\xb9\xd9\xd7\xb1\x22\x15\xae\xf6\x8c\x1a\xb1\
\x52\x5e\xbc\x10\x6e\x28\x5a\x60\x9e\x13\x21\xb6\x78\x7f\x76\x1d\
\xb7\x09\x2d\xcc\x1e\x5a\xd0\x03\xb5\x90\x47\xa9\x12\x89\x78\x90\
\x78\x54\xa1\x27\xd5\xc6\xf2\xf6\x1d\x72\xec\xf8\xde\xf2\xe7\x32\
\x22\xff\x91\xf2\x95\x4c\x8b\xa9\x4b\xad\x35\xa9\xa4\xe8\x2a\x28\
\xdc\x93\xb7\x7f\x61\x5a\x2f\x3c\xd6\x94\xae\x58\x95\xd1\xe2\xbb\
\x7e\x07\xca\xfa\x6d\x57\xc9\x49\xe6\x49\x59\x91\xe7\x68\xa1\xf9\
\x64\xf3\x0b\x1d\x83\xfc\xf3\xdc\x4f\xec\xac\xb9\x9f\xbf\x4c\xd4\
\x7a\xe2\x54\xdc\x30\x8a\x37\x72\x1a\x0a\x72\xaf\xaf\xc4\x6b\xc1\
\x55\xd2\x02\x26\x18\x74\x5d\x2d\xf4\x26\xb6\xd7\x23\x0c\x8f\x1b\
\x55\x36\x78\xa3\x8a\x9d\x6d\xf7\x47\x15\xad\x34\x8b\xcd\x9b\x32\
\xe7\xcd\x3b\xe5\xa4\xab\x8e\x90\x87\x4f\x99\x2f\x6f\x1f\x3e\x45\
\xbe\x99\x32\x50\xfe\x57\xbf\x86\x67\xee\xac\xd8\xb0\xb9\xe9\x75\
\xc1\x1d\xb6\x12\xa6\x3a\x2b\x5e\x54\xb4\xfa\x93\xea\x17\xfd\xaf\
\x7a\xd8\x54\x6b\xd3\x59\xd3\x5b\xda\x54\xaf\x28\x66\x4c\x2f\x31\
\xcb\x27\x48\xf6\x91\x53\xe5\xbb\xe3\x66\xca\xc7\x17\xaf\x90\x8d\
\xdb\xaf\x95\x73\xcc\x8b\x32\x4f\x3b\x83\xf9\xbf\x78\x33\xe7\x7b\
\x8e\x16\xe4\x17\xf9\x46\xfe\x69\x3e\x92\x9f\xe4\xab\xcd\x5f\x44\
\x29\xf2\x3b\x6e\x18\x25\x0b\x39\x8d\x85\x5e\x0d\x93\x30\xde\xbd\
\x51\xa3\xca\x16\xe9\xae\x95\x60\x80\xd9\x25\x83\xcc\x36\x19\xf7\
\xeb\xc3\xe4\x37\x87\x4c\x92\x37\x17\x8d\x96\x1f\x17\x8f\x15\xb3\
\x6c\xbc\x98\x15\x13\xc4\xac\x9c\xe8\xf1\xd0\xc9\x62\x96\xe8\xf1\
\x5e\xed\xc4\xe8\xa8\x62\xcd\xa0\x4d\xc6\xce\x36\x63\x1f\x7d\xdb\
\x5b\xf8\x54\x00\xa6\xdc\x4c\xc9\xf3\x59\x5f\xd2\x72\xe6\x4a\x53\
\x2a\xcb\x8e\x16\xe6\xc0\x66\xf2\xbf\xf9\x23\xc4\xac\xd2\x74\xeb\
\x7b\xd9\x77\x58\xa1\xe4\xbd\x96\x8c\x13\xb3\x70\xb4\x98\x79\x23\
\xe4\x97\x45\x63\xe4\x6f\x27\xcf\x96\x3b\xdf\xb9\x43\x3b\x8b\x7f\
\x48\x37\x43\xbe\x44\x8c\x16\x36\x1f\x9d\xe9\x36\x6e\x14\x25\x1f\
\x39\x0d\x25\x62\x54\xd1\x63\x95\x1e\x38\x5b\x3a\x0c\xec\x24\x37\
\x75\x6f\x2d\x7f\xeb\xad\x95\x7e\x48\x17\xaf\xa7\x3d\x68\x80\x98\
\x59\x43\xc5\xcc\x1f\x29\x66\x91\x36\x88\xe5\xe3\xe5\x97\x33\x17\
\xc8\xf3\x0f\x9e\x2d\xd7\xef\xba\x5d\x4e\x55\x71\xe3\x50\xed\x71\
\x0f\xb9\xf9\x78\xb9\xa8\x45\x3d\x79\x8d\x47\x55\x6c\xd4\x82\xd9\
\xee\xec\x29\xcf\x7c\x9d\xd1\x6c\x79\x26\x44\x2f\x9a\xf6\xc2\x4f\
\xa6\xcf\xc5\x77\x9b\xaa\x07\x1c\x68\x1b\x05\x16\x39\x3c\x87\xbf\
\x7c\x44\x0e\xc7\x8b\xf8\x97\x8d\xb2\xea\x6d\x1d\x0d\xd7\x5c\x22\
\x97\xe9\xa8\xb1\x4e\x1b\xc8\xd7\x07\x8f\x12\x33\x7b\x98\x98\xa9\
\x03\xc5\x8c\xeb\x23\x66\x58\x57\x31\x7d\x0e\x54\x65\xfd\x00\x31\
\x6d\x9b\xca\x17\x1d\x9a\xc9\xfd\x2b\xc7\x49\x6f\x9b\x87\xf1\x68\
\xb1\x7f\xc0\x15\xea\xe8\x2e\x52\x39\xab\xbc\x9c\x56\xb1\xbc\xbc\
\x53\x45\x45\x8f\x9a\x55\xc5\xd4\xaf\x29\xa6\x79\x7d\x31\xed\x9a\
\x8a\xd1\xc6\x62\x06\x74\xd2\xc6\x31\x4a\x36\xff\xee\x64\x39\xe3\
\x83\x7b\x64\xa9\x8e\x2e\xf3\x54\x66\x5f\xa0\x23\xce\x32\xdc\xcd\
\x73\x7c\xad\xe0\x2e\x59\xec\x5c\xd9\xab\xb6\xe9\x6a\xcd\xa6\x45\
\x65\xf9\x62\xfe\xc2\x4e\xea\xf5\x1d\x65\x5d\xe2\x59\xad\xe8\xaf\
\x3e\xcc\x49\xa3\x8a\x46\x56\xf1\x26\xde\xaf\x8e\x0c\xf3\xf5\x3d\
\x66\xfc\xf3\x4f\x32\xfb\xce\xd3\xe4\xe8\x79\x23\xe5\xc1\x9e\xed\
\xe4\xc7\x0e\xcd\xc5\xb4\x6c\x20\xa6\x61\x2d\x31\xb5\x34\x3f\xc8\
\x17\xcd\x1f\x26\x09\xff\x5e\xae\x8c\x5c\x57\xbd\x82\xb4\xb4\x99\
\x28\x71\xc3\xd8\x97\x31\x5c\xf9\x44\xa9\x52\x92\xad\x34\xa5\x4b\
\x8b\x29\x5b\x46\x2b\x41\x39\x31\x95\x2a\x88\xa9\x51\x59\xbe\x3c\
\xb0\xb9\x9c\x7b\xea\x3c\x19\xfa\x23\xe2\xc5\x8b\xd2\x55\xf5\x11\
\x3c\x83\x27\xe8\x88\x41\xb4\xf5\x83\x6d\x50\x69\xcf\xca\xe3\x55\
\x40\x5f\x99\xef\xd1\x5a\x9e\xd4\x7b\x9b\xee\xa7\x5f\xe7\xb9\x93\
\x14\xa1\xa8\x85\xc1\xa0\xcf\x25\xf7\x68\xdf\x5e\xde\xb4\x6a\x28\
\xaf\xbc\x7a\xab\x1c\x6f\x15\x70\x1a\x07\x0a\xb8\xdf\x38\xf4\x1d\
\x72\x16\x58\xfd\x8c\xdb\xfa\x06\xe9\x6d\x5e\x96\xb6\x7a\x4e\xf3\
\xd5\xb3\xa5\x47\x9b\x26\x72\x54\xd5\x8a\xf2\x46\x05\xcd\x0f\xf2\
\x85\xfc\x29\xad\xf9\x44\x5e\xf1\x6e\xca\x97\x95\xf3\x94\x31\xf6\
\x31\x9c\xa8\xfc\x50\xe9\x0a\x1a\x66\xf3\xa9\x85\xff\x55\xd9\xd2\
\x72\x56\xc3\x2a\xd2\xde\x8a\x11\xd9\x52\x31\xfb\x69\xcf\x43\xd8\
\xac\xd5\x63\x5e\xd4\x90\x11\xca\x83\xb4\x62\xb1\x4d\x01\xa6\x4f\
\x6f\x09\x2d\x0d\xe4\x75\x59\x7c\xd8\x64\xf9\x6d\x96\xc8\xbf\x1a\
\x8e\x9c\x61\xc6\xad\x79\xcf\x4e\xbe\x45\x55\xe4\xc2\x22\x4a\xf9\
\xe4\x6d\x5f\x98\x16\x33\x57\xf1\x5e\xff\x9d\x32\x40\xfe\x64\x3e\
\xd4\xc6\xe0\x46\x0f\xd2\xcb\x3a\x76\x2c\x53\x18\x24\xbc\xb5\x1d\
\x7d\xb5\xc1\xb0\xef\x48\x33\xbb\xea\x90\x90\x41\xaa\x51\x31\xca\
\xd6\xae\x2d\x8d\xb4\x71\x1c\xa3\xf7\xb2\x93\xa7\x2e\xaf\x02\xfc\
\x52\xf9\x6b\xa5\xaa\x61\x31\x4a\x32\xce\x54\xfe\x4b\x19\x2c\x5c\
\xc7\xed\xca\x51\x4a\x2b\x32\x58\xd9\x9a\x95\x89\x9e\x32\x5f\x43\
\x15\xd4\x46\xda\xc3\xb6\x35\xdb\xed\x46\x37\xd1\x0d\x44\x47\x0e\
\x7a\x6b\x15\x4d\x5e\x95\xf2\x95\x4d\xbf\x2b\xff\x98\xb6\x2b\x7b\
\x41\x91\x51\x64\xc8\xad\x9b\x4c\x56\xe3\x56\xa6\x5e\x55\xf9\xe0\
\xb7\x27\xca\x85\x36\x74\x29\x23\x08\xe9\xf5\xdc\xfe\xe7\xe9\x48\
\x32\x55\xdf\xc3\x86\x0b\xd2\xf7\x6c\xcf\xaa\x40\xc2\x15\x69\x23\
\x49\xe4\x9a\xde\x53\xf9\x07\x65\x54\x1e\x7e\xa7\xbc\x42\x19\x37\
\x94\x12\x86\x33\x94\x89\x1a\xc6\x4d\xca\x56\x4a\x07\x2f\xd0\x83\
\xb3\x76\x85\x1d\x20\xbd\xd9\x62\x16\x05\x4d\x57\xce\xb7\x3d\xb1\
\x13\xb1\x3e\x92\x83\x09\x11\xaa\xf7\xf8\x19\x57\xf6\xa9\xcf\x7e\
\x5b\x64\xca\xf9\x1e\x64\x12\x53\x9f\xdd\xf9\xb8\x5f\xd9\xf7\xec\
\xd3\x4e\xb6\xda\x35\x23\x4e\xff\xf0\x97\xe7\x2a\x09\x62\x37\x16\
\xcb\x9d\x36\xf2\x4e\xfa\x1e\xcd\xf5\xb7\x3d\x16\x38\xd9\x9c\xc9\
\x8d\x3a\xca\xf3\x94\xe1\xfc\x84\xdf\x2b\x2f\x53\xc6\x28\xe6\x18\
\xa4\xfc\xab\x32\xaa\x10\x69\x18\x4d\x95\xb9\x60\x1b\x87\xb3\x70\
\xb9\xd1\x83\xd0\xa1\x3a\x7a\xe8\x28\xc2\x2a\xb9\xe1\x4a\x56\x26\
\xee\x11\x3a\x14\x0b\x56\xdd\xca\xf2\x61\xe5\x56\x9d\x55\x31\x7f\
\x2a\x43\xc5\x3c\x3b\xc7\x51\x70\xd4\x1f\x5e\x31\x93\xb6\x7d\xee\
\x4d\x42\xbe\x92\x1d\x71\x6e\x72\x22\xda\x8d\x79\xf8\x35\x53\x77\
\xc0\x38\x53\xa9\xb4\x7c\x7e\xfc\x4c\xb9\x09\x03\x42\x8e\x98\xc5\
\xe8\xc7\x28\xb8\x5e\x47\x43\x1d\x15\xed\xe8\xe8\xbd\x27\x81\x27\
\xbc\xa0\x73\x5e\x24\x93\x64\x56\x2a\x1a\xca\x25\xca\xa8\x3c\xfe\
\x58\x39\x5f\x09\x62\x65\xbe\x98\xe1\x56\xe5\xcf\x4a\x57\x58\x4e\
\x6e\xfe\xbd\xd2\x59\x60\xf6\x80\x9b\x2b\x41\xff\xc8\xb5\xa7\xc7\
\x46\xe9\xa2\x9f\x5e\xe8\x50\xa7\xa0\x7b\xbd\xb0\x17\xa1\x44\x7b\
\xe7\x1e\x6d\x3c\xc5\xbc\xdb\xa9\xbf\xf1\x26\x0b\xd3\x51\xcc\xf5\
\xdc\x39\xef\x79\x16\xa8\xa6\xe3\xe6\xd8\xf9\x14\xee\x25\x65\xca\
\x99\xfa\x03\xc7\x99\xe1\x77\x3d\x6d\x7f\x4f\x77\x44\xb2\x0a\xfb\
\xa5\xf7\x69\xf5\x2e\x67\x5a\xab\xc2\xfe\xfe\xbd\x72\xb4\x75\x51\
\x0f\x8e\x22\x9e\x03\x27\x86\x87\xc1\xd6\x03\xda\x2d\x70\x7a\x52\
\xf5\x10\x6f\xb3\xd1\x54\xfc\xa8\xea\x2a\xaf\x52\xba\xfc\x0e\x72\
\xbd\xb2\xb1\x32\x46\x31\x00\xb6\xfa\x37\x94\xe1\x42\xfa\x42\x39\
\x49\x99\x10\x54\x00\xdb\x5b\xfa\x7e\x5c\x2e\x74\xa8\x55\xce\x89\
\x0c\xb2\xcd\x2e\x23\x8d\x0c\x1d\x7a\xdc\x0c\xb9\xa9\x52\x29\xf9\
\xbc\xc1\xf0\xa9\x66\xec\x63\xef\xa4\xa6\x98\xeb\xa8\xc0\x28\x33\
\xf5\xb9\xef\x4d\xd7\x53\xae\xce\xae\xd4\xa4\x95\x4d\xab\x56\xe4\
\x97\x6f\x38\x4e\x2e\xfe\xe1\x71\x39\x74\xcd\x25\x72\x7a\x2f\xcf\
\x22\x96\x5d\xae\x76\x23\xd3\xe1\xb0\x73\xb2\x71\xc3\x4f\x55\xb7\
\x41\x61\x9f\xb4\xf5\x73\xd3\x72\xd6\x61\xdc\xfb\xa7\x83\x06\xc8\
\x43\x88\x82\xa4\x5b\xdf\x29\x77\xd0\x39\x02\x32\xac\xf7\xa3\xc4\
\xef\x90\x26\xd6\x38\x71\xa7\x0d\xe7\x93\xce\x2e\xba\x7d\x95\xaf\
\x28\x83\x79\x0f\x11\x71\x17\x2b\x63\xec\x45\x60\x9d\xfa\x5a\xe9\
\x0a\xc5\x8d\x1a\x88\x53\x88\x02\x09\x61\x1b\x07\xbd\x24\xbd\xa5\
\xf6\x9a\xd6\x0b\xd5\x0f\x1d\x6a\xd6\xf9\xab\x13\xa3\x42\x87\x6a\
\x6f\x4c\xaf\xdc\x0a\xc5\xbc\x6c\x96\x8d\xca\x9e\x97\x5f\x96\x0b\
\x4a\x3d\xe2\xbe\x9d\xa6\xd1\xc8\x69\xfa\xe4\xf2\x26\xab\x8c\x7c\
\x35\x7b\x98\x3c\xf0\xe6\x1d\x72\xac\xd9\xa1\xf7\xc5\x22\xe6\xb8\
\x53\x96\xe1\x40\x78\xc4\x54\xb9\xb5\x66\x25\xf9\x54\x34\x89\x2c\
\x5e\x1a\x7c\xcb\x46\xeb\x5b\x95\xd7\xa8\x62\x15\xf6\xdf\x6d\x36\
\x59\x8d\x0e\x30\x75\xab\xc8\x87\x39\x0a\x3b\xa2\x96\xb7\xa6\xdc\
\x0b\x3a\xc7\xb6\x6e\xb8\x93\xe0\x63\xb5\x56\x5a\x6b\x07\xd1\xc0\
\xba\xaa\xbb\x28\xf1\x5e\x07\x92\x4a\x23\x29\xab\x3c\x5c\xe9\xca\
\xc1\xf1\xbf\xca\x9b\x95\x31\xf6\x02\x1e\x54\xfe\x4f\x19\x2e\x94\
\x83\x95\x79\x5a\x55\x6c\xe1\xd3\x4b\xaa\x62\xea\x14\x73\x1d\x2d\
\x76\xef\xe9\xe1\x45\x45\x61\xd1\x55\xee\xd0\xa1\xda\x1b\x4f\xea\
\x27\x0f\xeb\x2d\x7e\x22\x34\xe8\x41\x4f\xfd\x3b\x7a\xdd\x88\x1b\
\x2d\x76\xea\x68\xb1\xfa\x8a\xec\xac\x86\x2d\x6c\xfa\x3a\x34\x91\
\xe7\x6e\xa1\xc2\xd2\x10\x82\x73\x15\x21\xda\xdf\xb7\xe9\xa7\x9e\
\xf3\xd8\x85\x72\x46\xff\x03\x65\x33\x6e\xf8\x65\xab\xd7\x35\xed\
\x97\x9f\x62\xf7\x12\x09\xc7\xf4\xcd\x21\xa2\x9e\xb2\xf3\x09\x97\
\xd9\x67\xf6\x6a\x27\xdb\x72\x29\xec\xcc\x89\x78\x7e\x67\xb8\xae\
\x13\x16\x68\x00\xeb\x39\xac\x61\x22\x7f\x2b\x01\xfb\x2b\x9d\x69\
\x38\x48\xd6\xe4\xd3\x88\x62\x14\x11\x1e\x52\xfe\xa2\x0c\x16\xc2\
\x5b\xca\xee\xca\x3c\xe1\xf7\x8c\x7b\x2a\xe6\x6e\x4f\x0f\xa7\x98\
\x7b\x66\xdd\x5c\x8a\x39\xbd\x31\xbd\x72\xc5\x26\x6d\xcd\xd0\xdb\
\xb7\xef\x31\x7a\xd0\xbb\xa3\x3b\x0c\xbd\x7d\x9b\x69\x38\x74\x92\
\x3e\xa9\xbc\xa9\x5c\x56\x3e\x5f\x38\x5a\xee\x7e\xf7\x1e\x39\x86\
\xd1\x81\xf5\x27\x7a\xcf\xdd\xae\xe5\xd0\x6b\x80\x78\x13\x7b\xdf\
\x21\xbf\xeb\x79\xb8\xb4\x70\x1d\xae\x23\xc7\xcf\x92\x1b\xeb\x54\
\x65\x4e\xa7\x94\xa9\xdd\x7d\x90\x5d\xdc\x45\x43\x0c\x37\x52\x4f\
\x61\x7f\xdd\x38\x85\x1d\x91\x30\x99\xc2\xae\xc7\x7b\x65\xa0\xb0\
\x47\x01\x7d\x8f\x95\x9a\xc1\xb2\x81\xc4\x04\x88\x1b\x49\x11\x80\
\xde\x3b\xdc\x38\xd0\x41\xda\x2b\x53\x42\x9e\x8a\xf9\x46\x19\xaf\
\xdc\x53\x31\xd7\x0a\xe6\x14\xf3\xae\x27\x5f\x93\xd3\x53\xdb\xd1\
\x42\x1b\xca\x94\x1d\xdf\x98\x8e\x47\x5f\x98\x5d\x5e\x75\x07\xce\
\xe9\xd0\x4c\x76\xde\x71\x8a\x9c\x8f\x4b\xb9\x55\x94\x69\x14\xbe\
\x5b\xb9\xd2\x2d\x5b\x45\x7c\x63\x85\x1e\xcb\x57\x89\xdb\x85\x9b\
\x39\xdf\x39\x8e\xde\xc3\x79\xbb\xdd\xf0\xb9\xd7\xd3\xb2\x1c\x5d\
\x65\x60\x47\xd9\xa4\x35\xf7\xa7\xd2\x55\x6b\x99\x36\x0b\x8f\xb3\
\xf1\xb6\xe6\xbe\xbf\xbb\x91\xd0\x70\xfa\xfe\xfa\x01\x4d\x4b\x59\
\xd3\xba\x91\xbc\x9c\xb3\xa6\xdc\x7b\x9f\xdd\x0a\xfb\x16\x19\x6f\
\xdf\x3b\x1c\x91\x24\xf3\x85\x4f\x28\xe8\x6b\x94\xc1\x32\x82\x34\
\x92\x72\xca\x18\x85\x84\xeb\x95\x3f\x28\x83\x99\x4e\xe3\x68\xa7\
\x4c\x09\x14\xb4\xed\x15\x19\x3d\xe8\x25\xe9\x2d\x83\x7b\x7a\xb0\
\x54\xd7\x5b\x54\xb5\x87\x62\x7e\xec\x74\xb9\x99\xde\xb8\xfe\xa0\
\x89\x66\xcc\x9f\xdf\xb0\x15\x70\x2e\xa3\xc5\x6d\x5b\x4d\xfd\x41\
\xe3\x34\x2d\x65\x4d\xb5\xf2\xf2\xf7\x65\xe3\xe4\x8e\x0f\x1f\x90\
\xa3\x13\x2e\x42\xa2\x62\xa2\x03\x10\xec\xc0\x5b\x16\x4c\x84\xc6\
\xd1\xaa\x0f\x8c\xe2\xd3\x7e\xe7\xb8\x17\x0c\x01\xf7\x73\xce\xf7\
\x1a\x4b\x60\x64\x71\xa3\xca\xc9\xf3\xe5\xba\x86\x35\xe5\x1d\x5e\
\xaf\x66\xa7\x3e\x36\xf6\x96\x0d\xbe\xa0\xe9\x9b\xb8\xf9\xff\x4c\
\x8b\x19\x87\x18\xad\xdd\x3f\xba\x19\x76\xae\xd5\x77\x8e\x56\xd8\
\x09\x39\x8a\x5b\x7b\x30\xae\x55\xfa\xa2\x16\x40\x07\x7c\x40\x19\
\x2c\x2b\x74\x44\x62\x13\xc7\x28\x04\x1c\xaa\xfc\xb7\x32\x98\xe1\
\xdf\x2a\x53\x12\xab\x1c\xdc\xe8\x81\xee\xa1\x15\xc3\x8b\xca\xc8\
\xe8\xc1\x9a\x87\x6d\x76\x51\x90\x37\x7a\x78\x9b\xf8\x2f\xd5\x06\
\x63\x15\x73\x76\x7a\x6a\xd9\x40\x76\xb1\x12\x6f\xe0\x0d\x8f\x7b\
\x4b\x56\x0f\x3d\x2b\xbb\x5c\xcd\xfa\x36\x2d\x5d\x0f\x90\xa7\x1e\
\x38\x5b\xce\xb1\x7a\x45\x70\x11\xd2\x16\x7f\x11\x12\x23\x02\xa3\
\xc3\x06\x99\xa1\x64\xcf\xf3\x51\xca\x41\xfa\xbb\x0d\xdf\xa9\xf4\
\x62\xe8\xf2\xe9\x2d\x63\x65\xa6\x7b\x10\xe7\x71\xbe\xbd\x8e\xeb\
\x09\xb3\xc3\xfd\xf4\xbe\xf6\xfe\x34\x14\xfc\xc1\x76\xc8\xb2\x6d\
\x57\xcb\xc9\xc3\xbb\xca\x5a\x55\xc0\x7e\x28\x55\xa9\xba\x69\x35\
\xf7\x08\xdb\x40\x46\x3e\xf0\x82\x29\x57\xa7\x31\x33\xec\xef\x13\
\x5d\x3e\x27\x74\xa9\xaf\xb0\xe7\x84\xed\xd9\xea\x85\xed\xd1\x63\
\xad\xf4\xde\xf5\xb5\xb1\xe4\x37\xf0\x02\xe2\x56\xd8\xc2\xc5\xec\
\xfb\xaf\x94\x31\x0a\x10\x07\x2a\xed\xf2\xd7\x10\x27\x2b\x53\xde\
\x49\xc9\x8e\x1e\xce\x72\xc5\x3a\x76\x82\x3d\x10\x7c\x79\xad\x5d\
\x3a\xda\x43\x8f\x0d\xd3\x8a\xe7\x74\x0f\x3b\x7a\x58\xcf\x5d\xed\
\x75\x51\xcc\xb5\xe2\xfd\xc8\x02\x24\x6f\xc9\x6a\x39\x53\xbd\x82\
\x7c\xba\x7c\x82\xdc\x41\xdc\xdd\xe0\x68\xa1\xca\xf5\x52\x6d\x54\
\x8b\xb4\xb1\x1c\xac\x9c\x63\x9e\x91\xe9\xe6\x39\x99\x60\x5e\x92\
\xe1\xe6\x75\xe9\x67\xde\x97\xae\xe6\x13\x69\xab\xe9\x69\xac\xac\
\xa9\xac\xac\x64\xd3\x9b\xf2\xfe\x27\xdf\x39\xde\xc8\xfc\x53\xf5\
\x22\xce\xe7\x3a\xae\x7f\x59\xc6\x99\x17\x65\x8a\x8a\x59\xb3\xed\
\xfd\xbd\xe7\x2c\x31\x4f\xe9\x33\x79\xb6\x56\x7e\xcc\xc5\x67\x2c\
\x94\x6b\x1b\xf9\xa3\x0a\x69\xae\xd5\xb5\xbf\xcd\xb3\xde\xed\x55\
\x61\x7f\xc7\xae\xa0\xf4\x42\xf7\x78\x3e\x5a\xf3\xf4\x7f\x42\x89\
\x8e\xd6\xef\x7d\x75\x24\x21\xc8\x75\x13\x6b\xf6\x76\xcb\x68\xd3\
\xd7\x45\x1c\x7a\x29\x7f\x54\x06\xcb\xed\xff\x94\xb8\xf9\xc4\x28\
\x20\x44\x29\xe5\x38\xcb\xd5\x50\xa6\x0c\x0a\xd8\x16\xb4\xdf\x40\
\x58\x13\x62\x95\x73\x94\x53\x2f\xa6\xef\x08\xed\x4d\x0f\xd2\x4a\
\x37\x5b\x2b\xe0\xc1\x5a\xd9\x16\x9a\xb7\x65\xbe\xf6\xba\x67\x35\
\xa8\x25\x1f\x70\x0b\xad\x21\xdf\xf5\x69\x2b\x4f\x5d\x7b\xa4\x5c\
\xbf\xf9\x4a\xb9\xf4\xc1\xb3\xe4\xa6\x5b\x4f\x90\x7b\x2e\x3f\x4c\
\x1e\x3d\x6f\x89\x6c\x39\x7b\xb1\x6c\x39\x71\x8e\xec\x3c\x62\xaa\
\xbc\xb2\x72\xa2\xbc\xb4\x68\xb4\xbc\x30\x7b\xa8\x3c\x3b\xa6\x97\
\xec\xea\xdf\x41\xde\xd3\x91\xe6\x9d\x36\x4d\xe4\xad\x26\x75\xe5\
\xf5\x3a\xd5\xe5\xa3\xaa\x95\xe4\xab\x8a\x15\xe4\xab\x0a\xe5\x76\
\x93\xef\x7a\xfc\xeb\xda\xd5\xe5\xf3\xc6\x75\xe4\x3d\x3d\xff\x4d\
\xae\xd3\xeb\xdf\x1d\xd9\x43\xde\x3f\xa8\xbf\xbc\x77\xf0\x48\xd9\
\xa5\xf7\x7f\x51\x9f\xf3\xf2\x31\x33\xe4\xd5\xd5\x73\xe4\x15\x6d\
\x14\xcf\x9e\xb7\x54\xb6\x5e\x76\x98\xac\xb9\xf5\x44\xb9\xf7\x0f\
\x67\xcb\xcd\xbf\x39\x4a\x7e\x37\xa4\x8b\x3c\x9f\x55\xce\x73\xbd\
\xa9\x5e\x49\xfe\x79\xf2\x6c\xb9\x56\x1b\xde\x2c\x6d\x70\x0b\xcc\
\xb3\xfa\x9e\xcf\x68\x23\xde\xae\x0d\xc4\xe9\x22\x9e\x2e\xc6\x42\
\xa9\x54\x5c\x50\xf2\x02\x3a\x07\x96\xc5\x60\xd9\xc1\x9d\xca\x18\
\x05\x80\xd1\x4a\x7a\x9c\x60\xe6\x7e\xa5\x4c\x59\xef\x70\x88\xea\
\x01\xbf\x78\x54\x1a\xff\x6a\xa5\x8c\x39\x69\xae\x1c\xab\x15\xed\
\xba\xa3\xa6\xc9\x9a\xa3\xa7\xcb\x73\xc7\xcd\x94\x5d\xc7\xce\x94\
\x0f\x4e\x98\x2d\xff\x77\x40\x43\xf9\xa9\x76\x35\x31\x23\xba\xdb\
\x25\xab\xe6\xd0\x83\x76\xaf\xcc\xe3\xfb\xd2\x71\x62\x16\x8d\x11\
\xa3\x95\xd6\xcc\x1a\x26\x66\x0a\x8b\x90\x7a\x8b\x19\xea\x16\x21\
\xb5\x14\xd3\xba\xb1\x18\x6d\x14\xa6\x6e\x75\xad\xa4\x95\xc5\x54\
\xca\x12\xe3\xdc\xca\xcb\x04\xdc\xca\xf9\xe4\x3b\xc7\xf9\x9d\xf3\
\x38\x9f\xeb\xb8\x9e\xfb\x70\x3f\xee\xcb\xfd\x79\x0e\xcf\xe3\xb9\
\x3c\x9f\x74\x90\x1e\xd2\x45\xfa\xdc\x2a\x48\x3e\x27\xf4\x15\xa3\
\x0d\xdd\x34\xaa\x6d\x57\x49\xfe\x70\xc4\x14\xf9\xea\xc8\xa9\xf2\
\xc9\xe1\x53\xe5\xfd\xc3\xa7\xc8\x0b\x87\x1f\x24\x6b\x0e\x9b\x2c\
\x37\x1e\x35\x55\x8e\x3f\x6d\xbe\x8c\x7f\xeb\xf6\x02\x9b\x0d\x6f\
\xa2\xcc\x15\xd9\x52\x89\x47\xf0\x51\xca\x18\xf9\xc4\x9f\x95\xe1\
\xd1\x03\x57\xec\x4a\xca\x54\x80\xff\x15\xeb\x40\x56\x6a\x05\xbc\
\x42\x2b\xe0\x1f\xb4\x02\x3e\xab\x95\xef\x93\xaa\x15\x25\xbb\x5e\
\x4d\x31\x07\x34\x12\xd3\xad\xb5\xb7\x9a\x90\x4a\x34\x63\x88\x5d\
\x2c\x65\x97\xd6\xce\x1b\x21\x66\x81\xfe\x4f\x65\x73\x0d\x62\x99\
\xdf\x28\x58\x92\xcb\xca\xbc\x39\xc3\xc5\x4c\x1f\x2c\x66\x62\x3f\
\x31\xa3\x7a\xd8\xfb\xfc\xbb\x4f\x3b\xf9\xa2\x53\x0b\xf9\x5c\xef\
\xcd\x28\xf0\x61\xbd\x1a\xf2\x6c\x8d\x2a\xb2\xb9\x52\x05\xd9\x52\
\xbe\x9c\x6c\xd6\x34\x6c\x2d\x53\x46\xee\xd7\x34\x5d\xa3\x69\xfb\
\x4d\x98\x7a\xfc\x37\xfa\xfb\x75\x65\x4b\xcb\x83\x7a\xfe\xd6\xca\
\x7a\x9d\x36\x94\x2d\xf5\x6b\xca\xb6\xe6\xf5\x65\x7b\xfb\x66\xb2\
\xb3\xcb\x01\xf2\x41\xef\x76\xf2\xa5\x36\x94\x7f\x8d\xed\x2d\x5f\
\xf1\x7c\xd2\x41\x7a\x48\xb3\x5b\x32\xbc\x1c\x6a\x9a\x69\x24\x2c\
\xbb\x5d\xac\x8d\x88\xf7\x62\x29\x31\xef\xc8\x3b\xcc\xd4\x77\xe6\
\xdd\x07\x6b\x1e\x74\x6d\xa5\x79\xd2\x50\x8c\xa6\xd9\xe8\x48\xc6\
\x7a\x99\xcf\x34\xbd\x2f\x68\xde\x3d\xa6\xe9\xc2\xa5\xe7\x74\xe5\
\x6c\x25\xfa\x5f\x05\x65\x2a\x18\xab\x0c\x96\x21\xc4\x6f\x2e\x46\
\x3e\x40\x23\x78\x5f\x19\xcc\x54\x2c\x21\x1d\x95\x61\x30\x49\x75\
\x9c\xf2\x77\xca\xe7\x94\x61\xb9\xd7\x92\x5e\x9a\x1e\xba\x5c\x59\
\x6f\x91\x14\xab\x09\xb5\x02\xdb\x95\x84\x3d\xda\x7a\x8d\x44\x7b\
\xe5\x5f\x66\x0d\x91\x7f\x6b\xe5\x7a\x57\x7b\xd9\x5d\xab\xe7\xca\
\xf6\x73\x16\xc9\xe3\xbf\x5e\x25\xf7\xff\xe6\x68\xb9\xe5\x96\x13\
\xe4\xaa\x7b\xcf\x94\xf3\x1e\xbe\x40\x56\x6f\xbc\x5c\x56\xbd\x70\
\xa3\xcc\xf9\xfc\x21\x19\x69\xde\x53\xd1\xe4\x2b\x69\x65\xb2\xad\
\x6e\x51\x5b\x59\x45\x69\xe5\x77\x7d\x76\x81\x80\x51\x50\xc9\xfc\
\x44\x96\x92\x35\x1c\xb5\xcd\x77\xaa\x2f\x7c\xac\xba\xca\x47\xd2\
\x59\xd9\xfb\xe7\xf5\x32\xf6\x8d\xdb\xe4\xe0\xe7\x6e\x90\x55\x6b\
\x2f\x95\x93\x1f\x3a\x4f\xce\xbd\xe7\x74\xb9\xe2\xb6\x93\xe5\x3a\
\x15\xb9\xee\xba\xec\x50\x79\xf0\xcc\x45\xb2\x49\x47\xcd\x27\x57\
\x4d\x96\xb7\xb4\x31\xfd\x9d\xc6\x31\xbc\x9b\xea\x28\xed\xc4\x68\
\xe3\xb3\xa3\x15\x79\x43\x1e\x91\x57\xe4\x19\x79\x47\x12\x22\xf8\
\x93\x92\xc5\x54\xec\xbd\x48\xe3\xa1\x31\x84\x3d\x19\x50\xd8\x99\
\xab\x0a\x5e\xf7\x77\x25\x0e\xa6\x31\x32\x04\xab\xd7\xf0\xa9\x0a\
\x66\x2a\x9b\xd0\xb0\xce\xe3\x06\x25\x73\x12\x41\x57\x93\x3c\xa9\
\x85\xfc\x8b\xf6\x84\x6f\x6b\xcf\xbc\x56\xc5\x97\xeb\xeb\xd6\x94\
\x93\x3b\xb7\x90\xc5\xb3\x87\xc8\x84\x1b\x4f\x94\xfe\xe6\x15\x69\
\x67\xbe\x90\x0e\xe6\x5d\xe9\xa5\x0a\xf1\x20\x55\xae\x87\xab\x8c\
\xce\x06\x32\xe3\xa0\x2a\xb1\x63\xf5\x13\x0b\x14\x5b\xad\x0d\x54\
\xfd\xa5\x37\x32\x7b\x36\xbb\x2f\x79\xae\xe3\xde\x26\x32\x2e\xc0\
\xc1\x6e\x33\x69\xa6\x4a\x6e\x0e\xb8\xde\xde\x87\xfb\xe1\x3f\xe6\
\x4d\x72\xe6\x04\xa2\xe0\xf9\x4a\x76\x82\xea\xa2\x69\xea\xa3\xc7\
\x98\xdb\x18\xa1\x4a\xf7\x58\x25\x3b\xe2\x4e\xd6\xf4\x4e\xb1\x7a\
\xd6\x56\x99\xa8\x7a\xd6\x38\xbd\xcf\x08\xf3\x57\x19\xa0\x0d\xac\
\xab\x7e\xef\x7c\xf3\x6a\x19\x3c\x67\x98\x4c\xe9\xd1\x46\x96\x34\
\xd4\xbc\xa9\xac\x79\xa4\x0d\x64\x2d\x79\xa6\x79\x17\xe5\xb9\x90\
\x8c\x74\x66\xac\xd7\x67\x3d\x09\x96\xab\xf0\xdc\xc8\x7f\x94\xe7\
\x28\x63\x64\x88\x0b\x95\x98\x72\x83\x99\x9a\x0a\xff\xa1\xa4\x30\
\xce\x55\xa2\x20\x0e\x54\x36\x53\xee\x01\xad\x70\xbb\x23\x33\x6a\
\x45\xb3\x56\x2d\x5c\x4e\x30\x75\x7a\x95\x8d\xed\xcb\x3a\xdb\x46\
\xb0\x5e\x3f\x09\xce\xcc\x42\x23\xb6\x22\xc3\x3c\xcc\x9c\x01\x26\
\x51\xae\x25\x68\xdd\x23\x81\x00\x07\xbe\x62\x9b\xdf\x86\x11\x86\
\x6d\x28\xce\x5c\xcd\x7c\x0e\xcf\xe3\xb9\x58\xe5\x58\xcb\x42\x63\
\xf1\xdf\xc1\x3a\x5e\x6e\x96\x4e\x7a\xac\x9b\x1e\xeb\x61\x8d\x11\
\x34\x6a\x5c\xf9\xfd\x6d\xda\xf4\xb7\xf6\x86\x98\xba\x7e\x10\x38\
\x8c\x17\x6e\x01\x95\xff\xc8\x30\xd0\x4d\x70\x12\x9d\xa2\x3c\x55\
\xc9\x9a\x18\x5c\xdd\xa3\xca\x22\x19\x11\x9d\x1f\x55\xc6\xc8\x10\
\x77\x2a\x19\xbe\xa3\x32\x17\xd2\x78\xb6\x2a\xb1\x68\xcd\x51\x76\
\x56\xa6\x5c\x19\x73\x2a\x9a\x6f\xd9\xa2\xa2\xf9\x93\x87\x55\x6c\
\x25\xc1\xcc\xe9\x55\x38\xac\x39\x44\x0f\xac\x6b\xbf\x33\x42\x60\
\x01\xc3\x3d\x9c\xc9\x34\x2c\x62\xf8\x2e\x15\x62\xa3\x48\x84\x70\
\x63\xc1\xe2\x64\x47\x96\xdf\xeb\xc8\x42\x83\x65\xb5\x20\x69\x27\
\xc0\x37\x0d\x87\x95\x92\x70\xab\x34\xb4\xdf\xd9\x88\x94\xc6\x4d\
\xe3\xc2\x07\x8b\x77\x61\xd4\x3b\x5b\xef\x95\xd9\x7b\xb4\x51\x62\
\x7e\xa7\xe1\xa0\x3f\x46\xf9\x66\x05\xf9\x92\x32\x46\x86\xa0\xf2\
\x87\x33\xf4\x72\x25\xb6\xf5\xb2\xca\x02\x43\xae\xc6\xe2\x2a\x9b\
\x27\x1e\x95\xb7\x66\x4e\xcf\xd4\xe9\x91\xe3\xae\x31\x40\x5f\x7c\
\xca\xa0\x32\x15\x28\x12\xbe\x03\x69\xf7\xd6\x79\x64\xd1\x78\xfc\
\x4e\x20\xcb\x7a\x31\x33\xfa\x04\xdf\xa7\xf0\xde\xe5\x00\xe5\x0a\
\x65\xb8\xc3\x7b\x4f\x59\xa0\x65\xb9\x3f\xe1\x2f\xca\xb0\x05\x8b\
\x35\x1e\x05\xa6\xf4\x26\xc0\x5e\xad\xe8\x85\x09\xdb\x80\x82\xf4\
\x1b\x83\x66\x6c\x51\xbc\x33\xc6\x95\x60\x59\x42\x26\x33\x63\x64\
\x08\x14\xf1\xb0\xef\xd5\x4a\x65\x96\xb2\xb0\x40\x45\x19\xa2\xbc\
\x58\x89\x72\xc9\x28\xb6\x4b\xf9\xb6\x12\x2b\x4c\xa6\xe4\x7a\xdc\
\x2e\x36\x29\x71\xd5\xe7\xfe\x3c\xa7\x20\x9d\xf7\xe8\xa5\x51\x86\
\x77\x28\x49\x33\x2b\xfb\x70\x35\xc7\x9c\xba\x41\x79\xac\xb2\x9a\
\x72\x6f\x61\xa8\x32\x58\x96\x90\xe0\x19\x31\x32\x04\x26\xc3\x6f\
\x94\xc1\x0c\x25\x38\x40\x5a\x33\xe8\x69\x60\xbc\x92\xf0\x40\x7f\
\x52\x52\x79\xab\x2b\x0b\x03\x35\x95\x23\x94\x1b\x95\x54\x64\xfe\
\xcf\x2f\x5a\x28\x31\x4c\x30\x21\x57\x95\x03\x21\x74\x53\xbe\xa8\
\xfc\x5c\x89\x98\x4a\x3e\xf2\x19\x45\x7e\x83\xa7\x28\x7b\x28\x0b\
\x02\xe5\x95\x0b\x95\xc1\xb2\x44\xdc\xba\x4b\x19\x23\x43\x30\xb7\
\x81\xad\x3c\x98\xa9\x54\x28\x2a\x43\x41\x83\x9e\x9c\x0a\x44\x40\
\x82\xca\x1c\x28\x02\x30\xcf\xc3\x68\x82\xd7\x6b\x7e\x7b\x76\xf2\
\x0a\x13\x38\x06\x8b\x44\x60\x21\x59\x3d\x25\x73\x14\xc9\xd8\x50\
\xd9\x49\x79\xb4\x12\x1f\xb8\xdf\x2a\xf3\x8b\xfa\xca\x70\xf8\x20\
\x66\xd3\x0f\x53\xc6\xc8\x07\x6c\xdc\xdb\x10\x0b\x63\x72\xa9\xb9\
\x12\x51\x88\x4a\x91\x48\x1e\xef\xa0\xa4\x32\x23\x37\x93\xae\xbc\
\xc8\x79\x98\x31\x59\xb7\x1d\x05\x74\x29\x2c\x75\x8c\x58\x54\xcc\
\xfc\x80\x11\x8f\xc9\xba\x64\x0d\x24\x5d\x90\xa6\x1b\x95\x74\x1c\
\xf9\x8d\x7d\xd5\x56\x19\x56\xd0\x69\x7c\x71\x70\x87\x7c\x82\xc0\
\x64\xb8\x48\x07\x33\x96\x1e\x2d\xbf\x15\x2a\x0c\x1a\xc8\x9b\x4a\
\xdc\x58\xa2\x1a\x48\x7e\x95\xd8\xa8\xeb\x0b\xb2\x81\x60\xd9\x7b\
\x41\x59\x90\xf1\xa9\x06\x2b\x69\x1c\x18\x4b\xf2\x03\x46\xca\xe3\
\x95\xc1\x32\x64\x12\x11\x11\x33\x46\x3e\xc1\xd0\x8c\x29\x30\x98\
\xb9\x90\xc2\x2b\x48\xe4\xd5\x40\x40\x2d\x25\x7a\x0a\xd1\x3a\x16\
\xa4\x40\xce\x63\x4e\x80\x77\x88\x42\xba\x0d\x04\xb1\xe7\x3a\xe5\
\xd3\x4a\xc4\x29\xf4\x8d\xc7\x94\x54\x60\x8c\x09\xcf\x2a\x31\xa5\
\x46\x01\x11\x12\x71\xe6\x9f\xca\x77\x13\xf0\x03\x25\x62\x8f\xcb\
\x63\xfe\x3f\x44\x99\xdf\xce\x81\xd5\x9e\xcc\x9a\x07\xcb\xef\x33\
\x65\x41\x97\xe1\x7e\x0b\xdc\x11\xc2\xca\xfa\x13\xca\x46\xca\x82\
\x02\x71\x9e\xb0\xf6\x30\xc1\x85\x42\x19\x05\x94\xeb\x3e\x4a\xd6\
\x32\xa0\x58\x3b\x62\x9d\x41\xec\xa3\xc0\x87\xf9\xc7\x20\xe7\x21\
\x5e\x71\x5d\x14\xa8\x78\x77\x28\x53\x69\x20\x33\x94\xcc\x56\x13\
\x8b\xaa\x28\x80\x83\x27\x62\xe2\x1f\xed\xb7\xcc\x41\xa7\x82\x03\
\x66\xb0\xec\x10\xb5\x78\xef\x18\x05\x08\xd6\x0f\x04\x33\x19\x5e\
\xa4\x2c\x48\x4b\x13\x33\xbf\xf8\x77\xe1\x9e\x82\x9b\x0b\xeb\x50\
\x58\x03\xcf\x71\x44\x3d\x1a\x41\xb0\x37\xc5\x85\x85\x30\xa7\xf4\
\xea\xf7\x2b\xa9\x4c\x2c\x0b\x46\x8f\x61\x44\xca\x0b\xf4\xac\xf4\
\xfa\xb7\x29\xf3\x32\x5d\xd3\x49\xa0\x63\x60\x8d\x2a\x2a\x60\x75\
\xa2\x51\xe2\xa1\x90\x09\x18\xb5\xc2\x96\x2b\xc8\x32\x69\x1a\x4e\
\x8c\x02\x04\x36\xfe\x70\x84\x76\x88\xdb\x75\x7e\x15\x48\x07\x64\
\x65\x17\x91\x63\x11\x07\x02\x60\x4d\x03\xba\x0f\xa2\x08\x8a\x3a\
\xa2\x0a\xae\xdf\x61\x93\x33\xb3\xc2\x34\x1c\xc4\x1f\x6c\xfc\xc4\
\x8b\xc2\x0d\x86\x90\x9c\x10\x07\x4c\xc4\xaf\xb3\x95\x28\xa9\xf8\
\x8d\x31\x3a\x24\x03\xa2\xd8\xb5\x4a\xbc\x94\x31\x14\x44\x01\x1d\
\x04\x85\xfa\x6e\xe5\x3d\x29\x12\x13\x6b\xb2\xa5\x03\x07\x29\x49\
\xdf\x00\xfb\x2d\x7d\x60\x59\x73\xb1\xca\x1c\xb9\x5f\xbc\x9a\xb0\
\x90\x30\x55\x89\x1d\x3f\x98\xe1\x78\x98\x52\x90\xf9\x6d\x24\xcc\
\x1d\x60\x71\x62\x32\x2f\x91\xd5\x09\xd0\x2b\x62\x2a\xad\xad\x4c\
\xe6\x22\x41\xa5\xa6\xf1\x60\xa5\x61\x4d\x0a\x0d\x2c\x48\xee\x41\
\xcf\x4c\x63\x61\x64\xa0\x41\x25\x5a\x04\xc6\xbd\x58\x37\xf2\xbc\
\x32\x51\x03\xc1\xd5\x1f\x1d\x02\x57\x73\x9e\x89\x7b\x79\x22\x32\
\xba\x61\x55\x42\x5c\xc3\x6c\x1e\xe9\xc8\xa9\xc8\x4f\x03\x41\x14\
\x0d\xc7\x10\x40\x0f\xa1\x63\x88\x51\x88\x98\xab\x8c\x8a\xdc\x9e\
\x52\xc0\xb8\x04\x98\xa8\x64\xdd\x09\x16\xa0\xbd\x31\xd3\xcc\xc8\
\x82\xfe\x93\x28\x54\x67\x2a\x23\x08\xeb\xf6\xb1\x14\x11\x6d\x72\
\x75\x1a\xa4\xd3\x49\x24\xde\x65\xda\x40\xa2\x1a\x07\x1e\x11\xe8\
\x22\x31\x8a\x00\x88\x29\x51\x8d\x84\x99\xdf\x8a\xca\x74\x40\x8f\
\x8a\xd8\x74\xbe\x32\x91\xa8\x01\x98\x51\xc6\x0a\xe4\x2a\xe0\x91\
\x4a\xc4\xbe\x44\x60\x34\xc0\x2d\x86\x73\x4f\x88\x20\x6e\x1f\x97\
\x2a\x71\x07\xa1\xf2\x20\xee\x30\x2a\x25\x02\x3a\xc8\x27\x4a\xf4\
\x9b\x09\xca\x71\xca\xae\xca\x2a\xca\x74\xc0\xc8\x85\x1e\x83\xfe\
\x13\x9e\x10\x45\x2f\x40\x3c\x9c\xae\x64\x24\x22\x4f\x78\x66\xaa\
\xf3\x4e\x34\x64\xae\x0d\x06\x11\x87\x6c\x8f\x80\x6b\x4d\x8c\x22\
\x04\x3d\x1f\xa6\xc2\x60\x41\x40\xe2\x2e\xa5\x3b\xd3\x8e\xde\x81\
\xbe\x10\x55\xe1\x31\xd1\x62\x71\x41\x01\xa7\x42\x86\x11\x65\x02\
\x4d\xc7\x2c\xca\xba\x8a\x57\x95\xe8\x33\xc9\xfc\xb2\x68\x54\xcc\
\x07\x91\x0e\x46\x51\x46\x1d\x46\x15\x44\x42\xe6\x3f\x4e\x53\x9e\
\xac\x64\x55\x1f\x79\xc0\xfb\x20\xb6\x21\x9a\xa1\x4f\x61\x08\xc0\
\x8c\x8d\x31\x81\x7d\x3e\x88\x7a\xcf\xc4\x28\xb3\xdb\x17\x28\x51\
\x9c\x31\x1f\xb3\x18\x6d\xa6\x92\x86\x48\xbe\x30\xb2\xa5\x62\x8e\
\xa5\xb1\x61\xd8\x08\x97\x07\xd6\x47\xf4\x9c\x18\x7b\x01\x98\x79\
\x29\xd4\x70\xa1\x60\xbf\x67\x31\x4f\x3a\x15\x95\xc2\x45\x46\xc6\
\xa9\x8f\x8a\x47\xd0\x65\x2c\x5a\xb8\x82\x20\xaf\x17\x16\x10\x0b\
\x51\x98\xb1\x94\x61\x6a\x8e\x02\x15\x0c\x93\xeb\x34\xfb\x6d\x4f\
\x50\xa1\xa9\xec\x4b\xed\xb7\xdc\x60\xa4\x22\x4f\x82\xe6\x61\x97\
\x2f\xe8\x1e\xcc\xa3\xb0\x2a\xd3\x89\x77\xe1\x3c\xc3\x80\xc0\x68\
\x9d\xac\x92\x33\xba\x62\x8d\x0b\x97\x03\x23\x73\xbc\xa4\xb6\x18\
\x80\xc2\x27\x92\x78\xb8\x80\xb0\x36\x25\x52\x40\x83\x40\xb4\xa1\
\x57\x65\x1d\x3b\x4b\x7c\xa9\xac\x38\xed\x21\x02\x21\x37\x63\x1d\
\x0a\x92\xe8\xf1\x98\x76\x93\x05\xad\x43\x01\x67\x45\xe3\x2d\xca\
\xf0\xf5\x78\x29\xf3\x89\x19\xd9\xad\xc4\x63\xb2\x0f\x71\x2f\x0c\
\x2a\x26\x62\xce\x32\xfb\x2d\x1a\x98\x9a\xa9\xa0\x51\x0d\x99\x11\
\x10\xf1\x0d\x51\x2e\xaa\xc3\xc0\xfa\xc5\xe8\x91\xcc\x69\x90\x11\
\x0b\x0b\x62\x78\x24\xc1\x08\x71\xb5\x32\x9c\xef\x90\x51\x6c\x6f\
\x7a\x0e\xc7\x08\xa1\x9f\x12\x6b\x50\xb8\xa0\xb0\x72\x61\x39\x49\
\x54\x58\xc8\xdd\x54\x10\x1a\x53\xa2\x09\xbd\x64\x88\xaa\x74\xe9\
\x8c\x5c\x0e\xe8\x18\x8c\x12\x8c\x06\x0e\x58\xb8\x70\x93\x27\xaa\
\x64\x22\xcc\x52\x22\xc6\x20\x7e\x31\x5a\x44\x01\x7d\x05\x93\x32\
\x3a\x45\x14\x10\xc1\xd0\x1b\xd0\xaf\xa2\xc0\x24\x26\x1d\x05\x46\
\x02\xc0\xfb\x31\xe2\xa0\xc0\x87\xf3\x9b\xc6\xec\x76\x99\x8a\x51\
\x0c\x41\x45\x0b\xfb\x6e\x39\xd2\x50\xc2\x13\x8b\xf4\xa0\x78\x0c\
\xa3\xd3\xec\x4d\xe0\x6c\xc8\x64\x28\x7a\x84\x03\xee\xfe\x34\xde\
\x44\x3a\x00\xe2\x0f\x6e\x38\x74\x0c\x7c\xb2\x36\x3c\xca\x22\x45\
\xe7\x41\x03\xc1\xa4\x8b\x85\x29\x0c\x2a\x34\xf9\x43\x3e\xd0\xe0\
\xc2\xa0\x41\x60\x94\x20\x3e\x19\xa3\x55\xd4\xee\xb7\x2c\x6c\x63\
\x54\x44\x51\x8f\x51\xcc\x41\x23\x60\x46\x3b\x2a\xf4\x0f\x6e\x0e\
\xf4\xa4\xce\x3f\x0a\xab\x17\x8b\x98\x98\x43\xc0\xcd\x84\xca\xc2\
\xec\x6f\x51\x91\x51\x02\xf1\x09\x85\x9a\x06\x12\x9c\x83\x21\x6d\
\xce\xe5\x9c\xdf\xf1\x8b\xc2\x95\x05\x2b\x16\x4a\x36\x32\x3e\xba\
\x16\x60\x94\x40\x5c\x43\x94\xc2\xff\x8b\xc9\x38\xee\x8d\xf2\x8f\
\x31\x03\xa7\x43\xc4\x1e\xee\x85\xa2\x4e\x1e\xe0\x25\xf0\xba\x92\
\xe7\xf2\x1c\x1a\x24\x1e\xc8\xe4\x03\x0d\x09\xbf\x33\x8e\xe1\xf7\
\x45\x63\x08\xfb\x53\x41\x46\x68\xd2\x12\x7b\xe5\x96\x40\x20\x56\
\xd1\x50\xc2\x6e\xd6\x8e\xe8\x00\x4e\x89\x44\x9e\x1e\xa9\xa4\x72\
\xb1\xac\xb7\xa8\xc8\x5c\x03\x23\x44\x5e\x2e\x33\xf8\x87\x51\xe9\
\x4f\x52\x32\x3b\x1f\x74\x65\x71\x22\x1d\x66\x6a\xee\x89\xb9\x7b\
\x95\x92\xf3\xa3\x5c\x5e\xb0\x38\x35\xf0\xfe\xcd\x81\xbb\x07\x23\
\x00\x33\xe0\x34\x18\xf4\xac\x75\x4a\xcc\xb4\x51\xf9\x47\x83\xeb\
\xa2\x04\x99\x88\x95\x31\x8a\x09\xa8\x5c\x28\xe1\xb8\x87\x04\x0b\
\xd8\x89\x08\xf4\x8c\x2c\x94\x22\x22\xc7\xfe\x0e\x66\xfd\x19\x41\
\xc2\x0b\xd5\x1c\x89\x24\x43\xa7\x83\x37\x00\x88\x1b\xc6\x3e\x06\
\x74\x8d\xa7\x94\x51\x85\x0f\xa9\x18\x34\x26\xe6\x27\xf6\x87\x68\
\x1b\x54\x70\x66\xe1\x89\x93\x8b\xb8\x15\x95\x27\x10\x5d\xe8\x08\
\x65\x8c\x7d\x1c\xae\xc7\xa3\xf2\x33\xb9\x46\xc1\x47\x55\x08\x47\
\xe6\x42\x96\x28\x99\x79\x4e\xe4\x06\x5f\xd2\xd0\x5a\xc9\x8c\xf7\
\x7d\xca\xa8\x77\x76\xfc\x54\x89\xfb\x4d\xac\x5f\xec\xe7\x40\x61\
\xc7\xc2\x85\x22\x1b\x76\x93\x08\x13\x25\x96\x99\x67\xd6\x7b\x30\
\x63\x9f\x6c\xf6\xbb\x38\x80\xca\x8d\x15\x0b\x67\x46\xde\x2f\xea\
\x9d\x82\x64\xc2\x11\xdd\x23\xe5\x2d\xed\x62\xec\x7f\xc0\xe4\x8a\
\xa5\x07\x8b\x4f\x54\x25\x0a\x93\xf9\x00\xce\x47\x04\x41\xd1\xc7\
\x25\x05\x19\xbd\x30\xc3\x13\x39\x30\x12\xa2\x70\x13\x73\x0a\x83\
\x03\xa3\x1d\xab\x14\x53\x0d\x05\xca\xc4\x28\x8a\x38\x16\xaf\x58\
\x9f\x88\x91\x11\xa8\x78\xf4\xaa\x84\xc9\xcc\x24\x56\x30\xae\x2f\
\xc4\xaa\xc2\x9d\x9e\x99\x67\x66\xc7\x99\x10\x64\xef\x13\x9c\x02\
\x31\xd1\xb2\x2a\x91\x39\x19\x66\xe7\xb1\x6a\x71\x8c\xdf\x38\x07\
\x17\x13\x1a\xdf\x95\x4a\x7c\xa7\xd0\xa1\xc2\x41\xbd\x53\x21\xd6\
\x3c\x26\x1e\x69\xcc\xe8\x62\xc5\x7d\xf4\x8b\x51\x42\x41\x4f\xcb\
\x92\x54\xdc\x51\x08\xcc\x86\xab\x7c\x26\x0d\xa7\xb0\x88\x77\x30\
\x93\x79\xf8\xa9\x61\x71\xa2\x31\xc6\x2b\xf8\x62\x14\x0b\xa0\xcf\
\xd0\xfb\x63\x2a\xa5\xb7\x67\x92\x0d\x6f\x5b\x7a\x6e\x94\x5e\x46\
\x93\xbc\xf4\x9c\x28\x32\x53\xcd\xce\x5a\xcc\x66\xa3\x07\x31\x7b\
\xce\xa8\xc4\x7a\x76\x26\xfd\x98\x55\x2f\x8c\x78\x61\x31\x62\x14\
\x1b\xa0\x50\x33\xb1\x17\xeb\x02\x31\x62\xc4\x88\x11\x23\x46\x8c\
\x18\x31\x62\xc4\x88\x11\xa3\x48\xc1\x04\x57\x61\xad\x1d\xc0\x25\
\xa4\x30\xd7\x25\x10\x90\x01\x0b\x15\x33\xd3\xac\x01\x67\xfe\xa2\
\x30\xc0\x64\x23\x1e\xb9\x79\x39\x51\xb2\x15\x33\x4e\x86\xac\xa9\
\x67\xd2\x10\x4b\x5a\xba\x24\xa2\x7b\x2b\x25\xf7\x29\x0c\x23\x01\
\xae\xfb\x84\x45\x65\xfe\x06\xf7\xfa\x78\x51\x55\x1e\xc0\x39\x2e\
\xca\x9c\x59\x50\x4c\x25\xd0\x40\xa6\xc0\xda\xc4\xda\x75\x9e\x83\
\x0b\x47\x61\x00\xd7\x75\x16\x4f\xb9\x90\xab\x34\x48\x7c\xa8\xa2\
\xc0\xb2\x62\x26\xff\x38\xcf\xcd\x81\xb0\xb2\xcf\x11\x07\x4c\xb7\
\x36\x86\xcf\xf0\xef\xd0\x3d\x87\x40\x0d\x99\x84\x4f\x62\x35\x26\
\x8b\xd5\x58\x94\x15\x15\x57\x8c\x46\x87\xf9\x9b\x67\xe0\xce\x92\
\x68\x92\x12\xf7\x79\xd6\xc8\x13\x6b\x98\xfb\x14\x54\x50\xc0\x12\
\x07\xd6\x5d\x50\x28\x44\xe5\x28\xa8\xfd\x38\x08\xe2\x40\xe4\x0e\
\xd6\xa7\x27\x5b\x2f\x9e\x5f\xd0\x40\x88\x0d\x4c\x61\xb3\x36\x83\
\xde\x97\xde\x3b\x55\xba\x9e\x3a\xd5\x10\x45\xcc\xa2\x33\x07\xc2\
\xf3\x70\x0b\x21\x88\x75\xb0\x97\x67\x2d\x0b\x81\x26\xf8\x1d\x4f\
\xe4\x30\x68\x6c\xac\xaf\xe7\x77\x26\x37\xa3\xc0\x62\x2c\x7e\x67\
\x55\x62\x26\xa0\x22\xd3\x00\xdc\x6a\x43\x46\x0a\x46\x09\xf7\x8e\
\xa4\x97\x05\x58\xfc\xc6\xd2\xe2\x30\x18\xf1\x19\x2d\xf9\x9d\x19\
\x7f\xa2\x58\x26\x0a\x62\xb1\x5f\xc0\x35\x10\xbc\x69\x71\xb7\x66\
\xa6\x97\xb5\xcf\x99\x92\x4a\xc2\x2a\x38\x96\x9e\x32\x11\x57\x50\
\x3b\x23\x45\x81\x06\x42\xe8\x7e\x57\x11\xf0\xd9\xa2\x62\xa4\x4a\
\x2a\x3b\x41\x16\x98\x8d\x4f\x07\x34\x14\xe7\xb6\x12\xbc\x96\x77\
\x77\x01\xa2\x59\xd7\x02\xf0\x03\x83\x2e\x12\x24\xb1\x85\xf9\x9d\
\x30\x40\xc0\xfd\x8e\x38\x4a\x25\x76\x23\x7a\x2a\x0d\x84\xc6\xe0\
\xae\x0f\x12\x51\x8f\xdf\x70\x8e\x64\x94\x60\x52\x93\x85\x57\x80\
\x67\xb0\xfa\x91\x67\xe0\x56\x0f\xdc\x35\x2c\x16\x63\xdd\x3d\xbf\
\x31\x9a\xb1\x6a\x92\x06\x15\xbe\xbf\x3b\xbf\x30\x44\xc0\x62\x07\
\x1a\x08\xd1\xf7\x58\xb2\xc9\x42\x26\x32\x33\x3f\x64\x59\x29\x61\
\x49\x59\x04\x85\x98\x51\xd8\x0d\xc4\x8d\x20\x7b\x6b\x2d\x7b\x78\
\x04\xa1\xe2\x93\x1e\x66\xdc\x11\xa3\x82\xa4\x47\x76\x9b\xa1\x46\
\xfd\x0e\xdd\xec\x7e\x2a\x0d\xc4\x89\x4a\x79\xd1\x3d\x33\x48\xca\
\x26\x2a\x02\x0d\xcf\xe7\xb7\xbc\xbc\x0c\x88\x37\x40\x23\xd9\xe7\
\xe1\x46\x10\x82\xa6\x25\x8b\x28\x98\x0e\x08\x9b\x43\xcf\x55\xd8\
\x23\x08\x8a\x30\xeb\xd8\x29\xb0\x44\xb1\xaa\x32\x05\x85\x4f\xb4\
\x44\xd7\x5b\x06\x89\xa8\x84\x38\x1a\x5e\xc8\x15\x1c\x41\xd0\x03\
\xc2\xe0\x5a\x22\x1b\xf2\x7b\xa2\x1d\xa8\x96\x2b\xf9\x3d\x95\x06\
\xc2\x48\x4d\xc7\xe6\xa2\x9c\x14\x05\xf0\x90\x26\x7d\x48\x08\xfb\
\x05\x58\xb0\x43\x03\xc1\xaf\x08\x6f\x59\x2c\x43\x88\x5a\x51\x64\
\x6d\x02\x8c\xfa\xcd\x11\x4b\x0f\x3d\x9b\x73\x51\xef\xa9\xcc\x04\
\x28\x86\x6e\xc3\x1e\x94\x45\xb7\xbb\x2d\x9f\xee\x7f\x9c\x13\xdd\
\x3a\x77\x46\x2e\xf4\x1e\xf7\x7b\x90\x9c\x0b\xdd\xd2\xde\x64\xb1\
\xad\x1c\x70\x57\x8f\x0a\x8e\xe0\x88\x71\x20\xec\x73\x85\x78\x8a\
\x53\x22\xbf\x33\x92\x20\xb2\xa0\xe3\x40\xf4\x32\xe2\xf9\x12\x71\
\x84\xdf\x09\xe2\xc0\x5a\x7d\xf7\x3b\x96\x32\x94\x7c\x7c\xc7\xf8\
\x1d\x8f\xe3\xbc\x40\x79\x71\x2e\xa2\x52\xd0\xc2\x46\x43\x24\x36\
\x17\xc7\xb8\xaf\x7b\x46\x3a\xa4\x93\xe3\x1e\xc1\x10\x4c\x74\x08\
\xd4\x17\x9e\xc9\xd2\x82\xfd\x02\x28\x70\x38\xf1\xd1\x63\x11\x16\
\xf3\x77\x81\xcf\x20\x09\xbc\x86\x9b\x39\x3d\x16\x15\x13\xd9\x9f\
\xf5\x0e\xe1\xf3\x20\x8a\x1d\xab\xe4\x88\x50\x92\xe9\x08\x42\x65\
\x41\xc6\xa7\x30\x88\x53\x9b\x5f\x60\x02\x66\x94\x24\x4d\x28\xa1\
\xc9\x90\x4c\xb6\xc6\x45\xde\x55\xf0\xb0\x09\x9b\x06\xc3\x71\x22\
\x20\xe2\x10\x19\x16\x3d\xa9\x54\xce\xdb\x18\x11\x05\xab\x56\xf0\
\x1c\xc8\x31\x44\x54\xe2\x83\x25\x5b\x39\x49\x1a\x09\x66\x4d\xc4\
\x49\x14\xff\xe0\xc2\x2a\x2a\x36\x0a\x38\xcf\xe1\x5e\x44\x5d\xa1\
\xb3\x49\x95\xae\x73\x43\xaf\xa3\xb3\x73\x20\x3d\x04\x18\x27\x02\
\x25\x1d\x40\xd4\x2e\xbe\xfb\x1c\x88\xba\x81\xb2\x4a\x38\x1b\xd6\
\x3c\x24\x5b\x9b\x80\xa2\x47\x6f\xce\xf9\x2c\x0c\x2a\x4c\x04\x1b\
\x08\xeb\x31\xf2\x0b\x14\x4e\x2a\x0d\x23\x25\xab\x10\x33\xc5\x6a\
\x25\x69\xa2\x13\x48\xd7\xf4\x99\x8a\x15\xab\x20\x40\x43\xa5\x3c\
\xd1\x2d\xc3\xfb\xab\xa4\x02\x46\x10\x46\x4f\x1a\x34\x96\xba\xfd\
\x16\x0c\xc1\x2e\x3a\x3b\x61\x3e\xa9\xf4\x64\x6e\x94\x49\x14\x85\
\x98\x30\x36\xf4\x7c\xdb\x95\x58\x48\xf0\x78\x8d\x3a\x37\x4c\xe6\
\x0d\xd2\x8d\x7e\x5e\xdc\x1b\x48\x70\x04\x41\x77\xc3\xfa\x83\x85\
\x8a\x49\x38\x2a\x25\x11\x0f\x83\x24\xf6\x16\x9d\x11\xa1\x4d\xb9\
\x9e\x50\x3d\xac\x0f\x09\x9f\xe7\xc8\xca\x42\xb6\x51\x60\x61\x17\
\x69\x4f\x07\x94\x21\xf3\x17\x58\xda\xd8\x8e\x22\x5d\x20\x0e\xd2\
\x40\x30\xda\xb8\x70\x42\xfb\x1d\xe8\xcd\x90\xcb\x29\x2c\x86\x75\
\x44\x8f\x64\x44\xac\x62\xb8\x46\xc4\x72\x93\x60\x1c\x8b\x3a\x37\
\x48\xce\xa3\x51\x61\x65\x4a\xc7\x2c\x18\x6c\x20\xc4\xa4\x45\x9e\
\x8f\x9a\x79\x4e\x85\x5c\x8b\xec\x8c\x3e\x92\xdf\x06\xc2\xa4\x21\
\x69\x0a\xee\x62\xcb\x3c\x01\x11\x0f\x11\x77\x88\xad\xcb\x96\x74\
\x6c\xb9\x40\xa3\x81\x34\x1a\x46\x8e\xf0\xf2\x5b\xd2\x43\xa7\x13\
\x3c\x97\x46\x44\x04\x79\x74\x19\xc4\x54\x56\x50\x32\xb2\xa7\x03\
\xf4\x48\xee\x1d\x7c\x56\x26\x44\x52\xd8\xdb\x91\x2e\xf7\x0a\x08\
\x6b\x49\x25\x27\x13\x08\xec\x9c\xea\x44\x19\x16\x16\x7a\x16\xf4\
\x0f\x26\xe5\x0a\x13\xf4\x5c\x58\xa8\x9c\x3c\x8f\x5d\x9e\xcf\x4c\
\xc9\xf5\xf4\x88\x34\x90\x4c\x0b\x9d\x11\x23\x13\x11\x09\x65\x9a\
\x99\x6d\x3a\x0a\x3a\x0c\xac\x7b\x2c\xb4\x62\x72\x96\x4a\x58\x58\
\x5e\x00\x89\x40\xf9\xf3\x0e\xd4\x81\x5f\x71\x20\xc6\x6e\x30\xc4\
\x33\x0a\x90\x41\x64\x14\xbd\x3a\x72\x26\x05\xce\xd6\x03\x88\x07\
\x6c\x8c\x83\xf2\x1e\x24\xe7\x12\x4d\x9d\x4c\xa5\xb0\x99\x2c\x73\
\xfb\x63\x04\xc9\xb5\x34\x24\xee\x77\xbb\x12\x25\x3d\x9d\x91\xa3\
\x28\x80\xb8\x87\xa5\x27\x5d\x24\x6a\x20\x58\xbc\xa8\x68\x51\x62\
\x24\x9b\xfd\x70\xfe\x1a\x25\x91\xe6\xdd\xde\x1d\xee\x7a\xb6\x4a\
\xe0\x3b\x8a\x31\x33\xdc\xe9\x82\xbc\xc5\x14\x8d\xc2\x8c\x54\x80\
\x0e\x99\x88\x58\xa1\x50\xb4\x31\xed\xf3\x4c\x94\x77\x46\x3f\x74\
\xa9\xa8\xf3\x1d\xb9\x86\x77\x83\x61\xd3\xf6\x3e\x09\x32\x95\x97\
\x75\x15\x97\x4a\x8c\xd2\x89\xc3\x1f\xc3\x3b\x95\x1f\x91\x21\x48\
\x86\x7e\xac\x17\x4c\x66\x61\x4e\x25\x56\x2c\xee\x1d\xe1\xf3\xb8\
\x96\xe8\xe4\x58\xb3\xd8\xf3\x02\x5d\x25\x93\x06\xc2\xf2\x55\x0c\
\x02\x58\xcf\x5c\xd8\x4e\x2a\x02\x21\x3d\xd9\x20\x93\xb8\xb8\x61\
\xd0\x30\x91\xbb\x89\x51\x1b\x15\xea\x13\xbf\x30\x2c\x59\x54\x0e\
\xcc\xa8\x89\x7c\xaa\x12\x21\xd8\x40\x98\x29\x47\x74\x63\xa9\x2d\
\xdf\x11\x07\x89\xc2\xe2\x80\xde\xc0\x08\xc1\x6f\x67\x71\x40\x81\
\x59\x97\xf9\x0f\x8e\x11\xf8\xc1\x01\x1d\xc6\xcd\xb0\xa3\x18\xa3\
\x83\x24\xb3\x60\x05\x81\x6e\x88\xc5\x8b\x6b\x21\x9d\x5f\x5e\x0c\
\x06\xbb\x8e\xfa\x3d\x4c\x77\x3e\xe5\x41\x88\xd6\x18\x09\x10\x14\
\xb1\x50\xe4\x0a\x0b\xce\xe1\x8f\x39\x0e\x7a\xe7\xa0\x39\x95\x86\
\xc9\x71\xf4\x9f\x60\x85\x04\x8c\x0a\x34\x72\xae\xe5\x1e\xe8\x31\
\x61\xa0\xc0\x62\x90\xe0\x1c\x1a\x4b\x3a\xfa\x08\x3d\x2d\x23\x85\
\xab\x2c\xcc\x1f\x41\x3a\x05\x07\xa2\xa2\xb0\xc9\x27\xe7\xd0\x41\
\xe0\xef\xe5\x40\x83\x8a\x6a\x20\x0e\xf4\xce\xe8\x21\xdc\x93\x73\
\x30\xfb\x12\x03\x18\xb3\x2d\xa3\x43\x41\x01\x9d\x86\xfb\xa3\xa7\
\xa4\xda\x10\xf7\x0b\xa0\x6b\x30\x72\x90\xd9\xe9\x92\x4a\x8a\xb8\
\x40\x03\xc1\xd3\x94\x82\x77\xb3\xca\xa9\x92\xd9\x67\x98\xc8\x3c\
\x4a\x98\x51\xb7\xb7\x08\x13\x71\x89\xdc\xcb\x5d\x24\x42\x3e\xa9\
\xf0\x41\x20\x6a\xe0\x71\xcb\xef\x34\x92\xa8\x91\x04\xb8\x91\x80\
\xca\x18\xb5\x23\x54\x14\x08\x26\x1d\xdc\x27\x9e\x2d\xe1\x78\x2f\
\x66\xf4\x19\x81\xdd\x71\x46\xa7\x60\xc3\x70\xc8\xab\x81\x04\x81\
\xa2\x4d\x47\x84\x3b\x08\xd1\xdf\xc3\xef\xc9\xa8\x4c\x83\x4a\x97\
\x88\x4c\x8c\xc2\xa4\x01\x11\x8b\xd9\x7f\xca\x36\xea\xdc\xbc\x18\
\xec\xb8\xf6\x09\x50\x29\xa8\xe0\xf8\xe6\xa0\x28\xa6\x4b\x7a\x6e\
\x14\x4d\xe7\xab\x93\x2e\x29\x14\x7c\xa7\xc2\xa2\x0d\x22\x06\x76\
\x7b\x57\xc1\x50\x5a\x11\xa7\x12\x01\x67\x3b\x94\x6d\xce\x8d\x32\
\x63\x22\x32\x62\xe6\xa4\xf2\x33\xea\x25\x02\xfb\x00\x22\x42\x70\
\x1f\x7a\xea\x54\xc0\xf3\x18\x21\x30\xc3\x06\xc1\x2c\x3a\x15\x2f\
\x6a\xff\x44\x07\x1a\x88\x1b\x81\x68\xbc\x99\x82\xc6\x81\x18\xe6\
\xf2\x2b\x13\xd2\xf0\xa2\x7c\xb4\xd2\xa1\x73\xb8\x8c\x51\x88\x70\
\xfa\x09\xae\x0d\x54\xd8\xa2\x9e\x98\xc2\x5a\xc6\x5c\x4d\x51\x80\
\x8e\x80\xc9\x56\x46\x06\xe2\x11\xa7\x3b\xd1\x18\x23\x46\x8c\x18\
\x31\x62\xc4\x88\x11\x23\x46\x8c\x18\x31\x62\xc4\x88\x11\x23\x46\
\x8c\x18\x31\x62\xc4\x88\x11\x23\x46\x8c\x18\x31\x62\xc4\x88\x11\
\x23\x46\x8c\x18\x31\x62\xc4\x88\x11\x23\x46\x8c\x18\x45\x0a\x91\
\xff\x07\x5e\xc7\xf9\xa9\xdb\x06\x7d\x7b\x00\x00\x00\x00\x49\x45\
\x4e\x44\xae\x42\x60\x82\
\x00\x00\x01\x30\
\x00\
\x00\x04\x36\x78\x9c\xa5\x53\x31\x6e\xc3\x30\x0c\x3c\xd9\x01\x32\
\x08\x68\xfc\x83\x78\x2a\xfa\x87\x0e\xc9\xcf\xaa\xa1\x43\x9f\xa3\
\xdd\x8b\x9f\x92\xb1\x63\xc6\x0c\x45\x15\x9e\x14\x46\xb4\xe3\x24\
\x05\x4a\x9b\xa2\x8e\x94\x48\x98\x3c\x03\x0e\x0d\xfa\xbe\x03\xe5\
\xbb\x01\x5e\xc5\x76\x5d\xc1\x6f\x0e\x78\x69\xc5\xca\xbe\x17\xdd\
\x8b\x3a\xac\x72\x2c\x34\xb8\x23\x21\xbf\x08\xa1\x18\x3e\x74\xc9\
\x32\x8e\xa3\x78\x52\x7e\x91\x52\x31\x7c\xe8\xe2\xb2\x28\x83\x48\
\x45\x7e\xc8\x12\x6d\xd4\x9c\x18\x86\x89\xc3\x57\x1c\x35\x1c\x4d\
\x4e\x42\x9e\x41\xc1\xba\xcd\xce\x58\xc3\x7a\x80\xf8\x92\x4d\xb1\
\x56\x10\x9b\xaf\x1b\x1c\x97\xb0\x91\xbf\xe1\xe8\xed\xae\x54\x05\
\x16\x31\xa6\x18\x4f\xb1\xb7\xf9\xe3\xa4\x7d\xd7\xfe\x4c\xf0\xbc\
\x9f\xf3\x7e\xdf\xcc\xe3\x66\x5e\xf3\x79\x5a\x49\xc7\x4d\x4a\xc1\
\xa5\x5f\xe0\xe3\x07\xd8\x9d\x80\xed\x11\xd8\x1c\x80\xb5\x30\xa7\
\xa5\x0a\x95\x9c\xea\x1d\xc2\x5c\xc5\x9e\xd5\xfb\xcc\xc5\x9c\xcc\
\xcd\x1a\xac\xc5\x9a\xac\x4d\x9e\x93\xf9\x3d\x2a\xcf\xc7\x47\xd9\
\x03\xfe\xc9\xf3\x42\xb3\xd2\x24\xf2\x4d\xf4\x9d\xd3\x91\x26\xb5\
\xe4\x13\xe3\x9c\x8e\x5a\xe5\x45\xf4\x79\x99\x59\x14\xab\x43\xa8\
\x79\xb5\xce\x45\x4e\x5b\xe0\xb0\x96\x6f\x93\x7f\xfb\x53\xba\xf8\
\xf5\xa0\x93\xc1\x15\xe5\x59\xde\xe1\xdd\x33\xb2\x1c\x6a\xc1\
\x00\x00\x99\x8b\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x01\x00\x00\x00\x01\x00\x08\x06\x00\x00\x00\x5c\x72\xa8\x66\
\x00\x00\x00\x04\x67\x41\x4d\x41\x00\x00\xb1\x8e\x7c\xfb\x51\x93\
\x00\x00\x00\x20\x63\x48\x52\x4d\x00\x00\x7a\x25\x00\x00\x80\x83\
\x00\x00\xf9\xff\x00\x00\x80\xe9\x00\x00\x75\x30\x00\x00\xea\x60\
\x00\x00\x3a\x98\x00\x00\x17\x6f\x92\x5f\xc5\x46\x00\x00\x00\x06\
\x62\x4b\x47\x44\x00\x00\x00\x00\x00\x00\xf9\x43\xbb\x7f\x00\x00\
\x00\x09\x70\x48\x59\x73\x00\x00\x0b\x13\x00\x00\x0b\x13\x01\x00\
\x9a\x9c\x18\x00\x00\x80\x00\x49\x44\x41\x54\x78\xda\xed\xbd\x77\
\x9c\x1d\xd5\x7d\xf7\xff\x3e\x33\xb7\x6e\x5f\xed\xae\xba\x56\xab\
\x02\x02\x84\x84\x64\xc0\x80\x4d\x75\x8b\x6d\x5c\x70\xec\x38\xae\
\x01\x52\xcc\xe3\xf4\x3c\x4f\x7a\xf2\xfc\x6c\x3f\xaf\xc7\x71\x9c\
\xe4\x89\xed\x14\x97\x34\xe4\x1a\x3b\x15\x1b\x8c\xbb\x11\xcd\x06\
\x03\x46\x12\x12\x6a\x20\xed\xaa\xad\x56\xbb\xab\xed\x7b\xeb\x9c\
\xf3\xfb\x63\xca\x3d\x33\x77\xe6\x96\xd5\x5d\x35\xf6\x0b\xa3\x7b\
\xef\x94\x9d\x99\x73\xe6\xf3\xf9\xd6\x73\x46\x28\xa5\x58\x90\x05\
\x59\x90\x97\xa6\x18\xe7\xfa\x02\x16\x64\x41\x16\xe4\xdc\xc9\x02\
\x01\x2c\xc8\x82\xbc\x84\x65\x81\x00\x16\x64\x41\x5e\xc2\x12\x8b\
\xda\x20\x84\x38\xd7\xd7\xb6\x20\x73\x91\x26\x1a\xdf\x71\xb3\x2c\
\x04\x8a\x2e\x50\xa9\x16\xe3\x13\x51\x3b\x2c\x10\xc0\x79\x2a\xd1\
\x00\x17\x81\xcf\x46\x8a\x0a\x7c\xfa\x65\x81\x20\xce\x5b\x59\x20\
\x80\x0b\x59\xfc\x60\xaf\x04\x70\xa1\x2d\x06\x8d\x21\x03\x1d\xf4\
\xd2\xf9\x54\x84\x93\x40\x38\x41\x2c\x10\xc3\x39\x97\x05\x02\xb8\
\x10\xa4\x5c\xab\xeb\x00\x16\xda\x6f\x23\x64\x5d\x09\xf8\x02\xd3\
\xd9\xc7\xf4\xd6\x8b\x39\x91\x80\x42\x79\x60\xb7\xb0\x09\xc0\x42\
\x85\x12\x81\xfb\x5d\x06\x7e\x43\x18\x31\x2c\x90\xc2\x59\x95\x05\
\x02\x38\x1f\x25\x1c\xf0\x51\x60\x37\x28\x07\xb7\xbb\x4e\xd7\xfa\
\x31\x01\x71\x20\x8e\x20\x8e\xf0\xed\x53\xaf\x28\x07\xec\x12\x45\
\x01\x45\x01\x28\x28\x28\x3a\xeb\x74\xab\xc0\x26\x07\x7b\xbd\x74\
\x7e\xeb\x44\x11\x45\x1a\xb6\x2c\x10\xc2\xbc\xca\x02\x01\x9c\x2f\
\x52\x6e\xce\x07\xcd\x76\x77\x11\x1a\xd8\x63\x80\x89\x28\x03\xb7\
\xbb\xde\xd0\xf6\x8b\x0b\x48\x21\x48\x22\x48\x08\x97\x04\x5c\x02\
\xa8\x8d\x08\x94\x03\x47\x85\x42\x2a\x1b\xfc\x79\x14\x39\x14\x59\
\x05\x05\xa0\xe8\x00\xdd\xb5\x08\x8a\x3e\x82\x50\x58\xa0\x2d\xa5\
\xdf\x95\x88\xa1\x24\x0b\x84\xd0\x50\x59\x20\x80\x73\x29\xd1\xa0\
\x77\x81\x6b\x3a\x60\xf7\x80\x0e\x98\x0e\xd8\x13\x88\xd2\x22\x20\
\x89\x20\x85\x41\x4a\xd8\xeb\x62\x94\x2c\x83\x98\x6f\xbb\x41\x4a\
\x15\xc5\x32\x50\x69\x34\xe8\xcb\x02\xdd\x28\x12\x61\x97\x6a\x24\
\x38\xe1\xfd\x50\x28\xa5\x90\x46\x52\xf4\x23\x55\x56\x59\x64\x51\
\x64\x95\x4d\x04\x45\x34\x60\x2b\x69\x13\x84\x52\x64\x81\x9c\x6e\
\x31\xa0\x28\x28\xc8\x3b\xbf\x8b\xde\x12\x4d\x0a\x7e\x0b\x61\x81\
\x0c\xce\x58\x16\x08\xe0\x6c\x4b\x53\x59\x00\x4e\x07\xbd\x49\x09\
\xf4\x71\x0d\xd8\x09\x6c\xc0\x27\x11\xc4\x05\x24\x31\x48\x21\x48\
\x0b\x41\xda\x05\x35\x82\x26\x61\x92\x52\x45\xb1\x12\xa5\x9a\x63\
\x22\xb9\x12\x10\xd9\x99\xdc\x8a\x44\x3c\x61\xe4\x73\xf9\xe5\x4d\
\x4d\x4d\xa9\xd9\xd9\x59\x00\xe2\xf1\x38\x4d\xe9\x26\x26\x26\x27\
\x6a\xbf\xfc\xa6\x26\x00\xdc\xbf\xd1\xd4\xd4\xc4\xec\xec\x2c\xc2\
\xe0\x50\x73\x73\x8b\x9c\xc9\x4e\x8f\xb4\xb5\xb5\x65\xa7\x33\x93\
\xa7\x80\x59\x23\xc1\x8b\x4a\x92\x45\x92\x51\x92\x1c\x90\x53\x8a\
\x3c\x92\x1c\x8a\x8c\x52\x64\x51\x64\x50\xe4\xb1\x09\x22\xa7\x91\
\x82\x6b\x51\x54\x26\x84\x05\x22\x98\xb3\x2c\x10\xc0\xd9\x90\xca\
\xe6\xbd\xe9\xe8\xe0\x18\xb6\xa6\x4e\x38\x80\x4f\x62\x90\x06\x9a\
\x84\x41\x1a\xe1\x01\x3e\x81\x41\x5a\x18\xa4\x95\xc5\x12\x14\x8b\
\xe3\x22\xd9\x27\xa5\xea\x45\xa9\xae\x98\x11\x5f\x3a\x9b\x99\x25\
\x1e\x8f\x53\x28\x14\x00\x88\xb5\x82\xd9\x0a\x46\x02\xe2\x5d\xa5\
\x0b\x49\x2e\x73\xbe\x54\x4e\xe2\x95\x8b\x80\xc2\x28\xc8\xbc\xfd\
\xd3\x9a\x86\xe2\x94\xfd\xbb\x30\x5a\xda\x2d\x1e\x8f\x13\x8f\xc7\
\x99\xcd\xcc\x8e\x27\x12\x89\xd3\x86\x29\x0e\xe4\xad\xdc\x90\x30\
\x39\x2d\x62\x1c\x45\x32\xab\x6c\x62\x98\x41\x91\x43\x32\xab\x14\
\x19\xb0\x2d\x0a\xc7\xb5\xc8\x2b\xc8\x53\x9d\x0c\x6c\x59\x20\x83\
\xba\x64\x81\x00\xe6\x53\x9a\x08\x8b\xd6\xbb\xa0\x77\x7d\xf3\x98\
\x10\x9e\x49\xef\x82\xbc\x09\x41\x0b\x06\x2d\xc2\xa0\x55\x98\xb4\
\x08\x83\x66\x59\x64\x55\x5c\x24\x7b\x65\x51\x5d\x92\xcf\xe5\x57\
\xc4\x63\xf1\x0e\x80\x42\xa1\xe0\x81\x3c\xb5\x1c\x44\x02\x12\x5d\
\x60\xb6\xd8\x4b\x53\x5b\x0a\x33\x6e\x90\x48\xc7\x49\x36\x25\x50\
\x28\x92\x4d\x71\x12\x4d\x71\xfb\xea\x9c\x2e\x4e\x34\xc5\x49\x36\
\xc7\xcb\x1e\x8a\x62\x5e\x92\x99\xc8\x06\xd6\x59\x64\x26\x72\x20\
\xc0\x2a\x48\x32\xe3\xf6\xf7\xa9\x91\x59\x54\xc1\x26\x82\xe2\x8c\
\x4d\x0e\x85\x51\x28\x3a\x9f\x2e\x31\xc5\xe3\xf1\xac\x30\xc5\x49\
\xc3\x10\x07\x0a\x2a\x37\x60\xc4\x79\x51\x49\xa6\x95\x4d\x0a\x33\
\x28\x66\x1c\x42\x98\xc5\x5e\x32\x5a\x9c\xc1\x75\x21\x6c\x77\x63\
\xc1\x2a\x98\xb3\x2c\x10\xc0\x7c\x48\x53\xa8\x79\xef\xfa\xf4\x2e\
\xe8\x6d\x2d\x6f\x83\x3e\x29\x0c\x1b\xf4\xc2\xa0\x55\x18\xb4\x61\
\xd2\x86\x64\x65\x8c\xe4\xe5\xb9\x4c\xee\x92\x98\x11\xef\x03\x52\
\x85\x42\x01\x23\x09\xc9\xe5\x36\xc8\x93\xcb\x21\xbe\x08\x52\xed\
\x71\x12\x4d\x09\x5a\xbb\x9b\x48\xa6\x6d\x70\x37\xb5\x27\x31\xe3\
\x26\x4a\x39\xa8\xd0\x34\xbd\x1b\xcd\x8b\xd2\xfe\xaa\x02\x7e\x84\
\xb7\xbb\x0a\xdf\x08\x4c\x0f\xcf\x62\x15\x25\xb3\x13\x39\x32\x13\
\x39\xf2\x99\x82\xfd\x79\x12\x0a\xa7\xed\x25\x77\xd2\xb6\x1c\xe2\
\x71\x9b\x88\x84\x29\x06\x0a\xc5\xfc\x41\x23\xce\x80\x11\xe7\x80\
\x43\x08\xd3\xca\x62\x12\x65\x7f\x77\x2c\x83\x59\x20\xa3\x14\x39\
\x5c\xeb\xc0\x6f\x19\x2c\x10\x41\x8d\xb2\x40\x00\x8d\x14\x3f\xf0\
\x83\xa0\x8f\x0b\xdb\xaf\x4f\x22\x6c\x13\x1e\x41\xb3\xa3\xe5\x9b\
\x84\x49\x9b\x30\x69\xb3\xb2\x6c\x8d\x1b\x89\xf5\xb2\xa8\x36\xa1\
\xe8\x70\xb5\x7b\x72\x85\x0d\xf6\xe4\x32\x68\x5a\x1c\xa7\xa9\x2d\
\x45\x53\x5b\x8a\xd6\xee\x66\x9a\xda\x93\x18\x71\xd3\x06\xb6\xf3\
\xe8\xdb\x80\x2f\x81\xdc\xfb\xaa\x77\xa7\x52\xe4\xa6\x0a\xe4\xa6\
\x0b\xde\x7e\xde\x87\x8f\x31\x4a\x62\x26\x4c\x9a\xba\x92\xf8\x0e\
\x08\x3c\x0a\xc2\xfd\x47\xf8\xf7\x11\x02\xa6\x86\x33\x64\x26\x72\
\x64\x26\x6d\x62\x98\x1d\xc9\x91\x3b\x89\xb7\x78\x56\x42\xb1\x90\
\x6d\x6d\x6b\xd9\x37\x93\x9d\x3e\x64\x24\xd8\x83\x62\x58\x49\x26\
\x94\x64\x16\xc9\x04\x8a\x29\x87\x10\x74\x32\x08\x5a\x05\x92\x05\
\x22\xa8\x28\x0b\x04\xd0\x08\x29\x07\xbe\x1b\xcc\x8b\x03\x09\x21\
\x48\x01\x69\x0c\x9a\x84\x41\x33\x82\x16\x61\xd2\x2a\x62\xb4\x89\
\x98\x68\x97\x19\xb5\x29\x1d\x6f\xd9\x34\x3d\x35\x7d\x25\x8a\x14\
\x40\x6a\x05\xa4\xd7\xd8\x80\x6f\x59\x19\xa7\xb5\xab\x99\xd6\x6e\
\x7b\x49\x34\xc5\x1d\x50\xab\x72\xd0\x4b\xfb\xb3\x98\xb3\x98\x1d\
\xcd\x92\x9d\x2a\x90\x9b\x2c\x90\x9b\xca\xd3\x9a\xee\xa0\xbb\xa7\
\x87\xc9\xc1\x59\x7a\xd7\xac\xa6\xa9\xb9\xd9\xc3\x67\xcf\xe2\x1e\
\xba\x17\xf7\x44\xde\xe2\xec\xcc\x0c\x03\x87\x06\x4a\x77\x09\xec\
\x7b\xee\x79\x9a\xba\x92\xe4\x0a\x59\x8e\x1f\x3f\x4a\x53\x77\x92\
\x58\xc2\xa0\xa9\x2b\x45\xb2\x35\x46\xa2\xd5\x71\x31\x84\x7d\x12\
\xf7\x91\x11\x5a\x35\x83\x2c\x4a\xa6\x46\x66\x99\x1e\xc9\x30\x3d\
\x9a\x61\xfa\x84\x4d\x08\xf9\x93\x90\x19\xb0\xe3\x0a\xf1\x78\x1c\
\x61\x8a\xfd\x66\x5c\xec\x29\x88\xdc\x3e\x14\x43\xca\x62\x4c\x49\
\xc6\xb1\x49\x61\x0a\xc9\x0c\x38\x41\x45\x7f\x66\x61\x81\x08\x2a\
\xc8\x02\x01\x9c\x89\x84\x03\x3f\xe6\x04\xf1\xe2\x8e\x2f\xdf\x2c\
\x0c\x5a\x11\xb4\x62\xd2\x2e\x4c\xda\x8c\x18\xed\xca\x62\xad\x59\
\x4c\xdc\x90\xcb\xe6\xaf\x8c\x99\xf1\x64\xb1\x58\x20\xbd\x16\x9a\
\xfa\x20\xd5\x07\xed\xcb\x9a\xe9\x5c\xd6\x46\x6b\x57\x13\xe9\xf6\
\x54\x19\xd0\x95\xe7\xf1\x2a\x26\x4f\xcc\x32\x33\x92\x65\x66\x34\
\xcb\xe2\xae\x65\x24\x8a\x4d\xf4\xf6\xad\xa6\xab\xa7\x87\xee\xc5\
\xdd\x0e\xd8\x9b\xbc\xcb\x9e\x2f\x04\x08\x60\xef\x73\x7b\x11\xc0\
\xbe\xdd\x7b\x19\x39\x35\xc2\xac\x35\xc9\xe1\xfe\x17\x68\xea\x4a\
\xd2\xba\x3c\x4d\x53\x57\x12\x33\x61\x78\x24\xe0\x7d\xa2\x40\xd8\
\x84\x30\x3e\x38\xc3\xf4\x48\x86\x89\x93\xd3\xcc\x1c\x92\xe4\x4e\
\x42\xf6\x08\x88\xac\x1d\x3f\x48\xa4\x12\x07\x2c\x91\xff\xa9\x91\
\x14\x3b\x64\x51\x9d\x56\x16\xe3\x58\x4c\x20\x99\x54\x8a\x29\x14\
\x33\xca\xb6\x0c\x72\xd8\x81\xc3\x60\xac\x60\x81\x08\x1c\x59\x20\
\x80\xb9\x48\x65\xe0\xa7\x1c\xe0\xa7\x85\x41\x3b\x06\x1d\xc2\x64\
\x91\x30\xe9\x14\x26\x4b\xac\x0c\x5b\x4d\x15\x7f\x85\x92\xb4\x17\
\x8b\x05\x9a\xd6\xda\x9a\x3e\xbd\x1a\x16\xad\x6e\xa3\x63\x59\x1b\
\x9d\x4b\x5b\x31\x63\x26\x00\x4a\x82\xd4\x34\x7d\x6e\xaa\xc0\xe4\
\x89\x19\x26\x8e\xcf\xb0\x72\x69\x1f\x8b\x5a\x17\xb3\xaa\x6f\x35\
\xbd\x7d\xbd\xf4\xae\x59\xed\xbb\xcc\x7a\x9e\x70\x85\xab\x94\xe7\
\xd6\xaf\xd5\x8e\x1a\x39\x35\xcc\xd1\x43\x03\x1c\x39\x7c\x84\xc3\
\xfd\x2f\x30\x32\x39\x44\xac\x4d\xd1\xba\x2c\x4d\x53\x77\xd2\x21\
\x02\xa5\x11\x82\xfd\x39\x31\x38\xc3\xc4\xc9\x19\x26\x4e\x4e\x93\
\x39\x26\x99\x79\x01\xb2\x03\x60\xaa\x38\x40\xae\xa8\x0a\xcf\x98\
\x69\x9e\x00\x06\x54\x91\x31\x25\x39\x8d\x64\x5c\xd9\x6e\xc2\x34\
\x78\x35\x08\xf9\x05\x22\x28\x97\x05\x02\xa8\x47\x6a\x03\x7e\xab\
\x30\xe8\xc0\x8e\xde\x77\x19\x71\xba\x95\xe4\x12\xa3\x18\xbf\x3e\
\x9f\x29\x6c\x8e\xc5\xe2\x18\x9d\x05\x9a\x2f\x83\xe6\x4b\x6d\xd0\
\x77\x2e\x6d\xa3\x63\x99\x03\x7a\x07\xe8\x4a\xda\x9f\x56\xce\x62\
\xb4\x7f\x92\x89\xe3\x33\x2c\xed\x5c\xc9\xea\xde\xb5\x5c\xba\xf1\
\x32\x2e\xdd\x78\x59\xc8\x05\x46\xf7\x49\xbd\x4f\xb9\xbf\x30\x50\
\x84\x6c\xaf\xf7\xef\xb9\xd7\x51\xba\x92\xa3\x87\x06\xd8\xbf\x7b\
\x1f\x2f\x1e\x38\xc8\x89\x91\x23\xc4\xda\x25\x1d\x7d\x2d\x24\x5b\
\x63\x25\x37\xc1\xb1\x12\x84\x4b\x06\x83\xd3\x4c\x0c\xcd\x30\xb5\
\x4f\x92\x3d\x62\xbb\x09\xf1\x58\x1c\x11\x63\xa0\xa8\x0a\x4f\x1b\
\x49\x9e\x50\x45\x46\x94\x64\x14\xc9\x98\x13\x27\x98\x42\x31\xbb\
\x40\x04\xe5\xb2\x40\x00\xb5\x48\x1d\xc0\x17\x26\x8b\x44\x8c\x6e\
\x61\xd2\x6e\x65\xb9\x41\x15\xb9\x26\x15\x4f\xaf\xca\x64\x33\xb4\
\x5c\x0e\xad\x9b\xa1\x79\x65\x82\xa5\xeb\xba\xe8\x5c\xda\x46\xa2\
\x29\x01\x8e\x96\x57\xae\x96\x9f\x2c\x70\xfa\xf0\x24\xc5\x31\x93\
\xf5\x6b\x37\x70\xe9\x15\x1b\xd8\xf2\xf2\xad\x38\x86\x72\x99\x94\
\xb4\x37\xda\x65\x96\xb6\x9d\x4b\x11\x75\xac\x3b\x72\xe8\x08\x3b\
\x9e\x78\x96\xbd\x7b\x76\x33\xad\x4e\xd3\x7d\x69\xab\x1d\x70\x74\
\x89\xc0\xb0\x89\x40\x16\x2c\x26\x4e\xce\x70\xfa\xe8\x14\x13\x03\
\x19\x66\x5f\x84\xe9\x3d\x60\xca\x38\x18\x4c\x58\x14\x1e\x33\x52\
\x3c\xa1\x24\xc7\x91\x4c\x28\x8b\xd3\x4e\xbc\x60\xca\x49\x2b\x66\
\x9d\xc2\xa3\x97\x3c\x11\x2c\x10\x40\x35\x69\x2a\x4b\xe5\xe9\xc0\
\x6f\x2e\x03\x7e\x9c\x6e\x2b\xc3\x75\x49\x23\x7d\x4b\x21\x5f\x68\
\xa7\xa9\x48\xf3\x65\x36\xf0\x17\xaf\xef\xa4\x7b\x55\x27\xad\xdd\
\x76\xf0\x4d\x49\x7b\x41\x41\x76\x2a\xcf\xe8\xe1\x49\xe2\xb9\x16\
\x36\x5d\xb9\x85\xcd\xd7\x6e\x61\xd1\xe2\xae\xc0\xc5\xe8\x63\x81\
\x4a\xa2\x7c\x7b\x88\xb2\x75\x95\xe4\x4c\x9f\xf6\x5a\x9f\x82\x6a\
\x93\x14\x04\xd7\xcd\xce\xcc\xb2\xf3\x89\x67\xd9\xbb\x67\x0f\x47\
\x06\x0f\xd2\x75\x69\xab\xe6\x2a\x80\x10\x0a\x61\x40\x21\x53\x64\
\xf8\xc5\x71\x4e\x1f\x9b\x62\x6a\x9f\x64\xfa\x79\x50\x13\x71\x84\
\x20\x67\x89\xc2\xe3\x46\x13\x8f\x22\x39\xa1\x8a\x8c\xf8\x88\x40\
\x32\x0b\x65\x44\x50\x1a\xb5\xf8\x12\x21\x82\x05\x02\xa8\x24\x7e\
\xf0\xbb\xf5\xf8\x49\xa7\x58\xa7\x05\x41\xbb\xe3\xdf\x77\x89\x18\
\xdd\x32\xcb\x8d\xb2\xc0\x6b\x51\xb4\xc7\xda\xa0\xfd\xe5\xd0\x76\
\xa5\xc9\xd2\xb5\xdd\x74\xaf\xea\x74\xb4\x7d\x49\xd3\x17\xb3\x16\
\x43\xfb\xc7\xb0\x4e\x1b\x5c\x73\xdd\x0d\x6c\xba\xf6\x2a\xba\x7a\
\xba\x7c\x4f\x5e\xa5\xc1\xf5\x51\xdb\xcb\xa5\x72\x5f\xd9\x16\x44\
\x63\xfa\xb3\x1e\xa0\x97\xaf\x57\xa1\xeb\x33\x33\x19\x76\x3e\xb1\
\x83\x67\x9f\x7c\x9a\x91\xa9\x13\x2c\xde\xd4\x41\xb2\xd5\xb4\x5d\
\x03\xc7\x2a\xb0\x8a\x92\x89\x93\x33\x0c\xed\x3f\xcd\x54\x7f\x91\
\xa9\x1d\x90\x1b\x04\x04\x39\x23\xc6\xa3\x46\x8a\xef\x2a\xc5\x09\
\x55\x64\x54\x59\x8c\x39\x01\xc3\x69\xa7\xc0\xc8\x4e\x21\x06\xeb\
\x08\x5e\x02\x24\xb0\x40\x00\x61\x52\xae\xf5\xe3\xce\xa0\x9b\xa4\
\x06\xfc\x2e\x07\xf8\x8b\x65\x8e\x1b\x65\xde\x06\x7e\x6a\x25\xb4\
\x5e\x05\x1d\x57\x24\x58\x71\xd9\x12\x3a\x97\xb6\x61\x98\x7e\xdf\
\x7e\xe2\xc4\x34\x93\x47\xb3\x2c\x69\x5b\xc5\x2d\x6f\xbc\x8d\x15\
\x7d\x2b\x81\xe8\x8a\xdc\x7a\x7f\xd7\xba\x0d\x1a\x07\x7c\xff\xdf\
\xac\x7f\x7b\x65\xbb\xa6\xb4\xcf\xe9\xa1\x51\x9e\xfc\xc1\x13\xec\
\x7d\x7e\x17\xa2\xa3\x40\xf7\xa5\xad\x1e\x11\xb8\x3d\x36\x79\x72\
\x86\x91\x43\x13\x8c\xbd\x90\x61\xfa\x79\x98\x3d\x08\x08\x72\x22\
\xc6\xa3\x46\x92\xef\x22\x39\xa1\x2c\x46\x95\xc5\x69\x24\x13\x0a\
\x8f\x08\x4a\xe3\x0f\x5c\x22\xb8\xc8\x49\x60\x81\x00\x74\x29\x01\
\xbf\xa4\xf5\x6d\xe0\xbb\xe6\x7e\xb3\xa3\xf1\x7b\x44\x8c\x25\x71\
\x12\x37\x50\x34\xdf\x94\x99\xcd\xb4\x27\x7a\xa0\xf3\x26\x68\x5b\
\x67\x03\xbf\x67\x55\x67\x29\x98\x27\xa1\x90\xb3\x18\x3d\x3c\x89\
\x35\x12\xe3\xe5\x37\x5e\xcf\x95\xd7\x6e\x22\xdd\x9c\x0e\x05\x7d\
\x25\x0d\xef\xd7\x91\xa5\xb1\xbc\x54\x38\x26\x6a\xdd\x7c\x4a\xad\
\xbe\x7f\xb5\x7d\x2b\xcd\x84\xf2\xdc\x13\xbb\x78\xe6\xd1\xa7\x39\
\x35\x7d\x84\xee\x4b\x5b\x49\xb4\x39\xc1\x43\xc3\x26\x84\x99\xd1\
\x0c\x27\xf7\x8f\x31\x79\x24\xc3\xc4\x4f\xec\x80\x61\x3a\x9d\xce\
\x49\x61\x3d\x5a\x24\xff\x5d\xa5\x38\xa6\x2c\x46\xb1\x03\x86\x13\
\x0a\x32\x0e\xe4\xdd\xb1\x07\xa5\x1a\x82\x8b\x94\x08\x16\x08\xc0\
\x15\xbf\xd6\x77\x47\xe3\xa5\xdc\x3c\xbe\x30\xe8\x14\x26\xed\x22\
\xc6\x12\x55\xe4\x6a\x43\xc6\xdf\x58\xc8\x15\x56\xc7\xda\xa0\xfd\
\x3a\x58\xb4\x35\xc1\xca\x0d\x4b\xe8\xee\x5d\xe4\x80\x5e\x79\x66\
\xfe\xf1\xe7\x46\x58\xde\xd9\xc7\x96\xeb\x5f\xc6\xda\x8d\xeb\x00\
\xbf\xb6\x2f\xf7\xdb\xed\x89\x7a\x6a\x23\x82\xf0\xdf\xe7\xab\x88\
\x2a\xbf\xf5\x75\x95\xf6\xd5\xbf\x9f\x1e\x3a\xcd\x23\xdf\x78\x98\
\xc3\x47\xf6\xd1\xb6\x26\x41\x53\x77\xd2\x23\x01\x61\xc0\xcc\x48\
\x86\xa1\xfd\x63\x8c\x3e\x9f\x61\x6a\xa7\xe7\x1a\x4c\xa4\x9a\x93\
\xdf\x2d\x90\x7b\x58\x15\x19\x52\x16\xc3\x28\xa6\x94\x62\x1c\x7b\
\x2c\x82\x9d\x31\xd0\xad\x81\x8b\x90\x08\x16\x08\xc0\xaf\xf5\x4b\
\xd1\x7d\x41\x93\xb0\x03\x7c\x9d\xd8\xe9\xbc\xc5\x08\x96\x24\x69\
\xfa\xf9\x99\xc9\xd9\x2d\x46\x12\x3a\xae\x83\x8e\x6b\x4c\x56\x6e\
\x58\xc2\xd2\xb5\x3d\x48\x4d\xe3\x67\x27\xf3\x1c\x79\xfa\x24\x2b\
\x97\xae\xe1\xd5\xef\x78\x2d\x9d\x3d\x8b\x20\x00\xf4\xea\xfe\xfd\
\x5c\x8d\x65\x7d\xef\xf3\xbf\x9f\xaa\x91\x42\x39\x21\xa8\xd0\xfd\
\xb2\x33\x19\x1e\xfd\xfa\xa3\xec\x78\xea\x29\x3a\x37\xa4\x69\x5b\
\x9e\x2e\x11\x81\x50\xcc\x8c\x66\x39\xba\xe3\x14\xd3\xfd\x45\xc6\
\x1e\xb5\x07\x28\xc5\x93\xf1\x01\x69\x14\xfe\x1b\x93\xdd\xca\x62\
\x0c\x8b\x61\x65\x31\xe6\x16\x14\x39\x44\xe0\x8f\x0f\x5c\x44\x24\
\xf0\xd2\x26\x80\x72\xad\xef\x8e\xc8\x6b\x11\x06\xed\xc2\xa4\x5b\
\x98\x2c\x36\x12\x2c\xb1\x32\xbc\xc6\x90\xb1\x57\x17\x0b\xc5\x64\
\xdb\x56\x68\xbf\x16\x7a\x37\x2f\x65\xe9\xba\x6e\x0c\xd3\xf4\xf2\
\xf6\x99\xc9\x3c\xc3\xcf\x4f\xb0\x6e\xed\xe5\x6c\xbd\x65\x2b\x1d\
\x3d\x9d\x6e\x8b\x11\x96\x9e\xab\xe6\x02\x50\xc5\x12\xf0\xef\x59\
\xb9\x4f\x2e\x04\x32\x28\x5d\x6b\xd8\x77\x55\x75\xbb\xfb\xfd\x99\
\x1f\x3c\xcd\x93\x0f\x3d\x4e\x7a\xa5\xd0\x88\xc0\xce\x1c\x8c\x1d\
\x99\xe2\xc4\x9e\x51\x26\x9f\x93\x4c\x3e\x6b\x97\x1b\x27\x9b\xe2\
\x4f\x58\x66\xe1\x5b\x4a\x72\x54\x15\x39\xa5\x2c\x46\x95\x62\x0c\
\x69\xd7\x10\xe0\x56\x15\xba\xd9\x82\x8b\x84\x04\x5e\xba\x04\xd0\
\x54\x56\xb7\x9f\x44\xd0\x2c\x04\x6d\x8e\xd6\x5f\x6c\xc4\x59\x8a\
\x64\xa3\x95\xe5\xad\xa6\x88\xad\x8a\x2d\x2d\xd2\x79\x33\x74\x5f\
\xda\xc2\xda\xab\x7b\x49\xa6\x12\x48\x09\x48\x45\x21\x6b\x31\xf0\
\xf4\x10\x8b\x3b\x57\x70\xdb\x3b\x5e\x45\x47\x4f\x87\xef\x71\xad\
\x05\xf0\xd5\xd6\x45\x89\x3e\xab\xd7\x85\x04\xf2\x5a\x25\xcc\x25\
\x08\x7e\x57\xbe\x39\x47\x4b\xdb\x7f\xf8\x95\x1f\xb2\xfb\xa9\x1d\
\xf4\x6c\x6a\xa6\xa9\x27\x09\x42\x61\x18\x20\x2d\xc9\xf0\xa1\x09\
\x06\x77\x8f\x31\xf1\x14\xe4\x0f\xc7\x28\x5a\xc5\xc9\x74\x5b\xe2\
\xc1\x82\xca\xff\x44\x16\x18\xc2\xe2\x94\xb2\x18\x51\xca\xa9\x2a\
\xb4\xe7\x2a\xc8\x73\x11\xc5\x06\x5e\x9a\x04\x50\x02\x7f\x0c\xbc\
\x20\x5f\x0b\x06\x1d\xc2\xa0\x5b\xc4\x59\x62\xc4\x59\x6e\x14\xe2\
\xb7\x5b\x39\x75\xb3\x34\x8b\x74\x5c\x0f\xdd\x37\x24\x58\x7d\xe5\
\x0a\x3a\x97\xb6\x97\x82\x7b\x59\x8b\xe3\xbb\x86\xe9\xe9\x5c\xce\
\x2b\xde\x7c\xa3\x07\x7c\xa1\x69\x7c\xfd\x91\xac\x97\x08\xe6\x02\
\xe8\x8b\x91\x04\xec\xfb\x0a\xff\x0c\xb3\x0c\x74\x7b\x2b\x3b\x93\
\xe5\xc7\x5f\xff\x31\x07\xf6\xed\x66\xd1\x86\x34\xc9\xb6\x98\x17\
\x1f\x28\x64\x8a\x1c\x7d\x76\x98\xd3\x7b\x33\x8c\x3d\x0a\x64\x63\
\x18\x31\x71\x50\x26\x0a\xff\xa1\x2c\x8e\xa8\x22\x83\xca\xe2\x94\
\x52\x9c\x46\x32\xe9\x58\x03\xfe\xda\x81\x0b\x98\x04\x5e\x5a\x04\
\x50\x9e\xd7\x4f\x62\xd0\x22\x6c\x93\x7f\x11\x26\x4b\x1c\xad\x7f\
\x65\x5c\xa5\xde\x91\x99\xcd\x2e\x69\x5a\x07\x9d\x37\xc3\xca\xab\
\x7a\x58\x79\xd9\x52\xdb\xdc\x57\x80\x05\x23\x87\x27\x28\x8e\x99\
\x5c\xfb\xba\xeb\x58\x7d\xc5\x6a\xf4\x37\xa9\x05\x67\xb3\xf4\xff\
\x8e\x8a\x05\x5c\x9c\x1a\x7c\x3e\x44\xcc\xe1\x73\x7c\x68\x9c\x47\
\xff\xf3\x11\x06\x87\xfa\x59\xfe\xb2\x4e\x8f\x04\x84\x01\x63\x47\
\xa7\x38\xf6\x6c\xc9\x2d\x88\xc5\x63\xb9\x78\x93\x71\x7f\x41\xe5\
\x9f\x94\x05\x4e\x60\x71\x52\x59\x0c\x2b\xc5\xb8\x93\x32\xcc\xa0\
\xc7\x06\x2e\x50\x12\x78\xe9\x10\x40\x09\xfc\x6e\xa0\x2f\xe5\x54\
\xf1\xb9\x03\x75\x96\x19\x09\x96\x5b\x19\x5e\x23\xf3\xdc\x6a\x24\
\x48\x2e\xba\x05\x16\x5f\x97\x66\xdd\xcb\x56\xd3\xd4\x9a\x46\x4a\
\xe5\x05\xf8\x06\x7e\x32\xc4\x35\xaf\xba\x8e\x4d\xb7\x6c\x0a\xd5\
\xea\xe5\x5a\xdf\x1f\xd9\xaf\x6e\xe2\xcf\xdd\x9f\x7f\x29\x91\x48\
\xb9\x8d\x15\x74\x0f\xca\xb7\x0d\x3c\x37\xc0\x13\xf7\xff\x18\xd1\
\x99\xa1\x6d\x45\x0a\x0c\x6c\xb7\xa0\x28\x39\xb2\x63\x98\xd1\x7d\
\x33\x8c\x3d\x66\xcf\x4d\x20\x62\xec\x36\xd3\x7c\x43\x49\x0e\xa9\
\x02\x27\x94\xe4\x94\x52\x4c\x86\x58\x03\x17\x24\x09\xbc\x34\x08\
\xc0\xef\xef\xc7\x9d\x91\x7a\xad\x18\x74\x89\x18\x4b\x8d\x18\x3d\
\xc2\x64\x6d\x71\x9a\x9f\x37\x89\x5d\x12\x5b\x5a\xa4\xeb\xb5\xd0\
\x77\xed\x32\x56\x6e\x58\xea\xcb\xe7\x0f\x3c\x3d\x48\x77\xf7\x72\
\x5e\xf9\xf6\x57\x92\x68\xb2\x27\xc6\x28\x9f\xbb\x5a\x07\x7a\xbd\
\xc0\xaf\x5f\xa2\x01\x1f\x5e\x27\x70\xe1\x8b\xfb\x5e\x12\xf7\x2e\
\xf5\xcf\x60\x4f\x94\x3e\x83\x04\xf1\xd4\xd7\x9f\x62\xf7\x8f\x76\
\xb0\xec\xea\x36\x62\x69\xc3\xab\x2a\x9c\x1c\x9a\x61\xe0\x27\xc3\
\x8c\x3f\x23\xc9\xee\x8f\x81\x60\x52\x25\x8a\x5f\x45\xf0\x9c\x2a\
\x70\x5c\x49\x4e\x2b\xc9\x08\x92\x71\x77\xc4\x21\xae\x4b\x70\x81\
\x91\xc0\xc5\x4f\x00\x25\xf0\x1b\x94\xfc\xfd\x56\xec\xbc\xfe\x52\
\x23\xc1\x2a\x25\xb9\x56\xe4\x62\xef\x2f\x16\x8b\xc9\x8e\xeb\x61\
\xf1\x6d\x09\xd6\x6d\x5d\x4d\x5b\x57\x8b\x07\xfc\xf1\xe3\xd3\x8c\
\x1d\x9a\xe5\x96\xf7\xdc\xca\xe2\xd5\x4b\x42\xb4\x7d\xd8\xbc\xd5\
\xe5\xf9\xfd\xf9\x78\x3a\xa2\x08\xe0\x62\xb6\x04\xa2\xa7\x2c\x53\
\x75\xb9\x05\xb9\x99\x2c\xdf\xf9\xf4\x77\x99\xcd\x8f\xb2\x64\x53\
\x3b\x08\xdb\x1a\x28\x64\x8b\x1c\xdd\x61\xc7\x06\x46\x7f\x00\x86\
\x8c\x21\x8d\xe2\xf7\x44\x9c\x6f\x38\xe3\x0a\x06\x91\x0c\x2b\xc5\
\x18\x38\x93\x9a\x5e\x80\xa9\xc2\x8b\x9b\x00\x74\xcd\x6f\x0f\xe0\
\x69\x12\x06\x6d\xc2\xd6\xfc\xcb\x8c\x04\x2b\x65\x9e\x3b\x44\x3e\
\xf6\x6a\x9a\x8b\x2c\x7e\x0b\x2c\xb9\xb2\x83\x75\x5b\x7b\x31\x63\
\xa6\x13\xe1\x87\x17\x1f\x3f\xc6\xca\x4b\xd6\x70\xfd\xcf\x5e\x5f\
\x05\xf0\xa0\xeb\x9a\xda\xb4\x7e\x18\x4c\xe7\x0a\xdd\x0b\xa0\x4f\
\xe6\x4d\xa2\x35\xbf\xfb\x19\xbd\x4e\xf1\xc2\x13\x2f\xf0\xd8\xd7\
\x1e\x65\xd9\xd5\x6d\x76\x90\xd0\xb4\x63\x03\x43\xfb\xc7\x18\xdc\
\x33\xc6\xe9\x87\xc0\x1a\x8e\x61\x51\xdc\x6d\xa4\xf9\x3a\x45\x0e\
\xca\x22\xc7\x91\x9c\x54\x92\x31\xec\x72\x62\xd7\x12\xb8\x60\x48\
\xe0\xe2\x25\x00\x3d\xd2\x6f\xd7\xf0\x37\x23\xe8\x14\x26\x3d\x46\
\x9c\x65\x22\xc6\x7a\x6b\x96\x3b\x85\x8c\xad\x4f\xae\x29\xb2\xe4\
\x8d\x26\x6b\xaf\x5e\x49\xcf\xaa\x2e\x7b\x12\x0e\xa9\x98\x1e\xce\
\x70\xfa\xd0\x0c\xd7\xbd\xfd\x7a\xba\x57\x77\xdb\x0d\x16\x58\xdc\
\x75\x02\xa3\x4c\xfb\xd7\x6e\xee\xeb\x09\xbc\xf3\xbc\x5d\xeb\xbc\
\xc2\x73\x89\x80\xda\xc0\x5f\x22\x81\xdc\x4c\x96\xef\x7f\xfa\x87\
\xc8\xc4\x14\x9d\x7d\xcd\x1e\x09\x64\xa7\xf2\xf4\x3f\x75\x92\xd1\
\x27\x8a\xcc\x3e\x17\x43\x18\x8c\xc8\x78\xf1\xbf\x51\xfc\x44\x15\
\x38\xaa\x24\x83\x0e\x09\x4c\x69\x03\x8b\x2e\x08\x12\xb8\x38\x09\
\xa0\x1c\xfc\x2d\x18\x2c\x12\x06\x4b\x8d\x04\x2b\x30\xd9\x64\x4d\
\xf3\x0b\x06\xb1\x9e\xd6\x97\x17\x59\x72\x5b\x82\x0d\x2f\x5f\x47\
\x53\x6b\xda\x01\x3f\x1c\x7f\xee\x14\xcd\xad\x1d\x5c\xf3\xb3\xd7\
\x90\x68\x4a\x56\x34\xf9\xa3\xca\x7a\xc3\x86\xb3\x88\x8a\xbf\xed\
\x75\xb5\x4a\x65\x3b\xa1\x11\x0e\xc0\x7c\xf7\xf1\xdc\xb1\xa1\x6a\
\xf8\x1b\x2a\xc2\x1d\x08\xfb\xee\xba\x0e\x02\xd8\xfb\xfd\x7d\xec\
\x79\x78\x27\x8b\x37\xb5\x7a\xb1\x01\x65\x49\x0e\x3f\x75\x92\xd1\
\x9d\x59\xc6\x1e\x05\x43\xc6\x72\x2a\x51\xfc\x0f\xe0\x51\x55\xe4\
\xa8\xb2\x38\xa9\x14\xa7\x9d\xc9\x47\xdc\x32\x62\xdb\x86\x3c\x8f\
\x49\xe0\xe2\x23\x80\x72\xf0\xb7\x62\x47\xfa\x97\x19\x09\x7a\x11\
\x5c\xcb\x6c\xec\x4e\x69\x16\x93\xdd\x3f\x03\x4b\xaf\x6d\x65\xc3\
\xf5\x6b\x31\x0d\xdb\xe4\x2f\x64\x2d\xfa\x9f\x1c\x64\xd3\xeb\x36\
\xd3\x77\xf5\x6a\x94\xa3\xc9\xc3\x40\x5f\x2d\x0e\x10\x96\xd6\x9b\
\x5b\x5e\x3f\xfa\xc8\xb3\xeb\xe7\x9f\xc9\xb9\xce\x2e\x06\x54\x84\
\x4b\xe0\x6e\x09\x23\x02\x7d\xdd\xc8\xa1\x11\x7e\xfc\xc5\x1f\xd3\
\xba\xca\xa0\xa9\x3b\x51\x72\x09\x0e\x8c\x71\xec\xc9\x31\xc6\x1e\
\x03\x35\x11\x43\xc6\x8a\xdf\x14\x26\xdf\x56\x05\x8e\x39\x96\xc0\
\x28\xca\x97\x21\x38\xaf\xd3\x84\x17\x17\x01\x94\xc0\x6f\xbf\x56\
\x4b\x38\x91\x7e\x1b\xfc\xab\x55\x91\xdb\x63\x56\xf2\x2d\x56\x3a\
\xc7\xe2\x37\xc3\xea\xeb\x16\xb3\x7a\xd3\x2a\x94\x65\xa7\xf7\xa6\
\x87\x33\x0c\xee\x1d\xe3\xa6\x5f\xba\x91\x96\x9e\x96\xaa\xda\x5e\
\x69\x00\x0f\x8f\x07\x04\xda\x2c\x94\x0c\xe6\x46\x08\x42\xfb\x77\
\x7e\xe4\x6c\xf5\xef\xfc\xe0\x42\x39\xff\xaa\x00\x15\x44\x59\x00\
\xe5\x24\x60\xf7\xe8\x23\x9f\x7e\x8c\xd9\x19\x7b\x76\x22\x97\x04\
\xc6\x8e\x4d\x71\x6c\xc7\x28\xc3\xdf\x95\x58\xc3\x31\x8c\xb8\xd8\
\x2d\x63\x85\xcf\xcb\x3c\xc7\x42\x82\x83\xa5\x91\x85\xe7\x21\x09\
\x5c\x3c\x04\xe0\x07\xbf\x3d\x7c\x57\x03\xbf\x2c\xf2\x0e\x99\xe1\
\xb5\x89\xc5\xb0\xf4\x1d\x70\xc9\xf5\x7d\xf4\xf4\x76\xa1\x2c\x7b\
\xe4\xde\xc9\x7d\xa3\xc4\x12\x4d\x5c\xf7\xde\x6b\x41\xd3\xfa\x51\
\xda\xbe\x2c\xc0\x37\x4f\xed\x51\xdf\xcb\x7b\xa3\x65\x11\x5d\x2c\
\xa2\x9b\x15\xf4\x92\xa6\x89\xe5\xac\x22\x8d\x3d\x53\xf0\x3a\x36\
\xd4\xf5\xb7\x32\xcc\x72\x82\xa3\x00\x9c\x66\x94\x31\x46\x39\xcd\
\x08\x63\x8c\x72\x9c\xa3\x64\x98\x39\xa3\x6b\x55\xf3\x88\x13\x55\
\x56\xa5\x51\xae\xfd\x83\x64\x70\xe0\xfb\x2f\x70\xe0\xe1\xbd\xac\
\xb8\xa6\xdd\xae\x19\x30\x21\x33\x95\xe3\xd0\x8f\x07\x19\xd9\x2e\
\x99\x3d\x08\x46\x9c\xbd\x46\x8a\xff\x52\xfe\xe0\xe0\x69\x70\xa6\
\x20\x3b\x4f\x49\xe0\xe2\x20\x80\x72\xf0\xb7\x39\xb3\xf1\x2e\x33\
\x12\xac\x16\x85\xd8\x07\x85\x34\xb7\xc4\x7a\x73\x2c\xbd\xdd\xe4\
\xf2\x9b\xd6\xd1\xda\xd5\xea\xa5\xf8\xf6\x3f\xd4\xcf\x25\x37\x6e\
\x60\xfd\x4d\xeb\xa2\x81\xaf\x00\x21\x22\x49\xa1\x9a\x9c\x49\x7b\
\xd5\x7b\xec\x0a\x7a\x59\xcf\x65\xac\x60\x15\x8b\xe8\x66\x3d\x97\
\xd5\x75\xfc\x99\x4a\x86\x59\x8e\x73\xc4\x59\x8e\x7a\xdf\xcf\x17\
\x71\x9f\xe9\x5a\x2c\x01\x77\x19\x3d\x74\x9a\xc7\x3f\xf7\x38\xcb\
\xaf\x29\xd5\x0c\x48\x69\x71\xe8\x47\x83\x8c\xfc\x24\xcf\xf4\x53\
\x31\x62\xc9\xd8\x48\x31\x9e\xdd\xa6\x0a\xec\x57\x05\x8e\x38\x2e\
\xc1\x04\x30\x79\xbe\x92\xc0\x85\x4f\x00\xe1\xe0\xef\x11\x26\xdd\
\x46\x82\xd5\x56\x96\x0f\xaa\x02\x5b\x5a\xae\x84\xa5\x6f\x34\xd9\
\x78\xe3\xa5\x34\xb5\x36\xa1\x24\xe4\x33\x16\x07\x1f\x3d\xc2\xf5\
\xef\xbf\x9e\xce\x55\x9d\xd1\x1a\x5f\x88\xd2\x7c\xfc\xd4\x07\xfe\
\x7a\xda\x69\xae\x6d\xba\x9e\xcb\xb8\x44\x5c\xce\x25\xe2\xb2\xb3\
\x0e\xf6\x5a\x25\xc3\x2c\x2f\xb0\x8f\x83\x6a\x1f\x2f\xb0\xaf\x2a\
\x21\x54\x7b\x30\x1b\x29\xc2\x3e\x61\xb8\x25\x20\x4a\xbf\x0b\xd3\
\x79\xbe\xff\xe7\x0f\xb1\x68\x7d\xc2\x8b\x0b\x48\xcb\xe2\xc5\x1f\
\x0d\x72\x7a\x47\x9e\xb1\x47\x41\x98\x8c\x9a\x2d\x7c\x41\x15\xd8\
\xa7\x0a\x0c\x28\xc9\x88\x92\x0c\x73\x9e\x92\xc0\x85\x4d\x00\x51\
\xe0\x8f\xb1\xc2\x88\xb3\xd4\x03\xff\x46\x58\xfd\xce\x26\x36\xbe\
\x72\x03\xa6\x69\xa0\x24\x64\x26\xf2\x9c\x78\x7e\x94\x6b\xdf\x7d\
\x2d\xcd\xdd\xcd\x36\xa8\x55\x08\xc0\x1d\xad\x1f\x45\x00\xf5\x48\
\xa3\xda\xac\x49\x34\xb1\x59\x5c\xcd\x66\x71\x35\x97\x88\xcb\x3d\
\x53\xfe\x42\x92\xd3\x8c\x70\x50\xed\x63\x97\x7a\x86\x5d\xea\x99\
\x86\xff\xfd\x7a\x09\x24\xd4\x0a\x10\xda\x7a\x8d\x20\x7e\xfc\xb9\
\x27\x91\xc6\x2c\x6d\x2b\x52\x4e\x5c\x40\x71\x74\xc7\x30\xc7\x1f\
\x9e\xf6\x48\x20\xd6\xca\x17\xa4\x4d\x02\x27\x95\xc5\xf1\xf3\x95\
\x04\x2e\x5c\x02\xa8\x04\xfe\x04\x7d\x56\x86\xbb\x55\x81\x2d\x5d\
\xaf\x82\xa5\x37\x37\xb1\xf1\xc6\x0d\x98\xce\xb8\xfd\xa9\xe1\x19\
\x06\xf7\x8f\x73\xe3\xaf\xdc\x40\x3c\x9d\xb0\x7b\xc1\x35\xef\x23\
\x80\x2e\x9d\x2f\x95\xc0\xdf\x88\x36\x89\xfa\x1b\x69\x9a\xb8\xca\
\xb8\x86\xab\x0c\x1b\xf8\x17\x93\x64\x98\x65\x97\x7a\x86\x9d\x72\
\x6e\x64\xd0\x08\x6b\x41\x69\x00\x87\x72\x02\x08\x5a\x07\x4f\xfd\
\xcb\x33\xcc\x4e\xda\xc1\x41\x62\xf6\x10\xe3\xa3\x3b\x86\x39\xb5\
\x7b\x9a\x91\x07\x41\x59\x8c\xc6\x5a\xf9\xa2\x2c\x72\x58\xe5\xe9\
\x57\x16\xc7\x1c\x12\x98\x72\x06\x12\x9d\x17\xe3\x07\x2e\x4c\x02\
\x08\x4f\xf5\x79\x01\x3f\x57\xf3\xf7\xbc\x01\x16\x5f\xdf\xc4\x95\
\x37\x6e\xc0\x30\x5c\xf0\xcf\x32\x35\x92\xe7\xda\xf7\x5e\x1d\x09\
\x78\xef\x6d\xba\x01\x52\x80\x70\xf0\xcf\x67\x5b\x5c\x6a\x5c\xc1\
\x0d\xe6\xcd\x5c\x65\x5c\x73\x41\x6a\xfa\x7a\x25\xc3\x2c\x3f\xb6\
\x1e\xe1\x21\xf9\x6d\x46\xd5\xf0\xbc\x9c\xa3\xea\x43\x4f\x75\x02\
\x10\x02\x5e\xfc\xe1\x21\x8e\xef\x3a\x64\x93\x80\xa9\x10\x26\x1c\
\xd3\x49\xa0\xc8\x50\xac\x8d\xaf\xc8\x02\xfb\x54\x9e\x7e\x25\x39\
\xe1\xa4\x09\xa7\xc0\x2b\x1d\x3e\xa7\xc5\x42\x17\x1e\x01\xd4\x0a\
\xfe\x37\xc2\xe2\xeb\x1c\xf0\x9b\x26\xca\x82\xc1\x7d\xa3\xe4\x73\
\x8a\xeb\xef\xbc\x36\x14\xf0\x7a\x7c\x38\xb8\x1d\xca\xc1\x3f\x5f\
\x1a\xbf\x49\x34\x73\x95\x71\x0d\x6f\x8a\xbd\x9d\x2e\xd1\x33\x87\
\xbf\x7a\x71\xc8\x01\xf9\x3c\x3f\xb6\x1e\xe1\xc7\xd6\xc3\x67\xf7\
\xc4\x5a\x90\x30\x94\x04\x9c\x2e\x33\x80\x17\x1f\x3a\xcc\xfe\xef\
\xed\x65\xcd\xcd\x3d\x36\x09\x18\x70\x6c\x67\x99\x25\xf0\x05\x99\
\xe7\x79\x55\xf0\x48\x60\x04\xc5\x0c\x36\x09\x9c\xd3\x8a\xc1\x0b\
\x8b\x00\x2a\x81\x3f\xc9\x6a\x2b\x13\x0e\x7e\x24\x0c\xee\x1d\x25\
\x97\x95\x5c\x77\xd7\xb5\x1e\xe0\x25\x94\xfb\xf6\xbe\xdf\x61\xe3\
\xcb\x22\x1a\xaa\x01\xed\xd1\x6d\x2c\xe6\x06\xf3\x16\x5e\x1d\x7b\
\x23\x4d\xe2\xe2\xd7\xf6\xb5\xca\xa8\x1a\xe6\x81\xe2\x7f\xb0\xc3\
\x7a\x9a\x59\x75\x66\x29\xc6\x4a\x12\x7c\xd6\x75\x6d\x1f\x24\x00\
\xfd\xf7\xa1\xed\x87\x39\xf0\xdd\x7d\xac\xb9\xc5\x4f\x02\xc3\xbb\
\xa7\x19\x76\x49\xa0\x85\x7b\x65\x81\xdd\xb2\x40\x3f\x92\xe3\x4a\
\x32\xe6\x14\x0b\xb9\xf1\x00\x59\xfb\x95\xce\xdf\x3d\x07\xe5\xfc\
\x21\x80\x1a\xc1\xdf\x72\x25\xf4\xbd\xb3\x89\x8d\xaf\xbc\xcc\xf1\
\xf9\x15\x83\xfb\x46\xc9\x65\x24\x2f\xbf\xf3\x1a\x0f\xe4\x6e\x6b\
\x4b\xdd\xbc\x57\xfa\x3b\xa2\xfc\xa6\xff\x99\xde\x73\xa5\x63\xbb\
\x44\x0f\x6f\x49\xfc\x1c\xaf\x88\xdd\x7a\x76\xdb\xf4\x02\x93\x59\
\x35\xcb\x0f\x8a\x0f\xf2\x83\xe2\x83\x0d\x25\x82\x9a\x5c\x02\x17\
\xf4\x4a\x95\x5e\x73\xae\x6d\x1b\xef\x1f\x67\xc7\x57\x7e\xca\x8a\
\x6b\x3a\xc0\x54\x18\xa6\x1d\x13\x18\x7c\x62\x9a\xd1\xef\x83\x30\
\x19\x31\x9b\xf9\x9c\x2a\xb0\x5b\x16\xe9\x57\x16\x27\xb1\x27\x17\
\xc9\x72\x0e\x87\x12\x5f\x18\x04\xe0\x1f\xd5\xe7\x4e\xdf\xd5\x25\
\x4c\x96\x1b\x49\x56\x1b\x56\xec\xbd\xe4\x8d\x9b\x93\x1b\xf2\xac\
\x7e\x67\x13\x57\xbe\xf2\x32\x6f\xa2\xce\xc1\x7d\x23\xa4\xba\x5a\
\xb8\xe4\x55\xeb\x4b\xda\x5d\xd3\xf2\x6e\x70\xcf\x6f\x0d\x9c\xf9\
\xb0\xdd\x5a\xda\xa7\x49\x34\xf3\x96\xc4\x3b\x79\x6d\xe2\xf6\xb3\
\xd7\x96\x17\x81\xcc\xaa\x19\xbe\x5f\x78\x90\xfb\x0b\xff\xee\x5b\
\xdf\xa8\x60\x60\x98\x94\x69\x7e\x51\x22\x03\x77\xdd\xf8\xc0\x38\
\x3b\xff\x75\x87\x9f\x04\x76\x3a\xd9\x81\x47\x20\xd5\x9c\x3c\x5e\
\x8c\xe5\x3e\x29\xb3\xbc\xa0\x2c\x8e\x28\xc9\xb0\x53\x36\xec\x92\
\xc0\x59\x0f\x0a\x9e\xff\x04\xe0\xd7\xfc\x71\xa0\xd5\x19\xce\xbb\
\xdc\x48\xd0\xa7\x2c\x6e\xb7\x66\x79\x4b\xeb\x26\x58\xf1\xb6\x24\
\x5b\x6e\xdd\xa8\xf9\xfc\x23\x64\x32\x45\xae\x79\xdf\xcb\xfc\xc0\
\x57\x61\x26\xbf\x7d\x3f\x52\xd5\x66\xf2\x47\x36\x58\x8d\xc0\x7f\
\x5d\xf2\x4d\xbc\x36\xf1\x26\x9a\x44\xf3\xd9\x69\xc7\x8b\x50\x46\
\xe4\x30\xdf\xc8\x7f\x8d\xc7\x0b\xdb\x2b\xee\x37\x17\x62\x08\x75\
\x07\x1c\xb0\x1b\xce\x77\x77\x02\x38\x9d\x0c\x4e\x3c\x3d\xc8\xde\
\xfb\x9f\xa7\xef\xe6\x1e\x84\x93\x1d\x18\x78\x7a\x88\xc1\x1f\xcd\
\x32\xf6\x08\x88\x38\xbb\xcd\x14\x9f\x93\x39\x0e\x39\x99\x81\x51\
\xf0\x06\x10\x9d\xf5\x71\x03\xe7\x37\x01\x94\xa6\xf1\xb2\xe7\xef\
\xb3\x67\xf2\xe9\x76\x34\x7f\x1f\x70\x8b\xc8\x26\xde\x27\x3a\xf3\
\xac\x7c\x9f\xc9\x55\xaf\xba\x8c\x74\x5b\x13\xca\xb2\xeb\xb5\xc7\
\x86\x66\xb8\xfa\x7d\x5b\x7c\x66\xbf\x4e\x02\x5e\xb0\x2f\xc4\xef\
\xaf\x57\x6a\x6d\x8f\x97\xc5\xaf\xe3\x3d\xe9\x5f\xa4\xdb\x58\x3c\
\xff\xed\xf7\x12\x91\x7d\xc5\x3d\xfc\x6b\xf6\x5f\x38\x62\x1d\xae\
\xfb\xd8\x39\xd5\x0b\x88\xd2\xa7\x81\x1e\x18\xb4\x9f\x9e\x81\x47\
\x8e\x30\xf0\xd8\x21\x56\xdd\xd0\x05\xa6\x42\x59\x92\x43\x4f\x0c\
\x72\xf2\x07\x79\xa6\x77\x43\xa2\x39\xf6\x98\x45\xf1\x4b\xaa\x40\
\xbf\x53\x23\xe0\x66\x06\x4a\xaf\x25\x3b\x4b\x24\x70\xbe\x13\x40\
\x69\xbe\x7e\x7b\xf2\xce\x56\x61\xd8\x79\x7e\x4c\x5e\x51\x9c\xe2\
\xce\xc4\x62\x92\xab\xde\x67\xb2\xf9\x55\x97\xdb\xf3\xf6\x59\x76\
\xaa\xef\xf4\xd0\x34\x9b\xdf\x71\xa5\x07\xf4\x20\xf8\x83\xda\x5f\
\x8f\xf6\x57\x6c\x90\x39\xde\x77\xb7\xb1\x98\x0f\xb4\xfc\x16\x97\
\xc5\xae\x9c\xff\x76\x7b\x89\xca\x77\x73\xf7\x73\x5f\xf6\x6b\xcc\
\xaa\x99\x33\x76\x07\x2a\x1d\xaf\x03\xdf\xb5\x04\x74\x52\xb0\x49\
\xe0\x28\xc3\x7b\x8f\xd1\xb5\xa1\x05\x61\x82\x92\x36\x09\x0c\x3e\
\x98\x67\xe6\x00\x18\x29\xbe\x21\x4c\xee\xd7\xd2\x83\x23\x4e\x7d\
\x80\x3b\xa9\xc8\x59\xc9\x0c\x54\x6b\xa7\xd8\x7c\x5f\x40\xa4\xf8\
\x4d\xff\xb4\x33\x5f\xff\x22\x11\x63\x99\x88\x73\x89\x9a\x35\x7f\
\xd6\x48\x5a\xc9\x9e\x37\xc0\xba\x97\xad\xa6\xb9\xb5\x09\x29\x61\
\x7a\x64\x86\xa1\xfe\x31\x5e\xf6\xbe\x2d\xf6\x24\x9e\xce\x4d\x96\
\x00\x2f\xca\x48\x40\x3a\xce\x7f\xad\xad\x1d\x45\x02\x51\xeb\x7f\
\xb6\xe9\xdd\xfc\x4c\xea\x2d\x0b\xe6\xfe\x3c\xcb\xeb\x92\x6f\xe6\
\xc6\xc4\xab\xf8\xc7\x99\x4f\xf1\x4c\xfe\xc9\x33\xfa\x5b\x95\x80\
\xe1\x3e\x3b\xb6\xe9\x0f\xca\xf9\x14\x94\x62\x02\xab\x6e\x5a\x41\
\x7e\x26\xcf\xe8\x81\x53\x74\x6f\x68\xc1\x30\x0c\x56\x6d\xee\x21\
\x37\x3e\x48\x7e\x54\x22\xa6\x12\x6f\xb1\x62\xf9\x21\x11\xa7\xa0\
\x0a\x14\x04\xe4\x95\x42\x20\xbd\x38\xb4\xfb\xba\xf2\x73\x2a\xe7\
\xc6\x02\xf0\x83\xbf\x09\x83\x76\xc3\x60\xa9\x30\x59\x62\xa4\x58\
\x6f\xe4\x13\xbf\x93\x9b\xcd\xaf\x58\xf6\x1e\xb8\xf4\x55\x2b\x58\
\xb5\x61\x05\xd2\x82\x42\xc6\xe2\xd0\x33\xc7\xd9\xfc\xae\x8d\xc4\
\x52\x71\x2f\xc8\x17\x05\xfc\xa0\x25\x50\xb1\x21\x6a\xbc\x5f\x7d\
\xbf\x1e\x73\x09\xbf\xd3\xf6\x27\xac\x8e\xad\x9d\xbf\xb6\x5a\x90\
\x50\x79\x26\xff\x04\xff\x30\xfd\x49\x66\x64\x63\xb2\x05\x61\x38\
\x70\x2d\x01\x3b\x36\x60\x9b\xff\x86\x5b\x23\xe0\x10\xc1\x33\xff\
\xb8\x8b\x96\x6e\x68\xea\x89\x23\x4c\x98\x1d\xcb\xf2\xc2\x23\x83\
\x9c\xfc\x37\x10\xd2\xcc\x89\x26\xeb\xaf\x9c\xcc\xc0\x61\x25\x39\
\x8d\x9d\x1e\x9c\xc1\x9d\x72\x7c\x9e\xad\x80\xf3\xcf\x05\xf0\xfb\
\xfd\x29\xec\x39\xfc\x16\x8b\x38\xab\x8c\x38\x2b\x44\xd1\xfc\xc5\
\xc2\xac\x75\x75\xd7\x6b\x60\xfd\x5b\xbb\xb9\x64\xeb\x5a\x7b\x48\
\xaf\x05\xbb\x7f\xf8\x22\x2f\xbb\xf3\x2a\xcc\x54\x4c\x03\x77\x88\
\xc6\x0f\x21\x81\x33\xb9\xaf\xb0\x63\xde\xd0\x74\x07\xef\x68\x79\
\xef\x82\xd6\x3f\x87\x32\x22\x4f\xf1\xd9\xc9\x4f\xb0\xb7\xf0\xdc\
\xbc\xb8\x04\x9e\xc9\x2f\x82\x59\x02\xe5\x5b\xf7\xcc\x67\x77\xd2\
\xb6\xcc\xb4\x49\xc0\x80\xf1\x13\xd3\x1c\xde\x3e\xcc\xd0\x7f\x43\
\x2c\x6e\x0e\xd3\x64\x7d\x5c\x66\x79\x41\x49\x86\x94\xc5\x49\xec\
\x11\x84\xb3\x9c\x85\x78\xc0\xf9\x48\x00\xee\x0c\xbe\x71\x9c\x17\
\x76\x38\xf5\xfd\x6b\x0c\x62\xaf\xca\x4f\x15\xdf\xd3\xba\x19\x27\
\xdd\x77\xb9\x9d\xeb\xb7\xe0\xe0\x13\x47\xe9\xbb\x6d\x35\xad\xcb\
\x5b\x4b\xe9\x3d\x1d\xec\x88\x72\xad\x5f\x45\xf3\xcf\xe5\x1e\x9b\
\x8d\x16\x3e\xd8\xfe\xbf\xb8\x36\x75\xc3\xfc\xb4\xcf\x82\xd4\x2d\
\xff\x39\xf3\x65\xfe\x73\xe6\x2b\x40\x63\x32\x02\x81\xad\x65\x81\
\x41\x2f\x40\xe8\x7c\x2f\x66\x0a\x3c\xfb\x8f\xbb\x58\xb6\xa9\x05\
\x33\x2d\x30\x4c\xbb\x50\xe8\xf8\xc3\xd3\x9c\x7e\x04\xe2\xe9\xd8\
\x93\xd2\x28\xfe\xa3\x2a\x72\x42\x15\x39\xe6\xc4\x03\xce\x4a\xb9\
\x70\xb5\xf6\x30\x6a\xfc\x3b\x8d\x11\xfd\xe5\x1d\x82\xa4\x30\x68\
\x11\x26\x8b\x44\x8c\xa5\x08\x36\x58\xb3\xea\xed\x89\x25\xb0\xf8\
\x75\x26\x97\xbe\x6c\x1d\xb1\x98\x9d\xeb\x3f\xfa\xdc\x49\x16\x6f\
\xe9\xa1\x65\x79\x0b\x52\x29\xa4\xb4\x17\x4b\x29\xfb\xb7\xb6\xce\
\xdb\x26\xed\xf5\x2a\x62\x71\x1b\xa7\x9e\x65\x75\x6c\x2d\x1f\xef\
\xfe\xfb\x05\xf0\x9f\x67\xf2\xf6\xe6\xf7\xf2\xa7\x1d\x1f\x23\xa5\
\xd2\xbe\xfe\x92\x52\xd6\xb4\x54\xee\x77\xca\x9f\x2f\xe5\x7f\xc6\
\xcc\x74\x8c\x8d\xef\xb9\x9c\x63\x4f\x8f\x81\x14\x48\x09\x2b\xb7\
\xf4\xb0\x68\x6b\x82\xe6\x4b\xa1\x90\x29\x5e\x17\x13\xb1\x57\x89\
\x18\xcb\x85\x49\x0f\x06\x6d\x08\xd2\xd8\x56\xb0\x17\x6f\x3c\x17\
\x72\xf6\x82\x80\x7e\xf0\x27\x10\x34\x0b\x83\x0e\x11\x63\x89\x91\
\x64\xb9\x9c\x32\x7f\x41\xc5\xad\x64\xcf\x1b\xe1\xb2\xeb\xd7\xd1\
\xdc\xd6\x84\xb4\x60\xfc\xf8\x24\xaa\xc9\xa0\xe7\xf2\x2e\x2c\x29\
\x4b\x9a\x3d\x68\xf2\xab\x40\x2a\x30\x40\x7c\x73\xb5\x68\xdc\xe3\
\x6e\x6b\x7e\x1d\x77\xb5\x7f\x90\x66\xa3\xe5\xac\x35\xd9\x82\xd4\
\x2e\x57\x24\x36\xf3\xf1\xee\xbf\xe7\x2f\x4f\x7f\x98\xc3\xf9\x17\
\xe7\xf4\x37\xaa\x65\x06\xfc\x16\x80\xed\x06\x48\x6c\x4b\x20\xb9\
\x28\xc1\xba\x37\xad\xe3\xc8\x0f\x0e\xb3\xfc\x65\x9d\x28\xa1\xe8\
\xbb\x66\x09\xb9\xc9\xe3\xe4\x47\x25\xc5\x71\xf5\x76\x52\x1c\xc7\
\x24\x23\x14\x19\xe5\x4e\x25\xa6\x9c\xa0\x60\x13\xe2\x5c\x54\x0a\
\x9e\xcd\x2c\x80\x1e\xf8\x4b\x09\x83\x36\x4c\xba\x8d\x38\x4b\xc9\
\x9a\x6f\xb6\x0a\xd6\x92\x9e\x9f\x81\x35\xd7\x2f\x65\xd1\xd2\x4e\
\xef\xc5\x9c\xc7\x0e\x9e\x62\xcb\x9d\x9b\xb0\x5c\x56\xd7\x09\x40\
\x95\x4c\x7f\x19\x20\x05\x7f\xe7\x89\x39\x99\x86\xee\x71\x77\x77\
\xfc\x2a\x6f\x6a\xfd\xd9\xb3\xd8\x54\x73\x97\x61\x6b\x88\x53\xc5\
\x93\xec\xc9\xed\x02\xe0\xf9\xfc\x4e\x00\x66\xe4\x0c\xfd\x85\xea\
\xc0\xd8\x98\xbc\x0a\xb0\x8b\x99\xfa\xe2\xeb\x68\x36\xec\x4f\xfb\
\xfb\xf9\x4d\x7e\x3d\xe6\x12\x3e\xd4\xf5\x97\xdc\x3b\xfe\x19\xb6\
\xcf\x7e\xb7\xe2\xbe\xf5\x3e\x0f\x4a\x85\xc4\x04\x94\xc2\x70\x48\
\x40\x28\xe8\xbc\xb4\x83\x89\xa3\xdd\x8c\xf7\x4f\xd2\xb1\xa6\x99\
\x58\x2a\x46\xef\x35\x3d\xe4\xc7\x87\x18\xfe\xa6\x95\x54\x59\xde\
\x65\xb6\x70\x4a\xda\x06\x7f\x56\x59\xde\xa4\xa2\xb6\x17\xdb\x04\
\x67\x9b\x04\xce\x4e\x0c\x20\x98\xf2\x33\xe8\x74\x5e\xdc\xb1\x56\
\x29\x6e\x51\xb3\xc6\x2f\xa7\xd7\x4b\xd6\xde\xd5\xc4\x96\x5b\x36\
\xd9\x53\x77\x5b\xb0\xfb\x07\x07\xb9\xe2\xdd\x1b\x30\x93\x31\x7f\
\xb0\x4f\x05\xa2\xff\x81\x75\x67\x72\x0f\xfa\x31\xcd\x46\x0b\xbf\
\xb4\xe8\xd7\xb8\xad\xf9\x67\xce\x4e\x6f\xd4\x29\x33\x72\x9a\x3d\
\xb9\x9d\xec\xc9\xed\xe4\x70\xfe\x45\xf6\xe4\x76\x56\xbd\xa7\x33\
\x11\x9b\x0c\xd6\xb3\x31\xb9\x99\x8d\xc9\xab\x3c\xb2\x38\x1f\xe5\
\x5f\xc6\x3e\xcd\x03\x53\xff\x59\xf3\xfe\xb5\x12\x82\x4e\x00\x86\
\xd0\x7f\x2b\x2f\x2e\xb0\xeb\x9f\x9f\xa7\xab\x2f\x45\xb2\x3d\x86\
\x61\x2a\x06\xf7\x8e\xd2\xff\xcd\x49\xc6\x9f\x80\x78\x3a\xf6\x88\
\x34\x8b\xff\xaa\xf2\xbc\xe8\x54\x0a\xba\x23\x07\xed\x69\xc6\x1b\
\x3c\x68\xe8\xdc\x07\x01\xf5\xa8\xbf\x3d\xc8\xa7\x4d\x98\x2c\x16\
\x09\x56\x1b\x71\x36\xca\x69\xe3\xf7\x54\x4c\xb6\xaf\xfd\xcd\x18\
\x9b\x6f\xbd\xdc\xcb\xf7\x1f\x7e\xea\x38\xdd\xd7\x77\xd3\xb2\xb4\
\xa5\x34\xba\xcf\x05\xbd\xd4\x4d\x7d\x51\xe6\x16\xd4\x7b\x0f\x61\
\xfb\x35\x1b\x2d\x7c\x74\xd9\x27\x59\x93\x58\xdf\xc8\xfe\x38\x63\
\xd9\x9d\xdd\xc9\x4f\x32\x8f\xb1\x3b\xbb\x93\xc3\xf9\x17\x1a\xfe\
\xf7\xeb\xed\xf7\x8d\xc9\xab\xb8\x32\x75\x15\x2f\x4f\xbf\xf2\xbc\
\x6b\xab\x1f\x4e\x7f\x87\xbf\x19\xf9\xf3\xba\x8e\xa9\x65\xe0\x10\
\xa2\x04\x7e\x3f\x09\xd8\xae\x81\x95\x29\xb2\xf3\x33\xbb\x59\x73\
\x73\x37\xc2\xb4\x63\x11\x87\x9e\x18\xe4\xf8\x7d\x79\xf2\x47\x4d\
\x48\x59\x7f\x0f\x3c\x21\xf3\x1c\x52\x16\x83\x28\xc6\xe6\x6b\x12\
\x91\x73\x4b\x00\xfe\xd7\x75\x27\x30\x68\x16\x06\xdd\x22\xc6\x4a\
\x33\xc5\x7a\x72\xe6\x9d\x56\x4e\x5d\xb7\xf4\xdd\x92\x2b\x5e\xdf\
\xc7\xb2\xb5\x4b\x50\x16\x9c\x3e\x36\xc9\xd8\xf4\x34\x7d\xb7\xad\
\xd2\xc0\xef\x07\xba\x8c\xf8\x5e\xeb\xf5\x57\xda\xde\x6c\xb4\xf0\
\xb1\x15\x7f\x73\xde\x3c\xd0\x4f\xce\x3e\xe6\x2c\x8f\x33\x23\xa7\
\xcf\xf5\xe5\x44\xca\xe2\xd8\x52\xae\x6b\x7a\x25\xaf\x6a\x79\xfd\
\x79\xd3\x76\x3f\x9c\xfe\x36\x9f\x1a\x6e\x2c\x09\x80\x13\x03\x20\
\x40\x00\x1a\x21\x8c\x1f\x9c\xe0\xc4\xf6\xa3\x2c\xbf\xba\x13\x61\
\x2a\x72\xd3\x79\xf6\xff\xf0\x38\x27\xbf\x06\x31\x91\x18\x52\xe9\
\xfc\x5f\x5b\x39\x0e\x38\xf3\x0a\x9e\x42\x32\xc9\x3c\x54\x09\x9e\
\x0f\x04\xe0\x37\xfd\x4d\xcd\xf4\xcf\x18\xbf\xdc\x7a\x8d\x64\xed\
\xcf\xb5\xb1\xf9\x95\x57\x20\x2d\xc8\x67\x8a\x1c\x78\x7a\x80\x0d\
\xef\x58\xa7\x01\xdb\xef\xe7\xcf\x27\xf8\x5b\xcc\x56\x3e\xb6\xe2\
\x6f\x58\x9b\xbc\xa4\x11\xed\x3f\x67\x39\x55\x3c\xc9\x37\x26\xfe\
\x9d\x27\x66\x1e\xe3\x54\xf1\xe4\x39\xbd\x96\xb9\xc8\xe2\xd8\x52\
\xde\xd2\xfe\x73\xbc\xba\xf5\xf5\xe7\x3c\x76\xf0\x83\xa9\x6f\xf1\
\x8f\x23\x7f\x5b\x33\x79\xd6\xea\x0e\xd8\x60\x57\x65\xe0\x77\x3f\
\x0f\x3f\x78\x84\xb8\xb2\x9c\xb9\x05\x15\xa7\x0e\x8e\xd3\xff\xc3\
\x31\x46\xbf\x07\xf1\xb4\xf9\x88\x8c\x59\x5f\x55\x79\x5e\x90\x45\
\x8e\x11\x74\x05\x1a\x44\x02\xe7\x8e\x00\xfc\x6f\xed\x4d\x62\xd0\
\x66\xd8\x05\x3f\x7d\x22\xc1\x46\x35\x63\xfc\x9e\x68\x91\xed\x7d\
\x1f\x88\x71\xf5\xeb\x36\x91\x4c\x25\x91\x16\x3c\xff\xf0\x8b\xac\
\x7a\xfd\x0a\x12\xed\xc9\x52\x9e\x5f\x82\x44\xd8\xd3\x7c\x47\x10\
\x41\xbd\xd7\x1d\x65\xf6\x7f\x7c\xd5\xdf\x9d\x53\xf0\x3f\x97\x79\
\x96\xaf\x8f\xff\x3b\x4f\xce\x3e\x76\xce\xae\xa1\xd1\xf2\xea\xd6\
\xd7\xf3\x9e\x45\xbf\xc8\xe2\xd8\xd2\x73\x76\x0d\x87\x72\x07\xf9\
\xa3\xe3\xbf\x19\x4a\x02\xf5\x07\x04\x9d\x19\x85\xf4\x9a\x00\xa3\
\xdc\x25\x30\x84\x62\xe7\xdf\x3d\x4f\xef\xb5\x9d\xf6\x54\xe3\xa6\
\xe2\xe0\x63\xc7\x39\xfe\xdf\x79\xf2\x47\x4c\x48\x5b\x7f\xaf\x14\
\x4f\xa8\xc2\xfc\xb9\x02\xe7\x92\x00\xdc\x81\x3e\x31\x27\xe5\xd7\
\x25\x62\xac\x32\x93\xac\xb7\x72\xbc\x5b\xe6\xb8\x79\xe9\x3b\xe0\
\xca\x3b\xfa\x58\xbe\x76\x29\xca\x82\x53\x87\xc6\x98\x66\x96\x65\
\xd7\x2d\xb5\x41\x1f\x88\xf8\xbb\x6e\x80\x94\xe5\x41\xbf\x46\x04\
\xfc\xfe\x72\xf5\xa7\xcf\x19\xf8\xbf\x3f\xf9\x20\xf7\x8d\xfd\xdb\
\xbc\xf8\xf5\xb5\xdc\x7f\xa3\x25\xec\xb9\xda\x94\xde\xca\x7b\xbb\
\x7e\x91\x4d\xe9\xad\x67\xed\x1e\x75\xd9\x35\xfb\x2c\x7f\x78\xec\
\xd7\xeb\xba\xe6\x6a\xfb\x46\x69\x7f\x61\xd8\x19\x82\xe9\xe3\x33\
\x1c\x7b\xe0\x28\xab\x6e\xe8\xc2\x30\x21\x3b\x9d\x63\xff\x43\xc7\
\x39\xf9\x55\x30\x30\x8f\x92\xb2\xfe\x56\xe6\x39\xa8\xec\x49\x44\
\x4e\xa1\x3c\x57\xa0\x21\xef\x1d\x3c\x37\x83\x81\x9a\xbc\xc2\x06\
\x03\x41\x4c\x08\x92\xd8\x45\x3f\x1d\x4a\xb1\x9a\x82\x71\x73\xf3\
\x06\x49\xcf\xd5\xcd\xac\x58\xb7\xd4\x36\xfd\xb3\x16\xc7\x0f\x0f\
\xb1\xe1\xe7\xd7\xd9\xf9\xfe\x32\x02\x10\xf6\xe0\x1f\xdd\x2a\x98\
\x63\xae\xdf\xdd\xcf\x6d\x9c\x66\xa3\x85\xbf\xe8\xfd\xfb\x73\x02\
\xfe\x1f\x4f\x3f\xc2\x67\x87\x3e\xd9\x70\x33\xbf\x96\xb6\x38\x9b\
\x73\xf3\x03\xec\x9a\xfd\x29\xbb\x66\x7f\xca\xa6\xf4\x56\xee\x59\
\xfc\x5b\x67\xbd\xbd\x37\x37\x6d\xe5\x77\x96\xfc\x31\xff\x6f\xf0\
\xff\x9e\x51\xdb\x94\xcd\x25\x20\x9c\x01\x43\x86\xf3\x29\xc0\x70\
\x06\x11\x35\x2f\x6f\x22\xd5\xdb\xc2\xd4\x89\x2c\x6d\x2b\x52\xa4\
\x5b\x93\x2c\xbb\xa2\x93\xd9\xab\xc7\x98\x7c\x92\x55\x64\xb8\x55\
\xc4\x19\x55\x82\x31\x04\x53\x40\x16\xe5\xc4\x01\xce\x82\xcc\x67\
\x1d\x80\x81\xb0\x23\xff\xd8\xc3\x7c\xbb\x8c\x38\x4b\xe4\xac\xf1\
\x6e\x12\x92\xae\xd7\xc0\x86\xad\xeb\x3c\xb3\xfe\xf0\x4f\x8f\xd2\
\xfb\xba\xe5\xa5\x62\x1f\x8f\x00\x84\xa3\xf1\x95\x0f\xf8\x2a\x44\
\xfb\x57\x2e\xe4\x28\x01\x42\xdf\xaf\xc5\x6c\xe5\xaf\xfa\x3e\xc3\
\xba\xd4\xa5\x67\xa3\xbd\x3d\xd9\x35\xfb\x53\xbe\x38\xfc\x4f\xec\
\x9a\xfd\xe9\x9c\x8e\xaf\x06\xf0\xb3\x0d\xee\x7a\x64\xe7\xcc\x33\
\xfc\xea\xe1\x5f\xe0\xb5\xed\x6f\xe4\x7d\xdd\xbf\xcc\x92\xf8\xb2\
\xb3\x76\xee\xd7\xb6\xdf\x8e\x52\xf0\x57\x27\xfe\x8f\x6f\xfd\x5c\
\xdd\x00\x28\x11\x80\x0b\x7a\xc3\x21\x02\x43\xd8\x28\xee\xfd\x99\
\x65\xec\xf9\xec\x01\x5a\x97\xa5\x50\x02\xba\xfb\xda\x19\xbb\x66\
\x8a\xcc\x40\x91\xe2\x90\xf9\x6a\x4c\xeb\x29\x21\x99\x44\x31\xad\
\x24\x59\x14\x05\xdc\x69\x2d\xe7\xb9\x36\xa0\xf1\x04\xa0\x57\xfc\
\x41\x42\x40\x5a\x18\xb4\x8b\x18\xdd\xc2\x32\x6f\xb0\x0a\xd6\xea\
\xce\x1b\x60\xf5\x96\x65\x34\xb5\x35\xa3\x24\x8c\x1d\x9f\xc0\x58\
\x6c\x10\x6f\x8b\x63\x59\xd2\x1f\xe0\x93\xa2\x4c\xeb\xbb\x6e\x40\
\xa9\x03\xaa\x07\xfd\xa2\x3a\xf8\x23\x6b\xfe\xf2\xac\x82\x7f\xda\
\x9a\xe2\x33\x27\xff\x9a\xef\x8e\x7f\xb3\xe6\x63\xc2\xee\x6f\x3e\
\x00\x5e\xa9\x1d\xe7\xe3\x7c\xdf\x19\x7b\x80\xc7\x27\x1f\xe6\x6d\
\x8b\x7e\x9e\xf7\xf7\xfc\x4a\xc3\xff\x7e\x94\xbc\xae\xe3\x76\xa6\
\xad\x49\x3e\x3d\xf8\xd7\x67\x74\xbf\x65\x24\xe0\xc4\x01\x14\x0e\
\xf8\xdd\xe1\xc4\x06\xf4\xbe\x69\x39\x27\x7e\x38\xc4\x8a\x97\x75\
\x62\xc4\x0c\x96\x5f\xde\xc5\xd4\x56\xbb\x40\x28\x56\x30\x6f\x27\
\x66\x0d\x2b\xc9\x24\x92\x19\x04\x39\xa7\x42\xd0\x72\x96\x79\x93\
\xf9\xb2\x00\x5c\xd3\x3f\xe5\x98\xfe\x9d\x46\x82\x1e\x6b\x4a\xbd\
\x29\xd6\x01\x3d\xb7\xc5\xe8\xdd\xb0\xd2\x1e\xa3\x2f\x61\x60\xef\
\x71\xd6\xbc\x63\x95\x53\x9b\x5d\x02\xbb\x0b\x7c\xdb\x02\xf0\xfb\
\xff\x4a\xd5\xa6\xf9\x4b\x1d\x54\xfe\x70\xff\x61\xef\xff\xe1\xaa\
\xe6\xb3\xf7\x12\x8e\xc7\x27\xb7\xf3\x17\xc7\x3e\xc2\xb4\x35\x55\
\xf7\xb1\xe7\xfc\x45\x2d\xf3\x24\x93\x72\x82\xcf\x0f\xfd\x03\x8f\
\x4d\x6c\xe7\xf7\x57\x7e\xe8\xac\x91\xf1\xcf\x76\xbd\x9b\x83\xb3\
\xfb\xf9\xce\xd8\xfd\xbe\xf5\x73\x25\x3a\xe5\xb8\xa7\x42\x38\xcf\
\xa7\x28\x05\x07\x95\x82\xf4\xb2\x14\xaa\x35\x46\x6e\xaa\x48\xb2\
\x3d\x46\xdb\x92\x66\x16\x6d\x4c\x33\x7b\x30\xc3\xcc\x01\xeb\x1a\
\xb3\x89\x9f\x08\x83\x31\x0c\x26\x94\x64\x06\xb0\x1c\x57\x60\x5e\
\xad\x80\xc6\x06\x01\x4b\xda\x3f\x8e\xa0\x49\x98\x2c\x72\xde\xe1\
\xb7\x9e\x82\x79\x57\x31\x6b\xbd\x66\xe9\xcf\xc1\xa6\x3b\xfa\x58\
\xb6\x66\x19\xca\x82\x43\x4f\x1f\x23\x7e\x89\x49\xf3\xaa\x66\x9f\
\xe9\x5f\x06\x7a\x59\xee\xfb\xd7\x3b\x71\x87\xbe\xed\x1d\x3d\xef\
\xe5\x37\x56\xfe\xde\x7c\xb4\x69\x99\x4c\x5b\x53\x7c\xfc\xe8\x87\
\x78\x6c\x62\xfb\x19\xb7\xf5\xc5\x46\x04\xc1\xe7\xef\xce\x25\xf7\
\x70\xe7\x92\x0f\x9c\xb5\xf3\xff\xca\x81\x77\xf1\x42\xe6\xc0\x19\
\xb9\x00\xae\x08\xec\x38\x80\x21\x82\x9f\xce\xb8\x81\x9c\xc5\x8b\
\xf7\xf6\xb3\xee\x55\x4b\x30\x4c\x28\xe4\x8b\x3c\xff\x9d\x23\x9c\
\xfc\x1a\x08\x69\x1e\x25\x69\xfd\x3f\x99\x67\xbf\xb2\x18\x40\x32\
\xae\x05\x04\xe7\x3c\x6f\xc0\xb9\x08\x02\xda\xda\xdf\x20\x25\x0c\
\x5a\x85\x49\xa7\x30\x59\xa1\xb2\xe2\xa6\x74\x1f\x2c\xba\x2a\xc9\
\x8a\x75\xcb\xec\x09\x3e\x72\x45\x66\xd4\x2c\xcb\x57\xf6\x94\x02\
\x7f\x12\x7f\xa4\xdf\x67\x11\x38\xa9\x40\xb7\xc1\x6b\x00\x43\xd8\
\x3e\x5b\x5b\xaf\x3d\x6b\xe0\x7f\x76\xea\x69\xfe\xe4\xd0\x6f\xd7\
\x9c\x83\xae\x46\x6a\xe7\xb3\x6f\x5f\x8b\x54\xbb\xfe\x7b\x07\x3f\
\xc3\xa3\xe3\x3f\xe4\x0f\x7b\x3f\xc2\xfa\x74\x7d\xaf\x35\x9f\x8b\
\x7c\x62\xdd\x3f\xf2\x8b\x7b\xdf\xc9\xc9\xfc\x89\xba\xae\x33\x4a\
\x84\x02\x29\x14\x86\x02\xe9\xc6\x05\x5c\x32\x48\x0a\x3a\xaf\xed\
\x60\xbc\x7f\x86\xce\x35\xcd\x24\x52\x31\xba\xd6\xb4\x32\x7d\xe5\
\x14\xd3\xcf\xaa\x55\x14\xd9\x2c\x4c\x46\x90\x8c\x29\x41\x9e\x52\
\x40\x70\xde\xac\x80\xc6\x0d\x07\x76\xb5\xbf\xc0\x14\xf6\x68\xbf\
\x26\x61\xd2\x21\xe2\x74\x59\x19\x5e\x5b\x2c\x14\x93\x9d\x37\xc1\
\x86\xad\xeb\x51\xce\xa4\x48\x87\x7e\x7a\x94\xae\xab\xdb\xed\x61\
\x95\x52\x62\x49\x89\x54\xce\x30\x4b\xa9\xb0\x2c\xe7\xbb\xe5\x7c\
\xb7\xa4\x13\x24\x2c\x1f\xee\x19\x35\x9c\x33\x38\xf4\x73\x49\x7c\
\x19\x1f\x5b\xff\xa9\x46\xb7\x63\xa8\xdc\x3b\xf8\x19\x7e\xf3\xc0\
\x2f\x32\x55\x9c\xac\x79\xc8\x71\xd4\xbd\xd4\x37\x84\xf5\xc2\x5e\
\x0e\xce\xee\xe3\xb7\x0e\xfe\x32\xdf\x1a\xfd\xc6\xbc\xf7\x51\x8b\
\xd9\xca\x47\xd7\x7e\xb2\xe6\xe7\xa9\x5a\xdf\x49\x4b\x7a\xcf\xae\
\xf4\x3d\xc7\x12\xcb\x92\x74\x6e\x6d\x67\xf4\xe8\x8c\x67\xed\x2e\
\xdf\xd8\x45\xfb\x16\x03\xa3\x59\xa2\xf2\xbc\xc9\x19\x2e\xdc\x29\
\x04\xad\x40\x8a\xd2\x90\xe1\x79\x91\x46\x5b\x00\xfa\x68\xbf\x56\
\x11\x63\x11\x8a\xb5\x32\xc7\x6b\x5a\xb7\xc0\xd2\xad\xed\xb4\x77\
\xb5\x23\x2d\xc5\xe4\xc8\x0c\x85\x96\x3c\x66\x6b\x6b\xc9\xf7\x77\
\xfd\x7b\x29\x4a\x6e\x80\xbe\x5e\x55\xd6\x82\x61\xda\x33\xb8\xee\
\xcf\x2f\xf9\x1b\x5a\xcc\xd6\xf9\x6a\x4f\xc0\x36\xf9\xff\xe8\x85\
\xdf\xe4\xd9\xa9\xa7\xbd\x75\xa5\xbc\x71\x6d\xe9\xb9\x7a\xa3\xfc\
\x67\xdb\x35\x98\x4f\x4b\x44\x29\xc5\xa4\x9c\xe0\xcf\xfa\xff\x94\
\x83\xb3\x7b\xf9\xcd\x55\x7f\x30\xaf\xf7\x72\x49\xd3\x06\x7e\x63\
\xe5\xef\xf1\xa9\x23\x1f\x9f\xf3\xbd\xe9\xc7\x18\xca\x49\x09\x3a\
\xb1\x2a\x37\x0e\x60\x18\xf6\xfa\x9e\x5b\x16\x71\xea\xb9\x09\x96\
\x5c\xd9\x8e\x19\x33\x58\x7c\x69\x3b\x33\x57\x8f\x71\x7a\x3b\x8b\
\x0d\xcb\x7c\xa5\x32\xad\x61\xa5\xc8\x3a\x93\x86\xcc\xab\x15\xd0\
\x18\x02\x88\xd2\xfe\x09\xba\x8b\x93\xdc\x01\xb0\xe8\x26\x58\xbd\
\x61\x95\xa3\xfd\x05\x87\x7e\x7a\x84\xee\xd7\x77\x38\x8c\xa9\xf9\
\xf7\x52\x38\x6c\xea\x27\x80\x5a\xa2\xfe\xd5\x40\xf0\x3f\xd7\xfc\
\x31\x97\x34\x5d\xd6\xc8\xf6\x2b\x93\x83\xb3\xfb\xf8\x83\x03\xbf\
\x51\x66\x52\xd6\x73\xad\x73\x19\xbe\x7c\x3e\xc5\x06\xe6\x1c\x48\
\x0b\x39\xee\x6b\x27\xbf\xc8\x81\x99\x7d\xf3\x4e\xdc\xef\x5c\xf2\
\x7e\x9e\x99\xf8\x09\x8f\x8c\xfd\x60\xce\xf7\xe1\xee\xef\x0e\x0f\
\x36\x44\x29\x90\x6d\x08\x3b\x8d\x6d\x48\x48\xaf\x4e\x71\xea\xd1\
\x51\xba\x72\xad\xc4\x0d\x83\xee\x35\xed\x8c\x5c\x3e\xc1\xe4\x33\
\x12\x72\xc6\x6b\x45\xc2\x7a\x1c\x49\x06\xc9\xb8\xb2\x5f\x2f\xe6\
\x0e\x1b\x6e\x78\x46\xa0\x91\xa6\x45\xa8\xf6\x57\x05\x5e\xd6\xba\
\x05\xba\xd6\xb4\xd3\xde\xd5\x86\x52\x70\xfa\xc4\x24\x66\xaf\x81\
\x88\x0b\xdb\x34\x52\xb2\xe4\x02\x48\xcb\x36\xf5\x2d\xdb\x9c\xb2\
\x3c\xb3\xaa\xfa\x62\x59\x56\xe4\xb6\xad\xad\xd7\xf2\xae\x65\x77\
\x36\xba\xfd\x7c\x72\x60\x66\x1f\x1f\xdc\xf3\x0b\x9c\xc8\x1e\xab\
\xcd\x5c\x8c\x98\x9d\xa6\xfe\x59\x6b\x1a\x35\xfb\xcd\xdc\xff\xee\
\x7c\xba\x27\x3f\x9d\xfc\x09\xbf\xb6\xf7\x4e\x06\x73\xc7\xe7\xb5\
\xff\xfe\x74\xed\x9f\xb1\x24\xbe\xac\xea\xb3\x54\xd3\xe2\x3c\xc3\
\x96\x65\xd9\x8b\xd4\x9e\x67\x29\x59\x76\x7b\x37\x43\x7b\x26\x50\
\x0a\x4c\xd3\xa0\xbb\xaf\x9d\xb6\xab\xc1\x2a\x5a\xed\x86\x65\xbe\
\x52\xc4\xe8\xc6\xa0\x03\x41\x13\x82\x04\x76\x5a\x5d\x68\x45\x76\
\x0d\x91\x33\x27\x80\x40\xde\x1f\x41\x93\x33\xd3\x4f\xb7\x35\xcd\
\x8d\x00\x8b\x6e\xd6\xb5\x3f\x1c\x3b\x78\x82\xce\xab\x5a\x6d\xf0\
\x6b\x8d\x25\x1d\x9f\xdf\x6d\x24\xd7\x6f\x2a\x5a\xd5\x01\x52\x89\
\x04\x9a\x8d\x16\xfe\xea\xf2\x4f\xcf\xeb\xc3\xf3\xc0\xa9\xff\xe2\
\x7d\x3b\xdf\xca\x64\x61\xe2\x8c\x80\x52\xcf\x7d\xcd\xd9\x4f\x3d\
\x0b\x60\xae\xe7\x1c\x2e\x48\x2a\xed\xb3\x7f\x7a\x2f\xef\xdf\xf5\
\x36\x0e\xcc\xec\x9b\xb7\x3e\x6c\x8d\xb5\xf2\xbf\xd7\x7d\xcc\x77\
\xdf\x73\x25\x00\xcb\x72\x9f\x61\x27\x16\xe0\xc5\x04\xec\xe7\xda\
\x6c\x37\x29\x24\x24\xc5\xac\x1d\xfc\xee\x5e\xd3\x4e\xeb\x15\x76\
\x2c\xc0\xca\xab\xd7\x8a\x18\xdd\xa2\x14\x0b\x48\x32\x4f\xb1\x80\
\x46\xc5\x00\x0c\xc7\xfc\x8f\x0b\x41\x1a\x83\x56\x04\x8b\x65\x81\
\x6b\x5c\xed\xdf\xd1\xd5\x86\x94\x30\x76\x62\x12\x63\x25\x0e\xc8\
\xf1\x26\xff\xf0\xfb\xfb\xc2\x33\xfb\xa5\x55\xdf\xf4\x5e\x61\xe6\
\xf3\x87\x2f\xfd\x38\xad\xb1\xb6\x46\xb7\x9d\x27\xf7\x0f\xfd\x17\
\x1f\x39\x58\xf2\x53\x6b\x31\xe1\x6b\xdd\x47\x97\x60\x6c\xe0\x42\
\xc9\x08\x9c\xa9\x4f\xed\xca\xa4\x9c\xe0\x83\x7b\xde\xcf\x67\x36\
\x7e\x81\x4b\x9b\x2f\x9f\x97\x6b\xbd\xba\xfd\xe5\xbc\x6b\xd9\x2f\
\xf0\x95\xe3\xdb\xea\xbe\xf6\xe0\xbe\x4a\x81\x90\x0a\x69\xd8\xfe\
\xbf\xa1\x9c\xf8\x80\x13\x1b\xe8\xbc\xa1\x95\x53\x0f\x4f\xb0\xfc\
\xea\x4e\xdb\x0a\x58\xdd\xce\xd4\xd5\x63\x9c\xde\x2e\xdb\x45\x81\
\xcb\x30\x39\x81\xa4\x19\x45\x0a\xc8\x38\x15\x82\x0d\x8d\x05\x9c\
\x19\xa3\xf8\xc7\xfb\xc7\x9d\x35\xed\x22\x46\x97\xca\x18\xb7\xa2\
\x48\xb6\x6d\x86\xbe\x0d\xf6\xb8\x7e\x14\x1c\x3d\x78\x82\x96\x0d\
\x69\xdb\xac\x77\xcd\x24\x87\x15\x8b\x9a\xd6\xd7\x97\x5a\x4c\xe4\
\x28\x4d\x79\x6b\xd7\x6b\xb8\xad\xfb\x75\x0d\x7a\x3c\xca\xe5\xfe\
\xa1\xff\xe4\xc3\x07\x7e\xbf\xee\x88\x7d\xbd\x66\x7d\x2d\xae\xc3\
\xf9\xba\xb8\xe0\xa8\xc7\x5a\x88\xda\x77\xb2\x30\xc1\x3d\xcf\xbd\
\x8f\x03\x33\x7b\xe7\xad\x4f\x3f\xd0\xfb\x9b\x2c\x4d\x2c\xaf\xf9\
\x79\x8b\xb2\x60\x2c\x27\x23\x60\x5b\x00\xb2\xcc\xb5\x35\xda\x0c\
\xb2\xf1\xa2\x6d\x05\x28\xcd\x0a\x48\x82\xcc\x71\xab\x61\xd2\x2d\
\x0c\x3a\x84\xa0\x19\x34\x37\xa0\x81\xd2\x08\x0b\xc0\x0b\xfe\x09\
\x83\x66\x61\x96\xcc\xff\x74\x1f\x74\x6f\x6a\xf6\x22\xff\x63\x27\
\x26\x61\x85\xe5\xcb\xf9\x4b\xcb\x1f\xe8\x73\x03\x81\x51\xda\xdf\
\x3b\x69\x0d\xe5\xbf\xad\xb1\x36\x7e\xff\x92\x0f\x35\xb2\xbd\x7c\
\xf2\x8d\x93\xff\xc1\x87\xf6\xff\x7e\xd5\x6b\xd3\x35\x43\xad\x1a\
\xbc\xd2\xdf\xa8\x57\xce\x55\x80\xb0\x91\xc1\x40\x5d\x26\xe5\x04\
\xbf\xb2\xf3\x3d\xfc\xc3\xe6\x2f\xb3\xa1\xe5\x8a\x86\x5f\x77\x6b\
\xac\x8d\x8f\x6c\xf8\x0b\x7e\xe9\xd9\x77\xd5\x7d\x2f\x15\xad\x00\
\xdc\xc9\xff\x9c\xef\x0a\xda\xaf\x6b\xe6\xd4\xe3\x13\x2c\x7f\x59\
\x27\x66\xcc\x8e\x05\x8c\x6f\x1a\x63\x66\x47\x6c\x95\x2a\x16\xaf\
\xc4\x60\x08\xc1\x28\x82\x49\xf4\x81\x42\x0d\xb2\x02\xe6\x6e\x01\
\xf8\xb5\xbf\xfb\x92\x8f\x56\x61\xb2\x88\x9c\x71\xbd\x94\x32\xd9\
\x71\x1d\xac\x58\xb3\xdc\xd1\xfe\x82\x63\x07\x4e\x92\x5a\x9d\xf0\
\xd8\xb1\x4c\xd3\x6b\xf9\x7e\x19\xa1\xfd\xeb\xd1\xb2\xbf\xba\xf6\
\x77\x58\x9e\x5a\xd9\xc8\x67\xc3\x93\x1f\x0e\x7f\x97\xff\xbd\xf7\
\x77\xcf\xba\x0f\x5d\xaf\xe6\x75\x1f\xca\xb3\xb9\x9c\x69\x8c\xa2\
\x96\x76\x98\x2c\x4c\xf0\xcb\x3b\xde\xcd\xfe\xe9\xe7\xe7\xa5\x7f\
\xaf\xe9\xb8\x9e\x5b\xbb\x5e\x5b\xd3\xb3\x57\xc9\x42\xf5\x5b\x01\
\x7a\x2c\xc0\xde\xcf\x6c\x33\x98\x29\xe6\x28\xe6\x6c\xa5\xb8\x68\
\x65\x2b\x2d\x57\x82\x34\x8b\x60\x19\x5b\x85\x49\xa7\x30\x68\x13\
\x90\xc6\xb6\xb2\x1b\x6a\x05\x9c\x69\x50\xc1\xd5\xfe\x71\x6c\xdf\
\xbf\x4d\x24\xe8\xb4\x72\xdc\x18\xef\x80\x45\x5b\x92\x2c\xe9\x5d\
\x0c\x0a\x32\x53\x39\xb2\xad\xb3\x10\xc7\x33\xfb\xa5\x0c\x16\xfe\
\x94\xc8\xa0\x58\xac\x0f\xf4\xc1\x6d\x97\x36\x5f\xc6\xfb\x7b\x7f\
\x79\x5e\x1e\x8e\x7d\x53\x7b\xf8\xd3\xe7\xff\x67\x4d\xe6\x7d\x3d\
\x99\x8a\x6a\x60\x87\xfa\xc0\x7c\xb6\xdc\x84\xa8\xf3\x34\x22\x83\
\xa0\x4b\x98\x3b\xf0\xbf\xf7\xfe\x2e\x53\xc5\xc9\x79\xe9\xe7\xdf\
\xbf\xe4\x43\x34\x1b\x2d\x75\x05\x2b\xa3\x48\xc0\xd2\xdd\x5d\x4b\
\x52\xb4\x4a\xd9\xae\xa6\x4d\x29\xc6\x07\xec\xe2\xa0\xb8\x53\x1d\
\x98\xee\x03\x55\x64\xb3\x80\xd5\x18\xb4\x63\xbb\x01\x49\x1a\x4c\
\x00\x67\xea\x02\xb8\xd1\xff\xa4\x30\x69\x16\x31\xda\x55\x9e\x2b\
\x95\xa5\x96\xb4\x5f\x0f\x4b\x56\x2e\x01\xa7\x88\xe7\xd0\x33\x47\
\x68\xbe\x3e\xe9\x34\xa0\xf0\x05\xff\x7c\x6e\x80\x55\x32\xff\x43\
\x4f\x58\x63\xf9\xef\x1f\x5d\xf6\x7f\xaa\xee\x37\x17\x99\x2a\x4e\
\x72\xd7\xd3\x3f\xe7\x7b\xe8\x82\xd7\x14\x15\xe0\xab\x56\xce\x5b\
\xab\xd9\x7f\xa6\xef\x38\x68\x84\x9c\x89\x4b\x52\xcb\xb1\x51\xfb\
\x04\xd7\xef\x9d\xdc\xcd\xdd\xcf\xbc\x93\xff\xb8\xee\xdb\x0d\xbb\
\x37\x57\x56\xa4\x57\xf2\xbe\x55\xbf\xc4\xdf\xbf\xf8\xff\x6a\xba\
\xee\xe8\xed\xca\x0b\xfc\x29\x05\xca\x2c\x05\x02\x15\x90\x5c\x65\
\x32\xfa\xe4\x14\xdd\x97\xb4\x82\x01\x9d\x2b\x5b\x69\xd9\x34\xc5\
\xcc\x7e\x89\x51\x34\xaf\x16\xa6\xb5\x5f\x59\xb4\xa1\x98\xa0\xc1\
\xc1\xc0\xb9\x11\x40\x30\xf8\x27\x48\x23\x68\x33\x62\x74\xca\x59\
\xe3\x1a\xa5\x24\xad\x57\xc1\xca\xb5\xcb\x50\x0a\x8a\xb9\x22\x33\
\x89\x29\x9a\x63\x49\xc7\x04\x72\xa3\xff\x25\xb0\x07\xc9\xc0\xad\
\xfa\xab\x24\x51\x0f\xf4\x6b\x97\xbe\x91\x97\x2f\x7a\xc5\x99\xb4\
\x4b\xa8\x4c\x15\x27\xf9\x85\x9f\xbc\x9d\x89\xfc\xb8\xef\x1a\x82\
\x3e\x7e\xf0\x41\xa8\x04\xfc\xb9\x44\xf5\xcf\x14\xc8\xf3\x11\x13\
\x68\x64\x46\x22\x4c\xf3\x57\xda\x67\xef\xe4\x6e\xfe\x64\xcf\xef\
\xf0\xd1\x8d\x9f\x68\xf8\x7d\xfd\xfa\xfa\xff\xc5\x7f\x1d\xfb\x57\
\x8e\xcd\x1e\x9d\xd3\x7d\xbb\xdb\x85\xb4\x81\x2f\x15\x98\x0a\x0c\
\xd3\x25\x00\x85\x52\x90\xb8\x34\xc6\xd4\xc9\x2c\x6d\xcb\x53\x34\
\x2f\x4a\xd1\xd6\x97\x60\x7c\x79\x9e\xc2\x90\xda\x6a\x24\xf8\x96\
\x30\x68\x43\xd2\xa4\x14\xd3\xe8\xaf\x14\x3b\x43\x39\x13\x17\xa0\
\x14\xfc\xb3\x2b\xff\xda\x30\x58\x6a\xe5\xe5\xd6\xb6\xad\xb0\x7c\
\xfd\x62\xcc\x58\x0c\x05\x9c\x38\x70\x8a\xc4\x7a\xa3\x64\xf6\x07\
\x22\xa2\x52\x33\x8d\x4a\x26\x53\x75\x53\x2b\x6a\x9f\x3f\xdd\x58\
\x7d\xc6\x97\xb9\xc8\x9f\xed\xfd\xdf\xec\x9d\xdc\x5d\xd5\x67\xad\
\xc7\x27\x9e\x8b\x99\x7e\xb6\x62\x02\xe7\x4b\xbd\x40\x2d\x71\x82\
\xff\x3a\xf6\x35\xfe\xfb\xf8\xbf\xcd\x4b\xbf\xff\xfa\xfa\xdf\xad\
\xeb\xda\x22\xdd\x81\xa8\x67\x5d\x4a\xd2\x1b\xe3\x0c\xbf\x30\x69\
\xc7\xcb\xa4\x3d\x69\x48\xf3\x06\x90\x96\x6c\x93\x79\xb6\x60\xd0\
\x89\xa0\xd5\x99\x60\xa7\xf4\x4a\xb1\x33\x2c\x0c\xaa\xdf\x02\x28\
\x0f\xfe\x25\x9d\xe0\x5f\x27\x39\xe3\x2a\x21\x14\xad\x57\x29\xba\
\x96\x74\x79\x21\xcf\xc1\x13\x27\x49\x5d\x6a\x94\xca\x7e\x3d\x4d\
\xef\xcf\xf7\xeb\x96\x40\xb5\x3c\x79\x98\x56\x15\x42\xf0\x8e\xde\
\xf7\xb0\xb2\xa9\xb7\xe1\x0f\xc1\x7f\x1d\xfb\x2a\xff\x79\xf4\xab\
\x55\xaf\x21\xb8\x4d\xff\x5d\xef\xfd\x54\xdb\xb7\xd2\xfe\x67\x6b\
\x12\x91\x73\xe5\x0a\x84\x6d\xfb\xa3\xe7\x7e\x8b\xcb\x5a\x37\x72\
\x79\xdb\xc6\x86\xde\xe3\xcf\xae\x7c\x17\x9f\xdc\xf7\x71\x8e\xcd\
\x1e\xa9\xeb\x3a\xc3\x32\x02\x5e\x26\x40\x29\x14\x8e\x2b\xe0\xac\
\xcb\xb7\x16\xc8\x4d\x5b\xa4\xda\x4c\xda\x97\x34\xd3\xb2\x7e\x94\
\xf1\x1f\x49\x84\x34\xae\x20\x29\x7f\x8c\x45\x2b\x92\xb4\x22\x30\
\x7b\xf0\x19\xc8\x5c\x63\x00\x7e\xf3\xdf\x2e\xfd\xed\x28\x4e\xc8\
\x57\xc6\xda\xa1\xf3\xb2\x14\xdd\xcb\xba\x90\x16\x8c\x9d\x18\x87\
\x95\x96\x03\x76\x85\x65\x69\x3e\xbf\x07\x78\x61\xaf\xb3\xc0\x0a\
\xa4\xfe\x6a\x9d\xe2\xdb\x05\xd8\x6f\x5f\xd6\xf8\x81\x23\x7b\x27\
\x77\xf3\x7f\xf7\xfc\x09\x52\x4a\xdf\x39\x6b\xf1\xf3\xe7\x6a\xfe\
\x57\x9b\x9d\xe7\x42\x9d\x12\xac\x16\xf3\xbe\xde\x7b\xd2\xd7\xff\
\xea\x33\x77\xf2\xf5\x1b\x7f\x40\x5b\xbc\xbd\xa1\xd7\xfd\x9b\x97\
\xfe\x3e\xbf\xf7\xec\xaf\xd5\x75\xfd\x61\x04\xe0\x81\xde\x31\xff\
\x0d\x6d\x97\xa6\x2d\x31\x46\x9f\x9d\x62\xf9\xd5\x9d\x18\xa6\x41\
\xc7\xaa\x66\x46\xfb\xa6\x98\xd9\x2f\x37\xc7\x52\x2c\x51\x06\x27\
\x10\x34\xa1\x48\x00\x59\x6c\x37\x40\x70\x06\x24\x30\x17\x02\xf0\
\x0d\xfc\x11\x76\xe9\x6f\x1b\x8a\x5e\x81\xb9\xa4\x75\xab\x45\xd7\
\x92\x45\x25\xed\x7f\xe0\x14\xb1\xad\x82\xa2\x65\x39\x80\x17\x3e\
\x02\xf0\x08\x41\x23\x03\xdf\xc9\x6a\xa8\xfa\x73\x3f\x7f\x6e\xf5\
\x7b\xe7\x45\xfb\xff\xee\x4f\x7f\x8d\xf1\xdc\x78\xe8\xf9\xc3\xae\
\xa7\x96\xfd\xa0\xb2\xf6\x6e\x54\xbc\xa0\x52\xdb\x35\x5a\xea\x01\
\x43\x2d\xfb\xcd\x25\xe8\x76\x74\x66\x80\xbf\x39\xf0\x97\x0d\x77\
\x03\xdf\xd1\xfb\x6e\x3e\xb1\xf7\x63\x1c\x9d\x19\xa8\xe9\x3a\xa2\
\xc9\x0a\x84\xa9\x3c\x32\x70\x17\x09\x98\x2d\x30\x99\x99\x61\xb9\
\xec\x04\x43\xd0\xb9\xa2\x95\xd6\xcd\x53\x64\x0e\x1a\x50\x64\xa3\
\x30\xe4\x8b\xca\xa0\x19\xe5\x94\x06\xdb\xc1\x40\xce\xe4\xc5\xa2\
\xf5\xc5\x00\x4a\xfe\x86\xfd\xaa\x2f\xdb\xfc\x6f\x16\x26\x6d\x2a\
\x67\x5c\x21\xa5\x45\xdb\x55\xb0\x72\xed\x0a\x50\xf6\x84\x1f\x13\
\xb1\x71\x54\xcc\x9f\xfb\xd4\x7d\x1f\xdf\x2b\xbd\xad\xfa\x06\xb5\
\x04\x7f\xff\xcf\x2b\xfe\xb8\xa1\x9d\x0e\xf0\xc9\x7d\x7f\xce\x9e\
\xf1\x5d\x65\xfe\x9e\xdb\xc9\x73\xf1\xb9\xab\xa5\xbc\xe6\xe2\x2f\
\xcf\x87\xef\x7d\xa6\x29\xbc\x5a\xda\xab\xd2\xdf\xa8\xd6\x1e\x61\
\xa0\x53\x4a\x71\xef\xa1\xcf\xf2\xe4\xe8\xe3\x0d\x7f\x16\x7e\xfb\
\xb2\x3f\xf4\x9d\x27\xea\x59\xac\x94\xa2\x76\xeb\x5d\xf4\xf4\xa0\
\x94\x12\x25\x6d\x3c\x88\x55\x8a\xa9\x93\x19\x94\x82\x96\xae\x34\
\xcd\x2b\x62\x18\xcd\x12\x99\xe3\x06\x61\xd2\x2e\x04\x2d\xa2\x34\
\x4f\xc0\x19\xa7\x04\xe7\x6a\x01\xd8\x73\xfe\xdb\x43\x7f\xd3\xc2\
\xa4\xd9\x9a\x55\x37\x24\x97\x41\xc7\xaa\x66\x92\x4d\x49\x94\x05\
\xc3\x03\xa7\x51\xdd\x45\x8a\x96\x28\x69\x78\x0b\xac\xa2\x56\xff\
\xef\x58\x04\xee\xfa\x5a\xa6\xf3\x0a\x5b\xf7\xae\xb5\xbf\xc0\xaa\
\xe6\xc6\x6a\xff\xe7\x27\x9e\xe3\xaf\x9f\xff\x58\xe8\xf9\x82\x9f\
\xee\x83\x51\xcf\x75\xd7\x12\x13\xa8\x75\xdb\xf9\x64\xf2\xb7\xc6\
\xda\xb8\xae\xeb\x95\x7c\x7f\xe8\x5b\x35\x5f\xdb\x5c\x2c\x84\xa8\
\xf5\x4a\x29\xfe\xd7\x33\xbf\xca\x83\xb7\x3d\xd2\x50\x57\xe0\xe7\
\x56\xbf\x87\xbf\xda\xfd\x7f\xbd\x58\x40\x94\xb5\x52\xf9\x5e\x94\
\xf7\x9e\x4b\xc3\x2c\x59\x00\x26\x60\xa0\x88\xad\x86\x91\x87\xa7\
\x68\x5d\x96\x46\x19\xd0\xb6\xa4\x99\xf4\x9a\x09\xa6\x76\xc9\xc5\
\x42\xb2\x14\x83\xc3\x18\x24\xb0\x88\x81\x17\x8f\x9b\xb3\xcc\x25\
\x0b\x20\x10\x18\xc2\xf6\xff\x53\xc2\xa4\x45\x29\x56\xa2\x44\x47\
\xdb\x16\x58\xb2\x6a\x89\x3d\xd9\x27\x30\x7c\x7c\x14\x63\xb9\xd2\
\x46\xfb\x05\xaa\xfb\xbc\xe1\x92\xa5\xc2\x1f\xbd\xb6\xda\x8d\xf2\
\x47\x69\x4e\xb7\xb1\x95\x52\xfc\xfc\x9a\xf7\x35\xac\xa3\x5d\xf9\
\xd0\x8e\x3f\xa8\x1a\xbd\x0f\x2b\x0e\xa9\x25\x6a\x7f\x26\x9a\x36\
\xac\x8d\xce\x55\x84\x3e\xb8\xb4\x98\xad\x7c\xf9\x86\xfb\xf8\xcc\
\x35\x9f\xe7\x6d\x2b\xde\x59\xd3\xf9\x2b\xed\x53\xcd\xf2\x0b\xcb\
\x06\x29\xa5\x38\x32\xdd\xcf\x27\xf6\x7e\xec\xcc\x1f\x82\x80\xfc\
\xf2\x25\xbf\x5a\xb7\x75\xea\xef\x33\x3f\x16\x4a\xdf\xed\x6d\x2a\
\x2e\x99\x35\x67\x3d\x66\xe8\x5c\xd9\x4a\xb3\x33\x33\x9a\xca\x73\
\x85\x30\x68\x03\x67\x88\xb0\xf0\x2c\x80\x39\x93\x40\xbd\x04\x50\
\x1a\xfa\xab\xf9\xff\xb6\xf9\x2f\x69\xbe\x0c\xba\x97\x76\x01\xb6\
\xf9\x3f\x56\x1c\x05\x4b\xa1\x9c\x45\x86\x2d\x45\x85\x2c\x4a\x64\
\x51\xd9\x33\xab\xea\x8b\x52\xbe\xc6\x0b\x36\xae\xbb\x6e\x63\xc7\
\x66\x5e\xb1\xf8\xe6\x86\x76\xf4\xbf\xf5\x7f\x89\x1f\x9d\x7a\xc4\
\x6e\xf8\x2a\xa0\x28\x37\xf3\xa2\x09\xa1\x91\x00\xad\x35\x95\x38\
\x1f\x26\x7d\xd8\x62\x83\xff\xeb\x5c\xde\x76\x25\x00\x7f\xb1\xe5\
\xef\xf8\xd9\x95\xef\xaa\x4a\x64\xb5\x5c\x43\x35\xf7\x2f\x8c\x50\
\xfe\xe9\xe0\xa7\xd9\x33\xbe\xab\xa1\xcf\xc5\xcf\xaf\x79\x3f\xad\
\xb1\xb6\x9a\xae\x33\xea\x79\xb0\x8a\x2e\xf0\x2d\xa4\x46\x0c\x45\
\x87\x10\x44\x9f\x64\xfc\x88\x5d\x19\x98\x6e\x4d\xd2\xbc\x32\x46\
\xac\x15\x50\xc6\x95\xc2\xa4\x05\x83\x26\x51\x1a\x1c\x64\x63\x78\
\x8e\xe9\xc0\xda\x5d\x00\xfd\x6d\x3f\xae\xf9\x6f\xd0\x24\x62\xb4\
\xc8\x8c\xba\xc6\x35\xff\x53\xe9\x14\x52\x2a\x86\xfb\x4f\x63\x2e\
\x05\x55\x04\x9c\x20\x9f\xef\xbb\xe5\x5f\xaf\x8a\x9a\xd9\xe4\xd2\
\x92\x7e\x4b\x02\x94\x70\x22\xeb\xd8\x2f\x65\x73\xcd\xe8\x0f\x6c\
\xf8\xf5\x6a\x57\x5f\x97\x4c\x16\x26\xf8\xdf\x3f\xfd\x3d\x2f\xea\
\x0f\x95\xdd\x8f\x60\x2a\x32\x6a\x5d\x2d\xc7\x47\x49\x3d\xb3\x20\
\x9d\x89\x3b\x50\x6b\x70\x30\x78\x8e\xb6\x78\x3b\xff\xfa\xca\x6f\
\x70\x45\xfb\x95\xbe\xf5\x7f\xb9\xf5\xef\x50\x4a\xf1\x1f\x47\xbe\
\x52\xf5\xda\x6a\x31\xef\xab\x6d\x0f\xee\xfb\xa1\x1d\x7f\xc0\x7f\
\xdc\xfa\xad\x39\xb7\x47\x50\xda\xe2\xed\xfc\xcc\xf2\x37\xf1\xd5\
\x43\x5f\xa8\x98\x11\xa8\x96\x2d\x50\xc1\x45\xd8\x2e\x80\x00\xcc\
\x5e\x18\x7f\x68\x86\xce\x3e\x7b\xa6\xec\xb6\xc5\xcd\xa4\xd7\x4e\
\x30\xbd\x4b\xf5\x01\x5d\x42\xd0\xac\x04\x29\x94\x56\x0f\x30\xc7\
\x4c\x40\xbd\x31\x00\xd7\xfc\x8f\x21\x48\x0a\x83\x66\x25\x59\x2e\
\x2d\xb5\x34\xdd\x07\x1d\x5d\x1d\xde\xdd\x0d\x9f\x18\x25\xb6\x11\
\x64\xd1\x5e\x5c\xa0\x4b\x17\xf0\x1a\x09\x48\xbb\xb0\xd1\x3d\x43\
\x29\xb9\xa1\x2f\x7a\xe3\x09\xe5\x11\x42\x7b\xb2\x83\x37\xae\x7a\
\x6b\xc3\x3a\x18\xe0\x73\xfb\xfe\x96\xf1\xdc\x58\xf9\xcd\x57\x99\
\x73\x30\xac\x2a\x10\xa2\xe7\x03\xac\x27\x06\x10\x95\x26\x6c\x84\
\xef\x7f\xa6\xd9\x85\xb6\x78\x3b\x5f\xbd\xf1\x7e\xae\x68\xdf\x14\
\xba\xfd\xaf\x5e\xf6\xf7\x48\x29\xf9\xf7\x81\x2f\xd7\xf4\xf7\xe6\
\x42\x04\x51\x41\xc1\xc7\x87\x1e\xe6\x47\xa7\x1e\xe5\x15\x8b\x6f\
\x3a\xe3\x76\x72\xe5\x9e\xcb\x7e\x83\xaf\xbc\xb0\x2d\xf4\xbc\xbe\
\xdf\xca\xf9\xad\x4a\xbf\xbd\xf5\x4e\x35\x2c\x12\xef\x1d\x18\xb8\
\xd5\x81\x0a\xa6\x8d\x19\x1b\x13\x06\x34\x2f\x4a\x93\xee\x9b\x60\
\x7a\x17\xc8\x3c\x1b\x31\xd9\x8f\x45\x12\x41\x1c\x3b\x1b\x60\xcf\
\x19\x38\x07\xa9\x97\x00\x5c\xed\xef\xbe\xf4\xa3\x49\xe6\x59\x2f\
\x84\xa0\x6d\xab\x62\xe9\xca\x25\xf6\x8d\x28\xc1\x58\x6e\x94\x84\
\x0b\x74\x59\x02\xbc\x07\x7c\x37\x10\x58\x74\x08\x41\xd9\x2f\x52\
\x00\xfc\xa0\x0f\xfb\xae\x4a\xbf\x6f\x5f\x79\x07\xed\x89\x8e\x86\
\x75\xee\xd1\x99\x01\x3e\xbb\xf7\x53\xa1\xda\xbf\x9a\x56\xaf\x96\
\xcb\xaf\x65\xd2\xd2\xb0\xbf\x75\x36\x82\x7e\xb5\x5c\x47\x98\xb4\
\x27\xda\xf9\xea\x8d\x0f\x44\x82\xdf\x95\xbf\xbe\xe6\x33\x28\xa5\
\x2a\x92\x40\x3d\xe9\x34\x7d\x5b\x35\xc2\xf8\xcd\x27\x7e\x99\xa7\
\xdf\xb2\xbf\x61\x6d\x75\x65\xe7\x55\x5c\xd9\xb9\x99\x5d\xa3\x3b\
\x9c\x13\xb9\x1f\x81\xdc\x9e\x2b\x3a\x34\x9d\x6d\x4a\xfb\x14\xda\
\x77\xaf\x56\x60\x89\x64\x6a\x28\x43\xdb\xf2\x34\xed\x4b\x9b\x69\
\xea\x35\x10\x09\x89\x81\xb9\x5e\x99\x56\x8b\x32\x68\x42\x7a\xf5\
\x00\x36\x32\xe6\x30\x36\xa0\x1e\x02\x08\x4e\xfd\x95\x12\x26\xcd\
\x2a\x27\xae\x14\x49\x45\xd3\xca\x18\x2d\xed\xcd\x48\x0b\x4e\x0d\
\x8c\x62\x76\xba\xa6\x3d\xb6\xa9\x5f\x2c\x01\x1f\xed\xbb\xb7\x5e\
\x6f\x33\xd7\xa8\x71\xce\x2a\x02\xae\x80\x7e\x35\x1f\xdc\xf8\x5b\
\x0d\xeb\x58\x80\x3f\xdf\xf1\x11\x9f\xf6\xd7\xb5\x7a\x98\xa6\xac\
\x64\x9a\x57\x03\x4f\x2d\x04\x51\xeb\xb1\xf5\x6e\x0f\xca\x5c\x35\
\xff\xbf\xdd\xfc\x20\x1b\x3b\x36\xd5\xb4\xff\x27\xae\xfd\x2c\x4a\
\x29\xbe\x76\xf8\x8b\x0d\xbb\xa6\x5a\x48\x60\x60\xea\x30\x5f\x3d\
\xf4\x05\xde\xb5\xf6\x17\xea\xbe\xc7\x28\x79\xd7\x9a\x3b\xd9\x79\
\xea\x59\xe7\x24\x54\x06\x7d\xc8\x3e\xca\xc9\x82\x21\xed\x71\x02\
\xde\xcb\x6e\x1c\x02\x30\x97\xc3\xd8\x9e\x19\xda\x96\xa5\x41\x41\
\xcb\xa2\x34\xc9\x15\x33\x64\xfb\xe5\x46\x33\x45\x8b\xb0\x8b\x80\
\x93\xca\xb6\x00\x4c\xe6\x38\x36\xa0\xb6\x20\xa0\xeb\xff\x0b\xef\
\x85\x9f\x5e\xfa\x4f\x59\xac\x6b\x5a\x03\x1d\x8b\xda\xbd\xb3\x8f\
\x0d\x4e\x60\x2c\xd6\x34\xbe\x4e\x02\xee\x52\x28\x7d\xe2\x6c\x77\
\x5e\x88\x5c\x9a\xff\x54\x8f\x13\xe8\xf1\x02\x67\x7d\x6f\x53\x1f\
\x9b\xba\xb6\x34\xac\x53\x27\xf2\xe3\x7c\xe5\x85\x6d\x75\xe5\xde\
\xeb\x5d\x5f\xe9\x6f\xd7\x3a\xef\x1f\x54\xaf\x41\x98\x8f\x3c\xbf\
\xbe\xb4\xc6\xda\xf8\x8f\x5b\xbf\xc5\xc6\x8e\xcd\x75\xb5\xf1\x27\
\x5f\xfe\x39\xde\xd9\xf7\xbe\xaa\xd7\x58\x29\x38\x18\x96\x01\xa9\
\x96\x5d\xf8\xf8\xce\xc6\x8e\x0e\xbd\xbd\xf7\xad\x9e\x32\xf3\x9e\
\x55\xfd\xd9\x55\x81\xdf\x4e\xd1\xae\x6b\xfa\xab\x00\x1e\x28\x02\
\x05\xed\xb7\x09\xb3\xb9\xac\x67\x29\xb4\x2f\x69\xa6\x69\x0d\x28\
\xa9\x52\xaa\xc8\x5a\x0c\x5a\x10\xa4\xb0\x95\xb1\xae\x32\xeb\x92\
\x7a\xb2\x00\x02\x3c\xff\x3f\x21\x4c\x9a\x64\x81\x3e\x20\x95\x5e\
\xe3\x44\xff\x1d\x13\x66\x62\x7a\xdc\xb6\xd4\xb5\x9b\x94\xc5\x00\
\x01\xb8\x31\x81\x02\x7e\xc0\xcb\x90\xef\x6e\x23\x6b\x65\x53\xca\
\x82\x37\xf6\xdd\xd1\xd0\x4e\xfd\xcc\x9e\x4f\xa1\x9c\xe9\x9b\xa4\
\x25\xbd\x4c\x44\x54\x1a\xb2\xde\x88\x79\xd8\x7e\x41\xa9\xe5\x98\
\x46\x64\x0e\xce\x64\x69\x8b\xb7\xf1\x9f\xb7\x7d\xbb\x6e\xf0\xbb\
\xf2\xa9\xeb\xfe\x81\x77\xf6\xbd\x2f\xf4\x7e\xa3\xda\x45\xdf\xb7\
\xd2\x71\x51\x04\x32\x30\x75\x98\x07\x8f\x7e\xbd\x61\xcf\x4a\x6f\
\x6b\x1f\x9b\x16\x6d\x29\x29\x2d\x6d\xd1\x15\x55\x10\xec\x68\x56\
\xb0\x6e\x0d\xfb\xe2\x62\xce\xbe\xf9\x54\x8e\xdc\x74\x11\x94\x1d\
\x07\x48\x2e\xc3\x8e\x7d\x15\x58\x2b\x6c\x02\x68\x42\x38\x73\x04\
\x08\x3b\x36\x5e\xaf\xd4\xee\x02\xd8\x2c\x63\x60\x4f\xfc\x99\xc2\
\xa0\x89\x82\x58\xab\x50\x34\x5f\x06\x1d\x5d\xb6\x05\x90\x99\xce\
\x32\x6b\xce\x10\xb3\x34\x2d\xef\xde\x68\x60\x91\xce\x3e\x41\x3f\
\x5f\x84\x05\xff\x42\x7e\xbf\xf7\xb2\x3b\x1b\xd6\xa1\x13\xf9\x71\
\x3e\xb3\xeb\x93\x76\xa7\x38\xe7\x51\xae\x4d\x13\x38\xb7\x42\x79\
\xbf\xbd\xf8\x80\xa8\x5e\x04\x54\xeb\x7c\xff\xc1\x22\xa3\x7a\xcd\
\xf3\xf9\x9c\x02\xac\x2d\xde\xce\x7f\xde\xf6\x9d\x39\x83\xdf\x95\
\xbf\xb9\xfe\x1f\x51\x4a\xf2\xaf\x2f\x7e\x21\x74\x7b\x3d\x81\xc0\
\xd0\x7d\x55\x60\x9b\x82\x4f\x3f\xf7\xc9\x86\x06\x8c\xdf\x73\xc9\
\x9d\xfc\xe1\xc9\x1d\xbe\xf3\x79\xdf\xc3\x96\xc0\xf5\xb9\xc1\xbf\
\xe0\xe2\xba\x03\x46\x37\x4c\x9d\xcc\x90\x6a\x6f\x25\x91\x8e\x91\
\x5e\x1c\x23\xd6\x56\xc4\x9a\xe1\x12\xc3\x4e\x07\xa6\x51\x24\x10\
\xc4\x50\x8e\x15\xd0\x0c\xcc\xd4\xee\x0a\xd4\x66\x01\x94\xb8\xc5\
\x9d\xfb\x3f\x21\x04\x29\x65\x71\x89\x91\x82\xd6\x65\x29\x52\x4d\
\xb6\xaf\x32\x36\x38\x81\xd1\xe6\x67\x3f\xb7\xd2\xcf\x67\xc2\x4b\
\x87\x18\x82\x1a\x3f\x68\x1e\x69\xd1\x52\x7d\xbf\xde\x96\x3e\x36\
\xf5\x6c\x69\x58\x67\x7e\xf3\xf0\x7d\x4c\xcc\x8e\x97\x5d\x4b\x99\
\x29\xa7\x9b\x77\x12\xaf\xc6\x41\x15\x4b\xf5\x0e\xba\xe5\x50\xa9\
\x90\xe9\x4c\xcc\xf7\xf9\x34\xff\xa3\xfe\x46\x6b\xac\x8d\xfb\x5e\
\xf3\x3d\xae\xec\xbc\xaa\x21\x6d\xfe\xb7\x37\xfc\x33\xef\x5a\xfb\
\xfe\x9a\xad\xac\xa8\x9a\x0a\xa5\xb4\xda\x91\x40\x5f\x78\x7d\x27\
\xe1\xb1\x13\xdb\x79\xce\x0d\xdc\x35\x40\x6e\x5f\x73\x47\xf9\xb3\
\x11\xe6\x12\x04\xad\x04\x6d\xec\x4b\xd9\xb3\xae\x65\xc7\x8c\x45\
\x30\x35\x94\xf5\x2c\xeb\x96\x45\x69\x92\xcb\x01\x25\xfa\x30\x48\
\x39\xda\x5f\x9f\x26\xac\x6e\xe6\xaf\xc7\x05\x30\x10\x98\x08\x92\
\x18\xa4\x88\xd1\xa4\x24\xeb\x9b\xd6\x40\x4b\x5b\xb3\xc7\x70\x63\
\x83\x13\x18\xad\x0e\xb8\x0b\x9a\x7f\x13\x08\x08\xca\x02\xc8\x9c\
\x66\x16\x05\x4d\xa3\xc0\xba\xa0\x49\x75\xfb\xba\x3b\x1a\xd6\x91\
\x00\x9f\x7e\xf6\x53\xbe\xc6\x0f\xbd\x96\xb0\x18\x45\x88\x09\x18\
\x7c\x10\x7c\xe4\x60\x95\x5c\x8c\x6a\x80\xae\xa5\x1a\x70\x3e\x0a\
\x7b\xc2\xf6\x6d\x8b\xb7\xf3\xf5\xd7\x7e\xbf\x61\xe0\x77\xe5\xef\
\x5e\xf1\x2f\xbc\x6b\xed\xfb\x23\xef\xc7\xbb\x06\xe9\x6f\x43\x0f\
\xdc\xb5\xf4\x87\xb6\xdf\x67\x76\x35\xee\xbd\x90\xbd\x6d\x7d\x6c\
\xea\xdc\x52\x16\xb3\xa2\xe8\xff\x0c\x66\xc0\x74\x57\x40\x15\x1c\
\x05\x67\x61\xe3\xa5\x50\xfa\x54\x05\x98\x2d\x66\x9c\xea\x00\x7b\
\x6c\x40\xa2\x1b\x50\xa4\x54\x81\x5e\x21\x48\x09\xe1\x4d\x13\x36\
\xa7\x38\x40\x75\x17\xa0\xd9\x97\x8d\xb7\xb5\xbf\x41\x0a\x8b\xa5\
\x4a\xa9\x54\x7a\x2d\xb4\x77\x75\xd8\xf8\x57\x82\x19\x6b\xa6\xe4\
\xdf\x07\x82\x7e\x04\x00\xe6\x9a\xff\x0a\xed\x0c\xba\x0b\x00\xfe\
\x5b\x32\xf0\x28\xeb\xc6\x55\xb7\x34\xac\x23\x9f\x1b\xde\xc1\x73\
\x43\x3b\x4a\xa6\x7f\xd8\xb9\x3d\x93\x3f\xa2\x99\xab\xb9\x2c\x84\
\xfc\x4d\x4d\x3c\x17\x22\xaa\x0b\x45\xf8\xf1\x4a\x55\x1f\x1a\xac\
\xef\x3b\x17\x69\x4f\x74\xf0\xf5\xd7\xfe\xa0\xe1\xe0\x77\xe5\xef\
\x5f\x79\x2f\x4a\xc2\x57\x0e\x6e\x0b\x35\x95\xcb\xbe\xab\x90\x6d\
\xc1\x63\x42\x8e\x53\x0a\x1e\x78\xe1\x3e\x3e\x7d\xdb\xbd\x0d\xbb\
\xf6\xdb\xd7\xde\xc1\x73\x83\x3b\x50\x61\x2e\x40\xf0\xba\x65\xe0\
\xb7\x02\xe5\xbc\x37\x50\xc6\x9c\x74\x20\xf6\x5b\x85\x85\xb0\x9f\
\x43\x2b\x51\x20\x3f\x5d\x24\xd5\x1e\x23\xd5\x9a\x24\xb9\xdc\x76\
\x41\x95\xc5\x72\x4c\x52\x4e\x40\x5e\x2f\x08\xaa\x4b\xaa\x5b\x00\
\xee\xe0\x5f\x81\x21\x4a\xef\xfe\x4b\xa9\x22\xab\x84\x21\x48\x2e\
\x85\x96\xb6\x16\x50\xf6\xfb\xce\x67\xb2\xd3\x25\x4d\x1a\x0c\x88\
\xb8\x56\x41\x21\x10\xfc\x0b\xb0\x78\x59\x84\xd4\x35\xc5\x1d\x96\
\xa4\x08\x6f\xba\xf4\x8e\x86\x75\xe2\x97\xf7\x7c\x3e\x5c\x83\x84\
\xac\x2b\x0b\xd8\x84\xed\xef\xb2\xbf\x9b\xe1\xd0\xb3\x1c\x56\xc8\
\x12\xb5\x3e\x68\x75\x44\x68\xb6\xa0\x66\x8c\x5c\x64\x8d\x8b\x76\
\x4c\x9b\xd9\xce\xfd\x3f\xf3\x43\x36\x2d\x9a\x1f\xf0\xbb\xf2\xe9\
\x9b\xee\xe5\x3d\xeb\xee\xf2\xb7\x85\x6e\x7d\x85\xb9\x65\x61\xee\
\x59\xd0\x95\x74\xad\x46\x67\xff\x89\xec\x38\x5f\xd9\xbb\xad\x61\
\xd7\x7d\xd3\xaa\x5b\xbc\x48\x7d\x59\x3f\x69\xcf\x2b\x01\x77\xd7\
\xf7\x8c\x6b\x81\x3f\x7d\x3b\x16\x18\x9d\x30\x7b\x3a\x07\x0a\xd2\
\xed\x49\xd2\x4b\x1d\xc8\x4a\xb1\x56\x18\xa4\x30\xbc\x82\x20\xd3\
\xb9\xa4\xba\x48\xa0\xd6\x20\xa0\x3d\x02\x50\x94\x2c\x00\x99\x17\
\xcb\x94\x52\xa4\xfb\xa0\xb3\xbb\x1d\xcb\x82\xa9\xd1\x19\x8c\xe6\
\x52\xc7\x79\xc1\x3f\xbd\xf8\xc7\x35\x7b\xdc\xd7\x1d\x06\x8b\x7c\
\x44\xd9\x59\x4b\xec\xea\x6c\xbf\xa9\xef\xd6\x86\x75\x20\xc0\x57\
\x76\x6d\xf3\x5f\x4b\xd4\xf5\xe8\xdb\x9c\x4f\x9f\xf5\x12\xd8\x2e\
\xdc\xa2\xa5\x6a\x85\x9a\xa2\xc2\xf7\x7a\xba\x73\x2e\xb1\xbf\x0a\
\xc7\xb4\x27\x3a\x78\xe0\x0d\x0f\xd9\xd1\xee\xb3\x20\x9f\xbe\xe5\
\x5e\x90\xf0\x95\x7d\xdb\x4a\x2b\x2b\x68\x79\xa5\x6f\xaf\xc5\x1a\
\x70\x3e\x1f\x78\xe1\xeb\xbc\xe7\xf2\xbb\x1a\x72\xcd\x37\xf6\xde\
\x5a\x02\x7d\xd8\xf9\x94\xf6\xfc\x86\x6c\x73\x8f\xd3\x9f\x23\x7d\
\x17\xd1\x64\x13\x40\xe7\xea\x66\x84\x82\x54\x5b\x92\xd4\x8a\x0c\
\xf9\x93\xac\xc4\x20\x8d\x70\xac\x80\x92\x7d\x5c\xd7\x0b\x44\x6b\
\x21\x00\xb7\x06\xc0\xc4\x1e\x01\x98\xc4\x24\xa5\x24\x1b\x92\xcb\
\x1c\xed\xef\xec\x34\x76\x72\x1c\x91\xd2\xcc\x7f\xdd\xe4\xd7\x53\
\x1c\x16\xc8\xbc\xc3\xca\xae\xe1\xa2\x3d\x88\x42\x37\x66\x64\x60\
\xbb\x80\x9b\x56\xdf\xda\x90\xce\x03\x78\xe0\xc0\x7d\x8c\xcf\x8c\
\x87\x9b\xd8\xc2\xf9\x19\x45\x4e\xd1\xad\xe5\x94\x2a\x97\xaf\xab\
\x78\xbc\xd0\x56\xd7\xe2\x3e\x54\x03\xfc\x19\x24\x03\xda\x13\x1d\
\x3c\x70\xfb\x43\x0d\xad\xb3\xa8\x45\x3e\x7d\x9b\x4d\x02\x5f\x76\
\xb5\x74\x35\xaf\x25\x08\xa8\x30\xd3\x1b\xff\xb6\x6f\xee\xbf\x8f\
\x89\xd7\x8d\xd3\x9e\xec\x68\xc8\x35\xdf\xb4\xf2\x56\x1e\xed\xdf\
\x1e\x0d\xf8\x30\xd7\x45\x95\x7d\x2d\x7d\x17\x25\x37\x40\xc4\x60\
\x7a\x22\xe3\x6d\x6f\x59\x94\x26\xde\x93\x21\x37\xc8\x52\xc3\x20\
\x2d\x04\x29\x65\x4f\xcb\x5f\x0a\x04\xb6\x22\x98\xaa\x2d\x13\x50\
\x6b\x16\xc0\x4b\x01\x62\x9b\x1d\x69\x14\xcb\x93\xcb\xa1\xa5\xb5\
\x14\x00\x9c\x9e\x98\xb6\xeb\x99\xb5\x48\x67\xb0\x80\x47\xdf\x16\
\x65\xbe\xa9\x30\xf3\x57\xdb\xff\xc6\xbe\xc6\xf9\xff\x8f\x0d\x3c\
\x1c\x6e\x5a\xba\xf5\x06\xd2\x4f\x62\x65\xd1\xdd\x60\xaa\x27\xe8\
\x12\x84\x7d\x06\x17\xed\x78\x2f\x1d\x24\xcb\x97\xd0\x60\x63\xa5\
\x00\x58\x2d\x81\xb1\x90\xa5\x3d\xd6\xc1\x03\x6f\x3e\xfb\xe0\x77\
\x45\x05\xdb\x26\xea\x7e\xa3\x5c\x84\x2a\x2e\x13\x96\x4d\xfc\x8d\
\x92\x1b\x7b\x6f\xf5\xb9\x1b\xbe\x6b\x0b\x09\x0a\x07\x83\xc5\xc1\
\x78\x99\x1e\x3c\xc4\x82\x6c\x21\xe7\x34\x0c\x24\xd2\x71\x12\x5d\
\xce\xcf\x22\xab\x30\x48\x3a\x43\xf3\xe7\x34\x39\x48\x65\x0b\xa0\
\x55\xd3\x7d\x76\x06\x20\x2e\x0c\x52\x40\x87\x52\x2a\x15\xef\x80\
\x54\x53\xca\x33\xc5\xa6\x33\x33\xd0\x84\x07\x1c\x2f\xe5\xa1\x57\
\x02\x16\x4a\xbf\x83\x75\xfd\x7a\xb2\xb1\x2c\x30\x48\x69\xfb\xe6\
\x65\x5b\x1a\xd6\x79\x0f\xec\xbd\xaf\x64\x34\x55\x1c\x80\x14\x72\
\x9d\x9a\x88\x28\x6d\x1f\x1c\xbf\xe0\x3b\x28\xe4\x7b\x05\xb3\x5f\
\x45\x1d\x5b\x69\x5d\x94\x88\xf0\xc3\xda\x93\x1d\x3c\xf0\xd6\x87\
\xd8\xd4\xbd\xe5\x4c\x9a\x75\xce\xf2\xc1\xef\xdc\xcd\x57\x9e\xdf\
\x56\xe1\xc6\x09\xcf\xab\x47\xad\xd7\xc6\xe2\xe8\xaa\xf6\xb1\x81\
\x87\x79\xef\xa6\xbb\x1a\x72\xcd\x37\xae\xbe\xa5\xf4\x0c\x55\xca\
\xff\xbb\xe6\x7e\x40\xe5\x0b\xcd\x1a\xf0\xa6\xdb\xd1\x02\xd2\x2a\
\x26\xc9\x4f\x17\x49\xb6\xc7\x48\xb7\x25\x30\x5b\xed\x80\xae\x2a\
\xb2\x54\xc4\x48\x78\x16\x80\xaa\x3f\x10\x58\x6b\x1a\x50\x08\x9c\
\x37\x00\x19\x24\x55\x81\x95\x42\x08\xd2\x6b\x4a\x23\x00\x15\x90\
\x2b\x66\xbd\x02\x07\x0f\xfc\x5a\x09\xa4\xcb\x7c\xb2\xa8\xfd\x0e\
\x6b\xac\xe0\x36\xe9\x7c\x55\xb0\x69\xd9\x16\xda\xd3\x1d\x0d\xe9\
\xb8\x23\xe3\xfd\x1c\x19\xeb\x2f\xad\x08\xd3\xe4\x41\xd1\xb7\x69\
\x9d\xeb\x6a\x6e\xdf\xf1\x41\x8b\x20\xea\x9e\x83\xc7\x59\x21\xfb\
\xfb\x7a\x23\xac\x87\xea\xbc\xf9\x10\xab\xa3\x2d\xd1\xc1\x03\x77\
\x9c\x27\xe0\x8f\x32\xe9\x2b\x59\x2f\x5a\xb4\x5f\xb7\xa2\xc2\xda\
\xfd\x81\xfd\xf7\x35\xec\xba\x37\x2f\xdb\x52\x6e\x15\x06\xda\x5a\
\x59\xda\x33\x12\xb8\x1e\x15\xfc\xee\x06\x2e\x9d\xdf\xa2\x1d\x72\
\xd3\x05\x04\x90\x6e\x4b\x92\x5a\x81\xab\x11\x3b\x7d\x41\x40\xbb\
\x54\xbf\xae\x8a\xc0\xda\x0b\x81\x9c\x71\x00\xc2\x20\xa6\x8a\x2c\
\x55\x28\x5c\x0b\x00\x04\x63\x27\x27\x10\xf1\x90\x9b\x09\x98\xd5\
\xca\x69\x0c\xaf\xa3\x64\xe4\x39\x43\xfd\xa7\xcd\x2b\xb6\x34\xac\
\xe3\x1e\xd8\x77\x9f\xef\x6f\x7b\xdf\xf5\x4e\x0c\x02\x36\xd0\xb1\
\xfa\xba\xb2\x6c\x5c\x18\xb9\x45\xa4\xf3\x42\x1f\x76\x15\xf1\x77\
\x2a\x5d\x4b\xd4\xf6\x2a\xd2\x9e\xec\xe0\x9b\x6f\x3b\xc7\xe0\xdf\
\xb3\x2d\x3a\x50\x16\x04\x96\x5e\x99\x19\xa8\xa4\xf3\x8e\x8d\x12\
\x01\x13\xb9\x71\x3b\xf5\xdb\x00\x69\x4f\x75\xd0\xdb\xd9\xa7\x5d\
\x10\xd1\x7d\xa1\x5f\x5f\x10\x2b\xae\xb2\x0c\xe0\x47\x24\x60\x6a\
\x28\xe3\x1d\x9b\x6e\x4d\x12\x6b\x03\x25\xb9\x14\x83\x24\x06\x09\
\x87\x04\xc2\x66\xd2\xa8\x28\x95\x09\xc0\x35\xfe\x4b\x59\x00\xd3\
\x3e\x91\x48\x03\xc4\x3a\x20\x95\x4e\x01\x76\x09\x30\x49\xfc\xd5\
\x4c\xba\xff\xaa\x59\x03\x41\xd6\x2e\xab\xf6\x0b\x82\x4e\xdb\xd6\
\xdb\xd1\x47\xa3\xe4\xd1\x43\x0f\xfb\x08\xab\xec\xe1\xab\xe4\xe3\
\x87\x80\x33\xac\xac\xd3\xb7\x4f\x50\xbb\x57\x32\x63\xc3\x8e\x0d\
\x69\x8f\xb2\xbf\x17\x15\x8f\xa8\xe4\xf3\xc7\xcf\x31\xf8\xbf\x1d\
\x00\x7f\x2d\x9a\x5e\xd3\xf6\x65\xed\x16\x05\xbc\xc0\xdf\x7f\xb4\
\x7f\x7b\xc3\xee\x61\xf3\xd2\x2d\xa1\xd7\x5e\xd3\xb3\x1d\xc0\x4a\
\x10\x3f\x22\x01\xb9\xe9\xa2\x77\x3b\x66\xcc\x70\x66\x08\xa2\x49\
\x94\x32\x01\x2e\x01\xd4\x35\xcb\x57\x3d\x16\x80\x3b\x15\xb8\xa9\
\x2c\x2e\x4d\x2e\x83\x4e\xd7\xfc\x57\x90\x9d\xce\x61\x24\xa3\x3b\
\xc2\x35\x7d\xf4\x9c\x6c\xb0\x63\xca\x8a\x29\x42\x3e\x6f\x5a\xd7\
\xb8\x00\xe0\x2e\x47\x03\xa8\xb0\x87\xc5\xbe\xef\x12\x97\xd6\xf2\
\x50\x85\x68\x5e\x15\xbc\x2f\x02\xc7\x87\x3c\xd4\x65\xd7\x51\xed\
\x7c\x95\x2a\xe2\x54\xe5\xa5\x3d\xd1\xc1\x37\x7f\xf6\x3c\x30\xfb\
\x83\xcf\x83\xdb\xfe\x15\x44\x55\x6a\x9f\xb0\x76\x0c\xb8\x76\x8f\
\x0d\x3c\xdc\xb0\xfb\xf0\xe2\x52\xda\xb9\x54\xb5\xfe\xd3\xef\x23\
\xa4\x48\x48\xc7\x45\xc1\x99\x01\x1c\x65\x57\x04\xc6\x7b\x40\x20\
\x96\x22\x48\x0a\x37\x06\x20\xb4\x9c\x5a\x5b\x6d\x56\x40\xb5\x34\
\xa0\xae\xfd\x63\xb8\x31\x00\x54\x93\x99\xf6\xdf\xe0\xd4\xe9\x69\
\xfb\x05\xc6\x9a\xaf\xe3\xf3\x77\x34\xed\xaf\xb4\x01\x37\x3e\x53\
\xdf\x0d\x80\xe8\x0d\x16\x30\x99\x57\x2f\xea\x6b\x48\x87\x1d\x19\
\xef\xe7\xc8\xe9\xfe\x52\xee\x55\xbb\x06\x5f\xcb\x45\x91\x01\x81\
\xdf\x7a\xa0\x2f\xec\x98\xe0\xdf\x8c\x30\x67\xcb\xd6\x55\x33\xe5\
\xa3\xba\x39\x78\x4d\x21\xd2\x9e\xec\xe0\x9b\x6f\x7f\xa8\xa1\x63\
\x2a\xea\x91\x0f\x7e\xfb\x6e\xbe\xb2\x7b\x5b\xf4\xf5\x87\x90\x69\
\xad\xfb\x46\x9a\xdd\xda\xf7\x5d\xee\x40\x9e\x06\xc8\xa6\x65\x57\
\x95\xc5\x21\xc2\x5c\xd8\xd0\x6b\x01\x7f\xe6\x47\x57\x88\xce\x92\
\x99\xcc\x81\x12\xb8\x93\x8e\x18\x09\x90\x4a\x62\x40\x1b\xc2\xce\
\x04\xa8\x39\x64\x02\x2a\x13\x80\x5b\x01\xe0\x9a\xfe\x06\x29\x4c\
\x52\x28\x96\xbb\x16\x80\x33\x3b\x17\x56\xbc\xe8\xf3\x5b\xf4\x91\
\x4e\xc1\x00\x87\x3b\xd5\x51\x59\x16\xc0\x8d\x0f\x18\x4e\x2d\x40\
\x88\xf4\x36\x88\x00\x76\x0d\xee\xf0\x77\x84\x6f\xb4\x5f\x48\x2b\
\x86\x90\x91\xef\x7b\x14\xa0\x9d\xe3\xdc\x87\x57\x54\x23\x84\xa8\
\x99\x90\xa2\xfb\x27\x5a\xaa\x81\xff\x1d\xe7\x10\xfc\xdf\xd2\xcc\
\xfe\x0a\xd7\x1d\x6a\x15\x46\xdd\x5b\x04\xb8\x42\xd7\x29\x38\x32\
\xd6\xcf\x44\x76\x9c\xf6\x54\xc7\x19\xdf\x4f\x7b\xaa\xc3\xaf\xf5\
\xa3\xdc\xca\x08\x6b\xd3\x8b\x87\x85\xa4\xa1\x91\xa0\x52\xae\x89\
\x20\xbc\x31\x01\x42\x08\x94\xa5\x96\x09\x83\x98\x72\x03\x80\x75\
\xc6\x7e\xa2\x5d\x80\x76\x4d\xfb\xbb\xc3\x80\x0d\x92\xc2\x24\x0d\
\x60\xa4\xfc\xbb\x4f\x9d\x9e\x2e\xdd\x7c\x88\x9f\xa3\xcf\x7f\xe6\
\xdd\x78\x98\xf9\xeb\xa6\x3f\x42\xcc\xd7\xcd\xcb\xb7\x9c\x71\x47\
\xb9\xf2\xdc\xe0\xce\x8a\x26\x7c\x19\x23\x6b\x9d\x15\xe9\xab\x12\
\xd8\x1e\x62\xda\x97\xc5\x07\xc2\x44\x84\x9c\x2f\x4c\xaa\x98\xf7\
\x91\x5d\x9b\xec\xe0\x9b\x3f\x77\x8e\x35\xbf\x0e\xfe\x4a\xa6\x71\
\xa5\x76\x09\x6b\x8b\xb0\x58\x49\x98\xcb\xe9\xfc\xfd\x46\x59\x01\
\x37\xad\xbb\x35\xbc\xe2\x2f\xf8\xbb\x92\x65\xe2\x2e\x16\xa1\x19\
\xa5\xd9\xb1\x9c\x77\x8c\x91\xc4\x19\x07\xe2\xc5\xe6\xe6\x34\x22\
\xb0\x96\xb1\x00\xc2\x19\x03\x10\x47\x90\x94\x59\xd6\x01\xc4\x3b\
\x9d\x51\x80\xce\x0d\x58\xa2\x58\x02\x7b\xa0\x03\xca\xac\x80\x0a\
\x00\xd1\xd7\xab\xc0\xba\x46\x55\x6e\x01\xec\x3a\xb1\x23\x3c\xc2\
\x1f\xc1\xdc\xbe\x60\x4e\xd4\xf5\x87\x11\x5a\xf0\x41\x0c\x66\x44\
\xaa\x68\xc0\xc8\xa0\x58\x2d\x80\x0f\xd9\x76\xce\xc1\xff\xad\xbb\
\xf9\xca\x73\xdb\xaa\x92\x5b\xe8\xe0\x9a\x28\x60\x55\x00\x79\xd9\
\xbe\x81\xdf\xcf\xb9\x96\x60\xa3\x24\xac\xa8\x0c\xed\x77\xe0\x5a\
\xf4\xc0\x9f\xcf\xec\xd7\xc7\x34\x00\x46\x2b\x14\xf3\xf6\x1f\x68\
\xe9\x6a\xb2\x83\x80\x02\x64\x9e\x15\x80\x3e\x66\xa7\x2e\x1a\xa8\
\xb9\x0e\x00\x30\x84\x41\x0c\x93\x98\x30\x04\xb1\x0e\x88\xc5\xed\
\xd7\x7f\x17\xf2\x45\xed\xb5\xa7\xa5\x9b\xd5\xa3\xfe\x9e\xe6\x0b\
\x2b\x98\x88\x7a\xb0\x03\x1a\x78\xf3\xca\x2d\x0d\xeb\xa7\x89\xcc\
\xb8\xff\x3a\x2a\x45\xd2\x83\x5a\x29\x6c\x7b\x44\x60\x33\xd2\x6a\
\xd0\x09\x32\xc4\x5d\x8a\x8a\x7a\x97\x2e\x24\xe2\x5a\x2b\x10\x45\
\x7b\xe2\x1c\x83\xff\x41\x0d\xfc\x15\x2c\xa3\xb2\x6c\x51\xa5\xfe\
\xa9\xd4\x5f\x51\x44\xae\x45\xd9\x07\xc6\x06\x1a\x76\x7f\x37\xad\
\xbd\x35\xba\x4f\x02\xfd\xe7\x61\x41\xbf\xa7\xa8\xe0\xad\xf3\x7c\
\xe4\x67\x0a\xde\xf1\x66\xab\xfd\x29\x0c\x43\x9f\xaf\xa3\x6e\x89\
\x8e\x01\x94\xfc\x4f\x2f\x03\xe0\x54\x1b\xa5\x83\xc3\x4a\xa7\xc6\
\xa6\x6d\x9f\x3d\xec\xa1\x77\x01\x1c\xd4\x9c\x86\xb6\x8f\x1e\x08\
\x0c\xd6\x32\xa9\xd2\x47\xa3\x0a\x80\x00\x1e\x3d\xb4\xbd\xfc\x5e\
\xc3\xb4\x51\xa0\x69\x43\x87\x03\xeb\xf7\x50\xde\x7e\x95\xff\x66\
\xd9\xf0\xde\x0a\xbb\xab\xf0\x63\x2a\x76\xbf\x73\x6d\xe7\x05\xf8\
\x43\x02\x7e\x15\x87\xd1\xd6\x22\x61\x6d\x12\x04\x3d\x94\x93\x82\
\xf3\xbd\xa1\x16\x40\xa5\x7b\xa9\x76\x3d\x0a\x7b\x3c\x9f\xa6\x08\
\x7c\x0a\xc1\x80\xfc\x4c\xd1\xfb\x73\x66\xdc\x00\x21\x91\x45\xd9\
\x6a\x98\xcc\x59\xea\x19\x0d\xe8\x9a\x1c\xdd\x60\xbb\x00\x9d\x5d\
\x1d\x48\xb7\xa4\x37\xe1\xb0\xab\x7b\x93\x61\x2c\xe6\x96\x03\xbb\
\x05\x0e\x7a\x20\xd0\x05\x91\xa4\x14\x79\x50\x84\x92\x41\x43\xc4\
\x3d\x4f\x48\x20\xb2\xec\xa1\x52\xa5\x7d\x95\xf6\x5b\x04\xf7\xd1\
\xf7\xd5\xff\x46\xd0\x2c\x0b\x9e\x2f\x78\x6f\x01\xf2\x08\x4d\x27\
\xa1\x05\x14\x55\xf8\x71\xae\xb4\x27\x3a\xf8\xe6\x3b\x1f\x62\xd3\
\xe2\x2d\x0d\x6c\xc0\xda\x25\x08\xfe\x5a\xd2\xbd\x65\x52\xe9\x18\
\xf7\x7b\x88\x96\x8d\x74\x1d\xe6\x41\xda\x53\x1d\xd1\x6e\xad\xee\
\xee\x49\x42\xaf\x27\xac\x3a\xd6\x75\x11\x0c\x3d\xeb\xa6\xa0\xa9\
\x2d\x49\xa2\x3b\x43\x7e\x94\x36\x4d\x49\xd7\x5d\x11\x5a\x7b\xd1\
\x80\xfd\xc0\x0b\x61\xd8\x5f\xe2\x1d\x11\x8d\x1f\x51\x0a\xe9\x7e\
\xba\xbe\x8e\xb7\x3e\xcc\x64\x8b\x30\x83\x6e\xba\xe4\x96\x86\x74\
\xd4\xa3\x2f\x6e\xf7\x9f\xab\x16\x09\x31\xb3\xcb\x82\x84\x75\x1c\
\x1b\xe6\x5e\x84\xb5\x57\xc5\x3f\x1b\xe5\x16\x04\x8b\x7c\x7e\xfe\
\xdc\x82\xff\xcb\xcf\x6d\xab\x6c\xde\x57\x8a\x63\x10\xb2\x4f\x98\
\x45\x19\x74\x27\x82\xcf\x95\x2e\x1a\x71\x3e\xfa\xe2\xf6\x86\xdd\
\x6b\x68\x95\xaa\x0b\xee\xa8\xe7\x2d\xca\x6d\x0c\x3e\x1b\x12\xac\
\xbc\xed\x3f\x2b\x27\xab\x64\x24\xc1\x30\x0c\x28\x15\x00\xd5\x4d\
\x01\xb5\xb8\x00\x81\x9c\x7d\x79\x2f\x8d\x0d\x4d\x78\x37\x11\xcc\
\xff\x87\xb2\xb0\xcb\x6e\x81\x53\x79\x1a\x31\x58\x7c\x13\xac\x0f\
\x68\x84\x84\x69\xe1\xe0\xfd\x47\x99\xdc\xfa\xf1\xce\xf5\x29\xa3\
\x42\x8a\x2f\x78\xbe\xe0\x39\x83\x96\x4e\xd8\x3c\x02\x51\x03\x84\
\x2a\x90\x44\x47\xaa\x83\x6f\xbe\xfb\xdc\x83\xdf\x77\xff\x61\x6d\
\x52\x49\xaa\x59\x0b\x41\x93\x3a\x78\xec\x5c\xac\x8d\xb9\x4a\x08\
\x51\xd5\x33\x34\x18\x0d\x3b\x0a\x10\x81\xc0\xf3\xec\x78\xce\x7f\
\x3e\x01\x52\x49\x61\xda\xce\x83\xbb\xd4\x45\x02\x73\x79\x3d\xb8\
\x76\xbf\xce\x13\xea\x8c\x5b\x0e\x2d\x7e\xb0\x08\x05\xbf\x1e\x07\
\xf0\x3d\xff\xfa\x3c\x00\x50\x72\x05\xea\x9a\xe6\xa0\xe6\x1b\xf0\
\x99\xf7\x15\xa5\x12\x18\xb5\x4e\xf3\x46\x2f\x46\xf9\xf9\xfa\x39\
\xab\x9d\x27\x78\x8e\xb0\xbf\x17\xb1\xae\x3d\x79\x9e\x81\xbf\x1a\
\xc9\xea\xf7\x18\x65\xe2\x87\xec\x17\xe5\x1e\xd5\x42\x1c\x0d\x17\
\x5d\xc3\x2b\x2a\x17\x03\x05\xf6\xf5\x9e\x73\x37\x20\x6c\xe1\xc5\
\x04\xdc\xc5\x48\x87\x9e\xb5\x09\x7b\x38\x70\xcc\x19\xaf\xa3\x5b\
\x02\x55\xef\xb2\x6e\xbd\x2a\x0b\x2c\x47\x94\x26\x02\xc1\x9b\xc2\
\xda\xcf\x76\x9e\x25\x10\xf0\xab\xcb\xea\x00\xdc\x9f\x51\x9d\x52\
\x45\x03\xce\x45\x8e\x8c\xf5\x97\xcd\x32\xe4\xeb\x98\xb0\x6b\xa8\
\xe4\x43\x06\xfc\xfc\x8a\x83\x82\x82\xfb\x07\xff\xa6\xaa\x70\x5c\
\xd8\xfa\x10\x93\xb8\x3d\xd9\xc1\x83\xef\x39\x87\xe0\xff\xe6\xdd\
\x7c\x79\xd7\xb6\xf0\x78\x4a\xb5\x36\x8d\x32\x89\xc3\xda\x5e\x84\
\x14\x8c\x55\x08\x06\x46\xb9\x5d\xbe\x80\xf0\x99\x88\xd6\xa7\xde\
\x29\xc2\x88\xbb\xc2\x75\xa9\x4a\x7f\xdb\x2c\xdd\x5c\x6b\x57\x93\
\x5b\x7a\xbf\x1c\x48\x81\x37\x43\x70\x5d\x43\x82\x6b\x77\x01\xb4\
\x9b\x4b\x2e\xb3\x53\x80\xba\x14\x0e\x81\x8a\x81\x4a\x80\x4a\x6a\
\x37\xa4\xcf\xc9\xe6\x2e\x3a\x3f\x59\xa5\x4b\x76\xcd\x1e\xdf\x35\
\x04\x4b\x83\x1b\x20\x03\xa3\x03\x36\xd3\x46\x69\x6b\x5d\x43\x07\
\x09\x22\x2c\x02\x1f\x16\x7c\x0a\xec\x23\x82\x9d\x5f\x29\x4b\x10\
\x06\x9a\x6a\x56\x83\xb3\x4f\x7b\xaa\x83\x07\xcf\xa5\xe6\x7f\x40\
\xd3\xfc\x61\xf7\xa4\xff\x56\x15\xb6\x85\xad\xd7\xb6\xab\x28\xc2\
\xac\xe5\x33\xec\x1a\x1a\x20\xbd\x9d\xab\xfd\x31\x0e\xfd\x1c\x61\
\xc4\xae\x97\x0e\x63\xdf\x93\xd0\xd7\x87\xc6\x39\x4a\x56\x77\xa2\
\x1b\x66\x0f\x01\x82\x34\xf8\xa6\x08\xb7\x11\xb5\xa8\xba\x15\x30\
\x27\x17\xc0\x4c\x13\xfa\x30\xca\x19\x50\xa7\x41\x66\x41\x26\xb0\
\xdf\x60\xde\xa4\xdd\xb4\x4b\x00\x7a\x84\x3f\x10\x09\xf7\x81\x32\
\xe0\x2a\x34\x4c\x42\x5c\x10\x57\x7c\xb7\x55\xa9\xf9\xc2\x7c\x7a\
\xfd\xb3\x96\x32\xdd\x4a\x7f\x3f\x6c\x5b\x90\x18\x02\x59\x80\xf6\
\x94\xa3\xf9\x97\x6c\x69\x70\x83\xd5\x26\x1f\x7c\xc0\xd1\xfc\x95\
\xda\x2b\x6a\x7d\x94\x8b\xe0\x6e\xd3\x7d\xfd\x4a\xc7\x44\x99\xf9\
\xd5\xac\xa8\x06\xc8\xea\x45\x7d\xe5\x15\xa1\x95\xce\xa9\x11\x99\
\x80\xf2\x80\x78\xf0\x18\x67\x47\x2f\xed\x5f\x7a\xce\x52\xce\xa2\
\x0f\x09\xae\x49\xea\x27\x80\xc0\xb9\x55\x60\xbd\xf7\x22\x96\x29\
\x9b\x08\xac\x9c\x7d\x69\x2a\x01\x2a\x4d\x49\xeb\xeb\xb9\xcb\x60\
\xe0\x2f\x4a\xfb\x36\x52\xaa\x0d\xec\xd1\xf7\x0b\x33\xd5\x2b\x69\
\xe3\x10\x73\xd7\xfb\xb3\x95\xfe\x56\x2d\xe4\x11\x61\x15\xb4\xa7\
\x3a\x78\xf0\xbd\xe7\x10\xfc\xf7\xd7\xa0\xf9\x2b\x81\x5c\xdf\x1e\
\xb1\x9f\xfb\xe2\xcc\x9a\x88\x33\xea\xb9\x89\x02\x56\xa3\x24\xa8\
\x4d\x82\x16\x41\xe0\xbe\xbd\xb8\x7a\x44\x50\xb0\xda\x75\xda\x53\
\xc2\x2b\x37\x06\xa0\x8f\x08\xac\x49\xe6\xe4\x02\x44\xdd\xb8\xc0\
\x89\x05\x04\xfe\x8e\x9c\x04\x99\xb3\xc9\xa0\x90\xb1\xdf\x78\x62\
\x74\x82\x68\xa6\xe4\xb1\x48\x7c\x9d\xe6\xf5\x61\xad\x1a\xb5\x5e\
\x09\x9a\xe2\x2e\x71\x69\xe7\x11\xc1\x73\x47\x11\x52\x50\xfb\x87\
\xb5\x9f\xae\xc1\x08\x14\x14\x45\x3d\xa4\xfa\xdf\xa9\xf0\xfd\x5c\
\x83\x7f\x22\x3b\x6e\x0f\xad\xae\x06\xf6\x5a\x34\x77\x60\x9d\x0a\
\xdb\x56\x8b\x15\xa0\xaa\xec\x3b\x9f\x81\xc0\xe0\xdf\x97\x81\x4f\
\xed\xbc\xbe\xa1\xc0\xd5\x2a\x51\xa3\x95\x8e\x9b\x06\xac\x3b\x15\
\x38\x37\xe3\x5a\x04\xbe\x86\x11\x44\x44\x1c\x52\xcd\x82\x3c\x0e\
\xc5\xe7\xa1\xf0\x2c\x58\x87\x40\x0e\x63\x4f\x84\x18\x61\x1e\x45\
\x05\x6f\xe6\x2c\x51\x0f\x88\xfe\x5b\x8f\xe2\x86\x8d\x19\xd0\xf7\
\x27\x64\x7b\x44\x80\xce\x3b\xac\x9e\x80\x63\x58\x3b\xb8\x79\xfe\
\xc4\xb9\x05\x3f\x38\x04\xf4\xbe\x88\x6b\x08\x0b\x58\x46\x6c\x2f\
\x9b\x10\xb5\x42\xdf\xf8\xda\x5e\x6f\x9b\x4a\xe5\xc2\x21\x6d\x3a\
\xc7\x77\xa5\x84\xca\xf8\xec\x78\xf9\x35\x84\x8d\x0d\xa1\x74\x9f\
\xc1\x81\x64\x22\xe4\x98\x48\xae\x6a\x80\x62\xac\x9f\x00\xa2\x4e\
\x1a\xa1\x9d\x82\xa5\xb3\x42\x3f\x6b\x11\xe4\x28\x58\x87\xa1\xb0\
\xc3\x26\x05\xeb\x98\xed\x3e\x9c\x15\xa9\xe6\x77\x06\xf7\xad\xf6\
\x30\x57\x6a\x93\x90\x07\x42\x45\xfd\x9d\x4a\x2e\x88\xf6\xb7\xda\
\x93\x1d\x3c\xf8\xfe\x73\x0b\x7e\x57\x7c\x24\x10\xe5\xc7\x06\xa5\
\x96\x75\x55\x80\x54\xa6\x1c\x2a\x11\x76\x30\xf0\xd6\x60\x0b\xe0\
\xb9\xa3\x3b\x4b\x4a\x2f\x42\x61\xf9\xc6\x7c\x44\xdd\xb7\x08\x81\
\x58\x30\xe6\xe4\x77\xb9\x25\xc2\x5b\x94\xb3\xd4\x44\x0e\xd1\x04\
\x20\x22\x96\xe0\x76\xf7\xa7\x1b\xb8\xd3\x3d\x90\x40\x80\xcc\x23\
\x03\xa9\x6d\xd7\xcc\x66\x95\x01\x79\x0a\x8a\x07\xa1\xf8\x53\xb0\
\xf6\x83\x75\x1c\x64\x83\x09\xe1\xa6\xf5\xb7\xf8\xcd\xb1\xa8\x41\
\x34\x61\x03\x75\x2a\x4c\xe9\x54\x51\x0b\x05\x3b\xd3\xfd\xfb\xfa\
\x2b\xa4\x83\x33\x26\x29\x22\x35\xda\xf9\x04\x7e\x57\x3c\x12\x58\
\xbc\xa5\xb6\x81\x3b\xba\x04\x1f\xfa\x4a\xc7\x55\x1a\x19\xa9\x1f\
\x5f\x61\xd0\x95\xde\x9f\x0d\x1b\x66\xee\xc6\xb7\x22\xae\xb5\xac\
\x3c\x5e\x7f\xd9\x2c\x94\x29\x87\xe0\x33\x57\x96\x38\x72\x7d\x64\
\xe1\xbd\x7f\x28\x6a\x5a\xd2\x48\x99\x93\x0b\x60\x65\xdd\x0e\x0b\
\x44\x04\x83\xb9\xf0\xc0\x15\x7b\xfe\x5c\x2d\xbe\x2f\xa0\xa6\x41\
\x0e\x81\xf5\x82\x6d\x21\xa8\x4c\x63\xfa\xc9\x13\xdd\xfc\x0a\x6a\
\xab\x40\xd0\xc6\x97\x2a\x8c\x2a\x77\xae\x14\x60\xaa\xc7\xe7\xac\
\x72\x6c\x7b\xaa\x83\x07\xef\x7c\x88\x4d\x4b\xb7\x34\xb8\x41\xce\
\x5c\xda\x53\x1d\x3c\xf8\x0b\x81\x6b\x8b\x48\x75\x96\xbd\xfb\xa0\
\x9a\x7b\x15\x36\xb2\xae\x52\x7d\x81\xb6\x4d\x45\x91\x83\x6a\xec\
\x20\x33\xdf\x75\x6b\xdf\x23\xc7\x25\x84\x1d\x13\x14\x01\xe4\xf0\
\x4f\xa0\xeb\xc7\x56\xd6\x59\x0a\xd4\x5e\xdc\x0e\xcc\xd1\x05\xc8\
\x0d\x42\xb1\xe0\x8c\x4c\x12\x7e\x12\xf0\x80\x6f\x94\x3e\x85\x13\
\x9e\x10\xba\x45\xa0\xb4\xfd\xd0\x7e\x07\xb2\x09\xbe\x9b\x6d\x64\
\x35\x60\x94\x6f\x19\x7c\x78\x5c\x46\x0e\xf3\x49\x03\xed\x12\x7a\
\x8e\x30\xd3\xb5\x06\x22\x08\x35\x15\x5d\xcd\xff\x0b\xe7\x97\xe6\
\x0f\x4a\x19\x09\x04\xee\x5d\xa9\x40\x13\x54\xb2\x10\xc2\x9e\x83\
\x4a\xbe\x7d\xc8\xdf\xf4\x0d\xc2\xa9\x36\x6f\x40\xa3\x24\x18\x67\
\x08\x8b\xe5\x04\xaf\xdb\x7d\xce\x2c\xec\x58\xd9\x28\xc8\x63\x76\
\x9c\xcc\x3a\x0e\xe4\xfd\x8a\x35\x7b\xdc\x6b\x97\x0c\x82\x1c\x82\
\x02\x02\xcb\x73\x05\x46\xaa\xdf\x5d\xdd\x2e\x80\x91\xe0\x04\xc0\
\xd4\xd4\x74\x29\x95\x21\x84\x7f\x3f\x43\xfb\xeb\x9a\x4b\x20\x0c\
\x10\xc1\x6a\x65\x19\x38\x9f\xd3\x50\x22\x48\x0e\xc0\xce\xfd\x3b\
\x1a\xd2\x37\xde\xbc\x02\x61\x0f\x99\xde\x41\x11\xe3\xb2\xcb\xdc\
\x01\xfc\xfb\xd4\x64\xf6\x46\x99\xb6\x81\x75\x7a\x50\xac\x2d\xd1\
\xc1\x37\xcf\x53\xcd\x1f\x14\x8f\x04\x5c\xa2\x72\x40\x1f\x0a\x86\
\x5a\xdd\x84\xa8\xc7\x39\xca\xdc\xd7\xad\x0a\xdd\xf2\xd4\xce\xdb\
\xa8\x39\x26\x01\x06\x86\xfb\xcb\xfa\xb1\x2c\x90\x1d\x32\x12\x50\
\x4d\xda\xae\xaf\x07\xf6\xfd\x60\x0d\xd8\x04\xa0\xb2\xf8\x31\xe2\
\xdc\xc4\xf4\xe9\x8c\x8b\x99\x13\xd8\xda\x3f\x47\xc9\x02\x98\x5f\
\x17\xa0\x74\x31\x81\x4f\x07\xec\x02\xc0\xd4\xd8\xca\x2d\xd9\x14\
\x94\x0f\x57\x08\x9b\x04\x34\x8c\x1c\x80\xf1\xa9\x89\x86\x74\x54\
\x7b\xba\x23\x7a\x24\x1d\x21\xeb\x03\x91\xda\x9a\xda\x25\x4c\x33\
\x85\x05\x87\xa2\x4c\xd8\x00\x08\xda\x53\x1d\x3c\x78\xf7\x43\xf6\
\xf4\xd3\x17\x88\x78\xae\xca\xe2\x2d\xd1\xd6\x53\x58\x7c\xa9\x92\
\xeb\x54\xc1\x94\xd7\xdb\xd6\x57\x79\x19\x35\xa9\x0a\x8d\x9b\x63\
\x12\x60\x60\xb4\xbf\x74\x4f\x10\x5a\xad\xa8\xa6\x41\x8e\x80\x75\
\x04\x8a\xfb\xa1\xb0\x0b\x8a\x87\x41\x9e\x04\x39\xee\x00\xde\xb5\
\x9c\x9d\xbf\xa5\x6b\x7d\x3b\xd6\x26\x74\xdc\xcd\x52\x7a\x07\x71\
\xa5\x29\x64\x42\x65\xce\x59\x00\xdd\x27\xee\x5c\xd2\x11\x6e\xfe\
\x3b\xeb\x84\x51\xd2\xe8\x22\x08\xfe\x90\x58\x81\xef\x5c\x50\xa7\
\x57\x53\x9b\x5c\x19\xdf\x82\x1c\xb6\x6b\x14\xbc\xd8\x42\x35\xbf\
\xdd\x0d\xf8\x05\x5f\x3b\x1d\x15\xb4\xab\x94\x96\x0a\x06\x0d\x2b\
\x8c\x23\x6f\x4f\x5c\x78\xe0\x77\xc5\x47\x5c\x51\xc1\xbc\xa8\xf6\
\x08\x0b\xac\x42\x78\x7b\x6a\xe6\x76\xe8\x54\x6b\xfa\x39\xb5\x99\
\x77\x56\xeb\x2f\xf4\x38\x53\xd1\x83\xb9\x39\x50\x33\x4e\x0c\x6b\
\xc0\x8e\x63\x15\xf7\xd8\x19\x2f\x39\x08\x6a\x1c\x5b\x6f\xbb\xe2\
\xba\xc9\x3a\x22\x35\x5c\x08\x01\x4d\x9d\x49\xdf\x6f\xf7\xce\x84\
\xd0\x26\x84\xd7\xb3\x00\x35\x48\x7d\x2e\x80\xd6\xae\x85\x71\x07\
\xdc\xc1\xfd\x03\xfe\xbe\xae\xf9\xdd\x58\x80\x77\x66\x11\xb8\x92\
\xb0\xd9\x80\x34\xab\xe0\x91\x67\xb6\x37\xac\xaf\xda\x8d\x0e\xdb\
\xc7\x3a\x6e\x77\x50\x71\x3f\x58\xfd\x76\x1a\x52\x8e\xd8\x4c\xed\
\x23\x86\xe0\x7d\x6a\x1d\x14\x3a\xfe\x5c\x85\xec\x4f\xe0\xef\x45\
\x59\x02\xda\x43\xdd\x9e\xea\xe0\xc1\x5f\xbc\x30\xc1\xef\x4a\x45\
\x12\xa8\xa4\xab\xaa\xb9\x49\xc1\x76\x0b\x1e\x1b\x36\xf1\x46\xe0\
\x5c\xbd\x0d\x24\x80\x1d\x4f\xee\x40\x1e\xb1\xd3\xd9\xc5\x83\x0e\
\xd8\x87\x6d\x13\x5f\xb9\x60\x0f\x20\xae\x6c\x86\xa9\x40\x2c\x4d\
\x7f\xe7\xa4\x99\x30\x7d\xcf\x5e\x71\xca\x3b\xb6\x94\x02\xac\x33\
\xaa\x51\xab\x05\xe0\x05\xf0\xcd\x14\xc7\xc1\x26\x80\xb1\xd1\x71\
\xff\x4d\xe8\x1a\x5e\x03\xbf\xd0\x2d\x82\x18\xe5\xa9\xc2\xc0\xfb\
\x4c\x7c\x2f\xda\x9c\x8f\x2a\x40\xe0\xe6\x2d\xb7\x96\xfb\x62\x2e\
\x6b\x8f\x82\x35\x68\x93\x41\xf1\xa0\x4d\x0c\xf2\x98\xd3\x99\xa7\
\x6d\x62\x50\x39\xca\x34\x93\x8a\x7a\x60\xa3\xb4\x3b\x44\x3f\xd8\
\x68\xe0\x6f\xe0\xcb\x50\xcf\x95\xf8\x88\x2c\xaa\x8d\xa2\x52\x7a\
\x41\xb7\x2a\x84\x10\xca\x0a\x6b\x2a\xc5\x5b\xb4\xfd\x36\xaf\xbc\
\xaa\x61\xf7\x38\x36\x38\x8e\x9a\xa5\x7c\x34\x68\xf0\xb7\xfe\xf2\
\x4f\x2d\x18\xee\x6a\xff\xb2\x0c\x9a\xd0\xf6\x73\x64\xea\xf4\x2c\
\xc5\x29\x30\x62\x8c\x78\xc0\xaf\xb5\x6a\x37\x70\x29\xd5\xc4\x6d\
\x36\x0b\x85\x85\x42\x06\xd3\x7d\x6d\x9d\x2d\x3e\x0b\xc0\x33\xfb\
\xb5\x02\x45\xa1\x65\x05\xca\xea\x04\xa2\xc0\x1f\xb0\x0a\x1e\x7e\
\x7a\x7b\xc3\x3a\x6b\xf5\x92\xd5\x95\x3b\x49\xf7\x25\x8b\x36\x83\
\xab\x49\x90\xa7\x1d\x32\x38\x6a\x9b\x75\xd6\x00\xc8\x13\x76\x10\
\x47\x8d\xd9\x35\x0b\x2a\x83\xfd\xf2\x13\xfd\x6f\x55\x32\x71\x43\
\xc8\xa0\x3d\xd9\xc1\x83\xbf\x74\x71\x80\xdf\x95\xf6\x94\x73\x4f\
\x4b\xb7\x84\x8f\x78\x8b\x72\xc1\x42\x5c\x2c\x3d\xc8\x17\x4a\xbc\
\x61\x6d\x1d\x32\xd8\xa6\x51\x13\xcd\xee\x3c\xb8\x23\x7c\x43\xd0\
\x5a\xd1\x33\x00\x1a\xb1\x89\x80\xe6\xf7\x14\x27\x25\x3c\x35\x75\
\x24\xfd\x71\x01\x00\x83\x9c\xef\x6f\xd5\x29\xd5\xc7\x02\x94\x6e\
\x43\xa2\x28\x22\x98\x01\x27\x15\x58\xb4\x9f\xf2\x58\x22\x56\xf2\
\xf7\x85\x1f\xec\xba\xf6\xc7\x04\x11\x07\x32\x94\x91\x81\x8f\x35\
\xf5\x63\x02\x64\x31\x3e\x35\x4e\x47\x6b\xc7\x19\x77\xd8\x55\xeb\
\xb7\x84\xdf\x73\xa5\x86\x54\x21\xfb\x14\x6d\xbf\x8f\x2c\xfe\x57\
\x88\x6b\xf7\x22\x92\xf6\x3a\xe1\x4c\xe8\x20\x92\x78\x83\x36\x45\
\x52\xfb\xdb\x8e\xb4\xa7\x3b\x78\xf0\x97\x2f\x2e\xf0\x7b\xf7\x96\
\xb2\xef\xed\x8d\xff\x74\x9b\x3d\x35\x7b\x54\x4d\x08\x21\xeb\xa3\
\x02\xac\x95\xb2\x05\x05\x50\xee\x64\xba\x19\x4a\x96\x9e\x53\x46\
\xdd\xa8\x20\xe0\xc4\xd4\xb8\xef\xb7\x30\x28\xe5\xed\xdd\xe2\x37\
\x5d\x8b\xbb\xc5\x3d\x11\xd9\x36\x2f\xe0\xaf\x29\xc1\x58\xc2\x8e\
\x0c\x5a\x45\x49\x71\x9a\x12\xa1\xd4\x69\xf6\xeb\x52\xcb\x68\x40\
\xa5\xec\x00\x43\x41\x29\x72\x66\x82\x63\x60\x0f\xee\x99\x9e\x9a\
\xa6\xbb\xbb\x1b\x01\xa4\x5b\x52\x64\xc7\xb3\x25\xb3\xbf\x34\x2a\
\xb9\x54\x07\x10\x61\x09\xd4\x0a\x7e\x80\x5d\x07\x76\x70\xf3\xd5\
\xb7\x9e\x71\x87\x6d\x5e\xb7\xa5\xfc\x5d\xaa\x95\xfc\xf6\xe0\x7e\
\x61\x69\xcf\xb0\xfd\x9d\x07\x0e\xc0\xe1\xea\x72\xa2\x10\x76\x4f\
\x88\x38\xb4\x37\x75\xf0\xe0\xaf\x5f\x9c\xe0\x77\xc5\x23\x81\x7f\
\xbc\xad\xf4\x86\xa6\xd2\xc3\x5c\x6e\x0d\xe8\x51\xf4\x3c\xa5\xa9\
\xe5\x25\x76\x9b\xba\xe6\xbf\xeb\x96\x59\xf6\x7e\xbe\x63\x03\x44\
\xa1\x14\x6c\x0e\x53\x02\x73\x94\x47\x7e\xfa\x70\xf8\x86\x60\x5f\
\xeb\x44\x10\x88\x63\x08\xf0\xd5\xcd\xe8\xd8\x81\x52\x0c\x20\x33\
\x99\xb5\xfd\x7f\xc0\x48\x72\x5c\x2b\x10\xaa\x39\xf8\xe7\x4a\xb4\
\x0b\x30\xe4\xf9\x15\x76\x80\x41\x51\x40\x91\x53\x16\x19\x84\x1d\
\x03\x28\x5a\x45\x2f\x25\x91\x6e\x49\xf9\x2f\xda\x35\x53\x02\x2e\
\x80\x48\x10\x0e\x7e\x9d\xed\x74\x37\x21\xe0\x32\xec\x3c\xb0\xa3\
\x61\x9d\x76\xf3\x55\xb7\xba\xcd\xe6\xef\xb0\x6a\x8d\x18\x24\xa8\
\xe0\x88\xad\x40\x26\x24\xf2\x6f\xe8\x62\x41\x9b\xd1\xc1\xb7\x7f\
\xef\xa1\x86\xbe\x02\xfd\x7c\x95\xf6\x54\x07\x0f\xfe\xca\x43\x6c\
\x6a\xdf\x82\x1a\x73\xdc\xa7\xd3\x4e\x3a\xcc\x5d\x86\xec\x74\x99\
\x35\x00\xc5\x7e\x7b\xb1\x4e\x38\xdb\x06\xed\xed\x72\xcc\x49\x9f\
\x39\x81\x36\x95\xa3\xfc\xdd\x93\xee\x67\xa0\xcd\x6f\xde\x7c\x6b\
\xc3\xee\xa7\x7f\xb0\xbf\xec\x3c\xde\x9c\x16\xba\xe6\xd7\xeb\x5d\
\x28\xf9\xf6\xc2\x99\xd1\xcf\xb3\x08\xcc\x72\x1c\x35\x77\xa6\x3c\
\xcb\x40\x96\x5e\x11\x20\x51\x5a\x06\xa0\xa1\x41\x40\x27\xf2\x4f\
\xc9\x02\xc8\x2a\x45\x56\x18\x1c\x2a\x8c\xf9\x8b\x81\x52\xcd\xa9\
\x72\xc0\x87\xc4\x01\x3c\xcb\x00\xca\x83\x7d\x61\x99\x01\xff\xb5\
\x30\x30\x38\xd0\xb0\x4e\xf3\x1e\x80\x60\x4d\x42\xa5\x66\x14\x21\
\xfb\x04\x09\x21\x24\x27\x2d\x2a\x91\x8a\xb2\xcd\xfe\xef\xfc\xef\
\x87\xd8\xbc\x7a\x4b\xc3\xee\xaf\x1e\x99\x98\x1d\x3f\xeb\xe7\x6c\
\x4f\x75\xf0\xad\xdf\x78\x88\x2b\x3b\xb6\x20\x27\x1c\x10\x67\x34\
\x20\x67\x29\xcf\xeb\x07\x45\x44\x7c\xaf\x61\x9f\x9b\x37\xdd\xd2\
\xb0\x7b\xd9\xf5\xc2\x8e\x12\xb0\xf5\x22\xa0\x2a\x2e\x8e\xe7\x32\
\x1b\x25\x12\xf0\x82\xe9\x5a\xe6\x2c\xd1\x1c\xf7\xc8\x61\x7a\x2c\
\x43\x7e\xc4\x39\x3e\xc6\x20\xa5\x1a\x80\xba\x5d\x81\x6a\x41\x40\
\x3d\x9c\x52\x04\xdb\x0a\x00\x64\x61\x1c\x2c\xab\xe0\xdd\x45\x53\
\x4b\x3a\xd2\x7c\xf1\xd6\x99\xf6\x4d\x1a\xb1\x2a\xe0\xd7\xcd\x6a\
\x9d\x51\x8d\x0a\xc1\x96\x39\xc8\xcd\x57\xdd\x52\xae\x29\x02\xe7\
\x0c\x6d\x11\x7d\x9f\x20\xf0\x03\x7f\xa3\x22\xf0\x9d\xfd\xda\x5b\
\x3a\xf8\xce\xff\x77\xee\xc0\x7f\xcf\x3f\xdd\xcd\xeb\xff\xfc\xb6\
\x73\x43\x02\x4d\x1a\xf1\x85\x3d\x07\xfa\xa7\xd6\x66\xa1\xfb\x41\
\xb9\x12\x89\xc8\xbe\x08\xd1\x58\x0b\xc0\xb3\x4c\xa3\x52\xbe\xfa\
\xb5\x07\x2c\x44\x11\x82\x11\x8f\x04\x9c\xf5\xc9\xe6\xb8\xf3\x3c\
\x09\x64\xd1\x42\xe6\xbd\xe3\x27\x95\xf2\xaa\x00\xdd\x3a\x80\x06\
\x65\x01\x4a\x7f\xc4\x9f\x09\x30\x39\x50\x18\xf7\x5b\x00\xad\x6e\
\x26\x40\x63\x2f\xfd\x26\x84\x6e\xd2\x98\xda\xdf\x0f\x9a\xfc\x41\
\xdf\xdf\xfd\xea\xec\xf3\xc8\xb3\xdb\x1b\xd6\x69\x37\x5f\x75\x2b\
\x1d\xcd\x1d\xa1\x1d\xe4\xd3\xda\x11\xd7\x54\xd6\xc9\x21\xfb\xfb\
\x8a\x52\x82\x6e\x8e\x01\xed\xcd\xe7\x56\xf3\xdf\xf3\xcf\x77\xf3\
\xa5\xc7\xb7\xb1\xeb\xe8\x0e\x5e\xff\xf1\x73\x4c\x02\xbd\x5b\xa2\
\x07\x5a\x11\xe8\x93\xa8\x3a\x00\xdd\xc7\xae\x00\x84\x37\x5f\x7f\
\x47\xc3\xae\xff\x91\x67\xb7\x97\x9d\xc7\xbb\x85\xa8\x67\x48\xb7\
\x96\x85\x7f\x41\xb7\x94\x1d\x72\x48\x77\x94\xcc\xff\xd9\xe9\x1c\
\xf9\x11\x10\x06\xfd\x2e\xf8\x95\x7f\x24\xa0\x62\xb0\x51\x95\x80\
\xa5\xca\x22\x09\x58\x4a\x51\x10\x06\x19\x80\xdc\x10\x64\x33\x59\
\x84\x80\x74\x4b\x1a\x25\xf0\x9b\x2e\xee\xf7\xc0\xcd\x18\x71\xad\
\x01\x22\x82\x7d\xfa\xa7\x6e\x55\x20\x1a\x6d\x05\xdc\x1a\xe8\xb1\
\xb2\xfb\xaf\x18\x94\xf4\x5d\xab\x2e\xc1\x87\x36\xc4\x45\xf0\x69\
\xbf\x73\x20\xf7\xfc\xd3\xdd\x7c\xe9\xd1\x6d\x1e\xbd\xef\x1a\xd8\
\x71\x6e\x2d\x81\xff\xef\x21\x36\xaf\xd9\x52\x9f\x59\x1f\x24\x82\
\xc0\xf8\x91\xa8\xbf\x71\xf3\xe6\xc6\x99\xff\x3b\x0f\xee\xf0\x5f\
\x87\x11\xa2\x40\xdc\xcf\xc0\x77\xcf\xf7\x37\xfc\x8b\x8e\x17\x05\
\xc4\x92\x86\x77\x6c\x3e\x5b\xb0\x5d\x00\xc1\x0c\x92\x8c\x52\xbe\
\x91\x80\xb2\x9e\x40\x60\x3d\x85\x40\xb6\x1b\xa0\x28\x8a\x38\x27\
\xdd\x40\x60\x36\x6b\x97\x38\x79\xb5\x00\x01\x4d\xef\x99\x33\xfa\
\xa2\x4f\x5e\x5c\x0d\xfc\x21\x44\xd1\x48\x2b\xe0\xcd\xaf\x78\x6b\
\xa8\x25\xa2\x6a\x21\x84\x28\x33\x34\xec\xb7\xdb\x8a\xce\xdf\x6d\
\x6f\x39\x3f\x34\x7f\x90\xe0\xce\x0b\x4b\xa0\x6f\x8b\xbf\x3f\x02\
\x66\xbd\x08\x6b\xf3\x3a\x02\xb9\x4a\x35\xda\x02\x78\xb8\x3c\x5e\
\xa5\x57\xc4\xea\xd7\x1a\x74\x6f\x0d\xec\xe2\xb8\x30\x9c\x68\x56\
\x74\xdb\xe2\x26\x0f\x0f\xf9\x5c\x01\x99\x03\x0c\x8e\x3a\x16\x40\
\x1e\x97\x00\x4a\x71\xbb\x9a\xa4\xba\x0b\x20\xbc\x36\xb3\x94\x6d\
\x6a\xe4\x8c\xb8\x9d\x0a\xcc\x9d\x84\xb1\xb1\x31\xef\x26\x5b\x3b\
\x5b\xca\xd3\x80\x46\x39\xbb\x19\x09\xaa\x82\x3f\x74\x82\x11\xe7\
\x8a\x1f\x79\xf6\x61\x1a\x25\x6f\x79\xe5\x1d\xa1\x95\x88\x22\x40\
\x08\x15\x9b\x34\xd8\xb9\xc1\x0c\x47\x48\x9b\xde\x7c\xc5\xad\xe7\
\x1d\xf8\x7d\x24\xf0\x17\xe7\x41\x4c\x20\x22\x18\xeb\x23\xe7\xb0\
\x54\x6e\x54\x5d\x80\x73\x7f\x57\xad\xdb\xc2\xea\x25\x7d\x0d\xbb\
\xe6\x9d\x2f\xec\x28\x5d\x4b\x88\x99\x2f\x82\xd7\xa8\xed\xab\x07\
\x00\x75\x65\x89\xb6\x5e\x09\x3b\x06\x20\x04\x4c\x8d\xcd\x92\x3d\
\xe1\xfc\xb9\x18\x43\xe8\xfe\xff\x1c\x02\x81\x95\x09\xe0\x84\x2f\
\x23\x6b\xa1\x28\x20\xc9\x22\x18\x47\x90\xcd\x9d\x84\x6c\x2e\xeb\
\xdd\x6c\xaa\x39\xed\x03\xbf\x30\x29\xaf\x0a\x34\x6d\xc6\x13\xf1\
\x40\x43\x69\x0d\xa4\x07\x3f\xc2\xb2\x02\x8d\xb4\x00\xda\x9b\x3b\
\x78\xcb\x2b\xee\xf0\xce\x5b\x66\xae\x07\x3a\xb3\xd6\xb4\x9e\x08\
\x23\x05\x6d\xb9\xff\xe9\xfb\xf8\xc0\x67\xef\x6e\xd8\x7d\xd4\x2a\
\x1e\xf8\xab\xc8\xae\x23\xe7\x01\x09\xf4\x6d\x09\x55\x12\x65\x23\
\xfd\x74\x09\x53\x2c\x81\xfe\x7c\xff\x6b\xee\x6c\xd8\xb5\xee\x7c\
\x61\x07\x03\x27\xfb\x2b\xfb\xfa\xa2\x02\x09\xb8\xd8\x30\xfd\x24\
\x60\x68\x98\x31\x13\x26\xc9\x16\x3b\x08\x98\xcf\x15\xc8\x8f\xe2\
\xe2\xec\x34\x90\x47\x05\x08\xa0\xa1\x2e\x80\xfd\xc7\xdc\x42\x4a\
\x3b\x15\x28\xc9\x08\x83\x13\xd9\x93\x30\x3d\x3d\xe5\xed\xd7\xd6\
\xd9\xe2\x63\x2e\x21\x4a\x9f\x3e\x96\x33\xc1\x08\x0e\x0b\x0e\x82\
\x3f\xb8\x4d\x6b\xc0\xf1\x99\xf1\x12\xeb\x36\x40\xde\xff\x9a\x3b\
\xcb\x47\x32\x06\x3a\x30\x68\xb6\x55\xcc\x00\xf8\xdb\xce\xdf\xda\
\xda\xba\x2f\x3d\xb2\x8d\x0f\x7c\xee\xec\x91\xc0\x3d\xff\x72\x37\
\x5f\xfa\xd1\xb6\x48\xcd\x1f\x04\xca\x79\x41\x02\xae\x95\x14\xd0\
\xec\x91\xfd\x05\xd1\x7d\xe4\x7c\x7f\xf3\x0d\x77\x34\xec\x3a\xbd\
\x00\x60\xf0\x19\xa9\x81\x04\x5c\x7c\xe8\x24\x40\x88\xdb\xac\x97\
\x00\x67\xa6\x73\x5e\x11\x90\x88\xf9\x5c\x80\x52\x06\xa0\x0e\xa9\
\x6d\x2c\x80\x7d\x03\x16\x6e\x1a\x50\x92\xc5\x60\x7f\x61\x0c\x26\
\x86\xa7\x3d\xdf\x64\xd1\x92\xce\x92\x05\xe0\x82\x3f\xc8\x6e\x6e\
\xc0\x23\x59\xde\x71\xc1\x60\x5f\x59\x63\x6a\xdb\xee\x7f\xf4\xeb\
\x0d\xeb\xc4\x37\xdf\x70\x07\xed\xad\x1d\xd1\xc0\x0f\x39\x7f\x99\
\x85\xa0\x15\x6d\x88\x28\x52\x08\x99\x0b\xe0\x4b\x8f\x6c\xe3\x03\
\xff\x30\xff\x24\x70\xcf\x3f\xdf\xcd\x97\x1e\xdb\x16\x3d\x1a\x2f\
\xcc\x87\x3e\x1f\xdc\x81\xff\x4f\xb3\x04\xa2\x2c\xb0\x30\xa5\x11\
\x26\x8e\xef\xbf\x7a\x71\x5f\xc3\xae\xf1\x91\x9d\x0f\xfb\xdb\xac\
\x1e\x12\x08\x31\xfd\x85\x46\x02\xee\x30\xfa\xd6\x9e\x66\x10\xf6\
\x04\x7c\x99\xe9\xac\x97\x01\x40\x91\xa1\x14\x00\x2c\xcd\x46\x78\
\xbc\x51\x31\x00\x5f\xd3\x79\xb5\x00\x79\x14\x59\x23\xce\x20\xd8\
\x99\x80\xf1\xb1\x71\x00\x16\x2d\xee\x28\xbd\xd9\x47\x2f\x06\xd2\
\xc1\xef\x10\x83\x91\xd0\x1a\x02\xfc\x63\x06\xa2\xc0\xaf\xed\xff\
\x8d\xc7\xef\x6b\x58\x27\x02\xfc\xc6\x5b\x7f\xbb\xbc\x23\xa3\x34\
\x0a\xe5\xeb\xcb\xa2\xbe\x51\xe9\xcd\x90\x00\xd6\x97\x1e\x9e\x5f\
\x4b\xa0\x9a\xcf\x1f\x0a\x1c\x8d\x18\xce\xb9\x25\xe0\x92\x80\xde\
\xb6\x7a\x7b\xeb\xd7\x1b\xbc\x07\xf7\x18\x67\x79\xff\xab\x1b\x67\
\xfe\x4f\x4c\x8f\x73\xff\xe3\xf7\x85\x07\x00\xc3\xbe\xeb\x24\x50\
\xc1\xff\xd7\x95\xa6\x02\x9a\x3b\x93\x8e\xc5\x23\x98\x1a\x9f\xb5\
\xa7\x02\x33\x38\xaa\x24\xb3\x4a\x91\xc1\xb5\x00\xea\xcc\x00\x40\
\xed\x2e\x80\x02\xa4\xb2\xd3\x80\x79\xa5\xc8\x8a\x18\x47\x11\x30\
\xdb\x0f\xe3\x7a\x20\xb0\xa3\xc5\x7f\x43\xda\x8b\x8b\x85\xc0\x1b\
\x0e\x6c\xc4\xf1\xea\x01\x6a\x02\x7f\xe0\xf7\xae\x17\x1d\xdf\xab\
\x41\xf2\xbe\x57\xdf\x19\x7e\x0d\x61\xb1\x88\x6a\xc0\x8f\xd8\x37\
\x92\x44\x04\x7c\xe9\xb1\x6d\x7c\xe0\x1f\x1b\x4f\x02\xf7\xfc\x4b\
\x6d\x3e\x7f\x99\x04\xda\xe2\xbc\x21\x81\x90\x2a\xcb\xc8\x6b\x0f\
\x10\xf0\xea\xa5\x7d\x8d\x8d\xfe\xef\xd8\x5e\xba\x86\x30\x12\x08\
\x53\x5e\xba\x82\xd4\x2a\xfd\x7c\x51\x7f\x7d\xee\x0c\x51\xca\x00\
\x64\xa6\xb3\xe4\xdc\x00\xa0\xc9\x31\x14\xb3\x28\xb2\x4a\x2f\x02\
\x6a\xf8\x7c\x00\xc7\x9c\x3a\x00\xfb\x0f\x17\xc1\x21\x00\x93\x93\
\x08\xc6\x72\x27\x61\x7a\x66\x1a\x81\x00\x01\x8b\x7a\x3a\xcb\xcc\
\x1b\x6f\x46\xa0\x80\xa9\x63\x24\xea\x00\xbf\x76\xc5\x6e\xa7\xdf\
\xff\xd8\x7d\x0d\xeb\xcc\xd5\x8b\xfb\x78\xff\x6b\xee\x2a\x7f\x78\
\x1a\x01\xfc\x4a\x41\x45\x6d\x69\x34\x09\x78\xe0\x8f\x08\x44\xd6\
\xbb\x9c\x17\xee\xc0\x9a\x2d\xa1\x69\x41\x51\x89\xa8\x9d\xf5\xbf\
\xfe\xe6\xdf\x6a\xe8\x35\x7d\xe3\x47\x25\x37\x34\x72\x4a\xfc\x08\
\x12\x10\xa6\x1f\x1f\x9e\x6b\xac\x93\x80\x09\x4d\xed\x29\x62\xce\
\x20\xa0\xa9\xf1\xd9\x52\x00\x30\xce\x0b\x4a\x31\x8b\x7f\x36\xe0\
\x79\x89\x01\xb8\xe2\x4e\xa6\x94\xc3\x0e\x04\xce\x0a\x93\x17\x66\
\x0f\xc3\xd8\xf8\x18\x08\x30\x84\x60\x91\x3b\x3d\x98\x5e\xd7\x1c\
\xf4\x6b\xdc\x48\xa7\x1b\x07\x98\x03\xf8\x01\xbe\xf8\xdd\xcf\x37\
\xb4\x43\xdf\xf7\xaa\x3b\xcb\x1f\x7c\xf7\x9a\x6a\x01\x7e\x10\xd4\
\x51\x04\x52\x61\x9f\x2f\x3f\xbe\x8d\x7b\xfe\xe9\xcc\x49\xa0\x4c\
\xf3\xab\x39\x2c\xfa\x75\xbb\x96\xc0\xf9\x40\x02\x11\x31\x81\xd0\
\xc0\xa0\x73\xfd\xed\x2d\x1d\xbc\xff\x55\x77\x35\xf4\x7a\xee\xff\
\xd1\x7d\xe5\x69\xdf\x08\xdf\x1f\xf0\x91\x80\x70\xb3\x61\x55\x48\
\xa0\xa5\xbb\xc9\x7b\xe6\xa6\xc7\x67\xc9\xd8\x33\x01\x67\x85\xc1\
\x20\x25\x02\x28\xe2\xce\x08\x74\x74\x1e\x67\x04\xa2\x54\x0c\x94\
\x47\x92\x15\x06\xc7\xac\x2c\x4c\x1f\x2b\xda\xd9\x00\x01\x8b\x16\
\x2f\xb2\x6f\x38\x58\xd8\x10\x04\x7f\x4c\x7b\xcb\x70\x35\xf0\xbb\
\x16\x45\x80\x0c\x76\x1e\xde\xc1\xc0\x50\x7f\xc3\x3a\xf4\xe6\x2b\
\x6f\xe5\xe6\x4d\xb7\x96\x91\x4e\xcd\xc0\xaf\x14\x20\xac\x00\x7a\
\x11\xf8\xfb\x5f\x7a\xec\xcc\x48\xc0\xa7\xf9\xcf\x64\xd1\x7b\x5f\
\x7b\xac\xce\x1b\x12\x08\xe9\x0b\x5f\x2a\x57\x5b\xff\x1b\x6f\xfe\
\x6d\xda\x83\x65\xdf\x67\x20\x5f\xfc\xee\x36\xc6\x67\xc6\xcb\xfa\
\xb3\x26\x12\x70\xf0\x11\xb4\x02\x82\xee\x80\x92\xd0\xae\x15\x00\
\x4d\x4f\xd8\xfe\xbf\x30\x78\x11\x45\x36\x50\x03\x30\xa7\x39\x01\
\x6a\x23\x00\xfb\x06\xec\x54\xa0\xf2\x46\x05\xce\x1a\x09\x0e\x01\
\x64\x06\x60\x6c\x6c\x1c\x01\xc4\x13\x31\x5a\x3b\x5a\x7d\x11\xcd\
\x20\xb3\x19\xee\x4d\x47\xd5\x03\x84\x98\x52\xa1\x01\x36\x1a\x6f\
\x05\xfc\xc9\xcf\x7f\xc8\x7f\x1e\x28\x23\x9e\xba\x81\x1f\xa1\xad\
\x42\x47\x09\x3a\xfb\x7e\xe9\x47\xdb\xb8\xe7\x5f\xea\x27\x81\x7b\
\xee\xbd\x9b\x2f\xfd\x78\x9b\xff\xbc\x8d\x5e\x04\xec\x3a\xb6\x83\
\xd7\xff\xe5\x79\xe0\x0e\x44\x90\xb2\x4e\x04\xed\x2d\x1d\xfc\xfa\
\xed\x8d\x35\xff\xef\xd7\xcc\xff\x7a\x49\xc0\xe7\xfb\xc7\x34\x2c\
\xe8\xda\xdf\xb0\x11\xdd\xb6\xb8\x19\xe1\xf8\xff\x99\x21\x7b\x10\
\x90\x30\x39\xa0\x24\x33\xca\x26\x01\x7d\x10\x50\xc3\x47\x03\xea\
\xe2\x06\x02\xed\x38\x80\x64\x56\x24\xe8\xc7\x60\x7c\xf6\x30\x8c\
\x4f\x8c\x79\x40\xed\xec\xe9\x88\xd6\xfc\xce\xcd\xba\x0c\x68\xa6\
\xa8\x0f\xfc\x81\x80\xcb\x17\xbf\xbb\xad\xa1\x1d\x7b\xf3\xc6\x5b\
\x79\xf3\x75\x77\x44\x6b\x7d\xb4\xdf\xb5\x68\xfc\x08\x6d\xef\x13\
\x11\xb2\xbf\x80\x2f\x3d\x5e\x1f\x09\xdc\xb3\xed\x6e\xbe\xf4\xc4\
\xb6\xf0\xf3\x9f\xc9\x12\x41\x04\xbb\x8e\xef\xe0\xf5\x7f\x75\x1e\
\x14\x0b\x85\x91\xb2\xd6\xde\xbf\xf1\xa6\xc6\x6a\xff\x81\xa1\x7e\
\xbe\xf1\xa3\xfb\xca\x9f\xdb\x1a\xdd\x01\xdd\xfc\xaf\x44\x02\x3e\
\xff\xdf\xd1\xfe\x00\x46\x92\xbd\xae\xff\xaf\xec\x0c\x40\x3d\x13\
\xd7\xfb\xa4\x36\x02\x70\xfd\x0a\xdb\xcf\x70\x53\x81\x19\x2c\x66\
\x84\xc9\x8b\x53\x7b\xed\x38\x80\xfb\x60\x2f\x59\xd1\x63\x97\x6b\
\xea\x31\x00\x4d\xf3\xeb\xf1\x00\xcf\x0d\x70\xaf\xa6\x0e\xf0\x23\
\xec\xce\x78\x64\xd7\xf6\x86\x75\x2e\xc0\x5f\xde\xfd\x89\xda\xfc\
\xfc\x6a\xc0\x77\x37\x85\x69\xfb\x08\xd0\x07\xff\xe6\x97\x7e\xb4\
\x8d\x7b\xee\xad\x4e\x02\xf7\x6c\x73\x34\x7f\x25\xa9\xd7\xf4\xaf\
\xe1\xef\xec\x3a\xbe\x83\xd7\xff\xf5\x79\x40\x02\xc1\x6b\x77\xda\
\x75\x75\x4f\x1f\x7f\xf2\x73\x1f\x6a\xe8\x79\xbf\xf8\xfd\xcf\x97\
\x2b\x03\xf7\xb3\x1a\x09\xe8\x66\x7e\x0c\x9f\x82\xd4\x49\x00\x01\
\x1d\xcb\x5a\x1d\x0c\x08\xa6\xc7\x9d\x12\x60\xc1\x38\xae\xff\xaf\
\xc8\x73\x06\x73\x01\xb8\xcd\x54\x9b\x94\x32\x01\x16\x36\xeb\x64\
\x95\x64\xc6\x88\xf1\x02\x02\x26\x5f\x2c\x7a\xe3\x02\x16\x2d\xee\
\x24\x96\x88\x85\x46\x35\xd1\x6f\xda\x71\x01\x42\x67\x0a\xae\x04\
\xfe\x00\xf3\x7e\xf1\xfb\x8d\x75\x03\x56\xf7\xf4\xf1\xeb\xb7\xff\
\x76\x7d\xb5\x00\x15\x80\x1f\x68\xc7\xaa\xa0\xf7\xed\x63\xc0\x97\
\x7e\xbc\x8d\x7b\xb6\x45\x93\xc0\x3d\x9f\x77\x34\x7f\x35\xed\x3d\
\x17\x8d\x5f\x03\x49\xec\x3a\x7a\x8e\x49\xe0\x4f\x43\x06\x10\x39\
\x6d\xf9\x17\x77\x7f\xa2\xe1\xe7\xfc\xe2\xf7\xb7\x85\x5b\x84\xee\
\x67\x05\x12\x10\xba\x05\x1c\x0c\x90\x6b\x24\x00\xb0\x68\x45\xab\
\x67\x49\x8f\x8f\x4c\x31\x73\x08\x84\xc9\x8b\x28\x66\x28\xe5\xff\
\x5d\x02\x80\x23\xf3\xeb\x02\x40\x29\x13\x50\x74\x4a\x82\x67\x8d\
\x34\x7b\x00\xa6\xf7\xc1\xf0\xc8\xb0\x77\xb3\x8b\x97\xf5\x94\x81\
\x5f\x04\x33\x01\xae\x15\xd0\x14\x68\xc0\x6a\xe0\xd7\xaf\x5e\xd8\
\x1d\xd2\xc8\x60\x20\xc0\x9f\xfc\xdc\x87\x4a\x66\x63\x54\xaa\xb2\
\x56\xe0\x57\x71\x0b\x2a\x9a\xda\xae\x3b\x10\x41\x02\x65\xe0\x8f\
\x0a\x38\x36\xca\xfc\x0f\x12\x94\x6b\x09\x1c\x73\x48\x20\x33\xde\
\xd0\x7e\xa8\x45\x3c\x12\x70\x27\x15\x71\xae\xef\xe6\x2b\x6e\xe5\
\xcd\xd7\xdc\xd1\xd0\x73\x79\xcf\x5a\x20\x16\x05\x54\x27\x01\xdd\
\x2d\x8e\xf9\x31\x11\x2c\x98\x4b\xa4\xe3\x34\x77\xa6\x40\xc0\xf8\
\xf0\x14\xb3\xfd\xf6\xdf\x32\xe2\x3c\xaf\x24\x33\xca\x8e\xfe\xeb\
\x25\xc0\xf3\x6e\x01\xb8\x56\x80\x1d\x07\x50\xe4\x94\x62\x06\x18\
\x15\x26\xfd\x53\x7b\x61\x78\x74\xd8\x7b\xf8\x17\x3b\x6e\x80\xef\
\x26\xcd\x40\x03\x38\x56\x80\x99\xf2\x9f\xa7\x1e\xf0\xbb\xeb\xfe\
\xee\xeb\x9f\x6a\x68\x47\xb7\x37\x75\xf0\x0f\x1f\xbc\x37\x1c\xb0\
\xf5\x02\xbf\x9a\xa6\xaf\x44\x0e\xda\xbe\x5f\x7a\x62\x1b\xf7\x7c\
\xa1\x44\x02\xf7\x7c\xe1\x6e\xbe\xf4\xe4\xb6\xea\xe6\x7b\x00\xd4\
\x22\x62\xa9\xc9\x15\xa8\x60\x29\x78\xee\xc0\x79\x42\x02\x9f\xfb\
\xe0\xbd\x0d\x3f\xcf\x97\x7e\xf0\xf9\xf2\xa0\x70\xd0\x52\x74\x3f\
\x83\x41\xc9\x80\x32\x44\x27\x81\x40\x4a\xb0\x63\x69\x2b\x42\x80\
\x21\x60\x7c\x74\x8a\xd9\xc3\x4e\xdf\xc5\x39\xa8\x4a\x05\x40\xfa\
\x2b\xc1\xe7\x24\xb5\x13\xc0\x80\x2f\x15\x68\xbb\x01\x92\x59\x65\
\x31\x2d\x62\xec\x2e\x8c\xc3\xc4\x40\x96\xa9\xe9\x29\x84\xb0\xe3\
\x00\xb1\x78\xac\x14\xf1\x0f\x98\x3a\x7a\xf0\xc3\x88\xdb\x45\x41\
\xbe\x54\x5f\x1d\xe0\x17\x02\xbe\xf8\x83\x6d\x4c\xcc\x8c\x37\xb4\
\xb3\xdf\x7c\xcd\x1d\xdc\x7c\xc5\xad\xa5\x15\x11\x6e\x40\xdd\xc0\
\x0f\x46\xe8\x2b\x80\x3e\xb8\xaf\x4b\x02\xf7\x7c\xc1\xd1\xfc\xda\
\xfe\xfa\xb4\xec\xbe\xa5\x1a\xd0\xa3\x88\xc1\x08\xff\x5b\x95\xae\
\x77\xd7\xf1\x1d\xbc\xfe\x13\xe7\x9e\x04\xfe\xe4\xed\x1f\x66\x75\
\x4f\x5f\x43\xff\xfe\x23\xcf\x6d\xe7\x91\xdd\xdb\x43\x53\xd2\x65\
\xd6\xa1\xfb\xa9\x93\x80\xee\xf7\x87\x2c\x6e\x1f\x2b\x05\xed\x4e\
\xf4\x5f\x38\x04\xe0\x98\xff\xfb\x80\x51\x24\x33\x40\x0e\xa1\xe5\
\xff\x6b\x89\xdd\x84\x48\x7d\x2e\x40\xc9\xd4\xb0\x80\xbc\x82\x59\
\x25\x99\x34\xd3\x3c\x0f\x30\xb1\x03\x06\x4f\x0e\xda\xbb\x6a\x6e\
\x40\x94\x9f\xa3\xff\x36\xd3\x73\x07\x3f\xc2\x9e\xd4\xf2\xef\xbe\
\xd1\x58\x2b\x00\xe0\x6b\xff\xf3\xbf\x6d\x57\x20\x44\xeb\xcf\x09\
\xf8\x11\xd6\x43\xd9\x7d\x47\xc5\x1c\x80\x2f\x3d\xb9\x8d\x2f\x3f\
\xb9\xad\x36\x60\x36\x5a\xc2\x08\x22\x70\xee\x5d\xc7\xce\x3d\x09\
\xfc\xc9\xcf\x36\x36\xf0\x07\xf0\xd1\x7f\xfd\x48\xe9\x47\xbd\x24\
\x60\x84\x3f\xfb\x65\x31\x31\x13\x12\xa9\x38\x8b\x56\xb4\x82\xe3\
\xfb\x7b\xe9\xbf\x18\x7b\x94\x64\xca\x29\x00\x2a\xd5\xff\x03\xf4\
\xcf\xb7\x0b\x50\x12\xd7\x0d\x28\xa0\xc8\x2a\x8b\x69\x04\xc7\x84\
\xc1\x90\xee\x06\xe0\x58\x01\x0a\xca\x46\x03\x06\xc9\x80\x18\xc4\
\xf4\xa2\xa0\x7a\xc0\xaf\x01\xe9\x6f\xbf\xf1\xc9\x86\x5b\x01\xed\
\x4d\x1d\xfc\xc3\x3d\xf7\x86\x9a\xfb\xa5\x1f\xd4\x0e\xfc\xb9\x80\
\x3e\x0a\x70\x61\x52\x8f\x3f\x5f\x21\xcf\x5f\x4b\xf0\xaf\xec\xfa\
\xb4\x6b\x7c\xee\xf8\x0e\x5e\xff\xc9\x73\x47\x02\x8d\x96\x47\x76\
\x6f\xe7\x91\xe7\xb7\x97\x3d\x8f\xb5\x92\x80\x0e\x7c\x23\x10\x0b\
\xf3\x45\xff\x4d\xe8\x5c\xd2\x06\x42\x20\x04\x8c\x8d\x4e\x31\xbd\
\xcf\xf9\xf3\x49\x76\x23\x99\x54\x68\x04\x70\x06\xfe\xbf\x7b\xc9\
\xf5\x8a\x37\x3f\x20\xca\x49\x07\x4a\x66\x8c\x04\x3f\xf5\xdc\x80\
\x99\x69\x2f\x0e\x10\x8b\xc5\xca\xcd\x9d\x80\xef\xe3\xd6\x06\x78\
\x35\x01\x61\x57\x15\x05\x7e\x57\x5c\x2b\xe0\xfe\xc6\x5b\x01\x6f\
\xbe\xfa\x0e\xde\x77\xe3\x5d\xde\x79\xab\xbe\xc8\xc4\xed\xfc\x28\
\xe0\x87\x01\x33\xcc\xad\xa8\x04\x78\x51\xe5\xef\xd4\x0b\xec\x6a\
\x44\x11\xdc\x56\x8d\x18\x84\x4d\x02\x6f\xf8\xd4\xb9\x21\x81\x46\
\xcb\x47\xbf\xe6\x68\xff\x60\x5f\x46\x91\x80\xd6\x0e\xe0\x3c\xf3\
\xda\x73\x6f\xb8\xbf\x03\x19\x32\x65\x41\x4f\x5f\x87\xd3\xef\x82\
\x89\xd1\x29\xa6\xf6\x82\x30\x39\x20\xe0\xb4\xe3\xff\xe7\xd4\x19\
\xa6\xff\xf4\xcb\xad\x5d\xfa\x3d\x5f\xc3\x1d\x18\x94\x43\x31\x23\
\x8b\x4c\x1a\xa9\x92\x1b\x70\xf2\xe4\xa0\xa7\x15\x96\xaf\x5a\x56\
\xce\x74\x11\x3e\x50\xac\x85\x32\x50\xfb\x1a\xb5\x02\xf8\xdd\xe5\
\x6f\xef\x6f\xbc\x15\x00\xf0\x97\xef\xfd\x04\x57\xe9\x53\x78\x55\
\xd0\xd6\x35\x01\x3f\x2a\x88\xe8\x80\xbe\x26\xc0\x87\x9d\x2b\xcc\
\x1a\x69\xc4\x52\x8d\x08\x22\x88\x70\xd7\xb1\x0b\x9f\x04\x1e\xd9\
\xb3\x9d\x47\xf6\x6c\x2f\x7f\x1e\x29\xfd\x0e\x0d\x0c\x3a\xe2\x0b\
\x7c\xeb\x24\x10\x28\x8a\x73\xcd\xff\xe6\xce\x94\xe7\xfb\x4f\x1e\
\xb4\xcd\x7f\x23\xce\x6e\x25\x19\x47\x31\x8d\xfd\x92\xb3\xd2\x00\
\xa0\x33\x90\xb9\xba\x00\x0a\x7b\x8e\xc0\xbc\x52\x76\x1c\x00\xc1\
\x11\x11\x63\x60\x6a\x1f\x0c\x0e\x9d\x40\x38\x26\xcc\x8a\xbe\x65\
\xbe\x6c\x80\x5e\x1c\xe4\x33\x87\x62\x76\x20\xd0\xd0\x4b\x83\xf5\
\xc6\xae\x01\xfc\x00\x13\x99\x71\xfe\xee\x81\xc6\x5b\x01\xed\x4d\
\x1d\x7c\xee\x97\xef\x2d\xc5\x03\xce\x04\xf8\x41\xf3\xbe\x1e\xd0\
\xeb\x7f\x3f\x0a\xe8\xf3\x29\xb5\x58\x1d\x81\xf5\x17\x3a\x09\x7c\
\xf4\x6b\x1f\x29\x6f\xdf\x5a\x49\x40\x94\x5b\xbc\x7a\xe5\x9f\x4e\
\x02\x00\x4b\xd7\x76\x79\xca\x73\xe4\xe4\x98\x1b\xfd\xcf\x19\x49\
\x76\x2a\xc9\x18\x8a\x29\xec\x97\xa0\x95\x02\x80\x87\xcf\xae\x0b\
\x00\x25\x37\xa0\xe0\xcc\x4a\x32\xa5\x8a\x8c\x1b\x71\x9e\x29\x8c\
\xc1\xd8\x9e\x22\x23\x23\xc3\x08\x21\x68\xed\x6c\xa5\xa5\xa5\xd5\
\xaf\xfd\xc3\x3e\x9d\x25\xd6\x12\xd2\xc8\x81\x80\x9f\x27\x11\x9d\
\xf2\x7f\xff\xed\xc3\x0c\x9c\xea\x6f\xf8\x83\xb0\xb9\x77\x0b\x5f\
\xfb\xcd\xff\x2e\xf7\xf3\xe6\x00\x7c\x4f\xdb\xd7\x12\x13\x88\x02\
\x7c\x98\xcc\xd5\xef\xaf\x35\x1e\x50\xcb\x39\x43\xee\x61\xd7\x05\
\xea\x0e\xdc\xff\xd4\x7d\xb6\xef\x1f\xbc\x4f\xa8\x89\x04\xa2\xdc\
\x5e\x3d\x0d\xee\xae\x57\xc5\x92\xf9\x6f\x59\x16\x63\x83\x53\x4c\
\xed\x03\x61\xf2\xbc\x52\x9c\x54\x92\x49\xa5\x98\x55\xa5\x00\xe0\
\x19\x69\x7f\xf7\x16\xea\x93\xc3\x5e\xd0\xc1\x9d\x21\x28\x87\x64\
\x4a\x59\x8c\x99\x4d\xfc\x04\x41\x6e\x62\x07\x9c\x18\x3a\x01\x02\
\x0c\x21\x58\x7d\xc9\x2a\xbb\xfd\x42\x0a\x21\x08\xb0\xa2\x99\x72\
\x18\x31\xd0\xd8\x35\x8f\xb2\x72\xe4\xf7\xb6\xfd\x4e\x63\x9f\x04\
\x47\x6e\xda\x70\x2b\x9f\xfb\xc5\x7b\xfd\xe7\xac\x15\xf8\x51\xda\
\xbe\x92\x56\xad\xe2\x67\x47\xba\x01\x8d\xb2\x06\x6a\x71\x39\x2a\
\x1d\xa3\x93\xc0\x89\x1d\xbc\xe1\x6f\x2e\x2c\x12\xf8\xfd\x6d\xbf\
\x53\xd7\x73\x17\x24\x81\xa0\xd9\x1f\xfc\xae\x93\x44\xf7\xca\x0e\
\x62\x09\xd3\xd6\xfe\x43\xe3\xb6\xf6\x07\x8c\x24\x4f\x2b\x8b\x31\
\x14\x93\xb8\xe6\xbf\xab\xfd\x0f\x9d\x7d\x17\xc0\x95\x32\x37\x40\
\x15\x19\x13\x31\x9e\x9f\xda\x0b\x27\x5e\x1c\xf6\x66\x0c\x5e\xd9\
\xb7\x1c\xd3\x8c\x95\xcc\x1e\x3d\xf8\x11\x12\x13\x88\x35\xfb\x1b\
\xb6\x5e\xf0\x63\xc0\x03\x4f\xdf\xc7\xa3\x2e\x73\x37\x58\xde\xf7\
\xca\xbb\xf8\xb5\xd7\xfd\x76\xe9\x01\xf7\x7a\x1b\x2a\x02\xbf\x92\
\x89\x5f\x0b\x80\xa3\xf6\x0d\x93\x33\xb5\x04\xe6\x42\x40\x35\xec\
\xb7\xeb\xc4\x0e\xde\xf0\xb7\x17\x06\x09\x7c\xf4\xdf\x3f\xc2\xc0\
\x70\xff\x9c\x9e\x3f\xcf\xc2\xd3\xac\xdb\xa0\xcb\xab\xaf\x57\x12\
\x7a\x56\x77\x7a\xcf\xc9\xa9\xe3\xa3\x4c\xec\x04\x04\x13\x46\x9c\
\x3d\x48\x26\x50\x4e\xfe\xbf\x41\xda\xdf\xbd\xf4\xfa\xe5\x90\xef\
\x6d\x41\x05\x20\x83\x62\x52\x5a\x8c\x99\x69\x1e\x07\x98\xdc\x09\
\x47\x8f\x1d\xf1\xa2\xe6\xbd\x6b\x7b\xc1\x70\x1a\x20\xa4\x61\x82\
\x04\xe0\x4d\x17\x36\xc7\xc6\x07\xf8\xc0\xa7\xef\x6e\xe8\x03\xa1\
\xcb\x5f\xfc\xfc\x27\x78\xdf\x2b\xee\x2a\xad\x38\x13\xe0\x57\x03\
\x7d\xad\x05\x43\x8d\xb4\x04\x2a\xc5\x1f\x6a\xbd\x96\x08\x32\xd8\
\x75\x62\x07\x6f\xf8\xbb\xf3\x9b\x04\x06\x86\xfb\xf9\xbb\x6f\x7f\
\x72\xee\x4a\x48\xe0\xcf\xef\x07\x7c\x7f\xaf\x06\xc0\xf9\x9e\x4e\
\xa7\x68\xef\x69\xc6\x10\x82\xa9\x89\x19\x26\x0f\x15\xc8\x8d\x80\
\x91\xe0\x51\x65\x31\xa6\x14\x53\x4a\x9f\xfe\xeb\x9c\x12\x40\x49\
\x5c\x2b\x20\xa7\x14\x33\x4a\x32\x21\x62\x1c\x14\x31\x06\xc6\x9f\
\x85\xc1\x53\x27\xb0\xac\xa2\x6d\x05\xac\x5e\x8e\xb2\x02\x60\xaf\
\x14\x0b\x68\x3e\x33\xf0\x03\x0c\x8c\xf4\xf3\xd1\xff\xf8\x48\x23\
\xda\x29\x54\x3e\x77\xd7\xbd\x36\x09\x84\xf9\xf8\x51\xc0\x8f\x02\
\x69\xd4\x7e\x75\x5a\x01\x51\x65\xbe\x8d\x5a\x6a\xbd\x8e\x6a\xf7\
\xb1\xeb\xf8\xf9\x4d\x02\xf7\x7c\xf6\x6e\x3b\x9b\x54\x8f\x1b\xaa\
\xef\x6b\x38\x15\xae\x7a\xc0\x2f\x22\x03\x86\x82\xa5\xeb\xba\xbd\
\x39\x02\x4e\x1d\x3f\xcd\xd4\x7e\x40\x90\x33\x53\x3c\xa5\x2c\xc6\
\x90\x4c\x11\x9c\xfd\xe7\xc5\x73\x11\x03\xf0\x8b\xa2\x34\x5d\x78\
\x06\xc9\xa4\x2a\x30\x66\xc4\x79\xba\x30\x0e\xa3\xcf\x14\x19\x1e\
\xb1\x0b\x83\x9a\x5a\x52\x2c\x5f\xb9\xdc\xcf\x88\x61\xf5\x01\xb1\
\x80\x15\x30\x47\xf0\xbb\x77\xf6\xd1\xff\xfc\x30\xbb\x06\x76\x9c\
\xf9\x13\x11\x21\x9f\xbb\xf3\x5e\x9f\x25\xe0\xcd\x4f\xa7\x4b\x2d\
\x26\x7e\x35\xd0\x87\x00\xad\x22\x40\x83\xc7\xcf\x75\x09\xfe\xa9\
\xa8\x73\x56\x23\x84\x90\xfb\x3b\x5f\x49\xe0\xef\xbe\xfd\x49\x1e\
\xd9\xbb\x3d\x1c\xd8\x95\x48\x40\x5b\x74\x65\x66\x68\x29\x3f\xe1\
\x4e\x86\xab\x11\x43\x3c\x9e\x60\x71\x5f\x87\xf7\xda\xaf\x91\xfe\
\x49\x37\xf7\x3f\x6f\xc1\x3f\x57\xe6\x4e\x00\x2f\x96\x05\x03\xb3\
\x4e\x30\xf0\xb4\x99\xe6\x09\x0c\x26\x4e\x3f\x01\x87\x8e\x1c\xf2\
\x1a\x6d\xfd\x15\x6b\x51\x32\xdc\x1c\x0a\xf3\x95\xe2\xed\x5a\x23\
\xeb\x9f\x35\x82\xdf\xed\x8c\xf9\x7e\xf9\xc6\xe7\x7e\xe1\x5e\xde\
\x77\xc3\x5d\xe1\xc1\xbd\x5a\xb5\x7d\x0d\xda\xb3\x22\xd8\x2b\xf9\
\xfb\x67\x12\x03\xa8\xc1\x62\xa9\x4a\x08\x51\x64\x20\xce\x3f\x12\
\x18\x18\xe9\xe7\xa3\xff\xf9\x91\xca\xda\xbd\xc2\x36\xcf\xf4\x8f\
\xd2\xfc\x01\xe5\x87\x84\x15\x97\x2e\xf6\xb4\xff\x89\x81\x53\xb6\
\xf6\x07\x62\xcd\x3c\x14\x19\xfc\x7b\xe1\xfc\x70\x01\xc0\xb1\x02\
\x9c\x60\xe0\x8c\x92\x8c\xcb\x02\x23\x46\x9c\xc7\xb2\x83\x70\x7a\
\x7f\x86\xc1\x93\x83\xb6\x15\xd0\x9c\xa2\x7b\x71\x4f\xd9\xa8\xa8\
\x60\x99\xa4\xe1\x30\xa5\x99\xd2\xea\x02\x8c\xc0\xa7\x76\x07\x95\
\xc0\x8f\xb0\xdf\x7a\xfb\xd1\xff\x9a\x3f\x57\x00\xe0\x73\xef\xbf\
\x97\x3f\x7e\xe3\x87\xcb\xce\x5d\x17\xf0\xab\x80\xbe\xec\xf8\xe0\
\xfe\x95\x00\x5f\xaf\xe6\xaf\x44\x22\x55\xe2\x01\x55\xc9\x20\x70\
\xbf\xbb\x06\x77\xf0\x86\xbf\x3f\x3f\x48\xe0\x9e\x7f\xb8\xbb\x74\
\x1d\xb5\x90\x40\xb0\xef\xc0\x9b\xe3\xc2\x0b\xfa\xb9\xae\x80\xee\
\xfb\xbb\xcf\xba\x32\x59\xb4\xdc\x1e\xf9\x27\xa5\xc5\xf0\xd1\x31\
\x26\x76\x82\x88\x71\x00\x18\x50\x92\x09\x35\x0f\xc1\x3f\x57\xce\
\x8c\x00\x5e\xf0\x59\x01\x05\x60\x16\xc9\x84\xb2\x18\x35\xd3\x3c\
\x81\x20\x37\xfc\xb0\x1d\x0b\x70\x1b\xac\x6f\x7d\x6f\x79\x2c\xc0\
\x6d\xb0\x90\xc0\x60\xac\x15\x3f\xa8\xf5\xcf\x10\x32\x88\xea\xb8\
\x8f\xfe\xf7\x87\x79\x74\xdf\xf6\x46\xb6\x5d\x99\xfc\xf1\x1b\x3f\
\xc4\xe7\xde\x7f\x6f\xb4\xe6\x73\xaf\x2d\x0a\xf8\x95\x40\x1f\x05\
\xcc\x5a\x62\x0a\x8d\x90\x28\x72\x10\x21\x9f\xd5\xc8\x20\x64\xbf\
\x5d\x27\x76\xf0\x86\x4f\x9f\x5b\x12\xf8\xd2\x63\xdb\x78\x64\xff\
\xf6\x68\x13\x1f\xca\x49\x20\x90\x05\xf2\x4c\xfe\x98\x1f\xf4\x65\
\xe5\xef\x31\x7b\xd4\xdf\xd2\x35\x5d\xc4\x93\xf6\xb4\x5f\x43\xc7\
\x47\x99\xd8\x05\x32\x07\x46\x92\x1f\xcb\x22\x63\x48\x26\x51\x64\
\x94\xad\xfd\xcf\x33\x02\x28\x89\xa4\xf4\xf6\xe0\x8c\x92\x4c\x29\
\xc5\x29\x23\xc1\x8f\x66\x0f\xc3\xf1\x5d\x63\x8c\x4d\xd8\xb3\x05\
\x75\xf5\x74\xd2\xd1\xd1\x59\x35\x45\xe2\x36\x60\x2c\x1d\x98\x37\
\x30\x40\x02\x22\x48\x06\x51\x1d\x66\xc0\x07\xfe\xf1\xee\x79\x9f\
\xb5\xe6\xbd\xd7\xdd\xc5\xb7\x7e\xf3\x21\x7b\x40\x4a\x05\x90\x87\
\x69\xc5\x8a\xa0\xaf\x02\xb4\x32\x39\x13\xbf\xbf\x86\x58\x40\xe8\
\x35\x46\x59\x0b\xee\xae\x55\xac\x82\x5d\xc7\xcf\x2d\x09\xbc\xf9\
\x65\x77\xb0\xb9\x77\x4b\x79\xfb\x46\x3c\x7b\xee\x3d\xe9\xcf\x9e\
\x11\x54\x66\x21\x69\x3f\x37\x1e\x60\x28\x93\x65\xeb\xbb\x31\x84\
\x40\x4a\x8b\xc1\x43\xa3\x4c\xec\x00\x61\x72\xd4\x48\xb0\x0f\x65\
\xfb\xfe\xf8\xdf\xfc\xa3\x38\x78\x3e\xc4\x00\x5c\x39\xe8\xa5\x04\
\xed\x39\x02\xec\xc1\x0a\xe3\xaa\xc8\x88\xd9\xc4\x23\x08\x72\xc3\
\xdb\xe1\xf0\x40\x29\x16\xb0\xe9\xea\x8d\x28\xcb\xcf\x90\x86\x19\
\xd1\x68\x26\xc4\x5b\xf1\xfb\xd7\x6e\xa0\x25\xcc\x12\x88\x00\x3f\
\xd8\xfe\xdd\xcf\xff\xcd\xdb\x1a\xd5\x76\x91\x72\xd3\xfa\x5b\xf9\
\xf1\xef\x3d\xcb\xe6\x15\x5b\xc2\x81\xaf\x3d\x60\x15\xe7\x0b\xac\
\x62\x6a\x97\xdd\xb3\xa8\x73\x9f\x7a\x00\x5f\x2b\x29\x84\xc5\x3d\
\x2a\x04\x2f\x83\xf7\x7b\x2e\x49\xa0\xbd\xa9\x83\xef\xfc\xd1\x43\
\xb5\x91\x40\x88\xf5\xe9\x0b\xf4\xc5\x02\xdf\x75\xab\xc0\xc9\xfb\
\xaf\xb8\x64\x31\xf1\x84\xe9\x44\xfe\x47\x39\xfd\xac\x53\xf7\x9f\
\xe0\xbb\xaa\xc0\xa8\x92\x8c\x03\xd3\xf3\x11\xfc\xd3\x2e\xbb\x61\
\x22\x81\xa2\x82\xac\x52\x76\x30\x50\x49\x4e\x18\x71\x1e\x9d\xed\
\x87\xe3\xcf\x95\xac\x80\xa6\xe6\x14\xcb\x96\x2f\xb7\x1b\x2d\x16\
\x00\xbf\xb3\xa0\x33\x66\xc2\x29\x11\xd6\x3a\xc2\xf7\xf0\xe8\x77\
\x13\xe1\x97\x81\x7d\xcc\xa3\xfb\xb6\xf3\x67\xf7\xcd\x6f\x3c\x00\
\xa0\x77\x51\x1f\x3f\xfa\xbd\x67\xf9\xb5\x5b\x7e\x3b\xf4\xfa\xaa\
\x4e\x1b\x56\x25\x97\x1e\x0a\xc4\x6a\x3e\xfc\x99\xc6\x00\x2a\x9d\
\x2f\xea\x3a\xa3\xdc\x05\xad\x4f\x82\x44\x30\x70\xba\x9f\x47\x5f\
\xdc\x3e\xef\x7d\x14\x26\x73\x21\x01\xaf\xe8\x27\x5e\x6e\xe6\x1b\
\x41\xa5\xe6\x10\x41\x22\x96\x60\xf9\x7a\x3b\xf5\x27\x2d\x8b\xc1\
\xfe\x92\xf6\x17\x71\x9e\x53\x92\x11\x65\x17\xff\xd8\x16\x80\x70\
\x86\xfe\x1e\x38\x1f\x5d\x80\x03\xd1\x56\x80\x91\xe2\x7b\xae\x15\
\xf0\xfc\x81\xe7\x4b\x19\x81\xcb\xd7\xa2\x8a\x81\xc6\x0a\x56\x4a\
\x69\xc4\x10\x6b\xb6\xf7\x29\x03\xbf\xde\x19\x41\x32\x08\x80\xdf\
\x3d\xe6\xa3\xdf\xf8\x30\xf7\x3f\x7b\x5f\x23\xdb\x31\x52\x3e\x7e\
\xc7\x27\xf8\xd6\xaf\x3d\xe4\xcd\x2f\x58\x75\x12\x91\x5a\x40\x1f\
\x5c\x57\x6f\x0c\xe0\x4c\x4c\xfd\xb0\xf3\x55\xba\xc6\xb0\xfb\xac\
\xe0\x1e\x6c\x5e\xbe\x85\x6f\xfd\xda\x43\xbc\xe9\xca\x3b\xe6\xbf\
\x73\x22\x64\x2e\x24\xe0\xe6\xfc\x8d\x30\xe0\x07\x94\x9b\xb2\x60\
\xc5\xfa\xc5\x5e\x59\xf8\x89\x23\xa7\x38\xfd\x53\x4f\xfb\x7f\x4f\
\x15\x19\x51\x92\x31\x60\x4a\xb9\x03\x7f\xce\x60\xda\xaf\x4a\xd2\
\x48\x0b\x00\x42\xac\x00\x24\xc7\x5d\x2b\xe0\xf4\xc1\x0c\x83\xa7\
\x06\xed\x77\x9e\x37\xa7\x58\xbb\x6e\xad\x3d\xef\x59\x70\x84\x94\
\xde\x80\x1a\x39\xc4\xdb\xeb\x00\x7f\xd0\x4f\x0b\x74\xde\x3d\xff\
\x72\x37\xbb\x8e\xee\x98\x8f\x36\x2d\x93\x9b\xd6\xdd\xca\xf3\x7f\
\x7c\x98\x37\xeb\x0f\x75\x10\x08\x95\x8a\x7e\xc2\x5c\x9a\x6a\xd5\
\x78\x73\x01\x79\x2d\xc7\x87\xed\x1b\x76\x3d\xb5\x1e\xa3\xdd\xf7\
\x1f\xbf\xee\xc3\xfc\xe8\x7f\x3e\xcb\xe6\xe5\x5b\xce\x4a\xbf\x54\
\x92\x9a\x48\xc0\xfd\xa9\x81\x3f\x68\x05\x10\x24\x01\x13\x52\xc9\
\x14\x4b\xfa\xec\x38\x58\x3e\x97\x67\xb0\x7f\x84\xf1\x1d\x9e\xf6\
\xdf\xa5\xa4\x63\xfe\x07\xb5\xff\xfe\xf3\xd9\x05\xd8\x1f\x62\x05\
\x48\xc6\x95\xc5\xa8\x91\xe2\xbb\x08\x72\x43\xdf\xb6\xeb\x02\x2c\
\xab\x88\x10\x76\x46\xc0\x90\xb1\xd0\x40\x49\x59\xce\xd4\x49\x0b\
\xc6\x9a\xb4\x4e\xd0\x3f\xf5\x3b\x0a\xf8\x9b\xa5\x9e\x2a\xad\x9f\
\x98\x1d\x3f\xab\x2f\xb5\x68\x4f\x77\xf0\xaf\x77\xfd\x37\x0f\xfe\
\x8f\x87\xe8\xed\xea\xab\x5d\x8b\x46\xf8\xd1\xa1\xfb\x57\x03\x77\
\x23\xa4\x56\x42\xa8\x95\x0c\x0c\x3b\x66\xf2\xf8\xef\x3c\xcb\x1f\
\xbd\xb6\xf1\xd3\x78\x9d\x89\x84\x92\x40\xe0\xda\xdd\x8a\x3f\x5d\
\x59\xe9\x7e\xbf\x4f\x89\xc5\x40\xe6\xa1\x6f\xe3\x72\xcf\x6d\x38\
\x71\xe4\x94\x1d\xf9\xb7\xb5\xff\x77\x54\x91\x61\x77\xd8\xaf\x72\
\x2b\xff\xe6\x49\xfb\x43\xe3\x2d\x00\xd0\xad\x00\x98\x52\x92\x31\
\x24\xc7\x8d\x24\xdf\xc8\x9e\x84\xa1\x9f\x64\x38\x72\xc2\x1e\x23\
\x10\x4f\xc4\xd8\xb0\x71\x83\xbf\x38\x28\x24\x78\xa2\x37\x62\xbc\
\x15\xff\x9c\x01\x61\x96\x40\x18\xf8\x43\xac\x84\x89\xcc\xf8\x59\
\x9f\xcf\xfe\xa6\x75\xb7\xb2\xe7\x0f\x0f\xf3\xc7\xaf\xfd\x30\xed\
\xa9\x8e\xd2\x86\x30\x22\xa8\x46\x10\xb5\x9a\xec\x51\xf1\x85\x5a\
\x8b\x81\xea\x39\x4f\x70\xbd\x11\xb1\x1e\xe8\xed\xec\xe3\xb3\xef\
\xb8\x97\x07\x3f\xf0\xd0\x79\xa1\xf5\xc3\xc4\x47\x02\x81\x98\x12\
\xc2\x8e\x4f\xf9\x4c\xff\x40\x0a\xd0\x88\x39\x04\xe1\xcc\x83\xd1\
\xb5\xb8\x93\xf6\x9e\x16\x30\xec\x9a\xff\x93\x2f\x8e\x71\xfa\x49\
\x30\xe2\x3c\x27\xe2\xec\x50\x92\x11\x14\x13\xc0\x0c\xba\xf6\xdf\
\xd7\x78\xed\x0f\x8d\x26\x80\x7d\x65\x56\xc0\x8c\x52\x4c\x28\x8b\
\x11\x33\xcd\xc3\xc2\x60\x68\xe8\xdb\x30\x70\xe8\x08\xd9\x5c\x16\
\x21\x60\xe5\xea\x65\x74\xb4\xda\xe6\x90\xaf\x01\xc3\x1a\xd2\x59\
\x17\x6f\x23\x5a\x53\xd6\x08\x7e\x77\x5f\xef\xa5\x16\x67\x39\xea\
\xfc\x47\xaf\xfe\x10\x3f\xfa\xcd\x67\x79\xef\xd5\x77\x45\x03\x5f\
\x5f\x57\x2d\xf2\x1e\x66\x56\x6b\xeb\xaa\xce\xf8\x1b\xd8\x1e\x1a\
\x9b\xa8\x96\x8d\x88\xba\xd6\x80\x9b\xd0\x9e\xea\xe0\x8f\x5f\xf3\
\xe1\xd2\xfd\x9f\xe7\xe2\x91\x80\x3b\xe5\xb8\xeb\xf7\x07\x95\x96\
\xf6\xac\xea\x03\x7d\xbc\x5a\x97\xa2\xc9\xaa\xcb\x96\x78\xed\x7d\
\xe2\xc8\x29\x46\x1e\xb3\xdb\xc4\x48\xf2\x4d\x55\x64\x48\x49\x46\
\xb1\x23\xff\xf3\xae\xfd\x61\x7e\x2c\x00\xd0\xac\x00\x14\xd3\x4a\
\x32\xaa\x0a\x0c\x19\x29\xfe\xcb\xca\xc1\xa9\x1f\x15\x39\x70\x78\
\xbf\x97\xda\xbb\xec\xca\x4b\xed\x80\x60\x58\x31\x90\x5e\x4a\xe9\
\xac\x37\x93\x10\xd7\x27\x0e\x99\x23\xf8\xdd\x63\x77\x1d\xdf\xc1\
\xcf\x7f\xee\x6d\xf3\xd9\xce\xa1\xe2\x6a\xc0\x3d\xbf\x77\xd8\x06\
\x42\x05\x6d\x59\x26\x11\xc1\xbf\x32\x70\xeb\x80\xae\x16\xa4\x0b\
\xfe\x9d\xa8\xbf\x65\x44\x9c\xbf\xd2\xdf\xc4\x01\xfe\xab\x3f\xcc\
\x9e\xdf\x3f\xcc\x1f\xbd\xfa\x43\x7e\x0b\xe8\x3c\x97\x20\x09\x78\
\xd3\xd9\xc7\xfd\xca\x29\x98\xf2\x73\x17\x24\xac\x5c\xb3\x84\x74\
\x73\x02\x61\xc0\xe8\xa9\x31\x86\xf7\x4d\x33\x73\x08\x8c\x04\xdb\
\x11\xbc\xa0\x24\xc3\x28\x26\x02\xb9\x7f\xc5\xde\xf9\xd1\xfe\x30\
\x1f\x04\xb0\xd7\xff\x0a\x31\x05\xb3\x4a\x31\x21\x2d\x86\x8d\x38\
\xcf\x88\x18\x7b\x87\xb7\xc3\x89\xc3\xc3\x5e\x5a\xb0\xad\xa3\x95\
\x35\x6b\xd6\x02\x94\xd5\x01\x18\x21\x6e\x81\x11\xb7\x09\xc0\x4c\
\x38\xe7\x0c\x9a\x66\xfa\xdd\x55\x01\xbf\xbb\x3c\x7a\x60\x3b\xf7\
\x7c\xf1\xee\xf9\x6a\xe7\x8a\xd2\xdb\xd9\xc7\x67\xdf\x7e\x2f\x7b\
\x7e\xf7\x30\xbf\xfa\x8a\xdf\xa6\x3d\xdd\x51\x73\xc0\xad\xe6\xd9\
\x82\x1b\x21\x51\xe7\xac\x52\xa0\xd4\xdb\xd1\x67\x03\xff\x77\x0f\
\xf3\x47\xaf\xba\xb0\x80\xaf\x8b\x47\x02\x7d\x5b\x30\x92\x01\x17\
\x35\x50\x00\xa4\xa7\xb6\x31\x20\x19\x4b\xfb\xd2\x7e\x47\x0e\x9f\
\xe0\xd4\x0f\x40\x18\x4c\x1a\x49\xbe\xe3\x68\xff\xd3\xc0\x34\xfe\
\x29\xbf\xe6\x55\x84\x52\xe1\xe4\x22\xc4\x19\x9c\xf9\x0a\xef\x31\
\x88\x01\x71\x61\xd0\x8a\x41\x8f\x91\x60\xb5\x30\xb9\xba\x38\xcd\
\x1f\x35\xf7\x91\xbc\xf4\x57\xd2\xbc\xf2\x9a\x57\xa2\x24\xe4\xf3\
\x45\x7e\xf4\xc8\x93\x14\x44\x06\x59\xc0\x5e\x8a\xf6\x34\x49\xee\
\x77\x77\x51\xee\xf7\x02\x64\xc7\xec\x92\xca\x32\xed\x5f\x25\x3e\
\x10\xe6\x42\x08\x01\xef\xbb\xfe\x2e\x3e\xfb\xbe\xc6\xbf\x51\xa6\
\x1e\x99\xc8\x8e\xf3\xc0\xde\xfb\xf8\xf4\x8f\x3f\xc5\xae\x93\x3b\
\x7c\x80\xaa\x1b\xe4\x67\xfa\x00\xd5\xa3\x7b\x94\xb3\xbb\x73\xcc\
\x4d\x7d\xb7\xf2\xde\x2d\x77\xf2\xde\xad\x77\xcd\x7f\xa3\x9d\x45\
\x99\x98\x1d\xe7\xf6\x4f\xde\xc6\x9e\xc1\x1d\x3e\xcd\xef\x73\x57\
\x5d\xdf\x3f\x06\x32\x03\x9b\xae\xbf\x94\x96\xce\x14\x86\x01\x87\
\x0f\x1e\xe5\xe0\x37\x6d\xdf\xdf\x4c\xf1\x65\x61\xf0\x2d\x99\xe7\
\x90\xb2\x38\xa1\x14\xe3\x28\xcd\xfc\x7f\xfe\xcc\xb4\x7f\x14\xbe\
\x5d\x99\x1f\x02\x00\x97\x04\xec\xa1\x0f\x82\xb4\x30\xe8\x14\x31\
\x56\x98\x29\xd6\xcb\x1c\xef\xb0\x72\xbc\x71\xf9\x1d\xb0\xf5\x4d\
\x6b\x59\xb3\x6a\x2d\x52\xc2\xe8\xf0\x18\xcf\x3c\xfd\x8c\xdd\x68\
\x11\xe0\x77\xd7\xbb\x24\x50\xcc\x43\x6e\xdc\xbd\x66\xf7\xe2\x29\
\x07\x7b\xb0\x1a\x0f\x7c\x25\x9c\xfa\xed\x6e\x5e\xb5\xc5\x2e\xe7\
\x4d\x77\x9c\x59\x1b\x34\x40\x76\x9d\xdc\xc1\x97\x77\x7c\x9e\x6f\
\xee\xbf\x8f\x23\xe3\xfd\x95\x77\xae\x16\xac\xab\x47\xd4\xdc\xb6\
\xf5\x76\xf4\xf1\xab\xd7\xfd\x16\x6f\xba\xec\x0e\x7a\x3b\xfa\xce\
\x6a\x5b\x9d\x4d\x99\xc8\x8c\xf3\x96\xbf\xbf\x8d\x3d\x43\xe5\x24\
\xe0\x82\xdf\x70\xc7\xfa\x2f\x5b\x4a\xef\x65\x4b\x30\x4c\xc5\xd4\
\xe4\x0c\xbb\x7f\xfc\x22\x03\x9f\x07\x61\x70\x34\xd6\xcc\x27\xad\
\x1c\xfb\x55\x91\x01\xc7\x05\xb0\x2d\x00\xe5\x94\xfd\xee\x99\x5f\
\x02\x88\xcd\x5b\x0b\x09\x70\x6e\xa2\x88\x33\x52\x50\x48\xc6\x64\
\x81\x53\x66\x33\xdf\x97\x05\xae\x1e\xfa\x0e\x4b\x5e\xb8\xec\x10\
\x3d\x5d\x8b\x69\x4e\xb7\xd0\xd5\xdd\xc9\xaa\x15\xbd\x1c\x3b\x75\
\xc4\x1e\x2c\x01\xa1\x0f\x9b\x1b\x15\x11\x02\x4c\xc3\xce\x0c\x14\
\xa7\x03\xe7\xae\x96\x82\x8a\x00\x3f\xa2\x34\x63\xcd\xb7\x7e\xfd\
\xdc\x93\xc0\xe6\xa5\x5b\xd8\xfc\xfa\x2d\x7c\xfc\xf5\x9f\xe0\xb9\
\x93\x3b\xf8\xf2\xce\xcf\xf3\xc0\x3e\x87\x0c\xa2\xfc\xee\xb0\xef\
\xf5\x4a\xf0\x58\xa5\x7d\x06\xb6\x6d\x5a\xb2\x85\xf7\x5e\x75\x27\
\x37\xf5\xdd\xca\xa6\xa5\x5b\xce\x69\x7b\x9d\x4d\x09\x8b\x4f\x05\
\xcb\xd9\x13\x2a\x4d\xaf\x13\xf8\xb3\x2c\xc9\xe1\x83\x47\x39\xf5\
\x03\x40\x90\x33\xd3\xfc\x9b\x2c\x72\x12\xc9\x28\x8a\x29\x47\xf3\
\xdb\xd3\x7d\x37\x00\xfc\x35\xdd\xc3\xbc\x59\x00\x00\x1b\x35\x57\
\xc0\xb6\x02\x3a\x84\xc9\x32\x23\xc1\x5a\x04\x37\x14\xa7\xf9\xf5\
\xd6\xcb\xe0\xf2\xbb\x5b\x79\xf9\x96\xeb\x51\x52\x91\xcf\x15\x79\
\xea\x89\x67\xc8\x58\x53\xb6\xf6\xb7\x22\xac\x00\x4b\x73\x05\x8a\
\x90\x9f\x04\x2b\x4b\xd5\x8a\xc0\x60\xbc\x40\x44\x6d\x13\xb0\x79\
\xc5\x16\xbe\xfa\x4b\xff\x4d\xef\xa2\xbe\xf9\xee\x87\xba\xe5\xc8\
\x78\x3f\x8f\x0e\x6c\xe7\xb1\x81\x87\xd9\x35\xb4\x83\xe7\x86\x76\
\x9c\xb5\x73\xdf\xb4\xfa\x56\x36\x2d\xd9\xc2\x8d\xab\x6f\xe1\xa6\
\xd5\xb7\x5e\xb0\x3e\xfd\x5c\x65\x22\x33\xce\x5b\x3f\xe7\x68\x7f\
\x7d\xd2\x8f\x60\x25\x60\xd1\xe4\xf2\xad\xeb\x68\xe9\x4c\x23\x0c\
\x38\x7a\xf8\x04\xfb\xbf\x3d\xcc\xc8\xa3\x60\xc4\xf9\xae\x91\xe0\
\xbf\x65\x9e\x17\x94\xc5\x31\x27\xfd\x37\x83\x72\xa6\xfc\x6a\x10\
\xf8\xcf\x9d\x0b\xe0\xca\x46\x04\x02\x13\x88\x23\x68\x16\x06\xdd\
\x22\xc6\x4a\x33\xc9\x7a\x2b\xc7\xbb\x65\x9e\x9b\x57\xbd\x0b\x36\
\xbf\x6a\x9d\xe3\x0a\x28\x26\xc7\xa7\x78\xe2\x89\x27\x31\x12\x01\
\x9f\x5f\x5f\x2c\xbf\x3b\xa0\x8a\x90\x1d\xb7\xd7\x7b\x12\xe6\xf7\
\xd7\x08\x7e\xf7\xb3\xbd\xa9\x83\x6f\xfd\xea\x43\xf6\xc0\x9e\xf3\
\x5c\x1e\x1b\xd8\xce\xae\xa1\x1d\x1c\x19\x1f\xe0\xb9\xa1\x1d\x8c\
\x67\xc7\x6d\x62\x98\x63\x57\xde\xd4\x7b\x2b\x00\x37\xae\xbe\x95\
\xde\x8e\xd5\xac\x6e\xef\xe3\xc6\xd5\xb7\x9e\xeb\xdb\x3c\xa7\x32\
\x91\x19\xe7\x0e\x07\xfc\xbe\xe2\x9f\x00\x01\x50\x84\x55\xbd\xcb\
\x59\xbe\xae\x07\xc3\x80\xc9\xa9\x69\x76\xff\xf8\x45\x8e\x7e\x15\
\x54\x91\xa3\xb1\x66\xfe\xd6\xca\x71\x50\x15\xe9\x57\x92\x53\x48\
\x26\x71\xc7\xfc\xef\x6e\x5c\xea\xef\xdc\x13\x00\xc0\x95\x18\xd8\
\xf1\x80\x24\x06\x6d\x86\xc1\x62\x11\x67\xb5\x99\xe6\x92\xc2\x04\
\xbf\x6b\x24\x58\xb2\xe6\x1e\xb8\xee\x15\xd7\xd0\xd9\xd6\x89\x94\
\x8a\xfe\x17\x8f\x70\xf0\xf0\x01\x3b\x6a\x5a\x0c\xb7\x00\x74\x72\
\x70\x3f\x73\x13\xf6\x67\xb5\xa0\x5f\x2d\xe0\xf7\xd2\x57\x4d\x1d\
\xfc\xc5\x1d\x9f\xe0\xbd\xd7\xdc\xd5\xb8\x36\x39\xcb\xf2\xdc\xd0\
\x0e\x26\xdc\x60\x49\x05\xe9\x6d\xef\xa3\xb7\xbd\xef\x5c\x5f\xee\
\x9c\x65\x22\x33\x3e\x6f\x6e\xdb\x44\x66\x9c\xb7\xfe\xc3\x6d\x3c\
\x3f\xb4\x23\xb4\xca\xcf\x8b\x05\x08\x68\x4d\xb6\x73\xf9\xb5\x7d\
\xf6\xf3\x2b\x2d\x76\xef\x3c\xc0\xe1\xaf\xe5\xc9\x1c\x23\x17\x6b\
\xe6\xef\x95\xc5\xb3\xaa\xc0\x21\x25\x19\x74\xea\xfe\x33\x28\x27\
\xf0\xb7\xbb\x71\xa6\xff\xf9\x42\x00\x36\xc4\x04\x31\x70\x02\x82\
\x26\x4b\x8d\x04\x6b\x10\x5c\x5f\x9c\xe1\x03\xcd\x7d\x24\x37\xfc\
\x72\x9a\x97\x5f\x75\x3d\xa6\x19\x43\x49\xc5\x4f\x7f\xb2\x93\xd3\
\x33\xc3\xa0\xa2\xb5\xbf\xf7\x69\x39\xa4\x50\x80\xec\x69\x50\xf5\
\x80\x3f\xaa\xa8\x28\xf0\xfd\xd7\x6e\xfe\x6d\x3e\xfe\x96\x4f\x34\
\xae\x5d\x16\xa4\x61\x32\x91\x19\xe7\xdd\x9f\x7f\x1b\x13\x99\x71\
\x1e\xfc\x1f\x8d\x8f\xdd\xb8\xe0\xdf\x73\x6a\x87\x37\x60\x2d\x58\
\xa8\xe6\xae\x33\x73\x09\xae\x7a\xe5\x06\xe2\x49\x03\xc3\x80\x03\
\xfb\xfa\x39\xf4\x9d\x09\xbb\xe2\x2f\xc1\x77\x8c\x38\xf7\xc9\x3c\
\x2f\xfa\x4c\x7f\x3b\xef\x2f\x79\xae\xb1\x85\x3f\xd5\x08\x60\xbe\
\x0a\x81\xfc\xb2\xdb\xf7\x5a\xf1\x9c\x82\x69\xa5\x38\x2d\x8b\x9c\
\x14\x06\x3b\xcd\x24\x3f\x98\xe9\x87\xa3\xdf\xcf\xf0\xfc\x0b\x7b\
\xbc\x82\x93\x4d\x5b\x37\x12\x57\xe9\xb2\x12\xe1\x32\x5f\x4b\xaf\
\xc5\x8e\x43\xa2\x93\xf0\x89\x39\x21\x7c\x6c\x40\x58\xe5\x1a\xfe\
\xf5\x42\xc0\xa7\x1f\xfd\x24\xaf\xfc\xe4\x56\x8e\x8c\xf5\x9f\x95\
\x66\x5b\x90\xda\x64\xd7\xe0\x0e\x5e\xf1\x37\x5b\x79\xf4\xf0\x76\
\x76\x9d\xd8\xc1\x1b\x3f\xdb\xd8\xca\xce\x89\xec\x38\x6f\xfd\x47\
\x0d\xfc\x5a\x8a\xcf\x97\xfe\x8b\x83\x35\x0b\x97\xbd\x6c\x0d\xf1\
\xa4\x89\x30\x04\x27\x07\x47\x18\x3a\x38\xc1\xe9\x9f\x80\x88\x71\
\xd0\x4c\xf1\x7d\x59\xe4\xa4\x92\x8c\x28\x7b\xae\xbf\xac\x13\x28\
\x3f\xe3\xf7\xfc\xcd\x45\xce\x0e\x01\xd8\x62\x4f\x1d\x26\x28\xa2\
\xc8\x28\xc5\x24\x92\x61\x59\xe4\x84\xd9\xc4\xb7\x45\x8c\xfd\xc3\
\x0f\xc3\xb1\x7d\xa7\x38\x3a\x78\x04\x61\x08\xe2\x89\x18\x5b\xaf\
\xde\x8c\x28\xc4\xfc\x80\x0f\x4b\xbb\x68\x83\x86\x62\x29\x48\xb8\
\x23\x07\x83\xe6\x7d\x98\x55\x10\x0c\x1a\xa2\xed\x83\x3f\xbd\xe8\
\x3e\x6c\x0f\x3c\x7f\xdf\xd9\xec\xa7\x05\x89\x90\x8f\xfd\xe0\x23\
\xbc\xf2\x6f\xb6\x96\x52\xa4\x06\x0d\x25\x01\x0f\xfc\xc3\x3b\xca\
\x66\xfb\x09\x8e\x01\x90\x79\x58\x7b\x69\x2f\xcd\x1d\x76\xd0\x2f\
\x33\x9b\xa1\xff\xc0\x71\x4e\x3e\x08\x08\x72\xb1\x66\xfe\x5d\x16\
\x39\x81\x64\x08\x3b\xdf\xef\x6a\x7e\xbb\xde\xff\xb9\x8b\x99\x00\
\x9e\xf3\x59\x01\x79\x14\xb3\x4a\x31\xae\x2c\x4e\xc9\x02\xc7\xe3\
\xad\x7c\x49\x18\x4c\x0c\x7c\x01\xf6\xed\xdf\xcf\xf4\xec\x14\x86\
\x10\xb4\x75\xb4\x72\xe9\x25\x1b\x90\x45\x7f\x25\xa0\x6f\xaa\xe5\
\x40\xe1\x85\x88\x41\x2c\x09\x89\x0e\xbf\x25\x10\x39\xb1\x66\x58\
\x6b\x18\x81\x63\x34\x32\x99\xc8\x8e\xf3\xee\x2f\xbe\x8d\x3f\xf8\
\xe6\xef\x30\x91\x1d\x3f\x6b\x4d\xb8\x20\x25\x39\x32\xde\xcf\x1b\
\xff\xe9\x36\xfe\xec\x07\x1f\x2e\xad\x34\x4a\x9f\x8d\x20\x81\x89\
\xec\x38\x6f\xfd\x27\x47\xf3\x87\x59\xa0\xda\xbc\xff\x28\xe8\xea\
\x58\xc4\x92\xd5\x8b\x30\x1c\xbf\xff\xc0\xbe\xc3\x0c\xfd\x00\x0a\
\x93\x60\xa6\xf8\x8a\xb2\x38\xa4\x6c\xed\x7f\x5a\x29\xa6\x11\xda\
\x44\x9f\xbb\xce\x3e\xf8\xf5\x26\x3b\x3b\xb2\xcb\x29\x13\x16\x8e\
\x2b\x60\x8f\x13\x38\x2d\x8b\x9c\x94\x16\x87\xcd\x26\xfe\xd9\xca\
\xc1\xb1\x7f\x87\x5d\xfb\x77\x52\x94\x45\x84\x10\xac\xec\x5d\xc6\
\xca\xc5\xbd\x20\xfd\x35\xd7\x7a\x2d\xb6\xee\x02\xb8\x1d\x65\xa6\
\x1c\x12\x10\x11\x41\xbf\x39\x80\x5f\xff\x1b\x9f\x7e\xfc\x93\xbc\
\xe2\xef\x6c\xd3\x73\x41\xce\x9e\x7c\xfa\x47\x81\x76\x0f\xeb\xc7\
\x33\x24\x81\x89\xec\x38\x6f\xfd\xe7\x80\xe6\x0f\xb8\x9f\xfa\x4b\
\x3e\x93\xa4\xb9\x64\x4b\xaf\x57\x22\x7d\xe8\x85\x23\x9c\x7c\x3c\
\x6f\xd7\xfa\xa7\xf8\x9e\x30\x78\x5c\x16\x39\xa1\x14\xc3\x0a\x6f\
\x9a\xef\xd2\x4b\x3e\xce\x91\x9c\x5d\x02\x80\x12\x09\x80\x85\x72\
\xde\x2b\x68\x31\xa9\x8a\x8c\x0a\xc1\x6e\x33\xcd\xf7\x66\xfa\xa1\
\xff\x81\x0c\x3f\xdd\xf3\xb4\x03\x5e\xc1\xe5\x57\x5e\xca\xa2\xe6\
\x1e\x1b\x87\x66\x00\xf4\x1a\x19\x04\x4b\x32\x63\x49\x48\x76\x10\
\x3e\x91\x08\xda\xba\x40\x8b\x54\x03\xbf\xfb\x79\x64\xbc\x9f\x37\
\xfe\xf3\x6d\xfc\xc1\x83\x0b\xd6\xc0\x7c\xcb\x91\xf1\x7e\xde\xf8\
\x2f\xb7\xf1\x07\xdf\x72\xda\x3a\xe8\xd2\xb9\xa2\xb9\x7c\x73\x21\
\x81\x89\xec\x38\x6f\xfd\x97\x12\xf8\x09\x5a\x99\xfa\x73\x66\x82\
\x99\x4f\x70\xe5\x75\x97\x60\x08\x30\x04\x1c\x3f\x7a\x92\x63\x3f\
\xb5\xfd\x7e\x23\xce\x61\xd3\x1e\xe7\x3f\x82\xb4\x5f\xf1\x85\x22\
\x8b\x70\x0a\x7e\x40\xb1\xf3\xa5\x44\x00\xb6\xd8\x04\x20\xbc\xd7\
\x8b\x8f\xab\x22\xa7\x64\x9e\x13\x46\x9c\xfb\x8c\x04\xbb\x4f\x3f\
\x09\xc7\x9e\x98\xe2\xf9\x17\xf7\x78\x24\xb0\xe9\xaa\x8d\xa4\xcd\
\xd6\xd2\x48\x2c\x7d\x86\xd5\x58\xe0\xb7\xee\x0e\xb8\x31\x81\xb0\
\x31\xee\x21\x0f\x51\xad\xe0\xd7\xbf\x7f\xfa\xc7\x9f\x64\xe3\xff\
\x5b\xc3\x97\x9f\xdd\x76\x8e\x9a\xf4\xe2\x95\x89\xec\x38\x1f\xdb\
\xfe\x11\x36\xfe\xbf\x35\xe5\x5a\xbf\x12\x09\x38\xdb\xea\x21\x81\
\x89\xec\x38\x77\xdc\x7b\x1b\xcf\x8f\xec\x28\xc5\x9b\x2a\x3c\x6b\
\xe4\x4c\x36\x6c\x59\x4b\x3c\x61\x07\xfd\x46\x86\x4f\x73\x68\xc7\
\x49\x86\xbe\x0f\xc2\x64\xc4\x4c\xf3\x15\x59\xe0\xb8\x92\x9c\x54\
\x76\xc5\xdf\x34\xf6\x50\x5f\x0b\x81\x3c\x97\xe0\x77\x9b\xe9\xec\
\xcb\xce\x40\x56\xc0\x9e\x37\xe0\xb4\x92\x0c\xc9\x3c\xc7\xcd\x14\
\x5f\x15\x26\xc3\x43\xdf\x85\xfe\x3d\x27\x18\x1c\xb6\xdf\x2b\x90\
\x48\xc6\xb9\xf6\xe5\x57\x93\xa2\xd5\x9b\x56\x5c\xcf\x04\x04\xcd\
\x33\x97\xb1\x31\x1d\x4b\x40\x27\x01\x57\xaa\x59\x06\xd5\xf6\xd5\
\xb6\x4f\x64\xc7\xf9\x1f\xf7\xdd\xcd\x1b\xb7\xdd\xc6\x63\xfd\xdb\
\xcf\x49\xd3\x5e\x6c\xf2\xe5\x1d\xdb\x78\xe5\xe7\xb6\xf2\x67\xdb\
\x3f\x1c\x3e\x5c\xda\xfd\xac\x81\x04\xde\x50\x85\x04\x26\xb2\xe3\
\xdc\xb1\xed\x36\xf6\x8c\xec\xf0\x29\x11\x5d\xf3\xeb\xd6\x66\x71\
\x0a\xae\xd8\x72\x09\xad\x6e\xd0\x2f\x93\xe1\xf0\xc1\xe3\x9c\xfa\
\x21\xc8\x3c\x39\x33\xcd\x7f\xa9\x22\x07\x95\x64\x50\x29\x46\x3d\
\xd3\xdf\x8d\xfa\xef\x38\xb7\xe0\x87\xb3\x55\x07\x10\x25\x5b\x9c\
\x01\x43\x82\x38\xd0\x2c\x04\xcd\xc2\x64\xa9\x11\x67\x8d\x91\xe4\
\xca\xfc\x24\xbf\x6b\x26\x49\xae\xfd\x15\xd8\xbc\x75\x23\xcb\x7a\
\x96\xa3\x14\x4c\x8c\x4f\xf2\xf4\xd3\xcf\x40\xba\x58\xca\xff\x5b\
\xa5\x1a\x01\xf7\xb7\xb2\xb4\x82\x21\x69\x7f\xb7\xf2\x90\x9b\xb2\
\x7f\x57\xf4\xfb\xc3\x0a\x89\x02\xfb\x46\x8d\x2f\x70\xe3\x0d\x6f\
\xba\xec\x0e\xfe\xfc\x67\x3e\x71\x51\x0f\x8a\x99\x2f\x79\x6c\x60\
\x3b\x1f\xdb\xfe\x11\x1e\xd5\x88\x54\xb9\xce\xa3\xbb\x78\x1b\xb4\
\x4f\x15\xb1\xce\xc9\xae\x6f\x5a\xbe\x85\x6f\x85\xd4\x09\x4c\x64\
\xc7\xb9\xe3\x0b\xb6\xd9\x2f\x0c\x30\x5c\x05\xa3\xcf\xe6\xab\x05\
\x9e\xe5\x2c\xac\x59\xb3\x9a\xa5\xbd\x8b\xec\xb9\xfd\xf2\x79\x76\
\xef\xdc\xc7\xe1\xaf\x58\xe4\x86\x21\xd6\xcc\x97\x50\x3c\x2c\xf3\
\xf4\x2b\xc9\x09\x27\xdf\x9f\xa1\x34\xc9\xa7\xe2\xd9\xf9\x27\x80\
\xf3\xa3\x10\xa8\x92\x6c\xf5\xaa\x04\x13\x08\x52\x42\xd0\x29\x4c\
\x56\x18\x09\xfa\x44\x8c\x57\x14\xa6\xb8\x33\xb5\x84\xe4\xba\xbb\
\x62\x5c\xbb\xf5\x1a\x5a\x9a\x5a\x91\x12\x26\x27\xa6\x78\xe6\xd9\
\x67\x50\xc9\x82\x0d\x76\xb7\x40\x28\x04\xf4\xde\x7a\x59\xda\x96\
\x9b\x00\xe9\x96\x5c\x84\xf9\xfd\x51\xda\xa6\x46\xf0\xeb\xc7\xbd\
\x77\xcb\x5d\xfc\xd1\x2d\x1f\xba\xa0\x2b\xec\xce\x96\x3c\x36\xb0\
\x9d\x8f\x3d\xe2\x00\x3f\x08\x66\xe6\x87\x04\x26\xb2\xe3\xdc\xf1\
\xc5\xdb\x78\x7e\x78\x07\x98\x0e\xf8\x75\xc0\x6b\x04\x20\x4c\x50\
\x59\xe8\x5b\xdd\xe7\x80\x5f\x21\xa5\xc5\xde\x3d\x07\x39\x7c\x7f\
\x86\xc9\xbd\x60\xa6\xb8\x5f\x18\x3c\x20\xf3\xf4\x2b\x8b\x13\xca\
\x1d\xec\x63\x0f\xf4\xb1\xb5\xff\x59\x00\xbf\xdd\x5e\x73\x24\x80\
\x7a\xe4\x8c\xc8\x62\xab\x7f\xee\x00\x6f\xbc\x80\xc9\x72\x33\x49\
\x9f\x52\xbc\xae\x38\xcb\xcf\xa5\x96\xc2\xba\x3b\x23\x48\x20\x55\
\x28\x95\x07\x07\xb5\xbf\xfb\x29\xcb\xd7\xe7\xa7\x6d\x8b\xc0\x37\
\x22\xb0\x92\xdf\x1f\xb6\x4e\x5b\x1f\x55\x7c\xe4\x6e\x7f\xef\x55\
\x77\xf1\x47\x37\x2f\x10\x41\x98\x3c\x76\x64\x3b\x1f\x7b\xf8\x23\
\x3c\x3a\xb0\xbd\xb4\x32\x0c\xcc\xd4\x40\x02\xb2\xfc\x18\x8f\x00\
\x94\xdd\xd7\x57\xad\xde\xc2\x03\xbf\xf4\x10\x00\x6f\xfb\xb2\x9d\
\xea\xf3\x01\xdd\x08\xa4\x9a\x9d\x29\xeb\x64\x06\xd6\xf4\xf6\xb1\
\xb4\xb7\xcb\x1e\xe1\x27\x8b\xec\xdd\x73\x80\x81\xef\x65\x18\x7f\
\x16\x8c\x38\x4f\x19\x09\xfe\xc5\x01\xff\x71\xec\x94\x9f\xfb\x6a\
\x6f\x0b\xd5\x78\xf0\x9f\x09\x86\xcf\x3d\x01\x80\x4b\x02\xf6\xb8\
\x3d\x41\x52\x40\x2b\x26\x5d\x86\xc9\x72\x23\x49\x9f\xb2\xb8\xbd\
\x98\xe1\x2d\x1d\x5b\xa0\xf7\xad\x31\x5e\xbe\xe9\x06\x52\x89\xb4\
\x3d\x70\x68\x62\x8a\x67\x76\x38\x24\xa0\x59\x02\x3e\x02\x90\x01\
\xeb\x40\x96\xb6\x17\x66\xa1\x90\x09\x98\xfe\x8d\x02\x7f\xc4\x30\
\xe4\x9b\xd6\xdc\xca\xaf\x5e\xf3\x5b\xdc\x7e\xc9\x1d\x67\xdc\xf6\
\x17\xba\x7c\x65\xcf\x36\x3e\xf6\xd8\x47\x18\x18\xef\xb7\x01\x1a\
\xac\x87\xab\x46\x02\xc1\xc2\xd9\x4a\x24\x20\xed\x7e\x8a\x25\x6c\
\xa0\x6f\x5a\xbe\x05\xc3\xc0\x36\xfb\x35\xd0\x7b\xda\xdf\xf0\x6b\
\x7e\x99\x75\xc0\xbf\xaa\xcb\x4b\xf7\x3d\xbf\xe7\x00\xc7\x9f\x9a\
\x62\xe8\xfb\x60\xc4\xd8\x6d\x26\xf9\x07\x99\xe7\x90\xb4\x38\x86\
\x5d\xed\x37\x85\xd2\xe6\xf6\xff\x69\xe3\x35\xff\x85\x4f\x00\xa0\
\x93\x80\xe9\xb8\x02\x2d\x1a\x09\xac\x96\x16\x3f\x67\x65\x78\x4d\
\xc7\x55\x82\x4b\xdf\xd9\xca\xcb\xae\xb8\x86\x98\x11\xf3\x93\x40\
\xda\xb6\x04\x90\xf8\x2d\x02\x19\x61\x0d\xb8\x71\x81\x1c\x14\xa6\
\xb5\xf1\x03\x41\xbf\xbf\x4a\x2c\xa0\x66\xf0\x3b\x33\xc3\xba\xaf\
\x82\xee\xed\xec\xe3\x57\xaf\xfe\x2d\x6e\x5f\x7b\x07\xbd\xad\x7d\
\x67\xde\x86\x17\x88\x1c\x99\xea\xe7\x33\xbb\x3e\xc5\x97\xf7\x6c\
\xb3\x83\x72\xf6\x5b\xef\x50\xce\xe7\x19\x91\x40\x00\xf0\xfa\x3a\
\x23\x06\xb1\x38\x9e\x8f\x8f\x03\x7a\x1f\xf0\x83\xe6\xbf\xb3\x4d\
\x66\xa1\xcf\x05\xbf\x53\x57\x72\xe8\xc5\x01\x5e\x7c\x6c\x94\xa1\
\xef\x81\x11\x57\x87\xcd\x94\xf8\xb4\xcc\xa9\x17\xa4\xc5\x11\x24\
\xc3\x4a\x31\xe9\x9b\xdd\xe7\x99\xf9\x31\xfb\x2f\x0e\x02\x00\x9d\
\x04\x62\x08\x92\xce\x54\x62\x5d\x46\x8c\x65\x46\x92\xd5\x56\x96\
\x0f\xca\x82\xd8\xd2\xb1\x05\x9b\x04\x2e\x0f\x90\xc0\xce\x67\x20\
\x5d\xf0\xfc\x7c\x5d\xfb\x87\x92\x81\xf4\xc7\x05\xf2\xd3\x5a\x5c\
\x00\xc2\x53\x86\x81\x6d\x73\x02\xbf\xa9\xfd\x76\x3e\x6f\x5f\x77\
\x07\xb7\xf7\xbd\x95\xdb\x57\xdf\x41\x7b\xa2\xa3\x31\xed\x79\x1e\
\xc9\x44\x7e\x9c\xaf\xbc\xf0\x79\xbe\xb2\x7f\x1b\xcf\x8d\xec\x70\
\x5e\x27\xeb\x04\x63\x5d\xe0\xbb\x44\xe0\x7e\xaf\x85\x04\xdc\xfe\
\x0a\x23\x01\xed\x18\x33\x01\x66\xdc\x01\xb8\x3b\xc1\xa9\x4b\x00\
\xba\xc9\x1f\xd0\xfa\x86\xe1\x80\x7f\x95\xa6\xf9\x85\x06\xfe\xef\
\x83\x30\x18\x89\x35\xf3\x39\x99\x67\xb7\x33\xbc\xf7\xa4\xf3\x5e\
\xbf\xac\x33\xc2\xcf\xe2\x99\x06\x00\x2d\x42\x2e\x1e\x02\x80\x12\
\x09\x08\x87\x04\x84\xe7\x0e\x2c\x33\x52\xac\xb6\x32\xe2\x83\xb2\
\xc8\x96\x8e\xab\x02\x24\xa0\x34\x12\x48\x16\x4a\xc0\x8f\x30\xff\
\x3d\x37\x40\xfb\x8d\x82\xfc\x0c\x14\xb3\x54\x0e\xfa\x55\x02\xbf\
\x7e\x8c\xbb\xce\x9d\x9a\xdb\x01\xbb\x4b\x00\x1e\x19\x18\xa5\xf5\
\x9d\xa9\x4e\x6e\x5c\x7a\x1b\xb7\xf7\xbe\x95\x37\xac\x7c\xcb\x05\
\x4d\x06\x47\x67\x06\xf8\xd6\xb1\x6f\xf0\xd8\xc9\x87\xf9\xe6\x91\
\xfb\x50\x4a\xa1\xa4\xbd\x78\x9a\x5e\x27\x01\x77\xa9\x46\x02\x81\
\x47\x56\xc9\x90\xf5\xce\x77\x21\x6c\x93\x5f\x0f\xe6\xb9\x1a\xdc\
\xd0\xc0\x1f\xb4\x02\x3c\xb3\x3f\x03\x7d\xab\xd6\x38\x9a\x5f\xf9\
\x35\xbf\x0d\xfe\xd1\x78\x0b\xf7\xca\x3c\xbb\x65\x81\x7e\x25\x39\
\xee\xbc\xd8\x63\x16\x77\x76\x9f\xa7\x55\x43\x47\xf8\x05\xe5\xe2\
\x22\x00\xb0\x49\x40\x44\x59\x02\xc2\xb6\x04\x8a\x6c\xe9\xd8\x6c\
\x93\xc0\xd5\x97\x5f\x83\x69\x96\x2c\x81\xa7\x9f\x7e\x06\xd1\x5a\
\x40\xa9\x40\x5a\x30\x40\x0a\xe8\xbf\x35\x6b\xa0\x98\x83\xc2\x8c\
\xe3\x12\x78\x37\xa9\x7d\xba\x41\xc3\x4a\x59\x02\x57\x74\x4d\xaf\
\x11\x80\xef\xb7\x01\xc2\x14\x08\x21\x30\x0c\x03\x21\x84\xb7\xdc\
\xb8\xf4\x16\x5e\xb1\xf8\x16\x5e\xbf\xe2\x4d\x6c\xec\xd8\xdc\xd8\
\x76\x9e\x07\xf9\xf1\xf0\x63\x7c\xe7\xc4\xfd\x3c\x3e\xf4\x08\xbb\
\xc7\x76\xa2\x94\x42\x4a\x69\x83\xdf\x59\xa4\x94\x28\x4b\x23\x01\
\xd7\xfc\x6f\x30\x09\x98\x31\x5b\xf3\x0b\x51\x32\xe7\x3d\x8d\x2f\
\x02\x04\x10\xd4\xfe\x06\x14\xc7\x05\x6b\xd7\x05\x35\x7f\xbf\x0e\
\xfe\xa1\x78\x1b\x5f\x91\x39\x9e\x97\x05\x27\xe2\xaf\xbc\xe1\xbd\
\x39\x5c\x67\xe6\xa9\xf9\xd3\xfe\x70\x31\x12\x00\xc0\xcb\x3c\xdd\
\x19\xc7\x20\x25\x04\x6d\x98\xf4\x18\x31\xb1\xc2\x48\xd1\x67\x65\
\xb8\x5b\x16\x6c\x4b\x60\xed\xcf\xa6\xd9\x7c\xc9\x55\xb4\x34\xb5\
\xa2\xa4\x62\x76\x36\xcb\x8e\x9d\x3b\xc9\xc6\x26\x51\x2a\x9c\x00\
\x94\x6e\x11\x48\x90\x8a\xb2\x00\x61\x7e\x06\xac\x02\xa1\xc5\x25\
\x62\xae\xe0\x8f\x95\x6b\x7e\x61\x96\x03\xdf\xfd\x1d\x5c\xff\x8a\
\xc5\x37\x71\x7d\xf7\x4d\xdc\xd0\x73\x23\x97\xb7\x5d\x49\x5b\xbc\
\xbd\xf1\x6d\x5f\xa3\x1c\xcf\x1c\xe5\xf9\x89\xe7\x78\x62\xe4\x71\
\x9e\x9f\xd8\xc5\x8f\x87\x1f\xf3\x83\x3c\xf0\xbd\x6c\x9d\x54\x25\
\x80\x3b\xc0\x57\x1a\x01\xf8\xac\x83\x0a\xbe\xbd\xfb\xdd\x8d\x09\
\x08\xe1\x98\xfc\x31\x0d\xf4\x46\x40\xd3\x1b\x25\x77\xc0\x70\xfa\
\x42\x27\x00\x35\x15\x63\xc3\x65\xeb\xe8\xe8\x6a\x05\x67\x70\xcf\
\x8b\x2f\xf6\x33\xf0\xc4\x78\x49\xf3\xb7\xf2\x05\x59\xe4\xb0\xcc\
\xd1\xef\x8c\xed\x1f\xc6\x1d\xe2\xeb\xe6\xfa\xe7\x19\xfc\x70\xb1\
\x12\x00\x94\x48\x40\x10\x47\x90\x12\x06\x6d\x18\xa2\xc7\x88\xb3\
\xc2\x48\xb0\x54\xb7\x04\x7a\xdf\x1a\xe3\x65\x97\x5d\xe3\x91\x40\
\x3e\x5f\xe4\xa9\xa7\x9e\x21\x1b\x9b\x74\x66\x65\x71\x02\x83\xaa\
\x04\x70\x65\x39\xc0\x0f\x5a\x02\x96\xb3\x9f\xd4\xac\x01\x77\x32\
\xcc\xb0\x81\x45\x50\x1d\xfc\xae\xd9\x1f\x30\xf9\x85\xe1\x07\xb9\
\x61\x18\x91\xe0\x0f\x92\x83\x10\x82\xb6\x78\x3b\x57\xb4\x6f\xf2\
\xc8\xe0\xe5\x8b\x5e\x81\x10\x70\x6d\xe7\x0d\x0d\xe9\x82\xa9\xe2\
\x24\xfb\xa6\xf6\x30\x59\x98\x64\xdf\xd4\x6e\x8e\xcd\x1e\xe5\x78\
\xe6\x28\x4f\x8e\x3e\xee\xd3\xea\xc1\x25\x4c\xeb\x47\x91\x82\x2f\
\x06\xe0\x82\xbe\x48\x65\x4b\x20\x84\x04\x94\x72\xb4\x7e\xdc\x0f\
\x70\xdd\xec\xf7\x99\xfa\x9a\x2b\x80\x03\x7c\x00\x39\x16\x63\xd3\
\xcb\x2e\xa5\xa5\xbd\xc9\x9b\xc7\x7f\xcf\xf3\x07\x18\xda\x31\xcb\
\xc9\xef\xd9\x25\xbe\xf1\x16\xb6\xc9\x02\xfb\x65\x81\x21\x55\xe4\
\xb8\x13\xf4\x9b\xc2\x1d\xe4\x03\xf2\x6c\x80\xdf\xbe\xef\x8b\x95\
\x00\xc0\x26\x01\xa1\x5b\x02\xc2\xb6\x04\x4c\xba\x8d\x24\xab\xad\
\x3c\xbf\x28\xf3\x5c\xd7\xdc\x07\x7d\xef\x8c\x71\xed\x96\x6b\x68\
\x49\xb7\xd8\x5a\x5e\xc1\x73\xbb\xf7\x70\x6a\xfa\x84\x3d\xbf\x60\
\x84\xf6\xd7\x81\x5f\xb6\x4e\xda\xa9\xc5\xfc\x0c\x58\xc5\x1a\x83\
\x7e\x5a\x84\xd9\x03\x7f\xac\x1c\xfc\x86\x69\x84\x6a\xfb\x4a\x04\
\x10\x45\x0a\x6e\x3f\x04\x7f\xaf\x48\xaf\x62\x45\x7a\xa5\xef\x02\
\xdb\xe2\x6d\x6c\x68\xb9\xc2\xfb\xfd\xf4\xf8\x13\x80\x43\x72\x8e\
\xec\x9b\xda\xc3\x54\x71\xd2\x03\xac\xbd\xbd\xfc\x7b\x2d\xe0\x0f\
\x02\x3e\x48\x04\xa1\x24\x10\x74\x07\xdc\xef\xba\x68\x24\xa0\xa7\
\xf7\xbc\x20\x5f\x50\xeb\x8b\x80\xa9\x6f\xf8\x89\x42\x59\x90\x28\
\x36\xb3\x69\xeb\x06\x62\x71\x7b\x36\x1f\x4b\x5a\x3c\xff\xfc\x01\
\x8e\x3e\x32\xcb\xe8\x93\x20\x0c\x86\x63\xcd\xfc\x83\x2a\x70\xd0\
\xa9\xf1\x1f\xc5\x62\x44\x11\x98\xd5\xf7\x2c\x81\xdf\xed\x8b\xb9\
\xca\xf9\x4f\x00\x00\x57\xeb\x96\x80\xb0\x2d\x01\x93\x0e\xc3\xb4\
\xb3\x03\xca\xe2\x4d\xc5\x0c\x6f\x4e\x2d\x81\xd5\xef\x85\x4d\x97\
\x6d\x64\x69\xf7\x32\x5b\x8b\x2b\xe8\x3f\x7c\x84\x17\x8e\x1d\xc0\
\x6c\x56\xe5\x04\x10\x74\x11\xf4\x75\x1a\x19\x28\x69\xbb\x03\x85\
\x59\x2d\xf2\x7c\x86\xe0\xaf\x45\xf3\x47\x7d\xaf\x87\x04\xc2\xfa\
\x29\xd8\x67\xc1\xe7\x40\x07\xba\xfb\x59\x0b\xf8\x2b\xb9\x00\x41\
\x02\x08\x92\x42\x25\x12\x50\xfa\x36\xef\x22\xed\x0f\xd7\xd7\x77\
\x81\xec\xba\x68\xbe\x08\xbf\x4e\x06\x01\x57\x40\x38\x91\xfe\x45\
\xe9\x6e\xd6\x6e\xe8\x75\x06\xf6\xc0\xec\xec\x2c\xcf\xef\xdd\xcf\
\xf1\xef\x59\x4c\xee\x75\xf2\xfc\x29\x3e\x2f\xf3\x1c\x53\x45\x06\
\x95\x64\x58\x49\x26\x51\x4c\x9f\x2b\xf0\x87\xf5\x5d\x3d\x72\x61\
\x10\x00\xd8\x24\x20\x30\x10\xc2\x75\x07\x9a\x31\x9c\xec\x80\x4d\
\x02\xb7\x17\xb3\xbc\x25\xde\x0e\xab\xde\x0e\x97\x6f\x5d\xcb\x9a\
\xe5\x6b\xed\xf9\x04\x15\x9c\x1e\x1d\x63\xe7\xf3\x3b\xa1\xb9\x60\
\xfb\x8b\x81\xc0\x9f\x67\x01\xb8\x2e\x82\x0c\x10\x80\x2a\x11\x43\
\x31\x6b\x2f\x4a\x51\x1e\xf1\x37\x23\xcc\x7e\x2d\x00\x18\x05\xfe\
\x33\x25\x80\x20\x09\xe8\x7d\x53\x09\xfc\xba\x04\x81\xef\x7e\xaf\
\x07\xfc\xd5\x62\x00\x61\xd6\x40\x18\x09\x78\x04\xe0\xb8\x03\xbe\
\x94\x21\xfe\xbc\x7e\xd9\x3b\x0c\x0d\x3f\x09\x54\x22\x80\xe2\xa4\
\x60\x79\xcf\x72\xfa\x2e\x59\xee\x91\xc7\xd8\xf8\x38\x07\xf6\x1e\
\xe6\xc4\xf7\x2c\xa6\x5f\x04\x23\xc6\x2e\x33\xc9\x3f\xc9\x3c\x47\
\x95\xc5\xa0\x92\x8c\x3a\xe0\xcf\xfa\x0a\x7d\xce\x32\xf8\x83\x7d\
\x55\xaf\x5c\x38\x04\x00\x70\x0d\x02\x21\x5c\x77\x20\xe1\x4b\x11\
\x26\x59\x8d\xc1\xcb\x0b\x53\xbc\xcf\x4c\x91\xef\xfc\x87\x9f\x00\
\x00\x15\xfd\x49\x44\x41\x54\x5c\xf5\x76\x58\xb7\x65\x39\x97\xf4\
\x5e\x4a\xcc\x88\xa1\x14\xcc\xce\x66\x79\x76\x97\x1d\x1c\x34\xcc\
\x10\x02\xd0\x63\x00\x3a\x01\x28\xed\xd3\x2a\x7d\x16\xb2\x76\x8c\
\xc0\x6e\x04\xa2\x35\xff\x1c\xc0\x5f\x8d\x0c\xce\x94\x04\x2a\x49\
\x98\xf6\x77\x3f\xe7\x83\x00\x42\x49\x40\x37\xfd\xdd\x78\x80\x43\
\x0a\x06\x8e\xc6\x77\x35\x38\x55\x48\x20\x82\x00\x84\x00\x39\x19\
\xe3\xb2\x0d\xeb\xe9\xe8\x6a\xf5\xd6\x0d\x9e\x1c\xe2\x85\x3d\x47\
\x39\xf1\x20\xe4\x46\xc0\x4c\x72\xbf\x61\xf2\xa0\xcc\x73\x54\x5a\
\x0c\x7a\xe5\xbd\xd2\xd1\xfa\xca\xb9\xda\x73\x00\x7e\xbd\x7f\xe6\
\x22\x17\x16\x01\x00\x5c\x2b\xfc\x75\x02\x76\x8a\x70\x91\x43\x02\
\xbd\x18\x5c\x5b\x9c\xe5\x67\x95\x45\xf7\xd2\xd7\x42\xef\x4d\xad\
\x6c\x5e\x7f\x15\xa9\x44\xca\xd3\xe2\x7b\xf7\x1e\xe0\xf8\xd8\x00\
\xb1\xe6\x08\x02\xd0\x49\x40\x95\x52\x4c\x5e\x0c\x41\x23\x04\x59\
\xb4\x4b\x89\x2d\x0b\x7f\xd0\x2f\x56\x6e\x09\x84\xf9\xfc\x41\x02\
\x08\xba\x03\x67\x4a\x02\x61\x9f\x61\x7d\x16\xd4\xf8\xfa\xf7\x33\
\x21\x80\x5a\x3e\x43\x49\xc0\x9d\x26\xd3\x2a\x91\x80\xa1\xb4\x62\
\x1d\x15\x00\xbb\x88\x26\x01\x1d\xfc\x6e\xf0\x4f\xe6\xa1\x49\xb5\
\x71\xc5\xa6\x4b\x88\x27\x0c\x84\xb0\xfd\xfd\xfe\x23\x47\x39\xb6\
\x6f\x84\xa3\xf7\x81\xcc\x91\x33\x53\x7c\x43\x18\x7c\x5f\xe6\x39\
\xa2\x8a\x1a\xf8\x15\x39\xe7\x25\x1e\x12\x75\x76\xa2\xfd\x51\xf2\
\xd2\x22\x00\xb0\x49\xc0\xad\x13\x30\x48\x0a\x41\x0b\x06\x8b\x9c\
\xa9\xc6\x57\x18\x71\x2e\x29\xcc\x70\x97\xb2\x58\xd1\xb1\x19\x56\
\xbe\x36\xc6\x95\x97\x6d\xa4\xbb\xa3\xc7\x23\x81\xa1\xa1\x61\x7b\
\x06\xe2\x96\x42\x09\xd0\x11\x04\x80\x1e\x17\x50\x01\x12\x70\x89\
\x40\xd9\xef\x29\xb4\x24\x25\xb3\xdf\x79\x33\xac\xfd\xbb\xdc\xcf\
\xaf\x55\xfb\x37\xd2\x0a\xa8\xd4\x5f\x61\x24\x10\x05\x7e\x77\x5d\
\xa5\x80\x5f\x2d\x66\x7f\x24\x09\x58\x25\x77\x40\x28\xfb\x15\x70\
\x06\xf6\x77\xaf\xc8\x87\x40\x65\x9f\xf0\x6b\x79\xc2\x08\xc0\x31\
\xf9\x57\x74\xad\x60\xf5\x25\xcb\xbd\xe3\xf2\xf9\x1c\xfb\x0e\xbe\
\xc0\xc9\x67\x67\x39\x69\xa7\xf9\xa6\xcc\x34\xff\x8a\xe4\x29\x59\
\xe0\x88\x72\x35\xbf\x64\xda\x33\xf9\x95\x93\xe7\xff\xc9\xb9\x03\
\x7f\xb0\xdf\xea\x95\x0b\x93\x00\x00\x5e\xee\xb7\x04\x9c\x51\x84\
\x9d\xc2\xa4\xc7\x88\xb3\xcc\x48\xb0\xa2\x98\xe5\xdd\xb2\xc0\xd5\
\xa9\x25\xb0\xea\x6d\x70\xe9\xe5\x6b\x59\xbb\x7c\xad\x47\x02\xb3\
\xb3\x59\x9e\xdb\xb3\x87\x29\x75\x1a\x33\x15\x08\xfc\x59\x21\xa6\
\xbf\x6b\x11\xe8\xe0\x97\xa5\x82\x21\x25\x6c\xa5\x55\x2c\x38\xfb\
\xb9\x79\xfe\x58\xb4\xd6\x0f\x03\x7b\x23\x08\x20\x8c\x04\x6a\xe9\
\xab\x28\x12\x08\x03\x7f\xa5\x94\x5f\xa5\xe8\x7f\x14\xf0\x2d\xcb\
\xf2\xad\x17\x28\x4c\x67\x9a\x2d\x0f\xf8\x0a\x84\xd4\xe2\xaf\xa2\
\x9c\x00\x74\xc0\x63\x94\xd2\x7d\x48\x10\x53\x49\x2e\xd9\xb0\x96\
\x8e\x45\xad\xde\xfa\xd3\x63\x63\x1c\xd8\x7f\x98\xc1\x87\xed\x60\
\x9f\x30\x39\x1e\x6b\x62\x9b\x17\xe9\xb7\x38\xa9\x4d\xe7\x95\xf3\
\x4c\x7e\x75\xee\xc1\x1f\xec\xb3\x7a\xe5\xc2\x25\x00\xb0\x49\x40\
\x78\x03\x88\x12\x18\x34\x09\x83\x36\x61\xd0\x25\x4c\x96\x1a\x49\
\x56\xc8\x22\x3f\x63\xe5\xf8\x19\x33\x49\x72\xf9\xed\xd0\xbb\xb5\
\x93\xcb\xd7\x6c\x24\x15\x2f\xb9\x04\xfd\x03\x47\xe8\x1f\x3a\x04\
\xa9\x82\xfd\x8c\x05\xc1\x1f\x20\x04\xa9\x13\x00\x8e\x42\x12\x36\
\x01\xb8\x03\x8a\xa4\xb0\x89\xc0\x52\x20\x30\x2a\x02\xbf\x16\xf0\
\x57\xab\x0d\xa8\xc7\x15\x08\xf6\x5b\xad\x19\x80\x28\x42\x88\x22\
\x80\x7a\xcd\x7e\xfb\xb7\xad\xf2\x85\xe9\x94\x08\x4a\x65\x03\xdd\
\x1d\xca\xab\x4a\x64\xe0\x91\x40\xd0\xdc\x0f\xba\x03\xc2\x9e\xbd\
\x67\x49\xfb\x52\x7a\xd7\xae\x20\x1e\x37\x9d\x97\x75\x5a\x1c\x3b\
\x7e\x9c\xfe\xe7\x87\x38\xf9\x03\xdb\xdf\x37\x62\x3c\x69\xa6\xf8\
\x77\x99\xe7\xb8\x16\xe9\x1f\x43\x31\x83\xd4\x34\xff\x93\xe7\x1e\
\xf8\xc1\xfe\x9a\x8b\x5c\xd8\x04\x00\x70\x9d\x67\x09\x18\x08\x67\
\x52\x11\x83\x16\x0c\x3a\x0d\x93\x25\x22\xce\x72\xe0\x4a\x2b\xc7\
\x7b\x95\x64\x49\xd7\xb5\xb0\xec\xe6\x18\x57\x6e\xb0\x5d\x02\x74\
\x6b\xe0\x79\xcd\x1a\x70\x0b\x50\xa2\x7c\x7f\xf7\xb7\xdb\x09\x1a\
\xf8\x95\xb6\x08\xc3\x40\x15\x0d\xa4\x25\x50\xb2\x3a\x11\xcc\xc5\
\x0d\xd0\x7f\xbb\xfd\x51\x6b\x3a\x50\x97\x4a\xda\x5f\xff\x7e\x26\
\x04\x10\x45\x02\x4a\x49\x30\x24\x42\x48\x14\xda\xa2\x64\x49\xe3\
\x07\x49\x80\x00\x09\x68\xda\xde\x5d\x54\x01\x8c\x4c\x8a\x4b\xd6\
\xdb\x5a\xdf\xb6\x14\x14\x33\x99\x59\x5e\x3c\x7c\x98\xe3\x4f\xcc\
\x32\xf2\x94\xe3\xef\x27\xf9\x4f\x21\xf8\xa1\x2a\x32\xa2\x2c\x7b\
\x0e\x3f\x27\xd2\x3f\x8b\x22\xef\x69\xfe\x27\xce\x1f\xf0\x07\xfb\
\xad\x5e\xb9\xf0\x09\x00\x6c\x12\xb0\x09\xc0\x7e\x09\xa9\x41\x5a\
\x18\xb4\x08\x83\x45\x98\x2c\x36\x62\x74\x89\x18\x6b\xac\x2c\x6f\
\x96\x45\x5e\x9e\x5a\x02\xcb\xdf\x08\xab\xd6\xf7\x70\xc5\x9a\x8d\
\x98\x4e\x96\x40\x29\x38\x7e\x62\x90\x03\x03\xfb\xa1\xb9\x60\xcf\
\xfe\xa2\x0f\x1a\x0a\x98\xff\x7a\x31\x5a\x18\x01\x20\x04\x86\x70\
\xa7\x39\xb0\x17\x65\x19\x48\xcb\x40\x88\x68\xe0\x57\x72\x0b\xaa\
\x11\xc0\x99\x66\x02\xe6\x92\xff\xaf\x35\xe2\x1f\x06\x7c\x84\x44\
\x18\x12\x84\x0d\x76\xa5\x24\x52\x49\xcf\xbd\x76\xd7\x95\x69\x7e\
\x59\x2a\xbf\x70\x5d\x01\x3d\x1e\x80\x82\xe2\x94\x60\x71\xdb\x52\
\xfa\xd6\x3b\x5a\xdf\x71\x13\x8e\x9e\x38\xce\xc0\xe1\xe3\x9c\x7c\
\x08\xa6\x5f\x04\x61\x72\xd4\x4c\xf2\x55\x60\xbf\x2c\x70\x02\x8b\
\x31\x0f\xfc\x92\x59\xec\xf7\x58\xd8\x9a\xff\x3c\x03\xbf\xde\x57\
\x73\x91\x8b\x83\x00\x00\xae\x17\x02\xe1\xcd\x31\x18\x43\x90\xc2\
\xa0\x59\x18\xb4\x0b\x83\x76\x11\x67\xa9\x11\x67\x85\x2c\xf2\x0a\
\x2b\xcb\xdb\x80\x64\xcf\x2b\x61\xd9\x4d\x31\xae\xe8\xf3\x5b\x03\
\x85\x42\x91\xbd\x07\xf7\x33\x34\x39\x48\xbc\x55\x95\xa7\x02\x83\
\xbe\x3f\xb6\xc9\x1f\x04\xbf\x10\x06\xc2\x2b\x5f\x30\x7c\x9f\x28\
\x9b\x0c\x94\x2a\x27\x83\x5a\xdd\x81\x5a\x09\xa0\x56\x12\x08\x8b\
\xfe\xd7\x5b\xf9\x57\x4b\xd0\xcf\x76\xc6\x6d\xe0\xbb\x00\x57\x4a\
\x79\x1a\x5f\x3a\x9f\xba\x15\x80\x52\x7e\xcd\xaf\xc5\x01\x50\x7e\
\xcd\x6f\xcd\xda\x11\xfe\xb5\xeb\x56\xd3\xda\xd6\x64\x13\x82\x80\
\xd9\xcc\x2c\x2f\x1c\x3e\xc4\xa9\xe7\x67\x19\xfc\x01\xc8\x1c\x18\
\x71\x1e\x31\x53\xdc\x2f\xf3\x9c\x50\x45\x4e\x2a\xcb\x29\xe9\xb5\
\x83\x7d\x19\xef\x75\xdd\x0a\xc9\x8f\xcf\x3f\xf0\x07\xfb\xad\x5e\
\xb9\x78\x08\x00\xe0\x06\xa7\xab\x4b\x71\x81\x24\x06\x69\x21\x68\
\x12\x26\x8b\x84\xc9\x62\x23\xc1\x72\x11\x63\x4d\x61\x86\x77\xaa\
\x22\x97\xa4\x96\xc0\xf2\x37\xc0\xaa\x4b\x7a\xb8\xa2\x6f\xa3\xfd\
\x62\x52\x87\x08\x4e\x9f\x1e\xe3\xc5\xfe\x43\x4c\xa9\xd3\x18\x69\
\xc7\x25\xd0\x2c\x01\x70\x00\x8f\xa6\xfd\xbd\xef\xae\xd6\x17\x08\
\x61\xda\x71\x00\x61\x38\x65\x0c\x06\x86\x28\x11\x03\xca\x26\x02\
\x25\xed\xd0\x75\x35\x22\x98\x6b\x85\x60\x2d\x7d\x35\x97\xdc\x7f\
\x35\xad\xaf\x9c\x86\x13\x42\x69\x9a\x5e\x79\x9a\xde\x23\x01\x0d\
\xf8\xd2\x59\x87\xe6\x0e\x84\xc6\x00\xb4\x58\x80\x2a\x80\x31\x9b\
\x62\xd5\x8a\x15\x2c\x5d\xde\xe3\x9c\x4f\xd3\xfa\xfd\xc7\x39\xf9\
\x43\x98\x3e\x04\x42\x30\x64\xa6\xf8\x0f\x04\xbb\x65\x91\x93\x14\
\x19\x52\x16\xa7\x95\xfd\xca\xba\x2c\xee\x58\x7e\xe5\x8c\x42\xf8\
\xd1\xf9\x09\x7e\xbd\xaf\xe6\x22\x17\x17\x01\xb8\xf2\x0a\x2f\x2e\
\x60\x3a\x03\x89\x12\x4e\xaa\xb0\x43\xc4\xe8\x36\x62\x2c\x31\x92\
\x2c\x93\x05\x5e\x5e\xcc\xf0\x36\x14\xc9\x9e\x1b\x61\xc9\xb5\x31\
\xd6\xf7\xad\x65\xd5\x92\xde\x92\xb9\xaf\x14\xc7\x4e\x0c\x72\xf8\
\xd8\x21\x8a\x89\x0c\x22\xa1\x81\xdf\x39\x5d\x39\xf8\x5d\x0e\x32\
\xca\xb4\xbf\xe1\xfe\xf6\x59\x04\xa2\xb4\xce\x23\x04\x01\x01\xeb\
\x60\xae\x99\x01\xbd\x8f\xe6\x42\x00\xee\x67\xad\x11\x7f\xe5\x8d\
\xcd\x75\x01\xac\xca\x34\xbd\x54\x21\xc0\x57\xa5\x7d\x6d\x2b\xc0\
\x2a\xb9\x02\x58\x9e\x15\x00\x7e\x12\x50\x05\x50\x93\x31\x96\x2e\
\x5a\xc6\xca\xde\xa5\x9e\xb9\x8f\x80\xc9\xe9\x49\x5b\xeb\x3f\x97\
\xe3\xd4\x63\x8e\xd6\x8f\xf1\x88\x99\xe6\x9b\xb2\xc0\x09\x55\x60\
\x48\x59\x8c\x20\x19\x57\x2a\x44\xeb\xab\xf3\x1b\xfc\x7a\x1f\xcd\
\x45\x2e\x4e\x02\x00\x9b\x04\xec\xff\xec\x32\x1c\x3b\x55\xd8\x24\
\x0c\x5a\x85\x49\xa7\x88\xd1\x6d\xc4\x59\x2c\x62\xac\x2a\x4c\xf3\
\x6e\x65\x71\x59\xbc\x1d\x96\xbe\x0a\x96\x5f\xd9\xca\x25\xab\x2e\
\xa5\xa3\xa5\x13\x00\x29\xed\x68\xdf\x0b\x87\x0e\x73\x74\xe8\x08\
\xa2\xc3\x89\x0f\x88\x90\x20\x20\x68\xda\xdf\x28\x69\xfe\x10\x32\
\x28\x23\x04\x07\xec\x3e\x6b\xc1\xe1\x32\x3b\xa7\xe8\xf0\x5a\x15\
\x6b\xc0\xed\x93\x5a\x6a\x02\x74\xa9\x35\xfa\x1f\x04\x7d\x29\x17\
\xe2\xf7\xdb\x5d\x32\xb0\x01\xae\x4a\xfe\x7d\x00\xec\x3e\xad\x8f\
\x9f\x14\x24\x96\xbd\x9f\x57\x16\x58\xb2\x02\x94\x84\xc2\xb0\xa0\
\x2b\xdd\xc3\xfa\x4b\x57\xfb\xfc\xfc\x5c\x21\xc7\xe1\x23\x03\x0c\
\x1d\x1b\xe3\xe4\x0f\x61\xf6\xb8\xa6\xf5\x61\x8f\x2c\x32\x88\xc5\
\x29\x65\xfb\xfb\x93\x28\x66\x02\xf9\x7d\xc9\x63\xe7\x37\xf0\x83\
\xfd\x36\x17\xb9\x78\x09\x00\xe0\x95\x5e\x5c\xc0\x25\x82\x04\x06\
\x29\xe1\xa4\x0b\x31\xe9\x34\x62\xf4\x18\x71\x96\xc8\x22\x5b\xad\
\x2c\xef\x54\x8a\xf6\xe6\x55\xb6\x5b\xb0\x7c\x55\x0f\x97\xac\xda\
\xe0\xa4\x0c\x95\x1d\x1f\x28\x16\xe9\x3f\x72\x94\xe3\xa7\x8f\xd8\
\x81\xc2\x94\x7d\x2a\x4f\xfb\xe3\x04\xfe\x94\x06\x70\xcc\x12\xf0\
\x5d\x97\xc0\x23\x02\x51\x46\x02\x86\x4b\x04\x3a\x31\x38\x7f\xd7\
\x5d\xaf\x93\x81\x5d\x6c\xe0\xdc\x66\x8d\x96\x40\xf0\x7b\x2d\xc1\
\x3f\x0f\xe8\x8e\x1f\xef\xfa\xf3\xd2\x05\x7a\xd9\xa7\xbe\x4d\x0b\
\xf0\x69\xfe\x7e\x99\xcf\x1f\xfc\x44\x22\x95\x05\x42\x21\xdd\xb1\
\xc1\x45\x28\x8c\x0a\xba\x12\x3d\xf4\xf5\xad\x24\x95\x4e\x78\x1a\
\x5f\x5a\x16\x27\x4e\x9d\xe4\xd8\xd1\x41\x86\x9e\xb4\x18\xdb\x09\
\x40\xce\x88\xf1\x03\x33\xc5\xc3\xb2\xc0\x09\x55\x0c\x68\x7d\xdb\
\xe4\x2f\x45\xf9\x15\xea\x42\x01\xbf\xde\x67\x73\x91\x8b\x9b\x00\
\x5c\xb9\xb1\x2c\x40\x18\x47\x90\x16\x06\xcd\xc2\xa4\x03\x93\x45\
\x46\x8c\xc5\x46\x8c\x45\xc5\x0c\x6f\x95\x45\x6e\x02\x92\x5d\x57\
\x43\xcf\x0d\xb0\x7a\x65\x2f\x6b\x97\xaf\xc5\x34\x4c\x2f\xf5\x57\
\x28\x14\x19\x38\x72\x94\xa3\xc3\xb6\x45\x60\x34\x61\xfb\x9b\x8e\
\xa6\x77\xcd\x7f\xc3\x07\xfe\x92\x05\x60\x04\x2c\x01\x9b\x2c\x02\
\xa0\x17\x4e\xac\x40\x23\x01\x61\x68\x6e\x81\x8f\x28\x1c\x83\x47\
\x68\xc4\xe0\xa2\x42\x38\x2c\xe8\xf5\x93\x20\xd8\x65\xf6\x63\xe0\
\xd9\x33\xce\x3a\xfb\x05\x4e\x3e\x9f\x1f\xbf\x0b\x10\x04\xbe\x0c\
\x6a\xff\x20\x11\x20\xfd\x96\x00\x15\x2c\x00\xef\xbb\x6d\x05\x58\
\x79\x49\xf6\xa4\xa4\x3b\xd9\x43\xdf\xda\x95\xa4\xd3\x49\xcf\xc7\
\x47\x28\x4e\x8d\x0e\x73\xf8\xe8\x00\x23\x3b\x2c\x46\x7f\x62\x4f\
\xf6\x6a\xc4\x78\xce\x48\xf0\x4d\x14\x07\xa5\xc5\x69\x8a\x9c\x52\
\xd2\xa7\xf5\xf3\xde\xbc\x7d\xca\x61\xb8\x47\x2f\x1c\xf0\xbb\xfd\
\x34\x57\x79\x69\x10\x00\xc0\x4d\x5a\x80\xb0\x34\xbf\x40\x42\x08\
\x9a\xb0\xdd\x82\x0e\x61\xd0\x26\x62\x2c\x46\xb0\x4e\xe6\xb8\x43\
\x5a\x6c\x31\x93\xb0\xe8\x6a\x3b\x3e\xb0\x7a\x65\x2f\xab\x16\xaf\
\xf2\x06\x17\xb9\x4a\xf1\xd8\xe0\x20\x87\x07\x0f\x51\x6c\xca\x92\
\xec\x2c\x69\x7f\x8f\x00\x42\xc0\x4f\x18\x09\x68\xa6\xbf\x0b\xec\
\xe0\x6f\xbb\xa8\x48\x94\x13\x84\xa7\xfd\xc3\x32\x02\xc2\xf9\x5f\
\xb8\x1d\x56\x36\x67\xa9\x8d\x7f\x07\xf8\xb8\x05\x0e\x3a\xf0\xc3\
\x40\x1f\x0e\x7c\x9f\x99\xaf\xff\x2e\x03\xbf\xdf\x05\x70\xc1\x1e\
\xcc\x02\xe4\x26\x8b\x14\xc7\x60\x49\xeb\x12\x56\xf6\x2e\x2b\xf9\
\xf8\xf6\xad\x70\x6a\x74\x98\x23\x83\xc7\x38\xf5\x5c\x8e\xd1\xa7\
\xec\xd7\x71\x0b\x93\xa3\x46\x8c\xef\x88\x18\x3b\x1c\x3f\x7f\x58\
\x49\x26\xb0\x98\x50\xee\xf0\x5d\x45\x1e\xd7\xd7\x07\xc5\x23\x17\
\x16\xf0\xbd\xbe\x5b\x20\x80\x3a\xe4\x66\xcf\x1a\xb0\xcb\x88\x6d\
\x22\x48\x0a\x83\x66\x04\x69\x61\xd2\x29\x4c\xba\x1c\xb7\xe0\x6a\
\x59\xe0\x8d\x4a\xb2\xda\x4c\xc2\xd2\xdb\xa0\x6b\x53\x8c\x55\x3d\
\x01\x22\x00\x50\x30\x34\x3c\xcc\x89\xa1\x41\x26\x18\x25\xd5\x19\
\x23\x96\x88\x95\xfb\xfe\xc1\x60\x20\x9a\xd9\x4f\xd0\x0d\x08\x10\
\x81\xa3\xd9\x85\xa1\x6d\x77\x81\x6f\x38\x6e\x82\xf3\x89\x4b\x00\
\x04\xcc\x7f\x1f\xf8\xdd\x6f\xaa\xf4\xaf\x52\xa5\x4f\x07\xf8\x94\
\x91\x40\xd0\xec\x0f\xf1\xef\xcb\x88\x40\xda\xb6\xb5\x0a\x31\xfd\
\x83\x16\x00\x16\x56\xd1\x22\x3b\x56\xc0\x9a\x10\xac\x5c\xb2\x82\
\x9e\xc5\x9d\xc4\xe2\x31\x04\xca\x33\x6c\x4e\x8d\x0e\x73\xe4\xc4\
\x31\xc6\x06\x6c\xe0\x3b\x7e\xfe\x84\x88\xf1\x3d\x33\xc1\x63\xb2\
\xc8\x29\x65\x31\xaa\x2c\x46\x1d\x73\x3f\x83\x74\x8a\x7a\xd0\x22\
\xfc\xea\xc2\x05\xbf\xdd\x55\x0b\x04\x50\x9f\xdc\xe2\x59\x03\xfa\
\x78\x82\x04\x82\xb8\x10\x34\x63\xd2\x26\x4c\x16\x19\x26\x3d\xc2\
\x26\x82\x2d\x32\xcf\xcf\x28\xc9\xaa\x78\x1b\x2c\x7e\x05\x74\x5e\
\x12\x63\xc5\xb2\x65\xac\x5a\xdc\xeb\x1b\x69\x88\x82\xd9\x4c\x96\
\x81\xa3\x47\x18\x2f\x9c\x46\x25\x2d\x9a\xbb\x92\xbe\xa0\x60\x18\
\x01\x04\x03\x82\x46\x84\x55\x60\xf8\x5c\x04\xdd\x4a\x10\x9e\x45\
\x50\x06\xfe\x00\x11\x54\xb5\x00\x9c\x1b\x09\x6a\x7f\x9c\xef\x52\
\x29\x94\xd4\x8b\x76\x34\x12\x90\x41\x6d\xaf\xc7\x03\x22\x5c\x00\
\x8d\x08\x32\x93\x39\xa6\x87\xb3\xb4\x27\xdb\x59\xb2\xb8\x87\x8e\
\x8e\x36\xcf\xcc\x17\xd8\x6f\xe2\x19\x1d\x3f\xcd\x91\xc1\x63\x8c\
\x1e\xc8\x31\xb6\x2b\x14\xf8\x23\x0e\xf0\x4f\x23\x3d\x8d\x9f\x71\
\xcc\xfd\xd2\xf0\x5d\x85\xe2\xe1\x0b\x17\xf8\x5e\xdf\x2d\x10\xc0\
\x1c\xe5\x56\x5f\xdd\x80\x9b\x36\x4c\x08\x41\x1a\xbb\x92\xb0\x5d\
\x98\x74\x09\x93\x2e\x11\x67\xb1\x2c\xb2\x49\xe6\x79\x9d\x92\xac\
\x32\x93\xb0\xe8\x65\xd0\xb5\x15\x9b\x08\x7a\x56\xd1\x92\x6e\xf5\
\x48\x40\x61\x77\xcc\xd0\xf0\x30\x23\x63\xa3\x9c\xce\x8c\xd2\xd4\
\x91\xa4\x79\x51\x2a\x90\x1d\x30\x43\xb3\x01\x06\x9a\xd9\xaf\x11\
\x42\x94\x8b\xe0\xba\x02\x2e\xd8\x4b\x44\xe0\xc6\x07\xc0\x73\x02\
\xa2\xfa\xcb\xd5\xfe\x94\x5b\x01\x4a\x4a\x9f\x1b\x20\xc3\xfc\x7b\
\xcf\x12\xb0\xca\x2d\x00\x64\xb8\x2b\x80\x24\x3b\x9d\x63\x62\x68\
\x06\xa3\x60\xb2\xb4\x67\x31\x3d\x3d\x8b\x88\xc5\x4a\xa9\x3c\x80\
\x5c\x3e\xc7\xa9\xd3\xc3\x9c\x38\x35\xc8\xe9\xbd\x45\x46\x5c\x53\
\x3f\x1c\xf8\x63\x48\x26\x35\xe0\xbb\x43\x77\x4b\xa9\x3d\x50\x6c\
\xbf\xf0\xc1\x0f\x0b\x04\x70\x66\x72\xab\x96\x29\xf0\x93\x40\x0a\
\xbb\x80\xa8\x0d\x7b\x94\xe1\x22\x11\xa3\xdb\x48\xd0\x25\x0b\x5c\
\x61\x65\xb9\xd9\x75\x0d\x5a\xd7\x43\xcf\xf5\xd0\xd9\xd3\xca\xaa\
\xc5\xab\x58\xb6\x68\x99\x47\x02\xd2\x69\xdf\x42\xa1\xc8\xa9\x91\
\x61\x46\x4e\x8f\x32\x63\x4d\xd1\xdc\xd9\x44\x6b\x57\x93\x2f\x48\
\x68\x04\x5d\x82\x80\x65\x10\xcc\x0e\xf8\x49\x21\x18\x03\xd0\x02\
\x83\x76\x27\x79\xf1\x00\xaf\xb7\xdc\x7e\x53\x41\x17\x40\x8b\xfe\
\x13\xe1\xf7\xe3\xd7\xf8\x7a\x74\x5f\x2a\x89\x92\x01\x13\x3f\x60\
\xf2\xcf\x4e\x67\x19\x3b\x39\x09\x39\x41\x57\xe7\x22\x7a\xba\x17\
\x91\x4a\x25\x4b\x71\x4b\x27\xc9\x3f\x31\x3d\xc9\x89\x53\x83\x0c\
\x0f\x9f\x66\xec\x39\x98\xd8\x67\x03\x1f\xc1\x84\x19\xe7\x87\x66\
\x8a\xa7\x65\x81\x11\xa7\x7e\xff\xb4\x92\x8c\x23\x99\x52\x76\xfd\
\x7e\xd6\x17\xe4\xc3\xc9\x27\x2a\xb8\x58\xc0\xef\xf6\xd3\x5c\x65\
\x81\x00\x5c\xb9\xcd\xe7\x16\xb8\xd9\x02\x9b\x08\xec\x92\x62\xbb\
\x90\xc8\xa4\x43\xc4\x58\x24\x4c\x5a\x95\xc5\x95\x32\xcf\x35\xd2\
\x62\x2b\x40\xf3\x2a\xe8\xb8\x02\xba\x36\xc6\xe8\x69\xef\x61\x55\
\xcf\x2a\x9a\xd3\x2d\xa5\xc2\x21\xe7\xd3\x25\x83\xf1\xa9\x49\x66\
\xf2\x53\xa4\xdb\x53\xb4\xb4\x37\x91\x4c\x25\x4b\xa0\x0e\x8d\x07\
\x68\x64\x60\xe8\xb1\x03\x11\x20\x06\x0d\xec\x1a\x11\xe8\x81\xc0\
\xb2\x09\x41\xb4\x0b\x54\x01\xf3\x9f\x32\x3f\x3f\x40\x02\x3e\xff\
\xdf\xd5\xfc\x96\x8f\x14\x0a\x85\x02\x93\xa7\xa7\x18\x1f\x99\x22\
\x6d\xa6\x68\x4e\x37\xd1\xdd\xbd\x88\xb4\x0b\x7a\xf0\x69\xfb\xa1\
\xd3\xa7\x38\x75\xfa\x14\x63\xfd\x39\x26\x0e\xd8\xc0\x47\x81\x30\
\x19\x30\x4c\x9e\x36\x12\x3c\xe1\x80\x7e\xc2\xc9\xe5\x47\x03\xdf\
\x0d\xf2\x01\xfc\xf0\xe2\x01\xbe\xd7\x77\x0b\x04\xd0\x20\xb9\xcd\
\x67\x2b\xbb\xe5\xc4\x2e\x11\x24\x31\x68\x12\xf6\x3b\x0b\x5b\x31\
\x68\x36\x4c\xda\x45\x8c\x4e\xa0\xcf\xca\x73\x9d\x2a\x72\xb5\x52\
\xb4\x9b\x49\xe8\xd8\x08\xed\x97\x43\xc7\xf2\x14\xab\xba\x57\xd1\
\xdd\xde\x63\xc7\x0a\xa0\x94\x4a\x77\xbe\x4f\x4e\x4f\x71\x6a\x64\
\x84\xe9\xcc\x34\x05\x0a\xb4\x76\x34\xd3\xd6\xd9\x4a\x2a\x95\xc2\
\xf0\x08\xc0\xf4\xc5\x00\xfc\xa4\x10\xb4\x0a\x74\xff\xdf\x2d\x3b\
\xd6\x02\x80\x6e\x50\xd0\xf9\xd7\xd5\xf8\xbe\x00\x20\xba\xff\x1f\
\x08\xf6\x05\x8a\x7d\xfc\x04\x60\x2f\xf9\x42\x81\xf1\xd1\x09\x26\
\xc6\x26\x91\x79\x45\xc2\x4c\xd0\xd9\xd9\x4e\x4f\xf7\x22\xb4\x4b\
\xf0\x34\xbd\x65\x15\x19\x99\x38\xcd\xa9\xd1\x61\x46\x4e\x4e\x30\
\xdd\x0f\xa7\x77\x41\x61\xca\x6e\x23\x11\xe3\xd9\x58\x92\xc7\x11\
\x1c\x54\x05\xc6\xa4\xed\xdf\x8f\x2b\xc9\x4c\x55\xe0\x2b\x2e\x4a\
\xe0\xbb\xb2\x40\x00\x8d\x96\x57\x55\x24\x82\x84\x33\xd0\x28\xe9\
\x4c\x4c\xda\x26\x4c\x3a\x0c\xdb\x2a\xe8\xb2\x0a\xbc\x4c\x16\xb8\
\x5a\x59\x5c\x0a\x90\x5a\x0c\x1d\x97\x43\xeb\x5a\xe8\x5c\xdc\xc2\
\xd2\x45\xcb\xe8\x6c\xee\xa4\x25\xdd\x02\x68\x43\x8a\xbd\x00\x9c\
\x3d\x21\xe5\xe9\xf1\x71\x66\xb3\xb3\x58\xc2\x22\x99\x4e\xd2\xb1\
\xa8\x9d\x74\x3a\x45\x53\x53\x93\xdf\xfc\x0f\xd4\x0e\xf8\xd3\x82\
\x9a\x6b\x40\xc9\x2a\x08\xeb\x33\xa5\x31\x92\x2f\xf0\xe7\x06\xfd\
\x7c\xa5\xbc\x4e\x35\xa0\x63\xee\x17\xf2\x79\x26\x27\xa6\x98\x9c\
\x9c\x62\x6a\x72\x1a\x2c\x48\x27\x53\xb4\xb6\x34\xd3\xd1\xde\x5e\
\xf2\xe7\xc1\xa7\xed\x2d\xcb\x0e\xe8\x8d\x4e\x8e\x72\xea\xd4\x69\
\xa6\x07\x60\xfa\x30\x4c\x1d\xf6\xf6\x1d\x12\x31\x9e\x32\x93\x3c\
\x85\x64\x50\x15\x19\x97\x16\x13\x58\x4c\x2a\xc9\x14\x8a\x19\xe5\
\x4e\xca\x19\x05\xfc\x1f\x5c\xbc\xc0\x2f\xef\xbb\xfa\x65\x81\x00\
\x2a\xc9\xab\x43\x89\xc0\x25\x83\x98\xb0\xcb\x8b\xd3\xc2\xa0\x19\
\x93\x76\x21\x68\x77\xdc\x83\x4e\xa0\xd7\x2a\x70\x85\x2a\x72\xb5\
\x92\x2c\x46\x40\xaa\xa7\x44\x06\xad\x5d\x29\xba\xdb\x7a\xe8\x68\
\xee\xa0\xc7\x1b\x89\xa8\xfb\xe2\xa5\xb2\x9c\x4c\x36\xcb\xe4\xf4\
\x34\x53\xd3\x53\x4c\x4d\x4f\x93\x6a\x4a\x61\xc9\x22\xe9\x74\x9a\
\xb6\xb6\x36\x12\x89\x04\xed\xed\xed\x24\x12\x09\xcd\x75\xf0\x9b\
\xff\x86\x56\x29\x88\x77\x4b\x94\xce\xe7\x7e\xd3\xc0\x2f\x9d\xdf\
\xae\xc6\x1f\x3e\x35\x8c\x42\x31\x3c\x3c\x42\xa1\x50\x20\x9f\xcb\
\x93\xcf\xe5\x69\x69\x6e\x26\x9d\x4a\xd1\xd2\xd2\x4c\x6b\x73\xb3\
\xf3\x4c\xe8\x0d\xa9\xdc\x52\x04\xa6\x33\x33\x8c\x4e\x9c\x66\x74\
\x62\x94\xa9\xc9\x19\xa6\xfa\x61\xba\x1f\xa6\x0e\x79\x97\x35\x21\
\x04\xbb\xcd\x24\x3f\x15\x06\x2f\x28\x8b\x31\x65\x31\xee\xe4\xf0\
\xa7\x94\x64\x06\x45\x46\xe9\xda\x3e\x0c\xf8\xdf\xbf\xf8\x81\xef\
\xb5\xee\x02\x01\xcc\xb3\xbc\xc6\x47\x04\x7a\xb0\xd0\x44\x10\x17\
\xd8\xa3\x0e\xb1\xa7\x25\x6b\x75\x0a\x8b\xda\x85\x9d\x4e\x6c\x57\
\x16\xbd\x56\x9e\xab\x51\x5c\xaa\x14\x4b\x00\xe2\xed\xd0\xb6\x16\
\x9a\x96\x43\xeb\x1a\xe8\x68\xee\xa4\xa3\xa5\x83\xce\xe6\x0e\x3a\
\x5a\x3a\x80\x72\x22\x50\xda\x3f\x4a\x41\x26\x97\x25\x9b\xcd\x32\
\x9b\xb5\x3f\x0b\xc5\x22\xd9\x7c\x8e\x8e\xb6\x36\xb2\xb9\x1c\x99\
\x5c\x96\xc5\x3d\x8b\xbd\x0b\x27\xf0\x2d\x28\x2e\xfd\x8c\x8d\x8f\
\xa3\xa4\xa4\xa5\xb9\x99\x99\xd9\x59\xa4\x65\x91\x4a\xa6\x88\xc5\
\x4c\x5a\x5b\x5a\x88\xc7\x62\xb4\xb8\x40\x77\x8f\x13\xa5\xe7\xa8\
\x64\xde\xdb\x80\x9f\x98\x9e\x60\x7c\x7a\x9c\x89\x99\x49\x66\x4e\
\x15\x99\x72\x34\x7d\x76\xc4\x3b\x69\x56\x18\xec\x36\x4c\x9e\x33\
\x92\x3c\xa7\x8a\x4c\x28\xc9\xa4\xb2\x98\x40\x31\x85\xc5\xb4\x63\
\xe2\x67\x94\x1b\xd1\x2f\x8d\xd4\x93\x3e\xe0\x7f\xef\xa5\x03\x7c\
\x57\x16\x08\xe0\x6c\xc9\x6b\xcb\x88\x40\x9f\x9c\x34\x2e\x9c\xb1\
\x06\x40\x5a\x18\x34\x61\xbb\x08\x2d\xc2\xb0\xb3\x09\xc2\xa4\x0d\
\xc5\x4a\xab\xc0\x15\xca\xe2\x4a\x25\xe9\x73\xd1\xd8\xbc\x12\x9a\
\x56\x40\xf3\x72\x9b\x14\x5a\x52\x2d\x74\xb4\x74\xd0\x92\x6a\xa5\
\x25\xdd\x62\xbb\x0c\x9e\xab\x1e\x98\xc2\x0b\x9d\x1c\x4a\x6b\x83\
\xeb\xa6\x66\xa6\x29\x16\x8b\x94\xed\x0a\x74\xb6\xb5\x57\xbe\x77\
\xe1\x3f\x42\xef\x71\xe1\x0c\x8a\xca\xe6\xb3\xcc\x64\xa6\x6d\xd0\
\xcf\x4c\x30\x9d\x99\x61\xe6\x54\x91\xd9\x41\x98\x3d\x01\xb3\x83\
\x60\x65\xbd\x63\x4e\x22\x38\x68\x26\x78\x46\x18\x1c\x52\x16\x93\
\xca\x62\x52\x29\x66\x91\x4c\x23\x99\x51\xf6\x64\x1c\x19\x24\x39\
\x85\x96\xc3\xd7\xf3\xf8\xee\x8d\x7e\xf7\xa5\x07\x7c\x57\x16\x08\
\xe0\x6c\xcb\xeb\x44\x50\xa1\x1a\x9e\x8b\x60\xd8\xd5\x85\xc2\x1e\
\x6f\x90\xc4\xae\x29\x48\x3a\x25\xc7\x2d\xc2\xa0\x45\x98\xb4\x89\
\x18\x2d\x40\x97\x2c\x72\x89\x2a\xb2\x46\x29\xd6\x2b\xc9\x52\xf7\
\xef\xa6\xba\x6d\x97\xa1\x69\x39\xc4\x5b\x6d\x62\x68\x49\xb5\x90\
\x8a\xa7\x68\x49\xb7\xd2\x92\x6a\x26\x66\xc6\x68\x49\xb7\xd8\x15\
\x89\xce\xe5\x78\xbd\x59\xce\x08\x3e\x09\xdd\xe2\x3e\x0b\x8e\x5a\
\x57\xce\xd7\x12\xda\xed\xed\xd3\x99\x19\x8a\x56\x91\xf1\xe9\x09\
\x72\xf9\x2c\xd9\x42\xd6\xfe\x7e\x1a\x0a\xd3\xb6\x66\x77\x41\xaf\
\x9d\x68\x5c\x18\xbc\x28\x0c\x5e\x30\x62\xbc\x20\x04\x27\x94\x64\
\x5a\xd9\xfe\xfc\x34\x92\x69\x65\x57\xe9\xe5\xdc\xdc\xbd\xb2\x73\
\xf7\x05\x64\x99\x89\x5f\x02\xfe\x77\x5e\xba\xc0\x77\x65\x81\x00\
\xce\xa5\xbc\x5e\x94\x6c\x82\xf2\xa2\x22\x3b\x56\x80\x3d\x27\x01\
\xda\x04\x25\x98\x34\x39\x13\x95\x34\x3b\x93\x99\x36\x21\xe8\x94\
\x79\xd6\x4a\x8b\xb5\x28\x7a\x95\x62\xad\xf3\xb7\x01\x9b\x10\xe2\
\xad\x90\xea\xb2\x3f\xe3\xad\x10\x6f\xb1\x3f\x5b\x53\x2d\x98\x66\
\xcc\x26\x85\x54\x8b\x77\x79\xa9\x78\x92\x54\x22\x55\xba\x5e\x15\
\xfa\xd5\x27\xe3\x33\xe3\xde\xf7\x6c\x3e\x4b\x36\x9f\x73\x72\xf2\
\xf6\xfa\xd9\x41\xb0\xf2\x90\x1b\xb5\x01\x9f\x9f\x2a\x03\x3b\x02\
\x4e\x22\x38\x2a\x0c\x5e\x30\xe2\xbc\x20\x0c\x86\x94\xc5\xac\x13\
\xb5\xb7\xb5\xbb\x64\xd6\xd1\xf8\x6e\xb1\x4e\x5e\xe1\x99\xf7\x61\
\x26\x7e\x29\x48\xf2\xed\x05\xe0\xbb\xb2\x40\x00\xe7\x83\xbc\xa1\
\x8c\x08\xf4\x61\xc8\x42\x0b\x1c\xba\x64\x90\xc0\x7e\xb1\x49\x0a\
\x48\x39\xc3\x94\x53\x08\x12\xce\x67\x5a\x18\xa4\x95\xc5\x6a\x29\
\x59\xa1\x24\x5d\xc0\x06\xa5\xe8\x04\x3a\xbd\xf3\x3a\x4d\x1f\x6f\
\x85\x44\x9b\xfd\x3d\xd9\x05\x66\x12\x0f\x90\x4d\xcb\xb5\xeb\xac\
\xd2\xdd\xb3\x83\xfe\xdf\x33\x83\xf6\x31\x32\x0f\xd9\xd1\xc0\xf1\
\xae\xff\x2e\x38\x81\xe0\xa8\x10\x8c\x0a\x93\x93\x66\x9c\x17\x94\
\x64\x56\x49\x32\x4a\x32\x83\xc5\xac\x52\xe4\x90\x64\x14\x64\x91\
\x4e\xe4\x5e\x92\x2f\x03\xbd\x37\xfb\x87\x4f\xd3\xdb\x67\x7a\x70\
\x01\xf4\x61\xb2\x40\x00\xe7\x93\xbc\xd1\x47\x04\x51\x96\x81\xa9\
\x59\x06\x3a\x21\x24\xb4\xec\x42\xd2\x29\x42\x4a\x0b\x83\xb4\x4b\
\x10\xc2\xa4\x49\x18\xa4\xac\x02\xeb\x84\xc0\x94\x45\x56\x01\x42\
\x16\x59\x8e\xed\xa9\x37\x01\x25\xc8\x87\x75\x4d\xe8\x40\x80\xc8\
\x75\x19\x21\x38\x8e\x42\x89\x18\x23\x40\xce\x30\x39\x05\xcc\x18\
\x31\x4e\xa0\x18\x77\x80\x9e\x43\x91\x53\x36\xa8\x73\xca\xd6\xea\
\x59\x67\x00\x4e\xd6\xf1\xdf\x6d\x2d\xef\x9a\xf6\xae\x96\x8f\xd6\
\xf4\xf6\xe7\x37\x17\x80\x5f\x49\x16\x08\xe0\x7c\x95\xdb\xcb\x62\
\x05\xa2\x02\x21\x78\x2f\x11\x13\xa5\x39\x0b\x3c\x62\x10\xf6\x7b\
\x0f\x92\x08\x52\xc2\x9e\xfa\x3c\xe1\x1e\x27\xb4\xbf\xa5\xf0\xed\
\x93\x92\x16\xcb\xb0\x83\x92\xa5\xea\x9f\xb0\xee\xb2\xa7\x38\x56\
\x4a\x22\x8d\x38\xfd\x58\x64\x95\x0d\xe0\xac\xb0\x67\xca\xb1\x9c\
\x92\x00\x89\xc2\x52\x4e\x24\xde\x89\xca\x67\xb1\x09\xa0\xa0\x81\
\xbb\xe0\x81\xbd\x54\x87\x5f\x09\xf0\x7e\x13\xff\x81\x05\xd0\xd7\
\x2a\x0b\x04\x70\x21\xc8\x9b\x7d\xe5\x77\x41\xcb\x40\x4f\x2f\x96\
\xd2\x8c\xa5\x21\xcb\x3a\x29\xb8\x4b\x4c\xe8\x33\x8f\xe8\xd3\xa2\
\x43\xca\x09\x40\x26\x1c\x97\xc3\x2b\x06\xf4\x95\x02\x38\xa2\x34\
\x08\x2a\x1b\x9c\x45\x57\x93\x83\xa7\xbd\x75\xd0\xba\x44\x50\xa4\
\x04\xf0\xa2\x43\x0c\xf6\x6b\x3c\x4b\x40\x77\x83\x77\x7a\xd4\xbe\
\xdc\xa7\x07\xf8\xc6\x02\xe8\xe7\x22\x0b\x04\x70\x21\xca\x5b\x03\
\xf5\xb8\xc1\xf4\xa2\xfd\xcb\x25\x07\xdd\x5a\x70\x0b\x92\x0c\xe1\
\xea\x73\x23\x90\x92\x74\x5d\x0b\xfb\xd3\xf0\x08\xc0\x95\xb0\xee\
\x2a\x8d\xff\xb1\xc1\x2a\x1d\x4d\x5e\x32\xd5\x7d\xda\x5a\x95\x80\
\xec\x8e\xb0\x2b\x4d\xe1\x4b\x28\xd0\x83\x86\x3d\xdc\xb7\x00\xf8\
\x46\xc8\x02\x01\x5c\x0c\xf2\xb6\x80\x85\x10\x46\x0a\xf6\x6f\xc3\
\x47\x12\xe5\xfb\xf9\x89\xc2\x59\x2f\xa8\x91\x00\x4a\xe0\x75\x35\
\xbe\x15\x00\x75\x69\xef\x12\xb8\xa5\x06\x6e\x7d\x9b\xfe\x09\xff\
\xb5\x00\xf8\xf9\x90\x05\x02\xb8\x98\xe5\xed\x42\x84\x42\x5d\x97\
\xa0\x5b\xa1\xbb\x16\x95\xf6\xd7\x45\x05\xbe\xb9\xc0\xd6\x35\xb8\
\x7f\x3f\xb4\xfd\xca\xa9\xe1\x3f\x16\xc0\x7e\xb6\x64\x81\x00\x5e\
\x6a\xf2\x4e\x11\x45\x00\xfa\xbf\xd1\x1a\xbf\x32\x01\xe8\xbf\x83\
\x3a\xbc\x7c\xbf\xaf\x2d\x00\xfd\x5c\xcb\x02\x01\x2c\x48\xb9\xbc\
\x5b\x9c\x39\x01\xfc\xeb\x02\xb8\x2f\x04\x39\xe7\x04\xb0\x20\x0b\
\xb2\x20\x17\xa6\x18\xe7\xfa\x02\x16\x64\x41\x16\xe4\xdc\xc9\x02\
\x01\x2c\xc8\x82\xbc\x84\x65\x81\x00\x16\x64\x41\x5e\xc2\xb2\x40\
\x00\x0b\xb2\x20\x2f\x61\x59\x20\x80\x05\x59\x90\x97\xb0\x2c\x10\
\xc0\x82\x2c\xc8\x4b\x58\x16\x08\x60\x41\x16\xe4\x25\x2c\x0b\x04\
\xb0\x20\x0b\xf2\x12\x96\x05\x02\x58\x90\x05\x79\x09\xcb\xff\x0f\
\xdb\x75\xfb\xb1\x81\x97\x28\x5d\x00\x00\x00\x25\x74\x45\x58\x74\
\x63\x72\x65\x61\x74\x65\x2d\x64\x61\x74\x65\x00\x32\x30\x30\x39\
\x2d\x31\x31\x2d\x32\x38\x54\x31\x37\x3a\x31\x38\x3a\x32\x38\x2d\
\x30\x37\x3a\x30\x30\x31\x91\xb2\x2c\x00\x00\x00\x25\x74\x45\x58\
\x74\x64\x61\x74\x65\x3a\x63\x72\x65\x61\x74\x65\x00\x32\x30\x31\
\x38\x2d\x30\x36\x2d\x32\x38\x54\x32\x32\x3a\x35\x30\x3a\x32\x35\
\x2b\x30\x38\x3a\x30\x30\xdc\x05\xa0\xc5\x00\x00\x00\x25\x74\x45\
\x58\x74\x64\x61\x74\x65\x3a\x6d\x6f\x64\x69\x66\x79\x00\x32\x30\
\x31\x31\x2d\x30\x34\x2d\x32\x39\x54\x31\x34\x3a\x35\x37\x3a\x30\
\x30\x2b\x30\x38\x3a\x30\x30\xf7\xaa\x04\x7c\x00\x00\x00\x35\x74\
\x45\x58\x74\x4c\x69\x63\x65\x6e\x73\x65\x00\x68\x74\x74\x70\x3a\
\x2f\x2f\x63\x72\x65\x61\x74\x69\x76\x65\x63\x6f\x6d\x6d\x6f\x6e\
\x73\x2e\x6f\x72\x67\x2f\x6c\x69\x63\x65\x6e\x73\x65\x73\x2f\x4c\
\x47\x50\x4c\x2f\x32\x2e\x31\x2f\x3b\xc1\xb4\x18\x00\x00\x00\x25\
\x74\x45\x58\x74\x6d\x6f\x64\x69\x66\x79\x2d\x64\x61\x74\x65\x00\
\x32\x30\x30\x39\x2d\x31\x31\x2d\x32\x38\x54\x31\x34\x3a\x33\x32\
\x3a\x33\x36\x2d\x30\x37\x3a\x30\x30\x00\x31\xfa\x1d\x00\x00\x00\
\x43\x74\x45\x58\x74\x73\x6f\x66\x74\x77\x61\x72\x65\x00\x2f\x75\
\x73\x72\x2f\x6c\x6f\x63\x61\x6c\x2f\x69\x6d\x61\x67\x65\x6d\x61\
\x67\x69\x63\x6b\x2f\x73\x68\x61\x72\x65\x2f\x64\x6f\x63\x2f\x49\
\x6d\x61\x67\x65\x4d\x61\x67\x69\x63\x6b\x2d\x37\x2f\x2f\x69\x6e\
\x64\x65\x78\x2e\x68\x74\x6d\x6c\xbd\xb5\x79\x0a\x00\x00\x00\x16\
\x74\x45\x58\x74\x53\x6f\x75\x72\x63\x65\x00\x43\x72\x79\x73\x74\
\x61\x6c\x20\x50\x72\x6f\x6a\x65\x63\x74\xeb\xe3\xe4\x8b\x00\x00\
\x00\x27\x74\x45\x58\x74\x53\x6f\x75\x72\x63\x65\x5f\x55\x52\x4c\
\x00\x68\x74\x74\x70\x3a\x2f\x2f\x65\x76\x65\x72\x61\x6c\x64\x6f\
\x2e\x63\x6f\x6d\x2f\x63\x72\x79\x73\x74\x61\x6c\x2f\xa5\x91\x93\
\x5b\x00\x00\x00\x18\x74\x45\x58\x74\x54\x68\x75\x6d\x62\x3a\x3a\
\x44\x6f\x63\x75\x6d\x65\x6e\x74\x3a\x3a\x50\x61\x67\x65\x73\x00\
\x31\xa7\xff\xbb\x2f\x00\x00\x00\x18\x74\x45\x58\x74\x54\x68\x75\
\x6d\x62\x3a\x3a\x49\x6d\x61\x67\x65\x3a\x3a\x48\x65\x69\x67\x68\
\x74\x00\x32\x35\x36\xe9\xc3\x44\x19\x00\x00\x00\x17\x74\x45\x58\
\x74\x54\x68\x75\x6d\x62\x3a\x3a\x49\x6d\x61\x67\x65\x3a\x3a\x57\
\x69\x64\x74\x68\x00\x32\x35\x36\x7a\x32\x14\x44\x00\x00\x00\x19\
\x74\x45\x58\x74\x54\x68\x75\x6d\x62\x3a\x3a\x4d\x69\x6d\x65\x74\
\x79\x70\x65\x00\x69\x6d\x61\x67\x65\x2f\x70\x6e\x67\x3f\xb2\x56\
\x4e\x00\x00\x00\x17\x74\x45\x58\x74\x54\x68\x75\x6d\x62\x3a\x3a\
\x4d\x54\x69\x6d\x65\x00\x31\x33\x30\x34\x30\x36\x30\x32\x32\x30\
\x02\x25\x5d\x73\x00\x00\x00\x12\x74\x45\x58\x74\x54\x68\x75\x6d\
\x62\x3a\x3a\x53\x69\x7a\x65\x00\x34\x31\x35\x36\x35\x42\xef\xaa\
\x12\xff\x00\x00\x00\x60\x74\x45\x58\x74\x54\x68\x75\x6d\x62\x3a\
\x3a\x55\x52\x49\x00\x66\x69\x6c\x65\x3a\x2f\x2f\x2f\x68\x6f\x6d\
\x65\x2f\x77\x77\x77\x72\x6f\x6f\x74\x2f\x6e\x65\x77\x73\x69\x74\
\x65\x2f\x77\x77\x77\x2e\x65\x61\x73\x79\x69\x63\x6f\x6e\x2e\x6e\
\x65\x74\x2f\x63\x64\x6e\x2d\x69\x6d\x67\x2e\x65\x61\x73\x79\x69\
\x63\x6f\x6e\x2e\x63\x6e\x2f\x73\x72\x63\x2f\x35\x34\x31\x31\x2f\
\x35\x34\x31\x31\x30\x33\x2e\x70\x6e\x67\x08\xe8\x32\x4d\x00\x00\
\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
"
qt_resource_name = b"\
\x00\x03\
\x00\x00\x76\xf3\
\x00\x70\
\x00\x69\x00\x63\
\x00\x06\
\x07\x03\x7d\xc3\
\x00\x69\
\x00\x6d\x00\x61\x00\x67\x00\x65\x00\x73\
\x00\x0d\
\x0a\x4b\xfb\xc7\
\x00\x6c\
\x00\x6f\x00\x67\x00\x6f\x00\x5f\x00\x74\x00\x65\x00\x78\x00\x74\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x06\
\x07\x4c\x4f\x9f\
\x00\x6d\
\x00\x79\x00\x2e\x00\x69\x00\x63\x00\x6f\
\x00\x06\
\x07\x87\x57\x47\
\x00\x71\
\x00\x74\x00\x2e\x00\x70\x00\x6e\x00\x67\
"
qt_resource_struct_v1 = b"\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x01\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x02\
\x00\x00\x00\x0c\x00\x02\x00\x00\x00\x03\x00\x00\x00\x03\
\x00\x00\x00\x3e\x00\x01\x00\x00\x00\x01\x00\x00\x66\x0b\
\x00\x00\x00\x50\x00\x00\x00\x00\x00\x01\x00\x00\x67\x3f\
\x00\x00\x00\x1e\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\
"
qt_resource_struct_v2 = b"\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x01\
\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x02\
\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x0c\x00\x02\x00\x00\x00\x03\x00\x00\x00\x03\
\x00\x00\x00\x00\x00\x00\x00\x00\
\x00\x00\x00\x3e\x00\x01\x00\x00\x00\x01\x00\x00\x66\x0b\
\x00\x00\x01\x69\x32\x9d\xdb\x80\
\x00\x00\x00\x50\x00\x00\x00\x00\x00\x01\x00\x00\x67\x3f\
\x00\x00\x01\x69\xe7\x63\x91\x40\
\x00\x00\x00\x1e\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\
\x00\x00\x01\x69\xe7\x63\x91\x40\
"
qt_version = QtCore.qVersion().split('.')
if qt_version < ['5', '8', '0']:
rcc_version = 1
qt_resource_struct = qt_resource_struct_v1
else:
rcc_version = 2
qt_resource_struct = qt_resource_struct_v2
def qInitResources():
QtCore.qRegisterResourceData(rcc_version, qt_resource_struct, qt_resource_name, qt_resource_data)
def qCleanupResources():
QtCore.qUnregisterResourceData(rcc_version, qt_resource_struct, qt_resource_name, qt_resource_data)
qInitResources()
| 65.216893 | 103 | 0.727185 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.