repo_name stringlengths 5 100 | path stringlengths 4 375 | copies stringclasses 991 values | size stringlengths 4 7 | content stringlengths 666 1M | license stringclasses 15 values |
|---|---|---|---|---|---|
AshivDhondea/SORADSIM | scenarios/main_057_iss_13.py | 1 | 13528 | # -*- coding: utf-8 -*-
"""
Created on 01 October 2017
@author: Ashiv Dhondea
"""
import AstroFunctions as AstFn
import AstroConstants as AstCnst
import GeometryFunctions as GF
import RadarSystem as RS
import TimeHandlingFunctions as THF
import math
import numpy as np
import datetime as dt
import pytz
import aniso8601
import matplotlib.pyplot as plt
from matplotlib import rc
rc('font', **{'family': 'serif', 'serif': ['Computer Modern']})
rc('text', usetex=True)
params = {'text.latex.preamble' : [r'\usepackage{amsmath}', r'\usepackage{amssymb}']}
plt.rcParams.update(params)
from mpl_toolkits.axes_grid.anchored_artists import AnchoredText
import matplotlib as mpl
import matplotlib.patches as patches
from mpl_toolkits.basemap import Basemap
import pandas as pd # for loading MeerKAT dishes' latlon
# --------------------------------------------------------------------------- #
print 'Loading MeerKAT positions'
# Load the 64 MeerKAT dish positions (WGS-84 lat/lon) from the survey sheet.
# NOTE(review): 'sheetname' is the pre-0.21 pandas keyword (later renamed
# 'sheet_name') -- confirm against the pinned pandas version.
dframe = pd.read_excel("MeerKAT64v36.wgs84.64x4_edited.xlsx",sheetname="Sheet1")
dframe = dframe.reset_index()
meerkat_id = dframe['ID'][0:64]    # antenna identifiers
meerkat_lat = dframe['Lat'][0:64]  # latitudes [deg]
meerkat_lon = dframe['Lon'][0:64]  # longitudes [deg]
# --------------------------------------------------------------------------- #
# Parse the MeerKAT bistatic radar parameter file. Relevant lines look like
# "<name> = <value>"; the value is read as a float from just after '=' up to
# (but excluding) the trailing newline character.
with open('main_meerkat_radar_parameters_doreen.txt') as fp:
    for line in fp:
        if 'centre_frequency' in line:
            good_index = line.index('=')
            centre_frequency = float(line[good_index+1:-1]);
        if 'HPBW Rx' in line:
            good_index = line.index('=')
            beamwidth_rx = float(line[good_index+1:-1]);
        if 'HPBW Tx' in line:
            good_index = line.index('=')
            beamwidth_tx = float(line[good_index+1:-1]);
        if 'bandwidth' in line:
            good_index = line.index('=')
            bandwidth = float(line[good_index+1:-1]);
# Fixed: the 'HPBW Tx' branch was duplicated verbatim (a copy-paste slip with
# no effect but noise), and fp.close() after the 'with' block was redundant
# because the context manager already closes the file; both removed.
# --------------------------------------------------------------------------- #
speed_light = AstCnst.c*1e3; # [m/s] -- assumes AstCnst.c is in km/s; TODO confirm
wavelength = speed_light/centre_frequency; # [m]
# --------------------------------------------------------------------------- #
print 'Loading data'
# Outputs of the earlier pipeline stage (main_057_iss_05) for the same pass.
timevec = np.load('main_057_iss_05_timevec.npy'); # timevector
x_target = np.load('main_057_iss_05_x_target.npy'); # state vector in SEZ frame
theta_GMST = np.load('main_057_iss_05_theta_GMST.npy'); # GMST angles in rad
y_sph_tx = np.load('main_057_iss_05_y_sph_tx.npy'); # spherical measurement vectors in Tx frame
y_sph_rx = np.load('main_057_iss_05_y_sph_rx.npy'); # spherical measurement vectors in Rx frame
y_sph_rx_meerkat_01 = np.load('main_057_iss_05_y_sph_rx_meerkat_01.npy');
y_sph_rx_meerkat_02 = np.load('main_057_iss_05_y_sph_rx_meerkat_02.npy');
# discretization step length/PRF
delta_t = timevec[2]-timevec[1];
# time stamps
experiment_timestamps = [None]*len(timevec)
index=0;
with open('main_057_iss_05_experiment_timestamps.txt') as fp:
    for line in fp:
        # Drop the last 8 characters of each line (trailing tail that
        # aniso8601 cannot parse) before ISO-8601 parsing.
        modified_timestring = line[:-8];
        experiment_timestamps[index] = aniso8601.parse_datetime(modified_timestring);
        index+=1;
fp.close();  # NOTE(review): redundant -- the 'with' statement already closed fp
experiment_timestamps[-1] = experiment_timestamps[-1].replace(tzinfo=None)
title_string1 = str(experiment_timestamps[0].isoformat())+'/'+str(experiment_timestamps[-1].isoformat());
norad_id = '25544'  # NORAD catalogue id of the ISS
# --------------------------------------------------------------------------- #
# Bistatic Radar characteristics
# beamwidth of transmitter and receiver
beamwidth_rx = math.radians(beamwidth_rx);  # [rad]
# NOTE(review): beamwidth_tx is never converted to radians in this script --
# confirm downstream consumers expect degrees.
# Location of MeerKAT (receiver site); indices 0..3 are the first four dishes.
lat_meerkat_00 = float(meerkat_lat[0]);
lon_meerkat_00 = float(meerkat_lon[0]);
altitude_meerkat = 1.038; # [km]
lat_meerkat_01 = float(meerkat_lat[1]);
lon_meerkat_01 = float(meerkat_lon[1]);
lat_meerkat_02 = float(meerkat_lat[2]);
lon_meerkat_02 = float(meerkat_lon[2]);
lat_meerkat_03 = float(meerkat_lat[3]);
lon_meerkat_03 = float(meerkat_lon[3]);
# Location of Denel Bredasdorp (transmitter site)
lat_denel = -34.6; # [deg]
lon_denel = 20.316666666666666; # [deg]
altitude_denel = 0.018;#[km]
# --------------------------------------------------------------------------- #
# SGP4-propagated ground track (latitude/longitude histories, in radians)
# and the best transmit-beam pointing indices from stage 07.
lat_sgp4 = np.load('main_057_iss_05_lat_sgp4.npy');  # fixed: stray trailing comma removed
lon_sgp4 = np.load('main_057_iss_05_lon_sgp4.npy');
tx_beam_indices_best = np.load('main_057_iss_07_tx_beam_indices_best.npy');
# --------------------------------------------------------------------------- #
# sort out a few variables
# Fixed: the two assignments below were duplicated verbatim in a second
# "sort out a few variables" section; the copies were removed.
tx_beam_index_down = tx_beam_indices_best[0];  # index entering the Tx beam
tx_bw_time_max = tx_beam_indices_best[1];      # index of max dwell (see below)
tx_beam_index_up = tx_beam_indices_best[2];    # index leaving the Tx beam
# --------------------------------------------------------------------------- #
tx_beam_circ_index = np.load('main_057_iss_08_tx_beam_circ_index.npy');
earliest_pt = tx_beam_circ_index[0];
tx_bw_time_max = tx_beam_circ_index[1];  # deliberately overwrites the stage-07 value
latest_pt = tx_beam_circ_index[2];
# --------------------------------------------------------------------------- #
# Dwell-window indices (earliest point, max-dwell point, latest point) for
# receivers 0, 1 and 2, produced by stage 09.
rx0_beam_circ_index = np.load('main_057_iss_09_rx0_beam_circ_index.npy');
earliest_pt_rx = rx0_beam_circ_index[0]
index_for_rx0 = rx0_beam_circ_index[1]
latest_pt_rx = rx0_beam_circ_index[2]
rx1_beam_circ_index = np.load('main_057_iss_09_rx1_beam_circ_index.npy');
earliest_pt_rx1 = rx1_beam_circ_index[0]
index_for_rx1 = rx1_beam_circ_index[1]
latest_pt_rx1 = rx1_beam_circ_index[2]
rx2_beam_circ_index = np.load('main_057_iss_09_rx2_beam_circ_index.npy');
earliest_pt_rx2 = rx2_beam_circ_index[0]
index_for_rx2 = rx2_beam_circ_index[1]
latest_pt_rx2 = rx2_beam_circ_index[2]
# --------------------------------------------------------------------------- #
print 'finding relevant epochs'
# Find the epoch of the relevant data points: convert the sample indices of
# interest into naive datetimes relative to the first experiment timestamp.
plot_lim = 6  # seconds of track to show before the Tx-beam entry point
plt_start_index = tx_beam_index_down - int(plot_lim/delta_t)
plt_end_index = tx_beam_index_up+1 + int(2/delta_t)  # +2 s after beam exit
start_epoch_test = THF.fnCalculate_DatetimeEpoch(timevec,plt_start_index,experiment_timestamps[0]);
end_epoch_test = THF.fnCalculate_DatetimeEpoch(timevec,plt_end_index,experiment_timestamps[0]);
tx_beam_index_down_epoch = THF.fnCalculate_DatetimeEpoch(timevec,tx_beam_index_down,experiment_timestamps[0]);
tx_beam_index_up_epoch= THF.fnCalculate_DatetimeEpoch(timevec,tx_beam_index_up,experiment_timestamps[0]);
tx_bw_time_max_epoch = THF.fnCalculate_DatetimeEpoch(timevec,tx_bw_time_max,experiment_timestamps[0]);
earliest_pt_epoch= THF.fnCalculate_DatetimeEpoch(timevec,earliest_pt,experiment_timestamps[0]);
latest_pt_epoch = THF.fnCalculate_DatetimeEpoch(timevec,latest_pt,experiment_timestamps[0]);
# Strip timezone info so isoformat() output is uniform in plot labels.
earliest_pt_epoch = earliest_pt_epoch.replace(tzinfo=None)
end_epoch_test = end_epoch_test.replace(tzinfo=None);
start_epoch_test = start_epoch_test.replace(tzinfo=None)
title_string = str(start_epoch_test.isoformat())+'/'+str(end_epoch_test .isoformat());
tx_beam_index_down_epoch = tx_beam_index_down_epoch.replace(tzinfo=None);
tx_beam_index_up_epoch = tx_beam_index_up_epoch.replace(tzinfo=None)
tx_bw_time_max_epoch = tx_bw_time_max_epoch.replace(tzinfo=None)
latest_pt_epoch= latest_pt_epoch.replace(tzinfo=None)
# --------------------------------------------------------------------------- #
# Figure 1: ground track over southern Africa with the dwell-time interval
# (earliest_pt..latest_pt) highlighted in crimson.
fig = plt.figure(1);ax = fig.gca();
plt.rc('text', usetex=True)
plt.rc('font', family='serif');
plt.rc('font',family='helvetica');
params = {'legend.fontsize': 8,
          'legend.handlelength': 2}
plt.rcParams.update(params)
# NOTE(review): 'map' shadows the Python builtin; harmless here but renaming
# would be clearer.
map = Basemap(llcrnrlon=3.0,llcrnrlat=-39.0,urcrnrlon=34.,urcrnrlat=-8.,resolution='i', projection='cass', lat_0 = 0.0, lon_0 = 0.0)
map.drawcoastlines()
map.drawcountries()
map.drawmapboundary(fill_color='lightblue')
map.fillcontinents(color='beige',lake_color='lightblue')
lon =np.rad2deg(lon_sgp4);
lat = np.rad2deg(lat_sgp4);
# Pre-dwell segment in blue, dwell segment in crimson, post-dwell in blue.
x,y = map(lon[plt_start_index:earliest_pt+1], lat[plt_start_index:earliest_pt+1])
map.plot(x, y, color="blue", latlon=False,linewidth=1)
x,y = map(lon[earliest_pt:latest_pt+1], lat[earliest_pt:latest_pt+1])
map.plot(x, y, color="crimson", latlon=False,linewidth=2,label=r"%s" %str(earliest_pt_epoch.isoformat())+'Z/'+str(latest_pt_epoch.isoformat())+'Z');
x,y = map(lon[latest_pt+1:plt_end_index+1], lat[latest_pt+1:plt_end_index+1])
map.plot(x, y, color="blue", latlon=False,linewidth=1)
x,y = map(lon_denel,lat_denel)
map.plot(x,y,marker='o',color='green'); # Denel Bredasdorp lat lon
x2,y2 = map(20,-34)
plt.annotate(r"\textbf{Tx}", xy=(x2, y2),color='green')
x,y = map(lon_meerkat_00,lat_meerkat_00)
map.plot(x,y,marker='o',color='blue'); # rx lat lon
x2,y2 = map(22,-30)
plt.annotate(r"\textbf{Rx}", xy=(x2, y2),color='blue')
parallels = np.arange(-81.,0.,5.)
# labels = [left,right,top,bottom]
map.drawparallels(parallels,labels=[False,True,False,False],labelstyle='+/-',linewidth=0.2)
meridians = np.arange(10.,351.,10.)
map.drawmeridians(meridians,labels=[True,False,False,True],labelstyle='+/-',linewidth=0.2)
plt.title(r'\textbf{Object %s trajectory during the interval %s}' %(norad_id,title_string), fontsize=12)
plt.legend(loc='upper right',title=r"Dwell-time interval");
ax.get_legend().get_title().set_fontsize('10')
fig.savefig('main_057_iss_13_map.pdf',bbox_inches='tight',pad_inches=0.05,dpi=10)
# --------------------------------------------------------------------------- #
# Ground-track lat/lon (converted rad -> deg) at the beam entry/exit samples,
# for the transmitter and each of the three receivers.
tx_beam_index_down_lat = math.degrees(lat_sgp4[tx_beam_index_down]);
tx_beam_index_down_lon = math.degrees(lon_sgp4[tx_beam_index_down]);
tx_beam_index_up_lat = math.degrees(lat_sgp4[tx_beam_index_up]);
tx_beam_index_up_lon = math.degrees(lon_sgp4[tx_beam_index_up]);
#earliest_pt_rx
rx_beam_index_down_lat = math.degrees(lat_sgp4[earliest_pt_rx]);
rx_beam_index_down_lon = math.degrees(lon_sgp4[earliest_pt_rx]);
rx_beam_index_up_lat = math.degrees(lat_sgp4[latest_pt_rx]);
rx_beam_index_up_lon = math.degrees(lon_sgp4[latest_pt_rx]);
rx1_beam_index_down_lat = math.degrees(lat_sgp4[earliest_pt_rx1]);
rx1_beam_index_down_lon = math.degrees(lon_sgp4[earliest_pt_rx1]);
rx1_beam_index_up_lat = math.degrees(lat_sgp4[latest_pt_rx1]);
rx1_beam_index_up_lon = math.degrees(lon_sgp4[latest_pt_rx1]);
rx2_beam_index_down_lat = math.degrees(lat_sgp4[earliest_pt_rx2]);
rx2_beam_index_down_lon = math.degrees(lon_sgp4[earliest_pt_rx2]);
rx2_beam_index_up_lat = math.degrees(lat_sgp4[latest_pt_rx2]);
rx2_beam_index_up_lon = math.degrees(lon_sgp4[latest_pt_rx2]);
# --------------------------------------------------------------------------- #
# Figure 2: zoomed map with great-circle lines from each site to the beam
# entry/exit ground points (gray = Tx, blue/orange/purple = Rx 0/1/2).
fig = plt.figure(2);
ax = fig.gca();
plt.rc('text', usetex=True)
plt.rc('font', family='serif');
plt.rc('font',family='helvetica');
params = {'legend.fontsize': 8,
          'legend.handlelength': 2}
plt.rcParams.update(params)
map = Basemap(llcrnrlon=3.0,llcrnrlat=-38.0,urcrnrlon=34.,urcrnrlat=-16.,resolution='i', projection='cass', lat_0 = 0.0, lon_0 = 0.0)
map.drawcoastlines()
lon =np.rad2deg(lon_sgp4);
lat = np.rad2deg(lat_sgp4);
x,y = map(lon[plt_start_index:earliest_pt+1], lat[plt_start_index:earliest_pt+1])
map.plot(x, y, color="blue", latlon=False,linewidth=1)
x,y = map(lon[tx_beam_index_down:tx_beam_index_up+1], lat[tx_beam_index_down:tx_beam_index_up+1])
map.plot(x, y, color="crimson", latlon=False,linewidth=2,label=r"%s" %str(tx_beam_index_down_epoch.isoformat())+'Z/'+str(tx_beam_index_up_epoch.isoformat())+'Z');
x,y = map(lon[tx_beam_index_up+1:plt_end_index+1], lat[tx_beam_index_up+1:plt_end_index+1])
map.plot(x, y, color="blue", latlon=False,linewidth=1)
x_denel,y_denel = map(lon_denel,lat_denel)
map.plot(x_denel,y_denel,marker='o',color='green'); # Denel Bredasdorp lat lon
x2,y2 = map(20,-34)
plt.annotate(r"\textbf{Tx}", xy=(x2, y2),color='green')
tx_beam_index_down_x,tx_beam_index_down_y = map(tx_beam_index_down_lon,tx_beam_index_down_lat )
tx_beam_index_up_x,tx_beam_index_up_y = map(tx_beam_index_up_lon,tx_beam_index_up_lat )
map.drawgreatcircle(tx_beam_index_down_lon,tx_beam_index_down_lat, lon_denel,lat_denel,linewidth=0.5,color='gray')
map.drawgreatcircle(tx_beam_index_up_lon,tx_beam_index_up_lat, lon_denel,lat_denel,linewidth=0.5,color='gray')
map.drawgreatcircle(rx_beam_index_down_lon,rx_beam_index_down_lat, lon_meerkat_00,lat_meerkat_00,linewidth=0.5,color='mediumblue')
map.drawgreatcircle(rx_beam_index_up_lon,rx_beam_index_up_lat,lon_meerkat_00,lat_meerkat_00,linewidth=0.5,color='mediumblue')
map.drawgreatcircle(rx1_beam_index_down_lon,rx1_beam_index_down_lat, lon_meerkat_01,lat_meerkat_01,linewidth=0.5,color='orangered')
map.drawgreatcircle(rx1_beam_index_up_lon,rx1_beam_index_up_lat,lon_meerkat_01,lat_meerkat_01,linewidth=0.5,color='orangered')
map.drawgreatcircle(rx2_beam_index_down_lon,rx2_beam_index_down_lat, lon_meerkat_02,lat_meerkat_02,linewidth=0.5,color='purple')
map.drawgreatcircle(rx2_beam_index_up_lon,rx2_beam_index_up_lat,lon_meerkat_02,lat_meerkat_02,linewidth=0.5,color='purple')
x,y = map(lon_meerkat_00,lat_meerkat_00)
map.plot(x,y,marker='o',color='blue'); # rx lat lon
x2,y2 = map(22,-31)
plt.annotate(r"\textbf{Rx}", xy=(x2, y2),color='blue');
plt.title(r'\textbf{Object %s trajectory during the interval %s}' %(norad_id,title_string), fontsize=12)
plt.legend(loc='upper right',title=r"Dwell-time interval");
ax.get_legend().get_title().set_fontsize('10')
fig.savefig('main_057_iss_13_map2.pdf',bbox_inches='tight',pad_inches=0.05,dpi=10)
| mit |
karan259/GrovePi | Software/Python/grove_water_sensor.py | 2 | 1885 | #!/usr/bin/env python
#
# GrovePi Example for using the Grove Water Sensor (http://www.seeedstudio.com/wiki/Grove_-_Water_Sensor)
#
# The GrovePi connects the Raspberry Pi and Grove sensors. You can learn more about GrovePi here: http://www.dexterindustries.com/GrovePi
#
# Have a question about this example? Ask on the forums here: http://forum.dexterindustries.com/c/grovepi
#
'''
## License
The MIT License (MIT)
GrovePi for the Raspberry Pi: an open source platform for connecting Grove Sensors to the Raspberry Pi.
Copyright (C) 2015 Dexter Industries
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
'''
import time
import grovepi
# Connect the Grove Water Sensor to digital port D2
# SIG,NC,VCC,GND
# The sensor's SIG pin is wired to GrovePi digital port D2.
water_sensor = 2
grovepi.pinMode(water_sensor,"INPUT")
# Poll the sensor's digital output every 0.5 s forever; IOError from the
# GrovePi I2C bus is reported and polling continues.
while True:
    try:
        print(grovepi.digitalRead(water_sensor))
        time.sleep(.5)
    except IOError:
        print ("Error")
| mit |
ifcharming/original2.0 | tools/vis-micro-hudson.py | 4 | 9316 | #!/usr/bin/env python
# This is a visualizer which pulls microbenchmark results from the MySQL
# databases and visualizes them. Four graphs will be generated per workload,
# latency graphs on single node and multiple nodes, and throughput graphs
# on single node and multiple nodes.
#
# Run it without any arguments to see what arguments are needed.
import sys
import os
import time
import datetime
import MySQLdb
import numpy as np
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import matplotlib.ticker as ticker
def COLORS(k):
    """Derive a deterministic RGB tuple from an integer key.

    Each channel is an int in [0, 254] scaled to a float in [0, 1), so the
    same series index always plots with the same colour.
    """
    red = (k ** 3) % 255
    green = (k * 100) % 255
    blue = (k * k) % 255
    return (red / 255.0, green / 255.0, blue / 255.0)
class Stat:
    """Base class holding a MySQL connection and a DictCursor.

    Subclasses issue their own queries through self.cursor; rows come back
    as dicts keyed by column name (MySQLdb.cursors.DictCursor).
    """
    def __init__(self, hostname, username, password, database):
        self.conn = MySQLdb.connect(host = hostname,
                                    user = username,
                                    passwd = password,
                                    db = database)
        self.cursor = self.conn.cursor(MySQLdb.cursors.DictCursor)

    def close(self):
        # Close the cursor before the connection, per MySQLdb convention.
        self.cursor.close()
        self.conn.close()
class LatencyStat(Stat):
    """Fetches per-run average client latency from the monitoring database."""

    # %s / %u placeholders are filled by string formatting in get_latencies
    # (not by parameterized queries -- inputs are trusted/internal here).
    LATENCIES = """
    SELECT startTime AS time, numHosts AS hosts, AVG(latencies) AS latency
    FROM ma_instances AS runs
    JOIN ma_clientInstances AS clients ON clusterStartTime = startTime
    JOIN (SELECT instanceId, AVG(clusterRoundtripAvg) AS latencies
          FROM ma_clientProcedureStats
          GROUP BY instanceId) AS stats ON stats.instanceId = clientInstanceId
    WHERE runs.startTime >= '%s'
          AND clients.applicationName = "Microbenchmark"
          AND clients.subApplicationName = "%s"
    GROUP BY startTime
    LIMIT %u
    """

    def get_latencies(self, workload_name, start_time, count):
        """Return the best (lowest) latency row per (date, host-count) pair.

        start_time is a millisecond epoch; rows with NULL latency are skipped.
        """
        res = []
        latencies = {}
        self.cursor.execute(self.LATENCIES % (start_time, workload_name, count))
        res = list(self.cursor.fetchall())
        for i in res:
            # DB stores milliseconds-since-epoch; convert to a date.
            i["time"] = datetime.date.fromtimestamp(i["time"] / 1000.0)
            key = (i["time"], i["hosts"])
            if i["latency"] == None:
                continue
            # Keep only the minimum latency seen for this (date, hosts) key.
            if key not in latencies \
               or i["latency"] < latencies[key]["latency"]:
                latencies[key] = i
        return latencies.values()
class ThroughputStat(Stat):
    """Fetches average transactions/second per host count and day."""

    THROUGHPUT = """
    SELECT resultid as id,
           hostcount as hosts,
           date(time) as time,
           avg(txnpersecond) as tps
    FROM results
    WHERE time >= '%s'
          AND benchmarkname = 'org.voltdb.benchmark.workloads.Generator'
          AND benchmarkoptions LIKE '%%workload=%s%%'
    GROUP BY hostcount, date(time)
    ORDER BY time DESC
    LIMIT %u
    """

    def get_throughputs(self, workload_name, time, count):
        """Return up to `count` throughput rows for `workload_name` since `time`."""
        throughput_map = {}  # NOTE(review): unused local, kept for fidelity
        self.cursor.execute(self.THROUGHPUT % (time, workload_name, count))
        return list(self.cursor.fetchall())
class WorkloadNames(Stat):
    """Lists the distinct microbenchmark workload names to visualize."""

    NAMES = """
    SELECT DISTINCT subApplicationName as names
    FROM ma_clientInstances
    WHERE applicationName = 'Microbenchmark' AND subApplicationName != 'FirstWorkload'
    """

    def get_names(self):
        """Return rows of {'names': <workload name>} dicts."""
        name_map = {}  # NOTE(review): unused local, kept for fidelity
        self.cursor.execute(self.NAMES)
        return list(self.cursor.fetchall())
class Plot:
    """Thin wrapper around a single matplotlib time-series figure.

    Construct, call plot() once per data series, then close() to format the
    date axis, draw the legend and save the PNG.
    """
    DPI = 100.0

    def __init__(self, title, xlabel, ylabel, filename, w, h):
        self.filename = filename
        self.legends = {}
        # 'x == None and a or x' idiom: defaults w/h when None was passed.
        w = w == None and 800 or w
        h = h == None and 300 or h
        fig = plt.figure(figsize=(w / self.DPI, h / self.DPI),
                         dpi=self.DPI)
        self.ax = fig.add_subplot(111)
        self.ax.set_title(title)
        plt.ylabel(ylabel)
        plt.xlabel(xlabel)
        fig.autofmt_xdate()

    def plot(self, x, y, color, legend):
        # x values are matplotlib date numbers; legend is the host count.
        self.ax.plot(x, y, linestyle="-", label=str(legend), marker="^",
                     markerfacecolor=color, markersize=10)

    def close(self):
        """Finalize axis formatting and write the figure to self.filename."""
        formatter = matplotlib.dates.DateFormatter("%b %d")
        self.ax.xaxis.set_major_formatter(formatter)
        plt.legend(loc=0)
        plt.savefig(self.filename, format="png", transparent=False,
                    bbox_inches="tight", pad_inches=0.2)
def parse_credentials(filename):
    """Parse JDBC-style connection URLs into a credentials dict.

    Each line of `filename` looks like
        <scheme>://<hostname>/<database>?user=<name>&password=<value>
    Lines whose user name starts with "monitor" populate the 'latency'
    entry; all others populate 'throughput'. The last line's hostname wins.

    Returns a dict with keys 'hostname', 'latency' and 'throughput', where
    the latter two map {'user': ..., 'password': ..., 'database': ...}.
    """
    credentials = {}
    # Fixed: the file was opened without a context manager, leaking the
    # handle if parsing raised; 'with' guarantees closure.
    with open(filename, "r") as fd:
        for i in fd:
            line = i.strip().split("?")
            credentials["hostname"] = line[0].split("/")[-2]
            db = line[0].split("/")[-1]
            pair = line[1].split("&")
            # strip("\\") drops the shell-escaping backslashes around '='.
            user = pair[0].strip("\\").split("=")
            password = pair[1].strip("\\").split("=")
            if user[1].startswith("monitor"):
                credentials["latency"] = {user[0]: user[1],
                                          password[0]: password[1],
                                          "database": db}
            else:
                credentials["throughput"] = {user[0]: user[1],
                                             password[0]: password[1],
                                             "database": db}
    return credentials
def usage():
    """Print command-line help for the visualizer (Python 2 print statements)."""
    print "Usage:"
    print "\t", sys.argv[0], "credential_file output_dir filename_base" \
        " [numDays] [width] [height] "
    print
    print "\t", "number of past days to take into account"
    print "\t", "width in pixels"
    print "\t", "height in pixels"
def main():
    """Entry point: pull latency/throughput stats and emit PNG graphs.

    argv: credential_file output_dir filename_base [numDays] [width] [height].
    For every workload, up to four graphs are produced: single-node and
    multi-node latency, single-node and multi-node throughput.
    """
    if len(sys.argv) < 4:
        usage()
        exit(-1)
    if not os.path.exists(sys.argv[2]):
        print sys.argv[2], "does not exist"
        exit(-1)
    credentials = parse_credentials(sys.argv[1])
    path = os.path.join(sys.argv[2], sys.argv[3])
    numDays = 30
    width = None
    height = None
    if len(sys.argv) >= 5:
        numDays = int(sys.argv[4])
    if len(sys.argv) >= 6:
        width = int(sys.argv[5])
    if len(sys.argv) >= 7:
        height = int(sys.argv[6])
    workload_names = WorkloadNames(credentials["hostname"],
                                   credentials["latency"]["user"],
                                   credentials["latency"]["password"],
                                   credentials["latency"]["database"])
    latency_stat = LatencyStat(credentials["hostname"],
                               credentials["latency"]["user"],
                               credentials["latency"]["password"],
                               credentials["latency"]["database"])
    volt_stat = ThroughputStat(credentials["hostname"],
                               credentials["throughput"]["user"],
                               credentials["throughput"]["password"],
                               credentials["throughput"]["database"])
    # NOTE(review): 'timedelta' shadows datetime.timedelta's attribute name.
    timedelta = datetime.timedelta(days=numDays)
    starttime = datetime.datetime.now() - timedelta
    # Millisecond epoch, matching the monitoring DB's startTime column.
    timestamp = time.mktime(starttime.timetuple()) * 1000.0
    names = workload_names.get_names()
    for n in names:
        name = n["names"]
        latencies = latency_stat.get_latencies(name, timestamp, 900)
        throughput = volt_stat.get_throughputs(name, starttime, 900)
        # Group latency samples by host count for per-cluster-size series.
        latency_map = {}
        latencies.sort(key=lambda x: x["time"])
        for v in latencies:
            if v["time"] == None or v["latency"] == None:
                continue
            if v["hosts"] not in latency_map:
                latency_map[v["hosts"]] = {"time": [], "latency": []}
            datenum = matplotlib.dates.date2num(v["time"])
            latency_map[v["hosts"]]["time"].append(datenum)
            latency_map[v["hosts"]]["latency"].append(v["latency"])
        # Single-node series gets its own graph; pop(1) removes it from
        # the multi-node graph below.
        if 1 in latency_map:
            pl = Plot("Average Latency on Single Node for Workload: " + name, "Time", "Latency (ms)",
                      path + "-latency-single-" + name + ".png",
                      width, height)
            v = latency_map.pop(1)
            pl.plot(v["time"], v["latency"], COLORS(1), 1)
            pl.close()
        if len(latency_map) > 0:
            pl = Plot("Average Latency for Workload: " + name, "Time", "Latency (ms)",
                      path + "-latency-" + name + ".png", width, height)
            for k in latency_map.iterkeys():
                v = latency_map[k]
                pl.plot(v["time"], v["latency"], COLORS(k), k)
            pl.close()
        # Same grouping and single/multi-node split for throughput.
        throughput_map = {}
        throughput.sort(key=lambda x: x["id"])
        for v in throughput:
            if v["hosts"] not in throughput_map:
                throughput_map[v["hosts"]] = {"time": [], "tps": []}
            datenum = matplotlib.dates.date2num(v["time"])
            throughput_map[v["hosts"]]["time"].append(datenum)
            throughput_map[v["hosts"]]["tps"].append(v["tps"])
        if 1 in throughput_map:
            pl = Plot("Performance on Single Node for Workload: " + name, "Time", "Throughput (txns/sec)",
                      path + "-throughput-single-" + name + ".png",
                      width, height)
            v = throughput_map.pop(1)
            pl.plot(v["time"], v["tps"], COLORS(1), 1)
            pl.close()
        if len(throughput_map) > 0:
            pl = Plot("Performance for Workload: " + name, "Time", "Throughput (txns/sec)",
                      path + "-throughput-" + name + ".png", width, height)
            for k in throughput_map.iterkeys():
                v = throughput_map[k]
                pl.plot(v["time"], v["tps"], COLORS(k), k)
            pl.close()
    latency_stat.close()
    volt_stat.close()
# Run only when executed as a script, not on import.
if __name__ == "__main__":
    main()
| gpl-3.0 |
whaleygeek/punchcard_reader | cardreader/serial/urlhandler/protocol_loop.py | 141 | 9516 | #! python
#
# Python Serial Port Extension for Win32, Linux, BSD, Jython
# see __init__.py
#
# This module implements a loop back connection receiving itself what it sent.
#
# The purpose of this module is.. well... You can run the unit tests with it.
# and it was so easy to implement ;-)
#
# (C) 2001-2011 Chris Liechti <cliechti@gmx.net>
# this is distributed under a free software license, see license.txt
#
# URL format: loop://[option[/option...]]
# options:
# - "debug" print diagnostic messages
from serial.serialutil import *
import threading
import time
import logging
# map log level names to constants. used in fromURL()
# Level names accepted by the loop:// URL option 'logging=<level>'.
LOGGER_LEVELS = {
    'debug': logging.DEBUG,
    'info': logging.INFO,
    'warning': logging.WARNING,
    'error': logging.ERROR,
    }
class LoopbackSerial(SerialBase):
    """Serial port implementation that simulates a loop back connection in plain software.

    Bytes passed to write() are appended to an internal bytearray and handed
    back by read(); a threading.Lock serializes buffer access so reader and
    writer may live on different threads. RTS mirrors to CTS and DTR to DSR.
    """

    BAUDRATES = (50, 75, 110, 134, 150, 200, 300, 600, 1200, 1800, 2400, 4800,
                 9600, 19200, 38400, 57600, 115200)

    def open(self):
        """Open port with current settings. This may throw a SerialException
        if the port cannot be opened."""
        if self._isOpen:
            raise SerialException("Port is already open.")
        self.logger = None
        self.buffer_lock = threading.Lock()
        self.loop_buffer = bytearray()
        self.cts = False
        self.dsr = False
        if self._port is None:
            raise SerialException("Port must be configured before it can be used.")
        # not that there is anything to open, but the function applies the
        # options found in the URL
        self.fromURL(self.port)
        # not that there anything to configure...
        self._reconfigurePort()
        # all things set up get, now a clean start
        self._isOpen = True
        if not self._rtscts:
            self.setRTS(True)
            self.setDTR(True)
        self.flushInput()
        self.flushOutput()

    def _reconfigurePort(self):
        """Set communication parameters on opened port. for the loop://
        protocol all settings are ignored!"""
        # not that's it of any real use, but it helps in the unit tests
        # NOTE: 'long' is the Python 2 integer type.
        if not isinstance(self._baudrate, (int, long)) or not 0 < self._baudrate < 2**32:
            raise ValueError("invalid baudrate: %r" % (self._baudrate))
        if self.logger:
            self.logger.info('_reconfigurePort()')

    def close(self):
        """Close port"""
        if self._isOpen:
            self._isOpen = False
            # in case of quick reconnects, give the server some time
            time.sleep(0.3)

    def makeDeviceName(self, port):
        # loop:// has no numeric device naming scheme.
        raise SerialException("there is no sensible way to turn numbers into URLs")

    def fromURL(self, url):
        """extract host and port from an URL string"""
        if url.lower().startswith("loop://"): url = url[7:]
        try:
            # process options now, directly altering self
            for option in url.split('/'):
                if '=' in option:
                    option, value = option.split('=', 1)
                else:
                    value = None
                if not option:
                    pass
                elif option == 'logging':
                    logging.basicConfig()   # XXX is that good to call it here?
                    self.logger = logging.getLogger('pySerial.loop')
                    self.logger.setLevel(LOGGER_LEVELS[value])
                    self.logger.debug('enabled logging')
                else:
                    raise ValueError('unknown option: %r' % (option,))
        except ValueError, e:
            raise SerialException('expected a string in the form "[loop://][option[/option...]]": %s' % e)

    #  -  -  -  -  -  -  -  -  -  -  -  -  -  -  -  -  -  -  -  -  -  -  -  -

    def inWaiting(self):
        """Return the number of characters currently in the input buffer."""
        if not self._isOpen: raise portNotOpenError
        if self.logger:
            # attention the logged value can differ from return value in
            # threaded environments...
            self.logger.debug('inWaiting() -> %d' % (len(self.loop_buffer),))
        return len(self.loop_buffer)

    def read(self, size=1):
        """Read size bytes from the serial port. If a timeout is set it may
        return less characters as requested. With no timeout it will block
        until the requested number of bytes is read."""
        if not self._isOpen: raise portNotOpenError
        if self._timeout is not None:
            timeout = time.time() + self._timeout
        else:
            timeout = None
        data = bytearray()
        while size > 0:
            # Take at most 'size' bytes from the shared buffer under the lock.
            self.buffer_lock.acquire()
            try:
                block = to_bytes(self.loop_buffer[:size])
                del self.loop_buffer[:size]
            finally:
                self.buffer_lock.release()
            data += block
            size -= len(block)
            # check for timeout now, after data has been read.
            # useful for timeout = 0 (non blocking) read
            if timeout and time.time() > timeout:
                break
        return bytes(data)

    def write(self, data):
        """Output the given string over the serial port. Can block if the
        connection is blocked. May raise SerialException if the connection is
        closed."""
        if not self._isOpen: raise portNotOpenError
        # ensure we're working with bytes
        data = to_bytes(data)
        # calculate aprox time that would be used to send the data
        # (10 bits per byte: start + 8 data + stop)
        time_used_to_send = 10.0*len(data) / self._baudrate
        # when a write timeout is configured check if we would be successful
        # (not sending anything, not even the part that would have time)
        if self._writeTimeout is not None and time_used_to_send > self._writeTimeout:
            time.sleep(self._writeTimeout) # must wait so that unit test succeeds
            raise writeTimeoutError
        self.buffer_lock.acquire()
        try:
            self.loop_buffer += data
        finally:
            self.buffer_lock.release()
        return len(data)

    def flushInput(self):
        """Clear input buffer, discarding all that is in the buffer."""
        if not self._isOpen: raise portNotOpenError
        if self.logger:
            self.logger.info('flushInput()')
        self.buffer_lock.acquire()
        try:
            del self.loop_buffer[:]
        finally:
            self.buffer_lock.release()

    def flushOutput(self):
        """Clear output buffer, aborting the current output and
        discarding all that is in the buffer."""
        # Nothing buffered on the output side of a loop back -- log only.
        if not self._isOpen: raise portNotOpenError
        if self.logger:
            self.logger.info('flushOutput()')

    def sendBreak(self, duration=0.25):
        """Send break condition. Timed, returns to idle state after given
        duration."""
        # No-op for loop back; only the open check applies.
        if not self._isOpen: raise portNotOpenError

    def setBreak(self, level=True):
        """Set break: Controls TXD. When active, to transmitting is
        possible."""
        if not self._isOpen: raise portNotOpenError
        if self.logger:
            self.logger.info('setBreak(%r)' % (level,))

    def setRTS(self, level=True):
        """Set terminal status line: Request To Send"""
        # Looped back: RTS directly drives the CTS this object reports.
        if not self._isOpen: raise portNotOpenError
        if self.logger:
            self.logger.info('setRTS(%r) -> state of CTS' % (level,))
        self.cts = level

    def setDTR(self, level=True):
        """Set terminal status line: Data Terminal Ready"""
        # Looped back: DTR directly drives the DSR this object reports.
        if not self._isOpen: raise portNotOpenError
        if self.logger:
            self.logger.info('setDTR(%r) -> state of DSR' % (level,))
        self.dsr = level

    def getCTS(self):
        """Read terminal status line: Clear To Send"""
        if not self._isOpen: raise portNotOpenError
        if self.logger:
            self.logger.info('getCTS() -> state of RTS (%r)' % (self.cts,))
        return self.cts

    def getDSR(self):
        """Read terminal status line: Data Set Ready"""
        if not self._isOpen: raise portNotOpenError
        if self.logger:
            self.logger.info('getDSR() -> state of DTR (%r)' % (self.dsr,))
        return self.dsr

    def getRI(self):
        """Read terminal status line: Ring Indicator"""
        # No ring source in a loop back; always False.
        if not self._isOpen: raise portNotOpenError
        if self.logger:
            self.logger.info('returning dummy for getRI()')
        return False

    def getCD(self):
        """Read terminal status line: Carrier Detect"""
        # Loop back always "has carrier".
        if not self._isOpen: raise portNotOpenError
        if self.logger:
            self.logger.info('returning dummy for getCD()')
        return True

    # - - - platform specific - - -
    # None so far
# assemble Serial class with the platform specific implementation and the base
# for file-like behavior. for Python 2.6 and newer, that provide the new I/O
# library, derive from io.RawIOBase
# Pick the file-like base at import time: io.RawIOBase where the io module
# exists (Python >= 2.6), else pySerial's own FileLike emulation.
try:
    import io
except ImportError:
    # classic version with our own file-like emulation
    class Serial(LoopbackSerial, FileLike):
        pass
else:
    # io library present
    class Serial(LoopbackSerial, io.RawIOBase):
        pass
# simple client test
# simple client test: write through the loop back and read the echo.
if __name__ == '__main__':
    import sys
    s = Serial('loop://')
    sys.stdout.write('%s\n' % s)
    sys.stdout.write("write...\n")
    s.write("hello\n")
    s.flush()
    sys.stdout.write("read: %s\n" % s.read(5))
    s.close()
| mit |
mrquim/mrquimrepo | repo/script.module.youtube.dl/lib/youtube_dl/extractor/vyborymos.py | 73 | 2031 | # coding: utf-8
from __future__ import unicode_literals
from .common import InfoExtractor
from ..compat import compat_str
class VyboryMosIE(InfoExtractor):
    """Extractor for live polling-station webcams on vybory.mos.ru.

    Matches both the fragment-style precinct page and the channels API URL;
    the numeric station id is captured as the 'id' group.
    """
    _VALID_URL = r'https?://vybory\.mos\.ru/(?:#precinct/|account/channels\?.*?\bstation_id=)(?P<id>\d+)'
    _TESTS = [{
        'url': 'http://vybory.mos.ru/#precinct/13636',
        'info_dict': {
            'id': '13636',
            'ext': 'mp4',
            'title': 're:^Участковая избирательная комиссия №2231 [0-9]{4}-[0-9]{2}-[0-9]{2} [0-9]{2}:[0-9]{2}$',
            'description': 'Россия, Москва, улица Введенского, 32А',
            'is_live': True,
        },
        'params': {
            'skip_download': True,
        }
    }, {
        'url': 'http://vybory.mos.ru/account/channels?station_id=13636',
        'only_matching': True,
    }]

    def _real_extract(self, url):
        station_id = self._match_id(url)
        # Each channel entry is [sid, hosts, name, _]; every (camera, host)
        # pair becomes one HLS format.
        channels = self._download_json(
            'http://vybory.mos.ru/account/channels?station_id=%s' % station_id,
            station_id, 'Downloading channels JSON')
        formats = []
        for cam_num, (sid, hosts, name, _) in enumerate(channels, 1):
            for num, host in enumerate(hosts, 1):
                formats.append({
                    'url': 'http://%s/master.m3u8?sid=%s' % (host, sid),
                    'ext': 'mp4',
                    'format_id': 'camera%d-host%d' % (cam_num, num),
                    'format_note': '%s, %s' % (name, host),
                })
        # Station metadata is sharded by the first three digits of the id;
        # fatal=False because playback works without it.
        info = self._download_json(
            'http://vybory.mos.ru/json/voting_stations/%s/%s.json'
            % (compat_str(station_id)[:3], station_id),
            station_id, 'Downloading station JSON', fatal=False)
        return {
            'id': station_id,
            'title': self._live_title(info['name'] if info else station_id),
            'description': info.get('address'),
            'is_live': True,
            'formats': formats,
        }
| gpl-2.0 |
tanglei528/horizon | openstack_dashboard/dashboards/settings/dashboard.py | 7 | 1119 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 OpenStack Foundation
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.utils.translation import ugettext_lazy as _
import horizon
class Settings(horizon.Dashboard):
    """Per-user settings dashboard: profile ('user') and password panels."""
    name = _("Settings")
    slug = "settings"
    panels = ('user', 'password', )
    default_panel = 'user'

    def nav(self, context):
        # Show this dashboard in the navigation only while it is the one
        # currently being displayed.
        active = context['request'].horizon.get('dashboard', None)
        return bool(active and active.slug == self.slug)
horizon.register(Settings)
| apache-2.0 |
mgax/aleph | aleph/views/__init__.py | 3 | 1689 | from colander import Invalid
from flask import request
from apikit import jsonify
from aleph.core import app, login_manager
from aleph.views.ui import ui # noqa
from aleph.assets import assets # noqa
from aleph.model import User
from aleph.views.data_api import blueprint as data_api
from aleph.views.search_api import blueprint as search_api
from aleph.views.graph_api import blueprint as graph_api
from aleph.views.sessions_api import blueprint as sessions_api
from aleph.views.users_api import blueprint as users_api
from aleph.views.lists_api import blueprint as lists_api
from aleph.views.entities_api import blueprint as entities_api
from aleph.views.exports_api import blueprint as exports_api
from aleph.views.sources_api import blueprint as sources_api
from aleph.views.crawlers_api import blueprint as crawlers_api
# Mount every API blueprint on the shared Flask application instance.
app.register_blueprint(data_api)
app.register_blueprint(search_api)
app.register_blueprint(graph_api)
app.register_blueprint(sessions_api)
app.register_blueprint(users_api)
app.register_blueprint(lists_api)
app.register_blueprint(entities_api)
app.register_blueprint(exports_api)
app.register_blueprint(sources_api)
app.register_blueprint(crawlers_api)
@login_manager.request_loader
def load_user_from_request(request):
    """Resolve the requesting user from an API key.

    The key may arrive in the ``X-API-Key`` header or as an ``api_key``
    query parameter (a non-empty header takes precedence).  Returns
    ``None`` — i.e. anonymous — when no key is supplied.
    """
    header_key = request.headers.get('X-API-Key')
    api_key = header_key if header_key else request.args.get('api_key')
    if api_key is None:
        return None
    return User.by_api_key(api_key)
@app.before_request
def before():
    # Fresh per-request caches for authorization lookups on sources and
    # lists; views fill these lazily during request handling.
    request.authz_sources = {}
    request.authz_lists = {}
@app.errorhandler(Invalid)
def handle_invalid(exc):
    """Render colander validation failures as a 400 JSON response."""
    # Clear the root node's name before serializing the error tree.
    exc.node.name = ''
    return jsonify({'status': 400, 'errors': exc.asdict()}, status=400)
| mit |
pigshell/nhnick | src/breakpad/src/third_party/protobuf/protobuf/python/google/protobuf/internal/encoder.py | 484 | 25695 | # Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# http://code.google.com/p/protobuf/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Code for encoding protocol message primitives.
Contains the logic for encoding every logical protocol field type
into one of the 5 physical wire types.
This code is designed to push the Python interpreter's performance to the
limits.
The basic idea is that at startup time, for every field (i.e. every
FieldDescriptor) we construct two functions: a "sizer" and an "encoder". The
sizer takes a value of this field's type and computes its byte size. The
encoder takes a writer function and a value. It encodes the value into byte
strings and invokes the writer function to write those strings. Typically the
writer function is the write() method of a cStringIO.
We try to do as much work as possible when constructing the writer and the
sizer rather than when calling them. In particular:
* We copy any needed global functions to local variables, so that we do not need
to do costly global table lookups at runtime.
* Similarly, we try to do any attribute lookups at startup time if possible.
* Every field's tag is encoded to bytes at startup, since it can't change at
runtime.
* Whatever component of the field size we can compute at startup, we do.
* We *avoid* sharing code if doing so would make the code slower and not sharing
does not burden us too much. For example, encoders for repeated fields do
not just call the encoders for singular fields in a loop because this would
add an extra function call overhead for every loop iteration; instead, we
manually inline the single-value encoder into the loop.
* If a Python function lacks a return statement, Python actually generates
instructions to pop the result of the last statement off the stack, push
None onto the stack, and then return that. If we really don't care what
value is returned, then we can save two instructions by returning the
result of the last statement. It looks funny but it helps.
* We assume that type and bounds checking has happened at a higher level.
"""
__author__ = 'kenton@google.com (Kenton Varda)'
import struct
from google.protobuf.internal import wire_format
# This will overflow and thus become IEEE-754 "infinity". We would use
# "float('inf')" but it doesn't work on Windows pre-Python-2.6.
_POS_INF = 1e10000
_NEG_INF = -_POS_INF
def _VarintSize(value):
"""Compute the size of a varint value."""
if value <= 0x7f: return 1
if value <= 0x3fff: return 2
if value <= 0x1fffff: return 3
if value <= 0xfffffff: return 4
if value <= 0x7ffffffff: return 5
if value <= 0x3ffffffffff: return 6
if value <= 0x1ffffffffffff: return 7
if value <= 0xffffffffffffff: return 8
if value <= 0x7fffffffffffffff: return 9
return 10
def _SignedVarintSize(value):
"""Compute the size of a signed varint value."""
if value < 0: return 10
if value <= 0x7f: return 1
if value <= 0x3fff: return 2
if value <= 0x1fffff: return 3
if value <= 0xfffffff: return 4
if value <= 0x7ffffffff: return 5
if value <= 0x3ffffffffff: return 6
if value <= 0x1ffffffffffff: return 7
if value <= 0xffffffffffffff: return 8
if value <= 0x7fffffffffffffff: return 9
return 10
def _TagSize(field_number):
  """Returns the number of bytes required to serialize a tag with this field
  number."""
  # Just pass in type 0, since the type won't affect the tag+type size
  # (the wire-type bits live in the low 3 bits of the tag varint).
  return _VarintSize(wire_format.PackTag(field_number, 0))
# --------------------------------------------------------------------
# In this section we define some generic sizers. Each of these functions
# takes parameters specific to a particular field type, e.g. int32 or fixed64.
# It returns another function which in turn takes parameters specific to a
# particular field, e.g. the field number and whether it is repeated or packed.
# Look at the next section to see how these are used.
def _SimpleSizer(compute_value_size):
  """A sizer which uses the function compute_value_size to compute the size of
  each value.  Typically compute_value_size is _VarintSize.

  Returns a "sizer constructor": a callable taking
  (field_number, is_repeated, is_packed) and returning the field sizer.
  """

  def SpecificSizer(field_number, is_repeated, is_packed):
    tag_size = _TagSize(field_number)
    if is_packed:
      local_VarintSize = _VarintSize  # bind global to local for loop speed
      def PackedFieldSize(value):
        # Packed: one tag, a varint byte-length prefix, then the payload.
        result = 0
        for element in value:
          result += compute_value_size(element)
        return result + local_VarintSize(result) + tag_size
      return PackedFieldSize
    elif is_repeated:
      def RepeatedFieldSize(value):
        # Non-packed repeated: every element carries its own tag.
        result = tag_size * len(value)
        for element in value:
          result += compute_value_size(element)
        return result
      return RepeatedFieldSize
    else:
      def FieldSize(value):
        return tag_size + compute_value_size(value)
      return FieldSize

  return SpecificSizer
def _ModifiedSizer(compute_value_size, modify_value):
  """Like SimpleSizer, but modify_value is invoked on each value before it is
  passed to compute_value_size.  modify_value is typically ZigZagEncode."""

  def SpecificSizer(field_number, is_repeated, is_packed):
    tag_size = _TagSize(field_number)
    if is_packed:
      local_VarintSize = _VarintSize  # bind global to local for loop speed
      def PackedFieldSize(value):
        # Packed: one tag, a varint byte-length prefix, then the payload.
        result = 0
        for element in value:
          result += compute_value_size(modify_value(element))
        return result + local_VarintSize(result) + tag_size
      return PackedFieldSize
    elif is_repeated:
      def RepeatedFieldSize(value):
        # Non-packed repeated: every element carries its own tag.
        result = tag_size * len(value)
        for element in value:
          result += compute_value_size(modify_value(element))
        return result
      return RepeatedFieldSize
    else:
      def FieldSize(value):
        return tag_size + compute_value_size(modify_value(value))
      return FieldSize

  return SpecificSizer
def _FixedSizer(value_size):
  """Like _SimpleSizer except for a fixed-size field.  The input is the size
  of one value.

  Because every value is value_size bytes, all three sizers reduce to
  simple arithmetic — no per-element size computation is needed.
  """

  def SpecificSizer(field_number, is_repeated, is_packed):
    tag_size = _TagSize(field_number)
    if is_packed:
      local_VarintSize = _VarintSize
      def PackedFieldSize(value):
        result = len(value) * value_size
        return result + local_VarintSize(result) + tag_size
      return PackedFieldSize
    elif is_repeated:
      # Precompute the constant per-element cost (tag + payload).
      element_size = value_size + tag_size
      def RepeatedFieldSize(value):
        return len(value) * element_size
      return RepeatedFieldSize
    else:
      # The whole field size is a startup-time constant.
      field_size = value_size + tag_size
      def FieldSize(value):
        return field_size
      return FieldSize

  return SpecificSizer
# ====================================================================
# Here we declare a sizer constructor for each field type. Each "sizer
# constructor" is a function that takes (field_number, is_repeated, is_packed)
# as parameters and returns a sizer, which in turn takes a field value as
# a parameter and returns its encoded size.
# int32/int64/enum use the *signed* varint size, so negative values count
# as the full 10 bytes; unsigned types use the plain varint size.
Int32Sizer = Int64Sizer = EnumSizer = _SimpleSizer(_SignedVarintSize)

UInt32Sizer = UInt64Sizer = _SimpleSizer(_VarintSize)

# sint32/sint64 values are ZigZag-transformed before sizing.
SInt32Sizer = SInt64Sizer = _ModifiedSizer(
    _SignedVarintSize, wire_format.ZigZagEncode)

# Fixed-width wire types: 4 bytes for fixed32/sfixed32/float, 8 bytes for
# fixed64/sfixed64/double, and a single byte for bool.
Fixed32Sizer = SFixed32Sizer = FloatSizer = _FixedSizer(4)
Fixed64Sizer = SFixed64Sizer = DoubleSizer = _FixedSizer(8)
BoolSizer = _FixedSizer(1)
def StringSizer(field_number, is_repeated, is_packed):
  """Returns a sizer for a string field.

  Strings are length-delimited; the size counts UTF-8 bytes, so each
  value must be encoded before it can be measured.
  """

  tag_size = _TagSize(field_number)
  local_VarintSize = _VarintSize
  local_len = len
  assert not is_packed  # length-delimited fields are never packed
  if is_repeated:
    def RepeatedFieldSize(value):
      result = tag_size * len(value)
      for element in value:
        l = local_len(element.encode('utf-8'))
        result += local_VarintSize(l) + l
      return result
    return RepeatedFieldSize
  else:
    def FieldSize(value):
      l = local_len(value.encode('utf-8'))
      return tag_size + local_VarintSize(l) + l
    return FieldSize
def BytesSizer(field_number, is_repeated, is_packed):
  """Returns a sizer for a bytes field.

  Like StringSizer, but values are already raw bytes so no encoding pass
  is needed before measuring.
  """

  tag_size = _TagSize(field_number)
  local_VarintSize = _VarintSize
  local_len = len
  assert not is_packed  # length-delimited fields are never packed
  if is_repeated:
    def RepeatedFieldSize(value):
      result = tag_size * len(value)
      for element in value:
        l = local_len(element)
        result += local_VarintSize(l) + l
      return result
    return RepeatedFieldSize
  else:
    def FieldSize(value):
      l = local_len(value)
      return tag_size + local_VarintSize(l) + l
    return FieldSize
def GroupSizer(field_number, is_repeated, is_packed):
  """Returns a sizer for a group field.

  Groups are bracketed by a start tag and an end tag rather than a length
  prefix, hence the doubled tag size per value.
  """

  tag_size = _TagSize(field_number) * 2
  assert not is_packed  # groups are never packed
  if is_repeated:
    def RepeatedFieldSize(value):
      result = tag_size * len(value)
      for element in value:
        result += element.ByteSize()
      return result
    return RepeatedFieldSize
  else:
    def FieldSize(value):
      return tag_size + value.ByteSize()
    return FieldSize
def MessageSizer(field_number, is_repeated, is_packed):
  """Returns a sizer for a message field.

  Embedded messages are length-delimited: tag, varint byte length, then
  the serialized submessage (whose size the submessage reports itself).
  """

  tag_size = _TagSize(field_number)
  local_VarintSize = _VarintSize
  assert not is_packed  # length-delimited fields are never packed
  if is_repeated:
    def RepeatedFieldSize(value):
      result = tag_size * len(value)
      for element in value:
        l = element.ByteSize()
        result += local_VarintSize(l) + l
      return result
    return RepeatedFieldSize
  else:
    def FieldSize(value):
      l = value.ByteSize()
      return tag_size + local_VarintSize(l) + l
    return FieldSize
# --------------------------------------------------------------------
# MessageSet is special.
def MessageSetItemSizer(field_number):
  """Returns a sizer for extensions of MessageSet.

  The message set message looks like this:
    message MessageSet {
      repeated group Item = 1 {
        required int32 type_id = 2;
        required string message = 3;
      }
    }
  """
  # Everything except the nested message's length prefix and payload is a
  # per-field constant, so compute it once at startup: the Item group's
  # start/end tags, the type_id tag and value, and the message field's tag.
  static_size = (_TagSize(1) * 2 + _TagSize(2) + _VarintSize(field_number) +
                 _TagSize(3))
  local_VarintSize = _VarintSize

  def FieldSize(value):
    l = value.ByteSize()
    return static_size + local_VarintSize(l) + l

  return FieldSize
# ====================================================================
# Encoders!
def _VarintEncoder():
"""Return an encoder for a basic varint value (does not include tag)."""
local_chr = chr
def EncodeVarint(write, value):
bits = value & 0x7f
value >>= 7
while value:
write(local_chr(0x80|bits))
bits = value & 0x7f
value >>= 7
return write(local_chr(bits))
return EncodeVarint
def _SignedVarintEncoder():
"""Return an encoder for a basic signed varint value (does not include
tag)."""
local_chr = chr
def EncodeSignedVarint(write, value):
if value < 0:
value += (1 << 64)
bits = value & 0x7f
value >>= 7
while value:
write(local_chr(0x80|bits))
bits = value & 0x7f
value >>= 7
return write(local_chr(bits))
return EncodeSignedVarint
# Module-level encoder instances shared by every constructor below.
_EncodeVarint = _VarintEncoder()
_EncodeSignedVarint = _SignedVarintEncoder()
def _VarintBytes(value):
  """Encode the given integer as a varint and return the bytes.  This is only
  called at startup time so it doesn't need to be fast."""

  # Collect the per-byte writes in a list and join them into one string.
  pieces = []
  _EncodeVarint(pieces.append, value)
  return "".join(pieces)
def TagBytes(field_number, wire_type):
  """Encode the given tag and return the bytes.  Only called at startup.

  The tag combines the field number with the wire type via
  wire_format.PackTag and is serialized as a varint.
  """
  return _VarintBytes(wire_format.PackTag(field_number, wire_type))
# --------------------------------------------------------------------
# As with sizers (see above), we have a number of common encoder
# implementations.
def _SimpleEncoder(wire_type, encode_value, compute_value_size):
  """Return a constructor for an encoder for fields of a particular type.

  Args:
      wire_type:  The field's wire type, for encoding tags.
      encode_value:  A function which encodes an individual value, e.g.
        _EncodeVarint().
      compute_value_size:  A function which computes the size of an individual
        value, e.g. _VarintSize().
  """

  def SpecificEncoder(field_number, is_repeated, is_packed):
    if is_packed:
      tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
      local_EncodeVarint = _EncodeVarint
      def EncodePackedField(write, value):
        write(tag_bytes)
        # First pass: total payload size for the length prefix...
        size = 0
        for element in value:
          size += compute_value_size(element)
        local_EncodeVarint(write, size)
        # ...second pass: the values themselves, with no per-element tags.
        for element in value:
          encode_value(write, element)
      return EncodePackedField
    elif is_repeated:
      tag_bytes = TagBytes(field_number, wire_type)
      def EncodeRepeatedField(write, value):
        for element in value:
          write(tag_bytes)
          encode_value(write, element)
      return EncodeRepeatedField
    else:
      tag_bytes = TagBytes(field_number, wire_type)
      def EncodeField(write, value):
        write(tag_bytes)
        return encode_value(write, value)
      return EncodeField

  return SpecificEncoder
def _ModifiedEncoder(wire_type, encode_value, compute_value_size, modify_value):
  """Like SimpleEncoder but additionally invokes modify_value on every value
  before passing it to encode_value.  Usually modify_value is ZigZagEncode."""

  def SpecificEncoder(field_number, is_repeated, is_packed):
    if is_packed:
      tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
      local_EncodeVarint = _EncodeVarint
      def EncodePackedField(write, value):
        write(tag_bytes)
        # First pass: total payload size (of the *modified* values) for the
        # length prefix; second pass: the modified values themselves.
        size = 0
        for element in value:
          size += compute_value_size(modify_value(element))
        local_EncodeVarint(write, size)
        for element in value:
          encode_value(write, modify_value(element))
      return EncodePackedField
    elif is_repeated:
      tag_bytes = TagBytes(field_number, wire_type)
      def EncodeRepeatedField(write, value):
        for element in value:
          write(tag_bytes)
          encode_value(write, modify_value(element))
      return EncodeRepeatedField
    else:
      tag_bytes = TagBytes(field_number, wire_type)
      def EncodeField(write, value):
        write(tag_bytes)
        return encode_value(write, modify_value(value))
      return EncodeField

  return SpecificEncoder
def _StructPackEncoder(wire_type, format):
  """Return a constructor for an encoder for a fixed-width field.

  Args:
      wire_type:  The field's wire type, for encoding tags.
      format:  The format string to pass to struct.pack().
  """

  value_size = struct.calcsize(format)

  def SpecificEncoder(field_number, is_repeated, is_packed):
    local_struct_pack = struct.pack  # bind to a local for loop speed
    if is_packed:
      tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
      local_EncodeVarint = _EncodeVarint
      def EncodePackedField(write, value):
        write(tag_bytes)
        # Fixed-width values: the payload length is simply count * size.
        local_EncodeVarint(write, len(value) * value_size)
        for element in value:
          write(local_struct_pack(format, element))
      return EncodePackedField
    elif is_repeated:
      tag_bytes = TagBytes(field_number, wire_type)
      def EncodeRepeatedField(write, value):
        for element in value:
          write(tag_bytes)
          write(local_struct_pack(format, element))
      return EncodeRepeatedField
    else:
      tag_bytes = TagBytes(field_number, wire_type)
      def EncodeField(write, value):
        write(tag_bytes)
        return write(local_struct_pack(format, value))
      return EncodeField

  return SpecificEncoder
def _FloatingPointEncoder(wire_type, format):
  """Return a constructor for an encoder for float fields.

  This is like StructPackEncoder, but catches errors that may be due to
  passing non-finite floating-point values to struct.pack, and makes a
  second attempt to encode those values.

  Args:
      wire_type:  The field's wire type, for encoding tags.
      format:  The format string to pass to struct.pack().
  """

  value_size = struct.calcsize(format)
  if value_size == 4:
    def EncodeNonFiniteOrRaise(write, value):
      # Remember that the serialized form uses little-endian byte order.
      if value == _POS_INF:
        write('\x00\x00\x80\x7F')
      elif value == _NEG_INF:
        write('\x00\x00\x80\xFF')
      elif value != value:  # NaN
        write('\x00\x00\xC0\x7F')
      else:
        # Not a non-finite value after all: re-raise the original error.
        raise
  elif value_size == 8:
    def EncodeNonFiniteOrRaise(write, value):
      if value == _POS_INF:
        write('\x00\x00\x00\x00\x00\x00\xF0\x7F')
      elif value == _NEG_INF:
        write('\x00\x00\x00\x00\x00\x00\xF0\xFF')
      elif value != value:  # NaN
        write('\x00\x00\x00\x00\x00\x00\xF8\x7F')
      else:
        # Not a non-finite value after all: re-raise the original error.
        raise
  else:
    raise ValueError('Can\'t encode floating-point values that are '
                     '%d bytes long (only 4 or 8)' % value_size)

  def SpecificEncoder(field_number, is_repeated, is_packed):
    local_struct_pack = struct.pack
    if is_packed:
      tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
      local_EncodeVarint = _EncodeVarint
      def EncodePackedField(write, value):
        write(tag_bytes)
        local_EncodeVarint(write, len(value) * value_size)
        for element in value:
          # This try/except block is going to be faster than any code that
          # we could write to check whether element is finite.
          try:
            write(local_struct_pack(format, element))
          except SystemError:
            EncodeNonFiniteOrRaise(write, element)
      return EncodePackedField
    elif is_repeated:
      tag_bytes = TagBytes(field_number, wire_type)
      def EncodeRepeatedField(write, value):
        for element in value:
          write(tag_bytes)
          try:
            write(local_struct_pack(format, element))
          except SystemError:
            EncodeNonFiniteOrRaise(write, element)
      return EncodeRepeatedField
    else:
      tag_bytes = TagBytes(field_number, wire_type)
      def EncodeField(write, value):
        write(tag_bytes)
        try:
          write(local_struct_pack(format, value))
        except SystemError:
          EncodeNonFiniteOrRaise(write, value)
      return EncodeField

  return SpecificEncoder
# ====================================================================
# Here we declare an encoder constructor for each field type. These work
# very similarly to sizer constructors, described earlier.
# Signed integer types share the signed-varint encoder; unsigned types the
# plain varint encoder; sint32/sint64 ZigZag-transform before encoding.
Int32Encoder = Int64Encoder = EnumEncoder = _SimpleEncoder(
    wire_format.WIRETYPE_VARINT, _EncodeSignedVarint, _SignedVarintSize)

UInt32Encoder = UInt64Encoder = _SimpleEncoder(
    wire_format.WIRETYPE_VARINT, _EncodeVarint, _VarintSize)

SInt32Encoder = SInt64Encoder = _ModifiedEncoder(
    wire_format.WIRETYPE_VARINT, _EncodeVarint, _VarintSize,
    wire_format.ZigZagEncode)

# Note that Python conveniently guarantees that when using the '<' prefix on
# formats, they will also have the same size across all platforms (as opposed
# to without the prefix, where their sizes depend on the C compiler's basic
# type sizes).
Fixed32Encoder = _StructPackEncoder(wire_format.WIRETYPE_FIXED32, '<I')
Fixed64Encoder = _StructPackEncoder(wire_format.WIRETYPE_FIXED64, '<Q')
SFixed32Encoder = _StructPackEncoder(wire_format.WIRETYPE_FIXED32, '<i')
SFixed64Encoder = _StructPackEncoder(wire_format.WIRETYPE_FIXED64, '<q')
FloatEncoder = _FloatingPointEncoder(wire_format.WIRETYPE_FIXED32, '<f')
DoubleEncoder = _FloatingPointEncoder(wire_format.WIRETYPE_FIXED64, '<d')
def BoolEncoder(field_number, is_repeated, is_packed):
  """Returns an encoder for a boolean field.

  Booleans serialize as a single varint byte: 0 or 1.
  """

  false_byte = chr(0)
  true_byte = chr(1)
  if is_packed:
    tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
    local_EncodeVarint = _EncodeVarint
    def EncodePackedField(write, value):
      write(tag_bytes)
      # Each bool is exactly one byte, so the payload length equals count.
      local_EncodeVarint(write, len(value))
      for element in value:
        if element:
          write(true_byte)
        else:
          write(false_byte)
    return EncodePackedField
  elif is_repeated:
    tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_VARINT)
    def EncodeRepeatedField(write, value):
      for element in value:
        write(tag_bytes)
        if element:
          write(true_byte)
        else:
          write(false_byte)
    return EncodeRepeatedField
  else:
    tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_VARINT)
    def EncodeField(write, value):
      write(tag_bytes)
      if value:
        return write(true_byte)
      return write(false_byte)
    return EncodeField
def StringEncoder(field_number, is_repeated, is_packed):
  """Returns an encoder for a string field.

  Strings go on the wire UTF-8 encoded and length-delimited: tag, varint
  byte length, then the encoded text.
  """

  tag = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
  local_EncodeVarint = _EncodeVarint
  local_len = len
  assert not is_packed  # length-delimited fields are never packed
  if is_repeated:
    def EncodeRepeatedField(write, value):
      for element in value:
        encoded = element.encode('utf-8')
        write(tag)
        local_EncodeVarint(write, local_len(encoded))
        write(encoded)
    return EncodeRepeatedField
  else:
    def EncodeField(write, value):
      encoded = value.encode('utf-8')
      write(tag)
      local_EncodeVarint(write, local_len(encoded))
      return write(encoded)
    return EncodeField
def BytesEncoder(field_number, is_repeated, is_packed):
  """Returns an encoder for a bytes field.

  Like StringEncoder, but values are written verbatim — no UTF-8 step.
  """

  tag = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
  local_EncodeVarint = _EncodeVarint
  local_len = len
  assert not is_packed  # length-delimited fields are never packed
  if is_repeated:
    def EncodeRepeatedField(write, value):
      for element in value:
        write(tag)
        local_EncodeVarint(write, local_len(element))
        write(element)
    return EncodeRepeatedField
  else:
    def EncodeField(write, value):
      write(tag)
      local_EncodeVarint(write, local_len(value))
      return write(value)
    return EncodeField
def GroupEncoder(field_number, is_repeated, is_packed):
  """Returns an encoder for a group field.

  Groups are delimited by START_GROUP/END_GROUP tags rather than a length
  prefix; the nested message serializes itself in between.
  """

  start_tag = TagBytes(field_number, wire_format.WIRETYPE_START_GROUP)
  end_tag = TagBytes(field_number, wire_format.WIRETYPE_END_GROUP)
  assert not is_packed  # groups are never packed
  if is_repeated:
    def EncodeRepeatedField(write, value):
      for element in value:
        write(start_tag)
        element._InternalSerialize(write)
        write(end_tag)
    return EncodeRepeatedField
  else:
    def EncodeField(write, value):
      write(start_tag)
      value._InternalSerialize(write)
      return write(end_tag)
    return EncodeField
def MessageEncoder(field_number, is_repeated, is_packed):
  """Returns an encoder for a message field.

  Embedded messages are length-delimited: tag, varint byte length (from
  the submessage's own ByteSize()), then the serialized submessage.
  """

  tag = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
  local_EncodeVarint = _EncodeVarint
  assert not is_packed  # length-delimited fields are never packed
  if is_repeated:
    def EncodeRepeatedField(write, value):
      for element in value:
        write(tag)
        local_EncodeVarint(write, element.ByteSize())
        element._InternalSerialize(write)
    return EncodeRepeatedField
  else:
    def EncodeField(write, value):
      write(tag)
      local_EncodeVarint(write, value.ByteSize())
      return value._InternalSerialize(write)
    return EncodeField
# --------------------------------------------------------------------
# As before, MessageSet is special.
def MessageSetItemEncoder(field_number):
  """Encoder for extensions of MessageSet.

  The message set message looks like this:
    message MessageSet {
      repeated group Item = 1 {
        required int32 type_id = 2;
        required string message = 3;
      }
    }
  """
  # Everything before the nested message's length+payload is constant per
  # field, so build it once at startup: Item start tag, type_id tag+value,
  # and the message field's tag.
  start_bytes = "".join([
      TagBytes(1, wire_format.WIRETYPE_START_GROUP),
      TagBytes(2, wire_format.WIRETYPE_VARINT),
      _VarintBytes(field_number),
      TagBytes(3, wire_format.WIRETYPE_LENGTH_DELIMITED)])
  end_bytes = TagBytes(1, wire_format.WIRETYPE_END_GROUP)
  local_EncodeVarint = _EncodeVarint

  def EncodeField(write, value):
    write(start_bytes)
    local_EncodeVarint(write, value.ByteSize())
    value._InternalSerialize(write)
    return write(end_bytes)

  return EncodeField
| bsd-3-clause |
rossburton/yocto-autobuilder | lib/python2.7/site-packages/buildbot-0.8.8-py2.7.egg/buildbot/test/unit/test_clients_sendchange.py | 4 | 10020 | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
import mock
from twisted.trial import unittest
from twisted.spread import pb
from twisted.internet import defer, reactor
from buildbot.clients import sendchange
class Sender(unittest.TestCase):
    """Tests for buildbot.clients.sendchange.Sender.

    The Perspective Broker (PB) machinery is patched out so no real network
    connection is made: the fakes record the connection parameters, login
    credentials and submitted changes, which the tests then assert on via
    assertProcess().
    """

    def setUp(self):
        # patch out some PB components and make up some mocks
        self.patch(pb, 'PBClientFactory', self._fake_PBClientFactory)
        self.patch(reactor, 'connectTCP', self._fake_connectTCP)

        self.factory = mock.Mock(name='PBClientFactory')
        self.factory.login = self._fake_login
        self.factory.login_d = defer.Deferred()

        self.remote = mock.Mock(name='PB Remote')
        self.remote.callRemote = self._fake_callRemote
        self.remote.broker.transport.loseConnection = self._fake_loseConnection

        # results recorded by the fakes below
        self.creds = None
        self.conn_host = self.conn_port = None
        self.lostConnection = False
        self.added_changes = []
        self.vc_used = None

    def _fake_PBClientFactory(self):
        # Always hand back the single pre-built factory mock.
        return self.factory

    def _fake_login(self, creds):
        # Record the credentials; the deferred fires with the fake remote
        # once _fake_connectTCP simulates a successful connection.
        self.creds = creds
        return self.factory.login_d

    def _fake_connectTCP(self, host, port, factory):
        self.conn_host = host
        self.conn_port = port
        self.assertIdentical(factory, self.factory)
        # Simulate the connection succeeding immediately.
        self.factory.login_d.callback(self.remote)

    def _fake_callRemote(self, method, change):
        # The client should only ever invoke the remote 'addChange' method.
        self.assertEqual(method, 'addChange')
        self.added_changes.append(change)
        return defer.succeed(None)

    def _fake_loseConnection(self):
        self.lostConnection = True

    def assertProcess(self, host, port, username, password, changes):
        # Compare everything in one assertEqual so a failure displays the
        # full expected/actual state at once.
        self.assertEqual([host, port, username, password, changes],
                [ self.conn_host, self.conn_port,
                  self.creds.username, self.creds.password,
                  self.added_changes])

    def test_send_minimal(self):
        s = sendchange.Sender('localhost:1234')
        d = s.send('branch', 'rev', 'comm', ['a'])
        def check(_):
            self.assertProcess('localhost', 1234, 'change', 'changepw', [
                dict(project='', repository='', who=None, files=['a'],
                    comments='comm', branch='branch', revision='rev',
                    category=None, when=None, properties={}, revlink='',
                    src=None)])
        d.addCallback(check)
        return d

    def test_send_auth(self):
        # Explicit credentials override the default change/changepw pair.
        s = sendchange.Sender('localhost:1234', auth=('me','sekrit'))
        d = s.send('branch', 'rev', 'comm', ['a'])
        def check(_):
            self.assertProcess('localhost', 1234, 'me', 'sekrit', [
                dict(project='', repository='', who=None, files=['a'],
                    comments='comm', branch='branch', revision='rev',
                    category=None, when=None, properties={}, revlink='',
                    src=None)])
        d.addCallback(check)
        return d

    def test_send_full(self):
        # All optional keyword arguments are forwarded into the change dict.
        s = sendchange.Sender('localhost:1234')
        d = s.send('branch', 'rev', 'comm', ['a'], who='me', category='cats',
                   when=1234, properties={'a':'b'}, repository='r', vc='git',
                   project='p', revlink='rl')
        def check(_):
            self.assertProcess('localhost', 1234, 'change', 'changepw', [
                dict(project='p', repository='r', who='me', files=['a'],
                    comments='comm', branch='branch', revision='rev',
                    category='cats', when=1234, properties={'a':'b'},
                    revlink='rl', src='git')])
        d.addCallback(check)
        return d

    def test_send_files_tuple(self):
        # 'buildbot sendchange' sends files as a tuple, rather than a list..
        s = sendchange.Sender('localhost:1234')
        d = s.send('branch', 'rev', 'comm', ('a', 'b'))
        def check(_):
            self.assertProcess('localhost', 1234, 'change', 'changepw', [
                dict(project='', repository='', who=None, files=['a', 'b'],
                    comments='comm', branch='branch', revision='rev',
                    category=None, when=None, properties={}, revlink='',
                    src=None)])
        d.addCallback(check)
        return d

    def test_send_codebase(self):
        # The codebase key only appears in the change when it was supplied.
        s = sendchange.Sender('localhost:1234')
        d = s.send('branch', 'rev', 'comm', ['a'], codebase='mycb')
        def check(_):
            self.assertProcess('localhost', 1234, 'change', 'changepw', [
                dict(project='', repository='', who=None, files=['a'],
                    comments='comm', branch='branch', revision='rev',
                    category=None, when=None, properties={}, revlink='',
                    src=None, codebase='mycb')])
        d.addCallback(check)
        return d

    def test_send_unicode(self):
        # Unicode input should pass through unchanged.
        s = sendchange.Sender('localhost:1234')
        d = s.send(u'\N{DEGREE SIGN}',
                   u'\U0001f49e',
                   u'\N{POSTAL MARK FACE}',
                   [u'\U0001F4C1'],
                   project=u'\N{SKULL AND CROSSBONES}',
                   repository=u'\N{SNOWMAN}',
                   who=u'\N{THAI CHARACTER KHOMUT}',
                   category=u'\U0001F640',
                   when=1234,
                   properties={u'\N{LATIN SMALL LETTER A WITH MACRON}':'b'},
                   revlink=u'\U0001F517')
        def check(_):
            self.assertProcess('localhost', 1234, 'change', 'changepw', [
                dict(project=u'\N{SKULL AND CROSSBONES}',
                    repository=u'\N{SNOWMAN}',
                    who=u'\N{THAI CHARACTER KHOMUT}',
                    files=[u'\U0001F4C1'], # FILE FOLDER
                    comments=u'\N{POSTAL MARK FACE}',
                    branch=u'\N{DEGREE SIGN}',
                    revision=u'\U0001f49e', # REVOLVING HEARTS
                    category=u'\U0001F640', # WEARY CAT FACE
                    when=1234,
                    properties={u'\N{LATIN SMALL LETTER A WITH MACRON}':'b'},
                    revlink=u'\U0001F517', # LINK SYMBOL
                    src=None)])
        d.addCallback(check)
        return d

    def test_send_unicode_utf8(self):
        # UTF-8 encoded byte strings are decoded back to unicode (except
        # property keys -- see the NOTE below).
        s = sendchange.Sender('localhost:1234')
        d = s.send(u'\N{DEGREE SIGN}'.encode('utf8'),
                   u'\U0001f49e'.encode('utf8'),
                   u'\N{POSTAL MARK FACE}'.encode('utf8'),
                   [u'\U0001F4C1'.encode('utf8')],
                   project=u'\N{SKULL AND CROSSBONES}'.encode('utf8'),
                   repository=u'\N{SNOWMAN}'.encode('utf8'),
                   who=u'\N{THAI CHARACTER KHOMUT}'.encode('utf8'),
                   category=u'\U0001F640'.encode('utf8'),
                   when=1234,
                   properties={
                       u'\N{LATIN SMALL LETTER A WITH MACRON}'.encode('utf8')
                       : 'b'},
                   revlink=u'\U0001F517'.encode('utf8'))
        def check(_):
            self.assertProcess('localhost', 1234, 'change', 'changepw', [
                dict(project=u'\N{SKULL AND CROSSBONES}',
                    repository=u'\N{SNOWMAN}',
                    who=u'\N{THAI CHARACTER KHOMUT}',
                    files=[u'\U0001F4C1'], # FILE FOLDER
                    comments=u'\N{POSTAL MARK FACE}',
                    branch=u'\N{DEGREE SIGN}',
                    revision=u'\U0001f49e', # REVOLVING HEARTS
                    category=u'\U0001F640', # WEARY CAT FACE
                    when=1234,
                    ## NOTE: not decoded!
                    properties={'\xc4\x81':'b'},
                    revlink=u'\U0001F517', # LINK SYMBOL
                    src=None)])
        d.addCallback(check)
        return d

    def test_send_unicode_latin1(self):
        # hand send() a bunch of latin1 strings, and expect them recoded
        # to unicode
        s = sendchange.Sender('localhost:1234', encoding='latin1')
        d = s.send(u'\N{YEN SIGN}'.encode('latin1'),
                   u'\N{POUND SIGN}'.encode('latin1'),
                   u'\N{BROKEN BAR}'.encode('latin1'),
                   [u'\N{NOT SIGN}'.encode('latin1')],
                   project=u'\N{DEGREE SIGN}'.encode('latin1'),
                   repository=u'\N{SECTION SIGN}'.encode('latin1'),
                   who=u'\N{MACRON}'.encode('latin1'),
                   category=u'\N{PILCROW SIGN}'.encode('latin1'),
                   when=1234,
                   properties={
                       u'\N{SUPERSCRIPT ONE}'.encode('latin1')
                       : 'b'},
                   revlink=u'\N{INVERTED QUESTION MARK}'.encode('latin1'))
        def check(_):
            self.assertProcess('localhost', 1234, 'change', 'changepw', [
                dict(project=u'\N{DEGREE SIGN}',
                    repository=u'\N{SECTION SIGN}',
                    who=u'\N{MACRON}',
                    files=[u'\N{NOT SIGN}'],
                    comments=u'\N{BROKEN BAR}',
                    branch=u'\N{YEN SIGN}',
                    revision=u'\N{POUND SIGN}',
                    category=u'\N{PILCROW SIGN}',
                    when=1234,
                    ## NOTE: not decoded!
                    properties={'\xb9':'b'},
                    revlink=u'\N{INVERTED QUESTION MARK}',
                    src=None)])
        d.addCallback(check)
        return d
| gpl-2.0 |
40223220/worktogether | static/Brython3.1.1-20150328-091302/Lib/importlib/__init__.py | 610 | 3472 | """A pure Python implementation of import."""
__all__ = ['__import__', 'import_module', 'invalidate_caches']
# Bootstrap help #####################################################
# Until bootstrapping is complete, DO NOT import any modules that attempt
# to import importlib._bootstrap (directly or indirectly). Since this
# partially initialised package would be present in sys.modules, those
# modules would get an uninitialised copy of the source version, instead
# of a fully initialised version (either the frozen one or the one
# initialised below if the frozen one is not available).
import _imp # Just the builtin component, NOT the full Python module
import sys
from . import machinery #fix me brython
try:
    # Prefer the frozen, C-accelerated bootstrap baked into the interpreter.
    import _frozen_importlib as _bootstrap
except ImportError:
    # Fall back to the pure-Python bootstrap and wire it up by hand.
    from . import _bootstrap
    _bootstrap._setup(sys, _imp)
else:
    # importlib._bootstrap is the built-in import, ensure we don't create
    # a second copy of the module.
    _bootstrap.__name__ = 'importlib._bootstrap'
    _bootstrap.__package__ = 'importlib'
    _bootstrap.__file__ = __file__.replace('__init__.py', '_bootstrap.py')
    sys.modules['importlib._bootstrap'] = _bootstrap

# To simplify imports in test code
_w_long = _bootstrap._w_long
_r_long = _bootstrap._r_long

# Fully bootstrapped at this point, import whatever you like, circular
# dependencies and startup overhead minimisation permitting :)

# Public API #########################################################

from ._bootstrap import __import__
def invalidate_caches():
    """Invalidate the caches of every sys.meta_path finder that supports it.

    Finders that do not expose an invalidate_caches() method are skipped.
    """
    for meta_finder in sys.meta_path:
        if hasattr(meta_finder, 'invalidate_caches'):
            meta_finder.invalidate_caches()
def find_loader(name, path=None):
    """Find the loader for the specified module.

    sys.modules is consulted first: if the module was already imported, its
    ``__loader__`` attribute is returned, and ValueError is raised if that
    attribute happens to be None.  Otherwise sys.meta_path is searched for a
    suitable loader, passing *path* to the finders; None is returned when no
    loader could be found.

    Dotted names do not have their parent packages implicitly imported.  You
    will most likely need to explicitly import all parent packages in the
    proper order for a submodule to get the correct loader.
    """
    # Sentinel distinguishes "absent" from a module legitimately set to None.
    _absent = object()
    module = sys.modules.get(name, _absent)
    if module is not _absent:
        loader = module.__loader__
        if loader is None:
            raise ValueError('{}.__loader__ is None'.format(name))
        return loader
    return _bootstrap._find_module(name, path)
def import_module(name, package=None):
    """Import and return a module.

    The 'package' argument is required when performing a relative import.
    It specifies the package to use as the anchor point from which to
    resolve the relative import to an absolute import.
    """
    level = 0
    if name.startswith('.'):
        if not package:
            raise TypeError("relative imports require the 'package' argument")
        # The run of leading dots determines how many packages to ascend.
        level = len(name) - len(name.lstrip('.'))
    return _bootstrap._gcd_import(name[level:], package, level)
# Brython needs at least one import hook on sys.meta_path for the importlib
# machinery to work.
import basehook
sys.meta_path.append(basehook.BaseHook())
| gpl-3.0 |
friedrichromstedt/matplotlayers | matplotlayers/layer.py | 1 | 2290 | # Copyright (c) 2008, 2009, 2010 Friedrich Romstedt
# <www.friedrichromstedt.org>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# Developed since: Jul 2008
__version__ = (0, 1, 0)
import keyconf
class Layer(keyconf.Configuration):
    """Base class for a layer.

    The class is derived from keyconf.Configuration, to support the
    .configure() method seamlessly.  In addition it tracks a *changed*
    flag which is raised whenever the configuration is modified.
    """

    def __init__(self):
        """Initialise the configuration and flag the layer as changed."""
        keyconf.Configuration.__init__(self)
        self.set_changed()

    #
    # Tracking status ...
    #

    def configure(self, **kwargs):
        """Update configuration keys and mark the layer as changed."""
        self.set_changed()
        keyconf.Configuration.configure(self, **kwargs)

    def unconfigure(self, *args):
        """Remove configuration keys and mark the layer as changed."""
        self.set_changed()
        keyconf.Configuration.unconfigure(self, *args)

    #
    # Changed-flag methods ...
    #

    def set_changed(self):
        """Flag that the layer has changed."""
        self.changed = True

    def unset_changed(self):
        """Flag that the layer is unchanged."""
        self.changed = False

    def has_changed(self):
        """Return True if the layer changed since the last unset_changed()."""
        return self.changed

    #
    # Comparison ...
    #

    def __eq__(self, other):
        # Layers compare by identity; ``is`` is the idiomatic (and cheaper)
        # equivalent of the former ``id(self) == id(other)``.
        return self is other

    def __ne__(self, other):
        # Python 2 does not derive != from ==, so define it explicitly to
        # keep both operators consistent.
        return self is not other

    # Defining __eq__ suppresses the inherited hash on Python 3; identity
    # equality is consistent with the default identity hash, so keep it.
    __hash__ = object.__hash__
| mit |
hanwei2008/flower_recognizer | model.py | 1 | 1107 | # -*- coding: utf-8 -*-
import os
import tensorflow as tf
FLAGS = tf.app.flags.FLAGS
class PhotoRecognizer(object):
    '''
    Image recognizer built by transfer learning on top of Google's
    Inception-v3 model (published at the ILSVRC 2015 competition), reused
    here for a different image-recognition task.
    '''

    def __init__(self, n_classes, data_processor, reuse=None):
        # n_classes: number of output categories of the final softmax layer.
        # data_processor: produces the bottleneck feature vectors fed to
        #   model() -- its exact interface is not visible here; confirm
        #   against callers.
        # reuse: forwarded to tf.variable_scope to share variables between
        #   training and inference graphs.
        self.__n_classes = n_classes
        self.__data_processor = data_processor
        self.__reuse = reuse

    @property
    def reuse(self):
        return self.__reuse

    @reuse.setter
    def reuse(self, value):
        self.__reuse = value

    def model(self, bottleneck_input):
        # A single fully connected layer on top of the Inception-v3
        # bottleneck features.
        with tf.variable_scope('final_training_ops', reuse=self.__reuse):
            weights = tf.Variable(tf.truncated_normal([FLAGS.bottleneck_tensor_size, self.__n_classes], stddev=0.001))
            biases = tf.Variable(tf.zeros([self.__n_classes]))
            logits = tf.matmul(bottleneck_input, weights) + biases
            final_tensor = tf.nn.softmax(logits)
        # Predicted class index per row of the batch.
        predictions = tf.argmax(final_tensor, 1)
        return predictions, logits
| mit |
davisein/jitsudone | django/django/contrib/gis/db/backends/spatialite/base.py | 77 | 3463 | from ctypes.util import find_library
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.db.backends.sqlite3.base import (
_sqlite_extract, _sqlite_date_trunc, _sqlite_regexp, _sqlite_format_dtdelta,
connection_created, Database, DatabaseWrapper as SQLiteDatabaseWrapper,
SQLiteCursorWrapper)
from django.contrib.gis.db.backends.spatialite.client import SpatiaLiteClient
from django.contrib.gis.db.backends.spatialite.creation import SpatiaLiteCreation
from django.contrib.gis.db.backends.spatialite.introspection import SpatiaLiteIntrospection
from django.contrib.gis.db.backends.spatialite.operations import SpatiaLiteOperations
class DatabaseWrapper(SQLiteDatabaseWrapper):
    """SQLite database wrapper that loads the SpatiaLite C extension."""

    def __init__(self, *args, **kwargs):
        # Before we get too far, make sure pysqlite 2.5+ is installed
        # (enable_load_extension() only exists from pysqlite 2.5 onwards).
        if Database.version_info < (2, 5, 0):
            raise ImproperlyConfigured('Only versions of pysqlite 2.5+ are '
                                       'compatible with SpatiaLite and GeoDjango.')

        # Trying to find the location of the SpatiaLite library.
        # Here we are figuring out the path to the SpatiaLite library
        # (`libspatialite`). If it's not in the system library path (e.g., it
        # cannot be found by `ctypes.util.find_library`), then it may be set
        # manually in the settings via the `SPATIALITE_LIBRARY_PATH` setting.
        self.spatialite_lib = getattr(settings, 'SPATIALITE_LIBRARY_PATH',
                                      find_library('spatialite'))
        if not self.spatialite_lib:
            raise ImproperlyConfigured('Unable to locate the SpatiaLite library. '
                                       'Make sure it is in your library path, or set '
                                       'SPATIALITE_LIBRARY_PATH in your settings.'
                                       )
        super(DatabaseWrapper, self).__init__(*args, **kwargs)
        # Swap in the spatial variants of the helpers installed by the base
        # SQLite wrapper.
        self.ops = SpatiaLiteOperations(self)
        self.client = SpatiaLiteClient(self)
        self.creation = SpatiaLiteCreation(self)
        self.introspection = SpatiaLiteIntrospection(self)

    def _cursor(self):
        # Return a cursor; on first use, create the connection and load the
        # SpatiaLite extension into it.
        if self.connection is None:
            self._sqlite_create_connection()

            ## From here on, customized for GeoDjango ##

            # Enabling extension loading on the SQLite connection.
            try:
                self.connection.enable_load_extension(True)
            except AttributeError:
                raise ImproperlyConfigured('The pysqlite library does not support C extension loading. '
                                           'Both SQLite and pysqlite must be configured to allow '
                                           'the loading of extensions to use SpatiaLite.'
                                           )

            # Loading the SpatiaLite library extension on the connection, and
            # returning the created cursor.
            cur = self.connection.cursor(factory=SQLiteCursorWrapper)
            try:
                cur.execute("SELECT load_extension(%s)", (self.spatialite_lib,))
            except Exception, msg:
                raise ImproperlyConfigured('Unable to load the SpatiaLite library extension '
                                           '"%s" because: %s' % (self.spatialite_lib, msg))
            return cur
        else:
            return self.connection.cursor(factory=SQLiteCursorWrapper)
| bsd-3-clause |
catapult-project/catapult-csm | third_party/gsutil/third_party/boto/boto/glacier/exceptions.py | 185 | 2195 | # -*- coding: utf-8 -*-
# Copyright (c) 2012 Thomas Parslow http://almostobsolete.net/
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
from boto.compat import json
class UnexpectedHTTPResponseError(Exception):
    """Raised when Glacier answers with a status code we did not expect.

    Attributes:
        status: HTTP status code of the response.
        body:   raw response payload.
        code:   Glacier error code parsed from the payload, or None when the
                payload could not be parsed as JSON.
    """

    def __init__(self, expected_responses, response):
        self.status = response.status
        self.body = response.read()
        self.code = None
        try:
            parsed = json.loads(self.body)
            self.code = parsed["code"]
            message = ('Expected %s, got ' % expected_responses +
                       '(%d, code=%s, message=%s)' % (response.status,
                                                      self.code,
                                                      parsed["message"]))
        except Exception:
            # Payload was not valid JSON (or lacked the expected keys);
            # fall back to echoing the raw body.
            message = 'Expected %s, got (%d, %s)' % (expected_responses,
                                                     response.status,
                                                     self.body)
        super(UnexpectedHTTPResponseError, self).__init__(message)
class ArchiveError(Exception):
    """Base class for errors concerning Glacier archive operations."""


class UploadArchiveError(ArchiveError):
    """An archive could not be uploaded."""


class DownloadArchiveError(ArchiveError):
    """An archive could not be downloaded."""


class TreeHashDoesNotMatchError(ArchiveError):
    """Downloaded data does not match the expected tree hash."""
| bsd-3-clause |
3ll3d00d/vibe | backend/src/analyser/common/targetstatecontroller.py | 1 | 4824 | import logging
from flask_restful import marshal
from analyser.common.config import loadTargetState
from core.interface import targetStateFields, RecordingDeviceStatus
# Module-level logger and the reactor key under which _applyTargetState is
# registered by TargetStateController.
logger = logging.getLogger('analyser.targetstate')

REACH_TARGET_STATE = 'RTS'
class TargetStateController(object):
    """Owns the system-wide target state and pushes it out to the devices."""

    def __init__(self, targetStateProvider, reactor, httpclient, deviceController=None):
        """
        Registers the target-state handler with the reactor.
        :param targetStateProvider: holds the current target state.
        :param reactor: worker that executes the state propagation.
        :param httpclient: client used to PATCH the devices.
        :param deviceController: source of the known devices.
        """
        self._reactor = reactor
        self._httpclient = httpclient
        self._reactor.register(REACH_TARGET_STATE, _applyTargetState)
        self._targetStateProvider = targetStateProvider
        self.deviceController = deviceController

    def updateDeviceState(self, device):
        """
        Updates the target state on the specified device.
        :param device: the device to update.
        """
        # Thread safety relies on the target state being effectively
        # immutable; if it ever becomes mutable, revisit this hand-off.
        payload = [self._targetStateProvider.state, device, self._httpclient]
        self._reactor.offer(REACH_TARGET_STATE, payload)

    def updateTargetState(self, newState):
        """
        Updates the system target state and propagates it to all devices.
        :param newState: the new target state values.
        """
        merged = loadTargetState(newState, self._targetStateProvider.state)
        self._targetStateProvider.state = merged
        for dev in self.deviceController.getDevices():
            self.updateDeviceState(dev.payload)

    def getTargetState(self):
        """
        :return: the current system target state.
        """
        return self._targetStateProvider.state
class TargetStateProvider(object):
    """Holds a reference to the current target state."""

    def __init__(self, targetState):
        # The current TargetState instance; replaced wholesale on update.
        self.state = targetState
class TargetState(object):
    """The target recording state of the measurement system."""

    def __init__(self, fs=500, samplesPerBatch=125, accelerometerSens=2,
                 accelerometerEnabled=True, gyroSens=500, gyroEnabled=True):
        # Sample rate (Hz) and the number of samples delivered per batch.
        self.fs = fs
        self.samplesPerBatch = samplesPerBatch
        # Accelerometer configuration.
        self.accelerometerSens = accelerometerSens
        self.accelerometerEnabled = accelerometerEnabled
        # Gyroscope configuration.
        self.gyroSens = gyroSens
        self.gyroEnabled = gyroEnabled
def _applyTargetState(targetState, md, httpclient):
    """
    Compares the current device state against the target state and issues a
    PATCH to the device if any monitored field differs.
    :param md: the device metadata dict (its current state).
    :param targetState: the target state.
    :param httpclient: the http client.
    """
    # Fields compared between device state and target state, in the order
    # they are reported.
    monitored = ['fs', 'samplesPerBatch', 'gyroEnabled', 'gyroSens',
                 'accelerometerEnabled', 'accelerometerSens']
    anyUpdate = False
    for field in monitored:
        desired = getattr(targetState, field)
        if md[field] != desired:
            logger.info("Updating " + field + " from " + str(md[field]) +
                        " to " + str(desired) + " for " + md['name'])
            anyUpdate = True
    if not anyUpdate:
        logger.debug("Device " + md['name'] + " is at target state, we continue")
        return
    payload = marshal(targetState, targetStateFields)
    logger.info("Applying target state change " + md['name'] + " - " + str(payload))
    # Only push the change while the device is idle (INITIALISED).
    if RecordingDeviceStatus.INITIALISED.name == md.get('status'):
        try:
            httpclient.patch(md['serviceURL'], json=payload)
        except Exception as e:
            logger.exception(e)
    else:
        logger.warning("Ignoring target state change until " + md['name'] +
                       " is idle, currently " + md['status'])
| mit |
geodynamics/gale | config/scons/scons-local-1.2.0/SCons/Tool/qt.py | 12 | 13096 |
"""SCons.Tool.qt
Tool-specific initialization for Qt.
There normally shouldn't be any need to import this module directly.
It will usually be imported through the generic SCons.Tool.Tool()
selection method.
"""
#
# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Tool/qt.py 3842 2008/12/20 22:59:52 scons"
import os.path
import re
import SCons.Action
import SCons.Builder
import SCons.Defaults
import SCons.Scanner
import SCons.Tool
import SCons.Util
# Warning hierarchy of the Qt tool; everything derives from ToolQtWarning so
# the whole group can be enabled/disabled at once.
class ToolQtWarning(SCons.Warnings.Warning):
    pass

class GeneratedMocFileNotIncluded(ToolQtWarning):
    # Emitted when a generated moc_* file is not #included by its source.
    pass

class QtdirNotFound(ToolQtWarning):
    # Emitted when the Qt installation directory cannot be determined.
    pass

SCons.Warnings.enableWarningClass(ToolQtWarning)
# Suffixes recognised as C/C++ headers; '.H' is only distinct on
# case-sensitive file systems.
header_extensions = [".h", ".hxx", ".hpp", ".hh"]
if SCons.Util.case_sensitive_suffixes('.h', '.H'):
    header_extensions.append('.H')

# 'c++' is not a valid Python identifier, so the C++ tool module must be
# loaded via __import__ to obtain the list of C++ source suffixes.
cplusplus = __import__('c++', globals(), locals(), [])
cxx_suffixes = cplusplus.CXXSuffixes
def checkMocIncluded(target, source, env):
    """Warn when the generated moc file is not #included by its cpp file."""
    moc_file = target[0]
    cpp_file = source[0]
    # cpp.includes appears to be cleared before the build stage, so rescan.
    # The path transformations (moc.cwd? cpp.cwd?) are not fully understood.
    scan_path = SCons.Defaults.CScan.path(env, moc_file.cwd)
    included = SCons.Defaults.CScan(cpp_file, env, scan_path)
    if moc_file not in included:
        SCons.Warnings.warn(
            GeneratedMocFileNotIncluded,
            "Generated moc file '%s' is not included by '%s'" %
            (str(moc_file), str(cpp_file)))
def find_file(filename, paths, node_factory):
    """Return the first existing node for *filename* among *paths*.

    Each directory is turned into a candidate node via *node_factory*; the
    first node whose rexists() is true is returned, otherwise None.
    """
    candidates = (node_factory(filename, directory) for directory in paths)
    for node in candidates:
        if node.rexists():
            return node
    return None
class _Automoc:
    """
    Callable class, which works as an emitter for Programs, SharedLibraries and
    StaticLibraries.
    """

    def __init__(self, objBuilderName):
        # Name of the object builder ('SharedObject' or 'StaticObject') to
        # fetch from the environment when moc output needs compiling.
        self.objBuilderName = objBuilderName

    def __call__(self, target, source, env):
        """
        Smart autoscan function. Gets the list of objects for the Program
        or Lib. Adds objects and builders for the special qt files.
        """
        # Honour QT_AUTOSCAN=0: pass the sources through untouched.
        try:
            if int(env.subst('$QT_AUTOSCAN')) == 0:
                return target, source
        except ValueError:
            pass
        try:
            debug = int(env.subst('$QT_DEBUG'))
        except ValueError:
            debug = 0

        # some shortcuts used in the scanner
        splitext = SCons.Util.splitext
        objBuilder = getattr(env, self.objBuilderName)

        # some regular expressions:
        # Q_OBJECT detection
        q_object_search = re.compile(r'[^A-Za-z0-9]Q_OBJECT[^A-Za-z0-9]')
        # cxx and c comment 'eater'
        #comment = re.compile(r'(//.*)|(/\*(([^*])|(\*[^/]))*\*/)')
        # CW: something must be wrong with the regexp. See also bug #998222
        # CURRENTLY THERE IS NO TEST CASE FOR THAT

        # The following is kind of hacky to get builders working properly (FIXME)
        objBuilderEnv = objBuilder.env
        objBuilder.env = env
        mocBuilderEnv = env.Moc.env
        env.Moc.env = env

        # make a deep copy for the result; MocH objects will be appended
        out_sources = source[:]

        for obj in source:
            if not obj.has_builder():
                # binary obj file provided
                if debug:
                    print "scons: qt: '%s' seems to be a binary. Discarded." % str(obj)
                continue
            cpp = obj.sources[0]
            if not splitext(str(cpp))[1] in cxx_suffixes:
                if debug:
                    print "scons: qt: '%s' is no cxx file. Discarded." % str(cpp)
                # c or fortran source
                continue
            #cpp_contents = comment.sub('', cpp.get_contents())
            cpp_contents = cpp.get_contents()
            h = None
            for h_ext in header_extensions:
                # try to find the header file in the corresponding source
                # directory
                hname = splitext(cpp.name)[0] + h_ext
                h = find_file(hname, (cpp.get_dir(),), env.File)
                if h:
                    if debug:
                        print "scons: qt: Scanning '%s' (header of '%s')" % (str(h), str(cpp))
                    #h_contents = comment.sub('', h.get_contents())
                    h_contents = h.get_contents()
                    break
            if not h and debug:
                print "scons: qt: no header for '%s'." % (str(cpp))
            if h and q_object_search.search(h_contents):
                # h file with the Q_OBJECT macro found -> add moc_cpp
                moc_cpp = env.Moc(h)
                moc_o = objBuilder(moc_cpp)
                out_sources.append(moc_o)
                #moc_cpp.target_scanner = SCons.Defaults.CScan
                if debug:
                    print "scons: qt: found Q_OBJECT macro in '%s', moc'ing to '%s'" % (str(h), str(moc_cpp))
            if cpp and q_object_search.search(cpp_contents):
                # cpp file with Q_OBJECT macro found -> add moc
                # (to be included in cpp)
                moc = env.Moc(cpp)
                env.Ignore(moc, moc)
                if debug:
                    print "scons: qt: found Q_OBJECT macro in '%s', moc'ing to '%s'" % (str(cpp), str(moc))
                #moc.source_scanner = SCons.Defaults.CScan

        # restore the original env attributes (FIXME)
        objBuilder.env = objBuilderEnv
        env.Moc.env = mocBuilderEnv

        return (target, out_sources)
# Emitter instances for shared and static object builders.
AutomocShared = _Automoc('SharedObject')
AutomocStatic = _Automoc('StaticObject')
def _detect(env):
"""Not really safe, but fast method to detect the QT library"""
QTDIR = None
if not QTDIR:
QTDIR = env.get('QTDIR',None)
if not QTDIR:
QTDIR = os.environ.get('QTDIR',None)
if not QTDIR:
moc = env.WhereIs('moc')
if moc:
QTDIR = os.path.dirname(os.path.dirname(moc))
SCons.Warnings.warn(
QtdirNotFound,
"Could not detect qt, using moc executable as a hint (QTDIR=%s)" % QTDIR)
else:
QTDIR = None
SCons.Warnings.warn(
QtdirNotFound,
"Could not detect qt, using empty QTDIR")
return QTDIR
def uicEmitter(target, source, env):
    """Emit the three uic targets: declaration, implementation and moc file.

    The first target (the header) is added automatically by the builder;
    missing implementation and moc targets are appended here.
    """
    base = SCons.Util.splitext(str(source[0].name))[0]
    base = os.path.join(str(target[0].get_dir()), base)

    def _fixed_name(prefix_var, suffix_var):
        # Derive a target name from the base with the configured affixes.
        return SCons.Util.adjustixes(base,
                                     env.subst(prefix_var),
                                     env.subst(suffix_var))

    if len(target) < 2:
        # second target is the implementation
        target.append(_fixed_name('$QT_UICIMPLPREFIX', '$QT_UICIMPLSUFFIX'))
    if len(target) < 3:
        # third target is the moc file
        target.append(_fixed_name('$QT_MOCHPREFIX', '$QT_MOCHSUFFIX'))
    return target, source
def uicScannerFunc(node, env, path):
    """Scan a .ui file for <include> elements and resolve them to nodes."""
    # Search the CPPPATH directories plus the directory of the .ui file.
    search_dirs = list(env['CPPPATH']) + [str(node.rfile().dir)]
    referenced = re.findall("<include.*?>(.*?)</include>", node.get_contents())
    resolved = (env.FindFile(name, search_dirs) for name in referenced)
    return [dep for dep in resolved if dep]
# Scanner that makes .ui files depend on the resources they <include>.
uicScanner = SCons.Scanner.Base(uicScannerFunc,
                                name = "UicScanner",
                                node_class = SCons.Node.FS.File,
                                node_factory = SCons.Node.FS.File,
                                recursive = 0)
def generate(env):
    """Add Builders and construction variables for qt to an Environment."""
    CLVar = SCons.Util.CLVar
    Action = SCons.Action.Action
    Builder = SCons.Builder.Builder

    # Default construction variables; QTDIR is auto-detected when possible.
    env.SetDefault(QTDIR = _detect(env),
                   QT_BINPATH = os.path.join('$QTDIR', 'bin'),
                   QT_CPPPATH = os.path.join('$QTDIR', 'include'),
                   QT_LIBPATH = os.path.join('$QTDIR', 'lib'),
                   QT_MOC = os.path.join('$QT_BINPATH','moc'),
                   QT_UIC = os.path.join('$QT_BINPATH','uic'),
                   QT_LIB = 'qt', # may be set to qt-mt
                   QT_AUTOSCAN = 1, # scan for moc'able sources
                   # Some QT specific flags. I don't expect someone wants to
                   # manipulate those ...
                   QT_UICIMPLFLAGS = CLVar(''),
                   QT_UICDECLFLAGS = CLVar(''),
                   QT_MOCFROMHFLAGS = CLVar(''),
                   QT_MOCFROMCXXFLAGS = CLVar('-i'),
                   # suffixes/prefixes for the headers / sources to generate
                   QT_UICDECLPREFIX = '',
                   QT_UICDECLSUFFIX = '.h',
                   QT_UICIMPLPREFIX = 'uic_',
                   QT_UICIMPLSUFFIX = '$CXXFILESUFFIX',
                   QT_MOCHPREFIX = 'moc_',
                   QT_MOCHSUFFIX = '$CXXFILESUFFIX',
                   QT_MOCCXXPREFIX = '',
                   QT_MOCCXXSUFFIX = '.moc',
                   QT_UISUFFIX = '.ui',
                   # Commands for the qt support ...
                   # command to generate header, implementation and moc-file
                   # from a .ui file
                   QT_UICCOM = [
                       CLVar('$QT_UIC $QT_UICDECLFLAGS -o ${TARGETS[0]} $SOURCE'),
                       CLVar('$QT_UIC $QT_UICIMPLFLAGS -impl ${TARGETS[0].file} '
                             '-o ${TARGETS[1]} $SOURCE'),
                       CLVar('$QT_MOC $QT_MOCFROMHFLAGS -o ${TARGETS[2]} ${TARGETS[0]}')],
                   # command to generate meta object information for a class
                   # declarated in a header
                   QT_MOCFROMHCOM = (
                       '$QT_MOC $QT_MOCFROMHFLAGS -o ${TARGETS[0]} $SOURCE'),
                   # command to generate meta object information for a class
                   # declarated in a cpp file
                   QT_MOCFROMCXXCOM = [
                       CLVar('$QT_MOC $QT_MOCFROMCXXFLAGS -o ${TARGETS[0]} $SOURCE'),
                       Action(checkMocIncluded,None)])

    # ... and the corresponding builders
    uicBld = Builder(action=SCons.Action.Action('$QT_UICCOM', '$QT_UICCOMSTR'),
                     emitter=uicEmitter,
                     src_suffix='$QT_UISUFFIX',
                     suffix='$QT_UICDECLSUFFIX',
                     prefix='$QT_UICDECLPREFIX',
                     source_scanner=uicScanner)
    # The Moc builder dispatches per source suffix: headers use the
    # moc-from-header command, C++ sources the moc-from-cxx command.
    mocBld = Builder(action={}, prefix={}, suffix={})
    for h in header_extensions:
        act = SCons.Action.Action('$QT_MOCFROMHCOM', '$QT_MOCFROMHCOMSTR')
        mocBld.add_action(h, act)
        mocBld.prefix[h] = '$QT_MOCHPREFIX'
        mocBld.suffix[h] = '$QT_MOCHSUFFIX'
    for cxx in cxx_suffixes:
        act = SCons.Action.Action('$QT_MOCFROMCXXCOM', '$QT_MOCFROMCXXCOMSTR')
        mocBld.add_action(cxx, act)
        mocBld.prefix[cxx] = '$QT_MOCCXXPREFIX'
        mocBld.suffix[cxx] = '$QT_MOCCXXSUFFIX'

    # register the builders
    env['BUILDERS']['Uic'] = uicBld
    env['BUILDERS']['Moc'] = mocBld
    static_obj, shared_obj = SCons.Tool.createObjBuilders(env)
    static_obj.add_src_builder('Uic')
    shared_obj.add_src_builder('Uic')

    # We use the emitters of Program / StaticLibrary / SharedLibrary
    # to scan for moc'able files
    # We can't refer to the builders directly, we have to fetch them
    # as Environment attributes because that sets them up to be called
    # correctly later by our emitter.
    env.AppendUnique(PROGEMITTER =[AutomocStatic],
                     SHLIBEMITTER=[AutomocShared],
                     LIBEMITTER  =[AutomocStatic],
                     # Of course, we need to link against the qt libraries
                     CPPPATH=["$QT_CPPPATH"],
                     LIBPATH=["$QT_LIBPATH"],
                     LIBS=['$QT_LIB'])
def exists(env):
    """Return a non-empty value when a Qt installation can be detected."""
    return _detect(env)
| gpl-2.0 |
LynxyssCZ/Flexget | update-changelog.py | 9 | 6179 | from __future__ import unicode_literals, division, absolute_import
import collections
import datetime
import io
import re
import sys
from git import Repo
class MDChangeSet(object):
    """Represents a markdown changeset for a single version."""

    # Section header -> commit-message tags that map onto it.
    CATEGORIES = [
        ('### Added\n', ['add', 'added', 'feature']),
        ('### Changed\n', ['change', 'changed', 'update']),
        ('### Fixed\n', ['fix', 'fixed']),
        ('### Deprecated\n', ['deprecate', 'deprecated']),
        ('### Removed\n', ['remove', 'removed']),
    ]

    def __init__(self):
        self.pre_header = ['\n']
        self.version_header = ''
        self.post_header = []
        # OrderedDict keeps sections in the order they first appear.
        self.sections = collections.OrderedDict()
        self.footer = []

    @classmethod
    def from_md_lines(cls, lines):
        """Parse an existing markdown changeset section and return the instance."""
        instance = cls()
        instance.pre_header, version_header, tail = isplit('## ', lines)
        if version_header:
            instance.version_header = version_header
        instance.post_header, section, tail = isplit('### ', tail)
        while section:
            instance.sections[section], section, tail = isplit('### ', tail)
        return instance

    def parse_message(self, message):
        """
        Parses a git commit message and formats and adds any tagged messages
        to this changeset.

        Returns True if one or more changelog messages was found.
        """
        found = False
        for cat, item in self.change_items(message):
            found = True
            # Link issue references like #1234 to the GitHub tracker.
            # (Raw strings: '\d' in a plain literal is an invalid escape in
            # modern Python.)
            item = re.sub(r'#(\d{3,4})',
                          r'[#\1](https://github.com/Flexget/Flexget/issues/\1)',
                          item)
            item = '- {0}\n'.format(item)
            self.sections.setdefault(cat, ['\n']).insert(0, item)
        return found

    def change_items(self, message):
        """An iterator of changelog updates from a commit message in the form
        (category, message)."""
        for line in message.split('\n'):
            for cat_match in re.finditer(r'\[(\w+)\]', line):
                found_cat = self.cat_lookup(cat_match.group(1))
                if found_cat:
                    line = line.replace(cat_match.group(0), '').strip()
                    yield found_cat, line

    def cat_lookup(self, cat):
        """Return an official category header for `cat` tag text, or None."""
        for header, tags in self.CATEGORIES:
            if cat.lower() in tags:
                return header
        return None

    def to_md_lines(self):
        """An iterator over the markdown lines representing this changeset."""
        for line in self.pre_header:
            yield line
        yield self.version_header
        for line in self.post_header:
            yield line
        for section, items in self.sections.items():
            yield section
            for item in items:
                yield item
        for line in self.footer:
            yield line
def isplit(start_text, iterator):
    """Split *iterator* at the first line beginning with *start_text*.

    Returns a (head, match, tail) tuple: *head* is the list of lines before
    the match, *match* is the matching line (or None if no line matches) and
    *tail* is the iterator positioned just after the match.
    """
    tail = iter(iterator)
    head = []
    for line in tail:
        if line.startswith(start_text):
            return head, line, tail
        head.append(line)
    return head, None, tail
if __name__ == '__main__':
    # Usage: update-changelog.py [changelog-file]
    try:
        filename = sys.argv[1]
    except IndexError:
        print('No filename specified, using changelog.md')
        filename = 'changelog.md'
    # The changelog is split into three regions by two '<!---sha--->' marker
    # comments: head text, the active (unreleased) changeset, and the
    # already-released history.
    with io.open(filename, encoding='utf-8') as logfile:
        pre_lines, start_comment, tail = isplit('<!---', logfile)
        active_lines, end_comment, tail = isplit('<!---', tail)
        post_lines = list(tail)
    repo = Repo('.')
    cur_ver = MDChangeSet.from_md_lines(active_lines)
    # The marker comments record which commit range is already covered.
    latestref = re.match('<!---\s*([\d\w]+)', start_comment).group(1)
    oldestref = re.match('<!---\s*([\d\w]+)', end_comment).group(1)
    released_vers = []
    commits = list(repo.iter_commits('{0}..HEAD'.format(latestref), reverse=True))
    modified = False
    if commits:
        tags = {}
        for tag in repo.tags:
            tags[tag.commit.hexsha] = tag.tag
        # Walk the new commits oldest-first, folding tagged messages into the
        # current changeset and closing it off at each release tag.
        for commit in commits:
            if cur_ver.parse_message(commit.message):
                modified = True
            if commit.hexsha in tags:
                modified = True
                # Tag changeset with release date and version and create new
                # current changeset
                version = tags[commit.hexsha].tag
                release_date = datetime.datetime.fromtimestamp(tags[commit.hexsha].tagged_date).strftime('%Y-%m-%d')
                cur_ver.version_header = '## {0} ({1})\n'.format(version, release_date)
                diffstartref = oldestref
                if oldestref in tags:
                    diffstartref = tags[oldestref].tag
                cur_ver.post_header.insert(0, '[all commits](https://github.com/Flexget/Flexget/compare/{0}...{1})\n'.
                                           format(diffstartref, version))
                released_vers.insert(0, cur_ver)
                cur_ver = MDChangeSet()
                oldestref = commit.hexsha
    if cur_ver.sections:
        verfile = repo.tree('HEAD')['flexget/_version.py'].data_stream.read()
        __version__ = None
        try:
            # HACK: executes the repo's _version.py to obtain __version__.
            # exec on repository content -- only run this script against a
            # trusted checkout.
            exec(verfile)  # pylint: disable=W0122
        except Exception:
            pass
        new_version_header = '## {0} (unreleased)\n'.format(__version__)
        if new_version_header != cur_ver.version_header:
            cur_ver.version_header = new_version_header
            modified = True
    if modified:
        print('Writing modified changelog.')
        with io.open(filename, 'w', encoding='utf-8') as logfile:
            logfile.writelines(pre_lines)
            # NOTE(review): 'commit' is the last commit of the loop above and
            # looks unbound when 'commits' is empty but 'modified' is True --
            # confirm whether that path can occur in practice.
            logfile.write('<!---{0}--->\n'.format(commit.hexsha))
            logfile.writelines(cur_ver.to_md_lines())
            logfile.write('<!---{0}--->\n'.format(oldestref))
            for ver in released_vers:
                logfile.writelines(ver.to_md_lines())
            logfile.writelines(post_lines)
    else:
        print('No updates to write.')
| mit |
david-a-wheeler/linux | scripts/gdb/linux/lists.py | 630 | 2897 | #
# gdb helper commands and functions for Linux kernel debugging
#
# list tools
#
# Copyright (c) Thiebaud Weksteen, 2015
#
# Authors:
# Thiebaud Weksteen <thiebaud@weksteen.fr>
#
# This work is licensed under the terms of the GNU GPL version 2.
#
import gdb
from linux import utils
# Cached gdb type object for the kernel's 'struct list_head'.
list_head = utils.CachedType("struct list_head")
def list_check(head):
    """Walk a kernel 'struct list_head' ring and verify its consistency.

    For every node the prev/next back-links are checked; the walk stops at
    the first inconsistency or inaccessible node, otherwise the node count
    is reported when the traversal arrives back at *head*.
    """
    nb = 0  # number of nodes visited so far
    # Accept either a (struct list_head *) or a struct list_head value.
    if (head.type == list_head.get_type().pointer()):
        head = head.dereference()
    elif (head.type != list_head.get_type()):
        raise gdb.GdbError('argument must be of type (struct list_head [*])')
    c = head
    try:
        gdb.write("Starting with: {}\n".format(c))
    except gdb.MemoryError:
        gdb.write('head is not accessible\n')
        return
    while True:
        p = c['prev'].dereference()
        n = c['next'].dereference()
        # Check the back-link of the previous node.
        try:
            if p['next'] != c.address:
                gdb.write('prev.next != current: '
                          'current@{current_addr}={current} '
                          'prev@{p_addr}={p}\n'.format(
                              current_addr=c.address,
                              current=c,
                              p_addr=p.address,
                              p=p,
                          ))
                return
        except gdb.MemoryError:
            gdb.write('prev is not accessible: '
                      'current@{current_addr}={current}\n'.format(
                          current_addr=c.address,
                          current=c
                      ))
            return
        # Check the back-link of the next node.
        try:
            if n['prev'] != c.address:
                gdb.write('next.prev != current: '
                          'current@{current_addr}={current} '
                          'next@{n_addr}={n}\n'.format(
                              current_addr=c.address,
                              current=c,
                              n_addr=n.address,
                              n=n,
                          ))
                return
        except gdb.MemoryError:
            gdb.write('next is not accessible: '
                      'current@{current_addr}={current}\n'.format(
                          current_addr=c.address,
                          current=c
                      ))
            return
        c = n
        nb += 1
        # Back at the head: the ring is closed and consistent.
        if c == head:
            gdb.write("list is consistent: {} node(s)\n".format(nb))
            return
class LxListChk(gdb.Command):
    """Verify a list consistency"""

    def __init__(self):
        # Register as "lx-list-check"; the docstring above becomes the
        # gdb help text for the command.
        super(LxListChk, self).__init__("lx-list-check", gdb.COMMAND_DATA,
                                        gdb.COMPLETE_EXPRESSION)

    def invoke(self, arg, from_tty):
        # Exactly one expression argument is accepted: the list head.
        argv = gdb.string_to_argv(arg)
        if len(argv) == 1:
            list_check(gdb.parse_and_eval(argv[0]))
        else:
            raise gdb.GdbError("lx-list-check takes one argument")


LxListChk()
| gpl-2.0 |
zxsted/scipy | scipy/special/tests/test_orthogonal_eval.py | 110 | 8165 | from __future__ import division, print_function, absolute_import
import numpy as np
from numpy.testing import assert_, assert_allclose
import scipy.special.orthogonal as orth
from scipy.special._testutils import FuncData
def test_eval_chebyt():
    """Check eval_chebyt against the closed form cos(n*arccos(x)).

    Evaluates a wide sweep of orders at a random point in (-1, 1).
    The RNG is seeded (consistent with the other tests in this file)
    so failures are reproducible.
    """
    np.random.seed(1234)
    n = np.arange(0, 10000, 7)
    x = 2*np.random.rand() - 1
    v1 = np.cos(n*np.arccos(x))
    v2 = orth.eval_chebyt(n, x)
    assert_(np.allclose(v1, v2, rtol=1e-15))
def test_eval_genlaguerre_restriction():
    """Generalized Laguerre evaluation must return nan for alpha <= -1."""
    for degree in (0, 0.1):
        value = orth.eval_genlaguerre(degree, -1, 0)
        assert_(np.isnan(value))
def test_warnings():
    """Regression test for ticket 1334.

    These evaluations must not trip any floating-point warnings, so run
    them with all fp errors promoted to exceptions.
    """
    with np.errstate(all='raise'):
        orth.eval_legendre(1, 0)
        orth.eval_laguerre(1, 1)
        orth.eval_gegenbauer(1, 1, 0)
class TestPolys(object):
    """
    Check that the eval_* functions agree with the constructed polynomials
    """

    def check_poly(self, func, cls, param_ranges=[], x_range=[], nn=10,
                   nparam=10, nx=10, rtol=1e-8):
        """Compare func (an eval_* routine) against np.poly1d(cls(*p)).

        Builds a dataset of rows [params..., x, expected] for nn degrees,
        nparam random parameter draws per degree and nx abscissae per draw,
        then checks func against it with FuncData.

        func         -- vectorized evaluator, called as func(n, params..., x)
        cls          -- polynomial constructor (e.g. orth.jacobi)
        param_ranges -- (lo, hi) range per extra parameter; empty for none
        x_range      -- [lo, hi] evaluation domain
        rtol         -- relative tolerance passed to FuncData

        Note: the default [] arguments are never mutated here, so the
        shared-mutable-default pitfall does not apply.
        """
        np.random.seed(1234)
        dataset = []
        for n in np.arange(nn):
            # One random draw per extra parameter, shaped (nparam, nparams).
            params = [a + (b-a)*np.random.rand(nparam) for a,b in param_ranges]
            params = np.asarray(params).T
            if not param_ranges:
                params = [0]
            for p in params:
                if param_ranges:
                    p = (n,) + tuple(p)
                else:
                    p = (n,)
                x = x_range[0] + (x_range[1] - x_range[0])*np.random.rand(nx)
                x[0] = x_range[0]  # always include domain start point
                x[1] = x_range[1]  # always include domain end point
                # Reference values from the explicitly constructed polynomial.
                poly = np.poly1d(cls(*p))
                z = np.c_[np.tile(p, (nx,1)), x, poly(x)]
                dataset.append(z)
        dataset = np.concatenate(dataset, axis=0)

        def polyfunc(*p):
            # FuncData hands every column over as float; the degree must
            # be cast back to int before calling the evaluator.
            p = (p[0].astype(int),) + p[1:]
            return func(*p)

        olderr = np.seterr(all='raise')
        try:
            ds = FuncData(polyfunc, dataset, list(range(len(param_ranges)+2)), -1,
                          rtol=rtol)
            ds.check()
        finally:
            np.seterr(**olderr)

    def test_jacobi(self):
        self.check_poly(orth.eval_jacobi, orth.jacobi,
                        param_ranges=[(-0.99, 10), (-0.99, 10)], x_range=[-1, 1],
                        rtol=1e-5)

    def test_sh_jacobi(self):
        self.check_poly(orth.eval_sh_jacobi, orth.sh_jacobi,
                        param_ranges=[(1, 10), (0, 1)], x_range=[0, 1],
                        rtol=1e-5)

    def test_gegenbauer(self):
        self.check_poly(orth.eval_gegenbauer, orth.gegenbauer,
                        param_ranges=[(-0.499, 10)], x_range=[-1, 1],
                        rtol=1e-7)

    def test_chebyt(self):
        self.check_poly(orth.eval_chebyt, orth.chebyt,
                        param_ranges=[], x_range=[-1, 1])

    def test_chebyu(self):
        self.check_poly(orth.eval_chebyu, orth.chebyu,
                        param_ranges=[], x_range=[-1, 1])

    def test_chebys(self):
        self.check_poly(orth.eval_chebys, orth.chebys,
                        param_ranges=[], x_range=[-2, 2])

    def test_chebyc(self):
        self.check_poly(orth.eval_chebyc, orth.chebyc,
                        param_ranges=[], x_range=[-2, 2])

    def test_sh_chebyt(self):
        # fp warnings are expected for shifted Chebyshev here; ignore them.
        olderr = np.seterr(all='ignore')
        try:
            self.check_poly(orth.eval_sh_chebyt, orth.sh_chebyt,
                            param_ranges=[], x_range=[0, 1])
        finally:
            np.seterr(**olderr)

    def test_sh_chebyu(self):
        self.check_poly(orth.eval_sh_chebyu, orth.sh_chebyu,
                        param_ranges=[], x_range=[0, 1])

    def test_legendre(self):
        self.check_poly(orth.eval_legendre, orth.legendre,
                        param_ranges=[], x_range=[-1, 1])

    def test_sh_legendre(self):
        olderr = np.seterr(all='ignore')
        try:
            self.check_poly(orth.eval_sh_legendre, orth.sh_legendre,
                            param_ranges=[], x_range=[0, 1])
        finally:
            np.seterr(**olderr)

    def test_genlaguerre(self):
        self.check_poly(orth.eval_genlaguerre, orth.genlaguerre,
                        param_ranges=[(-0.99, 10)], x_range=[0, 100])

    def test_laguerre(self):
        self.check_poly(orth.eval_laguerre, orth.laguerre,
                        param_ranges=[], x_range=[0, 100])

    def test_hermite(self):
        self.check_poly(orth.eval_hermite, orth.hermite,
                        param_ranges=[], x_range=[-100, 100])

    def test_hermitenorm(self):
        self.check_poly(orth.eval_hermitenorm, orth.hermitenorm,
                        param_ranges=[], x_range=[-100, 100])
class TestRecurrence(object):
    """
    Check that the eval_* functions sig='ld->d' and 'dd->d' agree.
    """

    def check_poly(self, func, param_ranges=[], x_range=[], nn=10,
                   nparam=10, nx=10, rtol=1e-8):
        """Cross-check the two ufunc signatures of an eval_* routine.

        The dataset's expected values are produced with the all-double
        ('dd...->d') signature; FuncData then re-evaluates via the
        integer-degree ('ld...->d') recurrence path and compares.
        """
        np.random.seed(1234)
        dataset = []
        for n in np.arange(nn):
            params = [a + (b-a)*np.random.rand(nparam) for a,b in param_ranges]
            params = np.asarray(params).T
            if not param_ranges:
                params = [0]
            for p in params:
                if param_ranges:
                    p = (n,) + tuple(p)
                else:
                    p = (n,)
                x = x_range[0] + (x_range[1] - x_range[0])*np.random.rand(nx)
                x[0] = x_range[0]  # always include domain start point
                x[1] = x_range[1]  # always include domain end point
                # Force the all-double signature for the reference values.
                kw = dict(sig=(len(p)+1)*'d'+'->d')
                z = np.c_[np.tile(p, (nx,1)), x, func(*(p + (x,)), **kw)]
                dataset.append(z)
        dataset = np.concatenate(dataset, axis=0)

        def polyfunc(*p):
            # Integer-degree signature exercises the recurrence code path.
            p = (p[0].astype(int),) + p[1:]
            kw = dict(sig='l'+(len(p)-1)*'d'+'->d')
            return func(*p, **kw)

        olderr = np.seterr(all='raise')
        try:
            ds = FuncData(polyfunc, dataset, list(range(len(param_ranges)+2)), -1,
                          rtol=rtol)
            ds.check()
        finally:
            np.seterr(**olderr)

    def test_jacobi(self):
        self.check_poly(orth.eval_jacobi,
                        param_ranges=[(-0.99, 10), (-0.99, 10)], x_range=[-1, 1])

    def test_sh_jacobi(self):
        self.check_poly(orth.eval_sh_jacobi,
                        param_ranges=[(1, 10), (0, 1)], x_range=[0, 1])

    def test_gegenbauer(self):
        self.check_poly(orth.eval_gegenbauer,
                        param_ranges=[(-0.499, 10)], x_range=[-1, 1])

    def test_chebyt(self):
        self.check_poly(orth.eval_chebyt,
                        param_ranges=[], x_range=[-1, 1])

    def test_chebyu(self):
        self.check_poly(orth.eval_chebyu,
                        param_ranges=[], x_range=[-1, 1])

    def test_chebys(self):
        self.check_poly(orth.eval_chebys,
                        param_ranges=[], x_range=[-2, 2])

    def test_chebyc(self):
        self.check_poly(orth.eval_chebyc,
                        param_ranges=[], x_range=[-2, 2])

    def test_sh_chebyt(self):
        self.check_poly(orth.eval_sh_chebyt,
                        param_ranges=[], x_range=[0, 1])

    def test_sh_chebyu(self):
        self.check_poly(orth.eval_sh_chebyu,
                        param_ranges=[], x_range=[0, 1])

    def test_legendre(self):
        self.check_poly(orth.eval_legendre,
                        param_ranges=[], x_range=[-1, 1])

    def test_sh_legendre(self):
        self.check_poly(orth.eval_sh_legendre,
                        param_ranges=[], x_range=[0, 1])

    def test_genlaguerre(self):
        self.check_poly(orth.eval_genlaguerre,
                        param_ranges=[(-0.99, 10)], x_range=[0, 100])

    def test_laguerre(self):
        self.check_poly(orth.eval_laguerre,
                        param_ranges=[], x_range=[0, 100])

    def test_hermite(self):
        # Spot check against a precomputed reference value rather than the
        # recurrence cross-check used for the other polynomials.
        v = orth.eval_hermite(70, 1.0)
        a = -1.457076485701412e60
        assert_allclose(v,a)
| bsd-3-clause |
pyjs/pyjs | pyjs/builtin/mkbuiltin.py | 6 | 37729 | #!/usr/bin/env python
import re
# Numeric codes identifying how a generated function object is bound and
# dispatched by the pyjs JavaScript runtime.
func_type = {
    'jsmethod': 0,
    'function': 1,
    'staticmethod': 2,
    'classmethod': 3,
    'wrappermethod': 4,
}
# Maps descriptive runtime names to the abbreviated identifiers emitted
# into the generated JavaScript (keeps the builtin module output compact).
short_names = {
    'module': 'm$',
    'globals': 'g$',
    'locals': 'l$',
    'funcbase': 'f$',
    'builtin': 'B$',
    'constants': 'C$',
    'None': 'N$',
    'True': 'T$',
    'False': 'F$',
    'bool': 'b$',
    'booljs': '_b',
    'fcall': '_f',
    'fcallext': '_fe',
    'mcall': '_m',
    'mcallext': '_me',
}
class Replacement(object):
    """Template engine expanding ${N, name, args...}$ macro markers and
    ${name} placeholders into JavaScript snippets for the pyjs builtins."""

    # Matches a macro marker: ${indent_level, name, arg, ...}$
    re_p = re.compile('''[$]{\s*([0-9]+)(\s*,\s*([^,]+?))+\s*}[$]''')
    # Matches whitespace-only lines, stripped from the final output.
    re_empty_line = re.compile('''^ +$''', re.M)
def substitute(self, src, names):
def subs(m):
indent_level = int(m.group(1))
args = m.group(0)[:-1].split(',')[1:]
args = [i.strip() for i in m.group(0)[:-2].split(',')[1:]]
name, args = args[0], args[1:]
if name in short_names:
return short_names[name]
try:
repl = getattr(self, 'repl_%s' % name, None)(*args)
except:
print 'repl name:', name, args
raise
lines = ['%s%s' % (
' ' * indent_level,
line,
) for line in repl.split('\n')]
return "\n".join(lines)
dst = self.re_p.sub(subs, src)
for name in names:
if isinstance(names[name], basestring):
dst = dst.replace('${%s}' % name, names[name])
dst = self.re_empty_line.sub('', dst)
return dst
def repl_g(self):
    """Short JS identifier for the globals object."""
    # globals
    return short_names['globals']
def repl_l(self):
    """Short JS identifier for the locals object."""
    # locals
    # (An unreachable duplicate "return 'l$'" after this return was
    # removed; short_names['locals'] is already 'l$'.)
    return short_names['locals']
def repl_new_bool(self, value, true, false):
    """JS snippet converting ${value} to a boolean result.

    Handles JS primitives directly and, for pyjs instances, consults
    __nonzero__ / __len__. 'true'/'false' are the expressions emitted
    for the two outcomes. (Python 2 only: true/false as parameter
    names are keywords in Python 3.)
    """
    return self.substitute("""
var v = ${value}.valueOf();
switch (v) {
case null:
case false:
case 0:
case '':
return ${false};
case true:
case 1:
return ${true};
}
if (typeof v == 'number' || typeof v == 'string') {
return ${true};
}
if (${value}['$inst'] === true) {
var mro$, attr, attrs = ['__nonzero__', '__len__'];
for (var i = 0; i < attrs.length; i++) {
attr = attrs[i];
${2, getattribute, mro$, v, ${value}, attr}$
if (typeof v != "undefined") {
${3, bind_method, v, v, ${value}, _meth_src}$
v = @{{fcall}}(this, null, v, ${value});
switch (attr) {
case '__nonzero__':
return v.__v ? ${true} : ${false};
case '__len__':
return v.__v != 0 ? ${true} : ${false};
}
}
}
}
return ${true};""", locals())
def repl_jsmethod(self):
    """func_type code for a JS method, as a string."""
    return str(func_type['jsmethod'])
def repl_function(self):
    """func_type code for a plain function, as a string."""
    return str(func_type['function'])
def repl_classmethod(self):
    """func_type code for a classmethod, as a string."""
    return str(func_type['classmethod'])
def repl_staticmethod(self):
    """func_type code for a staticmethod, as a string."""
    return str(func_type['staticmethod'])
def repl_wrappermethod(self):
    """func_type code for a wrappermethod, as a string."""
    return str(func_type['wrappermethod'])
def repl_call_head(self, skip):
    """JS call prologue: collect arguments (dropping the first ${skip}),
    validate none are undefined, and push a tracking-stack frame."""
    return self.substitute("""\
var rval, f, star = null, dstar = null, named = null, o = obj,
args = Array.prototype.slice.call(arguments, ${skip}),
track_len = $pyjs.trackstack.length;
for (var i = 0; i < args.length; i++) {
if (typeof args[i] == "undefined") {
return @{{raise}}($new(@{{TypeError}}, B$str("argument " + i + " is undefined")));
}
}
if (module !== null && lineno !== null) {
if (module['__class__'] !== @{{module}}) {
debugger;
}
$pyjs.track.lineno = lineno;
$pyjs.track.module = module;
$pyjs.trackstack[track_len] = {'lineno': $pyjs.track.lineno, 'module': $pyjs.track.module};
}
if (typeof o == "undefined") {
//debugger;
//return ${0,None}$;
throw "o == 'undefined'";""", locals())
def repl_call_method(self):
    """JS snippet resolving a method name (or bound instance) on the
    call target; continues the if-chain opened by repl_call_head."""
    return self.substitute("""\
} else if (method !== null) {
if (method['$inst'] === true) {
o = method;
} else if (typeof o['$inst'] == 'boolean') {
// as in getattr
var mro$;
${2, getattributes, o, obj, method, null}$
} else {
o = o[method];
}""", locals())
def repl_call_object(self):
    """JS snippet reducing the call target 'o' to a plain function 'f':
    unwraps instancemethods (binding self / checking im_class), follows
    __call__ for instances, and raises TypeError for non-callables."""
    return self.substitute("""\
}
for (;;) {
switch (o['__class__']) {
case @{{function}}:
f = o;
break;
case @{{instancemethod}}:
f = o['im_func'];
if (o['im_self'] !== null){
args = [o['im_self']].concat(args);
} else if (base !== null) {
args = [base].concat(args);
} else if (obj['$inst'] === true) {
args = [obj].concat(args);
} else {
if (args.length > 0 && args[0]['$inst'] === true) {
// check if args[0].__class__.__mro__ contains im_class
var mro = args[0]['__class__']['__mro__'];
for (var j = 0; j < mro.length; j++) {
if (mro[j] === o['im_class']) {
mro = true;
break;
}
}
if (mro !== true) {
@{{_issubtype}}(o['im_class'], args[0]);
return @{{raise}}($new(@{{TypeError}}, B$str(
"unbound method " + f.__name__ + "() " +
"must be called with " + _typeof(o['im_class']) +
" instance as first argument (got " +
_typeof(args[0]) + " instance instead)")));
}
break;
}
return @{{raise}}($new(@{{TypeError}}, B$str(
"unbound method " + f.__name__ + "()" +
" must be called with " + _typeof(o['im_class']) +
" instance as first argument (got nothing instead)")));
}
break;
default:
if (typeof o != 'function') {
return @{{raise}}($new(@{{TypeError}}, B$str("javascript '" + typeof obj + "' object is not callable")));
}
if (typeof o['$inst'] === "boolean") {
obj = o;
method = '__call__';
//o = o['__call__'];
o = @{{_getattr}}(obj, method);
} else if (o['$inst'] === true) {
o = @{{_getattr}}(obj, method);
} else {
f = o;
break;
}
if (typeof o == "undefined") {
return @{{raise}}($new(@{{TypeError}}, B$str("'" + _typeof(obj) + "' object is not callable")));
}
continue
}
break;
}""", locals())
def repl_call_star_dstar(self):
    """JS snippet merging *args / **kwargs / named arguments into the
    positional args array, applying defaults and raising the standard
    TypeErrors for duplicate or unexpected keywords.

    NOTE(review): the '} if (star.__array ...' branch below looks like a
    missing 'else' ('} else if') -- confirm against upstream pyjs.
    """
    return self.substitute("""\
named = args.pop();
dstar = args.pop();
star = args.pop();
if (star !== null) {
if (star instanceof Array) {
args = args.concat(star);
} if (star.__array instanceof Array) {
args = args.concat(star.__array);
} else {
var iter = @{{fcall}}(this, null, @{{iter}}, null, star),
next = @{{getattr}}(iter, 'next'),
stopiter = @{{stopiter}},
v;
for (;;) {
@{{stopiter}} = true;
v = @{{fcall}}(this, null, next, iter);
@{{stopiter}} = stopiter;
if (v === @{{StopIter}}) {
break;
}
args.push(v);
}
}
star = null;
}
if (named !== null) {
var k, d = {};
k = false;
for (k in named) {
d[k] = named[k];
}
if (k !== false) {
if (typeof f.func_args == "undefined") {
@{{raise}}($new(@{{TypeError}}, B$str("Cannot apply named arguments on javascript function")));
}
named = d;
}
}
if (dstar !== null) {
if (dstar._length == 0) {
dstar = null;
} else {
if (typeof f.func_args == "undefined") {
@{{raise}}($new(@{{TypeError}}, B$str("Cannot apply dstar args on javascript function")));
}
var k, v, d = {};
if (dstar.__class__ === @{{dict}}) {
for (var h in dstar.__object_hash) {
k = dstar.__object_hash[h]['key'];
if (typeof k != "string" && k.__class__ != @{{str}}) {
@{{raise}}($new(@{{TypeError}}, B$str(f.__name__ + "() keywords must be strings")));
}
d[k.valueOf()] = dstar.__object[h];
}
} else if (dstar['$inst'] == "undefined") {
for (var k in dstar) {
d[k] = dstar[k];
}
} else {
@{{raise}}($new(@{{TypeError}}, B$str("Invalid dstar_args")));
}
dstar = d;
}
}
if (dstar !== null || named !== null) {
var ndefaults = f.func_defaults ? f.func_defaults.length : 0;
if (dstar === null) {
dstar = {};
} else if (named === null) {
named = {};
}
for (var i = 0; i < f.func_args.length; i++) {
k = f.func_args[i];
v = named[k];
delete named[k];
if (typeof v == "undefined") {
v = dstar[k];
delete dstar[k];
} else if (typeof dstar[k] != "undefined") {
@{{raise}}($new(@{{TypeError}}, B$str(f.__name__ + "() got multiple values for keyword argument '" + k + "'")));
}
if (typeof v != "undefined") {
if (i < args.length) {
@{{raise}}($new(@{{TypeError}}, B$str(f.__name__ + "() got multiple values for keyword argument '" + k + "'")));
}
args[i] = v;
} else if (i >= args.length) {
if (i < f.func_minargs) {
@{{raise}}($new(@{{TypeError}}, B$str(f.__name__ + "() takes at least " + f.func_minargs + " non-keyword arguments (" + f.func_args.length + " given)")));
}
args[i] = f.func_defaults[ndefaults - (f.func_args.length - i)];
}
}
if (f.func_dstarargs === null) {
for (var k in dstar) {
@{{raise}}($new(@{{TypeError}}, B$str(f.__name__ + "() got an unexpected keyword argument '" + k + "'")));
}
}
for (k in named) {
if (typeof dstar[k] != 'undefined') {
return @{{raise}}($new(@{{TypeError}}, B$str(f.__name__ + "() got multiple values for keyword argument '" + k + "'")));
}
dstar[k] = named[k];
}
dstar = $new(@{{dict}}, dstar);
}""", locals())
def repl_call_tail(self):
    """JS call epilogue: fill in remaining defaults, pack *args/**kwargs,
    arity-check with the standard TypeError messages, invoke f, validate
    the return value and pop the tracking-stack frame."""
    return self.substitute("""\
if (typeof f.func_args != "undefined") {
var n_args = args.length;
if (dstar === null && args.length < f.func_args.length) {
// Just add defaults
var n = f.func_args.length - args.length;
if (n > 0 && f.func_defaults !== null) {
if (n > f.func_defaults.length) {
args = args.concat(f.func_defaults.slice(0));
} else {
n = f.func_defaults.length - n;
args = args.concat(f.func_defaults.slice(n));
}
}
}
if (f.func_args.length != args.length) {
if (f.func_starargs !== null && f.func_args.length < args.length) {
if (f.func_args.length == 0) {
star = args;
args = [];
} else {
star = args.slice(f.func_args.length);
args.splice(f.func_args.length, args.length - f.func_args.length);
}
star = B$tuple(star);
} else if (f.func_defaults === null || f.func_defaults.length == 0) {
switch (f.func_args.length) {
case 0:
@{{raise}}($new(@{{TypeError}}, B$str(f.__name__ + "() takes no arguments (" + n_args + " given)")));
case 1:
@{{raise}}($new(@{{TypeError}}, B$str(f.__name__ + "() takes exactly " + f.func_args.length + " argument (" + n_args + " given)")));
default:
@{{raise}}($new(@{{TypeError}}, B$str(f.__name__ + "() takes exactly " + f.func_args.length + " arguments (" + n_args + " given)")));
}
} else {
if (f.func_args.length > args.length) {
if (f.func_minargs == 1) {
@{{raise}}($new(@{{TypeError}}, B$str(f.__name__ + "() takes at least 1 argument (0 given)")));
} else {
@{{raise}}($new(@{{TypeError}}, B$str(f.__name__ + "() takes at least " + f.func_minargs + " arguments (" + n_args + " given)")));
}
}
if (f.func_minargs == 1) {
@{{raise}}($new(@{{TypeError}}, B$str(f.__name__ + "() takes at most 1 argument (" + n_args + " given)")));
} else {
@{{raise}}($new(@{{TypeError}}, B$str(f.__name__ + "() takes at most " + f.func_minargs + " arguments (" + n_args + " given)")));
}
}
}
if (f.func_starargs !== null) {
if (star === null) {
//star = B$tuple([]);
star = empty_tuple;
}
args.push(star);
}
if (module !== null && lineno !== null) {
if (typeof f._module != "undefined" &&
f._module !== null &&
typeof f._lineno != "undefined" &&
f._lineno !== null) {
$pyjs.track.module = f._module;
$pyjs.track.lineno = f._lineno;
}
}
if (dstar !== null) {
args.push(dstar);
args.push(null); // no named args
} else if (f.func_dstarargs !== null) {
dstar = B$dict();
args.push(dstar);
args.push(null); // no named args
}
}
if (typeof obj['$inst'] != "undefined" || typeof obj['func_type'] != "undefined") {
rval = f.apply(module, args);
} else {
// obj is an ordinary javascript object
rval = f.apply(obj, args);
}
if (typeof rval == "undefined") {
if (typeof f['__name__'] == 'undefined') {
return ${0,None}$;
} else {
@{{raise}}($new(@{{ValueError}}, B$str("return value of call is undefined")));
}
}
if (module !== null && lineno !== null) {
$pyjs.track = $pyjs.trackstack[track_len];
$pyjs.trackstack.splice(track_len, $pyjs.trackstack.length);
if (typeof $pyjs.track == "undefined" || $pyjs.track.lineno != lineno || $pyjs.track.module !== module) {
debugger;
}
}
return rval;""", locals())
def repl___new__(self, instance, cls, add_dict=True):
    """JS snippet creating a new pyjs instance ${instance} of ${cls}.

    add_dict may be True/False or the strings 'true'/'false' (macro
    arguments arrive as strings); when enabled, an instance __dict__
    is attached unless the class declares empty __slots__.
    """
    # Normalize string macro arguments to a boolean.
    if add_dict is not True:
        if add_dict.lower().strip() == 'false':
            add_dict = False
        else:
            add_dict = True
    if add_dict:
        add_dict = """
if (typeof ${cls} == "undefined") {
debugger;
}
if (typeof ${cls}['__slots__'] == "undefined" || ${cls}['__slots__'].length > 0) {
${instance}['__dict__'] = B$dict();
${instance}['$dict'] = ${instance}['__dict__']['__object'];
}"""
    else:
        add_dict = ''
    # %(add_dict)s below is interpolated from locals() before macro
    # expansion.
    return self.substitute("""\
var ${instance} = function ( ) {
var args = Array.prototype.slice.call(arguments);
if (arguments.callee['__class__'] === @{{instancemethod}}) {
if (arguments.callee['im_self'] !== null) {
return @{{fcall}}.apply(this, [this, null, arguments.callee['im_func'], null, arguments.callee['im_self']].concat(args));
}
}
var a = @{{_getattr}}(arguments.callee, '__call__');
if (typeof a == "undefined") {
@{{raise}}($new(@{{TypeError}}, B$str("'" + _typeof(arguments.callee) + "' object is not callable")));
}
if (args.length >= 3) {
var len = args.length;
if ((args[len-3] === null || args[len-3]['__class__'] === @{{tuple}}) &&
(args[len-2] === null || args[len-3]['__class__'] === @{{dict}}) &&
(args[len-1] === null || typeof args[len-1]['__class__'] == "undefined")) {
return @{{fcallext}}.apply(this, [this, null, a, arguments.callee].concat(args));
}
}
return @{{fcall}}.apply(this, [this, null, a, arguments.callee].concat(args));
}
${instance}['toString'] = function ( ) {
try {
return @{{mcall}}(this, null, this, '__str__').valueOf();
} catch (e) {
}
try {
return "<" + this.__class__.__name__ + " instance>";
} catch (e) {
}
return "<instance>";
};
${instance}['$inst'] = true;%(add_dict)s
${instance}['__class__'] = ${cls};""" % locals(), locals())
def repl_create_instance(self, args, cls, mcall, fcall):
    """JS snippet implementing instance creation: dispatch __new__
    (using the fast inline path when it is the builtin B$__new__),
    then run __init__ and verify it returned None."""
    return self.substitute("""\
var method$, instance, mro$, module = this['__class__'] === @{{module}} ? this : null;
${0, getattribute, mro$, method$, ${cls}, '__new__'}$
if (method$ === B$__new__) {
${1, __new__, instance, ${cls}}$
} else {
instance = ${fcall}.apply(module, [module, null, method$, ${cls}, ${cls}].concat(${args}));
instance['__class__'] = cls;
}
if (instance['$inst'] === true) {
${1, getattribute, mro$, method$, ${cls}, '__init__'}$
if (method$ !== B$__init__) {
${2, bind_method, method$, method$, instance, _meth_src}$
var ret = ${fcall}.apply(module, [module, null, method$, null].concat(${args}));
if (ret !== @{{None}} && ret !== null) {
if (ret['__class__'] != "undefined") {
return @{{raise}}($new(@{{TypeError}}, B$str("__init__() should return None, not '" + ret['__class__']['__name__'] + "'")));
}
return @{{raise}}($new(@{{TypeError}}, B$str("__init__() should return None")));
}
}
}
return instance;""", locals())
def repl_bind_method(self, dst, src, obj, meth_src):
    """JS snippet binding an attribute looked up on a class to ${obj}:
    wraps functions as instancemethods, unwraps static/classmethods,
    and falls back to the __get__ descriptor protocol."""
    return self.substitute("""\
if (${meth_src}['$inst'] === false && ${obj}['__class__'] !== @{{module}} && typeof ${src} != "undefined" && typeof ${src}['$inst'] != "undefined") {
switch (${dst}['__class__']) {
case @{{function}}:
${3, __new__, _new_dst$, @{{instancemethod}}}$
_new_dst$['im_class'] = ${obj}['$inst'] === true ? ${obj}['__class__'] : ${obj};
_new_dst$['im_func'] = ${dst};
_new_dst$['im_self'] = ${obj}['$inst'] === true ? ${obj} : null;
${dst} = _new_dst$;
break;
case @{{staticmethod}}:
${dst} = ${dst}['im_func'];
break;
case @{{classmethod}}:
${3, __new__, _new_dst$, @{{instancemethod}}}$
_new_dst$['im_class'] = ${obj}['$inst'] === true ? ${obj}['__class__'] : ${obj};
_new_dst$['im_func'] = ${dst}['im_func'];
_new_dst$['im_self'] = ${obj}['$inst'] === true ? ${obj}['__class__'] : ${obj};
${dst} = _new_dst$;
break;
case @{{bool}}: // Some known to be non-descriptors
case @{{int}}:
case @{{long}}:
case @{{str}}:
break;
default:
// check for __get__ method in ${dst}
if (${dst}['$inst'] === true) {
var get$ = @{{_getattr}}(${dst}, '__get__');
if (typeof get$ != 'undefined') {
${dst} = @{{fcall}}(this, null, get$, ${dst}, ${obj}, ${obj}['__class__']);
}
}
break;
}
}""", locals())
def repl_attr_args_validate(self, _self, name):
    """JS snippet validating __getattribute__ arguments: the receiver
    must be an instance and ${name} must be a string.

    NOTE(review): the template references a literal JS '$self' rather
    than the _self macro argument, which is unused here -- confirm that
    the generated code always has a '$self' variable in scope.
    """
    return self.substitute("""\
if ($self['$inst'] !== true) {
@{{raise}}($new(@{{TypeError}}, B$str("can't apply this __getattribute__ to type object")));
}
if (${name}['__class__'] !== @{{str}} && typeof ${name} != 'string') {
@{{raise}}($new(@{{TypeError}}, B$str("attribute name must be string")));
}""", locals())
def repl_getattribute(self, mro, dst, src, name, break_after_instance=False):
    """JS snippet implementing the core attribute lookup into ${dst}:
    handles modules, functions, instances (via per-class
    __getattribute__ then the instance dict), class MRO walks and the
    __getattr__ fallback. break_after_instance, when set, emits a
    'break;' so the class-side lookup is skipped for instances."""
    if break_after_instance:
        break_after_instance = 'break;\n '
    else:
        break_after_instance = ''
    return self.substitute("""\
${dst} = [][1];
var ${mro} = ${src}['__mro__'];
var _meth_src = ${src};
switch (${src}['$inst']) {
case true:
if (${src}['__class__'] === @{{module}}) {
${dst} = ${src}['$dict'][${name}];
break;
} else if (${src}['__class__'] === @{{function}}) {
switch (${name}.charAt(0)) {
case 'i':
case '_':
${dst} = ${src}[${name}];
}
break;
}
var _noraise$ = @{{noraise}};
var ga;
${mro} = ${src}['__class__']['__mro__'];
for (var mro_i$ = 0; mro_i$ < ${mro}.length - 1; mro_i$++) {
var _mro$ = ${mro}[mro_i$];
var ga = _mro$['__getattribute__'];
if (typeof ga == "undefined") {
if (typeof _mro$ == "undefined" || typeof _mro$['$dict']['__getattribute__'] == "undefined") {
continue;
}
ga = _mro$['$dict']['__getattribute__'];
}
${3, bind_method, ga, ${src}, ${src}, ${src}['__class__']}$
@{{noraise}} = @{{AttributeError}};
${dst} = @{{fcall}}(this, null, ga, _mro$, ${name});
@{{noraise}} = _noraise$;
if (${dst} === @{{AttributeError}}) {
${dst} = [][1];
}
_meth_src = ${src}['__class__'];
${src} = ${src}['__class__'];
break;
}
if (typeof ${dst} == "undefined") {
if (typeof ${src}['$dict'] != "undefined") {
${dst} = ${src}['$dict'][${name}];
if (typeof ${dst} != "undefined") {
if (${dst} !== {}[${name}]) {
break;
}
${dst} = [][1];
}
}
switch (${name}.charAt(0)) {
case 'i':
case '_':
${dst} = ${src}[${name}];
}
if (typeof ${dst} != "undefined") {
break;
}
}${break_after_instance}
case false:
if (typeof ${dst} == "undefined") {
var _mro$, ga;
if (${src}['$inst'] === true) {
_meth_src = ${src}['__class__'];
} else {
switch (${name}.charAt(0)) {
case 'i':
case '_':
${dst} = ${src}[${name}];
}
if (typeof ${dst} != "undefined") {
break;
}
}
if (typeof ${dst} == "undefined") {
for (var mro_i$ = 0; mro_i$ < ${mro}.length; mro_i$++) {
_mro$ = ${mro}[mro_i$];
${dst} = _mro$['$dict'][${name}];
if (typeof ${dst} != "undefined") {
if (${dst} !== {}[${name}]) {
break;
}
${dst} = [][1];
}
switch (${name}.charAt(0)) {
case 'i':
case '_':
${dst} = _mro$[${name}];
}
if (typeof ${dst} != "undefined") {
break;
}
}
}
if (typeof ${dst} == "undefined" && ${name} !== '__get__') {
for (var mro_i$ = 0; mro_i$ < ${mro}.length - 1; mro_i$++) {
_mro$ = ${mro}[mro_i$];
if (typeof _mro$['$dict'] == "undefined" || typeof _mro$['$dict']['__getattr__'] == "undefined") {
continue;
}
ga = _mro$['$dict']['__getattr__'];
${5, bind_method, ga, ${src}, ${src}, ${src}['__class__']}$
@{{noraise}} = @{{AttributeError}};
${dst} = @{{fcall}}(this, null, ga, _mro$, ${name});
@{{noraise}} = _noraise$;
if (${dst} === @{{AttributeError}}) {
${dst} = [][1];
}
// TODO : unbind ${dst} ?
break;
}
}
}
break;
default:
${dst} = ${src}[${name}];
if (typeof ${dst} == "undefined" && typeof ${src}['$dict'] != "undefined") {
${dst} = ${src}['$dict'][${name}];
}
}""", locals())
def repl_getattributes(self, dst, src, name, value):
    """JS snippet resolving a (possibly chained) attribute path into
    ${dst}: for each name it runs the getattribute macro, applies
    method binding on the final component or the __get__ descriptor on
    intermediates, and raises AttributeError (or substitutes ${value})
    when a component is missing."""
    return self.substitute("""\
var attrname, attrnames, ga, mro, _${src} = ${src};
if (${name} instanceof Array) {
attrnames = ${name};
} else {
attrnames = [${name}];
}
find_attr:
for (var attri = 0; attri < attrnames.length; attri++) {
attrname = attrnames[attri];
if (typeof attrname != 'string') {
if (typeof attrname['__s'] != "undefined") {
attrname = attrname['__s'];
} else {
@{{raise}}($new(@{{TypeError}}, B$str("attribute name must be string, not '" + _typeof(attrname) + "'")));
}
}
${1, getattribute, mro, ${dst}, _${src}, attrname}$
if (typeof ${dst} == "undefined") {
if (_${src}['$inst'] === true && _${src}['__class__'] !== @{{module}} && _${src}['__class__'] !== @{{function}}) {
if (typeof ${dst} == "undefined") {
if (${value} === null || typeof ${value} == "undefined") {
@{{raise}}($new(@{{AttributeError}}, B$str("'" + _${src}['__class__']['__name__'] + "' object has no attribute '" + attrname + "'")));
} else {
${dst} = ${value};
break find_attr;
}
}
}
if (${value} === null || typeof ${value} == "undefined") {
if (_${src}['$inst'] === false) {
@{{raise}}($new(@{{AttributeError}}, B$str("type object '" + _${src}['__name__'] + "' object has no attribute '" + attrname + "'")));
}
@{{raise}}($new(@{{AttributeError}}, B$str(attrname)));
}
${dst} = ${value};
break find_attr;
}
if (attri == attrnames.length - 1) {
${2, bind_method, ${dst}, _${src}, ${src}, _meth_src}$
} else {
// check for __get__ method in ${dst}
if (${dst}['$inst'] === true) {
var get$ = @{{_getattr}}(${dst}, '__get__');
if (typeof get$ != 'undefined') {
${dst} = @{{fcall}}(this, null, get$, ${dst}, _${src}, _${src}['__class__']);
}
}
}
${src} = _${src};
_${src} = ${dst};
}""", locals())
def repl_type_class(self, cls, module, clsname, bases, dict):
    """JS snippet constructing a new pyjs class object ${cls}: sets
    __name__/__bases__, computes the MRO via mro_merge, copies the
    class dict, wires __dict__ through dictproxy and installs the
    __call__ slot."""
    return self.substitute("""\
var ${cls},
mro$ = new Array(),
_bases = ${bases};
${cls} = function () {
var args = Array.prototype.slice.call(arguments);
if (args.length >= 3) {
var len = args.length;
if ((args[len-3] === null || args[len-3]['__class__'] === @{{tuple}}) &&
(args[len-2] === null || args[len-3]['__class__'] === @{{dict}}) &&
(args[len-1] === null || typeof args[len-1]['__class__'] == "undefined")) {
return $newext.apply(this, [arguments.callee].concat(args));
}
}
return $new.apply(this, [arguments.callee].concat(args));
};
${cls}['$inst'] = false;
${cls}['__name__'] = typeof ${clsname} == "string" ? B$str(${clsname}) : ${clsname};
if (${bases} instanceof Array) {
${cls}['__bases__'] = B$tuple(${bases});
} else {
${cls}['__bases__'] = ${bases};
_bases = ${bases}['__array'];
}
if (typeof ${dict}['mro'] != "undefined") {
// The mro method (?) exists. Use that.
// TODO
@{{raise}}(@{{NotImplemented}});
} else {
for (var i = 0; i < _bases.length; i++) {
mro$.push(new Array().concat(_bases[i].__mro__));
}
${cls}['__mro__'] = [${cls}].concat(mro_merge(mro$));
}
${cls}['$dict'] = {};
if (${module}['__class__'] !== @{{module}}) {
debugger;
}
var __module__ = typeof ${module}['$dict'] != "undefined" ? ${module}['$dict']['__name__'] : ${module}['__name__'];
if (typeof __module__ != "undefined") {
cls['$dict']['__module__'] = __module__;
}
if (typeof ${dict} != "undefined" && ${dict}['__class__'] === @{{dict}}) {
for (var k in ${dict}.__object) {
cls['$dict'][k] = ${dict}.__object[k];
}
}
${cls}['__dict__'] = @{{dictproxy}};
${cls}['__dict__']['__object'] = ${cls}['$dict'];
func(${0,module}$, null, ${cls}, '__call__', ${0, classmethod}$, null, 'args', 'kwargs', null, $newext, true);
${cls}['__class__'] = @{{type}};""", locals())
def repl_hash(self, obj, dst):
    """JS snippet computing a hash key for ${obj} into ${dst}: cached
    id, special-cased strings (with a '#' sentinel prefix), __hash__
    for instances, or a typeof-tagged key for plain JS values."""
    return self.substitute("""\
if (typeof ${obj}['$inst'] != "undefined") {
${dst} = ${obj}[$hash_id_name$];
if (typeof ${dst} == "undefined") {
if (${obj}['__class__'] === @{{str}}) {
${dst} = ${obj}['__s'].charAt(0) == '#' ? '#string#' + ${obj}['__s'] : ${obj}['__s'];
} else {
${dst} = @{{mcall}}(this, null, ${obj}, '__hash__');
}
}
} else if (typeof ${obj} == "string") {
${dst} = ${obj}.charAt(0) == '#' ? '#string#' + ${obj} : ${obj};
} else {
${dst} = '#' + typeof ${obj} + '#' + ${obj};
}""" % locals(), locals())
def repl_op_compare(self, op, a, b, val1, val2):
    """JS snippet for a comparison of ${a} and ${b}, yielding ${val1}
    (true) or ${val2} (false).

    Three template families: identity (is / is_not), membership
    (in / not_in via __contains__ or iteration), and rich/ordering
    comparisons with numeric fast paths falling back to cmp().
    NOTE(review): the identity template uses literal '$a.__v'/'$b.__v'
    where '${a}'/'${b}' would be expected -- confirm upstream.
    """
    if op in ['is', 'is_not']:
        return self.substitute("""\
if (${a} === ${b}) {
return ${val1};
}
if (${a} !== null && ${b} !== null) {
switch ((${a}.__number__ << 8) | ${b}.__number__) {
case 0x0101:
return ${a} == ${b} ? ${val1} : ${val2};
case 0x0202:
return $a.__v == $b.__v ? ${val1} : ${val2};
case 0x0404:
return @{{long}}['$dict'].__cmp__(${a}, ${b}) == 0 ? ${val1} : ${val2};
}
}
return ${val2};""" % locals(), locals())
    if op in ['in', 'not_in']:
        return self.substitute("""\
var i, mro$;
${0, getattribute, mro$, i, ${b}, '__contains__'}$
if (typeof i != "undefined") {
${1, bind_method, i, i, ${b}, _meth_src}$
return @{{fcall}}(this, null, i, ${b}, ${a}).valueOf() ? ${val1} : ${val2};
}
var __iter__ = @{{iter}}(${b});
var $stopiter = @{{stopiter}}
for (;;) {
@{{stopiter}} = true;
i = @{{mcall}}(this, null, __iter__, 'next');
@{{stopiter}} = $stopiter;
if (i === @{{StopIter}}) {
return ${val2};
}
if (@{{op_eq}}(i, ${a})) {
return ${val1};
}
}""" % locals(), locals())
    valnull = '${val2}'
    if not '=' in op:
        opis = ''
    elif op == '!=':
        opis = 'if (${a} === ${b}) return ${val2};\n'
        valnull = '${val1}'
    else:
        opis = 'if (${a} === ${b}) return ${val1};\n'
    # NOTE(review): the identity short-circuit computed above is
    # unconditionally discarded here (apparently a deliberate disable);
    # only valnull survives.
    opis = ''
    return self.substitute("""\
%(opis)sif (${a} !== null && ${b} !== null) {
switch ((${a}.__number__ << 8) | ${b}.__number__) {
case 0x0101:
case 0x0401:
return ${a}.valueOf() ${op} ${b}.valueOf() ? ${val1} : ${val2};
case 0x0102:
return ${a}.valueOf() ${op} ${b}.__v ? ${val1} : ${val2};
case 0x0201:
return ${a}.__v ${op} ${b}.valueOf() ? ${val1} : ${val2};
case 0x0202:
return ${a}.__v ${op} ${b}.__v ? ${val1} : ${val2};
case 0x0104:
case 0x0204:
${a} = $new(@{{long}}, ${a}.valueOf());
case 0x0404:
if (${a}['__class__'] !== @{{long}}) break;
return @{{long}}['$dict']['__cmp__'](${a}, ${b}).valueOf() ${op} 0 ? ${val1} : ${val2};
case 0x0402:
if (${a}['__class__'] !== @{{long}}) break;
return @{{long}}['$dict']['__cmp__'](${a}, $new(@{{long}}, ${b}.valueOf())).valueOf() ${op} 0 ? ${val1} : ${val2};
}
var v = @{{fcall}}(this, null, @{{cmp}}, null, ${a}, ${b}).valueOf();
return v === null ? %(valnull)s : (v ${op} 0 ? ${val1} : ${val2});
}
return ${val2};""" % locals(), locals())
def repl_op_arithmetic(self, op, opname, x, y, i, opfunc=None):
    """JS snippet for the binary arithmetic operator ${op} on ${x} and
    ${y} (in-place variant when ${i} is true).

    opfunc customizes the plain-number expression: None uses the raw
    operator, 'Math.floor' wraps a division, 'mod' emits a
    sign-correcting modulo, anything else is called as opfunc(x, y).
    Numeric fast paths dispatch on the packed __number__ codes; object
    operands fall back to __op__/__rop__/__iop__ protocol methods.
    """
    jsop = op
    if opfunc is None:
        opnumber = "x_v %s y_v" % op
    elif opfunc == 'Math.floor':
        jsop = '/'
        opnumber = "%s(x_v %s y_v)" % (opfunc, jsop)
    elif opfunc == 'mod':
        opnumber = "x_v %s y_v" % op
        # Python modulo semantics: result takes the sign of the divisor.
        opnumber = "(x_v=x_v %s y_v) < 0 && y_v > 0 ? x_v + y_v : x_v" % op
    else:
        opnumber = "%s(x_v, y_v)" % opfunc
    if op in ['/', '//', '%']:
        zerodiv = "if (${y}.valueOf() == 0) return @{{raise}}($new(@{{ZeroDivisionError}}, B$str('float divmod()')));\n";
    else:
        zerodiv = ''
    return self.substitute("""\
%(zerodiv)sif (${x} !== null && ${y} !== null) {
var m = ${i} === true ? '__i%(opname)s__' : '__%(opname)s__';
switch ((${x}.__number__ << 8) | ${y}.__number__) {
case 0x0101:
case 0x0102:
case 0x0201:
case 0x0104:
case 0x0401:
var x_v = ${x}.valueOf(), y_v = ${y}.valueOf();
return $new(@{{float}}, %(opnumber)s);
case 0x0202:
return @{{int}}['$dict'].__%(opname)s__(${x}, ${y});
case 0x0204:
return @{{long}}['$dict'].__%(opname)s($new(@{{long}}, ${x}.__v), ${y});
case 0x0402:
return @{{long}}['$dict'].__%(opname)s(${x}, $new(@{{long}}, ${y}.__v));
case 0x0404:
return @{{long}}['$dict'].__%(opname)s(${x}, ${y});
}
if (${x}['$inst'] === true && ${y}['$inst'] === true) {
var op, v;
op = @{{_getattr}}(${x}, m);
if (typeof op != "undefined") {
v = @{{fcall}}(this, null, op, ${x}, ${y});
if (v !== @{{NotImplemented}}) {
return v;
}
}
if (${i} !== true) {
op = @{{_getattr}}(${y}, '__r%(opname)s__');
if (typeof op != "undefined") {
v = @{{mcall}}(this, null, ${y}, '__r%(opname)s__', ${x});
if (v !== @{{NotImplemented}}) {
return v;
}
}
} else {
op = @{{_getattr}}(${x}, '__%(opname)s__');
if (typeof op != "undefined") {
v = @{{fcall}}(this, null, op, ${x}, ${y});
if (v !== @{{NotImplemented}}) {
return v;
}
}
}
}
var x_v = ${x}.valueOf(), y_v = ${y}.valueOf();
if (typeof x_v == 'number' && typeof y_v == 'number') {
return $new(@{{float}}, %(opnumber)s);
}
}
@{{raise}}($new(@{{TypeError}}, B$str("unsupported operand type(s) for %(op)s: '" + @{{repr}}(${x}) + "', '" + @{{repr}}(${y}) + "'")));\
""" % locals(), locals())
def repl_op_bitexpr2(self, op, opname, x, y):
    """Emit JS for the two-operand bitwise/shift operator `op`.

    Fast paths dispatch on the packed __number__ tags of ${x} and ${y}
    (0x02 == int, 0x04 == long); any other combination falls back to
    the __<opname>__ / __r<opname>__ protocol and finally raises
    TypeError.
    """
    # NOTE(review): the long cases (0x0402/0x0404) call '__%(opname)s'
    # without trailing underscores, unlike the int case's
    # '__%(opname)s__' -- presumably matching long's internal $dict
    # naming; confirm against the long implementation before "fixing".
    return self.substitute("""\
if (${x} !== null && ${y} !== null) {
switch ((${x}.__number__ << 8) | ${y}.__number__) {
case 0x0202:
if (${x}['__class__'] === @{{int}}) {
return ${x}['__class__']['$dict']['__%(opname)s__'](${x}, ${y});
}
break
case 0x0204:
if (${y}['__class__'] === @{{long}}) {
return ${y}['__class__']['$dict']['__r%(opname)s__'](${y}, $new(@{{long}}, ${x}));
}
break
case 0x0402:
if (${x}['__class__'] === @{{long}}) {
return ${x}['__class__']['$dict']['__%(opname)s'](${x}, $new(@{{long}}, ${y}.__v));
}
break
case 0x0404:
if (${x}['__class__'] === @{{long}}) {
return ${x}['__class__']['$dict']['__%(opname)s'](${x}, ${y});
}
break
}
var v = @{{_getattr}}(${x}, '__%(opname)s__');
if (typeof v != "undefined") {
v = @{{fcall}}(this, null, v, ${x}, ${y});
if (v !== @{{NotImplemented}}) {
return v;
}
}
v = @{{_getattr}}(${y}, '__r%(opname)s__');
if (typeof v != "undefined") {
v = @{{fcall}}(this, null, v, ${y}, ${x});
if (v !== @{{NotImplemented}}) {
return v;
}
}
}
@{{raise}}($new(@{{TypeError}}, @{{sprintf}}("unsupported operand type(s) for %(op)s: '%%r', '%%r'", [${x}, ${y}])));\
""" % locals(), locals())
def repl_op_bitexpr(self, op, opname, args):
    """Emit JS left-folding the n-ary bitwise operator `op` over `args`.

    The generated code walks the argument array trying __<opname>__
    then __r<opname>__ at each step; as soon as a step yields only
    NotImplemented it gives up and raises TypeError naming every
    operand.
    """
    return self.substitute("""\
var a;
if (args[0] !== null && args[1] !== null && args.length > 1) {
var v, r, arg;
v = args[0];
for (var i = 1; i < args.length; i++) {
arg = args[i]
r = @{{_getattr}}(v, '__%(opname)s__');
if (typeof r != "undefined") {
r = @{{fcall}}(this, null, r, v, arg);
if (r !== @{{NotImplemented}}) {
v = r;
continue;
}
}
r = @{{_getattr}}(arg, '__r%(opname)s__');
if (typeof r != "undefined") {
r = @{{fcall}}(this, null, r, arg, v);
if (r !== @{{NotImplemented}}) {
v = r;
continue;
}
}
v = null;
break;
}
if (v !== null) {
return v;
}
}
var msg = "unsupported operand type(s) for %(op)s: "
for (var i = 0; i < args.length; i++) {
msg += @{{repr}}(args[i]);
}
@{{raise}}($new(@{{TypeError}}, B$str(msg)));\
""" % locals(), locals())
def repl_op_iexpr(self, op, opname, x, y):
    """Emit JS for the in-place form of `op`.

    NOTE(review): the emitted code unconditionally raises TypeError and
    references a JS variable `msg` that this template never defines --
    this looks like an unfinished stub; verify before relying on it.
    """
    return self.substitute("""\
@{{raise}}($new(@{{TypeError}}, B$str(msg)));\
""" % locals(), locals())
# Script entry point: regenerate __builtin__.py from its template.
# (Python 2 source: note the print statement below.)
if __name__ == '__main__':
    import sys
    if not sys.argv[1:]:
        # Default mode: expand __builtin__.py.in into __builtin__.py.
        src = open("__builtin__.py.in", 'r').read()
        dst = Replacement().substitute(src, {})
        open("__builtin__.py", 'w').write(dst)
    else:
        # With a filename argument: expand that template to stdout.
        src = open(sys.argv[1], 'r').read()
        dst = Replacement().substitute(src, {})
        print dst
        #open("pyjslib.py", 'w').write(dst)
| apache-2.0 |
ghhong1986/flask | docs/flaskext.py | 2228 | 4875 | # flasky extensions. flasky pygments style based on tango style
from pygments.style import Style
from pygments.token import Keyword, Name, Comment, String, Error, \
Number, Operator, Generic, Whitespace, Punctuation, Other, Literal
class FlaskyStyle(Style):
    """Pygments style for the Flask docs, based on the Tango style."""
    # Page background used behind highlighted code.
    background_color = "#f8f8f8"
    default_style = ""
    # Token -> style-string mapping consumed by Pygments formatters.
    styles = {
        # No corresponding class for the following:
        #Text:                     "", # class:  ''
        Whitespace:                "underline #f8f8f8",      # class: 'w'
        Error:                     "#a40000 border:#ef2929", # class: 'err'
        Other:                     "#000000",                # class 'x'
        Comment:                   "italic #8f5902",         # class: 'c'
        Comment.Preproc:           "noitalic",               # class: 'cp'
        Keyword:                   "bold #004461",           # class: 'k'
        Keyword.Constant:          "bold #004461",           # class: 'kc'
        Keyword.Declaration:       "bold #004461",           # class: 'kd'
        Keyword.Namespace:         "bold #004461",           # class: 'kn'
        Keyword.Pseudo:            "bold #004461",           # class: 'kp'
        Keyword.Reserved:          "bold #004461",           # class: 'kr'
        Keyword.Type:              "bold #004461",           # class: 'kt'
        Operator:                  "#582800",                # class: 'o'
        Operator.Word:             "bold #004461",           # class: 'ow' - like keywords
        Punctuation:               "bold #000000",           # class: 'p'
        # because special names such as Name.Class, Name.Function, etc.
        # are not recognized as such later in the parsing, we choose them
        # to look the same as ordinary variables.
        Name:                      "#000000",                # class: 'n'
        Name.Attribute:            "#c4a000",                # class: 'na' - to be revised
        Name.Builtin:              "#004461",                # class: 'nb'
        Name.Builtin.Pseudo:       "#3465a4",                # class: 'bp'
        Name.Class:                "#000000",                # class: 'nc' - to be revised
        Name.Constant:             "#000000",                # class: 'no' - to be revised
        Name.Decorator:            "#888",                   # class: 'nd' - to be revised
        Name.Entity:               "#ce5c00",                # class: 'ni'
        Name.Exception:            "bold #cc0000",           # class: 'ne'
        Name.Function:             "#000000",                # class: 'nf'
        Name.Property:             "#000000",                # class: 'py'
        Name.Label:                "#f57900",                # class: 'nl'
        Name.Namespace:            "#000000",                # class: 'nn' - to be revised
        Name.Other:                "#000000",                # class: 'nx'
        Name.Tag:                  "bold #004461",           # class: 'nt' - like a keyword
        Name.Variable:             "#000000",                # class: 'nv' - to be revised
        Name.Variable.Class:       "#000000",                # class: 'vc' - to be revised
        Name.Variable.Global:      "#000000",                # class: 'vg' - to be revised
        Name.Variable.Instance:    "#000000",                # class: 'vi' - to be revised
        Number:                    "#990000",                # class: 'm'
        Literal:                   "#000000",                # class: 'l'
        Literal.Date:              "#000000",                # class: 'ld'
        String:                    "#4e9a06",                # class: 's'
        String.Backtick:           "#4e9a06",                # class: 'sb'
        String.Char:               "#4e9a06",                # class: 'sc'
        String.Doc:                "italic #8f5902",         # class: 'sd' - like a comment
        String.Double:             "#4e9a06",                # class: 's2'
        String.Escape:             "#4e9a06",                # class: 'se'
        String.Heredoc:            "#4e9a06",                # class: 'sh'
        String.Interpol:           "#4e9a06",                # class: 'si'
        String.Other:              "#4e9a06",                # class: 'sx'
        String.Regex:              "#4e9a06",                # class: 'sr'
        String.Single:             "#4e9a06",                # class: 's1'
        String.Symbol:             "#4e9a06",                # class: 'ss'
        Generic:                   "#000000",                # class: 'g'
        Generic.Deleted:           "#a40000",                # class: 'gd'
        Generic.Emph:              "italic #000000",         # class: 'ge'
        Generic.Error:             "#ef2929",                # class: 'gr'
        Generic.Heading:           "bold #000080",           # class: 'gh'
        Generic.Inserted:          "#00A000",                # class: 'gi'
        Generic.Output:            "#888",                   # class: 'go'
        Generic.Prompt:            "#745334",                # class: 'gp'
        Generic.Strong:            "bold #000000",           # class: 'gs'
        Generic.Subheading:        "bold #800080",           # class: 'gu'
        Generic.Traceback:         "bold #a40000",           # class: 'gt'
    }
| bsd-3-clause |
eoncloud-dev/eonboard | eoncloud_web/cloud/api/nova.py | 4 | 30002 | #-*-coding=utf-8-*-
# Copyright 2012 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2012 OpenStack Foundation
# Copyright 2012 Nebula, Inc.
# Copyright (c) 2012 X.commerce, a business unit of eBay Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import absolute_import
import logging
from django.conf import settings
from django.utils.functional import cached_property # noqa
from django.utils.translation import ugettext_lazy as _
from novaclient import exceptions as nova_exceptions
from novaclient.v1_1 import client as nova_client
from novaclient.v1_1.contrib import instance_action as nova_instance_action
from novaclient.v1_1.contrib import list_extensions as nova_list_extensions
from novaclient.v1_1 import security_group_rules as nova_rules
from novaclient.v1_1 import security_groups as nova_security_groups
from novaclient.v1_1 import servers as nova_servers
#from horizon import conf
#from horizon.utils import functions as utils
from cloud.utils.memoized import memoized # noqa
from cloud.api import base
from cloud.api import network_base
LOG = logging.getLogger(__name__)
# API static values
INSTANCE_ACTIVE_STATE = 'ACTIVE'  # nova 'status' value for a running server
VOLUME_STATE_AVAILABLE = "available"  # volume status meaning "attachable"
DEFAULT_QUOTA_NAME = 'default'  # quota class name used by default_quota_update
class VNCConsole(base.APIDictWrapper):
    """Wrapper for the "console" dictionary.
    Returned by the novaclient.servers.get_vnc_console method.
    """
    # Keys exposed from the underlying console dict.
    _attrs = ['url', 'type']

class SPICEConsole(base.APIDictWrapper):
    """Wrapper for the "console" dictionary.
    Returned by the novaclient.servers.get_spice_console method.
    """
    # Keys exposed from the underlying console dict.
    _attrs = ['url', 'type']

class RDPConsole(base.APIDictWrapper):
    """Wrapper for the "console" dictionary.
    Returned by the novaclient.servers.get_rdp_console method.
    """
    # Keys exposed from the underlying console dict.
    _attrs = ['url', 'type']
class Server(base.APIResourceWrapper):
    """Simple wrapper around novaclient.server.Server.
    Preserves the request info so image name can later be retrieved.
    """
    _attrs = ['addresses', 'attrs', 'id', 'image', 'links',
              'metadata', 'name', 'private_ip', 'public_ip', 'status', 'uuid',
              'image_name', 'VirtualInterfaces', 'flavor', 'key_name', 'fault',
              'tenant_id', 'user_id', 'created', 'OS-EXT-STS:power_state',
              'OS-EXT-STS:task_state', 'OS-EXT-SRV-ATTR:instance_name',
              'OS-EXT-SRV-ATTR:host', 'OS-EXT-AZ:availability_zone',
              'OS-DCF:diskConfig']

    def __init__(self, apiresource, request):
        super(Server, self).__init__(apiresource)
        # Stored so image_name can fall back to a Glance lookup later.
        self.request = request

    # TODO(gabriel): deprecate making a call to Glance as a fallback.
    @property
    def image_name(self):
        """Best-effort image name: object attr, dict key, then Glance."""
        import glanceclient.exc as glance_exceptions  # noqa
        from openstack_dashboard.api import glance  # noqa
        if not self.image:
            # No image info (e.g. stripped by the API); show a dash.
            return "-"
        if hasattr(self.image, 'name'):
            return self.image.name
        if 'name' in self.image:
            return self.image['name']
        else:
            try:
                # Fall back to asking Glance for the image by id.
                image = glance.image_get(self.request, self.image['id'])
                return image.name
            except glance_exceptions.ClientException:
                return "-"

    @property
    def internal_name(self):
        # Hypervisor-side instance name (e.g. "instance-0000002a").
        return getattr(self, 'OS-EXT-SRV-ATTR:instance_name', "")

    @property
    def availability_zone(self):
        return getattr(self, 'OS-EXT-AZ:availability_zone', "")
class Hypervisor(base.APIDictWrapper):
    """Simple wrapper around novaclient.hypervisors.Hypervisor."""
    _attrs = ['manager', '_loaded', '_info', 'hypervisor_hostname', 'id',
              'servers']

    @property
    def servers(self):
        """Return the hypervisor's server list, or [] when absent.

        The 'servers' attribute only exists on the underlying API dict
        when the hypervisor actually hosts instances, so a missing
        attribute is expected and maps to an empty list.
        """
        # if hypervisor doesn't have servers, the attribute is not present;
        # catch only AttributeError so real errors are not swallowed
        # (the original caught bare Exception here).
        servers = []
        try:
            servers = self._apidict.servers
        except AttributeError:
            pass
        return servers
class NovaUsage(base.APIResourceWrapper):
    """Simple wrapper around contrib/simple_usage.py."""
    _attrs = ['start', 'server_usages', 'stop', 'tenant_id',
              'total_local_gb_usage', 'total_memory_mb_usage',
              'total_vcpus_usage', 'total_hours']

    def get_summary(self):
        """Flatten this usage record into a summary dict."""
        return {'instances': self.total_active_instances,
                'memory_mb': self.memory_mb,
                'vcpus': getattr(self, "total_vcpus_usage", 0),
                'vcpu_hours': self.vcpu_hours,
                'local_gb': self.local_gb,
                'disk_gb_hours': self.disk_gb_hours}

    @property
    def total_active_instances(self):
        # Usages with ended_at set belong to deleted instances.
        return sum(1 for s in self.server_usages if s['ended_at'] is None)

    @property
    def vcpus(self):
        # vCPUs currently held by still-running instances.
        return sum(s['vcpus'] for s in self.server_usages
                   if s['ended_at'] is None)

    @property
    def vcpu_hours(self):
        return getattr(self, "total_hours", 0)

    @property
    def local_gb(self):
        # Local disk (GB) held by still-running instances.
        return sum(s['local_gb'] for s in self.server_usages
                   if s['ended_at'] is None)

    @property
    def memory_mb(self):
        # RAM (MB) held by still-running instances.
        return sum(s['memory_mb'] for s in self.server_usages
                   if s['ended_at'] is None)

    @property
    def disk_gb_hours(self):
        return getattr(self, "total_local_gb_usage", 0)
class SecurityGroup(base.APIResourceWrapper):
    """Wrapper around novaclient.security_groups.SecurityGroup.
    Wraps its rules in SecurityGroupRule objects and allows access to them.
    """
    _attrs = ['id', 'name', 'description', 'tenant_id']

    @cached_property
    def rules(self):
        """Wraps transmitted rule info in the novaclient rule class."""
        # The manager is only needed so novaclient's rule class can be
        # instantiated; None suffices since no API calls are made here.
        manager = nova_rules.SecurityGroupRuleManager(None)
        rule_objs = [nova_rules.SecurityGroupRule(manager, rule)
                     for rule in self._apiresource.rules]
        return [SecurityGroupRule(rule) for rule in rule_objs]
class SecurityGroupRule(base.APIResourceWrapper):
    """Wrapper for individual rules in a SecurityGroup."""
    _attrs = ['id', 'ip_protocol', 'from_port', 'to_port', 'ip_range', 'group']

    def __unicode__(self):
        # Source is either another security group or a CIDR range.
        if 'name' in self.group:
            vals = {'from': self.from_port,
                    'to': self.to_port,
                    'group': self.group['name']}
            return _('ALLOW %(from)s:%(to)s from %(group)s') % vals
        else:
            vals = {'from': self.from_port,
                    'to': self.to_port,
                    'cidr': self.ip_range['cidr']}
            return _('ALLOW %(from)s:%(to)s from %(cidr)s') % vals

    # The following attributes are defined to keep compatibility with Neutron
    @property
    def ethertype(self):
        # Nova rules carry no ethertype concept.
        return None

    @property
    def direction(self):
        # Nova security-group rules are always ingress rules.
        return 'ingress'
class SecurityGroupManager(network_base.SecurityGroupManager):
    """Nova-backed implementation of the security-group manager API."""
    backend = 'nova'

    def __init__(self, request):
        self.request = request
        self.client = novaclient(request)

    def list(self):
        """Return all security groups, wrapped."""
        return [SecurityGroup(g) for g
                in self.client.security_groups.list()]

    def get(self, sg_id):
        """Fetch a single security group by id."""
        return SecurityGroup(self.client.security_groups.get(sg_id))

    def create(self, name, desc):
        """Create a security group with the given name/description."""
        return SecurityGroup(self.client.security_groups.create(name, desc))

    def update(self, sg_id, name, desc):
        """Rename/redescribe an existing security group."""
        return SecurityGroup(self.client.security_groups.update(sg_id,
                                                                name, desc))

    def delete(self, security_group_id):
        """Delete a security group."""
        self.client.security_groups.delete(security_group_id)

    def rule_create(self, parent_group_id,
                    direction=None, ethertype=None,
                    ip_protocol=None, from_port=None, to_port=None,
                    cidr=None, group_id=None):
        """Add a rule to parent_group_id; direction/ethertype ignored."""
        # Nova Security Group API does not use direction and ethertype fields.
        sg = self.client.security_group_rules.create(parent_group_id,
                                                     ip_protocol,
                                                     from_port,
                                                     to_port,
                                                     cidr,
                                                     group_id)
        return SecurityGroupRule(sg)

    def rule_delete(self, security_group_rule_id):
        """Delete a single security-group rule."""
        self.client.security_group_rules.delete(security_group_rule_id)

    def list_by_instance(self, instance_id):
        """Gets security groups of an instance."""
        # TODO(gabriel): This needs to be moved up to novaclient, and should
        # be removed once novaclient supports this call.
        security_groups = []
        nclient = self.client
        resp, body = nclient.client.get('/servers/%s/os-security-groups'
                                        % instance_id)
        if body:
            # Wrap data in SG objects as novaclient would.
            sg_objs = [
                nova_security_groups.SecurityGroup(
                    nclient.security_groups, sg, loaded=True)
                for sg in body.get('security_groups', [])]
            # Then wrap novaclient's object with our own. Yes, sadly wrapping
            # with two layers of objects is necessary.
            security_groups = [SecurityGroup(sg) for sg in sg_objs]
        return security_groups

    def update_instance_security_group(self, instance_id,
                                       new_security_group_ids):
        """Reconcile an instance's groups to new_security_group_ids.

        Computes the set difference against the instance's current
        groups and adds/removes only what changed. Returns True on
        success; raises on failure.
        """
        try:
            all_groups = self.list()
        except Exception:
            raise Exception(_("Couldn't get security group list."))
        wanted_groups = set([sg.name for sg in all_groups
                             if sg.id in new_security_group_ids])
        try:
            current_groups = self.list_by_instance(instance_id)
        except Exception:
            raise Exception(_("Couldn't get current security group "
                              "list for instance %s.")
                            % instance_id)
        current_group_names = set([sg.name for sg in current_groups])
        groups_to_add = wanted_groups - current_group_names
        groups_to_remove = current_group_names - wanted_groups
        # Counted down as each add/remove succeeds so the error message
        # reflects how many modifications were still outstanding.
        num_groups_to_modify = len(groups_to_add | groups_to_remove)
        try:
            for group in groups_to_add:
                self.client.servers.add_security_group(instance_id, group)
                num_groups_to_modify -= 1
            for group in groups_to_remove:
                self.client.servers.remove_security_group(instance_id, group)
                num_groups_to_modify -= 1
        except nova_exceptions.ClientException as err:
            LOG.error(_("Failed to modify %(num_groups_to_modify)d instance "
                        "security groups: %(err)s") %
                      dict(num_groups_to_modify=num_groups_to_modify,
                           err=err))
            # reraise novaclient.exceptions.ClientException, but with
            # a sanitized error message so we don't risk exposing
            # sensitive information to the end user. This has to be
            # novaclient.exceptions.ClientException, not just
            # Exception, since the former is recognized as a
            # "recoverable" exception by horizon, and therefore the
            # error message is passed along to the end user, while
            # Exception is swallowed alive by horizon and a gneric
            # error message is given to the end user
            raise nova_exceptions.ClientException(
                err.code,
                _("Failed to modify %d instance security groups") %
                num_groups_to_modify)
        return True
class FlavorExtraSpec(object):
    """A single extra-spec (key/value) entry belonging to a flavor.

    The spec key doubles as the object's ``id`` so these entries can be
    handled like other API resources.
    """

    def __init__(self, flavor_id, key, val):
        self.flavor_id = flavor_id
        # The key serves both as identifier and as the spec name.
        self.id, self.key = key, key
        self.value = val
class FloatingIp(base.APIResourceWrapper):
    """Wrap a nova floating IP, adding Neutron-style fields."""
    _attrs = ['id', 'ip', 'fixed_ip', 'port_id', 'instance_id',
              'instance_type', 'pool']

    def __init__(self, fip):
        # Mirror Neutron's port-based model: nova only knows instances,
        # so the instance id stands in for the port id.
        fip.__setattr__('port_id', fip.instance_id)
        fip.__setattr__('instance_type',
                        'compute' if fip.instance_id else None)
        super(FloatingIp, self).__init__(fip)

class FloatingIpPool(base.APIDictWrapper):
    """Present a nova floating-IP pool with an id/name dict interface."""
    def __init__(self, pool):
        # Nova pools have no id; reuse the name.
        pool_dict = {'id': pool.name,
                     'name': pool.name}
        super(FloatingIpPool, self).__init__(pool_dict)

class FloatingIpTarget(base.APIDictWrapper):
    """A server formatted as a floating-IP association target."""
    def __init__(self, server):
        server_dict = {'name': '%s (%s)' % (server.name, server.id),
                       'id': server.id}
        super(FloatingIpTarget, self).__init__(server_dict)
class FloatingIpManager(network_base.FloatingIpManager):
    """Nova-network-backed floating IP operations."""
    def __init__(self, request):
        self.request = request
        self.client = novaclient(request)

    def list_pools(self):
        """List floating-IP pools, wrapped."""
        return [FloatingIpPool(pool)
                for pool in self.client.floating_ip_pools.list()]

    def list(self):
        """List the tenant's floating IPs, wrapped."""
        return [FloatingIp(fip)
                for fip in self.client.floating_ips.list()]

    def get(self, floating_ip_id):
        """Fetch one floating IP by id."""
        return FloatingIp(self.client.floating_ips.get(floating_ip_id))

    def allocate(self, pool):
        """Allocate a new floating IP from `pool`."""
        return FloatingIp(self.client.floating_ips.create(pool=pool))

    def release(self, floating_ip_id):
        """Return a floating IP to its pool."""
        self.client.floating_ips.delete(floating_ip_id)

    def associate(self, floating_ip_id, port_id):
        # In Nova implied port_id is instance_id
        server = self.client.servers.get(port_id)
        fip = self.client.floating_ips.get(floating_ip_id)
        self.client.servers.add_floating_ip(server.id, fip.ip)

    def disassociate(self, floating_ip_id, port_id):
        """Detach a floating IP from its current instance."""
        fip = self.client.floating_ips.get(floating_ip_id)
        server = self.client.servers.get(fip.instance_id)
        self.client.servers.remove_floating_ip(server.id, fip.ip)

    def list_targets(self):
        """All servers, as possible association targets."""
        return [FloatingIpTarget(s) for s in self.client.servers.list()]

    def get_target_id_by_instance(self, instance_id, target_list=None):
        # With nova-network the target id IS the instance id.
        return instance_id

    def list_target_id_by_instance(self, instance_id, target_list=None):
        return [instance_id, ]

    def is_simple_associate_supported(self):
        # NOTE(review): `conf` is not defined in this module (the
        # `from horizon import conf` import is commented out at the top
        # of the file), so calling this raises NameError -- confirm it
        # is unused in this codebase.
        return conf.HORIZON_CONFIG["simple_ip_management"]

    def is_supported(self):
        return True
#@memoized
def novaclient(request):
"""
insecure = getattr(settings, 'OPENSTACK_SSL_NO_VERIFY', False)
cacert = getattr(settings, 'OPENSTACK_SSL_CACERT', None)
LOG.debug('novaclient connection created using token "%s" and url "%s"' %
(request.user.token.id, base.url_for(request, 'compute')))
c = nova_client.Client(request.user.username,
request.user.token.id,
project_id=request.user.tenant_id,
auth_url=base.url_for(request, 'compute'),
insecure=insecure,
cacert=cacert,
http_log_debug=settings.DEBUG)
c.client.auth_token = request.user.token.id
c.client.management_url = base.url_for(request, 'compute')
return c
"""
c = nova_client.Client(username=request.get("username"),
api_key=request.get("password"),
project_id=request.get("tenant_name"),
auth_url=request.get("auth_url"),
http_log_debug=False)
return c
def server_vnc_console(request, instance_id, console_type='novnc'):
    """Return the VNC console endpoint for an instance."""
    return VNCConsole(novaclient(request).servers.get_vnc_console(instance_id,
                      console_type)['console'])

def server_spice_console(request, instance_id, console_type='spice-html5'):
    """Return the SPICE console endpoint for an instance."""
    return SPICEConsole(novaclient(request).servers.get_spice_console(
        instance_id, console_type)['console'])

def server_rdp_console(request, instance_id, console_type='rdp-html5'):
    """Return the RDP console endpoint for an instance."""
    return RDPConsole(novaclient(request).servers.get_rdp_console(
        instance_id, console_type)['console'])
def flavor_create(request, name, memory, vcpu, disk, flavorid='auto',
                  ephemeral=0, swap=0, metadata=None, is_public=True):
    """Create a flavor and optionally attach extra-spec metadata."""
    flavor = novaclient(request).flavors.create(name, memory, vcpu, disk,
                                                flavorid=flavorid,
                                                ephemeral=ephemeral,
                                                swap=swap, is_public=is_public)
    if (metadata):
        flavor_extra_set(request, flavor.id, metadata)
    return flavor

def flavor_delete(request, flavor_id):
    """Delete the flavor with the given id."""
    novaclient(request).flavors.delete(flavor_id)

def flavor_get(request, flavor_id):
    """Fetch a single flavor by id."""
    return novaclient(request).flavors.get(flavor_id)

@memoized
def flavor_list(request, is_public=True):
    """Get the list of available instance sizes (flavors)."""
    return novaclient(request).flavors.list(is_public=is_public)

@memoized
def flavor_access_list(request, flavor=None):
    """Get the list of access instance sizes (flavors)."""
    return novaclient(request).flavor_access.list(flavor=flavor)

def add_tenant_to_flavor(request, flavor, tenant):
    """Add a tenant to the given flavor access list."""
    return novaclient(request).flavor_access.add_tenant_access(
        flavor=flavor, tenant=tenant)

def remove_tenant_from_flavor(request, flavor, tenant):
    """Remove a tenant from the given flavor access list."""
    return novaclient(request).flavor_access.remove_tenant_access(
        flavor=flavor, tenant=tenant)

def flavor_get_extras(request, flavor_id, raw=False):
    """Get flavor extra specs."""
    flavor = novaclient(request).flavors.get(flavor_id)
    extras = flavor.get_keys()
    if raw:
        # Caller wants the plain dict straight from the API.
        return extras
    return [FlavorExtraSpec(flavor_id, key, value) for
            key, value in extras.items()]

def flavor_extra_delete(request, flavor_id, keys):
    """Unset the flavor extra spec keys."""
    flavor = novaclient(request).flavors.get(flavor_id)
    return flavor.unset_keys(keys)

def flavor_extra_set(request, flavor_id, metadata):
    """Set the flavor extra spec keys."""
    flavor = novaclient(request).flavors.get(flavor_id)
    if (not metadata):  # not a way to delete keys
        return None
    return flavor.set_keys(metadata)
def snapshot_create(request, instance_id, name):
    """Create a snapshot image from a running instance."""
    return novaclient(request).servers.create_image(instance_id, name)

def keypair_create(request, name):
    """Generate a new keypair (private key is in the response)."""
    return novaclient(request).keypairs.create(name)

def keypair_import(request, name, public_key):
    """Import an existing public key as a keypair."""
    return novaclient(request).keypairs.create(name, public_key)

def keypair_delete(request, keypair_id):
    """Delete a keypair."""
    novaclient(request).keypairs.delete(keypair_id)

def keypair_list(request):
    """List the tenant's keypairs."""
    return novaclient(request).keypairs.list()
def server_create(request, name, image, flavor, key_name, user_data,
                  security_groups, block_device_mapping=None,
                  block_device_mapping_v2=None, nics=None,
                  availability_zone="nova", instance_count=1, admin_pass=None,
                  disk_config=None, config_drive=None, meta=None):
    """Boot instance_count servers; return the created server wrapped."""
    return Server(novaclient(request).servers.create(
        name, image, flavor, userdata=user_data,
        security_groups=security_groups,
        key_name=key_name, block_device_mapping=block_device_mapping,
        block_device_mapping_v2=block_device_mapping_v2,
        nics=nics, availability_zone=availability_zone,
        min_count=instance_count, admin_pass=admin_pass,
        disk_config=disk_config, config_drive=config_drive,
        meta=meta), request)

def server_delete(request, instance):
    """Delete an instance."""
    novaclient(request).servers.delete(instance)

def server_get(request, instance_id):
    """Fetch a single instance, wrapped in Server."""
    return Server(novaclient(request).servers.get(instance_id), request)
def server_list(request, search_opts=None, all_tenants=False):
    """List servers, optionally across all tenants and paginated.

    Returns a tuple (servers, has_more_data). When 'paginate' is set in
    search_opts, one extra row is requested to detect further pages.
    """
    # The original called horizon's utils.get_page_size(request), but
    # that import is disabled in this module (see commented import at
    # the top), which made this line raise NameError. Fall back to the
    # conventional Horizon setting with its default of 20.
    page_size = getattr(settings, 'API_RESULT_PAGE_SIZE', 20)
    c = novaclient(request)
    paginate = False
    if search_opts is None:
        search_opts = {}
    elif 'paginate' in search_opts:
        paginate = search_opts.pop('paginate')
        if paginate:
            # Ask for one extra row to detect whether more pages exist.
            search_opts['limit'] = page_size + 1
    if all_tenants:
        search_opts['all_tenants'] = True
    else:
        # NOTE(review): elsewhere in this module `request` is a plain
        # credentials dict; request.user.tenant_id presumably only
        # works for Django request objects -- confirm callers.
        search_opts['project_id'] = request.user.tenant_id
    servers = [Server(s, request)
               for s in c.servers.list(True, search_opts)]
    has_more_data = False
    if paginate and len(servers) > page_size:
        # Drop the sentinel extra row before returning the page.
        servers.pop(-1)
        has_more_data = True
    elif paginate and len(servers) == getattr(settings, 'API_RESULT_LIMIT',
                                              1000):
        has_more_data = True
    return (servers, has_more_data)
def server_console_output(request, instance_id, tail_length=None):
    """Gets console output of an instance."""
    return novaclient(request).servers.get_console_output(instance_id,
                                                          length=tail_length)

def server_pause(request, instance_id):
    """Pause a running instance."""
    novaclient(request).servers.pause(instance_id)

def server_unpause(request, instance_id):
    """Unpause a paused instance."""
    novaclient(request).servers.unpause(instance_id)

def server_suspend(request, instance_id):
    """Suspend an instance."""
    novaclient(request).servers.suspend(instance_id)

def server_resume(request, instance_id):
    """Resume a suspended instance."""
    novaclient(request).servers.resume(instance_id)

def server_reboot(request, instance_id, soft_reboot=True):
    """Reboot an instance (soft by default, hard otherwise)."""
    hardness = nova_servers.REBOOT_HARD
    if soft_reboot:
        hardness = nova_servers.REBOOT_SOFT
    novaclient(request).servers.reboot(instance_id, hardness)

def server_rebuild(request, instance_id, image_id, password=None,
                   disk_config=None):
    """Rebuild an instance from image_id, optionally resetting password."""
    return novaclient(request).servers.rebuild(instance_id, image_id,
                                               password, disk_config)

def server_update(request, instance_id, name):
    """Rename an instance."""
    return novaclient(request).servers.update(instance_id, name=name)

def server_migrate(request, instance_id):
    """Cold-migrate an instance to another host."""
    novaclient(request).servers.migrate(instance_id)

def server_live_migrate(request, instance_id, host, block_migration=False,
                        disk_over_commit=False):
    """Live-migrate an instance to `host`."""
    novaclient(request).servers.live_migrate(instance_id, host,
                                             block_migration,
                                             disk_over_commit)

def server_resize(request, instance_id, flavor, disk_config=None, **kwargs):
    """Resize an instance to a new flavor."""
    novaclient(request).servers.resize(instance_id, flavor,
                                       disk_config, **kwargs)

def server_confirm_resize(request, instance_id):
    """Confirm a pending resize/migration."""
    novaclient(request).servers.confirm_resize(instance_id)

def server_revert_resize(request, instance_id):
    """Revert a pending resize/migration."""
    novaclient(request).servers.revert_resize(instance_id)

def server_start(request, instance_id):
    """Start a stopped instance."""
    novaclient(request).servers.start(instance_id)

def server_stop(request, instance_id):
    """Stop a running instance."""
    novaclient(request).servers.stop(instance_id)
def tenant_quota_get(request, tenant_id):
    """Return a tenant's quota set wrapped in base.QuotaSet."""
    return base.QuotaSet(novaclient(request).quotas.get(tenant_id))

def tenant_quota_update(request, tenant_id, **kwargs):
    """Update individual quota values for a tenant."""
    novaclient(request).quotas.update(tenant_id, **kwargs)

def default_quota_get(request, tenant_id):
    """Return the default quota set, wrapped."""
    return base.QuotaSet(novaclient(request).quotas.defaults(tenant_id))

def default_quota_update(request, **kwargs):
    """Update the 'default' quota class."""
    novaclient(request).quota_classes.update(DEFAULT_QUOTA_NAME, **kwargs)

def usage_get(request, tenant_id, start, end):
    """Return one tenant's usage over [start, end], wrapped."""
    return NovaUsage(novaclient(request).usage.get(tenant_id, start, end))

def usage_list(request, start, end):
    """Return every tenant's usage over [start, end], wrapped."""
    return [NovaUsage(u) for u in
            novaclient(request).usage.list(start, end, True)]

def virtual_interfaces_list(request, instance_id):
    """List an instance's virtual network interfaces."""
    return novaclient(request).virtual_interfaces.list(instance_id)

def get_x509_credentials(request):
    """Create a new x509 certificate for the current tenant."""
    return novaclient(request).certs.create()

def get_x509_root_certificate(request):
    """Fetch the x509 root certificate."""
    return novaclient(request).certs.get()

def get_password(request, instance_id, private_key=None):
    """Retrieve an instance's password (decrypted with private_key)."""
    return novaclient(request).servers.get_password(instance_id, private_key)

def instance_volume_attach(request, volume_id, instance_id, device):
    """Attach a volume to an instance at the given device path."""
    return novaclient(request).volumes.create_server_volume(instance_id,
                                                            volume_id,
                                                            device)

def instance_volume_detach(request, instance_id, att_id):
    """Detach the attachment att_id from an instance."""
    return novaclient(request).volumes.delete_server_volume(instance_id,
                                                            att_id)

def instance_volumes_list(request, instance_id):
    """List an instance's attached volumes, naming them via cinder."""
    # NOTE(review): imports from openstack_dashboard while the rest of
    # this module uses cloud.api -- confirm this path is importable here.
    from openstack_dashboard.api import cinder
    volumes = novaclient(request).volumes.get_server_volumes(instance_id)
    for volume in volumes:
        volume_data = cinder.cinderclient(request).volumes.get(volume.id)
        volume.name = cinder.Volume(volume_data).name
    return volumes

def hypervisor_list(request):
    """List all hypervisors."""
    return novaclient(request).hypervisors.list()

def hypervisor_stats(request):
    """Return aggregate hypervisor statistics."""
    return novaclient(request).hypervisors.statistics()

def hypervisor_search(request, query, servers=True):
    """Search hypervisors by hostname, optionally including servers."""
    return novaclient(request).hypervisors.search(query, servers)
def evacuate_host(request, host, target=None, on_shared_storage=False):
    """Evacuate every instance on `host`, optionally onto `target`.

    Returns True on success; raises ClientException (with the last
    error code seen) listing the instances that failed to evacuate.
    """
    # TODO(jmolle) This should be change for nova atomic api host_evacuate
    hypervisors = novaclient(request).hypervisors.search(host, True)
    response = []
    err_code = None
    for hypervisor in hypervisors:
        hyper = Hypervisor(hypervisor)
        # if hypervisor doesn't have servers, the attribute is not present
        for server in hyper.servers:
            try:
                novaclient(request).servers.evacuate(server['uuid'],
                                                     target,
                                                     on_shared_storage)
            except nova_exceptions.ClientException as err:
                # Record the failure and keep evacuating the rest.
                err_code = err.code
                msg = _("Name: %(name)s ID: %(uuid)s")
                msg = msg % {'name': server['name'], 'uuid': server['uuid']}
                response.append(msg)
    if err_code:
        msg = _('Failed to evacuate instances: %s') % ', '.join(response)
        raise nova_exceptions.ClientException(err_code, msg)
    return True
def tenant_absolute_limits(request, reserved=False):
    """Return the tenant's absolute limits as a {name: value} dict.

    Negative values are normalized: bogus negative usage counters
    become 0 and the -1 "unlimited" marker becomes float('inf').
    """
    limits = novaclient(request).limits.get(reserved=reserved).absolute
    limits_dict = {}
    for limit in limits:
        if limit.value < 0:
            # Workaround for nova bug 1370867 that absolute_limits
            # returns negative value for total.*Used instead of 0.
            # For such case, replace negative values with 0.
            if limit.name.startswith('total') and limit.name.endswith('Used'):
                limits_dict[limit.name] = 0
            else:
                # -1 is used to represent unlimited quotas
                limits_dict[limit.name] = float("inf")
        else:
            limits_dict[limit.name] = limit.value
    return limits_dict
def availability_zone_list(request, detailed=False):
return novaclient(request).availability_zones.list(detailed=detailed)
def service_list(request, binary=None):
return novaclient(request).services.list(binary=binary)
def aggregate_details_list(request):
result = []
c = novaclient(request)
for aggregate in c.aggregates.list():
result.append(c.aggregates.get_details(aggregate.id))
return result
def aggregate_create(request, name, availability_zone=None):
return novaclient(request).aggregates.create(name, availability_zone)
def aggregate_delete(request, aggregate_id):
return novaclient(request).aggregates.delete(aggregate_id)
def aggregate_get(request, aggregate_id):
return novaclient(request).aggregates.get(aggregate_id)
def aggregate_update(request, aggregate_id, values):
return novaclient(request).aggregates.update(aggregate_id, values)
def aggregate_set_metadata(request, aggregate_id, metadata):
return novaclient(request).aggregates.set_metadata(aggregate_id, metadata)
def host_list(request):
return novaclient(request).hosts.list()
def add_host_to_aggregate(request, aggregate_id, host):
return novaclient(request).aggregates.add_host(aggregate_id, host)
def remove_host_from_aggregate(request, aggregate_id, host):
return novaclient(request).aggregates.remove_host(aggregate_id, host)
@memoized
def list_extensions(request):
return nova_list_extensions.ListExtManager(novaclient(request)).show_all()
@memoized
def extension_supported(extension_name, request):
"""Determine if nova supports a given extension name.
Example values for the extension_name include AdminActions, ConsoleOutput,
etc.
"""
extensions = list_extensions(request)
for extension in extensions:
if extension.name == extension_name:
return True
return False
def can_set_server_password():
features = getattr(settings, 'OPENSTACK_HYPERVISOR_FEATURES', {})
return features.get('can_set_password', False)
def instance_action_list(request, instance_id):
    """List the recorded actions for the given server instance."""
    manager = nova_instance_action.InstanceActionManager(novaclient(request))
    return manager.list(instance_id)
| apache-2.0 |
archen/django | django/db/migrations/migration.py | 6 | 5709 | class Migration(object):
"""
The base class for all migrations.
Migration files will import this from django.db.migrations.Migration
and subclass it as a class called Migration. It will have one or more
of the following attributes:
- operations: A list of Operation instances, probably from django.db.migrations.operations
- dependencies: A list of tuples of (app_path, migration_name)
- run_before: A list of tuples of (app_path, migration_name)
- replaces: A list of migration_names
Note that all migrations come out of migrations and into the Loader or
Graph as instances, having been initialised with their app label and name.
"""
    # Operations to apply during this migration, in order.
    operations = []
    # Other migrations that should be run before this migration.
    # Should be a list of (app, migration_name).
    dependencies = []
    # Other migrations that should be run after this one (i.e. have
    # this migration added to their dependencies). Useful to make third-party
    # apps' migrations run after your AUTH_USER replacement, for example.
    run_before = []
    # Migration names in this app that this migration replaces. If this is
    # non-empty, this migration will only be applied if all these migrations
    # are not applied.
    replaces = []
    # Error class which is raised when a migration is irreversible.
    # Raised by unapply() when an operation has reversible == False.
    class IrreversibleError(RuntimeError):
        pass
def __init__(self, name, app_label):
self.name = name
self.app_label = app_label
# Copy dependencies & other attrs as we might mutate them at runtime
self.operations = list(self.__class__.operations)
self.dependencies = list(self.__class__.dependencies)
self.run_before = list(self.__class__.run_before)
self.replaces = list(self.__class__.replaces)
def __eq__(self, other):
if not isinstance(other, Migration):
return False
return (self.name == other.name) and (self.app_label == other.app_label)
def __ne__(self, other):
return not (self == other)
def __repr__(self):
return "<Migration %s.%s>" % (self.app_label, self.name)
def __str__(self):
return "%s.%s" % (self.app_label, self.name)
def __hash__(self):
return hash("%s.%s" % (self.app_label, self.name))
def mutate_state(self, project_state):
"""
Takes a ProjectState and returns a new one with the migration's
operations applied to it.
"""
new_state = project_state.clone()
for operation in self.operations:
operation.state_forwards(self.app_label, new_state)
return new_state
    def apply(self, project_state, schema_editor, collect_sql=False):
        """
        Takes a project_state representing all migrations prior to this one
        and a schema_editor for a live database and applies the migration
        in a forwards order.

        Returns the resulting project state for efficient re-use by following
        Migrations.
        """
        for operation in self.operations:
            # If this operation cannot be represented as SQL, place a comment
            # there instead
            if collect_sql and not operation.reduces_to_sql:
                schema_editor.collected_sql.append("--")
                schema_editor.collected_sql.append("-- MIGRATION NOW PERFORMS OPERATION THAT CANNOT BE WRITTEN AS SQL:")
                schema_editor.collected_sql.append("-- %s" % operation.describe())
                schema_editor.collected_sql.append("--")
                continue
            # Get the state after the operation has run
            new_state = project_state.clone()
            operation.state_forwards(self.app_label, new_state)
            # Run the operation against the database, giving it both the
            # pre-operation and post-operation model states.
            operation.database_forwards(self.app_label, schema_editor, project_state, new_state)
            # Switch states: the post-state becomes the pre-state of the
            # next operation.
            project_state = new_state
        return project_state
    def unapply(self, project_state, schema_editor, collect_sql=False):
        """
        Takes a project_state representing all migrations prior to this one
        and a schema_editor for a live database and applies the migration
        in a reverse order.

        Raises Migration.IrreversibleError if any operation is not
        reversible.
        """
        # We need to pre-calculate the stack of project states: states are
        # computed forwards, then the operations are undone in reverse.
        to_run = []
        for operation in self.operations:
            # If this operation cannot be represented as SQL, place a comment
            # there instead
            if collect_sql and not operation.reduces_to_sql:
                schema_editor.collected_sql.append("--")
                schema_editor.collected_sql.append("-- MIGRATION NOW PERFORMS OPERATION THAT CANNOT BE WRITTEN AS SQL:")
                schema_editor.collected_sql.append("-- %s" % operation.describe())
                schema_editor.collected_sql.append("--")
                continue
            # If it's irreversible, error out
            if not operation.reversible:
                raise Migration.IrreversibleError("Operation %s in %s is not reversible" % (operation, self))
            new_state = project_state.clone()
            operation.state_forwards(self.app_label, new_state)
            to_run.append((operation, project_state, new_state))
            project_state = new_state
        # Now run them in reverse; each entry already carries the
        # (pre, post) states computed during the forwards pass.
        to_run.reverse()
        for operation, to_state, from_state in to_run:
            operation.database_backwards(self.app_label, schema_editor, from_state, to_state)
def swappable_dependency(value):
    """
    Turns a setting value ("app_label.ModelName") into a dependency on the
    first migration of that app.
    """
    app_label, _sep, _model = value.partition(".")
    return (app_label, "__first__")
| bsd-3-clause |
dcuartielles/SmartWatch | build/linux/work/hardware/tools/arm/arm-none-eabi/lib/armv7e-m/softfp/libstdc++.a-gdb.py | 2 | 2406 | # -*- python -*-
# Copyright (C) 2009, 2010 Free Software Foundation, Inc.
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import sys
import gdb
import os
import os.path

# Install-time paths baked in by the GCC build; rewritten below to be
# relative so the whole toolchain tree stays relocatable.
pythondir = '/home/build/work/GCC-4-7-build/install-native/share/gcc-arm-none-eabi'
libdir = '/home/build/work/GCC-4-7-build/install-native/arm-none-eabi/lib/armv7e-m/softfp'

# This file might be loaded when there is no current objfile.  This
# can happen if the user loads it manually.  In this case we don't
# update sys.path; instead we just hope the user managed to do that
# beforehand.
if gdb.current_objfile () is not None:
    # Update module path.  We want to find the relative path from libdir
    # to pythondir, and then we want to apply that relative path to the
    # directory holding the objfile with which this file is associated.
    # This preserves relocatability of the gcc tree.

    # Do a simple normalization that removes duplicate separators.
    pythondir = os.path.normpath (pythondir)
    libdir = os.path.normpath (libdir)

    prefix = os.path.commonprefix ([libdir, pythondir])
    # In some bizarre configuration we might have found a match in the
    # middle of a directory name.
    if prefix[-1] != '/':
        prefix = os.path.dirname (prefix) + '/'

    # Strip off the prefix.
    pythondir = pythondir[len (prefix):]
    libdir = libdir[len (prefix):]

    # Compute the ".."s needed to get from libdir to the prefix.
    dotdots = ('..' + os.sep) * len (libdir.split (os.sep))

    # Resolve the printer package directory relative to the objfile that
    # triggered this auto-load, and make it importable.
    objfile = gdb.current_objfile ().filename
    dir_ = os.path.join (os.path.dirname (objfile), dotdots, pythondir)

    if not dir_ in sys.path:
        sys.path.insert(0, dir_)

    # Load the pretty-printers.
    from libstdcxx.v6.printers import register_libstdcxx_printers
    register_libstdcxx_printers (gdb.current_objfile ())
| lgpl-2.1 |
javachengwc/hue | desktop/core/ext-py/lxml/src/lxml/tests/test_errors.py | 28 | 1356 | # -*- coding: utf-8 -*-
import unittest, doctest
# These tests check that error handling in the Pyrex code is
# complete.
# It is likely that if there are errors, instead of failing the code
# will simply crash.
import sys, gc, os.path
from lxml import etree
this_dir = os.path.dirname(__file__)
if this_dir not in sys.path:
sys.path.insert(0, this_dir) # needed for Py3
from common_imports import HelperTestCase
class ErrorTestCase(HelperTestCase):
    """Error-path tests: bad input must raise exceptions, never crash."""
    etree = etree

    def test_bad_element(self):
        # attrib argument of Element() should be a dictionary, so if
        # we pass a string we should get an error.
        self.assertRaises(TypeError, self.etree.Element, 'a', 'b')

    def test_empty_parse(self):
        # Parsing the empty string is a syntax error, not a crash.
        self.assertRaises(etree.XMLSyntaxError, etree.fromstring, '')

    def test_element_cyclic_gc_none(self):
        # test if cyclic reference can crash etree
        Element = self.etree.Element

        # The refcount of None is used as a canary: a refcount bug in the
        # C code would typically corrupt it.
        gc.collect()
        count = sys.getrefcount(None)
        l = [Element('name'), Element('name')]
        l.append(l)
        del l
        gc.collect()
        # assertEqual: assertEquals is a deprecated alias (removed in
        # Python 3.12).
        self.assertEqual(sys.getrefcount(None), count)
def test_suite():
    """Collect this module's test cases into a single suite."""
    suite = unittest.TestSuite()
    suite.addTest(unittest.makeSuite(ErrorTestCase))
    return suite
if __name__ == '__main__':
print('to test use test.py %s' % __file__)
| apache-2.0 |
alxgu/ansible | lib/ansible/modules/network/aci/mso_schema_template_l3out.py | 21 | 5772 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2018, Dag Wieers (@dagwieers) <dag@wieers.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: mso_schema_template_l3out
short_description: Manage l3outs in schema templates
description:
- Manage l3outs in schema templates on Cisco ACI Multi-Site.
author:
- Dag Wieers (@dagwieers)
version_added: '2.8'
options:
schema:
description:
- The name of the schema.
type: str
required: yes
template:
description:
- The name of the template.
type: str
required: yes
l3out:
description:
- The name of the l3out to manage.
type: str
aliases: [ name ]
display_name:
description:
- The name as displayed on the MSO web interface.
type: str
vrf:
description:
- The VRF associated to this L3out.
type: dict
state:
description:
- Use C(present) or C(absent) for adding or removing.
- Use C(query) for listing an object or multiple objects.
type: str
choices: [ absent, present, query ]
default: present
extends_documentation_fragment: mso
'''
EXAMPLES = r'''
- name: Add a new L3out
mso_schema_template_l3out:
host: mso_host
username: admin
password: SomeSecretPassword
schema: Schema 1
template: Template 1
l3out: L3out 1
state: present
delegate_to: localhost
- name: Remove an L3out
mso_schema_template_l3out:
host: mso_host
username: admin
password: SomeSecretPassword
schema: Schema 1
template: Template 1
l3out: L3out 1
state: absent
delegate_to: localhost
- name: Query a specific L3outs
mso_schema_template_l3out:
host: mso_host
username: admin
password: SomeSecretPassword
schema: Schema 1
template: Template 1
l3out: L3out 1
state: query
delegate_to: localhost
register: query_result
- name: Query all L3outs
mso_schema_template_l3out:
host: mso_host
username: admin
password: SomeSecretPassword
schema: Schema 1
template: Template 1
state: query
delegate_to: localhost
register: query_result
'''
RETURN = r'''
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network.aci.mso import MSOModule, mso_argument_spec, mso_reference_spec, issubset
def main():
    """Ansible entry point: create/delete/query an intersite L3out in an
    MSO schema template, emitting the result via exit_json/fail_json."""
    argument_spec = mso_argument_spec()
    argument_spec.update(
        schema=dict(type='str', required=True),
        template=dict(type='str', required=True),
        l3out=dict(type='str', aliases=['name']),  # This parameter is not required for querying all objects
        display_name=dict(type='str'),
        vrf=dict(type='dict', options=mso_reference_spec()),
        state=dict(type='str', default='present', choices=['absent', 'present', 'query']),
    )
    module = AnsibleModule(
        argument_spec=argument_spec,
        supports_check_mode=True,
        required_if=[
            ['state', 'absent', ['l3out']],
            ['state', 'present', ['l3out', 'vrf']],
        ],
    )
    schema = module.params['schema']
    template = module.params['template']
    l3out = module.params['l3out']
    display_name = module.params['display_name']
    vrf = module.params['vrf']
    state = module.params['state']
    mso = MSOModule(module)
    # Get schema_id; fail_json() terminates the module on a bad schema name.
    schema_obj = mso.get_obj('schemas', displayName=schema)
    if schema_obj:
        schema_id = schema_obj['id']
    else:
        mso.fail_json(msg="Provided schema '{0}' does not exist".format(schema))
    schema_path = 'schemas/{id}'.format(**schema_obj)
    # Get template
    templates = [t['name'] for t in schema_obj['templates']]
    if template not in templates:
        mso.fail_json(msg="Provided template '{0}' does not exist. Existing templates: {1}".format(template, ', '.join(templates)))
    template_idx = templates.index(template)
    # Get L3out; when found, seed mso.existing with its current config.
    l3outs = [l['name'] for l in schema_obj['templates'][template_idx]['intersiteL3outs']]
    if l3out is not None and l3out in l3outs:
        l3out_idx = l3outs.index(l3out)
        mso.existing = schema_obj['templates'][template_idx]['intersiteL3outs'][l3out_idx]
    if state == 'query':
        # Query-only path: no l3out given means "list them all".
        if l3out is None:
            mso.existing = schema_obj['templates'][template_idx]['intersiteL3outs']
        elif not mso.existing:
            mso.fail_json(msg="L3out '{l3out}' not found".format(l3out=l3out))
        mso.exit_json()
    # JSON-patch paths for the collection and the single object.
    l3outs_path = '/templates/{0}/intersiteL3outs'.format(template)
    l3out_path = '/templates/{0}/intersiteL3outs/{1}'.format(template, l3out)
    ops = []
    mso.previous = mso.existing
    if state == 'absent':
        if mso.existing:
            mso.sent = mso.existing = {}
            ops.append(dict(op='remove', path=l3out_path))
    elif state == 'present':
        vrf_ref = mso.make_reference(vrf, 'vrf', schema_id, template)
        # Default the display name to the object name on first creation.
        if display_name is None and not mso.existing:
            display_name = l3out
        payload = dict(
            name=l3out,
            displayName=display_name,
            vrfRef=vrf_ref,
        )
        mso.sanitize(payload, collate=True)
        if mso.existing:
            ops.append(dict(op='replace', path=l3out_path, value=mso.sent))
        else:
            # '/-' appends to the collection.
            ops.append(dict(op='add', path=l3outs_path + '/-', value=mso.sent))
        mso.existing = mso.proposed
    # Check mode computes the diff but never PATCHes the schema.
    if not module.check_mode:
        mso.request(schema_path, method='PATCH', data=ops)
    mso.exit_json()
if __name__ == "__main__":
main()
| gpl-3.0 |
abhattad4/Digi-Menu | digimenu2/django/contrib/auth/decorators.py | 117 | 3021 | from functools import wraps
from django.conf import settings
from django.contrib.auth import REDIRECT_FIELD_NAME
from django.core.exceptions import PermissionDenied
from django.shortcuts import resolve_url
from django.utils.decorators import available_attrs
from django.utils.six.moves.urllib.parse import urlparse
def user_passes_test(test_func, login_url=None, redirect_field_name=REDIRECT_FIELD_NAME):
    """
    Decorator for views that checks that the user passes the given test,
    redirecting to the log-in page if necessary. The test should be a callable
    that takes the user object and returns True if the user passes.
    """
    def decorator(view_func):
        @wraps(view_func, assigned=available_attrs(view_func))
        def _wrapped_view(request, *args, **kwargs):
            if test_func(request.user):
                return view_func(request, *args, **kwargs)
            path = request.build_absolute_uri()
            resolved_login_url = resolve_url(login_url or settings.LOGIN_URL)
            # If the login url is the same scheme and net location then just
            # use the path as the "next" url.
            login_scheme, login_netloc = urlparse(resolved_login_url)[:2]
            current_scheme, current_netloc = urlparse(path)[:2]
            if ((not login_scheme or login_scheme == current_scheme) and
                    (not login_netloc or login_netloc == current_netloc)):
                path = request.get_full_path()
            # Imported here rather than at module level -- presumably to
            # avoid an import cycle with django.contrib.auth.views; verify
            # before moving it.
            from django.contrib.auth.views import redirect_to_login
            return redirect_to_login(
                path, resolved_login_url, redirect_field_name)
        return _wrapped_view
    return decorator
def login_required(function=None, redirect_field_name=REDIRECT_FIELD_NAME, login_url=None):
    """
    View decorator that redirects anonymous users to the log-in page.

    Usable both bare (@login_required) and parameterized
    (@login_required(login_url=...)).
    """
    decorator = user_passes_test(
        lambda u: u.is_authenticated(),
        login_url=login_url,
        redirect_field_name=redirect_field_name
    )
    if function is None:
        return decorator
    return decorator(function)
def permission_required(perm, login_url=None, raise_exception=False):
    """
    View decorator requiring that the user holds the given permission
    (a single string or a list/tuple of them). Failing users are sent to
    the log-in page, or PermissionDenied is raised when raise_exception
    is set.
    """
    def check_perms(user):
        # Normalise a single permission into a tuple.
        perms = perm if isinstance(perm, (list, tuple)) else (perm, )
        # First check if the user has the permission (even anon users).
        if user.has_perms(perms):
            return True
        # In case the 403 handler should be called raise the exception.
        if raise_exception:
            raise PermissionDenied
        # As the last resort, show the login form.
        return False
    return user_passes_test(check_perms, login_url=login_url)
| bsd-3-clause |
ArnesSI/pynetdot | pynetdot/models/__init__.py | 1 | 1130 | from __future__ import absolute_import, division, print_function, unicode_literals
import sys
import inspect
from . import base
# Add custom behaviour to some class:
#class Device(base.BaseDevice):
# def my_method(self):
# ...
class RR(base.BaseRR):
    """Resource record labelled by its fully qualified domain name."""

    @property
    def label(self):
        fqdn = '%s.%s' % (self.name, self.zone)
        return fqdn
class Ipblock(base.BaseIpblock):
    """IP block labelled in CIDR address/prefix notation."""

    @property
    def label(self):
        cidr = '%s/%s' % (self.address, self.prefix)
        return cidr
class Device(base.BaseDevice):
    """Device with netdot-specific parameter cleanup before saving."""

    def pre_save_params_clean(self, params):
        # The netdot server API expects new devices to carry the hostname
        # as a plain string, not as the id of an RR object.
        if not self.id and 'name' in params:
            params['name'] = str(self.name)
# Generate classes for each class in base module. But skip those already
# defined in this module (RR, Ipblock, Device above), so manual overrides
# take precedence over the generated ones.
my_classes = inspect.getmembers(sys.modules[__name__], inspect.isclass)
my_names = [n for n,_ in my_classes]
for base_name, cls in inspect.getmembers(base, inspect.isclass):
    # NOTE: replace() strips 'Base' anywhere in the name, not only as a
    # prefix -- assumes base-class names only contain it as a prefix.
    name = base_name.replace('Base', '')
    if name in my_names:
        continue
    # Create a trivial subclass and publish it as a module attribute.
    globals()[name] = type(str(name), (cls,), {})
| lgpl-3.0 |
huaweiswitch/neutron | neutron/plugins/bigswitch/agent/restproxy_agent.py | 7 | 6454 | # Copyright 2014 Big Switch Networks, Inc.
# All Rights Reserved.
#
# Copyright 2011 VMware, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import sys
import time
import eventlet
eventlet.monkey_patch()
from oslo.config import cfg
from neutron.agent.linux import ovs_lib
from neutron.agent.linux import utils
from neutron.agent import rpc as agent_rpc
from neutron.agent import securitygroups_rpc as sg_rpc
from neutron.common import config
from neutron.common import rpc as n_rpc
from neutron.common import topics
from neutron import context as q_context
from neutron.extensions import securitygroup as ext_sg
from neutron.openstack.common import excutils
from neutron.openstack.common import log
from neutron.plugins.bigswitch import config as pl_config
LOG = log.getLogger(__name__)
class IVSBridge(ovs_lib.OVSBridge):
    '''
    This class does not provide parity with OVS using IVS.
    It's only the bare minimum necessary to use IVS with this agent.
    '''
    def run_vsctl(self, args, check_error=False):
        # Shell out to ivs-ctl; errors are logged, and re-raised only
        # when check_error is set.
        full_args = ["ivs-ctl"] + args
        try:
            return utils.execute(full_args, root_helper=self.root_helper)
        except Exception as e:
            with excutils.save_and_reraise_exception() as ctxt:
                LOG.error(_("Unable to execute %(cmd)s. "
                            "Exception: %(exception)s"),
                          {'cmd': full_args, 'exception': e})
                if not check_error:
                    ctxt.reraise = False
    def get_vif_port_set(self):
        # All port names on the bridge, as a set.
        port_names = self.get_port_name_list()
        edge_ports = set(port_names)
        return edge_ports
    def get_vif_port_by_id(self, port_id):
        # IVS in nova uses hybrid method with the first 14 chars of the UUID.
        # NOTE: returns the port *name* (a string) or False -- unlike the
        # OVS base class, which returns a port object.
        name = 'qvo%s' % port_id[:14]
        if name in self.get_vif_port_set():
            return name
        return False
class PluginApi(agent_rpc.PluginApi,
                sg_rpc.SecurityGroupServerRpcApiMixin):
    """Agent-to-plugin RPC API combined with the security-group server
    RPC calls; adds no behaviour of its own."""
    pass
class SecurityGroupAgent(sg_rpc.SecurityGroupAgentRpcMixin):
    """Minimal security-group agent: wires up context, plugin RPC and the
    firewall driver expected by SecurityGroupAgentRpcMixin."""
    def __init__(self, context, plugin_rpc, root_helper):
        self.context = context
        self.plugin_rpc = plugin_rpc
        self.root_helper = root_helper
        # Instantiates the firewall driver (provided by the mixin).
        self.init_firewall()
class RestProxyAgent(n_rpc.RpcCallback,
                     sg_rpc.SecurityGroupAgentRpcCallbackMixin):
    """Polling agent that watches the integration bridge for VIF port
    changes and keeps security-group firewall rules in sync."""
    RPC_API_VERSION = '1.1'
    def __init__(self, integ_br, polling_interval, root_helper, vs='ovs'):
        super(RestProxyAgent, self).__init__()
        self.polling_interval = polling_interval
        self._setup_rpc()
        self.sg_agent = SecurityGroupAgent(self.context,
                                           self.plugin_rpc,
                                           root_helper)
        # Pick the virtual switch implementation: IVS or (default) OVS.
        if vs == 'ivs':
            self.int_br = IVSBridge(integ_br, root_helper)
        else:
            self.int_br = ovs_lib.OVSBridge(integ_br, root_helper)
    def _setup_rpc(self):
        # Subscribe to port and security-group update notifications.
        self.topic = topics.AGENT
        self.plugin_rpc = PluginApi(topics.PLUGIN)
        self.context = q_context.get_admin_context_without_session()
        self.endpoints = [self]
        consumers = [[topics.PORT, topics.UPDATE],
                     [topics.SECURITY_GROUP, topics.UPDATE]]
        self.connection = agent_rpc.create_consumers(self.endpoints,
                                                     self.topic,
                                                     consumers)
    def port_update(self, context, **kwargs):
        """RPC callback: refresh the firewall when a local port changes."""
        LOG.debug(_("Port update received"))
        port = kwargs.get('port')
        vif_port = self.int_br.get_vif_port_by_id(port['id'])
        if not vif_port:
            # Not plugged into our bridge -- nothing to do on this host.
            LOG.debug(_("Port %s is not present on this host."), port['id'])
            return
        LOG.debug(_("Port %s found. Refreshing firewall."), port['id'])
        if ext_sg.SECURITYGROUPS in port:
            self.sg_agent.refresh_firewall()
    def _update_ports(self, registered_ports):
        # Diff the bridge's current port set against what we saw last loop.
        # Returns None when nothing changed.
        ports = self.int_br.get_vif_port_set()
        if ports == registered_ports:
            return
        added = ports - registered_ports
        removed = registered_ports - ports
        return {'current': ports,
                'added': added,
                'removed': removed}
    def _process_devices_filter(self, port_info):
        # Apply/remove firewall rules for the ports that appeared/vanished.
        if 'added' in port_info:
            self.sg_agent.prepare_devices_filter(port_info['added'])
        if 'removed' in port_info:
            self.sg_agent.remove_devices_filter(port_info['removed'])
    def daemon_loop(self):
        """Poll forever, sleeping out the remainder of each interval."""
        ports = set()
        while True:
            start = time.time()
            try:
                port_info = self._update_ports(ports)
                if port_info:
                    LOG.debug(_("Agent loop has new device"))
                    self._process_devices_filter(port_info)
                    ports = port_info['current']
            except Exception:
                # Keep the loop alive; a single failed iteration is retried.
                LOG.exception(_("Error in agent event loop"))
            elapsed = max(time.time() - start, 0)
            if (elapsed < self.polling_interval):
                time.sleep(self.polling_interval - elapsed)
            else:
                LOG.debug(_("Loop iteration exceeded interval "
                            "(%(polling_interval)s vs. %(elapsed)s)!"),
                          {'polling_interval': self.polling_interval,
                           'elapsed': elapsed})
def main():
    """Entry point: load config, build the agent and poll until killed."""
    config.init(sys.argv[1:])
    config.setup_logging()
    # Register the RESTPROXYAGENT option group before reading it below.
    pl_config.register_config()
    integ_br = cfg.CONF.RESTPROXYAGENT.integration_bridge
    polling_interval = cfg.CONF.RESTPROXYAGENT.polling_interval
    root_helper = cfg.CONF.AGENT.root_helper
    bsnagent = RestProxyAgent(integ_br, polling_interval, root_helper,
                              cfg.CONF.RESTPROXYAGENT.virtual_switch_type)
    # daemon_loop() never returns normally; sys.exit(0) is defensive.
    bsnagent.daemon_loop()
    sys.exit(0)
if __name__ == "__main__":
main()
| apache-2.0 |
ar7z1/ansible | lib/ansible/modules/network/aci/aci_aaa_user.py | 15 | 9942 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2018, Dag Wieers (dagwieers) <dag@wieers.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'certified'}
DOCUMENTATION = r'''
---
module: aci_aaa_user
short_description: Manage AAA users (aaa:User)
description:
- Manage AAA users on Cisco ACI fabrics.
notes:
- This module is not idempotent when C(aaa_password) is being used
(even if that password was already set identically). This
appears to be an inconsistency wrt. the idempotent nature
of the APIC REST API. The vendor has been informed.
- More information in :ref:`the ACI documentation <aci_guide_known_issues>`.
- More information about the internal APIC class B(aaa:User) from
L(the APIC Management Information Model reference,https://developer.cisco.com/docs/apic-mim-ref/).
author:
- Dag Wieers (@dagwieers)
requirements:
- python-dateutil
version_added: '2.5'
options:
aaa_password:
description:
- The password of the locally-authenticated user.
aaa_password_lifetime:
description:
- The lifetime of the locally-authenticated user password.
type: int
aaa_password_update_required:
description:
- Whether this account needs password update.
type: bool
aaa_user:
description:
- The name of the locally-authenticated user user to add.
aliases: [ name, user ]
clear_password_history:
description:
- Whether to clear the password history of a locally-authenticated user.
type: bool
description:
description:
- Description for the AAA user.
aliases: [ descr ]
email:
description:
- The email address of the locally-authenticated user.
enabled:
description:
- The status of the locally-authenticated user account.
type: bool
expiration:
description:
- The expiration date of the locally-authenticated user account.
expires:
description:
- Whether to enable an expiration date for the locally-authenticated user account.
type: bool
first_name:
description:
- The first name of the locally-authenticated user.
last_name:
description:
- The last name of the locally-authenticated user.
phone:
description:
- The phone number of the locally-authenticated user.
state:
description:
- Use C(present) or C(absent) for adding or removing.
- Use C(query) for listing an object or multiple objects.
choices: [ absent, present, query ]
default: present
extends_documentation_fragment: aci
'''
EXAMPLES = r'''
- name: Add a user
aci_aaa_user:
host: apic
username: admin
password: SomeSecretPassword
aaa_user: dag
aaa_password: AnotherSecretPassword
expiration: never
expires: no
email: dag@wieers.com
phone: 1-234-555-678
first_name: Dag
last_name: Wieers
state: present
delegate_to: localhost
- name: Remove a user
aci_aaa_user:
host: apic
username: admin
password: SomeSecretPassword
aaa_user: dag
state: absent
delegate_to: localhost
- name: Query a user
aci_aaa_user:
host: apic
username: admin
password: SomeSecretPassword
aaa_user: dag
state: query
delegate_to: localhost
register: query_result
- name: Query all users
aci_aaa_user:
host: apic
username: admin
password: SomeSecretPassword
state: query
delegate_to: localhost
register: query_result
'''
RETURN = r'''
current:
description: The existing configuration from the APIC after the module has finished
returned: success
type: list
sample:
[
{
"fvTenant": {
"attributes": {
"descr": "Production environment",
"dn": "uni/tn-production",
"name": "production",
"nameAlias": "",
"ownerKey": "",
"ownerTag": ""
}
}
}
]
error:
description: The error information as returned from the APIC
returned: failure
type: dict
sample:
{
"code": "122",
"text": "unknown managed object class foo"
}
raw:
description: The raw output returned by the APIC REST API (xml or json)
returned: parse error
type: string
sample: '<?xml version="1.0" encoding="UTF-8"?><imdata totalCount="1"><error code="122" text="unknown managed object class foo"/></imdata>'
sent:
description: The actual/minimal configuration pushed to the APIC
returned: info
type: list
sample:
{
"fvTenant": {
"attributes": {
"descr": "Production environment"
}
}
}
previous:
description: The original configuration from the APIC before the module has started
returned: info
type: list
sample:
[
{
"fvTenant": {
"attributes": {
"descr": "Production",
"dn": "uni/tn-production",
"name": "production",
"nameAlias": "",
"ownerKey": "",
"ownerTag": ""
}
}
}
]
proposed:
description: The assembled configuration from the user-provided parameters
returned: info
type: dict
sample:
{
"fvTenant": {
"attributes": {
"descr": "Production environment",
"name": "production"
}
}
}
filter_string:
description: The filter string used for the request
returned: failure or debug
type: string
sample: '?rsp-prop-include=config-only'
method:
description: The HTTP method used for the request to the APIC
returned: failure or debug
type: string
sample: POST
response:
description: The HTTP response from the APIC
returned: failure or debug
type: string
sample: OK (30 bytes)
status:
description: The HTTP status from the APIC
returned: failure or debug
type: int
sample: 200
url:
description: The HTTP url used for the request to the APIC
returned: failure or debug
type: string
sample: https://10.11.12.13/api/mo/uni/tn-production.json
'''
from ansible.module_utils.network.aci.aci import ACIModule, aci_argument_spec
from ansible.module_utils.basic import AnsibleModule
try:
    from dateutil.tz import tzutc
    import dateutil.parser
    HAS_DATEUTIL = True
except ImportError:
    # The missing dependency is reported from main() via fail_json().
    HAS_DATEUTIL = False
def main():
    """Ansible entry point: create/update/delete/query an APIC AAA user
    (aaa:User), emitting the result via exit_json/fail_json."""
    argument_spec = aci_argument_spec()
    argument_spec.update(
        aaa_password=dict(type='str', no_log=True),
        aaa_password_lifetime=dict(type='int'),
        aaa_password_update_required=dict(type='bool'),
        aaa_user=dict(type='str', required=True, aliases=['name']),  # Not required for querying all objects
        clear_password_history=dict(type='bool'),
        description=dict(type='str', aliases=['descr']),
        email=dict(type='str'),
        enabled=dict(type='bool'),
        expiration=dict(type='str'),
        expires=dict(type='bool'),
        first_name=dict(type='str'),
        last_name=dict(type='str'),
        phone=dict(type='str'),
        state=dict(type='str', default='present', choices=['absent', 'present', 'query']),
    )
    module = AnsibleModule(
        argument_spec=argument_spec,
        supports_check_mode=True,
        required_if=[
            ['state', 'absent', ['aaa_user']],
            ['state', 'present', ['aaa_user']],
            ['expires', True, ['expiration']],
        ],
    )
    aci = ACIModule(module)
    if not HAS_DATEUTIL:
        module.fail_json(msg='dateutil required for this module')
    aaa_password = module.params['aaa_password']
    aaa_password_lifetime = module.params['aaa_password_lifetime']
    # aci.boolean() maps Python booleans onto the strings APIC expects.
    aaa_password_update_required = aci.boolean(module.params['aaa_password_update_required'])
    aaa_user = module.params['aaa_user']
    # NOTE(review): unlike the other booleans, clear_password_history is
    # passed through raw (no aci.boolean()) -- confirm this is intended.
    clear_password_history = module.params['clear_password_history']
    description = module.params['description']
    email = module.params['email']
    enabled = aci.boolean(module.params['enabled'], 'active', 'inactive')
    expires = aci.boolean(module.params['expires'])
    first_name = module.params['first_name']
    last_name = module.params['last_name']
    phone = module.params['phone']
    state = module.params['state']
    expiration = module.params['expiration']
    # Normalize any parseable date to the APIC's ISO-8601 form in UTC;
    # the literal 'never' is passed through untouched.
    if expiration is not None and expiration != 'never':
        try:
            expiration = aci.iso8601_format(dateutil.parser.parse(expiration).replace(tzinfo=tzutc()))
        except Exception as e:
            module.fail_json(msg="Failed to parse date format '%s', %s" % (module.params['expiration'], e))
    aci.construct_url(
        root_class=dict(
            aci_class='aaaUser',
            aci_rn='userext/user-{0}'.format(aaa_user),
            module_object=aaa_user,
            target_filter={'name': aaa_user},
        ),
    )
    aci.get_existing()
    if state == 'present':
        aci.payload(
            aci_class='aaaUser',
            class_config=dict(
                accountStatus=enabled,
                clearPwdHistory=clear_password_history,
                email=email,
                expiration=expiration,
                expires=expires,
                firstName=first_name,
                lastName=last_name,
                name=aaa_user,
                phone=phone,
                pwd=aaa_password,
                pwdLifeTime=aaa_password_lifetime,
                pwdUpdateRequired=aaa_password_update_required,
            ),
        )
        aci.get_diff(aci_class='aaaUser')
        # post_config() honours check mode internally.
        aci.post_config()
    elif state == 'absent':
        aci.delete_config()
    aci.exit_json()
if __name__ == "__main__":
main()
| gpl-3.0 |
Boussadia/weboob | modules/amelipro/backend.py | 2 | 3475 | # -*- coding: utf-8 -*-
# Copyright(C) 2013 Christophe Lampin
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
import urllib
from weboob.capabilities.bill import ICapBill, SubscriptionNotFound, BillNotFound, Subscription, Bill
from weboob.tools.backend import BaseBackend, BackendConfig
from weboob.tools.value import ValueBackendPassword
from .browser import AmeliProBrowser
__all__ = ['AmeliProBackend']
class AmeliProBackend(BaseBackend, ICapBill):
    """Weboob backend for the French Ameli Pro website (health insurance
    for professionals).

    Implements the ICapBill capability: enumerating subscriptions and
    fetching/downloading the bills attached to them through
    AmeliProBrowser.
    """
    NAME = 'amelipro'
    DESCRIPTION = u'Ameli website: French Health Insurance for Professionals'
    MAINTAINER = u'Christophe Lampin'
    EMAIL = 'weboob@lampin.net'
    VERSION = '0.i'
    LICENSE = 'AGPLv3+'
    # Fixed: BROWSER used to be assigned twice (once before and once after
    # CONFIG) with the same value; the redundant assignment was removed.
    BROWSER = AmeliProBrowser
    CONFIG = BackendConfig(ValueBackendPassword('login',
                                                label='numero de SS',
                                                masked=False),
                           ValueBackendPassword('password',
                                                label='Password',
                                                masked=True)
                           )

    def create_default_browser(self):
        """Instantiate the browser with the configured credentials."""
        return self.create_browser(self.config['login'].get(),
                                   self.config['password'].get())

    def iter_subscription(self):
        """Return the subscriptions available on this account."""
        return self.browser.get_subscription_list()

    def get_subscription(self, _id):
        """Return the Subscription whose id is `_id`.

        Raises SubscriptionNotFound when `_id` is not numeric or when the
        site does not know it.
        """
        if not _id.isdigit():
            raise SubscriptionNotFound()
        with self.browser:
            subscription = self.browser.get_subscription(_id)
        if not subscription:
            raise SubscriptionNotFound()
        else:
            return subscription

    def iter_bills_history(self, subscription):
        """Iterate over history entries of `subscription` (object or id)."""
        if not isinstance(subscription, Subscription):
            subscription = self.get_subscription(subscription)
        with self.browser:
            return self.browser.iter_history(subscription)

    def get_details(self, subscription):
        """Return detailed information for `subscription` (object or id)."""
        if not isinstance(subscription, Subscription):
            subscription = self.get_subscription(subscription)
        with self.browser:
            return self.browser.get_details(subscription)

    def iter_bills(self, subscription):
        """Iterate over the bills of `subscription` (object or id)."""
        if not isinstance(subscription, Subscription):
            subscription = self.get_subscription(subscription)
        with self.browser:
            return self.browser.iter_bills()

    def get_bill(self, id):
        """Return the Bill with the given id or raise BillNotFound.

        NOTE: the parameter name `id` shadows the builtin but is kept
        unchanged, as it is part of the public ICapBill interface.
        """
        with self.browser:
            bill = self.browser.get_bill(id)
        if not bill:
            raise BillNotFound()
        else:
            return bill

    def download_bill(self, bill):
        """Fetch and return the raw document content of `bill` (object or id)."""
        if not isinstance(bill, Bill):
            bill = self.get_bill(bill)
        with self.browser:
            return self.browser.readurl(bill._url, urllib.urlencode(bill._args))
| agpl-3.0 |
cypsun/FreeCAD | src/Mod/Arch/importOBJ.py | 9 | 5268 | #***************************************************************************
#* *
#* Copyright (c) 2011 *
#* Yorik van Havre <yorik@uncreated.net> *
#* *
#* This program is free software; you can redistribute it and/or modify *
#* it under the terms of the GNU Lesser General Public License (LGPL) *
#* as published by the Free Software Foundation; either version 2 of *
#* the License, or (at your option) any later version. *
#* for detail see the LICENCE text file. *
#* *
#* This program is distributed in the hope that it will be useful, *
#* but WITHOUT ANY WARRANTY; without even the implied warranty of *
#* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
#* GNU Library General Public License for more details. *
#* *
#* You should have received a copy of the GNU Library General Public *
#* License along with this program; if not, write to the Free Software *
#* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 *
#* USA *
#* *
#***************************************************************************
import FreeCAD, DraftGeomUtils, Part, Draft
from DraftTools import translate
# Number of decimals used by Draft for rounding; reused here when
# comparing and printing vertex coordinates.
p = Draft.precision()
# FreeCAD importer modules may shadow the builtin open() with their own
# entry point; keep a reference to the genuine builtin for writing files.
if open.__module__ == '__builtin__':
    pythonopen = open
def findVert(aVertex, aList):
    """Return the index of the first vertex in aList whose coordinates
    match aVertex (compared after rounding to the Draft precision p),
    or None when no vertex matches."""
    target = (round(aVertex.X, p), round(aVertex.Y, p), round(aVertex.Z, p))
    for idx, candidate in enumerate(aList):
        key = (round(candidate.X, p), round(candidate.Y, p), round(candidate.Z, p))
        if key == target:
            return idx
def getIndices(shape,offset):
    "returns 3 lists: vertex strings, edge index strings and face index strings, offsetted with the given amount"
    # OBJ fragments for this shape: "v" coordinate strings, "l" (line)
    # index strings and "f" (face) index strings. OBJ indices are global
    # and 1-based, hence the caller-provided offset.
    vlist = []
    elist = []
    flist = []
    curves = None
    # If any edge is not a straight line, the whole shape is tessellated
    # once and the triangulation is used instead of the exact topology.
    for e in shape.Edges:
        if not isinstance(e.Curve,Part.Line):
            if not curves:
                curves = shape.tessellate(1)
                FreeCAD.Console.PrintWarning(translate("Arch","Found a shape containing curves, triangulating\n"))
    if curves:
        # curves[0]: points, curves[1]: triangles as point-index triples
        for v in curves[0]:
            vlist.append(" "+str(round(v.x,p))+" "+str(round(v.y,p))+" "+str(round(v.z,p)))
        for f in curves[1]:
            fi = ""
            for vi in f:
                fi += " " + str(vi + offset)
            flist.append(fi)
    else:
        for v in shape.Vertexes:
            vlist.append(" "+str(round(v.X,p))+" "+str(round(v.Y,p))+" "+str(round(v.Z,p)))
        if not shape.Faces:
            # wire-only shape: export straight edges as OBJ line elements
            for e in shape.Edges:
                if DraftGeomUtils.geomType(e) == "Line":
                    ei = " " + str(findVert(e.Vertexes[0],shape.Vertexes) + offset)
                    ei += " " + str(findVert(e.Vertexes[-1],shape.Vertexes) + offset)
                    elist.append(ei)
        for f in shape.Faces:
            if len(f.Wires) > 1:
                # if we have holes, we triangulate
                tris = f.tessellate(1)
                for fdata in tris[1]:
                    fi = ""
                    for vi in fdata:
                        vdata = Part.Vertex(tris[0][vi])
                        fi += " " + str(findVert(vdata,shape.Vertexes) + offset)
                    flist.append(fi)
            else:
                fi = ""
                # OCC vertices are unsorted. We need to sort in the right order...
                edges = DraftGeomUtils.sortEdges(f.OuterWire.Edges)
                #print edges
                for e in edges:
                    #print e.Vertexes[0].Point,e.Vertexes[1].Point
                    v = e.Vertexes[0]
                    fi += " " + str(findVert(v,shape.Vertexes) + offset)
                flist.append(fi)
    return vlist,elist,flist
def export(exportList,filename):
    "called when freecad exports a file"
    # Writes every visible Part::Feature of exportList to `filename` in
    # Wavefront OBJ format, one "o" object per FreeCAD object.
    outfile = pythonopen(filename,"wb")
    ver = FreeCAD.Version()
    outfile.write("# FreeCAD v" + ver[0] + "." + ver[1] + " build" + ver[2] + " Arch module\n")
    outfile.write("# http://www.freecadweb.org\n")
    # OBJ indices are 1-based and global across objects.
    offset = 1
    for obj in exportList:
        if obj.isDerivedFrom("Part::Feature"):
            if obj.ViewObject.isVisible():
                vlist,elist,flist = getIndices(obj.Shape,offset)
                offset += len(vlist)
                outfile.write("o " + obj.Name + "\n")
                for v in vlist:
                    outfile.write("v" + v + "\n")
                for e in elist:
                    outfile.write("l" + e + "\n")
                for f in flist:
                    outfile.write("f" + f + "\n")
    # NOTE(review): the file handle is not closed on an exception raised
    # mid-loop; a try/finally or `with` would be safer.
    outfile.close()
    FreeCAD.Console.PrintMessage(translate("Arch","successfully written ")+filename+"\n")
| lgpl-2.1 |
petertodd/tx-flood-attack | lib/python-bitcoinlib/bitcoin/tests/test_transactions.py | 6 | 4118 | # Distributed under the MIT/X11 software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
from __future__ import absolute_import, division, print_function, unicode_literals
import json
import unittest
import os
from bitcoin.core import COutPoint, CTxIn, CTxOut, CTransaction, CheckTransaction, CheckTransactionError, lx, x, b2x, ValidationError
from bitcoin.core.scripteval import VerifyScript, SCRIPT_VERIFY_P2SH
from bitcoin.tests.test_scripteval import parse_script
def load_test_vectors(name):
    """Yield (prevouts, tx, enforceP2SH) triples read from the JSON test
    vector file `name` located in the tests' data/ directory.

    prevouts maps COutPoint -> parsed scriptPubKey, tx is the
    deserialized CTransaction, enforceP2SH is the third JSON field.
    """
    with open(os.path.dirname(__file__) + '/data/' + name, 'r') as fd:
        for test_case in json.load(fd):
            # Comments designated by single length strings
            if len(test_case) == 1:
                continue
            assert len(test_case) == 3
            prevouts = {}
            for json_prevout in test_case[0]:
                assert len(json_prevout) == 3
                n = json_prevout[1]
                # -1 in the JSON stands for 0xffffffff (the "null" outpoint index)
                if n == -1:
                    n = 0xffffffff
                prevout = COutPoint(lx(json_prevout[0]), n)
                prevouts[prevout] = parse_script(json_prevout[2])
            tx = CTransaction.deserialize(x(test_case[1]))
            enforceP2SH = test_case[2]
            yield (prevouts, tx, enforceP2SH)
class Test_COutPoint(unittest.TestCase):
    """Unit tests for COutPoint (reference to a transaction output)."""
    def test_is_null(self):
        # the default COutPoint and the all-zero-hash/0xffffffff form are null
        self.assertTrue(COutPoint().is_null())
        self.assertTrue(COutPoint(hash=b'\x00'*32,n=0xffffffff).is_null())
        self.assertFalse(COutPoint(hash=b'\x00'*31 + b'\x01').is_null())
        self.assertFalse(COutPoint(n=1).is_null())
    def test_repr(self):
        # helper comparing repr() output against the expected literal
        def T(outpoint, expected):
            actual = repr(outpoint)
            self.assertEqual(actual, expected)
        T( COutPoint(),
          'COutPoint()')
        T( COutPoint(lx('4a5e1e4baab89f3a32518a88c31bc87f618f76673e2cc77ab2127b7afdeda33b'), 0),
          "COutPoint(lx('4a5e1e4baab89f3a32518a88c31bc87f618f76673e2cc77ab2127b7afdeda33b'), 0)")
class Test_CTxIn(unittest.TestCase):
    """Unit tests for CTxIn (transaction input)."""
    def test_is_final(self):
        # inputs are final unless nSequence is below 0xffffffff
        self.assertTrue(CTxIn().is_final())
        self.assertTrue(CTxIn(nSequence=0xffffffff).is_final())
        self.assertFalse(CTxIn(nSequence=0).is_final())
    def test_repr(self):
        # helper comparing repr() output against the expected literal
        def T(txin, expected):
            actual = repr(txin)
            self.assertEqual(actual, expected)
        T( CTxIn(),
          'CTxIn(COutPoint(), CScript([]), 0xffffffff)')
class Test_CTransaction(unittest.TestCase):
    """Unit tests for CTransaction, driven by the shared JSON test vectors."""
    def test_is_coinbase(self):
        tx = CTransaction()
        self.assertFalse(tx.is_coinbase())
        tx.vin.append(CTxIn())
        # IsCoinBase() in reference client doesn't check if vout is empty
        self.assertTrue(tx.is_coinbase())
        # a non-null prevout index makes the single input non-coinbase
        tx.vin[0].prevout.n = 0
        self.assertFalse(tx.is_coinbase())
        # more than one input is never coinbase
        tx.vin[0] = CTxIn()
        tx.vin.append(CTxIn())
        self.assertFalse(tx.is_coinbase())
    def test_tx_valid(self):
        for prevouts, tx, enforceP2SH in load_test_vectors('tx_valid.json'):
            try:
                CheckTransaction(tx)
            except CheckTransactionError:
                # NOTE(review): this `continue` is unreachable, self.fail()
                # raises before it.
                self.fail('tx failed CheckTransaction(): ' \
                        + str((prevouts, b2x(tx.serialize()), enforceP2SH)))
                continue
            for i in range(len(tx.vin)):
                flags = set()
                if enforceP2SH:
                    flags.add(SCRIPT_VERIFY_P2SH)
                VerifyScript(tx.vin[i].scriptSig, prevouts[tx.vin[i].prevout], tx, i, flags=flags)
    def test_tx_invalid(self):
        for prevouts, tx, enforceP2SH in load_test_vectors('tx_invalid.json'):
            try:
                CheckTransaction(tx)
            except CheckTransactionError:
                # transaction already rejected by the basic checks: vector passes
                continue
            with self.assertRaises(ValidationError):
                for i in range(len(tx.vin)):
                    flags = set()
                    if enforceP2SH:
                        flags.add(SCRIPT_VERIFY_P2SH)
                    VerifyScript(tx.vin[i].scriptSig, prevouts[tx.vin[i].prevout], tx, i, flags=flags)
| mit |
kabakchey/ordered_pgsql_dump | backup.py | 1 | 10461 | #!/usr/bin python
# -*- coding: utf-8 -*-
"""
Backuping database script with insert sorting
"""
import psycopg2
import ConfigParser
import os
import sys
import sql_reserved_words
import decimal
import datetime
import subprocess
import codecs
from shutil import move
# Cached type of None, used for exact type comparisons when serializing rows.
NONE_TYPE = type(None)
# Fallbacks used when no CLI arguments / ini settings are supplied.
DEFAULT_OUTPUT_FILE_NAME = 'database.sql'
DEFAULT_SCHEMA = 'public'
DEFAULT_OPTIONS_FILE = 'credentials.ini'
def check_identifier(identifier):
    """
    Return `identifier` quoted with double quotes when it collides with an
    SQL reserved word, otherwise return it unchanged.
    """
    is_reserved = identifier.upper() in sql_reserved_words.RESERVED_WORDS
    return '"%s"' % identifier if is_reserved else identifier
def pg_dump_pre_data(host, port, password, user, database, schema, output_file_name):
    """
    Dump the "pre-data" section of `database` (table/type definitions,
    everything pg_dump emits before the data) into `output_file_name`.

    The password, when provided, is handed to pg_dump through the
    PGPASSWORD environment variable, since pg_dump takes no password flag.
    Blocks until pg_dump has finished.
    """
    # idiom fix: was "if not password is None"
    if password is not None:
        os.environ['PGPASSWORD'] = password
    # TODO(review): the pg_dump executable path is hard-coded for a Windows
    # PostgreSQL 9.3 installation; consider making it configurable.
    pg_dump_process = subprocess.Popen(["C:/Program Files/PostgreSQL/9.3/bin/pg_dump.exe",
                                        "--host",
                                        host,
                                        "--port",
                                        port,
                                        "--username",
                                        user,
                                        "--format",
                                        "plain",
                                        "--no-owner",
                                        "--section",
                                        "pre-data",
                                        "--encoding",
                                        "UTF8",
                                        "--no-privileges",
                                        "--no-tablespaces",
                                        "--verbose",
                                        "--no-unlogged-table-data",
                                        "--schema=%s"%schema,
                                        "--file",
                                        output_file_name,
                                        database],
                                       env=os.environ)
    pg_dump_process.wait()
def pg_dump_post_data(host, port, password, user, database, schema, output_file_name):
    """
    Dump the "post-data" section of `database` (indexes, constraints,
    triggers - everything pg_dump emits after the data) into
    `output_file_name`.

    The password, when provided, is handed to pg_dump through the
    PGPASSWORD environment variable, since pg_dump takes no password flag.
    Blocks until pg_dump has finished.
    """
    # idiom fix: was "if not password is None"
    if password is not None:
        os.environ['PGPASSWORD'] = password
    # TODO(review): the pg_dump executable path is hard-coded for a Windows
    # PostgreSQL 9.3 installation; consider making it configurable.
    pg_dump_process = subprocess.Popen(["C:/Program Files/PostgreSQL/9.3/bin/pg_dump.exe",
                                        "--host",
                                        host,
                                        "--port",
                                        port,
                                        "--username",
                                        user,
                                        "--format",
                                        "plain",
                                        "--no-owner",
                                        "--section",
                                        "post-data",
                                        "--encoding",
                                        "UTF8",
                                        "--no-privileges",
                                        "--no-tablespaces",
                                        "--verbose",
                                        "--no-unlogged-table-data",
                                        "--schema=%s"%schema,
                                        "--file",
                                        output_file_name,
                                        database],
                                       env=os.environ)
    pg_dump_process.wait()
def get_db_con(host, database, user, password, port):
    """
    Open and return a psycopg2 connection to the given database.
    The password is included in the DSN only when one was configured.
    """
    if password is None:
        dsn = "host=%s dbname=%s user=%s port=%s" % (host, database, user, port)
    else:
        dsn = "host=%s dbname=%s user=%s password=%s port=%s" % (host, database, user, password, port)
    return psycopg2.connect(dsn)
def preprocess_input_params():
"""
Checking argv and connecting to db
"""
if len(sys.argv) == 2 or len(sys.argv) > 2:
output_file = sys.argv[1]
else:
output_file = DEFAULT_OUTPUT_FILE_NAME
if len(sys.argv) > 2:
ini_file = sys.argv[2]
else:
ini_file = DEFAULT_OPTIONS_FILE
if not os.path.exists(ini_file):
print u"ERROR: Move backup script into the root project directory, for example 'C:\\xampp\\htdocs', or provide path to credentials.ini as second parameter and output file name as first"
sys.exit(1)
config = ConfigParser.ConfigParser()
config.readfp(open(ini_file))
if config.has_option('Database', 'schema'):
schema = config.get('Database', 'schema')
else:
schema = DEFAULT_SCHEMA
if config.has_option('Database', 'password'):
# To secure store password we can use
# this feature: http://www.postgresql.org/docs/9.3/static/libpq-pgpass.html
# ("The password file" feature)
password = config.get('Database', 'password')
else:
password = None
user = config.get('Database', 'user')
port = config.get('Database', 'port')
host = config.get('Database', 'host')
database = config.get('Database', 'database')
return (host, database, user, password, port, schema, output_file)
def process_tuple_to_string(tuple_):
    """
    Convert one result row (tuple of Python values) into the
    comma-separated value list used inside a PostgreSQL INSERT statement.

    None becomes NULL, strings are single-quoted with '' doubling (and
    carriage returns stripped), numbers and Decimals are emitted verbatim,
    datetimes as 'YYYY-MM-DD HH:MM:SS', booleans as True/False.
    Raises Exception for any unsupported type.
    """
    result = []
    for item in tuple_:
        # NOTE: exact type comparison (==) is deliberate here: bool must
        # not be swallowed by the int branch (type(True) == int is False),
        # so isinstance() would change behavior.
        # Fixed: type_ was computed once but type(item) kept being
        # recomputed in the later branches; use the cached value everywhere.
        type_ = type(item)
        if type_ == NONE_TYPE:
            result.append("NULL")
        elif (type_ == unicode or
              type_ == str):
            result.append("'%s'" % item.replace("'", "''").replace("\r", ""))
        elif (type_ == int or
              type_ == long):
            result.append("%s" % item)
        elif type_ == decimal.Decimal:
            result.append("%s" % unicode(item))
        elif type_ == datetime.datetime:
            result.append("'%s'" % item.isoformat(' '))
        elif type_ == bool:
            result.append(str(item))
        else:
            raise Exception("Unknown type to convert: '%s'" % type_)
    # Python 2 only: byte strings are decoded so the caller can write the
    # joined result through a UTF-8 codecs writer.
    return u", ".join([item.decode('utf-8') for item in result])
def get_data(con, schema, output_file_name):
    """
    Append one INSERT statement per row, for every base table of `schema`,
    to `output_file_name`.  Rows are ordered by primary key (or by all
    columns when the table has no PK) so successive dumps diff cleanly.
    Sequence positions are restored with pg_catalog.setval() calls.
    Closes `con` when done.
    """
    output_file = codecs.open(output_file_name, "a", encoding='utf-8')
    cur = con.cursor()
    cur.execute("SELECT table_name FROM information_schema.tables WHERE table_schema = %s AND table_type = 'BASE TABLE' ORDER BY table_name", (schema, ))
    for table in cur.fetchall():
        table_name = check_identifier(table[0])
        cur.execute("SELECT column_name FROM information_schema.columns WHERE table_name = %s ORDER BY ordinal_position", (table[0], ))
        columns = [check_identifier(tuple_[0]) for tuple_ in cur.fetchall()]
        columns_list = ", ".join(columns)
        # Look up the table's primary key columns, in key order.
        # NOTE(review): the filter passes the *quoted* table_name to
        # t.table_name = %s; for reserved-word table names this looks like
        # it would not match the unquoted information_schema value - verify.
        cur.execute("""SELECT kcu.column_name
                       FROM information_schema.tables t
                       LEFT JOIN information_schema.table_constraints tc
                       ON tc.table_catalog = t.table_catalog
                       AND tc.table_schema = t.table_schema
                       AND tc.table_name = t.table_name
                       AND tc.constraint_type = 'PRIMARY KEY'
                       LEFT JOIN information_schema.key_column_usage kcu
                       ON kcu.table_catalog = tc.table_catalog
                       AND kcu.table_schema = tc.table_schema
                       AND kcu.table_name = tc.table_name
                       AND kcu.constraint_name = tc.constraint_name
                       WHERE t.table_schema = %s AND
                             t.table_name = %s
                       ORDER BY t.table_catalog,
                                t.table_schema,
                                t.table_name,
                                kcu.constraint_name,
                                kcu.ordinal_position""", (schema, table_name))
        pkeys = [tuple_[0] for tuple_ in cur.fetchall() if not tuple_[0] is None]
        insert_sql_ = "INSERT INTO %s (%s) VALUES (%s);\n"%(table_name, columns_list, '%s')
        # Deterministic row order: by PK when available, else by all columns.
        if len(pkeys) > 0:
            select_sql_ = "SELECT %s FROM %s ORDER BY %s;"%(columns_list, table_name, ", ".join([check_identifier(pkey) for pkey in pkeys]))
        else:
            select_sql_ = "SELECT %s FROM %s ORDER BY %s;"%(columns_list, table_name, columns_list)
        cur.execute(select_sql_)
        for tuple_ in cur.fetchall():
            output_file.write(insert_sql_%process_tuple_to_string(tuple_))
        output_file.write("\n")
        # Emit setval() for every serial sequence so ids continue correctly
        # after a restore.
        for column in columns:
            cur.execute("SELECT pg_get_serial_sequence(%s, %s)", (table_name.replace('"', ''), column.replace('"', '')))
            for seq in cur.fetchall():
                if not seq[0] is None:
                    cur.execute("SELECT sequence_name, last_value, is_called FROM %s"%seq[0])
                    output_file.write("SELECT pg_catalog.setval('%s', %s, %s);\n\n\n"%cur.fetchone())
    con.close()
def clear_dump(file_name):
    """
    Strip volatile pg_dump comment lines (TOC entries, dependency lists,
    start/completion timestamps) from the dump so that successive dumps
    of identical data compare equal.

    Each removed line is replaced by an empty line, keeping the overall
    line count stable; the file is rewritten in place via a temp file.
    """
    # str.startswith accepts a tuple: one call instead of a chain of tests.
    volatile_prefixes = ('-- TOC', '-- Dependencies', '-- Started', '-- Completed')
    tmp_name = file_name + 'clean_file'
    # Fixed: both file handles are now closed deterministically (the source
    # handle used to be opened inline and never closed).
    with open(tmp_name, 'w') as res_file:
        with open(file_name, 'r') as src_file:
            for line in src_file:
                if line.startswith(volatile_prefixes):
                    res_file.write('\n')
                else:
                    res_file.write(line)
    move(tmp_name, file_name)
def main():
    """
    Main routine: dump schema pre-data, then deterministic ordered INSERTs,
    then post-data (indexes/constraints), concatenate the pieces and strip
    volatile comments from the result.
    """
    # connection settings come from argv and the credentials ini file
    host, database, user, password, port, schema, output_file_name = preprocess_input_params()
    con = get_db_con(host, database, user, password, port)
    # 1) table/type definitions
    pg_dump_pre_data(host, port, password, user, database, schema, output_file_name)
    # 2) data as primary-key-ordered INSERT statements (appended)
    get_data(con, schema, output_file_name)
    buffer_file_name = output_file_name + ".buffer"
    # 3) post-data goes to a temporary buffer file, then is appended
    pg_dump_post_data(host, port, password, user, database, schema, buffer_file_name)
    open(output_file_name, "a").write(open(buffer_file_name, "r").read())
    os.remove(buffer_file_name)
    # 4) remove pg_dump's volatile comment lines so dumps are diff-friendly
    clear_dump(output_file_name)
main()
| bsd-3-clause |
bluemini/kuma | vendor/packages/pkg_resources/__init__.py | 211 | 106670 | """
Package resource API
--------------------
A resource is a logical file contained within a package, or a logical
subdirectory thereof. The package resource API expects resource names
to have their path parts separated with ``/``, *not* whatever the local
path separator is. Do not use os.path operations to manipulate resource
names being passed into the API.
The package resource API is designed to work with normal filesystem packages,
.egg files, and unpacked .egg files. It can also work in a limited way with
.zip files and with custom PEP 302 loaders that support the ``get_data()``
method.
"""
from __future__ import absolute_import
import sys
import os
import io
import time
import re
import types
import zipfile
import zipimport
import warnings
import stat
import functools
import pkgutil
import token
import symbol
import operator
import platform
import collections
import plistlib
import email.parser
import tempfile
import textwrap
from pkgutil import get_importer
try:
import _imp
except ImportError:
# Python 3.2 compatibility
import imp as _imp
PY3 = sys.version_info > (3,)
PY2 = not PY3
if PY3:
from urllib.parse import urlparse, urlunparse
if PY2:
from urlparse import urlparse, urlunparse
if PY3:
string_types = str,
else:
string_types = str, eval('unicode')
iteritems = (lambda i: i.items()) if PY3 else lambda i: i.iteritems()
# capture these to bypass sandboxing
from os import utime
try:
from os import mkdir, rename, unlink
WRITE_SUPPORT = True
except ImportError:
# no write support, probably under GAE
WRITE_SUPPORT = False
from os import open as os_open
from os.path import isdir, split
# Avoid try/except due to potential problems with delayed import mechanisms.
if sys.version_info >= (3, 3) and sys.implementation.name == "cpython":
import importlib.machinery as importlib_machinery
else:
importlib_machinery = None
try:
import parser
except ImportError:
pass
try:
import pkg_resources._vendor.packaging.version
import pkg_resources._vendor.packaging.specifiers
packaging = pkg_resources._vendor.packaging
except ImportError:
# fallback to naturally-installed version; allows system packagers to
# omit vendored packages.
import packaging.version
import packaging.specifiers
# declare some globals that will be defined later to
# satisfy the linters.
require = None
working_set = None
# Warning category for version strings/specifiers that do not comply
# with PEP 440.
class PEP440Warning(RuntimeWarning):
    """
    Used when there is an issue with a version or specifier not complying with
    PEP 440.
    """
class _SetuptoolsVersionMixin(object):
    """Backward-compatibility mixin for packaging-based version objects.

    Historical setuptools represented parsed versions as tuples of strings;
    this mixin lets the PEP 440 version classes still compare against such
    tuples, be indexed like them, and (with a deprecation warning) iterate
    as them.
    """
    def __hash__(self):
        return super(_SetuptoolsVersionMixin, self).__hash__()
    def __lt__(self, other):
        # Compare against legacy tuples directly; otherwise defer to the
        # underlying packaging version comparison.
        if isinstance(other, tuple):
            return tuple(self) < other
        else:
            return super(_SetuptoolsVersionMixin, self).__lt__(other)
    def __le__(self, other):
        if isinstance(other, tuple):
            return tuple(self) <= other
        else:
            return super(_SetuptoolsVersionMixin, self).__le__(other)
    def __eq__(self, other):
        if isinstance(other, tuple):
            return tuple(self) == other
        else:
            return super(_SetuptoolsVersionMixin, self).__eq__(other)
    def __ge__(self, other):
        if isinstance(other, tuple):
            return tuple(self) >= other
        else:
            return super(_SetuptoolsVersionMixin, self).__ge__(other)
    def __gt__(self, other):
        if isinstance(other, tuple):
            return tuple(self) > other
        else:
            return super(_SetuptoolsVersionMixin, self).__gt__(other)
    def __ne__(self, other):
        if isinstance(other, tuple):
            return tuple(self) != other
        else:
            return super(_SetuptoolsVersionMixin, self).__ne__(other)
    def __getitem__(self, key):
        # Indexing falls back to the legacy tuple representation.
        return tuple(self)[key]
    def __iter__(self):
        # Reproduces the pre-packaging tuple parsing, with a warning.
        component_re = re.compile(r'(\d+ | [a-z]+ | \.| -)', re.VERBOSE)
        replace = {
            'pre': 'c',
            'preview': 'c',
            '-': 'final-',
            'rc': 'c',
            'dev': '@',
        }.get
        def _parse_version_parts(s):
            for part in component_re.split(s):
                part = replace(part, part)
                if not part or part == '.':
                    continue
                if part[:1] in '0123456789':
                    # pad for numeric comparison
                    yield part.zfill(8)
                else:
                    yield '*'+part
            # ensure that alpha/beta/candidate are before final
            yield '*final'
        def old_parse_version(s):
            parts = []
            for part in _parse_version_parts(s.lower()):
                if part.startswith('*'):
                    # remove '-' before a prerelease tag
                    if part < '*final':
                        while parts and parts[-1] == '*final-':
                            parts.pop()
                    # remove trailing zeros from each series of numeric parts
                    while parts and parts[-1] == '00000000':
                        parts.pop()
                parts.append(part)
            return tuple(parts)
        # Warn for use of this function
        warnings.warn(
            "You have iterated over the result of "
            "pkg_resources.parse_version. This is a legacy behavior which is "
            "inconsistent with the new version class introduced in setuptools "
            "8.0. In most cases, conversion to a tuple is unnecessary. For "
            "comparison of versions, sort the Version instances directly. If "
            "you have another use case requiring the tuple, please file a "
            "bug with the setuptools project describing that need.",
            RuntimeWarning,
            stacklevel=1,
        )
        for part in old_parse_version(str(self)):
            yield part
# PEP 440 compliant version, extended with the legacy tuple behaviors.
class SetuptoolsVersion(_SetuptoolsVersionMixin, packaging.version.Version):
    pass
# Non-PEP 440 ("legacy") version, extended with the legacy tuple behaviors.
class SetuptoolsLegacyVersion(_SetuptoolsVersionMixin,
                              packaging.version.LegacyVersion):
    pass
def parse_version(v):
    """Parse `v` as a PEP 440 version, falling back to the legacy
    (pre-PEP 440) version class when the string is not a valid version."""
    try:
        return SetuptoolsVersion(v)
    except packaging.version.InvalidVersion:
        return SetuptoolsLegacyVersion(v)
# Registry mapping module-global names to the kind of snapshot/restore
# handler ('dict', 'object', 'none') used by __getstate__/__setstate__.
_state_vars = {}
def _declare_state(vartype, **kw):
    # Publish the given names as module globals and remember how to
    # snapshot/restore each of them.
    globals().update(kw)
    _state_vars.update(dict.fromkeys(kw, vartype))
def __getstate__():
    # Snapshot every registered module-level state variable.
    state = {}
    g = globals()
    for k, v in _state_vars.items():
        state[k] = g['_sget_'+v](g[k])
    return state
def __setstate__(state):
    # Restore a snapshot produced by __getstate__().
    g = globals()
    for k, v in state.items():
        g['_sset_'+_state_vars[k]](k, g[k], v)
    return state
def _sget_dict(val):
    # 'dict' getter: shallow copy.
    return val.copy()
def _sset_dict(key, ob, state):
    # 'dict' setter: replace contents in place, preserving object identity.
    ob.clear()
    ob.update(state)
def _sget_object(val):
    # 'object' getter: delegate to the object's own __getstate__.
    return val.__getstate__()
def _sset_object(key, ob, state):
    # 'object' setter: delegate to the object's own __setstate__.
    ob.__setstate__(state)
# 'none' kind: state is neither captured nor restored.
_sget_none = _sset_none = lambda *args: None
def get_supported_platform():
    """Return this platform's maximum compatible version.
    distutils.util.get_platform() normally reports the minimum version
    of Mac OS X that would be required to *use* extensions produced by
    distutils. But what we want when checking compatibility is to know the
    version of Mac OS X that we are *running*. To allow usage of packages that
    explicitly require a newer version of Mac OS X, we must also know the
    current version of the OS.
    If this condition occurs for any other platform with a version in its
    platform strings, this function should be extended accordingly.
    """
    plat = get_build_platform()
    m = macosVersionString.match(plat)
    if m is not None and sys.platform == "darwin":
        try:
            # substitute the *running* OS X version for the build version
            plat = 'macosx-%s-%s' % ('.'.join(_macosx_vers()[:2]), m.group(3))
        except ValueError:
            # not Mac OS X
            pass
    return plat
__all__ = [
# Basic resource access and distribution/entry point discovery
'require', 'run_script', 'get_provider', 'get_distribution',
'load_entry_point', 'get_entry_map', 'get_entry_info',
'iter_entry_points',
'resource_string', 'resource_stream', 'resource_filename',
'resource_listdir', 'resource_exists', 'resource_isdir',
# Environmental control
'declare_namespace', 'working_set', 'add_activation_listener',
'find_distributions', 'set_extraction_path', 'cleanup_resources',
'get_default_cache',
# Primary implementation classes
'Environment', 'WorkingSet', 'ResourceManager',
'Distribution', 'Requirement', 'EntryPoint',
# Exceptions
'ResolutionError', 'VersionConflict', 'DistributionNotFound',
'UnknownExtra', 'ExtractionError',
# Warnings
'PEP440Warning',
# Parsing functions and string utilities
'parse_requirements', 'parse_version', 'safe_name', 'safe_version',
'get_platform', 'compatible_platforms', 'yield_lines', 'split_sections',
'safe_extra', 'to_filename', 'invalid_marker', 'evaluate_marker',
# filesystem utilities
'ensure_directory', 'normalize_path',
# Distribution "precedence" constants
'EGG_DIST', 'BINARY_DIST', 'SOURCE_DIST', 'CHECKOUT_DIST', 'DEVELOP_DIST',
# "Provider" interfaces, implementations, and registration/lookup APIs
'IMetadataProvider', 'IResourceProvider', 'FileMetadata',
'PathMetadata', 'EggMetadata', 'EmptyProvider', 'empty_provider',
'NullProvider', 'EggProvider', 'DefaultProvider', 'ZipProvider',
'register_finder', 'register_namespace_handler', 'register_loader_type',
'fixup_namespace_packages', 'get_importer',
# Deprecated/backward compatibility only
'run_main', 'AvailableDistributions',
]
class ResolutionError(Exception):
    """Abstract base for dependency resolution errors"""
    def __repr__(self):
        # e.g. "VersionConflict(dist, req)" - class name plus raw args
        return self.__class__.__name__+repr(self.args)
class VersionConflict(ResolutionError):
    """
    An already-installed version conflicts with the requested version.
    Should be initialized with the installed Distribution and the requested
    Requirement.
    """
    _template = "{self.dist} is installed but {self.req} is required"
    @property
    def dist(self):
        # the installed Distribution (first constructor argument)
        return self.args[0]
    @property
    def req(self):
        # the requested Requirement (second constructor argument)
        return self.args[1]
    def report(self):
        # format(**locals()) makes `self` available to the template fields
        return self._template.format(**locals())
    def with_context(self, required_by):
        """
        If required_by is non-empty, return a version of self that is a
        ContextualVersionConflict.
        """
        if not required_by:
            return self
        args = self.args + (required_by,)
        return ContextualVersionConflict(*args)
class ContextualVersionConflict(VersionConflict):
    """
    A VersionConflict that accepts a third parameter, the set of the
    requirements that required the installed Distribution.
    """
    # extends the parent's message with the requirers of the distribution
    _template = VersionConflict._template + ' by {self.required_by}'
    @property
    def required_by(self):
        # the requirement set (third constructor argument)
        return self.args[2]
class DistributionNotFound(ResolutionError):
    """A requested distribution was not found"""
    _template = ("The '{self.req}' distribution was not found "
                 "and is required by {self.requirers_str}")
    @property
    def req(self):
        # the unsatisfied Requirement (first constructor argument)
        return self.args[0]
    @property
    def requirers(self):
        # who required it (second constructor argument); may be empty/None
        return self.args[1]
    @property
    def requirers_str(self):
        if not self.requirers:
            return 'the application'
        return ', '.join(self.requirers)
    def report(self):
        # format(**locals()) makes `self` available to the template fields
        return self._template.format(**locals())
    def __str__(self):
        return self.report()
class UnknownExtra(ResolutionError):
    """Distribution doesn't have an "extra feature" of the given name"""
# Registry of loader type -> provider factory, filled by
# register_loader_type() and consulted by get_provider().
_provider_factories = {}
# Major.minor of the running interpreter, e.g. "2.7" or "3.4".
PY_MAJOR = sys.version[:3]
# Distribution "precedence" constants; presumably higher values are
# preferred when several candidates exist -- confirm against Environment.
EGG_DIST = 3
BINARY_DIST = 2
SOURCE_DIST = 1
CHECKOUT_DIST = 0
DEVELOP_DIST = -1
def register_loader_type(loader_type, provider_factory):
    """Register `provider_factory` to make providers for `loader_type`
    `loader_type` is the type or class of a PEP 302 ``module.__loader__``,
    and `provider_factory` is a function that, passed a *module* object,
    returns an ``IResourceProvider`` for that module.
    """
    # registrations are looked up by get_provider() via _find_adapter()
    _provider_factories[loader_type] = provider_factory
def get_provider(moduleOrReq):
    """Return an IResourceProvider for the named module or requirement"""
    if isinstance(moduleOrReq, Requirement):
        # for a Requirement: resolve to an active distribution, activating
        # one via require() when it is not in the working set yet
        return working_set.find(moduleOrReq) or require(str(moduleOrReq))[0]
    try:
        module = sys.modules[moduleOrReq]
    except KeyError:
        # module not imported yet: import it, then fetch it from sys.modules
        __import__(moduleOrReq)
        module = sys.modules[moduleOrReq]
    loader = getattr(module, '__loader__', None)
    # pick the provider factory registered for this loader's type
    return _find_adapter(_provider_factories, loader)(module)
def _macosx_vers(_cache=[]):
    # Return the running OS X version as a list of string components,
    # e.g. ['10', '9', '5'].  The mutable default argument is a deliberate
    # memoization trick: the lookup runs only once per process.
    if not _cache:
        version = platform.mac_ver()[0]
        # fallback for MacPorts
        if version == '':
            plist = '/System/Library/CoreServices/SystemVersion.plist'
            if os.path.exists(plist):
                if hasattr(plistlib, 'readPlist'):
                    plist_content = plistlib.readPlist(plist)
                    if 'ProductVersion' in plist_content:
                        version = plist_content['ProductVersion']
        _cache.append(version.split('.'))
    return _cache[0]
def _macosx_arch(machine):
return {'PowerPC': 'ppc', 'Power_Macintosh': 'ppc'}.get(machine, machine)
def get_build_platform():
    """Return this platform's string for platform-specific distributions
    XXX Currently this is the same as ``distutils.util.get_platform()``, but it
    needs some hacks for Linux and Mac OS X.
    """
    try:
        # Python 2.7 or >=3.2
        from sysconfig import get_platform
    except ImportError:
        from distutils.util import get_platform
    plat = get_platform()
    if sys.platform == "darwin" and not plat.startswith('macosx-'):
        try:
            # rebuild a "macosx-X.Y-arch" tag from the running system
            version = _macosx_vers()
            machine = os.uname()[4].replace(" ", "_")
            return "macosx-%d.%d-%s" % (int(version[0]), int(version[1]),
                                        _macosx_arch(machine))
        except ValueError:
            # if someone is running a non-Mac darwin system, this will fall
            # through to the default implementation
            pass
    return plat
# Platform tags such as "macosx-10.9-x86_64" / "darwin-8.11.1-i386":
# groups are (major, minor, arch) and (major, minor, patch, arch).
macosVersionString = re.compile(r"macosx-(\d+)\.(\d+)-(.*)")
darwinVersionString = re.compile(r"darwin-(\d+)\.(\d+)\.(\d+)-(.*)")
# XXX backward compat
get_platform = get_build_platform
def compatible_platforms(provided, required):
    """Can code for the `provided` platform run on the `required` platform?
    Returns true if either platform is ``None``, or the platforms are equal.
    XXX Needs compatibility checks for Linux and other unixy OSes.
    """
    if provided is None or required is None or provided==required:
        # easy case
        return True
    # Mac OS X special cases
    reqMac = macosVersionString.match(required)
    if reqMac:
        provMac = macosVersionString.match(provided)
        # is this a Mac package?
        if not provMac:
            # this is backwards compatibility for packages built before
            # setuptools 0.6. All packages built after this point will
            # use the new macosx designation.
            provDarwin = darwinVersionString.match(provided)
            if provDarwin:
                dversion = int(provDarwin.group(1))
                macosversion = "%s.%s" % (reqMac.group(1), reqMac.group(2))
                # NOTE(review): lexicographic string comparison; e.g.
                # "10.10" < "10.4" as strings - verify for two-digit minors.
                if dversion == 7 and macosversion >= "10.3" or \
                        dversion == 8 and macosversion >= "10.4":
                    return True
            # egg isn't macosx or legacy darwin
            return False
        # are they the same major version and machine type?
        if provMac.group(1) != reqMac.group(1) or \
                provMac.group(3) != reqMac.group(3):
            return False
        # is the required OS major update >= the provided one?
        if int(provMac.group(2)) > int(reqMac.group(2)):
            return False
        return True
    # XXX Linux and other platforms' special cases should go here
    return False
def run_script(dist_spec, script_name):
    """Locate distribution `dist_spec` and run its `script_name` script"""
    # Run the script inside the *caller's* global namespace, emptied except
    # for __name__, so the script behaves as if executed directly.
    ns = sys._getframe(1).f_globals
    name = ns['__name__']
    ns.clear()
    ns['__name__'] = name
    require(dist_spec)[0].run_script(script_name, ns)
# backward compatibility
run_main = run_script
def get_distribution(dist):
    """Return a current distribution object for a Requirement or string"""
    # normalize progressively: string -> Requirement -> Distribution
    if isinstance(dist, string_types):
        dist = Requirement.parse(dist)
    if isinstance(dist, Requirement):
        dist = get_provider(dist)
    if not isinstance(dist, Distribution):
        raise TypeError("Expected string, Requirement, or Distribution", dist)
    return dist
# Thin module-level conveniences delegating to the Distribution for `dist`.
def load_entry_point(dist, group, name):
    """Return `name` entry point of `group` for `dist` or raise ImportError"""
    return get_distribution(dist).load_entry_point(group, name)
def get_entry_map(dist, group=None):
    """Return the entry point map for `group`, or the full entry map"""
    return get_distribution(dist).get_entry_map(group)
def get_entry_info(dist, group, name):
    """Return the EntryPoint object for `group`+`name`, or ``None``"""
    return get_distribution(dist).get_entry_info(group, name)
class IMetadataProvider:
    """Documentation-only interface for objects exposing distribution
    metadata.  The method stubs below intentionally omit ``self``: they are
    never called on this class itself, they only describe the signatures
    implementations must provide.
    """
    def has_metadata(name):
        """Does the package's distribution contain the named metadata?"""
    def get_metadata(name):
        """The named metadata resource as a string"""
    def get_metadata_lines(name):
        """Yield named metadata resource as list of non-blank non-comment lines
        Leading and trailing whitespace is stripped from each line, and lines
        with ``#`` as the first non-blank character are omitted."""
    def metadata_isdir(name):
        """Is the named metadata a directory? (like ``os.path.isdir()``)"""
    def metadata_listdir(name):
        """List of metadata names in the directory (like ``os.listdir()``)"""
    def run_script(script_name, namespace):
        """Execute the named script in the supplied namespace dictionary"""
class IResourceProvider(IMetadataProvider):
    """An object that provides access to package resources

    Documentation-only interface; like IMetadataProvider, the method stubs
    intentionally omit ``self`` and only describe required signatures.
    """
    def get_resource_filename(manager, resource_name):
        """Return a true filesystem path for `resource_name`
        `manager` must be an ``IResourceManager``"""
    def get_resource_stream(manager, resource_name):
        """Return a readable file-like object for `resource_name`
        `manager` must be an ``IResourceManager``"""
    def get_resource_string(manager, resource_name):
        """Return a string containing the contents of `resource_name`
        `manager` must be an ``IResourceManager``"""
    def has_resource(resource_name):
        """Does the package contain the named resource?"""
    def resource_isdir(resource_name):
        """Is the named resource a directory? (like ``os.path.isdir()``)"""
    def resource_listdir(resource_name):
        """List of resource names in the directory (like ``os.listdir()``)"""
class WorkingSet(object):
    """A collection of active distributions on sys.path (or a similar list)"""
    def __init__(self, entries=None):
        """Create working set from list of path entries (default=sys.path)"""
        # Ordered path entries; mirrors sys.path when built from it.
        self.entries = []
        # Maps a path entry -> list of distribution keys found on that entry.
        self.entry_keys = {}
        # Maps a project key -> the single *active* Distribution for it.
        self.by_key = {}
        # Subscriber callables, each invoked once per added distribution.
        self.callbacks = []
        if entries is None:
            entries = sys.path
        for entry in entries:
            self.add_entry(entry)
    @classmethod
    def _build_master(cls):
        """
        Prepare the master working set.
        """
        ws = cls()
        try:
            from __main__ import __requires__
        except ImportError:
            # The main program does not list any requirements
            return ws
        # ensure the requirements are met
        try:
            ws.require(__requires__)
        except VersionConflict:
            # sys.path as-is conflicts with __requires__; rebuild from the
            # requirements alone instead of the default path.
            return cls._build_from_requirements(__requires__)
        return ws
    @classmethod
    def _build_from_requirements(cls, req_spec):
        """
        Build a working set from a requirement spec. Rewrites sys.path.
        """
        # try it without defaults already on sys.path
        # by starting with an empty path
        ws = cls([])
        reqs = parse_requirements(req_spec)
        dists = ws.resolve(reqs, Environment())
        for dist in dists:
            ws.add(dist)
        # add any missing entries from sys.path
        for entry in sys.path:
            if entry not in ws.entries:
                ws.add_entry(entry)
        # then copy back to sys.path
        sys.path[:] = ws.entries
        return ws
    def add_entry(self, entry):
        """Add a path item to ``.entries``, finding any distributions on it
        ``find_distributions(entry, True)`` is used to find distributions
        corresponding to the path entry, and they are added.  `entry` is
        always appended to ``.entries``, even if it is already present.
        (This is because ``sys.path`` can contain the same value more than
        once, and the ``.entries`` of the ``sys.path`` WorkingSet should always
        equal ``sys.path``.)
        """
        self.entry_keys.setdefault(entry, [])
        self.entries.append(entry)
        for dist in find_distributions(entry, True):
            self.add(dist, entry, False)
    def __contains__(self, dist):
        """True if `dist` is the active distribution for its project"""
        return self.by_key.get(dist.key) == dist
    def find(self, req):
        """Find a distribution matching requirement `req`
        If there is an active distribution for the requested project, this
        returns it as long as it meets the version requirement specified by
        `req`.  But, if there is an active distribution for the project and it
        does *not* meet the `req` requirement, ``VersionConflict`` is raised.
        If there is no active distribution for the requested project, ``None``
        is returned.
        """
        dist = self.by_key.get(req.key)
        if dist is not None and dist not in req:
            # XXX add more info
            raise VersionConflict(dist, req)
        return dist
    def iter_entry_points(self, group, name=None):
        """Yield entry point objects from `group` matching `name`
        If `name` is None, yields all entry points in `group` from all
        distributions in the working set, otherwise only ones matching
        both `group` and `name` are yielded (in distribution order).
        """
        for dist in self:
            entries = dist.get_entry_map(group)
            if name is None:
                for ep in entries.values():
                    yield ep
            elif name in entries:
                yield entries[name]
    def run_script(self, requires, script_name):
        """Locate distribution for `requires` and run `script_name` script"""
        # NOTE: this clears the *caller's* global namespace (except __name__)
        # so the script executes as if it were the main program.
        ns = sys._getframe(1).f_globals
        name = ns['__name__']
        ns.clear()
        ns['__name__'] = name
        self.require(requires)[0].run_script(script_name, ns)
    def __iter__(self):
        """Yield distributions for non-duplicate projects in the working set
        The yield order is the order in which the items' path entries were
        added to the working set.
        """
        # `seen` de-duplicates projects that appear under multiple entries.
        seen = {}
        for item in self.entries:
            if item not in self.entry_keys:
                # workaround a cache issue
                continue
            for key in self.entry_keys[item]:
                if key not in seen:
                    seen[key]=1
                    yield self.by_key[key]
    def add(self, dist, entry=None, insert=True, replace=False):
        """Add `dist` to working set, associated with `entry`
        If `entry` is unspecified, it defaults to the ``.location`` of `dist`.
        On exit from this routine, `entry` is added to the end of the working
        set's ``.entries`` (if it wasn't already present).
        `dist` is only added to the working set if it's for a project that
        doesn't already have a distribution in the set, unless `replace=True`.
        If it's added, any callbacks registered with the ``subscribe()`` method
        will be called.
        """
        if insert:
            dist.insert_on(self.entries, entry)
        if entry is None:
            entry = dist.location
        # The dist is indexed both under the explicit entry and its location.
        keys = self.entry_keys.setdefault(entry,[])
        keys2 = self.entry_keys.setdefault(dist.location,[])
        if not replace and dist.key in self.by_key:
            # ignore hidden distros
            return
        self.by_key[dist.key] = dist
        if dist.key not in keys:
            keys.append(dist.key)
        if dist.key not in keys2:
            keys2.append(dist.key)
        # Notify subscribers registered via subscribe().
        self._added_new(dist)
    def resolve(self, requirements, env=None, installer=None,
            replace_conflicting=False):
        """List all distributions needed to (recursively) meet `requirements`
        `requirements` must be a sequence of ``Requirement`` objects.  `env`,
        if supplied, should be an ``Environment`` instance.  If
        not supplied, it defaults to all distributions available within any
        entry or distribution in the working set.  `installer`, if supplied,
        will be invoked with each requirement that cannot be met by an
        already-installed distribution; it should return a ``Distribution`` or
        ``None``.
        Unless `replace_conflicting=True`, raises a VersionConflict exception if
        any requirements are found on the path that have the correct name but
        the wrong version.  Otherwise, if an `installer` is supplied it will be
        invoked to obtain the correct version of the requirement and activate
        it.
        """
        # set up the stack
        requirements = list(requirements)[::-1]
        # set of processed requirements
        processed = {}
        # key -> dist
        best = {}
        to_activate = []
        # Mapping of requirement to set of distributions that required it;
        # useful for reporting info about conflicts.
        required_by = collections.defaultdict(set)
        while requirements:
            # process dependencies breadth-first
            req = requirements.pop(0)
            if req in processed:
                # Ignore cyclic or redundant dependencies
                continue
            dist = best.get(req.key)
            if dist is None:
                # Find the best distribution and add it to the map
                dist = self.by_key.get(req.key)
                if dist is None or (dist not in req and replace_conflicting):
                    ws = self
                    if env is None:
                        if dist is None:
                            env = Environment(self.entries)
                        else:
                            # Use an empty environment and workingset to avoid
                            # any further conflicts with the conflicting
                            # distribution
                            env = Environment([])
                            ws = WorkingSet([])
                    dist = best[req.key] = env.best_match(req, ws, installer)
                    if dist is None:
                        requirers = required_by.get(req, None)
                        raise DistributionNotFound(req, requirers)
                to_activate.append(dist)
            if dist not in req:
                # Oops, the "best" so far conflicts with a dependency
                dependent_req = required_by[req]
                raise VersionConflict(dist, req).with_context(dependent_req)
            # push the new requirements onto the stack
            new_requirements = dist.requires(req.extras)[::-1]
            requirements.extend(new_requirements)
            # Register the new requirements needed by req
            for new_requirement in new_requirements:
                required_by[new_requirement].add(req.project_name)
            processed[req] = True
        # return list of distros to activate
        return to_activate
    def find_plugins(self, plugin_env, full_env=None, installer=None,
            fallback=True):
        """Find all activatable distributions in `plugin_env`
        Example usage::
            distributions, errors = working_set.find_plugins(
                Environment(plugin_dirlist)
            )
            # add plugins+libs to sys.path
            map(working_set.add, distributions)
            # display errors
            print('Could not load', errors)
        The `plugin_env` should be an ``Environment`` instance that contains
        only distributions that are in the project's "plugin directory" or
        directories.  The `full_env`, if supplied, should be an ``Environment``
        contains all currently-available distributions.  If `full_env` is not
        supplied, one is created automatically from the ``WorkingSet`` this
        method is called on, which will typically mean that every directory on
        ``sys.path`` will be scanned for distributions.
        `installer` is a standard installer callback as used by the
        ``resolve()`` method.  The `fallback` flag indicates whether we should
        attempt to resolve older versions of a plugin if the newest version
        cannot be resolved.
        This method returns a 2-tuple: (`distributions`, `error_info`), where
        `distributions` is a list of the distributions found in `plugin_env`
        that were loadable, along with any other distributions that are needed
        to resolve their dependencies.  `error_info` is a dictionary mapping
        unloadable plugin distributions to an exception instance describing the
        error that occurred.  Usually this will be a ``DistributionNotFound`` or
        ``VersionConflict`` instance.
        """
        plugin_projects = list(plugin_env)
        # scan project names in alphabetic order
        plugin_projects.sort()
        error_info = {}
        distributions = {}
        if full_env is None:
            env = Environment(self.entries)
            env += plugin_env
        else:
            env = full_env + plugin_env
        # Resolve against a scratch copy so failures don't pollute `self`.
        shadow_set = self.__class__([])
        # put all our entries in shadow_set
        list(map(shadow_set.add, self))
        for project_name in plugin_projects:
            # Environment[project_name] is newest-to-oldest, so the first
            # version that resolves wins.
            for dist in plugin_env[project_name]:
                req = [dist.as_requirement()]
                try:
                    resolvees = shadow_set.resolve(req, env, installer)
                except ResolutionError as v:
                    # save error info
                    error_info[dist] = v
                    if fallback:
                        # try the next older version of project
                        continue
                    else:
                        # give up on this project, keep going
                        break
                else:
                    list(map(shadow_set.add, resolvees))
                    distributions.update(dict.fromkeys(resolvees))
                    # success, no need to try any more versions of this project
                    break
        distributions = list(distributions)
        distributions.sort()
        return distributions, error_info
    def require(self, *requirements):
        """Ensure that distributions matching `requirements` are activated
        `requirements` must be a string or a (possibly-nested) sequence
        thereof, specifying the distributions and versions required.  The
        return value is a sequence of the distributions that needed to be
        activated to fulfill the requirements; all relevant distributions are
        included, even if they were already activated in this working set.
        """
        needed = self.resolve(parse_requirements(requirements))
        for dist in needed:
            self.add(dist)
        return needed
    def subscribe(self, callback):
        """Invoke `callback` for all distributions (including existing ones)"""
        if callback in self.callbacks:
            return
        self.callbacks.append(callback)
        # Replay the callback for everything already in the set.
        for dist in self:
            callback(dist)
    def _added_new(self, dist):
        # Fan a newly added distribution out to all subscribers.
        for callback in self.callbacks:
            callback(dist)
    def __getstate__(self):
        # Pickle support: shallow-copy all mutable state.
        return (
            self.entries[:], self.entry_keys.copy(), self.by_key.copy(),
            self.callbacks[:]
        )
    def __setstate__(self, e_k_b_c):
        entries, keys, by_key, callbacks = e_k_b_c
        self.entries = entries[:]
        self.entry_keys = keys.copy()
        self.by_key = by_key.copy()
        self.callbacks = callbacks[:]
class Environment(object):
    """Searchable snapshot of distributions on a search path"""
    # NOTE: the defaults below are evaluated once, at class-definition time.
    def __init__(self, search_path=None, platform=get_supported_platform(),
            python=PY_MAJOR):
        """Snapshot distributions available on a search path
        Any distributions found on `search_path` are added to the environment.
        `search_path` should be a sequence of ``sys.path`` items.  If not
        supplied, ``sys.path`` is used.
        `platform` is an optional string specifying the name of the platform
        that platform-specific distributions must be compatible with.  If
        unspecified, it defaults to the current platform.  `python` is an
        optional string naming the desired version of Python (e.g. ``'3.3'``);
        it defaults to the current version.
        You may explicitly set `platform` (and/or `python`) to ``None`` if you
        wish to map *all* distributions, not just those compatible with the
        running platform or Python version.
        """
        # Maps lowercased project key -> list of distributions, newest first.
        self._distmap = {}
        self.platform = platform
        self.python = python
        self.scan(search_path)
    def can_add(self, dist):
        """Is distribution `dist` acceptable for this environment?
        The distribution must match the platform and python version
        requirements specified when this environment was created, or False
        is returned.
        """
        return (self.python is None or dist.py_version is None
            or dist.py_version==self.python) \
            and compatible_platforms(dist.platform, self.platform)
    def remove(self, dist):
        """Remove `dist` from the environment"""
        self._distmap[dist.key].remove(dist)
    def scan(self, search_path=None):
        """Scan `search_path` for distributions usable in this environment
        Any distributions found are added to the environment.
        `search_path` should be a sequence of ``sys.path`` items.  If not
        supplied, ``sys.path`` is used.  Only distributions conforming to
        the platform/python version defined at initialization are added.
        """
        if search_path is None:
            search_path = sys.path
        for item in search_path:
            for dist in find_distributions(item):
                self.add(dist)
    def __getitem__(self, project_name):
        """Return a newest-to-oldest list of distributions for `project_name`
        Uses case-insensitive `project_name` comparison, assuming all the
        project's distributions use their project's name converted to all
        lowercase as their key.
        """
        distribution_key = project_name.lower()
        return self._distmap.get(distribution_key, [])
    def add(self, dist):
        """Add `dist` if we ``can_add()`` it and it has not already been added
        """
        if self.can_add(dist) and dist.has_version():
            dists = self._distmap.setdefault(dist.key, [])
            if dist not in dists:
                dists.append(dist)
                # Keep each project's list sorted newest-first (see
                # __getitem__'s contract).
                dists.sort(key=operator.attrgetter('hashcmp'), reverse=True)
    def best_match(self, req, working_set, installer=None):
        """Find distribution best matching `req` and usable on `working_set`
        This calls the ``find(req)`` method of the `working_set` to see if a
        suitable distribution is already active.  (This may raise
        ``VersionConflict`` if an unsuitable version of the project is already
        active in the specified `working_set`.)  If a suitable distribution
        isn't active, this method returns the newest distribution in the
        environment that meets the ``Requirement`` in `req`.  If no suitable
        distribution is found, and `installer` is supplied, then the result of
        calling the environment's ``obtain(req, installer)`` method will be
        returned.
        """
        dist = working_set.find(req)
        if dist is not None:
            return dist
        # Newest-first iteration, so the first match is the best match.
        for dist in self[req.key]:
            if dist in req:
                return dist
        # try to download/install
        return self.obtain(req, installer)
    def obtain(self, requirement, installer=None):
        """Obtain a distribution matching `requirement` (e.g. via download)
        Obtain a distro that matches requirement (e.g. via download).  In the
        base ``Environment`` class, this routine just returns
        ``installer(requirement)``, unless `installer` is None, in which case
        None is returned instead.  This method is a hook that allows subclasses
        to attempt other ways of obtaining a distribution before falling back
        to the `installer` argument."""
        if installer is not None:
            return installer(requirement)
    def __iter__(self):
        """Yield the unique project names of the available distributions"""
        for key in self._distmap.keys():
            # Skip keys whose distribution lists have been emptied by remove().
            if self[key]:
                yield key
    def __iadd__(self, other):
        """In-place addition of a distribution or environment"""
        if isinstance(other, Distribution):
            self.add(other)
        elif isinstance(other, Environment):
            for project in other:
                for dist in other[project]:
                    self.add(dist)
        else:
            raise TypeError("Can't add %r to environment" % (other,))
        return self
    def __add__(self, other):
        """Add an environment or distribution to an environment"""
        # Start from an unrestricted environment so nothing is filtered out.
        new = self.__class__([], platform=None, python=None)
        for env in self, other:
            new += env
        return new
# XXX backward compatibility
# Old public name for Environment, retained for legacy importers.
AvailableDistributions = Environment
class ExtractionError(RuntimeError):
    """An error occurred extracting a resource
    The following attributes are available from instances of this exception:
    manager
        The resource manager that raised this exception
    cache_path
        The base directory for resource extraction
    original_error
        The exception instance that caused extraction to fail
    """
    # The attributes above are assigned by ResourceManager.extraction_error()
    # just before the exception is raised; see that method.
class ResourceManager:
    """Manage resource extraction and packages"""
    # Base directory for extraction; None means fall back to
    # get_default_cache() at extraction time.
    extraction_path = None
    def __init__(self):
        # Tracks every path handed out by get_cache_path(), so that
        # cleanup_resources() knows what may need removing.
        self.cached_files = {}
    def resource_exists(self, package_or_requirement, resource_name):
        """Does the named resource exist?"""
        return get_provider(package_or_requirement).has_resource(resource_name)
    def resource_isdir(self, package_or_requirement, resource_name):
        """Is the named resource an existing directory?"""
        return get_provider(package_or_requirement).resource_isdir(
            resource_name
        )
    def resource_filename(self, package_or_requirement, resource_name):
        """Return a true filesystem path for specified resource"""
        return get_provider(package_or_requirement).get_resource_filename(
            self, resource_name
        )
    def resource_stream(self, package_or_requirement, resource_name):
        """Return a readable file-like object for specified resource"""
        return get_provider(package_or_requirement).get_resource_stream(
            self, resource_name
        )
    def resource_string(self, package_or_requirement, resource_name):
        """Return specified resource as a string"""
        return get_provider(package_or_requirement).get_resource_string(
            self, resource_name
        )
    def resource_listdir(self, package_or_requirement, resource_name):
        """List the contents of the named resource directory"""
        return get_provider(package_or_requirement).resource_listdir(
            resource_name
        )
    def extraction_error(self):
        """Give an error message for problems extracting file(s)"""
        # Called from inside an ``except`` block: wrap the in-flight exception
        # in an ExtractionError that carries diagnostic context.
        old_exc = sys.exc_info()[1]
        cache_path = self.extraction_path or get_default_cache()
        err = ExtractionError("""Can't extract file(s) to egg cache
The following error occurred while trying to extract file(s) to the Python egg
cache:
  %s
The Python egg cache directory is currently set to:
  %s
Perhaps your account does not have write access to this directory? You can
change the cache directory by setting the PYTHON_EGG_CACHE environment
variable to point to an accessible directory.
""" % (old_exc, cache_path)
        )
        err.manager = self
        err.cache_path = cache_path
        err.original_error = old_exc
        raise err
    def get_cache_path(self, archive_name, names=()):
        """Return absolute location in cache for `archive_name` and `names`
        The parent directory of the resulting path will be created if it does
        not already exist.  `archive_name` should be the base filename of the
        enclosing egg (which may not be the name of the enclosing zipfile!),
        including its ".egg" extension.  `names`, if provided, should be a
        sequence of path name parts "under" the egg's extraction location.
        This method should only be called by resource providers that need to
        obtain an extraction location, and only for names they intend to
        extract, as it tracks the generated names for possible cleanup later.
        """
        extract_path = self.extraction_path or get_default_cache()
        target_path = os.path.join(extract_path, archive_name+'-tmp', *names)
        try:
            _bypass_ensure_directory(target_path)
        # NOTE(review): bare ``except`` — any failure here (even
        # KeyboardInterrupt) is converted into an ExtractionError.
        except:
            self.extraction_error()
        self._warn_unsafe_extraction_path(extract_path)
        self.cached_files[target_path] = 1
        return target_path
    @staticmethod
    def _warn_unsafe_extraction_path(path):
        """
        If the default extraction path is overridden and set to an insecure
        location, such as /tmp, it opens up an opportunity for an attacker to
        replace an extracted file with an unauthorized payload. Warn the user
        if a known insecure location is used.
        See Distribute #375 for more details.
        """
        if os.name == 'nt' and not path.startswith(os.environ['windir']):
            # On Windows, permissions are generally restrictive by default
            # and temp directories are not writable by other users, so
            # bypass the warning.
            return
        mode = os.stat(path).st_mode
        if mode & stat.S_IWOTH or mode & stat.S_IWGRP:
            msg = ("%s is writable by group/others and vulnerable to attack "
                "when "
                "used with get_resource_filename. Consider a more secure "
                "location (set with .set_extraction_path or the "
                "PYTHON_EGG_CACHE environment variable)." % path)
            warnings.warn(msg, UserWarning)
    def postprocess(self, tempname, filename):
        """Perform any platform-specific postprocessing of `tempname`
        This is where Mac header rewrites should be done; other platforms don't
        have anything special they should do.
        Resource providers should call this method ONLY after successfully
        extracting a compressed resource.  They must NOT call it on resources
        that are already in the filesystem.
        `tempname` is the current (temporary) name of the file, and `filename`
        is the name it will be renamed to by the caller after this routine
        returns.
        """
        if os.name == 'posix':
            # Make the resource executable
            mode = ((os.stat(tempname).st_mode) | 0o555) & 0o7777
            os.chmod(tempname, mode)
    def set_extraction_path(self, path):
        """Set the base path where resources will be extracted to, if needed.
        If you do not call this routine before any extractions take place, the
        path defaults to the return value of ``get_default_cache()``.  (Which
        is based on the ``PYTHON_EGG_CACHE`` environment variable, with various
        platform-specific fallbacks.  See that routine's documentation for more
        details.)
        Resources are extracted to subdirectories of this path based upon
        information given by the ``IResourceProvider``.  You may set this to a
        temporary directory, but then you must call ``cleanup_resources()`` to
        delete the extracted files when done.  There is no guarantee that
        ``cleanup_resources()`` will be able to remove all extracted files.
        (Note: you may not change the extraction path for a given resource
        manager once resources have been extracted, unless you first call
        ``cleanup_resources()``.)
        """
        if self.cached_files:
            raise ValueError(
                "Can't change extraction path, files already extracted"
            )
        self.extraction_path = path
    def cleanup_resources(self, force=False):
        """
        Delete all extracted resource files and directories, returning a list
        of the file and directory names that could not be successfully removed.
        This function does not have any concurrency protection, so it should
        generally only be called when the extraction path is a temporary
        directory exclusive to a single process.  This method is not
        automatically called; you must call it explicitly or register it as an
        ``atexit`` function if you wish to ensure cleanup of a temporary
        directory used for extractions.
        """
        # XXX
        # NOTE(review): intentionally unimplemented here (returns None);
        # callers should not rely on the documented return value.
def get_default_cache():
    """Determine the default cache location for egg extraction.

    Returns the ``PYTHON_EGG_CACHE`` environment variable, if set.
    Otherwise, on Windows, it returns a "Python-Eggs" subdirectory of the
    "Application Data" directory.  On all other systems, it's
    "~/.python-eggs".

    Raises RuntimeError on Windows when none of the candidate environment
    variables is set.
    """
    try:
        return os.environ['PYTHON_EGG_CACHE']
    except KeyError:
        pass
    if os.name != 'nt':
        return os.path.expanduser('~/.python-eggs')
    # XXX this may be locale-specific!
    app_data = 'Application Data'
    # Candidate (env-var tuple, subdir) pairs, in order of preference; every
    # variable in the tuple must be set for the candidate to be usable.
    app_homes = [
        # best option, should be locale-safe
        (('APPDATA',), None),
        (('USERPROFILE',), app_data),
        (('HOMEDRIVE', 'HOMEPATH'), app_data),
        (('HOMEPATH',), app_data),
        (('HOME',), None),
        # 95/98/ME
        (('WINDIR',), app_data),
    ]
    for keys, subdir in app_homes:
        dirname = ''
        for key in keys:
            if key in os.environ:
                dirname = os.path.join(dirname, os.environ[key])
            else:
                # A required variable is missing; try the next candidate.
                break
        else:
            # All variables for this candidate were present.
            if subdir:
                dirname = os.path.join(dirname, subdir)
            return os.path.join(dirname, 'Python-Eggs')
    else:
        # BUG FIX: the original message misspelled "environment" as
        # "enviroment".
        raise RuntimeError(
            "Please set the PYTHON_EGG_CACHE environment variable"
        )
def safe_name(name):
    """Normalize an arbitrary string into a standard distribution name.

    Every run of characters outside ``[A-Za-z0-9.]`` collapses to a
    single ``'-'``.
    """
    illegal_run = re.compile('[^A-Za-z0-9.]+')
    return illegal_run.sub('-', name)
def safe_version(version):
    """
    Normalize an arbitrary string into a standard version string.

    Valid versions are normalized via ``packaging``; anything else is
    sanitized by replacing spaces with dots and collapsing illegal runs
    to ``'-'``.
    """
    try:
        # normalize the version
        return str(packaging.version.Version(version))
    except packaging.version.InvalidVersion:
        pass
    dotted = version.replace(' ', '.')
    return re.sub('[^A-Za-z0-9.]+', '-', dotted)
def safe_extra(extra):
    """Normalize an arbitrary string into a standard 'extra' name.

    Runs of characters outside ``[A-Za-z0-9.]`` become a single ``'_'``
    and the result is lowercased.
    """
    underscored = re.sub('[^A-Za-z0-9.]+', '_', extra)
    return underscored.lower()
def to_filename(name):
    """Escape a project or version name for use in a filename.

    Currently this just maps every ``'-'`` to ``'_'``.
    """
    return '_'.join(name.split('-'))
class MarkerEvaluation(object):
    # Evaluates PEP 426 environment markers by walking the tuple tree
    # produced by the stdlib ``parser`` module (with a markerlib fallback
    # when ``parser`` is unavailable — see the class-body conditional below).
    # Maps marker variable names to zero-argument callables producing their
    # current values.
    values = {
        'os_name': lambda: os.name,
        'sys_platform': lambda: sys.platform,
        'python_full_version': platform.python_version,
        'python_version': lambda: platform.python_version()[:3],
        'platform_version': platform.version,
        'platform_machine': platform.machine,
        'python_implementation': platform.python_implementation,
    }
    @classmethod
    def is_invalid_marker(cls, text):
        """
        Validate text as a PEP 426 environment marker; return an exception
        if invalid or False otherwise.
        """
        try:
            cls.evaluate_marker(text)
        except SyntaxError as e:
            return cls.normalize_exception(e)
        return False
    @staticmethod
    def normalize_exception(exc):
        """
        Given a SyntaxError from a marker evaluation, normalize the error
        message:
        - Remove indications of filename and line number.
        - Replace platform-specific error messages with standard error
          messages.
        """
        subs = {
            'unexpected EOF while parsing': 'invalid syntax',
            'parenthesis is never closed': 'invalid syntax',
        }
        exc.filename = None
        exc.lineno = None
        exc.msg = subs.get(exc.msg, exc.msg)
        return exc
    @classmethod
    def and_test(cls, nodelist):
        # MUST NOT short-circuit evaluation, or invalid syntax can be skipped!
        items = [
            cls.interpret(nodelist[i])
            for i in range(1, len(nodelist), 2)
        ]
        return functools.reduce(operator.and_, items)
    @classmethod
    def test(cls, nodelist):
        # MUST NOT short-circuit evaluation, or invalid syntax can be skipped!
        items = [
            cls.interpret(nodelist[i])
            for i in range(1, len(nodelist), 2)
        ]
        return functools.reduce(operator.or_, items)
    @classmethod
    def atom(cls, nodelist):
        # Only parenthesized sub-expressions are allowed as atoms.
        t = nodelist[1][0]
        if t == token.LPAR:
            if nodelist[2][0] == token.RPAR:
                raise SyntaxError("Empty parentheses")
            return cls.interpret(nodelist[2])
        msg = "Language feature not supported in environment markers"
        raise SyntaxError(msg)
    @classmethod
    def comparison(cls, nodelist):
        # Evaluate a single binary comparison, e.g. ``x == 'y'``.
        if len(nodelist) > 4:
            msg = "Chained comparison not allowed in environment markers"
            raise SyntaxError(msg)
        comp = nodelist[2][1]
        cop = comp[1]
        if comp[0] == token.NAME:
            # Two-token operators: ``not in`` / ``is not``.
            if len(nodelist[2]) == 3:
                if cop == 'not':
                    cop = 'not in'
                else:
                    cop = 'is not'
        try:
            cop = cls.get_op(cop)
        except KeyError:
            msg = repr(cop) + " operator not allowed in environment markers"
            raise SyntaxError(msg)
        return cop(cls.evaluate(nodelist[1]), cls.evaluate(nodelist[3]))
    @classmethod
    def get_op(cls, op):
        # Dispatch table: grammar symbols -> handlers, operator strings ->
        # comparison callables.  KeyError for anything unsupported.
        ops = {
            symbol.test: cls.test,
            symbol.and_test: cls.and_test,
            symbol.atom: cls.atom,
            symbol.comparison: cls.comparison,
            'not in': lambda x, y: x not in y,
            'in': lambda x, y: x in y,
            '==': operator.eq,
            '!=': operator.ne,
            '<': operator.lt,
            '>': operator.gt,
            '<=': operator.le,
            '>=': operator.ge,
        }
        if hasattr(symbol, 'or_test'):
            ops[symbol.or_test] = cls.test
        return ops[op]
    @classmethod
    def evaluate_marker(cls, text, extra=None):
        """
        Evaluate a PEP 426 environment marker on CPython 2.4+.
        Return a boolean indicating the marker result in this environment.
        Raise SyntaxError if marker is invalid.
        This implementation uses the 'parser' module, which is not implemented
        on
        Jython and has been superseded by the 'ast' module in Python 2.6 and
        later.
        """
        return cls.interpret(parser.expr(text).totuple(1)[1])
    @classmethod
    def _markerlib_evaluate(cls, text):
        """
        Evaluate a PEP 426 environment marker using markerlib.
        Return a boolean indicating the marker result in this environment.
        Raise SyntaxError if marker is invalid.
        """
        import _markerlib
        # markerlib implements Metadata 1.2 (PEP 345) environment markers.
        # Translate the variables to Metadata 2.0 (PEP 426).
        env = _markerlib.default_environment()
        for key in env.keys():
            new_key = key.replace('.', '_')
            env[new_key] = env.pop(key)
        try:
            result = _markerlib.interpret(text, env)
        except NameError as e:
            raise SyntaxError(e.args[0])
        return result
    # This runs at class-creation time and checks the *module* namespace.
    if 'parser' not in globals():
        # Fall back to less-complete _markerlib implementation if 'parser' module
        # is not available.
        evaluate_marker = _markerlib_evaluate
    @classmethod
    def interpret(cls, nodelist):
        # Collapse single-child chains in the parse tuple before dispatching.
        while len(nodelist)==2: nodelist = nodelist[1]
        try:
            op = cls.get_op(nodelist[0])
        except KeyError:
            raise SyntaxError("Comparison or logical expression expected")
        return op(nodelist)
    @classmethod
    def evaluate(cls, nodelist):
        # Evaluate a terminal: a known marker variable or a plain string.
        while len(nodelist)==2: nodelist = nodelist[1]
        kind = nodelist[0]
        name = nodelist[1]
        if kind==token.NAME:
            try:
                op = cls.values[name]
            except KeyError:
                raise SyntaxError("Unknown name %r" % name)
            return op()
        if kind==token.STRING:
            s = nodelist[1]
            if not cls._safe_string(s):
                raise SyntaxError(
                    "Only plain strings allowed in environment markers")
            # Strip the surrounding quotes from the source token.
            return s[1:-1]
        msg = "Language feature not supported in environment markers"
        raise SyntaxError(msg)
    @staticmethod
    def _safe_string(cand):
        # Reject triple-quoted strings and any escape sequences.
        return (
            cand[:1] in "'\"" and
            not cand.startswith('"""') and
            not cand.startswith("'''") and
            '\\' not in cand
        )
# Module-level convenience aliases for the MarkerEvaluation classmethods.
invalid_marker = MarkerEvaluation.is_invalid_marker
evaluate_marker = MarkerEvaluation.evaluate_marker
class NullProvider:
    """Try to implement resources and metadata for arbitrary PEP 302 loaders"""
    # Subclasses (e.g. EggProvider) fill these in; None means "unknown".
    egg_name = None
    egg_info = None
    loader = None
    def __init__(self, module):
        self.loader = getattr(module, '__loader__', None)
        self.module_path = os.path.dirname(getattr(module, '__file__', ''))
    def get_resource_filename(self, manager, resource_name):
        return self._fn(self.module_path, resource_name)
    def get_resource_stream(self, manager, resource_name):
        # Wrap the raw bytes in an in-memory stream.
        return io.BytesIO(self.get_resource_string(manager, resource_name))
    def get_resource_string(self, manager, resource_name):
        return self._get(self._fn(self.module_path, resource_name))
    def has_resource(self, resource_name):
        return self._has(self._fn(self.module_path, resource_name))
    def has_metadata(self, name):
        return self.egg_info and self._has(self._fn(self.egg_info, name))
    # Python 2 returns the raw metadata; Python 3 decodes it as UTF-8.
    if sys.version_info <= (3,):
        def get_metadata(self, name):
            if not self.egg_info:
                return ""
            return self._get(self._fn(self.egg_info, name))
    else:
        def get_metadata(self, name):
            if not self.egg_info:
                return ""
            return self._get(self._fn(self.egg_info, name)).decode("utf-8")
    def get_metadata_lines(self, name):
        return yield_lines(self.get_metadata(name))
    def resource_isdir(self, resource_name):
        return self._isdir(self._fn(self.module_path, resource_name))
    def metadata_isdir(self, name):
        return self.egg_info and self._isdir(self._fn(self.egg_info, name))
    def resource_listdir(self, resource_name):
        return self._listdir(self._fn(self.module_path, resource_name))
    def metadata_listdir(self, name):
        if self.egg_info:
            return self._listdir(self._fn(self.egg_info, name))
        return []
    def run_script(self, script_name, namespace):
        # Scripts live under the distribution's metadata 'scripts/' prefix.
        script = 'scripts/'+script_name
        if not self.has_metadata(script):
            raise ResolutionError("No script named %r" % script_name)
        # Normalize all line endings to '\n' before compiling.
        script_text = self.get_metadata(script).replace('\r\n', '\n')
        script_text = script_text.replace('\r', '\n')
        script_filename = self._fn(self.egg_info, script)
        namespace['__file__'] = script_filename
        if os.path.exists(script_filename):
            # Real file on disk: compile from source so tracebacks work.
            source = open(script_filename).read()
            code = compile(source, script_filename, 'exec')
            exec(code, namespace, namespace)
        else:
            # No real file (e.g. zipped egg): seed linecache so tracebacks
            # can still show the script source.
            from linecache import cache
            cache[script_filename] = (
                len(script_text), 0, script_text.split('\n'), script_filename
            )
            script_code = compile(script_text, script_filename,'exec')
            exec(script_code, namespace, namespace)
    # The hooks below are the provider back-end; subclasses override them for
    # their storage (filesystem, zip, ...).  The base class cannot serve them.
    def _has(self, path):
        raise NotImplementedError(
            "Can't perform this operation for unregistered loader type"
        )
    def _isdir(self, path):
        raise NotImplementedError(
            "Can't perform this operation for unregistered loader type"
        )
    def _listdir(self, path):
        raise NotImplementedError(
            "Can't perform this operation for unregistered loader type"
        )
    def _fn(self, base, resource_name):
        # Join a '/'-separated resource name onto a base path.
        if resource_name:
            return os.path.join(base, *resource_name.split('/'))
        return base
    def _get(self, path):
        if hasattr(self.loader, 'get_data'):
            return self.loader.get_data(path)
        raise NotImplementedError(
            "Can't perform this operation for loaders without 'get_data()'"
        )
# Fallback registration: any loader type without a more specific provider
# gets the (mostly non-functional) NullProvider.
register_loader_type(object, NullProvider)
class EggProvider(NullProvider):
    """Provider based on a virtual filesystem"""
    def __init__(self, module):
        NullProvider.__init__(self, module)
        self._setup_prefix()
    def _setup_prefix(self):
        """Walk up module_path to find the containing '.egg' directory.

        Sets egg_name, egg_info and egg_root when an '.egg' ancestor
        is found; otherwise leaves the NullProvider defaults in place.
        """
        # we assume here that our metadata may be nested inside a "basket"
        # of multiple eggs; that's why we use module_path instead of .archive
        path = self.module_path
        old = None
        # os.path.split() eventually returns the same path, ending the loop.
        while path!=old:
            if path.lower().endswith('.egg'):
                self.egg_name = os.path.basename(path)
                self.egg_info = os.path.join(path, 'EGG-INFO')
                self.egg_root = path
                break
            old = path
            path, base = os.path.split(path)
class DefaultProvider(EggProvider):
    """Provides access to package resources in the filesystem"""
    def _has(self, path):
        return os.path.exists(path)
    def _isdir(self, path):
        return os.path.isdir(path)
    def _listdir(self, path):
        return os.listdir(path)
    def get_resource_stream(self, manager, resource_name):
        """Return a binary file object for the named resource."""
        return open(self._fn(self.module_path, resource_name), 'rb')
    def _get(self, path):
        # Read directly from the filesystem instead of loader.get_data().
        with open(path, 'rb') as stream:
            return stream.read()
# Plain filesystem modules (loader is None, or a SourceFileLoader on
# importlib-based Pythons) are served by DefaultProvider.
register_loader_type(type(None), DefaultProvider)
if importlib_machinery is not None:
    register_loader_type(importlib_machinery.SourceFileLoader, DefaultProvider)
class EmptyProvider(NullProvider):
    """Provider that returns nothing for all requests.

    Used as the shared fallback metadata provider for distributions
    that have no metadata at all.
    """
    # No backing module, so there is no module path.
    module_path = None

    def __init__(self):
        # Deliberately skip NullProvider.__init__ -- there is no module.
        pass

    # PEP 8 (E731): use def instead of lambdas assigned to names, so the
    # methods have useful __name__/repr in tracebacks.  Behavior unchanged.
    def _has(self, path):
        return False

    def _isdir(self, path):
        return False

    def _get(self, path):
        # NOTE: returns '' (text) where NullProvider._get returns bytes;
        # preserved as-is for backward compatibility.
        return ''

    def _listdir(self, path):
        return []
empty_provider = EmptyProvider()
class ZipManifests(dict):
    """
    zip manifest builder

    Maps archive path -> {subpath: ZipInfo} manifest dictionaries.
    """
    @classmethod
    def build(cls, path):
        """
        Build a dictionary similar to the zipimport directory
        caches, except instead of tuples, store ZipInfo objects.
        Use a platform-specific path separator (os.sep) for the path keys
        for compatibility with pypy on Windows.
        """
        with ContextualZipFile(path) as zfile:
            items = (
                (
                    name.replace('/', os.sep),
                    zfile.getinfo(name),
                )
                for name in zfile.namelist()
            )
            return dict(items)
    # The base class reloads the manifest on every call (no caching).
    load = build
class MemoizedZipManifests(ZipManifests):
    """Zip manifest cache keyed by normalized archive path."""
    # Cache record: (manifest dict, archive mtime at build time).
    manifest_mod = collections.namedtuple('manifest_mod', 'manifest mtime')

    def load(self, path):
        """Return the manifest for *path*, rebuilding when the archive changed."""
        key = os.path.normpath(path)
        current_mtime = os.stat(key).st_mtime
        cached = self.get(key)
        # A stale mtime invalidates the cached entry.
        if cached is None or cached.mtime != current_mtime:
            cached = self.manifest_mod(self.build(key), current_mtime)
            self[key] = cached
        return cached.manifest
class ContextualZipFile(zipfile.ZipFile):
    """ZipFile subclass that adds context-manager support on Python 2.6."""
    def __new__(cls, *args, **kwargs):
        """Return a plain ZipFile when it already supports the context
        protocol (Python 2.7+/3.x); otherwise build this wrapper."""
        if hasattr(zipfile.ZipFile, '__exit__'):
            return zipfile.ZipFile(*args, **kwargs)
        return super(ContextualZipFile, cls).__new__(cls)

    def __enter__(self):
        return self

    def __exit__(self, type, value, traceback):
        self.close()
class ZipProvider(EggProvider):
    """Resource support for zips and eggs"""
    # Cached list of resources that must be extracted eagerly; see
    # _get_eager_resources().
    eagers = None
    # Class-level manifest cache shared by all ZipProvider instances.
    _zip_manifests = MemoizedZipManifests()
    def __init__(self, module):
        EggProvider.__init__(self, module)
        # Virtual path prefix of the archive, ending in os.sep.
        self.zip_pre = self.loader.archive+os.sep
    def _zipinfo_name(self, fspath):
        # Convert a virtual filename (full path to file) into a zipfile subpath
        # usable with the zipimport directory cache for our target archive
        if fspath.startswith(self.zip_pre):
            return fspath[len(self.zip_pre):]
        raise AssertionError(
            "%s is not a subpath of %s" % (fspath, self.zip_pre)
        )
    def _parts(self, zip_path):
        # Convert a zipfile subpath into an egg-relative path part list.
        # pseudo-fs path
        fspath = self.zip_pre+zip_path
        if fspath.startswith(self.egg_root+os.sep):
            return fspath[len(self.egg_root)+1:].split(os.sep)
        raise AssertionError(
            "%s is not a subpath of %s" % (fspath, self.egg_root)
        )
    @property
    def zipinfo(self):
        # Manifest of this archive: {subpath: ZipInfo}.
        return self._zip_manifests.load(self.loader.archive)
    def get_resource_filename(self, manager, resource_name):
        """Extract the resource (and any eager siblings) and return its path."""
        if not self.egg_name:
            raise NotImplementedError(
                "resource_filename() only supported for .egg, not .zip"
            )
        # no need to lock for extraction, since we use temp names
        zip_path = self._resource_to_zip(resource_name)
        eagers = self._get_eager_resources()
        if '/'.join(self._parts(zip_path)) in eagers:
            for name in eagers:
                self._extract_resource(manager, self._eager_to_zip(name))
        return self._extract_resource(manager, zip_path)
    @staticmethod
    def _get_date_and_size(zip_stat):
        """Return (mtime timestamp, file size) for a ZipInfo entry."""
        size = zip_stat.file_size
        # ymdhms+wday, yday, dst
        date_time = zip_stat.date_time + (0, 0, -1)
        # 1980 offset already done
        timestamp = time.mktime(date_time)
        return timestamp, size
    def _extract_resource(self, manager, zip_path):
        """Extract `zip_path` (file or directory) into the extraction cache.

        Uses a temp file plus rename so concurrent extractors never see a
        partially written file; returns the real filesystem path.
        """
        if zip_path in self._index():
            # Directory entry: recursively extract children, return the dir.
            for name in self._index()[zip_path]:
                last = self._extract_resource(
                    manager, os.path.join(zip_path, name)
                )
            # return the extracted directory name
            return os.path.dirname(last)
        timestamp, size = self._get_date_and_size(self.zipinfo[zip_path])
        if not WRITE_SUPPORT:
            raise IOError('"os.rename" and "os.unlink" are not supported '
                          'on this platform')
        try:
            real_path = manager.get_cache_path(
                self.egg_name, self._parts(zip_path)
            )
            if self._is_current(real_path, zip_path):
                return real_path
            # Write to a temp name in the same directory, then rename into
            # place so the destination is always complete.
            outf, tmpnam = _mkstemp(".$extract", dir=os.path.dirname(real_path))
            os.write(outf, self.loader.get_data(zip_path))
            os.close(outf)
            # Stamp the archive's mtime so _is_current() can compare later.
            utime(tmpnam, (timestamp, timestamp))
            manager.postprocess(tmpnam, real_path)
            try:
                rename(tmpnam, real_path)
            except os.error:
                if os.path.isfile(real_path):
                    if self._is_current(real_path, zip_path):
                        # the file became current since it was checked above,
                        # so proceed.
                        return real_path
                    # Windows, del old file and retry
                    elif os.name=='nt':
                        unlink(real_path)
                        rename(tmpnam, real_path)
                        return real_path
                raise
        except os.error:
            # report a user-friendly error
            manager.extraction_error()
        return real_path
    def _is_current(self, file_path, zip_path):
        """
        Return True if the file_path is current for this zip_path
        """
        timestamp, size = self._get_date_and_size(self.zipinfo[zip_path])
        if not os.path.isfile(file_path):
            return False
        stat = os.stat(file_path)
        if stat.st_size!=size or stat.st_mtime!=timestamp:
            return False
        # check that the contents match
        zip_contents = self.loader.get_data(zip_path)
        with open(file_path, 'rb') as f:
            file_contents = f.read()
        return zip_contents == file_contents
    def _get_eager_resources(self):
        """Return (and cache) resource names listed in native_libs.txt /
        eager_resources.txt metadata, which must be extracted together."""
        if self.eagers is None:
            eagers = []
            for name in ('native_libs.txt', 'eager_resources.txt'):
                if self.has_metadata(name):
                    eagers.extend(self.get_metadata_lines(name))
            self.eagers = eagers
        return self.eagers
    def _index(self):
        """Return (and cache) a {directory subpath: [child names]} index
        built from the archive manifest."""
        try:
            return self._dirindex
        except AttributeError:
            ind = {}
            for path in self.zipinfo:
                parts = path.split(os.sep)
                # Register each ancestor directory exactly once.
                while parts:
                    parent = os.sep.join(parts[:-1])
                    if parent in ind:
                        ind[parent].append(parts[-1])
                        break
                    else:
                        ind[parent] = [parts.pop()]
            self._dirindex = ind
            return ind
    def _has(self, fspath):
        # Present either as a file in the manifest or a directory in the index.
        zip_path = self._zipinfo_name(fspath)
        return zip_path in self.zipinfo or zip_path in self._index()
    def _isdir(self, fspath):
        return self._zipinfo_name(fspath) in self._index()
    def _listdir(self, fspath):
        return list(self._index().get(self._zipinfo_name(fspath), ()))
    def _eager_to_zip(self, resource_name):
        # Eager resources are named relative to the egg root.
        return self._zipinfo_name(self._fn(self.egg_root, resource_name))
    def _resource_to_zip(self, resource_name):
        # Ordinary resources are named relative to the module path.
        return self._zipinfo_name(self._fn(self.module_path, resource_name))
register_loader_type(zipimport.zipimporter, ZipProvider)
class FileMetadata(EmptyProvider):
    """Metadata handler for standalone PKG-INFO files
    Usage::
        metadata = FileMetadata("/path/to/PKG-INFO")
    This provider rejects all data and metadata requests except for PKG-INFO,
    which is treated as existing, and will be the contents of the file at
    the provided location.
    """
    def __init__(self, path):
        # Filesystem path of the standalone PKG-INFO file.
        self.path = path
    def has_metadata(self, name):
        """Only the 'PKG-INFO' pseudo-name exists for this provider."""
        return name=='PKG-INFO'
    def get_metadata(self, name):
        """Return the PKG-INFO file contents; KeyError for any other name."""
        if name=='PKG-INFO':
            # NOTE(review): 'rU' (universal newlines) is Python-2-era mode and
            # was removed in Python 3.11 -- confirm target interpreters
            # before changing it.
            with open(self.path,'rU') as f:
                metadata = f.read()
            return metadata
        raise KeyError("No metadata except PKG-INFO is available")
    def get_metadata_lines(self, name):
        """Yield non-blank, non-comment lines of the metadata."""
        return yield_lines(self.get_metadata(name))
class PathMetadata(DefaultProvider):
    """Metadata provider for egg directories
    Usage::
        # Development eggs:
        egg_info = "/path/to/PackageName.egg-info"
        base_dir = os.path.dirname(egg_info)
        metadata = PathMetadata(base_dir, egg_info)
        dist_name = os.path.splitext(os.path.basename(egg_info))[0]
        dist = Distribution(basedir, project_name=dist_name, metadata=metadata)
        # Unpacked egg directories:
        egg_path = "/path/to/PackageName-ver-pyver-etc.egg"
        metadata = PathMetadata(egg_path, os.path.join(egg_path,'EGG-INFO'))
        dist = Distribution.from_filename(egg_path, metadata=metadata)
    """
    def __init__(self, path, egg_info):
        # Bypasses DefaultProvider/EggProvider __init__ (no module object):
        # the two directories are supplied explicitly instead.
        self.module_path = path
        self.egg_info = egg_info
class EggMetadata(ZipProvider):
    """Metadata provider for .egg files"""
    def __init__(self, importer):
        """Create a metadata provider from a zipimporter"""
        # Bypasses ZipProvider.__init__ (which requires a module object);
        # state is derived from the zipimporter directly.
        self.zip_pre = importer.archive+os.sep
        self.loader = importer
        if importer.prefix:
            # Importer rooted at a subdirectory inside the archive.
            self.module_path = os.path.join(importer.archive, importer.prefix)
        else:
            self.module_path = importer.archive
        # Locate egg_name/egg_info/egg_root (from EggProvider).
        self._setup_prefix()
_declare_state('dict', _distribution_finders = {})
def register_finder(importer_type, distribution_finder):
    """Register `distribution_finder` to find distributions in sys.path items
    `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item
    handler), and `distribution_finder` is a callable that, passed a path
    item and the importer instance, yields ``Distribution`` instances found on
    that path item. See ``pkg_resources.find_on_path`` for an example."""
    # Later lookups pick the most specific registered type via _find_adapter.
    _distribution_finders[importer_type] = distribution_finder
def find_distributions(path_item, only=False):
    """Yield distributions accessible via `path_item`"""
    importer = get_importer(path_item)
    # Dispatch to the finder registered for this importer's type.
    finder = _find_adapter(_distribution_finders, importer)
    return finder(importer, path_item, only)
def find_eggs_in_zip(importer, path_item, only=False):
    """
    Find eggs in zip files; possibly multiple nested eggs.

    With `only` set, yields just the distribution for `path_item` itself
    and skips any eggs nested inside it.
    """
    if importer.archive.endswith('.whl'):
        # wheels are not supported with this finder
        # they don't have PKG-INFO metadata, and won't ever contain eggs
        return
    metadata = EggMetadata(importer)
    if metadata.has_metadata('PKG-INFO'):
        yield Distribution.from_filename(path_item, metadata=metadata)
    if only:
        # don't yield nested distros
        return
    for subitem in metadata.resource_listdir('/'):
        if subitem.endswith('.egg'):
            # Recurse into eggs stored inside this archive.
            subpath = os.path.join(path_item, subitem)
            for dist in find_eggs_in_zip(zipimport.zipimporter(subpath), subpath):
                yield dist
register_finder(zipimport.zipimporter, find_eggs_in_zip)
def find_nothing(importer, path_item, only=False):
    """Finder for importer types that can never host distributions."""
    return tuple()
register_finder(object, find_nothing)
def find_on_path(importer, path_item, only=False):
    """Yield distributions accessible on a sys.path directory"""
    path_item = _normalize_cached(path_item)
    if os.path.isdir(path_item) and os.access(path_item, os.R_OK):
        if path_item.lower().endswith('.egg'):
            # unpacked egg
            yield Distribution.from_filename(
                path_item, metadata=PathMetadata(
                    path_item, os.path.join(path_item,'EGG-INFO')
                )
            )
        else:
            # scan for .egg and .egg-info in directory
            for entry in os.listdir(path_item):
                lower = entry.lower()
                if lower.endswith('.egg-info') or lower.endswith('.dist-info'):
                    fullpath = os.path.join(path_item, entry)
                    if os.path.isdir(fullpath):
                        # egg-info directory, allow getting metadata
                        metadata = PathMetadata(path_item, fullpath)
                    else:
                        # Single-file metadata (e.g. standalone PKG-INFO).
                        metadata = FileMetadata(fullpath)
                    yield Distribution.from_location(
                        path_item, entry, metadata, precedence=DEVELOP_DIST
                    )
                elif not only and lower.endswith('.egg'):
                    # Nested egg: delegate back through the finder registry.
                    dists = find_distributions(os.path.join(path_item, entry))
                    for dist in dists:
                        yield dist
                elif not only and lower.endswith('.egg-link'):
                    # .egg-link points at a development install; follow the
                    # first non-blank line only.
                    with open(os.path.join(path_item, entry)) as entry_file:
                        entry_lines = entry_file.readlines()
                    for line in entry_lines:
                        if not line.strip():
                            continue
                        path = os.path.join(path_item, line.rstrip())
                        dists = find_distributions(path)
                        for item in dists:
                            yield item
                        break
# Filesystem sys.path entries are scanned by find_on_path.
register_finder(pkgutil.ImpImporter, find_on_path)
if importlib_machinery is not None:
    register_finder(importlib_machinery.FileFinder, find_on_path)
# Registries for namespace-package support: importer type -> handler, and
# namespace package name -> list of child namespace packages.
_declare_state('dict', _namespace_handlers={})
_declare_state('dict', _namespace_packages={})
def register_namespace_handler(importer_type, namespace_handler):
    """Register `namespace_handler` to declare namespace packages
    `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item
    handler), and `namespace_handler` is a callable like this::
        def namespace_handler(importer, path_entry, moduleName, module):
            # return a path_entry to use for child packages
    Namespace handlers are only called if the importer object has already
    agreed that it can handle the relevant path item, and they should only
    return a subpath if the module __path__ does not already contain an
    equivalent subpath. For an example namespace handler, see
    ``pkg_resources.file_ns_handler``.
    """
    # Lookup later picks the most specific registered type via _find_adapter.
    _namespace_handlers[importer_type] = namespace_handler
def _handle_ns(packageName, path_item):
    """Ensure that named package includes a subpath of path_item (if needed)

    Returns the subpath added to the package __path__, or None when the
    path item is not applicable.
    """
    importer = get_importer(path_item)
    if importer is None:
        return None
    loader = importer.find_module(packageName)
    if loader is None:
        return None
    module = sys.modules.get(packageName)
    if module is None:
        # Create a fresh namespace module and attach it to its parent.
        module = sys.modules[packageName] = types.ModuleType(packageName)
        module.__path__ = []
        _set_parent_ns(packageName)
    elif not hasattr(module,'__path__'):
        raise TypeError("Not a package:", packageName)
    handler = _find_adapter(_namespace_handlers, importer)
    subpath = handler(importer, path_item, packageName, module)
    if subpath is not None:
        path = module.__path__
        path.append(subpath)
        # load_module may replace __path__; merge back anything it dropped.
        loader.load_module(packageName)
        for path_item in path:
            if path_item not in module.__path__:
                module.__path__.append(path_item)
    return subpath
def declare_namespace(packageName):
    """Declare that package 'packageName' is a namespace package"""
    # The import lock guards the _namespace_packages registry and the
    # recursive parent declaration below.
    _imp.acquire_lock()
    try:
        if packageName in _namespace_packages:
            return
        path, parent = sys.path, None
        if '.' in packageName:
            # Recursively declare the parent first, then search the
            # parent's __path__ instead of sys.path.
            parent = '.'.join(packageName.split('.')[:-1])
            declare_namespace(parent)
            if parent not in _namespace_packages:
                __import__(parent)
            try:
                path = sys.modules[parent].__path__
            except AttributeError:
                raise TypeError("Not a package:", parent)
        # Track what packages are namespaces, so when new path items are added,
        # they can be updated
        _namespace_packages.setdefault(parent,[]).append(packageName)
        _namespace_packages.setdefault(packageName,[])
        for path_item in path:
            # Ensure all the parent's path items are reflected in the child,
            # if they apply
            _handle_ns(packageName, path_item)
    finally:
        _imp.release_lock()
def fixup_namespace_packages(path_item, parent=None):
    """Ensure that previously-declared namespace packages include path_item"""
    _imp.acquire_lock()
    try:
        # Walk declared children of `parent`, recursing into any package
        # that gained a subpath from this path item.
        for package in _namespace_packages.get(parent,()):
            subpath = _handle_ns(package, path_item)
            if subpath:
                fixup_namespace_packages(subpath, package)
    finally:
        _imp.release_lock()
def file_ns_handler(importer, path_item, packageName, module):
    """Compute an ns-package subpath for a filesystem or zipfile importer"""
    subpath = os.path.join(path_item, packageName.split('.')[-1])
    normalized = _normalize_cached(subpath)
    # Only report the subpath when no equivalent entry is already present.
    already_present = any(
        _normalize_cached(entry) == normalized for entry in module.__path__
    )
    if not already_present:
        return subpath
# Filesystem and zip importers share the same namespace-subpath logic.
register_namespace_handler(pkgutil.ImpImporter, file_ns_handler)
register_namespace_handler(zipimport.zipimporter, file_ns_handler)
if importlib_machinery is not None:
    register_namespace_handler(importlib_machinery.FileFinder, file_ns_handler)
def null_ns_handler(importer, path_item, packageName, module):
    """Fallback handler: contribute no namespace subpath."""
    return None
register_namespace_handler(object, null_ns_handler)
def normalize_path(filename):
    """Return a canonical form of *filename* for equality comparison."""
    resolved = os.path.realpath(filename)
    return os.path.normcase(resolved)
def _normalize_cached(filename, _cache={}):
    """Memoizing wrapper around normalize_path (cache lives for the process).

    Note: the mutable default is intentional here -- it *is* the cache.
    """
    result = _cache.get(filename)
    if result is None:
        # normalize_path never returns None, so None reliably means "missing".
        result = _cache[filename] = normalize_path(filename)
    return result
def _set_parent_ns(packageName):
parts = packageName.split('.')
name = parts.pop()
if parts:
parent = '.'.join(parts)
setattr(sys.modules[parent], name, sys.modules[packageName])
def yield_lines(strs):
    """Yield non-empty/non-comment lines of a string or sequence"""
    if isinstance(strs, string_types):
        for s in strs.splitlines():
            s = s.strip()
            # skip blank lines/comments
            if s and not s.startswith('#'):
                yield s
    else:
        # A (possibly nested) iterable of strings: flatten recursively.
        for ss in strs:
            for s in yield_lines(ss):
                yield s
# whitespace and comment
LINE_END = re.compile(r"\s*(#.*)?$").match
# line continuation
CONTINUE = re.compile(r"\s*\\\s*(#.*)?$").match
# Distribution or extra
DISTRO = re.compile(r"\s*((\w|[-.])+)").match
# ver. info
VERSION = re.compile(r"\s*(<=?|>=?|===?|!=|~=)\s*((\w|[-.*_!+])+)").match
# comma between items
COMMA = re.compile(r"\s*,").match
OBRACKET = re.compile(r"\s*\[").match
CBRACKET = re.compile(r"\s*\]").match
MODULE = re.compile(r"\w+(\.\w+)*$").match
EGG_NAME = re.compile(
r"""
(?P<name>[^-]+) (
-(?P<ver>[^-]+) (
-py(?P<pyver>[^-]+) (
-(?P<plat>.+)
)?
)?
)?
""",
re.VERBOSE | re.IGNORECASE,
).match
class EntryPoint(object):
    """Object representing an advertised importable object"""
    def __init__(self, name, module_name, attrs=(), extras=(), dist=None):
        if not MODULE(module_name):
            raise ValueError("Invalid module name", module_name)
        self.name = name
        self.module_name = module_name
        # Dotted attribute path to resolve within the module.
        self.attrs = tuple(attrs)
        # Validate/normalize extras by round-tripping through a requirement.
        self.extras = Requirement.parse(("x[%s]" % ','.join(extras))).extras
        self.dist = dist
    def __str__(self):
        # Inverse of parse(): 'name = module:attrs [extras]'.
        s = "%s = %s" % (self.name, self.module_name)
        if self.attrs:
            s += ':' + '.'.join(self.attrs)
        if self.extras:
            s += ' [%s]' % ','.join(self.extras)
        return s
    def __repr__(self):
        return "EntryPoint.parse(%r)" % str(self)
    def load(self, require=True, *args, **kwargs):
        """
        Require packages for this EntryPoint, then resolve it.
        """
        if not require or args or kwargs:
            warnings.warn(
                "Parameters to load are deprecated. Call .resolve and "
                ".require separately.",
                DeprecationWarning,
                stacklevel=2,
            )
        if require:
            self.require(*args, **kwargs)
        return self.resolve()
    def resolve(self):
        """
        Resolve the entry point from its module and attrs.
        """
        module = __import__(self.module_name, fromlist=['__name__'], level=0)
        try:
            # Walk the dotted attrs path starting from the module object.
            return functools.reduce(getattr, self.attrs, module)
        except AttributeError as exc:
            raise ImportError(str(exc))
    def require(self, env=None, installer=None):
        """Resolve this entry point's distribution requirements (with its
        extras) and add them to the working set."""
        if self.extras and not self.dist:
            raise UnknownExtra("Can't require() without a distribution", self)
        reqs = self.dist.requires(self.extras)
        items = working_set.resolve(reqs, env, installer)
        list(map(working_set.add, items))
    # Grammar of a single entry-point line; groups feed parse() below.
    pattern = re.compile(
        r'\s*'
        r'(?P<name>.+?)\s*'
        r'=\s*'
        r'(?P<module>[\w.]+)\s*'
        r'(:\s*(?P<attr>[\w.]+))?\s*'
        r'(?P<extras>\[.*\])?\s*$'
    )
    @classmethod
    def parse(cls, src, dist=None):
        """Parse a single entry point from string `src`
        Entry point syntax follows the form::
            name = some.module:some.attr [extra1, extra2]
        The entry name and module name are required, but the ``:attrs`` and
        ``[extras]`` parts are optional
        """
        m = cls.pattern.match(src)
        if not m:
            msg = "EntryPoint must be in 'name=module:attrs [extras]' format"
            raise ValueError(msg, src)
        res = m.groupdict()
        extras = cls._parse_extras(res['extras'])
        attrs = res['attr'].split('.') if res['attr'] else ()
        return cls(res['name'], res['module'], attrs, extras, dist)
    @classmethod
    def _parse_extras(cls, extras_spec):
        """Validate a bracketed extras spec; reject version specifiers."""
        if not extras_spec:
            return ()
        req = Requirement.parse('x' + extras_spec)
        if req.specs:
            raise ValueError()
        return req.extras
    @classmethod
    def parse_group(cls, group, lines, dist=None):
        """Parse an entry point group"""
        if not MODULE(group):
            raise ValueError("Invalid group name", group)
        this = {}
        for line in yield_lines(lines):
            ep = cls.parse(line, dist)
            if ep.name in this:
                raise ValueError("Duplicate entry point", group, ep.name)
            this[ep.name]=ep
        return this
    @classmethod
    def parse_map(cls, data, dist=None):
        """Parse a map of entry point groups"""
        if isinstance(data, dict):
            data = data.items()
        else:
            # INI-style text: section headers name the groups.
            data = split_sections(data)
        maps = {}
        for group, lines in data:
            if group is None:
                if not lines:
                    continue
                raise ValueError("Entry points must be listed in groups")
            group = group.strip()
            if group in maps:
                raise ValueError("Duplicate group name", group)
            maps[group] = cls.parse_group(group, lines, dist)
        return maps
def _remove_md5_fragment(location):
if not location:
return ''
parsed = urlparse(location)
if parsed[-1].startswith('md5='):
return urlunparse(parsed[:-1] + ('',))
return location
class Distribution(object):
    """Wrap an actual or potential sys.path entry w/metadata"""
    # Name of the primary metadata file; DistInfoDistribution overrides it.
    PKG_INFO = 'PKG-INFO'
    def __init__(self, location=None, metadata=None, project_name=None,
            version=None, py_version=PY_MAJOR, platform=None,
            precedence=EGG_DIST):
        # Fall back to 'Unknown' so key/hashcmp always have a project name.
        self.project_name = safe_name(project_name or 'Unknown')
        if version is not None:
            self._version = safe_version(version)
        self.py_version = py_version
        self.platform = platform
        self.location = location
        self.precedence = precedence
        # Metadata provider; empty_provider serves "no metadata" requests.
        self._provider = metadata or empty_provider
    @classmethod
    def from_location(cls, location, basename, metadata=None,**kw):
        """Build a Distribution from a path entry and its basename, parsing
        project/version/pyver/platform out of an egg-style filename."""
        project_name, version, py_version, platform = [None]*4
        basename, ext = os.path.splitext(basename)
        if ext.lower() in _distributionImpl:
            # .dist-info gets much metadata differently
            match = EGG_NAME(basename)
            if match:
                project_name, version, py_version, platform = match.group(
                    'name','ver','pyver','plat'
                )
            cls = _distributionImpl[ext.lower()]
        return cls(
            location, metadata, project_name=project_name, version=version,
            py_version=py_version, platform=platform, **kw
        )
    @property
    def hashcmp(self):
        # Tuple driving __hash__ and all rich comparisons below.
        return (
            self.parsed_version,
            self.precedence,
            self.key,
            _remove_md5_fragment(self.location),
            self.py_version or '',
            self.platform or '',
        )
    def __hash__(self):
        return hash(self.hashcmp)
    def __lt__(self, other):
        return self.hashcmp < other.hashcmp
    def __le__(self, other):
        return self.hashcmp <= other.hashcmp
    def __gt__(self, other):
        return self.hashcmp > other.hashcmp
    def __ge__(self, other):
        return self.hashcmp >= other.hashcmp
    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            # It's not a Distribution, so they are not equal
            return False
        return self.hashcmp == other.hashcmp
    def __ne__(self, other):
        return not self == other
    # These properties have to be lazy so that we don't have to load any
    # metadata until/unless it's actually needed. (i.e., some distributions
    # may not know their name or version without loading PKG-INFO)
    @property
    def key(self):
        # Lowercased project name, cached on first access.
        try:
            return self._key
        except AttributeError:
            self._key = key = self.project_name.lower()
            return key
    @property
    def parsed_version(self):
        # Cached comparable version object built from self.version.
        if not hasattr(self, "_parsed_version"):
            self._parsed_version = parse_version(self.version)
        return self._parsed_version
    def _warn_legacy_version(self):
        """Warn once if the parsed version is a non-PEP 440 legacy version."""
        LV = packaging.version.LegacyVersion
        is_legacy = isinstance(self._parsed_version, LV)
        if not is_legacy:
            return
        # While an empty version is technically a legacy version and
        # is not a valid PEP 440 version, it's also unlikely to
        # actually come from someone and instead it is more likely that
        # it comes from setuptools attempting to parse a filename and
        # including it in the list. So for that we'll gate this warning
        # on if the version is anything at all or not.
        if not self.version:
            return
        tmpl = textwrap.dedent("""
            '{project_name} ({version})' is being parsed as a legacy,
            non PEP 440,
            version. You may find odd behavior and sort order.
            In particular it will be sorted as less than 0.0. It
            is recommended to migrate to PEP 440 compatible
            versions.
            """).strip().replace('\n', ' ')
        warnings.warn(tmpl.format(**vars(self)), PEP440Warning)
    @property
    def version(self):
        # Use the explicit version if given; otherwise read the
        # 'Version:' header from the metadata file.
        try:
            return self._version
        except AttributeError:
            for line in self._get_metadata(self.PKG_INFO):
                if line.lower().startswith('version:'):
                    self._version = safe_version(line.split(':',1)[1].strip())
                    return self._version
            else:
                tmpl = "Missing 'Version:' header and/or %s file"
                raise ValueError(tmpl % self.PKG_INFO, self)
    @property
    def _dep_map(self):
        # Cached mapping: extra name (or None for base) -> [Requirement].
        try:
            return self.__dep_map
        except AttributeError:
            dm = self.__dep_map = {None: []}
            for name in 'requires.txt', 'depends.txt':
                for extra, reqs in split_sections(self._get_metadata(name)):
                    if extra:
                        if ':' in extra:
                            # 'extra:marker' section -- drop reqs whose
                            # environment marker is invalid or false.
                            extra, marker = extra.split(':', 1)
                            if invalid_marker(marker):
                                # XXX warn
                                reqs=[]
                            elif not evaluate_marker(marker):
                                reqs=[]
                        extra = safe_extra(extra) or None
                    dm.setdefault(extra,[]).extend(parse_requirements(reqs))
            return dm
    def requires(self, extras=()):
        """List of Requirements needed for this distro if `extras` are used"""
        dm = self._dep_map
        deps = []
        deps.extend(dm.get(None, ()))
        for ext in extras:
            try:
                deps.extend(dm[safe_extra(ext)])
            except KeyError:
                raise UnknownExtra(
                    "%s has no such extra feature %r" % (self, ext)
                )
        return deps
    def _get_metadata(self, name):
        """Yield lines of the named metadata file (nothing if absent)."""
        if self.has_metadata(name):
            for line in self.get_metadata_lines(name):
                yield line
    def activate(self, path=None):
        """Ensure distribution is importable on `path` (default=sys.path)"""
        if path is None:
            path = sys.path
        self.insert_on(path)
        if path is sys.path:
            fixup_namespace_packages(self.location)
            for pkg in self._get_metadata('namespace_packages.txt'):
                if pkg in sys.modules:
                    declare_namespace(pkg)
    def egg_name(self):
        """Return what this distribution's standard .egg filename should be"""
        filename = "%s-%s-py%s" % (
            to_filename(self.project_name), to_filename(self.version),
            self.py_version or PY_MAJOR
        )
        if self.platform:
            filename += '-' + self.platform
        return filename
    def __repr__(self):
        if self.location:
            return "%s (%s)" % (self, self.location)
        else:
            return str(self)
    def __str__(self):
        try:
            version = getattr(self, 'version', None)
        except ValueError:
            # version property raises ValueError when metadata is missing.
            version = None
        version = version or "[unknown version]"
        return "%s %s" % (self.project_name, version)
    def __getattr__(self, attr):
        """Delegate all unrecognized public attributes to .metadata provider"""
        if attr.startswith('_'):
            raise AttributeError(attr)
        return getattr(self._provider, attr)
    @classmethod
    def from_filename(cls, filename, metadata=None, **kw):
        """Build a Distribution from a (normalized) filesystem path."""
        return cls.from_location(
            _normalize_cached(filename), os.path.basename(filename), metadata,
            **kw
        )
    def as_requirement(self):
        """Return a ``Requirement`` that matches this distribution exactly"""
        if isinstance(self.parsed_version, packaging.version.Version):
            spec = "%s==%s" % (self.project_name, self.parsed_version)
        else:
            # Legacy version: use '===' (arbitrary equality) per PEP 440.
            spec = "%s===%s" % (self.project_name, self.parsed_version)
        return Requirement.parse(spec)
    def load_entry_point(self, group, name):
        """Return the `name` entry point of `group` or raise ImportError"""
        ep = self.get_entry_info(group, name)
        if ep is None:
            raise ImportError("Entry point %r not found" % ((group, name),))
        return ep.load()
    def get_entry_map(self, group=None):
        """Return the entry point map for `group`, or the full entry map"""
        try:
            ep_map = self._ep_map
        except AttributeError:
            ep_map = self._ep_map = EntryPoint.parse_map(
                self._get_metadata('entry_points.txt'), self
            )
        if group is not None:
            return ep_map.get(group,{})
        return ep_map
    def get_entry_info(self, group, name):
        """Return the EntryPoint object for `group`+`name`, or ``None``"""
        return self.get_entry_map(group).get(name)
    def insert_on(self, path, loc = None):
        """Insert self.location in path before its nearest parent directory"""
        loc = loc or self.location
        if not loc:
            return
        nloc = _normalize_cached(loc)
        bdir = os.path.dirname(nloc)
        # Normalized shadow of `path` used for comparisons only.
        npath= [(p and _normalize_cached(p) or p) for p in path]
        for p, item in enumerate(npath):
            if item == nloc:
                break
            elif item == bdir and self.precedence == EGG_DIST:
                # if it's an .egg, give it precedence over its directory
                if path is sys.path:
                    self.check_version_conflict()
                path.insert(p, loc)
                npath.insert(p, nloc)
                break
        else:
            # Not found and no parent directory hit: append at the end.
            if path is sys.path:
                self.check_version_conflict()
            path.append(loc)
            return
        # p is the spot where we found or inserted loc; now remove duplicates
        while True:
            try:
                np = npath.index(nloc, p+1)
            except ValueError:
                break
            else:
                del npath[np], path[np]
                # ha!
                p = np
        return
    def check_version_conflict(self):
        """Warn if modules from this distribution were already imported
        from a different location."""
        if self.key == 'setuptools':
            # ignore the inevitable setuptools self-conflicts :(
            return
        nsp = dict.fromkeys(self._get_metadata('namespace_packages.txt'))
        loc = normalize_path(self.location)
        for modname in self._get_metadata('top_level.txt'):
            if (modname not in sys.modules or modname in nsp
                    or modname in _namespace_packages):
                continue
            if modname in ('pkg_resources', 'setuptools', 'site'):
                continue
            fn = getattr(sys.modules[modname], '__file__', None)
            if fn and (normalize_path(fn).startswith(loc) or
                       fn.startswith(self.location)):
                continue
            issue_warning(
                "Module %s was already imported from %s, but %s is being added"
                " to sys.path" % (modname, fn, self.location),
            )
    def has_version(self):
        """Return True if a version is determinable (warns otherwise)."""
        try:
            self.version
        except ValueError:
            issue_warning("Unbuilt egg for " + repr(self))
            return False
        return True
    def clone(self,**kw):
        """Copy this distribution, substituting in any changed keyword args"""
        names = 'project_name version py_version platform location precedence'
        for attr in names.split():
            kw.setdefault(attr, getattr(self, attr, None))
        kw.setdefault('metadata', self._provider)
        return self.__class__(**kw)
    @property
    def extras(self):
        # All named extras (excludes the None/base key of _dep_map).
        return [dep for dep in self._dep_map if dep]
class DistInfoDistribution(Distribution):
    """Wrap an actual or potential sys.path entry w/metadata, .dist-info style"""
    PKG_INFO = 'METADATA'
    # Matches bare versions inside parens/commas, e.g. '(1.0,' -> '(==1.0,'.
    EQEQ = re.compile(r"([\(,])\s*(\d.*?)\s*([,\)])")
    @property
    def _parsed_pkg_info(self):
        """Parse and cache metadata"""
        try:
            return self._pkg_info
        except AttributeError:
            # METADATA is RFC 822-style; parse with the email parser.
            metadata = self.get_metadata(self.PKG_INFO)
            self._pkg_info = email.parser.Parser().parsestr(metadata)
            return self._pkg_info
    @property
    def _dep_map(self):
        try:
            return self.__dep_map
        except AttributeError:
            self.__dep_map = self._compute_dependencies()
            return self.__dep_map
    def _preparse_requirement(self, requires_dist):
        """Convert 'Foobar (1); baz' to ('Foobar ==1', 'baz')
        Split environment marker, add == prefix to version specifiers as
        necessary, and remove parenthesis.
        """
        parts = requires_dist.split(';', 1) + ['']
        distvers = parts[0].strip()
        mark = parts[1].strip()
        distvers = re.sub(self.EQEQ, r"\1==\2\3", distvers)
        distvers = distvers.replace('(', '').replace(')', '')
        return (distvers, mark)
    def _compute_dependencies(self):
        """Recompute this distribution's dependencies."""
        # NOTE(review): _markerlib is the setuptools-bundled marker
        # evaluator of this era -- confirm availability before porting.
        from _markerlib import compile as compile_marker
        dm = self.__dep_map = {None: []}
        reqs = []
        # Including any condition expressions
        for req in self._parsed_pkg_info.get_all('Requires-Dist') or []:
            distvers, mark = self._preparse_requirement(req)
            parsed = next(parse_requirements(distvers))
            parsed.marker_fn = compile_marker(mark)
            reqs.append(parsed)
        def reqs_for_extra(extra):
            # Requirements whose marker is satisfied for this extra.
            for req in reqs:
                if req.marker_fn(override={'extra':extra}):
                    yield req
        common = frozenset(reqs_for_extra(None))
        dm[None].extend(common)
        for extra in self._parsed_pkg_info.get_all('Provides-Extra') or []:
            extra = safe_extra(extra.strip())
            # Only the requirements added beyond the common/base set.
            dm[extra] = list(frozenset(reqs_for_extra(extra)) - common)
        return dm
# Maps metadata directory/file extension to the Distribution class used.
_distributionImpl = {
    '.egg': Distribution,
    '.egg-info': Distribution,
    '.dist-info': DistInfoDistribution,
    }
def issue_warning(*args,**kw):
    """Emit a warning attributed to the first caller outside this module."""
    level = 1
    g = globals()
    try:
        # find the first stack frame that is *not* code in
        # the pkg_resources module, to use for the warning
        while sys._getframe(level).f_globals is g:
            level += 1
    except ValueError:
        # Ran off the top of the stack; use the last level reached.
        pass
    warnings.warn(stacklevel=level + 1, *args, **kw)
class RequirementParseError(ValueError):
    """Raised when a requirement string cannot be parsed."""
    def __str__(self):
        return ' '.join(self.args)
def parse_requirements(strs):
    """Yield ``Requirement`` objects for each specification in `strs`
    `strs` must be a string, or a (possibly-nested) iterable thereof.
    """
    # create a steppable iterator, so we can handle \-continuations
    lines = iter(yield_lines(strs))
    def scan_list(ITEM, TERMINATOR, line, p, groups, item_name):
        """Scan a comma-separated list of ITEM tokens starting at position
        `p`, ending at TERMINATOR; returns (line, new position, items)."""
        items = []
        while not TERMINATOR(line, p):
            if CONTINUE(line, p):
                # Backslash continuation: resume on the next input line.
                try:
                    line = next(lines)
                    p = 0
                except StopIteration:
                    msg = "\\ must not appear on the last nonblank line"
                    raise RequirementParseError(msg)
            match = ITEM(line, p)
            if not match:
                msg = "Expected " + item_name + " in"
                raise RequirementParseError(msg, line, "at", line[p:])
            items.append(match.group(*groups))
            p = match.end()
            match = COMMA(line, p)
            if match:
                # skip the comma
                p = match.end()
            elif not TERMINATOR(line, p):
                msg = "Expected ',' or end-of-list in"
                raise RequirementParseError(msg, line, "at", line[p:])
        match = TERMINATOR(line, p)
        # skip the terminator, if any
        if match:
            p = match.end()
        return line, p, items
    for line in lines:
        # Each requirement starts with a distribution name.
        match = DISTRO(line)
        if not match:
            raise RequirementParseError("Missing distribution spec", line)
        project_name = match.group(1)
        p = match.end()
        extras = []
        # Optional bracketed extras list.
        match = OBRACKET(line, p)
        if match:
            p = match.end()
            line, p, extras = scan_list(
                DISTRO, CBRACKET, line, p, (1,), "'extra' name"
            )
        # Optional version specifiers up to end-of-line.
        line, p, specs = scan_list(VERSION, LINE_END, line, p, (1, 2),
                                   "version spec")
        specs = [(op, val) for op, val in specs]
        yield Requirement(project_name, specs, extras)
class Requirement:
    """A parsed project requirement: name, version specifier set, extras."""

    def __init__(self, project_name, specs, extras):
        """DO NOT CALL THIS UNDOCUMENTED METHOD; use Requirement.parse()!"""
        self.unsafe_name = project_name
        project_name = safe_name(project_name)
        self.project_name = project_name
        self.key = project_name.lower()
        # Join the (op, version) pairs into a PEP 440 specifier set.
        self.specifier = packaging.specifiers.SpecifierSet(
            ",".join(op + version for op, version in specs)
        )
        self.specs = specs
        self.extras = tuple(safe_extra(extra) for extra in extras)
        # Tuple used for equality and hashing.
        self.hashCmp = (
            self.key,
            self.specifier,
            frozenset(self.extras),
        )
        self.__hash = hash(self.hashCmp)

    def __str__(self):
        joined = ','.join(self.extras)
        bracketed = '[%s]' % joined if joined else ''
        return '%s%s%s' % (self.project_name, bracketed, self.specifier)

    def __eq__(self, other):
        return isinstance(other, Requirement) and self.hashCmp == other.hashCmp

    def __ne__(self, other):
        return not self == other

    def __contains__(self, item):
        if isinstance(item, Distribution):
            if item.key != self.key:
                return False
            item = item.version
        # Allow prereleases always in order to match the previous behavior of
        # this method. In the future this should be smarter and follow PEP 440
        # more accurately.
        return self.specifier.contains(item, prereleases=True)

    def __hash__(self):
        return self.__hash

    def __repr__(self):
        return "Requirement.parse(%r)" % str(self)

    @staticmethod
    def parse(s):
        """Parse a single requirement string into a Requirement."""
        reqs = list(parse_requirements(s))
        if not reqs:
            raise ValueError("No requirements found", s)
        if len(reqs) > 1:
            raise ValueError("Expected only one requirement", s)
        return reqs[0]
def _get_mro(cls):
"""Get an mro for a type or classic class"""
if not isinstance(cls, type):
class cls(cls, object): pass
return cls.__mro__[1:]
return cls.__mro__
def _find_adapter(registry, ob):
    """Return an adapter factory for `ob` from `registry`"""
    # Walk the MRO of ob's class and return the first registered factory;
    # None when nothing in the MRO is registered.
    mro = _get_mro(getattr(ob, '__class__', type(ob)))
    return next((registry[t] for t in mro if t in registry), None)
def ensure_directory(path):
    """Ensure that the parent directory of `path` exists.

    Tolerates a concurrent creator: the original
    ``if not isdir: makedirs`` sequence raised ``OSError`` when another
    process created the directory between the check and the call (TOCTOU).
    """
    dirname = os.path.dirname(path)
    if not os.path.isdir(dirname):
        try:
            os.makedirs(dirname)
        except OSError:
            # Re-raise unless someone else created the directory meanwhile.
            if not os.path.isdir(dirname):
                raise
def _bypass_ensure_directory(path):
    """Sandbox-bypassing version of ensure_directory()

    ``split``, ``isdir`` and ``mkdir`` here are bare module-level names —
    presumably the saved, un-sandboxed originals bound elsewhere in this
    module; verify against the module preamble.
    """
    if not WRITE_SUPPORT:
        raise IOError('"os.mkdir" not supported on this platform.')
    dirname, filename = split(path)
    if dirname and filename and not isdir(dirname):
        # Create all missing ancestors first, then this parent directory.
        _bypass_ensure_directory(dirname)
        mkdir(dirname, 0o755)
def split_sections(s):
    """Split a string or iterable thereof into (section, content) pairs
    Each ``section`` is a stripped version of the section header ("[section]")
    and each ``content`` is a list of stripped lines excluding blank lines and
    comment-only lines. If there are any such lines before the first section
    header, they're returned in a first ``section`` of ``None``.
    """
    section = None
    content = []
    for line in yield_lines(s):
        if not line.startswith("["):
            # Ordinary content line for the current section.
            content.append(line)
            continue
        if not line.endswith("]"):
            raise ValueError("Invalid section heading", line)
        # Flush the previous segment before opening the new section.
        if section or content:
            yield section, content
        section = line[1:-1].strip()
        content = []
    # wrap up last segment
    yield section, content
def _mkstemp(*args,**kw):
    """Create a temp file via ``tempfile.mkstemp`` with sandboxing bypassed.

    ``os_open`` is presumably the original, un-sandboxed ``os.open`` saved
    elsewhere in this module — verify against the module preamble.
    """
    old_open = os.open
    try:
        # temporarily bypass sandboxing
        os.open = os_open
        return tempfile.mkstemp(*args,**kw)
    finally:
        # and then put it back
        os.open = old_open
# Silence the PEP440Warning by default, so that end users don't get hit by it
# randomly just because they use pkg_resources. We want to append the rule
# because we want earlier uses of filterwarnings to take precedence over this
# one.
warnings.filterwarnings("ignore", category=PEP440Warning, append=True)
# from jaraco.functools 1.3
def _call_aside(f, *args, **kwargs):
    """Call *f* immediately with the given arguments and return *f* itself.

    Used below as a decorator to run module-initialization functions at
    definition time while keeping the function object bound to its name.
    """
    f(*args, **kwargs)
    return f
@_call_aside
def _initialize(g=globals()):
    "Set up global resource manager (deliberately not state-saved)"
    manager = ResourceManager()
    g['_manager'] = manager
    # Re-export every public ResourceManager attribute at module level so
    # callers can use e.g. pkg_resources.resource_string(...) directly.
    for name in dir(manager):
        if not name.startswith('_'):
            g[name] = getattr(manager, name)
@_call_aside
def _initialize_master_working_set():
    """
    Prepare the master working set and make the ``require()``
    API available.
    This function has explicit effects on the global state
    of pkg_resources. It is intended to be invoked once at
    the initialization of this module.
    Invocation by other packages is unsupported and done
    at their own risk.
    """
    working_set = WorkingSet._build_master()
    _declare_state('object', working_set=working_set)
    # Convenience aliases exported to module level (via the
    # globals().update(locals()) call at the bottom).
    require = working_set.require
    iter_entry_points = working_set.iter_entry_points
    add_activation_listener = working_set.subscribe
    run_script = working_set.run_script
    # backward compatibility
    run_main = run_script
    # Activate all distributions already on sys.path, and ensure that
    # all distributions added to the working set in the future (e.g. by
    # calling ``require()``) will get activated as well.
    add_activation_listener(lambda dist: dist.activate())
    working_set.entries=[]
    # match order
    list(map(working_set.add_entry, sys.path))
    # Publish every local defined above as a pkg_resources module global.
    globals().update(locals())
| mpl-2.0 |
lexor90/node-compiler | node/deps/uv/gyp_uv.py | 22 | 2793 | #!/usr/bin/env python
import os
import platform
import sys
# Detect whether gyp can run its generators in parallel: that feature
# needs multiprocessing.synchronize, which some platforms lack.
try:
    import multiprocessing.synchronize
    gyp_parallel_support = True
except ImportError:
    gyp_parallel_support = False
CC = os.environ.get('CC', 'cc')
script_dir = os.path.dirname(__file__)
uv_root = os.path.normpath(script_dir)
output_dir = os.path.join(os.path.abspath(uv_root), 'out')
# Make the bundled gyp checkout importable.
sys.path.insert(0, os.path.join(uv_root, 'build', 'gyp', 'pylib'))
try:
    import gyp
except ImportError:
    print('You need to install gyp in build/gyp first. See the README.')
    sys.exit(42)
def host_arch():
    """Best-effort translation of platform.machine() to GYP arch names."""
    machine = platform.machine()
    exact = {'i386': 'ia32', 'AMD64': 'x64', 'x86_64': 'x64'}
    if machine in exact:
        return exact[machine]
    for prefix, arch in (('arm', 'arm'), ('mips', 'mips')):
        if machine.startswith(prefix):
            return arch
    return machine  # Return as-is and hope for the best.
def run_gyp(args):
    """Invoke GYP with *args*; terminate the process on failure."""
    status = gyp.main(args)
    if status != 0:
        print('Error running GYP')
        sys.exit(status)
if __name__ == '__main__':
    args = sys.argv[1:]
    # GYP bug.
    # On msvs it will crash if it gets an absolute path.
    # On Mac/make it will crash if it doesn't get an absolute path.
    if sys.platform == 'win32':
        args.append(os.path.join(uv_root, 'uv.gyp'))
        common_fn = os.path.join(uv_root, 'common.gypi')
        options_fn = os.path.join(uv_root, 'options.gypi')
        # we force vs 2010 over 2008 which would otherwise be the default for gyp
        if not os.environ.get('GYP_MSVS_VERSION'):
            os.environ['GYP_MSVS_VERSION'] = '2010'
    else:
        args.append(os.path.join(os.path.abspath(uv_root), 'uv.gyp'))
        common_fn = os.path.join(os.path.abspath(uv_root), 'common.gypi')
        options_fn = os.path.join(os.path.abspath(uv_root), 'options.gypi')
    # Include the optional .gypi fragments when present.
    if os.path.exists(common_fn):
        args.extend(['-I', common_fn])
    if os.path.exists(options_fn):
        args.extend(['-I', options_fn])
    args.append('--depth=' + uv_root)
    # There's a bug with windows which doesn't allow this feature.
    if sys.platform != 'win32':
        # Default to the make generator and route output into out/.
        if '-f' not in args:
            args.extend('-f make'.split())
        if 'eclipse' not in args and 'ninja' not in args:
            args.extend(['-Goutput_dir=' + output_dir])
            args.extend(['--generator-output', output_dir])
    # Supply arch/library defaults only when the caller did not.
    if not any(a.startswith('-Dhost_arch=') for a in args):
        args.append('-Dhost_arch=%s' % host_arch())
    if not any(a.startswith('-Dtarget_arch=') for a in args):
        args.append('-Dtarget_arch=%s' % host_arch())
    if not any(a.startswith('-Duv_library=') for a in args):
        args.append('-Duv_library=static_library')
    # Some platforms (OpenBSD for example) don't have multiprocessing.synchronize
    # so gyp must be run with --no-parallel
    if not gyp_parallel_support:
        args.append('--no-parallel')
    gyp_args = list(args)
    print(gyp_args)
    run_gyp(gyp_args)
| mit |
adrienpacifico/openfisca-france-data | openfisca_france_data/input_data_builders/build_openfisca_survey_data/step_06_rebuild.py | 2 | 38058 | #! /usr/bin/env python
# -*- coding: utf-8 -*-
# OpenFisca -- A versatile microsimulation software
# By: OpenFisca Team <contact@openfisca.fr>
#
# Copyright (C) 2011, 2012, 2013, 2014, 2015 OpenFisca Team
# https://github.com/openfisca
#
# This file is part of OpenFisca.
#
# OpenFisca is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# OpenFisca is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import gc
import logging
from pandas import Series, concat
import numpy as np
from numpy import where
from openfisca_france_data.temporary import temporary_store_decorator
from openfisca_france_data import default_config_files_directory as config_files_directory
from openfisca_france_data.input_data_builders.build_openfisca_survey_data.base import (
year_specific_by_generic_data_frame_name
)
from openfisca_france_data.input_data_builders.build_openfisca_survey_data.utils import print_id, control
from openfisca_survey_manager.survey_collections import SurveyCollection
log = logging.getLogger(__name__)
@temporary_store_decorator(config_files_directory = config_files_directory, file_name = 'erfs')
def create_totals_first_pass(temporary_store = None, year = None):
    """Attach every ERFS individual to a tax unit (idfoy/quifoy) and a
    household position (quimen), then persist ``indivi_step_06_{year}``.

    Starts from the ``indivim_{year}`` table in ``temporary_store``,
    creates tax units for individuals with imputed income (FIP), repairs
    invalid/missing idfoy case by case (partners, dependants, adult
    children, flat-shares, orphan records), appends 19-25 year old FIPs
    from ``fipDat_{year}``, and finally stores the result.
    """
    assert temporary_store is not None
    assert year is not None
    # Start from the ERFS individual table and rename its variables.
    log.info(u"Creating Totals")
    log.info(u"Etape 1 : Chargement des données")
    indivim = temporary_store['indivim_{}'.format(year)]
    assert not indivim.duplicated(['noindiv']).any(), "Présence de doublons"
    # Deals individuals with imputed income : some individuals are in 'erf individu table' but
    # not in the 'foyer' table. We need to create a foyer for them.
    # An individual has imputed income when any z..i column differs from
    # its z..o counterpart.
    selection = Series()
    for var_i in ["zsali", "zchoi", "zrsti", "zalri", "zrtoi", "zragi", "zrici", "zrnci"]:
        var_o = var_i[:-1] + "o"
        test = indivim[var_i] != indivim[var_o]
        if selection.empty:
            selection = test
        else:
            selection = test | selection
    indivi_i = indivim[selection].copy()
    indivi_i.rename(
        columns = {
            "ident": "idmen",
            "persfip": "quifoy",
            "zsali": "sali",  # Includes the non-taxable wages of insurance agents
            "zchoi": "choi",
            "zrsti": "rsti",
            "zalri": "alr"
            },
        inplace = True,
        )
    assert indivi_i.quifoy.notnull().all()
    indivi_i.loc[indivi_i.quifoy == "", "quifoy"] = "vous"
    # NOTE(review): attribute-style assignment; works as a column update
    # only if 'quelfic' already exists as a column — confirm upstream.
    indivi_i.quelfic = "FIP_IMP"
    # We merge them with the other individuals
    indivim.rename(
        columns = dict(
            ident = "idmen",
            persfip = "quifoy",
            zsali = "sali",  # Includes the non-taxable wages of insurance agents
            zchoi = "choi",
            zrsti = "rsti",
            zalri = "alr",
            ),
        inplace = True,
        )
    if not (set(list(indivim.noindiv)) > set(list(indivi_i.noindiv))):
        raise Exception("Individual ")
    indivim.set_index("noindiv", inplace = True, verify_integrity = True)
    indivi_i.set_index("noindiv", inplace = True, verify_integrity = True)
    indivi = indivim
    del indivim
    # Overwrite the imputed individuals' rows with their FIP version.
    indivi.update(indivi_i)
    indivi.reset_index(inplace = True)
    assert not(indivi.noindiv.duplicated().any()), "Doublons"
    log.info("Etape 2 : isolation des FIP")
    fip_imp = indivi.quelfic == "FIP_IMP"
    # Provisional idfoy: household id * 100 + the 2-digit tax-return number.
    indivi["idfoy"] = (
        indivi.idmen.astype('int') * 100 +
        (indivi.declar1.str[0:2]).convert_objects(convert_numeric=True)
        )
    # indivi.loc[fip_imp, "idfoy"] = np.nan
    # Some FIPs (or at least those with imputed income) have a tax-return
    # number (why?)
    assert indivi_i.declar1.notnull().all()
    assert (indivi_i.declar1 == "").sum() > 0
    fip_has_declar = (fip_imp) & (indivi.declar1 != "")
    indivi.loc[fip_has_declar, "idfoy"] = (
        indivi.idmen * 100 + indivi.declar1.str[0:2].convert_objects(convert_numeric = True))
    del fip_has_declar
    fip_no_declar = (fip_imp) & (indivi.declar1 == "")
    del fip_imp
    indivi.loc[fip_no_declar, "idfoy"] = 100 * indivi.loc[fip_no_declar, "idmen"] + indivi.loc[fip_no_declar, "noi"]
    # WAS indivi["idmen"] * 100 + 50
    indivi_fnd = indivi.loc[fip_no_declar, ["idfoy", "noindiv"]].copy()
    # Bump duplicated idfoy values until all are unique.
    while any(indivi_fnd.duplicated(subset = ["idfoy"])):
        indivi_fnd["idfoy"] = where(
            indivi_fnd.duplicated(subset = ["idfoy"]),
            indivi_fnd["idfoy"] + 1,
            indivi_fnd["idfoy"]
            )
    # assert indivi_fnd["idfoy"].duplicated().value_counts()[False] == len(indivi_fnd["idfoy"].values), \
    #    "Duplicates remaining"
    assert not(indivi.noindiv.duplicated().any()), "Doublons"
    # NOTE(review): chained assignment (.idfoy.loc[...] = ...) — may not
    # write through on some pandas versions; confirm behaviour.
    indivi.idfoy.loc[fip_no_declar] = indivi_fnd.idfoy.copy()
    del indivi_fnd, fip_no_declar
    log.info(u"Etape 3 : Récupération des EE_NRT")
    # Non-filers (EE_NRT) each get their own tax unit.
    nrt = indivi.quelfic == "EE_NRT"
    indivi.loc[nrt, 'idfoy'] = indivi.loc[nrt, 'idmen'] * 100 + indivi.loc[nrt, 'noi']
    indivi.loc[nrt, 'quifoy'] = "vous"
    del nrt
    # lpr == 1: reference person; lpr == 2: his/her partner.
    pref_or_cref = indivi.lpr.isin([1, 2])
    adults = (indivi.quelfic.isin(["EE", "EE_CAF"])) & (pref_or_cref)
    pref = adults & (indivi.lpr == 1)
    cref = adults & (indivi.lpr == 2)
    indivi.loc[adults, "idfoy"] = indivi.loc[adults, 'idmen'] * 100 + indivi.loc[adults, 'noiprm']
    indivi.loc[pref, "quifoy"] = "vous"
    indivi.loc[cref, "quifoy"] = "conj"
    del adults, pref, cref
    assert indivi.idfoy[indivi.lpr.dropna().isin([1, 2])].all()
    # A valid idfoy is one that some 'vous' declarant carries.
    idfoyList = indivi.loc[indivi.quifoy == "vous", 'idfoy'].unique()
    indivi_without_idfoy = ~indivi.idfoy.isin(idfoyList)
    log.info(u"Il reste {} idfoy problématiques".format(
        indivi_without_idfoy.sum()
        ))
    # Partners ('conj') for whom no 'vous' exists with their idfoy.
    # Double-filing problem: if their partner has a noindiv matching their
    # idfoy we switch the declarants.
    log.info("{} conj without a valid idfoy".format(
        (indivi_without_idfoy & indivi.idfoy.notnull() & indivi.quifoy.isin(['conj'])).sum()
        ))
    if (indivi_without_idfoy & indivi.idfoy.notnull() & indivi.quifoy.isin(['conj'])).any():
        # Handle people with quifoy == 'conj' whose idfoy has no 'vous'.
        # 1) if they have a partner who is 'vous' with a valid idfoy,
        #    give them that idfoy:
        avec_conjoint = (
            indivi_without_idfoy &
            indivi.idfoy.notnull() &
            indivi.quifoy.isin(['conj']) &
            (indivi.noicon != 0) &
            (100 * indivi.idmen + indivi.noicon).isin(idfoyList)
            )
        indivi.loc[avec_conjoint, 'idfoy'] = (
            100 * indivi.loc[avec_conjoint, 'idmen'] + indivi.loc[avec_conjoint, 'noicon']
            )
        indivi_without_idfoy = ~indivi.idfoy.isin(idfoyList)  # refresh the problematic cases
        del avec_conjoint
    if (indivi_without_idfoy & indivi.idfoy.notnull() & indivi.quifoy.isin(['conj'])).any():
        # 2) otherwise they become 'vous'
        devient_vous = (
            indivi_without_idfoy &
            indivi.idfoy.notnull() &
            indivi.quifoy.isin(['conj']) &
            (indivi.noicon == 0)
            )
        indivi.loc[devient_vous, 'idfoy'] = indivi.loc[devient_vous, 'noindiv'].copy()
        indivi.loc[devient_vous, 'quifoy'] = 'vous'
        idfoyList = indivi.loc[indivi.quifoy == "vous", 'idfoy'].unique()
        indivi_without_idfoy = ~indivi.idfoy.isin(idfoyList)  # refresh the problematic cases
        del devient_vous
    problem = (indivi_without_idfoy & indivi.idfoy.notnull() & indivi.quifoy.isin(['conj']))
    if problem.sum() > 0:
        # Unresolvable 'conj' rows are dropped.
        log.info("Dropping {} conj without valid idfoy".format(
            problem.sum()
            ))
        indivi.drop(indivi[problem].index, inplace = True)
        indivi_without_idfoy = ~indivi.idfoy.isin(idfoyList)  # refresh the problematic cases
        problem = (indivi_without_idfoy & indivi.idfoy.notnull() & indivi.quifoy.isin(['conj']))
        assert not problem.any()
    idfoyList = indivi.loc[indivi.quifoy == "vous", 'idfoy'].unique()
    indivi_without_idfoy = ~indivi.idfoy.isin(idfoyList)
    log.info("Remainning {} non valid idfoy".format(
        indivi_without_idfoy.sum()
        ))
    # Dependants ('pac') of people flagged as 'conj' (double-filing
    # problem, idfoy = conj_noindiv) must take over those people's idfoy.
    pac = (
        indivi_without_idfoy & indivi.idfoy.notnull() & indivi.quifoy.isin(['pac'])
        )
    log.info(u"Dealing with {} non valid idfoy of pacs".format(
        pac.sum()
        ))
    conj_noindiv = indivi.idfoy[pac].copy()
    new_idfoy_by_old = indivi.loc[
        indivi.noindiv.isin(conj_noindiv), ['noindiv', 'idfoy']
        ].astype('int').set_index('noindiv').squeeze().to_dict()
    indivi.loc[pac, 'idfoy'] = indivi.loc[pac, 'idfoy'].map(new_idfoy_by_old)
    del pac
    indivi_without_idfoy = ~indivi.idfoy.isin(idfoyList)  # refresh the problematic cases
    assert not (indivi_without_idfoy & indivi.idfoy.notnull() & indivi.quifoy.isin(['pac'])).any()
    # Now deal with the idfoy that were never assigned at all.
    indivi_without_idfoy = ~indivi.idfoy.isin(idfoyList)
    assert (indivi_without_idfoy == indivi.idfoy.isnull()).all()
    log.info(u"Il faut traiter les {} idfoy non attribués".format(
        indivi_without_idfoy.sum()
        ))
    # Non-child adults whose partner is a valid declarant ('vous').
    married_adult_with_vous = (
        indivi_without_idfoy &
        ((indivi.noiper == 0) | (indivi.noimer == 0)) &
        (indivi.age >= 25) &
        (indivi.noicon > 0) &
        (100 * indivi.idmen + indivi.noicon).isin(idfoyList)
        )
    indivi.loc[married_adult_with_vous, 'idfoy'] = (
        100 * indivi.loc[married_adult_with_vous, 'idmen'] + indivi.loc[married_adult_with_vous, 'noicon']
        )
    indivi.loc[married_adult_with_vous, 'quifoy'] = 'conj'
    log.info(
        u"""Il y a {} adultes > 25 ans non enfants avec conjoints déclarants""".format(
            married_adult_with_vous.sum()
            )
        )
    # Neither member of the couple has an idfoy.
    married_adult_without_vous = (
        indivi_without_idfoy &
        ((indivi.noiper == 0) | (indivi.noimer == 0)) &
        (indivi.age >= 18) &
        (indivi.noicon > 0) &
        (~married_adult_with_vous)
        )
    # Group them by household and check there are exactly two of them.
    couple_by_idmen = (
        (indivi.loc[
            married_adult_without_vous, ['idmen', 'noindiv']
            ].groupby('idmen').agg('count')) == 2).astype('int').squeeze().to_dict()
    couple_idmens = list(idmen for idmen in couple_by_idmen.keys() if couple_by_idmen[idmen])
    # Create a vous-conj tax unit when they form a couple.
    vous = married_adult_without_vous & (
        ((indivi.sexe == 1) & indivi.idmen.isin(couple_idmens)) |
        (~indivi.idmen.isin(couple_idmens))
        )
    conj = married_adult_without_vous & (~vous) & indivi.idmen.isin(couple_idmens)
    indivi.loc[vous, 'idfoy'] = indivi.loc[vous, 'noindiv'].copy()
    indivi.loc[vous, 'quifoy'] = 'vous'
    indivi.loc[conj, 'idfoy'] = 100 * indivi.loc[conj, 'idmen'] + indivi.loc[conj, 'noicon']
    indivi.loc[conj, 'quifoy'] = 'conj'
    del vous, conj
    log.info(
        u"""Il y a {} adultes > 25 ans non enfants sans conjoints déclarants: on crée un foyer""".format(
            married_adult_without_vous.sum()
            )
        )
    idfoyList = indivi.loc[indivi.quifoy == "vous", 'idfoy'].unique()
    indivi_without_idfoy = ~indivi.idfoy.isin(idfoyList)
    # Older children (>= 25) without a partner.
    non_married_aged_kids = (
        indivi_without_idfoy &
        ((indivi.noiper > 0) | (indivi.noimer > 0)) &
        (indivi.age >= 25) &
        (indivi.noicon == 0)
        )
    indivi.loc[non_married_aged_kids, 'idfoy'] = indivi.loc[non_married_aged_kids, 'noindiv'].copy()
    indivi.loc[non_married_aged_kids, 'quifoy'] = 'vous'
    log.info(
        u"""On crée un foyer fiscal indépendants pour les {} enfants agés de plus de 25 ans sans conjoint
        vivant avec leurs parents""".format(
            non_married_aged_kids.sum()
            )
        )
    del non_married_aged_kids
    idfoyList = indivi.loc[indivi.quifoy == "vous", 'idfoy'].unique()
    indivi_without_idfoy = ~indivi.idfoy.isin(idfoyList)
    # Older children (>= 18) with a partner.
    married_aged_kids = (
        indivi_without_idfoy &
        ((indivi.noiper > 0) | (indivi.noimer > 0)) &
        (indivi.age >= 18) &
        (indivi.noicon != 0)
        )
    # NOTE(review): the computation above is duplicated verbatim below;
    # harmless but redundant.
    # Older children (>= 18) with a partner.
    married_aged_kids = (
        indivi_without_idfoy &
        ((indivi.noiper > 0) | (indivi.noimer > 0)) &
        (indivi.age >= 18) &
        (indivi.noicon != 0)
        )
    noiconjs = 100 * indivi.idmen + indivi.noicon
    quifoy_by_noiconj = indivi.loc[
        indivi.noindiv.isin(noiconjs[married_aged_kids]), ['noindiv', 'quifoy']
        ].set_index('noindiv').dropna().squeeze().to_dict()
    is_conj_vous = noiconjs.map(quifoy_by_noiconj) == "vous"
    indivi.loc[married_aged_kids & is_conj_vous, 'quifoy'] = "conj"
    indivi.loc[married_aged_kids & is_conj_vous, 'idfoy'] = noiconjs[married_aged_kids & is_conj_vous].copy()
    log.info("""Il y a {} enfants agés de plus de 25 ans avec conjoint
        vivant avec leurs parents qui ne sont pas traités""".format(
        married_aged_kids.sum()
        ))  # There are none in 2009
    del married_aged_kids, noiconjs, is_conj_vous
    # Flat-sharing households (colocations).
    if indivi_without_idfoy.any():
        potential_idmens = indivi.loc[indivi_without_idfoy, 'idmen'].copy()
        colocs = indivi.loc[indivi.idmen.isin(potential_idmens), ['idmen', 'age', 'quifoy']].copy()
        # A flat-share: all adults, ages within 20 years, at least one 'vous'.
        coloc_by_idmen = colocs.groupby('idmen').agg({
            'age':
                lambda x:
                    (abs((x.min() - x.max())) < 20) & (x.min() >= 18),
            'quifoy':
                lambda x:
                    (x == 'vous').sum() >= 1,
            }
            )
        coloc_dummy_by_idmen = (coloc_by_idmen.age * coloc_by_idmen.quifoy)
        coloc_idmens = coloc_dummy_by_idmen.index[coloc_dummy_by_idmen.astype('bool')].tolist()
        colocataires = indivi_without_idfoy & indivi.idmen.isin(coloc_idmens)
        indivi.loc[colocataires, 'quifoy'] = 'vous'
        indivi.loc[colocataires, 'idfoy'] = indivi.loc[colocataires, 'noindiv'].copy()
        log.info(u"Il y a {} colocataires".format(
            colocataires.sum()
            ))
        del colocataires, coloc_dummy_by_idmen, coloc_by_idmen, coloc_idmens, colocs
    idfoyList = indivi.loc[indivi.quifoy == "vous", 'idfoy'].unique()
    indivi_without_idfoy = ~indivi.idfoy.isin(idfoyList)
    # Put the remaining adults over 25 into single-person tax units.
    other_adults = indivi_without_idfoy & (indivi.age >= 25)
    if indivi_without_idfoy.any():
        indivi.loc[other_adults, 'quifoy'] = 'vous'
        indivi.loc[other_adults, 'idfoy'] = indivi.loc[other_adults, 'noindiv'].copy()
        log.info(u"Il y a {} autres adultes seuls à qui l'on crée un foyer individuel".format(
            other_adults.sum()
            ))
        del other_adults
    idfoyList = indivi.loc[indivi.quifoy == "vous", 'idfoy'].unique()
    indivi_without_idfoy = ~indivi.idfoy.isin(idfoyList)
    # Younger children (< 25) with a declared father or mother.
    kids = (
        indivi_without_idfoy &
        (indivi.age < 25) &
        ((indivi.noiper > 0) | (indivi.noimer > 0))
        )
    # Attach children to their father's tax unit when it exists.
    log.info(u"On traite le cas des {} enfants (noiper ou noimer non nuls) repérés non rattachés".format(
        kids.sum()
        ))
    if kids.any():
        pere_declarant_potentiel = kids & (indivi.noiper > 0)
        indivi['pere_noindiv'] = (100 * indivi.idmen.fillna(0) + indivi.noiper.fillna(0)).astype('int')
        pere_noindiv = (
            100 * indivi.loc[pere_declarant_potentiel, 'idmen'].fillna(0) +
            indivi.loc[pere_declarant_potentiel, 'noiper'].fillna(0)
            ).astype('int')
        idfoy_by_noindiv = indivi.loc[
            indivi.noindiv.isin(pere_noindiv), ['noindiv', 'idfoy']
            ].dropna().astype('int').set_index('noindiv').squeeze().to_dict()
        pere_declarant_potentiel_idfoy = indivi['pere_noindiv'].map(idfoy_by_noindiv)
        pere_veritable_declarant = pere_declarant_potentiel & pere_declarant_potentiel_idfoy.isin(idfoyList)
        indivi.loc[pere_veritable_declarant, 'idfoy'] = (
            pere_declarant_potentiel_idfoy[pere_veritable_declarant].astype('int')
            )
        indivi.loc[pere_veritable_declarant, 'quifoy'] = 'pac'
        log.info(u"{} enfants rattachés au père ".format(
            pere_veritable_declarant.sum()
            ))
        del pere_declarant_potentiel, pere_declarant_potentiel_idfoy, pere_noindiv, \
            pere_veritable_declarant, idfoy_by_noindiv
    idfoyList = indivi.loc[indivi.quifoy == "vous", 'idfoy'].unique()
    indivi_without_idfoy = ~indivi.idfoy.isin(idfoyList)
    kids = (
        indivi_without_idfoy &
        (indivi.age < 25) &
        ((indivi.noiper > 0) | (indivi.noimer > 0))
        )
    log.info(u"Il reste {} enfants (noimer non nuls) repérés non rattachés".format(
        kids.sum()
        ))
    # Otherwise attach them to their mother's tax unit.
    if kids.any():
        mere_declarant_potentiel = kids & (indivi.noimer > 0)
        indivi['mere_noindiv'] = (100 * indivi.idmen.fillna(0) + indivi.noimer.fillna(0)).astype('int')
        mere_noindiv = (
            100 * indivi.loc[mere_declarant_potentiel, 'idmen'].fillna(0) +
            indivi.loc[mere_declarant_potentiel, 'noimer'].fillna(0)
            ).astype('int')
        idfoy_by_noindiv = indivi.loc[
            indivi.noindiv.isin(mere_noindiv), ['noindiv', 'idfoy']
            ].dropna().astype('int').set_index('noindiv').squeeze().to_dict()
        mere_declarant_potentiel_idfoy = indivi['mere_noindiv'].map(idfoy_by_noindiv)
        mere_veritable_declarant = mere_declarant_potentiel & mere_declarant_potentiel_idfoy.isin(idfoyList)
        indivi.loc[mere_veritable_declarant, 'idfoy'] = (
            mere_declarant_potentiel_idfoy[mere_veritable_declarant].astype('int')
            )
        indivi.loc[mere_veritable_declarant, 'quifoy'] = 'pac'
        log.info(u"{} enfants rattachés à la mère".format(
            mere_veritable_declarant.sum()
            ))
        del mere_declarant_potentiel, mere_declarant_potentiel_idfoy, mere_noindiv, \
            mere_veritable_declarant, idfoy_by_noindiv
    idfoyList = indivi.loc[indivi.quifoy == "vous", 'idfoy'].unique()
    indivi_without_idfoy = ~indivi.idfoy.isin(idfoyList)
    # Children whose parents are not indicated (noimer and noiper == 0).
    if indivi_without_idfoy.any():
        potential_idmens = indivi.loc[indivi_without_idfoy, 'idmen'].copy()
        parents = indivi.loc[indivi.idmen.isin(potential_idmens), [
            'idmen', 'age', 'quifoy', 'lpr', 'noiper', 'noimer']].copy()
        # Households with exactly one declarant ('vous').
        parents_by_idmen = parents.groupby('idmen').agg({
            'quifoy':
                lambda quifoy: (quifoy == 'vous').sum() == 1,
            }
            )
        parents_dummy_by_idmen = parents_by_idmen.quifoy.copy()
        parents_idmens = parents_dummy_by_idmen.index[
            parents_dummy_by_idmen.astype('bool')].tolist()
        parents_idfoy_by_idmem = indivi.loc[
            indivi.idmen.isin(parents_idmens) & (indivi.quifoy == 'vous'),
            ['idmen', 'noindiv']].dropna().astype('int').set_index('idmen').squeeze().to_dict()
        avec_parents = (
            indivi_without_idfoy &
            indivi.idmen.isin(parents_idmens) &
            (
                (indivi.age < 18) |
                (
                    (indivi.age < 25) &
                    (indivi.sali == 0) &
                    (indivi.choi == 0) &
                    (indivi.alr == 0)
                    )
                ) &
            (indivi.lpr == 4) &
            (indivi.noiper == 0) &
            (indivi.noimer == 0) &
            (indivi.lpr == 4)
            )
        indivi.loc[avec_parents, 'idfoy'] = (
            indivi.loc[avec_parents, 'idmen'].map(parents_idfoy_by_idmem))
        indivi.loc[avec_parents, 'quifoy'] = 'pac'
        log.info(u"Il y a {} enfants sans noiper ni noimer avec le seul vous du ménage".format(
            avec_parents.sum()
            ))
        del parents, parents_by_idmen, parents_dummy_by_idmen, parents_idfoy_by_idmem, parents_idmens
    idfoyList = indivi.loc[indivi.quifoy == "vous", 'idfoy'].unique()
    indivi_without_idfoy = ~indivi.idfoy.isin(idfoyList)
    # Same, but the declarant is not the reference person (lpr > 2).
    if indivi_without_idfoy.any():
        potential_idmens = indivi.loc[indivi_without_idfoy, 'idmen'].copy()
        parents_non_pr = indivi.loc[
            indivi.idmen.isin(potential_idmens) & (indivi.quifoy == 'vous'),
            ['idmen', 'quifoy', 'noindiv', 'lpr']].copy()
        parents_by_idmen = parents_non_pr.groupby('idmen').filter(
            lambda df: (
                ((df.quifoy == 'vous').sum() >= 1) &
                (df.lpr > 2).any()
                )).query('lpr > 2')
        parents_idfoy_by_idmem = parents_by_idmen[
            ['idmen', 'noindiv']
            ].dropna().astype('int').set_index('idmen').squeeze().to_dict()
        avec_parents_non_pr = (
            indivi_without_idfoy &
            indivi.idmen.isin(parents_idfoy_by_idmem.keys()) &
            (indivi.age < 18) &
            (indivi.lpr == 4) &
            (indivi.noiper == 0) &
            (indivi.noimer == 0)
            )
        indivi.loc[avec_parents_non_pr, 'idfoy'] = (
            indivi.loc[avec_parents_non_pr, 'idmen'].map(parents_idfoy_by_idmem))
        indivi.loc[avec_parents_non_pr, 'quifoy'] = 'pac'
        log.info(u"Il y a {} enfants sans noiper ni noimer avec le seul vous du ménage".format(
            avec_parents_non_pr.sum()
            ))
        del parents_non_pr, parents_by_idmen, parents_idfoy_by_idmem, avec_parents_non_pr
    idfoyList = indivi.loc[indivi.quifoy == "vous", 'idfoy'].unique()
    indivi_without_idfoy = ~indivi.idfoy.isin(idfoyList)
    # Any remaining minors: attach to some declarant of their household.
    if indivi_without_idfoy.any():
        other_enfants = indivi_without_idfoy & (indivi.age < 18)
        potential_idmens = indivi.loc[other_enfants, 'idmen'].copy()
        declarants = indivi.loc[
            indivi.idmen.isin(potential_idmens) & (indivi.quifoy == 'vous'),
            ['idmen', 'idfoy']].dropna().astype('int').copy()
        declarants_by_idmen = declarants.groupby('idmen').agg({
            'idfoy': 'max'
            }).squeeze().to_dict()
        indivi.loc[other_enfants, 'idfoy'] = indivi.loc[other_enfants, 'idmen'].copy().map(declarants_by_idmen)
        indivi.loc[other_enfants, 'quifoy'] = 'pac'
        log.info(u"Il y a {} autres enfants que l'on met avec un vous du ménage".format(
            other_enfants.sum()
            ))
    idfoyList = indivi.loc[indivi.quifoy == "vous", 'idfoy'].unique()
    indivi_without_idfoy = ~indivi.idfoy.isin(idfoyList)
    # Remaining adults (>= 18): each becomes an independent declarant.
    if indivi_without_idfoy.any():
        other_grands_enfants = indivi_without_idfoy & (indivi.age >= 18)
        indivi.loc[other_grands_enfants, 'idfoy'] = indivi.loc[other_grands_enfants, 'noindiv']
        indivi.loc[other_grands_enfants, 'quifoy'] = 'vous'
        log.info(u"Il y a {} autres grans enfants (>= 18) que l'on met avec un vous du ménage".format(
            other_grands_enfants.sum()
            ))
        idfoyList = indivi.loc[indivi.quifoy == "vous", 'idfoy'].unique()
        indivi_without_idfoy = ~indivi.idfoy.isin(idfoyList)
    temporary_store['indivi_step_06_{}'.format(year)] = indivi
    assert not indivi_without_idfoy.any()
    log.info(u" 4.2 : On enlève les individus pour lesquels il manque le déclarant")
    # Bring in the FIP table and align its columns with indivi's.
    fip = temporary_store['fipDat_{}'.format(year)]
    fip["declar"] = np.nan
    fip["agepf"] = np.nan
    fip.drop(["actrec", "year", "noidec"], axis = 1, inplace = True)
    fip.naia = fip.naia.astype("int32")
    fip.rename(
        columns = dict(
            ident = "idmen",
            persfip = "quifoy",
            zsali = "sali",  # Includes the non-taxable wages of insurance agents
            zchoi = "choi",
            zrsti = "rsti",
            zalri = "alr"),
        inplace = True)
    is_fip_19_25 = ((year - fip.naia - 1) >= 19) & ((year - fip.naia - 1) < 25)
    # TODO: BUT for the time being we keep them in thier vous menage so the following lines are commented
    # The idmen are of the form 60XXXX we use idmen 61XXXX, 62XXXX for the idmen of the kids over 18 and less than 25
    indivi = concat([indivi, fip.loc[is_fip_19_25].copy()])
    temporary_store['indivi_step_06_{}'.format(year)] = indivi
    assert not(indivi.noindiv.duplicated().any())
    del is_fip_19_25
    indivi['age'] = year - indivi.naia - 1
    indivi['age_en_mois'] = 12 * indivi.age + 12 - indivi.naim
    # quimen: position within the household, derived from lpr.
    indivi["quimen"] = 0
    assert indivi.lpr.notnull().all()
    indivi.loc[indivi.lpr == 1, 'quimen'] = 0
    indivi.loc[indivi.lpr == 2, 'quimen'] = 1
    indivi.loc[indivi.lpr == 3, 'quimen'] = 2
    indivi.loc[indivi.lpr == 4, 'quimen'] = 3
    indivi['not_pr_cpr'] = None  # Create a new column
    indivi.loc[indivi.lpr <= 2, 'not_pr_cpr'] = False
    indivi.loc[indivi.lpr > 2, 'not_pr_cpr'] = True
    assert indivi.not_pr_cpr.isin([True, False]).all()
    log.info(u" 4.3 : Creating non pr=0 and cpr=1 idmen's")
    indivi.set_index('noindiv', inplace = True, verify_integrity = True)
    # Give every non-(pr/cpr) member a distinct quimen >= 2 per household.
    test1 = indivi.loc[indivi.not_pr_cpr, ['quimen', 'idmen']].copy()
    test1['quimen'] = 2
    j = 2
    while any(test1.duplicated(['quimen', 'idmen'])):
        test1.loc[test1.duplicated(['quimen', 'idmen']), 'quimen'] = j + 1
        j += 1
    print_id(indivi)
    indivi.update(test1)
    indivi.reset_index(inplace = True)
    print_id(indivi)
    temporary_store['indivi_step_06_{}'.format(year)] = indivi
    gc.collect()
    return
@temporary_store_decorator(config_files_directory = config_files_directory, file_name = 'erfs')
def create_totals_second_pass(temporary_store = None, year = None):
    """Steps 5-8 of the ERFS survey build: attach every remaining individual
    to a fiscal household (foyer), derive descriptive variables (activite,
    statut, txtppb, chpub, cadre), renumber dependents inside each foyer,
    then merge individuals with the family table into the totals tables.

    Reads ``indivi_step_06_{year}``, ``famc_{year}`` and
    ``ind_vars_to_remove_{year}`` from *temporary_store*; writes back
    ``tot2_{year}`` and ``tot3_{year}``.
    """
    assert temporary_store is not None
    assert year is not None
    log.info(u" 5.1 : Elimination idfoy restant")
    # "Sweeper" pass: many individuals still have an idfoy that matches no
    # declaring household, so we default to one fiscal household per menage.
    indivi = temporary_store['indivi_step_06_{}'.format(year)]
    idfoyList = indivi.loc[indivi.quifoy == "vous", 'idfoy'].unique()
    indivi_without_idfoy = ~indivi.idfoy.isin(idfoyList)
    # Default role for unattached individuals: dependent ("pac"); adult
    # menage heads / co-heads are promoted to "vous"/"conj" just below.
    indivi.loc[indivi_without_idfoy, 'quifoy'] = "pac"
    indivi.loc[indivi_without_idfoy & (indivi.quimen == 0) & (indivi.age >= 18), 'quifoy'] = "vous"
    # New idfoy derived from the menage id: idmen * 100 + 51 keeps it unique
    # with respect to the existing declaration-based idfoy values.
    indivi.loc[indivi_without_idfoy & (indivi.quimen == 0) & (indivi.age >= 18), 'idfoy'] = (
        indivi.loc[indivi_without_idfoy, "idmen"].astype('int') * 100 + 51
        )
    indivi.loc[indivi_without_idfoy & (indivi.quimen == 1) & (indivi.age >= 18), 'quifoy'] = "conj"
    del idfoyList
    print_id(indivi)
    # Columns kept for the following steps.
    # NOTE(review): "wprm" appears twice in this list; harmless for the
    # subset test below, but could be deduplicated.
    variables = [
        "actrec",
        "age",
        "age_en_mois",
        "chpub",
        "encadr",
        "idfoy",
        "idmen",
        "nbsala",
        "noi",
        "noindiv",
        "prosa",
        "quelfic",
        "quifoy",
        "quimen",
        "statut",
        "titc",
        "txtppb",
        "wprm",
        "rc1rev",
        "maahe",
        "sali",
        "rsti",
        "choi",
        "alr",
        "wprm",
        ]
    assert set(variables).issubset(set(indivi.columns)), \
        "Manquent les colonnes suivantes : {}".format(set(variables).difference(set(indivi.columns)))
    dropped_columns = [variable for variable in indivi.columns if variable not in variables]
    indivi.drop(dropped_columns, axis = 1, inplace = True)
    # see http://stackoverflow.com/questions/11285613/selecting-columns
    indivi.reset_index(inplace = True)
    gc.collect()
    # TODO: the actrec codes of the fip individuals are not set here (done at
    # the end, once the information from the tax declarations is gathered).
    log.info(u"Etape 6 : Création des variables descriptives")
    log.info(u" 6.1 : Variable activité")
    log.info(u"Variables présentes; \n {}".format(indivi.columns))
    # Map the survey activity code (actrec) onto the model's activite enum.
    indivi['activite'] = np.nan
    indivi.loc[indivi.actrec <= 3, 'activite'] = 0
    indivi.loc[indivi.actrec == 4, 'activite'] = 1
    indivi.loc[indivi.actrec == 5, 'activite'] = 2
    indivi.loc[indivi.actrec == 7, 'activite'] = 3
    indivi.loc[indivi.actrec == 8, 'activite'] = 4
    indivi.loc[indivi.age <= 13, 'activite'] = 2  # these are in fact the actrec == 9 cases
    log.info("Valeurs prises par la variable activité \n {}".format(indivi['activite'].value_counts(dropna = False)))
    # TODO: MBJ problem avec les actrec
    # TODO: FIX AND REMOVE
    indivi.loc[indivi.actrec.isnull(), 'activite'] = 5
    indivi.loc[indivi.titc.isnull(), 'titc'] = 0
    assert indivi.titc.notnull().all(), \
        u"Problème avec les titc"  # about 420 NaN remain for statut, titc etc.
    log.info(u" 6.2 : Variable statut")
    # Recode the two-digit survey statut into a compact 0-11 scale.
    indivi.loc[indivi.statut.isnull(), 'statut'] = 0
    indivi.statut = indivi.statut.astype('int')
    indivi.loc[indivi.statut == 11, 'statut'] = 1
    indivi.loc[indivi.statut == 12, 'statut'] = 2
    indivi.loc[indivi.statut == 13, 'statut'] = 3
    indivi.loc[indivi.statut == 21, 'statut'] = 4
    indivi.loc[indivi.statut == 22, 'statut'] = 5
    indivi.loc[indivi.statut == 33, 'statut'] = 6
    indivi.loc[indivi.statut == 34, 'statut'] = 7
    indivi.loc[indivi.statut == 35, 'statut'] = 8
    indivi.loc[indivi.statut == 43, 'statut'] = 9
    indivi.loc[indivi.statut == 44, 'statut'] = 10
    indivi.loc[indivi.statut == 45, 'statut'] = 11
    assert indivi.statut.isin(range(12)).all(), u"statut value over range"
    log.info("Valeurs prises par la variable statut \n {}".format(
        indivi['statut'].value_counts(dropna = False)))
    log.info(u" 6.3 : variable txtppb")
    indivi.loc[indivi.txtppb.isnull(), 'txtppb'] = 0
    assert indivi.txtppb.notnull().all()
    # nbsala: missing -> 0; survey code 99 ("unknown, large") -> top bucket 10.
    indivi.loc[indivi.nbsala.isnull(), 'nbsala'] = 0
    indivi.nbsala = indivi.nbsala.astype('int')
    indivi.loc[indivi.nbsala == 99, 'nbsala'] = 10
    assert indivi.nbsala.isin(range(11)).all()
    log.info("Valeurs prises par la variable txtppb \n {}".format(
        indivi['txtppb'].value_counts(dropna = False)))
    log.info(u" 6.4 : variable chpub et CSP")
    indivi.loc[indivi.chpub.isnull(), 'chpub'] = 0
    indivi.chpub = indivi.chpub.astype('int')
    assert indivi.chpub.isin(range(11)).all()
    # cadre flag built from the socio-professional code (prosa) and the
    # management-role indicator (encadr).
    indivi['cadre'] = 0
    indivi.loc[indivi.prosa.isnull(), 'prosa'] = 0
    assert indivi.prosa.notnull().all()
    log.info("Valeurs prises par la variable encadr \n {}".format(indivi['encadr'].value_counts(dropna = False)))
    # encadr: 1 = yes, 2 = no
    indivi.loc[indivi.encadr.isnull(), 'encadr'] = 2
    indivi.loc[indivi.encadr == 0, 'encadr'] = 2
    assert indivi.encadr.notnull().all()
    assert indivi.encadr.isin([1, 2]).all()
    indivi.loc[indivi.prosa.isin([7, 8]), 'cadre'] = 1
    indivi.loc[(indivi.prosa == 9) & (indivi.encadr == 1), 'cadre'] = 1
    assert indivi.cadre.isin(range(2)).all()
    log.info(
        u"Etape 7: on vérifie qu'il ne manque pas d'info sur les liens avec la personne de référence"
        )
    log.info(
        u"nb de doublons idfoy/quifoy {}".format(len(indivi[indivi.duplicated(subset = ['idfoy', 'quifoy'])]))
        )
    log.info(u"On crée les n° de personnes à charge dans le foyer fiscal")
    assert indivi.idfoy.notnull().all()
    print_id(indivi)
    # Convert the textual quifoy role to its numeric encoding
    # (vous -> 0, conj -> 1, pac -> 2).
    indivi['quifoy_bis'] = 2
    indivi.loc[indivi.quifoy == 'vous', 'quifoy_bis'] = 0
    indivi.loc[indivi.quifoy == 'conj', 'quifoy_bis'] = 1
    indivi.loc[indivi.quifoy == 'pac', 'quifoy_bis'] = 2
    del indivi['quifoy']
    indivi['quifoy'] = indivi.quifoy_bis.copy()
    del indivi['quifoy_bis']
    print_id(indivi)
    # Renumber dependents (quifoy == 2) so that (idfoy, quifoy) becomes a
    # unique key: each pass bumps the still-duplicated rows to the next rank.
    pac = indivi.loc[indivi['quifoy'] == 2, ['quifoy', 'idfoy', 'noindiv']].copy()
    print_id(pac)
    j = 2
    while pac.duplicated(['quifoy', 'idfoy']).any():
        pac.loc[pac.duplicated(['quifoy', 'idfoy']), 'quifoy'] = j
        j += 1
    print_id(pac)
    # Bring the renumbered ranks back; non-pac rows keep their original role.
    indivi = indivi.merge(pac, on = ['noindiv', 'idfoy'], how = "left")
    indivi['quifoy'] = indivi['quifoy_x']
    indivi['quifoy'] = where(indivi['quifoy_x'] == 2, indivi['quifoy_y'], indivi['quifoy_x'])
    del indivi['quifoy_x'], indivi['quifoy_y']
    print_id(indivi)
    del pac
    assert len(indivi[indivi.duplicated(subset = ['idfoy', 'quifoy'])]) == 0, \
        u"Il y a {} doublons idfoy/quifoy".format(
            len(indivi[indivi.duplicated(subset = ['idfoy', 'quifoy'])])
            )
    print_id(indivi)
    log.info(u"Etape 8 : création des fichiers totaux")
    famille = temporary_store['famc_{}'.format(year)]
    log.info(u" 8.1 : création de tot2 & tot3")
    tot2 = indivi.merge(famille, on = 'noindiv', how = 'inner')
    # TODO: MBJ increase in number of menage/foyer when merging with family ...
    del famille
    control(tot2, debug = True, verbose = True)
    assert tot2.quifam.notnull().all()
    temporary_store['tot2_{}'.format(year)] = tot2
    del indivi
    log.info(u" tot2 saved")
    tot2 = tot2[tot2.idmen.notnull()].copy()
    print_id(tot2)
    tot3 = tot2
    # TODO: check where they come from
    log.info("Avant élimination des doublons noindiv: {}".format(len(tot3)))
    tot3 = tot3.drop_duplicates(subset = 'noindiv')
    log.info("Après élimination des doublons noindiv: {}".format(len(tot3)))
    # Block to remove any unwanted duplicated pair
    control(tot3, debug = True, verbose = True)
    tot3 = tot3.drop_duplicates(subset = ['idfoy', 'quifoy'])
    log.info("Après élimination des doublons idfoy, quifoy: {}".format(len(tot3)))
    tot3 = tot3.drop_duplicates(subset = ['idfam', 'quifam'])
    log.info("Après élimination des doublons idfam, 'quifam: {}".format(len(tot3)))
    tot3 = tot3.drop_duplicates(subset = ['idmen', 'quimen'])
    log.info("Après élimination des doublons idmen, quimen: {}".format(len(tot3)))
    tot3 = tot3.drop_duplicates(subset = ['noindiv'])
    control(tot3)
    log.info(u" 8.2 : On ajoute les variables individualisables")
    # Drop the columns that will be re-attached at the individual level later.
    allvars = temporary_store['ind_vars_to_remove_{}'.format(year)]
    vars2 = set(tot3.columns).difference(set(allvars))
    tot3 = tot3[list(vars2)]
    log.info("{}".format(len(tot3)))
    assert not(tot3.duplicated(subset = ['noindiv']).any()), "doublon dans tot3['noindiv']"
    lg_dup = len(tot3[tot3.duplicated(['idfoy', 'quifoy'])])
    assert lg_dup == 0, "{} pairs of idfoy/quifoy in tot3 are duplicated".format(lg_dup)
    temporary_store['tot3_{}'.format(year)] = tot3
    control(tot3)
    del tot2, allvars, tot3, vars2
    gc.collect()
    log.info(u"tot3 sauvegardé")
@temporary_store_decorator(config_files_directory = config_files_directory, file_name = 'erfs')
def create_final(temporary_store = None, year = None):
    """Build the ``final_{year}`` table: join ``tot3`` with the individual
    tax-form variables (``foy_ind``) on (idfoy, quifoy), then attach the
    declaration-level sif data to the declaring individuals (quifoy == 0).
    """
    assert temporary_store is not None
    assert year is not None
    log.info(u"création de final")
    foy_ind = temporary_store['foy_ind_{}'.format(year)]
    tot3 = temporary_store['tot3_{}'.format(year)]
    log.info(u"Stats on tot3")
    print_id(tot3)
    log.info(u"Stats on foy_ind")
    print_id(foy_ind)
    # Align both frames on the fiscal-household key before joining;
    # verify_integrity guards against duplicated (idfoy, quifoy) pairs.
    foy_ind.set_index(['idfoy', 'quifoy'], inplace = True, verify_integrity = True)
    tot3.set_index(['idfoy', 'quifoy'], inplace = True, verify_integrity = True)
    # tot3 = concat([tot3, foy_ind], join_axes=[tot3.index], axis=1, verify_integrity = True)
    # TODO improve this
    # These income columns already exist in tot3; drop them from foy_ind so
    # the join does not produce suffixed duplicates.
    foy_ind.drop([u'alr', u'rsti', u'sali', u'choi'], axis = 1, inplace = True)
    tot3 = tot3.join(foy_ind)
    tot3.reset_index(inplace = True)
    foy_ind.reset_index(inplace = True)
    # tot3 = tot3.drop_duplicates(subset=['idfam', 'quifam'])
    control(tot3, verbose=True)
    final = tot3.loc[tot3.idmen.notnull(), :].copy()
    control(final, verbose=True)
    del tot3, foy_ind
    gc.collect()
    log.info(" loading fip")
    sif = temporary_store['sif_{}'.format(year)]
    log.info("Columns from sif dataframe: {}".format(sif.columns))
    log.info(" update final using fip")
    final.set_index('noindiv', inplace = True, verify_integrity = True)
    # TODO: a proper method is needed to handle double declarations.
    # Keep only the sif.noindiv that correspond to declaring individuals
    # (idfoy of rows with quifoy == "vous"), and drop the duplicates.
    idfoys = final.loc[final.quifoy == 0, "idfoy"]
    sif = sif[sif.noindiv.isin(idfoys) & ~(sif.change.isin(['M', 'S', 'Z']))].copy()
    sif.drop_duplicates(subset = ['noindiv'], inplace = True)
    sif.set_index('noindiv', inplace = True, verify_integrity = True)
    final = final.join(sif)
    final.reset_index(inplace = True)
    control(final, debug=True)
    # Tax-form checkboxes are booleans: missing means unchecked.
    final['caseP'] = final.caseP.fillna(False)
    final['caseF'] = final.caseF.fillna(False)
    print_id(final)
    temporary_store['final_{}'.format(year)] = final
    log.info(u"final sauvegardé")
    del sif, final
if __name__ == '__main__':
    # Run the three passes of step 06 for the 2009 survey year, logging to
    # step_06.log (overwritten on each run because of filemode='w').
    year = 2009
    logging.basicConfig(level = logging.INFO, filename = 'step_06.log', filemode = 'w')
    create_totals_first_pass(year = year)
    create_totals_second_pass(year = year)
    create_final(year = year)
    log.info(u"étape 06 remise en forme des données terminée")
| agpl-3.0 |
citiususc/construe | tests/bmi/fragment_test.py | 1 | 3752 | # -*- coding: utf-8 -*-
# pylint: disable-msg=E1101, E0102, E0202
"""
Created on Tue May 05 10:37:17 2015
Small test to try the abductive approach with the dataset from the Mobiguide
project at the BMI lab.
@author: T. Teijeiro
"""
import construe.utils.plotting.plotter as plotter
import construe.acquisition.record_acquisition as IN
import construe.acquisition.obs_buffer as obs_buffer
import construe.acquisition.signal_buffer as sig_buf
import construe.knowledge.observables as o
import construe.knowledge.constants as C
import construe.knowledge.abstraction_patterns as ap
import construe.inference.searching as searching
import construe.inference.reasoning as reasoning
import time
import itertools
import numpy as np
import objgraph
from pprint import pprint as pp
from construe.model import Interval as Iv
from construe.model.interpretation import Interpretation
from construe.utils.units_helper import (msec2samples as ms2sp,
samples2msec as sp2ms,
msec2bpm, bpm2msec)
# --- Configuration -------------------------------------------------------
# Signal reading: time factor (acquisition speed-up) and fragment length
# in samples.
TFACTOR = 5.0
LENGTH = 23040
# Searching settings: branching factor and min/max interpretation delay
# (MIN_DELAY in samples, MAX_DELAY scaled by the time factor).
KFACTOR = 12
MIN_DELAY = 1750
MAX_DELAY = int(ms2sp(20000)*TFACTOR)
# Overlapping between consecutive fragments; INIT is the absolute offset of
# fragment FR_IDX inside the record.
FR_OVERLAP = int(ms2sp(3000))
FR_IDX = 0
INIT = int(FR_IDX * (LENGTH - FR_OVERLAP))
IN.reset()
# Standard annotator used
ANNOTATOR = 'atr'
# Record used.
# NOTE(review): REC is assigned three times; only the last assignment takes
# effect. The first two look like leftover alternatives — consider
# commenting them out explicitly.
REC = ('/home/tomas/Dropbox/Investigacion/tese/estadias/2015_BMI'
       '/validation/training_dataset/MG008-2015_07_11-ECG-1')
REC = ('/home/local/tomas.teijeiro/Dropbox/Investigacion/tese/validacions/'
       'loose_records/monitoring_160404-1003_SIM')
REC = '/datos/tomas.teijeiro/Servando/MonitorizacionDomiciliaria/mit/250Hz/100'
# --- Acquisition setup ---------------------------------------------------
IN.set_record(REC, ANNOTATOR)
IN.set_offset(INIT)
IN.set_duration(LENGTH)
IN.set_tfactor(TFACTOR)
IN.start()
print('Preloading buffer...')
# Let the simulated acquisition run long enough to cover MIN_DELAY samples.
time.sleep(sp2ms(MIN_DELAY)/(1000.0*TFACTOR))
IN.get_more_evidence()
# Trivial interpretation: the search starts from an empty hypothesis.
interp = Interpretation()
# The focus is initially set in the first observation
interp.focus.append(next(obs_buffer.get_observations()))
##########################
### Construe searching ###
##########################
print('Starting interpretation')
t0 = time.time()
cntr = searching.Construe(interp, KFACTOR)
# ltime tracks (last interpretation time, wall-clock instant it advanced),
# used below to decide when the search tree must be pruned.
ltime = (cntr.last_time, t0)
# Main loop: interleave evidence acquisition and search steps until the
# searcher commits to a best interpretation.
while cntr.best is None:
    IN.get_more_evidence()
    acq_time = IN.get_acquisition_point()
    #HINT debug code
    fstr = 'Int: {0:05d} '
    for i in range(int(sp2ms(acq_time - cntr.last_time)/1000.0)):
        fstr += '-'
    fstr += ' Acq: {1}'
    print(fstr.format(int(cntr.last_time), acq_time))
    #End of debug code
    # While still acquiring, only expand nodes that are far enough behind
    # the acquisition point; once acquisition finishes, expand everything.
    filt = ((lambda n : acq_time + n[0][2] >= MIN_DELAY)
            if obs_buffer.get_status() is obs_buffer.Status.ACQUIRING
            else (lambda _ : True))
    cntr.step(filt)
    if cntr.last_time > ltime[0]:
        ltime = (cntr.last_time, time.time())
    # If the distance between acquisition time and interpretation time is
    # excessive, the search tree is pruned.
    if ms2sp((time.time()-ltime[1])*1000.0)*TFACTOR > MAX_DELAY:
        print('Pruning search')
        if cntr.open:
            # NOTE(review): prevopen is assigned but never read afterwards;
            # it appears to be a leftover debugging aid.
            prevopen = cntr.open
        cntr.prune()
print('Finished in {0:.3f} seconds'.format(time.time()-t0))
print('Created {0} interpretations'.format(interp.counter))
# Optional visualisation of the best explanation / search tree, kept as a
# usage example (requires the plotter module).
#be = cntr.best.node
#be.recover_old()
#brview = plotter.plot_observations(sig_buf.get_signal(
#                                       sig_buf.get_available_leads()[0]), be)
##Drawing of the search tree
#label_fncs = {}
#label_fncs['n'] = lambda br: str(br)
#label_fncs['e'] = lambda br: ''
#brview = plotter.plot_branch(interp, label_funcs=label_fncs, target=be)
| agpl-3.0 |
collex100/odoo | addons/account/report/account_general_journal.py | 381 | 7669 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
from openerp.osv import osv
from openerp.report import report_sxw
from common_report_header import common_report_header
class journal_print(report_sxw.rml_parse, common_report_header):
    """RML parser for the General Journal report: exposes per-period journal
    lines and debit/credit totals to the report template through
    ``localcontext``. Journal and period ids are resolved in
    :meth:`set_context` from the wizard form or the selected
    account.journal.period records.
    """

    def __init__(self, cr, uid, name, context=None):
        if context is None:
            context = {}
        super(journal_print, self).__init__(cr, uid, name, context=context)
        self.period_ids = []
        self.journal_ids = []
        # Helpers made available to the QWeb/RML template.
        self.localcontext.update( {
            'time': time,
            'lines': self.lines,
            'periods': self.periods,
            'sum_debit_period': self._sum_debit_period,
            'sum_credit_period': self._sum_credit_period,
            'sum_debit': self._sum_debit,
            'sum_credit': self._sum_credit,
            'get_fiscalyear': self._get_fiscalyear,
            'get_account': self._get_account,
            'get_start_period': self.get_start_period,
            'get_end_period': self.get_end_period,
            'get_sortby': self._get_sortby,
            'get_filter': self._get_filter,
            'get_journal': self._get_journal,
            'get_start_date':self._get_start_date,
            'get_end_date':self._get_end_date,
            'display_currency':self._display_currency,
            'get_target_move': self._get_target_move,
        })

    def set_context(self, objects, data, ids, report_type=None):
        """Resolve the journal/period selection and the extra SQL filter
        (``query_get_clause``) before the report is rendered.

        When launched from the menu (``ir.ui.menu``), the selection comes
        from the wizard form (``active_ids`` + ``used_context``); otherwise
        the given account.journal.period ids are used directly.
        """
        obj_move = self.pool.get('account.move.line')
        new_ids = ids
        self.query_get_clause = ''
        self.target_move = data['form'].get('target_move', 'all')
        if (data['model'] == 'ir.ui.menu'):
            new_ids = 'active_ids' in data['form'] and data['form']['active_ids'] or []
            self.query_get_clause = 'AND '
            self.query_get_clause += obj_move._query_get(self.cr, self.uid, obj='l', context=data['form'].get('used_context', {}))
            objects = self.pool.get('account.journal.period').browse(self.cr, self.uid, new_ids)
        if new_ids:
            self.cr.execute('SELECT period_id, journal_id FROM account_journal_period WHERE id IN %s', (tuple(new_ids),))
            res = self.cr.fetchall()
            # Split the (period_id, journal_id) pairs into two parallel tuples.
            self.period_ids, self.journal_ids = zip(*res)
        return super(journal_print, self).set_context(objects, data, ids, report_type=report_type)

    # returns a list of period objs
    def periods(self, journal_period_objs):
        """Return the period browse-records of *journal_period_objs*,
        keeping only the first occurrence of each period id."""
        dic = {}

        def filter_unique(o):
            # True only the first time a given period_id is seen.
            key = o.period_id.id
            res = key in dic
            if not res:
                dic[key] = True
            return not res
        filtered_objs = filter(filter_unique, journal_period_objs)
        return map(lambda x: x.period_id, filtered_objs)

    def lines(self, period_id):
        """Return the journal totals (debit/credit, grouped by journal and
        currency) for *period_id*, restricted to the selected journals and
        to moves in the states allowed by ``target_move``."""
        if not self.journal_ids:
            return []
        move_state = ['draft','posted']
        if self.target_move == 'posted':
            move_state = ['posted']
        self.cr.execute('SELECT j.code, j.name, l.amount_currency,c.symbol AS currency_code,l.currency_id, '
                        'SUM(l.debit) AS debit, SUM(l.credit) AS credit '
                        'FROM account_move_line l '
                        'LEFT JOIN account_move am ON (l.move_id=am.id) '
                        'LEFT JOIN account_journal j ON (l.journal_id=j.id) '
                        'LEFT JOIN res_currency c on (l.currency_id=c.id)'
                        'WHERE am.state IN %s AND l.period_id=%s AND l.journal_id IN %s ' + self.query_get_clause + ' '
                        'GROUP BY j.id, j.code, j.name, l.amount_currency, c.symbol, l.currency_id ',
                        (tuple(move_state), period_id, tuple(self.journal_ids)))
        return self.cr.dictfetchall()

    def _set_get_account_currency_code(self, account_id):
        """Cache the currency symbol of *account_id* on
        ``self.account_currency`` (False when the account has no currency).

        NOTE(review): the query interpolates account_id with ``%`` instead of
        passing it as a psycopg2 parameter; account_id comes from internal
        data here, but parameterizing it would be safer and consistent with
        the other queries in this class.
        """
        self.cr.execute("SELECT c.symbol AS code "\
                        "FROM res_currency c, account_account AS ac "\
                        "WHERE ac.id = %s AND ac.currency_id = c.id" % (account_id))
        result = self.cr.fetchone()
        if result:
            self.account_currency = result[0]
        else:
            self.account_currency = False

    def _get_account(self, data):
        """Report header helper: company name when printed from a
        journal/period record, otherwise defer to the common header."""
        if data['model'] == 'account.journal.period':
            return self.pool.get('account.journal.period').browse(self.cr, self.uid, data['id']).company_id.name
        return super(journal_print, self)._get_account(data)

    def _get_fiscalyear(self, data):
        """Report header helper: fiscal year name when printed from a
        journal/period record, otherwise defer to the common header."""
        if data['model'] == 'account.journal.period':
            return self.pool.get('account.journal.period').browse(self.cr, self.uid, data['id']).fiscalyear_id.name
        return super(journal_print, self)._get_fiscalyear(data)

    def _display_currency(self, data):
        """Whether the amount-currency column must be shown."""
        if data['model'] == 'account.journal.period':
            return True
        return data['form']['amount_currency']

    def _sum_debit_period(self, period_id, journal_id=False):
        """Total debit of *period_id* over the selected journals (or over
        the single *journal_id* when given), excluding draft lines."""
        if journal_id:
            journals = [journal_id]
        else:
            journals = self.journal_ids
        if not journals:
            return 0.0
        move_state = ['draft','posted']
        if self.target_move == 'posted':
            move_state = ['posted']
        self.cr.execute('SELECT SUM(l.debit) FROM account_move_line l '
                        'LEFT JOIN account_move am ON (l.move_id=am.id) '
                        'WHERE am.state IN %s AND l.period_id=%s AND l.journal_id IN %s ' + self.query_get_clause + ' ' \
                        'AND l.state<>\'draft\'',
                        (tuple(move_state), period_id, tuple(journals)))
        return self.cr.fetchone()[0] or 0.0

    def _sum_credit_period(self, period_id, journal_id=None):
        """Total credit of *period_id* over the selected journals (or over
        the single *journal_id* when given), excluding draft lines."""
        if journal_id:
            journals = [journal_id]
        else:
            journals = self.journal_ids
        move_state = ['draft','posted']
        if self.target_move == 'posted':
            move_state = ['posted']
        if not journals:
            return 0.0
        self.cr.execute('SELECT SUM(l.credit) FROM account_move_line l '
                        'LEFT JOIN account_move am ON (l.move_id=am.id) '
                        'WHERE am.state IN %s AND l.period_id=%s AND l.journal_id IN %s '+ self.query_get_clause + ' ' \
                        'AND l.state<>\'draft\'',
                        (tuple(move_state), period_id, tuple(journals)))
        return self.cr.fetchone()[0] or 0.0
class report_generaljournal(osv.AbstractModel):
    """Registration of the General Journal QWeb report: binds the
    ``account.report_generaljournal`` template to the :class:`journal_print`
    parser through the abstract-report wrapper."""
    _name = 'report.account.report_generaljournal'
    _inherit = 'report.abstract_report'
    _template = 'account.report_generaljournal'
    _wrapped_report_class = journal_print
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
fankcoder/huhamhire-hosts | gui/util_rc.py | 24 | 229377 | # -*- coding: utf-8 -*-
# Resource object code
#
# Created: 周三 1月 22 13:03:07 2014
# by: The Resource Compiler for PyQt (Qt v4.8.5)
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore
qt_resource_data = "\
\x00\x00\x04\xf8\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x20\x00\x00\x00\x20\x08\x06\x00\x00\x00\x73\x7a\x7a\xf4\
\x00\x00\x00\x19\x74\x45\x58\x74\x53\x6f\x66\x74\x77\x61\x72\x65\
\x00\x41\x64\x6f\x62\x65\x20\x49\x6d\x61\x67\x65\x52\x65\x61\x64\
\x79\x71\xc9\x65\x3c\x00\x00\x03\x71\x69\x54\x58\x74\x58\x4d\x4c\
\x3a\x63\x6f\x6d\x2e\x61\x64\x6f\x62\x65\x2e\x78\x6d\x70\x00\x00\
\x00\x00\x00\x3c\x3f\x78\x70\x61\x63\x6b\x65\x74\x20\x62\x65\x67\
\x69\x6e\x3d\x22\xef\xbb\xbf\x22\x20\x69\x64\x3d\x22\x57\x35\x4d\
\x30\x4d\x70\x43\x65\x68\x69\x48\x7a\x72\x65\x53\x7a\x4e\x54\x63\
\x7a\x6b\x63\x39\x64\x22\x3f\x3e\x20\x3c\x78\x3a\x78\x6d\x70\x6d\
\x65\x74\x61\x20\x78\x6d\x6c\x6e\x73\x3a\x78\x3d\x22\x61\x64\x6f\
\x62\x65\x3a\x6e\x73\x3a\x6d\x65\x74\x61\x2f\x22\x20\x78\x3a\x78\
\x6d\x70\x74\x6b\x3d\x22\x41\x64\x6f\x62\x65\x20\x58\x4d\x50\x20\
\x43\x6f\x72\x65\x20\x35\x2e\x35\x2d\x63\x30\x31\x34\x20\x37\x39\
\x2e\x31\x35\x31\x34\x38\x31\x2c\x20\x32\x30\x31\x33\x2f\x30\x33\
\x2f\x31\x33\x2d\x31\x32\x3a\x30\x39\x3a\x31\x35\x20\x20\x20\x20\
\x20\x20\x20\x20\x22\x3e\x20\x3c\x72\x64\x66\x3a\x52\x44\x46\x20\
\x78\x6d\x6c\x6e\x73\x3a\x72\x64\x66\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x31\x39\x39\
\x39\x2f\x30\x32\x2f\x32\x32\x2d\x72\x64\x66\x2d\x73\x79\x6e\x74\
\x61\x78\x2d\x6e\x73\x23\x22\x3e\x20\x3c\x72\x64\x66\x3a\x44\x65\
\x73\x63\x72\x69\x70\x74\x69\x6f\x6e\x20\x72\x64\x66\x3a\x61\x62\
\x6f\x75\x74\x3d\x22\x22\x20\x78\x6d\x6c\x6e\x73\x3a\x78\x6d\x70\
\x4d\x4d\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x6e\x73\x2e\x61\x64\
\x6f\x62\x65\x2e\x63\x6f\x6d\x2f\x78\x61\x70\x2f\x31\x2e\x30\x2f\
\x6d\x6d\x2f\x22\x20\x78\x6d\x6c\x6e\x73\x3a\x73\x74\x52\x65\x66\
\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x6e\x73\x2e\x61\x64\x6f\x62\
\x65\x2e\x63\x6f\x6d\x2f\x78\x61\x70\x2f\x31\x2e\x30\x2f\x73\x54\
\x79\x70\x65\x2f\x52\x65\x73\x6f\x75\x72\x63\x65\x52\x65\x66\x23\
\x22\x20\x78\x6d\x6c\x6e\x73\x3a\x78\x6d\x70\x3d\x22\x68\x74\x74\
\x70\x3a\x2f\x2f\x6e\x73\x2e\x61\x64\x6f\x62\x65\x2e\x63\x6f\x6d\
\x2f\x78\x61\x70\x2f\x31\x2e\x30\x2f\x22\x20\x78\x6d\x70\x4d\x4d\
\x3a\x4f\x72\x69\x67\x69\x6e\x61\x6c\x44\x6f\x63\x75\x6d\x65\x6e\
\x74\x49\x44\x3d\x22\x78\x6d\x70\x2e\x64\x69\x64\x3a\x63\x34\x37\
\x39\x66\x62\x31\x39\x2d\x34\x33\x34\x34\x2d\x62\x65\x34\x33\x2d\
\x39\x39\x31\x31\x2d\x33\x64\x35\x32\x63\x31\x30\x33\x35\x37\x65\
\x31\x22\x20\x78\x6d\x70\x4d\x4d\x3a\x44\x6f\x63\x75\x6d\x65\x6e\
\x74\x49\x44\x3d\x22\x78\x6d\x70\x2e\x64\x69\x64\x3a\x38\x34\x39\
\x36\x35\x45\x39\x33\x35\x39\x38\x35\x31\x31\x45\x33\x42\x38\x30\
\x33\x45\x39\x42\x31\x37\x42\x32\x36\x32\x41\x35\x41\x22\x20\x78\
\x6d\x70\x4d\x4d\x3a\x49\x6e\x73\x74\x61\x6e\x63\x65\x49\x44\x3d\
\x22\x78\x6d\x70\x2e\x69\x69\x64\x3a\x38\x34\x39\x36\x35\x45\x39\
\x32\x35\x39\x38\x35\x31\x31\x45\x33\x42\x38\x30\x33\x45\x39\x42\
\x31\x37\x42\x32\x36\x32\x41\x35\x41\x22\x20\x78\x6d\x70\x3a\x43\
\x72\x65\x61\x74\x6f\x72\x54\x6f\x6f\x6c\x3d\x22\x41\x64\x6f\x62\
\x65\x20\x50\x68\x6f\x74\x6f\x73\x68\x6f\x70\x20\x43\x43\x20\x28\
\x57\x69\x6e\x64\x6f\x77\x73\x29\x22\x3e\x20\x3c\x78\x6d\x70\x4d\
\x4d\x3a\x44\x65\x72\x69\x76\x65\x64\x46\x72\x6f\x6d\x20\x73\x74\
\x52\x65\x66\x3a\x69\x6e\x73\x74\x61\x6e\x63\x65\x49\x44\x3d\x22\
\x78\x6d\x70\x2e\x69\x69\x64\x3a\x63\x31\x65\x61\x38\x31\x32\x35\
\x2d\x65\x62\x64\x64\x2d\x63\x38\x34\x62\x2d\x39\x66\x37\x61\x2d\
\x30\x36\x39\x39\x32\x39\x38\x64\x62\x39\x33\x63\x22\x20\x73\x74\
\x52\x65\x66\x3a\x64\x6f\x63\x75\x6d\x65\x6e\x74\x49\x44\x3d\x22\
\x78\x6d\x70\x2e\x64\x69\x64\x3a\x63\x34\x37\x39\x66\x62\x31\x39\
\x2d\x34\x33\x34\x34\x2d\x62\x65\x34\x33\x2d\x39\x39\x31\x31\x2d\
\x33\x64\x35\x32\x63\x31\x30\x33\x35\x37\x65\x31\x22\x2f\x3e\x20\
\x3c\x2f\x72\x64\x66\x3a\x44\x65\x73\x63\x72\x69\x70\x74\x69\x6f\
\x6e\x3e\x20\x3c\x2f\x72\x64\x66\x3a\x52\x44\x46\x3e\x20\x3c\x2f\
\x78\x3a\x78\x6d\x70\x6d\x65\x74\x61\x3e\x20\x3c\x3f\x78\x70\x61\
\x63\x6b\x65\x74\x20\x65\x6e\x64\x3d\x22\x72\x22\x3f\x3e\xcf\x3d\
\xf2\xba\x00\x00\x01\x1d\x49\x44\x41\x54\x78\xda\x62\xcc\xcb\xcb\
\x3b\xc7\xc0\xc0\x60\xc8\x80\x1d\xfc\x05\xe2\xb0\x49\x93\x26\xad\
\x03\x71\x80\x6a\x13\x80\xd4\x3c\x20\x66\x64\xa0\x0e\x38\xcf\x84\
\xc7\x72\x10\x60\x06\x62\x3d\x24\xbe\x03\x15\x2d\x07\x01\x43\x26\
\x86\x01\x06\xa3\x0e\x18\x75\x00\xc8\x01\x9f\x08\xa8\xf9\x88\xc4\
\x7e\x4e\x65\xfb\x3f\xb1\x00\x09\x63\x20\xd6\xc1\xa1\xe0\x07\x10\
\xef\x41\xe2\x37\x00\xf1\x61\x20\x66\xa3\x92\x03\xae\x30\x8c\x78\
\xc0\x08\x2d\x5e\x1d\x70\xc8\x7f\x01\xe2\x0e\x60\x51\xfc\x04\x5a\
\x14\xab\x01\xa9\x62\x20\x66\xa7\x92\xfd\x07\x58\x88\x28\xdb\x5f\
\x01\x71\x13\x94\x5d\x08\xc4\x69\x54\x0c\x80\x38\x26\x22\xca\x76\
\xe4\xac\xca\x49\xed\x18\x18\x2d\x88\x46\x1d\x00\x72\xc0\x7f\x02\
\x6a\xfe\x21\xb1\xbf\x53\xd9\xfe\xff\xa0\x6c\x98\x44\xa0\x1c\x98\
\x87\xc4\xef\x87\x3a\x9a\x6a\xe5\xc0\x68\x51\x0c\x2a\x8a\x55\xf0\
\xd4\x86\xa0\x38\xdf\x0b\x2c\x8a\xff\x40\x8b\x62\x50\xd0\x3b\x53\
\xb3\x36\x04\xa5\x81\xb3\x40\xcc\x87\x47\x51\x11\x34\xee\x61\xd5\
\x71\x05\x35\xdb\x03\x4c\x04\x2c\x07\x01\x7e\x24\xb6\x24\x95\x63\
\x80\x6f\xb4\x20\x1a\x75\xc0\xa0\x70\xc0\x39\x3c\xf2\xa0\xfc\x7f\
\x09\xad\xe8\xfc\x4f\x45\xfb\xcf\x31\xfe\xff\xff\x7f\x40\x43\x00\
\x20\xc0\x00\xcb\x8e\x3a\x29\x41\x01\xac\xc1\x00\x00\x00\x00\x49\
\x45\x4e\x44\xae\x42\x60\x82\
\x00\x00\x05\xa0\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x20\x00\x00\x00\x20\x08\x06\x00\x00\x00\x73\x7a\x7a\xf4\
\x00\x00\x00\x19\x74\x45\x58\x74\x53\x6f\x66\x74\x77\x61\x72\x65\
\x00\x41\x64\x6f\x62\x65\x20\x49\x6d\x61\x67\x65\x52\x65\x61\x64\
\x79\x71\xc9\x65\x3c\x00\x00\x03\x71\x69\x54\x58\x74\x58\x4d\x4c\
\x3a\x63\x6f\x6d\x2e\x61\x64\x6f\x62\x65\x2e\x78\x6d\x70\x00\x00\
\x00\x00\x00\x3c\x3f\x78\x70\x61\x63\x6b\x65\x74\x20\x62\x65\x67\
\x69\x6e\x3d\x22\xef\xbb\xbf\x22\x20\x69\x64\x3d\x22\x57\x35\x4d\
\x30\x4d\x70\x43\x65\x68\x69\x48\x7a\x72\x65\x53\x7a\x4e\x54\x63\
\x7a\x6b\x63\x39\x64\x22\x3f\x3e\x20\x3c\x78\x3a\x78\x6d\x70\x6d\
\x65\x74\x61\x20\x78\x6d\x6c\x6e\x73\x3a\x78\x3d\x22\x61\x64\x6f\
\x62\x65\x3a\x6e\x73\x3a\x6d\x65\x74\x61\x2f\x22\x20\x78\x3a\x78\
\x6d\x70\x74\x6b\x3d\x22\x41\x64\x6f\x62\x65\x20\x58\x4d\x50\x20\
\x43\x6f\x72\x65\x20\x35\x2e\x35\x2d\x63\x30\x31\x34\x20\x37\x39\
\x2e\x31\x35\x31\x34\x38\x31\x2c\x20\x32\x30\x31\x33\x2f\x30\x33\
\x2f\x31\x33\x2d\x31\x32\x3a\x30\x39\x3a\x31\x35\x20\x20\x20\x20\
\x20\x20\x20\x20\x22\x3e\x20\x3c\x72\x64\x66\x3a\x52\x44\x46\x20\
\x78\x6d\x6c\x6e\x73\x3a\x72\x64\x66\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x31\x39\x39\
\x39\x2f\x30\x32\x2f\x32\x32\x2d\x72\x64\x66\x2d\x73\x79\x6e\x74\
\x61\x78\x2d\x6e\x73\x23\x22\x3e\x20\x3c\x72\x64\x66\x3a\x44\x65\
\x73\x63\x72\x69\x70\x74\x69\x6f\x6e\x20\x72\x64\x66\x3a\x61\x62\
\x6f\x75\x74\x3d\x22\x22\x20\x78\x6d\x6c\x6e\x73\x3a\x78\x6d\x70\
\x4d\x4d\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x6e\x73\x2e\x61\x64\
\x6f\x62\x65\x2e\x63\x6f\x6d\x2f\x78\x61\x70\x2f\x31\x2e\x30\x2f\
\x6d\x6d\x2f\x22\x20\x78\x6d\x6c\x6e\x73\x3a\x73\x74\x52\x65\x66\
\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x6e\x73\x2e\x61\x64\x6f\x62\
\x65\x2e\x63\x6f\x6d\x2f\x78\x61\x70\x2f\x31\x2e\x30\x2f\x73\x54\
\x79\x70\x65\x2f\x52\x65\x73\x6f\x75\x72\x63\x65\x52\x65\x66\x23\
\x22\x20\x78\x6d\x6c\x6e\x73\x3a\x78\x6d\x70\x3d\x22\x68\x74\x74\
\x70\x3a\x2f\x2f\x6e\x73\x2e\x61\x64\x6f\x62\x65\x2e\x63\x6f\x6d\
\x2f\x78\x61\x70\x2f\x31\x2e\x30\x2f\x22\x20\x78\x6d\x70\x4d\x4d\
\x3a\x4f\x72\x69\x67\x69\x6e\x61\x6c\x44\x6f\x63\x75\x6d\x65\x6e\
\x74\x49\x44\x3d\x22\x78\x6d\x70\x2e\x64\x69\x64\x3a\x63\x34\x37\
\x39\x66\x62\x31\x39\x2d\x34\x33\x34\x34\x2d\x62\x65\x34\x33\x2d\
\x39\x39\x31\x31\x2d\x33\x64\x35\x32\x63\x31\x30\x33\x35\x37\x65\
\x31\x22\x20\x78\x6d\x70\x4d\x4d\x3a\x44\x6f\x63\x75\x6d\x65\x6e\
\x74\x49\x44\x3d\x22\x78\x6d\x70\x2e\x64\x69\x64\x3a\x31\x35\x41\
\x39\x46\x35\x46\x42\x35\x39\x38\x35\x31\x31\x45\x33\x38\x30\x36\
\x39\x39\x35\x43\x31\x38\x46\x35\x39\x35\x39\x46\x42\x22\x20\x78\
\x6d\x70\x4d\x4d\x3a\x49\x6e\x73\x74\x61\x6e\x63\x65\x49\x44\x3d\
\x22\x78\x6d\x70\x2e\x69\x69\x64\x3a\x31\x35\x41\x39\x46\x35\x46\
\x41\x35\x39\x38\x35\x31\x31\x45\x33\x38\x30\x36\x39\x39\x35\x43\
\x31\x38\x46\x35\x39\x35\x39\x46\x42\x22\x20\x78\x6d\x70\x3a\x43\
\x72\x65\x61\x74\x6f\x72\x54\x6f\x6f\x6c\x3d\x22\x41\x64\x6f\x62\
\x65\x20\x50\x68\x6f\x74\x6f\x73\x68\x6f\x70\x20\x43\x43\x20\x28\
\x57\x69\x6e\x64\x6f\x77\x73\x29\x22\x3e\x20\x3c\x78\x6d\x70\x4d\
\x4d\x3a\x44\x65\x72\x69\x76\x65\x64\x46\x72\x6f\x6d\x20\x73\x74\
\x52\x65\x66\x3a\x69\x6e\x73\x74\x61\x6e\x63\x65\x49\x44\x3d\x22\
\x78\x6d\x70\x2e\x69\x69\x64\x3a\x63\x31\x65\x61\x38\x31\x32\x35\
\x2d\x65\x62\x64\x64\x2d\x63\x38\x34\x62\x2d\x39\x66\x37\x61\x2d\
\x30\x36\x39\x39\x32\x39\x38\x64\x62\x39\x33\x63\x22\x20\x73\x74\
\x52\x65\x66\x3a\x64\x6f\x63\x75\x6d\x65\x6e\x74\x49\x44\x3d\x22\
\x78\x6d\x70\x2e\x64\x69\x64\x3a\x63\x34\x37\x39\x66\x62\x31\x39\
\x2d\x34\x33\x34\x34\x2d\x62\x65\x34\x33\x2d\x39\x39\x31\x31\x2d\
\x33\x64\x35\x32\x63\x31\x30\x33\x35\x37\x65\x31\x22\x2f\x3e\x20\
\x3c\x2f\x72\x64\x66\x3a\x44\x65\x73\x63\x72\x69\x70\x74\x69\x6f\
\x6e\x3e\x20\x3c\x2f\x72\x64\x66\x3a\x52\x44\x46\x3e\x20\x3c\x2f\
\x78\x3a\x78\x6d\x70\x6d\x65\x74\x61\x3e\x20\x3c\x3f\x78\x70\x61\
\x63\x6b\x65\x74\x20\x65\x6e\x64\x3d\x22\x72\x22\x3f\x3e\xec\xd9\
\x4e\x52\x00\x00\x01\xc5\x49\x44\x41\x54\x78\xda\x62\xfc\xff\xff\
\x3f\x03\x23\x23\x23\x03\x3e\xb0\x77\x5a\xd2\x04\x20\x95\xcf\x40\
\x1a\x98\xe8\x9c\x35\xaf\x00\x9f\x02\x90\xdd\x4c\x0c\x03\x0c\x58\
\x88\x54\x77\x0a\x88\x17\x92\x68\xf6\x29\x6a\x3a\x60\x07\x10\x1f\
\x23\xd1\x01\x9f\xa8\xe9\x80\x3a\x72\xd2\x00\x10\x17\x10\x52\x34\
\xe0\x69\x60\xc8\x24\xc2\x26\x20\x9e\x30\x90\x69\x20\x10\x88\x7d\
\x49\x74\xc0\x66\x20\x9e\x4b\x2d\x07\xe8\x02\xb1\x3f\x89\x0e\x78\
\x30\x24\xd2\xc0\xa8\x03\x58\xa0\x95\x4d\x22\x90\x92\xc7\xa3\xce\
\x82\x0c\xb3\x2d\x80\xe6\x36\xe0\x91\x7f\x08\xc4\xf3\x61\x89\x70\
\x1b\x10\xaf\x07\x62\x4b\x2a\x7a\xce\x1c\x8a\xb1\x81\xe3\xd0\x9c\
\x05\x89\x02\x60\xb5\xf9\x12\x48\x39\x92\x51\xe1\x90\x03\x40\x76\
\x38\x42\xed\x64\x60\x44\x6f\x0f\x00\x83\xad\x0c\x48\xb5\xd3\x20\
\x7d\xfc\x03\xe2\x4a\xa0\xc5\x5d\xc8\xed\x01\x46\x6c\x0d\x12\xa0\
\x23\xbc\x81\xd4\x72\x20\xe6\xa5\x92\xe5\x9f\x81\x38\x12\x68\xf9\
\x56\xf4\x06\x09\x23\xae\x16\x11\xd0\x11\xda\x40\x6a\x13\x10\x2b\
\x51\x68\xf9\x3d\x20\xf6\x03\x5a\x7e\x95\xa4\x16\x11\x54\x83\x19\
\x10\x1f\xa4\xc0\x72\x90\x5e\x33\x6c\x96\x13\x55\x0e\x00\x35\xbe\
\x05\x52\xae\x40\x3c\x93\x0c\xcb\x41\x7a\x5c\xa1\x66\xe0\x04\x8c\
\xc4\x34\x4a\xa1\x51\x92\x0b\xa4\xfa\x81\x98\x99\x80\xd2\xbf\x40\
\x5c\x08\xb4\x78\x32\x21\x33\xf1\xa6\x01\x1c\x8e\x70\x01\x52\xab\
\x81\x58\x00\x87\x92\x0f\x40\x1c\x0a\xb4\x7c\x0f\x31\xe6\x91\xec\
\x00\xa8\x23\xd4\xa0\x89\x53\x1d\x4d\xea\x26\x34\xb1\xdd\x22\xd6\
\x2c\xb2\x9a\xe5\x50\x0b\x40\x45\xf3\x2e\x24\xe1\xdd\x20\x31\x52\
\x2c\x27\x39\x0d\x60\x09\x09\x50\x5a\xe8\x85\x72\x8b\x81\x96\xff\
\x25\xd5\x0c\x78\x14\xec\x9b\x9e\x0c\x0a\x09\x4d\x20\x66\xa5\x53\
\x25\xf8\x1b\x88\x6f\x38\x65\xce\xfd\x0b\xab\x8c\xd6\x91\xd1\xe2\
\xa1\x14\x6c\x07\x62\x2f\x58\x1a\xf0\x1b\x80\xa6\x80\x27\x72\x41\
\xc4\xc8\x30\x40\x00\xe6\x80\x3f\x03\x60\xf7\x3f\x64\x07\x4c\x81\
\x09\xd0\xd1\xf2\xa9\x20\x06\x40\x80\x01\x00\x51\x94\x9c\xf8\x84\
\x67\x5d\xdc\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
\x00\x00\x07\x88\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x20\x00\x00\x00\x20\x08\x06\x00\x00\x00\x73\x7a\x7a\xf4\
\x00\x00\x00\x19\x74\x45\x58\x74\x53\x6f\x66\x74\x77\x61\x72\x65\
\x00\x41\x64\x6f\x62\x65\x20\x49\x6d\x61\x67\x65\x52\x65\x61\x64\
\x79\x71\xc9\x65\x3c\x00\x00\x03\x71\x69\x54\x58\x74\x58\x4d\x4c\
\x3a\x63\x6f\x6d\x2e\x61\x64\x6f\x62\x65\x2e\x78\x6d\x70\x00\x00\
\x00\x00\x00\x3c\x3f\x78\x70\x61\x63\x6b\x65\x74\x20\x62\x65\x67\
\x69\x6e\x3d\x22\xef\xbb\xbf\x22\x20\x69\x64\x3d\x22\x57\x35\x4d\
\x30\x4d\x70\x43\x65\x68\x69\x48\x7a\x72\x65\x53\x7a\x4e\x54\x63\
\x7a\x6b\x63\x39\x64\x22\x3f\x3e\x20\x3c\x78\x3a\x78\x6d\x70\x6d\
\x65\x74\x61\x20\x78\x6d\x6c\x6e\x73\x3a\x78\x3d\x22\x61\x64\x6f\
\x62\x65\x3a\x6e\x73\x3a\x6d\x65\x74\x61\x2f\x22\x20\x78\x3a\x78\
\x6d\x70\x74\x6b\x3d\x22\x41\x64\x6f\x62\x65\x20\x58\x4d\x50\x20\
\x43\x6f\x72\x65\x20\x35\x2e\x35\x2d\x63\x30\x31\x34\x20\x37\x39\
\x2e\x31\x35\x31\x34\x38\x31\x2c\x20\x32\x30\x31\x33\x2f\x30\x33\
\x2f\x31\x33\x2d\x31\x32\x3a\x30\x39\x3a\x31\x35\x20\x20\x20\x20\
\x20\x20\x20\x20\x22\x3e\x20\x3c\x72\x64\x66\x3a\x52\x44\x46\x20\
\x78\x6d\x6c\x6e\x73\x3a\x72\x64\x66\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x31\x39\x39\
\x39\x2f\x30\x32\x2f\x32\x32\x2d\x72\x64\x66\x2d\x73\x79\x6e\x74\
\x61\x78\x2d\x6e\x73\x23\x22\x3e\x20\x3c\x72\x64\x66\x3a\x44\x65\
\x73\x63\x72\x69\x70\x74\x69\x6f\x6e\x20\x72\x64\x66\x3a\x61\x62\
\x6f\x75\x74\x3d\x22\x22\x20\x78\x6d\x6c\x6e\x73\x3a\x78\x6d\x70\
\x4d\x4d\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x6e\x73\x2e\x61\x64\
\x6f\x62\x65\x2e\x63\x6f\x6d\x2f\x78\x61\x70\x2f\x31\x2e\x30\x2f\
\x6d\x6d\x2f\x22\x20\x78\x6d\x6c\x6e\x73\x3a\x73\x74\x52\x65\x66\
\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x6e\x73\x2e\x61\x64\x6f\x62\
\x65\x2e\x63\x6f\x6d\x2f\x78\x61\x70\x2f\x31\x2e\x30\x2f\x73\x54\
\x79\x70\x65\x2f\x52\x65\x73\x6f\x75\x72\x63\x65\x52\x65\x66\x23\
\x22\x20\x78\x6d\x6c\x6e\x73\x3a\x78\x6d\x70\x3d\x22\x68\x74\x74\
\x70\x3a\x2f\x2f\x6e\x73\x2e\x61\x64\x6f\x62\x65\x2e\x63\x6f\x6d\
\x2f\x78\x61\x70\x2f\x31\x2e\x30\x2f\x22\x20\x78\x6d\x70\x4d\x4d\
\x3a\x4f\x72\x69\x67\x69\x6e\x61\x6c\x44\x6f\x63\x75\x6d\x65\x6e\
\x74\x49\x44\x3d\x22\x78\x6d\x70\x2e\x64\x69\x64\x3a\x63\x34\x37\
\x39\x66\x62\x31\x39\x2d\x34\x33\x34\x34\x2d\x62\x65\x34\x33\x2d\
\x39\x39\x31\x31\x2d\x33\x64\x35\x32\x63\x31\x30\x33\x35\x37\x65\
\x31\x22\x20\x78\x6d\x70\x4d\x4d\x3a\x44\x6f\x63\x75\x6d\x65\x6e\
\x74\x49\x44\x3d\x22\x78\x6d\x70\x2e\x64\x69\x64\x3a\x43\x42\x44\
\x44\x46\x43\x41\x38\x35\x39\x38\x44\x31\x31\x45\x33\x38\x45\x43\
\x31\x45\x30\x35\x34\x36\x33\x33\x31\x39\x46\x38\x34\x22\x20\x78\
\x6d\x70\x4d\x4d\x3a\x49\x6e\x73\x74\x61\x6e\x63\x65\x49\x44\x3d\
\x22\x78\x6d\x70\x2e\x69\x69\x64\x3a\x43\x42\x44\x44\x46\x43\x41\
\x37\x35\x39\x38\x44\x31\x31\x45\x33\x38\x45\x43\x31\x45\x30\x35\
\x34\x36\x33\x33\x31\x39\x46\x38\x34\x22\x20\x78\x6d\x70\x3a\x43\
\x72\x65\x61\x74\x6f\x72\x54\x6f\x6f\x6c\x3d\x22\x41\x64\x6f\x62\
\x65\x20\x50\x68\x6f\x74\x6f\x73\x68\x6f\x70\x20\x43\x43\x20\x28\
\x57\x69\x6e\x64\x6f\x77\x73\x29\x22\x3e\x20\x3c\x78\x6d\x70\x4d\
\x4d\x3a\x44\x65\x72\x69\x76\x65\x64\x46\x72\x6f\x6d\x20\x73\x74\
\x52\x65\x66\x3a\x69\x6e\x73\x74\x61\x6e\x63\x65\x49\x44\x3d\x22\
\x78\x6d\x70\x2e\x69\x69\x64\x3a\x37\x31\x65\x61\x35\x61\x61\x61\
\x2d\x63\x33\x30\x31\x2d\x30\x61\x34\x35\x2d\x62\x36\x30\x35\x2d\
\x38\x64\x36\x65\x63\x37\x33\x34\x65\x37\x62\x66\x22\x20\x73\x74\
\x52\x65\x66\x3a\x64\x6f\x63\x75\x6d\x65\x6e\x74\x49\x44\x3d\x22\
\x78\x6d\x70\x2e\x64\x69\x64\x3a\x63\x34\x37\x39\x66\x62\x31\x39\
\x2d\x34\x33\x34\x34\x2d\x62\x65\x34\x33\x2d\x39\x39\x31\x31\x2d\
\x33\x64\x35\x32\x63\x31\x30\x33\x35\x37\x65\x31\x22\x2f\x3e\x20\
\x3c\x2f\x72\x64\x66\x3a\x44\x65\x73\x63\x72\x69\x70\x74\x69\x6f\
\x6e\x3e\x20\x3c\x2f\x72\x64\x66\x3a\x52\x44\x46\x3e\x20\x3c\x2f\
\x78\x3a\x78\x6d\x70\x6d\x65\x74\x61\x3e\x20\x3c\x3f\x78\x70\x61\
\x63\x6b\x65\x74\x20\x65\x6e\x64\x3d\x22\x72\x22\x3f\x3e\xf7\xcb\
\xb3\xc5\x00\x00\x03\xad\x49\x44\x41\x54\x78\xda\xbc\x97\x69\x48\
\x55\x41\x14\xc7\xef\xd3\xd7\x26\x2d\x96\x18\x25\xb6\x88\x49\x54\
\x52\x7e\x50\xd3\xca\x28\x69\xa7\x3d\xa9\x84\xac\x8c\xa2\x5d\x92\
\x16\x22\x41\xcb\x16\xcb\x82\x20\x89\x8a\x84\x6c\x21\x22\xac\x88\
\x56\xbf\xd8\xf2\x21\xa5\x04\x97\x28\x08\x6c\xd1\x24\x22\x42\x4d\
\xc5\x35\xad\xff\x81\xff\xc4\x74\xbb\xbd\xad\x67\x07\x7e\xcc\xdc\
\x99\x37\x67\xce\xcc\x9c\x39\x73\x9e\x6d\xf7\xe5\x22\x9b\x61\x18\
\x29\x60\x23\x08\x32\x3c\x93\x47\x20\xe1\x78\x52\x6c\xa7\xbb\x03\
\x6d\x30\x60\x2f\xca\x2c\xd0\x0a\xf2\x40\x39\xeb\xae\x88\x1f\x38\
\xcd\xfa\x25\xb0\x16\x46\xfc\x70\xc7\x00\x3b\xd8\x09\x9a\xc0\x54\
\x0c\x2e\x75\x67\x30\x8c\xf7\xa7\x01\x55\x60\x35\xa8\x05\xa9\xee\
\xee\x80\x58\x7c\x0c\x1c\x01\x83\x40\x3d\x77\x60\x88\xe9\xb7\x9f\
\x41\x6f\xe0\xcf\xef\x56\x52\x07\x56\x80\x24\x30\x1f\xa4\x63\x21\
\x07\x5d\x35\xc0\x87\x65\x09\x58\x07\xde\x83\x1d\x20\x86\x75\x9d\
\x18\xf6\xa9\xef\x6b\x9a\x9e\x76\x1a\x21\xbe\x90\x89\x45\x6d\x75\
\xd7\x80\xef\xa0\x18\x1c\x00\x1f\xb8\x4a\xa9\x6f\x07\x33\x59\xf7\
\x67\xdf\x02\xb0\x81\xfe\xf2\x4b\xb0\xea\x66\x14\x8b\xc1\x73\x70\
\x0a\x46\x24\xba\x63\x80\xc1\x15\x66\x80\x0b\xe0\x16\xeb\x33\xb8\
\x3b\x19\x6c\x93\xbe\x1a\x10\x2e\x0e\x67\xf2\x25\x31\xa2\x01\xc5\
\x1c\xf0\x1a\x5c\x84\x11\xf3\xdc\x31\xe0\x09\x1d\x28\x95\xc7\x11\
\x02\xee\x73\xe5\x21\x1a\xa2\x3c\x13\xac\x04\xea\xda\x45\x68\x3b\
\x21\x3e\x11\x0f\xaa\x41\x3e\x8c\x98\xec\x8a\x13\x2e\x01\x23\xc1\
\x49\xb6\x97\x43\x51\x04\xfb\x92\x51\xcf\x73\x70\x13\xc4\xf3\x7d\
\xc1\x24\xfc\xee\x95\xd6\x1e\x8c\xa2\x08\xf4\x07\x71\xe8\xab\x70\
\x66\xc0\x03\xd0\xc7\xe2\x37\xcd\x18\xdc\xee\xc0\x80\x74\xfa\x48\
\x23\xc8\x05\x2f\x81\x8a\x05\xa3\x40\x1a\xf8\x02\x22\xa1\xe7\xa3\
\x55\x1c\x50\x22\xce\xb5\xcd\x62\x8e\xa3\x98\xa4\xaf\xd6\x57\x09\
\x45\xeb\xb5\xfe\x43\xa0\x03\x6c\xe2\x2d\xb1\x59\xe8\x18\x0c\x42\
\x81\x43\x03\x1a\xe8\xe5\x66\xe9\xe2\x04\xaa\xef\x93\xc9\xfb\xbb\
\x18\x49\xb3\x1c\x04\xab\x3a\x57\x9c\x30\x90\xce\x24\xec\xa3\x13\
\x49\x3d\x9b\x5b\x1c\x8a\xc9\xc4\xf3\x23\xa1\xb4\x0c\xe4\x1a\x5e\
\x10\x7d\x07\xe4\x7a\x15\x72\x27\x02\xf8\xfd\x58\xeb\x6f\x61\x59\
\xcc\x88\x59\xe9\x6d\x03\xc6\xca\xbd\xc7\x2a\xc7\x63\x75\x12\xdb\
\x07\xb0\x7d\x0b\xda\xae\x6a\x5b\x9e\x69\x78\x51\x74\x03\x64\x65\
\x6d\xca\xf1\x40\x2f\xd6\xcb\x8c\x6e\x14\xbb\xb6\x32\x79\x09\x4b\
\x59\x3f\x63\xfc\x27\xb1\xbb\xf0\xe4\x46\x32\x59\x09\xf4\x70\x8e\
\x1e\xea\xba\x42\xd7\x57\xd6\xa5\xcc\xc5\x42\x8b\xed\x4e\x26\x9f\
\x8d\xe2\x8e\xa6\xe4\x5f\xc4\x1c\x92\x93\xa1\x7f\xa9\x8f\x93\x41\
\xd9\x5e\x9a\xfc\x6f\x21\xe0\x84\x33\x03\xc6\xb1\xbc\x02\xce\xea\
\xd6\x83\x7b\x60\x3a\x58\xc8\x50\x2e\x0f\x50\x01\x5f\x50\xf9\x96\
\xdd\x7b\xab\x8d\xb9\xc1\xf6\x04\xf0\x46\x85\x6a\x67\x06\xf8\xb2\
\xac\x60\x60\x52\x72\x1b\x84\x31\x01\x09\xe0\x64\x85\xcc\x90\x3a\
\xf9\x74\x17\x30\xfc\x2a\x49\xe1\x42\xe6\x82\xf3\x2e\x3b\x21\xa5\
\x91\xaf\xa5\x92\x26\x2d\x4e\x7c\xd3\x1e\xb1\x76\x06\x28\x79\xd2\
\x83\x99\x6f\x2a\x59\xce\x1c\xa2\x8a\x8f\xd6\x1f\xa1\xd8\x99\x01\
\x3d\x59\xaf\x85\xf7\x76\x68\xb9\x61\x8b\x66\x4c\x33\x03\x9a\x3c\
\x4a\xab\x24\x29\xe1\xa4\x37\xc1\x43\x39\x73\xb0\xc8\x93\x1d\x88\
\xe6\xa0\x7e\x92\xb8\xc0\x7b\xe3\xb5\x40\xd5\xa0\xbd\x80\x52\x1f\
\x0a\x46\xf0\x7b\x0d\xa9\x66\xb2\xfb\x82\x69\x7f\xb4\xab\x06\xc8\
\x2b\x36\x90\xe7\x17\xcb\x6c\x28\x91\x79\xa2\x92\xc3\x4c\xd1\x44\
\x66\x81\x29\x16\x7a\x86\x83\xa7\x4c\x50\x24\xcf\x8c\x63\x7b\x9b\
\x33\x03\xae\x33\x08\x89\x44\x11\xb3\x4c\xd3\xea\xa3\x1d\xe8\x0a\
\x23\xbf\xe9\xf7\x31\x79\xbb\x59\x76\xf1\xfc\xba\x43\xee\xca\x6e\
\xa8\x1d\x08\xe7\x3d\x35\x4c\xc9\x86\x78\xfb\x32\x9c\x79\x10\xb3\
\x1a\x4f\x44\x8e\x24\x87\xa9\x7c\x89\x0a\xc5\xd0\x5d\xa3\x72\xc2\
\x7a\xde\xdd\x28\x34\xbe\xf3\xe6\x12\xa1\xdb\x8f\xf1\x61\x22\x98\
\x60\x95\x98\xca\x0e\x9c\x03\x7b\xe4\x0f\x05\x06\xe4\x30\xa9\xec\
\xf2\xc2\xfc\xc3\xc0\x66\x30\x06\x3c\xa3\x5e\xcb\xd7\x30\x8d\x59\
\xac\x24\x95\xfb\xbd\x7c\xce\x12\x2f\xf2\x99\xd4\x58\xfe\x6b\xfe\
\x29\xc0\x00\x12\x09\x26\xe1\x36\xa9\x6f\xb1\x00\x00\x00\x00\x49\
\x45\x4e\x44\xae\x42\x60\x82\
\x00\x00\x06\xbf\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x20\x00\x00\x00\x20\x08\x06\x00\x00\x00\x73\x7a\x7a\xf4\
\x00\x00\x00\x19\x74\x45\x58\x74\x53\x6f\x66\x74\x77\x61\x72\x65\
\x00\x41\x64\x6f\x62\x65\x20\x49\x6d\x61\x67\x65\x52\x65\x61\x64\
\x79\x71\xc9\x65\x3c\x00\x00\x03\x71\x69\x54\x58\x74\x58\x4d\x4c\
\x3a\x63\x6f\x6d\x2e\x61\x64\x6f\x62\x65\x2e\x78\x6d\x70\x00\x00\
\x00\x00\x00\x3c\x3f\x78\x70\x61\x63\x6b\x65\x74\x20\x62\x65\x67\
\x69\x6e\x3d\x22\xef\xbb\xbf\x22\x20\x69\x64\x3d\x22\x57\x35\x4d\
\x30\x4d\x70\x43\x65\x68\x69\x48\x7a\x72\x65\x53\x7a\x4e\x54\x63\
\x7a\x6b\x63\x39\x64\x22\x3f\x3e\x20\x3c\x78\x3a\x78\x6d\x70\x6d\
\x65\x74\x61\x20\x78\x6d\x6c\x6e\x73\x3a\x78\x3d\x22\x61\x64\x6f\
\x62\x65\x3a\x6e\x73\x3a\x6d\x65\x74\x61\x2f\x22\x20\x78\x3a\x78\
\x6d\x70\x74\x6b\x3d\x22\x41\x64\x6f\x62\x65\x20\x58\x4d\x50\x20\
\x43\x6f\x72\x65\x20\x35\x2e\x35\x2d\x63\x30\x31\x34\x20\x37\x39\
\x2e\x31\x35\x31\x34\x38\x31\x2c\x20\x32\x30\x31\x33\x2f\x30\x33\
\x2f\x31\x33\x2d\x31\x32\x3a\x30\x39\x3a\x31\x35\x20\x20\x20\x20\
\x20\x20\x20\x20\x22\x3e\x20\x3c\x72\x64\x66\x3a\x52\x44\x46\x20\
\x78\x6d\x6c\x6e\x73\x3a\x72\x64\x66\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x31\x39\x39\
\x39\x2f\x30\x32\x2f\x32\x32\x2d\x72\x64\x66\x2d\x73\x79\x6e\x74\
\x61\x78\x2d\x6e\x73\x23\x22\x3e\x20\x3c\x72\x64\x66\x3a\x44\x65\
\x73\x63\x72\x69\x70\x74\x69\x6f\x6e\x20\x72\x64\x66\x3a\x61\x62\
\x6f\x75\x74\x3d\x22\x22\x20\x78\x6d\x6c\x6e\x73\x3a\x78\x6d\x70\
\x4d\x4d\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x6e\x73\x2e\x61\x64\
\x6f\x62\x65\x2e\x63\x6f\x6d\x2f\x78\x61\x70\x2f\x31\x2e\x30\x2f\
\x6d\x6d\x2f\x22\x20\x78\x6d\x6c\x6e\x73\x3a\x73\x74\x52\x65\x66\
\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x6e\x73\x2e\x61\x64\x6f\x62\
\x65\x2e\x63\x6f\x6d\x2f\x78\x61\x70\x2f\x31\x2e\x30\x2f\x73\x54\
\x79\x70\x65\x2f\x52\x65\x73\x6f\x75\x72\x63\x65\x52\x65\x66\x23\
\x22\x20\x78\x6d\x6c\x6e\x73\x3a\x78\x6d\x70\x3d\x22\x68\x74\x74\
\x70\x3a\x2f\x2f\x6e\x73\x2e\x61\x64\x6f\x62\x65\x2e\x63\x6f\x6d\
\x2f\x78\x61\x70\x2f\x31\x2e\x30\x2f\x22\x20\x78\x6d\x70\x4d\x4d\
\x3a\x4f\x72\x69\x67\x69\x6e\x61\x6c\x44\x6f\x63\x75\x6d\x65\x6e\
\x74\x49\x44\x3d\x22\x78\x6d\x70\x2e\x64\x69\x64\x3a\x63\x34\x37\
\x39\x66\x62\x31\x39\x2d\x34\x33\x34\x34\x2d\x62\x65\x34\x33\x2d\
\x39\x39\x31\x31\x2d\x33\x64\x35\x32\x63\x31\x30\x33\x35\x37\x65\
\x31\x22\x20\x78\x6d\x70\x4d\x4d\x3a\x44\x6f\x63\x75\x6d\x65\x6e\
\x74\x49\x44\x3d\x22\x78\x6d\x70\x2e\x64\x69\x64\x3a\x42\x43\x39\
\x35\x36\x46\x31\x41\x35\x39\x38\x34\x31\x31\x45\x33\x38\x45\x35\
\x31\x43\x44\x35\x41\x43\x39\x34\x42\x36\x39\x45\x41\x22\x20\x78\
\x6d\x70\x4d\x4d\x3a\x49\x6e\x73\x74\x61\x6e\x63\x65\x49\x44\x3d\
\x22\x78\x6d\x70\x2e\x69\x69\x64\x3a\x42\x43\x39\x35\x36\x46\x31\
\x39\x35\x39\x38\x34\x31\x31\x45\x33\x38\x45\x35\x31\x43\x44\x35\
\x41\x43\x39\x34\x42\x36\x39\x45\x41\x22\x20\x78\x6d\x70\x3a\x43\
\x72\x65\x61\x74\x6f\x72\x54\x6f\x6f\x6c\x3d\x22\x41\x64\x6f\x62\
\x65\x20\x50\x68\x6f\x74\x6f\x73\x68\x6f\x70\x20\x43\x43\x20\x28\
\x57\x69\x6e\x64\x6f\x77\x73\x29\x22\x3e\x20\x3c\x78\x6d\x70\x4d\
\x4d\x3a\x44\x65\x72\x69\x76\x65\x64\x46\x72\x6f\x6d\x20\x73\x74\
\x52\x65\x66\x3a\x69\x6e\x73\x74\x61\x6e\x63\x65\x49\x44\x3d\x22\
\x78\x6d\x70\x2e\x69\x69\x64\x3a\x63\x31\x65\x61\x38\x31\x32\x35\
\x2d\x65\x62\x64\x64\x2d\x63\x38\x34\x62\x2d\x39\x66\x37\x61\x2d\
\x30\x36\x39\x39\x32\x39\x38\x64\x62\x39\x33\x63\x22\x20\x73\x74\
\x52\x65\x66\x3a\x64\x6f\x63\x75\x6d\x65\x6e\x74\x49\x44\x3d\x22\
\x78\x6d\x70\x2e\x64\x69\x64\x3a\x63\x34\x37\x39\x66\x62\x31\x39\
\x2d\x34\x33\x34\x34\x2d\x62\x65\x34\x33\x2d\x39\x39\x31\x31\x2d\
\x33\x64\x35\x32\x63\x31\x30\x33\x35\x37\x65\x31\x22\x2f\x3e\x20\
\x3c\x2f\x72\x64\x66\x3a\x44\x65\x73\x63\x72\x69\x70\x74\x69\x6f\
\x6e\x3e\x20\x3c\x2f\x72\x64\x66\x3a\x52\x44\x46\x3e\x20\x3c\x2f\
\x78\x3a\x78\x6d\x70\x6d\x65\x74\x61\x3e\x20\x3c\x3f\x78\x70\x61\
\x63\x6b\x65\x74\x20\x65\x6e\x64\x3d\x22\x72\x22\x3f\x3e\x5a\x6c\
\x96\x67\x00\x00\x02\xe4\x49\x44\x41\x54\x78\xda\xc4\x57\xbd\x6b\
\x14\x41\x14\xdf\xdd\x9c\x88\xa0\xb1\xb1\x12\xab\xa0\x9c\x26\xa8\
\x8d\x70\x82\xf8\x41\x8a\xa0\x8d\x45\xf0\x0f\x30\x9c\x28\x24\x70\
\x20\x39\xa3\x85\xa4\x38\xd3\xa8\x88\x72\x45\x2a\xb5\x4e\x95\x42\
\x2d\xd6\x14\x21\x0a\x16\x57\x08\x7e\x25\xb9\xcb\xa9\x58\x04\x0b\
\x89\x85\xa7\x9d\x90\xf3\xf7\xe0\xb7\x30\x8c\xfb\x76\x76\x97\x9c\
\x3e\xf8\x31\xc3\xbc\x79\x1f\xf3\xe6\xcd\x9b\x19\xdf\xcb\x41\x95\
\x4a\xa5\x8a\x66\x1a\xd8\xc5\xa1\x0e\x50\xab\xd7\xeb\xf7\xb2\xea\
\xf2\x73\x18\xdf\x8b\x66\x3d\x46\xb6\x0b\xec\x83\x13\x5f\xb3\xe8\
\x0b\x72\x04\xe0\x80\xe2\xb8\x4f\x5e\x26\x2a\x28\xab\x1c\x43\x33\
\x0e\x6c\x00\x55\xac\x6a\x39\x65\xd4\x7c\x4b\xcf\x29\xd9\x1a\xda\
\x99\x81\x9e\xd0\x19\x01\x08\xdd\x40\xf3\x18\x38\x06\x9c\x05\x96\
\x30\x76\x98\x3c\x31\x30\x90\xe0\xc0\x00\xe7\xc8\xdc\x61\x34\x62\
\xf0\x34\x70\x02\x78\x82\xb1\xf3\x2e\x8f\xc7\x68\xdc\x26\x89\xc4\
\x5d\xe0\x52\x8a\x30\x7f\x04\xe6\x80\x49\x60\x87\xc5\xfb\x2d\x8b\
\x42\x24\x16\xb5\x08\x8c\x2b\x4a\xf7\x00\xb7\x53\xee\xf1\x7e\xe0\
\x66\x8c\x71\xa1\x6d\xc0\x95\xa4\x2d\xd8\xf0\x7a\x4f\xdf\x93\x1c\
\xa8\xf6\xd8\x89\x4f\xc0\xad\xc4\x3a\xc0\x84\x5b\x64\xd8\x93\xa8\
\x05\xac\xb2\x7f\x08\x28\xa6\x30\x7e\x06\xfb\xbf\xee\x2c\x44\x70\
\x62\x8a\x7b\x1e\x47\xcf\x81\x29\x28\x7a\x67\xc9\x1c\x61\xa2\x8e\
\x28\x72\x13\x90\x99\x75\x56\x42\x1e\xa3\x96\x92\x70\xf7\x25\xbb\
\xa1\xa8\xab\x38\x2e\xb2\x52\x8e\xaf\xc6\xb0\xd7\x80\x83\xb6\xac\
\x6f\x94\xd7\xa8\xc2\xc9\x39\x7f\xa4\xac\xfc\x5c\xa4\x00\x32\x92\
\x3f\x83\xe4\xad\x60\x7c\xd3\x70\x22\x54\x22\x51\x06\x3e\xb3\x6c\
\xb7\xa5\x6c\xfb\x10\x98\x64\xe8\x5c\xf7\xc2\xd1\x28\xec\x90\x91\
\x3d\x9f\x97\x15\x91\xd7\x04\x46\xc1\x5f\x35\xb6\xe3\xad\x43\x9f\
\x38\x71\x2d\xe0\xad\xe6\x32\xde\x32\x8c\x07\x96\x71\x8f\xfd\x79\
\xf2\x3c\xce\x6d\xa5\xb8\x08\xa7\x45\xa0\x3f\xc5\xf1\x69\x1a\xfd\
\x21\xcb\xb8\xe9\xc4\xa0\x22\xa3\x51\x7f\xe0\xfd\x67\x0a\xf8\x98\
\x70\x91\xb9\xe2\x65\x65\x75\x32\xb6\xa2\xc8\x68\xd4\x09\x78\x5d\
\x76\x1d\x13\x8b\x4c\x2c\x8f\xd9\x3e\x6a\x39\x11\x25\xe1\xa6\x91\
\x84\xc5\x14\x49\x58\xcb\x72\x0c\x17\x78\x93\x99\xc7\x70\x28\x8a\
\x4a\xee\x63\xa8\x14\x93\x35\xde\x6a\x5b\x59\x88\xda\x12\x15\x5b\
\xf6\xaf\x24\xe4\x84\x39\x25\x6c\xa2\x38\x8c\xb6\x23\xa6\x14\x87\
\x8a\x71\xa1\x87\x71\x8e\xc7\x45\x40\x5e\x32\xcf\x94\xfb\xdc\xbe\
\x8c\x9a\x46\xc2\xb9\xf6\x5c\x6e\xd9\x61\x38\xf1\xde\xf5\x86\x0b\
\x53\x18\xcf\x4b\xe2\xc4\x49\x38\xd1\xd4\xb6\xa0\xd6\x43\xe3\xd1\
\xcb\xea\x7a\x52\x0e\x14\xfe\x41\xed\x29\x24\x39\x30\xc3\x87\x63\
\xdc\x63\x62\x82\xa7\xc3\x45\x6d\xae\x32\xee\x65\xd5\xe1\x29\x89\
\x77\x80\xef\xf6\x0b\x96\x13\xd1\x4b\x66\x96\xc9\x56\x4e\x30\x5e\
\xe6\x51\xbb\x23\x09\x67\x39\x21\xc6\x47\xc0\x7b\x63\x0a\xf4\xd9\
\x1a\x1a\x8d\x46\xab\x54\x2a\xbd\x62\x2e\xbc\x04\x2e\x46\xdf\x2d\
\xf0\x3c\xf0\x76\xcb\x98\xe2\xc0\x03\xcc\xfd\xc2\xb9\xdf\x30\xf7\
\x29\xba\x3b\x81\x0f\xc0\x65\xf0\x5e\x6f\xc5\xdf\x50\x3e\x1a\x4b\
\x0a\x5b\x22\xf5\xa2\xd7\x7f\xc3\xb6\x72\x77\x74\xc9\xcb\x44\x7d\
\x59\x05\x10\xda\x9f\x08\xed\x2f\x74\x8f\x03\xdb\x39\xfc\x43\x3e\
\x23\x58\xfd\x42\x56\x7d\x7f\x04\x18\x00\xe5\xb8\x12\x0b\xa0\x46\
\x15\xe4\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
\x00\x00\x05\xa0\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x20\x00\x00\x00\x20\x08\x06\x00\x00\x00\x73\x7a\x7a\xf4\
\x00\x00\x00\x19\x74\x45\x58\x74\x53\x6f\x66\x74\x77\x61\x72\x65\
\x00\x41\x64\x6f\x62\x65\x20\x49\x6d\x61\x67\x65\x52\x65\x61\x64\
\x79\x71\xc9\x65\x3c\x00\x00\x03\x71\x69\x54\x58\x74\x58\x4d\x4c\
\x3a\x63\x6f\x6d\x2e\x61\x64\x6f\x62\x65\x2e\x78\x6d\x70\x00\x00\
\x00\x00\x00\x3c\x3f\x78\x70\x61\x63\x6b\x65\x74\x20\x62\x65\x67\
\x69\x6e\x3d\x22\xef\xbb\xbf\x22\x20\x69\x64\x3d\x22\x57\x35\x4d\
\x30\x4d\x70\x43\x65\x68\x69\x48\x7a\x72\x65\x53\x7a\x4e\x54\x63\
\x7a\x6b\x63\x39\x64\x22\x3f\x3e\x20\x3c\x78\x3a\x78\x6d\x70\x6d\
\x65\x74\x61\x20\x78\x6d\x6c\x6e\x73\x3a\x78\x3d\x22\x61\x64\x6f\
\x62\x65\x3a\x6e\x73\x3a\x6d\x65\x74\x61\x2f\x22\x20\x78\x3a\x78\
\x6d\x70\x74\x6b\x3d\x22\x41\x64\x6f\x62\x65\x20\x58\x4d\x50\x20\
\x43\x6f\x72\x65\x20\x35\x2e\x35\x2d\x63\x30\x31\x34\x20\x37\x39\
\x2e\x31\x35\x31\x34\x38\x31\x2c\x20\x32\x30\x31\x33\x2f\x30\x33\
\x2f\x31\x33\x2d\x31\x32\x3a\x30\x39\x3a\x31\x35\x20\x20\x20\x20\
\x20\x20\x20\x20\x22\x3e\x20\x3c\x72\x64\x66\x3a\x52\x44\x46\x20\
\x78\x6d\x6c\x6e\x73\x3a\x72\x64\x66\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x31\x39\x39\
\x39\x2f\x30\x32\x2f\x32\x32\x2d\x72\x64\x66\x2d\x73\x79\x6e\x74\
\x61\x78\x2d\x6e\x73\x23\x22\x3e\x20\x3c\x72\x64\x66\x3a\x44\x65\
\x73\x63\x72\x69\x70\x74\x69\x6f\x6e\x20\x72\x64\x66\x3a\x61\x62\
\x6f\x75\x74\x3d\x22\x22\x20\x78\x6d\x6c\x6e\x73\x3a\x78\x6d\x70\
\x4d\x4d\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x6e\x73\x2e\x61\x64\
\x6f\x62\x65\x2e\x63\x6f\x6d\x2f\x78\x61\x70\x2f\x31\x2e\x30\x2f\
\x6d\x6d\x2f\x22\x20\x78\x6d\x6c\x6e\x73\x3a\x73\x74\x52\x65\x66\
\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x6e\x73\x2e\x61\x64\x6f\x62\
\x65\x2e\x63\x6f\x6d\x2f\x78\x61\x70\x2f\x31\x2e\x30\x2f\x73\x54\
\x79\x70\x65\x2f\x52\x65\x73\x6f\x75\x72\x63\x65\x52\x65\x66\x23\
\x22\x20\x78\x6d\x6c\x6e\x73\x3a\x78\x6d\x70\x3d\x22\x68\x74\x74\
\x70\x3a\x2f\x2f\x6e\x73\x2e\x61\x64\x6f\x62\x65\x2e\x63\x6f\x6d\
\x2f\x78\x61\x70\x2f\x31\x2e\x30\x2f\x22\x20\x78\x6d\x70\x4d\x4d\
\x3a\x4f\x72\x69\x67\x69\x6e\x61\x6c\x44\x6f\x63\x75\x6d\x65\x6e\
\x74\x49\x44\x3d\x22\x78\x6d\x70\x2e\x64\x69\x64\x3a\x63\x34\x37\
\x39\x66\x62\x31\x39\x2d\x34\x33\x34\x34\x2d\x62\x65\x34\x33\x2d\
\x39\x39\x31\x31\x2d\x33\x64\x35\x32\x63\x31\x30\x33\x35\x37\x65\
\x31\x22\x20\x78\x6d\x70\x4d\x4d\x3a\x44\x6f\x63\x75\x6d\x65\x6e\
\x74\x49\x44\x3d\x22\x78\x6d\x70\x2e\x64\x69\x64\x3a\x31\x45\x45\
\x31\x42\x33\x30\x45\x35\x39\x38\x35\x31\x31\x45\x33\x38\x46\x34\
\x31\x43\x38\x46\x38\x31\x43\x33\x30\x33\x39\x39\x45\x22\x20\x78\
\x6d\x70\x4d\x4d\x3a\x49\x6e\x73\x74\x61\x6e\x63\x65\x49\x44\x3d\
\x22\x78\x6d\x70\x2e\x69\x69\x64\x3a\x31\x45\x45\x31\x42\x33\x30\
\x44\x35\x39\x38\x35\x31\x31\x45\x33\x38\x46\x34\x31\x43\x38\x46\
\x38\x31\x43\x33\x30\x33\x39\x39\x45\x22\x20\x78\x6d\x70\x3a\x43\
\x72\x65\x61\x74\x6f\x72\x54\x6f\x6f\x6c\x3d\x22\x41\x64\x6f\x62\
\x65\x20\x50\x68\x6f\x74\x6f\x73\x68\x6f\x70\x20\x43\x43\x20\x28\
\x57\x69\x6e\x64\x6f\x77\x73\x29\x22\x3e\x20\x3c\x78\x6d\x70\x4d\
\x4d\x3a\x44\x65\x72\x69\x76\x65\x64\x46\x72\x6f\x6d\x20\x73\x74\
\x52\x65\x66\x3a\x69\x6e\x73\x74\x61\x6e\x63\x65\x49\x44\x3d\x22\
\x78\x6d\x70\x2e\x69\x69\x64\x3a\x63\x31\x65\x61\x38\x31\x32\x35\
\x2d\x65\x62\x64\x64\x2d\x63\x38\x34\x62\x2d\x39\x66\x37\x61\x2d\
\x30\x36\x39\x39\x32\x39\x38\x64\x62\x39\x33\x63\x22\x20\x73\x74\
\x52\x65\x66\x3a\x64\x6f\x63\x75\x6d\x65\x6e\x74\x49\x44\x3d\x22\
\x78\x6d\x70\x2e\x64\x69\x64\x3a\x63\x34\x37\x39\x66\x62\x31\x39\
\x2d\x34\x33\x34\x34\x2d\x62\x65\x34\x33\x2d\x39\x39\x31\x31\x2d\
\x33\x64\x35\x32\x63\x31\x30\x33\x35\x37\x65\x31\x22\x2f\x3e\x20\
\x3c\x2f\x72\x64\x66\x3a\x44\x65\x73\x63\x72\x69\x70\x74\x69\x6f\
\x6e\x3e\x20\x3c\x2f\x72\x64\x66\x3a\x52\x44\x46\x3e\x20\x3c\x2f\
\x78\x3a\x78\x6d\x70\x6d\x65\x74\x61\x3e\x20\x3c\x3f\x78\x70\x61\
\x63\x6b\x65\x74\x20\x65\x6e\x64\x3d\x22\x72\x22\x3f\x3e\x68\x12\
\x49\xb3\x00\x00\x01\xc5\x49\x44\x41\x54\x78\xda\x62\xfc\xff\xff\
\x3f\x03\x23\x23\x23\x03\x3e\x90\x97\x97\x37\x01\x48\xe5\x33\x90\
\x06\x26\x4e\x9a\x34\xa9\x00\x9f\x02\x90\xdd\x4c\x0c\x03\x0c\x58\
\x88\x54\x77\x0a\x88\x17\x92\x68\xf6\x29\x6a\x3a\x60\x07\x10\x1f\
\x23\xd1\x01\x9f\xa8\xe9\x80\x3a\x72\xd2\x00\x10\x17\x10\x52\x34\
\xe0\x69\x60\xc8\x24\xc2\x26\x20\x9e\x30\x90\x69\x20\x10\x88\x7d\
\x49\x74\xc0\x66\x20\x9e\x4b\x2d\x07\xe8\x02\xb1\x3f\x89\x0e\x78\
\x30\x24\xd2\xc0\xa8\x03\x58\xa0\x95\x4d\x22\x90\x92\xc7\xa3\xce\
\x82\x0c\xb3\x2d\x80\xe6\x36\xe0\x91\x7f\x08\xc4\xf3\x61\x89\x70\
\x1b\x10\xaf\x07\x62\x4b\x2a\x7a\xce\x1c\x8a\xb1\x81\xe3\xd0\x9c\
\x05\x89\x02\x60\xb5\xf9\x12\x48\x39\x92\x51\xe1\x90\x03\x40\x76\
\x38\x42\xed\x64\x60\x44\x6f\x0f\x00\x83\xad\x0c\x48\xb5\xd3\x20\
\x7d\xfc\x03\xe2\x4a\xa0\xc5\x5d\xc8\xed\x01\x46\x6c\x0d\x12\xa0\
\x23\xbc\x81\xd4\x72\x20\xe6\xa5\x92\xe5\x9f\x81\x38\x12\x68\xf9\
\x56\xf4\x06\x09\x23\xae\x16\x11\xd0\x11\xda\x40\x6a\x13\x10\x2b\
\x51\x68\xf9\x3d\x20\xf6\x03\x5a\x7e\x95\xa4\x16\x11\x54\x83\x19\
\x10\x1f\xa4\xc0\x72\x90\x5e\x33\x6c\x96\x13\x55\x0e\x00\x35\xbe\
\x05\x52\xae\x40\x3c\x93\x0c\xcb\x41\x7a\x5c\xa1\x66\xe0\x04\x8c\
\xc4\x34\x4a\xa1\x51\x92\x0b\xa4\xfa\x81\x98\x99\x80\xd2\xbf\x40\
\x5c\x08\xb4\x78\x32\x21\x33\xf1\xa6\x01\x1c\x8e\x70\x01\x52\xab\
\x81\x58\x00\x87\x92\x0f\x40\x1c\x0a\xb4\x7c\x0f\x31\xe6\x91\xec\
\x00\xa8\x23\xd4\xa0\x89\x53\x1d\x4d\xea\x26\x34\xb1\xdd\x22\xd6\
\x2c\xb2\x9a\xe5\x50\x0b\x40\x45\xf3\x2e\x24\xe1\xdd\x20\x31\x52\
\x2c\x27\x39\x0d\x60\x09\x09\x50\x5a\xe8\x85\x72\x8b\x81\x96\xff\
\x25\xd5\x0c\x78\x14\xe4\xe7\xe7\x83\x42\x42\x13\x88\x59\xe9\x54\
\x09\xfe\x06\xe2\x1b\x13\x27\x4e\xfc\x0b\xab\x8c\xd6\x91\xd1\xe2\
\xa1\x14\x6c\x07\x62\x2f\x58\x1a\xf0\x1b\x80\xa6\x80\x27\x72\x41\
\xc4\xc8\x30\x40\x00\xe6\x80\x3f\x03\x60\xf7\x3f\x64\x07\x4c\x81\
\x09\xd0\xd1\xf2\xa9\x20\x06\x40\x80\x01\x00\xc6\x51\x9f\x7a\x83\
\x78\x67\xeb\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
\x00\x00\x06\xbd\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x20\x00\x00\x00\x20\x08\x06\x00\x00\x00\x73\x7a\x7a\xf4\
\x00\x00\x00\x19\x74\x45\x58\x74\x53\x6f\x66\x74\x77\x61\x72\x65\
\x00\x41\x64\x6f\x62\x65\x20\x49\x6d\x61\x67\x65\x52\x65\x61\x64\
\x79\x71\xc9\x65\x3c\x00\x00\x03\x71\x69\x54\x58\x74\x58\x4d\x4c\
\x3a\x63\x6f\x6d\x2e\x61\x64\x6f\x62\x65\x2e\x78\x6d\x70\x00\x00\
\x00\x00\x00\x3c\x3f\x78\x70\x61\x63\x6b\x65\x74\x20\x62\x65\x67\
\x69\x6e\x3d\x22\xef\xbb\xbf\x22\x20\x69\x64\x3d\x22\x57\x35\x4d\
\x30\x4d\x70\x43\x65\x68\x69\x48\x7a\x72\x65\x53\x7a\x4e\x54\x63\
\x7a\x6b\x63\x39\x64\x22\x3f\x3e\x20\x3c\x78\x3a\x78\x6d\x70\x6d\
\x65\x74\x61\x20\x78\x6d\x6c\x6e\x73\x3a\x78\x3d\x22\x61\x64\x6f\
\x62\x65\x3a\x6e\x73\x3a\x6d\x65\x74\x61\x2f\x22\x20\x78\x3a\x78\
\x6d\x70\x74\x6b\x3d\x22\x41\x64\x6f\x62\x65\x20\x58\x4d\x50\x20\
\x43\x6f\x72\x65\x20\x35\x2e\x35\x2d\x63\x30\x31\x34\x20\x37\x39\
\x2e\x31\x35\x31\x34\x38\x31\x2c\x20\x32\x30\x31\x33\x2f\x30\x33\
\x2f\x31\x33\x2d\x31\x32\x3a\x30\x39\x3a\x31\x35\x20\x20\x20\x20\
\x20\x20\x20\x20\x22\x3e\x20\x3c\x72\x64\x66\x3a\x52\x44\x46\x20\
\x78\x6d\x6c\x6e\x73\x3a\x72\x64\x66\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x31\x39\x39\
\x39\x2f\x30\x32\x2f\x32\x32\x2d\x72\x64\x66\x2d\x73\x79\x6e\x74\
\x61\x78\x2d\x6e\x73\x23\x22\x3e\x20\x3c\x72\x64\x66\x3a\x44\x65\
\x73\x63\x72\x69\x70\x74\x69\x6f\x6e\x20\x72\x64\x66\x3a\x61\x62\
\x6f\x75\x74\x3d\x22\x22\x20\x78\x6d\x6c\x6e\x73\x3a\x78\x6d\x70\
\x4d\x4d\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x6e\x73\x2e\x61\x64\
\x6f\x62\x65\x2e\x63\x6f\x6d\x2f\x78\x61\x70\x2f\x31\x2e\x30\x2f\
\x6d\x6d\x2f\x22\x20\x78\x6d\x6c\x6e\x73\x3a\x73\x74\x52\x65\x66\
\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x6e\x73\x2e\x61\x64\x6f\x62\
\x65\x2e\x63\x6f\x6d\x2f\x78\x61\x70\x2f\x31\x2e\x30\x2f\x73\x54\
\x79\x70\x65\x2f\x52\x65\x73\x6f\x75\x72\x63\x65\x52\x65\x66\x23\
\x22\x20\x78\x6d\x6c\x6e\x73\x3a\x78\x6d\x70\x3d\x22\x68\x74\x74\
\x70\x3a\x2f\x2f\x6e\x73\x2e\x61\x64\x6f\x62\x65\x2e\x63\x6f\x6d\
\x2f\x78\x61\x70\x2f\x31\x2e\x30\x2f\x22\x20\x78\x6d\x70\x4d\x4d\
\x3a\x4f\x72\x69\x67\x69\x6e\x61\x6c\x44\x6f\x63\x75\x6d\x65\x6e\
\x74\x49\x44\x3d\x22\x78\x6d\x70\x2e\x64\x69\x64\x3a\x63\x34\x37\
\x39\x66\x62\x31\x39\x2d\x34\x33\x34\x34\x2d\x62\x65\x34\x33\x2d\
\x39\x39\x31\x31\x2d\x33\x64\x35\x32\x63\x31\x30\x33\x35\x37\x65\
\x31\x22\x20\x78\x6d\x70\x4d\x4d\x3a\x44\x6f\x63\x75\x6d\x65\x6e\
\x74\x49\x44\x3d\x22\x78\x6d\x70\x2e\x64\x69\x64\x3a\x41\x41\x42\
\x31\x33\x44\x37\x30\x35\x39\x38\x34\x31\x31\x45\x33\x41\x44\x39\
\x31\x44\x46\x46\x41\x36\x32\x34\x34\x38\x41\x34\x46\x22\x20\x78\
\x6d\x70\x4d\x4d\x3a\x49\x6e\x73\x74\x61\x6e\x63\x65\x49\x44\x3d\
\x22\x78\x6d\x70\x2e\x69\x69\x64\x3a\x41\x41\x42\x31\x33\x44\x36\
\x46\x35\x39\x38\x34\x31\x31\x45\x33\x41\x44\x39\x31\x44\x46\x46\
\x41\x36\x32\x34\x34\x38\x41\x34\x46\x22\x20\x78\x6d\x70\x3a\x43\
\x72\x65\x61\x74\x6f\x72\x54\x6f\x6f\x6c\x3d\x22\x41\x64\x6f\x62\
\x65\x20\x50\x68\x6f\x74\x6f\x73\x68\x6f\x70\x20\x43\x43\x20\x28\
\x57\x69\x6e\x64\x6f\x77\x73\x29\x22\x3e\x20\x3c\x78\x6d\x70\x4d\
\x4d\x3a\x44\x65\x72\x69\x76\x65\x64\x46\x72\x6f\x6d\x20\x73\x74\
\x52\x65\x66\x3a\x69\x6e\x73\x74\x61\x6e\x63\x65\x49\x44\x3d\x22\
\x78\x6d\x70\x2e\x69\x69\x64\x3a\x63\x31\x65\x61\x38\x31\x32\x35\
\x2d\x65\x62\x64\x64\x2d\x63\x38\x34\x62\x2d\x39\x66\x37\x61\x2d\
\x30\x36\x39\x39\x32\x39\x38\x64\x62\x39\x33\x63\x22\x20\x73\x74\
\x52\x65\x66\x3a\x64\x6f\x63\x75\x6d\x65\x6e\x74\x49\x44\x3d\x22\
\x78\x6d\x70\x2e\x64\x69\x64\x3a\x63\x34\x37\x39\x66\x62\x31\x39\
\x2d\x34\x33\x34\x34\x2d\x62\x65\x34\x33\x2d\x39\x39\x31\x31\x2d\
\x33\x64\x35\x32\x63\x31\x30\x33\x35\x37\x65\x31\x22\x2f\x3e\x20\
\x3c\x2f\x72\x64\x66\x3a\x44\x65\x73\x63\x72\x69\x70\x74\x69\x6f\
\x6e\x3e\x20\x3c\x2f\x72\x64\x66\x3a\x52\x44\x46\x3e\x20\x3c\x2f\
\x78\x3a\x78\x6d\x70\x6d\x65\x74\x61\x3e\x20\x3c\x3f\x78\x70\x61\
\x63\x6b\x65\x74\x20\x65\x6e\x64\x3d\x22\x72\x22\x3f\x3e\x74\x3f\
\x67\x43\x00\x00\x02\xe2\x49\x44\x41\x54\x78\xda\xbc\x97\x5d\x88\
\xcc\x51\x14\xc0\x67\xd6\x60\xdb\x98\x15\xa5\x79\xf0\x15\x4b\x48\
\xbe\x8a\x44\xc8\xee\x4a\x11\xf2\x64\x1f\x58\x91\x42\x69\xac\xf2\
\x91\xb2\xa5\x15\x25\xe5\x61\x3c\xd9\x97\x8d\x37\x1e\xb0\x51\xe4\
\xa3\x59\xad\x94\x07\xab\xb4\xb2\x59\xb2\x25\xd2\x96\xb0\xd8\xf5\
\xd1\x18\xbf\x53\x67\x74\xfb\x37\xf7\xce\xfd\x9b\x7f\x7b\xea\xd7\
\xbd\x73\x3f\xfe\xf7\x9c\x73\xcf\x3d\xf7\x4e\x3c\x16\xa1\xa4\xd3\
\xe9\x75\x14\x19\x98\x05\xcf\x61\x5f\x26\x93\xe9\xb4\x8d\xcf\xe7\
\xf3\xb1\x78\x84\x8b\x4f\xa3\xe8\x81\xd1\x46\xf3\x57\xa8\x41\x89\
\x7e\x9b\x02\x15\x11\x3a\x60\x7d\x60\x71\x91\xb1\x50\xe7\x9a\x14\
\xa5\x02\xdf\x2d\xed\x83\xae\x49\x89\x08\x5c\x5f\x25\x6e\x86\x6b\
\x70\x12\x26\x1b\xdd\xb2\x25\xb7\x23\x55\x80\x05\x47\x50\x2c\x55\
\xd7\xd6\xc3\x72\x18\x09\xbd\xd0\x0e\x93\x60\x1c\x3c\x83\x53\xec\
\xff\xcf\xb2\x15\x60\x51\x09\xd6\x46\xd8\x02\x6b\x20\x59\x64\xd8\
\x4c\xe5\x0f\xdc\x82\x07\xf0\xb1\xd4\xb7\x7d\x3d\xb0\x1b\x5a\x3d\
\xc7\x4a\x5c\x6d\x50\x36\xc1\x8d\xb2\x82\x10\xeb\x47\x51\x34\x07\
\x9a\xdf\xc0\x7b\x0f\x65\xea\xa3\xf0\xc0\x4e\x23\xb0\xde\xc2\x2a\
\xf8\x0c\x7d\x1e\x73\xd7\x16\x09\xd8\xd5\x30\x1b\xae\x8b\x21\x89\
\x12\xd6\x4b\xff\x61\xa3\xe9\x34\x41\xd5\x47\xbb\x78\xa4\xda\x96\
\xe0\x20\xa7\xc6\xcd\x61\xac\x6c\xc3\x02\x0d\xda\x42\xc0\x8a\x34\
\xc1\xd4\x52\x1e\xd8\x06\x33\xb4\xfe\x01\xda\xf8\xe0\x18\xca\x03\
\x8e\x39\x57\xf4\x24\xac\xd0\xdf\xed\x96\x71\x55\xce\x18\x50\xeb\
\x8f\x1b\x4d\x67\xf4\x48\xed\x87\x09\x0e\xeb\x5b\xe0\x9e\x43\xc1\
\x77\x70\x51\x33\xa7\xfd\x2e\x40\x81\x46\x1d\x58\xb0\x7e\xba\xd6\
\x65\xef\x27\x5a\xa6\x5d\x46\xc9\x06\xe6\xca\x71\xec\x02\xf1\xd6\
\x00\x64\x55\xa9\xfb\xf4\xbf\x30\xef\x02\xd7\x16\xec\x30\xea\xe7\
\x98\x38\xc4\x87\x9b\x1c\x8b\x17\xac\x8f\x31\xb6\x97\xb1\x53\xa8\
\xa6\xe0\x25\xbf\x73\xb6\x45\x5c\x1e\x38\x4b\x71\x08\x5e\xc3\x42\
\xf8\xad\xd6\xa7\x2c\x53\x9e\xc2\x32\x16\xfb\xe5\x9b\x55\x4b\xdd\
\x86\x47\x60\x2e\xcc\xe7\xa3\xdf\xf4\x38\xa6\x1c\xe3\x17\x41\xb7\
\x46\xbd\xb7\xc4\x3d\x53\xb1\x24\xa3\x57\x81\x8b\xc6\x25\xb2\xdf\
\x07\x51\xbc\xbb\x1c\x0f\x98\xb2\x3d\xc4\xe2\x85\x0c\xd8\x85\xe2\
\x4b\x7c\xf2\x76\xcc\x23\x19\x1d\x2b\xd2\xb5\x07\x2e\x68\xd2\x29\
\x26\x92\x70\x76\x95\xad\x40\x20\x19\x15\xe4\x31\xee\x6d\x85\xbd\
\xd4\xe7\xc1\x4d\xcb\xdc\xea\xb2\x14\xc0\xfa\x0a\x0d\xc6\xa0\x9c\
\x28\x54\x50\xa2\x07\x36\xea\x35\xfd\xa4\x58\xb6\x0b\x15\x84\x2c\
\x3a\x9e\xa2\xd6\x78\x70\xd4\x04\x86\x3c\x64\xc1\x95\x16\x85\xe5\
\x21\xf2\xc9\x68\xca\x32\xb6\xd6\x15\x84\x09\xb5\xb2\x4e\x6f\xae\
\x3a\x3d\x4e\xae\xd3\xd1\xec\xe8\x0b\xbe\xff\x2a\x7d\xb6\xe0\x12\
\xdc\xd1\x5b\x6f\x71\x89\xc5\xcf\x63\x51\x87\xad\x53\x93\x50\x3e\
\xcc\x16\x48\x84\x37\x38\xfa\xe5\x83\x8f\xf4\x5c\x5f\x35\xf3\xb8\
\x43\x06\x8c\xe0\x4b\xfa\x28\x90\x0d\xbc\x5c\xe4\x31\x79\x57\xe9\
\x64\xd1\xc1\x90\xef\xd6\x21\x43\x81\x4a\x1f\x05\x36\xc3\x56\x7d\
\xd7\x77\xd8\xfe\xc5\x84\x90\x1f\xa1\xb6\x40\x2d\x6c\x8b\xf0\x0f\
\xca\x17\xa3\x9e\x8c\x22\x11\xc5\xfe\x63\x0b\xfe\x1d\x73\xbd\x47\
\x86\x55\x81\xfe\xc0\x76\xe4\x86\x5b\x81\x16\x7d\x37\x88\x27\x8e\
\xba\x1e\x23\x22\x7f\x05\x18\x00\x68\x1f\xde\x97\xa8\x2b\x9f\x11\
\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
\x00\x00\x06\xbd\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x20\x00\x00\x00\x20\x08\x06\x00\x00\x00\x73\x7a\x7a\xf4\
\x00\x00\x00\x19\x74\x45\x58\x74\x53\x6f\x66\x74\x77\x61\x72\x65\
\x00\x41\x64\x6f\x62\x65\x20\x49\x6d\x61\x67\x65\x52\x65\x61\x64\
\x79\x71\xc9\x65\x3c\x00\x00\x03\x71\x69\x54\x58\x74\x58\x4d\x4c\
\x3a\x63\x6f\x6d\x2e\x61\x64\x6f\x62\x65\x2e\x78\x6d\x70\x00\x00\
\x00\x00\x00\x3c\x3f\x78\x70\x61\x63\x6b\x65\x74\x20\x62\x65\x67\
\x69\x6e\x3d\x22\xef\xbb\xbf\x22\x20\x69\x64\x3d\x22\x57\x35\x4d\
\x30\x4d\x70\x43\x65\x68\x69\x48\x7a\x72\x65\x53\x7a\x4e\x54\x63\
\x7a\x6b\x63\x39\x64\x22\x3f\x3e\x20\x3c\x78\x3a\x78\x6d\x70\x6d\
\x65\x74\x61\x20\x78\x6d\x6c\x6e\x73\x3a\x78\x3d\x22\x61\x64\x6f\
\x62\x65\x3a\x6e\x73\x3a\x6d\x65\x74\x61\x2f\x22\x20\x78\x3a\x78\
\x6d\x70\x74\x6b\x3d\x22\x41\x64\x6f\x62\x65\x20\x58\x4d\x50\x20\
\x43\x6f\x72\x65\x20\x35\x2e\x35\x2d\x63\x30\x31\x34\x20\x37\x39\
\x2e\x31\x35\x31\x34\x38\x31\x2c\x20\x32\x30\x31\x33\x2f\x30\x33\
\x2f\x31\x33\x2d\x31\x32\x3a\x30\x39\x3a\x31\x35\x20\x20\x20\x20\
\x20\x20\x20\x20\x22\x3e\x20\x3c\x72\x64\x66\x3a\x52\x44\x46\x20\
\x78\x6d\x6c\x6e\x73\x3a\x72\x64\x66\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x31\x39\x39\
\x39\x2f\x30\x32\x2f\x32\x32\x2d\x72\x64\x66\x2d\x73\x79\x6e\x74\
\x61\x78\x2d\x6e\x73\x23\x22\x3e\x20\x3c\x72\x64\x66\x3a\x44\x65\
\x73\x63\x72\x69\x70\x74\x69\x6f\x6e\x20\x72\x64\x66\x3a\x61\x62\
\x6f\x75\x74\x3d\x22\x22\x20\x78\x6d\x6c\x6e\x73\x3a\x78\x6d\x70\
\x4d\x4d\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x6e\x73\x2e\x61\x64\
\x6f\x62\x65\x2e\x63\x6f\x6d\x2f\x78\x61\x70\x2f\x31\x2e\x30\x2f\
\x6d\x6d\x2f\x22\x20\x78\x6d\x6c\x6e\x73\x3a\x73\x74\x52\x65\x66\
\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x6e\x73\x2e\x61\x64\x6f\x62\
\x65\x2e\x63\x6f\x6d\x2f\x78\x61\x70\x2f\x31\x2e\x30\x2f\x73\x54\
\x79\x70\x65\x2f\x52\x65\x73\x6f\x75\x72\x63\x65\x52\x65\x66\x23\
\x22\x20\x78\x6d\x6c\x6e\x73\x3a\x78\x6d\x70\x3d\x22\x68\x74\x74\
\x70\x3a\x2f\x2f\x6e\x73\x2e\x61\x64\x6f\x62\x65\x2e\x63\x6f\x6d\
\x2f\x78\x61\x70\x2f\x31\x2e\x30\x2f\x22\x20\x78\x6d\x70\x4d\x4d\
\x3a\x4f\x72\x69\x67\x69\x6e\x61\x6c\x44\x6f\x63\x75\x6d\x65\x6e\
\x74\x49\x44\x3d\x22\x78\x6d\x70\x2e\x64\x69\x64\x3a\x63\x34\x37\
\x39\x66\x62\x31\x39\x2d\x34\x33\x34\x34\x2d\x62\x65\x34\x33\x2d\
\x39\x39\x31\x31\x2d\x33\x64\x35\x32\x63\x31\x30\x33\x35\x37\x65\
\x31\x22\x20\x78\x6d\x70\x4d\x4d\x3a\x44\x6f\x63\x75\x6d\x65\x6e\
\x74\x49\x44\x3d\x22\x78\x6d\x70\x2e\x64\x69\x64\x3a\x39\x46\x38\
\x32\x32\x30\x43\x30\x35\x39\x38\x34\x31\x31\x45\x33\x42\x35\x45\
\x42\x44\x32\x39\x33\x41\x33\x42\x36\x34\x41\x32\x38\x22\x20\x78\
\x6d\x70\x4d\x4d\x3a\x49\x6e\x73\x74\x61\x6e\x63\x65\x49\x44\x3d\
\x22\x78\x6d\x70\x2e\x69\x69\x64\x3a\x39\x46\x38\x32\x32\x30\x42\
\x46\x35\x39\x38\x34\x31\x31\x45\x33\x42\x35\x45\x42\x44\x32\x39\
\x33\x41\x33\x42\x36\x34\x41\x32\x38\x22\x20\x78\x6d\x70\x3a\x43\
\x72\x65\x61\x74\x6f\x72\x54\x6f\x6f\x6c\x3d\x22\x41\x64\x6f\x62\
\x65\x20\x50\x68\x6f\x74\x6f\x73\x68\x6f\x70\x20\x43\x43\x20\x28\
\x57\x69\x6e\x64\x6f\x77\x73\x29\x22\x3e\x20\x3c\x78\x6d\x70\x4d\
\x4d\x3a\x44\x65\x72\x69\x76\x65\x64\x46\x72\x6f\x6d\x20\x73\x74\
\x52\x65\x66\x3a\x69\x6e\x73\x74\x61\x6e\x63\x65\x49\x44\x3d\x22\
\x78\x6d\x70\x2e\x69\x69\x64\x3a\x63\x31\x65\x61\x38\x31\x32\x35\
\x2d\x65\x62\x64\x64\x2d\x63\x38\x34\x62\x2d\x39\x66\x37\x61\x2d\
\x30\x36\x39\x39\x32\x39\x38\x64\x62\x39\x33\x63\x22\x20\x73\x74\
\x52\x65\x66\x3a\x64\x6f\x63\x75\x6d\x65\x6e\x74\x49\x44\x3d\x22\
\x78\x6d\x70\x2e\x64\x69\x64\x3a\x63\x34\x37\x39\x66\x62\x31\x39\
\x2d\x34\x33\x34\x34\x2d\x62\x65\x34\x33\x2d\x39\x39\x31\x31\x2d\
\x33\x64\x35\x32\x63\x31\x30\x33\x35\x37\x65\x31\x22\x2f\x3e\x20\
\x3c\x2f\x72\x64\x66\x3a\x44\x65\x73\x63\x72\x69\x70\x74\x69\x6f\
\x6e\x3e\x20\x3c\x2f\x72\x64\x66\x3a\x52\x44\x46\x3e\x20\x3c\x2f\
\x78\x3a\x78\x6d\x70\x6d\x65\x74\x61\x3e\x20\x3c\x3f\x78\x70\x61\
\x63\x6b\x65\x74\x20\x65\x6e\x64\x3d\x22\x72\x22\x3f\x3e\x7d\x29\
\xa2\x1a\x00\x00\x02\xe2\x49\x44\x41\x54\x78\xda\xbc\x97\x5d\x88\
\xcc\x51\x14\xc0\x67\xd6\x60\xdb\x98\x15\xa5\x79\xf0\xb1\xb1\x84\
\xe4\xab\x48\x84\xec\xac\x14\x21\x4f\xf6\x81\x15\x6d\xa1\x94\x55\
\x8b\x94\x2d\xad\x28\x91\x07\x9e\x6c\x6a\xe3\x8d\x07\x6c\x14\xf9\
\x68\x6c\x2b\xe5\xc1\x2a\xad\x6c\x96\x6c\x89\xb4\x25\x2c\x76\x7d\
\x34\xc6\xef\xd4\xf9\xeb\xf6\x6f\xee\x9d\x3b\xe6\xdf\x9e\xfa\x75\
\xef\xdc\x8f\xff\x3d\xe7\xdc\x73\xcf\xbd\x13\x8f\x45\x28\x7d\x4d\
\x0d\x6b\x29\xce\xc2\x4c\x78\x0e\x7b\xaa\x4e\x5f\xe8\xb4\x8d\xcf\
\xe5\x72\xb1\x78\x84\x8b\x57\x51\xf4\xc0\x68\xa3\xf9\x2b\x54\xa3\
\x44\xbf\x4d\x81\xb2\x08\x1d\xb0\x2e\xb4\xb8\xc8\x58\x48\xbb\x26\
\x45\xa9\xc0\x77\x4b\xfb\xa0\x6b\x52\x22\x02\xd7\x57\x88\x9b\xe1\
\x1a\x1c\x83\xc9\x46\xb7\x6c\xc9\xed\x48\x15\x60\xc1\x11\x14\x4b\
\xd4\xb5\xb5\xb0\x0c\x46\x42\x2f\xb4\xc3\x24\x18\x07\xcf\xe0\x38\
\xfb\xff\xb3\x64\x05\x58\x54\x82\xb5\x1e\x36\xc3\x6a\x48\xe6\x19\
\x36\x43\xf9\x03\xb7\xa0\x03\x3e\x16\xfa\xb6\xaf\x07\x1a\xa0\xd5\
\x73\xac\xc4\xd5\x7a\x65\x23\xdc\x28\x29\x08\xb1\x7e\x14\x45\x73\
\xa8\xf9\x0d\xbc\xf7\x50\xa6\x36\x0a\x0f\xec\x30\x02\xeb\x2d\xac\
\x84\xcf\xa2\x9b\xc7\xdc\x35\x79\x02\x76\x15\xcc\x82\xeb\x62\x48\
\xa2\x80\xf5\xd2\x7f\xc0\x68\x3a\x41\x50\xf5\xd1\x2e\x1e\xa9\xb4\
\x25\x38\xc8\xaa\x71\xb3\x19\x2b\xdb\x30\x5f\x83\x36\x08\x58\x91\
\x46\x98\x5a\xc8\x03\x5b\x61\xba\xd6\x3f\x40\x1b\x1f\x1c\x43\xb9\
\xcf\x31\xe7\x8a\x9e\x84\xe5\xfa\xbb\xdd\x32\xae\xc2\x19\x03\x6a\
\xfd\x11\xa3\xe9\xa4\x1e\xa9\xbd\x30\xc1\x61\x7d\x0b\xdc\x73\x28\
\xf8\x0e\x2e\x6a\xe6\xb4\xdf\x05\x28\x50\xaf\x03\x03\xeb\xa7\x05\
\x5d\x30\xd1\x32\xed\x32\x4a\xd6\x31\x57\x8e\x63\x17\x88\xb7\x06\
\x20\xa3\x4a\xdd\xa7\xff\x85\x79\x17\xb8\xb6\x60\xbb\x51\x3f\xc3\
\xc4\x21\x3e\xdc\xe8\x58\x3c\xb0\x3e\xc6\xd8\x5e\xc6\x4e\xa1\x9a\
\x82\x97\xfc\xce\xda\x16\x71\x79\xe0\x14\x45\x13\xbc\x86\x05\xf0\
\x5b\xad\x4f\x59\xa6\x3c\x85\xa5\x2c\xf6\xcb\x37\xab\x16\xba\x0d\
\x0f\xc2\x1c\x98\xc7\x47\xbf\xe9\x71\x4c\x39\xc6\x2f\x84\x6e\x8d\
\x7a\x6f\x89\x7b\xa6\x62\x49\x46\xaf\x42\x17\x8d\x4b\x64\xbf\xf7\
\xa3\x78\x77\x29\x1e\x30\x65\x5b\x11\x8b\x07\x19\xb0\x0b\xc5\x17\
\xfb\xe4\xed\x98\x47\x32\x3a\x9c\xa7\x6b\x17\x9c\xd7\xa4\x93\x4f\
\x24\xe1\xec\x2c\x59\x81\x50\x32\x0a\xe4\x31\xee\x6d\x85\xdd\xd4\
\xe7\xc2\x4d\xcb\xdc\xca\x92\x14\xc0\xfa\x32\x0d\xc6\xb0\x1c\x0d\
\x2a\x28\xd1\x03\x1b\xf4\x9a\x7e\x92\x2f\xdb\x15\x15\x84\x2c\x3a\
\x9e\xa2\xc6\x78\x70\x54\x87\x86\x3c\x64\xc1\x15\x16\x85\xe5\x21\
\xf2\xc9\x68\xca\x30\xb6\xc6\x15\x84\x09\xb5\x32\xad\x37\x57\x5a\
\x8f\x93\xeb\x74\x34\x3b\xfa\xc2\xef\xbf\x72\x9f\x2d\xb8\x04\x77\
\xf4\xd6\x5b\x54\x60\xf1\x73\x58\xf4\xc0\xd6\xa9\x49\x28\x57\xcc\
\x16\x48\x84\xd7\x39\xfa\xe5\x83\x8f\xf4\x5c\x5f\x35\xf3\xb8\x43\
\x06\x8c\xe0\x4b\xfa\x28\x90\x09\xbd\x5c\xe4\x31\x79\x57\xe9\x64\
\xd1\xc1\x22\xdf\xad\x43\x86\x02\xe5\x3e\x0a\x6c\x82\x2d\x20\xe9\
\xb6\xc3\xf6\x2f\xa6\x08\xf9\x51\xd4\x16\xa8\x85\x6d\x11\xfe\x41\
\xf9\x62\xd4\x93\x51\x24\xa2\xd8\x7f\x6c\xc1\xbf\x63\xae\xf7\xc8\
\xb0\x2a\xd0\x1f\xda\x8e\xec\x70\x2b\xd0\xa2\xef\x06\xf1\xc4\x21\
\xd7\x63\x44\xe4\xaf\x00\x03\x00\x67\x30\xde\x97\xf5\x5f\xa5\xbd\
\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
\x00\x00\x07\x85\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x20\x00\x00\x00\x20\x08\x06\x00\x00\x00\x73\x7a\x7a\xf4\
\x00\x00\x00\x19\x74\x45\x58\x74\x53\x6f\x66\x74\x77\x61\x72\x65\
\x00\x41\x64\x6f\x62\x65\x20\x49\x6d\x61\x67\x65\x52\x65\x61\x64\
\x79\x71\xc9\x65\x3c\x00\x00\x03\x71\x69\x54\x58\x74\x58\x4d\x4c\
\x3a\x63\x6f\x6d\x2e\x61\x64\x6f\x62\x65\x2e\x78\x6d\x70\x00\x00\
\x00\x00\x00\x3c\x3f\x78\x70\x61\x63\x6b\x65\x74\x20\x62\x65\x67\
\x69\x6e\x3d\x22\xef\xbb\xbf\x22\x20\x69\x64\x3d\x22\x57\x35\x4d\
\x30\x4d\x70\x43\x65\x68\x69\x48\x7a\x72\x65\x53\x7a\x4e\x54\x63\
\x7a\x6b\x63\x39\x64\x22\x3f\x3e\x20\x3c\x78\x3a\x78\x6d\x70\x6d\
\x65\x74\x61\x20\x78\x6d\x6c\x6e\x73\x3a\x78\x3d\x22\x61\x64\x6f\
\x62\x65\x3a\x6e\x73\x3a\x6d\x65\x74\x61\x2f\x22\x20\x78\x3a\x78\
\x6d\x70\x74\x6b\x3d\x22\x41\x64\x6f\x62\x65\x20\x58\x4d\x50\x20\
\x43\x6f\x72\x65\x20\x35\x2e\x35\x2d\x63\x30\x31\x34\x20\x37\x39\
\x2e\x31\x35\x31\x34\x38\x31\x2c\x20\x32\x30\x31\x33\x2f\x30\x33\
\x2f\x31\x33\x2d\x31\x32\x3a\x30\x39\x3a\x31\x35\x20\x20\x20\x20\
\x20\x20\x20\x20\x22\x3e\x20\x3c\x72\x64\x66\x3a\x52\x44\x46\x20\
\x78\x6d\x6c\x6e\x73\x3a\x72\x64\x66\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x31\x39\x39\
\x39\x2f\x30\x32\x2f\x32\x32\x2d\x72\x64\x66\x2d\x73\x79\x6e\x74\
\x61\x78\x2d\x6e\x73\x23\x22\x3e\x20\x3c\x72\x64\x66\x3a\x44\x65\
\x73\x63\x72\x69\x70\x74\x69\x6f\x6e\x20\x72\x64\x66\x3a\x61\x62\
\x6f\x75\x74\x3d\x22\x22\x20\x78\x6d\x6c\x6e\x73\x3a\x78\x6d\x70\
\x4d\x4d\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x6e\x73\x2e\x61\x64\
\x6f\x62\x65\x2e\x63\x6f\x6d\x2f\x78\x61\x70\x2f\x31\x2e\x30\x2f\
\x6d\x6d\x2f\x22\x20\x78\x6d\x6c\x6e\x73\x3a\x73\x74\x52\x65\x66\
\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x6e\x73\x2e\x61\x64\x6f\x62\
\x65\x2e\x63\x6f\x6d\x2f\x78\x61\x70\x2f\x31\x2e\x30\x2f\x73\x54\
\x79\x70\x65\x2f\x52\x65\x73\x6f\x75\x72\x63\x65\x52\x65\x66\x23\
\x22\x20\x78\x6d\x6c\x6e\x73\x3a\x78\x6d\x70\x3d\x22\x68\x74\x74\
\x70\x3a\x2f\x2f\x6e\x73\x2e\x61\x64\x6f\x62\x65\x2e\x63\x6f\x6d\
\x2f\x78\x61\x70\x2f\x31\x2e\x30\x2f\x22\x20\x78\x6d\x70\x4d\x4d\
\x3a\x4f\x72\x69\x67\x69\x6e\x61\x6c\x44\x6f\x63\x75\x6d\x65\x6e\
\x74\x49\x44\x3d\x22\x78\x6d\x70\x2e\x64\x69\x64\x3a\x63\x34\x37\
\x39\x66\x62\x31\x39\x2d\x34\x33\x34\x34\x2d\x62\x65\x34\x33\x2d\
\x39\x39\x31\x31\x2d\x33\x64\x35\x32\x63\x31\x30\x33\x35\x37\x65\
\x31\x22\x20\x78\x6d\x70\x4d\x4d\x3a\x44\x6f\x63\x75\x6d\x65\x6e\
\x74\x49\x44\x3d\x22\x78\x6d\x70\x2e\x64\x69\x64\x3a\x45\x45\x35\
\x37\x44\x41\x37\x43\x35\x39\x38\x34\x31\x31\x45\x33\x39\x44\x32\
\x38\x41\x32\x46\x31\x34\x37\x35\x33\x41\x36\x39\x46\x22\x20\x78\
\x6d\x70\x4d\x4d\x3a\x49\x6e\x73\x74\x61\x6e\x63\x65\x49\x44\x3d\
\x22\x78\x6d\x70\x2e\x69\x69\x64\x3a\x45\x45\x35\x37\x44\x41\x37\
\x42\x35\x39\x38\x34\x31\x31\x45\x33\x39\x44\x32\x38\x41\x32\x46\
\x31\x34\x37\x35\x33\x41\x36\x39\x46\x22\x20\x78\x6d\x70\x3a\x43\
\x72\x65\x61\x74\x6f\x72\x54\x6f\x6f\x6c\x3d\x22\x41\x64\x6f\x62\
\x65\x20\x50\x68\x6f\x74\x6f\x73\x68\x6f\x70\x20\x43\x43\x20\x28\
\x57\x69\x6e\x64\x6f\x77\x73\x29\x22\x3e\x20\x3c\x78\x6d\x70\x4d\
\x4d\x3a\x44\x65\x72\x69\x76\x65\x64\x46\x72\x6f\x6d\x20\x73\x74\
\x52\x65\x66\x3a\x69\x6e\x73\x74\x61\x6e\x63\x65\x49\x44\x3d\x22\
\x78\x6d\x70\x2e\x69\x69\x64\x3a\x63\x31\x65\x61\x38\x31\x32\x35\
\x2d\x65\x62\x64\x64\x2d\x63\x38\x34\x62\x2d\x39\x66\x37\x61\x2d\
\x30\x36\x39\x39\x32\x39\x38\x64\x62\x39\x33\x63\x22\x20\x73\x74\
\x52\x65\x66\x3a\x64\x6f\x63\x75\x6d\x65\x6e\x74\x49\x44\x3d\x22\
\x78\x6d\x70\x2e\x64\x69\x64\x3a\x63\x34\x37\x39\x66\x62\x31\x39\
\x2d\x34\x33\x34\x34\x2d\x62\x65\x34\x33\x2d\x39\x39\x31\x31\x2d\
\x33\x64\x35\x32\x63\x31\x30\x33\x35\x37\x65\x31\x22\x2f\x3e\x20\
\x3c\x2f\x72\x64\x66\x3a\x44\x65\x73\x63\x72\x69\x70\x74\x69\x6f\
\x6e\x3e\x20\x3c\x2f\x72\x64\x66\x3a\x52\x44\x46\x3e\x20\x3c\x2f\
\x78\x3a\x78\x6d\x70\x6d\x65\x74\x61\x3e\x20\x3c\x3f\x78\x70\x61\
\x63\x6b\x65\x74\x20\x65\x6e\x64\x3d\x22\x72\x22\x3f\x3e\x33\xcd\
\x50\xb2\x00\x00\x03\xaa\x49\x44\x41\x54\x78\xda\xbc\x97\x79\x48\
\x55\x41\x14\xc6\xef\xb3\x97\x95\x59\xbd\x0a\x23\xa4\xc0\x25\xa2\
\x3d\x21\x25\x69\x83\xc8\x32\x5b\xa0\x7d\xfb\xa3\x32\x5a\x6c\x7b\
\x24\x45\x44\x86\x96\x45\x96\x08\x91\x11\x51\x08\x69\x54\x58\x58\
\xfd\x51\x54\x44\xb6\x40\x94\x94\xa4\x66\x46\x84\x6d\x26\x11\x91\
\xf9\xd0\x32\xb5\xb4\xbe\x13\xdf\x95\xe1\x72\xdf\x72\x5f\xcf\x0e\
\xfc\x98\xb9\x33\xf7\xce\x7c\x33\xef\xcc\x99\xf3\x6c\x4e\xa7\xd3\
\xa6\x69\x9a\x13\x6c\x00\xe1\x9a\x7f\x76\x17\x2c\xca\xcd\xcd\x6d\
\xb3\xfa\xa1\x0d\x02\x76\xa1\xcc\x02\xcd\x20\x1f\x54\xb0\xee\x8b\
\x85\x80\xe3\xac\x17\x80\x64\x88\xf8\x6d\x45\x80\x1d\x6c\x07\xdf\
\xc0\x14\x7c\x5c\x66\xe5\x63\x88\x77\x50\xc0\x7b\xb0\x0a\xd4\x83\
\x54\xab\x3b\x20\x8a\x0f\x83\x83\xa0\x1f\x70\x71\x07\x06\x1a\xde\
\xfd\x04\xba\x03\x07\x9f\x9b\x89\x4c\xba\x14\xac\x04\xb3\x41\x3a\
\x16\xb2\xdf\x57\x01\x41\x2c\x4b\xc1\x1a\xf0\x16\x6c\x03\xf1\xac\
\xab\xc4\xb3\x4f\x7f\x2e\x54\xc6\x69\x05\x4b\xe8\x0b\x99\x58\xd4\
\x66\xab\x02\x7e\x81\x12\xb0\x0f\xbc\xe3\x2a\xa5\xbe\x15\x4c\x67\
\xdd\xc1\xbe\xb9\x60\x1d\xfd\xa5\xc3\xb0\xea\x26\x14\xf3\xc0\x63\
\x79\x84\x88\xe5\x56\x04\x68\x5c\x61\x06\x38\x0d\xae\xb0\x9e\xc0\
\xdd\xc9\x60\x9b\xf4\xd5\x82\x51\x60\xb5\xc1\x97\x44\x44\x03\x8a\
\x99\xe0\x85\x38\x25\x44\xcc\xb2\x22\xe0\x3e\x1d\x28\x95\x3f\x47\
\x24\xb8\xce\x95\x47\x2a\xc8\xe0\x99\x60\x19\xd0\x8f\x5d\x8c\xb2\
\x13\xe2\x13\xd3\x40\x0d\x28\x82\x88\x89\xbe\x38\xe1\x7c\x10\x01\
\x8e\xb0\xbd\x02\x03\xc5\xb0\x4f\x8e\x56\xbe\x87\x93\x50\xc7\x1d\
\x98\x80\xf7\xaa\x94\xf6\x41\x28\x1e\x81\xde\x60\x32\xfa\x9e\x79\
\x13\x70\x03\xf4\x30\x79\xa7\x09\x1f\xb7\x7a\x10\x90\x4e\x1f\x69\
\x04\x79\xa0\x12\xe8\xb1\x60\x08\x48\x03\x9f\x41\x2c\xc6\xf9\x60\
\x16\x07\x74\x13\xe7\xda\x62\x32\xc7\x21\x4c\x12\xaa\xf4\x55\x63\
\xa0\xb5\x4a\xff\x01\xf0\x13\xa4\xf0\x94\xd8\x4c\xc6\x18\x00\xa2\
\x81\x47\x01\x0d\xf4\x72\xa3\xb5\x73\x02\xbd\xef\xa3\xc1\xfb\xdb\
\x19\x49\xb3\x3c\x04\xab\x7a\x5f\x9c\x30\x8c\xce\x24\xec\xa6\x13\
\x49\x3d\x9b\x5b\x1c\x8d\xc9\xc4\xf3\x63\x31\x68\x39\xc8\xd3\x02\
\x60\xea\x0e\xc8\xf1\xba\xc3\x9d\xe8\xcf\xe7\x7b\x4a\xff\x0f\x96\
\x25\x8c\x98\xd5\x81\x16\x30\x42\xce\x3d\x56\x39\x06\xab\x93\xd8\
\xde\x87\xed\x9b\xd0\x76\x5e\xd9\xf2\x4c\x2d\x80\xa6\x0a\x90\x95\
\xb5\xe8\x8e\x07\xba\xb1\x5e\xae\x75\xa2\xd9\x95\x95\xc9\x4d\x58\
\xc6\xfa\x09\xed\x3f\x99\xdd\x87\x2b\x37\x96\xc9\x4a\x98\x9f\x73\
\x74\xd5\x8f\x2b\xc6\xfa\xc2\xba\x94\x79\x58\x68\x89\xdd\xcb\xe4\
\x89\x28\xae\x2a\x83\xfc\x8b\x19\x43\x72\x32\xc6\x5f\x10\xe4\xe5\
\xa3\xec\x00\x4d\xee\x2e\x04\xe4\x78\x13\x30\xd2\xa4\x4d\x76\x24\
\x09\x3c\xe7\xf3\x6b\x20\xb7\x5e\x31\x9f\x9f\x32\x3a\x36\x32\x4b\
\x4a\x60\x88\xae\x30\xdc\xa0\x7f\x43\xb5\x37\x01\x5d\x0c\xcf\x12\
\xe3\x25\xf6\x8f\x05\x67\xd9\xf6\x15\x54\x31\x14\xb7\x30\xdc\xbe\
\x02\x37\xc1\x4b\x70\x9b\x42\x5c\xbc\x9c\xdc\x46\x42\x5f\xac\x90\
\xce\x28\xd7\xed\x39\x0a\x92\x44\x24\x0e\x4c\x65\x38\x6e\xe2\x0d\
\x38\x07\x04\x83\xbd\xfc\x19\xbf\x33\x89\xf5\x5b\x40\x1b\x13\x93\
\x71\xe0\x21\x6f\xce\x62\x46\xce\x5e\xf4\x97\x02\xfe\x34\xa1\xec\
\xbf\xc5\xf8\x92\x43\x61\x3d\x2d\x1f\x43\xc5\x4e\x81\x61\xca\xa5\
\x23\x93\x9e\x01\x89\x5c\xa9\x83\x22\x56\x80\x3d\x4c\x66\x5c\xec\
\xab\x63\xf8\x0e\xb6\x2a\xc0\xa5\x64\xc1\xa3\x99\xf5\x76\x1c\x23\
\xf0\x04\x0c\xa5\x4f\x88\x2d\xe6\xea\xa3\x18\x49\x1f\x80\xf5\x4c\
\xd3\x6a\x98\xca\xa9\xd6\xe2\x4d\xc0\x05\x06\x21\xb1\x49\x86\xbe\
\xbe\x60\x86\xc9\x37\x49\x4a\x3d\x42\xa9\x47\x11\xd5\x2e\x06\xb9\
\xf1\x76\xdd\x76\x80\xcb\x9d\x14\x07\xae\x49\xd6\xad\xef\x80\x6c\
\xcd\x25\xe3\x1b\x08\x95\xf2\x8f\x69\x21\x22\x56\x38\xb3\x1a\x7f\
\x4c\x76\xee\x18\x53\xf9\x52\x3d\x14\x63\xec\x5a\x3d\x27\x74\xd1\
\xc3\xe3\xd0\xf8\x26\x90\x4b\xc4\xd8\x21\xcc\x31\xc6\x8b\x9f\x98\
\x25\xa6\xb2\x03\x27\xc1\x4e\x51\x87\x0f\x8e\x32\xa9\x6c\x0f\xc0\
\xfc\x83\xc1\x46\x30\x9c\xc7\xb6\xd2\xdd\x6d\x98\xc6\x80\x92\xc2\
\xa0\x11\x48\x93\x5c\xb2\x88\x49\x8d\xe9\xbf\xe6\x3f\x02\x0c\x00\
\x19\xca\x22\x70\x0c\xa6\x9f\x87\x00\x00\x00\x00\x49\x45\x4e\x44\
\xae\x42\x60\x82\
\x00\x00\x06\xbb\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x20\x00\x00\x00\x20\x08\x06\x00\x00\x00\x73\x7a\x7a\xf4\
\x00\x00\x00\x19\x74\x45\x58\x74\x53\x6f\x66\x74\x77\x61\x72\x65\
\x00\x41\x64\x6f\x62\x65\x20\x49\x6d\x61\x67\x65\x52\x65\x61\x64\
\x79\x71\xc9\x65\x3c\x00\x00\x03\x71\x69\x54\x58\x74\x58\x4d\x4c\
\x3a\x63\x6f\x6d\x2e\x61\x64\x6f\x62\x65\x2e\x78\x6d\x70\x00\x00\
\x00\x00\x00\x3c\x3f\x78\x70\x61\x63\x6b\x65\x74\x20\x62\x65\x67\
\x69\x6e\x3d\x22\xef\xbb\xbf\x22\x20\x69\x64\x3d\x22\x57\x35\x4d\
\x30\x4d\x70\x43\x65\x68\x69\x48\x7a\x72\x65\x53\x7a\x4e\x54\x63\
\x7a\x6b\x63\x39\x64\x22\x3f\x3e\x20\x3c\x78\x3a\x78\x6d\x70\x6d\
\x65\x74\x61\x20\x78\x6d\x6c\x6e\x73\x3a\x78\x3d\x22\x61\x64\x6f\
\x62\x65\x3a\x6e\x73\x3a\x6d\x65\x74\x61\x2f\x22\x20\x78\x3a\x78\
\x6d\x70\x74\x6b\x3d\x22\x41\x64\x6f\x62\x65\x20\x58\x4d\x50\x20\
\x43\x6f\x72\x65\x20\x35\x2e\x35\x2d\x63\x30\x31\x34\x20\x37\x39\
\x2e\x31\x35\x31\x34\x38\x31\x2c\x20\x32\x30\x31\x33\x2f\x30\x33\
\x2f\x31\x33\x2d\x31\x32\x3a\x30\x39\x3a\x31\x35\x20\x20\x20\x20\
\x20\x20\x20\x20\x22\x3e\x20\x3c\x72\x64\x66\x3a\x52\x44\x46\x20\
\x78\x6d\x6c\x6e\x73\x3a\x72\x64\x66\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x31\x39\x39\
\x39\x2f\x30\x32\x2f\x32\x32\x2d\x72\x64\x66\x2d\x73\x79\x6e\x74\
\x61\x78\x2d\x6e\x73\x23\x22\x3e\x20\x3c\x72\x64\x66\x3a\x44\x65\
\x73\x63\x72\x69\x70\x74\x69\x6f\x6e\x20\x72\x64\x66\x3a\x61\x62\
\x6f\x75\x74\x3d\x22\x22\x20\x78\x6d\x6c\x6e\x73\x3a\x78\x6d\x70\
\x4d\x4d\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x6e\x73\x2e\x61\x64\
\x6f\x62\x65\x2e\x63\x6f\x6d\x2f\x78\x61\x70\x2f\x31\x2e\x30\x2f\
\x6d\x6d\x2f\x22\x20\x78\x6d\x6c\x6e\x73\x3a\x73\x74\x52\x65\x66\
\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x6e\x73\x2e\x61\x64\x6f\x62\
\x65\x2e\x63\x6f\x6d\x2f\x78\x61\x70\x2f\x31\x2e\x30\x2f\x73\x54\
\x79\x70\x65\x2f\x52\x65\x73\x6f\x75\x72\x63\x65\x52\x65\x66\x23\
\x22\x20\x78\x6d\x6c\x6e\x73\x3a\x78\x6d\x70\x3d\x22\x68\x74\x74\
\x70\x3a\x2f\x2f\x6e\x73\x2e\x61\x64\x6f\x62\x65\x2e\x63\x6f\x6d\
\x2f\x78\x61\x70\x2f\x31\x2e\x30\x2f\x22\x20\x78\x6d\x70\x4d\x4d\
\x3a\x4f\x72\x69\x67\x69\x6e\x61\x6c\x44\x6f\x63\x75\x6d\x65\x6e\
\x74\x49\x44\x3d\x22\x78\x6d\x70\x2e\x64\x69\x64\x3a\x63\x34\x37\
\x39\x66\x62\x31\x39\x2d\x34\x33\x34\x34\x2d\x62\x65\x34\x33\x2d\
\x39\x39\x31\x31\x2d\x33\x64\x35\x32\x63\x31\x30\x33\x35\x37\x65\
\x31\x22\x20\x78\x6d\x70\x4d\x4d\x3a\x44\x6f\x63\x75\x6d\x65\x6e\
\x74\x49\x44\x3d\x22\x78\x6d\x70\x2e\x64\x69\x64\x3a\x37\x34\x35\
\x38\x33\x43\x33\x39\x35\x39\x38\x35\x31\x31\x45\x33\x41\x31\x43\
\x32\x46\x42\x37\x41\x42\x30\x37\x44\x42\x42\x38\x37\x22\x20\x78\
\x6d\x70\x4d\x4d\x3a\x49\x6e\x73\x74\x61\x6e\x63\x65\x49\x44\x3d\
\x22\x78\x6d\x70\x2e\x69\x69\x64\x3a\x37\x34\x35\x38\x33\x43\x33\
\x38\x35\x39\x38\x35\x31\x31\x45\x33\x41\x31\x43\x32\x46\x42\x37\
\x41\x42\x30\x37\x44\x42\x42\x38\x37\x22\x20\x78\x6d\x70\x3a\x43\
\x72\x65\x61\x74\x6f\x72\x54\x6f\x6f\x6c\x3d\x22\x41\x64\x6f\x62\
\x65\x20\x50\x68\x6f\x74\x6f\x73\x68\x6f\x70\x20\x43\x43\x20\x28\
\x57\x69\x6e\x64\x6f\x77\x73\x29\x22\x3e\x20\x3c\x78\x6d\x70\x4d\
\x4d\x3a\x44\x65\x72\x69\x76\x65\x64\x46\x72\x6f\x6d\x20\x73\x74\
\x52\x65\x66\x3a\x69\x6e\x73\x74\x61\x6e\x63\x65\x49\x44\x3d\x22\
\x78\x6d\x70\x2e\x69\x69\x64\x3a\x63\x31\x65\x61\x38\x31\x32\x35\
\x2d\x65\x62\x64\x64\x2d\x63\x38\x34\x62\x2d\x39\x66\x37\x61\x2d\
\x30\x36\x39\x39\x32\x39\x38\x64\x62\x39\x33\x63\x22\x20\x73\x74\
\x52\x65\x66\x3a\x64\x6f\x63\x75\x6d\x65\x6e\x74\x49\x44\x3d\x22\
\x78\x6d\x70\x2e\x64\x69\x64\x3a\x63\x34\x37\x39\x66\x62\x31\x39\
\x2d\x34\x33\x34\x34\x2d\x62\x65\x34\x33\x2d\x39\x39\x31\x31\x2d\
\x33\x64\x35\x32\x63\x31\x30\x33\x35\x37\x65\x31\x22\x2f\x3e\x20\
\x3c\x2f\x72\x64\x66\x3a\x44\x65\x73\x63\x72\x69\x70\x74\x69\x6f\
\x6e\x3e\x20\x3c\x2f\x72\x64\x66\x3a\x52\x44\x46\x3e\x20\x3c\x2f\
\x78\x3a\x78\x6d\x70\x6d\x65\x74\x61\x3e\x20\x3c\x3f\x78\x70\x61\
\x63\x6b\x65\x74\x20\x65\x6e\x64\x3d\x22\x72\x22\x3f\x3e\x83\x53\
\x5b\xa8\x00\x00\x02\xe0\x49\x44\x41\x54\x78\xda\xc4\x97\x5d\x88\
\x4d\x51\x14\x80\xcf\x3d\xf7\xc6\x0c\xe6\x87\x51\x1a\x43\x8d\x14\
\xae\x19\x3f\xe3\xa7\x1b\xde\x66\x42\x89\x32\xf2\xa0\x79\x98\x97\
\x21\x0f\xc6\x95\xbc\x88\x46\x24\xc9\x4f\xe8\x26\x2f\x46\x29\x49\
\x1e\x64\x3c\x90\x78\xf0\xf3\xc2\x2d\xa1\xe6\x0e\x93\x22\xf2\x73\
\xf3\x30\x4d\x83\x49\x46\x5c\xdf\xd2\xbe\x75\xbb\x73\xf6\xbe\xfb\
\xdc\xb9\x87\x55\x5f\xeb\xfc\xec\xb3\xd6\x3a\x6b\xaf\xb3\xf7\x3a\
\x21\xc7\x52\xe2\xf1\x78\x15\xaa\x15\x56\x42\x0c\xa6\xc3\x14\x70\
\xe1\x3b\xbc\x85\x3e\x78\x00\xd7\x13\x89\xc4\x47\x1b\xbb\x21\x0b\
\xc7\xf3\x50\x5d\xb0\x09\xca\x2c\xe3\xcd\xc0\x6d\x38\x4e\x20\xf7\
\x8b\x0a\x00\xc7\x95\xa8\x13\xd0\x01\x61\xa7\x78\xe9\x81\x4e\x5d\
\x46\x42\x1a\xe7\x8b\x25\x8d\x50\xef\x94\x46\x06\xa0\x8d\x20\xee\
\xe4\xdf\x08\x7b\x38\x6f\x56\xe9\x9b\xe6\x94\x4e\x26\xc0\x96\x58\
\x2c\xf6\x26\x99\x4c\xf6\x6a\x33\x80\xf3\x26\x55\x44\x15\x4e\x30\
\xf2\x1b\x36\x90\x89\x5b\xa3\x02\xc0\x79\x35\xea\x99\x45\xda\x87\
\xe1\x26\x3c\x86\x34\xfc\x84\x19\xb0\x5c\x8c\x43\x65\x81\xe7\x87\
\x60\x29\x41\xbc\x96\x93\x48\xce\x8d\xa3\x05\x9c\x7f\x85\xc3\x70\
\x8e\x87\x87\x35\xb5\x53\x8e\xda\x0e\x07\xa1\x4a\x63\x47\xae\x9f\
\x67\x6c\x0b\x76\x32\x21\xf5\x60\x23\xea\xb9\xa1\xda\x53\xb0\x31\
\x1b\xb5\xc5\xa7\x3b\x53\x55\xff\x12\xc3\xb0\x56\xec\xf5\xb8\xea\
\xe4\x15\x6c\x53\x8e\xf2\xa5\x1f\x9a\x6d\x9d\x8b\x30\xf6\xbd\x3c\
\xa3\x5e\x4a\x27\xfb\xbd\x8a\x50\xce\x57\xc3\x1e\x58\x03\x23\xd0\
\x84\xc1\x17\xc5\x54\x1c\xf6\x66\xa3\xa4\xea\xcb\x35\x43\x16\x9a\
\x16\xa2\x05\xa8\xf9\x38\xbf\x3a\x96\xb2\xc7\xce\x21\xd4\x01\xcd\
\xed\xae\x90\x13\xb0\x10\x40\x2d\xea\x83\xda\x33\xf2\xe5\xae\x1b\
\x74\x00\x64\x30\x6d\xa8\x85\x68\x84\x08\xeb\x38\x58\x25\x55\xcb\
\xe0\x91\x80\xe2\x48\x69\xbe\x88\x5a\xc9\xc0\x1c\x90\x79\xfe\x44\
\x30\xa7\xa0\x21\x80\x00\x06\x35\xd7\xc3\xb2\x10\x4d\x56\x27\x35\
\xb0\x5b\x20\x88\x47\xe8\x0b\x12\x18\x59\xf9\x56\x82\x00\x6a\x34\
\xd7\x7f\x49\x06\xaa\x3d\x6e\xac\x80\x6e\x59\x6a\x09\xa6\x1b\x62\
\x63\x0c\xa0\x51\x73\x3d\x9d\x9b\x01\x2f\x99\xa4\xfa\x81\x0e\x82\
\x48\xa9\xac\x5c\x22\x2b\x03\x3e\xbe\x02\xd9\x27\x16\x69\x6e\xbf\
\x74\x0b\x04\x90\xff\x16\xa7\xe1\x22\x46\xcb\x7c\xbc\xfd\x4e\x43\
\xe3\xf3\x30\xe2\x23\x80\xcf\x52\x1f\xbc\xfd\x15\x1f\x6f\x1f\x45\
\xed\x32\x0c\xb9\x11\xd1\xd4\x40\xbe\x5c\x86\x1d\x38\x1f\xf2\xe1\
\x7c\x2a\xea\x1a\x8c\xd7\x0c\x79\x82\xbd\x5e\xdb\x29\x58\x26\xdf\
\xac\xcf\x3d\x40\x1a\x9b\xa8\x61\xd8\x91\x6c\x3f\x60\x13\xc0\x5c\
\x78\x8a\x61\x69\x52\xcf\x10\xf9\xa0\xa1\x91\xed\x84\x7d\x30\xd1\
\x60\xef\x9e\xa4\xff\xef\x6e\xc8\x43\xfd\xca\x81\xad\xfc\x90\x35\
\x1c\x92\x6a\x8d\x17\xa9\x53\x1d\xd1\x5a\x8b\xd6\x7d\x54\x47\x94\
\xcd\xc0\x3b\x38\xa9\x52\x63\x6a\xab\x64\x4e\xd7\x2b\x8a\xe9\x09\
\xdb\x72\x7b\x0b\x57\xa5\xea\x98\xda\x7a\xcf\xaa\xbe\xee\x4b\x00\
\xcb\xb1\xf4\x8e\xed\xb9\x0d\x69\x76\x0a\x1a\xb8\xd8\xe7\xd1\x0b\
\xc8\x1c\xcd\x0a\xfa\xbf\xc0\xd4\x90\x54\xa8\x29\xd9\xaa\xd9\xcb\
\x83\xfb\x33\xf2\xc8\xc6\x5e\xd8\x0c\xe3\xfe\xd9\xbf\xa1\x66\x61\
\x59\x07\x2d\x6a\x6d\xaf\xcf\x69\xbd\x8b\xff\x3b\xce\x64\x32\xce\
\xff\x94\x3f\x02\x0c\x00\xbe\x84\xeb\xde\xd4\xad\xd3\x24\x00\x00\
\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
\x00\x00\x07\x88\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x20\x00\x00\x00\x20\x08\x06\x00\x00\x00\x73\x7a\x7a\xf4\
\x00\x00\x00\x19\x74\x45\x58\x74\x53\x6f\x66\x74\x77\x61\x72\x65\
\x00\x41\x64\x6f\x62\x65\x20\x49\x6d\x61\x67\x65\x52\x65\x61\x64\
\x79\x71\xc9\x65\x3c\x00\x00\x03\x71\x69\x54\x58\x74\x58\x4d\x4c\
\x3a\x63\x6f\x6d\x2e\x61\x64\x6f\x62\x65\x2e\x78\x6d\x70\x00\x00\
\x00\x00\x00\x3c\x3f\x78\x70\x61\x63\x6b\x65\x74\x20\x62\x65\x67\
\x69\x6e\x3d\x22\xef\xbb\xbf\x22\x20\x69\x64\x3d\x22\x57\x35\x4d\
\x30\x4d\x70\x43\x65\x68\x69\x48\x7a\x72\x65\x53\x7a\x4e\x54\x63\
\x7a\x6b\x63\x39\x64\x22\x3f\x3e\x20\x3c\x78\x3a\x78\x6d\x70\x6d\
\x65\x74\x61\x20\x78\x6d\x6c\x6e\x73\x3a\x78\x3d\x22\x61\x64\x6f\
\x62\x65\x3a\x6e\x73\x3a\x6d\x65\x74\x61\x2f\x22\x20\x78\x3a\x78\
\x6d\x70\x74\x6b\x3d\x22\x41\x64\x6f\x62\x65\x20\x58\x4d\x50\x20\
\x43\x6f\x72\x65\x20\x35\x2e\x35\x2d\x63\x30\x31\x34\x20\x37\x39\
\x2e\x31\x35\x31\x34\x38\x31\x2c\x20\x32\x30\x31\x33\x2f\x30\x33\
\x2f\x31\x33\x2d\x31\x32\x3a\x30\x39\x3a\x31\x35\x20\x20\x20\x20\
\x20\x20\x20\x20\x22\x3e\x20\x3c\x72\x64\x66\x3a\x52\x44\x46\x20\
\x78\x6d\x6c\x6e\x73\x3a\x72\x64\x66\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x31\x39\x39\
\x39\x2f\x30\x32\x2f\x32\x32\x2d\x72\x64\x66\x2d\x73\x79\x6e\x74\
\x61\x78\x2d\x6e\x73\x23\x22\x3e\x20\x3c\x72\x64\x66\x3a\x44\x65\
\x73\x63\x72\x69\x70\x74\x69\x6f\x6e\x20\x72\x64\x66\x3a\x61\x62\
\x6f\x75\x74\x3d\x22\x22\x20\x78\x6d\x6c\x6e\x73\x3a\x78\x6d\x70\
\x4d\x4d\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x6e\x73\x2e\x61\x64\
\x6f\x62\x65\x2e\x63\x6f\x6d\x2f\x78\x61\x70\x2f\x31\x2e\x30\x2f\
\x6d\x6d\x2f\x22\x20\x78\x6d\x6c\x6e\x73\x3a\x73\x74\x52\x65\x66\
\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x6e\x73\x2e\x61\x64\x6f\x62\
\x65\x2e\x63\x6f\x6d\x2f\x78\x61\x70\x2f\x31\x2e\x30\x2f\x73\x54\
\x79\x70\x65\x2f\x52\x65\x73\x6f\x75\x72\x63\x65\x52\x65\x66\x23\
\x22\x20\x78\x6d\x6c\x6e\x73\x3a\x78\x6d\x70\x3d\x22\x68\x74\x74\
\x70\x3a\x2f\x2f\x6e\x73\x2e\x61\x64\x6f\x62\x65\x2e\x63\x6f\x6d\
\x2f\x78\x61\x70\x2f\x31\x2e\x30\x2f\x22\x20\x78\x6d\x70\x4d\x4d\
\x3a\x4f\x72\x69\x67\x69\x6e\x61\x6c\x44\x6f\x63\x75\x6d\x65\x6e\
\x74\x49\x44\x3d\x22\x78\x6d\x70\x2e\x64\x69\x64\x3a\x63\x34\x37\
\x39\x66\x62\x31\x39\x2d\x34\x33\x34\x34\x2d\x62\x65\x34\x33\x2d\
\x39\x39\x31\x31\x2d\x33\x64\x35\x32\x63\x31\x30\x33\x35\x37\x65\
\x31\x22\x20\x78\x6d\x70\x4d\x4d\x3a\x44\x6f\x63\x75\x6d\x65\x6e\
\x74\x49\x44\x3d\x22\x78\x6d\x70\x2e\x64\x69\x64\x3a\x30\x31\x43\
\x33\x45\x33\x41\x37\x35\x39\x38\x35\x31\x31\x45\x33\x42\x46\x41\
\x33\x46\x46\x39\x46\x44\x38\x42\x31\x45\x45\x45\x30\x22\x20\x78\
\x6d\x70\x4d\x4d\x3a\x49\x6e\x73\x74\x61\x6e\x63\x65\x49\x44\x3d\
\x22\x78\x6d\x70\x2e\x69\x69\x64\x3a\x30\x31\x43\x33\x45\x33\x41\
\x36\x35\x39\x38\x35\x31\x31\x45\x33\x42\x46\x41\x33\x46\x46\x39\
\x46\x44\x38\x42\x31\x45\x45\x45\x30\x22\x20\x78\x6d\x70\x3a\x43\
\x72\x65\x61\x74\x6f\x72\x54\x6f\x6f\x6c\x3d\x22\x41\x64\x6f\x62\
\x65\x20\x50\x68\x6f\x74\x6f\x73\x68\x6f\x70\x20\x43\x43\x20\x28\
\x57\x69\x6e\x64\x6f\x77\x73\x29\x22\x3e\x20\x3c\x78\x6d\x70\x4d\
\x4d\x3a\x44\x65\x72\x69\x76\x65\x64\x46\x72\x6f\x6d\x20\x73\x74\
\x52\x65\x66\x3a\x69\x6e\x73\x74\x61\x6e\x63\x65\x49\x44\x3d\x22\
\x78\x6d\x70\x2e\x69\x69\x64\x3a\x63\x31\x65\x61\x38\x31\x32\x35\
\x2d\x65\x62\x64\x64\x2d\x63\x38\x34\x62\x2d\x39\x66\x37\x61\x2d\
\x30\x36\x39\x39\x32\x39\x38\x64\x62\x39\x33\x63\x22\x20\x73\x74\
\x52\x65\x66\x3a\x64\x6f\x63\x75\x6d\x65\x6e\x74\x49\x44\x3d\x22\
\x78\x6d\x70\x2e\x64\x69\x64\x3a\x63\x34\x37\x39\x66\x62\x31\x39\
\x2d\x34\x33\x34\x34\x2d\x62\x65\x34\x33\x2d\x39\x39\x31\x31\x2d\
\x33\x64\x35\x32\x63\x31\x30\x33\x35\x37\x65\x31\x22\x2f\x3e\x20\
\x3c\x2f\x72\x64\x66\x3a\x44\x65\x73\x63\x72\x69\x70\x74\x69\x6f\
\x6e\x3e\x20\x3c\x2f\x72\x64\x66\x3a\x52\x44\x46\x3e\x20\x3c\x2f\
\x78\x3a\x78\x6d\x70\x6d\x65\x74\x61\x3e\x20\x3c\x3f\x78\x70\x61\
\x63\x6b\x65\x74\x20\x65\x6e\x64\x3d\x22\x72\x22\x3f\x3e\x90\x2e\
\xe6\x71\x00\x00\x03\xad\x49\x44\x41\x54\x78\xda\xbc\x97\x69\x48\
\x55\x41\x14\xc7\xef\xd3\xd7\x26\x2d\x56\x28\x25\xb6\x88\x49\x54\
\x52\x7e\x50\xb3\xc5\x28\x69\x5f\xa0\x45\x2a\xa1\x32\xa3\x68\x7f\
\x24\x45\x44\x82\x96\x85\x96\x08\x81\x12\x51\xbd\xc8\x16\x22\xc2\
\xea\x43\x1b\xf5\x21\xeb\x4b\x4a\x09\x6a\x52\x10\xd8\x2e\x11\x11\
\xee\xb8\x96\xf5\x3f\xf0\xbf\x32\x5d\x6e\xef\xde\x6b\xcf\x0e\xfc\
\x98\xb9\x33\x6f\xce\x9c\x99\x39\x73\xe6\x3c\x97\xc7\xe3\x71\x69\
\x9a\xe6\x01\xdb\x41\x98\xd6\x3b\x29\x01\xc9\x05\x05\x05\x3f\x9d\
\x0e\x74\xc1\x80\x43\x28\x73\x41\x3b\x28\x02\x55\xac\xdb\x91\x20\
\x70\x9a\xf5\xcb\x60\x33\x8c\xf8\xe5\xc4\x00\x37\xd8\x0f\x5a\xc0\
\x1c\x0c\xae\x70\x32\x18\xc6\x07\xd3\x80\x8f\x60\x13\xa8\x03\xe9\
\x4e\x77\x40\x2c\x3e\x09\x72\xc0\x08\xd0\xc0\x1d\x18\x65\xf8\xed\
\x57\x30\x10\x04\xf3\xbb\x9d\xd4\x83\x75\x60\x23\x58\x0e\x32\xb1\
\x90\x63\x76\x0d\x08\x60\x59\x0e\xb6\x80\xf7\x60\x1f\x48\x60\x5d\
\x25\x81\x7d\xfa\xf7\x75\x45\x4f\x27\x8d\x10\x5f\xc8\xc6\xa2\x76\
\x3b\x35\xe0\x07\x28\x03\x47\xc1\x07\xae\x52\xea\x7b\xc1\x02\xd6\
\x83\xd9\xb7\x02\x6c\xa3\xbf\xf4\x08\x56\xdd\x8a\x62\x25\x78\x2e\
\x9f\x30\x22\xc5\x89\x01\x1a\x57\x98\x05\x2e\x82\xdb\xac\xcf\xe7\
\xee\x64\xb1\x4d\xfa\x6a\x41\xb4\x38\x9c\xc1\x97\xc4\x88\x26\x14\
\x8b\xc1\x6b\x70\x09\x46\x2c\x75\x62\xc0\x53\x3a\x50\x3a\x8f\x23\
\x02\xdc\xe7\xca\x23\x14\x44\x79\x36\x58\x0f\xf4\x6b\x17\xa3\xec\
\x84\xf8\x44\x12\xf8\x04\x8a\x61\xc4\x2c\x3b\x4e\xb8\x0a\x8c\x07\
\xa7\xd8\x5e\x05\x45\x31\xec\x4b\x43\xbd\xc8\xc7\x4d\x10\xcf\x0f\
\x04\x33\xf1\xbb\x57\x4a\x7b\x38\x8a\x52\x30\x14\x24\xa2\xef\xa5\
\x95\x01\x0f\xc0\x20\x93\xdf\xb4\x62\x70\xa7\x0f\x03\x32\xe9\x23\
\xcd\xc0\x0b\xaa\x81\x1e\x0b\x26\x80\x0c\xf0\x0d\xc4\x42\xcf\x67\
\xb3\x38\xa0\x8b\x38\xd7\x1e\x93\x39\x4e\x60\x92\xc1\x4a\x5f\x0d\
\x14\x6d\x55\xfa\x8f\x83\x2e\xb0\x83\xb7\xc4\x65\xa2\x23\x14\x44\
\x02\x9f\x06\x34\xd1\xcb\x8d\xd2\xcd\x09\xf4\xbe\x2f\x06\xef\xef\
\x66\x24\xcd\xf5\x11\xac\xea\xed\x38\x61\x08\x9d\x49\x38\x4c\x27\
\x92\x7a\x1e\xb7\x38\x12\x93\x89\xe7\xc7\x42\x69\x25\xf0\x6a\x7e\
\x10\x75\x07\xe4\x7a\x3d\xe6\x4e\x8c\xe4\xf7\x13\xa5\xbf\x8d\x65\
\x19\x23\x66\x8d\xbf\x0d\x98\x2c\xf7\x1e\xab\x9c\x8a\xd5\x49\x6c\
\x1f\xc6\xf6\x5d\x68\xbb\xa6\x6c\x79\xb6\xe6\x47\x51\x0d\x90\x95\
\x75\xe8\x8e\x07\x06\xb0\x5e\xa9\xf5\xa1\xb8\x95\x95\xc9\x4b\x58\
\xc1\xfa\x19\xed\x3f\x89\xdb\xc6\x93\x1b\xcb\x64\x25\xa4\x97\x73\
\xf4\xd3\xaf\x2b\x74\x7d\x67\x5d\x4a\x2f\x16\x5a\xe6\xb6\x98\x7c\
\x11\x8a\x3b\x8a\x92\x7f\x11\x63\x48\x4e\x83\xfe\xd5\x01\x16\x83\
\xf2\xfc\x34\xf9\xdf\x42\x40\xbe\x95\x01\x53\x58\x5e\x01\xaa\x5f\
\xa4\x82\x7b\x60\x1e\x58\xc6\x67\x58\xea\x0f\x59\x4a\x68\x5f\x08\
\xde\x2a\x63\x6e\xb2\x3d\x19\xbc\xd1\x43\xb5\x95\x01\x81\x2c\xab\
\x0d\x61\x54\x8e\x25\x8a\x09\x48\x28\x9f\xe0\x12\x25\x97\x94\xa7\
\xfb\x11\xc3\x6f\xcf\x89\x82\xab\x60\x09\xdf\x0c\x7b\x4e\x48\x69\
\xe6\x6b\xa9\x4b\x8b\x12\x27\x1a\x95\x47\xac\x93\x01\x4a\x9e\xf4\
\x70\xe6\x9b\xba\xac\x65\x0e\x21\x21\xfd\x82\x59\x28\xb6\x32\xa0\
\x3f\xeb\x75\xf0\xde\x2e\x25\x37\x6c\x53\xea\xad\x0c\x68\xf2\x28\
\x6d\x90\xa4\x84\x93\xde\xe2\xf1\xe4\xf3\xb8\xce\x3b\xdd\x81\x78\
\x70\x0e\x0c\x91\xc4\x05\xde\x9b\xa4\x04\xaa\x46\xc3\x83\x36\x1a\
\x8c\x53\x7c\x25\x95\xef\x8a\x24\xbb\x2f\x98\xf6\xc7\xdb\x35\x40\
\x5e\xb1\xe1\x3c\xbf\x19\xcc\x86\x52\x98\x27\xea\x92\xc3\x14\x4d\
\xa3\xe3\xcd\x36\xd1\x33\x96\x19\x57\x29\xf3\xcc\x44\xb6\x77\x58\
\x19\x70\x83\x41\x48\x24\x8e\x18\x65\xae\x52\x9f\xe8\x43\x57\x14\
\xf9\x43\x7f\x80\xc1\xdb\x8d\x72\x80\xe7\xd7\x17\x72\x57\x76\x43\
\xdf\x81\x68\xde\x53\xcd\x90\x6c\x88\xb7\xaf\xc1\x99\x87\xf1\xba\
\xf5\x46\xe4\x48\x0a\x99\xca\x97\xeb\xa1\x18\xba\x6b\xf5\x9c\xb0\
\x81\xd9\x6d\x1c\x1a\xdf\xf9\x73\x89\xd0\x1d\xc4\x1c\x63\x3a\x98\
\x66\x96\x98\xca\x0e\x9c\x05\x07\xe5\x0f\x05\x06\x14\x32\xe8\x74\
\xfb\x61\xfe\x31\x60\x27\x98\x04\x9e\x51\xaf\xe9\x6b\x98\xc1\x2c\
\x56\x92\xca\x23\x7e\x3e\x67\x89\x17\xc5\x4c\x6a\x4c\xff\x35\xff\
\x16\x60\x00\x9d\x8f\x25\xe2\xca\x2c\x04\xd4\x00\x00\x00\x00\x49\
\x45\x4e\x44\xae\x42\x60\x82\
\x00\x00\x06\xed\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x20\x00\x00\x00\x20\x08\x06\x00\x00\x00\x73\x7a\x7a\xf4\
\x00\x00\x00\x19\x74\x45\x58\x74\x53\x6f\x66\x74\x77\x61\x72\x65\
\x00\x41\x64\x6f\x62\x65\x20\x49\x6d\x61\x67\x65\x52\x65\x61\x64\
\x79\x71\xc9\x65\x3c\x00\x00\x03\x71\x69\x54\x58\x74\x58\x4d\x4c\
\x3a\x63\x6f\x6d\x2e\x61\x64\x6f\x62\x65\x2e\x78\x6d\x70\x00\x00\
\x00\x00\x00\x3c\x3f\x78\x70\x61\x63\x6b\x65\x74\x20\x62\x65\x67\
\x69\x6e\x3d\x22\xef\xbb\xbf\x22\x20\x69\x64\x3d\x22\x57\x35\x4d\
\x30\x4d\x70\x43\x65\x68\x69\x48\x7a\x72\x65\x53\x7a\x4e\x54\x63\
\x7a\x6b\x63\x39\x64\x22\x3f\x3e\x20\x3c\x78\x3a\x78\x6d\x70\x6d\
\x65\x74\x61\x20\x78\x6d\x6c\x6e\x73\x3a\x78\x3d\x22\x61\x64\x6f\
\x62\x65\x3a\x6e\x73\x3a\x6d\x65\x74\x61\x2f\x22\x20\x78\x3a\x78\
\x6d\x70\x74\x6b\x3d\x22\x41\x64\x6f\x62\x65\x20\x58\x4d\x50\x20\
\x43\x6f\x72\x65\x20\x35\x2e\x35\x2d\x63\x30\x31\x34\x20\x37\x39\
\x2e\x31\x35\x31\x34\x38\x31\x2c\x20\x32\x30\x31\x33\x2f\x30\x33\
\x2f\x31\x33\x2d\x31\x32\x3a\x30\x39\x3a\x31\x35\x20\x20\x20\x20\
\x20\x20\x20\x20\x22\x3e\x20\x3c\x72\x64\x66\x3a\x52\x44\x46\x20\
\x78\x6d\x6c\x6e\x73\x3a\x72\x64\x66\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x31\x39\x39\
\x39\x2f\x30\x32\x2f\x32\x32\x2d\x72\x64\x66\x2d\x73\x79\x6e\x74\
\x61\x78\x2d\x6e\x73\x23\x22\x3e\x20\x3c\x72\x64\x66\x3a\x44\x65\
\x73\x63\x72\x69\x70\x74\x69\x6f\x6e\x20\x72\x64\x66\x3a\x61\x62\
\x6f\x75\x74\x3d\x22\x22\x20\x78\x6d\x6c\x6e\x73\x3a\x78\x6d\x70\
\x4d\x4d\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x6e\x73\x2e\x61\x64\
\x6f\x62\x65\x2e\x63\x6f\x6d\x2f\x78\x61\x70\x2f\x31\x2e\x30\x2f\
\x6d\x6d\x2f\x22\x20\x78\x6d\x6c\x6e\x73\x3a\x73\x74\x52\x65\x66\
\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x6e\x73\x2e\x61\x64\x6f\x62\
\x65\x2e\x63\x6f\x6d\x2f\x78\x61\x70\x2f\x31\x2e\x30\x2f\x73\x54\
\x79\x70\x65\x2f\x52\x65\x73\x6f\x75\x72\x63\x65\x52\x65\x66\x23\
\x22\x20\x78\x6d\x6c\x6e\x73\x3a\x78\x6d\x70\x3d\x22\x68\x74\x74\
\x70\x3a\x2f\x2f\x6e\x73\x2e\x61\x64\x6f\x62\x65\x2e\x63\x6f\x6d\
\x2f\x78\x61\x70\x2f\x31\x2e\x30\x2f\x22\x20\x78\x6d\x70\x4d\x4d\
\x3a\x4f\x72\x69\x67\x69\x6e\x61\x6c\x44\x6f\x63\x75\x6d\x65\x6e\
\x74\x49\x44\x3d\x22\x78\x6d\x70\x2e\x64\x69\x64\x3a\x63\x34\x37\
\x39\x66\x62\x31\x39\x2d\x34\x33\x34\x34\x2d\x62\x65\x34\x33\x2d\
\x39\x39\x31\x31\x2d\x33\x64\x35\x32\x63\x31\x30\x33\x35\x37\x65\
\x31\x22\x20\x78\x6d\x70\x4d\x4d\x3a\x44\x6f\x63\x75\x6d\x65\x6e\
\x74\x49\x44\x3d\x22\x78\x6d\x70\x2e\x64\x69\x64\x3a\x36\x31\x41\
\x31\x30\x32\x43\x34\x35\x39\x38\x35\x31\x31\x45\x33\x42\x46\x35\
\x45\x43\x42\x45\x34\x45\x41\x32\x36\x32\x30\x45\x41\x22\x20\x78\
\x6d\x70\x4d\x4d\x3a\x49\x6e\x73\x74\x61\x6e\x63\x65\x49\x44\x3d\
\x22\x78\x6d\x70\x2e\x69\x69\x64\x3a\x36\x31\x41\x31\x30\x32\x43\
\x33\x35\x39\x38\x35\x31\x31\x45\x33\x42\x46\x35\x45\x43\x42\x45\
\x34\x45\x41\x32\x36\x32\x30\x45\x41\x22\x20\x78\x6d\x70\x3a\x43\
\x72\x65\x61\x74\x6f\x72\x54\x6f\x6f\x6c\x3d\x22\x41\x64\x6f\x62\
\x65\x20\x50\x68\x6f\x74\x6f\x73\x68\x6f\x70\x20\x43\x43\x20\x28\
\x57\x69\x6e\x64\x6f\x77\x73\x29\x22\x3e\x20\x3c\x78\x6d\x70\x4d\
\x4d\x3a\x44\x65\x72\x69\x76\x65\x64\x46\x72\x6f\x6d\x20\x73\x74\
\x52\x65\x66\x3a\x69\x6e\x73\x74\x61\x6e\x63\x65\x49\x44\x3d\x22\
\x78\x6d\x70\x2e\x69\x69\x64\x3a\x63\x31\x65\x61\x38\x31\x32\x35\
\x2d\x65\x62\x64\x64\x2d\x63\x38\x34\x62\x2d\x39\x66\x37\x61\x2d\
\x30\x36\x39\x39\x32\x39\x38\x64\x62\x39\x33\x63\x22\x20\x73\x74\
\x52\x65\x66\x3a\x64\x6f\x63\x75\x6d\x65\x6e\x74\x49\x44\x3d\x22\
\x78\x6d\x70\x2e\x64\x69\x64\x3a\x63\x34\x37\x39\x66\x62\x31\x39\
\x2d\x34\x33\x34\x34\x2d\x62\x65\x34\x33\x2d\x39\x39\x31\x31\x2d\
\x33\x64\x35\x32\x63\x31\x30\x33\x35\x37\x65\x31\x22\x2f\x3e\x20\
\x3c\x2f\x72\x64\x66\x3a\x44\x65\x73\x63\x72\x69\x70\x74\x69\x6f\
\x6e\x3e\x20\x3c\x2f\x72\x64\x66\x3a\x52\x44\x46\x3e\x20\x3c\x2f\
\x78\x3a\x78\x6d\x70\x6d\x65\x74\x61\x3e\x20\x3c\x3f\x78\x70\x61\
\x63\x6b\x65\x74\x20\x65\x6e\x64\x3d\x22\x72\x22\x3f\x3e\xf0\xfb\
\xe3\xbf\x00\x00\x03\x12\x49\x44\x41\x54\x78\xda\xc4\x97\x59\x68\
\x14\x41\x10\x86\x67\x36\x6b\x3c\x58\x3c\x50\x30\xa8\xa8\xa0\x46\
\xe3\x85\x20\xc6\x20\x51\x04\x2f\x54\x44\xd4\x27\xf3\x10\x04\x11\
\x3c\xe2\x2a\xe2\x8b\x04\x3c\x02\x1e\xa0\x08\xae\x27\x88\x22\x06\
\x7c\x92\x48\x10\x73\x78\x60\x50\x09\x1e\x08\x12\x95\xa8\x41\xf0\
\x26\x3e\x88\x9a\x88\x17\xd9\xe8\x57\x50\x23\xcd\xb2\x99\xe9\xdd\
\x6c\xd6\x82\x2f\x95\xdd\xe9\xa9\xfa\xa7\xa7\xbb\xba\xd6\x75\x2c\
\x2c\x1a\x8d\xe6\xe0\xe6\xc0\x12\x98\x09\xf9\x30\x54\x2f\xff\x82\
\x37\xf0\x08\x6e\x40\x75\x2c\x16\x6b\x75\x2c\xcd\x0d\x48\x1c\xc1\
\x95\xc1\x26\x18\x61\x19\xb3\x03\xae\xc0\x01\x84\xdc\x4d\x5b\x00\
\xc9\x57\xe2\x8e\xc2\x30\x27\x7d\xbb\x04\xdb\x10\xf2\xca\x88\x3b\
\x1c\x57\xc1\x77\x6b\x93\x0a\x60\x40\x2f\x5c\x0c\xd6\x3b\x99\xb1\
\x76\x99\x41\x12\x56\x12\x7b\x22\xff\xd7\xc1\x10\x88\xf0\x5d\xa7\
\x9b\x90\x3c\x17\x77\x11\x96\x39\x99\xb7\xf3\x1a\x77\x90\x7e\xce\
\x47\x40\x4b\xc8\x48\x2e\x62\x2e\xf4\x50\x72\xb1\x52\x23\xb9\xd8\
\x64\xf9\x13\x36\xbe\xd8\x01\xab\x2c\x02\xb5\x41\x03\x3c\x87\x2f\
\x30\x18\x8a\xa1\x30\x45\x41\x53\x64\x8d\x84\xf5\xe9\x45\x4d\x45\
\xc0\x0d\xb2\x90\xf6\xc8\x2c\x31\x75\xbf\x93\xac\x9d\x31\xb8\xcd\
\xb0\x01\x72\x2d\x05\xfc\x9b\x81\x43\x90\x13\xf0\xfe\x64\x21\x7d\
\xeb\x6a\x00\xd7\x5e\xe2\xb6\x22\x44\x16\xf0\x39\x98\x6d\x23\x20\
\xc4\x0d\xd3\xf1\x8b\x7c\x06\x9e\x80\x35\x7e\xc9\x13\xec\x35\xb4\
\x58\x8c\x1b\x4b\xee\x3e\x32\x03\xeb\x7c\x06\xdd\x86\x2d\x24\xff\
\x63\x59\x31\xfb\xe9\x2e\x5a\x6c\x31\x5c\x66\xbc\x20\xac\xe5\x35\
\x99\x49\xd2\x32\x92\x77\x58\x26\xef\x8b\xbb\x06\xb3\x52\x59\x88\
\xb2\x0d\x27\xc0\x02\xd8\x0b\x8d\x10\xd7\x8b\x57\x49\xde\x64\x1b\
\x89\xb1\x3f\xf4\x61\x96\x6b\x05\x6d\xb6\x11\xe0\x76\x51\xff\x65\
\x5b\x7d\x22\xe8\x83\xee\x6c\x7c\x2d\xbb\xf3\x0d\xf2\x12\x86\xd4\
\xbb\x4e\x16\x0d\x41\x93\x0c\x31\x73\xe1\x6b\x56\x05\x24\x88\x91\
\xf5\x57\x24\xff\x14\xeb\x87\xff\x62\x2e\xc9\xdf\xe1\x07\x68\x79\
\xbd\x2e\xf0\xee\x9f\x66\x53\x40\x5d\x92\x42\xd4\xea\x89\x51\x41\
\xef\xbb\x39\xdd\x33\xf4\xcc\xb8\x93\x58\xd0\x44\xc0\x41\xfc\xf6\
\x80\x18\xcd\x86\xa0\x06\x82\xb4\xa5\x28\xc0\x7b\x48\xd9\xe2\xf7\
\xe0\xa6\xce\x78\xa3\xd4\x81\xc7\x16\x31\x0a\xf4\xa0\xa9\x86\x5a\
\x2d\x3a\xb6\xc9\xa7\xe2\x16\x1a\xd5\x4f\x0a\x55\xb9\x16\xad\x67\
\xb6\x02\x3c\xab\x95\xa2\xa5\x45\xc7\x76\xa5\x1f\xf3\x69\xfd\x6a\
\x42\x3a\xbd\x9d\x16\xf1\xce\x4a\xb3\x42\xf2\xef\x96\xc9\x25\xe9\
\x91\x80\x53\xf1\x74\x88\x80\x3f\x2d\x4f\xaf\x71\x30\xca\x32\x79\
\x44\x8f\xe4\x8d\x3e\xc3\xea\xc9\xfd\xd0\xdb\xff\xf2\x1a\xc6\x07\
\xc4\x95\x27\x69\x26\xf8\x49\xa9\xf5\x7a\xfe\x3b\x49\x7a\xca\x12\
\xd8\x05\xa3\x7d\x62\xc5\xbd\x85\xef\xea\x8d\x72\xc3\xee\x14\x77\
\xd7\x7d\xd9\x56\x72\x66\xc0\x40\x7d\x00\x29\xaf\xfd\x2d\xee\x2d\
\xe7\x01\xf6\x99\x1d\x51\x53\x1a\xdb\xbb\x30\x8d\x3e\x50\xac\x0a\
\xf6\x7b\x1f\xbc\xae\xf8\x89\x31\xe0\x33\x54\xf6\x50\xe1\xbb\x0c\
\xab\xcd\x06\xc7\x13\x20\xef\x53\xb6\xd6\x5b\x39\x8a\x19\x50\xaa\
\x6d\x74\x7b\x06\x93\x9f\x92\xae\x3b\xb1\xa1\x75\x8d\x05\x74\x06\
\xb7\xd3\x2c\xbb\x7c\x27\x0b\xe9\x30\xac\xe8\x46\xe2\x0f\x52\xc4\
\x88\x5b\x95\xf2\x8f\x53\x43\x48\x91\xfe\x6e\x58\x1a\xd0\x3d\x9b\
\x26\x87\xdc\x71\x29\x44\x7e\x0d\x6d\x4a\xfd\x00\x42\xf2\xb4\xe5\
\x9a\x07\xd3\x60\x24\xf4\xd6\xcb\x1f\xe1\x85\xd6\xfa\x1a\xb8\x45\
\xe2\x78\x50\xcc\xbf\x02\x0c\x00\xb3\x4a\xfb\x1f\xcd\xe1\x9d\xc3\
\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
\x00\x00\x07\x89\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x20\x00\x00\x00\x20\x08\x06\x00\x00\x00\x73\x7a\x7a\xf4\
\x00\x00\x00\x19\x74\x45\x58\x74\x53\x6f\x66\x74\x77\x61\x72\x65\
\x00\x41\x64\x6f\x62\x65\x20\x49\x6d\x61\x67\x65\x52\x65\x61\x64\
\x79\x71\xc9\x65\x3c\x00\x00\x03\x71\x69\x54\x58\x74\x58\x4d\x4c\
\x3a\x63\x6f\x6d\x2e\x61\x64\x6f\x62\x65\x2e\x78\x6d\x70\x00\x00\
\x00\x00\x00\x3c\x3f\x78\x70\x61\x63\x6b\x65\x74\x20\x62\x65\x67\
\x69\x6e\x3d\x22\xef\xbb\xbf\x22\x20\x69\x64\x3d\x22\x57\x35\x4d\
\x30\x4d\x70\x43\x65\x68\x69\x48\x7a\x72\x65\x53\x7a\x4e\x54\x63\
\x7a\x6b\x63\x39\x64\x22\x3f\x3e\x20\x3c\x78\x3a\x78\x6d\x70\x6d\
\x65\x74\x61\x20\x78\x6d\x6c\x6e\x73\x3a\x78\x3d\x22\x61\x64\x6f\
\x62\x65\x3a\x6e\x73\x3a\x6d\x65\x74\x61\x2f\x22\x20\x78\x3a\x78\
\x6d\x70\x74\x6b\x3d\x22\x41\x64\x6f\x62\x65\x20\x58\x4d\x50\x20\
\x43\x6f\x72\x65\x20\x35\x2e\x35\x2d\x63\x30\x31\x34\x20\x37\x39\
\x2e\x31\x35\x31\x34\x38\x31\x2c\x20\x32\x30\x31\x33\x2f\x30\x33\
\x2f\x31\x33\x2d\x31\x32\x3a\x30\x39\x3a\x31\x35\x20\x20\x20\x20\
\x20\x20\x20\x20\x22\x3e\x20\x3c\x72\x64\x66\x3a\x52\x44\x46\x20\
\x78\x6d\x6c\x6e\x73\x3a\x72\x64\x66\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x31\x39\x39\
\x39\x2f\x30\x32\x2f\x32\x32\x2d\x72\x64\x66\x2d\x73\x79\x6e\x74\
\x61\x78\x2d\x6e\x73\x23\x22\x3e\x20\x3c\x72\x64\x66\x3a\x44\x65\
\x73\x63\x72\x69\x70\x74\x69\x6f\x6e\x20\x72\x64\x66\x3a\x61\x62\
\x6f\x75\x74\x3d\x22\x22\x20\x78\x6d\x6c\x6e\x73\x3a\x78\x6d\x70\
\x4d\x4d\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x6e\x73\x2e\x61\x64\
\x6f\x62\x65\x2e\x63\x6f\x6d\x2f\x78\x61\x70\x2f\x31\x2e\x30\x2f\
\x6d\x6d\x2f\x22\x20\x78\x6d\x6c\x6e\x73\x3a\x73\x74\x52\x65\x66\
\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x6e\x73\x2e\x61\x64\x6f\x62\
\x65\x2e\x63\x6f\x6d\x2f\x78\x61\x70\x2f\x31\x2e\x30\x2f\x73\x54\
\x79\x70\x65\x2f\x52\x65\x73\x6f\x75\x72\x63\x65\x52\x65\x66\x23\
\x22\x20\x78\x6d\x6c\x6e\x73\x3a\x78\x6d\x70\x3d\x22\x68\x74\x74\
\x70\x3a\x2f\x2f\x6e\x73\x2e\x61\x64\x6f\x62\x65\x2e\x63\x6f\x6d\
\x2f\x78\x61\x70\x2f\x31\x2e\x30\x2f\x22\x20\x78\x6d\x70\x4d\x4d\
\x3a\x4f\x72\x69\x67\x69\x6e\x61\x6c\x44\x6f\x63\x75\x6d\x65\x6e\
\x74\x49\x44\x3d\x22\x78\x6d\x70\x2e\x64\x69\x64\x3a\x63\x34\x37\
\x39\x66\x62\x31\x39\x2d\x34\x33\x34\x34\x2d\x62\x65\x34\x33\x2d\
\x39\x39\x31\x31\x2d\x33\x64\x35\x32\x63\x31\x30\x33\x35\x37\x65\
\x31\x22\x20\x78\x6d\x70\x4d\x4d\x3a\x44\x6f\x63\x75\x6d\x65\x6e\
\x74\x49\x44\x3d\x22\x78\x6d\x70\x2e\x64\x69\x64\x3a\x44\x39\x41\
\x43\x30\x30\x39\x35\x35\x39\x38\x44\x31\x31\x45\x33\x41\x41\x46\
\x43\x43\x39\x35\x30\x31\x33\x33\x31\x41\x30\x30\x39\x22\x20\x78\
\x6d\x70\x4d\x4d\x3a\x49\x6e\x73\x74\x61\x6e\x63\x65\x49\x44\x3d\
\x22\x78\x6d\x70\x2e\x69\x69\x64\x3a\x44\x39\x41\x43\x30\x30\x39\
\x34\x35\x39\x38\x44\x31\x31\x45\x33\x41\x41\x46\x43\x43\x39\x35\
\x30\x31\x33\x33\x31\x41\x30\x30\x39\x22\x20\x78\x6d\x70\x3a\x43\
\x72\x65\x61\x74\x6f\x72\x54\x6f\x6f\x6c\x3d\x22\x41\x64\x6f\x62\
\x65\x20\x50\x68\x6f\x74\x6f\x73\x68\x6f\x70\x20\x43\x43\x20\x28\
\x57\x69\x6e\x64\x6f\x77\x73\x29\x22\x3e\x20\x3c\x78\x6d\x70\x4d\
\x4d\x3a\x44\x65\x72\x69\x76\x65\x64\x46\x72\x6f\x6d\x20\x73\x74\
\x52\x65\x66\x3a\x69\x6e\x73\x74\x61\x6e\x63\x65\x49\x44\x3d\x22\
\x78\x6d\x70\x2e\x69\x69\x64\x3a\x36\x63\x33\x30\x35\x65\x36\x65\
\x2d\x66\x31\x33\x66\x2d\x35\x65\x34\x38\x2d\x62\x37\x32\x65\x2d\
\x33\x31\x38\x39\x62\x37\x63\x66\x34\x66\x34\x38\x22\x20\x73\x74\
\x52\x65\x66\x3a\x64\x6f\x63\x75\x6d\x65\x6e\x74\x49\x44\x3d\x22\
\x78\x6d\x70\x2e\x64\x69\x64\x3a\x63\x34\x37\x39\x66\x62\x31\x39\
\x2d\x34\x33\x34\x34\x2d\x62\x65\x34\x33\x2d\x39\x39\x31\x31\x2d\
\x33\x64\x35\x32\x63\x31\x30\x33\x35\x37\x65\x31\x22\x2f\x3e\x20\
\x3c\x2f\x72\x64\x66\x3a\x44\x65\x73\x63\x72\x69\x70\x74\x69\x6f\
\x6e\x3e\x20\x3c\x2f\x72\x64\x66\x3a\x52\x44\x46\x3e\x20\x3c\x2f\
\x78\x3a\x78\x6d\x70\x6d\x65\x74\x61\x3e\x20\x3c\x3f\x78\x70\x61\
\x63\x6b\x65\x74\x20\x65\x6e\x64\x3d\x22\x72\x22\x3f\x3e\x3e\x1d\
\x49\x08\x00\x00\x03\xae\x49\x44\x41\x54\x78\xda\xbc\x97\x69\x48\
\x54\x51\x14\xc7\xdf\x8c\x93\x95\x59\x4d\x8b\x11\x52\xe0\x46\xb4\
\x59\x7e\x50\xb2\x15\x22\xcb\x16\x82\xf6\x0d\x5a\x8c\x16\x93\x8a\
\xa4\x85\xc8\xd0\xb2\xc8\x32\xa1\x0f\x12\x51\x08\x69\x1b\x11\x56\
\x1f\x8c\x82\xc8\x16\xa8\x94\x92\xd4\xac\x88\xb0\xcd\x24\x22\x4a\
\xc5\x6c\x52\x4b\xeb\x7f\xe0\xff\xe4\xf6\x78\xce\xcc\x9b\xc6\x0e\
\xfc\xb8\xf7\xdd\xfb\xde\xbd\xff\x7b\xe7\xdc\x73\xcf\xd8\x76\x9e\
\x2d\xb1\x69\x9a\xb6\x15\x6c\x04\xa1\x9a\x6f\x76\x07\x2c\x3a\xba\
\x72\x7c\x9b\xd5\x0f\x6d\x10\xb0\x1b\x65\x16\x68\x06\xf9\xa0\x92\
\x75\x6f\x2c\x08\x1c\x67\xbd\x00\x24\x41\xc4\x6f\x2b\x02\x1c\x60\
\x3b\x68\x02\x53\xf0\x71\xb9\x95\x8f\x21\xde\x49\x01\xef\xc1\x6a\
\x50\x0f\x52\xad\xee\x80\x28\x3e\x02\x0e\x81\xfe\xa0\x81\x3b\x30\
\xd8\xf0\xee\x27\xd0\x03\x38\xf9\xdc\x4c\x64\xd2\xa5\x60\x15\x98\
\x03\xd2\xb1\x90\x03\xde\x0a\xb0\xb3\x2c\x03\x6b\xc1\x5b\xb0\x0d\
\xc4\xb3\xae\x12\xcf\x3e\xfd\xf9\xa2\x32\x4e\x2b\x58\x42\x5f\xc8\
\xc4\xa2\x52\xac\x0a\xf8\x05\x4a\xc1\x7e\xf0\x8e\xab\x94\xfa\x16\
\x30\x9d\x75\x27\xfb\xe6\x82\xf5\xf4\x97\x0e\xc3\xaa\x5d\x28\xe6\
\x81\x47\x20\x17\x22\x96\x5b\x11\xa0\x71\x85\x19\xe0\x34\xb8\xca\
\x7a\x02\x77\x27\x83\x6d\xd2\x57\x0b\x46\x83\x35\x06\x5f\x12\x11\
\x8d\x28\x66\x82\x17\xe2\x94\x10\x31\xdb\x8a\x80\x7b\x74\xa0\x54\
\xfe\x1c\xe1\xe0\x3a\x57\x1e\xae\x20\x83\x67\x82\x65\x40\x3f\x76\
\x31\xca\x4e\x88\x4f\x4c\x03\x35\xa0\x10\x22\x26\x7a\xe3\x84\xf3\
\x41\x18\x38\xc6\xf6\x4a\x0c\x14\xc3\x3e\x39\x5a\xf9\x6e\x4e\x42\
\x1d\x8a\x00\x30\x01\xef\x3d\x57\xda\x87\xa0\x28\x01\x7d\xc0\x64\
\xf4\x3d\xf5\x24\xe0\x06\xe8\x69\xf2\x8e\x0b\x1f\xb7\xba\x11\x90\
\x4e\x1f\xf9\x06\xf2\x40\x15\xd0\x63\x41\x14\x48\x03\x9f\x41\x2c\
\xc6\xf9\x60\x16\x07\x74\x13\xe7\xda\x6c\x32\xc7\x61\x4c\x12\xac\
\xf4\x55\x63\xa0\x75\x4a\xff\x41\xf0\x13\x24\xf3\x94\xd8\x4c\xc6\
\x18\x04\x22\x81\x5b\x01\x8d\xf4\x72\xa3\xb5\x73\x02\xbd\xef\xa3\
\xc1\xfb\xdb\x19\x49\xb3\xdc\x04\xab\x7a\x6f\x9c\x30\x84\xce\x24\
\xec\xa1\x13\x49\x3d\x9b\x5b\x1c\x89\xc9\xc4\xf3\x63\x31\x68\x05\
\xc8\xd3\xfc\x60\xea\x0e\xc8\xf1\xba\xcd\x9d\x18\xc0\xe7\xbb\x4a\
\xff\x0f\x96\xa5\x8c\x98\xd5\xfe\x16\x30\x52\xce\x3d\x56\x39\x06\
\xab\x93\xd8\xde\x97\xed\x29\x68\xbb\xa0\x6c\x79\xa6\xe6\x47\x53\
\x05\xc8\xca\x5a\x74\xc7\x03\xdd\x59\xaf\xd0\xba\xd0\x1c\xca\xca\
\xe4\x26\x2c\x67\xfd\x84\xf6\x9f\xcc\xe1\xc5\x95\x1b\xcb\x64\x25\
\xc4\xc7\x39\xba\xe9\xc7\x15\x63\x7d\x61\x5d\xca\x3c\x2c\xb4\xd4\
\xe1\x61\xf2\x44\x14\x45\xca\x20\xff\x62\xc6\x90\x9c\x84\xf1\x17\
\xd8\x3d\x7c\x94\xed\xa7\xc9\x3b\x0b\x01\x39\x9e\x04\x8c\x32\x69\
\x2b\x62\xe2\xf1\x8c\xcf\xaf\xf9\x5c\xcc\xe7\x27\x8c\x8e\x4d\xcc\
\x92\x12\x18\xa2\x2b\x79\xc9\xa9\x16\xe5\x49\x40\x80\xe1\x59\x62\
\xbc\xc4\xfe\x68\x70\x8e\x6d\x75\x14\x93\xcc\x53\x24\xe1\xf6\x15\
\xef\x96\x97\xe0\x16\x85\x48\xa6\xf5\xc0\x5d\x24\xf4\xc6\x24\x0b\
\x1a\xc8\xeb\xf6\x3c\x05\x49\x22\x12\x07\xa6\x32\x1c\xbb\x78\x03\
\xca\xdd\x12\x08\xf6\xf1\x67\x74\x31\x89\xf5\x59\x40\x1b\x13\x13\
\x39\x15\x0f\x79\x73\x16\x33\x72\xf6\xa6\xbf\x14\x70\x37\x82\x99\
\x3f\xde\x64\x7c\xc9\x01\xdf\x41\x2f\xcb\xc7\x50\xb1\x53\x60\xb8\
\x72\xe9\xc8\xa4\x67\x40\x22\x57\xea\xa4\x88\x15\x60\x2f\x93\x99\
\x06\xf6\x7d\x65\xf8\x0e\xb4\x2a\x40\x6e\xb1\x7e\xac\x47\xd3\xd9\
\x3a\x8e\x11\x78\x0c\x86\x81\xb1\x6c\x5b\xcc\xd5\x47\x30\x92\xde\
\x07\x1b\x98\xa6\xd5\x30\x95\x53\xad\xc5\x93\x80\x4b\x0c\x42\x62\
\x93\x0c\x7d\x22\x6c\x86\xc9\x37\xb3\x94\x7a\x98\x52\x8f\x20\x7f\
\x8d\x6f\xef\xc4\xdb\x75\xdb\x01\xae\x74\x51\x1c\xb8\x26\x59\xb7\
\xbe\x03\xb2\x35\x97\x8d\x6f\x20\x54\xca\x59\x5e\x88\x88\x15\xca\
\xac\xc6\x17\x93\x9d\xcb\x65\x2a\x5f\xa6\x87\x62\x8c\x5d\xab\xe7\
\x84\x0d\xf4\xf0\x38\x34\xbe\xf1\xe7\x12\x31\x76\x10\x73\x8c\x71\
\xe2\x27\x66\x89\xa9\xec\xc0\x49\xb0\x4b\xfe\x50\xe0\x83\x5c\x26\
\x95\xed\x7e\x98\x7f\x28\xd8\x04\x46\xf0\xd8\x56\x75\x76\x1b\xa6\
\x31\xa0\x24\x33\x68\xf8\xd3\x24\x97\x2c\x64\x52\x63\xfa\xaf\xf9\
\x8f\x00\x03\x00\xa7\xa0\x22\x71\x6e\x63\x94\xd3\x00\x00\x00\x00\
\x49\x45\x4e\x44\xae\x42\x60\x82\
\x00\x00\x06\xed\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x20\x00\x00\x00\x20\x08\x06\x00\x00\x00\x73\x7a\x7a\xf4\
\x00\x00\x00\x19\x74\x45\x58\x74\x53\x6f\x66\x74\x77\x61\x72\x65\
\x00\x41\x64\x6f\x62\x65\x20\x49\x6d\x61\x67\x65\x52\x65\x61\x64\
\x79\x71\xc9\x65\x3c\x00\x00\x03\x71\x69\x54\x58\x74\x58\x4d\x4c\
\x3a\x63\x6f\x6d\x2e\x61\x64\x6f\x62\x65\x2e\x78\x6d\x70\x00\x00\
\x00\x00\x00\x3c\x3f\x78\x70\x61\x63\x6b\x65\x74\x20\x62\x65\x67\
\x69\x6e\x3d\x22\xef\xbb\xbf\x22\x20\x69\x64\x3d\x22\x57\x35\x4d\
\x30\x4d\x70\x43\x65\x68\x69\x48\x7a\x72\x65\x53\x7a\x4e\x54\x63\
\x7a\x6b\x63\x39\x64\x22\x3f\x3e\x20\x3c\x78\x3a\x78\x6d\x70\x6d\
\x65\x74\x61\x20\x78\x6d\x6c\x6e\x73\x3a\x78\x3d\x22\x61\x64\x6f\
\x62\x65\x3a\x6e\x73\x3a\x6d\x65\x74\x61\x2f\x22\x20\x78\x3a\x78\
\x6d\x70\x74\x6b\x3d\x22\x41\x64\x6f\x62\x65\x20\x58\x4d\x50\x20\
\x43\x6f\x72\x65\x20\x35\x2e\x35\x2d\x63\x30\x31\x34\x20\x37\x39\
\x2e\x31\x35\x31\x34\x38\x31\x2c\x20\x32\x30\x31\x33\x2f\x30\x33\
\x2f\x31\x33\x2d\x31\x32\x3a\x30\x39\x3a\x31\x35\x20\x20\x20\x20\
\x20\x20\x20\x20\x22\x3e\x20\x3c\x72\x64\x66\x3a\x52\x44\x46\x20\
\x78\x6d\x6c\x6e\x73\x3a\x72\x64\x66\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x31\x39\x39\
\x39\x2f\x30\x32\x2f\x32\x32\x2d\x72\x64\x66\x2d\x73\x79\x6e\x74\
\x61\x78\x2d\x6e\x73\x23\x22\x3e\x20\x3c\x72\x64\x66\x3a\x44\x65\
\x73\x63\x72\x69\x70\x74\x69\x6f\x6e\x20\x72\x64\x66\x3a\x61\x62\
\x6f\x75\x74\x3d\x22\x22\x20\x78\x6d\x6c\x6e\x73\x3a\x78\x6d\x70\
\x4d\x4d\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x6e\x73\x2e\x61\x64\
\x6f\x62\x65\x2e\x63\x6f\x6d\x2f\x78\x61\x70\x2f\x31\x2e\x30\x2f\
\x6d\x6d\x2f\x22\x20\x78\x6d\x6c\x6e\x73\x3a\x73\x74\x52\x65\x66\
\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x6e\x73\x2e\x61\x64\x6f\x62\
\x65\x2e\x63\x6f\x6d\x2f\x78\x61\x70\x2f\x31\x2e\x30\x2f\x73\x54\
\x79\x70\x65\x2f\x52\x65\x73\x6f\x75\x72\x63\x65\x52\x65\x66\x23\
\x22\x20\x78\x6d\x6c\x6e\x73\x3a\x78\x6d\x70\x3d\x22\x68\x74\x74\
\x70\x3a\x2f\x2f\x6e\x73\x2e\x61\x64\x6f\x62\x65\x2e\x63\x6f\x6d\
\x2f\x78\x61\x70\x2f\x31\x2e\x30\x2f\x22\x20\x78\x6d\x70\x4d\x4d\
\x3a\x4f\x72\x69\x67\x69\x6e\x61\x6c\x44\x6f\x63\x75\x6d\x65\x6e\
\x74\x49\x44\x3d\x22\x78\x6d\x70\x2e\x64\x69\x64\x3a\x63\x34\x37\
\x39\x66\x62\x31\x39\x2d\x34\x33\x34\x34\x2d\x62\x65\x34\x33\x2d\
\x39\x39\x31\x31\x2d\x33\x64\x35\x32\x63\x31\x30\x33\x35\x37\x65\
\x31\x22\x20\x78\x6d\x70\x4d\x4d\x3a\x44\x6f\x63\x75\x6d\x65\x6e\
\x74\x49\x44\x3d\x22\x78\x6d\x70\x2e\x64\x69\x64\x3a\x35\x34\x30\
\x45\x44\x39\x43\x31\x35\x39\x38\x35\x31\x31\x45\x33\x39\x39\x43\
\x37\x38\x31\x46\x35\x33\x32\x45\x32\x43\x37\x44\x43\x22\x20\x78\
\x6d\x70\x4d\x4d\x3a\x49\x6e\x73\x74\x61\x6e\x63\x65\x49\x44\x3d\
\x22\x78\x6d\x70\x2e\x69\x69\x64\x3a\x35\x34\x30\x45\x44\x39\x43\
\x30\x35\x39\x38\x35\x31\x31\x45\x33\x39\x39\x43\x37\x38\x31\x46\
\x35\x33\x32\x45\x32\x43\x37\x44\x43\x22\x20\x78\x6d\x70\x3a\x43\
\x72\x65\x61\x74\x6f\x72\x54\x6f\x6f\x6c\x3d\x22\x41\x64\x6f\x62\
\x65\x20\x50\x68\x6f\x74\x6f\x73\x68\x6f\x70\x20\x43\x43\x20\x28\
\x57\x69\x6e\x64\x6f\x77\x73\x29\x22\x3e\x20\x3c\x78\x6d\x70\x4d\
\x4d\x3a\x44\x65\x72\x69\x76\x65\x64\x46\x72\x6f\x6d\x20\x73\x74\
\x52\x65\x66\x3a\x69\x6e\x73\x74\x61\x6e\x63\x65\x49\x44\x3d\x22\
\x78\x6d\x70\x2e\x69\x69\x64\x3a\x63\x31\x65\x61\x38\x31\x32\x35\
\x2d\x65\x62\x64\x64\x2d\x63\x38\x34\x62\x2d\x39\x66\x37\x61\x2d\
\x30\x36\x39\x39\x32\x39\x38\x64\x62\x39\x33\x63\x22\x20\x73\x74\
\x52\x65\x66\x3a\x64\x6f\x63\x75\x6d\x65\x6e\x74\x49\x44\x3d\x22\
\x78\x6d\x70\x2e\x64\x69\x64\x3a\x63\x34\x37\x39\x66\x62\x31\x39\
\x2d\x34\x33\x34\x34\x2d\x62\x65\x34\x33\x2d\x39\x39\x31\x31\x2d\
\x33\x64\x35\x32\x63\x31\x30\x33\x35\x37\x65\x31\x22\x2f\x3e\x20\
\x3c\x2f\x72\x64\x66\x3a\x44\x65\x73\x63\x72\x69\x70\x74\x69\x6f\
\x6e\x3e\x20\x3c\x2f\x72\x64\x66\x3a\x52\x44\x46\x3e\x20\x3c\x2f\
\x78\x3a\x78\x6d\x70\x6d\x65\x74\x61\x3e\x20\x3c\x3f\x78\x70\x61\
\x63\x6b\x65\x74\x20\x65\x6e\x64\x3d\x22\x72\x22\x3f\x3e\xce\x34\
\xc5\x6a\x00\x00\x03\x12\x49\x44\x41\x54\x78\xda\xc4\x97\x7b\x68\
\x8d\x71\x18\xc7\xdf\xf7\xec\x98\x4b\x27\x97\x28\x0b\xa1\x30\xe6\
\x96\x92\x59\x1a\xa9\xb9\x84\x24\xfc\x65\x7f\x2c\x91\x62\x26\x92\
\x7f\xb4\x72\x59\xb9\x14\x29\xb7\x51\xb2\x64\xe5\x2f\x4d\x4b\x76\
\x71\xc9\x42\xcb\x25\xa5\xa1\x83\xa5\xdc\x9b\x3f\x84\x4d\x6e\xed\
\x8c\xcf\x53\xcf\xab\x5f\xa7\xb3\xf7\xfd\x9d\xcb\xe6\xa9\xcf\x9e\
\x9d\xf3\xfe\xde\xe7\xf9\xbe\xbf\xf7\xf7\x7b\x7e\xcf\x71\x1d\x0b\
\xbb\x51\xb9\x2e\x0b\x37\x0f\x96\xc2\x6c\xc8\x85\xe1\x7a\xf9\x17\
\xbc\x81\x47\x32\x14\x6a\x8b\x4a\xab\xda\x1c\x4b\x73\x03\x12\x47\
\x70\x65\xb0\x19\x46\x59\xc6\xec\x84\x2b\x70\x10\x21\x77\x53\x16\
\x40\xf2\x55\xb8\xe3\x30\xc2\x49\xdd\x2e\xc1\x76\x84\xbc\x32\xe2\
\x8e\xc4\x55\xf0\xdd\xfa\x84\x02\x18\xd0\x07\x77\x0c\x36\x3a\x99\
\xb1\x0e\x99\x41\x12\x56\x13\x7b\x32\xff\x37\xc0\x30\x88\xf0\x5d\
\x97\x1b\x97\x3c\x1b\x77\x11\x96\x3b\x99\xb7\xf3\x1a\x77\x88\x7e\
\xce\x45\x40\x6b\xc8\x48\x2e\x62\x2e\xf4\x50\x72\xb1\x12\x23\xb9\
\xd8\x54\xf9\x13\x36\xbe\xd8\x09\xab\x2d\x02\xb5\x43\x13\x3c\x87\
\x2f\x30\x14\x0a\x21\x3f\x49\x41\xd3\x64\x8d\x84\xf5\xe9\x45\x4d\
\x45\xc0\x0d\xb2\x90\xf6\xca\x2c\x31\x75\xbf\x13\xac\x9d\x71\xb8\
\x2d\xb0\x09\xb2\x2d\x05\xfc\x9b\x81\xc3\x90\x15\xf0\xfe\x64\x21\
\x7d\xeb\x6e\x00\xd7\x5e\xe2\xb6\x21\x44\x16\xf0\x39\x98\x6b\x23\
\x20\xc4\x0d\x33\xf1\x8b\x7d\x06\x56\xc2\x5a\xbf\xe4\x71\xf6\x1a\
\x5a\x2d\xc6\x8d\x27\x77\x3f\x99\x81\x0d\x3e\x83\x6e\xc3\x56\x92\
\xff\xb1\xac\x98\x03\x74\x17\x2d\xb1\x18\x2e\x33\x9e\x17\xd6\xf2\
\x9a\xc8\x24\x69\x19\xc9\x3b\x2d\x93\xf7\xc7\x5d\x83\x39\xc9\x2c\
\x44\xd9\x86\x93\x60\x21\xec\x83\x66\x88\xe9\xc5\xab\x24\x6f\xb1\
\x8d\xc4\xd8\x1f\xfa\x30\x2b\xb4\x82\x46\x6d\x04\xb8\xdd\xd4\x7f\
\xd9\x56\x9f\x08\xfa\x20\x9d\x8d\xaf\x65\x77\x81\x41\x4e\xdc\x90\
\x46\xd7\xe9\x45\x43\xd0\x14\x43\xcc\x7c\xf8\xda\xab\x02\xe2\xc4\
\xc8\xfa\x2b\x90\x7f\x0a\xf5\xc3\x7f\x31\x97\xe4\xef\xf0\x83\xb4\
\xbc\x5e\x17\x78\xf7\x4f\x7b\x53\x40\x43\x82\x42\xd4\xe6\x89\x51\
\x41\xef\xd3\x9c\xee\x59\x7a\x66\xdc\x89\x2f\x68\x22\xe0\x10\x7e\
\x47\x40\x8c\xa8\x21\xa8\x89\x20\xed\x49\x0a\xf0\x1e\x52\xb6\xf8\
\x3d\xb8\xa9\x33\xde\x2c\x75\xe0\xb1\x45\x8c\x3c\x3d\x68\x6a\xa1\
\x5e\x8b\x8e\x6d\xf2\xe9\xb8\x45\x46\xf5\x93\x42\x55\xae\x45\xeb\
\x99\xad\x00\xcf\xea\xa5\x68\x69\xd1\xb1\x5d\xe9\x27\x7c\x5a\xbf\
\xba\x90\x4e\x6f\x97\x45\xbc\x2a\x69\x56\x48\xfe\xdd\x32\xb9\x24\
\x3d\x1a\x70\x2a\x9e\x09\x11\xf0\xa7\xe5\xe9\x35\x01\xc6\x58\x26\
\x8f\xe8\x91\x5c\xea\x33\xac\x91\xdc\x0f\xbd\xfd\x2f\xaf\x61\x62\
\x40\x5c\x79\x92\x28\xc1\x4f\x49\xad\xd7\xf3\xdf\x49\xd0\x53\x16\
\xc3\x6e\x18\xeb\x13\x2b\xe6\x2d\x7c\x57\x6f\x94\x1b\xf6\x24\xb9\
\xbb\xee\xcb\xb6\x92\x33\x03\x06\xeb\x03\x48\x79\x1d\x68\x71\x6f\
\x39\x0f\xb0\xdf\xec\x88\x5a\x52\xd8\xde\xf9\x29\xf4\x81\x62\x35\
\x70\xc0\xfb\xe0\x75\xc5\x4f\x8c\x01\x9f\xa1\xba\x87\x0a\xdf\x65\
\x58\x63\x36\x38\x9e\x00\x79\x9f\xb2\xb5\xde\xca\x51\xcc\x80\x12\
\x6d\xa3\x3b\x32\x98\xfc\xb4\x74\xdd\xf1\x0d\xad\x6b\x2c\xa0\xb3\
\xb8\x5d\x66\xd9\xe5\x3b\x59\x48\x47\x60\x65\x1a\x89\x3f\x48\x11\
\x23\x6e\x4d\xd2\x3f\x4e\x0d\x21\x05\xfa\xbb\x61\x59\x40\xf7\x6c\
\x9a\x1c\x72\x27\xa5\x10\xf9\x35\xb4\x49\xf5\x03\x08\xc9\xd1\x96\
\xab\x08\x66\xc0\x68\xe8\xab\x97\x3f\xc2\x0b\xad\xf5\x75\x70\x8b\
\xc4\xb1\xa0\x98\x7f\x05\x18\x00\x90\xb5\xfd\x36\x34\xeb\xb9\xe4\
\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
\x00\x00\x06\xa4\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x20\x00\x00\x00\x20\x08\x06\x00\x00\x00\x73\x7a\x7a\xf4\
\x00\x00\x00\x19\x74\x45\x58\x74\x53\x6f\x66\x74\x77\x61\x72\x65\
\x00\x41\x64\x6f\x62\x65\x20\x49\x6d\x61\x67\x65\x52\x65\x61\x64\
\x79\x71\xc9\x65\x3c\x00\x00\x03\x71\x69\x54\x58\x74\x58\x4d\x4c\
\x3a\x63\x6f\x6d\x2e\x61\x64\x6f\x62\x65\x2e\x78\x6d\x70\x00\x00\
\x00\x00\x00\x3c\x3f\x78\x70\x61\x63\x6b\x65\x74\x20\x62\x65\x67\
\x69\x6e\x3d\x22\xef\xbb\xbf\x22\x20\x69\x64\x3d\x22\x57\x35\x4d\
\x30\x4d\x70\x43\x65\x68\x69\x48\x7a\x72\x65\x53\x7a\x4e\x54\x63\
\x7a\x6b\x63\x39\x64\x22\x3f\x3e\x20\x3c\x78\x3a\x78\x6d\x70\x6d\
\x65\x74\x61\x20\x78\x6d\x6c\x6e\x73\x3a\x78\x3d\x22\x61\x64\x6f\
\x62\x65\x3a\x6e\x73\x3a\x6d\x65\x74\x61\x2f\x22\x20\x78\x3a\x78\
\x6d\x70\x74\x6b\x3d\x22\x41\x64\x6f\x62\x65\x20\x58\x4d\x50\x20\
\x43\x6f\x72\x65\x20\x35\x2e\x35\x2d\x63\x30\x31\x34\x20\x37\x39\
\x2e\x31\x35\x31\x34\x38\x31\x2c\x20\x32\x30\x31\x33\x2f\x30\x33\
\x2f\x31\x33\x2d\x31\x32\x3a\x30\x39\x3a\x31\x35\x20\x20\x20\x20\
\x20\x20\x20\x20\x22\x3e\x20\x3c\x72\x64\x66\x3a\x52\x44\x46\x20\
\x78\x6d\x6c\x6e\x73\x3a\x72\x64\x66\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x31\x39\x39\
\x39\x2f\x30\x32\x2f\x32\x32\x2d\x72\x64\x66\x2d\x73\x79\x6e\x74\
\x61\x78\x2d\x6e\x73\x23\x22\x3e\x20\x3c\x72\x64\x66\x3a\x44\x65\
\x73\x63\x72\x69\x70\x74\x69\x6f\x6e\x20\x72\x64\x66\x3a\x61\x62\
\x6f\x75\x74\x3d\x22\x22\x20\x78\x6d\x6c\x6e\x73\x3a\x78\x6d\x70\
\x4d\x4d\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x6e\x73\x2e\x61\x64\
\x6f\x62\x65\x2e\x63\x6f\x6d\x2f\x78\x61\x70\x2f\x31\x2e\x30\x2f\
\x6d\x6d\x2f\x22\x20\x78\x6d\x6c\x6e\x73\x3a\x73\x74\x52\x65\x66\
\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x6e\x73\x2e\x61\x64\x6f\x62\
\x65\x2e\x63\x6f\x6d\x2f\x78\x61\x70\x2f\x31\x2e\x30\x2f\x73\x54\
\x79\x70\x65\x2f\x52\x65\x73\x6f\x75\x72\x63\x65\x52\x65\x66\x23\
\x22\x20\x78\x6d\x6c\x6e\x73\x3a\x78\x6d\x70\x3d\x22\x68\x74\x74\
\x70\x3a\x2f\x2f\x6e\x73\x2e\x61\x64\x6f\x62\x65\x2e\x63\x6f\x6d\
\x2f\x78\x61\x70\x2f\x31\x2e\x30\x2f\x22\x20\x78\x6d\x70\x4d\x4d\
\x3a\x4f\x72\x69\x67\x69\x6e\x61\x6c\x44\x6f\x63\x75\x6d\x65\x6e\
\x74\x49\x44\x3d\x22\x78\x6d\x70\x2e\x64\x69\x64\x3a\x63\x34\x37\
\x39\x66\x62\x31\x39\x2d\x34\x33\x34\x34\x2d\x62\x65\x34\x33\x2d\
\x39\x39\x31\x31\x2d\x33\x64\x35\x32\x63\x31\x30\x33\x35\x37\x65\
\x31\x22\x20\x78\x6d\x70\x4d\x4d\x3a\x44\x6f\x63\x75\x6d\x65\x6e\
\x74\x49\x44\x3d\x22\x78\x6d\x70\x2e\x64\x69\x64\x3a\x42\x33\x41\
\x34\x45\x35\x42\x34\x35\x39\x38\x34\x31\x31\x45\x33\x42\x33\x31\
\x41\x45\x43\x34\x30\x43\x43\x32\x45\x39\x37\x31\x33\x22\x20\x78\
\x6d\x70\x4d\x4d\x3a\x49\x6e\x73\x74\x61\x6e\x63\x65\x49\x44\x3d\
\x22\x78\x6d\x70\x2e\x69\x69\x64\x3a\x42\x33\x41\x34\x45\x35\x42\
\x33\x35\x39\x38\x34\x31\x31\x45\x33\x42\x33\x31\x41\x45\x43\x34\
\x30\x43\x43\x32\x45\x39\x37\x31\x33\x22\x20\x78\x6d\x70\x3a\x43\
\x72\x65\x61\x74\x6f\x72\x54\x6f\x6f\x6c\x3d\x22\x41\x64\x6f\x62\
\x65\x20\x50\x68\x6f\x74\x6f\x73\x68\x6f\x70\x20\x43\x43\x20\x28\
\x57\x69\x6e\x64\x6f\x77\x73\x29\x22\x3e\x20\x3c\x78\x6d\x70\x4d\
\x4d\x3a\x44\x65\x72\x69\x76\x65\x64\x46\x72\x6f\x6d\x20\x73\x74\
\x52\x65\x66\x3a\x69\x6e\x73\x74\x61\x6e\x63\x65\x49\x44\x3d\x22\
\x78\x6d\x70\x2e\x69\x69\x64\x3a\x63\x31\x65\x61\x38\x31\x32\x35\
\x2d\x65\x62\x64\x64\x2d\x63\x38\x34\x62\x2d\x39\x66\x37\x61\x2d\
\x30\x36\x39\x39\x32\x39\x38\x64\x62\x39\x33\x63\x22\x20\x73\x74\
\x52\x65\x66\x3a\x64\x6f\x63\x75\x6d\x65\x6e\x74\x49\x44\x3d\x22\
\x78\x6d\x70\x2e\x64\x69\x64\x3a\x63\x34\x37\x39\x66\x62\x31\x39\
\x2d\x34\x33\x34\x34\x2d\x62\x65\x34\x33\x2d\x39\x39\x31\x31\x2d\
\x33\x64\x35\x32\x63\x31\x30\x33\x35\x37\x65\x31\x22\x2f\x3e\x20\
\x3c\x2f\x72\x64\x66\x3a\x44\x65\x73\x63\x72\x69\x70\x74\x69\x6f\
\x6e\x3e\x20\x3c\x2f\x72\x64\x66\x3a\x52\x44\x46\x3e\x20\x3c\x2f\
\x78\x3a\x78\x6d\x70\x6d\x65\x74\x61\x3e\x20\x3c\x3f\x78\x70\x61\
\x63\x6b\x65\x74\x20\x65\x6e\x64\x3d\x22\x72\x22\x3f\x3e\x47\x2d\
\xcb\x84\x00\x00\x02\xc9\x49\x44\x41\x54\x78\xda\xc4\x57\x3f\x68\
\x93\x41\x14\xff\xbe\x10\x07\x07\x3b\x39\x89\x53\x51\x52\x1b\xd4\
\x45\x70\x90\xaa\x74\x28\xba\x28\x14\x07\xd7\x12\x41\x68\x27\x69\
\x52\x1d\xc4\x21\x76\x51\x11\xb7\xba\xa8\x38\x38\x04\x07\xc1\xe2\
\x10\x1d\x4a\x1d\xbb\xf9\x87\xb4\x69\xa3\x99\x82\x53\x1d\xcc\x2a\
\x34\xfe\x1e\xfc\x0e\x8e\xf3\xde\xdd\x97\xd0\x4f\x1f\xfc\xf8\xc2\
\x5d\xde\x7b\xbf\x7b\xef\xdd\xbb\xbb\x34\x19\x41\xce\xbe\xbd\x5e\
\xc5\xe7\x1e\x70\x88\x43\x7d\xa0\xbe\x71\xb5\xf1\x78\x58\x5b\xe9\
\x08\xce\x8f\xe0\xd3\xf3\xe8\x0e\x80\xa3\x20\xf1\x63\x18\x7b\x85\
\x11\x02\x70\x5c\x21\x9e\x72\x6e\x28\x29\x2a\xab\x9c\xc3\x67\x1e\
\xd8\x05\xaa\x58\x55\x2b\x63\xd4\x52\xc7\xce\x79\x49\x0d\xfd\x2c\
\xc3\x4e\x33\x1a\x01\x28\xdd\xc1\xe7\x05\x70\x06\xb8\x04\xac\x63\
\xec\x24\xe7\xc4\xc1\x78\x80\xc0\x38\xff\x23\xff\x9d\xc6\x47\x1c\
\x5e\x00\xce\x01\xab\x18\xbb\x12\x63\x3c\x47\xe7\xae\x48\x24\x1e\
\x01\x37\x32\x84\xf9\x1b\xd0\x00\x16\x81\x83\xce\xdc\x6f\x59\x14\
\x22\xb1\xa6\x45\x60\x5e\x31\x7a\x18\x78\x90\x31\xc7\xc7\x80\xbb\
\x1e\xe7\x22\x07\x80\x9b\xa1\x14\xec\x26\xf9\xcb\xcf\x10\x81\x6a\
\xce\x24\xbe\x03\xf7\x83\x7d\x80\x05\xb7\xc6\xb0\x87\x64\x1b\xd8\
\xe2\xef\x13\x40\x29\x83\xf3\x8b\xc8\x7f\x2f\xda\x88\x40\x62\x89\
\x39\xf7\xc9\x7b\x60\x09\x86\xbe\x38\x3a\xa7\x58\xa8\x33\x8a\xde\
\x02\x74\x56\xa2\x9d\x90\xdb\x68\x5b\x29\xb8\x27\x52\xdd\x30\x34\
\x50\x88\x8b\xae\xb4\xe3\x5b\x9e\xe9\x1d\x60\xc2\xd5\x4d\xad\xf6\
\x6a\x3a\x9c\xec\xf3\xe7\xca\xca\x2f\x1b\x03\xd0\x91\xfa\x99\xe4\
\xdc\x26\xc6\xf7\x2c\x12\x4d\x25\x12\x15\xa0\xcb\xb6\xdd\x91\xb6\
\x9d\x42\x61\x91\xa1\x8b\x9d\x0b\xa7\x4d\xd8\xa1\x23\x39\x7f\x23\
\x2b\xe2\x5c\x1b\x98\xc5\xfc\x96\x95\x8e\xcf\x11\x7b\x42\xa2\x26\
\x04\x7e\xe1\xc7\x58\xac\xe0\x60\x7c\xc2\x5a\x79\xcb\x72\x9e\x58\
\x24\xca\x56\x24\xda\x19\x0a\xb3\x5f\xc8\xe0\xdc\x18\x37\x52\xf6\
\x38\x4f\x38\x36\xa9\xe8\x68\x32\x56\x48\xfe\xb3\x14\x78\x99\x88\
\x89\xbd\xe2\x96\xb2\x3a\x19\xdb\x54\x74\x82\x29\xa8\xb3\x20\x42\
\x52\x62\x61\x25\xcc\xf1\xac\x43\xc2\x14\xe1\x9e\x55\x84\xa5\x0c\
\x45\x58\x1f\x66\x1b\x7e\xe0\x49\x66\x6f\xc3\xb2\x89\xca\xc8\xdb\
\x50\x69\x26\x3b\x3c\xd5\xf6\xb3\x11\x75\x24\x2a\xae\xee\x5f\x45\
\xc8\x3f\x34\x94\xb0\x89\xe1\xa6\x49\x87\xa7\x15\x37\x15\xe7\x22\
\xcf\x7c\xc4\x7d\x11\x90\x9b\xcc\x3b\xe5\x3c\x77\x0f\xa3\xb6\x55\
\x70\xb1\x9c\xcb\x29\x3b\x0d\x12\x5f\x63\x77\xb8\x66\x06\xe7\xa3\
\x8a\x90\x98\x02\x89\xb6\x96\x82\x7a\x8e\xce\xcd\xcd\xea\x76\xa8\
\x06\x8a\xff\xa0\xf7\x14\x43\x04\x96\x79\x71\xf4\x5d\x26\x16\xb8\
\x3b\x62\xd2\xe1\x2a\x7d\x37\xab\x3e\x77\x89\x9f\x00\xef\xed\xd7\
\x1c\x12\xe6\x26\xb3\xc2\x62\xab\x04\x9c\x57\xb8\xd5\x1e\x4a\xc1\
\x39\x24\xc4\xf9\x0c\xe6\x3e\xc5\xb6\xe1\x2a\xdf\x03\xaf\x81\xa7\
\x2c\x9a\x9e\xb5\x45\xbb\x01\x02\x5d\xb3\xd5\x58\xed\x53\xc0\x4b\
\xe0\x95\xbc\x0f\x30\xb6\xb1\x1f\x6f\x43\x79\x68\xac\x2b\xd3\x12\
\xa9\x8f\x79\xbf\x0d\x3b\xca\xd9\x31\xe0\x5c\x92\x2b\x01\xbe\x7e\
\x6b\xce\x29\x2a\x97\x9a\xda\xb0\x2f\x63\x91\x3f\x02\x0c\x00\x21\
\x27\x10\xbf\xa7\x82\xc6\x68\x00\x00\x00\x00\x49\x45\x4e\x44\xae\
\x42\x60\x82\
\x00\x00\x04\xf8\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x20\x00\x00\x00\x20\x08\x06\x00\x00\x00\x73\x7a\x7a\xf4\
\x00\x00\x00\x19\x74\x45\x58\x74\x53\x6f\x66\x74\x77\x61\x72\x65\
\x00\x41\x64\x6f\x62\x65\x20\x49\x6d\x61\x67\x65\x52\x65\x61\x64\
\x79\x71\xc9\x65\x3c\x00\x00\x03\x71\x69\x54\x58\x74\x58\x4d\x4c\
\x3a\x63\x6f\x6d\x2e\x61\x64\x6f\x62\x65\x2e\x78\x6d\x70\x00\x00\
\x00\x00\x00\x3c\x3f\x78\x70\x61\x63\x6b\x65\x74\x20\x62\x65\x67\
\x69\x6e\x3d\x22\xef\xbb\xbf\x22\x20\x69\x64\x3d\x22\x57\x35\x4d\
\x30\x4d\x70\x43\x65\x68\x69\x48\x7a\x72\x65\x53\x7a\x4e\x54\x63\
\x7a\x6b\x63\x39\x64\x22\x3f\x3e\x20\x3c\x78\x3a\x78\x6d\x70\x6d\
\x65\x74\x61\x20\x78\x6d\x6c\x6e\x73\x3a\x78\x3d\x22\x61\x64\x6f\
\x62\x65\x3a\x6e\x73\x3a\x6d\x65\x74\x61\x2f\x22\x20\x78\x3a\x78\
\x6d\x70\x74\x6b\x3d\x22\x41\x64\x6f\x62\x65\x20\x58\x4d\x50\x20\
\x43\x6f\x72\x65\x20\x35\x2e\x35\x2d\x63\x30\x31\x34\x20\x37\x39\
\x2e\x31\x35\x31\x34\x38\x31\x2c\x20\x32\x30\x31\x33\x2f\x30\x33\
\x2f\x31\x33\x2d\x31\x32\x3a\x30\x39\x3a\x31\x35\x20\x20\x20\x20\
\x20\x20\x20\x20\x22\x3e\x20\x3c\x72\x64\x66\x3a\x52\x44\x46\x20\
\x78\x6d\x6c\x6e\x73\x3a\x72\x64\x66\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x31\x39\x39\
\x39\x2f\x30\x32\x2f\x32\x32\x2d\x72\x64\x66\x2d\x73\x79\x6e\x74\
\x61\x78\x2d\x6e\x73\x23\x22\x3e\x20\x3c\x72\x64\x66\x3a\x44\x65\
\x73\x63\x72\x69\x70\x74\x69\x6f\x6e\x20\x72\x64\x66\x3a\x61\x62\
\x6f\x75\x74\x3d\x22\x22\x20\x78\x6d\x6c\x6e\x73\x3a\x78\x6d\x70\
\x4d\x4d\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x6e\x73\x2e\x61\x64\
\x6f\x62\x65\x2e\x63\x6f\x6d\x2f\x78\x61\x70\x2f\x31\x2e\x30\x2f\
\x6d\x6d\x2f\x22\x20\x78\x6d\x6c\x6e\x73\x3a\x73\x74\x52\x65\x66\
\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x6e\x73\x2e\x61\x64\x6f\x62\
\x65\x2e\x63\x6f\x6d\x2f\x78\x61\x70\x2f\x31\x2e\x30\x2f\x73\x54\
\x79\x70\x65\x2f\x52\x65\x73\x6f\x75\x72\x63\x65\x52\x65\x66\x23\
\x22\x20\x78\x6d\x6c\x6e\x73\x3a\x78\x6d\x70\x3d\x22\x68\x74\x74\
\x70\x3a\x2f\x2f\x6e\x73\x2e\x61\x64\x6f\x62\x65\x2e\x63\x6f\x6d\
\x2f\x78\x61\x70\x2f\x31\x2e\x30\x2f\x22\x20\x78\x6d\x70\x4d\x4d\
\x3a\x4f\x72\x69\x67\x69\x6e\x61\x6c\x44\x6f\x63\x75\x6d\x65\x6e\
\x74\x49\x44\x3d\x22\x78\x6d\x70\x2e\x64\x69\x64\x3a\x63\x34\x37\
\x39\x66\x62\x31\x39\x2d\x34\x33\x34\x34\x2d\x62\x65\x34\x33\x2d\
\x39\x39\x31\x31\x2d\x33\x64\x35\x32\x63\x31\x30\x33\x35\x37\x65\
\x31\x22\x20\x78\x6d\x70\x4d\x4d\x3a\x44\x6f\x63\x75\x6d\x65\x6e\
\x74\x49\x44\x3d\x22\x78\x6d\x70\x2e\x64\x69\x64\x3a\x41\x45\x36\
\x37\x43\x43\x34\x36\x35\x39\x38\x44\x31\x31\x45\x33\x39\x32\x37\
\x33\x46\x43\x46\x32\x35\x44\x37\x31\x38\x43\x35\x39\x22\x20\x78\
\x6d\x70\x4d\x4d\x3a\x49\x6e\x73\x74\x61\x6e\x63\x65\x49\x44\x3d\
\x22\x78\x6d\x70\x2e\x69\x69\x64\x3a\x41\x45\x36\x37\x43\x43\x34\
\x35\x35\x39\x38\x44\x31\x31\x45\x33\x39\x32\x37\x33\x46\x43\x46\
\x32\x35\x44\x37\x31\x38\x43\x35\x39\x22\x20\x78\x6d\x70\x3a\x43\
\x72\x65\x61\x74\x6f\x72\x54\x6f\x6f\x6c\x3d\x22\x41\x64\x6f\x62\
\x65\x20\x50\x68\x6f\x74\x6f\x73\x68\x6f\x70\x20\x43\x43\x20\x28\
\x57\x69\x6e\x64\x6f\x77\x73\x29\x22\x3e\x20\x3c\x78\x6d\x70\x4d\
\x4d\x3a\x44\x65\x72\x69\x76\x65\x64\x46\x72\x6f\x6d\x20\x73\x74\
\x52\x65\x66\x3a\x69\x6e\x73\x74\x61\x6e\x63\x65\x49\x44\x3d\x22\
\x78\x6d\x70\x2e\x69\x69\x64\x3a\x37\x31\x65\x61\x35\x61\x61\x61\
\x2d\x63\x33\x30\x31\x2d\x30\x61\x34\x35\x2d\x62\x36\x30\x35\x2d\
\x38\x64\x36\x65\x63\x37\x33\x34\x65\x37\x62\x66\x22\x20\x73\x74\
\x52\x65\x66\x3a\x64\x6f\x63\x75\x6d\x65\x6e\x74\x49\x44\x3d\x22\
\x78\x6d\x70\x2e\x64\x69\x64\x3a\x63\x34\x37\x39\x66\x62\x31\x39\
\x2d\x34\x33\x34\x34\x2d\x62\x65\x34\x33\x2d\x39\x39\x31\x31\x2d\
\x33\x64\x35\x32\x63\x31\x30\x33\x35\x37\x65\x31\x22\x2f\x3e\x20\
\x3c\x2f\x72\x64\x66\x3a\x44\x65\x73\x63\x72\x69\x70\x74\x69\x6f\
\x6e\x3e\x20\x3c\x2f\x72\x64\x66\x3a\x52\x44\x46\x3e\x20\x3c\x2f\
\x78\x3a\x78\x6d\x70\x6d\x65\x74\x61\x3e\x20\x3c\x3f\x78\x70\x61\
\x63\x6b\x65\x74\x20\x65\x6e\x64\x3d\x22\x72\x22\x3f\x3e\xe1\x20\
\xe3\x43\x00\x00\x01\x1d\x49\x44\x41\x54\x78\xda\x62\x6c\x4f\x5c\
\x74\x8e\x81\x81\xc1\x90\x01\x3b\xf8\x0b\xc4\x61\x95\xf3\xe3\xd6\
\x81\x38\x40\xb5\x09\x40\x6a\x1e\x10\x33\x32\x50\x07\x9c\x67\xc2\
\x63\x39\x08\x30\x03\xb1\x1e\x12\xdf\x81\x8a\x96\x83\x80\x21\x13\
\xc3\x00\x83\x51\x07\x8c\x3a\x00\xe4\x80\x4f\x04\xd4\x7c\x44\x62\
\x3f\xa7\xb2\xfd\x9f\x58\x80\x84\x31\x10\xeb\xe0\x50\xf0\x03\x88\
\xf7\x20\xf1\x1b\x80\xf8\x30\x10\xb3\x51\xc9\x01\x57\x18\x46\x3c\
\x60\x84\x16\xaf\x0e\x38\xe4\xbf\x00\x71\x07\xb0\x28\x7e\x02\x2d\
\x8a\xd5\x80\x54\x31\x10\xb3\x53\xc9\xfe\x03\x2c\x44\x94\xed\xaf\
\x80\xb8\x09\xca\x2e\x04\xe2\x34\x2a\x06\x40\x1c\x13\x11\x65\x3b\
\x72\x56\xe5\xa4\x76\x0c\x8c\x16\x44\xa3\x0e\x00\x39\xe0\x3f\x01\
\x35\xff\x90\xd8\xdf\xa9\x6c\xff\x7f\x50\x36\x4c\x22\x50\x0e\xcc\
\x43\xe2\xf7\x43\x1d\x4d\xb5\x72\x60\xb4\x28\x06\x15\xc5\x2a\x78\
\x6a\x43\x50\x9c\xef\x05\x16\xc5\x7f\xa0\x45\x31\x28\xe8\x9d\xa9\
\x59\x1b\x82\xd2\xc0\x59\x20\xe6\xc3\xa3\xa8\x08\x1a\xf7\xb0\xea\
\xb8\x82\x9a\xed\x01\x26\x02\x96\x83\x00\x3f\x12\x5b\x92\xca\x31\
\xc0\x37\x5a\x10\x8d\x3a\x60\x50\x38\xe0\x1c\x1e\x79\x50\xfe\xbf\
\x84\x56\x74\xfe\xa7\xa2\xfd\xe7\x18\xff\xff\xff\x3f\xa0\x21\x00\
\x10\x60\x00\xb3\xf5\x3a\xe9\xf9\xc0\x6c\x8c\x00\x00\x00\x00\x49\
\x45\x4e\x44\xae\x42\x60\x82\
\x00\x00\x06\xb1\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x20\x00\x00\x00\x20\x08\x06\x00\x00\x00\x73\x7a\x7a\xf4\
\x00\x00\x00\x19\x74\x45\x58\x74\x53\x6f\x66\x74\x77\x61\x72\x65\
\x00\x41\x64\x6f\x62\x65\x20\x49\x6d\x61\x67\x65\x52\x65\x61\x64\
\x79\x71\xc9\x65\x3c\x00\x00\x03\x71\x69\x54\x58\x74\x58\x4d\x4c\
\x3a\x63\x6f\x6d\x2e\x61\x64\x6f\x62\x65\x2e\x78\x6d\x70\x00\x00\
\x00\x00\x00\x3c\x3f\x78\x70\x61\x63\x6b\x65\x74\x20\x62\x65\x67\
\x69\x6e\x3d\x22\xef\xbb\xbf\x22\x20\x69\x64\x3d\x22\x57\x35\x4d\
\x30\x4d\x70\x43\x65\x68\x69\x48\x7a\x72\x65\x53\x7a\x4e\x54\x63\
\x7a\x6b\x63\x39\x64\x22\x3f\x3e\x20\x3c\x78\x3a\x78\x6d\x70\x6d\
\x65\x74\x61\x20\x78\x6d\x6c\x6e\x73\x3a\x78\x3d\x22\x61\x64\x6f\
\x62\x65\x3a\x6e\x73\x3a\x6d\x65\x74\x61\x2f\x22\x20\x78\x3a\x78\
\x6d\x70\x74\x6b\x3d\x22\x41\x64\x6f\x62\x65\x20\x58\x4d\x50\x20\
\x43\x6f\x72\x65\x20\x35\x2e\x35\x2d\x63\x30\x31\x34\x20\x37\x39\
\x2e\x31\x35\x31\x34\x38\x31\x2c\x20\x32\x30\x31\x33\x2f\x30\x33\
\x2f\x31\x33\x2d\x31\x32\x3a\x30\x39\x3a\x31\x35\x20\x20\x20\x20\
\x20\x20\x20\x20\x22\x3e\x20\x3c\x72\x64\x66\x3a\x52\x44\x46\x20\
\x78\x6d\x6c\x6e\x73\x3a\x72\x64\x66\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x31\x39\x39\
\x39\x2f\x30\x32\x2f\x32\x32\x2d\x72\x64\x66\x2d\x73\x79\x6e\x74\
\x61\x78\x2d\x6e\x73\x23\x22\x3e\x20\x3c\x72\x64\x66\x3a\x44\x65\
\x73\x63\x72\x69\x70\x74\x69\x6f\x6e\x20\x72\x64\x66\x3a\x61\x62\
\x6f\x75\x74\x3d\x22\x22\x20\x78\x6d\x6c\x6e\x73\x3a\x78\x6d\x70\
\x4d\x4d\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x6e\x73\x2e\x61\x64\
\x6f\x62\x65\x2e\x63\x6f\x6d\x2f\x78\x61\x70\x2f\x31\x2e\x30\x2f\
\x6d\x6d\x2f\x22\x20\x78\x6d\x6c\x6e\x73\x3a\x73\x74\x52\x65\x66\
\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x6e\x73\x2e\x61\x64\x6f\x62\
\x65\x2e\x63\x6f\x6d\x2f\x78\x61\x70\x2f\x31\x2e\x30\x2f\x73\x54\
\x79\x70\x65\x2f\x52\x65\x73\x6f\x75\x72\x63\x65\x52\x65\x66\x23\
\x22\x20\x78\x6d\x6c\x6e\x73\x3a\x78\x6d\x70\x3d\x22\x68\x74\x74\
\x70\x3a\x2f\x2f\x6e\x73\x2e\x61\x64\x6f\x62\x65\x2e\x63\x6f\x6d\
\x2f\x78\x61\x70\x2f\x31\x2e\x30\x2f\x22\x20\x78\x6d\x70\x4d\x4d\
\x3a\x4f\x72\x69\x67\x69\x6e\x61\x6c\x44\x6f\x63\x75\x6d\x65\x6e\
\x74\x49\x44\x3d\x22\x78\x6d\x70\x2e\x64\x69\x64\x3a\x63\x34\x37\
\x39\x66\x62\x31\x39\x2d\x34\x33\x34\x34\x2d\x62\x65\x34\x33\x2d\
\x39\x39\x31\x31\x2d\x33\x64\x35\x32\x63\x31\x30\x33\x35\x37\x65\
\x31\x22\x20\x78\x6d\x70\x4d\x4d\x3a\x44\x6f\x63\x75\x6d\x65\x6e\
\x74\x49\x44\x3d\x22\x78\x6d\x70\x2e\x64\x69\x64\x3a\x42\x45\x42\
\x44\x37\x42\x46\x37\x35\x39\x38\x44\x31\x31\x45\x33\x39\x39\x41\
\x31\x41\x44\x34\x30\x37\x44\x35\x45\x43\x33\x39\x32\x22\x20\x78\
\x6d\x70\x4d\x4d\x3a\x49\x6e\x73\x74\x61\x6e\x63\x65\x49\x44\x3d\
\x22\x78\x6d\x70\x2e\x69\x69\x64\x3a\x42\x45\x42\x44\x37\x42\x46\
\x36\x35\x39\x38\x44\x31\x31\x45\x33\x39\x39\x41\x31\x41\x44\x34\
\x30\x37\x44\x35\x45\x43\x33\x39\x32\x22\x20\x78\x6d\x70\x3a\x43\
\x72\x65\x61\x74\x6f\x72\x54\x6f\x6f\x6c\x3d\x22\x41\x64\x6f\x62\
\x65\x20\x50\x68\x6f\x74\x6f\x73\x68\x6f\x70\x20\x43\x43\x20\x28\
\x57\x69\x6e\x64\x6f\x77\x73\x29\x22\x3e\x20\x3c\x78\x6d\x70\x4d\
\x4d\x3a\x44\x65\x72\x69\x76\x65\x64\x46\x72\x6f\x6d\x20\x73\x74\
\x52\x65\x66\x3a\x69\x6e\x73\x74\x61\x6e\x63\x65\x49\x44\x3d\x22\
\x78\x6d\x70\x2e\x69\x69\x64\x3a\x37\x31\x65\x61\x35\x61\x61\x61\
\x2d\x63\x33\x30\x31\x2d\x30\x61\x34\x35\x2d\x62\x36\x30\x35\x2d\
\x38\x64\x36\x65\x63\x37\x33\x34\x65\x37\x62\x66\x22\x20\x73\x74\
\x52\x65\x66\x3a\x64\x6f\x63\x75\x6d\x65\x6e\x74\x49\x44\x3d\x22\
\x78\x6d\x70\x2e\x64\x69\x64\x3a\x63\x34\x37\x39\x66\x62\x31\x39\
\x2d\x34\x33\x34\x34\x2d\x62\x65\x34\x33\x2d\x39\x39\x31\x31\x2d\
\x33\x64\x35\x32\x63\x31\x30\x33\x35\x37\x65\x31\x22\x2f\x3e\x20\
\x3c\x2f\x72\x64\x66\x3a\x44\x65\x73\x63\x72\x69\x70\x74\x69\x6f\
\x6e\x3e\x20\x3c\x2f\x72\x64\x66\x3a\x52\x44\x46\x3e\x20\x3c\x2f\
\x78\x3a\x78\x6d\x70\x6d\x65\x74\x61\x3e\x20\x3c\x3f\x78\x70\x61\
\x63\x6b\x65\x74\x20\x65\x6e\x64\x3d\x22\x72\x22\x3f\x3e\x59\x34\
\x5e\xf3\x00\x00\x02\xd6\x49\x44\x41\x54\x78\xda\xc4\x97\x5d\x88\
\x4d\x51\x14\x80\xcf\x3d\x73\x63\x26\xe6\x0f\x2f\x63\xa8\x91\xc2\
\x35\xe3\x67\xfc\x34\xe1\x6d\x26\x94\x28\x23\x4f\xa3\xa6\x34\xe4\
\x61\x46\x92\x17\x3f\x8d\x48\x92\x9f\xf0\x20\x2f\x46\x44\x92\x07\
\x19\x85\xc4\x83\x9f\x17\x94\x50\x73\x87\x49\x29\xf2\x73\xf3\x30\
\x4d\x83\x49\x46\x5c\xdf\xd2\x3e\x75\xba\x73\xf6\xbe\xfb\x9c\xb9\
\x87\x55\x5f\xeb\xde\x73\xf6\x59\x6b\xed\xb5\xd7\xd9\x7b\x9d\x84\
\x63\x29\x87\x36\x5e\x28\x47\x35\xc3\x52\x68\x80\xc9\x30\x01\x5c\
\xf8\x0e\x6f\xa1\x17\x1e\xc0\xb5\x5d\xe7\x5a\x3f\xda\xd8\x4d\x58\
\x38\x9e\x85\xea\x84\x75\x50\x6c\x19\x6f\x16\x6e\xc3\x11\x02\xb9\
\x1f\x29\x00\x1c\x97\xa1\x8e\x42\x1b\x14\x39\xd1\xa5\x1b\x3a\x74\
\x19\x49\x68\x9c\xcf\x97\x34\x42\x8d\x53\x18\xe9\x87\x16\x82\xb8\
\x93\x7b\xc3\x0d\x70\xde\xa8\xd6\xb1\x50\xce\x45\x26\xc2\x0d\x6c\
\x6f\x30\x66\x80\x01\xf5\xca\x79\xa9\x13\x8f\xfc\x86\x35\x64\xe2\
\xd6\x88\x00\x70\x5e\x81\x7a\x6e\x31\xf3\x21\xb8\x09\x8f\x21\x03\
\x3f\x61\x0a\x2c\x16\xe3\x50\x96\xe7\xf9\x41\x58\x48\x10\x6f\xe4\
\x4f\xd2\x9f\x80\x3c\xce\xbf\xc2\x01\x38\xcd\xc3\x43\x9a\xda\x29\
\x41\x6d\x81\x7d\x50\xae\xb1\x23\xd7\xcf\x30\xb6\x09\x3b\xd9\x84\
\x7a\xb0\x0e\xf5\xc2\x50\xed\x69\x58\xeb\x45\x6d\xf1\xea\x4e\x55\
\xd5\xbf\xc0\x30\xac\x19\x7b\xdd\x5e\x11\xbe\x86\xcd\xca\x51\xae\
\xf4\x41\xa3\xad\x73\x11\xc6\xbe\x97\x67\xd4\xa4\x74\xb2\x27\xa8\
\x08\xe5\xff\x72\xd8\x01\x2b\x60\x18\xea\x31\xf8\x32\x4a\xc5\x61\
\x6f\x3a\xaa\x07\x4a\x34\x43\xe6\x9a\x36\xa2\x39\xa8\xd9\x38\xbf\
\x32\x9a\xb2\xc7\xce\x7e\xd4\x5e\xcd\xed\xce\x84\x13\xb3\x10\x40\
\x15\xea\x43\xd0\x9e\x83\xdc\x75\xe3\x0e\x80\x0c\x66\x0c\xb5\x90\
\x4a\x12\x61\x35\x3f\x96\x49\xd5\x32\x78\x38\xa6\x38\xd2\x9a\x37\
\xa2\x4a\x32\x30\x03\x64\x9d\x3f\x11\xcc\x71\xa8\x8d\x21\x80\x01\
\xcd\xf5\x22\xd9\x88\x2a\x7d\xfb\xf5\x76\x81\x20\x1e\xa1\xcf\x4a\
\x60\x64\xe5\x5b\x81\xce\x82\x20\xf9\x25\x19\xa8\x08\xb8\xb1\x04\
\xba\x64\xab\x25\x98\x2e\x68\x18\x65\x00\x75\x9a\xeb\x19\x7f\x06\
\x82\x64\xbc\xea\x07\xda\x08\x22\xad\xb2\x72\x91\xac\xf4\x87\x78\
\x0b\xe4\x9c\x98\xa7\xb9\xfd\xca\xcd\x13\x40\xee\x2c\x4e\xc0\x79\
\x8c\x16\x87\x98\xfd\x56\x43\xe3\xf3\x30\x19\x22\x80\xcf\x52\x1f\
\xcc\xfe\x72\x88\xd9\xa7\x50\xdb\x0c\x43\xae\x27\x35\x35\x90\x2b\
\x97\xa0\x1d\xe7\x83\x21\x9c\x4f\x42\x5d\x85\xb1\x9a\x21\x4f\xb1\
\xd7\x63\xbb\x04\x8b\xe4\x9d\x0d\x79\x06\x48\x63\x93\x32\x0c\x3b\
\xe8\xf5\x03\x36\x01\xcc\x84\x67\x18\x96\x26\xf5\x24\x91\x0f\x18\
\x1a\xd9\x0e\xd8\x0d\xe3\x0c\xf6\xee\x49\xfa\xff\x9e\x86\x3c\xd4\
\xa7\x1c\xd8\xca\x0f\xd9\xc3\xe1\x89\xda\xe3\x45\xaa\x55\x47\xb4\
\xd2\xa2\x75\x1f\xd1\x11\x79\x19\x78\x07\xc7\x54\x6a\x4c\x6d\x95\
\xac\xe9\x6a\x45\x94\x9e\xb0\xc5\xdf\x5b\xb8\x2a\x55\x87\xd5\xd1\
\x7b\x4a\xf5\x75\x5f\x62\xd8\x8e\xa5\x77\x6c\xf5\x37\xa4\xde\x12\
\xd4\x72\xb1\x37\xa0\x17\x90\x35\x9a\x16\xf7\x77\x81\xa9\x21\x29\
\x55\x4b\xb2\x49\x73\x96\xc7\xf7\x65\x14\x90\x8d\x9d\xb0\x1e\xc6\
\xfc\xb3\x6f\x43\xcd\xc6\xb2\x0a\x9a\xd4\xde\x5e\xe3\x6b\xbd\xa3\
\x7f\x1d\x67\xb3\x59\xe7\x7f\xca\x1f\x01\x06\x00\x05\x3b\xea\xc8\
\xfa\x24\x85\x20\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
\
\x00\x00\x69\xec\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x01\x00\x00\x00\x01\x00\x08\x06\x00\x00\x00\x5c\x72\xa8\x66\
\x00\x00\x00\x19\x74\x45\x58\x74\x53\x6f\x66\x74\x77\x61\x72\x65\
\x00\x41\x64\x6f\x62\x65\x20\x49\x6d\x61\x67\x65\x52\x65\x61\x64\
\x79\x71\xc9\x65\x3c\x00\x00\x03\x6b\x69\x54\x58\x74\x58\x4d\x4c\
\x3a\x63\x6f\x6d\x2e\x61\x64\x6f\x62\x65\x2e\x78\x6d\x70\x00\x00\
\x00\x00\x00\x3c\x3f\x78\x70\x61\x63\x6b\x65\x74\x20\x62\x65\x67\
\x69\x6e\x3d\x22\xef\xbb\xbf\x22\x20\x69\x64\x3d\x22\x57\x35\x4d\
\x30\x4d\x70\x43\x65\x68\x69\x48\x7a\x72\x65\x53\x7a\x4e\x54\x63\
\x7a\x6b\x63\x39\x64\x22\x3f\x3e\x20\x3c\x78\x3a\x78\x6d\x70\x6d\
\x65\x74\x61\x20\x78\x6d\x6c\x6e\x73\x3a\x78\x3d\x22\x61\x64\x6f\
\x62\x65\x3a\x6e\x73\x3a\x6d\x65\x74\x61\x2f\x22\x20\x78\x3a\x78\
\x6d\x70\x74\x6b\x3d\x22\x41\x64\x6f\x62\x65\x20\x58\x4d\x50\x20\
\x43\x6f\x72\x65\x20\x35\x2e\x35\x2d\x63\x30\x31\x34\x20\x37\x39\
\x2e\x31\x35\x31\x34\x38\x31\x2c\x20\x32\x30\x31\x33\x2f\x30\x33\
\x2f\x31\x33\x2d\x31\x32\x3a\x30\x39\x3a\x31\x35\x20\x20\x20\x20\
\x20\x20\x20\x20\x22\x3e\x20\x3c\x72\x64\x66\x3a\x52\x44\x46\x20\
\x78\x6d\x6c\x6e\x73\x3a\x72\x64\x66\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x31\x39\x39\
\x39\x2f\x30\x32\x2f\x32\x32\x2d\x72\x64\x66\x2d\x73\x79\x6e\x74\
\x61\x78\x2d\x6e\x73\x23\x22\x3e\x20\x3c\x72\x64\x66\x3a\x44\x65\
\x73\x63\x72\x69\x70\x74\x69\x6f\x6e\x20\x72\x64\x66\x3a\x61\x62\
\x6f\x75\x74\x3d\x22\x22\x20\x78\x6d\x6c\x6e\x73\x3a\x78\x6d\x70\
\x4d\x4d\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x6e\x73\x2e\x61\x64\
\x6f\x62\x65\x2e\x63\x6f\x6d\x2f\x78\x61\x70\x2f\x31\x2e\x30\x2f\
\x6d\x6d\x2f\x22\x20\x78\x6d\x6c\x6e\x73\x3a\x73\x74\x52\x65\x66\
\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x6e\x73\x2e\x61\x64\x6f\x62\
\x65\x2e\x63\x6f\x6d\x2f\x78\x61\x70\x2f\x31\x2e\x30\x2f\x73\x54\
\x79\x70\x65\x2f\x52\x65\x73\x6f\x75\x72\x63\x65\x52\x65\x66\x23\
\x22\x20\x78\x6d\x6c\x6e\x73\x3a\x78\x6d\x70\x3d\x22\x68\x74\x74\
\x70\x3a\x2f\x2f\x6e\x73\x2e\x61\x64\x6f\x62\x65\x2e\x63\x6f\x6d\
\x2f\x78\x61\x70\x2f\x31\x2e\x30\x2f\x22\x20\x78\x6d\x70\x4d\x4d\
\x3a\x4f\x72\x69\x67\x69\x6e\x61\x6c\x44\x6f\x63\x75\x6d\x65\x6e\
\x74\x49\x44\x3d\x22\x78\x6d\x70\x2e\x64\x69\x64\x3a\x30\x43\x31\
\x42\x46\x41\x46\x35\x41\x33\x39\x32\x45\x31\x31\x31\x41\x36\x37\
\x39\x38\x32\x45\x39\x30\x31\x31\x30\x33\x32\x43\x43\x22\x20\x78\
\x6d\x70\x4d\x4d\x3a\x44\x6f\x63\x75\x6d\x65\x6e\x74\x49\x44\x3d\
\x22\x78\x6d\x70\x2e\x64\x69\x64\x3a\x43\x41\x33\x32\x31\x42\x46\
\x41\x35\x34\x30\x38\x31\x31\x45\x33\x41\x31\x35\x45\x45\x45\x33\
\x31\x41\x34\x30\x39\x34\x30\x45\x31\x22\x20\x78\x6d\x70\x4d\x4d\
\x3a\x49\x6e\x73\x74\x61\x6e\x63\x65\x49\x44\x3d\x22\x78\x6d\x70\
\x2e\x69\x69\x64\x3a\x43\x41\x33\x32\x31\x42\x46\x39\x35\x34\x30\
\x38\x31\x31\x45\x33\x41\x31\x35\x45\x45\x45\x33\x31\x41\x34\x30\
\x39\x34\x30\x45\x31\x22\x20\x78\x6d\x70\x3a\x43\x72\x65\x61\x74\
\x6f\x72\x54\x6f\x6f\x6c\x3d\x22\x41\x64\x6f\x62\x65\x20\x50\x68\
\x6f\x74\x6f\x73\x68\x6f\x70\x20\x43\x43\x20\x28\x4d\x61\x63\x69\
\x6e\x74\x6f\x73\x68\x29\x22\x3e\x20\x3c\x78\x6d\x70\x4d\x4d\x3a\
\x44\x65\x72\x69\x76\x65\x64\x46\x72\x6f\x6d\x20\x73\x74\x52\x65\
\x66\x3a\x69\x6e\x73\x74\x61\x6e\x63\x65\x49\x44\x3d\x22\x78\x6d\
\x70\x2e\x69\x69\x64\x3a\x39\x31\x66\x35\x61\x38\x61\x34\x2d\x36\
\x31\x62\x36\x2d\x34\x66\x34\x62\x2d\x62\x38\x61\x38\x2d\x32\x31\
\x61\x66\x32\x62\x30\x37\x63\x64\x31\x38\x22\x20\x73\x74\x52\x65\
\x66\x3a\x64\x6f\x63\x75\x6d\x65\x6e\x74\x49\x44\x3d\x22\x78\x6d\
\x70\x2e\x64\x69\x64\x3a\x30\x43\x31\x42\x46\x41\x46\x35\x41\x33\
\x39\x32\x45\x31\x31\x31\x41\x36\x37\x39\x38\x32\x45\x39\x30\x31\
\x31\x30\x33\x32\x43\x43\x22\x2f\x3e\x20\x3c\x2f\x72\x64\x66\x3a\
\x44\x65\x73\x63\x72\x69\x70\x74\x69\x6f\x6e\x3e\x20\x3c\x2f\x72\
\x64\x66\x3a\x52\x44\x46\x3e\x20\x3c\x2f\x78\x3a\x78\x6d\x70\x6d\
\x65\x74\x61\x3e\x20\x3c\x3f\x78\x70\x61\x63\x6b\x65\x74\x20\x65\
\x6e\x64\x3d\x22\x72\x22\x3f\x3e\x64\xc0\x1f\x2c\x00\x00\x66\x17\
\x49\x44\x41\x54\x78\xda\xec\x7d\x07\x80\x1d\x75\xb5\xfe\x37\x73\
\x7b\xdd\xbb\x7d\x37\xbd\x07\x12\x52\x08\x81\xd0\xa4\x13\xaa\x48\
\x49\xa4\x17\x69\x02\x8a\xe2\x53\xf9\x8b\xa0\xf2\xb0\x20\x8a\xcf\
\xf7\xf4\xd9\x50\x9f\x0a\x22\x9d\xa0\x14\xe9\x45\x29\xd2\x09\x25\
\x05\x42\xfa\x6e\xb6\xef\xbd\x7b\x7b\x99\x99\xff\x39\x67\xe6\x6e\
\x49\x93\x96\x6c\xc9\xef\xc0\xe4\xee\xde\x3b\xf7\xee\xcc\xdc\x39\
\xdf\xf9\x4e\xf9\x9d\xa3\x59\x96\x05\x25\x4a\x06\x43\x0e\x3b\xec\
\x30\x3c\xf5\xd4\x53\xbb\xdc\x79\xf3\x39\x1f\x72\xc8\x21\x43\xe2\
\x58\xdc\xea\x36\x54\x32\x58\xd2\xdd\xdd\x8d\xda\xda\x5a\xdc\x78\
\xe3\x8d\x35\x9d\x9d\x9d\x99\x1f\xfd\xe8\x47\x99\x96\x96\x96\x01\
\xfb\x5c\x75\xd5\x55\x98\x3b\x77\x2e\x72\xb9\xdc\xb0\x3b\x3f\xbf\
\xdf\x8f\xef\x7c\xe7\x3b\x58\xb1\x62\xc5\x80\xe7\x03\x81\xc0\x90\
\x39\xc6\x21\x07\x00\xf1\x78\x1c\x6f\xbc\xf1\x86\x7c\xe1\x7b\xed\
\xb5\x17\xdf\x20\x7e\xd3\x34\xab\xe9\xa5\x1a\xda\x82\xb4\x79\x69\
\xd3\x94\xfa\x0c\x7f\xd9\x67\x9f\x7d\x90\xcd\x66\x23\xc7\x1d\x77\
\xdc\x1e\xa4\xf8\x6f\xdd\x77\xdf\x7d\x8f\x13\x28\x88\xa6\xe7\xf3\
\x79\xd9\x67\xdf\x7d\xf7\xc5\xc2\x85\x0b\x91\x4e\xa7\x87\xdd\xf9\
\x85\x42\x21\xfc\xec\x67\x3f\xdb\xe2\xf9\x17\x5e\x78\x01\xd3\xa7\
\x4f\x47\x2c\x16\x53\x00\xb0\xb9\xbc\xf6\xda\x6b\x38\xfc\xf0\xc3\
\x31\x7e\xfc\x78\xdc\x71\xc7\x1d\x7a\x65\x65\xe5\x78\x02\x83\xc5\
\x9a\xa6\x9d\xac\xeb\xfa\x74\x06\x01\xfa\x59\x69\xcf\x08\x90\xdf\
\xfc\xe6\x37\x02\xf4\x1d\x1d\x1d\x08\x87\xc3\xef\x90\xa5\xbf\x64\
\xe5\xca\x95\xaf\xa6\x52\x29\xd2\xff\xbc\xc9\xfb\xf0\xeb\xac\xfc\
\xc3\x11\x00\x58\x0c\xc3\xd8\xe2\xb9\xaf\x7c\xe5\x2b\x98\x35\x6b\
\x96\xdc\xe7\x83\x2d\xfa\x50\xbb\x60\x1c\x93\xf0\xf9\x7c\x78\xf0\
\xc1\x07\x7d\x0b\x16\x2c\x38\x90\x6e\x80\x2f\xd0\xd3\x27\x91\xd2\
\x4f\xa4\xcd\x4f\x9b\x0a\x5a\x8c\x10\xc9\x64\x32\xe8\xe9\xe9\x01\
\x31\x3c\xfe\xde\x77\x3f\xf5\xd4\x53\x7f\x70\xd8\x61\x87\x1d\x44\
\xca\xee\x25\x9a\xec\xe2\x7d\xbc\x5e\xef\x88\x3c\x77\x97\xcb\xa5\
\x5c\x80\xad\x09\x23\xfe\x0d\x37\xdc\xa0\xcf\x9c\x39\x73\x36\xdd\
\x08\x5f\x72\xbb\xdd\x9f\x76\x68\xbf\x92\x11\x26\x0c\xf6\xac\xfc\
\x72\x23\xba\xdd\xfa\x98\x31\x63\x3e\x75\xde\x79\xe7\x7d\xd3\xe3\
\xf1\x58\xf7\xdf\x7f\xff\xb3\xe4\x1e\x64\xd7\xad\x5b\x67\x1d\x75\
\xd4\x51\x02\x16\xc3\x4d\x98\xa9\x0e\x75\xb6\xaa\x0f\xc5\x83\x9a\
\x30\x61\xc2\x34\x7a\x38\x91\x28\xff\xd1\x4a\xf9\x77\x1d\x30\x60\
\x65\xd9\x7d\xf7\xdd\x0f\xba\xe4\x92\x4b\xbe\x73\xf0\xc1\x07\xef\
\x53\x53\x53\x13\xf8\xc6\x37\xbe\xa1\xdd\x79\xe7\x9d\x20\x57\x10\
\x2a\x63\xb5\x8b\x00\x40\xb1\x58\x3c\xd1\xa1\xfd\x21\xf5\x15\xed\
\x02\x37\xa1\xae\xf7\x5a\x4b\xfe\xb9\xb1\xb1\x71\xff\x2b\xaf\xbc\
\xf2\xfb\x8b\x16\x2d\x3a\x98\x18\xa1\xe7\xa2\x8b\x2e\xd2\x96\x2c\
\x59\x82\xea\xea\xea\x01\x96\x75\x38\x6c\x43\x5d\xdc\x43\xf4\x86\
\x38\x84\x1e\xa6\x2b\xd5\xd8\x35\x99\x00\xb9\x00\x20\x77\x60\xff\
\x53\x4e\x39\xe5\xca\x52\xa9\x64\xdc\x7a\xeb\xad\xff\xfc\xdc\xe7\
\x3e\xc7\xd9\x01\xeb\xe4\x93\x4f\x96\xf4\xa1\x92\x11\x0c\x00\xb0\
\xd3\x7d\xba\xfa\x7a\x76\x0d\xd9\x9a\xa5\xe4\xe7\x76\xdb\x6d\xb7\
\x43\xce\x3f\xff\x7c\xcf\xc6\x8d\x1b\xbf\xf1\xf4\xd3\x4f\xbf\x7a\
\xe1\x85\x17\x72\x6e\xd0\x24\x60\x40\x57\x57\xd7\x90\xb7\xb0\xc3\
\x81\x01\x0c\x55\x25\x53\xce\x9e\x02\x05\x71\x07\x46\x8f\x1e\x7d\
\xc0\x35\xd7\x5c\xf3\x7d\xb2\xfc\xfb\x16\x0a\x05\xf7\x05\x17\x5c\
\xa0\xdd\x7b\xef\xbd\xa8\xaa\xaa\x52\x17\x69\x04\x33\x00\x25\xbb\
\x38\x03\x28\x0b\xbb\x03\x13\x27\x4e\x3c\xe8\xec\xb3\xcf\xbe\x8a\
\x00\xe0\x7b\x0f\x3e\xf8\xe0\xcb\x65\x77\x80\x99\xc0\x50\x76\x07\
\x14\x03\x50\xa2\xe4\x13\x88\x09\xb0\x22\xcd\x9e\x3d\xfb\xc8\x2b\
\xae\xb8\xe2\x3f\xf7\xd9\x67\x9f\x39\xf4\xb4\x97\x98\x80\xce\x4c\
\x80\xb3\x03\x4a\x14\x00\x28\x19\xc6\xd6\xff\x83\x44\xd2\xd9\x1d\
\x18\x37\x6e\xdc\xc1\xdf\xff\xfe\xf7\x7f\xf0\xd9\xcf\x7e\x76\x3f\
\x62\x03\x2e\x62\x02\x3a\x67\x07\xd8\x1d\x50\x99\x00\xe5\x02\x28\
\x19\x81\x2e\x40\x7f\xf1\x7a\xbd\xda\xe4\xc9\x93\x0f\x3d\xe7\x9c\
\x73\x8a\xf4\xeb\xf5\x77\xdd\x75\xd7\x8b\xf4\x73\x8e\x58\x82\xb5\
\x68\xd1\xa2\x21\x17\x18\x54\x2e\x80\x12\x25\x3b\xc0\x1d\x98\x35\
\x6b\xd6\x42\x76\x07\x0e\x3a\xe8\xa0\x3d\x43\xa1\x90\xef\xa2\x8b\
\x2e\xd2\x09\x0c\x54\x60\x50\x01\x80\x92\x91\xea\x06\xf4\xa7\xd3\
\x1c\x18\x6c\x6c\x6c\xfc\xd4\xb5\xd7\x5e\xfb\xbd\xe3\x8e\x3b\x6e\
\x41\x36\x9b\x75\x9d\x7f\xfe\xf9\x7a\x39\x3b\xa0\xe8\xff\x08\x71\
\x01\x54\xe9\xe7\xc8\xb7\xe8\x1f\x85\x2a\x3b\x0b\xc6\xb4\xa9\x53\
\xa7\xb2\x3b\x90\x8b\xc7\xe3\xdf\x7d\xf4\xd1\x47\x5f\xa7\x9f\xb9\
\x4e\x60\xc8\xb8\x03\xca\x05\x50\xa2\x64\x07\x01\x7c\xd9\x1d\xd8\
\x73\xcf\x3d\x8f\xb9\xfa\xea\xab\xaf\xdb\x7b\xef\xbd\x67\xea\xba\
\xee\x3d\xef\xbc\xf3\xf4\xbb\xef\xbe\x5b\xb9\x03\x0a\x00\x94\x8c\
\x44\x17\x60\xf3\xcd\xed\x76\x63\xd2\xa4\x49\x47\xdc\x70\xc3\x0d\
\x3f\xf8\xcc\x67\x3e\xb3\x57\xa1\x50\xd0\x89\x09\xe8\xf7\xdc\x73\
\xcf\x90\x71\x07\x94\x0b\xa0\x44\xc9\x0e\xa2\xca\xcc\x04\xb8\x67\
\xc0\x8c\x19\x33\x16\x9e\x7b\xee\xb9\x05\xfa\xfd\x07\x8f\x3d\xf6\
\xd8\x1b\x67\x9d\x75\x96\x14\x0b\x2d\x5e\xbc\x78\xd0\xdc\x01\x15\
\x03\x50\xa2\x64\x27\x29\x0a\x7f\xc6\x3e\xfb\xec\x73\x7c\x5d\x5d\
\x5d\xa0\xad\xad\xed\xaa\x57\x5e\x79\xe5\x4d\x02\x84\x22\x3d\x6f\
\x0e\x16\x08\xa8\x18\x80\x12\x25\x3b\xd3\x9a\x91\x3b\x30\x66\xcc\
\x98\x43\xaf\xbf\xfe\xfa\xef\x9f\x71\xc6\x19\xfb\x95\xdd\x01\x95\
\x22\x54\x0c\x40\xc9\x10\xf7\xff\x3f\x09\x61\x77\xc0\xef\xf7\xeb\
\xd3\xa7\x4f\x3f\xf2\x94\x53\x4e\x29\x90\xd5\xcf\x3f\xf1\xc4\x13\
\x4b\xc9\x1d\xc8\x72\x2b\xb9\x9d\xcd\x04\x94\x0b\xa0\x44\xc9\x4e\
\x96\x72\x76\x60\xaf\xbd\xf6\x3a\xae\xba\xba\x3a\xd0\xd4\xd4\x74\
\xe5\x6b\xaf\xbd\xf6\xd6\xd9\x67\x9f\x5d\x32\x49\x4e\x3d\xf5\x54\
\xd5\x4f\x40\xb9\x00\x4a\x46\xba\x70\xb1\xd0\xb8\x71\xe3\x0e\xbd\
\xe1\x86\x1b\xbe\x7b\xf2\xc9\x27\xef\xcd\xee\xc0\xb9\xe7\x9e\x2b\
\xee\x80\x6a\x2f\xa6\x18\x80\x92\x21\xe6\x06\xec\x08\x26\xc0\xc5\
\x42\xb3\x67\xcf\x3e\xe6\x82\x0b\x2e\xb0\x7a\x7a\x7a\xfe\xf3\x9f\
\xff\xfc\xe7\x9b\x67\x9e\x79\x66\x7e\x67\xb9\x03\xaa\x29\xa8\x12\
\x25\x43\xc0\x1d\xd8\x73\xcf\x3d\x8f\xbd\xee\xba\xeb\x7e\x30\x7f\
\xfe\xfc\x59\x4c\x0e\xce\x3a\xeb\x2c\x15\x18\x54\x00\xa0\x64\x57\
\x01\x01\xc7\x1d\x38\xec\xfb\xdf\xff\xfe\xf7\x88\x01\x48\x76\xe0\
\x8c\x33\xce\xe8\x75\x07\x94\x0b\xa0\x44\xc9\x08\x73\x01\xb6\xe6\
\x0e\xec\xb6\xdb\x6e\x0b\xcf\x3e\xfb\x6c\x83\xe4\xfa\x7b\xef\xbd\
\xf7\x55\x02\x81\x2c\x9c\x62\x21\x1e\x49\x37\xdc\xce\x4b\x31\x00\
\x25\x4a\x3e\x04\x08\x70\x53\x91\x59\xb3\x66\x1d\xf5\xb5\xaf\x7d\
\xed\xba\x03\x0f\x3c\x70\xb6\xcb\xe5\xf2\x96\xdd\x01\x9e\xd3\xb7\
\x2b\x06\x06\x15\x00\x28\xd9\xa5\x84\xeb\x04\x1a\x1b\x1b\x0f\xfe\
\xf6\xb7\xbf\x7d\xed\x79\xe7\x9d\x27\x9d\x85\x88\x09\x68\xbb\xaa\
\x3b\xa0\x5c\x00\x25\x23\xde\x05\xd8\x9c\x09\x10\x08\x68\x33\x67\
\xce\x3c\x8a\xa8\xbf\xd9\xda\xda\xfa\xdd\xa7\x9e\x7a\xea\x0d\x02\
\x81\x1c\x67\x07\x78\x29\x71\x22\x91\x50\x2e\x80\x12\x25\x23\xdd\
\x1d\x98\x3b\x77\xee\x31\xc4\x04\xbe\xb7\x60\xc1\x82\x59\xf4\x9c\
\xe7\xcc\x33\xcf\x14\x77\xa0\xa2\xa2\x62\x97\x71\x07\x14\x00\x28\
\xd9\x65\x41\x80\x57\x11\x8e\x1d\x3b\xf6\xd0\xab\xaf\xbe\xfa\xda\
\x73\xce\x39\x47\xdc\x81\xd3\x4f\x3f\x5d\xfa\x09\x70\x4c\x40\xb9\
\x00\x4a\x94\xec\x04\xfa\x3f\x58\x54\xb9\x9c\x1d\x98\x33\x67\xce\
\x31\x04\x00\x56\x73\x73\xf3\x75\xaf\xbf\xfe\xfa\x5b\x04\x02\xec\
\x0e\xc8\x04\x22\x1e\x5f\xae\x5c\x00\x25\x4a\x46\xb8\x3b\xb0\xc7\
\x1e\x7b\x1c\xcd\xab\x08\xf7\xdf\x7f\xff\xd9\xa6\x69\xba\xd8\x1d\
\x60\x26\x10\x8d\x46\x47\xb4\x3b\xa0\x00\x40\xc9\x90\x61\x01\x83\
\xd9\xb1\x87\xb3\x03\xec\x0e\x5c\x71\xc5\x15\xd7\x9c\x75\xd6\x59\
\x0b\xf2\xf9\x3c\xbb\x03\x32\x86\x8c\x63\x02\x1f\xe7\x38\x15\x00\
\x28\x51\x32\x0c\x98\x00\xb9\x03\xfa\x9e\x7b\xee\x79\xdc\x79\xe7\
\x9d\xf7\xff\x0e\x3b\xec\xb0\x39\xc1\x60\xd0\xc7\x31\x01\x06\x81\
\x48\x24\xa2\x18\xc0\xce\xfc\x32\xca\x8f\x6a\x1b\xf9\xdb\x50\x63\
\x23\xb3\x67\xcf\x3e\xf6\x86\x1b\x6e\xf8\x1e\xb9\x05\x33\xb8\x6c\
\x98\x41\xe0\xcd\x37\xdf\x14\x10\x18\x69\xee\x80\x62\x00\x4a\x94\
\x6c\x26\xec\x0e\x4c\x9a\x34\x69\xe1\x75\xd7\x5d\xf7\x9d\x4f\x7f\
\xfa\xd3\xd2\x68\xf4\xca\x2b\xaf\xd4\x96\x2e\x5d\xca\x2c\x61\x44\
\x9d\xab\xca\x02\x28\x19\x12\x56\x77\x08\xba\x03\xda\x82\x05\x0b\
\x4e\xf8\xf2\x97\xbf\x6c\xb5\xb7\xb7\x5f\xfb\xe4\x93\x4f\xae\x58\
\xb4\x68\x51\x9e\x40\xc0\x62\x10\x20\x50\x18\x76\xe7\xa5\x18\x80\
\x12\x25\x1f\x02\x04\x58\x81\xe7\xcd\x9b\x77\xc2\xcf\x7e\xf6\xb3\
\x1f\x1c\x78\xe0\x81\x33\x57\xad\x5a\xe5\xda\x7b\xef\xbd\xf5\xe5\
\xcb\x97\x23\x1c\x0e\x8f\x08\x77\x40\x01\x80\x12\x25\xff\x86\x09\
\x4c\x9e\x3c\xf9\xa8\xab\xae\xba\xea\x9a\x33\xcf\x3c\x73\xc1\xb2\
\x65\xcb\xdc\x5f\xff\xfa\xd7\x35\x8e\x09\x8c\x04\x77\x40\xb9\x00\
\x4a\x06\x9d\xfe\x0f\x65\xaa\x5c\xce\x0e\xec\xb7\xdf\x7e\x27\x7a\
\xbd\x5e\x7d\xfd\xfa\xf5\xec\x0e\x2c\x27\x26\x90\x7f\xf9\xe5\x97\
\xad\xd9\xb3\x67\x23\x9d\x4e\x2b\x17\x40\x89\x92\x91\x2c\x2e\x97\
\x8b\xb3\x03\xc7\xfd\xe4\x27\x3f\xf9\xde\x21\x87\x1c\x32\x93\xcb\
\x86\x2f\xbc\xf0\x42\x9d\x03\x83\xa1\x50\x48\xb9\x00\x4a\x94\x8c\
\x74\x77\x20\x10\x08\xb8\xa6\x4e\x9d\x7a\x0c\xb9\x00\x57\x1f\x7b\
\xec\xb1\xf3\x88\x01\xb8\xbf\xf5\xad\x6f\x69\xab\x57\xaf\x96\xae\
\x43\xca\x05\x50\xa2\xe4\x23\xba\x01\xc3\x05\x04\xc8\x0d\xd0\x3e\
\xf5\xa9\x4f\x9d\x44\x8f\xee\x8d\x1b\x37\x5e\xf3\xcc\x33\xcf\xac\
\xe4\x91\x64\x0f\x3f\xfc\xb0\xc5\x4c\x80\xdd\x81\xe1\x72\x3e\x8a\
\x01\x28\x51\xf2\x11\x01\x6b\xde\xbc\x79\xc7\xdf\x74\xd3\x4d\xd7\
\x93\x5b\x30\xe3\xd9\x67\x9f\x75\x2d\x58\xb0\x40\x5f\xb2\x64\xc9\
\xb0\x73\x07\x14\x00\x28\x51\xf2\x11\x98\x00\x67\x07\x66\xcc\x98\
\x71\xec\x35\xd7\x5c\xf3\xed\x45\x8b\x16\xcd\x7f\xe7\x9d\x77\xdc\
\x5f\xfd\xea\x57\xb5\xb7\xde\x7a\x0b\xc1\x60\x50\xb9\x00\x4a\x94\
\x8c\x64\x31\x4d\x53\x66\x11\x1e\x78\xe0\x81\x27\xfa\xfd\x7e\x57\
\x57\x57\xd7\xb5\xcf\x3f\xff\xfc\xb2\x93\x4f\x3e\x39\x7f\xef\xbd\
\xf7\x5a\xb3\x66\xcd\x52\x0c\x40\x89\x92\x0f\x43\xab\x87\xdb\xc6\
\x52\xce\x0e\xdc\x78\xe3\x8d\xd7\x1f\x7d\xf4\xd1\x73\xb8\x58\x68\
\xfe\xfc\xf9\xfa\x7d\xf7\xdd\xa7\x00\x40\x89\x92\x0f\x42\xa7\xb9\
\xac\xb6\x54\x2a\x0d\xdb\xe3\x27\xca\x2f\x6b\x07\x2e\xbb\xec\xb2\
\x6f\x72\xb1\x10\x9d\x8f\x7b\xf1\xe2\xc5\xda\xc3\x0f\x3f\x3c\xe4\
\x57\x11\x2a\x17\x40\xc9\xa0\x5b\xfd\x6c\x36\x2b\x74\x9a\xcb\x6b\
\x87\xab\x3b\xc0\x55\x81\x07\x1c\x70\xc0\x09\x04\x06\xee\x35\x6b\
\xd6\x7c\x6b\xe9\xd2\xa5\xec\x0e\x14\x02\x81\x80\xa9\x18\x80\x12\
\x25\xdb\x00\x00\xee\xc6\x53\x66\x00\xc3\x29\x7d\xb6\x35\x26\xe0\
\x74\x16\x3a\xe6\xe7\x3f\xff\xf9\x0d\x07\x1d\x74\xd0\x1c\x02\x36\
\xbd\xab\xab\x4b\x57\x00\xa0\x44\xc9\x36\x00\x80\x2d\x3f\x6f\xc5\
\x62\x51\x98\xc0\x70\x77\x67\xb8\xe5\xf8\xb4\x69\xd3\x8e\xf8\xea\
\x57\xbf\x7a\x0d\xb9\x01\xfb\xd0\x39\x32\xcb\x1e\xb2\xc8\xe6\x1e\
\xaa\x17\xb2\xff\xa3\x92\x91\x2b\x6c\x35\x49\x69\x90\x4a\xa5\xd0\
\xdd\xdd\x2d\x03\x3b\xf9\xf7\xe1\x0c\x02\x5c\x15\xb8\xff\xfe\xfb\
\x1f\x4f\xee\x80\x97\x18\xc0\xb7\x5b\x5b\x5b\xdf\xde\xb0\x61\x43\
\x36\x91\x48\x0c\x39\x77\x40\x31\x00\x25\x83\x0e\x00\xac\x30\x9c\
\x3b\x67\x46\xd0\xde\xde\x0e\x52\x18\xa9\xa8\x63\xdf\x7a\xb8\x82\
\x80\x93\x1d\x58\xf8\xd3\x9f\xfe\xf4\xbf\x2e\xbf\xfc\xf2\x23\x08\
\xd8\xc2\x43\x91\x09\xa8\x20\xa0\x92\x41\x17\x56\x96\xf2\xd2\xda\
\x64\x32\x89\x7c\x3e\x2f\x3d\xfb\xf9\x39\x76\x0f\x86\xab\xf0\xf1\
\x8f\x19\x33\x66\xbf\x99\x33\x67\x5e\x1c\x0a\x85\x5a\xe9\xa9\xd7\
\x68\x2b\x32\x46\x28\x00\x18\x06\xfe\x29\x6f\x43\xb1\x6f\xdd\x48\
\x92\x72\xe3\x0d\x4d\x23\x26\x40\x4a\xcf\x69\xb3\x5c\x2e\x27\x40\
\xd0\xd1\xd1\x21\xc1\xc1\xe1\x7a\xfd\x19\xd8\x78\x23\x10\x58\x18\
\x0e\x87\x1f\xa1\xa7\x56\xd3\xc6\x73\xc7\x0a\x43\x05\x04\x14\x00\
\x6c\x83\x96\x66\x73\x59\xc4\xe3\x5d\xa8\xa9\xae\x13\x8a\x3a\x5c\
\xe9\xe8\x50\x55\x7a\xd3\x32\x85\x11\x9b\x0c\x00\xd0\x9c\x8c\x80\
\x0b\x1e\xb2\x9a\xac\x19\xa6\x69\xc1\xa0\x6b\xce\xfb\xf2\x63\xa1\
\x58\x14\xfe\xec\xf5\x78\x69\x3f\xda\x97\x14\x0b\x43\x1c\x18\xf8\
\x3e\xe2\xc5\x43\x95\x95\x95\x6e\x02\x82\xc9\xf4\xd4\x24\xda\xde\
\xa7\xad\x8b\x36\x43\x01\xc0\x50\x55\xfe\x6c\x06\xad\x6d\x9b\xc4\
\x22\xad\x5d\xbf\x1a\x0d\xf5\xa3\x10\x09\x47\xe9\xa6\x34\xd4\x05\
\xfa\x98\x8a\x5f\x32\x0c\xa6\x57\xf0\xb8\x49\x81\xc9\xea\xbb\x49\
\x87\x4b\x06\x59\x79\x52\x72\x83\x40\x81\x15\x5f\x7c\x68\x0f\xb9\
\x05\x08\xc2\x4d\xdf\x81\x2c\xc5\xa5\xf7\xb1\xba\xeb\xf4\x5e\x2f\
\x01\xb2\xcf\xef\x87\x6f\x18\x2c\xc1\x65\x60\x73\xe2\x1b\x8d\xf4\
\xeb\x78\xda\xda\x68\x8b\x2b\x00\x18\xa2\x94\x8d\x2d\x7f\xf3\xa6\
\x8d\x18\x3b\x66\x1c\x2a\x2a\x2a\xd1\xd1\xd9\x86\xb5\x6b\xdf\x47\
\x5d\xdd\x28\xd4\xd6\xd6\xcb\x8d\xaa\x5c\x82\x0f\x2f\x6c\xc5\x99\
\x45\x31\x9b\x62\x90\xe5\xeb\x58\x2a\x96\x60\x90\xf2\x97\x5f\x33\
\xd8\xea\xf3\xef\xa4\xec\x86\xc3\xb8\xf8\x3b\x61\xcb\xaf\xd1\xfb\
\x5c\xcc\x10\xbc\x9c\x36\xf4\xa0\xa5\xb3\x1b\x95\x91\x30\x2a\xa3\
\x43\xbf\x55\x37\xc7\x31\x08\x00\xb8\x24\xb0\x8a\xb6\x00\x86\x50\
\xf0\x5d\x01\x40\x3f\xcb\xcf\x69\xa8\xa6\xe6\xf5\x18\x37\x7e\x3c\
\xdc\x74\xc3\x15\x8a\x05\x71\x01\xfc\xbe\x20\x56\xbc\xfb\x0e\x52\
\xa9\x24\xc6\x8f\x9f\xd8\x1b\x1b\x50\xf2\xc1\x95\x9f\x85\xd3\x7b\
\xcc\xa2\x8a\x85\x22\x59\x7b\x52\xf2\x52\x59\xf1\x0d\x9b\xf2\x93\
\xf2\x0b\x43\x70\x8a\x6a\xca\xdf\x0b\xf1\x67\x04\x03\x41\x7a\xbf\
\x0f\x89\x54\x0a\xab\x37\x6c\x44\x4f\x3a\x23\xaf\x57\x0e\x9f\x81\
\x1d\xac\x6b\x4c\x59\x5c\x43\xea\xbe\x57\xb7\x67\x3f\x00\x20\x9f\
\xff\xe9\xa7\x9e\x96\x14\x54\x8e\x98\x00\x6f\x0c\x02\x5c\xa2\x3a\
\x67\xd6\x3c\x14\xf3\x79\xbc\xfd\xf6\x52\x29\x5a\x61\xcb\xa4\xe4\
\x83\xd1\x7e\xcd\xa1\xed\x25\x52\xf8\x62\xa9\x24\x20\x60\x5b\x7d\
\x43\x14\x9e\x2d\x3e\x07\xfb\x8a\x04\x00\x9a\x28\xbf\x4b\xde\xe3\
\x21\x4b\xcf\xd7\xbe\xb6\xa6\x06\x9a\x4b\xc7\xd2\x15\xef\xe2\x1f\
\x2f\xbd\x8a\xa6\x96\x56\x04\xbc\x3e\x54\x47\x23\xea\x02\x2b\x00\
\xf8\x64\x84\x6f\xc0\x29\x93\xa7\x62\xf7\xdd\x67\xe0\x81\xfb\x1f\
\x20\x25\x2f\x48\x34\x3a\x93\xc9\x08\x08\x30\x75\x9d\x33\x77\x2f\
\x72\x0b\x62\x78\xf5\xf5\x97\x88\x2d\x74\x0d\xeb\x14\xd5\x4e\x74\
\x82\xe5\x3a\x95\xc4\xba\x93\xe5\x37\x08\x04\x18\x00\x8a\x86\xfc\
\x6c\x98\x44\xf9\x4b\xc4\x0a\x08\x04\x24\x18\x48\x40\xcc\x41\xbe\
\x50\x20\x40\x8a\x5f\x8d\x00\xb1\x86\x37\x57\xbe\x8b\x25\x0f\x3f\
\x8e\xd7\xde\x5e\x46\x6c\x40\xc7\xd4\x09\xe3\x31\x73\xca\x24\xc4\
\xa2\x61\x58\x50\x4c\x4c\x01\xc0\x27\x08\x02\x47\x1c\xb1\x50\x82\
\x36\x4f\x3c\xf9\x04\xdd\x8c\x16\x72\xf9\x2c\xb2\x04\x02\x0c\x08\
\x2c\xbb\x4d\x9f\x81\x69\x53\x77\xc3\x1b\x4b\x5f\xc3\xba\x75\x6b\
\xca\xfe\x9d\xba\x78\xdb\x61\x56\xa2\xe0\xb4\x99\x6c\xed\xf9\xb1\
\x64\x08\xed\x2f\x99\xf6\x73\x45\x62\x03\xa4\xf3\x12\xe0\x73\xe9\
\x6e\x54\xc7\x2a\x11\xa3\xed\xbd\xf5\x1b\x70\xcb\x7d\xf7\xe3\xef\
\x4f\xfe\x03\xe9\x6c\x16\xd3\x27\x4d\xc0\xbe\x73\xe7\x60\xf2\xb8\
\xb1\xf0\x79\x3d\xe2\x36\x28\x51\x31\x80\x4f\x94\xae\xf2\x76\xd2\
\x49\xa7\xe0\x77\xbf\xbb\x09\xaf\xbe\xf6\x1a\xe6\xce\x99\x8b\x4c\
\x2e\x03\x5b\xc7\x35\x61\x02\x63\x46\x8f\x45\x24\x1c\xc1\x0b\x2f\
\x3e\x87\xae\x78\x37\x66\xed\x31\x47\xc5\x05\xb6\x61\xfd\xd9\x42\
\xb3\x95\xb7\x7a\x7d\x7d\x93\x14\x9f\x23\xfe\xf4\x0a\x53\x7f\x93\
\x5d\x04\x4b\xea\x00\xd8\x4d\xa8\xae\xaa\x42\x73\x7b\x07\x1e\x7f\
\xee\x5f\x58\xf1\xfe\x6a\xf8\x3c\x6e\xcc\x9c\x3a\x05\x7b\xcf\xda\
\x03\x63\x1a\xea\xa1\xbb\x34\x61\x0e\x4a\x14\x03\xd8\x31\x01\x2b\
\xba\x29\xc3\xa1\x30\x16\x2f\xfe\x2c\xfe\xf5\xc2\x8b\x58\xbb\x6e\
\xad\xe4\xac\x33\x4e\x4c\x80\x5f\x67\x61\x57\xe0\xb0\x43\x8f\x44\
\x9e\xdc\x84\x17\x5e\x78\x4e\x18\x82\x62\x02\x9b\x29\x3f\xe1\x21\
\x5b\x7c\xa1\xfa\xb4\x99\x42\xf9\xed\xc0\x9f\x55\xb2\xa3\xff\xbc\
\x93\x4b\x94\xdf\x0d\xaf\xcf\x87\xbf\x3d\xf5\x0c\x7e\x78\xd3\x1f\
\xf0\xec\xab\xaf\xa3\xbe\xa6\x0a\xc7\x1f\x76\xb0\x6c\xe3\xc7\x8c\
\x92\x42\x5a\xa5\xfc\x0a\x00\x76\xb8\x70\x90\x8f\xd3\x80\xc7\x1c\
\x7d\x0c\x1e\xf9\xfb\x23\x88\xc7\xe3\x44\x61\x8b\xe2\x0e\xe4\xf3\
\xb9\x5e\x4b\xcf\x45\x29\x07\x1f\x74\x28\x1a\x1b\x1a\xd1\x93\xec\
\x51\x00\xd0\xab\xfb\xe5\x0a\x4a\xd3\x49\xe9\x31\x08\x14\x9d\x9f\
\x2d\x3b\xf2\x2f\xc5\x40\xa4\xfc\x9c\x12\xa4\xf7\x04\xc9\xe7\x5f\
\xdb\xd4\x8c\xbb\x1f\x7e\x02\x01\x9f\x17\x27\x1e\x79\x28\xce\x39\
\xf1\xd3\x98\x3f\x6b\xa6\x13\x40\x2c\x29\x86\xa5\x00\x60\xe7\x09\
\xd7\xa3\xef\x35\x6f\x3e\xf6\x9c\x3b\x17\x0f\xff\xfd\xef\x62\xe1\
\x19\x18\x72\x04\x00\x85\x42\x7e\xc0\xbe\x93\x26\x4d\x42\x28\x18\
\x52\x37\x68\x2f\x02\x00\xec\x9e\x8b\xa2\x1b\xe5\x60\x1f\xa7\xf9\
\x2c\x71\x05\x4c\xd3\x2e\xf8\xd1\x9d\x0a\x40\x4d\x40\xb7\x84\xa0\
\x3f\x80\x86\xda\x6a\x9c\x7e\xdc\xd1\xf8\xf4\xc1\x9f\x42\x2c\x1a\
\x95\xe7\x55\x15\xa6\x02\x80\x41\x11\x6e\x54\xb1\x70\xe1\x31\x08\
\x91\x4b\xf0\xd8\x63\x8f\x43\xd3\x35\x61\x02\x0c\x06\xa6\xd1\x57\
\xc8\xe5\x72\xb9\x7b\x63\x08\xca\xfa\xdb\x65\xbc\x96\xe9\x58\x7e\
\xd3\xa6\xfe\x6c\xed\xc9\xe6\x4b\xea\x8f\x5f\xe7\xe8\x80\xcb\xa5\
\xf7\x32\x06\x06\xd7\x48\x28\x88\xc6\xda\x1a\x79\xe4\xfd\x0d\x43\
\x55\x5e\xee\x92\x00\x50\x0e\xc6\x0d\xf6\x26\x96\x87\x74\x7a\xf1\
\xa2\xc5\xe8\xee\xe8\x04\xb7\x7c\x0e\xd1\xcd\x69\x95\xef\x74\x47\
\xbc\x5e\x8f\xfc\x5a\xce\x14\x0c\x95\xe3\x1f\x9c\x0d\x92\xdb\x17\
\xeb\x2f\x91\x7f\xbb\xc4\xd7\x32\xcc\x5e\x40\xe0\x8b\xea\xee\x57\
\x47\xc1\x1c\x80\x9f\x0f\x07\xfc\x72\x2d\x57\xad\xdf\x38\xe2\xae\
\xa3\x02\x80\x61\x2a\x9c\xbf\xe6\x75\x00\x8b\x16\x2f\xc6\xf3\xcf\
\x3d\x87\x0d\xeb\x36\xc0\xe7\xf3\x4b\x8d\x00\xc7\x06\x78\x2b\xd1\
\x8d\x1e\xab\xa8\x14\xea\x5b\x2a\x16\x76\xe9\xeb\x25\x96\xde\xa1\
\xf8\x25\xc3\x49\xf5\x95\x4c\x94\x2c\xdb\x0d\xb0\x1c\xbf\xbf\xbf\
\xf2\xf3\xff\xd2\x52\x4b\xd3\x51\x55\x51\x81\x8e\xee\x6e\x01\x84\
\x8f\x1a\x51\x51\xb1\x98\x0f\x2e\x2a\x0d\xf8\x01\x5d\x81\xf1\xe3\
\x26\xe2\xa8\xa3\x8e\x96\x78\xc0\xb8\xf1\xcb\xc4\xd2\x75\xc7\xbb\
\xc1\x6b\x3a\x8e\x38\xe2\x08\x4c\x18\x3f\x19\x15\xd1\x18\x7a\x92\
\x09\x29\x69\x2d\xbb\x05\xbb\x1c\x00\xb0\xf5\x77\x16\xfd\xf4\xd6\
\xf8\xc3\x74\xd8\x94\x6d\xf9\xcb\x0a\x5a\x56\xfe\xb2\xef\x50\x20\
\x7f\x7f\x5c\x63\x23\x5e\x7e\xeb\x6d\x64\x09\x60\xfd\xce\x42\xa0\
\x0f\x05\xd8\xa5\x12\xf2\x85\xa2\x94\x0d\xf7\x07\x1a\x25\x0a\x00\
\x3e\x76\x50\x70\xee\x9c\x79\xa4\xe8\x13\xe5\x67\x7f\x20\x80\x9e\
\x9e\x1e\xfc\xf9\xb6\x3f\xe3\xa5\x57\x5e\x94\xe2\xa1\xba\x9a\x06\
\x44\x89\x2d\xa4\xd2\x49\xf1\x75\xb9\xa4\x75\x97\x63\x4c\x8e\xf5\
\xb7\xd3\x7f\xf6\x66\x3a\xe5\xc0\xe5\x85\x3d\x5b\x28\xbf\xf3\x8c\
\x69\x19\xa8\xaf\xae\x44\xa2\x27\x89\x54\x26\x83\x00\x29\x31\xf9\
\x0f\x1f\x28\xea\x28\x6b\x0c\xe8\x6f\x32\x33\xeb\x49\xa5\x88\x91\
\x55\x10\x73\x0b\x0d\xf9\x25\xc3\xca\x05\x18\x46\xc2\x37\x33\xe7\
\xff\xeb\xeb\x49\xd1\x23\x51\x4c\x24\x30\x38\xe5\xa4\x45\x78\xe5\
\x95\xa5\x78\xf2\xa9\x27\xd1\xda\xbe\x89\x6d\xa0\x14\x09\x71\x39\
\xeb\xae\x16\xbd\x96\xb8\x09\xf9\xfa\xec\x02\x14\x39\xed\x27\xa9\
\x3b\x88\xd5\x77\xeb\xfa\x76\x95\x9f\x6f\x44\x8e\x11\x48\x1c\xc0\
\xe3\x46\x53\x6b\xbb\xb8\x04\xff\x8e\xea\xdb\x73\x05\xf2\xc8\x66\
\x73\x28\x12\x53\x2b\x4a\x65\xa1\x25\xeb\x0a\x54\xf6\x40\x01\xc0\
\x27\x4f\x71\x9d\xd4\x16\x3f\xe6\xe9\xc6\x9b\xb1\xdb\xee\x38\xfb\
\xcc\x73\xf0\xca\xab\x6f\xe0\x89\xa7\x1e\x17\x10\x90\x80\x56\x28\
\x22\x51\x6e\xcb\xda\x35\x6e\x42\x56\xc6\x32\xe5\x2f\x95\x8a\x12\
\x17\xb1\x78\x1d\x80\x6b\xa0\xe2\x6f\x4d\xf9\x9d\x22\x4b\x49\x17\
\xf2\x22\x9f\xa3\x0f\x3a\x10\x95\xb1\x8a\x0f\xa0\xf8\x05\xe9\x24\
\xcc\x8f\xd2\x44\x84\xfe\xe5\xc5\x46\xd2\x67\xc0\xed\xb6\x1b\x8a\
\xa8\xb5\x02\x0a\x00\x76\xb4\x6b\xb0\xfb\x6e\xbb\xe1\x73\xe7\x7e\
\x0e\xaf\xbd\xfa\x16\x1e\x7d\xec\x51\xb4\xb5\x37\x8b\xf5\x0b\x06\
\x43\x12\x0b\xd8\x15\xd2\x83\xac\x94\x12\xfd\xb7\x9c\xee\x3d\xa4\
\xd0\x6e\x6d\xcb\x60\xdf\xb6\x94\xbf\x0c\x0e\xac\xcc\x53\xc7\x8c\
\x42\x7d\x65\x0c\x19\xa2\xf3\xe5\xd9\x01\xe5\xb8\x41\x7f\xc5\xe7\
\x6b\x2f\xeb\x01\x9c\xd7\xb8\x66\x80\x19\x80\xdb\xa5\x09\x8b\x30\
\x9c\x2c\x8e\x12\x05\x00\x3b\x1c\x04\xa6\x4f\x9f\x8e\xf3\xcf\x3b\
\x1f\x6f\x2c\x7d\x07\x8f\x3c\xfe\x28\x5a\xca\x20\x10\x08\xc2\xe5\
\x76\x8d\x78\x10\x90\xd6\x5d\x6c\xf5\x2d\x3b\xe7\x2f\xf4\xbd\xac\
\xec\x16\xb6\xd2\x0f\x77\xa0\xf2\xf7\x43\x12\x64\xf3\x05\xa4\x33\
\x59\x59\x89\xd9\x93\x4c\x4a\x93\x16\xd3\x61\x17\xac\xf8\x39\x51\
\x7c\xb3\xfc\x66\x79\x64\xd0\xe1\x20\x62\x89\x58\x04\x57\x68\xf6\
\xae\xcd\x50\x19\x01\x05\x00\x3b\x05\x04\x72\x79\x4c\x9b\x36\x0d\
\x17\x9e\x7f\x21\xde\x5c\xba\x0c\x8f\x0a\x08\x6c\x94\xda\x80\x80\
\x3f\x20\x94\x74\xa4\x82\x00\x2b\x1b\x5b\x62\x56\xc2\x62\xd1\x10\
\xe5\xef\xd3\x3b\xad\xb7\x77\x42\xf9\xfc\xed\x1e\x80\x5b\x2a\x3f\
\x3f\x57\x6e\xc6\xca\x31\x14\x51\x62\xfa\xdc\x7c\xae\x80\x54\x3a\
\x8d\x0c\x29\xbf\x9d\x1e\xec\x73\x25\x98\x00\xc8\xdf\x25\xb0\x2d\
\x39\x8b\x8d\x82\x7e\x1f\x4a\x45\x43\x90\x47\xa9\xbf\x02\x80\x9d\
\xca\x04\xa6\x4e\x99\x4a\x20\x70\x11\xde\x7a\x6b\x39\x1e\xfa\xfb\
\xc3\x04\x02\x4d\xb2\x0e\x9e\xd7\xb5\x7b\x3c\xee\x11\x0b\x00\x85\
\x52\xc1\x29\x84\xb2\x06\x18\x5d\x17\x47\xff\x89\x01\xb9\xe9\xdc\
\x03\x92\x9a\x73\x6d\xce\x01\x7a\x95\xbf\x4c\x13\x34\x6d\xe0\x67\
\x73\x05\xa6\x66\x23\x48\xef\x7b\x4c\x27\x66\x20\x81\x3f\x2e\x17\
\x36\x2c\xb1\xfe\xbc\x4c\xd8\xe5\xd6\x05\x0c\xf8\x6f\x29\x00\x50\
\x00\x30\x08\x20\x30\x05\x9f\xbf\xf0\x62\xac\x5c\xf1\x1e\xee\x7f\
\xe0\x01\x6c\x6a\x6d\x92\x12\x62\xbf\x6f\x84\x82\x00\x69\x59\x8e\
\xac\xb4\xbe\xb9\xcf\xcf\x15\x7e\x96\x6d\xb1\x3d\x2e\x0f\x51\xf7\
\xa2\xa4\xe8\x6c\x85\xde\xbe\xf2\xf7\xb7\xf2\xfd\x83\x88\xcc\x21\
\xf8\x33\xd9\xe2\x73\xf9\xb0\xd4\x1b\x58\xf4\xb3\x65\x08\x00\x70\
\x23\x11\x0e\xd2\x5a\xfd\x16\x1a\x29\x51\x00\xb0\xd3\x41\x60\xd2\
\xa4\xc9\xb8\xe4\xf3\x97\xe2\xfd\xf7\xd7\xe2\xbe\x25\x4b\xb0\xa9\
\x65\x83\x80\x80\xcf\xeb\x95\x9e\x02\x23\xc9\xfa\x17\x0b\xb6\x32\
\xb2\x62\xf7\xc6\x04\x34\xbb\xf6\x3f\x14\x0a\x21\x12\x0e\x4b\xc6\
\xa4\xb9\xb5\x45\x4a\xa9\x3d\xd2\x49\xc9\xda\xae\xe5\x07\xfa\x5c\
\x7c\xcd\x21\xf3\x5c\x63\x20\xd5\x85\xe5\xa6\x22\x96\x5d\x66\xcc\
\x15\x86\x5c\x03\xc0\xbd\x03\x78\xe5\x60\x81\x8e\x87\xd3\x8e\xaa\
\x10\x48\x01\xc0\xa0\x82\xc0\xf8\x71\x13\x70\xd9\x25\x97\x61\xfd\
\x86\x66\xdc\x75\xcf\x3d\x68\x12\x10\x28\xd9\x20\xe0\x1d\x19\x20\
\xc0\x2d\xbc\x32\x32\xd4\xb3\xbf\x7f\x6f\x07\xe0\x22\xa1\x30\x42\
\xc1\x00\xda\x3a\xda\xb1\x76\x43\x13\xc6\x8f\x1d\x2b\x1d\x7d\xb9\
\xa1\xa7\xee\x72\x8b\x9f\xbf\x55\xda\xdf\xdf\xec\xf3\xda\x02\xd3\
\x4e\xef\xf1\xc6\x81\x55\xae\x35\x28\x95\xab\x0c\x65\x6e\x00\xe4\
\x39\xfe\x5b\xec\x8a\xb0\xab\xc0\xd7\x58\x95\x04\x2b\x00\x18\x54\
\xe1\x02\x95\xb1\x63\xc6\xe2\x8b\x97\x7d\x11\x2d\xad\x1d\xb8\xf3\
\xce\xbb\xb0\xb1\x79\x1d\x3d\x9f\x23\x6b\xe5\x95\xf1\x57\xc3\xfa\
\xe6\x21\xe5\x2f\x4a\xf4\xbd\xdc\x0c\xa5\x4f\x71\xd9\x1d\xc8\x93\
\x6f\xbe\x7e\x63\x33\xd6\x35\x35\x63\xd2\xf8\xf1\xe2\x26\xbc\xb5\
\x6c\x85\xd4\x09\xb0\x75\xe6\x36\x5f\x76\xe0\x6f\x4b\xe5\x17\xaa\
\xcf\x81\xc5\x92\xdd\x30\xb4\x5c\x51\xc8\x0a\x5f\x5e\x57\x50\x32\
\xec\x25\xc6\xcc\x0a\x82\x01\x3f\x5c\x2e\x8d\x80\xb7\x28\x1d\x9d\
\x3d\x6e\xb7\x5a\x9d\xa9\x00\x60\x28\x80\x40\x41\x1a\x86\x7c\x89\
\x40\xa0\xb3\x33\x8e\xdb\xef\xbc\x93\x40\x60\xbd\xf4\x15\x60\xba\
\xca\x29\xab\xe1\x2c\xdc\x41\xb9\xbc\x90\x87\x8b\x7e\xf4\x7e\xb4\
\xdb\x20\x70\xc8\xe7\x0b\x98\x4c\xca\xcf\xb1\x8f\xd5\xeb\xd7\x63\
\xca\x84\x71\xb4\x8d\x17\xd6\xc0\xa9\xbe\xfe\xeb\x02\xfa\x16\x06\
\x41\xfc\x79\xc3\xa9\x2b\x30\xca\x0d\x44\x7a\x7b\x09\x38\xe5\xc6\
\xce\x92\x61\x06\x93\x50\xd0\x2f\xeb\x07\x64\x10\x87\xdf\xa7\xac\
\xff\x70\x07\x80\x91\xb4\x14\x94\x41\xa0\xb6\xb6\x8e\x98\xc0\x65\
\x48\xc4\x93\xb8\xe3\x6e\x9b\x09\x64\xf3\x59\x59\xfe\xca\x31\x81\
\xe1\x76\x4e\x9c\xda\x4b\x67\x32\xe4\xdb\x17\x6c\x6b\x4b\xff\xa5\
\xb3\x39\x02\xb6\x82\x53\xb8\x63\xdf\x5a\x95\xb1\xa8\x44\xff\x79\
\xbf\xe9\x13\x27\x60\xdc\xa8\x51\x88\x27\x7a\xb0\xb1\xa5\x45\x62\
\x03\x76\x43\x90\x3e\x67\x9f\x95\xbb\x4c\xf5\x0d\x67\x5a\x50\x7f\
\xe5\x37\x9c\x91\x61\x76\x97\x21\x3b\xd8\x17\x8b\x46\x48\xf9\xf3\
\x28\x30\x13\xf0\xf9\xe0\xa5\xe3\x31\x9d\x01\x2e\x6a\x39\xb0\x62\
\x00\x43\x42\x98\x2a\xd7\xd6\x10\x08\x5c\xfa\x05\xf4\xf4\xa4\x71\
\xdb\xed\x77\x60\x43\xd3\x5a\x69\x33\xc6\x53\x64\x87\x93\x3b\xe0\
\x76\xbb\x08\xbc\x72\x02\x00\x7c\xdc\xf1\x64\x0a\xef\xae\x5d\x8f\
\x35\x4d\x9b\xb0\x81\x14\xbb\xad\x2b\x6e\xa7\xec\x9c\x68\x7f\x81\
\x68\x79\xa1\x50\x94\x02\x9e\xf7\xd7\xaf\xc3\xb2\xf7\x56\xa1\xa6\
\xaa\x52\xea\xfe\x2d\xa9\xe4\x73\x18\x83\x94\x11\x9b\xce\x8a\x42\
\xfb\x77\xa6\xf8\xa6\x69\x39\xb3\x02\x1d\xe5\xb7\xd0\x1b\x03\xe0\
\xe5\xc3\x05\x72\x29\xb2\xf4\xd9\x3e\xb7\x17\xe1\x60\x40\x51\x7f\
\x05\x00\x43\x17\x04\x6a\x6a\x6a\x71\x39\x81\x40\xbe\x50\xc2\x9f\
\xff\x7c\x1b\xd6\x6d\x78\x9f\x7c\xe3\x8c\x80\x80\x6f\x18\x80\x00\
\x5b\x7e\xb6\xf2\xc9\x64\x9a\x2c\xad\x07\x9d\xf1\x04\xd6\x35\x6f\
\x12\x1a\x5e\x11\x0a\x4a\x1a\x8e\xb3\x1d\x09\xc7\x35\x60\xdd\x66\
\x1a\xef\x71\xeb\x52\xb0\x93\x22\xda\x3f\xa6\xa1\x81\x14\x37\xea\
\x94\xf1\xc2\xd9\xc7\x92\xe8\xbe\x58\x76\xfa\x9d\xfd\x7c\x06\x87\
\x5e\x17\xa0\xbc\xb1\xf2\x17\x4b\x72\x2c\xdc\x3e\x8c\xf7\x4f\xa6\
\xd2\xb2\xe0\x28\x16\x09\x0f\x70\x41\x94\x28\x00\x18\x92\x20\x50\
\x55\x55\x83\x2f\x5c\x72\x99\xd0\xdf\x9b\x6f\xf9\x0b\x56\xaf\x7b\
\x8f\x40\x80\x14\xca\xe7\x1d\xd2\x20\xc0\xca\x25\xca\xdf\x93\x14\
\x16\xd0\x1e\x8f\xa3\xa9\xb5\x15\xb5\x95\x95\xa4\x8c\x35\xb2\x80\
\x27\x1a\x0a\x91\x65\x0f\xd8\xc5\x41\xa4\xa8\x52\x05\x4c\x1a\xce\
\x00\x51\x15\x8b\x61\x6c\x63\x83\x44\xeb\xb9\x70\xc7\xbe\xf9\xec\
\x02\x1e\x1e\x17\x66\xf7\x0e\x70\x56\x14\x3a\x9d\x84\x0c\xa3\x2f\
\x06\xc0\xfb\x71\x00\xd1\xef\xf7\x62\x14\x29\x3f\xbb\x1c\xdd\x74\
\x2c\x9c\x4d\xa8\x22\x37\xe0\xa3\xf4\x0f\x50\x00\xa0\x64\x50\x40\
\xa0\x32\x56\x29\x29\x42\x9f\x2f\x80\x3f\xfd\xe9\xcf\x58\xb5\xfa\
\x5d\x64\xb2\x69\x69\x8d\xed\xf3\x79\x81\x21\x56\xc3\xc6\x2b\x1b\
\xf3\x74\xdc\x3d\x69\x52\x7e\x8f\x0b\x1d\x9d\xdd\xa4\xfc\x6d\x68\
\xa8\xa9\x46\x4d\x65\x0c\x01\x3e\x6e\x9e\xda\xeb\xf5\xc8\xf1\xfb\
\x69\x73\xf1\xd0\x14\xe9\xfb\x67\xb7\x08\x37\x1d\xbf\x5d\x14\x9c\
\x9e\xe0\x81\x20\x9c\x29\xc8\x16\x0a\x02\x16\xec\xc3\x97\x0a\x86\
\x3c\x72\x1c\x80\x33\x00\x85\x72\xfa\x8f\x6b\xfc\xbd\x6e\x34\x54\
\x57\xc9\x50\xd0\x76\x62\x1e\xcc\x3e\xb8\xd2\xb0\x86\xae\x65\x38\
\x18\x54\xca\xaf\x00\x60\x78\x81\x40\x45\x24\x86\x4b\x3f\x7f\x09\
\xa2\xd1\x0a\xfc\xf1\xe6\x5b\xf0\xee\xaa\x15\xe4\x57\x27\xc5\xaf\
\xf6\x49\x9d\xc0\xd0\x00\x01\xb6\xde\x39\x52\x52\x6e\x7d\xee\xd1\
\xc9\xf2\x93\x8f\xdf\xd4\xd1\x81\xc6\x3a\xb2\xfa\x15\x31\xbb\x42\
\xaf\x37\xd8\xa5\x49\x9f\x7f\x2e\xc4\xf1\x90\xf2\x33\x10\x30\x5b\
\xe8\xbf\x2c\x8f\xfd\xf7\x24\x59\xef\x78\x3a\x43\x2e\x41\x8e\x3e\
\xbb\x88\xbc\xe1\x4c\x0f\x92\xfd\x2c\xc9\x1a\x70\x40\x2f\x16\x89\
\xa0\x8e\x00\xa6\xb1\xa6\x0a\x15\xe1\x10\x3a\xba\xba\xb0\x62\xf5\
\x1a\x7a\xec\x96\x7d\x6a\x09\x10\xa2\x21\xa5\xfc\x1f\x39\x9e\xa3\
\x2e\xc1\x20\x82\x00\xd1\x59\xee\x1b\x70\xc9\x45\x9f\xc7\x6f\xff\
\xef\xf7\xb8\xf9\xe6\x5b\x71\xe6\x99\xa7\x61\xda\x94\xe9\x88\x84\
\xa2\xc2\x9d\x39\x8d\x36\x98\x6b\x5a\x7b\x95\x3f\x95\x22\xe5\x77\
\xa3\xa3\x3b\x61\x2b\x3f\x59\xfe\x4a\x02\x2e\x5b\xf1\xb6\x3c\x3e\
\xcb\x71\x19\x3c\x6e\xaf\xf8\xf2\x9c\xcb\x07\xec\x74\x1f\x7f\x26\
\x2b\x37\xbb\xeb\xb6\xde\xd2\xb3\x2e\x9b\x25\x70\x47\x1f\x66\x1b\
\xc2\x08\x8a\x05\xa4\xd3\x29\x49\x17\x76\xf5\xf4\x88\xaf\xcf\x90\
\x18\xab\x88\x12\xf8\xd4\x62\x54\x5d\x9d\xe4\xff\x2d\xd5\xf8\x43\
\x01\xc0\x70\x15\x56\x8c\x40\x30\x8c\x8b\x2e\xb8\x10\xbf\xff\xc3\
\xef\xf1\xe7\x5b\xff\x82\x33\x4e\x3b\x15\xd3\xa7\xed\x8e\x68\xd8\
\x6e\x8a\x31\x58\x20\x50\x56\xfe\x24\x2b\xbf\x8b\x68\x3f\x51\xee\
\xe6\xf6\x76\xa1\xfd\x55\xd1\xe8\x76\x15\x8f\x15\xdd\xed\x72\x8b\
\x95\xe6\x61\x2a\xe5\xe7\xec\x71\xdf\xa4\xe0\x74\x4e\x5d\xf1\x38\
\xda\xbb\xe3\xe8\xa4\x8d\x15\x9c\x0b\x85\xb8\x38\x48\xe6\x2f\xd0\
\x75\xe1\xa5\x3c\x6e\xb7\x2e\xc1\xc6\x68\x38\x8c\x7a\xf2\xfb\x59\
\xe9\x79\x44\x58\x55\xac\x42\x66\x09\xaa\xae\x3f\x0a\x00\x86\xbd\
\x70\xce\x3b\xe0\x0f\xe1\x82\xf3\x2e\xc0\x1f\x6f\xf9\x23\x6e\xb9\
\xf5\x36\x9c\x7e\xea\x62\xcc\xd8\x7d\x96\xb8\x09\x36\x08\xe4\x77\
\xb2\xcf\xef\x92\xdc\x7d\x4f\x92\x94\xdf\xcd\xca\x4f\xb4\xbf\xad\
\xa3\x4f\xf9\xad\xed\xf7\xda\x61\xda\x1f\x20\xeb\x5c\x94\x25\xc2\
\x45\xbb\x50\x88\x9e\x63\x9a\xbf\x7e\x53\x8b\x04\x0f\xbb\x08\x50\
\xf2\x64\xe5\x79\xd5\x5e\xd0\xe7\x47\x45\x28\x6c\x67\x05\x68\x5f\
\x8b\x1e\x3d\x04\x20\x95\xd1\x08\x5d\x83\x08\xb9\x1a\x11\x62\x45\
\x21\x04\xfd\x7e\x61\x46\x52\x17\xa0\x68\xbf\x02\x80\x11\x03\x02\
\x06\x81\x40\x20\x88\xcf\x9d\x73\x1e\x6e\xfe\xcb\x9f\x71\xeb\x5f\
\xee\xc0\xa9\xa7\x1a\x98\x3d\x73\x8e\x80\x00\xaf\x83\xe3\xd5\x74\
\x3b\x83\x09\x94\x95\x3f\x41\x3e\x3f\x5b\x6b\xb6\xd2\x2d\x1d\x65\
\xe5\x8f\x08\x6f\xdf\xe6\x51\xd0\x0b\xbc\xfc\x97\x2d\x36\x2b\x69\
\x92\x00\x84\xb3\x00\xcc\x04\x92\x44\xe5\x37\x34\xb7\xa0\x2b\x11\
\x97\xc6\x21\x13\xc6\x8e\x46\x6d\x55\x25\x7d\x66\x85\x2c\x97\xe6\
\xa2\x28\xbd\xdc\x3b\x50\x1b\x78\xaa\xe2\x68\x94\x95\x5e\x29\xbe\
\x02\x80\x91\x09\x02\x06\xfc\xfe\x20\xce\x3d\xf3\x1c\xdc\x7e\xe7\
\x6d\x04\x02\xb7\xa3\xb4\xa8\x80\x3d\xe7\xce\x97\x66\xa4\x7e\xd2\
\x8a\xdc\x0e\x76\x07\xf4\x5e\xcb\x9f\x84\x5b\x7c\xfe\x38\x36\x75\
\x76\xd8\x69\xbe\x70\x44\xfe\xf4\xb6\xd4\x9f\x59\x01\x57\x35\x72\
\x47\x5e\x56\x70\xee\x9a\xcc\x2d\xd1\xb8\x45\x77\x82\xdc\x88\x96\
\x8e\x4e\xa9\xdb\xaf\x23\x20\x19\x55\x5b\x8b\xea\xca\x98\xdd\x2c\
\xc4\x99\x13\x58\xbe\x06\x4a\x14\x00\xec\xd2\x20\xc0\x83\x47\xce\
\x38\xed\x0c\x49\xa3\xdd\x76\xc7\xdd\xa2\x34\x7b\xcd\xdb\x9b\x68\
\x70\x15\xfc\x9a\x8f\x7c\xe5\xfc\x0e\x01\x01\x56\xfe\x82\x43\xfb\
\xd9\xff\x6f\xef\xee\x46\x4b\x67\x27\x1a\x49\x59\x63\x64\xd1\xb7\
\x67\xf9\x59\x7f\x45\xf9\x63\x51\x69\xd3\x93\x24\x00\xe1\xb5\x0e\
\x3e\x7f\x00\xdd\x3d\x09\x74\x27\x7a\xc4\xc2\x57\xd1\xeb\x75\x95\
\x95\x52\xef\xa0\xc6\x7f\x29\x00\x50\xb2\x0d\x10\xf0\xb8\x7d\x38\
\x6d\xf1\xa9\xb2\x6c\xf8\x8e\xbb\xee\x95\x8c\xc1\x3e\xf3\x17\x90\
\xd5\xac\x15\x8b\xca\xd1\xf2\x4f\x96\xf6\xeb\xbd\x96\x9f\x7d\x72\
\x2e\xe7\x6d\x25\xe5\x1f\x55\x67\x2b\xff\xf6\x7d\x7e\x4b\x94\x9b\
\xa3\xf3\x9c\xe3\x4f\xa6\x53\x52\xd9\xc8\x0d\x50\xda\x89\xee\xf7\
\x70\xa5\x9e\x9b\xfd\xf9\x28\x62\x91\x90\xf0\x7b\x43\x05\xef\x14\
\x00\x6c\xf3\x76\x1a\x06\x8b\x28\x76\xb4\x94\x64\xba\x90\x07\xa7\
\x7c\xe6\x64\x89\xa6\xdf\xbb\xe4\x7e\xc9\x18\x2c\xd8\x7b\x5f\xd4\
\x54\xd5\x13\x08\xd8\xe3\xc9\x3e\x89\x6b\xc4\x3e\x7b\x39\xe0\xc7\
\x91\x75\xb6\xfc\x6d\x5d\xdd\x42\xd3\xff\x9d\xf2\xf3\xf3\xbc\xa2\
\xd1\x56\x7e\xbb\x2c\x97\x47\x7d\xfb\xe8\xf8\x38\x65\x98\xcd\xe4\
\x84\x19\x54\xd2\xe7\xd8\x75\xfa\xd8\x65\x5a\xa5\x6f\x7e\x3f\x2b\
\x00\x50\xf2\xa1\x99\x80\x4e\x7e\xf8\x09\xc7\x9f\x20\xe5\xae\x7f\
\xbb\xff\x21\x01\x81\xfd\x16\xec\x8f\xfa\x9a\xc6\x4f\x04\x04\xec\
\x80\x5f\x51\x82\x75\x1c\x80\xeb\x8c\xdb\x96\xbf\x4c\xfb\xb7\x17\
\x69\xe7\xbf\xcb\x96\xbf\x92\x94\x9f\x6b\xf2\x53\xac\xfc\x74\x4c\
\x7e\xa2\xfd\x9c\xd6\xe3\x05\x43\x1c\xfc\xe3\x69\xbf\x21\x7a\x5e\
\x15\xeb\x28\x00\x50\xf2\x21\x85\x23\xe9\x2e\x02\x81\xe3\x8e\x39\
\x5e\x2c\xf5\x43\x7f\x7f\x54\xca\x62\x0f\xd8\xef\x00\x34\xd4\x35\
\x4a\xf4\x3c\xfb\x11\x41\x80\x3f\x8f\x1b\x6a\xb2\xbf\xce\xd5\x7b\
\x9d\xf1\x1e\xb4\x74\x74\xa3\xa1\xba\x06\x15\x8e\xe5\xc7\xf6\x2c\
\x3f\xf9\xf1\xbc\x14\x97\x2d\x7f\x2a\x95\x12\xc5\x0f\xf0\xc8\x34\
\x02\x02\x0e\x56\xb2\xe5\x0f\x93\xbb\xc2\x65\xc2\x4a\xf9\x15\x00\
\x28\xf9\x18\x20\xe0\x26\x77\xe0\xe8\x23\x8f\x11\xff\xfc\x91\x47\
\x1e\x95\x95\x75\x87\x1c\x74\x30\x1a\xeb\x46\x4b\xbe\x9d\xfb\xe5\
\x7f\x18\x1d\x63\xcb\xcf\xab\xef\x12\x64\xa9\x99\xf6\x77\x26\x12\
\xd8\xd4\xd1\x85\xfa\xea\x2a\x54\x90\x52\x5b\xff\xc6\xf2\x7b\xca\
\x3e\x3f\x59\x7e\xae\x12\xe4\x3c\xbe\x28\x3f\xf7\x04\x20\x96\xc2\
\x01\xc0\x80\xcf\x03\x9f\xea\xcc\xa3\x00\x40\xc9\x27\x07\x02\x47\
\x1e\xbe\x50\x94\xeb\x6f\xf7\x3f\x28\x2b\xe8\x8e\x38\xfc\x70\x02\
\x81\x51\x0e\x13\xc8\x6f\x55\xd9\x98\xda\xf3\x56\x6e\xa0\xa1\xbb\
\x02\xb2\x08\xa7\x27\xd9\x05\xb7\x28\x7f\x0f\x36\xb5\x77\xc8\xf2\
\x5a\xae\xbd\xdf\x6e\xb4\x9f\xeb\xf4\xc9\xf2\xf3\x72\x5e\xfe\x3c\
\x51\x7e\xb1\xfc\x7e\xf4\x48\x5d\x7f\x5e\x32\x08\x01\x02\x08\xaf\
\x4b\x75\xe5\x55\x00\xa0\xe4\x13\x07\x81\x43\x0e\x3e\x4c\xfa\xec\
\xdf\x7b\xef\x5f\xc5\x8a\x2f\x5c\x78\x24\x46\x11\x08\x70\xf0\x8d\
\xc7\x69\xf5\x2f\xd1\x95\xf5\xfb\xf4\x5c\x47\x7b\x3b\x6a\xeb\x1b\
\xe0\xf3\x85\x01\x63\x25\xb2\x69\x1e\xdf\x35\x1a\x1d\x89\x36\xb4\
\x76\x74\xa2\xa1\xa6\x86\xe8\x3c\xa7\xf0\xcc\xed\xa6\xfa\x78\xc9\
\x72\x2c\x5a\x21\x41\xca\x54\x32\x25\xeb\xff\xfd\xc1\x00\x92\x99\
\x0c\x29\x7f\x49\x29\xbf\x02\x00\x25\x3b\x23\x26\x70\xe0\xfe\x07\
\x49\x9d\xc0\xdd\x77\xdf\x23\x6b\xe4\x8f\x3a\xea\x28\x8c\x69\x1c\
\x6b\x33\x81\x6c\x4e\x22\xed\xec\x83\xf3\xda\xfd\x3f\xdc\xf2\x27\
\xbc\xfa\xc6\x9b\xd8\x7b\xcf\xbd\x71\xe9\x67\x4b\x08\xe5\x7f\x01\
\x7f\x60\x22\x56\xe5\xbe\x8d\xe6\xf6\x20\x46\xd7\xb1\xcf\x1f\xd9\
\x6e\x6d\xbf\xad\xfc\x76\x2a\x4f\x94\x9f\x2d\x7f\x28\x28\xa9\xbe\
\x64\x3a\x2b\x2b\xfa\x94\xf2\x2b\x00\x50\xb2\x13\x41\x60\xbf\xbd\
\xf7\x93\x01\x9c\x5c\x27\xc0\x9d\x73\x8e\x3e\xea\x68\x8c\x69\x18\
\x8b\x60\xd0\xdf\x1b\xdc\xfb\xe5\x6f\x7e\x85\x17\x49\xf9\x03\xc1\
\x08\x1a\x83\x77\xc0\x6b\xa4\x01\xcd\x07\x57\x6a\x29\xea\xbd\x3f\
\x47\xa1\xe1\x3b\x64\xc5\xab\xe8\x53\x0b\xdb\x55\x7e\x8f\xd7\x2d\
\xab\xff\x98\xf6\xb3\xe5\x0f\x06\x6d\xe5\x4f\x64\xec\xe9\xbc\xba\
\x52\x7e\x05\x00\x4a\x76\x36\x08\x78\x30\x7f\xfe\x02\x59\x1c\xc3\
\x73\x07\x78\x38\xe7\xd1\x47\x2d\xc4\x94\x49\xd3\x45\x6b\x59\xf9\
\x1f\xff\xe7\x73\x88\x56\xd4\xe3\xcc\x23\xd7\xe1\x94\x63\x8b\x30\
\xbc\x7b\xc2\xcc\xb4\x03\x49\x03\xb1\xe2\x73\x70\x55\x3e\x84\xb6\
\xfc\x05\xb4\xfb\x36\xd6\x19\x38\x01\xbf\x01\xa9\x3e\xa2\xfc\x9c\
\x82\xec\x21\xe5\xe7\xfa\x01\x65\xf9\x15\x00\x28\x19\x0c\x10\x20\
\xab\xcf\xc5\x42\xf3\xf6\x9c\x4f\x20\x60\x11\x08\xdc\x2b\xad\xb8\
\xdc\x47\xbb\x71\xcb\xad\x77\xe0\xaf\x7f\x7f\x04\xbe\x60\x0d\x4e\
\x5d\xd8\x84\x53\x16\x76\xc1\x4a\x73\xcd\x7d\x0f\x11\x80\x4a\x68\
\xa5\x06\x18\xa4\xcc\xa1\xcc\x6d\x08\xf9\xe7\x21\x9d\xdf\x13\xba\
\x96\x42\xff\x26\x24\x76\x9e\xbf\x7f\x91\x4f\x4a\x06\x9d\x72\xba\
\xcf\x0e\xf8\x29\xe5\x57\x00\xa0\x64\x50\x85\xfd\x76\x8f\xc7\x87\
\x3d\x67\xef\x25\xfe\xff\x43\xa4\xf4\x57\x5d\xfd\x6d\xbc\xb9\x72\
\x15\x2c\xbd\x02\x17\x2f\xee\xc2\x39\x9f\xe9\x81\x55\xd0\xa5\xa8\
\x47\xcb\x6e\x80\x15\xdd\x0d\xf0\xd7\x13\x08\x24\x01\xfa\x3d\xe6\
\xbd\x19\x39\xd7\x74\x52\x72\x37\x01\x88\xd1\xe7\xf3\x7b\x9d\x85\
\x3d\xac\xfc\x44\xfb\x39\xcd\xc7\xee\x05\xd3\x7e\xa5\xfc\x23\x43\
\x54\x4b\xb0\x11\x02\x02\xa1\x50\x54\xa8\x7f\x57\x57\x1c\xcf\xbe\
\xf4\x3a\x7a\xd2\x6e\x7c\x7e\x51\x06\x9f\x3f\x2d\x49\x96\x1f\x28\
\xf5\x14\x61\x66\x8b\xd0\x8a\x69\x1b\x04\x3c\x11\x58\x81\x46\x98\
\xee\x0a\xf8\xd2\x4f\xa1\xc2\xfd\x20\x4c\x2d\x28\x6e\x80\x58\x7e\
\x0f\xf9\xfc\xa4\xfc\x9c\xe7\x4f\xa4\x92\x42\xfb\x79\xeb\x49\xe7\
\x24\xdd\xa8\x94\x5f\x31\x00\x25\x43\x44\x38\xda\x9f\xcb\x66\xf0\
\x3f\xff\xfb\x0b\x2c\x79\xf0\x51\x64\x4b\x7e\x7c\xf3\xec\x3c\xae\
\x38\x8f\xdc\x84\xa4\x07\x85\x44\x09\x16\xbd\x6e\x15\x73\xd0\xe8\
\x1b\x77\x57\x11\x10\xb8\x43\xb0\x7c\xc4\x02\x02\x04\x10\xc9\x95\
\x88\x14\x6e\x45\xca\xbd\x1f\x72\xc5\x06\x52\x6c\x43\x2c\x3f\x57\
\x1c\x72\x1f\x40\xee\xe2\x1b\x08\x06\xd1\x93\xca\x22\xcb\x15\x7e\
\xdc\xec\x83\x94\xdf\xad\xa6\xef\x2a\x00\xd8\x21\x16\x4d\x2d\x06\
\xfa\xc0\xc2\x79\x7e\x4e\xc9\xfd\xf8\x27\x37\xe2\x57\xbf\xff\x13\
\x32\x45\x2f\xae\xbb\x3c\x80\xff\x77\x29\x5d\xbf\x6c\x10\xc5\xbc\
\x9f\x76\xca\x12\x4a\x98\x76\xff\xfd\x5c\x06\xc5\xf6\x14\xbc\xee\
\x35\xd0\x6a\x08\x04\x42\x63\x09\x18\x7a\xe0\xca\x2c\x47\x2c\x7a\
\x07\xba\xfd\x57\x21\x16\x71\x89\xe5\x4f\x92\xe5\x0f\x05\x82\x92\
\xe7\x4f\x48\xaa\x2f\x2f\x2d\xba\x02\x1e\x47\xf9\xd5\xf7\xf3\x81\
\xef\x67\xe5\x02\x28\xd9\x61\xd6\x7f\xc9\x92\x25\xf8\x9f\x9f\xff\
\x0a\xa9\x8e\x1c\xbe\x75\x89\x1f\x57\x5f\xee\x81\x95\xd7\x50\x34\
\xa2\xd0\x7c\x21\x68\xde\x00\x3d\x12\xbd\xf7\x06\xa1\xf9\x83\xe4\
\x32\xe8\x30\xba\x53\xd0\x12\xef\x02\x9a\x8b\x40\x60\x02\xd1\xff\
\x08\x02\x99\x7b\x50\x13\x5a\x06\xc3\x0c\x90\xcf\xdf\x23\x3e\x7f\
\x80\x69\x7f\x2a\x23\x53\x7d\xb8\x3b\x10\x2b\xbf\x4b\xd7\x94\xf2\
\xab\x18\x80\x92\xa1\x20\x6c\xd5\x0b\x45\x1e\xa4\xe1\xc5\x37\x2f\
\xf7\xe1\xda\xaf\x04\x80\x3c\xf7\x19\x64\x7e\xe7\x23\x84\xf0\x43\
\xf3\x04\xfa\x36\x02\x01\xdd\xeb\x07\x67\xfd\x8c\x44\x0f\xf4\xd4\
\x6a\x58\xde\x18\xac\xf0\x44\xe8\xc5\x0e\x78\x12\x3f\x46\x26\xd7\
\x86\x60\x20\x26\x3e\xbf\x58\x7e\xa2\xfd\xfd\x95\x5f\x89\x72\x01\
\x94\x0c\x11\xe1\x99\x7b\x27\x9c\x70\x0a\xc6\x57\x3c\x8c\x63\x3e\
\x45\x16\xbd\xe8\x86\x41\xcf\xf1\x6c\x4e\x4d\x27\x60\xd0\x83\xd2\
\xa1\x47\x2b\x1b\x6c\x99\xc7\x6d\x92\xf6\x13\xcd\x2f\x1a\xd0\x7a\
\xda\xa1\xbb\x89\x15\x04\xc7\xd2\xef\x49\xe8\x89\xc7\x51\x59\xfd\
\x2b\x18\xd1\x6b\x91\x48\xf5\x48\x47\x5f\xee\x47\xa0\x94\x5f\x01\
\x80\x92\x21\x28\xa6\xe6\xc5\xa8\x8a\x7f\x62\xdc\xc1\xcb\x60\x21\
\x8a\x12\x6d\x9a\xa7\x00\x94\x12\xd0\x4b\x1d\x30\xfd\x51\x58\x3e\
\xee\xc2\xd3\x0f\x04\x60\xda\xd4\xdf\xca\xc3\xcc\xd3\xf3\xc9\x26\
\xe2\x82\x5e\x58\x15\xd3\x09\x18\x72\xf0\x75\xfd\x2f\xd2\x7a\x35\
\xb2\xb8\x0c\x2e\x97\x86\x80\xb7\xa4\x94\x5f\x01\x80\x92\x21\xe7\
\xbf\xb9\xc8\x8a\xf3\x0c\xbe\xcc\x13\x70\xa5\x5b\x48\xad\x4b\x44\
\xf1\x0d\x52\xf8\x28\xe0\xaa\x81\x56\x88\x43\xcf\x31\xc5\x1f\x0b\
\x8b\x5c\x01\xa9\xf4\x63\xeb\xaf\x31\x33\xd0\xa0\x95\x98\x0d\x30\
\x08\x98\xd0\x93\xeb\xa0\x85\x2d\x58\x95\xf3\xc8\x35\x78\x03\xc1\
\xf6\x6b\x51\x5d\x99\x46\x31\x74\x01\x5c\x1a\xaf\x10\xcc\xab\x0b\
\xae\x00\x40\xc9\x50\x11\x8e\xfe\x73\x20\x2e\x1e\x4f\x10\x3d\x3f\
\x01\x9e\xe8\x2a\x78\x92\x2f\x90\x01\x2f\x90\xa5\x27\x65\xf5\x90\
\x5f\xef\xad\x24\x26\x90\x24\x20\x58\x4b\xbf\xd7\x13\x08\xf8\xe8\
\x35\x0f\x2c\x8d\xa8\x3f\xd3\x01\xcd\x69\xb1\x6d\x16\x61\x15\x4c\
\x68\xe9\xf5\xd0\x82\x25\x02\x81\xbd\x60\xba\xa3\x08\x75\xdf\x08\
\xc3\x78\x17\xb9\xe8\x95\x04\x34\x93\x68\x5f\x5e\x2f\xa0\x9a\x78\
\x2a\x00\x50\x32\xf8\xca\x4f\x8f\x89\x9e\x24\x3c\xba\x85\x96\xae\
\x89\x78\x2f\xf3\x45\xcc\x69\x1c\x83\x70\xee\x61\x98\xb9\x0e\xc9\
\xf9\xc3\x5f\x45\x8a\x4f\xd6\xdb\x20\x40\x28\x36\x43\xd3\x09\x14\
\xdc\x21\x7a\x74\xf3\x0c\x5e\x9b\x0d\x94\x2c\xbb\xd5\xb7\x49\x00\
\x40\xba\xad\x65\x68\xbf\x52\x06\x56\x78\x02\x0c\x3f\xb1\x88\xf8\
\x53\x08\xe4\x96\xa1\x10\xbb\x14\x25\xdf\xb1\xe2\x66\xd8\x40\xa0\
\x9a\x7a\x8e\x08\x16\xa9\x2e\xc1\xf0\x54\xfe\x78\x4f\x42\xe2\x79\
\xf1\x54\x0a\xcd\x6d\xab\xe1\x76\x8f\x47\x5b\xf1\x2a\x74\x87\xae\
\x81\x15\x99\x49\x5f\x6c\xca\x56\xe6\x5c\x1b\xd1\x7d\x17\x41\x3d\
\x29\xbe\x11\x27\xa3\xdf\x49\xef\x23\x65\xf7\xb8\x68\xf3\x90\xcb\
\xe0\xa5\x47\x2f\xb8\x42\xc8\x2a\x71\x7a\x8f\xf6\x2d\xd2\x7e\xf1\
\xb7\xa0\x9b\x05\x58\xd5\xfb\x01\x3e\x1d\xbe\x8e\xab\x11\xe8\xbc\
\x14\xee\xd2\x53\x36\x73\x80\x0f\x43\x6d\x82\xb1\x12\xc5\x00\x76\
\x0d\xe5\x4f\x24\xc0\xf3\x73\x58\xf9\x37\xb4\xb4\xa2\xb6\xaa\x0a\
\x55\x15\xa4\x90\x96\x86\x44\xf1\x64\xe4\xbd\x73\x11\x75\xdf\x8c\
\x40\xe6\x01\x20\xd7\x0a\xd3\x60\x36\x50\x4b\x2e\x40\x98\xac\x7b\
\x8a\x3e\x83\x9b\x81\x84\x79\xf8\x1f\xbd\x87\x3f\xd3\x6d\xab\x32\
\xbb\x03\x86\xc9\xc1\x05\xfb\x0f\xa6\xd7\x49\xf7\x20\x8b\x98\x80\
\xe5\xde\x03\x7a\x7a\x29\xfc\x6d\x97\xc1\x88\x1c\x87\x42\xe8\x6c\
\x18\xfa\xee\xf4\x1e\xbe\x85\x8a\xea\xcb\x51\x0c\x40\xc9\x4e\xf1\
\xf9\x59\xf9\x1d\xcb\xbf\xa1\xb5\x4d\x94\xbf\x3a\x5a\x61\x0f\xe3\
\xf4\xb2\x4e\xa7\x90\xc9\x8f\xc1\xa6\xd2\xff\x43\x5b\xe0\x07\x28\
\x56\x1c\x48\xcf\x65\xa0\x67\xd6\x41\x2b\x24\x6c\x17\x80\x19\x80\
\x45\x0c\x82\x0b\x06\xdc\x6c\xfc\x5d\xfc\x07\xe8\x6e\x70\xcb\x2d\
\x61\x99\x9a\x6d\xdc\x75\x8f\xd0\x7d\x2d\xb3\x91\xde\xdb\x0e\x04\
\x1a\xc8\xf0\xc7\xe0\xea\xfa\x33\xb1\x81\xcb\xe0\x2f\xfe\x92\xf6\
\x5e\x43\x3b\x0e\x9d\x51\xe6\x4a\x14\x03\x18\x79\xca\xef\x94\xdd\
\xc6\x79\xd4\x16\xf1\xfe\x78\x92\x2d\x7f\x1b\x6a\xaa\x2a\x64\x50\
\x27\xfb\x02\xdc\x22\x8c\xa5\xc4\x2d\xbd\xac\xb4\x14\x02\x75\x17\
\x0f\x46\x97\x35\x03\x21\xeb\x6f\x68\xf4\xde\x01\x4f\x61\x3d\x19\
\xf8\x3a\xb2\xe8\x75\xd0\x8a\x49\x52\xee\x24\xbd\xd5\x47\xc4\x81\
\x18\x80\x9b\x14\xd8\x74\xd9\xae\xbd\x65\xd8\x6c\x40\x73\x6c\x84\
\x66\x8f\xef\x02\xbf\x87\x41\xc1\x5f\x4f\xec\xe0\x7d\x78\xcc\xdf\
\xc1\x55\xb1\x0c\x79\xdf\xa9\x28\xe1\x60\xd8\x01\x42\x55\x21\xa8\
\x00\x40\xc9\x27\x47\xd1\x48\xf9\x59\x27\x39\xe0\xc7\x74\xad\x9b\
\x94\x7f\x23\xd3\xfe\xca\x18\xaa\x2b\x62\x7d\x83\x32\x99\xbd\x5b\
\x26\x29\x78\x89\x36\x7e\x8e\xde\x65\xf5\xa0\xbb\xdb\xc2\xf2\x8e\
\x43\xd1\x58\x35\x15\x33\x6b\xef\x44\x94\x7c\x78\x33\x67\xc1\xf4\
\x35\x90\x3b\x10\xa7\x7d\x72\x04\x02\x1e\xb9\x15\x2c\xe1\x83\x9a\
\xf4\x14\x80\xcb\x6d\xc7\x05\x84\x22\xe8\x70\xc6\xf6\xda\x7f\x28\
\x10\xa0\xdd\x69\xcb\x76\x40\x4b\xbd\x05\x9f\x46\x68\xe3\x0d\x11\
\x08\x2c\x80\x94\x21\x2a\x51\x00\xf0\x71\x44\x2d\x06\xea\x53\x7e\
\x3b\xda\x6f\xb7\xee\xee\x4a\x24\xd1\xd4\xd6\x2e\x13\x75\xab\x2b\
\xa2\x65\xbd\x17\x10\xe0\x11\xdc\xac\xa3\x16\x29\xbf\x44\xf4\x69\
\xff\x54\x3a\x45\x2e\x43\x07\x02\x5e\x1d\xdd\xe9\xf1\x78\x36\xfd\
\x05\xcc\xa8\xab\xc5\x04\xcf\xdd\xb0\x4a\x1e\x58\x9e\x2a\xa2\xf6\
\xdd\x8e\xe5\x36\xd8\x89\xb0\x03\x7c\xbc\xc4\x57\xf3\xdb\xd6\x9e\
\x97\x0f\x4a\x4c\xc0\x61\x02\xbc\xd1\x71\xc1\x47\x4c\xc2\x2c\x42\
\xcb\x75\x42\xf3\x34\xc3\xab\xdf\x05\xc3\x5d\x07\xd3\x1a\xa7\x62\
\x02\x5b\xb9\x9f\x55\x0c\x40\xc9\x87\x56\x7e\xd6\xe8\x44\x32\xe9\
\x28\x7f\x0f\x36\x92\xf2\xd7\x55\x57\xd9\xca\x6f\x6d\x79\x63\x09\
\x6b\xb7\x6c\x5f\x9c\xc7\x7c\x75\x76\x25\xe0\x76\x7b\x64\x96\xa0\
\x46\x74\x3f\x9e\xc8\xe1\xc9\x65\x9f\xc6\xea\xfc\x69\x70\x15\xc9\
\x7a\x1b\x39\x58\xae\x20\xa3\x86\xf3\x09\x25\x27\xc2\xef\x95\xca\
\x40\xd9\x5c\x3e\xe9\x21\x08\xde\xcf\xc5\x96\x3f\x28\xc0\x61\x95\
\xd1\x87\x0f\x33\x4f\x20\x50\x78\x1f\x5e\xf3\xaf\x84\x15\x05\xe9\
\x52\x24\x2c\x42\xc9\xd0\xbf\xcf\xd4\x25\x18\xa2\x96\x9f\x14\x88\
\x6b\xf1\x75\xd2\x4d\xee\xdb\xbf\xb1\xb5\x1d\x0d\xd5\xd5\xb6\xcf\
\x6f\x6d\xdd\xd3\x66\x95\x63\xbd\x0b\xf3\x2a\x3e\x9f\x4f\xca\x78\
\x79\xe8\x27\xaf\x17\xc8\xe4\xf2\x64\x9d\x33\xa8\x8c\x78\xd1\x66\
\x7c\x01\xb9\xd0\x69\xd0\x73\x76\x09\x70\x2f\xb5\x97\x0f\x71\x91\
\x2b\xe0\xe9\x03\x00\x0e\xf0\x69\x5e\x9b\x0d\xb8\xbc\xb0\x08\x00\
\x34\x66\x0c\xdd\x2b\xc8\x7b\x48\xd3\x73\x21\x3b\x6d\x98\xeb\x82\
\xd7\x78\x11\x5a\xfa\x56\x64\x32\x19\x3a\x07\x97\x03\x62\x4a\x14\
\x00\x28\xf9\x90\x96\x9f\x2c\x38\xd1\x7e\x36\xc6\x5d\x49\x9b\xf6\
\x37\xd4\x90\xf2\x57\xfc\xbb\xa1\x1d\x4e\xf7\xde\x58\x05\x26\x8e\
\x1b\x83\x49\xe3\xc6\x12\x10\xf8\x91\xce\x64\x61\x94\x4c\xd4\x54\
\x56\x61\xe2\x98\x3a\x4c\x18\x55\x87\x42\xe4\x4b\x30\xfc\xb3\xed\
\x3a\x01\xb6\xec\x65\x36\x21\x8b\x85\x5c\x0e\xed\x77\xdb\x8a\xcf\
\xd3\x7d\x34\x72\x19\x1c\xb0\xb0\x3a\xdf\x82\x11\x6f\x86\x51\x2c\
\xd8\xe5\x40\x2e\x72\x17\x0a\x9d\xc4\x54\x3a\xf0\xaf\x87\x7e\x8c\
\x73\xcf\x39\x17\xf7\xff\xed\x7e\xf9\x28\x9e\x0a\xac\x44\xc5\x00\
\x94\x7c\x50\xe5\x67\xcb\xcf\xca\x4f\xbf\x77\xa7\x52\xd8\xd8\xde\
\x26\xe3\xba\x2a\x79\x06\x9f\xb9\x7d\x5f\xd2\x23\x6d\xbc\xa2\x32\
\x7a\x9b\xe7\x01\xd4\x54\x56\xa2\xbe\xb6\x16\xf9\x37\xde\x24\x00\
\x28\xa1\xb1\xa1\x0e\x75\x55\x55\xf0\x79\xb8\xab\x70\x23\x72\x91\
\xcb\x11\xe8\xfc\x0f\x29\x05\x16\x85\x67\x57\x40\x73\x7c\x7d\x38\
\x20\xa0\x3b\x81\x40\x97\xbd\x59\x9d\xcb\x50\x6c\x59\x49\xcf\xe9\
\xf4\x5f\x11\x7a\x21\x0f\xcb\x1f\xb6\xb3\x0a\xd9\x16\x94\xf2\x05\
\xbc\xf4\xe2\x2b\xc4\x38\xb2\x48\x26\xe3\x38\xe9\xa4\x93\x11\x8e\
\x44\xa4\x75\xb8\x12\xc5\x00\x94\x6c\x47\xf9\x35\x51\xfe\xa4\x58\
\x7e\x4e\xf5\x35\x33\xed\xe7\x3c\x7f\x24\xba\xfd\x40\x92\x05\xf1\
\xf5\x63\xb1\x98\x00\x47\x0f\xb1\x06\x1f\xf9\xfd\xdc\x2c\x24\x93\
\x2f\xa1\xa6\xaa\x1a\x63\x46\xd5\x0b\x8b\xe0\xb1\x62\xf6\xc4\xdf\
\x02\x4a\xae\x03\x50\x0c\x7f\x9a\x94\xb8\xc3\x0e\xf6\x95\x69\x04\
\x2b\x3f\xff\xae\x95\x59\x80\xad\xfc\x66\xf7\xbb\x28\x6c\x7c\x1d\
\x26\xb1\x09\x0e\x34\x5a\x66\x09\x56\x29\x6f\x03\x88\xaf\x02\x20\
\xe5\x9f\x33\x39\x8b\x39\xd3\x3c\xd8\xd8\xdc\x85\x87\x1e\xfe\x3b\
\x7e\xfc\x93\x1f\xe3\xbd\x77\xdf\x95\xee\xc2\x5c\xcb\xa0\x44\x01\
\x80\x92\xcd\x44\x14\x83\xf3\xfb\x64\xf9\xd9\x0a\x77\x93\x02\x37\
\x73\xc0\xaf\xaa\x52\x26\xf1\x98\xd8\xde\xa0\x4e\x48\x0d\x00\xd3\
\x7e\x8d\x7e\x61\xf6\xc0\xfe\x3f\x8f\x09\x93\xd6\xdd\xf9\x1c\xb1\
\x82\x08\x6a\x89\xfe\xbb\xf5\xfe\x83\x3a\x4d\xa9\xe2\x2b\x06\xce\
\x80\xe9\x69\xb4\x73\xfc\xa2\xf0\x96\x7d\x57\x30\x13\x20\xfa\x6f\
\xb9\x1d\x17\xa0\x6b\x05\x8a\xeb\x5e\x82\x25\x95\x82\x7a\xef\xdf\
\x16\x20\x28\xd9\x19\x08\x78\x2b\x51\x53\xed\xc1\x09\xfb\x16\xd0\
\xd3\x1d\x47\x4b\x6b\x07\x56\xad\x5e\x8d\xeb\x6f\xf8\x21\xee\xbe\
\xeb\x4e\x64\x33\x19\xf8\xe8\xd8\x54\x80\x50\x01\x80\x92\xb2\x0f\
\x46\xca\xc5\x0a\x9e\x48\xd8\x3e\x7f\x9c\x18\x40\x73\x7b\xbb\x50\
\x75\xa6\xfd\xdb\x1b\xf9\x6b\x3a\xca\x5f\x55\xc9\xca\x6f\xd2\x67\
\x24\x64\x5a\x8f\xdf\x99\xd2\xcb\x03\x40\x39\x83\x10\xf0\x7b\xe1\
\x96\x32\xe2\xcd\x3f\xab\x08\x43\x9b\x46\x2c\xe0\x54\x80\x27\x06\
\x99\xce\x70\x10\xa7\x0e\xc0\xe2\x5c\x3f\xd7\x07\xb5\x2f\x45\x61\
\xc3\xab\x76\xb2\x40\x77\xf7\xc5\x0a\xb8\x24\x91\x63\x12\xc2\x04\
\x8a\x74\x1e\x6e\xb8\x2b\xaa\xb0\x70\xdf\x2c\x0e\x99\x03\xbc\xb7\
\xaa\x09\xf1\xce\x2e\xa9\x4f\xb8\xfb\x9e\xbb\x71\xfd\xf5\x3f\xc0\
\x6b\xaf\xbe\x22\xc7\xa4\x62\x03\x0a\x00\x76\x79\xca\xcf\x4a\xc0\
\xbe\x71\x9c\x95\x9f\x53\x7d\x64\xbd\x9b\x3b\x3a\xc8\xe7\xaf\x46\
\x4c\x46\x74\x63\xbb\x01\x3f\x6e\xdd\x5d\x4d\x96\xdf\x12\xe0\xe8\
\x11\xab\x1f\x08\x04\x91\x64\xe5\x2f\x94\x44\x3f\x7d\xb4\x8f\x4b\
\xdb\x56\xf7\x5e\x7b\x39\x70\xd1\xfb\x69\x58\x81\x3d\x88\xc2\x93\
\x2b\x60\x95\x9c\x03\xf4\x0b\x28\x58\x2d\x2f\xc2\x68\x5d\x66\x57\
\x08\x96\xd7\x08\x70\xb1\x90\xe3\xb2\xf4\xa3\x02\xc2\x0e\x74\x5f\
\x18\xe3\xa6\x84\x70\xde\xc2\x2e\x4c\x6d\xd4\xb0\x7c\xe5\x5a\x74\
\x10\x9b\x09\x84\xc3\x58\xb3\x7e\x3d\x7e\xf4\xe3\x1f\xe3\x77\xbf\
\xbb\x09\x2d\x2d\x9b\xc4\x2d\xd0\x75\xe5\x16\x28\x00\xd8\xc5\x14\
\x9f\x87\x7a\x72\xb5\x1e\xe7\xea\x79\xbc\xb6\x5b\x94\x3f\x81\x16\
\xb2\xfc\x8d\x35\x35\x42\xfb\xb7\xe7\xf3\xcb\xac\x3e\xfa\x8c\x2a\
\x56\x7e\xd8\xeb\x03\x58\xf9\xed\x89\x3d\x59\x49\xfb\xb1\xf2\xfb\
\x49\xf9\xdd\xff\xb6\x93\x0f\x5b\xee\x46\x62\x01\xa7\x90\x07\xe0\
\x25\x10\xe8\x06\x4a\xc4\x06\x32\xeb\xa1\xb5\xbd\x02\x2b\xd9\xea\
\xd4\x03\xb8\x1d\x66\xc0\xab\x08\xdd\xb4\x2f\x3d\xc2\xd5\x0b\x02\
\x72\xbc\xb4\xf1\x7c\xd1\x70\x4d\x1d\xf6\x99\xa7\xe1\xa2\x63\x93\
\x88\xfa\x35\xac\x59\xbd\x01\xad\x9b\x5a\xe5\xbc\x83\x91\x30\x1e\
\x7f\xea\x49\x7c\xf7\xba\xeb\xf0\xe0\xfd\x7f\x45\x2e\x97\x11\x20\
\x50\x6e\xc1\x20\x31\x50\x75\x09\x76\xbc\xf0\xcd\x5d\x4e\xef\xf1\
\x38\xef\x6c\x26\x2d\x33\xf5\xd8\x00\xb3\x15\x6f\xed\xec\x46\x5b\
\x77\x37\x46\xf1\x88\xee\x48\xc4\x09\xd2\x6d\x47\xf9\x1d\x9f\xdf\
\x72\x7c\xfe\xf2\xac\x3e\x6e\xe0\x59\x28\x16\x65\xbd\x80\xef\x03\
\x29\x7f\xf9\x43\x4d\x14\x5c\x47\xc0\x1d\x7e\x02\x7a\xf7\x13\xd0\
\x72\x9b\x48\xe9\x03\xe2\xfb\xeb\xa1\x18\x50\xc8\x42\x33\x4a\x03\
\xa7\x07\x5b\xc4\x02\x5c\x9a\x64\x03\xca\xba\xeb\x92\xb5\x44\xb4\
\x8f\xdb\x8b\xea\xf1\x8d\x38\xea\xc0\x26\xb4\xc5\x73\xf8\xe5\xfd\
\x3e\x34\x6d\x6c\x92\x89\xc5\x91\x8a\x28\x62\x15\x31\xa4\xf3\x39\
\xfc\xe1\xe6\x5b\xf0\xf2\xcb\xaf\x60\xd1\xe2\xc5\x98\x31\x63\xa6\
\xb0\x14\x1e\x3e\xaa\x44\x01\xc0\xf0\x56\x78\x68\x7d\x55\x39\xb0\
\x07\x79\xe6\x72\x39\x14\xc9\x32\x17\x4b\x76\xb5\x9d\x4e\xb4\x9c\
\x3b\xed\xb6\x77\x27\xb0\xa9\xa3\x13\xa3\xea\x6a\x11\x65\xe5\x37\
\xb7\x1d\xf2\xb3\x27\xf6\x78\x50\x59\x19\x13\x65\xe2\x8c\x41\xc0\
\xef\x93\xf6\xdd\x89\xa4\xad\xfc\xba\xfe\x21\x95\xdf\x86\x25\xfa\
\xbc\x5a\x14\x82\xe7\xc2\x6f\xac\x03\x52\xef\xdb\x77\x86\x27\x42\
\xac\x9f\xac\x33\x29\xb4\xcb\x28\xc0\x34\x8a\x36\x08\xf4\x1e\x23\
\xbb\x02\x9a\x64\x0c\xdc\x6e\x0d\xe9\xee\x34\x3a\x7b\x48\xf9\x6b\
\x5c\x08\x55\x05\x51\x37\xa1\x0e\x9f\x3d\xa2\x05\xad\xf1\x4a\xdc\
\xf6\x94\x45\xfb\xb4\x49\x2a\xb3\x44\xee\x49\x94\x5c\x9c\x6a\x02\
\xbc\x15\xab\x56\xe1\x86\x1f\xdd\x80\xe3\x8f\x3d\x0e\xc7\xd0\x16\
\xe1\xf1\xe3\xa5\x92\x6a\x3b\xbe\xab\x03\xc0\x70\x5a\x0b\x20\xf4\
\x55\x13\xb5\x67\xc3\x28\xc1\x39\x1e\xa6\xc9\x93\x75\x78\xa2\x6e\
\xc9\x30\x85\xaa\xf3\xf9\xd8\xbb\xea\x12\x08\xe3\x20\x5d\x73\x7b\
\x07\x2a\xc2\x21\x84\x83\xc1\x5e\xdf\x7e\xdb\x96\x9f\x95\xbf\x42\
\xfe\x80\x28\x7f\xc0\x4b\xd4\x3f\x88\x78\x2a\x83\x02\x29\x0d\x2b\
\xbf\xdf\xe3\x22\xe5\xff\x28\xf5\xe7\x45\x14\xb4\xfd\x80\xc8\x97\
\xe1\xd3\x6e\x82\x96\x5c\x6e\xc7\x03\x3c\x15\xa4\xdf\x3e\x9b\x0d\
\xd0\xb9\x80\x03\x7e\x62\xa5\x4d\xbb\xf4\x98\xce\xc5\xe3\x75\x61\
\xcd\xaa\x34\xfe\xe7\xce\x18\x56\x6e\xaa\x25\x30\xf3\xe1\xe2\x53\
\x53\x58\x30\xdf\xc2\x98\x62\x01\x17\x1c\xdb\x85\xd6\xee\x1a\x3c\
\xf6\x5a\x86\x5c\x97\x2e\x39\x49\xbe\x3e\xc1\x50\x48\x80\x80\xad\
\xfe\x5d\xf7\xde\x8b\xe5\x2b\x96\xe3\xcc\x33\xce\xc2\x94\x69\xd3\
\x04\x28\x07\x30\x8e\x61\x2c\x43\xf9\x3e\x56\x0c\xe0\x23\x5b\x79\
\xbb\x29\x67\x59\xf9\xcb\x4a\x2a\x8a\x4f\x8a\x62\xca\xcd\xab\x49\
\x90\xcb\x43\x8f\x0c\x0c\x7c\x53\xf3\xd3\x9a\x13\x0b\x48\x67\x53\
\xf2\xc8\x43\x3d\xf5\xed\x68\x7f\x99\xf6\x57\x89\xe5\xb7\xa4\x4a\
\xd0\x2f\x96\x3f\x88\x1e\xa1\xfd\x76\xb4\x9f\x95\x9f\xe9\xff\x47\
\xbb\xdf\x38\x2d\xa8\x13\x08\x1c\x03\x33\x5c\x07\xbf\xfb\x0f\xd0\
\x7b\x9e\x85\x5e\x8a\x43\xf3\xc7\xec\x62\x20\xcb\x4d\x4a\xc9\xa9\
\x41\xd3\xa9\x48\xb4\xe8\xef\x59\x78\xff\xdd\x1e\x5c\xfd\x4b\x1f\
\x5e\x5f\x13\x41\xc8\x9b\xc1\xfa\x8d\x39\xac\x6d\x8a\xe1\x3f\xbf\
\xec\xc6\x81\xfb\x5a\x98\x4a\xee\xc3\x97\x4e\xee\x44\x47\x4f\x2d\
\xde\x58\x6d\x9f\xb3\x69\x46\x6c\x70\x2c\x15\x11\x0a\x05\x51\x53\
\x5b\x2b\x6c\xe0\xc6\x1b\x6f\xc4\xe9\xa7\x9f\x86\x03\x3f\x75\x90\
\xc4\x1b\x94\x4b\xa0\x82\x80\x43\x0f\x35\xc9\x1a\x7a\x7d\x7e\x09\
\x6a\x71\x4e\x9c\xd7\xe0\xb3\x05\x66\x0a\x5e\x2a\x99\x52\x89\xc7\
\xd6\xbf\x48\x37\x77\x2e\x57\x40\x36\x9f\x17\x17\x80\xab\xf1\x34\
\xe7\x3f\x59\x47\x43\x2e\x00\x2b\x36\x2b\x2f\x8f\xfa\xde\x5a\x91\
\x2f\x03\x06\x07\xfc\x84\xf6\xc3\xee\x06\xc4\x80\x11\x24\xa5\xb1\
\x47\x74\x17\x6d\xe5\x77\xdb\xca\xff\xf1\xc4\x94\x5e\x00\x25\x6b\
\x6f\x64\x02\xdf\x46\xb1\xfa\x02\x3a\x37\x37\xf2\xf1\x4d\xd0\x72\
\xdd\xd0\x4a\x69\xfa\x5b\x26\x74\x3a\x1e\xee\x2b\xe8\x62\xa7\x9f\
\xce\xf3\xb6\x87\xf2\xf8\xc7\x9b\x41\x54\x04\x8a\xc4\x3e\x48\xa1\
\x7d\x39\x74\xb4\xb5\xe1\xdb\xff\xed\xc3\x3f\x5f\xa8\x45\x60\x42\
\x2d\xe6\xcc\xf5\xe3\xab\x9f\xed\xc2\xc4\x7a\x1d\x9b\xda\x7a\x90\
\xcd\xa6\x91\x49\xa7\xa5\x68\x29\x91\x48\x22\x95\x4c\xc9\x3c\xc2\
\x1c\xb9\x19\x37\xfd\xf6\xb7\xb8\xf7\x9e\xbb\x50\xa2\xeb\xa9\xd2\
\x85\x0a\x00\x86\x94\x78\xb8\xa2\x4d\x72\xf7\xa6\x58\x74\x4e\xe3\
\x95\x78\x23\x2b\xcc\x8f\xf9\x52\x41\x80\xa0\x58\xe4\xc7\x82\xac\
\xcf\x67\x75\x97\x40\xa0\xe3\x26\xc8\xb2\x5d\xb2\xa0\x21\x7f\x00\
\x41\x02\x92\x32\x60\x98\x5b\x59\xdd\xe7\xf5\x92\xe5\xaf\x8a\x49\
\x17\xdf\x44\x3c\x8e\x10\x47\xfb\x43\x01\x19\xd4\xc9\x81\xc4\x32\
\xed\xff\xe4\xfa\xf6\xdb\x55\x82\xa6\x59\x0f\x33\x70\x29\xee\x78\
\xfa\x30\x1c\x7d\x5e\x0a\xdf\xfa\x51\x12\x4f\x3d\xbc\x09\x9b\x56\
\xae\x23\x40\x68\xa7\x53\x28\x49\x2a\xd0\xa2\xe3\xea\xe8\x26\x06\
\x82\xf2\xec\x00\xe1\x37\xf0\x79\x2d\x74\x92\x7b\x73\xcd\x4f\xbd\
\x78\xfa\xf9\x06\x84\x26\x36\xe2\x80\x7d\x74\x5c\xb9\xb8\x1b\x95\
\x21\x0d\x2d\x6d\x71\xb9\x76\xf9\x6c\x4e\x66\x10\x26\x89\xd5\x24\
\xe2\x09\x78\x5c\x9c\x29\x88\xe0\xae\x25\x4b\x70\xf3\x9f\xfe\x48\
\x40\x91\x55\x20\xa0\x00\x60\xf0\x7d\x38\xa6\xad\x3e\xbf\x5f\x2e\
\x19\x2b\x38\xa7\xda\x8a\x85\x92\x58\xf9\xa2\x61\x48\xe0\x8a\x1f\
\xb9\x20\xc6\x28\xf2\xf3\x36\x75\x65\xa5\xd0\x1c\xc5\xe7\x8d\x57\
\xe7\x95\x3f\x93\x2b\x00\xb9\xce\xdf\x23\x8b\x6d\x9c\x75\x38\x9a\
\xb3\x3f\xa7\xf1\xfc\xde\x5e\xda\x1f\xef\xe6\x54\x9f\x53\xe4\xc3\
\xca\x5f\x2c\x0e\xa0\xfd\x9f\xbc\x14\x09\x90\x7c\xf0\x54\x1e\x8b\
\xb5\xf1\xc9\xb8\xe9\x81\x0a\x5c\x72\x63\x14\x37\xfe\x31\x88\x57\
\xfe\xd9\x81\x8e\x35\xeb\x61\xe4\xb3\xd0\xc3\x41\x1c\xb9\x9f\x07\
\x85\x6c\x37\xb1\x11\xbb\x36\xa8\xec\x24\xf9\x09\x04\xe2\x9d\xed\
\xb8\xe6\xbf\xbd\x78\xea\xf9\x51\x88\x4e\x6e\xc0\xe1\x07\x18\xb8\
\x72\x11\x9d\x8b\x47\x43\x5b\x7b\x5c\x5c\x80\x62\xbe\x80\x14\xb1\
\x81\x24\xb1\x01\x76\x6f\xf8\x5a\x56\x56\x56\xe2\xb1\x27\x9f\xc4\
\x1f\x7e\xff\x3b\x64\x32\x69\x05\x02\x3b\x48\x5c\xd7\x5e\x7b\xed\
\x90\x3a\xa0\xf7\xde\x7b\x8f\x2d\xc3\xe7\x76\xdf\x7d\xf7\xf1\xd2\
\xe4\x62\x90\x85\xad\x32\x5b\x7c\x2e\x61\x65\x4b\x5d\x22\xab\xce\
\x37\x28\x53\x7d\xd3\xb0\xe9\xbe\x3c\x96\x6c\x0b\x2e\x3f\xf3\x7b\
\x74\xdb\xe2\x97\xe3\x05\x76\xe1\x8e\x87\x14\x38\x28\xcc\xc0\x76\
\x04\x20\xa3\xb6\xbd\xd2\x95\xd7\xae\xe7\xe7\xcc\x00\x57\xed\x31\
\xd8\x70\x56\x40\x52\x7d\x4e\x9e\x9f\x47\x74\x97\x53\x7d\x3b\x56\
\xf9\x1d\xe0\xa3\x73\x9b\x34\x61\x2c\xc2\x21\x1d\x71\xb2\xfa\xe9\
\xbc\x86\x67\xdf\x2c\x60\x6d\x4b\x18\x95\xee\x0c\x62\xbe\x6e\x3a\
\x26\x2f\xa6\xef\x51\x89\x42\x22\x8e\x47\x9f\x2f\x20\x12\x0d\xdb\
\x41\x48\xe7\xcc\xe9\x10\x91\x49\xa5\xf1\xdc\x1b\x11\x4c\x18\x1b\
\xc1\x8c\x39\x06\x46\x07\xbb\xe1\x25\xe6\xf0\xd2\x0a\x2f\x32\xd9\
\x02\x9d\x9b\x4f\xae\x50\xd1\x89\xfe\xcb\x35\x27\xc0\x0d\x47\x23\
\x78\x97\xee\x87\xce\xb6\x56\xcc\x9a\x35\x5b\xbe\x03\x73\x18\x06\
\x06\xb9\xce\xe1\xe6\x9b\x6f\x5e\xbd\x7e\xfd\xfa\x77\xe9\xd7\xb5\
\xb4\x75\x9e\x77\xde\x79\xc6\x84\x09\x13\x54\x10\x70\xa8\x5b\x7e\
\xb6\x3c\x1c\x7d\x67\x8a\x5f\x60\xeb\x4e\x56\xde\x34\x2c\x19\x9f\
\xcd\x29\x2d\x01\x00\xde\x2c\x3b\x35\xc6\xd6\xdb\xad\xf5\xe5\xc6\
\xcb\xea\xc9\x37\x34\xef\xc3\x3e\xaf\xf4\xf0\x13\x73\xef\xdc\xec\
\xa4\xf4\x2e\xdd\x5e\x2c\xc3\x75\x01\x6e\xba\x61\xbc\x6e\x2f\x4a\
\x46\x49\xa6\xf4\x72\x9e\xdf\x47\xee\x42\x77\x22\x25\x2c\x83\xd9\
\xc8\x27\xe3\xf3\x7f\x80\xf3\xa7\x73\x3f\xfd\xb4\x33\xd1\xd2\xd2\
\x86\xb7\x96\xbd\x4d\xa0\xe4\xc7\x8b\x2b\x9b\xd0\x1a\xaf\xc6\x85\
\x64\xad\x8f\xca\x6f\xc4\xe8\xdd\xea\x71\xcd\x55\xa3\x51\x32\x9b\
\xf0\xbf\x77\xb4\x61\xc2\xf8\x06\x3a\x1f\xc3\xa9\x64\xd4\xc4\x1d\
\x48\x11\x80\x5c\xfb\x3f\xb5\xc8\x5e\x32\x09\xc7\x2d\x28\x62\x71\
\xbe\x85\x5c\x18\x1d\xbf\x7c\x20\x80\xae\xae\x38\xaa\x6b\x2a\x25\
\xbb\x91\xc9\x64\xe5\xba\x32\xf8\x70\x90\x90\x17\x38\x3d\xff\xd2\
\x4b\x02\xc2\xe7\x9f\x7f\x21\xbc\x0c\xc4\x2a\x30\xa8\x00\x60\x67\
\x59\x7e\x56\x7e\xf6\xb5\xb9\x80\xc7\xe0\xcd\xb2\x2d\x3e\x2b\xb3\
\x28\xbe\x69\x47\xc3\x59\xa9\xb9\x55\xb7\xab\x9f\x53\x25\xc1\x3e\
\xfa\x1c\x9f\xcf\x2b\x16\x9d\xa3\xf5\x9a\x56\xea\x05\x16\x8e\x13\
\x58\xce\x12\x5f\x4d\xe2\x02\xa6\x54\x08\x6a\x64\x09\xd3\xec\x1f\
\xe7\xf3\x62\xf9\x7d\x3e\x3f\x9a\x5b\x3b\xe4\xc6\xe7\xba\x7e\x9f\
\x7b\xe7\x0d\xea\x94\xbf\x49\xcc\xe3\xcc\x33\xce\xc4\xf5\x37\xfc\
\x80\xac\x99\x4f\x5c\x96\x55\xef\xaf\xc7\x7f\x2d\x09\xa3\xbd\xc7\
\x23\xca\x3c\x61\x46\x11\xd7\x7e\xb3\x91\xae\xd3\x26\xfc\xe2\xae\
\x16\x4c\x99\x54\x4f\x00\x65\xc2\x3e\x3d\x8d\xde\x67\xa1\xa7\xab\
\x1d\x3f\xfc\x75\x3d\xf9\x09\xd3\x71\xdc\x9e\x45\x9c\x56\xe8\x44\
\x22\xa3\xe3\x4f\x8f\xf9\xa0\x75\xc6\x51\x55\x5d\x29\xcc\x26\x97\
\xcd\x3a\xa0\x6a\x09\x28\x44\xa3\x51\x3c\xff\xaf\x7f\x49\xbd\xc3\
\x59\x67\x9f\x23\xc1\x47\x05\x02\x0a\x00\x76\xb8\xcf\x2f\xca\x4f\
\xfe\x29\x53\x53\x43\xf2\xfa\x46\xaf\xd2\x1b\x96\xe1\xe4\xc1\x6d\
\xaa\xaf\x43\x43\x7f\x83\x5c\x2e\x06\xe2\xd7\x58\xf9\x93\x99\x0c\
\x36\x36\x35\x93\x35\xf4\x0a\xd5\x67\xe5\xe6\x00\xde\xa8\xfa\x7a\
\x09\x12\x4a\x01\x10\x37\xde\x75\x02\x88\x70\x5c\x86\x00\x59\xff\
\x75\x4d\x2d\xe4\xf7\xa7\x89\x8a\x07\xa5\xc8\xc7\xe3\xd2\x77\x6a\
\x6e\x99\x83\x75\x13\x26\x4e\xc4\xe2\x93\x17\xe1\xb7\xff\xf7\x7b\
\x34\x8e\x19\x25\xec\x65\xdd\xfa\x66\xfc\xe6\x01\x9f\xb0\x81\xcf\
\x65\xba\x88\xde\x97\xf0\xbd\xab\x6b\x09\xc4\x3a\xf0\xeb\xbb\x5b\
\x31\x65\x72\xbd\xa4\x09\xed\xe0\xa6\x0d\x02\xa9\x78\x1b\x7e\x7c\
\x53\x3d\xf4\x4b\x66\xe2\x98\xd9\x6f\xe2\xfc\x52\x37\x31\xab\x4a\
\xdc\xfa\x04\x5f\xcb\x38\x2a\xab\x79\x49\xb3\x2e\xd7\x87\x01\xb1\
\xdc\x00\x25\x10\x0e\xe1\xa9\x67\x9e\xa1\x6b\x10\xc6\x29\x8b\x16\
\x3b\xa9\x44\x35\x9d\x48\x01\xc0\x0e\x01\x00\x5e\x44\xe3\x95\xe0\
\x94\x44\xf4\x85\xf6\x1b\xbd\xfe\x3e\xca\x51\x7d\xbd\x4c\xf1\xb7\
\xae\xfc\x70\x7c\x7f\x56\x68\x0e\x0c\x56\xc7\x2a\xe1\x27\x36\x90\
\x4e\xa7\xe1\x8f\x84\x51\x53\x55\x25\x37\x39\xbb\x05\xdc\xb9\xc7\
\x74\x0a\x85\xca\x9d\x79\x43\xa1\x10\x36\xb5\x75\x60\x53\x6b\x2b\
\x42\xc1\x10\xa2\xec\x0a\x10\x28\x0c\x46\x61\x09\x03\xd3\x81\x07\
\x1d\x8c\x57\x5e\x7d\x0d\x6f\xbc\xb5\x14\xf5\xf5\x0d\x42\xef\x5b\
\x36\xb5\xe0\xce\xa7\x81\x4d\x5d\xb5\xf8\x52\xb6\x03\xf3\xe7\x97\
\x70\x3d\x81\x80\x85\x4e\xfc\xfa\x1e\x02\x81\x89\x75\x70\xbb\x34\
\xa7\x99\x89\x46\xec\xc1\x42\xa2\xbd\x15\x3f\xfa\x0d\x01\xdf\xc5\
\xb3\x70\xf4\xbc\x37\x71\x91\xd9\x45\x2e\x56\x25\x6e\x7b\x86\x63\
\xac\x71\x49\x79\x72\xbb\x91\x42\xbe\x60\x57\x56\x95\x83\x8a\x01\
\x3f\x1e\x7a\xe4\x61\x44\xe8\xda\x1d\x7d\xec\xf1\xaa\x71\xac\x02\
\x80\x1d\x40\xfd\x49\xc9\x3d\x44\x73\xd9\xff\xce\x39\xd4\x9f\x23\
\xd5\x86\x73\xb3\x49\x60\xcf\xb2\x33\xf9\xe5\xd8\xfe\x40\xe5\xc7\
\x16\x33\x32\x0a\xf9\xa2\x7c\xae\xcf\x6b\xbb\x13\x41\xa2\xd4\xa3\
\x1a\xea\x91\x24\xab\xbe\x6a\xdd\x7a\x44\xc9\xaa\x85\x02\x41\x72\
\x07\xfa\x68\x2d\xd7\xf6\xf3\xdf\x5f\xb5\x76\x83\x44\xd6\x63\x91\
\x10\xed\xe3\x1f\xb4\x1b\x9e\x8f\x9f\xfd\xef\xc5\x8b\x17\x61\xc5\
\x8a\xe5\xd0\xdd\x2e\x54\xc6\xec\xb6\xe4\x1e\x57\x07\xfe\xf1\x66\
\x06\x9d\xc9\x5a\x7c\x35\xdd\x85\x83\xf7\x6f\xc1\xf5\xdf\xa8\x25\
\xc5\x8f\xe3\x97\x77\xb5\x61\xe2\xf8\x06\x52\x7c\x73\x00\x08\xc4\
\x09\x04\x6e\xf8\x4d\x1d\xac\x8b\x66\xe3\x98\x3d\x97\xe2\x62\x83\
\x40\xc0\xac\xc2\xdd\xff\x84\x0c\x2d\x89\x55\x56\x48\x11\x55\xb1\
\x98\x47\x3a\x65\x39\x60\x6a\x49\xa3\x93\x7b\xef\xbb\x0f\xd1\x8a\
\x18\x0e\x38\xf0\x20\x09\xa8\x42\x81\xc0\x47\x16\x95\x06\xdc\x8c\
\xfa\xb3\x7f\xc9\x0a\x9d\x2b\xe4\xed\x74\x9f\xe4\xf2\x6d\xab\xef\
\xb2\x1d\x75\x09\xda\x79\x39\x2d\x65\x61\x2b\xca\xbf\xb9\x6f\x6e\
\x17\xfd\xf0\x7b\xba\x13\x09\xa4\x32\x69\xb2\xe6\x41\x24\x93\x29\
\xac\x5a\xb3\x56\x9a\x77\xf0\x92\x5e\xa3\x9f\xf2\xf3\x3a\x01\x3f\
\x3d\xbf\x66\x7d\x13\x12\xc9\x1e\xa2\xbd\x01\xd9\x67\xb0\x6f\xf4\
\x22\x01\xd2\xa4\xc9\x53\x70\xcc\xd1\x47\x23\x1e\x8f\x23\x52\x11\
\x41\x38\x1c\x46\x75\x4d\x2d\x1a\xea\xa2\x78\x67\x8d\x85\x6b\x6f\
\xae\xc6\x83\x8f\x5b\xe8\x5a\xdb\x82\xef\x7e\x25\x84\x2f\x9d\x9a\
\xc6\xba\x0d\xad\xe4\x46\xa1\x37\x2b\xc2\xd7\x48\x40\xa0\xa3\x15\
\xd7\xff\xc6\x8b\x07\x97\xce\xc5\xac\x79\x31\x5c\xf2\xe9\x6e\x9c\
\x72\x60\x91\xce\xb9\x80\xee\xee\x04\x01\x46\x49\x98\x10\xb3\x0f\
\x66\x4d\xe9\x54\x8a\xc0\x34\x4f\xe0\x6c\xe0\x2f\xb7\xfd\x05\xef\
\xbc\xfd\xa6\x44\xd8\x95\x8c\x30\x06\x30\x58\x73\x01\xc4\xc2\x93\
\xa2\xe6\x88\x7a\xe6\x73\xce\xc2\x1d\x58\x7d\x4d\x73\xb9\xe3\x2e\
\xf9\xe1\x9c\xe3\xe7\xd7\xb8\x28\x27\xcf\x69\x41\x8e\xcc\x6b\xfa\
\x56\xa6\x63\x39\x85\x3f\x5c\x48\xeb\x72\xa1\xae\xba\x5a\x02\x82\
\x5c\x1d\x98\x27\x80\xa9\x20\x2a\x1b\xe3\x8e\x3f\x8e\x2f\x6b\xf3\
\x0a\xb2\xa8\xc4\x14\xd2\x99\x1c\x56\x6f\xd8\x00\x0f\x59\xc1\x7a\
\x72\x15\x18\x40\x0c\x63\xf0\x7d\x5e\x66\x44\x47\x1e\x75\x14\x5e\
\x7e\xf5\x15\xb4\xb4\xb5\x91\x25\xae\x70\xe2\x22\x06\x5c\x6e\x1d\
\x1b\x5b\x7a\xf0\xbd\xbf\x54\x22\x91\xe9\xc1\x89\x87\x75\xe0\xda\
\x2f\x55\xc1\xe3\x49\xe1\xbf\x6f\xb7\x30\x76\x54\x1d\x01\xa7\xd6\
\x1b\x13\x60\x10\x48\x76\x11\x08\xfc\x96\xdd\x81\xb9\x38\x7a\xfe\
\xeb\xb8\xd8\xe4\xb5\x02\x55\xb8\xef\x79\x66\x1d\x71\x54\x55\x55\
\x08\x28\xf3\x42\xaa\xb4\x95\x16\xf0\x88\x10\xe8\x70\x4c\xe5\x0f\
\x7f\xfa\x03\xbe\xf4\xc5\x2f\x61\xec\xb8\xf1\x43\xba\xe7\xe0\x50\
\x76\x55\x14\x03\xd8\x2c\xf0\xc7\xca\x9c\xcb\xe5\x85\xf6\xdb\xb6\
\xdb\x12\xc3\xcb\x16\x39\x1c\x0e\xc9\xca\xbd\x27\xff\xf5\x12\x1e\
\x7c\xea\x1f\x78\x7d\xc5\x4a\x59\xd0\xc3\x5d\x78\x78\x55\xdc\xc0\
\x52\x5e\x6d\xa0\x3b\xe0\xf4\xf1\xe7\xcf\xe6\xc9\xbe\x49\xb2\x68\
\x59\x02\x01\xb1\xfc\x9a\xbd\x77\xb9\x8e\xc0\x4f\x56\x6d\x43\x73\
\xb3\x34\x0a\x89\x91\x95\x65\xa0\x18\x2a\x01\x2f\x83\x5c\xa3\x58\
\xac\x12\x27\x9f\x78\xb2\xc4\x48\xb8\x23\x11\xc7\x2a\xfc\xbc\x24\
\x99\x5d\x9b\xc6\x0a\x74\x93\x9e\xfe\xd7\x3d\x51\xdc\xf6\x90\x1f\
\xeb\x57\x74\xe0\xea\x8b\x7d\xb8\xe2\xd4\x14\xd6\x37\xb5\xa3\x68\
\x0c\x2c\x16\x62\x10\x48\x77\x93\x3b\xf0\x5b\x2f\x1e\x5b\x31\x17\
\xb3\xf7\x0e\xe3\x82\xe3\x3a\x71\xea\xc1\x36\x08\xf3\x6c\x03\xfe\
\x9b\x7c\x7d\x38\x15\x9b\x65\x26\x40\x1b\x67\x4f\x5a\x5a\xdb\x71\
\xfb\xed\xb7\x23\x45\xcc\x40\xf5\x1b\x54\x00\xf0\xf1\x00\xc0\xe6\
\xde\xc8\x92\x82\x72\x75\x9f\x3d\x5c\xcb\x92\x4c\x40\x94\x14\x30\
\x4b\x16\xe6\xd5\xb7\x97\xe1\xc5\x37\xde\x42\x36\x9b\xc3\xf8\xd1\
\x8d\xa8\x21\x1f\xb8\xa9\xb5\x15\xef\xae\x5d\x2f\x2b\xfe\x58\x71\
\xed\xd4\xdf\x40\xe5\xd7\xfa\x51\x03\x66\x13\xd5\x95\x31\x7b\xa8\
\x27\x59\x4f\x59\x4c\x64\xd9\x69\x47\x8e\x0f\x30\x88\xf0\xcf\xeb\
\x9a\x5b\x24\x17\xce\x83\x40\x74\x19\x0c\x3a\x74\xae\x15\x53\xf2\
\x79\x7b\xcd\xc7\x82\xbd\xf7\x46\xbc\x3b\x8e\x10\x01\x63\x40\xd2\
\x95\x3e\x89\x13\x34\xd4\xc7\x90\xcd\x03\x3f\xff\x5b\x18\xb7\x3c\
\x14\xc2\xea\xe5\x5d\xb8\xfa\x42\x0f\xbe\xb4\x38\x89\xb5\xeb\xdb\
\xc9\xd7\xd7\xb7\x00\x81\x04\x31\x81\x1b\x7e\xe7\xc7\x53\xab\xe6\
\x62\xee\x3e\x61\x9c\xb5\xb0\x0b\xa7\x1d\x5c\x14\xcb\xdf\xd1\x19\
\x97\x98\x0c\x5f\x3c\xae\xc0\x4c\xa7\x33\x52\x5c\xc4\x80\xfd\xe6\
\xb2\x65\xb8\xe7\xee\xbb\x04\x20\xd5\x1c\x02\x15\x04\xfc\xc8\xca\
\x2f\xd6\x9f\xe8\x2d\x53\x49\x4e\xf7\x71\x85\x1e\x2f\xb5\xe5\x45\
\x3e\x1c\x88\x5b\xdf\xb2\x49\x5e\x1b\x5d\x57\x8b\x89\x63\x47\xa3\
\xa1\xb6\x9a\x68\xbd\x5b\xaa\x02\x9b\x5a\x5a\xb1\x72\xf5\x1a\xa1\
\xa6\xe3\x1a\xeb\x09\x08\x3c\x52\x34\xa4\x59\x03\x15\xbf\x7c\xc3\
\x73\x30\x8c\x7f\x67\x70\x29\xbb\x16\x92\xfe\xa3\xcf\xe2\x85\x3e\
\x9c\xf2\x5b\xdf\xbc\x89\x00\xa6\xc2\xee\xfa\x33\xc4\xd2\x5d\x52\
\xc7\x40\xe7\x78\xe2\x49\x27\x61\xd9\x8a\xe5\x02\x5c\x11\x02\x01\
\x5e\xec\xc4\xd9\x0e\x3e\xa7\xba\xba\x4a\xb4\x93\xe2\xde\xf4\x50\
\x00\xdc\x33\xf4\x4c\xa2\xf3\xd7\x5c\x1c\x23\xe5\x4f\xe1\x7f\xef\
\xd2\x30\x69\x7c\x2d\xdc\x7a\xbf\x3a\x01\x02\x81\xee\xb6\x56\xfc\
\xe8\xb7\x75\xd0\x3f\x3f\x07\x9f\xda\x7b\x29\x81\x44\x37\x82\x9e\
\x0a\xdc\xf2\x94\x86\xf6\x8e\x04\x6a\x6a\x22\xf4\xbd\x78\x25\x0e\
\x90\x76\x00\x9b\xd9\xc7\x33\xcf\xfe\x03\xe3\xc6\x8d\xc3\xa1\x87\
\x1f\x21\xaf\x29\x51\x0c\xe0\xc3\xdd\xd0\xdc\xa4\x82\x2c\x38\x5b\
\x7f\x4e\xf7\xb1\x9f\xcf\xed\xb5\x5a\xba\xe2\x78\xfb\xdd\xf7\xd1\
\x44\x37\x66\x8c\x94\x7b\xee\xee\xd3\xb1\xcf\x9c\x3d\x30\xba\xbe\
\xd6\xa6\xa4\xb2\xba\x0f\xc4\x06\x46\x61\xde\x8c\xdd\x25\xc8\xf5\
\xfa\xb2\x95\xd8\xb0\xa9\xd5\xa9\x0b\xd0\xfa\x29\xbf\xd6\x4b\x09\
\xb4\x7e\x6e\x81\x28\xbf\xc4\x14\x0c\x29\x02\x62\x20\xea\x88\xc7\
\xe9\x58\x72\xc2\x14\x38\x6d\x68\x0e\x41\xff\x91\x57\xea\x4d\x98\
\x38\x09\x9f\x39\xe1\x04\xa4\xc8\x55\x71\x4b\xcd\x42\x50\xea\x16\
\x5c\x4e\xdb\xb3\xda\x9a\x18\x34\x97\x07\xbf\x7d\x38\x88\x5f\x2f\
\xa9\xc0\x7b\xcb\x12\xf8\xce\xc5\x2e\x7c\x71\x51\x12\xab\xd7\x6d\
\x85\x09\xb8\x4c\x74\xb4\xb4\xe1\x87\xbf\x0e\xe1\x1f\x6b\xe6\x62\
\xce\x82\x30\x16\x1d\x16\xc7\xc5\x47\x67\x10\x70\x15\xd1\xd2\x92\
\x20\x10\xce\xcb\x75\x65\x45\x67\x16\xc0\x9d\x86\x39\x36\x72\xdf\
\x5f\xff\x8a\x15\xcb\x97\xa9\xa0\xa0\x02\x80\x0f\x2f\xac\x90\xe5\
\x95\x75\x42\xf7\xc9\xb7\x7d\x6f\xc3\x46\x34\x13\xbd\xe7\xe5\xba\
\xd3\x27\x8c\x17\xe5\x1f\xd7\xd0\x20\xc1\x3c\xbe\xe1\xca\x41\x1d\
\xfe\x97\xe3\x06\xbc\xdf\xcc\x29\x93\x08\x08\x76\x43\x82\x7c\x52\
\x0e\xe2\xb9\x74\xbd\x9f\xf2\x0f\x64\x02\xe5\x5a\x01\xa6\xae\x45\
\xa2\xb7\xbc\xa4\xb8\x64\xda\x0b\x84\xda\x3b\xbb\xa5\x24\xb8\x8e\
\x2b\xe3\x86\x30\xad\x65\xc6\x74\xe8\xa1\x87\x63\xef\xbd\xe6\x21\
\x1e\xef\xb6\x5d\x81\x60\x08\x7e\x02\x50\xe9\x7d\x48\x0c\xa9\xa6\
\x3a\x06\x8f\xcf\x85\x9b\x1f\xf7\xe3\x17\xf7\x54\x60\xe5\x3b\x09\
\x7c\xfb\xf3\x3a\xbe\xe8\xb8\x03\x45\x43\xdf\x2c\x3b\xc0\x20\xd0\
\x82\xeb\x7f\x1d\xc0\x13\x2b\xe7\x62\xd6\x82\x18\x4e\x3c\xb4\x07\
\x97\x1e\x9b\x46\x5d\xd4\xc0\x26\x02\x81\x1c\x29\x3d\x5f\xf7\x3c\
\x01\x36\x67\x53\xd8\x25\xeb\xec\xee\x96\x78\x40\x77\x57\xa7\x5a\
\x38\xa4\x5c\x80\x0f\x0b\x00\x1a\x82\x7e\x2f\xd2\xa4\xf8\x9b\xc8\
\xea\x73\xa3\x4e\xee\xb1\x3f\x86\xe8\x3e\x5b\xe1\x00\x59\x61\xd6\
\x77\x63\x3b\x54\x9c\x01\xa1\xe4\xb0\x07\x06\x01\x43\xda\x80\xe5\
\x07\xa6\x09\xb5\x2d\xde\x44\xd4\xbf\x24\x0c\xc4\xb6\xf2\xf6\xda\
\x02\xae\x0f\xe0\xde\x80\xd5\xb1\xd8\xbf\x9d\x06\x34\x98\xc2\x91\
\x7f\xb6\xfa\xa7\x9e\x76\x3a\xd6\x6f\xdc\x28\x91\xf9\x48\x24\x24\
\xcf\xb3\xdb\x92\x49\xdb\x33\x02\x6b\xaa\xb8\xc4\x37\x8e\xdb\x9f\
\xf1\xd0\x6b\x31\x7c\x9e\xdc\x81\x6f\x5d\x18\x25\x30\x4d\xe2\xe7\
\x77\x59\x18\x37\xba\x96\xac\x7f\xbf\xec\x80\xc7\x42\x17\xb1\xae\
\x1f\xfc\xba\x1e\xc6\x45\x73\x70\xec\xde\xef\xd0\x73\x1d\x88\x86\
\x4c\xfc\xe9\xf1\x30\x56\x34\xa5\x50\x55\x65\x22\x1c\x26\x17\x8d\
\x98\x52\x92\xae\x1b\xd7\x05\xbc\xbf\x66\x35\x96\xdc\x77\x1f\xce\
\x39\xe7\x5c\xe9\x54\x64\x59\xaa\x52\x50\x31\x80\x0f\x7a\x21\x78\
\x01\x8f\x65\x2b\x65\x75\x34\x82\x49\x63\x46\x61\x4c\x7d\x1d\x02\
\x44\x29\xa5\xde\xff\x03\xd2\x70\xbb\xa4\xd7\x2e\x18\xd2\xf5\xad\
\xd0\xfe\x7e\xd6\x9f\x01\xc3\x70\x56\xbf\x19\x86\x6d\xfd\x39\xe2\
\xcd\xf5\x07\x15\x04\x00\xdc\xef\x6f\xa8\x57\xba\x71\xec\x62\xdc\
\xb8\xf1\x38\xf3\xf4\x33\xc4\xff\x67\xbc\xe2\xda\x80\x80\x2c\x60\
\xf2\x4b\x28\x95\x41\xa0\x8a\x40\x20\x12\xf1\xe0\xce\x7f\xfa\xf0\
\xdb\xbf\xc6\xb0\xec\xad\x1e\x7c\xe3\x73\x3a\xbe\x72\x5a\x12\x1b\
\x9b\xdb\x90\x37\xd0\xaf\x33\xb0\x1d\x18\xec\xe9\x20\x26\xf0\x1b\
\x37\x1e\x58\xba\x07\xa6\xce\xaf\xc7\xa1\xfb\xe6\xf1\xa5\xcf\x24\
\x70\xd0\xee\x45\xf4\xc4\x53\xe8\xea\xea\x21\xd6\x64\x48\xc5\x60\
\x4f\x22\x21\x6e\x1c\xc7\x03\x9e\x7d\xf6\x59\x49\xb7\x2a\x51\x0c\
\xe0\x43\x05\xb6\xc2\xa4\x70\x41\x9f\xb7\x77\x4d\xfe\xc7\xf1\xbd\
\xed\x51\x5f\xb6\x15\xda\x9a\xf2\xb3\x85\x2c\x39\xae\x84\x29\x0c\
\xc0\x44\x90\x57\x00\x4a\x67\xa1\x02\xd1\xff\x7a\x3b\xb5\x35\x0c\
\xaa\xdc\xb8\x54\x7a\x9f\x7d\xf7\x43\x6b\x4b\x2b\x6e\xbf\xfb\x4e\
\x54\x10\x6b\x0a\x85\xc2\x36\xb3\xe1\x6e\x49\xbc\xee\x81\xae\x45\
\x55\x65\x25\xd9\xea\x6e\xdc\xf3\x9c\x07\x3e\x4f\x05\xce\x30\x13\
\xf8\xfa\x39\x11\xba\x3e\x49\xfc\xf4\x36\x60\xec\xe8\xba\x2d\x98\
\x40\xaa\xbb\x0d\x37\xfc\xaa\x0e\xda\xa5\x33\x70\xdc\x82\x77\xe1\
\xf5\xb7\x13\x13\x88\x63\x54\x4d\x14\xf7\xbf\x04\xb4\xb7\x1b\x04\
\x2e\x21\xc9\x9e\x24\xe3\x09\x44\x63\x15\xb8\xef\xaf\xf7\x61\xca\
\x94\x29\x18\x3d\x7a\x34\x86\xc2\x92\x72\x05\x00\xc3\x05\x04\x80\
\x81\x7d\xee\x3f\xa6\x5b\xc1\x99\x84\x7c\xc1\x70\xd4\xbe\xdf\xfa\
\x60\x5e\xf4\x53\x32\x7b\xad\xbf\xe9\x14\x8a\x70\x4e\x3d\x5f\x28\
\xca\x20\x0f\x76\x01\x38\x86\x60\x0c\x83\x05\x2f\x0c\x66\x7c\x94\
\x47\x1f\x7b\x2c\xda\x3b\xda\xf0\xc4\xd3\x4f\x13\x08\x54\xd9\x0b\
\xa8\x1c\x46\x54\x94\x18\x8b\x2e\x53\x8c\x3a\x8d\x1e\x72\x07\x98\
\x7e\x56\xe0\x34\x10\x08\x9c\xcb\x95\x90\x29\xfc\xf4\x2f\x1a\x26\
\x8c\xab\x95\x1e\x02\xe5\xec\x00\x33\x81\x54\x77\x3b\x7e\xf4\x9b\
\x3a\x7a\xff\x54\x1c\xb3\x9f\x5b\x40\x20\x12\x4e\xa0\xb1\x32\x8c\
\xbb\x9e\x05\x36\xb6\x26\xc9\x55\x2b\x0a\xe0\x30\x68\x36\x11\x80\
\x2e\x59\x72\x2f\x2e\xbd\xf4\x32\xf9\x1e\xd4\x7a\x01\x05\x00\x83\
\xe0\x1f\x9b\x72\x33\xda\xdd\x80\x34\xf4\xd7\xff\xa2\x61\x2f\x2b\
\xd6\x2c\xa7\x91\x28\x2f\x02\xa2\xe7\xb9\xbc\x98\xfb\xe3\x71\x24\
\xdb\x6e\x92\x31\xbc\xce\xd7\xed\xf1\xe2\xd4\xd3\xce\x90\xee\x3e\
\x2f\xbe\xfc\x0a\x2a\x24\x85\x69\x83\x5b\x9a\x83\x9d\x45\x6e\x1b\
\xc6\xee\x40\x94\xe8\x7b\x02\x7f\xf9\x07\x2f\xba\x8a\xd2\x73\x3d\
\xb8\xea\xfc\x28\x5d\x97\x24\x7e\x7e\x3b\x30\x69\x3c\x2f\x20\x32\
\x07\x80\x40\x4f\x47\x2b\x7e\xfc\xeb\x7a\x68\xfa\x44\x1c\x7d\x80\
\x1b\x81\x48\x37\xa2\xd1\x04\xc6\xd4\xe5\x71\xe7\x3f\x2a\xf0\xca\
\xaa\x1c\x92\xe9\x82\x9d\x92\x8c\x86\xf1\xd8\xe3\x8f\xe1\xc0\x03\
\x0e\xc4\x9e\xf3\xe6\xa9\xc9\xc4\x0a\x00\x06\xc7\xa5\xe0\x68\x74\
\xb9\xd0\xa7\x4c\xff\xa5\xff\x9f\x69\x2f\x73\x2d\x5b\x7e\x53\xdc\
\x04\x4d\x16\x0b\x65\xf3\x39\x04\x02\x7e\xe9\x03\x30\xdc\x2c\x17\
\xc7\x2f\x42\xe4\xff\x9f\x7f\xc1\x85\x52\x13\xf0\xca\x1b\xaf\x23\
\x5a\x51\x69\x57\x48\xd2\xff\xe9\x54\x5a\x3a\x2a\xb9\xdc\x0c\x02\
\x11\x74\x74\xf6\x48\x8e\xdf\xed\xe2\xde\x0b\x09\x7c\xe7\x12\x5e\
\x5c\x94\xc4\xcf\xee\x00\x26\x4f\xa8\x93\xe7\x7b\x17\x10\x79\x4c\
\x74\x75\xb4\xe0\x47\xbf\x6c\xa0\xe7\xc6\xe0\xd8\x43\x3c\x98\x1a\
\x09\xa1\xb2\xa6\x1d\x93\x46\x77\xe0\xd1\x97\x23\x78\xe4\xb5\x00\
\xd6\xb6\xa5\xd0\xdc\x92\x14\x97\x63\x43\x53\x3b\xe6\xef\xad\xc2\
\x5c\xc3\x0e\x00\x06\x6b\x2d\xc0\x27\x2d\xb6\x1b\xe0\x16\x3f\x94\
\xd7\xb8\x9b\x92\x29\xe8\x6b\x7b\x65\xf5\x73\x01\xbc\x5c\xed\x67\
\xda\x99\x04\x5e\x63\xc0\xcb\x91\xcd\x61\x78\x0d\xf8\x5c\xc3\xe1\
\x08\x2e\xb8\xe8\x62\x68\xbf\xff\x1d\x5e\x23\x10\x08\x86\xc2\xbd\
\x95\x91\xbc\xb2\xaf\xc4\x6e\x8e\xcb\x8b\x9a\xea\x0a\xb4\xb5\x27\
\x08\x04\xfc\x08\xf9\x4b\x74\xad\xe2\xb8\xf6\x8b\xe4\x3a\x20\x85\
\x5f\xdc\x69\x61\xf2\xf8\x7a\x02\x01\x38\x20\xa0\xd3\x35\x22\x10\
\xe0\xa5\xc4\xbf\xac\x83\x59\x6a\xc0\xf1\x87\xb7\xa2\x31\x10\x44\
\x45\x4d\x07\xc6\x8d\x4f\xe0\xd0\xf9\x19\xbc\xb8\x2c\x84\x95\x1b\
\xc9\x4d\x70\x67\x31\x63\xd4\x0a\x72\xb5\x8e\x76\xc2\x28\xd6\xa0\
\xde\xcf\x0a\x00\x76\x61\x37\x40\x16\x15\x59\x90\x00\x9f\x69\x95\
\x01\xae\x2f\xbe\x27\xfd\x07\xbc\x5e\x27\x1b\x60\x4a\xf3\x4f\xdd\
\x99\xbc\x3b\x1c\x85\x41\x20\x12\xad\xc0\x05\x17\x5c\x84\xc0\xad\
\x37\xe3\x85\x97\x5f\x96\x75\x02\x65\x49\x27\xd3\xd2\x23\xc1\xed\
\xf6\xa2\xae\x36\x8a\x4d\xad\x09\xfc\xf1\xf1\x30\x42\x3e\x62\x02\
\x9e\x6e\x7c\xf7\x3f\xd8\xfa\x67\xf0\xb3\xbf\xb4\x60\xfc\xb8\x06\
\x02\x43\x07\x04\xc8\xaa\x7b\x3d\xf6\x2a\xc2\x1f\xfe\xaa\x9e\x58\
\x46\x2d\x3e\xb3\xb0\x0d\x41\x54\x13\x6b\xd2\x50\x3f\x2a\x8f\xbd\
\xe7\xe7\x60\x14\x75\xf8\x42\x01\xb8\x46\xbd\x87\x9e\x52\x07\xfd\
\xc5\x0a\xe6\x27\xea\x86\x54\x00\x30\x18\x7e\xb1\x07\x5a\xa1\xe0\
\x28\xbf\xe5\xac\x31\x70\x5e\xe7\xbc\xbf\x83\x04\x7e\xf2\xf9\x19\
\x28\xd8\x63\xe0\x49\x41\x1c\x30\x1b\xce\x0c\x88\x2b\x05\xc3\x91\
\x08\xce\x39\xef\x7c\x84\x42\x11\x3c\xf9\xf4\xd3\xd2\xd0\xc3\x4e\
\x81\x10\x08\xa4\x2d\x59\xdc\xc3\x20\xc0\x4b\x89\x9b\x9a\x13\xf8\
\xd3\x93\x61\x84\x83\x06\x3c\xde\x4e\x5c\xfb\xb5\x06\x52\xf6\x38\
\x7e\x7e\xdb\x26\xd4\x37\x34\xc0\xdf\x6f\x15\x21\x77\x16\x4a\x76\
\x3b\xfd\x04\x50\x8f\x13\x0f\x6f\x23\xd4\x89\xc0\x6d\xd1\x7b\x19\
\x2d\x3c\xf4\x77\x82\x63\x61\xb8\x0c\xb8\xf0\x1e\xa9\xfe\x02\x76\
\xbe\xd4\x0d\xb9\x15\x51\x0e\xd2\x8e\xbc\xb8\x64\xfd\xb9\x72\x90\
\x17\xfc\x31\xb5\x17\x85\xe6\xa8\xb8\x53\xf8\x23\xff\x31\x48\xd0\
\x3e\x01\x02\x0a\x6e\x00\xc2\x01\x40\xff\x08\x29\x67\x65\xd0\xe3\
\x66\xa6\xa7\x9d\x7e\x06\x4e\x38\xfe\x78\x89\x0b\x78\x7d\x5e\x02\
\x86\xb0\xb8\x09\x1e\x29\xb0\xb2\x88\xfa\xfb\xd0\x58\x1f\xc1\xaa\
\x16\x1d\xb7\x3d\x1d\xc5\x6b\xaf\x5b\x68\x59\xd9\x8e\x6f\x5e\x59\
\x8b\x6f\x7f\x3e\x87\xf6\xd6\x16\xe4\x8a\x76\x07\xa6\xf2\x6d\xeb\
\xf3\x6a\xc8\x24\xda\xf0\xa3\x9b\x3c\x78\xf2\x85\x18\xb4\xb0\x0f\
\xa6\x1e\xb2\xfb\x36\xf2\x10\x96\x5c\x12\x56\x31\x4e\x00\xd0\xe6\
\xac\xea\x54\xa2\x00\x60\x27\xfa\xfe\xac\xc8\xbc\x86\x9f\xab\xfa\
\x0a\xa5\x42\x6f\xb4\xdf\x28\x2b\xbe\xf8\xfe\x90\x6c\x40\xc0\xeb\
\x11\x57\xa1\xbc\xb4\x78\x24\xd5\xb3\xcb\x34\x24\xd2\xdc\xcf\x9c\
\x74\x32\x4e\x5d\xfc\x59\xb8\xa4\xfd\xb9\x4b\x8a\x85\x78\x21\x8f\
\xc7\xeb\xb6\x3b\xfd\xd0\x79\xd7\xd5\x84\xf1\xca\xfb\x3a\x1e\xf8\
\x57\x14\xcb\xdf\xca\xa3\x7b\x55\x07\x2e\xbd\xbc\x11\xdf\xbc\x20\
\x8b\x96\x96\x16\xba\x3e\xba\x53\x5c\x65\x07\x14\xe8\xb2\x21\x19\
\xef\xc2\xed\x7f\xf3\x20\xd9\x03\xb8\x82\x21\xba\xa3\x3d\x36\xdd\
\x37\x73\xb6\xd5\xb7\x4a\x83\xea\xff\x2b\x17\x60\x17\x53\x7c\x8e\
\xfc\x33\x7d\xb7\x87\x87\xe4\x25\xaf\xcf\xae\x80\x51\x5e\x3b\xd0\
\x2f\xf8\xc7\xd6\x9f\x53\x81\x21\xbf\x5f\x16\x02\x59\xf4\x7e\xae\
\xfe\xe3\xfa\x81\x91\x94\xbb\x2e\x57\x47\x1e\xb9\xf0\x28\x44\xc8\
\xf2\xdf\x7e\xd7\xed\x88\x27\x92\x02\x02\x7c\xcd\x78\x51\x0f\xa7\
\xea\x42\x4e\xb3\x95\x47\x97\x66\x50\x17\xab\x40\x38\x1a\xc7\xac\
\x48\x10\x5f\xb8\x62\x0c\xf2\xf9\x0d\xf8\xe1\x1f\x5b\x31\x7a\x34\
\xb9\x03\x5e\x53\x2a\x27\xb9\xd0\xca\xed\x26\xb6\xd0\x6e\x20\xd1\
\x55\x42\x64\x1c\x01\xa7\xdb\xcf\x5d\x4c\x6d\x47\x4b\x35\x0d\x55\
\x00\xb0\xd3\x2e\xa4\xdb\x2d\x56\x5c\x9a\x56\x10\x05\xe5\x9b\x5e\
\x5a\x89\x1b\x4e\x3e\xbb\x1c\xfc\x73\xd6\xfb\x8b\xff\x4f\xaf\x71\
\x96\x80\x97\x00\xe7\xc9\x45\x60\x57\x20\x48\x66\x8d\xcb\x88\x47\
\x9a\xcd\x92\xb5\x12\xc4\x06\xf6\x3b\xe0\x00\xd4\xd4\xd6\xe0\xe6\
\x3f\xdf\x82\x75\x1b\x36\x48\x93\x15\x29\xd6\x49\x26\xa5\xa4\x37\
\x1a\x0d\x09\x68\xfe\xf5\x45\xa0\xb1\x2a\x84\x58\x45\x2b\xa6\x44\
\x42\xf8\x8f\xaf\x8d\xa5\x6b\xba\x8e\x40\x00\x18\x35\xba\x9e\x40\
\x53\x93\xe1\x2c\xa9\x9c\x1b\x7b\x8c\x6d\x43\x6d\x84\x2c\x7d\x89\
\x94\xdf\xe5\x73\x7a\xac\x8b\x0f\x46\xa0\x1a\xa6\xbf\xad\x43\x53\
\xb7\xa8\x72\x01\x76\x84\xb0\xd2\xfb\x1c\xca\x9e\xce\x66\x44\xf9\
\x79\x0d\x8a\x51\x32\x7a\x03\x7f\xe5\xb5\x04\x96\x13\xf4\x93\xd2\
\x5f\xc3\x92\x32\xe1\x28\x47\xab\x39\x55\x48\x37\xb3\xdf\xe3\x91\
\x55\x80\x23\x55\xf8\x1a\x30\x33\x9a\x3a\x6d\x3a\x2e\xff\xc2\xe5\
\xd8\x6b\xee\x5c\x7b\x46\x62\x30\x28\xbd\xff\xb9\x99\x88\xae\xb9\
\x50\x5d\x19\x41\x57\x46\xc7\x5d\xcf\x45\xf0\xf6\x72\x17\xba\xd6\
\xb5\x00\x45\x17\xbe\xf6\xf5\xb1\xf8\xc6\xe7\x92\x68\xd9\xb4\x01\
\x6b\x9a\x92\x68\xea\xc8\xe1\x90\x59\xcd\xb8\xec\xb3\x49\xf8\xc2\
\x01\xbb\x65\x9a\xb4\x66\x73\x89\xf2\x9b\x7a\x94\x9c\x80\x31\x2a\
\x06\xa0\x18\xc0\x0e\x40\x4e\x5d\x17\xab\xcf\xb7\x16\x17\xef\x94\
\x9c\x59\x80\x6c\x7d\x64\x5e\x20\x37\xc7\x70\xd2\x7d\x26\xec\x62\
\x1f\x48\x21\x90\xad\xf8\xbc\x3f\x2b\x7f\x65\x24\x22\x43\x40\xb8\
\x38\x48\xa6\x02\x71\xf4\x7f\x24\x5f\x38\x06\x01\x3a\xdf\xda\xba\
\x3a\x5c\x74\xe1\xc5\xf8\xfb\xdf\x1f\xc2\xa3\x8f\x3f\x2e\x2c\xa9\
\x82\xce\x9d\x17\xf5\x68\x39\x0b\x35\x55\x61\x2c\x6f\x4a\xe3\xfe\
\x17\xab\x30\x61\x6c\x07\xa2\x75\x71\x78\x2a\x2a\xf1\xb5\xaf\x8c\
\xc1\x9c\xdd\x5a\xf1\xd4\x8b\x6d\x68\xa8\x71\xe3\xb4\x63\xbc\xa8\
\x9b\x54\x05\x23\x5f\x2e\xb8\xd2\x6c\x10\x70\x07\x61\xba\xa6\xd1\
\x77\x30\x01\x2a\x05\x38\xcc\x00\x60\x28\x17\x02\x95\xfd\x7c\x0e\
\x6c\x31\x55\x95\x06\x15\xb0\xab\xfc\x4a\x4c\x73\x79\x44\xb8\xc9\
\xa5\xbe\xe8\x5d\x4c\x24\xfe\x3e\x8f\x13\xe3\x49\x39\x9c\x0d\x20\
\x2c\x88\x92\xbf\x5b\x5f\x5d\x49\xd4\xdf\x44\x86\x7b\x07\x38\xa3\
\xbe\x76\x95\xba\x75\x06\x01\xb6\xf8\x27\x9e\x74\x32\x1a\x1b\x1b\
\x71\xef\x5f\x97\xa0\xad\xad\x1d\xb1\xca\x2a\x99\x82\xcc\x53\x87\
\xa3\xe1\x02\x9e\x79\xc7\x8d\x7d\xa6\x07\x30\x6e\x72\x17\xaa\x43\
\x51\xba\x3e\x6e\x1c\x79\xfc\x28\x1c\x79\x6c\x11\x72\x91\x0d\x2f\
\xcc\x7c\xb9\xa8\x42\xb3\x95\xdd\x45\xae\x80\xaf\x01\x39\xed\x50\
\x62\x5b\x9c\x7a\x2c\x0c\xfa\xfd\xac\x00\x60\xc4\x50\x7e\xb7\xa4\
\xf1\x32\xd9\xb4\x34\x0e\x61\xcb\xcd\x55\x7e\x96\x65\xd8\xf9\x7d\
\xd3\xb6\xf8\x56\x39\xf8\xe5\x3c\x9a\x76\x05\x90\x04\xf8\x2a\x2b\
\xa2\xb4\x85\x91\x2d\x96\x10\x4f\xa7\xc5\x65\xe5\x91\x62\xfa\x2e\
\xe6\xa8\xf2\x62\x21\x06\xd4\xfd\xf6\x3f\x40\x56\xee\xdd\x76\xc7\
\x1d\x78\xfb\x9d\xb7\x11\x8b\x55\xa0\xa7\x47\x17\x17\x6a\x63\x53\
\x02\x4f\x2c\x0d\xe3\xd0\x05\x9d\xa8\x1e\x95\x80\xe5\x8a\xc0\x4c\
\xf1\x6a\x4d\x4f\xef\xaa\xca\x7e\xaa\x46\x17\xbb\x04\x2d\x50\x8d\
\xbc\xf7\x50\x14\xcd\x59\xe0\x29\xc7\x4a\x14\x00\x7c\x3c\x04\x87\
\xdd\xdd\xdf\x1e\xdd\x55\x20\xe5\xcf\x49\x97\x60\x1e\xdd\x95\xcc\
\x64\xe9\x46\xb6\x9b\x88\x5a\x4e\x9a\xaf\x7f\x80\x45\x46\x82\x13\
\x5b\xe0\xa6\x22\xe1\x60\x80\xac\x5a\x50\xd8\x43\x3c\x95\x41\x32\
\x9b\x95\xd7\xf9\x35\x29\x05\xde\x8a\xa5\xd0\x36\xff\xc1\xea\xfb\
\xd9\xb2\xfa\x5e\xfa\x28\x36\x46\xdb\xca\x70\xd1\x8f\x62\xad\xb4\
\x6d\xfe\xb2\xf9\x13\xd6\x16\x0f\x96\xe3\x12\x70\x6b\xef\x2f\x5c\
\xf6\x05\x3c\xf2\xc8\xc3\x78\xe4\xd1\x87\x1d\x70\x00\x12\x89\x1c\
\x96\xaf\xcb\x63\x7d\xab\x17\xd3\xb3\x3d\xd0\x3c\x74\xad\x5d\x21\
\xda\x7c\xe8\xdf\x66\xcd\x0e\xbc\x64\xe9\x8e\x0e\xa0\x10\x3c\x12\
\x19\xeb\x64\x09\xfe\x01\xa5\xad\x9e\xa7\x12\x05\x00\xdb\xa4\x6b\
\xe5\x7e\x00\x72\x7b\xc9\xba\x7e\xad\x57\x61\xb8\xfd\x77\x67\xa2\
\x07\x9b\x3a\xc9\x22\x45\xa3\x32\xae\xcb\xe3\xb8\x04\xe5\xae\x3e\
\xe5\x15\x80\xd2\x16\x4b\x62\x05\x2e\xa7\x01\x88\x89\x1e\xa2\xfb\
\x99\x7c\x4e\xd8\x82\xd7\xeb\x41\x24\x10\x40\x88\x1b\x7f\xf4\x57\
\x3e\xa7\x76\xdd\x9e\xae\xdb\x57\x33\xbc\x5d\xe5\xec\x37\x8a\x7c\
\x80\x2b\xb5\x99\x1a\x32\x1b\x29\x2f\x91\x2d\xa7\xe7\x7a\xcf\x4d\
\xd3\x06\x9e\x6b\x3f\xc5\x91\xa0\xba\xa6\x95\xa1\x70\x80\x22\x7f\
\x38\xf8\xb1\x27\x29\x69\xf6\x3f\xfd\x86\xa9\x68\xa8\xae\x0e\xe0\
\xdc\x73\xcf\xc3\xdc\xb9\x73\x71\xcb\x2d\x37\xe3\xad\xb7\xde\x26\
\x90\xd5\xd0\xde\x6d\x20\x57\x30\x05\x51\xad\x42\x86\xfe\x61\x45\
\x67\x06\xe0\x11\x7f\x5f\xde\xed\xa2\xcf\x0a\xc4\x50\x8a\x1c\x8b\
\xa2\x7e\x29\xdc\x16\x8f\x5d\xcb\xd1\xbe\x9e\x3e\x37\xac\xdf\xf9\
\x96\xbf\x67\x05\x00\x4a\x06\x58\x44\x1e\xc9\x25\xdd\x7a\x45\x49\
\x30\xe0\x86\x31\x9d\x7d\xc6\x8d\x1e\x03\xa3\xa9\x19\xeb\x5a\x5b\
\x65\xb2\x4f\x2c\x1a\x91\x15\x7c\x9c\x0d\x90\xd9\x7d\xa6\x1d\x8d\
\x36\x9d\xa8\xb7\x45\x16\x8e\xdf\xcb\xb1\x01\x8e\x72\x73\xe6\x20\
\x93\x4a\xc1\xa0\x9f\x35\x4e\x65\xa5\xd2\x92\xd3\xe6\xc2\x21\x29\
\x74\x91\x7d\xec\x5e\x79\x0c\x38\xbc\xb2\xcd\xfe\x59\x2f\xab\x90\
\x7d\xd3\x6b\x5a\xef\x2c\x32\x4d\xeb\xaf\x5c\xfd\x3a\x10\xe8\x7d\
\xe3\xcb\x74\x47\xf1\x75\xe7\xdc\xca\xca\x6b\x39\xfe\xb3\xad\xf4\
\x7d\xec\x82\x7f\x36\xcb\xc0\xd3\x0b\x4a\x5a\x6f\xab\x2d\xd3\x01\
\x28\xcb\x29\x67\xee\xbf\xe6\xa6\x0f\xcc\xb8\x57\x40\x1f\x68\xc8\
\x75\x2c\x99\x42\xef\x0d\x27\x55\x6a\x96\x47\xae\xcb\x88\x75\x13\
\x75\x8d\x63\xf0\xb9\x0b\x2f\xc1\x03\x7f\x5d\x82\x67\x9f\x7b\x11\
\xd3\x8c\x6e\xcc\x9c\xbd\x1e\x18\x5b\x0b\x3d\x41\x00\x50\x48\xdb\
\x39\x7e\x3e\x56\xbf\x97\x90\x34\x82\x92\x36\x0e\x79\xd7\x89\x30\
\xdd\x27\xd2\xf3\x01\xb8\x35\x2e\x00\xf2\xf7\x82\x96\xa6\x59\x7d\
\x6c\xc9\x61\x72\x79\xd5\x41\x58\x01\xc0\xe6\xc2\xca\xcf\x0a\x6a\
\x6a\x8e\x3f\x4f\x37\x27\x5b\x6e\xa3\x3c\x0a\x9c\x1e\xdb\x3b\x3a\
\xb1\x61\xdd\x7a\xbc\xff\xfe\x6a\x99\xde\x93\xa7\x9b\x38\x5c\x11\
\x41\x5d\x6d\x8d\xf4\xa9\xe3\xd5\x7c\x1c\xe0\x72\x91\x22\x97\x41\
\x80\x17\xc8\x24\xba\xe3\x58\xb7\xbe\x09\x2d\x04\x1e\x19\xf2\xfd\
\x99\x39\xb0\x2b\xc1\x55\x70\x21\x62\x12\xa1\x50\x00\xe1\x50\x48\
\xfa\xea\x45\x22\x11\x49\x8d\x55\xd0\xc6\x9f\x1d\xa5\xdf\xb9\xed\
\x38\x1f\x9f\xe5\xe8\x35\x83\x46\x19\x04\x5c\x7c\x93\xeb\x8e\xe2\
\xdb\xda\x2e\x37\xbb\x6e\x69\xc2\x42\x4c\x47\x39\xcb\x29\x72\x89\
\x3b\xf4\x1b\xa6\xd1\x07\x76\x8e\x42\x6b\x70\xea\x15\x9c\xe7\xcd\
\x72\x40\xb3\xbf\x15\x45\xef\x8a\xc5\xde\x05\x4e\x1a\x7a\x47\x9e\
\x5b\x4e\xc6\xa3\x6c\x81\xf9\x78\x58\xe9\xba\xe2\x71\x74\x75\x76\
\xa1\xbb\xbb\x1b\xf1\x78\x52\x22\xff\x89\x9e\xa4\x34\xf8\xe4\x5e\
\x02\x9c\x4e\xe5\xe9\xdf\x7c\x3e\xde\x68\x2d\x8a\xf9\x00\xbe\x77\
\x97\x1b\x07\xb4\xb8\x31\x75\x4a\x04\xb5\x81\x12\xbc\xba\x89\x78\
\x8e\xd8\x41\x8f\x1b\xeb\xd6\x54\xa2\xbd\x6b\x06\xea\x1b\x62\x98\
\x3c\xe9\x5d\x4c\x9c\xd8\x20\xd7\x8f\x81\x93\xcf\x5d\x26\x3d\xbb\
\xec\xc7\xf2\x75\xe3\xe3\xca\xe5\x72\xbb\x3c\x0b\x50\x00\xb0\x19\
\x65\x4e\x26\x93\x4e\x47\x5b\x97\xdc\x28\xba\x33\x05\x58\xb7\xff\
\x11\xcf\xbe\xa1\xae\x1a\x41\xbf\x07\xb5\xd5\x95\x98\x30\x61\x1c\
\xda\xba\xba\xd0\x95\x48\xc2\x20\xcb\x56\xc8\x15\xe1\x61\x6a\x4a\
\x9a\xc0\xe0\xc1\xf1\x02\xbe\xe1\xe3\xdd\x49\x99\x75\xc7\x41\xc0\
\xa9\x53\x26\x21\x1c\xe4\xd6\xe3\x3e\x29\x02\xe2\x61\xa1\x41\xfa\
\xd9\x4f\xc0\xc1\xbf\x07\x82\x7e\x29\x09\xe6\x05\x42\xcc\x2a\xb8\
\x7e\x9e\x47\x8a\x73\xa1\x10\x33\x82\x5e\xf7\xa4\x9f\xc5\xd6\x9c\
\x51\xe5\xbd\x2d\xc7\xe0\x0c\x32\x75\x14\x5a\x46\x90\x5b\xf6\x7e\
\x56\xef\x38\x72\xb3\xb7\x46\x41\xce\x55\x3e\xbb\xfc\x68\x7f\x96\
\xe6\xac\x4a\x14\xb7\x81\x3f\x53\x66\x9e\xeb\xbd\x85\x4d\xae\xde\
\xa0\x84\xd9\xeb\xb2\x94\x99\x80\x0d\x0e\xae\x3e\x46\x60\x41\xba\
\x27\x7b\x3c\x5e\x54\x56\x54\x90\x02\x36\x22\x9b\xcd\x22\x9b\xc9\
\x92\xe2\x67\xec\x81\x1f\xf4\x7b\x3a\x43\x8f\x99\x3c\xd1\xfe\x9c\
\xdd\x32\x9d\x00\x38\x95\xd5\xb1\x62\x8d\x85\xae\x94\x17\x13\x46\
\x05\x10\xab\xf0\x21\x91\x02\x36\x6e\x2a\x22\x12\xaa\xc2\x5e\x13\
\xab\x30\xa6\x31\x86\x1a\xfa\x4e\x82\xe1\x90\x64\x54\x7a\xbb\x01\
\x59\xf4\x19\x85\x92\x03\x52\x36\xeb\xb0\x9c\x1e\x0c\x8a\x01\x0c\
\x61\x65\x1c\xac\xf4\x09\x07\xa0\x4a\x32\x17\x10\x03\x7d\xe2\xb2\
\xab\x40\x0a\xc0\xe3\xba\x78\x68\xc7\x6e\xd3\x26\xf7\xbe\x56\x6e\
\xee\x61\x4a\xf5\x9f\x35\xc0\xbf\xd6\x9d\x1b\x52\x77\xba\x04\x6d\
\x1e\x26\xb3\x36\x0b\xc2\xf5\x4f\x85\x4a\x6c\x81\x2d\x2f\xdf\xb8\
\x9b\x45\x06\xcb\xc1\xc9\xad\x06\xf3\x9c\x0f\xb6\x36\x8b\x21\x58\
\xdb\x89\x29\xf4\x3f\xdf\xad\x3d\x6e\x3d\x78\xe8\x50\x6c\x6b\xfb\
\xd1\x80\x32\x54\xf8\xc9\xaf\x0f\x78\x49\x89\xa3\xa1\x01\xe3\xd5\
\xad\x7e\xd7\x42\xdb\xec\x98\x74\x69\xa1\xae\x09\x33\x30\x4c\x9b\
\xca\xb8\x5c\xf6\xf3\xba\x66\xe7\x5d\xf8\xba\xf3\xa2\xab\x12\xd7\
\x65\x38\x2e\x49\x7f\xb6\x52\x3e\xba\x9d\xad\xf8\x2a\x0d\x38\x4c\
\xe3\x01\xd8\x46\x40\x8d\xef\x42\x06\x89\xcd\x5b\x4d\x7d\xe8\x1b\
\xcb\xb2\x76\x58\xd1\x4f\xaf\x22\x6d\xe7\x98\xb6\xf7\xda\x07\x01\
\x8b\x8f\x9a\x2d\xb0\xb6\x1b\xc7\xfc\xa0\xd7\xd0\x92\x89\x43\xfd\
\x5d\x98\xed\x9d\xd3\x87\xfb\x6c\xc5\x00\x94\x7c\x04\x90\xf8\x08\
\x1f\xb0\x43\x6f\x49\x6d\x28\x9d\xeb\x27\x7e\x5c\xda\x56\x14\x5c\
\xc9\x87\x15\xb5\x16\x40\x89\x12\x05\x00\x4a\x94\x28\x51\x00\xa0\
\x44\x89\x12\x15\x03\x18\x6c\x19\x29\x5d\x81\x95\x28\xe9\x7f\x3f\
\x2b\x00\xf8\x40\x07\x64\xd0\x56\x32\x7c\x2e\xa0\xe4\x52\x37\x8f\
\x92\xe1\x2f\x7c\x2f\x5b\xc5\x2c\xaf\x49\xe6\xd2\xc3\x0c\x3f\x7a\
\xb4\xa1\xd1\xad\x48\x1b\x0c\x64\xba\x79\x69\xfe\x27\xef\x76\x96\
\xe6\x79\x5d\x5b\x5e\x85\xce\xce\x4e\x5e\x5c\x33\xaf\xb6\xa6\x26\
\x26\x0d\x1e\x94\x28\x19\xe6\xc2\x65\xdd\x6f\xbc\xb1\xb4\x2b\x91\
\x48\x74\x40\xd3\xe2\xa4\x76\x99\xb9\x73\xe7\x9a\xb1\x58\x0c\x05\
\xc3\xd2\x2f\x9a\x17\xb8\x62\x7c\x4c\x5f\xba\xcb\x30\x80\x77\x3b\
\x8c\xfd\x5f\x69\x2e\xee\xcb\xad\x9e\xb7\x38\x20\x77\x95\xe4\x74\
\x9a\x5a\x4b\x2a\x44\xa1\x64\xc4\x88\x7f\xd2\xbe\x55\x21\x5d\xaf\
\x2a\xff\xbe\x2e\x9b\xc5\xea\xe6\x22\x72\x25\x0b\xa7\xed\xe1\xaf\
\xde\xa5\x5c\x00\x8f\x0b\x59\x2f\x29\xbf\xd7\xbd\xb5\xe4\xad\x21\
\x95\x22\xba\xa2\xff\x4a\x46\x90\x18\x85\xec\x80\xbe\x44\xdc\x1d\
\xd9\xc5\xb3\x0e\x20\xcb\x36\x8c\x5d\x0a\x00\x4c\xa7\x2e\xdd\x1c\
\xc1\x31\xbe\xd2\x66\xe7\x66\xf7\x05\xc0\x76\x6b\x65\xf9\x66\xd8\
\xfc\x9a\xb8\x34\x55\xbf\x36\x92\xa5\xbc\xa2\x72\x97\x02\x80\xf2\
\xe2\x32\x73\x84\x46\xf9\xb9\x2a\xad\xca\xcb\x2b\xf4\xcc\x5e\xed\
\x2f\x9a\x1a\x12\x45\x7d\x9b\x0d\x2a\xf9\xd9\x00\xb1\x9e\x90\xdb\
\x1a\xd0\xe9\xa3\x87\xde\x53\x30\x87\x17\x08\xf0\xd1\x97\xe8\x98\
\xbd\x2e\x35\x9a\xfb\xdf\xdc\x29\xbd\x1d\xa3\x77\x2d\x06\xe0\x2c\
\x1a\x19\x89\xad\x1a\xf9\xdc\x42\xba\x85\x2b\xe6\xbb\x31\x3a\xca\
\xf9\x0c\x0b\x3e\xf2\x79\x5e\x79\xbf\x13\xd7\xfd\x33\x85\xaa\xfa\
\x31\xd2\x3e\x6c\x73\xc9\x97\x80\x83\x46\x03\x9f\x9b\xed\xa1\x9f\
\xed\x3b\xc2\x47\x14\xf1\xaa\x25\xab\xf0\x3e\x46\xc9\xb8\xf0\xa1\
\xac\x4c\xe5\xb5\x07\x05\x3a\x35\x0f\x31\x9d\x09\x51\xf2\x73\x7b\
\x8c\xde\x1e\x06\x4a\xb6\x76\xcd\x9c\xe5\xd6\xd8\xd5\x00\xc0\xa1\
\xc8\xee\x11\x68\x1c\xca\xcd\x40\x2b\xfc\x1a\xaa\xc3\x6e\x14\xe9\
\x09\x0f\x37\x0a\x29\xf5\x20\xd9\xb6\x09\x95\x8d\xe3\x60\x96\x8c\
\xad\xbe\xcf\x4b\xc0\x11\x0b\xf3\x30\x51\x27\x56\x42\x9a\x94\x69\
\x5d\x83\x42\x55\xad\x34\x2a\x19\xca\x00\xc0\x16\x9f\x95\x7f\x7a\
\x25\x70\xce\x6c\x02\xab\x64\x1b\xfe\xe3\xa1\x2e\xd4\x4e\x98\x02\
\xcb\x50\x5d\x79\xb7\x77\xbf\xec\x7a\x00\xe0\xb8\x00\x23\x31\xcb\
\x27\x8d\x6a\xa5\xfd\x97\xd5\xbb\x79\xb8\x57\xa0\x69\xc9\xf9\x1a\
\xdb\x18\x58\x23\xaf\x71\xfc\x53\xde\xe3\x00\x80\x6e\xf5\xee\xcf\
\x8f\x43\x55\xff\xf9\x9c\xab\x7c\xc0\x09\x93\x75\x1c\x37\xd5\x85\
\x68\xd8\x8b\xa7\x5f\xea\x41\x3a\xde\x81\x6a\x6b\x1a\x1d\xbf\x02\
\x80\x6d\xd1\x26\xd3\x1a\xdc\xef\xd5\x3d\x68\x4a\xe2\x6c\x23\x12\
\x00\xfa\x35\xeb\xec\xef\x17\x9b\xbd\x0c\x61\x1b\xcc\x61\x1b\xfe\
\x74\xf9\x33\x87\x2a\x00\x64\x8b\xc0\x19\x73\x34\x7c\x7a\x86\x1b\
\xf9\x02\x9c\x65\xce\x9a\xb4\x02\x33\x46\x78\xb0\xf7\xe3\xba\x4d\
\x83\xad\x03\x83\xc6\x00\xd8\x22\x96\x46\xe0\x1a\xce\x32\x03\xe8\
\xdf\x2a\x0b\x4e\x1b\x2d\x3e\xef\x92\xd9\xd7\xce\x6e\x73\x06\xd0\
\xbf\xb5\x96\x28\x7f\x79\x9c\xb8\xb5\x6d\x4b\xa1\x6d\x06\x12\x65\
\xe1\x8e\x38\x6e\xfd\xc3\xdd\x5d\x72\x6c\xfd\x28\xa9\xb4\x14\xe3\
\x19\x1b\xe5\xc6\x22\xdb\x78\x1f\x3f\xef\x71\x71\xaf\x3f\x9b\xb1\
\xf4\x3f\xdf\xed\x31\x3d\xd3\xf9\x9b\x76\xe3\x0e\x38\x7d\x0b\x77\
\x9d\xcc\x07\x9f\xaf\x0c\x8a\xd9\x35\x01\x80\xb6\x11\x12\x1f\xd2\
\xb6\xe2\xde\x58\x03\x7a\xe5\x59\x03\x15\xc2\xda\x92\x1d\x94\xca\
\x96\x72\x33\x00\x60\xb0\x28\x3a\x5b\x7f\x00\xd0\x9c\xbf\x95\x33\
\xec\x9f\xeb\xfd\x16\x62\x7e\xa9\xb1\x90\xb8\x43\x3c\x6b\xa1\x25\
\x6b\xb7\xef\xe2\x82\xab\xed\x65\x1f\x72\x25\xfb\x33\x6a\xe9\x33\
\x2a\x7d\x96\x44\xef\x21\xcf\x5b\x48\xe4\x2c\xb4\xe7\xe8\xdd\xda\
\x96\x9f\xc3\x7b\xf1\xdf\x4f\x17\xe0\x04\x2e\xfb\x8e\x9d\xab\x38\
\x93\x79\x03\x3d\xf4\x9a\x46\x88\xc8\x01\x4d\x69\xc3\xd5\x2f\x5e\
\x10\xf2\x00\x0d\x01\x4b\x1e\x59\xf1\xb9\xa5\x7a\x3a\x6f\xa1\x23\
\x07\xa4\x0c\x97\xd4\x89\xb0\x1b\xd4\xff\xd0\x47\x12\x99\xd0\xac\
\xc1\x77\xed\x06\x07\x00\xd8\xfa\xd3\xe6\x1a\xca\x31\x00\xeb\x83\
\x69\xbd\xb6\x19\x08\x88\x92\x6b\xd6\x56\x3f\x44\x5e\x33\x9d\xd9\
\x80\x56\x9f\xe5\xb3\x1c\x1a\x5d\xdc\x0a\x1f\xe4\xe7\x92\x39\x13\
\xa6\xc7\xb4\xdb\x5a\x3b\xcf\xe7\x0d\xfb\xe3\x0f\x1f\xab\xe1\xf8\
\x29\x3a\xa6\x57\xeb\x88\x7a\xfb\x00\xa0\x87\x14\x77\x45\x47\x09\
\x7f\x5b\x91\xc3\x63\x4d\x3a\x34\xb7\x4f\x82\x8c\xbd\x9d\xb1\x1c\
\xff\x93\xbf\x87\x43\x46\x69\x38\x79\x37\x1d\xbb\x55\x93\xff\xee\
\xed\x0f\x00\xa4\xc4\xa4\xc0\xef\x77\x19\xb8\x6f\x79\x16\x4f\x36\
\xeb\xd0\x3d\x5e\x62\x04\x76\x83\x4f\x4e\x4f\xfe\xbf\x05\x6e\xcc\
\xa8\xd1\x30\x2a\x64\x49\x26\x43\x5c\x82\x7c\x11\x7b\x4c\x1e\x85\
\x5b\xce\xaf\x45\xa4\x42\xc3\x5b\x1b\xb2\xf8\xc9\xbf\xb2\x88\xc4\
\xaa\xe4\x33\xeb\x09\xa8\x4e\x98\x06\xec\x3f\xd6\x85\xda\x80\x0d\
\x04\xd2\x81\x98\x8e\x25\x5d\xb0\xd0\x49\x00\xf0\xd2\xc6\x02\xee\
\x5f\x65\x60\x53\xde\x87\x00\xa7\x15\xfa\x75\x4f\xb2\x3e\xe4\x77\
\x34\x94\x0d\x47\x69\x57\x64\x00\x76\x40\xcc\x0e\x90\x0d\x49\x6b\
\xae\x6d\xfe\xdc\xd6\x07\x68\x94\xad\x30\xfa\x29\x32\x9f\x92\xa7\
\xdc\x41\xb8\xdc\x8f\xcf\xb2\x27\x01\x77\xa7\x0d\x78\x32\x26\x8c\
\xa2\xd9\xdb\x3c\xd3\x79\x19\x19\x02\x80\x8c\x33\xe2\xaa\x7f\xcb\
\xee\x7c\x89\xbb\xdf\x1a\x28\x79\x0c\x87\x5a\x93\x82\xd1\x8f\xa3\
\x49\x71\xbe\x75\x80\x1b\xc7\x4c\x75\x83\xc7\x0e\x14\xcd\x3e\x37\
\xc1\x43\x96\x33\xec\xd7\x30\xbe\xca\x8d\x23\x26\x7b\x71\xd7\x1b\
\x71\x5c\xf7\x5c\x06\x9d\x7a\xc4\x5e\x98\xe2\xfc\x5d\x0e\x36\x5e\
\x31\xcf\x85\x2b\x48\x89\xf9\x3d\x45\x01\x64\x97\x93\xba\x23\x20\
\xa0\xf3\xa8\x08\x02\x13\xe9\x73\x0e\x9b\xe4\xc1\xef\x9e\xef\xc0\
\x75\x2f\x66\xe0\x0e\x45\xa1\xd3\xb1\x64\x49\xe1\xa7\x55\xba\xb0\
\x0f\x81\x50\x36\x67\xcf\x3d\xb4\xad\x3f\xb1\x91\x48\x10\x07\x55\
\x6a\xd0\xbd\x3a\x52\xcd\x6d\x68\x59\xdb\x82\xec\xf4\x05\x98\x1a\
\x36\x71\xe3\x21\x6e\xcc\xac\xd7\xe5\xef\x19\x96\xdd\xf6\xdc\x6e\
\xc3\x6e\xc2\x4f\x00\x54\x17\x05\x66\x37\xb8\x71\xf8\xb8\x0c\xbe\
\xf1\x68\x07\x96\x67\x2a\x11\xf4\xba\x7a\xd9\x87\xbe\x9d\xf6\x5e\
\xd6\xe6\x74\xc1\x1a\xba\xac\x81\xcf\xa3\x64\x0e\x6e\xcf\xc0\x41\
\x72\x01\x6c\x85\x30\x77\x72\x0c\xa0\x7f\xf3\x4d\xed\xdf\x18\x7e\
\x6b\x3b\x74\xc0\x42\xd9\xc7\xef\xa3\xfc\xe5\xc2\x26\xbe\xa9\x35\
\x97\x29\x33\xec\xfa\x53\x7f\x9f\xd7\x8d\xba\xa0\x8e\x0a\x14\x60\
\xe9\x25\x99\x7d\x57\xee\x51\x2f\xd6\x96\xb6\xb0\x4b\xef\x6b\xc3\
\xed\x3c\xb2\x52\xf1\xe4\xe0\x82\xdc\x29\xa6\x7c\x7e\xad\x0f\xf8\
\xf9\x11\x5e\xec\x37\xde\x8d\x14\x59\x4c\x66\x03\xfc\xf9\x3e\xa7\
\x83\xaf\xcd\x10\x4c\xb2\xa6\x76\xe7\xdb\x33\xf6\xa9\xc2\xd8\x50\
\x27\x2e\x78\xa0\x0d\x3d\xfe\x5a\x78\xe8\x6f\x33\xe0\x2c\x9e\xaa\
\xe3\xeb\xfb\xbb\x85\xc6\x17\xe9\xf7\xa0\xcf\x83\x54\x2a\x85\x78\
\x22\x25\x5d\x90\x6b\xab\x49\x13\x75\x0f\x7d\x8e\x21\x9d\x82\x2f\
\xf9\x54\x2d\xd6\xb7\xad\xc2\x2f\xde\x05\xa2\xd1\xb0\x1c\x97\xed\
\xb8\xf3\x6d\x64\xf4\xb6\x00\x2f\x2b\xa2\x26\xf5\xdc\x1e\xb9\xda\
\x49\xa2\x38\x51\x42\x9c\xef\xec\xaf\x8b\xf2\x27\xf3\x96\xb4\x45\
\xe7\x38\x45\x7b\x47\x97\xcc\x59\x8c\x86\x03\xa8\x88\x46\x88\x0d\
\x99\xc8\xd1\x79\x8c\xaf\x0b\xe3\x3f\x0f\xca\xe3\x8c\xbb\x37\x22\
\x11\x19\x2b\x0c\x46\xd7\xca\x2d\xd1\xed\x38\xc7\xc7\x31\xfe\xfd\
\x9b\x8f\x0e\x86\x0a\x72\x0b\x75\x63\x90\x8d\xe0\x20\x01\x80\xe9\
\x6c\x83\xc3\xd1\xfa\x1a\x66\x6e\xe5\x4b\xe9\xf5\xd3\xb7\x54\xf0\
\x5e\x3f\xbe\x1c\xb4\xc3\x40\xf4\x96\x81\x13\x9c\xcf\x77\x9b\xce\
\x5c\x40\x1b\x00\x32\xb9\x02\x66\x11\x25\x7e\xe8\xf2\x5a\xa7\x1e\
\xd8\xb3\xe5\x2d\x4a\xfb\x79\x89\xea\x66\x8b\xd6\xc0\x46\x9c\xce\
\xb5\xb2\x9c\x8d\x9b\x15\x7f\xe3\x20\x0f\x29\xbf\x8b\x68\xbe\xad\
\x70\x41\xbf\x17\xcb\xdf\x5b\x8b\x07\x9e\x5f\x89\xf6\x8c\x85\x29\
\xa3\x6b\xf0\x99\x7d\x27\xa3\x86\x14\x38\x5f\x24\x3f\x3c\xa3\xe1\
\x53\x33\xaa\x71\x65\x73\x02\x57\xbe\xd0\x09\xbd\xa2\x0a\x01\x02\
\xa9\xb3\xf7\xf0\xda\x60\x6c\x6a\x12\xb3\xff\xdf\xdb\x9f\xc0\xed\
\x2f\x36\x23\xe5\x8e\x42\xf3\xf8\x30\xb9\xda\x8f\xeb\x4e\x9e\x81\
\xe9\xe3\xab\xed\xef\xca\xe3\xc6\xe5\x07\xd7\xe1\x81\x95\xeb\xd0\
\x5a\x9a\x48\xcc\x43\xc7\x13\xcb\x3b\xb1\x76\x59\x13\xf6\x9c\x3e\
\x06\xe3\x1b\x6b\xa4\x2b\x2f\x4f\x42\x6a\x69\x8f\xe3\x99\xd7\xde\
\x93\x71\x68\x6f\xae\x6e\x85\xe1\x0a\xe1\xa0\xd1\x16\xe6\x8f\x72\
\x91\xf2\x9b\xd2\xe2\x7c\xe5\xea\x8d\xb8\xfe\xd6\x7f\x62\x65\x92\
\x8e\xc3\x1b\x92\xe9\xc8\x67\xec\xd3\x80\x4b\x17\x4e\xe7\x55\x61\
\x04\x29\x1a\x76\x9b\x52\x87\xd3\xa6\x6c\xc2\x0d\xef\x74\x92\x3b\
\x11\xe3\xe5\xab\x72\xe9\x74\x27\x40\xa9\xa3\x0c\x08\x5a\x2f\x38\
\xf4\x07\x08\xbd\x77\x50\xca\x40\x25\x2f\x7f\x6d\x83\xe9\x85\x6a\
\x56\x9f\x21\xd9\xe5\x62\x00\x32\x68\xc3\xf8\x64\x01\xc0\xda\x06\
\x8d\xdf\x3c\xe0\x66\x59\x7d\xca\x3b\xf0\x77\x27\x4f\xef\x0c\xc4\
\x28\x53\xea\xb2\xf2\x97\x53\x71\xdb\xfb\xbe\x4a\x92\x05\x30\x07\
\x06\xae\x78\x28\x28\xdd\xdc\xb5\x95\xee\xed\x46\x7c\xb6\x56\x1e\
\x5d\x1e\xe1\xc5\x1b\xbb\x03\x0b\xea\xc8\xe7\x9f\x46\x96\x3f\x6f\
\x03\x45\x30\xe0\xc5\x43\x4f\xbd\x82\x2f\xfc\xdf\x4b\x68\xab\x9c\
\x01\x6f\xac\x01\xc6\x86\x12\x6e\x5f\xf9\x3e\x7e\x73\xda\x44\x4c\
\x6c\xac\x10\xf6\x90\x22\xa5\x3b\x7d\xbf\x51\xb8\xfb\xad\x65\x78\
\x36\x13\xc2\x8c\x2a\x17\xc6\x45\x2d\xf2\xe3\x35\xb1\xee\xa9\x44\
\x0f\x6e\xfa\x47\x13\xde\x09\xef\x8d\x50\x75\x8d\x54\xf3\xad\xe4\
\xde\xfc\x8f\x24\xf0\xbb\x93\x74\x74\xf7\xa4\xf0\x5e\x53\x37\xd6\
\x37\x77\xc2\x4c\x96\x60\xfa\xc6\x90\xc2\x7a\xf1\x5f\x2f\x15\x10\
\xdf\x94\xc3\x9f\x4e\x2f\x62\xda\x18\xdb\xad\xf0\x11\x00\x2c\x27\
\xca\x7f\xfe\xff\x2d\x43\x70\xe2\x1e\xa4\x88\x15\x08\xd6\x8f\xc6\
\x9c\xca\x92\x0c\xf5\xe4\xe3\xf6\xba\x75\x3c\xbb\x74\x35\xee\x58\
\x13\x85\x7f\xb7\x7d\xe4\x77\xa6\xf8\xdf\x79\xb5\x07\x8d\x35\xed\
\xe2\x72\xac\xd8\xd8\x85\xf5\xad\x71\xbc\xfb\x5e\x3b\xcc\xcc\x04\
\x14\xc2\x15\x3c\xa9\x65\x0b\x30\xd7\x7a\x15\xbf\x4f\xe1\xf5\x32\
\x48\x94\xe7\x3a\x68\x7d\xfb\xf6\xce\x54\xe8\xf7\xfb\x56\xb9\x9e\
\xf5\xef\x19\xe3\xc7\xcd\x02\x88\xab\xb8\xcb\xad\x05\x30\x9d\xde\
\xf9\x3b\xc8\x05\xd8\xd6\xe5\x34\x7b\x95\xdc\xea\x4d\x9b\x95\x03\
\x72\xbd\x8a\xff\x31\x23\xce\xb6\xd1\x36\x37\xcb\x00\xd8\xd6\x4a\
\x28\xb1\xb5\x5d\x64\xb4\x67\xf7\xf5\x53\x7e\xab\x3c\xd3\x8e\xb6\
\x22\x59\xf3\x13\xa7\xf9\x10\xf6\x11\xa5\x2e\xd8\x93\x86\x9b\xc9\
\xbf\xfe\xc6\x9f\x5f\x46\xf7\xf8\x43\x51\x4d\x4a\x66\x39\x0a\xf2\
\x32\x71\xfc\xef\x3f\x4d\x4a\xbd\x38\x28\x43\x32\x39\xde\x12\x09\
\xf8\x71\xd2\x34\x0f\x9e\x78\x96\x14\x2a\x56\x2f\xdf\x01\x7f\x3e\
\x4f\x3e\x0a\x47\xc2\xf8\xdd\x97\x17\x62\x09\xd1\xfb\x17\x5a\x4d\
\x6c\xe8\x21\x65\xf6\xfa\xf1\x5c\xc2\x8b\x85\x37\xad\x41\xaa\x75\
\x3d\x5a\x0b\x3e\x68\xc1\x4a\x02\x88\x7a\xa2\xee\x3c\xf8\xa4\x08\
\x7f\x30\x84\x8a\x09\x33\xe0\x0e\xba\xa4\xe0\xa7\x7c\xcc\xec\x42\
\xf8\x1b\x27\x20\x36\x71\x26\x2c\xda\x2f\x9d\x2f\xc1\x28\x95\x7a\
\x5f\xcf\xe4\x8a\x58\x74\xc4\x3c\x84\x1a\x92\x78\x60\x83\x86\xf7\
\xbb\x4d\xac\x4f\x11\x83\x0a\x54\xe2\xca\x67\x12\xa8\xb8\xef\x2d\
\x34\x75\x67\x91\x26\x36\xe2\xaf\x18\x87\x60\x55\x0d\x2c\x99\xd5\
\xb0\x75\xe7\xcc\xd8\x8e\x0b\x20\x60\x40\xff\xb8\xb4\x81\x3f\xcb\
\xa6\x3b\x63\xd3\x3e\x66\x4c\xf8\xa3\x02\x80\x31\xc8\xa5\x80\x83\
\xb4\x18\xc8\xfa\x68\x2e\x80\x85\x2d\xb8\x5c\x7f\x0c\x29\xff\xdc\
\x97\x7f\xb6\x06\x50\xf8\xb2\xe2\x5b\xe8\xa3\xf7\x7d\x53\x6c\xb6\
\x16\xfc\xfb\x88\x00\x60\x0d\x04\x00\xb6\x36\x3c\x21\xa8\x65\x6d\
\x87\x33\xad\x67\xeb\xd7\x84\x47\x82\x55\x57\xc5\xb6\x18\x0e\xc2\
\x6e\x00\x33\xa6\x08\xb9\x16\xb3\xea\x74\x3b\x5b\xc0\x56\x94\x28\
\xf8\xd3\xaf\xbe\x87\xf7\xb4\xd1\xa8\xae\x6d\x80\x59\xcc\xf5\x7e\
\x5e\xd8\xab\xe1\x1f\x6d\x5e\xac\x68\x2d\x60\xf7\x46\xbf\xa4\xe9\
\xf8\x7d\xb3\xc6\x46\x10\x2b\x6c\xc4\xba\x64\x2d\x56\x77\x15\x31\
\x8a\x58\x49\x81\xdc\x0e\xc2\x13\xcc\x9e\x50\x89\xbd\x26\x5a\x48\
\x10\x78\xac\x8d\x9b\x78\xa7\xad\x88\x97\x9b\x4c\x3c\xdf\x3c\x0a\
\xdd\xae\x46\x62\x31\x7e\x89\xc8\x6b\x66\x3f\x7f\x5f\x26\x72\x93\
\x62\x9b\xbe\x2d\x53\x9f\xa4\xf8\x66\xa9\x20\x8f\x7c\x1a\xaf\x6e\
\xe2\x79\x80\x86\xcc\x17\xe4\xe8\x77\x80\x5c\x97\x33\xf7\xad\xc1\
\x67\xf7\x2a\xa1\xb5\xc7\xc0\xf2\x4e\x03\xef\xb4\x14\xf1\xcc\x46\
\x72\x1b\x5c\x7b\x20\x17\xf3\x20\xe2\xf3\x82\xbb\xe7\x08\xb0\x59\
\x1f\xbc\xa2\xb0\x3f\xb5\x97\xb4\xab\x33\x6a\x4c\x00\xa1\x6c\xf9\
\x9d\x19\x8a\x65\x20\xe8\xef\x42\xf4\x07\x8c\xfe\x41\xde\xfe\x71\
\x9b\x8f\x9d\x75\xe8\x37\x8e\x6d\x17\x73\x01\x9c\x81\x90\xff\x86\
\x01\x58\x5b\xbb\xb6\xda\xc0\xc0\x8d\x65\x6e\x49\xeb\xcb\x05\x2d\
\x7d\x00\xd0\x07\x04\x9b\x07\x7e\xb4\xed\xc0\xfc\x47\x67\x00\xd6\
\x00\x10\x08\xf8\x3c\xf8\xd7\x9b\xab\x70\xd6\x7f\x3f\x83\xc8\xa8\
\x09\x9b\xcd\xb4\x77\x52\x67\x05\x03\x17\x1d\x30\x06\x57\x2f\xae\
\xa4\x9f\xcd\x01\x31\x00\x5e\x3c\x54\x34\x4a\x18\x1d\x34\x51\x17\
\xe0\x88\x7f\x99\x19\x18\x58\xd9\x94\x80\x9b\x68\xbf\x65\x16\x07\
\xd0\x63\xbe\xc9\x3b\x0b\xc0\x2a\xb2\xac\x33\x1b\xed\xfd\x79\xc4\
\x56\x5d\xd4\x8f\x06\x6f\x0e\xef\xe6\x8b\xf8\xcd\x4b\x69\xec\x3d\
\xc6\x0d\x1f\x51\x79\x8e\x15\xc8\xc6\x11\x0a\xaf\x1b\x33\x1a\x80\
\x59\x8d\x5e\x9c\x3e\x07\xe8\xce\x94\xf0\xf2\x86\x1c\x6e\x79\x23\
\x8d\x47\xd6\x5b\x70\xf3\xdc\xc3\x7e\x57\x47\x66\x01\x5a\x9b\xb3\
\x1e\x27\x6e\xc1\x0a\x4f\x1b\x91\x16\x3c\xb5\xbe\x88\x27\x96\x25\
\x70\xcc\xec\x6a\x24\x72\x86\x04\x82\x53\x86\x4d\xc7\x6b\x2a\x3c\
\x38\xbc\xd2\x83\x23\xa6\xfa\x71\x19\x7d\x81\x6b\x3a\x8b\x78\x70\
\x45\x06\x7f\x7c\x33\x81\xa6\xbc\x87\x80\xc7\xf5\xb1\x12\xe6\x72\
\x6f\x18\x5b\x82\x43\xf9\x1e\xd0\x37\x73\x23\xca\x00\xe0\xd6\xb7\
\x74\x17\xfa\xbb\x0d\x96\xf5\x6f\xee\xd7\x0f\xc0\x00\x76\xcd\x18\
\x00\x4f\xd7\xd1\x3f\x9e\x0b\xd0\xdf\xaa\xdb\x75\xf6\xe5\x9f\xad\
\xed\x2a\xb1\xb5\x03\xa9\x5d\x39\xd7\x3f\x80\x01\x58\x76\x24\xbf\
\x33\x34\x0e\x46\xe3\x5e\x0e\x95\x1d\x28\x19\xb2\xc2\xd9\xb0\xdb\
\xb1\xae\x56\x3f\x36\x61\xd9\x80\xc9\xae\x01\xdd\xc1\x2e\x98\x03\
\x94\x8c\x53\x71\x3c\x2a\xdb\x24\x80\xd8\x62\x91\x01\xed\x93\x2d\
\x1a\x03\x8e\x85\x2d\x1d\x88\x29\xf8\xb4\x12\x1e\x5c\x6b\xe1\xab\
\x77\xaf\xc5\x55\x47\xd4\x62\x54\x5d\x8c\x3e\x59\x97\x78\x01\x5b\
\xe9\x5c\x3f\x63\xeb\xf7\xba\x70\xe4\xf4\x30\x0e\x9b\x12\xc0\x2f\
\x9f\x69\xc1\xf7\x5e\x24\xa6\x11\x08\xf7\xa6\xe5\x6c\x37\x6a\x20\
\x00\x88\x75\x93\xe3\x36\x7a\xdd\x92\x8c\xa9\xe3\x2b\x0f\x77\xa1\
\x90\x4d\xe1\x88\xd9\xf5\xf0\xfa\xfc\xc2\x4a\x8a\x74\x2f\xf0\xf5\
\x29\xfe\x7f\xf6\xae\x3d\x46\xae\xaa\x8c\xff\xe6\xce\x9d\xf7\xec\
\xce\xec\xce\x6e\xb7\x5b\xb6\xed\x52\xba\x65\xdb\x42\x6d\x4b\x01\
\x41\x4c\x45\x7c\x03\x82\x31\xc1\x10\x35\x24\x04\x12\x13\xd1\x48\
\x9a\xa8\x91\x18\x05\xf5\x0f\x0d\x88\x09\x6a\x48\x8c\x51\x21\x5a\
\x40\x4b\x83\x36\xb5\x40\x79\x75\xcb\xab\x2f\x4a\xcb\x62\xbb\xbb\
\xdd\x2e\xbb\x2d\xfb\x7e\xcd\xec\xcc\xdc\x3b\xf7\xe5\x77\xce\xbd\
\x77\x76\x66\x77\x8b\x15\x85\x99\x9d\x39\xbf\xe4\x64\xe7\x79\xf7\
\xdc\x33\xdf\xf9\x9d\xdf\xf7\x9d\xef\x9c\x93\xef\x18\x1e\x5c\xd8\
\xe0\xc7\xb6\xad\x01\x5c\xdf\x36\x83\x6f\xec\x1c\xc2\x9b\x69\x72\
\x05\x7c\x1f\x4c\xe6\x98\x35\x87\x14\x0a\xad\xd2\x2b\xcd\x2a\x04\
\x16\xbf\x2d\x54\x0b\xff\x17\x17\xa0\x1a\x63\x00\xcc\x28\xce\x49\
\x00\x56\xb1\x9c\xf7\x14\x48\x3a\xb3\xc0\x4f\x67\x1d\x5e\x2f\x18\
\xe1\x0b\x55\xc0\xff\x22\xe1\xff\x77\xf7\x86\x8a\x64\xcd\x3f\xdb\
\x0f\xec\x20\x4d\xea\xc0\xbc\x83\xcf\x27\x00\x2f\x27\x0a\xa9\xe8\
\x4c\xc1\x59\xff\xdf\xe0\x2b\xea\x74\x7e\x50\xe6\xec\x09\xbe\xac\
\xb0\xce\xc9\xd4\x14\x5f\x71\x37\x87\x00\x0c\x92\xfd\x2c\x03\x0f\
\xf9\x6b\x5a\xf6\xd9\x79\x8c\x80\xe8\x2f\x8b\xba\x3f\x7a\xca\x8f\
\x17\x7b\xba\x70\x7d\xab\x85\xeb\xd6\x35\x90\x1b\x90\x40\xa2\xae\
\x16\x12\x9b\x92\x24\xa9\x9e\xd3\xed\xd3\x91\xd9\x48\xcd\x0c\xff\
\x5b\x9f\x58\x8a\x63\xbd\x6f\xe2\x6f\xc3\x1e\x84\x43\x41\xfb\xda\
\x6c\x14\x33\xad\xf9\x0a\xc0\x51\x7a\x70\x56\x03\xb2\xb9\x8f\x41\
\x23\x82\xdb\xff\x31\x8e\x2b\x3b\x06\x70\x63\x7b\x14\x57\x5f\xdc\
\x88\x95\x4b\xe3\xfc\x74\x64\x36\x31\xca\x09\x81\xfe\xa7\x42\x84\
\xa8\x10\x23\x5c\xdc\x52\x8b\x9f\x6d\x9d\xc6\x2d\x4f\x0c\x22\x57\
\xdb\x5c\xa4\x3c\x3e\xb0\xdf\xb0\x30\xb6\x60\x14\x8f\xfa\xae\x42\
\x90\x1d\x42\x70\x55\x83\x57\x9a\x3f\xa5\x68\xfd\x87\x08\x22\xcf\
\x23\xd1\x4b\xbb\xc8\xa3\x74\x41\x40\x6e\xd4\xf3\x9d\x6e\xf7\x15\
\x37\x45\xd2\xcd\x85\x37\x2c\x3b\x7b\xd0\x9a\xe3\xdb\x7f\xd8\x81\
\x9b\xf3\x51\x00\xf3\x3b\x83\x95\x8f\x7b\xb0\xd1\x15\x0b\xac\x8e\
\x73\x8f\x1e\xb7\xe6\x12\x00\x58\xac\xc4\x80\x44\x65\x30\xa5\x61\
\x38\xad\xa3\xb9\xce\xcd\x31\xf0\x60\x4d\x4b\x02\xc6\xf1\x19\xfa\
\x7e\x63\xd1\x75\x59\xfb\x25\xfc\x16\xda\xea\xbd\x79\x97\x81\x19\
\xea\xe0\x58\x12\x43\xd3\x2a\xac\x04\xcb\xd8\x23\xff\xdc\x2b\xa3\
\x5b\x6f\xc6\x83\x6f\xa7\xf1\xf0\xb1\x11\xac\x0e\xbf\x83\x8d\x4b\
\x24\x5c\xb6\x22\x8a\x4b\x56\xd4\x63\xed\x8a\x04\x22\x35\xf6\x9c\
\x3f\x0b\x24\x5a\x3e\x92\xe9\xab\x83\x78\xec\xe4\x30\xcc\xe0\x0a\
\xfe\x3f\xe7\xd6\xbd\xf0\x7e\x4d\xe7\x7e\xd9\x1d\xb1\xce\xcd\xf8\
\x4b\xf1\xc7\xb1\x67\x32\x8c\x3d\xcf\x4d\xa1\x65\x5f\x37\x56\x47\
\x73\xf8\xd8\xca\x10\xd6\xb6\xc4\xf0\x91\x95\x09\x2c\x5f\x96\x80\
\x0e\xc9\x3e\xad\x39\xab\xe1\xd2\xd6\x3a\x5c\xe4\xef\xc2\x1b\x4a\
\x1d\x42\xfe\x0f\xdf\x64\x17\xb2\x23\xd7\x65\x70\xd5\x80\xad\x14\
\x3c\x45\xb3\x0d\x85\xf1\x83\x85\x2e\xc2\x42\x60\xa6\x61\x55\x61\
\x22\x10\x1f\x19\x74\x52\x00\xd2\xbc\x66\x76\xb3\xcd\x75\xb3\x60\
\x09\xad\x93\x10\x33\x1b\x34\x2c\xdf\x8c\x70\x2b\x9f\xee\x6b\x38\
\xd3\x77\x56\xfe\x2f\xef\x08\x26\x93\xea\xfa\x02\xa4\xc8\x3e\xe7\
\x9d\xfd\x2c\xec\xd3\x70\x6d\x32\xd1\x49\x1d\xe8\x98\xce\x99\x38\
\x76\x36\x83\x4d\x17\xf8\xf9\x67\xb2\x9a\x8e\x6b\xd6\x35\xa3\xed\
\xa5\xe3\x38\xa5\xa8\x88\xf8\x66\x13\x81\x58\x2a\xf0\xe7\x2e\x0e\
\x60\x55\x9d\x04\x25\x67\x77\x40\x26\x5f\x5f\xef\xec\xc3\x94\x27\
\x8c\x35\x11\x3b\xa1\x28\x10\x90\x70\x31\xf9\xdf\xb5\x44\x04\x7f\
\x3c\x2e\xe3\xa4\x21\xe3\xc4\x88\x82\x3f\xf7\x25\x11\xd6\x4e\x63\
\x6d\x5d\x2f\x7e\xf5\xd5\x0d\x58\x47\x64\xc0\xd4\x00\x0b\x46\x26\
\xa2\x3e\x18\xa9\x31\x58\x4d\x2d\xfc\x5e\x78\xdd\xad\xe2\xfb\x65\
\xaa\x81\xfd\xc6\x16\x57\x7b\x3a\x42\xf4\xcf\x57\xc7\x25\x84\x03\
\x1e\x24\x42\x5e\x6c\x48\x44\x71\x60\xc0\x83\x67\xcf\xc6\x31\x4a\
\x9f\xed\xe8\x9c\x01\x0e\x4e\xa0\x51\x3e\x8b\xdb\x2e\x5f\x82\xef\
\xdd\xbc\x9e\x9f\xf0\xcb\xc4\x4a\x94\x94\x46\xd8\x60\xc7\xaf\x67\
\x88\x80\xa2\x76\xe0\xa7\xc4\x30\x9c\xa2\xc1\x8d\x0f\x58\x0e\x19\
\x78\x78\xdc\x80\x3d\x66\x2a\xc1\x39\xa0\x7d\x01\x29\x60\xf1\x6f\
\x1a\x66\x69\xb7\x04\x29\x11\x01\xd8\xc7\x38\x5b\x5e\xab\x60\x56\
\xc0\x95\xf9\x76\xa7\xd7\xf3\x89\x38\x56\x51\x04\xb6\xdc\x91\x0f\
\x02\x3a\x53\x7a\x6e\x87\xb0\xf2\x45\x5f\x50\x01\xd8\xdf\x31\x16\
\x20\x00\x83\x07\x01\xdd\x28\xf8\xae\x13\x29\xdc\xba\x31\xca\x47\
\xff\x1c\x11\xc2\x92\x86\x18\xee\xbb\x7e\x05\xee\x7e\x66\x02\xef\
\x6a\xb5\x5c\x8a\xb2\xef\x5f\xde\xe4\xc3\xf7\xae\x89\xe6\xb3\x09\
\xd9\xd6\xd4\xd3\xd3\x49\xec\x78\xad\x0f\x88\x6c\xc2\x35\x4d\x16\
\x7e\xf9\x85\x7a\x72\x57\x98\xa1\x4a\x44\x30\x26\x4e\xf7\x9d\xc6\
\x13\x03\xe4\x6b\xd3\x88\x2f\x07\x42\x50\xcc\x26\x1c\x4c\x2b\x38\
\x3d\x65\xe2\x92\xe5\xba\xe3\xeb\x5b\x98\x9c\x9a\xe6\xf7\xe1\xde\
\x8b\x1b\x04\x74\xeb\xce\x7c\xfa\xda\x20\xcb\x4c\x24\x22\xca\xa8\
\xa8\x91\x2d\xde\x41\xee\xbf\xae\x0e\x9b\x96\x07\xb9\x8f\xcf\x76\
\x49\xda\x47\xca\x65\xdf\xbf\xce\x42\xa9\xbd\x00\xfe\x68\x0c\x56\
\x34\x8e\xa1\x8c\x8e\x97\xc6\x2c\x6c\xd3\x34\x98\x3c\x6c\xef\x41\
\x26\xa3\x20\x93\x4e\x03\x7e\xa7\xfd\xca\x6c\x5d\xb4\x3b\xc0\x73\
\x77\x81\x85\x58\x3c\xf6\xd4\xa2\xec\xb8\x06\x2c\xa0\x58\x98\xb0\
\xe4\xe6\x9f\x70\x02\x70\xa6\x62\xab\x8e\x00\x4c\x27\xf0\xe3\x4e\
\xc1\xe9\x8e\x71\x99\x05\x9d\xbf\x1c\x24\xfd\xfb\x72\x01\xa4\x62\
\x9f\xd8\x3c\x87\x4f\x5c\xf4\x3d\xe3\x1c\x2e\x80\xe5\xe4\x4c\xd0\
\x28\x1a\xa0\xeb\xbe\xd8\xa7\xe1\x9f\x6f\x4d\xe0\xa6\x4d\x8d\x98\
\xce\x1a\xc8\xa8\x3a\x3e\xbd\xb1\x05\x3b\x1a\x93\x78\xba\x2b\x83\
\xd1\x8c\x1d\x40\xbb\x61\x6d\x14\x0d\x35\x32\xf9\xd1\x26\x37\xc6\
\x48\xc0\x87\x5f\xff\x75\x2f\x0e\x4d\x04\x11\x58\x13\xc7\x0b\xbd\
\x29\xf4\x4f\x84\xd0\xd2\x10\x26\xb5\xa0\xc3\x47\xc3\xd6\x8f\x6f\
\x6e\x43\xe4\xf9\x11\x1c\x1c\xd7\x78\x42\x4f\x30\xe0\xc5\x17\xaf\
\xa8\xc5\xd6\xb6\x1a\x64\x34\x7b\x1d\x02\xfb\x81\xf6\xbf\x79\x9a\
\xa4\x43\x82\x4f\xff\x71\x17\x80\xea\x3e\x99\xca\xe5\xeb\xce\x5c\
\x8b\x75\xab\x9a\xf1\x9b\x5b\x89\xa4\x7c\x41\xa8\xaa\x8a\xbb\x9f\
\x1a\xa2\xfa\x49\xd8\xb2\x32\x88\xa4\x6a\x62\x86\xc8\x6b\x73\xdb\
\x12\xfc\xfa\x46\x1d\xbf\x3b\xa6\x60\x32\x27\xf3\xce\xb1\x72\x99\
\x0f\xdb\xae\x8e\xf1\xf5\x08\x06\xd9\x07\x9b\x2a\x3c\xd9\x35\x80\
\xde\x31\x15\xf2\x0a\xd9\x0e\x76\x96\xa9\x21\xcc\xcd\x49\xd0\x1c\
\x12\x90\x24\x3b\x2b\x31\x1f\x3c\x74\xdd\x04\xf7\x77\xaf\xbe\x59\
\x00\x16\x58\xd2\xb8\x7c\x9a\x8d\xde\x5b\x15\xb1\x71\x84\xab\x00\
\x4c\xab\x58\x01\xb0\xe7\xff\xc9\x05\x58\x58\x01\x98\xb3\xa3\xad\
\x65\xef\xa1\x70\xef\xde\x11\x5c\x44\xd2\x7e\x5d\x6b\x82\x7c\x64\
\x9d\x27\xd9\xac\x5a\x1a\xc5\xb7\x97\xd9\x23\x3e\xeb\xf1\x39\x32\
\xac\x2c\xcf\xe1\x97\x50\x13\xf6\xe3\xc9\x3d\x2f\xe3\x67\xbb\xba\
\x20\xb7\x6d\x85\xdf\xa3\xa3\x2f\x69\xe0\x27\x7b\xce\xe0\x37\xb7\
\xb4\x52\xe7\xf7\xf1\x29\xc0\xfa\x58\x04\x0f\x7c\xb9\x15\x13\x29\
\x8d\xe7\xe3\xd7\x86\x64\xfa\xae\x4d\x22\x6c\xba\x2a\x56\x13\x46\
\xc7\xab\x47\xb1\xfd\xd0\x30\x02\x6d\xed\x24\x48\x72\x76\x10\x90\
\xea\xdb\x43\x1d\x54\xf2\xd8\xf7\x6a\x07\x71\x25\x7c\xe9\x8a\x16\
\xbe\x88\xe7\xd0\xdb\x7d\x88\xe5\x86\xf0\xdb\x03\x61\x7c\xb4\xd9\
\x83\x4f\x5e\xda\x84\x24\x91\x17\x9b\xc1\xb8\xe9\xf2\x16\x7c\x76\
\x83\x86\xc9\xb4\xce\xeb\xca\x48\x8b\xd5\x5f\xa1\xfa\xf8\x65\x7a\
\x4c\x76\xf2\xe0\xe3\x2f\x61\x58\x5e\x8a\x28\xdb\x64\xd4\xd0\x17\
\x8f\x2d\x50\xc9\x15\xc5\x0c\x3c\xb3\xc1\x43\xc7\x5d\xb0\x0f\xbf\
\xa9\x32\x05\xa0\xd1\xc8\xa1\xa8\x1a\x2c\x59\xca\xa7\xdd\x5a\x56\
\x65\x6c\x02\xe1\x12\x40\x90\xfc\x71\xb6\xb8\xc6\x4b\x6e\x0e\xfb\
\x1b\xf0\xc9\x0e\x01\x9c\x3b\x08\xe8\xf5\xb0\xcf\xca\xa4\x20\xec\
\xd7\x58\xe7\x61\x9d\xca\x26\x00\xc3\x4e\xfe\xa1\xf7\x7a\x32\x32\
\x6e\xfb\x4b\x2f\xee\xbb\x6e\x0a\xd7\x6e\x68\x21\xb9\xee\xe7\x01\
\xb6\xac\xb3\xca\x90\x47\xab\xc9\xca\xc2\x41\x2f\xb4\x9c\x8a\xdf\
\x3f\xf1\x2c\x7e\xf8\xe4\xdb\xc8\xae\xfc\x28\xf9\xfc\x41\xe2\x1f\
\x0d\x21\xd9\x83\x1d\x3d\xf4\xf7\xf1\x13\xb8\xe7\xf3\x2b\xb0\xb4\
\x91\x9f\x52\x03\x1a\x9c\x11\x8d\xf8\x51\xe3\x6c\x6a\x62\x2f\x34\
\xf2\x71\xa3\x7d\xe1\xe5\xc3\xf8\xce\xef\xf6\x21\xbd\x64\x13\x82\
\xd4\x39\xdd\xce\xc8\x62\x0b\xbb\xbb\x53\xb8\x63\x30\x82\xd5\xcb\
\x62\xa4\x4a\x6c\xc2\xe3\xea\xc3\x2b\xa3\x2e\x12\x44\x4c\x56\xd1\
\x4f\x1f\xbf\xeb\xa9\x41\xfc\x22\x93\xc5\x67\x36\x5d\x40\xef\xf9\
\x90\xd3\xa9\xbe\x74\xad\x86\x98\xcc\xeb\xae\x5b\xac\x83\x10\x69\
\x85\x24\x4c\x91\xab\xf1\xa3\x3f\xec\xc6\xdf\x4e\x51\x5b\xb6\xad\
\x84\xa5\x6b\x58\xac\x3b\x02\xe8\xce\x88\x6f\x38\x6e\x80\x1d\x23\
\xa0\xd7\xf5\x2a\x74\x01\x58\x4a\xab\x92\xcb\x41\xb2\xbc\x8b\x4e\
\xe2\x9f\xd7\xfd\xd1\xaf\xfc\xca\xc9\x11\x9c\x7e\x27\xcb\x3b\x66\
\x90\x3a\xff\x91\x93\x03\x7c\x14\xe0\x9d\x66\x81\x51\x8c\xed\x7b\
\xd1\x3f\xa9\xe0\x99\x43\xbd\x50\x94\x2c\x1f\x05\x83\x64\x21\x13\
\xc9\x0c\xa4\x80\x65\x7f\xcf\x31\x94\x20\x5d\xa8\x47\x8d\xe0\xb6\
\x1d\x43\xf8\xd4\xcb\xfd\xb8\x79\x63\x23\x2e\x5d\x1e\x47\x22\x1e\
\xe1\x6b\x0e\x34\x4d\xc7\x74\x2a\x83\xa3\xdd\x67\xf0\xc8\x73\x6f\
\x61\xdf\x59\x09\x72\xeb\x55\xf0\x47\x6a\x60\x69\xb9\xfc\xff\x64\
\xa4\xf4\x48\x97\x81\x83\x7d\xc7\xf0\x95\x4b\x42\xb8\xa6\xbd\x11\
\xcd\x75\x11\xbe\x03\x31\x1b\x8d\xd9\xb4\xe3\x4c\x3a\x8b\xae\xfe\
\x51\xec\x39\x70\x12\x4f\x1c\x19\xc5\x4c\xd3\x06\x84\xe2\x8d\x74\
\x9d\xd9\xce\xe8\x23\xd3\xee\x4f\x99\xb8\x73\x7b\x0f\xbe\xff\xf1\
\x38\xd5\x25\x86\x48\x38\xc8\xbf\x3f\x3c\x94\xc1\x2b\x47\xbb\xe9\
\xf7\xd6\x48\x79\x18\x18\xd4\x43\xb8\xfd\xc9\x21\xdc\x70\x70\x00\
\x37\x6e\x48\x60\x7d\x4b\x1d\x6a\xa2\x41\x9b\x20\x09\x19\x45\xc5\
\xd8\xe4\x0c\x0f\x56\x6e\xdf\xd7\x85\xc3\xa9\x18\x82\x17\x6e\x74\
\x94\x86\xb6\xb8\x07\x87\x02\xf7\x80\x11\x81\x41\x2a\x89\x29\x2f\
\xab\x84\xd2\xb7\x24\x67\x03\x7e\xf6\xa1\x37\x9e\x7f\xa6\x73\xec\
\x5a\xe6\x63\x56\x26\x48\xc2\x4e\x8f\x03\x53\x43\xb3\xb2\x80\x46\
\xb9\x60\xe3\x72\xea\xbd\x91\x05\x83\x58\x2c\x20\xa4\x51\xe7\xd4\
\x47\xcf\x90\x6e\xcc\xb8\x2b\x45\x20\xc7\xea\xe1\xab\x6b\x86\x35\
\x27\x6b\x92\x8f\x26\x2c\x71\x47\x55\x80\xd4\x04\x96\xfa\xb2\x68\
\x0c\x1a\x90\xad\x1c\x27\x00\x96\xbd\x77\x36\x4d\xed\x1b\x5f\x86\
\x40\xfd\x12\x2e\x37\xad\x85\xa2\xe7\xcc\x5d\xa0\x51\xc8\x9c\x99\
\x46\x8d\x39\x8d\xa5\x81\x1c\xc2\x5e\x93\x4f\x3b\xb2\x0e\x9c\x52\
\x74\xf4\x27\x49\xa5\x85\x1b\xe0\x6b\x68\x86\xec\x0f\xe6\x13\x7b\
\x8a\x2f\xe3\xe1\xb3\x0d\x52\x7a\x1c\x17\x05\x67\x10\x95\x54\xe8\
\x44\xf2\xe3\x33\x54\x0f\xc5\x0b\xdf\x92\x0b\x21\x87\x63\xbc\x1b\
\xb0\x55\x7e\xb9\x2c\xd5\x3b\x3d\x81\x96\x40\x16\x31\x9f\x0e\x3f\
\xec\xfd\x0e\x32\x44\x14\xa3\x33\x06\x26\x72\x21\x20\xb1\x0c\xc1\
\x58\x3d\x9c\x9c\xd9\xca\xb3\x12\x0f\x9b\x8a\x35\xb0\x6f\xdb\xe5\
\x9f\xf8\xf8\xea\xf8\x4b\x55\xa3\x00\xf8\xbe\xf8\x5c\x0e\xa3\x62\
\x11\xa8\x8d\x53\xe7\x4b\xcc\xf1\xf3\x8d\x05\x47\x7f\x77\x74\x60\
\x91\x7a\xef\xb2\x0b\x8b\x17\x38\x38\x99\x80\x0b\xa5\x29\xb3\x13\
\xa5\x58\x34\x1d\x0d\x4d\x18\x25\xa5\x31\xa4\x39\xe9\xc0\x24\xef\
\xa5\x7a\x19\x81\x26\xd9\x9e\x86\x22\x17\xe2\xbd\x9a\x9a\xcd\x1e\
\x7a\x6a\x63\x50\x10\x43\x0f\x91\x47\x5e\x6d\x90\xb6\xf7\xd4\x7a\
\xe1\x4f\xf8\x9c\xe8\xb5\xc9\x73\xfb\xcf\x55\x7f\x3f\xcb\xd6\x8d\
\x35\xa0\x5b\x8b\xdb\xd7\xa0\xe7\xbc\x1e\x7e\xd9\xb9\x0f\x3d\x4f\
\x5e\x81\xa0\x0f\x08\x2d\xc5\xbb\x44\x3e\x67\xb8\x2a\x72\x7c\xe1\
\x00\xa9\x95\x88\x6c\xbb\x18\xce\x7a\x82\x4a\x85\x65\x47\x7f\xab\
\x73\x35\xa0\x3b\x7f\x5c\xd1\x78\x3f\xc6\x6b\xbe\x8f\x20\x97\xc9\
\xfb\x1a\x3f\x6b\x8e\x1c\xea\x3c\x71\x30\x9f\xf9\x7c\x8d\xcb\x9d\
\x66\xf5\xf1\xac\x16\x6f\xd1\x1b\x7c\x31\xcf\x7f\x61\xa4\x45\xd7\
\x70\xea\x71\x2e\x07\x8f\x53\x14\x4f\xba\x2f\xfc\x9f\x8c\x68\x54\
\x54\x05\x0c\xa3\xfa\x32\x01\x51\x05\x0a\x40\x40\xe0\xfc\xc8\xdb\
\xa8\xce\x54\x60\x9b\x00\xc4\xb1\x97\x02\x82\x00\xaa\x6e\x53\x50\
\xcb\x12\x04\x20\x20\x50\xbd\x0a\x20\xbf\xce\x5d\xf8\x00\x02\x55\
\x0e\xab\x0a\x13\x81\x20\x14\x80\x80\xc0\xac\x02\xa8\x36\x17\xc0\
\xde\x62\x45\x9a\xdd\xa3\xa9\xe8\x3d\xe2\x87\x9c\xbe\xf0\x09\x9a\
\x02\x02\x8b\x72\x94\x07\x4b\xa7\xcc\x9f\xb7\x50\x7c\xc4\x93\x54\
\x7d\x04\x60\x0d\xbc\x61\xe0\xc4\x5b\xb0\x42\x81\xf9\x6f\xe6\x72\
\xb8\xf7\xae\x5b\xb0\xa5\xbd\x15\xd9\x9c\x26\x8c\x47\x60\xd1\x23\
\x14\x08\xe0\xbb\xf7\xfe\x1c\x9d\x27\xfa\x80\xfa\x66\xc0\x1f\x46\
\x7e\x1b\xd2\xac\x0a\x2b\x7b\x19\x3d\x68\xac\x22\x05\x90\x99\x50\
\x90\x1c\x04\xf4\xe0\x02\xef\xa9\xd8\xdc\x1c\xc0\xa7\xdb\x1b\x90\
\x56\x54\x61\x3d\x02\x8b\x1e\x91\x48\x08\x75\x2a\xd9\xfb\x70\xb7\
\xbd\x3b\x69\xa8\x76\x76\xe4\xcf\xb0\xcd\x11\x4b\x37\xd2\x95\x86\
\x00\x24\x6f\x07\x24\x79\x2d\x95\x55\xf3\xde\xf3\x1a\xfc\x50\x4a\
\x76\x1a\x0d\x2b\x02\x02\x8b\x1e\x32\xf5\x71\x96\xa0\x25\xfb\x59\
\xb6\x96\x7d\x92\x92\x4d\x00\x49\x7a\x7c\x90\x9e\x0c\x57\x17\x01\
\x00\x7b\x1d\xcd\xf3\x35\x2a\x4d\xf3\x5c\x84\xc2\xcd\x25\x05\x04\
\x16\x7b\x08\x60\xe1\x3d\xc1\x58\xca\xe7\x61\x2a\x8f\x52\x19\xa9\
\x2a\x02\xc8\xaa\xb9\x4e\x92\x3e\x7f\x42\xd0\xcf\x0e\xa1\xbb\x81\
\x5e\x5a\x4b\x25\x2c\x4c\x45\xa0\x0a\xc0\xa2\xdb\x6c\xc4\xdf\x4f\
\xe5\x31\x28\xb9\x0e\xc3\x34\x67\x4a\x55\x99\x92\x84\x20\x1f\xbe\
\xe7\x4e\xe5\x0f\x0f\x7d\xff\x78\xc0\xeb\xfd\x2d\x34\xfd\x61\x7a\
\xe9\x10\x66\xf7\x4e\x10\x10\xa8\x64\x39\x30\x0a\xcb\x7c\x8a\x1e\
\x3d\x00\x35\xf7\xf7\x07\x7f\xfa\xcd\xd1\xcb\xda\x57\x95\x4c\xea\
\x96\x44\x01\x6c\x5c\xd3\xca\xcb\x5b\xbd\x67\x46\x7e\xff\xd8\x9e\
\x9d\x53\x6a\xae\x86\x94\xc0\x05\xf4\x16\x8b\x09\x88\xe4\x00\x81\
\x0a\x04\x5f\x97\x69\xc0\xd4\x8e\xc1\x34\x1e\x83\xd7\x7f\x90\xed\
\x9c\x7a\xd5\xa6\x76\x7e\x94\x7a\x55\x29\x00\x17\xf7\xdf\xfd\x75\
\x5c\xb6\xb9\x7d\x9c\x64\xd0\xd3\xf4\xf4\x49\x2a\xdd\xc2\x50\x04\
\x2a\x73\xe4\x37\x75\xe8\xb9\x6e\xe4\x32\x7b\x31\xd6\x73\x1c\xe9\
\x31\x93\x05\x04\xb3\x4a\x69\x85\xaf\x5c\xea\x76\xb1\xf7\x44\x43\
\x0f\x95\xed\x60\x51\x51\xe0\x26\x2a\x9b\x60\xaf\x70\x15\x10\xa8\
\x8c\xd1\xdf\xd0\x46\x90\x9d\x7a\x1d\x93\xea\xeb\x50\x92\x19\x44\
\x1a\xec\x65\xd0\x25\x86\x5c\x26\x2d\xc4\x68\xf0\x38\x95\x33\x44\
\x95\x67\x98\x38\xa0\x92\x10\x86\x23\x50\x31\xea\x3f\x97\xee\x83\
\x3e\xdd\x89\x54\x72\x92\x2f\x82\x29\x71\x06\x60\x59\xb8\x00\x73\
\xc0\xa6\x45\x46\xa9\xec\xa1\x06\x1a\x11\x91\x00\x81\xca\x52\x00\
\x6c\x9f\x37\x28\x4e\x1a\x70\xd9\x58\xb7\x54\x5e\xed\xc4\xb6\x49\
\x55\x86\xa9\x4c\x89\x58\xa0\x40\xe5\xc9\x80\xf2\x33\x6a\xb9\xbc\
\xda\x88\xda\x47\xcd\x58\x50\xd3\x6c\xb3\x68\x91\x08\x24\x50\x11\
\x28\x67\x3b\x2e\x39\x01\xb0\x53\x64\x90\x51\xec\x27\x2c\x4d\x32\
\x93\x86\xc7\xd4\x2c\x78\x84\x02\x10\xa8\x60\x30\xbb\xd7\x14\xe7\
\x6c\xc0\x2a\x26\x80\xab\x36\xac\x41\x88\x11\x64\xd0\x4f\x0a\xc0\
\x8b\xf1\x6e\x99\x9f\x1b\x28\x20\x50\xc9\xd8\x48\x76\x1f\x4f\xb4\
\x20\x11\xab\xa9\x6e\x02\x78\x70\xdb\x6d\x45\xcf\x9f\x7f\x6e\x2f\
\xfa\xfa\x07\x84\x85\x08\x54\x34\x1e\xfe\xc1\x1d\xb8\xf2\xaa\xab\
\x4b\x5e\x0f\xa9\xdc\x1a\x46\x21\x69\x24\xc4\xbf\x40\xa5\x83\xbb\
\xbe\x65\x00\x49\xfc\x14\x02\x02\xd5\x0b\x41\x00\x02\x02\x82\x00\
\x04\x04\x04\x04\x01\x08\x08\x08\x08\x02\x10\x10\x10\x10\x04\x20\
\x20\x20\x50\xe1\x90\xcb\xb4\x5e\x92\xd8\x13\x50\xa0\x52\x50\xce\
\x76\x5c\x96\x0a\x40\xd3\xb4\x7e\x55\x55\xd3\xc2\x74\x04\x16\x33\
\xd8\xf9\x37\x64\xcb\x66\x7f\x7f\xbf\xaa\x28\x8a\x9b\xde\x6a\x09\
\x02\x78\xaf\x0a\x49\x12\x26\x26\x26\x76\x8f\x8e\x8e\xbe\xe9\x36\
\xa2\x80\xc0\x62\x84\xd7\xeb\xc5\xcc\xcc\x8c\xbe\x6b\xd7\xae\x77\
\x87\x86\x86\xd8\x66\x37\x6c\xff\x7f\xa3\x9c\x48\x40\x2a\xc7\x46\
\x7b\xed\xb5\xd7\x3a\x0e\x1f\x3e\xfc\xcc\xd4\xd4\xd4\x18\x23\x00\
\x49\x12\xa1\x0a\x81\xc5\x35\xf2\x33\x3b\x26\x15\x6b\x1e\x39\x72\
\x64\x72\xe7\xce\x9d\xfd\x34\xa0\xb1\xad\xbf\xa7\xc1\xf6\x04\x28\
\x23\x02\x28\xbb\x18\x80\x2c\xcb\xd8\xbd\x7b\x37\x6b\xac\xe7\xa8\
\xe3\xb7\xad\x5f\xbf\xfe\x73\xf1\x78\xbc\xce\xe7\xf3\x49\xac\x51\
\x45\x5c\x40\xa0\x9c\x3b\xbe\xe3\xc2\x5a\x4c\xf2\x1f\x3d\x7a\x74\
\x6a\xc7\x8e\x1d\x03\x27\x4e\x9c\x38\xab\xeb\x7a\x3f\xbd\x35\x48\
\x85\x6d\x01\x6e\x0a\x02\x78\x6f\xbf\x49\xdb\xbf\x7f\x7f\x57\x2a\
\x95\xda\xbe\x65\xcb\x96\x99\xcd\x9b\x37\x6f\x6d\x6f\x6f\x6f\x6d\
\x6c\x6c\x0c\x12\x11\x08\x45\x20\x50\x76\x60\x03\x93\x61\x18\xc8\
\x64\x32\xd6\xa9\x53\xa7\x52\x1d\x1d\x1d\xa3\x7b\xf7\xee\x1d\xee\
\xec\xec\x3c\x93\xcd\x66\x4f\xc1\xde\xf7\xf2\xac\x20\x80\xf3\x83\
\x39\x3e\x3e\x3e\xfd\xea\xab\xaf\x1e\xa5\xc6\xf3\x27\x93\x49\xab\
\xaf\xaf\x6f\x73\x5d\x5d\x5d\x63\x38\x1c\x8e\x10\x09\xb0\x7a\xbb\
\xc1\x01\x11\x24\x10\x28\x0b\x02\xa0\x51\x5f\x27\x5b\xd5\x7a\x7b\
\x7b\x93\x07\x0e\x1c\x18\x24\x05\xd0\x4f\xf6\xdb\x4b\x6f\x9f\x74\
\x08\x80\x29\x5b\x55\xb8\x00\xe7\xd1\x9e\x4c\x49\x51\xe3\x4d\x10\
\x09\x1c\xa4\x32\x44\xcf\x5f\xa6\xd2\x4a\x65\x09\x15\xb6\x88\xda\
\x07\x91\xc7\x20\x50\x7e\x76\x6b\x3a\x9d\x7c\x8a\xca\xbb\x54\xd8\
\xe8\xdf\xeb\x3c\x4e\x97\xd3\xe8\x5f\xce\x04\x80\x82\x86\x1c\x87\
\xbd\x61\x68\xd6\x79\xcc\xce\x14\x8c\x50\xf1\x0b\x02\x10\x28\x43\
\x02\x60\x51\x7e\xc5\x21\x80\x61\xa7\xe3\x8f\x39\x9d\x5f\x47\x99\
\x4d\x03\xca\x65\xde\xa0\x2e\x09\x4c\x50\xc9\x38\x0d\x1a\x72\x46\
\x7f\xaf\x90\xff\x02\x65\x6a\xb3\x2e\x09\x64\x9c\xa2\x3a\xaf\x97\
\x5d\x04\xfb\xdf\x02\x0c\x00\x0b\x53\x81\x73\x51\x45\x40\xdb\x00\
\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
"
qt_resource_name = "\
\x00\x04\
\x00\x06\xfa\x5e\
\x00\x69\
\x00\x63\x00\x6f\x00\x6e\
\x00\x07\
\x09\xcb\xb6\x93\
\x00\x62\
\x00\x75\x00\x74\x00\x74\x00\x6f\x00\x6e\x00\x73\
\x00\x03\
\x00\x00\x78\xc3\
\x00\x72\
\x00\x65\x00\x73\
\x00\x03\
\x00\x00\x70\x37\
\x00\x69\
\x00\x6d\x00\x67\
\x00\x1a\
\x0d\x95\x42\xe7\
\x00\x62\
\x00\x75\x00\x74\x00\x74\x00\x6f\x00\x6e\x00\x5f\x00\x62\x00\x61\x00\x63\x00\x6b\x00\x75\x00\x70\x00\x5f\x00\x64\x00\x69\x00\x73\
\x00\x61\x00\x62\x00\x6c\x00\x65\x00\x64\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x13\
\x09\x34\xc1\x67\
\x00\x62\
\x00\x75\x00\x74\x00\x74\x00\x6f\x00\x6e\x00\x5f\x00\x64\x00\x6f\x00\x77\x00\x6e\x00\x6c\x00\x6f\x00\x61\x00\x64\x00\x2e\x00\x70\
\x00\x6e\x00\x67\
\x00\x0f\
\x0d\xf5\x7d\xe7\
\x00\x62\
\x00\x75\x00\x74\x00\x74\x00\x6f\x00\x6e\x00\x5f\x00\x75\x00\x74\x00\x66\x00\x38\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x19\
\x01\x96\x0f\x07\
\x00\x62\
\x00\x75\x00\x74\x00\x74\x00\x6f\x00\x6e\x00\x5f\x00\x61\x00\x70\x00\x70\x00\x6c\x00\x79\x00\x5f\x00\x64\x00\x69\x00\x73\x00\x61\
\x00\x62\x00\x6c\x00\x65\x00\x64\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x1c\
\x0b\x10\x37\x07\
\x00\x62\
\x00\x75\x00\x74\x00\x74\x00\x6f\x00\x6e\x00\x5f\x00\x64\x00\x6f\x00\x77\x00\x6e\x00\x6c\x00\x6f\x00\x61\x00\x64\x00\x5f\x00\x64\
\x00\x69\x00\x73\x00\x61\x00\x62\x00\x6c\x00\x65\x00\x64\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x18\
\x0c\x40\x62\x67\
\x00\x62\
\x00\x75\x00\x74\x00\x74\x00\x6f\x00\x6e\x00\x5f\x00\x65\x00\x78\x00\x69\x00\x74\x00\x5f\x00\x64\x00\x69\x00\x73\x00\x61\x00\x62\
\x00\x6c\x00\x65\x00\x64\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x0f\
\x08\x69\x7b\xe7\
\x00\x62\
\x00\x75\x00\x74\x00\x74\x00\x6f\x00\x6e\x00\x5f\x00\x65\x00\x78\x00\x69\x00\x74\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x18\
\x0c\x12\xc4\x27\
\x00\x62\
\x00\x75\x00\x74\x00\x74\x00\x6f\x00\x6e\x00\x5f\x00\x61\x00\x6e\x00\x73\x00\x69\x00\x5f\x00\x64\x00\x69\x00\x73\x00\x61\x00\x62\
\x00\x6c\x00\x65\x00\x64\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x1b\
\x06\xc3\x72\x67\
\x00\x62\
\x00\x75\x00\x74\x00\x74\x00\x6f\x00\x6e\x00\x5f\x00\x72\x00\x65\x00\x73\x00\x74\x00\x6f\x00\x72\x00\x65\x00\x5f\x00\x64\x00\x69\
\x00\x73\x00\x61\x00\x62\x00\x6c\x00\x65\x00\x64\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x18\
\x0c\x81\x05\x67\
\x00\x62\
\x00\x75\x00\x74\x00\x74\x00\x6f\x00\x6e\x00\x5f\x00\x75\x00\x74\x00\x66\x00\x38\x00\x5f\x00\x64\x00\x69\x00\x73\x00\x61\x00\x62\
\x00\x6c\x00\x65\x00\x64\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x1a\
\x0d\xa7\xd8\x07\
\x00\x62\
\x00\x75\x00\x74\x00\x74\x00\x6f\x00\x6e\x00\x5f\x00\x75\x00\x70\x00\x64\x00\x61\x00\x74\x00\x65\x00\x5f\x00\x64\x00\x69\x00\x73\
\x00\x61\x00\x62\x00\x6c\x00\x65\x00\x64\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x0f\
\x02\xf6\x79\x67\
\x00\x62\
\x00\x75\x00\x74\x00\x74\x00\x6f\x00\x6e\x00\x5f\x00\x61\x00\x6e\x00\x73\x00\x69\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x11\
\x06\x8e\xf4\xc7\
\x00\x62\
\x00\x75\x00\x74\x00\x74\x00\x6f\x00\x6e\x00\x5f\x00\x75\x00\x70\x00\x64\x00\x61\x00\x74\x00\x65\x00\x2e\x00\x70\x00\x6e\x00\x67\
\
\x00\x10\
\x01\xda\x95\x47\
\x00\x62\
\x00\x75\x00\x74\x00\x74\x00\x6f\x00\x6e\x00\x5f\x00\x61\x00\x70\x00\x70\x00\x6c\x00\x79\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x11\
\x0c\xef\x75\x47\
\x00\x62\
\x00\x75\x00\x74\x00\x74\x00\x6f\x00\x6e\x00\x5f\x00\x62\x00\x61\x00\x63\x00\x6b\x00\x75\x00\x70\x00\x2e\x00\x70\x00\x6e\x00\x67\
\
\x00\x12\
\x04\x27\x2f\xe7\
\x00\x62\
\x00\x75\x00\x74\x00\x74\x00\x6f\x00\x6e\x00\x5f\x00\x72\x00\x65\x00\x73\x00\x74\x00\x6f\x00\x72\x00\x65\x00\x2e\x00\x70\x00\x6e\
\x00\x67\
\x00\x05\
\x00\x6f\xa6\x53\
\x00\x69\
\x00\x63\x00\x6f\x00\x6e\x00\x73\
\x00\x14\
\x0d\x75\xab\xe7\
\x00\x75\
\x00\x74\x00\x6c\x00\x5f\x00\x69\x00\x63\x00\x6f\x00\x6e\x00\x40\x00\x32\x00\x35\x00\x36\x00\x78\x00\x32\x00\x35\x00\x36\x00\x2e\
\x00\x70\x00\x6e\x00\x67\
"
qt_resource_struct = "\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x02\x00\x00\x00\x01\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x16\
\x00\x00\x00\x0e\x00\x02\x00\x00\x00\x01\x00\x00\x00\x03\
\x00\x00\x00\x22\x00\x02\x00\x00\x00\x01\x00\x00\x00\x04\
\x00\x00\x00\x2e\x00\x02\x00\x00\x00\x01\x00\x00\x00\x05\
\x00\x00\x00\x0e\x00\x02\x00\x00\x00\x10\x00\x00\x00\x06\
\x00\x00\x00\xc4\x00\x00\x00\x00\x00\x01\x00\x00\x12\x2c\
\x00\x00\x02\xc2\x00\x00\x00\x00\x00\x01\x00\x00\x57\x58\
\x00\x00\x02\x76\x00\x00\x00\x00\x00\x01\x00\x00\x48\xda\
\x00\x00\x03\x10\x00\x00\x00\x00\x00\x01\x00\x00\x62\xfc\
\x00\x00\x02\x9a\x00\x00\x00\x00\x00\x01\x00\x00\x50\x67\
\x00\x00\x01\xca\x00\x00\x00\x00\x00\x01\x00\x00\x33\x9e\
\x00\x00\x01\x70\x00\x00\x00\x00\x00\x01\x00\x00\x25\x54\
\x00\x00\x00\x74\x00\x00\x00\x00\x00\x01\x00\x00\x04\xfc\
\x00\x00\x00\xfc\x00\x00\x00\x00\x00\x01\x00\x00\x18\xef\
\x00\x00\x01\x94\x00\x00\x00\x00\x00\x01\x00\x00\x2c\x15\
\x00\x00\x01\x3a\x00\x00\x00\x00\x00\x01\x00\x00\x1e\x93\
\x00\x00\x02\x06\x00\x00\x00\x00\x00\x01\x00\x00\x3a\x5d\
\x00\x00\x02\xe8\x00\x00\x00\x00\x00\x01\x00\x00\x5e\x00\
\x00\x00\x00\x3a\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\
\x00\x00\x02\x3c\x00\x00\x00\x00\x00\x01\x00\x00\x41\xe9\
\x00\x00\x00\xa0\x00\x00\x00\x00\x00\x01\x00\x00\x0a\xa0\
\x00\x00\x00\x22\x00\x02\x00\x00\x00\x01\x00\x00\x00\x17\
\x00\x00\x00\x2e\x00\x02\x00\x00\x00\x01\x00\x00\x00\x18\
\x00\x00\x03\x3a\x00\x02\x00\x00\x00\x01\x00\x00\x00\x19\
\x00\x00\x03\x4a\x00\x00\x00\x00\x00\x01\x00\x00\x69\xb1\
"
def qInitResources():
    # Register the embedded resource tables (pyrcc-generated byte strings
    # above) with Qt's resource system so files are loadable via ":/..." paths.
    QtCore.qRegisterResourceData(0x01, qt_resource_struct, qt_resource_name, qt_resource_data)
def qCleanupResources():
    # Unregister the same resource data; counterpart to qInitResources().
    QtCore.qUnregisterResourceData(0x01, qt_resource_struct, qt_resource_name, qt_resource_data)
qInitResources()
| gpl-3.0 |
mlperf/training_results_v0.7 | Google/benchmarks/maskrcnn/implementations/maskrcnn-research-TF-tpu-v3-1024/object_detection/argmax_matcher.py | 2 | 9028 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Argmax matcher implementation.
This class takes a similarity matrix and matches columns to rows based on the
maximum value per column. One can specify matched_thresholds and
to prevent columns from matching to rows (generally resulting in a negative
training example) and unmatched_theshold to ignore the match (generally
resulting in neither a positive or negative training example).
This matcher is used in Fast(er)-RCNN.
Note: matchers are used in TargetAssigners. There is a create_target_assigner
factory function for popular implementations.
"""
import tensorflow.compat.v1 as tf
from REDACTED.mask_rcnn.object_detection import matcher
from REDACTED.mask_rcnn.object_detection import shape_utils
class ArgMaxMatcher(matcher.Matcher):
  """Matcher based on highest value.

  This class computes matches from a similarity matrix. Each column is matched
  to a single row.

  To support object detection target assignment this class enables setting both
  matched_threshold (upper threshold) and unmatched_threshold (lower thresholds)
  defining three categories of similarity which define whether examples are
  positive, negative, or ignored:
  (1) similarity >= matched_threshold: Highest similarity. Matched/Positive!
  (2) matched_threshold > similarity >= unmatched_threshold: Medium similarity.
      Depending on negatives_lower_than_unmatched, this is either
      Unmatched/Negative OR Ignore.
  (3) unmatched_threshold > similarity: Lowest similarity. Depending on flag
      negatives_lower_than_unmatched, either Unmatched/Negative OR Ignore.
  For ignored matches this class sets the values in the Match object to -2.
  """

  def __init__(self,
               matched_threshold,
               unmatched_threshold=None,
               negatives_lower_than_unmatched=True,
               force_match_for_each_row=False):
    """Construct ArgMaxMatcher.

    Args:
      matched_threshold: Threshold for positive matches. Positive if
        sim >= matched_threshold, where sim is the maximum value of the
        similarity matrix for a given column. Set to None for no threshold.
      unmatched_threshold: Threshold for negative matches. Negative if
        sim < unmatched_threshold. Defaults to matched_threshold
        when set to None.
      negatives_lower_than_unmatched: Boolean which defaults to True. If True
        then negative matches are the ones below the unmatched_threshold,
        whereas ignored matches are in between the matched and umatched
        threshold. If False, then negative matches are in between the matched
        and unmatched threshold, and everything lower than unmatched is ignored.
      force_match_for_each_row: If True, ensures that each row is matched to
        at least one column (which is not guaranteed otherwise if the
        matched_threshold is high). Defaults to False. See
        argmax_matcher_test.testMatcherForceMatch() for an example.

    Raises:
      ValueError: if unmatched_threshold is set but matched_threshold is not set
        or if unmatched_threshold > matched_threshold.
    """
    # An unmatched threshold without a matched threshold is ambiguous; reject.
    if (matched_threshold is None) and (unmatched_threshold is not None):
      raise ValueError('Need to also define matched_threshold when'
                       'unmatched_threshold is defined')
    self._matched_threshold = matched_threshold
    if unmatched_threshold is None:
      # Collapse the "ignore" band: everything below matched_threshold is
      # then treated as unmatched.
      self._unmatched_threshold = matched_threshold
    else:
      if unmatched_threshold > matched_threshold:
        raise ValueError('unmatched_threshold needs to be smaller or equal'
                         'to matched_threshold')
      self._unmatched_threshold = unmatched_threshold
    if not negatives_lower_than_unmatched:
      # When negatives live *between* the two thresholds, equal thresholds
      # would leave an empty negative band, so that combination is rejected.
      if self._unmatched_threshold == self._matched_threshold:
        raise ValueError('When negatives are in between matched and '
                         'unmatched thresholds, these cannot be of equal '
                         'value. matched: %s, unmatched: %s',
                         self._matched_threshold, self._unmatched_threshold)
    self._force_match_for_each_row = force_match_for_each_row
    self._negatives_lower_than_unmatched = negatives_lower_than_unmatched

  def _match(self, similarity_matrix):
    """Tries to match each column of the similarity matrix to a row.

    Args:
      similarity_matrix: tensor of shape [N, M] representing any similarity
        metric.

    Returns:
      Match object with corresponding matches for each of M columns.
    """

    def _match_when_rows_are_empty():
      """Performs matching when the rows of similarity matrix are empty.

      When the rows are empty, all detections are false positives. So we return
      a tensor of -1's to indicate that the columns do not match to any rows.

      Returns:
        matches: int32 tensor indicating the row each column matches to.
      """
      similarity_matrix_shape = shape_utils.combined_static_and_dynamic_shape(
          similarity_matrix)
      return -1 * tf.ones([similarity_matrix_shape[1]], dtype=tf.int32)

    def _match_when_rows_are_non_empty():
      """Performs matching when the rows of similarity matrix are non empty.

      Returns:
        matches: int32 tensor indicating the row each column matches to.
      """
      # Matches for each column
      matches = tf.argmax(similarity_matrix, 0, output_type=tf.int32)

      # Deal with matched and unmatched threshold
      if self._matched_threshold is not None:
        # Get logical indices of ignored and unmatched columns as tf.int64
        matched_vals = tf.reduce_max(similarity_matrix, 0)
        below_unmatched_threshold = tf.greater(self._unmatched_threshold,
                                               matched_vals)
        between_thresholds = tf.logical_and(
            tf.greater_equal(matched_vals, self._unmatched_threshold),
            tf.greater(self._matched_threshold, matched_vals))

        if self._negatives_lower_than_unmatched:
          # -1 marks unmatched/negative columns; -2 marks ignored columns.
          matches = self._set_values_using_indicator(matches,
                                                     below_unmatched_threshold,
                                                     -1)
          matches = self._set_values_using_indicator(matches,
                                                     between_thresholds,
                                                     -2)
        else:
          # Bands are swapped: below unmatched_threshold is ignored (-2),
          # the in-between band is negative (-1).
          matches = self._set_values_using_indicator(matches,
                                                     below_unmatched_threshold,
                                                     -2)
          matches = self._set_values_using_indicator(matches,
                                                     between_thresholds,
                                                     -1)

      if self._force_match_for_each_row:
        similarity_matrix_shape = shape_utils.combined_static_and_dynamic_shape(
            similarity_matrix)
        # For each row, the column it most prefers; expressed as a one-hot
        # [N, M] indicator so per-column decisions can be derived from it.
        force_match_column_ids = tf.argmax(similarity_matrix, 1,
                                           output_type=tf.int32)
        force_match_column_indicators = tf.one_hot(
            force_match_column_ids, depth=similarity_matrix_shape[1])
        force_match_row_ids = tf.argmax(force_match_column_indicators, 0,
                                        output_type=tf.int32)
        force_match_column_mask = tf.cast(
            tf.reduce_max(force_match_column_indicators, 0), tf.bool)
        # Forced matches override any thresholding decisions made above.
        final_matches = tf.where(force_match_column_mask,
                                 force_match_row_ids, matches)
        return final_matches
      else:
        return matches

    if similarity_matrix.shape.is_fully_defined():
      # Static shape known: pick the branch at graph-construction time.
      if similarity_matrix.shape[0].value == 0:
        return _match_when_rows_are_empty()
      else:
        return _match_when_rows_are_non_empty()
    else:
      # Row count only known at run time: branch dynamically with tf.cond.
      return tf.cond(
          tf.greater(tf.shape(similarity_matrix)[0], 0),
          _match_when_rows_are_non_empty, _match_when_rows_are_empty)

  def _set_values_using_indicator(self, x, indicator, val):
    """Set the indicated fields of x to val.

    Args:
      x: tensor.
      indicator: boolean with same shape as x.
      val: scalar with value to set.

    Returns:
      modified tensor.
    """
    indicator = tf.cast(indicator, x.dtype)
    # x * (1 - indicator) zeroes the selected entries; adding val * indicator
    # then writes the replacement value into exactly those positions.
    return tf.add(tf.multiply(x, 1 - indicator), val * indicator)
| apache-2.0 |
stainsteelcrown/nonsense-story-generator | venv/lib/python2.7/site-packages/pip/_vendor/requests/auth.py | 331 | 6123 | # -*- coding: utf-8 -*-
"""
requests.auth
~~~~~~~~~~~~~
This module contains the authentication handlers for Requests.
"""
import os
import re
import time
import hashlib
from base64 import b64encode
from .compat import urlparse, str
from .cookies import extract_cookies_to_jar
from .utils import parse_dict_header
CONTENT_TYPE_FORM_URLENCODED = 'application/x-www-form-urlencoded'
CONTENT_TYPE_MULTI_PART = 'multipart/form-data'
def _basic_auth_str(username, password):
"""Returns a Basic Auth string."""
return 'Basic ' + b64encode(('%s:%s' % (username, password)).encode('latin1')).strip().decode('latin1')
class AuthBase(object):
    """Base class that all auth implementations derive from.

    Subclasses implement ``__call__`` to mutate the outgoing request
    (typically by setting an ``Authorization`` header) and return it.
    """
    def __call__(self, r):
        raise NotImplementedError('Auth hooks must be callable.')
class HTTPBasicAuth(AuthBase):
    """Attaches HTTP Basic Authentication to the given Request object."""

    def __init__(self, username, password):
        # Credentials are stored verbatim; the header value is computed
        # freshly on every call.
        self.username = username
        self.password = password

    def __call__(self, r):
        auth_header = _basic_auth_str(self.username, self.password)
        r.headers['Authorization'] = auth_header
        return r
class HTTPProxyAuth(HTTPBasicAuth):
    """Attaches HTTP Proxy Authentication to a given Request object."""

    def __call__(self, r):
        # Same basic scheme as HTTPBasicAuth, but the credentials go in the
        # proxy-specific header rather than ``Authorization``.
        header_value = _basic_auth_str(self.username, self.password)
        r.headers['Proxy-Authorization'] = header_value
        return r
class HTTPDigestAuth(AuthBase):
    """Attaches HTTP Digest Authentication to the given Request object.

    Challenge state (parsed ``WWW-Authenticate`` fields, last nonce, nonce
    count, body position) is kept on the instance so repeated requests to
    the same server can reuse the challenge without a 401 round trip.
    """
    def __init__(self, username, password):
        self.username = username
        self.password = password
        # State carried across requests to the same server.
        self.last_nonce = ''
        self.nonce_count = 0
        self.chal = {}
        self.pos = None

    def build_digest_header(self, method, url):
        """Build the ``Authorization: Digest`` header value.

        Returns ``None`` when the server's challenge uses an unsupported
        algorithm or qop, so the caller can fall back gracefully.
        """
        realm = self.chal['realm']
        nonce = self.chal['nonce']
        qop = self.chal.get('qop')
        algorithm = self.chal.get('algorithm')
        opaque = self.chal.get('opaque')

        # BUGFIX: initialize so an unsupported algorithm (e.g. "SHA-256")
        # reaches the explicit `hash_utf8 is None` check below and returns
        # None, instead of raising UnboundLocalError.
        hash_utf8 = None

        if algorithm is None:
            _algorithm = 'MD5'
        else:
            _algorithm = algorithm.upper()
        # lambdas assume digest modules are imported at the top level
        if _algorithm == 'MD5' or _algorithm == 'MD5-SESS':
            def md5_utf8(x):
                if isinstance(x, str):
                    x = x.encode('utf-8')
                return hashlib.md5(x).hexdigest()
            hash_utf8 = md5_utf8
        elif _algorithm == 'SHA':
            def sha_utf8(x):
                if isinstance(x, str):
                    x = x.encode('utf-8')
                return hashlib.sha1(x).hexdigest()
            hash_utf8 = sha_utf8

        KD = lambda s, d: hash_utf8("%s:%s" % (s, d))

        if hash_utf8 is None:
            # Unsupported digest algorithm: cannot build a header.
            return None

        # XXX not implemented yet
        entdig = None
        p_parsed = urlparse(url)
        path = p_parsed.path
        if p_parsed.query:
            path += '?' + p_parsed.query

        # A1/A2 per RFC 2617 section 3.2.2.
        A1 = '%s:%s:%s' % (self.username, realm, self.password)
        A2 = '%s:%s' % (method, path)

        HA1 = hash_utf8(A1)
        HA2 = hash_utf8(A2)

        # nc (nonce count) must increase for each reuse of the same nonce.
        if nonce == self.last_nonce:
            self.nonce_count += 1
        else:
            self.nonce_count = 1
        ncvalue = '%08x' % self.nonce_count

        # Client nonce: hash of counter, server nonce, time and random
        # bytes; only needs to be unpredictable, not secret.
        s = str(self.nonce_count).encode('utf-8')
        s += nonce.encode('utf-8')
        s += time.ctime().encode('utf-8')
        s += os.urandom(8)

        cnonce = (hashlib.sha1(s).hexdigest()[:16])
        noncebit = "%s:%s:%s:%s:%s" % (nonce, ncvalue, cnonce, qop, HA2)
        if _algorithm == 'MD5-SESS':
            HA1 = hash_utf8('%s:%s:%s' % (HA1, nonce, cnonce))

        if qop is None:
            respdig = KD(HA1, "%s:%s" % (nonce, HA2))
        elif qop == 'auth' or 'auth' in qop.split(','):
            respdig = KD(HA1, noncebit)
        else:
            # XXX handle auth-int.
            return None

        self.last_nonce = nonce

        # XXX should the partial digests be encoded too?
        base = 'username="%s", realm="%s", nonce="%s", uri="%s", ' \
               'response="%s"' % (self.username, realm, nonce, path, respdig)
        if opaque:
            base += ', opaque="%s"' % opaque
        if algorithm:
            base += ', algorithm="%s"' % algorithm
        if entdig:
            base += ', digest="%s"' % entdig
        if qop:
            base += ', qop="auth", nc=%s, cnonce="%s"' % (ncvalue, cnonce)

        return 'Digest %s' % (base)

    def handle_401(self, r, **kwargs):
        """Takes the given response and tries digest-auth, if needed."""
        if self.pos is not None:
            # Rewind the file position indicator of the body to where
            # it was to resend the request.
            r.request.body.seek(self.pos)
        num_401_calls = getattr(self, 'num_401_calls', 1)
        s_auth = r.headers.get('www-authenticate', '')

        # Only retry once per request to avoid an infinite 401 loop.
        if 'digest' in s_auth.lower() and num_401_calls < 2:
            setattr(self, 'num_401_calls', num_401_calls + 1)
            pat = re.compile(r'digest ', flags=re.IGNORECASE)
            self.chal = parse_dict_header(pat.sub('', s_auth, count=1))

            # Consume content and release the original connection
            # to allow our new request to reuse the same one.
            r.content
            r.raw.release_conn()
            prep = r.request.copy()
            extract_cookies_to_jar(prep._cookies, r.request, r.raw)
            prep.prepare_cookies(prep._cookies)

            prep.headers['Authorization'] = self.build_digest_header(
                prep.method, prep.url)
            _r = r.connection.send(prep, **kwargs)
            _r.history.append(r)
            _r.request = prep

            return _r

        setattr(self, 'num_401_calls', 1)
        return r

    def __call__(self, r):
        # If we have a saved nonce, skip the 401
        if self.last_nonce:
            r.headers['Authorization'] = self.build_digest_header(r.method, r.url)
        try:
            self.pos = r.body.tell()
        except AttributeError:
            # Body is not seekable (string, bytes or None): nothing to
            # rewind on a retry.
            pass
        r.register_hook('response', self.handle_401)
        return r
| mit |
jamtwister/vncdotool | tests/functional/test_send_events.py | 2 | 2401 | from unittest import TestCase
import sys
import os.path
import pexpect
DATADIR = os.path.join(os.path.dirname(__file__), 'data')
KEYA_VDO = os.path.join(DATADIR, 'samplea.vdo')
KEYB_VDO = os.path.join(DATADIR, 'sampleb.vdo')
class TestSendEvents(TestCase):
    """End-to-end tests: drive the ``vncdo`` CLI against a local ``vncev``
    test server and assert on the events the server logs.

    NOTE(review): requires the external ``vncev`` and ``vncdo`` binaries on
    PATH; these tests spawn real processes via pexpect.
    """
    def setUp(self):
        # Start a throwaway VNC event-echo server on display :33 (port 5933).
        cmd = 'vncev -rfbport 5933 -rfbwait 1000'
        self.server = pexpect.spawn(cmd, logfile=sys.stdout, timeout=2)

    def tearDown(self):
        self.server.terminate(force=True)

    def assertKeyDown(self, key):
        """Expect the server to log a key-down event for keysym `key`."""
        down = '^.*down:\s+\(%s\)\r' % hex(key)
        self.server.expect(down)

    def assertKeyUp(self, key):
        """Expect the server to log a key-up event for keysym `key`."""
        up = '^.*up:\s+\(%s\)\r' % hex(key)
        self.server.expect(up)

    def assertMouse(self, x, y, buttonmask):
        """Expect a pointer event at (x, y) with the given button mask."""
        output = '^.*Ptr: mouse button mask %s at %d,%d' % (hex(buttonmask), x, y)
        self.server.expect(output)

    def assertDisconnect(self):
        disco = 'Client 127.0.0.1 gone'
        self.server.expect(disco)

    def run_vncdo(self, commands):
        """Run the vncdo CLI against the test server and require exit code 0."""
        cmd = 'vncdo -v -s :33 ' + commands
        vnc = pexpect.spawn(cmd, logfile=sys.stdout, timeout=5)
        retval = vnc.wait()
        assert retval == 0, retval

    def test_key_alpha(self):
        self.run_vncdo('key z')
        self.assertKeyDown(ord('z'))
        self.assertKeyUp(ord('z'))
        self.assertDisconnect()

    def test_key_ctrl_a(self):
        # 0xffe3 is the X11 keysym for Control_L.
        self.run_vncdo('key ctrl-a')
        self.assertKeyDown(int(0xffe3))
        self.assertKeyDown(ord('a'))
        self.assertKeyUp(int(0xffe3))
        self.assertKeyUp(ord('a'))
        self.assertDisconnect()

    def test_type(self):
        string = 'abcdefghij'
        self.run_vncdo('type %s' % string)
        for key in string:
            self.assertKeyDown(ord(key))
            self.assertKeyUp(ord(key))
        self.assertDisconnect()

    def test_mouse_move(self):
        # vncev only prints click events, but will include the position
        self.run_vncdo('move 10 20 click 1')
        self.assertMouse(10, 20, 0x1)
        self.assertDisconnect()

    def test_mouse_click_button_two(self):
        self.run_vncdo('click 2')
        self.assertMouse(0, 0, 0x2)
        self.assertDisconnect()

    def test_read_files(self):
        # Interleave literal keys with keys read from .vdo script files.
        self.run_vncdo('key x %s key y %s' % (KEYA_VDO, KEYB_VDO))
        for key in 'xayb':
            self.assertKeyDown(ord(key))
            self.assertKeyUp(ord(key))
| mit |
nburn42/tensorflow | tensorflow/contrib/layers/python/ops/bucketization_op.py | 120 | 1537 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Wrappers for bucketization operations."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.ops import math_ops
def bucketize(input_tensor, boundaries, name=None):
  """Bucketizes `input_tensor` by the given boundaries.

  See bucketize_op.cc for more details.

  Args:
    input_tensor: A `Tensor` which will be bucketized.
    boundaries: A list of floats giving the bucket boundaries. It has to be
      sorted.
    name: A name prefix for the returned tensors (optional).

  Returns:
    A `Tensor` with type int32 which indicates the corresponding bucket for
    each value in `input_tensor`.

  Raises:
    TypeError: If boundaries is not a list.
  """
  # Thin wrapper over the private TF op; the leading-underscore API is
  # intentional here, hence the pylint suppression.
  return math_ops._bucketize(  # pylint: disable=protected-access
      input_tensor, boundaries=boundaries, name=name)
| apache-2.0 |
dannyboi104/SickRage | lib/hachoir_core/benchmark.py | 95 | 6784 | from hachoir_core.tools import humanDurationNanosec
from hachoir_core.i18n import _
from math import floor
from time import time
class BenchmarkError(Exception):
    """
    Error raised while running a benchmark; str(err) yields a readable
    description.
    """
    def __init__(self, message):
        # Prefix the message so benchmark failures are recognizable in logs.
        text = "Benchmark internal error: %s" % message
        Exception.__init__(self, text)
class BenchmarkStat:
    """
    Benchmark statistics: records values and tracks their minimum,
    maximum and sum as they arrive.

    Methods:
    - append(value): record a value
    - getMin(): minimum recorded value
    - getMax(): maximum recorded value
    - getSum(): sum of all recorded values
    - __len__(): number of recorded values
    - __nonzero__(): true when at least one value was recorded
    """
    def __init__(self):
        self._values = []

    def append(self, value):
        self._values.append(value)
        if len(self._values) == 1:
            # First sample initializes the running aggregates.
            self._min = value
            self._max = value
            self._sum = value
        else:
            self._min = min(self._min, value)
            self._max = max(self._max, value)
            self._sum += value

    def __len__(self):
        return len(self._values)

    def __nonzero__(self):
        return bool(self._values)

    def getMin(self):
        return self._min

    def getMax(self):
        return self._max

    def getSum(self):
        return self._sum
class Benchmark:
    """Times a callable repeatedly and prints min/average/max/total stats.

    NOTE(review): this class uses Python 2 syntax (print statements,
    xrange) and will not run unmodified on Python 3.
    """
    def __init__(self, max_time=5.0,
                 min_count=5, max_count=None, progress_time=1.0):
        """
        Constructor:
        - max_time: Maximum wanted duration of the whole benchmark
          (default: 5 seconds, minimum: 1 second).
        - min_count: Minimum number of function calls to get good statistics
          (defaut: 5, minimum: 1).
        - progress_time: Time between each "progress" message
          (default: 1 second, minimum: 250 ms).
        - max_count: Maximum number of function calls (default: no limit).
        - verbose: Is verbose? (default: False)
        - disable_gc: Disable garbage collector? (default: False)
        """
        # Clamp user-supplied settings to sane minimums.
        self.max_time = max(max_time, 1.0)
        self.min_count = max(min_count, 1)
        self.max_count = max_count
        self.progress_time = max(progress_time, 0.25)
        self.verbose = False
        self.disable_gc = False

    def formatTime(self, value):
        """
        Format a time delta to string: use humanDurationNanosec()
        """
        # `value` is in seconds; humanDurationNanosec() expects nanoseconds.
        return humanDurationNanosec(value * 1000000000)

    def displayStat(self, stat):
        """
        Display statistics to stdout:
        - best time (minimum)
        - average time (arithmetic average)
        - worst time (maximum)
        - total time (sum)
        Use arithmetic avertage instead of geometric average because
        geometric fails if any value is zero (returns zero) and also
        because floating point multiplication lose precision with many
        values.
        """
        average = stat.getSum() / len(stat)
        values = (stat.getMin(), average, stat.getMax(), stat.getSum())
        values = tuple(self.formatTime(value) for value in values)
        print _("Benchmark: best=%s average=%s worst=%s total=%s") \
            % values

    def _runOnce(self, func, args, kw):
        # Wall-clock timing of a single call; the call's result is discarded.
        before = time()
        func(*args, **kw)
        after = time()
        return after - before

    def _run(self, func, args, kw):
        """
        Call func(*args, **kw) as many times as needed to get
        good statistics. Algorithm:
        - call the function once
        - compute needed number of calls
        - and then call function N times
        To compute number of calls, parameters are:
        - time of first function call
        - minimum number of calls (min_count attribute)
        - maximum test time (max_time attribute)
        Notice: The function will approximate number of calls.
        """
        # First call of the benchmark
        stat = BenchmarkStat()
        diff = self._runOnce(func, args, kw)
        best = diff
        stat.append(diff)
        total_time = diff
        # Compute needed number of calls
        count = int(floor(self.max_time / diff))
        count = max(count, self.min_count)
        if self.max_count:
            count = min(count, self.max_count)
        # Not other call? Just exit
        if count == 1:
            return stat
        estimate = diff * count
        if self.verbose:
            print _("Run benchmark: %s calls (estimate: %s)") \
                % (count, self.formatTime(estimate))
        # Only show progress for benchmarks expected to take >= 1 second.
        display_progress = self.verbose and (1.0 <= estimate)
        total_count = 1
        while total_count < count:
            # Run benchmark and display each result
            if display_progress:
                print _("Result %s/%s: %s (best: %s)") % \
                    (total_count, count,
                     self.formatTime(diff), self.formatTime(best))
            part = count - total_count
            # Will takes more than one second?
            # If so, shrink the batch so a progress line appears roughly
            # every `progress_time` seconds.
            average = total_time / total_count
            if self.progress_time < part * average:
                part = max( int(self.progress_time / average), 1)
            for index in xrange(part):
                diff = self._runOnce(func, args, kw)
                stat.append(diff)
                total_time += diff
                best = min(diff, best)
            total_count += part
        if display_progress:
            print _("Result %s/%s: %s (best: %s)") % \
                (count, count,
                 self.formatTime(diff), self.formatTime(best))
        return stat

    def validateStat(self, stat):
        """
        Check statistics and raise a BenchmarkError if they are invalid.
        Example of tests: reject empty stat, reject stat with only nul values.
        """
        if not stat:
            raise BenchmarkError("empty statistics")
        if not stat.getSum():
            raise BenchmarkError("nul statistics")

    def run(self, func, *args, **kw):
        """
        Run function func(*args, **kw), validate statistics,
        and display the result on stdout.
        Disable garbage collector if asked too.
        """
        # Disable garbarge collector is needed and if it does exist
        # (Jython 2.2 don't have it for example)
        if self.disable_gc:
            try:
                import gc
            except ImportError:
                self.disable_gc = False
        if self.disable_gc:
            gc_enabled = gc.isenabled()
            gc.disable()
        else:
            gc_enabled = False
        # Run the benchmark
        stat = self._run(func, args, kw)
        # Re-enable GC only if it was enabled before we touched it.
        if gc_enabled:
            gc.enable()
        # Validate and display stats
        self.validateStat(stat)
        self.displayStat(stat)
| gpl-3.0 |
indashnet/InDashNet.Open.UN2000 | android/external/chromium_org/tools/grit/grit/grit_runner_unittest.py | 60 | 1129 | #!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
'''Unit tests for grit.py'''
import os
import sys
if __name__ == '__main__':
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
import unittest
import StringIO
from grit import util
import grit.grit_runner
class OptionArgsUnittest(unittest.TestCase):
  """Checks that grit.grit_runner.Main() parses tool options and arguments."""

  def setUp(self):
    # Capture stdout so the tool's console output can be inspected.
    self.buf = StringIO.StringIO()
    self.old_stdout = sys.stdout
    sys.stdout = self.buf

  def tearDown(self):
    sys.stdout = self.old_stdout

  def testSimple(self):
    grit.grit_runner.Main(['-i',
                           util.PathFromRoot('grit/testdata/simple-input.xml'),
                           '-d', 'test', 'bla', 'voff', 'ga'])
    output = self.buf.getvalue()
    # assertTrue replaces the deprecated failUnless alias (same semantics).
    self.assertTrue(output.count('disconnected'))
    self.assertTrue(output.count("'test'") == 0)  # tool name doesn't occur
    self.assertTrue(output.count('bla'))
    self.assertTrue(output.count('simple-input.xml'))
if __name__ == '__main__':
unittest.main()
| apache-2.0 |
kouk/boto | tests/integration/sdb/__init__.py | 761 | 1104 | # Copyright (c) 2006-2011 Mitch Garnaat http://garnaat.org/
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
| mit |
loco-odoo/localizacion_co | openerp/addons/account_budget/wizard/account_budget_crossovered_report.py | 375 | 2089 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
from openerp.osv import fields, osv
class account_budget_crossvered_report(osv.osv_memory):
    """Wizard collecting a date range for the crossovered budget report.

    NOTE(review): "crossvered" in the class and ``_name`` looks like a
    historical typo; it must be kept because external XML records reference
    the model name.
    """
    _name = "account.budget.crossvered.report"
    _description = "Account Budget crossvered report"
    # Wizard fields: the reporting period boundaries.
    _columns = {
        'date_from': fields.date('Start of period', required=True),
        'date_to': fields.date('End of period', required=True),
    }
    # Defaults: from January 1st of the current year up to today.
    _defaults = {
        'date_from': lambda *a: time.strftime('%Y-01-01'),
        'date_to': lambda *a: time.strftime('%Y-%m-%d'),
    }
    def check_report(self, cr, uid, ids, context=None):
        """Build and return the report action for the selected budgets.

        Reads the wizard values and launches the
        'account_budget.report_crossoveredbudget' report on the active
        crossovered.budget records taken from the context.
        """
        if context is None:
            context = {}
        data = self.read(cr, uid, ids, context=context)[0]
        datas = {
            'ids': context.get('active_ids', []),
            'model': 'crossovered.budget',
            'form': data
        }
        # The report engine expects the ids and report style inside 'form'.
        datas['form']['ids'] = datas['ids']
        datas['form']['report'] = 'analytic-full'
        return self.pool['report'].get_action(cr, uid, [], 'account_budget.report_crossoveredbudget', data=datas, context=context)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
mne-tools/mne-tools.github.io | 0.22/_downloads/d5b65973980e001b97f01d591c44af9e/plot_evoked_whitening.py | 10 | 3103 | """
=============================================
Whitening evoked data with a noise covariance
=============================================
Evoked data are loaded and then whitened using a given noise covariance
matrix. It's an excellent quality check to see if baseline signals match
the assumption of Gaussian white noise during the baseline period.
Covariance estimation and diagnostic plots are based on [1]_.
References
----------
.. [1] Engemann D. and Gramfort A. (2015) Automated model selection in
covariance estimation and spatial whitening of MEG and EEG signals, vol.
108, 328-342, NeuroImage.
"""
# Authors: Alexandre Gramfort <alexandre.gramfort@inria.fr>
# Denis A. Engemann <denis.engemann@gmail.com>
#
# License: BSD (3-clause)
import mne
from mne import io
from mne.datasets import sample
from mne.cov import compute_covariance

print(__doc__)

###############################################################################
# Set parameters
data_path = sample.data_path()
raw_fname = data_path + '/MEG/sample/sample_audvis_filt-0-40_raw.fif'
event_fname = data_path + '/MEG/sample/sample_audvis_filt-0-40_raw-eve.fif'

raw = io.read_raw_fif(raw_fname, preload=True)
# Band-pass 1-40 Hz with an FIR filter before epoching.
raw.filter(1, 40, n_jobs=1, fir_design='firwin')
raw.info['bads'] += ['MEG 2443']  # bads + 1 more
events = mne.read_events(event_fname)

# let's look at rare events, button presses
event_id, tmin, tmax = 2, -0.2, 0.5
# Peak-to-peak rejection thresholds per channel type.
reject = dict(mag=4e-12, grad=4000e-13, eeg=80e-6)

epochs = mne.Epochs(raw, events, event_id, tmin, tmax, picks=('meg', 'eeg'),
                    baseline=None, reject=reject, preload=True)

# Uncomment next line to use fewer samples and study regularization effects
# epochs = epochs[:20]  # For your data, use as many samples as you can!

###############################################################################
# Compute covariance using automated regularization
method_params = dict(diagonal_fixed=dict(mag=0.01, grad=0.01, eeg=0.01))
# Baseline window (tmin=None..tmax=0) is used as the "noise" segment.
noise_covs = compute_covariance(epochs, tmin=None, tmax=0, method='auto',
                                return_estimators=True, verbose=True, n_jobs=1,
                                projs=None, rank=None,
                                method_params=method_params)

# With "return_estimator=True" all estimated covariances sorted
# by log-likelihood are returned.
print('Covariance estimates sorted from best to worst')
for c in noise_covs:
    print("%s : %s" % (c['method'], c['loglik']))

###############################################################################
# Show the evoked data:
evoked = epochs.average()
evoked.plot(time_unit='s')  # plot evoked response

###############################################################################
# We can then show whitening for our various noise covariance estimates.
#
# Here we should look to see if baseline signals match the
# assumption of Gaussian white noise. we expect values centered at
# 0 within 2 standard deviations for 95% of the time points.
#
# For the Global field power we expect a value of 1.
evoked.plot_white(noise_covs, time_unit='s')
| bsd-3-clause |
hoatle/odoo | addons/sale/wizard/__init__.py | 444 | 1129 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import sale_make_invoice
import sale_line_invoice
import sale_make_invoice_advance
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
srinia6/RackHD | test/tests/rackhd20/test_rackhd20_api_lookups.py | 13 | 3840 | '''
Copyright 2016, EMC, Inc.
Author(s):
George Paulos
'''
import fit_path # NOQA: unused import
import os
import sys
import subprocess
import fit_common
# Select test group here using @attr
from nose.plugins.attrib import attr
@attr(all=True, regression=True, smoke=True)
class rackhd20_api_lookups(fit_common.unittest.TestCase):
    """Functional tests for the RackHD 2.0 ``/api/2.0/lookups`` endpoints.

    NOTE(review): these tests talk to a live RackHD appliance via
    ``fit_common.rackhdapi`` and use Python 2 print statements.
    """
    def setUp(self):
        # delete any instance of test lookup
        api_data = fit_common.rackhdapi("/api/2.0/lookups")
        for item in api_data['json']:
            if item['macAddress'] == "00:0a:0a:0a:0a:0a":
                fit_common.rackhdapi("/api/2.0/lookups/" + item['id'], action="delete")

    def test_api_20_lookups_ID(self):
        """Every lookup listed by the API can also be fetched by its id."""
        api_data = fit_common.rackhdapi("/api/2.0/lookups")
        self.assertEqual(api_data['status'], 200, 'Incorrect HTTP return code, expected 200, got:' + str(api_data['status']))
        for item in api_data['json']:
            self.assertEqual(fit_common.rackhdapi("/api/2.0/lookups/" + item['id'])
                             ['status'], 200, 'Incorrect HTTP return code, expected 200, got:' + str(api_data['status']))

    # this test cross-references node MAC addresses to lookup tables
    def test_api_20_lookups_cross_reference(self):
        """Cross-check each compute node's MAC identifiers against lookups."""
        nodecatalog = fit_common.rackhdapi("/api/2.0/nodes")['json']
        lookuptable = fit_common.rackhdapi("/api/2.0/lookups")['json']
        # Errors are accumulated into a string so all mismatches are
        # reported in one assertion at the end.
        errorlist = ""
        for node in nodecatalog:
            # get list of compute nodes with sku
            if node['type'] == "compute" and 'sku' in node and 'identifiers' in node:
                # find node entry mac addresses
                for macaddr in node['identifiers']:
                    # find mac address in lookup table
                    for lookupid in lookuptable:
                        #verify node ID for mac address
                        if macaddr in lookupid['macAddress']:
                            if fit_common.VERBOSITY >= 2:
                                print "*** Checking Node ID: " + node['id'] + " MAC: " + macaddr
                            if 'node' not in lookupid:
                                errorlist = errorlist + "Missing node ID: " + node['id'] + " MAC: " + macaddr + "\n"
                            # NOTE(review): when 'node' is missing the next
                            # line still indexes lookupid['node'] and would
                            # raise KeyError — confirm intended behavior.
                            if node['id'] != lookupid['node']:
                                errorlist = errorlist + "Wrong node in lookup table ID: " + lookupid['id'] + "\n"
        if errorlist != "":
            print "**** Lookup Errors:"
            print errorlist
        self.assertEqual(errorlist, "", "Errors in lookup table detected.")

    def test_api_20_lookups_post_get_delete(self):
        """POST a lookup, GET it back, verify its fields, then DELETE it."""
        node = fit_common.node_select()[0]
        data_payload = {
            "macAddress": "00:0a:0a:0a:0a:0a",
            "ipAddress": "128.128.128.128",
            "node": node
        }
        api_data = fit_common.rackhdapi("/api/2.0/lookups", action="post", payload=data_payload)
        self.assertEqual(api_data['status'], 201, 'Incorrect HTTP return code, expected 201, got:' + str(api_data['status']))
        lookup_id = api_data['json']['id']
        api_data = fit_common.rackhdapi("/api/2.0/lookups/" + lookup_id)
        self.assertEqual(api_data['status'], 200, 'Incorrect HTTP return code, expected 200, got:' + str(api_data['status']))
        self.assertEqual(api_data['json']['macAddress'], "00:0a:0a:0a:0a:0a", "Bad lookup MAC Address")
        self.assertEqual(api_data['json']['ipAddress'], "128.128.128.128", "Bad lookup IP Address")
        self.assertEqual(api_data['json']['node'], node, "Bad lookup node ID")
        api_data = fit_common.rackhdapi("/api/2.0/lookups/" + lookup_id, action="delete")
        self.assertEqual(api_data['status'], 204, 'Incorrect HTTP return code, expected 204, got:' + str(api_data['status']))
if __name__ == '__main__':
fit_common.unittest.main()
| apache-2.0 |
nirajkvinit/python3-study | shreya/venv/lib/python3.5/site-packages/pip/_vendor/requests/packages/urllib3/connection.py | 511 | 11617 | from __future__ import absolute_import
import datetime
import logging
import os
import sys
import socket
from socket import error as SocketError, timeout as SocketTimeout
import warnings
from .packages import six
try: # Python 3
from http.client import HTTPConnection as _HTTPConnection
from http.client import HTTPException # noqa: unused in this module
except ImportError:
from httplib import HTTPConnection as _HTTPConnection
from httplib import HTTPException # noqa: unused in this module
try: # Compiled with SSL?
import ssl
BaseSSLError = ssl.SSLError
except (ImportError, AttributeError): # Platform-specific: No SSL.
ssl = None
class BaseSSLError(BaseException):
pass
try: # Python 3:
# Not a no-op, we're adding this to the namespace so it can be imported.
ConnectionError = ConnectionError
except NameError: # Python 2:
class ConnectionError(Exception):
pass
from .exceptions import (
NewConnectionError,
ConnectTimeoutError,
SubjectAltNameWarning,
SystemTimeWarning,
)
from .packages.ssl_match_hostname import match_hostname, CertificateError
from .util.ssl_ import (
resolve_cert_reqs,
resolve_ssl_version,
ssl_wrap_socket,
assert_fingerprint,
)
from .util import connection
from ._collections import HTTPHeaderDict
log = logging.getLogger(__name__)
port_by_scheme = {
'http': 80,
'https': 443,
}
RECENT_DATE = datetime.date(2014, 1, 1)
class DummyConnection(object):
    """Used to detect a failed ConnectionCls import.

    Stands in for a real connection class when its import failed, so the
    failure surfaces at use time rather than import time.
    """
    pass
class HTTPConnection(_HTTPConnection, object):
    """
    Based on httplib.HTTPConnection but provides an extra constructor
    backwards-compatibility layer between older and newer Pythons.

    Additional keyword parameters are used to configure attributes of the connection.
    Accepted parameters include:

      - ``strict``: See the documentation on :class:`urllib3.connectionpool.HTTPConnectionPool`
      - ``source_address``: Set the source address for the current connection.

        .. note:: This is ignored for Python 2.6. It is only applied for 2.7 and 3.x

      - ``socket_options``: Set specific options on the underlying socket. If not specified, then
        defaults are loaded from ``HTTPConnection.default_socket_options`` which includes disabling
        Nagle's algorithm (sets TCP_NODELAY to 1) unless the connection is behind a proxy.

        For example, if you wish to enable TCP Keep Alive in addition to the defaults,
        you might pass::

            HTTPConnection.default_socket_options + [
                (socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1),
            ]

        Or you may want to disable the defaults by passing an empty list (e.g., ``[]``).
    """

    default_port = port_by_scheme['http']

    #: Disable Nagle's algorithm by default.
    #: ``[(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)]``
    default_socket_options = [(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)]

    #: Whether this connection verifies the host's certificate.
    is_verified = False

    def __init__(self, *args, **kw):
        if six.PY3:  # Python 3
            kw.pop('strict', None)

        # Pre-set source_address in case we have an older Python like 2.6.
        self.source_address = kw.get('source_address')

        if sys.version_info < (2, 7):  # Python 2.6
            # _HTTPConnection on Python 2.6 will balk at this keyword arg, but
            # not newer versions. We can still use it when creating a
            # connection though, so we pop it *after* we have saved it as
            # self.source_address.
            kw.pop('source_address', None)

        #: The socket options provided by the user. If no options are
        #: provided, we use the default options.
        self.socket_options = kw.pop('socket_options', self.default_socket_options)

        # Superclass also sets self.source_address in Python 2.7+.
        _HTTPConnection.__init__(self, *args, **kw)

    def _new_conn(self):
        """ Establish a socket connection and set nodelay settings on it.

        :return: New socket connection.
        """
        extra_kw = {}
        if self.source_address:
            extra_kw['source_address'] = self.source_address

        if self.socket_options:
            extra_kw['socket_options'] = self.socket_options

        try:
            conn = connection.create_connection(
                (self.host, self.port), self.timeout, **extra_kw)

        except SocketTimeout as e:
            # Translate socket-level timeouts into urllib3's exception type.
            raise ConnectTimeoutError(
                self, "Connection to %s timed out. (connect timeout=%s)" %
                (self.host, self.timeout))

        except SocketError as e:
            raise NewConnectionError(
                self, "Failed to establish a new connection: %s" % e)

        return conn

    def _prepare_conn(self, conn):
        """Adopt `conn` as this connection's socket, tunneling if needed."""
        self.sock = conn
        # the _tunnel_host attribute was added in python 2.6.3 (via
        # http://hg.python.org/cpython/rev/0f57b30a152f) so pythons 2.6(0-2) do
        # not have them.
        if getattr(self, '_tunnel_host', None):
            # TODO: Fix tunnel so it doesn't depend on self.sock state.
            self._tunnel()
            # Mark this connection as not reusable
            self.auto_open = 0

    def connect(self):
        conn = self._new_conn()
        self._prepare_conn(conn)

    def request_chunked(self, method, url, body=None, headers=None):
        """
        Alternative to the common request method, which sends the
        body with chunked encoding and not as one block
        """
        headers = HTTPHeaderDict(headers if headers is not None else {})
        skip_accept_encoding = 'accept-encoding' in headers
        self.putrequest(method, url, skip_accept_encoding=skip_accept_encoding)
        for header, value in headers.items():
            self.putheader(header, value)
        if 'transfer-encoding' not in headers:
            self.putheader('Transfer-Encoding', 'chunked')
        self.endheaders()

        if body is not None:
            stringish_types = six.string_types + (six.binary_type,)
            if isinstance(body, stringish_types):
                # A single string/bytes body is sent as one chunk.
                body = (body,)
            for chunk in body:
                if not chunk:
                    continue
                if not isinstance(chunk, six.binary_type):
                    chunk = chunk.encode('utf8')
                # Each chunk: hex length, CRLF, payload, CRLF (RFC 7230 4.1).
                len_str = hex(len(chunk))[2:]
                self.send(len_str.encode('utf-8'))
                self.send(b'\r\n')
                self.send(chunk)
                self.send(b'\r\n')

        # After the if clause, to always have a closed body
        self.send(b'0\r\n\r\n')
class HTTPSConnection(HTTPConnection):
    """HTTPConnection wrapped in TLS.

    NOTE: this class performs NO certificate verification —
    ``ssl.wrap_socket`` below is called without ``cert_reqs``/``ca_certs``;
    use ``VerifiedHTTPSConnection`` for verified connections.
    """
    default_port = port_by_scheme['https']

    def __init__(self, host, port=None, key_file=None, cert_file=None,
                 strict=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, **kw):

        HTTPConnection.__init__(self, host, port, strict=strict,
                                timeout=timeout, **kw)

        self.key_file = key_file
        self.cert_file = cert_file

        # Required property for Google AppEngine 1.9.0 which otherwise causes
        # HTTPS requests to go out as HTTP. (See Issue #356)
        self._protocol = 'https'

    def connect(self):
        conn = self._new_conn()
        self._prepare_conn(conn)
        self.sock = ssl.wrap_socket(conn, self.key_file, self.cert_file)
class VerifiedHTTPSConnection(HTTPSConnection):
    """
    Based on httplib.HTTPSConnection but wraps the socket with
    SSL certification.
    """
    cert_reqs = None
    ca_certs = None
    ca_cert_dir = None
    ssl_version = None
    assert_fingerprint = None
    def set_cert(self, key_file=None, cert_file=None,
                 cert_reqs=None, ca_certs=None,
                 assert_hostname=None, assert_fingerprint=None,
                 ca_cert_dir=None):
        # If the caller supplied CAs to trust but no explicit policy, default
        # to requiring a certificate from the peer.
        if (ca_certs or ca_cert_dir) and cert_reqs is None:
            cert_reqs = 'CERT_REQUIRED'
        self.key_file = key_file
        self.cert_file = cert_file
        self.cert_reqs = cert_reqs
        self.assert_hostname = assert_hostname
        self.assert_fingerprint = assert_fingerprint
        # Expand '~' so home-relative CA paths work.
        self.ca_certs = ca_certs and os.path.expanduser(ca_certs)
        self.ca_cert_dir = ca_cert_dir and os.path.expanduser(ca_cert_dir)
    def connect(self):
        # Add certificate verification
        conn = self._new_conn()
        resolved_cert_reqs = resolve_cert_reqs(self.cert_reqs)
        resolved_ssl_version = resolve_ssl_version(self.ssl_version)
        hostname = self.host
        if getattr(self, '_tunnel_host', None):
            # _tunnel_host was added in Python 2.6.3
            # (See: http://hg.python.org/cpython/rev/0f57b30a152f)
            self.sock = conn
            # Calls self._set_hostport(), so self.host is
            # self._tunnel_host below.
            self._tunnel()
            # Mark this connection as not reusable
            self.auto_open = 0
            # Override the host with the one we're requesting data from.
            hostname = self._tunnel_host
        # A badly wrong system clock makes certificate validity checks fail;
        # warn up front so the resulting SSL errors are explicable.
        is_time_off = datetime.date.today() < RECENT_DATE
        if is_time_off:
            warnings.warn((
                'System time is way off (before {0}). This will probably '
                'lead to SSL verification errors').format(RECENT_DATE),
                SystemTimeWarning
            )
        # Wrap socket using verification with the root certs in
        # trusted_root_certs
        self.sock = ssl_wrap_socket(conn, self.key_file, self.cert_file,
                                    cert_reqs=resolved_cert_reqs,
                                    ca_certs=self.ca_certs,
                                    ca_cert_dir=self.ca_cert_dir,
                                    server_hostname=hostname,
                                    ssl_version=resolved_ssl_version)
        if self.assert_fingerprint:
            # Fingerprint pinning takes precedence over hostname matching.
            assert_fingerprint(self.sock.getpeercert(binary_form=True),
                               self.assert_fingerprint)
        elif resolved_cert_reqs != ssl.CERT_NONE \
                and self.assert_hostname is not False:
            cert = self.sock.getpeercert()
            if not cert.get('subjectAltName', ()):
                warnings.warn((
                    'Certificate for {0} has no `subjectAltName`, falling back to check for a '
                    '`commonName` for now. This feature is being removed by major browsers and '
                    'deprecated by RFC 2818. (See https://github.com/shazow/urllib3/issues/497 '
                    'for details.)'.format(hostname)),
                    SubjectAltNameWarning
                )
            _match_hostname(cert, self.assert_hostname or hostname)
        # Verified only if certs were required or a fingerprint was checked.
        self.is_verified = (resolved_cert_reqs == ssl.CERT_REQUIRED or
                            self.assert_fingerprint is not None)
def _match_hostname(cert, asserted_hostname):
    """Check *cert* against *asserted_hostname*; log and re-raise mismatches."""
    try:
        match_hostname(cert, asserted_hostname)
    except CertificateError as err:
        log.error(
            'Certificate did not match expected hostname: %s. '
            'Certificate: %s',
            asserted_hostname,
            cert,
        )
        # Expose the offending certificate on the exception so client code
        # catching it can inspect the peer cert if it wants to.
        err._peer_cert = cert
        raise
if ssl:
    # Make a copy for testing.
    UnverifiedHTTPSConnection = HTTPSConnection
    # With the ssl module available, the verified connection is the default.
    HTTPSConnection = VerifiedHTTPSConnection
else:
    # No ssl support compiled in: any HTTPS attempt should fail loudly.
    HTTPSConnection = DummyConnection
| mit |
jballanc/openmicroscopy | components/tools/OmeroWeb/omeroweb/webclient/tests/seleniumtests.py | 3 | 5109 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
#
# Copyright (C) 2011 University of Dundee & Open Microscopy Environment.
# All rights reserved.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from omeroweb.webgateway.tests.seleniumbase import SeleniumTestBase, Utils
from omero.gateway.scripts import dbhelpers
from random import random
import sys
class WebClientTestBase (SeleniumTestBase):
    # Shared helpers for webclient Selenium suites: form login/logout plus an
    # (experimental) CLI image-import utility.
    def login (self, u, p):
        # Log in via the webclient form, dropping any existing session first.
        sel = self.selenium
        if self.selenium.is_element_present('link=Log out'):
            self.logout()
        sel.open("/webclient/login")
        sel.type("id_username", u)
        sel.type("id_password", p)
        sel.click("//input[@value='Connect']")
    def logout (self):
        # Logging out lands back on the login page; wait for its Connect button.
        self.selenium.open("/webclient/logout")
        self.selenium.wait_for_page_to_load("30000")
        self.waitForElementPresence("//input[@value='Connect']")
    def import_image(self, filename = None):
        """
        This code from OmeroPy/tests/integration/library.py
        TODO: Trying to find a way to do import from here, but no luck yet.
        """
        # NOTE(review): 'subprocess' and 'path' are not imported in this
        # module, so this helper raises NameError if actually called --
        # confirm whether it is still meant to be used.
        #server = self.client.getProperty("omero.host")
        #port = self.client.getProperty("omero.port")
        #key = self.client.getSessionId()
        server = 'localhost'
        port = '4064'
        key = ''
        if filename is None:
            filename = self.OmeroPy / ".." / ".." / ".." / "components" / "common" / "test" / "tinyTest.d3d.dv"
        # Search up until we find "OmeroPy"
        dist_dir = self.OmeroPy / ".." / ".." / ".." / "dist"
        args = [sys.executable]
        args.append(str(path(".") / "bin" / "omero"))
        args.extend(["-s", server, "-k", key, "-p", port, "import", filename])
        popen = subprocess.Popen(args, cwd=str(dist_dir), stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        out, err = popen.communicate()
        rc = popen.wait()
        if rc != 0:
            raise Exception("import failed: [%r] %s\n%s" % (args, rc, err))
        pix_ids = []
        # Parse the importer's stdout: lines that are bare numbers are IDs.
        for x in out.split("\n"):
            if x and x.find("Created") < 0 and x.find("#") < 0:
                try: # if the line has an image ID...
                    imageId = str(long(x.strip()))
                    pix_ids.append(imageId)
                except: pass
        return pix_ids
class WebClientTests (WebClientTestBase):
    from omero.gateway.scripts import dbhelpers
    def setUp(self):
        # Every test starts from a fresh, logged-in webclient session.
        super(WebClientTests, self).setUp()
        #dbhelpers.refreshConfig()
        #user = dbhelpers.ROOT.name
        #password = dbhelpers.ROOT.passwd
        #print user, password # seems to always be 'root', 'ome'
        # NOTE(review): hard-coded test credentials -- confirm the account
        # exists in the target server before running this suite.
        self.login('will', 'ome')
    def testMetadata (self):
        """
        Displays the metadata page for an image.
        """
        #print "testMetadata"
        sel = self.selenium
        # NOTE(review): image id 4183 is assumed to exist on the test server.
        sel.open("/webclient/metadata_details/image/4183")
        #sel.click("link=Metadata") # Making metadata 'visible' to user is unecessary for tests below
        self.assertEqual("480 x 480 x 46 x 1", sel.get_table("//div[@id='metadata_tab']/table[2].0.1"))
        # Check channel names...
        self.failUnless(sel.is_text_present("DAPI")) # anywhere on page
        # more specific (too fragile?)
        self.assertEqual("DAPI", sel.get_text("//div[@id='metadata_tab']/h1[5]/span"))
        self.assertEqual("FITC", sel.get_text("//div[@id='metadata_tab']/h1[6]/span"))
        self.assertEqual("RD-TR-PE", sel.get_text("//div[@id='metadata_tab']/h1[7]/span"))
        self.assertEqual("CY-5", sel.get_text("//div[@id='metadata_tab']/h1[8]/span"))
        # check value of Channel inputs.
        self.assertEqual("DAPI", sel.get_value("//div[@id='metadata_tab']/div[4]/table/tbody/tr[1]/td[2]/input")) # Name
        self.assertEqual("360", sel.get_value("//div[@id='metadata_tab']/div[4]/table/tbody/tr[2]/td[2]/input")) # Excitation
        self.assertEqual("457", sel.get_value("//div[@id='metadata_tab']/div[4]/table/tbody/tr[3]/td[2]/input")) # Excitation
        # using id='id_name' gets us the FIRST element with that id (currently 1 per channel)
        self.assertEqual("DAPI", sel.get_value("//input[@id='id_name']"))
    def tearDown(self):
        # Always release the session so later suites start clean.
        self.logout()
        super(WebClientTests, self).tearDown()
if __name__ == "__main__":
    # NOTE(review): the suite key is 'webadmin' although these are webclient
    # tests -- confirm this is intentional (possibly copied from webadmin).
    Utils.runAsScript('webadmin')
| gpl-2.0 |
sestrella/ansible | lib/ansible/modules/network/fortios/fortios_user_tacacsplus.py | 13 | 12957 | #!/usr/bin/python
from __future__ import (absolute_import, division, print_function)
# Copyright 2019 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
__metaclass__ = type
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.1'}
DOCUMENTATION = '''
---
module: fortios_user_tacacsplus
short_description: Configure TACACS+ server entries in Fortinet's FortiOS and FortiGate.
description:
- This module is able to configure a FortiGate or FortiOS (FOS) device by allowing the
user to set and modify user feature and tacacsplus category.
Examples include all parameters and values need to be adjusted to datasources before usage.
Tested with FOS v6.0.5
version_added: "2.8"
author:
- Miguel Angel Munoz (@mamunozgonzalez)
- Nicolas Thomas (@thomnico)
notes:
- Requires fortiosapi library developed by Fortinet
- Run as a local_action in your playbook
requirements:
- fortiosapi>=0.9.8
options:
host:
description:
- FortiOS or FortiGate IP address.
type: str
required: false
username:
description:
- FortiOS or FortiGate username.
type: str
required: false
password:
description:
- FortiOS or FortiGate password.
type: str
default: ""
vdom:
description:
- Virtual domain, among those defined previously. A vdom is a
virtual instance of the FortiGate that can be configured and
used as a different unit.
type: str
default: root
https:
description:
- Indicates if the requests towards FortiGate must use HTTPS protocol.
type: bool
default: true
ssl_verify:
description:
- Ensures FortiGate certificate must be verified by a proper CA.
type: bool
default: true
version_added: 2.9
state:
description:
- Indicates whether to create or remove the object.
This attribute was present already in previous version in a deeper level.
It has been moved out to this outer level.
type: str
required: false
choices:
- present
- absent
version_added: 2.9
user_tacacsplus:
description:
- Configure TACACS+ server entries.
default: null
type: dict
suboptions:
state:
description:
- B(Deprecated)
- Starting with Ansible 2.9 we recommend using the top-level 'state' parameter.
- HORIZONTALLINE
- Indicates whether to create or remove the object.
type: str
required: false
choices:
- present
- absent
authen_type:
description:
- Allowed authentication protocols/methods.
type: str
choices:
- mschap
- chap
- pap
- ascii
- auto
authorization:
description:
- Enable/disable TACACS+ authorization.
type: str
choices:
- enable
- disable
key:
description:
- Key to access the primary server.
type: str
name:
description:
- TACACS+ server entry name.
required: true
type: str
port:
description:
- Port number of the TACACS+ server.
type: int
secondary_key:
description:
- Key to access the secondary server.
type: str
secondary_server:
description:
- Secondary TACACS+ server CN domain name or IP address.
type: str
server:
description:
- Primary TACACS+ server CN domain name or IP address.
type: str
source_ip:
description:
- source IP for communications to TACACS+ server.
type: str
tertiary_key:
description:
- Key to access the tertiary server.
type: str
tertiary_server:
description:
- Tertiary TACACS+ server CN domain name or IP address.
type: str
'''
EXAMPLES = '''
- hosts: localhost
vars:
host: "192.168.122.40"
username: "admin"
password: ""
vdom: "root"
ssl_verify: "False"
tasks:
- name: Configure TACACS+ server entries.
fortios_user_tacacsplus:
host: "{{ host }}"
username: "{{ username }}"
password: "{{ password }}"
vdom: "{{ vdom }}"
https: "False"
state: "present"
user_tacacsplus:
authen_type: "mschap"
authorization: "enable"
key: "<your_own_value>"
name: "default_name_6"
port: "7"
secondary_key: "<your_own_value>"
secondary_server: "<your_own_value>"
server: "192.168.100.40"
source_ip: "84.230.14.43"
tertiary_key: "<your_own_value>"
tertiary_server: "<your_own_value>"
'''
RETURN = '''
build:
description: Build number of the fortigate image
returned: always
type: str
sample: '1547'
http_method:
description: Last method used to provision the content into FortiGate
returned: always
type: str
sample: 'PUT'
http_status:
description: Last result given by FortiGate on last operation applied
returned: always
type: str
sample: "200"
mkey:
description: Master key (id) used in the last call to FortiGate
returned: success
type: str
sample: "id"
name:
description: Name of the table used to fulfill the request
returned: always
type: str
sample: "urlfilter"
path:
description: Path of the table used to fulfill the request
returned: always
type: str
sample: "webfilter"
revision:
description: Internal revision number
returned: always
type: str
sample: "17.0.2.10658"
serial:
description: Serial number of the unit
returned: always
type: str
sample: "FGVMEVYYQT3AB5352"
status:
description: Indication of the operation's result
returned: always
type: str
sample: "success"
vdom:
description: Virtual domain used
returned: always
type: str
sample: "root"
version:
description: Version of the FortiGate
returned: always
type: str
sample: "v5.6.3"
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.connection import Connection
from ansible.module_utils.network.fortios.fortios import FortiOSHandler
from ansible.module_utils.network.fortimanager.common import FAIL_SOCKET_MSG
def login(data, fos):
    """Open a legacy (fortiosapi) session using the module parameters in *data*."""
    host = data['host']
    username = data['username']
    password = data['password']
    ssl_verify = data['ssl_verify']
    fos.debug('on')
    # HTTPS stays enabled unless the caller explicitly turned it off.
    https_disabled = 'https' in data and not data['https']
    fos.https('off' if https_disabled else 'on')
    fos.login(host, username, password, verify=ssl_verify)
def filter_user_tacacsplus_data(json):
    """Project *json* onto the recognized TACACS+ options, dropping None values."""
    option_list = ['authen_type', 'authorization', 'key',
                   'name', 'port', 'secondary_key',
                   'secondary_server', 'server', 'source_ip',
                   'tertiary_key', 'tertiary_server']
    # Keep only known options that were actually provided by the caller.
    return dict((option, json[option])
                for option in option_list
                if option in json and json[option] is not None)
def underscore_to_hyphen(data):
    """Recursively rewrite dict keys from snake_case to hyphen-case.

    FortiOS API field names use hyphens while Ansible argument names use
    underscores; this converts nested dicts -- including dicts nested inside
    lists -- before the payload is sent to the device.
    """
    if isinstance(data, list):
        # Fix: write the converted element back into the list. The previous
        # code rebound the loop variable only, so dicts nested inside lists
        # silently kept their underscored keys.
        for index, elem in enumerate(data):
            data[index] = underscore_to_hyphen(elem)
    elif isinstance(data, dict):
        new_data = {}
        for k, v in data.items():
            new_data[k.replace('_', '-')] = underscore_to_hyphen(v)
        data = new_data
    return data
def user_tacacsplus(data, fos):
    """Create/update or delete a user/tacacs+ entry on the device.

    The desired state comes from the top-level 'state' parameter when present,
    falling back to the legacy per-section 'state' key.
    """
    vdom = data['vdom']
    if 'state' in data and data['state']:
        state = data['state']
    # Fix: test that the section dict is truthy BEFORE the membership test;
    # the original order raised TypeError when 'user_tacacsplus' was None.
    elif data['user_tacacsplus'] and 'state' in data['user_tacacsplus']:
        state = data['user_tacacsplus']['state']
    else:
        state = True
    user_tacacsplus_data = data['user_tacacsplus']
    filtered_data = underscore_to_hyphen(filter_user_tacacsplus_data(user_tacacsplus_data))
    if state == "present":
        return fos.set('user',
                       'tacacs+',
                       data=filtered_data,
                       vdom=vdom)
    elif state == "absent":
        # 'name' is the mkey of the tacacs+ table, required for deletion.
        return fos.delete('user',
                          'tacacs+',
                          mkey=filtered_data['name'],
                          vdom=vdom)
def is_successful_status(status):
    """Return True for success, treating a DELETE that got 404 as success (already absent)."""
    if status['status'] == "success":
        return True
    return status['http_method'] == "DELETE" and status['http_status'] == 404
def fortios_user(data, fos):
    # Dispatch to the matching configuration section and normalise the
    # response into (is_error, has_changed, result).
    # NOTE(review): if 'user_tacacsplus' is falsy, 'resp' is unbound and the
    # return raises UnboundLocalError -- confirm callers always pass it.
    if data['user_tacacsplus']:
        resp = user_tacacsplus(data, fos)
    return not is_successful_status(resp), \
        resp['status'] == "success", \
        resp
def main():
    # Argument schema; mirrors the DOCUMENTATION block above. The top-level
    # host/username/password options are only used in legacy mode.
    fields = {
        "host": {"required": False, "type": "str"},
        "username": {"required": False, "type": "str"},
        "password": {"required": False, "type": "str", "default": "", "no_log": True},
        "vdom": {"required": False, "type": "str", "default": "root"},
        "https": {"required": False, "type": "bool", "default": True},
        "ssl_verify": {"required": False, "type": "bool", "default": True},
        "state": {"required": False, "type": "str",
                  "choices": ["present", "absent"]},
        "user_tacacsplus": {
            "required": False, "type": "dict", "default": None,
            "options": {
                "state": {"required": False, "type": "str",
                          "choices": ["present", "absent"]},
                "authen_type": {"required": False, "type": "str",
                                "choices": ["mschap", "chap", "pap",
                                            "ascii", "auto"]},
                "authorization": {"required": False, "type": "str",
                                  "choices": ["enable", "disable"]},
                "key": {"required": False, "type": "str"},
                "name": {"required": True, "type": "str"},
                "port": {"required": False, "type": "int"},
                "secondary_key": {"required": False, "type": "str"},
                "secondary_server": {"required": False, "type": "str"},
                "server": {"required": False, "type": "str"},
                "source_ip": {"required": False, "type": "str"},
                "tertiary_key": {"required": False, "type": "str"},
                "tertiary_server": {"required": False, "type": "str"}
            }
        }
    }
    module = AnsibleModule(argument_spec=fields,
                           supports_check_mode=False)
    # legacy_mode refers to using fortiosapi instead of HTTPAPI
    legacy_mode = 'host' in module.params and module.params['host'] is not None and \
                  'username' in module.params and module.params['username'] is not None and \
                  'password' in module.params and module.params['password'] is not None
    if not legacy_mode:
        # HTTPAPI transport: reuse Ansible's persistent connection socket.
        if module._socket_path:
            connection = Connection(module._socket_path)
            fos = FortiOSHandler(connection)
            is_error, has_changed, result = fortios_user(module.params, fos)
        else:
            module.fail_json(**FAIL_SOCKET_MSG)
    else:
        # Legacy transport: direct fortiosapi session, closed after use.
        try:
            from fortiosapi import FortiOSAPI
        except ImportError:
            module.fail_json(msg="fortiosapi module is required")
        fos = FortiOSAPI()
        login(module.params, fos)
        is_error, has_changed, result = fortios_user(module.params, fos)
        fos.logout()
    if not is_error:
        module.exit_json(changed=has_changed, meta=result)
    else:
        module.fail_json(msg="Error in repo", meta=result)
if __name__ == '__main__':
main()
| gpl-3.0 |
kybriainfotech/iSocioCRM | openerp/tools/parse_version.py | 380 | 4462 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
## this functions are taken from the setuptools package (version 0.6c8)
## http://peak.telecommunity.com/DevCenter/PkgResources#parsing-utilities
import re
# Splits a version string into numeric runs, alphabetic runs, dots and dashes.
component_re = re.compile(r'(\d+ | [a-z]+ | \.| -)', re.VERBOSE)
# Canonical spellings for pre-release/patch markers; '@' sorts before any
# alphanumeric character, so 'dev' releases order before everything else.
replace = {'pre':'c', 'preview':'c','-':'final-','_':'final-','rc':'c','dev':'@','saas':'','~':''}.get
def _parse_version_parts(s):
    # Yield normalized, sortable chunks of version string ``s``: numbers are
    # zero-padded to 8 digits so lexicographic order matches numeric order,
    # alpha tags are prefixed with '*', and a '*final' sentinel is appended.
    for part in component_re.split(s):
        part = replace(part,part)
        if not part or part=='.':
            continue
        if part[:1] in '0123456789':
            yield part.zfill(8) # pad for numeric comparison
        else:
            yield '*'+part
    yield '*final' # ensure that alpha/beta/candidate are before final
def parse_version(s):
    """Convert a version string to a chronologically-sortable key
    This is a rough cross between distutils' StrictVersion and LooseVersion;
    if you give it versions that would work with StrictVersion, then it behaves
    the same; otherwise it acts like a slightly-smarter LooseVersion. It is
    *possible* to create pathological version coding schemes that will fool
    this parser, but they should be very rare in practice.
    The returned value will be a tuple of strings. Numeric portions of the
    version are padded to 8 digits so they will compare numerically, but
    without relying on how numbers compare relative to strings. Dots are
    dropped, but dashes are retained. Trailing zeros between alpha segments
    or dashes are suppressed, so that e.g. "2.4.0" is considered the same as
    "2.4". Alphanumeric parts are lower-cased.
    The algorithm assumes that strings like "-" and any alpha string that
    alphabetically follows "final" represents a "patch level". So, "2.4-1"
    is assumed to be a branch or patch of "2.4", and therefore "2.4.1" is
    considered newer than "2.4-1", which in turn is newer than "2.4".
    Strings like "a", "b", "c", "alpha", "beta", "candidate" and so on (that
    come before "final" alphabetically) are assumed to be pre-release versions,
    so that the version "2.4" is considered newer than "2.4a1".
    Finally, to handle miscellaneous cases, the strings "pre", "preview", and
    "rc" are treated as if they were "c", i.e. as though they were release
    candidates, and therefore are not as new as a version string that does not
    contain them.
    """
    parts = []
    # A missing/empty version string is treated as "0.1".
    for part in _parse_version_parts((s or '0.1').lower()):
        if part.startswith('*'):
            if part<'*final': # remove '-' before a prerelease tag
                while parts and parts[-1]=='*final-': parts.pop()
            # remove trailing zeros from each series of numeric parts
            while parts and parts[-1]=='00000000':
                parts.pop()
        parts.append(part)
    return tuple(parts)
if __name__ == '__main__':
    # Self-test: each list below must already be in strictly ascending order
    # once parsed; cmp() asserts every consecutive pair.
    def cmp(a, b):
        msg = '%s < %s == %s' % (a, b, a < b)
        assert a < b, msg
        return b
    def chk(lst, verbose=False):
        pvs = []
        for v in lst:
            pv = parse_version(v)
            pvs.append(pv)
            if verbose:
                print v, pv
        reduce(cmp, pvs)
    chk(('0', '4.2', '4.2.3.4', '5.0.0-alpha', '5.0.0-rc1', '5.0.0-rc1.1', '5.0.0_rc2', '5.0.0_rc3', '5.0.0'), False)
    chk(('5.0.0-0_rc3', '5.0.0-1dev', '5.0.0-1'), False)
| agpl-3.0 |
MattDevo/edk2 | AppPkg/Applications/Python/Python-2.7.10/Lib/encodings/iso8859_1.py | 93 | 13739 | """ Python Character Mapping Codec iso8859_1 generated from 'MAPPINGS/ISO8859/8859-1.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
    # Stateless encode/decode via the module-level charmap tables.
    def encode(self,input,errors='strict'):
        return codecs.charmap_encode(input,errors,encoding_table)
    def decode(self,input,errors='strict'):
        return codecs.charmap_decode(input,errors,decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
    # Charmap encoding keeps no state between chunks, so each call is
    # independent; only the encoded bytes ([0]) are returned.
    def encode(self, input, final=False):
        return codecs.charmap_encode(input,self.errors,encoding_table)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
    # Single-byte charset: no multi-byte sequences can straddle chunk
    # boundaries, so stateless decoding is safe.
    def decode(self, input, final=False):
        return codecs.charmap_decode(input,self.errors,decoding_table)[0]
class StreamWriter(Codec,codecs.StreamWriter):
    # Behaviour fully provided by Codec + codecs.StreamWriter.
    pass
class StreamReader(Codec,codecs.StreamReader):
    # Behaviour fully provided by Codec + codecs.StreamReader.
    pass
### encodings module API
def getregentry():
    # Registration hook used by the encodings package search function.
    return codecs.CodecInfo(
        name='iso8859-1',
        encode=Codec().encode,
        decode=Codec().decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamreader=StreamReader,
        streamwriter=StreamWriter,
    )
### Decoding Table
decoding_table = (
u'\x00' # 0x00 -> NULL
u'\x01' # 0x01 -> START OF HEADING
u'\x02' # 0x02 -> START OF TEXT
u'\x03' # 0x03 -> END OF TEXT
u'\x04' # 0x04 -> END OF TRANSMISSION
u'\x05' # 0x05 -> ENQUIRY
u'\x06' # 0x06 -> ACKNOWLEDGE
u'\x07' # 0x07 -> BELL
u'\x08' # 0x08 -> BACKSPACE
u'\t' # 0x09 -> HORIZONTAL TABULATION
u'\n' # 0x0A -> LINE FEED
u'\x0b' # 0x0B -> VERTICAL TABULATION
u'\x0c' # 0x0C -> FORM FEED
u'\r' # 0x0D -> CARRIAGE RETURN
u'\x0e' # 0x0E -> SHIFT OUT
u'\x0f' # 0x0F -> SHIFT IN
u'\x10' # 0x10 -> DATA LINK ESCAPE
u'\x11' # 0x11 -> DEVICE CONTROL ONE
u'\x12' # 0x12 -> DEVICE CONTROL TWO
u'\x13' # 0x13 -> DEVICE CONTROL THREE
u'\x14' # 0x14 -> DEVICE CONTROL FOUR
u'\x15' # 0x15 -> NEGATIVE ACKNOWLEDGE
u'\x16' # 0x16 -> SYNCHRONOUS IDLE
u'\x17' # 0x17 -> END OF TRANSMISSION BLOCK
u'\x18' # 0x18 -> CANCEL
u'\x19' # 0x19 -> END OF MEDIUM
u'\x1a' # 0x1A -> SUBSTITUTE
u'\x1b' # 0x1B -> ESCAPE
u'\x1c' # 0x1C -> FILE SEPARATOR
u'\x1d' # 0x1D -> GROUP SEPARATOR
u'\x1e' # 0x1E -> RECORD SEPARATOR
u'\x1f' # 0x1F -> UNIT SEPARATOR
u' ' # 0x20 -> SPACE
u'!' # 0x21 -> EXCLAMATION MARK
u'"' # 0x22 -> QUOTATION MARK
u'#' # 0x23 -> NUMBER SIGN
u'$' # 0x24 -> DOLLAR SIGN
u'%' # 0x25 -> PERCENT SIGN
u'&' # 0x26 -> AMPERSAND
u"'" # 0x27 -> APOSTROPHE
u'(' # 0x28 -> LEFT PARENTHESIS
u')' # 0x29 -> RIGHT PARENTHESIS
u'*' # 0x2A -> ASTERISK
u'+' # 0x2B -> PLUS SIGN
u',' # 0x2C -> COMMA
u'-' # 0x2D -> HYPHEN-MINUS
u'.' # 0x2E -> FULL STOP
u'/' # 0x2F -> SOLIDUS
u'0' # 0x30 -> DIGIT ZERO
u'1' # 0x31 -> DIGIT ONE
u'2' # 0x32 -> DIGIT TWO
u'3' # 0x33 -> DIGIT THREE
u'4' # 0x34 -> DIGIT FOUR
u'5' # 0x35 -> DIGIT FIVE
u'6' # 0x36 -> DIGIT SIX
u'7' # 0x37 -> DIGIT SEVEN
u'8' # 0x38 -> DIGIT EIGHT
u'9' # 0x39 -> DIGIT NINE
u':' # 0x3A -> COLON
u';' # 0x3B -> SEMICOLON
u'<' # 0x3C -> LESS-THAN SIGN
u'=' # 0x3D -> EQUALS SIGN
u'>' # 0x3E -> GREATER-THAN SIGN
u'?' # 0x3F -> QUESTION MARK
u'@' # 0x40 -> COMMERCIAL AT
u'A' # 0x41 -> LATIN CAPITAL LETTER A
u'B' # 0x42 -> LATIN CAPITAL LETTER B
u'C' # 0x43 -> LATIN CAPITAL LETTER C
u'D' # 0x44 -> LATIN CAPITAL LETTER D
u'E' # 0x45 -> LATIN CAPITAL LETTER E
u'F' # 0x46 -> LATIN CAPITAL LETTER F
u'G' # 0x47 -> LATIN CAPITAL LETTER G
u'H' # 0x48 -> LATIN CAPITAL LETTER H
u'I' # 0x49 -> LATIN CAPITAL LETTER I
u'J' # 0x4A -> LATIN CAPITAL LETTER J
u'K' # 0x4B -> LATIN CAPITAL LETTER K
u'L' # 0x4C -> LATIN CAPITAL LETTER L
u'M' # 0x4D -> LATIN CAPITAL LETTER M
u'N' # 0x4E -> LATIN CAPITAL LETTER N
u'O' # 0x4F -> LATIN CAPITAL LETTER O
u'P' # 0x50 -> LATIN CAPITAL LETTER P
u'Q' # 0x51 -> LATIN CAPITAL LETTER Q
u'R' # 0x52 -> LATIN CAPITAL LETTER R
u'S' # 0x53 -> LATIN CAPITAL LETTER S
u'T' # 0x54 -> LATIN CAPITAL LETTER T
u'U' # 0x55 -> LATIN CAPITAL LETTER U
u'V' # 0x56 -> LATIN CAPITAL LETTER V
u'W' # 0x57 -> LATIN CAPITAL LETTER W
u'X' # 0x58 -> LATIN CAPITAL LETTER X
u'Y' # 0x59 -> LATIN CAPITAL LETTER Y
u'Z' # 0x5A -> LATIN CAPITAL LETTER Z
u'[' # 0x5B -> LEFT SQUARE BRACKET
u'\\' # 0x5C -> REVERSE SOLIDUS
u']' # 0x5D -> RIGHT SQUARE BRACKET
u'^' # 0x5E -> CIRCUMFLEX ACCENT
u'_' # 0x5F -> LOW LINE
u'`' # 0x60 -> GRAVE ACCENT
u'a' # 0x61 -> LATIN SMALL LETTER A
u'b' # 0x62 -> LATIN SMALL LETTER B
u'c' # 0x63 -> LATIN SMALL LETTER C
u'd' # 0x64 -> LATIN SMALL LETTER D
u'e' # 0x65 -> LATIN SMALL LETTER E
u'f' # 0x66 -> LATIN SMALL LETTER F
u'g' # 0x67 -> LATIN SMALL LETTER G
u'h' # 0x68 -> LATIN SMALL LETTER H
u'i' # 0x69 -> LATIN SMALL LETTER I
u'j' # 0x6A -> LATIN SMALL LETTER J
u'k' # 0x6B -> LATIN SMALL LETTER K
u'l' # 0x6C -> LATIN SMALL LETTER L
u'm' # 0x6D -> LATIN SMALL LETTER M
u'n' # 0x6E -> LATIN SMALL LETTER N
u'o' # 0x6F -> LATIN SMALL LETTER O
u'p' # 0x70 -> LATIN SMALL LETTER P
u'q' # 0x71 -> LATIN SMALL LETTER Q
u'r' # 0x72 -> LATIN SMALL LETTER R
u's' # 0x73 -> LATIN SMALL LETTER S
u't' # 0x74 -> LATIN SMALL LETTER T
u'u' # 0x75 -> LATIN SMALL LETTER U
u'v' # 0x76 -> LATIN SMALL LETTER V
u'w' # 0x77 -> LATIN SMALL LETTER W
u'x' # 0x78 -> LATIN SMALL LETTER X
u'y' # 0x79 -> LATIN SMALL LETTER Y
u'z' # 0x7A -> LATIN SMALL LETTER Z
u'{' # 0x7B -> LEFT CURLY BRACKET
u'|' # 0x7C -> VERTICAL LINE
u'}' # 0x7D -> RIGHT CURLY BRACKET
u'~' # 0x7E -> TILDE
u'\x7f' # 0x7F -> DELETE
u'\x80' # 0x80 -> <control>
u'\x81' # 0x81 -> <control>
u'\x82' # 0x82 -> <control>
u'\x83' # 0x83 -> <control>
u'\x84' # 0x84 -> <control>
u'\x85' # 0x85 -> <control>
u'\x86' # 0x86 -> <control>
u'\x87' # 0x87 -> <control>
u'\x88' # 0x88 -> <control>
u'\x89' # 0x89 -> <control>
u'\x8a' # 0x8A -> <control>
u'\x8b' # 0x8B -> <control>
u'\x8c' # 0x8C -> <control>
u'\x8d' # 0x8D -> <control>
u'\x8e' # 0x8E -> <control>
u'\x8f' # 0x8F -> <control>
u'\x90' # 0x90 -> <control>
u'\x91' # 0x91 -> <control>
u'\x92' # 0x92 -> <control>
u'\x93' # 0x93 -> <control>
u'\x94' # 0x94 -> <control>
u'\x95' # 0x95 -> <control>
u'\x96' # 0x96 -> <control>
u'\x97' # 0x97 -> <control>
u'\x98' # 0x98 -> <control>
u'\x99' # 0x99 -> <control>
u'\x9a' # 0x9A -> <control>
u'\x9b' # 0x9B -> <control>
u'\x9c' # 0x9C -> <control>
u'\x9d' # 0x9D -> <control>
u'\x9e' # 0x9E -> <control>
u'\x9f' # 0x9F -> <control>
u'\xa0' # 0xA0 -> NO-BREAK SPACE
u'\xa1' # 0xA1 -> INVERTED EXCLAMATION MARK
u'\xa2' # 0xA2 -> CENT SIGN
u'\xa3' # 0xA3 -> POUND SIGN
u'\xa4' # 0xA4 -> CURRENCY SIGN
u'\xa5' # 0xA5 -> YEN SIGN
u'\xa6' # 0xA6 -> BROKEN BAR
u'\xa7' # 0xA7 -> SECTION SIGN
u'\xa8' # 0xA8 -> DIAERESIS
u'\xa9' # 0xA9 -> COPYRIGHT SIGN
u'\xaa' # 0xAA -> FEMININE ORDINAL INDICATOR
u'\xab' # 0xAB -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
u'\xac' # 0xAC -> NOT SIGN
u'\xad' # 0xAD -> SOFT HYPHEN
u'\xae' # 0xAE -> REGISTERED SIGN
u'\xaf' # 0xAF -> MACRON
u'\xb0' # 0xB0 -> DEGREE SIGN
u'\xb1' # 0xB1 -> PLUS-MINUS SIGN
u'\xb2' # 0xB2 -> SUPERSCRIPT TWO
u'\xb3' # 0xB3 -> SUPERSCRIPT THREE
u'\xb4' # 0xB4 -> ACUTE ACCENT
u'\xb5' # 0xB5 -> MICRO SIGN
u'\xb6' # 0xB6 -> PILCROW SIGN
u'\xb7' # 0xB7 -> MIDDLE DOT
u'\xb8' # 0xB8 -> CEDILLA
u'\xb9' # 0xB9 -> SUPERSCRIPT ONE
u'\xba' # 0xBA -> MASCULINE ORDINAL INDICATOR
u'\xbb' # 0xBB -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
u'\xbc' # 0xBC -> VULGAR FRACTION ONE QUARTER
u'\xbd' # 0xBD -> VULGAR FRACTION ONE HALF
u'\xbe' # 0xBE -> VULGAR FRACTION THREE QUARTERS
u'\xbf' # 0xBF -> INVERTED QUESTION MARK
u'\xc0' # 0xC0 -> LATIN CAPITAL LETTER A WITH GRAVE
u'\xc1' # 0xC1 -> LATIN CAPITAL LETTER A WITH ACUTE
u'\xc2' # 0xC2 -> LATIN CAPITAL LETTER A WITH CIRCUMFLEX
u'\xc3' # 0xC3 -> LATIN CAPITAL LETTER A WITH TILDE
u'\xc4' # 0xC4 -> LATIN CAPITAL LETTER A WITH DIAERESIS
u'\xc5' # 0xC5 -> LATIN CAPITAL LETTER A WITH RING ABOVE
u'\xc6' # 0xC6 -> LATIN CAPITAL LETTER AE
u'\xc7' # 0xC7 -> LATIN CAPITAL LETTER C WITH CEDILLA
u'\xc8' # 0xC8 -> LATIN CAPITAL LETTER E WITH GRAVE
u'\xc9' # 0xC9 -> LATIN CAPITAL LETTER E WITH ACUTE
u'\xca' # 0xCA -> LATIN CAPITAL LETTER E WITH CIRCUMFLEX
u'\xcb' # 0xCB -> LATIN CAPITAL LETTER E WITH DIAERESIS
u'\xcc' # 0xCC -> LATIN CAPITAL LETTER I WITH GRAVE
u'\xcd' # 0xCD -> LATIN CAPITAL LETTER I WITH ACUTE
u'\xce' # 0xCE -> LATIN CAPITAL LETTER I WITH CIRCUMFLEX
u'\xcf' # 0xCF -> LATIN CAPITAL LETTER I WITH DIAERESIS
u'\xd0' # 0xD0 -> LATIN CAPITAL LETTER ETH (Icelandic)
u'\xd1' # 0xD1 -> LATIN CAPITAL LETTER N WITH TILDE
u'\xd2' # 0xD2 -> LATIN CAPITAL LETTER O WITH GRAVE
u'\xd3' # 0xD3 -> LATIN CAPITAL LETTER O WITH ACUTE
u'\xd4' # 0xD4 -> LATIN CAPITAL LETTER O WITH CIRCUMFLEX
u'\xd5' # 0xD5 -> LATIN CAPITAL LETTER O WITH TILDE
u'\xd6' # 0xD6 -> LATIN CAPITAL LETTER O WITH DIAERESIS
u'\xd7' # 0xD7 -> MULTIPLICATION SIGN
u'\xd8' # 0xD8 -> LATIN CAPITAL LETTER O WITH STROKE
u'\xd9' # 0xD9 -> LATIN CAPITAL LETTER U WITH GRAVE
u'\xda' # 0xDA -> LATIN CAPITAL LETTER U WITH ACUTE
u'\xdb' # 0xDB -> LATIN CAPITAL LETTER U WITH CIRCUMFLEX
u'\xdc' # 0xDC -> LATIN CAPITAL LETTER U WITH DIAERESIS
u'\xdd' # 0xDD -> LATIN CAPITAL LETTER Y WITH ACUTE
u'\xde' # 0xDE -> LATIN CAPITAL LETTER THORN (Icelandic)
u'\xdf' # 0xDF -> LATIN SMALL LETTER SHARP S (German)
u'\xe0' # 0xE0 -> LATIN SMALL LETTER A WITH GRAVE
u'\xe1' # 0xE1 -> LATIN SMALL LETTER A WITH ACUTE
u'\xe2' # 0xE2 -> LATIN SMALL LETTER A WITH CIRCUMFLEX
u'\xe3' # 0xE3 -> LATIN SMALL LETTER A WITH TILDE
u'\xe4' # 0xE4 -> LATIN SMALL LETTER A WITH DIAERESIS
u'\xe5' # 0xE5 -> LATIN SMALL LETTER A WITH RING ABOVE
u'\xe6' # 0xE6 -> LATIN SMALL LETTER AE
u'\xe7' # 0xE7 -> LATIN SMALL LETTER C WITH CEDILLA
u'\xe8' # 0xE8 -> LATIN SMALL LETTER E WITH GRAVE
u'\xe9' # 0xE9 -> LATIN SMALL LETTER E WITH ACUTE
u'\xea' # 0xEA -> LATIN SMALL LETTER E WITH CIRCUMFLEX
u'\xeb' # 0xEB -> LATIN SMALL LETTER E WITH DIAERESIS
u'\xec' # 0xEC -> LATIN SMALL LETTER I WITH GRAVE
u'\xed' # 0xED -> LATIN SMALL LETTER I WITH ACUTE
u'\xee' # 0xEE -> LATIN SMALL LETTER I WITH CIRCUMFLEX
u'\xef' # 0xEF -> LATIN SMALL LETTER I WITH DIAERESIS
u'\xf0' # 0xF0 -> LATIN SMALL LETTER ETH (Icelandic)
u'\xf1' # 0xF1 -> LATIN SMALL LETTER N WITH TILDE
u'\xf2' # 0xF2 -> LATIN SMALL LETTER O WITH GRAVE
u'\xf3' # 0xF3 -> LATIN SMALL LETTER O WITH ACUTE
u'\xf4' # 0xF4 -> LATIN SMALL LETTER O WITH CIRCUMFLEX
u'\xf5' # 0xF5 -> LATIN SMALL LETTER O WITH TILDE
u'\xf6' # 0xF6 -> LATIN SMALL LETTER O WITH DIAERESIS
u'\xf7' # 0xF7 -> DIVISION SIGN
u'\xf8' # 0xF8 -> LATIN SMALL LETTER O WITH STROKE
u'\xf9' # 0xF9 -> LATIN SMALL LETTER U WITH GRAVE
u'\xfa' # 0xFA -> LATIN SMALL LETTER U WITH ACUTE
u'\xfb' # 0xFB -> LATIN SMALL LETTER U WITH CIRCUMFLEX
u'\xfc' # 0xFC -> LATIN SMALL LETTER U WITH DIAERESIS
u'\xfd' # 0xFD -> LATIN SMALL LETTER Y WITH ACUTE
u'\xfe' # 0xFE -> LATIN SMALL LETTER THORN (Icelandic)
u'\xff' # 0xFF -> LATIN SMALL LETTER Y WITH DIAERESIS
)
### Encoding table
encoding_table=codecs.charmap_build(decoding_table)
| bsd-2-clause |
HesselTjeerdsma/Cyber-Physical-Pacman-Game | Algor/flask/lib/python2.7/site-packages/numpy/lib/type_check.py | 15 | 16418 | """Automatically adapted for numpy Sep 19, 2005 by convertcode.py
"""
from __future__ import division, absolute_import, print_function
__all__ = ['iscomplexobj', 'isrealobj', 'imag', 'iscomplex',
'isreal', 'nan_to_num', 'real', 'real_if_close',
'typename', 'asfarray', 'mintypecode', 'asscalar',
'common_type']
import numpy.core.numeric as _nx
from numpy.core.numeric import asarray, asanyarray, array, isnan, zeros
from .ufunclike import isneginf, isposinf
# Type characters ordered from largest element size (index 0) to smallest,
# so a smaller index means a bigger, more capable type.
_typecodes_by_elsize = 'GDFgdfQqLlIiHhBb?'

def mintypecode(typechars, typeset='GDFgdf', default='d'):
    """
    Return the character for the minimum-size type to which the given types
    can all be safely cast.

    Parameters
    ----------
    typechars : list of str or array_like
        Dtype characters, or arrays whose dtype characters are used.
    typeset : str or list of str, optional
        Candidate characters the result is chosen from (default 'GDFgdf').
    default : str, optional
        Returned when no candidate from `typeset` appears in `typechars`.

    Returns
    -------
    typechar : str
        The character of the smallest type that can hold all inputs.

    Examples
    --------
    >>> np.mintypecode(['d', 'f', 'S'])
    'd'
    >>> np.mintypecode('abceh', default='G')
    'G'
    """
    chars = []
    for entry in typechars:
        # Strings are taken as dtype chars directly; anything else is
        # coerced to an array and its dtype char is used.
        chars.append((isinstance(entry, str) and entry)
                     or asarray(entry).dtype.char)
    matched = [c for c in chars if c in typeset]
    if not matched:
        return default
    # Single-precision complex mixed with double-precision real needs
    # double-precision complex to hold both without loss.
    if 'F' in matched and 'd' in matched:
        return 'D'
    # The smallest adequate type is the candidate with the lowest index
    # (largest element size) in _typecodes_by_elsize.
    return min(matched, key=_typecodes_by_elsize.index)
def asfarray(a, dtype=_nx.float_):
    """
    Return an array converted to a float type.

    Parameters
    ----------
    a : array_like
        The input array.
    dtype : str or dtype object, optional
        Float type code to coerce input array `a`. Non-inexact dtypes
        (e.g. the int dtypes) are replaced with float64.

    Returns
    -------
    out : ndarray
        The input `a` as a float ndarray.

    Examples
    --------
    >>> np.asfarray([2, 3])
    array([ 2.,  3.])
    >>> np.asfarray([2, 3], dtype='int8')
    array([ 2.,  3.])
    """
    requested = _nx.obj2sctype(dtype)
    if issubclass(requested, _nx.inexact):
        return asarray(a, dtype=requested)
    # A non-float dtype was requested; fall back to the default float type.
    return asarray(a, dtype=_nx.float_)
def real(val):
    """
    Return the real part of the complex argument.

    Parameters
    ----------
    val : array_like
        Input array or scalar.

    Returns
    -------
    out : ndarray or scalar
        The real component of `val`. Real inputs are returned with their
        own type; complex inputs yield a float result.

    Examples
    --------
    >>> np.real(1 + 1j)
    1.0
    """
    # Objects that already expose .real (ndarrays, Python numbers) are
    # used directly; everything else is coerced to an array first.
    try:
        result = val.real
    except AttributeError:
        result = asanyarray(val).real
    return result
def imag(val):
    """
    Return the imaginary part of the complex argument.

    Parameters
    ----------
    val : array_like
        Input array or scalar.

    Returns
    -------
    out : ndarray or scalar
        The imaginary component of `val`. Real inputs yield zeros of their
        own type; complex inputs yield a float result.

    Examples
    --------
    >>> np.imag(1 + 1j)
    1.0
    """
    # Objects that already expose .imag (ndarrays, Python numbers) are
    # used directly; everything else is coerced to an array first.
    try:
        result = val.imag
    except AttributeError:
        result = asanyarray(val).imag
    return result
def iscomplex(x):
    """
    Return a bool array: True where the input element has a non-zero
    imaginary part.

    The element values are tested, not the dtype: a complex number with a
    zero imaginary part yields False.

    Parameters
    ----------
    x : array_like
        Input array.

    Returns
    -------
    out : ndarray of bools
        Output array.

    Examples
    --------
    >>> np.iscomplex([1+1j, 1+0j, 4.5, 3, 2, 2j])
    array([ True, False, False, False, False,  True], dtype=bool)
    """
    arr = asanyarray(x)
    if issubclass(arr.dtype.type, _nx.complexfloating):
        return arr.imag != 0
    # Real-typed input: no element can have a non-zero imaginary part.
    # Unary + converts a 0-d array to an array scalar when needed.
    return +zeros(arr.shape, bool)
def isreal(x):
    """
    Return a bool array: True where the input element is real.

    A complex element with a zero imaginary part counts as real.

    Parameters
    ----------
    x : array_like
        Input array.

    Returns
    -------
    out : ndarray, bool
        Boolean array of the same shape as `x`.

    Examples
    --------
    >>> np.isreal([1+1j, 1+0j, 4.5, 3, 2, 2j])
    array([False,  True,  True,  True,  True, False], dtype=bool)
    """
    # An element is real exactly when its imaginary component is zero.
    imaginary_part = imag(x)
    return imaginary_part == 0
def iscomplexobj(x):
    """
    Check for a complex type or an array of complex numbers.

    The dtype is tested, not the values: a complex array whose elements
    all have zero imaginary parts still yields True.

    Parameters
    ----------
    x : any
        The input can be of any type and shape.

    Returns
    -------
    iscomplexobj : bool
        True if `x` is of a complex type or has at least one complex element.

    Examples
    --------
    >>> np.iscomplexobj(1)
    False
    >>> np.iscomplexobj(1+0j)
    True
    """
    # Prefer the object's own dtype; fall back to coercing to an array
    # for plain Python scalars and sequences.
    try:
        scalar_type = x.dtype.type
    except AttributeError:
        scalar_type = asarray(x).dtype.type
    return issubclass(scalar_type, _nx.complexfloating)
def isrealobj(x):
    """
    Return True if `x` is not of a complex type.

    The dtype is tested, not the values: a complex array whose elements
    all have zero imaginary parts still yields False.

    Parameters
    ----------
    x : any
        The input can be of any type and shape.

    Returns
    -------
    y : bool
        False if `x` is of a complex type.

    Examples
    --------
    >>> np.isrealobj(1)
    True
    >>> np.isrealobj(1+0j)
    False
    """
    complex_typed = iscomplexobj(x)
    return not complex_typed
#-----------------------------------------------------------------------------
def _getmaxmin(t):
from numpy.core import getlimits
f = getlimits.finfo(t)
return f.max, f.min
def nan_to_num(x, copy=True):
    """
    Replace nan with zero and inf with finite numbers.

    Returns an array or scalar replacing Not a Number (NaN) with zero,
    (positive) infinity with a very large number and negative infinity
    with a very small (or negative) number.

    Parameters
    ----------
    x : array_like
        Input data.
    copy : bool, optional
        Whether to create a copy of `x` (True) or to replace values
        in-place (False). The in-place operation only occurs if
        casting to an array does not require a copy.
        Default is True.
        .. versionadded:: 1.13

    Returns
    -------
    out : ndarray
        New Array with the same shape as `x` and dtype of the element in
        `x` with the greatest precision. If `x` is inexact, then NaN is
        replaced by zero, and infinity (-infinity) is replaced by the
        largest (smallest or most negative) floating point value that fits
        in the output dtype. If `x` is not inexact, then a copy of `x` is
        returned.

    See Also
    --------
    isinf, isneginf, isposinf, isnan, isfinite

    Examples
    --------
    >>> x = np.array([np.inf, -np.inf, np.nan, -128, 128])
    >>> np.nan_to_num(x)
    array([  1.79769313e+308,  -1.79769313e+308,   0.00000000e+000,
            -1.28000000e+002,   1.28000000e+002])
    """
    # copy=False replaces in place when no cast is needed; subok=True
    # preserves ndarray subclasses.
    x = _nx.array(x, subok=True, copy=copy)
    xtype = x.dtype.type
    if not issubclass(xtype, _nx.inexact):
        # Integer/bool dtypes cannot hold nan/inf; return unchanged.
        return x
    iscomplex = issubclass(xtype, _nx.complexfloating)
    isscalar = (x.ndim == 0)
    # Promote a 0-d array to 1-d so the in-place copyto calls below have a
    # writable view to operate on; undone on return.
    x = x[None] if isscalar else x
    # For complex input, clean the real and imaginary planes separately;
    # x.real / x.imag are views, so writes land in x itself.
    dest = (x.real, x.imag) if iscomplex else (x,)
    maxf, minf = _getmaxmin(x.real.dtype)
    for d in dest:
        _nx.copyto(d, 0.0, where=isnan(d))
        _nx.copyto(d, maxf, where=isposinf(d))
        _nx.copyto(d, minf, where=isneginf(d))
    return x[0] if isscalar else x
#-----------------------------------------------------------------------------
def real_if_close(a, tol=100):
    """
    Return a real array if the imaginary parts of a complex input are all
    close to zero; otherwise return the input unchanged.

    "Close to zero" means smaller than `tol` times the machine epsilon of
    the input's dtype (when ``tol > 1``), or smaller than `tol` itself
    (when ``tol <= 1``).

    Parameters
    ----------
    a : array_like
        Input array.
    tol : float
        Tolerance, in machine epsilons, for the imaginary parts.

    Returns
    -------
    out : ndarray
        The real part of `a` if all imaginary parts are within tolerance,
        otherwise `a` itself.

    Examples
    --------
    >>> np.real_if_close([2.1 + 4e-14j], tol=1000)
    array([ 2.1])
    >>> np.real_if_close([2.1 + 4e-13j], tol=1000)
    array([ 2.1 +4.00000000e-13j])
    """
    arr = asanyarray(a)
    if not issubclass(arr.dtype.type, _nx.complexfloating):
        # Already real-typed: nothing to do.
        return arr
    if tol > 1:
        # Interpret tol as a multiple of this dtype's machine epsilon.
        from numpy.core import getlimits
        tol = tol * getlimits.finfo(arr.dtype.type).eps
    if _nx.all(_nx.absolute(arr.imag) < tol):
        return arr.real
    return arr
def asscalar(a):
    """
    Convert an array of size 1 to its scalar equivalent.

    Parameters
    ----------
    a : ndarray
        Input array of size 1.

    Returns
    -------
    out : scalar
        Scalar representation of `a`, with the type produced by the
        input's `item` method.

    Examples
    --------
    >>> np.asscalar(np.array([24]))
    24
    """
    # Delegate to .item(), which returns the closest Python scalar type.
    return a.item()
#-----------------------------------------------------------------------------
# Human-readable descriptions for single-character dtype codes.
_namefromtype = {
    'S1': 'character',
    '?': 'bool',
    'b': 'signed char',
    'B': 'unsigned char',
    'h': 'short',
    'H': 'unsigned short',
    'i': 'integer',
    'I': 'unsigned integer',
    'l': 'long integer',
    'L': 'unsigned long integer',
    'q': 'long long integer',
    'Q': 'unsigned long long integer',
    'f': 'single precision',
    'd': 'double precision',
    'g': 'long precision',
    'F': 'complex single precision',
    'D': 'complex double precision',
    'G': 'complex long double precision',
    'S': 'string',
    'U': 'unicode',
    'V': 'void',
    'O': 'object',
}

def typename(char):
    """
    Return a description for the given data type code.

    Parameters
    ----------
    char : str
        Data type code.

    Returns
    -------
    out : str
        Description of the input data type code.

    Raises
    ------
    KeyError
        If `char` is not a known data type code.

    Examples
    --------
    >>> np.typename('d')
    'double precision'
    >>> np.typename('?')
    'bool'
    """
    return _namefromtype[char]
#-----------------------------------------------------------------------------
#determine the "minimum common type" for a group of arrays.
# Row 0: real float types by increasing precision; row 1: their complex
# counterparts (half has no complex counterpart, hence None).
array_type = [[_nx.half, _nx.single, _nx.double, _nx.longdouble],
              [None, _nx.csingle, _nx.cdouble, _nx.clongdouble]]
# Precision rank (column index into array_type) for each scalar type.
array_precision = {_nx.half: 0,
                   _nx.single: 1,
                   _nx.double: 2,
                   _nx.longdouble: 3,
                   _nx.csingle: 1,
                   _nx.cdouble: 2,
                   _nx.clongdouble: 3}

def common_type(*arrays):
    """
    Return a scalar type which is common to the input arrays.

    The return type is always an inexact (floating point) scalar type,
    even if all the arrays are integer arrays; an integer input forces a
    minimum precision of 64-bit float. All input arrays can be safely
    cast to the returned dtype without loss of information.

    Parameters
    ----------
    array1, array2, ... : ndarrays
        Input arrays.

    Returns
    -------
    out : data type code
        Data type code.

    Raises
    ------
    TypeError
        If an input array's dtype is not numeric.

    Examples
    --------
    >>> np.common_type(np.arange(2, dtype=np.float32), np.arange(2))
    <type 'numpy.float64'>
    """
    any_complex = False
    highest = 0
    for arr in arrays:
        if iscomplexobj(arr):
            any_complex = True
        scalar_type = arr.dtype.type
        if issubclass(scalar_type, _nx.integer):
            # Integers require at least double precision to be held losslessly.
            p = array_precision[_nx.double]
        else:
            p = array_precision.get(scalar_type, None)
            if p is None:
                raise TypeError("can't get common type for non-numeric array")
        highest = max(highest, p)
    return array_type[1 if any_complex else 0][highest]
| apache-2.0 |
sk4x0r/sat-solver | main.py | 1 | 1088 | import gbl
import re
import sys

# Flat driver script (Python 2 syntax): read a DIMACS CNF file named on the
# command line, parse it into a clause list, and pass it to the solver in
# the ``gbl`` module (imported above this block).
if len(sys.argv) != 2:
    print "Invalid number of arguments"
    sys.exit(0)
dimacsfile = sys.argv[1].strip()
f = open(dimacsfile, 'r')
data = f.read()
lines = data.split('\n')
commentLine = re.compile('c.*') #Regular expression to detect comment lines
statLine = re.compile('p\s*cnf\s*(\d*)\s*(\d*)') #Regular expression to detect stat line
formula = []
for line in lines:
    line = line.strip()
    # Skip blank lines and the "%" end-of-data marker used by some benchmarks.
    if line == "%" or not line:
        continue
    if not commentLine.match(line):
        stats = statLine.match(line)
        if stats:
            varCount = int(stats.group(1)) #number of unknowns
            termCount = int(stats.group(2)) #number of clauses/terms
        else:
            # A clause line: space-separated literals terminated by 0.
            numbers = line.rstrip('\n').split()
            literals = []
            for number in numbers:
                n = int(number)
                if(n != 0):
                    # NOTE(review): every literal is stored negated —
                    # presumably what gbl.newAlgo expects; confirm against
                    # the solver before reusing this parser.
                    literals.append(-n)
            formula.append(literals)
unknowns = [x for x in range(1, varCount+1)] #create list of unknowns
satisfiable = gbl.newAlgo(formula, unknowns)
if not satisfiable:
    print "UNSATISFIABLE"
| gpl-2.0 |
markgw/pimlico | src/python/pimlico/modules/nltk/nist_tokenize/execute.py | 1 | 1214 | # This file is part of Pimlico
# Copyright (C) 2020 Mark Granroth-Wilding
# Licensed under the GNU LGPL v3.0 - https://www.gnu.org/licenses/lgpl-3.0.en.html
from nltk.tokenize.nist import NISTTokenizer
from pimlico.core.modules.map import skip_invalid
from pimlico.core.modules.map.multiproc import multiprocessing_executor_factory
@skip_invalid
def process_document(worker, archive, filename, doc):
    """
    Tokenize one document using the NIST tokenizer callable selected in
    worker_set_up() and return a new document of tokenized sentences.

    Fix: call worker.tokenize (chosen according to the "non_european"
    option) instead of worker.tokenizer.tokenize — previously the option
    was configured in worker_set_up() but silently ignored here.
    """
    # The NIST tokenizer does not split sentences, so linebreaks are the
    # same as in the input and are subsequently treated as sentence breaks.
    tokenized_text = worker.tokenize(doc.text, lowercase=worker.info.options["lowercase"], return_str=True)
    sentences = [sent.split(" ") for sent in tokenized_text.splitlines()]
    return worker.info.document(sentences=sentences)
def worker_set_up(worker):
    """Create one NIST tokenizer per worker and pick the tokenize callable."""
    worker.tokenizer = NISTTokenizer()
    # The "non_european" option selects the international tokenizer variant.
    use_international = worker.info.options["non_european"]
    worker.tokenize = (worker.tokenizer.international_tokenize
                       if use_international
                       else worker.tokenizer.tokenize)
# Build the module executor: documents are mapped through process_document
# across worker processes, each initialized once by worker_set_up.
ModuleExecutor = multiprocessing_executor_factory(
    process_document,
    worker_set_up_fn=worker_set_up,
)
| gpl-3.0 |
pacificcasinohotel/pointsystem | vendor/guzzlehttp/guzzle/docs/conf.py | 100 | 2995 | import sys, os
from sphinx.highlighting import lexers
from pygments.lexers.web import PhpLexer

# Register PHP lexers so code blocks highlight without explicit <?php tags.
lexers['php'] = PhpLexer(startinline=True, linenos=1)
lexers['php-annotations'] = PhpLexer(startinline=True, linenos=1)
primary_domain = 'php'

# -- General configuration -----------------------------------------------------
extensions = []
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
project = u'Guzzle'
copyright = u'2012, Michael Dowling'
version = '3.0.0'
release = '3.0.0'
exclude_patterns = ['_build']

# -- Options for HTML output ---------------------------------------------------

# The name for this set of Sphinx documents.  If None, it defaults to
# "<project> v<release> documentation".
html_title = "Guzzle documentation"
html_short_title = "Guzzle"

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']

# Custom sidebar templates, maps document names to template names.
html_sidebars = {
    '**': ['localtoc.html', 'searchbox.html']
}

# Output file base name for HTML help builder.
htmlhelp_basename = 'Guzzledoc'

# -- Guzzle Sphinx theme setup ------------------------------------------------

# NOTE(review): hard-coded absolute path to a developer machine — the build
# only finds the theme here if guzzle_sphinx_theme is not installed; confirm
# whether this line is still needed.
sys.path.insert(0, '/Users/dowling/projects/guzzle_sphinx_theme')

import guzzle_sphinx_theme
html_translator_class = 'guzzle_sphinx_theme.HTMLTranslator'
html_theme_path = guzzle_sphinx_theme.html_theme_path()
html_theme = 'guzzle_sphinx_theme'

# Guzzle theme options (see theme.conf for more information)
html_theme_options = {
    "project_nav_name": "Guzzle",
    "github_user": "guzzle",
    "github_repo": "guzzle",
    "disqus_comments_shortname": "guzzle",
    "google_analytics_account": "UA-22752917-1"
}

# -- Options for LaTeX output --------------------------------------------------
latex_elements = {}

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
    ('index', 'Guzzle.tex', u'Guzzle Documentation',
     u'Michael Dowling', 'manual'),
]

# -- Options for manual page output --------------------------------------------

# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    ('index', 'guzzle', u'Guzzle Documentation',
     [u'Michael Dowling'], 1)
]

# If true, show URL addresses after external links.
#man_show_urls = False

# -- Options for Texinfo output ------------------------------------------------

# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
texinfo_documents = [
    ('index', 'Guzzle', u'Guzzle Documentation',
     u'Michael Dowling', 'Guzzle', 'One line description of project.',
     'Miscellaneous'),
]
| mit |
mczerski/OpenOCD | tools/xsvf_tools/svf2xsvf.py | 101 | 26710 | #!/usr/bin/python3.0
# Copyright 2008, SoftPLC Corporation http://softplc.com
# Dick Hollenbeck dick@softplc.com
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, you may find one here:
# http://www.gnu.org/licenses/old-licenses/gpl-2.0.html
# or you may search the http://www.gnu.org website for the version 2 license,
# or you may write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
# A python program to convert an SVF file to an XSVF file. There is an
# option to include comments containing the source file line number from the origin
# SVF file before each outputted XSVF statement.
#
# We deviate from the XSVF spec in that we introduce a new command called
# XWAITSTATE which directly flows from the SVF RUNTEST command. Unfortunately
# XRUNSTATE was ill conceived and is not used here. We also add support for the
# three Lattice extensions to SVF: LCOUNT, LDELAY, and LSDR. The xsvf file
# generated from this program is suitable for use with the xsvf player in
# OpenOCD with my modifications to xsvf.c.
#
# This program is written for python 3.0, and it is not easy to change this
# back to 2.x. You may find it easier to use python 3.x even if that means
# building it.
import re
import sys
import struct
# There are both ---<Lexer>--- and ---<Parser>--- sections to this program
# Command-line interface: svf2xsvf.py <svf_filename> <xsvf_filename>
if len( sys.argv ) < 3:
    print("usage %s <svf_filename> <xsvf_filename>" % sys.argv[0])
    exit(1)

inputFilename = sys.argv[1]
outputFilename = sys.argv[2]

doCOMMENTs = True       # Save XCOMMENTs in the output xsvf file
#doCOMMENTs = False     # Save XCOMMENTs in the output xsvf file

# pick your file encoding
file_encoding = 'ISO-8859-1'
#file_encoding = 'utf-8'

xrepeat = 0             # argument to XREPEAT, gives retry count for masked compares
StateBin = (RESET,IDLE,
DRSELECT,DRCAPTURE,DRSHIFT,DREXIT1,DRPAUSE,DREXIT2,DRUPDATE,
IRSELECT,IRCAPTURE,IRSHIFT,IREXIT1,IRPAUSE,IREXIT2,IRUPDATE) = range(16)
# Any integer index into this tuple will be equal to its corresponding StateBin value
StateTxt = ("RESET","IDLE",
"DRSELECT","DRCAPTURE","DRSHIFT","DREXIT1","DRPAUSE","DREXIT2","DRUPDATE",
"IRSELECT","IRCAPTURE","IRSHIFT","IREXIT1","IRPAUSE","IREXIT2","IRUPDATE")
(XCOMPLETE,XTDOMASK,XSIR,XSDR,XRUNTEST,hole0,hole1,XREPEAT,XSDRSIZE,XSDRTDO,
XSETSDRMASKS,XSDRINC,XSDRB,XSDRC,XSDRE,XSDRTDOB,XSDRTDOC,
XSDRTDOE,XSTATE,XENDIR,XENDDR,XSIR2,XCOMMENT,XWAIT,XWAITSTATE,
LCOUNT,LDELAY,LSDR,XTRST) = range(29)
#Note: LCOUNT, LDELAY, and LSDR are Lattice extensions to SVF and provide a way to loop back
# and check a completion status, essentially waiting on a part until it signals that it is done.
# For example below: loop 25 times, each time through the loop do a LDELAY (same as a true RUNTEST)
# and exit loop when LSDR compares match.
"""
LCOUNT 25;
! Step to DRPAUSE give 5 clocks and wait for 1.00e+000 SEC.
LDELAY DRPAUSE 5 TCK 1.00E-003 SEC;
! Test for the completed status. Match means pass.
! Loop back to LDELAY line if not match and loop count less than 25.
LSDR 1 TDI (0)
TDO (1);
"""
#XTRST is an opcode Xilinx seemed to have missed and it comes from the SVF TRST statement.
# Current 1-based line number in the SVF input; advanced by the scanner
# callbacks below so every token tuple carries its source line.
LineNumber = 1

def s_ident(scanner, token): return ("ident", token.upper(), LineNumber)

def s_hex(scanner, token):
    # A parenthesized hex bit-vector like "(0F 3C)"; it may span multiple
    # lines, so count embedded newlines to keep LineNumber accurate, then
    # strip all whitespace and the surrounding parentheses.
    global LineNumber
    LineNumber = LineNumber + token.count('\n')
    token = ''.join(token.split())
    return ("hex", token[1:-1], LineNumber)

def s_int(scanner, token): return ("int", int(token), LineNumber)

def s_float(scanner, token): return ("float", float(token), LineNumber)

#def s_comment(scanner, token): return ("comment", token, LineNumber)

def s_semicolon(scanner, token): return ("semi", token, LineNumber)

def s_nl(scanner,token):
    # Bare newline: advance the line counter, emit no token.
    global LineNumber
    LineNumber = LineNumber + 1
    #print( 'LineNumber=', LineNumber, file=sys.stderr )
    return None
#2.00E-002
# Token patterns for the SVF lexer; each pattern is paired with the callback
# that converts the matched text into a (type, value, line) tuple.
#2.00E-002
scanner = re.Scanner([
    (r"[a-zA-Z]\w*", s_ident),
#    (r"[-+]?[0-9]+[.]?[0-9]*([eE][-+]?[0-9]+)?", s_float),
    (r"[-+]?[0-9]+(([.][0-9eE+-]*)|([eE]+[-+]?[0-9]+))", s_float),
    (r"\d+", s_int),
    (r"\(([0-9a-fA-F]|\s)*\)", s_hex),
    (r"(!|//).*$", None),
    (r";", s_semicolon),
    (r"\n",s_nl),
    (r"\s*", None),
    ],
    re.MULTILINE
    )

# open the file using the given encoding
file = open( sys.argv[1], encoding=file_encoding )

# read all svf file input into string "input"
input = file.read()
file.close()

# Lexer:
# create a list of tuples containing (tokenType, tokenValue, LineNumber)
tokens = scanner.scan( input )[0]

input = None    # allow gc to reclaim memory holding file

#for tokenType, tokenValue, ln in tokens: print( "line %d: %s" % (ln, tokenType), tokenValue )

#-----<parser>-----------------------------------------------------------------

# Parser cursor state: the current token's type/value/line, fed by nextTok().
tokVal = tokType = tokLn = None

tup = iter( tokens )
def nextTok():
    """
    Advance the parser cursor: read the next (type, value, line) tuple from
    the global token iterator ``tup`` into the globals tokType, tokVal,
    tokLn. Raises StopIteration at end of input (caught by the main loop).
    """
    global tokType, tokVal, tokLn, tup
    tokType, tokVal, tokLn = tup.__next__()
class ParseError(Exception):
    """SVF parse error carrying the source line number, the offending
    token, and a human-readable message."""

    def __init__(self, linenumber, token, message):
        self.linenumber = linenumber
        self.token = token
        self.message = message

    def __str__(self):
        global inputFilename
        token_repr = repr(self.token)
        return "Error in file \'%s\' at line %d near token %s\n %s" % (
            inputFilename, self.linenumber, token_repr, self.message)
class MASKSET(object):
    """
    A set of related bit vectors (mask, smask, tdi, tdo), all of the same
    length, associated with one of the shift operations HIR, HDR, TIR, TDR,
    SIR, SDR, or LSDR.
    """

    def __init__(self, name):
        self.empty()
        self.name = name

    def empty(self):
        """Reset all four vectors to zero length."""
        self.mask = bytearray()
        self.smask = bytearray()
        self.tdi = bytearray()
        self.tdo = bytearray()
        self.size = 0

    def syncLengths(self, sawTDI, sawTDO, sawMASK, sawSMASK, newSize):
        """
        Equalize all vector lengths when some of the masks were not part of
        the last change set.
        """
        if self.size == newSize:
            return
        if newSize == 0:
            self.empty()
            return
        # Deviation from the SVF spec, matching Lattice tool output: an SIR
        # given without a MASK() gets an all-zero mask, since instruction
        # register readouts are rarely compared without an explicit mask.
        if self.name == 'SIR' and not sawMASK:
            self.mask = bytearray(newSize)
        if len(self.mask) != newSize:
            self.mask = bytearray(newSize)
            if self.name == 'SDR':
                # SDR defaults to comparing every bit; the header/trailer
                # sets (HIR, HDR, TIR, TDR, SIR) keep all-zero masks.
                for ndx in range(newSize):
                    self.mask[ndx] = 1
        if len(self.tdo) != newSize:
            self.tdo = bytearray(newSize)
        if len(self.tdi) != newSize:
            self.tdi = bytearray(newSize)
        if len(self.smask) != newSize:
            self.smask = bytearray(newSize)
        self.size = newSize
def makeBitArray(hexString, bitCount):
    """
    Expand a packed sequence of hex ASCII characters into a bytearray
    holding one bit per element.

    Only ``bitCount`` bits are taken, starting from the least significant
    end, so unused bits in the most significant nibble are legal. The
    result is ordered most-significant bit first. Raises ParseError (using
    the global tokLn) if hexString supplies fewer than bitCount bits.
    """
    global tokLn
    bits = bytearray()
    remaining = bitCount
    chars = list(hexString)
    chars.reverse()
    # Walk hex digits from least significant to most, emitting bits
    # LSB-first; the final reverse() restores MSB-first order.
    for ch in chars:
        if remaining <= 0:
            break
        nibble = int(ch, 16)
        for mask in (1, 2, 4, 8):
            if remaining <= 0:
                break
            remaining = remaining - 1
            bits.append(1 if nibble & mask else 0)
    if remaining > 0:
        raise ParseError(tokLn, chars, "Insufficient hex characters for given length of %d" % bitCount)
    bits.reverse()
    return bits
def makeXSVFbytes(bitarray):
    """
    Pack a one-bit-per-element bytearray into the bytes written directly to
    the XSVF output. When the bit count is not a multiple of 8, the leading
    (most significant) byte holds the leftover bits in its low positions.
    """
    bitCount = len(bitarray)
    byteCount = (bitCount + 7) // 8
    packed = bytearray(byteCount)
    # Bit position of the first bit within the leading, possibly partial, byte.
    firstBit = (bitCount % 8) - 1
    if firstBit == -1:
        firstBit = 7
    bitNdx = 0
    for byteNdx in range(byteCount):
        value = 0
        mask = 1 << firstBit
        while mask:
            if bitarray[bitNdx]:
                value |= mask
            mask >>= 1
            bitNdx += 1
        packed[byteNdx] = value
        firstBit = 7  # every byte after the first is full
    return packed
def writeComment(outputFile, shiftOp_linenum, shiftOp):
    """
    Write an XCOMMENT record to outputFile: the XCOMMENT opcode byte
    followed by a NUL-terminated string naming the SVF statement and the
    source line it came from.
    """
    comment = "%s @%d\0" % (shiftOp, shiftOp_linenum) # \0 is terminating nul
    record = bytearray(1)
    record[0] = XCOMMENT
    record += comment.encode()
    outputFile.write(record)
def combineBitVectors(trailer, meat, header):
    """
    Concatenate the three bit vectors of a transmission in the order
    trailer + meat + header. Least significant bits are transmitted first,
    so placing the header last means it is sent first.
    """
    combined = bytearray()
    for part in (trailer, meat, header):
        combined.extend(part)
    return combined
def writeRUNTEST( outputFile, run_state, end_state, run_count, min_time, tokenTxt ):
    """
    Write the output for the SVF RUNTEST command.
    run_count - the number of clocks
    min_time - the number of seconds
    tokenTxt - either RUNTEST or LDELAY
    """
    # convert from secs to usecs
    min_time = int( min_time * 1000000)

    # the SVF RUNTEST command does NOT map to the XSVF XRUNTEST command.  Check the SVF spec, then
    # read the XSVF command.   They are not the same.  Use an XSVF XWAITSTATE to
    # implement the required behavior of the SVF RUNTEST command.
    if doCOMMENTs:
        # NOTE(review): the comment record goes to the global 'output', not
        # the 'outputFile' parameter — the same object in this script, but
        # confirm before calling with a different file.
        writeComment( output, tokLn, tokenTxt )

    if tokenTxt == 'RUNTEST':
        # XWAITSTATE record: opcode, run state, end state, then two
        # big-endian 4-byte ints (clock count, minimum time in usecs).
        obuf = bytearray(11)
        obuf[0] = XWAITSTATE
        obuf[1] = run_state
        obuf[2] = end_state
        struct.pack_into(">i", obuf, 3, run_count )  # big endian 4 byte int to obuf
        struct.pack_into(">i", obuf, 7, min_time )   # big endian 4 byte int to obuf
        outputFile.write( obuf )
    else: # == 'LDELAY'
        # LDELAY (Lattice extension) has no end_state byte.
        obuf = bytearray(10)
        obuf[0] = LDELAY
        obuf[1] = run_state
        # LDELAY has no end_state
        struct.pack_into(">i", obuf, 2, run_count )  # big endian 4 byte int to obuf
        struct.pack_into(">i", obuf, 6, min_time )   # big endian 4 byte int to obuf
        outputFile.write( obuf )
# Parser/emitter state: output stream, one MASKSET per shift operation,
# lookup tables for the parse loop, and change-detection caches.
output = open( outputFilename, mode='wb' )

hir = MASKSET('HIR')
hdr = MASKSET('HDR')
tir = MASKSET('TIR')
tdr = MASKSET('TDR')
sir = MASKSET('SIR')
sdr = MASKSET('SDR')

expecting_eof = True

# one of the commands that take the shiftParts after the length, the parse
# template for all of these commands is identical
shiftOps = ('SDR', 'SIR', 'LSDR', 'HDR', 'HIR', 'TDR', 'TIR')

# the order must correspond to shiftOps, this holds the MASKSETS. 'LSDR' shares sdr with 'SDR'
shiftSets = (sdr, sir, sdr, hdr, hir, tdr, tir )

# what to expect as parameters to a shiftOp, i.e. after a SDR length or SIR length
shiftParts = ('TDI', 'TDO', 'MASK', 'SMASK')

# the set of legal states which can trail the RUNTEST command
run_state_allowed = ('IRPAUSE', 'DRPAUSE', 'RESET', 'IDLE')

enddr_state_allowed = ('DRPAUSE', 'IDLE')
endir_state_allowed = ('IRPAUSE', 'IDLE')

trst_mode_allowed = ('ON', 'OFF', 'Z', 'ABSENT')

enddr_state = IDLE
endir_state = IDLE

frequency = 1.00e+006 # HZ;

# change detection for xsdrsize and xtdomask: these XSVF records are emitted
# only when their value differs from the last one sent.
xsdrsize = -1           # the last one sent, send only on change
xtdomask = bytearray()  # the last one sent, send only on change

# we use a number of single byte writes for the XSVF command below
cmdbuf = bytearray(1)

# Save the XREPEAT setting into the file as first thing.
obuf = bytearray(2)
obuf[0] = XREPEAT
obuf[1] = xrepeat
output.write( obuf )
try:
while 1:
expecting_eof = True
nextTok()
expecting_eof = False
# print( tokType, tokVal, tokLn )
if tokVal in shiftOps:
shiftOp_linenum = tokLn
shiftOp = tokVal
set = shiftSets[shiftOps.index(shiftOp)]
# set flags false, if we see one later, set that one true later
sawTDI = sawTDO = sawMASK = sawSMASK = False
nextTok()
if tokType != 'int':
raise ParseError( tokLn, tokVal, "Expecting 'int' giving %s length, got '%s'" % (shiftOp, tokType) )
length = tokVal
nextTok()
while tokVal != ';':
if tokVal not in shiftParts:
raise ParseError( tokLn, tokVal, "Expecting TDI, TDO, MASK, SMASK, or ';'")
shiftPart = tokVal
nextTok()
if tokType != 'hex':
raise ParseError( tokLn, tokVal, "Expecting hex bits" )
bits = makeBitArray( tokVal, length )
if shiftPart == 'TDI':
sawTDI = True
set.tdi = bits
elif shiftPart == 'TDO':
sawTDO = True
set.tdo = bits
elif shiftPart == 'MASK':
sawMASK = True
set.mask = bits
elif shiftPart == 'SMASK':
sawSMASK = True
set.smask = bits
nextTok()
set.syncLengths( sawTDI, sawTDO, sawMASK, sawSMASK, length )
# process all the gathered parameters and generate outputs here
if shiftOp == 'SIR':
if doCOMMENTs:
writeComment( output, shiftOp_linenum, 'SIR' )
tdi = combineBitVectors( tir.tdi, sir.tdi, hir.tdi )
if len(tdi) > 255:
obuf = bytearray(3)
obuf[0] = XSIR2
struct.pack_into( ">h", obuf, 1, len(tdi) )
else:
obuf = bytearray(2)
obuf[0] = XSIR
obuf[1] = len(tdi)
output.write( obuf )
obuf = makeXSVFbytes( tdi )
output.write( obuf )
elif shiftOp == 'SDR':
if doCOMMENTs:
writeComment( output, shiftOp_linenum, shiftOp )
if not sawTDO:
# pass a zero filled bit vector for the sdr.mask
mask = combineBitVectors( tdr.mask, bytearray(sdr.size), hdr.mask )
tdi = combineBitVectors( tdr.tdi, sdr.tdi, hdr.tdi )
if xsdrsize != len(tdi):
xsdrsize = len(tdi)
cmdbuf[0] = XSDRSIZE
output.write( cmdbuf )
obuf = bytearray(4)
struct.pack_into( ">i", obuf, 0, xsdrsize ) # big endian 4 byte int to obuf
output.write( obuf )
if xtdomask != mask:
xtdomask = mask
cmdbuf[0] = XTDOMASK
output.write( cmdbuf )
obuf = makeXSVFbytes( mask )
output.write( obuf )
cmdbuf[0] = XSDR
output.write( cmdbuf )
obuf = makeXSVFbytes( tdi )
output.write( obuf )
else:
mask = combineBitVectors( tdr.mask, sdr.mask, hdr.mask )
tdi = combineBitVectors( tdr.tdi, sdr.tdi, hdr.tdi )
tdo = combineBitVectors( tdr.tdo, sdr.tdo, hdr.tdo )
if xsdrsize != len(tdi):
xsdrsize = len(tdi)
cmdbuf[0] = XSDRSIZE
output.write( cmdbuf )
obuf = bytearray(4)
struct.pack_into(">i", obuf, 0, xsdrsize ) # big endian 4 byte int to obuf
output.write( obuf )
if xtdomask != mask:
xtdomask = mask
cmdbuf[0] = XTDOMASK
output.write( cmdbuf )
obuf = makeXSVFbytes( mask )
output.write( obuf )
cmdbuf[0] = XSDRTDO
output.write( cmdbuf )
obuf = makeXSVFbytes( tdi )
output.write( obuf )
obuf = makeXSVFbytes( tdo )
output.write( obuf )
#print( "len(tdo)=", len(tdo), "len(tdr.tdo)=", len(tdr.tdo), "len(sdr.tdo)=", len(sdr.tdo), "len(hdr.tdo)=", len(hdr.tdo) )
elif shiftOp == 'LSDR':
if doCOMMENTs:
writeComment( output, shiftOp_linenum, shiftOp )
mask = combineBitVectors( tdr.mask, sdr.mask, hdr.mask )
tdi = combineBitVectors( tdr.tdi, sdr.tdi, hdr.tdi )
tdo = combineBitVectors( tdr.tdo, sdr.tdo, hdr.tdo )
if xsdrsize != len(tdi):
xsdrsize = len(tdi)
cmdbuf[0] = XSDRSIZE
output.write( cmdbuf )
obuf = bytearray(4)
struct.pack_into(">i", obuf, 0, xsdrsize ) # big endian 4 byte int to obuf
output.write( obuf )
if xtdomask != mask:
xtdomask = mask
cmdbuf[0] = XTDOMASK
output.write( cmdbuf )
obuf = makeXSVFbytes( mask )
output.write( obuf )
cmdbuf[0] = LSDR
output.write( cmdbuf )
obuf = makeXSVFbytes( tdi )
output.write( obuf )
obuf = makeXSVFbytes( tdo )
output.write( obuf )
#print( "len(tdo)=", len(tdo), "len(tdr.tdo)=", len(tdr.tdo), "len(sdr.tdo)=", len(sdr.tdo), "len(hdr.tdo)=", len(hdr.tdo) )
elif tokVal == 'RUNTEST' or tokVal == 'LDELAY':
# e.g. from lattice tools:
# "RUNTEST IDLE 5 TCK 1.00E-003 SEC;"
saveTok = tokVal
nextTok()
min_time = 0
run_count = 0
max_time = 600 # ten minutes
if tokVal in run_state_allowed:
run_state = StateTxt.index(tokVal)
end_state = run_state # bottom of page 17 of SVF spec
nextTok()
if tokType != 'int' and tokType != 'float':
raise ParseError( tokLn, tokVal, "Expecting 'int' or 'float' after RUNTEST [run_state]")
timeval = tokVal;
nextTok()
if tokVal != 'TCK' and tokVal != 'SEC' and tokVal != 'SCK':
raise ParseError( tokLn, tokVal, "Expecting 'TCK' or 'SEC' or 'SCK' after RUNTEST [run_state] (run_count|min_time)")
if tokVal == 'TCK' or tokVal == 'SCK':
run_count = int( timeval )
else:
min_time = timeval
nextTok()
if tokType == 'int' or tokType == 'float':
min_time = tokVal
nextTok()
if tokVal != 'SEC':
raise ParseError( tokLn, tokVal, "Expecting 'SEC' after RUNTEST [run_state] run_count min_time")
nextTok()
if tokVal == 'MAXIMUM':
nextTok()
if tokType != 'int' and tokType != 'float':
raise ParseError( tokLn, tokVal, "Expecting 'max_time' after RUNTEST [run_state] min_time SEC MAXIMUM")
max_time = tokVal
nextTok()
if tokVal != 'SEC':
raise ParseError( tokLn, tokVal, "Expecting 'max_time' after RUNTEST [run_state] min_time SEC MAXIMUM max_time")
nextTok()
if tokVal == 'ENDSTATE':
nextTok()
if tokVal not in run_state_allowed:
raise ParseError( tokLn, tokVal, "Expecting 'run_state' after RUNTEST .... ENDSTATE")
end_state = StateTxt.index(tokVal)
nextTok()
if tokVal != ';':
raise ParseError( tokLn, tokVal, "Expecting ';' after RUNTEST ....")
# print( "run_count=", run_count, "min_time=", min_time,
# "max_time=", max_time, "run_state=", State[run_state], "end_state=", State[end_state] )
writeRUNTEST( output, run_state, end_state, run_count, min_time, saveTok )
elif tokVal == 'LCOUNT':
nextTok()
if tokType != 'int':
raise ParseError( tokLn, tokVal, "Expecting integer 'count' after LCOUNT")
loopCount = tokVal
nextTok()
if tokVal != ';':
raise ParseError( tokLn, tokVal, "Expecting ';' after LCOUNT count")
if doCOMMENTs:
writeComment( output, tokLn, 'LCOUNT' )
obuf = bytearray(5)
obuf[0] = LCOUNT
struct.pack_into(">i", obuf, 1, loopCount ) # big endian 4 byte int to obuf
output.write( obuf )
elif tokVal == 'ENDDR':
nextTok()
if tokVal not in enddr_state_allowed:
raise ParseError( tokLn, tokVal, "Expecting 'stable_state' after ENDDR. (one of: DRPAUSE, IDLE)")
enddr_state = StateTxt.index(tokVal)
nextTok()
if tokVal != ';':
raise ParseError( tokLn, tokVal, "Expecting ';' after ENDDR stable_state")
if doCOMMENTs:
writeComment( output, tokLn, 'ENDDR' )
obuf = bytearray(2)
obuf[0] = XENDDR
# Page 10 of the March 1999 SVF spec shows that RESET is also allowed here.
# Yet the XSVF spec has no provision for that, and uses a non-standard, i.e.
# boolean argument to XENDDR which only handles two of the 3 intended states.
obuf[1] = 1 if enddr_state == DRPAUSE else 0
output.write( obuf )
elif tokVal == 'ENDIR':
nextTok()
if tokVal not in endir_state_allowed:
raise ParseError( tokLn, tokVal, "Expecting 'stable_state' after ENDIR. (one of: IRPAUSE, IDLE)")
endir_state = StateTxt.index(tokVal)
nextTok()
if tokVal != ';':
raise ParseError( tokLn, tokVal, "Expecting ';' after ENDIR stable_state")
if doCOMMENTs:
writeComment( output, tokLn, 'ENDIR' )
obuf = bytearray(2)
obuf[0] = XENDIR
# Page 10 of the March 1999 SVF spec shows that RESET is also allowed here.
# Yet the XSVF spec has no provision for that, and uses a non-standard, i.e.
# boolean argument to XENDDR which only handles two of the 3 intended states.
obuf[1] = 1 if endir_state == IRPAUSE else 0
output.write( obuf )
elif tokVal == 'STATE':
nextTok()
ln = tokLn
while tokVal != ';':
if tokVal not in StateTxt:
raise ParseError( tokLn, tokVal, "Expecting 'stable_state' after STATE")
stable_state = StateTxt.index( tokVal )
if doCOMMENTs and ln != -1:
writeComment( output, ln, 'STATE' )
ln = -1 # save comment only once
obuf = bytearray(2)
obuf[0] = XSTATE
obuf[1] = stable_state
output.write( obuf )
nextTok()
elif tokVal == 'FREQUENCY':
nextTok()
if tokVal != ';':
if tokType != 'int' and tokType != 'float':
raise ParseError( tokLn, tokVal, "Expecting 'cycles HZ' after FREQUENCY")
frequency = tokVal
nextTok()
if tokVal != 'HZ':
raise ParseError( tokLn, tokVal, "Expecting 'HZ' after FREQUENCY cycles")
nextTok()
if tokVal != ';':
raise ParseError( tokLn, tokVal, "Expecting ';' after FREQUENCY cycles HZ")
elif tokVal == 'TRST':
nextTok()
if tokVal not in trst_mode_allowed:
raise ParseError( tokLn, tokVal, "Expecting 'ON|OFF|Z|ABSENT' after TRST")
trst_mode = tokVal
nextTok()
if tokVal != ';':
raise ParseError( tokLn, tokVal, "Expecting ';' after TRST trst_mode")
if doCOMMENTs:
writeComment( output, tokLn, 'TRST %s' % trst_mode )
obuf = bytearray( 2 )
obuf[0] = XTRST
obuf[1] = trst_mode_allowed.index( trst_mode ) # use the index as the binary argument to XTRST opcode
output.write( obuf )
else:
raise ParseError( tokLn, tokVal, "Unknown token '%s'" % tokVal)
except StopIteration:
if not expecting_eof:
print( "Unexpected End of File at line ", tokLn )
except ParseError as pe:
print( "\n", pe )
finally:
# print( "closing file" )
cmdbuf[0] = XCOMPLETE
output.write( cmdbuf )
output.close()
| gpl-2.0 |
nkcr/WebIndex | app/venv/lib/python3.5/site-packages/pip/baseparser.py | 424 | 10465 | """Base option parser setup"""
from __future__ import absolute_import
import sys
import optparse
import os
import re
import textwrap
from distutils.util import strtobool
from pip._vendor.six import string_types
from pip._vendor.six.moves import configparser
from pip.locations import (
legacy_config_file, config_basename, running_under_virtualenv,
site_config_files
)
from pip.utils import appdirs, get_terminal_size
# Case-insensitive matcher for the "PIP_" prefix on environment variable
# names; used to recognize pip settings in os.environ and strip the prefix.
_environ_prefix_re = re.compile(r"^PIP_", re.I)
class PrettyHelpFormatter(optparse.IndentedHelpFormatter):
    """A prettier/less verbose help formatter for optparse."""

    def __init__(self, *args, **kwargs):
        # Pin the layout so the help column lines up with
        # __init__.parseopts.description.
        kwargs.update(
            max_help_position=30,
            indent_increment=1,
            width=get_terminal_size()[0] - 2,
        )
        optparse.IndentedHelpFormatter.__init__(self, *args, **kwargs)

    def format_option_strings(self, option):
        return self._format_option_strings(option, ' <%s>', ', ')

    def _format_option_strings(self, option, mvarfmt=' <%s>', optsep=', '):
        """
        Build the display string for an option: its short and long forms
        separated by ``optsep``, followed by the formatted metavar when the
        option takes a value.

        :param option: tuple of (short opt, long opt), e.g: ('-f', '--format')
        :param mvarfmt: metavar format string - evaluated as mvarfmt % metavar
        :param optsep: separator
        """
        pieces = []
        if option._short_opts:
            pieces.append(option._short_opts[0])
        if option._long_opts:
            pieces.append(option._long_opts[0])
        if len(pieces) > 1:
            pieces.insert(1, optsep)
        if option.takes_value():
            metavar = option.metavar or option.dest.lower()
            pieces.append(mvarfmt % metavar.lower())
        return ''.join(pieces)

    def format_heading(self, heading):
        # The generic "Options" heading is suppressed entirely.
        return '' if heading == 'Options' else heading + ':\n'

    def format_usage(self, usage):
        """
        Ensure there is only one newline between usage and the first heading
        if there is no description.
        """
        return '\nUsage: %s\n' % self.indent_lines(textwrap.dedent(usage), " ")

    def format_description(self, description):
        # Take full control over the description formatting.
        if not description:
            return ''
        label = 'Commands' if hasattr(self.parser, 'main') else 'Description'
        # Doc strings may carry leading newlines and trailing whitespace;
        # normalize, dedent, then re-indent uniformly.
        cleaned = self.indent_lines(
            textwrap.dedent(description.lstrip('\n').rstrip()), " ")
        return '%s:\n%s\n' % (label, cleaned)

    def format_epilog(self, epilog):
        # Take full control over the epilog as well: pass it through verbatim.
        return epilog if epilog else ''

    def indent_lines(self, text, indent):
        return "\n".join(indent + line for line in text.split('\n'))
class UpdatingDefaultsHelpFormatter(PrettyHelpFormatter):
    """Help formatter used by ConfigOptionParser.

    Refreshes the parser's defaults (from config files and the environ)
    right before they are expanded, so the help listing shows the values
    that will actually be used.
    """

    def expand_default(self, option):
        parser = self.parser
        if parser is not None:
            parser._update_defaults(parser.defaults)
        return optparse.IndentedHelpFormatter.expand_default(self, option)
class CustomOptionParser(optparse.OptionParser):
    """OptionParser with positional group insertion and a flattened
    view over all options."""

    def insert_option_group(self, idx, *args, **kwargs):
        """Insert an OptionGroup at a given position."""
        # add_option_group always appends; relocate the new group to idx.
        group = self.add_option_group(*args, **kwargs)
        self.option_groups.insert(idx, self.option_groups.pop())
        return group

    @property
    def option_list_all(self):
        """Get a list of all options, including those in option groups."""
        grouped = [opt
                   for group in self.option_groups
                   for opt in group.option_list]
        return self.option_list + grouped
class ConfigOptionParser(CustomOptionParser):
    """Custom option parser which updates its defaults by checking the
    configuration files and environmental variables"""
    # Class-level default; a per-instance value is popped from kwargs in
    # __init__. Isolated mode skips per-user config files and PIP_* env vars.
    isolated = False
    def __init__(self, *args, **kwargs):
        # Raw parser: no value interpolation; values are post-processed
        # later by _update_defaults.
        self.config = configparser.RawConfigParser()
        # "name" selects the command-specific config section ([<name>])
        # that is read in addition to [global]; it is mandatory.
        self.name = kwargs.pop('name')
        self.isolated = kwargs.pop("isolated", False)
        self.files = self.get_config_files()
        if self.files:
            self.config.read(self.files)
        assert self.name
        optparse.OptionParser.__init__(self, *args, **kwargs)
    def get_config_files(self):
        """Return the configuration files to read, lowest priority first."""
        # the files returned by this method will be parsed in order with the
        # first files listed being overridden by later files in standard
        # ConfigParser fashion
        config_file = os.environ.get('PIP_CONFIG_FILE', False)
        # Pointing PIP_CONFIG_FILE at os.devnull disables config files
        # entirely.
        if config_file == os.devnull:
            return []
        # at the base we have any site-wide configuration
        files = list(site_config_files)
        # per-user configuration next
        if not self.isolated:
            # An explicit PIP_CONFIG_FILE replaces the per-user files.
            if config_file and os.path.exists(config_file):
                files.append(config_file)
            else:
                # This is the legacy config file, we consider it to be a lower
                # priority than the new file location.
                files.append(legacy_config_file)
                # This is the new config file, we consider it to be a higher
                # priority than the legacy file.
                files.append(
                    os.path.join(
                        appdirs.user_config_dir("pip"),
                        config_basename,
                    )
                )
        # finally virtualenv configuration first trumping others
        if running_under_virtualenv():
            venv_config_file = os.path.join(
                sys.prefix,
                config_basename,
            )
            if os.path.exists(venv_config_file):
                files.append(venv_config_file)
        return files
    def check_default(self, option, key, val):
        """Validate a config-sourced value against the option's checker,
        exiting with status 3 on failure instead of raising."""
        try:
            return option.check_value(key, val)
        except optparse.OptionValueError as exc:
            print("An error occurred during configuration: %s" % exc)
            sys.exit(3)
    def _update_defaults(self, defaults):
        """Updates the given defaults with values from the config files and
        the environ. Does a little special handling for certain types of
        options (lists)."""
        # Then go and look for the other sources of configuration:
        config = {}
        # 1. config files
        for section in ('global', self.name):
            config.update(
                self.normalize_keys(self.get_config_section(section))
            )
        # 2. environmental variables
        if not self.isolated:
            config.update(self.normalize_keys(self.get_environ_vars()))
        # Accumulate complex default state. Callback options below receive
        # this parser and may store results on self.values; those are
        # harvested after the loop via late_eval.
        self.values = optparse.Values(self.defaults)
        late_eval = set()
        # Then set the options with those values
        for key, val in config.items():
            # ignore empty values
            if not val:
                continue
            option = self.get_option(key)
            # Ignore options not present in this parser. E.g. non-globals put
            # in [global] by users that want them to apply to all applicable
            # commands.
            if option is None:
                continue
            if option.action in ('store_true', 'store_false', 'count'):
                val = strtobool(val)
            elif option.action == 'append':
                # List-valued options are whitespace-separated in config.
                val = val.split()
                val = [self.check_default(option, key, v) for v in val]
            elif option.action == 'callback':
                # Callbacks can only be evaluated once all other defaults
                # are in place, so record the dest and read it back later.
                late_eval.add(option.dest)
                opt_str = option.get_opt_string()
                val = option.convert_value(opt_str, val)
                # From take_action
                args = option.callback_args or ()
                kwargs = option.callback_kwargs or {}
                option.callback(option, opt_str, val, self, *args, **kwargs)
            else:
                val = self.check_default(option, key, val)
            defaults[option.dest] = val
        for key in late_eval:
            defaults[key] = getattr(self.values, key)
        self.values = None
        return defaults
    def normalize_keys(self, items):
        """Return a config dictionary with normalized keys regardless of
        whether the keys were specified in environment variables or in config
        files"""
        normalized = {}
        for key, val in items:
            key = key.replace('_', '-')
            if not key.startswith('--'):
                key = '--%s' % key # only prefer long opts
            normalized[key] = val
        return normalized
    def get_config_section(self, name):
        """Get a section of a configuration"""
        if self.config.has_section(name):
            return self.config.items(name)
        return []
    def get_environ_vars(self):
        """Returns a generator with all environmental vars with prefix PIP_"""
        for key, val in os.environ.items():
            if _environ_prefix_re.search(key):
                # Yield ("config-file", value) style pairs with the PIP_
                # prefix stripped and lowercased.
                yield (_environ_prefix_re.sub("", key).lower(), val)
    def get_default_values(self):
        """Overriding to make updating the defaults after instantiation of
        the option parser possible, _update_defaults() does the dirty work."""
        if not self.process_default_values:
            # Old, pre-Optik 1.5 behaviour.
            return optparse.Values(self.defaults)
        defaults = self._update_defaults(self.defaults.copy()) # ours
        for option in self._get_all_options():
            default = defaults.get(option.dest)
            if isinstance(default, string_types):
                # String defaults still need conversion through the
                # option's own value checker.
                opt_str = option.get_opt_string()
                defaults[option.dest] = option.check_value(opt_str, default)
        return optparse.Values(defaults)
    def error(self, msg):
        # Print usage to stderr and exit with status 2 on a parse error.
        self.print_usage(sys.stderr)
        self.exit(2, "%s\n" % msg)
| mit |
fanglinfang/myuw | myuw/test/api/other_quarters.py | 1 | 2728 | import json
from unittest2 import skipIf
from django.test.utils import override_settings
from django.test import TestCase
from django.test.client import Client
from django.core.urlresolvers import reverse
from myuw.test.api import missing_url, get_user, get_user_pass
# File-based (canned fixture data) DAO implementation for the Student Web
# Service, so tests do not contact a live service.
FDAO_SWS = 'restclients.dao_implementation.sws.File'
# Dotted paths of the Django middleware classes needed by the test client;
# they are assembled into MIDDLEWARE_CLASSES via override_settings below.
Session = 'django.contrib.sessions.middleware.SessionMiddleware'
Common = 'django.middleware.common.CommonMiddleware'
CsrfView = 'django.middleware.csrf.CsrfViewMiddleware'
Auth = 'django.contrib.auth.middleware.AuthenticationMiddleware'
RemoteUser = 'django.contrib.auth.middleware.RemoteUserMiddleware'
Message = 'django.contrib.messages.middleware.MessageMiddleware'
XFrame = 'django.middleware.clickjacking.XFrameOptionsMiddleware'
UserService = 'userservice.user.UserServiceMiddleware'
# Authentication backend used when logging the test user in.
AUTH_BACKEND = 'django.contrib.auth.backends.ModelBackend'
@override_settings(
    RESTCLIENTS_SWS_DAO_CLASS=FDAO_SWS,
    MIDDLEWARE_CLASSES=(Session, Common, CsrfView, Auth,
                        RemoteUser, Message, XFrame, UserService),
    AUTHENTICATION_BACKENDS=(AUTH_BACKEND,))
class TestOtherQuarters(TestCase):
    """Exercise the "other quarters" API for the javerage test user."""

    def setUp(self):
        self.client = Client()

    @skipIf(missing_url("myuw_home"), "myuw urls not configured")
    def test_javerage_oquarters(self):
        get_user('javerage')
        self.client.login(username='javerage',
                          password=get_user_pass('javerage'))
        response = self.client.get(reverse("myuw_other_quarters_api"))
        self.assertEquals(response.status_code, 200)

        data = json.loads(response.content)
        next_term = data["next_term_data"]
        self.assertEquals(next_term["has_registration"], True)
        self.assertEquals(next_term["quarter"], "Autumn")
        self.assertEquals(next_term["year"], 2013)

        self.assertEquals(len(data["terms"]), 3)
        # The first listed term is 2013 Summer a-term; verify every field.
        first_term = data["terms"][0]
        expected = {'section_count': 2,
                    'url': '/2013,summer,a-term',
                    'summer_term': 'a-term',
                    'year': 2013,
                    'quarter': 'Summer',
                    'credits': '2.0',
                    'last_final_exam_date': '2013-08-23 23:59:59'}
        for field, value in expected.items():
            self.assertEquals(first_term[field], value)
| apache-2.0 |
eliasbakken/OctoPrint | docs/conf.py | 38 | 8899 | # -*- coding: utf-8 -*-
#
# OctoPrint documentation build configuration file, created by
# sphinx-quickstart on Mon Dec 02 17:08:50 2013.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('../src/'))
sys.path.append(os.path.abspath('sphinxext'))
import octoprint._version
from datetime import date
year_since = 2013
year_current = date.today().year
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
needs_sphinx = '1.3'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['codeblockext', 'onlineinclude', 'sphinx.ext.todo', 'sphinx.ext.autodoc', 'sphinxcontrib.httpdomain',
'sphinx.ext.napoleon']
todo_include_todos = True
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'OctoPrint'
copyright = u'%d-%d, Gina Häußge' % (year_since, year_current) if year_current > year_since else u'%d, Gina Häußge' % year_since
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = octoprint._version.get_versions()["version"]
# The full version, including alpha/beta/rc tags.
release = version
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
numfig = True
# -- Options for HTML output ---------------------------------------------------
# on_rtd is whether we are on readthedocs.org, this line of code grabbed from docs.readthedocs.org
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
if not on_rtd: # only import and set the theme if we're building docs locally
import sphinx_rtd_theme
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#html_theme = "sphinx_rtd_theme"
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
def setup(app):
    """Sphinx extension hook: register the CSS file with theme overrides.

    The stylesheet is served from ``html_static_path`` (``_static``).
    """
    app.add_stylesheet("theme_overrides.css")
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'OctoPrintdoc'
# -- Options for LaTeX output --------------------------------------------------
#latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
#}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
#latex_documents = [
# ('index', 'OctoPrint.tex', u'OctoPrint Documentation',
# u'Gina Häußge', 'manual'),
#]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'octoprint', u'OctoPrint Documentation',
[u'Gina Häußge'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'OctoPrint', u'OctoPrint Documentation',
u'Gina Häußge', 'OctoPrint', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
| agpl-3.0 |
Nitaco/ansible | test/units/modules/remote_management/oneview/test_oneview_logical_interconnect_group_facts.py | 68 | 2017 | # Copyright (c) 2016-2017 Hewlett Packard Enterprise Development LP
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from ansible.compat.tests import unittest
from oneview_module_loader import OneViewModuleBase
from ansible.modules.remote_management.oneview.oneview_logical_interconnect_group_facts import LogicalInterconnectGroupFactsModule
from hpe_test_utils import FactsParamsTestCase
# Fake failure message (not referenced by the tests visible in this file).
ERROR_MSG = 'Fake message error'
# Module params requesting all logical interconnect groups (no name filter).
PARAMS_GET_ALL = dict(
    config='config.json',
    name=None
)
# Module params requesting a single group looked up by name.
PARAMS_GET_BY_NAME = dict(
    config='config.json',
    name="Test Logical Interconnect Group"
)
# Canned OneView response used as the mocked client's return value.
PRESENT_LIGS = [{
    "name": "Test Logical Interconnect Group",
    "uri": "/rest/logical-interconnect-groups/ebb4ada8-08df-400e-8fac-9ff987ac5140"
}]
class LogicalInterconnectGroupFactsSpec(unittest.TestCase, FactsParamsTestCase):
    """Unit tests for LogicalInterconnectGroupFactsModule.

    Both test cases share the run-the-module-and-verify-facts sequence,
    which previously appeared twice verbatim; it is factored into
    _run_and_verify_facts so the tests only state what differs (the
    mocked client call and the params).
    """

    def setUp(self):
        self.configure_mocks(self, LogicalInterconnectGroupFactsModule)
        self.logical_interconnect_groups = self.mock_ov_client.logical_interconnect_groups
        FactsParamsTestCase.configure_client_mock(self, self.logical_interconnect_groups)

    def _run_and_verify_facts(self, params):
        # Run the module with the given params and assert it exits with
        # changed=False and PRESENT_LIGS published as the
        # 'logical_interconnect_groups' fact.
        self.mock_ansible_module.params = params
        LogicalInterconnectGroupFactsModule().run()
        self.mock_ansible_module.exit_json.assert_called_once_with(
            changed=False,
            ansible_facts=dict(logical_interconnect_groups=PRESENT_LIGS)
        )

    def test_should_get_all_ligs(self):
        self.logical_interconnect_groups.get_all.return_value = PRESENT_LIGS
        self._run_and_verify_facts(PARAMS_GET_ALL)

    def test_should_get_lig_by_name(self):
        self.logical_interconnect_groups.get_by.return_value = PRESENT_LIGS
        self._run_and_verify_facts(PARAMS_GET_BY_NAME)


if __name__ == '__main__':
    unittest.main()
| gpl-3.0 |
tushar-rishav/coala | coalib/results/TextRange.py | 1 | 4194 | import copy
from coala_utils.decorators import (
enforce_signature, generate_ordering, generate_repr)
from coalib.results.TextPosition import TextPosition
@generate_repr("start", "end")
@generate_ordering("start", "end")
class TextRange:

    @enforce_signature
    def __init__(self, start: TextPosition, end: (TextPosition, None)=None):
        """
        Instantiate a TextRange.

        :param start: The TextPosition where the range begins. May not be
                      ``None``.
        :param end: The TextPosition where the range ends. When omitted
                    (``None``), a copy of ``start`` is used, i.e. the
                    range covers a single position.
        :raises TypeError: Raised when
                           - start is no TextPosition or None.
                           - end is no TextPosition.
        :raises ValueError: Raised when the end lies before the start;
                            negative ranges are not allowed.
        """
        self._start = start
        self._end = end if end else copy.deepcopy(start)
        if self._end < start:
            raise ValueError("End position can't be less than start position.")

    @classmethod
    def from_values(cls,
                    start_line=None,
                    start_column=None,
                    end_line=None,
                    end_column=None):
        """
        Build a TextRange from plain line/column numbers.

        :param start_line: Line number of the start position (first line
                           is 1).
        :param start_column: Column number of the start position (first
                             column is 1).
        :param end_line: Line number of the end position. When ``None``,
                         the end equals the start and ``end_column`` is
                         ignored.
        :param end_column: Column number of the end position.
        :return: The constructed TextRange.
        """
        begin = TextPosition(start_line, start_column)
        if end_line is None:
            return cls(begin)
        return cls(begin, TextPosition(end_line, end_column))

    @classmethod
    def join(cls, a, b):
        """
        Fuse two overlapping TextRanges into one covering both.

        :param a: TextRange (needs to overlap b)
        :param b: TextRange (needs to overlap a)
        :return: A TextRange spanning the union of the areas of a and b.
        """
        if not (isinstance(a, cls) and isinstance(b, cls)):
            raise TypeError(
                "only instances of {} can be joined".format(cls.__name__))
        if not a.overlaps(b):
            raise ValueError(
                "{}s must overlap to be joined".format(cls.__name__))
        return cls(min(a.start, b.start), max(a.end, b.end))

    @property
    def start(self):
        # TextPosition at which the range begins.
        return self._start

    @property
    def end(self):
        # TextPosition at which the range ends.
        return self._end

    def overlaps(self, other):
        """Tell whether this range and ``other`` share at least one
        position; adjacent (touching) ranges count as overlapping."""
        return other.start <= self.end and other.end >= self.start

    def expand(self, text_lines):
        """
        Return a new TextRange covering the same area of the file with
        every ``None`` coordinate resolved to an absolute value:

        self.start.line is None:   -> 1
        self.start.column is None: -> 1
        self.end.line is None:     -> last line of file
        self.end.column is None:   -> last column of self.end.line

        :param text_lines: File contents of the applicable file
        :return: TextRange with absolute values
        """
        first_line = self.start.line or 1
        first_column = self.start.column or 1
        last_line = self.end.line or len(text_lines)
        last_column = self.end.column or len(text_lines[last_line - 1])
        return TextRange.from_values(first_line,
                                     first_column,
                                     last_line,
                                     last_column)
| agpl-3.0 |
ByteMail/ByteMail | werkzeug/testapp.py | 303 | 9398 | # -*- coding: utf-8 -*-
"""
werkzeug.testapp
~~~~~~~~~~~~~~~~
Provide a small test application that can be used to test a WSGI server
and check it for WSGI compliance.
:copyright: (c) 2013 by the Werkzeug Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
import os
import sys
import werkzeug
from textwrap import wrap
from werkzeug.wrappers import BaseRequest as Request, BaseResponse as Response
from werkzeug.utils import escape
import base64
logo = Response(base64.b64decode(
'''R0lGODlhoACgAOMIAAEDACwpAEpCAGdgAJaKAM28AOnVAP3rAP/////////
//////////////////////yH5BAEKAAgALAAAAACgAKAAAAT+EMlJq704680R+F0ojmRpnuj0rWnrv
nB8rbRs33gu0bzu/0AObxgsGn3D5HHJbCUFyqZ0ukkSDlAidctNFg7gbI9LZlrBaHGtzAae0eloe25
7w9EDOX2fst/xenyCIn5/gFqDiVVDV4aGeYiKkhSFjnCQY5OTlZaXgZp8nJ2ekaB0SQOjqphrpnOiq
ncEn65UsLGytLVmQ6m4sQazpbtLqL/HwpnER8bHyLrLOc3Oz8PRONPU1crXN9na263dMt/g4SzjMeX
m5yDpLqgG7OzJ4u8lT/P69ej3JPn69kHzN2OIAHkB9RUYSFCFQYQJFTIkCDBiwoXWGnowaLEjRm7+G
p9A7Hhx4rUkAUaSLJlxHMqVMD/aSycSZkyTplCqtGnRAM5NQ1Ly5OmzZc6gO4d6DGAUKA+hSocWYAo
SlM6oUWX2O/o0KdaVU5vuSQLAa0ADwQgMEMB2AIECZhVSnTno6spgbtXmHcBUrQACcc2FrTrWS8wAf
78cMFBgwIBgbN+qvTt3ayikRBk7BoyGAGABAdYyfdzRQGV3l4coxrqQ84GpUBmrdR3xNIDUPAKDBSA
ADIGDhhqTZIWaDcrVX8EsbNzbkvCOxG8bN5w8ly9H8jyTJHC6DFndQydbguh2e/ctZJFXRxMAqqPVA
tQH5E64SPr1f0zz7sQYjAHg0In+JQ11+N2B0XXBeeYZgBZFx4tqBToiTCPv0YBgQv8JqA6BEf6RhXx
w1ENhRBnWV8ctEX4Ul2zc3aVGcQNC2KElyTDYyYUWvShdjDyMOGMuFjqnII45aogPhz/CodUHFwaDx
lTgsaOjNyhGWJQd+lFoAGk8ObghI0kawg+EV5blH3dr+digkYuAGSaQZFHFz2P/cTaLmhF52QeSb45
Jwxd+uSVGHlqOZpOeJpCFZ5J+rkAkFjQ0N1tah7JJSZUFNsrkeJUJMIBi8jyaEKIhKPomnC91Uo+NB
yyaJ5umnnpInIFh4t6ZSpGaAVmizqjpByDegYl8tPE0phCYrhcMWSv+uAqHfgH88ak5UXZmlKLVJhd
dj78s1Fxnzo6yUCrV6rrDOkluG+QzCAUTbCwf9SrmMLzK6p+OPHx7DF+bsfMRq7Ec61Av9i6GLw23r
idnZ+/OO0a99pbIrJkproCQMA17OPG6suq3cca5ruDfXCCDoS7BEdvmJn5otdqscn+uogRHHXs8cbh
EIfYaDY1AkrC0cqwcZpnM6ludx72x0p7Fo/hZAcpJDjax0UdHavMKAbiKltMWCF3xxh9k25N/Viud8
ba78iCvUkt+V6BpwMlErmcgc502x+u1nSxJSJP9Mi52awD1V4yB/QHONsnU3L+A/zR4VL/indx/y64
gqcj+qgTeweM86f0Qy1QVbvmWH1D9h+alqg254QD8HJXHvjQaGOqEqC22M54PcftZVKVSQG9jhkv7C
JyTyDoAJfPdu8v7DRZAxsP/ky9MJ3OL36DJfCFPASC3/aXlfLOOON9vGZZHydGf8LnxYJuuVIbl83y
Az5n/RPz07E+9+zw2A2ahz4HxHo9Kt79HTMx1Q7ma7zAzHgHqYH0SoZWyTuOLMiHwSfZDAQTn0ajk9
YQqodnUYjByQZhZak9Wu4gYQsMyEpIOAOQKze8CmEF45KuAHTvIDOfHJNipwoHMuGHBnJElUoDmAyX
c2Qm/R8Ah/iILCCJOEokGowdhDYc/yoL+vpRGwyVSCWFYZNljkhEirGXsalWcAgOdeAdoXcktF2udb
qbUhjWyMQxYO01o6KYKOr6iK3fE4MaS+DsvBsGOBaMb0Y6IxADaJhFICaOLmiWTlDAnY1KzDG4ambL
cWBA8mUzjJsN2KjSaSXGqMCVXYpYkj33mcIApyhQf6YqgeNAmNvuC0t4CsDbSshZJkCS1eNisKqlyG
cF8G2JeiDX6tO6Mv0SmjCa3MFb0bJaGPMU0X7c8XcpvMaOQmCajwSeY9G0WqbBmKv34DsMIEztU6Y2
KiDlFdt6jnCSqx7Dmt6XnqSKaFFHNO5+FmODxMCWBEaco77lNDGXBM0ECYB/+s7nKFdwSF5hgXumQe
EZ7amRg39RHy3zIjyRCykQh8Zo2iviRKyTDn/zx6EefptJj2Cw+Ep2FSc01U5ry4KLPYsTyWnVGnvb
UpyGlhjBUljyjHhWpf8OFaXwhp9O4T1gU9UeyPPa8A2l0p1kNqPXEVRm1AOs1oAGZU596t6SOR2mcB
Oco1srWtkaVrMUzIErrKri85keKqRQYX9VX0/eAUK1hrSu6HMEX3Qh2sCh0q0D2CtnUqS4hj62sE/z
aDs2Sg7MBS6xnQeooc2R2tC9YrKpEi9pLXfYXp20tDCpSP8rKlrD4axprb9u1Df5hSbz9QU0cRpfgn
kiIzwKucd0wsEHlLpe5yHXuc6FrNelOl7pY2+11kTWx7VpRu97dXA3DO1vbkhcb4zyvERYajQgAADs
='''), mimetype='image/png')
TEMPLATE = u'''\
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN"
"http://www.w3.org/TR/html4/loose.dtd">
<title>WSGI Information</title>
<style type="text/css">
@import url(http://fonts.googleapis.com/css?family=Ubuntu);
body { font-family: 'Lucida Grande', 'Lucida Sans Unicode', 'Geneva',
'Verdana', sans-serif; background-color: white; color: #000;
font-size: 15px; text-align: center; }
#logo { float: right; padding: 0 0 10px 10px; }
div.box { text-align: left; width: 45em; margin: auto; padding: 50px 0;
background-color: white; }
h1, h2 { font-family: 'Ubuntu', 'Lucida Grande', 'Lucida Sans Unicode',
'Geneva', 'Verdana', sans-serif; font-weight: normal; }
h1 { margin: 0 0 30px 0; }
h2 { font-size: 1.4em; margin: 1em 0 0.5em 0; }
table { width: 100%%; border-collapse: collapse; border: 1px solid #AFC5C9 }
table th { background-color: #AFC1C4; color: white; font-size: 0.72em;
font-weight: normal; width: 18em; vertical-align: top;
padding: 0.5em 0 0.1em 0.5em; }
table td { border: 1px solid #AFC5C9; padding: 0.1em 0 0.1em 0.5em; }
code { font-family: 'Consolas', 'Monaco', 'Bitstream Vera Sans Mono',
monospace; font-size: 0.7em; }
ul li { line-height: 1.5em; }
ul.path { font-size: 0.7em; margin: 0 -30px; padding: 8px 30px;
list-style: none; background: #E8EFF0; }
ul.path li { line-height: 1.6em; }
li.virtual { color: #999; text-decoration: underline; }
li.exp { background: white; }
</style>
<div class="box">
<img src="?resource=logo" id="logo" alt="[The Werkzeug Logo]" />
<h1>WSGI Information</h1>
<p>
This page displays all available information about the WSGI server and
the underlying Python interpreter.
<h2 id="python-interpreter">Python Interpreter</h2>
<table>
<tr>
<th>Python Version
<td>%(python_version)s
<tr>
<th>Platform
<td>%(platform)s [%(os)s]
<tr>
<th>API Version
<td>%(api_version)s
<tr>
<th>Byteorder
<td>%(byteorder)s
<tr>
<th>Werkzeug Version
<td>%(werkzeug_version)s
</table>
<h2 id="wsgi-environment">WSGI Environment</h2>
<table>%(wsgi_env)s</table>
<h2 id="installed-eggs">Installed Eggs</h2>
<p>
The following python packages were installed on the system as
Python eggs:
<ul>%(python_eggs)s</ul>
<h2 id="sys-path">System Path</h2>
<p>
The following paths are the current contents of the load path. The
following entries are looked up for Python packages. Note that not
all items in this path are folders. Gray and underlined items are
entries pointing to invalid resources or used by custom import hooks
such as the zip importer.
<p>
Items with a bright background were expanded for display from a relative
path. If you encounter such paths in the output you might want to check
your setup as relative paths are usually problematic in multithreaded
environments.
<ul class="path">%(sys_path)s</ul>
</div>
'''
def iter_sys_path():
    """Yield one ``(display_path, is_invalid, was_expanded)`` triple per
    ``sys.path`` entry.

    ``display_path`` abbreviates the user's home directory to ``~`` on
    POSIX systems, ``is_invalid`` flags entries that are not existing
    directories, and ``was_expanded`` flags entries that were relative
    before being resolved against the current working directory.
    """
    if os.name == 'posix':
        def shorten(p):
            home = os.path.expanduser('~')
            if p.startswith(home):
                return '~' + p[len(home):]
            return p
    else:
        def shorten(p):
            return p
    base = os.path.abspath(os.getcwd())
    for entry in sys.path:
        resolved = os.path.join(base, entry or os.path.curdir)
        yield (shorten(os.path.normpath(resolved)),
               not os.path.isdir(resolved),
               resolved != entry)
def render_testapp(req):
    """Render the WSGI-information page for *req* and return it as
    UTF-8-encoded HTML bytes."""
    # Installed eggs: pkg_resources may be unavailable, in which case the
    # corresponding section of the page is simply left empty.
    try:
        import pkg_resources
    except ImportError:
        eggs = ()
    else:
        eggs = sorted(pkg_resources.working_set,
                      key=lambda dist: dist.project_name.lower())
    egg_rows = []
    for dist in eggs:
        # Some distributions raise when their version cannot be parsed.
        try:
            version = dist.version
        except (ValueError, AttributeError):
            version = 'unknown'
        egg_rows.append('<li>%s <small>[%s]</small>' % (
            escape(dist.project_name),
            escape(version)
        ))
    # One table row per WSGI environ entry, sorted case-insensitively.
    env_rows = [
        '<tr><th>%s<td><code>%s</code>' % (
            escape(str(key)),
            ' '.join(wrap(escape(repr(value))))
        )
        for key, value in sorted(req.environ.items(),
                                 key=lambda item: repr(item[0]).lower())
    ]
    # One list item per sys.path entry, with CSS classes marking invalid
    # ('virtual') and expanded-relative ('exp') entries.
    path_rows = []
    for entry, invalid, expanded in iter_sys_path():
        classes = [name for name, flag in (('virtual', invalid),
                                           ('exp', expanded)) if flag]
        attr = ' class="%s"' % ' '.join(classes) if classes else ''
        path_rows.append('<li%s>%s' % (attr, escape(entry)))
    return (TEMPLATE % {
        'python_version': '<br>'.join(escape(sys.version).splitlines()),
        'platform': escape(sys.platform),
        'os': escape(os.name),
        'api_version': sys.api_version,
        'byteorder': sys.byteorder,
        'werkzeug_version': werkzeug.__version__,
        'python_eggs': '\n'.join(egg_rows),
        'wsgi_env': '\n'.join(env_rows),
        'sys_path': '\n'.join(path_rows)
    }).encode('utf-8')
def test_app(environ, start_response):
    """Simple test application that dumps the environment. You can use
    it to check if Werkzeug is working properly:
    .. sourcecode:: pycon
    >>> from werkzeug.serving import run_simple
    >>> from werkzeug.testapp import test_app
    >>> run_simple('localhost', 3000, test_app)
    * Running on http://localhost:3000/
    The application displays important information from the WSGI environment,
    the Python interpreter and the installed libraries.
    """
    request = Request(environ, populate_request=False)
    # '?resource=logo' serves the embedded PNG; anything else renders the
    # info page.
    if request.args.get('resource') == 'logo':
        return logo(environ, start_response)
    page = Response(render_testapp(request), mimetype='text/html')
    return page(environ, start_response)
if __name__ == '__main__':
from werkzeug.serving import run_simple
run_simple('localhost', 5000, test_app, use_reloader=True)
| mit |
wkoathp/glance | glance/tests/unit/api/test_cmd_cache_manage.py | 13 | 15382 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import optparse
import mock
from glance.cmd import cache_manage
from glance.common import exception
import glance.common.utils
import glance.image_cache.client
from glance.tests import utils as test_utils
class TestGlanceCmdManage(test_utils.BaseTestCase):
    """Unit tests for the ``glance-cache-manage`` CLI entry points in
    ``glance.cmd.cache_manage``.

    The CacheClient and get_client/user_confirm helpers are mocked out, so
    these tests only exercise the command-dispatch and confirmation logic.
    """
    # --- listing cached/queued images -----------------------------------
    @mock.patch.object(glance.image_cache.client.CacheClient,
                       'get_cached_images')
    @mock.patch.object(glance.common.utils.PrettyTable, 'make_row')
    def test_list_cached_images(self, mock_row_create, mock_images):
        """
        Verify that list_cached() method correctly processes images with all
        filled data and images with not filled 'last_accessed' field.
        """
        mock_images.return_value = [
            {'last_accessed': float(0),
             'last_modified': float(1378985797.124511),
             'image_id': '1', 'size': '128', 'hits': '1'},
            {'last_accessed': float(1378985797.124511),
             'last_modified': float(1378985797.124511),
             'image_id': '2', 'size': '255', 'hits': '2'}]
        cache_manage.list_cached(mock.Mock(), '')
        # One table row should be rendered per cached image.
        self.assertEqual(len(mock_images.return_value),
                         mock_row_create.call_count)
    @mock.patch.object(glance.image_cache.client.CacheClient,
                       'get_cached_images')
    def test_list_cached_images_empty(self, mock_images):
        """
        Verify that list_cached() method handles a case when no images are
        cached without errors.
        """
        mock_images.return_value = []
        self.assertEqual(cache_manage.SUCCESS,
                         cache_manage.list_cached(mock.Mock(), ''))
    @mock.patch.object(glance.image_cache.client.CacheClient,
                       'get_queued_images')
    @mock.patch.object(glance.common.utils.PrettyTable, 'make_row')
    def test_list_queued_images(self, mock_row_create, mock_images):
        """Verify that list_queued() method correctly processes images."""
        mock_images.return_value = [
            {'image_id': '1'}, {'image_id': '2'}]
        cache_manage.list_queued(mock.Mock(), '')
        self.assertEqual(len(mock_images.return_value),
                         mock_row_create.call_count)
    @mock.patch.object(glance.image_cache.client.CacheClient,
                       'get_queued_images')
    def test_list_queued_images_empty(self, mock_images):
        """
        Verify that list_queued() method handles a case when no images were
        queued without errors.
        """
        mock_images.return_value = []
        self.assertEqual(cache_manage.SUCCESS,
                         cache_manage.list_queued(mock.Mock(), ''))
    # --- queue_image ----------------------------------------------------
    def test_queue_image_without_index(self):
        # No image id argument -> FAILURE without contacting the client.
        self.assertEqual(cache_manage.FAILURE,
                         cache_manage.queue_image(mock.Mock(), []))
    @mock.patch.object(glance.cmd.cache_manage, 'user_confirm')
    @mock.patch.object(glance.cmd.cache_manage, 'get_client')
    def test_queue_image_not_forced_not_confirmed(self,
                                                  mock_client, mock_confirm):
        # options.forced set to False and queue confirmation set to False.
        mock_confirm.return_value = False
        mock_options = mock.Mock()
        mock_options.force = False
        self.assertEqual(cache_manage.SUCCESS,
                         cache_manage.queue_image(mock_options, ['img_id']))
        self.assertFalse(mock_client.called)
    @mock.patch.object(glance.cmd.cache_manage, 'user_confirm')
    @mock.patch.object(glance.cmd.cache_manage, 'get_client')
    def test_queue_image_not_forced_confirmed(self, mock_client, mock_confirm):
        # options.forced set to False and queue confirmation set to True.
        mock_confirm.return_value = True
        mock_options = mock.Mock()
        mock_options.force = False
        mock_options.verbose = True  # to cover additional condition and line
        # attach_mock lets us assert on the full client call chain below.
        manager = mock.MagicMock()
        manager.attach_mock(mock_client, 'mock_client')
        self.assertEqual(cache_manage.SUCCESS,
                         cache_manage.queue_image(mock_options, ['img_id']))
        self.assertTrue(mock_client.called)
        self.assertIn(
            mock.call.mock_client().queue_image_for_caching('img_id'),
            manager.mock_calls)
    # --- delete_cached_image / delete_all_cached_images -----------------
    def test_delete_cached_image_without_index(self):
        self.assertEqual(cache_manage.FAILURE,
                         cache_manage.delete_cached_image(mock.Mock(), []))
    @mock.patch.object(glance.cmd.cache_manage, 'user_confirm')
    @mock.patch.object(glance.cmd.cache_manage, 'get_client')
    def test_delete_cached_image_not_forced_not_confirmed(self,
                                                          mock_client,
                                                          mock_confirm):
        # options.forced set to False and delete confirmation set to False.
        mock_confirm.return_value = False
        mock_options = mock.Mock()
        mock_options.force = False
        self.assertEqual(
            cache_manage.SUCCESS,
            cache_manage.delete_cached_image(mock_options, ['img_id']))
        self.assertFalse(mock_client.called)
    @mock.patch.object(glance.cmd.cache_manage, 'user_confirm')
    @mock.patch.object(glance.cmd.cache_manage, 'get_client')
    def test_delete_cached_image_not_forced_confirmed(self, mock_client,
                                                      mock_confirm):
        # options.forced set to False and delete confirmation set to True.
        mock_confirm.return_value = True
        mock_options = mock.Mock()
        mock_options.force = False
        mock_options.verbose = True  # to cover additional condition and line
        manager = mock.MagicMock()
        manager.attach_mock(mock_client, 'mock_client')
        self.assertEqual(
            cache_manage.SUCCESS,
            cache_manage.delete_cached_image(mock_options, ['img_id']))
        self.assertIn(
            mock.call.mock_client().delete_cached_image('img_id'),
            manager.mock_calls)
    @mock.patch.object(glance.cmd.cache_manage, 'user_confirm')
    @mock.patch.object(glance.cmd.cache_manage, 'get_client')
    def test_delete_cached_images_not_forced_not_confirmed(self,
                                                           mock_client,
                                                           mock_confirm):
        # options.forced set to False and delete confirmation set to False.
        mock_confirm.return_value = False
        mock_options = mock.Mock()
        mock_options.force = False
        self.assertEqual(
            cache_manage.SUCCESS,
            cache_manage.delete_all_cached_images(mock_options, None))
        self.assertFalse(mock_client.called)
    @mock.patch.object(glance.cmd.cache_manage, 'user_confirm')
    @mock.patch.object(glance.cmd.cache_manage, 'get_client')
    def test_delete_cached_images_not_forced_confirmed(self, mock_client,
                                                       mock_confirm):
        # options.forced set to False and delete confirmation set to True.
        mock_confirm.return_value = True
        mock_options = mock.Mock()
        mock_options.force = False
        mock_options.verbose = True  # to cover additional condition and line
        manager = mock.MagicMock()
        manager.attach_mock(mock_client, 'mock_client')
        self.assertEqual(
            cache_manage.SUCCESS,
            cache_manage.delete_all_cached_images(mock_options, None))
        self.assertTrue(mock_client.called)
        self.assertIn(
            mock.call.mock_client().delete_all_cached_images(),
            manager.mock_calls)
    # --- delete_queued_image / delete_all_queued_images -----------------
    def test_delete_queued_image_without_index(self):
        self.assertEqual(cache_manage.FAILURE,
                         cache_manage.delete_queued_image(mock.Mock(), []))
    @mock.patch.object(glance.cmd.cache_manage, 'user_confirm')
    @mock.patch.object(glance.cmd.cache_manage, 'get_client')
    def test_delete_queued_image_not_forced_not_confirmed(self,
                                                          mock_client,
                                                          mock_confirm):
        # options.forced set to False and delete confirmation set to False.
        mock_confirm.return_value = False
        mock_options = mock.Mock()
        mock_options.force = False
        self.assertEqual(
            cache_manage.SUCCESS,
            cache_manage.delete_queued_image(mock_options, ['img_id']))
        self.assertFalse(mock_client.called)
    @mock.patch.object(glance.cmd.cache_manage, 'user_confirm')
    @mock.patch.object(glance.cmd.cache_manage, 'get_client')
    def test_delete_queued_image_not_forced_confirmed(self, mock_client,
                                                      mock_confirm):
        # options.forced set to False and delete confirmation set to True.
        mock_confirm.return_value = True
        mock_options = mock.Mock()
        mock_options.force = False
        mock_options.verbose = True  # to cover additional condition and line
        manager = mock.MagicMock()
        manager.attach_mock(mock_client, 'mock_client')
        self.assertEqual(
            cache_manage.SUCCESS,
            cache_manage.delete_queued_image(mock_options, ['img_id']))
        self.assertTrue(mock_client.called)
        self.assertIn(
            mock.call.mock_client().delete_queued_image('img_id'),
            manager.mock_calls)
    @mock.patch.object(glance.cmd.cache_manage, 'user_confirm')
    @mock.patch.object(glance.cmd.cache_manage, 'get_client')
    def test_delete_queued_images_not_forced_not_confirmed(self,
                                                           mock_client,
                                                           mock_confirm):
        # options.forced set to False and delete confirmation set to False.
        mock_confirm.return_value = False
        mock_options = mock.Mock()
        mock_options.force = False
        self.assertEqual(
            cache_manage.SUCCESS,
            cache_manage.delete_all_queued_images(mock_options, None))
        self.assertFalse(mock_client.called)
    @mock.patch.object(glance.cmd.cache_manage, 'user_confirm')
    @mock.patch.object(glance.cmd.cache_manage, 'get_client')
    def test_delete_queued_images_not_forced_confirmed(self, mock_client,
                                                       mock_confirm):
        # options.forced set to False and delete confirmation set to True.
        mock_confirm.return_value = True
        mock_options = mock.Mock()
        mock_options.force = False
        mock_options.verbose = True  # to cover additional condition and line
        manager = mock.MagicMock()
        manager.attach_mock(mock_client, 'mock_client')
        self.assertEqual(
            cache_manage.SUCCESS,
            cache_manage.delete_all_queued_images(mock_options, None))
        self.assertTrue(mock_client.called)
        self.assertIn(
            mock.call.mock_client().delete_all_queued_images(),
            manager.mock_calls)
    # --- error handling in the catch_error decorator --------------------
    @mock.patch.object(glance.cmd.cache_manage, 'get_client')
    def test_catch_error_not_found(self, mock_function):
        mock_function.side_effect = exception.NotFound()
        self.assertEqual(cache_manage.FAILURE,
                         cache_manage.list_cached(mock.Mock(), None))
    @mock.patch.object(glance.cmd.cache_manage, 'get_client')
    def test_catch_error_forbidden(self, mock_function):
        mock_function.side_effect = exception.Forbidden()
        self.assertEqual(cache_manage.FAILURE,
                         cache_manage.list_cached(mock.Mock(), None))
    @mock.patch.object(glance.cmd.cache_manage, 'get_client')
    def test_catch_error_unhandled(self, mock_function):
        mock_function.side_effect = exception.Duplicate()
        my_mock = mock.Mock()
        my_mock.debug = False
        self.assertEqual(cache_manage.FAILURE,
                         cache_manage.list_cached(my_mock, None))
    @mock.patch.object(glance.cmd.cache_manage, 'get_client')
    def test_catch_error_unhandled_debug_mode(self, mock_function):
        # In debug mode unhandled exceptions must propagate, not be masked.
        mock_function.side_effect = exception.Duplicate()
        my_mock = mock.Mock()
        my_mock.debug = True
        self.assertRaises(exception.Duplicate,
                          cache_manage.list_cached, my_mock, None)
    # --- env/option/command helpers -------------------------------------
    def test_cache_manage_env(self):
        def_value = 'sometext12345678900987654321'
        self.assertNotEqual(def_value,
                            cache_manage.env('PATH', default=def_value))
    def test_cache_manage_env_default(self):
        def_value = 'sometext12345678900987654321'
        self.assertEqual(def_value,
                         cache_manage.env('TMPVALUE1234567890',
                                          default=def_value))
    def test_create_option(self):
        oparser = optparse.OptionParser()
        cache_manage.create_options(oparser)
        self.assertTrue(len(oparser.option_list) > 0)
    @mock.patch.object(glance.cmd.cache_manage, 'lookup_command')
    def test_parse_options_no_parameters(self, mock_lookup):
        oparser = optparse.OptionParser()
        cache_manage.create_options(oparser)
        result = self.assertRaises(SystemExit, cache_manage.parse_options,
                                   oparser, [])
        self.assertEqual(0, result.code)
        self.assertFalse(mock_lookup.called)
    @mock.patch.object(optparse.OptionParser, 'print_usage')
    def test_parse_options_no_arguments(self, mock_printout):
        oparser = optparse.OptionParser()
        cache_manage.create_options(oparser)
        result = self.assertRaises(SystemExit, cache_manage.parse_options,
                                   oparser, ['-p', '1212'])
        self.assertEqual(0, result.code)
        self.assertTrue(mock_printout.called)
    @mock.patch.object(glance.cmd.cache_manage, 'lookup_command')
    def test_parse_options_retrieve_command(self, mock_lookup):
        mock_lookup.return_value = True
        oparser = optparse.OptionParser()
        cache_manage.create_options(oparser)
        (options, command, args) = cache_manage.parse_options(oparser,
                                                              ['-p', '1212',
                                                               'list-cached'])
        self.assertTrue(command)
    def test_lookup_command_unsupported_command(self):
        self.assertRaises(SystemExit, cache_manage.lookup_command, mock.Mock(),
                          'unsupported_command')
    def test_lookup_command_supported_command(self):
        command = cache_manage.lookup_command(mock.Mock(), 'list-cached')
        self.assertEqual(cache_manage.list_cached, command)
| apache-2.0 |
Tesora-Release/tesora-horizon | openstack_dashboard/dashboards/admin/networks/ports/views.py | 45 | 3750 | # Copyright 2012 NEC Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext_lazy as _
from horizon import exceptions
from horizon import forms
from horizon.utils import memoized
from openstack_dashboard import api
from openstack_dashboard.dashboards.admin.networks.ports \
import forms as ports_forms
from openstack_dashboard.dashboards.admin.networks.ports \
import tables as ports_tables
from openstack_dashboard.dashboards.admin.networks.ports \
import tabs as ports_tabs
from openstack_dashboard.dashboards.project.networks.ports \
import views as project_views
class CreateView(forms.ModalFormView):
    """Modal view for creating a port on a network (admin dashboard)."""
    form_class = ports_forms.CreatePort
    form_id = "create_port_form"
    modal_header = _("Create Port")
    submit_label = _("Create Port")
    submit_url = "horizon:admin:networks:addport"
    page_title = _("Create Port")
    template_name = 'admin/networks/ports/create.html'
    url = 'horizon:admin:networks:detail'
    def get_success_url(self):
        """Return the network detail page for the current network."""
        return reverse(self.url,
                       args=(self.kwargs['network_id'],))
    @memoized.memoized_method
    def get_object(self):
        """Fetch the network this port belongs to.

        On failure, redirects back to the network detail page with an
        error message; in that case this method returns None.
        """
        try:
            network_id = self.kwargs["network_id"]
            return api.neutron.network_get(self.request, network_id)
        except Exception:
            redirect = reverse(self.url,
                               args=(self.kwargs['network_id'],))
            msg = _("Unable to retrieve network.")
            exceptions.handle(self.request, msg, redirect=redirect)
    def get_context_data(self, **kwargs):
        """Add the network object and form submit/cancel URLs."""
        context = super(CreateView, self).get_context_data(**kwargs)
        context['network'] = self.get_object()
        args = (self.kwargs['network_id'],)
        context['submit_url'] = reverse(self.submit_url, args=args)
        context['cancel_url'] = reverse(self.url, args=args)
        return context
    def get_initial(self):
        """Seed the form with the target network's id and name."""
        network = self.get_object()
        return {"network_id": self.kwargs['network_id'],
                "network_name": network.name}
class DetailView(project_views.DetailView):
    """Admin port detail view; reuses the project-dashboard DetailView but
    swaps in admin-specific tabs, table actions, and redirect URL."""
    tab_group_class = ports_tabs.PortDetailTabs
    def get_context_data(self, **kwargs):
        """Add the admin index URL and row actions for the current port."""
        context = super(DetailView, self).get_context_data(**kwargs)
        port = context["port"]
        # Build a table only to render this port's row actions in the header.
        table = ports_tables.PortsTable(self.request,
                                        network_id=port.network_id)
        context["url"] = reverse('horizon:admin:networks:index')
        context["actions"] = table.render_row_actions(port)
        return context
    @staticmethod
    def get_redirect_url():
        """Fall back to the admin networks index on errors."""
        return reverse('horizon:admin:networks:index')
class UpdateView(project_views.UpdateView):
    """Admin port update view; extends the project-dashboard UpdateView
    with the admin form, template, and the binding host field."""
    form_class = ports_forms.UpdatePort
    template_name = 'admin/networks/ports/update.html'
    context_object_name = 'port'
    submit_url = "horizon:admin:networks:editport"
    success_url = 'horizon:admin:networks:detail'
    def get_initial(self):
        """Extend the base initial data with the port's binding host id
        (admin-only attribute)."""
        initial = super(UpdateView, self).get_initial()
        port = self._get_object()
        initial['binding__host_id'] = port['binding__host_id']
        return initial
| apache-2.0 |
Fusion-Rom/android_external_chromium_org | build/android/pylib/perf/setup.py | 57 | 2829 | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Generates test runner factory and tests for performance tests."""
import json
import fnmatch
import logging
import os
import shutil
from pylib import android_commands
from pylib import constants
from pylib import forwarder
from pylib.device import device_list
from pylib.perf import test_runner
from pylib.utils import test_environment
def _GetAllDevices():
  """Return the sorted list of known device serials.

  Prefers the persisted device list under CHROMIUM_OUT_DIR; if that file
  cannot be read, falls back to the devices currently attached via adb.
  """
  out_dir = os.environ.get('CHROMIUM_OUT_DIR', 'out')
  devices_path = os.path.join(out_dir, device_list.LAST_DEVICES_FILENAME)
  try:
    known_devices = device_list.GetPersistentDeviceList(devices_path)
  except IOError as e:
    logging.error('Unable to find %s [%s]', devices_path, e)
    known_devices = android_commands.GetAttachedDevices()
  return sorted(known_devices)
def _GetStepsDictFromSingleStep(test_options):
# Running a single command, build the tests structure.
steps_dict = {
'version': 1,
'steps': {
'single_step': {
'device_affinity': 0,
'cmd': test_options.single_step
},
}
}
return steps_dict
def _GetStepsDict(test_options):
if test_options.single_step:
return _GetStepsDictFromSingleStep(test_options)
if test_options.steps:
with file(test_options.steps, 'r') as f:
steps = json.load(f)
# Already using the new format.
assert steps['version'] == 1
return steps
def Setup(test_options):
  """Create and return the test runner factory and tests.
  Args:
    test_options: A PerformanceOptions object.
  Returns:
    A tuple of (TestRunnerFactory, tests, devices).
  """
  # TODO(bulach): remove this once the bot side lands. BUG=318369
  constants.SetBuildType('Release')
  # Start from a clean perf output directory.
  if os.path.exists(constants.PERF_OUTPUT_DIR):
    shutil.rmtree(constants.PERF_OUTPUT_DIR)
  os.makedirs(constants.PERF_OUTPUT_DIR)
  # Before running the tests, kill any leftover server.
  test_environment.CleanupLeftoverProcesses()
  forwarder.Forwarder.UseMultiprocessing()
  # We want to keep device affinity, so return all devices ever seen.
  all_devices = _GetAllDevices()
  steps_dict = _GetStepsDict(test_options)
  sorted_step_names = sorted(steps_dict['steps'].keys())
  if test_options.test_filter:
    sorted_step_names = fnmatch.filter(sorted_step_names,
                                       test_options.test_filter)
  flaky_steps = []
  if test_options.flaky_steps:
    # open() instead of the Python 2-only file() builtin, which was
    # removed in Python 3.
    with open(test_options.flaky_steps, 'r') as f:
      flaky_steps = json.load(f)
  def TestRunnerFactory(device, shard_index):
    # One runner per shard; each keeps its device affinity.
    return test_runner.TestRunner(
        test_options, device, shard_index, len(all_devices),
        steps_dict, flaky_steps)
  return (TestRunnerFactory, sorted_step_names, all_devices)
| bsd-3-clause |
0x0all/nupic | external/linux32/lib/python2.6/site-packages/matplotlib/quiver.py | 69 | 36790 | """
Support for plotting vector fields.
Presently this contains Quiver and Barb. Quiver plots an arrow in the
direction of the vector, with the size of the arrow related to the
magnitude of the vector.
Barbs are like quiver in that they point along a vector, but
the magnitude of the vector is given schematically by the presence of barbs
or flags on the barb.
This will also become a home for things such as standard
deviation ellipses, which can and will be derived very easily from
the Quiver code.
"""
import numpy as np
from numpy import ma
import matplotlib.collections as collections
import matplotlib.transforms as transforms
import matplotlib.text as mtext
import matplotlib.artist as martist
import matplotlib.font_manager as font_manager
from matplotlib.cbook import delete_masked_points
from matplotlib.patches import CirclePolygon
import math
_quiver_doc = """
Plot a 2-D field of arrows.
call signatures::
quiver(U, V, **kw)
quiver(U, V, C, **kw)
quiver(X, Y, U, V, **kw)
quiver(X, Y, U, V, C, **kw)
Arguments:
*X*, *Y*:
The x and y coordinates of the arrow locations (default is tail of
arrow; see *pivot* kwarg)
*U*, *V*:
give the *x* and *y* components of the arrow vectors
*C*:
an optional array used to map colors to the arrows
All arguments may be 1-D or 2-D arrays or sequences. If *X* and *Y*
are absent, they will be generated as a uniform grid. If *U* and *V*
are 2-D arrays but *X* and *Y* are 1-D, and if len(*X*) and len(*Y*)
match the column and row dimensions of *U*, then *X* and *Y* will be
expanded with :func:`numpy.meshgrid`.
*U*, *V*, *C* may be masked arrays, but masked *X*, *Y* are not
supported at present.
Keyword arguments:
*units*: ['width' | 'height' | 'dots' | 'inches' | 'x' | 'y' ]
arrow units; the arrow dimensions *except for length* are in
multiples of this unit.
* 'width' or 'height': the width or height of the axes
* 'dots' or 'inches': pixels or inches, based on the figure dpi
* 'x' or 'y': *X* or *Y* data units
The arrows scale differently depending on the units. For
'x' or 'y', the arrows get larger as one zooms in; for other
units, the arrow size is independent of the zoom state. For
'width or 'height', the arrow size increases with the width and
height of the axes, respectively, when the the window is resized;
for 'dots' or 'inches', resizing does not change the arrows.
*angles*: ['uv' | 'xy' | array]
With the default 'uv', the arrow aspect ratio is 1, so that
if *U*==*V* the angle of the arrow on the plot is 45 degrees
CCW from the *x*-axis.
With 'xy', the arrow points from (x,y) to (x+u, y+v).
Alternatively, arbitrary angles may be specified as an array
of values in degrees, CCW from the *x*-axis.
*scale*: [ None | float ]
data units per arrow unit, e.g. m/s per plot width; a smaller
scale parameter makes the arrow longer. If *None*, a simple
autoscaling algorithm is used, based on the average vector length
and the number of vectors.
*width*:
shaft width in arrow units; default depends on choice of units,
above, and number of vectors; a typical starting value is about
0.005 times the width of the plot.
*headwidth*: scalar
head width as multiple of shaft width, default is 3
*headlength*: scalar
head length as multiple of shaft width, default is 5
*headaxislength*: scalar
head length at shaft intersection, default is 4.5
*minshaft*: scalar
length below which arrow scales, in units of head length. Do not
set this to less than 1, or small arrows will look terrible!
Default is 1
*minlength*: scalar
minimum length as a multiple of shaft width; if an arrow length
is less than this, plot a dot (hexagon) of this diameter instead.
Default is 1.
*pivot*: [ 'tail' | 'middle' | 'tip' ]
The part of the arrow that is at the grid point; the arrow rotates
about this point, hence the name *pivot*.
*color*: [ color | color sequence ]
This is a synonym for the
:class:`~matplotlib.collections.PolyCollection` facecolor kwarg.
If *C* has been set, *color* has no effect.
The defaults give a slightly swept-back arrow; to make the head a
triangle, make *headaxislength* the same as *headlength*. To make the
arrow more pointed, reduce *headwidth* or increase *headlength* and
*headaxislength*. To make the head smaller relative to the shaft,
scale down all the head parameters. You will probably do best to leave
minshaft alone.
linewidths and edgecolors can be used to customize the arrow
outlines. Additional :class:`~matplotlib.collections.PolyCollection`
keyword arguments:
%(PolyCollection)s
""" % martist.kwdocd
_quiverkey_doc = """
Add a key to a quiver plot.
call signature::
quiverkey(Q, X, Y, U, label, **kw)
Arguments:
*Q*:
The Quiver instance returned by a call to quiver.
*X*, *Y*:
The location of the key; additional explanation follows.
*U*:
The length of the key
*label*:
a string with the length and units of the key
Keyword arguments:
*coordinates* = [ 'axes' | 'figure' | 'data' | 'inches' ]
Coordinate system and units for *X*, *Y*: 'axes' and 'figure' are
normalized coordinate systems with 0,0 in the lower left and 1,1
in the upper right; 'data' are the axes data coordinates (used for
the locations of the vectors in the quiver plot itself); 'inches'
is position in the figure in inches, with 0,0 at the lower left
corner.
*color*:
overrides face and edge colors from *Q*.
*labelpos* = [ 'N' | 'S' | 'E' | 'W' ]
Position the label above, below, to the right, to the left of the
arrow, respectively.
*labelsep*:
Distance in inches between the arrow and the label. Default is
0.1
*labelcolor*:
defaults to default :class:`~matplotlib.text.Text` color.
*fontproperties*:
A dictionary with keyword arguments accepted by the
:class:`~matplotlib.font_manager.FontProperties` initializer:
*family*, *style*, *variant*, *size*, *weight*
Any additional keyword arguments are used to override vector
properties taken from *Q*.
The positioning of the key depends on *X*, *Y*, *coordinates*, and
*labelpos*. If *labelpos* is 'N' or 'S', *X*, *Y* give the position
of the middle of the key arrow. If *labelpos* is 'E', *X*, *Y*
positions the head, and if *labelpos* is 'W', *X*, *Y* positions the
tail; in either of these two cases, *X*, *Y* is somewhere in the
middle of the arrow+label key object.
"""
class QuiverKey(martist.Artist):
    """ Labelled arrow for use as a quiver plot scale key.
    """
    # Class-level lookup tables mapping the *labelpos* code ('N'/'S'/'E'/'W')
    # to the text alignment used for the caption and to the arrow pivot used
    # when the key arrow is built.
    halign = {'N': 'center', 'S': 'center', 'E': 'left', 'W': 'right'}
    valign = {'N': 'bottom', 'S': 'top', 'E': 'center', 'W': 'center'}
    pivot = {'N': 'mid', 'S': 'mid', 'E': 'tip', 'W': 'tail'}
    def __init__(self, Q, X, Y, U, label, **kw):
        # Q is the parent Quiver instance; (X, Y) positions the key in the
        # coordinate system named by the 'coordinates' kwarg; U is the vector
        # length the key represents and *label* its caption.
        martist.Artist.__init__(self)
        self.Q = Q
        self.X = X
        self.Y = Y
        self.U = U
        self.coord = kw.pop('coordinates', 'axes')
        self.color = kw.pop('color', None)
        self.label = label
        # labelsep is stored in inches and converted to pixels with the
        # figure dpi; it is recomputed whenever the dpi changes.
        self._labelsep_inches = kw.pop('labelsep', 0.1)
        self.labelsep = (self._labelsep_inches * Q.ax.figure.dpi)
        def on_dpi_change(fig):
            self.labelsep = (self._labelsep_inches * fig.dpi)
            self._initialized = False # simple brute force update
                                      # works because _init is called
                                      # at the start of draw.
        Q.ax.figure.callbacks.connect('dpi_changed', on_dpi_change)
        self.labelpos = kw.pop('labelpos', 'N')
        self.labelcolor = kw.pop('labelcolor', None)
        self.fontproperties = kw.pop('fontproperties', dict())
        # Any remaining kwargs override polygon properties taken from Q when
        # the key arrow is built in _init().
        self.kw = kw
        _fp = self.fontproperties
        #boxprops = dict(facecolor='red')
        self.text = mtext.Text(text=label, # bbox=boxprops,
                       horizontalalignment=self.halign[self.labelpos],
                       verticalalignment=self.valign[self.labelpos],
                       fontproperties=font_manager.FontProperties(**_fp))
        if self.labelcolor is not None:
            self.text.set_color(self.labelcolor)
        self._initialized = False
        # Draw slightly above the parent quiver so the key is not hidden.
        self.zorder = Q.zorder + 0.1
    __init__.__doc__ = _quiverkey_doc
    def _init(self):
        # Build (or rebuild) the key arrow polygon.  The parent quiver's
        # pivot is temporarily swapped for the one implied by *labelpos*,
        # then restored, so the key arrow is anchored correctly.
        if True: ##not self._initialized:
            self._set_transform()
            _pivot = self.Q.pivot
            self.Q.pivot = self.pivot[self.labelpos]
            self.verts = self.Q._make_verts(np.array([self.U]),
                                            np.zeros((1,)))
            self.Q.pivot = _pivot
            kw = self.Q.polykw
            kw.update(self.kw)
            self.vector = collections.PolyCollection(self.verts,
                                         offsets=[(self.X,self.Y)],
                                         transOffset=self.get_transform(),
                                         **kw)
            if self.color is not None:
                self.vector.set_color(self.color)
            self.vector.set_transform(self.Q.get_transform())
            self._initialized = True
    def _text_x(self, x):
        # Offset the caption horizontally from the arrow for E/W placement.
        if self.labelpos == 'E':
            return x + self.labelsep
        elif self.labelpos == 'W':
            return x - self.labelsep
        else:
            return x
    def _text_y(self, y):
        # Offset the caption vertically from the arrow for N/S placement.
        if self.labelpos == 'N':
            return y + self.labelsep
        elif self.labelpos == 'S':
            return y - self.labelsep
        else:
            return y
    def draw(self, renderer):
        # Rebuild the arrow each draw (see _init), then draw arrow + caption.
        self._init()
        self.vector.draw(renderer)
        x, y = self.get_transform().transform_point((self.X, self.Y))
        self.text.set_x(self._text_x(x))
        self.text.set_y(self._text_y(y))
        self.text.draw(renderer)
    def _set_transform(self):
        # Choose the transform that interprets (X, Y) according to the
        # 'coordinates' kwarg: data, axes fraction, figure fraction or inches.
        if self.coord == 'data':
            self.set_transform(self.Q.ax.transData)
        elif self.coord == 'axes':
            self.set_transform(self.Q.ax.transAxes)
        elif self.coord == 'figure':
            self.set_transform(self.Q.ax.figure.transFigure)
        elif self.coord == 'inches':
            self.set_transform(self.Q.ax.figure.dpi_scale_trans)
        else:
            raise ValueError('unrecognized coordinates')
    def set_figure(self, fig):
        # Keep the child Text artist attached to the same figure.
        martist.Artist.set_figure(self, fig)
        self.text.set_figure(fig)
    def contains(self, mouseevent):
        # Maybe the dictionary should allow one to
        # distinguish between a text hit and a vector hit.
        if (self.text.contains(mouseevent)[0]
                or self.vector.contains(mouseevent)[0]):
            return True, {}
        return False, {}
quiverkey_doc = _quiverkey_doc
class Quiver(collections.PolyCollection):
    """
    Specialized PolyCollection for arrows.
    The only API method is set_UVC(), which can be used
    to change the size, orientation, and color of the
    arrows; their locations are fixed when the class is
    instantiated. Possibly this method will be useful
    in animations.
    Much of the work in this class is done in the draw()
    method so that as much information as possible is available
    about the plot. In subsequent draw() calls, recalculation
    is limited to things that might have changed, so there
    should be no performance penalty from putting the calculations
    in the draw() method.
    """
    def __init__(self, ax, *args, **kw):
        self.ax = ax
        X, Y, U, V, C = self._parse_args(*args)
        self.X = X
        self.Y = Y
        # Arrow tail locations as an (N, 2) array, used as the offsets of
        # the underlying PolyCollection.
        self.XY = np.hstack((X[:,np.newaxis], Y[:,np.newaxis]))
        self.N = len(X)
        self.scale = kw.pop('scale', None)
        self.headwidth = kw.pop('headwidth', 3)
        self.headlength = float(kw.pop('headlength', 5))
        self.headaxislength = kw.pop('headaxislength', 4.5)
        self.minshaft = kw.pop('minshaft', 1)
        self.minlength = kw.pop('minlength', 1)
        self.units = kw.pop('units', 'width')
        self.angles = kw.pop('angles', 'uv')
        self.width = kw.pop('width', None)
        self.color = kw.pop('color', 'k')
        self.pivot = kw.pop('pivot', 'tail')
        kw.setdefault('facecolors', self.color)
        kw.setdefault('linewidths', (0,))
        collections.PolyCollection.__init__(self, [], offsets=self.XY,
                                            transOffset=ax.transData,
                                            closed=False,
                                            **kw)
        # Saved so QuiverKey can build its key arrow with the same
        # polygon properties.
        self.polykw = kw
        self.set_UVC(U, V, C)
        self._initialized = False
        self.keyvec = None
        self.keytext = None
        def on_dpi_change(fig):
            self._new_UV = True # vertices depend on width, span
                                # which in turn depend on dpi
            self._initialized = False # simple brute force update
                                      # works because _init is called
                                      # at the start of draw.
        self.ax.figure.callbacks.connect('dpi_changed', on_dpi_change)
    __init__.__doc__ = """
        The constructor takes one required argument, an Axes
        instance, followed by the args and kwargs described
        by the following pylab interface documentation:
        %s""" % _quiver_doc
    def _parse_args(self, *args):
        # Accepts (U, V), (U, V, C), (X, Y, U, V) or (X, Y, U, V, C);
        # X and Y are generated as a uniform index grid when absent.
        X, Y, U, V, C = [None]*5
        args = list(args)
        if len(args) == 3 or len(args) == 5:
            C = ma.asarray(args.pop(-1)).ravel()
        V = ma.asarray(args.pop(-1))
        U = ma.asarray(args.pop(-1))
        nn = np.shape(U)
        nc = nn[0]
        nr = 1
        if len(nn) > 1:
            nr = nn[1]
        if len(args) == 2: # remaining after removing U,V,C
            X, Y = [np.array(a).ravel() for a in args]
            # 1-D X/Y with 2-D U: expand with meshgrid so lengths match.
            if len(X) == nc and len(Y) == nr:
                X, Y = [a.ravel() for a in np.meshgrid(X, Y)]
        else:
            indexgrid = np.meshgrid(np.arange(nc), np.arange(nr))
            X, Y = [np.ravel(a) for a in indexgrid]
        return X, Y, U, V, C
    def _init(self):
        """initialization delayed until first draw;
        allow time for axes setup.
        """
        # It seems that there are not enough event notifications
        # available to have this work on an as-needed basis at present.
        if True: ##not self._initialized:
            trans = self._set_transform()
            ax = self.ax
            sx, sy = trans.inverted().transform_point(
                                            (ax.bbox.width, ax.bbox.height))
            self.span = sx
            sn = max(8, min(25, math.sqrt(self.N)))
            if self.width is None:
                # Default arrow width scales with axes span / arrow count.
                self.width = 0.06 * self.span / sn
    def draw(self, renderer):
        self._init()
        # Vertices are recomputed when U/V changed, or always for
        # angles='xy' since screen-space angles depend on the view limits.
        if self._new_UV or self.angles == 'xy':
            verts = self._make_verts(self.U, self.V)
            self.set_verts(verts, closed=False)
            self._new_UV = False
        collections.PolyCollection.draw(self, renderer)
    def set_UVC(self, U, V, C=None):
        # Public API: change vector components (and optionally colors);
        # vertices are rebuilt lazily at the next draw.
        self.U = U.ravel()
        self.V = V.ravel()
        if C is not None:
            self.set_array(C.ravel())
        self._new_UV = True
    def _set_transform(self):
        # Map the 'units' kwarg to a scale transform; all arrow dimensions
        # (width, head sizes) are expressed in these units.
        ax = self.ax
        if self.units in ('x', 'y'):
            if self.units == 'x':
                dx0 = ax.viewLim.width
                dx1 = ax.bbox.width
            else:
                dx0 = ax.viewLim.height
                dx1 = ax.bbox.height
            dx = dx1/dx0
        else:
            if self.units == 'width':
                dx = ax.bbox.width
            elif self.units == 'height':
                dx = ax.bbox.height
            elif self.units == 'dots':
                dx = 1.0
            elif self.units == 'inches':
                dx = ax.figure.dpi
            else:
                raise ValueError('unrecognized units')
        trans = transforms.Affine2D().scale(dx)
        self.set_transform(trans)
        return trans
    def _angles(self, U, V, eps=0.001):
        # Screen-space angle of each vector, found by pushing a small
        # displacement along (U, V) through the data transform.
        xy = self.ax.transData.transform(self.XY)
        uv = ma.hstack((U[:,np.newaxis], V[:,np.newaxis])).filled(0)
        xyp = self.ax.transData.transform(self.XY + eps * uv)
        dxy = xyp - xy
        ang = ma.arctan2(dxy[:,1], dxy[:,0])
        return ang
    def _make_verts(self, U, V):
        # Treat (U, V) as complex numbers so magnitude and rotation are
        # cheap to compute.
        uv = ma.asarray(U+V*1j)
        a = ma.absolute(uv)
        if self.scale is None:
            sn = max(10, math.sqrt(self.N))
            scale = 1.8 * a.mean() * sn / self.span # crude auto-scaling
            self.scale = scale
        length = a/(self.scale*self.width)
        X, Y = self._h_arrows(length)
        if self.angles == 'xy':
            theta = self._angles(U, V).filled(0)[:,np.newaxis]
        elif self.angles == 'uv':
            theta = np.angle(ma.asarray(uv[..., np.newaxis]).filled(0))
        else:
            # A fixed angle (in degrees) was supplied for all arrows.
            theta = ma.asarray(self.angles*np.pi/180.0).filled(0)
        # Rotate the horizontal prototype arrows and scale by width.
        xy = (X+Y*1j) * np.exp(1j*theta)*self.width
        xy = xy[:,:,np.newaxis]
        XY = ma.concatenate((xy.real, xy.imag), axis=2)
        return XY
    def _h_arrows(self, length):
        """ length is in arrow width units """
        # It might be possible to streamline the code
        # and speed it up a bit by using complex (x,y)
        # instead of separate arrays; but any gain would be slight.
        minsh = self.minshaft * self.headlength
        N = len(length)
        length = length.reshape(N, 1)
        # x, y: normal horizontal arrow
        x = np.array([0, -self.headaxislength,
                      -self.headlength, 0], np.float64)
        x = x + np.array([0,1,1,1]) * length
        y = 0.5 * np.array([1, 1, self.headwidth, 0], np.float64)
        y = np.repeat(y[np.newaxis,:], N, axis=0)
        # x0, y0: arrow without shaft, for short vectors
        x0 = np.array([0, minsh-self.headaxislength,
                       minsh-self.headlength, minsh], np.float64)
        y0 = 0.5 * np.array([1, 1, self.headwidth, 0], np.float64)
        # Mirror the 4-point half-outline into the full 7-point polygon.
        ii = [0,1,2,3,2,1,0]
        X = x.take(ii, 1)
        Y = y.take(ii, 1)
        Y[:, 3:] *= -1
        X0 = x0.take(ii)
        Y0 = y0.take(ii)
        Y0[3:] *= -1
        shrink = length/minsh
        X0 = shrink * X0[np.newaxis,:]
        Y0 = shrink * Y0[np.newaxis,:]
        short = np.repeat(length < minsh, 7, axis=1)
        #print 'short', length < minsh
        # Now select X0, Y0 if short, otherwise X, Y
        X = ma.where(short, X0, X)
        Y = ma.where(short, Y0, Y)
        if self.pivot[:3] == 'mid':
            X -= 0.5 * X[:,3, np.newaxis]
        elif self.pivot[:3] == 'tip':
            X = X - X[:,3, np.newaxis] #numpy bug? using -= does not
                                       # work here unless we multiply
                                       # by a float first, as with 'mid'.
        # Vectors below minlength are replaced by a heptagonal dot marker.
        tooshort = length < self.minlength
        if tooshort.any():
            # Use a heptagonal dot:
            th = np.arange(0,7,1, np.float64) * (np.pi/3.0)
            x1 = np.cos(th) * self.minlength * 0.5
            y1 = np.sin(th) * self.minlength * 0.5
            X1 = np.repeat(x1[np.newaxis, :], N, axis=0)
            Y1 = np.repeat(y1[np.newaxis, :], N, axis=0)
            tooshort = ma.repeat(tooshort, 7, 1)
            X = ma.where(tooshort, X1, X)
            Y = ma.where(tooshort, Y1, Y)
        return X, Y
quiver_doc = _quiver_doc
_barbs_doc = """
Plot a 2-D field of barbs.
call signatures::
barb(U, V, **kw)
barb(U, V, C, **kw)
barb(X, Y, U, V, **kw)
barb(X, Y, U, V, C, **kw)
Arguments:
*X*, *Y*:
The x and y coordinates of the barb locations
(default is head of barb; see *pivot* kwarg)
*U*, *V*:
give the *x* and *y* components of the barb shaft
*C*:
an optional array used to map colors to the barbs
All arguments may be 1-D or 2-D arrays or sequences. If *X* and *Y*
are absent, they will be generated as a uniform grid. If *U* and *V*
are 2-D arrays but *X* and *Y* are 1-D, and if len(*X*) and len(*Y*)
match the column and row dimensions of *U*, then *X* and *Y* will be
expanded with :func:`numpy.meshgrid`.
*U*, *V*, *C* may be masked arrays, but masked *X*, *Y* are not
supported at present.
Keyword arguments:
*length*:
Length of the barb in points; the other parts of the barb
are scaled against this.
Default is 9
*pivot*: [ 'tip' | 'middle' ]
The part of the arrow that is at the grid point; the arrow rotates
about this point, hence the name *pivot*. Default is 'tip'
*barbcolor*: [ color | color sequence ]
Specifies the color all parts of the barb except any flags. This
parameter is analagous to the *edgecolor* parameter for polygons,
which can be used instead. However this parameter will override
facecolor.
*flagcolor*: [ color | color sequence ]
Specifies the color of any flags on the barb. This parameter is
analagous to the *facecolor* parameter for polygons, which can be
used instead. However this parameter will override facecolor. If
this is not set (and *C* has not either) then *flagcolor* will be
set to match *barbcolor* so that the barb has a uniform color. If
*C* has been set, *flagcolor* has no effect.
*sizes*:
A dictionary of coefficients specifying the ratio of a given
feature to the length of the barb. Only those values one wishes to
override need to be included. These features include:
- 'spacing' - space between features (flags, full/half barbs)
- 'height' - height (distance from shaft to top) of a flag or
full barb
- 'width' - width of a flag, twice the width of a full barb
- 'emptybarb' - radius of the circle used for low magnitudes
*fill_empty*:
A flag on whether the empty barbs (circles) that are drawn should
be filled with the flag color. If they are not filled, they will
be drawn such that no color is applied to the center. Default is
False
*rounding*:
A flag to indicate whether the vector magnitude should be rounded
when allocating barb components. If True, the magnitude is
rounded to the nearest multiple of the half-barb increment. If
False, the magnitude is simply truncated to the next lowest
multiple. Default is True
*barb_increments*:
A dictionary of increments specifying values to associate with
different parts of the barb. Only those values one wishes to
override need to be included.
- 'half' - half barbs (Default is 5)
- 'full' - full barbs (Default is 10)
- 'flag' - flags (default is 50)
*flip_barb*:
Either a single boolean flag or an array of booleans. Single
boolean indicates whether the lines and flags should point
opposite to normal for all barbs. An array (which should be the
same size as the other data arrays) indicates whether to flip for
each individual barb. Normal behavior is for the barbs and lines
to point right (comes from wind barbs having these features point
towards low pressure in the Northern Hemisphere.) Default is
False
Barbs are traditionally used in meteorology as a way to plot the speed
and direction of wind observations, but can technically be used to
plot any two dimensional vector quantity. As opposed to arrows, which
give vector magnitude by the length of the arrow, the barbs give more
quantitative information about the vector magnitude by putting slanted
lines or a triangle for various increments in magnitude, as show
schematically below::
: /\ \\
: / \ \\
: / \ \ \\
: / \ \ \\
: ------------------------------
.. note the double \\ at the end of each line to make the figure
.. render correctly
The largest increment is given by a triangle (or "flag"). After those
come full lines (barbs). The smallest increment is a half line. There
is only, of course, ever at most 1 half line. If the magnitude is
small and only needs a single half-line and no full lines or
triangles, the half-line is offset from the end of the barb so that it
can be easily distinguished from barbs with a single full line. The
magnitude for the barb shown above would nominally be 65, using the
standard increments of 50, 10, and 5.
linewidths and edgecolors can be used to customize the barb.
Additional :class:`~matplotlib.collections.PolyCollection` keyword
arguments:
%(PolyCollection)s
""" % martist.kwdocd
class Barbs(collections.PolyCollection):
    '''
    Specialized PolyCollection for barbs.
    The only API method is :meth:`set_UVC`, which can be used to
    change the size, orientation, and color of the arrows.  Locations
    are changed using the :meth:`set_offsets` collection method.
    Possibly this method will be useful in animations.
    There is one internal function :meth:`_find_tails` which finds
    exactly what should be put on the barb given the vector magnitude.
    From there :meth:`_make_barbs` is used to find the vertices of the
    polygon to represent the barb based on this information.
    '''
    #This may be an abuse of polygons here to render what is essentially maybe
    #1 triangle and a series of lines. It works fine as far as I can tell
    #however.
    def __init__(self, ax, *args, **kw):
        self._pivot = kw.pop('pivot', 'tip')
        self._length = kw.pop('length', 7)
        barbcolor = kw.pop('barbcolor', None)
        flagcolor = kw.pop('flagcolor', None)
        self.sizes = kw.pop('sizes', dict())
        self.fill_empty = kw.pop('fill_empty', False)
        self.barb_increments = kw.pop('barb_increments', dict())
        self.rounding = kw.pop('rounding', True)
        self.flip = kw.pop('flip_barb', False)
        #Flagcolor and barbcolor provide convenience parameters for setting
        #the facecolor and edgecolor, respectively, of the barb polygon. We
        #also work here to make the flag the same color as the rest of the barb
        #by default
        if None in (barbcolor, flagcolor):
            kw['edgecolors'] = 'face'
            if flagcolor:
                kw['facecolors'] = flagcolor
            elif barbcolor:
                kw['facecolors'] = barbcolor
            else:
                #Set to facecolor passed in or default to black
                kw.setdefault('facecolors', 'k')
        else:
            kw['edgecolors'] = barbcolor
            kw['facecolors'] = flagcolor
        #Parse out the data arrays from the various configurations supported
        x, y, u, v, c = self._parse_args(*args)
        self.x = x
        self.y = y
        xy = np.hstack((x[:,np.newaxis], y[:,np.newaxis]))
        #Make a collection
        barb_size = self._length**2 / 4 #Empirically determined
        collections.PolyCollection.__init__(self, [], (barb_size,), offsets=xy,
            transOffset=ax.transData, **kw)
        self.set_transform(transforms.IdentityTransform())
        self.set_UVC(u, v, c)
    __init__.__doc__ = """
        The constructor takes one required argument, an Axes
        instance, followed by the args and kwargs described
        by the following pylab interface documentation:
        %s""" % _barbs_doc
    def _find_tails(self, mag, rounding=True, half=5, full=10, flag=50):
        '''
        Find how many of each of the tail pieces is necessary.  Flag
        specifies the increment for a flag, barb for a full barb, and half for
        half a barb. Mag should be the magnitude of a vector (ie. >= 0).
        This returns a tuple of:
            (*number of flags*, *number of barbs*, *half_flag*, *empty_flag*)
        *half_flag* is a boolean whether half of a barb is needed,
        since there should only ever be one half on a given
        barb. *empty_flag* flag is an array of flags to easily tell if
        a barb is empty (too low to plot any barbs/flags).
        '''
        #If rounding, round to the nearest multiple of half, the smallest
        #increment
        if rounding:
            # NOTE(review): np.int is deprecated/removed in modern NumPy;
            # this code targets the NumPy of its era -- confirm before reuse.
            mag = half * (mag / half + 0.5).astype(np.int)
        num_flags = np.floor(mag / flag).astype(np.int)
        mag = np.mod(mag, flag)
        num_barb = np.floor(mag / full).astype(np.int)
        mag = np.mod(mag, full)
        half_flag = mag >= half
        empty_flag = ~(half_flag | (num_flags > 0) | (num_barb > 0))
        return num_flags, num_barb, half_flag, empty_flag
    def _make_barbs(self, u, v, nflags, nbarbs, half_barb, empty_flag, length,
        pivot, sizes, fill_empty, flip):
        '''
        This function actually creates the wind barbs.  *u* and *v*
        are components of the vector in the *x* and *y* directions,
        respectively.
        *nflags*, *nbarbs*, *half_barb*, and *empty_flag* are,
        respectively, the number of flags, number of barbs, flag for
        half a barb, and flag for empty barb, ostensibly obtained
        from :meth:`_find_tails`.
        *length* is the length of the barb staff in points.
        *pivot* specifies the point on the barb around which the
        entire barb should be rotated.  Right now, valid options are
        'head' and 'middle'.
        *sizes* is a dictionary of coefficients specifying the ratio
        of a given feature to the length of the barb. These features
        include:
            - *spacing*: space between features (flags, full/half
               barbs)
            - *height*: distance from shaft of top of a flag or full
               barb
            - *width* - width of a flag, twice the width of a full barb
            - *emptybarb* - radius of the circle used for low
               magnitudes
        *fill_empty* specifies whether the circle representing an
        empty barb should be filled or not (this changes the drawing
        of the polygon).
        *flip* is a flag indicating whether the features should be flipped to
        the other side of the barb (useful for winds in the southern
        hemisphere).
        This function returns list of arrays of vertices, defining a polygon for
        each of the wind barbs.  These polygons have been rotated to properly
        align with the vector direction.
        '''
        #These control the spacing and size of barb elements relative to the
        #length of the shaft
        spacing = length * sizes.get('spacing', 0.125)
        full_height = length * sizes.get('height', 0.4)
        full_width = length * sizes.get('width', 0.25)
        empty_rad = length * sizes.get('emptybarb', 0.15)
        #Controls y point where to pivot the barb.
        pivot_points = dict(tip=0.0, middle=-length/2.)
        #Check for flip
        if flip: full_height = -full_height
        endx = 0.0
        endy = pivot_points[pivot.lower()]
        #Get the appropriate angle for the vector components.  The offset is due
        #to the way the barb is initially drawn, going down the y-axis.  This
        #makes sense in a meteorological mode of thinking since there 0 degrees
        #corresponds to north (the y-axis traditionally)
        angles = -(ma.arctan2(v, u) + np.pi/2)
        #Used for low magnitude.  We just get the vertices, so if we make it
        #out here, it can be reused.  The center set here should put the
        #center of the circle at the location(offset), rather than at the
        #same point as the barb pivot; this seems more sensible.
        circ = CirclePolygon((0,0), radius=empty_rad).get_verts()
        if fill_empty:
            empty_barb = circ
        else:
            #If we don't want the empty one filled, we make a degenerate polygon
            #that wraps back over itself
            empty_barb = np.concatenate((circ, circ[::-1]))
        barb_list = []
        for index, angle in np.ndenumerate(angles):
            #If the vector magnitude is too weak to draw anything, plot an
            #empty circle instead
            if empty_flag[index]:
                #We can skip the transform since the circle has no preferred
                #orientation
                barb_list.append(empty_barb)
                continue
            poly_verts = [(endx, endy)]
            offset = length
            #Add vertices for each flag
            for i in range(nflags[index]):
                #The spacing that works for the barbs is a little to much for
                #the flags, but this only occurs when we have more than 1 flag.
                if offset != length: offset += spacing / 2.
                poly_verts.extend([[endx, endy + offset],
                    [endx + full_height, endy - full_width/2 + offset],
                    [endx, endy - full_width + offset]])
                offset -= full_width + spacing
            #Add vertices for each barb.  These really are lines, but works
            #great adding 3 vertices that basically pull the polygon out and
            #back down the line
            for i in range(nbarbs[index]):
                poly_verts.extend([(endx, endy + offset),
                    (endx + full_height, endy + offset + full_width/2),
                    (endx, endy + offset)])
                offset -= spacing
            #Add the vertices for half a barb, if needed
            if half_barb[index]:
                #If the half barb is the first on the staff, traditionally it is
                #offset from the end to make it easy to distinguish from a barb
                #with a full one
                if offset == length:
                    poly_verts.append((endx, endy + offset))
                    offset -= 1.5 * spacing
                poly_verts.extend([(endx, endy + offset),
                    (endx + full_height/2, endy + offset + full_width/4),
                    (endx, endy + offset)])
            #Rotate the barb according the angle. Making the barb first and then
            #rotating it made the math for drawing the barb really easy.  Also,
            #the transform framework makes doing the rotation simple.
            poly_verts = transforms.Affine2D().rotate(-angle).transform(
                poly_verts)
            barb_list.append(poly_verts)
        return barb_list
    #Taken shamelessly from Quiver
    def _parse_args(self, *args):
        # Accepts (U, V), (U, V, C), (X, Y, U, V) or (X, Y, U, V, C);
        # X and Y are generated as a uniform index grid when absent.
        X, Y, U, V, C = [None]*5
        args = list(args)
        if len(args) == 3 or len(args) == 5:
            C = ma.asarray(args.pop(-1)).ravel()
        V = ma.asarray(args.pop(-1))
        U = ma.asarray(args.pop(-1))
        nn = np.shape(U)
        nc = nn[0]
        nr = 1
        if len(nn) > 1:
            nr = nn[1]
        if len(args) == 2: # remaining after removing U,V,C
            X, Y = [np.array(a).ravel() for a in args]
            # 1-D X/Y with 2-D U: expand with meshgrid so lengths match.
            if len(X) == nc and len(Y) == nr:
                X, Y = [a.ravel() for a in np.meshgrid(X, Y)]
        else:
            indexgrid = np.meshgrid(np.arange(nc), np.arange(nr))
            X, Y = [np.ravel(a) for a in indexgrid]
        return X, Y, U, V, C
    def set_UVC(self, U, V, C=None):
        # Public API: set the vector components (and optional colors),
        # dropping any points with masked data before building vertices.
        self.u = ma.asarray(U).ravel()
        self.v = ma.asarray(V).ravel()
        if C is not None:
            c = ma.asarray(C).ravel()
            x,y,u,v,c = delete_masked_points(self.x.ravel(), self.y.ravel(),
                self.u, self.v, c)
        else:
            x,y,u,v = delete_masked_points(self.x.ravel(), self.y.ravel(),
                self.u, self.v)
        magnitude = np.sqrt(u*u + v*v)
        flags, barbs, halves, empty = self._find_tails(magnitude,
            self.rounding, **self.barb_increments)
        #Get the vertices for each of the barbs
        plot_barbs = self._make_barbs(u, v, flags, barbs, halves, empty,
            self._length, self._pivot, self.sizes, self.fill_empty, self.flip)
        self.set_verts(plot_barbs)
        #Set the color array
        if C is not None:
            self.set_array(c)
        #Update the offsets in case the masked data changed
        xy = np.hstack((x[:,np.newaxis], y[:,np.newaxis]))
        self._offsets = xy
    def set_offsets(self, xy):
        '''
        Set the offsets for the barb polygons.  This saves the offets passed in
        and actually sets version masked as appropriate for the existing U/V
        data. *offsets* should be a sequence.
        ACCEPTS: sequence of pairs of floats
        '''
        self.x = xy[:,0]
        self.y = xy[:,1]
        x,y,u,v = delete_masked_points(self.x.ravel(), self.y.ravel(), self.u,
            self.v)
        xy = np.hstack((x[:,np.newaxis], y[:,np.newaxis]))
        collections.PolyCollection.set_offsets(self, xy)
    set_offsets.__doc__ = collections.PolyCollection.set_offsets.__doc__
barbs_doc = _barbs_doc
| gpl-3.0 |
gangadhar-kadam/adb-erp | setup/doctype/notification_control/notification_control.py | 6 | 1380 | # ERPNext - web based ERP (http://erpnext.com)
# Copyright (C) 2012 Web Notes Technologies Pvt Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
import webnotes
from webnotes import msgprint
sql = webnotes.conn.sql
class DocType:
    """Controller for the Notification Control singleton.

    Reads and writes per-transaction custom notification messages stored
    as fields of the 'Notification Control' single doctype.
    """

    def __init__(self, d, dl):
        self.doc, self.doclist = d, dl

    def get_message(self, arg):
        """Return the stored custom message for the given transaction name."""
        # Field name convention: e.g. "Sales Order" -> "sales_order_message".
        field_name = arg.lower().replace(' ', '_') + '_message'
        rows = sql("select value from tabSingles where field=%s and doctype=%s",
            (field_name, 'Notification Control'))
        if rows:
            return rows[0][0] or ''
        return ''

    def set_message(self, arg=''):
        """Persist the custom message for the currently selected transaction."""
        field_name = self.doc.select_transaction.lower().replace(' ', '_') + '_message'
        webnotes.conn.set(self.doc, field_name, self.doc.custom_message)
        msgprint("Custom Message for %s updated!" % self.doc.select_transaction)
| agpl-3.0 |
watonyweng/nova | nova/scheduler/filters/type_filter.py | 45 | 2307 | # Copyright (c) 2012 The Cloudscaling Group, Inc.
#
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova.scheduler import filters
from nova.scheduler.filters import utils
class TypeAffinityFilter(filters.BaseHostFilter):
    """TypeAffinityFilter doesn't allow more than one VM type per host.

    Note: this works best with ram_weight_multiplier
    (spread) set to 1 (default).
    """

    def host_passes(self, host_state, filter_properties):
        """Dynamically limit each host to a single instance type.

        Passes when the host is empty or runs only instances of the
        requested type; fails if any other type is present.
        """
        requested_type_id = filter_properties.get('instance_type')['id']
        return not utils.other_types_on_host(host_state, requested_type_id)
class AggregateTypeAffinityFilter(filters.BaseHostFilter):
    """AggregateTypeAffinityFilter limits instance_type by aggregate.

    Passes when the aggregate has no 'instance_type' metadata key, or
    when the requested instance type name appears in one of the
    (comma-separated) metadata values.
    """

    # Aggregate data does not change within a request
    run_filter_once_per_request = True

    def host_passes(self, host_state, filter_properties):
        instance_type = filter_properties.get('instance_type')
        aggregate_vals = utils.aggregate_values_from_key(
            host_state, 'instance_type')
        # No metadata key set: the aggregate does not restrict types.
        if not aggregate_vals:
            return True
        return any(
            instance_type['name'] in [entry.strip() for entry in val.split(',')]
            for val in aggregate_vals)
| apache-2.0 |
turbokongen/home-assistant | homeassistant/components/enocean/light.py | 14 | 3234 | """Support for EnOcean light sources."""
import math
import voluptuous as vol
from homeassistant.components.light import (
ATTR_BRIGHTNESS,
PLATFORM_SCHEMA,
SUPPORT_BRIGHTNESS,
LightEntity,
)
from homeassistant.const import CONF_ID, CONF_NAME
import homeassistant.helpers.config_validation as cv
from .device import EnOceanEntity
# Configuration key naming the EnOcean sender id (a list of ints).
CONF_SENDER_ID = "sender_id"
DEFAULT_NAME = "EnOcean Light"
# Only brightness control is supported by this platform.
SUPPORT_ENOCEAN = SUPPORT_BRIGHTNESS
# Extend the base light platform schema with the EnOcean-specific options.
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
    {
        vol.Optional(CONF_ID, default=[]): vol.All(cv.ensure_list, [vol.Coerce(int)]),
        vol.Required(CONF_SENDER_ID): vol.All(cv.ensure_list, [vol.Coerce(int)]),
        vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
    }
)
def setup_platform(hass, config, add_entities, discovery_info=None):
    """Set up the EnOcean light platform."""
    add_entities(
        [
            EnOceanLight(
                config.get(CONF_SENDER_ID),
                config.get(CONF_ID),
                config.get(CONF_NAME),
            )
        ]
    )
class EnOceanLight(EnOceanEntity, LightEntity):
    """Representation of an EnOcean light source."""

    def __init__(self, sender_id, dev_id, dev_name):
        """Initialize the EnOcean light source."""
        super().__init__(dev_id, dev_name)
        self._on_state = False
        self._brightness = 50
        self._sender_id = sender_id

    @property
    def name(self):
        """Return the name of the device if any."""
        return self.dev_name

    @property
    def brightness(self):
        """Return the current brightness of the light.

        This method is optional. Removing it indicates to Home Assistant
        that brightness is not supported for this light.
        """
        return self._brightness

    @property
    def is_on(self):
        """Return True if the light is on."""
        return self._on_state

    @property
    def supported_features(self):
        """Flag supported features."""
        return SUPPORT_ENOCEAN

    def turn_on(self, **kwargs):
        """Turn the light source on or set a specific dimmer value."""
        requested = kwargs.get(ATTR_BRIGHTNESS)
        if requested is not None:
            self._brightness = requested
        # Map the 0..255 brightness onto the device's 1..100 dimmer scale.
        bval = math.floor(self._brightness / 256.0 * 100.0) or 1
        telegram = [0xA5, 0x02, bval, 0x01, 0x09]
        telegram.extend(self._sender_id)
        telegram.append(0x00)
        self.send_command(telegram, [], 0x01)
        self._on_state = True

    def turn_off(self, **kwargs):
        """Turn the light source off."""
        telegram = [0xA5, 0x02, 0x00, 0x01, 0x09]
        telegram.extend(self._sender_id)
        telegram.append(0x00)
        self.send_command(telegram, [], 0x01)
        self._on_state = False

    def value_changed(self, packet):
        """Update the internal state of this device from a telegram.

        Dimmer devices like Eltako FUD61 send telegrams in different RORGs;
        only the 4BS (0xA5) dimming telegram is handled here.
        """
        if packet.data[0] != 0xA5 or packet.data[1] != 0x02:
            return
        dim_value = packet.data[2]
        self._brightness = math.floor(dim_value / 100.0 * 256.0)
        self._on_state = dim_value != 0
        self.schedule_update_ha_state()
| apache-2.0 |
dlazz/ansible | lib/ansible/plugins/lookup/url.py | 36 | 3469 | # (c) 2015, Brian Coca <bcoca@ansible.com>
# (c) 2012-17 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = """
lookup: url
author: Brian Coca (@bcoca)
version_added: "1.9"
short_description: return contents from URL
description:
- Returns the content of the URL requested to be used as data in play.
options:
_terms:
description: urls to query
validate_certs:
description: Flag to control SSL certificate validation
type: boolean
default: True
split_lines:
description: Flag to control if content is returned as a list of lines or as a single text blob
type: boolean
default: True
use_proxy:
description: Flag to control if the lookup will observe HTTP proxy environment variables when present.
type: boolean
default: True
username:
description: Username to use for HTTP authentication.
type: string
default: None
version_added: "2.8"
password:
description: Password to use for HTTP authentication.
type: string
default: None
version_added: "2.8"
"""
EXAMPLES = """
- name: url lookup splits lines by default
debug: msg="{{item}}"
loop: "{{ lookup('url', 'https://github.com/gremlin.keys', wantlist=True) }}"
- name: display ip ranges
debug: msg="{{ lookup('url', 'https://ip-ranges.amazonaws.com/ip-ranges.json', split_lines=False) }}"
- name: url lookup using authentication
debug: msg="{{ lookup('url', 'https://some.private.site.com/file.txt', username='bob', password='hunter2') }}"
"""
RETURN = """
_list:
description: list of list of lines or content of url(s)
"""
from ansible.errors import AnsibleError
from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
from ansible.module_utils._text import to_text, to_native
from ansible.module_utils.urls import open_url, ConnectionError, SSLValidationError
from ansible.plugins.lookup import LookupBase
from ansible.utils.display import Display
display = Display()
class LookupModule(LookupBase):

    def run(self, terms, variables=None, **kwargs):
        """Fetch each URL in *terms* and return its content.

        Depending on the ``split_lines`` option the result is either one
        list entry per line of the response body, or a single entry with
        the whole body. Any transport/validation failure is converted to
        an AnsibleError naming the offending URL.
        """
        self.set_options(direct=kwargs)

        # Resolve options once, outside the loop.
        validate_certs = self.get_option('validate_certs')
        use_proxy = self.get_option('use_proxy')
        username = self.get_option('username')
        password = self.get_option('password')
        split_lines = self.get_option('split_lines')

        results = []
        for url in terms:
            display.vvvv("url lookup connecting to %s" % url)
            # NOTE: HTTPError must be handled before URLError, as it is a
            # subclass of URLError in the urllib hierarchy.
            try:
                response = open_url(url, validate_certs=validate_certs,
                                    use_proxy=use_proxy,
                                    url_username=username,
                                    url_password=password)
            except HTTPError as e:
                raise AnsibleError("Received HTTP error for %s : %s" % (url, to_native(e)))
            except URLError as e:
                raise AnsibleError("Failed lookup url for %s : %s" % (url, to_native(e)))
            except SSLValidationError as e:
                raise AnsibleError("Error validating the server's certificate for %s: %s" % (url, to_native(e)))
            except ConnectionError as e:
                raise AnsibleError("Error connecting to %s: %s" % (url, to_native(e)))

            body = response.read()
            if split_lines:
                results.extend(to_text(line) for line in body.splitlines())
            else:
                results.append(to_text(body))
        return results
| gpl-3.0 |
dlazz/ansible | lib/ansible/modules/network/cloudengine/ce_evpn_bd_vni.py | 26 | 39530 | #!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: ce_evpn_bd_vni
version_added: "2.4"
short_description: Manages EVPN VXLAN Network Identifier (VNI) on HUAWEI CloudEngine switches.
description:
- Manages Ethernet Virtual Private Network (EVPN) VXLAN Network
Identifier (VNI) configurations on HUAWEI CloudEngine switches.
author: Zhijin Zhou (@QijunPan)
notes:
- Ensure that EVPN has been configured to serve as the VXLAN control plane when state is present.
- Ensure that a bridge domain (BD) has existed when state is present.
- Ensure that a VNI has been created and associated with a broadcast domain (BD) when state is present.
- If you configure evpn:false to delete an EVPN instance, all configurations in the EVPN instance are deleted.
- After an EVPN instance has been created in the BD view, you can configure an RD using route_distinguisher
parameter in BD-EVPN instance view.
- Before configuring VPN targets for a BD EVPN instance, ensure that an RD has been configured
for the BD EVPN instance
- If you unconfigure route_distinguisher, all VPN target attributes for the BD EVPN instance will be removed at the same time.
- When using state:absent, evpn is not supported and it will be ignored.
- When using state:absent to delete VPN target attributes, ensure the configuration of VPN target attributes has
existed and otherwise it will report an error.
options:
bridge_domain_id:
description:
- Specify an existed bridge domain (BD).The value is an integer ranging from 1 to 16777215.
required: true
evpn:
description:
- Create or delete an EVPN instance for a VXLAN in BD view.
choices: ['enable','disable']
default: 'enable'
route_distinguisher:
description:
- Configures a route distinguisher (RD) for a BD EVPN instance.
The format of an RD can be as follows
- 1) 2-byte AS number:4-byte user-defined number, for example, 1:3. An AS number is an integer ranging from
0 to 65535, and a user-defined number is an integer ranging from 0 to 4294967295. The AS and user-defined
numbers cannot be both 0s. This means that an RD cannot be 0:0.
- 2) Integral 4-byte AS number:2-byte user-defined number, for example, 65537:3. An AS number is an integer
ranging from 65536 to 4294967295, and a user-defined number is an integer ranging from 0 to 65535.
- 3) 4-byte AS number in dotted notation:2-byte user-defined number, for example, 0.0:3 or 0.1:0. A 4-byte
AS number in dotted notation is in the format of x.y, where x and y are integers ranging from 0 to 65535.
- 4) A user-defined number is an integer ranging from 0 to 65535. The AS and user-defined numbers cannot be
both 0s. This means that an RD cannot be 0.0:0.
- 5) 32-bit IP address:2-byte user-defined number. For example, 192.168.122.15:1. An IP address ranges from
0.0.0.0 to 255.255.255.255, and a user-defined number is an integer ranging from 0 to 65535.
- 6) 'auto' specifies the RD that is automatically generated.
vpn_target_both:
description:
- Add VPN targets to both the import and export VPN target lists of a BD EVPN instance.
The format is the same as route_distinguisher.
vpn_target_import:
description:
- Add VPN targets to the import VPN target list of a BD EVPN instance.
The format is the same as route_distinguisher.
required: true
vpn_target_export:
description:
- Add VPN targets to the export VPN target list of a BD EVPN instance.
The format is the same as route_distinguisher.
state:
description:
- Manage the state of the resource.
choices: ['present','absent']
default: 'present'
'''
EXAMPLES = '''
- name: EVPN BD VNI test
hosts: cloudengine
connection: local
gather_facts: no
vars:
cli:
host: "{{ inventory_hostname }}"
port: "{{ ansible_ssh_port }}"
username: "{{ username }}"
password: "{{ password }}"
transport: cli
tasks:
- name: "Configure an EVPN instance for a VXLAN in BD view"
ce_evpn_bd_vni:
bridge_domain_id: 20
evpn: enable
provider: "{{ cli }}"
- name: "Configure a route distinguisher (RD) for a BD EVPN instance"
ce_evpn_bd_vni:
bridge_domain_id: 20
route_distinguisher: '22:22'
provider: "{{ cli }}"
- name: "Configure VPN targets to both the import and export VPN target lists of a BD EVPN instance"
ce_evpn_bd_vni:
bridge_domain_id: 20
vpn_target_both: 22:100,22:101
provider: "{{ cli }}"
- name: "Configure VPN targets to the import VPN target list of a BD EVPN instance"
ce_evpn_bd_vni:
bridge_domain_id: 20
vpn_target_import: 22:22,22:23
provider: "{{ cli }}"
- name: "Configure VPN targets to the export VPN target list of a BD EVPN instance"
ce_evpn_bd_vni:
bridge_domain_id: 20
vpn_target_export: 22:38,22:39
provider: "{{ cli }}"
- name: "Unconfigure VPN targets to both the import and export VPN target lists of a BD EVPN instance"
ce_evpn_bd_vni:
bridge_domain_id: 20
vpn_target_both: '22:100'
state: absent
provider: "{{ cli }}"
- name: "Unconfigure VPN targets to the import VPN target list of a BD EVPN instance"
ce_evpn_bd_vni:
bridge_domain_id: 20
vpn_target_import: '22:22'
state: absent
provider: "{{ cli }}"
- name: "Unconfigure VPN targets to the export VPN target list of a BD EVPN instance"
ce_evpn_bd_vni:
bridge_domain_id: 20
vpn_target_export: '22:38'
state: absent
provider: "{{ cli }}"
- name: "Unconfigure a route distinguisher (RD) of a BD EVPN instance"
ce_evpn_bd_vni:
bridge_domain_id: 20
route_distinguisher: '22:22'
state: absent
provider: "{{ cli }}"
- name: "Unconfigure an EVPN instance for a VXLAN in BD view"
ce_evpn_bd_vni:
bridge_domain_id: 20
evpn: disable
provider: "{{ cli }}"
'''
RETURN = '''
proposed:
description: k/v pairs of parameters passed into module
returned: always
type: dict
sample: {
"bridge_domain_id": "2",
"evpn": "enable",
"route_distinguisher": "22:22",
"state": "present",
"vpn_target_both": [
"22:100",
"22:101"
],
"vpn_target_export": [
"22:38",
"22:39"
],
"vpn_target_import": [
"22:22",
"22:23"
]
}
existing:
description: k/v pairs of existing attributes on the device
returned: always
type: dict
sample: {
"bridge_domain_id": "2",
"evpn": "disable",
"route_distinguisher": null,
"vpn_target_both": [],
"vpn_target_export": [],
"vpn_target_import": []
}
end_state:
description: k/v pairs of end attributes on the device
returned: always
type: dict
sample: {
"bridge_domain_id": "2",
"evpn": "enable",
"route_distinguisher": "22:22",
"vpn_target_both": [
"22:100",
"22:101"
],
"vpn_target_export": [
"22:38",
"22:39"
],
"vpn_target_import": [
"22:22",
"22:23"
]
}
updates:
description: command list sent to the device
returned: always
type: list
sample: [
"bridge-domain 2",
" evpn",
" route-distinguisher 22:22",
" vpn-target 22:38 export-extcommunity",
" vpn-target 22:39 export-extcommunity",
" vpn-target 22:100 export-extcommunity",
" vpn-target 22:101 export-extcommunity",
" vpn-target 22:22 import-extcommunity",
" vpn-target 22:23 import-extcommunity",
" vpn-target 22:100 import-extcommunity",
" vpn-target 22:101 import-extcommunity"
]
changed:
description: check to see if a change was made on the device
returned: always
type: bool
sample: true
'''
import copy
from xml.etree import ElementTree
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network.cloudengine.ce import get_nc_config, set_nc_config, ce_argument_spec
CE_NC_GET_VNI_BD = """
<filter type="subtree">
<nvo3 xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<nvo3Vni2Bds>
<nvo3Vni2Bd>
<vniId></vniId>
<bdId>%s</bdId>
</nvo3Vni2Bd>
</nvo3Vni2Bds>
</nvo3>
</filter>
"""
CE_NC_GET_EVPN_CONFIG = """
<filter type="subtree">
<evpn xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<evpnInstances>
<evpnInstance>
<evpnName>%s</evpnName>
<bdId>%s</bdId>
<evpnAutoRD></evpnAutoRD>
<evpnRD></evpnRD>
<evpnRTs>
<evpnRT>
<vrfRTType></vrfRTType>
<vrfRTValue></vrfRTValue>
</evpnRT>
</evpnRTs>
<evpnAutoRTs>
<evpnAutoRT>
<vrfRTType></vrfRTType>
</evpnAutoRT>
</evpnAutoRTs>
</evpnInstance>
</evpnInstances>
</evpn>
</filter>
"""
CE_NC_DELETE_EVPN_CONFIG = """
<config>
<evpn xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<evpnInstances>
<evpnInstance operation="delete">
<evpnName>%s</evpnName>
<bdId>%s</bdId>
</evpnInstance>
</evpnInstances>
</evpn>
</config>
"""
CE_NC_DELETE_EVPN_CONFIG_HEAD = """
<config>
<evpn xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<evpnInstances>
<evpnInstance operation="delete">
<evpnName>%s</evpnName>
<bdId>%s</bdId>
"""
CE_NC_MERGE_EVPN_CONFIG_HEAD = """
<config>
<evpn xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<evpnInstances>
<evpnInstance operation="merge">
<evpnName>%s</evpnName>
<bdId>%s</bdId>
"""
CE_NC_MERGE_EVPN_AUTORTS_HEAD = """
<evpnAutoRTs>
"""
CE_NC_MERGE_EVPN_AUTORTS_TAIL = """
</evpnAutoRTs>
"""
CE_NC_DELETE_EVPN_AUTORTS_CONTEXT = """
<evpnAutoRT operation="delete">
<vrfRTType>%s</vrfRTType>
</evpnAutoRT>
"""
CE_NC_MERGE_EVPN_AUTORTS_CONTEXT = """
<evpnAutoRT>
<vrfRTType>%s</vrfRTType>
</evpnAutoRT>
"""
CE_NC_MERGE_EVPN_RTS_HEAD = """
<evpnRTs>
"""
CE_NC_MERGE_EVPN_RTS_TAIL = """
</evpnRTs>
"""
CE_NC_DELETE_EVPN_RTS_CONTEXT = """
<evpnRT operation="delete">
<vrfRTType>%s</vrfRTType>
<vrfRTValue>%s</vrfRTValue>
</evpnRT>
"""
CE_NC_MERGE_EVPN_RTS_CONTEXT = """
<evpnRT>
<vrfRTType>%s</vrfRTType>
<vrfRTValue>%s</vrfRTValue>
</evpnRT>
"""
CE_NC_MERGE_EVPN_CONFIG_TAIL = """
</evpnInstance>
</evpnInstances>
</evpn>
</config>
"""
def is_valid_value(vrf_targe_value):
    """Check whether an RD/VPN-target value string is syntactically valid.

    Accepted forms (see module DOCUMENTATION): ``AS:number`` with either a
    2-byte or 4-byte AS, ``x.y:number`` dotted AS notation, or
    ``A.B.C.D:number`` IPv4 notation. ``0:0`` and ``0.0:0`` are rejected.
    """
    # Overall length must be between 3 and 21 characters.
    if not 3 <= len(vrf_targe_value) <= 21:
        return False
    if ':' not in vrf_targe_value:
        return False
    if vrf_targe_value in ('0:0', '0.0:0'):
        return False

    # Only the first two colon-separated fields are inspected.
    fields = vrf_targe_value.split(':')
    left, right = fields[0], fields[1]

    if '.' in left:
        # Dotted left side: either IPv4 (4 octets, each <= 255) or
        # dotted AS notation (2 parts, each <= 65535); the right side
        # must be a 2-byte user-defined number.
        if not right.isdigit() or int(right) > 65535:
            return False
        parts = left.split('.')
        if len(parts) == 4:
            limit = 255
        elif len(parts) == 2:
            limit = 65535
        else:
            return False
        for part in parts:
            if not part.isdigit() or int(part) > limit:
                return False
        return True

    if not left.isdigit() or not right.isdigit():
        return False
    left_num = int(left)
    if left_num < 65536:
        # 2-byte AS: user-defined number may be up to 4 bytes.
        return int(right) < 4294967296
    if left_num < 4294967296:
        # 4-byte AS: user-defined number limited to 2 bytes.
        return int(right) < 65536
    return False
class EvpnBd(object):
    """Manage an EVPN instance in bridge-domain (BD) view via NETCONF.

    Implements the usual CloudEngine module life cycle: gather current
    device state, compare against the proposed parameters, push merge or
    delete NETCONF payloads when they differ, and report proposed /
    existing / end_state plus the equivalent CLI commands.
    """
    def __init__(self, argument_spec, ):
        self.spec = argument_spec
        self.module = None
        self.__init_module__()
        # EVPN instance info (module input parameters)
        self.bridge_domain_id = self.module.params['bridge_domain_id']
        self.evpn = self.module.params['evpn']
        self.route_distinguisher = self.module.params['route_distinguisher']
        # VPN target parameters may arrive as None; normalize to empty lists.
        self.vpn_target_both = self.module.params['vpn_target_both'] or list()
        self.vpn_target_import = self.module.params[
            'vpn_target_import'] or list()
        self.vpn_target_export = self.module.params[
            'vpn_target_export'] or list()
        self.state = self.module.params['state']
        # RD/RT values are compared case-insensitively; lowercase them once.
        self.__string_to_lowercase__()
        self.commands = list()
        # Cache of the device's current EVPN config, filled by
        # get_evpn_instance_info().
        self.evpn_info = dict()
        # True when the proposed configuration already exists on the device.
        self.conf_exist = False
        # state
        self.changed = False
        self.updates_cmd = list()
        self.results = dict()
        self.proposed = dict()
        self.existing = dict()
        self.end_state = dict()
    def __init_module__(self):
        """Instantiate the AnsibleModule (check mode supported)."""
        self.module = AnsibleModule(
            argument_spec=self.spec, supports_check_mode=True)
    def __check_response__(self, xml_str, xml_name):
        """Fail the module unless the NETCONF reply contains <ok/>."""
        if "<ok/>" not in xml_str:
            self.module.fail_json(msg='Error: %s failed.' % xml_name)
    def __string_to_lowercase__(self):
        """Lowercase the RD and every VPN target value in place."""
        if self.route_distinguisher:
            self.route_distinguisher = self.route_distinguisher.lower()
        if self.vpn_target_export:
            for index, ele in enumerate(self.vpn_target_export):
                self.vpn_target_export[index] = ele.lower()
        if self.vpn_target_import:
            for index, ele in enumerate(self.vpn_target_import):
                self.vpn_target_import[index] = ele.lower()
        if self.vpn_target_both:
            for index, ele in enumerate(self.vpn_target_both):
                self.vpn_target_both[index] = ele.lower()
    def get_all_evpn_rts(self, evpn_rts):
        """Collect explicit RTs from an <evpnRTs> element into evpn_info."""
        rts = evpn_rts.findall("evpnRT")
        if not rts:
            return
        for ele in rts:
            vrf_rttype = ele.find('vrfRTType')
            vrf_rtvalue = ele.find('vrfRTValue')
            if vrf_rttype.text == 'export_extcommunity':
                self.evpn_info['vpn_target_export'].append(vrf_rtvalue.text)
            elif vrf_rttype.text == 'import_extcommunity':
                self.evpn_info['vpn_target_import'].append(vrf_rtvalue.text)
    def get_all_evpn_autorts(self, evpn_autorts):
        """Collect auto RTs from an <evpnAutoRTs> element (stored as 'auto')."""
        autorts = evpn_autorts.findall("evpnAutoRT")
        if not autorts:
            return
        for autort in autorts:
            vrf_rttype = autort.find('vrfRTType')
            if vrf_rttype.text == 'export_extcommunity':
                self.evpn_info['vpn_target_export'].append('auto')
            elif vrf_rttype.text == 'import_extcommunity':
                self.evpn_info['vpn_target_import'].append('auto')
    def process_rts_info(self):
        """Move RTs present in both import and export lists into vpn_target_both."""
        if not self.evpn_info['vpn_target_export'] or\
                not self.evpn_info['vpn_target_import']:
            return
        # Iterate over a copy because the underlying lists are mutated.
        vpn_target_export = copy.deepcopy(self.evpn_info['vpn_target_export'])
        for ele in vpn_target_export:
            if ele in self.evpn_info['vpn_target_import']:
                self.evpn_info['vpn_target_both'].append(ele)
                self.evpn_info['vpn_target_export'].remove(ele)
                self.evpn_info['vpn_target_import'].remove(ele)
    def get_evpn_instance_info(self):
        """Query the device and populate evpn_info with current EVPN state."""
        if not self.bridge_domain_id:
            self.module.fail_json(msg='Error: The value of bridge_domain_id cannot be empty.')
        self.evpn_info['route_distinguisher'] = None
        self.evpn_info['vpn_target_import'] = list()
        self.evpn_info['vpn_target_export'] = list()
        self.evpn_info['vpn_target_both'] = list()
        self.evpn_info['evpn_inst'] = 'enable'
        xml_str = CE_NC_GET_EVPN_CONFIG % (
            self.bridge_domain_id, self.bridge_domain_id)
        xml_str = get_nc_config(self.module, xml_str)
        # An empty <data/> reply means no EVPN instance exists for this BD.
        if "<data/>" in xml_str:
            self.evpn_info['evpn_inst'] = 'disable'
            return
        # Strip namespace declarations so plain tag paths work below.
        xml_str = xml_str.replace('\r', '').replace('\n', '').\
            replace('xmlns="urn:ietf:params:xml:ns:netconf:base:1.0"', "").\
            replace('xmlns="http://www.huawei.com/netconf/vrp"', "")
        root = ElementTree.fromstring(xml_str)
        evpn_inst = root.find("data/evpn/evpnInstances/evpnInstance")
        # NOTE(review): truth-testing an Element is deprecated in ElementTree
        # (childless elements are falsy); 'is not None' would be safer here.
        if evpn_inst:
            for eles in evpn_inst:
                if eles.tag in ["evpnAutoRD", "evpnRD", "evpnRTs", "evpnAutoRTs"]:
                    # 'auto' RD wins over an explicit RD value.
                    if eles.tag == 'evpnAutoRD' and eles.text == 'true':
                        self.evpn_info['route_distinguisher'] = 'auto'
                    elif eles.tag == 'evpnRD' and self.evpn_info['route_distinguisher'] != 'auto':
                        self.evpn_info['route_distinguisher'] = eles.text
                    elif eles.tag == 'evpnRTs':
                        self.get_all_evpn_rts(eles)
                    elif eles.tag == 'evpnAutoRTs':
                        self.get_all_evpn_autorts(eles)
            self.process_rts_info()
    def get_existing(self):
        """Snapshot the device's current attributes into self.existing."""
        self.existing = dict(bridge_domain_id=self.bridge_domain_id,
                             evpn=self.evpn_info['evpn_inst'],
                             route_distinguisher=self.evpn_info[
                                 'route_distinguisher'],
                             vpn_target_both=self.evpn_info['vpn_target_both'],
                             vpn_target_import=self.evpn_info[
                                 'vpn_target_import'],
                             vpn_target_export=self.evpn_info['vpn_target_export'])
    def get_proposed(self):
        """Record the module's input parameters as the proposed config."""
        self.proposed = dict(bridge_domain_id=self.bridge_domain_id,
                             evpn=self.evpn,
                             route_distinguisher=self.route_distinguisher,
                             vpn_target_both=self.vpn_target_both,
                             vpn_target_import=self.vpn_target_import,
                             vpn_target_export=self.vpn_target_export,
                             state=self.state)
    def get_end_state(self):
        """Re-query the device and record the final configuration."""
        self.get_evpn_instance_info()
        self.end_state = dict(bridge_domain_id=self.bridge_domain_id,
                              evpn=self.evpn_info['evpn_inst'],
                              route_distinguisher=self.evpn_info[
                                  'route_distinguisher'],
                              vpn_target_both=self.evpn_info[
                                  'vpn_target_both'],
                              vpn_target_import=self.evpn_info[
                                  'vpn_target_import'],
                              vpn_target_export=self.evpn_info['vpn_target_export'])
    def show_result(self):
        """Assemble the result dict and exit the module."""
        self.results['changed'] = self.changed
        self.results['proposed'] = self.proposed
        self.results['existing'] = self.existing
        self.results['end_state'] = self.end_state
        if self.changed:
            self.results['updates'] = self.updates_cmd
        else:
            self.results['updates'] = list()
        self.module.exit_json(**self.results)
    def judge_if_vpn_target_exist(self, vpn_target_type):
        """Return True if every proposed RT of the given direction already
        exists on the device (the 'both' list counts for either direction)."""
        vpn_target = list()
        if vpn_target_type == 'vpn_target_import':
            vpn_target.extend(self.existing['vpn_target_both'])
            vpn_target.extend(self.existing['vpn_target_import'])
            return set(self.proposed['vpn_target_import']).issubset(vpn_target)
        elif vpn_target_type == 'vpn_target_export':
            vpn_target.extend(self.existing['vpn_target_both'])
            vpn_target.extend(self.existing['vpn_target_export'])
            return set(self.proposed['vpn_target_export']).issubset(vpn_target)
        return False
    def judge_if_config_exist(self):
        """Return True when the proposed configuration already exists,
        i.e. no NETCONF change needs to be pushed."""
        if self.state == 'absent':
            # For absent, any remaining RD/RT parameter means work to do.
            if self.route_distinguisher or self.vpn_target_import or self.vpn_target_export or self.vpn_target_both:
                return False
            else:
                return True
        if self.evpn_info['evpn_inst'] != self.evpn:
            return False
        if self.evpn == 'disable' and self.evpn_info['evpn_inst'] == 'disable':
            return True
        if self.proposed['bridge_domain_id'] != self.existing['bridge_domain_id']:
            return False
        if self.proposed['route_distinguisher']:
            if self.proposed['route_distinguisher'] != self.existing['route_distinguisher']:
                return False
        if self.proposed['vpn_target_both']:
            if not self.existing['vpn_target_both']:
                return False
            if not set(self.proposed['vpn_target_both']).issubset(self.existing['vpn_target_both']):
                return False
        if self.proposed['vpn_target_import']:
            if not self.judge_if_vpn_target_exist('vpn_target_import'):
                return False
        if self.proposed['vpn_target_export']:
            if not self.judge_if_vpn_target_exist('vpn_target_export'):
                return False
        return True
    def check_response(self, xml_str, xml_name):
        """Fail the module unless the NETCONF reply contains <ok/>."""
        if "<ok/>" not in xml_str:
            self.module.fail_json(msg='Error: %s failed.' % xml_name)
    def unconfig_evpn_instance(self):
        """Remove RD and/or VPN target configuration (state=absent)."""
        self.updates_cmd.append("bridge-domain %s" % self.bridge_domain_id)
        xml_str = CE_NC_MERGE_EVPN_CONFIG_HEAD % (
            self.bridge_domain_id, self.bridge_domain_id)
        self.updates_cmd.append(" evpn")
        # unconfigure RD
        if self.route_distinguisher:
            if self.route_distinguisher.lower() == 'auto':
                xml_str += '<evpnAutoRD>false</evpnAutoRD>'
                self.updates_cmd.append(" undo route-distinguisher auto")
            else:
                xml_str += '<evpnRD></evpnRD>'
                self.updates_cmd.append(
                    " undo route-distinguisher %s" % self.route_distinguisher)
            xml_str += CE_NC_MERGE_EVPN_CONFIG_TAIL
            recv_xml = set_nc_config(self.module, xml_str)
            self.check_response(recv_xml, "UNDO_EVPN_BD_RD")
            self.changed = True
            # Removing the RD implicitly removes all VPN targets on the
            # device (see module DOCUMENTATION), so return early here.
            return
        # process VPN target list
        # Expand 'both' entries into the export and import lists.
        vpn_target_export = copy.deepcopy(self.vpn_target_export)
        vpn_target_import = copy.deepcopy(self.vpn_target_import)
        if self.vpn_target_both:
            for ele in self.vpn_target_both:
                if ele not in vpn_target_export:
                    vpn_target_export.append(ele)
                if ele not in vpn_target_import:
                    vpn_target_import.append(ele)
        # unconfig EVPN auto RTS
        # head_flag tracks whether the wrapping container tag was opened.
        head_flag = False
        if vpn_target_export:
            for ele in vpn_target_export:
                if ele.lower() == 'auto':
                    if not head_flag:
                        xml_str += CE_NC_MERGE_EVPN_AUTORTS_HEAD
                        head_flag = True
                    xml_str += CE_NC_DELETE_EVPN_AUTORTS_CONTEXT % (
                        'export_extcommunity')
                    self.updates_cmd.append(
                        " undo vpn-target auto export-extcommunity")
        if vpn_target_import:
            for ele in vpn_target_import:
                if ele.lower() == 'auto':
                    if not head_flag:
                        xml_str += CE_NC_MERGE_EVPN_AUTORTS_HEAD
                        head_flag = True
                    xml_str += CE_NC_DELETE_EVPN_AUTORTS_CONTEXT % (
                        'import_extcommunity')
                    self.updates_cmd.append(
                        " undo vpn-target auto import-extcommunity")
        if head_flag:
            xml_str += CE_NC_MERGE_EVPN_AUTORTS_TAIL
        # unconfig EVPN RTS
        head_flag = False
        if vpn_target_export:
            for ele in vpn_target_export:
                if ele.lower() != 'auto':
                    if not head_flag:
                        xml_str += CE_NC_MERGE_EVPN_RTS_HEAD
                        head_flag = True
                    xml_str += CE_NC_DELETE_EVPN_RTS_CONTEXT % (
                        'export_extcommunity', ele)
                    self.updates_cmd.append(
                        " undo vpn-target %s export-extcommunity" % ele)
        if vpn_target_import:
            for ele in vpn_target_import:
                if ele.lower() != 'auto':
                    if not head_flag:
                        xml_str += CE_NC_MERGE_EVPN_RTS_HEAD
                        head_flag = True
                    xml_str += CE_NC_DELETE_EVPN_RTS_CONTEXT % (
                        'import_extcommunity', ele)
                    self.updates_cmd.append(
                        " undo vpn-target %s import-extcommunity" % ele)
        if head_flag:
            xml_str += CE_NC_MERGE_EVPN_RTS_TAIL
        xml_str += CE_NC_MERGE_EVPN_CONFIG_TAIL
        recv_xml = set_nc_config(self.module, xml_str)
        self.check_response(recv_xml, "MERGE_EVPN_BD_VPN_TARGET_CONFIG")
        self.changed = True
    def config_evpn_instance(self):
        """Create or update the EVPN instance (state=present)."""
        self.updates_cmd.append("bridge-domain %s" % self.bridge_domain_id)
        if self.evpn == 'disable':
            # Deleting the EVPN instance drops all of its configuration.
            xml_str = CE_NC_DELETE_EVPN_CONFIG % (
                self.bridge_domain_id, self.bridge_domain_id)
            recv_xml = set_nc_config(self.module, xml_str)
            self.check_response(recv_xml, "MERGE_EVPN_BD_CONFIG")
            self.updates_cmd.append(" undo evpn")
            self.changed = True
            return
        xml_str = CE_NC_MERGE_EVPN_CONFIG_HEAD % (
            self.bridge_domain_id, self.bridge_domain_id)
        self.updates_cmd.append(" evpn")
        # configure RD (only when none is configured yet; check_params has
        # already rejected a conflicting existing RD)
        if self.route_distinguisher:
            if not self.existing['route_distinguisher']:
                if self.route_distinguisher.lower() == 'auto':
                    xml_str += '<evpnAutoRD>true</evpnAutoRD>'
                    self.updates_cmd.append(" route-distinguisher auto")
                else:
                    xml_str += '<evpnRD>%s</evpnRD>' % self.route_distinguisher
                    self.updates_cmd.append(
                        " route-distinguisher %s" % self.route_distinguisher)
        # process VPN target list
        # Expand 'both' entries into the export and import lists.
        vpn_target_export = copy.deepcopy(self.vpn_target_export)
        vpn_target_import = copy.deepcopy(self.vpn_target_import)
        if self.vpn_target_both:
            for ele in self.vpn_target_both:
                if ele not in vpn_target_export:
                    vpn_target_export.append(ele)
                if ele not in vpn_target_import:
                    vpn_target_import.append(ele)
        # config EVPN auto RTS
        # head_flag tracks whether the wrapping container tag was opened;
        # targets already present on the device are skipped.
        head_flag = False
        if vpn_target_export:
            for ele in vpn_target_export:
                if ele.lower() == 'auto' and \
                        (not self.is_vpn_target_exist('export_extcommunity', ele.lower())):
                    if not head_flag:
                        xml_str += CE_NC_MERGE_EVPN_AUTORTS_HEAD
                        head_flag = True
                    xml_str += CE_NC_MERGE_EVPN_AUTORTS_CONTEXT % (
                        'export_extcommunity')
                    self.updates_cmd.append(
                        " vpn-target auto export-extcommunity")
        if vpn_target_import:
            for ele in vpn_target_import:
                if ele.lower() == 'auto' and \
                        (not self.is_vpn_target_exist('import_extcommunity', ele.lower())):
                    if not head_flag:
                        xml_str += CE_NC_MERGE_EVPN_AUTORTS_HEAD
                        head_flag = True
                    xml_str += CE_NC_MERGE_EVPN_AUTORTS_CONTEXT % (
                        'import_extcommunity')
                    self.updates_cmd.append(
                        " vpn-target auto import-extcommunity")
        if head_flag:
            xml_str += CE_NC_MERGE_EVPN_AUTORTS_TAIL
        # config EVPN RTS
        head_flag = False
        if vpn_target_export:
            for ele in vpn_target_export:
                if ele.lower() != 'auto' and \
                        (not self.is_vpn_target_exist('export_extcommunity', ele.lower())):
                    if not head_flag:
                        xml_str += CE_NC_MERGE_EVPN_RTS_HEAD
                        head_flag = True
                    xml_str += CE_NC_MERGE_EVPN_RTS_CONTEXT % (
                        'export_extcommunity', ele)
                    self.updates_cmd.append(
                        " vpn-target %s export-extcommunity" % ele)
        if vpn_target_import:
            for ele in vpn_target_import:
                if ele.lower() != 'auto' and \
                        (not self.is_vpn_target_exist('import_extcommunity', ele.lower())):
                    if not head_flag:
                        xml_str += CE_NC_MERGE_EVPN_RTS_HEAD
                        head_flag = True
                    xml_str += CE_NC_MERGE_EVPN_RTS_CONTEXT % (
                        'import_extcommunity', ele)
                    self.updates_cmd.append(
                        " vpn-target %s import-extcommunity" % ele)
        if head_flag:
            xml_str += CE_NC_MERGE_EVPN_RTS_TAIL
        xml_str += CE_NC_MERGE_EVPN_CONFIG_TAIL
        recv_xml = set_nc_config(self.module, xml_str)
        self.check_response(recv_xml, "MERGE_EVPN_BD_CONFIG")
        self.changed = True
    def is_vpn_target_exist(self, target_type, value):
        """Return True if the given RT value already exists on the device
        for the given direction ('both' counts for either direction)."""
        if target_type == 'export_extcommunity':
            if (value not in self.existing['vpn_target_export']) and\
                    (value not in self.existing['vpn_target_both']):
                return False
            return True
        if target_type == 'import_extcommunity':
            if (value not in self.existing['vpn_target_import']) and\
                    (value not in self.existing['vpn_target_both']):
                return False
            return True
        return False
    def config_evnp_bd(self):
        """Apply configuration when it does not already exist.
        (Note: 'evnp' is a historical typo kept for compatibility.)"""
        if not self.conf_exist:
            if self.state == 'present':
                self.config_evpn_instance()
            else:
                self.unconfig_evpn_instance()
    def process_input_params(self):
        """Normalize input: dedupe RTs between the import/export lists and
        fold common entries into vpn_target_both."""
        if self.state == 'absent':
            # evpn is ignored for state=absent (see module notes).
            self.evpn = None
        else:
            if self.evpn == 'disable':
                return
        if self.vpn_target_both:
            for ele in self.vpn_target_both:
                if ele in self.vpn_target_export:
                    self.vpn_target_export.remove(ele)
                if ele in self.vpn_target_import:
                    self.vpn_target_import.remove(ele)
        if self.vpn_target_export and self.vpn_target_import:
            # Iterate over a copy because the lists are mutated below.
            vpn_target_export = copy.deepcopy(self.vpn_target_export)
            for ele in vpn_target_export:
                if ele in self.vpn_target_import:
                    self.vpn_target_both.append(ele)
                    self.vpn_target_import.remove(ele)
                    self.vpn_target_export.remove(ele)
    def check_vpn_target_para(self):
        """Validate the syntax of the RD and all VPN target values."""
        if self.route_distinguisher:
            if self.route_distinguisher.lower() != 'auto' and\
                    not is_valid_value(self.route_distinguisher):
                self.module.fail_json(
                    msg='Error: Route distinguisher has invalid value %s.' % self.route_distinguisher)
        if self.vpn_target_export:
            for ele in self.vpn_target_export:
                if ele.lower() != 'auto' and not is_valid_value(ele):
                    self.module.fail_json(
                        msg='Error: VPN target extended community attribute has invalid value %s.' % ele)
        if self.vpn_target_import:
            for ele in self.vpn_target_import:
                if ele.lower() != 'auto' and not is_valid_value(ele):
                    self.module.fail_json(
                        msg='Error: VPN target extended community attribute has invalid value %s.' % ele)
        if self.vpn_target_both:
            for ele in self.vpn_target_both:
                if ele.lower() != 'auto' and not is_valid_value(ele):
                    self.module.fail_json(
                        msg='Error: VPN target extended community attribute has invalid value %s.' % ele)
    def check_undo_params_if_exist(self):
        """For state=absent, verify every RT to remove exists on the device."""
        if self.vpn_target_import:
            for ele in self.vpn_target_import:
                if ele not in self.evpn_info['vpn_target_import'] and ele not in self.evpn_info['vpn_target_both']:
                    self.module.fail_json(
                        msg='Error: VPN target import attribute value %s doesnot exist.' % ele)
        if self.vpn_target_export:
            for ele in self.vpn_target_export:
                if ele not in self.evpn_info['vpn_target_export'] and ele not in self.evpn_info['vpn_target_both']:
                    self.module.fail_json(
                        msg='Error: VPN target export attribute value %s doesnot exist.' % ele)
        if self.vpn_target_both:
            for ele in self.vpn_target_both:
                if ele not in self.evpn_info['vpn_target_both']:
                    self.module.fail_json(
                        msg='Error: VPN target export and import attribute value %s doesnot exist.' % ele)
    def check_params(self):
        """Validate all input parameters against device state."""
        # bridge_domain_id check
        if self.bridge_domain_id:
            if not self.bridge_domain_id.isdigit():
                self.module.fail_json(
                    msg='Error: The parameter of bridge domain id is invalid.')
            if int(self.bridge_domain_id) > 16777215 or int(self.bridge_domain_id) < 1:
                self.module.fail_json(
                    msg='Error: The bridge domain id must be an integer between 1 and 16777215.')
        if self.state == 'absent':
            self.check_undo_params_if_exist()
        # check bd whether binding the vxlan vni
        self.check_vni_bd()
        self.check_vpn_target_para()
        if self.state == 'absent':
            if self.route_distinguisher:
                if not self.evpn_info['route_distinguisher']:
                    self.module.fail_json(
                        msg='Error: Route distinguisher doesnot have been configured.')
                else:
                    if self.route_distinguisher != self.evpn_info['route_distinguisher']:
                        self.module.fail_json(
                            msg='Error: Current route distinguisher value is %s.' %
                            self.evpn_info['route_distinguisher'])
        if self.state == 'present':
            if self.route_distinguisher:
                # An RD cannot be changed in place; it must be removed first.
                if self.evpn_info['route_distinguisher'] and\
                        self.route_distinguisher != self.evpn_info['route_distinguisher']:
                    self.module.fail_json(
                        msg='Error: Route distinguisher has already been configured.')
    def check_vni_bd(self):
        """Fail unless a VXLAN VNI is bound to this bridge domain."""
        xml_str = CE_NC_GET_VNI_BD % self.bridge_domain_id
        xml_str = get_nc_config(self.module, xml_str)
        # An empty <data/> reply means no VNI/BD binding exists.
        if "<data/>" in xml_str:
            self.module.fail_json(
                msg='Error: The vxlan vni is not configured or the bridge domain id is invalid.')
    def work(self):
        """Execute the task: gather state, compare, configure, report."""
        self.get_evpn_instance_info()
        self.process_input_params()
        self.check_params()
        self.get_existing()
        self.get_proposed()
        self.conf_exist = self.judge_if_config_exist()
        self.config_evnp_bd()
        self.get_end_state()
        self.show_result()
def main():
    """Module entry point: build the argument spec and run the EvpnBd task."""
    spec = dict(
        bridge_domain_id=dict(required=True, type='str'),
        evpn=dict(required=False, type='str', default='enable',
                  choices=['enable', 'disable']),
        route_distinguisher=dict(required=False, type='str'),
        vpn_target_both=dict(required=False, type='list'),
        vpn_target_import=dict(required=False, type='list'),
        vpn_target_export=dict(required=False, type='list'),
        state=dict(required=False, default='present',
                   choices=['present', 'absent']),
    )
    # Merge in the common CloudEngine connection arguments.
    spec.update(ce_argument_spec)
    EvpnBd(spec).work()


if __name__ == '__main__':
    main()
| gpl-3.0 |
tiangolo/ansible | v1/ansible/runner/lookup_plugins/sequence.py | 85 | 7309 | # (c) 2013, Jayson Vantuyl <jayson@aggressive.ly>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from ansible.errors import AnsibleError
import ansible.utils as utils
from re import compile as re_compile, IGNORECASE
# shortcut format
# NUM matches one integer in decimal, leading-zero octal (0664) or hex
# (0x3f8) form; int(value, 0) later interprets the prefix.
NUM = "(0?x?[0-9a-f]+)"
# SHORTCUT parses "[start-]end[/stride][:format]" in one anchored regex.
SHORTCUT = re_compile(
    "^(" + # Group 0
    NUM + # Group 1: Start
    "-)?" +
    NUM + # Group 2: End
    "(/" + # Group 3
    NUM + # Group 4: Stride
    ")?" +
    "(:(.+))?$", # Group 5, Group 6: Format String
    IGNORECASE
)
class LookupModule(object):
    """
    sequence lookup module
    Used to generate some sequence of items. Takes arguments in two forms.
    The simple / shortcut form is:
    [start-]end[/stride][:format]
    As indicated by the brackets: start, stride, and format string are all
    optional.  The format string is in the style of printf.  This can be used
    to pad with zeros, format in hexadecimal, etc.  All of the numerical values
    can be specified in octal (i.e. 0664) or hexadecimal (i.e. 0x3f8).
    Negative numbers are not supported.
    Some examples:
    5 -> ["1","2","3","4","5"]
    5-8 -> ["5", "6", "7", "8"]
    2-10/2 -> ["2", "4", "6", "8", "10"]
    4:host%02d -> ["host01","host02","host03","host04"]
    The standard Ansible key-value form is accepted as well.  For example:
    start=5 end=11 stride=2 format=0x%02x -> ["0x05","0x07","0x09","0x0b"]
    This format takes an alternate form of "end" called "count", which counts
    some number from the starting value.  For example:
    count=5 -> ["1", "2", "3", "4", "5"]
    start=0x0f00 count=4 format=%04x -> ["0f00", "0f01", "0f02", "0f03"]
    start=0 count=5 stride=2 -> ["0", "2", "4", "6", "8"]
    start=1 count=5 stride=2 -> ["1", "3", "5", "7", "9"]
    The count option is mostly useful for avoiding off-by-one errors and errors
    calculating the number of entries in a sequence when a stride is specified.
    """
    def __init__(self, basedir, **kwargs):
        """Absorb any keyword args; only the lookup basedir is retained."""
        self.basedir = basedir
    def reset(self):
        """Restore sensible defaults before parsing a new term."""
        self.start = 1
        self.count = None
        self.end = None
        self.stride = 1
        self.format = "%d"
    def parse_kv_args(self, args):
        """Parse key=value style arguments.

        @raise AnsibleError: if a value is not an integer or a key is unknown.
        """
        for arg in ["start", "end", "count", "stride"]:
            try:
                arg_raw = args.pop(arg, None)
                if arg_raw is None:
                    continue
                # base 0 lets int() honor 0x.../leading-0 prefixes
                arg_cooked = int(arg_raw, 0)
                setattr(self, arg, arg_cooked)
            except ValueError:
                raise AnsibleError(
                    "can't parse arg %s=%r as integer"
                    % (arg, arg_raw)
                )
        if 'format' in args:
            self.format = args.pop("format")
        if args:
            raise AnsibleError(
                "unrecognized arguments to with_sequence: %r"
                % args.keys()
            )
    def parse_simple_args(self, term):
        """Parse the shortcut form; return True if it matched, else False."""
        match = SHORTCUT.match(term)
        if not match:
            return False
        # renamed local: don't shadow the builtin `format`
        _, start, end, _, stride, _, fmt = match.groups()
        if start is not None:
            try:
                start = int(start, 0)
            except ValueError:
                raise AnsibleError("can't parse start=%s as integer" % start)
        if end is not None:
            try:
                end = int(end, 0)
            except ValueError:
                raise AnsibleError("can't parse end=%s as integer" % end)
        if stride is not None:
            try:
                stride = int(stride, 0)
            except ValueError:
                raise AnsibleError("can't parse stride=%s as integer" % stride)
        if start is not None:
            self.start = start
        if end is not None:
            self.end = end
        if stride is not None:
            self.stride = stride
        if fmt is not None:
            self.format = fmt
        # BUG FIX: the True return was missing, so run() always fell
        # through to parse_kv_args() and rejected valid shortcut terms.
        return True
    def sanity_check(self):
        """Validate parsed values and normalize `count` into `end`."""
        if self.count is None and self.end is None:
            raise AnsibleError(
                "must specify count or end in with_sequence"
            )
        elif self.count is not None and self.end is not None:
            raise AnsibleError(
                "can't specify both count and end in with_sequence"
            )
        elif self.count is not None:
            # convert count to end
            if self.count != 0:
                self.end = self.start + self.count * self.stride - 1
            else:
                # count=0 asks for an empty sequence; start==end makes
                # run() skip generation entirely
                self.start = 0
                self.end = 0
                self.stride = 0
            del self.count
        if self.stride > 0 and self.end < self.start:
            raise AnsibleError("to count backwards make stride negative")
        if self.stride < 0 and self.end > self.start:
            raise AnsibleError("to count forward don't make stride negative")
        if self.format.count('%') != 1:
            raise AnsibleError("bad formatting string: %s" % self.format)
    def generate_sequence(self):
        """Yield each element of the sequence, formatted with self.format."""
        if self.stride > 0:
            adjust = 1
        else:
            adjust = -1
        # adjust makes the range inclusive of self.end in either direction
        numbers = xrange(self.start, self.end + adjust, self.stride)
        for i in numbers:
            try:
                formatted = self.format % i
                yield formatted
            except (ValueError, TypeError):
                # BUG FIX: the original did `% self.format` against a
                # two-placeholder string, which itself raised TypeError
                raise AnsibleError(
                    "problem formatting %r with %r" % (i, self.format)
                )
    def run(self, terms, inject=None, **kwargs):
        """Lookup entry point: expand every term into its sequence."""
        results = []
        terms = utils.listify_lookup_plugin_terms(terms, self.basedir, inject)
        if isinstance(terms, basestring):
            terms = [ terms ]
        for term in terms:
            try:
                self.reset() # clear out things for this iteration
                try:
                    if not self.parse_simple_args(term):
                        self.parse_kv_args(utils.parse_kv(term))
                except AnsibleError:
                    # keep the specific parse error message instead of
                    # masking it as "unknown error"
                    raise
                except Exception:
                    raise AnsibleError(
                        "unknown error parsing with_sequence arguments: %r"
                        % term
                    )
                self.sanity_check()
                if self.start != self.end:
                    results.extend(self.generate_sequence())
            except AnsibleError:
                raise
            except Exception as e:
                raise AnsibleError(
                    "unknown error generating sequence: %s" % str(e)
                )
        return results
| gpl-3.0 |
kanjie128/test | pymavlink/tools/mavsearch.py | 11 | 1195 | #!/usr/bin/env python
'''
search a set of log files for a condition
'''
import sys, time, os
from pymavlink import mavutil
from optparse import OptionParser
# command-line interface: what to search for and when to stop scanning
parser = OptionParser("mavsearch.py [options]")
parser.add_option("--condition", default=None, help="conditional check on log")
parser.add_option("--types", default=None, help="message types to look for (comma separated)")
parser.add_option("--stop", action='store_true', help="stop when message type found")
parser.add_option("--stopcondition", action='store_true', help="stop when condition met")
(opts, args) = parser.parse_args()
def mavsearch(filename):
    """Scan one MAVLink log, printing every message that matches.

    Messages are filtered by --types (when given) and tested against
    --condition.  --stopcondition stops at the first condition match;
    --stop stops after the first message of a requested type.
    """
    print("Loading %s ..." % filename)
    mlog = mavutil.mavlink_connection(filename)
    if opts.types is not None:
        types = opts.types.split(',')
    else:
        types = None
    while True:
        m = mlog.recv_match(type=types)
        if m is None:
            # end of log
            break
        if mlog.check_condition(opts.condition):
            # print() call form, consistent with the rest of this file
            # (the original used a bare `print m` statement)
            print(m)
            if opts.stopcondition:
                break
        if opts.stop:
            break
# at least one log file is required on the command line
if len(args) < 1:
    print("Usage: mavsearch.py [options] <LOGFILE...>")
    sys.exit(1)
for f in args:
    mavsearch(f)
| lgpl-3.0 |
lmazuel/azure-sdk-for-python | azure-mgmt-network/azure/mgmt/network/v2017_08_01/models/virtual_network_peering.py | 1 | 4050 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .sub_resource import SubResource
class VirtualNetworkPeering(SubResource):
    """Peerings in a virtual network resource.
    :param id: Resource ID.
    :type id: str
    :param allow_virtual_network_access: Whether the VMs in the linked virtual
     network space would be able to access all the VMs in local Virtual network
     space.
    :type allow_virtual_network_access: bool
    :param allow_forwarded_traffic: Whether the forwarded traffic from the VMs
     in the remote virtual network will be allowed/disallowed.
    :type allow_forwarded_traffic: bool
    :param allow_gateway_transit: If gateway links can be used in remote
     virtual networking to link to this virtual network.
    :type allow_gateway_transit: bool
    :param use_remote_gateways: If remote gateways can be used on this virtual
     network. If the flag is set to true, and allowGatewayTransit on remote
     peering is also true, virtual network will use gateways of remote virtual
     network for transit. Only one peering can have this flag set to true. This
     flag cannot be set if virtual network already has a gateway.
    :type use_remote_gateways: bool
    :param remote_virtual_network: The reference of the remote virtual
     network.
    :type remote_virtual_network:
     ~azure.mgmt.network.v2017_08_01.models.SubResource
    :param peering_state: The status of the virtual network peering. Possible
     values are 'Initiated', 'Connected', and 'Disconnected'. Possible values
     include: 'Initiated', 'Connected', 'Disconnected'
    :type peering_state: str or
     ~azure.mgmt.network.v2017_08_01.models.VirtualNetworkPeeringState
    :param provisioning_state: The provisioning state of the resource.
    :type provisioning_state: str
    :param name: The name of the resource that is unique within a resource
     group. This name can be used to access the resource.
    :type name: str
    :param etag: A unique read-only string that changes whenever the resource
     is updated.
    :type etag: str
    """
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'allow_virtual_network_access': {'key': 'properties.allowVirtualNetworkAccess', 'type': 'bool'},
        'allow_forwarded_traffic': {'key': 'properties.allowForwardedTraffic', 'type': 'bool'},
        'allow_gateway_transit': {'key': 'properties.allowGatewayTransit', 'type': 'bool'},
        'use_remote_gateways': {'key': 'properties.useRemoteGateways', 'type': 'bool'},
        'remote_virtual_network': {'key': 'properties.remoteVirtualNetwork', 'type': 'SubResource'},
        'peering_state': {'key': 'properties.peeringState', 'type': 'str'},
        'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'etag': {'key': 'etag', 'type': 'str'},
    }
    def __init__(self, **kwargs):
        super(VirtualNetworkPeering, self).__init__(**kwargs)
        # Every property of this model is optional: copy each one from the
        # keyword arguments, defaulting to None when absent.
        for prop in ('allow_virtual_network_access', 'allow_forwarded_traffic',
                     'allow_gateway_transit', 'use_remote_gateways',
                     'remote_virtual_network', 'peering_state',
                     'provisioning_state', 'name', 'etag'):
            setattr(self, prop, kwargs.get(prop, None))
| mit |
midonet/Chimata-No-Kami | stages/midonet_api/fabfile.py | 1 | 1229 |
puts(green("installing MidoNet api on %s" % env.host_string))
zk = []
for zkhost in metadata.roles['zookeeper']:
zk.append("{'ip' => '%s', 'port' => '2181'}" % metadata.servers[zkhost]['ip'])
args = {}
args['zk_servers'] = "[%s]" % ",".join(zk)
args['keystone_auth'] = "false"
args['vtep'] = "true"
if 'fip' in metadata.servers[env.host_string]:
args['api_ip'] = "'%s'" % metadata.servers[env.host_string]['fip']
else:
args['api_ip'] = "'%s'" % metadata.servers[env.host_string]['ip']
run("""
echo 1 > /proc/sys/net/ipv6/conf/all/disable_ipv6
apt-get remove --purge -y midonet-api; echo
apt-get remove --purge -y tomcat7; echo
apt-get remove --purge -y tomcat6; echo
""")
Puppet.apply('midonet::midonet_api', args, metadata)
run("""
cat >/etc/default/tomcat7 <<EOF
TOMCAT7_USER=tomcat7
TOMCAT7_GROUP=tomcat7
JAVA_OPTS="-Djava.awt.headless=true -Xmx128m -XX:+UseConcMarkSweepGC -Djava.net.preferIPv4Stack=true -Djava.security.egd=file:/dev/./urandom"
EOF
sed -i 's,org.midonet.api.auth.MockAuthService,org.midonet.cluster.auth.MockAuthService,g;' /usr/share/midonet-api/WEB-INF/web.xml
rm -rfv /var/log/tomcat7/*
service tomcat7 restart
""")
| apache-2.0 |
da1z/intellij-community | python/helpers/epydoc/util.py | 91 | 10077 | # epydoc -- Utility functions
#
# Copyright (C) 2005 Edward Loper
# Author: Edward Loper <edloper@loper.org>
# URL: <http://epydoc.sf.net>
#
# $Id: util.py 1671 2008-01-29 02:55:49Z edloper $
"""
Miscellaneous utility functions that are used by multiple modules.
@group Python source types: is_module_file, is_package_dir, is_pyname,
py_src_filename
@group Text processing: wordwrap, decode_with_backslashreplace,
plaintext_to_html
"""
__docformat__ = 'epytext en'
import os, os.path, re
######################################################################
## Python Source Types
######################################################################
# Recognized Python file extensions: editable source vs. compiled/binary.
PY_SRC_EXTENSIONS = ['.py', '.pyw']
PY_BIN_EXTENSIONS = ['.pyc', '.so', '.pyd']
def is_module_file(path):
    """Return a true value if `path` names an existing Python module file:
    its basename must be a valid identifier and its extension one of the
    known source or binary extensions.

    Note: the return value may be a falsy non-bool (None from re.match),
    so callers should test truthiness, not identity with False.
    """
    # Make sure it's a file name.  (basestring: this module targets Python 2)
    if not isinstance(path, basestring):
        return False
    (dir, filename) = os.path.split(path)
    (basename, extension) = os.path.splitext(filename)
    return (os.path.isfile(path) and
            re.match('[a-zA-Z_]\w*$', basename) and
            extension in PY_SRC_EXTENSIONS+PY_BIN_EXTENSIONS)
def is_src_filename(filename):
    """Return True if `filename` is an existing Python *source* file
    (.py/.pyw); compiled/binary forms (.pyc/.so/.pyd) do not count."""
    if not isinstance(filename, basestring): return False
    if not os.path.exists(filename): return False
    return os.path.splitext(filename)[1] in PY_SRC_EXTENSIONS
def is_package_dir(dirname):
    """
    Return true if the given directory is a valid package directory
    (i.e., it names a directory that contains a valid __init__ file,
    and its name is a valid identifier).
    """
    # Make sure it's a directory name.  (basestring: Python 2 module)
    if not isinstance(dirname, basestring):
        return False
    if not os.path.isdir(dirname):
        return False
    dirname = os.path.abspath(dirname)
    # Make sure it's a valid identifier.  (Special case for
    # "foo/", where os.path.split -> ("foo", "").)
    (parent, dir) = os.path.split(dirname)
    if dir == '': (parent, dir) = os.path.split(parent)
    
    # The following constraint was removed because of sourceforge
    # bug #1787028 -- in some cases (eg eggs), it's too strict.
    #if not re.match('\w+$', dir):
    #    return False
    
    # for/else: the else branch runs when no __init__ module was found
    # (the loop never breaks; it returns True directly on success).
    for name in os.listdir(dirname):
        filename = os.path.join(dirname, name)
        if name.startswith('__init__.') and is_module_file(filename):
            return True
    else:
        return False
def is_pyname(name):
    """Return a true value (a match object) iff `name` looks like a
    dotted Python name: identifier words joined by single dots."""
    dotted_identifier = r"\w+(\.\w+)*$"
    return re.match(dotted_identifier, name)
def py_src_filename(filename):
    """Return the Python source file corresponding to `filename`.

    If `filename` already carries a source extension it is returned
    unchanged; otherwise each known source extension is tried in turn
    and the first existing candidate wins.

    @raise ValueError: if no corresponding source file exists.
    """
    basefile, extension = os.path.splitext(filename)
    if extension in PY_SRC_EXTENSIONS:
        return filename
    else:
        for ext in PY_SRC_EXTENSIONS:
            if os.path.isfile('%s%s' % (basefile, ext)):
                return '%s%s' % (basefile, ext)
        else:
            # for/else: reached only when the loop found no candidate
            raise ValueError('Could not find a corresponding '
                             'Python source file for %r.' % filename)
def munge_script_name(filename):
    """Turn a script path into a documentation name: 'script-' plus the
    file's basename with every non-word character replaced by '_'."""
    basename = os.path.split(filename)[1]
    sanitized = re.sub(r'\W', '_', basename)
    return 'script-' + sanitized
######################################################################
## Text Processing
######################################################################
def decode_with_backslashreplace(s):
    r"""
    Convert the given 8-bit string into unicode, treating any
    character c such that ord(c)<128 as an ascii character, and
    converting any c such that ord(c)>128 into a backslashed escape
    sequence.
        >>> decode_with_backslashreplace('abc\xff\xe8')
        u'abc\\xff\\xe8'

    NOTE: Python 2 only -- relies on byte-str .decode() and the
    round-trip through latin1/ascii below.
    """
    # s.encode('string-escape') is not appropriate here, since it
    # also adds backslashes to some ascii chars (eg \ and ').
    assert isinstance(s, str)
    # latin1 maps each byte 1:1 to the same code point; the ascii step
    # then escapes everything >= 128 as \xNN.
    return (s
            .decode('latin1')
            .encode('ascii', 'backslashreplace')
            .decode('ascii'))
def wordwrap(str, indent=0, right=75, startindex=0, splitchars=''):
    """
    Word-wrap the given string.  I.e., add newlines to the string such
    that any lines that are longer than C{right} are broken into
    shorter lines (at the first whitespace sequence that occurs before
    index C{right}).  If the given string contains newlines, they will
    I{not} be removed.  Any lines that begin with whitespace will not
    be wordwrapped.

    @param indent: If specified, then indent each line by this number
        of spaces.
    @type indent: C{int}
    @param right: The right margin for word wrapping.  Lines that are
        longer than C{right} will be broken at the first whitespace
        sequence before the right margin.
    @type right: C{int}
    @param startindex: If specified, then assume that the first line
        is already preceeded by C{startindex} characters.
    @type startindex: C{int}
    @param splitchars: A list of non-whitespace characters which can
        be used to split a line.  (E.g., use '/\\' to allow path names
        to be split over multiple lines.)
    @rtype: C{str}
    """
    # Split into alternating separator/word chunks; capturing groups in
    # re.split keep the separators in the chunk list.
    if splitchars:
        chunks = re.split(r'( +|\n|[^ \n%s]*[%s])' %
                          (re.escape(splitchars), re.escape(splitchars)),
                          str.expandtabs())
    else:
        chunks = re.split(r'( +|\n)', str.expandtabs())
    # Pad (or not) so the first line starts at column max(indent, startindex).
    result = [' '*(indent-startindex)]
    charindex = max(indent, startindex)
    for chunknum, chunk in enumerate(chunks):
        # Break before any chunk that would overflow the margin, and at
        # every explicit newline.
        if (charindex+len(chunk) > right and charindex > 0) or chunk == '\n':
            result.append('\n' + ' '*indent)
            charindex = indent
            # Drop a separator chunk that lands at a line start.
            if chunk[:1] not in ('\n', ' '):
                result.append(chunk)
                charindex += len(chunk)
        else:
            result.append(chunk)
            charindex += len(chunk)
    return ''.join(result).rstrip()+'\n'
def plaintext_to_html(s):
    """
    @return: An HTML string that encodes the given plaintext string.
    In particular, special characters (such as C{'<'} and C{'&'})
    are escaped.
    @rtype: C{string}
    """
    # '&' must be escaped first so the entities added afterwards are
    # not themselves re-escaped.
    for char, entity in (('&', '&amp;'), ('"', '&quot;'),
                         ('<', '&lt;'), ('>', '&gt;')):
        s = s.replace(char, entity)
    return s
def plaintext_to_latex(str, nbsp=0, breakany=0):
    """
    @return: A LaTeX string that encodes the given plaintext string.
    In particular, special characters (such as C{'$'} and C{'_'})
    are escaped, and tabs are expanded.
    @rtype: C{string}
    @param breakany: Insert hyphenation marks, so that LaTeX can
    break the resulting string at any point. This is useful for
    small boxes (e.g., the type box in the variable list table).
    @param nbsp: Replace every space with a non-breaking space
    (C{'~'}).
    """
    # NOTE: '\0' and '\1' are used as sentinel characters below, so the
    # replacements can't clobber each other's output; they are resolved
    # into their final LaTeX forms at the end.
    # These get converted to hyphenation points later
    if breakany: str = re.sub('(.)', '\\1\1', str)
    # These get converted to \textbackslash later.
    str = str.replace('\\', '\0')
    # Expand tabs
    str = str.expandtabs()
    # These elements need to be backslashed.
    str = re.sub(r'([#$&%_\${}])', r'\\\1', str)
    # These elements have special names.
    str = str.replace('|', '{\\textbar}')
    str = str.replace('<', '{\\textless}')
    str = str.replace('>', '{\\textgreater}')
    str = str.replace('^', '{\\textasciicircum}')
    str = str.replace('~', '{\\textasciitilde}')
    str = str.replace('\0', r'{\textbackslash}')
    # replace spaces with non-breaking spaces
    if nbsp: str = str.replace(' ', '~')
    # Convert \1's to hyphenation points.
    if breakany: str = str.replace('\1', r'\-')
    return str
class RunSubprocessError(OSError):
    """Raised when a subprocess started by run_subprocess() fails;
    carries the child's captured stdout and stderr."""
    def __init__(self, cmd, out, err):
        # Keep the captured streams so callers can inspect them.
        self.out = out
        self.err = err
        OSError.__init__(self, '%s failed' % cmd[0])
def run_subprocess(cmd, data=None):
    """
    Execute the command C{cmd} in a subprocess.

    @param cmd: The command to execute, specified as a list
        of string.
    @param data: A string containing data to send to the
        subprocess.
    @return: A tuple C{(out, err)}.
    @raise OSError: If there is any problem executing the
        command, or if its exitval is not 0.

    NOTE: the popen2/os.popen3 fallbacks below exist only for
    Python <= 2.3; this function is Python 2 code (`basestring`,
    `except IOError, e`).
    """
    if isinstance(cmd, basestring):
        cmd = cmd.split()

    # Under Python 2.4+, use subprocess
    try:
        from subprocess import Popen, PIPE
        pipe = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE)
        out, err = pipe.communicate(data)
        if hasattr(pipe, 'returncode'):
            if pipe.returncode == 0:
                return out, err
            else:
                raise RunSubprocessError(cmd, out, err)
        else:
            # Assume that there was an error iff anything was written
            # to the child's stderr.
            if err == '':
                return out, err
            else:
                raise RunSubprocessError(cmd, out, err)
    except ImportError:
        pass

    # Under Python 2.3 or earlier, on unix, use popen2.Popen3 so we
    # can access the return value.
    import popen2
    if hasattr(popen2, 'Popen3'):
        pipe = popen2.Popen3(' '.join(cmd), True)
        to_child = pipe.tochild
        from_child = pipe.fromchild
        child_err = pipe.childerr
        if data:
            to_child.write(data)
        to_child.close()
        out = err = ''
        # Drain both pipes while the child runs, then once more after
        # it exits to pick up any remaining buffered output.
        while pipe.poll() is None:
            out += from_child.read()
            err += child_err.read()
        out += from_child.read()
        err += child_err.read()
        if pipe.wait() == 0:
            return out, err
        else:
            raise RunSubprocessError(cmd, out, err)

    # Under Python 2.3 or earlier, on non-unix, use os.popen3
    else:
        to_child, from_child, child_err = os.popen3(' '.join(cmd), 'b')
        if data:
            try:
                to_child.write(data)
            # Guard for a broken pipe error
            except IOError, e:
                raise OSError(e)
        to_child.close()
        out = from_child.read()
        err = child_err.read()
        # Assume that there was an error iff anything was written
        # to the child's stderr.
        if err == '':
            return out, err
        else:
            raise RunSubprocessError(cmd, out, err)
| apache-2.0 |
yippeecw/sfa | sfa/importer/dummyimporter.py | 2 | 15671 | #
# Dummy importer
#
# requirements
#
# read the planetlab database and update the local registry database accordingly
# so we update the following collections
# . authorities (from pl sites)
# . node (from pl nodes)
# . users+keys (from pl persons and attached keys)
# known limitation : *one* of the ssh keys is chosen at random here
# xxx todo/check xxx at the very least, when a key is known to the registry
# and is still current in plc
# then we should definitely make sure to keep that one in sfa...
# . slice+researchers (from pl slices and attached users)
#
import os
from sfa.util.config import Config
from sfa.util.xrn import Xrn, get_leaf, get_authority, hrn_to_urn
from sfa.trust.gid import create_uuid
from sfa.trust.certificate import convert_public_key, Keypair
# using global alchemy.session() here is fine
# as importer is on standalone one-shot process
from sfa.storage.alchemy import global_dbsession
from sfa.storage.model import RegRecord, RegAuthority, RegSlice, RegNode, RegUser, RegKey
from sfa.dummy.dummyshell import DummyShell
from sfa.dummy.dummyxrn import hostname_to_hrn, slicename_to_hrn, email_to_hrn, hrn_to_dummy_slicename
def _get_site_hrn(interface_hrn, site):
hrn = ".".join([interface_hrn, site['name']])
return hrn
class DummyImporter:
    """One-shot importer that mirrors a Dummy testbed (site, nodes,
    users with their keys, slices) into the SFA registry database,
    then deletes registry records that no longer exist testbed-side.

    Uses the global alchemy session, which is fine because the importer
    runs as a standalone process (see module header).
    """
    def __init__ (self, auth_hierarchy, logger):
        self.auth_hierarchy = auth_hierarchy
        self.logger=logger
    def add_options (self, parser):
        # we don't have any options for now
        pass
    # hrn hash is initialized from current db
    # remember just-created records as we go
    # xxx might make sense to add a UNIQUE constraint in the db itself
    def remember_record_by_hrn (self, record):
        """Index `record` under (type, hrn), warning on duplicates."""
        tuple = (record.type, record.hrn)
        if tuple in self.records_by_type_hrn:
            self.logger.warning ("DummyImporter.remember_record_by_hrn: duplicate (%s,%s)"%tuple)
            return
        self.records_by_type_hrn [ tuple ] = record
    # ditto for pointer hash
    def remember_record_by_pointer (self, record):
        """Index `record` under (type, pointer); -1 means no pointer."""
        if record.pointer == -1:
            self.logger.warning ("DummyImporter.remember_record_by_pointer: pointer is void")
            return
        tuple = (record.type, record.pointer)
        if tuple in self.records_by_type_pointer:
            self.logger.warning ("DummyImporter.remember_record_by_pointer: duplicate (%s,%s)"%tuple)
            return
        self.records_by_type_pointer [ ( record.type, record.pointer,) ] = record
    def remember_record (self, record):
        """Index `record` in both the hrn and pointer hashes."""
        self.remember_record_by_hrn (record)
        self.remember_record_by_pointer (record)
    def locate_by_type_hrn (self, type, hrn):
        """Return the known record for (type, hrn), or None."""
        return self.records_by_type_hrn.get ( (type, hrn), None)
    def locate_by_type_pointer (self, type, pointer):
        """Return the known record for (type, pointer), or None."""
        return self.records_by_type_pointer.get ( (type, pointer), None)
    # a convenience/helper function to see if a record is already known
    # a former, broken, attempt (in 2.1-9) had been made
    # to try and use 'pointer' as a first, most significant attempt
    # the idea being to preserve stuff as much as possible, and thus
    # to avoid creating a new gid in the case of a simple hrn rename
    # however this of course doesn't work as the gid depends on the hrn...
    #def locate (self, type, hrn=None, pointer=-1):
    #    if pointer!=-1:
    #        attempt = self.locate_by_type_pointer (type, pointer)
    #        if attempt : return attempt
    #    if hrn is not None:
    #        attempt = self.locate_by_type_hrn (type, hrn,)
    #        if attempt : return attempt
    #    return None
    # this makes the run method a bit abtruse - out of the way
    def run (self, options):
        """Main entry point: sync the registry with the testbed.

        Flow: load all SFA records and mark them stale; fetch the
        testbed's sites/users/keys/nodes/slices; (re)create missing
        records, un-marking stale as records are confirmed; finally
        delete whatever is still stale (special records excepted).
        """
        config = Config ()
        interface_hrn = config.SFA_INTERFACE_HRN
        root_auth = config.SFA_REGISTRY_ROOT_AUTH
        shell = DummyShell (config)
        ######## retrieve all existing SFA objects
        all_records = global_dbsession.query(RegRecord).all()
        # create hash by (type,hrn)
        # we essentially use this to know if a given record is already known to SFA
        self.records_by_type_hrn = \
            dict ( [ ( (record.type, record.hrn) , record ) for record in all_records ] )
        # create hash by (type,pointer)
        self.records_by_type_pointer = \
            dict ( [ ( (record.type, record.pointer) , record ) for record in all_records
                     if record.pointer != -1] )
        # initialize record.stale to True by default, then mark stale=False on the ones that are in use
        for record in all_records: record.stale=True
        ######## retrieve Dummy TB data
        # Get all plc sites
        # retrieve only required stuf
        sites = [shell.GetTestbedInfo()]
        # create a hash of sites by login_base
        # sites_by_login_base = dict ( [ ( site['login_base'], site ) for site in sites ] )
        # Get all dummy TB users
        users = shell.GetUsers()
        # create a hash of users by user_id
        users_by_id = dict ( [ ( user['user_id'], user) for user in users ] )
        # Get all dummy TB public keys
        keys = []
        for user in users:
            if 'keys' in user:
                keys.extend(user['keys'])
        # create a dict user_id -> [ keys ]
        keys_by_person_id = {}
        for user in users:
            if 'keys' in user:
                keys_by_person_id[user['user_id']] = user['keys']
        # Get all dummy TB nodes
        nodes = shell.GetNodes()
        # create hash by node_id
        nodes_by_id = dict ( [ ( node['node_id'], node, ) for node in nodes ] )
        # Get all dummy TB slices
        slices = shell.GetSlices()
        # create hash by slice_id
        slices_by_id = dict ( [ (slice['slice_id'], slice ) for slice in slices ] )
        # start importing
        for site in sites:
            site_hrn = _get_site_hrn(interface_hrn, site)
            # import if hrn is not in list of existing hrns or if the hrn exists
            # but its not a site record
            site_record=self.locate_by_type_hrn ('authority', site_hrn)
            if not site_record:
                try:
                    urn = hrn_to_urn(site_hrn, 'authority')
                    if not self.auth_hierarchy.auth_exists(urn):
                        self.auth_hierarchy.create_auth(urn)
                    auth_info = self.auth_hierarchy.get_auth_info(urn)
                    site_record = RegAuthority(hrn=site_hrn, gid=auth_info.get_gid_object(),
                                               pointer= -1,
                                               authority=get_authority(site_hrn))
                    site_record.just_created()
                    global_dbsession.add(site_record)
                    global_dbsession.commit()
                    self.logger.info("DummyImporter: imported authority (site) : %s" % site_record)
                    self.remember_record (site_record)
                except:
                    # if the site import fails then there is no point in trying to import the
                    # site's child records (node, slices, persons), so skip them.
                    self.logger.log_exc("DummyImporter: failed to import site. Skipping child records")
                    continue
            else:
                # xxx update the record ...
                pass
            site_record.stale=False
            # import node records
            for node in nodes:
                site_auth = get_authority(site_hrn)
                site_name = site['name']
                node_hrn = hostname_to_hrn(site_auth, site_name, node['hostname'])
                # xxx this sounds suspicious
                if len(node_hrn) > 64: node_hrn = node_hrn[:64]
                node_record = self.locate_by_type_hrn ( 'node', node_hrn )
                if not node_record:
                    try:
                        pkey = Keypair(create=True)
                        urn = hrn_to_urn(node_hrn, 'node')
                        node_gid = self.auth_hierarchy.create_gid(urn, create_uuid(), pkey)
                        node_record = RegNode (hrn=node_hrn, gid=node_gid,
                                               pointer =node['node_id'],
                                               authority=get_authority(node_hrn))
                        node_record.just_created()
                        global_dbsession.add(node_record)
                        global_dbsession.commit()
                        self.logger.info("DummyImporter: imported node: %s" % node_record)
                        self.remember_record (node_record)
                    except:
                        self.logger.log_exc("DummyImporter: failed to import node")
                else:
                    # xxx update the record ...
                    pass
                node_record.stale=False
            site_pis=[]
            # import users
            for user in users:
                user_hrn = email_to_hrn(site_hrn, user['email'])
                # xxx suspicious again
                if len(user_hrn) > 64: user_hrn = user_hrn[:64]
                user_urn = hrn_to_urn(user_hrn, 'user')
                user_record = self.locate_by_type_hrn ( 'user', user_hrn)
                # return a tuple pubkey (a dummy TB key object) and pkey (a Keypair object)
                def init_user_key (user):
                    pubkey = None
                    pkey = None
                    if user['keys']:
                        # randomly pick first key in set
                        for key in user['keys']:
                            pubkey = key
                            try:
                                pkey = convert_public_key(pubkey)
                                break
                            except:
                                continue
                        if not pkey:
                            self.logger.warn('DummyImporter: unable to convert public key for %s' % user_hrn)
                            pkey = Keypair(create=True)
                    else:
                        # the user has no keys. Creating a random keypair for the user's gid
                        self.logger.warn("DummyImporter: user %s does not have a NITOS public key"%user_hrn)
                        pkey = Keypair(create=True)
                    return (pubkey, pkey)
                # new user
                try:
                    if not user_record:
                        (pubkey,pkey) = init_user_key (user)
                        user_gid = self.auth_hierarchy.create_gid(user_urn, create_uuid(), pkey)
                        user_gid.set_email(user['email'])
                        user_record = RegUser (hrn=user_hrn, gid=user_gid,
                                               pointer=user['user_id'],
                                               authority=get_authority(user_hrn),
                                               email=user['email'])
                        if pubkey:
                            user_record.reg_keys=[RegKey (pubkey)]
                        else:
                            self.logger.warning("No key found for user %s"%user_record)
                        user_record.just_created()
                        global_dbsession.add (user_record)
                        global_dbsession.commit()
                        self.logger.info("DummyImporter: imported person: %s" % user_record)
                        self.remember_record ( user_record )
                    else:
                        # update the record ?
                        # if user's primary key has changed then we need to update the
                        # users gid by forcing an update here
                        sfa_keys = user_record.reg_keys
                        def key_in_list (key,sfa_keys):
                            for reg_key in sfa_keys:
                                if reg_key.key==key: return True
                            return False
                        # is there a new key in Dummy TB ?
                        new_keys=False
                        for key in user['keys']:
                            if not key_in_list (key,sfa_keys):
                                new_keys = True
                        if new_keys:
                            (pubkey,pkey) = init_user_key (user)
                            user_gid = self.auth_hierarchy.create_gid(user_urn, create_uuid(), pkey)
                            if not pubkey:
                                user_record.reg_keys=[]
                            else:
                                user_record.reg_keys=[ RegKey (pubkey)]
                            self.logger.info("DummyImporter: updated person: %s" % user_record)
                    user_record.email = user['email']
                    global_dbsession.commit()
                    user_record.stale=False
                except:
                    self.logger.log_exc("DummyImporter: failed to import user %d %s"%(user['user_id'],user['email']))
            # import slices
            for slice in slices:
                slice_hrn = slicename_to_hrn(site_hrn, slice['slice_name'])
                slice_record = self.locate_by_type_hrn ('slice', slice_hrn)
                if not slice_record:
                    try:
                        pkey = Keypair(create=True)
                        urn = hrn_to_urn(slice_hrn, 'slice')
                        slice_gid = self.auth_hierarchy.create_gid(urn, create_uuid(), pkey)
                        slice_record = RegSlice (hrn=slice_hrn, gid=slice_gid,
                                                 pointer=slice['slice_id'],
                                                 authority=get_authority(slice_hrn))
                        slice_record.just_created()
                        global_dbsession.add(slice_record)
                        global_dbsession.commit()
                        self.logger.info("DummyImporter: imported slice: %s" % slice_record)
                        self.remember_record ( slice_record )
                    except:
                        self.logger.log_exc("DummyImporter: failed to import slice")
                else:
                    # xxx update the record ...
                    self.logger.warning ("Slice update not yet implemented")
                    pass
                # record current users affiliated with the slice
                slice_record.reg_researchers = \
                    [ self.locate_by_type_pointer ('user',user_id) for user_id in slice['user_ids'] ]
                global_dbsession.commit()
                slice_record.stale=False
        ### remove stale records
        # special records must be preserved
        system_hrns = [interface_hrn, root_auth, interface_hrn + '.slicemanager']
        for record in all_records:
            if record.hrn in system_hrns:
                record.stale=False
            if record.peer_authority:
                record.stale=False
        for record in all_records:
            try: stale=record.stale
            except:
                stale=True
                self.logger.warning("stale not found with %s"%record)
            if stale:
                self.logger.info("DummyImporter: deleting stale record: %s" % record)
                global_dbsession.delete(record)
        global_dbsession.commit()
| mit |
dgzurita/odoo | addons/website_sale/models/product.py | 262 | 10108 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2013-Today OpenERP SA (<http://www.openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import tools
from openerp.osv import osv, fields
class product_style(osv.Model):
    """A reusable website style for products: a display name plus the
    HTML classes it expands to in the shop templates."""
    _name = "product.style"
    _columns = {
        'name' : fields.char('Style Name', required=True),
        'html_class': fields.char('HTML Classes'),
    }
class product_pricelist(osv.Model):
    """Extend pricelists with a promotional code that shoppers can
    enter on the website to activate this pricelist."""
    _inherit = "product.pricelist"
    _columns = {
        'code': fields.char('Promotional Code'),
    }
class product_public_category(osv.osv):
    """Hierarchical category used to group products in the public web shop
    (distinct from the internal product.category used for accounting)."""
    _name = "product.public.category"
    _description = "Public Category"
    _order = "sequence, name"

    _constraints = [
        (osv.osv._check_recursion, 'Error ! You cannot create recursive categories.', ['parent_id'])
    ]

    def name_get(self, cr, uid, ids, context=None):
        """Return (id, display name) pairs, the display name being the full
        ' / '-separated path from the root category down to this one."""
        result = []
        for category in self.browse(cr, uid, ids, context=context):
            path = [category.name]
            ancestor = category.parent_id
            while ancestor:
                path.append(ancestor.name)
                ancestor = ancestor.parent_id
            result.append((category.id, ' / '.join(reversed(path))))
        return result

    def _name_get_fnc(self, cr, uid, ids, prop, unknow_none, context=None):
        # Function-field wrapper exposing name_get() as an {id: name} mapping.
        return dict(self.name_get(cr, uid, ids, context=context))

    def _get_image(self, cr, uid, ids, name, args, context=None):
        # Compute the resized image variants for every requested record.
        # Records missing from browse() keep the False placeholder.
        images = dict.fromkeys(ids, False)
        for record in self.browse(cr, uid, ids, context=context):
            images[record.id] = tools.image_get_resized_images(record.image)
        return images

    def _set_image(self, cr, uid, id, name, value, args, context=None):
        # Normalize to the "big" size before storing; the smaller variants
        # are derived on read by _get_image().
        normalized = tools.image_resize_image_big(value)
        return self.write(cr, uid, [id], {'image': normalized}, context=context)

    _columns = {
        'name': fields.char('Name', required=True, translate=True),
        'complete_name': fields.function(_name_get_fnc, type="char", string='Name'),
        'parent_id': fields.many2one('product.public.category','Parent Category', select=True),
        'child_id': fields.one2many('product.public.category', 'parent_id', string='Children Categories'),
        'sequence': fields.integer('Sequence', help="Gives the sequence order when displaying a list of product categories."),
        # NOTE: there is no 'default image', because by default we don't show thumbnails for categories. However if we have a thumbnail
        # for at least one category, then we display a default image on the other, so that the buttons have consistent styling.
        # In this case, the default image is set by the js code.
        # NOTE2: image: all image fields are base64 encoded and PIL-supported
        'image': fields.binary("Image",
            help="This field holds the image used as image for the category, limited to 1024x1024px."),
        'image_medium': fields.function(_get_image, fnct_inv=_set_image,
            string="Medium-sized image", type="binary", multi="_get_image",
            store={
                'product.public.category': (lambda self, cr, uid, ids, c={}: ids, ['image'], 10),
            },
            help="Medium-sized image of the category. It is automatically "\
                 "resized as a 128x128px image, with aspect ratio preserved. "\
                 "Use this field in form views or some kanban views."),
        'image_small': fields.function(_get_image, fnct_inv=_set_image,
            string="Smal-sized image", type="binary", multi="_get_image",
            store={
                'product.public.category': (lambda self, cr, uid, ids, c={}: ids, ['image'], 10),
            },
            help="Small-sized image of the category. It is automatically "\
                 "resized as a 64x64px image, with aspect ratio preserved. "\
                 "Use this field anywhere a small image is required."),
    }
class product_template(osv.Model):
    """Website/e-commerce extensions of product templates: publication flag,
    shop URL, grid display size/sequence and cross-selling relations."""
    _inherit = ["product.template", "website.seo.metadata"]
    _order = 'website_published desc, website_sequence desc, name'
    _name = 'product.template'
    _mail_post_access = 'read'

    def _website_url(self, cr, uid, ids, field_name, arg, context=None):
        """Function field: the shop page URL of each template."""
        res = dict.fromkeys(ids, '')
        for product in self.browse(cr, uid, ids, context=context):
            res[product.id] = "/shop/product/%s" % (product.id,)
        return res

    _columns = {
        # TODO FIXME tde: when website_mail/mail_thread.py inheritance work -> this field won't be necessary
        'website_message_ids': fields.one2many(
            'mail.message', 'res_id',
            domain=lambda self: [
                '&', ('model', '=', self._name), ('type', '=', 'comment')
            ],
            string='Website Comments',
        ),
        'website_published': fields.boolean('Available in the website', copy=False),
        'website_description': fields.html('Description for the website', translate=True),
        'alternative_product_ids': fields.many2many('product.template','product_alternative_rel','src_id','dest_id', string='Alternative Products', help='Appear on the product page'),
        'accessory_product_ids': fields.many2many('product.product','product_accessory_rel','src_id','dest_id', string='Accessory Products', help='Appear on the shopping cart'),
        'website_size_x': fields.integer('Size X'),
        'website_size_y': fields.integer('Size Y'),
        'website_style_ids': fields.many2many('product.style', string='Styles'),
        'website_sequence': fields.integer('Sequence', help="Determine the display order in the Website E-commerce"),
        'website_url': fields.function(_website_url, string="Website url", type="char"),
        'public_categ_ids': fields.many2many('product.public.category', string='Public Category', help="Those categories are used to group similar products for e-commerce."),
    }

    def _defaults_website_sequence(self, cr, uid, *l, **kwargs):
        # New products are placed after the current highest sequence.
        cr.execute('SELECT MAX(website_sequence)+1 FROM product_template')
        next_sequence = cr.fetchone()[0] or 0
        return next_sequence

    _defaults = {
        'website_size_x': 1,
        'website_size_y': 1,
        'website_sequence': _defaults_website_sequence,
        'website_published': False,
    }

    def set_sequence_top(self, cr, uid, ids, context=None):
        """Move the products to the top of the website listing."""
        cr.execute('SELECT MAX(website_sequence) FROM product_template')
        max_sequence = cr.fetchone()[0] or 0
        return self.write(cr, uid, ids, {'website_sequence': max_sequence + 1}, context=context)

    def set_sequence_bottom(self, cr, uid, ids, context=None):
        """Move the products to the bottom of the website listing."""
        cr.execute('SELECT MIN(website_sequence) FROM product_template')
        min_sequence = cr.fetchone()[0] or 0
        return self.write(cr, uid, ids, {'website_sequence': min_sequence -1}, context=context)

    def set_sequence_up(self, cr, uid, ids, context=None):
        """Swap this product's sequence with the next higher published one.

        Fix: values are now passed as query parameters instead of being
        interpolated into the SQL string with ``%`` (an SQL-injection-prone
        pattern that also relied on Python's repr of booleans).
        """
        product = self.browse(cr, uid, ids[0], context=context)
        cr.execute(""" SELECT id, website_sequence FROM product_template
            WHERE website_sequence > %s AND website_published = %s
            ORDER BY website_sequence ASC LIMIT 1""",
            (product.website_sequence, product.website_published))
        prev = cr.fetchone()
        if prev:
            self.write(cr, uid, [prev[0]], {'website_sequence': product.website_sequence}, context=context)
            return self.write(cr, uid, [ids[0]], {'website_sequence': prev[1]}, context=context)
        else:
            # Nothing above this product: promote it to the very top.
            return self.set_sequence_top(cr, uid, ids, context=context)

    def set_sequence_down(self, cr, uid, ids, context=None):
        """Swap this product's sequence with the next lower published one."""
        product = self.browse(cr, uid, ids[0], context=context)
        cr.execute(""" SELECT id, website_sequence FROM product_template
            WHERE website_sequence < %s AND website_published = %s
            ORDER BY website_sequence DESC LIMIT 1""",
            (product.website_sequence, product.website_published))
        # Renamed from 'next' to avoid shadowing the builtin next().
        following = cr.fetchone()
        if following:
            self.write(cr, uid, [following[0]], {'website_sequence': product.website_sequence}, context=context)
            return self.write(cr, uid, [ids[0]], {'website_sequence': following[1]}, context=context)
        else:
            # Nothing below this product: demote it to the very bottom.
            return self.set_sequence_bottom(cr, uid, ids, context=context)
class product_product(osv.Model):
    """Expose the shop URL on product variants; variants share the page of
    their template, hence the template id in the URL."""
    _inherit = "product.product"

    def _website_url(self, cr, uid, ids, field_name, arg, context=None):
        # Function field: map every variant id to its template's shop URL.
        urls = {}
        for variant in self.browse(cr, uid, ids, context=context):
            urls[variant.id] = "/shop/product/%s" % (variant.product_tmpl_id.id,)
        return urls

    _columns = {
        'website_url': fields.function(_website_url, string="Website url", type="char"),
    }
class product_attribute(osv.Model):
    """Add a widget type controlling how the attribute is rendered on the
    website product page (radio buttons, dropdown, color swatch or hidden)."""
    _inherit = "product.attribute"
    _columns = {
        'type': fields.selection([('radio', 'Radio'), ('select', 'Select'), ('color', 'Color'), ('hidden', 'Hidden')], string="Type"),
    }
    _defaults = {
        # Radio buttons are the default rendering.
        'type': lambda *a: 'radio',
    }
class product_attribute_value(osv.Model):
    """Attribute values gain an HTML color used when the attribute's widget
    type is 'color'."""
    _inherit = "product.attribute.value"
    _columns = {
        'color': fields.char("HTML Color Index", help="Here you can set a specific HTML color index (e.g. #ff0000) to display the color on the website if the attibute type is 'Color'."),
    }
| agpl-3.0 |
mrnamingo/enigma2-test | lib/python/Screens/Menu.py | 11 | 9359 | from Screens.Screen import Screen
from Screens.ParentalControlSetup import ProtectedScreen
from Components.Sources.List import List
from Components.ActionMap import NumberActionMap
from Components.Sources.StaticText import StaticText
from Components.config import configfile
from Components.PluginComponent import plugins
from Components.config import config
from Components.SystemInfo import SystemInfo
from Tools.BoundFunction import boundFunction
from Tools.Directories import resolveFilename, SCOPE_SKIN
import xml.etree.cElementTree
from Screens.Setup import Setup, getSetupTitle, getSetupTitleLevel
mainmenu = _("Main menu")

# Read the menu structure from the skin's menu.xml.
# A context manager guarantees the handle is closed even if parsing raises
# (the original open()/close() pair leaked the handle on error and shadowed
# the builtin 'file').
with open(resolveFilename(SCOPE_SKIN, 'menu.xml'), 'r') as menu_file:
    mdom = xml.etree.cElementTree.parse(menu_file)
class MenuUpdater:
    """Registry of menu entries added at runtime (e.g. by plugins), keyed by
    menu id.  Each entry is [text, position, module, screen, weight]."""

    def __init__(self):
        # menu id -> list of [text, position, module, screen, weight]
        self.updatedMenuItems = {}

    def addMenuItem(self, id, pos, text, module, screen, weight):
        """Register an extra entry for the menu identified by 'id'."""
        # setdefault replaces the explicit "create list if missing" dance.
        self.updatedMenuItems.setdefault(id, []).append([text, pos, module, screen, weight])

    def delMenuItem(self, id, pos, text, module, screen, weight):
        """Remove a previously registered entry (raises if never added)."""
        self.updatedMenuItems[id].remove([text, pos, module, screen, weight])

    def updatedMenuAvailable(self, id):
        # dict.has_key() was removed in Python 3; 'in' works in both 2 and 3.
        return id in self.updatedMenuItems

    def getUpdatedMenu(self, id):
        return self.updatedMenuItems[id]

menuupdater = MenuUpdater()
class MenuSummary(Screen):
    """Summary (front-display) screen for Menu; inherits all behavior from
    Screen, layout is provided by the skin."""
    pass
class Menu(Screen, ProtectedScreen):
ALLOW_SUSPEND = True
def okbuttonClick(self):
# print "okbuttonClick"
selection = self["menu"].getCurrent()
if selection is not None:
selection[1]()
def execText(self, text):
exec text
def runScreen(self, arg):
# arg[0] is the module (as string)
# arg[1] is Screen inside this module
# plus possible arguments, as
# string (as we want to reference
# stuff which is just imported)
# FIXME. somehow
if arg[0] != "":
exec "from " + arg[0] + " import *"
self.openDialog(*eval(arg[1]))
def nothing(self): #dummy
pass
def gotoStandby(self, *res):
from Screens.Standby import Standby2
self.session.open(Standby2)
self.close(True)
def openDialog(self, *dialog): # in every layer needed
self.session.openWithCallback(self.menuClosed, *dialog)
def openSetup(self, dialog):
self.session.openWithCallback(self.menuClosed, Setup, dialog)
def addMenu(self, destList, node):
requires = node.get("requires")
if requires:
if requires[0] == '!':
if SystemInfo.get(requires[1:], False):
return
elif not SystemInfo.get(requires, False):
return
MenuTitle = _(node.get("text", "??").encode("UTF-8"))
entryID = node.get("entryID", "undefined")
weight = node.get("weight", 50)
x = node.get("flushConfigOnClose")
if x:
a = boundFunction(self.session.openWithCallback, self.menuClosedWithConfigFlush, Menu, node)
else:
a = boundFunction(self.session.openWithCallback, self.menuClosed, Menu, node)
#TODO add check if !empty(node.childNodes)
destList.append((MenuTitle, a, entryID, weight))
def menuClosedWithConfigFlush(self, *res):
configfile.save()
self.menuClosed(*res)
def menuClosed(self, *res):
if res and res[0]:
self.close(True)
def addItem(self, destList, node):
requires = node.get("requires")
if requires:
if requires[0] == '!':
if SystemInfo.get(requires[1:], False):
return
elif not SystemInfo.get(requires, False):
return
configCondition = node.get("configcondition")
if configCondition and not eval(configCondition + ".value"):
return
item_text = node.get("text", "").encode("UTF-8")
entryID = node.get("entryID", "undefined")
weight = node.get("weight", 50)
for x in node:
if x.tag == 'screen':
module = x.get("module")
screen = x.get("screen")
if screen is None:
screen = module
# print module, screen
if module:
module = "Screens." + module
else:
module = ""
# check for arguments. they will be appended to the
# openDialog call
args = x.text or ""
screen += ", " + args
destList.append((_(item_text or "??"), boundFunction(self.runScreen, (module, screen)), entryID, weight))
return
elif x.tag == 'plugin':
extensions = x.get("extensions")
system = x.get("system")
screen = x.get("screen")
if extensions:
module = extensions
elif system:
module = system
if screen is None:
screen = module
if extensions:
module = "Plugins.Extensions." + extensions + '.plugin'
elif system:
module = "Plugins.SystemPlugins." + system + '.plugin'
else:
module = ""
# check for arguments. they will be appended to the
# openDialog call
args = x.text or ""
screen += ", " + args
destList.append((_(item_text or "??"), boundFunction(self.runScreen, (module, screen)), entryID, weight))
return
elif x.tag == 'code':
destList.append((_(item_text or "??"), boundFunction(self.execText, x.text), entryID, weight))
return
elif x.tag == 'setup':
id = x.get("id")
if item_text == "":
if getSetupTitleLevel(id) > config.usage.setup_level.index:
return
item_text = _(getSetupTitle(id))
else:
item_text = _(item_text)
destList.append((item_text, boundFunction(self.openSetup, id), entryID, weight))
return
destList.append((item_text, self.nothing, entryID, weight))
def __init__(self, session, parent):
Screen.__init__(self, session)
list = []
menuID = None
for x in parent: #walk through the actual nodelist
if not x.tag:
continue
if x.tag == 'item':
item_level = int(x.get("level", 0))
if item_level <= config.usage.setup_level.index:
self.addItem(list, x)
count += 1
elif x.tag == 'menu':
item_level = int(x.get("level", 0))
if item_level <= config.usage.setup_level.index:
self.addMenu(list, x)
count += 1
elif x.tag == "id":
menuID = x.get("val")
count = 0
if menuID is not None:
# menuupdater?
if menuupdater.updatedMenuAvailable(menuID):
for x in menuupdater.getUpdatedMenu(menuID):
if x[1] == count:
list.append((x[0], boundFunction(self.runScreen, (x[2], x[3] + ", ")), x[4]))
count += 1
self.menuID = menuID
if config.ParentalControl.configured.value:
ProtectedScreen.__init__(self)
if menuID is not None:
# plugins
for l in plugins.getPluginsForMenu(menuID):
# check if a plugin overrides an existing menu
plugin_menuid = l[2]
for x in list:
if x[2] == plugin_menuid:
list.remove(x)
break
if len(l) > 4 and l[4]:
list.append((l[0], boundFunction(l[1], self.session, self.close), l[2], l[3] or 50))
else:
list.append((l[0], boundFunction(l[1], self.session), l[2], l[3] or 50))
# for the skin: first try a menu_<menuID>, then Menu
self.skinName = [ ]
if menuID is not None:
self.skinName.append("menu_" + menuID)
self.skinName.append("Menu")
self.menuID = menuID
ProtectedScreen.__init__(self)
# Sort by Weight
if config.usage.sort_menus.value:
list.sort()
else:
list.sort(key=lambda x: int(x[3]))
self["menu"] = List(list)
self["actions"] = NumberActionMap(["OkCancelActions", "MenuActions", "NumberActions"],
{
"ok": self.okbuttonClick,
"cancel": self.closeNonRecursive,
"menu": self.closeRecursive,
"1": self.keyNumberGlobal,
"2": self.keyNumberGlobal,
"3": self.keyNumberGlobal,
"4": self.keyNumberGlobal,
"5": self.keyNumberGlobal,
"6": self.keyNumberGlobal,
"7": self.keyNumberGlobal,
"8": self.keyNumberGlobal,
"9": self.keyNumberGlobal
})
a = parent.get("title", "").encode("UTF-8") or None
a = a and _(a)
if a is None:
a = _(parent.get("text", "").encode("UTF-8"))
self["title"] = StaticText(a)
Screen.setTitle(self, a)
self.menu_title = a
def isProtected(self):
if config.ParentalControl.setuppinactive.value:
if config.ParentalControl.config_sections.main_menu.value and self.menuID == "mainmenu":
return True
elif config.ParentalControl.config_sections.configuration.value and self.menuID == "setup":
return True
elif config.ParentalControl.config_sections.timer_menu.value and self.menuID == "timermenu":
return True
elif config.ParentalControl.config_sections.standby_menu.value and self.menuID == "shutdown":
return True
def keyNumberGlobal(self, number):
# print "menu keyNumber:", number
# Calculate index
number -= 1
if len(self["menu"].list) > number:
self["menu"].setIndex(number)
self.okbuttonClick()
def closeNonRecursive(self):
self.close(False)
def closeRecursive(self):
self.close(True)
def createSummary(self):
return MenuSummary
def isProtected(self):
if config.ParentalControl.setuppinactive.value:
if config.ParentalControl.config_sections.main_menu.value:
return self.menuID == "mainmenu"
elif config.ParentalControl.config_sections.configuration.value and self.menuID == "setup":
return True
elif config.ParentalControl.config_sections.timer_menu.value and self.menuID == "timermenu":
return True
elif config.ParentalControl.config_sections.standby_menu.value and self.menuID == "shutdown":
return True
class MainMenu(Menu):
    """Top-level menu screen; forces the plain 'Menu' skin name."""
    #add file load functions for the xml-file
    def __init__(self, *x):
        # NOTE(review): Menu.__init__ reassigns self.skinName to a list, so
        # this assignment appears to be overwritten — confirm it is needed.
        self.skinName = "Menu"
        Menu.__init__(self, *x)
| gpl-2.0 |
overcastcloud/suds-jurko | suds/sax/date.py | 9 | 14022 | # -*- coding: utf-8 -*-
# This program is free software; you can redistribute it and/or modify
# it under the terms of the (LGPL) GNU Lesser General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library Lesser General Public License for more details at
# ( http://www.gnu.org/licenses/lgpl.html ).
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
# written by: Jurko Gospodnetić ( jurko.gospodnetic@pke.hr )
# based on code by: Glen Walker
# based on code by: Nathan Van Gheem ( vangheem@gmail.com )
"""Classes for conversion between XML dates and Python objects."""
from suds import UnicodeMixin
import datetime
import re
import time
# Regular-expression snippets for the lenient ISO 8601 subset accepted by the
# xsd:date/xsd:time/xsd:dateTime parsers below.  All components are captured
# as named groups consumed by the _*_from_match() helpers.
_SNIPPET_DATE = \
    r"(?P<year>\d{1,})-(?P<month>\d{1,2})-(?P<day>\d{1,2})"
# Time of day with an optional fractional-second part of arbitrary precision.
_SNIPPET_TIME = \
    r"(?P<hour>\d{1,2}):(?P<minute>[0-5]?[0-9]):(?P<second>[0-5]?[0-9])" \
    r"(?:\.(?P<subsecond>\d+))?"
# Timezone: either a +/-HH[:MM] offset or a literal 'Z'/'z' meaning UTC.
_SNIPPET_ZONE = \
    r"(?:(?P<tz_sign>[-+])(?P<tz_hour>\d{1,2})" \
    r"(?::(?P<tz_minute>[0-5]?[0-9]))?)" \
    r"|(?P<tz_utc>[Zz])"
# Fully anchored patterns; the timezone part is always optional.  A datetime
# separates date and time with 'T' or a space.
_PATTERN_DATE = r"^%s(?:%s)?$" % (_SNIPPET_DATE, _SNIPPET_ZONE)
_PATTERN_TIME = r"^%s(?:%s)?$" % (_SNIPPET_TIME, _SNIPPET_ZONE)
_PATTERN_DATETIME = r"^%s[T ]%s(?:%s)?$" % (_SNIPPET_DATE, _SNIPPET_TIME,
    _SNIPPET_ZONE)
_RE_DATE = re.compile(_PATTERN_DATE)
_RE_TIME = re.compile(_PATTERN_TIME)
_RE_DATETIME = re.compile(_PATTERN_DATETIME)
class Date(UnicodeMixin):
    """
    An XML date object supporting the xsd:date datatype.
    @ivar value: The object value.
    @type value: B{datetime}.I{date}
    """

    def __init__(self, value):
        """
        @param value: The date value of the object.
        @type value: (datetime.date|str)
        @raise ValueError: When I{value} is invalid.
        """
        # datetime.datetime is a subclass of datetime.date, so it must be
        # tested first in order to strip its time portion.
        if isinstance(value, datetime.datetime):
            self.value = value.date()
            return
        if isinstance(value, datetime.date):
            self.value = value
            return
        if isinstance(value, basestring):
            self.value = self.__parse(value)
            return
        raise ValueError("invalid type for Date(): %s" % type(value))

    @staticmethod
    def __parse(value):
        """
        Parse a string holding an xsd:date value.

        Supports the subset of ISO8601 used by xsd:date, but is lenient about
        the accepted syntax.  Any timezone suffix is parsed but ignored: it is
        meaningless without a time and B{datetime}.I{date} cannot carry it.

        @param value: A date string.
        @type value: str
        @return: A date object.
        @rtype: B{datetime}.I{date}
        """
        match = _RE_DATE.match(value)
        if not match:
            raise ValueError("date data has invalid format '%s'" % (value,))
        return _date_from_match(match)

    def __unicode__(self):
        return self.value.isoformat()
class DateTime(UnicodeMixin):
    """
    An XML datetime object supporting the xsd:dateTime datatype.
    @ivar value: The object value.
    @type value: B{datetime}.I{datetime}
    """

    def __init__(self, value):
        """
        @param value: The datetime value of the object.
        @type value: (datetime.datetime|str)
        @raise ValueError: When I{value} is invalid.
        """
        if isinstance(value, datetime.datetime):
            self.value = value
        elif isinstance(value, basestring):
            self.value = self.__parse(value)
        else:
            raise ValueError("invalid type for DateTime(): %s" % type(value))

    @staticmethod
    def __parse(value):
        """
        Parse the string datetime.
        Supports the subset of ISO8601 used by xsd:dateTime, but is lenient
        with what is accepted, handling most reasonable syntax.
        Subsecond information is rounded to microseconds due to a restriction
        in the python datetime.datetime/time implementation.
        @param value: A datetime string.
        @type value: str
        @return: A datetime object.
        @rtype: B{datetime}.I{datetime}
        """
        match_result = _RE_DATETIME.match(value)
        if match_result is None:
            # Fix: the message previously mislabeled the input as 'date data'.
            raise ValueError("datetime data has invalid format '%s'" % (value,))
        date = _date_from_match(match_result)
        time, round_up = _time_from_match(match_result)
        tzinfo = _tzinfo_from_match(match_result)
        value = datetime.datetime.combine(date, time)
        value = value.replace(tzinfo=tzinfo)
        if round_up:
            # The subsecond part rounded up; carry into the full datetime so
            # 23:59:59.9999995 correctly rolls over to the next day.
            value += datetime.timedelta(microseconds=1)
        return value

    def __unicode__(self):
        return self.value.isoformat()
class Time(UnicodeMixin):
    """
    An XML time object supporting the xsd:time datatype.
    @ivar value: The object value.
    @type value: B{datetime}.I{time}
    """

    def __init__(self, value):
        """
        @param value: The time value of the object.
        @type value: (datetime.time|str)
        @raise ValueError: When I{value} is invalid.
        """
        if isinstance(value, datetime.time):
            self.value = value
        elif isinstance(value, basestring):
            self.value = self.__parse(value)
        else:
            raise ValueError("invalid type for Time(): %s" % type(value))

    @staticmethod
    def __parse(value):
        """
        Parse the string time.
        Supports the subset of ISO8601 used by xsd:time, but is lenient with
        what is accepted, handling most reasonable syntax.
        Subsecond information is rounded to microseconds due to a restriction
        in the python datetime.time implementation.
        @param value: A time string.
        @type value: str
        @return: A time object.
        @rtype: B{datetime}.I{time}
        """
        match_result = _RE_TIME.match(value)
        if match_result is None:
            # Fix: the message previously mislabeled the input as 'date data'.
            raise ValueError("time data has invalid format '%s'" % (value,))
        time, round_up = _time_from_match(match_result)
        tzinfo = _tzinfo_from_match(match_result)
        if round_up:
            # Rounding past .999999 wraps silently to 00:00:00.0 because a
            # plain time object has no date to carry into.
            time = _bump_up_time_by_microsecond(time)
        return time.replace(tzinfo=tzinfo)

    def __unicode__(self):
        return self.value.isoformat()
class FixedOffsetTimezone(datetime.tzinfo, UnicodeMixin):
    """
    A timezone with a fixed offset and no daylight savings adjustment.
    http://docs.python.org/library/datetime.html#datetime.tzinfo
    """

    def __init__(self, offset):
        """
        @param offset: The fixed offset of the timezone.
        @type offset: I{int} or B{datetime}.I{timedelta}
        """
        # An int is interpreted as a whole number of hours.
        if type(offset) == int:
            offset = datetime.timedelta(hours=offset)
        elif type(offset) != datetime.timedelta:
            raise TypeError("timezone offset must be an int or "
                "datetime.timedelta")
        # xsd timezone indicators only have minute precision, and so does
        # tzname()'s "%+03d:%02d" rendering below.
        if offset.microseconds or (offset.seconds % 60 != 0):
            raise ValueError("timezone offset must have minute precision")
        self.__offset = offset

    def dst(self, dt):
        """
        http://docs.python.org/library/datetime.html#datetime.tzinfo.dst
        """
        return datetime.timedelta(0)

    def utcoffset(self, dt):
        """
        http://docs.python.org/library/datetime.html#datetime.tzinfo.utcoffset
        """
        return self.__offset

    def tzname(self, dt):
        """
        http://docs.python.org/library/datetime.html#datetime.tzinfo.tzname
        """
        # total_seconds was introduced in Python 2.7
        if hasattr(self.__offset, "total_seconds"):
            total_seconds = self.__offset.total_seconds()
        else:
            total_seconds = (self.__offset.days * 24 * 60 * 60) + \
                (self.__offset.seconds)
        # Decompose the offset into hours/minutes/seconds.  Note that for
        # negative offsets floor-division pushes the sign into 'hours' only.
        hours = total_seconds // (60 * 60)
        total_seconds -= hours * 60 * 60
        minutes = total_seconds // 60
        total_seconds -= minutes * 60
        seconds = total_seconds // 1
        total_seconds -= seconds
        # NOTE(review): the constructor enforces minute precision, so the
        # seconds branch below appears unreachable — kept for safety.
        if seconds:
            return "%+03d:%02d:%02d" % (hours, minutes, seconds)
        return "%+03d:%02d" % (hours, minutes)

    def __unicode__(self):
        return "FixedOffsetTimezone %s" % (self.tzname(None),)
class UtcTimezone(FixedOffsetTimezone):
    """
    The UTC timezone.
    http://docs.python.org/library/datetime.html#datetime.tzinfo
    """

    def __init__(self):
        # UTC is simply a fixed offset of zero.
        FixedOffsetTimezone.__init__(self, datetime.timedelta(0))

    def tzname(self, dt):
        """
        http://docs.python.org/library/datetime.html#datetime.tzinfo.tzname
        """
        return "UTC"

    def __unicode__(self):
        return "UtcTimezone"
class LocalTimezone(datetime.tzinfo):
    """
    The local timezone of the operating system.
    http://docs.python.org/library/datetime.html#datetime.tzinfo
    """

    def __init__(self):
        # time.timezone / time.altzone are seconds WEST of UTC, hence the
        # negation to obtain a conventional UTC offset.
        self.__offset = datetime.timedelta(seconds=-time.timezone)
        self.__dst_offset = None
        if time.daylight:
            self.__dst_offset = datetime.timedelta(seconds=-time.altzone)

    def dst(self, dt):
        """
        http://docs.python.org/library/datetime.html#datetime.tzinfo.dst
        """
        if self.__is_daylight_time(dt):
            return self.__dst_offset - self.__offset
        return datetime.timedelta(0)

    def tzname(self, dt):
        """
        http://docs.python.org/library/datetime.html#datetime.tzinfo.tzname
        """
        # time.tzname is a (standard name, DST name) pair.
        if self.__is_daylight_time(dt):
            return time.tzname[1]
        return time.tzname[0]

    def utcoffset(self, dt):
        """
        http://docs.python.org/library/datetime.html#datetime.tzinfo.utcoffset
        """
        if self.__is_daylight_time(dt):
            return self.__dst_offset
        return self.__offset

    def __is_daylight_time(self, dt):
        # Ask the C library whether DST is in effect at the given local time;
        # the naive round-trip through mktime()/localtime() fills tm_isdst.
        if not time.daylight:
            return False
        time_tuple = dt.replace(tzinfo=None).timetuple()
        time_tuple = time.localtime(time.mktime(time_tuple))
        return time_tuple.tm_isdst > 0

    def __unicode__(self):
        dt = datetime.datetime.now()
        return "LocalTimezone %s offset: %s dst: %s" % (self.tzname(dt),
            self.utcoffset(dt), self.dst(dt))
def _bump_up_time_by_microsecond(time):
    """
    Helper function bumping up the given datetime.time by a microsecond,
    cycling around silently to 00:00:00.0 in case of an overflow.
    @param time: Time object.
    @type time: B{datetime}.I{time}
    @return: Time object.
    @rtype: B{datetime}.I{time}
    """
    # Anchor the time on an arbitrary date so timedelta arithmetic (which
    # plain time objects do not support) can be used, then drop the date.
    anchored = datetime.datetime.combine(datetime.date(2000, 1, 1), time)
    return (anchored + datetime.timedelta(microseconds=1)).time()
def _date_from_match(match_object):
    """
    Build a date object from a regular expression match.
    The match is expected to come from _RE_DATE or _RE_DATETIME, i.e. to
    contain named 'year', 'month' & 'day' groups.
    @param match_object: The regular expression match.
    @type match_object: B{re}.I{MatchObject}
    @return: A date object.
    @rtype: B{datetime}.I{date}
    """
    year, month, day = (
        int(match_object.group(part)) for part in ("year", "month", "day"))
    return datetime.date(year, month, day)
def _time_from_match(match_object):
    """
    Build a time object from a regular expression match.
    Returns the time object plus a flag telling whether the caller should
    bump the result up by one microsecond (i.e. whether the 7th and further
    fractional digits rounded up).  Subsecond data is truncated to
    microsecond resolution because that is all datetime.time can store.
    The match is expected to come from _RE_DATETIME or _RE_TIME.
    @param match_object: The regular expression match.
    @type match_object: B{re}.I{MatchObject}
    @return: Time object + rounding flag.
    @rtype: tuple of B{datetime}.I{time} and bool
    """
    hour = int(match_object.group("hour"))
    minute = int(match_object.group("minute"))
    second = int(match_object.group("second"))
    fraction = match_object.group("subsecond")
    if not fraction:
        return datetime.time(hour, minute, second), False
    # Round half-up on the 7th digit; pad shorter fractions to microseconds.
    round_up = len(fraction) > 6 and int(fraction[6]) >= 5
    microsecond = int(fraction[:6].ljust(6, "0"))
    return datetime.time(hour, minute, second, microsecond), round_up
def _tzinfo_from_match(match_object):
    """
    Build a timezone information object from a regular expression match.
    The match is expected to come from _RE_DATE, _RE_DATETIME or _RE_TIME.
    Returns None when the matched text carried no timezone indicator.
    @param match_object: The regular expression match.
    @type match_object: B{re}.I{MatchObject}
    @return: A timezone information object.
    @rtype: B{datetime}.I{tzinfo}
    """
    if match_object.group("tz_utc"):
        return UtcTimezone()
    sign = match_object.group("tz_sign")
    if not sign:
        return None
    hours = int(match_object.group("tz_hour") or 0)
    minutes = int(match_object.group("tz_minute") or 0)
    # -00:00 and +00:00 both denote UTC.
    if not hours and not minutes:
        return UtcTimezone()
    # Python limitation - timezone offsets larger than one day (in absolute)
    # will cause operations depending on tzinfo.utcoffset() to fail, e.g.
    # comparing two timezone aware datetime.datetime/time objects.
    if hours >= 24:
        raise ValueError("timezone indicator too large")
    offset = datetime.timedelta(hours=hours, minutes=minutes)
    if sign == "-":
        offset = -offset
    return FixedOffsetTimezone(offset)
| lgpl-3.0 |
swpease/Flavify | flavors/migrations/0004_auto_20170510_1549.py | 1 | 1557 | # -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2017-05-10 22:49
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Create the Combination model and alter two existing fields.

    Auto-generated by Django 1.11 on 2017-05-10 22:49.
    """

    # Must be applied after migration 0003 of the flavors app.
    dependencies = [
        ('flavors', '0003_auto_20170507_1410'),
    ]

    operations = [
        # New Combination model: counters (tries/likes/dislikes default to 0),
        # an auto-set submission timestamp, and a submittor name.
        migrations.CreateModel(
            name='Combination',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('tries', models.IntegerField(default=0)),
                ('likes', models.IntegerField(default=0)),
                ('dislikes', models.IntegerField(default=0)),
                ('datetime_submitted', models.DateTimeField(auto_now_add=True)),
                ('submittor', models.CharField(default='admin', max_length=100)),
            ],
        ),
        # Allow Ingredient.tastes to be left blank in forms.
        migrations.AlterField(
            model_name='ingredient',
            name='tastes',
            field=models.ManyToManyField(blank=True, to='flavors.Taste'),
        ),
        # Restrict Taste.mouth_taste to a fixed set of choices.
        migrations.AlterField(
            model_name='taste',
            name='mouth_taste',
            field=models.CharField(choices=[('sweet', 'sweet'), ('salty', 'salty'), ('sour', 'sour'), ('bitter', 'bitter'), ('umami', 'umami'), ('spicy', 'spicy'), ('numbing', 'numbing'), ('cooling', 'cooling')], max_length=20),
        ),
        # M2M link from Combination to its Ingredients; added after the
        # CreateModel operation above.
        migrations.AddField(
            model_name='combination',
            name='ingredients',
            field=models.ManyToManyField(to='flavors.Ingredient'),
        ),
    ]
| mit |
srikk595/Multilingual-Search-System-for-tweets | partA/venv/lib/python2.7/site-packages/requests/packages/chardet/jisfreq.py | 3131 | 47315 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
# Sampling from about 20M text materials include literature and computer technology
#
# Japanese frequency table, applied to both S-JIS and EUC-JP
# They are sorted in order.
# 128 --> 0.77094
# 256 --> 0.85710
# 512 --> 0.92635
# 1024 --> 0.97130
# 2048 --> 0.99431
#
# Ideal Distribution Ratio = 0.92635 / (1-0.92635) = 12.58
# Random Distribution Ratio = 512 / (2965+62+83+86-512) = 0.191
#
# Typical Distribution Ratio, 25% of IDR
# Roughly 25% of the Ideal Distribution Ratio computed above (12.58 * 0.25).
JIS_TYPICAL_DISTRIBUTION_RATIO = 3.0
# Char to FreqOrder table
# Number of leading entries in JISCharToFreqOrder that carry frequency
# information; per the comment inside the table, everything past this
# index is of no interest for detection purposes.
JIS_TABLE_SIZE = 4368
JISCharToFreqOrder = (
40, 1, 6, 182, 152, 180, 295,2127, 285, 381,3295,4304,3068,4606,3165,3510, # 16
3511,1822,2785,4607,1193,2226,5070,4608, 171,2996,1247, 18, 179,5071, 856,1661, # 32
1262,5072, 619, 127,3431,3512,3230,1899,1700, 232, 228,1294,1298, 284, 283,2041, # 48
2042,1061,1062, 48, 49, 44, 45, 433, 434,1040,1041, 996, 787,2997,1255,4305, # 64
2108,4609,1684,1648,5073,5074,5075,5076,5077,5078,3687,5079,4610,5080,3927,3928, # 80
5081,3296,3432, 290,2285,1471,2187,5082,2580,2825,1303,2140,1739,1445,2691,3375, # 96
1691,3297,4306,4307,4611, 452,3376,1182,2713,3688,3069,4308,5083,5084,5085,5086, # 112
5087,5088,5089,5090,5091,5092,5093,5094,5095,5096,5097,5098,5099,5100,5101,5102, # 128
5103,5104,5105,5106,5107,5108,5109,5110,5111,5112,4097,5113,5114,5115,5116,5117, # 144
5118,5119,5120,5121,5122,5123,5124,5125,5126,5127,5128,5129,5130,5131,5132,5133, # 160
5134,5135,5136,5137,5138,5139,5140,5141,5142,5143,5144,5145,5146,5147,5148,5149, # 176
5150,5151,5152,4612,5153,5154,5155,5156,5157,5158,5159,5160,5161,5162,5163,5164, # 192
5165,5166,5167,5168,5169,5170,5171,5172,5173,5174,5175,1472, 598, 618, 820,1205, # 208
1309,1412,1858,1307,1692,5176,5177,5178,5179,5180,5181,5182,1142,1452,1234,1172, # 224
1875,2043,2149,1793,1382,2973, 925,2404,1067,1241, 960,1377,2935,1491, 919,1217, # 240
1865,2030,1406,1499,2749,4098,5183,5184,5185,5186,5187,5188,2561,4099,3117,1804, # 256
2049,3689,4309,3513,1663,5189,3166,3118,3298,1587,1561,3433,5190,3119,1625,2998, # 272
3299,4613,1766,3690,2786,4614,5191,5192,5193,5194,2161, 26,3377, 2,3929, 20, # 288
3691, 47,4100, 50, 17, 16, 35, 268, 27, 243, 42, 155, 24, 154, 29, 184, # 304
4, 91, 14, 92, 53, 396, 33, 289, 9, 37, 64, 620, 21, 39, 321, 5, # 320
12, 11, 52, 13, 3, 208, 138, 0, 7, 60, 526, 141, 151,1069, 181, 275, # 336
1591, 83, 132,1475, 126, 331, 829, 15, 69, 160, 59, 22, 157, 55,1079, 312, # 352
109, 38, 23, 25, 10, 19, 79,5195, 61, 382,1124, 8, 30,5196,5197,5198, # 368
5199,5200,5201,5202,5203,5204,5205,5206, 89, 62, 74, 34,2416, 112, 139, 196, # 384
271, 149, 84, 607, 131, 765, 46, 88, 153, 683, 76, 874, 101, 258, 57, 80, # 400
32, 364, 121,1508, 169,1547, 68, 235, 145,2999, 41, 360,3027, 70, 63, 31, # 416
43, 259, 262,1383, 99, 533, 194, 66, 93, 846, 217, 192, 56, 106, 58, 565, # 432
280, 272, 311, 256, 146, 82, 308, 71, 100, 128, 214, 655, 110, 261, 104,1140, # 448
54, 51, 36, 87, 67,3070, 185,2618,2936,2020, 28,1066,2390,2059,5207,5208, # 464
5209,5210,5211,5212,5213,5214,5215,5216,4615,5217,5218,5219,5220,5221,5222,5223, # 480
5224,5225,5226,5227,5228,5229,5230,5231,5232,5233,5234,5235,5236,3514,5237,5238, # 496
5239,5240,5241,5242,5243,5244,2297,2031,4616,4310,3692,5245,3071,5246,3598,5247, # 512
4617,3231,3515,5248,4101,4311,4618,3808,4312,4102,5249,4103,4104,3599,5250,5251, # 528
5252,5253,5254,5255,5256,5257,5258,5259,5260,5261,5262,5263,5264,5265,5266,5267, # 544
5268,5269,5270,5271,5272,5273,5274,5275,5276,5277,5278,5279,5280,5281,5282,5283, # 560
5284,5285,5286,5287,5288,5289,5290,5291,5292,5293,5294,5295,5296,5297,5298,5299, # 576
5300,5301,5302,5303,5304,5305,5306,5307,5308,5309,5310,5311,5312,5313,5314,5315, # 592
5316,5317,5318,5319,5320,5321,5322,5323,5324,5325,5326,5327,5328,5329,5330,5331, # 608
5332,5333,5334,5335,5336,5337,5338,5339,5340,5341,5342,5343,5344,5345,5346,5347, # 624
5348,5349,5350,5351,5352,5353,5354,5355,5356,5357,5358,5359,5360,5361,5362,5363, # 640
5364,5365,5366,5367,5368,5369,5370,5371,5372,5373,5374,5375,5376,5377,5378,5379, # 656
5380,5381, 363, 642,2787,2878,2788,2789,2316,3232,2317,3434,2011, 165,1942,3930, # 672
3931,3932,3933,5382,4619,5383,4620,5384,5385,5386,5387,5388,5389,5390,5391,5392, # 688
5393,5394,5395,5396,5397,5398,5399,5400,5401,5402,5403,5404,5405,5406,5407,5408, # 704
5409,5410,5411,5412,5413,5414,5415,5416,5417,5418,5419,5420,5421,5422,5423,5424, # 720
5425,5426,5427,5428,5429,5430,5431,5432,5433,5434,5435,5436,5437,5438,5439,5440, # 736
5441,5442,5443,5444,5445,5446,5447,5448,5449,5450,5451,5452,5453,5454,5455,5456, # 752
5457,5458,5459,5460,5461,5462,5463,5464,5465,5466,5467,5468,5469,5470,5471,5472, # 768
5473,5474,5475,5476,5477,5478,5479,5480,5481,5482,5483,5484,5485,5486,5487,5488, # 784
5489,5490,5491,5492,5493,5494,5495,5496,5497,5498,5499,5500,5501,5502,5503,5504, # 800
5505,5506,5507,5508,5509,5510,5511,5512,5513,5514,5515,5516,5517,5518,5519,5520, # 816
5521,5522,5523,5524,5525,5526,5527,5528,5529,5530,5531,5532,5533,5534,5535,5536, # 832
5537,5538,5539,5540,5541,5542,5543,5544,5545,5546,5547,5548,5549,5550,5551,5552, # 848
5553,5554,5555,5556,5557,5558,5559,5560,5561,5562,5563,5564,5565,5566,5567,5568, # 864
5569,5570,5571,5572,5573,5574,5575,5576,5577,5578,5579,5580,5581,5582,5583,5584, # 880
5585,5586,5587,5588,5589,5590,5591,5592,5593,5594,5595,5596,5597,5598,5599,5600, # 896
5601,5602,5603,5604,5605,5606,5607,5608,5609,5610,5611,5612,5613,5614,5615,5616, # 912
5617,5618,5619,5620,5621,5622,5623,5624,5625,5626,5627,5628,5629,5630,5631,5632, # 928
5633,5634,5635,5636,5637,5638,5639,5640,5641,5642,5643,5644,5645,5646,5647,5648, # 944
5649,5650,5651,5652,5653,5654,5655,5656,5657,5658,5659,5660,5661,5662,5663,5664, # 960
5665,5666,5667,5668,5669,5670,5671,5672,5673,5674,5675,5676,5677,5678,5679,5680, # 976
5681,5682,5683,5684,5685,5686,5687,5688,5689,5690,5691,5692,5693,5694,5695,5696, # 992
5697,5698,5699,5700,5701,5702,5703,5704,5705,5706,5707,5708,5709,5710,5711,5712, # 1008
5713,5714,5715,5716,5717,5718,5719,5720,5721,5722,5723,5724,5725,5726,5727,5728, # 1024
5729,5730,5731,5732,5733,5734,5735,5736,5737,5738,5739,5740,5741,5742,5743,5744, # 1040
5745,5746,5747,5748,5749,5750,5751,5752,5753,5754,5755,5756,5757,5758,5759,5760, # 1056
5761,5762,5763,5764,5765,5766,5767,5768,5769,5770,5771,5772,5773,5774,5775,5776, # 1072
5777,5778,5779,5780,5781,5782,5783,5784,5785,5786,5787,5788,5789,5790,5791,5792, # 1088
5793,5794,5795,5796,5797,5798,5799,5800,5801,5802,5803,5804,5805,5806,5807,5808, # 1104
5809,5810,5811,5812,5813,5814,5815,5816,5817,5818,5819,5820,5821,5822,5823,5824, # 1120
5825,5826,5827,5828,5829,5830,5831,5832,5833,5834,5835,5836,5837,5838,5839,5840, # 1136
5841,5842,5843,5844,5845,5846,5847,5848,5849,5850,5851,5852,5853,5854,5855,5856, # 1152
5857,5858,5859,5860,5861,5862,5863,5864,5865,5866,5867,5868,5869,5870,5871,5872, # 1168
5873,5874,5875,5876,5877,5878,5879,5880,5881,5882,5883,5884,5885,5886,5887,5888, # 1184
5889,5890,5891,5892,5893,5894,5895,5896,5897,5898,5899,5900,5901,5902,5903,5904, # 1200
5905,5906,5907,5908,5909,5910,5911,5912,5913,5914,5915,5916,5917,5918,5919,5920, # 1216
5921,5922,5923,5924,5925,5926,5927,5928,5929,5930,5931,5932,5933,5934,5935,5936, # 1232
5937,5938,5939,5940,5941,5942,5943,5944,5945,5946,5947,5948,5949,5950,5951,5952, # 1248
5953,5954,5955,5956,5957,5958,5959,5960,5961,5962,5963,5964,5965,5966,5967,5968, # 1264
5969,5970,5971,5972,5973,5974,5975,5976,5977,5978,5979,5980,5981,5982,5983,5984, # 1280
5985,5986,5987,5988,5989,5990,5991,5992,5993,5994,5995,5996,5997,5998,5999,6000, # 1296
6001,6002,6003,6004,6005,6006,6007,6008,6009,6010,6011,6012,6013,6014,6015,6016, # 1312
6017,6018,6019,6020,6021,6022,6023,6024,6025,6026,6027,6028,6029,6030,6031,6032, # 1328
6033,6034,6035,6036,6037,6038,6039,6040,6041,6042,6043,6044,6045,6046,6047,6048, # 1344
6049,6050,6051,6052,6053,6054,6055,6056,6057,6058,6059,6060,6061,6062,6063,6064, # 1360
6065,6066,6067,6068,6069,6070,6071,6072,6073,6074,6075,6076,6077,6078,6079,6080, # 1376
6081,6082,6083,6084,6085,6086,6087,6088,6089,6090,6091,6092,6093,6094,6095,6096, # 1392
6097,6098,6099,6100,6101,6102,6103,6104,6105,6106,6107,6108,6109,6110,6111,6112, # 1408
6113,6114,2044,2060,4621, 997,1235, 473,1186,4622, 920,3378,6115,6116, 379,1108, # 1424
4313,2657,2735,3934,6117,3809, 636,3233, 573,1026,3693,3435,2974,3300,2298,4105, # 1440
854,2937,2463, 393,2581,2417, 539, 752,1280,2750,2480, 140,1161, 440, 708,1569, # 1456
665,2497,1746,1291,1523,3000, 164,1603, 847,1331, 537,1997, 486, 508,1693,2418, # 1472
1970,2227, 878,1220, 299,1030, 969, 652,2751, 624,1137,3301,2619, 65,3302,2045, # 1488
1761,1859,3120,1930,3694,3516, 663,1767, 852, 835,3695, 269, 767,2826,2339,1305, # 1504
896,1150, 770,1616,6118, 506,1502,2075,1012,2519, 775,2520,2975,2340,2938,4314, # 1520
3028,2086,1224,1943,2286,6119,3072,4315,2240,1273,1987,3935,1557, 175, 597, 985, # 1536
3517,2419,2521,1416,3029, 585, 938,1931,1007,1052,1932,1685,6120,3379,4316,4623, # 1552
804, 599,3121,1333,2128,2539,1159,1554,2032,3810, 687,2033,2904, 952, 675,1467, # 1568
3436,6121,2241,1096,1786,2440,1543,1924, 980,1813,2228, 781,2692,1879, 728,1918, # 1584
3696,4624, 548,1950,4625,1809,1088,1356,3303,2522,1944, 502, 972, 373, 513,2827, # 1600
586,2377,2391,1003,1976,1631,6122,2464,1084, 648,1776,4626,2141, 324, 962,2012, # 1616
2177,2076,1384, 742,2178,1448,1173,1810, 222, 102, 301, 445, 125,2420, 662,2498, # 1632
277, 200,1476,1165,1068, 224,2562,1378,1446, 450,1880, 659, 791, 582,4627,2939, # 1648
3936,1516,1274, 555,2099,3697,1020,1389,1526,3380,1762,1723,1787,2229, 412,2114, # 1664
1900,2392,3518, 512,2597, 427,1925,2341,3122,1653,1686,2465,2499, 697, 330, 273, # 1680
380,2162, 951, 832, 780, 991,1301,3073, 965,2270,3519, 668,2523,2636,1286, 535, # 1696
1407, 518, 671, 957,2658,2378, 267, 611,2197,3030,6123, 248,2299, 967,1799,2356, # 1712
850,1418,3437,1876,1256,1480,2828,1718,6124,6125,1755,1664,2405,6126,4628,2879, # 1728
2829, 499,2179, 676,4629, 557,2329,2214,2090, 325,3234, 464, 811,3001, 992,2342, # 1744
2481,1232,1469, 303,2242, 466,1070,2163, 603,1777,2091,4630,2752,4631,2714, 322, # 1760
2659,1964,1768, 481,2188,1463,2330,2857,3600,2092,3031,2421,4632,2318,2070,1849, # 1776
2598,4633,1302,2254,1668,1701,2422,3811,2905,3032,3123,2046,4106,1763,1694,4634, # 1792
1604, 943,1724,1454, 917, 868,2215,1169,2940, 552,1145,1800,1228,1823,1955, 316, # 1808
1080,2510, 361,1807,2830,4107,2660,3381,1346,1423,1134,4108,6127, 541,1263,1229, # 1824
1148,2540, 545, 465,1833,2880,3438,1901,3074,2482, 816,3937, 713,1788,2500, 122, # 1840
1575, 195,1451,2501,1111,6128, 859, 374,1225,2243,2483,4317, 390,1033,3439,3075, # 1856
2524,1687, 266, 793,1440,2599, 946, 779, 802, 507, 897,1081, 528,2189,1292, 711, # 1872
1866,1725,1167,1640, 753, 398,2661,1053, 246, 348,4318, 137,1024,3440,1600,2077, # 1888
2129, 825,4319, 698, 238, 521, 187,2300,1157,2423,1641,1605,1464,1610,1097,2541, # 1904
1260,1436, 759,2255,1814,2150, 705,3235, 409,2563,3304, 561,3033,2005,2564, 726, # 1920
1956,2343,3698,4109, 949,3812,3813,3520,1669, 653,1379,2525, 881,2198, 632,2256, # 1936
1027, 778,1074, 733,1957, 514,1481,2466, 554,2180, 702,3938,1606,1017,1398,6129, # 1952
1380,3521, 921, 993,1313, 594, 449,1489,1617,1166, 768,1426,1360, 495,1794,3601, # 1968
1177,3602,1170,4320,2344, 476, 425,3167,4635,3168,1424, 401,2662,1171,3382,1998, # 1984
1089,4110, 477,3169, 474,6130,1909, 596,2831,1842, 494, 693,1051,1028,1207,3076, # 2000
606,2115, 727,2790,1473,1115, 743,3522, 630, 805,1532,4321,2021, 366,1057, 838, # 2016
684,1114,2142,4322,2050,1492,1892,1808,2271,3814,2424,1971,1447,1373,3305,1090, # 2032
1536,3939,3523,3306,1455,2199, 336, 369,2331,1035, 584,2393, 902, 718,2600,6131, # 2048
2753, 463,2151,1149,1611,2467, 715,1308,3124,1268, 343,1413,3236,1517,1347,2663, # 2064
2093,3940,2022,1131,1553,2100,2941,1427,3441,2942,1323,2484,6132,1980, 872,2368, # 2080
2441,2943, 320,2369,2116,1082, 679,1933,3941,2791,3815, 625,1143,2023, 422,2200, # 2096
3816,6133, 730,1695, 356,2257,1626,2301,2858,2637,1627,1778, 937, 883,2906,2693, # 2112
3002,1769,1086, 400,1063,1325,3307,2792,4111,3077, 456,2345,1046, 747,6134,1524, # 2128
884,1094,3383,1474,2164,1059, 974,1688,2181,2258,1047, 345,1665,1187, 358, 875, # 2144
3170, 305, 660,3524,2190,1334,1135,3171,1540,1649,2542,1527, 927, 968,2793, 885, # 2160
1972,1850, 482, 500,2638,1218,1109,1085,2543,1654,2034, 876, 78,2287,1482,1277, # 2176
861,1675,1083,1779, 724,2754, 454, 397,1132,1612,2332, 893, 672,1237, 257,2259, # 2192
2370, 135,3384, 337,2244, 547, 352, 340, 709,2485,1400, 788,1138,2511, 540, 772, # 2208
1682,2260,2272,2544,2013,1843,1902,4636,1999,1562,2288,4637,2201,1403,1533, 407, # 2224
576,3308,1254,2071, 978,3385, 170, 136,1201,3125,2664,3172,2394, 213, 912, 873, # 2240
3603,1713,2202, 699,3604,3699, 813,3442, 493, 531,1054, 468,2907,1483, 304, 281, # 2256
4112,1726,1252,2094, 339,2319,2130,2639, 756,1563,2944, 748, 571,2976,1588,2425, # 2272
2715,1851,1460,2426,1528,1392,1973,3237, 288,3309, 685,3386, 296, 892,2716,2216, # 2288
1570,2245, 722,1747,2217, 905,3238,1103,6135,1893,1441,1965, 251,1805,2371,3700, # 2304
2601,1919,1078, 75,2182,1509,1592,1270,2640,4638,2152,6136,3310,3817, 524, 706, # 2320
1075, 292,3818,1756,2602, 317, 98,3173,3605,3525,1844,2218,3819,2502, 814, 567, # 2336
385,2908,1534,6137, 534,1642,3239, 797,6138,1670,1529, 953,4323, 188,1071, 538, # 2352
178, 729,3240,2109,1226,1374,2000,2357,2977, 731,2468,1116,2014,2051,6139,1261, # 2368
1593, 803,2859,2736,3443, 556, 682, 823,1541,6140,1369,2289,1706,2794, 845, 462, # 2384
2603,2665,1361, 387, 162,2358,1740, 739,1770,1720,1304,1401,3241,1049, 627,1571, # 2400
2427,3526,1877,3942,1852,1500, 431,1910,1503, 677, 297,2795, 286,1433,1038,1198, # 2416
2290,1133,1596,4113,4639,2469,1510,1484,3943,6141,2442, 108, 712,4640,2372, 866, # 2432
3701,2755,3242,1348, 834,1945,1408,3527,2395,3243,1811, 824, 994,1179,2110,1548, # 2448
1453, 790,3003, 690,4324,4325,2832,2909,3820,1860,3821, 225,1748, 310, 346,1780, # 2464
2470, 821,1993,2717,2796, 828, 877,3528,2860,2471,1702,2165,2910,2486,1789, 453, # 2480
359,2291,1676, 73,1164,1461,1127,3311, 421, 604, 314,1037, 589, 116,2487, 737, # 2496
837,1180, 111, 244, 735,6142,2261,1861,1362, 986, 523, 418, 581,2666,3822, 103, # 2512
855, 503,1414,1867,2488,1091, 657,1597, 979, 605,1316,4641,1021,2443,2078,2001, # 2528
1209, 96, 587,2166,1032, 260,1072,2153, 173, 94, 226,3244, 819,2006,4642,4114, # 2544
2203, 231,1744, 782, 97,2667, 786,3387, 887, 391, 442,2219,4326,1425,6143,2694, # 2560
633,1544,1202, 483,2015, 592,2052,1958,2472,1655, 419, 129,4327,3444,3312,1714, # 2576
1257,3078,4328,1518,1098, 865,1310,1019,1885,1512,1734, 469,2444, 148, 773, 436, # 2592
1815,1868,1128,1055,4329,1245,2756,3445,2154,1934,1039,4643, 579,1238, 932,2320, # 2608
353, 205, 801, 115,2428, 944,2321,1881, 399,2565,1211, 678, 766,3944, 335,2101, # 2624
1459,1781,1402,3945,2737,2131,1010, 844, 981,1326,1013, 550,1816,1545,2620,1335, # 2640
1008, 371,2881, 936,1419,1613,3529,1456,1395,2273,1834,2604,1317,2738,2503, 416, # 2656
1643,4330, 806,1126, 229, 591,3946,1314,1981,1576,1837,1666, 347,1790, 977,3313, # 2672
764,2861,1853, 688,2429,1920,1462, 77, 595, 415,2002,3034, 798,1192,4115,6144, # 2688
2978,4331,3035,2695,2582,2072,2566, 430,2430,1727, 842,1396,3947,3702, 613, 377, # 2704
278, 236,1417,3388,3314,3174, 757,1869, 107,3530,6145,1194, 623,2262, 207,1253, # 2720
2167,3446,3948, 492,1117,1935, 536,1838,2757,1246,4332, 696,2095,2406,1393,1572, # 2736
3175,1782, 583, 190, 253,1390,2230, 830,3126,3389, 934,3245,1703,1749,2979,1870, # 2752
2545,1656,2204, 869,2346,4116,3176,1817, 496,1764,4644, 942,1504, 404,1903,1122, # 2768
1580,3606,2945,1022, 515, 372,1735, 955,2431,3036,6146,2797,1110,2302,2798, 617, # 2784
6147, 441, 762,1771,3447,3607,3608,1904, 840,3037, 86, 939,1385, 572,1370,2445, # 2800
1336, 114,3703, 898, 294, 203,3315, 703,1583,2274, 429, 961,4333,1854,1951,3390, # 2816
2373,3704,4334,1318,1381, 966,1911,2322,1006,1155, 309, 989, 458,2718,1795,1372, # 2832
1203, 252,1689,1363,3177, 517,1936, 168,1490, 562, 193,3823,1042,4117,1835, 551, # 2848
470,4645, 395, 489,3448,1871,1465,2583,2641, 417,1493, 279,1295, 511,1236,1119, # 2864
72,1231,1982,1812,3004, 871,1564, 984,3449,1667,2696,2096,4646,2347,2833,1673, # 2880
3609, 695,3246,2668, 807,1183,4647, 890, 388,2333,1801,1457,2911,1765,1477,1031, # 2896
3316,3317,1278,3391,2799,2292,2526, 163,3450,4335,2669,1404,1802,6148,2323,2407, # 2912
1584,1728,1494,1824,1269, 298, 909,3318,1034,1632, 375, 776,1683,2061, 291, 210, # 2928
1123, 809,1249,1002,2642,3038, 206,1011,2132, 144, 975, 882,1565, 342, 667, 754, # 2944
1442,2143,1299,2303,2062, 447, 626,2205,1221,2739,2912,1144,1214,2206,2584, 760, # 2960
1715, 614, 950,1281,2670,2621, 810, 577,1287,2546,4648, 242,2168, 250,2643, 691, # 2976
123,2644, 647, 313,1029, 689,1357,2946,1650, 216, 771,1339,1306, 808,2063, 549, # 2992
913,1371,2913,2914,6149,1466,1092,1174,1196,1311,2605,2396,1783,1796,3079, 406, # 3008
2671,2117,3949,4649, 487,1825,2220,6150,2915, 448,2348,1073,6151,2397,1707, 130, # 3024
900,1598, 329, 176,1959,2527,1620,6152,2275,4336,3319,1983,2191,3705,3610,2155, # 3040
3706,1912,1513,1614,6153,1988, 646, 392,2304,1589,3320,3039,1826,1239,1352,1340, # 3056
2916, 505,2567,1709,1437,2408,2547, 906,6154,2672, 384,1458,1594,1100,1329, 710, # 3072
423,3531,2064,2231,2622,1989,2673,1087,1882, 333, 841,3005,1296,2882,2379, 580, # 3088
1937,1827,1293,2585, 601, 574, 249,1772,4118,2079,1120, 645, 901,1176,1690, 795, # 3104
2207, 478,1434, 516,1190,1530, 761,2080, 930,1264, 355, 435,1552, 644,1791, 987, # 3120
220,1364,1163,1121,1538, 306,2169,1327,1222, 546,2645, 218, 241, 610,1704,3321, # 3136
1984,1839,1966,2528, 451,6155,2586,3707,2568, 907,3178, 254,2947, 186,1845,4650, # 3152
745, 432,1757, 428,1633, 888,2246,2221,2489,3611,2118,1258,1265, 956,3127,1784, # 3168
4337,2490, 319, 510, 119, 457,3612, 274,2035,2007,4651,1409,3128, 970,2758, 590, # 3184
2800, 661,2247,4652,2008,3950,1420,1549,3080,3322,3951,1651,1375,2111, 485,2491, # 3200
1429,1156,6156,2548,2183,1495, 831,1840,2529,2446, 501,1657, 307,1894,3247,1341, # 3216
666, 899,2156,1539,2549,1559, 886, 349,2208,3081,2305,1736,3824,2170,2759,1014, # 3232
1913,1386, 542,1397,2948, 490, 368, 716, 362, 159, 282,2569,1129,1658,1288,1750, # 3248
2674, 276, 649,2016, 751,1496, 658,1818,1284,1862,2209,2087,2512,3451, 622,2834, # 3264
376, 117,1060,2053,1208,1721,1101,1443, 247,1250,3179,1792,3952,2760,2398,3953, # 3280
6157,2144,3708, 446,2432,1151,2570,3452,2447,2761,2835,1210,2448,3082, 424,2222, # 3296
1251,2449,2119,2836, 504,1581,4338, 602, 817, 857,3825,2349,2306, 357,3826,1470, # 3312
1883,2883, 255, 958, 929,2917,3248, 302,4653,1050,1271,1751,2307,1952,1430,2697, # 3328
2719,2359, 354,3180, 777, 158,2036,4339,1659,4340,4654,2308,2949,2248,1146,2232, # 3344
3532,2720,1696,2623,3827,6158,3129,1550,2698,1485,1297,1428, 637, 931,2721,2145, # 3360
914,2550,2587, 81,2450, 612, 827,2646,1242,4655,1118,2884, 472,1855,3181,3533, # 3376
3534, 569,1353,2699,1244,1758,2588,4119,2009,2762,2171,3709,1312,1531,6159,1152, # 3392
1938, 134,1830, 471,3710,2276,1112,1535,3323,3453,3535, 982,1337,2950, 488, 826, # 3408
674,1058,1628,4120,2017, 522,2399, 211, 568,1367,3454, 350, 293,1872,1139,3249, # 3424
1399,1946,3006,1300,2360,3324, 588, 736,6160,2606, 744, 669,3536,3828,6161,1358, # 3440
199, 723, 848, 933, 851,1939,1505,1514,1338,1618,1831,4656,1634,3613, 443,2740, # 3456
3829, 717,1947, 491,1914,6162,2551,1542,4121,1025,6163,1099,1223, 198,3040,2722, # 3472
370, 410,1905,2589, 998,1248,3182,2380, 519,1449,4122,1710, 947, 928,1153,4341, # 3488
2277, 344,2624,1511, 615, 105, 161,1212,1076,1960,3130,2054,1926,1175,1906,2473, # 3504
414,1873,2801,6164,2309, 315,1319,3325, 318,2018,2146,2157, 963, 631, 223,4342, # 3520
4343,2675, 479,3711,1197,2625,3712,2676,2361,6165,4344,4123,6166,2451,3183,1886, # 3536
2184,1674,1330,1711,1635,1506, 799, 219,3250,3083,3954,1677,3713,3326,2081,3614, # 3552
1652,2073,4657,1147,3041,1752, 643,1961, 147,1974,3955,6167,1716,2037, 918,3007, # 3568
1994, 120,1537, 118, 609,3184,4345, 740,3455,1219, 332,1615,3830,6168,1621,2980, # 3584
1582, 783, 212, 553,2350,3714,1349,2433,2082,4124, 889,6169,2310,1275,1410, 973, # 3600
166,1320,3456,1797,1215,3185,2885,1846,2590,2763,4658, 629, 822,3008, 763, 940, # 3616
1990,2862, 439,2409,1566,1240,1622, 926,1282,1907,2764, 654,2210,1607, 327,1130, # 3632
3956,1678,1623,6170,2434,2192, 686, 608,3831,3715, 903,3957,3042,6171,2741,1522, # 3648
1915,1105,1555,2552,1359, 323,3251,4346,3457, 738,1354,2553,2311,2334,1828,2003, # 3664
3832,1753,2351,1227,6172,1887,4125,1478,6173,2410,1874,1712,1847, 520,1204,2607, # 3680
264,4659, 836,2677,2102, 600,4660,3833,2278,3084,6174,4347,3615,1342, 640, 532, # 3696
543,2608,1888,2400,2591,1009,4348,1497, 341,1737,3616,2723,1394, 529,3252,1321, # 3712
983,4661,1515,2120, 971,2592, 924, 287,1662,3186,4349,2700,4350,1519, 908,1948, # 3728
2452, 156, 796,1629,1486,2223,2055, 694,4126,1259,1036,3392,1213,2249,2742,1889, # 3744
1230,3958,1015, 910, 408, 559,3617,4662, 746, 725, 935,4663,3959,3009,1289, 563, # 3760
867,4664,3960,1567,2981,2038,2626, 988,2263,2381,4351, 143,2374, 704,1895,6175, # 3776
1188,3716,2088, 673,3085,2362,4352, 484,1608,1921,2765,2918, 215, 904,3618,3537, # 3792
894, 509, 976,3043,2701,3961,4353,2837,2982, 498,6176,6177,1102,3538,1332,3393, # 3808
1487,1636,1637, 233, 245,3962, 383, 650, 995,3044, 460,1520,1206,2352, 749,3327, # 3824
530, 700, 389,1438,1560,1773,3963,2264, 719,2951,2724,3834, 870,1832,1644,1000, # 3840
839,2474,3717, 197,1630,3394, 365,2886,3964,1285,2133, 734, 922, 818,1106, 732, # 3856
480,2083,1774,3458, 923,2279,1350, 221,3086, 85,2233,2234,3835,1585,3010,2147, # 3872
1387,1705,2382,1619,2475, 133, 239,2802,1991,1016,2084,2383, 411,2838,1113, 651, # 3888
1985,1160,3328, 990,1863,3087,1048,1276,2647, 265,2627,1599,3253,2056, 150, 638, # 3904
2019, 656, 853, 326,1479, 680,1439,4354,1001,1759, 413,3459,3395,2492,1431, 459, # 3920
4355,1125,3329,2265,1953,1450,2065,2863, 849, 351,2678,3131,3254,3255,1104,1577, # 3936
227,1351,1645,2453,2193,1421,2887, 812,2121, 634, 95,2435, 201,2312,4665,1646, # 3952
1671,2743,1601,2554,2702,2648,2280,1315,1366,2089,3132,1573,3718,3965,1729,1189, # 3968
328,2679,1077,1940,1136, 558,1283, 964,1195, 621,2074,1199,1743,3460,3619,1896, # 3984
1916,1890,3836,2952,1154,2112,1064, 862, 378,3011,2066,2113,2803,1568,2839,6178, # 4000
3088,2919,1941,1660,2004,1992,2194, 142, 707,1590,1708,1624,1922,1023,1836,1233, # 4016
1004,2313, 789, 741,3620,6179,1609,2411,1200,4127,3719,3720,4666,2057,3721, 593, # 4032
2840, 367,2920,1878,6180,3461,1521, 628,1168, 692,2211,2649, 300, 720,2067,2571, # 4048
2953,3396, 959,2504,3966,3539,3462,1977, 701,6181, 954,1043, 800, 681, 183,3722, # 4064
1803,1730,3540,4128,2103, 815,2314, 174, 467, 230,2454,1093,2134, 755,3541,3397, # 4080
1141,1162,6182,1738,2039, 270,3256,2513,1005,1647,2185,3837, 858,1679,1897,1719, # 4096
2954,2324,1806, 402, 670, 167,4129,1498,2158,2104, 750,6183, 915, 189,1680,1551, # 4112
455,4356,1501,2455, 405,1095,2955, 338,1586,1266,1819, 570, 641,1324, 237,1556, # 4128
2650,1388,3723,6184,1368,2384,1343,1978,3089,2436, 879,3724, 792,1191, 758,3012, # 4144
1411,2135,1322,4357, 240,4667,1848,3725,1574,6185, 420,3045,1546,1391, 714,4358, # 4160
1967, 941,1864, 863, 664, 426, 560,1731,2680,1785,2864,1949,2363, 403,3330,1415, # 4176
1279,2136,1697,2335, 204, 721,2097,3838, 90,6186,2085,2505, 191,3967, 124,2148, # 4192
1376,1798,1178,1107,1898,1405, 860,4359,1243,1272,2375,2983,1558,2456,1638, 113, # 4208
3621, 578,1923,2609, 880, 386,4130, 784,2186,2266,1422,2956,2172,1722, 497, 263, # 4224
2514,1267,2412,2610, 177,2703,3542, 774,1927,1344, 616,1432,1595,1018, 172,4360, # 4240
2325, 911,4361, 438,1468,3622, 794,3968,2024,2173,1681,1829,2957, 945, 895,3090, # 4256
575,2212,2476, 475,2401,2681, 785,2744,1745,2293,2555,1975,3133,2865, 394,4668, # 4272
3839, 635,4131, 639, 202,1507,2195,2766,1345,1435,2572,3726,1908,1184,1181,2457, # 4288
3727,3134,4362, 843,2611, 437, 916,4669, 234, 769,1884,3046,3047,3623, 833,6187, # 4304
1639,2250,2402,1355,1185,2010,2047, 999, 525,1732,1290,1488,2612, 948,1578,3728, # 4320
2413,2477,1216,2725,2159, 334,3840,1328,3624,2921,1525,4132, 564,1056, 891,4363, # 4336
1444,1698,2385,2251,3729,1365,2281,2235,1717,6188, 864,3841,2515, 444, 527,2767, # 4352
2922,3625, 544, 461,6189, 566, 209,2437,3398,2098,1065,2068,3331,3626,3257,2137, # 4368 #last 512
#Everything below is of no interest for detection purpose
2138,2122,3730,2888,1995,1820,1044,6190,6191,6192,6193,6194,6195,6196,6197,6198, # 4384
6199,6200,6201,6202,6203,6204,6205,4670,6206,6207,6208,6209,6210,6211,6212,6213, # 4400
6214,6215,6216,6217,6218,6219,6220,6221,6222,6223,6224,6225,6226,6227,6228,6229, # 4416
6230,6231,6232,6233,6234,6235,6236,6237,3187,6238,6239,3969,6240,6241,6242,6243, # 4432
6244,4671,6245,6246,4672,6247,6248,4133,6249,6250,4364,6251,2923,2556,2613,4673, # 4448
4365,3970,6252,6253,6254,6255,4674,6256,6257,6258,2768,2353,4366,4675,4676,3188, # 4464
4367,3463,6259,4134,4677,4678,6260,2267,6261,3842,3332,4368,3543,6262,6263,6264, # 4480
3013,1954,1928,4135,4679,6265,6266,2478,3091,6267,4680,4369,6268,6269,1699,6270, # 4496
3544,4136,4681,6271,4137,6272,4370,2804,6273,6274,2593,3971,3972,4682,6275,2236, # 4512
4683,6276,6277,4684,6278,6279,4138,3973,4685,6280,6281,3258,6282,6283,6284,6285, # 4528
3974,4686,2841,3975,6286,6287,3545,6288,6289,4139,4687,4140,6290,4141,6291,4142, # 4544
6292,6293,3333,6294,6295,6296,4371,6297,3399,6298,6299,4372,3976,6300,6301,6302, # 4560
4373,6303,6304,3843,3731,6305,4688,4374,6306,6307,3259,2294,6308,3732,2530,4143, # 4576
6309,4689,6310,6311,6312,3048,6313,6314,4690,3733,2237,6315,6316,2282,3334,6317, # 4592
6318,3844,6319,6320,4691,6321,3400,4692,6322,4693,6323,3049,6324,4375,6325,3977, # 4608
6326,6327,6328,3546,6329,4694,3335,6330,4695,4696,6331,6332,6333,6334,4376,3978, # 4624
6335,4697,3979,4144,6336,3980,4698,6337,6338,6339,6340,6341,4699,4700,4701,6342, # 4640
6343,4702,6344,6345,4703,6346,6347,4704,6348,4705,4706,3135,6349,4707,6350,4708, # 4656
6351,4377,6352,4709,3734,4145,6353,2506,4710,3189,6354,3050,4711,3981,6355,3547, # 4672
3014,4146,4378,3735,2651,3845,3260,3136,2224,1986,6356,3401,6357,4712,2594,3627, # 4688
3137,2573,3736,3982,4713,3628,4714,4715,2682,3629,4716,6358,3630,4379,3631,6359, # 4704
6360,6361,3983,6362,6363,6364,6365,4147,3846,4717,6366,6367,3737,2842,6368,4718, # 4720
2628,6369,3261,6370,2386,6371,6372,3738,3984,4719,3464,4720,3402,6373,2924,3336, # 4736
4148,2866,6374,2805,3262,4380,2704,2069,2531,3138,2806,2984,6375,2769,6376,4721, # 4752
4722,3403,6377,6378,3548,6379,6380,2705,3092,1979,4149,2629,3337,2889,6381,3338, # 4768
4150,2557,3339,4381,6382,3190,3263,3739,6383,4151,4723,4152,2558,2574,3404,3191, # 4784
6384,6385,4153,6386,4724,4382,6387,6388,4383,6389,6390,4154,6391,4725,3985,6392, # 4800
3847,4155,6393,6394,6395,6396,6397,3465,6398,4384,6399,6400,6401,6402,6403,6404, # 4816
4156,6405,6406,6407,6408,2123,6409,6410,2326,3192,4726,6411,6412,6413,6414,4385, # 4832
4157,6415,6416,4158,6417,3093,3848,6418,3986,6419,6420,3849,6421,6422,6423,4159, # 4848
6424,6425,4160,6426,3740,6427,6428,6429,6430,3987,6431,4727,6432,2238,6433,6434, # 4864
4386,3988,6435,6436,3632,6437,6438,2843,6439,6440,6441,6442,3633,6443,2958,6444, # 4880
6445,3466,6446,2364,4387,3850,6447,4388,2959,3340,6448,3851,6449,4728,6450,6451, # 4896
3264,4729,6452,3193,6453,4389,4390,2706,3341,4730,6454,3139,6455,3194,6456,3051, # 4912
2124,3852,1602,4391,4161,3853,1158,3854,4162,3989,4392,3990,4731,4732,4393,2040, # 4928
4163,4394,3265,6457,2807,3467,3855,6458,6459,6460,3991,3468,4733,4734,6461,3140, # 4944
2960,6462,4735,6463,6464,6465,6466,4736,4737,4738,4739,6467,6468,4164,2403,3856, # 4960
6469,6470,2770,2844,6471,4740,6472,6473,6474,6475,6476,6477,6478,3195,6479,4741, # 4976
4395,6480,2867,6481,4742,2808,6482,2493,4165,6483,6484,6485,6486,2295,4743,6487, # 4992
6488,6489,3634,6490,6491,6492,6493,6494,6495,6496,2985,4744,6497,6498,4745,6499, # 5008
6500,2925,3141,4166,6501,6502,4746,6503,6504,4747,6505,6506,6507,2890,6508,6509, # 5024
6510,6511,6512,6513,6514,6515,6516,6517,6518,6519,3469,4167,6520,6521,6522,4748, # 5040
4396,3741,4397,4749,4398,3342,2125,4750,6523,4751,4752,4753,3052,6524,2961,4168, # 5056
6525,4754,6526,4755,4399,2926,4169,6527,3857,6528,4400,4170,6529,4171,6530,6531, # 5072
2595,6532,6533,6534,6535,3635,6536,6537,6538,6539,6540,6541,6542,4756,6543,6544, # 5088
6545,6546,6547,6548,4401,6549,6550,6551,6552,4402,3405,4757,4403,6553,6554,6555, # 5104
4172,3742,6556,6557,6558,3992,3636,6559,6560,3053,2726,6561,3549,4173,3054,4404, # 5120
6562,6563,3993,4405,3266,3550,2809,4406,6564,6565,6566,4758,4759,6567,3743,6568, # 5136
4760,3744,4761,3470,6569,6570,6571,4407,6572,3745,4174,6573,4175,2810,4176,3196, # 5152
4762,6574,4177,6575,6576,2494,2891,3551,6577,6578,3471,6579,4408,6580,3015,3197, # 5168
6581,3343,2532,3994,3858,6582,3094,3406,4409,6583,2892,4178,4763,4410,3016,4411, # 5184
6584,3995,3142,3017,2683,6585,4179,6586,6587,4764,4412,6588,6589,4413,6590,2986, # 5200
6591,2962,3552,6592,2963,3472,6593,6594,4180,4765,6595,6596,2225,3267,4414,6597, # 5216
3407,3637,4766,6598,6599,3198,6600,4415,6601,3859,3199,6602,3473,4767,2811,4416, # 5232
1856,3268,3200,2575,3996,3997,3201,4417,6603,3095,2927,6604,3143,6605,2268,6606, # 5248
3998,3860,3096,2771,6607,6608,3638,2495,4768,6609,3861,6610,3269,2745,4769,4181, # 5264
3553,6611,2845,3270,6612,6613,6614,3862,6615,6616,4770,4771,6617,3474,3999,4418, # 5280
4419,6618,3639,3344,6619,4772,4182,6620,2126,6621,6622,6623,4420,4773,6624,3018, # 5296
6625,4774,3554,6626,4183,2025,3746,6627,4184,2707,6628,4421,4422,3097,1775,4185, # 5312
3555,6629,6630,2868,6631,6632,4423,6633,6634,4424,2414,2533,2928,6635,4186,2387, # 5328
6636,4775,6637,4187,6638,1891,4425,3202,3203,6639,6640,4776,6641,3345,6642,6643, # 5344
3640,6644,3475,3346,3641,4000,6645,3144,6646,3098,2812,4188,3642,3204,6647,3863, # 5360
3476,6648,3864,6649,4426,4001,6650,6651,6652,2576,6653,4189,4777,6654,6655,6656, # 5376
2846,6657,3477,3205,4002,6658,4003,6659,3347,2252,6660,6661,6662,4778,6663,6664, # 5392
6665,6666,6667,6668,6669,4779,4780,2048,6670,3478,3099,6671,3556,3747,4004,6672, # 5408
6673,6674,3145,4005,3748,6675,6676,6677,6678,6679,3408,6680,6681,6682,6683,3206, # 5424
3207,6684,6685,4781,4427,6686,4782,4783,4784,6687,6688,6689,4190,6690,6691,3479, # 5440
6692,2746,6693,4428,6694,6695,6696,6697,6698,6699,4785,6700,6701,3208,2727,6702, # 5456
3146,6703,6704,3409,2196,6705,4429,6706,6707,6708,2534,1996,6709,6710,6711,2747, # 5472
6712,6713,6714,4786,3643,6715,4430,4431,6716,3557,6717,4432,4433,6718,6719,6720, # 5488
6721,3749,6722,4006,4787,6723,6724,3644,4788,4434,6725,6726,4789,2772,6727,6728, # 5504
6729,6730,6731,2708,3865,2813,4435,6732,6733,4790,4791,3480,6734,6735,6736,6737, # 5520
4436,3348,6738,3410,4007,6739,6740,4008,6741,6742,4792,3411,4191,6743,6744,6745, # 5536
6746,6747,3866,6748,3750,6749,6750,6751,6752,6753,6754,6755,3867,6756,4009,6757, # 5552
4793,4794,6758,2814,2987,6759,6760,6761,4437,6762,6763,6764,6765,3645,6766,6767, # 5568
3481,4192,6768,3751,6769,6770,2174,6771,3868,3752,6772,6773,6774,4193,4795,4438, # 5584
3558,4796,4439,6775,4797,6776,6777,4798,6778,4799,3559,4800,6779,6780,6781,3482, # 5600
6782,2893,6783,6784,4194,4801,4010,6785,6786,4440,6787,4011,6788,6789,6790,6791, # 5616
6792,6793,4802,6794,6795,6796,4012,6797,6798,6799,6800,3349,4803,3483,6801,4804, # 5632
4195,6802,4013,6803,6804,4196,6805,4014,4015,6806,2847,3271,2848,6807,3484,6808, # 5648
6809,6810,4441,6811,4442,4197,4443,3272,4805,6812,3412,4016,1579,6813,6814,4017, # 5664
6815,3869,6816,2964,6817,4806,6818,6819,4018,3646,6820,6821,4807,4019,4020,6822, # 5680
6823,3560,6824,6825,4021,4444,6826,4198,6827,6828,4445,6829,6830,4199,4808,6831, # 5696
6832,6833,3870,3019,2458,6834,3753,3413,3350,6835,4809,3871,4810,3561,4446,6836, # 5712
6837,4447,4811,4812,6838,2459,4448,6839,4449,6840,6841,4022,3872,6842,4813,4814, # 5728
6843,6844,4815,4200,4201,4202,6845,4023,6846,6847,4450,3562,3873,6848,6849,4816, # 5744
4817,6850,4451,4818,2139,6851,3563,6852,6853,3351,6854,6855,3352,4024,2709,3414, # 5760
4203,4452,6856,4204,6857,6858,3874,3875,6859,6860,4819,6861,6862,6863,6864,4453, # 5776
3647,6865,6866,4820,6867,6868,6869,6870,4454,6871,2869,6872,6873,4821,6874,3754, # 5792
6875,4822,4205,6876,6877,6878,3648,4206,4455,6879,4823,6880,4824,3876,6881,3055, # 5808
4207,6882,3415,6883,6884,6885,4208,4209,6886,4210,3353,6887,3354,3564,3209,3485, # 5824
2652,6888,2728,6889,3210,3755,6890,4025,4456,6891,4825,6892,6893,6894,6895,4211, # 5840
6896,6897,6898,4826,6899,6900,4212,6901,4827,6902,2773,3565,6903,4828,6904,6905, # 5856
6906,6907,3649,3650,6908,2849,3566,6909,3567,3100,6910,6911,6912,6913,6914,6915, # 5872
4026,6916,3355,4829,3056,4457,3756,6917,3651,6918,4213,3652,2870,6919,4458,6920, # 5888
2438,6921,6922,3757,2774,4830,6923,3356,4831,4832,6924,4833,4459,3653,2507,6925, # 5904
4834,2535,6926,6927,3273,4027,3147,6928,3568,6929,6930,6931,4460,6932,3877,4461, # 5920
2729,3654,6933,6934,6935,6936,2175,4835,2630,4214,4028,4462,4836,4215,6937,3148, # 5936
4216,4463,4837,4838,4217,6938,6939,2850,4839,6940,4464,6941,6942,6943,4840,6944, # 5952
4218,3274,4465,6945,6946,2710,6947,4841,4466,6948,6949,2894,6950,6951,4842,6952, # 5968
4219,3057,2871,6953,6954,6955,6956,4467,6957,2711,6958,6959,6960,3275,3101,4843, # 5984
6961,3357,3569,6962,4844,6963,6964,4468,4845,3570,6965,3102,4846,3758,6966,4847, # 6000
3878,4848,4849,4029,6967,2929,3879,4850,4851,6968,6969,1733,6970,4220,6971,6972, # 6016
6973,6974,6975,6976,4852,6977,6978,6979,6980,6981,6982,3759,6983,6984,6985,3486, # 6032
3487,6986,3488,3416,6987,6988,6989,6990,6991,6992,6993,6994,6995,6996,6997,4853, # 6048
6998,6999,4030,7000,7001,3211,7002,7003,4221,7004,7005,3571,4031,7006,3572,7007, # 6064
2614,4854,2577,7008,7009,2965,3655,3656,4855,2775,3489,3880,4222,4856,3881,4032, # 6080
3882,3657,2730,3490,4857,7010,3149,7011,4469,4858,2496,3491,4859,2283,7012,7013, # 6096
7014,2365,4860,4470,7015,7016,3760,7017,7018,4223,1917,7019,7020,7021,4471,7022, # 6112
2776,4472,7023,7024,7025,7026,4033,7027,3573,4224,4861,4034,4862,7028,7029,1929, # 6128
3883,4035,7030,4473,3058,7031,2536,3761,3884,7032,4036,7033,2966,2895,1968,4474, # 6144
3276,4225,3417,3492,4226,2105,7034,7035,1754,2596,3762,4227,4863,4475,3763,4864, # 6160
3764,2615,2777,3103,3765,3658,3418,4865,2296,3766,2815,7036,7037,7038,3574,2872, # 6176
3277,4476,7039,4037,4477,7040,7041,4038,7042,7043,7044,7045,7046,7047,2537,7048, # 6192
7049,7050,7051,7052,7053,7054,4478,7055,7056,3767,3659,4228,3575,7057,7058,4229, # 6208
7059,7060,7061,3660,7062,3212,7063,3885,4039,2460,7064,7065,7066,7067,7068,7069, # 6224
7070,7071,7072,7073,7074,4866,3768,4867,7075,7076,7077,7078,4868,3358,3278,2653, # 6240
7079,7080,4479,3886,7081,7082,4869,7083,7084,7085,7086,7087,7088,2538,7089,7090, # 6256
7091,4040,3150,3769,4870,4041,2896,3359,4230,2930,7092,3279,7093,2967,4480,3213, # 6272
4481,3661,7094,7095,7096,7097,7098,7099,7100,7101,7102,2461,3770,7103,7104,4231, # 6288
3151,7105,7106,7107,4042,3662,7108,7109,4871,3663,4872,4043,3059,7110,7111,7112, # 6304
3493,2988,7113,4873,7114,7115,7116,3771,4874,7117,7118,4232,4875,7119,3576,2336, # 6320
4876,7120,4233,3419,4044,4877,4878,4482,4483,4879,4484,4234,7121,3772,4880,1045, # 6336
3280,3664,4881,4882,7122,7123,7124,7125,4883,7126,2778,7127,4485,4486,7128,4884, # 6352
3214,3887,7129,7130,3215,7131,4885,4045,7132,7133,4046,7134,7135,7136,7137,7138, # 6368
7139,7140,7141,7142,7143,4235,7144,4886,7145,7146,7147,4887,7148,7149,7150,4487, # 6384
4047,4488,7151,7152,4888,4048,2989,3888,7153,3665,7154,4049,7155,7156,7157,7158, # 6400
7159,7160,2931,4889,4890,4489,7161,2631,3889,4236,2779,7162,7163,4891,7164,3060, # 6416
7165,1672,4892,7166,4893,4237,3281,4894,7167,7168,3666,7169,3494,7170,7171,4050, # 6432
7172,7173,3104,3360,3420,4490,4051,2684,4052,7174,4053,7175,7176,7177,2253,4054, # 6448
7178,7179,4895,7180,3152,3890,3153,4491,3216,7181,7182,7183,2968,4238,4492,4055, # 6464
7184,2990,7185,2479,7186,7187,4493,7188,7189,7190,7191,7192,4896,7193,4897,2969, # 6480
4494,4898,7194,3495,7195,7196,4899,4495,7197,3105,2731,7198,4900,7199,7200,7201, # 6496
4056,7202,3361,7203,7204,4496,4901,4902,7205,4497,7206,7207,2315,4903,7208,4904, # 6512
7209,4905,2851,7210,7211,3577,7212,3578,4906,7213,4057,3667,4907,7214,4058,2354, # 6528
3891,2376,3217,3773,7215,7216,7217,7218,7219,4498,7220,4908,3282,2685,7221,3496, # 6544
4909,2632,3154,4910,7222,2337,7223,4911,7224,7225,7226,4912,4913,3283,4239,4499, # 6560
7227,2816,7228,7229,7230,7231,7232,7233,7234,4914,4500,4501,7235,7236,7237,2686, # 6576
7238,4915,7239,2897,4502,7240,4503,7241,2516,7242,4504,3362,3218,7243,7244,7245, # 6592
4916,7246,7247,4505,3363,7248,7249,7250,7251,3774,4506,7252,7253,4917,7254,7255, # 6608
3284,2991,4918,4919,3219,3892,4920,3106,3497,4921,7256,7257,7258,4922,7259,4923, # 6624
3364,4507,4508,4059,7260,4240,3498,7261,7262,4924,7263,2992,3893,4060,3220,7264, # 6640
7265,7266,7267,7268,7269,4509,3775,7270,2817,7271,4061,4925,4510,3776,7272,4241, # 6656
4511,3285,7273,7274,3499,7275,7276,7277,4062,4512,4926,7278,3107,3894,7279,7280, # 6672
4927,7281,4513,7282,7283,3668,7284,7285,4242,4514,4243,7286,2058,4515,4928,4929, # 6688
4516,7287,3286,4244,7288,4517,7289,7290,7291,3669,7292,7293,4930,4931,4932,2355, # 6704
4933,7294,2633,4518,7295,4245,7296,7297,4519,7298,7299,4520,4521,4934,7300,4246, # 6720
4522,7301,7302,7303,3579,7304,4247,4935,7305,4936,7306,7307,7308,7309,3777,7310, # 6736
4523,7311,7312,7313,4248,3580,7314,4524,3778,4249,7315,3581,7316,3287,7317,3221, # 6752
7318,4937,7319,7320,7321,7322,7323,7324,4938,4939,7325,4525,7326,7327,7328,4063, # 6768
7329,7330,4940,7331,7332,4941,7333,4526,7334,3500,2780,1741,4942,2026,1742,7335, # 6784
7336,3582,4527,2388,7337,7338,7339,4528,7340,4250,4943,7341,7342,7343,4944,7344, # 6800
7345,7346,3020,7347,4945,7348,7349,7350,7351,3895,7352,3896,4064,3897,7353,7354, # 6816
7355,4251,7356,7357,3898,7358,3779,7359,3780,3288,7360,7361,4529,7362,4946,4530, # 6832
2027,7363,3899,4531,4947,3222,3583,7364,4948,7365,7366,7367,7368,4949,3501,4950, # 6848
3781,4951,4532,7369,2517,4952,4252,4953,3155,7370,4954,4955,4253,2518,4533,7371, # 6864
7372,2712,4254,7373,7374,7375,3670,4956,3671,7376,2389,3502,4065,7377,2338,7378, # 6880
7379,7380,7381,3061,7382,4957,7383,7384,7385,7386,4958,4534,7387,7388,2993,7389, # 6896
3062,7390,4959,7391,7392,7393,4960,3108,4961,7394,4535,7395,4962,3421,4536,7396, # 6912
4963,7397,4964,1857,7398,4965,7399,7400,2176,3584,4966,7401,7402,3422,4537,3900, # 6928
3585,7403,3782,7404,2852,7405,7406,7407,4538,3783,2654,3423,4967,4539,7408,3784, # 6944
3586,2853,4540,4541,7409,3901,7410,3902,7411,7412,3785,3109,2327,3903,7413,7414, # 6960
2970,4066,2932,7415,7416,7417,3904,3672,3424,7418,4542,4543,4544,7419,4968,7420, # 6976
7421,4255,7422,7423,7424,7425,7426,4067,7427,3673,3365,4545,7428,3110,2559,3674, # 6992
7429,7430,3156,7431,7432,3503,7433,3425,4546,7434,3063,2873,7435,3223,4969,4547, # 7008
4548,2898,4256,4068,7436,4069,3587,3786,2933,3787,4257,4970,4971,3788,7437,4972, # 7024
3064,7438,4549,7439,7440,7441,7442,7443,4973,3905,7444,2874,7445,7446,7447,7448, # 7040
3021,7449,4550,3906,3588,4974,7450,7451,3789,3675,7452,2578,7453,4070,7454,7455, # 7056
7456,4258,3676,7457,4975,7458,4976,4259,3790,3504,2634,4977,3677,4551,4260,7459, # 7072
7460,7461,7462,3907,4261,4978,7463,7464,7465,7466,4979,4980,7467,7468,2213,4262, # 7088
7469,7470,7471,3678,4981,7472,2439,7473,4263,3224,3289,7474,3908,2415,4982,7475, # 7104
4264,7476,4983,2655,7477,7478,2732,4552,2854,2875,7479,7480,4265,7481,4553,4984, # 7120
7482,7483,4266,7484,3679,3366,3680,2818,2781,2782,3367,3589,4554,3065,7485,4071, # 7136
2899,7486,7487,3157,2462,4072,4555,4073,4985,4986,3111,4267,2687,3368,4556,4074, # 7152
3791,4268,7488,3909,2783,7489,2656,1962,3158,4557,4987,1963,3159,3160,7490,3112, # 7168
4988,4989,3022,4990,4991,3792,2855,7491,7492,2971,4558,7493,7494,4992,7495,7496, # 7184
7497,7498,4993,7499,3426,4559,4994,7500,3681,4560,4269,4270,3910,7501,4075,4995, # 7200
4271,7502,7503,4076,7504,4996,7505,3225,4997,4272,4077,2819,3023,7506,7507,2733, # 7216
4561,7508,4562,7509,3369,3793,7510,3590,2508,7511,7512,4273,3113,2994,2616,7513, # 7232
7514,7515,7516,7517,7518,2820,3911,4078,2748,7519,7520,4563,4998,7521,7522,7523, # 7248
7524,4999,4274,7525,4564,3682,2239,4079,4565,7526,7527,7528,7529,5000,7530,7531, # 7264
5001,4275,3794,7532,7533,7534,3066,5002,4566,3161,7535,7536,4080,7537,3162,7538, # 7280
7539,4567,7540,7541,7542,7543,7544,7545,5003,7546,4568,7547,7548,7549,7550,7551, # 7296
7552,7553,7554,7555,7556,5004,7557,7558,7559,5005,7560,3795,7561,4569,7562,7563, # 7312
7564,2821,3796,4276,4277,4081,7565,2876,7566,5006,7567,7568,2900,7569,3797,3912, # 7328
7570,7571,7572,4278,7573,7574,7575,5007,7576,7577,5008,7578,7579,4279,2934,7580, # 7344
7581,5009,7582,4570,7583,4280,7584,7585,7586,4571,4572,3913,7587,4573,3505,7588, # 7360
5010,7589,7590,7591,7592,3798,4574,7593,7594,5011,7595,4281,7596,7597,7598,4282, # 7376
5012,7599,7600,5013,3163,7601,5014,7602,3914,7603,7604,2734,4575,4576,4577,7605, # 7392
7606,7607,7608,7609,3506,5015,4578,7610,4082,7611,2822,2901,2579,3683,3024,4579, # 7408
3507,7612,4580,7613,3226,3799,5016,7614,7615,7616,7617,7618,7619,7620,2995,3290, # 7424
7621,4083,7622,5017,7623,7624,7625,7626,7627,4581,3915,7628,3291,7629,5018,7630, # 7440
7631,7632,7633,4084,7634,7635,3427,3800,7636,7637,4582,7638,5019,4583,5020,7639, # 7456
3916,7640,3801,5021,4584,4283,7641,7642,3428,3591,2269,7643,2617,7644,4585,3592, # 7472
7645,4586,2902,7646,7647,3227,5022,7648,4587,7649,4284,7650,7651,7652,4588,2284, # 7488
7653,5023,7654,7655,7656,4589,5024,3802,7657,7658,5025,3508,4590,7659,7660,7661, # 7504
1969,5026,7662,7663,3684,1821,2688,7664,2028,2509,4285,7665,2823,1841,7666,2689, # 7520
3114,7667,3917,4085,2160,5027,5028,2972,7668,5029,7669,7670,7671,3593,4086,7672, # 7536
4591,4087,5030,3803,7673,7674,7675,7676,7677,7678,7679,4286,2366,4592,4593,3067, # 7552
2328,7680,7681,4594,3594,3918,2029,4287,7682,5031,3919,3370,4288,4595,2856,7683, # 7568
3509,7684,7685,5032,5033,7686,7687,3804,2784,7688,7689,7690,7691,3371,7692,7693, # 7584
2877,5034,7694,7695,3920,4289,4088,7696,7697,7698,5035,7699,5036,4290,5037,5038, # 7600
5039,7700,7701,7702,5040,5041,3228,7703,1760,7704,5042,3229,4596,2106,4089,7705, # 7616
4597,2824,5043,2107,3372,7706,4291,4090,5044,7707,4091,7708,5045,3025,3805,4598, # 7632
4292,4293,4294,3373,7709,4599,7710,5046,7711,7712,5047,5048,3806,7713,7714,7715, # 7648
5049,7716,7717,7718,7719,4600,5050,7720,7721,7722,5051,7723,4295,3429,7724,7725, # 7664
7726,7727,3921,7728,3292,5052,4092,7729,7730,7731,7732,7733,7734,7735,5053,5054, # 7680
7736,7737,7738,7739,3922,3685,7740,7741,7742,7743,2635,5055,7744,5056,4601,7745, # 7696
7746,2560,7747,7748,7749,7750,3923,7751,7752,7753,7754,7755,4296,2903,7756,7757, # 7712
7758,7759,7760,3924,7761,5057,4297,7762,7763,5058,4298,7764,4093,7765,7766,5059, # 7728
3925,7767,7768,7769,7770,7771,7772,7773,7774,7775,7776,3595,7777,4299,5060,4094, # 7744
7778,3293,5061,7779,7780,4300,7781,7782,4602,7783,3596,7784,7785,3430,2367,7786, # 7760
3164,5062,5063,4301,7787,7788,4095,5064,5065,7789,3374,3115,7790,7791,7792,7793, # 7776
7794,7795,7796,3597,4603,7797,7798,3686,3116,3807,5066,7799,7800,5067,7801,7802, # 7792
4604,4302,5068,4303,4096,7803,7804,3294,7805,7806,5069,4605,2690,7807,3026,7808, # 7808
7809,7810,7811,7812,7813,7814,7815,7816,7817,7818,7819,7820,7821,7822,7823,7824, # 7824
7825,7826,7827,7828,7829,7830,7831,7832,7833,7834,7835,7836,7837,7838,7839,7840, # 7840
7841,7842,7843,7844,7845,7846,7847,7848,7849,7850,7851,7852,7853,7854,7855,7856, # 7856
7857,7858,7859,7860,7861,7862,7863,7864,7865,7866,7867,7868,7869,7870,7871,7872, # 7872
7873,7874,7875,7876,7877,7878,7879,7880,7881,7882,7883,7884,7885,7886,7887,7888, # 7888
7889,7890,7891,7892,7893,7894,7895,7896,7897,7898,7899,7900,7901,7902,7903,7904, # 7904
7905,7906,7907,7908,7909,7910,7911,7912,7913,7914,7915,7916,7917,7918,7919,7920, # 7920
7921,7922,7923,7924,3926,7925,7926,7927,7928,7929,7930,7931,7932,7933,7934,7935, # 7936
7936,7937,7938,7939,7940,7941,7942,7943,7944,7945,7946,7947,7948,7949,7950,7951, # 7952
7952,7953,7954,7955,7956,7957,7958,7959,7960,7961,7962,7963,7964,7965,7966,7967, # 7968
7968,7969,7970,7971,7972,7973,7974,7975,7976,7977,7978,7979,7980,7981,7982,7983, # 7984
7984,7985,7986,7987,7988,7989,7990,7991,7992,7993,7994,7995,7996,7997,7998,7999, # 8000
8000,8001,8002,8003,8004,8005,8006,8007,8008,8009,8010,8011,8012,8013,8014,8015, # 8016
8016,8017,8018,8019,8020,8021,8022,8023,8024,8025,8026,8027,8028,8029,8030,8031, # 8032
8032,8033,8034,8035,8036,8037,8038,8039,8040,8041,8042,8043,8044,8045,8046,8047, # 8048
8048,8049,8050,8051,8052,8053,8054,8055,8056,8057,8058,8059,8060,8061,8062,8063, # 8064
8064,8065,8066,8067,8068,8069,8070,8071,8072,8073,8074,8075,8076,8077,8078,8079, # 8080
8080,8081,8082,8083,8084,8085,8086,8087,8088,8089,8090,8091,8092,8093,8094,8095, # 8096
8096,8097,8098,8099,8100,8101,8102,8103,8104,8105,8106,8107,8108,8109,8110,8111, # 8112
8112,8113,8114,8115,8116,8117,8118,8119,8120,8121,8122,8123,8124,8125,8126,8127, # 8128
8128,8129,8130,8131,8132,8133,8134,8135,8136,8137,8138,8139,8140,8141,8142,8143, # 8144
8144,8145,8146,8147,8148,8149,8150,8151,8152,8153,8154,8155,8156,8157,8158,8159, # 8160
8160,8161,8162,8163,8164,8165,8166,8167,8168,8169,8170,8171,8172,8173,8174,8175, # 8176
8176,8177,8178,8179,8180,8181,8182,8183,8184,8185,8186,8187,8188,8189,8190,8191, # 8192
8192,8193,8194,8195,8196,8197,8198,8199,8200,8201,8202,8203,8204,8205,8206,8207, # 8208
8208,8209,8210,8211,8212,8213,8214,8215,8216,8217,8218,8219,8220,8221,8222,8223, # 8224
8224,8225,8226,8227,8228,8229,8230,8231,8232,8233,8234,8235,8236,8237,8238,8239, # 8240
8240,8241,8242,8243,8244,8245,8246,8247,8248,8249,8250,8251,8252,8253,8254,8255, # 8256
8256,8257,8258,8259,8260,8261,8262,8263,8264,8265,8266,8267,8268,8269,8270,8271) # 8272
# flake8: noqa
| gpl-2.0 |
RigsOfRods/rigs-of-rods | tools/blender/2.82/ror_export/__init__.py | 4 | 6028 | # <pep8-80 compliant>
# Add-on metadata dictionary read by Blender's add-on manager; must be
# defined at module level and named exactly "bl_info".
bl_info = {
    "name": "RoR Exporter",
    "author": "ulteq",
    "version": (0, 0, 2),
    "blender": (2, 82, 0),  # Blender version this add-on targets
    "category": "RoR",
}
import bpy
import json
import bmesh
from bpy.props import StringProperty
from bpy_extras.io_utils import ExportHelper
def register():
    """Register the exporter operator and add it to File > Export.

    Called by Blender when the add-on is enabled.
    """
    # The add-on relies on developer extras being visible in the UI.
    bpy.context.preferences.view.show_developer_ui = True
    bpy.utils.register_class(ROR_OT_exporter)
    bpy.types.TOPBAR_MT_file_export.append(menu_func)
def unregister():
    """Undo register(): remove the menu entry, then unregister the operator.

    Called by Blender when the add-on is disabled.
    """
    bpy.types.TOPBAR_MT_file_export.remove(menu_func)
    bpy.utils.unregister_class(ROR_OT_exporter)
def menu_func(self, context):
    """File > Export menu callback: add the RoR truck export entry."""
    layout = self.layout
    layout.operator(ROR_OT_exporter.bl_idname, text="Truck (.truck)")
class ROR_OT_exporter(bpy.types.Operator, ExportHelper):
    """Export the first selected mesh object as a Rigs of Rods truck file.

    Vertices are written as ``nodes``, edges as ``beams`` and triangular
    faces as ``cab`` entries.  Per-element option/default strings are read
    from the mesh's bmesh custom string layers ("options" / "defaults"),
    and vertex-group membership is emitted as ``;grp:`` comment lines.
    Any previously imported truck text stored on the active object
    (``RoRTruckFile`` / ``RoRInsertIndices`` properties) is interleaved
    around the generated sections.
    """
    bl_idname = "export_truck.truck"
    bl_label = "Export RoR Truck"
    filename_ext = ""
    filter_glob : StringProperty(
        default="*.truck;*.trailer;*.load;*.car;*.boat;*.airplane;*.train;*.machine;*.fixed",
        options={'HIDDEN'},
    )
    filepath : bpy.props.StringProperty(subtype="FILE_PATH")

    def execute(self, context):
        nodes = []
        beams = []
        cabs = []
        # Only the first selected object is exported.
        for obj in context.selected_objects[:1]:
            if obj.type != 'MESH':
                continue
            # Toggle through OBJECT mode so pending edit-mode changes are
            # flushed into obj.data, then re-enter EDIT mode so the bmesh
            # custom string layers can be read.
            current_mode = bpy.context.object.mode
            bpy.ops.object.mode_set(mode="OBJECT")
            bpy.ops.object.mode_set(mode="EDIT")
            bm = bmesh.from_edit_mesh(obj.data)
            group_names = {group.index : group.name for group in obj.vertex_groups}
            # Field width needed to right-align the highest node index.
            node_digits = len(str(len(obj.data.vertices) - 1))

            # --- nodes: one entry per vertex (axis order y, z, x) ---
            format_string = '{:'+str(node_digits)+'d}, {: 8.3f}, {: 8.3f}, {: 8.3f}'
            defaults_key = bm.verts.layers.string.get("defaults")
            options_key = bm.verts.layers.string.get("options")
            bm.verts.ensure_lookup_table()
            for v, bv in zip(obj.data.vertices, bm.verts):
                defaults = ''
                if defaults_key:
                    defaults = bv[defaults_key].decode()
                options = ''
                if options_key:
                    options = bv[options_key].decode()
                if not options:
                    options = 'n'  # fallback node option
                groups = [group_names[g.group] for g in v.groups]
                nodes.append([format_string.format(v.index, v.co[1], v.co[2], v.co[0]), options, groups, defaults])

            # --- beams: one entry per edge ---
            format_string = '{:'+str(node_digits)+'d}, {:'+str(node_digits)+'d}'
            defaults_key = bm.edges.layers.string.get("defaults")
            options_key = bm.edges.layers.string.get("options")
            bm.edges.ensure_lookup_table()
            for e, be in zip(obj.data.edges, bm.edges):
                defaults = ''
                if defaults_key:
                    defaults = be[defaults_key].decode()
                options = ''
                if options_key:
                    options = be[options_key].decode()
                if not options:
                    options = 'v'  # fallback beam option
                # Vertex-group ids of both endpoints, sorted so grouping is
                # independent of edge orientation.
                ids = sorted([[g.group for g in obj.data.vertices[e.vertices[i]].groups] for i in [0, 1]])
                vg1, vg2 = [[group_names[g] for g in ids[i]] for i in [0, 1]]
                groups = vg1 if vg1 == vg2 else [', '.join(vg1)] + [">"] + [', '.join(vg2)]
                beams.append([ids, groups, format_string.format(e.vertices[0], e.vertices[1]), options, defaults])

            # --- cab: one entry per triangular face (quads/ngons skipped) ---
            format_string = '{:'+str(node_digits)+'d}, {:'+str(node_digits)+'d}, {:'+str(node_digits)+'d}'
            options_key = bm.faces.layers.string.get("options")
            bm.faces.ensure_lookup_table()
            for p, bp in zip(obj.data.polygons, bm.faces):
                if len(p.vertices) == 3:
                    options = ''
                    if options_key:
                        options = bp[options_key].decode()
                    if not options:
                        options = 'c'  # fallback cab option
                    cabs.append([format_string.format(p.vertices[0], p.vertices[1], p.vertices[2]), options])
            bpy.ops.object.mode_set(mode=current_mode)
            bm.free()

        # Surrounding truck text and the three section insertion points are
        # stored as JSON in custom properties of the active object.
        truckfile = []
        indices = [0, 0, 0]
        try:
            truckfile = json.loads(bpy.context.active_object.RoRTruckFile)
            indices = json.loads(bpy.context.active_object.RoRInsertIndices)
        except (AttributeError, ValueError):
            # Properties are absent or hold malformed JSON (JSONDecodeError
            # is a ValueError subclass): export into an empty truck file.
            pass
        with open(self.filepath, 'w') as f:
            for line in truckfile[:indices[0]]:
                print (line, file=f)
            print("nodes", file=f)
            defaults = ''
            vertex_groups = []
            for n in sorted(nodes):
                # Emit a defaults line / ;grp: comment whenever they change.
                if n[-1] and n[-1] != defaults:
                    defaults = n[-1]
                    print (defaults, file=f)
                if n[-2] != vertex_groups:
                    vertex_groups = n[-2]
                    print (";grp:", ', '.join(vertex_groups), file=f)
                print (*n[:-2], sep=', ', file=f)
            lines = truckfile[indices[0]:indices[1]]
            if not lines:
                lines = ['']
            for line in lines:
                print (line, file=f)
            print("beams", file=f)
            edge_groups = []
            for b in sorted(beams):
                if b[-1] and b[-1] != defaults:
                    defaults = b[-1]
                    print (defaults, file=f)
                if b[1] != edge_groups:
                    edge_groups = b[1]
                    print (";grp:", *edge_groups, file=f)
                print (*b[2:-1], sep=', ', file=f)
            lines = truckfile[indices[1]:indices[2]]
            if not lines:
                lines = ['']
            for line in lines:
                print (line, file=f)
            if cabs:
                print ("cab", file=f)
                for c in cabs:
                    print (*c, sep=', ', file=f)
            for line in truckfile[indices[2]:]:
                print (line, file=f)
        return {'FINISHED'}
| gpl-3.0 |
ikvk/imap_tools | tests/messages_data/error_emails/bad_subject.py | 1 | 3374 | import datetime
# Expected parse results for the matching .eml fixture: a message whose
# Subject header is built from many consecutive RFC 2047 encoded-words,
# exercising imap_tools' header decoding.  Each key mirrors an attribute
# of the parsed message object the tests compare against.
DATA = dict(
    subject='MySurvey.com: You have a survey waiting! 91123105',
    from_='carol@mysurvey.com',
    to=('someone@aol.com',),
    cc=(),
    bcc=(),
    reply_to=('carol@reply.mysurvey.com',),
    date=datetime.datetime(2010, 12, 15, 12, 21, 20, tzinfo=datetime.timezone(datetime.timedelta(-1, 68400))),
    date_str='Wed, 15 Dec 2010 12:21:20 -0500 ',
    text="You have a survey waiting!\r\n\r\n\r\nTo take the survey:\r\n\r\n\r\n==================================================================\r\nPlease do not reply to this email, as we do not process emails sent to this address. To view FAQ's or to contact us, please go to our http://www.mysurvey.com/index.cfm?action=Main.lobbyGeneral&MyContent=contact page. \r\n================================================================== \r\nYou received this email because you (or someone in your household) registered to be a MySurvey.com member. Being a MySurvey.com member means receiving periodic email invitations to give your opinions via e-surveys as well as being eligible for special projects and product tests. If you wish to be removed from the MySurvey.com panel, please click here to http://www.mysurvey.com/index.cfm?action=Main.lobbyGeneral&myContent=unsubscribes.\r\n==================================================================\r\n",
    html='<center>\r\nhello world\r\n</center>\r\n<IMG SRC="http://mailcenterus.mysurvey.com/gems_open_tracking.cfm?indid=99323446&cmpid=10000012106&r=9772160&rundate=15-DEC-2010+12%3a15%3a09&z=435ED3AE69D35EB44716E94814CD11A9"border="0" width="1" height="1">\r\n',
    headers={'received': ('from survey1usmta.mysurvey.com (survey1usmta.mysurvey.com [198.178.238.149])\r\n\tby mtain-dh02.r1000.mx.aol.com (Internet Inbound) with ESMTP id 35AF9380001BD\r\n\tfor <jjffddjkl161@aol.com>; Wed, 15 Dec 2010 12:22:13 -0500 (EST)', 'from 172.30.44.41 (172.30.44.57) by survey1usmta.mysurvey.com (PowerMTA(TM) v3.5r15) id h13ska0ko6cn for <jjffddjkl161@aol.com>; Wed, 15 Dec 2010 12:21:20 -0500 (envelope-from <carol-jjffddjkl161=aol.com@mysurvey.com>)'), 'from': ('=?UTF-8?B?TXlTdXJ2ZXk=?=\r\n =?UTF-8?B?LmNvbSAmIEM=?=\r\n =?UTF-8?B?YXJvbCBBZGE=?=\r\n =?UTF-8?B?bXM=?=\r\n <carol@mysurvey.com>',), 'reply-to': ('carol@reply.mysurvey.com',), 'to': ('someone@aol.com',), 'date': ('Wed, 15 Dec 2010 12:21:20 -0500 ',), 'subject': ('=?UTF-8?B?TXlTdXJ2ZXk=?=\r\n =?UTF-8?B?LmNvbTogIFk=?=\r\n =?UTF-8?B?b3UgaGF2ZSA=?=\r\n =?UTF-8?B?YSBzdXJ2ZXk=?=\r\n =?UTF-8?B?IHdhaXRpbmc=?=\r\n =?UTF-8?B?ISAg?=\r\n =?UTF-8?Q?91123105?=\r\n =?UTF-8?B??=\r\n ',), 'mime-version': ('1.0 ',), 'content-type': ('multipart/alternative;boundary="----=_Layout_Part_DC7E1BB5_1105_4DB3_BAE3_2A6208EB099A"',), 'x-aol-global-disposition': ('G',), 'x-aol-scoll-score': ('1:2:376293952:93952408 ',), 'x-aol-scoll-url_count': ('5 ',), 'x-aol-sid': ('3039ac1d41164d08f9453480',), 'x-aol-ip': ('198.178.238.149',), 'x-aol-spf': ('domain : mysurvey.com SPF : pass',)},
    attachments=[],
    from_values={'email': 'carol@mysurvey.com', 'name': 'MySurvey.com & Carol Adams', 'full': 'MySurvey.com & Carol Adams <carol@mysurvey.com>'},
    to_values=({'email': 'someone@aol.com', 'name': '', 'full': 'someone@aol.com'},),
    cc_values=(),
    bcc_values=(),
    reply_to_values=({'email': 'carol@reply.mysurvey.com', 'name': '', 'full': 'carol@reply.mysurvey.com'},),
)
jabber-at/gajim | src/common/protocol/caps.py | 1 | 4807 | # -*- coding:utf-8 -*-
## src/common/protocol/caps.py
##
## Copyright (C) 2009 Stephan Erb <steve-e AT h3c.de>
##
## This file is part of Gajim.
##
## Gajim is free software; you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published
## by the Free Software Foundation; version 3 only.
##
## Gajim is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Gajim. If not, see <http://www.gnu.org/licenses/>.
##
"""
Module containing the network portion of XEP-115 (Entity Capabilities)
"""
import logging
log = logging.getLogger('gajim.c.p.caps')
from common import gajim
from common import ged
from common.connection_handlers_events import CapsPresenceReceivedEvent, \
CapsDiscoReceivedEvent, CapsReceivedEvent
class ConnectionCaps(object):
    """Account-level network handler for XEP-0115 (Entity Capabilities).

    Reacts to caps advertisements received in presence and to the
    disco#info replies used to validate unknown caps hashes, keeping the
    contacts' client_caps objects and the shared caps cache up to date.
    """

    def __init__(self, account, capscache, client_caps_factory):
        self._account = account
        self._capscache = capscache
        self._create_suitable_client_caps = client_caps_factory
        gajim.nec.register_incoming_event(CapsPresenceReceivedEvent)
        gajim.nec.register_incoming_event(CapsReceivedEvent)
        gajim.ged.register_event_handler('caps-presence-received', ged.GUI1,
            self._nec_caps_presence_received)
        gajim.ged.register_event_handler('agent-info-received', ged.GUI1,
            self._nec_agent_info_received_caps)

    def cleanup(self):
        """Detach the event handlers registered in __init__."""
        gajim.ged.remove_event_handler('caps-presence-received', ged.GUI1,
            self._nec_caps_presence_received)
        gajim.ged.remove_event_handler('agent-info-received', ged.GUI1,
            self._nec_agent_info_received_caps)

    def caps_change_account_name(self, new_name):
        """Track an account rename so later events are matched correctly."""
        self._account = new_name

    def _nec_caps_presence_received(self, obj):
        """Handle caps data carried in a presence stanza."""
        if obj.conn.name != self._account:
            return
        obj.client_caps = self._create_suitable_client_caps(obj.node,
            obj.caps_hash, obj.hash_method, obj.fjid)
        if obj.show == 'offline' and obj.client_caps._hash_method == 'no':
            # Hash-less caps of a contact going offline are worthless:
            # drop them and fall back to caps not bound to the full jid.
            self._capscache.forget_caps(obj.client_caps)
            obj.client_caps = self._create_suitable_client_caps(obj.node,
                obj.caps_hash, obj.hash_method)
        else:
            self._capscache.query_client_of_jid_if_unknown(self, obj.fjid,
                obj.client_caps)
        self._update_client_caps_of_contact(obj)

    def _update_client_caps_of_contact(self, obj):
        contact = self._get_contact_or_gc_contact_for_jid(obj.fjid)
        if contact:
            contact.client_caps = obj.client_caps
        else:
            # Lazy %-style args: only formatted if the record is emitted.
            log.info('Received Caps from unknown contact %s', obj.fjid)

    def _get_contact_or_gc_contact_for_jid(self, jid):
        """Return the roster contact for jid, falling back to groupchat."""
        contact = gajim.contacts.get_contact_from_full_jid(self._account, jid)
        if contact is None:
            room_jid, nick = gajim.get_room_and_nick_from_fjid(jid)
            contact = gajim.contacts.get_gc_contact(self._account, room_jid, nick)
        return contact

    def _nec_agent_info_received_caps(self, obj):
        """
        callback to update our caps cache with queried information after
        we have retrieved an unknown caps hash and issued a disco
        """
        if obj.conn.name != self._account:
            return
        contact = self._get_contact_or_gc_contact_for_jid(obj.fjid)
        if not contact:
            log.info('Received Disco from unknown contact %s', obj.fjid)
            return
        lookup = contact.client_caps.get_cache_lookup_strategy()
        cache_item = lookup(self._capscache)
        if cache_item.is_valid():
            # we already know that the hash is fine and have already cached
            # the identities and features
            return
        validate = contact.client_caps.get_hash_validation_strategy()
        hash_is_valid = validate(obj.identities, obj.features, obj.data)
        if hash_is_valid:
            cache_item.set_and_store(obj.identities, obj.features)
        else:
            # Advertised and computed hashes disagree: distrust the caps
            # and use hash-less caps for this contact instead.
            caps_hash = hash_method = None
            contact.client_caps = self._create_suitable_client_caps(
                obj.node, caps_hash, hash_method)
            log.info('Computed and retrieved caps hash differ. '
                'Ignoring caps of contact %s', contact.get_full_jid())
        gajim.nec.push_incoming_event(CapsDiscoReceivedEvent(None,
            conn=self, fjid=obj.fjid, jid=obj.jid, resource=obj.resource,
            client_caps=contact.client_caps))
| gpl-3.0 |
BorgERP/borg-erp-6of3 | base/base_base/ir/TODO/smile_utils/decorators.py | 1 | 2799 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2011-2012 Casden (<http://www.casden.fr>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import inspect
def decorate_methods(decorator):
    """Return a meta-decorator that wraps ORM model methods in-place.

    A registry of already-decorated methods is attached to *decorator*
    so that a method is never wrapped twice: re-decorating first
    restores the original method, then applies the decorator again.
    """
    if not hasattr(decorator, '_decorated_methods'):
        # model_name -> {method_name: original (pre-decoration) method}
        decorator._decorated_methods = {}

    def meta_decorator(methods_to_decorate):
        # methods_to_decorate: iterable of (model_instance, method_name)
        # pairs; duplicates are dropped (and order is not preserved).
        for model, method_name in list(set(methods_to_decorate)):
            model_class = model.__class__
            if hasattr(model_class, method_name):
                model_name = model._name
                method = getattr(model_class, method_name)
                if method_name in decorator._decorated_methods.get(model_name, {}):
                    # Undo the previous decoration before re-decorating
                    # (im_class is the Python 2 bound-method attribute).
                    original_method = decorator._decorated_methods[model_name][method_name]
                    setattr(original_method.im_class, method_name, original_method)
                decorator._decorated_methods.setdefault(model_name, {})[method_name] = method
                setattr(model_class, method_name, decorator(method))
        return True
    return meta_decorator
def _get_kwargs(method, args, kwargs):
my_kwargs = kwargs and kwargs.copy() or {}
argument_names = inspect.getargspec(method)[0]
my_kwargs.update({}.fromkeys(argument_names, False))
for index, arg in enumerate(argument_names):
if index < len(args):
my_kwargs[arg] = args[index]
if 'context' not in my_kwargs or not isinstance(my_kwargs['context'], dict):
my_kwargs['context'] = {}
return my_kwargs
def get_method_args(method, args, kwargs):
    """Extract the classic OpenERP ``(obj, cr, uid, ids, context)``
    quintet from an arbitrary ORM method call."""
    my_kwargs = _get_kwargs(method, args, kwargs)
    obj = my_kwargs['self']
    # Accept both historical spellings of the cursor / user parameters.
    cr = my_kwargs.get('cr') or my_kwargs.get('cursor')
    uid = my_kwargs.get('uid') or my_kwargs.get('user')
    ids = my_kwargs.get('ids') or my_kwargs.get('id') or my_kwargs.get('id_') or []
    # Normalise a single id to a list (Python 2: ``long`` still exists).
    if isinstance(ids, (int, long)):
        ids = [ids]
    context = my_kwargs['context']
    return obj, cr, uid, ids, context
| agpl-3.0 |
denys-duchier/kivy | kivy/uix/splitter.py | 1 | 13143 | '''Splitter
======
.. versionadded:: 1.5.0
.. image:: images/splitter.jpg
:align: right
The :class:`Splitter` is a widget that helps you re-size it's child
widget/layout by letting you re-size it via dragging the boundary or
double tapping the boundary. This widget is similar to the
:class:`~kivy.uix.scrollview.ScrollView` in that it allows only one
child widget.
Usage::
splitter = Splitter(sizable_from = 'right')
splitter.add_widget(layout_or_widget_instance)
splitter.min_size = 100
splitter.max_size = 250
To change the size of the strip/border used for resizing::
splitter.strip_size = '10pt'
To change its appearance::
splitter.strip_cls = your_custom_class
You can also change the appearance of the `strip_cls`, which defaults to
:class:`SplitterStrip`, by overriding the `kv` rule in your app::
<SplitterStrip>:
horizontal: True if self.parent and self.parent.sizable_from[0] \
in ('t', 'b') else False
background_normal: 'path to normal horizontal image' \
if self.horizontal else 'path to vertical normal image'
background_down: 'path to pressed horizontal image' \
if self.horizontal else 'path to vertical pressed image'
'''
__all__ = ('Splitter', )
from kivy.compat import string_types
from kivy.factory import Factory
from kivy.uix.button import Button
from kivy.properties import (OptionProperty, NumericProperty, ObjectProperty,
ListProperty, BooleanProperty)
from kivy.uix.boxlayout import BoxLayout
class SplitterStrip(Button):
    '''Class used for the graphical representation of a
    :class:`kivy.uix.splitter.Splitter` resize strip (the draggable
    border).
    '''
    pass
class Splitter(BoxLayout):
    '''See module documentation.

    :Events:
        `on_press`:
            Fired when the splitter is pressed.
        `on_release`:
            Fired when the splitter is released.

    .. versionchanged:: 1.6.0
        Added `on_press` and `on_release` events.
    '''

    border = ListProperty([4, 4, 4, 4])
    '''Border used for the
    :class:`~kivy.graphics.vertex_instructions.BorderImage`
    graphics instruction.

    This must be a list of four values: (top, right, bottom, left).
    Read the BorderImage instructions for more information about how
    to use it.

    :attr:`border` is a :class:`~kivy.properties.ListProperty` and
    defaults to (4, 4, 4, 4).
    '''

    strip_cls = ObjectProperty(SplitterStrip)
    '''Specifies the class of the resize Strip.

    :attr:`strip_cls` is an :class:`kivy.properties.ObjectProperty` and
    defaults to :class:`~kivy.uix.splitter.SplitterStrip`, which is of type
    :class:`~kivy.uix.button.Button`.

    .. versionchanged:: 1.8.0
        If you set a string, the :class:`~kivy.factory.Factory` will be used to
        resolve the class.
    '''

    sizable_from = OptionProperty('left', options=(
        'left', 'right', 'top', 'bottom'))
    '''Specifies whether the widget is resizable. Options are::

        `left`, `right`, `top` or `bottom`

    :attr:`sizable_from` is an :class:`~kivy.properties.OptionProperty`
    and defaults to `left`.
    '''

    strip_size = NumericProperty('10pt')
    '''Specifies the size of resize strip

    :attr:`strip_size` is a :class:`~kivy.properties.NumericProperty`
    defaults to `10pt`
    '''

    min_size = NumericProperty('100pt')
    '''Specifies the minimum size beyond which the widget is not resizable.

    :attr:`min_size` is a :class:`~kivy.properties.NumericProperty` and
    defaults to `100pt`.
    '''

    max_size = NumericProperty('500pt')
    '''Specifies the maximum size beyond which the widget is not resizable.

    :attr:`max_size` is a :class:`~kivy.properties.NumericProperty`
    and defaults to `500pt`.
    '''

    _parent_proportion = NumericProperty(0.)
    '''(internal) Specifies the distance that the slider has travelled
    across its parent, used to automatically maintain a sensible
    position if the parent is resized.

    :attr:`_parent_proportion` is a
    :class:`~kivy.properties.NumericProperty` and defaults to 0.

    .. versionadded:: 1.9.0
    '''

    _bound_parent = ObjectProperty(None, allownone=True)
    '''(internal) References the widget whose size is currently being
    tracked by :attr:`_parent_proportion`.

    :attr:`_bound_parent` is a
    :class:`~kivy.properties.ObjectProperty` and defaults to None.

    .. versionadded:: 1.9.0
    '''

    keep_within_parent = BooleanProperty(False)
    '''If True, will limit the splitter to stay within its parent widget.

    :attr:`keep_within_parent` is a
    :class:`~kivy.properties.BooleanProperty` and defaults to False.

    .. versionadded:: 1.9.0
    '''

    rescale_with_parent = BooleanProperty(False)
    '''If True, will automatically change size to take up the same
    proportion of the parent widget when it is resized, while
    staying within :attr:`min_size` and :attr:`max_size`. As long as
    these attributes can be satisfied, this stops the
    :class:`Splitter` from exceeding the parent size during rescaling.

    :attr:`rescale_with_parent` is a
    :class:`~kivy.properties.BooleanProperty` and defaults to False.

    .. versionadded:: 1.9.0
    '''

    __events__ = ('on_press', 'on_release')

    def __init__(self, **kwargs):
        self._container = None
        self._strip = None
        super(Splitter, self).__init__(**kwargs)

        # Re-clamp whenever the limits change and re-hook the parent
        # size tracking on re-parenting (fast_bind skips event objects).
        do_size = self._do_size
        fbind = self.fast_bind
        fbind('max_size', do_size)
        fbind('min_size', do_size)
        fbind('parent', self._rebind_parent)

    def on_sizable_from(self, instance, sizable_from):
        # Property handler: (re)create, orient and (re)insert the drag
        # strip whenever `sizable_from` changes.  Written against
        # `instance` (== self here, since it is an on_<prop> handler).
        if not instance._container:
            return

        sup = super(Splitter, instance)

        _strp = instance._strip
        if _strp:
            # remove any previous binds
            _strp.unbind(on_touch_down=instance.strip_down)
            _strp.unbind(on_touch_move=instance.strip_move)
            _strp.unbind(on_touch_up=instance.strip_up)
            self.unbind(disabled=_strp.setter('disabled'))

            sup.remove_widget(instance._strip)
        else:
            # Lazily instantiate the strip; strings are resolved via
            # the Factory (see strip_cls docs).
            cls = instance.strip_cls
            if isinstance(cls, string_types):
                cls = Factory.get(cls)
            instance._strip = _strp = cls()

        sz_frm = instance.sizable_from[0]
        if sz_frm in ('l', 'r'):
            _strp.size_hint = None, 1
            _strp.width = instance.strip_size
            instance.orientation = 'horizontal'
            instance.unbind(strip_size=_strp.setter('width'))
            instance.bind(strip_size=_strp.setter('width'))
        else:
            _strp.size_hint = 1, None
            _strp.height = instance.strip_size
            instance.orientation = 'vertical'
            instance.unbind(strip_size=_strp.setter('height'))
            instance.bind(strip_size=_strp.setter('height'))

        # The strip goes before or after the container depending on
        # which edge it lives on.
        index = 1
        if sz_frm in ('r', 'b'):
            index = 0
        sup.add_widget(_strp, index)

        _strp.bind(on_touch_down=instance.strip_down)
        _strp.bind(on_touch_move=instance.strip_move)
        _strp.bind(on_touch_up=instance.strip_up)
        _strp.disabled = self.disabled
        self.bind(disabled=_strp.setter('disabled'))

    def add_widget(self, widget, index=0):
        if self._container or not widget:
            # NOTE(review): this *returns* an Exception instead of
            # raising it, so a second add_widget() is effectively a
            # silent no-op for the caller -- confirm before changing.
            return Exception('Splitter accepts only one Child')
        self._container = widget
        sz_frm = self.sizable_from[0]
        if sz_frm in ('l', 'r'):
            widget.size_hint_x = 1
        else:
            widget.size_hint_y = 1
        # The caller-supplied index is ignored; placement is dictated
        # by the strip's edge.
        index = 0
        if sz_frm in ('r', 'b'):
            index = 1
        super(Splitter, self).add_widget(widget, index)
        self.on_sizable_from(self, self.sizable_from)

    def remove_widget(self, widget, *largs):
        super(Splitter, self).remove_widget(widget)
        if widget == self._container:
            self._container = None

    def clear_widgets(self):
        # NOTE(review): signature differs from Widget.clear_widgets();
        # only the single container child is removed here.
        self.remove_widget(self._container)

    def strip_down(self, instance, touch):
        if not instance.collide_point(*touch.pos):
            return False
        # Claim the touch so subsequent move/up events are routed to us.
        touch.grab(self)
        self.dispatch('on_press')

    def on_press(self):
        # Default handler for the `on_press` event (no-op).
        pass

    def _rebind_parent(self, instance, new_parent):
        # Follow re-parenting: track the new parent's size so the
        # splitter can keep its relative proportion.
        if self._bound_parent is not None:
            self._bound_parent.unbind(size=self.rescale_parent_proportion)
        if self.parent is not None:
            new_parent.bind(size=self.rescale_parent_proportion)
        self._bound_parent = new_parent

        self.rescale_parent_proportion()

    def rescale_parent_proportion(self, *args):
        if self.rescale_with_parent:
            # Restore the stored fraction of the parent extent, clamped
            # to [min_size, max_size].
            parent_proportion = self._parent_proportion
            if self.sizable_from in ('top', 'bottom'):
                new_height = parent_proportion * self.parent.height
                self.height = max(self.min_size, min(new_height, self.max_size))
            else:
                new_width = parent_proportion * self.parent.width
                self.width = max(self.min_size, min(new_width, self.max_size))

    def _do_size(self, instance, value):
        # Clamp the current extent whenever min_size/max_size change.
        if self.sizable_from[0] in ('l', 'r'):
            self.width = max(self.min_size, min(self.width, self.max_size))
        else:
            self.height = max(self.min_size, min(self.height, self.max_size))

    def strip_move(self, instance, touch):
        # Resize by the touch delta along the relevant axis, optionally
        # clipping the dragged edge to the parent's bounds, then store
        # the resulting size as a fraction of the parent.
        if touch.grab_current is not instance:
            return False
        max_size = self.max_size
        min_size = self.min_size
        sz_frm = self.sizable_from[0]

        if sz_frm in ('t', 'b'):
            diff_y = (touch.dy)
            if self.keep_within_parent:
                if sz_frm == 't' and (self.top + diff_y) > self.parent.top:
                    diff_y = self.parent.top - self.top
                elif sz_frm == 'b' and (self.y + diff_y) < self.parent.y:
                    diff_y = self.parent.y - self.y
            # Dragging the bottom edge down grows the widget.
            if sz_frm == 'b':
                diff_y *= -1
            # Manual size takes over: drop the size hint.
            if self.size_hint_y:
                self.size_hint_y = None
            if self.height > 0:
                self.height += diff_y
            else:
                self.height = 1

            height = self.height
            self.height = max(min_size, min(height, max_size))

            self._parent_proportion = self.height / self.parent.height
        else:
            diff_x = (touch.dx)
            if self.keep_within_parent:
                if sz_frm == 'l' and (self.x + diff_x) < self.parent.x:
                    diff_x = self.parent.x - self.x
                elif (sz_frm == 'r' and
                        (self.right + diff_x) > self.parent.right):
                    diff_x = self.parent.right - self.right
            if sz_frm == 'l':
                diff_x *= -1
            if self.size_hint_x:
                self.size_hint_x = None
            if self.width > 0:
                self.width += diff_x
            else:
                self.width = 1

            width = self.width
            self.width = max(min_size, min(width, max_size))

            self._parent_proportion = self.width / self.parent.width

    def strip_up(self, instance, touch):
        if touch.grab_current is not instance:
            return

        if touch.is_double_tap:
            # Double tap snaps to whichever limit is further away from
            # the current size.
            max_size = self.max_size
            min_size = self.min_size
            sz_frm = self.sizable_from[0]
            s = self.size

            if sz_frm in ('t', 'b'):
                if self.size_hint_y:
                    self.size_hint_y = None
                if s[1] - min_size <= max_size - s[1]:
                    self.height = max_size
                else:
                    self.height = min_size
            else:
                if self.size_hint_x:
                    self.size_hint_x = None
                if s[0] - min_size <= max_size - s[0]:
                    self.width = max_size
                else:
                    self.width = min_size
        touch.ungrab(instance)
        self.dispatch('on_release')

    def on_release(self):
        # Default handler for the `on_release` event (no-op).
        pass
if __name__ == '__main__':
    from kivy.app import App
    from kivy.uix.button import Button
    from kivy.uix.floatlayout import FloatLayout

    class SplitterApp(App):
        '''Demo: three splitters sizable from different edges.'''

        def build(self):
            root = FloatLayout()

            top_box = BoxLayout()
            top_box.add_widget(Button())
            top_box.add_widget(Button())

            right_box = BoxLayout()
            for _ in range(3):
                right_box.add_widget(Button())

            # Top strip of the screen, resized by dragging its bottom.
            top_splitter = Splitter(
                size_hint=(1, .25),
                pos_hint={'top': 1},
                sizable_from='bottom')

            # Nested splitter inside the top box, dragged from its left.
            inner_splitter = Splitter(
                sizable_from='left',
                size_hint=(None, 1), width=90)
            inner_splitter.add_widget(Button())
            top_box.add_widget(inner_splitter)
            top_splitter.add_widget(top_box)

            # Left column, resized from its right edge.
            right_splitter = Splitter(size_hint=(.25, 1))
            right_splitter.add_widget(right_box)
            right_splitter.sizable_from = 'right'

            root.add_widget(top_splitter)
            root.add_widget(right_splitter)
            return root

    SplitterApp().run()
| mit |
slideinc/gogreen | gogreen/fileobject.py | 1 | 13038 | #!/usr/bin/env python
# -*- Mode: Python; tab-width: 4 -*-
# Copyright (c) 2005-2010 Slide, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of the author nor the names of other
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
'''fileobject
emulate a fileobject using a seperate process to handle IO for coroutine
concurrency
Written by Libor Michalek. 2006
'''
import coro
import corofile
import coroqueue
# corofile.emulate_popen2()
import os
import sys
import struct
import exceptions
import pickle
import signal
import weakref
import signal
# File object state flags.
STAUS_OPENED = 1  # NOTE(review): 'STAUS' (sic) kept; renaming breaks importers
STAUS_CLOSED = 0

# Wire protocol: every message is an 8-byte header of two network-order
# 32-bit ints (command/response id, payload length) followed by the
# payload bytes.
COMMAND_SIZE = 8 # cmd is 8 bytes, four for identifier and four for length

COMMAND_OPEN = 0
COMMAND_CLOSE = 1
COMMAND_READ = 2
COMMAND_WRITE = 3
COMMAND_FLUSH = 4
COMMAND_SEEK = 5
COMMAND_STAT = 6
COMMAND_MKDIRS = 7
COMMAND_CHMOD = 8
COMMAND_UNLINK = 9
COMMAND_UTIME = 10
COMMAND_TELL = 11
COMMAND_READLN = 12

# Maps a command id to the FileObjectHandler method that services it.
COMMAND_MAP = {
    COMMAND_OPEN: 'open',
    COMMAND_CLOSE: 'close',
    COMMAND_READ: 'read',
    COMMAND_WRITE: 'write',
    COMMAND_FLUSH: 'flush',
    COMMAND_SEEK: 'seek',
    COMMAND_STAT: 'stat',
    COMMAND_MKDIRS: 'makedirs',
    COMMAND_CHMOD: 'chmod',
    COMMAND_UNLINK: 'unlink',
    COMMAND_UTIME: 'utime',
    COMMAND_TELL: 'tell',
    COMMAND_READLN: 'readline',
    }

RESPONSE_SUCCESS = 100
RESPONSE_ERROR = 101

BUFFER_READ_SIZE = 32*1024        # chunk size the child reads per request
BUFFER_WRITE_SIZE = 16*1024*1024  # max payload per WRITE command

DEFAULT_QUEUE_SIZE = 16           # default pooled child processes

NOWAIT = False                    # set by nowait(); skips os.wait() in __del__
class CoroFileObjectError (exceptions.Exception):
    # Raised when the child I/O process misbehaves (e.g. the pipe is
    # closed before a full response arrives).
    pass
class CoroFileObject(object):
    """File-like object whose blocking I/O is delegated to a child
    process over a pipe, so coroutines are not blocked.

    The child runs FileObjectHandler (this module re-executed as a
    script) and speaks the 8-byte-header framing protocol defined by
    the COMMAND_*/RESPONSE_* constants above.
    """

    def __init__(self):
        # popen2 returns (child stdin, child stdout); the child is this
        # very module run as a script (.pyc mapped back to the .py).
        self._stdi, self._stdo = os.popen2([
            os.path.realpath(__file__.replace('.pyc', '.py'))])
        self._data = ''  # local read-ahead buffer
        self._status = STAUS_CLOSED
        self.name = None

    def __del__(self):
        if self._status == STAUS_OPENED:
            self.close(return_to_queue = False)

        if self._stdi: self._stdi.close()
        if self._stdo: self._stdo.close()
        # Reap the child unless nowait() disabled SIGCHLD handling.
        if not NOWAIT: os.wait()

    def _cmd_(self, cmd, data = ''):
        # Send one framed command and return the (possibly empty) reply
        # payload; a pickled exception reply is re-raised here.
        self._stdi.write(struct.pack('!ii', cmd, len(data)))
        self._stdi.write(data)
        self._stdi.flush()

        data = self._stdo.read(COMMAND_SIZE)
        if not data:
            raise CoroFileObjectError('No command from child')

        response, size = struct.unpack('!ii', data)
        if size:
            data = self._stdo.read(size)
        else:
            data = None

        if response == RESPONSE_ERROR:
            raise pickle.loads(data)
        else:
            return data

    def open(self, name, mode = 'r'):
        # Re-opening implicitly closes any file already open on this
        # child process.
        if self._status == STAUS_OPENED:
            self.close()

        result = self._cmd_(COMMAND_OPEN, '%s\n%s' % (name, mode))
        self._status = STAUS_OPENED
        self.name = name
        return result

    def read(self, bytes = -1):
        # Pull chunks from the child into the local buffer until enough
        # is available, or the child signals EOF with a None payload.
        while bytes < 0 or bytes > len(self._data):
            result = self._cmd_(COMMAND_READ)
            if result is None:
                break
            self._data += result
        #
        # either have all the data we want or file EOF
        #
        if bytes < 0:
            # Return all remaining data
            data = self._data
            self._data = ''
        else:
            data = self._data[:bytes]
            self._data = self._data[bytes:]
        return data

    def readline(self, bytes = -1):
        while bytes < 0 or bytes > len(self._data):
            result = self._cmd_(COMMAND_READLN)
            if result is None:
                break
            self._data += result
            # A chunk ending in a newline completes the line.
            if result[-1] == '\n':
                break
        #
        # either have all the data we want or file EOF
        #
        if bytes < 0:
            # Return all remaining data
            data = self._data
            self._data = ''
        else:
            data = self._data[:bytes]
            self._data = self._data[bytes:]
        return data

    def write(self, data):
        # Chunk writes so one huge buffer cannot starve other coroutines.
        while data:
            self._cmd_(COMMAND_WRITE, data[:BUFFER_WRITE_SIZE])
            data = data[BUFFER_WRITE_SIZE:]

    def seek(self, offset):
        # The read-ahead buffer is stale after a seek.
        self._data = ''
        return self._cmd_(COMMAND_SEEK, struct.pack('!i', offset))

    def tell(self):
        return int(self._cmd_(COMMAND_TELL))

    def flush(self):
        return self._cmd_(COMMAND_FLUSH)

    def close(self, return_to_queue = True):
        """close closes this fileobject

        NB: The return_to_queue parameter is ignored. It is required
        for interface compatability with the AutoCleanFileOjbect subclass.
        """
        self._data = ''
        self._status = STAUS_CLOSED
        return self._cmd_(COMMAND_CLOSE)
    #
    # non-standard extensions
    #
    def stat(self, path):
        # Child replies with repr() of the stat args tuple; rebuild a
        # real os.stat_result locally.
        args = eval(self._cmd_(COMMAND_STAT, path))
        return os._make_stat_result(*args)

    def makedirs(self, path):
        return eval(self._cmd_(COMMAND_MKDIRS, path))

    def chmod(self, path, mode):
        return eval(self._cmd_(COMMAND_CHMOD, '%s\n%d' % (path, mode)))

    def unlink(self, path):
        return eval(self._cmd_(COMMAND_UNLINK, path))

    def utime(self, path, value = None):
        return eval(self._cmd_(COMMAND_UTIME, '%s\n%s' % (path, str(value))))
class AutoCleanFileObject(CoroFileObject):
    """CoroFileObject variant whose close() hands the object back to
    the module-level filequeue by default."""

    def close(self, return_to_queue = True):
        """Close the file and, unless told otherwise, recycle self.

        return_to_queue: when True (the default) the closed object is
        returned to the shared filequeue for reuse.
        """
        result = super(AutoCleanFileObject, self).close()
        if return_to_queue:
            filequeue.put(self)
        return result
class FileObjectHandler(object):
    """Child-process side of the protocol: reads framed commands from
    stdin, performs the real (blocking) file operation and writes a
    framed response back to stdout."""

    def __init__(self, stdin, stdout):
        self._stdi = stdin
        self._stdo = stdout
        self._fd = None

    def open(self, data):
        # payload is "<name>\n<mode>"
        name, mode = data.split('\n')
        self._fd = file(name, mode, BUFFER_READ_SIZE)

    def close(self, data):
        self._fd.close()

    def read(self, data):
        return self._fd.read(BUFFER_READ_SIZE)

    def readline(self, data):
        r = self._fd.readline(BUFFER_READ_SIZE)
        return r

    def write(self, data):
        return self._fd.write(data)

    def flush(self, data):
        return self._fd.flush()

    def seek(self, data):
        (offset,) = struct.unpack('!i', data)
        return self._fd.seek(offset)

    def tell(self, data):
        return str(self._fd.tell())
    #
    # non-standard extensions
    #
    def stat(self, data):
        # Serialise the stat_result via its __reduce__ args tuple; the
        # parent eval()s it back.
        return str(os.stat(data).__reduce__()[1])

    def makedirs(self, data):
        return str(os.makedirs(data))

    def chmod(self, data):
        path, mode = data.split('\n')
        return str(os.chmod(path, int(mode)))

    def unlink(self, data):
        return str(os.unlink(data))

    def utime(self, data):
        path, value = data.split('\n')
        return str(os.utime(path, eval(value)))

    def run(self):
        """Main dispatch loop. Returns a small nonzero code describing
        why the loop terminated (used as the process exit status)."""
        result = 0

        while True:
            try:
                data = self._stdi.read(COMMAND_SIZE)
            except KeyboardInterrupt:
                data = None

            if not data:
                result = 1  # parent closed the pipe
                break

            cmd, size = struct.unpack('!ii', data)
            if size:
                data = self._stdi.read(size)
            else:
                data = ''

            if size != len(data):
                result = 2  # short read / truncated payload
                break

            handler = getattr(self, COMMAND_MAP.get(cmd, 'none'), None)
            if handler is None:
                result = 3  # unknown command id
                break

            try:
                result = handler(data)
            except exceptions.Exception, e:
                # Ship the exception to the parent, pickled, where
                # _cmd_() re-raises it.
                result = pickle.dumps(e)
                response = RESPONSE_ERROR
            else:
                response = RESPONSE_SUCCESS

            if result is None:
                result = ''

            try:
                self._stdo.write(struct.pack('!ii', response, len(result)))
                self._stdo.write(result)
                self._stdo.flush()
            except IOError:
                result = 4  # parent went away mid-response
                break

        return result
class CoroFileQueue(coroqueue.Queue):
    """Pool of AutoCleanFileObject workers, with weakref bookkeeping of
    which file objects are currently checked out of the pool."""

    def __init__(self, size, timeout = None):
        super(CoroFileQueue, self).__init__(
            AutoCleanFileObject, (), {}, size = size, timeout = timeout)
        # id(obj) -> weakproxy and the reverse map; entries disappear
        # automatically when the tracked object is garbage collected.
        self._fd_save = {}
        self._fd_refs = {}

    def _save_info(self, o):
        def dropped(ref):
            # Weakref callback: purge both maps for the dead object.
            self._fd_save.pop(self._fd_refs.pop(id(ref), None), None)

        p = weakref.proxy(o, dropped)
        self._fd_save[id(o)] = p
        self._fd_refs[id(p)] = id(o)

        return super(CoroFileQueue, self)._save_info(o)

    def _drop_info(self, o):
        self._fd_refs.pop(id(self._fd_save.pop(id(o), None)), None)
        return super(CoroFileQueue, self)._drop_info(o)

    def outstanding(self):
        # Names of all files currently checked out of the pool.
        return map(lambda i: getattr(i, 'name', None), self._fd_save.values())
# Shared pool of file-handler child processes.
filequeue = CoroFileQueue(DEFAULT_QUEUE_SIZE)

def resize(size):
    # Change the number of pooled child processes.
    return filequeue.resize(size)

def size():
    return filequeue.size()

def _fo_open(name, mode = 'r'):
    # Queue-backed replacement for open(); the object returns to the
    # pool when its close() runs (AutoCleanFileObject behaviour).
    fd = filequeue.get()
    if fd is None:
        return None

    try:
        fd.open(name, mode)
    except:
        # Never leak a pooled worker on open failure.
        filequeue.put(fd)
        raise

    return fd
#
# TODO: Remove close method once all references to it have been purged.
#
def _fo_close(fd):
    fd.close(return_to_queue=False)
    return filequeue.put(fd)

def __command__(name, *args, **kwargs):
    # Borrow a pooled fileobject, run one named command on it, and
    # always return it to the pool.
    fd = filequeue.get()
    if fd is None:
        return None

    try:
        return getattr(fd, name)(*args, **kwargs)
    finally:
        filequeue.put(fd)

def _fo_stat(path):
    return __command__('stat', path)

def _fo_makedirs(path):
    return __command__('makedirs', path)

def _fo_chmod(path, mode):
    return __command__('chmod', path, mode)

def _fo_unlink(path):
    return __command__('unlink', path)

def _fo_utime(path, value = None):
    return __command__('utime', path, value = value)
#
# os.* calls -- direct passthroughs until emulate() swaps in the
# coroutine-friendly versions above.
#
stat = os.stat
makedirs = os.makedirs
chmod = os.chmod
unlink = os.unlink
utime = os.utime
#
# file/open call (Python 2 builtin; emulate() replaces with _fo_open)
#
open = file
#
# close call
#
def dummy_close(fd):
    return fd.close()

close = dummy_close
# a generator function for doing readlines in a fileobject friendly manner
# a generator function for doing readlines in a fileobject friendly manner
def iterlines(fd):
    """Yield lines from *fd* until EOF, one readline() call at a time.

    Using readline() keeps the coroutine-backed fileobject responsive
    instead of blocking on whole-file iteration.
    """
    while True:
        line = fd.readline()
        if not line:
            return
        yield line
def iterfiles(filenames):
    # Chain the lines of several files.  Uses the module-level ``open``
    # (which emulate() may have swapped for the coroutine version);
    # each file is closed as soon as it is exhausted.
    for fn in filenames:
        fd = open(fn)
        for ln in iterlines(fd):
            yield ln
        fd.close()
def emulate():
    # Monkeypatch this module's public names so existing callers of
    # fileobject.open/stat/... transparently get the queue-backed,
    # coroutine-friendly implementations defined above.
    fileobject = sys.modules['gogreen.fileobject']
    #
    # os.* calls
    #
    fileobject.stat = _fo_stat
    fileobject.makedirs = _fo_makedirs
    fileobject.chmod = _fo_chmod
    fileobject.unlink = _fo_unlink
    fileobject.utime = _fo_utime
    #
    # file/open call
    #
    fileobject.open = _fo_open
    fileobject.close = _fo_close
def nowait():
    '''nowait

    NOTE: GLOBAL SIGNAL CHANGE!

    Do not wait for the terminated/exiting fileobject, since this can
    block. To prevent the processes from becoming unreaped zombies we
    disable the SIGCHILD signal. (see man wait(2))
    '''
    global NOWAIT

    NOWAIT = True
    # SIG_IGN on SIGCHLD makes the kernel auto-reap children.
    signal.signal(signal.SIGCHLD, signal.SIG_IGN)
if __name__ == '__main__':
    # Child-process entry point: arrange to die with the parent
    # (PDEATHSIG) and serve the framed command protocol on stdio.
    import prctl
    prctl.prctl(prctl.PDEATHSIG, signal.SIGTERM)

    handler = FileObjectHandler(sys.stdin, sys.stdout)
    value = handler.run()
    sys.exit(value)
#
# end..
| bsd-3-clause |
eltonsantos/django | django/contrib/gis/db/models/sql/where.py | 118 | 3987 | from django.db.models.constants import LOOKUP_SEP
from django.db.models.fields import FieldDoesNotExist
from django.db.models.sql.expressions import SQLEvaluator
from django.db.models.sql.where import Constraint, WhereNode
from django.contrib.gis.db.models.fields import GeometryField
class GeoConstraint(Constraint):
    """
    This subclass overrides `process` to better handle geographic SQL
    construction.
    """
    def __init__(self, init_constraint):
        # Copy the relevant state instead of delegating to
        # Constraint.__init__, which expects (alias, col, field).
        self.alias = init_constraint.alias
        self.col = init_constraint.col
        self.field = init_constraint.field

    def process(self, lookup_type, value, connection):
        # Returns ((alias, col, db_type), params) for the WHERE compiler.
        if isinstance(value, SQLEvaluator):
            # Make sure the F Expression destination field exists, and
            # set an `srid` attribute with the same as that of the
            # destination.
            geo_fld = GeoWhereNode._check_geo_field(value.opts, value.expression.name)
            if not geo_fld:
                raise ValueError('No geographic field found in expression.')
            value.srid = geo_fld.srid
        db_type = self.field.db_type(connection=connection)
        params = self.field.get_db_prep_lookup(lookup_type, value, connection=connection)
        return (self.alias, self.col, db_type), params
class GeoWhereNode(WhereNode):
    """
    Used to represent the SQL where-clause for spatial databases --
    these are tied to the GeoQuery class that created it.
    """
    def _prepare_data(self, data):
        # Wrap constraints on geometry fields in GeoConstraint so that
        # make_atom() can route them through the spatial SQL path.
        if isinstance(data, (list, tuple)):
            obj, lookup_type, value = data
            if ( isinstance(obj, Constraint) and
                 isinstance(obj.field, GeometryField) ):
                data = (GeoConstraint(obj), lookup_type, value)
        return super(GeoWhereNode, self)._prepare_data(data)

    def make_atom(self, child, qn, connection):
        lvalue, lookup_type, value_annot, params_or_value = child
        if isinstance(lvalue, GeoConstraint):
            # Spatial lookup: let the backend build the operator SQL.
            data, params = lvalue.process(lookup_type, params_or_value, connection)
            spatial_sql, spatial_params = connection.ops.spatial_lookup_sql(
                data, lookup_type, params_or_value, lvalue.field, qn)
            return spatial_sql, spatial_params + params
        else:
            return super(GeoWhereNode, self).make_atom(child, qn, connection)

    @classmethod
    def _check_geo_field(cls, opts, lookup):
        """
        Utility for checking the given lookup with the given model options.
        The lookup is a string either specifying the geographic field, e.g.
        'point, 'the_geom', or a related lookup on a geographic field like
        'address__point'.

        If a GeometryField exists according to the given lookup on the model
        options, it will be returned. Otherwise returns None.
        """
        # This takes into account the situation where the lookup is a
        # lookup to a related geographic field, e.g., 'address__point'.
        field_list = lookup.split(LOOKUP_SEP)

        # Reversing so list operates like a queue of related lookups,
        # and popping the top lookup.
        field_list.reverse()
        fld_name = field_list.pop()

        try:
            geo_fld = opts.get_field(fld_name)
            # If the field list is still around, then it means that the
            # lookup was for a geometry field across a relationship --
            # thus we keep on getting the related model options and the
            # model field associated with the next field in the list
            # until there's no more left.
            while len(field_list):
                opts = geo_fld.rel.to._meta
                geo_fld = opts.get_field(field_list.pop())
        except (FieldDoesNotExist, AttributeError):
            return False

        # Finally, make sure we got a Geographic field and return.
        if isinstance(geo_fld, GeometryField):
            return geo_fld
        else:
            return False
| bsd-3-clause |
berrange/nova | nova/tests/virt/libvirt/test_rbd.py | 21 | 11227 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from nova import exception
from nova.openstack.common import log as logging
from nova import test
from nova import utils
from nova.virt.libvirt import rbd_utils
LOG = logging.getLogger(__name__)
CEPH_MON_DUMP = """dumped monmap epoch 1
{ "epoch": 1,
"fsid": "33630410-6d93-4d66-8e42-3b953cf194aa",
"modified": "2013-05-22 17:44:56.343618",
"created": "2013-05-22 17:44:56.343618",
"mons": [
{ "rank": 0,
"name": "a",
"addr": "[::1]:6789\/0"},
{ "rank": 1,
"name": "b",
"addr": "[::1]:6790\/0"},
{ "rank": 2,
"name": "c",
"addr": "[::1]:6791\/0"},
{ "rank": 3,
"name": "d",
"addr": "127.0.0.1:6792\/0"},
{ "rank": 4,
"name": "e",
"addr": "example.com:6791\/0"}],
"quorum": [
0,
1,
2]}
"""
class RbdTestCase(test.NoDBTestCase):
    @mock.patch.object(rbd_utils, 'rbd')
    @mock.patch.object(rbd_utils, 'rados')
    def setUp(self, mock_rados, mock_rbd):
        # rbd/rados are patched for the fixture so the driver can be
        # instantiated without the Ceph python bindings installed.
        super(RbdTestCase, self).setUp()

        self.mock_rados = mock_rados
        self.mock_rados.Rados = mock.Mock
        self.mock_rados.Rados.ioctx = mock.Mock()
        self.mock_rados.Rados.connect = mock.Mock()
        self.mock_rados.Rados.shutdown = mock.Mock()
        self.mock_rados.Rados.open_ioctx = mock.Mock()
        self.mock_rados.Rados.open_ioctx.return_value = \
            self.mock_rados.Rados.ioctx
        self.mock_rados.Error = Exception

        self.mock_rbd = mock_rbd
        self.mock_rbd.RBD = mock.Mock
        self.mock_rbd.Image = mock.Mock
        self.mock_rbd.Image.close = mock.Mock()
        self.mock_rbd.RBD.Error = Exception

        self.rbd_pool = 'rbd'
        self.driver = rbd_utils.RBDDriver(self.rbd_pool, None, None)

        self.volume_name = u'volume-00000001'
    def tearDown(self):
        # Nothing extra to clean up; kept for symmetry with setUp.
        super(RbdTestCase, self).tearDown()
def test_good_locations(self):
locations = ['rbd://fsid/pool/image/snap',
'rbd://%2F/%2F/%2F/%2F', ]
map(self.driver.parse_url, locations)
    def test_bad_locations(self):
        # Malformed rbd URLs must raise ImageUnacceptable from
        # parse_url() and make is_cloneable() return False (not raise).
        locations = ['rbd://image',
                     'http://path/to/somewhere/else',
                     'rbd://image/extra',
                     'rbd://image/',
                     'rbd://fsid/pool/image/',
                     'rbd://fsid/pool/image/snap/',
                     'rbd://///', ]
        for loc in locations:
            self.assertRaises(exception.ImageUnacceptable,
                              self.driver.parse_url, loc)
            self.assertFalse(self.driver.is_cloneable({'url': loc},
                                                      {'disk_format': 'raw'}))
    @mock.patch.object(rbd_utils.RBDDriver, '_get_fsid')
    @mock.patch.object(rbd_utils, 'rbd')
    @mock.patch.object(rbd_utils, 'rados')
    def test_cloneable(self, mock_rados, mock_rbd, mock_get_fsid):
        # Matching cluster fsid plus raw disk format => cloneable.
        mock_get_fsid.return_value = 'abc'
        location = {'url': 'rbd://abc/pool/image/snap'}
        info = {'disk_format': 'raw'}
        self.assertTrue(self.driver.is_cloneable(location, info))
        self.assertTrue(mock_get_fsid.called)
    @mock.patch.object(rbd_utils.RBDDriver, '_get_fsid')
    def test_uncloneable_different_fsid(self, mock_get_fsid):
        # Image lives in a different Ceph cluster (fsid mismatch).
        mock_get_fsid.return_value = 'abc'
        location = {'url': 'rbd://def/pool/image/snap'}
        self.assertFalse(
            self.driver.is_cloneable(location, {'disk_format': 'raw'}))
        self.assertTrue(mock_get_fsid.called)
    @mock.patch.object(rbd_utils.RBDDriver, '_get_fsid')
    @mock.patch.object(rbd_utils, 'RBDVolumeProxy')
    @mock.patch.object(rbd_utils, 'rbd')
    @mock.patch.object(rbd_utils, 'rados')
    def test_uncloneable_unreadable(self, mock_rados, mock_rbd, mock_proxy,
                                    mock_get_fsid):
        """An image whose snapshot cannot be opened is reported uncloneable."""
        mock_get_fsid.return_value = 'abc'
        location = {'url': 'rbd://abc/pool/image/snap'}
        # Simulate an RBD error when opening the snapshot read-only.
        mock_proxy.side_effect = mock_rbd.Error
        self.assertFalse(
            self.driver.is_cloneable(location, {'disk_format': 'raw'}))
        mock_proxy.assert_called_once_with(self.driver, 'image', pool='pool',
                                           snapshot='snap', read_only=True)
        self.assertTrue(mock_get_fsid.called)
    @mock.patch.object(rbd_utils.RBDDriver, '_get_fsid')
    def test_uncloneable_bad_format(self, mock_get_fsid):
        """Non-raw disk formats cannot be cloned even within the cluster."""
        mock_get_fsid.return_value = 'abc'
        location = {'url': 'rbd://abc/pool/image/snap'}
        formats = ['qcow2', 'vmdk', 'vdi']
        for f in formats:
            self.assertFalse(
                self.driver.is_cloneable(location, {'disk_format': f}))
        self.assertTrue(mock_get_fsid.called)
    @mock.patch.object(utils, 'execute')
    def test_get_mon_addrs(self, mock_execute):
        """Monitor hosts and ports are parsed from `ceph mon dump` output."""
        # CEPH_MON_DUMP is a module-level fixture with sample dump output.
        mock_execute.return_value = (CEPH_MON_DUMP, '')
        hosts = ['::1', '::1', '::1', '127.0.0.1', 'example.com']
        ports = ['6789', '6790', '6791', '6792', '6791']
        self.assertEqual((hosts, ports), self.driver.get_mon_addrs())
    @mock.patch.object(rbd_utils, 'RADOSClient')
    @mock.patch.object(rbd_utils, 'rbd')
    @mock.patch.object(rbd_utils, 'rados')
    def test_clone(self, mock_rados, mock_rbd, mock_client):
        """clone() must COW-clone the parent snapshot into the local pool."""
        pool = u'images'
        image = u'image-name'
        snap = u'snapshot-name'
        location = {'url': u'rbd://fsid/%s/%s/%s' % (pool, image, snap)}
        client_stack = []
        # Record every context-manager entry so the test can distinguish the
        # source (first) from the destination (second) rados client.
        def mock__enter__(inst):
            def _inner():
                client_stack.append(inst)
                return inst
            return _inner
        client = mock_client.return_value
        # capture both rados client used to perform the clone
        client.__enter__.side_effect = mock__enter__(client)
        rbd = mock_rbd.RBD.return_value
        self.driver.clone(location, self.volume_name)
        args = [client_stack[0].ioctx, str(image), str(snap),
                client_stack[1].ioctx, str(self.volume_name)]
        kwargs = {'features': mock_rbd.RBD_FEATURE_LAYERING}
        rbd.clone.assert_called_once_with(*args, **kwargs)
        self.assertEqual(client.__enter__.call_count, 2)
@mock.patch.object(rbd_utils, 'RBDVolumeProxy')
def test_resize(self, mock_proxy):
size = 1024
proxy = mock_proxy.return_value
proxy.__enter__.return_value = proxy
self.driver.resize(self.volume_name, size)
proxy.resize.assert_called_once_with(size)
    @mock.patch.object(rbd_utils.RBDDriver, '_disconnect_from_rados')
    @mock.patch.object(rbd_utils.RBDDriver, '_connect_to_rados')
    @mock.patch.object(rbd_utils, 'rbd')
    @mock.patch.object(rbd_utils, 'rados')
    def test_rbd_volume_proxy_init(self, mock_rados, mock_rbd,
                                   mock_connect_from_rados,
                                   mock_disconnect_from_rados):
        """The proxy connects on entry and disconnects only on exit."""
        mock_connect_from_rados.return_value = (None, None)
        mock_disconnect_from_rados.return_value = (None, None)
        with rbd_utils.RBDVolumeProxy(self.driver, self.volume_name):
            mock_connect_from_rados.assert_called_once_with(None)
            # Still inside the context: disconnect must not have happened yet.
            self.assertFalse(mock_disconnect_from_rados.called)
        mock_disconnect_from_rados.assert_called_once_with(None, None)
    @mock.patch.object(rbd_utils, 'rbd')
    @mock.patch.object(rbd_utils, 'rados')
    def test_connect_to_rados_default(self, mock_rados, mock_rbd):
        """Without arguments the driver connects to its configured pool."""
        ret = self.driver._connect_to_rados()
        self.assertTrue(self.mock_rados.Rados.connect.called)
        self.assertTrue(self.mock_rados.Rados.open_ioctx.called)
        self.assertIsInstance(ret[0], self.mock_rados.Rados)
        self.assertEqual(ret[1], self.mock_rados.Rados.ioctx)
        # The pool passed to open_ioctx must be the driver's default pool.
        self.mock_rados.Rados.open_ioctx.assert_called_with(self.rbd_pool)
    @mock.patch.object(rbd_utils, 'rbd')
    @mock.patch.object(rbd_utils, 'rados')
    def test_connect_to_rados_different_pool(self, mock_rados, mock_rbd):
        """An explicit pool argument overrides the driver's default pool."""
        ret = self.driver._connect_to_rados('alt_pool')
        self.assertTrue(self.mock_rados.Rados.connect.called)
        self.assertTrue(self.mock_rados.Rados.open_ioctx.called)
        self.assertIsInstance(ret[0], self.mock_rados.Rados)
        self.assertEqual(ret[1], self.mock_rados.Rados.ioctx)
        self.mock_rados.Rados.open_ioctx.assert_called_with('alt_pool')
    @mock.patch.object(rbd_utils, 'rados')
    def test_connect_to_rados_error(self, mock_rados):
        """A failing open_ioctx must shut the rados client back down."""
        mock_rados.Rados.open_ioctx.side_effect = mock_rados.Error
        self.assertRaises(mock_rados.Error, self.driver._connect_to_rados)
        mock_rados.Rados.open_ioctx.assert_called_once_with(self.rbd_pool)
        # Cleanup path: the half-open connection must be closed.
        mock_rados.Rados.shutdown.assert_called_once_with()
def test_ceph_args_none(self):
self.driver.rbd_user = None
self.driver.ceph_conf = None
self.assertEqual([], self.driver.ceph_args())
def test_ceph_args_rbd_user(self):
self.driver.rbd_user = 'foo'
self.driver.ceph_conf = None
self.assertEqual(['--id', 'foo'], self.driver.ceph_args())
def test_ceph_args_ceph_conf(self):
self.driver.rbd_user = None
self.driver.ceph_conf = '/path/bar.conf'
self.assertEqual(['--conf', '/path/bar.conf'],
self.driver.ceph_args())
def test_ceph_args_rbd_user_and_ceph_conf(self):
self.driver.rbd_user = 'foo'
self.driver.ceph_conf = '/path/bar.conf'
self.assertEqual(['--id', 'foo', '--conf', '/path/bar.conf'],
self.driver.ceph_args())
    @mock.patch.object(rbd_utils, 'RBDVolumeProxy')
    def test_exists(self, mock_proxy):
        """exists() returns True when the volume proxy opens successfully."""
        snapshot = 'snap'
        proxy = mock_proxy.return_value
        self.assertTrue(self.driver.exists(self.volume_name,
                                           self.rbd_pool,
                                           snapshot))
        # The proxy must be used as a context manager and exited cleanly.
        proxy.__enter__.assert_called_once_with()
        proxy.__exit__.assert_called_once_with(None, None, None)
    @mock.patch.object(rbd_utils, 'rbd')
    @mock.patch.object(rbd_utils, 'rados')
    @mock.patch.object(rbd_utils, 'RADOSClient')
    def test_cleanup_volumes(self, mock_client, mock_rados, mock_rbd):
        """Only volumes prefixed with the instance uuid are removed."""
        instance = {'uuid': '12345'}
        rbd = mock_rbd.RBD.return_value
        # One matching volume ('12345_test') and one that must survive.
        rbd.list.return_value = ['12345_test', '111_test']
        client = mock_client.return_value
        self.driver.cleanup_volumes(instance)
        rbd.remove.assert_called_once_with(client.ioctx, '12345_test')
        # The RADOS client context manager must be entered and exited cleanly.
        client.__enter__.assert_called_once_with()
        client.__exit__.assert_called_once_with(None, None, None)
| apache-2.0 |
EvanK/ansible-modules-extras | cloud/rackspace/rax_mon_alarm.py | 153 | 7760 | #!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# This is a DOCUMENTATION stub specific to this module, it extends
# a documentation fragment located in ansible.utils.module_docs_fragments
DOCUMENTATION = '''
---
module: rax_mon_alarm
short_description: Create or delete a Rackspace Cloud Monitoring alarm.
description:
- Create or delete a Rackspace Cloud Monitoring alarm that associates an
existing rax_mon_entity, rax_mon_check, and rax_mon_notification_plan with
criteria that specify what conditions will trigger which levels of
notifications. Rackspace monitoring module flow | rax_mon_entity ->
rax_mon_check -> rax_mon_notification -> rax_mon_notification_plan ->
*rax_mon_alarm*
version_added: "2.0"
options:
state:
description:
- Ensure that the alarm with this C(label) exists or does not exist.
choices: [ "present", "absent" ]
required: false
default: present
label:
description:
- Friendly name for this alarm, used to achieve idempotence. Must be a String
between 1 and 255 characters long.
required: true
entity_id:
description:
- ID of the entity this alarm is attached to. May be acquired by registering
the value of a rax_mon_entity task.
required: true
check_id:
description:
- ID of the check that should be alerted on. May be acquired by registering
the value of a rax_mon_check task.
required: true
notification_plan_id:
description:
- ID of the notification plan to trigger if this alarm fires. May be acquired
by registering the value of a rax_mon_notification_plan task.
required: true
criteria:
description:
- Alarm DSL that describes alerting conditions and their output states. Must
be between 1 and 16384 characters long. See
http://docs.rackspace.com/cm/api/v1.0/cm-devguide/content/alerts-language.html
for a reference on the alerting language.
disabled:
description:
- If yes, create this alarm, but leave it in an inactive state. Defaults to
no.
choices: [ "yes", "no" ]
metadata:
description:
- Arbitrary key/value pairs to accompany the alarm. Must be a hash of String
keys and values between 1 and 255 characters long.
author: Ash Wilson
extends_documentation_fragment: rackspace.openstack
'''
EXAMPLES = '''
- name: Alarm example
gather_facts: False
hosts: local
connection: local
tasks:
- name: Ensure that a specific alarm exists.
rax_mon_alarm:
credentials: ~/.rax_pub
state: present
label: uhoh
entity_id: "{{ the_entity['entity']['id'] }}"
check_id: "{{ the_check['check']['id'] }}"
notification_plan_id: "{{ defcon1['notification_plan']['id'] }}"
criteria: >
if (rate(metric['average']) > 10) {
return new AlarmStatus(WARNING);
}
return new AlarmStatus(OK);
register: the_alarm
'''
# pyrax is only needed at runtime; defer the hard failure to main() so the
# module can still be imported (e.g. for documentation) without it.
try:
    import pyrax
    HAS_PYRAX = True
except ImportError:
    HAS_PYRAX = False
def alarm(module, state, label, entity_id, check_id, notification_plan_id, criteria,
          disabled, metadata):
    """Create, update or delete the monitoring alarm named *label*.

    The alarm is looked up by label on the given entity. For state=present it
    is created, updated in place, or deleted-and-recreated when the check or
    notification plan changed; for state=absent every alarm with that label is
    deleted. Always terminates the module via exit_json/fail_json.
    """
    if len(label) < 1 or len(label) > 255:
        module.fail_json(msg='label must be between 1 and 255 characters long')

    # BUG FIX: the original condition was
    #   criteria and len(criteria) < 1 or len(criteria) > 16384
    # which, due to `and` binding tighter than `or`, evaluated len(None) and
    # raised a TypeError whenever criteria was omitted.
    if criteria and (len(criteria) < 1 or len(criteria) > 16384):
        module.fail_json(msg='criteria must be between 1 and 16384 characters long')

    # Coerce attributes.

    changed = False
    alarm = None

    cm = pyrax.cloud_monitoring
    if not cm:
        module.fail_json(msg='Failed to instantiate client. This typically '
                             'indicates an invalid region or an incorrectly '
                             'capitalized region name.')

    existing = [a for a in cm.list_alarms(entity_id) if a.label == label]
    if existing:
        alarm = existing[0]

    if state == 'present':
        should_create = False
        should_update = False
        should_delete = False

        if len(existing) > 1:
            module.fail_json(msg='%s existing alarms have the label %s.' %
                                 (len(existing), label))

        if alarm:
            # Check and notification plan cannot be changed in place:
            # recreate the alarm instead.
            if check_id != alarm.check_id or notification_plan_id != alarm.notification_plan_id:
                should_delete = should_create = True

            # NOTE(review): `disabled and ...` cannot detect a transition to
            # disabled=False (falsy); kept as-is to preserve behaviour —
            # confirm whether re-enabling an alarm should trigger an update.
            should_update = (disabled and disabled != alarm.disabled) or \
                (metadata and metadata != alarm.metadata) or \
                (criteria and criteria != alarm.criteria)

            if should_update and not should_delete:
                cm.update_alarm(entity=entity_id, alarm=alarm,
                                criteria=criteria, disabled=disabled,
                                label=label, metadata=metadata)
                changed = True

            if should_delete:
                alarm.delete()
                changed = True
        else:
            should_create = True

        if should_create:
            alarm = cm.create_alarm(entity=entity_id, check=check_id,
                                    notification_plan=notification_plan_id,
                                    criteria=criteria, disabled=disabled, label=label,
                                    metadata=metadata)
            changed = True
    else:
        # state == 'absent': remove every alarm carrying this label.
        for a in existing:
            a.delete()
            changed = True

    if alarm:
        alarm_dict = {
            "id": alarm.id,
            "label": alarm.label,
            "check_id": alarm.check_id,
            "notification_plan_id": alarm.notification_plan_id,
            "criteria": alarm.criteria,
            "disabled": alarm.disabled,
            "metadata": alarm.metadata
        }
        module.exit_json(changed=changed, alarm=alarm_dict)
    else:
        module.exit_json(changed=changed)
def main():
    """Entry point: parse module arguments and apply the desired alarm state."""
    # Standard Rackspace argument scaffolding plus the alarm-specific options.
    argument_spec = rax_argument_spec()
    argument_spec.update(
        dict(
            state=dict(default='present', choices=['present', 'absent']),
            label=dict(required=True),
            entity_id=dict(required=True),
            check_id=dict(required=True),
            notification_plan_id=dict(required=True),
            criteria=dict(),
            disabled=dict(type='bool', default=False),
            metadata=dict(type='dict')
        )
    )

    module = AnsibleModule(
        argument_spec=argument_spec,
        required_together=rax_required_together()
    )

    if not HAS_PYRAX:
        module.fail_json(msg='pyrax is required for this module')

    state = module.params.get('state')
    label = module.params.get('label')
    entity_id = module.params.get('entity_id')
    check_id = module.params.get('check_id')
    notification_plan_id = module.params.get('notification_plan_id')
    criteria = module.params.get('criteria')
    # boolean() normalizes truthy strings ('yes'/'no') from playbooks.
    disabled = module.boolean(module.params.get('disabled'))
    metadata = module.params.get('metadata')

    setup_rax_module(module, pyrax)

    alarm(module, state, label, entity_id, check_id, notification_plan_id,
          criteria, disabled, metadata)
# Import module snippets
# NOTE: bottom-of-file star imports are the historical Ansible module
# convention; they inject AnsibleModule, rax_argument_spec, etc.
from ansible.module_utils.basic import *
from ansible.module_utils.rax import *

# Invoke the module.
main()
| gpl-3.0 |
cmallwitz/Sunflower | application/widgets/breadcrumbs.py | 1 | 6900 | import os
from gi.repository import Gtk, Pango, Gdk, GObject
class Breadcrumbs(Gtk.HBox):
    """Widget for displaying paths with clickable segments."""

    # Breadcrumb display modes (selected via parent._breadcrumb_type).
    TYPE_NONE = 0
    TYPE_NORMAL = 1
    TYPE_SMART = 2

    def __init__(self, parent):
        GObject.GObject.__init__(self)

        self._parent = parent
        self._type = self._parent._breadcrumb_type
        self._path = None
        self._previous_path = None
        self._colors = None
        self._state = Gtk.StateType.NORMAL
        self._smart_color = None
        # _elements_size holds character offsets of each path element end;
        # _elements_width holds the corresponding rendered pixel widths.
        self._elements_size = None
        self._elements_width = None
        self._allocation = None
        self._highlight_index = None

        # create user interface
        self._path_object = Gtk.DrawingArea()
        self._path_object.add_events(Gdk.EventMask.POINTER_MOTION_MASK)
        self._path_object.add_events(Gdk.EventMask.LEAVE_NOTIFY_MASK)
        self._path_object.add_events(Gdk.EventMask.BUTTON_PRESS_MASK)
        self._path_object.add_events(Gdk.EventMask.ENTER_NOTIFY_MASK)

        # TODO: Fix for GTK3
        self._path_object.connect('draw', self.__draw_event)
        self._path_object.connect('motion-notify-event', self.__motion_event)
        self._path_object.connect('enter-notify-event', self.__motion_event)
        self._path_object.connect('leave-notify-event', self.__leave_event)
        self._path_object.connect('button-press-event', self.__button_press_event)
        self._path_object.connect('realize', self.__realize_event)
        self.connect('size_allocate', self._update_visibility)

        # pack interface
        self.pack_start(self._path_object, True, True, 0)
        self.show_all()

    def __get_color(self, background, foreground):
        """Calculate color for the part history part of the path."""
        # Simple average of background and foreground channels.
        red = (background.red + foreground.red) / 2
        green = (background.green + foreground.green) / 2
        blue = (background.blue + foreground.blue) / 2

        return Gdk.Color(red, green, blue)

    def __realize_event(self, widget, data=None):
        """Resize drawing area when object is realized."""
        layout = widget.create_pango_layout('')
        height = layout.get_size()[1] / Pango.SCALE
        self._path_object.set_size_request(-1, height)

    def __leave_event(self, widget, event):
        """Handle mouse leaving the widget."""
        # remove highlight
        self._highlight_index = None

        # prepare refresh region
        region = self._allocation.copy()
        region.x = 0

        # request redraw
        self._path_object.queue_draw_area(region.x, region.y, region.width, region.height)
        return True

    def __button_press_event(self, widget, event):
        """Handle button press."""
        path = self._path
        if self._previous_path is not None and self._previous_path.startswith(self._path):
            path = self._previous_path

        # handle single left mouse click
        # BUG FIX: the original compared with `is`, i.e. object identity,
        # which is not a reliable equality test for ints or enum values.
        if event.button == 1 and event.type == Gdk.EventType.BUTTON_PRESS:
            # NOTE(review): assumes a motion/enter event already populated
            # _highlight_index and _elements_size — confirm against Gtk
            # event ordering.
            width = self._elements_size[self._highlight_index]
            new_path = path[0:width]
            file_list = self._parent._parent

            # change path
            if hasattr(file_list, 'change_path'):
                file_list.change_path(new_path)

        return True

    def __motion_event(self, widget, event):
        """Handle mouse movement over widget."""
        # BUG FIX: the original used len(filter(...)), which fails on
        # Python 3 where filter() returns a lazy iterator; count explicitly.
        if self._elements_width is None:
            return True
        index = sum(1 for width in self._elements_width if width <= event.x)

        # make sure we redraw only on index change
        if index != self._highlight_index:
            self._highlight_index = index

            # make sure we don't have index higher than needed
            if self._highlight_index >= len(self._elements_width):
                self._highlight_index = len(self._elements_width) - 1

            # prepare refresh region
            region = self._allocation.copy()
            region.x = 0

            # request redraw
            self._path_object.queue_draw_area(region.x, region.y, region.width, region.height)

        return True

    def __draw_event(self, widget, context, event=None):
        """Handle drawing bread crumbs."""
        foreground_context = widget.get_pango_context()
        background_context = context
        layout = widget.create_pango_layout('')
        text_to_draw = self._path
        path_length = len(self._path)

        # make sure we have allocation
        if self._allocation is None:
            self._allocation = widget.get_allocation()

        # create attributes
        attributes = Pango.AttrList.new()

        # check if path is part of previous one
        if self._type is Breadcrumbs.TYPE_SMART \
        and self._previous_path is not None \
        and self._previous_path.startswith(self._path):
            attribute = Pango.AttrForeground.new(
                    self._smart_color.red,
                    self._smart_color.green,
                    self._smart_color.blue
                )
            # NOTE(review): the start/end indices below were commented out in
            # the original, so the attribute applies to the whole string —
            # confirm whether the history suffix alone should be tinted.
            # start_index=path_length,
            # end_index=len(self._previous_path)
            attributes.insert(attribute)
            text_to_draw = self._previous_path

        # calculate width of path elements
        if self._elements_width is None:
            path = None
            provider = self._parent._parent.get_provider()
            self._elements_size = []
            self._elements_width = []

            # split root element from others
            root_element = provider.get_root_path(text_to_draw)
            other_elements = text_to_draw[len(root_element):]

            # make sure our path doesn't begin with slash
            if other_elements.startswith(os.path.sep):
                other_elements = other_elements[1:]

            # split elements
            elements = other_elements.split(os.path.sep)
            elements.insert(0, root_element)

            for element in elements:
                # get path size
                path = os.path.join(path, element) if path is not None else element
                layout.set_text(path, -1)

                # add width to the list
                width = layout.get_size()[0] / Pango.SCALE
                self._elements_size.append(len(path))
                self._elements_width.append(width)

        # underline hovered path if specified
        if None not in (self._highlight_index, self._elements_size):
            width = self._elements_size[self._highlight_index]
            # attributes.insert(Pango.AttrUnderline(Pango.Underline.SINGLE, 0, width))

        # prepare text for drawing
        layout.set_text(text_to_draw, -1)
        layout.set_attributes(attributes)

        # draw background color
        color = self._colors[0]
        background_context.set_source_rgb(color.red, color.green, color.blue)
        background_context.rectangle(0, 0, self._allocation.width, self._allocation.height)
        background_context.fill()

        return True

    def _update_visibility(self, sender=None, data=None):
        """Handle path container resize."""
        self._allocation = self._path_object.get_allocation()

    def apply_color(self, colors):
        """Apply colors to all bread crumbs."""
        self._colors = colors
        self._smart_color = self.__get_color(*colors)

    def apply_settings(self):
        """Method called when system applies new settings."""
        self._type = self._parent._breadcrumb_type

    def set_state(self, state):
        """Set widget state."""
        self._state = state

    def refresh(self, path=None):
        """Update label on directory change."""
        if self._type is Breadcrumbs.TYPE_SMART \
        and (self._previous_path is None or not self._previous_path.startswith(self._path)):
            self._previous_path = self._path

        # split path
        self._path = path

        # clear cache so __draw_event recomputes element widths
        self._elements_size = None
        self._elements_width = None
        self._highlight_index = None

        # force widget to be redrawn
        self._path_object.queue_draw()
| gpl-3.0 |
mistoll/ros_buildfarm | ros_buildfarm/status_page.py | 1 | 25734 | # Copyright 2014-2016 Open Source Robotics Foundation, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from collections import namedtuple
from distutils.version import LooseVersion
import itertools
import os
import re
import shutil
import sys
import time
from .common import get_debian_package_name
from .common import get_release_view_name
from .common import get_short_arch
from .common import Target
from .config import get_index as get_config_index
from .config import get_release_build_files
from .debian_repo import get_debian_repo_data
from .status_page_input import get_rosdistro_info
from .status_page_input import RosPackage
from .templates import expand_template
from .templates import get_template_path
def build_release_status_page(
        config_url, rosdistro_name, release_build_name,
        cache_dir, output_dir, copy_resources=False):
    """Generate the HTML release status page for one rosdistro build file.

    Compares the building/testing/main Debian repositories against the
    rosdistro reference versions and renders the result through the
    release_status_page template into ``output_dir``.
    """
    from rosdistro import get_cached_distribution
    from rosdistro import get_index

    start_time = time.time()

    config = get_config_index(config_url)
    release_build_files = get_release_build_files(config, rosdistro_name)
    build_file = release_build_files[release_build_name]

    index = get_index(config.rosdistro_index_url)

    # get targets
    targets = []
    for os_name in sorted(build_file.targets.keys()):
        # only Debian-based targets are supported by this page
        if os_name not in ['debian', 'ubuntu']:
            continue
        for os_code_name in sorted(build_file.targets[os_name].keys()):
            targets.append(Target(os_name, os_code_name, 'source'))
            for arch in sorted(build_file.targets[os_name][os_code_name]):
                targets.append(Target(os_name, os_code_name, arch))
    print('The build file contains the following targets:')
    for _, os_code_name, arch in targets:
        print(' - %s %s' % (os_code_name, arch))

    # get all input data
    dist = get_cached_distribution(index, rosdistro_name)

    rosdistro_info = get_rosdistro_info(dist, build_file)

    # derive testing and main urls from building url
    building_repo_url = build_file.target_repository
    base_url = os.path.dirname(building_repo_url)
    testing_repo_url = os.path.join(base_url, 'testing')
    main_repo_url = os.path.join(base_url, 'main')

    building_repo_data = get_debian_repo_data(
        building_repo_url, targets, cache_dir)
    testing_repo_data = get_debian_repo_data(
        testing_repo_url, targets, cache_dir)
    main_repo_data = get_debian_repo_data(main_repo_url, targets, cache_dir)

    repos_data = [building_repo_data, testing_repo_data, main_repo_data]

    # compute derived attributes
    package_descriptors = get_rosdistro_package_descriptors(
        rosdistro_info, rosdistro_name)

    affected_by_sync = get_affected_by_sync(
        package_descriptors, targets, testing_repo_data, main_repo_data)

    regressions = get_regressions(
        package_descriptors, targets,
        building_repo_data, testing_repo_data, main_repo_data)

    version_status = get_version_status(
        package_descriptors, targets, repos_data, strip_version=True)

    homogeneous = get_homogeneous(package_descriptors, targets, repos_data)

    package_counts = get_package_counts(
        package_descriptors, targets, repos_data)

    jenkins_job_urls = get_jenkins_job_urls(
        rosdistro_name, config.jenkins_url, release_build_name, targets)

    # generate output
    repo_urls = [building_repo_url, testing_repo_url, main_repo_url]
    repo_names = get_url_names(repo_urls)

    ordered_pkgs = []
    for pkg_name in sorted(rosdistro_info.keys()):
        ordered_pkgs.append(rosdistro_info[pkg_name])

    template_name = 'status/release_status_page.html.em'
    data = {
        'title': 'ROS %s - release status' % rosdistro_name.capitalize(),
        'start_time': start_time,
        'start_time_local_str': time.strftime('%Y-%m-%d %H:%M:%S %z', time.localtime(start_time)),

        'resource_hashes': get_resource_hashes(),

        'repo_names': repo_names,
        'repo_urls': repo_urls,

        'has_repository_column': True,
        'has_status_column': True,
        'has_maintainer_column': True,

        'ordered_pkgs': ordered_pkgs,
        'targets': targets,
        'short_arches': dict(
            [(t.arch, get_short_arch(t.arch)) for t in targets]),
        'repos_data': repos_data,

        'affected_by_sync': affected_by_sync,
        'homogeneous': homogeneous,
        'jenkins_job_urls': jenkins_job_urls,
        'package_counts': package_counts,
        'regressions': regressions,
        'version_status': version_status,
    }
    html = expand_template(template_name, data)

    output_filename = os.path.join(
        output_dir, 'ros_%s_%s.html' % (rosdistro_name, release_build_name))
    print("Generating status page '%s':" % output_filename)
    with open(output_filename, 'w') as h:
        h.write(html)

    additional_resources(output_dir, copy_resources=copy_resources)
def build_debian_repos_status_page(
        repo_urls, os_code_name_and_arch_tuples,
        cache_dir, output_name, output_dir):
    """Generate a status page comparing arbitrary Debian repositories.

    :param repo_urls: Debian repository base URLs to compare
    :param os_code_name_and_arch_tuples: strings 'OS_CODE_NAME:ARCH'
        selecting the targets to inspect
    :param cache_dir: directory for cached repository indices
    :param output_name: basename (without extension) of the generated page
    :param output_dir: directory the page is written into
    """
    start_time = time.time()

    # get targets
    targets = []
    for os_code_name_and_arch in os_code_name_and_arch_tuples:
        # BUG FIX: the original assertion message contained a '%s'
        # placeholder but never applied the % operator (the two string
        # literals were merely concatenated), so the offending value was
        # never shown; format it in explicitly.
        assert os_code_name_and_arch.count(':') == 1, \
            ('The string (%s) does not contain a single colon separating an '
             'OS code name and an architecture' % os_code_name_and_arch)
        os_code_name, arch = os_code_name_and_arch.split(':')
        targets.append(Target('ubuntu', os_code_name, arch))

    # get all input data
    repos_data = []
    for repo_url in repo_urls:
        repo_data = get_debian_repo_data(repo_url, targets, cache_dir)
        repos_data.append(repo_data)

    # compute derived attributes
    package_descriptors = get_repos_package_descriptors(repos_data, targets)

    version_status = get_version_status(
        package_descriptors, targets, repos_data, strip_os_code_name=True)

    homogeneous = get_homogeneous(package_descriptors, targets, repos_data)

    package_counts = get_package_counts(
        package_descriptors, targets, repos_data)

    # generate output
    repo_names = get_url_names(repo_urls)

    ordered_pkgs = []
    for debian_pkg_name in sorted(package_descriptors.keys()):
        pkg = RosPackage(debian_pkg_name)
        pkg.debian_name = debian_pkg_name
        pkg.version = package_descriptors[debian_pkg_name].version

        # set unavailable attributes
        pkg.repository_name = None
        pkg.repository_url = None
        pkg.status = None
        pkg.status_description = None
        pkg.maintainers = []
        pkg.url = None
        ordered_pkgs.append(pkg)

    template_name = 'status/release_status_page.html.em'
    data = {
        'title': 'ROS repository status',
        'start_time': start_time,
        'start_time_local_str': time.strftime('%Y-%m-%d %H:%M:%S %z', time.localtime(start_time)),

        'resource_hashes': get_resource_hashes(),

        'repo_names': repo_names,
        'repo_urls': repo_urls,

        'has_repository_column': False,
        'has_status_column': False,
        'has_maintainer_column': False,

        'ordered_pkgs': ordered_pkgs,
        'targets': targets,
        'short_arches': dict(
            [(t.arch, get_short_arch(t.arch)) for t in targets]),
        'repos_data': repos_data,

        'affected_by_sync': None,
        'homogeneous': homogeneous,
        'jenkins_job_urls': None,
        'package_counts': package_counts,
        'regressions': None,
        'version_status': version_status,
    }
    html = expand_template(template_name, data)

    output_filename = os.path.join(
        output_dir, '%s.html' % output_name)
    print("Generating status page '%s':" % output_filename)
    with open(output_filename, 'w') as h:
        h.write(html)

    additional_resources(output_dir)
# Lightweight record tying a ROS package name to its Debian package name and
# the reference version (from rosdistro, or the highest repo version).
PackageDescriptor = namedtuple(
    'PackageDescriptor', 'pkg_name debian_pkg_name version')
def get_rosdistro_package_descriptors(rosdistro_info, rosdistro_name):
    """Map each ROS package name to a PackageDescriptor.

    The Debian package name is derived from the rosdistro name and the
    package name; the version comes straight from the rosdistro entry.
    """
    return dict(
        (name, PackageDescriptor(
            name, get_debian_package_name(rosdistro_name, name), pkg.version))
        for name, pkg in rosdistro_info.items())
def get_repos_package_descriptors(repos_data, targets):
    """Build descriptors for every package seen in any repo/target.

    The highest version encountered (after stripping the OS code name
    suffix) becomes the reference version for that package.
    """
    descriptors = {}
    for target in targets:
        for repo_data in repos_data:
            for pkg_name, raw_version in repo_data[target].items():
                version = _strip_os_code_name_suffix(
                    raw_version, target.os_code_name)
                known = descriptors.get(pkg_name)
                if known is None:
                    # first sighting: record it even if the version is empty
                    descriptors[pkg_name] = PackageDescriptor(
                        pkg_name, pkg_name, version)
                elif version and known.version and \
                        _version_is_gt_other(version, known.version):
                    # keep the highest version as the reference
                    descriptors[pkg_name] = PackageDescriptor(
                        pkg_name, pkg_name, version)
    return descriptors
def get_url_names(urls):
    """Derive a short display name for each repository URL.

    The last path component is used, except when it is 'ubuntu', in which
    case the parent component is taken instead (e.g. '.../building/ubuntu'
    becomes 'building').
    """
    def _name_of(url):
        leaf = os.path.basename(url)
        if leaf == 'ubuntu':
            leaf = os.path.basename(os.path.dirname(url))
        return leaf
    return [_name_of(url) for url in urls]
def get_affected_by_sync(
        package_descriptors, targets,
        testing_repo_data, main_repo_data):
    """
    For each package and target check if it is affected by a sync.

    A package is affected when its (suffix-stripped) version in the testing
    repository differs from the one in the main repository.

    :return: a dict indexed by package names containing
             dicts indexed by targets containing a boolean flag
    """
    affected_by_sync = {}
    for descriptor in package_descriptors.values():
        deb_name = descriptor.debian_pkg_name
        per_target = {}
        for target in targets:
            in_testing = _strip_version_suffix(
                testing_repo_data.get(target, {}).get(deb_name, None))
            in_main = _strip_version_suffix(
                main_repo_data.get(target, {}).get(deb_name, None))
            per_target[target] = in_testing != in_main
        affected_by_sync[descriptor.pkg_name] = per_target
    return affected_by_sync
def get_regressions(
        package_descriptors, targets,
        building_repo_data, testing_repo_data, main_repo_data):
    """
    For each package and target check if it is a regression.

    A regression is flagged when the main repo carries a version that is
    higher than the one in the building or testing repo, or when either of
    those repos does not contain the package at all.

    :return: a dict indexed by package names containing
             dicts indexed by targets containing a boolean flag
    """
    regressions = {}
    for descriptor in package_descriptors.values():
        deb_name = descriptor.debian_pkg_name
        per_target = {}
        for target in targets:
            is_regression = False
            main_version = \
                main_repo_data.get(target, {}).get(deb_name, None)
            if main_version is not None:
                main_loose = LooseVersion(main_version)
                for other_repo in (building_repo_data, testing_repo_data):
                    candidate = \
                        other_repo.get(target, {}).get(deb_name, None)
                    if not candidate or main_loose > LooseVersion(candidate):
                        is_regression = True
            per_target[target] = is_regression
        regressions[descriptor.pkg_name] = per_target
    return regressions
def get_version_status(
        package_descriptors, targets, repos_data,
        strip_version=False, strip_os_code_name=False):
    """
    For each package and target classify every repository's version.

    Each repo's version is compared against the descriptor's reference
    version and classified as 'equal', 'higher', 'lower', 'missing'
    (reference exists but repo lacks the package), 'obsolete' (repo has it
    but there is no reference version) or 'ignore' (neither exists).

    :return: a dict indexed by package names containing
             dicts indexed by targets containing
             a list of status strings (one for each repo)
    """
    status = {}
    for package_descriptor in package_descriptors.values():
        pkg_name = package_descriptor.pkg_name
        debian_pkg_name = package_descriptor.debian_pkg_name
        ref_version = package_descriptor.version
        if strip_version:
            ref_version = _strip_version_suffix(ref_version)
        status[pkg_name] = {}
        for target in targets:
            statuses = []
            for repo_data in repos_data:
                version = repo_data.get(target, {}).get(debian_pkg_name, None)
                if strip_version:
                    version = _strip_version_suffix(version)
                if strip_os_code_name:
                    version = _strip_os_code_name_suffix(
                        version, target.os_code_name)

                if ref_version:
                    if not version:
                        statuses.append('missing')
                    elif version.startswith(ref_version):  # including equal
                        statuses.append('equal')
                    else:
                        if _version_is_gt_other(version, ref_version):
                            statuses.append('higher')
                        else:
                            statuses.append('lower')
                else:
                    if not version:
                        statuses.append('ignore')
                    else:
                        statuses.append('obsolete')
            status[pkg_name][target] = statuses
    return status
# Matches the leading dotted/dashed numeric part of a Debian version string.
version_regex = re.compile(r'[0-9.-]+[0-9]')


def _strip_version_suffix(version):
    """
    Remove trailing junk from the version number.

    >>> _strip_version_suffix('')
    ''
    >>> _strip_version_suffix('None')
    'None'
    >>> _strip_version_suffix('1.2.3-4trusty-20140131-1359-+0000')
    '1.2.3-4'
    >>> _strip_version_suffix('1.2.3-foo')
    '1.2.3'
    """
    # BUG FIX: the doctests referenced a nonexistent name
    # `strip_version_suffix` (missing leading underscore), so any doctest
    # run failed with a NameError.  The unnecessary `global` declaration
    # (read-only access) was dropped as well.
    if not version:
        return version
    match = version_regex.search(version)
    return match.group(0) if match else version
def _strip_os_code_name_suffix(version, os_code_name):
    """Truncate *version* at the first occurrence of *os_code_name*.

    Falsy versions and versions not mentioning the OS code name are
    returned unchanged.
    """
    if not version:
        return version
    cut = version.find(os_code_name)
    return version if cut == -1 else version[:cut]
def get_homogeneous(package_descriptors, targets, repos_data):
    """
    For each package check if the version in one repo is equal for all targets.

    The version may still differ between repos; only within-repo uniformity
    across targets is checked (after stripping version suffixes).

    :return: a dict indexed by package names containing a boolean flag
    """
    homogeneous = {}
    for descriptor in package_descriptors.values():
        deb_name = descriptor.debian_pkg_name
        unique_counts = []
        for repo_data in repos_data:
            versions = set(
                _strip_version_suffix(
                    repo_data.get(target, {}).get(deb_name, None))
                for target in targets)
            unique_counts.append(len(versions))
        homogeneous[descriptor.pkg_name] = max(unique_counts) == 1
    return homogeneous
def get_package_counts(package_descriptors, targets, repos_data):
    """
    Get the number of packages per target and repository.
    :return: a dict indexed by targets containing
      a list of integer values (one for each repo)
    """
    counts = dict((target, [0] * len(repos_data)) for target in targets)
    for descriptor in package_descriptors.values():
        deb_name = descriptor.debian_pkg_name
        for target in targets:
            for repo_index, repo_data in enumerate(repos_data):
                # count only packages with a truthy version entry
                if repo_data.get(target, {}).get(deb_name, None):
                    counts[target][repo_index] += 1
    return counts
def get_jenkins_job_urls(
        rosdistro_name, jenkins_url, release_build_name, targets):
    """
    Get the Jenkins job urls for each target.
    The placeholder {pkg} needs to be replaced with the ROS package name.
    :return: a dict indexed by targets containing a string
    """
    urls = {}
    for target in targets:
        view_name = get_release_view_name(
            rosdistro_name, release_build_name,
            target.os_name, target.os_code_name, target.arch)
        prefix = '%s/view/%s/job/%s__{pkg}__' % \
            (jenkins_url, view_name, view_name)
        # source jobs have no arch component in their name
        if target.arch == 'source':
            suffix = '%s_%s__source' % (target.os_name, target.os_code_name)
        else:
            suffix = '%s_%s_%s__binary' % \
                (target.os_name, target.os_code_name, target.arch)
        urls[target] = prefix + suffix
    return urls
def additional_resources(output_dir, copy_resources=False):
    """Place the 'css' and 'js' template resources under *output_dir*.

    Existing destinations are left untouched; otherwise the resources are
    either copied or symlinked depending on *copy_resources*.
    """
    for subfolder in ('css', 'js'):
        destination = os.path.join(output_dir, subfolder)
        if os.path.exists(destination):
            continue
        source = get_template_path(os.path.join('status', subfolder))
        if copy_resources:
            shutil.copytree(source, destination)
        else:
            os.symlink(os.path.abspath(source), destination)
def get_resource_hashes():
    """Hash every css/js template resource, keyed by file name.

    Only files whose extension matches their subfolder ('.css' in 'css',
    '.js' in 'js') are hashed.
    """
    hashes = {}
    for subfolder in ('css', 'js'):
        folder = get_template_path(os.path.join('status', subfolder))
        wanted_suffix = '.%s' % subfolder
        for filename in os.listdir(folder):
            if not filename.endswith(wanted_suffix):
                continue
            with open(os.path.join(folder, filename)) as handle:
                hashes[filename] = hash(tuple(handle.read()))
    return hashes
def _version_is_gt_other(version, other_version):
    """Return True if *version* is greater than *other_version*.

    Comparison uses LooseVersion; on Python 3 comparing versions whose
    component lists mix ints and strings raises a TypeError
    (http://bugs.python.org/issue14894), in which case the components are
    first coerced into a comparable form.
    """
    try:
        # might raise TypeError: http://bugs.python.org/issue14894
        return LooseVersion(version) > LooseVersion(other_version)
    except TypeError:
        loose_version, other_loose_version = \
            _get_comparable_loose_versions(version, other_version)
        # bug fix: the fallback previously compared with '<', inverting the
        # result on this path relative to the function's contract above
        return loose_version > other_loose_version
def _get_comparable_loose_versions(version_str1, version_str2):
    """Make two LooseVersions comparable under Python 3.

    On Python 3, comparing LooseVersions whose component lists mix ints and
    strings raises a TypeError (http://bugs.python.org/issue14894).  Any
    component pair that fails to compare is coerced to strings *in place*
    inside each LooseVersion's ``version`` list.

    :return: a tuple of the two (possibly mutated) LooseVersion objects
    """
    loose_version1 = LooseVersion(version_str1)
    loose_version2 = LooseVersion(version_str2)
    if sys.version_info[0] > 2:
        # might raise TypeError in Python 3: http://bugs.python.org/issue14894
        version_parts1 = loose_version1.version
        version_parts2 = loose_version2.version
        for i in range(min(len(version_parts1), len(version_parts2))):
            try:
                # deliberate no-op comparison: only probes whether the two
                # components can be compared at all
                version_parts1[i] < version_parts2[i]
            except TypeError:
                # coerce both components to str so ordering is well defined
                version_parts1[i] = str(version_parts1[i])
                version_parts2[i] = str(version_parts2[i])
    return loose_version1, loose_version2
def build_release_compare_page(
        config_url, rosdistro_names,
        output_dir, copy_resources=False):
    """Generate an HTML page comparing repository versions across rosdistros.

    The page is written to ``<output_dir>/compare_<names>.html`` and the
    shared css/js resources are placed next to it.

    :param config_url: url of the ros_buildfarm config index
    :param rosdistro_names: sequence of rosdistro names to compare
    :param output_dir: directory the page is generated into
    :param copy_resources: copy css/js resources instead of symlinking them
    """
    from rosdistro import get_cached_distribution
    from rosdistro import get_index
    start_time = time.time()
    config = get_config_index(config_url)
    index = get_index(config.rosdistro_index_url)
    # get all input data
    distros = [get_cached_distribution(index, d) for d in rosdistro_names]
    repo_names = [d.repositories.keys() for d in distros]
    # flatten the per-distro name lists; duplicates are harmless since the
    # dict below collapses them
    repo_names = [x for y in repo_names for x in y]
    repos_data = {}
    for repo_name in repo_names:
        repo_data = _compare_repo_version(distros, repo_name)
        # _compare_repo_version returns None for repos without any release
        if repo_data:
            repos_data[repo_name] = repo_data
    template_name = 'status/release_compare_page.html.em'
    data = {
        'title':
            'ROS %s - version compare' % ' '.join([x.capitalize() for x in rosdistro_names]),
        'start_time': start_time,
        'start_time_local_str': time.strftime('%Y-%m-%d %H:%M:%S %z', time.localtime(start_time)),
        'resource_hashes': get_resource_hashes(),
        'rosdistro_names': rosdistro_names,
        'repos_data': repos_data,
    }
    html = expand_template(template_name, data)
    output_filename = os.path.join(
        output_dir, 'compare_%s.html' % '_'.join(rosdistro_names))
    print("Generating compare page: '%s'" % output_filename)
    with open(output_filename, 'w') as h:
        h.write(html)
    additional_resources(output_dir, copy_resources=copy_resources)
class CompareRow(object):
    """A single table row of the release compare page for one repository."""

    def __init__(self, repo_name):
        self.repo_name = repo_name
        self.repo_urls = []
        self.maintainers = {}
        self.versions = []
        self.branches = []

    def get_repo_name_with_link(self):
        """Return the repo name as HTML, linked to its known url(s)."""
        valid_urls = [url for url in self.repo_urls if url]
        if len(set(valid_urls)) == 1:
            return '<a href="%s">%s</a>' % (valid_urls[0], self.repo_name)
        # multiple distinct urls: append a numbered link for each one,
        # preserving first-seen order
        unique_urls = []
        for url in valid_urls:
            if url not in unique_urls:
                unique_urls.append(url)
        parts = [self.repo_name]
        parts.extend(
            ' [<a href="%s">%d</a>]' % (url, number)
            for number, url in enumerate(unique_urls, start=1))
        return ' '.join(parts)

    def get_maintainers(self):
        """Return maintainer HTML snippets, ordered by maintainer name."""
        return ' '.join(
            self.maintainers[name] for name in sorted(self.maintainers.keys()))

    def get_labels(self, distros):
        """Return warning labels for suspicious version/branch combinations."""
        all_versions = [LooseVersion(v) if v else v for v in self.versions]
        valid_versions = [v for v in all_versions if v]
        labels = []
        version_pairs = list(itertools.combinations(valid_versions, 2))
        if any(_is_only_patch_is_different(a, b) for a, b in version_pairs):
            labels.append('DIFF_PATCH')
        if any(_is_greater(a, b) for a, b in version_pairs):
            labels.append('DOWNGRADE_VERSION')
        versions_and_branches = zip(
            itertools.combinations(all_versions, 2),
            itertools.combinations(self.branches, 2))
        if any(
            _is_same_version_but_different_branch(va, vb, ba, bb)
            for (va, vb), (ba, bb) in versions_and_branches
        ):
            labels.append('DIFF_BRANCH_SAME_VERSION')
        return labels
def _is_only_patch_is_different(a, b):
return a.version[0] == b.version[0] and \
a.version[1] == b.version[1] and a.version[2] != b.version[2]
def _is_greater(a, b):
return a.version[0] > b.version[0] or \
(a.version[0] == b.version[0] and a.version[1] > b.version[1])
def _is_same_version_but_different_branch(version_a, version_b, branch_a, branch_b):
# skip when any version is unknown
if not version_a or not version_b:
return False
# skip when any branch is unknown or they are equal
if not branch_a or not branch_b or branch_a == branch_b:
return False
return version_a.version[0] == version_b.version[0] and \
version_a.version[1] == version_b.version[1]
def _compare_repo_version(distros, repo_name):
    """Collect one compare-page table row for *repo_name* across *distros*.

    :param distros: sequence of rosdistro distribution objects
    :param repo_name: name of the repository to compare
    :return: a list of HTML cell strings (linked repo name with labels,
      maintainers, then one version column per distro), or None if the
      repository has no released version in any of the distros
    """
    from catkin_pkg.package import InvalidPackage, parse_package_string
    row = CompareRow(repo_name)
    for distro in distros:
        repo_url = None
        version = None
        branch = None
        if repo_name in distro.repositories:
            repo = distro.repositories[repo_name]
            rel_repo = repo.release_repository
            if rel_repo:
                version = rel_repo.version
                # collect maintainers from every released package's manifest
                for pkg_name in rel_repo.package_names:
                    pkg_xml = distro.get_release_package_xml(pkg_name)
                    if pkg_xml is not None:
                        try:
                            pkg = parse_package_string(pkg_xml)
                            for m in pkg.maintainers:
                                row.maintainers[m.name] = '<a href="mailto:%s">%s</a>' % \
                                    (m.email, m.name)
                        except InvalidPackage:
                            # 'zzz' sorts last, so the warning appears at the
                            # end of the maintainer list
                            row.maintainers['zzz'] = '<b>invalid package.xml in %s</b>' % \
                                distro.name
                # prefer the source repo url, fall back to the doc repo url
                if repo.source_repository:
                    repo_url = repo.source_repository.url
                elif repo.doc_repository:
                    repo_url = repo.doc_repository.url
            source_repo = repo.source_repository
            if source_repo:
                branch = source_repo.version
            else:
                # bug fix: this previously read repo.source_repository again
                # (always falsy in this branch), so the doc repository branch
                # was never picked up
                doc_repo = repo.doc_repository
                if doc_repo:
                    branch = doc_repo.version
        row.repo_urls.append(repo_url)
        row.versions.append(version)
        row.branches.append(branch)
    # skip if no versions available
    if not [v for v in row.versions if v]:
        return None
    data = [row.get_repo_name_with_link(), row.get_maintainers()] + \
        [v if v else '' for v in row.versions]
    labels = row.get_labels(distros)
    if len(labels) > 0:
        data[0] += ' <span class="ht">%s</span>' % ' '.join(labels)
    # div-wrap all cells for layout reasons
    for i, value in enumerate(data):
        data[i] = '<div>%s</div>' % value
    return data
| apache-2.0 |
Luftzig/nimoy | specs/nimoy/ast_tools/expression_transformer_spec.py | 1 | 3070 | import ast
import _ast
from nimoy.specification import Specification
from nimoy.ast_tools.expression_transformer import ComparisonExpressionTransformer, ThrownExpressionTransformer
class ComparisonExpressionTransformerSpec(Specification):
    """Specs for ComparisonExpressionTransformer.

    The transformer is expected to rewrite bare comparison expressions
    (==, !=, <, <=, >, >=, is, is not, in, not in, and the '@' regex-match
    operator) into calls to an internal ``_compare`` helper.  The bare
    comparisons under ``with then:`` are themselves nimoy assertions.
    """

    def equality_expressions_are_transformed(self):
        with setup:
            # one statement per supported comparison operator; '@' is
            # the regex-match operator
            module_definition = """1 == 2
1 != 2
1 < 2
1 <= 2
1 > 2
1 >= 2
1 is 2
1 is not 2
1 in 2
1 not in 2
'The quick brown fox' @ '.+brown.+'
"""
            node = ast.parse(module_definition, mode='exec')
        with when:
            ComparisonExpressionTransformer().visit(node)
        with then:
            # every top-level expression must now be a call to '_compare'
            body_elements = node.body
            all([isinstance(body_element.value, _ast.Call) for body_element in body_elements]) == True
            all([body_element.value.func.attr == '_compare' for body_element in body_elements]) == True

    def nested_if_equality_is_transformed(self):
        with setup:
            module_definition = """
if True:
    1 == 2
"""
            node = ast.parse(module_definition, mode='exec')
        with when:
            ComparisonExpressionTransformer().visit(node)
        with then:
            # the comparison nested inside the if body must be transformed
            body_expression = node.body[0]
            isinstance(body_expression, _ast.If) == True
            isinstance(body_expression.body[0].value, _ast.Call) == True
            body_expression.body[0].value.func.attr == '_compare'

    def nested_for_equality_is_transformed(self):
        with setup:
            module_definition = """
for x in [1, 2]:
    1 == 2
"""
            node = ast.parse(module_definition, mode='exec')
        with when:
            ComparisonExpressionTransformer().visit(node)
        with then:
            # the comparison nested inside the loop body must be transformed
            body_expression = node.body[0]
            isinstance(body_expression, _ast.For) == True
            isinstance(body_expression.body[0].value, _ast.Call) == True
            body_expression.body[0].value.func.attr == '_compare'
class ThrownExpressionTransformerSpec(Specification):
    """Specs for ThrownExpressionTransformer.

    ``thrown(SomeError)`` calls are expected to be rewritten into calls to
    an internal ``_exception_thrown`` helper, both as a bare statement and
    on the right-hand side of an assignment.  The bare comparisons under
    ``with then:`` are nimoy assertions.
    """

    def single_thrown_call_is_transformed(self):
        with setup:
            module_definition = "thrown(ArithmeticError)"
            node = ast.parse(module_definition, mode='exec')
        with when:
            ThrownExpressionTransformer().visit(node)
        with then:
            thrown_expression = node.body[0].value
            isinstance(thrown_expression, _ast.Call) == True
            thrown_expression.func.attr == '_exception_thrown'
            # the expected exception class is passed through unchanged
            thrown_expression.args[0].id == 'ArithmeticError'

    def assigned_thrown_call_is_transformed(self):
        with setup:
            module_definition = "ex = thrown(ArithmeticError)"
            node = ast.parse(module_definition, mode='exec')
        with when:
            ThrownExpressionTransformer().visit(node)
        with then:
            # node.body[0] is an Assign; its .value is the transformed call
            thrown_expression = node.body[0].value
            isinstance(thrown_expression, _ast.Call) == True
            thrown_expression.func.attr == '_exception_thrown'
            thrown_expression.args[0].id == 'ArithmeticError'
| apache-2.0 |
OspreyX/trading-with-python | lib/qtpandas.py | 77 | 7937 | '''
Easy integration of DataFrame into pyqt framework
Copyright: Jev Kuznetsov
Licence: BSD
'''
from PyQt4.QtCore import (QAbstractTableModel,Qt,QVariant,QModelIndex,SIGNAL)
from PyQt4.QtGui import (QApplication,QDialog,QVBoxLayout, QHBoxLayout, QTableView, QPushButton,
QWidget,QTableWidget, QHeaderView, QFont,QMenu,QAbstractItemView)
from pandas import DataFrame, Index
class DataFrameModel(QAbstractTableModel):
    ''' data model for a DataFrame class '''
    def __init__(self,parent=None):
        super(DataFrameModel,self).__init__(parent)
        self.df = DataFrame()
        self.columnFormat = {} # format columns: column name -> printf-style format string

    def setFormat(self,fmt):
        """
        set string formatting for the output
        example : format = {'close':"%.2f"}
        """
        self.columnFormat = fmt

    def setDataFrame(self,dataFrame):
        # replace the underlying DataFrame and refresh all attached views
        self.df = dataFrame
        self.signalUpdate()

    def signalUpdate(self):
        ''' tell viewers to update their data (this is full update, not efficient)'''
        self.layoutChanged.emit()

    def __repr__(self):
        return str(self.df)

    def setData(self,index,value, role=Qt.EditRole):
        # write an edited cell back into the DataFrame, converting the
        # QVariant to the column's dtype first; returns True on success
        # NOTE(review): `np` is only imported under the __main__ guard at the
        # bottom of this file - calling this after a plain module import
        # would raise NameError; consider a top-level numpy import.
        if index.isValid():
            row,column = index.row(), index.column()
            dtype = self.df.dtypes.tolist()[column] # get column dtype
            if np.issubdtype(dtype,np.float):
                val,ok = value.toFloat()
            elif np.issubdtype(dtype,np.int):
                val,ok = value.toInt()
            else:
                val = value.toString()
                ok = True
            if ok:
                self.df.iloc[row,column] = val
                return True
        # invalid index or failed QVariant conversion
        return False

    def flags(self, index):
        # every valid cell is editable
        if not index.isValid():
            return Qt.ItemIsEnabled
        return Qt.ItemFlags(
            QAbstractTableModel.flags(self, index)|
            Qt.ItemIsEditable)

    def appendRow(self, index, data=0):
        # add (or overwrite) the row labelled `index`, filling it with `data`
        self.df.loc[index,:] = data
        self.signalUpdate()

    def deleteRow(self, index):
        # NOTE(review): deletion is stubbed out - only the row label lookup
        # remains and is unused; the drop/model-update calls are commented away
        idx = self.df.index[index]
        #self.beginRemoveRows(QModelIndex(), index,index)
        #self.df = self.df.drop(idx,axis=0)
        #self.endRemoveRows()
        #self.signalUpdate()

    #------------- table display functions -----------------
    def headerData(self,section,orientation,role=Qt.DisplayRole):
        # column headers come from the DataFrame columns, row headers from
        # its index (stringified)
        if role != Qt.DisplayRole:
            return QVariant()
        if orientation == Qt.Horizontal:
            try:
                return self.df.columns.tolist()[section]
            except (IndexError, ):
                return QVariant()
        elif orientation == Qt.Vertical:
            try:
                #return self.df.index.tolist()
                return str(self.df.index.tolist()[section])
            except (IndexError, ):
                return QVariant()

    def data(self, index, role=Qt.DisplayRole):
        # render a cell, applying the per-column format string if one was set
        if role != Qt.DisplayRole:
            return QVariant()
        if not index.isValid():
            return QVariant()
        col = self.df.ix[:,index.column()] # get a column slice first to get the right data type
        elm = col[index.row()]
        #elm = self.df.ix[index.row(),index.column()]
        if self.df.columns[index.column()] in self.columnFormat.keys():
            return QVariant(self.columnFormat[self.df.columns[index.column()]] % elm )
        else:
            return QVariant(str(elm))

    def sort(self,nCol,order):
        # sort the whole DataFrame by the clicked column
        # NOTE(review): DataFrame.sort(columns=...) is the old pandas API;
        # newer pandas would need sort_values - verify the pinned version.
        self.layoutAboutToBeChanged.emit()
        if order == Qt.AscendingOrder:
            self.df = self.df.sort(columns=self.df.columns[nCol], ascending=True)
        elif order == Qt.DescendingOrder:
            self.df = self.df.sort(columns=self.df.columns[nCol], ascending=False)
        self.layoutChanged.emit()

    def rowCount(self, index=QModelIndex()):
        return self.df.shape[0]

    def columnCount(self, index=QModelIndex()):
        return self.df.shape[1]
class TableView(QTableView):
    """ extended table view """
    def __init__(self,name='TableView1', parent=None):
        super(TableView,self).__init__(parent)
        self.name = name
        self.setSelectionBehavior(QAbstractItemView.SelectRows)

    def contextMenuEvent(self, event):
        # right-click menu with a single 'delete row' action
        menu = QMenu(self)
        Action = menu.addAction("delete row")
        Action.triggered.connect(self.deleteRow)
        menu.exec_(event.globalPos())

    def deleteRow(self):
        # NOTE(review): only prints the current selection; the actual model
        # deletion call below is commented out (and DataFrameModel.deleteRow
        # is itself a stub)
        print "Action triggered from " + self.name
        print 'Selected rows:'
        for idx in self.selectionModel().selectedRows():
            print idx.row()
        # self.model.deleteRow(idx.row())
class DataFrameWidget(QWidget):
    ''' a simple widget for using DataFrames in a gui '''
    def __init__(self,name='DataFrameTable1', parent=None):
        super(DataFrameWidget,self).__init__(parent)
        self.name = name
        # model starts out wrapping an empty DataFrame
        self.dataModel = DataFrameModel()
        self.dataModel.setDataFrame(DataFrame())
        self.dataTable = QTableView()
        #self.dataTable.setSelectionBehavior(QAbstractItemView.SelectRows)
        self.dataTable.setSortingEnabled(True)
        self.dataTable.setModel(self.dataModel)
        self.dataModel.signalUpdate()
        #self.dataTable.setFont(QFont("Courier New", 8))
        layout = QVBoxLayout()
        layout.addWidget(self.dataTable)
        self.setLayout(layout)

    def setFormat(self,fmt):
        """ set non-default string formatting for a column """
        # py2: dict.iteritems; fmt maps column name -> format string
        for colName, f in fmt.iteritems():
            self.dataModel.columnFormat[colName]=f

    def fitColumns(self):
        # stretch columns to fill the available widget width
        self.dataTable.horizontalHeader().setResizeMode(QHeaderView.Stretch)

    def setDataFrame(self,df):
        self.dataModel.setDataFrame(df)

    def resizeColumnsToContents(self):
        self.dataTable.resizeColumnsToContents()

    def insertRow(self,index, data=None):
        # NOTE(review): DataFrameModel.appendRow defaults data to 0, so the
        # None default here overrides that and fills the new row with None
        self.dataModel.appendRow(index,data)
#-----------------stand alone test code
def testDf():
    ''' creates test dataframe '''
    column_order = ['int', 'float', 'string', 'nan']
    data = {
        'int': [1, 2, 3],
        'float': [1. / 3, 2.5, 3.5],
        'string': ['a', 'b', 'c'],
        'nan': [np.nan, np.nan, np.nan],
    }
    frame = DataFrame(data, index=Index(['AAA', 'BBB', 'CCC']))
    return frame[column_order]
class Form(QDialog):
    ''' stand-alone demo dialog embedding a DataFrameWidget '''
    def __init__(self,parent=None):
        super(Form,self).__init__(parent)
        df = testDf() # make up some data
        self.table = DataFrameWidget(parent=self)
        self.table.setDataFrame(df)
        #self.table.resizeColumnsToContents()
        self.table.fitColumns()
        self.table.setFormat({'float': '%.2f'})
        #buttons
        #but_add = QPushButton('Add')
        but_test = QPushButton('Test')
        but_test.clicked.connect(self.testFcn)
        hbox = QHBoxLayout()
        #hbox.addself.table(but_add)
        hbox.addWidget(but_test)
        layout = QVBoxLayout()
        layout.addWidget(self.table)
        layout.addLayout(hbox)
        self.setLayout(layout)

    def testFcn(self):
        # button handler: demonstrates appending a row labelled 'foo'
        print 'test function'
        self.table.insertRow('foo')
if __name__=='__main__':
    # stand-alone demo: build the dialog and run the Qt event loop
    import sys
    import numpy as np
    # NOTE(review): numpy is only imported here, but DataFrameModel.setData
    # and testDf() also use `np` - importing this module from elsewhere and
    # calling those would raise NameError; consider a top-level import.
    app = QApplication(sys.argv)
    form = Form()
    form.show()
    app.exec_()
| bsd-3-clause |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.