repo_name stringlengths 6 100 | path stringlengths 4 294 | copies stringlengths 1 5 | size stringlengths 4 6 | content stringlengths 606 896k | license stringclasses 15
values |
|---|---|---|---|---|---|
40223232/w16b_test | wsgi.py | 1 | 27797 | #@+leo-ver=5-thin
#@+node:2014fall.20141212095015.1775: * @file wsgi.py
# coding=utf-8
# 上面的程式內容編碼必須在程式的第一或者第二行才會有作用
################# (1) 模組導入區
# 導入 cherrypy 模組, 為了在 OpenShift 平台上使用 cherrypy 模組, 必須透過 setup.py 安裝
#@@language python
#@@tabwidth -4
#@+<<declarations>>
#@+node:2014fall.20141212095015.1776: ** <<declarations>> (wsgi)
import cherrypy
# 導入 Python 內建的 os 模組, 因為 os 模組為 Python 內建, 所以無需透過 setup.py 安裝
import os
# 導入 random 模組
import random
# 導入 gear 模組
import gear
################# (2) 廣域變數設定區
# 確定程式檔案所在目錄, 在 Windows 下有最後的反斜線
_curdir = os.path.join(os.getcwd(), os.path.dirname(__file__))
# 設定在雲端與近端的資料儲存目錄
if 'OPENSHIFT_REPO_DIR' in os.environ.keys():
# 表示程式在雲端執行
download_root_dir = os.environ['OPENSHIFT_DATA_DIR']
data_dir = os.environ['OPENSHIFT_DATA_DIR']
else:
# 表示程式在近端執行
download_root_dir = _curdir + "/local_data/"
data_dir = _curdir + "/local_data/"
'''以下為近端 input() 與 for 迴圈應用的程式碼, 若要將程式送到 OpenShift 執行, 除了採用 CherryPy 網際框架外, 還要轉為 html 列印
# 利用 input() 取得的資料型別為字串
toprint = input("要印甚麼內容?")
# 若要將 input() 取得的字串轉為整數使用, 必須利用 int() 轉換
repeat_no = int(input("重複列印幾次?"))
for i in range(repeat_no):
print(toprint)
'''
#@-<<declarations>>
#@+others
#@+node:2014fall.20141212095015.1777: ** class Hello
################# (3) 程式類別定義區
# 以下改用 CherryPy 網際框架程式架構
# 以下為 Hello 類別的設計內容, 其中的 object 使用, 表示 Hello 類別繼承 object 的所有特性, 包括方法與屬性設計
class Hello(object):
    """CherryPy application: gear-drawing demos and a number-guessing game."""

    # Per-application CherryPy configuration.
    _cp_config = {
        'tools.encode.encoding': 'utf-8',
        'tools.sessions.on' : True,
        'tools.sessions.storage_type' : 'file',
        #'tools.sessions.locking' : 'explicit',
        # Sessions are stored as files in the tmp directory under data_dir.
        'tools.sessions.storage_path' : data_dir+'/tmp',
        # Session lifetime: 60 minutes.
        'tools.sessions.timeout' : 60
    }
#@+others
#@+node:2014fall.20141212095015.2004: *3* __init__
def __init__(self):
# 配合透過案例啟始建立所需的目錄
if not os.path.isdir(data_dir+'/tmp'):
os.mkdir(data_dir+'/tmp')
if not os.path.isdir(data_dir+"/downloads"):
os.mkdir(data_dir+"/downloads")
if not os.path.isdir(data_dir+"/images"):
os.mkdir(data_dir+"/images")
#@+node:2014fall.20141212095015.1778: *3* index_orig
# 以 @ 開頭的 cherrypy.expose 為 decorator, 用來表示隨後的成員方法, 可以直接讓使用者以 URL 連結執行
@cherrypy.expose
# index 方法為 CherryPy 各類別成員方法中的內建(default)方法, 當使用者執行時未指定方法, 系統將會優先執行 index 方法
# 有 self 的方法為類別中的成員方法, Python 程式透過此一 self 在各成員方法間傳遞物件內容
def index_orig(self, toprint="Hello World!"):
return toprint
#@+node:2014fall.20141212095015.1779: *3* hello
@cherrypy.expose
def hello(self, toprint="Hello World!"):
return toprint
#@+node:2014fall.20141215194146.1791: *3* index
@cherrypy.expose
# N 為齒數, M 為模數, P 為壓力角
def index(self):
outstring = '''
<!DOCTYPE html>
<html>
<head>
40223232
</head>
<body>
<br /><a href ="index">index</a><br />
</body>
</html>
'''
return outstring
#@+node:2015.20150330144929.1713: *3* twoDgear
@cherrypy.expose
# N 為齒數, M 為模數, P 為壓力角
def twoDgear(self, N=20, M=5, P=15):
outstring = '''
<!DOCTYPE html>
<html>
<head>
<meta http-equiv="content-type" content="text/html;charset=utf-8">
<!-- 載入 brython.js -->
<script type="text/javascript" src="/static/Brython3.1.1-20150328-091302/brython.js"></script>
<script src="/static/Cango2D.js" type="text/javascript"></script>
<script src="/static/gearUtils-04.js" type="text/javascript"></script>
</head>
<!-- 啟動 brython() -->
<body onload="brython()">
<form method=POST action=do2Dgear>
齒數:<input type=text name=N><br />
模數:<input type=text name=M><br />
壓力角:<input type=text name=P><br />
<input type=submit value=send>
</form>
</body>
</html>
'''
return outstring
#@+node:2015.20150331094055.1733: *3* threeDgear
@cherrypy.expose
# N 為齒數, M 為模數, P 為壓力角
def threeDgear(self, N=20, M=5, P=15):
outstring = '''
<!DOCTYPE html>
<html>
<head>
<meta http-equiv="content-type" content="text/html;charset=utf-8">
<!-- 載入 brython.js -->
<script type="text/javascript" src="/static/Brython3.1.1-20150328-091302/brython.js"></script>
<script src="/static/Cango2D.js" type="text/javascript"></script>
<script src="/static/gearUtils-04.js" type="text/javascript"></script>
</head>
<!-- 啟動 brython() -->
<body onload="brython()">
<form method=POST action=do3Dgear>
齒數:<input type=text name=N><br />
模數:<input type=text name=M><br />
壓力角:<input type=text name=P><br />
<input type=submit value=send>
</form>
</body>
</html>
'''
return outstring
#@+node:2015.20150330144929.1762: *3* do2Dgear
@cherrypy.expose
# N 為齒數, M 為模數, P 為壓力角
def do2Dgear(self, N=20, M=5, P=15):
outstring = '''
<!DOCTYPE html>
<html>
<head>
<meta http-equiv="content-type" content="text/html;charset=utf-8">
<!-- 載入 brython.js -->
<script type="text/javascript" src="/static/Brython3.1.1-20150328-091302/brython.js"></script>
<script src="/static/Cango2D.js" type="text/javascript"></script>
<script src="/static/gearUtils-04.js" type="text/javascript"></script>
</head>
<!-- 啟動 brython() -->
<body onload="brython()">
<!-- 以下為 canvas 畫圖程式 -->
<script type="text/python">
# 從 browser 導入 document
from browser import document
import math
# 畫布指定在名稱為 plotarea 的 canvas 上
canvas = document["plotarea"]
ctx = canvas.getContext("2d")
# 用紅色畫一條直線
ctx.beginPath()
ctx.lineWidth = 3
'''
outstring += '''
ctx.moveTo('''+str(N)+","+str(M)+")"
outstring += '''
ctx.lineTo(0, 500)
ctx.strokeStyle = "red"
ctx.stroke()
# 用藍色再畫一條直線
ctx.beginPath()
ctx.lineWidth = 3
ctx.moveTo(0, 0)
ctx.lineTo(500, 0)
ctx.strokeStyle = "blue"
ctx.stroke()
# 用綠色再畫一條直線
ctx.beginPath()
ctx.lineWidth = 3
ctx.moveTo(0, 0)
ctx.lineTo(500, 500)
ctx.strokeStyle = "green"
ctx.stroke()
# 用黑色畫一個圓
ctx.beginPath()
ctx.lineWidth = 3
ctx.strokeStyle = "black"
ctx.arc(250,250,50,0,2*math.pi)
ctx.stroke()
</script>
<canvas id="plotarea" width="800" height="600"></canvas>
</body>
</html>
'''
return outstring
#@+node:2015.20150331094055.1735: *3* do3Dgear
@cherrypy.expose
# N 為齒數, M 為模數, P 為壓力角
def do3Dgear(self, N=20, M=5, P=15):
outstring = '''
<!DOCTYPE html>
<html>
<head>
<meta http-equiv="content-type" content="text/html;charset=utf-8">
<!-- 載入 brython.js -->
<script type="text/javascript" src="/static/Brython3.1.1-20150328-091302/brython.js"></script>
<script src="/static/Cango2D.js" type="text/javascript"></script>
<script src="/static/gearUtils-04.js" type="text/javascript"></script>
</head>
<!-- 啟動 brython() -->
<body onload="brython()">
<!-- 以下為 canvas 畫圖程式 -->
<script type="text/python">
# 從 browser 導入 document
from browser import document
import math
# 畫布指定在名稱為 plotarea 的 canvas 上
canvas = document["plotarea"]
ctx = canvas.getContext("2d")
# 用紅色畫一條直線
ctx.beginPath()
ctx.lineWidth = 3
'''
outstring += '''
ctx.moveTo('''+str(N)+","+str(M)+")"
outstring += '''
ctx.lineTo(0, 500)
ctx.strokeStyle = "red"
ctx.stroke()
# 用藍色再畫一條直線
ctx.beginPath()
ctx.lineWidth = 3
ctx.moveTo(0, 0)
ctx.lineTo(500, 0)
ctx.strokeStyle = "blue"
ctx.stroke()
# 用綠色再畫一條直線
ctx.beginPath()
ctx.lineWidth = 3
ctx.moveTo(0, 0)
ctx.lineTo(500, 500)
ctx.strokeStyle = "green"
ctx.stroke()
# 用黑色畫一個圓
ctx.beginPath()
ctx.lineWidth = 3
ctx.strokeStyle = "black"
ctx.arc(250,250,50,0,2*math.pi)
ctx.stroke()
</script>
<canvas id="plotarea" width="800" height="600"></canvas>
</body>
</html>
'''
return outstring
#@+node:2015.20150330144929.1765: *3* mygeartest
@cherrypy.expose
# N 為齒數, M 為模數, P 為壓力角
def mygeartest(self, N=20, M=5, P=15):
outstring = '''
<!DOCTYPE html>
<html>
<head>
<meta http-equiv="content-type" content="text/html;charset=utf-8">
<!-- 載入 brython.js -->
<script type="text/javascript" src="/static/Brython3.1.1-20150328-091302/brython.js"></script>
<script src="/static/Cango2D.js" type="text/javascript"></script>
<script src="/static/gearUtils-04.js" type="text/javascript"></script>
</head>
<!-- 啟動 brython() -->
<body onload="brython()">
<!-- 以下為 canvas 畫圖程式 -->
<script type="text/python">
# 從 browser 導入 document
from browser import document
from math import *
# 準備在 id="plotarea" 的 canvas 中繪圖
canvas = document["plotarea"]
ctx = canvas.getContext("2d")
def create_line(x1, y1, x2, y2, width=3, fill="red"):
ctx.beginPath()
ctx.lineWidth = width
ctx.moveTo(x1, y1)
ctx.lineTo(x2, y2)
ctx.strokeStyle = fill
ctx.stroke()
# 導入數學函式後, 圓周率為 pi
# deg 為角度轉為徑度的轉換因子
deg = pi/180.
#
# 以下分別為正齒輪繪圖與主 tkinter 畫布繪圖
#
# 定義一個繪正齒輪的繪圖函式
# midx 為齒輪圓心 x 座標
# midy 為齒輪圓心 y 座標
# rp 為節圓半徑, n 為齒數
def 齒輪(midx, midy, rp, n, 顏色):
# 將角度轉換因子設為全域變數
global deg
# 齒輪漸開線分成 15 線段繪製
imax = 15
# 在輸入的畫布上繪製直線, 由圓心到節圓 y 軸頂點畫一直線
create_line(midx, midy, midx, midy-rp)
# 畫出 rp 圓, 畫圓函式尚未定義
#create_oval(midx-rp, midy-rp, midx+rp, midy+rp, width=2)
# a 為模數 (代表公制中齒的大小), 模數為節圓直徑(稱為節徑)除以齒數
# 模數也就是齒冠大小
a=2*rp/n
# d 為齒根大小, 為模數的 1.157 或 1.25倍, 這裡採 1.25 倍
d=2.5*rp/n
# ra 為齒輪的外圍半徑
ra=rp+a
print("ra:", ra)
# 畫出 ra 圓, 畫圓函式尚未定義
#create_oval(midx-ra, midy-ra, midx+ra, midy+ra, width=1)
# rb 則為齒輪的基圓半徑
# 基圓為漸開線長齒之基準圓
rb=rp*cos(20*deg)
print("rp:", rp)
print("rb:", rb)
# 畫出 rb 圓 (基圓), 畫圓函式尚未定義
#create_oval(midx-rb, midy-rb, midx+rb, midy+rb, width=1)
# rd 為齒根圓半徑
rd=rp-d
# 當 rd 大於 rb 時
print("rd:", rd)
# 畫出 rd 圓 (齒根圓), 畫圓函式尚未定義
#create_oval(midx-rd, midy-rd, midx+rd, midy+rd, width=1)
# dr 則為基圓到齒頂圓半徑分成 imax 段後的每段半徑增量大小
# 將圓弧分成 imax 段來繪製漸開線
dr=(ra-rb)/imax
# tan(20*deg)-20*deg 為漸開線函數
sigma=pi/(2*n)+tan(20*deg)-20*deg
for j in range(n):
ang=-2.*j*pi/n+sigma
ang2=2.*j*pi/n+sigma
lxd=midx+rd*sin(ang2-2.*pi/n)
lyd=midy-rd*cos(ang2-2.*pi/n)
#for(i=0;i<=imax;i++):
for i in range(imax+1):
r=rb+i*dr
theta=sqrt((r*r)/(rb*rb)-1.)
alpha=theta-atan(theta)
xpt=r*sin(alpha-ang)
ypt=r*cos(alpha-ang)
xd=rd*sin(-ang)
yd=rd*cos(-ang)
# i=0 時, 繪線起點由齒根圓上的點, 作為起點
if(i==0):
last_x = midx+xd
last_y = midy-yd
# 由左側齒根圓作為起點, 除第一點 (xd,yd) 齒根圓上的起點外, 其餘的 (xpt,ypt)則為漸開線上的分段點
create_line((midx+xpt),(midy-ypt),(last_x),(last_y),fill=顏色)
# 最後一點, 則為齒頂圓
if(i==imax):
lfx=midx+xpt
lfy=midy-ypt
last_x = midx+xpt
last_y = midy-ypt
# the line from last end of dedendum point to the recent
# end of dedendum point
# lxd 為齒根圓上的左側 x 座標, lyd 則為 y 座標
# 下列為齒根圓上用來近似圓弧的直線
create_line((lxd),(lyd),(midx+xd),(midy-yd),fill=顏色)
#for(i=0;i<=imax;i++):
for i in range(imax+1):
r=rb+i*dr
theta=sqrt((r*r)/(rb*rb)-1.)
alpha=theta-atan(theta)
xpt=r*sin(ang2-alpha)
ypt=r*cos(ang2-alpha)
xd=rd*sin(ang2)
yd=rd*cos(ang2)
# i=0 時, 繪線起點由齒根圓上的點, 作為起點
if(i==0):
last_x = midx+xd
last_y = midy-yd
# 由右側齒根圓作為起點, 除第一點 (xd,yd) 齒根圓上的起點外, 其餘的 (xpt,ypt)則為漸開線上的分段點
create_line((midx+xpt),(midy-ypt),(last_x),(last_y),fill=顏色)
# 最後一點, 則為齒頂圓
if(i==imax):
rfx=midx+xpt
rfy=midy-ypt
last_x = midx+xpt
last_y = midy-ypt
# lfx 為齒頂圓上的左側 x 座標, lfy 則為 y 座標
# 下列為齒頂圓上用來近似圓弧的直線
create_line(lfx,lfy,rfx,rfy,fill=顏色)
齒輪(400,400,300,41,"blue")
</script>
<canvas id="plotarea" width="800" height="800"></canvas>
</body>
</html>
'''
return outstring
#@+node:amd.20150415215023.1: *3* mygeartest2
@cherrypy.expose
# N 為齒數, M 為模數, P 為壓力角
def mygeartest2(self, N=20, M=5, P=15):
outstring = '''
<!DOCTYPE html>
<html>
<head>
<meta http-equiv="content-type" content="text/html;charset=utf-8">
<!-- 載入 brython.js -->
<script type="text/javascript" src="/static/Brython3.1.1-20150328-091302/brython.js"></script>
<script src="/static/Cango2D.js" type="text/javascript"></script>
<script src="/static/gearUtils-04.js" type="text/javascript"></script>
</head>
<!-- 啟動 brython() -->
<body onload="brython()">
<!-- 以下為 canvas 畫圖程式 -->
<script type="text/python">
# 從 browser 導入 document
from browser import document
from math import *
# 請注意, 這裡導入位於 Lib/site-packages 目錄下的 spur.py 檔案
import spur
# 準備在 id="plotarea" 的 canvas 中繪圖
canvas = document["plotarea"]
ctx = canvas.getContext("2d")
# 以下利用 spur.py 程式進行繪圖, 接下來的協同設計運算必須要配合使用者的需求進行設計運算與繪圖
# 其中並將工作分配給其他組員建立類似 spur.py 的相關零件繪圖模組
# midx, midy 為齒輪圓心座標, rp 為節圓半徑, n 為齒數, pa 為壓力角, color 為線的顏色
# Gear(midx, midy, rp, n=20, pa=20, color="black"):
# 模數決定齒的尺寸大小, 囓合齒輪組必須有相同的模數與壓力角
# 壓力角 pa 單位為角度
pa = 20
# m 為模數
m = 20
# 第1齒輪齒數
n_g1 = 17
# 第2齒輪齒數
n_g2 = 11
# 第3齒輪齒數
n_g3 = 13
# 計算兩齒輪的節圓半徑
rp_g1 = m*n_g1/2
rp_g2 = m*n_g2/2
rp_g3 = m*n_g3/2
# 繪圖第1齒輪的圓心座標
x_g1 = 400
y_g1 = 400
# 第2齒輪的圓心座標, 假設排列成水平, 表示各齒輪圓心 y 座標相同
x_g2 = x_g1 + rp_g1 + rp_g2
y_g2 = y_g1
# 第3齒輪的圓心座標
x_g3 = x_g1 + rp_g1 + 2*rp_g2 + rp_g3
y_g3 = y_g1
# 將第1齒輪順時鐘轉 90 度
# 使用 ctx.save() 與 ctx.restore() 以確保各齒輪以相對座標進行旋轉繪圖
ctx.save()
# translate to the origin of second gear
ctx.translate(x_g1, y_g1)
# rotate to engage
ctx.rotate(pi/2)
# put it back
ctx.translate(-x_g1, -y_g1)
spur.Spur(ctx).Gear(x_g1, y_g1, rp_g1, n_g1, pa, "blue")
ctx.restore()
# 將第2齒輪逆時鐘轉 90 度之後, 再多轉一齒, 以便與第1齒輪進行囓合
ctx.save()
# translate to the origin of second gear
ctx.translate(x_g2, y_g2)
# rotate to engage
ctx.rotate(-pi/2-pi/n_g2)
# put it back
ctx.translate(-x_g2, -y_g2)
spur.Spur(ctx).Gear(x_g2, y_g2, rp_g2, n_g2, pa, "black")
ctx.restore()
# 將第3齒輪逆時鐘轉 90 度之後, 再往回轉第2齒輪定位帶動轉角, 然後再逆時鐘多轉一齒, 以便與第2齒輪進行囓合
ctx.save()
# translate to the origin of second gear
ctx.translate(x_g3, y_g3)
# rotate to engage
# pi+pi/n_g2 為第2齒輪從順時鐘轉 90 度之後, 必須配合目前的標記線所作的齒輪 2 轉動角度, 要轉換到齒輪3 的轉動角度
# 必須乘上兩齒輪齒數的比例, 若齒輪2 大, 則齒輪3 會轉動較快
# 第1個 -pi/2 為將原先垂直的第3齒輪定位線逆時鐘旋轉 90 度
# -pi/n_g3 則是第3齒與第2齒定位線重合後, 必須再逆時鐘多轉一齒的轉角, 以便進行囓合
# (pi+pi/n_g2)*n_g2/n_g3 則是第2齒原定位線為順時鐘轉動 90 度,
# 但是第2齒輪為了與第1齒輪囓合, 已經距離定位線, 多轉了 180 度, 再加上第2齒輪的一齒角度, 因為要帶動第3齒輪定位,
# 這個修正角度必須要再配合第2齒與第3齒的轉速比加以轉換成第3齒輪的轉角, 因此乘上 n_g2/n_g3
ctx.rotate(-pi/2-pi/n_g3+(pi+pi/n_g2)*n_g2/n_g3)
# put it back
ctx.translate(-x_g3, -y_g3)
spur.Spur(ctx).Gear(x_g3, y_g3, rp_g3, n_g3, pa, "red")
ctx.restore()
# 按照上面三個正齒輪的囓合轉角運算, 隨後的傳動齒輪轉角便可依此類推, 完成6個齒輪的囓合繪圖
</script>
<canvas id="plotarea" width="1200" height="1200"></canvas>
</body>
</html>
'''
return outstring
#@+node:2015.20150331094055.1737: *3* my3Dgeartest
@cherrypy.expose
# N 為齒數, M 為模數, P 為壓力角
def my3Dgeartest(self, N=20, M=5, P=15):
outstring = '''
<!DOCTYPE html>
<html>
<head>
<meta http-equiv="content-type" content="text/html;charset=utf-8">
<!-- 載入 brython.js -->
<script type="text/javascript" src="/static/Brython3.1.1-20150328-091302/brython.js"></script>
<script src="/static/Cango2D.js" type="text/javascript"></script>
<script src="/static/gearUtils-04.js" type="text/javascript"></script>
</head>
<!-- 啟動 brython() -->
<body onload="brython()">
<!-- 以下為 canvas 畫圖程式 -->
<script type="text/python">
# 從 browser 導入 document
from browser import document
from math import *
# 準備在 id="plotarea" 的 canvas 中繪圖
canvas = document["plotarea"]
ctx = canvas.getContext("2d")
def create_line(x1, y1, x2, y2, width=3, fill="red"):
ctx.beginPath()
ctx.lineWidth = width
ctx.moveTo(x1, y1)
ctx.lineTo(x2, y2)
ctx.strokeStyle = fill
ctx.stroke()
# 導入數學函式後, 圓周率為 pi
# deg 為角度轉為徑度的轉換因子
deg = pi/180.
#
# 以下分別為正齒輪繪圖與主 tkinter 畫布繪圖
#
# 定義一個繪正齒輪的繪圖函式
# midx 為齒輪圓心 x 座標
# midy 為齒輪圓心 y 座標
# rp 為節圓半徑, n 為齒數
def gear(midx, midy, rp, n, 顏色):
# 將角度轉換因子設為全域變數
global deg
# 齒輪漸開線分成 15 線段繪製
imax = 15
# 在輸入的畫布上繪製直線, 由圓心到節圓 y 軸頂點畫一直線
create_line(midx, midy, midx, midy-rp)
# 畫出 rp 圓, 畫圓函式尚未定義
#create_oval(midx-rp, midy-rp, midx+rp, midy+rp, width=2)
# a 為模數 (代表公制中齒的大小), 模數為節圓直徑(稱為節徑)除以齒數
# 模數也就是齒冠大小
a=2*rp/n
# d 為齒根大小, 為模數的 1.157 或 1.25倍, 這裡採 1.25 倍
d=2.5*rp/n
# ra 為齒輪的外圍半徑
ra=rp+a
print("ra:", ra)
# 畫出 ra 圓, 畫圓函式尚未定義
#create_oval(midx-ra, midy-ra, midx+ra, midy+ra, width=1)
# rb 則為齒輪的基圓半徑
# 基圓為漸開線長齒之基準圓
rb=rp*cos(20*deg)
print("rp:", rp)
print("rb:", rb)
# 畫出 rb 圓 (基圓), 畫圓函式尚未定義
#create_oval(midx-rb, midy-rb, midx+rb, midy+rb, width=1)
# rd 為齒根圓半徑
rd=rp-d
# 當 rd 大於 rb 時
print("rd:", rd)
# 畫出 rd 圓 (齒根圓), 畫圓函式尚未定義
#create_oval(midx-rd, midy-rd, midx+rd, midy+rd, width=1)
# dr 則為基圓到齒頂圓半徑分成 imax 段後的每段半徑增量大小
# 將圓弧分成 imax 段來繪製漸開線
dr=(ra-rb)/imax
# tan(20*deg)-20*deg 為漸開線函數
sigma=pi/(2*n)+tan(20*deg)-20*deg
for j in range(n):
ang=-2.*j*pi/n+sigma
ang2=2.*j*pi/n+sigma
lxd=midx+rd*sin(ang2-2.*pi/n)
lyd=midy-rd*cos(ang2-2.*pi/n)
#for(i=0;i<=imax;i++):
for i in range(imax+1):
r=rb+i*dr
theta=sqrt((r*r)/(rb*rb)-1.)
alpha=theta-atan(theta)
xpt=r*sin(alpha-ang)
ypt=r*cos(alpha-ang)
xd=rd*sin(-ang)
yd=rd*cos(-ang)
# i=0 時, 繪線起點由齒根圓上的點, 作為起點
if(i==0):
last_x = midx+xd
last_y = midy-yd
# 由左側齒根圓作為起點, 除第一點 (xd,yd) 齒根圓上的起點外, 其餘的 (xpt,ypt)則為漸開線上的分段點
create_line((midx+xpt),(midy-ypt),(last_x),(last_y),fill=顏色)
# 最後一點, 則為齒頂圓
if(i==imax):
lfx=midx+xpt
lfy=midy-ypt
last_x = midx+xpt
last_y = midy-ypt
# the line from last end of dedendum point to the recent
# end of dedendum point
# lxd 為齒根圓上的左側 x 座標, lyd 則為 y 座標
# 下列為齒根圓上用來近似圓弧的直線
create_line((lxd),(lyd),(midx+xd),(midy-yd),fill=顏色)
#for(i=0;i<=imax;i++):
for i in range(imax+1):
r=rb+i*dr
theta=sqrt((r*r)/(rb*rb)-1.)
alpha=theta-atan(theta)
xpt=r*sin(ang2-alpha)
ypt=r*cos(ang2-alpha)
xd=rd*sin(ang2)
yd=rd*cos(ang2)
# i=0 時, 繪線起點由齒根圓上的點, 作為起點
if(i==0):
last_x = midx+xd
last_y = midy-yd
# 由右側齒根圓作為起點, 除第一點 (xd,yd) 齒根圓上的起點外, 其餘的 (xpt,ypt)則為漸開線上的分段點
create_line((midx+xpt),(midy-ypt),(last_x),(last_y),fill=顏色)
# 最後一點, 則為齒頂圓
if(i==imax):
rfx=midx+xpt
rfy=midy-ypt
last_x = midx+xpt
last_y = midy-ypt
# lfx 為齒頂圓上的左側 x 座標, lfy 則為 y 座標
# 下列為齒頂圓上用來近似圓弧的直線
create_line(lfx,lfy,rfx,rfy,fill=顏色)
gear(400,400,300,41,"blue")
</script>
<canvas id="plotarea" width="800" height="800"></canvas>
</body>
</html>
'''
return outstring
#@+node:2014fall.20141215194146.1793: *3* doCheck
@cherrypy.expose
def doCheck(self, guess=None):
# 假如使用者直接執行 doCheck, 則設法轉回根方法
if guess is None:
raise cherrypy.HTTPRedirect("/")
# 從 session 取出 answer 對應資料, 且處理直接執行 doCheck 時無法取 session 值情況
try:
theanswer = int(cherrypy.session.get('answer'))
except:
raise cherrypy.HTTPRedirect("/")
# 經由表單所取得的 guess 資料型別為 string
try:
theguess = int(guess)
except:
return "error " + self.guessform()
# 每執行 doCheck 一次,次數增量一次
cherrypy.session['count'] += 1
# 答案與所猜數字進行比對
if theanswer < theguess:
return "big " + self.guessform()
elif theanswer > theguess:
return "small " + self.guessform()
else:
# 已經猜對, 從 session 取出累計猜測次數
thecount = cherrypy.session.get('count')
return "exact: <a href=''>再猜</a>"
#@+node:2014fall.20141215194146.1789: *3* guessform
def guessform(self):
# 印出讓使用者輸入的超文件表單
outstring = str(cherrypy.session.get('answer')) + "/" + str(cherrypy.session.get('count')) + '''<form method=POST action=doCheck>
請輸入您所猜的整數:<input type=text name=guess><br />
<input type=submit value=send>
</form>'''
return outstring
#@-others
#@-others
################# (4) 程式啟動區
# 配合程式檔案所在目錄設定靜態目錄或靜態檔案
# Static-directory configuration, relative to the program directory.
application_conf = {'/static':{
        'tools.staticdir.on': True,
        # The static directory must be created manually next to the program.
        'tools.staticdir.dir': _curdir+"/static"},
    '/downloads':{
        'tools.staticdir.on': True,
        'tools.staticdir.dir': data_dir+"/downloads"},
    '/images':{
        'tools.staticdir.on': True,
        'tools.staticdir.dir': data_dir+"/images"}
}
root = Hello()
# Mount the gear sub-application under /gear.
root.gear = gear.Gear()
if 'OPENSHIFT_REPO_DIR' in os.environ.keys():
    # Running on OpenShift: hand a WSGI application object to the platform.
    application = cherrypy.Application(root, config=application_conf)
else:
    # Running locally: start the built-in CherryPy server.
    cherrypy.quickstart(root, config=application_conf)
#@-leo
| gpl-3.0 |
h3biomed/ansible | lib/ansible/modules/network/aruba/aruba_config.py | 34 | 16793 | #!/usr/bin/python
#
# Copyright: Ansible Team
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = """
---
module: aruba_config
version_added: "2.4"
author: "James Mighion (@jmighion)"
short_description: Manage Aruba configuration sections
description:
- Aruba configurations use a simple block indent file syntax
for segmenting configuration into sections. This module provides
an implementation for working with Aruba configuration sections in
a deterministic way.
extends_documentation_fragment: aruba
options:
lines:
description:
- The ordered set of commands that should be configured in the
section. The commands must be the exact same commands as found
in the device running-config. Be sure to note the configuration
command syntax as some commands are automatically modified by the
device config parser.
aliases: ['commands']
parents:
description:
- The ordered set of parents that uniquely identify the section or hierarchy
the commands should be checked against. If the parents argument
is omitted, the commands are checked against the set of top
level or global commands.
src:
description:
- Specifies the source path to the file that contains the configuration
or configuration template to load. The path to the source file can
either be the full path on the Ansible control host or a relative
path from the playbook or role root directory. This argument is mutually
exclusive with I(lines), I(parents).
before:
description:
- The ordered set of commands to push on to the command stack if
a change needs to be made. This allows the playbook designer
the opportunity to perform configuration commands prior to pushing
any changes without affecting how the set of commands are matched
against the system.
after:
description:
- The ordered set of commands to append to the end of the command
stack if a change needs to be made. Just like with I(before) this
allows the playbook designer to append a set of commands to be
executed after the command set.
match:
description:
- Instructs the module on the way to perform the matching of
the set of commands against the current device config. If
match is set to I(line), commands are matched line by line. If
match is set to I(strict), command lines are matched with respect
to position. If match is set to I(exact), command lines
must be an equal match. Finally, if match is set to I(none), the
module will not attempt to compare the source configuration with
the running configuration on the remote device.
default: line
choices: ['line', 'strict', 'exact', 'none']
replace:
description:
- Instructs the module on the way to perform the configuration
on the device. If the replace argument is set to I(line) then
the modified lines are pushed to the device in configuration
mode. If the replace argument is set to I(block) then the entire
command block is pushed to the device in configuration mode if any
line is not correct.
default: line
choices: ['line', 'block']
backup:
description:
- This argument will cause the module to create a full backup of
the current C(running-config) from the remote device before any
changes are made. If the C(backup_options) value is not given,
the backup file is written to the C(backup) folder in the playbook
root directory. If the directory does not exist, it is created.
type: bool
default: 'no'
running_config:
description:
- The module, by default, will connect to the remote device and
retrieve the current running-config to use as a base for comparing
against the contents of source. There are times when it is not
desirable to have the task get the current running-config for
every task in a playbook. The I(running_config) argument allows the
implementer to pass in the configuration to use as the base
config for comparison.
aliases: ['config']
save_when:
description:
- When changes are made to the device running-configuration, the
changes are not copied to non-volatile storage by default. Using
this argument will change that before. If the argument is set to
I(always), then the running-config will always be copied to the
startup-config and the I(modified) flag will always be set to
True. If the argument is set to I(modified), then the running-config
will only be copied to the startup-config if it has changed since
the last save to startup-config. If the argument is set to
I(never), the running-config will never be copied to the
startup-config. If the argument is set to I(changed), then the running-config
will only be copied to the startup-config if the task has made a change.
default: never
choices: ['always', 'never', 'modified', 'changed']
version_added: "2.5"
diff_against:
description:
- When using the C(ansible-playbook --diff) command line argument
the module can generate diffs against different sources.
- When this option is configure as I(startup), the module will return
the diff of the running-config against the startup-config.
- When this option is configured as I(intended), the module will
return the diff of the running-config against the configuration
provided in the C(intended_config) argument.
- When this option is configured as I(running), the module will
return the before and after diff of the running-config with respect
to any changes made to the device configuration.
choices: ['startup', 'intended', 'running']
diff_ignore_lines:
description:
- Use this argument to specify one or more lines that should be
ignored during the diff. This is used for lines in the configuration
that are automatically updated by the system. This argument takes
a list of regular expressions or exact line matches.
intended_config:
description:
- The C(intended_config) provides the master configuration that
the node should conform to and is used to check the final
running-config against. This argument will not modify any settings
on the remote device and is strictly used to check the compliance
of the current device's configuration against. When specifying this
argument, the task should also modify the C(diff_against) value and
set it to I(intended).
encrypt:
description:
- This allows an Aruba controller's passwords and keys to be displayed in plain
text when set to I(false) or encrypted when set to I(true).
If set to I(false), the setting will re-encrypt at the end of the module run.
Backups are still encrypted even when set to I(false).
type: bool
default: 'yes'
version_added: "2.5"
backup_options:
description:
- This is a dict object containing configurable options related to backup file path.
The value of this option is read only when C(backup) is set to I(yes), if C(backup) is set
to I(no) this option will be silently ignored.
suboptions:
filename:
description:
- The filename to be used to store the backup configuration. If the filename
is not given it will be generated based on the hostname, current time and date
in format defined by <hostname>_config.<current-date>@<current-time>
dir_path:
description:
- This option provides the path ending with directory name in which the backup
configuration file will be stored. If the directory does not exist it will be first
created and the filename is either the value of C(filename) or default filename
as described in C(filename) options description. If the path value is not given
in that case a I(backup) directory will be created in the current working directory
and backup configuration will be copied in C(filename) within I(backup) directory.
type: path
type: dict
version_added: "2.8"
"""
EXAMPLES = """
- name: configure top level configuration
aruba_config:
lines: hostname {{ inventory_hostname }}
- name: diff the running-config against a provided config
aruba_config:
diff_against: intended
intended: "{{ lookup('file', 'master.cfg') }}"
- name: configure interface settings
aruba_config:
lines:
- description test interface
- ip access-group 1 in
parents: interface gigabitethernet 0/0/0
- name: load new acl into device
aruba_config:
lines:
- permit host 10.10.10.10
- ipv6 permit host fda9:97d6:32a3:3e59::3333
parents: ip access-list standard 1
before: no ip access-list standard 1
match: exact
- name: configurable backup path
aruba_config:
backup: yes
lines: hostname {{ inventory_hostname }}
backup_options:
filename: backup.cfg
dir_path: /home/user
"""
RETURN = """
commands:
description: The set of commands that will be pushed to the remote device
returned: always
type: list
sample: ['hostname foo', 'vlan 1', 'name default']
updates:
description: The set of commands that will be pushed to the remote device
returned: always
type: list
sample: ['hostname foo', 'vlan 1', 'name default']
backup_path:
description: The full path to the backup file
returned: when backup is yes
type: str
sample: /playbooks/ansible/backup/aruba_config.2016-07-16@22:28:34
"""
from ansible.module_utils.network.aruba.aruba import run_commands, get_config, load_config
from ansible.module_utils.network.aruba.aruba import aruba_argument_spec
from ansible.module_utils.network.aruba.aruba import check_args as aruba_check_args
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network.common.config import NetworkConfig, dumps
def get_running_config(module, config=None):
    """Return the base config as a NetworkConfig object.

    Preference order: the user-supplied ``running_config`` module parameter,
    the *config* argument (a previously fetched text), then a fresh fetch
    from the device.
    """
    contents = module.params['running_config']
    if not contents:
        contents = config if config else get_config(module)
    return NetworkConfig(contents=contents)
def get_candidate(module):
    """Build the candidate NetworkConfig from ``src`` or ``lines``/``parents``."""
    candidate = NetworkConfig()
    if module.params['src']:
        # A full config file/template takes precedence.
        candidate.load(module.params['src'])
    elif module.params['lines']:
        parents = module.params['parents'] or list()
        candidate.add(module.params['lines'], parents=parents)
    return candidate
def save_config(module, result):
    """Copy running-config to startup-config and mark the result changed.

    In check mode the copy is skipped and a warning is recorded instead.
    """
    result['changed'] = True
    if not module.check_mode:
        run_commands(module, 'copy running-config startup-config')
    else:
        module.warn('Skipping command `copy running-config startup-config` '
                    'due to check_mode. Configuration not copied to '
                    'non-volatile storage')
def main():
    """ main entry point for module execution
    """
    backup_spec = dict(
        filename=dict(),
        dir_path=dict(type='path')
    )
    argument_spec = dict(
        src=dict(type='path'),
        lines=dict(aliases=['commands'], type='list'),
        parents=dict(type='list'),
        before=dict(type='list'),
        after=dict(type='list'),
        match=dict(default='line', choices=['line', 'strict', 'exact', 'none']),
        replace=dict(default='line', choices=['line', 'block']),
        running_config=dict(aliases=['config']),
        intended_config=dict(),
        backup=dict(type='bool', default=False),
        backup_options=dict(type='dict', options=backup_spec),
        save_when=dict(choices=['always', 'never', 'modified', 'changed'], default='never'),
        diff_against=dict(choices=['running', 'startup', 'intended']),
        diff_ignore_lines=dict(type='list'),
        encrypt=dict(type='bool', default=True),
    )
    argument_spec.update(aruba_argument_spec)
    mutually_exclusive = [('lines', 'src'),
                          ('parents', 'src')]
    required_if = [('match', 'strict', ['lines']),
                   ('match', 'exact', ['lines']),
                   ('replace', 'block', ['lines']),
                   ('diff_against', 'intended', ['intended_config'])]
    module = AnsibleModule(argument_spec=argument_spec,
                           mutually_exclusive=mutually_exclusive,
                           required_if=required_if,
                           supports_check_mode=True)
    warnings = list()
    aruba_check_args(module, warnings)
    result = {'changed': False, 'warnings': warnings}

    # Fetch the device config only when something actually needs it.
    config = None
    if module.params['backup'] or (module._diff and module.params['diff_against'] == 'running'):
        contents = get_config(module)
        config = NetworkConfig(contents=contents)
        if module.params['backup']:
            result['__backup__'] = contents

    # Temporarily show secrets in plain text when requested; re-enabled below.
    if not module.params['encrypt']:
        run_commands(module, 'encrypt disable')

    if any((module.params['src'], module.params['lines'])):
        match = module.params['match']
        replace = module.params['replace']
        candidate = get_candidate(module)
        if match != 'none':
            config = get_running_config(module, config)
            path = module.params['parents']
            configobjs = candidate.difference(config, match=match, replace=replace, path=path)
        else:
            # match=none: push the candidate as-is, no comparison.
            configobjs = candidate.items
        if configobjs:
            commands = dumps(configobjs, 'commands').split('\n')
            if module.params['before']:
                commands[:0] = module.params['before']
            if module.params['after']:
                commands.extend(module.params['after'])
            result['commands'] = commands
            result['updates'] = commands
            if not module.check_mode:
                load_config(module, commands)
            result['changed'] = True

    running_config = None
    startup_config = None

    diff_ignore_lines = module.params['diff_ignore_lines']

    # Persist the running-config according to the save_when policy.
    if module.params['save_when'] == 'always':
        save_config(module, result)
    elif module.params['save_when'] == 'modified':
        output = run_commands(module, ['show running-config', 'show startup-config'])
        running_config = NetworkConfig(contents=output[0], ignore_lines=diff_ignore_lines)
        startup_config = NetworkConfig(contents=output[1], ignore_lines=diff_ignore_lines)
        if running_config.sha1 != startup_config.sha1:
            save_config(module, result)
    elif module.params['save_when'] == 'changed':
        if result['changed']:
            save_config(module, result)

    if module._diff:
        if not running_config:
            output = run_commands(module, 'show running-config')
            contents = output[0]
        else:
            contents = running_config.config_text
        # recreate the object in order to process diff_ignore_lines
        running_config = NetworkConfig(contents=contents, ignore_lines=diff_ignore_lines)
        if module.params['diff_against'] == 'running':
            if module.check_mode:
                module.warn("unable to perform diff against running-config due to check mode")
                contents = None
            else:
                contents = config.config_text
        elif module.params['diff_against'] == 'startup':
            if not startup_config:
                output = run_commands(module, 'show startup-config')
                contents = output[0]
            else:
                contents = startup_config.config_text
        elif module.params['diff_against'] == 'intended':
            contents = module.params['intended_config']
        if contents is not None:
            base_config = NetworkConfig(contents=contents, ignore_lines=diff_ignore_lines)
            if running_config.sha1 != base_config.sha1:
                result.update({
                    'changed': True,
                    'diff': {'before': str(base_config), 'after': str(running_config)}
                })

    # make sure 'encrypt enable' is applied if it was ever disabled
    if not module.params['encrypt']:
        run_commands(module, 'encrypt enable')

    module.exit_json(**result)


if __name__ == '__main__':
    main()
| gpl-3.0 |
3dfxmadscientist/odoo_vi | addons/website/models/ir_ui_view.py | 17 | 8282 | # -*- coding: utf-8 -*-
import copy
import re
import simplejson
import werkzeug
from lxml import etree, html
from openerp import SUPERUSER_ID
from openerp.addons.website.models import website
from openerp.http import request
from openerp.osv import osv, fields
class view(osv.osv):
    """Website extension of ``ir.ui.view``.

    Adds web-page/SEO metadata columns and the machinery used by the
    website editor: resolving related views, extracting/saving fields
    embedded in edited HTML, and rendering through the website QWeb
    engine when a website request is active.
    """
    _inherit = "ir.ui.view"
    # SEO metadata for views that are rendered as full web pages.
    _columns = {
        'page': fields.boolean("Whether this view is a web page template (complete)"),
        'website_meta_title': fields.char("Website meta title", size=70, translate=True),
        'website_meta_description': fields.text("Website meta description", size=160, translate=True),
        'website_meta_keywords': fields.char("Website meta keywords", translate=True),
    }
    _defaults = {
        'page': False,
    }
    def _view_obj(self, cr, uid, view_id, context=None):
        """Resolve view_id (xmlid string, database id, or browse record)
        to a view browse record. (basestring/long are Python 2 types.)"""
        if isinstance(view_id, basestring):
            return self.pool['ir.model.data'].xmlid_to_object(
                cr, uid, view_id, raise_if_not_found=True, context=context
            )
        elif isinstance(view_id, (int, long)):
            return self.browse(cr, uid, view_id, context=context)
        # assume it's already a view object (WTF?)
        return view_id
    # Returns all views (called and inherited) related to a view
    # Used by translation mechanism, SEO and optional templates
    def _views_get(self, cr, uid, view_id, options=True, context=None, root=True):
        """ For a given view ``view_id``, should return:
        * the view itself
        * all views inheriting from it, enabled or not
          - but not the optional children of a non-enabled child
        * all views called from it (via t-call)
        """
        try:
            view = self._view_obj(cr, uid, view_id, context=context)
        except ValueError:
            # Shall we log that ?
            return []
        # Walk up to the root of the inheritance chain before collecting.
        while root and view.inherit_id:
            view = view.inherit_id
        result = [view]
        node = etree.fromstring(view.arch)
        # Follow every t-call to the called templates (recursively).
        for child in node.xpath("//t[@t-call]"):
            try:
                called_view = self._view_obj(cr, uid, child.get('t-call'), context=context)
            except ValueError:
                continue
            if called_view not in result:
                result += self._views_get(cr, uid, called_view, options=options, context=context)
        extensions = view.inherit_children_ids
        if not options:
            # only active children
            extensions = (v for v in view.inherit_children_ids
                          if v.application in ('always', 'enabled'))
        # Keep options in a deterministic order regardless of their applicability
        for extension in sorted(extensions, key=lambda v: v.id):
            for r in self._views_get(
                    cr, uid, extension,
                    # only return optional grandchildren if this child is enabled
                    options=extension.application in ('always', 'enabled'),
                    context=context, root=False):
                if r not in result:
                    result.append(r)
        return result
    def extract_embedded_fields(self, cr, uid, arch, context=None):
        """Return all nodes of `arch` that carry an embedded record field
        (any data-oe-model other than ir.ui.view itself)."""
        return arch.xpath('//*[@data-oe-model != "ir.ui.view"]')
    def save_embedded_field(self, cr, uid, el, context=None):
        """Write the value embedded in element `el` back to its record,
        converting from HTML via the website.qweb converter."""
        Model = self.pool[el.get('data-oe-model')]
        field = el.get('data-oe-field')
        column = Model._all_columns[field].column
        converter = self.pool['website.qweb'].get_converter_for(
            el.get('data-oe-type'))
        value = converter.from_html(cr, uid, Model, column, el)
        if value is not None:
            # TODO: batch writes?
            Model.write(cr, uid, [int(el.get('data-oe-id'))], {
                field: value
            }, context=context)
    def to_field_ref(self, cr, uid, el, context=None):
        """Turn an edited embedded-field element back into a t-field node,
        dropping the editor's data-oe-* metadata attributes."""
        # filter out meta-information inserted in the document
        attributes = dict((k, v) for k, v in el.items()
                          if not k.startswith('data-oe-'))
        attributes['t-field'] = el.get('data-oe-expression')
        out = html.html_parser.makeelement(el.tag, attrib=attributes)
        out.tail = el.tail
        return out
    def replace_arch_section(self, cr, uid, view_id, section_xpath, replacement, context=None):
        """Replace the content of the arch node matched by section_xpath
        (or the whole arch when no xpath) with `replacement`'s content;
        returns the modified arch element tree."""
        # the root of the arch section shouldn't actually be replaced as it's
        # not really editable itself, only the content truly is editable.
        [view] = self.browse(cr, uid, [view_id], context=context)
        arch = etree.fromstring(view.arch.encode('utf-8'))
        # => get the replacement root
        if not section_xpath:
            root = arch
        else:
            # ensure there's only one match
            [root] = arch.xpath(section_xpath)
        root.text = replacement.text
        root.tail = replacement.tail
        # replace all children
        del root[:]
        for child in replacement:
            root.append(copy.deepcopy(child))
        return arch
    def render(self, cr, uid, id_or_xml_id, values=None, engine='ir.qweb', context=None):
        """Render the view; when a website request is active, switch to the
        website.qweb engine and inject website helpers into the context.
        NOTE(review): mutates the caller-supplied ``context`` dict
        (inherit_branding) — apparently intentional for edit mode."""
        if request and getattr(request, 'website_enabled', False):
            engine='website.qweb'
            if isinstance(id_or_xml_id, list):
                id_or_xml_id = id_or_xml_id[0]
            if not context:
                context = {}
            qcontext = dict(
                context.copy(),
                website=request.website,
                url_for=website.url_for,
                slug=website.slug,
                res_company=request.website.company_id,
                user_id=self.pool.get("res.users").browse(cr, uid, uid),
                translatable=context.get('lang') != request.website.default_lang_code,
                editable=request.website.is_publisher(),
            )
            # add some values
            if values:
                qcontext.update(values)
            # in edit mode ir.ui.view will tag nodes
            context['inherit_branding'] = qcontext.get('editable', False)
            view_obj = request.website.get_template(id_or_xml_id)
            if 'main_object' not in qcontext:
                qcontext['main_object'] = view_obj
            values = qcontext
        return super(view, self).render(cr, uid, id_or_xml_id, values=values, engine=engine, context=context)
    def _pretty_arch(self, arch):
        """Serialize `arch` as pretty-printed XML (unicode)."""
        # remove_blank_string does not seem to work on HTMLParser, and
        # pretty-printing with lxml more or less requires stripping
        # whitespace: http://lxml.de/FAQ.html#why-doesn-t-the-pretty-print-option-reformat-my-xml-output
        # so serialize to XML, parse as XML (remove whitespace) then serialize
        # as XML (pretty print)
        arch_no_whitespace = etree.fromstring(
            etree.tostring(arch, encoding='utf-8'),
            parser=etree.XMLParser(encoding='utf-8', remove_blank_text=True))
        return etree.tostring(
            arch_no_whitespace, encoding='unicode', pretty_print=True)
    def save(self, cr, uid, res_id, value, xpath=None, context=None):
        """ Update a view section. The view section may embed fields to write
        :param str model:
        :param int res_id:
        :param str xpath: valid xpath to the tag to replace
        """
        res_id = int(res_id)
        arch_section = html.fromstring(
            value, parser=html.HTMLParser(encoding='utf-8'))
        if xpath is None:
            # value is an embedded field on its own, not a view section
            self.save_embedded_field(cr, uid, arch_section, context=context)
            return
        for el in self.extract_embedded_fields(cr, uid, arch_section, context=context):
            self.save_embedded_field(cr, uid, el, context=context)
            # transform embedded field back to t-field
            el.getparent().replace(el, self.to_field_ref(cr, uid, el, context=context))
        arch = self.replace_arch_section(cr, uid, res_id, xpath, arch_section, context=context)
        self.write(cr, uid, res_id, {
            'arch': self._pretty_arch(arch)
        }, context=context)
        # Mark the view noupdate so module upgrades do not overwrite the
        # user's edits.
        view = self.browse(cr, SUPERUSER_ID, res_id, context=context)
        if view.model_data_id:
            view.model_data_id.write({'noupdate': True})
| agpl-3.0 |
pgleeson/TempRepo3 | lib/jython/Lib/pydoc.py | 10 | 90404 | #!/usr/bin/env python
# -*- coding: Latin-1 -*-
"""Generate Python documentation in HTML or text for interactive use.
In the Python interpreter, do "from pydoc import help" to provide online
help. Calling help(thing) on a Python object documents the object.
Or, at the shell command line outside of Python:
Run "pydoc <name>" to show documentation on something. <name> may be
the name of a function, module, package, or a dotted reference to a
class or function within a module or module in a package. If the
argument contains a path segment delimiter (e.g. slash on Unix,
backslash on Windows) it is treated as the path to a Python source file.
Run "pydoc -k <keyword>" to search for a keyword in the synopsis lines
of all available modules.
Run "pydoc -p <port>" to start an HTTP server on a given port on the
local machine to generate documentation web pages.
For platforms without a command line, "pydoc -g" starts the HTTP server
and also pops up a little window for controlling it.
Run "pydoc -w <name>" to write out the HTML documentation for a module
to a file named "<name>.html".
Module docs for core modules are assumed to be in
http://www.python.org/doc/current/lib/
This can be overridden by setting the PYTHONDOCS environment variable
to a different URL or to a local directory containing the Library
Reference Manual pages.
"""
__author__ = "Ka-Ping Yee <ping@lfw.org>"
__date__ = "26 February 2001"
__version__ = "$Revision: 54366 $"
__credits__ = """Guido van Rossum, for an excellent programming language.
Tommy Burnette, the original creator of manpy.
Paul Prescod, for all his work on onlinehelp.
Richard Chamberlain, for the first implementation of textdoc.
"""
# Known bugs that can't be fixed here:
# - imp.load_module() cannot be prevented from clobbering existing
# loaded modules, so calling synopsis() on a binary module file
# changes the contents of any existing module with the same name.
# - If the __file__ attribute on a module is a relative path and
# the current directory is changed with os.chdir(), an incorrect
# path will be displayed.
import sys, imp, os, re, types, inspect, __builtin__, pkgutil
from repr import Repr
from string import expandtabs, find, join, lower, split, strip, rfind, rstrip
try:
    from collections import deque
except ImportError:
    # Python 2.3 compatibility
    class deque(list):
        # Minimal stand-in: only popleft() is needed by this module.
        def popleft(self):
            return self.pop(0)
# --------------------------------------------------------- common routines
def pathdirs():
    """Convert sys.path into a list of absolute, existing, unique paths."""
    unique = []
    seen = []
    for entry in sys.path:
        # An empty entry means the current directory.
        absolute = os.path.abspath(entry or '.')
        key = os.path.normcase(absolute)
        # Keep only existing directories, deduplicated case-insensitively
        # on platforms with case-insensitive filesystems.
        if key not in seen and os.path.isdir(absolute):
            unique.append(absolute)
            seen.append(key)
    return unique
def getdoc(object):
    """Get the doc string or comments for an object.

    Falls back to source comments when there is no docstring; leading
    blank lines and trailing whitespace are stripped.  Returns '' when
    nothing is found.
    """
    result = inspect.getdoc(object) or inspect.getcomments(object)
    # str.rstrip() replaces the deprecated string.rstrip() helper.
    return result and re.sub('^ *\n', '', result.rstrip()) or ''
def splitdoc(doc):
    """Split a doc string into a synopsis line (if any) and the rest.

    The synopsis is the first line only when it is followed by a blank
    line (or is the whole docstring); otherwise everything is "rest".
    Uses str methods instead of the deprecated string-module functions.
    """
    lines = doc.strip().split('\n')
    if len(lines) == 1:
        return lines[0], ''
    elif len(lines) >= 2 and not lines[1].rstrip():
        return lines[0], '\n'.join(lines[2:])
    return '', '\n'.join(lines)
def classname(object, modname):
    """Get a class name and qualify it with a module name if necessary."""
    qualified = object.__name__
    # Qualify with the defining module unless we are documenting from
    # inside that same module.
    if object.__module__ != modname:
        qualified = '%s.%s' % (object.__module__, qualified)
    return qualified
def isdata(object):
    """Check if an object is of a type that probably means it's data."""
    # Anything that is not a module, class, routine, frame, traceback
    # or code object is treated as plain data.
    checks = (inspect.ismodule, inspect.isclass, inspect.isroutine,
              inspect.isframe, inspect.istraceback, inspect.iscode)
    return not any(check(object) for check in checks)
def replace(text, *pairs):
    """Do a series of global replacements on a string.

    ``pairs`` is a flat sequence (old1, new1, old2, new2, ...); each
    replacement is applied to the result of the previous one.
    """
    while pairs:
        # str.replace() supersedes the deprecated string.split/join combo.
        text = text.replace(pairs[0], pairs[1])
        pairs = pairs[2:]
    return text
def cram(text, maxlen):
    """Omit part of a string if needed to make it fit in a maximum length."""
    if len(text) <= maxlen:
        return text
    # Keep roughly equal head and tail around a '...' ellipsis.
    head = max(0, (maxlen - 3) // 2)
    tail = max(0, maxlen - 3 - head)
    return '%s...%s' % (text[:head], text[len(text) - tail:])
_re_stripid = re.compile(r' at 0x[0-9a-f]{6,16}(>+)$', re.IGNORECASE)

def stripid(text):
    """Remove the hexadecimal id from a Python object representation."""
    # The behaviour of %p is implementation-dependent in terms of case:
    # probe once against a default repr to see whether this interpreter
    # embeds addresses at all.
    if not _re_stripid.search(repr(Exception)):
        return text
    return _re_stripid.sub(r'\1', text)
def _is_some_method(obj):
    """True for bound/unbound methods and C-level method descriptors."""
    return (inspect.ismethod(obj)
            or inspect.ismethoddescriptor(obj))

def allmethods(cl):
    """Return a dict mapping every method name of cl (including those of
    all base classes) to the attribute as seen from cl itself."""
    collected = {}
    for attr_name, _unused in inspect.getmembers(cl, _is_some_method):
        collected[attr_name] = 1
    # Recurse into every base; the re-binding loop below overrides each
    # entry with the attribute as resolved on cl.
    for base in cl.__bases__:
        collected.update(allmethods(base))  # all your base are belong to us
    for attr_name in list(collected):
        collected[attr_name] = getattr(cl, attr_name)
    return collected
def _split_list(s, predicate):
    """Split sequence s via predicate, and return pair ([true], [false]).

    The return value is a 2-tuple of lists,
        ([x for x in s if predicate(x)],
         [x for x in s if not predicate(x)])
    """
    matched = []
    unmatched = []
    for item in s:
        (matched if predicate(item) else unmatched).append(item)
    return matched, unmatched
def visiblename(name, all=None):
    """Decide whether to show documentation on a variable."""
    # Certain special names are redundant and never shown.
    if name in ('__builtins__', '__doc__', '__file__', '__path__',
                '__module__', '__name__', '__slots__'):
        return 0
    # Private names are hidden, but special (dunder) names are displayed.
    if name.startswith('__') and name.endswith('__'):
        return 1
    if all is None:
        return not name.startswith('_')
    # Only document that which the programmer exported in __all__.
    return name in all
def classify_class_attrs(object):
    """Wrap inspect.classify_class_attrs, with fixup for data descriptors."""
    # NOTE: Python 2 tuple-parameter unpacking in the signature below.
    def fixup((name, kind, cls, value)):
        # inspect labels properties and other data descriptors as plain
        # 'data'; relabel them so the formatters document them separately.
        if inspect.isdatadescriptor(value):
            kind = 'data descriptor'
        return name, kind, cls, value
    return map(fixup, inspect.classify_class_attrs(object))
# ----------------------------------------------------- module manipulation
def ispackage(path):
    """Guess whether a path refers to a package directory."""
    if not os.path.isdir(path):
        return False
    # A directory is a package when it holds an __init__ in source or any
    # compiled form (including Jython's $py.class).
    for ext in ('.py', '.pyc', '.pyo', '$py.class'):
        if os.path.isfile(os.path.join(path, '__init__' + ext)):
            return True
    return False
def source_synopsis(file):
    """Return the first line of the module docstring in an open source file.

    Skips leading comment and blank lines, then reads a double-quoted
    (optionally raw) docstring and returns its first non-blank line
    stripped, or None when the file does not start with a docstring.
    Uses str methods instead of the deprecated string-module functions.
    """
    line = file.readline()
    # Skip initial comment lines and blank lines.
    while line[:1] == '#' or not line.strip():
        line = file.readline()
        if not line: break
    line = line.strip()
    if line[:4] == 'r"""': line = line[1:]
    if line[:3] == '"""':
        line = line[3:]
        # A trailing backslash continues the logical line.
        if line[-1:] == '\\': line = line[:-1]
        while not line.strip():
            line = file.readline()
            if not line: break
        result = line.split('"""')[0].strip()
    else: result = None
    return result
def synopsis(filename, cache={}):
    """Get the one-line summary out of a module file.

    Results are memoized in the deliberately shared mutable ``cache``
    (filename -> (mtime, synopsis)) and refreshed when the file's mtime
    changes.
    """
    mtime = os.stat(filename).st_mtime
    lastupdate, result = cache.get(filename, (0, None))
    if lastupdate < mtime:
        info = inspect.getmoduleinfo(filename)
        try:
            file = open(filename)
        except IOError:
            # module can't be opened, so skip it
            return None
        if info and 'b' in info[2]: # binary modules have to be imported
            # NOTE(review): importing under '__temp__' clobbers any loaded
            # module of that name; bare except deliberately skips modules
            # that fail to import.
            try: module = imp.load_module('__temp__', file, filename, info[1:])
            except: return None
            result = (module.__doc__ or '').splitlines()[0]
            del sys.modules['__temp__']
        else: # text modules can be directly examined
            result = source_synopsis(file)
            file.close()
        cache[filename] = (mtime, result)
    return result
class ErrorDuringImport(Exception):
    """Errors that occurred while trying to import something to document it."""
    # Second argument is a sys.exc_info() triple, destructured with
    # Python 2 tuple-parameter unpacking.
    def __init__(self, filename, (exc, value, tb)):
        self.filename = filename
        self.exc = exc
        self.value = value
        self.tb = tb
    def __str__(self):
        exc = self.exc
        if type(exc) is types.ClassType:
            # Old-style (classic) exception class: show its bare name.
            exc = exc.__name__
        return 'problem in %s - %s: %s' % (self.filename, exc, self.value)
def importfile(path):
    """Import a Python source file or compiled file given its path.

    Raises ErrorDuringImport (wrapping sys.exc_info()) when the import
    itself fails.
    """
    magic = imp.get_magic()
    # Peek at the first bytes to decide between compiled and source form.
    file = open(path, 'r')
    try:
        if file.read(len(magic)) == magic:
            kind = imp.PY_COMPILED
        else:
            kind = imp.PY_SOURCE
    finally:
        file.close()
    filename = os.path.basename(path)
    name, ext = os.path.splitext(filename)
    file = open(path, 'r')
    try:
        try:
            module = imp.load_module(name, file, path, (ext, 'r', kind))
        except:
            raise ErrorDuringImport(path, sys.exc_info())
    finally:
        # Close the handle on success AND failure; the original leaked it
        # whenever imp.load_module raised.
        file.close()
    return module
def safeimport(path, forceload=0, cache={}):
    """Import a module; handle errors; return None if the module isn't found.

    If the module *is* found but an exception occurs, it's wrapped in an
    ErrorDuringImport exception and reraised.  Unlike __import__, if a
    package path is specified, the module at the end of the path is returned,
    not the package at the beginning.  If the optional 'forceload' argument
    is 1, we reload the module from disk (unless it's a dynamic extension).

    (``cache`` is a deliberately shared mutable default used to keep
    force-reloaded modules alive; str methods replace the deprecated
    string-module functions.)
    """
    try:
        # If forceload is 1 and the module has been previously loaded from
        # disk, we always have to reload the module.  Checking the file's
        # mtime isn't good enough (e.g. the module could contain a class
        # that inherits from another module that has changed).
        if forceload and path in sys.modules:
            if path not in sys.builtin_module_names:
                # Avoid simply calling reload() because it leaves names in
                # the currently loaded module lying around if they're not
                # defined in the new source file.  Instead, remove the
                # module from sys.modules and re-import.  Also remove any
                # submodules because they won't appear in the newly loaded
                # module's namespace if they're already in sys.modules.
                subs = [m for m in sys.modules if m.startswith(path + '.')]
                for key in [path] + subs:
                    # Prevent garbage collection.
                    cache[key] = sys.modules[key]
                    del sys.modules[key]
        module = __import__(path)
    except:
        # Did the error occur before or after the module was found?
        (exc, value, tb) = info = sys.exc_info()
        if path in sys.modules:
            # An error occurred while executing the imported module.
            raise ErrorDuringImport(sys.modules[path].__file__, info)
        elif exc is SyntaxError:
            # A SyntaxError occurred before we could execute the module.
            raise ErrorDuringImport(value.filename, info)
        elif exc is ImportError and \
             str(value).lower().split()[:2] == ['no', 'module']:
            # The module was not found.
            return None
        else:
            # Some other error occurred during the importing process.
            raise ErrorDuringImport(path, sys.exc_info())
    # Walk down a dotted path to the leaf module.
    for part in path.split('.')[1:]:
        try: module = getattr(module, part)
        except AttributeError: return None
    return module
# ---------------------------------------------------- formatter base class
class Doc:
    """Base class for documentation formatters.

    Subclasses (HTML, text, ...) override the doc* methods; ``document``
    dispatches on the kind of object being documented.
    """
    def document(self, object, name=None, *args):
        """Generate documentation for an object."""
        args = (object, name) + args
        # 'try' clause is to attempt to handle the possibility that inspect
        # identifies something in a way that pydoc itself has issues handling;
        # think 'super' and how it is a descriptor (which raises the exception
        # by lacking a __name__ attribute) and an instance.
        if inspect.isgetsetdescriptor(object): return self.docdata(*args)
        if inspect.ismemberdescriptor(object): return self.docdata(*args)
        try:
            if inspect.ismodule(object): return self.docmodule(*args)
            if inspect.isclass(object): return self.docclass(*args)
            if inspect.isroutine(object): return self.docroutine(*args)
        except AttributeError:
            pass
        if isinstance(object, property): return self.docproperty(*args)
        return self.docother(*args)
    def fail(self, object, name=None, *args):
        """Raise an exception for unimplemented types."""
        message = "don't know how to document object%s of type %s" % (
            name and ' ' + repr(name), type(object).__name__)
        # Python 2 raise syntax.
        raise TypeError, message
    # Subclasses replace these with real formatters; any kind left
    # unimplemented falls through to fail().
    docmodule = docclass = docroutine = docother = docproperty = docdata = fail
    def getdocloc(self, object):
        """Return the location of module docs or None"""
        try:
            file = inspect.getabsfile(object)
        except TypeError:
            file = '(built-in)'
        # PYTHONDOCS may point at a URL or a local directory holding the
        # Library Reference pages.
        docloc = os.environ.get("PYTHONDOCS",
                                "http://www.python.org/doc/current/lib")
        basedir = os.path.join(sys.exec_prefix, "lib",
                               "python"+sys.version[0:3])
        # Only link docs for modules that ship with the interpreter.
        if (isinstance(object, type(os)) and
            (object.__name__ in ('errno', 'exceptions', 'gc', 'imp',
                                 'marshal', 'posix', 'signal', 'sys',
                                 'thread', 'zipimport') or
             (file.startswith(basedir) and
              not file.startswith(os.path.join(basedir, 'site-packages'))))):
            htmlfile = "module-%s.html" % object.__name__
            if docloc.startswith("http://"):
                docloc = "%s/%s" % (docloc.rstrip("/"), htmlfile)
            else:
                docloc = os.path.join(docloc, htmlfile)
        else:
            docloc = None
        return docloc
# -------------------------------------------- HTML documentation generator
class HTMLRepr(Repr):
    """Class for safely making an HTML representation of a Python object."""
    def __init__(self):
        Repr.__init__(self)
        # Show a little more of containers/strings than Repr's defaults.
        self.maxlist = self.maxtuple = 20
        self.maxdict = 10
        self.maxstring = self.maxother = 100
    def escape(self, text):
        """HTML-escape the metacharacters &, < and > (& must go first).

        Bug fix: the replacement pairs had degraded to replacing each
        character with itself, making escaping a no-op.
        """
        return replace(text, '&', '&amp;', '<', '&lt;', '>', '&gt;')
    def repr(self, object):
        """Entry point: delegate to Repr's depth-limited machinery."""
        return Repr.repr(self, object)
    def repr1(self, x, level):
        """Dispatch to a repr_<typename> method when one exists."""
        if hasattr(type(x), '__name__'):
            # '_'.join(name.split()) replaces the deprecated string-module
            # helpers; multi-word type names become method-name friendly.
            methodname = 'repr_' + '_'.join(type(x).__name__.split())
            if hasattr(self, methodname):
                return getattr(self, methodname)(x, level)
        return self.escape(cram(stripid(repr(x)), self.maxother))
    def repr_string(self, x, level):
        """Repr a string, highlighting escape sequences in colour."""
        test = cram(x, self.maxstring)
        testrepr = repr(test)
        if '\\' in test and '\\' not in replace(testrepr, r'\\', ''):
            # Backslashes are only literal in the string and are never
            # needed to make any special characters, so show a raw string.
            return 'r' + testrepr[0] + self.escape(test) + testrepr[0]
        return re.sub(r'((\\[\\abfnrtv\'"]|\\[0-9]..|\\x..|\\u....)+)',
                      r'<font color="#c040c0">\1</font>',
                      self.escape(testrepr))
    repr_str = repr_string
    def repr_instance(self, x, level):
        """Repr an instance, guarding against a broken __repr__."""
        try:
            return self.escape(cram(stripid(repr(x)), self.maxstring))
        except:
            return self.escape('<%s instance>' % x.__class__.__name__)
    repr_unicode = repr_string
class HTMLDoc(Doc):
"""Formatter class for HTML documentation."""
# ------------------------------------------- HTML formatting utilities
_repr_instance = HTMLRepr()
repr = _repr_instance.repr
escape = _repr_instance.escape
    def page(self, title, contents):
        """Format an HTML page."""
        # NOTE(review): title and contents are interpolated without
        # escaping; callers appear to pass pre-escaped HTML — confirm.
        return '''
<!doctype html PUBLIC "-//W3C//DTD HTML 4.0 Transitional//EN">
<html><head><title>Python: %s</title>
</head><body bgcolor="#f0f0f8">
%s
</body></html>''' % (title, contents)
    def heading(self, title, fgcol, bgcol, extras=''):
        """Format a page heading.

        fgcol/bgcol are HTML colours; extras is optional right-aligned HTML.
        """
        # NOTE(review): upstream pydoc embeds '&nbsp;' entities in this
        # template and as the extras fallback; they appear to have been
        # decoded to plain spaces in this copy — confirm against upstream.
        return '''
<table width="100%%" cellspacing=0 cellpadding=2 border=0 summary="heading">
<tr bgcolor="%s">
<td valign=bottom> <br>
<font color="%s" face="helvetica, arial"> <br>%s</font></td
><td align=right valign=bottom
><font color="%s" face="helvetica, arial">%s</font></td></tr></table>
    ''' % (bgcol, fgcol, title, fgcol, extras or ' ')
    def section(self, title, fgcol, bgcol, contents, width=6,
                prelude='', marginalia=None, gap=' '):
        """Format a section with a heading.

        prelude is HTML placed above the contents; marginalia fills the
        left gutter (defaults to ``width`` spaces); gap separates the
        gutter from the contents.
        """
        # NOTE(review): upstream pydoc uses '&nbsp;' for the gap default
        # and the marginalia padding; plain spaces here look like a
        # lost-entity artifact — confirm before relying on the output.
        if marginalia is None:
            marginalia = '<tt>' + ' ' * width + '</tt>'
        result = '''<p>
<table width="100%%" cellspacing=0 cellpadding=2 border=0 summary="section">
<tr bgcolor="%s">
<td colspan=3 valign=bottom> <br>
<font color="%s" face="helvetica, arial">%s</font></td></tr>
    ''' % (bgcol, fgcol, title)
        if prelude:
            result = result + '''
<tr bgcolor="%s"><td rowspan=2>%s</td>
<td colspan=2>%s</td></tr>
<tr><td>%s</td>''' % (bgcol, marginalia, prelude, gap)
        else:
            result = result + '''
<tr><td bgcolor="%s">%s</td><td>%s</td>''' % (bgcol, marginalia, gap)
        return result + '\n<td width="100%%">%s</td></tr></table>' % contents
def bigsection(self, title, *args):
"""Format a section with a big heading."""
title = '<big><strong>%s</strong></big>' % title
return self.section(title, *args)
def preformat(self, text):
"""Format literal preformatted text."""
text = self.escape(expandtabs(text))
return replace(text, '\n\n', '\n \n', '\n\n', '\n \n',
' ', ' ', '\n', '<br>\n')
def multicolumn(self, list, format, cols=4):
"""Format a list of items into a multi-column list."""
result = ''
rows = (len(list)+cols-1)/cols
for col in range(cols):
result = result + '<td width="%d%%" valign=top>' % (100/cols)
for i in range(rows*col, rows*col+rows):
if i < len(list):
result = result + format(list[i]) + '<br>\n'
result = result + '</td>'
return '<table width="100%%" summary="list"><tr>%s</tr></table>' % result
def grey(self, text): return '<font color="#909090">%s</font>' % text
def namelink(self, name, *dicts):
"""Make a link for an identifier, given name-to-URL mappings."""
for dict in dicts:
if name in dict:
return '<a href="%s">%s</a>' % (dict[name], name)
return name
def classlink(self, object, modname):
"""Make a link for a class."""
name, module = object.__name__, sys.modules.get(object.__module__)
if hasattr(module, name) and getattr(module, name) is object:
return '<a href="%s.html#%s">%s</a>' % (
module.__name__, name, classname(object, modname))
return classname(object, modname)
def modulelink(self, object):
"""Make a link for a module."""
return '<a href="%s.html">%s</a>' % (object.__name__, object.__name__)
    # NOTE: Python 2 tuple-parameter unpacking in the signature.
    def modpkglink(self, (name, path, ispackage, shadowed)):
        """Make a link for a module or package to display in an index."""
        if shadowed:
            # Shadowed modules are shown greyed-out and unlinked.
            return self.grey(name)
        if path:
            url = '%s.%s.html' % (path, name)
        else:
            url = '%s.html' % name
        if ispackage:
            text = '<strong>%s</strong> (package)' % name
        else:
            text = name
        return '<a href="%s">%s</a>' % (url, text)
def markup(self, text, escape=None, funcs={}, classes={}, methods={}):
"""Mark up some plain text, given a context of symbols to look for.
Each context dictionary maps object names to anchor names."""
escape = escape or self.escape
results = []
here = 0
pattern = re.compile(r'\b((http|ftp)://\S+[\w/]|'
r'RFC[- ]?(\d+)|'
r'PEP[- ]?(\d+)|'
r'(self\.)?(\w+))')
while True:
match = pattern.search(text, here)
if not match: break
start, end = match.span()
results.append(escape(text[here:start]))
all, scheme, rfc, pep, selfdot, name = match.groups()
if scheme:
url = escape(all).replace('"', '"')
results.append('<a href="%s">%s</a>' % (url, url))
elif rfc:
url = 'http://www.rfc-editor.org/rfc/rfc%d.txt' % int(rfc)
results.append('<a href="%s">%s</a>' % (url, escape(all)))
elif pep:
url = 'http://www.python.org/peps/pep-%04d.html' % int(pep)
results.append('<a href="%s">%s</a>' % (url, escape(all)))
elif text[end:end+1] == '(':
results.append(self.namelink(name, methods, funcs, classes))
elif selfdot:
results.append('self.<strong>%s</strong>' % name)
else:
results.append(self.namelink(name, classes))
here = end
results.append(escape(text[here:]))
return join(results, '')
# ---------------------------------------------- type-specific routines
    def formattree(self, tree, modname, parent=None):
        """Produce HTML for a class tree as given by inspect.getclasstree()."""
        result = ''
        for entry in tree:
            if type(entry) is type(()):
                # A (class, bases) tuple: emit the class and its parents.
                c, bases = entry
                result = result + '<dt><font face="helvetica, arial">'
                result = result + self.classlink(c, modname)
                if bases and bases != (parent,):
                    parents = []
                    for base in bases:
                        parents.append(self.classlink(base, modname))
                    result = result + '(' + join(parents, ', ') + ')'
                result = result + '\n</font></dt>'
            elif type(entry) is type([]):
                # A list entry is the subtree of the class just emitted.
                # NOTE(review): relies on 'c' bound by the preceding tuple
                # entry; getclasstree() always emits the tuple first.
                result = result + '<dd>\n%s</dd>\n' % self.formattree(
                    entry, modname, c)
        return '<dl>\n%s</dl>\n' % result
    def docmodule(self, object, name=None, mod=None, *ignored):
        """Produce HTML documentation for a module object."""
        name = object.__name__ # ignore the passed-in name
        try:
            all = object.__all__
        except AttributeError:
            all = None
        # Build a breadcrumb-style linked name for dotted packages.
        parts = split(name, '.')
        links = []
        for i in range(len(parts)-1):
            links.append(
                '<a href="%s.html"><font color="#ffffff">%s</font></a>' %
                (join(parts[:i+1], '.'), parts[i]))
        linkedname = join(links + parts[-1:], '.')
        head = '<big><big><strong>%s</strong></big></big>' % linkedname
        try:
            path = inspect.getabsfile(object)
            url = path
            if sys.platform == 'win32':
                import nturl2path
                url = nturl2path.pathname2url(path)
            filelink = '<a href="file:%s">%s</a>' % (url, path)
        except TypeError:
            filelink = '(built-in)'
        # Optional version / date metadata shown next to the title.
        info = []
        if hasattr(object, '__version__'):
            version = str(object.__version__)
            if version[:11] == '$' + 'Revision: ' and version[-1:] == '$':
                version = strip(version[11:-1])
            info.append('version %s' % self.escape(version))
        if hasattr(object, '__date__'):
            info.append(self.escape(str(object.__date__)))
        if info:
            head = head + ' (%s)' % join(info, ', ')
        docloc = self.getdocloc(object)
        if docloc is not None:
            docloc = '<br><a href="%(docloc)s">Module Docs</a>' % locals()
        else:
            docloc = ''
        result = self.heading(
            head, '#ffffff', '#7799ee',
            '<a href=".">index</a><br>' + filelink + docloc)
        modules = inspect.getmembers(object, inspect.ismodule)
        # Collect visible classes and remember anchors for cross-linking.
        classes, cdict = [], {}
        for key, value in inspect.getmembers(object, inspect.isclass):
            # if __all__ exists, believe it.  Otherwise use old heuristic.
            if (all is not None or
                (inspect.getmodule(value) or object) is object):
                if visiblename(key, all):
                    classes.append((key, value))
                    cdict[key] = cdict[value] = '#' + key
        # Also link base classes documented in other modules.
        for key, value in classes:
            for base in value.__bases__:
                key, modname = base.__name__, base.__module__
                module = sys.modules.get(modname)
                if modname != name and module and hasattr(module, key):
                    if getattr(module, key) is base:
                        if not key in cdict:
                            cdict[key] = cdict[base] = modname + '.html#' + key
        # Collect visible functions/routines.
        funcs, fdict = [], {}
        for key, value in inspect.getmembers(object, inspect.isroutine):
            # if __all__ exists, believe it.  Otherwise use old heuristic.
            if (all is not None or
                inspect.isbuiltin(value) or inspect.getmodule(value) is object):
                if visiblename(key, all):
                    funcs.append((key, value))
                    fdict[key] = '#-' + key
                    if inspect.isfunction(value): fdict[value] = fdict[key]
        data = []
        for key, value in inspect.getmembers(object, isdata):
            if visiblename(key, all):
                data.append((key, value))
        doc = self.markup(getdoc(object), self.preformat, fdict, cdict)
        doc = doc and '<tt>%s</tt>' % doc
        result = result + '<p>%s</p>\n' % doc
        if hasattr(object, '__path__'):
            # A package: list its submodules via pkgutil.
            modpkgs = []
            for importer, modname, ispkg in pkgutil.iter_modules(object.__path__):
                modpkgs.append((modname, name, ispkg, 0))
            modpkgs.sort()
            contents = self.multicolumn(modpkgs, self.modpkglink)
            result = result + self.bigsection(
                'Package Contents', '#ffffff', '#aa55cc', contents)
        elif modules:
            # Python 2 tuple-unpacking lambda below.
            contents = self.multicolumn(
                modules, lambda (key, value), s=self: s.modulelink(value))
            # NOTE(review): '#fffff' has five f's; upstream pydoc uses
            # '#ffffff' — looks like a dropped character.
            result = result + self.bigsection(
                'Modules', '#fffff', '#aa55cc', contents)
        if classes:
            classlist = map(lambda (key, value): value, classes)
            contents = [
                self.formattree(inspect.getclasstree(classlist, 1), name)]
            for key, value in classes:
                contents.append(self.document(value, key, name, fdict, cdict))
            result = result + self.bigsection(
                'Classes', '#ffffff', '#ee77aa', join(contents))
        if funcs:
            contents = []
            for key, value in funcs:
                contents.append(self.document(value, key, name, fdict, cdict))
            result = result + self.bigsection(
                'Functions', '#ffffff', '#eeaa77', join(contents))
        if data:
            contents = []
            for key, value in data:
                contents.append(self.document(value, key))
            result = result + self.bigsection(
                'Data', '#ffffff', '#55aa55', join(contents, '<br>\n'))
        if hasattr(object, '__author__'):
            contents = self.markup(str(object.__author__), self.preformat)
            result = result + self.bigsection(
                'Author', '#ffffff', '#7799ee', contents)
        if hasattr(object, '__credits__'):
            contents = self.markup(str(object.__credits__), self.preformat)
            result = result + self.bigsection(
                'Credits', '#ffffff', '#7799ee', contents)
        return result
    def docclass(self, object, name=None, mod=None, funcs={}, classes={},
                 *ignored):
        """Produce HTML documentation for a class object."""
        realname = object.__name__
        name = name or realname
        bases = object.__bases__
        contents = []
        push = contents.append
        # Cute little class to pump out a horizontal rule between sections.
        class HorizontalRule:
            def __init__(self):
                self.needone = 0
            def maybe(self):
                if self.needone:
                    push('<hr>\n')
                self.needone = 1
        hr = HorizontalRule()
        # List the mro, if non-trivial.
        mro = deque(inspect.getmro(object))
        if len(mro) > 2:
            hr.maybe()
            push('<dl><dt>Method resolution order:</dt>\n')
            for base in mro:
                push('<dd>%s</dd>\n' % self.classlink(base,
                                                      object.__module__))
            push('</dl>\n')
        # Emit full documentation for attrs matching predicate; return rest.
        def spill(msg, attrs, predicate):
            ok, attrs = _split_list(attrs, predicate)
            if ok:
                hr.maybe()
                push(msg)
                for name, kind, homecls, value in ok:
                    push(self.document(getattr(object, name), name, mod,
                                       funcs, classes, mdict, object))
                    push('\n')
            return attrs
        # Same, for data descriptors (properties etc.).
        def spilldescriptors(msg, attrs, predicate):
            ok, attrs = _split_list(attrs, predicate)
            if ok:
                hr.maybe()
                push(msg)
                for name, kind, homecls, value in ok:
                    push(self._docdescriptor(name, value, mod))
            return attrs
        # Same, for plain data attributes (with their docstring if any).
        def spilldata(msg, attrs, predicate):
            ok, attrs = _split_list(attrs, predicate)
            if ok:
                hr.maybe()
                push(msg)
                for name, kind, homecls, value in ok:
                    base = self.docother(getattr(object, name), name, mod)
                    if callable(value) or inspect.isdatadescriptor(value):
                        doc = getattr(value, "__doc__", None)
                    else:
                        doc = None
                    if doc is None:
                        push('<dl><dt>%s</dl>\n' % base)
                    else:
                        doc = self.markup(getdoc(value), self.preformat,
                                          funcs, classes, mdict)
                        doc = '<dd><tt>%s</tt>' % doc
                        push('<dl><dt>%s%s</dl>\n' % (base, doc))
                    push('\n')
            return attrs
        # Python 2 tuple-unpacking lambda below.
        attrs = filter(lambda (name, kind, cls, value): visiblename(name),
                       classify_class_attrs(object))
        # Map attribute names (and hashable values) to page anchors.
        mdict = {}
        for key, kind, homecls, value in attrs:
            mdict[key] = anchor = '#' + name + '-' + key
            value = getattr(object, key)
            try:
                # The value may not be hashable (e.g., a data attr with
                # a dict or list value).
                mdict[value] = anchor
            except TypeError:
                pass
        # Walk the MRO, documenting each class's own attributes in turn.
        while attrs:
            if mro:
                thisclass = mro.popleft()
            else:
                thisclass = attrs[0][2]
            attrs, inherited = _split_list(attrs, lambda t: t[2] is thisclass)
            if thisclass is __builtin__.object:
                attrs = inherited
                continue
            elif thisclass is object:
                tag = 'defined here'
            else:
                tag = 'inherited from %s' % self.classlink(thisclass,
                                                           object.__module__)
            tag += ':<br>\n'
            # Sort attrs by name.
            try:
                attrs.sort(key=lambda t: t[0])
            except TypeError:
                attrs.sort(lambda t1, t2: cmp(t1[0], t2[0])) # 2.3 compat
            # Pump out the attrs, segregated by kind.
            attrs = spill('Methods %s' % tag, attrs,
                          lambda t: t[1] == 'method')
            attrs = spill('Class methods %s' % tag, attrs,
                          lambda t: t[1] == 'class method')
            attrs = spill('Static methods %s' % tag, attrs,
                          lambda t: t[1] == 'static method')
            attrs = spilldescriptors('Data descriptors %s' % tag, attrs,
                                     lambda t: t[1] == 'data descriptor')
            attrs = spilldata('Data and other attributes %s' % tag, attrs,
                              lambda t: t[1] == 'data')
            assert attrs == []
            attrs = inherited
        contents = ''.join(contents)
        if name == realname:
            title = '<a name="%s">class <strong>%s</strong></a>' % (
                name, realname)
        else:
            title = '<strong>%s</strong> = <a name="%s">class %s</a>' % (
                name, name, realname)
        if bases:
            parents = []
            for base in bases:
                parents.append(self.classlink(base, object.__module__))
            title = title + '(%s)' % join(parents, ', ')
        doc = self.markup(getdoc(object), self.preformat, funcs, classes, mdict)
        # NOTE(review): upstream pydoc uses '<br>&nbsp;</tt>' here; the
        # entity appears to have been decoded to a space in this copy.
        doc = doc and '<tt>%s<br> </tt>' % doc
        return self.section(title, '#000000', '#ffc8d8', contents, 3, doc)
def formatvalue(self, object):
"""Format an argument default value as text."""
return self.grey('=' + self.repr(object))
    def docroutine(self, object, name=None, mod=None,
                   funcs={}, classes={}, methods={}, cl=None):
        """Produce HTML documentation for a function or method object.

        `cl` is the class the routine is being documented under (if any);
        `funcs`/`classes`/`methods` are name->anchor maps used by markup()
        to hyperlink identifiers found in the docstring.
        NOTE(review): the dict defaults are mutable but are only read here,
        never mutated, so the shared-default pitfall does not bite.
        """
        realname = object.__name__
        name = name or realname
        anchor = (cl and cl.__name__ or '') + '-' + name
        note = ''
        skipdocs = 0
        if inspect.ismethod(object):
            # Python 2 bound/unbound method: describe where it comes from,
            # then unwrap to the underlying function for argspec inspection.
            imclass = object.im_class
            if cl:
                if imclass is not cl:
                    note = ' from ' + self.classlink(imclass, mod)
            else:
                if object.im_self is not None:
                    note = ' method of %s instance' % self.classlink(
                        object.im_self.__class__, mod)
                else:
                    note = ' unbound %s method' % self.classlink(imclass,mod)
            object = object.im_func
        if name == realname:
            title = '<a name="%s"><strong>%s</strong></a>' % (anchor, realname)
        else:
            # Documented under an alias: link back to the real definition
            # when it lives on the same class, and skip duplicating its docs.
            if (cl and realname in cl.__dict__ and
                cl.__dict__[realname] is object):
                reallink = '<a href="#%s">%s</a>' % (
                    cl.__name__ + '-' + realname, realname)
                skipdocs = 1
            else:
                reallink = realname
            title = '<a name="%s"><strong>%s</strong></a> = %s' % (
                anchor, name, reallink)
        if inspect.isfunction(object):
            args, varargs, varkw, defaults = inspect.getargspec(object)
            argspec = inspect.formatargspec(
                args, varargs, varkw, defaults, formatvalue=self.formatvalue)
            if realname == '<lambda>':
                title = '<strong>%s</strong> <em>lambda</em> ' % name
                argspec = argspec[1:-1] # remove parentheses
        else:
            # Builtins and other non-Python callables have no argspec.
            argspec = '(...)'
        decl = title + argspec + (note and self.grey(
               '<font face="helvetica, arial">%s</font>' % note))
        if skipdocs:
            return '<dl><dt>%s</dt></dl>\n' % decl
        else:
            doc = self.markup(
                getdoc(object), self.preformat, funcs, classes, methods)
            doc = doc and '<dd><tt>%s</tt></dd>' % doc
            return '<dl><dt>%s</dt>%s</dl>\n' % (decl, doc)
def _docdescriptor(self, name, value, mod):
results = []
push = results.append
if name:
push('<dl><dt><strong>%s</strong></dt>\n' % name)
if value.__doc__ is not None:
doc = self.markup(getdoc(value), self.preformat)
push('<dd><tt>%s</tt></dd>\n' % doc)
push('</dl>\n')
return ''.join(results)
def docproperty(self, object, name=None, mod=None, cl=None):
"""Produce html documentation for a property."""
return self._docdescriptor(name, object, mod)
def docother(self, object, name=None, mod=None, *ignored):
"""Produce HTML documentation for a data object."""
lhs = name and '<strong>%s</strong> = ' % name or ''
return lhs + self.repr(object)
def docdata(self, object, name=None, mod=None, cl=None):
"""Produce html documentation for a data descriptor."""
return self._docdescriptor(name, object, mod)
def index(self, dir, shadowed=None):
"""Generate an HTML index for a directory of modules."""
modpkgs = []
if shadowed is None: shadowed = {}
for importer, name, ispkg in pkgutil.iter_modules([dir]):
modpkgs.append((name, '', ispkg, name in shadowed))
shadowed[name] = 1
modpkgs.sort()
contents = self.multicolumn(modpkgs, self.modpkglink)
return self.bigsection(dir, '#ffffff', '#ee77aa', contents)
# -------------------------------------------- text documentation generator
class TextRepr(Repr):
    """Class for safely making a text representation of a Python object."""

    def __init__(self):
        Repr.__init__(self)
        # Plain-text output can afford wider limits than Repr's defaults.
        self.maxlist = self.maxtuple = 20
        self.maxdict = 10
        self.maxstring = self.maxother = 100

    def repr1(self, x, level):
        # Dispatch to a type-specific repr_<typename> method when one exists;
        # spaces in the type name become underscores.
        if hasattr(type(x), '__name__'):
            methodname = 'repr_' + join(split(type(x).__name__), '_')
            method = getattr(self, methodname, None)
            if method is not None:
                return method(x, level)
        return cram(stripid(repr(x)), self.maxother)

    def repr_string(self, x, level):
        test = cram(x, self.maxstring)
        testrepr = repr(test)
        if '\\' in test and '\\' not in replace(testrepr, r'\\', ''):
            # Backslashes are only literal in the string and are never
            # needed to make any special characters, so show a raw string.
            return 'r' + testrepr[0] + test + testrepr[0]
        return testrepr

    repr_str = repr_string

    def repr_instance(self, x, level):
        try:
            return cram(stripid(repr(x)), self.maxstring)
        except:
            # repr() of a broken object may raise anything at all.
            return '<%s instance>' % x.__class__.__name__
class TextDoc(Doc):
    """Formatter class for text documentation."""

    # ------------------------------------------- text formatting utilities

    _repr_instance = TextRepr()
    repr = _repr_instance.repr

    def bold(self, text):
        """Format a string in bold by overstriking."""
        return join(map(lambda ch: ch + '\b' + ch, text), '')

    def indent(self, text, prefix='    '):
        """Indent text by prepending a given prefix to each line."""
        if not text: return ''
        lines = split(text, '\n')
        lines = map(lambda line, prefix=prefix: prefix + line, lines)
        if lines: lines[-1] = rstrip(lines[-1])
        return join(lines, '\n')

    def section(self, title, contents):
        """Format a section with a given heading."""
        return self.bold(title) + '\n' + rstrip(self.indent(contents)) + '\n\n'

    # ---------------------------------------------- type-specific routines

    def formattree(self, tree, modname, parent=None, prefix=''):
        """Render in text a class tree as returned by inspect.getclasstree()."""
        result = ''
        for entry in tree:
            if type(entry) is type(()):
                # (class, bases) tuple: one line, with bases shown unless
                # the only base is the enclosing parent.
                c, bases = entry
                result = result + prefix + classname(c, modname)
                if bases and bases != (parent,):
                    parents = map(lambda c, m=modname: classname(c, m), bases)
                    result = result + '(%s)' % join(parents, ', ')
                result = result + '\n'
            elif type(entry) is type([]):
                # Nested list: subclasses of the preceding class.
                result = result + self.formattree(
                    entry, modname, c, prefix + '    ')
        return result

    def docmodule(self, object, name=None, mod=None):
        """Produce text documentation for a given module object."""
        name = object.__name__ # ignore the passed-in name
        synop, desc = splitdoc(getdoc(object))
        result = self.section('NAME', name + (synop and ' - ' + synop))
        try:
            all = object.__all__
        except AttributeError:
            all = None
        try:
            file = inspect.getabsfile(object)
        except TypeError:
            file = '(built-in)'
        result = result + self.section('FILE', file)
        docloc = self.getdocloc(object)
        if docloc is not None:
            result = result + self.section('MODULE DOCS', docloc)
        if desc:
            result = result + self.section('DESCRIPTION', desc)
        classes = []
        for key, value in inspect.getmembers(object, inspect.isclass):
            # if __all__ exists, believe it.  Otherwise use old heuristic.
            if (all is not None
                or (inspect.getmodule(value) or object) is object):
                if visiblename(key, all):
                    classes.append((key, value))
        funcs = []
        for key, value in inspect.getmembers(object, inspect.isroutine):
            # if __all__ exists, believe it.  Otherwise use old heuristic.
            if (all is not None or
                inspect.isbuiltin(value) or inspect.getmodule(value) is object):
                if visiblename(key, all):
                    funcs.append((key, value))
        data = []
        for key, value in inspect.getmembers(object, isdata):
            if visiblename(key, all):
                data.append((key, value))
        if hasattr(object, '__path__'):
            # Packages: list their immediate submodules.
            modpkgs = []
            for importer, modname, ispkg in pkgutil.iter_modules(object.__path__):
                if ispkg:
                    modpkgs.append(modname + ' (package)')
                else:
                    modpkgs.append(modname)
            modpkgs.sort()
            result = result + self.section(
                'PACKAGE CONTENTS', join(modpkgs, '\n'))
        if classes:
            classlist = map(lambda (key, value): value, classes)
            contents = [self.formattree(
                inspect.getclasstree(classlist, 1), name)]
            for key, value in classes:
                contents.append(self.document(value, key, name))
            result = result + self.section('CLASSES', join(contents, '\n'))
        if funcs:
            contents = []
            for key, value in funcs:
                contents.append(self.document(value, key, name))
            result = result + self.section('FUNCTIONS', join(contents, '\n'))
        if data:
            contents = []
            for key, value in data:
                contents.append(self.docother(value, key, name, maxlen=70))
            result = result + self.section('DATA', join(contents, '\n'))
        if hasattr(object, '__version__'):
            version = str(object.__version__)
            # Unwrap a CVS/RCS "$Revision: ... $" keyword if present; the
            # '$' is split from 'Revision: ' so this file's own string is
            # not expanded by keyword substitution.
            if version[:11] == '$' + 'Revision: ' and version[-1:] == '$':
                version = strip(version[11:-1])
            result = result + self.section('VERSION', version)
        if hasattr(object, '__date__'):
            result = result + self.section('DATE', str(object.__date__))
        if hasattr(object, '__author__'):
            result = result + self.section('AUTHOR', str(object.__author__))
        if hasattr(object, '__credits__'):
            result = result + self.section('CREDITS', str(object.__credits__))
        return result

    def docclass(self, object, name=None, mod=None):
        """Produce text documentation for a given class object."""
        realname = object.__name__
        name = name or realname
        bases = object.__bases__

        def makename(c, m=object.__module__):
            # Qualify a class name relative to the documented module.
            return classname(c, m)

        if name == realname:
            title = 'class ' + self.bold(realname)
        else:
            title = self.bold(name) + ' = class ' + realname
        if bases:
            parents = map(makename, bases)
            title = title + '(%s)' % join(parents, ', ')

        doc = getdoc(object)
        contents = doc and [doc + '\n'] or []
        push = contents.append

        # List the mro, if non-trivial.
        mro = deque(inspect.getmro(object))
        if len(mro) > 2:
            push("Method resolution order:")
            for base in mro:
                push('    ' + makename(base))
            push('')

        # Cute little class to pump out a horizontal rule between sections.
        class HorizontalRule:
            def __init__(self):
                self.needone = 0
            def maybe(self):
                if self.needone:
                    push('-' * 70)
                self.needone = 1
        hr = HorizontalRule()

        def spill(msg, attrs, predicate):
            # Emit the attrs matching predicate as full documentation
            # entries; return the attrs that did not match.
            ok, attrs = _split_list(attrs, predicate)
            if ok:
                hr.maybe()
                push(msg)
                for name, kind, homecls, value in ok:
                    push(self.document(getattr(object, name),
                                       name, mod, object))
            return attrs

        def spilldescriptors(msg, attrs, predicate):
            # Like spill(), but via the descriptor formatter.
            ok, attrs = _split_list(attrs, predicate)
            if ok:
                hr.maybe()
                push(msg)
                for name, kind, homecls, value in ok:
                    push(self._docdescriptor(name, value, mod))
            return attrs

        def spilldata(msg, attrs, predicate):
            # Like spill(), but as one-line data entries with optional docs.
            ok, attrs = _split_list(attrs, predicate)
            if ok:
                hr.maybe()
                push(msg)
                for name, kind, homecls, value in ok:
                    if callable(value) or inspect.isdatadescriptor(value):
                        doc = getdoc(value)
                    else:
                        doc = None
                    push(self.docother(getattr(object, name),
                                       name, mod, maxlen=70, doc=doc) + '\n')
            return attrs

        attrs = filter(lambda (name, kind, cls, value): visiblename(name),
                       classify_class_attrs(object))
        while attrs:
            if mro:
                thisclass = mro.popleft()
            else:
                thisclass = attrs[0][2]
            attrs, inherited = _split_list(attrs, lambda t: t[2] is thisclass)

            if thisclass is __builtin__.object:
                attrs = inherited
                continue
            elif thisclass is object:
                tag = "defined here"
            else:
                tag = "inherited from %s" % classname(thisclass,
                                                      object.__module__)
            # NOTE(review): the result of this filter() is discarded, so the
            # statement has no effect; underscore names are already removed
            # by the visiblename() filter above.
            filter(lambda t: not t[0].startswith('_'), attrs)

            # Sort attrs by name.
            attrs.sort()

            # Pump out the attrs, segregated by kind.
            attrs = spill("Methods %s:\n" % tag, attrs,
                          lambda t: t[1] == 'method')
            attrs = spill("Class methods %s:\n" % tag, attrs,
                          lambda t: t[1] == 'class method')
            attrs = spill("Static methods %s:\n" % tag, attrs,
                          lambda t: t[1] == 'static method')
            attrs = spilldescriptors("Data descriptors %s:\n" % tag, attrs,
                                     lambda t: t[1] == 'data descriptor')
            attrs = spilldata("Data and other attributes %s:\n" % tag, attrs,
                              lambda t: t[1] == 'data')
            assert attrs == []
            attrs = inherited

        contents = '\n'.join(contents)
        if not contents:
            return title + '\n'
        return title + '\n' + self.indent(rstrip(contents), ' |  ') + '\n'

    def formatvalue(self, object):
        """Format an argument default value as text."""
        return '=' + self.repr(object)

    def docroutine(self, object, name=None, mod=None, cl=None):
        """Produce text documentation for a function or method object."""
        realname = object.__name__
        name = name or realname
        note = ''
        skipdocs = 0
        if inspect.ismethod(object):
            # Python 2 bound/unbound method: note its origin, then unwrap
            # to the underlying function for argspec inspection.
            imclass = object.im_class
            if cl:
                if imclass is not cl:
                    note = ' from ' + classname(imclass, mod)
            else:
                if object.im_self is not None:
                    note = ' method of %s instance' % classname(
                        object.im_self.__class__, mod)
                else:
                    note = ' unbound %s method' % classname(imclass,mod)
            object = object.im_func
        if name == realname:
            title = self.bold(realname)
        else:
            # Alias on the same class: skip duplicating the docstring.
            if (cl and realname in cl.__dict__ and
                cl.__dict__[realname] is object):
                skipdocs = 1
            title = self.bold(name) + ' = ' + realname
        if inspect.isfunction(object):
            args, varargs, varkw, defaults = inspect.getargspec(object)
            argspec = inspect.formatargspec(
                args, varargs, varkw, defaults, formatvalue=self.formatvalue)
            if realname == '<lambda>':
                title = self.bold(name) + ' lambda '
                argspec = argspec[1:-1] # remove parentheses
        else:
            argspec = '(...)'
        decl = title + argspec + note

        if skipdocs:
            return decl + '\n'
        else:
            doc = getdoc(object) or ''
            return decl + '\n' + (doc and rstrip(self.indent(doc)) + '\n')

    def _docdescriptor(self, name, value, mod):
        # Render a descriptor: bold name, then its indented docstring.
        results = []
        push = results.append

        if name:
            push(self.bold(name))
            push('\n')
        doc = getdoc(value) or ''
        if doc:
            push(self.indent(doc))
            push('\n')
        return ''.join(results)

    def docproperty(self, object, name=None, mod=None, cl=None):
        """Produce text documentation for a property."""
        return self._docdescriptor(name, object, mod)

    def docdata(self, object, name=None, mod=None, cl=None):
        """Produce text documentation for a data descriptor."""
        return self._docdescriptor(name, object, mod)

    def docother(self, object, name=None, mod=None, parent=None, maxlen=None, doc=None):
        """Produce text documentation for a data object."""
        repr = self.repr(object)
        if maxlen:
            # Truncate the repr so 'name = repr' fits within maxlen columns.
            line = (name and name + ' = ' or '') + repr
            chop = maxlen - len(line)
            if chop < 0: repr = repr[:chop] + '...'
        line = (name and self.bold(name) + ' = ' or '') + repr
        if doc is not None:
            line += '\n' + self.indent(str(doc))
        return line
# --------------------------------------------------------- user interfaces
def pager(text):
    """The first time this is called, determine what kind of pager to use."""
    # Self-replacing stub: rebinds the module-level name 'pager' to the
    # concrete pager chosen by getpager(), so the (possibly slow) probing
    # happens only on the first call.
    global pager
    pager = getpager()
    pager(text)
def getpager():
    """Decide what method to use for paging through text.

    Returns a callable taking the text to display.  Falls back through:
    plain output (non-tty or redirected), $PAGER, platform pagers
    ('more', 'less'), and finally the built-in ttypager.
    """
    if sys.platform.startswith('java'):
        # Jython: sys.stdout is not a real file; just print.
        return plainpager
    if type(sys.stdout) is not types.FileType:
        return plainpager
    if not sys.stdin.isatty() or not sys.stdout.isatty():
        return plainpager
    if 'PAGER' in os.environ:
        if sys.platform == 'win32': # pipes completely broken in Windows
            return lambda text: tempfilepager(plain(text), os.environ['PAGER'])
        elif os.environ.get('TERM') in ('dumb', 'emacs'):
            # Dumb terminals cannot render overstrike bold; strip it first.
            return lambda text: pipepager(plain(text), os.environ['PAGER'])
        else:
            return lambda text: pipepager(text, os.environ['PAGER'])
    if os.environ.get('TERM') in ('dumb', 'emacs'):
        return plainpager
    if sys.platform == 'win32' or sys.platform.startswith('os2'):
        return lambda text: tempfilepager(plain(text), 'more <')
    # Probe for 'less' by running it against /dev/null.
    if hasattr(os, 'system') and os.system('(less) 2>/dev/null') == 0:
        return lambda text: pipepager(text, 'less')
    # Last resort before the built-in pager: probe 'more' on a temp file.
    import tempfile
    (fd, filename) = tempfile.mkstemp()
    os.close(fd)
    try:
        if hasattr(os, 'system') and os.system('more %s' % filename) == 0:
            return lambda text: pipepager(text, 'more')
        else:
            return ttypager
    finally:
        os.unlink(filename)
def plain(text):
    """Remove boldface formatting from text."""
    # bold() overstrikes each character as "X\bX"; drop the leading copy
    # and the backspace, leaving the plain character.
    overstrike = re.compile('.\b')
    return overstrike.sub('', text)
def pipepager(text, cmd):
    """Page through text by feeding it to another program."""
    stream = os.popen(cmd, 'w')
    try:
        stream.write(text)
        stream.close()
    except IOError:
        # The user quit the pager before everything was written; the
        # resulting broken pipe is expected and harmless.
        pass
def tempfilepager(text, cmd):
    """Page through text by invoking a program on a temporary file."""
    import tempfile
    filename = tempfile.mktemp()
    out = open(filename, 'w')
    out.write(text)
    out.close()
    try:
        os.system(cmd + ' ' + filename)
    finally:
        # Always remove the scratch file, even if the pager fails.
        os.unlink(filename)
def ttypager(text):
    """Page through text on a text terminal.

    Uses cbreak single-key input when the tty module is available;
    otherwise falls back to line-based input.  Keys: Enter advances one
    line, b/B/Esc go back a page, q/Q quits, anything else advances a page.
    """
    lines = split(plain(text), '\n')
    try:
        import tty
        fd = sys.stdin.fileno()
        old = tty.tcgetattr(fd)
        tty.setcbreak(fd)
        getchar = lambda: sys.stdin.read(1)
    except (ImportError, AttributeError):
        tty = None
        getchar = lambda: sys.stdin.readline()[:-1][:1]

    try:
        # BUG FIX: os.environ values are strings, so the original
        # os.environ.get('LINES', 25) - 1 raised TypeError whenever LINES
        # was set in the environment.  Convert explicitly, and fall back
        # to the 25-line default if the value is not a valid integer.
        try:
            r = inc = int(os.environ.get('LINES', 25)) - 1
        except ValueError:
            r = inc = 24
        sys.stdout.write(join(lines[:inc], '\n') + '\n')
        while lines[r:]:
            sys.stdout.write('-- more --')
            sys.stdout.flush()
            c = getchar()

            if c in ('q', 'Q'):
                sys.stdout.write('\r          \r')
                break
            elif c in ('\r', '\n'):
                sys.stdout.write('\r          \r' + lines[r] + '\n')
                r = r + 1
                continue
            if c in ('b', 'B', '\x1b'):
                r = r - inc - inc
                if r < 0: r = 0
            sys.stdout.write('\n' + join(lines[r:r+inc], '\n') + '\n')
            r = r + inc

    finally:
        if tty:
            # Restore the terminal mode we saved before setcbreak().
            tty.tcsetattr(fd, tty.TCSAFLUSH, old)
def plainpager(text):
    """Simply print unformatted text.  This is the ultimate fallback."""
    stripped = plain(text)
    sys.stdout.write(stripped)
def describe(thing):
    """Produce a short description of the given thing."""
    if inspect.ismodule(thing):
        modname = thing.__name__
        if modname in sys.builtin_module_names:
            return 'built-in module ' + modname
        if hasattr(thing, '__path__'):
            return 'package ' + modname
        return 'module ' + modname
    if inspect.isbuiltin(thing):
        return 'built-in function ' + thing.__name__
    # Get/set and member descriptors are qualified by their owning class.
    for check, label in ((inspect.isgetsetdescriptor, 'getset descriptor %s.%s.%s'),
                         (inspect.ismemberdescriptor, 'member descriptor %s.%s.%s')):
        if check(thing):
            owner = thing.__objclass__
            return label % (owner.__module__, owner.__name__, thing.__name__)
    if inspect.isclass(thing):
        return 'class ' + thing.__name__
    if inspect.isfunction(thing):
        return 'function ' + thing.__name__
    if inspect.ismethod(thing):
        return 'method ' + thing.__name__
    if type(thing) is types.InstanceType:
        return 'instance of ' + thing.__class__.__name__
    return type(thing).__name__
def locate(path, forceload=0):
    """Locate an object by name or dotted path, importing as necessary."""
    parts = [part for part in split(path, '.') if part]
    module, n = None, 0
    # Import the longest importable prefix of the dotted path.
    while n < len(parts):
        nextmodule = safeimport(join(parts[:n+1], '.'), forceload)
        if nextmodule: module, n = nextmodule, n + 1
        else: break
    if module:
        # Resolve the remaining components as attributes of that module.
        object = module
        for part in parts[n:]:
            try: object = getattr(object, part)
            except AttributeError: return None
        return object
    else:
        # No importable prefix; fall back to builtins (e.g. 'int', 'len').
        # Returns None implicitly when nothing matches.
        if hasattr(__builtin__, path):
            return getattr(__builtin__, path)
# --------------------------------------- interactive interpreter interface
# Shared singleton formatters used by the top-level helper functions below
# (doc(), writedoc(), serve(), ...).
text = TextDoc()
html = HTMLDoc()
def resolve(thing, forceload=0):
    """Given an object or a path to an object, get the object and its name."""
    if isinstance(thing, str):
        # A dotted-path string: import/locate the named object.
        object = locate(thing, forceload)
        if not object:
            raise ImportError, 'no Python documentation found for %r' % thing
        return object, thing
    else:
        # Already an object; __name__ may be absent (e.g. for instances).
        return thing, getattr(thing, '__name__', None)
def doc(thing, title='Python Library Documentation: %s', forceload=0):
    """Display text documentation, given an object or a path to an object."""
    try:
        object, name = resolve(thing, forceload)
        desc = describe(object)
        module = inspect.getmodule(object)
        # Qualify the description with where the object was found.
        if name and '.' in name:
            desc += ' in ' + name[:name.rfind('.')]
        elif module and module is not object:
            desc += ' in module ' + module.__name__
        if not (inspect.ismodule(object) or
                inspect.isclass(object) or
                inspect.isroutine(object) or
                inspect.isgetsetdescriptor(object) or
                inspect.ismemberdescriptor(object) or
                isinstance(object, property)):
            # If the passed object is a piece of data or an instance,
            # document its available methods instead of its value.
            object = type(object)
            desc += ' object'
        pager(title % desc + '\n\n' + text.document(object, name))
    except (ImportError, ErrorDuringImport), value:
        # Report import problems instead of letting them propagate to the
        # interactive help caller.
        print value
def writedoc(thing, forceload=0):
    """Write HTML documentation to a file in the current directory."""
    try:
        object, name = resolve(thing, forceload)
        page = html.page(describe(object), html.document(object, name))
        # Output file is named after the object, e.g. 'os.path.html'.
        file = open(name + '.html', 'w')
        file.write(page)
        file.close()
        print 'wrote', name + '.html'
    except (ImportError, ErrorDuringImport), value:
        print value
def writedocs(dir, pkgpath='', done=None):
    """Write out HTML documentation for all modules in a directory tree."""
    if done is None:
        done = {}
    # 'done' is accepted for backward compatibility but not consulted here;
    # walk_packages already visits each module once.
    for module_info in pkgutil.walk_packages([dir], pkgpath):
        writedoc(module_info[1])
class Helper:
    """Interactive help session: the object behind the builtin help().

    Maps keywords and symbolic topics to pages of the installed HTML
    documentation, and delegates everything else to doc().
    """

    # Values are either another keyword/topic name to alias, or a
    # (doc-file path, space-separated related topics) tuple — see
    # showtopic(), which unpacks them exactly this way.
    keywords = {
        'and': 'BOOLEAN',
        'as': 'with',
        'assert': ('ref/assert', ''),
        'break': ('ref/break', 'while for'),
        'class': ('ref/class', 'CLASSES SPECIALMETHODS'),
        'continue': ('ref/continue', 'while for'),
        'def': ('ref/function', ''),
        'del': ('ref/del', 'BASICMETHODS'),
        'elif': 'if',
        'else': ('ref/if', 'while for'),
        'except': 'try',
        'exec': ('ref/exec', ''),
        'finally': 'try',
        'for': ('ref/for', 'break continue while'),
        'from': 'import',
        'global': ('ref/global', 'NAMESPACES'),
        'if': ('ref/if', 'TRUTHVALUE'),
        'import': ('ref/import', 'MODULES'),
        'in': ('ref/comparisons', 'SEQUENCEMETHODS2'),
        'is': 'COMPARISON',
        'lambda': ('ref/lambdas', 'FUNCTIONS'),
        'not': 'BOOLEAN',
        'or': 'BOOLEAN',
        'pass': ('ref/pass', ''),
        'print': ('ref/print', ''),
        'raise': ('ref/raise', 'EXCEPTIONS'),
        'return': ('ref/return', 'FUNCTIONS'),
        'try': ('ref/try', 'EXCEPTIONS'),
        'while': ('ref/while', 'break continue if TRUTHVALUE'),
        'with': ('ref/with', 'CONTEXTMANAGERS EXCEPTIONS yield'),
        'yield': ('ref/yield', ''),
    }

    # Same value convention as 'keywords' above.
    topics = {
        'TYPES': ('ref/types', 'STRINGS UNICODE NUMBERS SEQUENCES MAPPINGS FUNCTIONS CLASSES MODULES FILES inspect'),
        'STRINGS': ('ref/strings', 'str UNICODE SEQUENCES STRINGMETHODS FORMATTING TYPES'),
        'STRINGMETHODS': ('lib/string-methods', 'STRINGS FORMATTING'),
        'FORMATTING': ('lib/typesseq-strings', 'OPERATORS'),
        'UNICODE': ('ref/strings', 'encodings unicode SEQUENCES STRINGMETHODS FORMATTING TYPES'),
        'NUMBERS': ('ref/numbers', 'INTEGER FLOAT COMPLEX TYPES'),
        'INTEGER': ('ref/integers', 'int range'),
        'FLOAT': ('ref/floating', 'float math'),
        'COMPLEX': ('ref/imaginary', 'complex cmath'),
        'SEQUENCES': ('lib/typesseq', 'STRINGMETHODS FORMATTING xrange LISTS'),
        'MAPPINGS': 'DICTIONARIES',
        'FUNCTIONS': ('lib/typesfunctions', 'def TYPES'),
        'METHODS': ('lib/typesmethods', 'class def CLASSES TYPES'),
        'CODEOBJECTS': ('lib/bltin-code-objects', 'compile FUNCTIONS TYPES'),
        'TYPEOBJECTS': ('lib/bltin-type-objects', 'types TYPES'),
        'FRAMEOBJECTS': 'TYPES',
        'TRACEBACKS': 'TYPES',
        'NONE': ('lib/bltin-null-object', ''),
        'ELLIPSIS': ('lib/bltin-ellipsis-object', 'SLICINGS'),
        'FILES': ('lib/bltin-file-objects', ''),
        'SPECIALATTRIBUTES': ('lib/specialattrs', ''),
        'CLASSES': ('ref/types', 'class SPECIALMETHODS PRIVATENAMES'),
        'MODULES': ('lib/typesmodules', 'import'),
        'PACKAGES': 'import',
        'EXPRESSIONS': ('ref/summary', 'lambda or and not in is BOOLEAN COMPARISON BITWISE SHIFTING BINARY FORMATTING POWER UNARY ATTRIBUTES SUBSCRIPTS SLICINGS CALLS TUPLES LISTS DICTIONARIES BACKQUOTES'),
        'OPERATORS': 'EXPRESSIONS',
        'PRECEDENCE': 'EXPRESSIONS',
        'OBJECTS': ('ref/objects', 'TYPES'),
        'SPECIALMETHODS': ('ref/specialnames', 'BASICMETHODS ATTRIBUTEMETHODS CALLABLEMETHODS SEQUENCEMETHODS1 MAPPINGMETHODS SEQUENCEMETHODS2 NUMBERMETHODS CLASSES'),
        'BASICMETHODS': ('ref/customization', 'cmp hash repr str SPECIALMETHODS'),
        'ATTRIBUTEMETHODS': ('ref/attribute-access', 'ATTRIBUTES SPECIALMETHODS'),
        'CALLABLEMETHODS': ('ref/callable-types', 'CALLS SPECIALMETHODS'),
        'SEQUENCEMETHODS1': ('ref/sequence-types', 'SEQUENCES SEQUENCEMETHODS2 SPECIALMETHODS'),
        'SEQUENCEMETHODS2': ('ref/sequence-methods', 'SEQUENCES SEQUENCEMETHODS1 SPECIALMETHODS'),
        'MAPPINGMETHODS': ('ref/sequence-types', 'MAPPINGS SPECIALMETHODS'),
        'NUMBERMETHODS': ('ref/numeric-types', 'NUMBERS AUGMENTEDASSIGNMENT SPECIALMETHODS'),
        'EXECUTION': ('ref/execmodel', 'NAMESPACES DYNAMICFEATURES EXCEPTIONS'),
        'NAMESPACES': ('ref/naming', 'global ASSIGNMENT DELETION DYNAMICFEATURES'),
        'DYNAMICFEATURES': ('ref/dynamic-features', ''),
        'SCOPING': 'NAMESPACES',
        'FRAMES': 'NAMESPACES',
        'EXCEPTIONS': ('ref/exceptions', 'try except finally raise'),
        'COERCIONS': ('ref/coercion-rules','CONVERSIONS'),
        'CONVERSIONS': ('ref/conversions', 'COERCIONS'),
        'IDENTIFIERS': ('ref/identifiers', 'keywords SPECIALIDENTIFIERS'),
        'SPECIALIDENTIFIERS': ('ref/id-classes', ''),
        'PRIVATENAMES': ('ref/atom-identifiers', ''),
        'LITERALS': ('ref/atom-literals', 'STRINGS BACKQUOTES NUMBERS TUPLELITERALS LISTLITERALS DICTIONARYLITERALS'),
        'TUPLES': 'SEQUENCES',
        'TUPLELITERALS': ('ref/exprlists', 'TUPLES LITERALS'),
        'LISTS': ('lib/typesseq-mutable', 'LISTLITERALS'),
        'LISTLITERALS': ('ref/lists', 'LISTS LITERALS'),
        'DICTIONARIES': ('lib/typesmapping', 'DICTIONARYLITERALS'),
        'DICTIONARYLITERALS': ('ref/dict', 'DICTIONARIES LITERALS'),
        'BACKQUOTES': ('ref/string-conversions', 'repr str STRINGS LITERALS'),
        'ATTRIBUTES': ('ref/attribute-references', 'getattr hasattr setattr ATTRIBUTEMETHODS'),
        'SUBSCRIPTS': ('ref/subscriptions', 'SEQUENCEMETHODS1'),
        'SLICINGS': ('ref/slicings', 'SEQUENCEMETHODS2'),
        'CALLS': ('ref/calls', 'EXPRESSIONS'),
        'POWER': ('ref/power', 'EXPRESSIONS'),
        'UNARY': ('ref/unary', 'EXPRESSIONS'),
        'BINARY': ('ref/binary', 'EXPRESSIONS'),
        'SHIFTING': ('ref/shifting', 'EXPRESSIONS'),
        'BITWISE': ('ref/bitwise', 'EXPRESSIONS'),
        'COMPARISON': ('ref/comparisons', 'EXPRESSIONS BASICMETHODS'),
        'BOOLEAN': ('ref/Booleans', 'EXPRESSIONS TRUTHVALUE'),
        'ASSERTION': 'assert',
        'ASSIGNMENT': ('ref/assignment', 'AUGMENTEDASSIGNMENT'),
        'AUGMENTEDASSIGNMENT': ('ref/augassign', 'NUMBERMETHODS'),
        'DELETION': 'del',
        'PRINTING': 'print',
        'RETURNING': 'return',
        'IMPORTING': 'import',
        'CONDITIONAL': 'if',
        'LOOPING': ('ref/compound', 'for while break continue'),
        'TRUTHVALUE': ('lib/truth', 'if while and or not BASICMETHODS'),
        'DEBUGGING': ('lib/module-pdb', 'pdb'),
        'CONTEXTMANAGERS': ('ref/context-managers', 'with'),
    }

    def __init__(self, input, output):
        self.input = input
        self.output = output
        self.docdir = None
        if sys.executable is None:
            execdir = os.getcwd()
        else:
            execdir = os.path.dirname(sys.executable)
        homedir = os.environ.get('PYTHONHOME')
        # Probe the conventional install locations for the HTML docs; the
        # last existing candidate (one containing a 'lib' subdir) wins.
        for dir in [os.environ.get('PYTHONDOCS'),
                    homedir and os.path.join(homedir, 'doc'),
                    os.path.join(execdir, 'doc'),
                    '/usr/doc/python-docs-' + split(sys.version)[0],
                    '/usr/doc/python-' + split(sys.version)[0],
                    '/usr/doc/python-docs-' + sys.version[:3],
                    '/usr/doc/python-' + sys.version[:3],
                    os.path.join(sys.prefix, 'Resources/English.lproj/Documentation')]:
            if dir and os.path.isdir(os.path.join(dir, 'lib')):
                self.docdir = dir

    def __repr__(self):
        # Trick: evaluating bare 'help' at the interactive prompt triggers
        # __repr__; detect that case (caller frame named '?') and start an
        # interactive session instead of printing a repr.
        if inspect.stack()[1][3] == '?':
            self()
            return ''
        return '<pydoc.Helper instance>'

    def __call__(self, request=None):
        if request is not None:
            self.help(request)
        else:
            self.intro()
            self.interact()
            self.output.write('''
You are now leaving help and returning to the Python interpreter.
If you want to ask for help on a particular object directly from the
interpreter, you can type "help(object)".  Executing "help('string')"
has the same effect as typing a particular string at the help> prompt.
''')

    def interact(self):
        # Read-eval loop for the 'help> ' prompt; empty line, EOF, ^C,
        # 'q' or 'quit' all end the session.
        self.output.write('\n')
        while True:
            try:
                request = self.getline('help> ')
                if not request: break
            except (KeyboardInterrupt, EOFError):
                break
            # Strip surrounding whitespace and any quote characters so
            # help> 'spam' and help> "spam" behave like help> spam.
            request = strip(replace(request, '"', '', "'", ''))
            if lower(request) in ('q', 'quit'): break
            self.help(request)

    def getline(self, prompt):
        """Read one line, using raw_input when available."""
        if self.input is sys.stdin:
            return raw_input(prompt)
        else:
            self.output.write(prompt)
            self.output.flush()
            return self.input.readline()

    def help(self, request):
        # Dispatch a help request: special commands first, then keywords
        # and topics, and finally arbitrary objects/paths via doc().
        if type(request) is type(''):
            if request == 'help': self.intro()
            elif request == 'keywords': self.listkeywords()
            elif request == 'topics': self.listtopics()
            elif request == 'modules': self.listmodules()
            elif request[:8] == 'modules ':
                self.listmodules(split(request)[1])
            elif request in self.keywords: self.showtopic(request)
            elif request in self.topics: self.showtopic(request)
            elif request: doc(request, 'Help on %s:')
        elif isinstance(request, Helper): self()
        else: doc(request, 'Help on %s:')
        self.output.write('\n')

    def intro(self):
        self.output.write('''
Welcome to Python %s!  This is the online help utility.

If this is your first time using Python, you should definitely check out
the tutorial on the Internet at http://www.python.org/doc/tut/.

Enter the name of any module, keyword, or topic to get help on writing
Python programs and using Python modules.  To quit this help utility and
return to the interpreter, just type "quit".

To get a list of available modules, keywords, or topics, type "modules",
"keywords", or "topics".  Each module also comes with a one-line summary
of what it does; to list the modules whose summaries contain a given word
such as "spam", type "modules spam".
''' % sys.version[:3])

    def list(self, items, columns=4, width=80):
        # Print items sorted, column-major, padded to equal column widths.
        items = items[:]
        items.sort()
        colw = width / columns
        rows = (len(items) + columns - 1) / columns
        for row in range(rows):
            for col in range(columns):
                i = col * rows + row
                if i < len(items):
                    self.output.write(items[i])
                    if col < columns - 1:
                        self.output.write(' ' + ' ' * (colw-1 - len(items[i])))
            self.output.write('\n')

    def listkeywords(self):
        self.output.write('''
Here is a list of the Python keywords.  Enter any keyword to get more help.

''')
        self.list(self.keywords.keys())

    def listtopics(self):
        self.output.write('''
Here is a list of available topics.  Enter any topic name to get more help.

''')
        self.list(self.topics.keys())

    def showtopic(self, topic):
        # Display the HTML documentation page for a keyword or topic as
        # paged plain text; requires the installed HTML docs (self.docdir).
        if not self.docdir:
            self.output.write('''
Sorry, topic and keyword documentation is not available because the Python
HTML documentation files could not be found.  If you have installed them,
please set the environment variable PYTHONDOCS to indicate their location.

On the Microsoft Windows operating system, the files can be built by
running "hh -decompile . PythonNN.chm" in the C:\PythonNN\Doc> directory.
''')
            return
        target = self.topics.get(topic, self.keywords.get(topic))
        if not target:
            self.output.write('no documentation found for %s\n' % repr(topic))
            return
        if type(target) is type(''):
            # A plain string is an alias for another topic; recurse.
            return self.showtopic(target)

        filename, xrefs = target
        filename = self.docdir + '/' + filename + '.html'
        try:
            file = open(filename)
        except:
            self.output.write('could not read docs from %s\n' % filename)
            return

        # Strip navigation bars and address footers before converting
        # the page's HTML to plain text.
        divpat = re.compile('<div[^>]*navigat.*?</div.*?>', re.I | re.S)
        addrpat = re.compile('<address.*?>.*?</address.*?>', re.I | re.S)
        document = re.sub(addrpat, '', re.sub(divpat, '', file.read()))
        file.close()

        import htmllib, formatter, StringIO
        buffer = StringIO.StringIO()
        parser = htmllib.HTMLParser(
            formatter.AbstractFormatter(formatter.DumbWriter(buffer)))
        # Approximate table layout: paragraph breaks for tables/rows,
        # tabs between cells.
        parser.start_table = parser.do_p
        parser.end_table = lambda parser=parser: parser.do_p({})
        parser.start_tr = parser.do_br
        parser.start_td = parser.start_th = lambda a, b=buffer: b.write('\t')
        parser.feed(document)
        buffer = replace(buffer.getvalue(), '\xa0', ' ', '\n', '\n  ')
        pager('  ' + strip(buffer) + '\n')
        if xrefs:
            buffer = StringIO.StringIO()
            formatter.DumbWriter(buffer).send_flowing_data(
                'Related help topics: ' + join(split(xrefs), ', ') + '\n')
            self.output.write('\n%s\n' % buffer.getvalue())

    def listmodules(self, key=''):
        if key:
            self.output.write('''
Here is a list of matching modules.  Enter any module name to get more help.

''')
            apropos(key)
        else:
            self.output.write('''
Please wait a moment while I gather a list of all available modules...

''')
            modules = {}
            def callback(path, modname, desc, modules=modules):
                # Collect only top-level names; show packages as such.
                if modname and modname[-9:] == '.__init__':
                    modname = modname[:-9] + ' (package)'
                if find(modname, '.') < 0:
                    modules[modname] = 1
            ModuleScanner().run(callback)
            self.list(modules.keys())
            self.output.write('''
Enter any module name to get more help.  Or, type "modules spam" to search
for modules whose descriptions contain the word "spam".
''')
# Module-level help object bound to the interactive streams; this is what
# the interpreter exposes as the builtin help().
help = Helper(sys.stdin, sys.stdout)
class Scanner:
    """A generic tree iterator."""

    def __init__(self, roots, children, descendp):
        # Copy the roots so the caller's list is not consumed as we pop.
        self.roots = roots[:]
        self.state = []
        self.children = children
        self.descendp = descendp

    def next(self):
        # Iterative form of the original recursive walk: unwind exhausted
        # frames (and start the next root) until a frame with pending
        # children is found, or everything is exhausted.
        while True:
            if not self.state:
                if not self.roots:
                    return None
                root = self.roots.pop(0)
                self.state = [(root, self.children(root))]
            node, pending = self.state[-1]
            if pending:
                break
            self.state.pop()
        child = pending.pop(0)
        if self.descendp(child):
            self.state.append((child, self.children(child)))
        return child
class ModuleScanner:
    """An interruptible scanner that searches module synopses."""

    def run(self, callback, key=None, completer=None):
        """Scan built-in and importable modules, invoking callback(path,
        modname, desc) for each one (or only for those whose name/synopsis
        contains `key`, case-insensitively).  Setting self.quit to True
        from the callback stops the scan; `completer` runs at the end.
        """
        if key: key = lower(key)
        self.quit = False
        seen = {}

        for modname in sys.builtin_module_names:
            if modname != '__main__':
                seen[modname] = 1
                if key is None:
                    callback(None, modname, '')
                else:
                    # Built-ins have no source; match against the first
                    # docstring line instead.
                    desc = split(__import__(modname).__doc__ or '', '\n')[0]
                    if find(lower(modname + ' - ' + desc), key) >= 0:
                        callback(None, modname, desc)

        for importer, modname, ispkg in pkgutil.walk_packages():
            if self.quit:
                break
            if key is None:
                callback(None, modname, '')
            else:
                loader = importer.find_module(modname)
                if hasattr(loader,'get_source'):
                    # Prefer reading the synopsis from source, which avoids
                    # actually importing (and executing) the module.
                    import StringIO
                    desc = source_synopsis(
                        StringIO.StringIO(loader.get_source(modname))
                    ) or ''
                    if hasattr(loader,'get_filename'):
                        path = loader.get_filename(modname)
                    else:
                        path = None
                else:
                    # No source available: fall back to importing.
                    module = loader.load_module(modname)
                    desc = (module.__doc__ or '').splitlines()[0]
                    path = getattr(module,'__file__',None)
                if find(lower(modname + ' - ' + desc), key) >= 0:
                    callback(path, modname, desc)

        if completer:
            completer()
def apropos(key):
    """Print all the one-line module summaries that contain a substring."""
    def callback(path, modname, desc):
        # Present packages as "name (package)" instead of name.__init__.
        if modname[-9:] == '.__init__':
            modname = modname[:-9] + ' (package)'
        print modname, desc and '- ' + desc
    try: import warnings
    except ImportError: pass
    else: warnings.filterwarnings('ignore') # ignore problems during import
    ModuleScanner().run(callback, key)
# --------------------------------------------------- web browser interface
def serve(port, callback=None, completer=None):
    """Serve HTML documentation over HTTP on `port'.

    `callback(server)' fires once the server is activated;
    `completer()' fires when the server shuts down.
    """
    import BaseHTTPServer, mimetools, select
    # Patch up mimetools.Message so it doesn't break if rfc822 is reloaded.
    class Message(mimetools.Message):
        def __init__(self, fp, seekable=1):
            Message = self.__class__
            # Reach rfc822.Message.__init__ through the live base chain so
            # a reloaded rfc822 module is still honoured.
            Message.__bases__[0].__bases__[0].__init__(self, fp, seekable)
            self.encodingheader = self.getheader('content-transfer-encoding')
            self.typeheader = self.getheader('content-type')
            self.parsetype()
            self.parseplist()
    class DocHandler(BaseHTTPServer.BaseHTTPRequestHandler):
        def send_document(self, title, contents):
            # Emit one complete HTML page; ignore client disconnects.
            try:
                self.send_response(200)
                self.send_header('Content-Type', 'text/html')
                self.end_headers()
                self.wfile.write(html.page(title, contents))
            except IOError: pass
        def do_GET(self):
            # URL path "/name.html" documents `name'; "/" renders an index.
            path = self.path
            if path[-5:] == '.html': path = path[:-5]
            if path[:1] == '/': path = path[1:]
            if path and path != '.':
                try:
                    obj = locate(path, forceload=1)
                except ErrorDuringImport, value:
                    self.send_document(path, html.escape(str(value)))
                    return
                if obj:
                    self.send_document(describe(obj), html.document(obj, path))
                else:
                    self.send_document(path,
'no Python documentation found for %s' % repr(path))
            else:
                # Root URL: index of built-in modules plus everything on
                # sys.path.
                heading = html.heading(
'<big><big><strong>Python: Index of Modules</strong></big></big>',
'#ffffff', '#7799ee')
                def bltinlink(name):
                    return '<a href="%s.html">%s</a>' % (name, name)
                names = filter(lambda x: x != '__main__',
                               sys.builtin_module_names)
                contents = html.multicolumn(names, bltinlink)
                indices = ['<p>' + html.bigsection(
                    'Built-in Modules', '#ffffff', '#ee77aa', contents)]
                seen = {}
                for dir in sys.path:
                    indices.append(html.index(dir, seen))
                contents = heading + join(indices) + '''<p align=right>
<font color="#909090" face="helvetica, arial"><strong>
pydoc</strong> by Ka-Ping Yee <ping@lfw.org></font>'''
                self.send_document('Index of Modules', contents)
        def log_message(self, *args): pass
    class DocServer(BaseHTTPServer.HTTPServer):
        def __init__(self, port, callback):
            host = (sys.platform == 'mac') and '127.0.0.1' or 'localhost'
            self.address = ('', port)
            self.url = 'http://%s:%d/' % (host, port)
            self.callback = callback
            self.base.__init__(self, self.address, self.handler)
        def serve_until_quit(self):
            import sys
            if sys.platform.startswith('java'):
                # Jython's select lacks the CPython signature; use the shim.
                from select import cpython_compatible_select as select
            else:
                from select import select
            self.quit = False
            while not self.quit:
                # Poll with a 1-second timeout so `quit' is noticed promptly.
                rd, wr, ex = select([self.socket], [], [], 1)
                if rd: self.handle_request()
        def server_activate(self):
            self.base.server_activate(self)
            if self.callback: self.callback(self)
    # Wire the pieces together via class attributes (assigned after the
    # class statements so the names exist).
    DocServer.base = BaseHTTPServer.HTTPServer
    DocServer.handler = DocHandler
    DocHandler.MessageClass = Message
    try:
        try:
            DocServer(port, callback).serve_until_quit()
        except (KeyboardInterrupt, select.error):
            pass
    finally:
        if completer: completer()
# ----------------------------------------------------- graphical interface
def gui():
    """Graphical interface (starts web server and pops up a control window)."""
    class GUI:
        def __init__(self, window, port=7464):
            self.window = window
            self.server = None
            self.scanner = None
            import Tkinter
            # Top row: server status plus open/quit buttons (enabled once
            # the background server reports ready()).
            self.server_frm = Tkinter.Frame(window)
            self.title_lbl = Tkinter.Label(self.server_frm,
                text='Starting server...\n ')
            self.open_btn = Tkinter.Button(self.server_frm,
                text='open browser', command=self.open, state='disabled')
            self.quit_btn = Tkinter.Button(self.server_frm,
                text='quit serving', command=self.quit, state='disabled')
            # Second row: keyword search entry and stop button.
            self.search_frm = Tkinter.Frame(window)
            self.search_lbl = Tkinter.Label(self.search_frm, text='Search for')
            self.search_ent = Tkinter.Entry(self.search_frm)
            self.search_ent.bind('<Return>', self.search)
            self.stop_btn = Tkinter.Button(self.search_frm,
                text='stop', pady=0, command=self.stop, state='disabled')
            if sys.platform == 'win32':
                # Trying to hide and show this button crashes under Windows.
                self.stop_btn.pack(side='right')
            self.window.title('pydoc')
            self.window.protocol('WM_DELETE_WINDOW', self.quit)
            self.title_lbl.pack(side='top', fill='x')
            self.open_btn.pack(side='left', fill='x', expand=1)
            self.quit_btn.pack(side='right', fill='x', expand=1)
            self.server_frm.pack(side='top', fill='x')
            self.search_lbl.pack(side='left')
            self.search_ent.pack(side='right', fill='x', expand=1)
            self.search_frm.pack(side='top', fill='x')
            self.search_ent.focus_set()
            # Results area: listbox + scrollbar, shown only while expanded.
            font = ('helvetica', sys.platform == 'win32' and 8 or 10)
            self.result_lst = Tkinter.Listbox(window, font=font, height=6)
            self.result_lst.bind('<Button-1>', self.select)
            self.result_lst.bind('<Double-Button-1>', self.goto)
            self.result_scr = Tkinter.Scrollbar(window,
                orient='vertical', command=self.result_lst.yview)
            self.result_lst.config(yscrollcommand=self.result_scr.set)
            self.result_frm = Tkinter.Frame(window)
            self.goto_btn = Tkinter.Button(self.result_frm,
                text='go to selected', command=self.goto)
            self.hide_btn = Tkinter.Button(self.result_frm,
                text='hide results', command=self.hide)
            self.goto_btn.pack(side='left', fill='x', expand=1)
            self.hide_btn.pack(side='right', fill='x', expand=1)
            # Remember both the collapsed and expanded window geometries.
            self.window.update()
            self.minwidth = self.window.winfo_width()
            self.minheight = self.window.winfo_height()
            self.bigminheight = (self.server_frm.winfo_reqheight() +
                                 self.search_frm.winfo_reqheight() +
                                 self.result_lst.winfo_reqheight() +
                                 self.result_frm.winfo_reqheight())
            self.bigwidth, self.bigheight = self.minwidth, self.bigminheight
            self.expanded = 0
            self.window.wm_geometry('%dx%d' % (self.minwidth, self.minheight))
            self.window.wm_minsize(self.minwidth, self.minheight)
            self.window.tk.willdispatch()
            # The HTTP server runs in a daemonless worker thread; ready()
            # and quit() are its callbacks.
            import threading
            threading.Thread(
                target=serve, args=(port, self.ready, self.quit)).start()
        def ready(self, server):
            # Called from the server thread once serving has started.
            self.server = server
            self.title_lbl.config(
                text='Python documentation server at\n' + server.url)
            self.open_btn.config(state='normal')
            self.quit_btn.config(state='normal')
        def open(self, event=None, url=None):
            url = url or self.server.url
            try:
                import webbrowser
                webbrowser.open(url)
            except ImportError: # pre-webbrowser.py compatibility
                if sys.platform == 'win32':
                    os.system('start "%s"' % url)
                elif sys.platform == 'mac':
                    try: import ic
                    except ImportError: pass
                    else: ic.launchurl(url)
                else:
                    rc = os.system('netscape -remote "openURL(%s)" &' % url)
                    if rc: os.system('netscape "%s" &' % url)
        def quit(self, event=None):
            if self.server:
                self.server.quit = 1
            self.window.quit()
        def search(self, event=None):
            # Kick off a background ModuleScanner; update()/done() are its
            # callbacks.  Any previous scan is asked to stop first.
            key = self.search_ent.get()
            self.stop_btn.pack(side='right')
            self.stop_btn.config(state='normal')
            self.search_lbl.config(text='Searching for "%s"...' % key)
            self.search_ent.forget()
            self.search_lbl.pack(side='left')
            self.result_lst.delete(0, 'end')
            self.goto_btn.config(state='disabled')
            self.expand()
            import threading
            if self.scanner:
                self.scanner.quit = 1
            self.scanner = ModuleScanner()
            threading.Thread(target=self.scanner.run,
                             args=(self.update, key, self.done)).start()
        def update(self, path, modname, desc):
            # One search hit: append "name - synopsis" to the listbox.
            if modname[-9:] == '.__init__':
                modname = modname[:-9] + ' (package)'
            self.result_lst.insert('end',
                modname + ' - ' + (desc or '(no description)'))
        def stop(self, event=None):
            if self.scanner:
                self.scanner.quit = 1
                self.scanner = None
        def done(self):
            # Restore the search row once the scan finishes.
            self.scanner = None
            self.search_lbl.config(text='Search for')
            self.search_lbl.pack(side='left')
            self.search_ent.pack(side='right', fill='x', expand=1)
            if sys.platform != 'win32': self.stop_btn.forget()
            self.stop_btn.config(state='disabled')
        def select(self, event=None):
            self.goto_btn.config(state='normal')
        def goto(self, event=None):
            # Open the documentation page for the selected module.
            selection = self.result_lst.curselection()
            if selection:
                modname = split(self.result_lst.get(selection[0]))[0]
                self.open(url=self.server.url + modname + '.html')
        def collapse(self):
            if not self.expanded: return
            self.result_frm.forget()
            self.result_scr.forget()
            self.result_lst.forget()
            self.bigwidth = self.window.winfo_width()
            self.bigheight = self.window.winfo_height()
            self.window.wm_geometry('%dx%d' % (self.minwidth, self.minheight))
            self.window.wm_minsize(self.minwidth, self.minheight)
            self.expanded = 0
        def expand(self):
            if self.expanded: return
            self.result_frm.pack(side='bottom', fill='x')
            self.result_scr.pack(side='right', fill='y')
            self.result_lst.pack(side='top', fill='both', expand=1)
            self.window.wm_geometry('%dx%d' % (self.bigwidth, self.bigheight))
            self.window.wm_minsize(self.minwidth, self.bigminheight)
            self.expanded = 1
        def hide(self, event=None):
            self.stop()
            self.collapse()
    import Tkinter
    try:
        root = Tkinter.Tk()
        # Tk will crash if pythonw.exe has an XP .manifest
        # file and the root is not destroyed explicitly.
        # If the problem is ever fixed in Tk, the explicit
        # destroy can go.
        try:
            gui = GUI(root)
            root.mainloop()
        finally:
            root.destroy()
    except KeyboardInterrupt:
        pass
# -------------------------------------------------- command-line interface
def ispath(x):
    """Return True if `x' is a string containing a path separator.

    cli() uses this to decide whether an argument names a file or
    directory on disk rather than a dotted module path.  Non-string
    arguments simply yield False.
    """
    # `os.sep in x' replaces the deprecated string-module idiom
    # `find(x, os.sep) >= 0' -- same result, clearer, and independent of
    # the string-function aliases imported at the top of this file.
    return isinstance(x, str) and os.sep in x
def cli():
    """Command-line interface (looks at sys.argv to decide what to do)."""
    import getopt
    # Local marker exception: any usage problem falls through to the
    # usage message at the bottom.
    class BadUsage: pass
    # Scripts don't get the current directory in their path by default.
    scriptdir = os.path.dirname(sys.argv[0])
    if scriptdir in sys.path:
        sys.path.remove(scriptdir)
    sys.path.insert(0, '.')
    try:
        opts, args = getopt.getopt(sys.argv[1:], 'gk:p:w')
        writing = 0
        for opt, val in opts:
            if opt == '-g':
                # GUI mode; mutually exclusive with everything else.
                gui()
                return
            if opt == '-k':
                # Keyword search over module synopses.
                apropos(val)
                return
            if opt == '-p':
                # HTTP server mode on the given port.
                try:
                    port = int(val)
                except ValueError:
                    raise BadUsage
                def ready(server):
                    print 'pydoc server ready at %s' % server.url
                def stopped():
                    print 'pydoc server stopped'
                serve(port, ready, stopped)
                return
            if opt == '-w':
                writing = 1
        if not args: raise BadUsage
        for arg in args:
            # Path-looking arguments must exist on disk.
            if ispath(arg) and not os.path.exists(arg):
                print 'file %r does not exist' % arg
                break
            try:
                if ispath(arg) and os.path.isfile(arg):
                    arg = importfile(arg)
                if writing:
                    if ispath(arg) and os.path.isdir(arg):
                        writedocs(arg)
                    else:
                        writedoc(arg)
                else:
                    # Default: interactive text documentation.
                    help.help(arg)
            except ErrorDuringImport, value:
                print value
    except (getopt.error, BadUsage):
        cmd = os.path.basename(sys.argv[0])
        print """pydoc - the Python documentation tool

%s <name> ...
    Show text documentation on something.  <name> may be the name of a
    Python keyword, topic, function, module, or package, or a dotted
    reference to a class or function within a module or module in a
    package.  If <name> contains a '%s', it is used as the path to a
    Python source file to document. If name is 'keywords', 'topics',
    or 'modules', a listing of these things is displayed.

%s -k <keyword>
    Search for a keyword in the synopsis lines of all available modules.

%s -p <port>
    Start an HTTP server on the given port on the local machine.

%s -g
    Pop up a graphical interface for finding and serving documentation.

%s -w <name> ...
    Write out the HTML documentation for a module to a file in the current
    directory.  If <name> contains a '%s', it is treated as a filename; if
    it names a directory, documentation is written for all the contents.
""" % (cmd, os.sep, cmd, cmd, cmd, cmd, os.sep)
# Run the command-line interface when executed as a script.
if __name__ == '__main__': cli()
| gpl-2.0 |
joerocklin/gem5 | ext/ply/test/yacc_misplaced.py | 174 | 1539 | # -----------------------------------------------------------------------------
# yacc_misplaced.py
#
# A misplaced | in grammar rules
# -----------------------------------------------------------------------------
import sys
if ".." not in sys.path: sys.path.insert(0,"..")
import ply.yacc as yacc
from calclex import tokens
# Parsing rules
# Operator precedence, lowest to highest.  UMINUS is a fictitious token
# used only to give unary minus a higher precedence via %prec below.
precedence = (
    ('left','PLUS','MINUS'),
    ('left','TIMES','DIVIDE'),
    ('right','UMINUS'),
    )
# dictionary of names
names = { }
def p_statement_assign(t):
    # The docstring below is the grammar production consumed by ply.yacc;
    # it is not documentation and must not be reworded.
    'statement : NAME EQUALS expression'
    names[t[1]] = t[3]
def p_statement_expr(t):
    # Grammar rule: a bare expression is a statement; its value is printed.
    'statement : expression'
    print(t[1])
def p_expression_binop(t):
    # BROKEN ON PURPOSE: the first alternative is missing, so the rule
    # starts with a misplaced '|' (see the file header comment).  This
    # file is a ply test fixture that verifies yacc's error reporting,
    # so the grammar text must stay exactly as written.
    ''' | expression PLUS expression
                  | expression MINUS expression
                  | expression TIMES expression
                  | expression DIVIDE expression'''
    if t[2] == '+'  : t[0] = t[1] + t[3]
    elif t[2] == '-': t[0] = t[1] - t[3]
    elif t[2] == '*': t[0] = t[1] * t[3]
    elif t[2] == '/': t[0] = t[1] / t[3]
def p_expression_uminus(t):
    # %prec UMINUS borrows the high precedence declared in `precedence'.
    'expression : MINUS expression %prec UMINUS'
    t[0] = -t[2]
def p_expression_group(t):
    # Parenthesised expression: the inner value passes through unchanged.
    'expression : LPAREN expression RPAREN'
    t[0] = t[2]
def p_expression_number(t):
    # Numeric literal: the token value is the expression value.
    'expression : NUMBER'
    t[0] = t[1]
def p_expression_name(t):
    # Look up a previously assigned name; undefined names evaluate to 0
    # after printing a diagnostic.
    'expression : NAME'
    try:
        t[0] = names[t[1]]
    except LookupError:
        print("Undefined name '%s'" % t[1])
        t[0] = 0
def p_error(t):
    # Called by yacc when the input matches no grammar rule.
    print("Syntax error at '%s'" % t.value)
# Build the parser tables.  With the deliberately malformed rule above,
# this call is expected to report a grammar error -- that is the point of
# this fixture.
yacc.yacc()
| bsd-3-clause |
linjoahow/w17test_1 | static/Brython3.1.1-20150328-091302/Lib/configparser.py | 692 | 50025 | """Configuration file parser.
A configuration file consists of sections, led by a "[section]" header,
and followed by "name: value" entries, with continuations and such in
the style of RFC 822.
Intrinsic defaults can be specified by passing them into the
ConfigParser constructor as a dictionary.
class:
ConfigParser -- responsible for parsing a list of
configuration files, and managing the parsed database.
methods:
__init__(defaults=None, dict_type=_default_dict, allow_no_value=False,
delimiters=('=', ':'), comment_prefixes=('#', ';'),
inline_comment_prefixes=None, strict=True,
empty_lines_in_values=True):
Create the parser. When `defaults' is given, it is initialized into the
dictionary or intrinsic defaults. The keys must be strings, the values
must be appropriate for %()s string interpolation.
When `dict_type' is given, it will be used to create the dictionary
objects for the list of sections, for the options within a section, and
for the default values.
When `delimiters' is given, it will be used as the set of substrings
that divide keys from values.
When `comment_prefixes' is given, it will be used as the set of
substrings that prefix comments in empty lines. Comments can be
indented.
When `inline_comment_prefixes' is given, it will be used as the set of
substrings that prefix comments in non-empty lines.
When `strict` is True, the parser won't allow for any section or option
duplicates while reading from a single source (file, string or
dictionary). Default is True.
When `empty_lines_in_values' is False (default: True), each empty line
marks the end of an option. Otherwise, internal empty lines of
a multiline option are kept as part of the value.
When `allow_no_value' is True (default: False), options without
values are accepted; the value presented for these is None.
sections()
Return all the configuration section names, sans DEFAULT.
has_section(section)
Return whether the given section exists.
has_option(section, option)
Return whether the given option exists in the given section.
options(section)
Return list of configuration options for the named section.
read(filenames, encoding=None)
Read and parse the list of named configuration files, given by
name. A single filename is also allowed. Non-existing files
are ignored. Return list of successfully read files.
read_file(f, filename=None)
Read and parse one configuration file, given as a file object.
The filename defaults to f.name; it is only used in error
messages (if f has no `name' attribute, the string `<???>' is used).
read_string(string)
Read configuration from a given string.
read_dict(dictionary)
Read configuration from a dictionary. Keys are section names,
values are dictionaries with keys and values that should be present
in the section. If the used dictionary type preserves order, sections
and their keys will be added in order. Values are automatically
converted to strings.
get(section, option, raw=False, vars=None, fallback=_UNSET)
Return a string value for the named option. All % interpolations are
expanded in the return values, based on the defaults passed into the
constructor and the DEFAULT section. Additional substitutions may be
provided using the `vars' argument, which must be a dictionary whose
contents override any pre-existing defaults. If `option' is a key in
`vars', the value from `vars' is used.
getint(section, options, raw=False, vars=None, fallback=_UNSET)
Like get(), but convert value to an integer.
getfloat(section, options, raw=False, vars=None, fallback=_UNSET)
Like get(), but convert value to a float.
getboolean(section, options, raw=False, vars=None, fallback=_UNSET)
Like get(), but convert value to a boolean (currently case
insensitively defined as 0, false, no, off for False, and 1, true,
yes, on for True). Returns False or True.
items(section=_UNSET, raw=False, vars=None)
If section is given, return a list of tuples with (name, value) for
each option in the section. Otherwise, return a list of tuples with
(section_name, section_proxy) for each section, including DEFAULTSECT.
remove_section(section)
Remove the given file section and all its options.
remove_option(section, option)
Remove the given option from the given section.
set(section, option, value)
Set the given option.
write(fp, space_around_delimiters=True)
Write the configuration state in .ini format. If
`space_around_delimiters' is True (the default), delimiters
between keys and values are surrounded by spaces.
"""
from collections.abc import MutableMapping
from collections import OrderedDict as _default_dict, ChainMap as _ChainMap
import functools
import io
import itertools
import re
import sys
import warnings
# Public API of this module.
__all__ = ["NoSectionError", "DuplicateOptionError", "DuplicateSectionError",
           "NoOptionError", "InterpolationError", "InterpolationDepthError",
           "InterpolationSyntaxError", "ParsingError",
           "MissingSectionHeaderError",
           "ConfigParser", "SafeConfigParser", "RawConfigParser",
           "DEFAULTSECT", "MAX_INTERPOLATION_DEPTH"]
# Name of the implicit section whose options are visible in every section.
DEFAULTSECT = "DEFAULT"
# Hard limit on recursive value substitution, to catch reference cycles.
MAX_INTERPOLATION_DEPTH = 10
# exception classes
class Error(Exception):
    """Base class for ConfigParser exceptions."""

    def __init__(self, msg=''):
        self.message = msg
        Exception.__init__(self, msg)

    # 'message' is exposed as a property purely to override the
    # deprecation of BaseException.message (deprecated since Python 2.6).
    @property
    def message(self):
        """Getter for 'message'; overrides the BaseException deprecation."""
        return self.__message

    @message.setter
    def message(self, value):
        """Setter for 'message'; overrides the BaseException deprecation."""
        self.__message = value

    def __repr__(self):
        return self.message

    __str__ = __repr__
class NoSectionError(Error):
    """Raised when no section matches a requested option."""

    def __init__(self, section):
        Error.__init__(self, 'No section: %r' % (section,))
        self.section = section
        # Keep args in sync so the exception reprs/pickles faithfully.
        self.args = (section, )
class DuplicateSectionError(Error):
    """Raised when a section is repeated in an input source.

    Possible repetitions that raise this exception are: multiple creation
    using the API or in strict parsers when a section is found more than once
    in a single input file, string or dictionary.
    """

    def __init__(self, section, source=None, lineno=None):
        msg = [repr(section), " already exists"]
        if source is not None:
            # Raised while reading: prefix the message with the location.
            message = ["While reading from ", source]
            if lineno is not None:
                message.append(" [line {0:2d}]".format(lineno))
            message.append(": section ")
            message.extend(msg)
            msg = message
        else:
            # Raised via the API (add_section): simpler message.
            msg.insert(0, "Section ")
        Error.__init__(self, "".join(msg))
        self.section = section
        self.source = source
        self.lineno = lineno
        self.args = (section, source, lineno)
class DuplicateOptionError(Error):
    """Raised by strict parsers when an option is repeated in an input source.

    Current implementation raises this exception only when an option is found
    more than once in a single file, string or dictionary.
    """

    def __init__(self, section, option, source=None, lineno=None):
        msg = [repr(option), " in section ", repr(section),
               " already exists"]
        if source is not None:
            # Raised while reading: prefix the message with the location.
            message = ["While reading from ", source]
            if lineno is not None:
                message.append(" [line {0:2d}]".format(lineno))
            message.append(": option ")
            message.extend(msg)
            msg = message
        else:
            msg.insert(0, "Option ")
        Error.__init__(self, "".join(msg))
        self.section = section
        self.option = option
        self.source = source
        self.lineno = lineno
        self.args = (section, option, source, lineno)
class NoOptionError(Error):
    """A requested option was not found."""

    def __init__(self, option, section):
        Error.__init__(self, "No option %r in section: %r" %
                       (option, section))
        self.option = option
        self.section = section
        self.args = (option, section)
class InterpolationError(Error):
    """Base class for interpolation-related exceptions."""

    def __init__(self, option, section, msg):
        Error.__init__(self, msg)
        self.option = option
        self.section = section
        self.args = (option, section, msg)
class InterpolationMissingOptionError(InterpolationError):
    """A string substitution required a setting which was not available."""

    def __init__(self, option, section, rawval, reference):
        # `reference' is the key that could not be resolved while expanding
        # `rawval' for (section, option).
        msg = ("Bad value substitution:\n"
               "\tsection: [%s]\n"
               "\toption : %s\n"
               "\tkey    : %s\n"
               "\trawval : %s\n"
               % (section, option, reference, rawval))
        InterpolationError.__init__(self, option, section, msg)
        self.reference = reference
        self.args = (option, section, rawval, reference)
class InterpolationSyntaxError(InterpolationError):
    """Raised when the source text contains invalid syntax.

    Current implementation raises this exception when the source text into
    which substitutions are made does not conform to the required syntax.
    """
    # No behaviour of its own; exists to give callers a distinct type.
class InterpolationDepthError(InterpolationError):
    """Raised when substitutions are nested too deeply."""
    # The limit is the module constant MAX_INTERPOLATION_DEPTH.

    def __init__(self, option, section, rawval):
        msg = ("Value interpolation too deeply recursive:\n"
               "\tsection: [%s]\n"
               "\toption : %s\n"
               "\trawval : %s\n"
               % (section, option, rawval))
        InterpolationError.__init__(self, option, section, msg)
        self.args = (option, section, rawval)
class ParsingError(Error):
    """Raised when a configuration file does not follow legal syntax."""

    def __init__(self, source=None, filename=None):
        # Exactly one of `source'/`filename' arguments has to be given.
        # `filename' kept for compatibility.
        if filename and source:
            raise ValueError("Cannot specify both `filename' and `source'. "
                             "Use `source'.")
        elif not filename and not source:
            raise ValueError("Required argument `source' not given.")
        elif filename:
            source = filename
        Error.__init__(self, 'Source contains parsing errors: %s' % source)
        self.source = source
        # List of (lineno, line) pairs accumulated via append() below.
        self.errors = []
        self.args = (source, )

    @property
    def filename(self):
        """Deprecated, use `source'."""
        warnings.warn(
            "The 'filename' attribute will be removed in future versions.  "
            "Use 'source' instead.",
            DeprecationWarning, stacklevel=2
        )
        return self.source

    @filename.setter
    def filename(self, value):
        """Deprecated, use `source'."""
        warnings.warn(
            "The 'filename' attribute will be removed in future versions.  "
            "Use 'source' instead.",
            DeprecationWarning, stacklevel=2
        )
        self.source = value

    def append(self, lineno, line):
        # Record one offending line and extend the printable message.
        self.errors.append((lineno, line))
        self.message += '\n\t[line %2d]: %s' % (lineno, line)
class MissingSectionHeaderError(ParsingError):
    """Raised when a key-value pair is found before any section header."""

    def __init__(self, filename, lineno, line):
        # Bypass ParsingError.__init__ deliberately: the message format
        # differs and `errors' bookkeeping is not needed here.
        Error.__init__(
            self,
            'File contains no section headers.\nfile: %s, line: %d\n%r' %
            (filename, lineno, line))
        self.source = filename
        self.lineno = lineno
        self.line = line
        self.args = (filename, lineno, line)
# Used in parser getters to indicate the default behaviour when a specific
# option is not found: raise an exception. Created to enable `None' as
# a valid fallback value.
_UNSET = object()
class Interpolation:
    """Dummy interpolation that passes the value through with no changes."""
    # These four hooks form the complete interpolation interface; parsers
    # call them around get/set/read/write.  The base class is an identity
    # transform, so values are stored and returned verbatim.

    def before_get(self, parser, section, option, value, defaults):
        return value

    def before_set(self, parser, section, option, value):
        return value

    def before_read(self, parser, section, option, value):
        return value

    def before_write(self, parser, section, option, value):
        return value
class BasicInterpolation(Interpolation):
    """Interpolation as implemented in the classic ConfigParser.

    The option values can contain format strings which refer to other values in
    the same section, or values in the special default section.

    For example:

        something: %(dir)s/whatever

    would resolve the "%(dir)s" to the value of dir.  All reference
    expansions are done late, on demand. If a user needs to use a bare % in
    a configuration file, she can escape it by writing %%. Other % usage
    is considered a user error and raises `InterpolationSyntaxError'."""

    # Matches a single %(name)s reference.
    _KEYCRE = re.compile(r"%\(([^)]+)\)s")

    def before_get(self, parser, section, option, value, defaults):
        L = []
        self._interpolate_some(parser, option, L, value, section, defaults, 1)
        return ''.join(L)

    def before_set(self, parser, section, option, value):
        # Validate eagerly: strip escapes and references, then any leftover
        # '%' is a syntax error at store time.
        tmp_value = value.replace('%%', '') # escaped percent signs
        tmp_value = self._KEYCRE.sub('', tmp_value) # valid syntax
        if '%' in tmp_value:
            raise ValueError("invalid interpolation syntax in %r at "
                             "position %d" % (value, tmp_value.find('%')))
        return value

    def _interpolate_some(self, parser, option, accum, rest, section, map,
                          depth):
        # Recursively expand %(name)s references in `rest', appending the
        # expanded fragments to `accum'.
        if depth > MAX_INTERPOLATION_DEPTH:
            raise InterpolationDepthError(option, section, rest)
        while rest:
            p = rest.find("%")
            if p < 0:
                accum.append(rest)
                return
            if p > 0:
                accum.append(rest[:p])
                rest = rest[p:]
            # p is no longer used
            c = rest[1:2]
            if c == "%":
                # '%%' is an escaped percent sign.
                accum.append("%")
                rest = rest[2:]
            elif c == "(":
                m = self._KEYCRE.match(rest)
                if m is None:
                    raise InterpolationSyntaxError(option, section,
                        "bad interpolation variable reference %r" % rest)
                var = parser.optionxform(m.group(1))
                rest = rest[m.end():]
                try:
                    v = map[var]
                except KeyError:
                    raise InterpolationMissingOptionError(
                        option, section, rest, var)
                if "%" in v:
                    # The referenced value itself contains references.
                    self._interpolate_some(parser, option, accum, v,
                                           section, map, depth + 1)
                else:
                    accum.append(v)
            else:
                raise InterpolationSyntaxError(
                    option, section,
                    "'%%' must be followed by '%%' or '(', "
                    "found: %r" % (rest,))
class ExtendedInterpolation(Interpolation):
    """Advanced variant of interpolation, supports the syntax used by
    `zc.buildout'. Enables interpolation between sections."""

    # Matches a single ${name} or ${section:name} reference.
    _KEYCRE = re.compile(r"\$\{([^}]+)\}")

    def before_get(self, parser, section, option, value, defaults):
        L = []
        self._interpolate_some(parser, option, L, value, section, defaults, 1)
        return ''.join(L)

    def before_set(self, parser, section, option, value):
        # Validate eagerly: strip escapes and references, then any leftover
        # '$' is a syntax error at store time.
        tmp_value = value.replace('$$', '') # escaped dollar signs
        tmp_value = self._KEYCRE.sub('', tmp_value) # valid syntax
        if '$' in tmp_value:
            # BUGFIX: report the position of the offending '$'.  This used
            # to call tmp_value.find('%'), which almost always returned -1
            # here because the syntax being validated uses '$', not '%'.
            raise ValueError("invalid interpolation syntax in %r at "
                             "position %d" % (value, tmp_value.find('$')))
        return value

    def _interpolate_some(self, parser, option, accum, rest, section, map,
                          depth):
        # Recursively expand ${name} / ${section:name} references in
        # `rest', appending the expanded fragments to `accum'.
        if depth > MAX_INTERPOLATION_DEPTH:
            raise InterpolationDepthError(option, section, rest)
        while rest:
            p = rest.find("$")
            if p < 0:
                accum.append(rest)
                return
            if p > 0:
                accum.append(rest[:p])
                rest = rest[p:]
            # p is no longer used
            c = rest[1:2]
            if c == "$":
                # '$$' is an escaped dollar sign.
                accum.append("$")
                rest = rest[2:]
            elif c == "{":
                m = self._KEYCRE.match(rest)
                if m is None:
                    raise InterpolationSyntaxError(option, section,
                        "bad interpolation variable reference %r" % rest)
                path = m.group(1).split(':')
                rest = rest[m.end():]
                sect = section
                opt = option
                try:
                    if len(path) == 1:
                        # ${name}: look up in the current section's map.
                        opt = parser.optionxform(path[0])
                        v = map[opt]
                    elif len(path) == 2:
                        # ${section:name}: cross-section lookup.
                        sect = path[0]
                        opt = parser.optionxform(path[1])
                        v = parser.get(sect, opt, raw=True)
                    else:
                        raise InterpolationSyntaxError(
                            option, section,
                            "More than one ':' found: %r" % (rest,))
                except (KeyError, NoSectionError, NoOptionError):
                    raise InterpolationMissingOptionError(
                        option, section, rest, ":".join(path))
                if "$" in v:
                    # The referenced value itself contains references;
                    # expand it against its own section's options.
                    self._interpolate_some(parser, opt, accum, v, sect,
                                           dict(parser.items(sect, raw=True)),
                                           depth + 1)
                else:
                    accum.append(v)
            else:
                raise InterpolationSyntaxError(
                    option, section,
                    "'$' must be followed by '$' or '{', "
                    "found: %r" % (rest,))
class LegacyInterpolation(Interpolation):
    """Deprecated interpolation used in old versions of ConfigParser.
    Use BasicInterpolation or ExtendedInterpolation instead."""

    # Matches either a %(name)s reference or any single character, so a
    # sub() pass rewrites references while leaving other text untouched.
    _KEYCRE = re.compile(r"%\(([^)]*)\)s|.")

    def before_get(self, parser, section, option, value, vars):
        rawval = value
        depth = MAX_INTERPOLATION_DEPTH
        while depth:                    # Loop through this until it's done
            depth -= 1
            if value and "%(" in value:
                # Normalize reference names via optionxform, then let
                # %-formatting against `vars' do the substitution.
                replace = functools.partial(self._interpolation_replace,
                                            parser=parser)
                value = self._KEYCRE.sub(replace, value)
                try:
                    value = value % vars
                except KeyError as e:
                    raise InterpolationMissingOptionError(
                        option, section, rawval, e.args[0])
            else:
                break
        if value and "%(" in value:
            # References survived MAX_INTERPOLATION_DEPTH passes: a cycle.
            raise InterpolationDepthError(option, section, rawval)
        return value

    def before_set(self, parser, section, option, value):
        return value

    @staticmethod
    def _interpolation_replace(match, parser):
        s = match.group(1)
        if s is None:
            # Plain character (the '.' alternative): keep as-is.
            return match.group()
        else:
            return "%%(%s)s" % parser.optionxform(s)
class RawConfigParser(MutableMapping):
    """ConfigParser that does not do interpolation."""

    # Regular expressions for parsing section headers and options.
    # The templates are written for re.VERBOSE, so the whitespace and
    # '#' comments inside them are not part of the match.
    _SECT_TMPL = r"""
        \[                                 # [
        (?P<header>[^]]+)                  # very permissive!
        \]                                 # ]
        """
    _OPT_TMPL = r"""
        (?P<option>.*?)                    # very permissive!
        \s*(?P<vi>{delim})\s*              # any number of space/tab,
                                           # followed by any of the
                                           # allowed delimiters,
                                           # followed by any space/tab
        (?P<value>.*)$                     # everything up to eol
        """
    _OPT_NV_TMPL = r"""
        (?P<option>.*?)                    # very permissive!
        \s*(?:                             # any number of space/tab,
        (?P<vi>{delim})\s*                 # optionally followed by
                                           # any of the allowed
                                           # delimiters, followed by any
                                           # space/tab
        (?P<value>.*))?$                   # everything up to eol
        """
    # Interpolation algorithm to be used if the user does not specify another
    _DEFAULT_INTERPOLATION = Interpolation()
    # Compiled regular expression for matching sections
    SECTCRE = re.compile(_SECT_TMPL, re.VERBOSE)
    # Compiled regular expression for matching options with typical separators
    OPTCRE = re.compile(_OPT_TMPL.format(delim="=|:"), re.VERBOSE)
    # Compiled regular expression for matching options with optional values
    # delimited using typical separators
    OPTCRE_NV = re.compile(_OPT_NV_TMPL.format(delim="=|:"), re.VERBOSE)
    # Compiled regular expression for matching leading whitespace in a line
    NONSPACECRE = re.compile(r"\S")
    # Possible boolean values in the configuration.
    BOOLEAN_STATES = {'1': True, 'yes': True, 'true': True, 'on': True,
                      '0': False, 'no': False, 'false': False, 'off': False}
    def __init__(self, defaults=None, dict_type=_default_dict,
                 allow_no_value=False, *, delimiters=('=', ':'),
                 comment_prefixes=('#', ';'), inline_comment_prefixes=None,
                 strict=True, empty_lines_in_values=True,
                 default_section=DEFAULTSECT,
                 interpolation=_UNSET):
        # All internal mappings use the caller-supplied dict type so that
        # ordering (and any other dict semantics) is configurable.
        self._dict = dict_type
        self._sections = self._dict()
        self._defaults = self._dict()
        self._proxies = self._dict()
        self._proxies[default_section] = SectionProxy(self, default_section)
        if defaults:
            for key, value in defaults.items():
                self._defaults[self.optionxform(key)] = value
        self._delimiters = tuple(delimiters)
        if delimiters == ('=', ':'):
            # Fast path: reuse the precompiled class-level patterns.
            self._optcre = self.OPTCRE_NV if allow_no_value else self.OPTCRE
        else:
            d = "|".join(re.escape(d) for d in delimiters)
            if allow_no_value:
                self._optcre = re.compile(self._OPT_NV_TMPL.format(delim=d),
                                          re.VERBOSE)
            else:
                self._optcre = re.compile(self._OPT_TMPL.format(delim=d),
                                          re.VERBOSE)
        self._comment_prefixes = tuple(comment_prefixes or ())
        self._inline_comment_prefixes = tuple(inline_comment_prefixes or ())
        self._strict = strict
        self._allow_no_value = allow_no_value
        self._empty_lines_in_values = empty_lines_in_values
        self.default_section=default_section
        self._interpolation = interpolation
        if self._interpolation is _UNSET:
            self._interpolation = self._DEFAULT_INTERPOLATION
        if self._interpolation is None:
            # Explicit None means "no interpolation": use the identity one.
            self._interpolation = Interpolation()
    def defaults(self):
        # Return the mapping of default options applied to every section.
        return self._defaults
def sections(self):
"""Return a list of section names, excluding [DEFAULT]"""
# self._sections will never have [DEFAULT] in it
return list(self._sections.keys())
    def add_section(self, section):
        """Create a new section in the configuration.
        Raise DuplicateSectionError if a section by the specified name
        already exists. Raise ValueError if name is DEFAULT.
        """
        if section == self.default_section:
            raise ValueError('Invalid section name: %r' % section)
        if section in self._sections:
            raise DuplicateSectionError(section)
        self._sections[section] = self._dict()
        # Each section gets a mapping-protocol view object alongside its
        # option storage.
        self._proxies[section] = SectionProxy(self, section)
    def has_section(self, section):
        """Indicate whether the named section is present in the configuration.
        The DEFAULT section is not acknowledged.
        """
        # DEFAULT lives in self._defaults, so it is naturally excluded here.
        return section in self._sections
def options(self, section):
"""Return a list of option names for the given section name."""
try:
opts = self._sections[section].copy()
except KeyError:
raise NoSectionError(section)
opts.update(self._defaults)
return list(opts.keys())
def read(self, filenames, encoding=None):
"""Read and parse a filename or a list of filenames.
Files that cannot be opened are silently ignored; this is
designed so that you can specify a list of potential
configuration file locations (e.g. current directory, user's
home directory, systemwide directory), and all existing
configuration files in the list will be read. A single
filename may also be given.
Return list of successfully read files.
"""
if isinstance(filenames, str):
filenames = [filenames]
read_ok = []
for filename in filenames:
try:
with open(filename, encoding=encoding) as fp:
self._read(fp, filename)
except IOError:
continue
read_ok.append(filename)
return read_ok
def read_file(self, f, source=None):
"""Like read() but the argument must be a file-like object.
The `f' argument must be iterable, returning one line at a time.
Optional second argument is the `source' specifying the name of the
file being read. If not given, it is taken from f.name. If `f' has no
`name' attribute, `<???>' is used.
"""
if source is None:
try:
source = f.name
except AttributeError:
source = '<???>'
self._read(f, source)
def read_string(self, string, source='<string>'):
"""Read configuration from a given string."""
sfile = io.StringIO(string)
self.read_file(sfile, source)
    def read_dict(self, dictionary, source='<dict>'):
        """Read configuration from a dictionary.
        Keys are section names, values are dictionaries with keys and values
        that should be present in the section. If the used dictionary type
        preserves order, sections and their keys will be added in order.
        All types held in the dictionary are converted to strings during
        reading, including section names, option names and keys.
        Optional second argument is the `source' specifying the name of the
        dictionary being read.
        """
        # Track what this call added so that, in strict mode, duplicates
        # *within this dictionary* raise while pre-existing sections merge.
        elements_added = set()
        for section, keys in dictionary.items():
            section = str(section)
            try:
                self.add_section(section)
            except (DuplicateSectionError, ValueError):
                # ValueError covers the DEFAULT section, which always exists.
                if self._strict and section in elements_added:
                    raise
            elements_added.add(section)
            for key, value in keys.items():
                key = self.optionxform(str(key))
                if value is not None:
                    value = str(value)
                if self._strict and (section, key) in elements_added:
                    raise DuplicateOptionError(section, key, source)
                elements_added.add((section, key))
                self.set(section, key, value)
    def readfp(self, fp, filename=None):
        """Deprecated, use read_file instead."""
        # Kept only for backwards compatibility; emits a DeprecationWarning
        # pointing callers at read_file().
        warnings.warn(
            "This method will be removed in future versions. "
            "Use 'parser.read_file()' instead.",
            DeprecationWarning, stacklevel=2
        )
        self.read_file(fp, source=filename)
    def get(self, section, option, *, raw=False, vars=None, fallback=_UNSET):
        """Get an option value for a given section.
        If `vars' is provided, it must be a dictionary. The option is looked up
        in `vars' (if provided), `section', and in `DEFAULTSECT' in that order.
        If the key is not found and `fallback' is provided, it is used as
        a fallback value. `None' can be provided as a `fallback' value.
        If interpolation is enabled and the optional argument `raw' is False,
        all interpolations are expanded in the return values.
        Arguments `raw', `vars', and `fallback' are keyword only.
        The section DEFAULT is special.
        """
        try:
            # Layered lookup: vars -> section -> DEFAULT (a ChainMap).
            d = self._unify_values(section, vars)
        except NoSectionError:
            if fallback is _UNSET:
                raise
            else:
                return fallback
        option = self.optionxform(option)
        try:
            value = d[option]
        except KeyError:
            if fallback is _UNSET:
                raise NoOptionError(option, section)
            else:
                return fallback
        # None marks a valueless option; never run it through interpolation.
        if raw or value is None:
            return value
        else:
            return self._interpolation.before_get(self, section, option, value,
                                                  d)
def _get(self, section, conv, option, **kwargs):
return conv(self.get(section, option, **kwargs))
def getint(self, section, option, *, raw=False, vars=None,
fallback=_UNSET):
try:
return self._get(section, int, option, raw=raw, vars=vars)
except (NoSectionError, NoOptionError):
if fallback is _UNSET:
raise
else:
return fallback
def getfloat(self, section, option, *, raw=False, vars=None,
fallback=_UNSET):
try:
return self._get(section, float, option, raw=raw, vars=vars)
except (NoSectionError, NoOptionError):
if fallback is _UNSET:
raise
else:
return fallback
def getboolean(self, section, option, *, raw=False, vars=None,
fallback=_UNSET):
try:
return self._get(section, self._convert_to_boolean, option,
raw=raw, vars=vars)
except (NoSectionError, NoOptionError):
if fallback is _UNSET:
raise
else:
return fallback
    def items(self, section=_UNSET, raw=False, vars=None):
        """Return a list of (name, value) tuples for each option in a section.
        All % interpolations are expanded in the return values, based on the
        defaults passed into the constructor, unless the optional argument
        `raw' is true. Additional substitutions may be provided using the
        `vars' argument, which must be a dictionary whose contents overrides
        any pre-existing defaults.
        The section DEFAULT is special.
        """
        # Called without a section, behave as the mapping-protocol items()
        # (pairs of section name and SectionProxy).
        if section is _UNSET:
            return super().items()
        d = self._defaults.copy()
        try:
            d.update(self._sections[section])
        except KeyError:
            if section != self.default_section:
                raise NoSectionError(section)
        # Update with the entry specific variables
        if vars:
            for key, value in vars.items():
                d[self.optionxform(key)] = value
        # NOTE: the lambda closes over d/section, so it must be built after
        # d is fully populated.
        value_getter = lambda option: self._interpolation.before_get(self,
            section, option, d[option], d)
        if raw:
            value_getter = lambda option: d[option]
        return [(option, value_getter(option)) for option in d.keys()]
def popitem(self):
"""Remove a section from the parser and return it as
a (section_name, section_proxy) tuple. If no section is present, raise
KeyError.
The section DEFAULT is never returned because it cannot be removed.
"""
for key in self.sections():
value = self[key]
del self[key]
return key, value
raise KeyError
    def optionxform(self, optionstr):
        """Canonicalize an option name (default: lowercase).

        Subclasses may override this to make option names case-sensitive
        or apply other normalization; it is applied on both read and write.
        """
        return optionstr.lower()
def has_option(self, section, option):
"""Check for the existence of a given option in a given section.
If the specified `section' is None or an empty string, DEFAULT is
assumed. If the specified `section' does not exist, returns False."""
if not section or section == self.default_section:
option = self.optionxform(option)
return option in self._defaults
elif section not in self._sections:
return False
else:
option = self.optionxform(option)
return (option in self._sections[section]
or option in self._defaults)
def set(self, section, option, value=None):
"""Set an option."""
if value:
value = self._interpolation.before_set(self, section, option,
value)
if not section or section == self.default_section:
sectdict = self._defaults
else:
try:
sectdict = self._sections[section]
except KeyError:
raise NoSectionError(section)
sectdict[self.optionxform(option)] = value
def write(self, fp, space_around_delimiters=True):
"""Write an .ini-format representation of the configuration state.
If `space_around_delimiters' is True (the default), delimiters
between keys and values are surrounded by spaces.
"""
if space_around_delimiters:
d = " {} ".format(self._delimiters[0])
else:
d = self._delimiters[0]
if self._defaults:
self._write_section(fp, self.default_section,
self._defaults.items(), d)
for section in self._sections:
self._write_section(fp, section,
self._sections[section].items(), d)
def _write_section(self, fp, section_name, section_items, delimiter):
"""Write a single section to the specified `fp'."""
fp.write("[{}]\n".format(section_name))
for key, value in section_items:
value = self._interpolation.before_write(self, section_name, key,
value)
if value is not None or not self._allow_no_value:
value = delimiter + str(value).replace('\n', '\n\t')
else:
value = ""
fp.write("{}{}\n".format(key, value))
fp.write("\n")
def remove_option(self, section, option):
"""Remove an option."""
if not section or section == self.default_section:
sectdict = self._defaults
else:
try:
sectdict = self._sections[section]
except KeyError:
raise NoSectionError(section)
option = self.optionxform(option)
existed = option in sectdict
if existed:
del sectdict[option]
return existed
def remove_section(self, section):
"""Remove a file section."""
existed = section in self._sections
if existed:
del self._sections[section]
del self._proxies[section]
return existed
    def __getitem__(self, key):
        # Mapping-protocol access: return the SectionProxy view for `key`.
        # DEFAULT always resolves even though has_section() excludes it.
        if key != self.default_section and not self.has_section(key):
            raise KeyError(key)
        return self._proxies[key]
    def __setitem__(self, key, value):
        # To conform with the mapping protocol, overwrites existing values in
        # the section.
        # XXX this is not atomic if read_dict fails at any point. Then again,
        # no update method in configparser is atomic in this implementation.
        if key == self.default_section:
            self._defaults.clear()
        elif key in self._sections:
            self._sections[key].clear()
        # Rebuild the section contents from the supplied mapping.
        self.read_dict({key: value})
    def __delitem__(self, key):
        # Mapping-protocol deletion; DEFAULT is permanent by design.
        if key == self.default_section:
            raise ValueError("Cannot remove the default section.")
        if not self.has_section(key):
            raise KeyError(key)
        self.remove_section(key)
def __contains__(self, key):
return key == self.default_section or self.has_section(key)
def __len__(self):
return len(self._sections) + 1 # the default section
def __iter__(self):
# XXX does it break when underlying container state changed?
return itertools.chain((self.default_section,), self._sections.keys())
    def _read(self, fp, fpname):
        """Parse a sectioned configuration file.
        Each section in a configuration file contains a header, indicated by
        a name in square brackets (`[]'), plus key/value options, indicated by
        `name' and `value' delimited with a specific substring (`=' or `:' by
        default).
        Values can span multiple lines, as long as they are indented deeper
        than the first line of the value. Depending on the parser's mode, blank
        lines may be treated as parts of multiline values or ignored.
        Configuration files may include comments, prefixed by specific
        characters (`#' and `;' by default). Comments may appear on their own
        in an otherwise empty line or may be entered in lines holding values or
        section names.
        """
        # Multiline option values are accumulated as lists of line fragments;
        # _join_multiline_values() collapses them to strings afterwards.
        elements_added = set()
        cursect = None                        # None, or a dictionary
        sectname = None
        optname = None
        lineno = 0
        indent_level = 0
        e = None                              # None, or an exception
        for lineno, line in enumerate(fp, start=1):
            comment_start = sys.maxsize
            # strip inline comments
            inline_prefixes = {p: -1 for p in self._inline_comment_prefixes}
            while comment_start == sys.maxsize and inline_prefixes:
                next_prefixes = {}
                for prefix, index in inline_prefixes.items():
                    index = line.find(prefix, index+1)
                    if index == -1:
                        continue
                    next_prefixes[prefix] = index
                    # Only a prefix at line start or preceded by whitespace
                    # counts as a comment.
                    if index == 0 or (index > 0 and line[index-1].isspace()):
                        comment_start = min(comment_start, index)
                inline_prefixes = next_prefixes
            # strip full line comments
            for prefix in self._comment_prefixes:
                if line.strip().startswith(prefix):
                    comment_start = 0
                    break
            if comment_start == sys.maxsize:
                comment_start = None
            value = line[:comment_start].strip()
            if not value:
                if self._empty_lines_in_values:
                    # add empty line to the value, but only if there was no
                    # comment on the line
                    if (comment_start is None and
                            cursect is not None and
                            optname and
                            cursect[optname] is not None):
                        cursect[optname].append('') # newlines added at join
                else:
                    # empty line marks end of value
                    indent_level = sys.maxsize
                continue
            # continuation line?
            first_nonspace = self.NONSPACECRE.search(line)
            cur_indent_level = first_nonspace.start() if first_nonspace else 0
            if (cursect is not None and optname and
                cur_indent_level > indent_level):
                cursect[optname].append(value)
            # a section header or option header?
            else:
                indent_level = cur_indent_level
                # is it a section header?
                mo = self.SECTCRE.match(value)
                if mo:
                    sectname = mo.group('header')
                    if sectname in self._sections:
                        if self._strict and sectname in elements_added:
                            raise DuplicateSectionError(sectname, fpname,
                                                        lineno)
                        cursect = self._sections[sectname]
                        elements_added.add(sectname)
                    elif sectname == self.default_section:
                        cursect = self._defaults
                    else:
                        cursect = self._dict()
                        self._sections[sectname] = cursect
                        self._proxies[sectname] = SectionProxy(self, sectname)
                        elements_added.add(sectname)
                    # So sections can't start with a continuation line
                    optname = None
                # no section header in the file?
                elif cursect is None:
                    raise MissingSectionHeaderError(fpname, lineno, line)
                # an option line?
                else:
                    mo = self._optcre.match(value)
                    if mo:
                        optname, vi, optval = mo.group('option', 'vi', 'value')
                        if not optname:
                            e = self._handle_error(e, fpname, lineno, line)
                        optname = self.optionxform(optname.rstrip())
                        if (self._strict and
                                (sectname, optname) in elements_added):
                            raise DuplicateOptionError(sectname, optname,
                                                       fpname, lineno)
                        elements_added.add((sectname, optname))
                        # This check is fine because the OPTCRE cannot
                        # match if it would set optval to None
                        if optval is not None:
                            optval = optval.strip()
                            cursect[optname] = [optval]
                        else:
                            # valueless option handling
                            cursect[optname] = None
                    else:
                        # a non-fatal parsing error occurred. set up the
                        # exception but keep going. the exception will be
                        # raised at the end of the file and will contain a
                        # list of all bogus lines
                        e = self._handle_error(e, fpname, lineno, line)
        # if any parsing errors occurred, raise an exception
        if e:
            raise e
        self._join_multiline_values()
def _join_multiline_values(self):
defaults = self.default_section, self._defaults
all_sections = itertools.chain((defaults,),
self._sections.items())
for section, options in all_sections:
for name, val in options.items():
if isinstance(val, list):
val = '\n'.join(val).rstrip()
options[name] = self._interpolation.before_read(self,
section,
name, val)
    def _handle_error(self, exc, fpname, lineno, line):
        # Lazily create a single ParsingError per file and accumulate every
        # bogus line on it; _read() raises it once the whole file is scanned.
        if not exc:
            exc = ParsingError(fpname)
        exc.append(lineno, repr(line))
        return exc
    def _unify_values(self, section, vars):
        """Create a sequence of lookups with 'vars' taking priority over
        the 'section' which takes priority over the DEFAULTSECT.
        """
        sectiondict = {}
        try:
            sectiondict = self._sections[section]
        except KeyError:
            # Looking up the DEFAULT section itself is allowed and yields an
            # empty section layer on top of the defaults.
            if section != self.default_section:
                raise NoSectionError(section)
        # Update with the entry specific variables
        vardict = {}
        if vars:
            for key, value in vars.items():
                if value is not None:
                    value = str(value)
                vardict[self.optionxform(key)] = value
        # First mapping wins on lookup: vars, then section, then defaults.
        return _ChainMap(vardict, sectiondict, self._defaults)
def _convert_to_boolean(self, value):
"""Return a boolean value translating from other types if necessary.
"""
if value.lower() not in self.BOOLEAN_STATES:
raise ValueError('Not a boolean: %s' % value)
return self.BOOLEAN_STATES[value.lower()]
def _validate_value_types(self, *, section="", option="", value=""):
"""Raises a TypeError for non-string values.
The only legal non-string value if we allow valueless
options is None, so we need to check if the value is a
string if:
- we do not allow valueless options, or
- we allow valueless options but the value is not None
For compatibility reasons this method is not used in classic set()
for RawConfigParsers. It is invoked in every case for mapping protocol
access and in ConfigParser.set().
"""
if not isinstance(section, str):
raise TypeError("section names must be strings")
if not isinstance(option, str):
raise TypeError("option keys must be strings")
if not self._allow_no_value or value:
if not isinstance(value, str):
raise TypeError("option values must be strings")
class ConfigParser(RawConfigParser):
    """ConfigParser implementing interpolation."""
    # Unlike RawConfigParser (whose default interpolation is a no-op here),
    # values are run through BasicInterpolation: %(name)s references are
    # expanded on read and validated on write.
    _DEFAULT_INTERPOLATION = BasicInterpolation()
    def set(self, section, option, value=None):
        """Set an option.  Extends RawConfigParser.set by validating type and
        interpolation syntax on the value."""
        self._validate_value_types(option=option, value=value)
        super().set(section, option, value)
    def add_section(self, section):
        """Create a new section in the configuration.  Extends
        RawConfigParser.add_section by validating if the section name is
        a string."""
        self._validate_value_types(section=section)
        super().add_section(section)
class SafeConfigParser(ConfigParser):
    """ConfigParser alias for backwards compatibility purposes."""
    def __init__(self, *args, **kwargs):
        # Behaves exactly like ConfigParser; instantiating it only adds a
        # deprecation warning steering callers to the new name.
        super().__init__(*args, **kwargs)
        warnings.warn(
            "The SafeConfigParser class has been renamed to ConfigParser "
            "in Python 3.2. This alias will be removed in future versions."
            " Use ConfigParser directly instead.",
            DeprecationWarning, stacklevel=2
        )
class SectionProxy(MutableMapping):
    """A proxy for a single section from a parser.

    Implements the MutableMapping protocol by delegating every operation to
    the owning parser, so ``parser['section']['option']`` works like
    ``parser.get('section', 'option')``.
    """
    def __init__(self, parser, name):
        """Creates a view on a section of the specified `name` in `parser`."""
        self._parser = parser
        self._name = name
    def __repr__(self):
        return '<Section: {}>'.format(self._name)
    def __getitem__(self, key):
        if not self._parser.has_option(self._name, key):
            raise KeyError(key)
        return self._parser.get(self._name, key)
    def __setitem__(self, key, value):
        # Mapping-protocol writes are type-validated even on RawConfigParser.
        self._parser._validate_value_types(option=key, value=value)
        return self._parser.set(self._name, key, value)
    def __delitem__(self, key):
        if not (self._parser.has_option(self._name, key) and
                self._parser.remove_option(self._name, key)):
            raise KeyError(key)
    def __contains__(self, key):
        return self._parser.has_option(self._name, key)
    def __len__(self):
        return len(self._options())
    def __iter__(self):
        return self._options().__iter__()
    def _options(self):
        # The DEFAULT section has no entry in parser.options(); read it from
        # parser.defaults() instead.
        if self._name != self._parser.default_section:
            return self._parser.options(self._name)
        else:
            return self._parser.defaults()
    def get(self, option, fallback=None, *, raw=False, vars=None):
        return self._parser.get(self._name, option, raw=raw, vars=vars,
                                fallback=fallback)
    def getint(self, option, fallback=None, *, raw=False, vars=None):
        return self._parser.getint(self._name, option, raw=raw, vars=vars,
                                   fallback=fallback)
    def getfloat(self, option, fallback=None, *, raw=False, vars=None):
        return self._parser.getfloat(self._name, option, raw=raw, vars=vars,
                                     fallback=fallback)
    def getboolean(self, option, fallback=None, *, raw=False, vars=None):
        return self._parser.getboolean(self._name, option, raw=raw, vars=vars,
                                       fallback=fallback)
    @property
    def parser(self):
        # The parser object of the proxy is read-only.
        return self._parser
    @property
    def name(self):
        # The name of the section on a proxy is read-only.
        return self._name
| gpl-3.0 |
mapr/hue | desktop/core/ext-py/Django-1.6.10/tests/one_to_one/tests.py | 49 | 5781 | from __future__ import absolute_import
from django.db import transaction, IntegrityError
from django.test import TestCase
from .models import (Place, Restaurant, Waiter, ManualPrimaryKey, RelatedModel,
MultiModel)
class OneToOneTests(TestCase):
    """Exercises OneToOneField behaviour: forward/reverse accessors,
    assignment in both directions, lookups spanning the relation, cascade
    deletion, and multiple one-to-one fields on one model."""
    def setUp(self):
        # Two places; only p1 gets an associated Restaurant.
        self.p1 = Place(name='Demon Dogs', address='944 W. Fullerton')
        self.p1.save()
        self.p2 = Place(name='Ace Hardware', address='1013 N. Ashland')
        self.p2.save()
        self.r = Restaurant(place=self.p1, serves_hot_dogs=True, serves_pizza=False)
        self.r.save()
    def test_getter(self):
        # A Restaurant can access its place.
        self.assertEqual(repr(self.r.place), '<Place: Demon Dogs the place>')
        # A Place can access its restaurant, if available.
        self.assertEqual(repr(self.p1.restaurant), '<Restaurant: Demon Dogs the restaurant>')
        # p2 doesn't have an associated restaurant.
        with self.assertRaisesMessage(Restaurant.DoesNotExist, 'Place has no restaurant'):
            self.p2.restaurant
    def test_setter(self):
        # Set the place using assignment notation. Because place is the primary
        # key on Restaurant, the save will create a new restaurant
        self.r.place = self.p2
        self.r.save()
        self.assertEqual(repr(self.p2.restaurant), '<Restaurant: Ace Hardware the restaurant>')
        self.assertEqual(repr(self.r.place), '<Place: Ace Hardware the place>')
        self.assertEqual(self.p2.pk, self.r.pk)
        # Set the place back again, using assignment in the reverse direction.
        self.p1.restaurant = self.r
        self.assertEqual(repr(self.p1.restaurant), '<Restaurant: Demon Dogs the restaurant>')
        r = Restaurant.objects.get(pk=self.p1.id)
        self.assertEqual(repr(r.place), '<Place: Demon Dogs the place>')
    def test_manager_all(self):
        # Restaurant.objects.all() just returns the Restaurants, not the Places.
        self.assertQuerysetEqual(Restaurant.objects.all(), [
            '<Restaurant: Demon Dogs the restaurant>',
        ])
        # Place.objects.all() returns all Places, regardless of whether they
        # have Restaurants.
        self.assertQuerysetEqual(Place.objects.order_by('name'), [
            '<Place: Ace Hardware the place>',
            '<Place: Demon Dogs the place>',
        ])
    def test_manager_get(self):
        # All of these spellings resolve to the same Restaurant row.
        def assert_get_restaurant(**params):
            self.assertEqual(repr(Restaurant.objects.get(**params)),
                             '<Restaurant: Demon Dogs the restaurant>')
        assert_get_restaurant(place__id__exact=self.p1.pk)
        assert_get_restaurant(place__id=self.p1.pk)
        assert_get_restaurant(place__exact=self.p1.pk)
        assert_get_restaurant(place__exact=self.p1)
        assert_get_restaurant(place=self.p1.pk)
        assert_get_restaurant(place=self.p1)
        assert_get_restaurant(pk=self.p1.pk)
        assert_get_restaurant(place__pk__exact=self.p1.pk)
        assert_get_restaurant(place__pk=self.p1.pk)
        assert_get_restaurant(place__name__startswith="Demon")
        # Reverse lookups from Place through the restaurant relation.
        def assert_get_place(**params):
            self.assertEqual(repr(Place.objects.get(**params)),
                             '<Place: Demon Dogs the place>')
        assert_get_place(restaurant__place__exact=self.p1.pk)
        assert_get_place(restaurant__place__exact=self.p1)
        assert_get_place(restaurant__place__pk=self.p1.pk)
        assert_get_place(restaurant__exact=self.p1.pk)
        assert_get_place(restaurant__exact=self.r)
        assert_get_place(restaurant__pk=self.p1.pk)
        assert_get_place(restaurant=self.p1.pk)
        assert_get_place(restaurant=self.r)
        assert_get_place(id__exact=self.p1.pk)
        assert_get_place(pk=self.p1.pk)
    def test_foreign_key(self):
        # Add a Waiter to the Restaurant.
        w = self.r.waiter_set.create(name='Joe')
        w.save()
        self.assertEqual(repr(w), '<Waiter: Joe the waiter at Demon Dogs the restaurant>')
        # Query the waiters
        def assert_filter_waiters(**params):
            self.assertQuerysetEqual(Waiter.objects.filter(**params), [
                '<Waiter: Joe the waiter at Demon Dogs the restaurant>'
            ])
        assert_filter_waiters(restaurant__place__exact=self.p1.pk)
        assert_filter_waiters(restaurant__place__exact=self.p1)
        assert_filter_waiters(restaurant__place__pk=self.p1.pk)
        assert_filter_waiters(restaurant__exact=self.p1.pk)
        assert_filter_waiters(restaurant__exact=self.p1)
        assert_filter_waiters(restaurant__pk=self.p1.pk)
        assert_filter_waiters(restaurant=self.p1.pk)
        assert_filter_waiters(restaurant=self.r)
        assert_filter_waiters(id__exact=self.p1.pk)
        assert_filter_waiters(pk=self.p1.pk)
        # Delete the restaurant; the waiter should also be removed
        r = Restaurant.objects.get(pk=self.p1.pk)
        r.delete()
        self.assertEqual(Waiter.objects.count(), 0)
    def test_multiple_o2o(self):
        # One-to-one fields still work if you create your own primary key
        o1 = ManualPrimaryKey(primary_key="abc123", name="primary")
        o1.save()
        o2 = RelatedModel(link=o1, name="secondary")
        o2.save()
        # You can have multiple one-to-one fields on a model, too.
        x1 = MultiModel(link1=self.p1, link2=o1, name="x1")
        x1.save()
        self.assertEqual(repr(o1.multimodel), '<MultiModel: Multimodel x1>')
        # This will fail because each one-to-one field must be unique (and
        # link2=o1 was used for x1, above).
        mm = MultiModel(link1=self.p2, link2=o1, name="x1")
        with self.assertRaises(IntegrityError):
            with transaction.atomic():
                mm.save()
| apache-2.0 |
android-ia/platform_external_chromium_org | tools/flakiness/is_flaky_test.py | 94 | 2089 | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Unit tests for is_flaky."""
import is_flaky
import subprocess
import sys
import threading
import unittest
class IsFlakyTest(unittest.TestCase):
def setUp(self):
self.original_subprocess_check_call = subprocess.check_call
subprocess.check_call = self.mock_check_call
self.check_call_calls = []
self.check_call_results = []
is_flaky.load_options = self.mock_load_options
def tearDown(self):
subprocess.check_call = self.original_subprocess_check_call
def mock_check_call(self, command, stdout, stderr):
self.check_call_calls.append(command)
if self.check_call_results:
return self.check_call_results.pop(0)
else:
return 0
def mock_load_options(self):
class MockOptions():
jobs = 2
retries = 10
threshold = 0.3
command = ['command', 'param1', 'param2']
return MockOptions()
def testExecutesTestCorrectNumberOfTimes(self):
is_flaky.main()
self.assertEqual(len(self.check_call_calls), 10)
def testExecutesTestWithCorrectArguments(self):
is_flaky.main()
for call in self.check_call_calls:
self.assertEqual(call, ['command', 'param1', 'param2'])
def testReturnsNonFlakyForAllSuccesses(self):
self.check_call_results = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
ret_code = is_flaky.main()
self.assertEqual(ret_code, 0)
def testReturnsNonFlakyForAllFailures(self):
self.check_call_results = [1, 1, 1, 1, 1, 1, 1, 1, 1, 1]
ret_code = is_flaky.main()
self.assertEqual(ret_code, 0)
def testReturnsNonFlakyForSmallNumberOfFailures(self):
self.check_call_results = [1, 0, 1, 0, 0, 0, 0, 0, 0, 0]
ret_code = is_flaky.main()
self.assertEqual(ret_code, 0)
def testReturnsFlakyForLargeNumberOfFailures(self):
self.check_call_results = [1, 1, 1, 0, 1, 0, 0, 0, 0, 0]
ret_code = is_flaky.main()
self.assertEqual(ret_code, 1)
# Allow running this test file directly (outside a test runner).
if __name__ == '__main__':
  unittest.main()
| bsd-3-clause |
chinmaygarde/mojo | sky/engine/build/scripts/make_element_lookup_trie.py | 3 | 5136 | #!/usr/bin/env python
# Copyright (C) 2013 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from itertools import groupby, islice
import sys
import in_generator
import template_expander
# Name of the input parameter in the generated lookup code; referenced when
# emitting the per-character comparison expressions in _conditions().
PARAMETER_NAME = 'data'
def _trie(tags, index):
    """Build a (partly compacted) trie for `tags`, branching at `index`.

    The result is a lazy generator of 4-tuples:
        (char, subtrie, tag, conditions)
    Branch nodes carry a nested trie generator in `subtrie` (tag and
    conditions are None); leaf nodes carry the full tag plus the suffix
    checks produced by _conditions() (subtrie is None).  Once a branch is
    down to a single tag, the whole remaining suffix is compacted into one
    leaf (semi-radix behaviour); branch nodes with a single child are NOT
    yet merged (FIXME below).

    `tags` must be a sorted list (groupby relies on the sort; list is needed
    for len) sharing a common prefix up to `index`.
    """
    def make_node(char, group):
        # Materialize the groupby group: we need len() and may recurse.
        members = list(group)
        if len(members) == 1:
            # Terminal node: compact the remaining suffix into conditions.
            sole = members[0]
            return char, None, sole, _conditions(sole, index + 1)
        # Branch node: recurse one character deeper.
        return char, _trie(members, index + 1), None, None

    def branch_char(tag):
        return tag[index].lower()

    # FIXME: if all subtags have a common prefix, merge with child
    # and skip the switch in the generated code
    return (make_node(char, group)
            for char, group in groupby(tags, branch_char))
def _conditions(tag, index):
    # Boolean expressions checking the tag's remaining suffix characters
    # (from `index` to the end); these let a single-leaf branch be compacted
    # into one trie node.
    suffix = islice(enumerate(tag), index, None)
    return ["%s[%d] == '%c'" % (PARAMETER_NAME, pos, ch.lower())
            for pos, ch in suffix]
class ElementLookupTrieWriter(in_generator.Writer):
    """Generates <namespace>ElementLookupTrie.{h,cpp} from an .in file of
    tag names, using the Jinja templates referenced below."""
    # FIXME: Inherit all these from somewhere.
    defaults = {
        'JSInterfaceName': None,
        'constructorNeedsCreatedByParser': None,
        'interfaceName': None,
        'noConstructor': None,
        'runtimeEnabled': None,
    }
    default_parameters = {
        'namespace': '',
        'fallbackInterfaceName': '',
        'fallbackJSInterfaceName': '',
    }
    def __init__(self, in_file_paths):
        super(ElementLookupTrieWriter, self).__init__(in_file_paths)
        self._tags = [entry['name'] for entry in self.in_file.name_dictionaries]
        self._namespace = self.in_file.parameters['namespace'].strip('"')
        # Map output filename -> generator method; the base Writer drives
        # these when producing files.
        self._outputs = {
            (self._namespace + 'ElementLookupTrie.h'): self.generate_header,
            (self._namespace + 'ElementLookupTrie.cpp'): self.generate_implementation,
        }
    @template_expander.use_jinja('ElementLookupTrie.h.tmpl')
    def generate_header(self):
        return {
            'namespace': self._namespace,
        }
    @template_expander.use_jinja('ElementLookupTrie.cpp.tmpl')
    def generate_implementation(self):
        # First sort, so groupby works
        self._tags.sort(key=lambda tag: (len(tag), tag))
        # Group tags by length
        length_tags = ((k, g) for k, g in groupby(self._tags, len))
        # One trie per tag length; the template switches on length first.
        return {
            'namespace': self._namespace,
            'length_tries': ((length, _trie(tags, 0))
                             for length, tags in length_tags),
        }
# Script entry point: drive the writer through the shared in_generator
# harness (parses argv for input/output paths).
if __name__ == '__main__':
    in_generator.Maker(ElementLookupTrieWriter).main(sys.argv)
| bsd-3-clause |
shsingh/ansible | test/units/modules/network/eos/test_eos_bgp.py | 38 | 10035 | #
# (c) 2019, Ansible by Red Hat, inc
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.module_utils.network.eos.providers.cli.config.bgp.process import Provider
from ansible.modules.network.eos import eos_bgp
from .eos_module import TestEosModule, load_fixture
class TestFrrBgpModule(TestEosModule):
module = eos_bgp
    def setUp(self):
        # Load the reference running-config fixture once per test; each test
        # renders a candidate config against it.
        super(TestFrrBgpModule, self).setUp()
        self._bgp_config = load_fixture('eos_bgp_config.cfg')
def test_eos_bgp(self):
obj = Provider(params=dict(config=dict(bgp_as=64496, router_id='192.0.2.2', networks=None,
address_family=None), operation='merge'))
commands = obj.render(self._bgp_config)
self.assertEqual(commands, ['router bgp 64496', 'router-id 192.0.2.2', 'exit'])
def test_eos_bgp_idempotent(self):
obj = Provider(params=dict(config=dict(bgp_as=64496, router_id='192.0.2.1',
networks=None, address_family=None), operation='merge'))
commands = obj.render(self._bgp_config)
self.assertEqual(commands, [])
def test_eos_bgp_remove(self):
obj = Provider(params=dict(config=dict(bgp_as=64496, networks=None, address_family=None), operation='delete'))
commands = obj.render(self._bgp_config)
self.assertEqual(commands, ['no router bgp 64496'])
def test_eos_bgp_neighbor(self):
obj = Provider(params=dict(config=dict(bgp_as=64496, neighbors=[dict(neighbor='198.51.100.12', remote_as=64498)],
networks=None, address_family=None),
operation='merge'))
commands = obj.render(self._bgp_config)
self.assertEqual(commands, ['router bgp 64496', 'neighbor 198.51.100.12 remote-as 64498', 'exit'])
def test_eos_bgp_neighbor_idempotent(self):
neighbors = [dict(neighbor='198.51.100.102', remote_as=64498, timers=dict(keepalive=300, holdtime=360)),
dict(neighbor='203.0.113.5', remote_as=64511, maximum_prefix=500)]
obj = Provider(params=dict(config=dict(bgp_as=64496, neighbors=neighbors, networks=None, address_family=None),
operation='merge'))
commands = obj.render(self._bgp_config)
self.assertEqual(commands, [])
def test_eos_bgp_network(self):
obj = Provider(
params=dict(config=dict(bgp_as=64496, networks=[dict(prefix='203.0.113.0', masklen=24, route_map='RMAP_1')],
address_family=None),
operation='merge'))
commands = obj.render(self._bgp_config)
self.assertEqual(sorted(commands), sorted(['router bgp 64496', 'network 203.0.113.0/24 route-map RMAP_1', 'exit']))
def test_eos_bgp_network_idempotent(self):
obj = Provider(
params=dict(config=dict(bgp_as=64496, networks=[dict(prefix='192.0.2.0', masklen=27, route_map='RMAP_1'),
dict(prefix='198.51.100.0', masklen=24, route_map='RMAP_2')],
address_family=None),
operation='merge'))
commands = obj.render(self._bgp_config)
self.assertEqual(commands, [])
def test_eos_bgp_redistribute(self):
rd_1 = dict(protocol='rip', route_map='RMAP_1')
config = dict(bgp_as=64496, redistribute=[rd_1], networks=None, address_family=None)
obj = Provider(params=dict(config=config, operation='merge'))
commands = obj.render(self._bgp_config)
cmd = ['router bgp 64496', 'redistribute rip route-map RMAP_1', 'exit']
self.assertEqual(sorted(commands), sorted(cmd))
def test_eos_bgp_redistribute_idempotent(self):
rd_1 = dict(protocol='ospf', route_map='RMAP_1')
config = dict(bgp_as=64496, redistribute=[rd_1], networks=None, address_family=None)
obj = Provider(params=dict(config=config, operation='merge'))
commands = obj.render(self._bgp_config)
self.assertEqual(commands, [])
def test_eos_bgp_address_family_neighbors(self):
af_nbr_1 = dict(neighbor='198.51.100.104', default_originate=True, activate=True)
af_nbr_2 = dict(neighbor='198.51.100.105', activate=True, weight=30, graceful_restart=True)
config = dict(bgp_as=64496, address_family=[dict(afi='ipv4', neighbors=[af_nbr_1, af_nbr_2])],
networks=None)
obj = Provider(params=dict(config=config, operation='merge'))
commands = obj.render(self._bgp_config)
cmd = ['router bgp 64496', 'address-family ipv4', 'neighbor 198.51.100.104 activate',
'neighbor 198.51.100.104 default-originate', 'neighbor 198.51.100.105 weight 30',
'neighbor 198.51.100.105 activate', 'neighbor 198.51.100.105 graceful-restart', 'exit', 'exit']
self.assertEqual(sorted(commands), sorted(cmd))
def test_eos_bgp_address_family_neighbors_idempotent(self):
af_nbr_1 = dict(neighbor='198.51.100.102', activate=True, graceful_restart=True, default_originate=True, weight=25)
af_nbr_2 = dict(neighbor='192.0.2.111', activate=True, default_originate=True)
config = dict(bgp_as=64496, address_family=[dict(afi='ipv4', neighbors=[af_nbr_1, af_nbr_2])],
networks=None)
obj = Provider(params=dict(config=config, operation='merge'))
commands = obj.render(self._bgp_config)
self.assertEqual(commands, [])
def test_eos_bgp_address_family_networks(self):
net = dict(prefix='203.0.113.128', masklen=26, route_map='RMAP_1')
net2 = dict(prefix='203.0.113.192', masklen=26, route_map='RMAP_2')
config = dict(bgp_as=64496, address_family=[dict(afi='ipv4', networks=[net, net2])],
networks=None)
obj = Provider(params=dict(config=config, operation='merge'))
commands = obj.render(self._bgp_config)
cmd = ['router bgp 64496', 'address-family ipv4', 'network 203.0.113.128/26 route-map RMAP_1',
'network 203.0.113.192/26 route-map RMAP_2', 'exit', 'exit']
self.assertEqual(sorted(commands), sorted(cmd))
def test_eos_bgp_address_family_networks_idempotent(self):
net = dict(prefix='2001:db8:8000::', masklen=34, route_map=None)
net2 = dict(prefix='2001:db8:c000::', masklen=34, route_map=None)
config = dict(bgp_as=64496, address_family=[dict(afi='ipv6', networks=[net, net2])],
networks=None)
obj = Provider(params=dict(config=config, operation='merge'))
commands = obj.render(self._bgp_config)
self.assertEqual(commands, [])
def test_eos_bgp_operation_override(self):
net_1 = dict(prefix='2001:0db8:0800::', masklen=38, route_map='RMAP_1')
net_2 = dict(prefix='2001:0db8:1c00::', masklen=38, route_map='RMAP_2')
nbr_1 = dict(neighbor='203.0.113.111', remote_as=64511, update_source='Ethernet2')
nbr_2 = dict(neighbor='203.0.113.120', remote_as=64511, timers=dict(keepalive=300, holdtime=360))
af_nbr_1 = dict(neighbor='203.0.113.111', activate=True)
af_nbr_2 = dict(neighbor='203.0.113.120', activate=True, default_originate=True)
af_1 = dict(afi='ipv4', neighbors=[af_nbr_1, af_nbr_2])
af_2 = dict(afi='ipv6', networks=[net_1, net_2])
config = dict(bgp_as=64496, neighbors=[nbr_1, nbr_2], address_family=[af_1, af_2],
networks=None)
obj = Provider(params=dict(config=config, operation='override'))
commands = obj.render(self._bgp_config)
cmd = ['no router bgp 64496', 'router bgp 64496', 'neighbor 203.0.113.111 remote-as 64511',
'neighbor 203.0.113.111 update-source Ethernet2', 'neighbor 203.0.113.120 remote-as 64511',
'neighbor 203.0.113.120 timers 300 360', 'address-family ipv4',
'neighbor 203.0.113.111 activate', 'neighbor 203.0.113.120 default-originate', 'neighbor 203.0.113.120 activate',
'exit', 'address-family ipv6', 'network 2001:0db8:0800::/38 route-map RMAP_1',
'network 2001:0db8:1c00::/38 route-map RMAP_2',
'exit', 'exit']
self.assertEqual(sorted(commands), sorted(cmd))
def test_eos_bgp_operation_replace(self):
net = dict(prefix='203.0.113.0', masklen=27, route_map='RMAP_1')
net2 = dict(prefix='192.0.2.32', masklen=29, route_map='RMAP_2')
net_3 = dict(prefix='2001:db8:8000::', masklen=34, route_map=None)
net_4 = dict(prefix='2001:db8:c000::', masklen=34, route_map=None)
af_1 = dict(afi='ipv4', networks=[net, net2])
af_2 = dict(afi='ipv6', networks=[net_3, net_4])
config = dict(bgp_as=64496, address_family=[af_1, af_2], networks=None)
obj = Provider(params=dict(config=config, operation='replace'))
commands = obj.render(self._bgp_config)
cmd = ['router bgp 64496', 'address-family ipv4', 'network 203.0.113.0/27 route-map RMAP_1',
'network 192.0.2.32/29 route-map RMAP_2', 'no network 192.0.2.0/27', 'no network 198.51.100.0/24',
'exit', 'exit']
self.assertEqual(sorted(commands), sorted(cmd))
def test_eos_bgp_operation_replace_with_new_as(self):
nbr = dict(neighbor='203.0.113.124', remote_as=64496, update_source='Ethernet3')
config = dict(bgp_as=64497, neighbors=[nbr], networks=None, address_family=None)
obj = Provider(params=dict(config=config, operation='replace'))
commands = obj.render(self._bgp_config)
cmd = ['no router bgp 64496', 'router bgp 64497', 'neighbor 203.0.113.124 remote-as 64496',
'neighbor 203.0.113.124 update-source Ethernet3', 'exit']
self.assertEqual(sorted(commands), sorted(cmd))
| gpl-3.0 |
harryliangye/Public_Opinion_Finder | TextBlob/textblob/base.py | 16 | 2945 | # -*- coding: utf-8 -*-
"""Abstract base classes for models (taggers, noun phrase extractors, etc.)
which define the interface for descendant classes.
.. versionchanged:: 0.7.0
All base classes are defined in the same module, ``textblob.base``.
"""
from __future__ import absolute_import
from abc import ABCMeta, abstractmethod
import nltk
from textblob.compat import with_metaclass
##### POS TAGGERS #####
class BaseTagger(with_metaclass(ABCMeta)):
    """Common base class for all part-of-speech taggers.

    Concrete subclasses are required to provide a ``tag()`` method.
    """

    @abstractmethod
    def tag(self, text, tokenize=True):
        """Tag ``text`` and return the result as a list of
        ``(word, tag)`` tuples.
        """
        return
##### NOUN PHRASE EXTRACTORS #####
class BaseNPExtractor(with_metaclass(ABCMeta)):
    """Common base class for noun-phrase extractors.

    Concrete subclasses must provide an ``extract(text)`` method that
    returns the noun phrases of ``text`` as a list of strings.
    """

    @abstractmethod
    def extract(self, text):
        """Extract and return the noun phrases of ``text`` as a list of strings."""
        return
##### TOKENIZERS #####
class BaseTokenizer(with_metaclass(ABCMeta), nltk.tokenize.api.TokenizerI):
    """Common base class for tokenizers.

    Concrete subclasses must provide a ``tokenize(text)`` method that
    returns the tokens of ``text`` as a list of strings.
    """

    @abstractmethod
    def tokenize(self, text):
        """Split ``text`` into a list of token strings.

        :rtype: list
        """
        return

    def itokenize(self, text, *args, **kwargs):
        """Lazily produce the tokens of ``text``, one at a time.

        .. versionadded:: 0.6.0

        :rtype: generator
        """
        return (token for token in self.tokenize(text, *args, **kwargs))
##### SENTIMENT ANALYZERS ####
# Labels describing the kind of score a sentiment analyzer produces:
# DISCRETE for categorical results, CONTINUOUS for real-valued scores.
DISCRETE = 'ds'
CONTINUOUS = 'co'
class BaseSentimentAnalyzer(with_metaclass(ABCMeta)):
    """Common base class for sentiment analyzers.

    Concrete subclasses implement ``analyze(text)``; calling the base
    implementation first ensures the classifier is trained on demand.
    """

    kind = DISCRETE

    def __init__(self):
        self._trained = False

    def train(self):
        # Mark the analyzer as trained; subclasses perform the real work.
        self._trained = True

    @abstractmethod
    def analyze(self, text):
        """Analyze ``text`` and return the result (typically a tuple,
        float, or dictionary).
        """
        if not self._trained:
            # Train lazily, on first use.
            self.train()
        return None
##### PARSERS #####
class BaseParser(with_metaclass(ABCMeta)):
    """Common base class for parsers.

    Concrete subclasses are required to provide a ``parse()`` method.
    """

    @abstractmethod
    def parse(self, text):
        """Parse ``text`` and return the result."""
        return
| bsd-2-clause |
vivianli32/TravelConnect | flask/lib/python3.4/site-packages/whoosh/lang/isri.py | 95 | 17041 | # -*- coding: utf-8 -*-
#
# Natural Language Toolkit: The ISRI Arabic Stemmer
#
# Copyright (C) 2001-2012 NLTK Proejct
# Algorithm: Kazem Taghva, Rania Elkhoury, and Jeffrey Coombs (2005)
# Author: Hosam Algasaier <hosam_hme@yahoo.com>
# URL: <http://www.nltk.org/>
# For license information, see LICENSE.TXT
"""
ISRI Arabic Stemmer
The algorithm for this stemmer is described in:
Taghva, K., Elkoury, R., and Coombs, J. 2005. Arabic Stemming without a root
dictionary. Information Science Research Institute. University of Nevada, Las
Vegas, USA.
The Information Science Research Institute’s (ISRI) Arabic stemmer shares many
features with the Khoja stemmer. However, the main difference is that ISRI
stemmer does not use root dictionary. Also, if a root is not found, ISRI
stemmer returned normalized form, rather than returning the original
unmodified word.
Additional adjustments were made to improve the algorithm:
1- Adding 60 stop words.
2- Adding the pattern (تفاعيل) to ISRI pattern set.
3- The step 2 in the original algorithm was normalizing all hamza. This step is
discarded because it increases the word ambiguities and changes the original
root.
"""
from __future__ import unicode_literals
import re
class ISRIStemmer(object):
    '''
    ISRI Arabic stemmer based on algorithm: Arabic Stemming without a root dictionary.
    Information Science Research Institute. University of Nevada, Las Vegas, USA.

    A few minor modifications have been made to ISRI basic algorithm.
    See the source code of this module for more information.

    isri.stem(token) returns Arabic root for the given token.

    The ISRI Stemmer requires that all tokens have Unicode string types.
    If you use Python IDLE on Arabic Windows you have to decode text first
    using Arabic '1256' coding.
    '''

    def __init__(self):
        # Working buffer holding the token as it is progressively stemmed.
        # (Placeholder value; stem() always overwrites it before use.)
        self.stm = 'defult none'

        self.p3 = ['\u0643\u0627\u0644', '\u0628\u0627\u0644',
                   '\u0648\u0644\u0644', '\u0648\u0627\u0644']  # length three prefixes
        self.p2 = ['\u0627\u0644', '\u0644\u0644']  # length two prefixes
        self.p1 = ['\u0644', '\u0628', '\u0641', '\u0633', '\u0648',
                   '\u064a', '\u062a', '\u0646', '\u0627']  # length one prefixes
        self.s3 = ['\u062a\u0645\u0644', '\u0647\u0645\u0644',
                   '\u062a\u0627\u0646', '\u062a\u064a\u0646',
                   '\u0643\u0645\u0644']  # length three suffixes
        self.s2 = ['\u0648\u0646', '\u0627\u062a', '\u0627\u0646',
                   '\u064a\u0646', '\u062a\u0646', '\u0643\u0645',
                   '\u0647\u0646', '\u0646\u0627', '\u064a\u0627',
                   '\u0647\u0627', '\u062a\u0645', '\u0643\u0646',
                   '\u0646\u064a', '\u0648\u0627', '\u0645\u0627',
                   '\u0647\u0645']  # length two suffixes
        self.s1 = ['\u0629', '\u0647', '\u064a', '\u0643', '\u062a',
                   '\u0627', '\u0646']  # length one suffixes
        self.pr4 = {0: ['\u0645'], 1: ['\u0627'],
                    2: ['\u0627', '\u0648', '\u064A'], 3: ['\u0629']}  # groups of length four patterns
        self.pr53 = {0: ['\u0627', '\u062a'],
                     1: ['\u0627', '\u064a', '\u0648'],
                     2: ['\u0627', '\u062a', '\u0645'],
                     3: ['\u0645', '\u064a', '\u062a'],
                     4: ['\u0645', '\u062a'],
                     5: ['\u0627', '\u0648'],
                     6: ['\u0627', '\u0645']}  # Groups of length five patterns and length three roots
        self.re_short_vowels = re.compile(r'[\u064B-\u0652]')
        self.re_hamza = re.compile(r'[\u0621\u0624\u0626]')
        self.re_intial_hamza = re.compile(r'^[\u0622\u0623\u0625]')
        # Words returned unchanged instead of being stemmed.
        self.stop_words = ['\u064a\u0643\u0648\u0646',
                           '\u0648\u0644\u064a\u0633',
                           '\u0648\u0643\u0627\u0646',
                           '\u0643\u0630\u0644\u0643',
                           '\u0627\u0644\u062a\u064a',
                           '\u0648\u0628\u064a\u0646',
                           '\u0639\u0644\u064a\u0647\u0627',
                           '\u0645\u0633\u0627\u0621',
                           '\u0627\u0644\u0630\u064a',
                           '\u0648\u0643\u0627\u0646\u062a',
                           '\u0648\u0644\u0643\u0646',
                           '\u0648\u0627\u0644\u062a\u064a',
                           '\u062a\u0643\u0648\u0646',
                           '\u0627\u0644\u064a\u0648\u0645',
                           '\u0627\u0644\u0644\u0630\u064a\u0646',
                           '\u0639\u0644\u064a\u0647',
                           '\u0643\u0627\u0646\u062a',
                           '\u0644\u0630\u0644\u0643',
                           '\u0623\u0645\u0627\u0645',
                           '\u0647\u0646\u0627\u0643',
                           '\u0645\u0646\u0647\u0627',
                           '\u0645\u0627\u0632\u0627\u0644',
                           '\u0644\u0627\u0632\u0627\u0644',
                           '\u0644\u0627\u064a\u0632\u0627\u0644',
                           '\u0645\u0627\u064a\u0632\u0627\u0644',
                           '\u0627\u0635\u0628\u062d',
                           '\u0623\u0635\u0628\u062d',
                           '\u0623\u0645\u0633\u0649',
                           '\u0627\u0645\u0633\u0649',
                           '\u0623\u0636\u062d\u0649',
                           '\u0627\u0636\u062d\u0649',
                           '\u0645\u0627\u0628\u0631\u062d',
                           '\u0645\u0627\u0641\u062a\u0626',
                           '\u0645\u0627\u0627\u0646\u0641\u0643',
                           '\u0644\u0627\u0633\u064a\u0645\u0627',
                           '\u0648\u0644\u0627\u064a\u0632\u0627\u0644',
                           '\u0627\u0644\u062d\u0627\u0644\u064a',
                           '\u0627\u0644\u064a\u0647\u0627',
                           '\u0627\u0644\u0630\u064a\u0646',
                           '\u0641\u0627\u0646\u0647',
                           '\u0648\u0627\u0644\u0630\u064a',
                           '\u0648\u0647\u0630\u0627',
                           '\u0644\u0647\u0630\u0627',
                           '\u0641\u0643\u0627\u0646',
                           '\u0633\u062a\u0643\u0648\u0646',
                           '\u0627\u0644\u064a\u0647',
                           '\u064a\u0645\u0643\u0646',
                           '\u0628\u0647\u0630\u0627',
                           '\u0627\u0644\u0630\u0649']

    def stem(self, token):
        """
        Stemming a word token using the ISRI stemmer.

        :param token: the Arabic word (unicode string) to stem
        :returns: the stemmed form (the extracted root, or a normalized
            form when no root pattern matches)
        """
        self.stm = token
        self.norm(1)  # remove diacritics which representing Arabic short vowels
        if self.stm in self.stop_words:
            return self.stm  # exclude stop words from being processed
        self.pre32()  # remove length three and length two prefixes in this order
        self.suf32()  # remove length three and length two suffixes in this order
        self.waw()  # remove connective ‘و’ if it precedes a word beginning with ‘و’
        self.norm(2)  # normalize initial hamza to bare alif
        if len(self.stm) <= 3:
            return self.stm  # return stem if less than or equal to three
        if len(self.stm) == 4:  # length 4 word
            self.pro_w4()
            return self.stm
        elif len(self.stm) == 5:  # length 5 word
            self.pro_w53()
            self.end_w5()
            return self.stm
        elif len(self.stm) == 6:  # length 6 word
            self.pro_w6()
            self.end_w6()
            return self.stm
        elif len(self.stm) == 7:  # length 7 word
            self.suf1()
            if len(self.stm) == 7:
                self.pre1()
            if len(self.stm) == 6:
                self.pro_w6()
                self.end_w6()
                return self.stm
        return self.stm  # if word length >7 , then no stemming

    def norm(self, num):
        """
        normalization:
        num=1  normalize diacritics
        num=2  normalize initial hamza
        num=3  both 1&2
        """
        self.k = num
        # BUG FIX: the replacement must be the literal alif character
        # '\u0627', not the raw string r'\u0627'.  Under Python 2 with
        # unicode_literals a *raw* string still decoded \u escapes, but on
        # Python 3.7+ re.sub rejects '\u' in replacement templates with
        # "re.error: bad escape \u".
        if self.k == 1:
            self.stm = self.re_short_vowels.sub('', self.stm)
            return self.stm
        elif self.k == 2:
            self.stm = self.re_intial_hamza.sub('\u0627', self.stm)
            return self.stm
        elif self.k == 3:
            self.stm = self.re_short_vowels.sub('', self.stm)
            self.stm = self.re_intial_hamza.sub('\u0627', self.stm)
            return self.stm

    def pre32(self):
        """remove length three and length two prefixes in this order"""
        if len(self.stm) >= 6:
            for pre3 in self.p3:
                if self.stm.startswith(pre3):
                    self.stm = self.stm[3:]
                    return self.stm
        elif len(self.stm) >= 5:
            for pre2 in self.p2:
                if self.stm.startswith(pre2):
                    self.stm = self.stm[2:]
                    return self.stm

    def suf32(self):
        """remove length three and length two suffixes in this order"""
        if len(self.stm) >= 6:
            for suf3 in self.s3:
                if self.stm.endswith(suf3):
                    self.stm = self.stm[:-3]
                    return self.stm
        elif len(self.stm) >= 5:
            for suf2 in self.s2:
                if self.stm.endswith(suf2):
                    self.stm = self.stm[:-2]
                    return self.stm

    def waw(self):
        """remove connective ‘و’ if it precedes a word beginning with ‘و’ """
        if len(self.stm) >= 4 and self.stm[:2] == '\u0648\u0648':
            self.stm = self.stm[1:]
        return self.stm

    def pro_w4(self):
        """process length four patterns and extract length three roots"""
        if self.stm[0] in self.pr4[0]:  # مفعل
            self.stm = self.stm[1:]
            return self.stm
        elif self.stm[1] in self.pr4[1]:  # فاعل
            self.stm = self.stm[0] + self.stm[2:]
            return self.stm
        elif self.stm[2] in self.pr4[2]:  # فعال - فعول - فعيل
            self.stm = self.stm[:2] + self.stm[3]
            return self.stm
        elif self.stm[3] in self.pr4[3]:  # فعلة
            self.stm = self.stm[:-1]
            return self.stm
        else:
            self.suf1()  # do - normalize short sufix
            if len(self.stm) == 4:
                self.pre1()  # do - normalize short prefix
            return self.stm

    def pro_w53(self):
        """process length five patterns and extract length three roots"""
        if self.stm[2] in self.pr53[0] and self.stm[0] == '\u0627':  # افتعل - افاعل
            self.stm = self.stm[1] + self.stm[3:]
            return self.stm
        elif self.stm[3] in self.pr53[1] and self.stm[0] == '\u0645':  # مفعول - مفعال - مفعيل
            self.stm = self.stm[1:3] + self.stm[4]
            return self.stm
        elif self.stm[0] in self.pr53[2] and self.stm[4] == '\u0629':  # مفعلة - تفعلة - افعلة
            self.stm = self.stm[1:4]
            return self.stm
        elif self.stm[0] in self.pr53[3] and self.stm[2] == '\u062a':  # مفتعل - يفتعل - تفتعل
            self.stm = self.stm[1] + self.stm[3:]
            return self.stm
        elif self.stm[0] in self.pr53[4] and self.stm[2] == '\u0627':  # مفاعل - تفاعل
            self.stm = self.stm[1] + self.stm[3:]
            return self.stm
        elif self.stm[2] in self.pr53[5] and self.stm[4] == '\u0629':  # فعولة - فعالة
            self.stm = self.stm[:2] + self.stm[3]
            return self.stm
        elif self.stm[0] in self.pr53[6] and self.stm[1] == '\u0646':  # انفعل - منفعل
            self.stm = self.stm[2:]
            return self.stm
        elif self.stm[3] == '\u0627' and self.stm[0] == '\u0627':  # افعال
            self.stm = self.stm[1:3] + self.stm[4]
            return self.stm
        elif self.stm[4] == '\u0646' and self.stm[3] == '\u0627':  # فعلان
            self.stm = self.stm[:3]
            return self.stm
        elif self.stm[3] == '\u064a' and self.stm[0] == '\u062a':  # تفعيل
            self.stm = self.stm[1:3] + self.stm[4]
            return self.stm
        elif self.stm[3] == '\u0648' and self.stm[1] == '\u0627':  # فاعول
            self.stm = self.stm[0] + self.stm[2] + self.stm[4]
            return self.stm
        elif self.stm[2] == '\u0627' and self.stm[1] == '\u0648':  # فواعل
            self.stm = self.stm[0] + self.stm[3:]
            return self.stm
        elif self.stm[3] == '\u0626' and self.stm[2] == '\u0627':  # فعائل
            self.stm = self.stm[:2] + self.stm[4]
            return self.stm
        elif self.stm[4] == '\u0629' and self.stm[1] == '\u0627':  # فاعلة
            self.stm = self.stm[0] + self.stm[2:4]
            return self.stm
        elif self.stm[4] == '\u064a' and self.stm[2] == '\u0627':  # فعالي
            self.stm = self.stm[:2] + self.stm[3]
            return self.stm
        else:
            self.suf1()  # do - normalize short sufix
            if len(self.stm) == 5:
                self.pre1()  # do - normalize short prefix
            return self.stm

    def pro_w54(self):
        """process length five patterns and extract length four roots"""
        if self.stm[0] in self.pr53[2]:  # تفعلل - افعلل - مفعلل
            self.stm = self.stm[1:]
            return self.stm
        elif self.stm[4] == '\u0629':  # فعللة
            self.stm = self.stm[:4]
            return self.stm
        elif self.stm[2] == '\u0627':  # فعالل
            self.stm = self.stm[:2] + self.stm[3:]
            return self.stm

    def end_w5(self):
        """ending step (word of length five)"""
        if len(self.stm) == 3:
            return self.stm
        elif len(self.stm) == 4:
            self.pro_w4()
            return self.stm
        elif len(self.stm) == 5:
            self.pro_w54()
            return self.stm

    def pro_w6(self):
        """process length six patterns and extract length three roots"""
        if self.stm.startswith('\u0627\u0633\u062a') or self.stm.startswith('\u0645\u0633\u062a'):  # مستفعل - استفعل
            self.stm = self.stm[3:]
            return self.stm
        elif (self.stm[0] == '\u0645' and self.stm[3] == '\u0627' and self.stm[5] == '\u0629'):  # مفعالة
            self.stm = self.stm[1:3] + self.stm[4]
            return self.stm
        elif (self.stm[0] == '\u0627' and self.stm[2] == '\u062a' and self.stm[4] == '\u0627'):  # افتعال
            self.stm = self.stm[1] + self.stm[3] + self.stm[5]
            return self.stm
        elif (self.stm[0] == '\u0627' and self.stm[3] == '\u0648' and self.stm[2] == self.stm[4]):  # افعوعل
            self.stm = self.stm[1] + self.stm[4:]
            return self.stm
        elif (self.stm[0] == '\u062a' and self.stm[2] == '\u0627' and self.stm[4] == '\u064a'):  # تفاعيل new pattern
            self.stm = self.stm[1] + self.stm[3] + self.stm[5]
            return self.stm
        else:
            self.suf1()  # do - normalize short sufix
            if len(self.stm) == 6:
                self.pre1()  # do - normalize short prefix
            return self.stm

    def pro_w64(self):
        """process length six patterns and extract length four roots"""
        # BUG FIX: the original condition was
        #     (self.stm[0] and self.stm[4]) == '\u0627'
        # which, because of operator precedence, only compared stm[4] with
        # alif.  Both the first and fifth letters must be alif for the
        # افعلال pattern to apply.
        if self.stm[0] == '\u0627' and self.stm[4] == '\u0627':  # افعلال
            self.stm = self.stm[1:4] + self.stm[5]
            return self.stm
        elif self.stm.startswith('\u0645\u062a'):  # متفعلل
            self.stm = self.stm[2:]
            return self.stm

    def end_w6(self):
        """ending step (word of length six)"""
        if len(self.stm) == 3:
            return self.stm
        elif len(self.stm) == 5:
            self.pro_w53()
            self.end_w5()
            return self.stm
        elif len(self.stm) == 6:
            self.pro_w64()
            return self.stm

    def suf1(self):
        """normalize short sufix"""
        for sf1 in self.s1:
            if self.stm.endswith(sf1):
                self.stm = self.stm[:-1]
                return self.stm

    def pre1(self):
        """normalize short prefix"""
        for sp1 in self.p1:
            if self.stm.startswith(sp1):
                self.stm = self.stm[1:]
                return self.stm
| mit |
aperigault/ansible | lib/ansible/modules/network/netvisor/_pn_show.py | 47 | 5646 | #!/usr/bin/python
""" PN CLI show commands """
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['deprecated'],
'supported_by': 'community'}
DOCUMENTATION = """
---
module: pn_show
author: "Pluribus Networks (@amitsi)"
version_added: "2.2"
short_description: Run show commands on nvOS device.
deprecated:
removed_in: '2.12'
why: Doesn't support latest Pluribus Networks netvisor
alternative: Latest modules will be pushed in Ansible future versions.
description:
- Execute show command in the nodes and returns the results
read from the device.
options:
pn_cliusername:
description:
- Provide login username if user is not root.
required: False
pn_clipassword:
description:
- Provide login password if user is not root.
required: False
pn_cliswitch:
description:
- Target switch(es) to run the cli on.
required: False
pn_command:
description:
- The C(pn_command) takes a CLI show command as value.
required: true
pn_parameters:
description:
- Display output using a specific parameter. Use 'all' to display
possible output. List of comma separated parameters.
default: 'all'
pn_options:
description:
- Specify formatting options.
"""
EXAMPLES = """
- name: run the vlan-show command
pn_show:
pn_command: 'vlan-show'
pn_parameters: id,scope,ports
pn_options: 'layout vertical'
- name: run the vlag-show command
pn_show:
pn_command: 'vlag-show'
pn_parameters: 'id,name,cluster,mode'
pn_options: 'no-show-headers'
- name: run the cluster-show command
pn_show:
pn_command: 'cluster-show'
"""
RETURN = """
command:
description: The CLI command run on the target node(s).
returned: always
type: str
stdout:
description: The set of responses from the show command.
returned: always
type: list
stderr:
description: The set of error responses from the show command.
returned: on error
type: list
changed:
description: Indicates whether the CLI caused any change on the target.
returned: always(False)
type: bool
"""
import shlex
# AnsibleModule boilerplate
from ansible.module_utils.basic import AnsibleModule
def pn_cli(module):
    """
    Build the base Netvisor CLI invocation string.

    Reads the username, password and target switch from the module
    parameters; credentials are included only when both are supplied.

    :param module: The Ansible module to fetch username, password and switch
    :return: the cli command prefix as a string
    """
    user = module.params['pn_cliusername']
    passwd = module.params['pn_clipassword']
    switch = module.params['pn_cliswitch']
    if user and passwd:
        cli = '/usr/bin/cli --quiet --user ' + user + ':' + passwd + ' '
    else:
        cli = '/usr/bin/cli --quiet '
    if switch:
        # 'local' targets the local switch; anything else is a named switch.
        cli += ' switch-local ' if switch == 'local' else ' switch ' + switch
    return cli
def run_cli(module, cli):
    """
    This method executes the cli command on the target node(s) and returns the
    output. The module then exits based on the output.
    :param cli: the complete cli string to be executed on the target node(s).
    :param module: The Ansible module to fetch command
    """
    cliswitch = module.params['pn_cliswitch']
    command = module.params['pn_command']
    cmd = shlex.split(cli)
    # 'out' contains the output
    # 'err' contains the error messages
    result, out, err = module.run_command(cmd)
    # Keep only the part of the command after the switch name for reporting.
    # NOTE(review): this assumes cliswitch is a non-empty string contained in
    # cli; when pn_cliswitch is omitted, str.split(None) splits on whitespace
    # instead -- confirm that is the intended reporting behavior.
    print_cli = cli.split(cliswitch)[1]
    # Response in JSON format
    if result != 0:
        # Non-zero exit: report stderr; show commands never change state.
        module.exit_json(
            command=print_cli,
            msg='%s: ' % command,
            stderr=err.strip(),
            changed=False
        )
    if out:
        # Success with output: report stdout.
        module.exit_json(
            command=print_cli,
            msg='%s: ' % command,
            stdout=out.strip(),
            changed=False
        )
    else:
        # Success but nothing matched/returned.
        module.exit_json(
            command=cli,
            msg='%s: Nothing to display!!!' % command,
            changed=False
        )
def main():
    """ This section is for arguments parsing """
    # Declare the module's accepted parameters and their constraints.
    module = AnsibleModule(
        argument_spec=dict(
            pn_cliusername=dict(required=True, type='str'),
            pn_clipassword=dict(required=True, type='str', no_log=True),
            pn_cliswitch=dict(required=False, type='str'),
            pn_command=dict(required=True, type='str'),
            pn_parameters=dict(default='all', type='str'),
            pn_options=dict(type='str')
        )
    )
    # Accessing the arguments
    command = module.params['pn_command']
    parameters = module.params['pn_parameters']
    options = module.params['pn_options']
    # Building the CLI command string
    cli = pn_cli(module)
    cli += ' %s format %s ' % (command, parameters)
    if options:
        # Optional formatting flags (e.g. 'no-show-headers') are appended raw.
        cli += options
    run_cli(module, cli)
# Entry point when Ansible executes this module as a script.
if __name__ == '__main__':
    main()
| gpl-3.0 |
direvus/ansible | test/units/executor/module_common/test_recursive_finder.py | 22 | 8032 | # (c) 2017, Toshio Kuratomi <tkuratomi@ansible.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import imp
import pytest
import zipfile
from collections import namedtuple
from functools import partial
from io import BytesIO, StringIO
import ansible.errors
from ansible.executor.module_common import recursive_finder
from ansible.module_utils.six import PY2
from ansible.module_utils.six.moves import builtins
# Keep a reference to the real imp.find_module so the fake finders below can
# delegate to it for everything except the synthetic 'foo' module.
original_find_module = imp.find_module
# These are the modules that are brought in by module_utils/basic.py This may need to be updated
# when basic.py gains new imports
# We will remove these when we modify AnsiBallZ to store its args in a separate file instead of in
# basic.py
MODULE_UTILS_BASIC_IMPORTS = frozenset((('_text',),
                                        ('basic',),
                                        ('common', '__init__'),
                                        ('common', '_collections_compat'),
                                        ('common', 'file'),
                                        ('common', 'process'),
                                        ('parsing', '__init__'),
                                        ('parsing', 'convert_bool'),
                                        ('pycompat24',),
                                        ('six', '__init__'),
                                        ))
MODULE_UTILS_BASIC_FILES = frozenset(('ansible/module_utils/parsing/__init__.py',
                                      'ansible/module_utils/common/process.py',
                                      'ansible/module_utils/basic.py',
                                      'ansible/module_utils/six/__init__.py',
                                      'ansible/module_utils/_text.py',
                                      'ansible/module_utils/common/_collections_compat.py',
                                      'ansible/module_utils/parsing/convert_bool.py',
                                      'ansible/module_utils/common/__init__.py',
                                      'ansible/module_utils/common/file.py',
                                      'ansible/module_utils/pycompat24.py',
                                      ))
# Expected results when a module pulls in nothing beyond basic.py itself.
ONLY_BASIC_IMPORT = frozenset((('basic',),))
ONLY_BASIC_FILE = frozenset(('ansible/module_utils/basic.py',))
@pytest.fixture
def finder_containers():
    """Provide fresh, empty containers for a recursive_finder() call.

    Returns a named tuple of (py_module_names, py_module_cache, zf) so tests
    can unpack it straight into recursive_finder's positional arguments.
    """
    FinderContainers = namedtuple('FinderContainers', ['py_module_names', 'py_module_cache', 'zf'])
    names = set()
    cache = {}
    archive = zipfile.ZipFile(BytesIO(), mode='w', compression=zipfile.ZIP_STORED)
    return FinderContainers(names, cache, archive)
def find_module_foo(module_utils_data, *args, **kwargs):
    """imp.find_module stand-in that serves 'foo' as a source module backed by module_utils_data."""
    if args[0] != 'foo':
        return original_find_module(*args, **kwargs)
    return (module_utils_data, '/usr/lib/python2.7/site-packages/ansible/module_utils/foo.py', ('.py', 'r', imp.PY_SOURCE))
def find_package_foo(module_utils_data, *args, **kwargs):
    """imp.find_module stand-in that reports 'foo' as a package directory."""
    if args[0] != 'foo':
        return original_find_module(*args, **kwargs)
    return (module_utils_data, '/usr/lib/python2.7/site-packages/ansible/module_utils/foo', ('', '', imp.PKG_DIRECTORY))
class TestRecursiveFinder(object):
def test_no_module_utils(self, finder_containers):
name = 'ping'
data = b'#!/usr/bin/python\nreturn \'{\"changed\": false}\''
recursive_finder(name, data, *finder_containers)
assert finder_containers.py_module_names == set(()).union(MODULE_UTILS_BASIC_IMPORTS)
assert finder_containers.py_module_cache == {}
assert frozenset(finder_containers.zf.namelist()) == MODULE_UTILS_BASIC_FILES
def test_from_import_toplevel_package(self, finder_containers, mocker):
if PY2:
module_utils_data = BytesIO(b'# License\ndef do_something():\n pass\n')
else:
module_utils_data = StringIO(u'# License\ndef do_something():\n pass\n')
mocker.patch('imp.find_module', side_effect=partial(find_package_foo, module_utils_data))
mocker.patch('ansible.executor.module_common._slurp', side_effect=lambda x: b'# License\ndef do_something():\n pass\n')
name = 'ping'
data = b'#!/usr/bin/python\nfrom ansible.module_utils import foo'
recursive_finder(name, data, *finder_containers)
mocker.stopall()
assert finder_containers.py_module_names == set((('foo', '__init__'),)).union(ONLY_BASIC_IMPORT)
assert finder_containers.py_module_cache == {}
assert frozenset(finder_containers.zf.namelist()) == frozenset(('ansible/module_utils/foo/__init__.py',)).union(ONLY_BASIC_FILE)
def test_from_import_toplevel_module(self, finder_containers, mocker):
if PY2:
module_utils_data = BytesIO(b'# License\ndef do_something():\n pass\n')
else:
module_utils_data = StringIO(u'# License\ndef do_something():\n pass\n')
mocker.patch('imp.find_module', side_effect=partial(find_module_foo, module_utils_data))
name = 'ping'
data = b'#!/usr/bin/python\nfrom ansible.module_utils import foo'
recursive_finder(name, data, *finder_containers)
mocker.stopall()
assert finder_containers.py_module_names == set((('foo',),)).union(MODULE_UTILS_BASIC_IMPORTS)
assert finder_containers.py_module_cache == {}
assert frozenset(finder_containers.zf.namelist()) == frozenset(('ansible/module_utils/foo.py',)).union(MODULE_UTILS_BASIC_FILES)
#
# Test importing six with many permutations because it is not a normal module
#
def test_from_import_six(self, finder_containers):
    """six pulled in via ``from ansible.module_utils import six``."""
    module_data = b'#!/usr/bin/python\nfrom ansible.module_utils import six'
    recursive_finder('ping', module_data, *finder_containers)

    expected_names = set((('six', '__init__'),)).union(MODULE_UTILS_BASIC_IMPORTS)
    assert finder_containers.py_module_names == expected_names
    # The cache must be fully drained into the zipfile by the time we return.
    assert finder_containers.py_module_cache == {}
    expected_files = frozenset(('ansible/module_utils/six/__init__.py', )).union(MODULE_UTILS_BASIC_FILES)
    assert frozenset(finder_containers.zf.namelist()) == expected_files
def test_import_six(self, finder_containers):
    """six pulled in via a plain ``import ansible.module_utils.six``."""
    module_data = b'#!/usr/bin/python\nimport ansible.module_utils.six'
    recursive_finder('ping', module_data, *finder_containers)

    expected_names = set((('six', '__init__'),)).union(MODULE_UTILS_BASIC_IMPORTS)
    assert finder_containers.py_module_names == expected_names
    # The cache must be fully drained into the zipfile by the time we return.
    assert finder_containers.py_module_cache == {}
    expected_files = frozenset(('ansible/module_utils/six/__init__.py', )).union(MODULE_UTILS_BASIC_FILES)
    assert frozenset(finder_containers.zf.namelist()) == expected_files
def test_import_six_from_many_submodules(self, finder_containers):
    """Importing from deep inside six.moves still only ships six/__init__.py:
    the 'moves' hierarchy is synthesized at runtime, not stored on disk."""
    module_data = b'#!/usr/bin/python\nfrom ansible.module_utils.six.moves.urllib.parse import urlparse'
    recursive_finder('ping', module_data, *finder_containers)

    expected_names = set((('six', '__init__'),)).union(MODULE_UTILS_BASIC_IMPORTS)
    assert finder_containers.py_module_names == expected_names
    assert finder_containers.py_module_cache == {}
    expected_files = frozenset(('ansible/module_utils/six/__init__.py',)).union(MODULE_UTILS_BASIC_FILES)
    assert frozenset(finder_containers.zf.namelist()) == expected_files
| gpl-3.0 |
DailyActie/Surrogate-Model | 00-courses/operators_as _functions.py | 1 | 1783 | # MIT License
#
# Copyright (c) 2016 Daily Actie
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
# Author: Quan Pan <quanpan302@hotmail.com>
# License: MIT License
# Create: 2016-12-02
"""
Links:
https://docs.python.org/2/library/operator.html
operator.
lt(a, b) __lt__(a, b)
le(a, b) __le__(a, b)
eq(a, b) __eq__(a, b)
ne(a, b) __ne__(a, b)
ge(a, b) __ge__(a, b) a >= b
gt(a, b) __gt__(a, b) a > b
mul(a, b) __mul__(a, b) Return a * b, for a and b numbers.
"""
# encoding: utf-8
# module __builtin__
# from (built-in)
# by generator 1.138
from __future__ import print_function
"""
Built-in functions, exceptions, and other objects.
Noteworthy: None is the `nil' object; Ellipsis represents `...' in slices.
"""
| mit |
cherez/youtube-dl | youtube_dl/extractor/rutv.py | 106 | 7702 | # encoding: utf-8
from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..utils import (
ExtractorError,
int_or_none
)
class RUTVIE(InfoExtractor):
    """youtube-dl extractor for the RUTV.RU / vgtrk.com video player."""

    IE_DESC = 'RUTV.RU'

    # Verbose-mode regex ((?x): whitespace ignored) matching the three player
    # entry points; <path> and <type> determine how <id> is interpreted below.
    _VALID_URL = r'''(?x)
        https?://player\.(?:rutv\.ru|vgtrk\.com)/
        (?P<path>flash2v/container\.swf\?id=
        |iframe/(?P<type>swf|video|live)/id/
        |index/iframe/cast_id/)
        (?P<id>\d+)'''

    _TESTS = [
        {
            'url': 'http://player.rutv.ru/flash2v/container.swf?id=774471&sid=kultura&fbv=true&isPlay=true&ssl=false&i=560&acc_video_id=episode_id/972347/video_id/978186/brand_id/31724',
            'info_dict': {
                'id': '774471',
                'ext': 'mp4',
                'title': 'Монологи на все времена',
                'description': 'md5:18d8b5e6a41fb1faa53819471852d5d5',
                'duration': 2906,
            },
            'params': {
                # m3u8 download
                'skip_download': True,
            },
        },
        {
            'url': 'https://player.vgtrk.com/flash2v/container.swf?id=774016&sid=russiatv&fbv=true&isPlay=true&ssl=false&i=560&acc_video_id=episode_id/972098/video_id/977760/brand_id/57638',
            'info_dict': {
                'id': '774016',
                'ext': 'mp4',
                'title': 'Чужой в семье Сталина',
                'description': '',
                'duration': 2539,
            },
            'params': {
                # m3u8 download
                'skip_download': True,
            },
        },
        {
            'url': 'http://player.rutv.ru/iframe/swf/id/766888/sid/hitech/?acc_video_id=4000',
            'info_dict': {
                'id': '766888',
                'ext': 'mp4',
                'title': 'Вести.net: интернет-гиганты начали перетягивание программных "одеял"',
                'description': 'md5:65ddd47f9830c4f42ed6475f8730c995',
                'duration': 279,
            },
            'params': {
                # m3u8 download
                'skip_download': True,
            },
        },
        {
            'url': 'http://player.rutv.ru/iframe/video/id/771852/start_zoom/true/showZoomBtn/false/sid/russiatv/?acc_video_id=episode_id/970443/video_id/975648/brand_id/5169',
            'info_dict': {
                'id': '771852',
                'ext': 'mp4',
                'title': 'Прямой эфир. Жертвы загадочной болезни: смерть от старости в 17 лет',
                'description': 'md5:b81c8c55247a4bd996b43ce17395b2d8',
                'duration': 3096,
            },
            'params': {
                # m3u8 download
                'skip_download': True,
            },
        },
        {
            'url': 'http://player.rutv.ru/iframe/live/id/51499/showZoomBtn/false/isPlay/true/sid/sochi2014',
            'info_dict': {
                'id': '51499',
                'ext': 'flv',
                'title': 'Сочи-2014. Биатлон. Индивидуальная гонка. Мужчины ',
                'description': 'md5:9e0ed5c9d2fa1efbfdfed90c9a6d179c',
            },
            'skip': 'Translation has finished',
        },
        {
            'url': 'http://player.rutv.ru/iframe/live/id/21/showZoomBtn/false/isPlay/true/',
            'info_dict': {
                'id': '21',
                'ext': 'mp4',
                'title': 're:^Россия 24. Прямой эфир [0-9]{4}-[0-9]{2}-[0-9]{2} [0-9]{2}:[0-9]{2}$',
                'is_live': True,
            },
            'params': {
                # m3u8 download
                'skip_download': True,
            },
        },
    ]

    @classmethod
    def _extract_url(cls, webpage):
        """Return the URL of an embedded RUTV player found in *webpage*,
        or None if the page contains no recognizable embed."""
        # First try an <iframe> pointing at the swf/video/live player.
        mobj = re.search(
            r'<iframe[^>]+?src=(["\'])(?P<url>https?://player\.(?:rutv\.ru|vgtrk\.com)/(?:iframe/(?:swf|video|live)/id|index/iframe/cast_id)/.+?)\1', webpage)
        if mobj:
            return mobj.group('url')

        # Fall back to an og:video <meta> tag pointing at the flash container.
        mobj = re.search(
            r'<meta[^>]+?property=(["\'])og:video\1[^>]+?content=(["\'])(?P<url>https?://player\.(?:rutv\.ru|vgtrk\.com)/flash2v/container\.swf\?id=.+?\2)',
            webpage)
        if mobj:
            return mobj.group('url')

    def _real_extract(self, url):
        mobj = re.match(self._VALID_URL, url)
        video_id = mobj.group('id')
        video_path = mobj.group('path')

        # Map the matched URL shape onto one of two playback modes:
        # 'video' (VOD) or 'live'.  'swf' iframes are treated as VOD.
        if video_path.startswith('flash2v'):
            video_type = 'video'
        elif video_path.startswith('iframe'):
            video_type = mobj.group('type')
            if video_type == 'swf':
                video_type = 'video'
        elif video_path.startswith('index/iframe/cast_id'):
            video_type = 'live'

        is_live = video_type == 'live'

        # Live streams use the 'live-play' endpoint instead of 'play'.
        json_data = self._download_json(
            'http://player.rutv.ru/iframe/%splay/id/%s' % ('live-' if is_live else '', video_id),
            video_id, 'Downloading JSON')

        if json_data['errors']:
            raise ExtractorError('%s said: %s' % (self.IE_NAME, json_data['errors']), expected=True)

        playlist = json_data['data']['playlist']

        medialist = playlist['medialist']
        media = medialist[0]

        if media['errors']:
            raise ExtractorError('%s said: %s' % (self.IE_NAME, media['errors']), expected=True)

        view_count = playlist.get('count_views')
        priority_transport = playlist['priority_transport']

        thumbnail = media['picture']
        width = int_or_none(media['width'])
        height = int_or_none(media['height'])
        description = media['anons']
        title = media['title']
        duration = int_or_none(media.get('duration'))

        formats = []

        # media['sources'] maps transport name -> {quality: url}.
        # NOTE(review): the inner loop variable 'url' shadows the method's
        # 'url' parameter; harmless here since the parameter is not used
        # after this point.
        for transport, links in media['sources'].items():
            for quality, url in links.items():
                # Prefer the transport the playlist marks as primary.
                preference = -1 if priority_transport == transport else -2
                if transport == 'rtmp':
                    # Split the rtmp URL into app and playpath for rtmpdump.
                    mobj = re.search(r'^(?P<url>rtmp://[^/]+/(?P<app>.+))/(?P<playpath>.+)$', url)
                    if not mobj:
                        continue
                    fmt = {
                        'url': mobj.group('url'),
                        'play_path': mobj.group('playpath'),
                        'app': mobj.group('app'),
                        'page_url': 'http://player.rutv.ru',
                        'player_url': 'http://player.rutv.ru/flash2v/osmf.swf?i=22',
                        'rtmp_live': True,
                        'ext': 'flv',
                        'vbr': int(quality),
                        'preference': preference,
                    }
                elif transport == 'm3u8':
                    # HLS manifests expand into several formats of their own.
                    formats.extend(self._extract_m3u8_formats(
                        url, video_id, 'mp4', preference=preference, m3u8_id='hls'))
                    continue
                else:
                    fmt = {
                        'url': url
                    }
                fmt.update({
                    'width': width,
                    'height': height,
                    'format_id': '%s-%s' % (transport, quality),
                })
                formats.append(fmt)

        self._sort_formats(formats)

        return {
            'id': video_id,
            'title': self._live_title(title) if is_live else title,
            'description': description,
            'thumbnail': thumbnail,
            'view_count': view_count,
            'duration': duration,
            'formats': formats,
            'is_live': is_live,
        }
| unlicense |
sgallagher/reviewboard | reviewboard/webapi/resources/base_watched_object.py | 6 | 5319 | from __future__ import unicode_literals
from django.contrib.auth.models import User
from django.core.exceptions import ObjectDoesNotExist
from django.http import HttpResponseRedirect
from django.utils import six
from djblets.webapi.decorators import (webapi_login_required,
webapi_response_errors,
webapi_request_fields)
from djblets.webapi.errors import (DOES_NOT_EXIST, NOT_LOGGED_IN,
PERMISSION_DENIED)
from reviewboard.accounts.models import Profile
from reviewboard.webapi.base import WebAPIResource
from reviewboard.webapi.decorators import (webapi_check_local_site,
webapi_check_login_required)
from reviewboard.webapi.resources import resources
class BaseWatchedObjectResource(WebAPIResource):
    """A base resource for objects watched by a user.

    Subclasses set ``watched_resource`` to the resource being starred and
    name the ``Profile`` methods used to star/unstar it via
    ``star_function``/``unstar_function``.
    """
    # Resource whose objects can be starred (set by subclasses).
    watched_resource = None
    uri_object_key = 'watched_obj_id'
    # Name of the Profile field holding the starred objects (subclass-set).
    profile_field = None
    # Names of the Profile methods that star/unstar an object (subclass-set).
    star_function = None
    unstar_function = None
    allowed_methods = ('GET', 'POST', 'DELETE')

    @property
    def uri_object_key_regex(self):
        # Mirror the watched resource's key format so URLs parse identically.
        return self.watched_resource.uri_object_key_regex

    def get_queryset(self, request, username, local_site_name=None,
                     *args, **kwargs):
        """Return the watched resource's queryset filtered down to the
        objects starred by ``username`` (empty if the user has no profile)."""
        try:
            local_site = self._get_local_site(local_site_name)

            if local_site:
                # On a LocalSite, look the user up through the site's
                # membership so site boundaries are respected.
                user = local_site.users.get(username=username)
                profile = user.get_profile()
            else:
                profile = Profile.objects.get(user__username=username)

            q = self.watched_resource.get_queryset(
                request, local_site_name=local_site_name, *args, **kwargs)
            q = q.filter(starred_by=profile)
            return q
        except Profile.DoesNotExist:
            return self.watched_resource.model.objects.none()

    @webapi_check_login_required
    def get(self, request, watched_obj_id, *args, **kwargs):
        """Redirect to the watched object's own resource URL."""
        try:
            q = self.get_queryset(request, *args, **kwargs)
            obj = self.get_watched_object(q, watched_obj_id, *args, **kwargs)
        except ObjectDoesNotExist:
            return DOES_NOT_EXIST

        return HttpResponseRedirect(
            self.watched_resource.get_href(obj, request, *args, **kwargs))

    @webapi_check_login_required
    @webapi_response_errors(DOES_NOT_EXIST)
    def get_list(self, request, *args, **kwargs):
        """Return the list of objects the user is watching."""
        # TODO: Handle pagination and ?counts-only=1
        try:
            objects = [
                self.serialize_object(obj)
                for obj in self.get_queryset(request, is_list=True,
                                             *args, **kwargs)
            ]

            return 200, {
                self.list_result_key: objects,
            }
        except User.DoesNotExist:
            return DOES_NOT_EXIST

    @webapi_check_local_site
    @webapi_login_required
    @webapi_response_errors(DOES_NOT_EXIST, NOT_LOGGED_IN, PERMISSION_DENIED)
    @webapi_request_fields(required={
        'object_id': {
            'type': six.text_type,
            'description': 'The ID of the object to watch.',
        },
    })
    def create(self, request, object_id, *args, **kwargs):
        """Star the given object on behalf of the requesting user."""
        try:
            obj_kwargs = kwargs.copy()
            obj_kwargs[self.watched_resource.uri_object_key] = object_id
            obj = self.watched_resource.get_object(request, *args,
                                                   **obj_kwargs)
            user = resources.user.get_object(request, *args, **kwargs)
        except ObjectDoesNotExist:
            return DOES_NOT_EXIST

        # Only the user themselves (or an admin) may modify their watch list.
        if not resources.user.has_modify_permissions(request, user,
                                                     *args, **kwargs):
            return self.get_no_access_error(request)

        profile, profile_is_new = \
            Profile.objects.get_or_create(user=request.user)
        # star_function names the Profile method that stars this object type.
        star = getattr(profile, self.star_function)
        star(obj)

        return 201, {
            self.item_result_key: obj,
        }

    @webapi_check_local_site
    @webapi_login_required
    def delete(self, request, watched_obj_id, *args, **kwargs):
        """Unstar the given object on behalf of the requesting user."""
        try:
            obj_kwargs = kwargs.copy()
            obj_kwargs[self.watched_resource.uri_object_key] = watched_obj_id
            obj = self.watched_resource.get_object(request, *args,
                                                   **obj_kwargs)
            user = resources.user.get_object(request, *args, **kwargs)
        except ObjectDoesNotExist:
            return DOES_NOT_EXIST

        # Only the user themselves (or an admin) may modify their watch list.
        if not resources.user.has_modify_permissions(request, user,
                                                     *args, **kwargs):
            return self.get_no_access_error(request)

        profile, profile_is_new = \
            Profile.objects.get_or_create(user=request.user)

        # A freshly-created profile can't have anything starred yet, so
        # only unstar when the profile already existed.
        if not profile_is_new:
            unstar = getattr(profile, self.unstar_function)
            unstar(obj)

        return 204, {}

    def serialize_object(self, obj, *args, **kwargs):
        """Serialize a watched object as {'id': ..., <item_key>: obj}."""
        return {
            'id': obj.pk,
            self.item_result_key: obj,
        }

    def get_watched_object(self, queryset, obj_id, *args, **kwargs):
        """Look up a single watched object by primary key."""
        return queryset.get(pk=obj_id)
| mit |
KaranToor/MA450 | google-cloud-sdk/lib/googlecloudsdk/third_party/apis/deploymentmanager/v2/deploymentmanager_v2_client.py | 6 | 22417 | """Generated client library for deploymentmanager version v2."""
# NOTE: This file is autogenerated and should not be edited by hand.
from apitools.base.py import base_api
from googlecloudsdk.third_party.apis.deploymentmanager.v2 import deploymentmanager_v2_messages as messages
class DeploymentmanagerV2(base_api.BaseApiClient):
  """Generated client library for service deploymentmanager version v2."""

  # NOTE: all of the values below are emitted by the apitools generator;
  # do not edit them by hand.
  MESSAGES_MODULE = messages
  BASE_URL = u'https://www.googleapis.com/deploymentmanager/v2/'

  _PACKAGE = u'deploymentmanager'
  _SCOPES = [u'https://www.googleapis.com/auth/cloud-platform', u'https://www.googleapis.com/auth/cloud-platform.read-only', u'https://www.googleapis.com/auth/ndev.cloudman', u'https://www.googleapis.com/auth/ndev.cloudman.readonly']
  _VERSION = u'v2'
  _CLIENT_ID = '1042881264118.apps.googleusercontent.com'
  _CLIENT_SECRET = 'x_Tw5K8nnjoRAqULM9PFAC2b'
  _USER_AGENT = 'x_Tw5K8nnjoRAqULM9PFAC2b'
  _CLIENT_CLASS_NAME = u'DeploymentmanagerV2'
  _URL_VERSION = u'v2'
  _API_KEY = None

  def __init__(self, url='', credentials=None,
               get_credentials=True, http=None, model=None,
               log_request=False, log_response=False,
               credentials_args=None, default_global_params=None,
               additional_http_headers=None):
    """Create a new deploymentmanager handle."""
    url = url or self.BASE_URL
    super(DeploymentmanagerV2, self).__init__(
        url, credentials=credentials,
        get_credentials=get_credentials, http=http, model=model,
        log_request=log_request, log_response=log_response,
        credentials_args=credentials_args,
        default_global_params=default_global_params,
        additional_http_headers=additional_http_headers)
    # One service stub per API collection.
    self.deployments = self.DeploymentsService(self)
    self.manifests = self.ManifestsService(self)
    self.operations = self.OperationsService(self)
    self.resources = self.ResourcesService(self)
    self.types = self.TypesService(self)
class DeploymentsService(base_api.BaseApiService):
    """Service class for the deployments resource.

    Generated by apitools; each API call is a thin wrapper that looks up
    its generated ApiMethodInfo and delegates to BaseApiService._RunMethod.
    """

    _NAME = u'deployments'

    def __init__(self, client):
      super(DeploymentmanagerV2.DeploymentsService, self).__init__(client)
      self._upload_configs = {
          }

    def CancelPreview(self, request, global_params=None):
      """Cancels and removes the preview currently associated with the deployment.

      Args:
        request: (DeploymentmanagerDeploymentsCancelPreviewRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (Operation) The response message.
      """
      config = self.GetMethodConfig('CancelPreview')
      return self._RunMethod(
          config, request, global_params=global_params)

    CancelPreview.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'POST',
        method_id=u'deploymentmanager.deployments.cancelPreview',
        ordered_params=[u'project', u'deployment'],
        path_params=[u'deployment', u'project'],
        query_params=[],
        relative_path=u'projects/{project}/global/deployments/{deployment}/cancelPreview',
        request_field=u'deploymentsCancelPreviewRequest',
        request_type_name=u'DeploymentmanagerDeploymentsCancelPreviewRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def Delete(self, request, global_params=None):
      """Deletes a deployment and all of the resources in the deployment.

      Args:
        request: (DeploymentmanagerDeploymentsDeleteRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (Operation) The response message.
      """
      config = self.GetMethodConfig('Delete')
      return self._RunMethod(
          config, request, global_params=global_params)

    Delete.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'DELETE',
        method_id=u'deploymentmanager.deployments.delete',
        ordered_params=[u'project', u'deployment'],
        path_params=[u'deployment', u'project'],
        query_params=[u'deletePolicy'],
        relative_path=u'projects/{project}/global/deployments/{deployment}',
        request_field='',
        request_type_name=u'DeploymentmanagerDeploymentsDeleteRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def Get(self, request, global_params=None):
      """Gets information about a specific deployment.

      Args:
        request: (DeploymentmanagerDeploymentsGetRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (Deployment) The response message.
      """
      config = self.GetMethodConfig('Get')
      return self._RunMethod(
          config, request, global_params=global_params)

    Get.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'GET',
        method_id=u'deploymentmanager.deployments.get',
        ordered_params=[u'project', u'deployment'],
        path_params=[u'deployment', u'project'],
        query_params=[],
        relative_path=u'projects/{project}/global/deployments/{deployment}',
        request_field='',
        request_type_name=u'DeploymentmanagerDeploymentsGetRequest',
        response_type_name=u'Deployment',
        supports_download=False,
    )

    def GetIamPolicy(self, request, global_params=None):
      """Gets the access control policy for a resource. May be empty if no such policy or resource exists.

      Args:
        request: (DeploymentmanagerDeploymentsGetIamPolicyRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (Policy) The response message.
      """
      config = self.GetMethodConfig('GetIamPolicy')
      return self._RunMethod(
          config, request, global_params=global_params)

    GetIamPolicy.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'GET',
        method_id=u'deploymentmanager.deployments.getIamPolicy',
        ordered_params=[u'project', u'resource'],
        path_params=[u'project', u'resource'],
        query_params=[],
        relative_path=u'projects/{project}/global/deployments/{resource}/getIamPolicy',
        request_field='',
        request_type_name=u'DeploymentmanagerDeploymentsGetIamPolicyRequest',
        response_type_name=u'Policy',
        supports_download=False,
    )

    def Insert(self, request, global_params=None):
      """Creates a deployment and all of the resources described by the deployment manifest.

      Args:
        request: (DeploymentmanagerDeploymentsInsertRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (Operation) The response message.
      """
      config = self.GetMethodConfig('Insert')
      return self._RunMethod(
          config, request, global_params=global_params)

    Insert.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'POST',
        method_id=u'deploymentmanager.deployments.insert',
        ordered_params=[u'project'],
        path_params=[u'project'],
        query_params=[u'preview'],
        relative_path=u'projects/{project}/global/deployments',
        request_field=u'deployment',
        request_type_name=u'DeploymentmanagerDeploymentsInsertRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def List(self, request, global_params=None):
      """Lists all deployments for a given project.

      Args:
        request: (DeploymentmanagerDeploymentsListRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (DeploymentsListResponse) The response message.
      """
      config = self.GetMethodConfig('List')
      return self._RunMethod(
          config, request, global_params=global_params)

    List.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'GET',
        method_id=u'deploymentmanager.deployments.list',
        ordered_params=[u'project'],
        path_params=[u'project'],
        query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
        relative_path=u'projects/{project}/global/deployments',
        request_field='',
        request_type_name=u'DeploymentmanagerDeploymentsListRequest',
        response_type_name=u'DeploymentsListResponse',
        supports_download=False,
    )

    def Patch(self, request, global_params=None):
      """Updates a deployment and all of the resources described by the deployment manifest. This method supports patch semantics.

      Args:
        request: (DeploymentmanagerDeploymentsPatchRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (Operation) The response message.
      """
      config = self.GetMethodConfig('Patch')
      return self._RunMethod(
          config, request, global_params=global_params)

    Patch.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'PATCH',
        method_id=u'deploymentmanager.deployments.patch',
        ordered_params=[u'project', u'deployment'],
        path_params=[u'deployment', u'project'],
        query_params=[u'createPolicy', u'deletePolicy', u'preview'],
        relative_path=u'projects/{project}/global/deployments/{deployment}',
        request_field=u'deploymentResource',
        request_type_name=u'DeploymentmanagerDeploymentsPatchRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def SetIamPolicy(self, request, global_params=None):
      """Sets the access control policy on the specified resource. Replaces any existing policy.

      Args:
        request: (DeploymentmanagerDeploymentsSetIamPolicyRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (Policy) The response message.
      """
      config = self.GetMethodConfig('SetIamPolicy')
      return self._RunMethod(
          config, request, global_params=global_params)

    SetIamPolicy.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'POST',
        method_id=u'deploymentmanager.deployments.setIamPolicy',
        ordered_params=[u'project', u'resource'],
        path_params=[u'project', u'resource'],
        query_params=[],
        relative_path=u'projects/{project}/global/deployments/{resource}/setIamPolicy',
        request_field=u'policy',
        request_type_name=u'DeploymentmanagerDeploymentsSetIamPolicyRequest',
        response_type_name=u'Policy',
        supports_download=False,
    )

    def Stop(self, request, global_params=None):
      """Stops an ongoing operation. This does not roll back any work that has already been completed, but prevents any new work from being started.

      Args:
        request: (DeploymentmanagerDeploymentsStopRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (Operation) The response message.
      """
      config = self.GetMethodConfig('Stop')
      return self._RunMethod(
          config, request, global_params=global_params)

    Stop.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'POST',
        method_id=u'deploymentmanager.deployments.stop',
        ordered_params=[u'project', u'deployment'],
        path_params=[u'deployment', u'project'],
        query_params=[],
        relative_path=u'projects/{project}/global/deployments/{deployment}/stop',
        request_field=u'deploymentsStopRequest',
        request_type_name=u'DeploymentmanagerDeploymentsStopRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def TestIamPermissions(self, request, global_params=None):
      """Returns permissions that a caller has on the specified resource.

      Args:
        request: (DeploymentmanagerDeploymentsTestIamPermissionsRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (TestPermissionsResponse) The response message.
      """
      config = self.GetMethodConfig('TestIamPermissions')
      return self._RunMethod(
          config, request, global_params=global_params)

    TestIamPermissions.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'POST',
        method_id=u'deploymentmanager.deployments.testIamPermissions',
        ordered_params=[u'project', u'resource'],
        path_params=[u'project', u'resource'],
        query_params=[],
        relative_path=u'projects/{project}/global/deployments/{resource}/testIamPermissions',
        request_field=u'testPermissionsRequest',
        request_type_name=u'DeploymentmanagerDeploymentsTestIamPermissionsRequest',
        response_type_name=u'TestPermissionsResponse',
        supports_download=False,
    )

    def Update(self, request, global_params=None):
      """Updates a deployment and all of the resources described by the deployment manifest.

      Args:
        request: (DeploymentmanagerDeploymentsUpdateRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (Operation) The response message.
      """
      config = self.GetMethodConfig('Update')
      return self._RunMethod(
          config, request, global_params=global_params)

    Update.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'PUT',
        method_id=u'deploymentmanager.deployments.update',
        ordered_params=[u'project', u'deployment'],
        path_params=[u'deployment', u'project'],
        query_params=[u'createPolicy', u'deletePolicy', u'preview'],
        relative_path=u'projects/{project}/global/deployments/{deployment}',
        request_field=u'deploymentResource',
        request_type_name=u'DeploymentmanagerDeploymentsUpdateRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )
class ManifestsService(base_api.BaseApiService):
    """Service class for the manifests resource.

    Generated by apitools; each API call delegates to _RunMethod with its
    generated ApiMethodInfo.
    """

    _NAME = u'manifests'

    def __init__(self, client):
      super(DeploymentmanagerV2.ManifestsService, self).__init__(client)
      self._upload_configs = {
          }

    def Get(self, request, global_params=None):
      """Gets information about a specific manifest.

      Args:
        request: (DeploymentmanagerManifestsGetRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (Manifest) The response message.
      """
      config = self.GetMethodConfig('Get')
      return self._RunMethod(
          config, request, global_params=global_params)

    Get.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'GET',
        method_id=u'deploymentmanager.manifests.get',
        ordered_params=[u'project', u'deployment', u'manifest'],
        path_params=[u'deployment', u'manifest', u'project'],
        query_params=[],
        relative_path=u'projects/{project}/global/deployments/{deployment}/manifests/{manifest}',
        request_field='',
        request_type_name=u'DeploymentmanagerManifestsGetRequest',
        response_type_name=u'Manifest',
        supports_download=False,
    )

    def List(self, request, global_params=None):
      """Lists all manifests for a given deployment.

      Args:
        request: (DeploymentmanagerManifestsListRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (ManifestsListResponse) The response message.
      """
      config = self.GetMethodConfig('List')
      return self._RunMethod(
          config, request, global_params=global_params)

    List.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'GET',
        method_id=u'deploymentmanager.manifests.list',
        ordered_params=[u'project', u'deployment'],
        path_params=[u'deployment', u'project'],
        query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
        relative_path=u'projects/{project}/global/deployments/{deployment}/manifests',
        request_field='',
        request_type_name=u'DeploymentmanagerManifestsListRequest',
        response_type_name=u'ManifestsListResponse',
        supports_download=False,
    )
class OperationsService(base_api.BaseApiService):
    """Service class for the operations resource.

    Generated by apitools; each API call delegates to _RunMethod with its
    generated ApiMethodInfo.
    """

    _NAME = u'operations'

    def __init__(self, client):
      super(DeploymentmanagerV2.OperationsService, self).__init__(client)
      self._upload_configs = {
          }

    def Get(self, request, global_params=None):
      """Gets information about a specific operation.

      Args:
        request: (DeploymentmanagerOperationsGetRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (Operation) The response message.
      """
      config = self.GetMethodConfig('Get')
      return self._RunMethod(
          config, request, global_params=global_params)

    Get.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'GET',
        method_id=u'deploymentmanager.operations.get',
        ordered_params=[u'project', u'operation'],
        path_params=[u'operation', u'project'],
        query_params=[],
        relative_path=u'projects/{project}/global/operations/{operation}',
        request_field='',
        request_type_name=u'DeploymentmanagerOperationsGetRequest',
        response_type_name=u'Operation',
        supports_download=False,
    )

    def List(self, request, global_params=None):
      """Lists all operations for a project.

      Args:
        request: (DeploymentmanagerOperationsListRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (OperationsListResponse) The response message.
      """
      config = self.GetMethodConfig('List')
      return self._RunMethod(
          config, request, global_params=global_params)

    List.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'GET',
        method_id=u'deploymentmanager.operations.list',
        ordered_params=[u'project'],
        path_params=[u'project'],
        query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
        relative_path=u'projects/{project}/global/operations',
        request_field='',
        request_type_name=u'DeploymentmanagerOperationsListRequest',
        response_type_name=u'OperationsListResponse',
        supports_download=False,
    )
class ResourcesService(base_api.BaseApiService):
    """Service class for the resources resource.

    Generated by apitools; each API call delegates to _RunMethod with its
    generated ApiMethodInfo.
    """

    _NAME = u'resources'

    def __init__(self, client):
      super(DeploymentmanagerV2.ResourcesService, self).__init__(client)
      self._upload_configs = {
          }

    def Get(self, request, global_params=None):
      """Gets information about a single resource.

      Args:
        request: (DeploymentmanagerResourcesGetRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (Resource) The response message.
      """
      config = self.GetMethodConfig('Get')
      return self._RunMethod(
          config, request, global_params=global_params)

    Get.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'GET',
        method_id=u'deploymentmanager.resources.get',
        ordered_params=[u'project', u'deployment', u'resource'],
        path_params=[u'deployment', u'project', u'resource'],
        query_params=[],
        relative_path=u'projects/{project}/global/deployments/{deployment}/resources/{resource}',
        request_field='',
        request_type_name=u'DeploymentmanagerResourcesGetRequest',
        response_type_name=u'Resource',
        supports_download=False,
    )

    def List(self, request, global_params=None):
      """Lists all resources in a given deployment.

      Args:
        request: (DeploymentmanagerResourcesListRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (ResourcesListResponse) The response message.
      """
      config = self.GetMethodConfig('List')
      return self._RunMethod(
          config, request, global_params=global_params)

    List.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'GET',
        method_id=u'deploymentmanager.resources.list',
        ordered_params=[u'project', u'deployment'],
        path_params=[u'deployment', u'project'],
        query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
        relative_path=u'projects/{project}/global/deployments/{deployment}/resources',
        request_field='',
        request_type_name=u'DeploymentmanagerResourcesListRequest',
        response_type_name=u'ResourcesListResponse',
        supports_download=False,
    )
class TypesService(base_api.BaseApiService):
    """Service class for the types resource.

    Generated by apitools; each API call delegates to _RunMethod with its
    generated ApiMethodInfo.
    """

    _NAME = u'types'

    def __init__(self, client):
      super(DeploymentmanagerV2.TypesService, self).__init__(client)
      self._upload_configs = {
          }

    def List(self, request, global_params=None):
      """Lists all resource types for Deployment Manager.

      Args:
        request: (DeploymentmanagerTypesListRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (TypesListResponse) The response message.
      """
      config = self.GetMethodConfig('List')
      return self._RunMethod(
          config, request, global_params=global_params)

    List.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'GET',
        method_id=u'deploymentmanager.types.list',
        ordered_params=[u'project'],
        path_params=[u'project'],
        query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'],
        relative_path=u'projects/{project}/global/types',
        request_field='',
        request_type_name=u'DeploymentmanagerTypesListRequest',
        response_type_name=u'TypesListResponse',
        supports_download=False,
| apache-2.0 |
opoplawski/ansible | contrib/inventory/docker.py | 120 | 12105 | #!/usr/bin/env python
# (c) 2013, Paul Durivage <paul.durivage@gmail.com>
#
# This file is part of Ansible.
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
#
# Author: Paul Durivage <paul.durivage@gmail.com>
#
# Description:
# This module queries local or remote Docker daemons and generates
# inventory information.
#
# This plugin does not support targeting of specific hosts using the --host
# flag. Instead, it queries the Docker API for each container, running
# or not, and returns this data all at once.
#
# The plugin returns the following custom attributes on Docker containers:
# docker_args
# docker_config
# docker_created
# docker_driver
# docker_exec_driver
# docker_host_config
# docker_hostname_path
# docker_hosts_path
# docker_id
# docker_image
# docker_name
# docker_network_settings
# docker_path
# docker_resolv_conf_path
# docker_state
# docker_volumes
# docker_volumes_rw
#
# Requirements:
# The docker-py module: https://github.com/dotcloud/docker-py
#
# Notes:
# A config file can be used to configure this inventory module, and there
# are several environment variables that can be set to modify the behavior
# of the plugin at runtime:
# DOCKER_CONFIG_FILE
# DOCKER_HOST
# DOCKER_VERSION
# DOCKER_TIMEOUT
# DOCKER_PRIVATE_SSH_PORT
# DOCKER_DEFAULT_IP
#
# Environment Variables:
# environment variable: DOCKER_CONFIG_FILE
# description:
# - A path to a Docker inventory hosts/defaults file in YAML format
# - A sample file has been provided, colocated with the inventory
# file called 'docker.yml'
# required: false
# default: Uses docker.docker.Client constructor defaults
# environment variable: DOCKER_HOST
# description:
# - The socket on which to connect to a Docker daemon API
# required: false
# default: Uses docker.docker.Client constructor defaults
# environment variable: DOCKER_VERSION
# description:
# - Version of the Docker API to use
# default: Uses docker.docker.Client constructor defaults
# required: false
# environment variable: DOCKER_TIMEOUT
# description:
# - Timeout in seconds for connections to Docker daemon API
# default: Uses docker.docker.Client constructor defaults
# required: false
# environment variable: DOCKER_PRIVATE_SSH_PORT
# description:
# - The private port (container port) on which SSH is listening
# for connections
# default: 22
# required: false
# environment variable: DOCKER_DEFAULT_IP
# description:
# - This environment variable overrides the container SSH connection
# IP address (aka, 'ansible_ssh_host')
#
# This option allows one to override the ansible_ssh_host whenever
# Docker has exercised its default behavior of binding private ports
# to all interfaces of the Docker host. This behavior, when dealing
# with remote Docker hosts, does not allow Ansible to determine
# a proper host IP address on which to connect via SSH to containers.
# By default, this inventory module assumes all 0.0.0.0-exposed
# ports to be bound to localhost:<port>. To override this
# behavior, for example, to bind a container's SSH port to the public
# interface of its host, one must manually set this IP.
#
# It is preferable to begin to launch Docker containers with
# ports exposed on publicly accessible IP addresses, particularly
# if the containers are to be targeted by Ansible for remote
# configuration, not accessible via localhost SSH connections.
#
# Docker containers can be explicitly exposed on IP addresses by
# a) starting the daemon with the --ip argument
# b) running containers with the -P/--publish ip::containerPort
# argument
# default: 127.0.0.1 if port exposed on 0.0.0.0 by Docker
# required: false
#
# Examples:
# Use the config file:
# DOCKER_CONFIG_FILE=./docker.yml docker.py --list
#
# Connect to docker instance on localhost port 4243
# DOCKER_HOST=tcp://localhost:4243 docker.py --list
#
# Any container's ssh port exposed on 0.0.0.0 will mapped to
# another IP address (where Ansible will attempt to connect via SSH)
# DOCKER_DEFAULT_IP=1.2.3.4 docker.py --list
import os
import sys
import json
import argparse
from UserDict import UserDict
from collections import defaultdict
import yaml
from requests import HTTPError, ConnectionError
# Manipulation of the path is needed because the docker-py
# module is imported by the name docker, and because this file
# is also named docker
for path in [os.getcwd(), '', os.path.dirname(os.path.abspath(__file__))]:
    try:
        del sys.path[sys.path.index(path)]
    except ValueError:
        # `path` is not on sys.path -- nothing to remove.  (A bare `except:`
        # here would also swallow KeyboardInterrupt/SystemExit.)
        pass

try:
    import docker
except ImportError:
    print('docker-py is required for this module')
    sys.exit(1)
class HostDict(UserDict):
    """Dictionary that silently drops ``None`` values on assignment.

    Used to collect docker.Client keyword arguments: keys whose value is
    ``None`` are never stored, so the client falls back to its own
    constructor defaults for them.
    """

    def __setitem__(self, key, value):
        # Only keep real values; None entries are discarded entirely.
        if value is None:
            return
        self.data[key] = value

    def update(self, dict=None, **kwargs):
        """Merge a mapping (or UserDict) and keyword args, dropping Nones."""
        if dict is not None:
            source = dict.data if isinstance(dict, UserDict) else dict
            for key, value in source.items():
                self[key] = value
        for key, value in kwargs.items():
            self[key] = value
def write_stderr(string):
    """Write *string* (stringified) to stderr, followed by a newline."""
    sys.stderr.write('{0}\n'.format(string))
def setup():
    """Build the list of Docker daemon connection settings to query.

    For each connection parameter the precedence is: per-host entry in the
    YAML config file (DOCKER_CONFIG_FILE), then the config file's 'defaults'
    section, then the corresponding environment variable.

    Returns:
        list of HostDict -- one per Docker daemon; None-valued keys are
        dropped by HostDict so docker.Client falls back to its constructor
        defaults.  Exits the process with status 1 on config-file errors.
    """
    config = dict()
    config_file = os.environ.get('DOCKER_CONFIG_FILE')
    if config_file:
        try:
            config_file = os.path.abspath(config_file)
        except Exception as e:
            write_stderr(e)
            sys.exit(1)
        with open(config_file) as f:
            try:
                config = yaml.safe_load(f.read())
            except Exception as e:
                write_stderr(e)
                sys.exit(1)

    # Environment Variables
    env_base_url = os.environ.get('DOCKER_HOST')
    env_version = os.environ.get('DOCKER_VERSION')
    env_timeout = os.environ.get('DOCKER_TIMEOUT')
    env_ssh_port = os.environ.get('DOCKER_PRIVATE_SSH_PORT', '22')
    env_default_ip = os.environ.get('DOCKER_DEFAULT_IP', '127.0.0.1')

    # Config file defaults
    defaults = config.get('defaults', dict())
    def_host = defaults.get('host')
    def_version = defaults.get('version')
    def_timeout = defaults.get('timeout')
    def_default_ip = defaults.get('default_ip')
    def_ssh_port = defaults.get('private_ssh_port')

    hosts = list()
    if config:
        hosts_list = config.get('hosts', list())
        # Look to the config file's defined hosts
        if hosts_list:
            for host in hosts_list:
                # Per-host value wins, then file defaults, then environment.
                baseurl = host.get('host') or def_host or env_base_url
                version = host.get('version') or def_version or env_version
                timeout = host.get('timeout') or def_timeout or env_timeout
                default_ip = host.get('default_ip') or def_default_ip or env_default_ip
                ssh_port = host.get('private_ssh_port') or def_ssh_port or env_ssh_port
                hostdict = HostDict(
                    base_url=baseurl,
                    version=version,
                    timeout=timeout,
                    default_ip=default_ip,
                    private_ssh_port=ssh_port,
                )
                hosts.append(hostdict)
        # Look to the defaults
        else:
            hostdict = HostDict(
                base_url=def_host,
                version=def_version,
                timeout=def_timeout,
                default_ip=def_default_ip,
                private_ssh_port=def_ssh_port,
            )
            hosts.append(hostdict)
    # Look to the environment
    else:
        hostdict = HostDict(
            base_url=env_base_url,
            version=env_version,
            timeout=env_timeout,
            default_ip=env_default_ip,
            private_ssh_port=env_ssh_port,
        )
        hosts.append(hostdict)
    return hosts
def list_groups():
    """Print the full Ansible inventory (JSON) built from Docker containers.

    Queries every configured Docker daemon, groups each container under its
    full id, short id, container name, daemon URL and running/stopped state,
    and emits per-container hostvars (docker_* facts plus SSH connection
    info) under the '_meta' key.  Exits the process: 0 on success, 1 on
    Docker API / connection errors.
    """
    hosts = setup()
    groups = defaultdict(list)
    hostvars = defaultdict(dict)
    for host in hosts:
        # Pop the inventory-only keys first so the remaining dict contains
        # exactly the kwargs docker.Client accepts.
        ssh_port = host.pop('private_ssh_port', None)
        default_ip = host.pop('default_ip', None)
        hostname = host.get('base_url')
        try:
            client = docker.Client(**host)
            containers = client.containers(all=True)
        except (HTTPError, ConnectionError) as e:
            write_stderr(e)
            sys.exit(1)
        for container in containers:
            id = container.get('Id')
            short_id = id[:13]
            try:
                # First listed name, without the leading '/' Docker prepends.
                name = container.get('Names', list()).pop(0).lstrip('/')
            except IndexError:
                name = short_id
            if not id:
                continue
            inspect = client.inspect_container(id)
            running = inspect.get('State', dict()).get('Running')
            # Make the container addressable by id, short id and name.
            groups[id].append(name)
            groups[name].append(name)
            if not short_id in groups.keys():
                groups[short_id].append(name)
            groups[hostname].append(name)
            if running is True:
                groups['running'].append(name)
            else:
                groups['stopped'].append(name)
            try:
                port = client.port(container, ssh_port)[0]
            except (IndexError, AttributeError, TypeError):
                port = dict()
            try:
                # Rewrite 0.0.0.0 bindings to the configured default IP (see
                # DOCKER_DEFAULT_IP) so Ansible has a reachable SSH address.
                ip = default_ip if port['HostIp'] == '0.0.0.0' else port['HostIp']
            except KeyError:
                ip = ''
            container_info = dict(
                ansible_ssh_host=ip,
                ansible_ssh_port=port.get('HostPort', int()),
                docker_args=inspect.get('Args'),
                docker_config=inspect.get('Config'),
                docker_created=inspect.get('Created'),
                docker_driver=inspect.get('Driver'),
                docker_exec_driver=inspect.get('ExecDriver'),
                docker_host_config=inspect.get('HostConfig'),
                docker_hostname_path=inspect.get('HostnamePath'),
                docker_hosts_path=inspect.get('HostsPath'),
                docker_id=inspect.get('ID'),
                docker_image=inspect.get('Image'),
                docker_name=name,
                docker_network_settings=inspect.get('NetworkSettings'),
                docker_path=inspect.get('Path'),
                docker_resolv_conf_path=inspect.get('ResolvConfPath'),
                docker_state=inspect.get('State'),
                docker_volumes=inspect.get('Volumes'),
                docker_volumes_rw=inspect.get('VolumesRW'),
            )
            hostvars[name].update(container_info)
    groups['docker_hosts'] = [host.get('base_url') for host in hosts]
    groups['_meta'] = dict()
    groups['_meta']['hostvars'] = hostvars
    print(json.dumps(groups, sort_keys=True, indent=4))
    sys.exit(0)
def parse_args():
    """Parse command-line flags; exactly one of --list/--host is required."""
    arg_parser = argparse.ArgumentParser()
    mode = arg_parser.add_mutually_exclusive_group(required=True)
    for flag in ('--list', '--host'):
        mode.add_argument(flag, action='store_true')
    return arg_parser.parse_args()
def main():
    """Entry point: print the inventory for --list; --host is unsupported."""
    args = parse_args()
    if args.list:
        list_groups()
    elif args.host:
        # Per-host lookup is not implemented; all data (including hostvars)
        # is returned in one shot by --list.
        write_stderr('This option is not supported.')
        sys.exit(1)
    sys.exit(0)

# Run immediately: this script is invoked directly by Ansible as a dynamic
# inventory source.
main()
| gpl-3.0 |
MickSandoz/compassion-modules | contract_compassion/model/contract_origin.py | 2 | 4839 | # -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __openerp__.py
#
##############################################################################
from openerp import models, fields, api, _
from psycopg2 import IntegrityError
class contract_origin(models.Model):
    """ Origin of a contract.

    Records where a sponsorship came from (sponsor contact, event,
    marketing campaign, transfer, ...) and keeps a stored counter of the
    sponsorships it generated.
    """
    _name = 'recurring.contract.origin'

    ##########################################################################
    #                                 FIELDS                                 #
    ##########################################################################
    # Display name, computed from the other fields (see _set_name) and
    # stored so the UNIQUE(name) SQL constraint below can apply.
    name = fields.Char(compute='_set_name', store=True)
    type = fields.Selection('_get_origin_types', help=_(
        "Origin of contract : "
        " * Contact with sponsor/ambassador : an other sponsor told the "
        "person about Compassion."
        " * Event : sponsorship was made during an event"
        " * Marketing campaign : sponsorship was made after specific "
        "campaign (magazine, ad, etc..)"
        " * Transfer : sponsorship transferred from another country."
        " * Other : select only if none other type matches."
    ), required=True)
    partner_id = fields.Many2one('res.partner', 'Partner')
    analytic_id = fields.Many2one(
        'account.analytic.account', 'Analytic Account')
    contract_ids = fields.One2many(
        'recurring.contract', 'origin_id', 'Sponsorships originated',
        readonly=True)
    country_id = fields.Many2one('res.country', 'Country')
    other_name = fields.Char('Give details', size=128)
    # Number of contracts from this origin that reached the 'active' state
    # (including those later terminated); see _get_won_sponsorships.
    won_sponsorships = fields.Integer(
        compute='_get_won_sponsorships', store=True)

    _sql_constraints = [(
        'name_uniq', 'UNIQUE(name)',
        _("You cannot have two origins with same name."
          "The origin does probably already exist.")
    )]

    ##########################################################################
    #                             FIELDS METHODS                             #
    ##########################################################################
    @api.one
    @api.depends('type')
    def _set_name(self):
        # Build the display name according to the origin type.
        name = ""
        if self.type == 'partner':
            # Prefix with the parent company/family name when present.
            if self.partner_id.parent_id:
                name = self.partner_id.parent_id.name + ", "
            name += self.partner_id.name or _(
                'Contact with Sponsor/Ambassador')
        elif self.type in ('event', 'marketing'):
            name = self.analytic_id.name
        elif self.type == 'transfer':
            if self.country_id:
                name = _('Transfer from ') + self.country_id.name
            else:
                name = _('Transfer from partner country')
        elif self.type == 'other':
            name = self.other_name or 'Other'
        elif self.type == 'sub':
            name = _('SUB Sponsorship')
        self.name = name

    def _get_origin_types(self):
        # Selection values for the `type` field (value, translated label).
        return [
            ('partner', _("Contact with sponsor/ambassador")),
            ('event', _("Event")),
            ('marketing', _("Marketing campaign")),
            ('sub', _("SUB Sponsorship")),
            ('transfer', _("Transfer")),
            ('other', _("Other")),
        ]

    @api.depends('contract_ids.origin_id', 'contract_ids.state')
    @api.one
    def _get_won_sponsorships(self):
        # Count contracts that are (or were) active.
        contract_ids = self.contract_ids.filtered(
            lambda c: c.state in ('active', 'terminated'))
        self.won_sponsorships = len(contract_ids)

    ##########################################################################
    #                              ORM METHODS                               #
    ##########################################################################
    @api.model
    def create(self, vals):
        """Try to find existing origin instead of raising an error."""
        try:
            res = super(contract_origin, self).create(vals)
        except IntegrityError:
            # The UNIQUE(name) constraint fired: reuse the matching origin.
            # Find the origin
            self.env.cr.commit()  # Release the lock
            origins = self.search([
                ('type', '=', vals.get('type')),
                ('partner_id', '=', vals.get('partner_id')),
                ('analytic_id', '=', vals.get('analytic_id')),
                ('country_id', '=', vals.get('country_id')),
                ('other_name', '=', vals.get('other_name')),
            ])
            if origins:
                res = origins[0]
            else:
                # Different integrity problem: propagate the original error.
                raise
        return res
| agpl-3.0 |
naparuba/kunai | test/test_encryption.py | 2 | 2198 | #!/usr/bin/env python
# Copyright (C) 2014:
# Gabes Jean, naparuba@gmail.com
from opsbro_test import *
from opsbro.library import libstore
from opsbro.log import cprint
from opsbro.util import unicode_to_bytes, bytes_to_unicode
encrypter = libstore.get_encrypter()
class TestUDPEncrypter(OpsBroTest):
    """Round-trip test of zone-level payload encryption."""

    def setUp(self):
        # Pre-load a known base64 key for the 'internet' zone so encrypt()
        # and decrypt() can resolve the key by zone name.
        encrypter.load_zone_encryption_key('NTdiN2NlNmE4NTViMTFlNA==', 'internet')
        cprint("ENCRYPTER: %s" % encrypter)
        if encrypter.get_AES() is None:
            raise Exception('The Crypto librairy is missing')

    def test_encryption(self):
        cprint("ENCRYPTER: %s" % encrypter)
        orig_test = 'Hi I am Alice'
        # Encrypt for the 'internet' zone, then check decryption restores
        # the plaintext and that the ciphertext actually differs from it.
        bobread = encrypter.encrypt(orig_test, dest_zone_name='internet')
        clear = encrypter.decrypt(bobread)
        cprint('CLEAN: %s' % clear)
        self.assert_(clear == orig_test)
        self.assert_(bobread != orig_test)
class TestRSAEncrypter(OpsBroTest):
    """Round-trip test of RSA encryption using the bundled test key pair."""

    def setUp(self):
        RSA = encrypter.get_RSA()
        # Load the PKCS#1 private key shipped with the test fixtures.
        master_key_priv = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'test-files', 'test-executors', 'internet.private.key')
        with open(master_key_priv, 'rb') as f:
            buf = unicode_to_bytes(f.read())
        self.mfkey_priv = RSA.PrivateKey.load_pkcs1(buf)
        # Load the matching PKCS#1 public key.
        master_key_pub = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'test-files', 'test-executors', 'internet.public.key')
        with open(master_key_pub, 'rb') as f:
            buf = unicode_to_bytes(f.read())
        self.mfkey_pub = RSA.PublicKey.load_pkcs1(buf)

    def test_encryption(self):
        orig_test = 'Hi I am Alice'
        RSA = encrypter.get_RSA()
        # Encrypt with the public key, decrypt with the private key, and
        # check the plaintext survives the round trip.
        encrypted = RSA.encrypt(unicode_to_bytes(orig_test), self.mfkey_pub)  # encrypted thanks to public
        decrypted = bytes_to_unicode(RSA.decrypt(encrypted, self.mfkey_priv))  # decrypted with private
        print('Original:%s(%s)\nDecrypted:%s(%s)' % (orig_test, type(orig_test), decrypted, type(decrypted)))
        self.assert_(decrypted == orig_test)
        self.assert_(encrypted != orig_test)
        print('OK')
if __name__ == '__main__':
    # Allow running this test file directly, without an external runner.
    unittest.main()
| mit |
hunter007/wechatpy | wechatpy/enterprise/messages.py | 4 | 1152 | # -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from wechatpy.fields import IntegerField, StringField
from wechatpy import messages
# Registry mapping WeChat message type strings to their handler classes.
MESSAGE_TYPES = {}


def register_message(msg_type):
    """Class decorator: register the decorated class under *msg_type*.

    The class is stored in the module-level ``MESSAGE_TYPES`` registry and
    returned unchanged.
    """
    def decorator(klass):
        MESSAGE_TYPES[msg_type] = klass
        return klass
    return decorator
@register_message('text')
class TextMessage(messages.TextMessage):
    # Extends the base text message with the AgentID field (int, default 0).
    agent = IntegerField('AgentID', 0)
@register_message('image')
class ImageMessage(messages.ImageMessage):
    # Extends the base image message with the AgentID field (int, default 0).
    agent = IntegerField('AgentID', 0)
@register_message('voice')
class VoiceMessage(messages.VoiceMessage):
    # Extends the base voice message with the AgentID field (int, default 0).
    agent = IntegerField('AgentID', 0)
@register_message('shortvideo')
class ShortVideoMessage(messages.ShortVideoMessage):
    # Extends the base short-video message with the AgentID field.
    agent = IntegerField('AgentID', 0)
@register_message('video')
class VideoMessage(messages.VideoMessage):
    # Extends the base video message with the AgentID field (int, default 0).
    agent = IntegerField('AgentID', 0)
@register_message('location')
class LocationMessage(messages.LocationMessage):
    # Extends the base location message with the AgentID field.
    agent = IntegerField('AgentID', 0)
@register_message('link')
class LinkMessage(messages.LinkMessage):
    # Extends the base link message with the AgentID field and the
    # thumbnail picture URL carried by these callbacks.
    agent = IntegerField('AgentID', 0)
    pic_url = StringField('PicUrl')
| mit |
Ironarcher/casso-backend | lib/werkzeug/local.py | 159 | 14275 | # -*- coding: utf-8 -*-
"""
werkzeug.local
~~~~~~~~~~~~~~
This module implements context-local objects.
:copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
import copy
from functools import update_wrapper
from werkzeug.wsgi import ClosingIterator
from werkzeug._compat import PY2, implements_bool
# since each thread has its own greenlet we can just use those as identifiers
# for the context. If greenlets are not available we fall back to the
# current thread ident depending on where it is.
try:
from greenlet import getcurrent as get_ident
except ImportError:
try:
from thread import get_ident
except ImportError:
from _thread import get_ident
def release_local(local):
    """Release the data held by *local* for the current context.

    Works for both :class:`Local` and :class:`LocalStack` objects, letting
    them be used without a :class:`LocalManager`.  After the call,
    attribute access on the local for this context starts from scratch::

        >>> loc = Local()
        >>> loc.foo = 42
        >>> release_local(loc)
        >>> hasattr(loc, 'foo')
        False

    Data held behind proxies cannot be released this way; keep a reference
    to the underlying local object instead.

    .. versionadded:: 0.6.1
    """
    local.__release_local__()
class Local(object):
    """Namespace whose attributes are isolated per context.

    The context is identified by ``__ident_func__`` (greenlet or thread
    id); each context gets its own attribute dictionary inside the shared
    ``__storage__`` mapping.
    """
    __slots__ = ('__storage__', '__ident_func__')

    def __init__(self):
        # Bypass our own __setattr__, which stores into per-context storage.
        object.__setattr__(self, '__storage__', {})
        object.__setattr__(self, '__ident_func__', get_ident)

    def __iter__(self):
        # Yields (context identifier, attribute dict) pairs.
        return iter(self.__storage__.items())

    def __call__(self, proxy):
        """Create a :class:`LocalProxy` for attribute *proxy* of this local."""
        return LocalProxy(self, proxy)

    def __release_local__(self):
        # Drop everything stored for the current context, if anything.
        self.__storage__.pop(self.__ident_func__(), None)

    def __getattr__(self, name):
        values = self.__storage__.get(self.__ident_func__())
        if values is None or name not in values:
            raise AttributeError(name)
        return values[name]

    def __setattr__(self, name, value):
        context = self.__storage__.setdefault(self.__ident_func__(), {})
        context[name] = value

    def __delattr__(self, name):
        try:
            del self.__storage__[self.__ident_func__()][name]
        except KeyError:
            raise AttributeError(name)
class LocalStack(object):
    """A context-local stack built on top of :class:`Local`.

    Each context (thread or greenlet) sees its own stack of objects.
    ``push``/``pop`` manage the stack while ``top`` peeks without
    removing.  Calling the instance returns a :class:`LocalProxy` that
    always resolves to the topmost item and raises :exc:`RuntimeError`
    when the stack is empty.  Once the last item is popped the underlying
    local is released, unbinding the stack from the current context (a
    :class:`LocalManager` or :func:`release_local` also works).

    .. versionadded:: 0.6.1
    """

    def __init__(self):
        self._local = Local()

    def __release_local__(self):
        # Forward the release protocol so release_local() works on stacks.
        self._local.__release_local__()

    def _get__ident_func__(self):
        return self._local.__ident_func__

    def _set__ident_func__(self, value):
        object.__setattr__(self._local, '__ident_func__', value)
    __ident_func__ = property(_get__ident_func__, _set__ident_func__)
    del _get__ident_func__, _set__ident_func__

    def __call__(self):
        def _lookup():
            top = self.top
            if top is None:
                raise RuntimeError('object unbound')
            return top
        return LocalProxy(_lookup)

    def push(self, obj):
        """Push *obj* onto the stack and return the stack list."""
        stack = getattr(self._local, 'stack', None)
        if stack is None:
            stack = []
            self._local.stack = stack
        stack.append(obj)
        return stack

    def pop(self):
        """Remove and return the topmost item, or ``None`` if empty.

        The underlying local is released automatically when the last item
        is removed.
        """
        stack = getattr(self._local, 'stack', None)
        if stack is None:
            return None
        if len(stack) == 1:
            release_local(self._local)
            return stack[-1]
        return stack.pop()

    @property
    def top(self):
        """The topmost item, or ``None`` when the stack is empty."""
        stack = getattr(self._local, 'stack', None)
        if stack:
            return stack[-1]
        return None
class LocalManager(object):
    """Cleans up a set of :class:`Local` objects at the end of a context.

    Pass one or several locals at construction time, or append to
    ``manager.locals`` later; :meth:`cleanup` (or the middleware wrappers)
    then releases the data each local holds for the current context.
    An ``ident_func`` may be supplied to override the identity function of
    the wrapped locals.

    .. versionchanged:: 0.6.1
       Instead of a manager the :func:`release_local` function can be used
       as well.

    .. versionchanged:: 0.7
       `ident_func` was added.
    """

    def __init__(self, locals=None, ident_func=None):
        if locals is None:
            self.locals = []
        elif isinstance(locals, Local):
            self.locals = [locals]
        else:
            self.locals = list(locals)
        if ident_func is None:
            self.ident_func = get_ident
        else:
            self.ident_func = ident_func
            # Propagate the override to every wrapped local.
            for local in self.locals:
                object.__setattr__(local, '__ident_func__', ident_func)

    def get_ident(self):
        """Return the context identifier the wrapped locals use internally.

        Cannot be overridden to change behavior; use it to link other
        context-local objects (e.g. SQLAlchemy scoped sessions) to the
        Werkzeug locals.

        .. versionchanged:: 0.7
           A custom ident function passed to the constructor is propagated
           to all locals given to it.
        """
        return self.ident_func()

    def cleanup(self):
        """Release every registered local for the current context.

        Call this at the end of the request or use :meth:`make_middleware`.
        """
        for local in self.locals:
            release_local(local)

    def make_middleware(self, app):
        """Wrap a WSGI *app* so :meth:`cleanup` runs after each request."""
        def application(environ, start_response):
            # ClosingIterator invokes self.cleanup once the response
            # iterable is exhausted/closed.
            return ClosingIterator(app(environ, start_response), self.cleanup)
        return application

    def middleware(self, func):
        """Decorator form of :meth:`make_middleware`.

        Unlike :meth:`make_middleware`, the returned wrapper copies name,
        docstring and module from the wrapped application.

        Example usage::

            @manager.middleware
            def application(environ, start_response):
                ...
        """
        return update_wrapper(self.make_middleware(func), func)

    def __repr__(self):
        return '<%s storages: %d>' % (
            type(self).__name__,
            len(self.locals)
        )
@implements_bool
class LocalProxy(object):
    """Acts as a proxy for a werkzeug local.  Forwards all operations to
    a proxied object.  The only operations not supported for forwarding
    are right handed operands and any kind of assignment.

    Example usage::

        from werkzeug.local import Local
        l = Local()

        # these are proxies
        request = l('request')
        user = l('user')

        from werkzeug.local import LocalStack
        _response_local = LocalStack()

        # this is a proxy
        response = _response_local()

    Whenever something is bound to l.user / l.request the proxy objects
    will forward all operations.  If no object is bound a :exc:`RuntimeError`
    will be raised.

    To create proxies to :class:`Local` or :class:`LocalStack` objects,
    call the object as shown above.  If you want to have a proxy to an
    object looked up by a function, you can (as of Werkzeug 0.6.1) pass
    a function to the :class:`LocalProxy` constructor::

        session = LocalProxy(lambda: get_current_request().session)

    .. versionchanged:: 0.6.1
       The class can be instantiated with a callable as well now.
    """
    # '__local' is name-mangled to _LocalProxy__local; '__dict__' is listed
    # so the property below may shadow it despite __slots__.
    __slots__ = ('__local', '__dict__', '__name__')

    def __init__(self, local, name=None):
        # Write via object.__setattr__ because this class's own __setattr__
        # forwards attribute assignment to the proxied object.
        object.__setattr__(self, '_LocalProxy__local', local)
        object.__setattr__(self, '__name__', name)

    def _get_current_object(self):
        """Return the current object.  This is useful if you want the real
        object behind the proxy at a time for performance reasons or because
        you want to pass the object into a different context.
        """
        if not hasattr(self.__local, '__release_local__'):
            # Not a Local/LocalStack: treat it as a plain lookup callable.
            return self.__local()
        try:
            return getattr(self.__local, self.__name__)
        except AttributeError:
            raise RuntimeError('no object bound to %s' % self.__name__)

    @property
    def __dict__(self):
        try:
            return self._get_current_object().__dict__
        except RuntimeError:
            # An unbound proxy behaves like an object with no __dict__.
            raise AttributeError('__dict__')

    def __repr__(self):
        try:
            obj = self._get_current_object()
        except RuntimeError:
            return '<%s unbound>' % self.__class__.__name__
        return repr(obj)

    def __bool__(self):
        try:
            return bool(self._get_current_object())
        except RuntimeError:
            # An unbound proxy is simply falsy instead of raising.
            return False

    def __unicode__(self):
        try:
            return unicode(self._get_current_object())  # noqa
        except RuntimeError:
            return repr(self)

    def __dir__(self):
        try:
            return dir(self._get_current_object())
        except RuntimeError:
            return []

    def __getattr__(self, name):
        if name == '__members__':
            # Legacy Python 2 introspection hook.
            return dir(self._get_current_object())
        return getattr(self._get_current_object(), name)

    def __setitem__(self, key, value):
        self._get_current_object()[key] = value

    def __delitem__(self, key):
        del self._get_current_object()[key]

    if PY2:
        # Slice dunders only exist on Python 2.
        __getslice__ = lambda x, i, j: x._get_current_object()[i:j]

        def __setslice__(self, i, j, seq):
            self._get_current_object()[i:j] = seq

        def __delslice__(self, i, j):
            del self._get_current_object()[i:j]

    # Everything below forwards the corresponding operator/protocol call to
    # the object currently bound behind the proxy.
    __setattr__ = lambda x, n, v: setattr(x._get_current_object(), n, v)
    __delattr__ = lambda x, n: delattr(x._get_current_object(), n)
    __str__ = lambda x: str(x._get_current_object())
    __lt__ = lambda x, o: x._get_current_object() < o
    __le__ = lambda x, o: x._get_current_object() <= o
    __eq__ = lambda x, o: x._get_current_object() == o
    __ne__ = lambda x, o: x._get_current_object() != o
    __gt__ = lambda x, o: x._get_current_object() > o
    __ge__ = lambda x, o: x._get_current_object() >= o
    __cmp__ = lambda x, o: cmp(x._get_current_object(), o)  # noqa
    __hash__ = lambda x: hash(x._get_current_object())
    __call__ = lambda x, *a, **kw: x._get_current_object()(*a, **kw)
    __len__ = lambda x: len(x._get_current_object())
    __getitem__ = lambda x, i: x._get_current_object()[i]
    __iter__ = lambda x: iter(x._get_current_object())
    __contains__ = lambda x, i: i in x._get_current_object()
    __add__ = lambda x, o: x._get_current_object() + o
    __sub__ = lambda x, o: x._get_current_object() - o
    __mul__ = lambda x, o: x._get_current_object() * o
    __floordiv__ = lambda x, o: x._get_current_object() // o
    __mod__ = lambda x, o: x._get_current_object() % o
    __divmod__ = lambda x, o: x._get_current_object().__divmod__(o)
    __pow__ = lambda x, o: x._get_current_object() ** o
    __lshift__ = lambda x, o: x._get_current_object() << o
    __rshift__ = lambda x, o: x._get_current_object() >> o
    __and__ = lambda x, o: x._get_current_object() & o
    __xor__ = lambda x, o: x._get_current_object() ^ o
    __or__ = lambda x, o: x._get_current_object() | o
    __div__ = lambda x, o: x._get_current_object().__div__(o)
    __truediv__ = lambda x, o: x._get_current_object().__truediv__(o)
    __neg__ = lambda x: -(x._get_current_object())
    __pos__ = lambda x: +(x._get_current_object())
    __abs__ = lambda x: abs(x._get_current_object())
    __invert__ = lambda x: ~(x._get_current_object())
    __complex__ = lambda x: complex(x._get_current_object())
    __int__ = lambda x: int(x._get_current_object())
    __long__ = lambda x: long(x._get_current_object())  # noqa
    __float__ = lambda x: float(x._get_current_object())
    __oct__ = lambda x: oct(x._get_current_object())
    __hex__ = lambda x: hex(x._get_current_object())
    __index__ = lambda x: x._get_current_object().__index__()
    __coerce__ = lambda x, o: x._get_current_object().__coerce__(x, o)
    __enter__ = lambda x: x._get_current_object().__enter__()
    __exit__ = lambda x, *a, **kw: x._get_current_object().__exit__(*a, **kw)
    __radd__ = lambda x, o: o + x._get_current_object()
    __rsub__ = lambda x, o: o - x._get_current_object()
    __rmul__ = lambda x, o: o * x._get_current_object()
    __rdiv__ = lambda x, o: o / x._get_current_object()
    if PY2:
        __rtruediv__ = lambda x, o: x._get_current_object().__rtruediv__(o)
    else:
        # On Python 3 true division is the only division.
        __rtruediv__ = __rdiv__
    __rfloordiv__ = lambda x, o: o // x._get_current_object()
    __rmod__ = lambda x, o: o % x._get_current_object()
    __rdivmod__ = lambda x, o: x._get_current_object().__rdivmod__(o)
    __copy__ = lambda x: copy.copy(x._get_current_object())
    __deepcopy__ = lambda x, memo: copy.deepcopy(x._get_current_object(), memo)
Yannig/ansible | lib/ansible/modules/network/cloudengine/ce_bgp_neighbor_af.py | 27 | 109137 | #!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
# Module metadata consumed by Ansible's plugin loader / ansible-doc.
ANSIBLE_METADATA = {'metadata_version': '1.1',
                    'status': ['preview'],
                    'supported_by': 'community'}
DOCUMENTATION = '''
---
module: ce_bgp_neighbor_af
version_added: "2.4"
short_description: Manages BGP neighbor Address-family configuration on HUAWEI CloudEngine switches.
description:
- Manages BGP neighbor Address-family configurations on HUAWEI CloudEngine switches.
author:
- wangdezhuang (@CloudEngine-Ansible)
options:
vrf_name:
description:
- Name of a BGP instance. The name is a case-sensitive string of characters.
The BGP instance can be used only after the corresponding VPN instance is created.
required: true
af_type:
description:
- Address family type of a BGP instance.
required: true
choices: ['ipv4uni', 'ipv4multi', 'ipv4vpn', 'ipv6uni', 'ipv6vpn', 'evpn']
remote_address:
description:
- IPv4 or IPv6 peer connection address.
required: true
advertise_irb:
description:
- If the value is true, advertised IRB routes are distinguished.
If the value is false, advertised IRB routes are not distinguished.
required: false
default: no_use
choices: ['no_use','true','false']
advertise_arp:
description:
- If the value is true, advertised ARP routes are distinguished.
If the value is false, advertised ARP routes are not distinguished.
required: false
default: no_use
choices: ['no_use','true','false']
advertise_remote_nexthop:
description:
- If the value is true, the remote next-hop attribute is advertised to peers.
If the value is false, the remote next-hop attribute is not advertised to any peers.
required: false
default: no_use
choices: ['no_use','true','false']
advertise_community:
description:
- If the value is true, the community attribute is advertised to peers.
If the value is false, the community attribute is not advertised to peers.
required: false
default: no_use
choices: ['no_use','true','false']
advertise_ext_community:
description:
- If the value is true, the extended community attribute is advertised to peers.
If the value is false, the extended community attribute is not advertised to peers.
required: false
default: no_use
choices: ['no_use','true','false']
discard_ext_community:
description:
- If the value is true, the extended community attribute in the peer route information is discarded.
If the value is false, the extended community attribute in the peer route information is not discarded.
required: false
default: no_use
choices: ['no_use','true','false']
allow_as_loop_enable:
description:
- If the value is true, repetitive local AS numbers are allowed.
If the value is false, repetitive local AS numbers are not allowed.
required: false
default: no_use
choices: ['no_use','true','false']
allow_as_loop_limit:
description:
- Set the maximum number of repetitive local AS number.
The value is an integer ranging from 1 to 10.
required: false
default: null
keep_all_routes:
description:
- If the value is true, the system stores all route update messages received from all peers (groups)
after BGP connection setup.
If the value is false, the system stores only BGP update messages that are received from peers
and pass the configured import policy.
required: false
default: no_use
choices: ['no_use','true','false']
nexthop_configure:
description:
- null, The next hop is not changed.
local, The next hop is changed to the local IP address.
invariable, Prevent the device from changing the next hop of each imported IGP route
when advertising it to its BGP peers.
required: false
default: null
choices: ['null', 'local', 'invariable']
preferred_value:
description:
- Assign a preferred value for the routes learned from a specified peer.
The value is an integer ranging from 0 to 65535.
required: false
default: null
public_as_only:
description:
- If the value is true, sent BGP update messages carry only the public AS number but do not carry
private AS numbers.
If the value is false, sent BGP update messages can carry private AS numbers.
required: false
default: no_use
choices: ['no_use','true','false']
public_as_only_force:
description:
- If the value is true, sent BGP update messages carry only the public AS number but do not carry
private AS numbers.
If the value is false, sent BGP update messages can carry private AS numbers.
required: false
default: no_use
choices: ['no_use','true','false']
public_as_only_limited:
description:
- Limited use public as number.
required: false
default: no_use
choices: ['no_use','true','false']
public_as_only_replace:
description:
- Private as replaced by public as number.
required: false
default: no_use
choices: ['no_use','true','false']
public_as_only_skip_peer_as:
description:
- Public as only skip peer as.
required: false
default: no_use
choices: ['no_use','true','false']
route_limit:
description:
- Configure the maximum number of routes that can be accepted from a peer.
The value is an integer ranging from 1 to 4294967295.
required: false
default: null
route_limit_percent:
description:
- Specify the percentage of routes when a router starts to generate an alarm.
The value is an integer ranging from 1 to 100.
required: false
default: null
route_limit_type:
description:
- Noparameter, After the number of received routes exceeds the threshold and the timeout
timer expires,no action.
AlertOnly, An alarm is generated and no additional routes will be accepted if the maximum
number of routes allowed have been received.
IdleForever, The connection that is interrupted is not automatically re-established if the
maximum number of routes allowed have been received.
IdleTimeout, After the number of received routes exceeds the threshold and the timeout timer
expires, the connection that is interrupted is automatically re-established.
required: false
default: null
choices: ['noparameter', 'alertOnly', 'idleForever', 'idleTimeout']
route_limit_idle_timeout:
description:
- Specify the value of the idle-timeout timer to automatically reestablish the connections after
they are cut off when the number of routes exceeds the set threshold.
The value is an integer ranging from 1 to 1200.
required: false
default: null
rt_updt_interval:
description:
- Specify the minimum interval at which Update packets are sent. The value is an integer, in seconds.
The value is an integer ranging from 0 to 600.
required: false
default: null
redirect_ip:
description:
- Redirect ip.
required: false
default: no_use
choices: ['no_use','true','false']
redirect_ip_vaildation:
description:
- Redirect ip vaildation.
required: false
default: no_use
choices: ['no_use','true','false']
reflect_client:
description:
- If the value is true, the local device functions as the route reflector and a peer functions
as a client of the route reflector.
If the value is false, the route reflector and client functions are not configured.
required: false
default: no_use
choices: ['no_use','true','false']
substitute_as_enable:
description:
- If the value is true, the function to replace a specified peer's AS number in the AS-Path attribute with
the local AS number is enabled.
If the value is false, the function to replace a specified peer's AS number in the AS-Path attribute with
the local AS number is disabled.
required: false
default: no_use
choices: ['no_use','true','false']
import_rt_policy_name:
description:
- Specify the filtering policy applied to the routes learned from a peer.
The value is a string of 1 to 40 characters.
required: false
default: null
export_rt_policy_name:
description:
- Specify the filtering policy applied to the routes to be advertised to a peer.
The value is a string of 1 to 40 characters.
required: false
default: null
import_pref_filt_name:
description:
- Specify the IPv4 filtering policy applied to the routes received from a specified peer.
The value is a string of 1 to 169 characters.
required: false
default: null
export_pref_filt_name:
description:
- Specify the IPv4 filtering policy applied to the routes to be advertised to a specified peer.
The value is a string of 1 to 169 characters.
required: false
default: null
import_as_path_filter:
description:
- Apply an AS_Path-based filtering policy to the routes received from a specified peer.
The value is an integer ranging from 1 to 256.
required: false
default: null
export_as_path_filter:
description:
- Apply an AS_Path-based filtering policy to the routes to be advertised to a specified peer.
The value is an integer ranging from 1 to 256.
required: false
default: null
import_as_path_name_or_num:
description:
- A routing strategy based on the AS path list for routing received by a designated peer.
required: false
default: null
export_as_path_name_or_num:
description:
- Application of a AS path list based filtering policy to the routing of a specified peer.
required: false
default: null
import_acl_name_or_num:
description:
- Apply an IPv4 ACL-based filtering policy to the routes received from a specified peer.
The value is a string of 1 to 32 characters.
required: false
default: null
export_acl_name_or_num:
description:
- Apply an IPv4 ACL-based filtering policy to the routes to be advertised to a specified peer.
The value is a string of 1 to 32 characters.
required: false
default: null
ipprefix_orf_enable:
description:
- If the value is true, the address prefix-based Outbound Route Filter (ORF) capability is
enabled for peers.
If the value is false, the address prefix-based Outbound Route Filter (ORF) capability is
disabled for peers.
required: false
default: no_use
choices: ['no_use','true','false']
is_nonstd_ipprefix_mod:
description:
- If the value is true, Non-standard capability codes are used during capability negotiation.
If the value is false, RFC-defined standard ORF capability codes are used during capability negotiation.
required: false
default: no_use
choices: ['no_use','true','false']
orftype:
description:
- ORF Type.
The value is an integer ranging from 0 to 65535.
required: false
default: null
orf_mode:
description:
- ORF mode.
null, Default value.
receive, ORF for incoming packets.
send, ORF for outgoing packets.
both, ORF for incoming and outgoing packets.
required: false
default: null
choices: ['null', 'receive', 'send', 'both']
soostring:
description:
- Configure the Site-of-Origin (SoO) extended community attribute.
The value is a string of 3 to 21 characters.
required: false
default: null
default_rt_adv_enable:
description:
- If the value is true, the function to advertise default routes to peers is enabled.
If the value is false, the function to advertise default routes to peers is disabled.
required: false
default: no_use
choices: ['no_use','true', 'false']
default_rt_adv_policy:
description:
- Specify the name of a used policy. The value is a string.
The value is a string of 1 to 40 characters.
required: false
default: null
default_rt_match_mode:
description:
- null, Null.
matchall, Advertise the default route if all matching conditions are met.
matchany, Advertise the default route if any matching condition is met.
required: false
default: null
choices: ['null', 'matchall', 'matchany']
add_path_mode:
description:
- null, Null.
receive, Support receiving Add-Path routes.
send, Support sending Add-Path routes.
both, Support receiving and sending Add-Path routes.
required: false
default: null
choices: ['null', 'receive', 'send', 'both']
adv_add_path_num:
description:
- The number of addPath advertise route.
The value is an integer ranging from 2 to 64.
required: false
default: null
origin_as_valid:
description:
- If the value is true, Application results of route announcement.
If the value is false, Routing application results are not notified.
required: false
default: no_use
choices: ['no_use','true', 'false']
vpls_enable:
description:
- If the value is true, vpls enable.
If the value is false, vpls disable.
required: false
default: no_use
choices: ['no_use','true', 'false']
vpls_ad_disable:
description:
- If the value is true, enable vpls-ad.
If the value is false, disable vpls-ad.
required: false
default: no_use
choices: ['no_use','true', 'false']
update_pkt_standard_compatible:
description:
- If the value is true, When the vpnv4 multicast neighbor receives and updates the message,
the message has no label.
If the value is false, When the vpnv4 multicast neighbor receives and updates the message,
the message has label.
required: false
default: no_use
choices: ['no_use','true', 'false']
'''
# Playbook examples rendered by ``ansible-doc`` for this module.
EXAMPLES = '''
- name: CloudEngine BGP neighbor address family test
hosts: cloudengine
connection: local
gather_facts: no
vars:
cli:
host: "{{ inventory_hostname }}"
port: "{{ ansible_ssh_port }}"
username: "{{ username }}"
password: "{{ password }}"
transport: cli
tasks:
- name: "Config BGP peer Address_Family"
ce_bgp_neighbor_af:
state: present
vrf_name: js
af_type: ipv4uni
remote_address: 192.168.10.10
nexthop_configure: local
provider: "{{ cli }}"
- name: "Undo BGP peer Address_Family"
ce_bgp_neighbor_af:
state: absent
vrf_name: js
af_type: ipv4uni
remote_address: 192.168.10.10
nexthop_configure: local
provider: "{{ cli }}"
'''
# Description of the keys this module registers in its result dict.
RETURN = '''
changed:
description: check to see if a change was made on the device
returned: always
type: boolean
sample: true
proposed:
description: k/v pairs of parameters passed into module
returned: always
type: dict
sample: {"af_type": "ipv4uni", "nexthop_configure": "local",
"remote_address": "192.168.10.10",
"state": "present", "vrf_name": "js"}
existing:
description: k/v pairs of existing aaa server
returned: always
type: dict
sample: {"bgp neighbor af": {"af_type": "ipv4uni", "remote_address": "192.168.10.10",
"vrf_name": "js"},
"bgp neighbor af other": {"af_type": "ipv4uni", "nexthop_configure": "null",
"vrf_name": "js"}}
end_state:
description: k/v pairs of aaa params after module execution
returned: always
type: dict
sample: {"bgp neighbor af": {"af_type": "ipv4uni", "remote_address": "192.168.10.10",
"vrf_name": "js"},
"bgp neighbor af other": {"af_type": "ipv4uni", "nexthop_configure": "local",
"vrf_name": "js"}}
updates:
description: command sent to the device
returned: always
type: list
sample: ["peer 192.168.10.10 next-hop-local"]
'''
import re
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.ce import get_nc_config, set_nc_config, ce_argument_spec, check_ip_addr
# NETCONF <get> filter for reading BGP peer address-family state.
# HEADER takes two %s substitutions: (vrf_name, af_type); callers may splice
# extra leaf elements between HEADER and TAIL to narrow the query.
CE_GET_BGP_PEER_AF_HEADER = """
<filter type="subtree">
<bgp xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<bgpcomm>
<bgpVrfs>
<bgpVrf>
<vrfName>%s</vrfName>
<bgpVrfAFs>
<bgpVrfAF>
<afType>%s</afType>
<peerAFs>
<peerAF>
<remoteAddress></remoteAddress>
"""
CE_GET_BGP_PEER_AF_TAIL = """
</peerAF>
</peerAFs>
</bgpVrfAF>
</bgpVrfAFs>
</bgpVrf>
</bgpVrfs>
</bgpcomm>
</bgp>
</filter>
"""
# <edit-config> template (operation="merge") for updating a peer AF.
# HEADER substitutions: (vrf_name, af_type, remote_address); extra config
# leaves go between HEADER and TAIL.
CE_MERGE_BGP_PEER_AF_HEADER = """
<config>
<bgp xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<bgpcomm>
<bgpVrfs>
<bgpVrf>
<vrfName>%s</vrfName>
<bgpVrfAFs>
<bgpVrfAF>
<afType>%s</afType>
<peerAFs>
<peerAF operation="merge">
<remoteAddress>%s</remoteAddress>
"""
CE_MERGE_BGP_PEER_AF_TAIL = """
</peerAF>
</peerAFs>
</bgpVrfAF>
</bgpVrfAFs>
</bgpVrf>
</bgpVrfs>
</bgpcomm>
</bgp>
</config>
"""
# Complete <edit-config> payload (operation="create") adding a peer AF.
# Substitutions: (vrf_name, af_type, remote_address).
CE_CREATE_BGP_PEER_AF = """
<config>
<bgp xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<bgpcomm>
<bgpVrfs>
<bgpVrf>
<vrfName>%s</vrfName>
<bgpVrfAFs>
<bgpVrfAF>
<afType>%s</afType>
<peerAFs>
<peerAF operation="create">
<remoteAddress>%s</remoteAddress>
</peerAF>
</peerAFs>
</bgpVrfAF>
</bgpVrfAFs>
</bgpVrf>
</bgpVrfs>
</bgpcomm>
</bgp>
</config>
"""
# Complete <edit-config> payload (operation="delete") removing a peer AF.
# Substitutions: (vrf_name, af_type, remote_address).
CE_DELETE_BGP_PEER_AF = """
<config>
<bgp xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<bgpcomm>
<bgpVrfs>
<bgpVrf>
<vrfName>%s</vrfName>
<bgpVrfAFs>
<bgpVrfAF>
<afType>%s</afType>
<peerAFs>
<peerAF operation="delete">
<remoteAddress>%s</remoteAddress>
</peerAF>
</peerAFs>
</bgpVrfAF>
</bgpVrfAFs>
</bgpVrf>
</bgpVrfs>
</bgpcomm>
</bgp>
</config>
"""
class BgpNeighborAf(object):
""" Manages BGP neighbor Address-family configuration """
def netconf_get_config(self, **kwargs):
    """Fetch device configuration.

    Expects ``module`` (the AnsibleModule) and ``conf_str`` (the NETCONF
    filter string) as keyword arguments; returns the XML reply string
    produced by ``get_nc_config``.
    """
    return get_nc_config(kwargs["module"], kwargs["conf_str"])
def netconf_set_config(self, **kwargs):
    """Apply configuration to the device.

    Expects ``module`` (the AnsibleModule) and ``conf_str`` (the NETCONF
    config payload) as keyword arguments; returns the XML reply string
    produced by ``set_nc_config``.
    """
    return set_nc_config(kwargs["module"], kwargs["conf_str"])
def check_bgp_neighbor_af_args(self, **kwargs):
    """Validate the core peer-AF parameters and decide if config is needed.

    Checks ``vrf_name`` length and ``remote_address`` validity (failing the
    module on bad input), queries the device for the currently configured
    peer address-family, and returns a dict mirroring the device state plus
    a ``need_cfg`` flag: True when state=present and the peer is missing or
    different, or when state=absent and the peer is present.
    """
    module = kwargs["module"]
    result = dict()
    need_cfg = False

    vrf_name = module.params['vrf_name']
    # Guarded by truthiness, so only the upper bound can actually trip here.
    if vrf_name and (len(vrf_name) > 31 or len(vrf_name) == 0):
        module.fail_json(
            msg='Error: The len of vrf_name %s is out of [1 - 31].' % vrf_name)

    state = module.params['state']
    af_type = module.params['af_type']
    remote_address = module.params['remote_address']
    if not check_ip_addr(ipaddr=remote_address):
        module.fail_json(
            msg='Error: The remote_address %s is invalid.' % remote_address)

    conf_str = CE_GET_BGP_PEER_AF_HEADER % (
        vrf_name, af_type) + CE_GET_BGP_PEER_AF_TAIL
    recv_xml = self.netconf_get_config(module=module, conf_str=conf_str)

    if "<data/>" in recv_xml:
        # Empty reply: nothing configured on the device for this (vrf, af).
        need_cfg = (state == "present")
    else:
        addresses = re.findall(
            r'.*<remoteAddress>(.*)</remoteAddress>.*', recv_xml)
        if addresses:
            result["remote_address"] = addresses
            result["vrf_name"] = vrf_name
            result["af_type"] = af_type
            if state == "present":
                need_cfg = addresses[0] != remote_address
            else:
                need_cfg = addresses[0] == remote_address
        elif state == "present":
            # Reply had data but no remoteAddress leaf: peer not configured.
            need_cfg = True

    result["need_cfg"] = need_cfg
    return result
def check_bgp_neighbor_af_other(self, **kwargs):
""" check_bgp_neighbor_af_other """
module = kwargs["module"]
result = dict()
need_cfg = False
state = module.params['state']
vrf_name = module.params['vrf_name']
af_type = module.params['af_type']
if state == "absent":
result["need_cfg"] = need_cfg
return result
advertise_irb = module.params['advertise_irb']
if advertise_irb != 'no_use':
conf_str = CE_GET_BGP_PEER_AF_HEADER % (
vrf_name, af_type) + "<advertiseIrb></advertiseIrb>" + CE_GET_BGP_PEER_AF_TAIL
recv_xml = self.netconf_get_config(module=module, conf_str=conf_str)
if "<data/>" in recv_xml:
need_cfg = True
else:
re_find = re.findall(
r'.*<advertiseIrb>(.*)</advertiseIrb>.*', recv_xml)
if re_find:
result["advertise_irb"] = re_find
result["vrf_name"] = vrf_name
result["af_type"] = af_type
if re_find[0] != advertise_irb:
need_cfg = True
else:
need_cfg = True
advertise_arp = module.params['advertise_arp']
if advertise_arp != 'no_use':
conf_str = CE_GET_BGP_PEER_AF_HEADER % (
vrf_name, af_type) + "<advertiseArp></advertiseArp>" + CE_GET_BGP_PEER_AF_TAIL
recv_xml = self.netconf_get_config(module=module, conf_str=conf_str)
if "<data/>" in recv_xml:
need_cfg = True
else:
re_find = re.findall(
r'.*<advertiseArp>(.*)</advertiseArp>.*', recv_xml)
if re_find:
result["advertise_arp"] = re_find
result["vrf_name"] = vrf_name
result["af_type"] = af_type
if re_find[0] != advertise_arp:
need_cfg = True
else:
need_cfg = True
advertise_remote_nexthop = module.params['advertise_remote_nexthop']
if advertise_remote_nexthop != 'no_use':
conf_str = CE_GET_BGP_PEER_AF_HEADER % (
vrf_name, af_type) + "<advertiseRemoteNexthop></advertiseRemoteNexthop>" + CE_GET_BGP_PEER_AF_TAIL
recv_xml = self.netconf_get_config(module=module, conf_str=conf_str)
if "<data/>" in recv_xml:
need_cfg = True
else:
re_find = re.findall(
r'.*<advertiseRemoteNexthop>(.*)</advertiseRemoteNexthop>.*', recv_xml)
if re_find:
result["advertise_remote_nexthop"] = re_find
result["vrf_name"] = vrf_name
result["af_type"] = af_type
if re_find[0] != advertise_remote_nexthop:
need_cfg = True
else:
need_cfg = True
advertise_community = module.params['advertise_community']
if advertise_community != 'no_use':
conf_str = CE_GET_BGP_PEER_AF_HEADER % (
vrf_name, af_type) + "<advertiseCommunity></advertiseCommunity>" + CE_GET_BGP_PEER_AF_TAIL
recv_xml = self.netconf_get_config(module=module, conf_str=conf_str)
if "<data/>" in recv_xml:
need_cfg = True
else:
re_find = re.findall(
r'.*<advertiseCommunity>(.*)</advertiseCommunity>.*', recv_xml)
if re_find:
result["advertise_community"] = re_find
result["vrf_name"] = vrf_name
result["af_type"] = af_type
if re_find[0] != advertise_community:
need_cfg = True
else:
need_cfg = True
advertise_ext_community = module.params['advertise_ext_community']
if advertise_ext_community != 'no_use':
conf_str = CE_GET_BGP_PEER_AF_HEADER % (
vrf_name, af_type) + "<advertiseExtCommunity></advertiseExtCommunity>" + CE_GET_BGP_PEER_AF_TAIL
recv_xml = self.netconf_get_config(module=module, conf_str=conf_str)
if "<data/>" in recv_xml:
need_cfg = True
else:
re_find = re.findall(
r'.*<advertiseExtCommunity>(.*)</advertiseExtCommunity>.*', recv_xml)
if re_find:
result["advertise_ext_community"] = re_find
result["vrf_name"] = vrf_name
result["af_type"] = af_type
if re_find[0] != advertise_ext_community:
need_cfg = True
else:
need_cfg = True
discard_ext_community = module.params['discard_ext_community']
if discard_ext_community != 'no_use':
conf_str = CE_GET_BGP_PEER_AF_HEADER % (
vrf_name, af_type) + "<discardExtCommunity></discardExtCommunity>" + CE_GET_BGP_PEER_AF_TAIL
recv_xml = self.netconf_get_config(module=module, conf_str=conf_str)
if "<data/>" in recv_xml:
need_cfg = True
else:
re_find = re.findall(
r'.*<discardExtCommunity>(.*)</discardExtCommunity>.*', recv_xml)
if re_find:
result["discard_ext_community"] = re_find
result["vrf_name"] = vrf_name
result["af_type"] = af_type
if re_find[0] != discard_ext_community:
need_cfg = True
else:
need_cfg = True
allow_as_loop_enable = module.params['allow_as_loop_enable']
if allow_as_loop_enable != 'no_use':
conf_str = CE_GET_BGP_PEER_AF_HEADER % (
vrf_name, af_type) + "<allowAsLoopEnable></allowAsLoopEnable>" + CE_GET_BGP_PEER_AF_TAIL
recv_xml = self.netconf_get_config(module=module, conf_str=conf_str)
if "<data/>" in recv_xml:
need_cfg = True
else:
re_find = re.findall(
r'.*<allowAsLoopEnable>(.*)</allowAsLoopEnable>.*', recv_xml)
if re_find:
result["allow_as_loop_enable"] = re_find
result["vrf_name"] = vrf_name
result["af_type"] = af_type
if re_find[0] != allow_as_loop_enable:
need_cfg = True
else:
need_cfg = True
allow_as_loop_limit = module.params['allow_as_loop_limit']
if allow_as_loop_limit:
if int(allow_as_loop_limit) > 10 or int(allow_as_loop_limit) < 1:
module.fail_json(
msg='the value of allow_as_loop_limit %s is out of [1 - 10].' % allow_as_loop_limit)
conf_str = CE_GET_BGP_PEER_AF_HEADER % (
vrf_name, af_type) + "<allowAsLoopLimit></allowAsLoopLimit>" + CE_GET_BGP_PEER_AF_TAIL
recv_xml = self.netconf_get_config(module=module, conf_str=conf_str)
if "<data/>" in recv_xml:
need_cfg = True
else:
re_find = re.findall(
r'.*<allowAsLoopLimit>(.*)</allowAsLoopLimit>.*', recv_xml)
if re_find:
result["allow_as_loop_limit"] = re_find
result["vrf_name"] = vrf_name
result["af_type"] = af_type
if re_find[0] != allow_as_loop_limit:
need_cfg = True
else:
need_cfg = True
keep_all_routes = module.params['keep_all_routes']
if keep_all_routes != 'no_use':
conf_str = CE_GET_BGP_PEER_AF_HEADER % (
vrf_name, af_type) + "<keepAllRoutes></keepAllRoutes>" + CE_GET_BGP_PEER_AF_TAIL
recv_xml = self.netconf_get_config(module=module, conf_str=conf_str)
if "<data/>" in recv_xml:
need_cfg = True
else:
re_find = re.findall(
r'.*<keepAllRoutes>(.*)</keepAllRoutes>.*', recv_xml)
if re_find:
result["keep_all_routes"] = re_find
result["vrf_name"] = vrf_name
result["af_type"] = af_type
if re_find[0] != keep_all_routes:
need_cfg = True
else:
need_cfg = True
nexthop_configure = module.params['nexthop_configure']
if nexthop_configure:
conf_str = CE_GET_BGP_PEER_AF_HEADER % (
vrf_name, af_type) + "<nextHopConfigure></nextHopConfigure>" + CE_GET_BGP_PEER_AF_TAIL
recv_xml = self.netconf_get_config(module=module, conf_str=conf_str)
if "<data/>" in recv_xml:
need_cfg = True
else:
re_find = re.findall(
r'.*<nextHopConfigure>(.*)</nextHopConfigure>.*', recv_xml)
if re_find:
result["nexthop_configure"] = re_find
result["vrf_name"] = vrf_name
result["af_type"] = af_type
if re_find[0] != nexthop_configure:
need_cfg = True
else:
need_cfg = True
preferred_value = module.params['preferred_value']
if preferred_value:
if int(preferred_value) > 65535 or int(preferred_value) < 0:
module.fail_json(
msg='the value of preferred_value %s is out of [0 - 65535].' % preferred_value)
conf_str = CE_GET_BGP_PEER_AF_HEADER % (
vrf_name, af_type) + "<preferredValue></preferredValue>" + CE_GET_BGP_PEER_AF_TAIL
recv_xml = self.netconf_get_config(module=module, conf_str=conf_str)
if "<data/>" in recv_xml:
need_cfg = True
else:
re_find = re.findall(
r'.*<preferredValue>(.*)</preferredValue>.*', recv_xml)
if re_find:
result["preferred_value"] = re_find
result["vrf_name"] = vrf_name
result["af_type"] = af_type
if re_find[0] != preferred_value:
need_cfg = True
else:
need_cfg = True
public_as_only = module.params['public_as_only']
if public_as_only != 'no_use':
conf_str = CE_GET_BGP_PEER_AF_HEADER % (
vrf_name, af_type) + "<publicAsOnly></publicAsOnly>" + CE_GET_BGP_PEER_AF_TAIL
recv_xml = self.netconf_get_config(module=module, conf_str=conf_str)
if "<data/>" in recv_xml:
need_cfg = True
else:
re_find = re.findall(
r'.*<publicAsOnly>(.*)</publicAsOnly>.*', recv_xml)
if re_find:
result["public_as_only"] = re_find
result["vrf_name"] = vrf_name
result["af_type"] = af_type
if re_find[0] != public_as_only:
need_cfg = True
else:
need_cfg = True
public_as_only_force = module.params['public_as_only_force']
if public_as_only_force != 'no_use':
conf_str = CE_GET_BGP_PEER_AF_HEADER % (
vrf_name, af_type) + "<publicAsOnlyForce></publicAsOnlyForce>" + CE_GET_BGP_PEER_AF_TAIL
recv_xml = self.netconf_get_config(module=module, conf_str=conf_str)
if "<data/>" in recv_xml:
need_cfg = True
else:
re_find = re.findall(
r'.*<publicAsOnlyForce>(.*)</publicAsOnlyForce>.*', recv_xml)
if re_find:
result["public_as_only_force"] = re_find
result["vrf_name"] = vrf_name
result["af_type"] = af_type
if re_find[0] != public_as_only_force:
need_cfg = True
else:
need_cfg = True
public_as_only_limited = module.params['public_as_only_limited']
if public_as_only_limited != 'no_use':
conf_str = CE_GET_BGP_PEER_AF_HEADER % (
vrf_name, af_type) + "<publicAsOnlyLimited></publicAsOnlyLimited>" + CE_GET_BGP_PEER_AF_TAIL
recv_xml = self.netconf_get_config(module=module, conf_str=conf_str)
if "<data/>" in recv_xml:
need_cfg = True
else:
re_find = re.findall(
r'.*<publicAsOnlyLimited>(.*)</publicAsOnlyLimited>.*', recv_xml)
if re_find:
result["public_as_only_limited"] = re_find
result["vrf_name"] = vrf_name
result["af_type"] = af_type
if re_find[0] != public_as_only_limited:
need_cfg = True
else:
need_cfg = True
public_as_only_replace = module.params['public_as_only_replace']
if public_as_only_replace != 'no_use':
conf_str = CE_GET_BGP_PEER_AF_HEADER % (
vrf_name, af_type) + "<publicAsOnlyReplace></publicAsOnlyReplace>" + CE_GET_BGP_PEER_AF_TAIL
recv_xml = self.netconf_get_config(module=module, conf_str=conf_str)
if "<data/>" in recv_xml:
need_cfg = True
else:
re_find = re.findall(
r'.*<publicAsOnlyReplace>(.*)</publicAsOnlyReplace>.*', recv_xml)
if re_find:
result["public_as_only_replace"] = re_find
result["vrf_name"] = vrf_name
result["af_type"] = af_type
if re_find[0] != public_as_only_replace:
need_cfg = True
else:
need_cfg = True
public_as_only_skip_peer_as = module.params[
'public_as_only_skip_peer_as']
if public_as_only_skip_peer_as != 'no_use':
conf_str = CE_GET_BGP_PEER_AF_HEADER % (
vrf_name, af_type) + "<publicAsOnlySkipPeerAs></publicAsOnlySkipPeerAs>" + CE_GET_BGP_PEER_AF_TAIL
recv_xml = self.netconf_get_config(module=module, conf_str=conf_str)
if "<data/>" in recv_xml:
need_cfg = True
else:
re_find = re.findall(
r'.*<publicAsOnlySkipPeerAs>(.*)</publicAsOnlySkipPeerAs>.*', recv_xml)
if re_find:
result["public_as_only_skip_peer_as"] = re_find
result["vrf_name"] = vrf_name
result["af_type"] = af_type
if re_find[0] != public_as_only_skip_peer_as:
need_cfg = True
else:
need_cfg = True
route_limit = module.params['route_limit']
if route_limit:
if int(route_limit) < 1:
module.fail_json(
msg='the value of route_limit %s is out of [1 - 4294967295].' % route_limit)
conf_str = CE_GET_BGP_PEER_AF_HEADER % (
vrf_name, af_type) + "<routeLimit></routeLimit>" + CE_GET_BGP_PEER_AF_TAIL
recv_xml = self.netconf_get_config(module=module, conf_str=conf_str)
if "<data/>" in recv_xml:
need_cfg = True
else:
re_find = re.findall(
r'.*<routeLimit>(.*)</routeLimit>.*', recv_xml)
if re_find:
result["route_limit"] = re_find
result["vrf_name"] = vrf_name
result["af_type"] = af_type
if re_find[0] != route_limit:
need_cfg = True
else:
need_cfg = True
route_limit_percent = module.params['route_limit_percent']
if route_limit_percent:
if int(route_limit_percent) < 1 or int(route_limit_percent) > 100:
module.fail_json(
msg='Error: The value of route_limit_percent %s is out of [1 - 100].' % route_limit_percent)
conf_str = CE_GET_BGP_PEER_AF_HEADER % (
vrf_name, af_type) + "<routeLimitPercent></routeLimitPercent>" + CE_GET_BGP_PEER_AF_TAIL
recv_xml = self.netconf_get_config(module=module, conf_str=conf_str)
if "<data/>" in recv_xml:
need_cfg = True
else:
re_find = re.findall(
r'.*<routeLimitPercent>(.*)</routeLimitPercent>.*', recv_xml)
if re_find:
result["route_limit_percent"] = re_find
result["vrf_name"] = vrf_name
result["af_type"] = af_type
if re_find[0] != route_limit_percent:
need_cfg = True
else:
need_cfg = True
route_limit_type = module.params['route_limit_type']
if route_limit_type:
conf_str = CE_GET_BGP_PEER_AF_HEADER % (
vrf_name, af_type) + "<routeLimitType></routeLimitType>" + CE_GET_BGP_PEER_AF_TAIL
recv_xml = self.netconf_get_config(module=module, conf_str=conf_str)
if "<data/>" in recv_xml:
need_cfg = True
else:
re_find = re.findall(
r'.*<routeLimitType>(.*)</routeLimitType>.*', recv_xml)
if re_find:
result["route_limit_type"] = re_find
result["vrf_name"] = vrf_name
result["af_type"] = af_type
if re_find[0] != route_limit_type:
need_cfg = True
else:
need_cfg = True
route_limit_idle_timeout = module.params['route_limit_idle_timeout']
if route_limit_idle_timeout:
if int(route_limit_idle_timeout) < 1 or int(route_limit_idle_timeout) > 1200:
module.fail_json(
msg='Error: The value of route_limit_idle_timeout %s is out of '
'[1 - 1200].' % route_limit_idle_timeout)
conf_str = CE_GET_BGP_PEER_AF_HEADER % (
vrf_name, af_type) + "<routeLimitIdleTimeout></routeLimitPercent>" + CE_GET_BGP_PEER_AF_TAIL
recv_xml = self.netconf_get_config(module=module, conf_str=conf_str)
if "<data/>" in recv_xml:
need_cfg = True
else:
re_find = re.findall(
r'.*<routeLimitIdleTimeout>(.*)</routeLimitIdleTimeout>.*', recv_xml)
if re_find:
result["route_limit_idle_timeout"] = re_find
result["vrf_name"] = vrf_name
result["af_type"] = af_type
if re_find[0] != route_limit_idle_timeout:
need_cfg = True
else:
need_cfg = True
rt_updt_interval = module.params['rt_updt_interval']
if rt_updt_interval:
if int(rt_updt_interval) < 0 or int(rt_updt_interval) > 600:
module.fail_json(
msg='Error: The value of rt_updt_interval %s is out of [0 - 600].' % rt_updt_interval)
conf_str = CE_GET_BGP_PEER_AF_HEADER % (
vrf_name, af_type) + "<rtUpdtInterval></rtUpdtInterval>" + CE_GET_BGP_PEER_AF_TAIL
recv_xml = self.netconf_get_config(module=module, conf_str=conf_str)
if "<data/>" in recv_xml:
need_cfg = True
else:
re_find = re.findall(
r'.*<rtUpdtInterval>(.*)</rtUpdtInterval>.*', recv_xml)
if re_find:
result["rt_updt_interval"] = re_find
result["vrf_name"] = vrf_name
result["af_type"] = af_type
if re_find[0] != rt_updt_interval:
need_cfg = True
else:
need_cfg = True
redirect_ip = module.params['redirect_ip']
if redirect_ip != 'no_use':
conf_str = CE_GET_BGP_PEER_AF_HEADER % (
vrf_name, af_type) + "<redirectIP></redirectIP>" + CE_GET_BGP_PEER_AF_TAIL
recv_xml = self.netconf_get_config(module=module, conf_str=conf_str)
if "<data/>" in recv_xml:
need_cfg = True
else:
re_find = re.findall(
r'.*<redirectIP>(.*)</redirectIP>.*', recv_xml)
if re_find:
result["redirect_ip"] = re_find
result["vrf_name"] = vrf_name
result["af_type"] = af_type
if re_find[0] != redirect_ip:
need_cfg = True
else:
need_cfg = True
redirect_ip_vaildation = module.params['redirect_ip_vaildation']
if redirect_ip_vaildation != 'no_use':
conf_str = CE_GET_BGP_PEER_AF_HEADER % (
vrf_name, af_type) + "<redirectIPVaildation></redirectIPVaildation>" + CE_GET_BGP_PEER_AF_TAIL
recv_xml = self.netconf_get_config(module=module, conf_str=conf_str)
if "<data/>" in recv_xml:
need_cfg = True
else:
re_find = re.findall(
r'.*<redirectIPVaildation>(.*)</redirectIPVaildation>.*', recv_xml)
if re_find:
result["redirect_ip_vaildation"] = re_find
result["vrf_name"] = vrf_name
result["af_type"] = af_type
if re_find[0] != redirect_ip_vaildation:
need_cfg = True
else:
need_cfg = True
reflect_client = module.params['reflect_client']
if reflect_client != 'no_use':
conf_str = CE_GET_BGP_PEER_AF_HEADER % (
vrf_name, af_type) + "<reflectClient></reflectClient>" + CE_GET_BGP_PEER_AF_TAIL
recv_xml = self.netconf_get_config(module=module, conf_str=conf_str)
if "<data/>" in recv_xml:
need_cfg = True
else:
re_find = re.findall(
r'.*<reflectClient>(.*)</reflectClient>.*', recv_xml)
if re_find:
result["reflect_client"] = re_find
result["vrf_name"] = vrf_name
result["af_type"] = af_type
if re_find[0] != reflect_client:
need_cfg = True
else:
need_cfg = True
substitute_as_enable = module.params['substitute_as_enable']
if substitute_as_enable != 'no_use':
conf_str = CE_GET_BGP_PEER_AF_HEADER % (
vrf_name, af_type) + "<substituteAsEnable></substituteAsEnable>" + CE_GET_BGP_PEER_AF_TAIL
recv_xml = self.netconf_get_config(module=module, conf_str=conf_str)
if "<data/>" in recv_xml:
need_cfg = True
else:
re_find = re.findall(
r'.*<substituteAsEnable>(.*)</substituteAsEnable>.*', recv_xml)
if re_find:
result["substitute_as_enable"] = re_find
result["vrf_name"] = vrf_name
result["af_type"] = af_type
if re_find[0] != substitute_as_enable:
need_cfg = True
else:
need_cfg = True
import_rt_policy_name = module.params['import_rt_policy_name']
if import_rt_policy_name:
if len(import_rt_policy_name) < 1 or len(import_rt_policy_name) > 40:
module.fail_json(
msg='Error: The len of import_rt_policy_name %s is out of [1 - 40].' % import_rt_policy_name)
conf_str = CE_GET_BGP_PEER_AF_HEADER % (
vrf_name, af_type) + "<importRtPolicyName></importRtPolicyName>" + CE_GET_BGP_PEER_AF_TAIL
recv_xml = self.netconf_get_config(module=module, conf_str=conf_str)
if "<data/>" in recv_xml:
need_cfg = True
else:
re_find = re.findall(
r'.*<importRtPolicyName>(.*)</importRtPolicyName>.*', recv_xml)
if re_find:
result["import_rt_policy_name"] = re_find
result["vrf_name"] = vrf_name
result["af_type"] = af_type
if re_find[0] != import_rt_policy_name:
need_cfg = True
else:
need_cfg = True
export_rt_policy_name = module.params['export_rt_policy_name']
if export_rt_policy_name:
if len(export_rt_policy_name) < 1 or len(export_rt_policy_name) > 40:
module.fail_json(
msg='Error: The len of export_rt_policy_name %s is out of [1 - 40].' % export_rt_policy_name)
conf_str = CE_GET_BGP_PEER_AF_HEADER % (
vrf_name, af_type) + "<exportRtPolicyName></exportRtPolicyName>" + CE_GET_BGP_PEER_AF_TAIL
recv_xml = self.netconf_get_config(module=module, conf_str=conf_str)
if "<data/>" in recv_xml:
need_cfg = True
else:
re_find = re.findall(
r'.*<exportRtPolicyName>(.*)</exportRtPolicyName>.*', recv_xml)
if re_find:
result["export_rt_policy_name"] = re_find
result["vrf_name"] = vrf_name
result["af_type"] = af_type
if re_find[0] != export_rt_policy_name:
need_cfg = True
else:
need_cfg = True
import_pref_filt_name = module.params['import_pref_filt_name']
if import_pref_filt_name:
if len(import_pref_filt_name) < 1 or len(import_pref_filt_name) > 169:
module.fail_json(
msg='Error: The len of import_pref_filt_name %s is out of [1 - 169].' % import_pref_filt_name)
conf_str = CE_GET_BGP_PEER_AF_HEADER % (
vrf_name, af_type) + "<importPrefFiltName></importPrefFiltName>" + CE_GET_BGP_PEER_AF_TAIL
recv_xml = self.netconf_get_config(module=module, conf_str=conf_str)
if "<data/>" in recv_xml:
need_cfg = True
else:
re_find = re.findall(
r'.*<importPrefFiltName>(.*)</importPrefFiltName>.*', recv_xml)
if re_find:
result["import_pref_filt_name"] = re_find
result["vrf_name"] = vrf_name
result["af_type"] = af_type
if re_find[0] != import_pref_filt_name:
need_cfg = True
else:
need_cfg = True
export_pref_filt_name = module.params['export_pref_filt_name']
if export_pref_filt_name:
if len(export_pref_filt_name) < 1 or len(export_pref_filt_name) > 169:
module.fail_json(
msg='Error: The len of export_pref_filt_name %s is out of [1 - 169].' % export_pref_filt_name)
conf_str = CE_GET_BGP_PEER_AF_HEADER % (
vrf_name, af_type) + "<exportPrefFiltName></exportPrefFiltName>" + CE_GET_BGP_PEER_AF_TAIL
recv_xml = self.netconf_get_config(module=module, conf_str=conf_str)
if "<data/>" in recv_xml:
need_cfg = True
else:
re_find = re.findall(
r'.*<exportPrefFiltName>(.*)</exportPrefFiltName>.*', recv_xml)
if re_find:
result["export_pref_filt_name"] = re_find
result["vrf_name"] = vrf_name
result["af_type"] = af_type
if re_find[0] != export_pref_filt_name:
need_cfg = True
else:
need_cfg = True
import_as_path_filter = module.params['import_as_path_filter']
if import_as_path_filter:
if int(import_as_path_filter) < 1 or int(import_as_path_filter) > 256:
module.fail_json(
msg='Error: The value of import_as_path_filter %s is out of [1 - 256].' % import_as_path_filter)
conf_str = CE_GET_BGP_PEER_AF_HEADER % (
vrf_name, af_type) + "<importAsPathFilter></importAsPathFilter>" + CE_GET_BGP_PEER_AF_TAIL
recv_xml = self.netconf_get_config(module=module, conf_str=conf_str)
if "<data/>" in recv_xml:
need_cfg = True
else:
re_find = re.findall(
r'.*<importAsPathFilter>(.*)</importAsPathFilter>.*', recv_xml)
if re_find:
result["import_as_path_filter"] = re_find
result["vrf_name"] = vrf_name
result["af_type"] = af_type
if re_find[0] != import_as_path_filter:
need_cfg = True
else:
need_cfg = True
export_as_path_filter = module.params['export_as_path_filter']
if export_as_path_filter:
if int(export_as_path_filter) < 1 or int(export_as_path_filter) > 256:
module.fail_json(
msg='Error: The value of export_as_path_filter %s is out of [1 - 256].' % export_as_path_filter)
conf_str = CE_GET_BGP_PEER_AF_HEADER % (
vrf_name, af_type) + "<exportAsPathFilter></exportAsPathFilter>" + CE_GET_BGP_PEER_AF_TAIL
recv_xml = self.netconf_get_config(module=module, conf_str=conf_str)
if "<data/>" in recv_xml:
need_cfg = True
else:
re_find = re.findall(
r'.*<exportAsPathFilter>(.*)</exportAsPathFilter>.*', recv_xml)
if re_find:
result["export_as_path_filter"] = re_find
result["vrf_name"] = vrf_name
result["af_type"] = af_type
if re_find[0] != export_as_path_filter:
need_cfg = True
else:
need_cfg = True
import_as_path_name_or_num = module.params[
'import_as_path_name_or_num']
if import_as_path_name_or_num:
if len(import_as_path_name_or_num) < 1 or len(import_as_path_name_or_num) > 51:
module.fail_json(
msg='Error: The len of import_as_path_name_or_num %s is out '
'of [1 - 51].' % import_as_path_name_or_num)
conf_str = CE_GET_BGP_PEER_AF_HEADER % (
vrf_name, af_type) + "<importAsPathNameOrNum></importAsPathNameOrNum>" + CE_GET_BGP_PEER_AF_TAIL
recv_xml = self.netconf_get_config(module=module, conf_str=conf_str)
if "<data/>" in recv_xml:
need_cfg = True
else:
re_find = re.findall(
r'.*<importAsPathNameOrNum>(.*)</importAsPathNameOrNum>.*', recv_xml)
if re_find:
result["import_as_path_name_or_num"] = re_find
result["vrf_name"] = vrf_name
result["af_type"] = af_type
if re_find[0] != import_as_path_name_or_num:
need_cfg = True
else:
need_cfg = True
export_as_path_name_or_num = module.params[
'export_as_path_name_or_num']
if export_as_path_name_or_num:
if len(export_as_path_name_or_num) < 1 or len(export_as_path_name_or_num) > 51:
module.fail_json(
msg='Error: The len of export_as_path_name_or_num %s is out '
'of [1 - 51].' % export_as_path_name_or_num)
conf_str = CE_GET_BGP_PEER_AF_HEADER % (
vrf_name, af_type) + "<exportAsPathNameOrNum></exportAsPathNameOrNum>" + CE_GET_BGP_PEER_AF_TAIL
recv_xml = self.netconf_get_config(module=module, conf_str=conf_str)
if "<data/>" in recv_xml:
need_cfg = True
else:
re_find = re.findall(
r'.*<exportAsPathNameOrNum>(.*)</exportAsPathNameOrNum>.*', recv_xml)
if re_find:
result["export_as_path_name_or_num"] = re_find
result["vrf_name"] = vrf_name
result["af_type"] = af_type
if re_find[0] != export_as_path_name_or_num:
need_cfg = True
else:
need_cfg = True
import_acl_name_or_num = module.params['import_acl_name_or_num']
if import_acl_name_or_num:
if len(import_acl_name_or_num) < 1 or len(import_acl_name_or_num) > 32:
module.fail_json(
msg='Error: The len of import_acl_name_or_num %s is out of [1 - 32].' % import_acl_name_or_num)
conf_str = CE_GET_BGP_PEER_AF_HEADER % (
vrf_name, af_type) + "<importAclNameOrNum></importAclNameOrNum>" + CE_GET_BGP_PEER_AF_TAIL
recv_xml = self.netconf_get_config(module=module, conf_str=conf_str)
if "<data/>" in recv_xml:
need_cfg = True
else:
re_find = re.findall(
r'.*<importAclNameOrNum>(.*)</importAclNameOrNum>.*', recv_xml)
if re_find:
result["import_acl_name_or_num"] = re_find
result["vrf_name"] = vrf_name
result["af_type"] = af_type
if re_find[0] != import_acl_name_or_num:
need_cfg = True
else:
need_cfg = True
export_acl_name_or_num = module.params['export_acl_name_or_num']
if export_acl_name_or_num:
if len(export_acl_name_or_num) < 1 or len(export_acl_name_or_num) > 32:
module.fail_json(
msg='Error: The len of export_acl_name_or_num %s is out of [1 - 32].' % export_acl_name_or_num)
conf_str = CE_GET_BGP_PEER_AF_HEADER % (
vrf_name, af_type) + "<exportAclNameOrNum></exportAclNameOrNum>" + CE_GET_BGP_PEER_AF_TAIL
recv_xml = self.netconf_get_config(module=module, conf_str=conf_str)
if "<data/>" in recv_xml:
need_cfg = True
else:
re_find = re.findall(
r'.*<exportAclNameOrNum>(.*)</exportAclNameOrNum>.*', recv_xml)
if re_find:
result["export_acl_name_or_num"] = re_find
result["vrf_name"] = vrf_name
result["af_type"] = af_type
if re_find[0] != export_acl_name_or_num:
need_cfg = True
else:
need_cfg = True
ipprefix_orf_enable = module.params['ipprefix_orf_enable']
if ipprefix_orf_enable != 'no_use':
conf_str = CE_GET_BGP_PEER_AF_HEADER % (
vrf_name, af_type) + "<ipprefixOrfEnable></ipprefixOrfEnable>" + CE_GET_BGP_PEER_AF_TAIL
recv_xml = self.netconf_get_config(module=module, conf_str=conf_str)
if "<data/>" in recv_xml:
need_cfg = True
else:
re_find = re.findall(
r'.*<ipprefixOrfEnable>(.*)</ipprefixOrfEnable>.*', recv_xml)
if re_find:
result["ipprefix_orf_enable"] = re_find
result["vrf_name"] = vrf_name
result["af_type"] = af_type
if re_find[0] != ipprefix_orf_enable:
need_cfg = True
else:
need_cfg = True
is_nonstd_ipprefix_mod = module.params['is_nonstd_ipprefix_mod']
if is_nonstd_ipprefix_mod != 'no_use':
conf_str = CE_GET_BGP_PEER_AF_HEADER % (
vrf_name, af_type) + "<isNonstdIpprefixMod></isNonstdIpprefixMod>" + CE_GET_BGP_PEER_AF_TAIL
recv_xml = self.netconf_get_config(module=module, conf_str=conf_str)
if "<data/>" in recv_xml:
need_cfg = True
else:
re_find = re.findall(
r'.*<isNonstdIpprefixMod>(.*)</isNonstdIpprefixMod>.*', recv_xml)
if re_find:
result["is_nonstd_ipprefix_mod"] = re_find
result["vrf_name"] = vrf_name
result["af_type"] = af_type
if re_find[0] != is_nonstd_ipprefix_mod:
need_cfg = True
else:
need_cfg = True
orftype = module.params['orftype']
if orftype:
if int(orftype) < 0 or int(orftype) > 65535:
module.fail_json(
msg='Error: The value of orftype %s is out of [0 - 65535].' % orftype)
conf_str = CE_GET_BGP_PEER_AF_HEADER % (
vrf_name, af_type) + "<orftype></orftype>" + CE_GET_BGP_PEER_AF_TAIL
recv_xml = self.netconf_get_config(module=module, conf_str=conf_str)
if "<data/>" in recv_xml:
need_cfg = True
else:
re_find = re.findall(
r'.*<orftype>(.*)</orftype>.*', recv_xml)
if re_find:
result["orftype"] = re_find
result["vrf_name"] = vrf_name
result["af_type"] = af_type
if re_find[0] != orftype:
need_cfg = True
else:
need_cfg = True
orf_mode = module.params['orf_mode']
if orf_mode:
conf_str = CE_GET_BGP_PEER_AF_HEADER % (
vrf_name, af_type) + "<orfMode></orfMode>" + CE_GET_BGP_PEER_AF_TAIL
recv_xml = self.netconf_get_config(module=module, conf_str=conf_str)
if "<data/>" in recv_xml:
need_cfg = True
else:
re_find = re.findall(
r'.*<orfMode>(.*)</orfMode>.*', recv_xml)
if re_find:
result["orf_mode"] = re_find
result["vrf_name"] = vrf_name
result["af_type"] = af_type
if re_find[0] != orf_mode:
need_cfg = True
else:
need_cfg = True
soostring = module.params['soostring']
if soostring:
if len(soostring) < 3 or len(soostring) > 21:
module.fail_json(
msg='Error: The len of soostring %s is out of [3 - 21].' % soostring)
conf_str = CE_GET_BGP_PEER_AF_HEADER % (
vrf_name, af_type) + "<soostring></soostring>" + CE_GET_BGP_PEER_AF_TAIL
recv_xml = self.netconf_get_config(module=module, conf_str=conf_str)
if "<data/>" in recv_xml:
need_cfg = True
else:
re_find = re.findall(
r'.*<soostring>(.*)</soostring>.*', recv_xml)
if re_find:
result["soostring"] = re_find
result["vrf_name"] = vrf_name
result["af_type"] = af_type
if re_find[0] != soostring:
need_cfg = True
else:
need_cfg = True
default_rt_adv_enable = module.params['default_rt_adv_enable']
if default_rt_adv_enable != 'no_use':
conf_str = CE_GET_BGP_PEER_AF_HEADER % (
vrf_name, af_type) + "<defaultRtAdvEnable></defaultRtAdvEnable>" + CE_GET_BGP_PEER_AF_TAIL
recv_xml = self.netconf_get_config(module=module, conf_str=conf_str)
if "<data/>" in recv_xml:
need_cfg = True
else:
re_find = re.findall(
r'.*<defaultRtAdvEnable>(.*)</defaultRtAdvEnable>.*', recv_xml)
if re_find:
result["default_rt_adv_enable"] = re_find
result["vrf_name"] = vrf_name
result["af_type"] = af_type
if re_find[0] != default_rt_adv_enable:
need_cfg = True
else:
need_cfg = True
default_rt_adv_policy = module.params['default_rt_adv_policy']
if default_rt_adv_policy:
if len(default_rt_adv_policy) < 1 or len(default_rt_adv_policy) > 40:
module.fail_json(
msg='Error: The len of default_rt_adv_policy %s is out of [1 - 40].' % default_rt_adv_policy)
conf_str = CE_GET_BGP_PEER_AF_HEADER % (
vrf_name, af_type) + "<defaultRtAdvPolicy></defaultRtAdvPolicy>" + CE_GET_BGP_PEER_AF_TAIL
recv_xml = self.netconf_get_config(module=module, conf_str=conf_str)
if "<data/>" in recv_xml:
need_cfg = True
else:
re_find = re.findall(
r'.*<defaultRtAdvPolicy>(.*)</defaultRtAdvPolicy>.*', recv_xml)
if re_find:
result["default_rt_adv_policy"] = re_find
result["vrf_name"] = vrf_name
result["af_type"] = af_type
if re_find[0] != default_rt_adv_policy:
need_cfg = True
else:
need_cfg = True
default_rt_match_mode = module.params['default_rt_match_mode']
if default_rt_match_mode:
conf_str = CE_GET_BGP_PEER_AF_HEADER % (
vrf_name, af_type) + "<defaultRtMatchMode></defaultRtMatchMode>" + CE_GET_BGP_PEER_AF_TAIL
recv_xml = self.netconf_get_config(module=module, conf_str=conf_str)
if "<data/>" in recv_xml:
need_cfg = True
else:
re_find = re.findall(
r'.*<defaultRtMatchMode>(.*)</defaultRtMatchMode>.*', recv_xml)
if re_find:
result["default_rt_match_mode"] = re_find
result["vrf_name"] = vrf_name
result["af_type"] = af_type
if re_find[0] != default_rt_match_mode:
need_cfg = True
else:
need_cfg = True
add_path_mode = module.params['add_path_mode']
if add_path_mode:
conf_str = CE_GET_BGP_PEER_AF_HEADER % (
vrf_name, af_type) + "<addPathMode></addPathMode>" + CE_GET_BGP_PEER_AF_TAIL
recv_xml = self.netconf_get_config(module=module, conf_str=conf_str)
if "<data/>" in recv_xml:
need_cfg = True
else:
re_find = re.findall(
r'.*<addPathMode>(.*)</addPathMode>.*', recv_xml)
if re_find:
result["add_path_mode"] = re_find
result["vrf_name"] = vrf_name
result["af_type"] = af_type
if re_find[0] != add_path_mode:
need_cfg = True
else:
need_cfg = True
adv_add_path_num = module.params['adv_add_path_num']
if adv_add_path_num:
if int(orftype) < 2 or int(orftype) > 64:
module.fail_json(
msg='Error: The value of adv_add_path_num %s is out of [2 - 64].' % adv_add_path_num)
conf_str = CE_GET_BGP_PEER_AF_HEADER % (
vrf_name, af_type) + "<advAddPathNum></advAddPathNum>" + CE_GET_BGP_PEER_AF_TAIL
recv_xml = self.netconf_get_config(module=module, conf_str=conf_str)
if "<data/>" in recv_xml:
need_cfg = True
else:
re_find = re.findall(
r'.*<advAddPathNum>(.*)</advAddPathNum>.*', recv_xml)
if re_find:
result["adv_add_path_num"] = re_find
result["vrf_name"] = vrf_name
result["af_type"] = af_type
if re_find[0] != adv_add_path_num:
need_cfg = True
else:
need_cfg = True
origin_as_valid = module.params['origin_as_valid']
if origin_as_valid != 'no_use':
conf_str = CE_GET_BGP_PEER_AF_HEADER % (
vrf_name, af_type) + "<originAsValid></originAsValid>" + CE_GET_BGP_PEER_AF_TAIL
recv_xml = self.netconf_get_config(module=module, conf_str=conf_str)
if "<data/>" in recv_xml:
need_cfg = True
else:
re_find = re.findall(
r'.*<originAsValid>(.*)</originAsValid>.*', recv_xml)
if re_find:
result["origin_as_valid"] = re_find
result["vrf_name"] = vrf_name
result["af_type"] = af_type
if re_find[0] != origin_as_valid:
need_cfg = True
else:
need_cfg = True
vpls_enable = module.params['vpls_enable']
if vpls_enable != 'no_use':
conf_str = CE_GET_BGP_PEER_AF_HEADER % (
vrf_name, af_type) + "<vplsEnable></vplsEnable>" + CE_GET_BGP_PEER_AF_TAIL
recv_xml = self.netconf_get_config(module=module, conf_str=conf_str)
if "<data/>" in recv_xml:
need_cfg = True
else:
re_find = re.findall(
r'.*<vplsEnable>(.*)</vplsEnable>.*', recv_xml)
if re_find:
result["vpls_enable"] = re_find
result["vrf_name"] = vrf_name
result["af_type"] = af_type
if re_find[0] != vpls_enable:
need_cfg = True
else:
need_cfg = True
vpls_ad_disable = module.params['vpls_ad_disable']
if vpls_ad_disable != 'no_use':
conf_str = CE_GET_BGP_PEER_AF_HEADER % (
vrf_name, af_type) + "<vplsAdDisable></vplsAdDisable>" + CE_GET_BGP_PEER_AF_TAIL
recv_xml = self.netconf_get_config(module=module, conf_str=conf_str)
if "<data/>" in recv_xml:
need_cfg = True
else:
re_find = re.findall(
r'.*<vplsAdDisable>(.*)</vplsAdDisable>.*', recv_xml)
if re_find:
result["vpls_ad_disable"] = re_find
result["vrf_name"] = vrf_name
result["af_type"] = af_type
if re_find[0] != vpls_ad_disable:
need_cfg = True
else:
need_cfg = True
update_pkt_standard_compatible = module.params[
'update_pkt_standard_compatible']
if update_pkt_standard_compatible != 'no_use':
conf_str = CE_GET_BGP_PEER_AF_HEADER % (
vrf_name, af_type) + "<updatePktStandardCompatible></updatePktStandardCompatible>" + \
CE_GET_BGP_PEER_AF_TAIL
recv_xml = self.netconf_get_config(module=module, conf_str=conf_str)
if "<data/>" in recv_xml:
need_cfg = True
else:
re_find = re.findall(
r'.*<updatePktStandardCompatible>(.*)</updatePktStandardCompatible>.*', recv_xml)
if re_find:
result["update_pkt_standard_compatible"] = re_find
result["vrf_name"] = vrf_name
result["af_type"] = af_type
if re_find[0] != update_pkt_standard_compatible:
need_cfg = True
else:
need_cfg = True
result["need_cfg"] = need_cfg
return result
def merge_bgp_peer_af(self, **kwargs):
    """ merge_bgp_peer_af

    Merge (create-or-update) a BGP peer address-family on the device via
    NETCONF and return the equivalent CLI commands for change reporting.

    kwargs:
        module -- AnsibleModule; reads vrf_name, af_type and remote_address
                  from module.params. Calls module.fail_json on device error.
    Returns:
        list of CLI command strings describing the change.
    """
    module = kwargs["module"]
    vrf_name = module.params['vrf_name']
    af_type = module.params['af_type']
    remote_address = module.params['remote_address']

    conf_str = CE_MERGE_BGP_PEER_AF_HEADER % (
        vrf_name, af_type, remote_address) + CE_MERGE_BGP_PEER_AF_TAIL

    recv_xml = self.netconf_set_config(module=module, conf_str=conf_str)

    if "<ok/>" not in recv_xml:
        module.fail_json(msg='Error: Merge bgp peer address family failed.')

    cmds = []
    # Map af_type to its CLI address-family view command.  The original
    # if/elif chain left `cmd` unbound for the other legal af_type choices
    # ('ipv4vpn', 'ipv6vpn', 'evpn'), raising UnboundLocalError at append;
    # for those types we now simply omit the view line.
    af_view = {
        "ipv4uni": "ipv4-family unicast",
        "ipv4multi": "ipv4-family multicast",
        "ipv6uni": "ipv6-family unicast",
    }.get(af_type)
    if af_view:
        cmds.append(af_view)

    cmds.append("peer %s" % remote_address)

    return cmds
def create_bgp_peer_af(self, **kwargs):
    """ create_bgp_peer_af

    Create a BGP peer address-family on the device via NETCONF and return
    the equivalent CLI commands for change reporting.

    kwargs:
        module -- AnsibleModule; reads vrf_name, af_type and remote_address
                  from module.params. Calls module.fail_json on device error.
    Returns:
        list of CLI command strings describing the change.
    """
    module = kwargs["module"]
    vrf_name = module.params['vrf_name']
    af_type = module.params['af_type']
    remote_address = module.params['remote_address']

    conf_str = CE_CREATE_BGP_PEER_AF % (vrf_name, af_type, remote_address)

    recv_xml = self.netconf_set_config(module=module, conf_str=conf_str)

    if "<ok/>" not in recv_xml:
        module.fail_json(msg='Error: Create bgp peer address family failed.')

    cmds = []
    # Dict lookup instead of the original if/elif chain, which left `cmd`
    # unbound (UnboundLocalError) for af_type values 'ipv4vpn', 'ipv6vpn'
    # and 'evpn'; unmapped types now just skip the address-family view line.
    af_view = {
        "ipv4uni": "ipv4-family unicast",
        "ipv4multi": "ipv4-family multicast",
        "ipv6uni": "ipv6-family unicast",
    }.get(af_type)
    if af_view:
        cmds.append(af_view)

    cmds.append("peer %s" % remote_address)

    return cmds
def delete_bgp_peer_af(self, **kwargs):
    """ delete_bgp_peer_af

    Delete a BGP peer address-family on the device via NETCONF and return
    the equivalent CLI commands for change reporting.

    kwargs:
        module -- AnsibleModule; reads vrf_name, af_type and remote_address
                  from module.params. Calls module.fail_json on device error.
    Returns:
        list of CLI command strings describing the change.
    """
    module = kwargs["module"]
    vrf_name = module.params['vrf_name']
    af_type = module.params['af_type']
    remote_address = module.params['remote_address']

    conf_str = CE_DELETE_BGP_PEER_AF % (vrf_name, af_type, remote_address)

    recv_xml = self.netconf_set_config(module=module, conf_str=conf_str)

    if "<ok/>" not in recv_xml:
        module.fail_json(msg='Error: Delete bgp peer address family failed.')

    cmds = []
    # Dict lookup instead of the original if/elif chain, which left `cmd`
    # unbound (UnboundLocalError) for af_type values 'ipv4vpn', 'ipv6vpn'
    # and 'evpn'; unmapped types now just skip the address-family view line.
    af_view = {
        "ipv4uni": "ipv4-family unicast",
        "ipv4multi": "ipv4-family multicast",
        "ipv6uni": "ipv6-family unicast",
    }.get(af_type)
    if af_view:
        cmds.append(af_view)

    cmds.append("undo peer %s" % remote_address)

    return cmds
def merge_bgp_peer_af_other(self, **kwargs):
    """ merge_bgp_peer_af_other

    Build one NETCONF <peerAF> merge request covering every optional peer
    address-family parameter the user supplied, push it to the device, and
    return the equivalent CLI command list for change reporting.

    kwargs:
        module -- AnsibleModule; all values are read from module.params
                  (remote_address, plus the per-feature options below).
                  Calls module.fail_json if the device does not answer <ok/>.
    Returns:
        list of CLI command strings describing the merged configuration.

    Pattern: for each parameter, a sentinel value ('no_use' for tri-state
    options, falsy for free-form options) means "not configured" and the
    parameter is skipped; otherwise its XML leaf is appended to conf_str
    and, where one exists, the matching CLI command is appended to cmds.
    """
    module = kwargs["module"]
    vrf_name = module.params['vrf_name']
    af_type = module.params['af_type']
    remote_address = module.params['remote_address']

    conf_str = CE_MERGE_BGP_PEER_AF_HEADER % (
        vrf_name, af_type, remote_address)

    cmds = []

    advertise_irb = module.params['advertise_irb']
    if advertise_irb != 'no_use':
        conf_str += "<advertiseIrb>%s</advertiseIrb>" % advertise_irb
        # Bug fix: originally compared against "ture", so the positive CLI
        # form was never reported and "undo" was always emitted.
        if advertise_irb == "true":
            cmd = "peer %s advertise irb" % remote_address
        else:
            cmd = "undo peer %s advertise irb" % remote_address
        cmds.append(cmd)

    advertise_arp = module.params['advertise_arp']
    if advertise_arp != 'no_use':
        conf_str += "<advertiseArp>%s</advertiseArp>" % advertise_arp
        # Bug fix: same "ture" typo as advertise_irb.
        if advertise_arp == "true":
            cmd = "peer %s advertise arp" % remote_address
        else:
            cmd = "undo peer %s advertise arp" % remote_address
        cmds.append(cmd)

    advertise_remote_nexthop = module.params['advertise_remote_nexthop']
    if advertise_remote_nexthop != 'no_use':
        conf_str += "<advertiseRemoteNexthop>%s</advertiseRemoteNexthop>" % advertise_remote_nexthop
        if advertise_remote_nexthop == "true":
            cmd = "peer %s advertise remote-nexthop" % remote_address
        else:
            cmd = "undo peer %s advertise remote-nexthop" % remote_address
        cmds.append(cmd)

    advertise_community = module.params['advertise_community']
    if advertise_community != 'no_use':
        conf_str += "<advertiseCommunity>%s</advertiseCommunity>" % advertise_community
        if advertise_community == "true":
            cmd = "peer %s advertise-community" % remote_address
        else:
            cmd = "undo peer %s advertise-community" % remote_address
        cmds.append(cmd)

    advertise_ext_community = module.params['advertise_ext_community']
    if advertise_ext_community != 'no_use':
        conf_str += "<advertiseExtCommunity>%s</advertiseExtCommunity>" % advertise_ext_community
        if advertise_ext_community == "true":
            cmd = "peer %s advertise-ext-community" % remote_address
        else:
            cmd = "undo peer %s advertise-ext-community" % remote_address
        cmds.append(cmd)

    discard_ext_community = module.params['discard_ext_community']
    if discard_ext_community != 'no_use':
        conf_str += "<discardExtCommunity>%s</discardExtCommunity>" % discard_ext_community
        if discard_ext_community == "true":
            cmd = "peer %s discard-ext-community" % remote_address
        else:
            cmd = "undo peer %s discard-ext-community" % remote_address
        cmds.append(cmd)

    allow_as_loop_enable = module.params['allow_as_loop_enable']
    if allow_as_loop_enable != 'no_use':
        conf_str += "<allowAsLoopEnable>%s</allowAsLoopEnable>" % allow_as_loop_enable
        if allow_as_loop_enable == "true":
            cmd = "peer %s allow-as-loop" % remote_address
        else:
            cmd = "undo peer %s allow-as-loop" % remote_address
        cmds.append(cmd)

    allow_as_loop_limit = module.params['allow_as_loop_limit']
    if allow_as_loop_limit:
        conf_str += "<allowAsLoopLimit>%s</allowAsLoopLimit>" % allow_as_loop_limit
        # NOTE(review): CLI form is gated on allow_as_loop_enable, not on the
        # limit itself — presumably intentional (limit only matters when the
        # feature is enabled); confirm against module docs.
        if allow_as_loop_enable == "true":
            cmd = "peer %s allow-as-loop %s" % (remote_address, allow_as_loop_limit)
        else:
            cmd = "undo peer %s allow-as-loop" % remote_address
        cmds.append(cmd)

    keep_all_routes = module.params['keep_all_routes']
    if keep_all_routes != 'no_use':
        conf_str += "<keepAllRoutes>%s</keepAllRoutes>" % keep_all_routes
        if keep_all_routes == "true":
            cmd = "peer %s keep-all-routes" % remote_address
        else:
            cmd = "undo peer %s keep-all-routes" % remote_address
        cmds.append(cmd)

    nexthop_configure = module.params['nexthop_configure']
    if nexthop_configure:
        conf_str += "<nextHopConfigure>%s</nextHopConfigure>" % nexthop_configure
        # 'null' is a valid choice but has no CLI equivalent, so no command
        # is appended for it.
        if nexthop_configure == "local":
            cmd = "peer %s next-hop-local" % remote_address
            cmds.append(cmd)
        elif nexthop_configure == "invariable":
            cmd = "peer %s next-hop-invariable" % remote_address
            cmds.append(cmd)

    preferred_value = module.params['preferred_value']
    if preferred_value:
        conf_str += "<preferredValue>%s</preferredValue>" % preferred_value
        cmd = "peer %s preferred-value %s" % (remote_address, preferred_value)
        cmds.append(cmd)

    public_as_only = module.params['public_as_only']
    if public_as_only != 'no_use':
        conf_str += "<publicAsOnly>%s</publicAsOnly>" % public_as_only
        if public_as_only == "true":
            cmd = "peer %s public-as-only" % remote_address
        else:
            cmd = "undo peer %s public-as-only" % remote_address
        cmds.append(cmd)

    public_as_only_force = module.params['public_as_only_force']
    if public_as_only_force != 'no_use':
        conf_str += "<publicAsOnlyForce>%s</publicAsOnlyForce>" % public_as_only_force
        if public_as_only_force == "true":
            cmd = "peer %s public-as-only force" % remote_address
        else:
            cmd = "undo peer %s public-as-only force" % remote_address
        cmds.append(cmd)

    public_as_only_limited = module.params['public_as_only_limited']
    if public_as_only_limited != 'no_use':
        conf_str += "<publicAsOnlyLimited>%s</publicAsOnlyLimited>" % public_as_only_limited
        if public_as_only_limited == "true":
            cmd = "peer %s public-as-only limited" % remote_address
        else:
            cmd = "undo peer %s public-as-only limited" % remote_address
        cmds.append(cmd)

    public_as_only_replace = module.params['public_as_only_replace']
    if public_as_only_replace != 'no_use':
        conf_str += "<publicAsOnlyReplace>%s</publicAsOnlyReplace>" % public_as_only_replace
        if public_as_only_replace == "true":
            cmd = "peer %s public-as-only force replace" % remote_address
        else:
            cmd = "undo peer %s public-as-only force replace" % remote_address
        cmds.append(cmd)

    public_as_only_skip_peer_as = module.params[
        'public_as_only_skip_peer_as']
    if public_as_only_skip_peer_as != 'no_use':
        conf_str += "<publicAsOnlySkipPeerAs>%s</publicAsOnlySkipPeerAs>" % public_as_only_skip_peer_as
        if public_as_only_skip_peer_as == "true":
            cmd = "peer %s public-as-only force include-peer-as" % remote_address
        else:
            cmd = "undo peer %s public-as-only force include-peer-as" % remote_address
        cmds.append(cmd)

    route_limit = module.params['route_limit']
    if route_limit:
        conf_str += "<routeLimit>%s</routeLimit>" % route_limit
        cmd = "peer %s route-limit %s" % (remote_address, route_limit)
        cmds.append(cmd)

    # The route-limit modifiers below fold route_limit / route_limit_percent
    # into their CLI strings; they assume those params were supplied together.
    route_limit_percent = module.params['route_limit_percent']
    if route_limit_percent:
        conf_str += "<routeLimitPercent>%s</routeLimitPercent>" % route_limit_percent
        cmd = "peer %s route-limit %s %s" % (remote_address, route_limit, route_limit_percent)
        cmds.append(cmd)

    route_limit_type = module.params['route_limit_type']
    if route_limit_type:
        conf_str += "<routeLimitType>%s</routeLimitType>" % route_limit_type
        if route_limit_type == "alertOnly":
            cmd = "peer %s route-limit %s %s alert-only" % (remote_address, route_limit, route_limit_percent)
            cmds.append(cmd)
        elif route_limit_type == "idleForever":
            cmd = "peer %s route-limit %s %s idle-forever" % (remote_address, route_limit, route_limit_percent)
            cmds.append(cmd)
        elif route_limit_type == "idleTimeout":
            cmd = "peer %s route-limit %s %s idle-timeout" % (remote_address, route_limit, route_limit_percent)
            cmds.append(cmd)

    route_limit_idle_timeout = module.params['route_limit_idle_timeout']
    if route_limit_idle_timeout:
        conf_str += "<routeLimitIdleTimeout>%s</routeLimitIdleTimeout>" % route_limit_idle_timeout
        cmd = "peer %s route-limit %s %s idle-timeout %s" % (remote_address, route_limit,
                                                             route_limit_percent, route_limit_idle_timeout)
        cmds.append(cmd)

    rt_updt_interval = module.params['rt_updt_interval']
    if rt_updt_interval:
        conf_str += "<rtUpdtInterval>%s</rtUpdtInterval>" % rt_updt_interval
        cmd = "peer %s route-update-interval %s" % (remote_address, rt_updt_interval)
        cmds.append(cmd)

    # redirect_ip / redirect_ip_vaildation only add XML leaves; no CLI
    # command is reported for them.
    redirect_ip = module.params['redirect_ip']
    if redirect_ip != 'no_use':
        conf_str += "<redirectIP>%s</redirectIP>" % redirect_ip

    redirect_ip_vaildation = module.params['redirect_ip_vaildation']
    if redirect_ip_vaildation != 'no_use':
        conf_str += "<redirectIPVaildation>%s</redirectIPVaildation>" % redirect_ip_vaildation

    reflect_client = module.params['reflect_client']
    if reflect_client != 'no_use':
        conf_str += "<reflectClient>%s</reflectClient>" % reflect_client
        if reflect_client == "true":
            cmd = "peer %s reflect-client" % remote_address
        else:
            cmd = "undo peer %s reflect-client" % remote_address
        cmds.append(cmd)

    substitute_as_enable = module.params['substitute_as_enable']
    if substitute_as_enable != 'no_use':
        conf_str += "<substituteAsEnable>%s</substituteAsEnable>" % substitute_as_enable

    import_rt_policy_name = module.params['import_rt_policy_name']
    if import_rt_policy_name:
        conf_str += "<importRtPolicyName>%s</importRtPolicyName>" % import_rt_policy_name
        cmd = "peer %s route-policy %s import" % (remote_address, import_rt_policy_name)
        cmds.append(cmd)

    export_rt_policy_name = module.params['export_rt_policy_name']
    if export_rt_policy_name:
        conf_str += "<exportRtPolicyName>%s</exportRtPolicyName>" % export_rt_policy_name
        cmd = "peer %s route-policy %s export" % (remote_address, export_rt_policy_name)
        cmds.append(cmd)

    import_pref_filt_name = module.params['import_pref_filt_name']
    if import_pref_filt_name:
        conf_str += "<importPrefFiltName>%s</importPrefFiltName>" % import_pref_filt_name
        cmd = "peer %s filter-policy %s import" % (remote_address, import_pref_filt_name)
        cmds.append(cmd)

    export_pref_filt_name = module.params['export_pref_filt_name']
    if export_pref_filt_name:
        conf_str += "<exportPrefFiltName>%s</exportPrefFiltName>" % export_pref_filt_name
        cmd = "peer %s filter-policy %s export" % (remote_address, export_pref_filt_name)
        cmds.append(cmd)

    import_as_path_filter = module.params['import_as_path_filter']
    if import_as_path_filter:
        conf_str += "<importAsPathFilter>%s</importAsPathFilter>" % import_as_path_filter
        cmd = "peer %s as-path-filter %s import" % (remote_address, import_as_path_filter)
        cmds.append(cmd)

    export_as_path_filter = module.params['export_as_path_filter']
    if export_as_path_filter:
        conf_str += "<exportAsPathFilter>%s</exportAsPathFilter>" % export_as_path_filter
        cmd = "peer %s as-path-filter %s export" % (remote_address, export_as_path_filter)
        cmds.append(cmd)

    import_as_path_name_or_num = module.params[
        'import_as_path_name_or_num']
    if import_as_path_name_or_num:
        conf_str += "<importAsPathNameOrNum>%s</importAsPathNameOrNum>" % import_as_path_name_or_num
        cmd = "peer %s as-path-filter %s import" % (remote_address, import_as_path_name_or_num)
        cmds.append(cmd)

    export_as_path_name_or_num = module.params[
        'export_as_path_name_or_num']
    if export_as_path_name_or_num:
        conf_str += "<exportAsPathNameOrNum>%s</exportAsPathNameOrNum>" % export_as_path_name_or_num
        cmd = "peer %s as-path-filter %s export" % (remote_address, export_as_path_name_or_num)
        cmds.append(cmd)

    import_acl_name_or_num = module.params['import_acl_name_or_num']
    if import_acl_name_or_num:
        conf_str += "<importAclNameOrNum>%s</importAclNameOrNum>" % import_acl_name_or_num
        cmd = "peer %s filter-policy %s import" % (remote_address, import_acl_name_or_num)
        cmds.append(cmd)

    export_acl_name_or_num = module.params['export_acl_name_or_num']
    if export_acl_name_or_num:
        conf_str += "<exportAclNameOrNum>%s</exportAclNameOrNum>" % export_acl_name_or_num
        cmd = "peer %s filter-policy %s export" % (remote_address, export_acl_name_or_num)
        cmds.append(cmd)

    ipprefix_orf_enable = module.params['ipprefix_orf_enable']
    if ipprefix_orf_enable != 'no_use':
        conf_str += "<ipprefixOrfEnable>%s</ipprefixOrfEnable>" % ipprefix_orf_enable
        if ipprefix_orf_enable == "true":
            cmd = "peer %s capability-advertise orf ip-prefix" % remote_address
        else:
            cmd = "undo peer %s capability-advertise orf ip-prefix" % remote_address
        cmds.append(cmd)

    is_nonstd_ipprefix_mod = module.params['is_nonstd_ipprefix_mod']
    if is_nonstd_ipprefix_mod != 'no_use':
        conf_str += "<isNonstdIpprefixMod>%s</isNonstdIpprefixMod>" % is_nonstd_ipprefix_mod
        # CLI form depends on both this flag and ipprefix_orf_enable.
        if is_nonstd_ipprefix_mod == "true":
            if ipprefix_orf_enable == "true":
                cmd = "peer %s capability-advertise orf non-standard-compatible" % remote_address
            else:
                cmd = "undo peer %s capability-advertise orf non-standard-compatible" % remote_address
            cmds.append(cmd)
        else:
            if ipprefix_orf_enable == "true":
                cmd = "peer %s capability-advertise orf" % remote_address
            else:
                cmd = "undo peer %s capability-advertise orf" % remote_address
            cmds.append(cmd)

    orftype = module.params['orftype']
    if orftype:
        conf_str += "<orftype>%s</orftype>" % orftype

    orf_mode = module.params['orf_mode']
    if orf_mode:
        conf_str += "<orfMode>%s</orfMode>" % orf_mode
        if ipprefix_orf_enable == "true":
            cmd = "peer %s capability-advertise orf ip-prefix %s" % (remote_address, orf_mode)
        else:
            cmd = "undo peer %s capability-advertise orf ip-prefix %s" % (remote_address, orf_mode)
        cmds.append(cmd)

    soostring = module.params['soostring']
    if soostring:
        conf_str += "<soostring>%s</soostring>" % soostring
        cmd = "peer %s soo %s" % (remote_address, soostring)
        cmds.append(cmd)

    # default-route-advertise and its modifiers accumulate into one combined
    # CLI command string, appended only if any of them was supplied.
    cmd = ""
    default_rt_adv_enable = module.params['default_rt_adv_enable']
    if default_rt_adv_enable != 'no_use':
        conf_str += "<defaultRtAdvEnable>%s</defaultRtAdvEnable>" % default_rt_adv_enable
        if default_rt_adv_enable == "true":
            cmd += "peer %s default-route-advertise" % remote_address
        else:
            cmd += "undo peer %s default-route-advertise" % remote_address

    default_rt_adv_policy = module.params['default_rt_adv_policy']
    if default_rt_adv_policy:
        conf_str += "<defaultRtAdvPolicy>%s</defaultRtAdvPolicy>" % default_rt_adv_policy
        cmd += " route-policy %s" % default_rt_adv_policy

    default_rt_match_mode = module.params['default_rt_match_mode']
    if default_rt_match_mode:
        conf_str += "<defaultRtMatchMode>%s</defaultRtMatchMode>" % default_rt_match_mode
        if default_rt_match_mode == "matchall":
            cmd += " conditional-route-match-all"
        elif default_rt_match_mode == "matchany":
            cmd += " conditional-route-match-any"

    if cmd:
        cmds.append(cmd)

    # The remaining parameters only add XML leaves; no CLI is reported.
    add_path_mode = module.params['add_path_mode']
    if add_path_mode:
        conf_str += "<addPathMode>%s</addPathMode>" % add_path_mode

    adv_add_path_num = module.params['adv_add_path_num']
    if adv_add_path_num:
        conf_str += "<advAddPathNum>%s</advAddPathNum>" % adv_add_path_num

    origin_as_valid = module.params['origin_as_valid']
    if origin_as_valid != 'no_use':
        conf_str += "<originAsValid>%s</originAsValid>" % origin_as_valid

    vpls_enable = module.params['vpls_enable']
    if vpls_enable != 'no_use':
        conf_str += "<vplsEnable>%s</vplsEnable>" % vpls_enable

    vpls_ad_disable = module.params['vpls_ad_disable']
    if vpls_ad_disable != 'no_use':
        conf_str += "<vplsAdDisable>%s</vplsAdDisable>" % vpls_ad_disable

    update_pkt_standard_compatible = module.params[
        'update_pkt_standard_compatible']
    if update_pkt_standard_compatible != 'no_use':
        conf_str += "<updatePktStandardCompatible>%s</updatePktStandardCompatible>" % update_pkt_standard_compatible

    conf_str += CE_MERGE_BGP_PEER_AF_TAIL

    recv_xml = self.netconf_set_config(module=module, conf_str=conf_str)

    if "<ok/>" not in recv_xml:
        module.fail_json(msg='Error: Merge bgp peer address family other failed.')

    return cmds
def main():
    """Entry point of the ce_bgp_neighbor_af Ansible module.

    Builds the argument spec, collects the proposed configuration from the
    module parameters, applies (or removes) the BGP peer address-family
    configuration through ``BgpNeighborAf`` and exits with the usual
    proposed/existing/end_state/changed/updates result dictionary.
    """
    # Tri-state flag parameters: they use the sentinel 'no_use' to mean
    # "not specified" and all share an identical argument spec and
    # identical handling when building the proposed dict.
    tristate_names = (
        'advertise_irb', 'advertise_arp', 'advertise_remote_nexthop',
        'advertise_community', 'advertise_ext_community',
        'discard_ext_community', 'allow_as_loop_enable', 'keep_all_routes',
        'public_as_only', 'public_as_only_force', 'public_as_only_limited',
        'public_as_only_replace', 'public_as_only_skip_peer_as',
        'redirect_ip', 'redirect_ip_vaildation', 'reflect_client',
        'substitute_as_enable', 'ipprefix_orf_enable',
        'is_nonstd_ipprefix_mod', 'default_rt_adv_enable', 'origin_as_valid',
        'vpls_enable', 'vpls_ad_disable', 'update_pkt_standard_compatible')
    # Plain parameters: included in the proposed dict whenever truthy.
    plain_names = (
        'vrf_name', 'af_type', 'remote_address', 'allow_as_loop_limit',
        'nexthop_configure', 'preferred_value', 'route_limit',
        'route_limit_percent', 'route_limit_type', 'route_limit_idle_timeout',
        'rt_updt_interval', 'import_rt_policy_name', 'export_rt_policy_name',
        'import_pref_filt_name', 'export_pref_filt_name',
        'import_as_path_filter', 'export_as_path_filter',
        'import_as_path_name_or_num', 'export_as_path_name_or_num',
        'import_acl_name_or_num', 'export_acl_name_or_num', 'orftype',
        'orf_mode', 'soostring', 'default_rt_adv_policy',
        'default_rt_match_mode', 'add_path_mode', 'adv_add_path_num')

    argument_spec = dict(
        state=dict(choices=['present', 'absent'], default='present'),
        vrf_name=dict(type='str', required=True),
        af_type=dict(choices=['ipv4uni', 'ipv4multi', 'ipv4vpn',
                              'ipv6uni', 'ipv6vpn', 'evpn'], required=True),
        remote_address=dict(type='str', required=True),
        allow_as_loop_limit=dict(type='str'),
        nexthop_configure=dict(choices=['null', 'local', 'invariable']),
        preferred_value=dict(type='str'),
        route_limit=dict(type='str'),
        route_limit_percent=dict(type='str'),
        route_limit_type=dict(
            choices=['noparameter', 'alertOnly', 'idleForever', 'idleTimeout']),
        route_limit_idle_timeout=dict(type='str'),
        rt_updt_interval=dict(type='str'),
        import_rt_policy_name=dict(type='str'),
        export_rt_policy_name=dict(type='str'),
        import_pref_filt_name=dict(type='str'),
        export_pref_filt_name=dict(type='str'),
        import_as_path_filter=dict(type='str'),
        export_as_path_filter=dict(type='str'),
        import_as_path_name_or_num=dict(type='str'),
        export_as_path_name_or_num=dict(type='str'),
        import_acl_name_or_num=dict(type='str'),
        export_acl_name_or_num=dict(type='str'),
        orftype=dict(type='str'),
        orf_mode=dict(choices=['null', 'receive', 'send', 'both']),
        soostring=dict(type='str'),
        default_rt_adv_policy=dict(type='str'),
        default_rt_match_mode=dict(choices=['null', 'matchall', 'matchany']),
        add_path_mode=dict(choices=['null', 'receive', 'send', 'both']),
        adv_add_path_num=dict(type='str'))
    # All tri-state parameters share the exact same spec entry.
    for name in tristate_names:
        argument_spec[name] = dict(
            type='str', default='no_use', choices=['no_use', 'true', 'false'])
    argument_spec.update(ce_argument_spec)

    module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=True)

    changed = False
    proposed = dict()
    existing = dict()
    end_state = dict()
    updates = []

    state = module.params['state']

    # get proposed
    proposed["state"] = state
    for name in plain_names:
        value = module.params[name]
        if value:
            proposed[name] = value
    for name in tristate_names:
        value = module.params[name]
        if value != 'no_use':
            proposed[name] = value

    ce_bgp_peer_af_obj = BgpNeighborAf()
    if not ce_bgp_peer_af_obj:
        module.fail_json(msg='Error: Init module failed.')

    bgp_peer_af_rst = ce_bgp_peer_af_obj.check_bgp_neighbor_af_args(
        module=module)
    bgp_peer_af_other_rst = ce_bgp_peer_af_obj.check_bgp_neighbor_af_other(
        module=module)

    # state exist bgp peer address family config
    exist_tmp = dict((item, bgp_peer_af_rst[item])
                     for item in bgp_peer_af_rst if item != "need_cfg")
    if exist_tmp:
        existing["bgp neighbor af"] = exist_tmp
    # state exist bgp peer address family other config
    exist_tmp = dict((item, bgp_peer_af_other_rst[item])
                     for item in bgp_peer_af_other_rst if item != "need_cfg")
    if exist_tmp:
        existing["bgp neighbor af other"] = exist_tmp

    if state == "present":
        if bgp_peer_af_rst["need_cfg"]:
            # An existing peer address family is merged; otherwise created.
            if "remote_address" in bgp_peer_af_rst.keys():
                cmd = ce_bgp_peer_af_obj.merge_bgp_peer_af(module=module)
            else:
                cmd = ce_bgp_peer_af_obj.create_bgp_peer_af(module=module)
            changed = True
            updates.extend(cmd)
        if bgp_peer_af_other_rst["need_cfg"]:
            cmd = ce_bgp_peer_af_obj.merge_bgp_peer_af_other(module=module)
            changed = True
            updates.extend(cmd)
    else:
        if bgp_peer_af_rst["need_cfg"]:
            cmd = ce_bgp_peer_af_obj.delete_bgp_peer_af(module=module)
            changed = True
            updates.extend(cmd)
        if bgp_peer_af_other_rst["need_cfg"]:
            # Removing the peer address family covers the dependent
            # "other" settings; nothing extra to undo here.
            pass

    # state end bgp peer address family config
    bgp_peer_af_rst = ce_bgp_peer_af_obj.check_bgp_neighbor_af_args(
        module=module)
    end_tmp = dict((item, bgp_peer_af_rst[item])
                   for item in bgp_peer_af_rst if item != "need_cfg")
    if end_tmp:
        end_state["bgp neighbor af"] = end_tmp
    # state end bgp peer address family other config
    bgp_peer_af_other_rst = ce_bgp_peer_af_obj.check_bgp_neighbor_af_other(
        module=module)
    end_tmp = dict((item, bgp_peer_af_other_rst[item])
                   for item in bgp_peer_af_other_rst if item != "need_cfg")
    if end_tmp:
        end_state["bgp neighbor af other"] = end_tmp

    results = dict()
    results['proposed'] = proposed
    results['existing'] = existing
    results['changed'] = changed
    results['end_state'] = end_state
    results['updates'] = updates

    module.exit_json(**results)
# Standard Ansible module entry point.
if __name__ == '__main__':
    main()
| gpl-3.0 |
gunchleoc/django | django/utils/deprecation.py | 51 | 2897 | from __future__ import absolute_import
import inspect
import warnings
class RemovedInDjango20Warning(PendingDeprecationWarning):
    # Warning category for features scheduled for removal in Django 2.0
    # (two releases away); subclasses PendingDeprecationWarning.
    pass
class RemovedInNextVersionWarning(DeprecationWarning):
    # Warning category for features scheduled for removal in the next
    # Django release; subclasses DeprecationWarning.
    pass
class warn_about_renamed_method(object):
    """
    Decorator factory: wraps a renamed method so that every call through the
    old name emits ``deprecation_warning`` pointing the caller at the new
    name, then delegates to the wrapped function unchanged.
    """
    def __init__(self, class_name, old_method_name, new_method_name, deprecation_warning):
        self.class_name = class_name
        self.old_method_name = old_method_name
        self.new_method_name = new_method_name
        self.deprecation_warning = deprecation_warning

    def __call__(self, f):
        def wrapped(*args, **kwargs):
            message = "`%s.%s` is deprecated, use `%s` instead." % (
                self.class_name, self.old_method_name, self.new_method_name)
            # stacklevel=2 attributes the warning to the caller's frame.
            warnings.warn(message, self.deprecation_warning, 2)
            return f(*args, **kwargs)
        return wrapped
class RenameMethodsBase(type):
    """
    Handles the deprecation paths when renaming a method.
    It does the following:
    1) Define the new method if missing and complain about it.
    2) Define the old method if missing.
    3) Complain whenever an old method is called.
    See #15363 for more details.
    """
    # Tuple of (old_method_name, new_method_name, warning_class) triples
    # that concrete metaclasses are expected to override.
    renamed_methods = ()
    def __new__(cls, name, bases, attrs):
        new_class = super(RenameMethodsBase, cls).__new__(cls, name, bases, attrs)
        # Walk the whole MRO so renames are patched on every ancestor that
        # defines either the old or the new name.  NOTE: setattr() below
        # mutates the base classes themselves, not just the new class.
        for base in inspect.getmro(new_class):
            class_name = base.__name__
            for renamed_method in cls.renamed_methods:
                old_method_name = renamed_method[0]
                old_method = base.__dict__.get(old_method_name)
                new_method_name = renamed_method[1]
                new_method = base.__dict__.get(new_method_name)
                deprecation_warning = renamed_method[2]
                wrapper = warn_about_renamed_method(class_name, *renamed_method)
                # Define the new method if missing and complain about it
                if not new_method and old_method:
                    warnings.warn(
                        "`%s.%s` method should be renamed `%s`." %
                        (class_name, old_method_name, new_method_name),
                        deprecation_warning, 2)
                    setattr(base, new_method_name, old_method)
                    setattr(base, old_method_name, wrapper(old_method))
                # Define the old method as a wrapped call to the new method.
                if not old_method and new_method:
                    setattr(base, old_method_name, wrapper(new_method))
        return new_class
class DeprecationInstanceCheck(type):
def __instancecheck__(self, instance):
warnings.warn(
"`%s` is deprecated, use `%s` instead." % (self.__name__, self.alternative),
self.deprecation_warning, 2
)
return super(DeprecationInstanceCheck, self).__instancecheck__(instance)
| bsd-3-clause |
carolFrohlich/nipype | nipype/interfaces/camino/tests/test_auto_ComputeEigensystem.py | 12 | 1358 | # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
from ....testing import assert_equal
from ..dti import ComputeEigensystem
def test_ComputeEigensystem_inputs():
    """Verify each declared input trait of ComputeEigensystem carries the
    expected metadata (argstr, position, mandatory, usedefault, ...).
    Yields one assert_equal check per (trait, metakey) pair (nose-style)."""
    input_map = dict(args=dict(argstr='%s',
    ),
    environ=dict(nohash=True,
    usedefault=True,
    ),
    ignore_exception=dict(nohash=True,
    usedefault=True,
    ),
    in_file=dict(argstr='< %s',
    mandatory=True,
    position=1,
    ),
    inputdatatype=dict(argstr='-inputdatatype %s',
    usedefault=True,
    ),
    inputmodel=dict(argstr='-inputmodel %s',
    ),
    maxcomponents=dict(argstr='-maxcomponents %d',
    ),
    out_file=dict(argstr='> %s',
    genfile=True,
    position=-1,
    ),
    outputdatatype=dict(argstr='-outputdatatype %s',
    usedefault=True,
    ),
    terminal_output=dict(nohash=True,
    ),
    )
    inputs = ComputeEigensystem.input_spec()
    for key, metadata in list(input_map.items()):
        for metakey, value in list(metadata.items()):
            yield assert_equal, getattr(inputs.traits()[key], metakey), value
def test_ComputeEigensystem_outputs():
    """Verify the declared output traits of ComputeEigensystem.
    Yields one assert_equal check per (trait, metakey) pair (nose-style)."""
    output_map = dict(eigen=dict(),
    )
    outputs = ComputeEigensystem.output_spec()
    for key, metadata in list(output_map.items()):
        for metakey, value in list(metadata.items()):
            yield assert_equal, getattr(outputs.traits()[key], metakey), value
| bsd-3-clause |
tdtrask/ansible | test/units/modules/network/netscaler/test_netscaler_nitro_request.py | 57 | 12941 |
# Copyright (c) 2017 Citrix Systems
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from ansible.compat.tests.mock import patch, Mock, call
from .netscaler_module import TestModule
import copy
import tempfile
import json
import sys
import codecs
from ansible.modules.network.netscaler import netscaler_nitro_request
# Template of every parameter accepted by netscaler_nitro_request; tests
# deepcopy this dict and override only the keys they exercise.
module_arguments = dict(
    nsip=None,
    nitro_user=None,
    nitro_pass=None,
    nitro_protocol=None,
    validate_certs=None,
    nitro_auth_token=None,
    resource=None,
    name=None,
    attributes=None,
    args=None,
    filter=None,
    operation=None,
    expected_nitro_errorcode=None,
    action=None,
    instance_ip=None,
    instance_name=None,
    instance_id=None,
)
class TestNetscalerNitroRequestModule(TestModule):
    """Unit tests for the NitroAPICaller helper of netscaler_nitro_request.

    AnsibleModule is always patched out, so no real module/argument parsing
    or network traffic happens; the tests only check header construction and
    response post-processing.
    """

    # NOTE: the set*/teardown hooks are deliberately kept as no-op overrides
    # of the TestModule base class.  The previously defined (and never used)
    # ``MockException`` helper class has been removed as dead code.
    @classmethod
    def setUpClass(cls):
        pass

    @classmethod
    def tearDownClass(cls):
        pass

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def test_fail_on_conflicting_authentication_methods(self):
        """Supplying both a token and user/password must call fail_json."""
        args = copy.deepcopy(module_arguments)
        args.update(dict(
            nitro_user='nsroot',
            nitro_pass='nsroot',
            nitro_auth_token='##DDASKLFDJ',
        ))
        mock_module_instance = Mock(params=args)
        expected_calls = [
            call.fail_json(
                changed=False,
                failed=True,
                msg='Cannot define both authentication token and username/password'
            )
        ]
        module_mock = Mock(return_value=mock_module_instance)
        with patch('ansible.modules.network.netscaler.netscaler_nitro_request.AnsibleModule', module_mock):
            netscaler_nitro_request.NitroAPICaller()
        mock_module_instance.assert_has_calls(expected_calls)

    def test_nitro_user_pass_credentials(self):
        """User/password auth must be carried in X-NITRO-* headers."""
        args = copy.deepcopy(module_arguments)
        args.update(dict(
            nitro_user='nsroot',
            nitro_pass='nsroot',
        ))
        mock_module_instance = Mock(params=args)
        expected_headers = {
            'Content-Type': 'application/json',
            'X-NITRO-USER': 'nsroot',
            'X-NITRO-PASS': 'nsroot',
        }
        module_mock = Mock(return_value=mock_module_instance)
        with patch('ansible.modules.network.netscaler.netscaler_nitro_request.AnsibleModule', module_mock):
            instance = netscaler_nitro_request.NitroAPICaller()
            self.assertDictEqual(instance._headers, expected_headers)

    def test_mas_login_headers(self):
        """mas_login must not send credential headers (they go in the body)."""
        args = copy.deepcopy(module_arguments)
        args.update(dict(
            nitro_user='nsroot',
            nitro_pass='nsroot',
            operation='mas_login',
        ))
        mock_module_instance = Mock(params=args)
        expected_headers = {
            'Content-Type': 'application/json',
        }
        module_mock = Mock(return_value=mock_module_instance)
        with patch('ansible.modules.network.netscaler.netscaler_nitro_request.AnsibleModule', module_mock):
            instance = netscaler_nitro_request.NitroAPICaller()
            self.assertDictEqual(instance._headers, expected_headers)

    def test_mas_proxy_call_headers_instance_ip(self):
        """MAS proxying by instance_ip sets the proxy header and auth cookie."""
        args = copy.deepcopy(module_arguments)
        args.update(dict(
            nitro_auth_token='##ABDB',
            operation='add',
            instance_ip='192.168.1.1',
        ))
        mock_module_instance = Mock(params=args)
        expected_headers = {
            'Content-Type': 'application/json',
            '_MPS_API_PROXY_MANAGED_INSTANCE_IP': args['instance_ip'],
            'Cookie': 'NITRO_AUTH_TOKEN=%s' % args['nitro_auth_token'],
        }
        module_mock = Mock(return_value=mock_module_instance)
        with patch('ansible.modules.network.netscaler.netscaler_nitro_request.AnsibleModule', module_mock):
            instance = netscaler_nitro_request.NitroAPICaller()
            self.assertDictEqual(instance._headers, expected_headers)

    def test_mas_proxy_call_headers_instance_id(self):
        """MAS proxying by instance_id sets the proxy header and auth cookie."""
        args = copy.deepcopy(module_arguments)
        args.update(dict(
            nitro_auth_token='##ABDB',
            operation='add',
            instance_id='myid',
        ))
        mock_module_instance = Mock(params=args)
        expected_headers = {
            'Content-Type': 'application/json',
            '_MPS_API_PROXY_MANAGED_INSTANCE_ID': args['instance_id'],
            'Cookie': 'NITRO_AUTH_TOKEN=%s' % args['nitro_auth_token'],
        }
        module_mock = Mock(return_value=mock_module_instance)
        with patch('ansible.modules.network.netscaler.netscaler_nitro_request.AnsibleModule', module_mock):
            instance = netscaler_nitro_request.NitroAPICaller()
            self.assertDictEqual(instance._headers, expected_headers)

    def test_mas_proxy_call_headers_instance_name(self):
        """MAS proxying by instance_name sets the proxy header and auth cookie."""
        args = copy.deepcopy(module_arguments)
        args.update(dict(
            nitro_auth_token='##ABDB',
            operation='add',
            instance_name='myname',
        ))
        mock_module_instance = Mock(params=args)
        expected_headers = {
            'Content-Type': 'application/json',
            '_MPS_API_PROXY_MANAGED_INSTANCE_NAME': args['instance_name'],
            'Cookie': 'NITRO_AUTH_TOKEN=%s' % args['nitro_auth_token'],
        }
        module_mock = Mock(return_value=mock_module_instance)
        with patch('ansible.modules.network.netscaler.netscaler_nitro_request.AnsibleModule', module_mock):
            instance = netscaler_nitro_request.NitroAPICaller()
            self.assertDictEqual(instance._headers, expected_headers)

    def test_edit_response_data_no_body_success_status(self):
        """No response body + expected status yields the synthetic Success triple."""
        with patch('ansible.modules.network.netscaler.netscaler_nitro_request.AnsibleModule'):
            instance = netscaler_nitro_request.NitroAPICaller()
            r = None
            info = {
                'status': 200,
            }
            result = {}
            success_status = 200
            expected_result = {
                'nitro_errorcode': 0,
                'nitro_message': 'Success',
                'nitro_severity': 'NONE',
                'http_response_body': '',
                'http_response_data': info,
            }
            instance.edit_response_data(r, info, result, success_status)
            self.assertDictEqual(result, expected_result)

    def test_edit_response_data_no_body_fail_status(self):
        """No response body + unexpected status yields a synthetic ERROR triple."""
        with patch('ansible.modules.network.netscaler.netscaler_nitro_request.AnsibleModule'):
            instance = netscaler_nitro_request.NitroAPICaller()
            r = None
            info = {
                'status': 201,
            }
            result = {}
            success_status = 200
            expected_result = {
                'nitro_errorcode': -1,
                'nitro_message': 'HTTP status %s' % info['status'],
                'nitro_severity': 'ERROR',
                'http_response_body': '',
                'http_response_data': info,
            }
            instance.edit_response_data(r, info, result, success_status)
            self.assertDictEqual(result, expected_result)

    def test_edit_response_data_actual_body_data(self):
        """errorcode/message/severity from the body surface as nitro_* keys."""
        args = copy.deepcopy(module_arguments)
        args.update(dict(
            nitro_user='nsroot',
            nitro_pass='nsroot',
            nitro_auth_token='##DDASKLFDJ',
        ))
        module_mock = Mock(params=args, from_json=json.loads)
        with patch('ansible.modules.network.netscaler.netscaler_nitro_request.AnsibleModule', Mock(return_value=module_mock)):
            with tempfile.TemporaryFile() as r:
                actual_body = {
                    'errorcode': 258,
                    'message': 'Some error',
                    'severity': 'ERROR',
                }
                r.write(codecs.encode(json.dumps(actual_body), 'utf-8'))
                r.seek(0)
                instance = netscaler_nitro_request.NitroAPICaller()
                info = {
                    'status': 200,
                }
                result = {}
                success_status = 200
                expected_result = {
                    'http_response_body': json.dumps(actual_body),
                    'http_response_data': info,
                }
                nitro_data = {}
                for key, value in actual_body.items():
                    nitro_data['nitro_%s' % key] = value
                expected_result.update(nitro_data)
                instance.edit_response_data(r, info, result, success_status)
                self.assertDictEqual(result, expected_result)

    def test_edit_response_data_actual_body_data_irrelevant(self):
        """An empty JSON body falls back to the synthetic Success triple."""
        args = copy.deepcopy(module_arguments)
        args.update(dict(
            nitro_user='nsroot',
            nitro_pass='nsroot',
            nitro_auth_token='##DDASKLFDJ',
        ))
        module_mock = Mock(params=args, from_json=json.loads)
        with patch('ansible.modules.network.netscaler.netscaler_nitro_request.AnsibleModule', Mock(return_value=module_mock)):
            with tempfile.TemporaryFile() as r:
                actual_body = {}
                r.write(codecs.encode(json.dumps(actual_body), 'utf-8'))
                r.seek(0)
                instance = netscaler_nitro_request.NitroAPICaller()
                info = {
                    'status': 200,
                }
                result = {}
                success_status = 200
                expected_result = {
                    'http_response_body': json.dumps(actual_body),
                    'http_response_data': info,
                    'nitro_errorcode': 0,
                    'nitro_message': 'Success',
                    'nitro_severity': 'NONE',
                }
                instance.edit_response_data(r, info, result, success_status)
                self.assertDictEqual(result, expected_result)

    def test_edit_response_data_body_in_info(self):
        """A body embedded in the urls info dict is parsed the same way."""
        args = copy.deepcopy(module_arguments)
        args.update(dict(
            nitro_user='nsroot',
            nitro_pass='nsroot',
        ))
        module_mock = Mock(params=args, from_json=json.loads)
        with patch('ansible.modules.network.netscaler.netscaler_nitro_request.AnsibleModule', Mock(return_value=module_mock)):
            body = {
                'errorcode': 258,
                'message': 'Numerical error 258',
                'severity': 'ERROR'
            }
            instance = netscaler_nitro_request.NitroAPICaller()
            r = None
            info = {
                'status': 200,
                'body': codecs.encode(json.dumps(body), 'utf-8'),
            }
            result = {}
            success_status = 200
            expected_result = {
                'http_response_body': json.dumps(body),
                'http_response_data': info,
            }
            nitro_data = {}
            for key, value in body.items():
                nitro_data['nitro_%s' % key] = value
            expected_result.update(nitro_data)
            instance.edit_response_data(r, info, result, success_status)
            self.assertDictEqual(result, expected_result)

    def test_handle_get_return_object(self):
        """On success the requested resource's data is exposed as nitro_object."""
        resource = 'lbvserver'
        args = copy.deepcopy(module_arguments)
        args.update(dict(
            nitro_user='nsroot',
            nitro_pass='nsroot',
            resource=resource,
        ))
        resource_data = {
            'property1': 'value1',
            'property2': 'value2',
        }
        module_mock = Mock(params=args, from_json=json.loads)
        with patch('ansible.modules.network.netscaler.netscaler_nitro_request.AnsibleModule', Mock(return_value=module_mock)):
            instance = netscaler_nitro_request.NitroAPICaller()
            data = {resource: resource_data}
            result = {
                'nitro_errorcode': 0,
                'http_response_body': json.dumps(data),
            }
            expected_result = {
                'nitro_object': resource_data
            }
            expected_result.update(result)
            instance.handle_get_return_object(result)
            self.assertDictEqual(result, expected_result)
| gpl-3.0 |
marco-lancini/Showcase | django/db/models/sql/where.py | 289 | 13163 | """
Code to manage the creation and SQL rendering of 'where' constraints.
"""
import datetime
from itertools import repeat
from django.utils import tree
from django.db.models.fields import Field
from django.db.models.query_utils import QueryWrapper
from datastructures import EmptyResultSet, FullResultSet
# Connection types
# String connectors used when joining a WhereNode's children into SQL.
AND = 'AND'
OR = 'OR'
class EmptyShortCircuit(Exception):
    """
    Internal exception used to indicate that a "matches nothing" node should be
    added to the where-clause.

    Raised from Constraint.process() and translated into EmptyResultSet by
    WhereNode.make_atom().
    """
    pass
class WhereNode(tree.Node):
"""
Used to represent the SQL where-clause.
The class is tied to the Query class that created it (in order to create
the correct SQL).
The children in this tree are usually either Q-like objects or lists of
[table_alias, field_name, db_type, lookup_type, value_annotation,
params]. However, a child could also be any class with as_sql() and
relabel_aliases() methods.
"""
default = AND
def add(self, data, connector):
"""
Add a node to the where-tree. If the data is a list or tuple, it is
expected to be of the form (obj, lookup_type, value), where obj is
a Constraint object, and is then slightly munged before being stored
(to avoid storing any reference to field objects). Otherwise, the 'data'
is stored unchanged and can be any class with an 'as_sql()' method.
"""
if not isinstance(data, (list, tuple)):
super(WhereNode, self).add(data, connector)
return
obj, lookup_type, value = data
if hasattr(value, '__iter__') and hasattr(value, 'next'):
# Consume any generators immediately, so that we can determine
# emptiness and transform any non-empty values correctly.
value = list(value)
# The "annotation" parameter is used to pass auxilliary information
# about the value(s) to the query construction. Specifically, datetime
# and empty values need special handling. Other types could be used
# here in the future (using Python types is suggested for consistency).
if isinstance(value, datetime.datetime):
annotation = datetime.datetime
elif hasattr(value, 'value_annotation'):
annotation = value.value_annotation
else:
annotation = bool(value)
if hasattr(obj, "prepare"):
value = obj.prepare(lookup_type, value)
super(WhereNode, self).add((obj, lookup_type, annotation, value),
connector)
return
super(WhereNode, self).add((obj, lookup_type, annotation, value),
connector)
def as_sql(self, qn, connection):
"""
Returns the SQL version of the where clause and the value to be
substituted in. Returns None, None if this node is empty.
If 'node' is provided, that is the root of the SQL generation
(generally not needed except by the internal implementation for
recursion).
"""
if not self.children:
return None, []
result = []
result_params = []
empty = True
for child in self.children:
try:
if hasattr(child, 'as_sql'):
sql, params = child.as_sql(qn=qn, connection=connection)
else:
# A leaf node in the tree.
sql, params = self.make_atom(child, qn, connection)
except EmptyResultSet:
if self.connector == AND and not self.negated:
# We can bail out early in this particular case (only).
raise
elif self.negated:
empty = False
continue
except FullResultSet:
if self.connector == OR:
if self.negated:
empty = True
break
# We match everything. No need for any constraints.
return '', []
if self.negated:
empty = True
continue
empty = False
if sql:
result.append(sql)
result_params.extend(params)
if empty:
raise EmptyResultSet
conn = ' %s ' % self.connector
sql_string = conn.join(result)
if sql_string:
if self.negated:
sql_string = 'NOT (%s)' % sql_string
elif len(self.children) != 1:
sql_string = '(%s)' % sql_string
return sql_string, result_params
def make_atom(self, child, qn, connection):
"""
Turn a tuple (table_alias, column_name, db_type, lookup_type,
value_annot, params) into valid SQL.
Returns the string for the SQL fragment and the parameters to use for
it.
"""
lvalue, lookup_type, value_annot, params_or_value = child
if hasattr(lvalue, 'process'):
try:
lvalue, params = lvalue.process(lookup_type, params_or_value, connection)
except EmptyShortCircuit:
raise EmptyResultSet
else:
params = Field().get_db_prep_lookup(lookup_type, params_or_value,
connection=connection, prepared=True)
if isinstance(lvalue, tuple):
# A direct database column lookup.
field_sql = self.sql_for_columns(lvalue, qn, connection)
else:
# A smart object with an as_sql() method.
field_sql = lvalue.as_sql(qn, connection)
if value_annot is datetime.datetime:
cast_sql = connection.ops.datetime_cast_sql()
else:
cast_sql = '%s'
if hasattr(params, 'as_sql'):
extra, params = params.as_sql(qn, connection)
cast_sql = ''
else:
extra = ''
if (len(params) == 1 and params[0] == '' and lookup_type == 'exact'
and connection.features.interprets_empty_strings_as_nulls):
lookup_type = 'isnull'
value_annot = True
if lookup_type in connection.operators:
format = "%s %%s %%s" % (connection.ops.lookup_cast(lookup_type),)
return (format % (field_sql,
connection.operators[lookup_type] % cast_sql,
extra), params)
if lookup_type == 'in':
if not value_annot:
raise EmptyResultSet
if extra:
return ('%s IN %s' % (field_sql, extra), params)
max_in_list_size = connection.ops.max_in_list_size()
if max_in_list_size and len(params) > max_in_list_size:
# Break up the params list into an OR of manageable chunks.
in_clause_elements = ['(']
for offset in xrange(0, len(params), max_in_list_size):
if offset > 0:
in_clause_elements.append(' OR ')
in_clause_elements.append('%s IN (' % field_sql)
group_size = min(len(params) - offset, max_in_list_size)
param_group = ', '.join(repeat('%s', group_size))
in_clause_elements.append(param_group)
in_clause_elements.append(')')
in_clause_elements.append(')')
return ''.join(in_clause_elements), params
else:
return ('%s IN (%s)' % (field_sql,
', '.join(repeat('%s', len(params)))),
params)
elif lookup_type in ('range', 'year'):
return ('%s BETWEEN %%s and %%s' % field_sql, params)
elif lookup_type in ('month', 'day', 'week_day'):
return ('%s = %%s' % connection.ops.date_extract_sql(lookup_type, field_sql),
params)
elif lookup_type == 'isnull':
return ('%s IS %sNULL' % (field_sql,
(not value_annot and 'NOT ' or '')), ())
elif lookup_type == 'search':
return (connection.ops.fulltext_search_sql(field_sql), params)
elif lookup_type in ('regex', 'iregex'):
return connection.ops.regex_lookup(lookup_type) % (field_sql, cast_sql), params
raise TypeError('Invalid lookup_type: %r' % lookup_type)
def sql_for_columns(self, data, qn, connection):
    """
    Build the left-hand side SQL for a column constraint.

    `data` is a (table_alias, column_name, db_type) triple; the result is
    the quoted "alias.column" (or bare column when there is no alias) run
    through the backend's type-cast hook -- e.g. the "T1.foo" portion of
    "WHERE ... T1.foo = 6".
    """
    alias, column, db_type = data
    column_sql = '%s.%s' % (qn(alias), qn(column)) if alias else qn(column)
    return connection.ops.field_cast_sql(db_type) % column_sql
def relabel_aliases(self, change_map, node=None):
    """
    Relabels the alias values of any children. 'change_map' is a dictionary
    mapping old (current) alias values to the new values.

    Walks the tree rooted at `node` (defaulting to self) and rewrites table
    aliases in place, both on constraint objects and on raw
    (alias, column, db_type) data tuples.
    """
    if not node:
        node = self
    for pos, child in enumerate(node.children):
        if hasattr(child, 'relabel_aliases'):
            # The child knows how to relabel itself (e.g. a subquery or
            # another constraint-bearing object); delegate.
            child.relabel_aliases(change_map)
        elif isinstance(child, tree.Node):
            # Plain sub-tree: recurse with the same change map.
            self.relabel_aliases(change_map, child)
        elif isinstance(child, (list, tuple)):
            # Leaf constraint stored as a sequence whose first element is
            # the LHS and whose fourth element is the comparison value.
            if isinstance(child[0], (list, tuple)):
                # LHS is raw (alias, column, db_type) data: rewrite the
                # alias and rebuild the (immutable) child tuple in place.
                elt = list(child[0])
                if elt[0] in change_map:
                    elt[0] = change_map[elt[0]]
                    node.children[pos] = (tuple(elt),) + child[1:]
            else:
                # LHS is an object (e.g. a Constraint); delegate.
                child[0].relabel_aliases(change_map)
            # Check if the query value also requires relabelling
            if hasattr(child[3], 'relabel_aliases'):
                child[3].relabel_aliases(change_map)
class EverythingNode(object):
    """A degenerate WHERE node whose condition is always true."""

    def as_sql(self, qn=None, connection=None):
        # There is no SQL to emit; signal "no constraint at all" to the
        # compiler instead of returning a fragment.
        raise FullResultSet

    def relabel_aliases(self, change_map, node=None):
        # No aliases are referenced, so there is nothing to rewrite.
        pass
class NothingNode(object):
    """A degenerate WHERE node whose condition can never be true."""

    def as_sql(self, qn=None, connection=None):
        # Signal an always-empty result set rather than emitting SQL.
        raise EmptyResultSet

    def relabel_aliases(self, change_map, node=None):
        # No aliases are referenced, so there is nothing to rewrite.
        pass
class ExtraWhere(object):
    """
    Holds raw SQL snippets (e.g. from QuerySet.extra) so they can sit inside
    a WhereNode: the snippets are simply ANDed together verbatim.
    """

    def __init__(self, sqls, params):
        self.sqls = sqls
        self.params = params

    def as_sql(self, qn=None, connection=None):
        # Join the raw fragments; params may be None, which normalizes to ().
        joined = " AND ".join(self.sqls)
        return joined, tuple(self.params or ())
class Constraint(object):
    """
    An object that can be passed to WhereNode.add() and knows how to
    pre-process itself prior to including in the WhereNode.
    """

    def __init__(self, alias, col, field):
        self.alias, self.col, self.field = alias, col, field

    def __getstate__(self):
        """
        Pickle support.

        Field objects aren't reliably pickleable (they can carry callable
        default values), so the field is replaced by a (model, field name)
        reference that __setstate__ resolves back into a field object.
        """
        state = dict(self.__dict__)
        field = state.pop('field')
        if field:
            state['model'] = field.model
            state['field_name'] = field.name
        return state

    def __setstate__(self, data):
        """Rebuild the constraint, re-resolving the pickled field reference."""
        model = data.pop('model', None)
        field_name = data.pop('field_name', None)
        self.__dict__.update(data)
        if model is None:
            self.field = None
        else:
            self.field = model._meta.get_field(field_name)

    def prepare(self, lookup_type, value):
        # With no field attached there is nothing to normalize; otherwise
        # let the field convert the Python value for this lookup type.
        if not self.field:
            return value
        return self.field.get_prep_lookup(lookup_type, value)

    def process(self, lookup_type, value, connection):
        """
        Returns a tuple of data suitable for inclusion in a WhereNode
        instance.
        """
        # Because of circular imports, we need to import this here.
        from django.db.models.base import ObjectDoesNotExist
        try:
            field = self.field
            if field:
                params = field.get_db_prep_lookup(lookup_type, value,
                    connection=connection, prepared=True)
                db_type = field.db_type(connection=connection)
            else:
                # This branch is used at times when we add a comparison to NULL
                # (we don't really want to waste time looking up the associated
                # field object at the calling location).
                params = Field().get_db_prep_lookup(lookup_type, value,
                    connection=connection, prepared=True)
                db_type = None
        except ObjectDoesNotExist:
            raise EmptyShortCircuit
        return (self.alias, self.col, db_type), params

    def relabel_aliases(self, change_map):
        # Rewrite the table alias if the change map covers it.
        self.alias = change_map.get(self.alias, self.alias)
| mit |
ZLLab-Mooc/edx-platform | lms/djangoapps/instructor/tests/test_registration_codes.py | 43 | 12486 | """
Test for the registration code status information.
"""
from course_modes.models import CourseMode
from courseware.tests.factories import InstructorFactory
from xmodule.modulestore.tests.factories import CourseFactory
from django.utils.translation import ugettext as _
from shoppingcart.models import (
Invoice, CourseRegistrationCodeInvoiceItem, CourseRegistrationCode,
CourseRegCodeItem, Order, RegistrationCodeRedemption
)
from student.models import CourseEnrollment
from student.roles import CourseSalesAdminRole
from nose.plugins.attrib import attr
import json
from student.tests.factories import UserFactory, CourseModeFactory
from django.core.urlresolvers import reverse
from django.test.utils import override_settings
from xmodule.modulestore.tests.django_utils import SharedModuleStoreTestCase
@attr('shard_1')
@override_settings(REGISTRATION_CODE_LENGTH=8)
class TestCourseRegistrationCodeStatus(SharedModuleStoreTestCase):
    """
    Test registration code status.
    """
    @classmethod
    def setUpClass(cls):
        # The course is expensive to build, so one instance is shared by
        # every test in the class.
        super(TestCourseRegistrationCodeStatus, cls).setUpClass()
        cls.course = CourseFactory.create()

    def setUp(self):
        # Log in as an instructor with sales-admin rights, create an
        # invoice/invoice-item pair, and generate an initial batch of 12
        # registration codes through the instructor dashboard endpoint.
        super(TestCourseRegistrationCodeStatus, self).setUp()
        CourseModeFactory.create(course_id=self.course.id, min_price=50)
        self.instructor = InstructorFactory(course_key=self.course.id)
        self.client.login(username=self.instructor.username, password='test')
        CourseSalesAdminRole(self.course.id).add_users(self.instructor)

        # create testing invoice
        self.sale_invoice = Invoice.objects.create(
            total_amount=1234.32, company_name='Test1', company_contact_name='TestName',
            company_contact_email='Test@company.com', recipient_name='Testw', recipient_email='test1@test.com',
            customer_reference_number='2Fwe23S', internal_reference="A", course_id=self.course.id, is_valid=True
        )
        self.invoice_item = CourseRegistrationCodeInvoiceItem.objects.create(
            invoice=self.sale_invoice,
            qty=1,
            unit_price=1234.32,
            course_id=self.course.id
        )
        # URLs exercised by the tests below.
        self.lookup_code_url = reverse('look_up_registration_code',
                                       kwargs={'course_id': unicode(self.course.id)})
        self.registration_code_detail_url = reverse('registration_code_details',
                                                    kwargs={'course_id': unicode(self.course.id)})
        url = reverse('generate_registration_codes',
                      kwargs={'course_id': self.course.id.to_deprecated_string()})
        data = {
            'total_registration_codes': 12,
            'company_name': 'Test Group',
            'company_contact_name': 'Test@company.com',
            'company_contact_email': 'Test@company.com',
            'unit_price': 122.45,
            'recipient_name': 'Test123',
            'recipient_email': 'test@123.com',
            'address_line_1': 'Portland Street',
            'address_line_2': '',
            'address_line_3': '',
            'city': '',
            'state': '',
            'zip': '',
            'country': '',
            'customer_reference_number': '123A23F',
            'internal_reference': '',
            'invoice': ''
        }
        response = self.client.post(url, data)
        self.assertEqual(response.status_code, 200, response.content)

    def test_look_up_invalid_registration_code(self):
        """
        Verify the view returns HTTP status 400 if an invalid registration code is passed.
        Also, verify the data returned includes a message indicating the error,
        and the is_registration_code_valid is set to False.
        """
        data = {
            'registration_code': 'invalid_reg_code'
        }
        response = self.client.get(self.lookup_code_url, data)
        self.assertEqual(response.status_code, 400)
        json_dict = json.loads(response.content)
        message = _('The enrollment code ({code}) was not found for the {course_name} course.').format(
            course_name=self.course.display_name, code=data['registration_code']
        )
        self.assertEqual(message, json_dict['message'])
        self.assertFalse(json_dict['is_registration_code_valid'])
        self.assertFalse(json_dict['is_registration_code_redeemed'])

    def test_look_up_valid_registration_code(self):
        """
        test lookup for the valid registration code
        and that registration code has been redeemed by user
        and then mark the registration code as in_valid
        when marking as invalidate, it also lookup for
        registration redemption entry and also delete
        that redemption entry and un_enroll the student
        who used that registration code for their enrollment.
        """
        for i in range(2):
            CourseRegistrationCode.objects.create(
                code='reg_code{}'.format(i),
                course_id=unicode(self.course.id),
                created_by=self.instructor,
                invoice=self.sale_invoice,
                invoice_item=self.invoice_item,
                mode_slug=CourseMode.DEFAULT_MODE_SLUG
            )

        # Redeem the first code for a fresh student.
        reg_code = CourseRegistrationCode.objects.all()[0]
        student = UserFactory()
        enrollment = CourseEnrollment.enroll(student, self.course.id)
        RegistrationCodeRedemption.objects.create(
            registration_code=reg_code,
            redeemed_by=student,
            course_enrollment=enrollment
        )
        data = {
            'registration_code': reg_code.code
        }
        response = self.client.get(self.lookup_code_url, data)
        self.assertEqual(response.status_code, 200)
        json_dict = json.loads(response.content)
        self.assertTrue(json_dict['is_registration_code_valid'])
        self.assertTrue(json_dict['is_registration_code_redeemed'])

        # now mark that registration code as invalid
        data = {
            'registration_code': reg_code.code,
            'action_type': 'invalidate_registration_code'
        }
        response = self.client.post(self.registration_code_detail_url, data)
        self.assertEqual(response.status_code, 200)
        json_dict = json.loads(response.content)
        message = _('This enrollment code has been canceled. It can no longer be used.')
        self.assertEqual(message, json_dict['message'])

        # now check that the registration code should be marked as invalid in the db.
        reg_code = CourseRegistrationCode.objects.get(code=reg_code.code)
        self.assertEqual(reg_code.is_valid, False)

        # invalidation must also have removed the redemption record...
        redemption = RegistrationCodeRedemption.get_registration_code_redemption(reg_code.code, self.course.id)
        self.assertIsNone(redemption)

        # now the student course enrollment should be false.
        enrollment = CourseEnrollment.get_enrollment(student, self.course.id)
        self.assertEqual(enrollment.is_active, False)

    def test_lookup_valid_redeemed_registration_code(self):
        """
        test to lookup for the valid and redeemed registration code
        and then mark that registration code as un_redeemed
        which will unenroll the user and delete the redemption
        entry from the database.
        """
        # Buy a bulk ("business") order as a student so a code exists, then
        # redeem it for that student's enrollment.
        student = UserFactory()
        self.client.login(username=student.username, password='test')
        cart = Order.get_cart_for_user(student)
        cart.order_type = 'business'
        cart.save()
        CourseRegCodeItem.add_to_order(cart, self.course.id, 2)
        cart.purchase()
        reg_code = CourseRegistrationCode.objects.filter(order=cart)[0]
        enrollment = CourseEnrollment.enroll(student, self.course.id)
        RegistrationCodeRedemption.objects.create(
            registration_code=reg_code,
            redeemed_by=student,
            course_enrollment=enrollment
        )

        # Switch back to the instructor for the lookup/update endpoints.
        self.client.login(username=self.instructor.username, password='test')
        data = {
            'registration_code': reg_code.code
        }
        response = self.client.get(self.lookup_code_url, data)
        self.assertEqual(response.status_code, 200)
        json_dict = json.loads(response.content)
        self.assertTrue(json_dict['is_registration_code_valid'])
        self.assertTrue(json_dict['is_registration_code_redeemed'])

        # now mark the registration code as unredeemed
        # this will unenroll the user and removed the redemption entry from
        # the database.
        data = {
            'registration_code': reg_code.code,
            'action_type': 'unredeem_registration_code'
        }
        response = self.client.post(self.registration_code_detail_url, data)
        self.assertEqual(response.status_code, 200)
        json_dict = json.loads(response.content)
        message = _('This enrollment code has been marked as unused.')
        self.assertEqual(message, json_dict['message'])

        redemption = RegistrationCodeRedemption.get_registration_code_redemption(reg_code.code, self.course.id)
        self.assertIsNone(redemption)

        # now the student course enrollment should be false.
        enrollment = CourseEnrollment.get_enrollment(student, self.course.id)
        self.assertEqual(enrollment.is_active, False)

    def test_apply_invalid_reg_code_when_updating_code_information(self):
        """
        test to apply an invalid registration code
        when updating the registration code information.
        """
        data = {
            'registration_code': 'invalid_registration_code',
            'action_type': 'unredeem_registration_code'
        }
        response = self.client.post(self.registration_code_detail_url, data)
        self.assertEqual(response.status_code, 400)
        json_dict = json.loads(response.content)
        message = _('The enrollment code ({code}) was not found for the {course_name} course.').format(
            course_name=self.course.display_name, code=data['registration_code']
        )
        self.assertEqual(message, json_dict['message'])

    def test_mark_registration_code_as_valid(self):
        """
        test to mark the invalid registration code
        as valid
        """
        # Codes are created already invalid (is_valid=False) here.
        for i in range(2):
            CourseRegistrationCode.objects.create(
                code='reg_code{}'.format(i),
                course_id=self.course.id.to_deprecated_string(),
                created_by=self.instructor,
                invoice=self.sale_invoice,
                invoice_item=self.invoice_item,
                mode_slug=CourseMode.DEFAULT_MODE_SLUG,
                is_valid=False
            )

        reg_code = CourseRegistrationCode.objects.all()[0]
        data = {
            'registration_code': reg_code.code,
            'action_type': 'validate_registration_code'
        }
        response = self.client.post(self.registration_code_detail_url, data)
        self.assertEqual(response.status_code, 200)
        json_dict = json.loads(response.content)
        message = _('The enrollment code has been restored.')
        self.assertEqual(message, json_dict['message'])

        # now check that the registration code should be marked as valid in the db.
        reg_code = CourseRegistrationCode.objects.get(code=reg_code.code)
        self.assertEqual(reg_code.is_valid, True)

    def test_returns_error_when_unredeeming_already_unredeemed_registration_code_redemption(self):
        """
        test to mark the already unredeemed registration code as unredeemed.
        """
        for i in range(2):
            CourseRegistrationCode.objects.create(
                code='reg_code{}'.format(i),
                course_id=self.course.id.to_deprecated_string(),
                created_by=self.instructor,
                invoice=self.sale_invoice,
                invoice_item=self.invoice_item,
                mode_slug=CourseMode.DEFAULT_MODE_SLUG,
            )

        # No redemption exists for this code, so unredeeming must 400.
        reg_code = CourseRegistrationCode.objects.all()[0]
        data = {
            'registration_code': reg_code.code,
            'action_type': 'unredeem_registration_code'
        }
        response = self.client.post(self.registration_code_detail_url, data)
        self.assertEqual(response.status_code, 400)
        json_dict = json.loads(response.content)
        message = _('The redemption does not exist against enrollment code ({code}).').format(code=reg_code.code)
        self.assertEqual(message, json_dict['message'])
| agpl-3.0 |
HousekeepLtd/django | django/conf/locale/ko/formats.py | 404 | 2320 | # -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
from __future__ import unicode_literals
# Display formats for the Korean (ko) locale.
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = 'Y년 n월 j일'
TIME_FORMAT = 'A g:i'
DATETIME_FORMAT = 'Y년 n월 j일 g:i A'
YEAR_MONTH_FORMAT = 'Y년 n월'
MONTH_DAY_FORMAT = 'n월 j일'
SHORT_DATE_FORMAT = 'Y-n-j.'
SHORT_DATETIME_FORMAT = 'Y-n-j H:i'
# FIRST_DAY_OF_WEEK =  (unset: falls back to the global default)

# Input (parsing) formats, tried in order until one matches.
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
# Kept ISO formats as they are in first position
DATE_INPUT_FORMATS = [
    '%Y-%m-%d', '%m/%d/%Y', '%m/%d/%y',  # '2006-10-25', '10/25/2006', '10/25/06'
    # '%b %d %Y', '%b %d, %Y',            # 'Oct 25 2006', 'Oct 25, 2006'
    # '%d %b %Y', '%d %b, %Y',            # '25 Oct 2006', '25 Oct, 2006'
    # '%B %d %Y', '%B %d, %Y',            # 'October 25 2006', 'October 25, 2006'
    # '%d %B %Y', '%d %B, %Y',            # '25 October 2006', '25 October, 2006'
    '%Y년 %m월 %d일',  # '2006년 10월 25일', with localized suffix.
]
TIME_INPUT_FORMATS = [
    '%H:%M:%S',     # '14:30:59'
    '%H:%M:%S.%f',  # '14:30:59.000200'
    '%H:%M',        # '14:30'
    '%H시 %M분 %S초',  # '14시 30분 59초'
    '%H시 %M분',  # '14시 30분'
]
DATETIME_INPUT_FORMATS = [
    '%Y-%m-%d %H:%M:%S',     # '2006-10-25 14:30:59'
    '%Y-%m-%d %H:%M:%S.%f',  # '2006-10-25 14:30:59.000200'
    '%Y-%m-%d %H:%M',        # '2006-10-25 14:30'
    '%Y-%m-%d',              # '2006-10-25'
    '%m/%d/%Y %H:%M:%S',     # '10/25/2006 14:30:59'
    '%m/%d/%Y %H:%M:%S.%f',  # '10/25/2006 14:30:59.000200'
    '%m/%d/%Y %H:%M',        # '10/25/2006 14:30'
    '%m/%d/%Y',              # '10/25/2006'
    '%m/%d/%y %H:%M:%S',     # '10/25/06 14:30:59'
    '%m/%d/%y %H:%M:%S.%f',  # '10/25/06 14:30:59.000200'
    '%m/%d/%y %H:%M',        # '10/25/06 14:30'
    '%m/%d/%y',              # '10/25/06'
    '%Y년 %m월 %d일 %H시 %M분 %S초',  # '2006년 10월 25일 14시 30분 59초'
    '%Y년 %m월 %d일 %H시 %M분',  # '2006년 10월 25일 14시 30분'
]

# Number formatting: '.' decimal point, ',' thousands separator in groups of 3.
DECIMAL_SEPARATOR = '.'
THOUSAND_SEPARATOR = ','
NUMBER_GROUPING = 3
| bsd-3-clause |
ar45/django | django/contrib/sessions/backends/file.py | 336 | 7715 | import datetime
import errno
import logging
import os
import shutil
import tempfile
from django.conf import settings
from django.contrib.sessions.backends.base import (
VALID_KEY_CHARS, CreateError, SessionBase,
)
from django.contrib.sessions.exceptions import InvalidSessionKey
from django.core.exceptions import ImproperlyConfigured, SuspiciousOperation
from django.utils import timezone
from django.utils.encoding import force_text
class SessionStore(SessionBase):
    """
    Implements a file based session store.

    Each session lives in its own file named
    <SESSION_COOKIE_NAME><session_key> inside the configured storage
    directory; saves are made atomic via write-to-temp-file plus rename.
    """
    def __init__(self, session_key=None):
        # Resolve (and cache, see _get_storage_path) the storage directory.
        self.storage_path = type(self)._get_storage_path()
        self.file_prefix = settings.SESSION_COOKIE_NAME
        super(SessionStore, self).__init__(session_key)

    @classmethod
    def _get_storage_path(cls):
        """
        Return the directory used for session files, caching it on the class.

        Falls back to the system temp dir when SESSION_FILE_PATH is unset;
        raises ImproperlyConfigured if the directory does not exist.
        """
        try:
            return cls._storage_path
        except AttributeError:
            storage_path = getattr(settings, "SESSION_FILE_PATH", None)
            if not storage_path:
                storage_path = tempfile.gettempdir()

            # Make sure the storage path is valid.
            if not os.path.isdir(storage_path):
                raise ImproperlyConfigured(
                    "The session storage path %r doesn't exist. Please set your"
                    " SESSION_FILE_PATH setting to an existing directory in which"
                    " Django can store session data." % storage_path)

            cls._storage_path = storage_path
            return storage_path

    def _key_to_file(self, session_key=None):
        """
        Get the file associated with this session key.
        """
        if session_key is None:
            session_key = self._get_or_create_session_key()

        # Make sure we're not vulnerable to directory traversal. Session keys
        # should always be md5s, so they should never contain directory
        # components.
        if not set(session_key).issubset(set(VALID_KEY_CHARS)):
            raise InvalidSessionKey(
                "Invalid characters in session key")

        return os.path.join(self.storage_path, self.file_prefix + session_key)

    def _last_modification(self):
        """
        Return the modification time of the file storing the session's content.
        """
        modification = os.stat(self._key_to_file()).st_mtime
        if settings.USE_TZ:
            # Timezone-aware timestamp in UTC when USE_TZ is on.
            modification = datetime.datetime.utcfromtimestamp(modification)
            modification = modification.replace(tzinfo=timezone.utc)
        else:
            modification = datetime.datetime.fromtimestamp(modification)
        return modification

    def load(self):
        """
        Read and decode this session's file, expiring it if it is stale.

        Returns an (possibly empty) session dict; on any read/validation
        failure the session key is reset to None.
        """
        session_data = {}
        try:
            with open(self._key_to_file(), "rb") as session_file:
                file_data = session_file.read()
            # Don't fail if there is no data in the session file.
            # We may have opened the empty placeholder file.
            if file_data:
                try:
                    session_data = self.decode(file_data)
                except (EOFError, SuspiciousOperation) as e:
                    # Log tampering attempts under django.security.*, then
                    # start a fresh session.
                    if isinstance(e, SuspiciousOperation):
                        logger = logging.getLogger('django.security.%s' %
                                e.__class__.__name__)
                        logger.warning(force_text(e))
                    self.create()

                # Remove expired sessions.
                expiry_age = self.get_expiry_age(
                    modification=self._last_modification(),
                    expiry=session_data.get('_session_expiry'))
                if expiry_age < 0:
                    session_data = {}
                    self.delete()
                    self.create()
        except (IOError, SuspiciousOperation):
            self._session_key = None
        return session_data

    def create(self):
        """
        Create a new session with a fresh key, retrying on key collisions.
        """
        while True:
            self._session_key = self._get_new_session_key()
            try:
                self.save(must_create=True)
            except CreateError:
                # Key already exists on disk; try another one.
                continue
            self.modified = True
            return

    def save(self, must_create=False):
        """
        Persist the session data to its file.

        With must_create=True, raise CreateError if the file already exists
        (used by create() to guarantee key uniqueness).
        """
        if self.session_key is None:
            return self.create()
        # Get the session data now, before we start messing
        # with the file it is stored within.
        session_data = self._get_session(no_load=must_create)

        session_file_name = self._key_to_file()

        try:
            # Make sure the file exists.  If it does not already exist, an
            # empty placeholder file is created.
            flags = os.O_WRONLY | os.O_CREAT | getattr(os, 'O_BINARY', 0)
            if must_create:
                flags |= os.O_EXCL
            fd = os.open(session_file_name, flags)
            os.close(fd)
        except OSError as e:
            if must_create and e.errno == errno.EEXIST:
                raise CreateError
            raise

        # Write the session file without interfering with other threads
        # or processes.  By writing to an atomically generated temporary
        # file and then using the atomic os.rename() to make the complete
        # file visible, we avoid having to lock the session file, while
        # still maintaining its integrity.
        #
        # Note: Locking the session file was explored, but rejected in part
        # because in order to be atomic and cross-platform, it required a
        # long-lived lock file for each session, doubling the number of
        # files in the session storage directory at any given time.  This
        # rename solution is cleaner and avoids any additional overhead
        # when reading the session data, which is the more common case
        # unless SESSION_SAVE_EVERY_REQUEST = True.
        #
        # See ticket #8616.
        dir, prefix = os.path.split(session_file_name)

        try:
            output_file_fd, output_file_name = tempfile.mkstemp(dir=dir,
                prefix=prefix + '_out_')
            renamed = False
            try:
                try:
                    os.write(output_file_fd, self.encode(session_data).encode())
                finally:
                    os.close(output_file_fd)

                # This will atomically rename the file (os.rename) if the OS
                # supports it. Otherwise this will result in a shutil.copy2
                # and os.unlink (for example on Windows). See #9084.
                shutil.move(output_file_name, session_file_name)
                renamed = True
            finally:
                if not renamed:
                    # Never leave a half-written temp file behind.
                    os.unlink(output_file_name)
        except (OSError, IOError, EOFError):
            pass

    def exists(self, session_key):
        # A session exists iff its backing file does.
        return os.path.exists(self._key_to_file(session_key))

    def delete(self, session_key=None):
        """
        Remove the session file; missing files are ignored.
        """
        if session_key is None:
            if self.session_key is None:
                return
            session_key = self.session_key
        try:
            os.unlink(self._key_to_file(session_key))
        except OSError:
            pass

    def clean(self):
        # Expired sessions are cleaned lazily in load()/clear_expired().
        pass

    @classmethod
    def clear_expired(cls):
        """
        Walk the storage directory and drop every expired session file.
        """
        storage_path = cls._get_storage_path()
        file_prefix = settings.SESSION_COOKIE_NAME

        for session_file in os.listdir(storage_path):
            if not session_file.startswith(file_prefix):
                continue
            session_key = session_file[len(file_prefix):]
            session = cls(session_key)
            # When an expired session is loaded, its file is removed, and a
            # new file is immediately created. Prevent this by disabling
            # the create() method.
            session.create = lambda: None
            session.load()
| bsd-3-clause |
Zhongqilong/mykbengineer | kbe/src/lib/python/Lib/test/test_email/test__header_value_parser.py | 68 | 111429 | import string
import unittest
from email import _header_value_parser as parser
from email import errors
from email import policy
from test.test_email import TestEmailBase, parameterize
class TestTokens(TestEmailBase):
    """Direct tests for individual token classes of the header parser."""

    def test_EWWhiteSpaceTerminal(self):
        # Whitespace between encoded words decodes to the empty string but
        # keeps its raw form available for reassembly.
        token = parser.EWWhiteSpaceTerminal(' \t', 'fws')
        self.assertEqual(token, ' \t')
        self.assertEqual(token.encoded, ' \t')
        self.assertEqual(str(token), '')
        self.assertEqual(token.value, '')

    def test_undecodable_bytes_error_preserved(self):
        # Surrogate-escaped (undecodable) bytes must surface a defect on the
        # UnstructuredTokenList and only on the part that contains them.
        raw = b"le pouf c\xaflebre".decode('ascii', 'surrogateescape')
        token_list = parser.get_unstructured(raw)
        self.assertDefectsEqual(token_list.all_defects,
                                [errors.UndecodableBytesDefect])
        parts = list(token_list.parts)
        for clean_part in parts[:2]:
            self.assertDefectsEqual(clean_part.all_defects, [])
        self.assertDefectsEqual(parts[2].all_defects,
                                [errors.UndecodableBytesDefect])
class TestParserMixin:
    """Assertion helpers shared by the parser test classes."""

    def _assert_results(self, tl, rest, string, value, defects, remainder,
                        comments=None):
        # Core checks applied to every parsed token list: its string form,
        # its decoded value, the defects it collected, and the unparsed
        # remainder; comments are only checked when explicitly supplied.
        self.assertEqual(str(tl), string)
        self.assertEqual(tl.value, value)
        self.assertDefectsEqual(tl.all_defects, defects)
        self.assertEqual(rest, remainder)
        if comments is not None:
            self.assertEqual(tl.comments, comments)

    def _test_get_x(self, method, source, string, value, defects,
                    remainder, comments=None):
        # Drive a get_* style parser, which returns (token, unparsed-rest).
        # NOTE(review): the caller-supplied `comments` argument is not
        # forwarded here (comments=None is passed) -- possibly intentional;
        # confirm before changing.
        token, unparsed = method(source)
        self._assert_results(token, unparsed, string, value, defects,
                             remainder, comments=None)
        return token

    def _test_parse_x(self, method, input, string, value, defects,
                      comments=None):
        # Drive a parse_* style parser, which must consume all its input.
        token = method(input)
        self._assert_results(token, '', string, value, defects, '', comments)
        return token
class TestParser(TestParserMixin, TestEmailBase):
# _wsp_splitter
rfc_printable_ascii = bytes(range(33, 127)).decode('ascii')
rfc_atext_chars = (string.ascii_letters + string.digits +
"!#$%&\'*+-/=?^_`{}|~")
rfc_dtext_chars = rfc_printable_ascii.translate(str.maketrans('','',r'\[]'))
def test__wsp_splitter_one_word(self):
self.assertEqual(parser._wsp_splitter('foo', 1), ['foo'])
def test__wsp_splitter_two_words(self):
self.assertEqual(parser._wsp_splitter('foo def', 1),
['foo', ' ', 'def'])
def test__wsp_splitter_ws_runs(self):
self.assertEqual(parser._wsp_splitter('foo \t def jik', 1),
['foo', ' \t ', 'def jik'])
# get_fws
def test_get_fws_only(self):
fws = self._test_get_x(parser.get_fws, ' \t ', ' \t ', ' ', [], '')
self.assertEqual(fws.token_type, 'fws')
def test_get_fws_space(self):
self._test_get_x(parser.get_fws, ' foo', ' ', ' ', [], 'foo')
def test_get_fws_ws_run(self):
self._test_get_x(parser.get_fws, ' \t foo ', ' \t ', ' ', [], 'foo ')
# get_encoded_word
def test_get_encoded_word_missing_start_raises(self):
with self.assertRaises(errors.HeaderParseError):
parser.get_encoded_word('abc')
def test_get_encoded_word_missing_end_raises(self):
with self.assertRaises(errors.HeaderParseError):
parser.get_encoded_word('=?abc')
def test_get_encoded_word_missing_middle_raises(self):
with self.assertRaises(errors.HeaderParseError):
parser.get_encoded_word('=?abc?=')
def test_get_encoded_word_valid_ew(self):
self._test_get_x(parser.get_encoded_word,
'=?us-ascii?q?this_is_a_test?= bird',
'this is a test',
'this is a test',
[],
' bird')
def test_get_encoded_word_internal_spaces(self):
self._test_get_x(parser.get_encoded_word,
'=?us-ascii?q?this is a test?= bird',
'this is a test',
'this is a test',
[errors.InvalidHeaderDefect],
' bird')
def test_get_encoded_word_gets_first(self):
self._test_get_x(parser.get_encoded_word,
'=?us-ascii?q?first?= =?utf-8?q?second?=',
'first',
'first',
[],
' =?utf-8?q?second?=')
def test_get_encoded_word_gets_first_even_if_no_space(self):
self._test_get_x(parser.get_encoded_word,
'=?us-ascii?q?first?==?utf-8?q?second?=',
'first',
'first',
[],
'=?utf-8?q?second?=')
def test_get_encoded_word_sets_extra_attributes(self):
ew = self._test_get_x(parser.get_encoded_word,
'=?us-ascii*jive?q?first_second?=',
'first second',
'first second',
[],
'')
self.assertEqual(ew.encoded, '=?us-ascii*jive?q?first_second?=')
self.assertEqual(ew.charset, 'us-ascii')
self.assertEqual(ew.lang, 'jive')
def test_get_encoded_word_lang_default_is_blank(self):
ew = self._test_get_x(parser.get_encoded_word,
'=?us-ascii?q?first_second?=',
'first second',
'first second',
[],
'')
self.assertEqual(ew.encoded, '=?us-ascii?q?first_second?=')
self.assertEqual(ew.charset, 'us-ascii')
self.assertEqual(ew.lang, '')
def test_get_encoded_word_non_printable_defect(self):
self._test_get_x(parser.get_encoded_word,
'=?us-ascii?q?first\x02second?=',
'first\x02second',
'first\x02second',
[errors.NonPrintableDefect],
'')
def test_get_encoded_word_leading_internal_space(self):
self._test_get_x(parser.get_encoded_word,
'=?us-ascii?q?=20foo?=',
' foo',
' foo',
[],
'')
def test_get_encoded_word_quopri_utf_escape_follows_cte(self):
# Issue 18044
self._test_get_x(parser.get_encoded_word,
'=?utf-8?q?=C3=89ric?=',
'Éric',
'Éric',
[],
'')
# get_unstructured
def _get_unst(self, value):
token = parser.get_unstructured(value)
return token, ''
def test_get_unstructured_null(self):
self._test_get_x(self._get_unst, '', '', '', [], '')
def test_get_unstructured_one_word(self):
self._test_get_x(self._get_unst, 'foo', 'foo', 'foo', [], '')
def test_get_unstructured_normal_phrase(self):
self._test_get_x(self._get_unst, 'foo bar bird',
'foo bar bird',
'foo bar bird',
[],
'')
def test_get_unstructured_normal_phrase_with_whitespace(self):
self._test_get_x(self._get_unst, 'foo \t bar bird',
'foo \t bar bird',
'foo bar bird',
[],
'')
def test_get_unstructured_leading_whitespace(self):
self._test_get_x(self._get_unst, ' foo bar',
' foo bar',
' foo bar',
[],
'')
def test_get_unstructured_trailing_whitespace(self):
self._test_get_x(self._get_unst, 'foo bar ',
'foo bar ',
'foo bar ',
[],
'')
def test_get_unstructured_leading_and_trailing_whitespace(self):
self._test_get_x(self._get_unst, ' foo bar ',
' foo bar ',
' foo bar ',
[],
'')
def test_get_unstructured_one_valid_ew_no_ws(self):
self._test_get_x(self._get_unst, '=?us-ascii?q?bar?=',
'bar',
'bar',
[],
'')
def test_get_unstructured_one_ew_trailing_ws(self):
self._test_get_x(self._get_unst, '=?us-ascii?q?bar?= ',
'bar ',
'bar ',
[],
'')
def test_get_unstructured_one_valid_ew_trailing_text(self):
self._test_get_x(self._get_unst, '=?us-ascii?q?bar?= bird',
'bar bird',
'bar bird',
[],
'')
def test_get_unstructured_phrase_with_ew_in_middle_of_text(self):
self._test_get_x(self._get_unst, 'foo =?us-ascii?q?bar?= bird',
'foo bar bird',
'foo bar bird',
[],
'')
def test_get_unstructured_phrase_with_two_ew(self):
self._test_get_x(self._get_unst,
'foo =?us-ascii?q?bar?= =?us-ascii?q?bird?=',
'foo barbird',
'foo barbird',
[],
'')
def test_get_unstructured_phrase_with_two_ew_trailing_ws(self):
self._test_get_x(self._get_unst,
'foo =?us-ascii?q?bar?= =?us-ascii?q?bird?= ',
'foo barbird ',
'foo barbird ',
[],
'')
def test_get_unstructured_phrase_with_ew_with_leading_ws(self):
self._test_get_x(self._get_unst,
' =?us-ascii?q?bar?=',
' bar',
' bar',
[],
'')
def test_get_unstructured_phrase_with_two_ew_extra_ws(self):
self._test_get_x(self._get_unst,
'foo =?us-ascii?q?bar?= \t =?us-ascii?q?bird?=',
'foo barbird',
'foo barbird',
[],
'')
def test_get_unstructured_two_ew_extra_ws_trailing_text(self):
self._test_get_x(self._get_unst,
'=?us-ascii?q?test?= =?us-ascii?q?foo?= val',
'testfoo val',
'testfoo val',
[],
'')
def test_get_unstructured_ew_with_internal_ws(self):
self._test_get_x(self._get_unst,
'=?iso-8859-1?q?hello=20world?=',
'hello world',
'hello world',
[],
'')
def test_get_unstructured_ew_with_internal_leading_ws(self):
self._test_get_x(self._get_unst,
' =?us-ascii?q?=20test?= =?us-ascii?q?=20foo?= val',
' test foo val',
' test foo val',
[],
'')
def test_get_unstructured_invaild_ew(self):
self._test_get_x(self._get_unst,
'=?test val',
'=?test val',
'=?test val',
[],
'')
def test_get_unstructured_undecodable_bytes(self):
self._test_get_x(self._get_unst,
b'test \xACfoo val'.decode('ascii', 'surrogateescape'),
'test \uDCACfoo val',
'test \uDCACfoo val',
[errors.UndecodableBytesDefect],
'')
def test_get_unstructured_undecodable_bytes_in_EW(self):
self._test_get_x(self._get_unst,
(b'=?us-ascii?q?=20test?= =?us-ascii?q?=20\xACfoo?='
b' val').decode('ascii', 'surrogateescape'),
' test \uDCACfoo val',
' test \uDCACfoo val',
[errors.UndecodableBytesDefect]*2,
'')
def test_get_unstructured_missing_base64_padding(self):
self._test_get_x(self._get_unst,
'=?utf-8?b?dmk?=',
'vi',
'vi',
[errors.InvalidBase64PaddingDefect],
'')
def test_get_unstructured_invalid_base64_character(self):
self._test_get_x(self._get_unst,
'=?utf-8?b?dm\x01k===?=',
'vi',
'vi',
[errors.InvalidBase64CharactersDefect],
'')
def test_get_unstructured_invalid_base64_character_and_bad_padding(self):
self._test_get_x(self._get_unst,
'=?utf-8?b?dm\x01k?=',
'vi',
'vi',
[errors.InvalidBase64CharactersDefect,
errors.InvalidBase64PaddingDefect],
'')
def test_get_unstructured_no_whitespace_between_ews(self):
self._test_get_x(self._get_unst,
'=?utf-8?q?foo?==?utf-8?q?bar?=',
'foobar',
'foobar',
[errors.InvalidHeaderDefect],
'')
# get_qp_ctext
def test_get_qp_ctext_only(self):
ptext = self._test_get_x(parser.get_qp_ctext,
'foobar', 'foobar', ' ', [], '')
self.assertEqual(ptext.token_type, 'ptext')
def test_get_qp_ctext_all_printables(self):
with_qp = self.rfc_printable_ascii.replace('\\', '\\\\')
with_qp = with_qp. replace('(', r'\(')
with_qp = with_qp.replace(')', r'\)')
ptext = self._test_get_x(parser.get_qp_ctext,
with_qp, self.rfc_printable_ascii, ' ', [], '')
def test_get_qp_ctext_two_words_gets_first(self):
self._test_get_x(parser.get_qp_ctext,
'foo de', 'foo', ' ', [], ' de')
def test_get_qp_ctext_following_wsp_preserved(self):
self._test_get_x(parser.get_qp_ctext,
'foo \t\tde', 'foo', ' ', [], ' \t\tde')
def test_get_qp_ctext_up_to_close_paren_only(self):
self._test_get_x(parser.get_qp_ctext,
'foo)', 'foo', ' ', [], ')')
def test_get_qp_ctext_wsp_before_close_paren_preserved(self):
self._test_get_x(parser.get_qp_ctext,
'foo )', 'foo', ' ', [], ' )')
def test_get_qp_ctext_close_paren_mid_word(self):
self._test_get_x(parser.get_qp_ctext,
'foo)bar', 'foo', ' ', [], ')bar')
def test_get_qp_ctext_up_to_open_paren_only(self):
self._test_get_x(parser.get_qp_ctext,
'foo(', 'foo', ' ', [], '(')
def test_get_qp_ctext_wsp_before_open_paren_preserved(self):
self._test_get_x(parser.get_qp_ctext,
'foo (', 'foo', ' ', [], ' (')
def test_get_qp_ctext_open_paren_mid_word(self):
self._test_get_x(parser.get_qp_ctext,
'foo(bar', 'foo', ' ', [], '(bar')
def test_get_qp_ctext_non_printables(self):
ptext = self._test_get_x(parser.get_qp_ctext,
'foo\x00bar)', 'foo\x00bar', ' ',
[errors.NonPrintableDefect], ')')
self.assertEqual(ptext.defects[0].non_printables[0], '\x00')
# get_qcontent
def test_get_qcontent_only(self):
ptext = self._test_get_x(parser.get_qcontent,
'foobar', 'foobar', 'foobar', [], '')
self.assertEqual(ptext.token_type, 'ptext')
def test_get_qcontent_all_printables(self):
with_qp = self.rfc_printable_ascii.replace('\\', '\\\\')
with_qp = with_qp. replace('"', r'\"')
ptext = self._test_get_x(parser.get_qcontent, with_qp,
self.rfc_printable_ascii,
self.rfc_printable_ascii, [], '')
def test_get_qcontent_two_words_gets_first(self):
self._test_get_x(parser.get_qcontent,
'foo de', 'foo', 'foo', [], ' de')
def test_get_qcontent_following_wsp_preserved(self):
self._test_get_x(parser.get_qcontent,
'foo \t\tde', 'foo', 'foo', [], ' \t\tde')
def test_get_qcontent_up_to_dquote_only(self):
self._test_get_x(parser.get_qcontent,
'foo"', 'foo', 'foo', [], '"')
def test_get_qcontent_wsp_before_close_paren_preserved(self):
self._test_get_x(parser.get_qcontent,
'foo "', 'foo', 'foo', [], ' "')
def test_get_qcontent_close_paren_mid_word(self):
self._test_get_x(parser.get_qcontent,
'foo"bar', 'foo', 'foo', [], '"bar')
def test_get_qcontent_non_printables(self):
ptext = self._test_get_x(parser.get_qcontent,
'foo\x00fg"', 'foo\x00fg', 'foo\x00fg',
[errors.NonPrintableDefect], '"')
self.assertEqual(ptext.defects[0].non_printables[0], '\x00')
# get_atext
def test_get_atext_only(self):
atext = self._test_get_x(parser.get_atext,
'foobar', 'foobar', 'foobar', [], '')
self.assertEqual(atext.token_type, 'atext')
def test_get_atext_all_atext(self):
atext = self._test_get_x(parser.get_atext, self.rfc_atext_chars,
self.rfc_atext_chars,
self.rfc_atext_chars, [], '')
def test_get_atext_two_words_gets_first(self):
self._test_get_x(parser.get_atext,
'foo bar', 'foo', 'foo', [], ' bar')
def test_get_atext_following_wsp_preserved(self):
self._test_get_x(parser.get_atext,
'foo \t\tbar', 'foo', 'foo', [], ' \t\tbar')
def test_get_atext_up_to_special(self):
self._test_get_x(parser.get_atext,
'foo@bar', 'foo', 'foo', [], '@bar')
def test_get_atext_non_printables(self):
atext = self._test_get_x(parser.get_atext,
'foo\x00bar(', 'foo\x00bar', 'foo\x00bar',
[errors.NonPrintableDefect], '(')
self.assertEqual(atext.defects[0].non_printables[0], '\x00')
# get_bare_quoted_string
def test_get_bare_quoted_string_only(self):
bqs = self._test_get_x(parser.get_bare_quoted_string,
'"foo"', '"foo"', 'foo', [], '')
self.assertEqual(bqs.token_type, 'bare-quoted-string')
def test_get_bare_quoted_string_must_start_with_dquote(self):
with self.assertRaises(errors.HeaderParseError):
parser.get_bare_quoted_string('foo"')
with self.assertRaises(errors.HeaderParseError):
parser.get_bare_quoted_string(' "foo"')
def test_get_bare_quoted_string_following_wsp_preserved(self):
self._test_get_x(parser.get_bare_quoted_string,
'"foo"\t bar', '"foo"', 'foo', [], '\t bar')
def test_get_bare_quoted_string_multiple_words(self):
self._test_get_x(parser.get_bare_quoted_string,
'"foo bar moo"', '"foo bar moo"', 'foo bar moo', [], '')
def test_get_bare_quoted_string_multiple_words_wsp_preserved(self):
self._test_get_x(parser.get_bare_quoted_string,
'" foo moo\t"', '" foo moo\t"', ' foo moo\t', [], '')
def test_get_bare_quoted_string_end_dquote_mid_word(self):
self._test_get_x(parser.get_bare_quoted_string,
'"foo"bar', '"foo"', 'foo', [], 'bar')
def test_get_bare_quoted_string_quoted_dquote(self):
self._test_get_x(parser.get_bare_quoted_string,
r'"foo\"in"a', r'"foo\"in"', 'foo"in', [], 'a')
def test_get_bare_quoted_string_non_printables(self):
self._test_get_x(parser.get_bare_quoted_string,
'"a\x01a"', '"a\x01a"', 'a\x01a',
[errors.NonPrintableDefect], '')
def test_get_bare_quoted_string_no_end_dquote(self):
self._test_get_x(parser.get_bare_quoted_string,
'"foo', '"foo"', 'foo',
[errors.InvalidHeaderDefect], '')
self._test_get_x(parser.get_bare_quoted_string,
'"foo ', '"foo "', 'foo ',
[errors.InvalidHeaderDefect], '')
def test_get_bare_quoted_string_empty_quotes(self):
self._test_get_x(parser.get_bare_quoted_string,
'""', '""', '', [], '')
# Issue 16983: apply postel's law to some bad encoding.
def test_encoded_word_inside_quotes(self):
self._test_get_x(parser.get_bare_quoted_string,
'"=?utf-8?Q?not_really_valid?="',
'"not really valid"',
'not really valid',
[errors.InvalidHeaderDefect],
'')
# get_comment
def test_get_comment_only(self):
comment = self._test_get_x(parser.get_comment,
'(comment)', '(comment)', ' ', [], '', ['comment'])
self.assertEqual(comment.token_type, 'comment')
def test_get_comment_must_start_with_paren(self):
with self.assertRaises(errors.HeaderParseError):
parser.get_comment('foo"')
with self.assertRaises(errors.HeaderParseError):
parser.get_comment(' (foo"')
def test_get_comment_following_wsp_preserved(self):
self._test_get_x(parser.get_comment,
'(comment) \t', '(comment)', ' ', [], ' \t', ['comment'])
def test_get_comment_multiple_words(self):
self._test_get_x(parser.get_comment,
'(foo bar) \t', '(foo bar)', ' ', [], ' \t', ['foo bar'])
def test_get_comment_multiple_words_wsp_preserved(self):
self._test_get_x(parser.get_comment,
'( foo bar\t ) \t', '( foo bar\t )', ' ', [], ' \t',
[' foo bar\t '])
def test_get_comment_end_paren_mid_word(self):
self._test_get_x(parser.get_comment,
'(foo)bar', '(foo)', ' ', [], 'bar', ['foo'])
def test_get_comment_quoted_parens(self):
self._test_get_x(parser.get_comment,
'(foo\) \(\)bar)', '(foo\) \(\)bar)', ' ', [], '', ['foo) ()bar'])
def test_get_comment_non_printable(self):
self._test_get_x(parser.get_comment,
'(foo\x7Fbar)', '(foo\x7Fbar)', ' ',
[errors.NonPrintableDefect], '', ['foo\x7Fbar'])
def test_get_comment_no_end_paren(self):
self._test_get_x(parser.get_comment,
'(foo bar', '(foo bar)', ' ',
[errors.InvalidHeaderDefect], '', ['foo bar'])
self._test_get_x(parser.get_comment,
'(foo bar ', '(foo bar )', ' ',
[errors.InvalidHeaderDefect], '', ['foo bar '])
def test_get_comment_nested_comment(self):
comment = self._test_get_x(parser.get_comment,
'(foo(bar))', '(foo(bar))', ' ', [], '', ['foo(bar)'])
self.assertEqual(comment[1].content, 'bar')
def test_get_comment_nested_comment_wsp(self):
comment = self._test_get_x(parser.get_comment,
'(foo ( bar ) )', '(foo ( bar ) )', ' ', [], '', ['foo ( bar ) '])
self.assertEqual(comment[2].content, ' bar ')
def test_get_comment_empty_comment(self):
self._test_get_x(parser.get_comment,
'()', '()', ' ', [], '', [''])
def test_get_comment_multiple_nesting(self):
comment = self._test_get_x(parser.get_comment,
'(((((foo)))))', '(((((foo)))))', ' ', [], '', ['((((foo))))'])
for i in range(4, 0, -1):
self.assertEqual(comment[0].content, '('*(i-1)+'foo'+')'*(i-1))
comment = comment[0]
self.assertEqual(comment.content, 'foo')
def test_get_comment_missing_end_of_nesting(self):
self._test_get_x(parser.get_comment,
'(((((foo)))', '(((((foo)))))', ' ',
[errors.InvalidHeaderDefect]*2, '', ['((((foo))))'])
def test_get_comment_qs_in_nested_comment(self):
comment = self._test_get_x(parser.get_comment,
'(foo (b\)))', '(foo (b\)))', ' ', [], '', ['foo (b\))'])
self.assertEqual(comment[2].content, 'b)')
# get_cfws
def test_get_cfws_only_ws(self):
cfws = self._test_get_x(parser.get_cfws,
' \t \t', ' \t \t', ' ', [], '', [])
self.assertEqual(cfws.token_type, 'cfws')
def test_get_cfws_only_comment(self):
cfws = self._test_get_x(parser.get_cfws,
'(foo)', '(foo)', ' ', [], '', ['foo'])
self.assertEqual(cfws[0].content, 'foo')
def test_get_cfws_only_mixed(self):
cfws = self._test_get_x(parser.get_cfws,
' (foo ) ( bar) ', ' (foo ) ( bar) ', ' ', [], '',
['foo ', ' bar'])
self.assertEqual(cfws[1].content, 'foo ')
self.assertEqual(cfws[3].content, ' bar')
def test_get_cfws_ends_at_non_leader(self):
cfws = self._test_get_x(parser.get_cfws,
'(foo) bar', '(foo) ', ' ', [], 'bar', ['foo'])
self.assertEqual(cfws[0].content, 'foo')
def test_get_cfws_ends_at_non_printable(self):
cfws = self._test_get_x(parser.get_cfws,
'(foo) \x07', '(foo) ', ' ', [], '\x07', ['foo'])
self.assertEqual(cfws[0].content, 'foo')
def test_get_cfws_non_printable_in_comment(self):
cfws = self._test_get_x(parser.get_cfws,
'(foo \x07) "test"', '(foo \x07) ', ' ',
[errors.NonPrintableDefect], '"test"', ['foo \x07'])
self.assertEqual(cfws[0].content, 'foo \x07')
def test_get_cfws_header_ends_in_comment(self):
cfws = self._test_get_x(parser.get_cfws,
' (foo ', ' (foo )', ' ',
[errors.InvalidHeaderDefect], '', ['foo '])
self.assertEqual(cfws[1].content, 'foo ')
def test_get_cfws_multiple_nested_comments(self):
cfws = self._test_get_x(parser.get_cfws,
'(foo (bar)) ((a)(a))', '(foo (bar)) ((a)(a))', ' ', [],
'', ['foo (bar)', '(a)(a)'])
self.assertEqual(cfws[0].comments, ['foo (bar)'])
self.assertEqual(cfws[2].comments, ['(a)(a)'])
# get_quoted_string
def test_get_quoted_string_only(self):
qs = self._test_get_x(parser.get_quoted_string,
'"bob"', '"bob"', 'bob', [], '')
self.assertEqual(qs.token_type, 'quoted-string')
self.assertEqual(qs.quoted_value, '"bob"')
self.assertEqual(qs.content, 'bob')
def test_get_quoted_string_with_wsp(self):
qs = self._test_get_x(parser.get_quoted_string,
'\t "bob" ', '\t "bob" ', ' bob ', [], '')
self.assertEqual(qs.quoted_value, ' "bob" ')
self.assertEqual(qs.content, 'bob')
def test_get_quoted_string_with_comments_and_wsp(self):
qs = self._test_get_x(parser.get_quoted_string,
' (foo) "bob"(bar)', ' (foo) "bob"(bar)', ' bob ', [], '')
self.assertEqual(qs[0][1].content, 'foo')
self.assertEqual(qs[2][0].content, 'bar')
self.assertEqual(qs.content, 'bob')
self.assertEqual(qs.quoted_value, ' "bob" ')
def test_get_quoted_string_with_multiple_comments(self):
qs = self._test_get_x(parser.get_quoted_string,
' (foo) (bar) "bob"(bird)', ' (foo) (bar) "bob"(bird)', ' bob ',
[], '')
self.assertEqual(qs[0].comments, ['foo', 'bar'])
self.assertEqual(qs[2].comments, ['bird'])
self.assertEqual(qs.content, 'bob')
self.assertEqual(qs.quoted_value, ' "bob" ')
def test_get_quoted_string_non_printable_in_comment(self):
qs = self._test_get_x(parser.get_quoted_string,
' (\x0A) "bob"', ' (\x0A) "bob"', ' bob',
[errors.NonPrintableDefect], '')
self.assertEqual(qs[0].comments, ['\x0A'])
self.assertEqual(qs.content, 'bob')
self.assertEqual(qs.quoted_value, ' "bob"')
def test_get_quoted_string_non_printable_in_qcontent(self):
qs = self._test_get_x(parser.get_quoted_string,
' (a) "a\x0B"', ' (a) "a\x0B"', ' a\x0B',
[errors.NonPrintableDefect], '')
self.assertEqual(qs[0].comments, ['a'])
self.assertEqual(qs.content, 'a\x0B')
self.assertEqual(qs.quoted_value, ' "a\x0B"')
def test_get_quoted_string_internal_ws(self):
qs = self._test_get_x(parser.get_quoted_string,
' (a) "foo bar "', ' (a) "foo bar "', ' foo bar ',
[], '')
self.assertEqual(qs[0].comments, ['a'])
self.assertEqual(qs.content, 'foo bar ')
self.assertEqual(qs.quoted_value, ' "foo bar "')
def test_get_quoted_string_header_ends_in_comment(self):
qs = self._test_get_x(parser.get_quoted_string,
' (a) "bob" (a', ' (a) "bob" (a)', ' bob ',
[errors.InvalidHeaderDefect], '')
self.assertEqual(qs[0].comments, ['a'])
self.assertEqual(qs[2].comments, ['a'])
self.assertEqual(qs.content, 'bob')
self.assertEqual(qs.quoted_value, ' "bob" ')
def test_get_quoted_string_header_ends_in_qcontent(self):
qs = self._test_get_x(parser.get_quoted_string,
' (a) "bob', ' (a) "bob"', ' bob',
[errors.InvalidHeaderDefect], '')
self.assertEqual(qs[0].comments, ['a'])
self.assertEqual(qs.content, 'bob')
self.assertEqual(qs.quoted_value, ' "bob"')
def test_get_quoted_string_no_quoted_string(self):
with self.assertRaises(errors.HeaderParseError):
parser.get_quoted_string(' (ab) xyz')
def test_get_quoted_string_qs_ends_at_noncfws(self):
qs = self._test_get_x(parser.get_quoted_string,
'\t "bob" fee', '\t "bob" ', ' bob ', [], 'fee')
self.assertEqual(qs.content, 'bob')
self.assertEqual(qs.quoted_value, ' "bob" ')
# get_atom
def test_get_atom_only(self):
atom = self._test_get_x(parser.get_atom,
'bob', 'bob', 'bob', [], '')
self.assertEqual(atom.token_type, 'atom')
def test_get_atom_with_wsp(self):
self._test_get_x(parser.get_atom,
'\t bob ', '\t bob ', ' bob ', [], '')
def test_get_atom_with_comments_and_wsp(self):
atom = self._test_get_x(parser.get_atom,
' (foo) bob(bar)', ' (foo) bob(bar)', ' bob ', [], '')
self.assertEqual(atom[0][1].content, 'foo')
self.assertEqual(atom[2][0].content, 'bar')
def test_get_atom_with_multiple_comments(self):
atom = self._test_get_x(parser.get_atom,
' (foo) (bar) bob(bird)', ' (foo) (bar) bob(bird)', ' bob ',
[], '')
self.assertEqual(atom[0].comments, ['foo', 'bar'])
self.assertEqual(atom[2].comments, ['bird'])
def test_get_atom_non_printable_in_comment(self):
atom = self._test_get_x(parser.get_atom,
' (\x0A) bob', ' (\x0A) bob', ' bob',
[errors.NonPrintableDefect], '')
self.assertEqual(atom[0].comments, ['\x0A'])
def test_get_atom_non_printable_in_atext(self):
atom = self._test_get_x(parser.get_atom,
' (a) a\x0B', ' (a) a\x0B', ' a\x0B',
[errors.NonPrintableDefect], '')
self.assertEqual(atom[0].comments, ['a'])
def test_get_atom_header_ends_in_comment(self):
atom = self._test_get_x(parser.get_atom,
' (a) bob (a', ' (a) bob (a)', ' bob ',
[errors.InvalidHeaderDefect], '')
self.assertEqual(atom[0].comments, ['a'])
self.assertEqual(atom[2].comments, ['a'])
def test_get_atom_no_atom(self):
with self.assertRaises(errors.HeaderParseError):
parser.get_atom(' (ab) ')
def test_get_atom_no_atom_before_special(self):
with self.assertRaises(errors.HeaderParseError):
parser.get_atom(' (ab) @')
def test_get_atom_atom_ends_at_special(self):
atom = self._test_get_x(parser.get_atom,
' (foo) bob(bar) @bang', ' (foo) bob(bar) ', ' bob ', [], '@bang')
self.assertEqual(atom[0].comments, ['foo'])
self.assertEqual(atom[2].comments, ['bar'])
def test_get_atom_atom_ends_at_noncfws(self):
self._test_get_x(parser.get_atom,
'bob fred', 'bob ', 'bob ', [], 'fred')
def test_get_atom_rfc2047_atom(self):
self._test_get_x(parser.get_atom,
'=?utf-8?q?=20bob?=', ' bob', ' bob', [], '')
# get_dot_atom_text
    def test_get_dot_atom_text(self):
        # A dot-atom-text is alternating atext/dot children:
        # 'foo.bar.bang' yields 5 tokens (3 atext, 2 dots).
        dot_atom_text = self._test_get_x(parser.get_dot_atom_text,
            'foo.bar.bang', 'foo.bar.bang', 'foo.bar.bang', [], '')
        self.assertEqual(dot_atom_text.token_type, 'dot-atom-text')
        self.assertEqual(len(dot_atom_text), 5)
def test_get_dot_atom_text_lone_atom_is_valid(self):
dot_atom_text = self._test_get_x(parser.get_dot_atom_text,
'foo', 'foo', 'foo', [], '')
def test_get_dot_atom_text_raises_on_leading_dot(self):
with self.assertRaises(errors.HeaderParseError):
parser.get_dot_atom_text('.foo.bar')
def test_get_dot_atom_text_raises_on_trailing_dot(self):
with self.assertRaises(errors.HeaderParseError):
parser.get_dot_atom_text('foo.bar.')
def test_get_dot_atom_text_raises_on_leading_non_atext(self):
with self.assertRaises(errors.HeaderParseError):
parser.get_dot_atom_text(' foo.bar')
with self.assertRaises(errors.HeaderParseError):
parser.get_dot_atom_text('@foo.bar')
with self.assertRaises(errors.HeaderParseError):
parser.get_dot_atom_text('"foo.bar"')
def test_get_dot_atom_text_trailing_text_preserved(self):
dot_atom_text = self._test_get_x(parser.get_dot_atom_text,
'foo@bar', 'foo', 'foo', [], '@bar')
def test_get_dot_atom_text_trailing_ws_preserved(self):
dot_atom_text = self._test_get_x(parser.get_dot_atom_text,
'foo .bar', 'foo', 'foo', [], ' .bar')
# get_dot_atom
def test_get_dot_atom_only(self):
dot_atom = self._test_get_x(parser.get_dot_atom,
'foo.bar.bing', 'foo.bar.bing', 'foo.bar.bing', [], '')
self.assertEqual(dot_atom.token_type, 'dot-atom')
self.assertEqual(len(dot_atom), 1)
def test_get_dot_atom_with_wsp(self):
self._test_get_x(parser.get_dot_atom,
'\t foo.bar.bing ', '\t foo.bar.bing ', ' foo.bar.bing ', [], '')
def test_get_dot_atom_with_comments_and_wsp(self):
self._test_get_x(parser.get_dot_atom,
' (sing) foo.bar.bing (here) ', ' (sing) foo.bar.bing (here) ',
' foo.bar.bing ', [], '')
def test_get_dot_atom_space_ends_dot_atom(self):
self._test_get_x(parser.get_dot_atom,
' (sing) foo.bar .bing (here) ', ' (sing) foo.bar ',
' foo.bar ', [], '.bing (here) ')
def test_get_dot_atom_no_atom_raises(self):
with self.assertRaises(errors.HeaderParseError):
parser.get_dot_atom(' (foo) ')
def test_get_dot_atom_leading_dot_raises(self):
with self.assertRaises(errors.HeaderParseError):
parser.get_dot_atom(' (foo) .bar')
def test_get_dot_atom_two_dots_raises(self):
with self.assertRaises(errors.HeaderParseError):
parser.get_dot_atom('bar..bang')
def test_get_dot_atom_trailing_dot_raises(self):
with self.assertRaises(errors.HeaderParseError):
parser.get_dot_atom(' (foo) bar.bang. foo')
def test_get_dot_atom_rfc2047_atom(self):
self._test_get_x(parser.get_dot_atom,
'=?utf-8?q?=20bob?=', ' bob', ' bob', [], '')
# get_word (if this were black box we'd repeat all the qs/atom tests)
def test_get_word_atom_yields_atom(self):
word = self._test_get_x(parser.get_word,
' (foo) bar (bang) :ah', ' (foo) bar (bang) ', ' bar ', [], ':ah')
self.assertEqual(word.token_type, 'atom')
self.assertEqual(word[0].token_type, 'cfws')
def test_get_word_qs_yields_qs(self):
word = self._test_get_x(parser.get_word,
'"bar " (bang) ah', '"bar " (bang) ', 'bar ', [], 'ah')
self.assertEqual(word.token_type, 'quoted-string')
self.assertEqual(word[0].token_type, 'bare-quoted-string')
self.assertEqual(word[0].value, 'bar ')
self.assertEqual(word.content, 'bar ')
def test_get_word_ends_at_dot(self):
self._test_get_x(parser.get_word,
'foo.', 'foo', 'foo', [], '.')
# get_phrase
def test_get_phrase_simple(self):
phrase = self._test_get_x(parser.get_phrase,
'"Fred A. Johnson" is his name, oh.',
'"Fred A. Johnson" is his name',
'Fred A. Johnson is his name',
[],
', oh.')
self.assertEqual(phrase.token_type, 'phrase')
def test_get_phrase_complex(self):
phrase = self._test_get_x(parser.get_phrase,
' (A) bird (in (my|your)) "hand " is messy\t<>\t',
' (A) bird (in (my|your)) "hand " is messy\t',
' bird hand is messy ',
[],
'<>\t')
self.assertEqual(phrase[0][0].comments, ['A'])
self.assertEqual(phrase[0][2].comments, ['in (my|your)'])
def test_get_phrase_obsolete(self):
phrase = self._test_get_x(parser.get_phrase,
'Fred A.(weird).O Johnson',
'Fred A.(weird).O Johnson',
'Fred A. .O Johnson',
[errors.ObsoleteHeaderDefect]*3,
'')
self.assertEqual(len(phrase), 7)
self.assertEqual(phrase[3].comments, ['weird'])
def test_get_phrase_pharse_must_start_with_word(self):
phrase = self._test_get_x(parser.get_phrase,
'(even weirder).name',
'(even weirder).name',
' .name',
[errors.InvalidHeaderDefect] + [errors.ObsoleteHeaderDefect]*2,
'')
self.assertEqual(len(phrase), 3)
self.assertEqual(phrase[0].comments, ['even weirder'])
def test_get_phrase_ending_with_obsolete(self):
phrase = self._test_get_x(parser.get_phrase,
'simple phrase.(with trailing comment):boo',
'simple phrase.(with trailing comment)',
'simple phrase. ',
[errors.ObsoleteHeaderDefect]*2,
':boo')
self.assertEqual(len(phrase), 4)
self.assertEqual(phrase[3].comments, ['with trailing comment'])
def get_phrase_cfws_only_raises(self):
with self.assertRaises(errors.HeaderParseError):
parser.get_phrase(' (foo) ')
# get_local_part
def test_get_local_part_simple(self):
local_part = self._test_get_x(parser.get_local_part,
'dinsdale@python.org', 'dinsdale', 'dinsdale', [], '@python.org')
self.assertEqual(local_part.token_type, 'local-part')
self.assertEqual(local_part.local_part, 'dinsdale')
def test_get_local_part_with_dot(self):
local_part = self._test_get_x(parser.get_local_part,
'Fred.A.Johnson@python.org',
'Fred.A.Johnson',
'Fred.A.Johnson',
[],
'@python.org')
self.assertEqual(local_part.local_part, 'Fred.A.Johnson')
def test_get_local_part_with_whitespace(self):
local_part = self._test_get_x(parser.get_local_part,
' Fred.A.Johnson @python.org',
' Fred.A.Johnson ',
' Fred.A.Johnson ',
[],
'@python.org')
self.assertEqual(local_part.local_part, 'Fred.A.Johnson')
def test_get_local_part_with_cfws(self):
local_part = self._test_get_x(parser.get_local_part,
' (foo) Fred.A.Johnson (bar (bird)) @python.org',
' (foo) Fred.A.Johnson (bar (bird)) ',
' Fred.A.Johnson ',
[],
'@python.org')
self.assertEqual(local_part.local_part, 'Fred.A.Johnson')
self.assertEqual(local_part[0][0].comments, ['foo'])
self.assertEqual(local_part[0][2].comments, ['bar (bird)'])
def test_get_local_part_simple_quoted(self):
local_part = self._test_get_x(parser.get_local_part,
'"dinsdale"@python.org', '"dinsdale"', '"dinsdale"', [], '@python.org')
self.assertEqual(local_part.token_type, 'local-part')
self.assertEqual(local_part.local_part, 'dinsdale')
def test_get_local_part_with_quoted_dot(self):
local_part = self._test_get_x(parser.get_local_part,
'"Fred.A.Johnson"@python.org',
'"Fred.A.Johnson"',
'"Fred.A.Johnson"',
[],
'@python.org')
self.assertEqual(local_part.local_part, 'Fred.A.Johnson')
def test_get_local_part_quoted_with_whitespace(self):
local_part = self._test_get_x(parser.get_local_part,
' "Fred A. Johnson" @python.org',
' "Fred A. Johnson" ',
' "Fred A. Johnson" ',
[],
'@python.org')
self.assertEqual(local_part.local_part, 'Fred A. Johnson')
def test_get_local_part_quoted_with_cfws(self):
local_part = self._test_get_x(parser.get_local_part,
' (foo) " Fred A. Johnson " (bar (bird)) @python.org',
' (foo) " Fred A. Johnson " (bar (bird)) ',
' " Fred A. Johnson " ',
[],
'@python.org')
self.assertEqual(local_part.local_part, ' Fred A. Johnson ')
self.assertEqual(local_part[0][0].comments, ['foo'])
self.assertEqual(local_part[0][2].comments, ['bar (bird)'])
def test_get_local_part_simple_obsolete(self):
local_part = self._test_get_x(parser.get_local_part,
'Fred. A.Johnson@python.org',
'Fred. A.Johnson',
'Fred. A.Johnson',
[errors.ObsoleteHeaderDefect],
'@python.org')
self.assertEqual(local_part.local_part, 'Fred.A.Johnson')
def test_get_local_part_complex_obsolete_1(self):
local_part = self._test_get_x(parser.get_local_part,
' (foo )Fred (bar).(bird) A.(sheep)Johnson."and dogs "@python.org',
' (foo )Fred (bar).(bird) A.(sheep)Johnson."and dogs "',
' Fred . A. Johnson.and dogs ',
[errors.ObsoleteHeaderDefect],
'@python.org')
self.assertEqual(local_part.local_part, 'Fred.A.Johnson.and dogs ')
def test_get_local_part_complex_obsolete_invalid(self):
local_part = self._test_get_x(parser.get_local_part,
' (foo )Fred (bar).(bird) A.(sheep)Johnson "and dogs"@python.org',
' (foo )Fred (bar).(bird) A.(sheep)Johnson "and dogs"',
' Fred . A. Johnson and dogs',
[errors.InvalidHeaderDefect]*2,
'@python.org')
self.assertEqual(local_part.local_part, 'Fred.A.Johnson and dogs')
def test_get_local_part_no_part_raises(self):
with self.assertRaises(errors.HeaderParseError):
parser.get_local_part(' (foo) ')
def test_get_local_part_special_instead_raises(self):
with self.assertRaises(errors.HeaderParseError):
parser.get_local_part(' (foo) @python.org')
def test_get_local_part_trailing_dot(self):
local_part = self._test_get_x(parser.get_local_part,
' borris.@python.org',
' borris.',
' borris.',
[errors.InvalidHeaderDefect]*2,
'@python.org')
self.assertEqual(local_part.local_part, 'borris.')
def test_get_local_part_trailing_dot_with_ws(self):
local_part = self._test_get_x(parser.get_local_part,
' borris. @python.org',
' borris. ',
' borris. ',
[errors.InvalidHeaderDefect]*2,
'@python.org')
self.assertEqual(local_part.local_part, 'borris.')
def test_get_local_part_leading_dot(self):
local_part = self._test_get_x(parser.get_local_part,
'.borris@python.org',
'.borris',
'.borris',
[errors.InvalidHeaderDefect]*2,
'@python.org')
self.assertEqual(local_part.local_part, '.borris')
def test_get_local_part_leading_dot_after_ws(self):
local_part = self._test_get_x(parser.get_local_part,
' .borris@python.org',
' .borris',
' .borris',
[errors.InvalidHeaderDefect]*2,
'@python.org')
self.assertEqual(local_part.local_part, '.borris')
def test_get_local_part_double_dot_raises(self):
local_part = self._test_get_x(parser.get_local_part,
' borris.(foo).natasha@python.org',
' borris.(foo).natasha',
' borris. .natasha',
[errors.InvalidHeaderDefect]*2,
'@python.org')
self.assertEqual(local_part.local_part, 'borris..natasha')
def test_get_local_part_quoted_strings_in_atom_list(self):
local_part = self._test_get_x(parser.get_local_part,
'""example" example"@example.com',
'""example" example"',
'example example',
[errors.InvalidHeaderDefect]*3,
'@example.com')
self.assertEqual(local_part.local_part, 'example example')
def test_get_local_part_valid_and_invalid_qp_in_atom_list(self):
local_part = self._test_get_x(parser.get_local_part,
r'"\\"example\\" example"@example.com',
r'"\\"example\\" example"',
r'\example\\ example',
[errors.InvalidHeaderDefect]*5,
'@example.com')
self.assertEqual(local_part.local_part, r'\example\\ example')
def test_get_local_part_unicode_defect(self):
# Currently this only happens when parsing unicode, not when parsing
# stuff that was originally binary.
local_part = self._test_get_x(parser.get_local_part,
'exámple@example.com',
'exámple',
'exámple',
[errors.NonASCIILocalPartDefect],
'@example.com')
self.assertEqual(local_part.local_part, 'exámple')
# get_dtext
def test_get_dtext_only(self):
dtext = self._test_get_x(parser.get_dtext,
'foobar', 'foobar', 'foobar', [], '')
self.assertEqual(dtext.token_type, 'ptext')
def test_get_dtext_all_dtext(self):
dtext = self._test_get_x(parser.get_dtext, self.rfc_dtext_chars,
self.rfc_dtext_chars,
self.rfc_dtext_chars, [], '')
def test_get_dtext_two_words_gets_first(self):
self._test_get_x(parser.get_dtext,
'foo bar', 'foo', 'foo', [], ' bar')
def test_get_dtext_following_wsp_preserved(self):
self._test_get_x(parser.get_dtext,
'foo \t\tbar', 'foo', 'foo', [], ' \t\tbar')
def test_get_dtext_non_printables(self):
dtext = self._test_get_x(parser.get_dtext,
'foo\x00bar]', 'foo\x00bar', 'foo\x00bar',
[errors.NonPrintableDefect], ']')
self.assertEqual(dtext.defects[0].non_printables[0], '\x00')
def test_get_dtext_with_qp(self):
ptext = self._test_get_x(parser.get_dtext,
r'foo\]\[\\bar\b\e\l\l',
r'foo][\barbell',
r'foo][\barbell',
[errors.ObsoleteHeaderDefect],
'')
def test_get_dtext_up_to_close_bracket_only(self):
self._test_get_x(parser.get_dtext,
'foo]', 'foo', 'foo', [], ']')
def test_get_dtext_wsp_before_close_bracket_preserved(self):
self._test_get_x(parser.get_dtext,
'foo ]', 'foo', 'foo', [], ' ]')
def test_get_dtext_close_bracket_mid_word(self):
self._test_get_x(parser.get_dtext,
'foo]bar', 'foo', 'foo', [], ']bar')
def test_get_dtext_up_to_open_bracket_only(self):
self._test_get_x(parser.get_dtext,
'foo[', 'foo', 'foo', [], '[')
def test_get_dtext_wsp_before_open_bracket_preserved(self):
self._test_get_x(parser.get_dtext,
'foo [', 'foo', 'foo', [], ' [')
def test_get_dtext_open_bracket_mid_word(self):
self._test_get_x(parser.get_dtext,
'foo[bar', 'foo', 'foo', [], '[bar')
# get_domain_literal
def test_get_domain_literal_only(self):
domain_literal = domain_literal = self._test_get_x(parser.get_domain_literal,
'[127.0.0.1]',
'[127.0.0.1]',
'[127.0.0.1]',
[],
'')
self.assertEqual(domain_literal.token_type, 'domain-literal')
self.assertEqual(domain_literal.domain, '[127.0.0.1]')
self.assertEqual(domain_literal.ip, '127.0.0.1')
def test_get_domain_literal_with_internal_ws(self):
domain_literal = self._test_get_x(parser.get_domain_literal,
'[ 127.0.0.1\t ]',
'[ 127.0.0.1\t ]',
'[ 127.0.0.1 ]',
[],
'')
self.assertEqual(domain_literal.domain, '[127.0.0.1]')
self.assertEqual(domain_literal.ip, '127.0.0.1')
def test_get_domain_literal_with_surrounding_cfws(self):
domain_literal = self._test_get_x(parser.get_domain_literal,
'(foo)[ 127.0.0.1] (bar)',
'(foo)[ 127.0.0.1] (bar)',
' [ 127.0.0.1] ',
[],
'')
self.assertEqual(domain_literal.domain, '[127.0.0.1]')
self.assertEqual(domain_literal.ip, '127.0.0.1')
def test_get_domain_literal_no_start_char_raises(self):
with self.assertRaises(errors.HeaderParseError):
parser.get_domain_literal('(foo) ')
def test_get_domain_literal_no_start_char_before_special_raises(self):
with self.assertRaises(errors.HeaderParseError):
parser.get_domain_literal('(foo) @')
def test_get_domain_literal_bad_dtext_char_before_special_raises(self):
with self.assertRaises(errors.HeaderParseError):
parser.get_domain_literal('(foo) [abc[@')
# get_domain
def test_get_domain_regular_domain_only(self):
domain = self._test_get_x(parser.get_domain,
'example.com',
'example.com',
'example.com',
[],
'')
self.assertEqual(domain.token_type, 'domain')
self.assertEqual(domain.domain, 'example.com')
def test_get_domain_domain_literal_only(self):
domain = self._test_get_x(parser.get_domain,
'[127.0.0.1]',
'[127.0.0.1]',
'[127.0.0.1]',
[],
'')
self.assertEqual(domain.token_type, 'domain')
self.assertEqual(domain.domain, '[127.0.0.1]')
def test_get_domain_with_cfws(self):
domain = self._test_get_x(parser.get_domain,
'(foo) example.com(bar)\t',
'(foo) example.com(bar)\t',
' example.com ',
[],
'')
self.assertEqual(domain.domain, 'example.com')
def test_get_domain_domain_literal_with_cfws(self):
domain = self._test_get_x(parser.get_domain,
'(foo)[127.0.0.1]\t(bar)',
'(foo)[127.0.0.1]\t(bar)',
' [127.0.0.1] ',
[],
'')
self.assertEqual(domain.domain, '[127.0.0.1]')
def test_get_domain_domain_with_cfws_ends_at_special(self):
domain = self._test_get_x(parser.get_domain,
'(foo)example.com\t(bar), next',
'(foo)example.com\t(bar)',
' example.com ',
[],
', next')
self.assertEqual(domain.domain, 'example.com')
def test_get_domain_domain_literal_with_cfws_ends_at_special(self):
domain = self._test_get_x(parser.get_domain,
'(foo)[127.0.0.1]\t(bar), next',
'(foo)[127.0.0.1]\t(bar)',
' [127.0.0.1] ',
[],
', next')
self.assertEqual(domain.domain, '[127.0.0.1]')
def test_get_domain_obsolete(self):
domain = self._test_get_x(parser.get_domain,
'(foo) example . (bird)com(bar)\t',
'(foo) example . (bird)com(bar)\t',
' example . com ',
[errors.ObsoleteHeaderDefect],
'')
self.assertEqual(domain.domain, 'example.com')
def test_get_domain_no_non_cfws_raises(self):
with self.assertRaises(errors.HeaderParseError):
parser.get_domain(" (foo)\t")
def test_get_domain_no_atom_raises(self):
with self.assertRaises(errors.HeaderParseError):
parser.get_domain(" (foo)\t, broken")
# get_addr_spec
def test_get_addr_spec_normal(self):
addr_spec = self._test_get_x(parser.get_addr_spec,
'dinsdale@example.com',
'dinsdale@example.com',
'dinsdale@example.com',
[],
'')
self.assertEqual(addr_spec.token_type, 'addr-spec')
self.assertEqual(addr_spec.local_part, 'dinsdale')
self.assertEqual(addr_spec.domain, 'example.com')
self.assertEqual(addr_spec.addr_spec, 'dinsdale@example.com')
def test_get_addr_spec_with_doamin_literal(self):
addr_spec = self._test_get_x(parser.get_addr_spec,
'dinsdale@[127.0.0.1]',
'dinsdale@[127.0.0.1]',
'dinsdale@[127.0.0.1]',
[],
'')
self.assertEqual(addr_spec.local_part, 'dinsdale')
self.assertEqual(addr_spec.domain, '[127.0.0.1]')
self.assertEqual(addr_spec.addr_spec, 'dinsdale@[127.0.0.1]')
def test_get_addr_spec_with_cfws(self):
addr_spec = self._test_get_x(parser.get_addr_spec,
'(foo) dinsdale(bar)@ (bird) example.com (bog)',
'(foo) dinsdale(bar)@ (bird) example.com (bog)',
' dinsdale@example.com ',
[],
'')
self.assertEqual(addr_spec.local_part, 'dinsdale')
self.assertEqual(addr_spec.domain, 'example.com')
self.assertEqual(addr_spec.addr_spec, 'dinsdale@example.com')
def test_get_addr_spec_with_qouoted_string_and_cfws(self):
addr_spec = self._test_get_x(parser.get_addr_spec,
'(foo) "roy a bug"(bar)@ (bird) example.com (bog)',
'(foo) "roy a bug"(bar)@ (bird) example.com (bog)',
' "roy a bug"@example.com ',
[],
'')
self.assertEqual(addr_spec.local_part, 'roy a bug')
self.assertEqual(addr_spec.domain, 'example.com')
self.assertEqual(addr_spec.addr_spec, '"roy a bug"@example.com')
def test_get_addr_spec_ends_at_special(self):
    # Parsing stops at the ',' special; ', next' is returned unparsed.
    addr_spec = self._test_get_x(parser.get_addr_spec,
        '(foo) "roy a bug"(bar)@ (bird) example.com (bog) , next',
        '(foo) "roy a bug"(bar)@ (bird) example.com (bog) ',
        ' "roy a bug"@example.com ',
        [],
        ', next')
    self.assertEqual(addr_spec.local_part, 'roy a bug')
    self.assertEqual(addr_spec.domain, 'example.com')
    self.assertEqual(addr_spec.addr_spec, '"roy a bug"@example.com')
def test_get_addr_spec_quoted_strings_in_atom_list(self):
    # Nested/embedded quoted strings in the local part are invalid; three
    # InvalidHeaderDefects are registered, but a usable addr_spec is still
    # recovered with the local part re-quoted.
    addr_spec = self._test_get_x(parser.get_addr_spec,
        '""example" example"@example.com',
        '""example" example"@example.com',
        'example example@example.com',
        [errors.InvalidHeaderDefect]*3,
        '')
    self.assertEqual(addr_spec.local_part, 'example example')
    self.assertEqual(addr_spec.domain, 'example.com')
    self.assertEqual(addr_spec.addr_spec, '"example example"@example.com')
def test_get_addr_spec_dot_atom(self):
    # Multi-label dot-atom local part parses cleanly with no defects.
    addr_spec = self._test_get_x(parser.get_addr_spec,
        'star.a.star@example.com',
        'star.a.star@example.com',
        'star.a.star@example.com',
        [],
        '')
    self.assertEqual(addr_spec.local_part, 'star.a.star')
    self.assertEqual(addr_spec.domain, 'example.com')
    self.assertEqual(addr_spec.addr_spec, 'star.a.star@example.com')
# get_obs_route
def test_get_obs_route_simple(self):
    # Basic obsolete route ('@dom1, @dom2:'): domains are collected in order.
    obs_route = self._test_get_x(parser.get_obs_route,
        '@example.com, @two.example.com:',
        '@example.com, @two.example.com:',
        '@example.com, @two.example.com:',
        [],
        '')
    self.assertEqual(obs_route.token_type, 'obs-route')
    self.assertEqual(obs_route.domains, ['example.com', 'two.example.com'])
def test_get_obs_route_complex(self):
    # Route with empty elements and embedded comments still yields the two
    # domains; the CFWS inside a domain makes it an obs-domain (one defect).
    obs_route = self._test_get_x(parser.get_obs_route,
        '(foo),, (blue)@example.com (bar),@two.(foo) example.com (bird):',
        '(foo),, (blue)@example.com (bar),@two.(foo) example.com (bird):',
        ' ,, @example.com ,@two. example.com :',
        [errors.ObsoleteHeaderDefect], # This is the obs-domain
        '')
    self.assertEqual(obs_route.token_type, 'obs-route')
    self.assertEqual(obs_route.domains, ['example.com', 'two.example.com'])
def test_get_obs_route_no_route_before_end_raises(self):
    # Route that is never terminated by ':' before end of input must raise.
    with self.assertRaises(errors.HeaderParseError):
        parser.get_obs_route('(foo) @example.com,')
def test_get_obs_route_no_route_before_special_raises(self):
    # No '@domain' element at all before a special token must raise.
    with self.assertRaises(errors.HeaderParseError):
        parser.get_obs_route('(foo) [abc],')
def test_get_obs_route_no_route_before_special_raises2(self):
    # A valid route element followed by junk ('[abc]') instead of ':' raises.
    with self.assertRaises(errors.HeaderParseError):
        parser.get_obs_route('(foo) @example.com [abc],')
# get_angle_addr
def test_get_angle_addr_simple(self):
    # Minimal angle-addr: no route, addr-spec attributes exposed directly.
    angle_addr = self._test_get_x(parser.get_angle_addr,
        '<dinsdale@example.com>',
        '<dinsdale@example.com>',
        '<dinsdale@example.com>',
        [],
        '')
    self.assertEqual(angle_addr.token_type, 'angle-addr')
    self.assertEqual(angle_addr.local_part, 'dinsdale')
    self.assertEqual(angle_addr.domain, 'example.com')
    self.assertIsNone(angle_addr.route)
    self.assertEqual(angle_addr.addr_spec, 'dinsdale@example.com')
def test_get_angle_addr_empty(self):
    # Null address '<>' is accepted with one InvalidHeaderDefect; the
    # addr-spec attributes are None and addr_spec itself reads back '<>'.
    angle_addr = self._test_get_x(parser.get_angle_addr,
        '<>',
        '<>',
        '<>',
        [errors.InvalidHeaderDefect],
        '')
    self.assertEqual(angle_addr.token_type, 'angle-addr')
    self.assertIsNone(angle_addr.local_part)
    self.assertIsNone(angle_addr.domain)
    self.assertIsNone(angle_addr.route)
    self.assertEqual(angle_addr.addr_spec, '<>')
def test_get_angle_addr_with_cfws(self):
    # Comments before and after the angle brackets are dropped from the
    # decoded value but the surrounding whitespace is kept.
    angle_addr = self._test_get_x(parser.get_angle_addr,
        ' (foo) <dinsdale@example.com>(bar)',
        ' (foo) <dinsdale@example.com>(bar)',
        ' <dinsdale@example.com> ',
        [],
        '')
    self.assertEqual(angle_addr.token_type, 'angle-addr')
    self.assertEqual(angle_addr.local_part, 'dinsdale')
    self.assertEqual(angle_addr.domain, 'example.com')
    self.assertIsNone(angle_addr.route)
    self.assertEqual(angle_addr.addr_spec, 'dinsdale@example.com')
def test_get_angle_addr_qs_and_domain_literal(self):
    # Quoted-string local part plus a domain-literal inside angle brackets.
    angle_addr = self._test_get_x(parser.get_angle_addr,
        '<"Fred Perfect"@[127.0.0.1]>',
        '<"Fred Perfect"@[127.0.0.1]>',
        '<"Fred Perfect"@[127.0.0.1]>',
        [],
        '')
    self.assertEqual(angle_addr.local_part, 'Fred Perfect')
    self.assertEqual(angle_addr.domain, '[127.0.0.1]')
    self.assertIsNone(angle_addr.route)
    self.assertEqual(angle_addr.addr_spec, '"Fred Perfect"@[127.0.0.1]')
def test_get_angle_addr_internal_cfws(self):
    # CFWS *inside* the angle brackets: comments are stripped from the
    # decoded value, addr_spec stays clean.
    angle_addr = self._test_get_x(parser.get_angle_addr,
        '<(foo) dinsdale@example.com(bar)>',
        '<(foo) dinsdale@example.com(bar)>',
        '< dinsdale@example.com >',
        [],
        '')
    self.assertEqual(angle_addr.local_part, 'dinsdale')
    self.assertEqual(angle_addr.domain, 'example.com')
    self.assertIsNone(angle_addr.route)
    self.assertEqual(angle_addr.addr_spec, 'dinsdale@example.com')
def test_get_angle_addr_obs_route(self):
    # Obsolete route inside the angle-addr: one ObsoleteHeaderDefect, and
    # the route domains are exposed via .route.
    angle_addr = self._test_get_x(parser.get_angle_addr,
        '(foo)<@example.com, (bird) @two.example.com: dinsdale@example.com> (bar) ',
        '(foo)<@example.com, (bird) @two.example.com: dinsdale@example.com> (bar) ',
        ' <@example.com, @two.example.com: dinsdale@example.com> ',
        [errors.ObsoleteHeaderDefect],
        '')
    self.assertEqual(angle_addr.local_part, 'dinsdale')
    self.assertEqual(angle_addr.domain, 'example.com')
    self.assertEqual(angle_addr.route, ['example.com', 'two.example.com'])
    self.assertEqual(angle_addr.addr_spec, 'dinsdale@example.com')
def test_get_angle_addr_missing_closing_angle(self):
    # Missing '>' is repaired (the serialized forms include it) and
    # reported as an InvalidHeaderDefect.
    angle_addr = self._test_get_x(parser.get_angle_addr,
        '<dinsdale@example.com',
        '<dinsdale@example.com>',
        '<dinsdale@example.com>',
        [errors.InvalidHeaderDefect],
        '')
    self.assertEqual(angle_addr.local_part, 'dinsdale')
    self.assertEqual(angle_addr.domain, 'example.com')
    self.assertIsNone(angle_addr.route)
    self.assertEqual(angle_addr.addr_spec, 'dinsdale@example.com')
def test_get_angle_addr_missing_closing_angle_with_cfws(self):
    # Missing '>' after trailing CFWS: the '>' is appended after the CFWS
    # in the repaired string form, with one InvalidHeaderDefect.
    angle_addr = self._test_get_x(parser.get_angle_addr,
        '<dinsdale@example.com (foo)',
        '<dinsdale@example.com (foo)>',
        '<dinsdale@example.com >',
        [errors.InvalidHeaderDefect],
        '')
    self.assertEqual(angle_addr.local_part, 'dinsdale')
    self.assertEqual(angle_addr.domain, 'example.com')
    self.assertIsNone(angle_addr.route)
    self.assertEqual(angle_addr.addr_spec, 'dinsdale@example.com')
def test_get_angle_addr_ends_at_special(self):
    # Trailing comment is consumed but parsing stops at ','; remainder
    # ', next' is returned unparsed.
    angle_addr = self._test_get_x(parser.get_angle_addr,
        '<dinsdale@example.com> (foo), next',
        '<dinsdale@example.com> (foo)',
        '<dinsdale@example.com> ',
        [],
        ', next')
    self.assertEqual(angle_addr.local_part, 'dinsdale')
    self.assertEqual(angle_addr.domain, 'example.com')
    self.assertIsNone(angle_addr.route)
    self.assertEqual(angle_addr.addr_spec, 'dinsdale@example.com')
def test_get_angle_addr_no_angle_raise(self):
    # CFWS only, no '<' before end of input: parse error.
    with self.assertRaises(errors.HeaderParseError):
        parser.get_angle_addr('(foo) ')
def test_get_angle_addr_no_angle_before_special_raises(self):
    # CFWS then a special (',') where '<' was required: parse error.
    with self.assertRaises(errors.HeaderParseError):
        parser.get_angle_addr('(foo) , next')
def test_get_angle_addr_no_angle_raises(self):
    # A bare atom with no angle bracket at all: parse error.
    with self.assertRaises(errors.HeaderParseError):
        parser.get_angle_addr('bar')
def test_get_angle_addr_special_after_angle_raises(self):
    # '<' immediately followed by a special (',') is unrecoverable: raise.
    with self.assertRaises(errors.HeaderParseError):
        parser.get_angle_addr('(foo) <, bar')
# get_display_name.  This parses the same syntax as a phrase, but the
# token reports its value differently (as .display_name).
def test_get_display_name_simple(self):
    # Plain multi-word phrase becomes the display-name unchanged.
    display_name = self._test_get_x(parser.get_display_name,
        'Fred A Johnson',
        'Fred A Johnson',
        'Fred A Johnson',
        [],
        '')
    self.assertEqual(display_name.token_type, 'display-name')
    self.assertEqual(display_name.display_name, 'Fred A Johnson')
def test_get_display_name_complex1(self):
    # Quoted string followed by atoms: the decoded value re-quotes the
    # whole phrase; parsing stops at ',' leaving ', oh.' unparsed.
    display_name = self._test_get_x(parser.get_display_name,
        '"Fred A. Johnson" is his name, oh.',
        '"Fred A. Johnson" is his name',
        '"Fred A. Johnson is his name"',
        [],
        ', oh.')
    self.assertEqual(display_name.token_type, 'display-name')
    self.assertEqual(display_name.display_name, 'Fred A. Johnson is his name')
def test_get_display_name_complex2(self):
    # Comments (including a nested one) are retained as .comments on the
    # sub-tokens; display_name itself drops comments and quoting artifacts.
    display_name = self._test_get_x(parser.get_display_name,
        ' (A) bird (in (my|your)) "hand " is messy\t<>\t',
        ' (A) bird (in (my|your)) "hand " is messy\t',
        ' "bird hand is messy" ',
        [],
        '<>\t')
    self.assertEqual(display_name[0][0].comments, ['A'])
    self.assertEqual(display_name[0][2].comments, ['in (my|your)'])
    self.assertEqual(display_name.display_name, 'bird hand is messy')
def test_get_display_name_obsolete(self):
    # Dots and an embedded comment inside the phrase are obs-phrase syntax:
    # three ObsoleteHeaderDefects, decoded value gets quoted.
    display_name = self._test_get_x(parser.get_display_name,
        'Fred A.(weird).O Johnson',
        'Fred A.(weird).O Johnson',
        '"Fred A. .O Johnson"',
        [errors.ObsoleteHeaderDefect]*3,
        '')
    self.assertEqual(len(display_name), 7)
    self.assertEqual(display_name[3].comments, ['weird'])
    self.assertEqual(display_name.display_name, 'Fred A. .O Johnson')
def test_get_display_name_pharse_must_start_with_word(self):
    # A phrase starting with CFWS/'.' instead of a word is invalid (one
    # InvalidHeaderDefect) plus two obs-phrase defects for the dot form.
    # NOTE(review): 'pharse' in the method name is a typo for 'phrase';
    # left unchanged to keep the test id stable.
    display_name = self._test_get_x(parser.get_display_name,
        '(even weirder).name',
        '(even weirder).name',
        ' ".name"',
        [errors.InvalidHeaderDefect] + [errors.ObsoleteHeaderDefect]*2,
        '')
    self.assertEqual(len(display_name), 3)
    self.assertEqual(display_name[0].comments, ['even weirder'])
    self.assertEqual(display_name.display_name, '.name')
def test_get_display_name_ending_with_obsolete(self):
    # Trailing '.(comment)' is obs-phrase syntax; parsing stops at ':'
    # leaving ':boo' unparsed.
    display_name = self._test_get_x(parser.get_display_name,
        'simple phrase.(with trailing comment):boo',
        'simple phrase.(with trailing comment)',
        '"simple phrase." ',
        [errors.ObsoleteHeaderDefect]*2,
        ':boo')
    self.assertEqual(len(display_name), 4)
    self.assertEqual(display_name[3].comments, ['with trailing comment'])
    self.assertEqual(display_name.display_name, 'simple phrase.')
# get_name_addr
def test_get_name_addr_angle_addr_only(self):
    # Angle-addr with no display name: display_name is None, addr-spec
    # attributes pass through from the inner angle-addr.
    name_addr = self._test_get_x(parser.get_name_addr,
        '<dinsdale@example.com>',
        '<dinsdale@example.com>',
        '<dinsdale@example.com>',
        [],
        '')
    self.assertEqual(name_addr.token_type, 'name-addr')
    self.assertIsNone(name_addr.display_name)
    self.assertEqual(name_addr.local_part, 'dinsdale')
    self.assertEqual(name_addr.domain, 'example.com')
    self.assertIsNone(name_addr.route)
    self.assertEqual(name_addr.addr_spec, 'dinsdale@example.com')
def test_get_name_addr_atom_name(self):
    # Single-atom display name followed by an angle-addr.
    name_addr = self._test_get_x(parser.get_name_addr,
        'Dinsdale <dinsdale@example.com>',
        'Dinsdale <dinsdale@example.com>',
        'Dinsdale <dinsdale@example.com>',
        [],
        '')
    self.assertEqual(name_addr.token_type, 'name-addr')
    self.assertEqual(name_addr.display_name, 'Dinsdale')
    self.assertEqual(name_addr.local_part, 'dinsdale')
    self.assertEqual(name_addr.domain, 'example.com')
    self.assertIsNone(name_addr.route)
    self.assertEqual(name_addr.addr_spec, 'dinsdale@example.com')
def test_get_name_addr_atom_name_with_cfws(self):
    # Comments around the display name and angle-addr are dropped from
    # the decoded value without producing defects.
    name_addr = self._test_get_x(parser.get_name_addr,
        '(foo) Dinsdale (bar) <dinsdale@example.com> (bird)',
        '(foo) Dinsdale (bar) <dinsdale@example.com> (bird)',
        ' Dinsdale <dinsdale@example.com> ',
        [],
        '')
    self.assertEqual(name_addr.display_name, 'Dinsdale')
    self.assertEqual(name_addr.local_part, 'dinsdale')
    self.assertEqual(name_addr.domain, 'example.com')
    self.assertIsNone(name_addr.route)
    self.assertEqual(name_addr.addr_spec, 'dinsdale@example.com')
def test_get_name_addr_name_with_cfws_and_dots(self):
    # Dots in an unquoted display name are obs-phrase syntax (two
    # defects); the decoded value quotes the name.
    name_addr = self._test_get_x(parser.get_name_addr,
        '(foo) Roy.A.Bear (bar) <dinsdale@example.com> (bird)',
        '(foo) Roy.A.Bear (bar) <dinsdale@example.com> (bird)',
        ' "Roy.A.Bear" <dinsdale@example.com> ',
        [errors.ObsoleteHeaderDefect]*2,
        '')
    self.assertEqual(name_addr.display_name, 'Roy.A.Bear')
    self.assertEqual(name_addr.local_part, 'dinsdale')
    self.assertEqual(name_addr.domain, 'example.com')
    self.assertIsNone(name_addr.route)
    self.assertEqual(name_addr.addr_spec, 'dinsdale@example.com')
def test_get_name_addr_qs_name(self):
    # Quoted-string display name with dots is fine — no defects.
    name_addr = self._test_get_x(parser.get_name_addr,
        '"Roy.A.Bear" <dinsdale@example.com>',
        '"Roy.A.Bear" <dinsdale@example.com>',
        '"Roy.A.Bear" <dinsdale@example.com>',
        [],
        '')
    self.assertEqual(name_addr.display_name, 'Roy.A.Bear')
    self.assertEqual(name_addr.local_part, 'dinsdale')
    self.assertEqual(name_addr.domain, 'example.com')
    self.assertIsNone(name_addr.route)
    self.assertEqual(name_addr.addr_spec, 'dinsdale@example.com')
def test_get_name_addr_with_route(self):
    # Obsolete route inside the angle-addr is surfaced via .route with one
    # ObsoleteHeaderDefect.
    name_addr = self._test_get_x(parser.get_name_addr,
        '"Roy.A.Bear" <@two.example.com: dinsdale@example.com>',
        '"Roy.A.Bear" <@two.example.com: dinsdale@example.com>',
        '"Roy.A.Bear" <@two.example.com: dinsdale@example.com>',
        [errors.ObsoleteHeaderDefect],
        '')
    self.assertEqual(name_addr.display_name, 'Roy.A.Bear')
    self.assertEqual(name_addr.local_part, 'dinsdale')
    self.assertEqual(name_addr.domain, 'example.com')
    self.assertEqual(name_addr.route, ['two.example.com'])
    self.assertEqual(name_addr.addr_spec, 'dinsdale@example.com')
def test_get_name_addr_ends_at_special(self):
    # Parsing stops at ','; ', next' comes back unparsed.
    name_addr = self._test_get_x(parser.get_name_addr,
        '"Roy.A.Bear" <dinsdale@example.com>, next',
        '"Roy.A.Bear" <dinsdale@example.com>',
        '"Roy.A.Bear" <dinsdale@example.com>',
        [],
        ', next')
    self.assertEqual(name_addr.display_name, 'Roy.A.Bear')
    self.assertEqual(name_addr.local_part, 'dinsdale')
    self.assertEqual(name_addr.domain, 'example.com')
    self.assertIsNone(name_addr.route)
    self.assertEqual(name_addr.addr_spec, 'dinsdale@example.com')
def test_get_name_addr_no_content_raises(self):
    # CFWS only — neither display name nor angle-addr — must raise.
    with self.assertRaises(errors.HeaderParseError):
        parser.get_name_addr(' (foo) ')
def test_get_name_addr_no_content_before_special_raises(self):
    # CFWS followed directly by a special (',') must raise.
    with self.assertRaises(errors.HeaderParseError):
        parser.get_name_addr(' (foo) ,')
def test_get_name_addr_no_angle_after_display_name_raises(self):
    # A phrase with no following '<...>' is not a name-addr: raise.
    with self.assertRaises(errors.HeaderParseError):
        parser.get_name_addr('foo bar')
# get_mailbox
def test_get_mailbox_addr_spec_only(self):
    # A bare addr-spec is a valid mailbox; display_name is None.
    mailbox = self._test_get_x(parser.get_mailbox,
        'dinsdale@example.com',
        'dinsdale@example.com',
        'dinsdale@example.com',
        [],
        '')
    self.assertEqual(mailbox.token_type, 'mailbox')
    self.assertIsNone(mailbox.display_name)
    self.assertEqual(mailbox.local_part, 'dinsdale')
    self.assertEqual(mailbox.domain, 'example.com')
    self.assertIsNone(mailbox.route)
    self.assertEqual(mailbox.addr_spec, 'dinsdale@example.com')
def test_get_mailbox_angle_addr_only(self):
    # A bare angle-addr is also a valid mailbox with no display name.
    mailbox = self._test_get_x(parser.get_mailbox,
        '<dinsdale@example.com>',
        '<dinsdale@example.com>',
        '<dinsdale@example.com>',
        [],
        '')
    self.assertEqual(mailbox.token_type, 'mailbox')
    self.assertIsNone(mailbox.display_name)
    self.assertEqual(mailbox.local_part, 'dinsdale')
    self.assertEqual(mailbox.domain, 'example.com')
    self.assertIsNone(mailbox.route)
    self.assertEqual(mailbox.addr_spec, 'dinsdale@example.com')
def test_get_mailbox_name_addr(self):
    # Full name-addr form: display_name and addr-spec attributes populated.
    mailbox = self._test_get_x(parser.get_mailbox,
        '"Roy A. Bear" <dinsdale@example.com>',
        '"Roy A. Bear" <dinsdale@example.com>',
        '"Roy A. Bear" <dinsdale@example.com>',
        [],
        '')
    self.assertEqual(mailbox.token_type, 'mailbox')
    self.assertEqual(mailbox.display_name, 'Roy A. Bear')
    self.assertEqual(mailbox.local_part, 'dinsdale')
    self.assertEqual(mailbox.domain, 'example.com')
    self.assertIsNone(mailbox.route)
    self.assertEqual(mailbox.addr_spec, 'dinsdale@example.com')
def test_get_mailbox_ends_at_special(self):
    # Parsing stops at ','; ', rest' comes back unparsed.
    mailbox = self._test_get_x(parser.get_mailbox,
        '"Roy A. Bear" <dinsdale@example.com>, rest',
        '"Roy A. Bear" <dinsdale@example.com>',
        '"Roy A. Bear" <dinsdale@example.com>',
        [],
        ', rest')
    self.assertEqual(mailbox.token_type, 'mailbox')
    self.assertEqual(mailbox.display_name, 'Roy A. Bear')
    self.assertEqual(mailbox.local_part, 'dinsdale')
    self.assertEqual(mailbox.domain, 'example.com')
    self.assertIsNone(mailbox.route)
    self.assertEqual(mailbox.addr_spec, 'dinsdale@example.com')
def test_get_mailbox_quoted_strings_in_atom_list(self):
    # Same malformed nested-quotes input as the addr-spec test, but via
    # get_mailbox: defects propagate, addr_spec is recovered re-quoted.
    mailbox = self._test_get_x(parser.get_mailbox,
        '""example" example"@example.com',
        '""example" example"@example.com',
        'example example@example.com',
        [errors.InvalidHeaderDefect]*3,
        '')
    self.assertEqual(mailbox.local_part, 'example example')
    self.assertEqual(mailbox.domain, 'example.com')
    self.assertEqual(mailbox.addr_spec, '"example example"@example.com')
# get_mailbox_list
def test_get_mailbox_list_single_addr(self):
    # One-element list: .mailboxes (valid only) equals .all_mailboxes
    # (valid + invalid) when everything parses.
    mailbox_list = self._test_get_x(parser.get_mailbox_list,
        'dinsdale@example.com',
        'dinsdale@example.com',
        'dinsdale@example.com',
        [],
        '')
    self.assertEqual(mailbox_list.token_type, 'mailbox-list')
    self.assertEqual(len(mailbox_list.mailboxes), 1)
    mailbox = mailbox_list.mailboxes[0]
    self.assertIsNone(mailbox.display_name)
    self.assertEqual(mailbox.local_part, 'dinsdale')
    self.assertEqual(mailbox.domain, 'example.com')
    self.assertIsNone(mailbox.route)
    self.assertEqual(mailbox.addr_spec, 'dinsdale@example.com')
    self.assertEqual(mailbox_list.mailboxes,
        mailbox_list.all_mailboxes)
def test_get_mailbox_list_two_simple_addr(self):
    # Two comma-separated addr-specs end up as two mailboxes, in order.
    mailbox_list = self._test_get_x(parser.get_mailbox_list,
        'dinsdale@example.com, dinsdale@test.example.com',
        'dinsdale@example.com, dinsdale@test.example.com',
        'dinsdale@example.com, dinsdale@test.example.com',
        [],
        '')
    self.assertEqual(mailbox_list.token_type, 'mailbox-list')
    self.assertEqual(len(mailbox_list.mailboxes), 2)
    self.assertEqual(mailbox_list.mailboxes[0].addr_spec,
        'dinsdale@example.com')
    self.assertEqual(mailbox_list.mailboxes[1].addr_spec,
        'dinsdale@test.example.com')
    self.assertEqual(mailbox_list.mailboxes,
        mailbox_list.all_mailboxes)
def test_get_mailbox_list_two_name_addr(self):
    # Two name-addr mailboxes: display names and addr-specs both exposed.
    mailbox_list = self._test_get_x(parser.get_mailbox_list,
        ('"Roy A. Bear" <dinsdale@example.com>,'
            ' "Fred Flintstone" <dinsdale@test.example.com>'),
        ('"Roy A. Bear" <dinsdale@example.com>,'
            ' "Fred Flintstone" <dinsdale@test.example.com>'),
        ('"Roy A. Bear" <dinsdale@example.com>,'
            ' "Fred Flintstone" <dinsdale@test.example.com>'),
        [],
        '')
    self.assertEqual(len(mailbox_list.mailboxes), 2)
    self.assertEqual(mailbox_list.mailboxes[0].addr_spec,
        'dinsdale@example.com')
    self.assertEqual(mailbox_list.mailboxes[0].display_name,
        'Roy A. Bear')
    self.assertEqual(mailbox_list.mailboxes[1].addr_spec,
        'dinsdale@test.example.com')
    self.assertEqual(mailbox_list.mailboxes[1].display_name,
        'Fred Flintstone')
    self.assertEqual(mailbox_list.mailboxes,
        mailbox_list.all_mailboxes)
def test_get_mailbox_list_two_complex(self):
    # Comments are stripped from the decoded value; the comment embedded
    # in a domain makes it an obs-domain (one ObsoleteHeaderDefect).
    mailbox_list = self._test_get_x(parser.get_mailbox_list,
        ('(foo) "Roy A. Bear" <dinsdale@example.com>(bar),'
            ' "Fred Flintstone" <dinsdale@test.(bird)example.com>'),
        ('(foo) "Roy A. Bear" <dinsdale@example.com>(bar),'
            ' "Fred Flintstone" <dinsdale@test.(bird)example.com>'),
        (' "Roy A. Bear" <dinsdale@example.com> ,'
            ' "Fred Flintstone" <dinsdale@test. example.com>'),
        [errors.ObsoleteHeaderDefect],
        '')
    self.assertEqual(len(mailbox_list.mailboxes), 2)
    self.assertEqual(mailbox_list.mailboxes[0].addr_spec,
        'dinsdale@example.com')
    self.assertEqual(mailbox_list.mailboxes[0].display_name,
        'Roy A. Bear')
    self.assertEqual(mailbox_list.mailboxes[1].addr_spec,
        'dinsdale@test.example.com')
    self.assertEqual(mailbox_list.mailboxes[1].display_name,
        'Fred Flintstone')
    self.assertEqual(mailbox_list.mailboxes,
        mailbox_list.all_mailboxes)
def test_get_mailbox_list_unparseable_mailbox_null(self):
    # First element is unparseable: it is kept in all_mailboxes as an
    # 'invalid-mailbox' token but excluded from .mailboxes; the second,
    # valid mailbox is shared by both lists.
    mailbox_list = self._test_get_x(parser.get_mailbox_list,
        ('"Roy A. Bear"[] dinsdale@example.com,'
            ' "Fred Flintstone" <dinsdale@test.(bird)example.com>'),
        ('"Roy A. Bear"[] dinsdale@example.com,'
            ' "Fred Flintstone" <dinsdale@test.(bird)example.com>'),
        ('"Roy A. Bear"[] dinsdale@example.com,'
            ' "Fred Flintstone" <dinsdale@test. example.com>'),
        [errors.InvalidHeaderDefect, # the 'extra' text after the local part
            errors.InvalidHeaderDefect, # the local part with no angle-addr
            errors.ObsoleteHeaderDefect, # period in extra text (example.com)
            errors.ObsoleteHeaderDefect], # (bird) in valid address.
        '')
    self.assertEqual(len(mailbox_list.mailboxes), 1)
    self.assertEqual(len(mailbox_list.all_mailboxes), 2)
    self.assertEqual(mailbox_list.all_mailboxes[0].token_type,
        'invalid-mailbox')
    self.assertIsNone(mailbox_list.all_mailboxes[0].display_name)
    self.assertEqual(mailbox_list.all_mailboxes[0].local_part,
        'Roy A. Bear')
    self.assertIsNone(mailbox_list.all_mailboxes[0].domain)
    self.assertEqual(mailbox_list.all_mailboxes[0].addr_spec,
        '"Roy A. Bear"')
    # The valid mailbox appears in both lists as the same object.
    self.assertIs(mailbox_list.all_mailboxes[1],
        mailbox_list.mailboxes[0])
    self.assertEqual(mailbox_list.mailboxes[0].addr_spec,
        'dinsdale@test.example.com')
    self.assertEqual(mailbox_list.mailboxes[0].display_name,
        'Fred Flintstone')
def test_get_mailbox_list_junk_after_valid_address(self):
    # Trailing '@@' junk invalidates the first mailbox (it still carries
    # its parsed display name and addr-spec in all_mailboxes).
    mailbox_list = self._test_get_x(parser.get_mailbox_list,
        ('"Roy A. Bear" <dinsdale@example.com>@@,'
            ' "Fred Flintstone" <dinsdale@test.example.com>'),
        ('"Roy A. Bear" <dinsdale@example.com>@@,'
            ' "Fred Flintstone" <dinsdale@test.example.com>'),
        ('"Roy A. Bear" <dinsdale@example.com>@@,'
            ' "Fred Flintstone" <dinsdale@test.example.com>'),
        [errors.InvalidHeaderDefect],
        '')
    self.assertEqual(len(mailbox_list.mailboxes), 1)
    self.assertEqual(len(mailbox_list.all_mailboxes), 2)
    self.assertEqual(mailbox_list.all_mailboxes[0].addr_spec,
        'dinsdale@example.com')
    self.assertEqual(mailbox_list.all_mailboxes[0].display_name,
        'Roy A. Bear')
    self.assertEqual(mailbox_list.all_mailboxes[0].token_type,
        'invalid-mailbox')
    self.assertIs(mailbox_list.all_mailboxes[1],
        mailbox_list.mailboxes[0])
    self.assertEqual(mailbox_list.mailboxes[0].addr_spec,
        'dinsdale@test.example.com')
    self.assertEqual(mailbox_list.mailboxes[0].display_name,
        'Fred Flintstone')
def test_get_mailbox_list_empty_list_element(self):
    # Empty elements (comment-only and fully empty) between mailboxes are
    # obs-mbox-list syntax: two ObsoleteHeaderDefects, mailboxes unaffected.
    mailbox_list = self._test_get_x(parser.get_mailbox_list,
        ('"Roy A. Bear" <dinsdale@example.com>, (bird),,'
            ' "Fred Flintstone" <dinsdale@test.example.com>'),
        ('"Roy A. Bear" <dinsdale@example.com>, (bird),,'
            ' "Fred Flintstone" <dinsdale@test.example.com>'),
        ('"Roy A. Bear" <dinsdale@example.com>, ,,'
            ' "Fred Flintstone" <dinsdale@test.example.com>'),
        [errors.ObsoleteHeaderDefect]*2,
        '')
    self.assertEqual(len(mailbox_list.mailboxes), 2)
    self.assertEqual(mailbox_list.all_mailboxes,
        mailbox_list.mailboxes)
    self.assertEqual(mailbox_list.all_mailboxes[0].addr_spec,
        'dinsdale@example.com')
    self.assertEqual(mailbox_list.all_mailboxes[0].display_name,
        'Roy A. Bear')
    self.assertEqual(mailbox_list.mailboxes[1].addr_spec,
        'dinsdale@test.example.com')
    self.assertEqual(mailbox_list.mailboxes[1].display_name,
        'Fred Flintstone')
def test_get_mailbox_list_only_empty_elements(self):
    # A list consisting solely of empty/comment elements yields zero
    # mailboxes and one ObsoleteHeaderDefect per empty element.
    mailbox_list = self._test_get_x(parser.get_mailbox_list,
        '(foo),, (bar)',
        '(foo),, (bar)',
        ' ,, ',
        [errors.ObsoleteHeaderDefect]*3,
        '')
    self.assertEqual(len(mailbox_list.mailboxes), 0)
    self.assertEqual(mailbox_list.all_mailboxes,
        mailbox_list.mailboxes)
# get_group_list
def test_get_group_list_cfws_only(self):
    # CFWS-only group list (hidden membership): empty, stops at ';'.
    group_list = self._test_get_x(parser.get_group_list,
        '(hidden);',
        '(hidden)',
        ' ',
        [],
        ';')
    self.assertEqual(group_list.token_type, 'group-list')
    self.assertEqual(len(group_list.mailboxes), 0)
    self.assertEqual(group_list.mailboxes,
        group_list.all_mailboxes)
def test_get_group_list_mailbox_list(self):
    # A normal mailbox-list as group content: both mailboxes collected.
    group_list = self._test_get_x(parser.get_group_list,
        'dinsdale@example.org, "Fred A. Bear" <dinsdale@example.org>',
        'dinsdale@example.org, "Fred A. Bear" <dinsdale@example.org>',
        'dinsdale@example.org, "Fred A. Bear" <dinsdale@example.org>',
        [],
        '')
    self.assertEqual(group_list.token_type, 'group-list')
    self.assertEqual(len(group_list.mailboxes), 2)
    self.assertEqual(group_list.mailboxes,
        group_list.all_mailboxes)
    self.assertEqual(group_list.mailboxes[1].display_name,
        'Fred A. Bear')
def test_get_group_list_obs_group_list(self):
    # Leading comma / only empty elements is obs-group-list syntax: one
    # ObsoleteHeaderDefect, zero mailboxes.
    group_list = self._test_get_x(parser.get_group_list,
        ', (foo),,(bar)',
        ', (foo),,(bar)',
        ', ,, ',
        [errors.ObsoleteHeaderDefect],
        '')
    self.assertEqual(group_list.token_type, 'group-list')
    self.assertEqual(len(group_list.mailboxes), 0)
    self.assertEqual(group_list.mailboxes,
        group_list.all_mailboxes)
def test_get_group_list_comment_only_invalid(self):
    # A single comment with no terminator/content is invalid (one
    # InvalidHeaderDefect) but still yields an empty group-list token.
    group_list = self._test_get_x(parser.get_group_list,
        '(bar)',
        '(bar)',
        ' ',
        [errors.InvalidHeaderDefect],
        '')
    self.assertEqual(group_list.token_type, 'group-list')
    self.assertEqual(len(group_list.mailboxes), 0)
    self.assertEqual(group_list.mailboxes,
        group_list.all_mailboxes)
# get_group
def test_get_group_empty(self):
    # Empty group 'name:;' — display name set, zero mailboxes, no defects.
    group = self._test_get_x(parser.get_group,
        'Monty Python:;',
        'Monty Python:;',
        'Monty Python:;',
        [],
        '')
    self.assertEqual(group.token_type, 'group')
    self.assertEqual(group.display_name, 'Monty Python')
    self.assertEqual(len(group.mailboxes), 0)
    self.assertEqual(group.mailboxes,
        group.all_mailboxes)
def test_get_group_null_addr_spec(self):
    # A null '<>' member is invalid: excluded from .mailboxes but kept in
    # .all_mailboxes with value '<>'.
    group = self._test_get_x(parser.get_group,
        'foo: <>;',
        'foo: <>;',
        'foo: <>;',
        [errors.InvalidHeaderDefect],
        '')
    self.assertEqual(group.display_name, 'foo')
    self.assertEqual(len(group.mailboxes), 0)
    self.assertEqual(len(group.all_mailboxes), 1)
    self.assertEqual(group.all_mailboxes[0].value, '<>')
def test_get_group_cfws_only(self):
    # Comment-only group body: comment dropped from decoded value, group
    # is empty with no defects.
    group = self._test_get_x(parser.get_group,
        'Monty Python: (hidden);',
        'Monty Python: (hidden);',
        'Monty Python: ;',
        [],
        '')
    self.assertEqual(group.token_type, 'group')
    self.assertEqual(group.display_name, 'Monty Python')
    self.assertEqual(len(group.mailboxes), 0)
    self.assertEqual(group.mailboxes,
        group.all_mailboxes)
def test_get_group_single_mailbox(self):
    # One name-addr member inside the group.
    group = self._test_get_x(parser.get_group,
        'Monty Python: "Fred A. Bear" <dinsdale@example.com>;',
        'Monty Python: "Fred A. Bear" <dinsdale@example.com>;',
        'Monty Python: "Fred A. Bear" <dinsdale@example.com>;',
        [],
        '')
    self.assertEqual(group.token_type, 'group')
    self.assertEqual(group.display_name, 'Monty Python')
    self.assertEqual(len(group.mailboxes), 1)
    self.assertEqual(group.mailboxes,
        group.all_mailboxes)
    self.assertEqual(group.mailboxes[0].addr_spec,
        'dinsdale@example.com')
def test_get_group_mixed_list(self):
    # Mixed member forms (quoted name-addr, commented name-addr, bare
    # addr-spec) all parse; the comment is dropped from the decoded value.
    group = self._test_get_x(parser.get_group,
        ('Monty Python: "Fred A. Bear" <dinsdale@example.com>,'
            '(foo) Roger <ping@exampele.com>, x@test.example.com;'),
        ('Monty Python: "Fred A. Bear" <dinsdale@example.com>,'
            '(foo) Roger <ping@exampele.com>, x@test.example.com;'),
        ('Monty Python: "Fred A. Bear" <dinsdale@example.com>,'
            ' Roger <ping@exampele.com>, x@test.example.com;'),
        [],
        '')
    self.assertEqual(group.token_type, 'group')
    self.assertEqual(group.display_name, 'Monty Python')
    self.assertEqual(len(group.mailboxes), 3)
    self.assertEqual(group.mailboxes,
        group.all_mailboxes)
    self.assertEqual(group.mailboxes[0].display_name,
        'Fred A. Bear')
    self.assertEqual(group.mailboxes[1].display_name,
        'Roger')
    self.assertEqual(group.mailboxes[2].local_part, 'x')
def test_get_group_one_invalid(self):
    # Second member lacks angle brackets around its addr-spec: it becomes
    # an invalid mailbox (in all_mailboxes only); the other two are valid.
    group = self._test_get_x(parser.get_group,
        ('Monty Python: "Fred A. Bear" <dinsdale@example.com>,'
            '(foo) Roger ping@exampele.com, x@test.example.com;'),
        ('Monty Python: "Fred A. Bear" <dinsdale@example.com>,'
            '(foo) Roger ping@exampele.com, x@test.example.com;'),
        ('Monty Python: "Fred A. Bear" <dinsdale@example.com>,'
            ' Roger ping@exampele.com, x@test.example.com;'),
        [errors.InvalidHeaderDefect, # non-angle addr makes local part invalid
            errors.InvalidHeaderDefect], # and its not obs-local either: no dots.
        '')
    self.assertEqual(group.token_type, 'group')
    self.assertEqual(group.display_name, 'Monty Python')
    self.assertEqual(len(group.mailboxes), 2)
    self.assertEqual(len(group.all_mailboxes), 3)
    self.assertEqual(group.mailboxes[0].display_name,
        'Fred A. Bear')
    self.assertEqual(group.mailboxes[1].local_part, 'x')
    self.assertIsNone(group.all_mailboxes[1].display_name)
# get_address
def test_get_address_simple(self):
    # A bare addr-spec parses as an address wrapping a single mailbox.
    address = self._test_get_x(parser.get_address,
        'dinsdale@example.com',
        'dinsdale@example.com',
        'dinsdale@example.com',
        [],
        '')
    self.assertEqual(address.token_type, 'address')
    self.assertEqual(len(address.mailboxes), 1)
    self.assertEqual(address.mailboxes,
        address.all_mailboxes)
    self.assertEqual(address.mailboxes[0].domain,
        'example.com')
    # The single child token is the mailbox itself.
    self.assertEqual(address[0].token_type,
        'mailbox')
def test_get_address_complex(self):
    # Name-addr with comments: comments dropped from the decoded value,
    # still a single-mailbox address with no defects.
    address = self._test_get_x(parser.get_address,
        '(foo) "Fred A. Bear" <(bird)dinsdale@example.com>',
        '(foo) "Fred A. Bear" <(bird)dinsdale@example.com>',
        ' "Fred A. Bear" < dinsdale@example.com>',
        [],
        '')
    self.assertEqual(address.token_type, 'address')
    self.assertEqual(len(address.mailboxes), 1)
    self.assertEqual(address.mailboxes,
        address.all_mailboxes)
    self.assertEqual(address.mailboxes[0].display_name,
        'Fred A. Bear')
    self.assertEqual(address[0].token_type,
        'mailbox')
def test_get_address_rfc2047_display_name(self):
    # RFC 2047 encoded word in the display name is decoded to 'Éric' in
    # both the string form and the decoded value.
    address = self._test_get_x(parser.get_address,
        '=?utf-8?q?=C3=89ric?= <foo@example.com>',
        'Éric <foo@example.com>',
        'Éric <foo@example.com>',
        [],
        '')
    self.assertEqual(address.token_type, 'address')
    self.assertEqual(len(address.mailboxes), 1)
    self.assertEqual(address.mailboxes,
        address.all_mailboxes)
    self.assertEqual(address.mailboxes[0].display_name,
        'Éric')
    self.assertEqual(address[0].token_type,
        'mailbox')
def test_get_address_empty_group(self):
    # An empty group is an address whose child token is a 'group' with a
    # display name and no mailboxes.
    address = self._test_get_x(parser.get_address,
        'Monty Python:;',
        'Monty Python:;',
        'Monty Python:;',
        [],
        '')
    self.assertEqual(address.token_type, 'address')
    self.assertEqual(len(address.mailboxes), 0)
    self.assertEqual(address.mailboxes,
        address.all_mailboxes)
    self.assertEqual(address[0].token_type,
        'group')
    self.assertEqual(address[0].display_name,
        'Monty Python')
def test_get_address_group(self):
    # A populated group address: the group's member mailboxes surface on
    # the enclosing address token.
    address = self._test_get_x(parser.get_address,
        'Monty Python: x@example.com, y@example.com;',
        'Monty Python: x@example.com, y@example.com;',
        'Monty Python: x@example.com, y@example.com;',
        [],
        '')
    self.assertEqual(address.token_type, 'address')
    self.assertEqual(len(address.mailboxes), 2)
    self.assertEqual(address.mailboxes,
        address.all_mailboxes)
    self.assertEqual(address[0].token_type,
        'group')
    self.assertEqual(address[0].display_name,
        'Monty Python')
    self.assertEqual(address.mailboxes[0].local_part, 'x')
def test_get_address_quoted_local_part(self):
    # Quoted-string local part containing a space parses cleanly; the
    # local_part attribute holds the unquoted content.
    address = self._test_get_x(parser.get_address,
        '"foo bar"@example.com',
        '"foo bar"@example.com',
        '"foo bar"@example.com',
        [],
        '')
    self.assertEqual(address.token_type, 'address')
    self.assertEqual(len(address.mailboxes), 1)
    self.assertEqual(address.mailboxes,
        address.all_mailboxes)
    self.assertEqual(address.mailboxes[0].domain,
        'example.com')
    self.assertEqual(address.mailboxes[0].local_part,
        'foo bar')
    self.assertEqual(address[0].token_type, 'mailbox')
def test_get_address_ends_at_special(self):
    # Parsing stops at the list separator; ', next' comes back unparsed.
    address = self._test_get_x(parser.get_address,
        'dinsdale@example.com, next',
        'dinsdale@example.com',
        'dinsdale@example.com',
        [],
        ', next')
    self.assertEqual(address.token_type, 'address')
    self.assertEqual(len(address.mailboxes), 1)
    self.assertEqual(address.mailboxes,
        address.all_mailboxes)
    self.assertEqual(address.mailboxes[0].domain,
        'example.com')
    self.assertEqual(address[0].token_type, 'mailbox')
def test_get_address_invalid_mailbox_invalid(self):
    # 'ping example.com' has no '@': three InvalidHeaderDefects, and the
    # whole text ends up as the local_part of an invalid-mailbox child.
    address = self._test_get_x(parser.get_address,
        'ping example.com, next',
        'ping example.com',
        'ping example.com',
        [errors.InvalidHeaderDefect, # addr-spec with no domain
            errors.InvalidHeaderDefect, # invalid local-part
            errors.InvalidHeaderDefect, # missing .s in local-part
            ],
        ', next')
    self.assertEqual(address.token_type, 'address')
    self.assertEqual(len(address.mailboxes), 0)
    self.assertEqual(len(address.all_mailboxes), 1)
    self.assertIsNone(address.all_mailboxes[0].domain)
    self.assertEqual(address.all_mailboxes[0].local_part, 'ping example.com')
    self.assertEqual(address[0].token_type, 'invalid-mailbox')
def test_get_address_quoted_strings_in_atom_list(self):
    # Same malformed nested-quotes input, checked at the address level:
    # defects propagate and the addr_spec is recovered re-quoted.
    address = self._test_get_x(parser.get_address,
        '""example" example"@example.com',
        '""example" example"@example.com',
        'example example@example.com',
        [errors.InvalidHeaderDefect]*3,
        '')
    self.assertEqual(address.all_mailboxes[0].local_part, 'example example')
    self.assertEqual(address.all_mailboxes[0].domain, 'example.com')
    self.assertEqual(address.all_mailboxes[0].addr_spec, '"example example"@example.com')
# get_address_list
def test_get_address_list_mailboxes_simple(self):
    # Single addr-spec at the address-list level: one mailbox, one
    # 'address' child, and .mailboxes mirrors .addresses string-wise.
    address_list = self._test_get_x(parser.get_address_list,
        'dinsdale@example.com',
        'dinsdale@example.com',
        'dinsdale@example.com',
        [],
        '')
    self.assertEqual(address_list.token_type, 'address-list')
    self.assertEqual(len(address_list.mailboxes), 1)
    self.assertEqual(address_list.mailboxes,
        address_list.all_mailboxes)
    self.assertEqual([str(x) for x in address_list.mailboxes],
        [str(x) for x in address_list.addresses])
    self.assertEqual(address_list.mailboxes[0].domain, 'example.com')
    self.assertEqual(address_list[0].token_type, 'address')
    self.assertIsNone(address_list[0].display_name)
def test_get_address_list_mailboxes_two_simple(self):
    """Two comma-separated mailboxes: one bare, one with a quoted phrase."""
    addr_list = self._test_get_x(
        parser.get_address_list,
        'foo@example.com, "Fred A. Bar" <bar@example.com>',
        'foo@example.com, "Fred A. Bar" <bar@example.com>',
        'foo@example.com, "Fred A. Bar" <bar@example.com>',
        [],
        '')
    self.assertEqual(addr_list.token_type, 'address-list')
    self.assertEqual(len(addr_list.mailboxes), 2)
    self.assertEqual(addr_list.mailboxes, addr_list.all_mailboxes)
    self.assertEqual([str(mbox) for mbox in addr_list.mailboxes],
                     [str(a) for a in addr_list.addresses])
    first, second = addr_list.mailboxes
    self.assertEqual(first.local_part, 'foo')
    self.assertEqual(second.display_name, "Fred A. Bar")
def test_get_address_list_mailboxes_complex(self):
    # Mixed list: a quoted display name, a comment before a name-addr,
    # and obsolete syntax (a period inside a phrase, CFWS inside a
    # domain).  The normalized "value" form drops the comments, quotes
    # the dotted phrase, and collapses the domain CFWS to a space.
    address_list = self._test_get_x(parser.get_address_list,
        ('"Roy A. Bear" <dinsdale@example.com>, '
            '(ping) Foo <x@example.com>,'
            'Nobody Is. Special <y@(bird)example.(bad)com>'),
        ('"Roy A. Bear" <dinsdale@example.com>, '
            '(ping) Foo <x@example.com>,'
            'Nobody Is. Special <y@(bird)example.(bad)com>'),
        ('"Roy A. Bear" <dinsdale@example.com>, '
            'Foo <x@example.com>,'
            '"Nobody Is. Special" <y@example. com>'),
        [errors.ObsoleteHeaderDefect, # period in Is.
         errors.ObsoleteHeaderDefect], # cfws in domain
        '')
    self.assertEqual(address_list.token_type, 'address-list')
    # All three mailboxes are valid, so mailboxes == all_mailboxes.
    self.assertEqual(len(address_list.mailboxes), 3)
    self.assertEqual(address_list.mailboxes,
                     address_list.all_mailboxes)
    self.assertEqual([str(x) for x in address_list.mailboxes],
                     [str(x) for x in address_list.addresses])
    self.assertEqual(address_list.mailboxes[0].domain, 'example.com')
    self.assertEqual(address_list.mailboxes[0].token_type, 'mailbox')
    self.assertEqual(address_list.addresses[0].token_type, 'address')
    self.assertEqual(address_list.mailboxes[1].local_part, 'x')
    self.assertEqual(address_list.mailboxes[2].display_name,
                     'Nobody Is. Special')
def test_get_address_list_mailboxes_invalid_addresses(self):
    """A list mixing one valid name-addr with two invalid addresses.

    Only the valid mailbox lands in .mailboxes; all three appear in
    .all_mailboxes, and each defect is recorded.
    """
    address_list = self._test_get_x(parser.get_address_list,
        ('"Roy A. Bear" <dinsdale@example.com>, '
            '(ping) Foo x@example.com[],'
            'Nobody Is. Special <(bird)example.(bad)com>'),
        ('"Roy A. Bear" <dinsdale@example.com>, '
            '(ping) Foo x@example.com[],'
            'Nobody Is. Special <(bird)example.(bad)com>'),
        ('"Roy A. Bear" <dinsdale@example.com>, '
            'Foo x@example.com[],'
            '"Nobody Is. Special" < example. com>'),
        [errors.InvalidHeaderDefect,    # invalid address in list
         errors.InvalidHeaderDefect,    # 'Foo x' local part invalid.
         errors.InvalidHeaderDefect,    # Missing . in 'Foo x' local part
         errors.ObsoleteHeaderDefect,   # period in 'Is.' disp-name phrase
         errors.InvalidHeaderDefect,    # no domain part in addr-spec
         errors.ObsoleteHeaderDefect],  # addr-spec has comment in it
        '')
    self.assertEqual(address_list.token_type, 'address-list')
    self.assertEqual(len(address_list.mailboxes), 1)
    self.assertEqual(len(address_list.all_mailboxes), 3)
    self.assertEqual([str(x) for x in address_list.all_mailboxes],
                     [str(x) for x in address_list.addresses])
    self.assertEqual(address_list.mailboxes[0].domain, 'example.com')
    self.assertEqual(address_list.mailboxes[0].token_type, 'mailbox')
    self.assertEqual(address_list.addresses[0].token_type, 'address')
    self.assertEqual(address_list.addresses[1].token_type, 'address')
    self.assertEqual(len(address_list.addresses[0].mailboxes), 1)
    self.assertEqual(len(address_list.addresses[1].mailboxes), 0)
    # Fixed: this line was an exact duplicate of the previous one; the
    # third (also invalid) address should be checked as well.
    self.assertEqual(len(address_list.addresses[2].mailboxes), 0)
    self.assertEqual(
        address_list.addresses[1].all_mailboxes[0].local_part, 'Foo x')
    self.assertEqual(
        address_list.addresses[2].all_mailboxes[0].display_name,
        "Nobody Is. Special")
def test_get_address_list_group_empty(self):
    """An empty RFC 5322 group contributes an address but no mailboxes."""
    addr_list = self._test_get_x(
        parser.get_address_list,
        'Monty Python: ;',
        'Monty Python: ;',
        'Monty Python: ;',
        [],
        '')
    self.assertEqual(addr_list.token_type, 'address-list')
    self.assertEqual(len(addr_list.mailboxes), 0)
    self.assertEqual(addr_list.mailboxes, addr_list.all_mailboxes)
    self.assertEqual(len(addr_list.addresses), 1)
    group = addr_list.addresses[0]
    self.assertEqual(group.token_type, 'address')
    self.assertEqual(group.display_name, 'Monty Python')
    self.assertEqual(len(group.mailboxes), 0)
def test_get_address_list_group_simple(self):
    """A one-member group: the member counts toward the list's mailboxes."""
    addr_list = self._test_get_x(
        parser.get_address_list,
        'Monty Python: dinsdale@example.com;',
        'Monty Python: dinsdale@example.com;',
        'Monty Python: dinsdale@example.com;',
        [],
        '')
    self.assertEqual(addr_list.token_type, 'address-list')
    self.assertEqual(len(addr_list.mailboxes), 1)
    self.assertEqual(addr_list.mailboxes, addr_list.all_mailboxes)
    self.assertEqual(addr_list.mailboxes[0].domain, 'example.com')
    group = addr_list.addresses[0]
    self.assertEqual(group.display_name, 'Monty Python')
    self.assertEqual(group.mailboxes[0].domain, 'example.com')
def test_get_address_list_group_and_mailboxes(self):
    # A group followed by two plain name-addrs: all four member
    # mailboxes are collected at the list level, while the group itself
    # counts as a single address (hence 3 addresses, 4 mailboxes).
    address_list = self._test_get_x(parser.get_address_list,
        ('Monty Python: dinsdale@example.com, "Fred" <flint@example.com>;, '
            'Abe <x@example.com>, Bee <y@example.com>'),
        ('Monty Python: dinsdale@example.com, "Fred" <flint@example.com>;, '
            'Abe <x@example.com>, Bee <y@example.com>'),
        ('Monty Python: dinsdale@example.com, "Fred" <flint@example.com>;, '
            'Abe <x@example.com>, Bee <y@example.com>'),
        [],
        '')
    self.assertEqual(address_list.token_type, 'address-list')
    self.assertEqual(len(address_list.mailboxes), 4)
    self.assertEqual(address_list.mailboxes,
                     address_list.all_mailboxes)
    self.assertEqual(len(address_list.addresses), 3)
    self.assertEqual(address_list.mailboxes[0].local_part, 'dinsdale')
    self.assertEqual(address_list.addresses[0].display_name,
                     'Monty Python')
    self.assertEqual(address_list.addresses[0].mailboxes[0].domain,
                     'example.com')
    self.assertEqual(address_list.addresses[0].mailboxes[1].local_part,
                     'flint')
    self.assertEqual(address_list.addresses[1].mailboxes[0].local_part,
                     'x')
    self.assertEqual(address_list.addresses[2].mailboxes[0].local_part,
                     'y')
    # str() of a plain (non-group) address equals str() of its mailbox.
    self.assertEqual(str(address_list.addresses[1]),
                     str(address_list.mailboxes[2]))
def test_invalid_content_disposition(self):
    """A Content-Disposition value with no disposition type is invalid."""
    # _test_parse_x performs all the assertions itself; the returned
    # token was previously bound to an unused local, now dropped.
    self._test_parse_x(
        parser.parse_content_disposition_header,
        ";attachment", "; attachment", ";attachment",
        [errors.InvalidHeaderDefect]*2
        )
def test_invalid_content_transfer_encoding(self):
    """A CTE value that starts with a parameter separator is invalid."""
    # _test_parse_x performs all the assertions itself; the returned
    # token was previously bound to an unused local, now dropped.
    self._test_parse_x(
        parser.parse_content_transfer_encoding_header,
        ";foo", ";foo", ";foo", [errors.InvalidHeaderDefect]*3
        )
@parameterize
class Test_parse_mime_version(TestParserMixin, TestEmailBase):
    # The 'parameterize' decorator generates one test method per entry
    # of mime_version_params, passing each value tuple to
    # mime_version_as_value (naming convention: *_params / *_as_value).

    def mime_version_as_value(self,
                              value,
                              tl_str,
                              tl_value,
                              major,
                              minor,
                              defects):
        # value: raw header value; tl_str/tl_value: expected str() and
        # .value of the parsed token; major/minor: parsed version
        # numbers (None when missing); defects: expected defect classes.
        mime_version = self._test_parse_x(parser.parse_mime_version,
            value, tl_str, tl_value, defects)
        self.assertEqual(mime_version.major, major)
        self.assertEqual(mime_version.minor, minor)

    # Cases taken from RFC 2045 section 4, plus the empty-value case.
    mime_version_params = {
        'rfc_2045_1': (
            '1.0',
            '1.0',
            '1.0',
            1,
            0,
            []),
        'RFC_2045_2': (
            '1.0 (produced by MetaSend Vx.x)',
            '1.0 (produced by MetaSend Vx.x)',
            '1.0 ',
            1,
            0,
            []),
        'RFC_2045_3': (
            '(produced by MetaSend Vx.x) 1.0',
            '(produced by MetaSend Vx.x) 1.0',
            ' 1.0',
            1,
            0,
            []),
        'RFC_2045_4': (
            '1.(produced by MetaSend Vx.x)0',
            '1.(produced by MetaSend Vx.x)0',
            '1. 0',
            1,
            0,
            []),
        'empty': (
            '',
            '',
            '',
            None,
            None,
            [errors.HeaderMissingRequiredValue]),
        }
class TestFolding(TestEmailBase):
    # Checks TokenList.fold() line wrapping and RFC 2047 encoded-word
    # generation against exact expected output strings.

    policy = policy.default

    def _test(self, tl, folded, policy=policy):
        # NOTE: the default binds the class attribute at definition time.
        self.assertEqual(tl.fold(policy=policy), folded, tl.ppstr())

    def test_simple_unstructured_no_folds(self):
        self._test(parser.get_unstructured("This is a test"),
                   "This is a test\n")

    def test_simple_unstructured_folded(self):
        self._test(parser.get_unstructured("This is also a test, but this "
                        "time there are enough words (and even some "
                        "symbols) to make it wrap; at least in theory."),
                   "This is also a test, but this time there are enough "
                        "words (and even some\n"
                   " symbols) to make it wrap; at least in theory.\n")

    def test_unstructured_with_unicode_no_folds(self):
        # Non-ASCII forces an RFC 2047 q-encoded word.
        self._test(parser.get_unstructured("hübsch kleiner beißt"),
                   "=?utf-8?q?h=C3=BCbsch_kleiner_bei=C3=9Ft?=\n")

    def test_one_ew_on_each_of_two_wrapped_lines(self):
        self._test(parser.get_unstructured("Mein kleiner Kaktus ist sehr "
                                           "hübsch. Es hat viele Stacheln "
                                           "und oft beißt mich."),
                   "Mein kleiner Kaktus ist sehr =?utf-8?q?h=C3=BCbsch=2E?= "
                        "Es hat viele Stacheln\n"
                   " und oft =?utf-8?q?bei=C3=9Ft?= mich.\n")

    def test_ews_combined_before_wrap(self):
        # Adjacent words needing encoding are merged into one encoded word.
        self._test(parser.get_unstructured("Mein Kaktus ist hübsch.  "
                                           "Es beißt mich.  "
                                           "And that's all I'm sayin."),
                   "Mein Kaktus ist =?utf-8?q?h=C3=BCbsch=2E__Es_bei=C3=9Ft?= "
                        "mich.  And that's\n"
                   " all I'm sayin.\n")

    # XXX Need test of an encoded word so long that it needs to be wrapped

    def test_simple_address(self):
        self._test(parser.get_address_list("abc <xyz@example.com>")[0],
                   "abc <xyz@example.com>\n")

    def test_address_list_folding_at_commas(self):
        # Folds may only happen after the commas separating addresses.
        self._test(parser.get_address_list('abc <xyz@example.com>, '
                                           '"Fred Blunt" <sharp@example.com>, '
                                           '"J.P.Cool" <hot@example.com>, '
                                           '"K<>y" <key@example.com>, '
                                           'Firesale <cheap@example.com>, '
                                           '<end@example.com>')[0],
                   'abc <xyz@example.com>, "Fred Blunt" <sharp@example.com>,\n'
                   ' "J.P.Cool" <hot@example.com>, "K<>y" <key@example.com>,\n'
                   ' Firesale <cheap@example.com>, <end@example.com>\n')

    def test_address_list_with_unicode_names(self):
        self._test(parser.get_address_list(
            'Hübsch Kaktus <beautiful@example.com>, '
                'beißt beißt <biter@example.com>')[0],
            '=?utf-8?q?H=C3=BCbsch?= Kaktus <beautiful@example.com>,\n'
                ' =?utf-8?q?bei=C3=9Ft_bei=C3=9Ft?= <biter@example.com>\n')

    def test_address_list_with_unicode_names_in_quotes(self):
        # Quotes are dropped when the phrase must be 2047-encoded anyway.
        self._test(parser.get_address_list(
            '"Hübsch Kaktus" <beautiful@example.com>, '
                '"beißt" beißt <biter@example.com>')[0],
            '=?utf-8?q?H=C3=BCbsch?= Kaktus <beautiful@example.com>,\n'
                ' =?utf-8?q?bei=C3=9Ft_bei=C3=9Ft?= <biter@example.com>\n')

    # XXX Need tests with comments on various sides of a unicode token,
    # and with unicode tokens in the comments.  Spaces inside the quotes
    # currently don't do the right thing.

    def test_initial_whitespace_splitting(self):
        # Build a header whose value starts with FWS followed by a run
        # too long for one line; the fold must happen at that FWS.
        body = parser.get_unstructured(' ' + 'x'*77)
        header = parser.Header([
            parser.HeaderLabel([parser.ValueTerminal('test:', 'atext')]),
            parser.CFWSList([parser.WhiteSpaceTerminal(' ', 'fws')]), body])
        self._test(header, 'test: \n ' + 'x'*77 + '\n')

    def test_whitespace_splitting(self):
        self._test(parser.get_unstructured('xxx ' + 'y'*77),
                   'xxx \n ' + 'y'*77 + '\n')
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| lgpl-3.0 |
yvaucher/hr | __unported__/hr_resume/__openerp__.py | 2 | 1657 | # -*- encoding: utf-8 -*-
###############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2013 Savoir-faire Linux (<http://www.savoirfairelinux.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
###############################################################################
# OpenERP/Odoo module manifest: a single dict literal evaluated by the
# framework when scanning addons.
{
    "name": "Resume Management",
    "version": "0.1",
    "category": "Human Resources",
    "license": "AGPL-3",
    "description": """
This module allows you to manage your employee resumes.
""",
    "author": "Savoir-faire Linux",
    "website": "http://www.savoirfairelinux.com",
    # Modules that must be installed before this one.
    "depends": [
        "hr_experience",
        "hr_skill",
        "hr_language",
    ],
    # XML data files loaded at install/update time.
    'data': ['hr_resume_view.xml',
             'report/report_resume.xml',
             ],
    "demo": [],
    "test": [],
    # Marked not installable (module lives under __unported__).
    'installable': False,
    "auto_install": False,
    "images": [],
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
danielhjames/Booktype | lib/booktype/urls.py | 6 | 3152 | # This file is part of Booktype.
# Copyright (c) 2012 Aleksandar Erkalovic <aleksandar.erkalovic@sourcefabric.org>
#
# Booktype is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Booktype is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Booktype. If not, see <http://www.gnu.org/licenses/>.
from django.views import static
from django.conf import settings
from django.conf.urls import url, include
from django.views.generic.base import TemplateView
from booktype.apps.account.views import profilethumbnail
# This is dispatcher for Sputnik connections.
from sputnik.views import dispatcher as sputnik_dispatcher
# URL-pattern -> channel-handler mapping consumed by the sputnik
# dispatcher view registered below under /_sputnik/.
SPUTNIK_DISPATCHER = (
    (r'^/booktype/$', 'booktype.apps.core.channel'),
    (r'^/chat/(?P<bookid>\d+)/$', 'booki.channels.chat'),
    (r'^/booktype/book/(?P<bookid>\d+)/(?P<version>[\w\d\.\-.]+)/$', 'booktype.apps.edit.channel')
)

urlpatterns = [
    # internationalization
    url(r'^_i18n/', include('django.conf.urls.i18n')),

    # front page
    url(r'', include('booktype.apps.portal.urls', namespace="portal")),

    # accounts
    url(r'^accounts/', include('booktype.apps.account.urls', namespace="accounts")),

    # booktype control center
    url(r'^_control/', include('booktypecontrol.urls', namespace="control_center")),

    # convert
    # TODO: Add namespace
    url(r'^_convert/', include('booktype.apps.convert.urls')),

    # serve files from DATA_ROOT directly
    url(r'^data/(?P<path>.*)$', static.serve, {'document_root': settings.DATA_ROOT, 'show_indexes': True}),

    # misc
    # TODO: replace with new apps
    url(r'^_utils/profilethumb/(?P<profileid>[\w\d\_\.\-]+)/thumbnail.jpg$',
        profilethumbnail, name='view_profilethumbnail'),

    # sputnik dispatcher
    url(r'^_sputnik/$', sputnik_dispatcher, {"map": SPUTNIK_DISPATCHER}, name='sputnik_dispatcher'),

    # messaging application
    # TODO: remove this application
    url(r'^messaging/', include('booki.messaging.urls')),

    # importer application
    url(r'^_importer/', include('booktype.apps.importer.urls', namespace='importer')),

    # API urls
    url(r'^_api/', include('booktype.api.urls')),
]

# Book-scoped patterns: all keyed on the same <bookid> prefix.
urlpatterns += [
    # export
    url(r'^(?P<bookid>[\w\s\_\.\-\d]+)/', include('booktype.apps.loadsave.urls', namespace='loadsave')),

    # new editor
    url(r'^(?P<bookid>[\w\s\_\.\-\d]+)/', include('booktype.apps.edit.urls', namespace='edit')),

    # old editor app
    url(r'^(?P<bookid>[\w\s\_\.\-\d]+)/', include('booki.editor.urls')),

    # robots.txt
    url(r'^robots\.txt$', TemplateView.as_view(template_name='robots.txt', content_type='text/plain')),

    # new booktype reader app
    url(r'^(?P<bookid>[\w\s\_\.\-\d]+)/', include('booktype.apps.reader.urls', namespace='reader')),
]
| agpl-3.0 |
Kilhog/odoo | addons/account/report/account_invoice_report.py | 224 | 12489 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import tools
import openerp.addons.decimal_precision as dp
from openerp.osv import fields,osv
class account_invoice_report(osv.osv):
    # Read-only reporting model backed by the SQL view created in init()
    # (hence _auto = False: no table is created by the ORM).
    _name = "account.invoice.report"
    _description = "Invoices Statistics"
    _auto = False
    _rec_name = 'date'

    def _compute_amounts_in_user_currency(self, cr, uid, ids, field_names, args, context=None):
        """Compute the amounts in the currency of the user's company.

        Converts price_total, price_average and residual from the base
        currency (the one whose rate is 1) into the user's company
        currency, using the rate at each record's date.
        """
        if context is None:
            context={}
        currency_obj = self.pool.get('res.currency')
        currency_rate_obj = self.pool.get('res.currency.rate')
        user = self.pool.get('res.users').browse(cr, uid, uid, context=context)
        user_currency_id = user.company_id.currency_id.id
        # The base currency is the one with rate == 1, preferring the
        # user's company but accepting a company-less rate.
        currency_rate_id = currency_rate_obj.search(
            cr, uid, [
                ('rate', '=', 1),
                '|',
                ('currency_id.company_id', '=', user.company_id.id),
                ('currency_id.company_id', '=', False)
            ], limit=1, context=context)[0]
        base_currency_id = currency_rate_obj.browse(cr, uid, currency_rate_id, context=context).currency_id.id
        res = {}
        ctx = context.copy()
        for item in self.browse(cr, uid, ids, context=context):
            # Conversion uses the rate valid at the record's date.
            ctx['date'] = item.date
            price_total = currency_obj.compute(cr, uid, base_currency_id, user_currency_id, item.price_total, context=ctx)
            price_average = currency_obj.compute(cr, uid, base_currency_id, user_currency_id, item.price_average, context=ctx)
            residual = currency_obj.compute(cr, uid, base_currency_id, user_currency_id, item.residual, context=ctx)
            res[item.id] = {
                'user_currency_price_total': price_total,
                'user_currency_price_average': price_average,
                'user_currency_residual': residual,
            }
        return res

    # All columns are readonly: records come from the SQL view below.
    _columns = {
        'date': fields.date('Date', readonly=True),
        'product_id': fields.many2one('product.product', 'Product', readonly=True),
        'product_qty':fields.float('Product Quantity', readonly=True),
        'uom_name': fields.char('Reference Unit of Measure', size=128, readonly=True),
        'payment_term': fields.many2one('account.payment.term', 'Payment Term', readonly=True),
        'period_id': fields.many2one('account.period', 'Force Period', domain=[('state','<>','done')], readonly=True),
        'fiscal_position': fields.many2one('account.fiscal.position', 'Fiscal Position', readonly=True),
        'currency_id': fields.many2one('res.currency', 'Currency', readonly=True),
        'categ_id': fields.many2one('product.category','Category of Product', readonly=True),
        'journal_id': fields.many2one('account.journal', 'Journal', readonly=True),
        'partner_id': fields.many2one('res.partner', 'Partner', readonly=True),
        'commercial_partner_id': fields.many2one('res.partner', 'Partner Company', help="Commercial Entity"),
        'company_id': fields.many2one('res.company', 'Company', readonly=True),
        'user_id': fields.many2one('res.users', 'Salesperson', readonly=True),
        'price_total': fields.float('Total Without Tax', readonly=True),
        'user_currency_price_total': fields.function(_compute_amounts_in_user_currency, string="Total Without Tax", type='float', digits_compute=dp.get_precision('Account'), multi="_compute_amounts"),
        'price_average': fields.float('Average Price', readonly=True, group_operator="avg"),
        'user_currency_price_average': fields.function(_compute_amounts_in_user_currency, string="Average Price", type='float', digits_compute=dp.get_precision('Account'), multi="_compute_amounts"),
        'currency_rate': fields.float('Currency Rate', readonly=True),
        'nbr': fields.integer('# of Invoices', readonly=True),  # TDE FIXME master: rename into nbr_lines
        'type': fields.selection([
            ('out_invoice','Customer Invoice'),
            ('in_invoice','Supplier Invoice'),
            ('out_refund','Customer Refund'),
            ('in_refund','Supplier Refund'),
            ],'Type', readonly=True),
        'state': fields.selection([
            ('draft','Draft'),
            ('proforma','Pro-forma'),
            ('proforma2','Pro-forma'),
            ('open','Open'),
            ('paid','Done'),
            ('cancel','Cancelled')
            ], 'Invoice Status', readonly=True),
        'date_due': fields.date('Due Date', readonly=True),
        'account_id': fields.many2one('account.account', 'Account',readonly=True),
        'account_line_id': fields.many2one('account.account', 'Account Line',readonly=True),
        'partner_bank_id': fields.many2one('res.partner.bank', 'Bank Account',readonly=True),
        'residual': fields.float('Total Residual', readonly=True),
        'user_currency_residual': fields.function(_compute_amounts_in_user_currency, string="Total Residual", type='float', digits_compute=dp.get_precision('Account'), multi="_compute_amounts"),
        'country_id': fields.many2one('res.country', 'Country of the Partner Company'),
    }
    _order = 'date desc'

    # Models/fields the view depends on, so the ORM rebuilds it when
    # any of them change.
    _depends = {
        'account.invoice': [
            'account_id', 'amount_total', 'commercial_partner_id', 'company_id',
            'currency_id', 'date_due', 'date_invoice', 'fiscal_position',
            'journal_id', 'partner_bank_id', 'partner_id', 'payment_term',
            'period_id', 'residual', 'state', 'type', 'user_id',
        ],
        'account.invoice.line': [
            'account_id', 'invoice_id', 'price_subtotal', 'product_id',
            'quantity', 'uos_id',
        ],
        'product.product': ['product_tmpl_id'],
        'product.template': ['categ_id'],
        'product.uom': ['category_id', 'factor', 'name', 'uom_type'],
        'res.currency.rate': ['currency_id', 'name'],
        'res.partner': ['country_id'],
    }

    def _select(self):
        # Outer SELECT: converts the aggregated amounts into the base
        # currency via the rate joined in init().
        select_str = """
            SELECT sub.id, sub.date, sub.product_id, sub.partner_id, sub.country_id,
                sub.payment_term, sub.period_id, sub.uom_name, sub.currency_id, sub.journal_id,
                sub.fiscal_position, sub.user_id, sub.company_id, sub.nbr, sub.type, sub.state,
                sub.categ_id, sub.date_due, sub.account_id, sub.account_line_id, sub.partner_bank_id,
                sub.product_qty, sub.price_total / cr.rate as price_total, sub.price_average /cr.rate as price_average,
                cr.rate as currency_rate, sub.residual / cr.rate as residual, sub.commercial_partner_id as commercial_partner_id
        """
        return select_str

    def _sub_select(self):
        # Inner SELECT: aggregates invoice lines, negating quantity and
        # amounts for refunds/supplier invoices so signs are consistent.
        select_str = """
                SELECT min(ail.id) AS id,
                    ai.date_invoice AS date,
                    ail.product_id, ai.partner_id, ai.payment_term, ai.period_id,
                    u2.name AS uom_name,
                    ai.currency_id, ai.journal_id, ai.fiscal_position, ai.user_id, ai.company_id,
                    count(ail.*) AS nbr,
                    ai.type, ai.state, pt.categ_id, ai.date_due, ai.account_id, ail.account_id AS account_line_id,
                    ai.partner_bank_id,
                    SUM(CASE
                         WHEN ai.type::text = ANY (ARRAY['out_refund'::character varying::text, 'in_invoice'::character varying::text])
                            THEN (- ail.quantity) / u.factor * u2.factor
                            ELSE ail.quantity / u.factor * u2.factor
                        END) AS product_qty,
                    SUM(CASE
                         WHEN ai.type::text = ANY (ARRAY['out_refund'::character varying::text, 'in_invoice'::character varying::text])
                            THEN - ail.price_subtotal
                            ELSE ail.price_subtotal
                        END) AS price_total,
                    CASE
                     WHEN ai.type::text = ANY (ARRAY['out_refund'::character varying::text, 'in_invoice'::character varying::text])
                        THEN SUM(- ail.price_subtotal)
                        ELSE SUM(ail.price_subtotal)
                    END / CASE
                           WHEN SUM(ail.quantity / u.factor * u2.factor) <> 0::numeric
                               THEN CASE
                                     WHEN ai.type::text = ANY (ARRAY['out_refund'::character varying::text, 'in_invoice'::character varying::text])
                                        THEN SUM((- ail.quantity) / u.factor * u2.factor)
                                        ELSE SUM(ail.quantity / u.factor * u2.factor)
                                    END
                               ELSE 1::numeric
                          END AS price_average,
                    CASE
                     WHEN ai.type::text = ANY (ARRAY['out_refund'::character varying::text, 'in_invoice'::character varying::text])
                        THEN - ai.residual
                        ELSE ai.residual
                    END / (SELECT count(*) FROM account_invoice_line l where invoice_id = ai.id) *
                    count(*) AS residual,
                    ai.commercial_partner_id as commercial_partner_id,
                    partner.country_id
        """
        return select_str

    def _from(self):
        from_str = """
                FROM account_invoice_line ail
                JOIN account_invoice ai ON ai.id = ail.invoice_id
                JOIN res_partner partner ON ai.commercial_partner_id = partner.id
                LEFT JOIN product_product pr ON pr.id = ail.product_id
                left JOIN product_template pt ON pt.id = pr.product_tmpl_id
                LEFT JOIN product_uom u ON u.id = ail.uos_id
                LEFT JOIN product_uom u2 ON u2.id = pt.uom_id
        """
        return from_str

    def _group_by(self):
        group_by_str = """
                GROUP BY ail.product_id, ai.date_invoice, ai.id,
                    ai.partner_id, ai.payment_term, ai.period_id, u2.name, u2.id, ai.currency_id, ai.journal_id,
                    ai.fiscal_position, ai.user_id, ai.company_id, ai.type, ai.state, pt.categ_id,
                    ai.date_due, ai.account_id, ail.account_id, ai.partner_bank_id, ai.residual,
                    ai.amount_total, ai.commercial_partner_id, partner.country_id
        """
        return group_by_str

    def init(self, cr):
        # self._table = account_invoice_report
        # (Re)create the backing SQL view.  The currency_rate CTE gives
        # each rate a validity window [date_start, date_end) so the
        # outer join can pick the rate in force at the record's date.
        tools.drop_view_if_exists(cr, self._table)
        cr.execute("""CREATE or REPLACE VIEW %s as (
            WITH currency_rate (currency_id, rate, date_start, date_end) AS (
                SELECT r.currency_id, r.rate, r.name AS date_start,
                    (SELECT name FROM res_currency_rate r2
                     WHERE r2.name > r.name AND
                           r2.currency_id = r.currency_id
                     ORDER BY r2.name ASC
                     LIMIT 1) AS date_end
                FROM res_currency_rate r
            )
            %s
            FROM (
                %s %s %s
            ) AS sub
            JOIN currency_rate cr ON
                (cr.currency_id = sub.currency_id AND
                 cr.date_start <= COALESCE(sub.date, NOW()) AND
                 (cr.date_end IS NULL OR cr.date_end > COALESCE(sub.date, NOW())))
        )""" % (
                    self._table,
                    self._select(), self._sub_select(), self._from(), self._group_by()))
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
GitHublong/hue | desktop/core/ext-py/boto-2.38.0/boto/datapipeline/exceptions.py | 235 | 1471 | # Copyright (c) 2012 Amazon.com, Inc. or its affiliates. All Rights Reserved
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
from boto.exception import JSONResponseError
class PipelineDeletedException(JSONResponseError):
    """Raised for the Data Pipeline 'PipelineDeleted' JSON fault."""
    pass
class InvalidRequestException(JSONResponseError):
    """Raised for the Data Pipeline 'InvalidRequest' JSON fault."""
    pass
class TaskNotFoundException(JSONResponseError):
    """Raised for the Data Pipeline 'TaskNotFound' JSON fault."""
    pass
class PipelineNotFoundException(JSONResponseError):
    """Raised for the Data Pipeline 'PipelineNotFound' JSON fault."""
    pass
class InternalServiceError(JSONResponseError):
    """Raised for the Data Pipeline 'InternalServiceError' JSON fault."""
    pass
| apache-2.0 |
Unode/firefox_decrypt | tests/simpletap/__init__.py | 1 | 3764 | """
Test Anything Protocol extension to Python's unit testing framework
This module contains TAPTestRunner and TAPTestResult which are used to produce
a test report in a TAP compatible format. All remaining functionality comes
from Python's own unittest module.
The core of the tests does not need any change and is purely unittest code.
The sole difference is in the __name__ == "__main__" section.
Simple usage:
import unittest
class IntegerArithmeticTestCase(unittest.TestCase):
def testAdd(self): # test method names begin 'test*'
"test adding values"
self.assertEqual((1 + 2), 3)
self.assertEqual(0 + 1, 1)
def testMultiply(self):
"test multiplying values"
self.assertEqual((0 * 10), 0)
self.assertEqual((5 * 8), 40)
def testFail(self):
"a failing test"
self.assertEqual(0, 1)
@unittest.expectedFailure
def testExpectFail(self):
"we saw this coming"
self.assertEqual(0, 1)
@unittest.skipIf(True, "Skipping this one")
def testSkip(self):
"pending a fix"
self.assertEqual(0, 1)
def testError(self):
"oops something went wrong"
no_such_variable + 1 # Oops!
if __name__ == "__main__":
from simpletap import TAPTestRunner
unittest.main(testRunner=TAPTestRunner())
When saved in a file called ``test.py`` and executed, this would produce:
1..6
ok 1 - test.py: test adding values
not ok 2 - test.py: oops something went wrong
# ERROR: NameError on file test.py line 30 in testError: 'no_such_variable + 1 # Oops!':
# global name 'no_such_variable' is not defined
skip 3 - test.py: we saw this coming
# EXPECTED_FAILURE: AssertionError on file test.py line 21 in testExpectFail: 'self.assertEqual(0, 1)':
# 0 != 1
not ok 4 - test.py: a failing test
# FAIL: AssertionError on file test.py line 16 in testFail: 'self.assertEqual(0, 1)':
# 0 != 1
ok 5 - test.py: test multiplying values
skip 6 - test.py: pending a fix
# SKIP:
# Skipping this one
You can also launch simpletap directly from the command line in much the
same way you do with unittest:
python3 -m simpletap test.IntegerArithmeticTestCase
For more information refer to the unittest documentation:
http://docs.python.org/library/unittest.html
Copyright (c) 2014-2016 Renato Alves <alves.rjc@gmail.com>
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
https://opensource.org/licenses/MIT
"""
from .result import TAPTestResult
from .runner import TAPTestRunner
from .version import __version__ # noqa
__all__ = ['TAPTestResult', 'TAPTestRunner']
| gpl-3.0 |
pkainz/pylearn2 | pylearn2/sandbox/cuda_convnet/debug.py | 41 | 3019 | __authors__ = "Ian Goodfellow"
__copyright__ = "Copyright 2010-2012, Universite de Montreal"
__credits__ = ["Ian Goodfellow"]
__license__ = "3-clause BSD"
__maintainer__ = "LISA Lab"
__email__ = "pylearn-dev@googlegroups"
from pylearn2.testing.skip import skip_if_no_gpu
# Abort at import time when no GPU is available: everything below
# exercises the cuda-convnet wrappers.
skip_if_no_gpu()
import logging
import numpy as np
from theano import shared
from pylearn2.sandbox.cuda_convnet.filter_acts import FilterActs
from theano.sandbox.cuda import gpu_from_host
from theano.sandbox.cuda import host_from_gpu
from theano.tensor.nnet.conv import conv2d
from theano import function
def main():
    """Stress-test FilterActs against theano's conv2d reference.

    Greedily hill-climbs on random input perturbations, trying to
    maximize the discrepancy between the two implementations.  Runs
    forever by design (debugging tool); watch the logged error.
    """
    logger = logging.getLogger(__name__)

    # Tests that running FilterActs with no padding is the same as running
    # theano's conv2D in valid mode
    rng = np.random.RandomState([2012, 10, 9])
    batch_size = 128
    rows = 32
    cols = 32
    channels = 3
    filter_rows = 7
    filter_cols = filter_rows
    num_filters = 16

    # cuda-convnet layout: channels first, batch last (c01b).
    images = shared(rng.uniform(-1., 1., (channels, rows, cols,
        batch_size)).astype('float32'), name='images')
    filters = shared(rng.uniform(-1., 1., (channels, filter_rows,
        filter_cols, num_filters)).astype('float32'),
        name='filters')

    gpu_images = gpu_from_host(images)
    gpu_filters = gpu_from_host(filters)

    output = FilterActs()(gpu_images, gpu_filters)
    output = host_from_gpu(output)

    # Reference path: conv2d on bc01 layout, with kernels flipped
    # because conv2d convolves while FilterActs correlates.
    images_bc01 = images.dimshuffle(3, 0, 1, 2)
    filters_bc01 = filters.dimshuffle(3, 0, 1, 2)
    filters_bc01 = filters_bc01[:, :, ::-1, ::-1]

    output_conv2d = conv2d(images_bc01, filters_bc01,
            border_mode='valid')
    output_conv2d = output_conv2d.dimshuffle(1, 2, 3, 0)

    f = function([], [output, output_conv2d])

    def err():
        # Maximum absolute elementwise difference between the two paths.
        output, output_conv2d = f()
        diff = output - output_conv2d
        return np.abs(diff).max()

    prev_err = err()
    accepted_steps = 0
    while True:
        logger.debug('Current error: {0}'.format(prev_err))
        # Randomly perturb either the filters or the images ...
        change_filters = rng.randint(2)
        if change_filters:
            target = filters
        else:
            target = images
        old_val = target.get_value()
        selector = rng.randint(2)
        if selector == 0:
            # ... either everywhere a little ...
            new_val = old_val + rng.uniform(-.1, .1, old_val.shape)
        else:
            # ... or a single element by a larger amount.
            idx1 = rng.randint(old_val.shape[0])
            idx2 = rng.randint(old_val.shape[1])
            idx3 = rng.randint(old_val.shape[2])
            idx4 = rng.randint(old_val.shape[3])
            new_val = old_val.copy()
            new_val[idx1, idx2, idx3, idx4] += rng.uniform(-1., 1.)
        new_val = new_val.astype(old_val.dtype)
        target.set_value(new_val)
        new_err = err()
        if new_err <= prev_err:
            # Perturbation did not increase the discrepancy: revert it.
            logger.debug(
                'Failed to move beyond step {0}'.format(accepted_steps))
            target.set_value(old_val)
        else:
            prev_err = new_err
            accepted_steps += 1
if __name__ == "__main__":
main()
| bsd-3-clause |
firerszd/kbengine | kbe/src/lib/python/Lib/test/test_source_encoding.py | 67 | 5223 | # -*- coding: koi8-r -*-
import unittest
from test.support import TESTFN, unlink, unload
import importlib
import os
import sys
import subprocess
class SourceEncodingTest(unittest.TestCase):
    # Exercises CPython's PEP 263 source-encoding machinery: coding
    # declarations, UTF-8 BOM handling, and the SyntaxErrors raised for bad
    # or conflicting encodings.  This file itself is declared koi8-r (see
    # the coding cookie on line 1); several literals below depend on that.

    def test_pep263(self):
        # The koi8-r literal decodes to Cyrillic text whose UTF-8 encoding
        # must match the expected byte string exactly.
        self.assertEqual(
            "ðÉÔÏÎ".encode("utf-8"),
            b'\xd0\x9f\xd0\xb8\xd1\x82\xd0\xbe\xd0\xbd'
        )
        # A backslash before a non-ASCII character must survive unchanged.
        self.assertEqual(
            "\ð".encode("utf-8"),
            b'\\\xd0\x9f'
        )

    def test_compilestring(self):
        # see #1882
        # A coding comment inside a bytes source passed to compile() must be
        # honoured when building string literals.
        c = compile(b"\n# coding: utf-8\nu = '\xc3\xb3'\n", "dummy", "exec")
        d = {}
        exec(c, d)
        self.assertEqual(d['u'], '\xf3')

    def test_issue2301(self):
        # The SyntaxError's .text attribute must carry the *decoded* source
        # line (cp932 bytes decoded to the kanji), not the raw bytes.
        try:
            compile(b"# coding: cp932\nprint '\x94\x4e'", "dummy", "exec")
        except SyntaxError as v:
            self.assertEqual(v.text, "print '\u5e74'\n")
        else:
            self.fail()

    def test_issue4626(self):
        # Non-ASCII identifiers must work when the source is str (the
        # latin-1 cookie applies only to byte sources).
        c = compile("# coding=latin-1\n\u00c6 = '\u00c6'", "dummy", "exec")
        d = {}
        exec(c, d)
        self.assertEqual(d['\xc6'], '\xc6')

    def test_issue3297(self):
        # A literal astral character and its \U escape must compile to the
        # same string (no surrogate splitting on narrow builds).
        c = compile("a, b = '\U0001010F', '\\U0001010F'", "dummy", "exec")
        d = {}
        exec(c, d)
        self.assertEqual(d['a'], d['b'])
        self.assertEqual(len(d['a']), len(d['b']))
        self.assertEqual(ascii(d['a']), ascii(d['b']))

    def test_issue7820(self):
        # Ensure that check_bom() restores all bytes in the right order if
        # check_bom() fails in pydebug mode: a buffer starts with the first
        # byte of a valid BOM, but next bytes are different

        # one byte in common with the UTF-16-LE BOM
        self.assertRaises(SyntaxError, eval, b'\xff\x20')

        # two bytes in common with the UTF-8 BOM
        self.assertRaises(SyntaxError, eval, b'\xef\xbb\x20')

    def test_20731(self):
        # Run the helper script in a subprocess so its (deliberately odd)
        # coding declaration is processed by a fresh tokenizer.
        sub = subprocess.Popen([sys.executable,
                                os.path.join(os.path.dirname(__file__),
                                             'coding20731.py')],
                               stderr=subprocess.PIPE)
        err = sub.communicate()[1]
        self.assertEqual(sub.returncode, 0)
        self.assertNotIn(b'SyntaxError', err)

    def test_error_message(self):
        # Valid combinations compile; invalid ones must name the offending
        # codec (or 'BOM' when the cookie conflicts with a UTF-8 BOM).
        compile(b'# -*- coding: iso-8859-15 -*-\n', 'dummy', 'exec')
        compile(b'\xef\xbb\xbf\n', 'dummy', 'exec')
        compile(b'\xef\xbb\xbf# -*- coding: utf-8 -*-\n', 'dummy', 'exec')
        with self.assertRaisesRegex(SyntaxError, 'fake'):
            compile(b'# -*- coding: fake -*-\n', 'dummy', 'exec')
        with self.assertRaisesRegex(SyntaxError, 'iso-8859-15'):
            compile(b'\xef\xbb\xbf# -*- coding: iso-8859-15 -*-\n',
                    'dummy', 'exec')
        with self.assertRaisesRegex(SyntaxError, 'BOM'):
            compile(b'\xef\xbb\xbf# -*- coding: iso-8859-15 -*-\n',
                    'dummy', 'exec')
        with self.assertRaisesRegex(SyntaxError, 'fake'):
            compile(b'\xef\xbb\xbf# -*- coding: fake -*-\n', 'dummy', 'exec')
        with self.assertRaisesRegex(SyntaxError, 'BOM'):
            compile(b'\xef\xbb\xbf# -*- coding: fake -*-\n', 'dummy', 'exec')

    def test_bad_coding(self):
        module_name = 'bad_coding'
        self.verify_bad_module(module_name)

    def test_bad_coding2(self):
        module_name = 'bad_coding2'
        self.verify_bad_module(module_name)

    def verify_bad_module(self, module_name):
        # A module with a broken coding declaration must fail both on
        # import and when its raw bytes are fed to compile() directly.
        self.assertRaises(SyntaxError, __import__, 'test.' + module_name)

        path = os.path.dirname(__file__)
        filename = os.path.join(path, module_name + '.py')
        with open(filename, "rb") as fp:
            bytes = fp.read()
        self.assertRaises(SyntaxError, compile, bytes, filename, 'exec')

    def test_exec_valid_coding(self):
        # exec() of a bytes object must honour its coding cookie (cp949).
        d = {}
        exec(b'# coding: cp949\na = "\xaa\xa7"\n', d)
        self.assertEqual(d['a'], '\u3047')

    def test_file_parse(self):
        # issue1134: all encodings outside latin-1 and utf-8 fail on
        # multiline strings and long lines (>512 columns)
        unload(TESTFN)
        filename = TESTFN + ".py"
        f = open(filename, "w", encoding="cp1252")
        sys.path.insert(0, os.curdir)
        try:
            with f:
                f.write("# -*- coding: cp1252 -*-\n")
                f.write("'''A short string\n")
                f.write("'''\n")
                f.write("'A very long string %s'\n" % ("X" * 1000))
            importlib.invalidate_caches()
            __import__(TESTFN)
        finally:
            # Always undo the path/module/file changes made above.
            del sys.path[0]
            unlink(filename)
            unlink(filename + "c")
            unlink(filename + "o")
            unload(TESTFN)

    def test_error_from_string(self):
        # See http://bugs.python.org/issue6289
        input = "# coding: ascii\n\N{SNOWMAN}".encode('utf-8')
        with self.assertRaises(SyntaxError) as c:
            compile(input, "<string>", "exec")
        expected = "'ascii' codec can't decode byte 0xe2 in position 16: " \
                   "ordinal not in range(128)"
        self.assertTrue(c.exception.args[0].startswith(expected),
                        msg=c.exception.args[0])
# Allow running this test module directly (``python test_source_encoding.py``).
if __name__ == "__main__":
    unittest.main()
| lgpl-3.0 |
40223141/lego | static/Brython3.1.1-20150328-091302/Lib/threading.py | 730 | 45641 | """Thread module emulating a subset of Java's threading model."""
import sys as _sys
import _thread
from time import sleep as _sleep
try:
from time import monotonic as _time
except ImportError:
from time import time as _time
from traceback import format_exc as _format_exc
from _weakrefset import WeakSet
# Note regarding PEP 8 compliant names
# This threading model was originally inspired by Java, and inherited
# the convention of camelCase function and method names from that
# language. Those original names are not in any imminent danger of
# being deprecated (even for Py3k),so this module provides them as an
# alias for the PEP 8 compliant names
# Note that using the new PEP 8 compliant names facilitates substitution
# with the multiprocessing module, which doesn't provide the old
# Java inspired names.
__all__ = ['active_count', 'Condition', 'current_thread', 'enumerate', 'Event',
'Lock', 'RLock', 'Semaphore', 'BoundedSemaphore', 'Thread', 'Barrier',
'Timer', 'ThreadError', 'setprofile', 'settrace', 'local', 'stack_size']
# Rename some stuff so "from threading import *" is safe
# Private aliases of the low-level _thread primitives this module wraps;
# the underscore names keep them out of ``import *``.
_start_new_thread = _thread.start_new_thread
_allocate_lock = _thread.allocate_lock
get_ident = _thread.get_ident
ThreadError = _thread.error
try:
    # Prefer the C implementation of RLock when the platform provides one.
    _CRLock = _thread.RLock
except AttributeError:
    _CRLock = None
TIMEOUT_MAX = _thread.TIMEOUT_MAX
del _thread

# Support for profile and trace hooks
# Installed via setprofile()/settrace(); handed to each new thread in
# Thread._bootstrap_inner() before run() is called.
_profile_hook = None
_trace_hook = None
def setprofile(func):
    """Install *func* as the profile hook for threads started by this module.

    Each newly started thread passes the hook to sys.setprofile() just
    before its run() method executes.
    """
    global _profile_hook
    _profile_hook = func
def settrace(func):
    """Install *func* as the trace hook for threads started by this module.

    Each newly started thread passes the hook to sys.settrace() just
    before its run() method executes.
    """
    global _trace_hook
    _trace_hook = func
# Synchronization classes
# Lock is simply the low-level _thread lock type; there is no Python-level
# wrapper class for plain (non-reentrant) locks.
Lock = _allocate_lock
def RLock(*args, **kwargs):
    """Factory function that returns a new reentrant lock.

    A reentrant lock must be released by the thread that acquired it. Once
    a thread has acquired a reentrant lock, the same thread may acquire it
    again without blocking; the thread must release it once for each time
    it has acquired it.
    """
    # Use the C implementation when available, else the pure-Python one.
    factory = _PyRLock if _CRLock is None else _CRLock
    return factory(*args, **kwargs)
class _RLock:
    """This class implements reentrant lock objects.

    A reentrant lock must be released by the thread that acquired it. Once a
    thread has acquired a reentrant lock, the same thread may acquire it
    again without blocking; the thread must release it once for each time it
    has acquired it.
    """

    def __init__(self):
        self._block = _allocate_lock()  # underlying non-reentrant lock
        self._owner = None              # ident of the owning thread, or None
        self._count = 0                 # recursion level (0 == unlocked)

    def __repr__(self):
        owner = self._owner
        try:
            # Show the owning thread's name when it is registered in _active.
            owner = _active[owner].name
        except KeyError:
            pass
        return "<%s owner=%r count=%d>" % (
            self.__class__.__name__, owner, self._count)

    def acquire(self, blocking=True, timeout=-1):
        """Acquire a lock, blocking or non-blocking.

        When invoked without arguments: if this thread already owns the lock,
        increment the recursion level by one, and return immediately. Otherwise,
        if another thread owns the lock, block until the lock is unlocked. Once
        the lock is unlocked (not owned by any thread), then grab ownership, set
        the recursion level to one, and return. If more than one thread is
        blocked waiting until the lock is unlocked, only one at a time will be
        able to grab ownership of the lock. There is no return value in this
        case.

        When invoked with the blocking argument set to true, do the same thing
        as when called without arguments, and return true.

        When invoked with the blocking argument set to false, do not block. If a
        call without an argument would block, return false immediately;
        otherwise, do the same thing as when called without arguments, and
        return true.

        When invoked with the floating-point timeout argument set to a positive
        value, block for at most the number of seconds specified by timeout
        and as long as the lock cannot be acquired. Return true if the lock has
        been acquired, false if the timeout has elapsed.
        """
        me = get_ident()
        if self._owner == me:
            # Re-entry by the owning thread: just bump the recursion level.
            self._count = self._count + 1
            return 1
        rc = self._block.acquire(blocking, timeout)
        if rc:
            self._owner = me
            self._count = 1
        return rc

    __enter__ = acquire

    def release(self):
        """Release a lock, decrementing the recursion level.

        If after the decrement it is zero, reset the lock to unlocked (not owned
        by any thread), and if any other threads are blocked waiting for the
        lock to become unlocked, allow exactly one of them to proceed. If after
        the decrement the recursion level is still nonzero, the lock remains
        locked and owned by the calling thread.

        Only call this method when the calling thread owns the lock. A
        RuntimeError is raised if this method is called when the lock is
        unlocked.

        There is no return value.
        """
        if self._owner != get_ident():
            raise RuntimeError("cannot release un-acquired lock")
        self._count = count = self._count - 1
        if not count:
            # Fully released: drop ownership and wake one blocked acquirer.
            self._owner = None
            self._block.release()

    def __exit__(self, t, v, tb):
        self.release()

    # Internal methods used by condition variables

    def _acquire_restore(self, state):
        # Reacquire the lock and restore the (count, owner) pair saved by
        # _release_save(); used by Condition.wait().
        self._block.acquire()
        self._count, self._owner = state

    def _release_save(self):
        if self._count == 0:
            raise RuntimeError("cannot release un-acquired lock")
        # Completely release the lock regardless of recursion level and
        # return the state needed to restore it afterwards.
        count = self._count
        self._count = 0
        owner = self._owner
        self._owner = None
        self._block.release()
        return (count, owner)

    def _is_owned(self):
        # True when the calling thread owns this lock.
        return self._owner == get_ident()

# Keep a reference to the pure-Python implementation for the RLock() factory.
_PyRLock = _RLock
class Condition:
    """Class that implements a condition variable.

    A condition variable allows one or more threads to wait until they are
    notified by another thread.

    If the lock argument is given and not None, it must be a Lock or RLock
    object, and it is used as the underlying lock. Otherwise, a new RLock object
    is created and used as the underlying lock.
    """

    def __init__(self, lock=None):
        if lock is None:
            lock = RLock()
        self._lock = lock
        # Export the lock's acquire() and release() methods
        self.acquire = lock.acquire
        self.release = lock.release
        # If the lock defines _release_save() and/or _acquire_restore(),
        # these override the default implementations (which just call
        # release() and acquire() on the lock). Ditto for _is_owned().
        try:
            self._release_save = lock._release_save
        except AttributeError:
            pass
        try:
            self._acquire_restore = lock._acquire_restore
        except AttributeError:
            pass
        try:
            self._is_owned = lock._is_owned
        except AttributeError:
            pass
        self._waiters = []  # one plain lock per thread blocked in wait()

    def __enter__(self):
        return self._lock.__enter__()

    def __exit__(self, *args):
        return self._lock.__exit__(*args)

    def __repr__(self):
        return "<Condition(%s, %d)>" % (self._lock, len(self._waiters))

    def _release_save(self):
        # Default implementation; replaced in __init__ when the lock
        # provides its own (e.g. _RLock, which must fully unwind recursion).
        self._lock.release()           # No state to save

    def _acquire_restore(self, x):
        self._lock.acquire()           # Ignore saved state

    def _is_owned(self):
        # Return True if lock is owned by current_thread.
        # This method is called only if __lock doesn't have _is_owned().
        # Heuristic: a non-blocking acquire succeeds only when nobody
        # holds the lock, so success means *we* don't own it.
        if self._lock.acquire(0):
            self._lock.release()
            return False
        else:
            return True

    def wait(self, timeout=None):
        """Wait until notified or until a timeout occurs.

        If the calling thread has not acquired the lock when this method is
        called, a RuntimeError is raised.

        This method releases the underlying lock, and then blocks until it is
        awakened by a notify() or notify_all() call for the same condition
        variable in another thread, or until the optional timeout occurs. Once
        awakened or timed out, it re-acquires the lock and returns.

        When the timeout argument is present and not None, it should be a
        floating point number specifying a timeout for the operation in seconds
        (or fractions thereof).

        When the underlying lock is an RLock, it is not released using its
        release() method, since this may not actually unlock the lock when it
        was acquired multiple times recursively. Instead, an internal interface
        of the RLock class is used, which really unlocks it even when it has
        been recursively acquired several times. Another internal interface is
        then used to restore the recursion level when the lock is reacquired.
        """
        if not self._is_owned():
            raise RuntimeError("cannot wait on un-acquired lock")
        # Each waiter blocks on its own freshly-allocated lock, which a
        # notifier releases to wake it.
        waiter = _allocate_lock()
        waiter.acquire()
        self._waiters.append(waiter)
        saved_state = self._release_save()
        try:    # restore state no matter what (e.g., KeyboardInterrupt)
            if timeout is None:
                waiter.acquire()
                gotit = True
            else:
                if timeout > 0:
                    gotit = waiter.acquire(True, timeout)
                else:
                    gotit = waiter.acquire(False)
                if not gotit:
                    # Timed out: remove ourselves unless a notifier already did.
                    try:
                        self._waiters.remove(waiter)
                    except ValueError:
                        pass
            return gotit
        finally:
            self._acquire_restore(saved_state)

    def wait_for(self, predicate, timeout=None):
        """Wait until a condition evaluates to True.

        predicate should be a callable which result will be interpreted as a
        boolean value. A timeout may be provided giving the maximum time to
        wait.
        """
        endtime = None
        waittime = timeout
        result = predicate()
        while not result:
            if waittime is not None:
                if endtime is None:
                    endtime = _time() + waittime
                else:
                    # Shrink the remaining wait on each spurious wakeup.
                    waittime = endtime - _time()
                    if waittime <= 0:
                        break
            self.wait(waittime)
            result = predicate()
        return result

    def notify(self, n=1):
        """Wake up one or more threads waiting on this condition, if any.

        If the calling thread has not acquired the lock when this method is
        called, a RuntimeError is raised.

        This method wakes up at most n of the threads waiting for the condition
        variable; it is a no-op if no threads are waiting.
        """
        if not self._is_owned():
            raise RuntimeError("cannot notify on un-acquired lock")
        __waiters = self._waiters
        waiters = __waiters[:n]
        if not waiters:
            return
        for waiter in waiters:
            # Releasing the waiter's private lock wakes its thread.
            waiter.release()
            try:
                __waiters.remove(waiter)
            except ValueError:
                pass

    def notify_all(self):
        """Wake up all threads waiting on this condition.

        If the calling thread has not acquired the lock when this method
        is called, a RuntimeError is raised.
        """
        self.notify(len(self._waiters))

    # Legacy camelCase alias kept for backward compatibility.
    notifyAll = notify_all
class Semaphore:
    """This class implements semaphore objects.

    Semaphores manage a counter representing the number of release() calls minus
    the number of acquire() calls, plus an initial value. The acquire() method
    blocks if necessary until it can return without making the counter
    negative. If not given, value defaults to 1.
    """

    # After Tim Peters' semaphore class, but not quite the same (no maximum)

    def __init__(self, value=1):
        if value < 0:
            raise ValueError("semaphore initial value must be >= 0")
        # The condition guards _value; acquirers wait on it while _value == 0.
        self._cond = Condition(Lock())
        self._value = value

    def acquire(self, blocking=True, timeout=None):
        """Acquire a semaphore, decrementing the internal counter by one.

        When invoked without arguments: if the internal counter is larger than
        zero on entry, decrement it by one and return immediately. If it is zero
        on entry, block, waiting until some other thread has called release() to
        make it larger than zero. This is done with proper interlocking so that
        if multiple acquire() calls are blocked, release() will wake exactly one
        of them up. The implementation may pick one at random, so the order in
        which blocked threads are awakened should not be relied on. There is no
        return value in this case.

        When invoked with blocking set to true, do the same thing as when called
        without arguments, and return true.

        When invoked with blocking set to false, do not block. If a call without
        an argument would block, return false immediately; otherwise, do the
        same thing as when called without arguments, and return true.

        When invoked with a timeout other than None, it will block for at
        most timeout seconds. If acquire does not complete successfully in
        that interval, return false. Return true otherwise.
        """
        if not blocking and timeout is not None:
            raise ValueError("can't specify timeout for non-blocking acquire")
        rc = False
        endtime = None
        with self._cond:
            while self._value == 0:
                if not blocking:
                    break
                if timeout is not None:
                    if endtime is None:
                        endtime = _time() + timeout
                    else:
                        # Recompute remaining time after a spurious wakeup.
                        timeout = endtime - _time()
                        if timeout <= 0:
                            break
                self._cond.wait(timeout)
            else:
                # Loop exited normally (not via break): counter is positive.
                self._value = self._value - 1
                rc = True
        return rc

    __enter__ = acquire

    def release(self):
        """Release a semaphore, incrementing the internal counter by one.

        When the counter is zero on entry and another thread is waiting for it
        to become larger than zero again, wake up that thread.
        """
        with self._cond:
            self._value = self._value + 1
            self._cond.notify()

    def __exit__(self, t, v, tb):
        self.release()
class BoundedSemaphore(Semaphore):
    """Implements a bounded semaphore.

    A bounded semaphore checks to make sure its current value doesn't exceed its
    initial value. If it does, ValueError is raised. In most situations
    semaphores are used to guard resources with limited capacity.

    If the semaphore is released too many times it's a sign of a bug. If not
    given, value defaults to 1.

    Like regular semaphores, bounded semaphores manage a counter representing
    the number of release() calls minus the number of acquire() calls, plus an
    initial value. The acquire() method blocks if necessary until it can return
    without making the counter negative. If not given, value defaults to 1.
    """

    def __init__(self, value=1):
        Semaphore.__init__(self, value)
        # Remember the starting value so release() can enforce the bound.
        self._initial_value = value

    def release(self):
        """Release a semaphore, incrementing the internal counter by one.

        When the counter is zero on entry and another thread is waiting for it
        to become larger than zero again, wake up that thread.

        If the number of releases exceeds the number of acquires,
        raise a ValueError.
        """
        with self._cond:
            if self._value >= self._initial_value:
                raise ValueError("Semaphore released too many times")
            self._value += 1
            self._cond.notify()
class Event:
    """A boolean flag shared between threads.

    The flag starts out false.  set() makes it true and wakes every
    waiting thread; clear() resets it to false; wait() blocks until the
    flag is true or an optional timeout elapses.
    """

    # After Tim Peters' event class (without is_posted())

    def __init__(self):
        self._cond = Condition(Lock())
        self._flag = False

    def _reset_internal_locks(self):
        # private! called by Thread._reset_internal_locks by _after_fork()
        self._cond.__init__()

    def is_set(self):
        """Return true if and only if the internal flag is true."""
        return self._flag

    # Legacy camelCase alias kept for backward compatibility.
    isSet = is_set

    def set(self):
        """Make the internal flag true.

        Every thread blocked in wait() is awakened, and later wait()
        calls return immediately while the flag stays true.
        """
        with self._cond:
            self._flag = True
            self._cond.notify_all()

    def clear(self):
        """Make the internal flag false again.

        Threads that call wait() afterwards block until set() is called.
        """
        with self._cond:
            self._flag = False

    def wait(self, timeout=None):
        """Block until the internal flag is true.

        Returns immediately if the flag is already true; otherwise blocks
        until another thread calls set(), or until *timeout* seconds (a
        float) have elapsed when a timeout is given.

        The return value is the flag's value on exit, so it is True
        unless the wait timed out.
        """
        with self._cond:
            signaled = self._flag
            if not signaled:
                signaled = self._cond.wait(timeout)
            return signaled
# A barrier class. Inspired in part by the pthread_barrier_* api and
# the CyclicBarrier class from Java. See
# http://sourceware.org/pthreads-win32/manual/pthread_barrier_init.html and
# http://java.sun.com/j2se/1.5.0/docs/api/java/util/concurrent/
# CyclicBarrier.html
# for information.
# We maintain two main states, 'filling' and 'draining' enabling the barrier
# to be cyclic. Threads are not allowed into it until it has fully drained
# since the previous cycle. In addition, a 'resetting' state exists which is
# similar to 'draining' except that threads leave with a BrokenBarrierError,
# and a 'broken' state in which all threads get the exception.
class Barrier:
    """Implements a Barrier.

    Useful for synchronizing a fixed number of threads at known synchronization
    points. Threads block on 'wait()' and are simultaneously once they have all
    made that call.
    """

    def __init__(self, parties, action=None, timeout=None):
        """Create a barrier, initialised to 'parties' threads.

        'action' is a callable which, when supplied, will be called by one of
        the threads after they have all entered the barrier and just prior to
        releasing them all. If a 'timeout' is provided, it is uses as the
        default for all subsequent 'wait()' calls.
        """
        self._cond = Condition(Lock())
        self._action = action
        self._timeout = timeout
        self._parties = parties
        self._state = 0 #0 filling, 1, draining, -1 resetting, -2 broken
        self._count = 0  # number of threads currently inside wait()

    def wait(self, timeout=None):
        """Wait for the barrier.

        When the specified number of threads have started waiting, they are all
        simultaneously awoken. If an 'action' was provided for the barrier, one
        of the threads will have executed that callback prior to returning.
        Returns an individual index number from 0 to 'parties-1'.
        """
        if timeout is None:
            timeout = self._timeout
        with self._cond:
            self._enter() # Block while the barrier drains.
            index = self._count
            self._count += 1
            try:
                if index + 1 == self._parties:
                    # We release the barrier
                    self._release()
                else:
                    # We wait until someone releases us
                    self._wait(timeout)
                return index
            finally:
                self._count -= 1
                # Wake up any threads waiting for barrier to drain.
                self._exit()

    # Block until the barrier is ready for us, or raise an exception
    # if it is broken.
    def _enter(self):
        while self._state in (-1, 1):
            # It is draining or resetting, wait until done
            self._cond.wait()
        #see if the barrier is in a broken state
        if self._state < 0:
            raise BrokenBarrierError
        assert self._state == 0

    # Optionally run the 'action' and release the threads waiting
    # in the barrier.
    def _release(self):
        try:
            if self._action:
                self._action()
            # enter draining state
            self._state = 1
            self._cond.notify_all()
        except:
            #an exception during the _action handler. Break and reraise
            self._break()
            raise

    # Wait in the barrier until we are relased. Raise an exception
    # if the barrier is reset or broken.
    def _wait(self, timeout):
        if not self._cond.wait_for(lambda : self._state != 0, timeout):
            #timed out. Break the barrier
            self._break()
            raise BrokenBarrierError
        if self._state < 0:
            raise BrokenBarrierError
        assert self._state == 1

    # If we are the last thread to exit the barrier, signal any threads
    # waiting for the barrier to drain.
    def _exit(self):
        if self._count == 0:
            if self._state in (-1, 1):
                #resetting or draining
                self._state = 0
                self._cond.notify_all()

    def reset(self):
        """Reset the barrier to the initial state.

        Any threads currently waiting will get the BrokenBarrier exception
        raised.
        """
        with self._cond:
            if self._count > 0:
                if self._state == 0:
                    #reset the barrier, waking up threads
                    self._state = -1
                elif self._state == -2:
                    #was broken, set it to reset state
                    #which clears when the last thread exits
                    self._state = -1
            else:
                # Nobody is waiting: go straight back to 'filling'.
                self._state = 0
            self._cond.notify_all()

    def abort(self):
        """Place the barrier into a 'broken' state.

        Useful in case of error. Any currently waiting threads and threads
        attempting to 'wait()' will have BrokenBarrierError raised.
        """
        with self._cond:
            self._break()

    def _break(self):
        # An internal error was detected. The barrier is set to
        # a broken state all parties awakened.
        self._state = -2
        self._cond.notify_all()

    @property
    def parties(self):
        """Return the number of threads required to trip the barrier."""
        return self._parties

    @property
    def n_waiting(self):
        """Return the number of threads currently waiting at the barrier."""
        # We don't need synchronization here since this is an ephemeral result
        # anyway. It returns the correct value in the steady state.
        if self._state == 0:
            return self._count
        return 0

    @property
    def broken(self):
        """Return True if the barrier is in a broken state."""
        return self._state == -2
# exception raised by the Barrier class
class BrokenBarrierError(RuntimeError):
    """Raised when a Barrier is broken, reset, or times out while in use."""
# Helper to generate new thread names
# Module-wide counter feeding default thread names ("Thread-1", "Thread-2", ...).
_counter = 0

def _newname(template="Thread-%d"):
    """Return the next auto-generated thread name using *template*."""
    global _counter
    _counter += 1
    return template % _counter
# Active thread administration
# _active maps thread idents to running Thread objects; _limbo holds threads
# that have been start()ed but whose bootstrap code has not registered them
# yet.  Both dicts are guarded by _active_limbo_lock.
_active_limbo_lock = _allocate_lock()
_active = {}    # maps thread id to Thread object
_limbo = {}

# For debug and leak testing
_dangling = WeakSet()
# Main class for threads
class Thread:
"""A class that represents a thread of control.
This class can be safely subclassed in a limited fashion. There are two ways
to specify the activity: by passing a callable object to the constructor, or
by overriding the run() method in a subclass.
"""
__initialized = False
# Need to store a reference to sys.exc_info for printing
# out exceptions when a thread tries to use a global var. during interp.
# shutdown and thus raises an exception about trying to perform some
# operation on/with a NoneType
__exc_info = _sys.exc_info
# Keep sys.exc_clear too to clear the exception just before
# allowing .join() to return.
#XXX __exc_clear = _sys.exc_clear
def __init__(self, group=None, target=None, name=None,
args=(), kwargs=None, *, daemon=None):
"""This constructor should always be called with keyword arguments. Arguments are:
*group* should be None; reserved for future extension when a ThreadGroup
class is implemented.
*target* is the callable object to be invoked by the run()
method. Defaults to None, meaning nothing is called.
*name* is the thread name. By default, a unique name is constructed of
the form "Thread-N" where N is a small decimal number.
*args* is the argument tuple for the target invocation. Defaults to ().
*kwargs* is a dictionary of keyword arguments for the target
invocation. Defaults to {}.
If a subclass overrides the constructor, it must make sure to invoke
the base class constructor (Thread.__init__()) before doing anything
else to the thread.
"""
assert group is None, "group argument must be None for now"
if kwargs is None:
kwargs = {}
self._target = target
self._name = str(name or _newname())
self._args = args
self._kwargs = kwargs
if daemon is not None:
self._daemonic = daemon
else:
self._daemonic = current_thread().daemon
self._ident = None
self._started = Event()
self._stopped = False
self._block = Condition(Lock())
self._initialized = True
# sys.stderr is not stored in the class like
# sys.exc_info since it can be changed between instances
self._stderr = _sys.stderr
_dangling.add(self)
def _reset_internal_locks(self):
# private! Called by _after_fork() to reset our internal locks as
# they may be in an invalid state leading to a deadlock or crash.
if hasattr(self, '_block'): # DummyThread deletes _block
self._block.__init__()
self._started._reset_internal_locks()
def __repr__(self):
assert self._initialized, "Thread.__init__() was not called"
status = "initial"
if self._started.is_set():
status = "started"
if self._stopped:
status = "stopped"
if self._daemonic:
status += " daemon"
if self._ident is not None:
status += " %s" % self._ident
return "<%s(%s, %s)>" % (self.__class__.__name__, self._name, status)
def start(self):
"""Start the thread's activity.
It must be called at most once per thread object. It arranges for the
object's run() method to be invoked in a separate thread of control.
This method will raise a RuntimeError if called more than once on the
same thread object.
"""
if not self._initialized:
raise RuntimeError("thread.__init__() not called")
if self._started.is_set():
raise RuntimeError("threads can only be started once")
with _active_limbo_lock:
_limbo[self] = self
try:
_start_new_thread(self._bootstrap, ())
except Exception:
with _active_limbo_lock:
del _limbo[self]
raise
self._started.wait()
def run(self):
"""Method representing the thread's activity.
You may override this method in a subclass. The standard run() method
invokes the callable object passed to the object's constructor as the
target argument, if any, with sequential and keyword arguments taken
from the args and kwargs arguments, respectively.
"""
try:
if self._target:
self._target(*self._args, **self._kwargs)
finally:
# Avoid a refcycle if the thread is running a function with
# an argument that has a member that points to the thread.
del self._target, self._args, self._kwargs
def _bootstrap(self):
# Wrapper around the real bootstrap code that ignores
# exceptions during interpreter cleanup. Those typically
# happen when a daemon thread wakes up at an unfortunate
# moment, finds the world around it destroyed, and raises some
# random exception *** while trying to report the exception in
# _bootstrap_inner() below ***. Those random exceptions
# don't help anybody, and they confuse users, so we suppress
# them. We suppress them only when it appears that the world
# indeed has already been destroyed, so that exceptions in
# _bootstrap_inner() during normal business hours are properly
# reported. Also, we only suppress them for daemonic threads;
# if a non-daemonic encounters this, something else is wrong.
try:
self._bootstrap_inner()
except:
if self._daemonic and _sys is None:
return
raise
def _set_ident(self):
self._ident = get_ident()
def _bootstrap_inner(self):
try:
self._set_ident()
self._started.set()
with _active_limbo_lock:
_active[self._ident] = self
del _limbo[self]
if _trace_hook:
_sys.settrace(_trace_hook)
if _profile_hook:
_sys.setprofile(_profile_hook)
try:
self.run()
except SystemExit:
pass
except:
# If sys.stderr is no more (most likely from interpreter
# shutdown) use self._stderr. Otherwise still use sys (as in
# _sys) in case sys.stderr was redefined since the creation of
# self.
if _sys:
_sys.stderr.write("Exception in thread %s:\n%s\n" %
(self.name, _format_exc()))
else:
# Do the best job possible w/o a huge amt. of code to
# approximate a traceback (code ideas from
# Lib/traceback.py)
exc_type, exc_value, exc_tb = self._exc_info()
try:
print((
"Exception in thread " + self.name +
" (most likely raised during interpreter shutdown):"), file=self._stderr)
print((
"Traceback (most recent call last):"), file=self._stderr)
while exc_tb:
print((
' File "%s", line %s, in %s' %
(exc_tb.tb_frame.f_code.co_filename,
exc_tb.tb_lineno,
exc_tb.tb_frame.f_code.co_name)), file=self._stderr)
exc_tb = exc_tb.tb_next
print(("%s: %s" % (exc_type, exc_value)), file=self._stderr)
# Make sure that exc_tb gets deleted since it is a memory
# hog; deleting everything else is just for thoroughness
finally:
del exc_type, exc_value, exc_tb
finally:
# Prevent a race in
# test_threading.test_no_refcycle_through_target when
# the exception keeps the target alive past when we
# assert that it's dead.
#XXX self.__exc_clear()
pass
finally:
with _active_limbo_lock:
self._stop()
try:
# We don't call self._delete() because it also
# grabs _active_limbo_lock.
del _active[get_ident()]
except:
pass
def _stop(self):
self._block.acquire()
self._stopped = True
self._block.notify_all()
self._block.release()
def _delete(self):
"Remove current thread from the dict of currently running threads."
# Notes about running with _dummy_thread:
#
# Must take care to not raise an exception if _dummy_thread is being
# used (and thus this module is being used as an instance of
# dummy_threading). _dummy_thread.get_ident() always returns -1 since
# there is only one thread if _dummy_thread is being used. Thus
# len(_active) is always <= 1 here, and any Thread instance created
# overwrites the (if any) thread currently registered in _active.
#
# An instance of _MainThread is always created by 'threading'. This
# gets overwritten the instant an instance of Thread is created; both
# threads return -1 from _dummy_thread.get_ident() and thus have the
# same key in the dict. So when the _MainThread instance created by
# 'threading' tries to clean itself up when atexit calls this method
# it gets a KeyError if another Thread instance was created.
#
# This all means that KeyError from trying to delete something from
# _active if dummy_threading is being used is a red herring. But
# since it isn't if dummy_threading is *not* being used then don't
# hide the exception.
try:
with _active_limbo_lock:
del _active[get_ident()]
# There must not be any python code between the previous line
# and after the lock is released. Otherwise a tracing function
# could try to acquire the lock again in the same thread, (in
# current_thread()), and would block.
except KeyError:
if 'dummy_threading' not in _sys.modules:
raise
def join(self, timeout=None):
    """Wait until the thread terminates.

    Blocks the calling thread until this thread finishes -- normally or
    via an unhandled exception -- or until the optional timeout elapses.

    When *timeout* is a number, it is a limit in seconds (fractions
    allowed).  join() always returns None, so call isAlive() afterwards
    to find out whether the join timed out.  When *timeout* is None the
    call blocks until the thread terminates.  A thread can be join()ed
    many times.

    Raises RuntimeError when joining the current thread (deadlock) or a
    thread that has not been started yet.
    """
    if not self._initialized:
        raise RuntimeError("Thread.__init__() not called")
    if not self._started.is_set():
        raise RuntimeError("cannot join thread before it is started")
    if self is current_thread():
        raise RuntimeError("cannot join current thread")

    with self._block:
        if timeout is None:
            # Wait indefinitely; _stop() notifies under the same lock.
            while not self._stopped:
                self._block.wait()
        else:
            # Re-derive the remaining time on every wakeup so spurious
            # notifications do not extend the deadline.
            end = _time() + timeout
            while not self._stopped:
                remaining = end - _time()
                if remaining <= 0:
                    break
                self._block.wait(remaining)
@property
def name(self):
    """A string used for identification purposes only.

    Purely informational: multiple threads may share the same name.
    The initial value is set by the constructor.
    """
    assert self._initialized, "Thread.__init__() not called"
    return self._name

@name.setter
def name(self, value):
    assert self._initialized, "Thread.__init__() not called"
    self._name = str(value)
@property
def ident(self):
    """Thread identifier of this thread, or None if it has not been started.

    A nonzero integer; see the thread.get_ident() function.  Identifiers
    may be recycled once a thread exits and another is created, and the
    value remains readable after the thread has exited.
    """
    assert self._initialized, "Thread.__init__() not called"
    return self._ident
def is_alive(self):
    """Return whether the thread is alive.

    True from just before run() starts until just after run()
    terminates.  The module-level enumerate() lists all alive threads.
    """
    assert self._initialized, "Thread.__init__() not called"
    # Both operands are side-effect-free reads, so the reordered
    # conjunction is equivalent to the classic started-and-not-stopped.
    return not self._stopped and self._started.is_set()

isAlive = is_alive
@property
def daemon(self):
    """A boolean value indicating whether this thread is a daemon thread.

    Must be assigned before start() is called; afterwards RuntimeError
    is raised.  The initial value is inherited from the creating thread,
    and since the main thread is not daemonic, threads created from it
    default to daemon = False.  The whole Python program exits when no
    alive non-daemon threads remain.
    """
    assert self._initialized, "Thread.__init__() not called"
    return self._daemonic

@daemon.setter
def daemon(self, value):
    if not self._initialized:
        raise RuntimeError("Thread.__init__() not called")
    if self._started.is_set():
        raise RuntimeError("cannot set daemon status of active thread")
    self._daemonic = value
def isDaemon(self):
    """Legacy camelCase accessor; equivalent to reading ``daemon``."""
    return self.daemon
def setDaemon(self, daemonic):
    """Legacy camelCase mutator; equivalent to assigning ``daemon``."""
    self.daemon = daemonic
def getName(self):
    """Legacy camelCase accessor; equivalent to reading ``name``."""
    return self.name
def setName(self, name):
    """Legacy camelCase mutator; equivalent to assigning ``name``."""
    self.name = name
# The timer class was contributed by Itamar Shtull-Trauring
class Timer(Thread):
    """Call a function after a specified number of seconds:

        t = Timer(30.0, f, args=None, kwargs=None)
        t.start()
        t.cancel()  # stop the timer's action if it's still waiting
    """

    def __init__(self, interval, function, args=None, kwargs=None):
        Thread.__init__(self)
        self.interval = interval
        self.function = function
        # Substitute fresh containers for None so callers never share
        # mutable defaults between Timer instances.
        if args is None:
            args = []
        if kwargs is None:
            kwargs = {}
        self.args = args
        self.kwargs = kwargs
        self.finished = Event()

    def cancel(self):
        """Stop the timer if it hasn't finished yet."""
        self.finished.set()

    def run(self):
        # Sleep until either the interval elapses or cancel() fires,
        # then invoke the callback only if we were not cancelled.
        self.finished.wait(self.interval)
        if not self.finished.is_set():
            self.function(*self.args, **self.kwargs)
        # Set the flag so a late cancel() becomes a no-op.
        self.finished.set()
# Special thread class to represent the main thread.
# This is garbage collected through an exit handler.
class _MainThread(Thread):

    def __init__(self):
        Thread.__init__(self, name="MainThread", daemon=False)
        # The main thread is running by definition; register it.
        self._started.set()
        self._set_ident()
        with _active_limbo_lock:
            _active[self._ident] = self

    def _exitfunc(self):
        """Interpreter shutdown: wait out non-daemon threads, then deregister."""
        self._stop()
        # Repeatedly pick any live non-daemon thread and join it; new
        # threads may be started while we wait, so loop until none remain.
        while True:
            t = _pickSomeNonDaemonThread()
            if t is None:
                break
            t.join()
        self._delete()
def _pickSomeNonDaemonThread():
    """Return some alive non-daemon thread, or None if there is none.

    Note: ``enumerate`` here is this module's function (all threads),
    not the builtin.
    """
    return next(
        (t for t in enumerate() if not t.daemon and t.is_alive()), None)
# Dummy thread class to represent threads not started here.
# These aren't garbage collected when they die, nor can they be waited for.
# If they invoke anything in threading.py that calls current_thread(), they
# leave an entry in the _active dict forever after.
# Their purpose is to return *something* from current_thread().
# They are marked as daemon threads so we won't wait for them
# when we exit (conform previous semantics).
class _DummyThread(Thread):

    def __init__(self):
        Thread.__init__(self, name=_newname("Dummy-%d"), daemon=True)
        # Thread._block holds an OS-level locking primitive that a dummy
        # thread can never use.  Dummies are immortal, so release that
        # resource now instead of leaking one per foreign thread.
        del self._block
        self._started.set()
        self._set_ident()
        with _active_limbo_lock:
            _active[self._ident] = self

    def _stop(self):
        # Foreign threads are never stopped through this module.
        pass

    def join(self, timeout=None):
        assert False, "cannot join a dummy thread"
# Global API functions

def current_thread():
    """Return the current Thread object, corresponding to the caller's thread of control.

    If the caller's thread of control was not created through the
    threading module, a dummy thread object with limited functionality
    is returned instead.
    """
    ident = get_ident()
    try:
        return _active[ident]
    except KeyError:
        # Thread of control originated outside this module.
        return _DummyThread()

currentThread = current_thread
def active_count():
    """Return the number of Thread objects currently alive.

    Equals the length of the list returned by enumerate().
    """
    with _active_limbo_lock:
        # Started threads live in _active; started-but-not-yet-running
        # ones in _limbo.
        return len(_active) + len(_limbo)

activeCount = active_count
def _enumerate():
    # Lock-free variant of enumerate(); the caller must already hold
    # _active_limbo_lock.  Internal use only.
    threads = list(_active.values())
    threads.extend(_limbo.values())
    return threads
def enumerate():
    """Return a list of all Thread objects currently alive.

    Includes daemonic threads, dummy thread objects created by
    current_thread(), and the main thread.  Excludes terminated threads
    and threads that have not yet been started.
    """
    with _active_limbo_lock:
        alive = list(_active.values())
        alive.extend(_limbo.values())
        return alive
from _thread import stack_size

# Create the main thread object,
# and make it available for the interpreter
# (Py_Main) as threading._shutdown.
_shutdown = _MainThread()._exitfunc

# Get the thread-local implementation: prefer the C version from the
# _thread module, fall back to the pure-Python one when unavailable.
try:
    from _thread import _local as local
except ImportError:
    from _threading_local import local
def _after_fork():
    # Called by Python/ceval.c:PyEval_ReInitThreads, which is invoked
    # from PyOS_AfterFork.  Cleans up threading-module state that must
    # not survive a fork: only the forking thread exists in the child.

    # Reset _active_limbo_lock first, in case we forked while the lock
    # was held by another (non-forked) thread.
    # http://bugs.python.org/issue874900
    global _active_limbo_lock
    _active_limbo_lock = _allocate_lock()

    # fork() only copied the current thread; clear references to others.
    new_active = {}
    current = current_thread()
    with _active_limbo_lock:
        for thread in _enumerate():
            # Any lock/condition variable may be currently locked or in
            # an invalid state, so we reinitialize them.
            thread._reset_internal_locks()
            if thread is current:
                # There is only one active thread.  We reset the ident
                # to its new value since it can have changed.
                ident = get_ident()
                thread._ident = ident
                new_active[ident] = thread
            else:
                # All the others are already stopped.
                thread._stop()

    _limbo.clear()
    _active.clear()
    _active.update(new_active)
    assert len(_active) == 1
| agpl-3.0 |
iwm911/plaso | plaso/parsers/plist_plugins/softwareupdate_test.py | 1 | 2465 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright 2014 The Plaso Project Authors.
# Please see the AUTHORS file for details on individual authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for the Software Update plist plugin."""
import unittest
# pylint: disable=unused-import
from plaso.formatters import plist as plist_formatter
from plaso.lib import event
from plaso.parsers import plist
from plaso.parsers.plist_plugins import softwareupdate
from plaso.parsers.plist_plugins import test_lib
class SoftwareUpdatePluginTest(test_lib.PlistPluginTestCase):
  """Tests for the SoftwareUpdate plist plugin."""

  def setUp(self):
    """Sets up the needed objects used throughout the test."""
    self._plugin = softwareupdate.SoftwareUpdatePlugin(None)
    self._parser = plist.PlistParser(event.PreprocessObject(), None)

  def testProcess(self):
    """Tests the Process function."""
    plist_name = u'com.apple.SoftwareUpdate.plist'
    test_file = self._GetTestFilePath([plist_name])
    event_generator = self._ParsePlistFileWithPlugin(
        self._parser, self._plugin, test_file, plist_name)
    event_objects = self._GetEventObjects(event_generator)

    self.assertEquals(len(event_objects), 2)

    # First event: the last full OS update.
    event_object = event_objects[0]
    self.assertEqual(event_object.key, u'')
    self.assertEqual(event_object.root, u'/')
    expected_desc = u'Last Mac OS X 10.9.1 (13B42) full update.'
    self.assertEqual(event_object.desc, expected_desc)
    expected_string = u'// {}'.format(expected_desc)
    self._TestGetMessageStrings(
        event_object, expected_string, expected_string)

    # Second event: the last partial update with pending items.
    # NOTE(review): 'udpate' presumably mirrors a typo in the string the
    # plugin emits -- verify against softwareupdate.py before "fixing".
    event_object = event_objects[1]
    self.assertEqual(event_object.key, u'')
    self.assertEqual(event_object.root, u'/')
    expected_desc = (
        u'Last Mac OS 10.9.1 (13B42) partially '
        u'udpate, pending 1: RAWCameraUpdate5.03(031-2664).')
    self.assertEqual(event_object.desc, expected_desc)
# Run the test suite when this file is executed directly.
if __name__ == '__main__':
  unittest.main()
| apache-2.0 |
SpectreJan/gnuradio | gr-filter/python/filter/gui/icons_rc.py | 58 | 173558 | # -*- coding: utf-8 -*-
# Resource object code
#
# Created: Fri Aug 3 00:15:09 2012
# by: The Resource Compiler for PyQt (Qt v4.8.0)
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore
qt_resource_data = "\
\x00\x00\x08\x4a\
\x3c\
\x3f\x78\x6d\x6c\x20\x76\x65\x72\x73\x69\x6f\x6e\x3d\x22\x31\x2e\
\x30\x22\x20\x65\x6e\x63\x6f\x64\x69\x6e\x67\x3d\x22\x55\x54\x46\
\x2d\x38\x22\x20\x73\x74\x61\x6e\x64\x61\x6c\x6f\x6e\x65\x3d\x22\
\x6e\x6f\x22\x3f\x3e\x0a\x3c\x21\x2d\x2d\x20\x43\x72\x65\x61\x74\
\x65\x64\x20\x77\x69\x74\x68\x20\x49\x6e\x6b\x73\x63\x61\x70\x65\
\x20\x28\x68\x74\x74\x70\x3a\x2f\x2f\x77\x77\x77\x2e\x69\x6e\x6b\
\x73\x63\x61\x70\x65\x2e\x6f\x72\x67\x2f\x29\x20\x2d\x2d\x3e\x0a\
\x0a\x3c\x73\x76\x67\x0a\x20\x20\x20\x78\x6d\x6c\x6e\x73\x3a\x64\
\x63\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x70\x75\x72\x6c\x2e\x6f\
\x72\x67\x2f\x64\x63\x2f\x65\x6c\x65\x6d\x65\x6e\x74\x73\x2f\x31\
\x2e\x31\x2f\x22\x0a\x20\x20\x20\x78\x6d\x6c\x6e\x73\x3a\x63\x63\
\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x63\x72\x65\x61\x74\x69\x76\
\x65\x63\x6f\x6d\x6d\x6f\x6e\x73\x2e\x6f\x72\x67\x2f\x6e\x73\x23\
\x22\x0a\x20\x20\x20\x78\x6d\x6c\x6e\x73\x3a\x72\x64\x66\x3d\x22\
\x68\x74\x74\x70\x3a\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\
\x67\x2f\x31\x39\x39\x39\x2f\x30\x32\x2f\x32\x32\x2d\x72\x64\x66\
\x2d\x73\x79\x6e\x74\x61\x78\x2d\x6e\x73\x23\x22\x0a\x20\x20\x20\
\x78\x6d\x6c\x6e\x73\x3a\x73\x76\x67\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x0a\x20\x20\x20\x78\x6d\x6c\x6e\x73\x3d\
\x22\x68\x74\x74\x70\x3a\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\
\x72\x67\x2f\x32\x30\x30\x30\x2f\x73\x76\x67\x22\x0a\x20\x20\x20\
\x78\x6d\x6c\x6e\x73\x3a\x78\x6c\x69\x6e\x6b\x3d\x22\x68\x74\x74\
\x70\x3a\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x31\
\x39\x39\x39\x2f\x78\x6c\x69\x6e\x6b\x22\x0a\x20\x20\x20\x78\x6d\
\x6c\x6e\x73\x3a\x73\x6f\x64\x69\x70\x6f\x64\x69\x3d\x22\x68\x74\
\x74\x70\x3a\x2f\x2f\x73\x6f\x64\x69\x70\x6f\x64\x69\x2e\x73\x6f\
\x75\x72\x63\x65\x66\x6f\x72\x67\x65\x2e\x6e\x65\x74\x2f\x44\x54\
\x44\x2f\x73\x6f\x64\x69\x70\x6f\x64\x69\x2d\x30\x2e\x64\x74\x64\
\x22\x0a\x20\x20\x20\x78\x6d\x6c\x6e\x73\x3a\x69\x6e\x6b\x73\x63\
\x61\x70\x65\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x77\x77\x77\x2e\
\x69\x6e\x6b\x73\x63\x61\x70\x65\x2e\x6f\x72\x67\x2f\x6e\x61\x6d\
\x65\x73\x70\x61\x63\x65\x73\x2f\x69\x6e\x6b\x73\x63\x61\x70\x65\
\x22\x0a\x20\x20\x20\x69\x64\x3d\x22\x73\x76\x67\x33\x34\x33\x36\
\x22\x0a\x20\x20\x20\x76\x65\x72\x73\x69\x6f\x6e\x3d\x22\x31\x2e\
\x31\x22\x0a\x20\x20\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\x76\
\x65\x72\x73\x69\x6f\x6e\x3d\x22\x30\x2e\x34\x38\x2e\x33\x2e\x31\
\x20\x72\x39\x38\x38\x36\x22\x0a\x20\x20\x20\x77\x69\x64\x74\x68\
\x3d\x22\x33\x32\x22\x0a\x20\x20\x20\x68\x65\x69\x67\x68\x74\x3d\
\x22\x33\x32\x22\x0a\x20\x20\x20\x73\x6f\x64\x69\x70\x6f\x64\x69\
\x3a\x64\x6f\x63\x6e\x61\x6d\x65\x3d\x22\x69\x6d\x70\x75\x6c\x73\
\x65\x5f\x72\x65\x73\x70\x6f\x6e\x73\x65\x2e\x69\x63\x6f\x22\x3e\
\x0a\x20\x20\x3c\x6d\x65\x74\x61\x64\x61\x74\x61\x0a\x20\x20\x20\
\x20\x20\x69\x64\x3d\x22\x6d\x65\x74\x61\x64\x61\x74\x61\x33\x34\
\x34\x32\x22\x3e\x0a\x20\x20\x20\x20\x3c\x72\x64\x66\x3a\x52\x44\
\x46\x3e\x0a\x20\x20\x20\x20\x20\x20\x3c\x63\x63\x3a\x57\x6f\x72\
\x6b\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x20\x72\x64\x66\x3a\x61\
\x62\x6f\x75\x74\x3d\x22\x22\x3e\x0a\x20\x20\x20\x20\x20\x20\x20\
\x20\x3c\x64\x63\x3a\x66\x6f\x72\x6d\x61\x74\x3e\x69\x6d\x61\x67\
\x65\x2f\x73\x76\x67\x2b\x78\x6d\x6c\x3c\x2f\x64\x63\x3a\x66\x6f\
\x72\x6d\x61\x74\x3e\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x3c\x64\
\x63\x3a\x74\x79\x70\x65\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x72\x64\x66\x3a\x72\x65\x73\x6f\x75\x72\x63\x65\x3d\x22\
\x68\x74\x74\x70\x3a\x2f\x2f\x70\x75\x72\x6c\x2e\x6f\x72\x67\x2f\
\x64\x63\x2f\x64\x63\x6d\x69\x74\x79\x70\x65\x2f\x53\x74\x69\x6c\
\x6c\x49\x6d\x61\x67\x65\x22\x20\x2f\x3e\x0a\x20\x20\x20\x20\x20\
\x20\x20\x20\x3c\x64\x63\x3a\x74\x69\x74\x6c\x65\x3e\x3c\x2f\x64\
\x63\x3a\x74\x69\x74\x6c\x65\x3e\x0a\x20\x20\x20\x20\x20\x20\x3c\
\x2f\x63\x63\x3a\x57\x6f\x72\x6b\x3e\x0a\x20\x20\x20\x20\x3c\x2f\
\x72\x64\x66\x3a\x52\x44\x46\x3e\x0a\x20\x20\x3c\x2f\x6d\x65\x74\
\x61\x64\x61\x74\x61\x3e\x0a\x20\x20\x3c\x64\x65\x66\x73\x0a\x20\
\x20\x20\x20\x20\x69\x64\x3d\x22\x64\x65\x66\x73\x33\x34\x34\x30\
\x22\x20\x2f\x3e\x0a\x20\x20\x3c\x73\x6f\x64\x69\x70\x6f\x64\x69\
\x3a\x6e\x61\x6d\x65\x64\x76\x69\x65\x77\x0a\x20\x20\x20\x20\x20\
\x70\x61\x67\x65\x63\x6f\x6c\x6f\x72\x3d\x22\x23\x66\x66\x66\x66\
\x66\x66\x22\x0a\x20\x20\x20\x20\x20\x62\x6f\x72\x64\x65\x72\x63\
\x6f\x6c\x6f\x72\x3d\x22\x23\x36\x36\x36\x36\x36\x36\x22\x0a\x20\
\x20\x20\x20\x20\x62\x6f\x72\x64\x65\x72\x6f\x70\x61\x63\x69\x74\
\x79\x3d\x22\x31\x22\x0a\x20\x20\x20\x20\x20\x6f\x62\x6a\x65\x63\
\x74\x74\x6f\x6c\x65\x72\x61\x6e\x63\x65\x3d\x22\x31\x30\x22\x0a\
\x20\x20\x20\x20\x20\x67\x72\x69\x64\x74\x6f\x6c\x65\x72\x61\x6e\
\x63\x65\x3d\x22\x31\x30\x22\x0a\x20\x20\x20\x20\x20\x67\x75\x69\
\x64\x65\x74\x6f\x6c\x65\x72\x61\x6e\x63\x65\x3d\x22\x31\x30\x22\
\x0a\x20\x20\x20\x20\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\x70\
\x61\x67\x65\x6f\x70\x61\x63\x69\x74\x79\x3d\x22\x30\x22\x0a\x20\
\x20\x20\x20\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\x70\x61\x67\
\x65\x73\x68\x61\x64\x6f\x77\x3d\x22\x32\x22\x0a\x20\x20\x20\x20\
\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\x77\x69\x6e\x64\x6f\x77\
\x2d\x77\x69\x64\x74\x68\x3d\x22\x31\x33\x36\x34\x22\x0a\x20\x20\
\x20\x20\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\x77\x69\x6e\x64\
\x6f\x77\x2d\x68\x65\x69\x67\x68\x74\x3d\x22\x37\x35\x30\x22\x0a\
\x20\x20\x20\x20\x20\x69\x64\x3d\x22\x6e\x61\x6d\x65\x64\x76\x69\
\x65\x77\x33\x34\x33\x38\x22\x0a\x20\x20\x20\x20\x20\x73\x68\x6f\
\x77\x67\x72\x69\x64\x3d\x22\x66\x61\x6c\x73\x65\x22\x0a\x20\x20\
\x20\x20\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\x7a\x6f\x6f\x6d\
\x3d\x22\x37\x2e\x33\x37\x35\x22\x0a\x20\x20\x20\x20\x20\x69\x6e\
\x6b\x73\x63\x61\x70\x65\x3a\x63\x78\x3d\x22\x31\x36\x22\x0a\x20\
\x20\x20\x20\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\x63\x79\x3d\
\x22\x31\x36\x22\x0a\x20\x20\x20\x20\x20\x69\x6e\x6b\x73\x63\x61\
\x70\x65\x3a\x77\x69\x6e\x64\x6f\x77\x2d\x78\x3d\x22\x30\x22\x0a\
\x20\x20\x20\x20\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\x77\x69\
\x6e\x64\x6f\x77\x2d\x79\x3d\x22\x31\x36\x22\x0a\x20\x20\x20\x20\
\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\x77\x69\x6e\x64\x6f\x77\
\x2d\x6d\x61\x78\x69\x6d\x69\x7a\x65\x64\x3d\x22\x30\x22\x0a\x20\
\x20\x20\x20\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\x63\x75\x72\
\x72\x65\x6e\x74\x2d\x6c\x61\x79\x65\x72\x3d\x22\x73\x76\x67\x33\
\x34\x33\x36\x22\x20\x2f\x3e\x0a\x20\x20\x3c\x69\x6d\x61\x67\x65\
\x0a\x20\x20\x20\x20\x20\x77\x69\x64\x74\x68\x3d\x22\x33\x32\x22\
\x0a\x20\x20\x20\x20\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x33\x32\
\x22\x0a\x20\x20\x20\x20\x20\x78\x6c\x69\x6e\x6b\x3a\x68\x72\x65\
\x66\x3d\x22\x64\x61\x74\x61\x3a\x69\x6d\x61\x67\x65\x2f\x70\x6e\
\x67\x3b\x62\x61\x73\x65\x36\x34\x2c\x69\x56\x42\x4f\x52\x77\x30\
\x4b\x47\x67\x6f\x41\x41\x41\x41\x4e\x53\x55\x68\x45\x55\x67\x41\
\x41\x41\x43\x41\x41\x41\x41\x41\x67\x43\x41\x59\x41\x41\x41\x42\
\x7a\x65\x6e\x72\x30\x41\x41\x41\x41\x42\x48\x4e\x43\x53\x56\x51\
\x49\x43\x41\x67\x49\x66\x41\x68\x6b\x69\x41\x41\x41\x41\x50\x31\
\x4a\x52\x45\x46\x55\x0a\x57\x49\x58\x74\x6c\x7a\x46\x75\x51\x6a\
\x45\x4d\x68\x6a\x38\x6e\x36\x63\x41\x4e\x75\x72\x43\x39\x37\x70\
\x31\x36\x68\x49\x36\x49\x4c\x68\x45\x58\x51\x4f\x49\x4b\x6e\x49\
\x41\x72\x49\x4c\x46\x58\x54\x32\x2f\x71\x33\x68\x74\x30\x34\x67\
\x42\x73\x4c\x4e\x79\x41\x67\x62\x7a\x6e\x44\x71\x38\x67\x70\x45\
\x70\x56\x0a\x38\x32\x69\x56\x78\x52\x34\x64\x2b\x2f\x65\x6e\x4f\
\x4c\x46\x6b\x41\x5a\x54\x43\x56\x68\x4a\x41\x58\x63\x48\x69\x41\
\x49\x54\x42\x6d\x58\x66\x4e\x49\x39\x6f\x39\x67\x79\x72\x69\x33\
\x7a\x6e\x46\x37\x52\x41\x5a\x6f\x57\x2b\x42\x35\x4b\x55\x31\x39\
\x34\x54\x75\x41\x78\x68\x2f\x4f\x66\x59\x6b\x39\x77\x54\x78\x0a\
\x6b\x46\x6c\x2f\x53\x41\x75\x61\x45\x62\x35\x37\x41\x31\x31\x66\
\x36\x61\x78\x37\x58\x7a\x50\x4b\x56\x63\x73\x45\x55\x43\x47\x30\
\x47\x30\x52\x33\x70\x4e\x6e\x71\x34\x6b\x36\x7a\x46\x61\x49\x37\
\x51\x72\x73\x42\x7a\x62\x72\x4e\x50\x49\x42\x51\x4c\x31\x47\x70\
\x53\x48\x34\x4f\x63\x76\x56\x37\x52\x45\x6c\x2b\x0a\x6a\x6b\x70\
\x46\x71\x4a\x66\x2f\x41\x2b\x42\x66\x58\x30\x41\x57\x74\x47\x34\
\x4b\x38\x66\x67\x39\x49\x42\x37\x37\x4d\x31\x6e\x30\x73\x58\x38\
\x4e\x49\x46\x49\x68\x62\x76\x4c\x7a\x51\x34\x73\x48\x78\x45\x30\
\x51\x39\x2f\x42\x72\x58\x57\x34\x5a\x52\x4b\x46\x57\x51\x6e\x33\
\x4c\x49\x43\x73\x2f\x69\x41\x7a\x41\x0a\x41\x41\x7a\x41\x41\x41\
\x7a\x41\x41\x41\x7a\x41\x41\x41\x77\x41\x43\x69\x2b\x6e\x35\x39\
\x57\x73\x6d\x48\x30\x43\x6a\x73\x52\x45\x35\x37\x4b\x79\x74\x38\
\x41\x41\x41\x41\x41\x41\x53\x55\x56\x4f\x52\x4b\x35\x43\x59\x49\
\x49\x3d\x0a\x22\x0a\x20\x20\x20\x20\x20\x69\x64\x3d\x22\x69\x6d\
\x61\x67\x65\x33\x34\x34\x34\x22\x0a\x20\x20\x20\x20\x20\x78\x3d\
\x22\x30\x22\x0a\x20\x20\x20\x20\x20\x79\x3d\x22\x30\x22\x20\x2f\
\x3e\x0a\x3c\x2f\x73\x76\x67\x3e\x0a\
\x00\x00\x0e\x58\
\x3c\
\x3f\x78\x6d\x6c\x20\x76\x65\x72\x73\x69\x6f\x6e\x3d\x22\x31\x2e\
\x30\x22\x20\x65\x6e\x63\x6f\x64\x69\x6e\x67\x3d\x22\x55\x54\x46\
\x2d\x38\x22\x20\x73\x74\x61\x6e\x64\x61\x6c\x6f\x6e\x65\x3d\x22\
\x6e\x6f\x22\x3f\x3e\x0a\x3c\x21\x2d\x2d\x20\x43\x72\x65\x61\x74\
\x65\x64\x20\x77\x69\x74\x68\x20\x49\x6e\x6b\x73\x63\x61\x70\x65\
\x20\x28\x68\x74\x74\x70\x3a\x2f\x2f\x77\x77\x77\x2e\x69\x6e\x6b\
\x73\x63\x61\x70\x65\x2e\x6f\x72\x67\x2f\x29\x20\x2d\x2d\x3e\x0a\
\x0a\x3c\x73\x76\x67\x0a\x20\x20\x20\x78\x6d\x6c\x6e\x73\x3a\x64\
\x63\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x70\x75\x72\x6c\x2e\x6f\
\x72\x67\x2f\x64\x63\x2f\x65\x6c\x65\x6d\x65\x6e\x74\x73\x2f\x31\
\x2e\x31\x2f\x22\x0a\x20\x20\x20\x78\x6d\x6c\x6e\x73\x3a\x63\x63\
\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x63\x72\x65\x61\x74\x69\x76\
\x65\x63\x6f\x6d\x6d\x6f\x6e\x73\x2e\x6f\x72\x67\x2f\x6e\x73\x23\
\x22\x0a\x20\x20\x20\x78\x6d\x6c\x6e\x73\x3a\x72\x64\x66\x3d\x22\
\x68\x74\x74\x70\x3a\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\
\x67\x2f\x31\x39\x39\x39\x2f\x30\x32\x2f\x32\x32\x2d\x72\x64\x66\
\x2d\x73\x79\x6e\x74\x61\x78\x2d\x6e\x73\x23\x22\x0a\x20\x20\x20\
\x78\x6d\x6c\x6e\x73\x3a\x73\x76\x67\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x0a\x20\x20\x20\x78\x6d\x6c\x6e\x73\x3d\
\x22\x68\x74\x74\x70\x3a\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\
\x72\x67\x2f\x32\x30\x30\x30\x2f\x73\x76\x67\x22\x0a\x20\x20\x20\
\x78\x6d\x6c\x6e\x73\x3a\x78\x6c\x69\x6e\x6b\x3d\x22\x68\x74\x74\
\x70\x3a\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x31\
\x39\x39\x39\x2f\x78\x6c\x69\x6e\x6b\x22\x0a\x20\x20\x20\x78\x6d\
\x6c\x6e\x73\x3a\x73\x6f\x64\x69\x70\x6f\x64\x69\x3d\x22\x68\x74\
\x74\x70\x3a\x2f\x2f\x73\x6f\x64\x69\x70\x6f\x64\x69\x2e\x73\x6f\
\x75\x72\x63\x65\x66\x6f\x72\x67\x65\x2e\x6e\x65\x74\x2f\x44\x54\
\x44\x2f\x73\x6f\x64\x69\x70\x6f\x64\x69\x2d\x30\x2e\x64\x74\x64\
\x22\x0a\x20\x20\x20\x78\x6d\x6c\x6e\x73\x3a\x69\x6e\x6b\x73\x63\
\x61\x70\x65\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x77\x77\x77\x2e\
\x69\x6e\x6b\x73\x63\x61\x70\x65\x2e\x6f\x72\x67\x2f\x6e\x61\x6d\
\x65\x73\x70\x61\x63\x65\x73\x2f\x69\x6e\x6b\x73\x63\x61\x70\x65\
\x22\x0a\x20\x20\x20\x69\x64\x3d\x22\x73\x76\x67\x34\x35\x35\x37\
\x22\x0a\x20\x20\x20\x76\x65\x72\x73\x69\x6f\x6e\x3d\x22\x31\x2e\
\x31\x22\x0a\x20\x20\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\x76\
\x65\x72\x73\x69\x6f\x6e\x3d\x22\x30\x2e\x34\x38\x2e\x33\x2e\x31\
\x20\x72\x39\x38\x38\x36\x22\x0a\x20\x20\x20\x77\x69\x64\x74\x68\
\x3d\x22\x33\x32\x22\x0a\x20\x20\x20\x68\x65\x69\x67\x68\x74\x3d\
\x22\x33\x32\x22\x0a\x20\x20\x20\x73\x6f\x64\x69\x70\x6f\x64\x69\
\x3a\x64\x6f\x63\x6e\x61\x6d\x65\x3d\x22\x72\x65\x6d\x6f\x76\x65\
\x5f\x72\x65\x64\x2e\x73\x76\x67\x22\x3e\x0a\x20\x20\x3c\x6d\x65\
\x74\x61\x64\x61\x74\x61\x0a\x20\x20\x20\x20\x20\x69\x64\x3d\x22\
\x6d\x65\x74\x61\x64\x61\x74\x61\x34\x35\x36\x33\x22\x3e\x0a\x20\
\x20\x20\x20\x3c\x72\x64\x66\x3a\x52\x44\x46\x3e\x0a\x20\x20\x20\
\x20\x20\x20\x3c\x63\x63\x3a\x57\x6f\x72\x6b\x0a\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x72\x64\x66\x3a\x61\x62\x6f\x75\x74\x3d\x22\
\x22\x3e\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x3c\x64\x63\x3a\x66\
\x6f\x72\x6d\x61\x74\x3e\x69\x6d\x61\x67\x65\x2f\x73\x76\x67\x2b\
\x78\x6d\x6c\x3c\x2f\x64\x63\x3a\x66\x6f\x72\x6d\x61\x74\x3e\x0a\
\x20\x20\x20\x20\x20\x20\x20\x20\x3c\x64\x63\x3a\x74\x79\x70\x65\
\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x72\x64\x66\x3a\
\x72\x65\x73\x6f\x75\x72\x63\x65\x3d\x22\x68\x74\x74\x70\x3a\x2f\
\x2f\x70\x75\x72\x6c\x2e\x6f\x72\x67\x2f\x64\x63\x2f\x64\x63\x6d\
\x69\x74\x79\x70\x65\x2f\x53\x74\x69\x6c\x6c\x49\x6d\x61\x67\x65\
\x22\x20\x2f\x3e\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x3c\x64\x63\
\x3a\x74\x69\x74\x6c\x65\x3e\x3c\x2f\x64\x63\x3a\x74\x69\x74\x6c\
\x65\x3e\x0a\x20\x20\x20\x20\x20\x20\x3c\x2f\x63\x63\x3a\x57\x6f\
\x72\x6b\x3e\x0a\x20\x20\x20\x20\x3c\x2f\x72\x64\x66\x3a\x52\x44\
\x46\x3e\x0a\x20\x20\x3c\x2f\x6d\x65\x74\x61\x64\x61\x74\x61\x3e\
\x0a\x20\x20\x3c\x64\x65\x66\x73\x0a\x20\x20\x20\x20\x20\x69\x64\
\x3d\x22\x64\x65\x66\x73\x34\x35\x36\x31\x22\x20\x2f\x3e\x0a\x20\
\x20\x3c\x73\x6f\x64\x69\x70\x6f\x64\x69\x3a\x6e\x61\x6d\x65\x64\
\x76\x69\x65\x77\x0a\x20\x20\x20\x20\x20\x70\x61\x67\x65\x63\x6f\
\x6c\x6f\x72\x3d\x22\x23\x66\x66\x66\x66\x66\x66\x22\x0a\x20\x20\
\x20\x20\x20\x62\x6f\x72\x64\x65\x72\x63\x6f\x6c\x6f\x72\x3d\x22\
\x23\x36\x36\x36\x36\x36\x36\x22\x0a\x20\x20\x20\x20\x20\x62\x6f\
\x72\x64\x65\x72\x6f\x70\x61\x63\x69\x74\x79\x3d\x22\x31\x22\x0a\
\x20\x20\x20\x20\x20\x6f\x62\x6a\x65\x63\x74\x74\x6f\x6c\x65\x72\
\x61\x6e\x63\x65\x3d\x22\x31\x30\x22\x0a\x20\x20\x20\x20\x20\x67\
\x72\x69\x64\x74\x6f\x6c\x65\x72\x61\x6e\x63\x65\x3d\x22\x31\x30\
\x22\x0a\x20\x20\x20\x20\x20\x67\x75\x69\x64\x65\x74\x6f\x6c\x65\
\x72\x61\x6e\x63\x65\x3d\x22\x31\x30\x22\x0a\x20\x20\x20\x20\x20\
\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\x70\x61\x67\x65\x6f\x70\x61\
\x63\x69\x74\x79\x3d\x22\x30\x22\x0a\x20\x20\x20\x20\x20\x69\x6e\
\x6b\x73\x63\x61\x70\x65\x3a\x70\x61\x67\x65\x73\x68\x61\x64\x6f\
\x77\x3d\x22\x32\x22\x0a\x20\x20\x20\x20\x20\x69\x6e\x6b\x73\x63\
\x61\x70\x65\x3a\x77\x69\x6e\x64\x6f\x77\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x31\x33\x36\x34\x22\x0a\x20\x20\x20\x20\x20\x69\x6e\x6b\
\x73\x63\x61\x70\x65\x3a\x77\x69\x6e\x64\x6f\x77\x2d\x68\x65\x69\
\x67\x68\x74\x3d\x22\x37\x35\x30\x22\x0a\x20\x20\x20\x20\x20\x69\
\x64\x3d\x22\x6e\x61\x6d\x65\x64\x76\x69\x65\x77\x34\x35\x35\x39\
\x22\x0a\x20\x20\x20\x20\x20\x73\x68\x6f\x77\x67\x72\x69\x64\x3d\
\x22\x66\x61\x6c\x73\x65\x22\x0a\x20\x20\x20\x20\x20\x69\x6e\x6b\
\x73\x63\x61\x70\x65\x3a\x7a\x6f\x6f\x6d\x3d\x22\x37\x2e\x33\x37\
\x35\x22\x0a\x20\x20\x20\x20\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\
\x3a\x63\x78\x3d\x22\x32\x36\x2e\x32\x31\x34\x39\x33\x36\x22\x0a\
\x20\x20\x20\x20\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\x63\x79\
\x3d\x22\x31\x39\x2e\x37\x32\x38\x38\x31\x34\x22\x0a\x20\x20\x20\
\x20\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\x77\x69\x6e\x64\x6f\
\x77\x2d\x78\x3d\x22\x30\x22\x0a\x20\x20\x20\x20\x20\x69\x6e\x6b\
\x73\x63\x61\x70\x65\x3a\x77\x69\x6e\x64\x6f\x77\x2d\x79\x3d\x22\
\x31\x36\x22\x0a\x20\x20\x20\x20\x20\x69\x6e\x6b\x73\x63\x61\x70\
\x65\x3a\x77\x69\x6e\x64\x6f\x77\x2d\x6d\x61\x78\x69\x6d\x69\x7a\
\x65\x64\x3d\x22\x30\x22\x0a\x20\x20\x20\x20\x20\x69\x6e\x6b\x73\
\x63\x61\x70\x65\x3a\x63\x75\x72\x72\x65\x6e\x74\x2d\x6c\x61\x79\
\x65\x72\x3d\x22\x73\x76\x67\x34\x35\x35\x37\x22\x0a\x20\x20\x20\
\x20\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\x73\x6e\x61\x70\x2d\
\x67\x6c\x6f\x62\x61\x6c\x3d\x22\x66\x61\x6c\x73\x65\x22\x20\x2f\
\x3e\x0a\x20\x20\x3c\x69\x6d\x61\x67\x65\x0a\x20\x20\x20\x20\x20\
\x77\x69\x64\x74\x68\x3d\x22\x33\x32\x22\x0a\x20\x20\x20\x20\x20\
\x68\x65\x69\x67\x68\x74\x3d\x22\x33\x32\x22\x0a\x20\x20\x20\x20\
\x20\x78\x6c\x69\x6e\x6b\x3a\x68\x72\x65\x66\x3d\x22\x64\x61\x74\
\x61\x3a\x69\x6d\x61\x67\x65\x2f\x70\x6e\x67\x3b\x62\x61\x73\x65\
\x36\x34\x2c\x69\x56\x42\x4f\x52\x77\x30\x4b\x47\x67\x6f\x41\x41\
\x41\x41\x4e\x53\x55\x68\x45\x55\x67\x41\x41\x41\x43\x41\x41\x41\
\x41\x41\x67\x43\x41\x59\x41\x41\x41\x42\x7a\x65\x6e\x72\x30\x41\
\x41\x41\x41\x42\x48\x4e\x43\x53\x56\x51\x49\x43\x41\x67\x49\x66\
\x41\x68\x6b\x69\x41\x41\x41\x41\x44\x56\x4a\x52\x45\x46\x55\x0a\
\x57\x49\x58\x74\x31\x7a\x45\x42\x41\x44\x41\x4d\x77\x7a\x42\x33\
\x2f\x44\x6c\x33\x4d\x50\x6f\x6f\x42\x4b\x77\x33\x55\x32\x33\x48\
\x75\x77\x54\x73\x4f\x34\x78\x58\x42\x51\x41\x41\x41\x41\x41\x41\
\x41\x41\x41\x41\x41\x41\x41\x41\x41\x41\x41\x77\x48\x64\x2f\x7a\
\x44\x78\x72\x74\x42\x44\x6b\x45\x71\x4e\x62\x67\x0a\x41\x41\x41\
\x41\x41\x45\x6c\x46\x54\x6b\x53\x75\x51\x6d\x43\x43\x0a\x22\x0a\
\x20\x20\x20\x20\x20\x69\x64\x3d\x22\x69\x6d\x61\x67\x65\x34\x35\
\x36\x35\x22\x0a\x20\x20\x20\x20\x20\x78\x3d\x22\x30\x22\x0a\x20\
\x20\x20\x20\x20\x79\x3d\x22\x30\x22\x20\x2f\x3e\x0a\x20\x20\x3c\
\x74\x65\x78\x74\x0a\x20\x20\x20\x20\x20\x78\x6d\x6c\x3a\x73\x70\
\x61\x63\x65\x3d\x22\x70\x72\x65\x73\x65\x72\x76\x65\x22\x0a\x20\
\x20\x20\x20\x20\x73\x74\x79\x6c\x65\x3d\x22\x66\x6f\x6e\x74\x2d\
\x73\x69\x7a\x65\x3a\x34\x30\x70\x78\x3b\x66\x6f\x6e\x74\x2d\x73\
\x74\x79\x6c\x65\x3a\x6e\x6f\x72\x6d\x61\x6c\x3b\x66\x6f\x6e\x74\
\x2d\x77\x65\x69\x67\x68\x74\x3a\x6e\x6f\x72\x6d\x61\x6c\x3b\x6c\
\x69\x6e\x65\x2d\x68\x65\x69\x67\x68\x74\x3a\x31\x32\x35\x25\x3b\
\x6c\x65\x74\x74\x65\x72\x2d\x73\x70\x61\x63\x69\x6e\x67\x3a\x30\
\x70\x78\x3b\x77\x6f\x72\x64\x2d\x73\x70\x61\x63\x69\x6e\x67\x3a\
\x30\x70\x78\x3b\x66\x69\x6c\x6c\x3a\x23\x30\x30\x30\x30\x30\x30\
\x3b\x66\x69\x6c\x6c\x2d\x6f\x70\x61\x63\x69\x74\x79\x3a\x31\x3b\
\x73\x74\x72\x6f\x6b\x65\x3a\x6e\x6f\x6e\x65\x3b\x66\x6f\x6e\x74\
\x2d\x66\x61\x6d\x69\x6c\x79\x3a\x53\x61\x6e\x73\x22\x0a\x20\x20\
\x20\x20\x20\x78\x3d\x22\x31\x31\x2e\x36\x36\x31\x30\x31\x37\x22\
\x0a\x20\x20\x20\x20\x20\x79\x3d\x22\x39\x2e\x33\x35\x35\x39\x33\
\x32\x32\x22\x0a\x20\x20\x20\x20\x20\x69\x64\x3d\x22\x74\x65\x78\
\x74\x35\x34\x31\x38\x22\x0a\x20\x20\x20\x20\x20\x73\x6f\x64\x69\
\x70\x6f\x64\x69\x3a\x6c\x69\x6e\x65\x73\x70\x61\x63\x69\x6e\x67\
\x3d\x22\x31\x32\x35\x25\x22\x3e\x3c\x74\x73\x70\x61\x6e\x0a\x20\
\x20\x20\x20\x20\x20\x20\x73\x6f\x64\x69\x70\x6f\x64\x69\x3a\x72\
\x6f\x6c\x65\x3d\x22\x6c\x69\x6e\x65\x22\x0a\x20\x20\x20\x20\x20\
\x20\x20\x69\x64\x3d\x22\x74\x73\x70\x61\x6e\x35\x34\x32\x30\x22\
\x0a\x20\x20\x20\x20\x20\x20\x20\x78\x3d\x22\x31\x31\x2e\x36\x36\
\x31\x30\x31\x37\x22\x0a\x20\x20\x20\x20\x20\x20\x20\x79\x3d\x22\
\x39\x2e\x33\x35\x35\x39\x33\x32\x32\x22\x20\x2f\x3e\x3c\x2f\x74\
\x65\x78\x74\x3e\x0a\x20\x20\x3c\x74\x65\x78\x74\x0a\x20\x20\x20\
\x20\x20\x78\x6d\x6c\x3a\x73\x70\x61\x63\x65\x3d\x22\x70\x72\x65\
\x73\x65\x72\x76\x65\x22\x0a\x20\x20\x20\x20\x20\x73\x74\x79\x6c\
\x65\x3d\x22\x66\x6f\x6e\x74\x2d\x73\x69\x7a\x65\x3a\x34\x30\x70\
\x78\x3b\x66\x6f\x6e\x74\x2d\x73\x74\x79\x6c\x65\x3a\x6e\x6f\x72\
\x6d\x61\x6c\x3b\x66\x6f\x6e\x74\x2d\x77\x65\x69\x67\x68\x74\x3a\
\x6e\x6f\x72\x6d\x61\x6c\x3b\x6c\x69\x6e\x65\x2d\x68\x65\x69\x67\
\x68\x74\x3a\x31\x32\x35\x25\x3b\x6c\x65\x74\x74\x65\x72\x2d\x73\
\x70\x61\x63\x69\x6e\x67\x3a\x30\x70\x78\x3b\x77\x6f\x72\x64\x2d\
\x73\x70\x61\x63\x69\x6e\x67\x3a\x30\x70\x78\x3b\x66\x69\x6c\x6c\
\x3a\x23\x66\x66\x30\x30\x30\x30\x3b\x66\x69\x6c\x6c\x2d\x6f\x70\
\x61\x63\x69\x74\x79\x3a\x31\x3b\x73\x74\x72\x6f\x6b\x65\x3a\x6e\
\x6f\x6e\x65\x3b\x66\x6f\x6e\x74\x2d\x66\x61\x6d\x69\x6c\x79\x3a\
\x53\x61\x6e\x73\x22\x0a\x20\x20\x20\x20\x20\x78\x3d\x22\x35\x2e\
\x39\x36\x36\x31\x30\x31\x36\x22\x0a\x20\x20\x20\x20\x20\x79\x3d\
\x22\x32\x37\x2e\x31\x31\x38\x36\x34\x35\x22\x0a\x20\x20\x20\x20\
\x20\x69\x64\x3d\x22\x74\x65\x78\x74\x36\x34\x38\x34\x22\x0a\x20\
\x20\x20\x20\x20\x73\x6f\x64\x69\x70\x6f\x64\x69\x3a\x6c\x69\x6e\
\x65\x73\x70\x61\x63\x69\x6e\x67\x3d\x22\x31\x32\x35\x25\x22\x3e\
\x3c\x74\x73\x70\x61\x6e\x0a\x20\x20\x20\x20\x20\x20\x20\x73\x6f\
\x64\x69\x70\x6f\x64\x69\x3a\x72\x6f\x6c\x65\x3d\x22\x6c\x69\x6e\
\x65\x22\x0a\x20\x20\x20\x20\x20\x20\x20\x69\x64\x3d\x22\x74\x73\
\x70\x61\x6e\x36\x34\x38\x36\x22\x0a\x20\x20\x20\x20\x20\x20\x20\
\x78\x3d\x22\x35\x2e\x39\x36\x36\x31\x30\x31\x36\x22\x0a\x20\x20\
\x20\x20\x20\x20\x20\x79\x3d\x22\x32\x37\x2e\x31\x31\x38\x36\x34\
\x35\x22\x3e\x72\x3c\x2f\x74\x73\x70\x61\x6e\x3e\x3c\x2f\x74\x65\
\x78\x74\x3e\x0a\x20\x20\x3c\x74\x65\x78\x74\x0a\x20\x20\x20\x20\
\x20\x78\x6d\x6c\x3a\x73\x70\x61\x63\x65\x3d\x22\x70\x72\x65\x73\
\x65\x72\x76\x65\x22\x0a\x20\x20\x20\x20\x20\x73\x74\x79\x6c\x65\
\x3d\x22\x66\x6f\x6e\x74\x2d\x73\x69\x7a\x65\x3a\x31\x32\x2e\x35\
\x39\x33\x33\x34\x39\x34\x36\x30\x30\x30\x30\x30\x30\x30\x36\x36\
\x70\x78\x3b\x66\x6f\x6e\x74\x2d\x73\x74\x79\x6c\x65\x3a\x6e\x6f\
\x72\x6d\x61\x6c\x3b\x66\x6f\x6e\x74\x2d\x77\x65\x69\x67\x68\x74\
\x3a\x6e\x6f\x72\x6d\x61\x6c\x3b\x6c\x69\x6e\x65\x2d\x68\x65\x69\
\x67\x68\x74\x3a\x31\x32\x35\x25\x3b\x6c\x65\x74\x74\x65\x72\x2d\
\x73\x70\x61\x63\x69\x6e\x67\x3a\x30\x70\x78\x3b\x77\x6f\x72\x64\
\x2d\x73\x70\x61\x63\x69\x6e\x67\x3a\x30\x70\x78\x3b\x66\x69\x6c\
\x6c\x3a\x23\x30\x30\x30\x30\x66\x66\x3b\x66\x69\x6c\x6c\x2d\x6f\
\x70\x61\x63\x69\x74\x79\x3a\x31\x3b\x73\x74\x72\x6f\x6b\x65\x3a\
\x6e\x6f\x6e\x65\x3b\x66\x6f\x6e\x74\x2d\x66\x61\x6d\x69\x6c\x79\
\x3a\x53\x61\x6e\x73\x22\x0a\x20\x20\x20\x20\x20\x78\x3d\x22\x31\
\x32\x2e\x37\x31\x33\x34\x33\x32\x22\x0a\x20\x20\x20\x20\x20\x79\
\x3d\x22\x31\x39\x2e\x38\x38\x36\x35\x37\x32\x22\x0a\x20\x20\x20\
\x20\x20\x69\x64\x3d\x22\x74\x65\x78\x74\x36\x34\x38\x38\x22\x0a\
\x20\x20\x20\x20\x20\x73\x6f\x64\x69\x70\x6f\x64\x69\x3a\x6c\x69\
\x6e\x65\x73\x70\x61\x63\x69\x6e\x67\x3d\x22\x31\x32\x35\x25\x22\
\x0a\x20\x20\x20\x20\x20\x74\x72\x61\x6e\x73\x66\x6f\x72\x6d\x3d\
\x22\x73\x63\x61\x6c\x65\x28\x31\x2e\x31\x34\x35\x38\x30\x38\x34\
\x2c\x30\x2e\x38\x37\x32\x37\x34\x36\x32\x37\x29\x22\x3e\x3c\x74\
\x73\x70\x61\x6e\x0a\x20\x20\x20\x20\x20\x20\x20\x73\x6f\x64\x69\
\x70\x6f\x64\x69\x3a\x72\x6f\x6c\x65\x3d\x22\x6c\x69\x6e\x65\x22\
\x0a\x20\x20\x20\x20\x20\x20\x20\x69\x64\x3d\x22\x74\x73\x70\x61\
\x6e\x36\x34\x39\x30\x22\x0a\x20\x20\x20\x20\x20\x20\x20\x78\x3d\
\x22\x31\x32\x2e\x37\x31\x33\x34\x33\x32\x22\x0a\x20\x20\x20\x20\
\x20\x20\x20\x79\x3d\x22\x31\x39\x2e\x38\x38\x36\x35\x37\x32\x22\
\x3e\x78\x3c\x2f\x74\x73\x70\x61\x6e\x3e\x3c\x2f\x74\x65\x78\x74\
\x3e\x0a\x20\x20\x3c\x74\x65\x78\x74\x0a\x20\x20\x20\x20\x20\x78\
\x6d\x6c\x3a\x73\x70\x61\x63\x65\x3d\x22\x70\x72\x65\x73\x65\x72\
\x76\x65\x22\x0a\x20\x20\x20\x20\x20\x73\x74\x79\x6c\x65\x3d\x22\
\x66\x6f\x6e\x74\x2d\x73\x69\x7a\x65\x3a\x31\x32\x2e\x32\x39\x30\
\x36\x30\x37\x34\x34\x39\x39\x39\x39\x39\x39\x39\x35\x35\x70\x78\
\x3b\x66\x6f\x6e\x74\x2d\x73\x74\x79\x6c\x65\x3a\x6e\x6f\x72\x6d\
\x61\x6c\x3b\x66\x6f\x6e\x74\x2d\x77\x65\x69\x67\x68\x74\x3a\x6e\
\x6f\x72\x6d\x61\x6c\x3b\x6c\x69\x6e\x65\x2d\x68\x65\x69\x67\x68\
\x74\x3a\x31\x32\x35\x25\x3b\x6c\x65\x74\x74\x65\x72\x2d\x73\x70\
\x61\x63\x69\x6e\x67\x3a\x30\x70\x78\x3b\x77\x6f\x72\x64\x2d\x73\
\x70\x61\x63\x69\x6e\x67\x3a\x30\x70\x78\x3b\x66\x69\x6c\x6c\x3a\
\x23\x30\x30\x30\x30\x66\x66\x3b\x66\x69\x6c\x6c\x2d\x6f\x70\x61\
\x63\x69\x74\x79\x3a\x31\x3b\x73\x74\x72\x6f\x6b\x65\x3a\x6e\x6f\
\x6e\x65\x3b\x66\x6f\x6e\x74\x2d\x66\x61\x6d\x69\x6c\x79\x3a\x53\
\x61\x6e\x73\x22\x0a\x20\x20\x20\x20\x20\x78\x3d\x22\x31\x34\x2e\
\x32\x35\x30\x32\x38\x32\x22\x0a\x20\x20\x20\x20\x20\x79\x3d\x22\
\x32\x37\x2e\x35\x34\x39\x30\x31\x37\x22\x0a\x20\x20\x20\x20\x20\
\x69\x64\x3d\x22\x74\x65\x78\x74\x36\x34\x39\x32\x22\x0a\x20\x20\
\x20\x20\x20\x73\x6f\x64\x69\x70\x6f\x64\x69\x3a\x6c\x69\x6e\x65\
\x73\x70\x61\x63\x69\x6e\x67\x3d\x22\x31\x32\x35\x25\x22\x0a\x20\
\x20\x20\x20\x20\x74\x72\x61\x6e\x73\x66\x6f\x72\x6d\x3d\x22\x73\
\x63\x61\x6c\x65\x28\x31\x2e\x30\x32\x31\x33\x38\x32\x33\x2c\x30\
\x2e\x39\x37\x39\x30\x36\x35\x33\x33\x29\x22\x3e\x3c\x74\x73\x70\
\x61\x6e\x0a\x20\x20\x20\x20\x20\x20\x20\x73\x6f\x64\x69\x70\x6f\
\x64\x69\x3a\x72\x6f\x6c\x65\x3d\x22\x6c\x69\x6e\x65\x22\x0a\x20\
\x20\x20\x20\x20\x20\x20\x69\x64\x3d\x22\x74\x73\x70\x61\x6e\x36\
\x34\x39\x34\x22\x0a\x20\x20\x20\x20\x20\x20\x20\x78\x3d\x22\x31\
\x34\x2e\x32\x35\x30\x32\x38\x32\x22\x0a\x20\x20\x20\x20\x20\x20\
\x20\x79\x3d\x22\x32\x37\x2e\x35\x34\x39\x30\x31\x37\x22\x3e\x30\
\x3c\x2f\x74\x73\x70\x61\x6e\x3e\x3c\x2f\x74\x65\x78\x74\x3e\x0a\
\x3c\x2f\x73\x76\x67\x3e\x0a\
\x00\x00\x09\xc2\
\x3c\
\x3f\x78\x6d\x6c\x20\x76\x65\x72\x73\x69\x6f\x6e\x3d\x22\x31\x2e\
\x30\x22\x20\x65\x6e\x63\x6f\x64\x69\x6e\x67\x3d\x22\x55\x54\x46\
\x2d\x38\x22\x20\x73\x74\x61\x6e\x64\x61\x6c\x6f\x6e\x65\x3d\x22\
\x6e\x6f\x22\x3f\x3e\x0a\x3c\x21\x2d\x2d\x20\x43\x72\x65\x61\x74\
\x65\x64\x20\x77\x69\x74\x68\x20\x49\x6e\x6b\x73\x63\x61\x70\x65\
\x20\x28\x68\x74\x74\x70\x3a\x2f\x2f\x77\x77\x77\x2e\x69\x6e\x6b\
\x73\x63\x61\x70\x65\x2e\x6f\x72\x67\x2f\x29\x20\x2d\x2d\x3e\x0a\
\x0a\x3c\x73\x76\x67\x0a\x20\x20\x20\x78\x6d\x6c\x6e\x73\x3a\x64\
\x63\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x70\x75\x72\x6c\x2e\x6f\
\x72\x67\x2f\x64\x63\x2f\x65\x6c\x65\x6d\x65\x6e\x74\x73\x2f\x31\
\x2e\x31\x2f\x22\x0a\x20\x20\x20\x78\x6d\x6c\x6e\x73\x3a\x63\x63\
\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x63\x72\x65\x61\x74\x69\x76\
\x65\x63\x6f\x6d\x6d\x6f\x6e\x73\x2e\x6f\x72\x67\x2f\x6e\x73\x23\
\x22\x0a\x20\x20\x20\x78\x6d\x6c\x6e\x73\x3a\x72\x64\x66\x3d\x22\
\x68\x74\x74\x70\x3a\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\
\x67\x2f\x31\x39\x39\x39\x2f\x30\x32\x2f\x32\x32\x2d\x72\x64\x66\
\x2d\x73\x79\x6e\x74\x61\x78\x2d\x6e\x73\x23\x22\x0a\x20\x20\x20\
\x78\x6d\x6c\x6e\x73\x3a\x73\x76\x67\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x0a\x20\x20\x20\x78\x6d\x6c\x6e\x73\x3d\
\x22\x68\x74\x74\x70\x3a\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\
\x72\x67\x2f\x32\x30\x30\x30\x2f\x73\x76\x67\x22\x0a\x20\x20\x20\
\x78\x6d\x6c\x6e\x73\x3a\x78\x6c\x69\x6e\x6b\x3d\x22\x68\x74\x74\
\x70\x3a\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x31\
\x39\x39\x39\x2f\x78\x6c\x69\x6e\x6b\x22\x0a\x20\x20\x20\x78\x6d\
\x6c\x6e\x73\x3a\x73\x6f\x64\x69\x70\x6f\x64\x69\x3d\x22\x68\x74\
\x74\x70\x3a\x2f\x2f\x73\x6f\x64\x69\x70\x6f\x64\x69\x2e\x73\x6f\
\x75\x72\x63\x65\x66\x6f\x72\x67\x65\x2e\x6e\x65\x74\x2f\x44\x54\
\x44\x2f\x73\x6f\x64\x69\x70\x6f\x64\x69\x2d\x30\x2e\x64\x74\x64\
\x22\x0a\x20\x20\x20\x78\x6d\x6c\x6e\x73\x3a\x69\x6e\x6b\x73\x63\
\x61\x70\x65\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x77\x77\x77\x2e\
\x69\x6e\x6b\x73\x63\x61\x70\x65\x2e\x6f\x72\x67\x2f\x6e\x61\x6d\
\x65\x73\x70\x61\x63\x65\x73\x2f\x69\x6e\x6b\x73\x63\x61\x70\x65\
\x22\x0a\x20\x20\x20\x69\x64\x3d\x22\x73\x76\x67\x33\x36\x39\x35\
\x22\x0a\x20\x20\x20\x76\x65\x72\x73\x69\x6f\x6e\x3d\x22\x31\x2e\
\x31\x22\x0a\x20\x20\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\x76\
\x65\x72\x73\x69\x6f\x6e\x3d\x22\x30\x2e\x34\x38\x2e\x33\x2e\x31\
\x20\x72\x39\x38\x38\x36\x22\x0a\x20\x20\x20\x77\x69\x64\x74\x68\
\x3d\x22\x33\x32\x22\x0a\x20\x20\x20\x68\x65\x69\x67\x68\x74\x3d\
\x22\x33\x32\x22\x0a\x20\x20\x20\x73\x6f\x64\x69\x70\x6f\x64\x69\
\x3a\x64\x6f\x63\x6e\x61\x6d\x65\x3d\x22\x70\x68\x61\x73\x65\x5f\
\x64\x65\x6c\x61\x79\x2e\x69\x63\x6f\x22\x3e\x0a\x20\x20\x3c\x6d\
\x65\x74\x61\x64\x61\x74\x61\x0a\x20\x20\x20\x20\x20\x69\x64\x3d\
\x22\x6d\x65\x74\x61\x64\x61\x74\x61\x33\x37\x30\x31\x22\x3e\x0a\
\x20\x20\x20\x20\x3c\x72\x64\x66\x3a\x52\x44\x46\x3e\x0a\x20\x20\
\x20\x20\x20\x20\x3c\x63\x63\x3a\x57\x6f\x72\x6b\x0a\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x72\x64\x66\x3a\x61\x62\x6f\x75\x74\x3d\
\x22\x22\x3e\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x3c\x64\x63\x3a\
\x66\x6f\x72\x6d\x61\x74\x3e\x69\x6d\x61\x67\x65\x2f\x73\x76\x67\
\x2b\x78\x6d\x6c\x3c\x2f\x64\x63\x3a\x66\x6f\x72\x6d\x61\x74\x3e\
\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x3c\x64\x63\x3a\x74\x79\x70\
\x65\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x72\x64\x66\
\x3a\x72\x65\x73\x6f\x75\x72\x63\x65\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x70\x75\x72\x6c\x2e\x6f\x72\x67\x2f\x64\x63\x2f\x64\x63\
\x6d\x69\x74\x79\x70\x65\x2f\x53\x74\x69\x6c\x6c\x49\x6d\x61\x67\
\x65\x22\x20\x2f\x3e\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x3c\x64\
\x63\x3a\x74\x69\x74\x6c\x65\x3e\x3c\x2f\x64\x63\x3a\x74\x69\x74\
\x6c\x65\x3e\x0a\x20\x20\x20\x20\x20\x20\x3c\x2f\x63\x63\x3a\x57\
\x6f\x72\x6b\x3e\x0a\x20\x20\x20\x20\x3c\x2f\x72\x64\x66\x3a\x52\
\x44\x46\x3e\x0a\x20\x20\x3c\x2f\x6d\x65\x74\x61\x64\x61\x74\x61\
\x3e\x0a\x20\x20\x3c\x64\x65\x66\x73\x0a\x20\x20\x20\x20\x20\x69\
\x64\x3d\x22\x64\x65\x66\x73\x33\x36\x39\x39\x22\x20\x2f\x3e\x0a\
\x20\x20\x3c\x73\x6f\x64\x69\x70\x6f\x64\x69\x3a\x6e\x61\x6d\x65\
\x64\x76\x69\x65\x77\x0a\x20\x20\x20\x20\x20\x70\x61\x67\x65\x63\
\x6f\x6c\x6f\x72\x3d\x22\x23\x66\x66\x66\x66\x66\x66\x22\x0a\x20\
\x20\x20\x20\x20\x62\x6f\x72\x64\x65\x72\x63\x6f\x6c\x6f\x72\x3d\
\x22\x23\x36\x36\x36\x36\x36\x36\x22\x0a\x20\x20\x20\x20\x20\x62\
\x6f\x72\x64\x65\x72\x6f\x70\x61\x63\x69\x74\x79\x3d\x22\x31\x22\
\x0a\x20\x20\x20\x20\x20\x6f\x62\x6a\x65\x63\x74\x74\x6f\x6c\x65\
\x72\x61\x6e\x63\x65\x3d\x22\x31\x30\x22\x0a\x20\x20\x20\x20\x20\
\x67\x72\x69\x64\x74\x6f\x6c\x65\x72\x61\x6e\x63\x65\x3d\x22\x31\
\x30\x22\x0a\x20\x20\x20\x20\x20\x67\x75\x69\x64\x65\x74\x6f\x6c\
\x65\x72\x61\x6e\x63\x65\x3d\x22\x31\x30\x22\x0a\x20\x20\x20\x20\
\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\x70\x61\x67\x65\x6f\x70\
\x61\x63\x69\x74\x79\x3d\x22\x30\x22\x0a\x20\x20\x20\x20\x20\x69\
\x6e\x6b\x73\x63\x61\x70\x65\x3a\x70\x61\x67\x65\x73\x68\x61\x64\
\x6f\x77\x3d\x22\x32\x22\x0a\x20\x20\x20\x20\x20\x69\x6e\x6b\x73\
\x63\x61\x70\x65\x3a\x77\x69\x6e\x64\x6f\x77\x2d\x77\x69\x64\x74\
\x68\x3d\x22\x31\x33\x36\x34\x22\x0a\x20\x20\x20\x20\x20\x69\x6e\
\x6b\x73\x63\x61\x70\x65\x3a\x77\x69\x6e\x64\x6f\x77\x2d\x68\x65\
\x69\x67\x68\x74\x3d\x22\x37\x35\x30\x22\x0a\x20\x20\x20\x20\x20\
\x69\x64\x3d\x22\x6e\x61\x6d\x65\x64\x76\x69\x65\x77\x33\x36\x39\
\x37\x22\x0a\x20\x20\x20\x20\x20\x73\x68\x6f\x77\x67\x72\x69\x64\
\x3d\x22\x66\x61\x6c\x73\x65\x22\x0a\x20\x20\x20\x20\x20\x69\x6e\
\x6b\x73\x63\x61\x70\x65\x3a\x7a\x6f\x6f\x6d\x3d\x22\x37\x2e\x33\
\x37\x35\x22\x0a\x20\x20\x20\x20\x20\x69\x6e\x6b\x73\x63\x61\x70\
\x65\x3a\x63\x78\x3d\x22\x31\x36\x22\x0a\x20\x20\x20\x20\x20\x69\
\x6e\x6b\x73\x63\x61\x70\x65\x3a\x63\x79\x3d\x22\x31\x36\x22\x0a\
\x20\x20\x20\x20\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\x77\x69\
\x6e\x64\x6f\x77\x2d\x78\x3d\x22\x30\x22\x0a\x20\x20\x20\x20\x20\
\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\x77\x69\x6e\x64\x6f\x77\x2d\
\x79\x3d\x22\x31\x36\x22\x0a\x20\x20\x20\x20\x20\x69\x6e\x6b\x73\
\x63\x61\x70\x65\x3a\x77\x69\x6e\x64\x6f\x77\x2d\x6d\x61\x78\x69\
\x6d\x69\x7a\x65\x64\x3d\x22\x30\x22\x0a\x20\x20\x20\x20\x20\x69\
\x6e\x6b\x73\x63\x61\x70\x65\x3a\x63\x75\x72\x72\x65\x6e\x74\x2d\
\x6c\x61\x79\x65\x72\x3d\x22\x73\x76\x67\x33\x36\x39\x35\x22\x20\
\x2f\x3e\x0a\x20\x20\x3c\x69\x6d\x61\x67\x65\x0a\x20\x20\x20\x20\
\x20\x77\x69\x64\x74\x68\x3d\x22\x33\x32\x22\x0a\x20\x20\x20\x20\
\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x33\x32\x22\x0a\x20\x20\x20\
\x20\x20\x78\x6c\x69\x6e\x6b\x3a\x68\x72\x65\x66\x3d\x22\x64\x61\
\x74\x61\x3a\x69\x6d\x61\x67\x65\x2f\x70\x6e\x67\x3b\x62\x61\x73\
\x65\x36\x34\x2c\x69\x56\x42\x4f\x52\x77\x30\x4b\x47\x67\x6f\x41\
\x41\x41\x41\x4e\x53\x55\x68\x45\x55\x67\x41\x41\x41\x43\x41\x41\
\x41\x41\x41\x67\x43\x41\x59\x41\x41\x41\x42\x7a\x65\x6e\x72\x30\
\x41\x41\x41\x41\x42\x48\x4e\x43\x53\x56\x51\x49\x43\x41\x67\x49\
\x66\x41\x68\x6b\x69\x41\x41\x41\x41\x68\x68\x4a\x52\x45\x46\x55\
\x0a\x57\x49\x58\x74\x6c\x55\x39\x49\x56\x45\x45\x63\x78\x7a\x2f\
\x7a\x5a\x74\x72\x53\x4e\x6e\x64\x72\x55\x31\x6a\x37\x44\x39\x45\
\x68\x75\x6e\x52\x4d\x49\x76\x45\x53\x55\x53\x65\x68\x77\x45\x4f\
\x48\x4a\x4b\x78\x54\x70\x34\x49\x36\x47\x47\x4c\x64\x75\x6e\x6a\
\x74\x55\x6c\x32\x57\x6b\x6b\x54\x70\x55\x42\x30\x36\x0a\x43\x46\
\x4b\x75\x46\x37\x55\x4d\x53\x32\x6b\x58\x44\x52\x64\x69\x43\x5a\
\x58\x43\x72\x57\x7a\x66\x65\x7a\x73\x64\x4e\x6b\x72\x64\x43\x47\
\x62\x65\x32\x68\x4c\x73\x35\x2f\x53\x59\x65\x62\x38\x76\x48\x32\
\x62\x65\x37\x2f\x63\x6b\x51\x5a\x43\x7a\x4c\x59\x6a\x6c\x49\x2b\
\x69\x78\x53\x64\x73\x49\x4a\x35\x43\x41\x0a\x38\x43\x37\x67\x63\
\x43\x74\x49\x52\x44\x43\x42\x4d\x6c\x42\x78\x41\x57\x46\x65\x6f\
\x69\x56\x71\x70\x42\x4f\x64\x50\x6f\x6d\x49\x37\x49\x59\x50\x55\
\x66\x54\x32\x4d\x5a\x68\x37\x6a\x64\x39\x2f\x46\x5a\x4a\x4c\x35\
\x64\x64\x63\x69\x5a\x70\x34\x6a\x4c\x77\x79\x42\x51\x64\x44\x71\
\x4e\x51\x44\x31\x4d\x55\x4d\x0a\x61\x49\x47\x63\x76\x34\x32\x36\
\x6b\x59\x46\x6d\x5a\x52\x4a\x6e\x65\x41\x58\x44\x44\x54\x42\x31\
\x43\x6a\x46\x78\x44\x74\x37\x6d\x66\x36\x38\x4c\x6a\x66\x2f\x38\
\x47\x68\x7a\x59\x69\x64\x78\x77\x66\x50\x30\x45\x35\x4a\x66\x44\
\x41\x48\x6a\x68\x32\x64\x4c\x4e\x31\x6b\x38\x67\x50\x49\x52\x37\
\x33\x69\x54\x79\x0a\x37\x38\x65\x6c\x65\x76\x57\x76\x5a\x36\x39\
\x4e\x77\x4a\x73\x61\x69\x41\x4e\x75\x34\x63\x38\x46\x6f\x67\x42\
\x36\x63\x2f\x6b\x45\x76\x4c\x62\x56\x48\x36\x6d\x4b\x76\x73\x49\
\x48\x51\x6d\x36\x4d\x50\x50\x4f\x72\x58\x78\x36\x74\x68\x58\x51\
\x49\x49\x66\x74\x4e\x42\x4d\x79\x75\x34\x48\x76\x37\x65\x33\x52\
\x6b\x0a\x68\x6b\x4b\x38\x70\x32\x52\x50\x6e\x65\x36\x41\x68\x57\
\x2b\x34\x36\x72\x35\x4a\x70\x45\x30\x62\x62\x6b\x4d\x4e\x4a\x74\
\x44\x70\x4b\x4f\x79\x4e\x49\x53\x59\x62\x34\x64\x41\x51\x4f\x68\
\x58\x48\x47\x65\x6a\x41\x48\x52\x77\x33\x7a\x37\x51\x69\x74\x67\
\x58\x31\x38\x69\x6e\x71\x63\x68\x62\x4f\x4e\x74\x69\x6d\x0a\x42\
\x4a\x69\x45\x43\x30\x73\x51\x2f\x67\x77\x35\x46\x78\x49\x66\x4b\
\x79\x42\x51\x48\x6f\x79\x6d\x56\x67\x6c\x65\x37\x53\x58\x77\x4e\
\x77\x57\x4a\x57\x4e\x4e\x6d\x4b\x2f\x70\x2b\x76\x56\x6e\x62\x34\
\x6c\x58\x2b\x49\x33\x79\x4a\x54\x4e\x35\x46\x39\x65\x55\x41\x55\
\x50\x64\x79\x79\x4f\x74\x33\x6f\x4d\x75\x71\x0a\x6f\x38\x79\x4c\
\x70\x4f\x78\x47\x39\x49\x37\x6a\x50\x61\x77\x48\x77\x47\x73\x50\
\x51\x2f\x59\x4a\x38\x6b\x57\x58\x6a\x59\x41\x35\x61\x6d\x41\x52\
\x7a\x74\x51\x55\x5a\x58\x70\x6d\x69\x6f\x76\x54\x64\x61\x69\x62\
\x69\x7a\x5a\x78\x46\x73\x65\x57\x6a\x30\x44\x66\x4d\x67\x42\x69\
\x36\x34\x37\x69\x57\x69\x49\x48\x0a\x2b\x79\x4c\x2f\x53\x45\x43\
\x34\x63\x43\x49\x45\x67\x42\x37\x35\x2b\x65\x50\x70\x56\x76\x44\
\x56\x74\x52\x45\x77\x6e\x34\x53\x36\x66\x68\x71\x6e\x72\x70\x56\
\x43\x55\x78\x4b\x61\x38\x35\x44\x64\x68\x58\x4f\x30\x43\x62\x30\
\x2f\x5a\x53\x4e\x67\x4d\x59\x31\x47\x47\x35\x48\x5a\x49\x58\x6a\
\x6e\x34\x32\x38\x38\x0a\x68\x74\x77\x7a\x44\x48\x4d\x43\x2f\x31\
\x45\x4c\x50\x4d\x76\x59\x53\x46\x53\x70\x55\x6c\x46\x2b\x41\x47\
\x50\x45\x6c\x34\x58\x43\x30\x65\x30\x57\x41\x41\x41\x41\x41\x45\
\x6c\x46\x54\x6b\x53\x75\x51\x6d\x43\x43\x0a\x22\x0a\x20\x20\x20\
\x20\x20\x69\x64\x3d\x22\x69\x6d\x61\x67\x65\x33\x37\x30\x33\x22\
\x0a\x20\x20\x20\x20\x20\x78\x3d\x22\x30\x22\x0a\x20\x20\x20\x20\
\x20\x79\x3d\x22\x30\x22\x20\x2f\x3e\x0a\x3c\x2f\x73\x76\x67\x3e\
\x0a\
\x00\x00\x13\x31\
\x3c\
\x3f\x78\x6d\x6c\x20\x76\x65\x72\x73\x69\x6f\x6e\x3d\x22\x31\x2e\
\x30\x22\x20\x65\x6e\x63\x6f\x64\x69\x6e\x67\x3d\x22\x55\x54\x46\
\x2d\x38\x22\x20\x73\x74\x61\x6e\x64\x61\x6c\x6f\x6e\x65\x3d\x22\
\x6e\x6f\x22\x3f\x3e\x0a\x3c\x21\x2d\x2d\x20\x43\x72\x65\x61\x74\
\x65\x64\x20\x77\x69\x74\x68\x20\x49\x6e\x6b\x73\x63\x61\x70\x65\
\x20\x28\x68\x74\x74\x70\x3a\x2f\x2f\x77\x77\x77\x2e\x69\x6e\x6b\
\x73\x63\x61\x70\x65\x2e\x6f\x72\x67\x2f\x29\x20\x2d\x2d\x3e\x0a\
\x0a\x3c\x73\x76\x67\x0a\x20\x20\x20\x78\x6d\x6c\x6e\x73\x3a\x64\
\x63\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x70\x75\x72\x6c\x2e\x6f\
\x72\x67\x2f\x64\x63\x2f\x65\x6c\x65\x6d\x65\x6e\x74\x73\x2f\x31\
\x2e\x31\x2f\x22\x0a\x20\x20\x20\x78\x6d\x6c\x6e\x73\x3a\x63\x63\
\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x63\x72\x65\x61\x74\x69\x76\
\x65\x63\x6f\x6d\x6d\x6f\x6e\x73\x2e\x6f\x72\x67\x2f\x6e\x73\x23\
\x22\x0a\x20\x20\x20\x78\x6d\x6c\x6e\x73\x3a\x72\x64\x66\x3d\x22\
\x68\x74\x74\x70\x3a\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\
\x67\x2f\x31\x39\x39\x39\x2f\x30\x32\x2f\x32\x32\x2d\x72\x64\x66\
\x2d\x73\x79\x6e\x74\x61\x78\x2d\x6e\x73\x23\x22\x0a\x20\x20\x20\
\x78\x6d\x6c\x6e\x73\x3a\x73\x76\x67\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x0a\x20\x20\x20\x78\x6d\x6c\x6e\x73\x3d\
\x22\x68\x74\x74\x70\x3a\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\
\x72\x67\x2f\x32\x30\x30\x30\x2f\x73\x76\x67\x22\x0a\x20\x20\x20\
\x78\x6d\x6c\x6e\x73\x3a\x78\x6c\x69\x6e\x6b\x3d\x22\x68\x74\x74\
\x70\x3a\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x31\
\x39\x39\x39\x2f\x78\x6c\x69\x6e\x6b\x22\x0a\x20\x20\x20\x78\x6d\
\x6c\x6e\x73\x3a\x73\x6f\x64\x69\x70\x6f\x64\x69\x3d\x22\x68\x74\
\x74\x70\x3a\x2f\x2f\x73\x6f\x64\x69\x70\x6f\x64\x69\x2e\x73\x6f\
\x75\x72\x63\x65\x66\x6f\x72\x67\x65\x2e\x6e\x65\x74\x2f\x44\x54\
\x44\x2f\x73\x6f\x64\x69\x70\x6f\x64\x69\x2d\x30\x2e\x64\x74\x64\
\x22\x0a\x20\x20\x20\x78\x6d\x6c\x6e\x73\x3a\x69\x6e\x6b\x73\x63\
\x61\x70\x65\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x77\x77\x77\x2e\
\x69\x6e\x6b\x73\x63\x61\x70\x65\x2e\x6f\x72\x67\x2f\x6e\x61\x6d\
\x65\x73\x70\x61\x63\x65\x73\x2f\x69\x6e\x6b\x73\x63\x61\x70\x65\
\x22\x0a\x20\x20\x20\x69\x64\x3d\x22\x73\x76\x67\x34\x35\x35\x37\
\x22\x0a\x20\x20\x20\x76\x65\x72\x73\x69\x6f\x6e\x3d\x22\x31\x2e\
\x31\x22\x0a\x20\x20\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\x76\
\x65\x72\x73\x69\x6f\x6e\x3d\x22\x30\x2e\x34\x38\x2e\x33\x2e\x31\
\x20\x72\x39\x38\x38\x36\x22\x0a\x20\x20\x20\x77\x69\x64\x74\x68\
\x3d\x22\x33\x32\x22\x0a\x20\x20\x20\x68\x65\x69\x67\x68\x74\x3d\
\x22\x33\x32\x22\x0a\x20\x20\x20\x73\x6f\x64\x69\x70\x6f\x64\x69\
\x3a\x64\x6f\x63\x6e\x61\x6d\x65\x3d\x22\x63\x6f\x6e\x6a\x75\x67\
\x61\x74\x65\x2e\x73\x76\x67\x22\x3e\x0a\x20\x20\x3c\x6d\x65\x74\
\x61\x64\x61\x74\x61\x0a\x20\x20\x20\x20\x20\x69\x64\x3d\x22\x6d\
\x65\x74\x61\x64\x61\x74\x61\x34\x35\x36\x33\x22\x3e\x0a\x20\x20\
\x20\x20\x3c\x72\x64\x66\x3a\x52\x44\x46\x3e\x0a\x20\x20\x20\x20\
\x20\x20\x3c\x63\x63\x3a\x57\x6f\x72\x6b\x0a\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x72\x64\x66\x3a\x61\x62\x6f\x75\x74\x3d\x22\x22\
\x3e\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x3c\x64\x63\x3a\x66\x6f\
\x72\x6d\x61\x74\x3e\x69\x6d\x61\x67\x65\x2f\x73\x76\x67\x2b\x78\
\x6d\x6c\x3c\x2f\x64\x63\x3a\x66\x6f\x72\x6d\x61\x74\x3e\x0a\x20\
\x20\x20\x20\x20\x20\x20\x20\x3c\x64\x63\x3a\x74\x79\x70\x65\x0a\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x72\x64\x66\x3a\x72\
\x65\x73\x6f\x75\x72\x63\x65\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\
\x70\x75\x72\x6c\x2e\x6f\x72\x67\x2f\x64\x63\x2f\x64\x63\x6d\x69\
\x74\x79\x70\x65\x2f\x53\x74\x69\x6c\x6c\x49\x6d\x61\x67\x65\x22\
\x20\x2f\x3e\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x3c\x64\x63\x3a\
\x74\x69\x74\x6c\x65\x3e\x3c\x2f\x64\x63\x3a\x74\x69\x74\x6c\x65\
\x3e\x0a\x20\x20\x20\x20\x20\x20\x3c\x2f\x63\x63\x3a\x57\x6f\x72\
\x6b\x3e\x0a\x20\x20\x20\x20\x3c\x2f\x72\x64\x66\x3a\x52\x44\x46\
\x3e\x0a\x20\x20\x3c\x2f\x6d\x65\x74\x61\x64\x61\x74\x61\x3e\x0a\
\x20\x20\x3c\x64\x65\x66\x73\x0a\x20\x20\x20\x20\x20\x69\x64\x3d\
\x22\x64\x65\x66\x73\x34\x35\x36\x31\x22\x20\x2f\x3e\x0a\x20\x20\
\x3c\x73\x6f\x64\x69\x70\x6f\x64\x69\x3a\x6e\x61\x6d\x65\x64\x76\
\x69\x65\x77\x0a\x20\x20\x20\x20\x20\x70\x61\x67\x65\x63\x6f\x6c\
\x6f\x72\x3d\x22\x23\x66\x66\x66\x66\x66\x66\x22\x0a\x20\x20\x20\
\x20\x20\x62\x6f\x72\x64\x65\x72\x63\x6f\x6c\x6f\x72\x3d\x22\x23\
\x36\x36\x36\x36\x36\x36\x22\x0a\x20\x20\x20\x20\x20\x62\x6f\x72\
\x64\x65\x72\x6f\x70\x61\x63\x69\x74\x79\x3d\x22\x31\x22\x0a\x20\
\x20\x20\x20\x20\x6f\x62\x6a\x65\x63\x74\x74\x6f\x6c\x65\x72\x61\
\x6e\x63\x65\x3d\x22\x31\x30\x22\x0a\x20\x20\x20\x20\x20\x67\x72\
\x69\x64\x74\x6f\x6c\x65\x72\x61\x6e\x63\x65\x3d\x22\x31\x30\x22\
\x0a\x20\x20\x20\x20\x20\x67\x75\x69\x64\x65\x74\x6f\x6c\x65\x72\
\x61\x6e\x63\x65\x3d\x22\x31\x30\x22\x0a\x20\x20\x20\x20\x20\x69\
\x6e\x6b\x73\x63\x61\x70\x65\x3a\x70\x61\x67\x65\x6f\x70\x61\x63\
\x69\x74\x79\x3d\x22\x30\x22\x0a\x20\x20\x20\x20\x20\x69\x6e\x6b\
\x73\x63\x61\x70\x65\x3a\x70\x61\x67\x65\x73\x68\x61\x64\x6f\x77\
\x3d\x22\x32\x22\x0a\x20\x20\x20\x20\x20\x69\x6e\x6b\x73\x63\x61\
\x70\x65\x3a\x77\x69\x6e\x64\x6f\x77\x2d\x77\x69\x64\x74\x68\x3d\
\x22\x36\x38\x31\x22\x0a\x20\x20\x20\x20\x20\x69\x6e\x6b\x73\x63\
\x61\x70\x65\x3a\x77\x69\x6e\x64\x6f\x77\x2d\x68\x65\x69\x67\x68\
\x74\x3d\x22\x37\x35\x30\x22\x0a\x20\x20\x20\x20\x20\x69\x64\x3d\
\x22\x6e\x61\x6d\x65\x64\x76\x69\x65\x77\x34\x35\x35\x39\x22\x0a\
\x20\x20\x20\x20\x20\x73\x68\x6f\x77\x67\x72\x69\x64\x3d\x22\x66\
\x61\x6c\x73\x65\x22\x0a\x20\x20\x20\x20\x20\x69\x6e\x6b\x73\x63\
\x61\x70\x65\x3a\x7a\x6f\x6f\x6d\x3d\x22\x37\x2e\x33\x37\x35\x22\
\x0a\x20\x20\x20\x20\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\x63\
\x78\x3d\x22\x31\x30\x2e\x32\x37\x31\x30\x30\x35\x22\x0a\x20\x20\
\x20\x20\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\x63\x79\x3d\x22\
\x31\x33\x2e\x32\x32\x32\x38\x37\x38\x22\x0a\x20\x20\x20\x20\x20\
\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\x77\x69\x6e\x64\x6f\x77\x2d\
\x78\x3d\x22\x36\x38\x33\x22\x0a\x20\x20\x20\x20\x20\x69\x6e\x6b\
\x73\x63\x61\x70\x65\x3a\x77\x69\x6e\x64\x6f\x77\x2d\x79\x3d\x22\
\x31\x36\x22\x0a\x20\x20\x20\x20\x20\x69\x6e\x6b\x73\x63\x61\x70\
\x65\x3a\x77\x69\x6e\x64\x6f\x77\x2d\x6d\x61\x78\x69\x6d\x69\x7a\
\x65\x64\x3d\x22\x30\x22\x0a\x20\x20\x20\x20\x20\x69\x6e\x6b\x73\
\x63\x61\x70\x65\x3a\x63\x75\x72\x72\x65\x6e\x74\x2d\x6c\x61\x79\
\x65\x72\x3d\x22\x73\x76\x67\x34\x35\x35\x37\x22\x0a\x20\x20\x20\
\x20\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\x73\x6e\x61\x70\x2d\
\x67\x6c\x6f\x62\x61\x6c\x3d\x22\x66\x61\x6c\x73\x65\x22\x20\x2f\
\x3e\x0a\x20\x20\x3c\x69\x6d\x61\x67\x65\x0a\x20\x20\x20\x20\x20\
\x77\x69\x64\x74\x68\x3d\x22\x33\x32\x22\x0a\x20\x20\x20\x20\x20\
\x68\x65\x69\x67\x68\x74\x3d\x22\x33\x32\x22\x0a\x20\x20\x20\x20\
\x20\x78\x6c\x69\x6e\x6b\x3a\x68\x72\x65\x66\x3d\x22\x64\x61\x74\
\x61\x3a\x69\x6d\x61\x67\x65\x2f\x70\x6e\x67\x3b\x62\x61\x73\x65\
\x36\x34\x2c\x69\x56\x42\x4f\x52\x77\x30\x4b\x47\x67\x6f\x41\x41\
\x41\x41\x4e\x53\x55\x68\x45\x55\x67\x41\x41\x41\x43\x41\x41\x41\
\x41\x41\x67\x43\x41\x59\x41\x41\x41\x42\x7a\x65\x6e\x72\x30\x41\
\x41\x41\x41\x42\x48\x4e\x43\x53\x56\x51\x49\x43\x41\x67\x49\x66\
\x41\x68\x6b\x69\x41\x41\x41\x41\x44\x56\x4a\x52\x45\x46\x55\x0a\
\x57\x49\x58\x74\x31\x7a\x45\x42\x41\x44\x41\x4d\x77\x7a\x42\x33\
\x2f\x44\x6c\x33\x4d\x50\x6f\x6f\x42\x4b\x77\x33\x55\x32\x33\x48\
\x75\x77\x54\x73\x4f\x34\x78\x58\x42\x51\x41\x41\x41\x41\x41\x41\
\x41\x41\x41\x41\x41\x41\x41\x41\x41\x41\x41\x77\x48\x64\x2f\x7a\
\x44\x78\x72\x74\x42\x44\x6b\x45\x71\x4e\x62\x67\x0a\x41\x41\x41\
\x41\x41\x45\x6c\x46\x54\x6b\x53\x75\x51\x6d\x43\x43\x0a\x22\x0a\
\x20\x20\x20\x20\x20\x69\x64\x3d\x22\x69\x6d\x61\x67\x65\x34\x35\
\x36\x35\x22\x0a\x20\x20\x20\x20\x20\x78\x3d\x22\x30\x22\x0a\x20\
\x20\x20\x20\x20\x79\x3d\x22\x30\x22\x20\x2f\x3e\x0a\x20\x20\x3c\
\x69\x6d\x61\x67\x65\x0a\x20\x20\x20\x20\x20\x79\x3d\x22\x31\x2e\
\x32\x35\x34\x32\x33\x37\x37\x22\x0a\x20\x20\x20\x20\x20\x78\x3d\
\x22\x30\x2e\x33\x33\x38\x39\x38\x32\x38\x32\x22\x0a\x20\x20\x20\
\x20\x20\x69\x64\x3d\x22\x69\x6d\x61\x67\x65\x35\x33\x32\x39\x22\
\x0a\x20\x20\x20\x20\x20\x78\x6c\x69\x6e\x6b\x3a\x68\x72\x65\x66\
\x3d\x22\x64\x61\x74\x61\x3a\x69\x6d\x61\x67\x65\x2f\x70\x6e\x67\
\x3b\x62\x61\x73\x65\x36\x34\x2c\x69\x56\x42\x4f\x52\x77\x30\x4b\
\x47\x67\x6f\x41\x41\x41\x41\x4e\x53\x55\x68\x45\x55\x67\x41\x41\
\x41\x48\x67\x41\x41\x41\x43\x55\x43\x41\x59\x41\x41\x41\x42\x59\
\x34\x75\x30\x72\x41\x41\x41\x41\x42\x48\x4e\x43\x53\x56\x51\x49\
\x43\x41\x67\x49\x66\x41\x68\x6b\x69\x41\x41\x41\x41\x35\x35\x4a\
\x52\x45\x46\x55\x0a\x65\x4a\x7a\x74\x33\x64\x74\x79\x32\x7a\x41\
\x4d\x52\x56\x47\x34\x30\x2f\x2f\x2f\x5a\x66\x66\x4a\x4d\x33\x5a\
\x39\x45\x57\x38\x67\x44\x73\x43\x39\x6e\x74\x4f\x45\x31\x69\x5a\
\x56\x53\x57\x59\x63\x4d\x77\x41\x41\x41\x41\x41\x41\x41\x41\x41\
\x49\x64\x34\x73\x65\x51\x41\x33\x33\x65\x39\x76\x58\x33\x62\x59\
\x66\x0a\x62\x77\x4a\x50\x61\x51\x33\x37\x79\x5a\x37\x59\x42\x42\
\x34\x32\x45\x2f\x66\x42\x50\x7a\x4b\x42\x75\x36\x30\x49\x2b\x38\
\x77\x33\x38\x68\x2f\x50\x62\x34\x34\x57\x71\x79\x66\x4d\x4b\x31\
\x5a\x77\x46\x38\x38\x59\x50\x69\x75\x5a\x46\x64\x7a\x4d\x64\x36\
\x56\x35\x49\x62\x41\x4d\x6e\x77\x6c\x45\x34\x43\x59\x35\x0a\x56\
\x36\x38\x5a\x67\x63\x57\x73\x6e\x30\x67\x45\x4c\x6f\x37\x41\x6c\
\x2f\x4b\x65\x6e\x73\x30\x49\x58\x42\x36\x42\x69\x79\x4e\x77\x63\
\x51\x51\x75\x6a\x73\x43\x58\x39\x72\x2b\x48\x75\x78\x4b\x42\x69\
\x79\x4f\x77\x6c\x50\x56\x6e\x43\x77\x49\x33\x79\x58\x75\x61\x4a\
\x6e\x42\x78\x61\x57\x64\x6d\x44\x4e\x34\x50\x0a\x78\x68\x43\x2f\
\x2f\x77\x4a\x59\x77\x57\x38\x2b\x72\x64\x4c\x6e\x41\x4c\x6e\x32\
\x5a\x42\x48\x59\x7a\x50\x72\x33\x4e\x61\x2b\x4b\x7a\x4b\x35\x4b\
\x5a\x36\x4f\x68\x56\x6f\x52\x6d\x58\x37\x51\x54\x6a\x39\x58\x58\
\x38\x7a\x33\x33\x33\x6e\x49\x64\x46\x4e\x6a\x6a\x43\x76\x68\x62\
\x72\x4d\x66\x50\x69\x72\x39\x2f\x0a\x44\x68\x2b\x41\x72\x78\x31\
\x76\x31\x73\x64\x48\x2f\x45\x56\x36\x63\x4f\x4e\x32\x37\x38\x4c\
\x51\x6a\x53\x77\x37\x73\x48\x37\x52\x57\x32\x73\x30\x49\x2f\x2b\
\x4e\x48\x73\x43\x38\x36\x4c\x44\x61\x4a\x47\x66\x64\x4e\x64\x57\
\x6f\x65\x71\x73\x34\x32\x51\x70\x57\x44\x61\x74\x4c\x62\x73\x61\
\x39\x79\x78\x5a\x56\x0a\x61\x78\x56\x4c\x44\x65\x5a\x56\x74\x72\
\x44\x50\x64\x43\x4b\x4c\x6e\x61\x49\x7a\x52\x39\x55\x6b\x4d\x74\
\x4d\x71\x68\x74\x56\x59\x78\x63\x45\x72\x75\x47\x4a\x59\x4c\x59\
\x47\x7a\x37\x49\x53\x34\x38\x61\x75\x59\x48\x52\x33\x46\x42\x63\
\x32\x77\x45\x31\x62\x76\x51\x2b\x77\x71\x4c\x72\x61\x43\x62\x37\
\x66\x6f\x0a\x41\x2f\x6f\x75\x64\x6a\x4b\x4c\x33\x53\x61\x4e\x38\
\x74\x77\x7a\x6c\x56\x76\x69\x69\x36\x79\x72\x4e\x39\x76\x56\x78\
\x4a\x78\x5a\x6b\x71\x31\x67\x74\x64\x4f\x76\x76\x75\x41\x44\x35\
\x76\x45\x70\x72\x61\x6f\x72\x32\x4f\x7a\x51\x54\x35\x76\x39\x46\
\x6d\x54\x30\x59\x43\x67\x48\x4e\x6d\x50\x54\x33\x52\x54\x31\x0a\
\x75\x47\x61\x37\x41\x78\x65\x37\x54\x63\x70\x67\x37\x79\x51\x6b\
\x63\x48\x45\x45\x44\x72\x46\x76\x46\x52\x4e\x34\x69\x75\x4b\x54\
\x73\x31\x63\x45\x44\x72\x4e\x6e\x46\x55\x76\x50\x76\x6a\x37\x52\
\x6d\x39\x31\x6e\x66\x35\x48\x4e\x42\x79\x74\x34\x79\x4b\x63\x6f\
\x6d\x71\x64\x71\x41\x6e\x64\x62\x48\x64\x4c\x33\x0a\x7a\x46\x4d\
\x6b\x73\x4d\x6f\x44\x44\x72\x31\x56\x58\x43\x44\x77\x7a\x72\x68\
\x65\x41\x66\x31\x65\x67\x39\x79\x4d\x36\x36\x4d\x61\x56\x2b\x65\
\x43\x4b\x2f\x45\x4b\x56\x6f\x30\x37\x38\x76\x56\x2b\x6b\x67\x5a\
\x57\x6a\x6a\x75\x44\x76\x39\x6c\x67\x4f\x68\x64\x55\x56\x7a\x52\
\x57\x63\x62\x4c\x41\x30\x51\x38\x7a\x0a\x64\x76\x2f\x37\x65\x63\
\x6b\x43\x6f\x31\x65\x69\x77\x46\x6c\x4f\x7a\x66\x39\x6a\x58\x33\
\x53\x44\x72\x48\x45\x66\x34\x69\x4a\x50\x2f\x75\x43\x72\x7a\x33\
\x56\x63\x49\x54\x72\x75\x71\x74\x66\x54\x38\x6a\x72\x57\x54\x77\
\x54\x48\x6a\x57\x30\x72\x42\x68\x73\x64\x31\x32\x7a\x74\x51\x66\
\x2f\x31\x65\x6e\x78\x57\x0a\x2b\x63\x41\x33\x37\x54\x33\x6f\x4d\
\x77\x4f\x76\x46\x76\x6a\x68\x2b\x58\x58\x4a\x66\x64\x72\x73\x7a\
\x73\x64\x77\x43\x6f\x48\x4e\x6f\x69\x2b\x55\x5a\x6e\x52\x65\x5a\
\x4d\x30\x63\x38\x50\x74\x64\x4a\x39\x67\x35\x41\x71\x36\x69\x43\
\x62\x31\x54\x34\x47\x31\x53\x70\x74\x42\x5a\x78\x76\x6c\x4f\x34\
\x44\x37\x34\x0a\x57\x2b\x69\x38\x42\x31\x57\x4a\x51\x4f\x43\x48\
\x35\x36\x44\x45\x58\x53\x58\x78\x6c\x65\x31\x75\x4f\x61\x2b\x6b\
\x68\x56\x59\x77\x50\x41\x77\x45\x7a\x6a\x53\x54\x4d\x34\x33\x56\
\x78\x2b\x41\x4b\x31\x76\x2b\x56\x44\x66\x33\x78\x37\x54\x46\x35\
\x69\x6c\x59\x4e\x72\x54\x69\x6d\x47\x45\x58\x65\x2b\x62\x6b\x4b\
\x0a\x75\x6d\x4a\x63\x4f\x53\x64\x4e\x75\x6e\x32\x2b\x37\x31\x6f\
\x50\x2f\x4a\x6d\x52\x6e\x51\x66\x73\x48\x62\x72\x33\x67\x48\x74\
\x39\x64\x4a\x4f\x75\x44\x51\x50\x32\x69\x68\x7a\x78\x4e\x69\x53\
\x42\x66\x31\x67\x5a\x4f\x6d\x6f\x7a\x51\x62\x37\x41\x47\x78\x39\
\x30\x72\x4c\x72\x69\x7a\x6e\x65\x51\x49\x77\x55\x38\x0a\x79\x5a\
\x6f\x4a\x54\x64\x78\x65\x67\x59\x38\x71\x65\x30\x4d\x54\x64\x34\
\x54\x41\x73\x2b\x69\x57\x30\x4d\x51\x64\x4a\x66\x52\x68\x70\x45\
\x54\x30\x49\x4c\x43\x43\x34\x59\x6e\x41\x78\x52\x47\x34\x57\x63\
\x37\x2f\x51\x67\x68\x63\x48\x49\x47\x4c\x4f\x7a\x54\x77\x4f\x66\
\x76\x4b\x44\x67\x31\x38\x44\x67\x49\x58\x0a\x52\x2b\x44\x69\x43\
\x46\x77\x63\x67\x59\x73\x6a\x63\x48\x45\x45\x4c\x6f\x37\x41\x54\
\x58\x49\x2b\x70\x6a\x51\x6a\x63\x48\x6b\x45\x4c\x75\x37\x41\x77\
\x4f\x63\x38\x70\x6a\x51\x37\x4d\x76\x42\x5a\x43\x46\x77\x63\x67\
\x59\x73\x6a\x63\x48\x45\x45\x4c\x6f\x37\x41\x78\x52\x47\x34\x4f\
\x41\x49\x58\x52\x2b\x42\x4c\x0a\x65\x5a\x39\x44\x6d\x78\x47\x34\
\x76\x4d\x4d\x43\x6e\x2f\x57\x59\x30\x75\x79\x34\x77\x4f\x63\x68\
\x63\x48\x45\x45\x4c\x6f\x37\x41\x78\x52\x47\x34\x4f\x41\x49\x58\
\x6c\x2f\x6f\x6d\x66\x6b\x7a\x50\x72\x56\x4c\x75\x68\x78\x78\x6d\
\x52\x77\x59\x32\x69\x2f\x6f\x7a\x63\x78\x46\x4b\x76\x49\x67\x78\
\x2b\x2f\x2f\x4d\x0a\x48\x41\x41\x41\x41\x41\x41\x41\x41\x43\x62\
\x38\x41\x78\x4d\x4b\x7a\x76\x4c\x75\x31\x71\x62\x6b\x41\x41\x41\
\x41\x41\x45\x6c\x46\x54\x6b\x53\x75\x51\x6d\x43\x43\x0a\x22\x0a\
\x20\x20\x20\x20\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x32\x39\x2e\
\x37\x36\x32\x37\x31\x36\x22\x0a\x20\x20\x20\x20\x20\x77\x69\x64\
\x74\x68\x3d\x22\x32\x31\x2e\x39\x36\x36\x31\x30\x33\x22\x20\x2f\
\x3e\x0a\x20\x20\x3c\x69\x6d\x61\x67\x65\x0a\x20\x20\x20\x20\x20\
\x79\x3d\x22\x30\x2e\x39\x38\x33\x30\x34\x38\x36\x32\x22\x0a\x20\
\x20\x20\x20\x20\x78\x3d\x22\x31\x31\x2e\x35\x32\x35\x34\x32\x34\
\x22\x0a\x20\x20\x20\x20\x20\x69\x64\x3d\x22\x69\x6d\x61\x67\x65\
\x35\x33\x32\x39\x2d\x38\x22\x0a\x20\x20\x20\x20\x20\x78\x6c\x69\
\x6e\x6b\x3a\x68\x72\x65\x66\x3d\x22\x64\x61\x74\x61\x3a\x69\x6d\
\x61\x67\x65\x2f\x70\x6e\x67\x3b\x62\x61\x73\x65\x36\x34\x2c\x69\
\x56\x42\x4f\x52\x77\x30\x4b\x47\x67\x6f\x41\x41\x41\x41\x4e\x53\
\x55\x68\x45\x55\x67\x41\x41\x41\x48\x67\x41\x41\x41\x43\x55\x43\
\x41\x59\x41\x41\x41\x42\x59\x34\x75\x30\x72\x41\x41\x41\x41\x42\
\x48\x4e\x43\x53\x56\x51\x49\x43\x41\x67\x49\x66\x41\x68\x6b\x69\
\x41\x41\x41\x41\x35\x35\x4a\x52\x45\x46\x55\x20\x65\x4a\x7a\x74\
\x33\x64\x74\x79\x32\x7a\x41\x4d\x52\x56\x47\x34\x30\x2f\x2f\x2f\
\x5a\x66\x66\x4a\x4d\x33\x5a\x39\x45\x57\x38\x67\x44\x73\x43\x39\
\x6e\x74\x4f\x45\x31\x69\x5a\x56\x53\x57\x59\x63\x4d\x77\x41\x41\
\x41\x41\x41\x41\x41\x41\x41\x49\x64\x34\x73\x65\x51\x41\x33\x33\
\x65\x39\x76\x58\x33\x62\x59\x66\x20\x62\x77\x4a\x50\x61\x51\x33\
\x37\x79\x5a\x37\x59\x42\x42\x34\x32\x45\x2f\x66\x42\x50\x7a\x4b\
\x42\x75\x36\x30\x49\x2b\x38\x77\x33\x38\x68\x2f\x50\x62\x34\x34\
\x57\x71\x79\x66\x4d\x4b\x31\x5a\x77\x46\x38\x38\x59\x50\x69\x75\
\x5a\x46\x64\x7a\x4d\x64\x36\x56\x35\x49\x62\x41\x4d\x6e\x77\x6c\
\x45\x34\x43\x59\x35\x20\x56\x36\x38\x5a\x67\x63\x57\x73\x6e\x30\
\x67\x45\x4c\x6f\x37\x41\x6c\x2f\x4b\x65\x6e\x73\x30\x49\x58\x42\
\x36\x42\x69\x79\x4e\x77\x63\x51\x51\x75\x6a\x73\x43\x58\x39\x72\
\x2b\x48\x75\x78\x4b\x42\x69\x79\x4f\x77\x6c\x50\x56\x6e\x43\x77\
\x49\x33\x79\x58\x75\x61\x4a\x6e\x42\x78\x61\x57\x64\x6d\x44\x4e\
\x34\x50\x20\x78\x68\x43\x2f\x2f\x77\x4a\x59\x77\x57\x38\x2b\x72\
\x64\x4c\x6e\x41\x4c\x6e\x32\x5a\x42\x48\x59\x7a\x50\x72\x33\x4e\
\x61\x2b\x4b\x7a\x4b\x35\x4b\x5a\x36\x4f\x68\x56\x6f\x52\x6d\x58\
\x37\x51\x54\x6a\x39\x58\x58\x38\x7a\x33\x33\x33\x6e\x49\x64\x46\
\x4e\x6a\x6a\x43\x76\x68\x62\x72\x4d\x66\x50\x69\x72\x39\x2f\x20\
\x44\x68\x2b\x41\x72\x78\x31\x76\x31\x73\x64\x48\x2f\x45\x56\x36\
\x63\x4f\x4e\x32\x37\x38\x4c\x51\x6a\x53\x77\x37\x73\x48\x37\x52\
\x57\x32\x73\x30\x49\x2f\x2b\x4e\x48\x73\x43\x38\x36\x4c\x44\x61\
\x4a\x47\x66\x64\x4e\x64\x57\x6f\x65\x71\x73\x34\x32\x51\x70\x57\
\x44\x61\x74\x4c\x62\x73\x61\x39\x79\x78\x5a\x56\x20\x61\x78\x56\
\x4c\x44\x65\x5a\x56\x74\x72\x44\x50\x64\x43\x4b\x4c\x6e\x61\x49\
\x7a\x52\x39\x55\x6b\x4d\x74\x4d\x71\x68\x74\x56\x59\x78\x63\x45\
\x72\x75\x47\x4a\x59\x4c\x59\x47\x7a\x37\x49\x53\x34\x38\x61\x75\
\x59\x48\x52\x33\x46\x42\x63\x32\x77\x45\x31\x62\x76\x51\x2b\x77\
\x71\x4c\x72\x61\x43\x62\x37\x66\x6f\x20\x41\x2f\x6f\x75\x64\x6a\
\x4b\x4c\x33\x53\x61\x4e\x38\x74\x77\x7a\x6c\x56\x76\x69\x69\x36\
\x79\x72\x4e\x39\x76\x56\x78\x4a\x78\x5a\x6b\x71\x31\x67\x74\x64\
\x4f\x76\x76\x75\x41\x44\x35\x76\x45\x70\x72\x61\x6f\x72\x32\x4f\
\x7a\x51\x54\x35\x76\x39\x46\x6d\x54\x30\x59\x43\x67\x48\x4e\x6d\
\x50\x54\x33\x52\x54\x31\x20\x75\x47\x61\x37\x41\x78\x65\x37\x54\
\x63\x70\x67\x37\x79\x51\x6b\x63\x48\x45\x45\x44\x72\x46\x76\x46\
\x52\x4e\x34\x69\x75\x4b\x54\x73\x31\x63\x45\x44\x72\x4e\x6e\x46\
\x55\x76\x50\x76\x6a\x37\x52\x6d\x39\x31\x6e\x66\x35\x48\x4e\x42\
\x79\x74\x34\x79\x4b\x63\x6f\x6d\x71\x64\x71\x41\x6e\x64\x62\x48\
\x64\x4c\x33\x20\x7a\x46\x4d\x6b\x73\x4d\x6f\x44\x44\x72\x31\x56\
\x58\x43\x44\x77\x7a\x72\x68\x65\x41\x66\x31\x65\x67\x39\x79\x4d\
\x36\x36\x4d\x61\x56\x2b\x65\x43\x4b\x2f\x45\x4b\x56\x6f\x30\x37\
\x38\x76\x56\x2b\x6b\x67\x5a\x57\x6a\x6a\x75\x44\x76\x39\x6c\x67\
\x4f\x68\x64\x55\x56\x7a\x52\x57\x63\x62\x4c\x41\x30\x51\x38\x7a\
\x20\x64\x76\x2f\x37\x65\x63\x6b\x43\x6f\x31\x65\x69\x77\x46\x6c\
\x4f\x7a\x66\x39\x6a\x58\x33\x53\x44\x72\x48\x45\x66\x34\x69\x4a\
\x50\x2f\x75\x43\x72\x7a\x33\x56\x63\x49\x54\x72\x75\x71\x74\x66\
\x54\x38\x6a\x72\x57\x54\x77\x54\x48\x6a\x57\x30\x72\x42\x68\x73\
\x64\x31\x32\x7a\x74\x51\x66\x2f\x31\x65\x6e\x78\x57\x20\x2b\x63\
\x41\x33\x37\x54\x33\x6f\x4d\x77\x4f\x76\x46\x76\x6a\x68\x2b\x58\
\x58\x4a\x66\x64\x72\x73\x7a\x73\x64\x77\x43\x6f\x48\x4e\x6f\x69\
\x2b\x55\x5a\x6e\x52\x65\x5a\x4d\x30\x63\x38\x50\x74\x64\x4a\x39\
\x67\x35\x41\x71\x36\x69\x43\x62\x31\x54\x34\x47\x31\x53\x70\x74\
\x42\x5a\x78\x76\x6c\x4f\x34\x44\x37\x34\x20\x57\x2b\x69\x38\x42\
\x31\x57\x4a\x51\x4f\x43\x48\x35\x36\x44\x45\x58\x53\x58\x78\x6c\
\x65\x31\x75\x4f\x61\x2b\x6b\x68\x56\x59\x77\x50\x41\x77\x45\x7a\
\x6a\x53\x54\x4d\x34\x33\x56\x78\x2b\x41\x4b\x31\x76\x2b\x56\x44\
\x66\x33\x78\x37\x54\x46\x35\x69\x6c\x59\x4e\x72\x54\x69\x6d\x47\
\x45\x58\x65\x2b\x62\x6b\x4b\x20\x75\x6d\x4a\x63\x4f\x53\x64\x4e\
\x75\x6e\x32\x2b\x37\x31\x6f\x50\x2f\x4a\x6d\x52\x6e\x51\x66\x73\
\x48\x62\x72\x33\x67\x48\x74\x39\x64\x4a\x4f\x75\x44\x51\x50\x32\
\x69\x68\x7a\x78\x4e\x69\x53\x42\x66\x31\x67\x5a\x4f\x6d\x6f\x7a\
\x51\x62\x37\x41\x47\x78\x39\x30\x72\x4c\x72\x69\x7a\x6e\x65\x51\
\x49\x77\x55\x38\x20\x79\x5a\x6f\x4a\x54\x64\x78\x65\x67\x59\x38\
\x71\x65\x30\x4d\x54\x64\x34\x54\x41\x73\x2b\x69\x57\x30\x4d\x51\
\x64\x4a\x66\x52\x68\x70\x45\x54\x30\x49\x4c\x43\x43\x34\x59\x6e\
\x41\x78\x52\x47\x34\x57\x63\x37\x2f\x51\x67\x68\x63\x48\x49\x47\
\x4c\x4f\x7a\x54\x77\x4f\x66\x76\x4b\x44\x67\x31\x38\x44\x67\x49\
\x58\x20\x52\x2b\x44\x69\x43\x46\x77\x63\x67\x59\x73\x6a\x63\x48\
\x45\x45\x4c\x6f\x37\x41\x54\x58\x49\x2b\x70\x6a\x51\x6a\x63\x48\
\x6b\x45\x4c\x75\x37\x41\x77\x4f\x63\x38\x70\x6a\x51\x37\x4d\x76\
\x42\x5a\x43\x46\x77\x63\x67\x59\x73\x6a\x63\x48\x45\x45\x4c\x6f\
\x37\x41\x78\x52\x47\x34\x4f\x41\x49\x58\x52\x2b\x42\x4c\x20\x65\
\x5a\x39\x44\x6d\x78\x47\x34\x76\x4d\x4d\x43\x6e\x2f\x57\x59\x30\
\x75\x79\x34\x77\x4f\x63\x68\x63\x48\x45\x45\x4c\x6f\x37\x41\x78\
\x52\x47\x34\x4f\x41\x49\x58\x6c\x2f\x6f\x6d\x66\x6b\x7a\x50\x72\
\x56\x4c\x75\x68\x78\x78\x6d\x52\x77\x59\x32\x69\x2f\x6f\x7a\x63\
\x78\x46\x4b\x76\x49\x67\x78\x2b\x2f\x2f\x4d\x20\x48\x41\x41\x41\
\x41\x41\x41\x41\x41\x43\x62\x38\x41\x78\x4d\x4b\x7a\x76\x4c\x75\
\x31\x71\x62\x6b\x41\x41\x41\x41\x41\x45\x6c\x46\x54\x6b\x53\x75\
\x51\x6d\x43\x43\x20\x22\x0a\x20\x20\x20\x20\x20\x68\x65\x69\x67\
\x68\x74\x3d\x22\x32\x39\x2e\x37\x36\x32\x37\x31\x36\x22\x0a\x20\
\x20\x20\x20\x20\x77\x69\x64\x74\x68\x3d\x22\x32\x31\x2e\x39\x36\
\x36\x31\x30\x33\x22\x20\x2f\x3e\x0a\x3c\x2f\x73\x76\x67\x3e\x0a\
\
\x00\x00\x07\x6c\
\x3c\
\x3f\x78\x6d\x6c\x20\x76\x65\x72\x73\x69\x6f\x6e\x3d\x22\x31\x2e\
\x30\x22\x20\x65\x6e\x63\x6f\x64\x69\x6e\x67\x3d\x22\x55\x54\x46\
\x2d\x38\x22\x20\x73\x74\x61\x6e\x64\x61\x6c\x6f\x6e\x65\x3d\x22\
\x6e\x6f\x22\x3f\x3e\x0a\x3c\x21\x2d\x2d\x20\x43\x72\x65\x61\x74\
\x65\x64\x20\x77\x69\x74\x68\x20\x49\x6e\x6b\x73\x63\x61\x70\x65\
\x20\x28\x68\x74\x74\x70\x3a\x2f\x2f\x77\x77\x77\x2e\x69\x6e\x6b\
\x73\x63\x61\x70\x65\x2e\x6f\x72\x67\x2f\x29\x20\x2d\x2d\x3e\x0a\
\x0a\x3c\x73\x76\x67\x0a\x20\x20\x20\x78\x6d\x6c\x6e\x73\x3a\x64\
\x63\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x70\x75\x72\x6c\x2e\x6f\
\x72\x67\x2f\x64\x63\x2f\x65\x6c\x65\x6d\x65\x6e\x74\x73\x2f\x31\
\x2e\x31\x2f\x22\x0a\x20\x20\x20\x78\x6d\x6c\x6e\x73\x3a\x63\x63\
\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x63\x72\x65\x61\x74\x69\x76\
\x65\x63\x6f\x6d\x6d\x6f\x6e\x73\x2e\x6f\x72\x67\x2f\x6e\x73\x23\
\x22\x0a\x20\x20\x20\x78\x6d\x6c\x6e\x73\x3a\x72\x64\x66\x3d\x22\
\x68\x74\x74\x70\x3a\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\
\x67\x2f\x31\x39\x39\x39\x2f\x30\x32\x2f\x32\x32\x2d\x72\x64\x66\
\x2d\x73\x79\x6e\x74\x61\x78\x2d\x6e\x73\x23\x22\x0a\x20\x20\x20\
\x78\x6d\x6c\x6e\x73\x3a\x73\x76\x67\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x0a\x20\x20\x20\x78\x6d\x6c\x6e\x73\x3d\
\x22\x68\x74\x74\x70\x3a\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\
\x72\x67\x2f\x32\x30\x30\x30\x2f\x73\x76\x67\x22\x0a\x20\x20\x20\
\x78\x6d\x6c\x6e\x73\x3a\x78\x6c\x69\x6e\x6b\x3d\x22\x68\x74\x74\
\x70\x3a\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x31\
\x39\x39\x39\x2f\x78\x6c\x69\x6e\x6b\x22\x0a\x20\x20\x20\x78\x6d\
\x6c\x6e\x73\x3a\x73\x6f\x64\x69\x70\x6f\x64\x69\x3d\x22\x68\x74\
\x74\x70\x3a\x2f\x2f\x73\x6f\x64\x69\x70\x6f\x64\x69\x2e\x73\x6f\
\x75\x72\x63\x65\x66\x6f\x72\x67\x65\x2e\x6e\x65\x74\x2f\x44\x54\
\x44\x2f\x73\x6f\x64\x69\x70\x6f\x64\x69\x2d\x30\x2e\x64\x74\x64\
\x22\x0a\x20\x20\x20\x78\x6d\x6c\x6e\x73\x3a\x69\x6e\x6b\x73\x63\
\x61\x70\x65\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x77\x77\x77\x2e\
\x69\x6e\x6b\x73\x63\x61\x70\x65\x2e\x6f\x72\x67\x2f\x6e\x61\x6d\
\x65\x73\x70\x61\x63\x65\x73\x2f\x69\x6e\x6b\x73\x63\x61\x70\x65\
\x22\x0a\x20\x20\x20\x69\x64\x3d\x22\x73\x76\x67\x33\x38\x37\x39\
\x22\x0a\x20\x20\x20\x76\x65\x72\x73\x69\x6f\x6e\x3d\x22\x31\x2e\
\x31\x22\x0a\x20\x20\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\x76\
\x65\x72\x73\x69\x6f\x6e\x3d\x22\x30\x2e\x34\x38\x2e\x33\x2e\x31\
\x20\x72\x39\x38\x38\x36\x22\x0a\x20\x20\x20\x77\x69\x64\x74\x68\
\x3d\x22\x33\x32\x22\x0a\x20\x20\x20\x68\x65\x69\x67\x68\x74\x3d\
\x22\x33\x32\x22\x0a\x20\x20\x20\x73\x6f\x64\x69\x70\x6f\x64\x69\
\x3a\x64\x6f\x63\x6e\x61\x6d\x65\x3d\x22\x73\x74\x65\x70\x5f\x72\
\x65\x73\x70\x6f\x6e\x73\x65\x2e\x69\x63\x6f\x22\x3e\x0a\x20\x20\
\x3c\x6d\x65\x74\x61\x64\x61\x74\x61\x0a\x20\x20\x20\x20\x20\x69\
\x64\x3d\x22\x6d\x65\x74\x61\x64\x61\x74\x61\x33\x38\x38\x35\x22\
\x3e\x0a\x20\x20\x20\x20\x3c\x72\x64\x66\x3a\x52\x44\x46\x3e\x0a\
\x20\x20\x20\x20\x20\x20\x3c\x63\x63\x3a\x57\x6f\x72\x6b\x0a\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x72\x64\x66\x3a\x61\x62\x6f\x75\
\x74\x3d\x22\x22\x3e\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x3c\x64\
\x63\x3a\x66\x6f\x72\x6d\x61\x74\x3e\x69\x6d\x61\x67\x65\x2f\x73\
\x76\x67\x2b\x78\x6d\x6c\x3c\x2f\x64\x63\x3a\x66\x6f\x72\x6d\x61\
\x74\x3e\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x3c\x64\x63\x3a\x74\
\x79\x70\x65\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x72\
\x64\x66\x3a\x72\x65\x73\x6f\x75\x72\x63\x65\x3d\x22\x68\x74\x74\
\x70\x3a\x2f\x2f\x70\x75\x72\x6c\x2e\x6f\x72\x67\x2f\x64\x63\x2f\
\x64\x63\x6d\x69\x74\x79\x70\x65\x2f\x53\x74\x69\x6c\x6c\x49\x6d\
\x61\x67\x65\x22\x20\x2f\x3e\x0a\x20\x20\x20\x20\x20\x20\x20\x20\
\x3c\x64\x63\x3a\x74\x69\x74\x6c\x65\x3e\x3c\x2f\x64\x63\x3a\x74\
\x69\x74\x6c\x65\x3e\x0a\x20\x20\x20\x20\x20\x20\x3c\x2f\x63\x63\
\x3a\x57\x6f\x72\x6b\x3e\x0a\x20\x20\x20\x20\x3c\x2f\x72\x64\x66\
\x3a\x52\x44\x46\x3e\x0a\x20\x20\x3c\x2f\x6d\x65\x74\x61\x64\x61\
\x74\x61\x3e\x0a\x20\x20\x3c\x64\x65\x66\x73\x0a\x20\x20\x20\x20\
\x20\x69\x64\x3d\x22\x64\x65\x66\x73\x33\x38\x38\x33\x22\x20\x2f\
\x3e\x0a\x20\x20\x3c\x73\x6f\x64\x69\x70\x6f\x64\x69\x3a\x6e\x61\
\x6d\x65\x64\x76\x69\x65\x77\x0a\x20\x20\x20\x20\x20\x70\x61\x67\
\x65\x63\x6f\x6c\x6f\x72\x3d\x22\x23\x66\x66\x66\x66\x66\x66\x22\
\x0a\x20\x20\x20\x20\x20\x62\x6f\x72\x64\x65\x72\x63\x6f\x6c\x6f\
\x72\x3d\x22\x23\x36\x36\x36\x36\x36\x36\x22\x0a\x20\x20\x20\x20\
\x20\x62\x6f\x72\x64\x65\x72\x6f\x70\x61\x63\x69\x74\x79\x3d\x22\
\x31\x22\x0a\x20\x20\x20\x20\x20\x6f\x62\x6a\x65\x63\x74\x74\x6f\
\x6c\x65\x72\x61\x6e\x63\x65\x3d\x22\x31\x30\x22\x0a\x20\x20\x20\
\x20\x20\x67\x72\x69\x64\x74\x6f\x6c\x65\x72\x61\x6e\x63\x65\x3d\
\x22\x31\x30\x22\x0a\x20\x20\x20\x20\x20\x67\x75\x69\x64\x65\x74\
\x6f\x6c\x65\x72\x61\x6e\x63\x65\x3d\x22\x31\x30\x22\x0a\x20\x20\
\x20\x20\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\x70\x61\x67\x65\
\x6f\x70\x61\x63\x69\x74\x79\x3d\x22\x30\x22\x0a\x20\x20\x20\x20\
\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\x70\x61\x67\x65\x73\x68\
\x61\x64\x6f\x77\x3d\x22\x32\x22\x0a\x20\x20\x20\x20\x20\x69\x6e\
\x6b\x73\x63\x61\x70\x65\x3a\x77\x69\x6e\x64\x6f\x77\x2d\x77\x69\
\x64\x74\x68\x3d\x22\x31\x33\x36\x34\x22\x0a\x20\x20\x20\x20\x20\
\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\x77\x69\x6e\x64\x6f\x77\x2d\
\x68\x65\x69\x67\x68\x74\x3d\x22\x37\x35\x30\x22\x0a\x20\x20\x20\
\x20\x20\x69\x64\x3d\x22\x6e\x61\x6d\x65\x64\x76\x69\x65\x77\x33\
\x38\x38\x31\x22\x0a\x20\x20\x20\x20\x20\x73\x68\x6f\x77\x67\x72\
\x69\x64\x3d\x22\x66\x61\x6c\x73\x65\x22\x0a\x20\x20\x20\x20\x20\
\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\x7a\x6f\x6f\x6d\x3d\x22\x37\
\x2e\x33\x37\x35\x22\x0a\x20\x20\x20\x20\x20\x69\x6e\x6b\x73\x63\
\x61\x70\x65\x3a\x63\x78\x3d\x22\x31\x36\x22\x0a\x20\x20\x20\x20\
\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\x63\x79\x3d\x22\x31\x36\
\x22\x0a\x20\x20\x20\x20\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\
\x77\x69\x6e\x64\x6f\x77\x2d\x78\x3d\x22\x30\x22\x0a\x20\x20\x20\
\x20\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\x77\x69\x6e\x64\x6f\
\x77\x2d\x79\x3d\x22\x31\x36\x22\x0a\x20\x20\x20\x20\x20\x69\x6e\
\x6b\x73\x63\x61\x70\x65\x3a\x77\x69\x6e\x64\x6f\x77\x2d\x6d\x61\
\x78\x69\x6d\x69\x7a\x65\x64\x3d\x22\x30\x22\x0a\x20\x20\x20\x20\
\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\x63\x75\x72\x72\x65\x6e\
\x74\x2d\x6c\x61\x79\x65\x72\x3d\x22\x73\x76\x67\x33\x38\x37\x39\
\x22\x20\x2f\x3e\x0a\x20\x20\x3c\x69\x6d\x61\x67\x65\x0a\x20\x20\
\x20\x20\x20\x77\x69\x64\x74\x68\x3d\x22\x33\x32\x22\x0a\x20\x20\
\x20\x20\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x33\x32\x22\x0a\x20\
\x20\x20\x20\x20\x78\x6c\x69\x6e\x6b\x3a\x68\x72\x65\x66\x3d\x22\
\x64\x61\x74\x61\x3a\x69\x6d\x61\x67\x65\x2f\x70\x6e\x67\x3b\x62\
\x61\x73\x65\x36\x34\x2c\x69\x56\x42\x4f\x52\x77\x30\x4b\x47\x67\
\x6f\x41\x41\x41\x41\x4e\x53\x55\x68\x45\x55\x67\x41\x41\x41\x43\
\x41\x41\x41\x41\x41\x67\x43\x41\x59\x41\x41\x41\x42\x7a\x65\x6e\
\x72\x30\x41\x41\x41\x41\x42\x48\x4e\x43\x53\x56\x51\x49\x43\x41\
\x67\x49\x66\x41\x68\x6b\x69\x41\x41\x41\x41\x46\x70\x4a\x52\x45\
\x46\x55\x0a\x57\x49\x58\x74\x31\x7a\x45\x4b\x77\x44\x41\x4d\x42\
\x4d\x46\x56\x79\x43\x50\x39\x78\x44\x7a\x53\x34\x44\x52\x4a\x37\
\x55\x6f\x63\x49\x58\x75\x31\x51\x59\x4f\x4b\x4d\x79\x70\x67\x45\
\x55\x34\x53\x73\x49\x37\x67\x63\x41\x41\x45\x43\x42\x41\x67\x49\
\x41\x34\x34\x39\x79\x2b\x75\x76\x71\x71\x65\x34\x77\x73\x62\x0a\
\x65\x44\x4e\x48\x4e\x63\x7a\x33\x4d\x78\x49\x67\x51\x4d\x42\x54\
\x52\x4a\x31\x31\x75\x30\x6c\x38\x41\x2b\x42\x64\x49\x4f\x44\x76\
\x67\x43\x4a\x38\x6e\x74\x38\x44\x53\x67\x30\x35\x51\x36\x2b\x69\
\x72\x67\x41\x41\x41\x41\x42\x4a\x52\x55\x35\x45\x72\x6b\x4a\x67\
\x67\x67\x3d\x3d\x0a\x22\x0a\x20\x20\x20\x20\x20\x69\x64\x3d\x22\
\x69\x6d\x61\x67\x65\x33\x38\x38\x37\x22\x0a\x20\x20\x20\x20\x20\
\x78\x3d\x22\x30\x22\x0a\x20\x20\x20\x20\x20\x79\x3d\x22\x30\x22\
\x20\x2f\x3e\x0a\x3c\x2f\x73\x76\x67\x3e\x0a\
\x00\x00\x0a\x85\
\x3c\
\x3f\x78\x6d\x6c\x20\x76\x65\x72\x73\x69\x6f\x6e\x3d\x22\x31\x2e\
\x30\x22\x20\x65\x6e\x63\x6f\x64\x69\x6e\x67\x3d\x22\x55\x54\x46\
\x2d\x38\x22\x20\x73\x74\x61\x6e\x64\x61\x6c\x6f\x6e\x65\x3d\x22\
\x6e\x6f\x22\x3f\x3e\x0a\x3c\x21\x2d\x2d\x20\x43\x72\x65\x61\x74\
\x65\x64\x20\x77\x69\x74\x68\x20\x49\x6e\x6b\x73\x63\x61\x70\x65\
\x20\x28\x68\x74\x74\x70\x3a\x2f\x2f\x77\x77\x77\x2e\x69\x6e\x6b\
\x73\x63\x61\x70\x65\x2e\x6f\x72\x67\x2f\x29\x20\x2d\x2d\x3e\x0a\
\x0a\x3c\x73\x76\x67\x0a\x20\x20\x20\x78\x6d\x6c\x6e\x73\x3a\x64\
\x63\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x70\x75\x72\x6c\x2e\x6f\
\x72\x67\x2f\x64\x63\x2f\x65\x6c\x65\x6d\x65\x6e\x74\x73\x2f\x31\
\x2e\x31\x2f\x22\x0a\x20\x20\x20\x78\x6d\x6c\x6e\x73\x3a\x63\x63\
\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x63\x72\x65\x61\x74\x69\x76\
\x65\x63\x6f\x6d\x6d\x6f\x6e\x73\x2e\x6f\x72\x67\x2f\x6e\x73\x23\
\x22\x0a\x20\x20\x20\x78\x6d\x6c\x6e\x73\x3a\x72\x64\x66\x3d\x22\
\x68\x74\x74\x70\x3a\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\
\x67\x2f\x31\x39\x39\x39\x2f\x30\x32\x2f\x32\x32\x2d\x72\x64\x66\
\x2d\x73\x79\x6e\x74\x61\x78\x2d\x6e\x73\x23\x22\x0a\x20\x20\x20\
\x78\x6d\x6c\x6e\x73\x3a\x73\x76\x67\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x0a\x20\x20\x20\x78\x6d\x6c\x6e\x73\x3d\
\x22\x68\x74\x74\x70\x3a\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\
\x72\x67\x2f\x32\x30\x30\x30\x2f\x73\x76\x67\x22\x0a\x20\x20\x20\
\x78\x6d\x6c\x6e\x73\x3a\x78\x6c\x69\x6e\x6b\x3d\x22\x68\x74\x74\
\x70\x3a\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x31\
\x39\x39\x39\x2f\x78\x6c\x69\x6e\x6b\x22\x0a\x20\x20\x20\x78\x6d\
\x6c\x6e\x73\x3a\x73\x6f\x64\x69\x70\x6f\x64\x69\x3d\x22\x68\x74\
\x74\x70\x3a\x2f\x2f\x73\x6f\x64\x69\x70\x6f\x64\x69\x2e\x73\x6f\
\x75\x72\x63\x65\x66\x6f\x72\x67\x65\x2e\x6e\x65\x74\x2f\x44\x54\
\x44\x2f\x73\x6f\x64\x69\x70\x6f\x64\x69\x2d\x30\x2e\x64\x74\x64\
\x22\x0a\x20\x20\x20\x78\x6d\x6c\x6e\x73\x3a\x69\x6e\x6b\x73\x63\
\x61\x70\x65\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x77\x77\x77\x2e\
\x69\x6e\x6b\x73\x63\x61\x70\x65\x2e\x6f\x72\x67\x2f\x6e\x61\x6d\
\x65\x73\x70\x61\x63\x65\x73\x2f\x69\x6e\x6b\x73\x63\x61\x70\x65\
\x22\x0a\x20\x20\x20\x69\x64\x3d\x22\x73\x76\x67\x33\x33\x35\x36\
\x22\x0a\x20\x20\x20\x76\x65\x72\x73\x69\x6f\x6e\x3d\x22\x31\x2e\
\x31\x22\x0a\x20\x20\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\x76\
\x65\x72\x73\x69\x6f\x6e\x3d\x22\x30\x2e\x34\x38\x2e\x33\x2e\x31\
\x20\x72\x39\x38\x38\x36\x22\x0a\x20\x20\x20\x77\x69\x64\x74\x68\
\x3d\x22\x33\x32\x22\x0a\x20\x20\x20\x68\x65\x69\x67\x68\x74\x3d\
\x22\x33\x32\x22\x0a\x20\x20\x20\x73\x6f\x64\x69\x70\x6f\x64\x69\
\x3a\x64\x6f\x63\x6e\x61\x6d\x65\x3d\x22\x67\x72\x6f\x75\x70\x5f\
\x64\x65\x6c\x61\x79\x2e\x69\x63\x6f\x22\x3e\x0a\x20\x20\x3c\x6d\
\x65\x74\x61\x64\x61\x74\x61\x0a\x20\x20\x20\x20\x20\x69\x64\x3d\
\x22\x6d\x65\x74\x61\x64\x61\x74\x61\x33\x33\x36\x32\x22\x3e\x0a\
\x20\x20\x20\x20\x3c\x72\x64\x66\x3a\x52\x44\x46\x3e\x0a\x20\x20\
\x20\x20\x20\x20\x3c\x63\x63\x3a\x57\x6f\x72\x6b\x0a\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x72\x64\x66\x3a\x61\x62\x6f\x75\x74\x3d\
\x22\x22\x3e\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x3c\x64\x63\x3a\
\x66\x6f\x72\x6d\x61\x74\x3e\x69\x6d\x61\x67\x65\x2f\x73\x76\x67\
\x2b\x78\x6d\x6c\x3c\x2f\x64\x63\x3a\x66\x6f\x72\x6d\x61\x74\x3e\
\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x3c\x64\x63\x3a\x74\x79\x70\
\x65\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x72\x64\x66\
\x3a\x72\x65\x73\x6f\x75\x72\x63\x65\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x70\x75\x72\x6c\x2e\x6f\x72\x67\x2f\x64\x63\x2f\x64\x63\
\x6d\x69\x74\x79\x70\x65\x2f\x53\x74\x69\x6c\x6c\x49\x6d\x61\x67\
\x65\x22\x20\x2f\x3e\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x3c\x64\
\x63\x3a\x74\x69\x74\x6c\x65\x3e\x3c\x2f\x64\x63\x3a\x74\x69\x74\
\x6c\x65\x3e\x0a\x20\x20\x20\x20\x20\x20\x3c\x2f\x63\x63\x3a\x57\
\x6f\x72\x6b\x3e\x0a\x20\x20\x20\x20\x3c\x2f\x72\x64\x66\x3a\x52\
\x44\x46\x3e\x0a\x20\x20\x3c\x2f\x6d\x65\x74\x61\x64\x61\x74\x61\
\x3e\x0a\x20\x20\x3c\x64\x65\x66\x73\x0a\x20\x20\x20\x20\x20\x69\
\x64\x3d\x22\x64\x65\x66\x73\x33\x33\x36\x30\x22\x20\x2f\x3e\x0a\
\x20\x20\x3c\x73\x6f\x64\x69\x70\x6f\x64\x69\x3a\x6e\x61\x6d\x65\
\x64\x76\x69\x65\x77\x0a\x20\x20\x20\x20\x20\x70\x61\x67\x65\x63\
\x6f\x6c\x6f\x72\x3d\x22\x23\x66\x66\x66\x66\x66\x66\x22\x0a\x20\
\x20\x20\x20\x20\x62\x6f\x72\x64\x65\x72\x63\x6f\x6c\x6f\x72\x3d\
\x22\x23\x36\x36\x36\x36\x36\x36\x22\x0a\x20\x20\x20\x20\x20\x62\
\x6f\x72\x64\x65\x72\x6f\x70\x61\x63\x69\x74\x79\x3d\x22\x31\x22\
\x0a\x20\x20\x20\x20\x20\x6f\x62\x6a\x65\x63\x74\x74\x6f\x6c\x65\
\x72\x61\x6e\x63\x65\x3d\x22\x31\x30\x22\x0a\x20\x20\x20\x20\x20\
\x67\x72\x69\x64\x74\x6f\x6c\x65\x72\x61\x6e\x63\x65\x3d\x22\x31\
\x30\x22\x0a\x20\x20\x20\x20\x20\x67\x75\x69\x64\x65\x74\x6f\x6c\
\x65\x72\x61\x6e\x63\x65\x3d\x22\x31\x30\x22\x0a\x20\x20\x20\x20\
\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\x70\x61\x67\x65\x6f\x70\
\x61\x63\x69\x74\x79\x3d\x22\x30\x22\x0a\x20\x20\x20\x20\x20\x69\
\x6e\x6b\x73\x63\x61\x70\x65\x3a\x70\x61\x67\x65\x73\x68\x61\x64\
\x6f\x77\x3d\x22\x32\x22\x0a\x20\x20\x20\x20\x20\x69\x6e\x6b\x73\
\x63\x61\x70\x65\x3a\x77\x69\x6e\x64\x6f\x77\x2d\x77\x69\x64\x74\
\x68\x3d\x22\x31\x33\x36\x34\x22\x0a\x20\x20\x20\x20\x20\x69\x6e\
\x6b\x73\x63\x61\x70\x65\x3a\x77\x69\x6e\x64\x6f\x77\x2d\x68\x65\
\x69\x67\x68\x74\x3d\x22\x37\x35\x30\x22\x0a\x20\x20\x20\x20\x20\
\x69\x64\x3d\x22\x6e\x61\x6d\x65\x64\x76\x69\x65\x77\x33\x33\x35\
\x38\x22\x0a\x20\x20\x20\x20\x20\x73\x68\x6f\x77\x67\x72\x69\x64\
\x3d\x22\x66\x61\x6c\x73\x65\x22\x0a\x20\x20\x20\x20\x20\x69\x6e\
\x6b\x73\x63\x61\x70\x65\x3a\x7a\x6f\x6f\x6d\x3d\x22\x37\x2e\x33\
\x37\x35\x22\x0a\x20\x20\x20\x20\x20\x69\x6e\x6b\x73\x63\x61\x70\
\x65\x3a\x63\x78\x3d\x22\x31\x36\x22\x0a\x20\x20\x20\x20\x20\x69\
\x6e\x6b\x73\x63\x61\x70\x65\x3a\x63\x79\x3d\x22\x31\x36\x22\x0a\
\x20\x20\x20\x20\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\x77\x69\
\x6e\x64\x6f\x77\x2d\x78\x3d\x22\x30\x22\x0a\x20\x20\x20\x20\x20\
\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\x77\x69\x6e\x64\x6f\x77\x2d\
\x79\x3d\x22\x31\x36\x22\x0a\x20\x20\x20\x20\x20\x69\x6e\x6b\x73\
\x63\x61\x70\x65\x3a\x77\x69\x6e\x64\x6f\x77\x2d\x6d\x61\x78\x69\
\x6d\x69\x7a\x65\x64\x3d\x22\x30\x22\x0a\x20\x20\x20\x20\x20\x69\
\x6e\x6b\x73\x63\x61\x70\x65\x3a\x63\x75\x72\x72\x65\x6e\x74\x2d\
\x6c\x61\x79\x65\x72\x3d\x22\x73\x76\x67\x33\x33\x35\x36\x22\x20\
\x2f\x3e\x0a\x20\x20\x3c\x69\x6d\x61\x67\x65\x0a\x20\x20\x20\x20\
\x20\x77\x69\x64\x74\x68\x3d\x22\x33\x32\x22\x0a\x20\x20\x20\x20\
\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x33\x32\x22\x0a\x20\x20\x20\
\x20\x20\x78\x6c\x69\x6e\x6b\x3a\x68\x72\x65\x66\x3d\x22\x64\x61\
\x74\x61\x3a\x69\x6d\x61\x67\x65\x2f\x70\x6e\x67\x3b\x62\x61\x73\
\x65\x36\x34\x2c\x69\x56\x42\x4f\x52\x77\x30\x4b\x47\x67\x6f\x41\
\x41\x41\x41\x4e\x53\x55\x68\x45\x55\x67\x41\x41\x41\x43\x41\x41\
\x41\x41\x41\x67\x43\x41\x59\x41\x41\x41\x42\x7a\x65\x6e\x72\x30\
\x41\x41\x41\x41\x42\x48\x4e\x43\x53\x56\x51\x49\x43\x41\x67\x49\
\x66\x41\x68\x6b\x69\x41\x41\x41\x41\x71\x5a\x4a\x52\x45\x46\x55\
\x0a\x57\x49\x58\x74\x6c\x6b\x74\x49\x56\x47\x45\x59\x68\x70\x2f\
\x6a\x39\x7a\x75\x57\x56\x37\x4b\x63\x30\x44\x52\x6f\x49\x39\x47\
\x71\x57\x75\x62\x43\x63\x69\x47\x47\x61\x46\x67\x49\x45\x53\x30\
\x73\x6b\x56\x61\x74\x4b\x6d\x67\x54\x64\x74\x75\x71\x42\x43\x47\
\x31\x4b\x4f\x30\x2b\x4a\x42\x57\x45\x53\x53\x34\x4b\x0a\x51\x6a\
\x45\x4b\x4e\x46\x33\x6b\x6e\x61\x6c\x77\x55\x32\x47\x53\x61\x4e\
\x62\x4d\x6e\x4a\x6e\x54\x34\x6b\x79\x4f\x34\x2b\x58\x4d\x4f\x54\
\x72\x6d\x78\x67\x63\x4f\x35\x37\x39\x38\x33\x2f\x6c\x66\x2f\x74\
\x74\x35\x77\x53\x34\x79\x65\x52\x42\x35\x4f\x62\x70\x6f\x6e\x78\
\x70\x35\x68\x44\x6f\x31\x5a\x76\x74\x62\x0a\x63\x31\x4e\x74\x52\
\x32\x71\x76\x6d\x38\x43\x33\x64\x54\x6d\x44\x57\x4a\x46\x67\x4f\
\x31\x49\x76\x7a\x34\x33\x33\x34\x4d\x34\x45\x49\x48\x70\x30\x50\
\x53\x69\x6f\x72\x6c\x72\x6b\x37\x6a\x76\x6f\x33\x77\x2b\x37\x74\
\x79\x41\x74\x6e\x55\x68\x64\x49\x2b\x78\x4c\x69\x34\x38\x38\x79\
\x53\x35\x44\x65\x58\x54\x55\x0a\x50\x5a\x33\x45\x6f\x69\x71\x55\
\x78\x35\x6a\x74\x55\x33\x32\x74\x79\x4e\x6b\x42\x32\x4f\x57\x4b\
\x37\x49\x47\x67\x68\x6f\x7a\x66\x51\x46\x30\x65\x67\x30\x4a\x62\
\x79\x32\x73\x39\x41\x31\x72\x44\x4d\x34\x79\x6d\x77\x2b\x68\x44\
\x4c\x6f\x77\x37\x53\x5a\x47\x4f\x44\x6a\x63\x4d\x6c\x4b\x4c\x31\
\x56\x55\x47\x2f\x0a\x66\x34\x35\x69\x67\x2b\x43\x62\x38\x35\x43\
\x66\x69\x79\x51\x57\x72\x31\x77\x41\x43\x4d\x48\x32\x35\x33\x41\
\x31\x68\x50\x36\x72\x4f\x64\x49\x36\x73\x77\x63\x41\x50\x66\x58\
\x54\x77\x70\x51\x6a\x50\x30\x48\x54\x30\x51\x4c\x56\x7a\x67\x51\
\x6f\x6a\x7a\x48\x37\x4c\x4d\x70\x4f\x58\x36\x54\x38\x63\x61\x50\
\x35\x0a\x44\x6f\x51\x57\x6a\x39\x56\x43\x59\x4b\x54\x59\x45\x52\
\x42\x5a\x4a\x2f\x32\x6f\x5a\x68\x33\x61\x6b\x77\x4c\x44\x51\x4b\
\x46\x43\x5a\x66\x51\x53\x42\x46\x79\x42\x7a\x66\x67\x5a\x6a\x34\
\x35\x37\x6e\x77\x78\x65\x46\x35\x6f\x38\x73\x53\x4d\x67\x31\x68\
\x49\x45\x6b\x5a\x49\x79\x30\x45\x44\x6b\x6e\x43\x6d\x35\x0a\x2b\
\x67\x75\x2b\x6d\x73\x38\x59\x47\x56\x35\x43\x32\x51\x30\x4c\x4d\
\x6c\x52\x6c\x44\x66\x7a\x34\x54\x55\x41\x39\x74\x43\x50\x41\x47\
\x73\x6b\x71\x4e\x30\x39\x42\x38\x77\x77\x4a\x42\x34\x34\x68\x31\
\x37\x7a\x49\x79\x56\x4b\x7a\x4d\x35\x53\x4a\x65\x74\x57\x47\x33\
\x4f\x78\x43\x32\x6f\x64\x51\x64\x56\x4f\x6f\x0a\x39\x6c\x62\x6b\
\x65\x6a\x65\x4a\x52\x58\x74\x58\x50\x72\x68\x74\x4d\x74\x4e\x51\
\x48\x39\x70\x51\x5a\x37\x37\x43\x63\x62\x66\x54\x62\x41\x63\x58\
\x30\x56\x4a\x4d\x54\x45\x48\x71\x4a\x45\x77\x48\x34\x50\x37\x33\
\x4e\x52\x43\x77\x4d\x75\x7a\x2f\x6a\x4b\x7a\x51\x6b\x30\x39\x44\
\x63\x4d\x4e\x79\x55\x73\x32\x6a\x0a\x74\x2b\x54\x5a\x58\x30\x56\
\x69\x48\x76\x74\x31\x35\x6d\x50\x6c\x69\x46\x62\x41\x6d\x6a\x73\
\x69\x42\x78\x68\x71\x4e\x54\x61\x72\x67\x33\x74\x41\x30\x32\x50\
\x48\x78\x46\x75\x41\x35\x45\x51\x37\x6f\x69\x67\x4d\x51\x62\x70\
\x75\x6f\x31\x71\x6d\x41\x56\x42\x4e\x30\x38\x69\x46\x57\x31\x44\
\x72\x36\x48\x4b\x4c\x0a\x34\x59\x6a\x71\x77\x34\x35\x6f\x64\x4a\
\x34\x6a\x41\x6b\x53\x37\x68\x4f\x62\x70\x51\x58\x2b\x63\x42\x59\
\x42\x2b\x49\x68\x57\x2b\x76\x55\x41\x36\x61\x35\x30\x49\x73\x43\
\x5a\x71\x7a\x59\x65\x54\x6f\x6a\x33\x68\x30\x77\x6d\x6f\x4e\x49\
\x32\x4a\x4e\x48\x6a\x44\x4d\x65\x6d\x6f\x4b\x78\x4e\x4f\x68\x6e\
\x41\x77\x0a\x58\x66\x6d\x2b\x36\x4c\x6f\x2f\x41\x31\x72\x2b\x41\
\x4b\x42\x74\x32\x6d\x61\x32\x50\x5a\x69\x47\x48\x52\x6d\x72\x4a\
\x4b\x41\x33\x62\x4c\x48\x2b\x75\x56\x30\x74\x41\x43\x55\x75\x41\
\x49\x79\x33\x50\x57\x62\x62\x52\x51\x55\x7a\x67\x58\x67\x4b\x43\
\x44\x73\x69\x51\x4c\x53\x49\x49\x77\x49\x77\x73\x67\x5a\x4a\x0a\
\x53\x4b\x2b\x41\x67\x75\x31\x51\x36\x49\x64\x44\x65\x53\x51\x55\
\x56\x47\x42\x30\x6a\x44\x67\x52\x59\x49\x32\x34\x4c\x52\x78\x52\
\x64\x77\x37\x53\x4f\x6f\x4c\x55\x44\x30\x4b\x6a\x32\x79\x77\x33\
\x6a\x6b\x4a\x78\x58\x76\x77\x45\x72\x50\x4d\x66\x2b\x41\x75\x6f\
\x6e\x4e\x58\x7a\x6c\x59\x54\x52\x77\x51\x41\x41\x0a\x41\x41\x42\
\x4a\x52\x55\x35\x45\x72\x6b\x4a\x67\x67\x67\x3d\x3d\x0a\x22\x0a\
\x20\x20\x20\x20\x20\x69\x64\x3d\x22\x69\x6d\x61\x67\x65\x33\x33\
\x36\x34\x22\x0a\x20\x20\x20\x20\x20\x78\x3d\x22\x30\x22\x0a\x20\
\x20\x20\x20\x20\x79\x3d\x22\x30\x22\x20\x2f\x3e\x0a\x3c\x2f\x73\
\x76\x67\x3e\x0a\
\x00\x00\x09\xd2\
\x3c\
\x3f\x78\x6d\x6c\x20\x76\x65\x72\x73\x69\x6f\x6e\x3d\x22\x31\x2e\
\x30\x22\x20\x65\x6e\x63\x6f\x64\x69\x6e\x67\x3d\x22\x55\x54\x46\
\x2d\x38\x22\x20\x73\x74\x61\x6e\x64\x61\x6c\x6f\x6e\x65\x3d\x22\
\x6e\x6f\x22\x3f\x3e\x0a\x3c\x21\x2d\x2d\x20\x43\x72\x65\x61\x74\
\x65\x64\x20\x77\x69\x74\x68\x20\x49\x6e\x6b\x73\x63\x61\x70\x65\
\x20\x28\x68\x74\x74\x70\x3a\x2f\x2f\x77\x77\x77\x2e\x69\x6e\x6b\
\x73\x63\x61\x70\x65\x2e\x6f\x72\x67\x2f\x29\x20\x2d\x2d\x3e\x0a\
\x0a\x3c\x73\x76\x67\x0a\x20\x20\x20\x78\x6d\x6c\x6e\x73\x3a\x64\
\x63\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x70\x75\x72\x6c\x2e\x6f\
\x72\x67\x2f\x64\x63\x2f\x65\x6c\x65\x6d\x65\x6e\x74\x73\x2f\x31\
\x2e\x31\x2f\x22\x0a\x20\x20\x20\x78\x6d\x6c\x6e\x73\x3a\x63\x63\
\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x63\x72\x65\x61\x74\x69\x76\
\x65\x63\x6f\x6d\x6d\x6f\x6e\x73\x2e\x6f\x72\x67\x2f\x6e\x73\x23\
\x22\x0a\x20\x20\x20\x78\x6d\x6c\x6e\x73\x3a\x72\x64\x66\x3d\x22\
\x68\x74\x74\x70\x3a\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\
\x67\x2f\x31\x39\x39\x39\x2f\x30\x32\x2f\x32\x32\x2d\x72\x64\x66\
\x2d\x73\x79\x6e\x74\x61\x78\x2d\x6e\x73\x23\x22\x0a\x20\x20\x20\
\x78\x6d\x6c\x6e\x73\x3a\x73\x76\x67\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x0a\x20\x20\x20\x78\x6d\x6c\x6e\x73\x3d\
\x22\x68\x74\x74\x70\x3a\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\
\x72\x67\x2f\x32\x30\x30\x30\x2f\x73\x76\x67\x22\x0a\x20\x20\x20\
\x78\x6d\x6c\x6e\x73\x3a\x78\x6c\x69\x6e\x6b\x3d\x22\x68\x74\x74\
\x70\x3a\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x31\
\x39\x39\x39\x2f\x78\x6c\x69\x6e\x6b\x22\x0a\x20\x20\x20\x78\x6d\
\x6c\x6e\x73\x3a\x73\x6f\x64\x69\x70\x6f\x64\x69\x3d\x22\x68\x74\
\x74\x70\x3a\x2f\x2f\x73\x6f\x64\x69\x70\x6f\x64\x69\x2e\x73\x6f\
\x75\x72\x63\x65\x66\x6f\x72\x67\x65\x2e\x6e\x65\x74\x2f\x44\x54\
\x44\x2f\x73\x6f\x64\x69\x70\x6f\x64\x69\x2d\x30\x2e\x64\x74\x64\
\x22\x0a\x20\x20\x20\x78\x6d\x6c\x6e\x73\x3a\x69\x6e\x6b\x73\x63\
\x61\x70\x65\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x77\x77\x77\x2e\
\x69\x6e\x6b\x73\x63\x61\x70\x65\x2e\x6f\x72\x67\x2f\x6e\x61\x6d\
\x65\x73\x70\x61\x63\x65\x73\x2f\x69\x6e\x6b\x73\x63\x61\x70\x65\
\x22\x0a\x20\x20\x20\x69\x64\x3d\x22\x73\x76\x67\x33\x35\x39\x36\
\x22\x0a\x20\x20\x20\x76\x65\x72\x73\x69\x6f\x6e\x3d\x22\x31\x2e\
\x31\x22\x0a\x20\x20\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\x76\
\x65\x72\x73\x69\x6f\x6e\x3d\x22\x30\x2e\x34\x38\x2e\x33\x2e\x31\
\x20\x72\x39\x38\x38\x36\x22\x0a\x20\x20\x20\x77\x69\x64\x74\x68\
\x3d\x22\x33\x32\x22\x0a\x20\x20\x20\x68\x65\x69\x67\x68\x74\x3d\
\x22\x33\x32\x22\x0a\x20\x20\x20\x73\x6f\x64\x69\x70\x6f\x64\x69\
\x3a\x64\x6f\x63\x6e\x61\x6d\x65\x3d\x22\x6f\x76\x65\x72\x6c\x61\
\x79\x2e\x69\x63\x6f\x22\x3e\x0a\x20\x20\x3c\x6d\x65\x74\x61\x64\
\x61\x74\x61\x0a\x20\x20\x20\x20\x20\x69\x64\x3d\x22\x6d\x65\x74\
\x61\x64\x61\x74\x61\x33\x36\x30\x32\x22\x3e\x0a\x20\x20\x20\x20\
\x3c\x72\x64\x66\x3a\x52\x44\x46\x3e\x0a\x20\x20\x20\x20\x20\x20\
\x3c\x63\x63\x3a\x57\x6f\x72\x6b\x0a\x20\x20\x20\x20\x20\x20\x20\
\x20\x20\x72\x64\x66\x3a\x61\x62\x6f\x75\x74\x3d\x22\x22\x3e\x0a\
\x20\x20\x20\x20\x20\x20\x20\x20\x3c\x64\x63\x3a\x66\x6f\x72\x6d\
\x61\x74\x3e\x69\x6d\x61\x67\x65\x2f\x73\x76\x67\x2b\x78\x6d\x6c\
\x3c\x2f\x64\x63\x3a\x66\x6f\x72\x6d\x61\x74\x3e\x0a\x20\x20\x20\
\x20\x20\x20\x20\x20\x3c\x64\x63\x3a\x74\x79\x70\x65\x0a\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x72\x64\x66\x3a\x72\x65\x73\
\x6f\x75\x72\x63\x65\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x70\x75\
\x72\x6c\x2e\x6f\x72\x67\x2f\x64\x63\x2f\x64\x63\x6d\x69\x74\x79\
\x70\x65\x2f\x53\x74\x69\x6c\x6c\x49\x6d\x61\x67\x65\x22\x20\x2f\
\x3e\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x3c\x64\x63\x3a\x74\x69\
\x74\x6c\x65\x3e\x3c\x2f\x64\x63\x3a\x74\x69\x74\x6c\x65\x3e\x0a\
\x20\x20\x20\x20\x20\x20\x3c\x2f\x63\x63\x3a\x57\x6f\x72\x6b\x3e\
\x0a\x20\x20\x20\x20\x3c\x2f\x72\x64\x66\x3a\x52\x44\x46\x3e\x0a\
\x20\x20\x3c\x2f\x6d\x65\x74\x61\x64\x61\x74\x61\x3e\x0a\x20\x20\
\x3c\x64\x65\x66\x73\x0a\x20\x20\x20\x20\x20\x69\x64\x3d\x22\x64\
\x65\x66\x73\x33\x36\x30\x30\x22\x20\x2f\x3e\x0a\x20\x20\x3c\x73\
\x6f\x64\x69\x70\x6f\x64\x69\x3a\x6e\x61\x6d\x65\x64\x76\x69\x65\
\x77\x0a\x20\x20\x20\x20\x20\x70\x61\x67\x65\x63\x6f\x6c\x6f\x72\
\x3d\x22\x23\x66\x66\x66\x66\x66\x66\x22\x0a\x20\x20\x20\x20\x20\
\x62\x6f\x72\x64\x65\x72\x63\x6f\x6c\x6f\x72\x3d\x22\x23\x36\x36\
\x36\x36\x36\x36\x22\x0a\x20\x20\x20\x20\x20\x62\x6f\x72\x64\x65\
\x72\x6f\x70\x61\x63\x69\x74\x79\x3d\x22\x31\x22\x0a\x20\x20\x20\
\x20\x20\x6f\x62\x6a\x65\x63\x74\x74\x6f\x6c\x65\x72\x61\x6e\x63\
\x65\x3d\x22\x31\x30\x22\x0a\x20\x20\x20\x20\x20\x67\x72\x69\x64\
\x74\x6f\x6c\x65\x72\x61\x6e\x63\x65\x3d\x22\x31\x30\x22\x0a\x20\
\x20\x20\x20\x20\x67\x75\x69\x64\x65\x74\x6f\x6c\x65\x72\x61\x6e\
\x63\x65\x3d\x22\x31\x30\x22\x0a\x20\x20\x20\x20\x20\x69\x6e\x6b\
\x73\x63\x61\x70\x65\x3a\x70\x61\x67\x65\x6f\x70\x61\x63\x69\x74\
\x79\x3d\x22\x30\x22\x0a\x20\x20\x20\x20\x20\x69\x6e\x6b\x73\x63\
\x61\x70\x65\x3a\x70\x61\x67\x65\x73\x68\x61\x64\x6f\x77\x3d\x22\
\x32\x22\x0a\x20\x20\x20\x20\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\
\x3a\x77\x69\x6e\x64\x6f\x77\x2d\x77\x69\x64\x74\x68\x3d\x22\x31\
\x33\x36\x34\x22\x0a\x20\x20\x20\x20\x20\x69\x6e\x6b\x73\x63\x61\
\x70\x65\x3a\x77\x69\x6e\x64\x6f\x77\x2d\x68\x65\x69\x67\x68\x74\
\x3d\x22\x37\x35\x30\x22\x0a\x20\x20\x20\x20\x20\x69\x64\x3d\x22\
\x6e\x61\x6d\x65\x64\x76\x69\x65\x77\x33\x35\x39\x38\x22\x0a\x20\
\x20\x20\x20\x20\x73\x68\x6f\x77\x67\x72\x69\x64\x3d\x22\x66\x61\
\x6c\x73\x65\x22\x0a\x20\x20\x20\x20\x20\x69\x6e\x6b\x73\x63\x61\
\x70\x65\x3a\x7a\x6f\x6f\x6d\x3d\x22\x37\x2e\x33\x37\x35\x22\x0a\
\x20\x20\x20\x20\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\x63\x78\
\x3d\x22\x31\x36\x22\x0a\x20\x20\x20\x20\x20\x69\x6e\x6b\x73\x63\
\x61\x70\x65\x3a\x63\x79\x3d\x22\x31\x36\x22\x0a\x20\x20\x20\x20\
\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\x77\x69\x6e\x64\x6f\x77\
\x2d\x78\x3d\x22\x30\x22\x0a\x20\x20\x20\x20\x20\x69\x6e\x6b\x73\
\x63\x61\x70\x65\x3a\x77\x69\x6e\x64\x6f\x77\x2d\x79\x3d\x22\x31\
\x36\x22\x0a\x20\x20\x20\x20\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\
\x3a\x77\x69\x6e\x64\x6f\x77\x2d\x6d\x61\x78\x69\x6d\x69\x7a\x65\
\x64\x3d\x22\x30\x22\x0a\x20\x20\x20\x20\x20\x69\x6e\x6b\x73\x63\
\x61\x70\x65\x3a\x63\x75\x72\x72\x65\x6e\x74\x2d\x6c\x61\x79\x65\
\x72\x3d\x22\x73\x76\x67\x33\x35\x39\x36\x22\x20\x2f\x3e\x0a\x20\
\x20\x3c\x69\x6d\x61\x67\x65\x0a\x20\x20\x20\x20\x20\x77\x69\x64\
\x74\x68\x3d\x22\x33\x32\x22\x0a\x20\x20\x20\x20\x20\x68\x65\x69\
\x67\x68\x74\x3d\x22\x33\x32\x22\x0a\x20\x20\x20\x20\x20\x78\x6c\
\x69\x6e\x6b\x3a\x68\x72\x65\x66\x3d\x22\x64\x61\x74\x61\x3a\x69\
\x6d\x61\x67\x65\x2f\x70\x6e\x67\x3b\x62\x61\x73\x65\x36\x34\x2c\
\x69\x56\x42\x4f\x52\x77\x30\x4b\x47\x67\x6f\x41\x41\x41\x41\x4e\
\x53\x55\x68\x45\x55\x67\x41\x41\x41\x43\x41\x41\x41\x41\x41\x67\
\x43\x41\x59\x41\x41\x41\x42\x7a\x65\x6e\x72\x30\x41\x41\x41\x41\
\x42\x48\x4e\x43\x53\x56\x51\x49\x43\x41\x67\x49\x66\x41\x68\x6b\
\x69\x41\x41\x41\x41\x69\x5a\x4a\x52\x45\x46\x55\x0a\x57\x49\x58\
\x74\x6c\x6b\x39\x49\x46\x46\x45\x63\x78\x7a\x38\x7a\x4c\x69\x4b\
\x37\x42\x4c\x6d\x73\x62\x46\x34\x32\x4c\x44\x31\x49\x55\x46\x4a\
\x55\x56\x48\x53\x51\x79\x6f\x67\x79\x42\x4b\x75\x54\x53\x59\x64\
\x51\x45\x49\x52\x4d\x64\x6c\x65\x4a\x56\x70\x4d\x4f\x32\x79\x35\
\x49\x73\x4f\x55\x68\x35\x74\x44\x66\x0a\x69\x33\x51\x49\x4f\x37\
\x6b\x52\x52\x6f\x6b\x46\x49\x74\x52\x42\x59\x30\x57\x43\x50\x4c\
\x51\x75\x6d\x70\x37\x63\x51\x2b\x6d\x2b\x44\x6c\x4d\x62\x30\x37\
\x62\x72\x76\x70\x6d\x46\x76\x65\x79\x44\x59\x59\x59\x33\x37\x2f\
\x74\x2b\x6e\x2f\x6e\x78\x66\x62\x2f\x66\x4b\x49\x43\x67\x79\x4b\
\x4f\x59\x41\x45\x49\x74\x0a\x59\x6e\x41\x41\x53\x67\x41\x6c\x67\
\x42\x4a\x41\x43\x61\x44\x6f\x41\x44\x62\x39\x46\x6a\x4a\x54\x6a\
\x71\x76\x42\x74\x32\x51\x56\x51\x41\x47\x45\x53\x46\x54\x49\x69\
\x64\x79\x33\x68\x76\x55\x6e\x66\x36\x2f\x46\x2b\x47\x5a\x37\x67\
\x52\x49\x47\x63\x51\x56\x43\x4f\x79\x77\x43\x6d\x50\x4f\x41\x53\
\x41\x54\x69\x0a\x6f\x44\x77\x45\x34\x62\x55\x4b\x6f\x41\x41\x69\
\x46\x44\x68\x4a\x36\x37\x6c\x5a\x64\x75\x31\x63\x79\x31\x2f\x6f\
\x48\x71\x71\x47\x31\x43\x79\x6f\x65\x33\x51\x76\x42\x4f\x74\x42\
\x6e\x54\x4d\x46\x45\x42\x34\x34\x51\x66\x52\x4e\x4c\x64\x48\x52\
\x52\x33\x4c\x69\x74\x42\x65\x53\x4e\x38\x44\x78\x41\x63\x51\x39\
\x0a\x38\x47\x73\x53\x57\x2b\x67\x65\x75\x4e\x62\x78\x6e\x70\x56\
\x56\x4f\x34\x39\x48\x47\x36\x51\x41\x2f\x6e\x72\x42\x63\x52\x39\
\x45\x54\x44\x49\x34\x38\x4e\x73\x44\x4e\x6c\x73\x4b\x62\x66\x67\
\x46\x76\x71\x45\x6d\x56\x6c\x62\x74\x65\x59\x74\x31\x4c\x36\x68\
\x76\x67\x56\x59\x6f\x37\x35\x51\x4e\x6e\x67\x59\x41\x0a\x32\x4c\
\x38\x33\x7a\x75\x57\x4c\x6e\x2b\x69\x35\x65\x53\x5a\x76\x38\x62\
\x65\x6c\x62\x51\x44\x48\x49\x4b\x58\x41\x54\x37\x6d\x7a\x2f\x43\
\x38\x41\x77\x4b\x42\x33\x67\x71\x6c\x70\x44\x2b\x4d\x54\x74\x56\
\x73\x4b\x4e\x7a\x64\x56\x32\x72\x6f\x75\x63\x4c\x76\x2f\x64\x52\
\x55\x6f\x6d\x74\x6b\x54\x59\x51\x42\x77\x0a\x32\x48\x38\x77\x63\
\x75\x63\x6c\x58\x66\x35\x6d\x31\x70\x50\x6c\x4f\x59\x58\x42\x79\
\x48\x45\x41\x2b\x72\x72\x66\x59\x61\x55\x75\x5a\x4e\x53\x42\x30\
\x34\x30\x4c\x48\x44\x32\x34\x79\x47\x43\x34\x4d\x61\x74\x6f\x61\
\x74\x70\x44\x52\x44\x76\x4d\x30\x35\x48\x6e\x6c\x4a\x57\x6c\x4c\
\x4e\x63\x46\x49\x52\x49\x56\x0a\x68\x6d\x76\x35\x73\x31\x4f\x34\
\x71\x77\x4a\x69\x35\x6c\x56\x4e\x78\x72\x75\x31\x2b\x65\x32\x69\
\x78\x74\x4d\x76\x78\x70\x37\x73\x4d\x38\x7a\x72\x58\x78\x2f\x38\
\x4c\x70\x6d\x46\x2f\x35\x64\x69\x6c\x7a\x4e\x4a\x4b\x42\x44\x6c\
\x36\x76\x55\x57\x4e\x6a\x61\x4d\x53\x7a\x71\x39\x35\x7a\x6c\x37\
\x61\x70\x37\x6d\x0a\x70\x70\x68\x78\x4a\x35\x4e\x5a\x79\x46\x71\
\x4b\x32\x79\x39\x39\x78\x4f\x56\x4d\x63\x76\x66\x42\x6b\x66\x53\
\x63\x39\x75\x77\x41\x73\x51\x55\x58\x34\x59\x48\x78\x4c\x43\x70\
\x35\x4c\x2b\x54\x73\x68\x6c\x2b\x2b\x56\x72\x4c\x37\x55\x49\x39\
\x68\x62\x6d\x34\x79\x51\x6e\x33\x64\x63\x76\x59\x4e\x35\x54\x71\
\x6c\x0a\x4d\x4e\x57\x4f\x63\x34\x33\x4d\x48\x70\x45\x62\x6f\x4f\
\x42\x2f\x52\x4c\x4a\x65\x4b\x48\x67\x47\x34\x45\x38\x57\x69\x47\
\x2b\x39\x30\x71\x63\x44\x46\x44\x53\x36\x35\x50\x67\x46\x53\x65\
\x48\x64\x62\x59\x4e\x50\x58\x30\x55\x41\x41\x41\x41\x41\x53\x55\
\x56\x4f\x52\x4b\x35\x43\x59\x49\x49\x3d\x0a\x22\x0a\x20\x20\x20\
\x20\x20\x69\x64\x3d\x22\x69\x6d\x61\x67\x65\x33\x36\x30\x34\x22\
\x0a\x20\x20\x20\x20\x20\x78\x3d\x22\x30\x22\x0a\x20\x20\x20\x20\
\x20\x79\x3d\x22\x30\x22\x20\x2f\x3e\x0a\x3c\x2f\x73\x76\x67\x3e\
\x0a\
\x00\x00\x09\x17\
\x3c\
\x3f\x78\x6d\x6c\x20\x76\x65\x72\x73\x69\x6f\x6e\x3d\x22\x31\x2e\
\x30\x22\x20\x65\x6e\x63\x6f\x64\x69\x6e\x67\x3d\x22\x55\x54\x46\
\x2d\x38\x22\x20\x73\x74\x61\x6e\x64\x61\x6c\x6f\x6e\x65\x3d\x22\
\x6e\x6f\x22\x3f\x3e\x0a\x3c\x21\x2d\x2d\x20\x43\x72\x65\x61\x74\
\x65\x64\x20\x77\x69\x74\x68\x20\x49\x6e\x6b\x73\x63\x61\x70\x65\
\x20\x28\x68\x74\x74\x70\x3a\x2f\x2f\x77\x77\x77\x2e\x69\x6e\x6b\
\x73\x63\x61\x70\x65\x2e\x6f\x72\x67\x2f\x29\x20\x2d\x2d\x3e\x0a\
\x0a\x3c\x73\x76\x67\x0a\x20\x20\x20\x78\x6d\x6c\x6e\x73\x3a\x64\
\x63\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x70\x75\x72\x6c\x2e\x6f\
\x72\x67\x2f\x64\x63\x2f\x65\x6c\x65\x6d\x65\x6e\x74\x73\x2f\x31\
\x2e\x31\x2f\x22\x0a\x20\x20\x20\x78\x6d\x6c\x6e\x73\x3a\x63\x63\
\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x63\x72\x65\x61\x74\x69\x76\
\x65\x63\x6f\x6d\x6d\x6f\x6e\x73\x2e\x6f\x72\x67\x2f\x6e\x73\x23\
\x22\x0a\x20\x20\x20\x78\x6d\x6c\x6e\x73\x3a\x72\x64\x66\x3d\x22\
\x68\x74\x74\x70\x3a\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\
\x67\x2f\x31\x39\x39\x39\x2f\x30\x32\x2f\x32\x32\x2d\x72\x64\x66\
\x2d\x73\x79\x6e\x74\x61\x78\x2d\x6e\x73\x23\x22\x0a\x20\x20\x20\
\x78\x6d\x6c\x6e\x73\x3a\x73\x76\x67\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x0a\x20\x20\x20\x78\x6d\x6c\x6e\x73\x3d\
\x22\x68\x74\x74\x70\x3a\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\
\x72\x67\x2f\x32\x30\x30\x30\x2f\x73\x76\x67\x22\x0a\x20\x20\x20\
\x78\x6d\x6c\x6e\x73\x3a\x78\x6c\x69\x6e\x6b\x3d\x22\x68\x74\x74\
\x70\x3a\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x31\
\x39\x39\x39\x2f\x78\x6c\x69\x6e\x6b\x22\x0a\x20\x20\x20\x78\x6d\
\x6c\x6e\x73\x3a\x73\x6f\x64\x69\x70\x6f\x64\x69\x3d\x22\x68\x74\
\x74\x70\x3a\x2f\x2f\x73\x6f\x64\x69\x70\x6f\x64\x69\x2e\x73\x6f\
\x75\x72\x63\x65\x66\x6f\x72\x67\x65\x2e\x6e\x65\x74\x2f\x44\x54\
\x44\x2f\x73\x6f\x64\x69\x70\x6f\x64\x69\x2d\x30\x2e\x64\x74\x64\
\x22\x0a\x20\x20\x20\x78\x6d\x6c\x6e\x73\x3a\x69\x6e\x6b\x73\x63\
\x61\x70\x65\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x77\x77\x77\x2e\
\x69\x6e\x6b\x73\x63\x61\x70\x65\x2e\x6f\x72\x67\x2f\x6e\x61\x6d\
\x65\x73\x70\x61\x63\x65\x73\x2f\x69\x6e\x6b\x73\x63\x61\x70\x65\
\x22\x0a\x20\x20\x20\x69\x64\x3d\x22\x73\x76\x67\x33\x37\x39\x39\
\x22\x0a\x20\x20\x20\x76\x65\x72\x73\x69\x6f\x6e\x3d\x22\x31\x2e\
\x31\x22\x0a\x20\x20\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\x76\
\x65\x72\x73\x69\x6f\x6e\x3d\x22\x30\x2e\x34\x38\x2e\x33\x2e\x31\
\x20\x72\x39\x38\x38\x36\x22\x0a\x20\x20\x20\x77\x69\x64\x74\x68\
\x3d\x22\x33\x32\x22\x0a\x20\x20\x20\x68\x65\x69\x67\x68\x74\x3d\
\x22\x33\x32\x22\x0a\x20\x20\x20\x73\x6f\x64\x69\x70\x6f\x64\x69\
\x3a\x64\x6f\x63\x6e\x61\x6d\x65\x3d\x22\x70\x68\x61\x73\x65\x5f\
\x72\x65\x73\x70\x6f\x6e\x73\x65\x2e\x69\x63\x6f\x22\x3e\x0a\x20\
\x20\x3c\x6d\x65\x74\x61\x64\x61\x74\x61\x0a\x20\x20\x20\x20\x20\
\x69\x64\x3d\x22\x6d\x65\x74\x61\x64\x61\x74\x61\x33\x38\x30\x35\
\x22\x3e\x0a\x20\x20\x20\x20\x3c\x72\x64\x66\x3a\x52\x44\x46\x3e\
\x0a\x20\x20\x20\x20\x20\x20\x3c\x63\x63\x3a\x57\x6f\x72\x6b\x0a\
\x20\x20\x20\x20\x20\x20\x20\x20\x20\x72\x64\x66\x3a\x61\x62\x6f\
\x75\x74\x3d\x22\x22\x3e\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x3c\
\x64\x63\x3a\x66\x6f\x72\x6d\x61\x74\x3e\x69\x6d\x61\x67\x65\x2f\
\x73\x76\x67\x2b\x78\x6d\x6c\x3c\x2f\x64\x63\x3a\x66\x6f\x72\x6d\
\x61\x74\x3e\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x3c\x64\x63\x3a\
\x74\x79\x70\x65\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\
\x72\x64\x66\x3a\x72\x65\x73\x6f\x75\x72\x63\x65\x3d\x22\x68\x74\
\x74\x70\x3a\x2f\x2f\x70\x75\x72\x6c\x2e\x6f\x72\x67\x2f\x64\x63\
\x2f\x64\x63\x6d\x69\x74\x79\x70\x65\x2f\x53\x74\x69\x6c\x6c\x49\
\x6d\x61\x67\x65\x22\x20\x2f\x3e\x0a\x20\x20\x20\x20\x20\x20\x20\
\x20\x3c\x64\x63\x3a\x74\x69\x74\x6c\x65\x3e\x3c\x2f\x64\x63\x3a\
\x74\x69\x74\x6c\x65\x3e\x0a\x20\x20\x20\x20\x20\x20\x3c\x2f\x63\
\x63\x3a\x57\x6f\x72\x6b\x3e\x0a\x20\x20\x20\x20\x3c\x2f\x72\x64\
\x66\x3a\x52\x44\x46\x3e\x0a\x20\x20\x3c\x2f\x6d\x65\x74\x61\x64\
\x61\x74\x61\x3e\x0a\x20\x20\x3c\x64\x65\x66\x73\x0a\x20\x20\x20\
\x20\x20\x69\x64\x3d\x22\x64\x65\x66\x73\x33\x38\x30\x33\x22\x20\
\x2f\x3e\x0a\x20\x20\x3c\x73\x6f\x64\x69\x70\x6f\x64\x69\x3a\x6e\
\x61\x6d\x65\x64\x76\x69\x65\x77\x0a\x20\x20\x20\x20\x20\x70\x61\
\x67\x65\x63\x6f\x6c\x6f\x72\x3d\x22\x23\x66\x66\x66\x66\x66\x66\
\x22\x0a\x20\x20\x20\x20\x20\x62\x6f\x72\x64\x65\x72\x63\x6f\x6c\
\x6f\x72\x3d\x22\x23\x36\x36\x36\x36\x36\x36\x22\x0a\x20\x20\x20\
\x20\x20\x62\x6f\x72\x64\x65\x72\x6f\x70\x61\x63\x69\x74\x79\x3d\
\x22\x31\x22\x0a\x20\x20\x20\x20\x20\x6f\x62\x6a\x65\x63\x74\x74\
\x6f\x6c\x65\x72\x61\x6e\x63\x65\x3d\x22\x31\x30\x22\x0a\x20\x20\
\x20\x20\x20\x67\x72\x69\x64\x74\x6f\x6c\x65\x72\x61\x6e\x63\x65\
\x3d\x22\x31\x30\x22\x0a\x20\x20\x20\x20\x20\x67\x75\x69\x64\x65\
\x74\x6f\x6c\x65\x72\x61\x6e\x63\x65\x3d\x22\x31\x30\x22\x0a\x20\
\x20\x20\x20\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\x70\x61\x67\
\x65\x6f\x70\x61\x63\x69\x74\x79\x3d\x22\x30\x22\x0a\x20\x20\x20\
\x20\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\x70\x61\x67\x65\x73\
\x68\x61\x64\x6f\x77\x3d\x22\x32\x22\x0a\x20\x20\x20\x20\x20\x69\
\x6e\x6b\x73\x63\x61\x70\x65\x3a\x77\x69\x6e\x64\x6f\x77\x2d\x77\
\x69\x64\x74\x68\x3d\x22\x31\x33\x36\x34\x22\x0a\x20\x20\x20\x20\
\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\x77\x69\x6e\x64\x6f\x77\
\x2d\x68\x65\x69\x67\x68\x74\x3d\x22\x37\x35\x30\x22\x0a\x20\x20\
\x20\x20\x20\x69\x64\x3d\x22\x6e\x61\x6d\x65\x64\x76\x69\x65\x77\
\x33\x38\x30\x31\x22\x0a\x20\x20\x20\x20\x20\x73\x68\x6f\x77\x67\
\x72\x69\x64\x3d\x22\x66\x61\x6c\x73\x65\x22\x0a\x20\x20\x20\x20\
\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\x7a\x6f\x6f\x6d\x3d\x22\
\x37\x2e\x33\x37\x35\x22\x0a\x20\x20\x20\x20\x20\x69\x6e\x6b\x73\
\x63\x61\x70\x65\x3a\x63\x78\x3d\x22\x31\x36\x22\x0a\x20\x20\x20\
\x20\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\x63\x79\x3d\x22\x31\
\x36\x22\x0a\x20\x20\x20\x20\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\
\x3a\x77\x69\x6e\x64\x6f\x77\x2d\x78\x3d\x22\x30\x22\x0a\x20\x20\
\x20\x20\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\x77\x69\x6e\x64\
\x6f\x77\x2d\x79\x3d\x22\x31\x36\x22\x0a\x20\x20\x20\x20\x20\x69\
\x6e\x6b\x73\x63\x61\x70\x65\x3a\x77\x69\x6e\x64\x6f\x77\x2d\x6d\
\x61\x78\x69\x6d\x69\x7a\x65\x64\x3d\x22\x30\x22\x0a\x20\x20\x20\
\x20\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\x63\x75\x72\x72\x65\
\x6e\x74\x2d\x6c\x61\x79\x65\x72\x3d\x22\x73\x76\x67\x33\x37\x39\
\x39\x22\x20\x2f\x3e\x0a\x20\x20\x3c\x69\x6d\x61\x67\x65\x0a\x20\
\x20\x20\x20\x20\x77\x69\x64\x74\x68\x3d\x22\x33\x32\x22\x0a\x20\
\x20\x20\x20\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x33\x32\x22\x0a\
\x20\x20\x20\x20\x20\x78\x6c\x69\x6e\x6b\x3a\x68\x72\x65\x66\x3d\
\x22\x64\x61\x74\x61\x3a\x69\x6d\x61\x67\x65\x2f\x70\x6e\x67\x3b\
\x62\x61\x73\x65\x36\x34\x2c\x69\x56\x42\x4f\x52\x77\x30\x4b\x47\
\x67\x6f\x41\x41\x41\x41\x4e\x53\x55\x68\x45\x55\x67\x41\x41\x41\
\x43\x41\x41\x41\x41\x41\x67\x43\x41\x59\x41\x41\x41\x42\x7a\x65\
\x6e\x72\x30\x41\x41\x41\x41\x42\x48\x4e\x43\x53\x56\x51\x49\x43\
\x41\x67\x49\x66\x41\x68\x6b\x69\x41\x41\x41\x41\x5a\x56\x4a\x52\
\x45\x46\x55\x0a\x57\x49\x58\x74\x31\x37\x46\x72\x46\x45\x45\x55\
\x78\x2f\x48\x50\x6e\x49\x64\x4b\x62\x4d\x54\x61\x4a\x70\x42\x57\
\x4f\x31\x45\x51\x77\x63\x72\x69\x69\x43\x6b\x46\x4c\x57\x30\x30\
\x49\x46\x6f\x63\x75\x78\x64\x74\x51\x6b\x44\x69\x33\x52\x34\x57\
\x69\x68\x42\x74\x4c\x41\x32\x6d\x31\x48\x42\x46\x47\x67\x73\x52\
\x0a\x42\x43\x75\x31\x46\x66\x49\x66\x42\x42\x75\x44\x53\x72\x79\
\x78\x57\x4d\x35\x4b\x6b\x31\x6b\x76\x63\x49\x4c\x37\x71\x6d\x48\
\x67\x39\x2b\x62\x4c\x6d\x2f\x6d\x39\x6d\x51\x6d\x49\x4a\x68\x79\
\x54\x42\x49\x69\x4e\x43\x53\x34\x4f\x61\x6f\x41\x61\x6f\x41\x62\
\x34\x56\x77\x42\x36\x47\x66\x33\x54\x31\x61\x52\x46\x0a\x70\x44\
\x65\x37\x54\x77\x44\x68\x50\x66\x45\x6c\x33\x52\x4d\x56\x39\x59\
\x38\x70\x35\x76\x63\x42\x49\x4e\x38\x67\x33\x4b\x41\x78\x59\x48\
\x6d\x6d\x67\x76\x77\x63\x38\x53\x61\x39\x48\x6a\x47\x4d\x41\x51\
\x44\x5a\x47\x75\x37\x53\x48\x4e\x41\x39\x6e\x69\x62\x50\x4e\x6d\
\x6d\x65\x78\x52\x6e\x36\x71\x7a\x77\x34\x0a\x4e\x41\x59\x41\x35\
\x45\x2f\x77\x74\x4b\x7a\x45\x2f\x57\x4e\x70\x4b\x64\x70\x62\x48\
\x4c\x35\x51\x6a\x72\x39\x75\x70\x4f\x74\x2b\x43\x77\x42\x35\x46\
\x77\x4e\x32\x42\x69\x78\x4f\x70\x61\x57\x35\x39\x59\x33\x73\x4d\
\x74\x36\x79\x38\x34\x62\x2b\x39\x42\x67\x41\x6b\x4e\x33\x47\x42\
\x34\x36\x38\x59\x50\x46\x67\x0a\x57\x71\x6f\x51\x36\x58\x51\x49\
\x44\x78\x6d\x2b\x70\x6e\x39\x71\x44\x49\x41\x51\x6d\x5a\x34\x6e\
\x66\x6d\x62\x71\x47\x57\x73\x48\x30\x69\x41\x67\x58\x38\x46\x31\
\x68\x75\x73\x70\x4e\x74\x32\x6c\x45\x56\x33\x36\x77\x66\x59\x56\
\x77\x6c\x45\x32\x56\x36\x71\x64\x38\x73\x34\x36\x6a\x56\x6b\x4a\
\x4e\x74\x32\x6a\x0a\x45\x79\x35\x39\x35\x38\x73\x63\x54\x74\x4b\
\x2f\x6c\x77\x34\x41\x32\x62\x73\x55\x6d\x79\x61\x30\x34\x71\x56\
\x74\x6d\x69\x32\x30\x4b\x42\x59\x71\x51\x75\x78\x70\x30\x38\x53\
\x37\x6f\x4c\x33\x46\x73\x49\x57\x72\x46\x4e\x65\x71\x51\x65\x78\
\x75\x30\x36\x42\x38\x6c\x43\x62\x75\x37\x2f\x49\x4d\x7a\x56\x65\
\x45\x0a\x4e\x76\x45\x35\x65\x59\x56\x7a\x45\x51\x4e\x46\x46\x33\
\x4d\x30\x57\x6d\x56\x31\x4b\x6a\x39\x4b\x37\x33\x77\x71\x4b\x78\
\x45\x66\x56\x64\x50\x78\x4a\x35\x76\x2b\x78\x58\x57\x38\x38\x4a\
\x46\x77\x73\x62\x70\x75\x46\x4c\x39\x73\x65\x6e\x34\x30\x55\x2f\
\x38\x4c\x61\x6f\x44\x2f\x47\x32\x44\x55\x69\x43\x59\x57\x0a\x50\
\x77\x48\x43\x37\x48\x54\x6e\x46\x42\x69\x6f\x6f\x67\x41\x41\x41\
\x41\x42\x4a\x52\x55\x35\x45\x72\x6b\x4a\x67\x67\x67\x3d\x3d\x0a\
\x22\x0a\x20\x20\x20\x20\x20\x69\x64\x3d\x22\x69\x6d\x61\x67\x65\
\x33\x38\x30\x37\x22\x0a\x20\x20\x20\x20\x20\x78\x3d\x22\x30\x22\
\x0a\x20\x20\x20\x20\x20\x79\x3d\x22\x30\x22\x20\x2f\x3e\x0a\x3c\
\x2f\x73\x76\x67\x3e\x0a\
\x00\x00\x0b\x58\
\x3c\
\x3f\x78\x6d\x6c\x20\x76\x65\x72\x73\x69\x6f\x6e\x3d\x22\x31\x2e\
\x30\x22\x20\x65\x6e\x63\x6f\x64\x69\x6e\x67\x3d\x22\x55\x54\x46\
\x2d\x38\x22\x20\x73\x74\x61\x6e\x64\x61\x6c\x6f\x6e\x65\x3d\x22\
\x6e\x6f\x22\x3f\x3e\x0a\x3c\x21\x2d\x2d\x20\x43\x72\x65\x61\x74\
\x65\x64\x20\x77\x69\x74\x68\x20\x49\x6e\x6b\x73\x63\x61\x70\x65\
\x20\x28\x68\x74\x74\x70\x3a\x2f\x2f\x77\x77\x77\x2e\x69\x6e\x6b\
\x73\x63\x61\x70\x65\x2e\x6f\x72\x67\x2f\x29\x20\x2d\x2d\x3e\x0a\
\x0a\x3c\x73\x76\x67\x0a\x20\x20\x20\x78\x6d\x6c\x6e\x73\x3a\x64\
\x63\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x70\x75\x72\x6c\x2e\x6f\
\x72\x67\x2f\x64\x63\x2f\x65\x6c\x65\x6d\x65\x6e\x74\x73\x2f\x31\
\x2e\x31\x2f\x22\x0a\x20\x20\x20\x78\x6d\x6c\x6e\x73\x3a\x63\x63\
\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x63\x72\x65\x61\x74\x69\x76\
\x65\x63\x6f\x6d\x6d\x6f\x6e\x73\x2e\x6f\x72\x67\x2f\x6e\x73\x23\
\x22\x0a\x20\x20\x20\x78\x6d\x6c\x6e\x73\x3a\x72\x64\x66\x3d\x22\
\x68\x74\x74\x70\x3a\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\
\x67\x2f\x31\x39\x39\x39\x2f\x30\x32\x2f\x32\x32\x2d\x72\x64\x66\
\x2d\x73\x79\x6e\x74\x61\x78\x2d\x6e\x73\x23\x22\x0a\x20\x20\x20\
\x78\x6d\x6c\x6e\x73\x3a\x73\x76\x67\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x0a\x20\x20\x20\x78\x6d\x6c\x6e\x73\x3d\
\x22\x68\x74\x74\x70\x3a\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\
\x72\x67\x2f\x32\x30\x30\x30\x2f\x73\x76\x67\x22\x0a\x20\x20\x20\
\x78\x6d\x6c\x6e\x73\x3a\x78\x6c\x69\x6e\x6b\x3d\x22\x68\x74\x74\
\x70\x3a\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x31\
\x39\x39\x39\x2f\x78\x6c\x69\x6e\x6b\x22\x0a\x20\x20\x20\x78\x6d\
\x6c\x6e\x73\x3a\x73\x6f\x64\x69\x70\x6f\x64\x69\x3d\x22\x68\x74\
\x74\x70\x3a\x2f\x2f\x73\x6f\x64\x69\x70\x6f\x64\x69\x2e\x73\x6f\
\x75\x72\x63\x65\x66\x6f\x72\x67\x65\x2e\x6e\x65\x74\x2f\x44\x54\
\x44\x2f\x73\x6f\x64\x69\x70\x6f\x64\x69\x2d\x30\x2e\x64\x74\x64\
\x22\x0a\x20\x20\x20\x78\x6d\x6c\x6e\x73\x3a\x69\x6e\x6b\x73\x63\
\x61\x70\x65\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x77\x77\x77\x2e\
\x69\x6e\x6b\x73\x63\x61\x70\x65\x2e\x6f\x72\x67\x2f\x6e\x61\x6d\
\x65\x73\x70\x61\x63\x65\x73\x2f\x69\x6e\x6b\x73\x63\x61\x70\x65\
\x22\x0a\x20\x20\x20\x69\x64\x3d\x22\x73\x76\x67\x34\x35\x35\x37\
\x22\x0a\x20\x20\x20\x76\x65\x72\x73\x69\x6f\x6e\x3d\x22\x31\x2e\
\x31\x22\x0a\x20\x20\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\x76\
\x65\x72\x73\x69\x6f\x6e\x3d\x22\x30\x2e\x34\x38\x2e\x33\x2e\x31\
\x20\x72\x39\x38\x38\x36\x22\x0a\x20\x20\x20\x77\x69\x64\x74\x68\
\x3d\x22\x33\x32\x22\x0a\x20\x20\x20\x68\x65\x69\x67\x68\x74\x3d\
\x22\x33\x32\x22\x0a\x20\x20\x20\x73\x6f\x64\x69\x70\x6f\x64\x69\
\x3a\x64\x6f\x63\x6e\x61\x6d\x65\x3d\x22\x73\x74\x65\x70\x2e\x73\
\x76\x67\x22\x3e\x0a\x20\x20\x3c\x6d\x65\x74\x61\x64\x61\x74\x61\
\x0a\x20\x20\x20\x20\x20\x69\x64\x3d\x22\x6d\x65\x74\x61\x64\x61\
\x74\x61\x34\x35\x36\x33\x22\x3e\x0a\x20\x20\x20\x20\x3c\x72\x64\
\x66\x3a\x52\x44\x46\x3e\x0a\x20\x20\x20\x20\x20\x20\x3c\x63\x63\
\x3a\x57\x6f\x72\x6b\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x20\x72\
\x64\x66\x3a\x61\x62\x6f\x75\x74\x3d\x22\x22\x3e\x0a\x20\x20\x20\
\x20\x20\x20\x20\x20\x3c\x64\x63\x3a\x66\x6f\x72\x6d\x61\x74\x3e\
\x69\x6d\x61\x67\x65\x2f\x73\x76\x67\x2b\x78\x6d\x6c\x3c\x2f\x64\
\x63\x3a\x66\x6f\x72\x6d\x61\x74\x3e\x0a\x20\x20\x20\x20\x20\x20\
\x20\x20\x3c\x64\x63\x3a\x74\x79\x70\x65\x0a\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x72\x64\x66\x3a\x72\x65\x73\x6f\x75\x72\
\x63\x65\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x70\x75\x72\x6c\x2e\
\x6f\x72\x67\x2f\x64\x63\x2f\x64\x63\x6d\x69\x74\x79\x70\x65\x2f\
\x53\x74\x69\x6c\x6c\x49\x6d\x61\x67\x65\x22\x20\x2f\x3e\x0a\x20\
\x20\x20\x20\x20\x20\x20\x20\x3c\x64\x63\x3a\x74\x69\x74\x6c\x65\
\x3e\x3c\x2f\x64\x63\x3a\x74\x69\x74\x6c\x65\x3e\x0a\x20\x20\x20\
\x20\x20\x20\x3c\x2f\x63\x63\x3a\x57\x6f\x72\x6b\x3e\x0a\x20\x20\
\x20\x20\x3c\x2f\x72\x64\x66\x3a\x52\x44\x46\x3e\x0a\x20\x20\x3c\
\x2f\x6d\x65\x74\x61\x64\x61\x74\x61\x3e\x0a\x20\x20\x3c\x64\x65\
\x66\x73\x0a\x20\x20\x20\x20\x20\x69\x64\x3d\x22\x64\x65\x66\x73\
\x34\x35\x36\x31\x22\x20\x2f\x3e\x0a\x20\x20\x3c\x73\x6f\x64\x69\
\x70\x6f\x64\x69\x3a\x6e\x61\x6d\x65\x64\x76\x69\x65\x77\x0a\x20\
\x20\x20\x20\x20\x70\x61\x67\x65\x63\x6f\x6c\x6f\x72\x3d\x22\x23\
\x66\x66\x66\x66\x66\x66\x22\x0a\x20\x20\x20\x20\x20\x62\x6f\x72\
\x64\x65\x72\x63\x6f\x6c\x6f\x72\x3d\x22\x23\x36\x36\x36\x36\x36\
\x36\x22\x0a\x20\x20\x20\x20\x20\x62\x6f\x72\x64\x65\x72\x6f\x70\
\x61\x63\x69\x74\x79\x3d\x22\x31\x22\x0a\x20\x20\x20\x20\x20\x6f\
\x62\x6a\x65\x63\x74\x74\x6f\x6c\x65\x72\x61\x6e\x63\x65\x3d\x22\
\x31\x30\x22\x0a\x20\x20\x20\x20\x20\x67\x72\x69\x64\x74\x6f\x6c\
\x65\x72\x61\x6e\x63\x65\x3d\x22\x31\x30\x22\x0a\x20\x20\x20\x20\
\x20\x67\x75\x69\x64\x65\x74\x6f\x6c\x65\x72\x61\x6e\x63\x65\x3d\
\x22\x31\x30\x22\x0a\x20\x20\x20\x20\x20\x69\x6e\x6b\x73\x63\x61\
\x70\x65\x3a\x70\x61\x67\x65\x6f\x70\x61\x63\x69\x74\x79\x3d\x22\
\x30\x22\x0a\x20\x20\x20\x20\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\
\x3a\x70\x61\x67\x65\x73\x68\x61\x64\x6f\x77\x3d\x22\x32\x22\x0a\
\x20\x20\x20\x20\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\x77\x69\
\x6e\x64\x6f\x77\x2d\x77\x69\x64\x74\x68\x3d\x22\x31\x33\x36\x34\
\x22\x0a\x20\x20\x20\x20\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\
\x77\x69\x6e\x64\x6f\x77\x2d\x68\x65\x69\x67\x68\x74\x3d\x22\x37\
\x35\x30\x22\x0a\x20\x20\x20\x20\x20\x69\x64\x3d\x22\x6e\x61\x6d\
\x65\x64\x76\x69\x65\x77\x34\x35\x35\x39\x22\x0a\x20\x20\x20\x20\
\x20\x73\x68\x6f\x77\x67\x72\x69\x64\x3d\x22\x66\x61\x6c\x73\x65\
\x22\x0a\x20\x20\x20\x20\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\
\x7a\x6f\x6f\x6d\x3d\x22\x37\x2e\x33\x37\x35\x22\x0a\x20\x20\x20\
\x20\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\x63\x78\x3d\x22\x32\
\x36\x2e\x32\x31\x34\x39\x33\x36\x22\x0a\x20\x20\x20\x20\x20\x69\
\x6e\x6b\x73\x63\x61\x70\x65\x3a\x63\x79\x3d\x22\x31\x39\x2e\x37\
\x32\x38\x38\x31\x34\x22\x0a\x20\x20\x20\x20\x20\x69\x6e\x6b\x73\
\x63\x61\x70\x65\x3a\x77\x69\x6e\x64\x6f\x77\x2d\x78\x3d\x22\x30\
\x22\x0a\x20\x20\x20\x20\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\
\x77\x69\x6e\x64\x6f\x77\x2d\x79\x3d\x22\x31\x36\x22\x0a\x20\x20\
\x20\x20\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\x77\x69\x6e\x64\
\x6f\x77\x2d\x6d\x61\x78\x69\x6d\x69\x7a\x65\x64\x3d\x22\x30\x22\
\x0a\x20\x20\x20\x20\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\x63\
\x75\x72\x72\x65\x6e\x74\x2d\x6c\x61\x79\x65\x72\x3d\x22\x73\x76\
\x67\x34\x35\x35\x37\x22\x0a\x20\x20\x20\x20\x20\x69\x6e\x6b\x73\
\x63\x61\x70\x65\x3a\x73\x6e\x61\x70\x2d\x67\x6c\x6f\x62\x61\x6c\
\x3d\x22\x66\x61\x6c\x73\x65\x22\x20\x2f\x3e\x0a\x20\x20\x3c\x69\
\x6d\x61\x67\x65\x0a\x20\x20\x20\x20\x20\x77\x69\x64\x74\x68\x3d\
\x22\x33\x32\x22\x0a\x20\x20\x20\x20\x20\x68\x65\x69\x67\x68\x74\
\x3d\x22\x33\x32\x22\x0a\x20\x20\x20\x20\x20\x78\x6c\x69\x6e\x6b\
\x3a\x68\x72\x65\x66\x3d\x22\x64\x61\x74\x61\x3a\x69\x6d\x61\x67\
\x65\x2f\x70\x6e\x67\x3b\x62\x61\x73\x65\x36\x34\x2c\x69\x56\x42\
\x4f\x52\x77\x30\x4b\x47\x67\x6f\x41\x41\x41\x41\x4e\x53\x55\x68\
\x45\x55\x67\x41\x41\x41\x43\x41\x41\x41\x41\x41\x67\x43\x41\x59\
\x41\x41\x41\x42\x7a\x65\x6e\x72\x30\x41\x41\x41\x41\x42\x48\x4e\
\x43\x53\x56\x51\x49\x43\x41\x67\x49\x66\x41\x68\x6b\x69\x41\x41\
\x41\x41\x44\x56\x4a\x52\x45\x46\x55\x0a\x57\x49\x58\x74\x31\x7a\
\x45\x42\x41\x44\x41\x4d\x77\x7a\x42\x33\x2f\x44\x6c\x33\x4d\x50\
\x6f\x6f\x42\x4b\x77\x33\x55\x32\x33\x48\x75\x77\x54\x73\x4f\x34\
\x78\x58\x42\x51\x41\x41\x41\x41\x41\x41\x41\x41\x41\x41\x41\x41\
\x41\x41\x41\x41\x41\x77\x48\x64\x2f\x7a\x44\x78\x72\x74\x42\x44\
\x6b\x45\x71\x4e\x62\x67\x0a\x41\x41\x41\x41\x41\x45\x6c\x46\x54\
\x6b\x53\x75\x51\x6d\x43\x43\x0a\x22\x0a\x20\x20\x20\x20\x20\x69\
\x64\x3d\x22\x69\x6d\x61\x67\x65\x34\x35\x36\x35\x22\x0a\x20\x20\
\x20\x20\x20\x78\x3d\x22\x30\x22\x0a\x20\x20\x20\x20\x20\x79\x3d\
\x22\x30\x22\x20\x2f\x3e\x0a\x20\x20\x3c\x74\x65\x78\x74\x0a\x20\
\x20\x20\x20\x20\x78\x6d\x6c\x3a\x73\x70\x61\x63\x65\x3d\x22\x70\
\x72\x65\x73\x65\x72\x76\x65\x22\x0a\x20\x20\x20\x20\x20\x73\x74\
\x79\x6c\x65\x3d\x22\x66\x6f\x6e\x74\x2d\x73\x69\x7a\x65\x3a\x34\
\x30\x70\x78\x3b\x66\x6f\x6e\x74\x2d\x73\x74\x79\x6c\x65\x3a\x6e\
\x6f\x72\x6d\x61\x6c\x3b\x66\x6f\x6e\x74\x2d\x77\x65\x69\x67\x68\
\x74\x3a\x6e\x6f\x72\x6d\x61\x6c\x3b\x6c\x69\x6e\x65\x2d\x68\x65\
\x69\x67\x68\x74\x3a\x31\x32\x35\x25\x3b\x6c\x65\x74\x74\x65\x72\
\x2d\x73\x70\x61\x63\x69\x6e\x67\x3a\x30\x70\x78\x3b\x77\x6f\x72\
\x64\x2d\x73\x70\x61\x63\x69\x6e\x67\x3a\x30\x70\x78\x3b\x66\x69\
\x6c\x6c\x3a\x23\x30\x30\x30\x30\x30\x30\x3b\x66\x69\x6c\x6c\x2d\
\x6f\x70\x61\x63\x69\x74\x79\x3a\x31\x3b\x73\x74\x72\x6f\x6b\x65\
\x3a\x6e\x6f\x6e\x65\x3b\x66\x6f\x6e\x74\x2d\x66\x61\x6d\x69\x6c\
\x79\x3a\x53\x61\x6e\x73\x22\x0a\x20\x20\x20\x20\x20\x78\x3d\x22\
\x31\x31\x2e\x36\x36\x31\x30\x31\x37\x22\x0a\x20\x20\x20\x20\x20\
\x79\x3d\x22\x39\x2e\x33\x35\x35\x39\x33\x32\x32\x22\x0a\x20\x20\
\x20\x20\x20\x69\x64\x3d\x22\x74\x65\x78\x74\x35\x34\x31\x38\x22\
\x0a\x20\x20\x20\x20\x20\x73\x6f\x64\x69\x70\x6f\x64\x69\x3a\x6c\
\x69\x6e\x65\x73\x70\x61\x63\x69\x6e\x67\x3d\x22\x31\x32\x35\x25\
\x22\x3e\x3c\x74\x73\x70\x61\x6e\x0a\x20\x20\x20\x20\x20\x20\x20\
\x73\x6f\x64\x69\x70\x6f\x64\x69\x3a\x72\x6f\x6c\x65\x3d\x22\x6c\
\x69\x6e\x65\x22\x0a\x20\x20\x20\x20\x20\x20\x20\x69\x64\x3d\x22\
\x74\x73\x70\x61\x6e\x35\x34\x32\x30\x22\x0a\x20\x20\x20\x20\x20\
\x20\x20\x78\x3d\x22\x31\x31\x2e\x36\x36\x31\x30\x31\x37\x22\x0a\
\x20\x20\x20\x20\x20\x20\x20\x79\x3d\x22\x39\x2e\x33\x35\x35\x39\
\x33\x32\x32\x22\x20\x2f\x3e\x3c\x2f\x74\x65\x78\x74\x3e\x0a\x20\
\x20\x3c\x70\x61\x74\x68\x0a\x20\x20\x20\x20\x20\x69\x6e\x6b\x73\
\x63\x61\x70\x65\x3a\x63\x6f\x6e\x6e\x65\x63\x74\x6f\x72\x2d\x63\
\x75\x72\x76\x61\x74\x75\x72\x65\x3d\x22\x30\x22\x0a\x20\x20\x20\
\x20\x20\x64\x3d\x22\x4d\x20\x32\x35\x2e\x31\x39\x39\x39\x37\x33\
\x2c\x36\x2e\x32\x32\x37\x36\x34\x34\x34\x20\x43\x20\x32\x33\x2e\
\x39\x30\x32\x32\x30\x39\x2c\x36\x2e\x33\x36\x30\x34\x31\x30\x39\
\x20\x32\x30\x2e\x37\x35\x30\x37\x38\x36\x2c\x38\x2e\x34\x39\x31\
\x34\x35\x35\x37\x20\x31\x36\x2e\x32\x38\x30\x30\x36\x32\x2c\x31\
\x32\x2e\x35\x31\x31\x39\x32\x32\x20\x35\x2e\x33\x38\x32\x31\x39\
\x33\x38\x2c\x32\x2e\x37\x36\x36\x38\x39\x32\x36\x20\x32\x2e\x37\
\x35\x36\x39\x34\x35\x39\x2c\x35\x2e\x33\x35\x34\x33\x34\x36\x36\
\x20\x31\x32\x2e\x38\x31\x32\x32\x38\x2c\x31\x35\x2e\x38\x31\x35\
\x34\x38\x39\x20\x63\x20\x2d\x39\x2e\x39\x36\x39\x37\x38\x33\x38\
\x2c\x31\x30\x2e\x38\x39\x32\x37\x39\x33\x20\x2d\x37\x2e\x32\x35\
\x34\x30\x39\x31\x39\x2c\x31\x33\x2e\x34\x34\x33\x35\x38\x38\x20\
\x33\x2e\x34\x35\x36\x35\x34\x2c\x33\x2e\x33\x31\x35\x38\x37\x36\
\x20\x31\x30\x2e\x39\x34\x38\x30\x33\x39\x2c\x39\x2e\x38\x34\x32\
\x37\x33\x32\x20\x31\x33\x2e\x31\x37\x37\x39\x32\x36\x2c\x37\x2e\
\x34\x35\x36\x35\x31\x35\x20\x33\x2e\x32\x33\x30\x34\x33\x2c\x2d\
\x33\x2e\x33\x31\x37\x38\x37\x36\x20\x35\x2e\x37\x34\x37\x32\x31\
\x31\x2c\x2d\x36\x2e\x32\x36\x34\x38\x31\x35\x20\x37\x2e\x35\x31\
\x34\x32\x33\x2c\x2d\x39\x2e\x37\x37\x31\x33\x37\x33\x31\x20\x35\
\x2e\x37\x30\x30\x37\x32\x33\x2c\x2d\x39\x2e\x35\x38\x35\x38\x34\
\x34\x36\x20\x7a\x22\x0a\x20\x20\x20\x20\x20\x69\x64\x3d\x22\x70\
\x61\x74\x68\x31\x31\x33\x30\x22\x0a\x20\x20\x20\x20\x20\x73\x6f\
\x64\x69\x70\x6f\x64\x69\x3a\x73\x74\x72\x6f\x6b\x65\x2d\x63\x6d\
\x79\x6b\x3d\x22\x28\x30\x20\x30\x20\x30\x20\x30\x2e\x38\x29\x22\
\x0a\x20\x20\x20\x20\x20\x73\x74\x79\x6c\x65\x3d\x22\x66\x6f\x6e\
\x74\x2d\x73\x69\x7a\x65\x3a\x31\x32\x70\x78\x3b\x66\x69\x6c\x6c\
\x3a\x23\x30\x30\x30\x30\x66\x66\x3b\x66\x69\x6c\x6c\x2d\x72\x75\
\x6c\x65\x3a\x65\x76\x65\x6e\x6f\x64\x64\x3b\x73\x74\x72\x6f\x6b\
\x65\x3a\x23\x33\x33\x33\x33\x33\x33\x3b\x73\x74\x72\x6f\x6b\x65\
\x2d\x77\x69\x64\x74\x68\x3a\x32\x2e\x35\x30\x30\x30\x30\x30\x30\
\x30\x30\x30\x30\x30\x30\x30\x30\x30\x30\x3b\x73\x74\x72\x6f\x6b\
\x65\x2d\x6c\x69\x6e\x65\x63\x61\x70\x3a\x72\x6f\x75\x6e\x64\x3b\
\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3a\
\x72\x6f\x75\x6e\x64\x3b\x73\x74\x72\x6f\x6b\x65\x2d\x64\x61\x73\
\x68\x61\x72\x72\x61\x79\x3a\x6e\x6f\x6e\x65\x22\x20\x2f\x3e\x0a\
\x3c\x2f\x73\x76\x67\x3e\x0a\
\x00\x00\x0b\x58\
\x3c\
\x3f\x78\x6d\x6c\x20\x76\x65\x72\x73\x69\x6f\x6e\x3d\x22\x31\x2e\
\x30\x22\x20\x65\x6e\x63\x6f\x64\x69\x6e\x67\x3d\x22\x55\x54\x46\
\x2d\x38\x22\x20\x73\x74\x61\x6e\x64\x61\x6c\x6f\x6e\x65\x3d\x22\
\x6e\x6f\x22\x3f\x3e\x0a\x3c\x21\x2d\x2d\x20\x43\x72\x65\x61\x74\
\x65\x64\x20\x77\x69\x74\x68\x20\x49\x6e\x6b\x73\x63\x61\x70\x65\
\x20\x28\x68\x74\x74\x70\x3a\x2f\x2f\x77\x77\x77\x2e\x69\x6e\x6b\
\x73\x63\x61\x70\x65\x2e\x6f\x72\x67\x2f\x29\x20\x2d\x2d\x3e\x0a\
\x0a\x3c\x73\x76\x67\x0a\x20\x20\x20\x78\x6d\x6c\x6e\x73\x3a\x64\
\x63\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x70\x75\x72\x6c\x2e\x6f\
\x72\x67\x2f\x64\x63\x2f\x65\x6c\x65\x6d\x65\x6e\x74\x73\x2f\x31\
\x2e\x31\x2f\x22\x0a\x20\x20\x20\x78\x6d\x6c\x6e\x73\x3a\x63\x63\
\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x63\x72\x65\x61\x74\x69\x76\
\x65\x63\x6f\x6d\x6d\x6f\x6e\x73\x2e\x6f\x72\x67\x2f\x6e\x73\x23\
\x22\x0a\x20\x20\x20\x78\x6d\x6c\x6e\x73\x3a\x72\x64\x66\x3d\x22\
\x68\x74\x74\x70\x3a\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\
\x67\x2f\x31\x39\x39\x39\x2f\x30\x32\x2f\x32\x32\x2d\x72\x64\x66\
\x2d\x73\x79\x6e\x74\x61\x78\x2d\x6e\x73\x23\x22\x0a\x20\x20\x20\
\x78\x6d\x6c\x6e\x73\x3a\x73\x76\x67\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x0a\x20\x20\x20\x78\x6d\x6c\x6e\x73\x3d\
\x22\x68\x74\x74\x70\x3a\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\
\x72\x67\x2f\x32\x30\x30\x30\x2f\x73\x76\x67\x22\x0a\x20\x20\x20\
\x78\x6d\x6c\x6e\x73\x3a\x78\x6c\x69\x6e\x6b\x3d\x22\x68\x74\x74\
\x70\x3a\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x31\
\x39\x39\x39\x2f\x78\x6c\x69\x6e\x6b\x22\x0a\x20\x20\x20\x78\x6d\
\x6c\x6e\x73\x3a\x73\x6f\x64\x69\x70\x6f\x64\x69\x3d\x22\x68\x74\
\x74\x70\x3a\x2f\x2f\x73\x6f\x64\x69\x70\x6f\x64\x69\x2e\x73\x6f\
\x75\x72\x63\x65\x66\x6f\x72\x67\x65\x2e\x6e\x65\x74\x2f\x44\x54\
\x44\x2f\x73\x6f\x64\x69\x70\x6f\x64\x69\x2d\x30\x2e\x64\x74\x64\
\x22\x0a\x20\x20\x20\x78\x6d\x6c\x6e\x73\x3a\x69\x6e\x6b\x73\x63\
\x61\x70\x65\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x77\x77\x77\x2e\
\x69\x6e\x6b\x73\x63\x61\x70\x65\x2e\x6f\x72\x67\x2f\x6e\x61\x6d\
\x65\x73\x70\x61\x63\x65\x73\x2f\x69\x6e\x6b\x73\x63\x61\x70\x65\
\x22\x0a\x20\x20\x20\x69\x64\x3d\x22\x73\x76\x67\x34\x35\x35\x37\
\x22\x0a\x20\x20\x20\x76\x65\x72\x73\x69\x6f\x6e\x3d\x22\x31\x2e\
\x31\x22\x0a\x20\x20\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\x76\
\x65\x72\x73\x69\x6f\x6e\x3d\x22\x30\x2e\x34\x38\x2e\x33\x2e\x31\
\x20\x72\x39\x38\x38\x36\x22\x0a\x20\x20\x20\x77\x69\x64\x74\x68\
\x3d\x22\x33\x32\x22\x0a\x20\x20\x20\x68\x65\x69\x67\x68\x74\x3d\
\x22\x33\x32\x22\x0a\x20\x20\x20\x73\x6f\x64\x69\x70\x6f\x64\x69\
\x3a\x64\x6f\x63\x6e\x61\x6d\x65\x3d\x22\x73\x74\x65\x70\x2e\x73\
\x76\x67\x22\x3e\x0a\x20\x20\x3c\x6d\x65\x74\x61\x64\x61\x74\x61\
\x0a\x20\x20\x20\x20\x20\x69\x64\x3d\x22\x6d\x65\x74\x61\x64\x61\
\x74\x61\x34\x35\x36\x33\x22\x3e\x0a\x20\x20\x20\x20\x3c\x72\x64\
\x66\x3a\x52\x44\x46\x3e\x0a\x20\x20\x20\x20\x20\x20\x3c\x63\x63\
\x3a\x57\x6f\x72\x6b\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x20\x72\
\x64\x66\x3a\x61\x62\x6f\x75\x74\x3d\x22\x22\x3e\x0a\x20\x20\x20\
\x20\x20\x20\x20\x20\x3c\x64\x63\x3a\x66\x6f\x72\x6d\x61\x74\x3e\
\x69\x6d\x61\x67\x65\x2f\x73\x76\x67\x2b\x78\x6d\x6c\x3c\x2f\x64\
\x63\x3a\x66\x6f\x72\x6d\x61\x74\x3e\x0a\x20\x20\x20\x20\x20\x20\
\x20\x20\x3c\x64\x63\x3a\x74\x79\x70\x65\x0a\x20\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x20\x72\x64\x66\x3a\x72\x65\x73\x6f\x75\x72\
\x63\x65\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x70\x75\x72\x6c\x2e\
\x6f\x72\x67\x2f\x64\x63\x2f\x64\x63\x6d\x69\x74\x79\x70\x65\x2f\
\x53\x74\x69\x6c\x6c\x49\x6d\x61\x67\x65\x22\x20\x2f\x3e\x0a\x20\
\x20\x20\x20\x20\x20\x20\x20\x3c\x64\x63\x3a\x74\x69\x74\x6c\x65\
\x3e\x3c\x2f\x64\x63\x3a\x74\x69\x74\x6c\x65\x3e\x0a\x20\x20\x20\
\x20\x20\x20\x3c\x2f\x63\x63\x3a\x57\x6f\x72\x6b\x3e\x0a\x20\x20\
\x20\x20\x3c\x2f\x72\x64\x66\x3a\x52\x44\x46\x3e\x0a\x20\x20\x3c\
\x2f\x6d\x65\x74\x61\x64\x61\x74\x61\x3e\x0a\x20\x20\x3c\x64\x65\
\x66\x73\x0a\x20\x20\x20\x20\x20\x69\x64\x3d\x22\x64\x65\x66\x73\
\x34\x35\x36\x31\x22\x20\x2f\x3e\x0a\x20\x20\x3c\x73\x6f\x64\x69\
\x70\x6f\x64\x69\x3a\x6e\x61\x6d\x65\x64\x76\x69\x65\x77\x0a\x20\
\x20\x20\x20\x20\x70\x61\x67\x65\x63\x6f\x6c\x6f\x72\x3d\x22\x23\
\x66\x66\x66\x66\x66\x66\x22\x0a\x20\x20\x20\x20\x20\x62\x6f\x72\
\x64\x65\x72\x63\x6f\x6c\x6f\x72\x3d\x22\x23\x36\x36\x36\x36\x36\
\x36\x22\x0a\x20\x20\x20\x20\x20\x62\x6f\x72\x64\x65\x72\x6f\x70\
\x61\x63\x69\x74\x79\x3d\x22\x31\x22\x0a\x20\x20\x20\x20\x20\x6f\
\x62\x6a\x65\x63\x74\x74\x6f\x6c\x65\x72\x61\x6e\x63\x65\x3d\x22\
\x31\x30\x22\x0a\x20\x20\x20\x20\x20\x67\x72\x69\x64\x74\x6f\x6c\
\x65\x72\x61\x6e\x63\x65\x3d\x22\x31\x30\x22\x0a\x20\x20\x20\x20\
\x20\x67\x75\x69\x64\x65\x74\x6f\x6c\x65\x72\x61\x6e\x63\x65\x3d\
\x22\x31\x30\x22\x0a\x20\x20\x20\x20\x20\x69\x6e\x6b\x73\x63\x61\
\x70\x65\x3a\x70\x61\x67\x65\x6f\x70\x61\x63\x69\x74\x79\x3d\x22\
\x30\x22\x0a\x20\x20\x20\x20\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\
\x3a\x70\x61\x67\x65\x73\x68\x61\x64\x6f\x77\x3d\x22\x32\x22\x0a\
\x20\x20\x20\x20\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\x77\x69\
\x6e\x64\x6f\x77\x2d\x77\x69\x64\x74\x68\x3d\x22\x31\x33\x36\x34\
\x22\x0a\x20\x20\x20\x20\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\
\x77\x69\x6e\x64\x6f\x77\x2d\x68\x65\x69\x67\x68\x74\x3d\x22\x37\
\x35\x30\x22\x0a\x20\x20\x20\x20\x20\x69\x64\x3d\x22\x6e\x61\x6d\
\x65\x64\x76\x69\x65\x77\x34\x35\x35\x39\x22\x0a\x20\x20\x20\x20\
\x20\x73\x68\x6f\x77\x67\x72\x69\x64\x3d\x22\x66\x61\x6c\x73\x65\
\x22\x0a\x20\x20\x20\x20\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\
\x7a\x6f\x6f\x6d\x3d\x22\x37\x2e\x33\x37\x35\x22\x0a\x20\x20\x20\
\x20\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\x63\x78\x3d\x22\x32\
\x36\x2e\x32\x31\x34\x39\x33\x36\x22\x0a\x20\x20\x20\x20\x20\x69\
\x6e\x6b\x73\x63\x61\x70\x65\x3a\x63\x79\x3d\x22\x31\x39\x2e\x37\
\x32\x38\x38\x31\x34\x22\x0a\x20\x20\x20\x20\x20\x69\x6e\x6b\x73\
\x63\x61\x70\x65\x3a\x77\x69\x6e\x64\x6f\x77\x2d\x78\x3d\x22\x30\
\x22\x0a\x20\x20\x20\x20\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\
\x77\x69\x6e\x64\x6f\x77\x2d\x79\x3d\x22\x31\x36\x22\x0a\x20\x20\
\x20\x20\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\x77\x69\x6e\x64\
\x6f\x77\x2d\x6d\x61\x78\x69\x6d\x69\x7a\x65\x64\x3d\x22\x30\x22\
\x0a\x20\x20\x20\x20\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\x63\
\x75\x72\x72\x65\x6e\x74\x2d\x6c\x61\x79\x65\x72\x3d\x22\x73\x76\
\x67\x34\x35\x35\x37\x22\x0a\x20\x20\x20\x20\x20\x69\x6e\x6b\x73\
\x63\x61\x70\x65\x3a\x73\x6e\x61\x70\x2d\x67\x6c\x6f\x62\x61\x6c\
\x3d\x22\x66\x61\x6c\x73\x65\x22\x20\x2f\x3e\x0a\x20\x20\x3c\x69\
\x6d\x61\x67\x65\x0a\x20\x20\x20\x20\x20\x77\x69\x64\x74\x68\x3d\
\x22\x33\x32\x22\x0a\x20\x20\x20\x20\x20\x68\x65\x69\x67\x68\x74\
\x3d\x22\x33\x32\x22\x0a\x20\x20\x20\x20\x20\x78\x6c\x69\x6e\x6b\
\x3a\x68\x72\x65\x66\x3d\x22\x64\x61\x74\x61\x3a\x69\x6d\x61\x67\
\x65\x2f\x70\x6e\x67\x3b\x62\x61\x73\x65\x36\x34\x2c\x69\x56\x42\
\x4f\x52\x77\x30\x4b\x47\x67\x6f\x41\x41\x41\x41\x4e\x53\x55\x68\
\x45\x55\x67\x41\x41\x41\x43\x41\x41\x41\x41\x41\x67\x43\x41\x59\
\x41\x41\x41\x42\x7a\x65\x6e\x72\x30\x41\x41\x41\x41\x42\x48\x4e\
\x43\x53\x56\x51\x49\x43\x41\x67\x49\x66\x41\x68\x6b\x69\x41\x41\
\x41\x41\x44\x56\x4a\x52\x45\x46\x55\x0a\x57\x49\x58\x74\x31\x7a\
\x45\x42\x41\x44\x41\x4d\x77\x7a\x42\x33\x2f\x44\x6c\x33\x4d\x50\
\x6f\x6f\x42\x4b\x77\x33\x55\x32\x33\x48\x75\x77\x54\x73\x4f\x34\
\x78\x58\x42\x51\x41\x41\x41\x41\x41\x41\x41\x41\x41\x41\x41\x41\
\x41\x41\x41\x41\x41\x77\x48\x64\x2f\x7a\x44\x78\x72\x74\x42\x44\
\x6b\x45\x71\x4e\x62\x67\x0a\x41\x41\x41\x41\x41\x45\x6c\x46\x54\
\x6b\x53\x75\x51\x6d\x43\x43\x0a\x22\x0a\x20\x20\x20\x20\x20\x69\
\x64\x3d\x22\x69\x6d\x61\x67\x65\x34\x35\x36\x35\x22\x0a\x20\x20\
\x20\x20\x20\x78\x3d\x22\x30\x22\x0a\x20\x20\x20\x20\x20\x79\x3d\
\x22\x30\x22\x20\x2f\x3e\x0a\x20\x20\x3c\x74\x65\x78\x74\x0a\x20\
\x20\x20\x20\x20\x78\x6d\x6c\x3a\x73\x70\x61\x63\x65\x3d\x22\x70\
\x72\x65\x73\x65\x72\x76\x65\x22\x0a\x20\x20\x20\x20\x20\x73\x74\
\x79\x6c\x65\x3d\x22\x66\x6f\x6e\x74\x2d\x73\x69\x7a\x65\x3a\x34\
\x30\x70\x78\x3b\x66\x6f\x6e\x74\x2d\x73\x74\x79\x6c\x65\x3a\x6e\
\x6f\x72\x6d\x61\x6c\x3b\x66\x6f\x6e\x74\x2d\x77\x65\x69\x67\x68\
\x74\x3a\x6e\x6f\x72\x6d\x61\x6c\x3b\x6c\x69\x6e\x65\x2d\x68\x65\
\x69\x67\x68\x74\x3a\x31\x32\x35\x25\x3b\x6c\x65\x74\x74\x65\x72\
\x2d\x73\x70\x61\x63\x69\x6e\x67\x3a\x30\x70\x78\x3b\x77\x6f\x72\
\x64\x2d\x73\x70\x61\x63\x69\x6e\x67\x3a\x30\x70\x78\x3b\x66\x69\
\x6c\x6c\x3a\x23\x30\x30\x30\x30\x30\x30\x3b\x66\x69\x6c\x6c\x2d\
\x6f\x70\x61\x63\x69\x74\x79\x3a\x31\x3b\x73\x74\x72\x6f\x6b\x65\
\x3a\x6e\x6f\x6e\x65\x3b\x66\x6f\x6e\x74\x2d\x66\x61\x6d\x69\x6c\
\x79\x3a\x53\x61\x6e\x73\x22\x0a\x20\x20\x20\x20\x20\x78\x3d\x22\
\x31\x31\x2e\x36\x36\x31\x30\x31\x37\x22\x0a\x20\x20\x20\x20\x20\
\x79\x3d\x22\x39\x2e\x33\x35\x35\x39\x33\x32\x32\x22\x0a\x20\x20\
\x20\x20\x20\x69\x64\x3d\x22\x74\x65\x78\x74\x35\x34\x31\x38\x22\
\x0a\x20\x20\x20\x20\x20\x73\x6f\x64\x69\x70\x6f\x64\x69\x3a\x6c\
\x69\x6e\x65\x73\x70\x61\x63\x69\x6e\x67\x3d\x22\x31\x32\x35\x25\
\x22\x3e\x3c\x74\x73\x70\x61\x6e\x0a\x20\x20\x20\x20\x20\x20\x20\
\x73\x6f\x64\x69\x70\x6f\x64\x69\x3a\x72\x6f\x6c\x65\x3d\x22\x6c\
\x69\x6e\x65\x22\x0a\x20\x20\x20\x20\x20\x20\x20\x69\x64\x3d\x22\
\x74\x73\x70\x61\x6e\x35\x34\x32\x30\x22\x0a\x20\x20\x20\x20\x20\
\x20\x20\x78\x3d\x22\x31\x31\x2e\x36\x36\x31\x30\x31\x37\x22\x0a\
\x20\x20\x20\x20\x20\x20\x20\x79\x3d\x22\x39\x2e\x33\x35\x35\x39\
\x33\x32\x32\x22\x20\x2f\x3e\x3c\x2f\x74\x65\x78\x74\x3e\x0a\x20\
\x20\x3c\x70\x61\x74\x68\x0a\x20\x20\x20\x20\x20\x69\x6e\x6b\x73\
\x63\x61\x70\x65\x3a\x63\x6f\x6e\x6e\x65\x63\x74\x6f\x72\x2d\x63\
\x75\x72\x76\x61\x74\x75\x72\x65\x3d\x22\x30\x22\x0a\x20\x20\x20\
\x20\x20\x64\x3d\x22\x4d\x20\x32\x35\x2e\x31\x39\x39\x39\x37\x33\
\x2c\x36\x2e\x32\x32\x37\x36\x34\x34\x34\x20\x43\x20\x32\x33\x2e\
\x39\x30\x32\x32\x30\x39\x2c\x36\x2e\x33\x36\x30\x34\x31\x30\x39\
\x20\x32\x30\x2e\x37\x35\x30\x37\x38\x36\x2c\x38\x2e\x34\x39\x31\
\x34\x35\x35\x37\x20\x31\x36\x2e\x32\x38\x30\x30\x36\x32\x2c\x31\
\x32\x2e\x35\x31\x31\x39\x32\x32\x20\x35\x2e\x33\x38\x32\x31\x39\
\x33\x38\x2c\x32\x2e\x37\x36\x36\x38\x39\x32\x36\x20\x32\x2e\x37\
\x35\x36\x39\x34\x35\x39\x2c\x35\x2e\x33\x35\x34\x33\x34\x36\x36\
\x20\x31\x32\x2e\x38\x31\x32\x32\x38\x2c\x31\x35\x2e\x38\x31\x35\
\x34\x38\x39\x20\x63\x20\x2d\x39\x2e\x39\x36\x39\x37\x38\x33\x38\
\x2c\x31\x30\x2e\x38\x39\x32\x37\x39\x33\x20\x2d\x37\x2e\x32\x35\
\x34\x30\x39\x31\x39\x2c\x31\x33\x2e\x34\x34\x33\x35\x38\x38\x20\
\x33\x2e\x34\x35\x36\x35\x34\x2c\x33\x2e\x33\x31\x35\x38\x37\x36\
\x20\x31\x30\x2e\x39\x34\x38\x30\x33\x39\x2c\x39\x2e\x38\x34\x32\
\x37\x33\x32\x20\x31\x33\x2e\x31\x37\x37\x39\x32\x36\x2c\x37\x2e\
\x34\x35\x36\x35\x31\x35\x20\x33\x2e\x32\x33\x30\x34\x33\x2c\x2d\
\x33\x2e\x33\x31\x37\x38\x37\x36\x20\x35\x2e\x37\x34\x37\x32\x31\
\x31\x2c\x2d\x36\x2e\x32\x36\x34\x38\x31\x35\x20\x37\x2e\x35\x31\
\x34\x32\x33\x2c\x2d\x39\x2e\x37\x37\x31\x33\x37\x33\x31\x20\x35\
\x2e\x37\x30\x30\x37\x32\x33\x2c\x2d\x39\x2e\x35\x38\x35\x38\x34\
\x34\x36\x20\x7a\x22\x0a\x20\x20\x20\x20\x20\x69\x64\x3d\x22\x70\
\x61\x74\x68\x31\x31\x33\x30\x22\x0a\x20\x20\x20\x20\x20\x73\x6f\
\x64\x69\x70\x6f\x64\x69\x3a\x73\x74\x72\x6f\x6b\x65\x2d\x63\x6d\
\x79\x6b\x3d\x22\x28\x30\x20\x30\x20\x30\x20\x30\x2e\x38\x29\x22\
\x0a\x20\x20\x20\x20\x20\x73\x74\x79\x6c\x65\x3d\x22\x66\x6f\x6e\
\x74\x2d\x73\x69\x7a\x65\x3a\x31\x32\x70\x78\x3b\x66\x69\x6c\x6c\
\x3a\x23\x30\x30\x30\x30\x66\x66\x3b\x66\x69\x6c\x6c\x2d\x72\x75\
\x6c\x65\x3a\x65\x76\x65\x6e\x6f\x64\x64\x3b\x73\x74\x72\x6f\x6b\
\x65\x3a\x23\x33\x33\x33\x33\x33\x33\x3b\x73\x74\x72\x6f\x6b\x65\
\x2d\x77\x69\x64\x74\x68\x3a\x32\x2e\x35\x30\x30\x30\x30\x30\x30\
\x30\x30\x30\x30\x30\x30\x30\x30\x30\x30\x3b\x73\x74\x72\x6f\x6b\
\x65\x2d\x6c\x69\x6e\x65\x63\x61\x70\x3a\x72\x6f\x75\x6e\x64\x3b\
\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x6a\x6f\x69\x6e\x3a\
\x72\x6f\x75\x6e\x64\x3b\x73\x74\x72\x6f\x6b\x65\x2d\x64\x61\x73\
\x68\x61\x72\x72\x61\x79\x3a\x6e\x6f\x6e\x65\x22\x20\x2f\x3e\x0a\
\x3c\x2f\x73\x76\x67\x3e\x0a\
\x00\x00\x07\xf1\
\x3c\
\x3f\x78\x6d\x6c\x20\x76\x65\x72\x73\x69\x6f\x6e\x3d\x22\x31\x2e\
\x30\x22\x20\x65\x6e\x63\x6f\x64\x69\x6e\x67\x3d\x22\x55\x54\x46\
\x2d\x38\x22\x20\x73\x74\x61\x6e\x64\x61\x6c\x6f\x6e\x65\x3d\x22\
\x6e\x6f\x22\x3f\x3e\x0a\x3c\x21\x2d\x2d\x20\x43\x72\x65\x61\x74\
\x65\x64\x20\x77\x69\x74\x68\x20\x49\x6e\x6b\x73\x63\x61\x70\x65\
\x20\x28\x68\x74\x74\x70\x3a\x2f\x2f\x77\x77\x77\x2e\x69\x6e\x6b\
\x73\x63\x61\x70\x65\x2e\x6f\x72\x67\x2f\x29\x20\x2d\x2d\x3e\x0a\
\x0a\x3c\x73\x76\x67\x0a\x20\x20\x20\x78\x6d\x6c\x6e\x73\x3a\x64\
\x63\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x70\x75\x72\x6c\x2e\x6f\
\x72\x67\x2f\x64\x63\x2f\x65\x6c\x65\x6d\x65\x6e\x74\x73\x2f\x31\
\x2e\x31\x2f\x22\x0a\x20\x20\x20\x78\x6d\x6c\x6e\x73\x3a\x63\x63\
\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x63\x72\x65\x61\x74\x69\x76\
\x65\x63\x6f\x6d\x6d\x6f\x6e\x73\x2e\x6f\x72\x67\x2f\x6e\x73\x23\
\x22\x0a\x20\x20\x20\x78\x6d\x6c\x6e\x73\x3a\x72\x64\x66\x3d\x22\
\x68\x74\x74\x70\x3a\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\
\x67\x2f\x31\x39\x39\x39\x2f\x30\x32\x2f\x32\x32\x2d\x72\x64\x66\
\x2d\x73\x79\x6e\x74\x61\x78\x2d\x6e\x73\x23\x22\x0a\x20\x20\x20\
\x78\x6d\x6c\x6e\x73\x3a\x73\x76\x67\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x0a\x20\x20\x20\x78\x6d\x6c\x6e\x73\x3d\
\x22\x68\x74\x74\x70\x3a\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\
\x72\x67\x2f\x32\x30\x30\x30\x2f\x73\x76\x67\x22\x0a\x20\x20\x20\
\x78\x6d\x6c\x6e\x73\x3a\x78\x6c\x69\x6e\x6b\x3d\x22\x68\x74\x74\
\x70\x3a\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x31\
\x39\x39\x39\x2f\x78\x6c\x69\x6e\x6b\x22\x0a\x20\x20\x20\x78\x6d\
\x6c\x6e\x73\x3a\x73\x6f\x64\x69\x70\x6f\x64\x69\x3d\x22\x68\x74\
\x74\x70\x3a\x2f\x2f\x73\x6f\x64\x69\x70\x6f\x64\x69\x2e\x73\x6f\
\x75\x72\x63\x65\x66\x6f\x72\x67\x65\x2e\x6e\x65\x74\x2f\x44\x54\
\x44\x2f\x73\x6f\x64\x69\x70\x6f\x64\x69\x2d\x30\x2e\x64\x74\x64\
\x22\x0a\x20\x20\x20\x78\x6d\x6c\x6e\x73\x3a\x69\x6e\x6b\x73\x63\
\x61\x70\x65\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x77\x77\x77\x2e\
\x69\x6e\x6b\x73\x63\x61\x70\x65\x2e\x6f\x72\x67\x2f\x6e\x61\x6d\
\x65\x73\x70\x61\x63\x65\x73\x2f\x69\x6e\x6b\x73\x63\x61\x70\x65\
\x22\x0a\x20\x20\x20\x69\x64\x3d\x22\x73\x76\x67\x33\x35\x31\x36\
\x22\x0a\x20\x20\x20\x76\x65\x72\x73\x69\x6f\x6e\x3d\x22\x31\x2e\
\x31\x22\x0a\x20\x20\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\x76\
\x65\x72\x73\x69\x6f\x6e\x3d\x22\x30\x2e\x34\x38\x2e\x33\x2e\x31\
\x20\x72\x39\x38\x38\x36\x22\x0a\x20\x20\x20\x77\x69\x64\x74\x68\
\x3d\x22\x33\x32\x22\x0a\x20\x20\x20\x68\x65\x69\x67\x68\x74\x3d\
\x22\x33\x32\x22\x0a\x20\x20\x20\x73\x6f\x64\x69\x70\x6f\x64\x69\
\x3a\x64\x6f\x63\x6e\x61\x6d\x65\x3d\x22\x6d\x61\x67\x5f\x72\x65\
\x73\x70\x6f\x6e\x73\x65\x2e\x69\x63\x6f\x22\x3e\x0a\x20\x20\x3c\
\x6d\x65\x74\x61\x64\x61\x74\x61\x0a\x20\x20\x20\x20\x20\x69\x64\
\x3d\x22\x6d\x65\x74\x61\x64\x61\x74\x61\x33\x35\x32\x32\x22\x3e\
\x0a\x20\x20\x20\x20\x3c\x72\x64\x66\x3a\x52\x44\x46\x3e\x0a\x20\
\x20\x20\x20\x20\x20\x3c\x63\x63\x3a\x57\x6f\x72\x6b\x0a\x20\x20\
\x20\x20\x20\x20\x20\x20\x20\x72\x64\x66\x3a\x61\x62\x6f\x75\x74\
\x3d\x22\x22\x3e\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x3c\x64\x63\
\x3a\x66\x6f\x72\x6d\x61\x74\x3e\x69\x6d\x61\x67\x65\x2f\x73\x76\
\x67\x2b\x78\x6d\x6c\x3c\x2f\x64\x63\x3a\x66\x6f\x72\x6d\x61\x74\
\x3e\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x3c\x64\x63\x3a\x74\x79\
\x70\x65\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x72\x64\
\x66\x3a\x72\x65\x73\x6f\x75\x72\x63\x65\x3d\x22\x68\x74\x74\x70\
\x3a\x2f\x2f\x70\x75\x72\x6c\x2e\x6f\x72\x67\x2f\x64\x63\x2f\x64\
\x63\x6d\x69\x74\x79\x70\x65\x2f\x53\x74\x69\x6c\x6c\x49\x6d\x61\
\x67\x65\x22\x20\x2f\x3e\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x3c\
\x64\x63\x3a\x74\x69\x74\x6c\x65\x3e\x3c\x2f\x64\x63\x3a\x74\x69\
\x74\x6c\x65\x3e\x0a\x20\x20\x20\x20\x20\x20\x3c\x2f\x63\x63\x3a\
\x57\x6f\x72\x6b\x3e\x0a\x20\x20\x20\x20\x3c\x2f\x72\x64\x66\x3a\
\x52\x44\x46\x3e\x0a\x20\x20\x3c\x2f\x6d\x65\x74\x61\x64\x61\x74\
\x61\x3e\x0a\x20\x20\x3c\x64\x65\x66\x73\x0a\x20\x20\x20\x20\x20\
\x69\x64\x3d\x22\x64\x65\x66\x73\x33\x35\x32\x30\x22\x20\x2f\x3e\
\x0a\x20\x20\x3c\x73\x6f\x64\x69\x70\x6f\x64\x69\x3a\x6e\x61\x6d\
\x65\x64\x76\x69\x65\x77\x0a\x20\x20\x20\x20\x20\x70\x61\x67\x65\
\x63\x6f\x6c\x6f\x72\x3d\x22\x23\x66\x66\x66\x66\x66\x66\x22\x0a\
\x20\x20\x20\x20\x20\x62\x6f\x72\x64\x65\x72\x63\x6f\x6c\x6f\x72\
\x3d\x22\x23\x36\x36\x36\x36\x36\x36\x22\x0a\x20\x20\x20\x20\x20\
\x62\x6f\x72\x64\x65\x72\x6f\x70\x61\x63\x69\x74\x79\x3d\x22\x31\
\x22\x0a\x20\x20\x20\x20\x20\x6f\x62\x6a\x65\x63\x74\x74\x6f\x6c\
\x65\x72\x61\x6e\x63\x65\x3d\x22\x31\x30\x22\x0a\x20\x20\x20\x20\
\x20\x67\x72\x69\x64\x74\x6f\x6c\x65\x72\x61\x6e\x63\x65\x3d\x22\
\x31\x30\x22\x0a\x20\x20\x20\x20\x20\x67\x75\x69\x64\x65\x74\x6f\
\x6c\x65\x72\x61\x6e\x63\x65\x3d\x22\x31\x30\x22\x0a\x20\x20\x20\
\x20\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\x70\x61\x67\x65\x6f\
\x70\x61\x63\x69\x74\x79\x3d\x22\x30\x22\x0a\x20\x20\x20\x20\x20\
\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\x70\x61\x67\x65\x73\x68\x61\
\x64\x6f\x77\x3d\x22\x32\x22\x0a\x20\x20\x20\x20\x20\x69\x6e\x6b\
\x73\x63\x61\x70\x65\x3a\x77\x69\x6e\x64\x6f\x77\x2d\x77\x69\x64\
\x74\x68\x3d\x22\x31\x33\x36\x34\x22\x0a\x20\x20\x20\x20\x20\x69\
\x6e\x6b\x73\x63\x61\x70\x65\x3a\x77\x69\x6e\x64\x6f\x77\x2d\x68\
\x65\x69\x67\x68\x74\x3d\x22\x37\x35\x30\x22\x0a\x20\x20\x20\x20\
\x20\x69\x64\x3d\x22\x6e\x61\x6d\x65\x64\x76\x69\x65\x77\x33\x35\
\x31\x38\x22\x0a\x20\x20\x20\x20\x20\x73\x68\x6f\x77\x67\x72\x69\
\x64\x3d\x22\x66\x61\x6c\x73\x65\x22\x0a\x20\x20\x20\x20\x20\x69\
\x6e\x6b\x73\x63\x61\x70\x65\x3a\x7a\x6f\x6f\x6d\x3d\x22\x37\x2e\
\x33\x37\x35\x22\x0a\x20\x20\x20\x20\x20\x69\x6e\x6b\x73\x63\x61\
\x70\x65\x3a\x63\x78\x3d\x22\x31\x36\x22\x0a\x20\x20\x20\x20\x20\
\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\x63\x79\x3d\x22\x31\x36\x22\
\x0a\x20\x20\x20\x20\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\x77\
\x69\x6e\x64\x6f\x77\x2d\x78\x3d\x22\x30\x22\x0a\x20\x20\x20\x20\
\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\x77\x69\x6e\x64\x6f\x77\
\x2d\x79\x3d\x22\x31\x36\x22\x0a\x20\x20\x20\x20\x20\x69\x6e\x6b\
\x73\x63\x61\x70\x65\x3a\x77\x69\x6e\x64\x6f\x77\x2d\x6d\x61\x78\
\x69\x6d\x69\x7a\x65\x64\x3d\x22\x30\x22\x0a\x20\x20\x20\x20\x20\
\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\x63\x75\x72\x72\x65\x6e\x74\
\x2d\x6c\x61\x79\x65\x72\x3d\x22\x73\x76\x67\x33\x35\x31\x36\x22\
\x20\x2f\x3e\x0a\x20\x20\x3c\x69\x6d\x61\x67\x65\x0a\x20\x20\x20\
\x20\x20\x77\x69\x64\x74\x68\x3d\x22\x33\x32\x22\x0a\x20\x20\x20\
\x20\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x33\x32\x22\x0a\x20\x20\
\x20\x20\x20\x78\x6c\x69\x6e\x6b\x3a\x68\x72\x65\x66\x3d\x22\x64\
\x61\x74\x61\x3a\x69\x6d\x61\x67\x65\x2f\x70\x6e\x67\x3b\x62\x61\
\x73\x65\x36\x34\x2c\x69\x56\x42\x4f\x52\x77\x30\x4b\x47\x67\x6f\
\x41\x41\x41\x41\x4e\x53\x55\x68\x45\x55\x67\x41\x41\x41\x43\x41\
\x41\x41\x41\x41\x67\x43\x41\x59\x41\x41\x41\x42\x7a\x65\x6e\x72\
\x30\x41\x41\x41\x41\x42\x48\x4e\x43\x53\x56\x51\x49\x43\x41\x67\
\x49\x66\x41\x68\x6b\x69\x41\x41\x41\x41\x4c\x35\x4a\x52\x45\x46\
\x55\x0a\x57\x49\x58\x74\x6c\x6a\x45\x4f\x77\x6a\x41\x4d\x52\x56\
\x38\x51\x41\x7a\x63\x6f\x76\x52\x31\x33\x51\x4a\x51\x4e\x43\x63\
\x59\x69\x63\x51\x5a\x75\x56\x33\x6f\x4b\x4d\x2b\x51\x41\x56\x65\
\x78\x66\x65\x59\x6d\x58\x5a\x45\x6a\x69\x70\x36\x66\x45\x54\x67\
\x47\x4d\x35\x4d\x67\x45\x73\x45\x4e\x69\x63\x67\x41\x36\x0a\x51\
\x41\x66\x6f\x41\x42\x30\x67\x48\x65\x42\x59\x68\x39\x6c\x54\x6a\
\x6b\x65\x59\x66\x6c\x47\x41\x41\x70\x69\x74\x70\x37\x5a\x4e\x77\
\x2f\x4e\x54\x5a\x2f\x64\x72\x4d\x4c\x2b\x33\x46\x35\x51\x33\x63\
\x49\x48\x35\x48\x41\x54\x77\x33\x51\x46\x62\x48\x77\x76\x77\x42\
\x62\x75\x6c\x41\x4e\x54\x51\x57\x48\x41\x44\x0a\x71\x43\x77\x45\
\x6e\x32\x48\x63\x51\x67\x68\x41\x59\x55\x46\x51\x69\x47\x49\x57\
\x77\x67\x42\x52\x43\x36\x4a\x53\x37\x4c\x63\x67\x41\x59\x68\x59\
\x45\x44\x59\x6a\x6e\x77\x55\x5a\x67\x4e\x65\x43\x75\x42\x32\x33\
\x57\x35\x41\x43\x65\x43\x7a\x73\x38\x43\x46\x70\x73\x79\x41\x48\
\x61\x4c\x58\x67\x2b\x70\x42\x73\x0a\x48\x6a\x71\x38\x52\x6d\x44\
\x5a\x58\x6a\x6c\x56\x41\x47\x6e\x32\x78\x76\x67\x44\x79\x74\x74\
\x4a\x72\x38\x33\x33\x55\x79\x59\x41\x41\x41\x41\x41\x53\x55\x56\
\x4f\x52\x4b\x35\x43\x59\x49\x49\x3d\x0a\x22\x0a\x20\x20\x20\x20\
\x20\x69\x64\x3d\x22\x69\x6d\x61\x67\x65\x33\x35\x32\x34\x22\x0a\
\x20\x20\x20\x20\x20\x78\x3d\x22\x30\x22\x0a\x20\x20\x20\x20\x20\
\x79\x3d\x22\x30\x22\x20\x2f\x3e\x0a\x3c\x2f\x73\x76\x67\x3e\x0a\
\
\x00\x00\x08\xfd\
\x3c\
\x3f\x78\x6d\x6c\x20\x76\x65\x72\x73\x69\x6f\x6e\x3d\x22\x31\x2e\
\x30\x22\x20\x65\x6e\x63\x6f\x64\x69\x6e\x67\x3d\x22\x55\x54\x46\
\x2d\x38\x22\x20\x73\x74\x61\x6e\x64\x61\x6c\x6f\x6e\x65\x3d\x22\
\x6e\x6f\x22\x3f\x3e\x0a\x3c\x21\x2d\x2d\x20\x43\x72\x65\x61\x74\
\x65\x64\x20\x77\x69\x74\x68\x20\x49\x6e\x6b\x73\x63\x61\x70\x65\
\x20\x28\x68\x74\x74\x70\x3a\x2f\x2f\x77\x77\x77\x2e\x69\x6e\x6b\
\x73\x63\x61\x70\x65\x2e\x6f\x72\x67\x2f\x29\x20\x2d\x2d\x3e\x0a\
\x0a\x3c\x73\x76\x67\x0a\x20\x20\x20\x78\x6d\x6c\x6e\x73\x3a\x64\
\x63\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x70\x75\x72\x6c\x2e\x6f\
\x72\x67\x2f\x64\x63\x2f\x65\x6c\x65\x6d\x65\x6e\x74\x73\x2f\x31\
\x2e\x31\x2f\x22\x0a\x20\x20\x20\x78\x6d\x6c\x6e\x73\x3a\x63\x63\
\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x63\x72\x65\x61\x74\x69\x76\
\x65\x63\x6f\x6d\x6d\x6f\x6e\x73\x2e\x6f\x72\x67\x2f\x6e\x73\x23\
\x22\x0a\x20\x20\x20\x78\x6d\x6c\x6e\x73\x3a\x72\x64\x66\x3d\x22\
\x68\x74\x74\x70\x3a\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\
\x67\x2f\x31\x39\x39\x39\x2f\x30\x32\x2f\x32\x32\x2d\x72\x64\x66\
\x2d\x73\x79\x6e\x74\x61\x78\x2d\x6e\x73\x23\x22\x0a\x20\x20\x20\
\x78\x6d\x6c\x6e\x73\x3a\x73\x76\x67\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x0a\x20\x20\x20\x78\x6d\x6c\x6e\x73\x3d\
\x22\x68\x74\x74\x70\x3a\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\
\x72\x67\x2f\x32\x30\x30\x30\x2f\x73\x76\x67\x22\x0a\x20\x20\x20\
\x78\x6d\x6c\x6e\x73\x3a\x78\x6c\x69\x6e\x6b\x3d\x22\x68\x74\x74\
\x70\x3a\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x31\
\x39\x39\x39\x2f\x78\x6c\x69\x6e\x6b\x22\x0a\x20\x20\x20\x78\x6d\
\x6c\x6e\x73\x3a\x73\x6f\x64\x69\x70\x6f\x64\x69\x3d\x22\x68\x74\
\x74\x70\x3a\x2f\x2f\x73\x6f\x64\x69\x70\x6f\x64\x69\x2e\x73\x6f\
\x75\x72\x63\x65\x66\x6f\x72\x67\x65\x2e\x6e\x65\x74\x2f\x44\x54\
\x44\x2f\x73\x6f\x64\x69\x70\x6f\x64\x69\x2d\x30\x2e\x64\x74\x64\
\x22\x0a\x20\x20\x20\x78\x6d\x6c\x6e\x73\x3a\x69\x6e\x6b\x73\x63\
\x61\x70\x65\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x77\x77\x77\x2e\
\x69\x6e\x6b\x73\x63\x61\x70\x65\x2e\x6f\x72\x67\x2f\x6e\x61\x6d\
\x65\x73\x70\x61\x63\x65\x73\x2f\x69\x6e\x6b\x73\x63\x61\x70\x65\
\x22\x0a\x20\x20\x20\x69\x64\x3d\x22\x73\x76\x67\x34\x35\x35\x37\
\x22\x0a\x20\x20\x20\x76\x65\x72\x73\x69\x6f\x6e\x3d\x22\x31\x2e\
\x31\x22\x0a\x20\x20\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\x76\
\x65\x72\x73\x69\x6f\x6e\x3d\x22\x30\x2e\x34\x38\x2e\x33\x2e\x31\
\x20\x72\x39\x38\x38\x36\x22\x0a\x20\x20\x20\x77\x69\x64\x74\x68\
\x3d\x22\x33\x32\x22\x0a\x20\x20\x20\x68\x65\x69\x67\x68\x74\x3d\
\x22\x33\x32\x22\x0a\x20\x20\x20\x73\x6f\x64\x69\x70\x6f\x64\x69\
\x3a\x64\x6f\x63\x6e\x61\x6d\x65\x3d\x22\x61\x64\x64\x5f\x70\x6f\
\x6c\x65\x5f\x31\x2e\x73\x76\x67\x22\x3e\x0a\x20\x20\x3c\x6d\x65\
\x74\x61\x64\x61\x74\x61\x0a\x20\x20\x20\x20\x20\x69\x64\x3d\x22\
\x6d\x65\x74\x61\x64\x61\x74\x61\x34\x35\x36\x33\x22\x3e\x0a\x20\
\x20\x20\x20\x3c\x72\x64\x66\x3a\x52\x44\x46\x3e\x0a\x20\x20\x20\
\x20\x20\x20\x3c\x63\x63\x3a\x57\x6f\x72\x6b\x0a\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x72\x64\x66\x3a\x61\x62\x6f\x75\x74\x3d\x22\
\x22\x3e\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x3c\x64\x63\x3a\x66\
\x6f\x72\x6d\x61\x74\x3e\x69\x6d\x61\x67\x65\x2f\x73\x76\x67\x2b\
\x78\x6d\x6c\x3c\x2f\x64\x63\x3a\x66\x6f\x72\x6d\x61\x74\x3e\x0a\
\x20\x20\x20\x20\x20\x20\x20\x20\x3c\x64\x63\x3a\x74\x79\x70\x65\
\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x72\x64\x66\x3a\
\x72\x65\x73\x6f\x75\x72\x63\x65\x3d\x22\x68\x74\x74\x70\x3a\x2f\
\x2f\x70\x75\x72\x6c\x2e\x6f\x72\x67\x2f\x64\x63\x2f\x64\x63\x6d\
\x69\x74\x79\x70\x65\x2f\x53\x74\x69\x6c\x6c\x49\x6d\x61\x67\x65\
\x22\x20\x2f\x3e\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x3c\x64\x63\
\x3a\x74\x69\x74\x6c\x65\x3e\x3c\x2f\x64\x63\x3a\x74\x69\x74\x6c\
\x65\x3e\x0a\x20\x20\x20\x20\x20\x20\x3c\x2f\x63\x63\x3a\x57\x6f\
\x72\x6b\x3e\x0a\x20\x20\x20\x20\x3c\x2f\x72\x64\x66\x3a\x52\x44\
\x46\x3e\x0a\x20\x20\x3c\x2f\x6d\x65\x74\x61\x64\x61\x74\x61\x3e\
\x0a\x20\x20\x3c\x64\x65\x66\x73\x0a\x20\x20\x20\x20\x20\x69\x64\
\x3d\x22\x64\x65\x66\x73\x34\x35\x36\x31\x22\x20\x2f\x3e\x0a\x20\
\x20\x3c\x73\x6f\x64\x69\x70\x6f\x64\x69\x3a\x6e\x61\x6d\x65\x64\
\x76\x69\x65\x77\x0a\x20\x20\x20\x20\x20\x70\x61\x67\x65\x63\x6f\
\x6c\x6f\x72\x3d\x22\x23\x66\x66\x66\x66\x66\x66\x22\x0a\x20\x20\
\x20\x20\x20\x62\x6f\x72\x64\x65\x72\x63\x6f\x6c\x6f\x72\x3d\x22\
\x23\x36\x36\x36\x36\x36\x36\x22\x0a\x20\x20\x20\x20\x20\x62\x6f\
\x72\x64\x65\x72\x6f\x70\x61\x63\x69\x74\x79\x3d\x22\x31\x22\x0a\
\x20\x20\x20\x20\x20\x6f\x62\x6a\x65\x63\x74\x74\x6f\x6c\x65\x72\
\x61\x6e\x63\x65\x3d\x22\x31\x30\x22\x0a\x20\x20\x20\x20\x20\x67\
\x72\x69\x64\x74\x6f\x6c\x65\x72\x61\x6e\x63\x65\x3d\x22\x31\x30\
\x22\x0a\x20\x20\x20\x20\x20\x67\x75\x69\x64\x65\x74\x6f\x6c\x65\
\x72\x61\x6e\x63\x65\x3d\x22\x31\x30\x22\x0a\x20\x20\x20\x20\x20\
\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\x70\x61\x67\x65\x6f\x70\x61\
\x63\x69\x74\x79\x3d\x22\x30\x22\x0a\x20\x20\x20\x20\x20\x69\x6e\
\x6b\x73\x63\x61\x70\x65\x3a\x70\x61\x67\x65\x73\x68\x61\x64\x6f\
\x77\x3d\x22\x32\x22\x0a\x20\x20\x20\x20\x20\x69\x6e\x6b\x73\x63\
\x61\x70\x65\x3a\x77\x69\x6e\x64\x6f\x77\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x31\x33\x36\x34\x22\x0a\x20\x20\x20\x20\x20\x69\x6e\x6b\
\x73\x63\x61\x70\x65\x3a\x77\x69\x6e\x64\x6f\x77\x2d\x68\x65\x69\
\x67\x68\x74\x3d\x22\x37\x35\x30\x22\x0a\x20\x20\x20\x20\x20\x69\
\x64\x3d\x22\x6e\x61\x6d\x65\x64\x76\x69\x65\x77\x34\x35\x35\x39\
\x22\x0a\x20\x20\x20\x20\x20\x73\x68\x6f\x77\x67\x72\x69\x64\x3d\
\x22\x66\x61\x6c\x73\x65\x22\x0a\x20\x20\x20\x20\x20\x69\x6e\x6b\
\x73\x63\x61\x70\x65\x3a\x7a\x6f\x6f\x6d\x3d\x22\x37\x2e\x33\x37\
\x35\x22\x0a\x20\x20\x20\x20\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\
\x3a\x63\x78\x3d\x22\x32\x34\x2e\x30\x39\x35\x33\x33\x38\x22\x0a\
\x20\x20\x20\x20\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\x63\x79\
\x3d\x22\x31\x36\x22\x0a\x20\x20\x20\x20\x20\x69\x6e\x6b\x73\x63\
\x61\x70\x65\x3a\x77\x69\x6e\x64\x6f\x77\x2d\x78\x3d\x22\x30\x22\
\x0a\x20\x20\x20\x20\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\x77\
\x69\x6e\x64\x6f\x77\x2d\x79\x3d\x22\x31\x36\x22\x0a\x20\x20\x20\
\x20\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\x77\x69\x6e\x64\x6f\
\x77\x2d\x6d\x61\x78\x69\x6d\x69\x7a\x65\x64\x3d\x22\x30\x22\x0a\
\x20\x20\x20\x20\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\x63\x75\
\x72\x72\x65\x6e\x74\x2d\x6c\x61\x79\x65\x72\x3d\x22\x73\x76\x67\
\x34\x35\x35\x37\x22\x0a\x20\x20\x20\x20\x20\x69\x6e\x6b\x73\x63\
\x61\x70\x65\x3a\x73\x6e\x61\x70\x2d\x67\x6c\x6f\x62\x61\x6c\x3d\
\x22\x66\x61\x6c\x73\x65\x22\x20\x2f\x3e\x0a\x20\x20\x3c\x69\x6d\
\x61\x67\x65\x0a\x20\x20\x20\x20\x20\x77\x69\x64\x74\x68\x3d\x22\
\x33\x32\x22\x0a\x20\x20\x20\x20\x20\x68\x65\x69\x67\x68\x74\x3d\
\x22\x33\x32\x22\x0a\x20\x20\x20\x20\x20\x78\x6c\x69\x6e\x6b\x3a\
\x68\x72\x65\x66\x3d\x22\x64\x61\x74\x61\x3a\x69\x6d\x61\x67\x65\
\x2f\x70\x6e\x67\x3b\x62\x61\x73\x65\x36\x34\x2c\x69\x56\x42\x4f\
\x52\x77\x30\x4b\x47\x67\x6f\x41\x41\x41\x41\x4e\x53\x55\x68\x45\
\x55\x67\x41\x41\x41\x43\x41\x41\x41\x41\x41\x67\x43\x41\x59\x41\
\x41\x41\x42\x7a\x65\x6e\x72\x30\x41\x41\x41\x41\x42\x48\x4e\x43\
\x53\x56\x51\x49\x43\x41\x67\x49\x66\x41\x68\x6b\x69\x41\x41\x41\
\x41\x44\x56\x4a\x52\x45\x46\x55\x0a\x57\x49\x58\x74\x31\x7a\x45\
\x42\x41\x44\x41\x4d\x77\x7a\x42\x33\x2f\x44\x6c\x33\x4d\x50\x6f\
\x6f\x42\x4b\x77\x33\x55\x32\x33\x48\x75\x77\x54\x73\x4f\x34\x78\
\x58\x42\x51\x41\x41\x41\x41\x41\x41\x41\x41\x41\x41\x41\x41\x41\
\x41\x41\x41\x41\x77\x48\x64\x2f\x7a\x44\x78\x72\x74\x42\x44\x6b\
\x45\x71\x4e\x62\x67\x0a\x41\x41\x41\x41\x41\x45\x6c\x46\x54\x6b\
\x53\x75\x51\x6d\x43\x43\x0a\x22\x0a\x20\x20\x20\x20\x20\x69\x64\
\x3d\x22\x69\x6d\x61\x67\x65\x34\x35\x36\x35\x22\x0a\x20\x20\x20\
\x20\x20\x78\x3d\x22\x30\x22\x0a\x20\x20\x20\x20\x20\x79\x3d\x22\
\x30\x22\x20\x2f\x3e\x0a\x20\x20\x3c\x74\x65\x78\x74\x0a\x20\x20\
\x20\x20\x20\x78\x6d\x6c\x3a\x73\x70\x61\x63\x65\x3d\x22\x70\x72\
\x65\x73\x65\x72\x76\x65\x22\x0a\x20\x20\x20\x20\x20\x73\x74\x79\
\x6c\x65\x3d\x22\x66\x6f\x6e\x74\x2d\x73\x69\x7a\x65\x3a\x34\x30\
\x70\x78\x3b\x66\x6f\x6e\x74\x2d\x73\x74\x79\x6c\x65\x3a\x6e\x6f\
\x72\x6d\x61\x6c\x3b\x66\x6f\x6e\x74\x2d\x77\x65\x69\x67\x68\x74\
\x3a\x6e\x6f\x72\x6d\x61\x6c\x3b\x6c\x69\x6e\x65\x2d\x68\x65\x69\
\x67\x68\x74\x3a\x31\x32\x35\x25\x3b\x6c\x65\x74\x74\x65\x72\x2d\
\x73\x70\x61\x63\x69\x6e\x67\x3a\x30\x70\x78\x3b\x77\x6f\x72\x64\
\x2d\x73\x70\x61\x63\x69\x6e\x67\x3a\x30\x70\x78\x3b\x66\x69\x6c\
\x6c\x3a\x23\x30\x30\x30\x30\x66\x66\x3b\x66\x69\x6c\x6c\x2d\x6f\
\x70\x61\x63\x69\x74\x79\x3a\x31\x3b\x73\x74\x72\x6f\x6b\x65\x3a\
\x6e\x6f\x6e\x65\x3b\x66\x6f\x6e\x74\x2d\x66\x61\x6d\x69\x6c\x79\
\x3a\x53\x61\x6e\x73\x22\x0a\x20\x20\x20\x20\x20\x78\x3d\x22\x33\
\x2e\x39\x36\x36\x31\x30\x31\x39\x22\x0a\x20\x20\x20\x20\x20\x79\
\x3d\x22\x32\x37\x2e\x30\x35\x30\x38\x34\x36\x22\x0a\x20\x20\x20\
\x20\x20\x69\x64\x3d\x22\x74\x65\x78\x74\x34\x37\x36\x38\x22\x0a\
\x20\x20\x20\x20\x20\x73\x6f\x64\x69\x70\x6f\x64\x69\x3a\x6c\x69\
\x6e\x65\x73\x70\x61\x63\x69\x6e\x67\x3d\x22\x31\x32\x35\x25\x22\
\x3e\x3c\x74\x73\x70\x61\x6e\x0a\x20\x20\x20\x20\x20\x20\x20\x73\
\x6f\x64\x69\x70\x6f\x64\x69\x3a\x72\x6f\x6c\x65\x3d\x22\x6c\x69\
\x6e\x65\x22\x0a\x20\x20\x20\x20\x20\x20\x20\x69\x64\x3d\x22\x74\
\x73\x70\x61\x6e\x34\x37\x37\x30\x22\x0a\x20\x20\x20\x20\x20\x20\
\x20\x78\x3d\x22\x33\x2e\x39\x36\x36\x31\x30\x31\x39\x22\x0a\x20\
\x20\x20\x20\x20\x20\x20\x79\x3d\x22\x32\x37\x2e\x30\x35\x30\x38\
\x34\x36\x22\x3e\x6f\x3c\x2f\x74\x73\x70\x61\x6e\x3e\x3c\x2f\x74\
\x65\x78\x74\x3e\x0a\x3c\x2f\x73\x76\x67\x3e\x0a\
\x00\x00\x08\xfd\
\x3c\
\x3f\x78\x6d\x6c\x20\x76\x65\x72\x73\x69\x6f\x6e\x3d\x22\x31\x2e\
\x30\x22\x20\x65\x6e\x63\x6f\x64\x69\x6e\x67\x3d\x22\x55\x54\x46\
\x2d\x38\x22\x20\x73\x74\x61\x6e\x64\x61\x6c\x6f\x6e\x65\x3d\x22\
\x6e\x6f\x22\x3f\x3e\x0a\x3c\x21\x2d\x2d\x20\x43\x72\x65\x61\x74\
\x65\x64\x20\x77\x69\x74\x68\x20\x49\x6e\x6b\x73\x63\x61\x70\x65\
\x20\x28\x68\x74\x74\x70\x3a\x2f\x2f\x77\x77\x77\x2e\x69\x6e\x6b\
\x73\x63\x61\x70\x65\x2e\x6f\x72\x67\x2f\x29\x20\x2d\x2d\x3e\x0a\
\x0a\x3c\x73\x76\x67\x0a\x20\x20\x20\x78\x6d\x6c\x6e\x73\x3a\x64\
\x63\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x70\x75\x72\x6c\x2e\x6f\
\x72\x67\x2f\x64\x63\x2f\x65\x6c\x65\x6d\x65\x6e\x74\x73\x2f\x31\
\x2e\x31\x2f\x22\x0a\x20\x20\x20\x78\x6d\x6c\x6e\x73\x3a\x63\x63\
\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x63\x72\x65\x61\x74\x69\x76\
\x65\x63\x6f\x6d\x6d\x6f\x6e\x73\x2e\x6f\x72\x67\x2f\x6e\x73\x23\
\x22\x0a\x20\x20\x20\x78\x6d\x6c\x6e\x73\x3a\x72\x64\x66\x3d\x22\
\x68\x74\x74\x70\x3a\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\
\x67\x2f\x31\x39\x39\x39\x2f\x30\x32\x2f\x32\x32\x2d\x72\x64\x66\
\x2d\x73\x79\x6e\x74\x61\x78\x2d\x6e\x73\x23\x22\x0a\x20\x20\x20\
\x78\x6d\x6c\x6e\x73\x3a\x73\x76\x67\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x0a\x20\x20\x20\x78\x6d\x6c\x6e\x73\x3d\
\x22\x68\x74\x74\x70\x3a\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\
\x72\x67\x2f\x32\x30\x30\x30\x2f\x73\x76\x67\x22\x0a\x20\x20\x20\
\x78\x6d\x6c\x6e\x73\x3a\x78\x6c\x69\x6e\x6b\x3d\x22\x68\x74\x74\
\x70\x3a\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x31\
\x39\x39\x39\x2f\x78\x6c\x69\x6e\x6b\x22\x0a\x20\x20\x20\x78\x6d\
\x6c\x6e\x73\x3a\x73\x6f\x64\x69\x70\x6f\x64\x69\x3d\x22\x68\x74\
\x74\x70\x3a\x2f\x2f\x73\x6f\x64\x69\x70\x6f\x64\x69\x2e\x73\x6f\
\x75\x72\x63\x65\x66\x6f\x72\x67\x65\x2e\x6e\x65\x74\x2f\x44\x54\
\x44\x2f\x73\x6f\x64\x69\x70\x6f\x64\x69\x2d\x30\x2e\x64\x74\x64\
\x22\x0a\x20\x20\x20\x78\x6d\x6c\x6e\x73\x3a\x69\x6e\x6b\x73\x63\
\x61\x70\x65\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x77\x77\x77\x2e\
\x69\x6e\x6b\x73\x63\x61\x70\x65\x2e\x6f\x72\x67\x2f\x6e\x61\x6d\
\x65\x73\x70\x61\x63\x65\x73\x2f\x69\x6e\x6b\x73\x63\x61\x70\x65\
\x22\x0a\x20\x20\x20\x69\x64\x3d\x22\x73\x76\x67\x34\x35\x35\x37\
\x22\x0a\x20\x20\x20\x76\x65\x72\x73\x69\x6f\x6e\x3d\x22\x31\x2e\
\x31\x22\x0a\x20\x20\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\x76\
\x65\x72\x73\x69\x6f\x6e\x3d\x22\x30\x2e\x34\x38\x2e\x33\x2e\x31\
\x20\x72\x39\x38\x38\x36\x22\x0a\x20\x20\x20\x77\x69\x64\x74\x68\
\x3d\x22\x33\x32\x22\x0a\x20\x20\x20\x68\x65\x69\x67\x68\x74\x3d\
\x22\x33\x32\x22\x0a\x20\x20\x20\x73\x6f\x64\x69\x70\x6f\x64\x69\
\x3a\x64\x6f\x63\x6e\x61\x6d\x65\x3d\x22\x61\x64\x64\x5f\x70\x6f\
\x6c\x65\x5f\x31\x2e\x73\x76\x67\x22\x3e\x0a\x20\x20\x3c\x6d\x65\
\x74\x61\x64\x61\x74\x61\x0a\x20\x20\x20\x20\x20\x69\x64\x3d\x22\
\x6d\x65\x74\x61\x64\x61\x74\x61\x34\x35\x36\x33\x22\x3e\x0a\x20\
\x20\x20\x20\x3c\x72\x64\x66\x3a\x52\x44\x46\x3e\x0a\x20\x20\x20\
\x20\x20\x20\x3c\x63\x63\x3a\x57\x6f\x72\x6b\x0a\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x72\x64\x66\x3a\x61\x62\x6f\x75\x74\x3d\x22\
\x22\x3e\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x3c\x64\x63\x3a\x66\
\x6f\x72\x6d\x61\x74\x3e\x69\x6d\x61\x67\x65\x2f\x73\x76\x67\x2b\
\x78\x6d\x6c\x3c\x2f\x64\x63\x3a\x66\x6f\x72\x6d\x61\x74\x3e\x0a\
\x20\x20\x20\x20\x20\x20\x20\x20\x3c\x64\x63\x3a\x74\x79\x70\x65\
\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x72\x64\x66\x3a\
\x72\x65\x73\x6f\x75\x72\x63\x65\x3d\x22\x68\x74\x74\x70\x3a\x2f\
\x2f\x70\x75\x72\x6c\x2e\x6f\x72\x67\x2f\x64\x63\x2f\x64\x63\x6d\
\x69\x74\x79\x70\x65\x2f\x53\x74\x69\x6c\x6c\x49\x6d\x61\x67\x65\
\x22\x20\x2f\x3e\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x3c\x64\x63\
\x3a\x74\x69\x74\x6c\x65\x3e\x3c\x2f\x64\x63\x3a\x74\x69\x74\x6c\
\x65\x3e\x0a\x20\x20\x20\x20\x20\x20\x3c\x2f\x63\x63\x3a\x57\x6f\
\x72\x6b\x3e\x0a\x20\x20\x20\x20\x3c\x2f\x72\x64\x66\x3a\x52\x44\
\x46\x3e\x0a\x20\x20\x3c\x2f\x6d\x65\x74\x61\x64\x61\x74\x61\x3e\
\x0a\x20\x20\x3c\x64\x65\x66\x73\x0a\x20\x20\x20\x20\x20\x69\x64\
\x3d\x22\x64\x65\x66\x73\x34\x35\x36\x31\x22\x20\x2f\x3e\x0a\x20\
\x20\x3c\x73\x6f\x64\x69\x70\x6f\x64\x69\x3a\x6e\x61\x6d\x65\x64\
\x76\x69\x65\x77\x0a\x20\x20\x20\x20\x20\x70\x61\x67\x65\x63\x6f\
\x6c\x6f\x72\x3d\x22\x23\x66\x66\x66\x66\x66\x66\x22\x0a\x20\x20\
\x20\x20\x20\x62\x6f\x72\x64\x65\x72\x63\x6f\x6c\x6f\x72\x3d\x22\
\x23\x36\x36\x36\x36\x36\x36\x22\x0a\x20\x20\x20\x20\x20\x62\x6f\
\x72\x64\x65\x72\x6f\x70\x61\x63\x69\x74\x79\x3d\x22\x31\x22\x0a\
\x20\x20\x20\x20\x20\x6f\x62\x6a\x65\x63\x74\x74\x6f\x6c\x65\x72\
\x61\x6e\x63\x65\x3d\x22\x31\x30\x22\x0a\x20\x20\x20\x20\x20\x67\
\x72\x69\x64\x74\x6f\x6c\x65\x72\x61\x6e\x63\x65\x3d\x22\x31\x30\
\x22\x0a\x20\x20\x20\x20\x20\x67\x75\x69\x64\x65\x74\x6f\x6c\x65\
\x72\x61\x6e\x63\x65\x3d\x22\x31\x30\x22\x0a\x20\x20\x20\x20\x20\
\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\x70\x61\x67\x65\x6f\x70\x61\
\x63\x69\x74\x79\x3d\x22\x30\x22\x0a\x20\x20\x20\x20\x20\x69\x6e\
\x6b\x73\x63\x61\x70\x65\x3a\x70\x61\x67\x65\x73\x68\x61\x64\x6f\
\x77\x3d\x22\x32\x22\x0a\x20\x20\x20\x20\x20\x69\x6e\x6b\x73\x63\
\x61\x70\x65\x3a\x77\x69\x6e\x64\x6f\x77\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x31\x33\x36\x34\x22\x0a\x20\x20\x20\x20\x20\x69\x6e\x6b\
\x73\x63\x61\x70\x65\x3a\x77\x69\x6e\x64\x6f\x77\x2d\x68\x65\x69\
\x67\x68\x74\x3d\x22\x37\x35\x30\x22\x0a\x20\x20\x20\x20\x20\x69\
\x64\x3d\x22\x6e\x61\x6d\x65\x64\x76\x69\x65\x77\x34\x35\x35\x39\
\x22\x0a\x20\x20\x20\x20\x20\x73\x68\x6f\x77\x67\x72\x69\x64\x3d\
\x22\x66\x61\x6c\x73\x65\x22\x0a\x20\x20\x20\x20\x20\x69\x6e\x6b\
\x73\x63\x61\x70\x65\x3a\x7a\x6f\x6f\x6d\x3d\x22\x37\x2e\x33\x37\
\x35\x22\x0a\x20\x20\x20\x20\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\
\x3a\x63\x78\x3d\x22\x32\x34\x2e\x30\x39\x35\x33\x33\x38\x22\x0a\
\x20\x20\x20\x20\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\x63\x79\
\x3d\x22\x31\x36\x22\x0a\x20\x20\x20\x20\x20\x69\x6e\x6b\x73\x63\
\x61\x70\x65\x3a\x77\x69\x6e\x64\x6f\x77\x2d\x78\x3d\x22\x30\x22\
\x0a\x20\x20\x20\x20\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\x77\
\x69\x6e\x64\x6f\x77\x2d\x79\x3d\x22\x31\x36\x22\x0a\x20\x20\x20\
\x20\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\x77\x69\x6e\x64\x6f\
\x77\x2d\x6d\x61\x78\x69\x6d\x69\x7a\x65\x64\x3d\x22\x30\x22\x0a\
\x20\x20\x20\x20\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\x63\x75\
\x72\x72\x65\x6e\x74\x2d\x6c\x61\x79\x65\x72\x3d\x22\x73\x76\x67\
\x34\x35\x35\x37\x22\x0a\x20\x20\x20\x20\x20\x69\x6e\x6b\x73\x63\
\x61\x70\x65\x3a\x73\x6e\x61\x70\x2d\x67\x6c\x6f\x62\x61\x6c\x3d\
\x22\x66\x61\x6c\x73\x65\x22\x20\x2f\x3e\x0a\x20\x20\x3c\x69\x6d\
\x61\x67\x65\x0a\x20\x20\x20\x20\x20\x77\x69\x64\x74\x68\x3d\x22\
\x33\x32\x22\x0a\x20\x20\x20\x20\x20\x68\x65\x69\x67\x68\x74\x3d\
\x22\x33\x32\x22\x0a\x20\x20\x20\x20\x20\x78\x6c\x69\x6e\x6b\x3a\
\x68\x72\x65\x66\x3d\x22\x64\x61\x74\x61\x3a\x69\x6d\x61\x67\x65\
\x2f\x70\x6e\x67\x3b\x62\x61\x73\x65\x36\x34\x2c\x69\x56\x42\x4f\
\x52\x77\x30\x4b\x47\x67\x6f\x41\x41\x41\x41\x4e\x53\x55\x68\x45\
\x55\x67\x41\x41\x41\x43\x41\x41\x41\x41\x41\x67\x43\x41\x59\x41\
\x41\x41\x42\x7a\x65\x6e\x72\x30\x41\x41\x41\x41\x42\x48\x4e\x43\
\x53\x56\x51\x49\x43\x41\x67\x49\x66\x41\x68\x6b\x69\x41\x41\x41\
\x41\x44\x56\x4a\x52\x45\x46\x55\x0a\x57\x49\x58\x74\x31\x7a\x45\
\x42\x41\x44\x41\x4d\x77\x7a\x42\x33\x2f\x44\x6c\x33\x4d\x50\x6f\
\x6f\x42\x4b\x77\x33\x55\x32\x33\x48\x75\x77\x54\x73\x4f\x34\x78\
\x58\x42\x51\x41\x41\x41\x41\x41\x41\x41\x41\x41\x41\x41\x41\x41\
\x41\x41\x41\x41\x77\x48\x64\x2f\x7a\x44\x78\x72\x74\x42\x44\x6b\
\x45\x71\x4e\x62\x67\x0a\x41\x41\x41\x41\x41\x45\x6c\x46\x54\x6b\
\x53\x75\x51\x6d\x43\x43\x0a\x22\x0a\x20\x20\x20\x20\x20\x69\x64\
\x3d\x22\x69\x6d\x61\x67\x65\x34\x35\x36\x35\x22\x0a\x20\x20\x20\
\x20\x20\x78\x3d\x22\x30\x22\x0a\x20\x20\x20\x20\x20\x79\x3d\x22\
\x30\x22\x20\x2f\x3e\x0a\x20\x20\x3c\x74\x65\x78\x74\x0a\x20\x20\
\x20\x20\x20\x78\x6d\x6c\x3a\x73\x70\x61\x63\x65\x3d\x22\x70\x72\
\x65\x73\x65\x72\x76\x65\x22\x0a\x20\x20\x20\x20\x20\x73\x74\x79\
\x6c\x65\x3d\x22\x66\x6f\x6e\x74\x2d\x73\x69\x7a\x65\x3a\x34\x30\
\x70\x78\x3b\x66\x6f\x6e\x74\x2d\x73\x74\x79\x6c\x65\x3a\x6e\x6f\
\x72\x6d\x61\x6c\x3b\x66\x6f\x6e\x74\x2d\x77\x65\x69\x67\x68\x74\
\x3a\x6e\x6f\x72\x6d\x61\x6c\x3b\x6c\x69\x6e\x65\x2d\x68\x65\x69\
\x67\x68\x74\x3a\x31\x32\x35\x25\x3b\x6c\x65\x74\x74\x65\x72\x2d\
\x73\x70\x61\x63\x69\x6e\x67\x3a\x30\x70\x78\x3b\x77\x6f\x72\x64\
\x2d\x73\x70\x61\x63\x69\x6e\x67\x3a\x30\x70\x78\x3b\x66\x69\x6c\
\x6c\x3a\x23\x30\x30\x30\x30\x66\x66\x3b\x66\x69\x6c\x6c\x2d\x6f\
\x70\x61\x63\x69\x74\x79\x3a\x31\x3b\x73\x74\x72\x6f\x6b\x65\x3a\
\x6e\x6f\x6e\x65\x3b\x66\x6f\x6e\x74\x2d\x66\x61\x6d\x69\x6c\x79\
\x3a\x53\x61\x6e\x73\x22\x0a\x20\x20\x20\x20\x20\x78\x3d\x22\x33\
\x2e\x39\x36\x36\x31\x30\x31\x39\x22\x0a\x20\x20\x20\x20\x20\x79\
\x3d\x22\x32\x37\x2e\x30\x35\x30\x38\x34\x36\x22\x0a\x20\x20\x20\
\x20\x20\x69\x64\x3d\x22\x74\x65\x78\x74\x34\x37\x36\x38\x22\x0a\
\x20\x20\x20\x20\x20\x73\x6f\x64\x69\x70\x6f\x64\x69\x3a\x6c\x69\
\x6e\x65\x73\x70\x61\x63\x69\x6e\x67\x3d\x22\x31\x32\x35\x25\x22\
\x3e\x3c\x74\x73\x70\x61\x6e\x0a\x20\x20\x20\x20\x20\x20\x20\x73\
\x6f\x64\x69\x70\x6f\x64\x69\x3a\x72\x6f\x6c\x65\x3d\x22\x6c\x69\
\x6e\x65\x22\x0a\x20\x20\x20\x20\x20\x20\x20\x69\x64\x3d\x22\x74\
\x73\x70\x61\x6e\x34\x37\x37\x30\x22\x0a\x20\x20\x20\x20\x20\x20\
\x20\x78\x3d\x22\x33\x2e\x39\x36\x36\x31\x30\x31\x39\x22\x0a\x20\
\x20\x20\x20\x20\x20\x20\x79\x3d\x22\x32\x37\x2e\x30\x35\x30\x38\
\x34\x36\x22\x3e\x6f\x3c\x2f\x74\x73\x70\x61\x6e\x3e\x3c\x2f\x74\
\x65\x78\x74\x3e\x0a\x3c\x2f\x73\x76\x67\x3e\x0a\
\x00\x00\x0b\x22\
\x3c\
\x3f\x78\x6d\x6c\x20\x76\x65\x72\x73\x69\x6f\x6e\x3d\x22\x31\x2e\
\x30\x22\x20\x65\x6e\x63\x6f\x64\x69\x6e\x67\x3d\x22\x55\x54\x46\
\x2d\x38\x22\x20\x73\x74\x61\x6e\x64\x61\x6c\x6f\x6e\x65\x3d\x22\
\x6e\x6f\x22\x3f\x3e\x0a\x3c\x21\x2d\x2d\x20\x43\x72\x65\x61\x74\
\x65\x64\x20\x77\x69\x74\x68\x20\x49\x6e\x6b\x73\x63\x61\x70\x65\
\x20\x28\x68\x74\x74\x70\x3a\x2f\x2f\x77\x77\x77\x2e\x69\x6e\x6b\
\x73\x63\x61\x70\x65\x2e\x6f\x72\x67\x2f\x29\x20\x2d\x2d\x3e\x0a\
\x0a\x3c\x73\x76\x67\x0a\x20\x20\x20\x78\x6d\x6c\x6e\x73\x3a\x64\
\x63\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x70\x75\x72\x6c\x2e\x6f\
\x72\x67\x2f\x64\x63\x2f\x65\x6c\x65\x6d\x65\x6e\x74\x73\x2f\x31\
\x2e\x31\x2f\x22\x0a\x20\x20\x20\x78\x6d\x6c\x6e\x73\x3a\x63\x63\
\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x63\x72\x65\x61\x74\x69\x76\
\x65\x63\x6f\x6d\x6d\x6f\x6e\x73\x2e\x6f\x72\x67\x2f\x6e\x73\x23\
\x22\x0a\x20\x20\x20\x78\x6d\x6c\x6e\x73\x3a\x72\x64\x66\x3d\x22\
\x68\x74\x74\x70\x3a\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\
\x67\x2f\x31\x39\x39\x39\x2f\x30\x32\x2f\x32\x32\x2d\x72\x64\x66\
\x2d\x73\x79\x6e\x74\x61\x78\x2d\x6e\x73\x23\x22\x0a\x20\x20\x20\
\x78\x6d\x6c\x6e\x73\x3a\x73\x76\x67\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x0a\x20\x20\x20\x78\x6d\x6c\x6e\x73\x3d\
\x22\x68\x74\x74\x70\x3a\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\
\x72\x67\x2f\x32\x30\x30\x30\x2f\x73\x76\x67\x22\x0a\x20\x20\x20\
\x78\x6d\x6c\x6e\x73\x3a\x78\x6c\x69\x6e\x6b\x3d\x22\x68\x74\x74\
\x70\x3a\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x31\
\x39\x39\x39\x2f\x78\x6c\x69\x6e\x6b\x22\x0a\x20\x20\x20\x78\x6d\
\x6c\x6e\x73\x3a\x73\x6f\x64\x69\x70\x6f\x64\x69\x3d\x22\x68\x74\
\x74\x70\x3a\x2f\x2f\x73\x6f\x64\x69\x70\x6f\x64\x69\x2e\x73\x6f\
\x75\x72\x63\x65\x66\x6f\x72\x67\x65\x2e\x6e\x65\x74\x2f\x44\x54\
\x44\x2f\x73\x6f\x64\x69\x70\x6f\x64\x69\x2d\x30\x2e\x64\x74\x64\
\x22\x0a\x20\x20\x20\x78\x6d\x6c\x6e\x73\x3a\x69\x6e\x6b\x73\x63\
\x61\x70\x65\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x77\x77\x77\x2e\
\x69\x6e\x6b\x73\x63\x61\x70\x65\x2e\x6f\x72\x67\x2f\x6e\x61\x6d\
\x65\x73\x70\x61\x63\x65\x73\x2f\x69\x6e\x6b\x73\x63\x61\x70\x65\
\x22\x0a\x20\x20\x20\x69\x64\x3d\x22\x73\x76\x67\x34\x35\x35\x37\
\x22\x0a\x20\x20\x20\x76\x65\x72\x73\x69\x6f\x6e\x3d\x22\x31\x2e\
\x31\x22\x0a\x20\x20\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\x76\
\x65\x72\x73\x69\x6f\x6e\x3d\x22\x30\x2e\x34\x38\x2e\x33\x2e\x31\
\x20\x72\x39\x38\x38\x36\x22\x0a\x20\x20\x20\x77\x69\x64\x74\x68\
\x3d\x22\x33\x32\x22\x0a\x20\x20\x20\x68\x65\x69\x67\x68\x74\x3d\
\x22\x33\x32\x22\x0a\x20\x20\x20\x73\x6f\x64\x69\x70\x6f\x64\x69\
\x3a\x64\x6f\x63\x6e\x61\x6d\x65\x3d\x22\x63\x6f\x6e\x6a\x75\x67\
\x61\x74\x65\x31\x2e\x73\x76\x67\x22\x3e\x0a\x20\x20\x3c\x6d\x65\
\x74\x61\x64\x61\x74\x61\x0a\x20\x20\x20\x20\x20\x69\x64\x3d\x22\
\x6d\x65\x74\x61\x64\x61\x74\x61\x34\x35\x36\x33\x22\x3e\x0a\x20\
\x20\x20\x20\x3c\x72\x64\x66\x3a\x52\x44\x46\x3e\x0a\x20\x20\x20\
\x20\x20\x20\x3c\x63\x63\x3a\x57\x6f\x72\x6b\x0a\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x72\x64\x66\x3a\x61\x62\x6f\x75\x74\x3d\x22\
\x22\x3e\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x3c\x64\x63\x3a\x66\
\x6f\x72\x6d\x61\x74\x3e\x69\x6d\x61\x67\x65\x2f\x73\x76\x67\x2b\
\x78\x6d\x6c\x3c\x2f\x64\x63\x3a\x66\x6f\x72\x6d\x61\x74\x3e\x0a\
\x20\x20\x20\x20\x20\x20\x20\x20\x3c\x64\x63\x3a\x74\x79\x70\x65\
\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x72\x64\x66\x3a\
\x72\x65\x73\x6f\x75\x72\x63\x65\x3d\x22\x68\x74\x74\x70\x3a\x2f\
\x2f\x70\x75\x72\x6c\x2e\x6f\x72\x67\x2f\x64\x63\x2f\x64\x63\x6d\
\x69\x74\x79\x70\x65\x2f\x53\x74\x69\x6c\x6c\x49\x6d\x61\x67\x65\
\x22\x20\x2f\x3e\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x3c\x64\x63\
\x3a\x74\x69\x74\x6c\x65\x3e\x3c\x2f\x64\x63\x3a\x74\x69\x74\x6c\
\x65\x3e\x0a\x20\x20\x20\x20\x20\x20\x3c\x2f\x63\x63\x3a\x57\x6f\
\x72\x6b\x3e\x0a\x20\x20\x20\x20\x3c\x2f\x72\x64\x66\x3a\x52\x44\
\x46\x3e\x0a\x20\x20\x3c\x2f\x6d\x65\x74\x61\x64\x61\x74\x61\x3e\
\x0a\x20\x20\x3c\x64\x65\x66\x73\x0a\x20\x20\x20\x20\x20\x69\x64\
\x3d\x22\x64\x65\x66\x73\x34\x35\x36\x31\x22\x20\x2f\x3e\x0a\x20\
\x20\x3c\x73\x6f\x64\x69\x70\x6f\x64\x69\x3a\x6e\x61\x6d\x65\x64\
\x76\x69\x65\x77\x0a\x20\x20\x20\x20\x20\x70\x61\x67\x65\x63\x6f\
\x6c\x6f\x72\x3d\x22\x23\x66\x66\x66\x66\x66\x66\x22\x0a\x20\x20\
\x20\x20\x20\x62\x6f\x72\x64\x65\x72\x63\x6f\x6c\x6f\x72\x3d\x22\
\x23\x36\x36\x36\x36\x36\x36\x22\x0a\x20\x20\x20\x20\x20\x62\x6f\
\x72\x64\x65\x72\x6f\x70\x61\x63\x69\x74\x79\x3d\x22\x31\x22\x0a\
\x20\x20\x20\x20\x20\x6f\x62\x6a\x65\x63\x74\x74\x6f\x6c\x65\x72\
\x61\x6e\x63\x65\x3d\x22\x31\x30\x22\x0a\x20\x20\x20\x20\x20\x67\
\x72\x69\x64\x74\x6f\x6c\x65\x72\x61\x6e\x63\x65\x3d\x22\x31\x30\
\x22\x0a\x20\x20\x20\x20\x20\x67\x75\x69\x64\x65\x74\x6f\x6c\x65\
\x72\x61\x6e\x63\x65\x3d\x22\x31\x30\x22\x0a\x20\x20\x20\x20\x20\
\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\x70\x61\x67\x65\x6f\x70\x61\
\x63\x69\x74\x79\x3d\x22\x30\x22\x0a\x20\x20\x20\x20\x20\x69\x6e\
\x6b\x73\x63\x61\x70\x65\x3a\x70\x61\x67\x65\x73\x68\x61\x64\x6f\
\x77\x3d\x22\x32\x22\x0a\x20\x20\x20\x20\x20\x69\x6e\x6b\x73\x63\
\x61\x70\x65\x3a\x77\x69\x6e\x64\x6f\x77\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x31\x33\x36\x34\x22\x0a\x20\x20\x20\x20\x20\x69\x6e\x6b\
\x73\x63\x61\x70\x65\x3a\x77\x69\x6e\x64\x6f\x77\x2d\x68\x65\x69\
\x67\x68\x74\x3d\x22\x37\x35\x30\x22\x0a\x20\x20\x20\x20\x20\x69\
\x64\x3d\x22\x6e\x61\x6d\x65\x64\x76\x69\x65\x77\x34\x35\x35\x39\
\x22\x0a\x20\x20\x20\x20\x20\x73\x68\x6f\x77\x67\x72\x69\x64\x3d\
\x22\x66\x61\x6c\x73\x65\x22\x0a\x20\x20\x20\x20\x20\x69\x6e\x6b\
\x73\x63\x61\x70\x65\x3a\x7a\x6f\x6f\x6d\x3d\x22\x37\x2e\x33\x37\
\x35\x22\x0a\x20\x20\x20\x20\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\
\x3a\x63\x78\x3d\x22\x32\x36\x2e\x32\x31\x34\x39\x33\x36\x22\x0a\
\x20\x20\x20\x20\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\x63\x79\
\x3d\x22\x31\x37\x2e\x32\x39\x30\x36\x37\x35\x22\x0a\x20\x20\x20\
\x20\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\x77\x69\x6e\x64\x6f\
\x77\x2d\x78\x3d\x22\x30\x22\x0a\x20\x20\x20\x20\x20\x69\x6e\x6b\
\x73\x63\x61\x70\x65\x3a\x77\x69\x6e\x64\x6f\x77\x2d\x79\x3d\x22\
\x31\x36\x22\x0a\x20\x20\x20\x20\x20\x69\x6e\x6b\x73\x63\x61\x70\
\x65\x3a\x77\x69\x6e\x64\x6f\x77\x2d\x6d\x61\x78\x69\x6d\x69\x7a\
\x65\x64\x3d\x22\x30\x22\x0a\x20\x20\x20\x20\x20\x69\x6e\x6b\x73\
\x63\x61\x70\x65\x3a\x63\x75\x72\x72\x65\x6e\x74\x2d\x6c\x61\x79\
\x65\x72\x3d\x22\x73\x76\x67\x34\x35\x35\x37\x22\x0a\x20\x20\x20\
\x20\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\x73\x6e\x61\x70\x2d\
\x67\x6c\x6f\x62\x61\x6c\x3d\x22\x66\x61\x6c\x73\x65\x22\x20\x2f\
\x3e\x0a\x20\x20\x3c\x69\x6d\x61\x67\x65\x0a\x20\x20\x20\x20\x20\
\x77\x69\x64\x74\x68\x3d\x22\x33\x32\x22\x0a\x20\x20\x20\x20\x20\
\x68\x65\x69\x67\x68\x74\x3d\x22\x33\x32\x22\x0a\x20\x20\x20\x20\
\x20\x78\x6c\x69\x6e\x6b\x3a\x68\x72\x65\x66\x3d\x22\x64\x61\x74\
\x61\x3a\x69\x6d\x61\x67\x65\x2f\x70\x6e\x67\x3b\x62\x61\x73\x65\
\x36\x34\x2c\x69\x56\x42\x4f\x52\x77\x30\x4b\x47\x67\x6f\x41\x41\
\x41\x41\x4e\x53\x55\x68\x45\x55\x67\x41\x41\x41\x43\x41\x41\x41\
\x41\x41\x67\x43\x41\x59\x41\x41\x41\x42\x7a\x65\x6e\x72\x30\x41\
\x41\x41\x41\x42\x48\x4e\x43\x53\x56\x51\x49\x43\x41\x67\x49\x66\
\x41\x68\x6b\x69\x41\x41\x41\x41\x44\x56\x4a\x52\x45\x46\x55\x0a\
\x57\x49\x58\x74\x31\x7a\x45\x42\x41\x44\x41\x4d\x77\x7a\x42\x33\
\x2f\x44\x6c\x33\x4d\x50\x6f\x6f\x42\x4b\x77\x33\x55\x32\x33\x48\
\x75\x77\x54\x73\x4f\x34\x78\x58\x42\x51\x41\x41\x41\x41\x41\x41\
\x41\x41\x41\x41\x41\x41\x41\x41\x41\x41\x41\x77\x48\x64\x2f\x7a\
\x44\x78\x72\x74\x42\x44\x6b\x45\x71\x4e\x62\x67\x0a\x41\x41\x41\
\x41\x41\x45\x6c\x46\x54\x6b\x53\x75\x51\x6d\x43\x43\x0a\x22\x0a\
\x20\x20\x20\x20\x20\x69\x64\x3d\x22\x69\x6d\x61\x67\x65\x34\x35\
\x36\x35\x22\x0a\x20\x20\x20\x20\x20\x78\x3d\x22\x30\x22\x0a\x20\
\x20\x20\x20\x20\x79\x3d\x22\x30\x22\x20\x2f\x3e\x0a\x20\x20\x3c\
\x74\x65\x78\x74\x0a\x20\x20\x20\x20\x20\x78\x6d\x6c\x3a\x73\x70\
\x61\x63\x65\x3d\x22\x70\x72\x65\x73\x65\x72\x76\x65\x22\x0a\x20\
\x20\x20\x20\x20\x73\x74\x79\x6c\x65\x3d\x22\x66\x6f\x6e\x74\x2d\
\x73\x69\x7a\x65\x3a\x31\x36\x2e\x39\x36\x38\x36\x33\x37\x34\x37\
\x30\x30\x30\x30\x30\x30\x30\x38\x39\x70\x78\x3b\x66\x6f\x6e\x74\
\x2d\x73\x74\x79\x6c\x65\x3a\x6e\x6f\x72\x6d\x61\x6c\x3b\x66\x6f\
\x6e\x74\x2d\x77\x65\x69\x67\x68\x74\x3a\x6e\x6f\x72\x6d\x61\x6c\
\x3b\x6c\x69\x6e\x65\x2d\x68\x65\x69\x67\x68\x74\x3a\x31\x32\x35\
\x25\x3b\x6c\x65\x74\x74\x65\x72\x2d\x73\x70\x61\x63\x69\x6e\x67\
\x3a\x30\x70\x78\x3b\x77\x6f\x72\x64\x2d\x73\x70\x61\x63\x69\x6e\
\x67\x3a\x30\x70\x78\x3b\x66\x69\x6c\x6c\x3a\x23\x30\x30\x30\x30\
\x66\x66\x3b\x66\x69\x6c\x6c\x2d\x6f\x70\x61\x63\x69\x74\x79\x3a\
\x31\x3b\x73\x74\x72\x6f\x6b\x65\x3a\x6e\x6f\x6e\x65\x3b\x66\x6f\
\x6e\x74\x2d\x66\x61\x6d\x69\x6c\x79\x3a\x53\x61\x6e\x73\x22\x0a\
\x20\x20\x20\x20\x20\x78\x3d\x22\x32\x2e\x30\x34\x33\x39\x35\x32\
\x35\x22\x0a\x20\x20\x20\x20\x20\x79\x3d\x22\x32\x32\x2e\x30\x37\
\x35\x31\x33\x36\x22\x0a\x20\x20\x20\x20\x20\x69\x64\x3d\x22\x74\
\x65\x78\x74\x35\x33\x39\x31\x22\x0a\x20\x20\x20\x20\x20\x73\x6f\
\x64\x69\x70\x6f\x64\x69\x3a\x6c\x69\x6e\x65\x73\x70\x61\x63\x69\
\x6e\x67\x3d\x22\x31\x32\x35\x25\x22\x0a\x20\x20\x20\x20\x20\x74\
\x72\x61\x6e\x73\x66\x6f\x72\x6d\x3d\x22\x73\x63\x61\x6c\x65\x28\
\x31\x2e\x30\x34\x39\x38\x39\x36\x35\x2c\x30\x2e\x39\x35\x32\x34\
\x37\x34\x38\x32\x29\x22\x3e\x3c\x74\x73\x70\x61\x6e\x0a\x20\x20\
\x20\x20\x20\x20\x20\x73\x6f\x64\x69\x70\x6f\x64\x69\x3a\x72\x6f\
\x6c\x65\x3d\x22\x6c\x69\x6e\x65\x22\x0a\x20\x20\x20\x20\x20\x20\
\x20\x69\x64\x3d\x22\x74\x73\x70\x61\x6e\x35\x33\x39\x33\x22\x0a\
\x20\x20\x20\x20\x20\x20\x20\x78\x3d\x22\x32\x2e\x30\x34\x33\x39\
\x35\x32\x35\x22\x0a\x20\x20\x20\x20\x20\x20\x20\x79\x3d\x22\x32\
\x32\x2e\x30\x37\x35\x31\x33\x36\x22\x3e\x61\x3c\x2f\x74\x73\x70\
\x61\x6e\x3e\x3c\x2f\x74\x65\x78\x74\x3e\x0a\x20\x20\x3c\x74\x65\
\x78\x74\x0a\x20\x20\x20\x20\x20\x78\x6d\x6c\x3a\x73\x70\x61\x63\
\x65\x3d\x22\x70\x72\x65\x73\x65\x72\x76\x65\x22\x0a\x20\x20\x20\
\x20\x20\x73\x74\x79\x6c\x65\x3d\x22\x66\x6f\x6e\x74\x2d\x73\x69\
\x7a\x65\x3a\x31\x36\x2e\x39\x36\x38\x36\x33\x37\x34\x37\x30\x30\
\x30\x30\x30\x30\x30\x38\x39\x70\x78\x3b\x66\x6f\x6e\x74\x2d\x73\
\x74\x79\x6c\x65\x3a\x6e\x6f\x72\x6d\x61\x6c\x3b\x66\x6f\x6e\x74\
\x2d\x77\x65\x69\x67\x68\x74\x3a\x6e\x6f\x72\x6d\x61\x6c\x3b\x6c\
\x69\x6e\x65\x2d\x68\x65\x69\x67\x68\x74\x3a\x31\x32\x35\x25\x3b\
\x6c\x65\x74\x74\x65\x72\x2d\x73\x70\x61\x63\x69\x6e\x67\x3a\x30\
\x70\x78\x3b\x77\x6f\x72\x64\x2d\x73\x70\x61\x63\x69\x6e\x67\x3a\
\x30\x70\x78\x3b\x66\x69\x6c\x6c\x3a\x23\x30\x30\x30\x30\x66\x66\
\x3b\x66\x69\x6c\x6c\x2d\x6f\x70\x61\x63\x69\x74\x79\x3a\x31\x3b\
\x73\x74\x72\x6f\x6b\x65\x3a\x6e\x6f\x6e\x65\x3b\x66\x6f\x6e\x74\
\x2d\x66\x61\x6d\x69\x6c\x79\x3a\x53\x61\x6e\x73\x22\x0a\x20\x20\
\x20\x20\x20\x78\x3d\x22\x31\x32\x2e\x33\x39\x37\x36\x39\x37\x22\
\x0a\x20\x20\x20\x20\x20\x79\x3d\x22\x32\x31\x2e\x35\x35\x37\x34\
\x34\x37\x22\x0a\x20\x20\x20\x20\x20\x69\x64\x3d\x22\x74\x65\x78\
\x74\x35\x33\x39\x35\x22\x0a\x20\x20\x20\x20\x20\x73\x6f\x64\x69\
\x70\x6f\x64\x69\x3a\x6c\x69\x6e\x65\x73\x70\x61\x63\x69\x6e\x67\
\x3d\x22\x31\x32\x35\x25\x22\x0a\x20\x20\x20\x20\x20\x74\x72\x61\
\x6e\x73\x66\x6f\x72\x6d\x3d\x22\x73\x63\x61\x6c\x65\x28\x31\x2e\
\x30\x34\x39\x38\x39\x36\x35\x2c\x30\x2e\x39\x35\x32\x34\x37\x34\
\x38\x32\x29\x22\x3e\x3c\x74\x73\x70\x61\x6e\x0a\x20\x20\x20\x20\
\x20\x20\x20\x73\x6f\x64\x69\x70\x6f\x64\x69\x3a\x72\x6f\x6c\x65\
\x3d\x22\x6c\x69\x6e\x65\x22\x0a\x20\x20\x20\x20\x20\x20\x20\x69\
\x64\x3d\x22\x74\x73\x70\x61\x6e\x35\x33\x39\x37\x22\x0a\x20\x20\
\x20\x20\x20\x20\x20\x78\x3d\x22\x31\x32\x2e\x33\x39\x37\x36\x39\
\x37\x22\x0a\x20\x20\x20\x20\x20\x20\x20\x79\x3d\x22\x32\x31\x2e\
\x35\x35\x37\x34\x34\x37\x22\x3e\x2c\x62\x3c\x2f\x74\x73\x70\x61\
\x6e\x3e\x3c\x2f\x74\x65\x78\x74\x3e\x0a\x3c\x2f\x73\x76\x67\x3e\
\x0a\
\x00\x00\x0c\x87\
\x3c\
\x3f\x78\x6d\x6c\x20\x76\x65\x72\x73\x69\x6f\x6e\x3d\x22\x31\x2e\
\x30\x22\x20\x65\x6e\x63\x6f\x64\x69\x6e\x67\x3d\x22\x55\x54\x46\
\x2d\x38\x22\x20\x73\x74\x61\x6e\x64\x61\x6c\x6f\x6e\x65\x3d\x22\
\x6e\x6f\x22\x3f\x3e\x0a\x3c\x21\x2d\x2d\x20\x43\x72\x65\x61\x74\
\x65\x64\x20\x77\x69\x74\x68\x20\x49\x6e\x6b\x73\x63\x61\x70\x65\
\x20\x28\x68\x74\x74\x70\x3a\x2f\x2f\x77\x77\x77\x2e\x69\x6e\x6b\
\x73\x63\x61\x70\x65\x2e\x6f\x72\x67\x2f\x29\x20\x2d\x2d\x3e\x0a\
\x0a\x3c\x73\x76\x67\x0a\x20\x20\x20\x78\x6d\x6c\x6e\x73\x3a\x64\
\x63\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x70\x75\x72\x6c\x2e\x6f\
\x72\x67\x2f\x64\x63\x2f\x65\x6c\x65\x6d\x65\x6e\x74\x73\x2f\x31\
\x2e\x31\x2f\x22\x0a\x20\x20\x20\x78\x6d\x6c\x6e\x73\x3a\x63\x63\
\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x63\x72\x65\x61\x74\x69\x76\
\x65\x63\x6f\x6d\x6d\x6f\x6e\x73\x2e\x6f\x72\x67\x2f\x6e\x73\x23\
\x22\x0a\x20\x20\x20\x78\x6d\x6c\x6e\x73\x3a\x72\x64\x66\x3d\x22\
\x68\x74\x74\x70\x3a\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\
\x67\x2f\x31\x39\x39\x39\x2f\x30\x32\x2f\x32\x32\x2d\x72\x64\x66\
\x2d\x73\x79\x6e\x74\x61\x78\x2d\x6e\x73\x23\x22\x0a\x20\x20\x20\
\x78\x6d\x6c\x6e\x73\x3a\x73\x76\x67\x3d\x22\x68\x74\x74\x70\x3a\
\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x32\x30\x30\
\x30\x2f\x73\x76\x67\x22\x0a\x20\x20\x20\x78\x6d\x6c\x6e\x73\x3d\
\x22\x68\x74\x74\x70\x3a\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\
\x72\x67\x2f\x32\x30\x30\x30\x2f\x73\x76\x67\x22\x0a\x20\x20\x20\
\x78\x6d\x6c\x6e\x73\x3a\x78\x6c\x69\x6e\x6b\x3d\x22\x68\x74\x74\
\x70\x3a\x2f\x2f\x77\x77\x77\x2e\x77\x33\x2e\x6f\x72\x67\x2f\x31\
\x39\x39\x39\x2f\x78\x6c\x69\x6e\x6b\x22\x0a\x20\x20\x20\x78\x6d\
\x6c\x6e\x73\x3a\x73\x6f\x64\x69\x70\x6f\x64\x69\x3d\x22\x68\x74\
\x74\x70\x3a\x2f\x2f\x73\x6f\x64\x69\x70\x6f\x64\x69\x2e\x73\x6f\
\x75\x72\x63\x65\x66\x6f\x72\x67\x65\x2e\x6e\x65\x74\x2f\x44\x54\
\x44\x2f\x73\x6f\x64\x69\x70\x6f\x64\x69\x2d\x30\x2e\x64\x74\x64\
\x22\x0a\x20\x20\x20\x78\x6d\x6c\x6e\x73\x3a\x69\x6e\x6b\x73\x63\
\x61\x70\x65\x3d\x22\x68\x74\x74\x70\x3a\x2f\x2f\x77\x77\x77\x2e\
\x69\x6e\x6b\x73\x63\x61\x70\x65\x2e\x6f\x72\x67\x2f\x6e\x61\x6d\
\x65\x73\x70\x61\x63\x65\x73\x2f\x69\x6e\x6b\x73\x63\x61\x70\x65\
\x22\x0a\x20\x20\x20\x69\x64\x3d\x22\x73\x76\x67\x34\x35\x35\x37\
\x22\x0a\x20\x20\x20\x76\x65\x72\x73\x69\x6f\x6e\x3d\x22\x31\x2e\
\x31\x22\x0a\x20\x20\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\x76\
\x65\x72\x73\x69\x6f\x6e\x3d\x22\x30\x2e\x34\x38\x2e\x33\x2e\x31\
\x20\x72\x39\x38\x38\x36\x22\x0a\x20\x20\x20\x77\x69\x64\x74\x68\
\x3d\x22\x33\x32\x22\x0a\x20\x20\x20\x68\x65\x69\x67\x68\x74\x3d\
\x22\x33\x32\x22\x0a\x20\x20\x20\x73\x6f\x64\x69\x70\x6f\x64\x69\
\x3a\x64\x6f\x63\x6e\x61\x6d\x65\x3d\x22\x66\x69\x6c\x74\x72\x5f\
\x74\x61\x70\x73\x2e\x73\x76\x67\x22\x3e\x0a\x20\x20\x3c\x6d\x65\
\x74\x61\x64\x61\x74\x61\x0a\x20\x20\x20\x20\x20\x69\x64\x3d\x22\
\x6d\x65\x74\x61\x64\x61\x74\x61\x34\x35\x36\x33\x22\x3e\x0a\x20\
\x20\x20\x20\x3c\x72\x64\x66\x3a\x52\x44\x46\x3e\x0a\x20\x20\x20\
\x20\x20\x20\x3c\x63\x63\x3a\x57\x6f\x72\x6b\x0a\x20\x20\x20\x20\
\x20\x20\x20\x20\x20\x72\x64\x66\x3a\x61\x62\x6f\x75\x74\x3d\x22\
\x22\x3e\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x3c\x64\x63\x3a\x66\
\x6f\x72\x6d\x61\x74\x3e\x69\x6d\x61\x67\x65\x2f\x73\x76\x67\x2b\
\x78\x6d\x6c\x3c\x2f\x64\x63\x3a\x66\x6f\x72\x6d\x61\x74\x3e\x0a\
\x20\x20\x20\x20\x20\x20\x20\x20\x3c\x64\x63\x3a\x74\x79\x70\x65\
\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x20\x72\x64\x66\x3a\
\x72\x65\x73\x6f\x75\x72\x63\x65\x3d\x22\x68\x74\x74\x70\x3a\x2f\
\x2f\x70\x75\x72\x6c\x2e\x6f\x72\x67\x2f\x64\x63\x2f\x64\x63\x6d\
\x69\x74\x79\x70\x65\x2f\x53\x74\x69\x6c\x6c\x49\x6d\x61\x67\x65\
\x22\x20\x2f\x3e\x0a\x20\x20\x20\x20\x20\x20\x20\x20\x3c\x64\x63\
\x3a\x74\x69\x74\x6c\x65\x3e\x3c\x2f\x64\x63\x3a\x74\x69\x74\x6c\
\x65\x3e\x0a\x20\x20\x20\x20\x20\x20\x3c\x2f\x63\x63\x3a\x57\x6f\
\x72\x6b\x3e\x0a\x20\x20\x20\x20\x3c\x2f\x72\x64\x66\x3a\x52\x44\
\x46\x3e\x0a\x20\x20\x3c\x2f\x6d\x65\x74\x61\x64\x61\x74\x61\x3e\
\x0a\x20\x20\x3c\x64\x65\x66\x73\x0a\x20\x20\x20\x20\x20\x69\x64\
\x3d\x22\x64\x65\x66\x73\x34\x35\x36\x31\x22\x20\x2f\x3e\x0a\x20\
\x20\x3c\x73\x6f\x64\x69\x70\x6f\x64\x69\x3a\x6e\x61\x6d\x65\x64\
\x76\x69\x65\x77\x0a\x20\x20\x20\x20\x20\x70\x61\x67\x65\x63\x6f\
\x6c\x6f\x72\x3d\x22\x23\x66\x66\x66\x66\x66\x66\x22\x0a\x20\x20\
\x20\x20\x20\x62\x6f\x72\x64\x65\x72\x63\x6f\x6c\x6f\x72\x3d\x22\
\x23\x36\x36\x36\x36\x36\x36\x22\x0a\x20\x20\x20\x20\x20\x62\x6f\
\x72\x64\x65\x72\x6f\x70\x61\x63\x69\x74\x79\x3d\x22\x31\x22\x0a\
\x20\x20\x20\x20\x20\x6f\x62\x6a\x65\x63\x74\x74\x6f\x6c\x65\x72\
\x61\x6e\x63\x65\x3d\x22\x31\x30\x22\x0a\x20\x20\x20\x20\x20\x67\
\x72\x69\x64\x74\x6f\x6c\x65\x72\x61\x6e\x63\x65\x3d\x22\x31\x30\
\x22\x0a\x20\x20\x20\x20\x20\x67\x75\x69\x64\x65\x74\x6f\x6c\x65\
\x72\x61\x6e\x63\x65\x3d\x22\x31\x30\x22\x0a\x20\x20\x20\x20\x20\
\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\x70\x61\x67\x65\x6f\x70\x61\
\x63\x69\x74\x79\x3d\x22\x30\x22\x0a\x20\x20\x20\x20\x20\x69\x6e\
\x6b\x73\x63\x61\x70\x65\x3a\x70\x61\x67\x65\x73\x68\x61\x64\x6f\
\x77\x3d\x22\x32\x22\x0a\x20\x20\x20\x20\x20\x69\x6e\x6b\x73\x63\
\x61\x70\x65\x3a\x77\x69\x6e\x64\x6f\x77\x2d\x77\x69\x64\x74\x68\
\x3d\x22\x37\x32\x36\x22\x0a\x20\x20\x20\x20\x20\x69\x6e\x6b\x73\
\x63\x61\x70\x65\x3a\x77\x69\x6e\x64\x6f\x77\x2d\x68\x65\x69\x67\
\x68\x74\x3d\x22\x37\x35\x30\x22\x0a\x20\x20\x20\x20\x20\x69\x64\
\x3d\x22\x6e\x61\x6d\x65\x64\x76\x69\x65\x77\x34\x35\x35\x39\x22\
\x0a\x20\x20\x20\x20\x20\x73\x68\x6f\x77\x67\x72\x69\x64\x3d\x22\
\x66\x61\x6c\x73\x65\x22\x0a\x20\x20\x20\x20\x20\x69\x6e\x6b\x73\
\x63\x61\x70\x65\x3a\x7a\x6f\x6f\x6d\x3d\x22\x37\x2e\x33\x37\x35\
\x22\x0a\x20\x20\x20\x20\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\
\x63\x78\x3d\x22\x32\x36\x2e\x32\x31\x34\x39\x33\x36\x22\x0a\x20\
\x20\x20\x20\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\x63\x79\x3d\
\x22\x31\x39\x2e\x37\x32\x38\x38\x31\x34\x22\x0a\x20\x20\x20\x20\
\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\x77\x69\x6e\x64\x6f\x77\
\x2d\x78\x3d\x22\x36\x38\x33\x22\x0a\x20\x20\x20\x20\x20\x69\x6e\
\x6b\x73\x63\x61\x70\x65\x3a\x77\x69\x6e\x64\x6f\x77\x2d\x79\x3d\
\x22\x31\x36\x22\x0a\x20\x20\x20\x20\x20\x69\x6e\x6b\x73\x63\x61\
\x70\x65\x3a\x77\x69\x6e\x64\x6f\x77\x2d\x6d\x61\x78\x69\x6d\x69\
\x7a\x65\x64\x3d\x22\x30\x22\x0a\x20\x20\x20\x20\x20\x69\x6e\x6b\
\x73\x63\x61\x70\x65\x3a\x63\x75\x72\x72\x65\x6e\x74\x2d\x6c\x61\
\x79\x65\x72\x3d\x22\x73\x76\x67\x34\x35\x35\x37\x22\x0a\x20\x20\
\x20\x20\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\x73\x6e\x61\x70\
\x2d\x67\x6c\x6f\x62\x61\x6c\x3d\x22\x66\x61\x6c\x73\x65\x22\x20\
\x2f\x3e\x0a\x20\x20\x3c\x69\x6d\x61\x67\x65\x0a\x20\x20\x20\x20\
\x20\x77\x69\x64\x74\x68\x3d\x22\x33\x32\x22\x0a\x20\x20\x20\x20\
\x20\x68\x65\x69\x67\x68\x74\x3d\x22\x33\x32\x22\x0a\x20\x20\x20\
\x20\x20\x78\x6c\x69\x6e\x6b\x3a\x68\x72\x65\x66\x3d\x22\x64\x61\
\x74\x61\x3a\x69\x6d\x61\x67\x65\x2f\x70\x6e\x67\x3b\x62\x61\x73\
\x65\x36\x34\x2c\x69\x56\x42\x4f\x52\x77\x30\x4b\x47\x67\x6f\x41\
\x41\x41\x41\x4e\x53\x55\x68\x45\x55\x67\x41\x41\x41\x43\x41\x41\
\x41\x41\x41\x67\x43\x41\x59\x41\x41\x41\x42\x7a\x65\x6e\x72\x30\
\x41\x41\x41\x41\x42\x48\x4e\x43\x53\x56\x51\x49\x43\x41\x67\x49\
\x66\x41\x68\x6b\x69\x41\x41\x41\x41\x44\x56\x4a\x52\x45\x46\x55\
\x0a\x57\x49\x58\x74\x31\x7a\x45\x42\x41\x44\x41\x4d\x77\x7a\x42\
\x33\x2f\x44\x6c\x33\x4d\x50\x6f\x6f\x42\x4b\x77\x33\x55\x32\x33\
\x48\x75\x77\x54\x73\x4f\x34\x78\x58\x42\x51\x41\x41\x41\x41\x41\
\x41\x41\x41\x41\x41\x41\x41\x41\x41\x41\x41\x41\x77\x48\x64\x2f\
\x7a\x44\x78\x72\x74\x42\x44\x6b\x45\x71\x4e\x62\x67\x0a\x41\x41\
\x41\x41\x41\x45\x6c\x46\x54\x6b\x53\x75\x51\x6d\x43\x43\x0a\x22\
\x0a\x20\x20\x20\x20\x20\x69\x64\x3d\x22\x69\x6d\x61\x67\x65\x34\
\x35\x36\x35\x22\x0a\x20\x20\x20\x20\x20\x78\x3d\x22\x30\x22\x0a\
\x20\x20\x20\x20\x20\x79\x3d\x22\x30\x22\x20\x2f\x3e\x0a\x20\x20\
\x3c\x74\x65\x78\x74\x0a\x20\x20\x20\x20\x20\x78\x6d\x6c\x3a\x73\
\x70\x61\x63\x65\x3d\x22\x70\x72\x65\x73\x65\x72\x76\x65\x22\x0a\
\x20\x20\x20\x20\x20\x73\x74\x79\x6c\x65\x3d\x22\x66\x6f\x6e\x74\
\x2d\x73\x69\x7a\x65\x3a\x34\x30\x70\x78\x3b\x66\x6f\x6e\x74\x2d\
\x73\x74\x79\x6c\x65\x3a\x6e\x6f\x72\x6d\x61\x6c\x3b\x66\x6f\x6e\
\x74\x2d\x77\x65\x69\x67\x68\x74\x3a\x6e\x6f\x72\x6d\x61\x6c\x3b\
\x6c\x69\x6e\x65\x2d\x68\x65\x69\x67\x68\x74\x3a\x31\x32\x35\x25\
\x3b\x6c\x65\x74\x74\x65\x72\x2d\x73\x70\x61\x63\x69\x6e\x67\x3a\
\x30\x70\x78\x3b\x77\x6f\x72\x64\x2d\x73\x70\x61\x63\x69\x6e\x67\
\x3a\x30\x70\x78\x3b\x66\x69\x6c\x6c\x3a\x23\x30\x30\x30\x30\x30\
\x30\x3b\x66\x69\x6c\x6c\x2d\x6f\x70\x61\x63\x69\x74\x79\x3a\x31\
\x3b\x73\x74\x72\x6f\x6b\x65\x3a\x6e\x6f\x6e\x65\x3b\x66\x6f\x6e\
\x74\x2d\x66\x61\x6d\x69\x6c\x79\x3a\x53\x61\x6e\x73\x22\x0a\x20\
\x20\x20\x20\x20\x78\x3d\x22\x31\x31\x2e\x36\x36\x31\x30\x31\x37\
\x22\x0a\x20\x20\x20\x20\x20\x79\x3d\x22\x39\x2e\x33\x35\x35\x39\
\x33\x32\x32\x22\x0a\x20\x20\x20\x20\x20\x69\x64\x3d\x22\x74\x65\
\x78\x74\x35\x34\x31\x38\x22\x0a\x20\x20\x20\x20\x20\x73\x6f\x64\
\x69\x70\x6f\x64\x69\x3a\x6c\x69\x6e\x65\x73\x70\x61\x63\x69\x6e\
\x67\x3d\x22\x31\x32\x35\x25\x22\x3e\x3c\x74\x73\x70\x61\x6e\x0a\
\x20\x20\x20\x20\x20\x20\x20\x73\x6f\x64\x69\x70\x6f\x64\x69\x3a\
\x72\x6f\x6c\x65\x3d\x22\x6c\x69\x6e\x65\x22\x0a\x20\x20\x20\x20\
\x20\x20\x20\x69\x64\x3d\x22\x74\x73\x70\x61\x6e\x35\x34\x32\x30\
\x22\x0a\x20\x20\x20\x20\x20\x20\x20\x78\x3d\x22\x31\x31\x2e\x36\
\x36\x31\x30\x31\x37\x22\x0a\x20\x20\x20\x20\x20\x20\x20\x79\x3d\
\x22\x39\x2e\x33\x35\x35\x39\x33\x32\x32\x22\x20\x2f\x3e\x3c\x2f\
\x74\x65\x78\x74\x3e\x0a\x20\x20\x3c\x70\x61\x74\x68\x0a\x20\x20\
\x20\x20\x20\x73\x74\x79\x6c\x65\x3d\x22\x66\x69\x6c\x6c\x3a\x6e\
\x6f\x6e\x65\x3b\x73\x74\x72\x6f\x6b\x65\x3a\x23\x30\x30\x30\x30\
\x66\x66\x3b\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\x3a\
\x31\x70\x78\x3b\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x63\
\x61\x70\x3a\x62\x75\x74\x74\x3b\x73\x74\x72\x6f\x6b\x65\x2d\x6c\
\x69\x6e\x65\x6a\x6f\x69\x6e\x3a\x6d\x69\x74\x65\x72\x3b\x73\x74\
\x72\x6f\x6b\x65\x2d\x6f\x70\x61\x63\x69\x74\x79\x3a\x31\x22\x0a\
\x20\x20\x20\x20\x20\x64\x3d\x22\x6d\x20\x31\x36\x2e\x37\x37\x39\
\x36\x36\x31\x2c\x34\x2e\x34\x37\x34\x35\x37\x36\x33\x20\x63\x20\
\x30\x2c\x37\x2e\x39\x35\x34\x38\x30\x32\x37\x20\x30\x2c\x31\x35\
\x2e\x39\x30\x39\x36\x30\x34\x37\x20\x30\x2c\x32\x33\x2e\x38\x36\
\x34\x34\x30\x36\x37\x22\x0a\x20\x20\x20\x20\x20\x69\x64\x3d\x22\
\x70\x61\x74\x68\x35\x34\x32\x34\x22\x0a\x20\x20\x20\x20\x20\x69\
\x6e\x6b\x73\x63\x61\x70\x65\x3a\x63\x6f\x6e\x6e\x65\x63\x74\x6f\
\x72\x2d\x63\x75\x72\x76\x61\x74\x75\x72\x65\x3d\x22\x30\x22\x20\
\x2f\x3e\x0a\x20\x20\x3c\x70\x61\x74\x68\x0a\x20\x20\x20\x20\x20\
\x73\x74\x79\x6c\x65\x3d\x22\x66\x69\x6c\x6c\x3a\x23\x30\x30\x30\
\x30\x66\x66\x3b\x73\x74\x72\x6f\x6b\x65\x3a\x23\x30\x30\x30\x30\
\x66\x66\x3b\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\x3a\
\x31\x70\x78\x3b\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x63\
\x61\x70\x3a\x62\x75\x74\x74\x3b\x73\x74\x72\x6f\x6b\x65\x2d\x6c\
\x69\x6e\x65\x6a\x6f\x69\x6e\x3a\x6d\x69\x74\x65\x72\x3b\x73\x74\
\x72\x6f\x6b\x65\x2d\x6f\x70\x61\x63\x69\x74\x79\x3a\x31\x22\x0a\
\x20\x20\x20\x20\x20\x64\x3d\x22\x6d\x20\x31\x36\x2e\x39\x35\x30\
\x35\x31\x31\x2c\x34\x2e\x39\x34\x31\x31\x36\x31\x35\x20\x63\x20\
\x32\x2e\x30\x31\x30\x33\x39\x33\x2c\x32\x2e\x34\x30\x30\x38\x30\
\x38\x32\x20\x34\x2e\x30\x32\x30\x37\x38\x36\x2c\x34\x2e\x38\x30\
\x31\x36\x31\x37\x33\x20\x36\x2e\x30\x33\x31\x31\x37\x38\x2c\x37\
\x2e\x32\x30\x32\x34\x32\x34\x35\x22\x0a\x20\x20\x20\x20\x20\x69\
\x64\x3d\x22\x70\x61\x74\x68\x35\x34\x32\x36\x22\x0a\x20\x20\x20\
\x20\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\x63\x6f\x6e\x6e\x65\
\x63\x74\x6f\x72\x2d\x63\x75\x72\x76\x61\x74\x75\x72\x65\x3d\x22\
\x30\x22\x0a\x20\x20\x20\x20\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\
\x3a\x74\x72\x61\x6e\x73\x66\x6f\x72\x6d\x2d\x63\x65\x6e\x74\x65\
\x72\x2d\x78\x3d\x22\x2d\x32\x2e\x33\x30\x35\x30\x38\x34\x37\x22\
\x20\x2f\x3e\x0a\x20\x20\x3c\x70\x61\x74\x68\x0a\x20\x20\x20\x20\
\x20\x73\x74\x79\x6c\x65\x3d\x22\x66\x69\x6c\x6c\x3a\x6e\x6f\x6e\
\x65\x3b\x73\x74\x72\x6f\x6b\x65\x3a\x23\x30\x30\x30\x30\x66\x66\
\x3b\x73\x74\x72\x6f\x6b\x65\x2d\x77\x69\x64\x74\x68\x3a\x31\x70\
\x78\x3b\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\x65\x63\x61\x70\
\x3a\x62\x75\x74\x74\x3b\x73\x74\x72\x6f\x6b\x65\x2d\x6c\x69\x6e\
\x65\x6a\x6f\x69\x6e\x3a\x6d\x69\x74\x65\x72\x3b\x73\x74\x72\x6f\
\x6b\x65\x2d\x6f\x70\x61\x63\x69\x74\x79\x3a\x30\x2e\x39\x34\x31\
\x31\x37\x36\x34\x37\x22\x0a\x20\x20\x20\x20\x20\x64\x3d\x22\x4d\
\x20\x31\x36\x2e\x38\x31\x32\x31\x39\x39\x2c\x34\x2e\x38\x30\x35\
\x35\x36\x37\x32\x20\x43\x20\x31\x34\x2e\x38\x30\x31\x38\x30\x36\
\x2c\x37\x2e\x32\x30\x36\x33\x37\x35\x34\x20\x31\x32\x2e\x37\x39\
\x31\x34\x31\x33\x2c\x39\x2e\x36\x30\x37\x31\x38\x34\x38\x20\x31\
\x30\x2e\x37\x38\x31\x30\x32\x31\x2c\x31\x32\x2e\x30\x30\x37\x39\
\x39\x32\x22\x0a\x20\x20\x20\x20\x20\x69\x64\x3d\x22\x70\x61\x74\
\x68\x35\x34\x32\x36\x2d\x32\x22\x0a\x20\x20\x20\x20\x20\x69\x6e\
\x6b\x73\x63\x61\x70\x65\x3a\x63\x6f\x6e\x6e\x65\x63\x74\x6f\x72\
\x2d\x63\x75\x72\x76\x61\x74\x75\x72\x65\x3d\x22\x30\x22\x0a\x20\
\x20\x20\x20\x20\x69\x6e\x6b\x73\x63\x61\x70\x65\x3a\x74\x72\x61\
\x6e\x73\x66\x6f\x72\x6d\x2d\x63\x65\x6e\x74\x65\x72\x2d\x78\x3d\
\x22\x32\x2e\x33\x30\x35\x30\x38\x34\x37\x22\x20\x2f\x3e\x0a\x3c\
\x2f\x73\x76\x67\x3e\x0a\
"
qt_resource_name = "\
\x00\x05\
\x00\x6f\xa6\x53\
\x00\x69\
\x00\x63\x00\x6f\x00\x6e\x00\x73\
\x00\x14\
\x09\x81\xca\x67\
\x00\x69\
\x00\x6d\x00\x70\x00\x75\x00\x6c\x00\x73\x00\x65\x00\x5f\x00\x72\x00\x65\x00\x73\x00\x70\x00\x6f\x00\x6e\x00\x73\x00\x65\x00\x2e\
\x00\x73\x00\x76\x00\x67\
\x00\x0e\
\x00\x3a\xd2\x27\
\x00\x72\
\x00\x65\x00\x6d\x00\x6f\x00\x76\x00\x65\x00\x5f\x00\x72\x00\x65\x00\x64\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0f\
\x03\xbf\x35\xe7\
\x00\x70\
\x00\x68\x00\x61\x00\x73\x00\x65\x00\x5f\x00\x64\x00\x65\x00\x6c\x00\x61\x00\x79\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0d\
\x0c\x6e\x6c\xc7\
\x00\x63\
\x00\x6f\x00\x6e\x00\x6a\x00\x75\x00\x67\x00\x61\x00\x74\x00\x65\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x11\
\x09\x67\xa5\x87\
\x00\x73\
\x00\x74\x00\x65\x00\x70\x00\x5f\x00\x72\x00\x65\x00\x73\x00\x70\x00\x6f\x00\x6e\x00\x73\x00\x65\x00\x2e\x00\x73\x00\x76\x00\x67\
\
\x00\x0f\
\x0f\x09\x55\x87\
\x00\x67\
\x00\x72\x00\x6f\x00\x75\x00\x70\x00\x5f\x00\x64\x00\x65\x00\x6c\x00\x61\x00\x79\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0b\
\x02\x69\xc3\xe7\
\x00\x6f\
\x00\x76\x00\x65\x00\x72\x00\x6c\x00\x61\x00\x79\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x12\
\x05\x37\x32\x67\
\x00\x70\
\x00\x68\x00\x61\x00\x73\x00\x65\x00\x5f\x00\x72\x00\x65\x00\x73\x00\x70\x00\x6f\x00\x6e\x00\x73\x00\x65\x00\x2e\x00\x73\x00\x76\
\x00\x67\
\x00\x0c\
\x0a\xd5\x16\x27\
\x00\x61\
\x00\x64\x00\x64\x00\x5f\x00\x70\x00\x6f\x00\x6c\x00\x65\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x10\
\x08\x95\xc8\xc7\
\x00\x6d\
\x00\x61\x00\x67\x00\x5f\x00\x72\x00\x65\x00\x73\x00\x70\x00\x6f\x00\x6e\x00\x73\x00\x65\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0c\
\x00\x67\x14\xc7\
\x00\x61\
\x00\x64\x00\x64\x00\x5f\x00\x7a\x00\x65\x00\x72\x00\x6f\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0e\
\x08\x05\x4c\xc7\
\x00\x66\
\x00\x69\x00\x6c\x00\x74\x00\x72\x00\x5f\x00\x74\x00\x61\x00\x70\x00\x73\x00\x2e\x00\x73\x00\x76\x00\x67\
\x00\x0b\
\x03\x78\xd5\x47\
\x00\x69\
\x00\x6d\x00\x70\x00\x75\x00\x6c\x00\x73\x00\x65\x00\x2e\x00\x73\x00\x76\x00\x67\
"
qt_resource_struct = "\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x01\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x0f\x00\x00\x00\x02\
\x00\x00\x00\x3e\x00\x00\x00\x00\x00\x01\x00\x00\x08\x4e\
\x00\x00\x01\x7a\x00\x00\x00\x00\x00\x01\x00\x00\x80\x3d\
\x00\x00\x01\x7a\x00\x00\x00\x00\x00\x01\x00\x00\x77\x3c\
\x00\x00\x00\xf0\x00\x00\x00\x00\x00\x01\x00\x00\x45\x9e\
\x00\x00\x01\xba\x00\x00\x00\x00\x00\x01\x00\x00\x94\x64\
\x00\x00\x00\x60\x00\x00\x00\x00\x00\x01\x00\x00\x16\xaa\
\x00\x00\x01\x0c\x00\x00\x00\x00\x00\x01\x00\x00\x4f\x74\
\x00\x00\x01\x98\x00\x00\x00\x00\x00\x01\x00\x00\x89\x3e\
\x00\x00\x01\x54\x00\x00\x00\x00\x00\x01\x00\x00\x6f\x47\
\x00\x00\x00\xa4\x00\x00\x00\x00\x00\x01\x00\x00\x33\xa5\
\x00\x00\x00\x10\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\
\x00\x00\x01\x36\x00\x00\x00\x00\x00\x01\x00\x00\x58\x8f\
\x00\x00\x01\x36\x00\x00\x00\x00\x00\x01\x00\x00\x63\xeb\
\x00\x00\x00\x84\x00\x00\x00\x00\x00\x01\x00\x00\x20\x70\
\x00\x00\x00\xcc\x00\x00\x00\x00\x00\x01\x00\x00\x3b\x15\
"
def qInitResources():
    # Register the embedded icon data (qt_resource_data, declared above) with
    # Qt's resource system so the SVGs are reachable via ":/icons/..." paths.
    QtCore.qRegisterResourceData(0x01, qt_resource_struct, qt_resource_name, qt_resource_data)
def qCleanupResources():
    # Unregister the resources added by qInitResources(); safe to call at shutdown.
    QtCore.qUnregisterResourceData(0x01, qt_resource_struct, qt_resource_name, qt_resource_data)
qInitResources()
| gpl-3.0 |
NeerajM999/recap-python | LearnPython/data_structures/binary_tree.py | 1 | 1761 | class Node:
    def __init__(self, value):
        # Payload stored at this node; both children start out absent.
        self.value = value
        self.left = None
        self.right = None
class BinaryTree(object):
    """A minimal binary tree exposing the three depth-first traversals.

    Each traversal method threads an accumulator string through the
    recursion and appends "<value>-" for every node it visits.
    """

    def __init__(self, root_val):
        # Wrap the initial value in a Node and make it the root.
        self.root = Node(root_val)

    def preorder_traversal(self, start, traversal):
        """Root -> left -> right."""
        if not start:
            return traversal
        traversal += str(start.value) + "-"
        traversal = self.preorder_traversal(start.left, traversal)
        return self.preorder_traversal(start.right, traversal)

    def inorder_traversal(self, start, traversal):
        """Left -> root -> right."""
        if not start:
            return traversal
        traversal = self.inorder_traversal(start.left, traversal)
        traversal += str(start.value) + "-"
        return self.inorder_traversal(start.right, traversal)

    def postorder_traversal(self, start, traversal):
        """Left -> right -> root."""
        if not start:
            return traversal
        traversal = self.postorder_traversal(start.left, traversal)
        traversal = self.postorder_traversal(start.right, traversal)
        return traversal + str(start.value) + "-"
if __name__ == "__main__":
"""
1
/ \
2 3
/ \ / \
4 5 6 7
"""
tree = BinaryTree(1)
tree.root.left = Node(2)
tree.root.right = Node(3)
tree.root.left.left = Node(4)
tree.root.left.right = Node(5)
tree.root.right.left = Node(6)
tree.root.right.right = Node(7)
print("preorder-traversal: ", tree.preorder_traversal(tree.root, ""))
print("inorder-traversal: ", tree.inorder_traversal(tree.root, ""))
print("postorder-traversal: ", tree.postorder_traversal(tree.root, "")) | gpl-3.0 |
mayankcu/Django-social | venv/Lib/site-packages/django/core/management/color.py | 635 | 1608 | """
Sets up the terminal color scheme.
"""
import os
import sys
from django.utils import termcolors
def supports_color():
    """
    Returns True if the running system's terminal supports color, and False
    otherwise.
    """
    # Windows consoles do not understand ANSI escapes in this code path.
    if sys.platform in ('win32', 'Pocket PC'):
        return False
    # isatty is not always implemented, #6223 — treat a missing isatty as
    # "not a terminal".
    return hasattr(sys.stdout, 'isatty') and sys.stdout.isatty()
def color_style():
    """Returns a Style object with the Django color scheme."""
    if not supports_color():
        return no_style()
    # DJANGO_COLORS lets users override the palette from the environment.
    color_settings = termcolors.parse_color_setting(
        os.environ.get('DJANGO_COLORS', ''))
    if not color_settings:
        return no_style()
    class dummy: pass
    style = dummy()
    # The nocolor palette has all available roles.
    # Use that pallete as the basis for populating
    # the palette as defined in the environment.
    for role in termcolors.PALETTES[termcolors.NOCOLOR_PALETTE]:
        fmt = color_settings.get(role, {})
        setattr(style, role, termcolors.make_style(**fmt))
    # For backwards compatibility,
    # set style for ERROR_OUTPUT == ERROR
    style.ERROR_OUTPUT = style.ERROR
    return style
def no_style():
    """Returns a Style object whose every formatter is the identity function."""
    class PlainStyle:
        def __getattr__(self, name):
            # Any style role resolves to a pass-through formatter.
            return lambda text: text
    return PlainStyle()
| bsd-3-clause |
kret0s/gnuhealth-live | tryton/server/trytond-3.8.3/trytond/model/fields/one2one.py | 1 | 2080 | # This file is part of Tryton. The COPYRIGHT file at the top level of
# this repository contains the full copyright notices and license terms.
from types import NoneType
from trytond.model.fields.field import Field
from trytond.model.fields.many2many import Many2Many
from trytond.pool import Pool
class One2One(Many2Many):
    '''
    Define one2one field (``int``).

    Implemented on top of Many2Many: the relation table still stores
    origin/target pairs, but each origin is kept to at most one target.
    '''
    _type = 'one2one'
    def get(self, ids, model, name, values=None):
        '''
        Return target record.

        :param ids: a list of ids
        :param model: a string with the name of the model
        :param name: a string with the name of the field
        :param values: a dictionary with the read values
        :return: a dictionary with ids as key and target id as value
        '''
        # Many2Many.get returns a list of targets per id; collapse it to a
        # single target id (or None when the list is empty).
        res = super(One2One, self).get(ids, model, name, values=values)
        for i, vals in res.iteritems():
            res[i] = vals[0] if vals else None
        return res
    def set(self, Model, name, ids, value, *args):
        '''
        Set the values.

        Arguments arrive as a flat alternating sequence
        (ids, value, ids, value, ...); each pair assigns ``value`` as the
        single target of every record in ``ids``.
        '''
        pool = Pool()
        Relation = pool.get(self.relation_name)
        to_delete = []
        to_create = []
        # zip(it, it) on a single iterator consumes the flat argument
        # stream two items at a time, yielding (ids, value) pairs.
        args = iter((ids, value) + args)
        for ids, value in zip(args, args):
            # Existing relations for these origins are always removed;
            # new ones are re-created below when a target is given.
            relations = Relation.search([
                    (self.origin, 'in', ids),
                    ])
            to_delete.extend(relations)
            if value:
                for record_id in ids:
                    to_create.append({
                            self.origin: record_id,
                            self.target: value,
                            })
        if to_delete:
            Relation.delete(to_delete)
        if to_create:
            Relation.create(to_create)
    def __set__(self, inst, value):
        # Coerce raw values into a Target instance before storing.
        Target = self.get_target()
        if isinstance(value, dict):
            # NOTE(review): Target(*value) unpacks the dict's *keys*
            # positionally — confirm this is the intended constructor call.
            value = Target(*value)
        elif isinstance(value, (int, long)):
            value = Target(value)
        assert isinstance(value, (Target, NoneType))
        Field.__set__(self, inst, value)
| gpl-3.0 |
supriyantomaftuh/syzygy | third_party/colorama/initialise.py | 49 | 1222 | import atexit
import sys
from .ansitowin32 import AnsiToWin32
orig_stdout = sys.stdout
orig_stderr = sys.stderr
wrapped_stdout = sys.stdout
wrapped_stderr = sys.stderr
atexit_done = False
def reset_all():
    # Emit the ANSI "reset" sequence via the original (unwrapped) stdout so
    # terminal colours are restored even while wrapping is active.
    AnsiToWin32(orig_stdout).reset_all()
def init(autoreset=False, convert=None, strip=None, wrap=True):
    """Replace sys.stdout/sys.stderr with ANSI-aware wrapped streams.

    autoreset: reset colours after every write.
    convert/strip: forwarded to the stream wrapper to control conversion
    or removal of ANSI sequences.
    wrap=False leaves the streams untouched and conflicts with any other
    option being set.
    """
    if not wrap and any([autoreset, convert, strip]):
        raise ValueError('wrap=False conflicts with any other arg=True')
    global wrapped_stdout, wrapped_stderr
    sys.stdout = wrapped_stdout = \
        wrap_stream(orig_stdout, convert, strip, autoreset, wrap)
    sys.stderr = wrapped_stderr = \
        wrap_stream(orig_stderr, convert, strip, autoreset, wrap)
    # Register the terminal reset exactly once, no matter how many times
    # init() is called.
    global atexit_done
    if not atexit_done:
        atexit.register(reset_all)
        atexit_done = True
def deinit():
    # Restore the original, unwrapped streams (undo init()).
    sys.stdout = orig_stdout
    sys.stderr = orig_stderr
def reinit():
    """Re-install the wrapped streams after a deinit()."""
    sys.stdout = wrapped_stdout
    # Bug fix: this previously assigned wrapped_stdout to sys.stderr, which
    # routed stderr output through the stdout wrapper after a
    # deinit()/reinit() cycle.
    sys.stderr = wrapped_stderr
def wrap_stream(stream, convert, strip, autoreset, wrap):
    """Return `stream`, or an AnsiToWin32-wrapped proxy when wrapping applies."""
    if not wrap:
        return stream
    wrapper = AnsiToWin32(stream,
        convert=convert, strip=strip, autoreset=autoreset)
    # Only substitute the proxy when the wrapper decides it is needed.
    return wrapper.stream if wrapper.should_wrap() else stream
| apache-2.0 |
75651/kbengine_cloud | kbe/res/scripts/common/Lib/idlelib/idle_test/htest.py | 59 | 12613 | '''Run human tests of Idle's window, dialog, and popup widgets.
run(*tests)
Run each callable in tests after finding the matching test spec in this file.
If there are none, run an htest for each spec dict in this file after finding
the matching callable in the module named in the spec.
In a tested module, let X be a global name bound to a widget callable.
End the module with
if __name__ == '__main__':
<unittest, if there is one>
from idlelib.idle_test.htest import run
run(X)
The X object must have a .__name__ attribute and a 'parent' parameter.
X will often be a widget class, but a callable instance with .__name__
or a wrapper function also work. The name of wrapper functions, like
'_editor_window', should start with '_'.
This file must contain a matching instance of the following template,
with X.__name__ prepended, as in '_editor_window_spec ...'.
_spec = {
'file': '',
'kwds': {'title': ''},
'msg': ""
}
file (no .py): used in run() to import the file and get X.
kwds: passed to X (**kwds), after 'parent' is added, to initialize X.
title: an example; used for some widgets, delete if not.
msg: displayed in a master window. Hints as to how the user might
test the widget. Close the window to skip or end the test.
Modules not being tested at the moment:
PyShell.PyShellEditorWindow
Debugger.Debugger
AutoCompleteWindow.AutoCompleteWindow
OutputWindow.OutputWindow (indirectly being tested with grep test)
'''
from importlib import import_module
from idlelib.macosxSupport import _initializeTkVariantTests
import tkinter as tk
AboutDialog_spec = {
'file': 'aboutDialog',
'kwds': {'title': 'aboutDialog test',
'_htest': True,
},
'msg': "Test every button. Ensure Python, TK and IDLE versions "
"are correctly displayed.\n [Close] to exit.",
}
_calltip_window_spec = {
'file': 'CallTipWindow',
'kwds': {},
'msg': "Typing '(' should display a calltip.\n"
"Typing ') should hide the calltip.\n"
}
_class_browser_spec = {
'file': 'ClassBrowser',
'kwds': {},
'msg': "Inspect names of module, class(with superclass if "
"applicable), methods and functions.\nToggle nested items.\n"
"Double clicking on items prints a traceback for an exception "
"that is ignored."
}
_color_delegator_spec = {
'file': 'ColorDelegator',
'kwds': {},
'msg': "The text is sample Python code.\n"
"Ensure components like comments, keywords, builtins,\n"
"string, definitions, and break are correctly colored.\n"
"The default color scheme is in idlelib/config-highlight.def"
}
# Spec for the IDLE preferences (Settings) dialog htest.
# Fixed message typos: missing space after "dialog." and duplicated "and and".
ConfigDialog_spec = {
    'file': 'configDialog',
    'kwds': {'title': 'Settings',
             '_htest': True,},
    'msg': "IDLE preferences dialog.\n"
           "In the 'Fonts/Tabs' tab, changing font face, should update the "
           "font face of the text in the area below it.\nIn the "
           "'Highlighting' tab, try different color schemes. Clicking "
           "items in the sample program should update the choices above it."
           "\nIn the 'Keys' and 'General' tab, test settings of interest."
           "\n[Ok] to close the dialog. [Apply] to apply the settings and "
           "[Cancel] to revert all changes.\nRe-run the test to ensure "
           "changes made have persisted."
    }
_dyn_option_menu_spec = {
'file': 'dynOptionMenuWidget',
'kwds': {},
'msg': "Select one of the many options in the 'old option set'.\n"
"Click the button to change the option set.\n"
"Select one of the many options in the 'new option set'."
}
_editor_window_spec = {
'file': 'EditorWindow',
'kwds': {},
'msg': "Test editor functions of interest."
}
GetCfgSectionNameDialog_spec = {
'file': 'configSectionNameDialog',
'kwds': {'title':'Get Name',
'message':'Enter something',
'used_names': {'abc'},
'_htest': True},
'msg': "After the text entered with [Ok] is stripped, <nothing>, "
"'abc', or more that 30 chars are errors.\n"
"Close 'Get Name' with a valid entry (printed to Shell), "
"[Cancel], or [X]",
}
# Spec for the help-source editing dialog htest.
# Fixed message typo: "[0k]" (zero) -> "[Ok]".
GetHelpSourceDialog_spec = {
    'file': 'configHelpSourceEdit',
    'kwds': {'title': 'Get helpsource',
             '_htest': True},
    'msg': "Enter menu item name and help file path\n "
           "<nothing> and more than 30 chars are invalid menu item names.\n"
           "<nothing>, file does not exist are invalid path items.\n"
           "Test for incomplete web address for help file path.\n"
           "A valid entry will be printed to shell with [Ok].\n"
           "[Cancel] will print None to shell",
    }
# Update once issue21519 is resolved.
# Spec for the key-binding dialog htest.
# Fixed message typos: missing space before "is invalid" and "validitity".
GetKeysDialog_spec = {
    'file': 'keybindingDialog',
    'kwds': {'title': 'Test keybindings',
             'action': 'find-again',
             'currentKeySequences': [''],
             '_htest': True,
             },
    'msg': "Test for different key modifier sequences.\n"
           "<nothing> is invalid.\n"
           "No modifier key is invalid.\n"
           "Shift key with [a-z],[0-9], function key, move key, tab, space "
           "is invalid.\nNo validity checking if advanced key binding "
           "entry is used."
    }
_grep_dialog_spec = {
'file': 'GrepDialog',
'kwds': {},
'msg': "Click the 'Show GrepDialog' button.\n"
"Test the various 'Find-in-files' functions.\n"
"The results should be displayed in a new '*Output*' window.\n"
"'Right-click'->'Goto file/line' anywhere in the search results "
"should open that file \nin a new EditorWindow."
}
_help_dialog_spec = {
'file': 'EditorWindow',
'kwds': {},
'msg': "If the help text displays, this works.\n"
"Text is selectable. Window is scrollable."
}
_io_binding_spec = {
'file': 'IOBinding',
'kwds': {},
'msg': "Test the following bindings\n"
"<Control-o> to display open window from file dialog.\n"
"<Control-s> to save the file\n"
}
_multi_call_spec = {
'file': 'MultiCall',
'kwds': {},
'msg': "The following actions should trigger a print to console or IDLE"
" Shell.\nEntering and leaving the text area, key entry, "
"<Control-Key>,\n<Alt-Key-a>, <Control-Key-a>, "
"<Alt-Control-Key-a>, \n<Control-Button-1>, <Alt-Button-1> and "
"focusing out of the window\nare sequences to be tested."
}
_multistatus_bar_spec = {
'file': 'MultiStatusBar',
'kwds': {},
'msg': "Ensure presence of multi-status bar below text area.\n"
"Click 'Update Status' to change the multi-status text"
}
_object_browser_spec = {
'file': 'ObjectBrowser',
'kwds': {},
'msg': "Double click on items upto the lowest level.\n"
"Attributes of the objects and related information "
"will be displayed side-by-side at each level."
}
_path_browser_spec = {
'file': 'PathBrowser',
'kwds': {},
'msg': "Test for correct display of all paths in sys.path.\n"
"Toggle nested items upto the lowest level.\n"
"Double clicking on an item prints a traceback\n"
"for an exception that is ignored."
}
_percolator_spec = {
'file': 'Percolator',
'kwds': {},
'msg': "There are two tracers which can be toggled using a checkbox.\n"
"Toggling a tracer 'on' by checking it should print tracer"
"output to the console or to the IDLE shell.\n"
"If both the tracers are 'on', the output from the tracer which "
"was switched 'on' later, should be printed first\n"
"Test for actions like text entry, and removal."
}
_replace_dialog_spec = {
'file': 'ReplaceDialog',
'kwds': {},
'msg': "Click the 'Replace' button.\n"
"Test various replace options in the 'Replace dialog'.\n"
"Click [Close] or [X] to close the 'Replace Dialog'."
}
_search_dialog_spec = {
'file': 'SearchDialog',
'kwds': {},
'msg': "Click the 'Search' button.\n"
"Test various search options in the 'Search dialog'.\n"
"Click [Close] or [X] to close the 'Search Dialog'."
}
_scrolled_list_spec = {
'file': 'ScrolledList',
'kwds': {},
'msg': "You should see a scrollable list of items\n"
"Selecting (clicking) or double clicking an item "
"prints the name to the console or Idle shell.\n"
"Right clicking an item will display a popup."
}
_stack_viewer_spec = {
'file': 'StackViewer',
'kwds': {},
'msg': "A stacktrace for a NameError exception.\n"
"Expand 'idlelib ...' and '<locals>'.\n"
"Check that exc_value, exc_tb, and exc_type are correct.\n"
}
_tabbed_pages_spec = {
'file': 'tabbedpages',
'kwds': {},
'msg': "Toggle between the two tabs 'foo' and 'bar'\n"
"Add a tab by entering a suitable name for it.\n"
"Remove an existing tab by entering its name.\n"
"Remove all existing tabs.\n"
"<nothing> is an invalid add page and remove page name.\n"
}
TextViewer_spec = {
'file': 'textView',
'kwds': {'title': 'Test textView',
'text':'The quick brown fox jumps over the lazy dog.\n'*35,
'_htest': True},
'msg': "Test for read-only property of text.\n"
"Text is selectable. Window is scrollable.",
}
_tooltip_spec = {
'file': 'ToolTip',
'kwds': {},
'msg': "Place mouse cursor over both the buttons\n"
"A tooltip should appear with some text."
}
_tree_widget_spec = {
'file': 'TreeWidget',
'kwds': {},
'msg': "The canvas is scrollable.\n"
"Click on folders upto to the lowest level."
}
_undo_delegator_spec = {
'file': 'UndoDelegator',
'kwds': {},
'msg': "Click [Undo] to undo any action.\n"
"Click [Redo] to redo any action.\n"
"Click [Dump] to dump the current state "
"by printing to the console or the IDLE shell.\n"
}
_widget_redirector_spec = {
'file': 'WidgetRedirector',
'kwds': {},
'msg': "Every text insert should be printed to the console."
"or the IDLE shell."
}
def run(*tests):
    """Run human tests for the given widget callables, or for every *_spec
    dict defined in this module when called with no arguments.

    A small Tk window shows the current test's instructions; one button
    instantiates the widget under test and another advances to the next test.
    """
    root = tk.Tk()
    root.title('IDLE htest')
    root.resizable(0, 0)
    _initializeTkVariantTests(root)

    # a scrollable Label like constant width text widget.
    frameLabel = tk.Frame(root, padx=10)
    frameLabel.pack()
    text = tk.Text(frameLabel, wrap='word')
    text.configure(bg=root.cget('bg'), relief='flat', height=4, width=70)
    scrollbar = tk.Scrollbar(frameLabel, command=text.yview)
    text.config(yscrollcommand=scrollbar.set)
    scrollbar.pack(side='right', fill='y', expand=False)
    text.pack(side='left', fill='both', expand=True)

    test_list = [] # List of tuples of the form (spec, callable widget)
    if tests:
        # Explicit callables given: look up each one's spec by the
        # '<name>_spec' naming convention in this module.
        for test in tests:
            test_spec = globals()[test.__name__ + '_spec']
            test_spec['name'] = test.__name__
            test_list.append((test_spec, test))
    else:
        # No arguments: collect every *_spec dict in this module and import
        # the matching callable from the idlelib module named in the spec.
        for k, d in globals().items():
            if k.endswith('_spec'):
                test_name = k[:-5]
                test_spec = d
                test_spec['name'] = test_name
                mod = import_module('idlelib.' + test_spec['file'])
                test = getattr(mod, test_name)
                test_list.append((test_spec, test))

    test_name = tk.StringVar('')
    callable_object = None
    test_kwds = None

    def next():
        # Pop the next (spec, callable) pair and show its instructions.
        # NOTE(review): this local shadows the builtin next() inside run();
        # harmless here, but worth renaming.
        nonlocal test_name, callable_object, test_kwds
        if len(test_list) == 1:
            next_button.pack_forget()
        test_spec, callable_object = test_list.pop()
        test_kwds = test_spec['kwds']
        test_kwds['parent'] = root
        test_name.set('Test ' + test_spec['name'])

        text.configure(state='normal') # enable text editing
        text.delete('1.0','end')
        text.insert("1.0",test_spec['msg'])
        text.configure(state='disabled') # preserve read-only property

    def run_test():
        # Instantiate the widget under test; print its .result attribute
        # when it has one (some dialogs expose a result).
        widget = callable_object(**test_kwds)
        try:
            print(widget.result)
        except AttributeError:
            pass

    button = tk.Button(root, textvariable=test_name, command=run_test)
    button.pack()
    next_button = tk.Button(root, text="Next", command=next)
    next_button.pack()

    next()
    root.mainloop()
if __name__ == '__main__':
run()
| lgpl-3.0 |
flyfei/python-for-android | python-modules/twisted/twisted/trial/test/test_doctest.py | 61 | 2405 | # Copyright (c) 2001-2008 Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Test twisted's doctest support.
"""
from twisted.trial import itrial, runner, unittest, reporter
from twisted.trial.test import mockdoctest
class TestRunners(unittest.TestCase):
    """
    Tests for Twisted's doctest support.
    """

    def test_id(self):
        """
        Check that the id() of the doctests' case object contains the FQPN of
        the actual tests. We need this because id() has weird behaviour w/
        doctest in Python 2.3.
        """
        loader = runner.TestLoader()
        suite = loader.loadDoctests(mockdoctest)
        idPrefix = 'twisted.trial.test.mockdoctest.Counter'
        for test in suite._tests:
            self.assertIn(idPrefix, itrial.ITestCase(test).id())

    def makeDocSuite(self, module):
        """
        Return a L{runner.DocTestSuite} for the doctests in C{module}.

        NOTE(review): the C{module} argument is ignored; the suite is always
        built from C{mockdoctest} -- confirm whether this is intended.
        """
        return self.assertWarns(
            DeprecationWarning, "DocTestSuite is deprecated in Twisted 8.0.",
            __file__, lambda: runner.DocTestSuite(mockdoctest))

    def test_correctCount(self):
        """
        L{countTestCases} returns the number of doctests in the module.
        """
        suite = self.makeDocSuite(mockdoctest)
        self.assertEqual(7, suite.countTestCases())

    def test_basicTrialIntegration(self):
        """
        L{loadDoctests} loads all of the doctests in the given module.
        """
        loader = runner.TestLoader()
        suite = loader.loadDoctests(mockdoctest)
        self.assertEqual(7, suite.countTestCases())

    def _testRun(self, suite):
        """
        Run C{suite} and check the result.
        """
        result = reporter.TestResult()
        suite.run(result)
        self.assertEqual(5, result.successes)
        # doctest reports failures as errors in 2.3
        self.assertEqual(2, len(result.errors) + len(result.failures))

    def test_expectedResults(self, count=1):
        """
        Trial can correctly run doctests with its xUnit test APIs.

        NOTE(review): the C{count} parameter is unused -- confirm whether it
        can be dropped.
        """
        suite = runner.TestLoader().loadDoctests(mockdoctest)
        self._testRun(suite)

    def test_repeatable(self):
        """
        Doctests should be runnable repeatably.
        """
        suite = runner.TestLoader().loadDoctests(mockdoctest)
        self._testRun(suite)
        self._testRun(suite)
| apache-2.0 |
hujiajie/chromium-crosswalk | third_party/cython/src/Cython/Debugger/DebugWriter.py | 103 | 2192 | from __future__ import with_statement
import os
import sys
import errno
try:
from lxml import etree
have_lxml = True
except ImportError:
have_lxml = False
try:
# Python 2.5
from xml.etree import cElementTree as etree
except ImportError:
try:
# Python 2.5
from xml.etree import ElementTree as etree
except ImportError:
try:
# normal cElementTree install
import cElementTree as etree
except ImportError:
try:
# normal ElementTree install
import elementtree.ElementTree as etree
except ImportError:
etree = None
from Cython.Compiler import Errors
class CythonDebugWriter(object):
    """
    Class to output debugging information for cygdb

    It writes debug information to cython_debug/cython_debug_info_<modulename>
    in the build directory.
    """

    def __init__(self, output_dir):
        # Fail early if none of the ElementTree implementations imported at
        # module level was available.
        if etree is None:
            raise Errors.NoElementTreeInstalledException()

        self.output_dir = os.path.join(output_dir, 'cython_debug')
        self.tb = etree.TreeBuilder()
        # set by Cython.Compiler.ParseTreeTransforms.DebugTransform
        self.module_name = None
        # Root element; closed again in serialize().
        self.start('cython_debug', attrs=dict(version='1.0'))

    def start(self, name, attrs=None):
        # Open an XML element called *name* with the given attributes.
        self.tb.start(name, attrs or {})

    def end(self, name):
        # Close the XML element called *name*.
        self.tb.end(name)

    def serialize(self):
        # Close the 'Module' element (presumably opened by the
        # DebugTransform -- it is not started in this class; confirm) and
        # the root element opened in __init__, then write everything out.
        self.tb.end('Module')
        self.tb.end('cython_debug')
        xml_root_element = self.tb.close()

        # Create the output directory; tolerate it already existing.
        try:
            os.makedirs(self.output_dir)
        except OSError, e:
            if e.errno != errno.EEXIST:
                raise

        et = etree.ElementTree(xml_root_element)
        kw = {}
        if have_lxml:
            # lxml supports pretty printing; the stdlib writers do not.
            kw['pretty_print'] = True

        fn = "cython_debug_info_" + self.module_name
        et.write(os.path.join(self.output_dir, fn), encoding="UTF-8", **kw)

        # Record which interpreter produced the debug info, for cygdb.
        interpreter_path = os.path.join(self.output_dir, 'interpreter')
        with open(interpreter_path, 'w') as f:
            f.write(sys.executable)
| bsd-3-clause |
Metonimie/Beaglebone | programs/server.py | 1 | 3147 | #!/usr/bin/env python
"""
A very simple server in python
used to control gpio pins on the beaglebone black.
The server listens for POST requests on port
6410. It has no security at all, which means
that it accepts post-data from everyone.
Send a GET request::
curl http://localhost
Send a POST request::
curl -d "foo=bar&bin=baz" http://localhost
Usage:
nohup python3 server.py &
"""
# TODO: Add basic security
# TODO: Use dictionary for gpio name : file
import http.server
import urllib
PORT = 6410
gpio_path = "/sys/class/gpio/"
# If the param name is in here then we handle the value.
authorized_gpio = ["gpio60"]
class Server(http.server.BaseHTTPRequestHandler):
    """HTTP handler that accepts POST requests to toggle GPIO pins.

    GET and HEAD are rejected; only POST with form-encoded pin/value pairs
    is processed, and only for pin names listed in ``authorized_gpio``.
    """

    def prepare_response(self, code):
        """
        Prepares the response that will be send back to the requester,
        along with the code.
        """
        self.send_response(code)
        self.send_header("Content-type", "text/html")
        # Allow cross-origin callers (e.g. a browser UI served elsewhere).
        self.send_header("Access-Control-Allow-Origin", "*")
        self.end_headers()

    def handle_gpio(self, key, value):
        """
        Very basic gpio handling, converts the value into
        an int and then it writes it to the file.

        Returns False on success and True on failure; callers treat the
        return value as an error flag.
        """
        try:
            clean_value = int(value)
            with open("{}{}/value".format(gpio_path, key), mode="w") as file:
                file.write(str(clean_value))
            return False
        except ValueError as e:
            # Posted value was not an integer.
            print(e)
        except Exception as e:
            print("Exception: {}".format(e))
        return True

    def unsupported(self):
        # NOTE(review): this writes a body without calling prepare_response()
        # first, so no status line/headers are sent explicitly -- confirm
        # this is intended.
        self.wfile.write("Go Away!\n".encode())

    def do_GET(self):
        """GET is not supported."""
        self.unsupported()

    def do_HEAD(self):
        """HEAD is not supported."""
        self.unsupported()

    def do_POST(self):
        """
        Handles the post request.
        If error is True then the handling has failed or the request is
        invalid
        """
        error = False
        try:
            # The length of the request, in bytes.
            length = int(self.headers['content-length'])
            # Dictionary containing keys and values from the request.
            postvars = urllib.parse.parse_qs(self.rfile.read(length))
            for key, value in postvars.items():
                clean_key = key.decode()
                clean_value = value[0].decode()
                print("Received: " + clean_key + " : " + clean_value)
                if clean_key in authorized_gpio:
                    error = self.handle_gpio(clean_key, clean_value)
                else:
                    # Unknown/unauthorized parameter name.
                    error = True
        except Exception as e:
            print(e)
            error = True

        response = None
        if not error:
            self.prepare_response(200)
            response = "Operation authorized.\n"
        else:
            self.prepare_response(403)
            response = "Go away!\n"
        # Write response to the client.
        self.wfile.write(response.encode())
if __name__ == "__main__":
server_address = ('', PORT)
httpd = http.server.HTTPServer(server_address, Server)
print('Starting server')
httpd.serve_forever()
| gpl-3.0 |
qiulimao/weblocust | pyspider/database/elasticsearch/resultdb.py | 10 | 3922 | #!/usr/bin/env python
# -*- encoding: utf-8 -*-
# vim: set et sw=4 ts=4 sts=4 ff=unix fenc=utf8:
# Author: Binux<roy@binux.me>
# http://binux.me
# Created on 2016-01-18 19:41:24
import time
import elasticsearch.helpers
from elasticsearch import Elasticsearch
from pyspider.database.base.resultdb import ResultDB as BaseResultDB
class ResultDB(BaseResultDB):
    """Crawl-result storage backed by an Elasticsearch index.

    Documents are stored under doc type ``result`` with the deterministic
    id ``<project>:<taskid>``, so saving the same task again replaces the
    previous document.
    """
    # Elasticsearch document type used for all results.
    __type__ = 'result'

    def __init__(self, hosts, index='pyspider'):
        """Connect to Elasticsearch and ensure the index/mapping exist.

        :param hosts: Elasticsearch host specification passed to the client.
        :param index: name of the index shared by all projects.
        """
        self.index = index
        self.es = Elasticsearch(hosts=hosts)

        # ignore=400: the index may already exist.
        self.es.indices.create(index=self.index, ignore=400)
        if not self.es.indices.get_mapping(index=self.index, doc_type=self.__type__):
            self.es.indices.put_mapping(index=self.index, doc_type=self.__type__, body={
                "_all": {"enabled": True},
                "properties": {
                    # taskid/url are stored but not indexed for search;
                    # project is not_analyzed so exact-match term queries work.
                    "taskid": {"enabled": False},
                    "project": {"type": "string", "index": "not_analyzed"},
                    "url": {"enabled": False},
                }
            })

    @property
    def projects(self):
        # Distinct project names via a terms aggregation on the project field.
        ret = self.es.search(index=self.index, doc_type=self.__type__,
                             body={"aggs": {"projects": {
                                 "terms": {"field": "project"}
                             }}}, _source=False)
        return [each['key'] for each in ret['aggregations']['projects'].get('buckets', [])]

    def save(self, project, taskid, url, result):
        """Insert or overwrite the result document for (project, taskid)."""
        obj = {
            'taskid': taskid,
            'project': project,
            'url': url,
            'result': result,
            # Timestamp used for newest-first ordering in select().
            'updatetime': time.time(),
        }
        return self.es.index(index=self.index, doc_type=self.__type__,
                             body=obj, id='%s:%s' % (project, taskid))

    def select(self, project, fields=None, offset=0, limit=0):
        """Yield result documents for *project*, ordered by updatetime desc.

        Without a limit, a scrolling scan is used so arbitrarily many
        documents can be streamed; with a limit, a regular search returns
        at most *limit* documents starting at *offset*.
        """
        offset = offset or 0
        limit = limit or 0
        if not limit:
            for record in elasticsearch.helpers.scan(self.es, index=self.index, doc_type=self.__type__,
                                                     query={'query': {'term': {'project': project}}},
                                                     _source_include=fields or [], from_=offset,
                                                     sort="updatetime:desc"):
                yield record['_source']
        else:
            for record in self.es.search(index=self.index, doc_type=self.__type__,
                                         body={'query': {'term': {'project': project}}},
                                         _source_include=fields or [], from_=offset, size=limit,
                                         sort="updatetime:desc"
                                         ).get('hits', {}).get('hits', []):
                yield record['_source']

    def count(self, project):
        """Return the number of results stored for *project*."""
        return self.es.count(index=self.index, doc_type=self.__type__,
                             body={'query': {'term': {'project': project}}}
                             ).get('count', 0)

    def get(self, project, taskid, fields=None):
        """Return the result document for (project, taskid), or None."""
        # ignore=404: a missing document yields a response without _source.
        ret = self.es.get(index=self.index, doc_type=self.__type__, id="%s:%s" % (project, taskid),
                          _source_include=fields or [], ignore=404)
        return ret.get('_source', None)

    def drop(self, project):
        """Delete every result document belonging to *project*."""
        # Refresh first so recently indexed documents are visible to the scan.
        self.refresh()
        for record in elasticsearch.helpers.scan(self.es, index=self.index, doc_type=self.__type__,
                                                 query={'query': {'term': {'project': project}}},
                                                 _source=False):
            self.es.delete(index=self.index, doc_type=self.__type__, id=record['_id'])

    def refresh(self):
        """
        Explicitly refresh one or more index, making all operations
        performed since the last refresh available for search.
        """
        self.es.indices.refresh(index=self.index)
| apache-2.0 |
arbrandes/edx-configuration | playbooks/roles/supervisor/files/pre_supervisor_checks.py | 1 | 12593 | import argparse
import boto.ec2
from boto.utils import get_instance_metadata, get_instance_identity
from boto.exception import AWSConnectionError
import hipchat
import os
import subprocess
import traceback
import socket
import time
# Services that should be checked for migrations.
MIGRATION_COMMANDS = {
'lms': "/edx/bin/edxapp-migrate-lms --noinput --list",
'cms': "/edx/bin/edxapp-migrate-cms --noinput --list",
'xqueue': ". {env_file}; sudo -E -u xqueue {python} {code_dir}/manage.py showmigrations",
'ecommerce': ". {env_file}; sudo -E -u ecommerce {python} {code_dir}/manage.py showmigrations",
'insights': ". {env_file}; sudo -E -u insights {python} {code_dir}/manage.py showmigrations",
'analytics_api': ". {env_file}; sudo -E -u analytics_api {python} {code_dir}/manage.py showmigrations",
'credentials': ". {env_file}; sudo -E -u credentials {python} {code_dir}/manage.py showmigrations",
'discovery': ". {env_file}; sudo -E -u discovery {python} {code_dir}/manage.py showmigrations",
}
HIPCHAT_USER = "PreSupervisor"
# Max amount of time to wait for tags to be applied.
MAX_BACKOFF = 120
INITIAL_BACKOFF = 1
REGION = get_instance_identity()['document']['region']
def services_for_instance(instance_id):
    """Yield each service named in the 'services' tag of the given EC2 instance."""
    connection = boto.ec2.connect_to_region(REGION)
    reservations = connection.get_all_instances(instance_ids=[instance_id])
    for reservation in reservations:
        for instance in reservation.instances:
            if instance.id != instance_id:
                continue
            try:
                tag_value = instance.tags['services']
            except KeyError:
                msg = "Tag named 'services' not found on this instance({})".format(instance_id)
                raise Exception(msg)
            for service in tag_value.split(','):
                yield service
def edp_for_instance(instance_id):
    """Return the (environment, deployment, play) tags of an EC2 instance.

    Raises Exception when any of the three tags is missing, or when the
    instance cannot be found at all (previously this case fell off the end
    and returned None, which callers could not unpack).
    """
    ec2 = boto.ec2.connect_to_region(REGION)
    reservations = ec2.get_all_instances(instance_ids=[instance_id])
    for reservation in reservations:
        for instance in reservation.instances:
            if instance.id == instance_id:
                try:
                    environment = instance.tags['environment']
                    deployment = instance.tags['deployment']
                    play = instance.tags['play']
                except KeyError as ke:
                    msg = "{} tag not found on this instance({})".format(ke.message, instance_id)
                    raise Exception(msg)
                return (environment, deployment, play)
    # No matching instance: raise a descriptive error instead of implicitly
    # returning None; the callers' retry loop already catches Exception.
    raise Exception("Instance {} not found".format(instance_id))
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description="Enable all services that are in the services tag of this ec2 instance.")
parser.add_argument("-a","--available",
help="The location of the available services.")
parser.add_argument("-e","--enabled",
help="The location of the enabled services.")
migration_args = parser.add_argument_group("edxapp_migrations",
"Args for running edxapp migration checks.")
migration_args.add_argument("--edxapp-code-dir",
help="Location of the edx-platform code.")
migration_args.add_argument("--edxapp-python",
help="Path to python to use for executing migration check.")
migration_args.add_argument("--edxapp-env",
help="Location of the edxapp environment file.")
xq_migration_args = parser.add_argument_group("xqueue_migrations",
"Args for running xqueue migration checks.")
xq_migration_args.add_argument("--xqueue-code-dir",
help="Location of the xqueue code.")
xq_migration_args.add_argument("--xqueue-python",
help="Path to python to use for executing migration check.")
migration_args.add_argument("--xqueue-env",
help="Location of the xqueue environment file.")
ecom_migration_args = parser.add_argument_group("ecommerce_migrations",
"Args for running ecommerce migration checks.")
ecom_migration_args.add_argument("--ecommerce-python",
help="Path to python to use for executing migration check.")
ecom_migration_args.add_argument("--ecommerce-env",
help="Location of the ecommerce environment file.")
ecom_migration_args.add_argument("--ecommerce-code-dir",
help="Location of the ecommerce code.")
credentials_migration_args = parser.add_argument_group("credentials_migrations",
"Args for running credentials migration checks.")
credentials_migration_args.add_argument("--credentials-python",
help="Path to python to use for executing migration check.")
credentials_migration_args.add_argument("--credentials-env",
help="Location of the credentials environment file.")
credentials_migration_args.add_argument("--credentials-code-dir",
help="Location of the credentials code.")
discovery_migration_args = parser.add_argument_group("discovery_migrations",
"Args for running discovery migration checks.")
discovery_migration_args.add_argument("--discovery-python",
help="Path to python to use for executing migration check.")
discovery_migration_args.add_argument("--discovery-env",
help="Location of the discovery environment file.")
discovery_migration_args.add_argument("--discovery-code-dir",
help="Location of the discovery code.")
insights_migration_args = parser.add_argument_group("insights_migrations",
"Args for running insights migration checks.")
insights_migration_args.add_argument("--insights-python",
help="Path to python to use for executing migration check.")
insights_migration_args.add_argument("--insights-env",
help="Location of the insights environment file.")
insights_migration_args.add_argument("--insights-code-dir",
help="Location of the insights code.")
analyticsapi_migration_args = parser.add_argument_group("analytics_api_migrations",
"Args for running analytics_api migration checks.")
analyticsapi_migration_args.add_argument("--analytics-api-python",
help="Path to python to use for executing migration check.")
analyticsapi_migration_args.add_argument("--analytics-api-env",
help="Location of the analytics_api environment file.")
analyticsapi_migration_args.add_argument("--analytics-api-code-dir",
help="Location of the analytics_api code.")
hipchat_args = parser.add_argument_group("hipchat",
"Args for hipchat notification.")
hipchat_args.add_argument("-c","--hipchat-api-key",
help="Hipchat token if you want to receive notifications via hipchat.")
hipchat_args.add_argument("-r","--hipchat-room",
help="Room to send messages to.")
args = parser.parse_args()
report = []
prefix = None
notify = None
try:
if args.hipchat_api_key:
hc = hipchat.HipChat(token=args.hipchat_api_key)
notify = lambda message: hc.message_room(room_id=args.hipchat_room,
message_from=HIPCHAT_USER, message=message)
except Exception as e:
print("Failed to initialize hipchat, {}".format(e))
traceback.print_exc()
instance_id = get_instance_metadata()['instance-id']
prefix = instance_id
ec2 = boto.ec2.connect_to_region(REGION)
reservations = ec2.get_all_instances(instance_ids=[instance_id])
instance = reservations[0].instances[0]
if instance.instance_profile['arn'].endswith('/abbey'):
print("Running an abbey build. Not starting any services.")
# Needs to exit with 1 instead of 0 to prevent
# services from starting.
exit(1)
time_left = MAX_BACKOFF
backoff = INITIAL_BACKOFF
environment = None
deployment = None
play = None
while time_left > 0:
try:
environment, deployment, play = edp_for_instance(instance_id)
prefix = "{environment}-{deployment}-{play}-{instance_id}".format(
environment=environment,
deployment=deployment,
play=play,
instance_id=instance_id)
break
except Exception as e:
print("Failed to get EDP for {}: {}".format(instance_id, str(e)))
# With the time limit being 2 minutes we will
# try 5 times before giving up.
time.sleep(backoff)
time_left -= backoff
backoff = backoff * 2
if environment is None or deployment is None or play is None:
msg = "Unable to retrieve environment, deployment, or play tag."
print(msg)
if notify:
notify("{} : {}".format(prefix, msg))
exit(0)
#get the hostname of the sandbox
hostname = socket.gethostname()
try:
#get the list of the volumes, that are attached to the instance
volumes = ec2.get_all_volumes(filters={'attachment.instance-id': instance_id})
for volume in volumes:
volume.add_tags({"hostname": hostname,
"environment": environment,
"deployment": deployment,
"cluster": play,
"instance-id": instance_id,
"created": volume.create_time })
except Exception as e:
msg = "Failed to tag volumes associated with {}: {}".format(instance_id, str(e))
print(msg)
if notify:
notify(msg)
try:
for service in services_for_instance(instance_id):
if service in MIGRATION_COMMANDS:
services = {
"lms": {'python': args.edxapp_python, 'env_file': args.edxapp_env, 'code_dir': args.edxapp_code_dir},
"cms": {'python': args.edxapp_python, 'env_file': args.edxapp_env, 'code_dir': args.edxapp_code_dir},
"ecommerce": {'python': args.ecommerce_python, 'env_file': args.ecommerce_env, 'code_dir': args.ecommerce_code_dir},
"credentials": {'python': args.credentials_python, 'env_file': args.credentials_env, 'code_dir': args.credentials_code_dir},
"discovery": {'python': args.discovery_python, 'env_file': args.discovery_env, 'code_dir': args.discovery_code_dir},
"insights": {'python': args.insights_python, 'env_file': args.insights_env, 'code_dir': args.insights_code_dir},
"analytics_api": {'python': args.analytics_api_python, 'env_file': args.analytics_api_env, 'code_dir': args.analytics_api_code_dir},
"xqueue": {'python': args.xqueue_python, 'env_file': args.xqueue_env, 'code_dir': args.xqueue_code_dir},
}
if service in services and all(arg!=None for arg in services[service].values()) and service in MIGRATION_COMMANDS:
serv_vars = services[service]
cmd = MIGRATION_COMMANDS[service].format(**serv_vars)
if os.path.exists(serv_vars['code_dir']):
os.chdir(serv_vars['code_dir'])
# Run migration check command.
output = subprocess.check_output(cmd, shell=True, )
if '[ ]' in output:
raise Exception("Migrations have not been run for {}".format(service))
# Link to available service.
available_file = os.path.join(args.available, "{}.conf".format(service))
link_location = os.path.join(args.enabled, "{}.conf".format(service))
if os.path.exists(available_file):
subprocess.call("sudo -u supervisor ln -sf {} {}".format(available_file, link_location), shell=True)
report.append("Enabling service: {}".format(service))
else:
raise Exception("No conf available for service: {}".format(link_location))
except AWSConnectionError as ae:
msg = "{}: ERROR : {}".format(prefix, ae)
if notify:
notify(msg)
notify(traceback.format_exc())
raise ae
except Exception as e:
msg = "{}: ERROR : {}".format(prefix, e)
print(msg)
if notify:
notify(msg)
traceback.print_exc()
raise e
else:
msg = "{}: {}".format(prefix, " | ".join(report))
print(msg)
if notify:
notify(msg)
| agpl-3.0 |
photoninger/ansible | test/runner/lib/manage_ci.py | 18 | 7268 | """Access Ansible Core CI remote services."""
from __future__ import absolute_import, print_function
import os
import pipes
import tempfile
from time import sleep
import lib.pytar
from lib.util import (
SubprocessError,
ApplicationError,
run_command,
intercept_command,
)
from lib.core_ci import (
AnsibleCoreCI,
)
from lib.ansible_util import (
ansible_environment,
)
class ManageWindowsCI(object):
    """Manage access to a Windows instance provided by Ansible Core CI."""
    def __init__(self, core_ci):
        """
        :type core_ci: AnsibleCoreCI
        """
        self.core_ci = core_ci

    def wait(self):
        """Block until the instance answers an ansible win_ping, or time out."""
        connection = self.core_ci.connection
        extra_vars = [
            'ansible_connection=winrm',
            'ansible_host=%s' % connection.hostname,
            'ansible_user=%s' % connection.username,
            'ansible_password=%s' % connection.password,
            'ansible_port=%s' % connection.port,
            'ansible_winrm_server_cert_validation=ignore',
        ]

        name = 'windows_%s' % self.core_ci.version

        env = ansible_environment(self.core_ci.args)
        cmd = ['ansible', '-m', 'win_ping', '-i', '%s,' % name, name, '-e', ' '.join(extra_vars)]

        # Up to 119 attempts, 10 seconds apart (roughly 20 minutes).
        attempts_remaining = 119
        while attempts_remaining > 0:
            attempts_remaining -= 1
            try:
                intercept_command(self.core_ci.args, cmd, 'ping', env=env)
                return
            except SubprocessError:
                sleep(10)

        raise ApplicationError('Timeout waiting for %s/%s instance %s.' %
                               (self.core_ci.platform, self.core_ci.version, self.core_ci.instance_id))
class ManageNetworkCI(object):
    """Manage access to a network instance provided by Ansible Core CI."""
    def __init__(self, core_ci):
        """
        :type core_ci: AnsibleCoreCI
        """
        self.core_ci = core_ci

    def wait(self):
        """Wait for instance to respond to ansible ping.

        Retries every 10 seconds, up to 89 attempts (roughly 15 minutes),
        before raising ApplicationError.
        """
        # Connection details are passed as inline extra vars. The connection
        # type is 'local' -- presumably because the platform's *_command
        # module manages the device connection itself; confirm.
        extra_vars = [
            'ansible_host=%s' % self.core_ci.connection.hostname,
            'ansible_port=%s' % self.core_ci.connection.port,
            'ansible_connection=local',
            'ansible_ssh_private_key_file=%s' % self.core_ci.ssh_key.key,
        ]

        # Inventory host name built from platform and dotted version,
        # e.g. 'vyos-1-1-8'.
        name = '%s-%s' % (self.core_ci.platform, self.core_ci.version.replace('.', '-'))

        env = ansible_environment(self.core_ci.args)
        cmd = [
            'ansible',
            '-m', '%s_command' % self.core_ci.platform,
            '-a', 'commands=?',
            '-u', self.core_ci.connection.username,
            '-i', '%s,' % name,
            '-e', ' '.join(extra_vars),
            name,
        ]

        for _ in range(1, 90):
            try:
                intercept_command(self.core_ci.args, cmd, 'ping', env=env)
                return
            except SubprocessError:
                sleep(10)
                continue

        raise ApplicationError('Timeout waiting for %s/%s instance %s.' %
                               (self.core_ci.platform, self.core_ci.version, self.core_ci.instance_id))
class ManagePosixCI(object):
    """Manage access to a POSIX instance provided by Ansible Core CI."""
    def __init__(self, core_ci):
        """
        :type core_ci: AnsibleCoreCI
        """
        self.core_ci = core_ci
        self.ssh_args = ['-i', self.core_ci.ssh_key.key]

        ssh_options = dict(
            BatchMode='yes',
            StrictHostKeyChecking='no',
            UserKnownHostsFile='/dev/null',
            ServerAliveInterval=15,
            ServerAliveCountMax=4,
        )

        # Sorted for a deterministic command line.
        for ssh_option in sorted(ssh_options):
            self.ssh_args += ['-o', '%s=%s' % (ssh_option, ssh_options[ssh_option])]

        # Select the privilege escalation wrapper used by ssh() for each
        # supported platform/provider combination.
        if self.core_ci.platform == 'freebsd':
            if self.core_ci.provider == 'aws':
                self.become = ['su', '-l', 'root', '-c']
            elif self.core_ci.provider == 'azure':
                self.become = ['sudo', '-in', 'sh', '-c']
            else:
                raise NotImplementedError('provider %s has not been implemented' % self.core_ci.provider)
        elif self.core_ci.platform == 'osx':
            self.become = ['sudo', '-in', 'PATH=/usr/local/bin:$PATH']
        elif self.core_ci.platform == 'rhel':
            self.become = ['sudo', '-in', 'bash', '-c']
        else:
            # Previously an unsupported platform was only detected later as an
            # AttributeError when ssh() read self.become; fail fast instead.
            raise NotImplementedError('platform %s has not been implemented' % self.core_ci.platform)

    def setup(self):
        """Start instance and wait for it to become ready and respond to an ansible ping."""
        self.wait()
        self.configure()
        self.upload_source()

    def wait(self):
        """Wait for instance to respond to SSH, polling every 10 seconds.

        :raises ApplicationError: if the instance never becomes reachable
        """
        for _ in range(1, 90):
            try:
                self.ssh('id')
                return
            except SubprocessError:
                sleep(10)
                continue

        raise ApplicationError('Timeout waiting for %s/%s instance %s.' %
                               (self.core_ci.platform, self.core_ci.version, self.core_ci.instance_id))

    def configure(self):
        """Configure remote host for testing."""
        self.upload('test/runner/setup/remote.sh', '/tmp')
        self.ssh('chmod +x /tmp/remote.sh && /tmp/remote.sh %s' % self.core_ci.platform)

    def upload_source(self):
        """Upload and extract source."""
        with tempfile.NamedTemporaryFile(prefix='ansible-source-', suffix='.tgz') as local_source_fd:
            remote_source_dir = '/tmp'
            remote_source_path = os.path.join(remote_source_dir, os.path.basename(local_source_fd.name))

            # --explain mode only reports what would happen; skip the tarball.
            if not self.core_ci.args.explain:
                lib.pytar.create_tarfile(local_source_fd.name, '.', lib.pytar.DefaultTarFilter())

            self.upload(local_source_fd.name, remote_source_dir)
            self.ssh('rm -rf ~/ansible && mkdir ~/ansible && cd ~/ansible && tar oxzf %s' % remote_source_path)

    def download(self, remote, local):
        """Download a file or directory from the remote instance via scp.

        :type remote: str
        :type local: str
        """
        self.scp('%s@%s:%s' % (self.core_ci.connection.username, self.core_ci.connection.hostname, remote), local)

    def upload(self, local, remote):
        """Upload a file or directory to the remote instance via scp.

        :type local: str
        :type remote: str
        """
        self.scp(local, '%s@%s:%s' % (self.core_ci.connection.username, self.core_ci.connection.hostname, remote))

    def ssh(self, command, options=None):
        """Run a command on the remote instance under the platform's become wrapper.

        :type command: str | list[str]
        :type options: list[str] | None
        """
        if not options:
            options = []

        if isinstance(command, list):
            command = ' '.join(pipes.quote(c) for c in command)

        run_command(self.core_ci.args,
                    ['ssh', '-tt', '-q'] + self.ssh_args +
                    options +
                    ['-p', str(self.core_ci.connection.port),
                     '%s@%s' % (self.core_ci.connection.username, self.core_ci.connection.hostname)] +
                    self.become + [pipes.quote(command)])

    def scp(self, src, dst):
        """Copy src to dst with scp using the shared SSH arguments.

        :type src: str
        :type dst: str
        """
        run_command(self.core_ci.args,
                    ['scp'] + self.ssh_args +
                    ['-P', str(self.core_ci.connection.port), '-q', '-r', src, dst])
| gpl-3.0 |
chainer/chainer | tests/chainerx_tests/dtype_utils.py | 5 | 5772 | import itertools
import numpy
import chainerx
def _permutate_dtype_mapping(dtype_mapping_list):
# Permutates in dtypes of dtype mapping.
d = {}
for in_dtypes, out_dtype in dtype_mapping_list:
for in_dtypes_ in itertools.permutations(in_dtypes):
d[in_dtypes_] = out_dtype
return sorted(d.items())
# Used for e.g. testing power.
#
# Each entry maps a tuple of input array dtypes to the dtype name the
# operation is expected to return.
result_numeric_dtypes_two_arrays = [
    # Floats.
    (('float16', 'float16'), 'float16'),
    (('float32', 'float32'), 'float32'),
    (('float64', 'float64'), 'float64'),
    (('float16', 'float32'), 'float32'),
    (('float32', 'float64'), 'float64'),
    (('float64', 'float16'), 'float64'),
    # Signed ints.
    (('int8', 'int8'), 'int8'),
    (('int16', 'int16'), 'int16'),
    (('int32', 'int32'), 'int32'),
    (('int64', 'int64'), 'int64'),
    (('int8', 'int16'), 'int16'),
    (('int8', 'int64'), 'int64'),
    (('int16', 'int32'), 'int32'),
    (('int32', 'int8'), 'int32'),
    (('int32', 'int64'), 'int64'),
    (('int64', 'int16'), 'int64'),
    # Unsigned ints.
    (('uint8', 'uint8'), 'uint8'),
    # Signed int and unsigned int.
    (('uint8', 'int8'), 'int16'),
    (('uint8', 'int16'), 'int16'),
    (('int32', 'uint8'), 'int32'),
    # Signed int and float.
    (('int8', 'float16'), 'float16'),
    (('int16', 'float64'), 'float64'),
    (('int64', 'float32'), 'float32'),
    (('float16', 'int64'), 'float16'),
    (('float32', 'int32'), 'float32'),
    # Unsigned int and float.
    (('uint8', 'float16'), 'float16'),
    (('float16', 'uint8'), 'float16'),
]

# Numeric table extended with bool inputs, for operations (e.g. comparisons)
# that accept bool arrays.
result_comparable_dtypes_two_arrays = [
    # Bools.
    (('bool_', 'bool_'), 'bool_'),
] + result_numeric_dtypes_two_arrays

# Full two-array table: the bool-involving entries are expanded over all
# input orderings before being appended to the numeric table.
result_dtypes_two_arrays = _permutate_dtype_mapping([
    # Bools.
    (('bool_', 'bool_'), 'bool_'),
    # Bool and other.
    (('bool_', 'uint8'), 'uint8'),
    (('bool_', 'int8'), 'int8'),
    (('bool_', 'int16'), 'int16'),
    (('bool_', 'float16'), 'float16'),
    (('bool_', 'float64'), 'float64'),
]) + result_numeric_dtypes_two_arrays

# Three-input table; every entry is expanded over all input orderings.
result_dtypes_three_arrays = _permutate_dtype_mapping([
    # Signed ints.
    (('int32', 'int32', 'int32'), 'int32'),
    (('int8', 'int8', 'int32'), 'int32'),
    (('int8', 'int16', 'int32'), 'int32'),
    (('int8', 'int32', 'int32'), 'int32'),
    (('int8', 'int64', 'int32'), 'int64'),
    # Unsigned ints.
    (('uint8', 'uint8', 'uint8'), 'uint8'),
    (('uint8', 'uint8', 'int8'), 'int16'),
    (('uint8', 'int8', 'int8'), 'int16'),
    (('uint8', 'int8', 'int16'), 'int16'),
    (('uint8', 'uint8', 'int16'), 'int16'),
    # Float and signed int.
    (('float16', 'int8', 'int8'), 'float16'),
    (('float16', 'int32', 'int64'), 'float16'),
    (('float16', 'float32', 'int64'), 'float32'),
    # Float and unsigned int.
    (('float16', 'int8', 'uint8'), 'float16'),
    (('float16', 'int32', 'uint8'), 'float16'),
    (('float16', 'float32', 'uint8'), 'float32'),
    # Bool and other.
    (('bool_', 'uint8', 'uint8'), 'uint8'),
    (('bool_', 'bool_', 'uint8'), 'uint8'),
    (('bool_', 'int8', 'uint8'), 'int16'),
    (('bool_', 'bool_', 'int32'), 'int32'),
    (('bool_', 'float16', 'float32'), 'float32'),
    (('bool_', 'bool_', 'float64'), 'float64'),
])

# Entries below are (array dtypes, scalar type, expected result dtype)
# triples for array-scalar operations.
result_float_dtypes_array_scalar = [
    (('float16',), float, 'float16'),
    (('float32',), float, 'float32'),
    (('float64',), float, 'float64'),
    (('float16',), numpy.float64, 'float16'),
    (('float64',), numpy.float16, 'float64'),
]

result_numeric_dtypes_array_scalar = [
    # Float scalar.
    (('int8',), float, 'float32'),
    (('int16',), float, 'float32'),
    (('int32',), float, 'float32'),
    (('int64',), float, 'float32'),
    (('uint8',), float, 'float32'),
    (('int8',), numpy.float32, 'float32'),
    (('int64',), numpy.float16, 'float32'),
    (('uint8',), numpy.float64, 'float32'),
    # Int scalar.
    (('int8',), int, 'int8'),
    (('int16',), int, 'int16'),
    (('int32',), int, 'int32'),
    (('int64',), int, 'int64'),
    (('uint8',), int, 'uint8'),
    (('float16',), int, 'float16'),
    (('float32',), int, 'float32'),
    (('float64',), int, 'float64'),
    (('int16',), numpy.int16, 'int16'),
    (('uint8',), numpy.int8, 'uint8'),
    (('float64',), numpy.int8, 'float64'),
    (('float16',), numpy.int64, 'float16'),
] + result_float_dtypes_array_scalar

result_comparable_dtypes_array_scalar = [
    (('bool_',), bool, 'bool_'),
    (('bool_',), numpy.bool_, 'bool_'),
] + result_numeric_dtypes_array_scalar

result_dtypes_array_scalar = [
    # Bool scalar.
    (('bool_',), bool, 'bool_'),
    (('int8',), bool, 'int8'),
    (('int16',), bool, 'int16'),
    (('int32',), bool, 'int32'),
    (('int64',), bool, 'int64'),
    (('uint8',), bool, 'uint8'),
    (('float16',), bool, 'float16'),
    (('float32',), bool, 'float32'),
    (('float64',), bool, 'float64'),
    (('bool_',), numpy.bool_, 'bool_'),
    (('int16',), numpy.bool_, 'int16'),
    (('uint8',), numpy.bool_, 'uint8'),
    (('float32',), numpy.bool_, 'float32'),
] + result_numeric_dtypes_array_scalar
def cast_if_numpy_array(xp, array, chx_expected_dtype):
    """Casts NumPy result array to match the dtype of ChainerX's corresponding
    result.

    This function receives result arrays for both NumPy and ChainerX and only
    converts dtype of the NumPy array.

    :raises TypeError: if xp is neither chainerx nor numpy
    """
    if xp is chainerx:
        assert isinstance(array, chainerx.ndarray)
        return array
    if xp is numpy:
        assert isinstance(array, (numpy.ndarray, numpy.generic))
        # Dtype conversion to allow comparing the correctness of the values.
        return array.astype(chx_expected_dtype, copy=False)
    # Was `assert False`, which is stripped under `python -O` and would then
    # silently return None; raise explicitly so unknown modules fail loudly.
    raise TypeError('unsupported array module: %r' % (xp,))
def make_same_in_out_dtypes(number_of_in_params, dtypes):
    """Build (in_dtypes, out_dtype) pairs where every input dtype equals the
    output dtype, repeated for each of the given dtype names."""
    pairs = []
    for dtype in dtypes:
        pairs.append(((dtype,) * number_of_in_params, dtype))
    return pairs
| mit |
kapouer/node-gyp | gyp/pylib/gyp/generator/xcode.py | 110 | 54811 | # Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import filecmp
import gyp.common
import gyp.xcodeproj_file
import errno
import os
import sys
import posixpath
import re
import shutil
import subprocess
import tempfile
# Project files generated by this module will use _intermediate_var as a
# custom Xcode setting whose value is a DerivedSources-like directory that's
# project-specific and configuration-specific. The normal choice,
# DERIVED_FILE_DIR, is target-specific, which is thought to be too restrictive
# as it is likely that multiple targets within a single project file will want
# to access the same set of generated files. The other option,
# PROJECT_DERIVED_FILE_DIR, is unsuitable because while it is project-specific,
# it is not configuration-specific. INTERMEDIATE_DIR is defined as
# $(PROJECT_DERIVED_FILE_DIR)/$(CONFIGURATION).
_intermediate_var = 'INTERMEDIATE_DIR'

# SHARED_INTERMEDIATE_DIR is the same, except that it is shared among all
# targets that share the same BUILT_PRODUCTS_DIR.
_shared_intermediate_var = 'SHARED_INTERMEDIATE_DIR'

_library_search_paths_var = 'LIBRARY_SEARCH_PATHS'

# Mapping of gyp's generic build variables to their Xcode equivalents,
# consumed by the gyp core when expanding input files.
generator_default_variables = {
  'EXECUTABLE_PREFIX': '',
  'EXECUTABLE_SUFFIX': '',
  'STATIC_LIB_PREFIX': 'lib',
  'SHARED_LIB_PREFIX': 'lib',
  'STATIC_LIB_SUFFIX': '.a',
  'SHARED_LIB_SUFFIX': '.dylib',
  # INTERMEDIATE_DIR is a place for targets to build up intermediate products.
  # It is specific to each build environment. It is only guaranteed to exist
  # and be constant within the context of a project, corresponding to a single
  # input file. Some build environments may allow their intermediate directory
  # to be shared on a wider scale, but this is not guaranteed.
  'INTERMEDIATE_DIR': '$(%s)' % _intermediate_var,
  'OS': 'mac',
  'PRODUCT_DIR': '$(BUILT_PRODUCTS_DIR)',
  'LIB_DIR': '$(BUILT_PRODUCTS_DIR)',
  'RULE_INPUT_ROOT': '$(INPUT_FILE_BASE)',
  'RULE_INPUT_EXT': '$(INPUT_FILE_SUFFIX)',
  'RULE_INPUT_NAME': '$(INPUT_FILE_NAME)',
  'RULE_INPUT_PATH': '$(INPUT_FILE_PATH)',
  'RULE_INPUT_DIRNAME': '$(INPUT_FILE_DIRNAME)',
  'SHARED_INTERMEDIATE_DIR': '$(%s)' % _shared_intermediate_var,
  'CONFIGURATION_NAME': '$(CONFIGURATION)',
}

# The Xcode-specific sections that hold paths.
generator_additional_path_sections = [
  'mac_bundle_resources',
  'mac_framework_headers',
  'mac_framework_private_headers',
  # 'mac_framework_dirs', input already handles _dirs endings.
]

# The Xcode-specific keys that exist on targets and aren't moved down to
# configurations.
generator_additional_non_configuration_keys = [
  'mac_bundle',
  'mac_bundle_resources',
  'mac_framework_headers',
  'mac_framework_private_headers',
  'xcode_create_dependents_test_runner',
]

# We want to let any rules apply to files that are resources also.
generator_extra_sources_for_rules = [
  'mac_bundle_resources',
  'mac_framework_headers',
  'mac_framework_private_headers',
]

# Xcode's standard set of library directories, which don't need to be duplicated
# in LIBRARY_SEARCH_PATHS. This list is not exhaustive, but that's okay.
xcode_standard_library_dirs = frozenset([
  '$(SDKROOT)/usr/lib',
  '$(SDKROOT)/usr/local/lib',
])
def CreateXCConfigurationList(configuration_names):
  """Return an XCConfigurationList holding one XCBuildConfiguration per name.

  Falls back to a single 'Default' configuration when no names are supplied;
  the first name becomes the defaultConfigurationName.
  """
  names = configuration_names if configuration_names else ['Default']
  xccl = gyp.xcodeproj_file.XCConfigurationList({'buildConfigurations': []})
  for name in names:
    xccl.AppendProperty('buildConfigurations',
                        gyp.xcodeproj_file.XCBuildConfiguration({'name': name}))
  xccl.SetProperty('defaultConfigurationName', names[0])
  return xccl
class XcodeProject(object):
  """In-memory model of one generated .xcodeproj bundle.

  Holds the PBXProject object graph produced from a single input .gyp file
  and knows how to finalize (normalize configurations, sort and wire up
  targets) and atomically write the bundle to disk.
  """

  def __init__(self, gyp_path, path, build_file_dict):
    # gyp_path: the input .gyp file; path: the .xcodeproj directory to
    # create; build_file_dict: the parsed contents of the .gyp file.
    self.gyp_path = gyp_path
    self.path = path
    self.project = gyp.xcodeproj_file.PBXProject(path=path)
    projectDirPath = gyp.common.RelativePath(
                         os.path.dirname(os.path.abspath(self.gyp_path)),
                         os.path.dirname(path) or '.')
    self.project.SetProperty('projectDirPath', projectDirPath)
    self.project_file = \
        gyp.xcodeproj_file.XCProjectFile({'rootObject': self.project})
    self.build_file_dict = build_file_dict

    # TODO(mark): add destructor that cleans up self.path if created_dir is
    # True and things didn't complete successfully. Or do something even
    # better with "try"?
    self.created_dir = False
    try:
      os.makedirs(self.path)
      self.created_dir = True
    except OSError, e:
      # The bundle directory may survive from a previous generation run;
      # any error other than "already exists" is real.
      if e.errno != errno.EEXIST:
        raise

  def Finalize1(self, xcode_targets, serialize_all_tests):
    """First finalization pass: normalize the configuration list, sort the
    targets into input order, and synthesize the aggregate "All",
    "Run ..." and "Run All Tests" targets for this project."""
    # Collect a list of all of the build configuration names used by the
    # various targets in the file. It is very heavily advised to keep each
    # target in an entire project (even across multiple project files) using
    # the same set of configuration names.
    configurations = []
    for xct in self.project.GetProperty('targets'):
      xccl = xct.GetProperty('buildConfigurationList')
      xcbcs = xccl.GetProperty('buildConfigurations')
      for xcbc in xcbcs:
        name = xcbc.GetProperty('name')
        if name not in configurations:
          configurations.append(name)

    # Replace the XCConfigurationList attached to the PBXProject object with
    # a new one specifying all of the configuration names used by the various
    # targets.
    try:
      xccl = CreateXCConfigurationList(configurations)
      self.project.SetProperty('buildConfigurationList', xccl)
    except:
      sys.stderr.write("Problem with gyp file %s\n" % self.gyp_path)
      raise

    # The need for this setting is explained above where _intermediate_var is
    # defined. The comments below about wanting to avoid project-wide build
    # settings apply here too, but this needs to be set on a project-wide basis
    # so that files relative to the _intermediate_var setting can be displayed
    # properly in the Xcode UI.
    #
    # Note that for configuration-relative files such as anything relative to
    # _intermediate_var, for the purposes of UI tree view display, Xcode will
    # only resolve the configuration name once, when the project file is
    # opened. If the active build configuration is changed, the project file
    # must be closed and reopened if it is desired for the tree view to update.
    # This is filed as Apple radar 6588391.
    xccl.SetBuildSetting(_intermediate_var,
                         '$(PROJECT_DERIVED_FILE_DIR)/$(CONFIGURATION)')
    xccl.SetBuildSetting(_shared_intermediate_var,
                         '$(SYMROOT)/DerivedSources/$(CONFIGURATION)')

    # Set user-specified project-wide build settings and config files. This
    # is intended to be used very sparingly. Really, almost everything should
    # go into target-specific build settings sections. The project-wide
    # settings are only intended to be used in cases where Xcode attempts to
    # resolve variable references in a project context as opposed to a target
    # context, such as when resolving sourceTree references while building up
    # the tree tree view for UI display.
    # Any values set globally are applied to all configurations, then any
    # per-configuration values are applied.
    for xck, xcv in self.build_file_dict.get('xcode_settings', {}).iteritems():
      xccl.SetBuildSetting(xck, xcv)
    if 'xcode_config_file' in self.build_file_dict:
      config_ref = self.project.AddOrGetFileInRootGroup(
          self.build_file_dict['xcode_config_file'])
      xccl.SetBaseConfiguration(config_ref)
    build_file_configurations = self.build_file_dict.get('configurations', {})
    if build_file_configurations:
      for config_name in configurations:
        build_file_configuration_named = \
            build_file_configurations.get(config_name, {})
        if build_file_configuration_named:
          xcc = xccl.ConfigurationNamed(config_name)
          for xck, xcv in build_file_configuration_named.get('xcode_settings',
                                                             {}).iteritems():
            xcc.SetBuildSetting(xck, xcv)
          if 'xcode_config_file' in build_file_configuration_named:
            config_ref = self.project.AddOrGetFileInRootGroup(
                build_file_configurations[config_name]['xcode_config_file'])
            xcc.SetBaseConfiguration(config_ref)

    # Sort the targets based on how they appeared in the input.
    # TODO(mark): Like a lot of other things here, this assumes internal
    # knowledge of PBXProject - in this case, of its "targets" property.

    # ordinary_targets are ordinary targets that are already in the project
    # file. run_test_targets are the targets that run unittests and should be
    # used for the Run All Tests target. support_targets are the action/rule
    # targets used by GYP file targets, just kept for the assert check.
    ordinary_targets = []
    run_test_targets = []
    support_targets = []

    # targets is full list of targets in the project.
    targets = []

    # does the it define it's own "all"?
    has_custom_all = False

    # targets_for_all is the list of ordinary_targets that should be listed
    # in this project's "All" target. It includes each non_runtest_target
    # that does not have suppress_wildcard set.
    targets_for_all = []

    for target in self.build_file_dict['targets']:
      target_name = target['target_name']
      toolset = target['toolset']
      qualified_target = gyp.common.QualifiedTarget(self.gyp_path, target_name,
                                                    toolset)
      xcode_target = xcode_targets[qualified_target]
      # Make sure that the target being added to the sorted list is already in
      # the unsorted list.
      assert xcode_target in self.project._properties['targets']
      targets.append(xcode_target)
      ordinary_targets.append(xcode_target)
      if xcode_target.support_target:
        support_targets.append(xcode_target.support_target)
        targets.append(xcode_target.support_target)

      if not int(target.get('suppress_wildcard', False)):
        targets_for_all.append(xcode_target)

      if target_name.lower() == 'all':
        has_custom_all = True;

      # If this target has a 'run_as' attribute, add its target to the
      # targets, and add it to the test targets.
      if target.get('run_as'):
        # Make a target to run something. It should have one
        # dependency, the parent xcode target.
        xccl = CreateXCConfigurationList(configurations)
        run_target = gyp.xcodeproj_file.PBXAggregateTarget({
              'name': 'Run ' + target_name,
              'productName': xcode_target.GetProperty('productName'),
              'buildConfigurationList': xccl,
            },
            parent=self.project)
        run_target.AddDependency(xcode_target)

        command = target['run_as']
        script = ''
        if command.get('working_directory'):
          script = script + 'cd "%s"\n' % \
                   gyp.xcodeproj_file.ConvertVariablesToShellSyntax(
                       command.get('working_directory'))

        if command.get('environment'):
          script = script + "\n".join(
            ['export %s="%s"' %
             (key, gyp.xcodeproj_file.ConvertVariablesToShellSyntax(val))
             for (key, val) in command.get('environment').iteritems()]) + "\n"

        # Some test end up using sockets, files on disk, etc. and can get
        # confused if more then one test runs at a time. The generator
        # flag 'xcode_serialize_all_test_runs' controls the forcing of all
        # tests serially. It defaults to True. To get serial runs this
        # little bit of python does the same as the linux flock utility to
        # make sure only one runs at a time.
        command_prefix = ''
        if serialize_all_tests:
          command_prefix = \
"""python -c "import fcntl, subprocess, sys
file = open('$TMPDIR/GYP_serialize_test_runs', 'a')
fcntl.flock(file.fileno(), fcntl.LOCK_EX)
sys.exit(subprocess.call(sys.argv[1:]))" """

        # If we were unable to exec for some reason, we want to exit
        # with an error, and fixup variable references to be shell
        # syntax instead of xcode syntax.
        script = script + 'exec ' + command_prefix + '%s\nexit 1\n' % \
                 gyp.xcodeproj_file.ConvertVariablesToShellSyntax(
                     gyp.common.EncodePOSIXShellList(command.get('action')))

        ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase({
              'shellScript': script,
              'showEnvVarsInLog': 0,
            })
        run_target.AppendProperty('buildPhases', ssbp)

        # Add the run target to the project file.
        targets.append(run_target)
        run_test_targets.append(run_target)
        xcode_target.test_runner = run_target

    # Make sure that the list of targets being replaced is the same length as
    # the one replacing it, but allow for the added test runner targets.
    assert len(self.project._properties['targets']) == \
      len(ordinary_targets) + len(support_targets)

    self.project._properties['targets'] = targets

    # Get rid of unnecessary levels of depth in groups like the Source group.
    self.project.RootGroupsTakeOverOnlyChildren(True)

    # Sort the groups nicely. Do this after sorting the targets, because the
    # Products group is sorted based on the order of the targets.
    self.project.SortGroups()

    # Create an "All" target if there's more than one target in this project
    # file and the project didn't define its own "All" target. Put a generated
    # "All" target first so that people opening up the project for the first
    # time will build everything by default.
    if len(targets_for_all) > 1 and not has_custom_all:
      xccl = CreateXCConfigurationList(configurations)
      all_target = gyp.xcodeproj_file.PBXAggregateTarget(
          {
            'buildConfigurationList': xccl,
            'name': 'All',
          },
          parent=self.project)

      for target in targets_for_all:
        all_target.AddDependency(target)

      # TODO(mark): This is evil because it relies on internal knowledge of
      # PBXProject._properties. It's important to get the "All" target first,
      # though.
      self.project._properties['targets'].insert(0, all_target)

    # The same, but for run_test_targets.
    if len(run_test_targets) > 1:
      xccl = CreateXCConfigurationList(configurations)
      run_all_tests_target = gyp.xcodeproj_file.PBXAggregateTarget(
          {
            'buildConfigurationList': xccl,
            'name': 'Run All Tests',
          },
          parent=self.project)
      for run_test_target in run_test_targets:
        run_all_tests_target.AddDependency(run_test_target)

      # Insert after the "All" target, which must exist if there is more than
      # one run_test_target.
      self.project._properties['targets'].insert(1, run_all_tests_target)

  def Finalize2(self, xcode_targets, xcode_target_to_target_dict):
    """Second finalization pass, run after every project has been sorted by
    Finalize1: create dependent-test runners and refresh cross-project
    references, then assign object IDs."""
    # Finalize2 needs to happen in a separate step because the process of
    # updating references to other projects depends on the ordering of targets
    # within remote project files. Finalize1 is responsible for sorting duty,
    # and once all project files are sorted, Finalize2 can come in and update
    # these references.

    # To support making a "test runner" target that will run all the tests
    # that are direct dependents of any given target, we look for
    # xcode_create_dependents_test_runner being set on an Aggregate target,
    # and generate a second target that will run the tests runners found under
    # the marked target.
    for bf_tgt in self.build_file_dict['targets']:
      if int(bf_tgt.get('xcode_create_dependents_test_runner', 0)):
        tgt_name = bf_tgt['target_name']
        toolset = bf_tgt['toolset']
        qualified_target = gyp.common.QualifiedTarget(self.gyp_path,
                                                      tgt_name, toolset)
        xcode_target = xcode_targets[qualified_target]
        if isinstance(xcode_target, gyp.xcodeproj_file.PBXAggregateTarget):
          # Collect all the run test targets.
          all_run_tests = []
          pbxtds = xcode_target.GetProperty('dependencies')
          for pbxtd in pbxtds:
            pbxcip = pbxtd.GetProperty('targetProxy')
            dependency_xct = pbxcip.GetProperty('remoteGlobalIDString')
            if hasattr(dependency_xct, 'test_runner'):
              all_run_tests.append(dependency_xct.test_runner)

          # Directly depend on all the runners as they depend on the target
          # that builds them.
          if len(all_run_tests) > 0:
            run_all_target = gyp.xcodeproj_file.PBXAggregateTarget({
                  'name': 'Run %s Tests' % tgt_name,
                  'productName': tgt_name,
                },
                parent=self.project)
            for run_test_target in all_run_tests:
              run_all_target.AddDependency(run_test_target)

            # Insert the test runner after the related target.
            idx = self.project._properties['targets'].index(xcode_target)
            self.project._properties['targets'].insert(idx + 1, run_all_target)

    # Update all references to other projects, to make sure that the lists of
    # remote products are complete. Otherwise, Xcode will fill them in when
    # it opens the project file, which will result in unnecessary diffs.
    # TODO(mark): This is evil because it relies on internal knowledge of
    # PBXProject._other_pbxprojects.
    for other_pbxproject in self.project._other_pbxprojects.keys():
      self.project.AddOrGetProjectReference(other_pbxproject)

    self.project.SortRemoteProductReferences()

    # Give everything an ID.
    self.project_file.ComputeIDs()

    # Make sure that no two objects in the project file have the same ID. If
    # multiple objects wind up with the same ID, upon loading the file, Xcode
    # will only recognize one object (the last one in the file?) and the
    # results are unpredictable.
    self.project_file.EnsureNoIDCollisions()

  def Write(self):
    """Serialize the project to <path>/project.pbxproj, replacing an existing
    file only when the contents actually changed."""
    # Write the project file to a temporary location first. Xcode watches for
    # changes to the project file and presents a UI sheet offering to reload
    # the project when it does change. However, in some cases, especially when
    # multiple projects are open or when Xcode is busy, things don't work so
    # seamlessly. Sometimes, Xcode is able to detect that a project file has
    # changed but can't unload it because something else is referencing it.
    # To mitigate this problem, and to avoid even having Xcode present the UI
    # sheet when an open project is rewritten for inconsequential changes, the
    # project file is written to a temporary file in the xcodeproj directory
    # first. The new temporary file is then compared to the existing project
    # file, if any. If they differ, the new file replaces the old; otherwise,
    # the new project file is simply deleted. Xcode properly detects a file
    # being renamed over an open project file as a change and so it remains
    # able to present the "project file changed" sheet under this system.
    # Writing to a temporary file first also avoids the possible problem of
    # Xcode rereading an incomplete project file.
    (output_fd, new_pbxproj_path) = \
        tempfile.mkstemp(suffix='.tmp', prefix='project.pbxproj.gyp.',
                         dir=self.path)

    try:
      output_file = os.fdopen(output_fd, 'wb')
      self.project_file.Print(output_file)
      output_file.close()

      pbxproj_path = os.path.join(self.path, 'project.pbxproj')

      same = False
      try:
        same = filecmp.cmp(pbxproj_path, new_pbxproj_path, False)
      except OSError, e:
        # ENOENT simply means there is no previous project file to compare.
        if e.errno != errno.ENOENT:
          raise

      if same:
        # The new file is identical to the old one, just get rid of the new
        # one.
        os.unlink(new_pbxproj_path)
      else:
        # The new file is different from the old one, or there is no old one.
        # Rename the new file to the permanent name.
        #
        # tempfile.mkstemp uses an overly restrictive mode, resulting in a
        # file that can only be read by the owner, regardless of the umask.
        # There's no reason to not respect the umask here, which means that
        # an extra hoop is required to fetch it and reset the new file's mode.
        #
        # No way to get the umask without setting a new one? Set a safe one
        # and then set it back to the old value.
        umask = os.umask(077)
        os.umask(umask)

        os.chmod(new_pbxproj_path, 0666 & ~umask)
        os.rename(new_pbxproj_path, pbxproj_path)

    except Exception:
      # Don't leave turds behind. In fact, if this code was responsible for
      # creating the xcodeproj directory, get rid of that too.
      os.unlink(new_pbxproj_path)
      if self.created_dir:
        shutil.rmtree(self.path, True)
      raise
# Memoized result of InstalledXcodeVersion(); None means "not probed yet".
cached_xcode_version = None
def InstalledXcodeVersion():
  """Fetches the installed version of Xcode, returns empty string if it is
  unable to figure it out."""

  global cached_xcode_version
  if cached_xcode_version is not None:
    return cached_xcode_version

  # Default to an empty string
  cached_xcode_version = ''

  # Collect the xcodebuild's version information.
  # (subprocess is already imported at module scope; the previous inner
  # import was redundant.)
  try:
    cmd = ['/usr/bin/xcodebuild', '-version']
    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE)
    xcodebuild_version_info = proc.communicate()[0]
    # Any error, return empty string
    if proc.returncode:
      xcodebuild_version_info = ''
  except OSError:
    # We failed to launch the tool
    xcodebuild_version_info = ''

  # Pull out the Xcode version itself.
  match_line = re.search('^Xcode (.*)$', xcodebuild_version_info, re.MULTILINE)
  if match_line:
    cached_xcode_version = match_line.group(1)
  # Done!
  return cached_xcode_version
def AddSourceToTarget(source, type, pbxp, xct):
  """Attach a source file to the appropriate build phase of target xct, or to
  the project's root group when it is neither compiled nor linked."""
  # TODO(mark): Perhaps source_extensions and library_extensions can be made a
  # little bit fancier.
  compile_exts = frozenset(['c', 'cc', 'cpp', 'cxx', 'm', 'mm', 's'])

  # .o is conceptually more of a "source" than a "library," but Xcode thinks
  # of "sources" as things to compile and "libraries" (or "frameworks") as
  # things to link with. Adding an object file to an Xcode target's frameworks
  # phase works properly.
  link_exts = frozenset(['a', 'dylib', 'framework', 'o'])

  ext = posixpath.splitext(posixpath.basename(source))[1]
  if ext:
    ext = ext[1:].lower()

  if type != 'none' and ext in compile_exts:
    xct.SourcesPhase().AddFile(source)
  elif type != 'none' and ext in link_exts:
    xct.FrameworksPhase().AddFile(source)
  else:
    # Files that aren't added to a sources or frameworks build phase can still
    # go into the project file, just not as part of a build phase.
    pbxp.AddOrGetFileInRootGroup(source)
def AddResourceToTarget(resource, pbxp, xct):
  """Add a bundle resource to the resources build phase of target xct."""
  # TODO(mark): Combine with AddSourceToTarget above? Or just inline this call
  # where it's used. pbxp is accepted only for signature symmetry.
  resources_phase = xct.ResourcesPhase()
  resources_phase.AddFile(resource)
def AddHeaderToTarget(header, pbxp, xct, is_public):
  """Add a header to the headers build phase of target xct, marked Public or
  Private according to is_public."""
  # TODO(mark): Combine with AddSourceToTarget above? Or just inline this call
  # where it's used.
  visibility = 'Public' if is_public else 'Private'
  settings = '{ATTRIBUTES = (%s, ); }' % visibility
  xct.HeadersPhase().AddFile(header, settings)
_xcode_variable_re = re.compile(r'(\$\((.*?)\))')
def ExpandXcodeVariables(string, expansions):
  """Expands Xcode-style $(VARIABLES) in string per the expansions dict.

  In some rare cases, it is appropriate to expand Xcode variables when a
  project file is generated. For any substring $(VAR) in string, if VAR is a
  key in the expansions dict, $(VAR) will be replaced with expansions[VAR].
  Any $(VAR) substring in string for which VAR is not a key in the expansions
  dict will remain in the returned string.
  """
  matches = _xcode_variable_re.findall(string)
  # (findall always returns a list, so the old `matches == None` check was
  # dead code and has been dropped.)
  matches.reverse()
  for match in matches:
    (to_replace, variable) = match
    if variable not in expansions:
      continue
    replacement = expansions[variable]
    # Use str.replace rather than re.sub: re.sub interprets backslash escape
    # sequences in the replacement text, which corrupts (or raises on)
    # expansions containing backslashes.
    string = string.replace(to_replace, replacement)
  return string
def EscapeXCodeArgument(s):
  """We must escape the arguments that we give to XCode so that it knows not to
  split on spaces and to respect backslash and quote literals."""
  escaped = s.replace('\\', '\\\\').replace('"', '\\"')
  return '"%s"' % escaped
def PerformBuild(data, configurations, params):
  """Run xcodebuild on every generated .xcodeproj, once per configuration.

  Invoked by the gyp core when a build is requested after generation; the
  project path logic mirrors GenerateOutput so the projects are found where
  they were written.
  """
  options = params['options']

  for build_file, build_file_dict in data.iteritems():
    (build_file_root, build_file_ext) = os.path.splitext(build_file)
    # Only top-level .gyp files produce project bundles.
    if build_file_ext != '.gyp':
      continue
    xcodeproj_path = build_file_root + options.suffix + '.xcodeproj'
    if options.generator_output:
      xcodeproj_path = os.path.join(options.generator_output, xcodeproj_path)

    for config in configurations:
      arguments = ['xcodebuild', '-project', xcodeproj_path]
      arguments += ['-configuration', config]
      print "Building [%s]: %s" % (config, arguments)
      subprocess.check_call(arguments)
def GenerateOutput(target_list, target_dicts, data, params):
options = params['options']
generator_flags = params.get('generator_flags', {})
parallel_builds = generator_flags.get('xcode_parallel_builds', True)
serialize_all_tests = \
generator_flags.get('xcode_serialize_all_test_runs', True)
project_version = generator_flags.get('xcode_project_version', None)
skip_excluded_files = \
not generator_flags.get('xcode_list_excluded_files', True)
xcode_projects = {}
for build_file, build_file_dict in data.iteritems():
(build_file_root, build_file_ext) = os.path.splitext(build_file)
if build_file_ext != '.gyp':
continue
xcodeproj_path = build_file_root + options.suffix + '.xcodeproj'
if options.generator_output:
xcodeproj_path = os.path.join(options.generator_output, xcodeproj_path)
xcp = XcodeProject(build_file, xcodeproj_path, build_file_dict)
xcode_projects[build_file] = xcp
pbxp = xcp.project
if parallel_builds:
pbxp.SetProperty('attributes',
{'BuildIndependentTargetsInParallel': 'YES'})
if project_version:
xcp.project_file.SetXcodeVersion(project_version)
# Add gyp/gypi files to project
if not generator_flags.get('standalone'):
main_group = pbxp.GetProperty('mainGroup')
build_group = gyp.xcodeproj_file.PBXGroup({'name': 'Build'})
main_group.AppendChild(build_group)
for included_file in build_file_dict['included_files']:
build_group.AddOrGetFileByPath(included_file, False)
xcode_targets = {}
xcode_target_to_target_dict = {}
for qualified_target in target_list:
[build_file, target_name, toolset] = \
gyp.common.ParseQualifiedTarget(qualified_target)
spec = target_dicts[qualified_target]
if spec['toolset'] != 'target':
raise Exception(
'Multiple toolsets not supported in xcode build (target %s)' %
qualified_target)
configuration_names = [spec['default_configuration']]
for configuration_name in sorted(spec['configurations'].keys()):
if configuration_name not in configuration_names:
configuration_names.append(configuration_name)
xcp = xcode_projects[build_file]
pbxp = xcp.project
# Set up the configurations for the target according to the list of names
# supplied.
xccl = CreateXCConfigurationList(configuration_names)
# Create an XCTarget subclass object for the target. The type with
# "+bundle" appended will be used if the target has "mac_bundle" set.
# loadable_modules not in a mac_bundle are mapped to
# com.googlecode.gyp.xcode.bundle, a pseudo-type that xcode.py interprets
# to create a single-file mh_bundle.
_types = {
'executable': 'com.apple.product-type.tool',
'loadable_module': 'com.googlecode.gyp.xcode.bundle',
'shared_library': 'com.apple.product-type.library.dynamic',
'static_library': 'com.apple.product-type.library.static',
'executable+bundle': 'com.apple.product-type.application',
'loadable_module+bundle': 'com.apple.product-type.bundle',
'shared_library+bundle': 'com.apple.product-type.framework',
}
target_properties = {
'buildConfigurationList': xccl,
'name': target_name,
}
type = spec['type']
is_bundle = int(spec.get('mac_bundle', 0))
if type != 'none':
type_bundle_key = type
if is_bundle:
type_bundle_key += '+bundle'
xctarget_type = gyp.xcodeproj_file.PBXNativeTarget
try:
target_properties['productType'] = _types[type_bundle_key]
except KeyError, e:
gyp.common.ExceptionAppend(e, "-- unknown product type while "
"writing target %s" % target_name)
raise
else:
xctarget_type = gyp.xcodeproj_file.PBXAggregateTarget
assert not is_bundle, (
'mac_bundle targets cannot have type none (target "%s")' %
target_name)
target_product_name = spec.get('product_name')
if target_product_name is not None:
target_properties['productName'] = target_product_name
xct = xctarget_type(target_properties, parent=pbxp,
force_outdir=spec.get('product_dir'),
force_prefix=spec.get('product_prefix'),
force_extension=spec.get('product_extension'))
pbxp.AppendProperty('targets', xct)
xcode_targets[qualified_target] = xct
xcode_target_to_target_dict[xct] = spec
spec_actions = spec.get('actions', [])
spec_rules = spec.get('rules', [])
# Xcode has some "issues" with checking dependencies for the "Compile
# sources" step with any source files/headers generated by actions/rules.
# To work around this, if a target is building anything directly (not
# type "none"), then a second target is used to run the GYP actions/rules
# and is made a dependency of this target. This way the work is done
# before the dependency checks for what should be recompiled.
support_xct = None
if type != 'none' and (spec_actions or spec_rules):
support_xccl = CreateXCConfigurationList(configuration_names);
support_target_properties = {
'buildConfigurationList': support_xccl,
'name': target_name + ' Support',
}
if target_product_name:
support_target_properties['productName'] = \
target_product_name + ' Support'
support_xct = \
gyp.xcodeproj_file.PBXAggregateTarget(support_target_properties,
parent=pbxp)
pbxp.AppendProperty('targets', support_xct)
xct.AddDependency(support_xct)
# Hang the support target off the main target so it can be tested/found
# by the generator during Finalize.
xct.support_target = support_xct
prebuild_index = 0
# Add custom shell script phases for "actions" sections.
for action in spec_actions:
# There's no need to write anything into the script to ensure that the
# output directories already exist, because Xcode will look at the
# declared outputs and automatically ensure that they exist for us.
# Do we have a message to print when this action runs?
message = action.get('message')
if message:
message = 'echo note: ' + gyp.common.EncodePOSIXShellArgument(message)
else:
message = ''
# Turn the list into a string that can be passed to a shell.
action_string = gyp.common.EncodePOSIXShellList(action['action'])
# Convert Xcode-type variable references to sh-compatible environment
# variable references.
message_sh = gyp.xcodeproj_file.ConvertVariablesToShellSyntax(message)
action_string_sh = gyp.xcodeproj_file.ConvertVariablesToShellSyntax(
action_string)
script = ''
# Include the optional message
if message_sh:
script += message_sh + '\n'
# Be sure the script runs in exec, and that if exec fails, the script
# exits signalling an error.
script += 'exec ' + action_string_sh + '\nexit 1\n'
ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase({
'inputPaths': action['inputs'],
'name': 'Action "' + action['action_name'] + '"',
'outputPaths': action['outputs'],
'shellScript': script,
'showEnvVarsInLog': 0,
})
if support_xct:
support_xct.AppendProperty('buildPhases', ssbp)
else:
# TODO(mark): this assumes too much knowledge of the internals of
# xcodeproj_file; some of these smarts should move into xcodeproj_file
# itself.
xct._properties['buildPhases'].insert(prebuild_index, ssbp)
prebuild_index = prebuild_index + 1
# TODO(mark): Should verify that at most one of these is specified.
if int(action.get('process_outputs_as_sources', False)):
for output in action['outputs']:
AddSourceToTarget(output, type, pbxp, xct)
if int(action.get('process_outputs_as_mac_bundle_resources', False)):
for output in action['outputs']:
AddResourceToTarget(output, pbxp, xct)
# tgt_mac_bundle_resources holds the list of bundle resources so
# the rule processing can check against it.
if is_bundle:
tgt_mac_bundle_resources = spec.get('mac_bundle_resources', [])
else:
tgt_mac_bundle_resources = []
# Add custom shell script phases driving "make" for "rules" sections.
#
# Xcode's built-in rule support is almost powerful enough to use directly,
# but there are a few significant deficiencies that render them unusable.
# There are workarounds for some of its inadequacies, but in aggregate,
# the workarounds added complexity to the generator, and some workarounds
# actually require input files to be crafted more carefully than I'd like.
# Consequently, until Xcode rules are made more capable, "rules" input
# sections will be handled in Xcode output by shell script build phases
# performed prior to the compilation phase.
#
# The following problems with Xcode rules were found. The numbers are
# Apple radar IDs. I hope that these shortcomings are addressed, I really
# liked having the rules handled directly in Xcode during the period that
# I was prototyping this.
#
# 6588600 Xcode compiles custom script rule outputs too soon, compilation
# fails. This occurs when rule outputs from distinct inputs are
# interdependent. The only workaround is to put rules and their
# inputs in a separate target from the one that compiles the rule
# outputs. This requires input file cooperation and it means that
# process_outputs_as_sources is unusable.
# 6584932 Need to declare that custom rule outputs should be excluded from
# compilation. A possible workaround is to lie to Xcode about a
# rule's output, giving it a dummy file it doesn't know how to
# compile. The rule action script would need to touch the dummy.
# 6584839 I need a way to declare additional inputs to a custom rule.
# A possible workaround is a shell script phase prior to
# compilation that touches a rule's primary input files if any
# would-be additional inputs are newer than the output. Modifying
# the source tree - even just modification times - feels dirty.
# 6564240 Xcode "custom script" build rules always dump all environment
# variables. This is a low-prioroty problem and is not a
# show-stopper.
rules_by_ext = {}
for rule in spec_rules:
rules_by_ext[rule['extension']] = rule
# First, some definitions:
#
# A "rule source" is a file that was listed in a target's "sources"
# list and will have a rule applied to it on the basis of matching the
# rule's "extensions" attribute. Rule sources are direct inputs to
# rules.
#
# Rule definitions may specify additional inputs in their "inputs"
# attribute. These additional inputs are used for dependency tracking
# purposes.
#
# A "concrete output" is a rule output with input-dependent variables
# resolved. For example, given a rule with:
# 'extension': 'ext', 'outputs': ['$(INPUT_FILE_BASE).cc'],
# if the target's "sources" list contained "one.ext" and "two.ext",
# the "concrete output" for rule input "two.ext" would be "two.cc". If
# a rule specifies multiple outputs, each input file that the rule is
# applied to will have the same number of concrete outputs.
#
# If any concrete outputs are outdated or missing relative to their
# corresponding rule_source or to any specified additional input, the
# rule action must be performed to generate the concrete outputs.
# concrete_outputs_by_rule_source will have an item at the same index
# as the rule['rule_sources'] that it corresponds to. Each item is a
# list of all of the concrete outputs for the rule_source.
concrete_outputs_by_rule_source = []
# concrete_outputs_all is a flat list of all concrete outputs that this
# rule is able to produce, given the known set of input files
# (rule_sources) that apply to it.
concrete_outputs_all = []
# messages & actions are keyed by the same indices as rule['rule_sources']
# and concrete_outputs_by_rule_source. They contain the message and
# action to perform after resolving input-dependent variables. The
# message is optional, in which case None is stored for each rule source.
messages = []
actions = []
for rule_source in rule.get('rule_sources', []):
rule_source_dirname, rule_source_basename = \
posixpath.split(rule_source)
(rule_source_root, rule_source_ext) = \
posixpath.splitext(rule_source_basename)
# These are the same variable names that Xcode uses for its own native
# rule support. Because Xcode's rule engine is not being used, they
# need to be expanded as they are written to the makefile.
rule_input_dict = {
'INPUT_FILE_BASE': rule_source_root,
'INPUT_FILE_SUFFIX': rule_source_ext,
'INPUT_FILE_NAME': rule_source_basename,
'INPUT_FILE_PATH': rule_source,
'INPUT_FILE_DIRNAME': rule_source_dirname,
}
concrete_outputs_for_this_rule_source = []
for output in rule.get('outputs', []):
# Fortunately, Xcode and make both use $(VAR) format for their
# variables, so the expansion is the only transformation necessary.
# Any remaning $(VAR)-type variables in the string can be given
# directly to make, which will pick up the correct settings from
# what Xcode puts into the environment.
concrete_output = ExpandXcodeVariables(output, rule_input_dict)
concrete_outputs_for_this_rule_source.append(concrete_output)
# Add all concrete outputs to the project.
pbxp.AddOrGetFileInRootGroup(concrete_output)
concrete_outputs_by_rule_source.append( \
concrete_outputs_for_this_rule_source)
concrete_outputs_all.extend(concrete_outputs_for_this_rule_source)
# TODO(mark): Should verify that at most one of these is specified.
if int(rule.get('process_outputs_as_sources', False)):
for output in concrete_outputs_for_this_rule_source:
AddSourceToTarget(output, type, pbxp, xct)
# If the file came from the mac_bundle_resources list or if the rule
# is marked to process outputs as bundle resource, do so.
was_mac_bundle_resource = rule_source in tgt_mac_bundle_resources
if was_mac_bundle_resource or \
int(rule.get('process_outputs_as_mac_bundle_resources', False)):
for output in concrete_outputs_for_this_rule_source:
AddResourceToTarget(output, pbxp, xct)
# Do we have a message to print when this rule runs?
message = rule.get('message')
if message:
message = gyp.common.EncodePOSIXShellArgument(message)
message = ExpandXcodeVariables(message, rule_input_dict)
messages.append(message)
# Turn the list into a string that can be passed to a shell.
action_string = gyp.common.EncodePOSIXShellList(rule['action'])
action = ExpandXcodeVariables(action_string, rule_input_dict)
actions.append(action)
if len(concrete_outputs_all) > 0:
# TODO(mark): There's a possibilty for collision here. Consider
# target "t" rule "A_r" and target "t_A" rule "r".
makefile_name = '%s.make' % re.sub(
'[^a-zA-Z0-9_]', '_' , '%s_%s' % (target_name, rule['rule_name']))
makefile_path = os.path.join(xcode_projects[build_file].path,
makefile_name)
# TODO(mark): try/close? Write to a temporary file and swap it only
# if it's got changes?
makefile = open(makefile_path, 'wb')
# make will build the first target in the makefile by default. By
# convention, it's called "all". List all (or at least one)
# concrete output for each rule source as a prerequisite of the "all"
# target.
makefile.write('all: \\\n')
for concrete_output_index in \
xrange(0, len(concrete_outputs_by_rule_source)):
# Only list the first (index [0]) concrete output of each input
# in the "all" target. Otherwise, a parallel make (-j > 1) would
# attempt to process each input multiple times simultaneously.
# Otherwise, "all" could just contain the entire list of
# concrete_outputs_all.
concrete_output = \
concrete_outputs_by_rule_source[concrete_output_index][0]
if concrete_output_index == len(concrete_outputs_by_rule_source) - 1:
eol = ''
else:
eol = ' \\'
makefile.write(' %s%s\n' % (concrete_output, eol))
for (rule_source, concrete_outputs, message, action) in \
zip(rule['rule_sources'], concrete_outputs_by_rule_source,
messages, actions):
makefile.write('\n')
# Add a rule that declares it can build each concrete output of a
# rule source. Collect the names of the directories that are
# required.
concrete_output_dirs = []
for concrete_output_index in xrange(0, len(concrete_outputs)):
concrete_output = concrete_outputs[concrete_output_index]
if concrete_output_index == 0:
bol = ''
else:
bol = ' '
makefile.write('%s%s \\\n' % (bol, concrete_output))
concrete_output_dir = posixpath.dirname(concrete_output)
if (concrete_output_dir and
concrete_output_dir not in concrete_output_dirs):
concrete_output_dirs.append(concrete_output_dir)
makefile.write(' : \\\n')
# The prerequisites for this rule are the rule source itself and
# the set of additional rule inputs, if any.
prerequisites = [rule_source]
prerequisites.extend(rule.get('inputs', []))
for prerequisite_index in xrange(0, len(prerequisites)):
prerequisite = prerequisites[prerequisite_index]
if prerequisite_index == len(prerequisites) - 1:
eol = ''
else:
eol = ' \\'
makefile.write(' %s%s\n' % (prerequisite, eol))
# Make sure that output directories exist before executing the rule
# action.
if len(concrete_output_dirs) > 0:
makefile.write('\t@mkdir -p "%s"\n' %
'" "'.join(concrete_output_dirs))
# The rule message and action have already had the necessary variable
# substitutions performed.
if message:
# Mark it with note: so Xcode picks it up in build output.
makefile.write('\t@echo note: %s\n' % message)
makefile.write('\t%s\n' % action)
makefile.close()
# It might be nice to ensure that needed output directories exist
# here rather than in each target in the Makefile, but that wouldn't
# work if there ever was a concrete output that had an input-dependent
# variable anywhere other than in the leaf position.
# Don't declare any inputPaths or outputPaths. If they're present,
# Xcode will provide a slight optimization by only running the script
# phase if any output is missing or outdated relative to any input.
# Unfortunately, it will also assume that all outputs are touched by
# the script, and if the outputs serve as files in a compilation
# phase, they will be unconditionally rebuilt. Since make might not
# rebuild everything that could be declared here as an output, this
# extra compilation activity is unnecessary. With inputPaths and
# outputPaths not supplied, make will always be called, but it knows
# enough to not do anything when everything is up-to-date.
# To help speed things up, pass -j COUNT to make so it does some work
# in parallel. Don't use ncpus because Xcode will build ncpus targets
# in parallel and if each target happens to have a rules step, there
# would be ncpus^2 things going. With a machine that has 2 quad-core
# Xeons, a build can quickly run out of processes based on
# scheduling/other tasks, and randomly failing builds are no good.
script = \
"""JOB_COUNT="$(/usr/sbin/sysctl -n hw.ncpu)"
if [ "${JOB_COUNT}" -gt 4 ]; then
JOB_COUNT=4
fi
exec "${DEVELOPER_BIN_DIR}/make" -f "${PROJECT_FILE_PATH}/%s" -j "${JOB_COUNT}"
exit 1
""" % makefile_name
ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase({
'name': 'Rule "' + rule['rule_name'] + '"',
'shellScript': script,
'showEnvVarsInLog': 0,
})
if support_xct:
support_xct.AppendProperty('buildPhases', ssbp)
else:
# TODO(mark): this assumes too much knowledge of the internals of
# xcodeproj_file; some of these smarts should move into xcodeproj_file
# itself.
xct._properties['buildPhases'].insert(prebuild_index, ssbp)
prebuild_index = prebuild_index + 1
# Extra rule inputs also go into the project file. Concrete outputs were
# already added when they were computed.
groups = ['inputs', 'inputs_excluded']
if skip_excluded_files:
groups = [x for x in groups if not x.endswith('_excluded')]
for group in groups:
for item in rule.get(group, []):
pbxp.AddOrGetFileInRootGroup(item)
# Add "sources".
for source in spec.get('sources', []):
(source_root, source_extension) = posixpath.splitext(source)
if source_extension[1:] not in rules_by_ext:
# AddSourceToTarget will add the file to a root group if it's not
# already there.
AddSourceToTarget(source, type, pbxp, xct)
else:
pbxp.AddOrGetFileInRootGroup(source)
# Add "mac_bundle_resources" and "mac_framework_private_headers" if
# it's a bundle of any type.
if is_bundle:
for resource in tgt_mac_bundle_resources:
(resource_root, resource_extension) = posixpath.splitext(resource)
if resource_extension[1:] not in rules_by_ext:
AddResourceToTarget(resource, pbxp, xct)
else:
pbxp.AddOrGetFileInRootGroup(resource)
for header in spec.get('mac_framework_private_headers', []):
AddHeaderToTarget(header, pbxp, xct, False)
# Add "mac_framework_headers". These can be valid for both frameworks
# and static libraries.
if is_bundle or type == 'static_library':
for header in spec.get('mac_framework_headers', []):
AddHeaderToTarget(header, pbxp, xct, True)
# Add "copies".
pbxcp_dict = {}
for copy_group in spec.get('copies', []):
dest = copy_group['destination']
if dest[0] not in ('/', '$'):
# Relative paths are relative to $(SRCROOT).
dest = '$(SRCROOT)/' + dest
# Coalesce multiple "copies" sections in the same target with the same
# "destination" property into the same PBXCopyFilesBuildPhase, otherwise
# they'll wind up with ID collisions.
pbxcp = pbxcp_dict.get(dest, None)
if pbxcp is None:
pbxcp = gyp.xcodeproj_file.PBXCopyFilesBuildPhase({
'name': 'Copy to ' + copy_group['destination']
},
parent=xct)
pbxcp.SetDestination(dest)
# TODO(mark): The usual comment about this knowing too much about
# gyp.xcodeproj_file internals applies.
xct._properties['buildPhases'].insert(prebuild_index, pbxcp)
pbxcp_dict[dest] = pbxcp
for file in copy_group['files']:
pbxcp.AddFile(file)
# Excluded files can also go into the project file.
if not skip_excluded_files:
for key in ['sources', 'mac_bundle_resources', 'mac_framework_headers',
'mac_framework_private_headers']:
excluded_key = key + '_excluded'
for item in spec.get(excluded_key, []):
pbxp.AddOrGetFileInRootGroup(item)
# So can "inputs" and "outputs" sections of "actions" groups.
groups = ['inputs', 'inputs_excluded', 'outputs', 'outputs_excluded']
if skip_excluded_files:
groups = [x for x in groups if not x.endswith('_excluded')]
for action in spec.get('actions', []):
for group in groups:
for item in action.get(group, []):
# Exclude anything in BUILT_PRODUCTS_DIR. They're products, not
# sources.
if not item.startswith('$(BUILT_PRODUCTS_DIR)/'):
pbxp.AddOrGetFileInRootGroup(item)
for postbuild in spec.get('postbuilds', []):
action_string_sh = gyp.common.EncodePOSIXShellList(postbuild['action'])
script = 'exec ' + action_string_sh + '\nexit 1\n'
# Make the postbuild step depend on the output of ld or ar from this
# target. Apparently putting the script step after the link step isn't
# sufficient to ensure proper ordering in all cases. With an input
# declared but no outputs, the script step should run every time, as
# desired.
ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase({
'inputPaths': ['$(BUILT_PRODUCTS_DIR)/$(EXECUTABLE_PATH)'],
'name': 'Postbuild "' + postbuild['postbuild_name'] + '"',
'shellScript': script,
'showEnvVarsInLog': 0,
})
xct.AppendProperty('buildPhases', ssbp)
# Add dependencies before libraries, because adding a dependency may imply
# adding a library. It's preferable to keep dependencies listed first
# during a link phase so that they can override symbols that would
# otherwise be provided by libraries, which will usually include system
# libraries. On some systems, ld is finicky and even requires the
# libraries to be ordered in such a way that unresolved symbols in
# earlier-listed libraries may only be resolved by later-listed libraries.
# The Mac linker doesn't work that way, but other platforms do, and so
# their linker invocations need to be constructed in this way. There's
# no compelling reason for Xcode's linker invocations to differ.
if 'dependencies' in spec:
for dependency in spec['dependencies']:
xct.AddDependency(xcode_targets[dependency])
# The support project also gets the dependencies (in case they are
# needed for the actions/rules to work).
if support_xct:
support_xct.AddDependency(xcode_targets[dependency])
if 'libraries' in spec:
for library in spec['libraries']:
xct.FrameworksPhase().AddFile(library)
# Add the library's directory to LIBRARY_SEARCH_PATHS if necessary.
# I wish Xcode handled this automatically.
library_dir = posixpath.dirname(library)
if library_dir not in xcode_standard_library_dirs and (
not xct.HasBuildSetting(_library_search_paths_var) or
library_dir not in xct.GetBuildSetting(_library_search_paths_var)):
xct.AppendBuildSetting(_library_search_paths_var, library_dir)
for configuration_name in configuration_names:
configuration = spec['configurations'][configuration_name]
xcbc = xct.ConfigurationNamed(configuration_name)
for include_dir in configuration.get('mac_framework_dirs', []):
xcbc.AppendBuildSetting('FRAMEWORK_SEARCH_PATHS', include_dir)
for include_dir in configuration.get('include_dirs', []):
xcbc.AppendBuildSetting('HEADER_SEARCH_PATHS', include_dir)
if 'defines' in configuration:
for define in configuration['defines']:
set_define = EscapeXCodeArgument(define)
xcbc.AppendBuildSetting('GCC_PREPROCESSOR_DEFINITIONS', set_define)
if 'xcode_settings' in configuration:
for xck, xcv in configuration['xcode_settings'].iteritems():
xcbc.SetBuildSetting(xck, xcv)
if 'xcode_config_file' in configuration:
config_ref = pbxp.AddOrGetFileInRootGroup(
configuration['xcode_config_file'])
xcbc.SetBaseConfiguration(config_ref)
build_files = []
for build_file, build_file_dict in data.iteritems():
if build_file.endswith('.gyp'):
build_files.append(build_file)
for build_file in build_files:
xcode_projects[build_file].Finalize1(xcode_targets, serialize_all_tests)
for build_file in build_files:
xcode_projects[build_file].Finalize2(xcode_targets,
xcode_target_to_target_dict)
for build_file in build_files:
xcode_projects[build_file].Write()
| mit |
cyanogen/uchroma | uchroma/traits.py | 1 | 11759 | #
# uchroma - Copyright (C) 2021 Stefanie Kondik
#
# This program is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, version 3.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
# License for more details.
#
# pylint: disable=protected-access, invalid-name, no-member
import enum
import importlib
import sys
from argparse import ArgumentParser
from typing import Iterable
from traitlets import CaselessStrEnum, Container, Dict, Enum, Int, HasTraits, \
List, TraitType, Undefined, UseEnum
from frozendict import frozendict
from uchroma.color import to_color
from uchroma.util import ArgsDict
class ColorTrait(TraitType):
    """
    A traitlet which encapsulates a grapefruit.Color and performs
    type coercion as needed.

    Accepts anything ``to_color`` can coerce (color names, hex strings,
    tuples, existing Color instances — TODO confirm full input set
    against uchroma.color.to_color).
    """

    info_text = "a color"
    allow_none = True
    default_value = 'black'

    def __init__(self, *args, **kwargs):
        super(ColorTrait, self).__init__(*args, **kwargs)

    def validate(self, obj, value):
        """Coerce *value* to a Color; report a trait error on failure."""
        try:
            if value is not None:
                value = to_color(value)
        except Exception:
            # Was a bare "except:", which would also swallow
            # KeyboardInterrupt/SystemExit. Only genuine coercion
            # failures should be turned into trait errors.
            self.error(obj, value)
        return value
class ColorSchemeTrait(List):
    """
    A list trait whose elements are ColorTraits, together forming a
    color scheme.
    """

    info_text = 'a list of colors'

    def __init__(self, trait=ColorTrait(), default_value=(),
                 minlen=0, maxlen=sys.maxsize, **kwargs):
        super().__init__(trait=trait, default_value=default_value,
                         minlen=minlen, maxlen=maxlen, **kwargs)
class ColorPresetTrait(UseEnum):
    """
    A trait representing a family of predefined color schemes,
    expressed as a Python Enum.
    """

    info_text = 'a predefined color scheme'

    def __init__(self, enum_class, default_value=None, **kwargs):
        super().__init__(enum_class, default_value=default_value, **kwargs)
class WriteOnceMixin:
    """
    Mixin making a trait immutable: once a value other than the
    default has been stored, further assignments are rejected.
    """

    write_once = True

    def validate(self, obj, value):
        # Permit the write only while the trait is unset or still
        # holds its default value.
        current = obj._trait_values
        if self.name not in current or \
                current[self.name] == self.default_value:
            return super().validate(obj, value)

        self.error(obj, value)
class WriteOnceInt(WriteOnceMixin, Int):
    """
    An Int trait that may only be assigned a single time.
    """
class FrozenDict(WriteOnceMixin, Dict):
    """
    A write-once Dict trait whose value is frozen into a frozendict
    when it is first assigned.
    """

    def validate(self, obj, value):
        validated = super().validate(obj, value)
        return frozendict(validated)
class UseEnumCaseless(UseEnum):
    """
    Subclass of UseEnum which allows selection of values using
    case insensitive strings
    """

    def select_by_name(self, value, default=Undefined):
        """
        Look up an enum member by name, ignoring case.

        :param value: member name, optionally prefixed with "EnumName."
        :param default: value to return when no member matches
        :return: the matching enum member, or *default* on a miss
        """
        if value.startswith(self.name_prefix):
            # -- SUPPORT SCOPED-NAMES, like: "Color.red" => "red"
            value = value.replace(self.name_prefix, "", 1)

        names = list(self.enum_class.__members__.keys())
        keys = [x.lower() for x in names]
        try:
            idx = keys.index(value.lower())
        except ValueError:
            # BUG FIX: list.index raises ValueError rather than
            # returning -1, so the previous "if idx < 0" check was
            # dead code and unknown names leaked ValueError to the
            # caller instead of yielding the default.
            return default
        return self.enum_class[names[idx]]
class WriteOnceUseEnumCaseless(WriteOnceMixin, UseEnumCaseless):
    """
    A case-insensitive enum trait that may only be assigned once.
    """
class DefaultCaselessStrEnum(CaselessStrEnum):
    """
    CaselessStrEnum variant that substitutes its default value when
    handed None or an empty string.
    """

    def validate(self, obj, value):
        missing = value is None or value == ''
        if missing and self.default_value:
            value = self.default_value

        return super().validate(obj, value)
def is_trait_writable(trait: TraitType) -> bool:
    """
    Test if a trait is writable

    A trait is writable unless it is read-only or flagged as
    write-once (see WriteOnceMixin).

    :param trait: the trait to be tested
    :return: True if the trait is writable
    """
    read_only = trait.read_only
    write_once = getattr(trait, 'write_once', False)
    return not (read_only or write_once)
def trait_as_dict(trait: TraitType) -> dict:
    """
    Convert a trait to a dict for sending over D-Bus or the like

    The inverse operation is dict_as_trait(). Instance attributes are
    copied with any single leading underscore stripped; the trait's
    class is recorded under '__class__' as a (module, name) tuple.

    :param trait: the trait to be converted
    :return: dict representing this trait
    """
    cls = trait.__class__
    tdict = {}
    for k, v in vars(trait).items():
        # Skip dunder/bookkeeping attributes that are not configuration.
        if k.startswith('__') or k == 'this_class':
            continue
        # Skip values that merely repeat the class-level default.
        if hasattr(cls, k) and getattr(cls, k) == v:
            continue
        # Skip empty containers (assumes sized iterables; a bare
        # generator here would break len() -- TODO confirm inputs).
        if isinstance(v, Iterable) and len(v) == 0:
            continue
        # Strip traitlets' private-storage underscore so the wire
        # format uses public attribute names.
        if k.startswith('_'):
            tdict[k[1:]] = v
        else:
            tdict[k] = v
    if isinstance(trait, UseEnum):
        # Enum classes can't cross the bus; masquerade as a
        # CaselessStrEnum carrying the member names instead.
        cls = CaselessStrEnum
        tdict['values'] = tuple(trait.enum_class.__members__.keys())
        if 'enum_class' in tdict:
            del tdict['enum_class']
    # Flatten remaining non-primitive values into serializable forms:
    # nested traits recurse, enum members become names, classes become
    # dotted-path strings.
    for k, v in tdict.items():
        if isinstance(v, TraitType):
            tdict[k] = trait_as_dict(v)
        if isinstance(v, enum.Enum):
            tdict[k] = v.name
        if isinstance(v, type):
            tdict[k] = '%s.%s' % (v.__module__, v.__name__)
    tdict['__class__'] = (cls.__module__, cls.__name__)
    return tdict
def class_traits_as_dict(obj: HasTraits, values: dict=None) -> dict:
    """
    Create a dict which represents all traits of the given object.
    This dict itself can be inspected in a generic API, or it
    may be converted back to a (stub) instance of HasTraits. This
    facilitates the sending of configurable object properties over
    an interface such as D-Bus.

    :param obj: an instance of HasTraits, a HasTraits subclass, or a
        dict of traits
    :param values: optional dict of trait values (pulled from obj when
        obj is a HasTraits instance)
    :return: dict representing all traits in obj
    """
    cls_dt = {}
    # Accept three shapes of input: a HasTraits subclass, a plain dict
    # of traits, or a HasTraits instance (which also supplies values).
    if isinstance(obj, type) and hasattr(obj, 'class_traits'):
        traits = obj.class_traits()
    elif isinstance(obj, dict):
        traits = obj
    elif isinstance(obj, HasTraits):
        traits = obj.traits()
        values = obj._trait_values
    else:
        raise TypeError("Object does not support traits")
    for k, v in traits.items():
        dt = trait_as_dict(v)
        if dt is None:
            continue
        # Embed the current value alongside the trait description so
        # the receiving side can reconstruct state, not just schema.
        if values is not None and k in values:
            dt['__value__'] = values[k]
        cls_dt[k] = dt
    return cls_dt
def dict_as_trait(obj: dict) -> TraitType:
    """
    Create a trait from a dict (trait_as_dict).

    :param obj: dict previously produced by trait_as_dict()
    :return: a reconstructed TraitType instance
    :raises ValueError: if the dict has no '__class__' entry
    :raises TypeError: if the named class can't be found in its module
    """
    if '__class__' not in obj:
        raise ValueError("No module and class attribute present")
    # Work on a copy so the caller's dict is not consumed by pop().
    tobj = obj.copy()
    module_name, trait_class = tobj.pop('__class__')
    module = importlib.import_module(module_name)
    if not hasattr(module, trait_class):
        raise TypeError("Unknown class: %s" % trait_class)
    cls = getattr(module, trait_class)
    # Nested container traits (e.g. List) carry their element trait
    # under 'trait'; rebuild it recursively.
    if 'trait' in tobj:
        tobj['trait'] = dict_as_trait(tobj.pop('trait'))
    metadata = {}
    if 'metadata' in tobj:
        metadata.update(tobj.pop('metadata'))
    # UseEnum traits were flattened to a values list by trait_as_dict,
    # so enum-style classes take the values as the first argument.
    if issubclass(cls, Enum):
        trait = cls(tobj.pop('values'), **tobj)
    else:
        trait = cls(**tobj)
    # A few metadata keys are real attributes on the trait object
    # rather than entries in trait.metadata; move them over.
    for k in list(metadata.keys()):
        if k in ('name', 'default_args', 'default_kwargs'):
            setattr(trait, k, metadata.pop(k))
    trait.metadata = metadata
    return trait
def dict_as_class_traits(obj: dict) -> HasTraits:
    """
    Convert a dict of unpacked traits to a HasTraits instance.

    Useful for remote parameter inspection and validation.

    :param obj: dict of unpacked traits (see class_traits_as_dict)
    :return: the stub HasTraits instance
    :raises TypeError: if obj is not a dict
    """
    if not isinstance(obj, dict):
        raise TypeError("Object must be a dict (was: %s)" % obj)
    traits = {}
    values = {}
    for k, v in obj.items():
        # NOTE(review): pop() mutates the caller's nested dicts --
        # '__value__' entries are removed in place.
        if '__value__' in v:
            values[k] = v.pop('__value__')
        trait = dict_as_trait(v)
        if trait is None:
            continue
        traits[k] = trait
    cls = HasTraits()
    cls.add_traits(**traits)
    # Assign values after add_traits so each one passes validation.
    for k, v in values.items():
        setattr(cls, k, v)
    return cls
def get_args_dict(obj: HasTraits, incl_all=False):
    """
    Return a dict of user-configurable traits for an object

    :param obj: an instance of HasTraits
    :param incl_all: If all items should be included, regardless of RO status
    :return: dict of arguments
    """
    argsdict = ArgsDict()
    all_traits = obj.traits()

    for key in sorted(obj._trait_values):
        trait = all_traits[key]
        hidden = trait.get_metadata('hidden')
        if incl_all or (not hidden and is_trait_writable(trait)):
            argsdict[key] = obj._trait_values[key]

    return argsdict
def add_traits_to_argparse(obj: HasTraits, parser: ArgumentParser,
                           prefix: str=None):
    """
    Add all traits from the given object to the argparse context.

    Only traits whose metadata marks them as configurable
    (config=True) are exposed as command-line flags.

    :param obj: an instance of HasTraits
    :param parser: argparse parser
    :param prefix: string to prefix keys with
    """
    for key, trait in obj.traits().items():
        if trait.get_metadata('config') is not True:
            continue

        if prefix is None:
            argname = '--%s' % key
        else:
            argname = '--%s.%s' % (prefix, key)

        if isinstance(trait, Container):
            parser.add_argument(argname, nargs='+', help=trait.info_text)
        elif isinstance(trait, Enum):
            choices = [x.lower() for x in trait.values]
            parser.add_argument(argname, type=str.lower,
                                choices=choices, help=trait.info_text)
        else:
            # Fall back to coercing with the type of the default value,
            # or plain str when no default is available.
            argtype = str
            if hasattr(trait, 'default_value'):
                argtype = type(trait.default_value)
            parser.add_argument(argname, type=argtype, help=trait.info_text)
def apply_from_argparse(args, traits=None, target: HasTraits=None) -> dict:
    """
    Applies arguments added via add_traits_to_argparse to
    a target object which implements HasTraits. If a target
    is not known, a dict of traits may be passed instead.

    Will throw TraitError if validation fails.

    :param args: Parsed args from argparse
    :param traits: Dictionary of traits (optional)
    :param target: Target object (optional)

    :return: Dict of the arguments which actually changed
    """
    # apply the traits to an empty object, which will run
    # the validators on the client
    if isinstance(traits, HasTraits):
        traits = traits.traits()

    if target is None:
        if traits is None:
            raise ValueError("Either traits or target must be specified")

        # Unpack dict-form traits (see trait_as_dict) into real
        # TraitType instances before building the stub target.
        # Previously this ran even when a target was supplied, and
        # crashed with AttributeError when traits was None.
        traits = traits.copy()
        for k, v in traits.items():
            if not isinstance(v, TraitType):
                if isinstance(v, dict):
                    # BUG FIX: was "k[v] = dict_as_trait(v)", which
                    # attempted item assignment on the string key and
                    # raised TypeError.
                    traits[k] = dict_as_trait(v)
                else:
                    raise TypeError("A dict or trait object must be supplied")

        target = HasTraits()
        target.add_traits(**traits)

    # determine what should actually be changed
    argkeys = [k for k, v in vars(args).items() if v is not None]
    intersect = set(target.traits().keys()).intersection(set(argkeys))

    # apply the argparse flags to the target object
    for key in intersect:
        if target.traits()[key].get_metadata('config') is not True:
            raise ValueError("Trait is not marked as configurable: %s" % key)

        setattr(target, key, getattr(args, key))

    # if all validators passed, return a dict of the changed args
    changed = {}
    for key in intersect:
        changed[key] = target._trait_values[key]

    return changed
| lgpl-3.0 |
pelya/commandergenius | project/jni/python/src/Lib/getopt.py | 167 | 7316 | # -*- coding: iso-8859-1 -*-
"""Parser for command line options.
This module helps scripts to parse the command line arguments in
sys.argv. It supports the same conventions as the Unix getopt()
function (including the special meanings of arguments of the form `-'
and `--'). Long options similar to those supported by GNU software
may be used as well via an optional third argument. This module
provides two functions and an exception:
getopt() -- Parse command line options
gnu_getopt() -- Like getopt(), but allow option and non-option arguments
to be intermixed.
GetoptError -- exception (class) raised with 'opt' attribute, which is the
option involved with the exception.
"""
# Long option support added by Lars Wirzenius <liw@iki.fi>.
#
# Gerrit Holl <gerrit@nl.linux.org> moved the string-based exceptions
# to class-based exceptions.
#
# Peter Åstrand <astrand@lysator.liu.se> added gnu_getopt().
#
# TODO for gnu_getopt():
#
# - GNU getopt_long_only mechanism
# - allow the caller to specify ordering
# - RETURN_IN_ORDER option
# - GNU extension with '-' as first character of option string
# - optional arguments, specified by double colons
# - a option string with a W followed by semicolon should
# treat "-W foo" as "--foo"
__all__ = ["GetoptError","error","getopt","gnu_getopt"]
import os
class GetoptError(Exception):
    """Signals a problem while parsing the argument list.

    The ``opt`` attribute names the option that triggered the error and
    ``msg`` holds a human-readable description of the failure.
    """

    msg = ''
    opt = ''

    def __init__(self, msg, opt=''):
        self.msg, self.opt = msg, opt
        Exception.__init__(self, msg, opt)

    def __str__(self):
        # Only the message is shown; the option is available via .opt/.args.
        return self.msg
# Historical alias kept so code written against the pre-exception-class
# API (``getopt.error``) keeps working.
error = GetoptError # backward compatibility
def getopt(args, shortopts, longopts = []):
    """getopt(args, options[, long_options]) -> opts, args

    Parse *args* (typically ``sys.argv[1:]``) in the classic Unix way:
    scanning stops at the first word that does not look like an option.
    *shortopts* is a string of option letters, a letter followed by ':'
    meaning the option takes an argument.  *longopts* is a list of long
    option names (without the leading '--'); a trailing '=' marks a long
    option that takes an argument.

    Returns a pair ``(opts, args)`` where *opts* is a list of
    ``(option, value)`` tuples ('-x' or '--name' as the option, '' when
    there is no argument) in the order encountered, and *args* is the
    remaining, unparsed tail of the argument list.
    """
    parsed = []
    # Accept a bare string for longopts as a convenience.
    if type(longopts) == type(""):
        longopts = [longopts]
    else:
        longopts = list(longopts)
    # A lone '-' is a positional argument; anything else starting with
    # '-' is an option, and '--' terminates option processing.
    while args and args[0].startswith('-') and args[0] != '-':
        head = args[0]
        if head == '--':
            args = args[1:]
            break
        if head.startswith('--'):
            parsed, args = do_longs(parsed, head[2:], longopts, args[1:])
        else:
            parsed, args = do_shorts(parsed, head[1:], shortopts, args[1:])
    return parsed, args
def gnu_getopt(args, shortopts, longopts = []):
    """getopt(args, options[, long_options]) -> opts, args

    GNU-style variant of getopt(): options and non-option arguments may
    be intermixed, with the non-options collected and returned as the
    second element of the result.

    Scanning reverts to the POSIX behaviour (stop at the first
    non-option) when the option string starts with '+' or when the
    POSIXLY_CORRECT environment variable is set.
    """
    found = []
    remaining = []
    # Accept a bare string for longopts as a convenience.
    if isinstance(longopts, str):
        longopts = [longopts]
    else:
        longopts = list(longopts)
    # Decide whether scanning stops at the first non-option argument.
    if shortopts.startswith('+'):
        shortopts = shortopts[1:]
        stop_at_non_option = True
    else:
        stop_at_non_option = bool(os.environ.get("POSIXLY_CORRECT"))
    while args:
        current = args[0]
        if current == '--':
            # Explicit terminator: everything after it is positional.
            remaining.extend(args[1:])
            break
        if current.startswith('--'):
            found, args = do_longs(found, current[2:], longopts, args[1:])
        elif current.startswith('-'):
            found, args = do_shorts(found, current[1:], shortopts, args[1:])
        elif stop_at_non_option:
            remaining.extend(args)
            break
        else:
            remaining.append(current)
            args = args[1:]
    return found, remaining
def do_longs(opts, opt, longopts, args):
    """Consume one long option.

    *opt* is the option text with the leading '--' already removed,
    possibly containing '=value'.  The option's argument is taken from
    the text after '=' or, failing that, from the next word in *args*.
    Appends ('--name', value) to *opts* and returns (opts, args) with
    any consumed word removed.  Raises GetoptError on a missing or
    unexpected argument.
    """
    name, eq, value = opt.partition('=')
    optarg = value if eq else None
    has_arg, name = long_has_args(name, longopts)
    if has_arg:
        if optarg is None:
            # No inline '=value'; the argument must be the next word.
            if not args:
                raise GetoptError('option --%s requires argument' % name, name)
            optarg, args = args[0], args[1:]
    elif optarg:
        raise GetoptError('option --%s must not have an argument' % name, name)
    opts.append(('--' + name, optarg or ''))
    return opts, args
def long_has_args(opt, longopts):
    """Resolve *opt* against *longopts*, allowing unambiguous prefixes.

    Returns a pair ``(takes_argument, canonical_name)``.  Raises
    GetoptError when the option is unknown or when the prefix matches
    more than one candidate.
    """
    matches = [candidate for candidate in longopts if candidate.startswith(opt)]
    if not matches:
        raise GetoptError('option --%s not recognized' % opt, opt)
    # Exact hits win over prefix expansion.
    if opt in matches:
        return False, opt
    if opt + '=' in matches:
        return True, opt
    # No exact match, so the prefix had better be unique.
    if len(matches) > 1:
        # XXX since matches contains all valid continuations, it might be
        # nice to work them into the error message.
        raise GetoptError('option --%s not a unique prefix' % opt, opt)
    assert len(matches) == 1
    resolved = matches[0]
    if resolved.endswith('='):
        return True, resolved[:-1]
    return False, resolved
def do_shorts(opts, optstring, shortopts, args):
    """Consume a cluster of short options (the text after a single '-').

    Each letter is matched against *shortopts*; a letter that takes an
    argument consumes the rest of the cluster or, if the cluster is
    exhausted, the next word of *args*.  Appends ('-x', value) pairs to
    *opts* and returns (opts, args).
    """
    while optstring:
        opt, optstring = optstring[0], optstring[1:]
        if not short_has_arg(opt, shortopts):
            optarg = ''
        else:
            if not optstring:
                # Argument must come from the next word.
                if not args:
                    raise GetoptError('option -%s requires argument' % opt,
                                      opt)
                optstring, args = args[0], args[1:]
            # Whatever remains of the cluster is the argument.
            optarg, optstring = optstring, ''
        opts.append(('-' + opt, optarg))
    return opts, args
def short_has_arg(opt, shortopts):
    """Return True when short option *opt* takes an argument.

    *shortopts* is the getopt option string; a letter followed by ':'
    takes an argument.  Raises GetoptError for an unknown option.
    """
    for index, candidate in enumerate(shortopts):
        if candidate != ':' and opt == candidate:
            # A ':' right after the letter marks an argument-taking option.
            return shortopts.startswith(':', index + 1)
    raise GetoptError('option -%s not recognized' % opt, opt)
if __name__ == '__main__':
    import sys
    # Manual smoke test.  NOTE: the print *statement* below is Python 2
    # syntax; this module predates Python 3.
    print getopt(sys.argv[1:], "a:b", ["alpha=", "beta"])
| lgpl-2.1 |
aapav01/kernel_ms013g_3-4-x | tools/perf/scripts/python/syscall-counts-by-pid.py | 11180 | 1927 | # system call counts, by pid
# (c) 2010, Tom Zanussi <tzanussi@gmail.com>
# Licensed under the terms of the GNU GPL License version 2
#
# Displays system-wide system call totals, broken down by syscall.
# If a [comm] arg is specified, only syscalls called by [comm] are displayed.
import os, sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from Util import syscall_name
usage = "perf script -s syscall-counts-by-pid.py [comm]\n";
# Command-line handling: an optional single argument restricts output to one
# process.  A numeric argument is treated as a pid, anything else as a
# command name.
for_comm = None
for_pid = None
if len(sys.argv) > 2:
    sys.exit(usage)
if len(sys.argv) > 1:
    try:
        for_pid = int(sys.argv[1])
    except:
        # Not an integer: interpret the argument as a command name.
        for_comm = sys.argv[1]
# Auto-vivifying nested dict: syscalls[comm][pid][syscall_id] -> count.
syscalls = autodict()
def trace_begin():
    # Called by perf before event processing starts (Python 2 print syntax).
    print "Press control+C to stop and show the summary"
def trace_end():
    # Called by perf after the last event; emit the aggregated table.
    print_syscall_totals()
def raw_syscalls__sys_enter(event_name, context, common_cpu,
        common_secs, common_nsecs, common_pid, common_comm,
        id, args):
    # perf callback for the raw_syscalls:sys_enter tracepoint; counts one
    # syscall entry per (comm, pid, syscall id), honoring the optional
    # command/pid filter set at module load.
    if (for_comm and common_comm != for_comm) or \
       (for_pid and common_pid != for_pid):
        return
    try:
        syscalls[common_comm][common_pid][id] += 1
    except TypeError:
        # First hit for this key: autodict yields a non-numeric leaf, so
        # += raises TypeError and we initialize the counter instead.
        syscalls[common_comm][common_pid][id] = 1
def print_syscall_totals():
    # Render the per-comm/per-pid syscall counts, most frequent first.
    # (Python 2 throughout: print statements with trailing commas to
    # suppress the newline, dict.iteritems, tuple-unpacking lambda.)
    if for_comm is not None:
        print "\nsyscall events for %s:\n\n" % (for_comm),
    else:
        print "\nsyscall events by comm/pid:\n\n",
    print "%-40s %10s\n" % ("comm [pid]/syscalls", "count"),
    print "%-40s %10s\n" % ("----------------------------------------", \
        "----------"),
    comm_keys = syscalls.keys()
    for comm in comm_keys:
        pid_keys = syscalls[comm].keys()
        for pid in pid_keys:
            print "\n%s [%d]\n" % (comm, pid),
            id_keys = syscalls[comm][pid].keys()
            # Sort by (count, id) descending so the hottest syscalls lead.
            for id, val in sorted(syscalls[comm][pid].iteritems(), \
                key = lambda(k, v): (v, k), reverse = True):
                print "  %-38s  %10d\n" % (syscall_name(id), val),
| gpl-2.0 |
mkaluza/external_chromium_org | chrome/browser/resources/test_presubmit.py | 61 | 19703 | #!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Unit tests for Web Development Style Guide checker."""
import os
import re
import sys
import unittest
test_dir = os.path.dirname(os.path.abspath(__file__))
sys.path.extend([
os.path.normpath(os.path.join(test_dir, '..', '..', '..', 'tools')),
os.path.join(test_dir),
])
import find_depot_tools # pylint: disable=W0611
from testing_support.super_mox import SuperMoxTestBase
from web_dev_style import css_checker, js_checker # pylint: disable=F0401
class JsStyleGuideTest(SuperMoxTestBase):
  """Exercises each JSChecker rule with lines that must and must not be
  flagged as style errors."""

  def setUp(self):
    SuperMoxTestBase.setUp(self)
    input_api = self.mox.CreateMockAnything()
    input_api.re = re
    output_api = self.mox.CreateMockAnything()
    self.checker = js_checker.JSChecker(input_api, output_api)

  def GetHighlight(self, line, error):
    """Returns the substring of |line| that is highlighted in |error|."""
    error_lines = error.split('\n')
    highlight = error_lines[error_lines.index(line) + 1]
    return ''.join(ch1 for (ch1, ch2) in zip(line, highlight) if ch2 == '^')

  def ShouldFailConstCheck(self, line):
    """Checks that the 'const' checker flags |line| as a style error."""
    error = self.checker.ConstCheck(1, line)
    self.assertNotEqual('', error,
        'Should be flagged as style error: ' + line)
    self.assertEqual(self.GetHighlight(line, error), 'const')

  def ShouldPassConstCheck(self, line):
    """Checks that the 'const' checker doesn't flag |line| as a style error."""
    self.assertEqual('', self.checker.ConstCheck(1, line),
        'Should not be flagged as style error: ' + line)

  def testConstFails(self):
    lines = [
        "const foo = 'bar';",
        "  const bar = 'foo';",
        # Trying to use |const| as a variable name
        "var const = 0;",
        "var x = 5; const y = 6;",
        "for (var i=0, const e=10; i<e; i++) {",
        "for (const x=0; x<foo; i++) {",
        "while (const x = 7) {",
    ]
    for line in lines:
      self.ShouldFailConstCheck(line)

  def testConstPasses(self):
    lines = [
        # sanity check
        "var foo = 'bar'",
        # @const JsDoc tag
        "/** @const */ var SEVEN = 7;",
        # @const tag in multi-line comment
        " * @const",
        "   * @const",
        # @constructor tag in multi-line comment
        " * @constructor",
        "   * @constructor",
        # words containing 'const'
        "if (foo.constructor) {",
        "var deconstruction = 'something';",
        "var madeUpWordconst = 10;",
        # Strings containing the word |const|
        "var str = 'const at the beginning';",
        "var str = 'At the end: const';",
        # doing this one with regex is probably not practical
        #"var str = 'a const in the middle';",
    ]
    for line in lines:
      self.ShouldPassConstCheck(line)

  def ShouldFailChromeSendCheck(self, line):
    """Checks that the 'chrome.send' checker flags |line| as a style error."""
    error = self.checker.ChromeSendCheck(1, line)
    self.assertNotEqual('', error,
        'Should be flagged as style error: ' + line)
    self.assertEqual(self.GetHighlight(line, error), ', []')

  def ShouldPassChromeSendCheck(self, line):
    """Checks that the 'chrome.send' checker doesn't flag |line| as a style
    error.
    """
    self.assertEqual('', self.checker.ChromeSendCheck(1, line),
        'Should not be flagged as style error: ' + line)

  def testChromeSendFails(self):
    lines = [
        "chrome.send('message', []);",
        "  chrome.send('message', []);",
    ]
    for line in lines:
      self.ShouldFailChromeSendCheck(line)

  def testChromeSendPasses(self):
    lines = [
        "chrome.send('message', constructArgs('foo', []));",
        "  chrome.send('message', constructArgs('foo', []));",
        "chrome.send('message', constructArgs([]));",
        "  chrome.send('message', constructArgs([]));",
    ]
    for line in lines:
      self.ShouldPassChromeSendCheck(line)

  def ShouldFailGetElementByIdCheck(self, line):
    """Checks that the 'getElementById' checker flags |line| as a style
    error.
    """
    error = self.checker.GetElementByIdCheck(1, line)
    self.assertNotEqual('', error,
        'Should be flagged as style error: ' + line)
    self.assertEqual(self.GetHighlight(line, error), 'document.getElementById')

  def ShouldPassGetElementByIdCheck(self, line):
    """Checks that the 'getElementById' checker doesn't flag |line| as a style
    error.
    """
    self.assertEqual('', self.checker.GetElementByIdCheck(1, line),
        'Should not be flagged as style error: ' + line)

  def testGetElementByIdFails(self):
    lines = [
        "document.getElementById('foo');",
        "  document.getElementById('foo');",
        "var x = document.getElementById('foo');",
        "if (document.getElementById('foo').hidden) {",
    ]
    for line in lines:
      self.ShouldFailGetElementByIdCheck(line)

  def testGetElementByIdPasses(self):
    lines = [
        "elem.ownerDocument.getElementById('foo');",
        "  elem.ownerDocument.getElementById('foo');",
        "var x = elem.ownerDocument.getElementById('foo');",
        "if (elem.ownerDocument.getElementById('foo').hidden) {",
        "doc.getElementById('foo');",
        "  doc.getElementById('foo');",
        "cr.doc.getElementById('foo');",
        "  cr.doc.getElementById('foo');",
        "var x = doc.getElementById('foo');",
        "if (doc.getElementById('foo').hidden) {",
    ]
    for line in lines:
      self.ShouldPassGetElementByIdCheck(line)

  def ShouldFailInheritDocCheck(self, line):
    """Checks that the '@inheritDoc' checker flags |line| as a style error."""
    error = self.checker.InheritDocCheck(1, line)
    self.assertNotEqual('', error,
        msg='Should be flagged as style error: ' + line)
    self.assertEqual(self.GetHighlight(line, error), '@inheritDoc')

  def ShouldPassInheritDocCheck(self, line):
    """Checks that the '@inheritDoc' checker doesn't flag |line| as a style
    error.
    """
    self.assertEqual('', self.checker.InheritDocCheck(1, line),
        msg='Should not be flagged as style error: ' + line)

  def testInheritDocFails(self):
    lines = [
        " /** @inheritDoc */",
        "   * @inheritDoc",
    ]
    for line in lines:
      self.ShouldFailInheritDocCheck(line)

  def testInheritDocPasses(self):
    lines = [
        "And then I said, but I won't @inheritDoc! Hahaha!",
        "  If your dad's a doctor, do you inheritDoc?",
        "  What's up, inherit doc?",
        "  this.inheritDoc(someDoc)",
    ]
    for line in lines:
      self.ShouldPassInheritDocCheck(line)

  def ShouldFailWrapperTypeCheck(self, line):
    """Checks that the use of wrapper types (i.e. new Number(), @type {Number})
    is a style error.
    """
    error = self.checker.WrapperTypeCheck(1, line)
    self.assertNotEqual('', error,
        msg='Should be flagged as style error: ' + line)
    highlight = self.GetHighlight(line, error)
    self.assertTrue(highlight in ('Boolean', 'Number', 'String'))

  def ShouldPassWrapperTypeCheck(self, line):
    """Checks that the wrapper type checker doesn't flag |line| as a style
    error.
    """
    self.assertEqual('', self.checker.WrapperTypeCheck(1, line),
        msg='Should not be flagged as style error: ' + line)

  def testWrapperTypePasses(self):
    lines = [
        "/** @param {!ComplexType} */",
        " * @type {Object}",
        "   * @param {Function=} opt_callback",
        " * @param {} num Number of things to add to {blah}.",
        "   * @return {!print_preview.PageNumberSet}",
        " /* @returns {Number} */",  # Should be /** @return {Number} */
        # BUG FIX: a trailing comma was missing after the next element, which
        # made Python concatenate it with the following string and silently
        # merge two intended test cases into one.
        "* @param {!LocalStrings}",
        " Your type of Boolean is false!",
        " Then I parameterized her Number from her friend!",
        " A String of Pearls",
        " types.params.aBoolean.typeString(someNumber)",
    ]
    for line in lines:
      self.ShouldPassWrapperTypeCheck(line)

  def testWrapperTypeFails(self):
    lines = [
        "  /**@type {String}*/(string)",
        "  * @param{Number=} opt_blah A number",
        "/** @private @return {!Boolean} */",
        " * @param {number|String}",
    ]
    for line in lines:
      self.ShouldFailWrapperTypeCheck(line)

  def ShouldFailVarNameCheck(self, line):
    """Checks that var unix_hacker, $dollar are style errors."""
    error = self.checker.VarNameCheck(1, line)
    self.assertNotEqual('', error,
        msg='Should be flagged as style error: ' + line)
    highlight = self.GetHighlight(line, error)
    self.assertFalse('var ' in highlight)

  def ShouldPassVarNameCheck(self, line):
    """Checks that variableNamesLikeThis aren't style errors."""
    self.assertEqual('', self.checker.VarNameCheck(1, line),
        msg='Should not be flagged as style error: ' + line)

  def testVarNameFails(self):
    lines = [
        "var private_;",
        " var _super_private",
        "  var unix_hacker = someFunc();",
    ]
    for line in lines:
      self.ShouldFailVarNameCheck(line)

  def testVarNamePasses(self):
    lines = [
        "  var namesLikeThis = [];",
        " for (var i = 0; i < 10; ++i) { ",
        "for (var i in obj) {",
        " var one, two, three;",
        "  var magnumPI = {};",
        " var g_browser = 'da browzer';",
        "/** @const */ var Bla = options.Bla;",  # goog.scope() replacement.
        " var $ = function() {",                 # For legacy reasons.
        " var StudlyCaps = cr.define('bla')",    # Classes.
        " var SCARE_SMALL_CHILDREN = [",         # TODO(dbeam): add @const in
                                                 # front of all these vars like
        "/** @const */ CONST_VAR = 1;",          # this line has (<--).
    ]
    for line in lines:
      self.ShouldPassVarNameCheck(line)
class CssStyleGuideTest(SuperMoxTestBase):
  """Exercises css_checker.CSSChecker: each test feeds CSS source through a
  mocked presubmit API and asserts the exact warning text produced."""

  def setUp(self):
    SuperMoxTestBase.setUp(self)
    self.fake_file_name = 'fake.css'
    # Mox record/replay: calls recorded here must occur during RunChecks().
    self.fake_file = self.mox.CreateMockAnything()
    self.mox.StubOutWithMock(self.fake_file, 'LocalPath')
    self.fake_file.LocalPath().AndReturn(self.fake_file_name)
    # Actual calls to NewContents() are defined in each test.
    self.mox.StubOutWithMock(self.fake_file, 'NewContents')
    self.input_api = self.mox.CreateMockAnything()
    self.input_api.re = re
    # NOTE(review): 'AffectedSourceFiles' is stubbed but 'AffectedFiles' is
    # the call recorded below -- presumably harmless with CreateMockAnything,
    # but verify against css_checker's actual API usage.
    self.mox.StubOutWithMock(self.input_api, 'AffectedSourceFiles')
    self.input_api.AffectedFiles(
        include_deletes=False, file_filter=None).AndReturn([self.fake_file])
    # Actual creations of PresubmitPromptWarning are defined in each test.
    self.output_api = self.mox.CreateMockAnything()
    self.mox.StubOutWithMock(self.output_api, 'PresubmitPromptWarning',
                             use_mock_anything=True)
    author_msg = ('Was the CSS checker useful? '
                  'Send feedback or hate mail to dbeam@chromium.org.')
    # NOTE(review): self.output_api is created (and a stub installed) twice;
    # the second assignment appears to discard the first stub -- confirm
    # whether the first block is dead code.
    self.output_api = self.mox.CreateMockAnything()
    self.mox.StubOutWithMock(self.output_api, 'PresubmitNotifyResult',
                             use_mock_anything=True)
    self.output_api.PresubmitNotifyResult(author_msg).AndReturn(None)

  def VerifyContentsProducesOutput(self, contents, output):
    # Record the expected file contents and warning, then replay and run the
    # checker; mox verifies the warning text matches exactly.
    self.fake_file.NewContents().AndReturn(contents.splitlines())
    self.output_api.PresubmitPromptWarning(
        self.fake_file_name + ':\n' + output.strip()).AndReturn(None)
    self.mox.ReplayAll()
    css_checker.CSSChecker(self.input_api, self.output_api).RunChecks()

  def testCssAlphaWithAtBlock(self):
    self.VerifyContentsProducesOutput("""
<include src="../shared/css/cr/ui/overlay.css">
<include src="chrome://resources/totally-cool.css" />
/* A hopefully safely ignored comment and @media statement. /**/
@media print {
div {
display: block;
color: red;
}
}
.rule {
z-index: 5;
<if expr="not is macosx">
background-image: url(chrome://resources/BLAH); /* TODO(dbeam): Fix this. */
background-color: rgb(235, 239, 249);
</if>
<if expr="is_macosx">
background-color: white;
background-image: url(chrome://resources/BLAH2);
</if>
color: black;
}
<if expr="is_macosx">
.language-options-right {
visibility: hidden;
opacity: 1; /* TODO(dbeam): Fix this. */
}
</if>""", """
- Alphabetize properties and list vendor specific (i.e. -webkit) above standard.
display: block;
color: red;
z-index: 5;
color: black;""")

  def testCssAlphaWithNonStandard(self):
    self.VerifyContentsProducesOutput("""
div {
/* A hopefully safely ignored comment and @media statement. /**/
color: red;
-webkit-margin-start: 5px;
}""", """
- Alphabetize properties and list vendor specific (i.e. -webkit) above standard.
color: red;
-webkit-margin-start: 5px;""")

  def testCssAlphaWithLongerDashedProps(self):
    self.VerifyContentsProducesOutput("""
div {
border-left: 5px; /* A hopefully removed comment. */
border: 5px solid red;
}""", """
- Alphabetize properties and list vendor specific (i.e. -webkit) above standard.
border-left: 5px;
border: 5px solid red;""")

  def testCssBracesHaveSpaceBeforeAndNothingAfter(self):
    self.VerifyContentsProducesOutput("""
/* Hello! */div/* Comment here*/{
display: block;
}
blah /* hey! */
{
rule: value;
}
.this.is { /* allowed */
rule: value;
}""", """
- Start braces ({) end a selector, have a space before them and no rules after.
div{
{""")

  def testCssClassesUseDashes(self):
    self.VerifyContentsProducesOutput("""
.className,
.ClassName,
.class-name /* We should not catch this. */,
.class_name {
display: block;
}""", """
- Classes use .dash-form.
.className,
.ClassName,
.class_name {""")

  def testCssCloseBraceOnNewLine(self):
    self.VerifyContentsProducesOutput("""
@media { /* TODO(dbeam) Fix this case. */
.rule {
display: block;
}}
@-webkit-keyframe blah {
100% { height: -500px 0; }
}
#rule {
rule: value; }""", """
- Always put a rule closing brace (}) on a new line.
rule: value; }""")

  def testCssColonsHaveSpaceAfter(self):
    self.VerifyContentsProducesOutput("""
div:not(.class):not([attr=5]), /* We should not catch this. */
div:not(.class):not([attr]) /* Nor this. */ {
background: url(data:image/jpeg,asdfasdfsadf); /* Ignore this. */
background: -webkit-linear-gradient(left, red,
80% blah blee blar);
color: red;
display:block;
}""", """
- Colons (:) should have a space after them.
display:block;
- Don't use data URIs in source files. Use grit instead.
background: url(data:image/jpeg,asdfasdfsadf);""")

  def testCssFavorSingleQuotes(self):
    self.VerifyContentsProducesOutput("""
html[dir="rtl"] body,
html[dir=ltr] body /* TODO(dbeam): Require '' around rtl in future? */ {
background: url("chrome://resources/BLAH");
font-family: "Open Sans";
<if expr="is_macosx">
blah: blee;
</if>
}""", """
- Use single quotes (') instead of double quotes (") in strings.
html[dir="rtl"] body,
background: url("chrome://resources/BLAH");
font-family: "Open Sans";""")

  def testCssHexCouldBeShorter(self):
    self.VerifyContentsProducesOutput("""
#abc,
#abc-,
#abc-ghij,
#abcdef-,
#abcdef-ghij,
#aaaaaa,
#bbaacc {
background-color: #336699; /* Ignore short hex rule if not gray. */
color: #999999;
color: #666;
}""", """
- Use abbreviated hex (#rgb) when in form #rrggbb.
color: #999999; (replace with #999)
- Use rgb() over #hex when not a shade of gray (like #333).
background-color: #336699; (replace with rgb(51, 102, 153))""")

  def testCssUseMillisecondsForSmallTimes(self):
    self.VerifyContentsProducesOutput("""
.transition-0s /* This is gross but may happen. */ {
transform: one 0.2s;
transform: two .1s;
transform: tree 1s;
transform: four 300ms;
}""", """
- Use milliseconds for time measurements under 1 second.
transform: one 0.2s; (replace with 200ms)
transform: two .1s; (replace with 100ms)""")

  def testCssNoDataUrisInSourceFiles(self):
    self.VerifyContentsProducesOutput("""
img {
background: url( data:image/jpeg,4\/\/350|\/|3|2 );
background: url('data:image/jpeg,4\/\/350|\/|3|2');
}""", """
- Don't use data URIs in source files. Use grit instead.
background: url( data:image/jpeg,4\/\/350|\/|3|2 );
background: url('data:image/jpeg,4\/\/350|\/|3|2');""")

  def testCssOneRulePerLine(self):
    self.VerifyContentsProducesOutput("""
a:not([hidden]):not(.custom-appearance):not([version=1]):first-of-type,
a:not([hidden]):not(.custom-appearance):not([version=1]):first-of-type ~
input[type='checkbox']:not([hidden]),
div {
background: url(chrome://resources/BLAH);
rule: value; /* rule: value; */
rule: value; rule: value;
}""", """
- One rule per line (what not to do: color: red; margin: 0;).
rule: value; rule: value;""")

  def testCssOneSelectorPerLine(self):
    self.VerifyContentsProducesOutput("""
a,
div,a,
div,/* Hello! */ span,
#id.class([dir=rtl):not(.class):any(a, b, d) {
rule: value;
}
a,
div,a {
some-other: rule here;
}""", """
- One selector per line (what not to do: a, b {}).
div,a,
div, span,
div,a {""")

  def testCssPseudoElementDoubleColon(self):
    self.VerifyContentsProducesOutput("""
a:href,
br::after,
::-webkit-scrollbar-thumb,
a:not([empty]):hover:focus:active, /* shouldn't catch here and above */
abbr:after,
.tree-label:empty:after,
b:before,
:-WebKit-ScrollBar {
rule: value;
}""", """
- Pseudo-elements should use double colon (i.e. ::after).
:after (should be ::after)
:after (should be ::after)
:before (should be ::before)
:-WebKit-ScrollBar (should be ::-WebKit-ScrollBar)
""")

  def testCssRgbIfNotGray(self):
    self.VerifyContentsProducesOutput("""
#abc,
#aaa,
#aabbcc {
background: -webkit-linear-gradient(left, from(#abc), to(#def));
color: #bad;
color: #bada55;
}""", """
- Use rgb() over #hex when not a shade of gray (like #333).
background: -webkit-linear-gradient(left, from(#abc), to(#def)); """
"""(replace with rgb(170, 187, 204), rgb(221, 238, 255))
color: #bad; (replace with rgb(187, 170, 221))
color: #bada55; (replace with rgb(186, 218, 85))""")

  def testCssZeroLengthTerms(self):
    self.VerifyContentsProducesOutput("""
@-webkit-keyframe anim {
0% { /* Ignore key frames */
width: 0px;
}
10% {
width: 10px;
}
100% {
width: 100px;
}
}
.media-button.play > .state0.active,
.media-button[state='0'] > .state0.normal /* blah */, /* blee */
.media-button[state='0']:not(.disabled):hover > .state0.hover {
-webkit-animation: anim 0s;
-webkit-animation-duration: anim 0ms;
-webkit-transform: scale(0%),
translateX(0deg),
translateY(0rad),
translateZ(0grad);
background-position-x: 0em;
background-position-y: 0ex;
border-width: 0em;
color: hsl(0, 0%, 85%); /* Shouldn't trigger error. */
opacity: .0;
opacity: 0.0;
opacity: 0.;
}
@page {
border-width: 0mm;
height: 0cm;
width: 0in;
}""", """
- Make all zero length terms (i.e. 0px) 0 unless inside of hsl() or part of"""
""" @keyframe.
width: 0px;
-webkit-animation: anim 0s;
-webkit-animation-duration: anim 0ms;
-webkit-transform: scale(0%),
translateX(0deg),
translateY(0rad),
translateZ(0grad);
background-position-x: 0em;
background-position-y: 0ex;
border-width: 0em;
opacity: .0;
opacity: 0.0;
opacity: 0.;
border-width: 0mm;
height: 0cm;
width: 0in;
""")
if __name__ == '__main__':
  # Run all style-guide unit tests in this file.
  unittest.main()
| bsd-3-clause |
GheRivero/ansible | lib/ansible/modules/cloud/azure/azure_rm_securitygroup_facts.py | 23 | 10605 | #!/usr/bin/python
#
# Copyright (c) 2016 Matt Davis, <mdavis@ansible.com>
# Chris Houseknecht, <house@redhat.com>
#
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'certified'}
DOCUMENTATION = '''
---
module: azure_rm_securitygroup_facts
version_added: "2.1"
short_description: Get security group facts.
description:
- Get facts for a specific security group or all security groups within a resource group.
options:
name:
description:
- Only show results for a specific security group.
resource_group:
description:
- Name of the resource group to use.
required: true
tags:
description:
- Limit results by providing a list of tags. Format tags as 'key' or 'key:value'.
extends_documentation_fragment:
- azure
author:
- "Chris Houseknecht (@chouseknecht)"
- "Matt Davis (@nitzmahone)"
'''
EXAMPLES = '''
- name: Get facts for one security group
azure_rm_securitygroup_facts:
resource_group: Testing
name: secgroup001
- name: Get facts for all security groups
azure_rm_securitygroup_facts:
resource_group: Testing
'''
RETURN = '''
azure_securitygroups:
description: List containing security group dicts.
returned: always
type: list
example: [{
"etag": 'W/"d036f4d7-d977-429a-a8c6-879bc2523399"',
"id": "/subscriptions/XXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXX/resourceGroups/Testing/providers/Microsoft.Network/networkSecurityGroups/secgroup001",
"location": "eastus2",
"name": "secgroup001",
"properties": {
"defaultSecurityRules": [
{
"etag": 'W/"d036f4d7-d977-429a-a8c6-879bc2523399"',
"id": "/subscriptions/XXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXX/resourceGroups/Testing/providers/Microsoft.Network/networkSecurityGroups/secgroup001/defaultSecurityRules/AllowVnetInBound",
"name": "AllowVnetInBound",
"properties": {
"access": "Allow",
"description": "Allow inbound traffic from all VMs in VNET",
"destinationAddressPrefix": "VirtualNetwork",
"destinationPortRange": "*",
"direction": "Inbound",
"priority": 65000,
"protocol": "*",
"provisioningState": "Succeeded",
"sourceAddressPrefix": "VirtualNetwork",
"sourcePortRange": "*"
}
},
{
"etag": 'W/"d036f4d7-d977-429a-a8c6-879bc2523399"',
"id": "/subscriptions/XXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXX/resourceGroups/Testing/providers/Microsoft.Network/networkSecurityGroups/secgroup001/defaultSecurityRules/AllowAzureLoadBalancerInBound",
"name": "AllowAzureLoadBalancerInBound",
"properties": {
"access": "Allow",
"description": "Allow inbound traffic from azure load balancer",
"destinationAddressPrefix": "*",
"destinationPortRange": "*",
"direction": "Inbound",
"priority": 65001,
"protocol": "*",
"provisioningState": "Succeeded",
"sourceAddressPrefix": "AzureLoadBalancer",
"sourcePortRange": "*"
}
},
{
"etag": 'W/"d036f4d7-d977-429a-a8c6-879bc2523399"',
"id": "/subscriptions/XXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXX/resourceGroups/Testing/providers/Microsoft.Network/networkSecurityGroups/secgroup001/defaultSecurityRules/DenyAllInBound",
"name": "DenyAllInBound",
"properties": {
"access": "Deny",
"description": "Deny all inbound traffic",
"destinationAddressPrefix": "*",
"destinationPortRange": "*",
"direction": "Inbound",
"priority": 65500,
"protocol": "*",
"provisioningState": "Succeeded",
"sourceAddressPrefix": "*",
"sourcePortRange": "*"
}
},
{
"etag": 'W/"d036f4d7-d977-429a-a8c6-879bc2523399"',
"id": "/subscriptions/XXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXX/resourceGroups/Testing/providers/Microsoft.Network/networkSecurityGroups/secgroup001/defaultSecurityRules/AllowVnetOutBound",
"name": "AllowVnetOutBound",
"properties": {
"access": "Allow",
"description": "Allow outbound traffic from all VMs to all VMs in VNET",
"destinationAddressPrefix": "VirtualNetwork",
"destinationPortRange": "*",
"direction": "Outbound",
"priority": 65000,
"protocol": "*",
"provisioningState": "Succeeded",
"sourceAddressPrefix": "VirtualNetwork",
"sourcePortRange": "*"
}
},
{
"etag": 'W/"d036f4d7-d977-429a-a8c6-879bc2523399"',
"id": "/subscriptions/XXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXX/resourceGroups/Testing/providers/Microsoft.Network/networkSecurityGroups/secgroup001/defaultSecurityRules/AllowInternetOutBound",
"name": "AllowInternetOutBound",
"properties": {
"access": "Allow",
"description": "Allow outbound traffic from all VMs to Internet",
"destinationAddressPrefix": "Internet",
"destinationPortRange": "*",
"direction": "Outbound",
"priority": 65001,
"protocol": "*",
"provisioningState": "Succeeded",
"sourceAddressPrefix": "*",
"sourcePortRange": "*"
}
},
{
"etag": 'W/"d036f4d7-d977-429a-a8c6-879bc2523399"',
"id": "/subscriptions/XXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXX/resourceGroups/Testing/providers/Microsoft.Network/networkSecurityGroups/secgroup001/defaultSecurityRules/DenyAllOutBound",
"name": "DenyAllOutBound",
"properties": {
"access": "Deny",
"description": "Deny all outbound traffic",
"destinationAddressPrefix": "*",
"destinationPortRange": "*",
"direction": "Outbound",
"priority": 65500,
"protocol": "*",
"provisioningState": "Succeeded",
"sourceAddressPrefix": "*",
"sourcePortRange": "*"
}
}
],
"networkInterfaces": [
{
"id": "/subscriptions/XXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXX/resourceGroups/Testing/providers/Microsoft.Network/networkInterfaces/nic004"
}
],
"provisioningState": "Succeeded",
"resourceGuid": "ebd00afa-5dc8-446f-810a-50dd6f671588",
"securityRules": []
},
"tags": {},
"type": "Microsoft.Network/networkSecurityGroups"
}]
''' # NOQA
try:
    from msrestazure.azure_exceptions import CloudError
except ImportError:
    # Narrowed from a bare `except:`, which also swallowed SystemExit and
    # KeyboardInterrupt. A missing msrestazure is the only case handled
    # (in azure_rm_common); anything else should propagate.
    pass
from ansible.module_utils.azure_rm_common import AzureRMModuleBase
AZURE_OBJECT_CLASS = 'NetworkSecurityGroup'
class AzureRMSecurityGroupFacts(AzureRMModuleBase):
    """Ansible facts module: fetch one named network security group, or all
    groups in a resource group, optionally filtered by tags."""

    def __init__(self):
        # Accepted module parameters; only resource_group is required.
        self.module_arg_spec = dict(
            name=dict(type='str'),
            resource_group=dict(required=True, type='str'),
            tags=dict(type='list'),
        )
        # Facts modules never change state, so changed is always False.
        self.results = dict(
            changed=False,
            ansible_facts=dict(azure_securitygroups=[])
        )
        # Populated from parsed parameters by exec_module().
        self.name = None
        self.resource_group = None
        self.tags = None
        # The base class parses arguments and drives exec_module().
        super(AzureRMSecurityGroupFacts, self).__init__(self.module_arg_spec,
                                                        supports_tags=False,
                                                        facts_module=True)

    def exec_module(self, **kwargs):
        # Copy each parsed parameter onto the instance.
        for key in self.module_arg_spec:
            setattr(self, key, kwargs[key])
        # A name means a single lookup; otherwise list the whole group.
        if self.name is not None:
            self.results['ansible_facts']['azure_securitygroups'] = self.get_item()
        else:
            self.results['ansible_facts']['azure_securitygroups'] = self.list_items()
        return self.results

    def get_item(self):
        """Return a one-element list with the named security group serialized
        to a dict, or an empty list if it is absent or fails the tag filter."""
        self.log('Get properties for {0}'.format(self.name))
        item = None
        result = []
        try:
            item = self.network_client.network_security_groups.get(self.resource_group, self.name)
        except CloudError:
            # Not found (or other service error): report no facts.
            pass
        if item and self.has_tags(item.tags, self.tags):
            grp = self.serialize_obj(item, AZURE_OBJECT_CLASS)
            grp['name'] = item.name
            result = [grp]
        return result

    def list_items(self):
        """Return all security groups in the resource group (tag-filtered),
        each serialized to a dict."""
        self.log('List all items')
        try:
            response = self.network_client.network_security_groups.list(self.resource_group)
        except Exception as exc:
            self.fail("Error listing all items - {0}".format(str(exc)))
        results = []
        for item in response:
            if self.has_tags(item.tags, self.tags):
                grp = self.serialize_obj(item, AZURE_OBJECT_CLASS)
                grp['name'] = item.name
                results.append(grp)
        return results
def main():
    """Module entry point; the base class drives execution from __init__."""
    AzureRMSecurityGroupFacts()


if __name__ == '__main__':
    main()
| gpl-3.0 |
moniqx4/bite-project | deps/gdata-python-client/src/gdata/finance/__init__.py | 125 | 15397 | #!/usr/bin/python
#
# Copyright (C) 2009 Tan Swee Heng
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Contains extensions to Atom objects used with Google Finance."""
__author__ = 'thesweeheng@gmail.com'
import atom
import gdata
GD_NAMESPACE = 'http://schemas.google.com/g/2005'
GF_NAMESPACE = 'http://schemas.google.com/finance/2007'
class Money(atom.AtomBase):
    """The <gd:money> element: an amount of money in a single currency."""

    _tag = 'money'
    _namespace = GD_NAMESPACE
    _attributes = atom.AtomBase._attributes.copy()
    # XML attribute name -> Python attribute name mappings.
    _attributes['amount'] = 'amount'
    _attributes['currencyCode'] = 'currency_code'

    def __init__(self, amount=None, currency_code=None, **kwargs):
        self.amount = amount
        self.currency_code = currency_code
        atom.AtomBase.__init__(self, **kwargs)

    def __str__(self):
        return "%s %s" % (self.amount, self.currency_code)


def MoneyFromString(xml_string):
    """Parse a <gd:money> XML string into a Money instance."""
    return atom.CreateClassFromXMLString(Money, xml_string)
class _Monies(atom.AtomBase):
    """An element containing multiple <gd:money> in multiple currencies."""

    _namespace = GF_NAMESPACE
    _children = atom.AtomBase._children.copy()
    # Each subclass carries a list of Money children, one per currency.
    _children['{%s}money' % GD_NAMESPACE] = ('money', [Money])

    def __init__(self, money=None, **kwargs):
        self.money = money or []
        atom.AtomBase.__init__(self, **kwargs)

    def __str__(self):
        return " / ".join(["%s" % i for i in self.money])


class CostBasis(_Monies):
    """The <gf:costBasis> element."""
    _tag = 'costBasis'


def CostBasisFromString(xml_string):
    """Parse a <gf:costBasis> XML string into a CostBasis instance."""
    return atom.CreateClassFromXMLString(CostBasis, xml_string)


class DaysGain(_Monies):
    """The <gf:daysGain> element."""
    _tag = 'daysGain'


def DaysGainFromString(xml_string):
    """Parse a <gf:daysGain> XML string into a DaysGain instance."""
    return atom.CreateClassFromXMLString(DaysGain, xml_string)


class Gain(_Monies):
    """The <gf:gain> element."""
    _tag = 'gain'


def GainFromString(xml_string):
    """Parse a <gf:gain> XML string into a Gain instance."""
    return atom.CreateClassFromXMLString(Gain, xml_string)
class MarketValue(_Monies):
    """The <gf:marketValue> element."""
    # An earlier revision also assigned ``_tag = 'gain'`` here; that
    # assignment was dead code (immediately overwritten by the line below)
    # and has been removed.
    _tag = 'marketValue'


def MarketValueFromString(xml_string):
    """Parse a <gf:marketValue> XML string into a MarketValue instance."""
    return atom.CreateClassFromXMLString(MarketValue, xml_string)
class Commission(_Monies):
    """The <gf:commission> element."""
    _tag = 'commission'


def CommissionFromString(xml_string):
    """Parse a <gf:commission> XML string into a Commission instance."""
    return atom.CreateClassFromXMLString(Commission, xml_string)


class Price(_Monies):
    """The <gf:price> element."""
    _tag = 'price'


def PriceFromString(xml_string):
    """Parse a <gf:price> XML string into a Price instance."""
    return atom.CreateClassFromXMLString(Price, xml_string)
class Symbol(atom.AtomBase):
    """The <gf:symbol> element: ticker symbol, exchange and full name."""

    _tag = 'symbol'
    _namespace = GF_NAMESPACE
    _attributes = atom.AtomBase._attributes.copy()
    # XML attribute name -> Python attribute name mappings.
    _attributes['fullName'] = 'full_name'
    _attributes['exchange'] = 'exchange'
    _attributes['symbol'] = 'symbol'

    def __init__(self, full_name=None, exchange=None, symbol=None, **kwargs):
        self.full_name = full_name
        self.exchange = exchange
        self.symbol = symbol
        atom.AtomBase.__init__(self, **kwargs)

    def __str__(self):
        return "%s:%s (%s)" % (self.exchange, self.symbol, self.full_name)


def SymbolFromString(xml_string):
    """Parse a <gf:symbol> XML string into a Symbol instance."""
    return atom.CreateClassFromXMLString(Symbol, xml_string)
class TransactionData(atom.AtomBase):
    """The <gf:transactionData> element.

    Carries the details of a single transaction: its type, date, number
    of shares, free-form notes, plus optional <gf:commission> and
    <gf:price> children.
    """

    _tag = 'transactionData'
    _namespace = GF_NAMESPACE
    _attributes = atom.AtomBase._attributes.copy()
    _attributes['type'] = 'type'
    _attributes['date'] = 'date'
    _attributes['shares'] = 'shares'
    _attributes['notes'] = 'notes'
    _children = atom.AtomBase._children.copy()
    _children['{%s}commission' % GF_NAMESPACE] = ('commission', Commission)
    _children['{%s}price' % GF_NAMESPACE] = ('price', Price)

    def __init__(self, type=None, date=None, shares=None,
                 notes=None, commission=None, price=None, **kwargs):
        # Note: `type` intentionally shadows the builtin to match the XML
        # attribute name used by the Finance API.
        self.type = type
        self.date = date
        self.shares = shares
        self.notes = notes
        self.commission = commission
        self.price = price
        atom.AtomBase.__init__(self, **kwargs)


def TransactionDataFromString(xml_string):
    """Parse a <gf:transactionData> XML string into a TransactionData."""
    return atom.CreateClassFromXMLString(TransactionData, xml_string)
class TransactionEntry(gdata.GDataEntry):
    """An entry of the transaction feed.

    A TransactionEntry contains TransactionData such as the transaction
    type (Buy, Sell, Sell Short, or Buy to Cover), the number of units,
    the date, the price, any commission, and any notes.
    """

    _tag = 'entry'
    _namespace = atom.ATOM_NAMESPACE
    _children = gdata.GDataEntry._children.copy()
    _children['{%s}transactionData' % GF_NAMESPACE] = (
        'transaction_data', TransactionData)

    def __init__(self, transaction_data=None, **kwargs):
        self.transaction_data = transaction_data
        gdata.GDataEntry.__init__(self, **kwargs)

    def transaction_id(self):
        # The atom id is a URL; the transaction ID is its last path segment.
        return self.id.text.split("/")[-1]

    transaction_id = property(transaction_id, doc='The transaction ID.')


def TransactionEntryFromString(xml_string):
    """Parse a transaction entry XML string into a TransactionEntry."""
    return atom.CreateClassFromXMLString(TransactionEntry, xml_string)
class TransactionFeed(gdata.GDataFeed):
    """A feed that lists all of the transactions that have been recorded for
    a particular position.

    A transaction is a collection of information about an instance of
    buying or selling a particular security. The TransactionFeed lists all
    of the transactions that have been recorded for a particular position
    as a list of TransactionEntries.
    """

    _tag = 'feed'
    _namespace = atom.ATOM_NAMESPACE
    _children = gdata.GDataFeed._children.copy()
    _children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry', [TransactionEntry])


def TransactionFeedFromString(xml_string):
    """Parse a transaction feed XML string into a TransactionFeed."""
    return atom.CreateClassFromXMLString(TransactionFeed, xml_string)


class TransactionFeedLink(atom.AtomBase):
    """Link to TransactionFeed embedded in PositionEntry.

    If a PositionFeed is queried with transactions='true', TransactionFeeds
    are inlined in the returned PositionEntries. These TransactionFeeds are
    accessible via TransactionFeedLink's feed attribute.
    """

    _tag = 'feedLink'
    _namespace = GD_NAMESPACE
    _attributes = atom.AtomBase._attributes.copy()
    _attributes['href'] = 'href'
    _children = atom.AtomBase._children.copy()
    _children['{%s}feed' % atom.ATOM_NAMESPACE] = (
        'feed', TransactionFeed)

    def __init__(self, href=None, feed=None, **kwargs):
        self.href = href
        self.feed = feed
        atom.AtomBase.__init__(self, **kwargs)
class PositionData(atom.AtomBase):
    """The <gf:positionData> element.

    Holds performance statistics for a single position: gain percentage,
    trailing returns over several periods, share count, and monetary
    children (cost basis, day's gain, total gain, market value).
    """

    _tag = 'positionData'
    _namespace = GF_NAMESPACE
    _attributes = atom.AtomBase._attributes.copy()
    _attributes['gainPercentage'] = 'gain_percentage'
    # Trailing returns: 1 week, 4 weeks, 3 months, year-to-date, 1/3/5 years.
    _attributes['return1w'] = 'return1w'
    _attributes['return4w'] = 'return4w'
    _attributes['return3m'] = 'return3m'
    _attributes['returnYTD'] = 'returnYTD'
    _attributes['return1y'] = 'return1y'
    _attributes['return3y'] = 'return3y'
    _attributes['return5y'] = 'return5y'
    _attributes['returnOverall'] = 'return_overall'
    _attributes['shares'] = 'shares'
    _children = atom.AtomBase._children.copy()
    _children['{%s}costBasis' % GF_NAMESPACE] = ('cost_basis', CostBasis)
    _children['{%s}daysGain' % GF_NAMESPACE] = ('days_gain', DaysGain)
    _children['{%s}gain' % GF_NAMESPACE] = ('gain', Gain)
    _children['{%s}marketValue' % GF_NAMESPACE] = ('market_value', MarketValue)

    def __init__(self, gain_percentage=None,
                 return1w=None, return4w=None, return3m=None, returnYTD=None,
                 return1y=None, return3y=None, return5y=None, return_overall=None,
                 shares=None, cost_basis=None, days_gain=None,
                 gain=None, market_value=None, **kwargs):
        self.gain_percentage = gain_percentage
        self.return1w = return1w
        self.return4w = return4w
        self.return3m = return3m
        self.returnYTD = returnYTD
        self.return1y = return1y
        self.return3y = return3y
        self.return5y = return5y
        self.return_overall = return_overall
        self.shares = shares
        self.cost_basis = cost_basis
        self.days_gain = days_gain
        self.gain = gain
        self.market_value = market_value
        atom.AtomBase.__init__(self, **kwargs)


def PositionDataFromString(xml_string):
    """Parse a <gf:positionData> XML string into a PositionData instance."""
    return atom.CreateClassFromXMLString(PositionData, xml_string)
class PositionEntry(gdata.GDataEntry):
    """An entry of the position feed.

    A PositionEntry contains the ticker exchange and Symbol for a stock,
    mutual fund, or other security, along with PositionData such as the
    number of units of that security that the user holds, and performance
    statistics.
    """

    _tag = 'entry'
    _namespace = atom.ATOM_NAMESPACE
    _children = gdata.GDataEntry._children.copy()
    _children['{%s}positionData' % GF_NAMESPACE] = (
        'position_data', PositionData)
    _children['{%s}symbol' % GF_NAMESPACE] = ('symbol', Symbol)
    _children['{%s}feedLink' % GD_NAMESPACE] = (
        'feed_link', TransactionFeedLink)

    def __init__(self, position_data=None, symbol=None, feed_link=None,
                 **kwargs):
        self.position_data = position_data
        self.symbol = symbol
        self.feed_link = feed_link
        gdata.GDataEntry.__init__(self, **kwargs)

    def position_title(self):
        return self.title.text

    position_title = property(position_title,
        doc='The position title as a string (i.e. position.title.text).')

    def ticker_id(self):
        # The atom id is a URL; the ticker ID is its last path segment.
        return self.id.text.split("/")[-1]

    ticker_id = property(ticker_id, doc='The position TICKER ID.')

    def transactions(self):
        if self.feed_link.feed:
            return self.feed_link.feed.entry
        else:
            return None

    transactions = property(transactions, doc="""
      Inlined TransactionEntries are returned if PositionFeed is queried
      with transactions='true'.""")


def PositionEntryFromString(xml_string):
    """Parse a position entry XML string into a PositionEntry."""
    return atom.CreateClassFromXMLString(PositionEntry, xml_string)
class PositionFeed(gdata.GDataFeed):
    """A feed that lists all of the positions in a particular portfolio.

    A position is a collection of information about a security that the
    user holds. The PositionFeed lists all of the positions in a particular
    portfolio as a list of PositionEntries.
    """

    _tag = 'feed'
    _namespace = atom.ATOM_NAMESPACE
    _children = gdata.GDataFeed._children.copy()
    _children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry', [PositionEntry])


def PositionFeedFromString(xml_string):
    """Parse a position feed XML string into a PositionFeed."""
    return atom.CreateClassFromXMLString(PositionFeed, xml_string)


class PositionFeedLink(atom.AtomBase):
    """Link to PositionFeed embedded in PortfolioEntry.

    If a PortfolioFeed is queried with positions='true', the PositionFeeds
    are inlined in the returned PortfolioEntries. These PositionFeeds are
    accessible via PositionFeedLink's feed attribute.
    """

    _tag = 'feedLink'
    _namespace = GD_NAMESPACE
    _attributes = atom.AtomBase._attributes.copy()
    _attributes['href'] = 'href'
    _children = atom.AtomBase._children.copy()
    _children['{%s}feed' % atom.ATOM_NAMESPACE] = (
        'feed', PositionFeed)

    def __init__(self, href=None, feed=None, **kwargs):
        self.href = href
        self.feed = feed
        atom.AtomBase.__init__(self, **kwargs)
class PortfolioData(atom.AtomBase):
    """The <gf:portfolioData> element.

    Holds portfolio-level statistics: base currency, gain percentage,
    trailing returns, and monetary children (cost basis, day's gain,
    total gain, market value).
    """

    _tag = 'portfolioData'
    _namespace = GF_NAMESPACE
    _attributes = atom.AtomBase._attributes.copy()
    _attributes['currencyCode'] = 'currency_code'
    _attributes['gainPercentage'] = 'gain_percentage'
    # Trailing returns: 1 week, 4 weeks, 3 months, year-to-date, 1/3/5 years.
    _attributes['return1w'] = 'return1w'
    _attributes['return4w'] = 'return4w'
    _attributes['return3m'] = 'return3m'
    _attributes['returnYTD'] = 'returnYTD'
    _attributes['return1y'] = 'return1y'
    _attributes['return3y'] = 'return3y'
    _attributes['return5y'] = 'return5y'
    _attributes['returnOverall'] = 'return_overall'
    _children = atom.AtomBase._children.copy()
    _children['{%s}costBasis' % GF_NAMESPACE] = ('cost_basis', CostBasis)
    _children['{%s}daysGain' % GF_NAMESPACE] = ('days_gain', DaysGain)
    _children['{%s}gain' % GF_NAMESPACE] = ('gain', Gain)
    _children['{%s}marketValue' % GF_NAMESPACE] = ('market_value', MarketValue)

    def __init__(self, currency_code=None, gain_percentage=None,
                 return1w=None, return4w=None, return3m=None, returnYTD=None,
                 return1y=None, return3y=None, return5y=None, return_overall=None,
                 cost_basis=None, days_gain=None, gain=None, market_value=None, **kwargs):
        self.currency_code = currency_code
        self.gain_percentage = gain_percentage
        self.return1w = return1w
        self.return4w = return4w
        self.return3m = return3m
        self.returnYTD = returnYTD
        self.return1y = return1y
        self.return3y = return3y
        self.return5y = return5y
        self.return_overall = return_overall
        self.cost_basis = cost_basis
        self.days_gain = days_gain
        self.gain = gain
        self.market_value = market_value
        atom.AtomBase.__init__(self, **kwargs)


def PortfolioDataFromString(xml_string):
    """Parse a <gf:portfolioData> XML string into a PortfolioData instance."""
    return atom.CreateClassFromXMLString(PortfolioData, xml_string)
class PortfolioEntry(gdata.GDataEntry):
    """An entry of the PortfolioFeed.

    A PortfolioEntry contains the portfolio's title along with PortfolioData
    such as currency, total market value, and overall performance statistics.
    """

    _tag = 'entry'
    _namespace = atom.ATOM_NAMESPACE
    _children = gdata.GDataEntry._children.copy()
    _children['{%s}portfolioData' % GF_NAMESPACE] = (
        'portfolio_data', PortfolioData)
    _children['{%s}feedLink' % GD_NAMESPACE] = (
        'feed_link', PositionFeedLink)

    def __init__(self, portfolio_data=None, feed_link=None, **kwargs):
        self.portfolio_data = portfolio_data
        self.feed_link = feed_link
        gdata.GDataEntry.__init__(self, **kwargs)

    def portfolio_title(self):
        return self.title.text

    def set_portfolio_title(self, portfolio_title):
        self.title = atom.Title(text=portfolio_title, title_type='text')

    portfolio_title = property(portfolio_title, set_portfolio_title,
        doc='The portfolio title as a string (i.e. portfolio.title.text).')

    def portfolio_id(self):
        # The atom id is a URL; the portfolio ID is its last path segment.
        return self.id.text.split("/")[-1]

    portfolio_id = property(portfolio_id,
        doc='The portfolio ID. Do not confuse with portfolio.id.')

    def positions(self):
        if self.feed_link.feed:
            return self.feed_link.feed.entry
        else:
            return None

    positions = property(positions, doc="""
      Inlined PositionEntries are returned if PortfolioFeed was queried
      with positions='true'.""")


def PortfolioEntryFromString(xml_string):
    """Parse a portfolio entry XML string into a PortfolioEntry."""
    return atom.CreateClassFromXMLString(PortfolioEntry, xml_string)
class PortfolioFeed(gdata.GDataFeed):
    """A feed that lists all of the user's portfolios.

    A portfolio is a collection of positions that the user holds in various
    securities, plus metadata. The PortfolioFeed lists all of the user's
    portfolios as a list of PortfolioEntries.
    """

    _tag = 'feed'
    _namespace = atom.ATOM_NAMESPACE
    _children = gdata.GDataFeed._children.copy()
    _children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry', [PortfolioEntry])


def PortfolioFeedFromString(xml_string):
    """Parse a portfolio feed XML string into a PortfolioFeed."""
    return atom.CreateClassFromXMLString(PortfolioFeed, xml_string)
| apache-2.0 |
dongjoon-hyun/tensorflow | tensorflow/contrib/distributions/python/kernel_tests/chi2_test.py | 25 | 3426 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for initializers."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from scipy import stats
from tensorflow.contrib.distributions.python.ops import chi2 as chi2_lib
from tensorflow.python.framework import constant_op
from tensorflow.python.ops import math_ops
from tensorflow.python.platform import test
class Chi2Test(test.TestCase):
  """Validates the contrib Chi2 distribution against scipy.stats.chi2."""

  def testChi2LogPDF(self):
    # log_prob/prob must match scipy's logpdf/pdf elementwise, and
    # prob must equal exp(log_prob).
    with self.cached_session():
      batch_size = 6
      df = constant_op.constant([2.0] * batch_size, dtype=np.float64)
      df_v = 2.0
      x = np.array([2.5, 2.5, 4.0, 0.1, 1.0, 2.0], dtype=np.float64)
      chi2 = chi2_lib.Chi2(df=df)
      expected_log_pdf = stats.chi2.logpdf(x, df_v)

      log_pdf = chi2.log_prob(x)
      self.assertEqual(log_pdf.get_shape(), (6,))
      self.assertAllClose(log_pdf.eval(), expected_log_pdf)

      pdf = chi2.prob(x)
      self.assertEqual(pdf.get_shape(), (6,))
      self.assertAllClose(pdf.eval(), np.exp(expected_log_pdf))

  def testChi2CDF(self):
    # CDF must match scipy's cdf elementwise for a batched df.
    with self.cached_session():
      batch_size = 6
      df = constant_op.constant([2.0] * batch_size, dtype=np.float64)
      df_v = 2.0
      x = np.array([2.5, 2.5, 4.0, 0.1, 1.0, 2.0], dtype=np.float64)

      chi2 = chi2_lib.Chi2(df=df)
      expected_cdf = stats.chi2.cdf(x, df_v)

      cdf = chi2.cdf(x)
      self.assertEqual(cdf.get_shape(), (6,))
      self.assertAllClose(cdf.eval(), expected_cdf)

  def testChi2Mean(self):
    with self.cached_session():
      df_v = np.array([1., 3, 5], dtype=np.float64)
      expected_mean = stats.chi2.mean(df_v)
      chi2 = chi2_lib.Chi2(df=df_v)
      self.assertEqual(chi2.mean().get_shape(), (3,))
      self.assertAllClose(chi2.mean().eval(), expected_mean)

  def testChi2Variance(self):
    with self.cached_session():
      df_v = np.array([1., 3, 5], np.float64)
      expected_variances = stats.chi2.var(df_v)
      chi2 = chi2_lib.Chi2(df=df_v)
      self.assertEqual(chi2.variance().get_shape(), (3,))
      self.assertAllClose(chi2.variance().eval(), expected_variances)

  def testChi2Entropy(self):
    with self.cached_session():
      df_v = np.array([1., 3, 5], dtype=np.float64)
      expected_entropy = stats.chi2.entropy(df_v)
      chi2 = chi2_lib.Chi2(df=df_v)
      self.assertEqual(chi2.entropy().get_shape(), (3,))
      self.assertAllClose(chi2.entropy().eval(), expected_entropy)

  def testChi2WithAbsDf(self):
    # Chi2WithAbsDf should expose floor(abs(df)) as its degrees of freedom,
    # even for negative/fractional inputs.
    with self.cached_session():
      df_v = np.array([-1.3, -3.2, 5], dtype=np.float64)
      chi2 = chi2_lib.Chi2WithAbsDf(df=df_v)
      self.assertAllClose(
          math_ops.floor(math_ops.abs(df_v)).eval(),
          chi2.df.eval())


if __name__ == "__main__":
  test.main()
| apache-2.0 |
holmes/intellij-community | python/lib/Lib/site-packages/django/contrib/auth/tests/urls.py | 90 | 1102 | from django.conf.urls.defaults import patterns
from django.contrib.auth.urls import urlpatterns
from django.contrib.auth.views import password_reset
from django.contrib.auth.decorators import login_required
from django.http import HttpResponse
from django.template import Template, RequestContext
def remote_user_auth_view(request):
    """Dummy view used by the remote-user authentication tests.

    Renders the current user's name so tests can assert on the response body.
    """
    template = Template("Username is {{ user }}.")
    context = RequestContext(request, {})
    return HttpResponse(template.render(context))
# Special URLs for the auth test cases, appended to the stock auth patterns.
urlpatterns = urlpatterns + patterns('',
    # Logout variants exercising redirect_field_name and next_page options.
    (r'^logout/custom_query/$', 'django.contrib.auth.views.logout', dict(redirect_field_name='follow')),
    (r'^logout/next_page/$', 'django.contrib.auth.views.logout', dict(next_page='/somewhere/')),
    (r'^remote_user/$', remote_user_auth_view),
    # Password reset with an explicit sender address.
    (r'^password_reset_from_email/$', 'django.contrib.auth.views.password_reset', dict(from_email='staffmember@example.com')),
    # login_required with the default login URL and with a custom login_url.
    (r'^login_required/$', login_required(password_reset)),
    (r'^login_required_login_url/$', login_required(password_reset, login_url='/somewhere/')),
)
| apache-2.0 |
cardmagic/PyAMF | pyamf/adapters/_django_db_models_base.py | 1 | 8476 | # Copyright (c) 2007-2009 The PyAMF Project.
# See LICENSE.txt for details.
"""
`django.db.models` adapter module.
:see: `Django Project <http://www.djangoproject.com>`_
:since: 0.4.1
"""
from django.db.models.base import Model
from django.db.models import fields
from django.db.models.fields import related, files
import datetime
import pyamf
from pyamf.util import imports
class DjangoReferenceCollection(dict):
    """
    This helper class holds a dict of klass to pk/objects loaded from the
    underlying db, i.e. it maps klass -> {primary_key: instance}.

    :since: 0.5
    """

    def _getClass(self, klass):
        """Return the pk -> instance dict for `klass`, creating it if absent."""
        # setdefault replaces the previous `if klass not in self.keys()`
        # check, avoiding building a key list and doing a double lookup.
        return self.setdefault(klass, {})

    def getClassKey(self, klass, key):
        """
        Return an instance based on klass/key.

        If an instance cannot be found then `KeyError` is raised.

        :param klass: The class of the instance.
        :param key: The primary_key of the instance.
        :return: The instance linked to the `klass`/`key`.
        :rtype: Instance of `klass`.
        """
        return self._getClass(klass)[key]

    def addClassKey(self, klass, key, obj):
        """
        Adds an object to the collection, based on klass and key.

        :param klass: The class of the object.
        :param key: The datastore key of the object.
        :param obj: The loaded instance from the datastore.
        """
        self._getClass(klass)[key] = obj
class DjangoClassAlias(pyamf.ClassAlias):
    """pyamf class alias that introspects a Django model's fields.

    Splits the model's fields into directly encodable fields vs. foreign-key
    relations, and converts Django field values to/from AMF-friendly types.
    """

    def getCustomProperties(self):
        # name -> django Field for directly encodable fields.
        self.fields = {}
        # name -> ForeignKey for lazily-resolved relations.
        self.relations = {}
        self.columns = []

        self.meta = self.klass._meta

        for name in self.meta.get_all_field_names():
            x = self.meta.get_field_by_name(name)[0]

            if isinstance(x, files.FileField):
                # File contents cannot be set from AMF; expose read-only.
                self.readonly_attrs.update([name])

            if isinstance(x, related.RelatedObject):
                # Skip reverse relations.
                continue

            if not isinstance(x, related.ForeignKey):
                self.fields[name] = x
            else:
                self.relations[name] = x

        # Many-to-many descriptors live on the class, not in _meta fields.
        for k, v in self.klass.__dict__.iteritems():
            if isinstance(v, related.ReverseManyRelatedObjectsDescriptor):
                self.fields[k] = v.field

        parent_fields = []

        # Multi-table inheritance: parent-link fields are excluded.
        for field in self.meta.parents.values():
            parent_fields.append(field.attname)
            del self.relations[field.name]

        self.exclude_attrs.update(parent_fields)

        props = self.fields.keys()

        self.encodable_properties.update(props)
        self.decodable_properties.update(props)

    def _compile_base_class(self, klass):
        # Stop alias compilation at the Django Model base class.
        if klass is Model:
            return

        pyamf.ClassAlias._compile_base_class(self, klass)

    def _encodeValue(self, field, value):
        """Convert a Django field value into an AMF-encodable value."""
        if value is fields.NOT_PROVIDED:
            return pyamf.Undefined

        if value is None:
            return value

        # deal with dates ..
        if isinstance(field, fields.DateTimeField):
            return value
        elif isinstance(field, fields.DateField):
            # Promote a bare date to a midnight datetime for AMF.
            return datetime.datetime(value.year, value.month, value.day, 0, 0, 0)
        elif isinstance(field, fields.TimeField):
            # Encode a bare time as a datetime anchored at the epoch date.
            return datetime.datetime(1970, 1, 1,
                value.hour, value.minute, value.second, value.microsecond)
        elif isinstance(value, files.FieldFile):
            # Only the file's name is transferable over AMF.
            return value.name

        return value

    def _decodeValue(self, field, value):
        """Convert an AMF-decoded value back to the Django field's type."""
        if value is pyamf.Undefined:
            return fields.NOT_PROVIDED

        if isinstance(field, fields.AutoField) and value == 0:
            # AMF clients send 0 for "no id yet"; Django expects None.
            return None
        elif isinstance(field, fields.DateTimeField):
            # deal with dates
            return value
        elif isinstance(field, fields.DateField):
            if not value:
                return None

            return datetime.date(value.year, value.month, value.day)
        elif isinstance(field, fields.TimeField):
            if not value:
                return None

            return datetime.time(value.hour, value.minute, value.second, value.microsecond)

        return value

    def getEncodableAttributes(self, obj, **kwargs):
        """Build the attribute dict sent to the AMF encoder for `obj`."""
        attrs = pyamf.ClassAlias.getEncodableAttributes(self, obj, **kwargs)

        if not attrs:
            attrs = {}

        for name, prop in self.fields.iteritems():
            if name not in attrs.keys():
                continue

            if isinstance(prop, related.ManyToManyField):
                # Evaluate the m2m queryset into a plain list for encoding.
                attrs[name] = [x for x in getattr(obj, name).all()]
            else:
                attrs[name] = self._encodeValue(prop, getattr(obj, name))

        keys = attrs.keys()

        # Strip private/cache attributes (e.g. '_foo_cache').
        for key in keys:
            if key.startswith('_'):
                del attrs[key]

        for name, relation in self.relations.iteritems():
            # Only encode relations that Django has already loaded, so that
            # encoding never triggers additional db queries.
            if '_%s_cache' % name in obj.__dict__:
                attrs[name] = getattr(obj, name)

                # The related object replaces the raw *_id column value.
                del attrs[relation.column]

        if not attrs:
            attrs = None

        return attrs

    def getDecodableAttributes(self, obj, attrs, **kwargs):
        """Convert decoded AMF attributes back into Django-ready values."""
        attrs = pyamf.ClassAlias.getDecodableAttributes(self, obj, attrs, **kwargs)

        for n in self.decodable_properties:
            if n in self.relations:
                continue

            f = self.fields[n]

            attrs[f.attname] = self._decodeValue(f, attrs[n])

        # primary key of django object must always be set first for
        # relationships with other model objects to work properly
        # and dict.iteritems() does not guarantee order
        #
        # django also forces the use only one attribute as primary key, so
        # our obj._meta.pk.attname check is sufficient)
        try:
            setattr(obj, obj._meta.pk.attname, attrs[obj._meta.pk.attname])
            del attrs[obj._meta.pk.attname]
        except KeyError:
            pass

        return attrs
def getDjangoObjects(context):
    """
    Returns a reference to the `django_objects` on the context. If it doesn't
    exist then it is created.

    :param context: The context to load the `django_objects` index from.
    :type context: Instance of :class:`pyamf.BaseContext`
    :return: The `django_objects` index reference.
    :rtype: Instance of :class:`DjangoReferenceCollection`
    :since: 0.5
    """
    # EAFP: attempt the lookup and lazily create the index on first use.
    try:
        return context.django_objects
    except AttributeError:
        context.django_objects = DjangoReferenceCollection()

    return context.django_objects
def writeDjangoObject(self, obj, *args, **kwargs):
    """
    The Django ORM creates new instances of objects for each db request.
    This is a problem for PyAMF as it uses the id(obj) of the object to do
    reference checking.

    We could just ignore the problem, but the objects are conceptually the
    same so the effort should be made to attempt to resolve references for a
    given object graph.

    We create a new map on the encoder context object which contains a dict of
    C{object.__class__: {key1: object1, key2: object2, .., keyn: objectn}}. We
    use the primary key to do the reference checking.

    :since: 0.5
    """
    if not isinstance(obj, Model):
        # Not a Django model; fall back to the stock encoder behaviour.
        self.writeNonDjangoObject(obj, *args, **kwargs)

        return

    context = self.context
    kls = obj.__class__
    s = obj.pk

    if s is None:
        # Unsaved model instances have no primary key to dedupe on.
        self.writeNonDjangoObject(obj, *args, **kwargs)

        return

    django_objects = getDjangoObjects(context)

    try:
        # Re-use the first instance seen for this class/pk pair so AMF
        # reference tracking treats all copies as the same object.
        referenced_object = django_objects.getClassKey(kls, s)
    except KeyError:
        referenced_object = obj
        django_objects.addClassKey(kls, s, obj)

    self.writeNonDjangoObject(referenced_object, *args, **kwargs)
def install_django_reference_model_hook(mod):
    """
    Called when :module:`pyamf.amf0` or :module:`pyamf.amf3` are imported. Attaches the
    :func:`writeDjangoObject` method to the `Encoder` class in that module.

    :param mod: The module imported.
    :since: 0.4.1
    """
    # Stash the original writeObject once, guarded so a repeated install
    # does not chain the wrapper onto itself.
    if not hasattr(mod.Encoder, 'writeNonDjangoObject'):
        mod.Encoder.writeNonDjangoObject = mod.Encoder.writeObject
        mod.Encoder.writeObject = writeDjangoObject
# initialise the module here: hook into pyamf
pyamf.register_alias_type(DjangoClassAlias, Model)

# hook the writeDjangoObject method to the Encoder class on import
imports.when_imported('pyamf.amf0', install_django_reference_model_hook)
imports.when_imported('pyamf.amf3', install_django_reference_model_hook)
| mit |
RenaudParis/servo | tests/wpt/update/fetchlogs.py | 222 | 3183 | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import argparse
import cStringIO
import gzip
import json
import os
import requests
import urlparse
treeherder_base = "https://treeherder.mozilla.org/"
"""Simple script for downloading structured logs from treeherder.
For the moment this is specialised to work with web-platform-tests
logs; in due course it should move somewhere generic and get hooked
up to mach or similar"""
# Interpretation of the "job" list from
# https://github.com/mozilla/treeherder-service/blob/master/treeherder/webapp/api/utils.py#L18
def create_parser():
    """Build the command-line parser: positional branch and commit args."""
    parser = argparse.ArgumentParser()
    for arg_name, arg_help in (("branch", "Branch on which jobs ran"),
                               ("commit", "Commit hash for push")):
        parser.add_argument(arg_name, action="store", help=arg_help)
    return parser
def download(url, prefix, dest, force_suffix=True):
    """Stream `url` to a file under `dest`, deriving a non-clobbering name.

    :param url: URL to fetch.
    :param prefix: base name for the output file (may be empty).
    :param dest: destination directory; None means the current directory.
    :param force_suffix: when True, always append a numeric "-N" suffix
        instead of trying the plain "<prefix>.log" name first.
    """
    if dest is None:
        dest = "."

    if prefix and not force_suffix:
        name = os.path.join(dest, prefix + ".log")
    else:
        name = None

    counter = 0

    # Bump the counter until we find a file name that does not exist yet.
    while not name or os.path.exists(name):
        counter += 1
        sep = "" if not prefix else "-"
        name = os.path.join(dest, prefix + sep + str(counter) + ".log")

    # Stream the response body to disk in 1 KiB chunks.
    with open(name, "wb") as f:
        resp = requests.get(url, stream=True)
        for chunk in resp.iter_content(1024):
            f.write(chunk)
def get_blobber_url(branch, job):
    """Return the upload URL of the wpt_raw.log artifact for `job`, or None.

    Queries the treeherder "Job Info" artifact for the given job and scans
    its job_details for the raw structured-log upload.

    :param branch: treeherder project/branch name.
    :param job: job dict as returned by the treeherder jobs API (needs "id").
    """
    job_id = job["id"]
    resp = requests.get(urlparse.urljoin(treeherder_base,
                                         "/api/project/%s/artifact/?job_id=%i&name=Job%%20Info" % (branch,
                                                                                                   job_id)))
    job_data = resp.json()

    if job_data:
        assert len(job_data) == 1
        job_data = job_data[0]
        try:
            details = job_data["blob"]["job_details"]
            for item in details:
                if item["value"] == "wpt_raw.log":
                    return item["url"]
        except (KeyError, TypeError):
            # The artifact payload lacked the expected structure; treat it as
            # "no log available".  The previous bare `except:` also swallowed
            # KeyboardInterrupt/SystemExit and hid genuine bugs.
            return None

    # No artifact data, or no wpt_raw.log entry found.
    return None
def get_structured_logs(branch, commit, dest=None):
    """Download wpt_raw.log artifacts for every web-platform-tests job of a push.

    :param branch: treeherder project/branch name.
    :param commit: revision hash identifying the push.
    :param dest: destination directory for the downloaded logs; None means
        the current directory.
    """
    # Resolve the push (result set) id for the given revision.
    resp = requests.get(urlparse.urljoin(treeherder_base, "/api/project/%s/resultset/?revision=%s" % (branch, commit)))

    revision_data = resp.json()

    result_set = revision_data["results"][0]["id"]

    # List all jobs in that push.
    resp = requests.get(urlparse.urljoin(treeherder_base, "/api/project/%s/jobs/?result_set_id=%s&count=2000&exclusion_profile=false" % (branch, result_set)))

    job_data = resp.json()

    for result in job_data["results"]:
        job_type_name = result["job_type_name"]
        if job_type_name.startswith("W3C Web Platform"):
            url = get_blobber_url(branch, result)
            if url:
                # Name each downloaded log after the job's platform.
                prefix = result["platform"]
                # Fix: forward `dest` instead of the previous hard-coded
                # None, so the parameter is actually honoured (identical
                # behaviour for existing callers, which use the default).
                download(url, prefix, dest)
def main():
    """Command-line entry point: fetch logs for the given branch/commit."""
    parser = create_parser()
    args = parser.parse_args()

    get_structured_logs(args.branch, args.commit)


if __name__ == "__main__":
    main()
| mpl-2.0 |
vishnu2kmohan/dcos | test_util/installer_api_test.py | 4 | 14586 | """Python API for interacting with installer API
"""
import abc
import json
import os
from subprocess import CalledProcessError
from typing import Optional
import pkg_resources
import requests
import yaml
from retrying import retry
from pkgpanda.util import load_yaml
from ssh.tunnel import Tunnelled
MAX_STAGE_TIME = int(os.getenv('INSTALLER_API_MAX_STAGE_TIME', '900'))
class AbstractDcosInstaller(metaclass=abc.ABCMeta):
    """Common interface for driving a DC/OS installer on a remote host.

    Subclasses implement the genconf/preflight/deploy/postflight stages
    (e.g. via the installer's web API or its CLI).
    """

    def __init__(self):
        # When True, the installer runs without fetching external resources.
        self.offline_mode = False

    def setup_remote(
            self, tunnel: Optional[Tunnelled], installer_path, download_url):
        """Creates a light, system-based ssh handler

        Args:
            tunnel: Tunneled instance to avoid recreating SSH connections.
                If set to None, ssh_user, host, and ssh_key_path must be
                set and one-off connections will be made
            installer_path: (str) path on host to download installer to
            download_url: (str) URL that installer can be pulled from
        """
        self.installer_path = installer_path
        self.tunnel = tunnel
        # The installer's web API listens on port 9000 of the remote host.
        self.url = "http://{}:9000".format(tunnel.host)

        @retry(wait_fixed=3000, stop_max_delay=300 * 1000)
        def download_dcos():
            """Response status 403 is fatal for curl's retry. Additionally, S3 buckets
            have been returning 403 for valid uploads for 10-15 minutes after CI finished build
            Therefore, give a five minute buffer to help stabilize CI
            """
            self.tunnel.remote_cmd(['curl', '-fLsSv', '--retry', '20', '-Y', '100000', '-y', '60',
                                    '--create-dirs', '-o', self.installer_path, download_url])

        if download_url:
            download_dcos()

    def get_hashed_password(self, password):
        """Run the installer's --hash-password and return the hash string."""
        p = self.tunnel.remote_cmd(["bash", self.installer_path, "--hash-password", password])
        # password hash is last line output but output ends with newline
        passwd_hash = p.decode('utf-8').split('\n')[-2]
        return passwd_hash

    @staticmethod
    def ip_detect_script(preset_name):
        """Load the bundled ip-detect preset script as text.

        Raises:
            Exception: if the packaged preset resource cannot be read.
        """
        try:
            return pkg_resources.resource_string('gen', 'ip-detect/{}.sh'.format(preset_name)).decode('utf-8')
        except OSError as exc:
            raise Exception('Failed to read ip-detect script preset {}: {}'.format(preset_name, exc)) from exc

    @abc.abstractmethod
    def genconf(self, expect_errors=False):
        pass

    @abc.abstractmethod
    def preflight(self, expect_errors=False):
        pass

    @abc.abstractmethod
    def install_prereqs(self, expect_errors=False):
        pass

    @abc.abstractmethod
    def deploy(self, expect_errors=False):
        pass

    @abc.abstractmethod
    def postflight(self, expect_errors=False):
        pass
class DcosApiInstaller(AbstractDcosInstaller):
    """Drives a DC/OS installation through the installer's web API."""

    def start_web_server(self):
        """Launch the installer web server on the remote host and poll until
        it answers HTTP 200.
        """
        cmd = ['DCOS_INSTALLER_DAEMONIZE=true', 'bash', self.installer_path, '--web']
        # NOTE(review): assumes self.offline_mode is set by a caller/subclass;
        # it is not assigned anywhere in this class — confirm.
        if self.offline_mode:
            cmd.append('--offline')
        self.tunnel.remote_cmd(cmd)

        @retry(wait_fixed=1000, stop_max_delay=10000)
        def wait_for_up():
            response = requests.get(self.url)
            assert response.status_code == 200, "{} {}".format(response.status_code, response.content)

        print("Webserver started")
        wait_for_up()

    def genconf(
            self, master_list, agent_list, public_agent_list, ssh_user, ssh_key,
            ip_detect, platform=None, rexray_config=None, rexray_config_preset=None,
            zk_host=None, expect_errors=False, add_config_path=None):
        """Runs configuration generation.

        Args:
            master_list: list of IPv4 addresses to be used as masters
            agent_list: list of IPv4 addresses to be used as agents
            public_agent_list: list of IPv4 addresses to be used as public agents
            ssh_user (str): name of SSH user that has access to targets
            ssh_key (str): complete public SSH key for ssh_user. Must already
                be installed on targets as authorized_key
            ip_detect (str): name of preset IP-detect script
            platform (str): name of the infrastructure platform
            rexray_config: complete contents of REX-Ray config file. Must be a
                JSON-serializable object.
            rexray_config_preset (str): name of preset REX-Ray config
            zk_host (optional): if provided, zk is used for exhibitor backend
            expect_errors (optional): raises error if result is unexpected
            add_config_path (optional): string pointing to a file with additional
                config parameters to be merged or used as override

        Raises:
            AssertionError: "error" present in returned json keys when error
                was not expected or vice versa
        """
        headers = {'content-type': 'application/json'}
        payload = {
            'master_list': master_list,
            'agent_list': agent_list,
            'public_agent_list': public_agent_list,
            'ssh_user': ssh_user,
            'ssh_key': ssh_key,
            'ip_detect_script': self.ip_detect_script(ip_detect)}
        # Optional settings are only sent when provided.
        if platform:
            payload['platform'] = platform
        if rexray_config:
            payload['rexray_config'] = rexray_config
        if rexray_config_preset:
            payload['rexray_config_preset'] = rexray_config_preset
        if zk_host:
            payload['exhibitor_zk_hosts'] = zk_host
        if add_config_path:
            add_config = load_yaml(add_config_path)
            payload.update(add_config)
        response = requests.post(self.url + '/api/v1/configure', headers=headers, data=json.dumps(payload))
        assert response.status_code == 200, "{} {}".format(response.status_code, response.content)
        response_json_keys = list(response.json().keys())
        if expect_errors:
            assert "error" in response_json_keys
        else:
            assert "error" not in response_json_keys

    def install_prereqs(self, expect_errors=False):
        """Prereq installation is folded into web-API preflight; requires
        online mode."""
        assert not self.offline_mode, "Install prereqs can only be run without --offline mode"
        self.preflight(expect_errors=expect_errors)

    def preflight(self, expect_errors=False):
        """Run the preflight stage via the API."""
        self.do_and_check('preflight', expect_errors)

    def deploy(self, expect_errors=False):
        """Run the deploy stage via the API."""
        self.do_and_check('deploy', expect_errors)

    def postflight(self, expect_errors=False):
        """Run the postflight stage via the API."""
        self.do_and_check('postflight', expect_errors)

    def do_and_check(self, action, expect_errors):
        """Start an action and block until all hosts report a result.

        Args:
            action (str): one of 'preflight', 'deploy', 'postflight'
        """
        self.start_action(action)
        self.wait_for_check_action(
            action=action, expect_errors=expect_errors,
            wait=30000, stop_max_delay=MAX_STAGE_TIME * 1000)

    def wait_for_check_action(self, action, wait, stop_max_delay, expect_errors):
        """Retries method against API until returned data shows that all hosts
        have finished.

        Args:
            action (str): choices are 'preflight', 'deploy', 'postflight'
            wait (int): how many milliseconds to wait between tries
            stop_max_delay (int): total duration (in milliseconds) to retry for
            expect_errors (boolean): raises error if result is not as expected

        Raises:
            AssertionError: checks 'host_status' and raises error...
                -if expect_errors is False and not all status=='success'
                -if expect_errors is True and all status=='success'
        """
        @retry(wait_fixed=wait, stop_max_delay=stop_max_delay)
        def wait_for_finish():
            # Only return if output is not empty and all hosts are not running
            output = self.check_action(action)
            assert output != {}
            host_data = output['hosts']
            finished_run = all(map(lambda host: host['host_status'] not in ['running', 'unstarted'],
                                   host_data.values()))
            assert finished_run, 'Action timed out! Last output: {}'.format(output)
            return host_data

        host_data = wait_for_finish()
        success = True
        for host in host_data.keys():
            if host_data[host]['host_status'] != 'success':
                success = False
                print("Failures detected in {}: {}".format(action, host_data[host]))
        if expect_errors:
            assert not success, "Results were successful, but errors were expected in {}".format(action)
        else:
            assert success, "Results for {} included failures, when all should have succeeded".format(action)

    def start_action(self, action):
        """Kick off an action; returns the requests.Response.

        Args:
            action (str): one of 'preflight', 'deploy', 'postflight'
        """
        return requests.post(self.url + '/api/v1/action/{}'.format(action))

    def check_action(self, action):
        """Fetch current status JSON for an action.

        Args:
            action (str): one of 'preflight', 'deploy', 'postflight', 'success'
        """
        return requests.get(self.url + '/api/v1/action/{}'.format(action)).json()
class DcosCliInstaller(AbstractDcosInstaller):
    """Drives a DC/OS installation through the installer's CLI flags."""

    def run_cli_cmd(self, mode, expect_errors=False):
        """Runs commands through the CLI

        NOTE: We use `bash` as a wrapper here to make it so dcos_generate_config.sh
        doesn't have to be executable

        Args:
            mode (str): single flag to be handed to CLI
            expect_errors: raise error if result is unexpected

        Raises:
            AssertionError: if return_code is...
                -zero and expect_errors is True
                -nonzero and expect_errors is False
        """
        cmd = ['bash', self.installer_path, mode]
        if expect_errors:
            try:
                output = self.tunnel.remote_cmd(cmd, timeout=MAX_STAGE_TIME)
                err_msg = "{} succeeded when it should have failed".format(cmd)
                print(output)
                raise AssertionError(err_msg)
            except CalledProcessError:
                # expected behavior
                pass
        else:
            output = self.tunnel.remote_cmd(cmd, timeout=MAX_STAGE_TIME)
            print(output)
            return output

    def genconf(
            self, master_list, agent_list, public_agent_list, ssh_user, ssh_key,
            ip_detect, platform=None, rexray_config=None, rexray_config_preset=None,
            zk_host=None, expect_errors=False, add_config_path=None,
            bootstrap_url='file:///opt/dcos_install_tmp'):
        """Runs configuration generation.

        Args:
            master_list: list of IPv4 addresses to be used as masters
            agent_list: list of IPv4 addresses to be used as agents
            public_agent_list: list of IPv4 addresses to be used as public agents
            ssh_user (str): name of SSH user that has access to targets
            ssh_key (str): complete public SSH key for ssh_user. Must already
                be installed on targets as authorized_key
            ip_detect (str): name of preset IP-detect script
            platform (str): name of the infrastructure platform
            rexray_config: complete contents of REX-Ray config file. Must be a
                JSON-serializable object.
            rexray_config_preset (str): name of preset REX-Ray config
            zk_host (optional): if provided, zk is used for exhibitor backend
            expect_errors (optional): raises error if result is unexpected
            add_config_path (optional): string pointing to a file with additional
                config parameters to be merged or used as override

        Raises:
            AssertionError: "error" present in returned json keys when error
                was not expected or vice versa
        """
        test_config = {
            'cluster_name': 'SSH Installed DC/OS',
            'bootstrap_url': bootstrap_url,
            'dns_search': 'mesos',
            'master_discovery': 'static',
            'master_list': master_list,
            'ssh_user': ssh_user,
            'agent_list': agent_list,
            'public_agent_list': public_agent_list,
            'process_timeout': MAX_STAGE_TIME}
        if platform:
            test_config['platform'] = platform
        if rexray_config:
            test_config['rexray_config'] = rexray_config
        if rexray_config_preset:
            test_config['rexray_config_preset'] = rexray_config_preset
        if zk_host:
            test_config['exhibitor_storage_backend'] = 'zookeeper'
            test_config['exhibitor_zk_hosts'] = zk_host
            test_config['exhibitor_zk_path'] = '/exhibitor'
        else:
            test_config['exhibitor_storage_backend'] = 'static'
        if add_config_path:
            add_config = load_yaml(add_config_path)
            test_config.update(add_config)
        # Write genconf inputs locally, then copy them to the remote
        # genconf/ directory expected by the installer.
        with open('config.yaml', 'w') as config_fh:
            config_fh.write(yaml.dump(test_config))
        with open('ip-detect', 'w') as ip_detect_fh:
            ip_detect_fh.write(self.ip_detect_script(ip_detect))
        with open('ssh_key', 'w') as key_fh:
            key_fh.write(ssh_key)
        remote_dir = os.path.dirname(self.installer_path)
        self.tunnel.remote_cmd(['mkdir', '-p', os.path.join(remote_dir, 'genconf')])
        self.tunnel.write_to_remote('config.yaml', os.path.join(remote_dir, 'genconf/config.yaml'))
        self.tunnel.write_to_remote('ip-detect', os.path.join(remote_dir, 'genconf/ip-detect'))
        self.tunnel.write_to_remote('ssh_key', os.path.join(remote_dir, 'genconf/ssh_key'))
        # ssh requires the private key to not be world-readable.
        self.tunnel.remote_cmd(['chmod', '600', os.path.join(remote_dir, 'genconf/ssh_key')])
        self.run_cli_cmd('--genconf', expect_errors=expect_errors)

    def preflight(self, expect_errors=False):
        """Run installer preflight checks."""
        self.run_cli_cmd('--preflight', expect_errors=expect_errors)

    def install_prereqs(self, expect_errors=False):
        """Install prerequisites on targets, then re-run preflight."""
        self.run_cli_cmd('--install-prereqs', expect_errors=expect_errors)
        self.preflight()

    def deploy(self, expect_errors=False):
        """Deploy DC/OS to the configured targets."""
        self.run_cli_cmd('--deploy', expect_errors=expect_errors)

    def postflight(self, expect_errors=False):
        """Run post-deployment health checks."""
        self.run_cli_cmd('--postflight', expect_errors=expect_errors)

    def generate_node_upgrade_script(self, version, expect_errors=False):
        """Generate a node upgrade script for the given installed version."""
        # tunnel run_cmd calls check_output which returns the output hence returning this
        return self.run_cli_cmd("--generate-node-upgrade-script " + version, expect_errors=expect_errors)
| apache-2.0 |
endlessm/chromium-browser | third_party/libyuv/tools_libyuv/valgrind/gdb_helper.py | 9 | 3053 | # Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
''' A bunch of helper functions for querying gdb.'''
import logging
import os
import re
import tempfile
GDB_LINE_RE = re.compile(r'Line ([0-9]*) of "([^"]*)".*')
def _GdbOutputToFileLine(output_line):
''' Parse the gdb output line, return a pair (file, line num) '''
match = GDB_LINE_RE.match(output_line)
if match:
return match.groups()[1], match.groups()[0]
else:
return None
def ResolveAddressesWithinABinary(binary_name, load_address, address_list):
  ''' For each address, return a pair (file, line num).

  Drives a batch gdb session: loads the binary's symbols at load_address,
  issues one "info line" query per address, and parses the output into a
  dict mapping address -> (file, line) (or (None, None) if unknown).
  '''
  # NamedTemporaryFile defaults to binary mode ('w+b'); the gdb command
  # script is text, so it must be opened with mode='w' or the str writes
  # below raise TypeError on Python 3.
  commands = tempfile.NamedTemporaryFile(mode='w')
  commands.write('add-symbol-file "%s" %s\n' % (binary_name, load_address))
  for addr in address_list:
    commands.write('info line *%s\n' % addr)
  commands.write('quit\n')
  # Flush so gdb sees the complete script while the file is still open.
  commands.flush()
  gdb_commandline = 'gdb -batch -x %s 2>/dev/null' % commands.name
  gdb_pipe = os.popen(gdb_commandline)
  result = gdb_pipe.readlines()
  # gdb emits one 'Line ...' or 'No line ...' record per queried address,
  # in query order, so walk address_list in lockstep.
  address_count = 0
  ret = {}
  for line in result:
    if line.startswith('Line'):
      ret[address_list[address_count]] = _GdbOutputToFileLine(line)
      address_count += 1
    if line.startswith('No line'):
      ret[address_list[address_count]] = (None, None)
      address_count += 1
  gdb_pipe.close()
  commands.close()
  return ret
class AddressTable(object):
  ''' Object to do batched line number lookup.

  Usage: register binaries with AddBinaryAt(), queue addresses with Add(),
  run ResolveAll() once, then read results with GetFileLine().
  '''

  def __init__(self):
    # binary name -> load address string
    self._load_addresses = {}
    # binary name -> list of addresses queued for lookup
    self._binaries = {}
    # binary name -> {address: (file, line)}; initialized here so the
    # attribute always exists, and rebuilt by ResolveAll().
    self._translation = {}
    self._all_resolved = False

  def AddBinaryAt(self, binary, load_address):
    ''' Register a new shared library or executable. '''
    self._load_addresses[binary] = load_address

  def Add(self, binary, address):
    ''' Register a lookup request. '''
    if binary == '':
      # logging.warn is a deprecated alias; use warning() with lazy %-args.
      logging.warning('adding address %s in empty binary?', address)
    if binary in self._binaries:
      self._binaries[binary].append(address)
    else:
      self._binaries[binary] = [address]
    self._all_resolved = False

  def ResolveAll(self):
    ''' Carry out all lookup requests. '''
    self._translation = {}
    for binary in self._binaries.keys():
      if binary != '' and binary in self._load_addresses:
        load_address = self._load_addresses[binary]
        addr = ResolveAddressesWithinABinary(
            binary, load_address, self._binaries[binary])
        self._translation[binary] = addr
    self._all_resolved = True

  def GetFileLine(self, binary, addr):
    ''' Get the (filename, linenum) result of a previously-registered lookup
    request. Returns (None, None) if unresolved or unknown.
    '''
    if self._all_resolved:
      if binary in self._translation:
        if addr in self._translation[binary]:
          return self._translation[binary][addr]
    return (None, None)
| bsd-3-clause |
paolodedios/pybuilder | src/integrationtest/python/should_skip_undefined_keys_when_filtering_tests.py | 3 | 1371 | # -*- coding: utf-8 -*-
#
# This file is part of PyBuilder
#
# Copyright 2011-2020 PyBuilder Team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from itest_support import IntegrationTestSupport
class Test(IntegrationTestSupport):
    """Integration test: filtering must leave undefined ${keys} untouched."""

    def test(self):
        """Build a project that copies and filters 'spam'; known keys are
        substituted, unknown keys are preserved verbatim."""
        self.write_build_file("""
from pybuilder.core import use_plugin, init
use_plugin("copy_resources")
use_plugin("filter_resources")
@init
def init (project):
    project.get_property("copy_resources_glob").append("*")
    project.get_property("filter_resources_glob").append("spam")
""")
        self.write_file("spam", "${version} ${any_undefined_key}")
        reactor = self.prepare_reactor()
        reactor.build("package")
        # ${version} resolves to the default project version;
        # ${any_undefined_key} has no value and must pass through unchanged.
        self.assert_file_content("target/spam", "1.0.dev0 ${any_undefined_key}")
# Allow running this integration test directly as a script.
if __name__ == "__main__":
    unittest.main()
| apache-2.0 |
nolanliou/tensorflow | tensorflow/contrib/losses/python/metric_learning/metric_loss_ops_test.py | 41 | 20535 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for triplet_semihard_loss."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.contrib.losses.python import metric_learning as metric_loss_ops
from tensorflow.python.framework import ops
from tensorflow.python.framework import sparse_tensor
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn
from tensorflow.python.platform import test
# sklearn is an optional dependency: the clustering-loss tests below check
# HAS_SKLEARN and return early when it is not installed.
try:
  # pylint: disable=g-import-not-at-top
  from sklearn import datasets
  from sklearn import metrics
  HAS_SKLEARN = True
except ImportError:
  HAS_SKLEARN = False
def pairwise_distance_np(feature, squared=False):
  """Computes the pairwise distance matrix in numpy.

  Args:
    feature: 2-D numpy array of size [number of data, feature dimension]
    squared: Boolean. If true, output is the pairwise squared euclidean
      distance matrix; else, output is the pairwise euclidean distance matrix.

  Returns:
    pairwise_distances: 2-D numpy array of size
      [number of data, number of data].
  """
  # Broadcast to a [n, n, dim] tensor of pairwise differences and take the
  # Euclidean norm along the feature axis. The diagonal is exactly zero and
  # the result is symmetric by construction.
  deltas = feature[:, np.newaxis, :] - feature[np.newaxis, :, :]
  pairwise_distances = np.linalg.norm(deltas, axis=-1)
  if squared:
    pairwise_distances **= 2.
  return pairwise_distances
class ContrastiveLossTest(test.TestCase):
  """Checks contrastive_loss against a direct numpy computation."""

  def testContrastive(self):
    with self.test_session():
      num_data = 10
      feat_dim = 6
      margin = 1.0
      embeddings_anchor = np.random.rand(num_data, feat_dim).astype(np.float32)
      embeddings_positive = np.random.rand(num_data, feat_dim).astype(
          np.float32)
      # Binary labels: 1 = similar pair, 0 = dissimilar pair.
      labels = np.random.randint(0, 2, size=(num_data,)).astype(np.float32)
      # Compute the loss in NP
      dist = np.sqrt(
          np.sum(np.square(embeddings_anchor - embeddings_positive), axis=1))
      loss_np = np.mean(
          labels * np.square(dist) +
          (1.0 - labels) * np.square(np.maximum(margin - dist, 0.0)))
      # Compute the loss with TF
      loss_tf = metric_loss_ops.contrastive_loss(
          labels=ops.convert_to_tensor(labels),
          embeddings_anchor=ops.convert_to_tensor(embeddings_anchor),
          embeddings_positive=ops.convert_to_tensor(embeddings_positive),
          margin=margin)
      loss_tf = loss_tf.eval()
      self.assertAllClose(loss_np, loss_tf)
class TripletSemiHardLossTest(test.TestCase):
  """Checks triplet_semihard_loss against a brute-force numpy computation."""

  def testTripletSemiHard(self):
    with self.test_session():
      num_data = 10
      feat_dim = 6
      margin = 1.0
      num_classes = 4
      embedding = np.random.rand(num_data, feat_dim).astype(np.float32)
      labels = np.random.randint(
          0, num_classes, size=(num_data)).astype(np.float32)
      # Reshape labels to compute adjacency matrix.
      labels_reshaped = np.reshape(labels, (labels.shape[0], 1))
      # Compute the loss in NP.
      adjacency = np.equal(labels_reshaped, labels_reshaped.T)
      pdist_matrix = pairwise_distance_np(embedding, squared=True)
      loss_np = 0.0
      num_positives = 0.0
      for i in range(num_data):
        for j in range(num_data):
          if adjacency[i][j] > 0.0 and i != j:
            num_positives += 1.0
            pos_distance = pdist_matrix[i][j]
            neg_distances = []
            for k in range(num_data):
              if adjacency[i][k] == 0:
                neg_distances.append(pdist_matrix[i][k])
            # Sort by distance.
            neg_distances.sort()
            # Semihard selection: smallest negative distance greater than
            # the positive distance; otherwise the largest negative.
            chosen_neg_distance = neg_distances[0]
            for l in range(len(neg_distances)):
              chosen_neg_distance = neg_distances[l]
              if chosen_neg_distance > pos_distance:
                break
            loss_np += np.maximum(
                0.0, margin - chosen_neg_distance + pos_distance)
      loss_np /= num_positives
      # Compute the loss in TF.
      loss_tf = metric_loss_ops.triplet_semihard_loss(
          labels=ops.convert_to_tensor(labels),
          embeddings=ops.convert_to_tensor(embedding),
          margin=margin)
      loss_tf = loss_tf.eval()
      self.assertAllClose(loss_np, loss_tf)
class LiftedStructLossTest(test.TestCase):
  """Checks lifted_struct_loss against a brute-force numpy computation."""

  def testLiftedStruct(self):
    with self.test_session():
      num_data = 10
      feat_dim = 6
      margin = 1.0
      num_classes = 4
      embedding = np.random.rand(num_data, feat_dim).astype(np.float32)
      labels = np.random.randint(
          0, num_classes, size=(num_data)).astype(np.float32)
      # Reshape labels to compute adjacency matrix.
      labels_reshaped = np.reshape(labels, (labels.shape[0], 1))
      # Compute the loss in NP
      adjacency = np.equal(labels_reshaped, labels_reshaped.T)
      pdist_matrix = pairwise_distance_np(embedding)
      loss_np = 0.0
      num_constraints = 0.0
      for i in range(num_data):
        for j in range(num_data):
          if adjacency[i][j] > 0.0 and i != j:
            d_pos = pdist_matrix[i][j]
            # Collect margin-minus-distance for all negatives of i and j.
            negs = []
            for k in range(num_data):
              if not adjacency[i][k]:
                negs.append(margin - pdist_matrix[i][k])
            for l in range(num_data):
              if not adjacency[j][l]:
                negs.append(margin - pdist_matrix[j][l])
            negs = np.array(negs)
            # Numerically-stable log-sum-exp (soft maximum).
            max_elem = np.max(negs)
            negs -= max_elem
            negs = np.exp(negs)
            soft_maximum = np.log(np.sum(negs)) + max_elem
            num_constraints += 1.0
            this_loss = max(soft_maximum + d_pos, 0)
            loss_np += this_loss * this_loss
      # Each positive pair is counted twice (i,j) and (j,i), hence / 2.
      loss_np = loss_np / num_constraints / 2.0
      # Compute the loss in TF
      loss_tf = metric_loss_ops.lifted_struct_loss(
          labels=ops.convert_to_tensor(labels),
          embeddings=ops.convert_to_tensor(embedding),
          margin=margin)
      loss_tf = loss_tf.eval()
      self.assertAllClose(loss_np, loss_tf)
def convert_to_list_of_sparse_tensor(np_matrix):
  """Convert a dense 0/1 numpy matrix to a list of per-row tf SparseTensors.

  Each row becomes a 1-D SparseTensor of shape [ncols] whose indices are the
  columns where the row equals 1, with all values set to 1.0.
  """
  list_of_sparse_tensors = []
  nrows, ncols = np_matrix.shape
  for i in range(nrows):
    sp_indices = []
    for j in range(ncols):
      if np_matrix[i][j] == 1:
        sp_indices.append([j])
    num_non_zeros = len(sp_indices)
    list_of_sparse_tensors.append(sparse_tensor.SparseTensor(
        indices=np.array(sp_indices),
        values=np.ones((num_non_zeros,)),
        dense_shape=np.array([ncols,])))
  return list_of_sparse_tensors
class NpairsLossTest(test.TestCase):
  """Checks npairs_loss against a numpy cross-entropy + L2 reference."""

  def testNpairs(self):
    with self.test_session():
      num_data = 15
      feat_dim = 6
      num_classes = 5
      reg_lambda = 0.02
      embeddings_anchor = np.random.rand(num_data, feat_dim).astype(np.float32)
      embeddings_positive = np.random.rand(num_data, feat_dim).astype(
          np.float32)
      labels = np.random.randint(
          0, num_classes, size=(num_data)).astype(np.float32)
      # Reshape labels to compute adjacency matrix.
      labels_reshaped = np.reshape(labels, (labels.shape[0], 1))
      # Compute the loss in NP
      # L2 regularization term on both embedding sets.
      reg_term = np.mean(np.sum(np.square(embeddings_anchor), 1))
      reg_term += np.mean(np.sum(np.square(embeddings_positive), 1))
      reg_term *= 0.25 * reg_lambda
      similarity_matrix = np.matmul(embeddings_anchor, embeddings_positive.T)
      # Row-normalized label-agreement matrix serves as the soft target.
      labels_remapped = np.equal(
          labels_reshaped, labels_reshaped.T).astype(np.float32)
      labels_remapped /= np.sum(labels_remapped, axis=1, keepdims=True)
      xent_loss = math_ops.reduce_mean(nn.softmax_cross_entropy_with_logits(
          logits=ops.convert_to_tensor(similarity_matrix),
          labels=ops.convert_to_tensor(labels_remapped))).eval()
      loss_np = xent_loss + reg_term
      # Compute the loss in TF
      loss_tf = metric_loss_ops.npairs_loss(
          labels=ops.convert_to_tensor(labels),
          embeddings_anchor=ops.convert_to_tensor(embeddings_anchor),
          embeddings_positive=ops.convert_to_tensor(embeddings_positive),
          reg_lambda=reg_lambda)
      loss_tf = loss_tf.eval()
      self.assertAllClose(loss_np, loss_tf)
class NpairsLossMultiLabelTest(test.TestCase):
  """Checks npairs_loss_multilabel against numpy references."""

  def testNpairsMultiLabelLossWithSingleLabelEqualsNpairsLoss(self):
    """With one-hot labels, the multilabel loss must equal npairs_loss."""
    with self.test_session():
      num_data = 15
      feat_dim = 6
      reg_lambda = 0.02
      embeddings_anchor = np.random.rand(num_data, feat_dim).astype(np.float32)
      embeddings_positive = np.random.rand(num_data, feat_dim).astype(
          np.float32)
      labels = np.arange(num_data)
      labels = np.reshape(labels, -1)
      # Compute vanila npairs loss.
      loss_npairs = metric_loss_ops.npairs_loss(
          labels=ops.convert_to_tensor(labels),
          embeddings_anchor=ops.convert_to_tensor(embeddings_anchor),
          embeddings_positive=ops.convert_to_tensor(embeddings_positive),
          reg_lambda=reg_lambda).eval()
      # Compute npairs multilabel loss.
      labels_one_hot = np.identity(num_data)
      loss_npairs_multilabel = metric_loss_ops.npairs_loss_multilabel(
          sparse_labels=convert_to_list_of_sparse_tensor(labels_one_hot),
          embeddings_anchor=ops.convert_to_tensor(embeddings_anchor),
          embeddings_positive=ops.convert_to_tensor(embeddings_positive),
          reg_lambda=reg_lambda).eval()
      self.assertAllClose(loss_npairs, loss_npairs_multilabel)

  def testNpairsMultiLabel(self):
    with self.test_session():
      num_data = 15
      feat_dim = 6
      num_classes = 10
      reg_lambda = 0.02
      embeddings_anchor = np.random.rand(num_data, feat_dim).astype(np.float32)
      embeddings_positive = np.random.rand(num_data, feat_dim).astype(
          np.float32)
      labels = np.random.randint(0, 2, (num_data, num_classes))
      # set entire column to one so that each row has at least one bit set.
      labels[:, -1] = 1
      # Compute the loss in NP
      reg_term = np.mean(np.sum(np.square(embeddings_anchor), 1))
      reg_term += np.mean(np.sum(np.square(embeddings_positive), 1))
      reg_term *= 0.25 * reg_lambda
      similarity_matrix = np.matmul(embeddings_anchor, embeddings_positive.T)
      # `np.float` was a deprecated alias of the builtin `float` (float64)
      # and is removed in NumPy 1.24; use the builtin directly.
      labels_remapped = np.dot(labels, labels.T).astype(float)
      labels_remapped /= np.sum(labels_remapped, 1, keepdims=True)
      xent_loss = math_ops.reduce_mean(nn.softmax_cross_entropy_with_logits(
          logits=ops.convert_to_tensor(similarity_matrix),
          labels=ops.convert_to_tensor(labels_remapped))).eval()
      loss_np = xent_loss + reg_term
      # Compute the loss in TF
      loss_tf = metric_loss_ops.npairs_loss_multilabel(
          sparse_labels=convert_to_list_of_sparse_tensor(labels),
          embeddings_anchor=ops.convert_to_tensor(embeddings_anchor),
          embeddings_positive=ops.convert_to_tensor(embeddings_positive),
          reg_lambda=reg_lambda)
      loss_tf = loss_tf.eval()
      self.assertAllClose(loss_np, loss_tf)
def compute_ground_truth_cluster_score(feat, y):
  """Facility-location score of the ground-truth clustering.

  For every ground-truth cluster, picks the member minimizing the total
  distance to the rest of the cluster (the medoid) and accumulates the
  negated minimum.

  Args:
    feat: 2-D numpy array [num_data, feature_dim] of embeddings.
    y: 1-D numpy array of ground-truth labels.

  Returns:
    np.float32 score (0.0 for empty input).
  """
  y_unique = np.unique(y)
  # Start from a numpy float32 scalar so the function is well-defined even
  # when y is empty (a plain Python float has no .astype()).
  score_gt_np = np.float32(0.0)
  for c in y_unique:
    feat_subset = feat[y == c, :]
    pdist_subset = pairwise_distance_np(feat_subset)
    score_gt_np += -1.0 * np.min(np.sum(pdist_subset, axis=0))
  # Cast back down: the accumulation above promotes to float64.
  score_gt_np = np.float32(score_gt_np)
  return score_gt_np
def compute_cluster_loss_numpy(feat,
                               y,
                               margin_multiplier=1.0,
                               enable_pam_finetuning=True):
  """Numpy reference for the structured clustering loss.

  Fits a ForwardGreedyFacility solver (optionally PAM-refined) and returns
  the hinged gap between the loss-augmented score and the ground-truth
  clustering score.
  """
  n_clusters = np.unique(y).size
  solver = ForwardGreedyFacility(n_clusters=n_clusters)
  if enable_pam_finetuning:
    facility = solver.pam_augmented_fit(feat, y, margin_multiplier)
  else:
    facility = solver.loss_augmented_fit(feat, y, margin_multiplier)
  score_augmented = facility.score_aug_
  score_gt = compute_ground_truth_cluster_score(feat, y)
  return np.maximum(np.float32(0.0), score_augmented - score_gt)
class ForwardGreedyFacility(object):
  """Greedy facility-location / K-Medoids solver.

  Selects n_clusters medoids by forward greedy maximization of the
  loss-augmented facility-location objective, optionally refined with a
  PAM-style (Partitioning Around Medoids) local search. Serves as the
  numpy reference implementation for the clustering loss tests.
  """

  def __init__(self, n_clusters=8):
    self.n_clusters = n_clusters
    # Indices of the chosen medoids; populated by the fit methods.
    self.center_ics_ = None

  def _check_init_args(self):
    # Check n_clusters.
    if (self.n_clusters is None or self.n_clusters <= 0 or
        not isinstance(self.n_clusters, int)):
      # The condition rejects zero as well, so the requirement is a
      # positive integer (the previous message said "nonnegative").
      raise ValueError('n_clusters has to be a positive integer.')

  def loss_augmented_fit(self, feat, y, loss_mult):
    """Fit K-Medoids to the provided data.

    Args:
      feat: 2-D numpy array [num_data, feature_dim].
      y: 1-D numpy array of ground-truth labels.
      loss_mult: scalar weight of the (1 - NMI) structured-loss term.

    Returns:
      self, with labels_, cluster_centers_, center_ics_, score_,
      score_aug_ and subset_ populated.
    """
    self._check_init_args()
    # Check that the array is good and attempt to convert it to
    # Numpy array if possible.
    feat = self._check_array(feat)
    # Apply distance metric to get the distance matrix.
    pdists = pairwise_distance_np(feat)
    num_data = feat.shape[0]
    candidate_ids = list(range(num_data))
    subset = []
    k = 0
    while k < self.n_clusters:
      # (The previous dead `candidate_scores = np.zeros(num_data,)`
      # initialization was removed; it was overwritten immediately.)
      candidate_scores = []
      for i in candidate_ids:
        # push i to subset.
        subset.append(i)
        marginal_cost = -1.0 * np.sum(np.min(pdists[:, subset], axis=1))
        loss = 1.0 - metrics.normalized_mutual_info_score(
            y, self._get_cluster_ics(pdists, subset))
        candidate_scores.append(marginal_cost + loss_mult * loss)
        # remove i from subset.
        subset.pop()
      # push i_star to subset.
      i_star = candidate_ids[np.argmax(candidate_scores)]
      subset.append(i_star)
      # remove i_star from candidate indices.
      candidate_ids.remove(i_star)
      k += 1
    # Expose labels_ which are the assignments of
    # the training data to clusters.
    self.labels_ = self._get_cluster_ics(pdists, subset)
    # Expose cluster centers, i.e. medoids.
    self.cluster_centers_ = feat.take(subset, axis=0)
    # Expose indices of chosen cluster centers.
    self.center_ics_ = subset
    # Expose the score = -\sum_{i \in V} min_{j \in S} || x_i - x_j ||
    self.score_ = np.float32(-1.0) * self._get_facility_distance(pdists, subset)
    self.score_aug_ = self.score_ + loss_mult * (
        1.0 - metrics.normalized_mutual_info_score(
            y, self._get_cluster_ics(pdists, subset)))
    self.score_aug_ = self.score_aug_.astype(np.float32)
    # Expose the chosen cluster indices.
    self.subset_ = subset
    return self

  def _augmented_update_medoid_ics_in_place(self, pdists, y_gt, cluster_ics,
                                            medoid_ics, loss_mult):
    """One PAM sweep: for each cluster, swap in the member that maximizes
    the loss-augmented score. Mutates medoid_ics in place."""
    for cluster_idx in range(self.n_clusters):
      # y_pred = self._get_cluster_ics(D, medoid_ics)
      # Don't prematurely do the assignment step.
      # Do this after we've updated all cluster medoids.
      y_pred = cluster_ics
      if sum(y_pred == cluster_idx) == 0:
        # Cluster is empty.
        continue
      curr_score = (
          -1.0 * np.sum(
              pdists[medoid_ics[cluster_idx], y_pred == cluster_idx]) +
          loss_mult * (1.0 - metrics.normalized_mutual_info_score(
              y_gt, y_pred)))
      pdist_in = pdists[y_pred == cluster_idx, :]
      pdist_in = pdist_in[:, y_pred == cluster_idx]
      # Facility term for every in-cluster candidate medoid at once.
      all_scores_fac = np.sum(-1.0 * pdist_in, axis=1)
      all_scores_loss = []
      for i in range(y_pred.size):
        if y_pred[i] != cluster_idx:
          continue
        # remove this cluster's current centroid
        medoid_ics_i = medoid_ics[:cluster_idx] + medoid_ics[cluster_idx + 1:]
        # add this new candidate to the centroid list
        medoid_ics_i += [i]
        y_pred_i = self._get_cluster_ics(pdists, medoid_ics_i)
        all_scores_loss.append(loss_mult * (
            1.0 - metrics.normalized_mutual_info_score(y_gt, y_pred_i)))
      all_scores = all_scores_fac + all_scores_loss
      max_score_idx = np.argmax(all_scores)
      max_score = all_scores[max_score_idx]
      if max_score > curr_score:
        medoid_ics[cluster_idx] = np.where(
            y_pred == cluster_idx)[0][max_score_idx]

  def pam_augmented_fit(self, feat, y, loss_mult):
    """Greedy initialization followed by up to 5 PAM refinement sweeps."""
    pam_max_iter = 5
    self._check_init_args()
    feat = self._check_array(feat)
    pdists = pairwise_distance_np(feat)
    self.loss_augmented_fit(feat, y, loss_mult)
    print('PAM -1 (before PAM): score: %f, score_aug: %f' % (
        self.score_, self.score_aug_))
    # Initialize from loss augmented facility location
    subset = self.center_ics_
    for iter_ in range(pam_max_iter):
      # update the cluster assignment
      cluster_ics = self._get_cluster_ics(pdists, subset)
      # update the medoid for each clusters
      self._augmented_update_medoid_ics_in_place(pdists, y, cluster_ics, subset,
                                                 loss_mult)
      self.score_ = np.float32(-1.0) * self._get_facility_distance(
          pdists, subset)
      self.score_aug_ = self.score_ + loss_mult * (
          1.0 - metrics.normalized_mutual_info_score(
              y, self._get_cluster_ics(pdists, subset)))
      self.score_aug_ = self.score_aug_.astype(np.float32)
      print('PAM iter: %d, score: %f, score_aug: %f' % (iter_, self.score_,
                                                        self.score_aug_))
    self.center_ics_ = subset
    self.labels_ = cluster_ics
    return self

  def _check_array(self, feat):
    # Check that the number of clusters is less than or equal to
    # the number of samples.
    if self.n_clusters > feat.shape[0]:
      # Message fixed: the old text said the medoid count "must be larger
      # than" the sample count, inverting the actual constraint.
      raise ValueError('The number of medoids ({}) must be no greater than '
                       'the number of samples ({}).'.format(
                           self.n_clusters, feat.shape[0]))
    return feat

  def _get_cluster_ics(self, pdists, subset):
    """Returns cluster indices for pdist and current medoid indices."""
    # Assign each data point to its nearest medoid.
    cluster_ics = np.argmin(pdists[subset, :], axis=0)
    return cluster_ics

  def _get_facility_distance(self, pdists, subset):
    """Total distance from every point to its nearest medoid."""
    return np.sum(np.min(pdists[subset, :], axis=0))
class ClusterLossTest(test.TestCase):
  """Checks cluster_loss against the numpy reference implementation."""

  def _genClusters(self, n_samples, n_clusters):
    # Synthetic embeddings: standardized Gaussian blobs from sklearn.
    blobs = datasets.make_blobs(
        n_samples=n_samples, centers=n_clusters)
    embedding, labels = blobs
    embedding = (embedding - embedding.mean(axis=0)) / embedding.std(axis=0)
    embedding = embedding.astype(np.float32)
    return embedding, labels

  def testClusteringLossPAMOff(self):
    # Skip silently when sklearn is not available.
    if not HAS_SKLEARN:
      return
    with self.test_session():
      margin_multiplier = 10.0
      embeddings, labels = self._genClusters(n_samples=128, n_clusters=64)
      loss_np = compute_cluster_loss_numpy(
          embeddings, labels, margin_multiplier, enable_pam_finetuning=False)
      loss_tf = metric_loss_ops.cluster_loss(
          labels=ops.convert_to_tensor(labels),
          embeddings=ops.convert_to_tensor(embeddings),
          margin_multiplier=margin_multiplier,
          enable_pam_finetuning=False)
      loss_tf = loss_tf.eval()
      self.assertAllClose(loss_np, loss_tf)

  def testClusteringLossPAMOn(self):
    # Skip silently when sklearn is not available.
    if not HAS_SKLEARN:
      return
    with self.test_session():
      margin_multiplier = 10.0
      embeddings, labels = self._genClusters(n_samples=128, n_clusters=64)
      loss_np = compute_cluster_loss_numpy(
          embeddings, labels, margin_multiplier, enable_pam_finetuning=True)
      loss_tf = metric_loss_ops.cluster_loss(
          labels=ops.convert_to_tensor(labels),
          embeddings=ops.convert_to_tensor(embeddings),
          margin_multiplier=margin_multiplier,
          enable_pam_finetuning=True)
      loss_tf = loss_tf.eval()
      self.assertAllClose(loss_np, loss_tf)
# Standard TensorFlow test runner entry point.
if __name__ == '__main__':
  test.main()
| apache-2.0 |
Frenzie/youtube-dl | youtube_dl/extractor/pbs.py | 2 | 13463 | # coding: utf-8
from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..utils import (
ExtractorError,
determine_ext,
int_or_none,
strip_jsonp,
unified_strdate,
US_RATINGS,
)
class PBSIE(InfoExtractor):
    # Matches three URL shapes: direct video URLs, pbs.org article pages
    # with an embedded player, and standalone partner-player URLs.
    _VALID_URL = r'''(?x)https?://
        (?:
           # Direct video URL
           video\.pbs\.org/(?:viralplayer|video)/(?P<id>[0-9]+)/? |
           # Article with embedded player (or direct video)
           (?:www\.)?pbs\.org/(?:[^/]+/){2,5}(?P<presumptive_id>[^/]+?)(?:\.html)?/?(?:$|[?\#]) |
           # Player
           video\.pbs\.org/(?:widget/)?partnerplayer/(?P<player_id>[^/]+)/
        )
    '''

    # One entry per supported page layout; see comments inside for specials.
    _TESTS = [
        {
            'url': 'http://www.pbs.org/tpt/constitution-usa-peter-sagal/watch/a-more-perfect-union/',
            'md5': 'ce1888486f0908d555a8093cac9a7362',
            'info_dict': {
                'id': '2365006249',
                'ext': 'mp4',
                'title': 'Constitution USA with Peter Sagal - A More Perfect Union',
                'description': 'md5:ba0c207295339c8d6eced00b7c363c6a',
                'duration': 3190,
            },
            'params': {
                'skip_download': True,  # requires ffmpeg
            },
        },
        {
            'url': 'http://www.pbs.org/wgbh/pages/frontline/losing-iraq/',
            'md5': '143c98aa54a346738a3d78f54c925321',
            'info_dict': {
                'id': '2365297690',
                'ext': 'mp4',
                'title': 'FRONTLINE - Losing Iraq',
                'description': 'md5:f5bfbefadf421e8bb8647602011caf8e',
                'duration': 5050,
            },
            'params': {
                'skip_download': True,  # requires ffmpeg
            }
        },
        {
            'url': 'http://www.pbs.org/newshour/bb/education-jan-june12-cyberschools_02-23/',
            'md5': 'b19856d7f5351b17a5ab1dc6a64be633',
            'info_dict': {
                'id': '2201174722',
                'ext': 'mp4',
                'title': 'PBS NewsHour - Cyber Schools Gain Popularity, but Quality Questions Persist',
                'description': 'md5:5871c15cba347c1b3d28ac47a73c7c28',
                'duration': 801,
            },
        },
        {
            'url': 'http://www.pbs.org/wnet/gperf/dudamel-conducts-verdi-requiem-hollywood-bowl-full-episode/3374/',
            'md5': 'c62859342be2a0358d6c9eb306595978',
            'info_dict': {
                'id': '2365297708',
                'ext': 'mp4',
                'description': 'md5:68d87ef760660eb564455eb30ca464fe',
                'title': 'Great Performances - Dudamel Conducts Verdi Requiem at the Hollywood Bowl - Full',
                'duration': 6559,
                'thumbnail': 're:^https?://.*\.jpg$',
            },
            'params': {
                'skip_download': True,  # requires ffmpeg
            },
        },
        {
            'url': 'http://www.pbs.org/wgbh/nova/earth/killer-typhoon.html',
            'md5': '908f3e5473a693b266b84e25e1cf9703',
            'info_dict': {
                'id': '2365160389',
                'display_id': 'killer-typhoon',
                'ext': 'mp4',
                'description': 'md5:c741d14e979fc53228c575894094f157',
                'title': 'NOVA - Killer Typhoon',
                'duration': 3172,
                'thumbnail': 're:^https?://.*\.jpg$',
                'upload_date': '20140122',
                'age_limit': 10,
            },
            'params': {
                'skip_download': True,  # requires ffmpeg
            },
        },
        {
            # Page with multiple tabbed videos -> playlist result
            'url': 'http://www.pbs.org/wgbh/pages/frontline/united-states-of-secrets/',
            'info_dict': {
                'id': 'united-states-of-secrets',
            },
            'playlist_count': 2,
        },
        {
            'url': 'http://www.pbs.org/wgbh/americanexperience/films/death/player/',
            'info_dict': {
                'id': '2276541483',
                'display_id': 'player',
                'ext': 'mp4',
                'title': 'American Experience - Death and the Civil War, Chapter 1',
                'description': 'American Experience, TV’s most-watched history series, brings to life the compelling stories from our past that inform our understanding of the world today.',
                'duration': 682,
                'thumbnail': 're:^https?://.*\.jpg$',
            },
            'params': {
                'skip_download': True,  # requires ffmpeg
            },
        },
        {
            'url': 'http://video.pbs.org/video/2365367186/',
            'info_dict': {
                'id': '2365367186',
                'display_id': '2365367186',
                'ext': 'mp4',
                'title': 'To Catch A Comet - Full Episode',
                'description': 'On November 12, 2014, billions of kilometers from Earth, spacecraft orbiter Rosetta and lander Philae did what no other had dared to attempt \u2014 land on the volatile surface of a comet as it zooms around the sun at 67,000 km/hr. The European Space Agency hopes this mission can help peer into our past and unlock secrets of our origins.',
                'duration': 3342,
                'thumbnail': 're:^https?://.*\.jpg$',
            },
            'params': {
                'skip_download': True,  # requires ffmpeg
            },
            'skip': 'Expired',
        },
        {
            # Video embedded in iframe containing angle brackets as attribute's value (e.g.
            # "<iframe style='position: absolute;<br />\ntop: 0; left: 0;' ...", see
            # https://github.com/rg3/youtube-dl/issues/7059)
            'url': 'http://www.pbs.org/food/features/a-chefs-life-season-3-episode-5-prickly-business/',
            'info_dict': {
                'id': '2365546844',
                'display_id': 'a-chefs-life-season-3-episode-5-prickly-business',
                'ext': 'mp4',
                'title': "A Chef's Life - Season 3, Ep. 5: Prickly Business",
                'description': 'md5:61db2ddf27c9912f09c241014b118ed1',
                'duration': 1480,
                'thumbnail': 're:^https?://.*\.jpg$',
            },
            'params': {
                'skip_download': True,  # requires ffmpeg
            },
        },
        {
            # Frontline video embedded via flp2012.js
            'url': 'http://www.pbs.org/wgbh/pages/frontline/the-atomic-artists',
            'info_dict': {
                'id': '2070868960',
                'display_id': 'the-atomic-artists',
                'ext': 'mp4',
                'title': 'FRONTLINE - The Atomic Artists',
                'description': 'md5:f5bfbefadf421e8bb8647602011caf8e',
                'duration': 723,
                'thumbnail': 're:^https?://.*\.jpg$',
            },
            'params': {
                'skip_download': True,  # requires ffmpeg
            },
        }
    ]

    # Maps PBS API http_code values to user-facing error messages.
    _ERRORS = {
        101: 'We\'re sorry, but this video is not yet available.',
        403: 'We\'re sorry, but this video is not available in your region due to right restrictions.',
        404: 'We are experiencing technical difficulties that are preventing us from playing the video at this time. Please check back again soon.',
        410: 'This video has expired and is no longer available for online streaming.',
    }
def _extract_webpage(self, url):
    """Resolve *url* to a (video_id, display_id, upload_date) tuple.

    For tabbed Frontline pages, video_id is instead a list of media IDs;
    the caller (_real_extract) handles that case as a playlist.
    The extraction strategies below are tried in order, first hit wins.
    """
    mobj = re.match(self._VALID_URL, url)

    presumptive_id = mobj.group('presumptive_id')
    display_id = presumptive_id
    if presumptive_id:
        webpage = self._download_webpage(url, display_id)

        upload_date = unified_strdate(self._search_regex(
            r'<input type="hidden" id="air_date_[0-9]+" value="([^"]+)"',
            webpage, 'upload date', default=None))

        # tabbed frontline videos
        tabbed_videos = re.findall(
            r'<div[^>]+class="videotab[^"]*"[^>]+vid="(\d+)"', webpage)
        if tabbed_videos:
            return tabbed_videos, presumptive_id, upload_date

        MEDIA_ID_REGEXES = [
            r"div\s*:\s*'videoembed'\s*,\s*mediaid\s*:\s*'(\d+)'",  # frontline video embed
            r'class="coveplayerid">([^<]+)<',  # coveplayer
            r'<input type="hidden" id="pbs_video_id_[0-9]+" value="([0-9]+)"/>',  # jwplayer
        ]

        media_id = self._search_regex(
            MEDIA_ID_REGEXES, webpage, 'media ID', fatal=False, default=None)
        if media_id:
            return media_id, presumptive_id, upload_date

        # Frontline video embedded via flp
        video_id = self._search_regex(
            r'videoid\s*:\s*"([\d+a-z]{7,})"', webpage, 'videoid', default=None)
        if video_id:
            # pkg_id calculation is reverse engineered from
            # http://www.pbs.org/wgbh/pages/frontline/js/flp2012.js
            prg_id = self._search_regex(
                r'videoid\s*:\s*"([\d+a-z]{7,})"', webpage, 'videoid')[7:]
            if 'q' in prg_id:
                prg_id = prg_id.split('q')[1]
            prg_id = int(prg_id, 16)
            getdir = self._download_json(
                'http://www.pbs.org/wgbh/pages/frontline/.json/getdir/getdir%d.json' % prg_id,
                presumptive_id, 'Downloading getdir JSON',
                transform_source=strip_jsonp)
            return getdir['mid'], presumptive_id, upload_date

        # Fall back: look for an embedded partnerplayer iframe and continue
        # below as if the iframe URL had been passed in directly.
        for iframe in re.findall(r'(?s)<iframe(.+?)></iframe>', webpage):
            url = self._search_regex(
                r'src=(["\'])(?P<url>.+?partnerplayer.+?)\1', iframe,
                'player URL', default=None, group='url')
            if url:
                break

    # Re-match: url may now be the iframe's partnerplayer URL.
    mobj = re.match(self._VALID_URL, url)
    player_id = mobj.group('player_id')
    if not display_id:
        display_id = player_id
    if player_id:
        player_page = self._download_webpage(
            url, display_id, note='Downloading player page',
            errnote='Could not download player page')
        video_id = self._search_regex(
            r'<div\s+id="video_([0-9]+)"', player_page, 'video ID')
    else:
        video_id = mobj.group('id')
        display_id = video_id

    return video_id, display_id, None
def _real_extract(self, url):
    """Build the info dict (or a playlist result for tabbed pages)."""
    video_id, display_id, upload_date = self._extract_webpage(url)

    # _extract_webpage returns a list of IDs for tabbed Frontline pages.
    if isinstance(video_id, list):
        entries = [self.url_result(
            'http://video.pbs.org/video/%s' % vid_id, 'PBS', vid_id)
            for vid_id in video_id]
        return self.playlist_result(entries, display_id)

    info = self._download_json(
        'http://video.pbs.org/videoInfo/%s?format=json&type=partner' % video_id,
        display_id)

    formats = []
    for encoding_name in ('recommended_encoding', 'alternate_encoding'):
        redirect = info.get(encoding_name)
        if not redirect:
            continue
        redirect_url = redirect.get('url')
        if not redirect_url:
            continue

        redirect_info = self._download_json(
            redirect_url + '?format=json', display_id,
            'Downloading %s video url info' % encoding_name)

        if redirect_info['status'] == 'error':
            raise ExtractorError(
                '%s said: %s' % (
                    self.IE_NAME,
                    self._ERRORS.get(redirect_info['http_code'], redirect_info['message'])),
                expected=True)

        format_url = redirect_info.get('url')
        if not format_url:
            continue

        if determine_ext(format_url) == 'm3u8':
            formats.extend(self._extract_m3u8_formats(
                format_url, display_id, 'mp4', preference=1, m3u8_id='hls'))
        else:
            formats.append({
                'url': format_url,
                'format_id': redirect.get('eeid'),
            })
    self._sort_formats(formats)

    # Rating strings look like 'TV-PG'; US_RATINGS keys on the last part.
    rating_str = info.get('rating')
    if rating_str is not None:
        rating_str = rating_str.rpartition('-')[2]
    age_limit = US_RATINGS.get(rating_str)

    subtitles = {}
    closed_captions_url = info.get('closed_captions_url')
    if closed_captions_url:
        subtitles['en'] = [{
            'ext': 'ttml',
            'url': closed_captions_url,
        }]

    # info['title'] is often incomplete (e.g. 'Full Episode', 'Episode 5', etc)
    # Try turning it to 'program - title' naming scheme if possible
    alt_title = info.get('program', {}).get('title')
    if alt_title:
        info['title'] = alt_title + ' - ' + re.sub(r'^' + alt_title + '[\s\-:]+', '', info['title'])

    return {
        'id': video_id,
        'display_id': display_id,
        'title': info['title'],
        'description': info['program'].get('description'),
        'thumbnail': info.get('image_url'),
        'duration': int_or_none(info.get('duration')),
        'age_limit': age_limit,
        'upload_date': upload_date,
        'formats': formats,
        'subtitles': subtitles,
    }
| unlicense |
poiesisconsulting/openerp-restaurant | hr_attendance/hr_attendance.py | 6 | 8088 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
from openerp.osv import fields, osv
from openerp.tools.translate import _
# Model: free-form reason attached to a sign-in/sign-out action.
class hr_action_reason(osv.osv):
    _name = "hr.action.reason"
    _description = "Action Reason"
    _columns = {
        'name': fields.char('Reason', size=64, required=True, help='Specifies the reason for Signing In/Signing Out.'),
        'action_type': fields.selection([('sign_in', 'Sign in'), ('sign_out', 'Sign out')], "Action Type"),
    }
    # New reasons default to the sign-in direction.
    _defaults = {
        'action_type': 'sign_in',
    }
def _employee_get(obj, cr, uid, context=None):
    """Default employee: the hr.employee record linked to the current user."""
    employee_pool = obj.pool.get('hr.employee')
    matching_ids = employee_pool.search(cr, uid, [('user_id', '=', uid)], context=context)
    # Keep the historical and-or idiom so falsy results behave identically.
    return matching_ids and matching_ids[0] or False
# Model: one sign-in or sign-out event for an employee.
class hr_attendance(osv.osv):
    _name = "hr.attendance"
    _description = "Attendance"

    def _day_compute(self, cr, uid, ids, fieldnames, args, context=None):
        # Derive the stored 'day' (YYYY-MM-DD) from the event timestamp.
        res = dict.fromkeys(ids, '')
        for obj in self.browse(cr, uid, ids, context=context):
            res[obj.id] = time.strftime('%Y-%m-%d', time.strptime(obj.name, '%Y-%m-%d %H:%M:%S'))
        return res

    _columns = {
        'name': fields.datetime('Date', required=True, select=1),
        'action': fields.selection([('sign_in', 'Sign In'), ('sign_out', 'Sign Out'), ('action', 'Action')], 'Action', required=True),
        'action_desc': fields.many2one("hr.action.reason", "Action Reason", domain="[('action_type', '=', action)]", help='Specifies the reason for Signing In/Signing Out in case of extra hours.'),
        'employee_id': fields.many2one('hr.employee', "Employee", required=True, select=True),
        'day': fields.function(_day_compute, type='char', string='Day', store=True, select=1, size=32),
    }
    _defaults = {
        'name': lambda *a: time.strftime('%Y-%m-%d %H:%M:%S'),  # please don't remove the lambda, if you remove it then the current time will not change
        'employee_id': _employee_get,
    }

    def _altern_si_so(self, cr, uid, ids, context=None):
        """ Alternance sign_in/sign_out check.
            Previous (if exists) must be of opposite action.
            Next (if exists) must be of opposite action.
        """
        for att in self.browse(cr, uid, ids, context=context):
            # search and browse for first previous and first next records
            prev_att_ids = self.search(cr, uid, [('employee_id', '=', att.employee_id.id), ('name', '<', att.name), ('action', 'in', ('sign_in', 'sign_out'))], limit=1, order='name DESC')
            next_add_ids = self.search(cr, uid, [('employee_id', '=', att.employee_id.id), ('name', '>', att.name), ('action', 'in', ('sign_in', 'sign_out'))], limit=1, order='name ASC')
            prev_atts = self.browse(cr, uid, prev_att_ids, context=context)
            next_atts = self.browse(cr, uid, next_add_ids, context=context)
            # check for alternance, return False if at least one condition is not satisfied
            if prev_atts and prev_atts[0].action == att.action:  # previous exists and is same action
                return False
            if next_atts and next_atts[0].action == att.action:  # next exists and is same action
                return False
            if (not prev_atts) and (not next_atts) and att.action != 'sign_in':  # first attendance must be sign_in
                return False
        return True

    _constraints = [(_altern_si_so, 'Error ! Sign in (resp. Sign out) must follow Sign out (resp. Sign in)', ['action'])]
    _order = 'name desc'
# Extends hr.employee with presence state derived from attendance events.
class hr_employee(osv.osv):
    _inherit = "hr.employee"
    _description = "Employee"

    def _state(self, cr, uid, ids, name, args, context=None):
        # 'present' if the employee's most recent event is a sign_in,
        # otherwise 'absent' (including employees with no events at all).
        result = {}
        if not ids:
            return result
        for id in ids:
            result[id] = 'absent'
        cr.execute('SELECT hr_attendance.action, hr_attendance.employee_id \
                FROM ( \
                    SELECT MAX(name) AS name, employee_id \
                    FROM hr_attendance \
                    WHERE action in (\'sign_in\', \'sign_out\') \
                    GROUP BY employee_id \
                ) AS foo \
                LEFT JOIN hr_attendance \
                    ON (hr_attendance.employee_id = foo.employee_id \
                        AND hr_attendance.name = foo.name) \
                WHERE hr_attendance.employee_id IN %s', (tuple(ids),))
        for res in cr.fetchall():
            result[res[1]] = res[0] == 'sign_in' and 'present' or 'absent'
        return result

    def _last_sign(self, cr, uid, ids, name, args, context=None):
        # Timestamp of the employee's latest sign_in/sign_out, or False.
        result = {}
        if not ids:
            return result
        for id in ids:
            result[id] = False
            cr.execute("""select max(name) as name
                          from hr_attendance
                          where action in ('sign_in', 'sign_out') and employee_id = %s""", (id,))
            for res in cr.fetchall():
                result[id] = res[0]
        return result

    def _attendance_access(self, cr, uid, ids, name, args, context=None):
        # Function field used to hide the sign in/sign out attendance button
        # from the menu for users outside the attendance group.
        group = self.pool.get('ir.model.data').get_object(cr, uid, 'base', 'group_hr_attendance')
        visible = False
        if uid in [user.id for user in group.users]:
            visible = True
        return dict([(x, visible) for x in ids])

    _columns = {
        'state': fields.function(_state, type='selection', selection=[('absent', 'Absent'), ('present', 'Present')], string='Attendance'),
        'last_sign': fields.function(_last_sign, type='datetime', string='Last Sign'),
        'attendance_access': fields.function(_attendance_access, string='Attendance Access', type='boolean'),
    }

    def _action_check(self, cr, uid, emp_id, dt=False, context=None):
        # True when the proposed event timestamp is strictly after the
        # employee's latest recorded event (keeps the log chronological).
        cr.execute('SELECT MAX(name) FROM hr_attendance WHERE employee_id=%s', (emp_id,))
        res = cr.fetchone()
        return not (res and (res[0] >= (dt or time.strftime('%Y-%m-%d %H:%M:%S'))))

    def attendance_action_change(self, cr, uid, ids, context=None):
        # Toggle sign_in/sign_out for each employee (or force the action
        # passed in context), creating the hr.attendance record.
        if context is None:
            context = {}
        action_date = context.get('action_date', False)
        action = context.get('action', False)
        hr_attendance = self.pool.get('hr.attendance')
        warning_sign = {'sign_in': _('Sign In'), 'sign_out': _('Sign Out')}
        for employee in self.browse(cr, uid, ids, context=context):
            if not action:
                if employee.state == 'present': action = 'sign_out'
                if employee.state == 'absent': action = 'sign_in'
            if not self._action_check(cr, uid, employee.id, action_date, context):
                raise osv.except_osv(_('Warning'), _('You tried to %s with a date anterior to another event !\nTry to contact the HR Manager to correct attendances.') % (warning_sign[action],))
            vals = {'action': action, 'employee_id': employee.id}
            if action_date:
                vals['name'] = action_date
            hr_attendance.create(cr, uid, vals, context=context)
        return True
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
CSC-IT-Center-for-Science/pouta-blueprints | pebbles/tests/test_docker_driver.py | 1 | 32367 | import json
import logging
import docker.errors
import pebbles.drivers.provisioning.docker_driver as docker_driver
from pebbles.tests.base import BaseTestCase
from pebbles.drivers.provisioning.docker_driver import NAMESPACE_CPU, NAMESPACE_GPU
from pebbles.drivers.provisioning.docker_driver import DD_STATE_ACTIVE, DD_STATE_INACTIVE, DD_STATE_SPAWNED, DD_STATE_REMOVED, KEY_PREFIX_POOL, KEY_CONFIG
import mock
from sys import version_info
import docker.utils
import time
if version_info.major == 2:
import __builtin__ as builtins
else:
import builtins
# Module-level logger used by the driver instances created in the tests.
logging.basicConfig()
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
# Both resource namespaces (CPU and GPU hosts) are exercised by every test.
namespace_values = [NAMESPACE_CPU, NAMESPACE_GPU]
# decorator for overriding open
def mock_open_context(func):
    """Decorator that patches builtins.open for the duration of *func*.

    Every open().read() inside the wrapped call returns the fixed fake
    key material '1234123412341234'.
    """
    from functools import wraps  # local import keeps the module import block unchanged

    @wraps(func)  # preserve the wrapped test's name/docstring for reporting
    def inner(*args, **kwargs):
        with mock.patch.object(builtins, 'open', mock.mock_open(read_data='1234123412341234')):
            return func(*args, **kwargs)
    return inner
# decorator for raising RuntimeError if in failure mode
def raise_on_failure_mode(func):
    """Decorator for mock methods: raise RuntimeError when the owning
    mock's ``failure_mode`` flag is set, otherwise call through.

    Assumes the first positional argument (self) carries a
    ``failure_mode`` attribute.
    """
    from functools import wraps  # local import keeps the module import block unchanged

    @wraps(func)  # preserve the wrapped method's metadata
    def inner(*args, **kwargs):
        if args[0].failure_mode:
            raise RuntimeError('In failure mode')
        return func(*args, **kwargs)
    return inner
class MockResponse(object):
    """Minimal HTTP-response stand-in; only status_code is consulted."""
    def __init__(self, status_code):
        self.status_code = status_code
class OpenStackServiceMock(object):
    """In-memory stand-in for the OpenStack service used by the driver."""

    def __init__(self, config):
        self.spawn_count = 0
        self.servers = []
        self.failure_mode = False

    @raise_on_failure_mode
    def provision_instance(self, display_name, image_name, flavor_name,
                           public_key, extra_sec_groups=None,
                           master_sg_name=None, allocate_public_ip=True,
                           root_volume_size=0, data_volume_size=0, data_volume_type=None,
                           nics=None,
                           userdata=None
                           ):
        # Fabricate a server record; ids and addresses derive from a counter.
        self.spawn_count += 1
        server_record = dict(server_id='%s' % self.spawn_count)
        address_data = dict(
            private_ip='192.168.1.%d' % self.spawn_count,
            public_ip=None,
        )
        if allocate_public_ip:
            address_data['public_ip'] = '172.16.0.%d' % self.spawn_count
        server_record['address_data'] = address_data
        # GPU flavors are tracked under their own namespace.
        if flavor_name.startswith("gpu"):
            server_record['namespace'] = "DockerDriverGpu"
        else:
            server_record['namespace'] = "DockerDriver"
        self.servers.append(server_record)
        return server_record

    @raise_on_failure_mode
    def deprovision_instance(self, instance_id, name=None, delete_attached_volumes=False):
        # Drop the matching server record; unknown ids are a silent no-op.
        self.servers = [srv for srv in self.servers
                        if str(srv['server_id']) != str(instance_id)]
        return {}

    @raise_on_failure_mode
    def upload_key(self, key_name, public_key):
        pass

    @raise_on_failure_mode
    def delete_key(self, key_name):
        pass
# noinspection PyUnusedLocal
class DockerClientMock(object):
    """In-memory mock of the docker-py client API used by the driver."""
    def __init__(self):
        self._containers = []
        self.spawn_count = 0
        self.failure_mode = False

    @raise_on_failure_mode
    def pull(self, image):
        pass

    @raise_on_failure_mode
    def containers(self):
        # Return a copy so callers cannot mutate internal state.
        return self._containers[:]

    def create_host_config(self, *args, **kwargs):
        return {}

    @raise_on_failure_mode
    def create_container(self, name, **kwargs):
        self.spawn_count += 1
        container = dict(
            Id='%s' % self.spawn_count,
            Name=name,
            Labels=dict(slots='1')
        )
        self._containers.append(container)
        return container

    @raise_on_failure_mode
    def start(self, container_id, **kwargs):
        pass

    @raise_on_failure_mode
    def remove_container(self, name, **kwargs):
        matches = [x for x in self._containers if x['Name'] == name]
        if len(matches) == 1:
            container = matches[0]
            self._containers.remove(container)
        elif len(matches) == 0:
            # Mimic the real docker API: removing a missing container is a 404.
            response = MockResponse(status_code=404)
            raise docker.errors.APIError("foo", response=response, explanation='')
        else:
            raise RuntimeError('More than one container with same name detected')

    @raise_on_failure_mode
    def port(self, *args):
        # Pseudo-random but deterministic published host port.
        return [{'HostPort': 32768 + self.spawn_count % 32768}]

    def load_image(self, *args):
        pass
class PBClientMock(object):
    """Mock of the Pebbles API client, backed by in-memory dicts."""
    def __init__(self):
        # instance_id -> instance description dict
        self.instance_data = {}
        config = dict(
            memory_limit='512m',
            environment_vars=''
        )
        # One fixed blueprint that every mocked instance refers to.
        self.blueprint_data = {
            'bp-01': dict(
                id='bp-01',
                name='test blueprint 01',
                config=config
            )
        }
        self.blueprint_data['bp-01']['full_config'] = dict(
            docker_image='csc/test_image',
            internal_port=8888,
            consumed_slots=1,
            memory_limit=config['memory_limit'],
            environment_vars=config['environment_vars']
        )
        # Per-namespace driver backend configuration records (CPU and GPU).
        self.namespaced_records = [{
            'namespace': 'DockerDriver',
            'key': 'backend_config',
            'value': dict(
                DD_HOST_IMAGE='CentOS-7',
                DD_MAX_HOSTS=4,
                DD_SHUTDOWN_MODE=False,
                DD_FREE_SLOT_TARGET=4,
                DD_HOST_FLAVOR_NAME_SMALL='standard.tiny',
                DD_HOST_FLAVOR_SLOTS_SMALL=4,
                DD_HOST_FLAVOR_NAME_LARGE='standard.xlarge',
                DD_HOST_FLAVOR_SLOTS_LARGE=16,
                DD_HOST_MASTER_SG='pb_server',
                DD_HOST_EXTRA_SGS='',
                DD_HOST_ROOT_VOLUME_SIZE=0,
                DD_HOST_DATA_VOLUME_FACTOR=4,
                DD_HOST_DATA_VOLUME_DEVICE='/dev/vdc',
                DD_HOST_DATA_VOLUME_TYPE='',
            ),
            'updated_ts': 0},
            {
            'namespace': 'DockerDriverGpu',
            'key': 'backend_config',
            'value': dict(
                DD_HOST_IMAGE='CentOS-7-Cuda',
                DD_MAX_HOSTS=2,
                DD_SHUTDOWN_MODE=False,
                DD_FREE_SLOT_TARGET=4,
                DD_HOST_FLAVOR_NAME_SMALL='gpu.1.1gpu',
                DD_HOST_FLAVOR_SLOTS_SMALL=210,
                DD_HOST_FLAVOR_NAME_LARGE='gpu.1.1gpu',
                DD_HOST_FLAVOR_SLOTS_LARGE=210,
                DD_HOST_MASTER_SG='pb_server',
                DD_HOST_EXTRA_SGS='',
                DD_HOST_ROOT_VOLUME_SIZE=0,
                DD_HOST_DATA_VOLUME_FACTOR=4,
                DD_HOST_DATA_VOLUME_DEVICE='/dev/vdc',
                DD_HOST_DATA_VOLUME_TYPE='',
            ),
            'updated_ts': 0}]

    def add_instance_data(self, instance_id):
        # Register a fresh instance in 'starting' state, bound to bp-01.
        self.instance_data[instance_id] = dict(
            id='%s' % instance_id,
            name='pb-%s' % instance_id,
            state='starting',
            blueprint_id='bp-01',
        )

    def get_instance_description(self, instance_id):
        return self.instance_data[instance_id]

    def get_blueprint_description(self, blueprint_id):
        return self.blueprint_data[blueprint_id]

    def do_instance_patch(self, instance_id, payload):
        data = self.instance_data[instance_id]
        data.update(payload)
        # The real API sends instance_data as a JSON string; normalize it.
        if 'instance_data' in data.keys() and isinstance(data['instance_data'], str):
            data['instance_data'] = json.loads(data['instance_data'])

    def _filter_namespaced_records(self, namespace, key=None):
        filters = [lambda x: x['namespace'] == namespace]
        if key:
            filters.append(lambda x: x['key'].startswith(key))  # mocking the 'like' SQL operator
        filtered_record = filter(
            lambda record: all(f(record) for f in filters),
            self.namespaced_records
        )
        return list(filtered_record)

    def get_namespaced_keyvalues(self, payload=None):
        filtered_records = self._filter_namespaced_records(payload['namespace'], payload['key'])
        return filtered_records

    def get_namespaced_keyvalue(self, namespace, key):
        filtered_record = self._filter_namespaced_records(namespace, key)[0]
        return filtered_record

    def create_or_modify_namespaced_keyvalue(self, namespace, key, payload):
        # Upsert semantics: create when missing, otherwise update in place.
        if not self._filter_namespaced_records(namespace, key):
            payload['updated_ts'] = time.time()
            self.namespaced_records.append(payload)
        else:
            filtered_record = self._filter_namespaced_records(namespace, key)
            filtered_record[0]['value'] = payload['value']
            filtered_record[0]['updated_ts'] = time.time()

    def delete_namespaced_keyvalue(self, namespace, key):
        filtered_record = self._filter_namespaced_records(namespace, key)
        if filtered_record:
            self.namespaced_records.remove(filtered_record[0])
# noinspection PyUnusedLocal
class DockerDriverAccessMock(object):
    """In-memory implementation of the DockerDriver access point.

    Bundles the OpenStack, Docker and Pebbles API mocks behind the same
    interface the real access point exposes to the driver.
    """
    def __init__(self, config):
        self.json_data = {}
        self.oss_mock = OpenStackServiceMock(config)
        self.dc_mocks = {}
        self.pbc_mock = PBClientMock()
        self.shutdown_mode = False
        self.failure_mode = False

    def _filter_records(self, servers_value, namespace_value):
        # Keep only the server records belonging to the given namespace.
        filters = [lambda x: x['namespace'] == namespace_value]
        filtered_record = filter(
            lambda record: all(f(record) for f in filters),
            servers_value
        )
        # need to convert to list, because python3 filter returns generator instead of list as in python2
        # This should not consume more memory because the filterd_record is not big
        return list(filtered_record)

    def load_records(self, token=None, url=None, namespace_value=NAMESPACE_CPU):
        # Host records are stored as namespaced key/values under the pool prefix.
        namespaced_records = self.pbc_mock.get_namespaced_keyvalues({'namespace': namespace_value, 'key': KEY_PREFIX_POOL})
        hosts = []
        for ns_record in namespaced_records:
            hosts.append(ns_record['value'])
        return hosts

    def save_records(self, token, url, hosts, namespace_value):
        # Live states are upserted; removed hosts are deleted from the store.
        for host in hosts:
            _key = '%s_%s' % (KEY_PREFIX_POOL, host['id'])
            payload = {
                'namespace': namespace_value,
                'key': _key
            }
            if host.get('state') in [DD_STATE_SPAWNED, DD_STATE_ACTIVE, DD_STATE_INACTIVE]:  # POST or PUT
                payload['value'] = host
                self.pbc_mock.create_or_modify_namespaced_keyvalue(namespace_value, _key, payload)
            elif host.get('state') == DD_STATE_REMOVED:
                self.pbc_mock.delete_namespaced_keyvalue(namespace_value, _key)

    def load_driver_config(self, token=None, url=None, namespace_value=NAMESPACE_CPU):
        namespaced_record = self.pbc_mock.get_namespaced_keyvalue(namespace_value, KEY_CONFIG)
        driver_config = namespaced_record['value']
        return driver_config

    def get_docker_client(self, docker_url):
        # One mock client per docker host URL; propagate current failure mode.
        if docker_url not in self.dc_mocks.keys():
            self.dc_mocks[docker_url] = DockerClientMock()
        self.dc_mocks[docker_url].failure_mode = self.failure_mode
        return self.dc_mocks[docker_url]

    def get_openstack_service(self, config):
        return self.oss_mock

    def get_pb_client(self, token, base_url, ssl_verify):
        return self.pbc_mock

    def run_ansible_on_host(self, host, custom_logger, config, playbook_name):
        if self.failure_mode:
            raise RuntimeError

    @staticmethod
    def proxy_add_route(route_id, target_url, options):
        pass

    @staticmethod
    def proxy_remove_route(route_id):
        pass

    def __repr__(self):
        # BUG FIX: this used to reference self.hosts_data, an attribute that
        # is never set anywhere in this class, so repr() always raised
        # AttributeError. Report attributes that actually exist instead.
        res = dict(
            json_data=self.json_data,
            oss_mock='%s' % self.oss_mock
        )
        return json.dumps(res)

    @staticmethod
    def get_image_names():
        return ['test/test1']

    @staticmethod
    def wait_for_port(ip_address, port, max_wait_secs=60):
        pass
# noinspection PyProtectedMember
class DockerDriverTestCase(BaseTestCase):
def setUp(self):
    # Pin the host lifetime constant to a known value so the time-based
    # housekeeping assertions below are deterministic.
    docker_driver.DD_HOST_LIFETIME = 900
@staticmethod
def create_docker_driver():
    # Build a DockerDriver wired to the in-memory access-point mock.
    config = dict(
        INSTANCE_DATA_DIR='/tmp',
        M2M_CREDENTIAL_STORE='',
        INTERNAL_API_BASE_URL='http://bogus/api/v1',
        TEST_MODE=True,
        PUBLIC_IPV4='10.0.0.1',
        EXTERNAL_HTTPS_PORT=443,
    )
    dd = docker_driver.DockerDriver(logger, config)
    dd._ap = DockerDriverAccessMock(config)
    return dd
@mock_open_context
def test_spawn_one_host(self):
    """Housekeeping spawns and activates exactly one host per namespace."""
    dd = self.create_docker_driver()
    ddam = dd._get_ap()

    # check that a host gets created
    cur_ts = 1000000
    dd._do_housekeep(token='foo', cur_ts=cur_ts)
    for dd_kind in namespace_values:
        self.assertEquals(len(ddam._filter_records(ddam.oss_mock.servers, dd_kind)), 1)
        hosts_data = ddam.load_records(namespace_value=dd_kind)
        host = hosts_data[0]
        self.assertEquals(host['state'], DD_STATE_SPAWNED)
        self.assertEquals(host['spawn_ts'], cur_ts)

    # check that the new host gets activated
    cur_ts += 60
    dd._do_housekeep(token='foo', cur_ts=cur_ts)
    for dd_kind in namespace_values:
        self.assertEquals(len(ddam._filter_records(ddam.oss_mock.servers, dd_kind)), 1)
        hosts_data = ddam.load_records(namespace_value=dd_kind)
        host = hosts_data[0]
        self.assertEquals(host['state'], DD_STATE_ACTIVE)

    # check that we don't scale up if there are no instances
    cur_ts += 60
    dd._do_housekeep(token='foo', cur_ts=cur_ts)
    for dd_kind in namespace_values:
        self.assertEquals(len(ddam._filter_records(ddam.oss_mock.servers, dd_kind)), 1)
@mock_open_context
def test_do_not_spawn_if_not_used(self):
    """An unused host is kept alive even past DD_HOST_LIFETIME."""
    dd = self.create_docker_driver()
    ddam = dd._get_ap()

    # spawn a host
    cur_ts = 1000000
    dd._do_housekeep(token='foo', cur_ts=cur_ts)

    # fast forward time past lifetime, but when the host is not used the lifetime should not tick
    cur_ts += 60 + docker_driver.DD_HOST_LIFETIME
    dd._do_housekeep(token='foo', cur_ts=cur_ts)
    for dd_kind in namespace_values:
        self.assertEquals(len(ddam._filter_records(ddam.oss_mock.servers, dd_kind)), 1)
@mock_open_context
def test_spawn_activate_remove(self):
    """A used host ages out: replacement is spawned, old host is retired."""
    dd = self.create_docker_driver()
    ddam = dd._get_ap()

    # spawn a host and activate it
    cur_ts = 1000000
    dd._do_housekeep(token='foo', cur_ts=cur_ts)
    cur_ts += 60
    dd._do_housekeep(token='foo', cur_ts=cur_ts)
    for dd_kind in namespace_values:
        self.assertEquals(len(ddam._filter_records(ddam.oss_mock.servers, dd_kind)), 1)
        hosts_data = ddam.load_records(namespace_value=dd_kind)
        self.assertSetEqual({x['state'] for x in hosts_data}, {DD_STATE_ACTIVE})
        # manipulate the host data a bit so that the host is marked as used
        hosts_data[0]['lifetime_tick_ts'] = cur_ts

    # fast forward time past host lifetime, should have one active and one spawned
    cur_ts += 60 + docker_driver.DD_HOST_LIFETIME
    dd._do_housekeep(token='foo', cur_ts=cur_ts)
    for dd_kind in namespace_values:
        self.assertEquals(len(ddam._filter_records(ddam.oss_mock.servers, dd_kind)), 2)
        hosts_data = ddam.load_records(namespace_value=dd_kind)
        self.assertSetEqual({x['state'] for x in hosts_data}, {DD_STATE_ACTIVE, DD_STATE_SPAWNED})

    # next tick: should have two active
    cur_ts += 60
    dd._do_housekeep(token='foo', cur_ts=cur_ts)
    for dd_kind in namespace_values:
        self.assertEquals(len(ddam._filter_records(ddam.oss_mock.servers, dd_kind)), 2)
        hosts_data = ddam.load_records(namespace_value=dd_kind)
        self.assertSetEqual({x['state'] for x in hosts_data}, {DD_STATE_ACTIVE, DD_STATE_ACTIVE})

    # next tick: should have one inactive, one active
    cur_ts += 60
    dd._do_housekeep(token='foo', cur_ts=cur_ts)
    for dd_kind in namespace_values:
        self.assertEquals(len(ddam._filter_records(ddam.oss_mock.servers, dd_kind)), 2)
        hosts_data = ddam.load_records(namespace_value=dd_kind)
        self.assertSetEqual({x['state'] for x in hosts_data}, {DD_STATE_INACTIVE, DD_STATE_ACTIVE})

    # last tick: should have one active
    cur_ts += 60
    dd._do_housekeep(token='foo', cur_ts=cur_ts)
    for dd_kind in namespace_values:
        self.assertEquals(len(ddam._filter_records(ddam.oss_mock.servers, dd_kind)), 1)
        hosts_data = ddam.load_records(namespace_value=dd_kind)
        self.assertSetEqual({x['state'] for x in hosts_data}, {DD_STATE_ACTIVE})
@mock_open_context
def test_provision_deprovision(self):
    """Basic provision/deprovision round trip on an active host."""
    dd = self.create_docker_driver()
    ddam = dd._get_ap()

    # spawn a host and activate it
    cur_ts = 1000000
    dd._do_housekeep(token='foo', cur_ts=cur_ts)
    cur_ts += 60
    dd._do_housekeep(token='foo', cur_ts=cur_ts)

    # spawn an instance and destroy it
    ddam.pbc_mock.add_instance_data('1001')
    token = 'foo'
    instance = ddam.pbc_mock.get_instance_description(instance_id='1001')
    blueprint = ddam.pbc_mock.get_blueprint_description(instance['blueprint_id'])
    blueprint_config = blueprint['full_config']
    docker_hosts = dd._select_hosts(blueprint_config['consumed_slots'], token, int(time.time()))
    dd._do_provision(token='foo', instance_id='1001', cur_ts=cur_ts, selected_host=docker_hosts[0])
    dd._do_deprovision(token='foo', instance_id='1001')
@mock_open_context
def test_double_deprovision(self):
    """Deprovisioning an already-deleted instance must be a no-op."""
    dd = self.create_docker_driver()
    ddam = dd._get_ap()

    # spawn a host and activate it
    cur_ts = 1000000
    dd._do_housekeep(token='foo', cur_ts=cur_ts)
    cur_ts += 60
    dd._do_housekeep(token='foo', cur_ts=cur_ts)

    # spawn an instance and destroy it twice, should not blow up
    ddam.pbc_mock.add_instance_data('1001')
    token = 'foo'
    instance = ddam.pbc_mock.get_instance_description(instance_id='1001')
    blueprint = ddam.pbc_mock.get_blueprint_description(instance['blueprint_id'])
    blueprint_config = blueprint['full_config']
    docker_hosts = dd._select_hosts(blueprint_config['consumed_slots'], token, int(time.time()))
    dd._do_provision(token='foo', instance_id='1001', cur_ts=cur_ts, selected_host=docker_hosts[0])
    dd._do_deprovision(token='foo', instance_id='1001')
    # because base driver is bypassed in tests, instance state has to be set manually
    ddam.pbc_mock.do_instance_patch('1001', dict(state='deleted'))
    dd._do_deprovision(token='foo', instance_id='1001')
@mock_open_context
def test_double_deprovision_404(self):
    """A 404 from docker on the second removal must be tolerated."""
    dd = self.create_docker_driver()
    ddam = dd._get_ap()

    # spawn a host and activate it
    cur_ts = 1000000
    dd._do_housekeep(token='foo', cur_ts=cur_ts)
    cur_ts += 60
    dd._do_housekeep(token='foo', cur_ts=cur_ts)

    # spawn an instance and destroy it twice, should not blow up
    ddam.pbc_mock.add_instance_data('1001')
    token = 'foo'
    instance = ddam.pbc_mock.get_instance_description(instance_id='1001')
    blueprint = ddam.pbc_mock.get_blueprint_description(instance['blueprint_id'])
    blueprint_config = blueprint['full_config']
    docker_hosts = dd._select_hosts(blueprint_config['consumed_slots'], token, int(time.time()))
    dd._do_provision(token='foo', instance_id='1001', cur_ts=cur_ts, selected_host=docker_hosts[0])
    dd._do_deprovision(token='foo', instance_id='1001')
    dd._do_deprovision(token='foo', instance_id='1001')
@mock_open_context
def test_scale_up_to_the_limit(self):
dd = self.create_docker_driver()
ddam = dd._get_ap()
# spawn a host and activate it
cur_ts = 1000000
dd._do_housekeep(token='foo', cur_ts=cur_ts)
cur_ts += 60
dd._do_housekeep(token='foo', cur_ts=cur_ts)
for dd_kind in namespace_values:
hosts_data = ddam.pbc_mock.get_namespaced_keyvalue(dd_kind, KEY_CONFIG)
num_slots = (hosts_data['value']['DD_HOST_FLAVOR_SLOTS_SMALL'] +
hosts_data['value']['DD_HOST_FLAVOR_SLOTS_LARGE'] * (hosts_data['value']['DD_MAX_HOSTS'] - 1)
)
# spawn instances up to the limit
for i in range(0, num_slots):
ddam.pbc_mock.add_instance_data('%d' % (1000 + i))
token = 'foo'
instance = ddam.pbc_mock.get_instance_description(instance_id='%d' % (1000 + i))
blueprint = ddam.pbc_mock.get_blueprint_description(instance['blueprint_id'])
blueprint_config = blueprint['full_config']
docker_hosts = dd._select_hosts(blueprint_config['consumed_slots'], token, int(time.time()), dd_kind)
dd._do_provision(token='foo', instance_id='%d' % (1000 + i), cur_ts=cur_ts, selected_host=docker_hosts[0])
cur_ts += 60
dd._do_housekeep(token='foo', cur_ts=cur_ts)
self.assertEquals(len(ddam._filter_records(ddam.oss_mock.servers, dd_kind)), hosts_data['value']['DD_MAX_HOSTS'])
try:
ddam.pbc_mock.add_instance_data('999')
token = 'foo'
instance = ddam.pbc_mock.get_instance_description(instance_id='999')
blueprint = ddam.pbc_mock.get_blueprint_description(instance['blueprint_id'])
blueprint_config = blueprint['full_config']
docker_hosts = dd._select_hosts(blueprint_config['consumed_slots'], token, int(time.time()), dd_kind)
dd._do_provision(token='foo', instance_id='999', cur_ts=cur_ts, selected_host=docker_hosts[0])
self.fail('pool should have been full')
except RuntimeWarning:
pass
@mock_open_context
def test_scale_down(self):
dd = self.create_docker_driver()
ddam = dd._get_ap()
# spawn a host and activate it
cur_ts = 1000000
dd._do_housekeep(token='foo', cur_ts=cur_ts)
cur_ts += 60
dd._do_housekeep(token='foo', cur_ts=cur_ts)
for dd_kind in namespace_values:
hosts_data = ddam.pbc_mock.get_namespaced_keyvalue(dd_kind, KEY_CONFIG)
num_slots = (hosts_data['value']['DD_HOST_FLAVOR_SLOTS_SMALL'] +
hosts_data['value']['DD_HOST_FLAVOR_SLOTS_LARGE'] * (hosts_data['value']['DD_MAX_HOSTS'] - 1)
)
# spawn instances up to the limit
for i in range(0, num_slots):
ddam.pbc_mock.add_instance_data('%d' % (1000 + i))
token = 'foo'
instance = ddam.pbc_mock.get_instance_description(instance_id='%d' % (1000 + i))
blueprint = ddam.pbc_mock.get_blueprint_description(instance['blueprint_id'])
blueprint_config = blueprint['full_config']
docker_hosts = dd._select_hosts(blueprint_config['consumed_slots'], token, int(time.time()), dd_kind)
dd._do_provision(token='foo', instance_id='%d' % (1000 + i), cur_ts=cur_ts, selected_host=docker_hosts[0])
cur_ts += 60
dd._do_housekeep(token='foo', cur_ts=cur_ts)
self.assertEquals(len(ddam._filter_records(ddam.oss_mock.servers, dd_kind)), hosts_data['value']['DD_MAX_HOSTS'])
# remove instances
for i in range(0, num_slots):
dd._do_deprovision(token='foo', instance_id='%d' % (1000 + i))
# let logic scale down (3 ticks per host should be enough)
cur_ts += docker_driver.DD_HOST_LIFETIME
for i in range(0, hosts_data['value']['DD_MAX_HOSTS'] * 3):
dd._do_housekeep(token='foo', cur_ts=cur_ts)
self.assertEquals(len(ddam._filter_records(ddam.oss_mock.servers, dd_kind)), 1)
@mock_open_context
def test_shutdown_mode(self):
dd = self.create_docker_driver()
ddam = dd._get_ap()
# spawn a host and activate it
cur_ts = 1000000
dd._do_housekeep(token='foo', cur_ts=cur_ts)
cur_ts += 60
dd._do_housekeep(token='foo', cur_ts=cur_ts)
for dd_kind in namespace_values:
hosts_data = ddam.pbc_mock.get_namespaced_keyvalue(dd_kind, KEY_CONFIG)
hosts_data['value']['DD_SHUTDOWN_MODE'] = True
# set shutdown mode and see that we have scaled down
payload = {
'namespace': dd_kind,
'key': KEY_CONFIG
}
payload['value'] = hosts_data['value']
ddam.pbc_mock.create_or_modify_namespaced_keyvalue(dd_kind, KEY_CONFIG, payload)
cur_ts += 60
dd._do_housekeep(token='foo', cur_ts=cur_ts)
cur_ts += 60
dd._do_housekeep(token='foo', cur_ts=cur_ts)
for dd_kind in namespace_values:
self.assertEquals(len(ddam._filter_records(ddam.oss_mock.servers, dd_kind)), 0)
@mock_open_context
def test_inactive_host_with_instances(self):
dd = self.create_docker_driver()
ddam = dd._get_ap()
# spawn a host and activate it
cur_ts = 1000000
dd._do_housekeep(token='foo', cur_ts=cur_ts)
cur_ts += 60
dd._do_housekeep(token='foo', cur_ts=cur_ts)
count = 999
for dd_kind in namespace_values:
count += 1
# add an instance
ddam.pbc_mock.add_instance_data(count)
token = 'foo'
instance = ddam.pbc_mock.get_instance_description(instance_id=count)
blueprint = ddam.pbc_mock.get_blueprint_description(instance['blueprint_id'])
blueprint_config = blueprint['full_config']
docker_hosts = dd._select_hosts(blueprint_config['consumed_slots'], token, int(time.time()), dd_kind)
dd._do_provision(token='foo', instance_id=count, cur_ts=cur_ts, selected_host=docker_hosts[0])
# change the state to inactive under the hood (this is possible due to a race
# between housekeep() and provision())
hosts_data = ddam.load_records(namespace_value=dd_kind)
hosts_data[0]['state'] = DD_STATE_INACTIVE
for i in range(5):
cur_ts += 60
dd._do_housekeep(token='foo', cur_ts=cur_ts)
for dd_kind in namespace_values:
hosts_data = ddam.load_records(namespace_value=dd_kind)
self.assertEquals(len(ddam._filter_records(ddam.oss_mock.servers, dd_kind)), 2)
self.assertSetEqual({x['state'] for x in hosts_data}, {DD_STATE_INACTIVE, DD_STATE_ACTIVE})
# remove the instance and check that the host is removed also
dd._do_deprovision(token='foo', instance_id=(count))
dd._do_deprovision(token='foo', instance_id=(count - 1))
for i in range(5):
cur_ts += 60
dd._do_housekeep(token='foo', cur_ts=cur_ts)
for dd_kind in namespace_values:
hosts_data = ddam.load_records(namespace_value=dd_kind)
self.assertEquals(len(ddam._filter_records(ddam.oss_mock.servers, dd_kind)), 1)
self.assertSetEqual({x['state'] for x in hosts_data}, {DD_STATE_ACTIVE})
    @mock_open_context
    def test_prepare_failing(self):
        """A transient prepare failure leaves the host in 'spawned' state;
        it is activated once the failure clears."""
        dd = self.create_docker_driver()
        ddam = dd._get_ap()
        # spawn a host
        cur_ts = 1000000
        dd._do_housekeep(token='foo', cur_ts=cur_ts)
        # mimic a failure to prepare it
        ddam.failure_mode = True
        cur_ts += 60
        dd._do_housekeep(token='foo', cur_ts=cur_ts)
        hosts_data = ddam.load_records()
        self.assertSetEqual({x['state'] for x in hosts_data}, {DD_STATE_SPAWNED})
        # recover
        ddam.failure_mode = False
        cur_ts += 60
        dd._do_housekeep(token='foo', cur_ts=cur_ts)
        hosts_data = ddam.load_records()
        self.assertSetEqual({x['state'] for x in hosts_data}, {DD_STATE_ACTIVE})
    @mock_open_context
    def test_prepare_failing_max_retries(self):
        """After DD_MAX_HOST_ERRORS consecutive prepare failures the host is
        marked inactive, then removed, and a fresh host is spawned."""
        dd = self.create_docker_driver()
        ddam = dd._get_ap()
        # spawn a host
        cur_ts = 1000000
        dd._do_housekeep(token='foo', cur_ts=cur_ts)
        # mimic a failure to prepare it
        ddam.failure_mode = True
        for i in range(docker_driver.DD_MAX_HOST_ERRORS + 1):
            cur_ts += 60
            dd._do_housekeep(token='foo', cur_ts=cur_ts)
        hosts_data = ddam.load_records()
        self.assertSetEqual({x['state'] for x in hosts_data}, {DD_STATE_INACTIVE})
        ddam.failure_mode = False
        cur_ts += 60
        dd._do_housekeep(token='foo', cur_ts=cur_ts)
        hosts_data = ddam.load_records() # Load the hosts_data each time to get the latest updates
        self.assertEqual(len(hosts_data), 0)
        for i in range(2):
            cur_ts += 60
            dd._do_housekeep(token='foo', cur_ts=cur_ts)
        hosts_data = ddam.load_records()
        self.assertSetEqual({x['state'] for x in hosts_data}, {DD_STATE_ACTIVE})
@mock_open_context
def test_docker_comm_probs(self):
dd = self.create_docker_driver()
ddam = dd._get_ap()
# spawn a host and activate it
cur_ts = 1000000
dd._do_housekeep(token='foo', cur_ts=cur_ts)
cur_ts += 60
dd._do_housekeep(token='foo', cur_ts=cur_ts)
ddam.pbc_mock.add_instance_data('1000')
token = 'foo'
instance = ddam.pbc_mock.get_instance_description(instance_id='1000')
blueprint = ddam.pbc_mock.get_blueprint_description(instance['blueprint_id'])
blueprint_config = blueprint['full_config']
# mimic a docker comm failure
ddam.failure_mode = True
try:
dd._do_provision(token='foo', instance_id='1000', cur_ts=cur_ts, selected_host=None)
self.fail('should have raised an error')
except Exception:
pass
ddam.failure_mode = False
docker_hosts = dd._select_hosts(blueprint_config['consumed_slots'], token, int(time.time()))
dd._do_provision(token='foo', instance_id='1000', cur_ts=cur_ts, selected_host=docker_hosts[0])
ddam.failure_mode = True
ddam.failure_mode = True
try:
dd._do_deprovision(token='foo', instance_id='1000')
self.fail('should have raised an error')
except Exception:
pass
ddam.failure_mode = False
dd._do_deprovision(token='foo', instance_id='1000')
# spawn only one kind of pool_vm
@mock_open_context
def test_spawn_dd_kind(self):
dd = self.create_docker_driver()
ddam = dd._get_ap()
# spawn a host and activate it
cur_ts = 1000000
dd._do_housekeep(token='foo', cur_ts=cur_ts)
cur_ts += 60
dd._do_housekeep(token='foo', cur_ts=cur_ts)
for dd_kind in namespace_values:
hosts_data = ddam.pbc_mock.get_namespaced_keyvalue(dd_kind, KEY_CONFIG)
if dd_kind == NAMESPACE_CPU:
hosts_data['value']['DD_SHUTDOWN_MODE'] = True
# set shutdown mode and see that we have scaled down
payload = {
'namespace': dd_kind,
'key': KEY_CONFIG
}
payload['value'] = hosts_data['value']
ddam.pbc_mock.create_or_modify_namespaced_keyvalue(dd_kind, KEY_CONFIG, payload)
cur_ts += 60
dd._do_housekeep(token='foo', cur_ts=cur_ts)
cur_ts += 60
dd._do_housekeep(token='foo', cur_ts=cur_ts)
self.assertEquals(len(ddam._filter_records(ddam.oss_mock.servers, NAMESPACE_CPU)), 0)
self.assertEquals(len(ddam._filter_records(ddam.oss_mock.servers, NAMESPACE_GPU)), 1)
| mit |
aznrice/android_kernel_samsung_afyonltetmo | tools/perf/scripts/python/net_dropmonitor.py | 4235 | 1554 | # Monitor the system for dropped packets and proudce a report of drop locations and counts
import os
import sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from Util import *
drop_log = {}
kallsyms = []
def get_kallsyms_table():
global kallsyms
try:
f = open("/proc/kallsyms", "r")
linecount = 0
for line in f:
linecount = linecount+1
f.seek(0)
except:
return
j = 0
for line in f:
loc = int(line.split()[0], 16)
name = line.split()[2]
j = j +1
if ((j % 100) == 0):
print "\r" + str(j) + "/" + str(linecount),
kallsyms.append({ 'loc': loc, 'name' : name})
print "\r" + str(j) + "/" + str(linecount)
kallsyms.sort()
return
def get_sym(sloc):
	"""Resolve a kernel address to (symbol name, offset into symbol).

	The previous implementation returned the first symbol whose start
	address was >= the target, i.e. the symbol *after* the drop location;
	a drop address actually belongs to the last symbol starting at or
	below it.  Returns (None, 0) when the table is empty.
	"""
	loc = int(sloc)
	match = None
	# kallsyms is sorted by 'loc'; keep the last entry not beyond loc
	for i in kallsyms:
		if (i['loc'] > loc):
			break
		match = i
	if match is not None:
		return (match['name'], loc - match['loc'])
	return (None, 0)
def print_drop_table():
	"""Print a table of drop locations (resolved to symbols) and counts."""
	print "%25s %25s %25s" % ("LOCATION", "OFFSET", "COUNT")
	for i in drop_log.keys():
		(sym, off) = get_sym(i)
		if sym == None:
			# unresolved address: fall back to printing the raw location
			sym = i
		print "%25s %25s %25s" % (sym, off, drop_log[i])
def trace_begin():
	"""perf hook: called once when tracing starts."""
	print "Starting trace (Ctrl-C to dump results)"
def trace_end():
	"""perf hook: called once at the end; resolves symbols and dumps counts."""
	print "Gathering kallsyms data"
	get_kallsyms_table()
	print_drop_table()
# called from perf, when it finds a correspoinding event
def skb__kfree_skb(name, context, cpu, sec, nsec, pid, comm,
		   skbaddr, protocol, location):
	"""Count one dropped skb against its kfree location (keyed as a string)."""
	slocation = str(location)
	try:
		drop_log[slocation] = drop_log[slocation] + 1
	except:
		# first drop seen at this location
		drop_log[slocation] = 1
| gpl-2.0 |
savoirfairelinux/OpenUpgrade | addons/account/report/account_general_journal.py | 381 | 7669 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
from openerp.osv import osv
from openerp.report import report_sxw
from common_report_header import common_report_header
class journal_print(report_sxw.rml_parse, common_report_header):
    """RML parser behind the "General Journal" accounting report.

    Resolves the journal/period selection made in the report wizard and
    exposes the helpers used by the report template (per-period and
    per-journal debit/credit totals, filters, currency display, ...).
    """

    def __init__(self, cr, uid, name, context=None):
        if context is None:
            context = {}
        super(journal_print, self).__init__(cr, uid, name, context=context)
        self.period_ids = []
        self.journal_ids = []
        self.localcontext.update( {
            'time': time,
            'lines': self.lines,
            'periods': self.periods,
            'sum_debit_period': self._sum_debit_period,
            'sum_credit_period': self._sum_credit_period,
            'sum_debit': self._sum_debit,
            'sum_credit': self._sum_credit,
            'get_fiscalyear': self._get_fiscalyear,
            'get_account': self._get_account,
            'get_start_period': self.get_start_period,
            'get_end_period': self.get_end_period,
            'get_sortby': self._get_sortby,
            'get_filter': self._get_filter,
            'get_journal': self._get_journal,
            'get_start_date': self._get_start_date,
            'get_end_date': self._get_end_date,
            'display_currency': self._display_currency,
            'get_target_move': self._get_target_move,
        })

    def set_context(self, objects, data, ids, report_type=None):
        """Resolve the selected journal periods into period/journal id lists
        before the report is rendered."""
        obj_move = self.pool.get('account.move.line')
        new_ids = ids
        self.query_get_clause = ''
        self.target_move = data['form'].get('target_move', 'all')
        if (data['model'] == 'ir.ui.menu'):
            # launched from the wizard: the selection lives in the form data
            new_ids = 'active_ids' in data['form'] and data['form']['active_ids'] or []
            self.query_get_clause = 'AND '
            self.query_get_clause += obj_move._query_get(self.cr, self.uid, obj='l', context=data['form'].get('used_context', {}))
            objects = self.pool.get('account.journal.period').browse(self.cr, self.uid, new_ids)
        if new_ids:
            self.cr.execute('SELECT period_id, journal_id FROM account_journal_period WHERE id IN %s', (tuple(new_ids),))
            res = self.cr.fetchall()
            self.period_ids, self.journal_ids = zip(*res)
        return super(journal_print, self).set_context(objects, data, ids, report_type=report_type)

    # returns a list of period objs
    def periods(self, journal_period_objs):
        """Return the periods of the given journal periods, deduplicated and
        in their original order."""
        dic = {}
        def filter_unique(o):
            key = o.period_id.id
            res = key in dic
            if not res:
                dic[key] = True
            return not res
        filtered_objs = filter(filter_unique, journal_period_objs)
        return map(lambda x: x.period_id, filtered_objs)

    def lines(self, period_id):
        """Return debit/credit totals for one period, grouped by journal and
        currency, honouring the target-move filter."""
        if not self.journal_ids:
            return []
        move_state = ['draft','posted']
        if self.target_move == 'posted':
            move_state = ['posted']
        # an explicit trailing space after the currency join keeps the
        # concatenated SQL fragments separated
        self.cr.execute('SELECT j.code, j.name, l.amount_currency,c.symbol AS currency_code,l.currency_id, '
                        'SUM(l.debit) AS debit, SUM(l.credit) AS credit '
                        'FROM account_move_line l '
                        'LEFT JOIN account_move am ON (l.move_id=am.id) '
                        'LEFT JOIN account_journal j ON (l.journal_id=j.id) '
                        'LEFT JOIN res_currency c on (l.currency_id=c.id) '
                        'WHERE am.state IN %s AND l.period_id=%s AND l.journal_id IN %s ' + self.query_get_clause + ' '
                        'GROUP BY j.id, j.code, j.name, l.amount_currency, c.symbol, l.currency_id ',
                        (tuple(move_state), period_id, tuple(self.journal_ids)))
        return self.cr.dictfetchall()

    def _set_get_account_currency_code(self, account_id):
        """Cache the currency symbol of the given account on the parser.

        The account id is passed as a query parameter instead of being
        interpolated into the SQL string (SQL-injection hardening).
        """
        self.cr.execute("SELECT c.symbol AS code "
                        "FROM res_currency c, account_account AS ac "
                        "WHERE ac.id = %s AND ac.currency_id = c.id", (account_id,))
        result = self.cr.fetchone()
        if result:
            self.account_currency = result[0]
        else:
            self.account_currency = False

    def _get_account(self, data):
        """Company name when launched on a journal period, else default."""
        if data['model'] == 'account.journal.period':
            return self.pool.get('account.journal.period').browse(self.cr, self.uid, data['id']).company_id.name
        return super(journal_print, self)._get_account(data)

    def _get_fiscalyear(self, data):
        """Fiscal year name when launched on a journal period, else default."""
        if data['model'] == 'account.journal.period':
            return self.pool.get('account.journal.period').browse(self.cr, self.uid, data['id']).fiscalyear_id.name
        return super(journal_print, self)._get_fiscalyear(data)

    def _display_currency(self, data):
        """Whether the amount-currency column should be shown."""
        if data['model'] == 'account.journal.period':
            return True
        return data['form']['amount_currency']

    def _sum_debit_period(self, period_id, journal_id=False):
        """Total debit of one period, optionally restricted to one journal."""
        if journal_id:
            journals = [journal_id]
        else:
            journals = self.journal_ids
        if not journals:
            return 0.0
        move_state = ['draft','posted']
        if self.target_move == 'posted':
            move_state = ['posted']
        self.cr.execute('SELECT SUM(l.debit) FROM account_move_line l '
                        'LEFT JOIN account_move am ON (l.move_id=am.id) '
                        'WHERE am.state IN %s AND l.period_id=%s AND l.journal_id IN %s ' + self.query_get_clause + ' ' \
                        'AND l.state<>\'draft\'',
                        (tuple(move_state), period_id, tuple(journals)))
        return self.cr.fetchone()[0] or 0.0

    def _sum_credit_period(self, period_id, journal_id=None):
        """Total credit of one period, optionally restricted to one journal."""
        if journal_id:
            journals = [journal_id]
        else:
            journals = self.journal_ids
        move_state = ['draft','posted']
        if self.target_move == 'posted':
            move_state = ['posted']
        if not journals:
            return 0.0
        self.cr.execute('SELECT SUM(l.credit) FROM account_move_line l '
                        'LEFT JOIN account_move am ON (l.move_id=am.id) '
                        'WHERE am.state IN %s AND l.period_id=%s AND l.journal_id IN %s '+ self.query_get_clause + ' ' \
                        'AND l.state<>\'draft\'',
                        (tuple(move_state), period_id, tuple(journals)))
        return self.cr.fetchone()[0] or 0.0
class report_generaljournal(osv.AbstractModel):
    """QWeb wrapper model rendering the General Journal report through the
    legacy ``journal_print`` RML parser."""
    _name = 'report.account.report_generaljournal'
    _inherit = 'report.abstract_report'
    _template = 'account.report_generaljournal'
    _wrapped_report_class = journal_print
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
tchellomello/home-assistant | homeassistant/helpers/frame.py | 8 | 3066 | """Provide frame helper for finding the current frame context."""
import asyncio
import functools
import logging
from traceback import FrameSummary, extract_stack
from typing import Any, Callable, Optional, Tuple, TypeVar, cast
from homeassistant.exceptions import HomeAssistantError
_LOGGER = logging.getLogger(__name__)
CALLABLE_T = TypeVar("CALLABLE_T", bound=Callable) # pylint: disable=invalid-name
def get_integration_frame(
    exclude_integrations: Optional[set] = None,
) -> Tuple[FrameSummary, str, str]:
    """Return the frame, integration and integration path of the current stack frame."""
    found_frame = None
    if not exclude_integrations:
        exclude_integrations = set()
    # Walk the stack from the innermost frame outwards and stop at the first
    # frame whose filename lives under an integration directory.
    for frame in reversed(extract_stack()):
        for path in ("custom_components/", "homeassistant/components/"):
            try:
                index = frame.filename.index(path)
                # The integration name sits between the path marker and the
                # next path separator.
                start = index + len(path)
                end = frame.filename.index("/", start)
                integration = frame.filename[start:end]
                if integration not in exclude_integrations:
                    found_frame = frame
                    break
            except ValueError:
                # Marker (or the trailing "/") not present in this filename.
                continue
        if found_frame is not None:
            break
    if found_frame is None:
        raise MissingIntegrationFrame
    # `integration` and `path` hold the values from the iteration that set
    # `found_frame`.
    return found_frame, integration, path
class MissingIntegrationFrame(HomeAssistantError):
    """Raised when no integration is found in the current call stack frame."""
def report(what: str) -> None:
    """Report incorrect usage.

    Async friendly.
    """
    try:
        integration_frame = get_integration_frame()
    except MissingIntegrationFrame as err:
        # Did not source from an integration? Hard error.
        raise RuntimeError(
            f"Detected code that {what}. Please report this issue."
        ) from err
    # Sourced from an integration: log a warning instead of raising.
    report_integration(what, integration_frame)
def report_integration(
    what: str, integration_frame: Tuple[FrameSummary, str, str]
) -> None:
    """Log a warning about incorrect usage inside an integration.

    Async friendly.
    """
    frame, integration, path = integration_frame
    marker = frame.filename.index(path)
    # Custom components get an extra hint to contact the author.
    extra = " to the custom component author" if path == "custom_components/" else ""
    _LOGGER.warning(
        "Detected integration that %s. "
        "Please report issue%s for %s using this method at %s, line %s: %s",
        what,
        extra,
        integration,
        frame.filename[marker:],
        frame.lineno,
        frame.line.strip(),
    )
def warn_use(func: CALLABLE_T, what: str) -> CALLABLE_T:
    """Mock a function to warn when it was about to be used.

    The returned wrapper deliberately does NOT call ``func``; it only
    reports the attempted use. A coroutine wrapper is produced when the
    wrapped function is a coroutine function so callers can still await it.
    """
    if asyncio.iscoroutinefunction(func):

        @functools.wraps(func)
        async def report_use(*args: Any, **kwargs: Any) -> None:
            report(what)

    else:

        @functools.wraps(func)
        def report_use(*args: Any, **kwargs: Any) -> None:
            report(what)

    return cast(CALLABLE_T, report_use)
jmartinezchaine/OpenERP | openerp/addons/web/test/test_menu.py | 16 | 4792 | # -*- coding: utf-8 -*-
import mock
import unittest2
from ..controllers import main
from ..common.session import OpenERPSession
class Placeholder(object):
    """Minimal stand-in object exposing every keyword argument as an attribute."""

    def __init__(self, **kwargs):
        # Equivalent to setattr() in a loop for a plain object.
        self.__dict__.update(kwargs)
class LoadTest(unittest2.TestCase):
    """Tests for Menu.do_load: building the menu tree from flat records.

    NOTE(review): all test methods are currently skipped; they appear to
    predate an API change in Menu.do_load -- confirm before re-enabling.
    """
    def setUp(self):
        # fresh controller, mocked menus model and a bare request/session
        self.menu = main.Menu()
        self.menus_mock = mock.Mock()
        self.request = Placeholder(session=OpenERPSession())
    def tearDown(self):
        del self.request
        del self.menus_mock
        del self.menu
    @unittest2.skip
    def test_empty(self):
        """No menu records -> root node with no children."""
        self.menus_mock.search = mock.Mock(return_value=[])
        self.menus_mock.read = mock.Mock(return_value=[])
        root = self.menu.do_load(self.request)
        self.menus_mock.search.assert_called_with([])
        self.menus_mock.read.assert_called_with(
            [], ['name', 'sequence', 'parent_id'])
        self.assertListEqual(
            root['children'],
            [])
    @unittest2.skip
    def test_applications_sort(self):
        """Top-level applications are ordered by their 'sequence' field."""
        self.menus_mock.search = mock.Mock(return_value=[1, 2, 3])
        self.menus_mock.read = mock.Mock(return_value=[
            {'id': 2, 'sequence': 3, 'parent_id': False},
            {'id': 3, 'sequence': 2, 'parent_id': False},
            {'id': 1, 'sequence': 1, 'parent_id': False},
        ])
        root = self.menu.do_load(self.request)
        self.menus_mock.read.assert_called_with(
            [1, 2, 3], ['name', 'sequence', 'parent_id'])
        self.assertEqual(
            root['children'],
            [{
                'id': 1, 'sequence': 1,
                'parent_id': False, 'children': []
            }, {
                'id': 3, 'sequence': 2,
                'parent_id': False, 'children': []
            }, {
                'id': 2, 'sequence': 3,
                'parent_id': False, 'children': []
            }])
    @unittest2.skip
    def test_deep(self):
        """parent_id links are resolved into a nested children tree."""
        self.menus_mock.search = mock.Mock(return_value=[1, 2, 3, 4])
        self.menus_mock.read = mock.Mock(return_value=[
            {'id': 1, 'sequence': 1, 'parent_id': False},
            {'id': 2, 'sequence': 2, 'parent_id': [1, '']},
            {'id': 3, 'sequence': 1, 'parent_id': [2, '']},
            {'id': 4, 'sequence': 2, 'parent_id': [2, '']},
        ])
        root = self.menu.do_load(self.request)
        self.assertEqual(
            root['children'],
            [{
                'id': 1,
                'sequence': 1,
                'parent_id': False,
                'children': [{
                    'id': 2,
                    'sequence': 2,
                    'parent_id': [1, ''],
                    'children': [{
                        'id': 3,
                        'sequence': 1,
                        'parent_id': [2, ''],
                        'children': []
                    }, {
                        'id': 4,
                        'sequence': 2,
                        'parent_id': [2, ''],
                        'children': []
                    }]
                }]
            }]
        )
class ActionMungerTest(unittest2.TestCase):
    """Tests for main.fix_view_modes: rewriting legacy 'tree' view modes.

    When view_type is 'form', 'tree' views are client-side list views and
    must be renamed to 'list'; a real tree view (view_type 'tree') is kept.
    """
    def setUp(self):
        self.menu = main.Menu()
    def test_actual_treeview(self):
        """A genuine tree view (view_type 'tree') is left untouched."""
        action = {
            "views": [[False, "tree"], [False, "form"],
                      [False, "calendar"]],
            "view_type": "tree",
            "view_id": False,
            "view_mode": "tree,form,calendar"
        }
        changed = action.copy()
        del action['view_type']
        main.fix_view_modes(changed)
        self.assertEqual(changed, action)
    @unittest2.skip
    def test_list_view(self):
        """'tree' entries become 'list' when view_type is 'form'."""
        action = {
            "views": [[False, "tree"], [False, "form"],
                      [False, "calendar"]],
            "view_type": "form",
            "view_id": False,
            "view_mode": "tree,form,calendar"
        }
        main.fix_view_modes(action)
        self.assertEqual(action, {
            "views": [[False, "list"], [False, "form"],
                      [False, "calendar"]],
            "view_id": False,
            "view_mode": "list,form,calendar"
        })
    @unittest2.skip
    def test_redundant_views(self):
        """Every 'tree' entry is rewritten, including ones with explicit ids."""
        action = {
            "views": [[False, "tree"], [False, "form"],
                      [False, "calendar"], [42, "tree"]],
            "view_type": "form",
            "view_id": False,
            "view_mode": "tree,form,calendar"
        }
        main.fix_view_modes(action)
        self.assertEqual(action, {
            "views": [[False, "list"], [False, "form"],
                      [False, "calendar"], [42, "list"]],
            "view_id": False,
            "view_mode": "list,form,calendar"
        })
| agpl-3.0 |
NeostreamTechnology/Microservices | venv/lib/python2.7/site-packages/simplejson/tests/test_encode_basestring_ascii.py | 147 | 2337 | from unittest import TestCase
import simplejson.encoder
from simplejson.compat import b
# (input, expected JSON-escaped output) pairs exercising the ASCII escaper:
# control characters, BMP and non-BMP code points (surrogate pairs), UTF-8
# byte strings and already-ASCII text.
CASES = [
    (u'/\\"\ucafe\ubabe\uab98\ufcde\ubcda\uef4a\x08\x0c\n\r\t`1~!@#$%^&*()_+-=[]{}|;:\',./<>?', '"/\\\\\\"\\ucafe\\ubabe\\uab98\\ufcde\\ubcda\\uef4a\\b\\f\\n\\r\\t`1~!@#$%^&*()_+-=[]{}|;:\',./<>?"'),
    (u'\u0123\u4567\u89ab\ucdef\uabcd\uef4a', '"\\u0123\\u4567\\u89ab\\ucdef\\uabcd\\uef4a"'),
    (u'controls', '"controls"'),
    (u'\x08\x0c\n\r\t', '"\\b\\f\\n\\r\\t"'),
    (u'{"object with 1 member":["array with 1 element"]}', '"{\\"object with 1 member\\":[\\"array with 1 element\\"]}"'),
    (u' s p a c e d ', '" s p a c e d "'),
    (u'\U0001d120', '"\\ud834\\udd20"'),
    (u'\u03b1\u03a9', '"\\u03b1\\u03a9"'),
    (b('\xce\xb1\xce\xa9'), '"\\u03b1\\u03a9"'),
    (u'\u03b1\u03a9', '"\\u03b1\\u03a9"'),
    (b('\xce\xb1\xce\xa9'), '"\\u03b1\\u03a9"'),
    (u'\u03b1\u03a9', '"\\u03b1\\u03a9"'),
    (u'\u03b1\u03a9', '"\\u03b1\\u03a9"'),
    (u"`1~!@#$%^&*()_+-={':[,]}|;.</>?", '"`1~!@#$%^&*()_+-={\':[,]}|;.</>?"'),
    (u'\x08\x0c\n\r\t', '"\\b\\f\\n\\r\\t"'),
    (u'\u0123\u4567\u89ab\ucdef\uabcd\uef4a', '"\\u0123\\u4567\\u89ab\\ucdef\\uabcd\\uef4a"'),
]
class TestEncodeBaseStringAscii(TestCase):
    """Run CASES against both the pure-Python and the C escaper."""
    def test_py_encode_basestring_ascii(self):
        self._test_encode_basestring_ascii(simplejson.encoder.py_encode_basestring_ascii)
    def test_c_encode_basestring_ascii(self):
        # the C extension may be unavailable; skip silently in that case
        if not simplejson.encoder.c_encode_basestring_ascii:
            return
        self._test_encode_basestring_ascii(simplejson.encoder.c_encode_basestring_ascii)
    def _test_encode_basestring_ascii(self, encode_basestring_ascii):
        """Shared driver: assert every CASES pair for the given escaper."""
        fname = encode_basestring_ascii.__name__
        for input_string, expect in CASES:
            result = encode_basestring_ascii(input_string)
            #self.assertEqual(result, expect,
            #    '{0!r} != {1!r} for {2}({3!r})'.format(
            #        result, expect, fname, input_string))
            self.assertEqual(result, expect,
                '%r != %r for %s(%r)' % (result, expect, fname, input_string))
    def test_sorted_dict(self):
        """sort_keys=True must emit object keys in sorted order."""
        items = [('one', 1), ('two', 2), ('three', 3), ('four', 4), ('five', 5)]
        s = simplejson.dumps(dict(items), sort_keys=True)
        self.assertEqual(s, '{"five": 5, "four": 4, "one": 1, "three": 3, "two": 2}')
| mit |
ppwwyyxx/tensorpack | tensorpack/utils/logger.py | 2 | 5996 | # -*- coding: utf-8 -*-
# File: logger.py
"""
The logger module itself has the common logging functions of Python's
:class:`logging.Logger`. For example:
.. code-block:: python
from tensorpack.utils import logger
logger.set_logger_dir('train_log/test')
logger.info("Test")
logger.error("Error happened!")
"""
import logging
import os
import os.path
import shutil
import sys
from datetime import datetime
from six.moves import input
from termcolor import colored
__all__ = ['set_logger_dir', 'auto_set_dir', 'get_logger_dir']
class _MyFormatter(logging.Formatter):
    """Formatter that colorizes the date prefix and tags WRN/ERR/DBG levels."""
    def format(self, record):
        date = colored('[%(asctime)s @%(filename)s:%(lineno)d]', 'green')
        msg = '%(message)s'
        if record.levelno == logging.WARNING:
            fmt = date + ' ' + colored('WRN', 'red', attrs=['blink']) + ' ' + msg
        elif record.levelno == logging.ERROR or record.levelno == logging.CRITICAL:
            fmt = date + ' ' + colored('ERR', 'red', attrs=['blink', 'underline']) + ' ' + msg
        elif record.levelno == logging.DEBUG:
            fmt = date + ' ' + colored('DBG', 'yellow', attrs=['blink']) + ' ' + msg
        else:
            fmt = date + ' ' + msg
        if hasattr(self, '_style'):
            # Python3 compatibility
            self._style._fmt = fmt
        self._fmt = fmt
        return super(_MyFormatter, self).format(record)
def _getlogger():
    """Create the package logger with a colored stdout handler attached."""
    # this file is synced to "dataflow" package as well
    package_name = "dataflow" if __name__.startswith("dataflow") else "tensorpack"
    logger = logging.getLogger(package_name)
    # don't bubble records up to the root logger (avoids duplicate output)
    logger.propagate = False
    logger.setLevel(logging.INFO)
    handler = logging.StreamHandler(sys.stdout)
    handler.setFormatter(_MyFormatter(datefmt='%m%d %H:%M:%S'))
    logger.addHandler(handler)
    return logger
_logger = _getlogger()
_LOGGING_METHOD = ['info', 'warning', 'error', 'critical', 'exception', 'debug', 'setLevel', 'addFilter']
# export logger functions
# (at module level, locals() is the module namespace, so this re-exports the
# logger's bound methods as module-level functions)
for func in _LOGGING_METHOD:
    locals()[func] = getattr(_logger, func)
    __all__.append(func)
# 'warn' is deprecated in logging module
warn = _logger.warning
__all__.append('warn')
def _get_time_str():
return datetime.now().strftime('%m%d-%H%M%S')
# globals: logger file and directory:
LOG_DIR = None        # directory set by set_logger_dir(); None until then
_FILE_HANDLER = None  # active logging.FileHandler, replaced on re-set
def _set_file(path):
    """Attach a file handler writing to `path`, backing up any existing file."""
    global _FILE_HANDLER
    if os.path.isfile(path):
        # keep the previous log by renaming it with a timestamp suffix
        backup_name = path + '.' + _get_time_str()
        shutil.move(path, backup_name)
        _logger.info("Existing log file '{}' backuped to '{}'".format(path, backup_name))  # noqa: F821
    hdl = logging.FileHandler(
        filename=path, encoding='utf-8', mode='w')
    hdl.setFormatter(_MyFormatter(datefmt='%m%d %H:%M:%S'))
    _FILE_HANDLER = hdl
    _logger.addHandler(hdl)
    # record how the process was started, useful when reading old logs
    _logger.info("Argv: " + ' '.join(sys.argv))
def set_logger_dir(dirname, action=None):
    """
    Set the directory for global logging.

    Args:
        dirname(str): log directory
        action(str): an action of ["k", "d", "b", "n", "q"] to be performed
            when the directory exists. Will ask user by default.

                "d": delete the directory. Note that the deletion may fail when
                the directory is used by tensorboard.

                "k": keep the directory. This is useful when you resume from a
                previous training and want the directory to look as if the
                training was not interrupted.
                Note that this option does not load old models or any other
                old states for you. It simply does nothing.

                "b": back up the existing directory under a timestamped name.

                "n": use a new, timestamp-suffixed directory instead.

                Any other value (including "q") raises OSError.
    """
    dirname = os.path.normpath(dirname)
    global LOG_DIR, _FILE_HANDLER
    if _FILE_HANDLER:
        # unload and close the old file handler, so that we may safely delete the logger directory
        _logger.removeHandler(_FILE_HANDLER)
        del _FILE_HANDLER

    def dir_nonempty(dirname):
        # If directory exists and nonempty (ignore hidden files), prompt for action
        return os.path.isdir(dirname) and len([x for x in os.listdir(dirname) if x[0] != '.'])

    if dir_nonempty(dirname):
        if not action:
            _logger.warning("""\
Log directory {} exists! Use 'd' to delete it. """.format(dirname))
            _logger.warning("""\
If you're resuming from a previous run, you can choose to keep it.
Press any other key to exit. """)
        while not action:
            action = input("Select Action: k (keep) / d (delete) / q (quit):").lower().strip()
        act = action
        if act == 'b':
            backup_name = dirname + _get_time_str()
            shutil.move(dirname, backup_name)
            info("Directory '{}' backuped to '{}'".format(dirname, backup_name))  # noqa: F821
        elif act == 'd':
            shutil.rmtree(dirname, ignore_errors=True)
            # a second, strict pass in case e.g. tensorboard kept files alive
            if dir_nonempty(dirname):
                shutil.rmtree(dirname, ignore_errors=False)
        elif act == 'n':
            dirname = dirname + _get_time_str()
            info("Use a new log directory {}".format(dirname))  # noqa: F821
        elif act == 'k':
            pass
        else:
            # message typo fixed: "exits" -> "exists"
            raise OSError("Directory {} exists!".format(dirname))
    LOG_DIR = dirname
    from .fs import mkdir_p
    mkdir_p(dirname)
    _set_file(os.path.join(dirname, 'log.log'))
def auto_set_dir(action=None, name=None):
    """
    Use :func:`logger.set_logger_dir` to set log directory to
    "./train_log/{scriptname}:{name}". "scriptname" is the name of the main python file currently running"""
    mod = sys.modules['__main__']
    basename = os.path.basename(mod.__file__)
    auto_dirname = os.path.join('train_log', basename[:basename.rfind('.')])
    if name:
        # ':' is not a legal path character on Windows, use '_' there
        auto_dirname += '_%s' % name if os.name == 'nt' else ':%s' % name
    set_logger_dir(auto_dirname, action=action)
def get_logger_dir():
    """
    Returns:
        The logger directory, or None if not set.
        The directory is used for general logging, tensorboard events, checkpoints, etc.
    """
    return LOG_DIR
| apache-2.0 |
ICOS-Carbon-Portal/data | src/main/python/update-restheart/Restheart.py | 1 | 2242 | import requests
class Restheart(object):
    """Minimal REST client for the ICOS RESTHeart database.

    Supports paged reads of records that still need updating ('geo' or
    'label' operations) and PATCH-based updates of single documents.
    """

    def __init__(self):
        # self._baseUrl = 'http://127.0.0.1:8088/db/'  # localhost
        self._baseUrl = 'https://restheart.icos-cp.eu/db/'  # production
        # Verify TLS certificates only against the production host (the
        # localhost test instance has no valid certificate).
        # Renamed from the misspelled `_verfify`; bool test is direct now.
        self._verify = 'restheart' in self._baseUrl

    def get_records_to_update(self, op, pagesize, collection):
        """Fetch up to `pagesize` records for operation `op` from `collection`.

        Returns the decoded JSON response, or None when the request or the
        JSON decoding fails (failures are printed, best-effort style).
        """
        resp = None
        try:
            url = self.get_url(op, pagesize, collection)
            resp = requests.get(url, timeout=10, verify=self._verify)
            if resp.status_code != 200:
                print(resp.status_code, resp.reason, resp.json())
            return resp.json()
        except (requests.RequestException, ValueError) as exc:
            # Narrowed from a bare `except:` so real programming errors
            # (e.g. AttributeError) are no longer silently swallowed;
            # network failures and malformed JSON stay best-effort.
            print(resp, exc)

    def update_record(self, id, record, collection):
        """PATCH the document identified by `id` in `collection` with `record`."""
        url = self._baseUrl + collection + '/' + id
        headers = {"Content-Type": "application/json"}
        resp = None
        try:
            resp = requests.patch(url, headers=headers, json=record, timeout=5, verify=self._verify)
            if resp.status_code != 200:
                print(resp.status_code, resp.reason)
        except requests.RequestException as exc:
            print(resp, exc)

    def get_url(self, op, pagesize, collection):
        """Build the query URL for `op` ('geo' or 'label') and `collection`.

        Raises:
            ValueError: for an unknown op or collection.
        """
        if op == 'geo':
            if collection == 'portaluse':
                return self._baseUrl + collection + '?filter={"city":{"$exists":0}}&np&pagesize=' + str(pagesize)
            elif collection == 'dobjdls':
                return self._baseUrl + collection + '?filter={"$and":[{"ip":{"$exists":1}},{"city":{"$exists":0}}]}&np&pagesize=' + str(pagesize)
            else:
                raise ValueError("Unknown collection: " + collection)
        elif op == 'label':
            if collection == 'portaluse':
                return self._baseUrl + collection + '?np&pagesize=' + str(pagesize)
                # Alternative debugging filters kept for reference:
                # return self._baseUrl + collection + '?filter={"_id":{"$oid":"5bb21519f17df4d065e9c53c"}}&np&pagesize=' + str(pagesize)
                # return self._baseUrl + collection + '?filter={"filterChange":{"$exists":1}}&np&pagesize=' + str(pagesize)
                # return self._baseUrl + collection + '?filter={"previewNetCDF":{"$exists":1}}&np&pagesize=' + str(pagesize)
                # return self._baseUrl + collection + '?filter={"previewTimeserie":{"$exists":1}}&np&pagesize=' + str(pagesize)
                # return self._baseUrl + collection + '?filter={"$and":[{"filterChange":{"$exists":0}},{"previewNetCDF":{"$exists":0}},{"previewTimeserie":{"$exists":0}}]}&np&pagesize=' + str(pagesize)
            else:
                raise ValueError("Unknown collection: " + collection)
        else:
            # Previously an unknown op fell through and returned None, which
            # made the caller crash later with a confusing requests error.
            raise ValueError("Unknown op: " + op)
| gpl-3.0 |
pombreda/pyfilesystem-4 | fs/osfs/watch_inotify.py | 8 | 10520 | """
fs.osfs.watch_inotify
=============
Change watcher support for OSFS, backed by pyinotify.
"""
import os
import sys
import errno
import select
import threading
from fs.errors import *
from fs.path import *
from fs.watch import *
try:
import pyinotify
except Exception, e:
# pyinotify sometimes raises its own custom errors on import.
# How on earth are we supposed to catch them when we can't import them?
if isinstance(e,ImportError):
raise
raise ImportError("could not import pyinotify")
try:
pyinotify.WatchManager.get_fd
except AttributeError:
raise ImportError("pyinotify version is too old")
class OSFSWatchMixin(WatchableFSMixin):
    """Mixin providing change-watcher support via pyinotify."""

    # Guards lazy creation and teardown of the shared notifier thread.
    __watch_lock = threading.Lock()
    # Single SharedThreadedNotifier shared by all filesystem instances.
    __watch_thread = None

    def close(self):
        """Close the filesystem, detaching all watchers.

        Also stops the shared notifier thread once no watchers remain.
        """
        super(OSFSWatchMixin,self).close()
        self.notify_watchers(CLOSED)
        for watcher_list in self._watchers.values():
            for watcher in watcher_list:
                self.del_watcher(watcher)
        self.__watch_lock.acquire()
        try:
            wt = self.__watch_thread
            if wt is not None and not wt.watchers:
                wt.stop()
                wt.join()
                OSFSWatchMixin.__watch_thread = None
        finally:
            self.__watch_lock.release()

    @convert_os_errors
    def add_watcher(self,callback,path="/",events=None,recursive=True):
        """Register `callback` for change events under `path`.

        Returns the watcher object, which carries the pyinotify state
        (`_pyinotify_WatchManager`, `_pyinotify_Notifier`, `_pyinotify_id`).
        """
        super_add_watcher = super(OSFSWatchMixin,self).add_watcher
        w = super_add_watcher(callback,path,events,recursive)
        w._pyinotify_id = None
        syspath = self.getsyspath(path)
        if isinstance(syspath,unicode):
            syspath = syspath.encode(sys.getfilesystemencoding())
        # Each watch gets its own WatchManager, since it's tricky to make
        # a single WatchManager handle multiple callbacks with different
        # events for a single path. This means we pay one file descriptor
        # for each watcher added to the filesystem. That's not too bad.
        w._pyinotify_WatchManager = wm = pyinotify.WatchManager()
        # Each individual notifier gets multiplexed by a single shared thread.
        w._pyinotify_Notifier = pyinotify.Notifier(wm)
        evtmask = self.__get_event_mask(events)
        def process_events(event):
            self.__route_event(w,event)
        kwds = dict(rec=recursive,auto_add=recursive,quiet=False)
        try:
            wids = wm.add_watch(syspath,evtmask,process_events,**kwds)
        except pyinotify.WatchManagerError, e:
            raise OperationFailedError("add_watcher",details=e)
        w._pyinotify_id = wids[syspath]
        self.__watch_lock.acquire()
        try:
            wt = self.__get_watch_thread()
            wt.add_watcher(w)
        finally:
            self.__watch_lock.release()
        return w

    @convert_os_errors
    def del_watcher(self,watcher_or_callback):
        """Remove a watcher object, or all watchers using a given callback."""
        if isinstance(watcher_or_callback,Watcher):
            watchers = [watcher_or_callback]
        else:
            watchers = self._find_watchers(watcher_or_callback)
        for watcher in watchers:
            wm = watcher._pyinotify_WatchManager
            wm.rm_watch(watcher._pyinotify_id,rec=watcher.recursive)
            super(OSFSWatchMixin,self).del_watcher(watcher)
        self.__watch_lock.acquire()
        try:
            wt = self.__get_watch_thread()
            for watcher in watchers:
                wt.del_watcher(watcher)
        finally:
            self.__watch_lock.release()

    def __get_event_mask(self,events):
        """Convert the given set of events into a pyinotify event mask."""
        if events is None:
            events = (EVENT,)
        mask = 0
        # For each requested fs.watch event class, OR in the inotify bits
        # that can produce it.
        for evt in events:
            if issubclass(ACCESSED,evt):
                mask |= pyinotify.IN_ACCESS
            if issubclass(CREATED,evt):
                mask |= pyinotify.IN_CREATE
            if issubclass(REMOVED,evt):
                mask |= pyinotify.IN_DELETE
                mask |= pyinotify.IN_DELETE_SELF
            if issubclass(MODIFIED,evt):
                mask |= pyinotify.IN_ATTRIB
                mask |= pyinotify.IN_MODIFY
                mask |= pyinotify.IN_CLOSE_WRITE
            if issubclass(MOVED_SRC,evt):
                mask |= pyinotify.IN_MOVED_FROM
                mask |= pyinotify.IN_MOVED_TO
            if issubclass(MOVED_DST,evt):
                mask |= pyinotify.IN_MOVED_FROM
                mask |= pyinotify.IN_MOVED_TO
            if issubclass(OVERFLOW,evt):
                mask |= pyinotify.IN_Q_OVERFLOW
            if issubclass(CLOSED,evt):
                mask |= pyinotify.IN_UNMOUNT
        return mask

    def __route_event(self,watcher,inevt):
        """Convert pyinotify event into fs.watch event, then handle it."""
        try:
            path = self.unsyspath(inevt.pathname)
        except ValueError:
            # Event for a path outside this filesystem; ignore it.
            return
        try:
            src_path = inevt.src_pathname
            if src_path is not None:
                src_path = self.unsyspath(src_path)
        except (AttributeError,ValueError):
            # Older pyinotify events may lack src_pathname entirely.
            src_path = None
        if inevt.mask & pyinotify.IN_ACCESS:
            watcher.handle_event(ACCESSED(self,path))
        if inevt.mask & pyinotify.IN_CREATE:
            watcher.handle_event(CREATED(self,path))
            # Recursive watching of directories in pyinotify requires
            # the creation of a new watch for each subdir, resulting in
            # a race condition whereby events in the subdir are missed.
            # We'd prefer to duplicate events than to miss them.
            if inevt.mask & pyinotify.IN_ISDIR:
                try:
                    # pyinotify does this for dirs itself, we only.
                    # need to worry about newly-created files.
                    for child in self.listdir(path,files_only=True):
                        cpath = pathjoin(path,child)
                        self.notify_watchers(CREATED,cpath)
                        self.notify_watchers(MODIFIED,cpath,True)
                except FSError:
                    pass
        if inevt.mask & pyinotify.IN_DELETE:
            watcher.handle_event(REMOVED(self,path))
        if inevt.mask & pyinotify.IN_DELETE_SELF:
            watcher.handle_event(REMOVED(self,path))
        if inevt.mask & pyinotify.IN_ATTRIB:
            watcher.handle_event(MODIFIED(self,path,False))
        if inevt.mask & pyinotify.IN_MODIFY:
            watcher.handle_event(MODIFIED(self,path,True))
        if inevt.mask & pyinotify.IN_CLOSE_WRITE:
            watcher.handle_event(MODIFIED(self,path,True))
        if inevt.mask & pyinotify.IN_MOVED_FROM:
            # Sorry folks, I'm not up for decoding the destination path.
            watcher.handle_event(MOVED_SRC(self,path,None))
        if inevt.mask & pyinotify.IN_MOVED_TO:
            if getattr(inevt,"src_pathname",None):
                watcher.handle_event(MOVED_SRC(self,src_path,path))
                watcher.handle_event(MOVED_DST(self,path,src_path))
            else:
                watcher.handle_event(MOVED_DST(self,path,None))
        if inevt.mask & pyinotify.IN_Q_OVERFLOW:
            watcher.handle_event(OVERFLOW(self))
        if inevt.mask & pyinotify.IN_UNMOUNT:
            # NOTE(review): `CLOSE` does not appear among this module's
            # imports; sibling code uses CLOSED -- possible NameError here,
            # confirm against fs.watch.
            watcher.handle_event(CLOSE(self))

    def __get_watch_thread(self):
        """Get the shared watch thread, initializing if necessary.
        This method must only be called while holding self.__watch_lock, or
        multiple notifiers could be created.
        """
        if OSFSWatchMixin.__watch_thread is None:
            OSFSWatchMixin.__watch_thread = SharedThreadedNotifier()
            OSFSWatchMixin.__watch_thread.start()
        return OSFSWatchMixin.__watch_thread
class SharedThreadedNotifier(threading.Thread):
    """pyinotifer Notifier that can manage multiple WatchManagers.
    Each watcher added to an OSFS corresponds to a new pyinotify.WatchManager
    instance. Rather than run a notifier thread for each manager, we run a
    single thread that multiplexes between them all.
    """

    def __init__(self):
        super(SharedThreadedNotifier,self).__init__()
        self.daemon = True
        self.running = True
        # Self-pipe used to wake the poll() loop when the fd set changes
        # or the thread is asked to stop.
        self._pipe_r, self._pipe_w = os.pipe()
        self._poller = select.poll()
        self._poller.register(self._pipe_r,select.POLLIN)
        # Map of inotify fd -> watcher object.
        self.watchers = {}

    def add_watcher(self,watcher):
        """Start multiplexing events from `watcher`'s WatchManager fd."""
        fd = watcher._pyinotify_WatchManager.get_fd()
        self.watchers[fd] = watcher
        self._poller.register(fd,select.POLLIN)
        # Bump the poll object so it recognises the new fd.
        os.write(self._pipe_w,"H")

    def del_watcher(self,watcher):
        """Stop multiplexing `watcher`; a no-op if it was never added."""
        fd = watcher._pyinotify_WatchManager.get_fd()
        try:
            del self.watchers[fd]
        except KeyError:
            pass
        else:
            self._poller.unregister(fd)

    def run(self):
        """Main loop: poll all inotify fds and dispatch to their notifiers."""
        # Grab some attributes of the select module, so they're available
        # even when shutting down the interpreter.
        _select_error = select.error
        _select_POLLIN = select.POLLIN
        # Loop until stopped, dispatching to individual notifiers.
        while self.running:
            try:
                ready_fds = self._poller.poll()
            except _select_error, e:
                # EINTR just means the poll was interrupted; retry.
                if e[0] != errno.EINTR:
                    raise
            else:
                for (fd,event) in ready_fds:
                    # Ignore all events other than "input ready".
                    if not event & _select_POLLIN:
                        continue
                    # For signals on our internal pipe, just read and discard.
                    if fd == self._pipe_r:
                        os.read(self._pipe_r,1)
                    # For notifier fds, dispath to the notifier methods.
                    else:
                        try:
                            notifier = self.watchers[fd]._pyinotify_Notifier
                        except KeyError:
                            # Watcher was removed between poll() and dispatch.
                            pass
                        else:
                            notifier.read_events()
                            try:
                                notifier.process_events()
                            except EnvironmentError:
                                pass

    def stop(self):
        """Ask the thread to exit and wake the poll loop so it notices."""
        if self.running:
            self.running = False
            os.write(self._pipe_w,"S")
            os.close(self._pipe_w)
| bsd-3-clause |
rarmknecht/peepdf | jsbeautifier/unpackers/packer.py | 76 | 3302 | #
# Unpacker for Dean Edward's p.a.c.k.e.r, a part of javascript beautifier
# by Einar Lielmanis <einar@jsbeautifier.org>
#
# written by Stefano Sanfilippo <a.little.coder@gmail.com>
#
# usage:
#
# if detect(some_string):
# unpacked = unpack(some_string)
#
"""Unpacker for Dean Edward's p.a.c.k.e.r"""
import re
import string
from jsbeautifier.unpackers import UnpackingError
PRIORITY = 1
def detect(source):
    """Return True when `source` looks like P.A.C.K.E.R.-packed code."""
    # Whitespace between tokens is irrelevant to the packer signature.
    compact = source.replace(' ', '')
    return compact.startswith('eval(function(p,a,c,k,e,r')
def unpack(source):
    """Unpacks P.A.C.K.E.R. packed js code."""
    payload, symtab, radix, count = _filterargs(source)

    if count != len(symtab):
        raise UnpackingError('Malformed p.a.c.k.e.r. symtab.')

    try:
        unbase = Unbaser(radix)
    except TypeError:
        raise UnpackingError('Unknown p.a.c.k.e.r. encoding.')

    def lookup(match):
        """Replace an encoded word with its symtab entry (or itself)."""
        token = match.group(0)
        return symtab[unbase(token)] or token

    decoded = re.sub(r'\b\w+\b', lookup, payload)
    return _replacestrings(decoded)
def _filterargs(source):
"""Juice from a source file the four args needed by decoder."""
argsregex = (r"}\('(.*)', *(\d+), *(\d+), *'(.*)'\."
r"split\('\|'\), *(\d+), *(.*)\)\)")
args = re.search(argsregex, source, re.DOTALL).groups()
try:
return args[0], args[3].split('|'), int(args[1]), int(args[2])
except ValueError:
raise UnpackingError('Corrupted p.a.c.k.e.r. data.')
def _replacestrings(source):
"""Strip string lookup table (list) and replace values in source."""
match = re.search(r'var *(_\w+)\=\["(.*?)"\];', source, re.DOTALL)
if match:
varname, strings = match.groups()
startpoint = len(match.group(0))
lookup = strings.split('","')
variable = '%s[%%d]' % varname
for index, value in enumerate(lookup):
source = source.replace(variable % index, '"%s"' % value)
return source[startpoint:]
return source
class Unbaser(object):
    """Callable that converts strings in a fixed base to natural numbers."""
    ALPHABET = {
        62 : '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ',
        95 : (' !"#$%&\'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ'
              '[\]^_`abcdefghijklmnopqrstuvwxyz{|}~')
    }

    def __init__(self, base):
        self.base = base
        # Bases int() understands natively need no lookup table.
        if 2 <= base <= 36:
            self.unbase = lambda string: int(string, base)
            return
        try:
            symbols = self.ALPHABET[base]
        except KeyError:
            raise TypeError('Unsupported base encoding.')
        # Cache a symbol -> digit-value mapping for the custom base.
        self.dictionary = dict((cipher, index)
                               for index, cipher in enumerate(symbols))
        self.unbase = self._dictunbaser

    def __call__(self, string):
        return self.unbase(string)

    def _dictunbaser(self, string):
        """Decodes a value to an integer."""
        total = 0
        for power, cipher in enumerate(reversed(string)):
            total += (self.base ** power) * self.dictionary[cipher]
        return total
| gpl-3.0 |
Shouqun/node-gn | tools/depot_tools/fetch_configs/breakpad.py | 3 | 1074 | # Copyright (c) 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import sys
import config_util # pylint: disable=import-error
# This class doesn't need an __init__ method, so we disable the warning
# pylint: disable=no-init
class Breakpad(config_util.Config):
    """Fetch recipe for the breakpad repository."""

    @staticmethod
    def fetch_spec(props):
        """Build the gclient-git fetch spec from command-line `props`."""
        solution = {
            'name': 'src',
            'url': 'https://chromium.googlesource.com/breakpad/breakpad.git',
            'managed': False,
            'custom_deps': {},
        }
        spec = {'solutions': [solution]}
        target_os = props.get('target_os')
        if target_os:
            spec['target_os'] = target_os.split(',')
        if props.get('target_os_only'):
            spec['target_os_only'] = props['target_os_only']
        return {
            'type': 'gclient_git',
            'gclient_git_spec': spec,
        }

    @staticmethod
    def expected_root(_props):
        """Checkout root directory created by the fetch."""
        return 'src'
def main(argv=None):
    """Entry point: delegate CLI argument handling to the base Config class."""
    return Breakpad().handle_args(argv)


if __name__ == '__main__':
    sys.exit(main(sys.argv))
| mit |
numenta/nupic.research | nupic/research/frameworks/backprop_structure/ray_ax/__init__.py | 3 | 1162 | # ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2020, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
from .optimize_accuracy import ax_optimize_accuracy, get_best_config
from .optimize_accuracy_weightsparsity import (
ax_optimize_accuracy_weightsparsity,
get_frontier_trials,
)
| agpl-3.0 |
domecraft/Games | RPG/classes.py | 1 | 3530 | class character:
def __init__(self, name, gender ,health, race, role, status, strength, defense, magic, bounty, income, reputation):
self.name = name
self.health = health
self.status = status
self.strength = strength
self.defense = defense
self.race = race
self.role = role
self.bounty = bounty
self.magic = magic
self.gender = gender
self.income = income
self.reputation = reputation
self.inventory = []
def modify_health(self, amount):
self.health += amount
def set_health(self, amount):
self.health = amount
def set_status(self, status):
self.status = status
def modify_str(self, amount):
self.strength += amount
def modify_def(self, amount):
self.defense += amount
def add_item(self, item):
self.inventory.append(item)
def remove_item(self, item):
if item in self.inventory:
self.inventory.remove(item)
else:
print item + " is not in your inventory!"
def set_race(self, race):
self.race = race
def modify_bounty(self, amount):
self.bounty += amount
def checkDead(self, health):
if self.health <= 0:
self.status = "dead"
return "dead"
else:
self.status = "alive"
return "alive"
def modify_income(self, amount):
self.income += amount
def modify_reputation(self, amount):
self.reputation += amount
#The following class is used for random npcs that I don't really develop in the storyline.
class basicCharacter:
    """Lightweight NPC used for background characters in the storyline."""

    def __init__(self, name, gender, income, status):
        self.name = name
        self.gender = gender
        self.income = income
        self.status = status

    def set_status(self, status):
        """Overwrite the NPC's status (e.g. "alive"/"dead")."""
        self.status = status
class store:
    """A shop with an owner, a faction alliance and a fixed stock table."""

    def __init__(self, name="General Store", owner="Store Owner", alliance="Rebellion"):
        self.name = name
        self.store_owner = owner
        self.alliance = alliance
        # item name -> stat block (cost, speed, strength, defense)
        self.stock = {
            'longsword': {'cost': 10, 'speed': 3, 'strength': 7, 'defense': 2},
            'shortsword': {'cost': 8, 'speed': 5, 'strength': 4, 'defense': 2},
            'bronze_armor': {'cost': 10, 'speed': -2, 'strength': 1, 'defense': 6},
            'silver_armor': {'cost': 20, 'speed': -5, 'strength': 2, 'defense': 12},
            'platinum_armor': {'cost': 35, 'speed': -8, 'strength': 4, 'defense': 20},
        }
class town:
    """A settlement with a ruler, a faction alliance, income and population."""

    def __init__(self, name, ruler, alliance, income, population):
        self.name = name
        self.ruler = ruler
        self.alliance = alliance
        self.income = income
        self.population = population

    def set_ruler(self, ruler):
        self.ruler = ruler

    def set_name(self, name):
        self.name = name

    def set_alliance(self, alliance):
        self.alliance = alliance

    def modify_income(self, amount):
        """Add `amount` (may be negative) to the town's income."""
        self.income += amount

    def modify_pop(self, population):
        """Add `population` (may be negative) to the head count."""
        self.population += population
class bar:
    """A drinking establishment with an owner and an income stream."""

    def __init__(self, name, owner, income):
        self.name = name
        self.owner = owner
        self.income = income

    def set_owner(self, owner):
        self.owner = owner

    def modify_income(self, amount):
        # BUG FIX: the original signature omitted `self`, so calling
        # instance.modify_income(n) raised a TypeError and `self.income`
        # in the body was an unresolved name.
        self.income += amount
| gpl-2.0 |
brchiu/tensorflow | tensorflow/__init__.py | 29 | 1685 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# Bring in all of the public TensorFlow interface into this
# module.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# pylint: disable=g-bad-import-order
from tensorflow.python import pywrap_tensorflow # pylint: disable=unused-import
from tensorflow.python.util.lazy_loader import LazyLoader
contrib = LazyLoader('contrib', globals(), 'tensorflow.contrib')
del LazyLoader
from tensorflow.python.platform import flags # pylint: disable=g-import-not-at-top
from tensorflow.python.platform import app # pylint: disable=g-import-not-at-top
app.flags = flags
del absolute_import
del division
del print_function
# These symbols appear because we import the python package which
# in turn imports from tensorflow.core and tensorflow.python. They
# must come from this module. So python adds these symbols for the
# resolution to succeed.
# pylint: disable=undefined-variable
del python
del core
# pylint: enable=undefined-variable
| apache-2.0 |
fbergmann/libSEDML | examples/python/create_sedml.py | 1 | 5521 | #!/usr/bin/env python
##
## @file create_sedml.py
## @brief cerates a SED-ML document.
## @author Frank T. Bergmann
##
## <!--------------------------------------------------------------------------
## This file is part of libSEDML. Please visit http://sed-ml.org for more
## information about SEDML, and the latest version of libSEDML.
##
## Copyright (c) 2013, Frank T. Bergmann
## All rights reserved.
##
## Redistribution and use in source and binary forms, with or without
## modification, are permitted provided that the following conditions are met:
##
## 1. Redistributions of source code must retain the above copyright notice, this
## list of conditions and the following disclaimer.
## 2. Redistributions in binary form must reproduce the above copyright notice,
## this list of conditions and the following disclaimer in the documentation
## and/or other materials provided with the distribution.
##
## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
## ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
## WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
## DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
## ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
## (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
## LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
## ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
## (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
## SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
## ------------------------------------------------------------------------ -.
##
import sys
import os.path
import libsedml
def main(args):
    """Usage: create_sedml output-filename

    Builds an example SED-ML document (two models, a time-course
    simulation, a task, data generators, a report and 2D/3D plots) and
    writes it to the file named by args[1].
    """
    if len(args) != 2:
        print(main.__doc__)
        sys.exit(1)

    # create the document
    doc = libsedml.SedDocument()
    doc.setLevel(1)
    doc.setVersion(1)

    # create a first model referencing an sbml file
    model = doc.createModel()
    model.setId("model1")
    model.setSource("file.xml")
    model.setLanguage("urn:sedml:language:sbml")

    # create a second model modifying a variable of that other sbml file
    model = doc.createModel()
    model.setId("model2")
    model.setSource("model1")
    model.setLanguage("urn:sedml:sbml")

    # change a parameter 'k' to 0.1
    change = model.createChangeAttribute()
    change.setTarget("/sbml:sbml/sbml:model/sbml:listOfParameters/sbml:parameter[@id='k']/@value")
    change.setNewValue("0.1")

    # remove species 'S1'
    remove = model.createRemoveXML()
    remove.setTarget("/sbml:sbml/sbml:model/sbml:listOfSpecies/sbml:species[@id='S1']")

    # now for something tricky we want to update the initialConcentration of 'S2' to be
    # half what it was in the original model
    compute = model.createComputeChange()
    # BUG FIX: this target originally used raw double quotes around S2 inside
    # a double-quoted literal -- a Python syntax error. Single quotes match
    # the XPath style used everywhere else in this script.
    compute.setTarget("/sbml:sbml/sbml:model/sbml:listOfSpecies/sbml:species[@id='S2']/@initialConcentration")
    variable = compute.createVariable()
    variable.setId("S2")
    variable.setModelReference("model1")
    variable.setTarget("/sbml:sbml/sbml:model/sbml:listOfSpecies/sbml:species[@id='S2']")
    compute.setMath(libsedml.parseFormula("S2 / 2"))

    # create simulation
    tc = doc.createUniformTimeCourse()
    tc.setId("sim1")
    tc.setInitialTime(0.0)
    tc.setOutputStartTime(0.0)
    tc.setOutputEndTime(10.0)
    tc.setNumberOfPoints(1000)
    # need to set the correct KISAO Term
    alg = tc.createAlgorithm()
    alg.setKisaoID("KISAO:0000019")

    # create a task that uses the simulation and the model above
    task = doc.createTask()
    task.setId("task1")
    task.setModelReference("model1")
    task.setSimulationReference("sim1")

    # add a DataGenerator to hold the output for time
    dg = doc.createDataGenerator()
    dg.setId("time")
    dg.setName("time")
    var = dg.createVariable()
    var.setId("v0")
    var.setName("time")
    var.setTaskReference("task1")
    var.setSymbol("urn:sedml:symbol:time")
    dg.setMath(libsedml.parseFormula("v0"))

    # and one for S1
    dg = doc.createDataGenerator()
    dg.setId("S1")
    dg.setName("S1")
    var = dg.createVariable()
    var.setId("v1")
    var.setName("S1")
    var.setTaskReference("task1")
    var.setTarget("/sbml:sbml/sbml:model/sbml:listOfSpecies/sbml:species[@id='S1']")
    dg.setMath(libsedml.parseFormula("v1"))

    # add a report (local renamed from `set` to avoid shadowing the builtin)
    report = doc.createReport()
    report.setId("r1")
    report.setName("report 1")
    data_set = report.createDataSet()
    data_set.setId("ds1")
    data_set.setLabel("time")
    data_set.setDataReference("time")
    data_set = report.createDataSet()
    data_set.setId("ds2")
    data_set.setLabel("S1")
    data_set.setDataReference("S1")

    # add a 2d plot
    plot = doc.createPlot2D()
    plot.setId("p1")
    plot.setName("S1 Timecourse")
    curve = plot.createCurve()
    curve.setId("c1")
    curve.setName("S1")
    curve.setLogX(False)
    curve.setLogY(False)
    curve.setXDataReference("time")
    curve.setYDataReference("S1")

    # add a 3D Plot
    plot2 = doc.createPlot3D()
    plot2.setId("p2")
    plot2.setName("dunno")
    surf = plot2.createSurface()
    surf.setId("surf1")
    surf.setName("S1")
    surf.setLogX(False)
    surf.setLogY(False)
    surf.setLogZ(False)
    surf.setXDataReference("time")
    surf.setYDataReference("S1")
    surf.setZDataReference("S1")

    # write the document
    libsedml.writeSedML(doc, args[1])
if __name__ == '__main__':
main(sys.argv)
| bsd-2-clause |
basicthinker/ThyNVM | src/arch/x86/isa/insts/simd64/floating_point/arithmetic/__init__.py | 91 | 2470 | # Copyright (c) 2007 The Hewlett-Packard Development Company
# All rights reserved.
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder. You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Gabe Black
# Sub-categories of 64-bit SIMD floating-point arithmetic instructions;
# each name is a sibling module in this package.
categories = ["addition",
              "subtraction",
              "multiplication",
              "accumulation",
              "reciprocal_estimation",
              "reciprocal_square_root"]

microcode = '''
# 64 bit multimedia instructions
'''

# Concatenate the microcode defined by each sub-category module.
# (Python 2 `exec` statement; relies on implicit relative imports.)
for category in categories:
    exec "import %s as cat" % category
    microcode += cat.microcode
| bsd-3-clause |
jawilson/home-assistant | tests/components/sensor/test_mqtt.py | 5 | 2151 | """The tests for the MQTT sensor platform."""
import unittest
from homeassistant.bootstrap import _setup_component
import homeassistant.components.sensor as sensor
from tests.common import mock_mqtt_component, fire_mqtt_message
from tests.common import get_test_home_assistant
class TestSensorMQTT(unittest.TestCase):
    """Test the MQTT sensor."""

    def setUp(self):  # pylint: disable=invalid-name
        """Setup things to be run when tests are started."""
        self.hass = get_test_home_assistant()
        mock_mqtt_component(self.hass)

    def tearDown(self):  # pylint: disable=invalid-name
        """Stop down everything that was started."""
        self.hass.stop()

    def test_setting_sensor_value_via_mqtt_message(self):
        """Test the setting of the value via MQTT."""
        self.hass.config.components = ['mqtt']
        assert _setup_component(self.hass, sensor.DOMAIN, {
            sensor.DOMAIN: {
                'platform': 'mqtt',
                'name': 'test',
                'state_topic': 'test-topic',
                'unit_of_measurement': 'fav unit'
            }
        })

        # Publish a plain payload and verify state + unit are picked up.
        fire_mqtt_message(self.hass, 'test-topic', '100')
        self.hass.block_till_done()

        state = self.hass.states.get('sensor.test')
        self.assertEqual('100', state.state)
        self.assertEqual('fav unit',
                         state.attributes.get('unit_of_measurement'))

    def test_setting_sensor_value_via_mqtt_json_message(self):
        """Test the setting of the value via MQTT with JSON playload."""
        self.hass.config.components = ['mqtt']
        assert _setup_component(self.hass, sensor.DOMAIN, {
            sensor.DOMAIN: {
                'platform': 'mqtt',
                'name': 'test',
                'state_topic': 'test-topic',
                'unit_of_measurement': 'fav unit',
                'value_template': '{{ value_json.val }}'
            }
        })

        # The value_template extracts `val` from the JSON payload.
        fire_mqtt_message(self.hass, 'test-topic', '{ "val": "100" }')
        self.hass.block_till_done()

        state = self.hass.states.get('sensor.test')
        self.assertEqual('100', state.state)
molmod/yaff | yaff/pes/colvar.py | 1 | 13249 | # -*- coding: utf-8 -*-
# YAFF is yet another force-field code.
# Copyright (C) 2011 Toon Verstraelen <Toon.Verstraelen@UGent.be>,
# Louis Vanduyfhuys <Louis.Vanduyfhuys@UGent.be>, Center for Molecular Modeling
# (CMM), Ghent University, Ghent, Belgium; all rights reserved unless otherwise
# stated.
#
# This file is part of YAFF.
#
# YAFF is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# YAFF is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>
#
# --
'''Collective variables
This module implements the computation of collective variables and their
derivatives, typically used in advanced sampling methods such as umbrella
sampling or metadynamics. The ``CollectiveVariable`` class is the main item
in this module, which is normally used in conjuction with an instance of the
``Bias`` class. Note that many collective variables such as bond lengths,
bending angles, improper angles, ... are already implemented by the
:mod:`yaff.pes.iclist` module, so no separate implementation needs to be
provided here.
'''
from __future__ import division
import numpy as np
from yaff.log import log
from yaff.pes.dlist import DeltaList
from yaff.pes.iclist import InternalCoordinateList
from yaff.sampling.utils import cell_lower
__all__ = [
'CollectiveVariable', 'CVVolume', 'CVCOMProjection','CVInternalCoordinate',
'CVLinCombIC',
]
class CollectiveVariable(object):
    '''Base class for collective variables.'''

    def __init__(self, name, system):
        """Initialize the collective variable.

        **Arguments:**

        name
            A name for the collective variable.

        system
            The system for the collective variable.
        """
        self.name = name
        self.system = system
        # Last computed value; NaN until compute() has been called.
        self.value = np.nan
        # Work arrays for the Cartesian gradient and the virial tensor.
        self.gpos = np.zeros((system.natom, 3), float)
        self.vtens = np.zeros((3, 3), float)

    def get_conversion(self):
        '''Return the unit conversion associated with this collective
        variable. Base classes must override this.
        '''
        raise NotImplementedError

    def get_log(self):
        '''Return a one-line description suitable for screen logging.'''
        return '%s' % (self.__class__.__name__)

    def compute(self, gpos=None, vtens=None):
        """Compute the collective variable and optionally some derivatives.

        The only variable inputs are the atomic positions and the cell
        vectors.

        **Optional arguments:**

        gpos
            Writeable (N, 3) array receiving the derivatives towards the
            Cartesian atomic coordinates ('g' = gradient, 'pos' = positions).

        vtens
            Writeable (3, 3) array receiving the virial tensor, i.e. the
            derivative of the energy towards uniform deformations.

        Returns the collective variable value. The optional arguments are
        Fortran-style output arguments: when present, the corresponding
        results are computed and stored into them.
        """
        # Subclasses implement their compute code here.
        raise NotImplementedError

    def get_last_computed_value(self):
        """Return the most recently computed value without recomputation.

        There is no guarantee that this value reflects the current state of
        the system; it is merely a convenience accessor.
        """
        return self.value
class CVInternalCoordinate(CollectiveVariable):
    '''
    An InternalCoordinate disguised as a CollectiveVariable so that it can
    be used together with a BiasPotential.

    This is less efficient than using the InternalCoordinate with a
    ValenceTerm, so the latter is preferred if it is possible.
    '''

    def __init__(self, system, ic, comlist=None):
        '''
        **Arguments:**

        system
            An instance of the ``System`` class.

        ic
            An ``InternalCoordinate`` instance to be evaluated.

        **Optional arguments:**

        comlist
            A COMList instance; if provided, it is used instead of the
            normal DeltaList to compute the internal coordinate.
        '''
        # Initialize the base class. The original implementation skipped this,
        # leaving self.value/self.name undefined so that
        # get_last_computed_value() raised AttributeError before the first
        # call to compute().
        CollectiveVariable.__init__(self, 'CVInternalCoordinate', system)
        self.ic = ic
        self.comlist = comlist
        self.dlist = DeltaList(system if comlist is None else comlist)
        self.iclist = InternalCoordinateList(self.dlist)
        self.iclist.add_ic(ic)

    def get_conversion(self):
        '''Unit conversion of the wrapped internal coordinate.'''
        return self.ic.get_conversion()

    def compute(self, gpos=None, vtens=None):
        '''Evaluate the internal coordinate; optionally fill derivatives.

        See ``CollectiveVariable.compute`` for the meaning of the optional
        ``gpos`` and ``vtens`` output arrays.
        '''
        if self.comlist is not None:
            self.comlist.forward()
        self.dlist.forward()
        self.iclist.forward()
        self.value = self.iclist.ictab[0]['value']
        if gpos is not None: gpos[:] = 0.0
        if vtens is not None: vtens[:] = 0.0
        if not ((gpos is None) and (vtens is None)):
            # Seed the derivative of the single IC with 1 and back-propagate
            # through the internal-coordinate and delta lists.
            self.iclist.ictab[0]['grad'] = 1.0
            self.iclist.back()
            if self.comlist is None:
                self.dlist.back(gpos, vtens)
            else:
                self.comlist.gpos[:] = 0.0
                self.dlist.back(self.comlist.gpos, vtens)
                self.comlist.back(gpos)
        return self.value
class CVVolume(CollectiveVariable):
    '''The volume of the simulation cell.'''

    def __init__(self, system):
        '''
        **Arguments:**

        system
            An instance of the ``System`` class.
        '''
        # A cell volume only makes sense for periodic systems.
        if system.cell.nvec == 0:
            raise TypeError('Can not compute volume of a non-periodic system.')
        CollectiveVariable.__init__(self, 'CVVolume', system)

    def get_conversion(self):
        '''Unit conversion: a length to the power of the cell dimension.'''
        return np.power(log.length.conversion, self.system.cell.nvec)

    def compute(self, gpos=None, vtens=None):
        '''Return the current cell volume; optionally fill derivatives.'''
        volume = self.system.cell.volume
        self.value = volume
        if gpos is not None:
            # No dependence on atomic positions
            gpos[:] = 0.0
        if vtens is not None:
            # Derivative towards a uniform deformation: V times the identity.
            vtens[:] = volume*np.identity(3)
        return volume
class CVCOMProjection(CollectiveVariable):
    '''Compute the vector connecting two centers of masses and return the
    projection along a selected vector. cv=(r_{COM}^{B}-r_{COM}^{A})[index]
    and r_{COM} is a vector with centers of mass of groups A and B:

        * first component: projected onto ``a`` vector of cell
        * second component: projected onto vector perpendicular to ``a``
          and in the plane spanned by ``a`` and ``b``
        * third component: projected onto vector perpendicular to ``a`` and
          ``b``

    Note that periodic boundary conditions are NOT taken into account

        * the centers of mass are computed using absolute positions; this is
          most likely the desired behavior
        * the center of mass difference can in principle be periodic, but
          the periodicity is not the same as the periodicity of the system,
          because of the projection on a selected vector
    '''
    def __init__(self, system, groups, index):
        '''
        **Arguments:**

        system
            An instance of the ``System`` class

        groups
            List of 2 arrays, each array containing atomic indexes
            used to compute one of the centers of mass

        index
            Selected projection vector:

                * if index==0, projection onto ``a`` vector of cell
                * if index==1, projection onto vector perpendicular to ``a``
                  and in the plane spanned by ``a`` and ``b``
                * if index==2, projection onto vector perpendicular to ``a``
                  and ``b``
        '''
        CollectiveVariable.__init__(self, 'CVCOMProjection', system)
        self.index = index
        # Safety checks
        assert len(groups)==2, "Exactly 2 groups need to be defined"
        assert system.cell.nvec==3, "Only 3D periodic systems are supported"
        assert self.index in [0,1,2], "Index should be one of 0,1,2"
        # Masses need to be defined in order to compute centers of mass
        if self.system.masses is None:
            self.system.set_standard_masses()
        # Define weights w_i such that difference of centers of mass can be
        # computed as sum_i w_i r_i (group A gets negative, group B positive
        # mass fractions, each normalized by its group's total mass).
        self.weights = np.zeros((system.natom))
        self.weights[groups[0]] = -self.system.masses[groups[0]]/np.sum(self.system.masses[groups[0]])
        self.weights[groups[1]] = self.system.masses[groups[1]]/np.sum(self.system.masses[groups[1]])

    def get_conversion(self):
        # The collective variable is a length (a projected COM difference).
        return log.length.conversion

    def compute(self, gpos=None, vtens=None):
        '''
        Consider a rotation of the entire system such that the ``a`` vector
        is aligned with the X-axis, the ``b`` vector is in the XY-plane, and
        the ``c`` vector chosen such that a right-handed basis is formed.
        The rotated cell is lower-diagonal in the Yaff notation.

        In this rotated system, it is fairly simple to compute the required
        projections and derivatives, because the projections are simply the
        Cartesian components. Values obtained in the rotated system are then
        transformed back to the original system.
        '''
        # Compute rotation that makes cell lower diagonal
        _, R = cell_lower(self.system.cell.rvecs)
        # The projected vector of centers of mass difference (aka the
        # collective variable) in the rotated system
        cv_orig = np.sum(self.weights.reshape((-1,1))*self.system.pos, axis=0)
        # Transform back to the original system
        cv = np.dot(R, cv_orig)
        self.value = cv[self.index]
        if gpos is not None:
            gpos[:] = 0.0
            gpos[:,self.index] = self.weights
            # Forces (vector) need to be rotated back to original system
            gpos[:] = np.einsum('ij,kj', gpos, R.T)
        if vtens is not None:
            vtens[:] = 0.0
            # Symmetric fill of row and column `index` with the remaining CV
            # components; NOTE(review): this reproduces the strain derivative
            # of the projected CV — confirm against upstream before changing.
            vtens[self.index,self.index:] = cv[self.index:]
            vtens[self.index:,self.index] = cv[self.index:]
            # Virial (tensor) needs to be rotated back to original system
            vtens[:] = np.dot(R.T,np.dot(vtens[:],R))
        return self.value
class CVLinCombIC(CollectiveVariable):
    '''
    A linear combination of InternalCoordinates:

        cv = w0*ic0 + w1*ic1 + ...
    '''

    def __init__(self, system, ics, weights, comlist=None):
        '''
        **Arguments:**

        system
            An instance of the ``System`` class.

        ics
            A list of InternalCoordinate instances.

        weights
            A list defining the weight of each InternalCoordinate that is
            used when computing the linear combination.

        **Optional arguments:**

        comlist
            An instance COMList; if provided, this is used instead of the
            normal DeltaList to compute the InternalCoordinates
        '''
        assert len(weights)==len(ics)
        # Initialize the base class. The original implementation only set
        # self.system, leaving self.value undefined so that
        # get_last_computed_value() raised AttributeError before the first
        # call to compute(); self.name was also missing.
        CollectiveVariable.__init__(self, 'CVLinCombIC', system)
        self.ics = ics
        self.comlist = comlist
        self.dlist = DeltaList(system if comlist is None else comlist)
        self.iclist = InternalCoordinateList(self.dlist)
        for ic in self.ics:
            self.iclist.add_ic(ic)
        self.weights = weights

    def get_conversion(self):
        # Units depend on the particular linear combination of internal
        # coordinates
        return 1.0

    def compute(self, gpos=None, vtens=None):
        '''Evaluate the weighted sum of the internal coordinates.

        See ``CollectiveVariable.compute`` for the meaning of the optional
        ``gpos`` and ``vtens`` output arrays.
        '''
        if self.comlist is not None:
            self.comlist.forward()
        self.dlist.forward()
        self.iclist.forward()
        self.value = 0.0
        for iic in range(len(self.ics)):
            self.value += self.weights[iic]*self.iclist.ictab[iic]['value']
        if gpos is not None: gpos[:] = 0.0
        if vtens is not None: vtens[:] = 0.0
        if not ((gpos is None) and (vtens is None)):
            for iic in range(len(self.ics)):
                # Derivative of the linear combination to this particular
                # internal coordinate
                self.iclist.ictab[iic]['grad'] = self.weights[iic]
            self.iclist.back()
            if self.comlist is None:
                self.dlist.back(gpos, vtens)
            else:
                self.comlist.gpos[:] = 0.0
                self.dlist.back(self.comlist.gpos, vtens)
                self.comlist.back(gpos)
        return self.value
| gpl-3.0 |
Edu-Glez/Bank_sentiment_analysis | env/lib/python3.6/site-packages/ipykernel/kernelapp.py | 5 | 19344 | """An Application for launching a kernel"""
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
from __future__ import print_function
import atexit
import os
import sys
import signal
import traceback
import logging
from tornado import ioloop
import zmq
from zmq.eventloop import ioloop as zmq_ioloop
from zmq.eventloop.zmqstream import ZMQStream
from IPython.core.application import (
BaseIPythonApplication, base_flags, base_aliases, catch_config_error
)
from IPython.core.profiledir import ProfileDir
from IPython.core.shellapp import (
InteractiveShellApp, shell_flags, shell_aliases
)
from IPython.utils import io
from ipython_genutils.path import filefind, ensure_dir_exists
from traitlets import (
Any, Instance, Dict, Unicode, Integer, Bool, DottedObjectName, Type, default
)
from ipython_genutils.importstring import import_item
from jupyter_core.paths import jupyter_runtime_dir
from jupyter_client import write_connection_file
from jupyter_client.connect import ConnectionFileMixin
# local imports
from .iostream import IOPubThread
from .heartbeat import Heartbeat
from .ipkernel import IPythonKernel
from .parentpoller import ParentPollerUnix, ParentPollerWindows
from jupyter_client.session import (
Session, session_flags, session_aliases,
)
from .zmqshell import ZMQInteractiveShell
#-----------------------------------------------------------------------------
# Flags and Aliases
#-----------------------------------------------------------------------------

# Command-line aliases: kernel-specific shortcuts (connection file, transport
# and the individual ZMQ channel ports) layered on top of the generic
# IPython application aliases.
kernel_aliases = dict(base_aliases)
kernel_aliases.update({
    'ip' : 'IPKernelApp.ip',
    'hb' : 'IPKernelApp.hb_port',
    'shell' : 'IPKernelApp.shell_port',
    'iopub' : 'IPKernelApp.iopub_port',
    'stdin' : 'IPKernelApp.stdin_port',
    'control' : 'IPKernelApp.control_port',
    'f' : 'IPKernelApp.connection_file',
    'transport': 'IPKernelApp.transport',
})

# Boolean command-line flags understood by the kernel application.
kernel_flags = dict(base_flags)
kernel_flags.update({
    'no-stdout' : (
        {'IPKernelApp' : {'no_stdout' : True}},
        "redirect stdout to the null device"),
    'no-stderr' : (
        {'IPKernelApp' : {'no_stderr' : True}},
        "redirect stderr to the null device"),
    'pylab' : (
        {'IPKernelApp' : {'pylab' : 'auto'}},
        """Pre-load matplotlib and numpy for interactive use with
the default matplotlib backend."""),
})

# inherit flags&aliases for any IPython shell apps
kernel_aliases.update(shell_aliases)
kernel_flags.update(shell_flags)

# inherit flags&aliases for Sessions
kernel_aliases.update(session_aliases)
kernel_flags.update(session_flags)

# Message printed when the kernel entry point is used directly in a terminal
# (there is no frontend to translate Ctrl-C into an interrupt request).
_ctrl_c_message = """\
NOTE: When using the `ipython kernel` entry point, Ctrl-C will not work.
To exit, you will have to explicitly quit this process, by either sending
"quit" from a client, or using Ctrl-\\ in UNIX-like environments.
To read more about this, see https://github.com/ipython/ipython/issues/2049
"""
#-----------------------------------------------------------------------------
# Application class for starting an IPython Kernel
#-----------------------------------------------------------------------------
class IPKernelApp(BaseIPythonApplication, InteractiveShellApp,
        ConnectionFileMixin):
    """Application class that configures and launches an IPython kernel:
    binds the ZMQ channels, writes the connection file, redirects the IO
    streams, creates the Kernel object and runs the event loop.
    """
    name='ipython-kernel'
    aliases = Dict(kernel_aliases)
    flags = Dict(kernel_flags)
    classes = [IPythonKernel, ZMQInteractiveShell, ProfileDir, Session]
    # the kernel class, as an importstring
    kernel_class = Type('ipykernel.ipkernel.IPythonKernel',
                        klass='ipykernel.kernelbase.Kernel',
    help="""The Kernel subclass to be used.
This should allow easy re-use of the IPKernelApp entry point
to configure and launch kernels other than IPython's own.
""").tag(config=True)
    kernel = Any()
    poller = Any() # don't restrict this even though current pollers are all Threads
    heartbeat = Instance(Heartbeat, allow_none=True)
    ports = Dict()

    subcommands = {
        'install': (
            'ipykernel.kernelspec.InstallIPythonKernelSpecApp',
            'Install the IPython kernel'
        ),
    }

    # connection info:
    connection_dir = Unicode()

    @default('connection_dir')
    def _default_connection_dir(self):
        # Default to the per-user Jupyter runtime directory.
        return jupyter_runtime_dir()

    @property
    def abs_connection_file(self):
        """Absolute path of the connection file.

        A bare filename is resolved against ``connection_dir``; a path is
        returned unchanged.
        """
        if os.path.basename(self.connection_file) == self.connection_file:
            return os.path.join(self.connection_dir, self.connection_file)
        else:
            return self.connection_file

    # streams, etc.
    no_stdout = Bool(False, help="redirect stdout to the null device").tag(config=True)
    no_stderr = Bool(False, help="redirect stderr to the null device").tag(config=True)
    outstream_class = DottedObjectName('ipykernel.iostream.OutStream',
        help="The importstring for the OutStream factory").tag(config=True)
    displayhook_class = DottedObjectName('ipykernel.displayhook.ZMQDisplayHook',
        help="The importstring for the DisplayHook factory").tag(config=True)

    # polling
    parent_handle = Integer(int(os.environ.get('JPY_PARENT_PID') or 0),
        help="""kill this process if its parent dies. On Windows, the argument
specifies the HANDLE of the parent process, otherwise it is simply boolean.
""").tag(config=True)
    interrupt = Integer(int(os.environ.get('JPY_INTERRUPT_EVENT') or 0),
        help="""ONLY USED ON WINDOWS
Interrupt this process when the parent is signaled.
""").tag(config=True)

    def init_crash_handler(self):
        """Install an excepthook that bypasses the ZMQ stream forwarders."""
        sys.excepthook = self.excepthook

    def excepthook(self, etype, evalue, tb):
        # write uncaught traceback to 'real' stderr, not zmq-forwarder
        traceback.print_exception(etype, evalue, tb, file=sys.__stderr__)

    def init_poller(self):
        """Start a poller that terminates this kernel when its parent dies."""
        if sys.platform == 'win32':
            if self.interrupt or self.parent_handle:
                self.poller = ParentPollerWindows(self.interrupt, self.parent_handle)
        elif self.parent_handle:
            self.poller = ParentPollerUnix()

    def _bind_socket(self, s, port):
        """Bind socket ``s`` on the configured transport; return the port.

        A non-positive ``port`` requests automatic selection: a random port
        for tcp, the first unused path suffix for ipc.
        """
        iface = '%s://%s' % (self.transport, self.ip)
        if self.transport == 'tcp':
            if port <= 0:
                port = s.bind_to_random_port(iface)
            else:
                s.bind("tcp://%s:%i" % (self.ip, port))
        elif self.transport == 'ipc':
            if port <= 0:
                # probe for an unused ipc endpoint by incrementing the suffix
                port = 1
                path = "%s-%i" % (self.ip, port)
                while os.path.exists(path):
                    port = port + 1
                    path = "%s-%i" % (self.ip, port)
            else:
                path = "%s-%i" % (self.ip, port)
            s.bind("ipc://%s" % path)
        return port

    def write_connection_file(self):
        """write connection info to JSON file"""
        cf = self.abs_connection_file
        self.log.debug("Writing connection file: %s", cf)
        write_connection_file(cf, ip=self.ip, key=self.session.key, transport=self.transport,
                              shell_port=self.shell_port, stdin_port=self.stdin_port, hb_port=self.hb_port,
                              iopub_port=self.iopub_port, control_port=self.control_port)

    def cleanup_connection_file(self):
        """Remove the connection file and any ipc socket files (best effort)."""
        cf = self.abs_connection_file
        self.log.debug("Cleaning up connection file: %s", cf)
        try:
            os.remove(cf)
        except (IOError, OSError):
            # file already removed or inaccessible; nothing more to do
            pass
        self.cleanup_ipc_files()

    def init_connection_file(self):
        """Locate an existing connection file or arrange to create one."""
        if not self.connection_file:
            self.connection_file = "kernel-%s.json"%os.getpid()
        try:
            self.connection_file = filefind(self.connection_file, ['.', self.connection_dir])
        except IOError:
            self.log.debug("Connection file not found: %s", self.connection_file)
            # This means I own it, and I'll create it in this directory:
            ensure_dir_exists(os.path.dirname(self.abs_connection_file), 0o700)
            # Also, I will clean it up:
            atexit.register(self.cleanup_connection_file)
            return
        try:
            self.load_connection_file()
        except Exception:
            self.log.error("Failed to load connection file: %r", self.connection_file, exc_info=True)
            self.exit(1)

    def init_sockets(self):
        # Create a context, a session, and the kernel sockets.
        self.log.info("Starting the kernel at pid: %i", os.getpid())
        context = zmq.Context.instance()
        # Uncomment this to try closing the context.
        # atexit.register(context.term)

        self.shell_socket = context.socket(zmq.ROUTER)
        self.shell_socket.linger = 1000
        self.shell_port = self._bind_socket(self.shell_socket, self.shell_port)
        self.log.debug("shell ROUTER Channel on port: %i" % self.shell_port)

        self.stdin_socket = context.socket(zmq.ROUTER)
        self.stdin_socket.linger = 1000
        self.stdin_port = self._bind_socket(self.stdin_socket, self.stdin_port)
        self.log.debug("stdin ROUTER Channel on port: %i" % self.stdin_port)

        self.control_socket = context.socket(zmq.ROUTER)
        self.control_socket.linger = 1000
        self.control_port = self._bind_socket(self.control_socket, self.control_port)
        self.log.debug("control ROUTER Channel on port: %i" % self.control_port)

        self.init_iopub(context)

    def init_iopub(self, context):
        """Bind the iopub PUB socket and start the IOPub forwarding thread."""
        self.iopub_socket = context.socket(zmq.PUB)
        self.iopub_socket.linger = 1000
        self.iopub_port = self._bind_socket(self.iopub_socket, self.iopub_port)
        self.log.debug("iopub PUB Channel on port: %i" % self.iopub_port)
        self.configure_tornado_logger()
        self.iopub_thread = IOPubThread(self.iopub_socket, pipe=True)
        self.iopub_thread.start()
        # backward-compat: wrap iopub socket API in background thread
        self.iopub_socket = self.iopub_thread.background_socket

    def init_heartbeat(self):
        """start the heart beating"""
        # heartbeat doesn't share context, because it mustn't be blocked
        # by the GIL, which is accessed by libzmq when freeing zero-copy messages
        hb_ctx = zmq.Context()
        self.heartbeat = Heartbeat(hb_ctx, (self.transport, self.ip, self.hb_port))
        self.hb_port = self.heartbeat.port
        self.log.debug("Heartbeat REP Channel on port: %i" % self.hb_port)
        self.heartbeat.start()

    def log_connection_info(self):
        """display connection info, and store ports"""
        basename = os.path.basename(self.connection_file)
        if basename == self.connection_file or \
           os.path.dirname(self.connection_file) == self.connection_dir:
            # use shortname
            tail = basename
        else:
            tail = self.connection_file
        lines = [
            "To connect another client to this kernel, use:",
            " --existing %s" % tail,
        ]
        # log connection info
        # info-level, so often not shown.
        # frontends should use the %connect_info magic
        # to see the connection info
        for line in lines:
            self.log.info(line)
        # also raw print to the terminal if no parent_handle (`ipython kernel`)
        # unless log-level is CRITICAL (--quiet)
        if not self.parent_handle and self.log_level < logging.CRITICAL:
            io.rprint(_ctrl_c_message)
            for line in lines:
                io.rprint(line)
        self.ports = dict(shell=self.shell_port, iopub=self.iopub_port,
                          stdin=self.stdin_port, hb=self.hb_port,
                          control=self.control_port)

    def init_blackhole(self):
        """redirects stdout/stderr to devnull if necessary"""
        if self.no_stdout or self.no_stderr:
            blackhole = open(os.devnull, 'w')
            if self.no_stdout:
                sys.stdout = sys.__stdout__ = blackhole
            if self.no_stderr:
                sys.stderr = sys.__stderr__ = blackhole

    def init_io(self):
        """Redirect input streams and set a display hook."""
        if self.outstream_class:
            outstream_factory = import_item(str(self.outstream_class))
            sys.stdout = outstream_factory(self.session, self.iopub_thread, u'stdout')
            sys.stderr = outstream_factory(self.session, self.iopub_thread, u'stderr')
        if self.displayhook_class:
            displayhook_factory = import_item(str(self.displayhook_class))
            self.displayhook = displayhook_factory(self.session, self.iopub_socket)
            sys.displayhook = self.displayhook

        self.patch_io()

    def patch_io(self):
        """Patch important libraries that can't handle sys.stdout forwarding"""
        try:
            import faulthandler
        except ImportError:
            pass
        else:
            # Warning: this is a monkeypatch of `faulthandler.enable`, watch for possible
            # updates to the upstream API and update accordingly (up-to-date as of Python 3.5):
            # https://docs.python.org/3/library/faulthandler.html#faulthandler.enable
            # change default file to __stderr__ from forwarded stderr
            faulthandler_enable = faulthandler.enable
            def enable(file=sys.__stderr__, all_threads=True, **kwargs):
                return faulthandler_enable(file=file, all_threads=all_threads, **kwargs)
            faulthandler.enable = enable

            if hasattr(faulthandler, 'register'):
                faulthandler_register = faulthandler.register
                def register(signum, file=sys.__stderr__, all_threads=True, chain=False, **kwargs):
                    return faulthandler_register(signum, file=file, all_threads=all_threads,
                                                 chain=chain, **kwargs)
                faulthandler.register = register

    def init_signal(self):
        # SIG_IGN literally ignores SIGINT in this process; interrupts reach
        # the kernel through other means than terminal Ctrl-C.
        signal.signal(signal.SIGINT, signal.SIG_IGN)

    def init_kernel(self):
        """Create the Kernel object itself"""
        shell_stream = ZMQStream(self.shell_socket)
        control_stream = ZMQStream(self.control_socket)

        kernel_factory = self.kernel_class.instance

        kernel = kernel_factory(parent=self, session=self.session,
                                shell_streams=[shell_stream, control_stream],
                                iopub_thread=self.iopub_thread,
                                iopub_socket=self.iopub_socket,
                                stdin_socket=self.stdin_socket,
                                log=self.log,
                                profile_dir=self.profile_dir,
                                user_ns=self.user_ns,
        )
        kernel.record_ports({
            name + '_port': port for name, port in self.ports.items()
        })
        self.kernel = kernel

        # Allow the displayhook to get the execution count
        self.displayhook.get_execution_count = lambda: kernel.execution_count

    def init_gui_pylab(self):
        """Enable GUI event loop integration, taking pylab into account."""
        # Register inline backend as default
        # this is higher priority than matplotlibrc,
        # but lower priority than anything else (mpl.use() for instance).
        # This only affects matplotlib >= 1.5
        if not os.environ.get('MPLBACKEND'):
            os.environ['MPLBACKEND'] = 'module://ipykernel.pylab.backend_inline'

        # Provide a wrapper for :meth:`InteractiveShellApp.init_gui_pylab`
        # to ensure that any exception is printed straight to stderr.
        # Normally _showtraceback associates the reply with an execution,
        # which means frontends will never draw it, as this exception
        # is not associated with any execute request.
        shell = self.shell
        _showtraceback = shell._showtraceback
        try:
            # replace error-sending traceback with stderr
            def print_tb(etype, evalue, stb):
                print ("GUI event loop or pylab initialization failed",
                       file=sys.stderr)
                print (shell.InteractiveTB.stb2text(stb), file=sys.stderr)
            shell._showtraceback = print_tb
            InteractiveShellApp.init_gui_pylab(self)
        finally:
            shell._showtraceback = _showtraceback

    def init_shell(self):
        """Grab the kernel's shell instance (if any) and register with it."""
        self.shell = getattr(self.kernel, 'shell', None)
        if self.shell:
            self.shell.configurables.append(self)

    def init_extensions(self):
        """Load IPython extensions, force-loading ipywidgets if present."""
        super(IPKernelApp, self).init_extensions()
        # BEGIN HARDCODED WIDGETS HACK
        # Ensure ipywidgets extension is loaded if available
        extension_man = self.shell.extension_manager
        if 'ipywidgets' not in extension_man.loaded:
            try:
                extension_man.load_extension('ipywidgets')
            except ImportError as e:
                self.log.debug('ipywidgets package not installed. Widgets will not be available.')
        # END HARDCODED WIDGETS HACK

    def configure_tornado_logger(self):
        """ Configure the tornado logging.Logger.

            Must set up the tornado logger or else tornado will call
            basicConfig for the root logger which makes the root logger
            go to the real sys.stderr instead of the capture streams.
            This function mimics the setup of logging.basicConfig.
        """
        logger = logging.getLogger('tornado')
        handler = logging.StreamHandler()
        formatter = logging.Formatter(logging.BASIC_FORMAT)
        handler.setFormatter(formatter)
        logger.addHandler(handler)

    @catch_config_error
    def initialize(self, argv=None):
        """Parse the command line, then set up sockets, IO and the kernel."""
        super(IPKernelApp, self).initialize(argv)
        if self.subapp is not None:
            return
        # register zmq IOLoop with tornado
        zmq_ioloop.install()
        self.init_blackhole()
        self.init_connection_file()
        self.init_poller()
        self.init_sockets()
        self.init_heartbeat()
        # writing/displaying connection info must be *after* init_sockets/heartbeat
        self.write_connection_file()
        # Log connection info after writing connection file, so that the connection
        # file is definitely available at the time someone reads the log.
        self.log_connection_info()
        self.init_io()
        self.init_signal()
        self.init_kernel()
        # shell init steps
        self.init_path()
        self.init_shell()
        if self.shell:
            self.init_gui_pylab()
            self.init_extensions()
        self.init_code()
        # flush stdout/stderr, so that anything written to these streams during
        # initialization do not get associated with the first execution request
        sys.stdout.flush()
        sys.stderr.flush()

    def start(self):
        """Start the poller and kernel, then block in the IOLoop."""
        if self.subapp is not None:
            return self.subapp.start()
        if self.poller is not None:
            self.poller.start()
        self.kernel.start()
        try:
            ioloop.IOLoop.instance().start()
        except KeyboardInterrupt:
            pass
launch_new_instance = IPKernelApp.launch_instance


def main():
    """Run an IPKernel as an application"""
    kernel_app = IPKernelApp.instance()
    kernel_app.initialize()
    kernel_app.start()


if __name__ == '__main__':
    main()
| apache-2.0 |
yongshengwang/builthue | desktop/libs/libsentry/src/libsentry/test_client.py | 34 | 3196 | #!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import shutil
import tempfile
from nose.tools import assert_true, assert_equal, assert_false, assert_not_equal, assert_raises
from libsentry import sentry_site
from libsentry.conf import SENTRY_CONF_DIR
from libsentry.sentry_site import get_sentry_server_principal,\
get_sentry_server_admin_groups
from libsentry.client import SentryClient
def test_security_plain():
  """A plain (NOSASL) sentry-site.xml must yield an unauthenticated client."""
  tmpdir = tempfile.mkdtemp()
  finish = SENTRY_CONF_DIR.set_for_testing(tmpdir)

  try:
    xml = sentry_site_xml(provider='default')
    # Use open() with a context manager instead of the Python-2-only file()
    # builtin, so the handle is closed deterministically and the test also
    # runs on Python 3.
    with open(os.path.join(tmpdir, 'sentry-site.xml'), 'w') as f:
      f.write(xml)
    sentry_site.reset()

    assert_equal('test/test.com@TEST.COM', get_sentry_server_principal())
    assert_equal(['hive', 'impala', 'hue'], get_sentry_server_admin_groups())

    security = SentryClient('test.com', 11111, 'test')._get_security()
    assert_equal('test', security['kerberos_principal_short_name'])
    assert_equal(False, security['use_sasl'])
    assert_equal('NOSASL', security['mechanism'])
  finally:
    # Always restore the global config state and remove the temp dir.
    sentry_site.reset()
    finish()
    shutil.rmtree(tmpdir)
def test_security_kerberos():
  """A kerberos-mode sentry-site.xml must yield a GSSAPI/SASL client."""
  tmpdir = tempfile.mkdtemp()
  finish = SENTRY_CONF_DIR.set_for_testing(tmpdir)

  try:
    xml = sentry_site_xml(provider='default', authentication='kerberos')
    # Use open() with a context manager instead of the Python-2-only file()
    # builtin, so the handle is closed deterministically and the test also
    # runs on Python 3.
    with open(os.path.join(tmpdir, 'sentry-site.xml'), 'w') as f:
      f.write(xml)
    sentry_site.reset()

    security = SentryClient('test.com', 11111, 'test')._get_security()
    assert_equal(True, security['use_sasl'])
    assert_equal('GSSAPI', security['mechanism'])
  finally:
    # Always restore the global config state and remove the temp dir.
    sentry_site.reset()
    finish()
    shutil.rmtree(tmpdir)
def sentry_site_xml(provider='default',
                    kerberos_principal='test/test.com@TEST.COM',
                    authentication='NOSASL'):
  """Render a minimal sentry-site.xml document for the client tests.

  All three parameters are substituted verbatim into the corresponding
  property values; the admin groups are fixed to hive, impala and hue.
  """
  template = """
<configuration>
<property>
<name>hive.sentry.provider</name>
<value>%(provider)s</value>
</property>
<property>
<name>sentry.service.server.principal</name>
<value>%(kerberos_principal)s</value>
</property>
<property>
<name>sentry.service.security.mode</name>
<value>%(authentication)s</value>
</property>
<property>
<name>sentry.service.admin.group</name>
<value>hive,impala,hue</value>
</property>
</configuration>
"""
  substitutions = {
    'provider': provider,
    'kerberos_principal': kerberos_principal,
    'authentication': authentication,
  }
  return template % substitutions
| apache-2.0 |
hgl888/chromium-crosswalk | third_party/mojo/src/mojo/public/tools/bindings/pylib/mojom/parse/parser.py | 32 | 13000 | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Generates a syntax tree from a Mojo IDL file."""
import imp
import os.path
import sys
def _GetDirAbove(dirname):
  """Returns the directory "above" this file containing |dirname| (which must
  also be "above" this file)."""
  path = os.path.abspath(__file__)
  while True:
    path, tail = os.path.split(path)
    # os.path.split yields an empty tail once the filesystem root is reached;
    # the assert makes the walk fail loudly if |dirname| was never found.
    assert tail
    if tail == dirname:
      return path
try:
imp.find_module("ply")
except ImportError:
sys.path.append(os.path.join(_GetDirAbove("public"), "public/third_party"))
from ply import lex
from ply import yacc
from ..error import Error
from . import ast
from .lexer import Lexer
_MAX_ORDINAL_VALUE = 0xffffffff
_MAX_ARRAY_SIZE = 0xffffffff
class ParseError(Error):
  """Class for errors from the parser."""

  def __init__(self, filename, message, lineno=None, snippet=None):
    # A snippet, when given, is forwarded as a single addendum line.
    addenda = [snippet] if snippet else None
    Error.__init__(self, filename, message, lineno=lineno, addenda=addenda)
# We have methods which look like they could be functions:
# pylint: disable=R0201
class Parser(object):
def __init__(self, lexer, source, filename):
self.tokens = lexer.tokens
self.source = source
self.filename = filename
# Names of functions
#
# In general, we name functions after the left-hand-side of the rule(s) that
# they handle. E.g., |p_foo_bar| for a rule |foo_bar : ...|.
#
# There may be multiple functions handling rules for the same left-hand-side;
# then we name the functions |p_foo_bar_N| (for left-hand-side |foo_bar|),
# where N is a number (numbered starting from 1). Note that using multiple
# functions is actually more efficient than having single functions handle
# multiple rules (and, e.g., distinguishing them by examining |len(p)|).
#
# It's also possible to have a function handling multiple rules with different
# left-hand-sides. We do not do this.
#
# See http://www.dabeaz.com/ply/ply.html#ply_nn25 for more details.
# TODO(vtl): Get rid of the braces in the module "statement". (Consider
# renaming "module" -> "package".) Then we'll be able to have a single rule
# for root (by making module "optional").
def p_root_1(self, p):
"""root : """
p[0] = ast.Mojom(None, ast.ImportList(), [])
def p_root_2(self, p):
"""root : root module"""
if p[1].module is not None:
raise ParseError(self.filename,
"Multiple \"module\" statements not allowed:",
p[2].lineno, snippet=self._GetSnippet(p[2].lineno))
if p[1].import_list.items or p[1].definition_list:
raise ParseError(
self.filename,
"\"module\" statements must precede imports and definitions:",
p[2].lineno, snippet=self._GetSnippet(p[2].lineno))
p[0] = p[1]
p[0].module = p[2]
def p_root_3(self, p):
"""root : root import"""
if p[1].definition_list:
raise ParseError(self.filename,
"\"import\" statements must precede definitions:",
p[2].lineno, snippet=self._GetSnippet(p[2].lineno))
p[0] = p[1]
p[0].import_list.Append(p[2])
def p_root_4(self, p):
"""root : root definition"""
p[0] = p[1]
p[0].definition_list.append(p[2])
def p_import(self, p):
"""import : IMPORT STRING_LITERAL SEMI"""
# 'eval' the literal to strip the quotes.
# TODO(vtl): This eval is dubious. We should unquote/unescape ourselves.
p[0] = ast.Import(eval(p[2]), filename=self.filename, lineno=p.lineno(2))
def p_module(self, p):
"""module : attribute_section MODULE identifier_wrapped SEMI"""
p[0] = ast.Module(p[3], p[1], filename=self.filename, lineno=p.lineno(2))
def p_definition(self, p):
"""definition : struct
| union
| interface
| enum
| const"""
p[0] = p[1]
def p_attribute_section_1(self, p):
"""attribute_section : """
p[0] = None
def p_attribute_section_2(self, p):
"""attribute_section : LBRACKET attribute_list RBRACKET"""
p[0] = p[2]
def p_attribute_list_1(self, p):
"""attribute_list : """
p[0] = ast.AttributeList()
def p_attribute_list_2(self, p):
"""attribute_list : nonempty_attribute_list"""
p[0] = p[1]
def p_nonempty_attribute_list_1(self, p):
"""nonempty_attribute_list : attribute"""
p[0] = ast.AttributeList(p[1])
def p_nonempty_attribute_list_2(self, p):
"""nonempty_attribute_list : nonempty_attribute_list COMMA attribute"""
p[0] = p[1]
p[0].Append(p[3])
def p_attribute(self, p):
"""attribute : NAME EQUALS evaled_literal
| NAME EQUALS NAME"""
p[0] = ast.Attribute(p[1], p[3], filename=self.filename, lineno=p.lineno(1))
def p_evaled_literal(self, p):
"""evaled_literal : literal"""
# 'eval' the literal to strip the quotes.
p[0] = eval(p[1])
def p_struct(self, p):
    """struct : attribute_section STRUCT NAME LBRACE struct_body RBRACE SEMI"""
    p[0] = ast.Struct(p[3], p[1], p[5])

def p_struct_body_1(self, p):
    """struct_body : """
    p[0] = ast.StructBody()

def p_struct_body_2(self, p):
    """struct_body : struct_body const
                   | struct_body enum
                   | struct_body struct_field"""
    # Struct bodies may interleave constants, enums, and fields.
    p[0] = p[1]
    p[0].Append(p[2])

def p_struct_field(self, p):
    """struct_field : attribute_section typename NAME ordinal default SEMI"""
    # Ordinal (p[4]) and default value (p[5]) are both optional (may be None).
    p[0] = ast.StructField(p[3], p[1], p[4], p[2], p[5])

def p_union(self, p):
    """union : attribute_section UNION NAME LBRACE union_body RBRACE SEMI"""
    p[0] = ast.Union(p[3], p[1], p[5])

def p_union_body_1(self, p):
    """union_body : """
    p[0] = ast.UnionBody()

def p_union_body_2(self, p):
    """union_body : union_body union_field"""
    p[0] = p[1]
    # p[1] and p[0] refer to the same object here, so appending through
    # p[1] is equivalent to the p[0].Append(...) form used elsewhere.
    p[1].Append(p[2])

def p_union_field(self, p):
    """union_field : attribute_section typename NAME ordinal SEMI"""
    p[0] = ast.UnionField(p[3], p[1], p[4], p[2])

def p_default_1(self, p):
    """default : """
    # No "= constant" default clause.
    p[0] = None

def p_default_2(self, p):
    """default : EQUALS constant"""
    p[0] = p[2]
def p_interface(self, p):
    """interface : attribute_section INTERFACE NAME LBRACE interface_body \
                   RBRACE SEMI"""
    p[0] = ast.Interface(p[3], p[1], p[5])

def p_interface_body_1(self, p):
    """interface_body : """
    p[0] = ast.InterfaceBody()

def p_interface_body_2(self, p):
    """interface_body : interface_body const
                      | interface_body enum
                      | interface_body method"""
    # Interface bodies may interleave constants, enums, and methods.
    p[0] = p[1]
    p[0].Append(p[2])

def p_response_1(self, p):
    """response : """
    # Method has no response parameter list.
    p[0] = None

def p_response_2(self, p):
    """response : RESPONSE LPAREN parameter_list RPAREN"""
    p[0] = p[3]

def p_method(self, p):
    """method : attribute_section NAME ordinal LPAREN parameter_list RPAREN \
                response SEMI"""
    # p[7] (the response) is None for one-way methods, per p_response_1.
    p[0] = ast.Method(p[2], p[1], p[3], p[5], p[7])

def p_parameter_list_1(self, p):
    """parameter_list : """
    p[0] = ast.ParameterList()

def p_parameter_list_2(self, p):
    """parameter_list : nonempty_parameter_list"""
    p[0] = p[1]

def p_nonempty_parameter_list_1(self, p):
    """nonempty_parameter_list : parameter"""
    p[0] = ast.ParameterList(p[1])

def p_nonempty_parameter_list_2(self, p):
    """nonempty_parameter_list : nonempty_parameter_list COMMA parameter"""
    # Left-recursive accumulation of comma-separated parameters.
    p[0] = p[1]
    p[0].Append(p[3])

def p_parameter(self, p):
    """parameter : attribute_section typename NAME ordinal"""
    p[0] = ast.Parameter(p[3], p[1], p[4], p[2],
                         filename=self.filename, lineno=p.lineno(3))
def p_typename(self, p):
    """typename : nonnullable_typename QSTN
                | nonnullable_typename"""
    # Type names are represented as plain strings; a trailing "?" marks
    # the nullable variant.
    if len(p) == 2:
        p[0] = p[1]
    else:
        p[0] = p[1] + "?"

def p_nonnullable_typename(self, p):
    """nonnullable_typename : basictypename
                            | array
                            | fixed_array
                            | associative_array
                            | interfacerequest"""
    p[0] = p[1]

def p_basictypename(self, p):
    """basictypename : identifier
                     | handletype"""
    p[0] = p[1]

def p_handletype(self, p):
    """handletype : HANDLE
                  | HANDLE LANGLE NAME RANGLE"""
    if len(p) == 2:
        # Bare "handle" with no subtype.
        p[0] = p[1]
    else:
        # Only these handle subtypes are valid.
        if p[3] not in ('data_pipe_consumer',
                        'data_pipe_producer',
                        'message_pipe',
                        'shared_buffer'):
            # Note: We don't enable tracking of line numbers for everything, so we
            # can't use |p.lineno(3)|.
            raise ParseError(self.filename, "Invalid handle type %r:" % p[3],
                             lineno=p.lineno(1),
                             snippet=self._GetSnippet(p.lineno(1)))
        p[0] = "handle<" + p[3] + ">"

def p_array(self, p):
    """array : ARRAY LANGLE typename RANGLE"""
    # Unsized arrays are rendered as "elemtype[]".
    p[0] = p[3] + "[]"

def p_fixed_array(self, p):
    """fixed_array : ARRAY LANGLE typename COMMA INT_CONST_DEC RANGLE"""
    # The size must be positive and no larger than the module-level
    # _MAX_ARRAY_SIZE limit (defined elsewhere in this file).
    value = int(p[5])
    if value == 0 or value > _MAX_ARRAY_SIZE:
        raise ParseError(self.filename, "Fixed array size %d invalid:" % value,
                         lineno=p.lineno(5),
                         snippet=self._GetSnippet(p.lineno(5)))
    p[0] = p[3] + "[" + p[5] + "]"

def p_associative_array(self, p):
    """associative_array : MAP LANGLE identifier COMMA typename RANGLE"""
    # Maps are rendered as "valuetype{keytype}".
    p[0] = p[5] + "{" + p[3] + "}"

def p_interfacerequest(self, p):
    """interfacerequest : identifier AMP"""
    # Interface requests are rendered with a trailing ampersand.
    p[0] = p[1] + "&"
def p_ordinal_1(self, p):
    """ordinal : """
    # Ordinal is optional; absence is represented by None.
    p[0] = None

def p_ordinal_2(self, p):
    """ordinal : ORDINAL"""
    # p[1][1:] drops the leading sigil of the ORDINAL token before
    # converting the remainder to an integer.
    value = int(p[1][1:])
    if value > _MAX_ORDINAL_VALUE:
        raise ParseError(self.filename, "Ordinal value %d too large:" % value,
                         lineno=p.lineno(1),
                         snippet=self._GetSnippet(p.lineno(1)))
    p[0] = ast.Ordinal(value, filename=self.filename, lineno=p.lineno(1))
def p_enum(self, p):
    """enum : attribute_section ENUM NAME LBRACE nonempty_enum_value_list \
              RBRACE SEMI
            | attribute_section ENUM NAME LBRACE nonempty_enum_value_list \
              COMMA RBRACE SEMI"""
    # The second alternative allows a trailing comma after the last value.
    p[0] = ast.Enum(p[3], p[1], p[5], filename=self.filename,
                    lineno=p.lineno(2))

def p_nonempty_enum_value_list_1(self, p):
    """nonempty_enum_value_list : enum_value"""
    p[0] = ast.EnumValueList(p[1])

def p_nonempty_enum_value_list_2(self, p):
    """nonempty_enum_value_list : nonempty_enum_value_list COMMA enum_value"""
    p[0] = p[1]
    p[0].Append(p[3])

def p_enum_value(self, p):
    """enum_value : attribute_section NAME
                  | attribute_section NAME EQUALS int
                  | attribute_section NAME EQUALS identifier_wrapped"""
    # The value expression (int or identifier) is optional; None when absent.
    p[0] = ast.EnumValue(p[2], p[1], p[4] if len(p) == 5 else None,
                         filename=self.filename, lineno=p.lineno(2))

def p_const(self, p):
    """const : CONST typename NAME EQUALS constant SEMI"""
    p[0] = ast.Const(p[3], p[2], p[5])
def p_constant(self, p):
    """constant : literal
                | identifier_wrapped"""
    p[0] = p[1]

def p_identifier_wrapped(self, p):
    """identifier_wrapped : identifier"""
    # Tag identifiers so later stages can tell them apart from literals.
    p[0] = ('IDENTIFIER', p[1])

# TODO(vtl): Make this produce a "wrapped" identifier (probably as an
# |ast.Identifier|, to be added) and get rid of identifier_wrapped.
def p_identifier(self, p):
    """identifier : NAME
                  | NAME DOT identifier"""
    # Dotted identifiers are joined back into a single "a.b.c" string.
    p[0] = ''.join(p[1:])

def p_literal(self, p):
    """literal : int
               | float
               | TRUE
               | FALSE
               | DEFAULT
               | STRING_LITERAL"""
    p[0] = p[1]

def p_int(self, p):
    """int : int_const
           | PLUS int_const
           | MINUS int_const"""
    # Preserve an explicit sign by concatenating it with the constant text.
    p[0] = ''.join(p[1:])

def p_int_const(self, p):
    """int_const : INT_CONST_DEC
                 | INT_CONST_HEX"""
    p[0] = p[1]

def p_float(self, p):
    """float : FLOAT_CONST
            | PLUS FLOAT_CONST
            | MINUS FLOAT_CONST"""
    # Preserve an explicit sign by concatenating it with the constant text.
    p[0] = ''.join(p[1:])
def p_error(self, e):
    # PLY error hook: e is the offending token, or None at end of input.
    if e is None:
        # Unexpected EOF.
        # TODO(vtl): Can we figure out what's missing?
        raise ParseError(self.filename, "Unexpected end of file")

    raise ParseError(self.filename, "Unexpected %r:" % e.value, lineno=e.lineno,
                     snippet=self._GetSnippet(e.lineno))

def _GetSnippet(self, lineno):
    # Return the source line (1-based lineno) for use in error messages.
    return self.source.split('\n')[lineno - 1]
def Parse(source, filename):
    """Parse mojom source text and return the resulting AST.

    @param source: The source text to parse.
    @param filename: File name used in error messages and snippets.
    """
    lexer = Lexer(filename)
    parser = Parser(lexer, source, filename)
    # Build the lexer/parser tables in memory; write_tables=0 avoids
    # emitting parser.out/parsetab files to disk.
    lex.lex(object=lexer)
    yacc.yacc(module=parser, debug=0, write_tables=0)
    tree = yacc.parse(source)
    return tree
| bsd-3-clause |
rane-hs/anntools-py3x | anntools/cooperation.py | 2 | 20493 | #!/usr/bin/python
# -*- coding: ascii -*-
'''
Cooperation schemes.
(C) 2007-2008 - Viktor Ferenczi (python@cx.hu) - Licence: GNU LGPL
This module provides cooperation schemes. It is very useful whenever forward
compatibility is required between tools operating on a common object space.
Cooperation instances allows cooperative access to the values of a
dictionary, which stores the object space. The object space stores individual
objects or storage objects. The type of the storage object depends on the
cooperation scheme used. Using cooperation schemes does not require to
change the class of the object space or store special objects in it as long
as a single object is stored for each key. This allows limited compatibility
with tools not compatible with cooperation schemes.
Example: You can add the ability of cooperation to your function annotation
based tool easily. Whenever you need to access the annotation (iterate, add,
remove, check) of an argument or return value just use your Cooperation
instance instead of direct access. Let the user of your library to specify
the Cooperation subclass to be used:
class MyTool:
def addAnnotation(self, fn, argname, annotator):
TupleCooperation(fn.__annotations__, argname).add(annotator)
NOTE: On Python 3.0 annotations are stored in fn.__annotations__ instead.
See the source code of other modules in this packages for more information.
You can subclass Cooperation and implement your scheme if none of the
predefined schemes fit your needs. Note that specifying __slots__ in
each of your subclasses can improve performace, since no __dict__ will be
created for the instances of you subclass. Please drop me a mail if you would
like your scheme to be included in this module. I could include it if it's
generic enough, could be usable for a wide range of applications and has the
essential unit test cases.
TODO: Optimization
TODO: SetCooperation
'''
#============================================================================
# Exported symbols
# Names exported by "from <module> import *".
__all__=[
    'CooperationError', 'NoCooperationError', 'DictCooperationError',
    'CooperationFailed', 'DictCooperationFailed',
    'Cooperation', 'NoCooperation', 'TupleCooperation', 'ListCooperation',
    'DictCooperation',
]
#============================================================================
# Exceptions
class CooperationError(ValueError):
    '''Raised when an existing object is about to be overwritten.
    This means that an error at our side prevents correct cooperation.'''
    pass

#----------------------------------------------------------------------------

class NoCooperationError(CooperationError):
    '''Raised when an existing object is about to be overwritten
    by the NoCooperation scheme. This means that an error at our side
    prevents correct cooperation.'''
    pass

#----------------------------------------------------------------------------

class DictCooperationError(CooperationError):
    '''Raised when an existing object is about to be overwritten
    by the DictCooperation scheme. This means that an error at our side
    prevents correct cooperation.'''
    pass

#----------------------------------------------------------------------------

class CooperationFailed(CooperationError):
    '''Raised when the cooperation failed due to an incompatible object
    stored by someone else. This means that cooperation cannot be continued
    due to an error or incompatibility in another tool accessing the same
    object space.'''
    pass

#----------------------------------------------------------------------------

class DictCooperationFailed(CooperationFailed):
    '''Raised when DictCooperation failed due to an incompatible object
    stored by someone else. This means that cooperation cannot be continued
    due to an error or incompatibility in another tool accessing the same
    object space.'''
    pass
#============================================================================
# Cooperation schemes
class Cooperation(object):
    '''Base class for cooperation schemes. Note that objects are stored in a
    separate object space, not in the Cooperation instance itself, so the
    interface of Cooperation instances are like a proxy to a set, but
    without the support for set operations (union, etc.). The Cooperation
    object also does not guarantee uniqueness of stored values, but it could
    be guaranteed by specific cooperation schemes. It is allowed to set
    the space and key properties of a Cooperation instance, since it
    does not store any objects or state information (stateless). This allows
    fast reuse of a single Cooperation instance for operations with different
    object keys or even object spaces. The class filter should not be
    modified. While this is technically possible it would change the set of
    accepted objects in a counter-intuitive way.'''

    # NOTE: __slots__ should be defined for each subclass for performance
    __slots__=['space', 'key', 'classfilter']

    def __init__(self, space, key, classfilter=object):
        '''Initialize cooperation for a given object space and object key.
        An optional class or tuple of classes can be defined to filter
        objects by their class. This is a convenient feature to easily
        separate sets of objects for different purposes.
        @param space: The object space (dictionary) to operate on.
        @param key: Object space key to identify the objects to be accessed.
        @param classfilter: Optional class filter to separate objects for different purposes. Must be a class or a tuple of classes.
        '''
        self.space=space
        self.key=key
        self.classfilter=classfilter

    # Informational methods

    def iter(self):
        '''Iterates on stored objects. Yields only those instances that fit
        the class filter, which is an easy way to separate objects belonging
        to different tools or serving different purposes. Note that the
        caller must specify an exact class filter or check objects in the
        loop and silently skip all foreign (unknown) objects it encounters
        to ensure future compatibility.
        IMPORTANT NOTE:
        It's not recommended to store built-in Python objects, since they
        are universal and cannot be associated with a single purpose.'''
        raise NotImplementedError()

    def len(self):
        '''Returns the total number of objects that could be iterated.'''
        # O(n): counts by exhausting the subclass-provided iterator.
        cnt=0
        for obj in self:
            cnt+=1
        return cnt

    def contains(self, obj):
        '''Returns True if the object space contains the given object.
        Always returns False, if the object does not fit the class filter.
        @param obj: Object to search for.'''
        # Identity comparison ("is"), not equality, as documented above.
        if isinstance(obj, self.classfilter):
            for o in self:
                if o is obj: return True
        return False

    # Provides a set like interface
    __iter__=iter
    __len__=len
    __contains__=contains

    # Set like manipulation methods

    def add(self, obj):
        '''Adds an object. Does not guarantee, that the object is unique,
        but it can be guaranteed by specific cooperation schemes. Must
        preserve any existing objects or raise a subclass of CooperationError
        if the operation cannot be completed for some reason. Note, that the
        object to be added must fit the class filter or ValueError raised.
        This restriction is due to mathematical correctness, since if an
        object could be added that does not fit the filter it cannot be
        accessed later by the same Cooperation instance.
        @param obj: The object to be added.
        @raise ValueError: Raised when the object to be added does not fit the class filter.
        @raise CooperationError: Raised when an existing object is about to be overwritten.
        @raise CooperationFailed: Raised when an incompatible object is in the way.
        '''
        raise NotImplementedError()

    def remove(self, obj):
        '''Removes an object. Does not guarantee, that all instances of this
        object will be removed, only guarantees that at least one is removed.
        Subclasses may provide different behavior regarding uniqueness. Note
        that the object is searched by it's identity, not by it's value. The
        object is checked against the class filter to prevent accidental
        removal of foreign objects or other incorrect usage.
        @param obj: The object to be removed.
        @raise ValueError: Raised when the object to be removed does not fit the class filter.
        '''
        raise NotImplementedError()
#----------------------------------------------------------------------------
class NoCooperation(Cooperation):
    '''Strict scheme without cooperation: at most a single object may live
    under the object key. Attempting to store a second object raises
    NoCooperationError.'''

    __slots__=['space', 'key', 'classfilter']

    def iter(self):
        '''Yield the single stored object, provided one exists and it
        matches the class filter.'''
        try:
            stored = self.space[self.key]
        except KeyError:
            return
        if isinstance(stored, self.classfilter):
            yield stored
    __iter__=iter

    def add(self, obj):
        '''Store the object under the key. An object failing the class
        filter triggers ValueError; an already occupied slot triggers
        NoCooperationError.
        @param obj: The object to be stored.
        @raise ValueError: Raised when the object to be added does not fit the class filter.
        @raise NoCooperationError: Raised when an existing object is about to be overwritten.
        '''
        if not isinstance(obj, self.classfilter):
            raise ValueError('Object does not fit the class filter: key=%r, classfilter=%r'%(self.key, self.classfilter))
        if self.key in self.space:
            raise NoCooperationError('Trying to overwrite an existing object! key=%r'%(self.key,))
        self.space[self.key]=obj

    def remove(self, obj):
        '''Delete the stored object when it is identical to the argument.
        A missing or different stored object is silently ignored, mirroring
        the behavior of the other cooperation schemes. Objects failing the
        class filter trigger ValueError.
        @param obj: The object to be removed.
        @raise ValueError: Raised when the object to be removed does not fit the class filter.
        '''
        if not isinstance(obj, self.classfilter):
            raise ValueError('Object does not fit the class filter: key=%r, classfilter=%r'%(self.key, self.classfilter))
        space, key = self.space, self.key
        if key in space and space[key] is obj:
            del space[key]
#----------------------------------------------------------------------------
class TupleCooperation(Cooperation):
    '''Cooperation scheme that stores objects in a tuple.
    This is the default cooperation scheme for all modules in the anntools
    package, since it provides a simple way of cooperation without too much
    overhead or restrictions imposed on what type of objects can be stored.
    This should cover most of the use cases while giving reasonably good
    performance as long as the number of objects is not very high. This is
    also a convenient scheme if you have to specify a single object or a
    tuple of objects by hand as in the case of function annotations.'''

    __slots__=['space', 'key', 'classfilter']

    def iter(self):
        '''Yields all stored objects fitting the class filter.'''
        if self.key not in self.space:
            return
        storage=self.space[self.key]
        classfilter=self.classfilter
        # Storage may be either a tuple of objects or a single bare object.
        if isinstance(storage, tuple):
            for obj in storage:
                if isinstance(obj, classfilter):
                    yield obj
        elif isinstance(storage, classfilter):
            yield storage
    __iter__=iter

    def add(self, obj):
        '''Adds the object, introduces a tuple as storage object if more than
        one objects are stored in the object space. Adding a tuple appends
        it's contents.
        @param obj: The object to be appended.
        @raise ValueError: Raised when the object to be added does not fit the class filter.
        '''
        # A tuple argument is flattened: each element is added individually.
        if isinstance(obj, tuple):
            for o in obj:
                self.add(o)
            return
        if not isinstance(obj, self.classfilter):
            raise ValueError('Object does not fit the class filter: key=%r, classfilter=%r'%(self.key, self.classfilter))
        space=self.space
        key=self.key
        if key in space:
            storage=space[key]
            # Tuples are immutable: extend by building a new tuple, or
            # promote a single bare object into a two-element tuple.
            if isinstance(storage, tuple):
                space[key]=storage+(obj,)
            else:
                space[key]=(storage, obj)
        else:
            # First object is stored bare (no tuple wrapper).
            space[key]=obj

    def remove(self, obj):
        '''Removes an object from the object space.
        Replaces tuple by a single object if only one object left.
        Deletes the key from the object space if no objects left.
        @param obj: The object to be removed.
        @raise ValueError: Raised when the object to be removed does not fit the class filter.
        '''
        # A tuple argument is flattened: each element is removed individually.
        if isinstance(obj, tuple):
            for o in obj:
                self.remove(o)
            return
        if not isinstance(obj, self.classfilter):
            raise ValueError('Object does not fit the class filter: key=%r, classfilter=%r'%(self.key, self.classfilter))
        space=self.space
        key=self.key
        if key not in space:
            return
        storage=space[key]
        if isinstance(storage, tuple):
            # Identity comparison: keep everything that is not this object.
            filtered=[a for a in storage if a is not obj]
            if len(filtered)<len(storage):
                if not filtered:
                    del space[key]
                elif len(filtered)<2:
                    # Collapse a one-element tuple back to a bare object.
                    space[key]=filtered[0]
                else:
                    space[key]=tuple(filtered)
        elif storage is obj:
            del space[key]
#----------------------------------------------------------------------------
class ListCooperation(Cooperation):
    '''Cooperation scheme that stores objects in a list.'''

    __slots__=['space', 'key', 'classfilter']

    def iter(self):
        '''Yields all stored objects fitting the class filter.'''
        if self.key not in self.space:
            return
        storage=self.space[self.key]
        classfilter=self.classfilter
        # Storage may be either a list of objects or a single bare object.
        if isinstance(storage, list):
            for obj in storage:
                if isinstance(obj, classfilter):
                    yield obj
        elif isinstance(storage, classfilter):
            yield storage
    __iter__=iter

    def add(self, obj):
        '''Adds the object, introduces a list as storage object if more than
        one objects are stored in the object space. Adding a list appends
        it's contents.
        @param obj: The object to be appended.
        @raise ValueError: Raised when the object to be added does not fit the class filter.
        '''
        # A list argument is flattened: each element is added individually.
        if isinstance(obj, list):
            for o in obj:
                self.add(o)
            return
        if not isinstance(obj, self.classfilter):
            raise ValueError('Object does not fit the class filter: key=%r, classfilter=%r'%(self.key, self.classfilter))
        space=self.space
        key=self.key
        if key in space:
            storage=space[key]
            if isinstance(storage, list):
                storage.append(obj)
            else:
                # Promote a single bare object into a two-element list.
                space[key]=[storage, obj]
        else:
            # First object is stored bare (no list wrapper).
            space[key]=obj

    def remove(self, obj):
        '''Removes an object from the object space.
        Replaces list by a single object if only one object left.
        Deletes the key from the object space if no objects left.
        @param obj: The object to be removed.
        @raise ValueError: Raised when the object to be removed does not fit the class filter.
        '''
        # A list argument is flattened: each element is removed individually.
        if isinstance(obj, list):
            for o in obj:
                self.remove(o)
            return
        if not isinstance(obj, self.classfilter):
            raise ValueError('Object does not fit the class filter: key=%r, classfilter=%r'%(self.key, self.classfilter))
        space=self.space
        key=self.key
        if key not in space:
            return
        storage=space[key]
        if isinstance(storage, list):
            # BUG FIX: the previous implementation collected the matching
            # indices and deleted them in ascending order, but each deletion
            # shifts the later indices, so with duplicate entries it could
            # delete the wrong items or raise IndexError. Rebuild the list
            # in place instead (identity comparison, as documented).
            storage[:]=[o for o in storage if o is not obj]
            if not storage:
                del space[key]
            elif len(storage)<2:
                # Collapse a one-element list back to a bare object.
                space[key]=storage[0]
        elif storage is obj:
            del space[key]
#----------------------------------------------------------------------------
class DictCooperation(Cooperation):
    '''Cooperation scheme that stores objects in a dictionary. An additional
    key must be given to the constructor that identifies the objects to be
    accessed in the dictionary used as the storage. Note, that only one
    object can be stored by this scheme for each cooperative partner using
    different storage keys. This scheme does not allow single objects in the
    objects space. The object space can only contain dictionaries.'''

    __slots__=['space', 'key', 'classfilter', 'storekey']

    def __init__(self, space, key, storekey, classfilter=object):
        '''Initialize cooperation scheme.
        @param storekey: Hashable key to identify our object in the object storage.
        '''
        Cooperation.__init__(self, space, key, classfilter)
        self.storekey=storekey

    def iter(self):
        '''Yields a single object if it exists.'''
        if self.key in self.space:
            storage=self.space[self.key]
            # NOTE(review): storage is assumed to be a dict here; unlike
            # add()/remove(), no DictCooperationFailed is raised when it
            # is not -- a non-dict storage would raise TypeError instead.
            if self.storekey in storage:
                obj=storage[self.storekey]
                if isinstance(obj, self.classfilter):
                    yield obj
    __iter__=iter

    def add(self, obj):
        '''Adds a new object. Introduces a dictionary into the object space
        if no object already exists with the object key. Raises ValueError
        if the object to be added does not fit the class filter. Raises
        DictCooperationError if an object already stored. Raises
        DictCooperationFailed if the object space contains a non-dictionary
        (hence incompatible) object.
        @param obj: The object to be appended.
        @raise ValueError: Raised when trying to add an object that does not fit the class filter.
        @raise DictCooperationError: Raised when an existing object is about to be overwritten.
        @raise DictCooperationFailed: Raised when cooperation is failed due to an incompatible object found in the object space.
        '''
        if not isinstance(obj, self.classfilter):
            raise ValueError('Object does not fit the class filter: key=%r, classfilter=%r'%(self.key, self.classfilter))
        space=self.space
        key=self.key
        if key in space:
            storage=space[key]
            if not isinstance(storage, dict):
                raise DictCooperationFailed('Incompatible object found in object space! Dicionary cooperation failed. key=%r'%(self.key,))
            storekey=self.storekey
            if storekey in storage:
                raise DictCooperationError('Trying to overwrite an existing object! key=%r, storekey=%r'%(self.key, self.storekey))
            storage[storekey]=obj
        else:
            # First partner: introduce the dictionary storage.
            space[key]={self.storekey:obj}

    def remove(self, obj):
        '''Removes the given object. Deletes the key from the object space
        if the storage dictionary becomes empty.
        @param obj: The object to be removed.
        @raise ValueError: Raised when trying to remove an object that does not fit the class filter.
        @raise DictCooperationFailed: Raised when cooperation is failed due to an incompatible object found in the object space.
        '''
        if not isinstance(obj, self.classfilter):
            raise ValueError('Object does not fit the class filter: key=%r, classfilter=%r'%(self.key, self.classfilter))
        space=self.space
        key=self.key
        if key in space:
            storage=space[key]
            if not isinstance(storage, dict):
                raise DictCooperationFailed('Incompatible object found in object space! Dicionary cooperation failed. key=%r'%(self.key,))
            storekey=self.storekey
            if storekey in storage:
                del storage[storekey]
                if not storage:
                    del space[key]
#============================================================================
| lgpl-2.1 |
yanweifu/reweighted-ws | show-param-trajectory.py | 6 | 1912 | #!/usr/bin/env python2
from __future__ import division, print_function
import logging
import h5py
import numpy as np
import tsne
import pylab
#x2 = tsne.bh_sne(x)
if __name__ == "__main__":
    import sys
    import argparse

    logger = logging.getLogger(__name__)

    # Command line: one or more result directories, each expected to contain
    # a results.h5 file; --param selects which model parameter to visualize.
    parser = argparse.ArgumentParser()
    parser.add_argument('--verbose', '-v', action="store_true", default=False)
    parser.add_argument('--param', type=str, default="L0.P.W_mu")
    parser.add_argument('result_dir', nargs='+')
    args = parser.parse_args()

    if args.verbose:
        level = logging.DEBUG
    else:
        level = logging.INFO

    FORMAT = '[%(asctime)s] %(message)s'
    DATEFMT = "%H:%M:%S"
    logging.basicConfig(format=FORMAT, datefmt=DATEFMT, level=level)

    # Collect the parameter trajectory of every run into one big matrix;
    # N_iter remembers how many iterations each run contributed so the
    # joint embedding can be split back per run below.
    P_all = None
    N_iter = []
    for i, d in enumerate(args.result_dir):
        fname = d+"/results.h5"
        with h5py.File(fname, 'r') as h5:
            logger.debug("Keys:")
            for k, v in h5.iteritems():
                logger.debug(" %-30s %s" % (k, v.shape))
            key = "model." + args.param
            # [:] materializes the dataset into an in-memory array so the
            # file can be closed before further processing.
            P = h5[key][:]
        n_iter = P.shape[0]
        # Flatten each iteration's parameter tensor into one row vector.
        P = P.reshape([n_iter, -1])
        # Drop iterations that contain any NaN/Inf entry.
        mask = np.isfinite(P).all(axis=1)
        P = P[mask]
        logger.info("%s: loaded %d iterations (%d contained NaNs)" % (d, mask.sum(), n_iter-mask.sum()))
        N_iter.append(P.shape[0])
        if P_all is None:
            P_all = P
        else:
            P_all = np.concatenate([P_all, P])

    P_all = P_all.astype(np.float)
    logger.info("Running T-SNE on %s" % str(P_all.shape))
    # Embed all trajectories jointly so every run shares one 2-D space.
    P2_all = tsne.bh_sne(P_all, pca_d=None, perplexity=10, theta=0.5)

    # Split the joint embedding back into per-run segments and scatter-plot
    # each one; color encodes progress (0 = first, 1 = last iteration).
    for n_iter in N_iter:
        P2 = P2_all[:n_iter]
        P2_all = P2_all[n_iter:]
        c = np.linspace(0, 1, n_iter)
        pylab.scatter(P2[:,0], P2[:,1], c=c)
    pylab.show(block=True)
| agpl-3.0 |
P0cL4bs/3vilTwinAttacker | plugins/captivePortal/flask_demo.py | 2 | 1778 | import re
from ast import literal_eval
from plugins.captivePortal.plugin import CaptiveTemplatePlugin
"""
Description:
This program is a core for wifi-pumpkin.py. file which includes functionality
plugins for Pumpkin-Proxy.
Copyright:
Copyright (C) 2015-2016 Marcos Nesster P0cl4bs Team
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>
"""
class FlaskDemo(CaptiveTemplatePlugin):
    # Plugin metadata consumed by the captive-portal framework; the paths
    # are relative to the captivePortal plugin package.
    meta = {
        'Name'       : 'FlaskDemo',
        'Version'    : '1.0',
        'Description' : 'Example is a simple portal default page',
        'Author'      : 'Pumpkin-Dev',
        'TemplatePath' : 'templates/Flask',
        'StaticPath'  : 'templates/Flask/static',
        'Preview'     : 'plugins/captivePortal/templates/Flask/preview.png'
    }
    def __init__(self):
        # Promote every metadata entry to an instance attribute so the
        # framework can read e.g. self.TemplatePath directly.
        for key,value in self.meta.items():
            self.__dict__[key] = value
        self.dict_domain = {}
        self.ConfigParser = True
    def init_language(self, lang):
        # Non-default languages load templates from a per-language folder.
        if (lang.lower() != 'default'):
            self.TemplatePath = 'templates/Flask/language/{}'.format(lang)
            return
        # 'default': restore all attributes from the class metadata
        # (undoes a previous per-language TemplatePath override).
        for key,value in self.meta.items():
            self.__dict__[key] = value
rlee287/pyautoupdate | pyautoupdate/launcher.py | 2 | 31325 | from __future__ import absolute_import, print_function
from datetime import datetime
from logging import WARNING
import multiprocessing
import os
import pprint
import re
import shutil
from sys import version_info
import tempfile
import warnings
# Module in different place depending on python version
if version_info[0] == 2: # pragma: no branch
from urlparse import urlparse, urlunparse
else:
from urllib.parse import urlparse, urlunparse
from pkg_resources import parse_version, PEP440Warning
try:
from pkg_resources import SetuptoolsVersion as Version
#from packaging.version import Version
# Behaves differently than the packaging.version
# Causing test failures
except ImportError:
from pkg_resources.extern.packaging.version import Version
from setuptools.archive_util import unpack_archive, UnrecognizedFormat
import requests
from ._file_glob import copy_glob
from .exceptions import ProcessRunningException, CorruptedFileWarning
class Launcher(object):
"""Creates a :class:`Launcher` object. This class provides the main
functionality of the Pyautoupdate module.
:param str filepath: Path to file to execute
:param str url: Base URL from which to download new versions
.. note::
This must be an HTTPS url. HTTP urls are silently changed into HTTPS.
**Use of HTTPS is strictly enforced.**
Parameters, queries, and fragments will be stripped from the URL.
:param str newfiles: Name of ``.zip``, ``tar.gz`` or ``.tar.bz2`` archive
with new versions to download from site
:param int log_level: Logging level for the built in logger
:param tuple args: ``args`` passed to the launched code
:param dict kwargs: ``kwargs`` passed to the launched code
When the code is launched, the following variables are already defined:
+-------------+-------------------------------------------------+
|Variable Name|Value Description |
+=============+=================================================+
|``filepath`` |Path to the file that was initially launched |
+-------------+-------------------------------------------------+
|``url`` |Base url to check and download new versions |
+-------------+-------------------------------------------------+
|``newfiles`` |Name of the archive to download from the server |
+-------------+-------------------------------------------------+
|``check_new``|Method to check for updated code |
+-------------+-------------------------------------------------+
|``pid`` |PID of parent process that spawns the code |
+-------------+-------------------------------------------------+
|``log`` |Logger for Pyautoupdate and for the executed code|
+-------------+-------------------------------------------------+
|``args`` |``args`` tuple for the spawned code |
+-------------+-------------------------------------------------+
|``kwargs`` |``kwargs`` dict for the spawned code |
+-------------+-------------------------------------------------+
.. warning::
The :class:`Launcher` uses :class:`multiprocessing.Process`
to run the code.
Please ensure that all arguments passed in as ``args`` and ``kwargs``
can be pickled. Non-pickleable arguments cannot be passed to the child
process on Windows, and an error will be raised when attempting to run
user code.
"""
# The name of the file containing version numbers
version_doc = "version.txt"
# A log file that records the results of checking for updates
version_check_log = "version_check.log"
# The file with the paths of the code and resources in an application
file_list = "filelist.txt"
# The directory into which the newer versions are unpacked
updatedir = ".pyautodownloads"
# A marker file used to indicate that a new version is available
queue_update = ".queue"
def __init__(self, filepath, url,
             newfiles='project.zip',
             log_level=WARNING,
             *args, **kwargs):
    """Set up the launcher and validate its environment.

    Validates the version bookkeeping files, checks the arguments,
    normalizes ``url`` to an https base URL ending in a slash, and
    creates the :class:`multiprocessing.Process` used to run user code.

    :param str filepath: path to the file to launch; must be inside the
        current working directory
    :param str url: base url used to check for and download new versions
    :param str newfiles: name of the archive to download from the server
    :param int log_level: level for the internal logger
    :param tuple args: positional arguments passed to the launched code
    :param dict kwargs: keyword arguments passed to the launched code
    :raises ValueError: for an empty or escaping filepath, an empty or
        non-http(s) url, or an invalid ``newfiles`` archive name
    """
    # Initialize logger
    self.log = multiprocessing.get_logger()
    self.log.setLevel(log_level)
    # Attach a stderr handler only once to avoid duplicated log lines
    if len(self.log.handlers) == 0:
        # Create handler to sys.stderr
        multiprocessing.log_to_stderr()
    self.log.info("Initializing launcher")
    self.log.debug("Validating files")
    # Check that self.version_doc holds a valid PEP 440 version
    if not self.version_doc_validator():
        self.log.error("{0} does not have a valid version number!\n"
                       "{0} is a reserved file name.\n"
                       "It will be overwritten by this program!\n"
                       "If the {0} is corrupted,\n"
                       "Please use the logfile at {1} to restore it."
                       .format(self.version_doc, self.version_check_log))
        warnings.warn("{0} is corrupted!".format(self.version_doc),
                      CorruptedFileWarning,
                      stacklevel=2)
    # "Touch" the version log so it exists, then validate its format
    open(self.version_check_log, 'a').close()
    if not self.version_log_validator():
        self.log.warning("Log file at {0} is corrupted!\n"
                         "{0} is a reserved file name.\n"
                         "Please ensure that your program is "
                         "not using it.".format(self.version_check_log))
        warnings.warn("{0} is corrupted!"
                      .format(self.version_check_log),
                      CorruptedFileWarning,
                      stacklevel=2)
    self.log.debug("Validating arguments")
    # Check that filepath is specified
    if len(filepath) == 0:
        raise ValueError("Filepath must not be empty")
    if ".." in os.path.relpath(filepath, start=os.getcwd()):
        # Filepath must be inside the current directory.
        # Enforcing this prevents ../../../etc/passwd style attacks
        raise ValueError("Filepath must be inside of folder "
                         "containing initial script")
    self.filepath = filepath
    # Check that URL is specified
    if len(url) == 0:
        raise ValueError("URL must not be empty")
    self.url = url
    # URL parsing section
    schemaobj = urlparse(self.url)
    # Add https schema if necessary and replace http with https
    if schemaobj.scheme not in ["", "https", "http"]:
        raise ValueError("Url must be http or https")
    if schemaobj.scheme == "":
        self.url = "https://" + self.url
        schemaobj = urlparse(self.url)
    # Intended behavior is to remove parameters, query, and fragment
    self.url = urlunparse(("https", schemaobj.netloc, schemaobj.path,
                           "", "", ""))
    # Append slash to end of URL if it is not present.
    # BUGFIX: test the normalized self.url, not the raw url argument;
    # urlunparse() above may change whether the URL ends in "/" (e.g.
    # "http://host/path/?q=1" normalizes to ".../path/", but the raw
    # url does not end in "/", which previously produced "path//").
    if not self.url.endswith("/"):
        self.url = self.url + "/"
    # Check for valid newfiles.
    # Split along both types of path separators to reject any path parts
    if len(re.split(r"\\|\/", os.path.normpath(newfiles))) > 1:
        raise ValueError("newfiles should be a single archive name")
    elif not newfiles.endswith((".zip", ".tar.gz", ".tar.bz2")):
        raise ValueError("newfiles must be a zip, gzip, or bzip file")
    else:
        self.newfiles = newfiles
    # Initialize other variables.
    # self.update serializes user-code execution against file replacement
    self.update = multiprocessing.Lock()
    self.pid = os.getpid()
    self.args = args
    self.kwargs = kwargs
    self.__process = multiprocessing.Process(target=self._call_code,
                                             args=self.args,
                                             kwargs=self.kwargs)
    self.past_terminated = False
    self.__process_alive = multiprocessing.Event()
    self.log.info("Launcher initialized successfully")
####################### Filename getters and validators ######################
def version_doc_validator(self):
    """Validate the file holding the current version number.

    :return: ``True`` if ``self.version_doc`` exists and contains a
        valid :pep:`440` version string
    :rtype: bool
    """
    # A missing version document can never be valid
    if not os.path.isfile(self.version_doc):
        return False
    try:
        with open(self.version_doc, "r") as doc_handle:
            contents = doc_handle.read()
            # An empty or malformed string does not parse to a proper
            # Version instance, so treat it as invalid
            if not isinstance(parse_version(contents), Version):
                raise PEP440Warning
    except PEP440Warning:
        # Raised above (or by the parser) for an invalid version
        return False
    return True
def version_log_validator(self):
    """Validate the format of the version-check log file.

    An empty log (or one containing only a newline) is considered
    valid; otherwise the log must start with an entry of the form
    ``Old <ver>|<result>|Time <stamp>``.

    :return: whether ``self.version_check_log`` is formatted properly
    :rtype: bool
    """
    entry_pattern = re.compile(
        r"Old .+?\|(New .+?|Up to date|Server invalid)\|Time .+?")
    with open(self.version_check_log, "r") as log_handle:
        log_text = log_handle.read()
        # Only a non-trivial log needs to match the entry pattern
        if log_text == "\n" or len(log_text) == 0:
            return True
        return bool(entry_pattern.match(log_text))
########################### Process manipulation #############################
@property
def process_is_alive(self):
    """Property indicating whether the process is alive.

    To see if user code is running, please use
    :meth:`Launcher.process_code_running`. Note that the process needs
    to initialize itself before it can run user code.
    """
    # Thin delegation to the underlying multiprocessing.Process
    return self.__process.is_alive()
@property
def process_code_running(self):
    """Property indicating whether the user code is running.

    To see if the process has started, please use
    :meth:`Launcher.process_is_alive`. Note that the process needs
    to initialize itself before it can run user code.
    """
    # The event is set by _call_code just before exec() and cleared after
    return self.__process_alive.is_set()
@property
def process_pid(self):
    """Property indicating the process PID; ``None`` before first start."""
    return self.__process.pid
@property
def process_exitcode(self):
    """Property indicating the process exitcode, if it exists.

    Reports ``-15`` when the process was killed via
    :meth:`process_terminate`, regardless of platform.
    """
    if self.past_terminated:
        # SIGTERM is signal 15 on Linux
        # Preserve compatibility on Windows
        return -15
    else:
        return self.__process.exitcode
def process_join(self, timeout=None):
    """Joins the process.

    :param timeout: seconds to wait, or ``None`` to block until exit

    .. seealso:: :meth:`multiprocessing.Process.join`
    """
    self.log.info("Joining process")
    self.__process.join(timeout)
def process_terminate(self):
    """Terminates the process.

    .. warning::
        All the provisos of :meth:`multiprocessing.Process.terminate`
        apply.
        Attempts are made in the code to ensure that internal variables
        inside the Launcher class are properly cleaned up. However, there is
        little protection for user supplied code in case of termination.

    :return: Whether process was terminated
    :rtype: bool
    """
    # TODO: Troubleshoot xfail test
    if self.process_is_alive:
        self.log.warning("Terminating Process")
        self.__process.terminate()
        self.__process_alive.clear()
        # Release lock to avoid update deadlock later.
        # NOTE(review): this assumes the terminated child had already
        # acquired the lock in _call_code; if it was killed before the
        # acquire, this over-releases — likely related to the xfail
        # test noted above, verify before relying on it.
        self.log.debug("Releasing code lock after termination")
        self.update.release()
        # Reinitialize process now because is_alive is not properly reset
        # After a process termination
        self.log.debug("Reinitializing process object after termination")
        self.__process = None
        self.__process = multiprocessing.Process(target=
                                                 self._call_code,
                                                 args=self.args,
                                                 kwargs=self.kwargs)
        # Remembered so process_exitcode can report -15 afterwards
        self.past_terminated = True
        return True
    else:
        self.log.warning("Attempted to terminate dead process")
        return False
########################### Code execution methods ###########################
def _call_code(self, *args, **kwargs):
    """Internal function to execute the user code.

    This is used as target of a
    :class:`multiprocessing.Process` instance, so it runs inside the
    child process.

    :param tuple args: ``*args`` tuple from self.args
    :param dict kwargs: ``**kwargs`` dict from self.kwargs

    .. warning::
        End users should never call this directly.
        Please use the :meth:`run` method instead.
    """
    # Open code file
    # Acquire lock here to avoid TOCTTOU issues with opened code file
    # multiprocessing.get_logger again since this is not pickleable
    local_log = multiprocessing.get_logger()
    local_log.debug("Acquiring code lock to run code")
    self.update.acquire()
    with open(self.filepath, mode='r') as code_file:
        code = code_file.read()
    # Set up variables visible to child process
    localvar = vars(self).copy()
    # Manipulate __dict__ attribute to add handle to check_new
    localvar["check_new"] = self.check_new
    # Remove handle to process object and lock
    # Neither should not be tampered with in child process code
    del localvar["_Launcher__process"]
    del localvar["_Launcher__process_alive"]
    del localvar["update"]
    del localvar["past_terminated"]
    # Pass in args, kwargs, and logger
    localvar["args"] = args
    localvar["kwargs"] = kwargs
    localvar["log"] = local_log
    local_log.debug("Starting process with "
                    "the following local variables:\n" +
                    pprint.pformat(localvar))
    # Execute code in file
    local_log.info("Starting code from file")
    try:
        # Signal that user code is now running (see process_code_running)
        self.__process_alive.set()
        # User code sees an empty globals dict and localvar as locals
        exec(code, dict(), localvar)
    finally:
        local_log.debug("Releasing code lock after running code")
        self.update.release()
        self.__process_alive.clear()
        # Reset past_terminated to False
        # (if terminated and rerun, past_terminated should be false)
        self.past_terminated = False
def run(self, background=False):
    """Runs the user code.

    If background is ``False``, blocks and returns the process's
    exitcode once the code finishes.

    :param bool background: Whether to run code in background
    :return: the exit code if background is ``False``
    :rtype: :class:`int` or :class:`None`
    :raises ProcessRunningException: if the process is already running
    """
    # Find the right error to raise depending on python version
    self.log.info("Checking file existence")
    try:
        error_to_raise = FileNotFoundError
    except NameError:
        # Python 2 has no FileNotFoundError
        error_to_raise = IOError
    if not os.path.isfile(self.filepath):
        raise error_to_raise("No file at {0}".format(self.filepath))
    self.log.info("Checking process status")
    if self.process_is_alive:
        self.log.error("Process is already running")
        raise ProcessRunningException
    elif self.process_pid is None:
        # Process has not run yet
        self.log.info("Process has not run yet")
        self.log.info("Starting process")
        # self.log is not pickleable
        # The variable will be reinstantiated inside _call_code
        # Temporarily remove here and reinstantiate after start
        del self.log
        try:
            self.__process.start()
        finally:
            self.log = multiprocessing.get_logger()
        self.log.info("Process started")
        if not background:
            self.process_join()
            # Exit code can be used by program that calls the launcher
            return self.process_exitcode
    elif self.process_exitcode is not None:
        # Process has already terminated
        # Reinitialize the process instance
        self.log.info("Process has already finished")
        self.log.info("Reinitializing process object")
        self.__process = multiprocessing.Process(target=
                                                 self._call_code,
                                                 args=self.args,
                                                 kwargs=self.kwargs)
        # Recursion, since this will reset @property properties.
        # BUGFIX: propagate the recursive call's return value so a
        # foreground rerun still reports the exit code to the caller
        # (previously the result was silently discarded).
        return self.run(background)
    else:  # pragma: no cover
        # Should never happen
        self.log.error("Process exitcode exists without PID!")
        self.log.error("The application is probably in an unstable state.")
######################### New code retrieval methods #########################
def check_new(self):
    """Retrieves the latest version number from the remote host.

    :return: Whether a newer version is available
    :rtype: bool

    .. note::
        This function internally uses setuptool's ``parse_version``
        to compare versions.
        Any versioning scheme conforming to :pep:`440` can be used.
        When the server contains an invalid version specification, this
        returns ``false``.

    .. versionchanged:: 1.0.0
        Previously, an invalid server version would cause an exception.
    """
    self.log.info("Checking for updates")
    request_time = datetime.utcnow()
    # If an update is already queued, just revalidate the queued version
    # TODO: Check again?
    if os.path.isfile(self.queue_update):
        with open(self.queue_update, 'r') as new_version:
            newver = new_version.read()
            newver_obj = parse_version(newver)
            return isinstance(newver_obj, Version)
    else:
        versionurl = self.url + self.version_doc
        # Get new files
        self.log.debug("Retrieving new version from {0}"
                       .format(versionurl))
        get_new = requests.get(versionurl, allow_redirects=True)
        get_new.raise_for_status()
        newver = get_new.text
        newver_obj = parse_version(newver)
        # Read in old version
        with open(self.version_doc, 'r') as old_version:
            oldver = old_version.read()
        oldver = oldver.rstrip("\n")
        # Compare old version with new version
        invalid = not isinstance(newver_obj, Version)
        # Check if new version is valid
        if invalid:
            self.log.error("Retrieved version number is invalid!\n"
                           "Please contact the software authors.\n"
                           "Please include the generated data dump "
                           "in a bug report.")
            # Write notification into log file.
            # BUGFIX: use "Server invalid" (lowercase "i") so the entry
            # matches the pattern accepted by version_log_validator();
            # the previous capitalized form made the validator flag this
            # module's own log as corrupted.
            version_to_add = "Old {0}|Server invalid|Time {1}\n"\
                             .format(oldver, request_time)
            with open(self.version_check_log, "a") as log_file:
                log_file.write(version_to_add)
            newver_dump = None
            # If invalid, dump the bad version string for diagnostics
            try:
                newver_dump = tempfile.NamedTemporaryFile(prefix="newverdump",
                                                          delete=False,
                                                          mode="wt",
                                                          dir=os.getcwd())
                self.log.error("Writing invalid version into {0}"
                               .format(newver_dump.name))
                newver_dump.write(newver)
                # finally runs after return statement
                return False
            except Exception:
                self.log.exception("Unable to write data dump")
                return False
            finally:
                if newver_dump is not None:
                    newver_dump.close()
                # Throw warning after logging
                # If version is invalid, upgrade cannot succeed
                warnings.warn("Invalid Server version!", CorruptedFileWarning)
        # newver_obj will be proper version by this point
        has_new = (newver_obj > parse_version(oldver))
        # Add entry to the logfile and queue the update if one exists
        if has_new:
            version_to_add = "Old {0}|New {1}|Time {2}\n"\
                             .format(oldver, newver, request_time)
            with open(self.queue_update, 'w') as new_version:
                new_version.write(newver)
        else:
            version_to_add = "Old {0}|Up to date|Time {1}\n"\
                             .format(oldver, request_time)
        with open(self.version_check_log, "a") as log_file:
            log_file.write(version_to_add)
        return has_new
def _reset_update_files(self):
"""Resets the update files to its default state.
It empties the existing update directory or creates a new one
if it doesn't exist.
"""
self.log.debug("Resetting update directory")
if os.path.isdir(self.updatedir):
# Remove old contents
shutil.rmtree(self.updatedir)
# Make new empty directory
# shutil.rmtree would have deleted the directory
os.mkdir(self.updatedir)
# Remove old archive if it is left behind
if os.path.isfile(self.newfiles):
os.remove(self.newfiles)
def _get_new(self, allow_redirects=True, chunk_size=512):
    """Retrieves the new archive and extracts it to self.updatedir.

    Does nothing unless an update has been queued by check_new().

    :param bool allow_redirects: whether HTTP redirects are followed
        when downloading the archive
    :param int chunk_size: bytes written per chunk while streaming the
        download to disk
    """
    if not os.path.isfile(self.queue_update):
        self.log.info("No need to retrieve new version as "
                      "existing one is up to date")
        return
    self.log.info("Retrieving new version")
    newurl = self.url + self.newfiles
    # Get new files; stream to avoid loading the archive into memory
    http_get = requests.get(newurl, stream=True,
                            allow_redirects=allow_redirects)
    http_get.raise_for_status()
    with open(self.newfiles, 'wb') as filehandle:
        for chunk in http_get.iter_content(chunk_size=chunk_size):
            if chunk:
                filehandle.write(chunk)
    # Unpack archive and remove it after extraction
    try:
        self.log.info("Unpacking downloaded archive")
        unpack_archive(self.newfiles, self.updatedir)
    except UnrecognizedFormat:
        self.log.error("Retrieved version archive is invalid!\n"
                       "Please contact the software authors.\n"
                       "Please include the invalid archive "
                       "in a bug report.")
        # Keep the bad archive (renamed) for diagnostics
        os.rename(self.newfiles, self.newfiles + ".dump")
    else:
        # Remove archive only if unpack operation succeeded
        self.log.info("Removing archive after extraction")
        os.remove(self.newfiles)
def _replace_files(self):
    """Replaces the existing files with the downloaded files.

    Moves the old files (as listed in ``self.file_list``) into a
    temporary directory, writes a new filelist from the staged update,
    and copies the staged files into the current directory.  On error
    the temporary directory is intentionally left behind for
    diagnostics.

    :return: Whether update succeeded
    :rtype: bool
    """
    # Only replace if update and replacement are queued
    is_downloaded = os.path.isdir(self.updatedir) and \
                    os.listdir(self.updatedir)
    if not (os.path.isfile(self.queue_update) and is_downloaded):
        return False
    # Attempt to acquire code lock here and exit if unable to
    # The finally block runs after the "return" statement
    # This can cause a double-release under some circumstances
    # Acquiring the lock here prevents this from happening
    else:
        self.log.debug("Acquiring code log to update files")
        if not self.update.acquire(False):
            self.log.warning("Could not acquire lock to update files")
            return False
    try:
        # TODO: Make this code safer and possibly leave diagnostics
        # if the update operation errors out in the middle
        self.log.debug("Writing new version into {0}"
                       .format(self.version_doc))
        # Swap the queued version file into place via a .bak backup
        os.rename(self.version_doc, self.version_doc + ".bak")
        os.rename(self.queue_update, self.version_doc)
        os.remove(self.version_doc + ".bak")
        self.log.info("Replacing files")
        # Read in files from filelist and move to tempdir
        # If update fails, it is important to leave tempdir for diagnostics
        # Not cleaned up in a finally statement by design
        tempdir = tempfile.mkdtemp()
        self.log.debug("Created tempdir at {0}".format(tempdir))
        self.log.info("Backing up current filelist")
        filelist_backup = None
        try:
            filelist_backup = tempfile.NamedTemporaryFile(delete=False)
            with open(self.file_list, "r+b") as file_handle:
                shutil.copyfileobj(file_handle, filelist_backup)
        except Exception:
            self.log.exception("Backup of current filelist failed!")
            raise
        finally:
            if filelist_backup is not None:
                filelist_backup.close()
        self.log.info("Moving old files to tempdir")
        with open(self.file_list, "r") as file_handle:
            for line in file_handle:
                file_rm = os.path.normpath(os.path.join(".", line))
                file_rm = file_rm.rstrip("\n")
                # Confirm that each file in filelist exists
                if not os.path.isfile(file_rm):
                    self.log.error("{0} contains the invalid "
                                   "filepath {1}.\n"
                                   "Please check that {0} is not being "
                                   "used!\n"
                                   "Otherwise the {0} is corrupted.\n"
                                   "Updates will fail until this is "
                                   "restored."
                                   .format(self.file_list, file_rm))
                    warnings.warn("{0} is corrupted and contains the "
                                  "invalid path {1}!"
                                  .format(self.file_list, file_rm),
                                  CorruptedFileWarning,
                                  stacklevel=2)
                else:
                    file_rm_temp = os.path.join(tempdir, file_rm)
                    file_rm_temp_dir = os.path.dirname(file_rm_temp)
                    if not os.path.isdir(file_rm_temp_dir):
                        # exist_ok does not exist in Python 2
                        os.makedirs(file_rm_temp_dir)
                    # Never move the launcher's own bookkeeping files
                    if file_rm.split(os.path.sep)[0] not in \
                       [self.updatedir,
                        self.version_doc,
                        self.version_check_log]:
                        self.log.debug("Moving {0} to {1}".format(file_rm,
                                                                  tempdir))
                        shutil.move(file_rm, file_rm_temp)
                        # Prune directories emptied by the move
                        file_rm_dir = os.path.dirname(file_rm)
                        if os.path.isdir(file_rm_dir):
                            if not os.listdir(file_rm_dir):
                                os.rmdir(file_rm_dir)
                                self.log.debug("Removing directory {0}"
                                               .format(file_rm_dir))
        self.log.info("Removing old filelist")
        os.remove(self.file_list)
        self.log.info("Creating new filelist")
        filelist_new = list()
        relpath_start = os.path.join(self.updatedir)
        # Record every staged file relative to the staging directory
        for dirpath, _, filenames in os.walk(self.updatedir):
            # _ is dirnames, but it is unused
            for filename in filenames:
                filepath = os.path.normpath(os.path.join(dirpath,
                                                         filename))
                filepath = os.path.relpath(filepath, start=relpath_start)
                filepath += "\n"
                filelist_new.append(filepath)
        self.log.debug("New filelist is:\n" +
                       pprint.pformat(filelist_new))
        self.log.info("Writing new filelist to {0}"
                      .format(self.file_list))
        with open(self.file_list, "w") as file_handle:
            file_handle.writelines(filelist_new)
        self.log.info("Copying downloaded contents to current directory")
        copy_glob(os.path.join(self.updatedir, "*"), ".")
        self.log.info("Removing backup filelist")
        os.remove(filelist_backup.name)
        self.log.info("Removing tempdir")
        shutil.rmtree(tempdir)
        self._reset_update_files()
    except Exception:
        self.log.exception("An error occured during the update process.")
        self.log.error("The temporary directory used is left behind at {}."
                       .format(tempdir))
        raise
    finally:
        self.log.debug("Releasing lock after updating files")
        self.update.release()
    return True
def update_code(self):
    """Check for, download, and apply a newer version if one exists.

    :return: whether the update succeeded
    :rtype: bool
    """
    if not self.check_new():
        self.log.info("Already up to date")
        return False
    # check_new() created a self.queue_update file for us
    self.log.info("Beginning update process")
    # Reset the staging area unless it already exists and is empty
    staging_ready = (os.path.isdir(self.updatedir) and
                     not os.listdir(self.updatedir))
    if not staging_ready:
        self._reset_update_files()
    self._get_new()
    succeeded = self._replace_files()
    if succeeded:
        self._reset_update_files()
        self.log.info("Update successful")
    else:
        self.log.info("Update failed")
    return succeeded
| lgpl-2.1 |
ekiourk/ansible-modules-core | database/mysql/mysql_user.py | 24 | 17625 | #!/usr/bin/python
# (c) 2012, Mark Theunissen <mark.theunissen@gmail.com>
# Sponsored by Four Kitchens http://fourkitchens.com.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: mysql_user
short_description: Adds or removes a user from a MySQL database.
description:
- Adds or removes a user from a MySQL database.
version_added: "0.6"
options:
name:
description:
- name of the user (role) to add or remove
required: true
password:
description:
- set the user's password
required: false
default: null
host:
description:
- the 'host' part of the MySQL username
required: false
default: localhost
login_user:
description:
- The username used to authenticate with
required: false
default: null
login_password:
description:
- The password used to authenticate with
required: false
default: null
login_host:
description:
- Host running the database
required: false
default: localhost
login_port:
description:
- Port of the MySQL server
required: false
default: 3306
version_added: '1.4'
login_unix_socket:
description:
- The path to a Unix domain socket for local connections
required: false
default: null
priv:
description:
- "MySQL privileges string in the format: C(db.table:priv1,priv2)"
required: false
default: null
append_privs:
description:
- Append the privileges defined by priv to the existing ones for this
user instead of overwriting existing ones.
required: false
choices: [ "yes", "no" ]
default: "no"
version_added: "1.4"
state:
description:
- Whether the user should exist. When C(absent), removes
the user.
required: false
default: present
choices: [ "present", "absent" ]
check_implicit_admin:
description:
- Check if mysql allows login as root/nopassword before trying supplied credentials.
required: false
choices: [ "yes", "no" ]
default: "no"
version_added: "1.3"
update_password:
required: false
default: always
choices: ['always', 'on_create']
version_added: "2.0"
description:
- C(always) will update passwords if they differ. C(on_create) will only set the password for newly created users.
config_file:
description:
- Specify a config file from which user and password are to be read
required: false
default: '~/.my.cnf'
version_added: "2.0"
notes:
- Requires the MySQLdb Python package on the remote host. For Ubuntu, this
is as easy as apt-get install python-mysqldb.
- Both C(login_password) and C(login_user) are required when you are
passing credentials. If none are present, the module will attempt to read
the credentials from C(~/.my.cnf), and finally fall back to using the MySQL
default login of 'root' with no password.
- "MySQL server installs with default login_user of 'root' and no password. To secure this user
as part of an idempotent playbook, you must create at least two tasks: the first must change the root user's password,
without providing any login_user/login_password details. The second must drop a ~/.my.cnf file containing
the new root credentials. Subsequent runs of the playbook will then succeed by reading the new credentials from
the file."
requirements: [ "MySQLdb" ]
author: "Mark Theunissen (@marktheunissen)"
'''
EXAMPLES = """
# Create database user with name 'bob' and password '12345' with all database privileges
- mysql_user: name=bob password=12345 priv=*.*:ALL state=present
# Creates database user 'bob' and password '12345' with all database privileges and 'WITH GRANT OPTION'
- mysql_user: name=bob password=12345 priv=*.*:ALL,GRANT state=present
# Modify user Bob to require SSL connections. Note that REQUIRESSL is a special privilege that should only apply to *.* by itself.
- mysql_user: name=bob append_privs=true priv=*.*:REQUIRESSL state=present
# Ensure no user named 'sally' exists, also passing in the auth credentials.
- mysql_user: login_user=root login_password=123456 name=sally state=absent
# Specify grants composed of more than one word
- mysql_user: name=replication password=12345 priv=*.*:"REPLICATION CLIENT" state=present
# Revoke all privileges for user 'bob' and password '12345'
- mysql_user: name=bob password=12345 priv=*.*:USAGE state=present
# Example privileges string format
mydb.*:INSERT,UPDATE/anotherdb.*:SELECT/yetanotherdb.*:ALL
# Example using login_unix_socket to connect to server
- mysql_user: name=root password=abc123 login_unix_socket=/var/run/mysqld/mysqld.sock
# Example .my.cnf file for setting the root password
[client]
user=root
password=n<_665{vS43y
"""
import getpass
import tempfile
import re
# MySQLdb is an optional dependency; record its availability so main()
# can fail with a clear message instead of an ImportError traceback
try:
    import MySQLdb
except ImportError:
    mysqldb_found = False
else:
    mysqldb_found = True

# Complete set of privilege names accepted in a `priv` specification
# (used by privileges_unpack to validate user input)
VALID_PRIVS = frozenset(('CREATE', 'DROP', 'GRANT', 'GRANT OPTION',
                         'LOCK TABLES', 'REFERENCES', 'EVENT', 'ALTER',
                         'DELETE', 'INDEX', 'INSERT', 'SELECT', 'UPDATE',
                         'CREATE TEMPORARY TABLES', 'TRIGGER', 'CREATE VIEW',
                         'SHOW VIEW', 'ALTER ROUTINE', 'CREATE ROUTINE',
                         'EXECUTE', 'FILE', 'CREATE TABLESPACE', 'CREATE USER',
                         'PROCESS', 'PROXY', 'RELOAD', 'REPLICATION CLIENT',
                         'REPLICATION SLAVE', 'SHOW DATABASES', 'SHUTDOWN',
                         'SUPER', 'ALL', 'ALL PRIVILEGES', 'USAGE', 'REQUIRESSL'))
class InvalidPrivsError(Exception):
    """Raised when a privilege specification or grant string is invalid."""
    pass
# ===========================================
# MySQL module specific support methods.
#
def connect(module, login_user=None, login_password=None, config_file=''):
    """Open a MySQL connection and return a cursor on the 'mysql' database.

    Connection settings come from the module parameters; explicit
    login_user/login_password arguments override anything read from
    config_file.
    """
    params = module.params
    config = {'host': params['login_host'], 'db': 'mysql'}
    # A unix socket takes precedence over a TCP port
    if params['login_unix_socket']:
        config['unix_socket'] = params['login_unix_socket']
    else:
        config['port'] = params['login_port']
    # Pick up defaults (e.g. ~/.my.cnf) only if the file is present
    if os.path.exists(config_file):
        config['read_default_file'] = config_file
    # If login_user or login_password are given, they should override
    # the config file
    if login_user is not None:
        config['user'] = login_user
    if login_password is not None:
        config['passwd'] = login_password
    return MySQLdb.connect(**config).cursor()
def user_exists(cursor, user, host):
    """Return True if user@host already exists in the mysql.user table."""
    cursor.execute("SELECT count(*) FROM user WHERE user = %s AND host = %s",
                   (user, host))
    row = cursor.fetchone()
    return row[0] > 0
def user_add(cursor, user, host, password, new_priv):
    """Create user@host with the given password and optional privileges.

    new_priv maps db.table specifications to privilege lists (the format
    produced by privileges_unpack); pass None to grant nothing.
    """
    cursor.execute("CREATE USER %s@%s IDENTIFIED BY %s", (user, host, password))
    if new_priv is None:
        return True
    # Grant each requested db.table privilege set
    for db_table, priv in new_priv.iteritems():
        privileges_grant(cursor, user, host, db_table, priv)
    return True
def user_mod(cursor, user, host, password, new_priv, append_privs):
    """Update an existing user's password and/or privileges.

    :param cursor: open MySQL cursor
    :param user: user name of the account to modify
    :param host: host part of the MySQL account
    :param password: new plaintext password, or None to leave unchanged
    :param new_priv: dict mapping db.table specs to privilege lists
        (privileges_unpack format), or None to leave privileges alone
    :param append_privs: if true, only add privileges; never revoke
    :return: True if anything was changed
    """
    changed = False
    grant_option = False
    # Handle passwords
    if password is not None:
        cursor.execute("SELECT password FROM user WHERE user = %s AND host = %s", (user,host))
        current_pass_hash = cursor.fetchone()
        # Hash the candidate password server-side to compare like-for-like
        cursor.execute("SELECT PASSWORD(%s)", (password,))
        new_pass_hash = cursor.fetchone()
        if current_pass_hash[0] != new_pass_hash[0]:
            cursor.execute("SET PASSWORD FOR %s@%s = PASSWORD(%s)", (user,host,password))
            changed = True
    # Handle privileges
    if new_priv is not None:
        curr_priv = privileges_get(cursor, user,host)
        # If the user has privileges on a db.table that doesn't appear at all in
        # the new specification, then revoke all privileges on it.
        for db_table, priv in curr_priv.iteritems():
            # If the user has the GRANT OPTION on a db.table, revoke it first.
            if "GRANT" in priv:
                grant_option = True
            if db_table not in new_priv:
                if user != "root" and "PROXY" not in priv and not append_privs:
                    privileges_revoke(cursor, user,host,db_table,priv,grant_option)
                    changed = True
        # If the user doesn't currently have any privileges on a db.table, then
        # we can perform a straight grant operation.
        for db_table, priv in new_priv.iteritems():
            if db_table not in curr_priv:
                privileges_grant(cursor, user,host,db_table,priv)
                changed = True
        # If the db.table specification exists in both the user's current privileges
        # and in the new privileges, then we need to see if there's a difference.
        db_table_intersect = set(new_priv.keys()) & set(curr_priv.keys())
        for db_table in db_table_intersect:
            priv_diff = set(new_priv[db_table]) ^ set(curr_priv[db_table])
            if (len(priv_diff) > 0):
                # Revoke-then-grant to make the privilege set exact
                if not append_privs:
                    privileges_revoke(cursor, user,host,db_table,curr_priv[db_table],grant_option)
                privileges_grant(cursor, user,host,db_table,new_priv[db_table])
                changed = True
    return changed
def user_delete(cursor, user, host):
    """Drop the user@host account; always reports a change."""
    cursor.execute("DROP USER %s@%s", (user, host))
    return True
def privileges_get(cursor, user, host):
    """Return the user's current privileges as a dictionary.

    MySQL only exposes grants through the SHOW GRANTS query syntax,
    e.g.::

        GRANT USAGE ON *.* TO 'user'@'localhost' IDENTIFIED BY 'pass';

    so each returned grant string is parsed with a regex.  The result
    maps each db.table specification to a list of privilege names, in
    the same format as privileges_unpack() below.
    """
    def normalize(name):
        # SHOW GRANTS reports the full privilege set as 'ALL PRIVILEGES'
        return 'ALL' if name == 'ALL PRIVILEGES' else name

    grants_found = {}
    cursor.execute("SHOW GRANTS FOR %s@%s", (user, host))
    for row in cursor.fetchall():
        matched = re.match(
            "GRANT (.+) ON (.+) TO '.*'@'.+'( IDENTIFIED BY PASSWORD '.+')? ?(.*)",
            row[0])
        if matched is None:
            raise InvalidPrivsError('unable to parse the MySQL grant string: %s' % row[0])
        privileges = [normalize(p) for p in matched.group(1).split(", ")]
        # Trailing clauses carry GRANT OPTION and REQUIRE SSL markers
        trailer = matched.group(4)
        if "WITH GRANT OPTION" in trailer:
            privileges.append('GRANT')
        if "REQUIRE SSL" in trailer:
            privileges.append('REQUIRESSL')
        grants_found[matched.group(2)] = privileges
    return grants_found
def privileges_unpack(priv):
    """ Take a privileges string, typically passed as a parameter, and unserialize
    it into a dictionary, the same format as privileges_get() above. We have this
    custom format to avoid using YAML/JSON strings inside YAML playbooks. Example
    of a privileges string:

        mydb.*:INSERT,UPDATE/anotherdb.*:SELECT/yetanother.*:ALL

    The privilege USAGE stands for no privileges, so we add that in on *.* if it's
    not specified in the string, as MySQL will always provide this by default.

    :param priv: the raw privileges string
    :return: dict mapping quoted db.table specs to privilege-name lists
    :raises InvalidPrivsError: if any privilege name is not in VALID_PRIVS
    """
    output = {}
    privs = []
    for item in priv.strip().split('/'):
        pieces = item.strip().split(':')
        # Backtick-quote the database part; the table part is left as-is
        dbpriv = pieces[0].rsplit(".", 1)
        pieces[0] = "`%s`.%s" % (dbpriv[0].strip('`'), dbpriv[1])
        if '(' in pieces[1]:
            # Column-level privileges, e.g. "SELECT (col1,col2)"; strip
            # the column lists before validation
            output[pieces[0]] = re.split(r',\s*(?=[^)]*(?:\(|$))', pieces[1].upper())
            for i in output[pieces[0]]:
                privs.append(re.sub(r'\(.*\)','',i))
        else:
            output[pieces[0]] = pieces[1].upper().split(',')
            # BUGFIX: accumulate with extend() instead of rebinding privs.
            # Rebinding discarded privileges collected from earlier items,
            # so only the LAST entry of a multi-part priv string was
            # validated against VALID_PRIVS below.
            privs.extend(output[pieces[0]])
    # Validate every collected privilege name, not just the last item's
    new_privs = frozenset(privs)
    if not new_privs.issubset(VALID_PRIVS):
        raise InvalidPrivsError('Invalid privileges specified: %s' % new_privs.difference(VALID_PRIVS))
    if '*.*' not in output:
        output['*.*'] = ['USAGE']
    # if we are only specifying something like REQUIRESSL in *.* we still need
    # to add USAGE as a privilege to avoid syntax errors
    if priv.find('REQUIRESSL') != -1 and 'USAGE' not in output['*.*']:
        output['*.*'].append('USAGE')
    return output
def privileges_revoke(cursor, user, host, db_table, priv, grant_option):
    """Revoke the given privileges (and GRANT OPTION) from user@host."""
    # Escape '%' since mysql db.execute() treats the query as a format string
    escaped_table = db_table.replace('%', '%%')
    quoted_table = mysql_quote_identifier(escaped_table, 'table')
    # GRANT OPTION must be revoked with its own statement first
    if grant_option:
        cursor.execute("REVOKE GRANT OPTION ON " + quoted_table +
                       " FROM %s@%s", (user, host))
    # GRANT/REQUIRESSL are handled above / not revocable privilege names
    revocable = ",".join([p for p in priv if p not in ('GRANT', 'REQUIRESSL')])
    cursor.execute("REVOKE " + revocable + " ON " + quoted_table +
                   " FROM %s@%s", (user, host))
def privileges_grant(cursor, user, host, db_table, priv):
    """Grant the given privileges on db_table to user@host."""
    # Escape '%' since mysql db.execute uses a format string and the
    # specification of db and table often use a % (SQL wildcard)
    quoted_table = mysql_quote_identifier(db_table.replace('%', '%%'), 'table')
    # GRANT/REQUIRESSL are markers handled via extra clauses below
    grantable = ",".join([p for p in priv if p not in ('GRANT', 'REQUIRESSL')])
    clauses = ["GRANT %s ON %s" % (grantable, quoted_table), "TO %s@%s"]
    if 'GRANT' in priv:
        clauses.append("WITH GRANT OPTION")
    if 'REQUIRESSL' in priv:
        clauses.append("REQUIRE SSL")
    cursor.execute(' '.join(clauses), (user, host))
# ===========================================
# Module execution.
#
def main():
    """Ansible module entry point: ensure a MySQL user is present or absent.

    Parses module parameters, connects to the server (optionally trying an
    implicit passwordless root login first), then creates, updates, or
    deletes the requested user and reports whether anything changed.
    """
    module = AnsibleModule(
        argument_spec=dict(
            login_user=dict(default=None),
            login_password=dict(default=None),
            login_host=dict(default="localhost"),
            login_port=dict(default=3306, type='int'),
            login_unix_socket=dict(default=None),
            user=dict(required=True, aliases=['name']),
            password=dict(default=None, no_log=True),
            host=dict(default="localhost"),
            state=dict(default="present", choices=["absent", "present"]),
            priv=dict(default=None),
            append_privs=dict(default=False, type='bool'),
            check_implicit_admin=dict(default=False, type='bool'),
            update_password=dict(default="always", choices=["always", "on_create"]),
            config_file=dict(default="~/.my.cnf"),
        )
    )
    login_user = module.params["login_user"]
    login_password = module.params["login_password"]
    user = module.params["user"]
    password = module.params["password"]
    # Normalize: MySQL host matching is case-insensitive.
    host = module.params["host"].lower()
    state = module.params["state"]
    priv = module.params["priv"]
    check_implicit_admin = module.params['check_implicit_admin']
    config_file = module.params['config_file']
    append_privs = module.boolean(module.params["append_privs"])
    update_password = module.params['update_password']

    config_file = os.path.expanduser(os.path.expandvars(config_file))
    if not mysqldb_found:
        module.fail_json(msg="the python mysqldb module is required")

    if priv is not None:
        try:
            priv = privileges_unpack(priv)
        # BUGFIX: 'except Exception, e' is Python-2-only syntax and makes this
        # file a SyntaxError on Python 3; 'as e' is valid on 2.6+ and 3.x.
        except Exception as e:
            module.fail_json(msg="invalid privileges string: %s" % str(e))

    cursor = None
    try:
        if check_implicit_admin:
            # Best effort: some installs allow a passwordless root login.
            # Narrowed from a bare 'except:' so SystemExit/KeyboardInterrupt
            # are not swallowed.
            try:
                cursor = connect(module, 'root', '', config_file)
            except Exception:
                pass

        if not cursor:
            cursor = connect(module, login_user, login_password, config_file)
    except Exception as e:
        module.fail_json(msg="unable to connect to database, check login_user and login_password are correct or ~/.my.cnf has the credentials. Exception message: %s" % e)

    if state == "present":
        if user_exists(cursor, user, host):
            try:
                if update_password == 'always':
                    changed = user_mod(cursor, user, host, password, priv, append_privs)
                else:
                    # update_password == 'on_create': leave the password alone.
                    changed = user_mod(cursor, user, host, None, priv, append_privs)
            except (SQLParseError, InvalidPrivsError, MySQLdb.Error) as e:
                module.fail_json(msg=str(e))
        else:
            if password is None:
                module.fail_json(msg="password parameter required when adding a user")
            try:
                changed = user_add(cursor, user, host, password, priv)
            except (SQLParseError, InvalidPrivsError, MySQLdb.Error) as e:
                module.fail_json(msg=str(e))
    elif state == "absent":
        if user_exists(cursor, user, host):
            changed = user_delete(cursor, user, host)
        else:
            changed = False
    module.exit_json(changed=changed, user=user)
# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.database import *
if __name__ == '__main__':
main()
| gpl-3.0 |
egaxegax/django-dbcartajs | django/contrib/webdesign/tests.py | 232 | 1092 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import unittest
from django.contrib.webdesign.lorem_ipsum import *
from django.template import loader, Context
class WebdesignTest(unittest.TestCase):
    """Exercise the lorem-ipsum helpers and the ``{% lorem %}`` template tag."""

    def test_words(self):
        # words(n) yields the first n canonical lorem-ipsum words.
        expected = 'lorem ipsum dolor sit amet consectetur adipisicing'
        self.assertEqual(words(7), expected)

    def test_paragraphs(self):
        # paragraphs(1) returns a list containing the single fixed paragraph.
        expected = ['Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.']
        self.assertEqual(paragraphs(1), expected)

    def test_lorem_tag(self):
        # {% lorem 3 w %} renders exactly three lowercase lorem words.
        template = loader.get_template_from_string(
            "{% load webdesign %}{% lorem 3 w %}")
        rendered = template.render(Context({}))
        self.assertEqual(rendered, 'lorem ipsum dolor')
| gpl-2.0 |
mivade/tornado | tornado/platform/caresresolver.py | 4 | 3465 | import pycares # type: ignore
import socket
from tornado.concurrent import Future
from tornado import gen
from tornado.ioloop import IOLoop
from tornado.netutil import Resolver, is_valid_ip
import typing
if typing.TYPE_CHECKING:
from typing import Generator, Any, List, Tuple, Dict # noqa: F401
class CaresResolver(Resolver):
    """Name resolver based on the c-ares library.

    This is a non-blocking and non-threaded resolver. It may not produce
    the same results as the system resolver, but can be used for non-blocking
    resolution when threads cannot be used.

    c-ares fails to resolve some names when ``family`` is ``AF_UNSPEC``,
    so it is only recommended for use in ``AF_INET`` (i.e. IPv4). This is
    the default for ``tornado.simple_httpclient``, but other libraries
    may default to ``AF_UNSPEC``.

    .. versionchanged:: 5.0
       The ``io_loop`` argument (deprecated since version 4.1) has been removed.

    .. deprecated:: 6.2
       This class is deprecated and will be removed in Tornado 7.0. Use the default
       thread-based resolver instead.
    """

    def initialize(self) -> None:
        # Create a pycares channel whose socket-state callback keeps the
        # IOLoop handler registrations in sync with c-ares' sockets.
        self.io_loop = IOLoop.current()
        self.channel = pycares.Channel(sock_state_cb=self._sock_state_cb)
        # Maps each watched file descriptor to the IOLoop event mask we
        # currently have registered for it.
        self.fds = {}  # type: Dict[int, int]

    def _sock_state_cb(self, fd: int, readable: bool, writable: bool) -> None:
        # Called by pycares whenever its interest in a socket changes;
        # translate that into add/update/remove calls on the IOLoop.
        state = (IOLoop.READ if readable else 0) | (IOLoop.WRITE if writable else 0)
        if not state:
            # c-ares is done with this socket: stop watching it.
            self.io_loop.remove_handler(fd)
            del self.fds[fd]
        elif fd in self.fds:
            # Already watched: just adjust the registered event mask.
            self.io_loop.update_handler(fd, state)
            self.fds[fd] = state
        else:
            # New socket: begin watching it with the requested mask.
            self.io_loop.add_handler(fd, self._handle_events, state)
            self.fds[fd] = state

    def _handle_events(self, fd: int, events: int) -> None:
        # IOLoop callback: tell c-ares which direction(s) are ready on fd.
        # ARES_SOCKET_BAD means "no socket" for the corresponding direction.
        read_fd = pycares.ARES_SOCKET_BAD
        write_fd = pycares.ARES_SOCKET_BAD
        if events & IOLoop.READ:
            read_fd = fd
        if events & IOLoop.WRITE:
            write_fd = fd
        self.channel.process_fd(read_fd, write_fd)

    @gen.coroutine
    def resolve(
        self, host: str, port: int, family: int = 0
    ) -> "Generator[Any, Any, List[Tuple[int, Any]]]":
        """Resolve *host* to a list of ``(family, (address, port))`` tuples.

        Literal IP addresses are returned without querying c-ares. Raises
        ``IOError`` when the lookup fails or when a resolved address does
        not match the requested *family*.
        """
        if is_valid_ip(host):
            # Already a literal address: no DNS lookup needed.
            addresses = [host]
        else:
            # gethostbyname doesn't take callback as a kwarg
            fut = Future()  # type: Future[Tuple[Any, Any]]
            self.channel.gethostbyname(
                host, family, lambda result, error: fut.set_result((result, error))
            )
            result, error = yield fut
            if error:
                raise IOError(
                    "C-Ares returned error %s: %s while resolving %s"
                    % (error, pycares.errno.strerror(error), host)
                )
            addresses = result.addresses
        addrinfo = []
        for address in addresses:
            # Infer the address family from the textual form of the address.
            if "." in address:
                address_family = socket.AF_INET
            elif ":" in address:
                address_family = socket.AF_INET6
            else:
                address_family = socket.AF_UNSPEC
            if family != socket.AF_UNSPEC and family != address_family:
                raise IOError(
                    "Requested socket family %d but got %d" % (family, address_family)
                )
            addrinfo.append((typing.cast(int, address_family), (address, port)))
        return addrinfo
| apache-2.0 |
saimn/glue | glue/core/decorators.py | 5 | 2231 | from __future__ import absolute_import, division, print_function
from functools import wraps
__all__ = ['memoize', 'singleton', 'memoize_attr_check']
def _make_key(args, kwargs):
return args, frozenset(kwargs.items())
def memoize(func):
    """Cache *func*'s results so equal calls are only computed once.

    Calls with unhashable arguments cannot be cached and fall back to
    invoking *func* directly. The cache dict is exposed on the wrapper as
    ``__memoize_cache`` so it can be emptied by ``clear_cache``.
    """
    cache = {}

    @wraps(func)
    def memoized(*args, **kwargs):
        # Building the key hashes the keyword values; an unhashable keyword
        # value means this call cannot be cached at all.
        try:
            key = (args, frozenset(kwargs.items()))
        except TypeError:
            return func(*args, **kwargs)
        # The positional part of the key is only hashed when the dict is
        # probed, so an unhashable positional argument surfaces here as a
        # second TypeError.
        try:
            return cache[key]
        except TypeError:
            return func(*args, **kwargs)
        except KeyError:
            value = func(*args, **kwargs)
            cache[key] = value
            return value

    memoized.__memoize_cache = cache
    return memoized
def clear_cache(func):
    """
    Drop all memoized results stored on *func* by ``memoize``.

    Safe to call on functions that were never decorated; those are
    silently left untouched.
    """
    cache = getattr(func, '__memoize_cache', None)
    if cache is not None:
        cache.clear()
def memoize_attr_check(attr):
    """Memoize a method on both its arguments and ``getattr(self, attr)``.

    The named attribute of the first positional argument (normally ``self``)
    becomes part of the cache key, so results are recomputed whenever that
    attribute changes value.
    """
    def decorator(func):
        @memoize
        def cached(*args, **kwargs):
            # args[0] is the attribute snapshot; drop it before calling func.
            return func(*args[1:], **kwargs)

        @wraps(func)
        def wrapper(*args, **kwargs):
            # Prepend the current attribute value so it joins the cache key.
            attr_value = getattr(args[0], attr)
            return cached(attr_value, *args, **kwargs)

        return wrapper

    return decorator
def singleton(cls):
    """Decorate *cls* so every instantiation returns one shared instance."""
    instances = {}

    @wraps(cls)
    def get_instance():
        # Lazily construct the single instance on first use.
        try:
            return instances[cls]
        except KeyError:
            instances[cls] = cls()
            return instances[cls]

    return get_instance
| bsd-3-clause |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.