# ============================================================================
# File: peach/fuzzy/cmeans.py
# Repo: anki1909/peach | copies: 6 | size: 8245 | license: lgpl-2.1
# ============================================================================
################################################################################
# Peach - Computational Intelligence for Python
# Jose Alexandre Nalon
#
# This file: fuzzy/cmeans.py
# Fuzzy C-Means algorithm
################################################################################
# Doc string, reStructuredText formatted:
__doc__ = """
Fuzzy C-Means
Fuzzy C-Means is a clustering algorithm based on fuzzy logic.
This package implements the fuzzy c-means algorithm for clustering and
classification. This algorithm is very simple, yet very efficient. From a
training set and an initial condition giving the membership values of each
example in the training set to the clusters, it converges very quickly to
crisper sets.
The initial conditions, i.e., the starting memberships, must follow some
rules. Please refer to any bibliography about the subject to see why. Those
rules are: no class may have membership 0 (or 1) for every example, and the
memberships of every example must sum to 1. This means that the initial
condition is a fuzzy partition of the universe.
"""
################################################################################
import numpy
from numpy import dot, array, sum, zeros, outer, any
################################################################################
# Fuzzy C-Means class
################################################################################
class FuzzyCMeans(object):
'''
Fuzzy C-Means convergence.
Use this class to instantiate a fuzzy c-means object. The object must be
given a training set and initial conditions. The training set is a list or
an array of N-dimensional vectors; the initial conditions are a list of the
initial membership values for every vector in the training set -- thus, the
length of both lists must be the same. The number of columns in the initial
conditions must be the same number of classes. That is, if you are, for
example, classifying in ``C`` classes, then the initial conditions must have
``C`` columns.
    There are restrictions on the initial conditions: first, no column can be
    all zeros or all ones -- if that happens, then the class described by that
    column is unnecessary; second, the memberships of every example must sum
    to one -- that is, in each line, the sum across every column must be one.
    This means that the initial condition is a fuzzy partition of ``C``
    subsets.
Notice, however, that *no checking* is done. If your algorithm seems to be
behaving strangely, try to check these conditions.
'''
def __init__(self, training_set, initial_conditions, m=2.):
'''
Initializes the algorithm.
:Parameters:
training_set
A list or array of vectors containing the data to be classified.
Each of the vectors in this list *must* have the same dimension, or
the algorithm won't behave correctly. Notice that each vector can be
given as a tuple -- internally, everything is converted to arrays.
initial_conditions
A list or array of vectors containing the initial membership values
associated to each example in the training set. Each column of this
array contains the membership assigned to the corresponding class
for that vector. Notice that each vector can be given as a tuple --
internally, everything is converted to arrays.
m
            This is the aggregation value. The bigger it is, the smoother the
            classification will be. Please consult the bibliography about the
            subject. ``m`` must be bigger than 1. Its default value is 2.
'''
self.__x = array(training_set)
self.__mu = array(initial_conditions)
self.m = m
        '''The fuzziness coefficient. Must be bigger than 1; the closer it is
        to 1, the crisper the resulting memberships will be.'''
self.__c = self.centers()
def __getc(self):
return self.__c
def __setc(self, c):
self.__c = array(c).reshape(self.__c.shape)
c = property(__getc, __setc)
'''A ``numpy`` array containing the centers of the classes in the algorithm.
Each line represents a center, and the number of lines is the number of
classes. This property is read and write, but care must be taken when
    setting new centers: if the dimensions are not exactly the same as given in
    the instantiation of the class (*ie*, *C* centers of dimension *N*), an
    exception will be raised.'''
def __getmu(self):
return self.__mu
mu = property(__getmu, None)
'''The membership values for every vector in the training set. This property
is modified at each step of the execution of the algorithm. This property is
not writable.'''
def __getx(self):
return self.__x
x = property(__getx, None)
'''The vectors in which the algorithm bases its convergence. This property
is not writable.'''
def centers(self):
'''
Given the present state of the algorithm, recalculates the centers, that
is, the position of the vectors representing each of the classes. Notice
that this method modifies the state of the algorithm if any change was
made to any parameter. This method receives no arguments and will seldom
        be used externally. It can be useful if you want to step over the
        algorithm. *This method has a side effect!* If you use it, the
        ``c`` property (see above) will be modified.
:Returns:
A vector containing, in each line, the position of the centers of the
algorithm.
'''
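        # Standard FCM center update: each center is the mean of the training
        # vectors weighted by the memberships raised to m,
        #   c[j] = sum_k(mu[k, j]**m * x[k]) / sum_k(mu[k, j]**m)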
mm = self.__mu ** self.m
c = dot(self.__x.T, mm) / sum(mm, axis=0)
self.__c = c.T
return self.__c
def membership(self):
'''
Given the present state of the algorithm, recalculates the membership of
each example on each class. That is, it modifies the initial conditions
to represent an evolved state of the algorithm. Notice that this method
modifies the state of the algorithm if any change was made to any
parameter.
:Returns:
A vector containing, in each line, the membership of the corresponding
example in each class.
'''
x = self.__x
c = self.__c
M, _ = x.shape
C, _ = c.shape
r = zeros((M, C))
m1 = 1./(self.m-1.)
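        # Standard FCM membership update based on squared distances to the
        # centers: mu[k, j] = 1 / sum_i((d[j] / d[i]) ** (1/(m-1))). If an
        # example coincides with a center (a zero distance), the whole update
        # is skipped and the current memberships are kept, to avoid dividing
        # by zero.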
for k in range(M):
den = sum((x[k] - c)**2., axis=1)
if any(den == 0):
return self.__mu
frac = outer(den, 1./den)**m1
r[k, :] = 1. / sum(frac, axis=1)
self.__mu = r
return self.__mu
def step(self):
'''
This method runs one step of the algorithm. It might be useful to track
the changes in the parameters.
:Returns:
The norm of the change in the membership values of the examples. It
can be used to track convergence and as an estimate of the error.
'''
old = self.__mu
self.membership()
self.centers()
        return sum((self.__mu - old) ** 2.)
def __call__(self, emax=1.e-10, imax=20):
'''
The ``__call__`` interface is used to run the algorithm until
convergence is found.
:Parameters:
emax
Specifies the maximum error admitted in the execution of the
algorithm. It defaults to 1.e-10. The error is tracked according to
the norm returned by the ``step()`` method.
imax
Specifies the maximum number of iterations admitted in the execution
of the algorithm. It defaults to 20.
:Returns:
An array containing, at each line, the vectors representing the
centers of the clustered regions.
'''
error = 1.
i = 0
while error > emax and i < imax:
error = self.step()
i = i + 1
return self.c
################################################################################
# Test.
if __name__ == "__main__":
    pass

# ============================================================================
# File: Lib/site-packages/scipy/integrate/tests/test_quadrature.py
# Repo: ettm2012/MissionPlanner | copies: 57 | size: 3469 | license: gpl-3.0
# ============================================================================
import numpy
from numpy import cos, sin, pi
from numpy.testing import TestCase, run_module_suite, assert_equal, \
assert_almost_equal, assert_allclose
from scipy.integrate import quadrature, romberg, romb, newton_cotes
class TestQuadrature(TestCase):
def quad(self, x, a, b, args):
raise NotImplementedError
def test_quadrature(self):
# Typical function with two extra arguments:
def myfunc(x,n,z): # Bessel function integrand
return cos(n*x-z*sin(x))/pi
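        # Integrated from 0 to pi, this is the standard integral
        # representation of the Bessel function J_n(z); the table value
        # below is J_2(1.8).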
val, err = quadrature(myfunc,0,pi,(2,1.8))
table_val = 0.30614353532540296487
assert_almost_equal(val, table_val, decimal=7)
def test_quadrature_rtol(self):
def myfunc(x,n,z): # Bessel function integrand
return 1e90 * cos(n*x-z*sin(x))/pi
val, err = quadrature(myfunc,0,pi,(2,1.8),rtol=1e-10)
table_val = 1e90 * 0.30614353532540296487
assert_allclose(val, table_val, rtol=1e-10)
def test_romberg(self):
# Typical function with two extra arguments:
def myfunc(x, n, z): # Bessel function integrand
return cos(n*x-z*sin(x))/pi
val = romberg(myfunc,0,pi, args=(2, 1.8))
table_val = 0.30614353532540296487
assert_almost_equal(val, table_val, decimal=7)
def test_romberg_rtol(self):
# Typical function with two extra arguments:
def myfunc(x, n, z): # Bessel function integrand
return 1e19*cos(n*x-z*sin(x))/pi
val = romberg(myfunc,0,pi, args=(2, 1.8), rtol=1e-10)
table_val = 1e19*0.30614353532540296487
assert_allclose(val, table_val, rtol=1e-10)
def test_romb(self):
assert_equal(romb(numpy.arange(17)),128)
def test_non_dtype(self):
# Check that we work fine with functions returning float
import math
valmath = romberg(math.sin, 0, 1)
expected_val = 0.45969769413185085
assert_almost_equal(valmath, expected_val, decimal=7)
def test_newton_cotes(self):
"""Test the first few degrees, for evenly spaced points."""
n = 1
wts, errcoff = newton_cotes(n, 1)
assert_equal(wts, n*numpy.array([0.5, 0.5]))
assert_almost_equal(errcoff, -n**3/12.0)
n = 2
wts, errcoff = newton_cotes(n, 1)
assert_almost_equal(wts, n*numpy.array([1.0, 4.0, 1.0])/6.0)
assert_almost_equal(errcoff, -n**5/2880.0)
n = 3
wts, errcoff = newton_cotes(n, 1)
assert_almost_equal(wts, n*numpy.array([1.0, 3.0, 3.0, 1.0])/8.0)
assert_almost_equal(errcoff, -n**5/6480.0)
n = 4
wts, errcoff = newton_cotes(n, 1)
assert_almost_equal(wts, n*numpy.array([7.0, 32.0, 12.0, 32.0, 7.0])/90.0)
assert_almost_equal(errcoff, -n**7/1935360.0)
def test_newton_cotes2(self):
"""Test newton_cotes with points that are not evenly spaced."""
x = numpy.array([0.0, 1.5, 2.0])
y = x**2
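        # A rule built on three sample points is exact for quadratics, so the
        # weighted sum should reproduce integral(x**2, 0, 2) = 8/3 exactly.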
wts, errcoff = newton_cotes(x)
exact_integral = 8.0/3
numeric_integral = numpy.dot(wts, y)
assert_almost_equal(numeric_integral, exact_integral)
x = numpy.array([0.0, 1.4, 2.1, 3.0])
y = x**2
wts, errcoff = newton_cotes(x)
exact_integral = 9.0
numeric_integral = numpy.dot(wts, y)
assert_almost_equal(numeric_integral, exact_integral)
if __name__ == "__main__":
run_module_suite()
# ============================================================================
# File: lib/chardet/big5freq.py
# Repo: pjg101/SickRage | copies: 342 | size: 31254 | license: gpl-3.0
# ============================================================================
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
# Big5 frequency table
# by Taiwan's Mandarin Promotion Council
# <http://www.edu.tw:81/mandr/>
#
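# Reading the table below: the N most frequent characters cover the given
# cumulative fraction of all character occurrences in the reference corpus.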
# 128 --> 0.42261
# 256 --> 0.57851
# 512 --> 0.74851
# 1024 --> 0.89384
# 2048 --> 0.97583
#
# Ideal Distribution Ratio = 0.74851 / (1 - 0.74851) = 2.98
# Random Distribution Ratio = 512 / (5401 - 512) = 0.105
#
# The typical distribution ratio is about 25% of the ideal one, but still much
# higher than the random distribution ratio.
BIG5_TYPICAL_DISTRIBUTION_RATIO = 0.75
# Char to FreqOrder table
BIG5_TABLE_SIZE = 5376
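# How this table is used (sketch of chardet's CharDistributionAnalysis): each
# decoded Big5 character maps to an index in the table; the stored value is
# the character's frequency rank, and characters ranked under 512 count as
# "frequent" when the observed distribution ratio is compared against the
# typical ratio above.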
BIG5_CHAR_TO_FREQ_ORDER = (
1,1801,1506, 255,1431, 198, 9, 82, 6,5008, 177, 202,3681,1256,2821, 110, # 16
3814, 33,3274, 261, 76, 44,2114, 16,2946,2187,1176, 659,3971, 26,3451,2653, # 32
1198,3972,3350,4202, 410,2215, 302, 590, 361,1964, 8, 204, 58,4510,5009,1932, # 48
63,5010,5011, 317,1614, 75, 222, 159,4203,2417,1480,5012,3555,3091, 224,2822, # 64
3682, 3, 10,3973,1471, 29,2787,1135,2866,1940, 873, 130,3275,1123, 312,5013, # 80
4511,2052, 507, 252, 682,5014, 142,1915, 124, 206,2947, 34,3556,3204, 64, 604, # 96
5015,2501,1977,1978, 155,1991, 645, 641,1606,5016,3452, 337, 72, 406,5017, 80, # 112
630, 238,3205,1509, 263, 939,1092,2654, 756,1440,1094,3453, 449, 69,2987, 591, # 128
179,2096, 471, 115,2035,1844, 60, 50,2988, 134, 806,1869, 734,2036,3454, 180, # 144
995,1607, 156, 537,2907, 688,5018, 319,1305, 779,2145, 514,2379, 298,4512, 359, # 160
2502, 90,2716,1338, 663, 11, 906,1099,2553, 20,2441, 182, 532,1716,5019, 732, # 176
1376,4204,1311,1420,3206, 25,2317,1056, 113, 399, 382,1950, 242,3455,2474, 529, # 192
3276, 475,1447,3683,5020, 117, 21, 656, 810,1297,2300,2334,3557,5021, 126,4205, # 208
706, 456, 150, 613,4513, 71,1118,2037,4206, 145,3092, 85, 835, 486,2115,1246, # 224
1426, 428, 727,1285,1015, 800, 106, 623, 303,1281,5022,2128,2359, 347,3815, 221, # 240
3558,3135,5023,1956,1153,4207, 83, 296,1199,3093, 192, 624, 93,5024, 822,1898, # 256
2823,3136, 795,2065, 991,1554,1542,1592, 27, 43,2867, 859, 139,1456, 860,4514, # 272
437, 712,3974, 164,2397,3137, 695, 211,3037,2097, 195,3975,1608,3559,3560,3684, # 288
3976, 234, 811,2989,2098,3977,2233,1441,3561,1615,2380, 668,2077,1638, 305, 228, # 304
1664,4515, 467, 415,5025, 262,2099,1593, 239, 108, 300, 200,1033, 512,1247,2078, # 320
5026,5027,2176,3207,3685,2682, 593, 845,1062,3277, 88,1723,2038,3978,1951, 212, # 336
266, 152, 149, 468,1899,4208,4516, 77, 187,5028,3038, 37, 5,2990,5029,3979, # 352
5030,5031, 39,2524,4517,2908,3208,2079, 55, 148, 74,4518, 545, 483,1474,1029, # 368
1665, 217,1870,1531,3138,1104,2655,4209, 24, 172,3562, 900,3980,3563,3564,4519, # 384
32,1408,2824,1312, 329, 487,2360,2251,2717, 784,2683, 4,3039,3351,1427,1789, # 400
188, 109, 499,5032,3686,1717,1790, 888,1217,3040,4520,5033,3565,5034,3352,1520, # 416
3687,3981, 196,1034, 775,5035,5036, 929,1816, 249, 439, 38,5037,1063,5038, 794, # 432
3982,1435,2301, 46, 178,3278,2066,5039,2381,5040, 214,1709,4521, 804, 35, 707, # 448
324,3688,1601,2554, 140, 459,4210,5041,5042,1365, 839, 272, 978,2262,2580,3456, # 464
2129,1363,3689,1423, 697, 100,3094, 48, 70,1231, 495,3139,2196,5043,1294,5044, # 480
2080, 462, 586,1042,3279, 853, 256, 988, 185,2382,3457,1698, 434,1084,5045,3458, # 496
314,2625,2788,4522,2335,2336, 569,2285, 637,1817,2525, 757,1162,1879,1616,3459, # 512
287,1577,2116, 768,4523,1671,2868,3566,2526,1321,3816, 909,2418,5046,4211, 933, # 528
3817,4212,2053,2361,1222,4524, 765,2419,1322, 786,4525,5047,1920,1462,1677,2909, # 544
1699,5048,4526,1424,2442,3140,3690,2600,3353,1775,1941,3460,3983,4213, 309,1369, # 560
1130,2825, 364,2234,1653,1299,3984,3567,3985,3986,2656, 525,1085,3041, 902,2001, # 576
1475, 964,4527, 421,1845,1415,1057,2286, 940,1364,3141, 376,4528,4529,1381, 7, # 592
2527, 983,2383, 336,1710,2684,1846, 321,3461, 559,1131,3042,2752,1809,1132,1313, # 608
265,1481,1858,5049, 352,1203,2826,3280, 167,1089, 420,2827, 776, 792,1724,3568, # 624
4214,2443,3281,5050,4215,5051, 446, 229, 333,2753, 901,3818,1200,1557,4530,2657, # 640
1921, 395,2754,2685,3819,4216,1836, 125, 916,3209,2626,4531,5052,5053,3820,5054, # 656
5055,5056,4532,3142,3691,1133,2555,1757,3462,1510,2318,1409,3569,5057,2146, 438, # 672
2601,2910,2384,3354,1068, 958,3043, 461, 311,2869,2686,4217,1916,3210,4218,1979, # 688
383, 750,2755,2627,4219, 274, 539, 385,1278,1442,5058,1154,1965, 384, 561, 210, # 704
98,1295,2556,3570,5059,1711,2420,1482,3463,3987,2911,1257, 129,5060,3821, 642, # 720
523,2789,2790,2658,5061, 141,2235,1333, 68, 176, 441, 876, 907,4220, 603,2602, # 736
710, 171,3464, 404, 549, 18,3143,2398,1410,3692,1666,5062,3571,4533,2912,4534, # 752
5063,2991, 368,5064, 146, 366, 99, 871,3693,1543, 748, 807,1586,1185, 22,2263, # 768
379,3822,3211,5065,3212, 505,1942,2628,1992,1382,2319,5066, 380,2362, 218, 702, # 784
1818,1248,3465,3044,3572,3355,3282,5067,2992,3694, 930,3283,3823,5068, 59,5069, # 800
585, 601,4221, 497,3466,1112,1314,4535,1802,5070,1223,1472,2177,5071, 749,1837, # 816
690,1900,3824,1773,3988,1476, 429,1043,1791,2236,2117, 917,4222, 447,1086,1629, # 832
5072, 556,5073,5074,2021,1654, 844,1090, 105, 550, 966,1758,2828,1008,1783, 686, # 848
1095,5075,2287, 793,1602,5076,3573,2603,4536,4223,2948,2302,4537,3825, 980,2503, # 864
544, 353, 527,4538, 908,2687,2913,5077, 381,2629,1943,1348,5078,1341,1252, 560, # 880
3095,5079,3467,2870,5080,2054, 973, 886,2081, 143,4539,5081,5082, 157,3989, 496, # 896
4224, 57, 840, 540,2039,4540,4541,3468,2118,1445, 970,2264,1748,1966,2082,4225, # 912
3144,1234,1776,3284,2829,3695, 773,1206,2130,1066,2040,1326,3990,1738,1725,4226, # 928
279,3145, 51,1544,2604, 423,1578,2131,2067, 173,4542,1880,5083,5084,1583, 264, # 944
610,3696,4543,2444, 280, 154,5085,5086,5087,1739, 338,1282,3096, 693,2871,1411, # 960
1074,3826,2445,5088,4544,5089,5090,1240, 952,2399,5091,2914,1538,2688, 685,1483, # 976
4227,2475,1436, 953,4228,2055,4545, 671,2400, 79,4229,2446,3285, 608, 567,2689, # 992
3469,4230,4231,1691, 393,1261,1792,2401,5092,4546,5093,5094,5095,5096,1383,1672, # 1008
3827,3213,1464, 522,1119, 661,1150, 216, 675,4547,3991,1432,3574, 609,4548,2690, # 1024
2402,5097,5098,5099,4232,3045, 0,5100,2476, 315, 231,2447, 301,3356,4549,2385, # 1040
5101, 233,4233,3697,1819,4550,4551,5102, 96,1777,1315,2083,5103, 257,5104,1810, # 1056
3698,2718,1139,1820,4234,2022,1124,2164,2791,1778,2659,5105,3097, 363,1655,3214, # 1072
5106,2993,5107,5108,5109,3992,1567,3993, 718, 103,3215, 849,1443, 341,3357,2949, # 1088
1484,5110,1712, 127, 67, 339,4235,2403, 679,1412, 821,5111,5112, 834, 738, 351, # 1104
2994,2147, 846, 235,1497,1881, 418,1993,3828,2719, 186,1100,2148,2756,3575,1545, # 1120
1355,2950,2872,1377, 583,3994,4236,2581,2995,5113,1298,3699,1078,2557,3700,2363, # 1136
78,3829,3830, 267,1289,2100,2002,1594,4237, 348, 369,1274,2197,2178,1838,4552, # 1152
1821,2830,3701,2757,2288,2003,4553,2951,2758, 144,3358, 882,4554,3995,2759,3470, # 1168
4555,2915,5114,4238,1726, 320,5115,3996,3046, 788,2996,5116,2831,1774,1327,2873, # 1184
3997,2832,5117,1306,4556,2004,1700,3831,3576,2364,2660, 787,2023, 506, 824,3702, # 1200
534, 323,4557,1044,3359,2024,1901, 946,3471,5118,1779,1500,1678,5119,1882,4558, # 1216
165, 243,4559,3703,2528, 123, 683,4239, 764,4560, 36,3998,1793, 589,2916, 816, # 1232
626,1667,3047,2237,1639,1555,1622,3832,3999,5120,4000,2874,1370,1228,1933, 891, # 1248
2084,2917, 304,4240,5121, 292,2997,2720,3577, 691,2101,4241,1115,4561, 118, 662, # 1264
5122, 611,1156, 854,2386,1316,2875, 2, 386, 515,2918,5123,5124,3286, 868,2238, # 1280
1486, 855,2661, 785,2216,3048,5125,1040,3216,3578,5126,3146, 448,5127,1525,5128, # 1296
2165,4562,5129,3833,5130,4242,2833,3579,3147, 503, 818,4001,3148,1568, 814, 676, # 1312
1444, 306,1749,5131,3834,1416,1030, 197,1428, 805,2834,1501,4563,5132,5133,5134, # 1328
1994,5135,4564,5136,5137,2198, 13,2792,3704,2998,3149,1229,1917,5138,3835,2132, # 1344
5139,4243,4565,2404,3580,5140,2217,1511,1727,1120,5141,5142, 646,3836,2448, 307, # 1360
5143,5144,1595,3217,5145,5146,5147,3705,1113,1356,4002,1465,2529,2530,5148, 519, # 1376
5149, 128,2133, 92,2289,1980,5150,4003,1512, 342,3150,2199,5151,2793,2218,1981, # 1392
3360,4244, 290,1656,1317, 789, 827,2365,5152,3837,4566, 562, 581,4004,5153, 401, # 1408
4567,2252, 94,4568,5154,1399,2794,5155,1463,2025,4569,3218,1944,5156, 828,1105, # 1424
4245,1262,1394,5157,4246, 605,4570,5158,1784,2876,5159,2835, 819,2102, 578,2200, # 1440
2952,5160,1502, 436,3287,4247,3288,2836,4005,2919,3472,3473,5161,2721,2320,5162, # 1456
5163,2337,2068, 23,4571, 193, 826,3838,2103, 699,1630,4248,3098, 390,1794,1064, # 1472
3581,5164,1579,3099,3100,1400,5165,4249,1839,1640,2877,5166,4572,4573, 137,4250, # 1488
598,3101,1967, 780, 104, 974,2953,5167, 278, 899, 253, 402, 572, 504, 493,1339, # 1504
5168,4006,1275,4574,2582,2558,5169,3706,3049,3102,2253, 565,1334,2722, 863, 41, # 1520
5170,5171,4575,5172,1657,2338, 19, 463,2760,4251, 606,5173,2999,3289,1087,2085, # 1536
1323,2662,3000,5174,1631,1623,1750,4252,2691,5175,2878, 791,2723,2663,2339, 232, # 1552
2421,5176,3001,1498,5177,2664,2630, 755,1366,3707,3290,3151,2026,1609, 119,1918, # 1568
3474, 862,1026,4253,5178,4007,3839,4576,4008,4577,2265,1952,2477,5179,1125, 817, # 1584
4254,4255,4009,1513,1766,2041,1487,4256,3050,3291,2837,3840,3152,5180,5181,1507, # 1600
5182,2692, 733, 40,1632,1106,2879, 345,4257, 841,2531, 230,4578,3002,1847,3292, # 1616
3475,5183,1263, 986,3476,5184, 735, 879, 254,1137, 857, 622,1300,1180,1388,1562, # 1632
4010,4011,2954, 967,2761,2665,1349, 592,2134,1692,3361,3003,1995,4258,1679,4012, # 1648
1902,2188,5185, 739,3708,2724,1296,1290,5186,4259,2201,2202,1922,1563,2605,2559, # 1664
1871,2762,3004,5187, 435,5188, 343,1108, 596, 17,1751,4579,2239,3477,3709,5189, # 1680
4580, 294,3582,2955,1693, 477, 979, 281,2042,3583, 643,2043,3710,2631,2795,2266, # 1696
1031,2340,2135,2303,3584,4581, 367,1249,2560,5190,3585,5191,4582,1283,3362,2005, # 1712
240,1762,3363,4583,4584, 836,1069,3153, 474,5192,2149,2532, 268,3586,5193,3219, # 1728
1521,1284,5194,1658,1546,4260,5195,3587,3588,5196,4261,3364,2693,1685,4262, 961, # 1744
1673,2632, 190,2006,2203,3841,4585,4586,5197, 570,2504,3711,1490,5198,4587,2633, # 1760
3293,1957,4588, 584,1514, 396,1045,1945,5199,4589,1968,2449,5200,5201,4590,4013, # 1776
619,5202,3154,3294, 215,2007,2796,2561,3220,4591,3221,4592, 763,4263,3842,4593, # 1792
5203,5204,1958,1767,2956,3365,3712,1174, 452,1477,4594,3366,3155,5205,2838,1253, # 1808
2387,2189,1091,2290,4264, 492,5206, 638,1169,1825,2136,1752,4014, 648, 926,1021, # 1824
1324,4595, 520,4596, 997, 847,1007, 892,4597,3843,2267,1872,3713,2405,1785,4598, # 1840
1953,2957,3103,3222,1728,4265,2044,3714,4599,2008,1701,3156,1551, 30,2268,4266, # 1856
5207,2027,4600,3589,5208, 501,5209,4267, 594,3478,2166,1822,3590,3479,3591,3223, # 1872
829,2839,4268,5210,1680,3157,1225,4269,5211,3295,4601,4270,3158,2341,5212,4602, # 1888
4271,5213,4015,4016,5214,1848,2388,2606,3367,5215,4603, 374,4017, 652,4272,4273, # 1904
375,1140, 798,5216,5217,5218,2366,4604,2269, 546,1659, 138,3051,2450,4605,5219, # 1920
2254, 612,1849, 910, 796,3844,1740,1371, 825,3845,3846,5220,2920,2562,5221, 692, # 1936
444,3052,2634, 801,4606,4274,5222,1491, 244,1053,3053,4275,4276, 340,5223,4018, # 1952
1041,3005, 293,1168, 87,1357,5224,1539, 959,5225,2240, 721, 694,4277,3847, 219, # 1968
1478, 644,1417,3368,2666,1413,1401,1335,1389,4019,5226,5227,3006,2367,3159,1826, # 1984
730,1515, 184,2840, 66,4607,5228,1660,2958, 246,3369, 378,1457, 226,3480, 975, # 2000
4020,2959,1264,3592, 674, 696,5229, 163,5230,1141,2422,2167, 713,3593,3370,4608, # 2016
4021,5231,5232,1186, 15,5233,1079,1070,5234,1522,3224,3594, 276,1050,2725, 758, # 2032
1126, 653,2960,3296,5235,2342, 889,3595,4022,3104,3007, 903,1250,4609,4023,3481, # 2048
3596,1342,1681,1718, 766,3297, 286, 89,2961,3715,5236,1713,5237,2607,3371,3008, # 2064
5238,2962,2219,3225,2880,5239,4610,2505,2533, 181, 387,1075,4024, 731,2190,3372, # 2080
5240,3298, 310, 313,3482,2304, 770,4278, 54,3054, 189,4611,3105,3848,4025,5241, # 2096
1230,1617,1850, 355,3597,4279,4612,3373, 111,4280,3716,1350,3160,3483,3055,4281, # 2112
2150,3299,3598,5242,2797,4026,4027,3009, 722,2009,5243,1071, 247,1207,2343,2478, # 2128
1378,4613,2010, 864,1437,1214,4614, 373,3849,1142,2220, 667,4615, 442,2763,2563, # 2144
3850,4028,1969,4282,3300,1840, 837, 170,1107, 934,1336,1883,5244,5245,2119,4283, # 2160
2841, 743,1569,5246,4616,4284, 582,2389,1418,3484,5247,1803,5248, 357,1395,1729, # 2176
3717,3301,2423,1564,2241,5249,3106,3851,1633,4617,1114,2086,4285,1532,5250, 482, # 2192
2451,4618,5251,5252,1492, 833,1466,5253,2726,3599,1641,2842,5254,1526,1272,3718, # 2208
4286,1686,1795, 416,2564,1903,1954,1804,5255,3852,2798,3853,1159,2321,5256,2881, # 2224
4619,1610,1584,3056,2424,2764, 443,3302,1163,3161,5257,5258,4029,5259,4287,2506, # 2240
3057,4620,4030,3162,2104,1647,3600,2011,1873,4288,5260,4289, 431,3485,5261, 250, # 2256
97, 81,4290,5262,1648,1851,1558, 160, 848,5263, 866, 740,1694,5264,2204,2843, # 2272
3226,4291,4621,3719,1687, 950,2479, 426, 469,3227,3720,3721,4031,5265,5266,1188, # 2288
424,1996, 861,3601,4292,3854,2205,2694, 168,1235,3602,4293,5267,2087,1674,4622, # 2304
3374,3303, 220,2565,1009,5268,3855, 670,3010, 332,1208, 717,5269,5270,3603,2452, # 2320
4032,3375,5271, 513,5272,1209,2882,3376,3163,4623,1080,5273,5274,5275,5276,2534, # 2336
3722,3604, 815,1587,4033,4034,5277,3605,3486,3856,1254,4624,1328,3058,1390,4035, # 2352
1741,4036,3857,4037,5278, 236,3858,2453,3304,5279,5280,3723,3859,1273,3860,4625, # 2368
5281, 308,5282,4626, 245,4627,1852,2480,1307,2583, 430, 715,2137,2454,5283, 270, # 2384
199,2883,4038,5284,3606,2727,1753, 761,1754, 725,1661,1841,4628,3487,3724,5285, # 2400
5286, 587, 14,3305, 227,2608, 326, 480,2270, 943,2765,3607, 291, 650,1884,5287, # 2416
1702,1226, 102,1547, 62,3488, 904,4629,3489,1164,4294,5288,5289,1224,1548,2766, # 2432
391, 498,1493,5290,1386,1419,5291,2056,1177,4630, 813, 880,1081,2368, 566,1145, # 2448
4631,2291,1001,1035,2566,2609,2242, 394,1286,5292,5293,2069,5294, 86,1494,1730, # 2464
4039, 491,1588, 745, 897,2963, 843,3377,4040,2767,2884,3306,1768, 998,2221,2070, # 2480
397,1827,1195,1970,3725,3011,3378, 284,5295,3861,2507,2138,2120,1904,5296,4041, # 2496
2151,4042,4295,1036,3490,1905, 114,2567,4296, 209,1527,5297,5298,2964,2844,2635, # 2512
2390,2728,3164, 812,2568,5299,3307,5300,1559, 737,1885,3726,1210, 885, 28,2695, # 2528
3608,3862,5301,4297,1004,1780,4632,5302, 346,1982,2222,2696,4633,3863,1742, 797, # 2544
1642,4043,1934,1072,1384,2152, 896,4044,3308,3727,3228,2885,3609,5303,2569,1959, # 2560
4634,2455,1786,5304,5305,5306,4045,4298,1005,1308,3728,4299,2729,4635,4636,1528, # 2576
2610, 161,1178,4300,1983, 987,4637,1101,4301, 631,4046,1157,3229,2425,1343,1241, # 2592
1016,2243,2570, 372, 877,2344,2508,1160, 555,1935, 911,4047,5307, 466,1170, 169, # 2608
1051,2921,2697,3729,2481,3012,1182,2012,2571,1251,2636,5308, 992,2345,3491,1540, # 2624
2730,1201,2071,2406,1997,2482,5309,4638, 528,1923,2191,1503,1874,1570,2369,3379, # 2640
3309,5310, 557,1073,5311,1828,3492,2088,2271,3165,3059,3107, 767,3108,2799,4639, # 2656
1006,4302,4640,2346,1267,2179,3730,3230, 778,4048,3231,2731,1597,2667,5312,4641, # 2672
5313,3493,5314,5315,5316,3310,2698,1433,3311, 131, 95,1504,4049, 723,4303,3166, # 2688
1842,3610,2768,2192,4050,2028,2105,3731,5317,3013,4051,1218,5318,3380,3232,4052, # 2704
4304,2584, 248,1634,3864, 912,5319,2845,3732,3060,3865, 654, 53,5320,3014,5321, # 2720
1688,4642, 777,3494,1032,4053,1425,5322, 191, 820,2121,2846, 971,4643, 931,3233, # 2736
135, 664, 783,3866,1998, 772,2922,1936,4054,3867,4644,2923,3234, 282,2732, 640, # 2752
1372,3495,1127, 922, 325,3381,5323,5324, 711,2045,5325,5326,4055,2223,2800,1937, # 2768
4056,3382,2224,2255,3868,2305,5327,4645,3869,1258,3312,4057,3235,2139,2965,4058, # 2784
4059,5328,2225, 258,3236,4646, 101,1227,5329,3313,1755,5330,1391,3314,5331,2924, # 2800
2057, 893,5332,5333,5334,1402,4305,2347,5335,5336,3237,3611,5337,5338, 878,1325, # 2816
1781,2801,4647, 259,1385,2585, 744,1183,2272,4648,5339,4060,2509,5340, 684,1024, # 2832
4306,5341, 472,3612,3496,1165,3315,4061,4062, 322,2153, 881, 455,1695,1152,1340, # 2848
660, 554,2154,4649,1058,4650,4307, 830,1065,3383,4063,4651,1924,5342,1703,1919, # 2864
5343, 932,2273, 122,5344,4652, 947, 677,5345,3870,2637, 297,1906,1925,2274,4653, # 2880
2322,3316,5346,5347,4308,5348,4309, 84,4310, 112, 989,5349, 547,1059,4064, 701, # 2896
3613,1019,5350,4311,5351,3497, 942, 639, 457,2306,2456, 993,2966, 407, 851, 494, # 2912
4654,3384, 927,5352,1237,5353,2426,3385, 573,4312, 680, 921,2925,1279,1875, 285, # 2928
790,1448,1984, 719,2168,5354,5355,4655,4065,4066,1649,5356,1541, 563,5357,1077, # 2944
5358,3386,3061,3498, 511,3015,4067,4068,3733,4069,1268,2572,3387,3238,4656,4657, # 2960
5359, 535,1048,1276,1189,2926,2029,3167,1438,1373,2847,2967,1134,2013,5360,4313, # 2976
1238,2586,3109,1259,5361, 700,5362,2968,3168,3734,4314,5363,4315,1146,1876,1907, # 2992
4658,2611,4070, 781,2427, 132,1589, 203, 147, 273,2802,2407, 898,1787,2155,4071, # 3008
4072,5364,3871,2803,5365,5366,4659,4660,5367,3239,5368,1635,3872, 965,5369,1805, # 3024
2699,1516,3614,1121,1082,1329,3317,4073,1449,3873, 65,1128,2848,2927,2769,1590, # 3040
3874,5370,5371, 12,2668, 45, 976,2587,3169,4661, 517,2535,1013,1037,3240,5372, # 3056
3875,2849,5373,3876,5374,3499,5375,2612, 614,1999,2323,3877,3110,2733,2638,5376, # 3072
2588,4316, 599,1269,5377,1811,3735,5378,2700,3111, 759,1060, 489,1806,3388,3318, # 3088
1358,5379,5380,2391,1387,1215,2639,2256, 490,5381,5382,4317,1759,2392,2348,5383, # 3104
4662,3878,1908,4074,2640,1807,3241,4663,3500,3319,2770,2349, 874,5384,5385,3501, # 3120
3736,1859, 91,2928,3737,3062,3879,4664,5386,3170,4075,2669,5387,3502,1202,1403, # 3136
3880,2969,2536,1517,2510,4665,3503,2511,5388,4666,5389,2701,1886,1495,1731,4076, # 3152
2370,4667,5390,2030,5391,5392,4077,2702,1216, 237,2589,4318,2324,4078,3881,4668, # 3168
4669,2703,3615,3504, 445,4670,5393,5394,5395,5396,2771, 61,4079,3738,1823,4080, # 3184
5397, 687,2046, 935, 925, 405,2670, 703,1096,1860,2734,4671,4081,1877,1367,2704, # 3200
3389, 918,2106,1782,2483, 334,3320,1611,1093,4672, 564,3171,3505,3739,3390, 945, # 3216
2641,2058,4673,5398,1926, 872,4319,5399,3506,2705,3112, 349,4320,3740,4082,4674, # 3232
3882,4321,3741,2156,4083,4675,4676,4322,4677,2408,2047, 782,4084, 400, 251,4323, # 3248
1624,5400,5401, 277,3742, 299,1265, 476,1191,3883,2122,4324,4325,1109, 205,5402, # 3264
2590,1000,2157,3616,1861,5403,5404,5405,4678,5406,4679,2573, 107,2484,2158,4085, # 3280
3507,3172,5407,1533, 541,1301, 158, 753,4326,2886,3617,5408,1696, 370,1088,4327, # 3296
4680,3618, 579, 327, 440, 162,2244, 269,1938,1374,3508, 968,3063, 56,1396,3113, # 3312
2107,3321,3391,5409,1927,2159,4681,3016,5410,3619,5411,5412,3743,4682,2485,5413, # 3328
2804,5414,1650,4683,5415,2613,5416,5417,4086,2671,3392,1149,3393,4087,3884,4088, # 3344
5418,1076, 49,5419, 951,3242,3322,3323, 450,2850, 920,5420,1812,2805,2371,4328, # 3360
1909,1138,2372,3885,3509,5421,3243,4684,1910,1147,1518,2428,4685,3886,5422,4686, # 3376
2393,2614, 260,1796,3244,5423,5424,3887,3324, 708,5425,3620,1704,5426,3621,1351, # 3392
1618,3394,3017,1887, 944,4329,3395,4330,3064,3396,4331,5427,3744, 422, 413,1714, # 3408
3325, 500,2059,2350,4332,2486,5428,1344,1911, 954,5429,1668,5430,5431,4089,2409, # 3424
4333,3622,3888,4334,5432,2307,1318,2512,3114, 133,3115,2887,4687, 629, 31,2851, # 3440
2706,3889,4688, 850, 949,4689,4090,2970,1732,2089,4335,1496,1853,5433,4091, 620, # 3456
3245, 981,1242,3745,3397,1619,3746,1643,3326,2140,2457,1971,1719,3510,2169,5434, # 3472
3246,5435,5436,3398,1829,5437,1277,4690,1565,2048,5438,1636,3623,3116,5439, 869, # 3488
2852, 655,3890,3891,3117,4092,3018,3892,1310,3624,4691,5440,5441,5442,1733, 558, # 3504
4692,3747, 335,1549,3065,1756,4336,3748,1946,3511,1830,1291,1192, 470,2735,2108, # 3520
2806, 913,1054,4093,5443,1027,5444,3066,4094,4693, 982,2672,3399,3173,3512,3247, # 3536
3248,1947,2807,5445, 571,4694,5446,1831,5447,3625,2591,1523,2429,5448,2090, 984, # 3552
4695,3749,1960,5449,3750, 852, 923,2808,3513,3751, 969,1519, 999,2049,2325,1705, # 3568
5450,3118, 615,1662, 151, 597,4095,2410,2326,1049, 275,4696,3752,4337, 568,3753, # 3584
3626,2487,4338,3754,5451,2430,2275, 409,3249,5452,1566,2888,3514,1002, 769,2853, # 3600
194,2091,3174,3755,2226,3327,4339, 628,1505,5453,5454,1763,2180,3019,4096, 521, # 3616
1161,2592,1788,2206,2411,4697,4097,1625,4340,4341, 412, 42,3119, 464,5455,2642, # 3632
4698,3400,1760,1571,2889,3515,2537,1219,2207,3893,2643,2141,2373,4699,4700,3328, # 3648
1651,3401,3627,5456,5457,3628,2488,3516,5458,3756,5459,5460,2276,2092, 460,5461, # 3664
4701,5462,3020, 962, 588,3629, 289,3250,2644,1116, 52,5463,3067,1797,5464,5465, # 3680
5466,1467,5467,1598,1143,3757,4342,1985,1734,1067,4702,1280,3402, 465,4703,1572, # 3696
510,5468,1928,2245,1813,1644,3630,5469,4704,3758,5470,5471,2673,1573,1534,5472, # 3712
5473, 536,1808,1761,3517,3894,3175,2645,5474,5475,5476,4705,3518,2929,1912,2809, # 3728
5477,3329,1122, 377,3251,5478, 360,5479,5480,4343,1529, 551,5481,2060,3759,1769, # 3744
2431,5482,2930,4344,3330,3120,2327,2109,2031,4706,1404, 136,1468,1479, 672,1171, # 3760
3252,2308, 271,3176,5483,2772,5484,2050, 678,2736, 865,1948,4707,5485,2014,4098, # 3776
2971,5486,2737,2227,1397,3068,3760,4708,4709,1735,2931,3403,3631,5487,3895, 509, # 3792
2854,2458,2890,3896,5488,5489,3177,3178,4710,4345,2538,4711,2309,1166,1010, 552, # 3808
681,1888,5490,5491,2972,2973,4099,1287,1596,1862,3179, 358, 453, 736, 175, 478, # 3824
1117, 905,1167,1097,5492,1854,1530,5493,1706,5494,2181,3519,2292,3761,3520,3632, # 3840
4346,2093,4347,5495,3404,1193,2489,4348,1458,2193,2208,1863,1889,1421,3331,2932, # 3856
3069,2182,3521, 595,2123,5496,4100,5497,5498,4349,1707,2646, 223,3762,1359, 751, # 3872
3121, 183,3522,5499,2810,3021, 419,2374, 633, 704,3897,2394, 241,5500,5501,5502, # 3888
838,3022,3763,2277,2773,2459,3898,1939,2051,4101,1309,3122,2246,1181,5503,1136, # 3904
2209,3899,2375,1446,4350,2310,4712,5504,5505,4351,1055,2615, 484,3764,5506,4102, # 3920
625,4352,2278,3405,1499,4353,4103,5507,4104,4354,3253,2279,2280,3523,5508,5509, # 3936
2774, 808,2616,3765,3406,4105,4355,3123,2539, 526,3407,3900,4356, 955,5510,1620, # 3952
4357,2647,2432,5511,1429,3766,1669,1832, 994, 928,5512,3633,1260,5513,5514,5515, # 3968
1949,2293, 741,2933,1626,4358,2738,2460, 867,1184, 362,3408,1392,5516,5517,4106, # 3984
4359,1770,1736,3254,2934,4713,4714,1929,2707,1459,1158,5518,3070,3409,2891,1292, # 4000
1930,2513,2855,3767,1986,1187,2072,2015,2617,4360,5519,2574,2514,2170,3768,2490, # 4016
3332,5520,3769,4715,5521,5522, 666,1003,3023,1022,3634,4361,5523,4716,1814,2257, # 4032
574,3901,1603, 295,1535, 705,3902,4362, 283, 858, 417,5524,5525,3255,4717,4718, # 4048
3071,1220,1890,1046,2281,2461,4107,1393,1599, 689,2575, 388,4363,5526,2491, 802, # 4064
5527,2811,3903,2061,1405,2258,5528,4719,3904,2110,1052,1345,3256,1585,5529, 809, # 4080
5530,5531,5532, 575,2739,3524, 956,1552,1469,1144,2328,5533,2329,1560,2462,3635, # 4096
3257,4108, 616,2210,4364,3180,2183,2294,5534,1833,5535,3525,4720,5536,1319,3770, # 4112
3771,1211,3636,1023,3258,1293,2812,5537,5538,5539,3905, 607,2311,3906, 762,2892, # 4128
1439,4365,1360,4721,1485,3072,5540,4722,1038,4366,1450,2062,2648,4367,1379,4723, # 4144
2593,5541,5542,4368,1352,1414,2330,2935,1172,5543,5544,3907,3908,4724,1798,1451, # 4160
5545,5546,5547,5548,2936,4109,4110,2492,2351, 411,4111,4112,3637,3333,3124,4725, # 4176
1561,2674,1452,4113,1375,5549,5550, 47,2974, 316,5551,1406,1591,2937,3181,5552, # 4192
1025,2142,3125,3182, 354,2740, 884,2228,4369,2412, 508,3772, 726,3638, 996,2433, # 4208
3639, 729,5553, 392,2194,1453,4114,4726,3773,5554,5555,2463,3640,2618,1675,2813, # 4224
919,2352,2975,2353,1270,4727,4115, 73,5556,5557, 647,5558,3259,2856,2259,1550, # 4240
1346,3024,5559,1332, 883,3526,5560,5561,5562,5563,3334,2775,5564,1212, 831,1347, # 4256
4370,4728,2331,3909,1864,3073, 720,3910,4729,4730,3911,5565,4371,5566,5567,4731, # 4272
5568,5569,1799,4732,3774,2619,4733,3641,1645,2376,4734,5570,2938, 669,2211,2675, # 4288
2434,5571,2893,5572,5573,1028,3260,5574,4372,2413,5575,2260,1353,5576,5577,4735, # 4304
3183, 518,5578,4116,5579,4373,1961,5580,2143,4374,5581,5582,3025,2354,2355,3912, # 4320
516,1834,1454,4117,2708,4375,4736,2229,2620,1972,1129,3642,5583,2776,5584,2976, # 4336
1422, 577,1470,3026,1524,3410,5585,5586, 432,4376,3074,3527,5587,2594,1455,2515, # 4352
2230,1973,1175,5588,1020,2741,4118,3528,4737,5589,2742,5590,1743,1361,3075,3529, # 4368
2649,4119,4377,4738,2295, 895, 924,4378,2171, 331,2247,3076, 166,1627,3077,1098, # 4384
5591,1232,2894,2231,3411,4739, 657, 403,1196,2377, 542,3775,3412,1600,4379,3530, # 4400
5592,4740,2777,3261, 576, 530,1362,4741,4742,2540,2676,3776,4120,5593, 842,3913, # 4416
5594,2814,2032,1014,4121, 213,2709,3413, 665, 621,4380,5595,3777,2939,2435,5596, # 4432
2436,3335,3643,3414,4743,4381,2541,4382,4744,3644,1682,4383,3531,1380,5597, 724, # 4448
2282, 600,1670,5598,1337,1233,4745,3126,2248,5599,1621,4746,5600, 651,4384,5601, # 4464
1612,4385,2621,5602,2857,5603,2743,2312,3078,5604, 716,2464,3079, 174,1255,2710, # 4480
4122,3645, 548,1320,1398, 728,4123,1574,5605,1891,1197,3080,4124,5606,3081,3082, # 4496
3778,3646,3779, 747,5607, 635,4386,4747,5608,5609,5610,4387,5611,5612,4748,5613, # 4512
3415,4749,2437, 451,5614,3780,2542,2073,4388,2744,4389,4125,5615,1764,4750,5616, # 4528
4390, 350,4751,2283,2395,2493,5617,4391,4126,2249,1434,4127, 488,4752, 458,4392, # 4544
4128,3781, 771,1330,2396,3914,2576,3184,2160,2414,1553,2677,3185,4393,5618,2494, # 4560
2895,2622,1720,2711,4394,3416,4753,5619,2543,4395,5620,3262,4396,2778,5621,2016, # 4576
2745,5622,1155,1017,3782,3915,5623,3336,2313, 201,1865,4397,1430,5624,4129,5625, # 4592
5626,5627,5628,5629,4398,1604,5630, 414,1866, 371,2595,4754,4755,3532,2017,3127, # 4608
4756,1708, 960,4399, 887, 389,2172,1536,1663,1721,5631,2232,4130,2356,2940,1580, # 4624
5632,5633,1744,4757,2544,4758,4759,5634,4760,5635,2074,5636,4761,3647,3417,2896, # 4640
4400,5637,4401,2650,3418,2815, 673,2712,2465, 709,3533,4131,3648,4402,5638,1148, # 4656
502, 634,5639,5640,1204,4762,3649,1575,4763,2623,3783,5641,3784,3128, 948,3263, # 4672
121,1745,3916,1110,5642,4403,3083,2516,3027,4132,3785,1151,1771,3917,1488,4133, # 4688
1987,5643,2438,3534,5644,5645,2094,5646,4404,3918,1213,1407,2816, 531,2746,2545, # 4704
3264,1011,1537,4764,2779,4405,3129,1061,5647,3786,3787,1867,2897,5648,2018, 120, # 4720
4406,4407,2063,3650,3265,2314,3919,2678,3419,1955,4765,4134,5649,3535,1047,2713, # 4736
1266,5650,1368,4766,2858, 649,3420,3920,2546,2747,1102,2859,2679,5651,5652,2000, # 4752
5653,1111,3651,2977,5654,2495,3921,3652,2817,1855,3421,3788,5655,5656,3422,2415, # 4768
2898,3337,3266,3653,5657,2577,5658,3654,2818,4135,1460, 856,5659,3655,5660,2899, # 4784
2978,5661,2900,3922,5662,4408, 632,2517, 875,3923,1697,3924,2296,5663,5664,4767, # 4800
3028,1239, 580,4768,4409,5665, 914, 936,2075,1190,4136,1039,2124,5666,5667,5668, # 4816
5669,3423,1473,5670,1354,4410,3925,4769,2173,3084,4137, 915,3338,4411,4412,3339, # 4832
1605,1835,5671,2748, 398,3656,4413,3926,4138, 328,1913,2860,4139,3927,1331,4414, # 4848
3029, 937,4415,5672,3657,4140,4141,3424,2161,4770,3425, 524, 742, 538,3085,1012, # 4864
5673,5674,3928,2466,5675, 658,1103, 225,3929,5676,5677,4771,5678,4772,5679,3267, # 4880
1243,5680,4142, 963,2250,4773,5681,2714,3658,3186,5682,5683,2596,2332,5684,4774, # 4896
5685,5686,5687,3536, 957,3426,2547,2033,1931,2941,2467, 870,2019,3659,1746,2780, # 4912
2781,2439,2468,5688,3930,5689,3789,3130,3790,3537,3427,3791,5690,1179,3086,5691, # 4928
3187,2378,4416,3792,2548,3188,3131,2749,4143,5692,3428,1556,2549,2297, 977,2901, # 4944
2034,4144,1205,3429,5693,1765,3430,3189,2125,1271, 714,1689,4775,3538,5694,2333, # 4960
3931, 533,4417,3660,2184, 617,5695,2469,3340,3539,2315,5696,5697,3190,5698,5699, # 4976
3932,1988, 618, 427,2651,3540,3431,5700,5701,1244,1690,5702,2819,4418,4776,5703, # 4992
3541,4777,5704,2284,1576, 473,3661,4419,3432, 972,5705,3662,5706,3087,5707,5708, # 5008
4778,4779,5709,3793,4145,4146,5710, 153,4780, 356,5711,1892,2902,4420,2144, 408, # 5024
803,2357,5712,3933,5713,4421,1646,2578,2518,4781,4782,3934,5714,3935,4422,5715, # 5040
2416,3433, 752,5716,5717,1962,3341,2979,5718, 746,3030,2470,4783,4423,3794, 698, # 5056
4784,1893,4424,3663,2550,4785,3664,3936,5719,3191,3434,5720,1824,1302,4147,2715, # 5072
3937,1974,4425,5721,4426,3192, 823,1303,1288,1236,2861,3542,4148,3435, 774,3938, # 5088
5722,1581,4786,1304,2862,3939,4787,5723,2440,2162,1083,3268,4427,4149,4428, 344, # 5104
1173, 288,2316, 454,1683,5724,5725,1461,4788,4150,2597,5726,5727,4789, 985, 894, # 5120
5728,3436,3193,5729,1914,2942,3795,1989,5730,2111,1975,5731,4151,5732,2579,1194, # 5136
425,5733,4790,3194,1245,3796,4429,5734,5735,2863,5736, 636,4791,1856,3940, 760, # 5152
1800,5737,4430,2212,1508,4792,4152,1894,1684,2298,5738,5739,4793,4431,4432,2213, # 5168
479,5740,5741, 832,5742,4153,2496,5743,2980,2497,3797, 990,3132, 627,1815,2652, # 5184
4433,1582,4434,2126,2112,3543,4794,5744, 799,4435,3195,5745,4795,2113,1737,3031, # 5200
1018, 543, 754,4436,3342,1676,4796,4797,4154,4798,1489,5746,3544,5747,2624,2903, # 5216
4155,5748,5749,2981,5750,5751,5752,5753,3196,4799,4800,2185,1722,5754,3269,3270, # 5232
1843,3665,1715, 481, 365,1976,1857,5755,5756,1963,2498,4801,5757,2127,3666,3271, # 5248
433,1895,2064,2076,5758, 602,2750,5759,5760,5761,5762,5763,3032,1628,3437,5764, # 5264
3197,4802,4156,2904,4803,2519,5765,2551,2782,5766,5767,5768,3343,4804,2905,5769, # 5280
4805,5770,2864,4806,4807,1221,2982,4157,2520,5771,5772,5773,1868,1990,5774,5775, # 5296
5776,1896,5777,5778,4808,1897,4158, 318,5779,2095,4159,4437,5780,5781, 485,5782, # 5312
938,3941, 553,2680, 116,5783,3942,3667,5784,3545,2681,2783,3438,3344,2820,5785, # 5328
3668,2943,4160,1747,2944,2983,5786,5787, 207,5788,4809,5789,4810,2521,5790,3033, # 5344
890,3669,3943,5791,1878,3798,3439,5792,2186,2358,3440,1652,5793,5794,5795, 941, # 5360
2299, 208,3546,4161,2020, 330,4438,3944,2906,2499,3799,4439,4811,5796,5797,5798, # 5376
)
# ============================================================================
# File: duplicity/collections.py
# Repo: nils-tekampe/duplicity | copies: 1 | size: 46283
# ============================================================================
# -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
#
# Copyright 2002 Ben Escoto <ben@emerose.org>
# Copyright 2007 Kenneth Loafman <kenneth@loafman.com>
#
# This file is part of duplicity.
#
# Duplicity is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation; either version 2 of the License, or (at your
# option) any later version.
#
# Duplicity is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with duplicity; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""Classes and functions on collections of backup volumes"""
from future_builtins import filter, map
import types
import gettext
import sys
from duplicity import log
from duplicity import file_naming
from duplicity import path
from duplicity import util
from duplicity import dup_time
from duplicity import globals
from duplicity import manifest
from duplicity.gpg import GPGError
# For type testing against both int and long types that works in python 2/3
if sys.version_info < (3,):
integer_types = (int, types.LongType)
else:
integer_types = (int,)
class CollectionsError(Exception):
pass
class BackupSet:
"""
Backup set - the backup information produced by one session
"""
def __init__(self, backend):
"""
Initialize new backup set, only backend is required at first
"""
self.backend = backend
self.info_set = False # true if fields are set
self.volume_name_dict = {} # dict from volume number to filename
self.remote_manifest_name = None # full name of remote manifest
self.local_manifest_path = None # full path to local manifest
self.time = None # will be set if is full backup set
self.start_time = None # will be set if inc
self.end_time = None # will be set if inc
self.partial = False # true if a partial backup
self.encrypted = False # true if an encrypted backup
self.files_changed = []
def is_complete(self):
"""
Assume complete if found manifest file
"""
return self.remote_manifest_name
def add_filename(self, filename):
"""
Add a filename to given set. Return true if it fits.
The filename will match the given set if it has the right
times and is of the right type. The information will be set
from the first filename given.
@param filename: name of file to add
@type filename: string
"""
pr = file_naming.parse(filename)
if not pr or not (pr.type == "full" or pr.type == "inc"):
return False
if not self.info_set:
self.set_info(pr)
else:
if pr.type != self.type:
return False
if pr.time != self.time:
return False
if (pr.start_time != self.start_time or
pr.end_time != self.end_time):
return False
if bool(pr.encrypted) != bool(self.encrypted):
if self.partial and pr.encrypted:
self.encrypted = pr.encrypted
if pr.manifest:
self.set_manifest(filename)
else:
assert pr.volume_number is not None
assert pr.volume_number not in self.volume_name_dict, \
(self.volume_name_dict, filename)
self.volume_name_dict[pr.volume_number] = filename
return True
def set_info(self, pr):
"""
Set BackupSet information from ParseResults object
@param pr: parse results
        @type pr: ParseResults
"""
assert not self.info_set
self.type = pr.type
self.time = pr.time
self.start_time = pr.start_time
self.end_time = pr.end_time
self.partial = pr.partial
self.encrypted = bool(pr.encrypted)
self.info_set = True
def set_files_changed(self):
mf = self.get_manifest()
self.files_changed = mf.get_files_changed()
def set_manifest(self, remote_filename):
"""
Add local and remote manifest filenames to backup set
"""
assert not self.remote_manifest_name, (self.remote_manifest_name,
remote_filename)
self.remote_manifest_name = remote_filename
for local_filename in globals.archive_dir_path.listdir():
pr = file_naming.parse(local_filename)
if (pr and pr.manifest and pr.type == self.type and
pr.time == self.time and
pr.start_time == self.start_time and
pr.end_time == self.end_time):
self.local_manifest_path = \
globals.archive_dir_path.append(local_filename)
self.set_files_changed()
break
def delete(self):
"""
Remove all files in set, both local and remote
"""
rfn = self.get_filenames()
rfn.reverse()
try:
self.backend.delete(rfn)
except Exception:
log.Debug(_("BackupSet.delete: missing %s") % [util.ufn(f) for f in rfn])
pass
for lfn in globals.archive_dir_path.listdir():
pr = file_naming.parse(lfn)
if (pr and pr.time == self.time and
pr.start_time == self.start_time and
pr.end_time == self.end_time):
try:
globals.archive_dir_path.append(lfn).delete()
except Exception:
log.Debug(_("BackupSet.delete: missing %s") % [util.ufn(f) for f in lfn])
pass
util.release_lockfile()
def __unicode__(self):
"""
For now just list files in set
"""
filelist = []
if self.remote_manifest_name:
filelist.append(self.remote_manifest_name)
filelist.extend(self.volume_name_dict.values())
return u"[%s]" % u", ".join(map(util.ufn, filelist))
def get_timestr(self):
"""
Return time string suitable for log statements
"""
return dup_time.timetopretty(self.time or self.end_time)
def check_manifests(self):
"""
Make sure remote manifest is equal to local one
"""
if not self.remote_manifest_name and not self.local_manifest_path:
log.FatalError(_("Fatal Error: No manifests found for most recent backup"),
log.ErrorCode.no_manifests)
assert self.remote_manifest_name, "if only one, should be remote"
remote_manifest = self.get_remote_manifest()
if self.local_manifest_path:
local_manifest = self.get_local_manifest()
if remote_manifest and self.local_manifest_path and local_manifest:
if remote_manifest != local_manifest:
log.FatalError(_("Fatal Error: Remote manifest does not match "
"local one. Either the remote backup set or "
"the local archive directory has been corrupted."),
log.ErrorCode.mismatched_manifests)
if not remote_manifest:
if self.local_manifest_path:
remote_manifest = local_manifest
else:
log.FatalError(_("Fatal Error: Neither remote nor local "
"manifest is readable."),
log.ErrorCode.unreadable_manifests)
remote_manifest.check_dirinfo()
def get_local_manifest(self):
"""
Return manifest object by reading local manifest file
"""
assert self.local_manifest_path
manifest_buffer = self.local_manifest_path.get_data()
log.Info(_("Processing local manifest %s (%s)") % (self.local_manifest_path.name, len(manifest_buffer)))
return manifest.Manifest().from_string(manifest_buffer)
def get_remote_manifest(self):
"""
Return manifest by reading remote manifest on backend
"""
assert self.remote_manifest_name
# Following by MDR. Should catch if remote encrypted with
# public key w/o secret key
try:
manifest_buffer = self.backend.get_data(self.remote_manifest_name)
except GPGError as message:
# TODO: We check for gpg v1 and v2 messages, should be an error code
if ("secret key not available" in message.args[0] or
"No secret key" in message.args[0]):
return None
else:
raise
log.Info(_("Processing remote manifest %s (%s)") % (self.remote_manifest_name, len(manifest_buffer)))
return manifest.Manifest().from_string(manifest_buffer)
def get_manifest(self):
"""
Return manifest object, showing preference for local copy
"""
if self.local_manifest_path:
return self.get_local_manifest()
else:
return self.get_remote_manifest()
def get_filenames(self):
"""
Return sorted list of (remote) filenames of files in set
"""
assert self.info_set
volume_num_list = self.volume_name_dict.keys()
volume_num_list.sort()
volume_filenames = [self.volume_name_dict[x] for x in volume_num_list]
if self.remote_manifest_name:
# For convenience of implementation for restart support, we treat
# local partial manifests as this set's remote manifest. But
# when specifically asked for a list of remote filenames, we
# should not include it.
pr = file_naming.parse(self.remote_manifest_name)
if not pr or not pr.partial:
volume_filenames.append(self.remote_manifest_name)
return volume_filenames
def get_time(self):
"""
Return time if full backup, or end_time if incremental
"""
if self.time:
return self.time
if self.end_time:
return self.end_time
assert 0, "Neither self.time nor self.end_time set"
def get_files_changed(self):
return self.files_changed
def __len__(self):
"""
Return the number of volumes in the set
"""
return len(self.volume_name_dict.keys())
class BackupChain:
"""
BackupChain - a number of linked BackupSets
A BackupChain always starts with a full backup set and continues
with incremental ones.
"""
def __init__(self, backend):
"""
Initialize new chain, only backend is required at first
"""
self.backend = backend
self.fullset = None
self.incset_list = [] # sorted list of BackupSets
self.start_time, self.end_time = None, None
def set_full(self, fullset):
"""
Add full backup set
"""
assert not self.fullset and isinstance(fullset, BackupSet)
self.fullset = fullset
assert fullset.time
self.start_time, self.end_time = fullset.time, fullset.time
def add_inc(self, incset):
"""
Add incset to self. Return False if incset does not match
"""
if self.end_time == incset.start_time:
self.incset_list.append(incset)
else:
            if (self.incset_list and
                    incset.start_time == self.incset_list[-1].start_time and
                    incset.end_time > self.incset_list[-1].end_time):
log.Info(_("Preferring Backupset over previous one!"))
self.incset_list[-1] = incset
else:
log.Info(_("Ignoring incremental Backupset (start_time: %s; needed: %s)") %
(dup_time.timetopretty(incset.start_time),
dup_time.timetopretty(self.end_time)))
return False
self.end_time = incset.end_time
log.Info(_("Added incremental Backupset (start_time: %s / end_time: %s)") %
(dup_time.timetopretty(incset.start_time),
dup_time.timetopretty(incset.end_time)))
assert self.end_time
return True
def delete(self, keep_full=False):
"""
Delete all sets in chain, in reverse order
"""
for i in range(len(self.incset_list) - 1, -1, -1):
self.incset_list[i].delete()
if self.fullset and not keep_full:
self.fullset.delete()
def get_sets_at_time(self, time):
"""
Return a list of sets in chain earlier or equal to time
"""
older_incsets = [s for s in self.incset_list if s.end_time <= time]
return [self.fullset] + older_incsets
def get_last(self):
"""
Return last BackupSet in chain
"""
if self.incset_list:
return self.incset_list[-1]
else:
return self.fullset
def get_first(self):
"""
Return first BackupSet in chain (ie the full backup)
"""
return self.fullset
def short_desc(self):
"""
Return a short one-line description of the chain,
suitable for log messages.
"""
return "[%s]-[%s]" % (dup_time.timetopretty(self.start_time),
dup_time.timetopretty(self.end_time))
def to_log_info(self, prefix=''):
"""
Return summary, suitable for printing to log
"""
l = []
for s in self.get_all_sets():
if s.time:
type = "full"
time = s.time
else:
type = "inc"
time = s.end_time
if s.encrypted:
enc = "enc"
else:
enc = "noenc"
l.append("%s%s %s %d %s" % (prefix, type, dup_time.timetostring(time), (len(s)), enc))
return l
def __str__(self):
"""
Return string representation, for testing purposes
"""
set_schema = "%20s %30s %15s"
l = ["-------------------------",
_("Chain start time: ") + dup_time.timetopretty(self.start_time),
_("Chain end time: ") + dup_time.timetopretty(self.end_time),
_("Number of contained backup sets: %d") %
(len(self.incset_list) + 1,),
_("Total number of contained volumes: %d") %
(self.get_num_volumes(),),
set_schema % (_("Type of backup set:"), _("Time:"), _("Num volumes:"))]
for s in self.get_all_sets():
if s.time:
type = _("Full")
time = s.time
else:
type = _("Incremental")
time = s.end_time
l.append(set_schema % (type, dup_time.timetopretty(time), len(s)))
l.append("-------------------------")
return "\n".join(l)
def get_num_volumes(self):
"""
Return the total number of volumes in the chain
"""
n = 0
for s in self.get_all_sets():
n += len(s)
return n
def get_all_sets(self):
"""
Return list of all backup sets in chain
"""
if self.fullset:
return [self.fullset] + self.incset_list
else:
return self.incset_list
class SignatureChain:
"""
A number of linked SignatureSets
Analog to BackupChain - start with a full-sig, and continue with
new-sigs.
"""
def __init__(self, local, location):
"""
Return new SignatureChain.
local should be true iff the signature chain resides in
globals.archive_dir_path and false if the chain is in
globals.backend.
@param local: True if sig chain in globals.archive_dir_path
@type local: Boolean
@param location: Where the sig chain is located
@type location: globals.archive_dir_path or globals.backend
"""
if local:
self.archive_dir_path, self.backend = location, None
else:
self.archive_dir_path, self.backend = None, location
self.fullsig = None # filename of full signature
self.inclist = [] # list of filenames of incremental signatures
self.start_time, self.end_time = None, None
def __str__(self):
"""
Local or Remote and List of files in the set
"""
if self.archive_dir_path:
place = _("local")
else:
place = _("remote")
filelist = []
if self.fullsig:
filelist.append(self.fullsig)
filelist.extend(self.inclist)
return "%s: [%s]" % (place, ", ".join(filelist))
def check_times(self, time_list):
"""
Check to make sure times are in whole seconds
"""
for time in time_list:
if type(time) not in integer_types:
assert 0, "Time %s in %s wrong type" % (time, time_list)
def islocal(self):
"""
Return true if represents a signature chain in archive_dir
"""
if self.archive_dir_path:
return True
else:
return False
def add_filename(self, filename, pr=None):
"""
Add new sig filename to current chain. Return true if fits
"""
if not pr:
pr = file_naming.parse(filename)
if not pr:
return None
if self.fullsig:
if pr.type != "new-sig":
return None
if pr.start_time != self.end_time:
return None
self.inclist.append(filename)
self.check_times([pr.end_time])
self.end_time = pr.end_time
return 1
else:
if pr.type != "full-sig":
return None
self.fullsig = filename
self.check_times([pr.time, pr.time])
self.start_time, self.end_time = pr.time, pr.time
return 1
def get_fileobjs(self, time=None):
"""
Return ordered list of signature fileobjs opened for reading,
optionally at a certain time
"""
assert self.fullsig
if self.archive_dir_path: # local
def filename_to_fileobj(filename):
"""Open filename in archive_dir_path, return filtered fileobj"""
sig_dp = path.DupPath(self.archive_dir_path.name, (filename,))
return sig_dp.filtered_open("rb")
else:
filename_to_fileobj = self.backend.get_fileobj_read
return [filename_to_fileobj(f) for f in self.get_filenames(time)]
def delete(self, keep_full=False):
"""
Remove all files in signature set
"""
# Try to delete in opposite order, so something useful even if aborted
if self.archive_dir_path:
for i in range(len(self.inclist) - 1, -1, -1):
self.archive_dir_path.append(self.inclist[i]).delete()
if not keep_full:
self.archive_dir_path.append(self.fullsig).delete()
else:
assert self.backend
inclist_copy = self.inclist[:]
inclist_copy.reverse()
if not keep_full:
inclist_copy.append(self.fullsig)
self.backend.delete(inclist_copy)
def get_filenames(self, time=None):
"""
Return ordered list of filenames in set, up to a provided time
"""
if self.fullsig:
l = [self.fullsig]
else:
l = []
inclist = self.inclist
if time:
inclist = filter(lambda n: file_naming.parse(n).end_time <= time,
inclist)
l.extend(inclist)
return l
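# A minimal stand-alone sketch (hypothetical helper, not duplicity code) of the
# chaining rule that SignatureChain.add_filename() enforces above: a chain
# starts from exactly one full-sig, and each new-sig is accepted only when its
# start_time equals the chain's current end_time.
def _sig_chain_rule_sketch():
    chain = {'start': None, 'end': None}
    def add(sig_type, start, end):
        if chain['end'] is None:
            if sig_type != 'full-sig':
                return False          # an empty chain only accepts a full-sig
            chain['start'], chain['end'] = start, end
            return True
        if sig_type == 'new-sig' and start == chain['end']:
            chain['end'] = end        # contiguous new-sig extends the chain
            return True
        return False                  # gap or wrong type: belongs to another chain
    assert add('full-sig', 100, 100)
    assert add('new-sig', 100, 200)
    assert not add('new-sig', 150, 300)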
class CollectionsStatus:
"""
Hold information about available chains and sets
"""
def __init__(self, backend, archive_dir_path):
"""
Make new object. Does not set values
"""
self.backend = backend
self.archive_dir_path = archive_dir_path
# Will hold (signature chain, backup chain) pair of active
# (most recent) chains
self.matched_chain_pair = None
# These should be sorted by end_time
self.all_backup_chains = None
self.other_backup_chains = None
self.all_sig_chains = None
# Other misc paths and sets which shouldn't be there
self.local_orphaned_sig_names = []
self.remote_orphaned_sig_names = []
self.orphaned_backup_sets = None
self.incomplete_backup_sets = None
# True if set_values() below has run
self.values_set = None
def to_log_info(self):
"""
Return summary of the collection, suitable for printing to log
"""
l = ["backend %s" % (self.backend.__class__.__name__,),
"archive-dir %s" % (self.archive_dir_path,)]
for i in range(len(self.other_backup_chains)):
# A bit of a misnomer. Chain might have a sig.
l.append("chain-no-sig %d" % (i,))
l += self.other_backup_chains[i].to_log_info(' ')
if self.matched_chain_pair:
l.append("chain-complete")
l += self.matched_chain_pair[1].to_log_info(' ')
l.append("orphaned-sets-num %d" % (len(self.orphaned_backup_sets),))
l.append("incomplete-sets-num %d" % (len(self.incomplete_backup_sets),))
return l
def __unicode__(self):
"""
Return string summary of the collection
"""
l = [_("Collection Status"),
u"-----------------",
_("Connecting with backend: %s") %
(self.backend.__class__.__name__,),
_("Archive dir: %s") % (util.ufn(self.archive_dir_path.name),)]
l.append("\n" +
ngettext("Found %d secondary backup chain.",
"Found %d secondary backup chains.",
len(self.other_backup_chains))
% len(self.other_backup_chains))
for i in range(len(self.other_backup_chains)):
l.append(_("Secondary chain %d of %d:") %
(i + 1, len(self.other_backup_chains)))
l.append(unicode(self.other_backup_chains[i]))
l.append("")
if self.matched_chain_pair:
l.append("\n" + _("Found primary backup chain with matching "
"signature chain:"))
l.append(unicode(self.matched_chain_pair[1]))
else:
l.append(_("No backup chains with active signatures found"))
if self.orphaned_backup_sets or self.incomplete_backup_sets:
l.append(ngettext("Also found %d backup set not part of any chain,",
"Also found %d backup sets not part of any chain,",
len(self.orphaned_backup_sets))
% (len(self.orphaned_backup_sets),))
l.append(ngettext("and %d incomplete backup set.",
"and %d incomplete backup sets.",
len(self.incomplete_backup_sets))
% (len(self.incomplete_backup_sets),))
# TRANSL: "cleanup" is a hard-coded command, so do not translate it
l.append(_('These may be deleted by running duplicity with the '
'"cleanup" command.'))
else:
l.append(_("No orphaned or incomplete backup sets found."))
return u"\n".join(l)
def set_values(self, sig_chain_warning=1):
"""
Set values from archive_dir_path and backend.
Returns self for convenience. If sig_chain_warning is set to None,
do not warn about unnecessary sig chains. This is because there may
        naturally be some unnecessary ones after a full backup.
"""
self.values_set = 1
# get remote filename list
backend_filename_list = self.backend.list()
log.Debug(ngettext("%d file exists on backend",
"%d files exist on backend",
len(backend_filename_list)) %
len(backend_filename_list))
# get local filename list
local_filename_list = self.archive_dir_path.listdir()
log.Debug(ngettext("%d file exists in cache",
"%d files exist in cache",
len(local_filename_list)) %
len(local_filename_list))
# check for partial backups
partials = []
for local_filename in local_filename_list:
pr = file_naming.parse(local_filename)
if pr and pr.partial:
partials.append(local_filename)
# get various backup sets and chains
(backup_chains, self.orphaned_backup_sets,
self.incomplete_backup_sets) = \
self.get_backup_chains(partials + backend_filename_list)
backup_chains = self.get_sorted_chains(backup_chains)
self.all_backup_chains = backup_chains
assert len(backup_chains) == len(self.all_backup_chains), \
"get_sorted_chains() did something more than re-ordering"
local_sig_chains, self.local_orphaned_sig_names = \
self.get_signature_chains(True)
remote_sig_chains, self.remote_orphaned_sig_names = \
self.get_signature_chains(False, filelist=backend_filename_list)
self.set_matched_chain_pair(local_sig_chains + remote_sig_chains,
backup_chains)
self.warn(sig_chain_warning)
return self
def set_matched_chain_pair(self, sig_chains, backup_chains):
"""
Set self.matched_chain_pair and self.other_sig/backup_chains
The latest matched_chain_pair will be set. If there are both
remote and local signature chains capable of matching the
latest backup chain, use the local sig chain (it does not need
to be downloaded).
"""
sig_chains = sig_chains and self.get_sorted_chains(sig_chains)
self.all_sig_chains = sig_chains
self.other_backup_chains = backup_chains[:]
self.matched_chain_pair = None
if sig_chains and backup_chains:
latest_backup_chain = backup_chains[-1]
for i in range(len(sig_chains) - 1, -1, -1):
if sig_chains[i].end_time == latest_backup_chain.end_time:
pass
# See if the set before last matches:
elif (len(latest_backup_chain.get_all_sets()) >= 2 and
sig_chains[i].end_time == latest_backup_chain.get_all_sets()[-2].end_time):
# It matches, remove the last backup set:
log.Warn(_("Warning, discarding last backup set, because "
"of missing signature file."))
self.incomplete_backup_sets.append(latest_backup_chain.incset_list[-1])
latest_backup_chain.incset_list = latest_backup_chain.incset_list[:-1]
else:
continue
# Found a matching pair:
if self.matched_chain_pair is None:
self.matched_chain_pair = (sig_chains[i], latest_backup_chain)
break
if self.matched_chain_pair:
self.other_backup_chains.remove(self.matched_chain_pair[1])
def warn(self, sig_chain_warning):
"""
        Log warning messages if incomplete/orphaned files are found
"""
assert self.values_set
if self.local_orphaned_sig_names:
log.Warn(ngettext("Warning, found the following local orphaned "
"signature file:",
"Warning, found the following local orphaned "
"signature files:",
len(self.local_orphaned_sig_names)) + u"\n" +
u"\n".join(map(util.ufn, self.local_orphaned_sig_names)),
log.WarningCode.orphaned_sig)
if self.remote_orphaned_sig_names:
log.Warn(ngettext("Warning, found the following remote orphaned "
"signature file:",
"Warning, found the following remote orphaned "
"signature files:",
len(self.remote_orphaned_sig_names)) + u"\n" +
u"\n".join(map(util.ufn, self.remote_orphaned_sig_names)),
log.WarningCode.orphaned_sig)
if self.all_sig_chains and sig_chain_warning and not self.matched_chain_pair:
log.Warn(_("Warning, found signatures but no corresponding "
"backup files"), log.WarningCode.unmatched_sig)
if self.incomplete_backup_sets:
log.Warn(_("Warning, found incomplete backup sets, probably left "
"from aborted session"), log.WarningCode.incomplete_backup)
if self.orphaned_backup_sets:
log.Warn(ngettext("Warning, found the following orphaned "
"backup file:",
"Warning, found the following orphaned "
"backup files:",
len(self.orphaned_backup_sets)) + u"\n" +
u"\n".join(map(unicode, self.orphaned_backup_sets)),
log.WarningCode.orphaned_backup)
def get_backup_chains(self, filename_list):
"""
Split given filename_list into chains
Return value will be tuple (list of chains, list of sets, list
of incomplete sets), where the list of sets will comprise sets
not fitting into any chain, and the incomplete sets are sets
missing files.
"""
log.Debug(_("Extracting backup chains from list of files: %s")
% [util.ufn(f) for f in filename_list])
# First put filenames in set form
sets = []
def add_to_sets(filename):
"""
Try adding filename to existing sets, or make new one
"""
for set in sets:
if set.add_filename(filename):
log.Debug(_("File %s is part of known set") % (util.ufn(filename),))
break
else:
log.Debug(_("File %s is not part of a known set; creating new set") % (util.ufn(filename),))
new_set = BackupSet(self.backend)
if new_set.add_filename(filename):
sets.append(new_set)
else:
log.Debug(_("Ignoring file (rejected by backup set) '%s'") % util.ufn(filename))
for f in filename_list:
add_to_sets(f)
sets, incomplete_sets = self.get_sorted_sets(sets)
chains, orphaned_sets = [], []
def add_to_chains(set):
"""
Try adding set to existing chains, or make new one
"""
if set.type == "full":
new_chain = BackupChain(self.backend)
new_chain.set_full(set)
chains.append(new_chain)
log.Debug(_("Found backup chain %s") % (new_chain.short_desc()))
else:
assert set.type == "inc"
for chain in chains:
if chain.add_inc(set):
log.Debug(_("Added set %s to pre-existing chain %s") % (set.get_timestr(),
chain.short_desc()))
break
else:
log.Debug(_("Found orphaned set %s") % (set.get_timestr(),))
orphaned_sets.append(set)
for s in sets:
add_to_chains(s)
return (chains, orphaned_sets, incomplete_sets)
def get_sorted_sets(self, set_list):
"""
Sort set list by end time, return (sorted list, incomplete)
"""
time_set_pairs, incomplete_sets = [], []
for set in set_list:
if not set.is_complete():
incomplete_sets.append(set)
elif set.type == "full":
time_set_pairs.append((set.time, set))
else:
time_set_pairs.append((set.end_time, set))
time_set_pairs.sort()
return ([p[1] for p in time_set_pairs], incomplete_sets)
def get_signature_chains(self, local, filelist=None):
"""
Find chains in archive_dir_path (if local is true) or backend
Use filelist if given, otherwise regenerate. Return value is
pair (list of chains, list of signature paths not in any
chains).
"""
def get_filelist():
if filelist is not None:
return filelist
elif local:
return self.archive_dir_path.listdir()
else:
return self.backend.list()
def get_new_sigchain():
"""
Return new empty signature chain
"""
if local:
return SignatureChain(True, self.archive_dir_path)
else:
return SignatureChain(False, self.backend)
# Build initial chains from full sig filenames
chains, new_sig_filenames = [], []
for filename in get_filelist():
pr = file_naming.parse(filename)
if pr:
if pr.type == "full-sig":
new_chain = get_new_sigchain()
assert new_chain.add_filename(filename, pr)
chains.append(new_chain)
elif pr.type == "new-sig":
new_sig_filenames.append(filename)
# compare by file time
def by_start_time(a, b):
return int(file_naming.parse(a).start_time) - int(file_naming.parse(b).start_time)
# Try adding new signatures to existing chains
orphaned_filenames = []
new_sig_filenames.sort(by_start_time)
for sig_filename in new_sig_filenames:
for chain in chains:
if chain.add_filename(sig_filename):
break
else:
orphaned_filenames.append(sig_filename)
return (chains, orphaned_filenames)
def get_sorted_chains(self, chain_list):
"""
        Return chains sorted by end_time. On a tie, the local chain goes last (see the sketch after this class)
"""
# Build dictionary from end_times to lists of corresponding chains
endtime_chain_dict = {}
for chain in chain_list:
if chain.end_time in endtime_chain_dict:
endtime_chain_dict[chain.end_time].append(chain)
else:
endtime_chain_dict[chain.end_time] = [chain]
# Use dictionary to build final sorted list
sorted_end_times = endtime_chain_dict.keys()
sorted_end_times.sort()
sorted_chain_list = []
for end_time in sorted_end_times:
chain_list = endtime_chain_dict[end_time]
if len(chain_list) == 1:
sorted_chain_list.append(chain_list[0])
else:
assert len(chain_list) == 2
if chain_list[0].backend: # is remote, goes first
sorted_chain_list.append(chain_list[0])
sorted_chain_list.append(chain_list[1])
else: # is local, goes second
sorted_chain_list.append(chain_list[1])
sorted_chain_list.append(chain_list[0])
return sorted_chain_list
def get_backup_chain_at_time(self, time):
"""
Return backup chain covering specified time
Tries to find the backup chain covering the given time. If
there is none, return the earliest chain before, and failing
that, the earliest chain.
"""
if not self.all_backup_chains:
raise CollectionsError("No backup chains found")
covering_chains = [c for c in self.all_backup_chains
if c.start_time <= time <= c.end_time]
if len(covering_chains) > 1:
raise CollectionsError("Two chains cover the given time")
elif len(covering_chains) == 1:
return covering_chains[0]
old_chains = [c for c in self.all_backup_chains if c.end_time < time]
if old_chains:
return old_chains[-1]
else:
return self.all_backup_chains[0] # no chains are old enough
def get_signature_chain_at_time(self, time):
"""
Return signature chain covering specified time
Tries to find the signature chain covering the given time. If
there is none, return the earliest chain before, and failing
that, the earliest chain.
"""
if not self.all_sig_chains:
raise CollectionsError("No signature chains found")
covering_chains = [c for c in self.all_sig_chains
if c.start_time <= time <= c.end_time]
if covering_chains:
return covering_chains[-1] # prefer local if multiple sig chains
old_chains = [c for c in self.all_sig_chains if c.end_time < time]
if old_chains:
return old_chains[-1]
else:
# no chains are old enough, give oldest and warn user
oldest = self.all_sig_chains[0]
if time < oldest.start_time:
log.Warn(_("No signature chain for the requested time. "
"Using oldest available chain, starting at time %s.") %
dup_time.timetopretty(oldest.start_time),
log.WarningCode.no_sig_for_time,
dup_time.timetostring(oldest.start_time))
return oldest
def get_extraneous(self, extra_clean):
"""
Return list of the names of extraneous duplicity files
A duplicity file is considered extraneous if it is
recognizable as a duplicity file, but isn't part of some
complete backup set, or current signature chain.
"""
assert self.values_set
local_filenames = []
remote_filenames = []
ext_containers = self.orphaned_backup_sets + self.incomplete_backup_sets
if extra_clean:
old_sig_chains = self.all_sig_chains[:]
if self.matched_chain_pair:
matched_sig_chain = self.matched_chain_pair[0]
for sig_chain in self.all_sig_chains:
if (sig_chain.start_time == matched_sig_chain.start_time and
sig_chain.end_time == matched_sig_chain.end_time):
old_sig_chains.remove(sig_chain)
ext_containers += old_sig_chains
for set_or_chain in ext_containers:
if set_or_chain.backend:
remote_filenames.extend(set_or_chain.get_filenames())
else:
local_filenames.extend(set_or_chain.get_filenames())
local_filenames += self.local_orphaned_sig_names
remote_filenames += self.remote_orphaned_sig_names
return local_filenames, remote_filenames
def sort_sets(self, setlist):
"""Return new list containing same elems of setlist, sorted by time"""
pairs = [(s.get_time(), s) for s in setlist]
pairs.sort()
return [p[1] for p in pairs]
def get_chains_older_than(self, t):
"""
Returns a list of backup chains older than the given time t
All of the times will be associated with an intact chain.
Furthermore, none of the times will be of a chain which a newer
set may depend on. For instance, if set A is a full set older
than t, and set B is an incremental based on A which is newer
than t, then the time of set A will not be returned.
"""
assert self.values_set
old_chains = []
for chain in self.all_backup_chains:
if (chain.end_time < t and
(not self.matched_chain_pair or
chain is not self.matched_chain_pair[1])):
# don't delete the active (matched) chain
old_chains.append(chain)
return old_chains
def get_signature_chains_older_than(self, t):
"""
Returns a list of signature chains older than the given time t
All of the times will be associated with an intact chain.
Furthermore, none of the times will be of a chain which a newer
set may depend on. For instance, if set A is a full set older
than t, and set B is an incremental based on A which is newer
than t, then the time of set A will not be returned.
"""
assert self.values_set
old_chains = []
for chain in self.all_sig_chains:
if (chain.end_time < t and
(not self.matched_chain_pair or
chain is not self.matched_chain_pair[0])):
# don't delete the active (matched) chain
old_chains.append(chain)
return old_chains
def get_last_full_backup_time(self):
"""
Return the time of the last full backup,
or 0 if there is none.
"""
return self.get_nth_last_full_backup_time(1)
def get_nth_last_full_backup_time(self, n):
"""
Return the time of the nth to last full backup,
or 0 if there is none.
"""
chain = self.get_nth_last_backup_chain(n)
if chain is None:
return 0
else:
return chain.get_first().time
def get_last_backup_chain(self):
"""
Return the last full backup of the collection,
or None if there is no full backup chain.
"""
return self.get_nth_last_backup_chain(1)
def get_nth_last_backup_chain(self, n):
"""
Return the nth-to-last full backup of the collection,
        or None if there are fewer than n backup chains.
NOTE: n = 1 -> time of latest available chain (n = 0 is not
a valid input). Thus the second-to-last is obtained with n=2
rather than n=1.
"""
def mycmp(x, y):
return cmp(x.get_first().time, y.get_first().time)
assert self.values_set
assert n > 0
if len(self.all_backup_chains) < n:
return None
sorted = self.all_backup_chains[:]
sorted.sort(mycmp)
sorted.reverse()
return sorted[n - 1]
def get_older_than(self, t):
"""
Returns a list of backup sets older than the given time t
All of the times will be associated with an intact chain.
Furthermore, none of the times will be of a set which a newer
set may depend on. For instance, if set A is a full set older
than t, and set B is an incremental based on A which is newer
than t, then the time of set A will not be returned.
"""
old_sets = []
for chain in self.get_chains_older_than(t):
old_sets.extend(chain.get_all_sets())
return self.sort_sets(old_sets)
def get_older_than_required(self, t):
"""
Returns list of old backup sets required by new sets
This function is similar to the previous one, but it only
returns the times of sets which are old but part of the chains
where the newer end of the chain is newer than t.
"""
assert self.values_set
new_chains = filter(lambda c: c.end_time >= t, self.all_backup_chains)
result_sets = []
for chain in new_chains:
old_sets = filter(lambda s: s.get_time() < t, chain.get_all_sets())
result_sets.extend(old_sets)
return self.sort_sets(result_sets)
def get_file_changed_record(self, filepath):
"""
        Return the timeline of changes for the specified file
"""
        # Filenames with spaces are recorded quoted with escaped spaces;
        # default to the raw path so space-free paths still match below.
        modified_filepath = filepath
if " " in filepath:
modified_filepath = '"' + filepath.replace(" ", r"\x20") + '"'
if not self.matched_chain_pair:
return ""
all_backup_set = self.matched_chain_pair[1].get_all_sets()
specified_file_backup_set = []
specified_file_backup_type = []
for bs in all_backup_set:
filelist = [fileinfo[1] for fileinfo in bs.get_files_changed()]
if modified_filepath in filelist:
specified_file_backup_set.append(bs)
index = filelist.index(modified_filepath)
specified_file_backup_type.append(bs.get_files_changed()[index][0])
return FileChangedStatus(filepath, list(zip(specified_file_backup_type, specified_file_backup_set)))
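# A minimal stand-alone sketch (hypothetical stub objects, not duplicity code)
# of the tie-break in CollectionsStatus.get_sorted_chains() above: chains sort
# by end_time, and when a remote and a local chain share an end_time the remote
# one is placed first, so the local chain ends up last and is preferred by code
# that scans the sorted list backwards.
def _sorted_chains_tiebreak_sketch():
    class _StubChain(object):
        def __init__(self, end_time, backend):
            self.end_time, self.backend = end_time, backend
    remote = _StubChain(500, backend=object())   # backend set -> remote chain
    local = _StubChain(500, backend=None)        # no backend -> local chain
    pair = [remote, local] if remote.backend else [local, remote]
    assert pair[-1] is local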
class FileChangedStatus:
def __init__(self, filepath, fileinfo_list):
self.filepath = filepath
self.fileinfo_list = fileinfo_list
def __unicode__(self):
set_schema = "%20s %30s %20s"
l = ["-------------------------",
_("File: %s") % (self.filepath),
_("Total number of backup: %d") % len(self.fileinfo_list),
set_schema % (_("Type of backup set:"), _("Time:"), _("Type of file change:"))]
for s in self.fileinfo_list:
backup_type = s[0]
backup_set = s[1]
if backup_set.time:
type = _("Full")
else:
type = _("Incremental")
l.append(set_schema % (type, dup_time.timetopretty(backup_set.get_time()), backup_type.title()))
l.append("-------------------------")
return "\n".join(l)
| gpl-2.0 |
peterfpeterson/mantid | scripts/Calibration/tube_calib_fit_params.py | 3 | 2897 | # Mantid Repository : https://github.com/mantidproject/mantid
#
# Copyright © 2018 ISIS Rutherford Appleton Laboratory UKRI,
# NScD Oak Ridge National Laboratory, European Spallation Source,
# Institut Laue - Langevin & CSNS, Institute of High Energy Physics, CAS
# SPDX - License - Identifier: GPL - 3.0 +
class TubeCalibFitParams(object):
    # This class holds the fitting method and parameters for fitting the peaks created by the calibration slits etc.
# and to deliver them to TubeCalib, so it can fit the peaks appropriately
# Author: Karl Palmen ISIS
def __init__(self, peaks, height=1000.0, width=30.0, threePointMethod=False, outEdge=30.0, inEdge=50.0, edgeGrad=6.0, margin=15):
"""
Holds the parameters needed for fitting the positions of the peaks formed by the slits or edges.
The constructor has the following arguments:
:param peaks: expected positions of the peaks in pixels
:param height: expect height of peaks
:param width: expected width (sigma for Gaussian fitting) of peaks
:param threePointMethod: True if three point method is used (first and last peaks are the extreme ends of very wide peaks).
:param margin: defines the region around the peak that will be considered for fitting
This class has also an attribute, called automatic, accessed through
:meth:`~tube_calib_fit_params.TubeCalibFitParams.getAutomatic`, that
        defines whether a dynamic evaluation of the fit parameters can be used or not
(see :func:`~tube.calibrate` to check how the automatic flag will be used).
The function :func:`~tube_calib.getCalibration` of :mod:`tube_calib` needs such an object.
"""
# Peaks
self.height = height*1.0
self.width = width*1.0
self.peaks = peaks
# Margin
self.margin = margin
# Three pointMethod parameter (to be phased out)
self.threePointMethod = threePointMethod
# Edges
self.outEdge = outEdge
self.inEdge = inEdge
self.edgeGrad = edgeGrad
self.automatic = False
def getPeaks(self):
return self.peaks
def getHeightAndWidth(self):
return self.height, self.width
def isThreePointMethod(self):
return self.threePointMethod
def getEdgeParameters(self):
return self.outEdge, self.inEdge, self.edgeGrad
def getMargin(self):
return self.margin
def setMargin(self, mar):
self.margin = mar
def setAutomatic(self, boolOption):
self.automatic = boolOption
def getAutomatic(self):
return self.automatic
def __str__(self):
        return ('peaks: ' + str(self.peaks) + ' height: ' + str(self.height)
                + ' width: ' + str(self.width) + ' margin: ' + str(self.margin)
                + ' outedge: ' + str(self.outEdge) + ' inedge: ' + str(self.inEdge)
                + ' edgegrad: ' + str(self.edgeGrad))
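# Illustrative usage sketch; the peak positions, height and width below are
# made-up numbers, not calibration values for any real instrument:
if __name__ == '__main__':
    fit_params = TubeCalibFitParams([57.5, 255.0, 452.5], height=2000.0, width=25.0)
    fit_params.setAutomatic(True)
    print(fit_params.getHeightAndWidth())  # (2000.0, 25.0)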
| gpl-3.0 |
apanju/GMIO_Odoo | openerp/tests/common.py | 39 | 13964 | # -*- coding: utf-8 -*-
"""
The module :mod:`openerp.tests.common` provides unittest2 test cases and a few
helpers and classes to write tests.
"""
import errno
import glob
import json
import logging
import os
import select
import subprocess
import threading
import time
import itertools
import unittest2
import urllib2
import xmlrpclib
from contextlib import contextmanager
from datetime import datetime, timedelta
import werkzeug
import openerp
from openerp import api
from openerp.modules.registry import RegistryManager
_logger = logging.getLogger(__name__)
# The openerp library is supposed already configured.
ADDONS_PATH = openerp.tools.config['addons_path']
HOST = '127.0.0.1'
PORT = openerp.tools.config['xmlrpc_port']
# Useless constant, tests are aware of the content of demo data
ADMIN_USER_ID = openerp.SUPERUSER_ID
def get_db_name():
db = openerp.tools.config['db_name']
# If the database name is not provided on the command-line,
# use the one on the thread (which means if it is provided on
# the command-line, this will break when installing another
# database from XML-RPC).
if not db and hasattr(threading.current_thread(), 'dbname'):
return threading.current_thread().dbname
return db
# For backwards-compatibility - get_db_name() should be used instead
DB = get_db_name()
def at_install(flag):
""" Sets the at-install state of a test, the flag is a boolean specifying
whether the test should (``True``) or should not (``False``) run during
module installation.
By default, tests are run right after installing the module, before
starting the installation of the next module.
"""
def decorator(obj):
obj.at_install = flag
return obj
return decorator
def post_install(flag):
""" Sets the post-install state of a test. The flag is a boolean
specifying whether the test should or should not run after a set of
module installations.
By default, tests are *not* run after installation of all modules in the
current installation set.
"""
def decorator(obj):
obj.post_install = flag
return obj
return decorator
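# Hedged demonstration of the two flags above: the decorators simply set
# attributes on the decorated object, which the test runner later inspects.
# The decorated function below is a hypothetical stand-in for a test class.
if __name__ == '__main__':
    @at_install(False)
    @post_install(True)
    def _demo_test():
        pass
    assert _demo_test.at_install is False
    assert _demo_test.post_install is True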
class BaseCase(unittest2.TestCase):
"""
Subclass of TestCase for common OpenERP-specific code.
This class is abstract and expects self.registry, self.cr and self.uid to be
initialized by subclasses.
"""
def cursor(self):
return self.registry.cursor()
def ref(self, xid):
""" Returns database ID for the provided :term:`external identifier`,
shortcut for ``get_object_reference``
:param xid: fully-qualified :term:`external identifier`, in the form
:samp:`{module}.{identifier}`
:raise: ValueError if not found
:returns: registered id
"""
assert "." in xid, "this method requires a fully qualified parameter, in the following form: 'module.identifier'"
module, xid = xid.split('.')
_, id = self.registry('ir.model.data').get_object_reference(self.cr, self.uid, module, xid)
return id
def browse_ref(self, xid):
""" Returns a record object for the provided
:term:`external identifier`
:param xid: fully-qualified :term:`external identifier`, in the form
:samp:`{module}.{identifier}`
:raise: ValueError if not found
:returns: :class:`~openerp.models.BaseModel`
"""
assert "." in xid, "this method requires a fully qualified parameter, in the following form: 'module.identifier'"
module, xid = xid.split('.')
return self.registry('ir.model.data').get_object(self.cr, self.uid, module, xid)
@contextmanager
def _assertRaises(self, exception):
""" Context manager that clears the environment upon failure. """
with super(BaseCase, self).assertRaises(exception) as cm:
with self.env.clear_upon_failure():
yield cm
def assertRaises(self, exception, func=None, *args, **kwargs):
if func:
with self._assertRaises(exception):
func(*args, **kwargs)
else:
return self._assertRaises(exception)
class TransactionCase(BaseCase):
""" TestCase in which each test method is run in its own transaction,
and with its own cursor. The transaction is rolled back and the cursor
is closed after each test.
"""
def setUp(self):
self.registry = RegistryManager.get(get_db_name())
#: current transaction's cursor
self.cr = self.cursor()
self.uid = openerp.SUPERUSER_ID
#: :class:`~openerp.api.Environment` for the current test case
self.env = api.Environment(self.cr, self.uid, {})
def tearDown(self):
# rollback and close the cursor, and reset the environments
self.env.reset()
self.cr.rollback()
self.cr.close()
def patch_order(self, model, order):
m_e = self.env[model]
m_r = self.registry(model)
old_order = m_e._order
@self.addCleanup
def cleanup():
m_r._order = type(m_e)._order = old_order
m_r._order = type(m_e)._order = order
class SingleTransactionCase(BaseCase):
""" TestCase in which all test methods are run in the same transaction,
the transaction is started with the first test method and rolled back at
the end of the last.
"""
@classmethod
def setUpClass(cls):
cls.registry = RegistryManager.get(get_db_name())
cls.cr = cls.registry.cursor()
cls.uid = openerp.SUPERUSER_ID
cls.env = api.Environment(cls.cr, cls.uid, {})
@classmethod
def tearDownClass(cls):
# rollback and close the cursor, and reset the environments
cls.env.reset()
cls.cr.rollback()
cls.cr.close()
savepoint_seq = itertools.count()
class SavepointCase(SingleTransactionCase):
""" Similar to :class:`SingleTransactionCase` in that all test methods
are run in a single transaction *but* each test case is run inside a
rollbacked savepoint (sub-transaction).
Useful for test cases containing fast tests but with significant database
setup common to all cases (complex in-db test data): :meth:`~.setUpClass`
can be used to generate db test data once, then all test cases use the
same data without influencing one another but without having to recreate
the test data either.
"""
def setUp(self):
self._savepoint_id = next(savepoint_seq)
self.cr.execute('SAVEPOINT test_%d' % self._savepoint_id)
def tearDown(self):
self.cr.execute('ROLLBACK TO SAVEPOINT test_%d' % self._savepoint_id)
self.env.clear()
self.registry.clear_caches()
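# Illustrative sketch of how SavepointCase is meant to be used (hypothetical
# model and test names; kept as a comment so test runners do not collect it):
#
#     class TestHeavyFixtures(SavepointCase):
#         @classmethod
#         def setUpClass(cls):
#             super(TestHeavyFixtures, cls).setUpClass()
#             cls.partner = cls.env['res.partner'].create({'name': 'demo'})
#
#         def test_rename(self):
#             # runs inside its own savepoint, rolled back in tearDown()
#             self.partner.name = 'renamed'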
class RedirectHandler(urllib2.HTTPRedirectHandler):
"""
HTTPRedirectHandler is predicated upon HTTPErrorProcessor being used and
    works by intercepting 3xx "errors".
    Inherit from it to handle 3xx non-error responses instead, as we're not
using the error processor
"""
def http_response(self, request, response):
code, msg, hdrs = response.code, response.msg, response.info()
if 300 <= code < 400:
return self.parent.error(
'http', request, response, code, msg, hdrs)
return response
https_response = http_response
class HttpCase(TransactionCase):
""" Transactional HTTP TestCase with url_open and phantomjs helpers.
"""
def __init__(self, methodName='runTest'):
super(HttpCase, self).__init__(methodName)
# v8 api with correct xmlrpc exception handling.
self.xmlrpc_url = url_8 = 'http://%s:%d/xmlrpc/2/' % (HOST, PORT)
self.xmlrpc_common = xmlrpclib.ServerProxy(url_8 + 'common')
self.xmlrpc_db = xmlrpclib.ServerProxy(url_8 + 'db')
self.xmlrpc_object = xmlrpclib.ServerProxy(url_8 + 'object')
def setUp(self):
super(HttpCase, self).setUp()
self.registry.enter_test_mode()
# setup a magic session_id that will be rollbacked
self.session = openerp.http.root.session_store.new()
self.session_id = self.session.sid
self.session.db = get_db_name()
openerp.http.root.session_store.save(self.session)
# setup an url opener helper
self.opener = urllib2.OpenerDirector()
self.opener.add_handler(urllib2.UnknownHandler())
self.opener.add_handler(urllib2.HTTPHandler())
self.opener.add_handler(urllib2.HTTPSHandler())
self.opener.add_handler(urllib2.HTTPCookieProcessor())
self.opener.add_handler(RedirectHandler())
self.opener.addheaders.append(('Cookie', 'session_id=%s' % self.session_id))
def tearDown(self):
self.registry.leave_test_mode()
super(HttpCase, self).tearDown()
def url_open(self, url, data=None, timeout=10):
if url.startswith('/'):
url = "http://localhost:%s%s" % (PORT, url)
return self.opener.open(url, data, timeout)
def authenticate(self, user, password):
if user is not None:
url = '/login?%s' % werkzeug.urls.url_encode({'db': get_db_name(),'login': user, 'key': password})
auth = self.url_open(url)
assert auth.getcode() < 400, "Auth failure %d" % auth.getcode()
def phantom_poll(self, phantom, timeout):
""" Phantomjs Test protocol.
Use console.log in phantomjs to output test results:
- for a success: console.log("ok")
- for an error: console.log("error")
Other lines are relayed to the test log.
"""
t0 = datetime.now()
td = timedelta(seconds=timeout)
buf = bytearray()
while True:
# timeout
self.assertLess(datetime.now() - t0, td,
"PhantomJS tests should take less than %s seconds" % timeout)
# read a byte
try:
ready, _, _ = select.select([phantom.stdout], [], [], 0.5)
except select.error, e:
# In Python 2, select.error has no relation to IOError or
# OSError, and no errno/strerror/filename, only a pair of
# unnamed arguments (matching errno and strerror)
err, _ = e.args
if err == errno.EINTR:
continue
raise
if ready:
s = phantom.stdout.read(1)
if not s:
break
buf.append(s)
# process lines
if '\n' in buf:
line, buf = buf.split('\n', 1)
line = str(line)
# relay everything from console.log, even 'ok' or 'error...' lines
_logger.info("phantomjs: %s", line)
if line == "ok":
break
if line.startswith("error"):
line_ = line[6:]
                    # when an error occurs the execution stack may be sent as JSON
try:
line_ = json.loads(line_)
except ValueError:
pass
self.fail(line_ or "phantomjs test failed")
def phantom_run(self, cmd, timeout):
_logger.info('phantom_run executing %s', ' '.join(cmd))
ls_glob = os.path.expanduser('~/.qws/share/data/Ofi Labs/PhantomJS/http_localhost_%s.*'%PORT)
for i in glob.glob(ls_glob):
_logger.info('phantomjs unlink localstorage %s', i)
os.unlink(i)
try:
phantom = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=None)
except OSError:
raise unittest2.SkipTest("PhantomJS not found")
try:
self.phantom_poll(phantom, timeout)
finally:
# kill phantomjs if phantom.exit() wasn't called in the test
if phantom.poll() is None:
phantom.terminate()
phantom.wait()
self._wait_remaining_requests()
# we ignore phantomjs return code as we kill it as soon as we have ok
_logger.info("phantom_run execution finished")
def _wait_remaining_requests(self):
t0 = int(time.time())
for thread in threading.enumerate():
if thread.name.startswith('openerp.service.http.request.'):
while thread.isAlive():
# Need a busyloop here as thread.join() masks signals
# and would prevent the forced shutdown.
thread.join(0.05)
time.sleep(0.05)
t1 = int(time.time())
if t0 != t1:
_logger.info('remaining requests')
openerp.tools.misc.dumpstacks()
t0 = t1
def phantom_js(self, url_path, code, ready="window", login=None, timeout=60, **kw):
""" Test js code running in the browser
        - optionally log in as 'login'
- load page given by url_path
- wait for ready object to be available
- eval(code) inside the page
To signal success test do:
console.log('ok')
To signal failure do:
console.log('error')
If neither are done before timeout test fails.
"""
options = {
'port': PORT,
'db': get_db_name(),
'url_path': url_path,
'code': code,
'ready': ready,
'timeout' : timeout,
'login' : login,
'session_id': self.session_id,
}
options.update(kw)
options.setdefault('password', options.get('login'))
phantomtest = os.path.join(os.path.dirname(__file__), 'phantomtest.js')
cmd = ['phantomjs', phantomtest, json.dumps(options)]
self.phantom_run(cmd, timeout)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
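# Illustrative usage sketch for phantom_js() above (hypothetical URL, test
# code and login; kept as a comment so test runners do not collect it):
#
#     class TestUi(HttpCase):
#         def test_frontpage(self):
#             self.phantom_js('/web', "console.log('ok');",
#                             ready="window", login='admin', timeout=120)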
| agpl-3.0 |
centrofermi/e3pipe | mc/E3TelescopeBase.py | 1 | 7976 | #!/usr/bin/env python
# *********************************************************************
# * Copyright (C) 2014 Luca Baldini (luca.baldini@pi.infn.it) *
# * *
# * For the license terms see the file LICENSE, distributed *
# * along with this software. *
# *********************************************************************
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import random
import math
from e3pipe.tracking.E3Point import E3Point
from e3pipe.tracking.E3Vector import E3Vector
from e3pipe.tracking.E3Track import E3Track
from e3pipe.mc.E3MuonFluxService import E3MuonFluxService
from e3pipe.mc.E3PoissonService import E3PoissonService
from e3pipe.tracking.E3FittingTool2dWeighted import E3FittingTool2dWeighted
from e3pipe.tracking.E3FittingTool2dUnweighted import E3FittingTool2dUnweighted
from e3pipe.tracking.E3FittingToolAnalyzer import E3FittingToolAnalyzer
from e3pipe.mc.__mrpc__ import *
class E3TelescopeBase:
""" Base class describing the basic geometry of a telescope.
We use a coordinate system whose origin is in one of the corners of the
lowermost RPC plane.
"""
FIT_TOOL_DICT = {'2dw' : E3FittingTool2dWeighted,
'2dnw': E3FittingTool2dUnweighted,
'ana' : E3FittingToolAnalyzer
}
def __init__(self, name = 'EEE-00',
d12 = 50., d23 = 50., phiNorth = 0.,
latitude = 0., longitude = 0, altitude = 0.,
triggerMask = 0b111, fitTool = '2dw'):
""" Constructor.
"""
self.__Name = name
self.__Z = [0., d12, d12 + d23]
self.__PhiNorth = phiNorth
self.__TriggerMask = triggerMask
self.__Longitude = longitude
self.__Latitude = latitude
self.__Altitude = altitude
self.__FluxService = E3MuonFluxService()
self.__PoissonService = E3PoissonService(7.5e-3*MRPC_ACTIVE_AREA)
self.__FittingTool = self.FIT_TOOL_DICT[fitTool]()
def name(self):
""" Return the name.
"""
return self.__Name
def fluxService(self):
"""
"""
return self.__FluxService
def z(self, plane):
""" Return the z coordinate of a given plane (0 is the lowermost one,
2 is the uppermost one.)
"""
return self.__Z[plane]
def ztop(self):
""" Return the z coordinate of the uppermost plane.
"""
return self.__Z[2]
def zmid(self):
""" Return the z coordinate of the mid plane.
"""
return self.__Z[1]
def zbot(self):
""" Return the z coordinate of the lowermost plane.
"""
return self.__Z[0]
def phiNorth(self):
""" Return the angle w.r.t. the magnetic North,
"""
return self.__PhiNorth
def randomPoint(self, plane = 2):
"""
"""
x = random.uniform(MRPC_X_MIN, MRPC_X_MAX)
y = random.uniform(MRPC_Y_MIN, MRPC_Y_MAX)
z = self.z(plane)
return E3Point(x, y, z)
def shootMuon(self):
""" Generate and propagate a muon track through the detector.
This is actually the main class method as far as Monte Carlo
simulations are concerned.
TODO: we have to generalize for a generic trigger mask, which
shouldn't be too hard.
"""
# Extract a random time for a stationary Poisson process.
timestamp = self.__PoissonService.next()
# Extract a random direction from the flux service.
mcDirection = self.__FluxService.next()
# Extract a random point on the top plane.
mcHitTop = self.randomPoint(plane = 2)
# Create the MC track and extrapolate it to the other planes.
mcTrack = E3Track(mcHitTop, mcDirection)
mcHitMid = mcTrack.extrapolate(self.zmid())
mcHitBot = mcTrack.extrapolate(self.zbot())
# Check that the event triggered the telescope.
if not self.withinActiveArea(mcHitBot.x(), mcHitBot.y()):
return
# Digitize the hits in the detector.
digiHitTop = self.digitize(mcHitTop)
digiHitMid = self.digitize(mcHitMid)
digiHitBot = self.digitize(mcHitBot)
# Find the best-fit track.
self.__FittingTool.run([digiHitBot, digiHitMid, digiHitTop])
reconTrack = self.__FittingTool.track()
# Calculate the track length and the time of flight.
pbot = reconTrack.extrapolate(self.zbot())
ptop = reconTrack.extrapolate(self.ztop())
trackLength = pbot.dist(ptop)
        c = 29979245800.  # speed of light in cm/s
tof = trackLength/c*1000000000
# Rotate the director cosines in the absolute reference system.
mcDirectionRot = mcDirection.rotatez(self.phiNorth(), deg = True)
reconDirectionRot = reconTrack.direction().rotatez(self.phiNorth(),
deg = True)
return {'Seconds' : timestamp.seconds(),
'Nanoseconds' : timestamp.nanoseconds(),
'McPosXBot' : mcHitBot.x(),
'McPosYBot' : mcHitBot.y(),
'McPosXMid' : mcHitMid.x(),
'McPosYMid' : mcHitMid.y(),
'McPosXTop' : mcHitTop.x(),
'McPosYTop' : mcHitTop.y(),
'McXDir' : mcDirectionRot.x(),
'McYDir' : mcDirectionRot.y(),
'McZDir' : mcDirectionRot.z(),
'PosXBot' : digiHitBot.x(),
'PosYBot' : digiHitBot.y(),
'PosXMid' : digiHitMid.x(),
'PosYMid' : digiHitMid.y(),
'PosXTop' : digiHitTop.x(),
'PosYTop' : digiHitTop.y(),
'IntersectXMid' : reconTrack.x0(),
'IntersectYMid' : reconTrack.y0(),
'IntersectZMid' : reconTrack.z0(),
'XDir' : reconDirectionRot.x(),
'YDir' : reconDirectionRot.y(),
'ZDir' : reconDirectionRot.z(),
'ChiSquare' : reconTrack.chi2(),
'TimeOfFlight' : tof,
'TrackLength' : trackLength
}
def withinActiveArea(self, x, y):
""" Return whether a given (x, y) two-dimensional point is within
the active area.
"""
return x >= 0 and x <= MRPC_LENGTH and y >= 0 and y <= MRPC_WIDTH
def digitize(self, point):
""" Digitize a hit on a plane of the MRPC.
"""
x = random.gauss(point.x(), MRPC_LONGITUDINAL_SIGMA)
y = int(point.y()/MRPC_STRIP_PITCH + 0.5)*MRPC_STRIP_PITCH
z = point.z()
return E3Point(x, y, z)
def __str__(self):
""" String formatting.
"""
return '%s: z = %s, phi to N = %.3f' %\
(self.name(), self.__Z, self.phiNorth())
if __name__ == '__main__':
telescope = E3TelescopeBase()
print telescope
p = telescope.randomPoint(2)
print p, telescope.digitize(p)
ztop = telescope.ztop()
p = E3Point(10, 1.7, ztop)
print p, telescope.digitize(p)
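    # Hedged numeric check of the time-of-flight arithmetic in shootMuon(),
    # assuming lengths are in centimetres (as the constant c there implies):
    # with the default geometry d12 = d23 = 50, a vertical muon crosses
    # ztop - zbot = 100 cm, so tof = 100 / 2.9979e10 * 1e9 ~= 3.34 ns.
    c = 29979245800.  # speed of light in cm/s, same constant as in shootMuon()
    print 'vertical TOF (ns):', (telescope.ztop() - telescope.zbot()) / c * 1e9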
| gpl-3.0 |
yfried/ansible | lib/ansible/modules/storage/netapp/netapp_e_flashcache.py | 16 | 15757 | #!/usr/bin/python
# (c) 2016, NetApp, Inc
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
module: netapp_e_flashcache
author: Kevin Hulquest (@hulquest)
version_added: '2.2'
short_description: NetApp E-Series manage SSD caches
description:
- Create or remove SSD caches on a NetApp E-Series storage array.
options:
api_username:
required: true
description:
- The username to authenticate with the SANtricity WebServices Proxy or embedded REST API.
api_password:
required: true
description:
- The password to authenticate with the SANtricity WebServices Proxy or embedded REST API.
api_url:
required: true
description:
- The url to the SANtricity WebServices Proxy or embedded REST API.
validate_certs:
required: false
default: true
description:
- Should https certificates be validated?
ssid:
required: true
description:
- The ID of the array to manage (as configured on the web services proxy).
state:
required: true
description:
- Whether the specified SSD cache should exist or not.
choices: ['present', 'absent']
default: present
name:
required: true
description:
- The name of the SSD cache to manage
io_type:
description:
- The type of workload to optimize the cache for.
choices: ['filesystem','database','media']
default: filesystem
disk_count:
description:
- The minimum number of disks to use for building the cache. The cache will be expanded if this number exceeds the number of disks already in place
size_unit:
description:
- The unit to be applied to size arguments
choices: ['bytes', 'b', 'kb', 'mb', 'gb', 'tb', 'pb', 'eb', 'zb', 'yb']
default: gb
cache_size_min:
description:
- The minimum size (in size_units) of the ssd cache. The cache will be expanded if this exceeds the current size of the cache.
'''
EXAMPLES = """
- name: Flash Cache
netapp_e_flashcache:
ssid: "{{ ssid }}"
api_url: "{{ netapp_api_url }}"
api_username: "{{ netapp_api_username }}"
api_password: "{{ netapp_api_password }}"
validate_certs: "{{ netapp_api_validate_certs }}"
name: SSDCacheBuiltByAnsible
"""
RETURN = """
msg:
description: Success message
returned: success
type: string
sample: json for newly created flash cache
"""
import json
import logging
import sys
import traceback
from ansible.module_utils.api import basic_auth_argument_spec
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.six.moves import reduce
from ansible.module_utils.six.moves.urllib.error import HTTPError
from ansible.module_utils._text import to_native
from ansible.module_utils.urls import open_url
def request(url, data=None, headers=None, method='GET', use_proxy=True,
force=False, last_mod_time=None, timeout=10, validate_certs=True,
url_username=None, url_password=None, http_agent=None, force_basic_auth=True, ignore_errors=False):
try:
r = open_url(url=url, data=data, headers=headers, method=method, use_proxy=use_proxy,
force=force, last_mod_time=last_mod_time, timeout=timeout, validate_certs=validate_certs,
url_username=url_username, url_password=url_password, http_agent=http_agent,
force_basic_auth=force_basic_auth)
except HTTPError as err:
r = err.fp
try:
raw_data = r.read()
if raw_data:
data = json.loads(raw_data)
else:
raw_data = None
    except Exception:
if ignore_errors:
pass
else:
raise Exception(raw_data)
resp_code = r.getcode()
if resp_code >= 400 and not ignore_errors:
raise Exception(resp_code, data)
else:
return resp_code, data
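# Illustrative call sketch for the request() helper above (hypothetical proxy
# URL and credentials; on success it returns a (status_code, parsed_json) pair):
#
#     rc, data = request('https://proxy.example.com/devmgr/v2/storage-systems',
#                        url_username='admin', url_password='secret',
#                        headers=dict(Accept="application/json"),
#                        validate_certs=False)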
class NetAppESeriesFlashCache(object):
def __init__(self):
self.name = None
self.log_mode = None
self.log_path = None
self.api_url = None
self.api_username = None
self.api_password = None
self.ssid = None
self.validate_certs = None
self.disk_count = None
self.size_unit = None
self.cache_size_min = None
self.io_type = None
self.driveRefs = None
self.state = None
self._size_unit_map = dict(
bytes=1,
b=1,
kb=1024,
mb=1024 ** 2,
gb=1024 ** 3,
tb=1024 ** 4,
pb=1024 ** 5,
eb=1024 ** 6,
zb=1024 ** 7,
yb=1024 ** 8
)
argument_spec = basic_auth_argument_spec()
argument_spec.update(dict(
api_username=dict(type='str', required=True),
api_password=dict(type='str', required=True, no_log=True),
api_url=dict(type='str', required=True),
state=dict(default='present', choices=['present', 'absent'], type='str'),
ssid=dict(required=True, type='str'),
name=dict(required=True, type='str'),
disk_count=dict(type='int'),
disk_refs=dict(type='list'),
cache_size_min=dict(type='int'),
io_type=dict(default='filesystem', choices=['filesystem', 'database', 'media']),
size_unit=dict(default='gb', choices=['bytes', 'b', 'kb', 'mb', 'gb', 'tb', 'pb', 'eb', 'zb', 'yb'],
type='str'),
criteria_disk_phy_type=dict(choices=['sas', 'sas4k', 'fibre', 'fibre520b', 'scsi', 'sata', 'pata'],
type='str'),
log_mode=dict(type='str'),
log_path=dict(type='str'),
))
self.module = AnsibleModule(
argument_spec=argument_spec,
required_if=[
],
mutually_exclusive=[
],
# TODO: update validation for various selection criteria
supports_check_mode=True
)
self.__dict__.update(self.module.params)
# logging setup
self._logger = logging.getLogger(self.__class__.__name__)
self.debug = self._logger.debug
if self.log_mode == 'file' and self.log_path:
logging.basicConfig(level=logging.DEBUG, filename=self.log_path)
elif self.log_mode == 'stderr':
logging.basicConfig(level=logging.DEBUG, stream=sys.stderr)
self.post_headers = dict(Accept="application/json")
self.post_headers['Content-Type'] = 'application/json'
def get_candidate_disks(self, disk_count, size_unit='gb', capacity=None):
self.debug("getting candidate disks...")
drives_req = dict(
driveCount=disk_count,
sizeUnit=size_unit,
driveType='ssd',
)
if capacity:
drives_req['targetUsableCapacity'] = capacity
(rc, drives_resp) = request(self.api_url + "/storage-systems/%s/drives" % (self.ssid),
data=json.dumps(drives_req), headers=self.post_headers, method='POST',
url_username=self.api_username, url_password=self.api_password,
validate_certs=self.validate_certs)
if rc == 204:
self.module.fail_json(msg='Cannot find disks to match requested criteria for ssd cache')
disk_ids = [d['id'] for d in drives_resp]
bytes = reduce(lambda s, d: s + int(d['usableCapacity']), drives_resp, 0)
return (disk_ids, bytes)
def create_cache(self):
(disk_ids, bytes) = self.get_candidate_disks(disk_count=self.disk_count, size_unit=self.size_unit,
capacity=self.cache_size_min)
self.debug("creating ssd cache...")
create_fc_req = dict(
driveRefs=disk_ids,
name=self.name
)
(rc, self.resp) = request(self.api_url + "/storage-systems/%s/flash-cache" % (self.ssid),
data=json.dumps(create_fc_req), headers=self.post_headers, method='POST',
url_username=self.api_username, url_password=self.api_password,
validate_certs=self.validate_certs)
def update_cache(self):
self.debug('updating flash cache config...')
update_fc_req = dict(
name=self.name,
configType=self.io_type
)
(rc, self.resp) = request(self.api_url + "/storage-systems/%s/flash-cache/configure" % (self.ssid),
data=json.dumps(update_fc_req), headers=self.post_headers, method='POST',
url_username=self.api_username, url_password=self.api_password,
validate_certs=self.validate_certs)
def delete_cache(self):
self.debug('deleting flash cache...')
(rc, self.resp) = request(self.api_url + "/storage-systems/%s/flash-cache" % (self.ssid), method='DELETE',
url_username=self.api_username, url_password=self.api_password,
validate_certs=self.validate_certs, ignore_errors=True)
@property
def needs_more_disks(self):
if len(self.cache_detail['driveRefs']) < self.disk_count:
self.debug("needs resize: current disk count %s < requested requested count %s",
len(self.cache_detail['driveRefs']), self.disk_count)
return True
@property
def needs_less_disks(self):
if len(self.cache_detail['driveRefs']) > self.disk_count:
self.debug("needs resize: current disk count %s < requested requested count %s",
len(self.cache_detail['driveRefs']), self.disk_count)
return True
@property
def current_size_bytes(self):
return int(self.cache_detail['fcDriveInfo']['fcWithDrives']['usedCapacity'])
@property
def requested_size_bytes(self):
if self.cache_size_min:
return self.cache_size_min * self._size_unit_map[self.size_unit]
else:
return 0
@property
def needs_more_capacity(self):
if self.current_size_bytes < self.requested_size_bytes:
self.debug("needs resize: current capacity %sb is less than requested minimum %sb",
self.current_size_bytes, self.requested_size_bytes)
return True
@property
def needs_resize(self):
return self.needs_more_disks or self.needs_more_capacity or self.needs_less_disks
def resize_cache(self):
# increase up to disk count first, then iteratively add disks until we meet requested capacity
# TODO: perform this calculation in check mode
current_disk_count = len(self.cache_detail['driveRefs'])
proposed_new_disks = 0
proposed_additional_bytes = 0
proposed_disk_ids = []
if self.needs_more_disks:
proposed_disk_count = self.disk_count - current_disk_count
(disk_ids, bytes) = self.get_candidate_disks(disk_count=proposed_disk_count)
proposed_additional_bytes = bytes
proposed_disk_ids = disk_ids
while self.current_size_bytes + proposed_additional_bytes < self.requested_size_bytes:
proposed_new_disks += 1
(disk_ids, bytes) = self.get_candidate_disks(disk_count=proposed_new_disks)
proposed_disk_ids = disk_ids
proposed_additional_bytes = bytes
add_drives_req = dict(
driveRef=proposed_disk_ids
)
self.debug("adding drives to flash-cache...")
(rc, self.resp) = request(self.api_url + "/storage-systems/%s/flash-cache/addDrives" % (self.ssid),
data=json.dumps(add_drives_req), headers=self.post_headers, method='POST',
url_username=self.api_username, url_password=self.api_password,
validate_certs=self.validate_certs)
elif self.needs_less_disks and self.driveRefs:
rm_drives = dict(driveRef=self.driveRefs)
(rc, self.resp) = request(self.api_url + "/storage-systems/%s/flash-cache/removeDrives" % (self.ssid),
data=json.dumps(rm_drives), headers=self.post_headers, method='POST',
url_username=self.api_username, url_password=self.api_password,
validate_certs=self.validate_certs)
def apply(self):
result = dict(changed=False)
(rc, cache_resp) = request(self.api_url + "/storage-systems/%s/flash-cache" % (self.ssid),
url_username=self.api_username, url_password=self.api_password,
validate_certs=self.validate_certs, ignore_errors=True)
if rc == 200:
self.cache_detail = cache_resp
else:
self.cache_detail = None
if rc not in [200, 404]:
raise Exception(
"Unexpected error code %s fetching flash cache detail. Response data was %s" % (rc, cache_resp))
if self.state == 'present':
if self.cache_detail:
# TODO: verify parameters against detail for changes
if self.cache_detail['name'] != self.name:
self.debug("CHANGED: name differs")
result['changed'] = True
if self.cache_detail['flashCacheBase']['configType'] != self.io_type:
self.debug("CHANGED: io_type differs")
result['changed'] = True
if self.needs_resize:
self.debug("CHANGED: resize required")
result['changed'] = True
else:
self.debug("CHANGED: requested state is 'present' but cache does not exist")
result['changed'] = True
else: # requested state is absent
if self.cache_detail:
self.debug("CHANGED: requested state is 'absent' but cache exists")
result['changed'] = True
if not result['changed']:
self.debug("no changes, exiting...")
self.module.exit_json(**result)
if self.module.check_mode:
self.debug("changes pending in check mode, exiting early...")
self.module.exit_json(**result)
if self.state == 'present':
if not self.cache_detail:
self.create_cache()
else:
if self.needs_resize:
self.resize_cache()
# run update here as well, since io_type can't be set on creation
self.update_cache()
elif self.state == 'absent':
self.delete_cache()
# TODO: include other details about the storage pool (size, type, id, etc)
self.module.exit_json(changed=result['changed'], **self.resp)
def main():
sp = NetAppESeriesFlashCache()
try:
sp.apply()
except Exception as e:
sp.debug("Exception in apply(): \n%s", to_native(e))
sp.module.fail_json(msg="Failed to create flash cache. Error[%s]" % to_native(e),
exception=traceback.format_exc())
if __name__ == '__main__':
main()
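# Worked example (hypothetical numbers) of the size-unit arithmetic behind
# requested_size_bytes: cache_size_min is multiplied by the _size_unit_map
# factor for size_unit, so cache_size_min=500 with size_unit='gb' requests
# at least 500 * 1024 ** 3 = 536870912000 bytes of cache capacity.
def _requested_bytes_sketch(cache_size_min=500, unit_factor=1024 ** 3):
    return cache_size_min * unit_factor  # 536870912000 for the defaults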
| gpl-3.0 |
Soya93/Extract-Refactoring | python/lib/Lib/site-packages/django/contrib/localflavor/ca/ca_provinces.py | 199 | 1397 | """
An alphabetical list of provinces and territories for use as `choices`
in a formfield., and a mapping of province misspellings/abbreviations to
normalized abbreviations
Source: http://www.canada.gc.ca/othergov/prov_e.html
This exists in this standalone file so that it's only imported into memory
when explicitly needed.
"""
PROVINCE_CHOICES = (
('AB', 'Alberta'),
('BC', 'British Columbia'),
('MB', 'Manitoba'),
('NB', 'New Brunswick'),
('NF', 'Newfoundland and Labrador'),
('NT', 'Northwest Territories'),
('NS', 'Nova Scotia'),
('NU', 'Nunavut'),
('ON', 'Ontario'),
('PE', 'Prince Edward Island'),
('QC', 'Quebec'),
('SK', 'Saskatchewan'),
('YK', 'Yukon')
)
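# Illustrative sketch of the intended use stated in the module docstring
# (hypothetical form; kept as a comment because this module deliberately
# avoids imports so it stays cheap to load):
#
#     from django import forms
#
#     class AddressForm(forms.Form):
#         province = forms.ChoiceField(choices=PROVINCE_CHOICES)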
PROVINCES_NORMALIZED = {
'ab': 'AB',
'alberta': 'AB',
'bc': 'BC',
'b.c.': 'BC',
'british columbia': 'BC',
'mb': 'MB',
'manitoba': 'MB',
'nb': 'NB',
'new brunswick': 'NB',
'nf': 'NF',
'newfoundland': 'NF',
'newfoundland and labrador': 'NF',
'nt': 'NT',
'northwest territories': 'NT',
'ns': 'NS',
'nova scotia': 'NS',
'nu': 'NU',
'nunavut': 'NU',
'on': 'ON',
'ontario': 'ON',
'pe': 'PE',
'pei': 'PE',
'p.e.i.': 'PE',
'prince edward island': 'PE',
'qc': 'QC',
'quebec': 'QC',
'sk': 'SK',
'saskatchewan': 'SK',
'yk': 'YK',
'yukon': 'YK',
} | apache-2.0 |
github-account-because-they-want-it/django | django/core/management/commands/inspectdb.py | 100 | 11994 | from __future__ import unicode_literals
import keyword
import re
from collections import OrderedDict
from django.core.management.base import BaseCommand, CommandError
from django.db import DEFAULT_DB_ALIAS, connections
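# Illustrative programmatic invocation sketch for the command defined below
# (hypothetical database alias; equivalent to `manage.py inspectdb --database=default`):
def _example_run_inspectdb():
    from django.core.management import call_command
    call_command('inspectdb', database='default')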
class Command(BaseCommand):
help = "Introspects the database tables in the given database and outputs a Django model module."
requires_system_checks = False
db_module = 'django.db'
def add_arguments(self, parser):
parser.add_argument('--database', action='store', dest='database',
default=DEFAULT_DB_ALIAS, help='Nominates a database to '
'introspect. Defaults to using the "default" database.')
def handle(self, **options):
try:
for line in self.handle_inspection(options):
self.stdout.write("%s\n" % line)
except NotImplementedError:
raise CommandError("Database inspection isn't supported for the currently selected database backend.")
def handle_inspection(self, options):
connection = connections[options['database']]
# 'table_name_filter' is a stealth option
table_name_filter = options.get('table_name_filter')
table2model = lambda table_name: re.sub(r'[^a-zA-Z0-9]', '', table_name.title())
strip_prefix = lambda s: s[1:] if s.startswith("u'") else s
with connection.cursor() as cursor:
yield "# This is an auto-generated Django model module."
yield "# You'll have to do the following manually to clean this up:"
yield "# * Rearrange models' order"
yield "# * Make sure each model has one field with primary_key=True"
yield "# * Make sure each ForeignKey has `on_delete` set to the desidered behavior."
yield (
"# * Remove `managed = False` lines if you wish to allow "
"Django to create, modify, and delete the table"
)
yield "# Feel free to rename the models, but don't rename db_table values or field names."
yield "from __future__ import unicode_literals"
yield ''
yield 'from %s import models' % self.db_module
known_models = []
for table_name in connection.introspection.table_names(cursor):
if table_name_filter is not None and callable(table_name_filter):
if not table_name_filter(table_name):
continue
yield ''
yield ''
yield 'class %s(models.Model):' % table2model(table_name)
known_models.append(table2model(table_name))
try:
relations = connection.introspection.get_relations(cursor, table_name)
except NotImplementedError:
relations = {}
try:
indexes = connection.introspection.get_indexes(cursor, table_name)
except NotImplementedError:
indexes = {}
try:
constraints = connection.introspection.get_constraints(cursor, table_name)
except NotImplementedError:
constraints = {}
used_column_names = [] # Holds column names used in the table so far
for row in connection.introspection.get_table_description(cursor, table_name):
comment_notes = [] # Holds Field notes, to be displayed in a Python comment.
extra_params = OrderedDict() # Holds Field parameters such as 'db_column'.
column_name = row[0]
is_relation = column_name in relations
att_name, params, notes = self.normalize_col_name(
column_name, used_column_names, is_relation)
extra_params.update(params)
comment_notes.extend(notes)
used_column_names.append(att_name)
# Add primary_key and unique, if necessary.
if column_name in indexes:
if indexes[column_name]['primary_key']:
extra_params['primary_key'] = True
elif indexes[column_name]['unique']:
extra_params['unique'] = True
if is_relation:
rel_to = (
"self" if relations[column_name][1] == table_name
else table2model(relations[column_name][1])
)
if rel_to in known_models:
field_type = 'ForeignKey(%s' % rel_to
else:
field_type = "ForeignKey('%s'" % rel_to
else:
# Calling `get_field_type` to get the field type string and any
# additional parameters and notes.
field_type, field_params, field_notes = self.get_field_type(connection, table_name, row)
extra_params.update(field_params)
comment_notes.extend(field_notes)
field_type += '('
# Don't output 'id = meta.AutoField(primary_key=True)', because
# that's assumed if it doesn't exist.
if att_name == 'id' and extra_params == {'primary_key': True}:
if field_type == 'AutoField(':
continue
elif field_type == 'IntegerField(' and not connection.features.can_introspect_autofield:
comment_notes.append('AutoField?')
# Add 'null' and 'blank', if the 'null_ok' flag was present in the
# table description.
if row[6]: # If it's NULL...
if field_type == 'BooleanField(':
field_type = 'NullBooleanField('
else:
extra_params['blank'] = True
extra_params['null'] = True
field_desc = '%s = %s%s' % (
att_name,
# Custom fields will have a dotted path
'' if '.' in field_type else 'models.',
field_type,
)
if field_type.startswith('ForeignKey('):
field_desc += ', models.DO_NOTHING'
if extra_params:
if not field_desc.endswith('('):
field_desc += ', '
field_desc += ', '.join(
'%s=%s' % (k, strip_prefix(repr(v)))
for k, v in extra_params.items())
field_desc += ')'
if comment_notes:
field_desc += ' # ' + ' '.join(comment_notes)
yield ' %s' % field_desc
for meta_line in self.get_meta(table_name, constraints):
yield meta_line
def normalize_col_name(self, col_name, used_column_names, is_relation):
"""
Modify the column name to make it Python-compatible as a field name
"""
field_params = {}
field_notes = []
new_name = col_name.lower()
if new_name != col_name:
field_notes.append('Field name made lowercase.')
if is_relation:
if new_name.endswith('_id'):
new_name = new_name[:-3]
else:
field_params['db_column'] = col_name
new_name, num_repl = re.subn(r'\W', '_', new_name)
if num_repl > 0:
field_notes.append('Field renamed to remove unsuitable characters.')
if new_name.find('__') >= 0:
while new_name.find('__') >= 0:
new_name = new_name.replace('__', '_')
if col_name.lower().find('__') >= 0:
# Only add the comment if the double underscore was in the original name
field_notes.append("Field renamed because it contained more than one '_' in a row.")
if new_name.startswith('_'):
new_name = 'field%s' % new_name
field_notes.append("Field renamed because it started with '_'.")
if new_name.endswith('_'):
new_name = '%sfield' % new_name
field_notes.append("Field renamed because it ended with '_'.")
if keyword.iskeyword(new_name):
new_name += '_field'
field_notes.append('Field renamed because it was a Python reserved word.')
if new_name[0].isdigit():
new_name = 'number_%s' % new_name
field_notes.append("Field renamed because it wasn't a valid Python identifier.")
if new_name in used_column_names:
num = 0
while '%s_%d' % (new_name, num) in used_column_names:
num += 1
new_name = '%s_%d' % (new_name, num)
field_notes.append('Field renamed because of name conflict.')
if col_name != new_name and field_notes:
field_params['db_column'] = col_name
return new_name, field_params, field_notes
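# Illustrative walk-through of the rules above (column names are assumptions,
# not from a real schema); db_column is recorded whenever the name changes:
#     'First Name' -> 'first_name'        (lowercased, '\W' replaced)
#     'class'      -> 'class_field'       (Python reserved word)
#     '2nd_shift'  -> 'number_2nd_shift'  (not a valid identifier)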
def get_field_type(self, connection, table_name, row):
"""
Given the database connection, the table name, and the cursor row
description, this routine will return the given field type name, as
well as any additional keyword parameters and notes for the field.
"""
field_params = OrderedDict()
field_notes = []
try:
field_type = connection.introspection.get_field_type(row[1], row)
except KeyError:
field_type = 'TextField'
field_notes.append('This field type is a guess.')
# This is a hook for data_types_reverse to return a tuple of
# (field_type, field_params_dict).
if type(field_type) is tuple:
field_type, new_params = field_type
field_params.update(new_params)
# Add max_length for all CharFields.
if field_type == 'CharField' and row[3]:
field_params['max_length'] = int(row[3])
if field_type == 'DecimalField':
if row[4] is None or row[5] is None:
field_notes.append(
'max_digits and decimal_places have been guessed, as this '
'database handles decimal fields as float')
field_params['max_digits'] = row[4] if row[4] is not None else 10
field_params['decimal_places'] = row[5] if row[5] is not None else 5
else:
field_params['max_digits'] = row[4]
field_params['decimal_places'] = row[5]
return field_type, field_params, field_notes
def get_meta(self, table_name, constraints):
"""
Return a sequence comprising the lines of code necessary
to construct the inner Meta class for the model corresponding
to the given database table name.
"""
unique_together = []
for index, params in constraints.items():
if params['unique']:
columns = params['columns']
if len(columns) > 1:
# we do not want to include the u"" or u'' prefix
# so we build the string rather than interpolate the tuple
tup = '(' + ', '.join("'%s'" % c for c in columns) + ')'
unique_together.append(tup)
meta = ["",
" class Meta:",
" managed = False",
" db_table = '%s'" % table_name]
if unique_together:
tup = '(' + ', '.join(unique_together) + ',)'
meta += [" unique_together = %s" % tup]
return meta
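# Hedged usage sketch (not part of this module): the command is normally run
# through manage.py, e.g.
#     python manage.py inspectdb --database=default > models.py
# The database alias and the output redirect here are assumptions for
# illustration only.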
| bsd-3-clause |
forge33/CouchPotatoServer | couchpotato/core/media/_base/media/index.py | 73 | 6107 | from string import ascii_letters
from hashlib import md5
from CodernityDB.tree_index import MultiTreeBasedIndex, TreeBasedIndex
from couchpotato.core.helpers.encoding import toUnicode, simplifyString
class MediaIndex(MultiTreeBasedIndex):
_version = 3
custom_header = """from CodernityDB.tree_index import MultiTreeBasedIndex"""
def __init__(self, *args, **kwargs):
kwargs['key_format'] = '32s'
super(MediaIndex, self).__init__(*args, **kwargs)
def make_key(self, key):
return md5(key).hexdigest()
def make_key_value(self, data):
if data.get('_t') == 'media' and (data.get('identifier') or data.get('identifiers')):
identifiers = data.get('identifiers', {})
if data.get('identifier') and 'imdb' not in identifiers:
identifiers['imdb'] = data.get('identifier')
ids = []
for x in identifiers:
ids.append(md5('%s-%s' % (x, identifiers[x])).hexdigest())
return ids, None
class MediaStatusIndex(TreeBasedIndex):
_version = 1
def __init__(self, *args, **kwargs):
kwargs['key_format'] = '32s'
super(MediaStatusIndex, self).__init__(*args, **kwargs)
def make_key(self, key):
return md5(key).hexdigest()
def make_key_value(self, data):
if data.get('_t') == 'media' and data.get('status'):
return md5(data.get('status')).hexdigest(), None
class MediaTypeIndex(TreeBasedIndex):
_version = 1
def __init__(self, *args, **kwargs):
kwargs['key_format'] = '32s'
super(MediaTypeIndex, self).__init__(*args, **kwargs)
def make_key(self, key):
return md5(key).hexdigest()
def make_key_value(self, data):
if data.get('_t') == 'media' and data.get('type'):
return md5(data.get('type')).hexdigest(), None
class TitleSearchIndex(MultiTreeBasedIndex):
_version = 1
custom_header = """from CodernityDB.tree_index import MultiTreeBasedIndex
from itertools import izip
from couchpotato.core.helpers.encoding import simplifyString"""
def __init__(self, *args, **kwargs):
kwargs['key_format'] = '32s'
super(TitleSearchIndex, self).__init__(*args, **kwargs)
self.__l = kwargs.get('w_len', 2)
def make_key_value(self, data):
if data.get('_t') == 'media' and len(data.get('title', '')) > 0:
out = set()
title = str(simplifyString(data.get('title').lower()))
l = self.__l
title_split = title.split()
for x in range(len(title_split)):
combo = ' '.join(title_split[x:])[:32].strip()
out.add(combo.rjust(32, '_'))
combo_range = max(l, min(len(combo), 32))
for cx in range(1, combo_range):
ccombo = combo[:-cx].strip()
if len(ccombo) > l:
out.add(ccombo.rjust(32, '_'))
return out, None
def make_key(self, key):
return key.rjust(32, '_').lower()
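# Sketch of the keys emitted above (illustrative title, default w_len=2):
# for u'iron man', make_key_value() collects keys right-justified with '_'
# to 32 chars for 'iron man', 'iron ma', 'iron m', 'iron', 'iro' and 'man',
# i.e. every prefix longer than w_len of every word-suffix of the title.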
class TitleIndex(TreeBasedIndex):
_version = 4
custom_header = """from CodernityDB.tree_index import TreeBasedIndex
from string import ascii_letters
from couchpotato.core.helpers.encoding import toUnicode, simplifyString"""
def __init__(self, *args, **kwargs):
kwargs['key_format'] = '32s'
super(TitleIndex, self).__init__(*args, **kwargs)
def make_key(self, key):
return self.simplify(key)
def make_key_value(self, data):
if data.get('_t') == 'media' and data.get('title') is not None and len(data.get('title')) > 0:
return self.simplify(data['title']), None
def simplify(self, title):
title = toUnicode(title)
nr_prefix = '' if title and len(title) > 0 and title[0] in ascii_letters else '#'
title = simplifyString(title)
for prefix in ['the ', 'an ', 'a ']:
if prefix == title[:len(prefix)]:
title = title[len(prefix):]
break
return str(nr_prefix + title).ljust(32, ' ')[:32]
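# Illustrative outputs of simplify() (titles are assumptions; exact results
# depend on simplifyString):
#     u'The Matrix' -> 'matrix' padded with spaces to 32 chars
#     u'2 Guns'     -> '#2 guns' padded to 32 chars ('#' marks titles that do
#                      not start with an ASCII letter)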
class StartsWithIndex(TreeBasedIndex):
_version = 3
custom_header = """from CodernityDB.tree_index import TreeBasedIndex
from string import ascii_letters
from couchpotato.core.helpers.encoding import toUnicode, simplifyString"""
def __init__(self, *args, **kwargs):
kwargs['key_format'] = '1s'
super(StartsWithIndex, self).__init__(*args, **kwargs)
def make_key(self, key):
return self.first(key)
def make_key_value(self, data):
if data.get('_t') == 'media' and data.get('title') is not None:
return self.first(data['title']), None
def first(self, title):
title = toUnicode(title)
title = simplifyString(title)
for prefix in ['the ', 'an ', 'a ']:
if prefix == title[:len(prefix)]:
title = title[len(prefix):]
break
return str(title[0] if title and len(title) > 0 and title[0] in ascii_letters else '#').lower()
class MediaChildrenIndex(TreeBasedIndex):
_version = 1
def __init__(self, *args, **kwargs):
kwargs['key_format'] = '32s'
super(MediaChildrenIndex, self).__init__(*args, **kwargs)
def make_key(self, key):
return key
def make_key_value(self, data):
if data.get('_t') == 'media' and data.get('parent_id'):
return data.get('parent_id'), None
class MediaTagIndex(MultiTreeBasedIndex):
_version = 2
custom_header = """from CodernityDB.tree_index import MultiTreeBasedIndex"""
def __init__(self, *args, **kwargs):
kwargs['key_format'] = '32s'
super(MediaTagIndex, self).__init__(*args, **kwargs)
def make_key_value(self, data):
if data.get('_t') == 'media' and data.get('tags') and len(data.get('tags', [])) > 0:
tags = set()
for tag in data.get('tags', []):
tags.add(self.make_key(tag))
return list(tags), None
def make_key(self, key):
return md5(key).hexdigest()
| gpl-3.0 |
sebbrandt87/jenkins-job-builder | jenkins_jobs/modules/project_matrix.py | 17 | 6317 | # Copyright 2012 Julian Taylor <jtaylor.debian@googlemail.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
The matrix project module handles creating Jenkins matrix
projects. To create a matrix project, specify ``matrix`` in the
``project-type`` attribute of the :ref:`Job` definition.
Currently it supports four axes which share the same
internal YAML structure:
* label expressions (``label-expression``)
* user-defined values (``user-defined``)
* slave name or label (``slave``)
* JDK name (``jdk``)
Requires the Jenkins :jenkins-wiki:`Matrix Project Plugin
<Matrix+Project+Plugin>`.
The module also supports additional, plugin-defined axes:
* DynamicAxis (``dynamic``), requires the Jenkins
:jenkins-wiki:`DynamicAxis Plugin <DynamicAxis+Plugin>`
* GroovyAxis (``groovy``), requires the Jenkins
:jenkins-wiki:`GroovyAxis Plugin <GroovyAxis>`
To tie the parent job to a specific node, you should use the ``node``
parameter. On a matrix project, this will tie *only* the parent job. To
restrict the axes jobs, you can define a single-value ``slave`` axis.
:Job Parameters:
* **execution-strategy** (optional):
* **combination-filter** (`str`): axes selection filter
* **sequential** (`bool`): run builds sequentially (default false)
* **touchstone** (optional):
* **expr** (`str`) -- selection filter for the touchstone build
* **result** (`str`) -- required result of the job: \
stable (default) or unstable
* **axes** (`list`):
* **axis**:
* **type** (`str`) -- axis type, must be either
'label-expression', 'user-defined', 'slave' or 'jdk'.
* **name** (`str`) -- name of the axis
* **values** (`list`) -- values of the axis
The module also supports ShiningPanda axes:
Example:
.. literalinclude:: /../../tests/general/fixtures/matrix-axis003.yaml
Requires the Jenkins :jenkins-wiki:`ShiningPanda Plugin <ShiningPanda+Plugin>`.
Example:
.. literalinclude:: /../../tests/yamlparser/fixtures/project-matrix001.yaml
:language: yaml
"""
import xml.etree.ElementTree as XML
import jenkins_jobs.modules.base
class Matrix(jenkins_jobs.modules.base.Base):
sequence = 0
# List the supported Axis names in our configuration
# and map them to the Jenkins XML element name.
supported_axis = {
'label-expression': 'hudson.matrix.LabelExpAxis',
'user-defined': 'hudson.matrix.TextAxis',
'slave': 'hudson.matrix.LabelAxis',
'jdk': 'hudson.matrix.JDKAxis',
'dynamic': 'ca.silvermaplesolutions.jenkins.plugins.daxis.DynamicAxis',
'python': 'jenkins.plugins.shiningpanda.matrix.PythonAxis',
'tox': 'jenkins.plugins.shiningpanda.matrix.ToxAxis',
'groovy': 'org.jenkinsci.plugins.GroovyAxis',
}
def root_xml(self, data):
root = XML.Element('matrix-project')
ex_r = XML.SubElement(root, 'executionStrategy',
{'class': 'hudson.matrix.'
'DefaultMatrixExecutionStrategyImpl'})
ex_d = data.get('execution-strategy', {})
XML.SubElement(root, 'combinationFilter').text = \
str(ex_d.get('combination-filter', '')).rstrip()
XML.SubElement(ex_r, 'runSequentially').text = \
str(ex_d.get('sequential', False)).lower()
if 'touchstone' in ex_d:
XML.SubElement(ex_r, 'touchStoneCombinationFilter').text = \
str(ex_d['touchstone'].get('expr', ''))
t_r = XML.SubElement(ex_r, 'touchStoneResultCondition')
n = ex_d['touchstone'].get('result', 'stable').upper()
if n not in ('STABLE', 'UNSTABLE'):
raise ValueError('Required result must be stable or unstable')
XML.SubElement(t_r, 'name').text = n
if n == "STABLE":
XML.SubElement(t_r, 'ordinal').text = '0'
XML.SubElement(t_r, 'color').text = 'BLUE'
else:
XML.SubElement(t_r, 'ordinal').text = '1'
XML.SubElement(t_r, 'color').text = 'YELLOW'
ax_root = XML.SubElement(root, 'axes')
for axis_ in data.get('axes', []):
axis = axis_['axis']
axis_type = axis['type']
if axis_type not in self.supported_axis:
raise ValueError('Only %s axis types are supported'
% self.supported_axis.keys())
axis_name = self.supported_axis.get(axis_type)
lbl_root = XML.SubElement(ax_root, axis_name)
name, values = axis.get('name', ''), axis.get('values', [''])
if axis_type == 'jdk':
XML.SubElement(lbl_root, 'name').text = 'jdk'
elif axis_type == 'python':
XML.SubElement(lbl_root, 'name').text = 'PYTHON'
elif axis_type == 'tox':
XML.SubElement(lbl_root, 'name').text = 'TOXENV'
else:
XML.SubElement(lbl_root, 'name').text = str(name)
if axis_type != "groovy":
v_root = XML.SubElement(lbl_root, 'values')
if axis_type == "dynamic":
XML.SubElement(v_root, 'string').text = str(values[0])
XML.SubElement(lbl_root, 'varName').text = str(values[0])
v_root = XML.SubElement(lbl_root, 'axisValues')
XML.SubElement(v_root, 'string').text = 'default'
elif axis_type == "groovy":
command = XML.SubElement(lbl_root, 'groovyString')
command.text = axis.get('command')
XML.SubElement(lbl_root, 'computedValues').text = ''
else:
for v in values:
XML.SubElement(v_root, 'string').text = str(v)
return root
| apache-2.0 |
Aminakh/or-tools | examples/python/kenken2.py | 34 | 4935 | # Copyright 2010 Hakan Kjellerstrand hakank@bonetmail.com
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
KenKen puzzle in Google CP Solver.
http://en.wikipedia.org/wiki/KenKen
'''
KenKen or KEN-KEN is a style of arithmetic and logical puzzle sharing
several characteristics with sudoku. The name comes from Japanese and
is translated as 'square wisdom' or 'cleverness squared'.
...
The objective is to fill the grid in with the digits 1 through 6 such that:
* Each row contains exactly one of each digit
* Each column contains exactly one of each digit
* Each bold-outlined group of cells is a cage containing digits which
achieve the specified result using the specified mathematical operation:
addition (+),
subtraction (-),
multiplication (x),
and division (/).
(Unlike in Killer sudoku, digits may repeat within a group.)
...
More complex KenKen problems are formed using the principles described
above but omitting the symbols +, -, x and /, thus leaving them as
yet another unknown to be determined.
'''
The solution is:
5 6 3 4 1 2
6 1 4 5 2 3
4 5 2 3 6 1
3 4 1 2 5 6
2 3 6 1 4 5
1 2 5 6 3 4
This model was created by Hakan Kjellerstrand (hakank@bonetmail.com)
Also see my other Google CP Solver models:
http://www.hakank.org/google_or_tools/
"""
import sys
from ortools.constraint_solver import pywrapcp
#
# Ensure that the values in the segments of cc
# combine to res under one of the supported operators
#
def calc(cc, x, res):
solver = x.values()[0].solver()
if len(cc) == 2:
# for two operands there may be
# a lot of variants
c00, c01 = cc[0]
c10, c11 = cc[1]
a = x[c00 - 1, c01 - 1]
b = x[c10 - 1, c11 - 1]
r1 = solver.IsEqualCstVar(a + b, res)
r2 = solver.IsEqualCstVar(a * b, res)
r3 = solver.IsEqualVar(a * res, b)
r4 = solver.IsEqualVar(b * res, a)
r5 = solver.IsEqualCstVar(a - b, res)
r6 = solver.IsEqualCstVar(b - a, res)
solver.Add(r1 + r2 + r3 + r4 + r5 + r6 >= 1)
else:
# res is either sum or product of the segment
xx = [x[i[0] - 1, i[1] - 1] for i in cc]
# Sum
# # SumEquality doesn't work:
# this_sum = solver.SumEquality(xx, res)
this_sum = solver.IsEqualCstVar(solver.Sum(xx), res)
# Product
# # Prod (or MakeProd) doesn't work:
# this_prod = solver.IsEqualCstVar(solver.Prod(xx), res)
this_prod = solver.IsEqualCstVar(reduce(lambda a, b: a * b, xx), res)
solver.Add(this_sum + this_prod >= 1)
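# Worked illustration of the reification above (cage values are assumptions):
# for a two-cell cage with result 11, calc() builds six 0/1 variables for
# a+b==11, a*b==11, a*11==b, b*11==a, a-b==11 and b-a==11, then posts
# r1+...+r6 >= 1, leaving the unknown operator to the solver.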
def main():
# Create the solver.
solver = pywrapcp.Solver("KenKen")
#
# data
#
# size of matrix
n = 6
# For a better view of the problem, see
# http://en.wikipedia.org/wiki/File:KenKenProblem.svg
# hints
# [sum, [segments]]
# Note: 1-based
problem = [
[11, [[1, 1], [2, 1]]],
[2, [[1, 2], [1, 3]]],
[20, [[1, 4], [2, 4]]],
[6, [[1, 5], [1, 6], [2, 6], [3, 6]]],
[3, [[2, 2], [2, 3]]],
[3, [[2, 5], [3, 5]]],
[240, [[3, 1], [3, 2], [4, 1], [4, 2]]],
[6, [[3, 3], [3, 4]]],
[6, [[4, 3], [5, 3]]],
[7, [[4, 4], [5, 4], [5, 5]]],
[30, [[4, 5], [4, 6]]],
[6, [[5, 1], [5, 2]]],
[9, [[5, 6], [6, 6]]],
[8, [[6, 1], [6, 2], [6, 3]]],
[2, [[6, 4], [6, 5]]]]
num_p = len(problem)
#
# variables
#
# the set
x = {}
for i in range(n):
for j in range(n):
x[i, j] = solver.IntVar(1, n, "x[%i,%i]" % (i, j))
x_flat = [x[i, j] for i in range(n) for j in range(n)]
#
# constraints
#
# all rows and columns must be unique
for i in range(n):
row = [x[i, j] for j in range(n)]
solver.Add(solver.AllDifferent(row))
col = [x[j, i] for j in range(n)]
solver.Add(solver.AllDifferent(col))
# calculate the segments
for (res, segment) in problem:
calc(segment, x, res)
#
# search and solution
#
db = solver.Phase(x_flat,
solver.INT_VAR_DEFAULT,
solver.INT_VALUE_DEFAULT)
solver.NewSearch(db)
num_solutions = 0
while solver.NextSolution():
for i in range(n):
for j in range(n):
print x[i, j].Value(),
print
print
num_solutions += 1
solver.EndSearch()
print
print "num_solutions:", num_solutions
print "failures:", solver.Failures()
print "branches:", solver.Branches()
print "WallTime:", solver.WallTime()
if __name__ == "__main__":
main()
| apache-2.0 |
teeple/pns_server | work/install/Python-2.7.4/Lib/encodings/cp860.py | 593 | 34937 | """ Python Character Mapping Codec generated from 'VENDORS/MICSFT/PC/CP860.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
def encode(self,input,errors='strict'):
return codecs.charmap_encode(input,errors,encoding_map)
def decode(self,input,errors='strict'):
return codecs.charmap_decode(input,errors,decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
def encode(self, input, final=False):
return codecs.charmap_encode(input,self.errors,encoding_map)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
def decode(self, input, final=False):
return codecs.charmap_decode(input,self.errors,decoding_table)[0]
class StreamWriter(Codec,codecs.StreamWriter):
pass
class StreamReader(Codec,codecs.StreamReader):
pass
### encodings module API
def getregentry():
return codecs.CodecInfo(
name='cp860',
encode=Codec().encode,
decode=Codec().decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamreader=StreamReader,
streamwriter=StreamWriter,
)
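# Illustrative round trip using the tables below (byte values taken from the
# decoding table: 0x84 -> U+00E3, 0x82 -> U+00E9):
#     u'S\xe3o Tom\xe9'.encode('cp860')   # -> 'S\x84o Tom\x82'
#     'S\x84o Tom\x82'.decode('cp860')    # -> u'S\xe3o Tom\xe9'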
### Decoding Map
decoding_map = codecs.make_identity_dict(range(256))
decoding_map.update({
0x0080: 0x00c7, # LATIN CAPITAL LETTER C WITH CEDILLA
0x0081: 0x00fc, # LATIN SMALL LETTER U WITH DIAERESIS
0x0082: 0x00e9, # LATIN SMALL LETTER E WITH ACUTE
0x0083: 0x00e2, # LATIN SMALL LETTER A WITH CIRCUMFLEX
0x0084: 0x00e3, # LATIN SMALL LETTER A WITH TILDE
0x0085: 0x00e0, # LATIN SMALL LETTER A WITH GRAVE
0x0086: 0x00c1, # LATIN CAPITAL LETTER A WITH ACUTE
0x0087: 0x00e7, # LATIN SMALL LETTER C WITH CEDILLA
0x0088: 0x00ea, # LATIN SMALL LETTER E WITH CIRCUMFLEX
0x0089: 0x00ca, # LATIN CAPITAL LETTER E WITH CIRCUMFLEX
0x008a: 0x00e8, # LATIN SMALL LETTER E WITH GRAVE
0x008b: 0x00cd, # LATIN CAPITAL LETTER I WITH ACUTE
0x008c: 0x00d4, # LATIN CAPITAL LETTER O WITH CIRCUMFLEX
0x008d: 0x00ec, # LATIN SMALL LETTER I WITH GRAVE
0x008e: 0x00c3, # LATIN CAPITAL LETTER A WITH TILDE
0x008f: 0x00c2, # LATIN CAPITAL LETTER A WITH CIRCUMFLEX
0x0090: 0x00c9, # LATIN CAPITAL LETTER E WITH ACUTE
0x0091: 0x00c0, # LATIN CAPITAL LETTER A WITH GRAVE
0x0092: 0x00c8, # LATIN CAPITAL LETTER E WITH GRAVE
0x0093: 0x00f4, # LATIN SMALL LETTER O WITH CIRCUMFLEX
0x0094: 0x00f5, # LATIN SMALL LETTER O WITH TILDE
0x0095: 0x00f2, # LATIN SMALL LETTER O WITH GRAVE
0x0096: 0x00da, # LATIN CAPITAL LETTER U WITH ACUTE
0x0097: 0x00f9, # LATIN SMALL LETTER U WITH GRAVE
0x0098: 0x00cc, # LATIN CAPITAL LETTER I WITH GRAVE
0x0099: 0x00d5, # LATIN CAPITAL LETTER O WITH TILDE
0x009a: 0x00dc, # LATIN CAPITAL LETTER U WITH DIAERESIS
0x009b: 0x00a2, # CENT SIGN
0x009c: 0x00a3, # POUND SIGN
0x009d: 0x00d9, # LATIN CAPITAL LETTER U WITH GRAVE
0x009e: 0x20a7, # PESETA SIGN
0x009f: 0x00d3, # LATIN CAPITAL LETTER O WITH ACUTE
0x00a0: 0x00e1, # LATIN SMALL LETTER A WITH ACUTE
0x00a1: 0x00ed, # LATIN SMALL LETTER I WITH ACUTE
0x00a2: 0x00f3, # LATIN SMALL LETTER O WITH ACUTE
0x00a3: 0x00fa, # LATIN SMALL LETTER U WITH ACUTE
0x00a4: 0x00f1, # LATIN SMALL LETTER N WITH TILDE
0x00a5: 0x00d1, # LATIN CAPITAL LETTER N WITH TILDE
0x00a6: 0x00aa, # FEMININE ORDINAL INDICATOR
0x00a7: 0x00ba, # MASCULINE ORDINAL INDICATOR
0x00a8: 0x00bf, # INVERTED QUESTION MARK
0x00a9: 0x00d2, # LATIN CAPITAL LETTER O WITH GRAVE
0x00aa: 0x00ac, # NOT SIGN
0x00ab: 0x00bd, # VULGAR FRACTION ONE HALF
0x00ac: 0x00bc, # VULGAR FRACTION ONE QUARTER
0x00ad: 0x00a1, # INVERTED EXCLAMATION MARK
0x00ae: 0x00ab, # LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
0x00af: 0x00bb, # RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
0x00b0: 0x2591, # LIGHT SHADE
0x00b1: 0x2592, # MEDIUM SHADE
0x00b2: 0x2593, # DARK SHADE
0x00b3: 0x2502, # BOX DRAWINGS LIGHT VERTICAL
0x00b4: 0x2524, # BOX DRAWINGS LIGHT VERTICAL AND LEFT
0x00b5: 0x2561, # BOX DRAWINGS VERTICAL SINGLE AND LEFT DOUBLE
0x00b6: 0x2562, # BOX DRAWINGS VERTICAL DOUBLE AND LEFT SINGLE
0x00b7: 0x2556, # BOX DRAWINGS DOWN DOUBLE AND LEFT SINGLE
0x00b8: 0x2555, # BOX DRAWINGS DOWN SINGLE AND LEFT DOUBLE
0x00b9: 0x2563, # BOX DRAWINGS DOUBLE VERTICAL AND LEFT
0x00ba: 0x2551, # BOX DRAWINGS DOUBLE VERTICAL
0x00bb: 0x2557, # BOX DRAWINGS DOUBLE DOWN AND LEFT
0x00bc: 0x255d, # BOX DRAWINGS DOUBLE UP AND LEFT
0x00bd: 0x255c, # BOX DRAWINGS UP DOUBLE AND LEFT SINGLE
0x00be: 0x255b, # BOX DRAWINGS UP SINGLE AND LEFT DOUBLE
0x00bf: 0x2510, # BOX DRAWINGS LIGHT DOWN AND LEFT
0x00c0: 0x2514, # BOX DRAWINGS LIGHT UP AND RIGHT
0x00c1: 0x2534, # BOX DRAWINGS LIGHT UP AND HORIZONTAL
0x00c2: 0x252c, # BOX DRAWINGS LIGHT DOWN AND HORIZONTAL
0x00c3: 0x251c, # BOX DRAWINGS LIGHT VERTICAL AND RIGHT
0x00c4: 0x2500, # BOX DRAWINGS LIGHT HORIZONTAL
0x00c5: 0x253c, # BOX DRAWINGS LIGHT VERTICAL AND HORIZONTAL
0x00c6: 0x255e, # BOX DRAWINGS VERTICAL SINGLE AND RIGHT DOUBLE
0x00c7: 0x255f, # BOX DRAWINGS VERTICAL DOUBLE AND RIGHT SINGLE
0x00c8: 0x255a, # BOX DRAWINGS DOUBLE UP AND RIGHT
0x00c9: 0x2554, # BOX DRAWINGS DOUBLE DOWN AND RIGHT
0x00ca: 0x2569, # BOX DRAWINGS DOUBLE UP AND HORIZONTAL
0x00cb: 0x2566, # BOX DRAWINGS DOUBLE DOWN AND HORIZONTAL
0x00cc: 0x2560, # BOX DRAWINGS DOUBLE VERTICAL AND RIGHT
0x00cd: 0x2550, # BOX DRAWINGS DOUBLE HORIZONTAL
0x00ce: 0x256c, # BOX DRAWINGS DOUBLE VERTICAL AND HORIZONTAL
0x00cf: 0x2567, # BOX DRAWINGS UP SINGLE AND HORIZONTAL DOUBLE
0x00d0: 0x2568, # BOX DRAWINGS UP DOUBLE AND HORIZONTAL SINGLE
0x00d1: 0x2564, # BOX DRAWINGS DOWN SINGLE AND HORIZONTAL DOUBLE
0x00d2: 0x2565, # BOX DRAWINGS DOWN DOUBLE AND HORIZONTAL SINGLE
0x00d3: 0x2559, # BOX DRAWINGS UP DOUBLE AND RIGHT SINGLE
0x00d4: 0x2558, # BOX DRAWINGS UP SINGLE AND RIGHT DOUBLE
0x00d5: 0x2552, # BOX DRAWINGS DOWN SINGLE AND RIGHT DOUBLE
0x00d6: 0x2553, # BOX DRAWINGS DOWN DOUBLE AND RIGHT SINGLE
0x00d7: 0x256b, # BOX DRAWINGS VERTICAL DOUBLE AND HORIZONTAL SINGLE
0x00d8: 0x256a, # BOX DRAWINGS VERTICAL SINGLE AND HORIZONTAL DOUBLE
0x00d9: 0x2518, # BOX DRAWINGS LIGHT UP AND LEFT
0x00da: 0x250c, # BOX DRAWINGS LIGHT DOWN AND RIGHT
0x00db: 0x2588, # FULL BLOCK
0x00dc: 0x2584, # LOWER HALF BLOCK
0x00dd: 0x258c, # LEFT HALF BLOCK
0x00de: 0x2590, # RIGHT HALF BLOCK
0x00df: 0x2580, # UPPER HALF BLOCK
0x00e0: 0x03b1, # GREEK SMALL LETTER ALPHA
0x00e1: 0x00df, # LATIN SMALL LETTER SHARP S
0x00e2: 0x0393, # GREEK CAPITAL LETTER GAMMA
0x00e3: 0x03c0, # GREEK SMALL LETTER PI
0x00e4: 0x03a3, # GREEK CAPITAL LETTER SIGMA
0x00e5: 0x03c3, # GREEK SMALL LETTER SIGMA
0x00e6: 0x00b5, # MICRO SIGN
0x00e7: 0x03c4, # GREEK SMALL LETTER TAU
0x00e8: 0x03a6, # GREEK CAPITAL LETTER PHI
0x00e9: 0x0398, # GREEK CAPITAL LETTER THETA
0x00ea: 0x03a9, # GREEK CAPITAL LETTER OMEGA
0x00eb: 0x03b4, # GREEK SMALL LETTER DELTA
0x00ec: 0x221e, # INFINITY
0x00ed: 0x03c6, # GREEK SMALL LETTER PHI
0x00ee: 0x03b5, # GREEK SMALL LETTER EPSILON
0x00ef: 0x2229, # INTERSECTION
0x00f0: 0x2261, # IDENTICAL TO
0x00f1: 0x00b1, # PLUS-MINUS SIGN
0x00f2: 0x2265, # GREATER-THAN OR EQUAL TO
0x00f3: 0x2264, # LESS-THAN OR EQUAL TO
0x00f4: 0x2320, # TOP HALF INTEGRAL
0x00f5: 0x2321, # BOTTOM HALF INTEGRAL
0x00f6: 0x00f7, # DIVISION SIGN
0x00f7: 0x2248, # ALMOST EQUAL TO
0x00f8: 0x00b0, # DEGREE SIGN
0x00f9: 0x2219, # BULLET OPERATOR
0x00fa: 0x00b7, # MIDDLE DOT
0x00fb: 0x221a, # SQUARE ROOT
0x00fc: 0x207f, # SUPERSCRIPT LATIN SMALL LETTER N
0x00fd: 0x00b2, # SUPERSCRIPT TWO
0x00fe: 0x25a0, # BLACK SQUARE
0x00ff: 0x00a0, # NO-BREAK SPACE
})
### Decoding Table
decoding_table = (
u'\x00' # 0x0000 -> NULL
u'\x01' # 0x0001 -> START OF HEADING
u'\x02' # 0x0002 -> START OF TEXT
u'\x03' # 0x0003 -> END OF TEXT
u'\x04' # 0x0004 -> END OF TRANSMISSION
u'\x05' # 0x0005 -> ENQUIRY
u'\x06' # 0x0006 -> ACKNOWLEDGE
u'\x07' # 0x0007 -> BELL
u'\x08' # 0x0008 -> BACKSPACE
u'\t' # 0x0009 -> HORIZONTAL TABULATION
u'\n' # 0x000a -> LINE FEED
u'\x0b' # 0x000b -> VERTICAL TABULATION
u'\x0c' # 0x000c -> FORM FEED
u'\r' # 0x000d -> CARRIAGE RETURN
u'\x0e' # 0x000e -> SHIFT OUT
u'\x0f' # 0x000f -> SHIFT IN
u'\x10' # 0x0010 -> DATA LINK ESCAPE
u'\x11' # 0x0011 -> DEVICE CONTROL ONE
u'\x12' # 0x0012 -> DEVICE CONTROL TWO
u'\x13' # 0x0013 -> DEVICE CONTROL THREE
u'\x14' # 0x0014 -> DEVICE CONTROL FOUR
u'\x15' # 0x0015 -> NEGATIVE ACKNOWLEDGE
u'\x16' # 0x0016 -> SYNCHRONOUS IDLE
u'\x17' # 0x0017 -> END OF TRANSMISSION BLOCK
u'\x18' # 0x0018 -> CANCEL
u'\x19' # 0x0019 -> END OF MEDIUM
u'\x1a' # 0x001a -> SUBSTITUTE
u'\x1b' # 0x001b -> ESCAPE
u'\x1c' # 0x001c -> FILE SEPARATOR
u'\x1d' # 0x001d -> GROUP SEPARATOR
u'\x1e' # 0x001e -> RECORD SEPARATOR
u'\x1f' # 0x001f -> UNIT SEPARATOR
u' ' # 0x0020 -> SPACE
u'!' # 0x0021 -> EXCLAMATION MARK
u'"' # 0x0022 -> QUOTATION MARK
u'#' # 0x0023 -> NUMBER SIGN
u'$' # 0x0024 -> DOLLAR SIGN
u'%' # 0x0025 -> PERCENT SIGN
u'&' # 0x0026 -> AMPERSAND
u"'" # 0x0027 -> APOSTROPHE
u'(' # 0x0028 -> LEFT PARENTHESIS
u')' # 0x0029 -> RIGHT PARENTHESIS
u'*' # 0x002a -> ASTERISK
u'+' # 0x002b -> PLUS SIGN
u',' # 0x002c -> COMMA
u'-' # 0x002d -> HYPHEN-MINUS
u'.' # 0x002e -> FULL STOP
u'/' # 0x002f -> SOLIDUS
u'0' # 0x0030 -> DIGIT ZERO
u'1' # 0x0031 -> DIGIT ONE
u'2' # 0x0032 -> DIGIT TWO
u'3' # 0x0033 -> DIGIT THREE
u'4' # 0x0034 -> DIGIT FOUR
u'5' # 0x0035 -> DIGIT FIVE
u'6' # 0x0036 -> DIGIT SIX
u'7' # 0x0037 -> DIGIT SEVEN
u'8' # 0x0038 -> DIGIT EIGHT
u'9' # 0x0039 -> DIGIT NINE
u':' # 0x003a -> COLON
u';' # 0x003b -> SEMICOLON
u'<' # 0x003c -> LESS-THAN SIGN
u'=' # 0x003d -> EQUALS SIGN
u'>' # 0x003e -> GREATER-THAN SIGN
u'?' # 0x003f -> QUESTION MARK
u'@' # 0x0040 -> COMMERCIAL AT
u'A' # 0x0041 -> LATIN CAPITAL LETTER A
u'B' # 0x0042 -> LATIN CAPITAL LETTER B
u'C' # 0x0043 -> LATIN CAPITAL LETTER C
u'D' # 0x0044 -> LATIN CAPITAL LETTER D
u'E' # 0x0045 -> LATIN CAPITAL LETTER E
u'F' # 0x0046 -> LATIN CAPITAL LETTER F
u'G' # 0x0047 -> LATIN CAPITAL LETTER G
u'H' # 0x0048 -> LATIN CAPITAL LETTER H
u'I' # 0x0049 -> LATIN CAPITAL LETTER I
u'J' # 0x004a -> LATIN CAPITAL LETTER J
u'K' # 0x004b -> LATIN CAPITAL LETTER K
u'L' # 0x004c -> LATIN CAPITAL LETTER L
u'M' # 0x004d -> LATIN CAPITAL LETTER M
u'N' # 0x004e -> LATIN CAPITAL LETTER N
u'O' # 0x004f -> LATIN CAPITAL LETTER O
u'P' # 0x0050 -> LATIN CAPITAL LETTER P
u'Q' # 0x0051 -> LATIN CAPITAL LETTER Q
u'R' # 0x0052 -> LATIN CAPITAL LETTER R
u'S' # 0x0053 -> LATIN CAPITAL LETTER S
u'T' # 0x0054 -> LATIN CAPITAL LETTER T
u'U' # 0x0055 -> LATIN CAPITAL LETTER U
u'V' # 0x0056 -> LATIN CAPITAL LETTER V
u'W' # 0x0057 -> LATIN CAPITAL LETTER W
u'X' # 0x0058 -> LATIN CAPITAL LETTER X
u'Y' # 0x0059 -> LATIN CAPITAL LETTER Y
u'Z' # 0x005a -> LATIN CAPITAL LETTER Z
u'[' # 0x005b -> LEFT SQUARE BRACKET
u'\\' # 0x005c -> REVERSE SOLIDUS
u']' # 0x005d -> RIGHT SQUARE BRACKET
u'^' # 0x005e -> CIRCUMFLEX ACCENT
u'_' # 0x005f -> LOW LINE
u'`' # 0x0060 -> GRAVE ACCENT
u'a' # 0x0061 -> LATIN SMALL LETTER A
u'b' # 0x0062 -> LATIN SMALL LETTER B
u'c' # 0x0063 -> LATIN SMALL LETTER C
u'd' # 0x0064 -> LATIN SMALL LETTER D
u'e' # 0x0065 -> LATIN SMALL LETTER E
u'f' # 0x0066 -> LATIN SMALL LETTER F
u'g' # 0x0067 -> LATIN SMALL LETTER G
u'h' # 0x0068 -> LATIN SMALL LETTER H
u'i' # 0x0069 -> LATIN SMALL LETTER I
u'j' # 0x006a -> LATIN SMALL LETTER J
u'k' # 0x006b -> LATIN SMALL LETTER K
u'l' # 0x006c -> LATIN SMALL LETTER L
u'm' # 0x006d -> LATIN SMALL LETTER M
u'n' # 0x006e -> LATIN SMALL LETTER N
u'o' # 0x006f -> LATIN SMALL LETTER O
u'p' # 0x0070 -> LATIN SMALL LETTER P
u'q' # 0x0071 -> LATIN SMALL LETTER Q
u'r' # 0x0072 -> LATIN SMALL LETTER R
u's' # 0x0073 -> LATIN SMALL LETTER S
u't' # 0x0074 -> LATIN SMALL LETTER T
u'u' # 0x0075 -> LATIN SMALL LETTER U
u'v' # 0x0076 -> LATIN SMALL LETTER V
u'w' # 0x0077 -> LATIN SMALL LETTER W
u'x' # 0x0078 -> LATIN SMALL LETTER X
u'y' # 0x0079 -> LATIN SMALL LETTER Y
u'z' # 0x007a -> LATIN SMALL LETTER Z
u'{' # 0x007b -> LEFT CURLY BRACKET
u'|' # 0x007c -> VERTICAL LINE
u'}' # 0x007d -> RIGHT CURLY BRACKET
u'~' # 0x007e -> TILDE
u'\x7f' # 0x007f -> DELETE
u'\xc7' # 0x0080 -> LATIN CAPITAL LETTER C WITH CEDILLA
u'\xfc' # 0x0081 -> LATIN SMALL LETTER U WITH DIAERESIS
u'\xe9' # 0x0082 -> LATIN SMALL LETTER E WITH ACUTE
u'\xe2' # 0x0083 -> LATIN SMALL LETTER A WITH CIRCUMFLEX
u'\xe3' # 0x0084 -> LATIN SMALL LETTER A WITH TILDE
u'\xe0' # 0x0085 -> LATIN SMALL LETTER A WITH GRAVE
u'\xc1' # 0x0086 -> LATIN CAPITAL LETTER A WITH ACUTE
u'\xe7' # 0x0087 -> LATIN SMALL LETTER C WITH CEDILLA
u'\xea' # 0x0088 -> LATIN SMALL LETTER E WITH CIRCUMFLEX
u'\xca' # 0x0089 -> LATIN CAPITAL LETTER E WITH CIRCUMFLEX
u'\xe8' # 0x008a -> LATIN SMALL LETTER E WITH GRAVE
u'\xcd' # 0x008b -> LATIN CAPITAL LETTER I WITH ACUTE
u'\xd4' # 0x008c -> LATIN CAPITAL LETTER O WITH CIRCUMFLEX
u'\xec' # 0x008d -> LATIN SMALL LETTER I WITH GRAVE
u'\xc3' # 0x008e -> LATIN CAPITAL LETTER A WITH TILDE
u'\xc2' # 0x008f -> LATIN CAPITAL LETTER A WITH CIRCUMFLEX
u'\xc9' # 0x0090 -> LATIN CAPITAL LETTER E WITH ACUTE
u'\xc0' # 0x0091 -> LATIN CAPITAL LETTER A WITH GRAVE
u'\xc8' # 0x0092 -> LATIN CAPITAL LETTER E WITH GRAVE
u'\xf4' # 0x0093 -> LATIN SMALL LETTER O WITH CIRCUMFLEX
u'\xf5' # 0x0094 -> LATIN SMALL LETTER O WITH TILDE
u'\xf2' # 0x0095 -> LATIN SMALL LETTER O WITH GRAVE
u'\xda' # 0x0096 -> LATIN CAPITAL LETTER U WITH ACUTE
u'\xf9' # 0x0097 -> LATIN SMALL LETTER U WITH GRAVE
u'\xcc' # 0x0098 -> LATIN CAPITAL LETTER I WITH GRAVE
u'\xd5' # 0x0099 -> LATIN CAPITAL LETTER O WITH TILDE
u'\xdc' # 0x009a -> LATIN CAPITAL LETTER U WITH DIAERESIS
u'\xa2' # 0x009b -> CENT SIGN
u'\xa3' # 0x009c -> POUND SIGN
u'\xd9' # 0x009d -> LATIN CAPITAL LETTER U WITH GRAVE
u'\u20a7' # 0x009e -> PESETA SIGN
u'\xd3' # 0x009f -> LATIN CAPITAL LETTER O WITH ACUTE
u'\xe1' # 0x00a0 -> LATIN SMALL LETTER A WITH ACUTE
u'\xed' # 0x00a1 -> LATIN SMALL LETTER I WITH ACUTE
u'\xf3' # 0x00a2 -> LATIN SMALL LETTER O WITH ACUTE
u'\xfa' # 0x00a3 -> LATIN SMALL LETTER U WITH ACUTE
u'\xf1' # 0x00a4 -> LATIN SMALL LETTER N WITH TILDE
u'\xd1' # 0x00a5 -> LATIN CAPITAL LETTER N WITH TILDE
u'\xaa' # 0x00a6 -> FEMININE ORDINAL INDICATOR
u'\xba' # 0x00a7 -> MASCULINE ORDINAL INDICATOR
u'\xbf' # 0x00a8 -> INVERTED QUESTION MARK
u'\xd2' # 0x00a9 -> LATIN CAPITAL LETTER O WITH GRAVE
u'\xac' # 0x00aa -> NOT SIGN
u'\xbd' # 0x00ab -> VULGAR FRACTION ONE HALF
u'\xbc' # 0x00ac -> VULGAR FRACTION ONE QUARTER
u'\xa1' # 0x00ad -> INVERTED EXCLAMATION MARK
u'\xab' # 0x00ae -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
u'\xbb' # 0x00af -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
u'\u2591' # 0x00b0 -> LIGHT SHADE
u'\u2592' # 0x00b1 -> MEDIUM SHADE
u'\u2593' # 0x00b2 -> DARK SHADE
u'\u2502' # 0x00b3 -> BOX DRAWINGS LIGHT VERTICAL
u'\u2524' # 0x00b4 -> BOX DRAWINGS LIGHT VERTICAL AND LEFT
u'\u2561' # 0x00b5 -> BOX DRAWINGS VERTICAL SINGLE AND LEFT DOUBLE
u'\u2562' # 0x00b6 -> BOX DRAWINGS VERTICAL DOUBLE AND LEFT SINGLE
u'\u2556' # 0x00b7 -> BOX DRAWINGS DOWN DOUBLE AND LEFT SINGLE
u'\u2555' # 0x00b8 -> BOX DRAWINGS DOWN SINGLE AND LEFT DOUBLE
u'\u2563' # 0x00b9 -> BOX DRAWINGS DOUBLE VERTICAL AND LEFT
u'\u2551' # 0x00ba -> BOX DRAWINGS DOUBLE VERTICAL
u'\u2557' # 0x00bb -> BOX DRAWINGS DOUBLE DOWN AND LEFT
u'\u255d' # 0x00bc -> BOX DRAWINGS DOUBLE UP AND LEFT
u'\u255c' # 0x00bd -> BOX DRAWINGS UP DOUBLE AND LEFT SINGLE
u'\u255b' # 0x00be -> BOX DRAWINGS UP SINGLE AND LEFT DOUBLE
u'\u2510' # 0x00bf -> BOX DRAWINGS LIGHT DOWN AND LEFT
u'\u2514' # 0x00c0 -> BOX DRAWINGS LIGHT UP AND RIGHT
u'\u2534' # 0x00c1 -> BOX DRAWINGS LIGHT UP AND HORIZONTAL
u'\u252c' # 0x00c2 -> BOX DRAWINGS LIGHT DOWN AND HORIZONTAL
u'\u251c' # 0x00c3 -> BOX DRAWINGS LIGHT VERTICAL AND RIGHT
u'\u2500' # 0x00c4 -> BOX DRAWINGS LIGHT HORIZONTAL
u'\u253c' # 0x00c5 -> BOX DRAWINGS LIGHT VERTICAL AND HORIZONTAL
u'\u255e' # 0x00c6 -> BOX DRAWINGS VERTICAL SINGLE AND RIGHT DOUBLE
u'\u255f' # 0x00c7 -> BOX DRAWINGS VERTICAL DOUBLE AND RIGHT SINGLE
u'\u255a' # 0x00c8 -> BOX DRAWINGS DOUBLE UP AND RIGHT
u'\u2554' # 0x00c9 -> BOX DRAWINGS DOUBLE DOWN AND RIGHT
u'\u2569' # 0x00ca -> BOX DRAWINGS DOUBLE UP AND HORIZONTAL
u'\u2566' # 0x00cb -> BOX DRAWINGS DOUBLE DOWN AND HORIZONTAL
u'\u2560' # 0x00cc -> BOX DRAWINGS DOUBLE VERTICAL AND RIGHT
u'\u2550' # 0x00cd -> BOX DRAWINGS DOUBLE HORIZONTAL
u'\u256c' # 0x00ce -> BOX DRAWINGS DOUBLE VERTICAL AND HORIZONTAL
u'\u2567' # 0x00cf -> BOX DRAWINGS UP SINGLE AND HORIZONTAL DOUBLE
u'\u2568' # 0x00d0 -> BOX DRAWINGS UP DOUBLE AND HORIZONTAL SINGLE
u'\u2564' # 0x00d1 -> BOX DRAWINGS DOWN SINGLE AND HORIZONTAL DOUBLE
u'\u2565' # 0x00d2 -> BOX DRAWINGS DOWN DOUBLE AND HORIZONTAL SINGLE
u'\u2559' # 0x00d3 -> BOX DRAWINGS UP DOUBLE AND RIGHT SINGLE
u'\u2558' # 0x00d4 -> BOX DRAWINGS UP SINGLE AND RIGHT DOUBLE
u'\u2552' # 0x00d5 -> BOX DRAWINGS DOWN SINGLE AND RIGHT DOUBLE
u'\u2553' # 0x00d6 -> BOX DRAWINGS DOWN DOUBLE AND RIGHT SINGLE
u'\u256b' # 0x00d7 -> BOX DRAWINGS VERTICAL DOUBLE AND HORIZONTAL SINGLE
u'\u256a' # 0x00d8 -> BOX DRAWINGS VERTICAL SINGLE AND HORIZONTAL DOUBLE
u'\u2518' # 0x00d9 -> BOX DRAWINGS LIGHT UP AND LEFT
u'\u250c' # 0x00da -> BOX DRAWINGS LIGHT DOWN AND RIGHT
u'\u2588' # 0x00db -> FULL BLOCK
u'\u2584' # 0x00dc -> LOWER HALF BLOCK
u'\u258c' # 0x00dd -> LEFT HALF BLOCK
u'\u2590' # 0x00de -> RIGHT HALF BLOCK
u'\u2580' # 0x00df -> UPPER HALF BLOCK
u'\u03b1' # 0x00e0 -> GREEK SMALL LETTER ALPHA
u'\xdf' # 0x00e1 -> LATIN SMALL LETTER SHARP S
u'\u0393' # 0x00e2 -> GREEK CAPITAL LETTER GAMMA
u'\u03c0' # 0x00e3 -> GREEK SMALL LETTER PI
u'\u03a3' # 0x00e4 -> GREEK CAPITAL LETTER SIGMA
u'\u03c3' # 0x00e5 -> GREEK SMALL LETTER SIGMA
u'\xb5' # 0x00e6 -> MICRO SIGN
u'\u03c4' # 0x00e7 -> GREEK SMALL LETTER TAU
u'\u03a6' # 0x00e8 -> GREEK CAPITAL LETTER PHI
u'\u0398' # 0x00e9 -> GREEK CAPITAL LETTER THETA
u'\u03a9' # 0x00ea -> GREEK CAPITAL LETTER OMEGA
u'\u03b4' # 0x00eb -> GREEK SMALL LETTER DELTA
u'\u221e' # 0x00ec -> INFINITY
u'\u03c6' # 0x00ed -> GREEK SMALL LETTER PHI
u'\u03b5' # 0x00ee -> GREEK SMALL LETTER EPSILON
u'\u2229' # 0x00ef -> INTERSECTION
u'\u2261' # 0x00f0 -> IDENTICAL TO
u'\xb1' # 0x00f1 -> PLUS-MINUS SIGN
u'\u2265' # 0x00f2 -> GREATER-THAN OR EQUAL TO
u'\u2264' # 0x00f3 -> LESS-THAN OR EQUAL TO
u'\u2320' # 0x00f4 -> TOP HALF INTEGRAL
u'\u2321' # 0x00f5 -> BOTTOM HALF INTEGRAL
u'\xf7' # 0x00f6 -> DIVISION SIGN
u'\u2248' # 0x00f7 -> ALMOST EQUAL TO
u'\xb0' # 0x00f8 -> DEGREE SIGN
u'\u2219' # 0x00f9 -> BULLET OPERATOR
u'\xb7' # 0x00fa -> MIDDLE DOT
u'\u221a' # 0x00fb -> SQUARE ROOT
u'\u207f' # 0x00fc -> SUPERSCRIPT LATIN SMALL LETTER N
u'\xb2' # 0x00fd -> SUPERSCRIPT TWO
u'\u25a0' # 0x00fe -> BLACK SQUARE
u'\xa0' # 0x00ff -> NO-BREAK SPACE
)
### Encoding Map
encoding_map = {
0x0000: 0x0000, # NULL
0x0001: 0x0001, # START OF HEADING
0x0002: 0x0002, # START OF TEXT
0x0003: 0x0003, # END OF TEXT
0x0004: 0x0004, # END OF TRANSMISSION
0x0005: 0x0005, # ENQUIRY
0x0006: 0x0006, # ACKNOWLEDGE
0x0007: 0x0007, # BELL
0x0008: 0x0008, # BACKSPACE
0x0009: 0x0009, # HORIZONTAL TABULATION
0x000a: 0x000a, # LINE FEED
0x000b: 0x000b, # VERTICAL TABULATION
0x000c: 0x000c, # FORM FEED
0x000d: 0x000d, # CARRIAGE RETURN
0x000e: 0x000e, # SHIFT OUT
0x000f: 0x000f, # SHIFT IN
0x0010: 0x0010, # DATA LINK ESCAPE
0x0011: 0x0011, # DEVICE CONTROL ONE
0x0012: 0x0012, # DEVICE CONTROL TWO
0x0013: 0x0013, # DEVICE CONTROL THREE
0x0014: 0x0014, # DEVICE CONTROL FOUR
0x0015: 0x0015, # NEGATIVE ACKNOWLEDGE
0x0016: 0x0016, # SYNCHRONOUS IDLE
0x0017: 0x0017, # END OF TRANSMISSION BLOCK
0x0018: 0x0018, # CANCEL
0x0019: 0x0019, # END OF MEDIUM
0x001a: 0x001a, # SUBSTITUTE
0x001b: 0x001b, # ESCAPE
0x001c: 0x001c, # FILE SEPARATOR
0x001d: 0x001d, # GROUP SEPARATOR
0x001e: 0x001e, # RECORD SEPARATOR
0x001f: 0x001f, # UNIT SEPARATOR
0x0020: 0x0020, # SPACE
0x0021: 0x0021, # EXCLAMATION MARK
0x0022: 0x0022, # QUOTATION MARK
0x0023: 0x0023, # NUMBER SIGN
0x0024: 0x0024, # DOLLAR SIGN
0x0025: 0x0025, # PERCENT SIGN
0x0026: 0x0026, # AMPERSAND
0x0027: 0x0027, # APOSTROPHE
0x0028: 0x0028, # LEFT PARENTHESIS
0x0029: 0x0029, # RIGHT PARENTHESIS
0x002a: 0x002a, # ASTERISK
0x002b: 0x002b, # PLUS SIGN
0x002c: 0x002c, # COMMA
0x002d: 0x002d, # HYPHEN-MINUS
0x002e: 0x002e, # FULL STOP
0x002f: 0x002f, # SOLIDUS
0x0030: 0x0030, # DIGIT ZERO
0x0031: 0x0031, # DIGIT ONE
0x0032: 0x0032, # DIGIT TWO
0x0033: 0x0033, # DIGIT THREE
0x0034: 0x0034, # DIGIT FOUR
0x0035: 0x0035, # DIGIT FIVE
0x0036: 0x0036, # DIGIT SIX
0x0037: 0x0037, # DIGIT SEVEN
0x0038: 0x0038, # DIGIT EIGHT
0x0039: 0x0039, # DIGIT NINE
0x003a: 0x003a, # COLON
0x003b: 0x003b, # SEMICOLON
0x003c: 0x003c, # LESS-THAN SIGN
0x003d: 0x003d, # EQUALS SIGN
0x003e: 0x003e, # GREATER-THAN SIGN
0x003f: 0x003f, # QUESTION MARK
0x0040: 0x0040, # COMMERCIAL AT
0x0041: 0x0041, # LATIN CAPITAL LETTER A
0x0042: 0x0042, # LATIN CAPITAL LETTER B
0x0043: 0x0043, # LATIN CAPITAL LETTER C
0x0044: 0x0044, # LATIN CAPITAL LETTER D
0x0045: 0x0045, # LATIN CAPITAL LETTER E
0x0046: 0x0046, # LATIN CAPITAL LETTER F
0x0047: 0x0047, # LATIN CAPITAL LETTER G
0x0048: 0x0048, # LATIN CAPITAL LETTER H
0x0049: 0x0049, # LATIN CAPITAL LETTER I
0x004a: 0x004a, # LATIN CAPITAL LETTER J
0x004b: 0x004b, # LATIN CAPITAL LETTER K
0x004c: 0x004c, # LATIN CAPITAL LETTER L
0x004d: 0x004d, # LATIN CAPITAL LETTER M
0x004e: 0x004e, # LATIN CAPITAL LETTER N
0x004f: 0x004f, # LATIN CAPITAL LETTER O
0x0050: 0x0050, # LATIN CAPITAL LETTER P
0x0051: 0x0051, # LATIN CAPITAL LETTER Q
0x0052: 0x0052, # LATIN CAPITAL LETTER R
0x0053: 0x0053, # LATIN CAPITAL LETTER S
0x0054: 0x0054, # LATIN CAPITAL LETTER T
0x0055: 0x0055, # LATIN CAPITAL LETTER U
0x0056: 0x0056, # LATIN CAPITAL LETTER V
0x0057: 0x0057, # LATIN CAPITAL LETTER W
0x0058: 0x0058, # LATIN CAPITAL LETTER X
0x0059: 0x0059, # LATIN CAPITAL LETTER Y
0x005a: 0x005a, # LATIN CAPITAL LETTER Z
0x005b: 0x005b, # LEFT SQUARE BRACKET
0x005c: 0x005c, # REVERSE SOLIDUS
0x005d: 0x005d, # RIGHT SQUARE BRACKET
0x005e: 0x005e, # CIRCUMFLEX ACCENT
0x005f: 0x005f, # LOW LINE
0x0060: 0x0060, # GRAVE ACCENT
0x0061: 0x0061, # LATIN SMALL LETTER A
0x0062: 0x0062, # LATIN SMALL LETTER B
0x0063: 0x0063, # LATIN SMALL LETTER C
0x0064: 0x0064, # LATIN SMALL LETTER D
0x0065: 0x0065, # LATIN SMALL LETTER E
0x0066: 0x0066, # LATIN SMALL LETTER F
0x0067: 0x0067, # LATIN SMALL LETTER G
0x0068: 0x0068, # LATIN SMALL LETTER H
0x0069: 0x0069, # LATIN SMALL LETTER I
0x006a: 0x006a, # LATIN SMALL LETTER J
0x006b: 0x006b, # LATIN SMALL LETTER K
0x006c: 0x006c, # LATIN SMALL LETTER L
0x006d: 0x006d, # LATIN SMALL LETTER M
0x006e: 0x006e, # LATIN SMALL LETTER N
0x006f: 0x006f, # LATIN SMALL LETTER O
0x0070: 0x0070, # LATIN SMALL LETTER P
0x0071: 0x0071, # LATIN SMALL LETTER Q
0x0072: 0x0072, # LATIN SMALL LETTER R
0x0073: 0x0073, # LATIN SMALL LETTER S
0x0074: 0x0074, # LATIN SMALL LETTER T
0x0075: 0x0075, # LATIN SMALL LETTER U
0x0076: 0x0076, # LATIN SMALL LETTER V
0x0077: 0x0077, # LATIN SMALL LETTER W
0x0078: 0x0078, # LATIN SMALL LETTER X
0x0079: 0x0079, # LATIN SMALL LETTER Y
0x007a: 0x007a, # LATIN SMALL LETTER Z
0x007b: 0x007b, # LEFT CURLY BRACKET
0x007c: 0x007c, # VERTICAL LINE
0x007d: 0x007d, # RIGHT CURLY BRACKET
0x007e: 0x007e, # TILDE
0x007f: 0x007f, # DELETE
0x00a0: 0x00ff, # NO-BREAK SPACE
0x00a1: 0x00ad, # INVERTED EXCLAMATION MARK
0x00a2: 0x009b, # CENT SIGN
0x00a3: 0x009c, # POUND SIGN
0x00aa: 0x00a6, # FEMININE ORDINAL INDICATOR
0x00ab: 0x00ae, # LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
0x00ac: 0x00aa, # NOT SIGN
0x00b0: 0x00f8, # DEGREE SIGN
0x00b1: 0x00f1, # PLUS-MINUS SIGN
0x00b2: 0x00fd, # SUPERSCRIPT TWO
0x00b5: 0x00e6, # MICRO SIGN
0x00b7: 0x00fa, # MIDDLE DOT
0x00ba: 0x00a7, # MASCULINE ORDINAL INDICATOR
0x00bb: 0x00af, # RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
0x00bc: 0x00ac, # VULGAR FRACTION ONE QUARTER
0x00bd: 0x00ab, # VULGAR FRACTION ONE HALF
0x00bf: 0x00a8, # INVERTED QUESTION MARK
0x00c0: 0x0091, # LATIN CAPITAL LETTER A WITH GRAVE
0x00c1: 0x0086, # LATIN CAPITAL LETTER A WITH ACUTE
0x00c2: 0x008f, # LATIN CAPITAL LETTER A WITH CIRCUMFLEX
0x00c3: 0x008e, # LATIN CAPITAL LETTER A WITH TILDE
0x00c7: 0x0080, # LATIN CAPITAL LETTER C WITH CEDILLA
0x00c8: 0x0092, # LATIN CAPITAL LETTER E WITH GRAVE
0x00c9: 0x0090, # LATIN CAPITAL LETTER E WITH ACUTE
0x00ca: 0x0089, # LATIN CAPITAL LETTER E WITH CIRCUMFLEX
0x00cc: 0x0098, # LATIN CAPITAL LETTER I WITH GRAVE
0x00cd: 0x008b, # LATIN CAPITAL LETTER I WITH ACUTE
0x00d1: 0x00a5, # LATIN CAPITAL LETTER N WITH TILDE
0x00d2: 0x00a9, # LATIN CAPITAL LETTER O WITH GRAVE
0x00d3: 0x009f, # LATIN CAPITAL LETTER O WITH ACUTE
0x00d4: 0x008c, # LATIN CAPITAL LETTER O WITH CIRCUMFLEX
0x00d5: 0x0099, # LATIN CAPITAL LETTER O WITH TILDE
0x00d9: 0x009d, # LATIN CAPITAL LETTER U WITH GRAVE
0x00da: 0x0096, # LATIN CAPITAL LETTER U WITH ACUTE
0x00dc: 0x009a, # LATIN CAPITAL LETTER U WITH DIAERESIS
0x00df: 0x00e1, # LATIN SMALL LETTER SHARP S
0x00e0: 0x0085, # LATIN SMALL LETTER A WITH GRAVE
0x00e1: 0x00a0, # LATIN SMALL LETTER A WITH ACUTE
0x00e2: 0x0083, # LATIN SMALL LETTER A WITH CIRCUMFLEX
0x00e3: 0x0084, # LATIN SMALL LETTER A WITH TILDE
0x00e7: 0x0087, # LATIN SMALL LETTER C WITH CEDILLA
0x00e8: 0x008a, # LATIN SMALL LETTER E WITH GRAVE
0x00e9: 0x0082, # LATIN SMALL LETTER E WITH ACUTE
0x00ea: 0x0088, # LATIN SMALL LETTER E WITH CIRCUMFLEX
0x00ec: 0x008d, # LATIN SMALL LETTER I WITH GRAVE
0x00ed: 0x00a1, # LATIN SMALL LETTER I WITH ACUTE
0x00f1: 0x00a4, # LATIN SMALL LETTER N WITH TILDE
0x00f2: 0x0095, # LATIN SMALL LETTER O WITH GRAVE
0x00f3: 0x00a2, # LATIN SMALL LETTER O WITH ACUTE
0x00f4: 0x0093, # LATIN SMALL LETTER O WITH CIRCUMFLEX
0x00f5: 0x0094, # LATIN SMALL LETTER O WITH TILDE
0x00f7: 0x00f6, # DIVISION SIGN
0x00f9: 0x0097, # LATIN SMALL LETTER U WITH GRAVE
0x00fa: 0x00a3, # LATIN SMALL LETTER U WITH ACUTE
0x00fc: 0x0081, # LATIN SMALL LETTER U WITH DIAERESIS
0x0393: 0x00e2, # GREEK CAPITAL LETTER GAMMA
0x0398: 0x00e9, # GREEK CAPITAL LETTER THETA
0x03a3: 0x00e4, # GREEK CAPITAL LETTER SIGMA
0x03a6: 0x00e8, # GREEK CAPITAL LETTER PHI
0x03a9: 0x00ea, # GREEK CAPITAL LETTER OMEGA
0x03b1: 0x00e0, # GREEK SMALL LETTER ALPHA
0x03b4: 0x00eb, # GREEK SMALL LETTER DELTA
0x03b5: 0x00ee, # GREEK SMALL LETTER EPSILON
0x03c0: 0x00e3, # GREEK SMALL LETTER PI
0x03c3: 0x00e5, # GREEK SMALL LETTER SIGMA
0x03c4: 0x00e7, # GREEK SMALL LETTER TAU
0x03c6: 0x00ed, # GREEK SMALL LETTER PHI
0x207f: 0x00fc, # SUPERSCRIPT LATIN SMALL LETTER N
0x20a7: 0x009e, # PESETA SIGN
0x2219: 0x00f9, # BULLET OPERATOR
0x221a: 0x00fb, # SQUARE ROOT
0x221e: 0x00ec, # INFINITY
0x2229: 0x00ef, # INTERSECTION
0x2248: 0x00f7, # ALMOST EQUAL TO
0x2261: 0x00f0, # IDENTICAL TO
0x2264: 0x00f3, # LESS-THAN OR EQUAL TO
0x2265: 0x00f2, # GREATER-THAN OR EQUAL TO
0x2320: 0x00f4, # TOP HALF INTEGRAL
0x2321: 0x00f5, # BOTTOM HALF INTEGRAL
0x2500: 0x00c4, # BOX DRAWINGS LIGHT HORIZONTAL
0x2502: 0x00b3, # BOX DRAWINGS LIGHT VERTICAL
0x250c: 0x00da, # BOX DRAWINGS LIGHT DOWN AND RIGHT
0x2510: 0x00bf, # BOX DRAWINGS LIGHT DOWN AND LEFT
0x2514: 0x00c0, # BOX DRAWINGS LIGHT UP AND RIGHT
0x2518: 0x00d9, # BOX DRAWINGS LIGHT UP AND LEFT
0x251c: 0x00c3, # BOX DRAWINGS LIGHT VERTICAL AND RIGHT
0x2524: 0x00b4, # BOX DRAWINGS LIGHT VERTICAL AND LEFT
0x252c: 0x00c2, # BOX DRAWINGS LIGHT DOWN AND HORIZONTAL
0x2534: 0x00c1, # BOX DRAWINGS LIGHT UP AND HORIZONTAL
0x253c: 0x00c5, # BOX DRAWINGS LIGHT VERTICAL AND HORIZONTAL
0x2550: 0x00cd, # BOX DRAWINGS DOUBLE HORIZONTAL
0x2551: 0x00ba, # BOX DRAWINGS DOUBLE VERTICAL
0x2552: 0x00d5, # BOX DRAWINGS DOWN SINGLE AND RIGHT DOUBLE
0x2553: 0x00d6, # BOX DRAWINGS DOWN DOUBLE AND RIGHT SINGLE
0x2554: 0x00c9, # BOX DRAWINGS DOUBLE DOWN AND RIGHT
0x2555: 0x00b8, # BOX DRAWINGS DOWN SINGLE AND LEFT DOUBLE
0x2556: 0x00b7, # BOX DRAWINGS DOWN DOUBLE AND LEFT SINGLE
0x2557: 0x00bb, # BOX DRAWINGS DOUBLE DOWN AND LEFT
0x2558: 0x00d4, # BOX DRAWINGS UP SINGLE AND RIGHT DOUBLE
0x2559: 0x00d3, # BOX DRAWINGS UP DOUBLE AND RIGHT SINGLE
0x255a: 0x00c8, # BOX DRAWINGS DOUBLE UP AND RIGHT
0x255b: 0x00be, # BOX DRAWINGS UP SINGLE AND LEFT DOUBLE
0x255c: 0x00bd, # BOX DRAWINGS UP DOUBLE AND LEFT SINGLE
0x255d: 0x00bc, # BOX DRAWINGS DOUBLE UP AND LEFT
0x255e: 0x00c6, # BOX DRAWINGS VERTICAL SINGLE AND RIGHT DOUBLE
0x255f: 0x00c7, # BOX DRAWINGS VERTICAL DOUBLE AND RIGHT SINGLE
0x2560: 0x00cc, # BOX DRAWINGS DOUBLE VERTICAL AND RIGHT
0x2561: 0x00b5, # BOX DRAWINGS VERTICAL SINGLE AND LEFT DOUBLE
0x2562: 0x00b6, # BOX DRAWINGS VERTICAL DOUBLE AND LEFT SINGLE
0x2563: 0x00b9, # BOX DRAWINGS DOUBLE VERTICAL AND LEFT
0x2564: 0x00d1, # BOX DRAWINGS DOWN SINGLE AND HORIZONTAL DOUBLE
0x2565: 0x00d2, # BOX DRAWINGS DOWN DOUBLE AND HORIZONTAL SINGLE
0x2566: 0x00cb, # BOX DRAWINGS DOUBLE DOWN AND HORIZONTAL
0x2567: 0x00cf, # BOX DRAWINGS UP SINGLE AND HORIZONTAL DOUBLE
0x2568: 0x00d0, # BOX DRAWINGS UP DOUBLE AND HORIZONTAL SINGLE
0x2569: 0x00ca, # BOX DRAWINGS DOUBLE UP AND HORIZONTAL
0x256a: 0x00d8, # BOX DRAWINGS VERTICAL SINGLE AND HORIZONTAL DOUBLE
0x256b: 0x00d7, # BOX DRAWINGS VERTICAL DOUBLE AND HORIZONTAL SINGLE
0x256c: 0x00ce, # BOX DRAWINGS DOUBLE VERTICAL AND HORIZONTAL
0x2580: 0x00df, # UPPER HALF BLOCK
0x2584: 0x00dc, # LOWER HALF BLOCK
0x2588: 0x00db, # FULL BLOCK
0x258c: 0x00dd, # LEFT HALF BLOCK
0x2590: 0x00de, # RIGHT HALF BLOCK
0x2591: 0x00b0, # LIGHT SHADE
0x2592: 0x00b1, # MEDIUM SHADE
0x2593: 0x00b2, # DARK SHADE
0x25a0: 0x00fe, # BLACK SQUARE
}
| gpl-2.0 |
J4LP/hr2 | j4hr/utils.py | 1 | 1914 | # -*- coding: utf-8 -*-
'''Helper utilities and decorators.'''
from flask import session, flash, redirect, url_for, Response, request
from flask.ext.login import current_user
from functools import wraps
def flash_errors(form):
'''Flash all errors for a form.'''
for field, errors in form.errors.items():
for error in errors:
flash("Error in the {0} field - {1}"
.format(getattr(form, field).label.text, error), 'warning')
class ReverseProxied(object):
'''Wrap the application in this middleware and configure the
front-end server to add these headers, to let you quietly bind
this to a URL other than / and to an HTTP scheme that is
different than what is used locally.
In nginx:
location /myprefix {
proxy_pass http://192.168.0.1:5001;
proxy_set_header Host $host;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Scheme $scheme;
proxy_set_header X-Script-Name /myprefix;
}
:param app: the WSGI application
'''
def __init__(self, app):
self.app = app
def __call__(self, environ, start_response):
script_name = environ.get('HTTP_X_SCRIPT_NAME', '')
if script_name:
environ['SCRIPT_NAME'] = script_name
path_info = environ['PATH_INFO']
if path_info.startswith(script_name):
environ['PATH_INFO'] = path_info[len(script_name):]
scheme = environ.get('HTTP_X_SCHEME', '')
if scheme:
environ['wsgi.url_scheme'] = scheme
return self.app(environ, start_response)
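# Minimal wiring sketch for the middleware above (the Flask app object is an
# assumption):
#
# from flask import Flask
# app = Flask(__name__)
# app.wsgi_app = ReverseProxied(app.wsgi_app)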
def login_required(f):
@wraps(f)
def decorated_function(*args, **kwargs):
if session.get('j4oauth_token', None) is None:
return redirect(url_for('admin.login', next=request.url))
return f(*args, **kwargs)
return decorated_function
| mit |
vitorio/bite-project | deps/gdata-python-client/src/gdata/codesearch/__init__.py | 274 | 4934 | # -*- coding: utf-8 -*-
#
# Copyright (c) 2007 Benoit Chesneau <benoitc@metavers.net>
#
# Permission to use, copy, modify, and distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""Contains extensions to Atom objects used by Google Codesearch"""
__author__ = 'Benoit Chesneau'
import atom
import gdata
CODESEARCH_NAMESPACE='http://schemas.google.com/codesearch/2006'
CODESEARCH_TEMPLATE='{http://schemas.google.com/codesearch/2006}%s'
class Match(atom.AtomBase):
""" The Google Codesearch match element """
_tag = 'match'
_namespace = CODESEARCH_NAMESPACE
_children = atom.AtomBase._children.copy()
_attributes = atom.AtomBase._attributes.copy()
_attributes['lineNumber'] = 'line_number'
_attributes['type'] = 'type'
def __init__(self, line_number=None, type=None, extension_elements=None,
extension_attributes=None, text=None):
self.text = text
self.type = type
self.line_number = line_number
self.extension_elements = extension_elements or []
self.extension_attributes = extension_attributes or {}
class File(atom.AtomBase):
""" The Google Codesearch file element"""
_tag = 'file'
_namespace = CODESEARCH_NAMESPACE
_children = atom.AtomBase._children.copy()
_attributes = atom.AtomBase._attributes.copy()
_attributes['name'] = 'name'
def __init__(self, name=None, extension_elements=None,
extension_attributes=None, text=None):
self.text = text
self.name = name
self.extension_elements = extension_elements or []
self.extension_attributes = extension_attributes or {}
class Package(atom.AtomBase):
""" The Google Codesearch package element"""
_tag = 'package'
_namespace = CODESEARCH_NAMESPACE
_children = atom.AtomBase._children.copy()
_attributes = atom.AtomBase._attributes.copy()
_attributes['name'] = 'name'
_attributes['uri'] = 'uri'
def __init__(self, name=None, uri=None, extension_elements=None,
extension_attributes=None, text=None):
self.text = text
self.name = name
self.uri = uri
self.extension_elements = extension_elements or []
self.extension_attributes = extension_attributes or {}
class CodesearchEntry(gdata.GDataEntry):
""" Google codesearch atom entry"""
_tag = gdata.GDataEntry._tag
_namespace = gdata.GDataEntry._namespace
_children = gdata.GDataEntry._children.copy()
_attributes = gdata.GDataEntry._attributes.copy()
_children['{%s}file' % CODESEARCH_NAMESPACE] = ('file', File)
_children['{%s}package' % CODESEARCH_NAMESPACE] = ('package', Package)
_children['{%s}match' % CODESEARCH_NAMESPACE] = ('match', [Match])
def __init__(self, author=None, category=None, content=None,
atom_id=None, link=None, published=None,
title=None, updated=None,
match=None,
extension_elements=None, extension_attributes=None, text=None):
gdata.GDataEntry.__init__(self, author=author, category=category,
content=content, atom_id=atom_id, link=link,
published=published, title=title,
updated=updated, text=None)
self.match = match or []
def CodesearchEntryFromString(xml_string):
"""Converts an XML string into a CodesearchEntry object.
Args:
xml_string: string The XML describing a Codesearch feed entry.
Returns:
A CodesearchEntry object corresponding to the given XML.
"""
return atom.CreateClassFromXMLString(CodesearchEntry, xml_string)
class CodesearchFeed(gdata.GDataFeed):
"""feed containing list of Google codesearch Items"""
_tag = gdata.GDataFeed._tag
_namespace = gdata.GDataFeed._namespace
_children = gdata.GDataFeed._children.copy()
_attributes = gdata.GDataFeed._attributes.copy()
_children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry', [CodesearchEntry])
def CodesearchFeedFromString(xml_string):
"""Converts an XML string into a CodesearchFeed object.
Args:
xml_string: string The XML describing a Codesearch feed.
Returns:
A CodeseartchFeed object corresponding to the given XML.
"""
return atom.CreateClassFromXMLString(CodesearchFeed, xml_string)
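# Illustrative use of the converters above (response_xml stands in for any
# feed XML returned by the Code Search service):
#     feed = CodesearchFeedFromString(response_xml)
#     for entry in feed.entry:
#         print entry.title.text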
| apache-2.0 |
damdam-s/e-commerce | sale_payment_method/sale.py | 16 | 9551 | # -*- coding: utf-8 -*-
##############################################################################
#
# Author: Guewen Baconnier, Sébastien Beau
# Copyright (C) 2011 Akretion Sébastien BEAU <sebastien.beau@akretion.com>
# Copyright 2013 Camptocamp SA (Guewen Baconnier)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import logging
from openerp import api, models, fields, exceptions, _
import openerp.addons.decimal_precision as dp
_logger = logging.getLogger(__name__)
class SaleOrder(models.Model):
_inherit = 'sale.order'
@api.one
@api.depends('amount_total', 'payment_ids.credit', 'payment_ids.debit')
def _compute_amount(self):
paid_amount = sum(line.credit - line.debit
for line in self.payment_ids)
self.amount_paid = paid_amount
self.residual = self.amount_total - paid_amount
payment_ids = fields.Many2many(
comodel_name='account.move.line',
string='Payments Entries',
domain=[('account_id.type', '=', 'receivable')],
copy=False,
)
payment_method_id = fields.Many2one(
comodel_name='payment.method',
string='Payment Method',
ondelete='restrict',
)
residual = fields.Float(
compute='_compute_amount',
digits_compute=dp.get_precision('Account'),
string='Balance',
store=False,
)
amount_paid = fields.Float(
compute='_compute_amount',
digits_compute=dp.get_precision('Account'),
string='Amount Paid',
store=False,
)
@api.multi
def action_cancel(self):
for sale in self:
if sale.payment_ids:
raise exceptions.Warning(_('Cannot cancel this sales order '
'because automatic payment entries '
'are linked with it.'))
return super(SaleOrder, self).action_cancel()
@api.multi
def automatic_payment(self, amount=None):
""" Create the payment entries to pay a sale order, respecting
the payment terms.
If no amount is defined, it will pay the residual amount of the sale
order.
"""
self.ensure_one()
method = self.payment_method_id
if not method:
raise exceptions.Warning(
_("An automatic payment can not be created for the sale "
"order %s because it has no payment method.") % self.name
)
if not method.journal_id:
raise exceptions.Warning(
_("An automatic payment should be created for the sale order"
" %s but the payment method '%s' has no journal defined.") %
(self.name, method.name)
)
journal = method.journal_id
date = self.date_order[:10]
if amount is None:
amount = self.residual
if self.payment_term:
amounts = self.payment_term.compute(amount, date_ref=date)[0]
else:
amounts = [(date, amount)]
# reversed is cosmetic, compute returns terms in the 'wrong' order
for date, amount in reversed(amounts):
self._add_payment(journal, amount, date)
return True
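    # Hedged usage sketch (illustrative, not part of the module): from server
    # code one would typically pay a single confirmed order like this, where
    # `order_id` is an assumed existing sale.order id:
    #     order = self.env['sale.order'].browse(order_id)
    #     order.automatic_payment()             # pays the residual amount
    #     order.automatic_payment(amount=50.0)  # or pays an explicit amount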
@api.multi
def add_payment(self, journal_id, amount, date=None, description=None):
""" Generate payment move lines of a certain amount linked
with the sale order.
"""
self.ensure_one()
journal_model = self.env['account.journal']
if date is None:
date = self.date_order
journal = journal_model.browse(journal_id)
self._add_payment(journal, amount, date, description)
return True
@api.multi
def _add_payment(self, journal, amount, date, description=None):
""" Generate move lines entries to pay the sale order. """
move_model = self.env['account.move']
period_model = self.env['account.period']
period = period_model.find(dt=date)
move_name = description or self._get_payment_move_name(journal, period)
move_vals = self._prepare_payment_move(move_name, journal,
period, date)
move_lines = self._prepare_payment_move_lines(move_name, journal,
period, amount, date)
move_vals['line_id'] = [(0, 0, line) for line in move_lines]
move_model.create(move_vals)
@api.model
def _get_payment_move_name(self, journal, period):
sequence = journal.sequence_id
if not sequence:
raise exceptions.Warning(_('Please define a sequence on the '
'journal %s.') % journal.name)
if not sequence.active:
raise exceptions.Warning(_('Please activate the sequence of the '
'journal %s.') % journal.name)
sequence = sequence.with_context(fiscalyear_id=period.fiscalyear_id.id)
# next_by_id not compatible with new api
sequence_model = self.pool['ir.sequence']
name = sequence_model.next_by_id(self.env.cr, self.env.uid,
sequence.id,
context=self.env.context)
return name
@api.multi
def _prepare_payment_move(self, move_name, journal, period, date):
return {'name': move_name,
'journal_id': journal.id,
'date': date,
'ref': self.name,
'period_id': period.id,
}
@api.multi
def _prepare_payment_move_line(self, move_name, journal, period,
amount, date):
# to remove in v9
_logger.warning('Deprecated: _prepare_payment_move_line has been '
'deprecated in favor of _prepare_payment_move_lines')
return self._prepare_payment_move_lines(move_name, journal, period,
amount, date)
@api.multi
def _prepare_payment_move_lines(self, move_name, journal, period,
amount, date):
partner = self.partner_id.commercial_partner_id
company = journal.company_id
currency = self.env['res.currency'].browse()
# if the lines are not in a different currency,
# the amount_currency stays at 0.0
amount_currency = 0.0
if journal.currency and journal.currency != company.currency_id:
# when the journal have a currency, we have to convert
# the amount to the currency of the company and set
# the journal's currency on the lines
currency = journal.currency
company_amount = currency.compute(amount, company.currency_id)
amount_currency, amount = amount, company_amount
# payment line (bank / cash)
debit_line = {
'name': move_name,
'debit': amount,
'credit': 0.0,
'account_id': journal.default_credit_account_id.id,
'journal_id': journal.id,
'period_id': period.id,
'partner_id': partner.id,
'date': date,
'amount_currency': amount_currency,
'currency_id': currency.id,
}
# payment line (receivable)
credit_line = {
'name': move_name,
'debit': 0.0,
'credit': amount,
'account_id': partner.property_account_receivable.id,
'journal_id': journal.id,
'period_id': period.id,
'partner_id': partner.id,
'date': date,
'amount_currency': -amount_currency,
'currency_id': currency.id,
'sale_ids': [(4, self.id)],
}
return debit_line, credit_line
@api.onchange('payment_method_id')
def onchange_payment_method_id_set_payment_term(self):
if not self.payment_method_id:
return
method = self.payment_method_id
if method.payment_term_id:
self.payment_term = method.payment_term_id.id
@api.multi
def action_view_payments(self):
""" Return an action to display the payment linked
with the sale order """
self.ensure_one()
moves = self.mapped('payment_ids.move_id')
xmlid = ('account', 'action_move_journal_line')
action = self.env['ir.actions.act_window'].for_xml_id(*xmlid)
if len(moves) > 1:
action['domain'] = [('id', 'in', moves.ids)]
else:
ref = self.env.ref('account.view_move_form')
action['views'] = [(ref.id, 'form')]
action['res_id'] = moves.id if moves else False
return action
| agpl-3.0 |
ProfessionalIT/maxigenios-website | sdk/google_appengine/lib/django-1.3/django/db/models/expressions.py | 229 | 4992 | import datetime
from django.utils import tree
from django.utils.copycompat import deepcopy
class ExpressionNode(tree.Node):
"""
Base class for all query expressions.
"""
# Arithmetic connectors
ADD = '+'
SUB = '-'
MUL = '*'
DIV = '/'
MOD = '%%' # This is a quoted % operator - it is quoted
# because it can be used in strings that also
# have parameter substitution.
# Bitwise operators
AND = '&'
OR = '|'
def __init__(self, children=None, connector=None, negated=False):
if children is not None and len(children) > 1 and connector is None:
raise TypeError('You have to specify a connector.')
super(ExpressionNode, self).__init__(children, connector, negated)
def _combine(self, other, connector, reversed, node=None):
if isinstance(other, datetime.timedelta):
return DateModifierNode([self, other], connector)
if reversed:
obj = ExpressionNode([other], connector)
obj.add(node or self, connector)
else:
obj = node or ExpressionNode([self], connector)
obj.add(other, connector)
return obj
###################
# VISITOR METHODS #
###################
def prepare(self, evaluator, query, allow_joins):
return evaluator.prepare_node(self, query, allow_joins)
def evaluate(self, evaluator, qn, connection):
return evaluator.evaluate_node(self, qn, connection)
#############
# OPERATORS #
#############
def __add__(self, other):
return self._combine(other, self.ADD, False)
def __sub__(self, other):
return self._combine(other, self.SUB, False)
def __mul__(self, other):
return self._combine(other, self.MUL, False)
def __div__(self, other):
return self._combine(other, self.DIV, False)
def __mod__(self, other):
return self._combine(other, self.MOD, False)
def __and__(self, other):
return self._combine(other, self.AND, False)
def __or__(self, other):
return self._combine(other, self.OR, False)
def __radd__(self, other):
return self._combine(other, self.ADD, True)
def __rsub__(self, other):
return self._combine(other, self.SUB, True)
def __rmul__(self, other):
return self._combine(other, self.MUL, True)
def __rdiv__(self, other):
return self._combine(other, self.DIV, True)
def __rmod__(self, other):
return self._combine(other, self.MOD, True)
def __rand__(self, other):
return self._combine(other, self.AND, True)
def __ror__(self, other):
return self._combine(other, self.OR, True)
def prepare_database_save(self, unused):
return self
class F(ExpressionNode):
"""
An expression representing the value of the given field.
"""
def __init__(self, name):
super(F, self).__init__(None, None, False)
self.name = name
def __deepcopy__(self, memodict):
obj = super(F, self).__deepcopy__(memodict)
obj.name = self.name
return obj
def prepare(self, evaluator, query, allow_joins):
return evaluator.prepare_leaf(self, query, allow_joins)
def evaluate(self, evaluator, qn, connection):
return evaluator.evaluate_leaf(self, qn, connection)
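# Hedged usage sketch (illustrative; assumes an Entry model with an integer
# `rating` field, which is not defined in this module):
#     from django.db.models import F
#     Entry.objects.update(rating=F('rating') + 1)  # increment done in SQL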
class DateModifierNode(ExpressionNode):
"""
Node that implements the following syntax:
filter(end_date__gt=F('start_date') + datetime.timedelta(days=3, seconds=200))
which translates into:
POSTGRES:
WHERE end_date > (start_date + INTERVAL '3 days 200 seconds')
MYSQL:
WHERE end_date > (start_date + INTERVAL '3 0:0:200:0' DAY_MICROSECOND)
ORACLE:
WHERE end_date > (start_date + INTERVAL '3 00:03:20.000000' DAY(1) TO SECOND(6))
SQLITE:
WHERE end_date > django_format_dtdelta(start_date, "+" "3", "200", "0")
(A custom function is used in order to preserve six digits of fractional
second information on sqlite, and to format both date and datetime values.)
Note that microsecond comparisons are not well supported with MySQL, since
MySQL does not store microsecond information.
Only adding and subtracting timedeltas is supported, attempts to use other
operations raise a TypeError.
"""
def __init__(self, children, connector, negated=False):
if len(children) != 2:
raise TypeError('Must specify a node and a timedelta.')
if not isinstance(children[1], datetime.timedelta):
raise TypeError('Second child must be a timedelta.')
if connector not in (self.ADD, self.SUB):
raise TypeError('Connector must be + or -, not %s' % connector)
super(DateModifierNode, self).__init__(children, connector, negated)
def evaluate(self, evaluator, qn, connection):
return evaluator.evaluate_date_modifier_node(self, qn, connection)
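# Hedged usage sketch mirroring the DateModifierNode docstring above (assumes
# an Entry model with start_date/end_date datetime fields; illustrative only):
#     import datetime
#     from django.db.models import F
#     Entry.objects.filter(
#         end_date__gt=F('start_date') + datetime.timedelta(days=3, seconds=200))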
| mit |
damonkohler/sl4a | python/src/Lib/test/test_sets.py | 56 | 27873 | #!/usr/bin/env python
import warnings
warnings.filterwarnings("ignore", "the sets module is deprecated",
DeprecationWarning, "test\.test_sets")
import unittest, operator, copy, pickle, random
from sets import Set, ImmutableSet
from test import test_support
empty_set = Set()
#==============================================================================
class TestBasicOps(unittest.TestCase):
def test_repr(self):
if self.repr is not None:
self.assertEqual(repr(self.set), self.repr)
def test_length(self):
self.assertEqual(len(self.set), self.length)
def test_self_equality(self):
self.assertEqual(self.set, self.set)
def test_equivalent_equality(self):
self.assertEqual(self.set, self.dup)
def test_copy(self):
self.assertEqual(self.set.copy(), self.dup)
def test_self_union(self):
result = self.set | self.set
self.assertEqual(result, self.dup)
def test_empty_union(self):
result = self.set | empty_set
self.assertEqual(result, self.dup)
def test_union_empty(self):
result = empty_set | self.set
self.assertEqual(result, self.dup)
def test_self_intersection(self):
result = self.set & self.set
self.assertEqual(result, self.dup)
def test_empty_intersection(self):
result = self.set & empty_set
self.assertEqual(result, empty_set)
def test_intersection_empty(self):
result = empty_set & self.set
self.assertEqual(result, empty_set)
def test_self_symmetric_difference(self):
result = self.set ^ self.set
self.assertEqual(result, empty_set)
    def test_empty_symmetric_difference(self):
result = self.set ^ empty_set
self.assertEqual(result, self.set)
def test_self_difference(self):
result = self.set - self.set
self.assertEqual(result, empty_set)
def test_empty_difference(self):
result = self.set - empty_set
self.assertEqual(result, self.dup)
def test_empty_difference_rev(self):
result = empty_set - self.set
self.assertEqual(result, empty_set)
def test_iteration(self):
for v in self.set:
self.assert_(v in self.values)
def test_pickling(self):
p = pickle.dumps(self.set)
copy = pickle.loads(p)
self.assertEqual(self.set, copy,
"%s != %s" % (self.set, copy))
#------------------------------------------------------------------------------
class TestBasicOpsEmpty(TestBasicOps):
def setUp(self):
self.case = "empty set"
self.values = []
self.set = Set(self.values)
self.dup = Set(self.values)
self.length = 0
self.repr = "Set([])"
#------------------------------------------------------------------------------
class TestBasicOpsSingleton(TestBasicOps):
def setUp(self):
self.case = "unit set (number)"
self.values = [3]
self.set = Set(self.values)
self.dup = Set(self.values)
self.length = 1
self.repr = "Set([3])"
def test_in(self):
self.failUnless(3 in self.set)
def test_not_in(self):
self.failUnless(2 not in self.set)
#------------------------------------------------------------------------------
class TestBasicOpsTuple(TestBasicOps):
def setUp(self):
self.case = "unit set (tuple)"
self.values = [(0, "zero")]
self.set = Set(self.values)
self.dup = Set(self.values)
self.length = 1
self.repr = "Set([(0, 'zero')])"
def test_in(self):
self.failUnless((0, "zero") in self.set)
def test_not_in(self):
self.failUnless(9 not in self.set)
#------------------------------------------------------------------------------
class TestBasicOpsTriple(TestBasicOps):
def setUp(self):
self.case = "triple set"
self.values = [0, "zero", operator.add]
self.set = Set(self.values)
self.dup = Set(self.values)
self.length = 3
self.repr = None
#==============================================================================
def baditer():
raise TypeError
yield True
def gooditer():
yield True
class TestExceptionPropagation(unittest.TestCase):
"""SF 628246: Set constructor should not trap iterator TypeErrors"""
def test_instanceWithException(self):
self.assertRaises(TypeError, Set, baditer())
def test_instancesWithoutException(self):
# All of these iterables should load without exception.
Set([1,2,3])
Set((1,2,3))
Set({'one':1, 'two':2, 'three':3})
Set(xrange(3))
Set('abc')
Set(gooditer())
#==============================================================================
class TestSetOfSets(unittest.TestCase):
def test_constructor(self):
inner = Set([1])
outer = Set([inner])
element = outer.pop()
self.assertEqual(type(element), ImmutableSet)
outer.add(inner) # Rebuild set of sets with .add method
outer.remove(inner)
self.assertEqual(outer, Set()) # Verify that remove worked
outer.discard(inner) # Absence of KeyError indicates working fine
#==============================================================================
class TestBinaryOps(unittest.TestCase):
def setUp(self):
self.set = Set((2, 4, 6))
def test_eq(self): # SF bug 643115
self.assertEqual(self.set, Set({2:1,4:3,6:5}))
def test_union_subset(self):
result = self.set | Set([2])
self.assertEqual(result, Set((2, 4, 6)))
def test_union_superset(self):
result = self.set | Set([2, 4, 6, 8])
self.assertEqual(result, Set([2, 4, 6, 8]))
def test_union_overlap(self):
result = self.set | Set([3, 4, 5])
self.assertEqual(result, Set([2, 3, 4, 5, 6]))
def test_union_non_overlap(self):
result = self.set | Set([8])
self.assertEqual(result, Set([2, 4, 6, 8]))
def test_intersection_subset(self):
result = self.set & Set((2, 4))
self.assertEqual(result, Set((2, 4)))
def test_intersection_superset(self):
result = self.set & Set([2, 4, 6, 8])
self.assertEqual(result, Set([2, 4, 6]))
def test_intersection_overlap(self):
result = self.set & Set([3, 4, 5])
self.assertEqual(result, Set([4]))
def test_intersection_non_overlap(self):
result = self.set & Set([8])
self.assertEqual(result, empty_set)
def test_sym_difference_subset(self):
result = self.set ^ Set((2, 4))
self.assertEqual(result, Set([6]))
def test_sym_difference_superset(self):
result = self.set ^ Set((2, 4, 6, 8))
self.assertEqual(result, Set([8]))
def test_sym_difference_overlap(self):
result = self.set ^ Set((3, 4, 5))
self.assertEqual(result, Set([2, 3, 5, 6]))
def test_sym_difference_non_overlap(self):
result = self.set ^ Set([8])
self.assertEqual(result, Set([2, 4, 6, 8]))
def test_cmp(self):
a, b = Set('a'), Set('b')
self.assertRaises(TypeError, cmp, a, b)
# You can view this as a buglet: cmp(a, a) does not raise TypeError,
# because __eq__ is tried before __cmp__, and a.__eq__(a) returns True,
# which Python thinks is good enough to synthesize a cmp() result
# without calling __cmp__.
self.assertEqual(cmp(a, a), 0)
self.assertRaises(TypeError, cmp, a, 12)
self.assertRaises(TypeError, cmp, "abc", a)
def test_inplace_on_self(self):
t = self.set.copy()
t |= t
self.assertEqual(t, self.set)
t &= t
self.assertEqual(t, self.set)
t -= t
self.assertEqual(len(t), 0)
t = self.set.copy()
t ^= t
self.assertEqual(len(t), 0)
#==============================================================================
class TestUpdateOps(unittest.TestCase):
def setUp(self):
self.set = Set((2, 4, 6))
def test_union_subset(self):
self.set |= Set([2])
self.assertEqual(self.set, Set((2, 4, 6)))
def test_union_superset(self):
self.set |= Set([2, 4, 6, 8])
self.assertEqual(self.set, Set([2, 4, 6, 8]))
def test_union_overlap(self):
self.set |= Set([3, 4, 5])
self.assertEqual(self.set, Set([2, 3, 4, 5, 6]))
def test_union_non_overlap(self):
self.set |= Set([8])
self.assertEqual(self.set, Set([2, 4, 6, 8]))
def test_union_method_call(self):
self.set.union_update(Set([3, 4, 5]))
self.assertEqual(self.set, Set([2, 3, 4, 5, 6]))
def test_intersection_subset(self):
self.set &= Set((2, 4))
self.assertEqual(self.set, Set((2, 4)))
def test_intersection_superset(self):
self.set &= Set([2, 4, 6, 8])
self.assertEqual(self.set, Set([2, 4, 6]))
def test_intersection_overlap(self):
self.set &= Set([3, 4, 5])
self.assertEqual(self.set, Set([4]))
def test_intersection_non_overlap(self):
self.set &= Set([8])
self.assertEqual(self.set, empty_set)
def test_intersection_method_call(self):
self.set.intersection_update(Set([3, 4, 5]))
self.assertEqual(self.set, Set([4]))
def test_sym_difference_subset(self):
self.set ^= Set((2, 4))
self.assertEqual(self.set, Set([6]))
def test_sym_difference_superset(self):
self.set ^= Set((2, 4, 6, 8))
self.assertEqual(self.set, Set([8]))
def test_sym_difference_overlap(self):
self.set ^= Set((3, 4, 5))
self.assertEqual(self.set, Set([2, 3, 5, 6]))
def test_sym_difference_non_overlap(self):
self.set ^= Set([8])
self.assertEqual(self.set, Set([2, 4, 6, 8]))
def test_sym_difference_method_call(self):
self.set.symmetric_difference_update(Set([3, 4, 5]))
self.assertEqual(self.set, Set([2, 3, 5, 6]))
def test_difference_subset(self):
self.set -= Set((2, 4))
self.assertEqual(self.set, Set([6]))
def test_difference_superset(self):
self.set -= Set((2, 4, 6, 8))
self.assertEqual(self.set, Set([]))
def test_difference_overlap(self):
self.set -= Set((3, 4, 5))
self.assertEqual(self.set, Set([2, 6]))
def test_difference_non_overlap(self):
self.set -= Set([8])
self.assertEqual(self.set, Set([2, 4, 6]))
def test_difference_method_call(self):
self.set.difference_update(Set([3, 4, 5]))
self.assertEqual(self.set, Set([2, 6]))
#==============================================================================
class TestMutate(unittest.TestCase):
def setUp(self):
self.values = ["a", "b", "c"]
self.set = Set(self.values)
def test_add_present(self):
self.set.add("c")
self.assertEqual(self.set, Set("abc"))
def test_add_absent(self):
self.set.add("d")
self.assertEqual(self.set, Set("abcd"))
def test_add_until_full(self):
tmp = Set()
expected_len = 0
for v in self.values:
tmp.add(v)
expected_len += 1
self.assertEqual(len(tmp), expected_len)
self.assertEqual(tmp, self.set)
def test_remove_present(self):
self.set.remove("b")
self.assertEqual(self.set, Set("ac"))
def test_remove_absent(self):
try:
self.set.remove("d")
self.fail("Removing missing element should have raised LookupError")
except LookupError:
pass
def test_remove_until_empty(self):
expected_len = len(self.set)
for v in self.values:
self.set.remove(v)
expected_len -= 1
self.assertEqual(len(self.set), expected_len)
def test_discard_present(self):
self.set.discard("c")
self.assertEqual(self.set, Set("ab"))
def test_discard_absent(self):
self.set.discard("d")
self.assertEqual(self.set, Set("abc"))
def test_clear(self):
self.set.clear()
self.assertEqual(len(self.set), 0)
def test_pop(self):
popped = {}
while self.set:
popped[self.set.pop()] = None
self.assertEqual(len(popped), len(self.values))
for v in self.values:
self.failUnless(v in popped)
def test_update_empty_tuple(self):
self.set.union_update(())
self.assertEqual(self.set, Set(self.values))
def test_update_unit_tuple_overlap(self):
self.set.union_update(("a",))
self.assertEqual(self.set, Set(self.values))
def test_update_unit_tuple_non_overlap(self):
self.set.union_update(("a", "z"))
self.assertEqual(self.set, Set(self.values + ["z"]))
#==============================================================================
class TestSubsets(unittest.TestCase):
case2method = {"<=": "issubset",
">=": "issuperset",
}
reverse = {"==": "==",
"!=": "!=",
"<": ">",
">": "<",
"<=": ">=",
">=": "<=",
}
def test_issubset(self):
x = self.left
y = self.right
for case in "!=", "==", "<", "<=", ">", ">=":
expected = case in self.cases
# Test the binary infix spelling.
result = eval("x" + case + "y", locals())
self.assertEqual(result, expected)
# Test the "friendly" method-name spelling, if one exists.
if case in TestSubsets.case2method:
method = getattr(x, TestSubsets.case2method[case])
result = method(y)
self.assertEqual(result, expected)
# Now do the same for the operands reversed.
rcase = TestSubsets.reverse[case]
result = eval("y" + rcase + "x", locals())
self.assertEqual(result, expected)
if rcase in TestSubsets.case2method:
method = getattr(y, TestSubsets.case2method[rcase])
result = method(x)
self.assertEqual(result, expected)
#------------------------------------------------------------------------------
class TestSubsetEqualEmpty(TestSubsets):
left = Set()
right = Set()
name = "both empty"
cases = "==", "<=", ">="
#------------------------------------------------------------------------------
class TestSubsetEqualNonEmpty(TestSubsets):
left = Set([1, 2])
right = Set([1, 2])
name = "equal pair"
cases = "==", "<=", ">="
#------------------------------------------------------------------------------
class TestSubsetEmptyNonEmpty(TestSubsets):
left = Set()
right = Set([1, 2])
name = "one empty, one non-empty"
cases = "!=", "<", "<="
#------------------------------------------------------------------------------
class TestSubsetPartial(TestSubsets):
left = Set([1])
right = Set([1, 2])
name = "one a non-empty proper subset of other"
cases = "!=", "<", "<="
#------------------------------------------------------------------------------
class TestSubsetNonOverlap(TestSubsets):
left = Set([1])
right = Set([2])
name = "neither empty, neither contains"
cases = "!="
#==============================================================================
class TestOnlySetsInBinaryOps(unittest.TestCase):
def test_eq_ne(self):
# Unlike the others, this is testing that == and != *are* allowed.
self.assertEqual(self.other == self.set, False)
self.assertEqual(self.set == self.other, False)
self.assertEqual(self.other != self.set, True)
self.assertEqual(self.set != self.other, True)
def test_ge_gt_le_lt(self):
self.assertRaises(TypeError, lambda: self.set < self.other)
self.assertRaises(TypeError, lambda: self.set <= self.other)
self.assertRaises(TypeError, lambda: self.set > self.other)
self.assertRaises(TypeError, lambda: self.set >= self.other)
self.assertRaises(TypeError, lambda: self.other < self.set)
self.assertRaises(TypeError, lambda: self.other <= self.set)
self.assertRaises(TypeError, lambda: self.other > self.set)
self.assertRaises(TypeError, lambda: self.other >= self.set)
def test_union_update_operator(self):
try:
self.set |= self.other
except TypeError:
pass
else:
self.fail("expected TypeError")
def test_union_update(self):
if self.otherIsIterable:
self.set.union_update(self.other)
else:
self.assertRaises(TypeError, self.set.union_update, self.other)
def test_union(self):
self.assertRaises(TypeError, lambda: self.set | self.other)
self.assertRaises(TypeError, lambda: self.other | self.set)
if self.otherIsIterable:
self.set.union(self.other)
else:
self.assertRaises(TypeError, self.set.union, self.other)
def test_intersection_update_operator(self):
try:
self.set &= self.other
except TypeError:
pass
else:
self.fail("expected TypeError")
def test_intersection_update(self):
if self.otherIsIterable:
self.set.intersection_update(self.other)
else:
self.assertRaises(TypeError,
self.set.intersection_update,
self.other)
def test_intersection(self):
self.assertRaises(TypeError, lambda: self.set & self.other)
self.assertRaises(TypeError, lambda: self.other & self.set)
if self.otherIsIterable:
self.set.intersection(self.other)
else:
self.assertRaises(TypeError, self.set.intersection, self.other)
def test_sym_difference_update_operator(self):
try:
self.set ^= self.other
except TypeError:
pass
else:
self.fail("expected TypeError")
def test_sym_difference_update(self):
if self.otherIsIterable:
self.set.symmetric_difference_update(self.other)
else:
self.assertRaises(TypeError,
self.set.symmetric_difference_update,
self.other)
def test_sym_difference(self):
self.assertRaises(TypeError, lambda: self.set ^ self.other)
self.assertRaises(TypeError, lambda: self.other ^ self.set)
if self.otherIsIterable:
self.set.symmetric_difference(self.other)
else:
self.assertRaises(TypeError, self.set.symmetric_difference, self.other)
def test_difference_update_operator(self):
try:
self.set -= self.other
except TypeError:
pass
else:
self.fail("expected TypeError")
def test_difference_update(self):
if self.otherIsIterable:
self.set.difference_update(self.other)
else:
self.assertRaises(TypeError,
self.set.difference_update,
self.other)
def test_difference(self):
self.assertRaises(TypeError, lambda: self.set - self.other)
self.assertRaises(TypeError, lambda: self.other - self.set)
if self.otherIsIterable:
self.set.difference(self.other)
else:
self.assertRaises(TypeError, self.set.difference, self.other)
#------------------------------------------------------------------------------
class TestOnlySetsNumeric(TestOnlySetsInBinaryOps):
def setUp(self):
self.set = Set((1, 2, 3))
self.other = 19
self.otherIsIterable = False
#------------------------------------------------------------------------------
class TestOnlySetsDict(TestOnlySetsInBinaryOps):
def setUp(self):
self.set = Set((1, 2, 3))
self.other = {1:2, 3:4}
self.otherIsIterable = True
#------------------------------------------------------------------------------
class TestOnlySetsOperator(TestOnlySetsInBinaryOps):
def setUp(self):
self.set = Set((1, 2, 3))
self.other = operator.add
self.otherIsIterable = False
#------------------------------------------------------------------------------
class TestOnlySetsTuple(TestOnlySetsInBinaryOps):
def setUp(self):
self.set = Set((1, 2, 3))
self.other = (2, 4, 6)
self.otherIsIterable = True
#------------------------------------------------------------------------------
class TestOnlySetsString(TestOnlySetsInBinaryOps):
def setUp(self):
self.set = Set((1, 2, 3))
self.other = 'abc'
self.otherIsIterable = True
#------------------------------------------------------------------------------
class TestOnlySetsGenerator(TestOnlySetsInBinaryOps):
def setUp(self):
def gen():
for i in xrange(0, 10, 2):
yield i
self.set = Set((1, 2, 3))
self.other = gen()
self.otherIsIterable = True
#------------------------------------------------------------------------------
class TestOnlySetsofSets(TestOnlySetsInBinaryOps):
def setUp(self):
self.set = Set((1, 2, 3))
self.other = [Set('ab'), ImmutableSet('cd')]
self.otherIsIterable = True
#==============================================================================
class TestCopying(unittest.TestCase):
def test_copy(self):
dup = self.set.copy()
dup_list = list(dup); dup_list.sort()
set_list = list(self.set); set_list.sort()
self.assertEqual(len(dup_list), len(set_list))
for i in range(len(dup_list)):
self.failUnless(dup_list[i] is set_list[i])
def test_deep_copy(self):
dup = copy.deepcopy(self.set)
##print type(dup), repr(dup)
dup_list = list(dup); dup_list.sort()
set_list = list(self.set); set_list.sort()
self.assertEqual(len(dup_list), len(set_list))
for i in range(len(dup_list)):
self.assertEqual(dup_list[i], set_list[i])
#------------------------------------------------------------------------------
class TestCopyingEmpty(TestCopying):
def setUp(self):
self.set = Set()
#------------------------------------------------------------------------------
class TestCopyingSingleton(TestCopying):
def setUp(self):
self.set = Set(["hello"])
#------------------------------------------------------------------------------
class TestCopyingTriple(TestCopying):
def setUp(self):
self.set = Set(["zero", 0, None])
#------------------------------------------------------------------------------
class TestCopyingTuple(TestCopying):
def setUp(self):
self.set = Set([(1, 2)])
#------------------------------------------------------------------------------
class TestCopyingNested(TestCopying):
def setUp(self):
self.set = Set([((1, 2), (3, 4))])
#==============================================================================
class TestIdentities(unittest.TestCase):
def setUp(self):
self.a = Set([random.randrange(100) for i in xrange(50)])
self.b = Set([random.randrange(100) for i in xrange(50)])
def test_binopsVsSubsets(self):
a, b = self.a, self.b
self.assert_(a - b <= a)
self.assert_(b - a <= b)
self.assert_(a & b <= a)
self.assert_(a & b <= b)
self.assert_(a | b >= a)
self.assert_(a | b >= b)
self.assert_(a ^ b <= a | b)
def test_commutativity(self):
a, b = self.a, self.b
self.assertEqual(a&b, b&a)
self.assertEqual(a|b, b|a)
self.assertEqual(a^b, b^a)
if a != b:
self.assertNotEqual(a-b, b-a)
    def test_reflexive_relations(self):
a, zero = self.a, Set()
self.assertEqual(a ^ a, zero)
self.assertEqual(a - a, zero)
self.assertEqual(a | a, a)
self.assertEqual(a & a, a)
self.assert_(a <= a)
self.assert_(a >= a)
self.assert_(a == a)
def test_summations(self):
# check that sums of parts equal the whole
a, b = self.a, self.b
self.assertEqual((a-b)|(a&b)|(b-a), a|b)
self.assertEqual((a&b)|(a^b), a|b)
self.assertEqual(a|(b-a), a|b)
self.assertEqual((a-b)|b, a|b)
self.assertEqual((a-b)|(a&b), a)
self.assertEqual((b-a)|(a&b), b)
self.assertEqual((a-b)|(b-a), a^b)
def test_exclusion(self):
# check that inverse operations do not overlap
a, b, zero = self.a, self.b, Set()
self.assertEqual((a-b)&b, zero)
self.assertEqual((b-a)&a, zero)
self.assertEqual((a&b)&(a^b), zero)
def test_cardinality_relations(self):
a, b = self.a, self.b
self.assertEqual(len(a), len(a-b) + len(a&b))
self.assertEqual(len(b), len(b-a) + len(a&b))
self.assertEqual(len(a^b), len(a-b) + len(b-a))
self.assertEqual(len(a|b), len(a-b) + len(a&b) + len(b-a))
self.assertEqual(len(a^b) + len(a&b), len(a|b))
#==============================================================================
libreftest = """
Example from the Library Reference: Doc/lib/libsets.tex
>>> from sets import Set as Base # override _repr to get sorted output
>>> class Set(Base):
... def _repr(self):
... return Base._repr(self, sorted=True)
>>> engineers = Set(['John', 'Jane', 'Jack', 'Janice'])
>>> programmers = Set(['Jack', 'Sam', 'Susan', 'Janice'])
>>> managers = Set(['Jane', 'Jack', 'Susan', 'Zack'])
>>> employees = engineers | programmers | managers # union
>>> engineering_management = engineers & managers # intersection
>>> fulltime_management = managers - engineers - programmers # difference
>>> engineers.add('Marvin')
>>> print engineers
Set(['Jack', 'Jane', 'Janice', 'John', 'Marvin'])
>>> employees.issuperset(engineers) # superset test
False
>>> employees.union_update(engineers) # update from another set
>>> employees.issuperset(engineers)
True
>>> for group in [engineers, programmers, managers, employees]:
... group.discard('Susan') # unconditionally remove element
... print group
...
Set(['Jack', 'Jane', 'Janice', 'John', 'Marvin'])
Set(['Jack', 'Janice', 'Sam'])
Set(['Jack', 'Jane', 'Zack'])
Set(['Jack', 'Jane', 'Janice', 'John', 'Marvin', 'Sam', 'Zack'])
"""
#==============================================================================
__test__ = {'libreftest' : libreftest}
def test_main(verbose=None):
import doctest
from test import test_sets
test_support.run_unittest(
TestSetOfSets,
TestExceptionPropagation,
TestBasicOpsEmpty,
TestBasicOpsSingleton,
TestBasicOpsTuple,
TestBasicOpsTriple,
TestBinaryOps,
TestUpdateOps,
TestMutate,
TestSubsetEqualEmpty,
TestSubsetEqualNonEmpty,
TestSubsetEmptyNonEmpty,
TestSubsetPartial,
TestSubsetNonOverlap,
TestOnlySetsNumeric,
TestOnlySetsDict,
TestOnlySetsOperator,
TestOnlySetsTuple,
TestOnlySetsString,
TestOnlySetsGenerator,
TestOnlySetsofSets,
TestCopyingEmpty,
TestCopyingSingleton,
TestCopyingTriple,
TestCopyingTuple,
TestCopyingNested,
TestIdentities,
doctest.DocTestSuite(test_sets),
)
if __name__ == "__main__":
test_main(verbose=True)
| apache-2.0 |
Dhivyap/ansible | lib/ansible/modules/network/fortios/fortios_system_ddns.py | 13 | 14913 | #!/usr/bin/python
from __future__ import (absolute_import, division, print_function)
# Copyright 2019 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
__metaclass__ = type
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.1'}
DOCUMENTATION = '''
---
module: fortios_system_ddns
short_description: Configure DDNS in Fortinet's FortiOS and FortiGate.
description:
    - This module is able to configure a FortiGate or FortiOS (FOS) device by allowing the
      user to set and modify the system feature and ddns category.
      Examples include all parameters and values; these need to be adjusted to datasources before usage.
Tested with FOS v6.0.5
version_added: "2.9"
author:
- Miguel Angel Munoz (@mamunozgonzalez)
- Nicolas Thomas (@thomnico)
notes:
- Requires fortiosapi library developed by Fortinet
- Run as a local_action in your playbook
requirements:
- fortiosapi>=0.9.8
options:
host:
description:
- FortiOS or FortiGate IP address.
type: str
required: false
username:
description:
- FortiOS or FortiGate username.
type: str
required: false
password:
description:
- FortiOS or FortiGate password.
type: str
default: ""
vdom:
description:
- Virtual domain, among those defined previously. A vdom is a
virtual instance of the FortiGate that can be configured and
used as a different unit.
type: str
default: root
https:
description:
- Indicates if the requests towards FortiGate must use HTTPS protocol.
type: bool
default: true
ssl_verify:
description:
- Ensures FortiGate certificate must be verified by a proper CA.
type: bool
default: true
state:
description:
- Indicates whether to create or remove the object.
type: str
required: true
choices:
- present
- absent
system_ddns:
description:
- Configure DDNS.
default: null
type: dict
suboptions:
bound_ip:
description:
- Bound IP address.
type: str
clear_text:
description:
- Enable/disable use of clear text connections.
type: str
choices:
- disable
- enable
ddns_auth:
description:
- Enable/disable TSIG authentication for your DDNS server.
type: str
choices:
- disable
- tsig
ddns_domain:
description:
- Your fully qualified domain name (for example, yourname.DDNS.com).
type: str
ddns_key:
description:
- DDNS update key (base 64 encoding).
type: str
ddns_keyname:
description:
- DDNS update key name.
type: str
ddns_password:
description:
- DDNS password.
type: str
ddns_server:
description:
- Select a DDNS service provider.
type: str
choices:
- dyndns.org
- dyns.net
- tzo.com
- vavic.com
- dipdns.net
- now.net.cn
- dhs.org
- easydns.com
- genericDDNS
- FortiGuardDDNS
- noip.com
ddns_server_ip:
description:
- Generic DDNS server IP.
type: str
ddns_sn:
description:
- DDNS Serial Number.
type: str
ddns_ttl:
description:
- Time-to-live for DDNS packets.
type: int
ddns_username:
description:
- DDNS user name.
type: str
ddns_zone:
description:
- Zone of your domain name (for example, DDNS.com).
type: str
ddnsid:
description:
- DDNS ID.
required: true
type: int
monitor_interface:
description:
- Monitored interface.
type: list
suboptions:
interface_name:
description:
- Interface name. Source system.interface.name.
type: str
ssl_certificate:
description:
- Name of local certificate for SSL connections. Source certificate.local.name.
type: str
update_interval:
description:
- DDNS update interval (60 - 2592000 sec).
type: int
use_public_ip:
description:
- Enable/disable use of public IP address.
type: str
choices:
- disable
- enable
'''
EXAMPLES = '''
- hosts: localhost
vars:
host: "192.168.122.40"
username: "admin"
password: ""
vdom: "root"
ssl_verify: "False"
tasks:
- name: Configure DDNS.
fortios_system_ddns:
host: "{{ host }}"
username: "{{ username }}"
password: "{{ password }}"
vdom: "{{ vdom }}"
https: "False"
state: "present"
system_ddns:
bound_ip: "<your_own_value>"
clear_text: "disable"
ddns_auth: "disable"
ddns_domain: "<your_own_value>"
ddns_key: "<your_own_value>"
ddns_keyname: "<your_own_value>"
ddns_password: "<your_own_value>"
ddns_server: "dyndns.org"
ddns_server_ip: "<your_own_value>"
ddns_sn: "<your_own_value>"
ddns_ttl: "13"
ddns_username: "<your_own_value>"
ddns_zone: "<your_own_value>"
ddnsid: "16"
monitor_interface:
-
interface_name: "<your_own_value> (source system.interface.name)"
ssl_certificate: "<your_own_value> (source certificate.local.name)"
update_interval: "20"
use_public_ip: "disable"
'''
RETURN = '''
build:
description: Build number of the fortigate image
returned: always
type: str
sample: '1547'
http_method:
description: Last method used to provision the content into FortiGate
returned: always
type: str
sample: 'PUT'
http_status:
description: Last result given by FortiGate on last operation applied
returned: always
type: str
sample: "200"
mkey:
description: Master key (id) used in the last call to FortiGate
returned: success
type: str
sample: "id"
name:
description: Name of the table used to fulfill the request
returned: always
type: str
sample: "urlfilter"
path:
description: Path of the table used to fulfill the request
returned: always
type: str
sample: "webfilter"
revision:
description: Internal revision number
returned: always
type: str
sample: "17.0.2.10658"
serial:
description: Serial number of the unit
returned: always
type: str
sample: "FGVMEVYYQT3AB5352"
status:
description: Indication of the operation's result
returned: always
type: str
sample: "success"
vdom:
description: Virtual domain used
returned: always
type: str
sample: "root"
version:
description: Version of the FortiGate
returned: always
type: str
sample: "v5.6.3"
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.connection import Connection
from ansible.module_utils.network.fortios.fortios import FortiOSHandler
from ansible.module_utils.network.fortimanager.common import FAIL_SOCKET_MSG
def login(data, fos):
host = data['host']
username = data['username']
password = data['password']
ssl_verify = data['ssl_verify']
fos.debug('on')
if 'https' in data and not data['https']:
fos.https('off')
else:
fos.https('on')
fos.login(host, username, password, verify=ssl_verify)
def filter_system_ddns_data(json):
option_list = ['bound_ip', 'clear_text', 'ddns_auth',
'ddns_domain', 'ddns_key', 'ddns_keyname',
'ddns_password', 'ddns_server', 'ddns_server_ip',
'ddns_sn', 'ddns_ttl', 'ddns_username',
'ddns_zone', 'ddnsid', 'monitor_interface',
'ssl_certificate', 'update_interval', 'use_public_ip']
dictionary = {}
for attribute in option_list:
if attribute in json and json[attribute] is not None:
dictionary[attribute] = json[attribute]
return dictionary
def underscore_to_hyphen(data):
    if isinstance(data, list):
        # Reassign by index: binding the converted value to the bare loop
        # variable would discard the converted dict elements.
        for i, elem in enumerate(data):
            data[i] = underscore_to_hyphen(elem)
    elif isinstance(data, dict):
        new_data = {}
        for k, v in data.items():
            new_data[k.replace('_', '-')] = underscore_to_hyphen(v)
        data = new_data
    return data
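# Hedged sketch of the expected transform (illustrative):
#     underscore_to_hyphen({'ddns_server_ip': '1.2.3.4', 'ddns_ttl': 300})
#     # -> {'ddns-server-ip': '1.2.3.4', 'ddns-ttl': 300}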
def system_ddns(data, fos):
vdom = data['vdom']
state = data['state']
system_ddns_data = data['system_ddns']
filtered_data = underscore_to_hyphen(filter_system_ddns_data(system_ddns_data))
if state == "present":
return fos.set('system',
'ddns',
data=filtered_data,
vdom=vdom)
elif state == "absent":
return fos.delete('system',
'ddns',
mkey=filtered_data['ddnsid'],
vdom=vdom)
def is_successful_status(status):
return status['status'] == "success" or \
status['http_method'] == "DELETE" and status['http_status'] == 404
def fortios_system(data, fos):
if data['system_ddns']:
resp = system_ddns(data, fos)
return not is_successful_status(resp), \
resp['status'] == "success", \
resp
def main():
fields = {
"host": {"required": False, "type": "str"},
"username": {"required": False, "type": "str"},
"password": {"required": False, "type": "str", "default": "", "no_log": True},
"vdom": {"required": False, "type": "str", "default": "root"},
"https": {"required": False, "type": "bool", "default": True},
"ssl_verify": {"required": False, "type": "bool", "default": True},
"state": {"required": True, "type": "str",
"choices": ["present", "absent"]},
"system_ddns": {
"required": False, "type": "dict", "default": None,
"options": {
"bound_ip": {"required": False, "type": "str"},
"clear_text": {"required": False, "type": "str",
"choices": ["disable", "enable"]},
"ddns_auth": {"required": False, "type": "str",
"choices": ["disable", "tsig"]},
"ddns_domain": {"required": False, "type": "str"},
"ddns_key": {"required": False, "type": "str"},
"ddns_keyname": {"required": False, "type": "str"},
"ddns_password": {"required": False, "type": "str"},
"ddns_server": {"required": False, "type": "str",
"choices": ["dyndns.org", "dyns.net", "tzo.com",
"vavic.com", "dipdns.net", "now.net.cn",
"dhs.org", "easydns.com", "genericDDNS",
"FortiGuardDDNS", "noip.com"]},
"ddns_server_ip": {"required": False, "type": "str"},
"ddns_sn": {"required": False, "type": "str"},
"ddns_ttl": {"required": False, "type": "int"},
"ddns_username": {"required": False, "type": "str"},
"ddns_zone": {"required": False, "type": "str"},
"ddnsid": {"required": True, "type": "int"},
"monitor_interface": {"required": False, "type": "list",
"options": {
"interface_name": {"required": False, "type": "str"}
}},
"ssl_certificate": {"required": False, "type": "str"},
"update_interval": {"required": False, "type": "int"},
"use_public_ip": {"required": False, "type": "str",
"choices": ["disable", "enable"]}
}
}
}
module = AnsibleModule(argument_spec=fields,
supports_check_mode=False)
# legacy_mode refers to using fortiosapi instead of HTTPAPI
legacy_mode = 'host' in module.params and module.params['host'] is not None and \
'username' in module.params and module.params['username'] is not None and \
'password' in module.params and module.params['password'] is not None
if not legacy_mode:
if module._socket_path:
connection = Connection(module._socket_path)
fos = FortiOSHandler(connection)
is_error, has_changed, result = fortios_system(module.params, fos)
else:
module.fail_json(**FAIL_SOCKET_MSG)
else:
try:
from fortiosapi import FortiOSAPI
except ImportError:
module.fail_json(msg="fortiosapi module is required")
fos = FortiOSAPI()
login(module.params, fos)
is_error, has_changed, result = fortios_system(module.params, fos)
fos.logout()
if not is_error:
module.exit_json(changed=has_changed, meta=result)
else:
module.fail_json(msg="Error in repo", meta=result)
if __name__ == '__main__':
main()
| gpl-3.0 |
maccaspacca/BismuthToolsWeb | bisurl.py | 3 | 1782 | import hashlib, base64
def checksum(string):
#return base64.urlsafe_b85encode(string.encode("utf-8")).decode("utf-8")[:8]
m = hashlib.md5()
m.update(string.encode("utf-8"))
return base64.b85encode(m.digest()).decode("utf-8")
def create_url(app_log, command, recipient, amount, operation, openfield):
if command == "pay":
openfield_b85_encode = base64.b85encode(openfield.encode("utf-8")).decode("utf-8")
operation_b85_encode = base64.b85encode(operation.encode("utf-8")).decode("utf-8")
url_partial = "bis://{}/{}/{}/{}/{}/".format(command,recipient,amount,operation_b85_encode,openfield_b85_encode)
url_constructed = url_partial+checksum(url_partial)
app_log.warning(url_constructed)
return url_constructed
def read_url(app_log, url):
url_split = url.split("/")
app_log.warning(url_split)
reconstruct = "bis://{}/{}/{}/{}/{}/".format(url_split[2],url_split[3],url_split[4],url_split[5],url_split[6],url_split[7])
operation_b85_decode = base64.b85decode(url_split[5]).decode("utf-8")
openfield_b85_decode = base64.b85decode(url_split[6]).decode("utf-8")
if checksum(reconstruct) == url_split[7]:
url_deconstructed = url_split[2],url_split[3],url_split[4],operation_b85_decode,openfield_b85_decode
app_log.warning("Checksum match")
return url_deconstructed
else:
app_log.warning("Checksum mismatch",checksum(reconstruct),url_split[7])
return
if __name__ == "__main__":
#test
import log
app_log = log.log("node.log", "WARNING", True)
print ("create_url", create_url (app_log, "pay", "recipient", "10", "test", "test1"))
print ("read_url", read_url(app_log, "bis://pay/recipient/10/bY*jN/bY*jNF#/w8YD>)O(N?Z)!FWkoCFX")) | gpl-3.0 |
boooka/GeoPowerOff | venv/lib/python2.7/site-packages/django/db/models/sql/datastructures.py | 71 | 2522 | """
Useful auxiliary data structures for query construction. Not useful outside
the SQL domain.
"""
class Col(object):
def __init__(self, alias, target, source):
self.alias, self.target, self.source = alias, target, source
def as_sql(self, qn, connection):
return "%s.%s" % (qn(self.alias), qn(self.target.column)), []
@property
def output_field(self):
return self.source
def relabeled_clone(self, relabels):
return self.__class__(relabels.get(self.alias, self.alias), self.target, self.source)
def get_group_by_cols(self):
return [(self.alias, self.target.column)]
def get_lookup(self, name):
return self.output_field.get_lookup(name)
def get_transform(self, name):
return self.output_field.get_transform(name)
def prepare(self):
return self
class EmptyResultSet(Exception):
pass
class MultiJoin(Exception):
"""
Used by join construction code to indicate the point at which a
multi-valued join was attempted (if the caller wants to treat that
exceptionally).
"""
def __init__(self, names_pos, path_with_names):
self.level = names_pos
# The path travelled, this includes the path to the multijoin.
self.names_with_path = path_with_names
class Empty(object):
pass
class Date(object):
"""
Add a date selection column.
"""
def __init__(self, col, lookup_type):
self.col = col
self.lookup_type = lookup_type
def relabeled_clone(self, change_map):
return self.__class__((change_map.get(self.col[0], self.col[0]), self.col[1]))
def as_sql(self, qn, connection):
if isinstance(self.col, (list, tuple)):
col = '%s.%s' % tuple(qn(c) for c in self.col)
else:
col = self.col
return connection.ops.date_trunc_sql(self.lookup_type, col), []
class DateTime(object):
"""
Add a datetime selection column.
"""
def __init__(self, col, lookup_type, tzname):
self.col = col
self.lookup_type = lookup_type
self.tzname = tzname
def relabeled_clone(self, change_map):
return self.__class__((change_map.get(self.col[0], self.col[0]), self.col[1]))
def as_sql(self, qn, connection):
if isinstance(self.col, (list, tuple)):
col = '%s.%s' % tuple(qn(c) for c in self.col)
else:
col = self.col
return connection.ops.datetime_trunc_sql(self.lookup_type, col, self.tzname)
| apache-2.0 |
coletivoEITA/noosfero | public/javascripts/strophejs-1.1.3/examples/attach/boshclient.py | 38 | 5302 | import sys, os
import httplib, urllib
import random, binascii
from urlparse import urlparse
from punjab.httpb import HttpbParse
from twisted.words.xish import domish
from twisted.words.protocols.jabber import jid
TLS_XMLNS = 'urn:ietf:params:xml:ns:xmpp-tls'
SASL_XMLNS = 'urn:ietf:params:xml:ns:xmpp-sasl'
BIND_XMLNS = 'urn:ietf:params:xml:ns:xmpp-bind'
SESSION_XMLNS = 'urn:ietf:params:xml:ns:xmpp-session'
class BOSHClient:
def __init__(self, jabberid, password, bosh_service):
self.rid = random.randint(0, 10000000)
self.jabberid = jid.internJID(jabberid)
self.password = password
self.authid = None
self.sid = None
self.logged_in = False
self.headers = {"Content-type": "text/xml",
"Accept": "text/xml"}
self.bosh_service = urlparse(bosh_service)
def buildBody(self, child=None):
"""Build a BOSH body.
"""
body = domish.Element(("http://jabber.org/protocol/httpbind", "body"))
body['content'] = 'text/xml; charset=utf-8'
self.rid = self.rid + 1
body['rid'] = str(self.rid)
body['sid'] = str(self.sid)
body['xml:lang'] = 'en'
if child is not None:
body.addChild(child)
return body
def sendBody(self, body):
"""Send the body.
"""
parser = HttpbParse(True)
        # open a connection and POST the body to the BOSH endpoint
conn = httplib.HTTPConnection(self.bosh_service.netloc)
conn.request("POST", self.bosh_service.path,
body.toXml(), self.headers)
response = conn.getresponse()
data = ''
if response.status == 200:
data = response.read()
conn.close()
return parser.parse(data)
def startSessionAndAuth(self, hold='1', wait='70'):
# Create a session
# create body
body = domish.Element(("http://jabber.org/protocol/httpbind", "body"))
body['content'] = 'text/xml; charset=utf-8'
body['hold'] = hold
body['rid'] = str(self.rid)
body['to'] = self.jabberid.host
body['wait'] = wait
body['window'] = '5'
body['xml:lang'] = 'en'
retb, elems = self.sendBody(body)
if type(retb) != str and retb.hasAttribute('authid') and \
retb.hasAttribute('sid'):
self.authid = retb['authid']
self.sid = retb['sid']
# go ahead and auth
auth = domish.Element((SASL_XMLNS, 'auth'))
auth['mechanism'] = 'PLAIN'
# TODO: add authzid
if auth['mechanism'] == 'PLAIN':
auth_str = ""
auth_str += "\000"
auth_str += self.jabberid.user.encode('utf-8')
auth_str += "\000"
try:
auth_str += self.password.encode('utf-8').strip()
except UnicodeDecodeError:
auth_str += self.password.decode('latin1') \
.encode('utf-8').strip()
auth.addContent(binascii.b2a_base64(auth_str))
retb, elems = self.sendBody(self.buildBody(auth))
if len(elems) == 0:
# poll for data
retb, elems = self.sendBody(self.buildBody())
if len(elems) > 0:
if elems[0].name == 'success':
retb, elems = self.sendBody(self.buildBody())
has_bind = False
for child in elems[0].children:
if child.name == 'bind':
has_bind = True
break
if has_bind:
iq = domish.Element(('jabber:client', 'iq'))
iq['type'] = 'set'
iq.addUniqueId()
iq.addElement('bind')
iq.bind['xmlns'] = BIND_XMLNS
if self.jabberid.resource:
iq.bind.addElement('resource')
iq.bind.resource.addContent(
self.jabberid.resource)
retb, elems = self.sendBody(self.buildBody(iq))
if type(retb) != str and retb.name == 'body':
# send session
iq = domish.Element(('jabber:client', 'iq'))
iq['type'] = 'set'
iq.addUniqueId()
iq.addElement('session')
iq.session['xmlns'] = SESSION_XMLNS
retb, elems = self.sendBody(self.buildBody(iq))
# did not bind, TODO - add a retry?
if type(retb) != str and retb.name == 'body':
self.logged_in = True
# bump up the rid, punjab already
# received self.rid
self.rid += 1
if __name__ == '__main__':
USERNAME = sys.argv[1]
PASSWORD = sys.argv[2]
SERVICE = sys.argv[3]
c = BOSHClient(USERNAME, PASSWORD, SERVICE)
c.startSessionAndAuth()
print c.logged_in
| agpl-3.0 |
suhussai/youtube-dl | devscripts/check-porn.py | 138 | 1925 | #!/usr/bin/env python
from __future__ import unicode_literals
"""
This script employs a VERY basic heuristic ('porn' in webpage.lower()) to check
that we are not failing to 'age_limit'-tag some porn site.
A second approach relies on a list of porn domains; to activate it, pass the
list filename as the only argument.
"""
# Allow direct execution
import os
import sys
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from test.helper import get_testcases
from youtube_dl.utils import compat_urllib_parse_urlparse
from youtube_dl.utils import compat_urllib_request
if len(sys.argv) > 1:
METHOD = 'LIST'
LIST = open(sys.argv[1]).read().decode('utf8').strip()
else:
    METHOD = 'HEURISTIC'
for test in get_testcases():
    if METHOD == 'HEURISTIC':
try:
webpage = compat_urllib_request.urlopen(test['url'], timeout=10).read()
except Exception:
print('\nFail: {0}'.format(test['name']))
continue
webpage = webpage.decode('utf8', 'replace')
RESULT = 'porn' in webpage.lower()
elif METHOD == 'LIST':
domain = compat_urllib_parse_urlparse(test['url']).netloc
if not domain:
print('\nFail: {0}'.format(test['name']))
continue
domain = '.'.join(domain.split('.')[-2:])
RESULT = ('.' + domain + '\n' in LIST or '\n' + domain + '\n' in LIST)
if RESULT and ('info_dict' not in test or 'age_limit' not in test['info_dict'] or
test['info_dict']['age_limit'] != 18):
print('\nPotential missing age_limit check: {0}'.format(test['name']))
elif not RESULT and ('info_dict' in test and 'age_limit' in test['info_dict'] and
test['info_dict']['age_limit'] == 18):
print('\nPotential false negative: {0}'.format(test['name']))
else:
sys.stdout.write('.')
sys.stdout.flush()
print()
| unlicense |
JimCircadian/ansible | test/units/executor/module_common/test_recursive_finder.py | 85 | 5823 | # (c) 2017, Toshio Kuratomi <tkuratomi@ansible.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import imp
import pytest
import zipfile
from collections import namedtuple
from functools import partial
from io import BytesIO, StringIO
import ansible.errors
from ansible.executor.module_common import recursive_finder
from ansible.module_utils.six import PY2
from ansible.module_utils.six.moves import builtins
original_find_module = imp.find_module
@pytest.fixture
def finder_containers():
FinderContainers = namedtuple('FinderContainers', ['py_module_names', 'py_module_cache', 'zf'])
py_module_names = set()
# py_module_cache = {('__init__',): b''}
py_module_cache = {}
zipoutput = BytesIO()
zf = zipfile.ZipFile(zipoutput, mode='w', compression=zipfile.ZIP_STORED)
# zf.writestr('ansible/__init__.py', b'')
return FinderContainers(py_module_names, py_module_cache, zf)
def find_module_foo(module_utils_data, *args, **kwargs):
if args[0] == 'foo':
return (module_utils_data, '/usr/lib/python2.7/site-packages/ansible/module_utils/foo.py', ('.py', 'r', imp.PY_SOURCE))
return original_find_module(*args, **kwargs)
def find_package_foo(module_utils_data, *args, **kwargs):
if args[0] == 'foo':
return (module_utils_data, '/usr/lib/python2.7/site-packages/ansible/module_utils/foo', ('', '', imp.PKG_DIRECTORY))
return original_find_module(*args, **kwargs)
class TestRecursiveFinder(object):
def test_no_module_utils(self, finder_containers):
name = 'ping'
data = b'#!/usr/bin/python\nreturn \'{\"changed\": false}\''
recursive_finder(name, data, *finder_containers)
assert finder_containers.py_module_names == set(())
assert finder_containers.py_module_cache == {}
assert frozenset(finder_containers.zf.namelist()) == frozenset()
def test_from_import_toplevel_package(self, finder_containers, mocker):
if PY2:
module_utils_data = BytesIO(b'# License\ndef do_something():\n pass\n')
else:
module_utils_data = StringIO(u'# License\ndef do_something():\n pass\n')
mocker.patch('imp.find_module', side_effect=partial(find_package_foo, module_utils_data))
mocker.patch('ansible.executor.module_common._slurp', side_effect=lambda x: b'# License\ndef do_something():\n pass\n')
name = 'ping'
data = b'#!/usr/bin/python\nfrom ansible.module_utils import foo'
recursive_finder(name, data, *finder_containers)
mocker.stopall()
assert finder_containers.py_module_names == set((('foo', '__init__'),))
assert finder_containers.py_module_cache == {}
assert frozenset(finder_containers.zf.namelist()) == frozenset(('ansible/module_utils/foo/__init__.py',))
def test_from_import_toplevel_module(self, finder_containers, mocker):
if PY2:
module_utils_data = BytesIO(b'# License\ndef do_something():\n pass\n')
else:
module_utils_data = StringIO(u'# License\ndef do_something():\n pass\n')
mocker.patch('imp.find_module', side_effect=partial(find_module_foo, module_utils_data))
name = 'ping'
data = b'#!/usr/bin/python\nfrom ansible.module_utils import foo'
recursive_finder(name, data, *finder_containers)
mocker.stopall()
assert finder_containers.py_module_names == set((('foo',),))
assert finder_containers.py_module_cache == {}
assert frozenset(finder_containers.zf.namelist()) == frozenset(('ansible/module_utils/foo.py',))
#
# Test importing six with many permutations because it is not a normal module
#
def test_from_import_six(self, finder_containers):
name = 'ping'
data = b'#!/usr/bin/python\nfrom ansible.module_utils import six'
recursive_finder(name, data, *finder_containers)
assert finder_containers.py_module_names == set((('six', '__init__'),))
assert finder_containers.py_module_cache == {}
assert frozenset(finder_containers.zf.namelist()) == frozenset(('ansible/module_utils/six/__init__.py', ))
def test_import_six(self, finder_containers):
name = 'ping'
data = b'#!/usr/bin/python\nimport ansible.module_utils.six'
recursive_finder(name, data, *finder_containers)
assert finder_containers.py_module_names == set((('six', '__init__'),))
assert finder_containers.py_module_cache == {}
assert frozenset(finder_containers.zf.namelist()) == frozenset(('ansible/module_utils/six/__init__.py', ))
def test_import_six_from_many_submodules(self, finder_containers):
name = 'ping'
data = b'#!/usr/bin/python\nfrom ansible.module_utils.six.moves.urllib.parse import urlparse'
recursive_finder(name, data, *finder_containers)
assert finder_containers.py_module_names == set((('six', '__init__'),))
assert finder_containers.py_module_cache == {}
assert frozenset(finder_containers.zf.namelist()) == frozenset(('ansible/module_utils/six/__init__.py',))
| gpl-3.0 |
jiumx60rus/grishyGhost | node_modules/nodegit/node_modules/pangyp/gyp/PRESUBMIT.py | 88 | 3685 | # Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Top-level presubmit script for GYP.
See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
for more details about the presubmit API built into gcl.
"""
PYLINT_BLACKLIST = [
# TODO: fix me.
# From SCons, not done in google style.
'test/lib/TestCmd.py',
'test/lib/TestCommon.py',
'test/lib/TestGyp.py',
]
PYLINT_DISABLED_WARNINGS = [
# TODO: fix me.
# Many tests include modules they don't use.
'W0611',
# Possible unbalanced tuple unpacking with sequence.
'W0632',
# Attempting to unpack a non-sequence.
'W0633',
# Include order doesn't properly include local files?
'F0401',
# Some use of built-in names.
'W0622',
# Some unused variables.
'W0612',
# Operator not preceded/followed by space.
'C0323',
'C0322',
# Unnecessary semicolon.
'W0301',
# Unused argument.
'W0613',
# String has no effect (docstring in wrong place).
'W0105',
# map/filter on lambda could be replaced by comprehension.
'W0110',
# Use of eval.
'W0123',
# Comma not followed by space.
'C0324',
# Access to a protected member.
'W0212',
# Bad indent.
'W0311',
# Line too long.
'C0301',
# Undefined variable.
'E0602',
  # No exception type specified.
'W0702',
# No member of that name.
'E1101',
# Dangerous default {}.
'W0102',
# Cyclic import.
'R0401',
# Others, too many to sort.
'W0201', 'W0232', 'E1103', 'W0621', 'W0108', 'W0223', 'W0231',
'R0201', 'E0101', 'C0321',
# ************* Module copy
# W0104:427,12:_test.odict.__setitem__: Statement seems to have no effect
'W0104',
]
def CheckChangeOnUpload(input_api, output_api):
report = []
report.extend(input_api.canned_checks.PanProjectChecks(
input_api, output_api))
return report
def CheckChangeOnCommit(input_api, output_api):
report = []
# Accept any year number from 2009 to the current year.
current_year = int(input_api.time.strftime('%Y'))
allowed_years = (str(s) for s in reversed(xrange(2009, current_year + 1)))
years_re = '(' + '|'.join(allowed_years) + ')'
# The (c) is deprecated, but tolerate it until it's removed from all files.
license = (
r'.*? Copyright (\(c\) )?%(year)s Google Inc\. All rights reserved\.\n'
r'.*? Use of this source code is governed by a BSD-style license that '
r'can be\n'
r'.*? found in the LICENSE file\.\n'
) % {
'year': years_re,
}
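  # An illustrative note (not part of the original script): with
  # current_year == 2012 the generator above yields the regex fragment
  # '(2012|2011|2010|2009)', so copyright lines from any of those years pass.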
report.extend(input_api.canned_checks.PanProjectChecks(
input_api, output_api, license_header=license))
report.extend(input_api.canned_checks.CheckTreeIsOpen(
input_api, output_api,
'http://gyp-status.appspot.com/status',
'http://gyp-status.appspot.com/current'))
import os
import sys
old_sys_path = sys.path
try:
sys.path = ['pylib', 'test/lib'] + sys.path
blacklist = PYLINT_BLACKLIST
if sys.platform == 'win32':
blacklist = [os.path.normpath(x).replace('\\', '\\\\')
for x in PYLINT_BLACKLIST]
report.extend(input_api.canned_checks.RunPylint(
input_api,
output_api,
black_list=blacklist,
disabled_warnings=PYLINT_DISABLED_WARNINGS))
finally:
sys.path = old_sys_path
return report
TRYBOTS = [
'gyp-win32',
'gyp-win64',
'gyp-linux',
'gyp-mac',
]
def GetPreferredTryMasters(_, change):
return {
'tryserver.nacl': { t: set(['defaulttests']) for t in TRYBOTS },
}
| mit |
ankanaan/chimera | src/chimera/core/config.py | 1 | 13818 | #! /usr/bin/env python
# -*- coding: iso-8859-1 -*-
# chimera - observatory automation system
# Copyright (C) 2006-2007 P. Henrique Silva <henrique@astro.ufsc.br>
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA.
from types import (IntType, FloatType, StringType, LongType,
DictType, TupleType, ListType, BooleanType,
NoneType)
try:
import cPickle as pickle
except ImportError:
import pickle
import logging
log = logging.getLogger(__name__)
from chimera.util.enum import EnumValue
from chimera.core.exceptions import OptionConversionException
from chimera.util.coord import Coord
from chimera.util.position import Position
class Option(object):
def __init__(self, name, value, checker):
self._name = name
self._value = value
self._default = value
self._checker = checker
def set(self, value):
try:
oldvalue = self._value
self._value = self._checker.check(value)
return oldvalue
except OptionConversionException, e:
log.debug("Error setting %s: %s." % (self._name, str(e)))
raise e
def get(self):
return self._value
class Checker (object):
def check(self, value):
pass
class IgnoreChecker (Checker):
def __init__(self):
Checker.__init__(self)
def check(self, value):
return value
class IntChecker (Checker):
def __init__(self):
Checker.__init__(self)
def check(self, value):
# we MUST return an int or raise OptionConversionException
# if we can't get one from "value"
# simple case
if type(value) in (IntType, LongType, FloatType, BooleanType):
return int(value)
if type(value) == StringType:
            # try to convert to int (use float first, then cast loosely)
try:
tmp = float(value)
tmp = int(tmp)
return tmp
except ValueError:
# couldn't convert, nothing to do
raise OptionConversionException(
"couldn't convert '%s' to int value." % value)
raise OptionConversionException(
"couldn't convert '%s' to int." % str(type(value)))
class FloatChecker (Checker):
def __init__(self):
Checker.__init__(self)
def check(self, value):
# we MUST return an float or raise OptionConversionException
# if we can't get one from "value"
# simple case
if type(value) in (FloatType, IntType, LongType, BooleanType):
return float(value)
if type(value) == StringType:
# try to convert to int
try:
tmp = float(value)
return tmp
except ValueError:
# couldn't convert, nothing to do
raise OptionConversionException(
"couldn't convert '%s' to float value." % value)
raise OptionConversionException(
"couldn't convert %s to float." % str(type(value)))
class StringChecker (Checker):
def __init__(self):
Checker.__init__(self)
def check(self, value):
# we MUST return an str or raise OptionConversionException
# if we can't get one from "value"
# simple case (nearly everything can be converted to str, just cross
# fingers and convert!)
return str(value)
class NoneChecker (Checker):
def __init__(self):
Checker.__init__(self)
def check(self, value):
# Just return the None value.
return value
class BoolChecker (Checker):
def __init__(self):
Checker.__init__(self)
self._trueValues = ["true", "yes", "y", "on", "1"]
self._falseValues = ["false", "no", "n", "off", "0"]
self._truthTable = self._trueValues + self._falseValues
def check(self, value):
# we MUST return an bool or raise OptionConversionException
# if we can't get one from "value"
if type(value) == BooleanType:
return value
# only accept 0 and 1 as valid booleans...
        # accepting everything that Python's truth tables treat as true or
        # false would cause a lot of problems in OptionsChecker
if type(value) in (IntType, LongType, FloatType):
if value == 1:
return True
if value == 0:
return False
if type(value) == StringType:
value = value.strip().lower()
if value in self._truthTable:
return value in self._trueValues
raise OptionConversionException(
"couldn't convert '%s' to bool." % value)
# any other type, raise exception
raise OptionConversionException(
"couldn't convert %s to bool." % str(type(value)))
class OptionsChecker (Checker):
def __init__(self, options):
Checker.__init__(self)
self._options = self._readOptions(options)
def _readOptions(self, opt):
# options = [ {"value": value, "checker", checker}, ...]
options = []
for value in opt:
if type(value) in (IntType, LongType):
options.append({"value": value,
"checker": IntChecker()})
continue
if type(value) == FloatType:
options.append({"value": value,
"checker": FloatChecker()})
continue
if type(value) == StringType:
options.append({"value": value,
"checker": StringChecker()})
continue
if type(value) == BooleanType:
options.append({"value": value,
"checker": BoolChecker()})
continue
return options
def check(self, value):
for option in self._options:
try:
tmp = option["checker"].check(value)
if tmp == option["value"]:
return option["value"]
else:
continue
except OptionConversionException:
continue
raise OptionConversionException(
"'%s' isn't a valid option." % str(value))
class RangeChecker (Checker):
def __init__(self, value):
Checker.__init__(self)
self._min = value[0]
self._max = value[1]
if type(value[0]) == FloatType:
self._checker = FloatChecker()
else:
self._checker = IntChecker()
def check(self, value):
try:
tmp = self._checker.check(value)
except OptionConversionException:
raise OptionConversionException(
"'%s' isn't a valid option." % str(value))
else:
# inclusive
if (tmp >= self._min) and (tmp <= self._max):
return tmp
else:
raise OptionConversionException(
"'%s' it's outside valid limits (%f <= x <= %f." %
(str(value), self._min, self._max))
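# A hedged illustration (not part of the original module): RangeChecker is
# built from a (min, max) tuple and checks inclusively, e.g.
#   RangeChecker((0, 10)).check("5")     -> 5
#   RangeChecker((0.0, 1.0)).check(0.5)  -> 0.5
# while RangeChecker((0, 10)).check(11) raises OptionConversionException.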
class EnumChecker (Checker):
def __init__(self, value):
Checker.__init__(self)
self.enumtype = value.enumtype
def check(self, value):
if type(value) == EnumValue:
if value in self.enumtype:
return value
if type(value) == StringType:
ret = [v for v in self.enumtype if str(v).upper() == value.upper()]
if ret:
return ret[0]
raise OptionConversionException(
'invalid enum value %s. not a %s enum.' %
(value, str(self.enumtype)))
class CoordOption (Option):
def __init__(self, name, value, checker):
Option.__init__(self, name, value, checker)
self._state = value.state
def set(self, value):
try:
oldvalue = self._value
self._value = self._checker.check(value, self._state)
return oldvalue
except OptionConversionException, e:
log.debug("Error setting %s: %s." % (self._name, str(e)))
raise e
class CoordChecker (Checker):
def __init__(self, value):
Checker.__init__(self)
def check(self, value, state=None):
if not isinstance(value, Coord):
try:
return Coord.fromState(value, state)
except ValueError:
pass
        # anything we could not convert is rejected
raise OptionConversionException('invalid coord value %s.' % value)
class PositionOption (Option):
def __init__(self, name, value, checker):
Option.__init__(self, name, value, checker)
self._system = value.system
self._epoch = value.epoch
def set(self, value):
try:
oldvalue = self._value
self._value = self._checker.check(value, self._system, self._epoch)
return oldvalue
except OptionConversionException, e:
log.debug("Error setting %s: %s." % (self._name, str(e)))
raise e
# FIXME: check and convert positions
class PositionChecker (Checker):
def __init__(self, value):
Checker.__init__(self)
def check(self, value, state=None):
return value
class Config (object):
def __init__(self, obj):
if type(obj) == DictType:
self._options = self._readOptions(obj)
else:
self._options = self._readOptions(obj.__config__)
def _readOptions(self, opt):
options = {}
for name, value in opt.items():
if type(value) in (IntType, LongType):
options[name] = Option(name, value, IntChecker())
continue
if type(value) == FloatType:
options[name] = Option(name, value, FloatChecker())
continue
if type(value) == StringType:
options[name] = Option(name, value, StringChecker())
continue
if type(value) == BooleanType:
options[name] = Option(name, value, BoolChecker())
continue
if isinstance(value, NoneType):
options[name] = Option(name, value, NoneChecker())
continue
# FIXME: for list and tuple we use the first element as default option
        # there is no way to specify another default for these types
if type(value) == ListType:
options[name] = Option(name, value[0], OptionsChecker(value))
continue
if type(value) == TupleType:
options[name] = Option(name, value[0], RangeChecker(value))
continue
if type(value) == EnumValue:
options[name] = Option(name, value, EnumChecker(value))
continue
# special Coord type, remember which state create the
# option to allow the use of the right constructor when
# checking new values
if isinstance(value, Coord):
options[name] = CoordOption(name, value, CoordChecker(value))
continue
if isinstance(value, Position):
options[name] = PositionOption(name, value, PositionChecker(value))
continue
raise ValueError("Invalid option type: %s." % type(value))
return options
def __contains__(self, name):
return name in self._options
def __len__(self):
return len(self._options)
def __getitem__(self, name):
if type(name) != StringType:
raise TypeError
if name in self:
return self._options[name].get()
else:
raise KeyError("invalid option: %s." % name)
def __setitem__(self, name, value):
# if value exists, run template checker and set _config
if name in self:
return self._options[name].set(value)
# rant about invalid option
else:
raise KeyError("invalid option: %s." % name)
def __iter__(self):
return self.iterkeys()
def iterkeys(self):
return self._options.__iter__()
def itervalues(self):
for name in self._options:
yield self._options[name].get()
def iteritems(self):
for name in self._options:
yield (name, self._options[name].get())
def keys(self):
return [key for key in self.iterkeys()]
def values(self):
return [value for value in self.itervalues()]
def items(self):
return [(name, value) for name, value in self.iteritems()]
def __iadd__(self, other):
if type(other) not in (Config, DictType):
return self
if type(other) == DictType:
other = Config(other)
for name, value in other._options.items():
if not name in self._options:
raise KeyError("invalid option: %s" % name)
self._options[name] = value
return self
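# A hedged usage sketch (not part of the original file): Config infers a
# checker from each default's type, so, for example,
#   cfg = Config({"exposure": 1.0,            # float -> FloatChecker
#                 "filter": ["R", "G", "B"],  # list  -> OptionsChecker
#                 "binning": (1, 4)})         # tuple -> RangeChecker
#   cfg["exposure"] = "2.5"   # coerced to the float 2.5
#   cfg["filter"] = "X"       # raises OptionConversionException
#   cfg["unknown"]            # raises KeyError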
| gpl-2.0 |
Huyuwei/tvm | python/tvm/schedule.py | 2 | 20831 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""The computation schedule api of TVM."""
from __future__ import absolute_import as _abs
from ._ffi.base import string_types
from ._ffi.node import NodeBase, register_node
from ._ffi.node import convert_to_node as _convert_to_node
from ._ffi.function import _init_api, Function
from ._ffi.function import convert_to_tvm_func as _convert_tvm_func
from . import _api_internal
from . import tensor as _tensor
from . import expr as _expr
from . import container as _container
def convert(value):
"""Convert value to TVM node or function.
Parameters
----------
value : python value
Returns
-------
tvm_val : Node or Function
Converted value in TVM
"""
if isinstance(value, (Function, NodeBase)):
return value
if callable(value):
return _convert_tvm_func(value)
return _convert_to_node(value)
@register_node
class Buffer(NodeBase):
"""Symbolic data buffer in TVM.
    Buffer provides a way to represent the data layout
specialization of data structure in TVM.
Do not construct directly, use :any:`decl_buffer` instead.
See the documentation of :any:`decl_buffer` for more details.
See Also
--------
decl_buffer : Declare a buffer
"""
READ = 1
WRITE = 2
def access_ptr(self, access_mask, ptr_type="handle", content_lanes=1, offset=0):
"""Get an access pointer to the head of buffer.
This is the recommended method to get buffer data
        address when interacting with external functions.
Parameters
----------
access_mask : int
The access pattern MASK. Indicate whether the
access will read or write to the data content.
ptr_type : str, optional
The data type of the result pointer. Do not specify
            unless we want to cast the pointer to a specific type.
content_lanes: int, optional
The number of lanes for the data type. This value
is greater than one for vector types.
offset: Expr, optional
The offset of pointer. We can use it to offset by
the number of elements from the address of ptr.
Examples
--------
.. code-block:: python
import tvm.schedule.Buffer
# Get access ptr for read
buffer.access_ptr("r")
# Get access ptr for read/write with bitmask
buffer.access_ptr(Buffer.READ | Buffer.WRITE)
# Get access ptr for read/write with str flag
buffer.access_ptr("rw")
# Get access ptr for read with offset
buffer.access_ptr("r", offset = 100)
"""
if isinstance(access_mask, string_types):
mask = 0
for value in access_mask:
if value == "r":
mask = mask | Buffer.READ
elif value == "w":
mask = mask | Buffer.WRITE
else:
raise ValueError("Unknown access_mask %s" % access_mask)
access_mask = mask
offset = convert(offset)
return _api_internal._BufferAccessPtr(self, access_mask, ptr_type,
content_lanes, offset)
def vload(self, begin, dtype=None):
"""Generate an Expr that loads dtype from begin index.
Parameters
----------
begin : Array of Expr
            The beginning index in units of Buffer.dtype
dtype : str
            The data type to be loaded; it may be a vector type whose
            number of lanes is a multiple of Buffer.dtype's lanes
Returns
-------
load : Expr
The corresponding load expression.
"""
begin = (begin,) if isinstance(begin, (int, _expr.Expr)) else begin
dtype = dtype if dtype else self.dtype
return _api_internal._BufferVLoad(self, begin, dtype)
def vstore(self, begin, value):
"""Generate a Stmt that store value into begin index.
Parameters
----------
begin : Array of Expr
            The beginning index in units of Buffer.dtype
value : Expr
The value to be stored.
Returns
-------
store : Stmt
The corresponding store stmt.
"""
begin = (begin,) if isinstance(begin, (int, _expr.Expr)) else begin
return _api_internal._BufferVStore(self, begin, value)
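# A hedged usage sketch (not part of the original file), assuming a buffer
# declared elsewhere via tvm.decl_buffer, e.g. Ab = tvm.decl_buffer((n,), "float32"):
#   Ab.vload((0,), "float32x4")  # load a 4-lane vector starting at index 0
#   Ab.vstore((4,), v)           # store expression v starting at index 4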
@register_node
class Split(NodeBase):
"""Split operation on axis."""
@register_node
class Fuse(NodeBase):
"""Fuse operation on axis."""
@register_node
class Singleton(NodeBase):
"""Singleton axis."""
@register_node
class IterVar(NodeBase, _expr.ExprOp):
"""Represent iteration variable.
IterVar is normally created by Operation, to represent
axis iterations in the computation.
    It can also be created by schedule primitives like :any:`tvm.schedule.Stage.split`.
See Also
--------
tvm.thread_axis: Create thread axis IterVar.
tvm.reduce_axis: Create reduce axis IterVar.
"""
DataPar = 0
ThreadIndex = 1
CommReduce = 2
Ordered = 3
DimInfo = 4
Unrolled = 5
Vectorized = 6
Parallelized = 7
Tensorized = 8
_tensor.iter_var_cls = IterVar
def create_schedule(ops):
"""Create a schedule for list of ops
Parameters
----------
ops : list of Operations
The source expression.
Returns
-------
sch : schedule.Schedule
The created schedule.
"""
if not isinstance(ops, (list, _container.Array)):
ops = [ops]
return _api_internal._CreateSchedule(ops)
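# A hedged usage sketch (not part of the original file), assuming the usual
# tvm.placeholder/tvm.compute API from this package:
#   A = tvm.placeholder((n,), name="A")
#   B = tvm.compute(A.shape, lambda i: A[i] + 1, name="B")
#   s = create_schedule(B.op)  # s[B] is the Stage scheduling B's operation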
@register_node
class Schedule(NodeBase):
"""Schedule for all the stages."""
def __getitem__(self, k):
if isinstance(k, _tensor.Tensor):
k = k.op
if not isinstance(k, _tensor.Operation):
raise ValueError("Expect schedule key to be Tensor or Operation")
if k not in self.stage_map:
raise ValueError("Cannot find the operation %s in schedule" % (str(k)))
return self.stage_map[k]
def normalize(self):
"""Build a normalized schedule from the current schedule.
        Insert the necessary rebases to make certain iter vars start from 0.
        This is needed before bound inference and follow-up steps.
Returns
-------
sch : Schedule
The normalized schedule.
"""
return _api_internal._ScheduleNormalize(self)
def create_group(self, outputs, inputs, include_inputs=False):
"""Create stage group by giving output and input boundary.
        The operators between outputs and inputs are placed as members of the
        group. outputs are included in the group, while inputs are not.
Parameters
----------
outputs : list of Tensors
The outputs of the group.
inputs : list of Tensors
The inputs of the group.
include_inputs : boolean, optional
Whether include input operations in the group if they are used by outputs.
Returns
-------
group : Stage
            A virtual stage representing the group; the user can use compute_at to move
the attachment point of the group.
"""
if isinstance(outputs, _tensor.Tensor):
outputs = [outputs]
if isinstance(inputs, _tensor.Tensor):
inputs = [inputs]
return _api_internal._ScheduleCreateGroup(
self, outputs, inputs, include_inputs)
def cache_read(self, tensor, scope, readers):
"""Create a cache read of original tensor for readers.
This will mutate the body of the readers.
A new cache stage will be created for the tensor.
Call this before doing any split/fuse schedule.
Parameters
----------
tensor : Tensor
The tensor to be cached.
scope : str
            The scope of the cached tensor.
readers : list of Tensor or Operation
The readers to read the cache.
Returns
-------
cache : Tensor
The created cache tensor.
"""
if isinstance(readers, (_tensor.Tensor, _tensor.Operation)):
readers = [readers]
readers = [t.op if isinstance(t, _tensor.Tensor) else t for t in readers]
return _api_internal._ScheduleCacheRead(self, tensor, scope, readers)
def cache_write(self, tensor, scope):
"""Create a cache write of original tensor, before storing into tensor.
This will mutate the body of the tensor.
        A new cache stage will be created before feeding into the tensor.
This function can be used to support data layout transformation.
        If there is a split/fuse/reorder on the data parallel axis of the
        tensor before cache_write is called, the intermediate cache stores
        the data in the layout given by the iteration order of the leaf axes.
The data will be transformed back to the original layout in the original tensor.
User can further call compute_inline to inline the original layout and keep
the data stored in the transformed layout.
Parameters
----------
tensor : Tensor, list or tuple
            The tensors to be fed to. All the tensors must be produced by one computeOp
scope : str
            The scope of the cached tensor.
Returns
-------
cache : Tensor
The created cache tensor.
"""
return _api_internal._ScheduleCacheWrite(self, tensor, scope)
def rfactor(self, tensor, axis, factor_axis=0):
""" Factor a reduction axis in tensor's schedule to be an explicit axis.
        This will create a new stage that generates the new tensor, with axis
as the first dimension. The tensor's body will be rewritten as a reduction
over the factored tensor.
Parameters
----------
tensor : Tensor
The tensor to be factored.
axis : IterVar
The reduction axis in the schedule to be factored.
factor_axis : int
The position where the new axis is placed.
Returns
-------
tfactor : Tensor or Array of Tensor
The created factored tensor.
"""
factored = _api_internal._ScheduleRFactor(self, tensor, axis, factor_axis)
return factored[0] if len(factored) == 1 else factored
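# A hedged usage sketch (not part of the original file): splitting the
# reduction axis of a summation B and factoring out the inner part,
# assuming s = create_schedule(B.op):
#   ko, ki = s[B].split(B.op.reduce_axis[0], factor=16)
#   BF = s.rfactor(B, ki)  # new stage computing partial sums over ki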
@register_node
class Stage(NodeBase):
"""A Stage represents schedule for one operation."""
def split(self, parent, factor=None, nparts=None):
"""Split the stage either by factor providing outer scope, or both
Parameters
----------
parent : IterVar
The parent iter var.
factor : Expr, optional
The splitting factor
nparts : Expr, optional
The number of outer parts.
Returns
-------
outer : IterVar
The outer variable of iteration.
inner : IterVar
The inner variable of iteration.
"""
if nparts is not None:
if factor is not None:
raise ValueError("Do not need to provide both outer and nparts")
outer, inner = _api_internal._StageSplitByNParts(self, parent, nparts)
else:
if factor is None:
raise ValueError("Either nparts or factor need to be provided")
outer, inner = _api_internal._StageSplitByFactor(self, parent, factor)
return outer, inner
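    # A hedged usage sketch (not part of the original file): split an axis
    # by a fixed factor, then split the resulting inner axis by nparts:
    #   xo, xi = s[B].split(B.op.axis[0], factor=32)
    #   xio, xii = s[B].split(xi, nparts=4)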
def fuse(self, *args):
"""Fuse multiple consecutive iteration variables into a single iteration variable.
fused = fuse(...fuse(fuse(args[0], args[1]), args[2]),..., args[-1])
The order is from outer to inner.
Parameters
----------
args : list of IterVars
            IterVars that follow one another consecutively
Returns
-------
fused : IterVar
The fused variable of iteration.
"""
fused = _api_internal._StageFuse(self, args)
return fused
def set_scope(self, scope):
"""Set the thread scope of this stage
Parameters
----------
scope : str
The thread scope of this stage
"""
return _api_internal._StageSetScope(self, scope)
def bind(self, ivar, thread_ivar):
"""Bind ivar to thread index thread_ivar
Parameters
----------
ivar : IterVar
The iteration to be binded to thread.
thread_ivar : IterVar
The thread to be binded.
"""
_api_internal._StageBind(self, ivar, thread_ivar)
def env_threads(self, threads):
"""Mark threads to be launched at the outer scope of composed op.
Parameters
----------
threads : list of threads
The threads to be launched.
"""
if isinstance(threads, IterVar):
threads = [threads]
_api_internal._StageEnvThreads(self, threads)
def set_store_predicate(self, predicate):
"""Set predicate under which store to the array can be performed.
Use this when there are duplicated threads doing the same store and we only
need one of them to do the store.
Parameters
----------
predicate : Expr
            The guard condition for the store.
"""
_api_internal._StageSetStorePredicate(self, predicate)
def compute_at(self, parent, scope):
"""Attach the stage at parent's scope
Parameters
----------
parent : Stage
The parent stage
scope : IterVar
            The loop scope to be attached to.
"""
_api_internal._StageComputeAt(self, parent, scope)
def compute_inline(self):
"""Mark stage as inline
Parameters
----------
parent : Stage
The parent stage
"""
_api_internal._StageComputeInline(self)
def compute_root(self):
"""Attach the stage at parent, and mark it as root
Parameters
----------
parent : Stage
The parent stage
"""
_api_internal._StageComputeRoot(self)
def reorder(self, *args):
"""reorder the arguments in the specified order.
Parameters
----------
args : list of IterVar
            The axes, listed in the desired order
"""
_api_internal._StageReorder(self, args)
def tile(self, x_parent, y_parent, x_factor, y_factor):
""" Perform tiling on two dimensions
        The final loop order, from outermost to innermost, is
[x_outer, y_outer, x_inner, y_inner]
Parameters
----------
x_parent : IterVar
The original x dimension
y_parent : IterVar
The original y dimension
x_factor : Expr
The stride factor on x axis
y_factor : Expr
The stride factor on y axis
Returns
-------
x_outer : IterVar
Outer axis of x dimension
y_outer : IterVar
Outer axis of y dimension
x_inner : IterVar
Inner axis of x dimension
        y_inner : IterVar
Inner axis of y dimension
"""
x_outer, y_outer, x_inner, y_inner = _api_internal._StageTile(
self, x_parent, y_parent, x_factor, y_factor)
return x_outer, y_outer, x_inner, y_inner
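    # A hedged usage sketch (not part of the original file): tile a 2-D
    # compute C into 32x32 blocks, assuming s = create_schedule(C.op):
    #   xo, yo, xi, yi = s[C].tile(C.op.axis[0], C.op.axis[1], 32, 32)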
def vectorize(self, var):
"""Vectorize the iteration.
Parameters
----------
var : IterVar
The iteration to be vectorize
"""
_api_internal._StageVectorize(self, var)
def tensorize(self, var, tensor_intrin):
"""Tensorize the computation enclosed by var with tensor_intrin
Parameters
----------
var : IterVar
The iteration boundary of tensorization.
tensor_intrin : TensorIntrin
The tensor intrinsic used for computation.
"""
_api_internal._StageTensorize(self, var, tensor_intrin)
def unroll(self, var):
"""Unroll the iteration.
Parameters
----------
var : IterVar
The iteration to be unrolled.
"""
_api_internal._StageUnroll(self, var)
def parallel(self, var):
"""Parallelize the iteration.
Parameters
----------
var : IterVar
The iteration to be parallelized.
"""
_api_internal._StageParallel(self, var)
def pragma(self, var, pragma_type, pragma_value=None):
"""Annotate the iteration with pragma
This will translate to a pragma_scope surrounding
the corresponding loop generated.
Useful to support experimental features and extensions.
Parameters
----------
var : IterVar
            The iteration to be annotated
pragma_type : str
The pragma string to be annotated
pragma_value : Expr, optional
The pragma value to pass along the pragma
Note
----
Most pragmas are advanced/experimental features
        and may be subject to change. List of supported pragmas:
- **debug_skip_region**
Force skip the region marked by the axis and turn it into no-op.
This is useful for debug purposes.
- **parallel_launch_point**
Specify to launch parallel threads outside the
specified iteration loop. By default the threads
launch at the point of parallel construct.
This pragma moves the launching point to even outer scope.
The threads are launched once and reused across multiple
parallel constructs as BSP style program.
- **parallel_barrier_when_finish**
Insert a synchronization barrier between working threads
after the specified loop iteration finishes.
- **parallel_stride_pattern**
Hint parallel loop to execute in strided pattern.
:code:`for (int i = task_id; i < end; i += num_task)`
"""
if isinstance(pragma_value, string_types):
pragma_value = convert(pragma_value)
_api_internal._StagePragma(self, var, pragma_type, pragma_value)
def prefetch(self, tensor, var, offset):
"""Prefetch the specified variable
Parameters
----------
tensor : Tensor
The tensor to be prefetched
var : IterVar
The loop point at which the prefetching is applied
offset : Expr
The number of iterations to be prefetched before actual execution
"""
_api_internal._StagePrefetch(self, tensor, var, offset)
def storage_align(self, axis, factor, offset):
"""Set alignment requirement for specific axis
This ensures that stride[axis] == k * factor + offset for some k.
        This is useful for setting a memory layout with a more friendly memory
access pattern. For example, we can set alignment to be
factor=2, offset=1 to avoid bank conflict for thread access on
higher dimension in GPU shared memory.
Parameters
----------
axis : IterVar
The axis dimension to be aligned.
factor : int
The factor in alignment specification.
offset : int
The offset in the alignment specification.
"""
_api_internal._StageStorageAlign(self, axis, factor, offset)
def double_buffer(self):
"""Compute the current stage via double buffering.
        This can only be applied to an intermediate stage.
This will double the storage cost of the current stage.
Can be useful to hide load latency.
"""
_api_internal._StageDoubleBuffer(self)
def opengl(self):
"""The special OpenGL schedule
Maps each output element to a pixel.
"""
_api_internal._StageOpenGL(self)
_init_api("tvm.schedule")
| apache-2.0 |
hlzz/dotfiles | graphics/VTK-7.0.0/Examples/Infovis/Python/circular_vertices.py | 2 | 1383 | #!/usr/bin/env python
"""
This file demonstrates drawing graphs using circular vertices via
vtkRenderedGraphRepresentation.
"""
from vtk import *
#------------------------------------------------------------------------------
# Script Entry Point
#------------------------------------------------------------------------------
if __name__ == "__main__":
# Create a random graph
source = vtkRandomGraphSource()
source.SetNumberOfVertices(15)
source.SetStartWithTree(True)
#----------------------------------------------------------
# Draw the graph in a window
theme = vtkViewTheme.CreateMellowTheme()
theme.SetLineWidth(4)
theme.SetPointSize(15)
theme.SetCellOpacity(1)
theme.FastDelete()
# Rendered graph representation to make vertices circles
rep = vtkRenderedGraphRepresentation()
rep.SetInputConnection(0, source.GetOutputPort())
# vtkGraphToGlyph::CIRCLE == 7
rep.SetGlyphType(7)
# View containing the merged graph
view = vtkGraphLayoutView()
view.SetRepresentation( rep )
view.SetVertexLabelArrayName("vertex id")
view.SetVertexLabelVisibility(True)
view.SetLayoutStrategyToSimple2D()
view.ApplyViewTheme(theme)
view.GetRenderWindow().SetSize(600, 600)
view.ResetCamera()
view.Render()
view.GetInteractor().Start()
| bsd-3-clause |
2uller/LotF | App/Lib/distutils/tests/test_dist.py | 4 | 16153 | # -*- coding: utf8 -*-
"""Tests for distutils.dist."""
import os
import StringIO
import sys
import unittest
import warnings
import textwrap
from distutils.dist import Distribution, fix_help_options
from distutils.cmd import Command
import distutils.dist
from test.test_support import TESTFN, captured_stdout, run_unittest
from distutils.tests import support
class test_dist(Command):
"""Sample distutils extension command."""
user_options = [
("sample-option=", "S", "help text"),
]
def initialize_options(self):
self.sample_option = None
class TestDistribution(Distribution):
"""Distribution subclasses that avoids the default search for
configuration files.
The ._config_files attribute must be set before
.parse_config_files() is called.
"""
def find_config_files(self):
return self._config_files
class DistributionTestCase(support.TempdirManager,
support.LoggingSilencer,
support.EnvironGuard,
unittest.TestCase):
def setUp(self):
super(DistributionTestCase, self).setUp()
self.argv = sys.argv, sys.argv[:]
del sys.argv[1:]
def tearDown(self):
sys.argv = self.argv[0]
sys.argv[:] = self.argv[1]
super(DistributionTestCase, self).tearDown()
def create_distribution(self, configfiles=()):
d = TestDistribution()
d._config_files = configfiles
d.parse_config_files()
d.parse_command_line()
return d
def test_debug_mode(self):
with open(TESTFN, "w") as f:
f.write("[global]\n")
f.write("command_packages = foo.bar, splat")
files = [TESTFN]
sys.argv.append("build")
with captured_stdout() as stdout:
self.create_distribution(files)
stdout.seek(0)
self.assertEqual(stdout.read(), '')
distutils.dist.DEBUG = True
try:
with captured_stdout() as stdout:
self.create_distribution(files)
stdout.seek(0)
self.assertEqual(stdout.read(), '')
finally:
distutils.dist.DEBUG = False
def test_command_packages_unspecified(self):
sys.argv.append("build")
d = self.create_distribution()
self.assertEqual(d.get_command_packages(), ["distutils.command"])
def test_command_packages_cmdline(self):
from distutils.tests.test_dist import test_dist
sys.argv.extend(["--command-packages",
"foo.bar,distutils.tests",
"test_dist",
"-Ssometext",
])
d = self.create_distribution()
# let's actually try to load our test command:
self.assertEqual(d.get_command_packages(),
["distutils.command", "foo.bar", "distutils.tests"])
cmd = d.get_command_obj("test_dist")
self.assertIsInstance(cmd, test_dist)
self.assertEqual(cmd.sample_option, "sometext")
def test_command_packages_configfile(self):
sys.argv.append("build")
self.addCleanup(os.unlink, TESTFN)
f = open(TESTFN, "w")
try:
print >> f, "[global]"
print >> f, "command_packages = foo.bar, splat"
finally:
f.close()
d = self.create_distribution([TESTFN])
self.assertEqual(d.get_command_packages(),
["distutils.command", "foo.bar", "splat"])
# ensure command line overrides config:
sys.argv[1:] = ["--command-packages", "spork", "build"]
d = self.create_distribution([TESTFN])
self.assertEqual(d.get_command_packages(),
["distutils.command", "spork"])
# Setting --command-packages to '' should cause the default to
# be used even if a config file specified something else:
sys.argv[1:] = ["--command-packages", "", "build"]
d = self.create_distribution([TESTFN])
self.assertEqual(d.get_command_packages(), ["distutils.command"])
def test_write_pkg_file(self):
# Check DistributionMetadata handling of Unicode fields
tmp_dir = self.mkdtemp()
my_file = os.path.join(tmp_dir, 'f')
klass = Distribution
dist = klass(attrs={'author': u'Mister Café',
'name': 'my.package',
'maintainer': u'Café Junior',
'description': u'Café torréfié',
'long_description': u'Héhéhé'})
# let's make sure the file can be written
# with Unicode fields. they are encoded with
# PKG_INFO_ENCODING
dist.metadata.write_pkg_file(open(my_file, 'w'))
# regular ascii is of course always usable
dist = klass(attrs={'author': 'Mister Cafe',
'name': 'my.package',
'maintainer': 'Cafe Junior',
'description': 'Cafe torrefie',
'long_description': 'Hehehe'})
my_file2 = os.path.join(tmp_dir, 'f2')
dist.metadata.write_pkg_file(open(my_file2, 'w'))
def test_empty_options(self):
# an empty options dictionary should not stay in the
# list of attributes
# catching warnings
warns = []
def _warn(msg):
warns.append(msg)
self.addCleanup(setattr, warnings, 'warn', warnings.warn)
warnings.warn = _warn
dist = Distribution(attrs={'author': 'xxx', 'name': 'xxx',
'version': 'xxx', 'url': 'xxxx',
'options': {}})
self.assertEqual(len(warns), 0)
self.assertNotIn('options', dir(dist))
def test_finalize_options(self):
attrs = {'keywords': 'one,two',
'platforms': 'one,two'}
dist = Distribution(attrs=attrs)
dist.finalize_options()
# finalize_option splits platforms and keywords
self.assertEqual(dist.metadata.platforms, ['one', 'two'])
self.assertEqual(dist.metadata.keywords, ['one', 'two'])
def test_get_command_packages(self):
dist = Distribution()
self.assertEqual(dist.command_packages, None)
cmds = dist.get_command_packages()
self.assertEqual(cmds, ['distutils.command'])
self.assertEqual(dist.command_packages,
['distutils.command'])
dist.command_packages = 'one,two'
cmds = dist.get_command_packages()
self.assertEqual(cmds, ['distutils.command', 'one', 'two'])
def test_announce(self):
# make sure the level is known
dist = Distribution()
args = ('ok',)
kwargs = {'level': 'ok2'}
self.assertRaises(ValueError, dist.announce, args, kwargs)
def test_find_config_files_disable(self):
# Ticket #1180: Allow user to disable their home config file.
temp_home = self.mkdtemp()
if os.name == 'posix':
user_filename = os.path.join(temp_home, ".pydistutils.cfg")
else:
user_filename = os.path.join(temp_home, "pydistutils.cfg")
with open(user_filename, 'w') as f:
f.write('[distutils]\n')
def _expander(path):
return temp_home
old_expander = os.path.expanduser
os.path.expanduser = _expander
try:
d = distutils.dist.Distribution()
all_files = d.find_config_files()
d = distutils.dist.Distribution(attrs={'script_args':
['--no-user-cfg']})
files = d.find_config_files()
finally:
os.path.expanduser = old_expander
# make sure --no-user-cfg disables the user cfg file
self.assertEqual(len(all_files)-1, len(files))
class MetadataTestCase(support.TempdirManager, support.EnvironGuard,
unittest.TestCase):
def setUp(self):
super(MetadataTestCase, self).setUp()
self.argv = sys.argv, sys.argv[:]
def tearDown(self):
sys.argv = self.argv[0]
sys.argv[:] = self.argv[1]
super(MetadataTestCase, self).tearDown()
def test_classifier(self):
attrs = {'name': 'Boa', 'version': '3.0',
'classifiers': ['Programming Language :: Python :: 3']}
dist = Distribution(attrs)
meta = self.format_metadata(dist)
self.assertIn('Metadata-Version: 1.1', meta)
def test_download_url(self):
attrs = {'name': 'Boa', 'version': '3.0',
'download_url': 'http://example.org/boa'}
dist = Distribution(attrs)
meta = self.format_metadata(dist)
self.assertIn('Metadata-Version: 1.1', meta)
def test_long_description(self):
long_desc = textwrap.dedent("""\
example::
We start here
and continue here
and end here.""")
attrs = {"name": "package",
"version": "1.0",
"long_description": long_desc}
dist = Distribution(attrs)
meta = self.format_metadata(dist)
meta = meta.replace('\n' + 8 * ' ', '\n')
self.assertIn(long_desc, meta)
def test_simple_metadata(self):
attrs = {"name": "package",
"version": "1.0"}
dist = Distribution(attrs)
meta = self.format_metadata(dist)
self.assertIn("Metadata-Version: 1.0", meta)
self.assertNotIn("provides:", meta.lower())
self.assertNotIn("requires:", meta.lower())
self.assertNotIn("obsoletes:", meta.lower())
def test_provides(self):
attrs = {"name": "package",
"version": "1.0",
"provides": ["package", "package.sub"]}
dist = Distribution(attrs)
self.assertEqual(dist.metadata.get_provides(),
["package", "package.sub"])
self.assertEqual(dist.get_provides(),
["package", "package.sub"])
meta = self.format_metadata(dist)
self.assertIn("Metadata-Version: 1.1", meta)
self.assertNotIn("requires:", meta.lower())
self.assertNotIn("obsoletes:", meta.lower())
def test_provides_illegal(self):
self.assertRaises(ValueError, Distribution,
{"name": "package",
"version": "1.0",
"provides": ["my.pkg (splat)"]})
def test_requires(self):
attrs = {"name": "package",
"version": "1.0",
"requires": ["other", "another (==1.0)"]}
dist = Distribution(attrs)
self.assertEqual(dist.metadata.get_requires(),
["other", "another (==1.0)"])
self.assertEqual(dist.get_requires(),
["other", "another (==1.0)"])
meta = self.format_metadata(dist)
self.assertIn("Metadata-Version: 1.1", meta)
self.assertNotIn("provides:", meta.lower())
self.assertIn("Requires: other", meta)
self.assertIn("Requires: another (==1.0)", meta)
self.assertNotIn("obsoletes:", meta.lower())
def test_requires_illegal(self):
self.assertRaises(ValueError, Distribution,
{"name": "package",
"version": "1.0",
"requires": ["my.pkg (splat)"]})
def test_obsoletes(self):
attrs = {"name": "package",
"version": "1.0",
"obsoletes": ["other", "another (<1.0)"]}
dist = Distribution(attrs)
self.assertEqual(dist.metadata.get_obsoletes(),
["other", "another (<1.0)"])
self.assertEqual(dist.get_obsoletes(),
["other", "another (<1.0)"])
meta = self.format_metadata(dist)
self.assertIn("Metadata-Version: 1.1", meta)
self.assertNotIn("provides:", meta.lower())
self.assertNotIn("requires:", meta.lower())
self.assertIn("Obsoletes: other", meta)
self.assertIn("Obsoletes: another (<1.0)", meta)
def test_obsoletes_illegal(self):
self.assertRaises(ValueError, Distribution,
{"name": "package",
"version": "1.0",
"obsoletes": ["my.pkg (splat)"]})
def format_metadata(self, dist):
sio = StringIO.StringIO()
dist.metadata.write_pkg_file(sio)
return sio.getvalue()
def test_custom_pydistutils(self):
# fixes #2166
# make sure pydistutils.cfg is found
if os.name == 'posix':
user_filename = ".pydistutils.cfg"
else:
user_filename = "pydistutils.cfg"
temp_dir = self.mkdtemp()
user_filename = os.path.join(temp_dir, user_filename)
f = open(user_filename, 'w')
try:
f.write('.')
finally:
f.close()
try:
dist = Distribution()
# linux-style
if sys.platform in ('linux', 'darwin'):
os.environ['HOME'] = temp_dir
files = dist.find_config_files()
self.assertIn(user_filename, files)
# win32-style
if sys.platform == 'win32':
# home drive should be found
os.environ['HOME'] = temp_dir
files = dist.find_config_files()
self.assertIn(user_filename, files,
'%r not found in %r' % (user_filename, files))
finally:
os.remove(user_filename)
def test_fix_help_options(self):
help_tuples = [('a', 'b', 'c', 'd'), (1, 2, 3, 4)]
fancy_options = fix_help_options(help_tuples)
self.assertEqual(fancy_options[0], ('a', 'b', 'c'))
self.assertEqual(fancy_options[1], (1, 2, 3))
def test_show_help(self):
# smoke test, just makes sure some help is displayed
dist = Distribution()
sys.argv = []
dist.help = 1
dist.script_name = 'setup.py'
with captured_stdout() as s:
dist.parse_command_line()
output = [line for line in s.getvalue().split('\n')
if line.strip() != '']
self.assertTrue(output)
def test_read_metadata(self):
attrs = {"name": "package",
"version": "1.0",
"long_description": "desc",
"description": "xxx",
"download_url": "http://example.com",
"keywords": ['one', 'two'],
"requires": ['foo']}
dist = Distribution(attrs)
metadata = dist.metadata
# write it then reloads it
PKG_INFO = StringIO.StringIO()
metadata.write_pkg_file(PKG_INFO)
PKG_INFO.seek(0)
metadata.read_pkg_file(PKG_INFO)
self.assertEqual(metadata.name, "package")
self.assertEqual(metadata.version, "1.0")
self.assertEqual(metadata.description, "xxx")
self.assertEqual(metadata.download_url, 'http://example.com')
self.assertEqual(metadata.keywords, ['one', 'two'])
self.assertEqual(metadata.platforms, ['UNKNOWN'])
self.assertEqual(metadata.obsoletes, None)
self.assertEqual(metadata.requires, ['foo'])
def test_suite():
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(DistributionTestCase))
suite.addTest(unittest.makeSuite(MetadataTestCase))
return suite
if __name__ == "__main__":
run_unittest(test_suite())
| gpl-2.0 |
tillahoffmann/tensorflow | tensorflow/contrib/tensor_forest/hybrid/python/hybrid_model.py | 138 | 4774 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Defines the model abstraction for hybrid models."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
from tensorflow.contrib import layers
from tensorflow.contrib.framework.python.ops import variables as framework_variables
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn_ops
from tensorflow.python.ops import variables
from tensorflow.python.training import adagrad
class HybridModel(object):
"""Defines a hybrid model.
Models chain together the results of inference layers and provide training
capabilities.
"""
# pylint: disable=unused-argument
def __init__(self,
params,
device_assigner=None,
optimizer_class=adagrad.AdagradOptimizer,
**kwargs):
self.device_assigner = (
device_assigner or framework_variables.VariableDeviceChooser())
self.params = params
self.optimizer = optimizer_class(self.params.learning_rate)
self.is_regression = params.regression
self.regularizer = None
if params.regularization == "l1":
self.regularizer = layers.l1_regularizer(
self.params.regularization_strength)
elif params.regularization == "l2":
self.regularizer = layers.l2_regularizer(
self.params.regularization_strength)
def _do_layer_inference(self, layer, data):
# If this is a collection of layers, return the mean of their inference
# results.
if isinstance(layer, collections.Iterable):
return math_ops.reduce_mean(
array_ops.stack([l.inference_graph(data) for l in layer]), 0)
# If this is a single layer, return its inference result.
else:
return layer.inference_graph(data)
def _base_inference(self, data, data_spec=None):
"""Returns an op that performs inference without a softmax."""
inference_result = self._do_layer_inference(self.layers[0], data)
for layer in self.layers[1:]:
inference_result = self._do_layer_inference(layer, inference_result)
output_size = 1 if self.is_regression else self.params.num_classes
output = layers.fully_connected(
inference_result, output_size, activation_fn=array_ops.identity)
return output
def inference_graph(self, data, data_spec=None):
"""Returns the op that performs inference on a batch of data."""
return nn_ops.softmax(self._base_inference(data, data_spec=data_spec))
def training_inference_graph(self, data, data_spec=None):
"""Returns an inference-without-softmax op for training purposes."""
return self._base_inference(data, data_spec=data_spec)
def predict_proba(self, data, data_spec=None):
inference_result = self.inference_graph(data, data_spec=data_spec)
probabilities = nn_ops.softmax(inference_result, name="probabilities")
return probabilities
def training_graph(self, data, labels, data_spec=None, epoch=None):
"""Returns the op that trains the hybrid model."""
return self.optimizer.minimize(self.training_loss(data, labels))
def loss(self, data, labels):
"""The loss to minimize while training."""
if self.is_regression:
diff = self.training_inference_graph(data) - math_ops.to_float(labels)
mean_squared_error = math_ops.reduce_mean(diff * diff)
root_mean_squared_error = math_ops.sqrt(mean_squared_error, name="loss")
loss = root_mean_squared_error
else:
loss = math_ops.reduce_mean(
nn_ops.sparse_softmax_cross_entropy_with_logits(
labels=array_ops.squeeze(math_ops.to_int32(labels)),
logits=self.training_inference_graph(data)),
name="loss")
if self.regularizer:
loss += layers.apply_regularization(self.regularizer,
variables.trainable_variables())
return loss
def training_loss(self, data, labels):
return self.loss(data, labels)
def validation_loss(self, data, labels):
return self.loss(data, labels)
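# A hedged usage sketch (not part of the original file): a concrete subclass
# is expected to define self.layers before inference; the names below are
# illustrative only.
#   model = MyHybridModel(params)                  # subclass defining .layers
#   train_op = model.training_graph(data, labels)  # minimizes model.loss
#   probs = model.predict_proba(data)              # softmax class probabilities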
| apache-2.0 |
vmarkovtsev/django | tests/utils_tests/test_safestring.py | 278 | 3677 | from __future__ import unicode_literals
from django.template import Context, Template
from django.test import SimpleTestCase
from django.utils import html, six, text
from django.utils.encoding import force_bytes, force_text
from django.utils.functional import lazy
from django.utils.safestring import (
EscapeData, SafeData, mark_for_escaping, mark_safe,
)
lazystr = lazy(force_text, six.text_type)
lazybytes = lazy(force_bytes, bytes)
class customescape(six.text_type):
def __html__(self):
# implement specific and obviously wrong escaping
# in order to be able to tell for sure when it runs
return self.replace('<', '<<').replace('>', '>>')
class SafeStringTest(SimpleTestCase):
def assertRenderEqual(self, tpl, expected, **context):
context = Context(context)
tpl = Template(tpl)
self.assertEqual(tpl.render(context), expected)
def test_mark_safe(self):
s = mark_safe('a&b')
self.assertRenderEqual('{{ s }}', 'a&b', s=s)
self.assertRenderEqual('{{ s|force_escape }}', 'a&b', s=s)
def test_mark_safe_object_implementing_dunder_html(self):
e = customescape('<a&b>')
s = mark_safe(e)
self.assertIs(s, e)
self.assertRenderEqual('{{ s }}', '<<a&b>>', s=s)
self.assertRenderEqual('{{ s|force_escape }}', '<a&b>', s=s)
def test_mark_safe_lazy(self):
s = lazystr('a&b')
b = lazybytes(b'a&b')
self.assertIsInstance(mark_safe(s), SafeData)
self.assertIsInstance(mark_safe(b), SafeData)
self.assertRenderEqual('{{ s }}', 'a&b', s=mark_safe(s))
def test_mark_safe_object_implementing_dunder_str(self):
class Obj(object):
def __str__(self):
return '<obj>'
s = mark_safe(Obj())
self.assertRenderEqual('{{ s }}', '<obj>', s=s)
def test_mark_safe_result_implements_dunder_html(self):
self.assertEqual(mark_safe('a&b').__html__(), 'a&b')
def test_mark_safe_lazy_result_implements_dunder_html(self):
self.assertEqual(mark_safe(lazystr('a&b')).__html__(), 'a&b')
def test_mark_for_escaping(self):
s = mark_for_escaping('a&b')
self.assertRenderEqual('{{ s }}', 'a&b', s=s)
self.assertRenderEqual('{{ s }}', 'a&b', s=mark_for_escaping(s))
def test_mark_for_escaping_object_implementing_dunder_html(self):
e = customescape('<a&b>')
s = mark_for_escaping(e)
self.assertIs(s, e)
self.assertRenderEqual('{{ s }}', '<<a&b>>', s=s)
self.assertRenderEqual('{{ s|force_escape }}', '<a&b>', s=s)
def test_mark_for_escaping_lazy(self):
s = lazystr('a&b')
b = lazybytes(b'a&b')
self.assertIsInstance(mark_for_escaping(s), EscapeData)
self.assertIsInstance(mark_for_escaping(b), EscapeData)
self.assertRenderEqual('{% autoescape off %}{{ s }}{% endautoescape %}', 'a&b', s=mark_for_escaping(s))
def test_mark_for_escaping_object_implementing_dunder_str(self):
class Obj(object):
def __str__(self):
return '<obj>'
s = mark_for_escaping(Obj())
self.assertRenderEqual('{{ s }}', '<obj>', s=s)
def test_add_lazy_safe_text_and_safe_text(self):
s = html.escape(lazystr('a'))
s += mark_safe('&b')
self.assertRenderEqual('{{ s }}', 'a&b', s=s)
s = html.escapejs(lazystr('a'))
s += mark_safe('&b')
self.assertRenderEqual('{{ s }}', 'a&b', s=s)
s = text.slugify(lazystr('a'))
s += mark_safe('&b')
self.assertRenderEqual('{{ s }}', 'a&b', s=s)
| bsd-3-clause |
JimCircadian/ansible | lib/ansible/module_utils/network/exos/exos.py | 57 | 3764 | # This code is part of Ansible, but is an independent component.
# This particular file snippet, and this file snippet only, is BSD licensed.
# Modules you write using this snippet, which is embedded dynamically by Ansible
# still belong to the author of the module, and may assign their own license
# to the complete work.
#
# (c) 2016 Red Hat Inc.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
import json
from ansible.module_utils._text import to_text
from ansible.module_utils.basic import env_fallback, return_values
from ansible.module_utils.network.common.utils import to_list
from ansible.module_utils.connection import Connection
_DEVICE_CONFIGS = {}
def get_connection(module):
if hasattr(module, '_exos_connection'):
return module._exos_connection
capabilities = get_capabilities(module)
network_api = capabilities.get('network_api')
if network_api == 'cliconf':
module._exos_connection = Connection(module._socket_path)
else:
module.fail_json(msg='Invalid connection type %s' % network_api)
return module._exos_connection
def get_capabilities(module):
if hasattr(module, '_exos_capabilities'):
return module._exos_capabilities
capabilities = Connection(module._socket_path).get_capabilities()
module._exos_capabilities = json.loads(capabilities)
return module._exos_capabilities
def get_config(module, flags=None):
global _DEVICE_CONFIGS
if _DEVICE_CONFIGS != {}:
return _DEVICE_CONFIGS
else:
connection = get_connection(module)
out = connection.get_config()
cfg = to_text(out, errors='surrogate_then_replace').strip()
_DEVICE_CONFIGS = cfg
return cfg
def run_commands(module, commands, check_rc=True):
responses = list()
connection = get_connection(module)
for cmd in to_list(commands):
if isinstance(cmd, dict):
command = cmd['command']
prompt = cmd['prompt']
answer = cmd['answer']
else:
command = cmd
prompt = None
answer = None
out = connection.get(command, prompt, answer)
try:
out = to_text(out, errors='surrogate_or_strict')
except UnicodeError:
module.fail_json(msg=u'Failed to decode output from %s: %s' % (cmd, to_text(out)))
responses.append(out)
return responses
def load_config(module, commands):
connection = get_connection(module)
out = connection.edit_config(commands)
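# Illustrative only -- inside an EXOS module's main(), these helpers are
# typically chained roughly as follows (the command strings are hypothetical):
#
#   responses = run_commands(module, ['show version'])
#   config = get_config(module)
#   load_config(module, ['configure snmp sysName "switch01"'])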
| gpl-3.0 |
faywong/FFPlayer | project/jni/python/src/Lib/test/test_defaultdict.py | 51 | 5532 | """Unit tests for collections.defaultdict."""
import os
import copy
import tempfile
import unittest
from test import test_support
from collections import defaultdict
def foobar():
return list
class TestDefaultDict(unittest.TestCase):
def test_basic(self):
d1 = defaultdict()
self.assertEqual(d1.default_factory, None)
d1.default_factory = list
d1[12].append(42)
self.assertEqual(d1, {12: [42]})
d1[12].append(24)
self.assertEqual(d1, {12: [42, 24]})
d1[13]
d1[14]
self.assertEqual(d1, {12: [42, 24], 13: [], 14: []})
self.assert_(d1[12] is not d1[13] is not d1[14])
d2 = defaultdict(list, foo=1, bar=2)
self.assertEqual(d2.default_factory, list)
self.assertEqual(d2, {"foo": 1, "bar": 2})
self.assertEqual(d2["foo"], 1)
self.assertEqual(d2["bar"], 2)
self.assertEqual(d2[42], [])
self.assert_("foo" in d2)
self.assert_("foo" in d2.keys())
self.assert_("bar" in d2)
self.assert_("bar" in d2.keys())
self.assert_(42 in d2)
self.assert_(42 in d2.keys())
self.assert_(12 not in d2)
self.assert_(12 not in d2.keys())
d2.default_factory = None
self.assertEqual(d2.default_factory, None)
try:
d2[15]
except KeyError, err:
self.assertEqual(err.args, (15,))
else:
self.fail("d2[15] didn't raise KeyError")
self.assertRaises(TypeError, defaultdict, 1)
def test_missing(self):
d1 = defaultdict()
self.assertRaises(KeyError, d1.__missing__, 42)
d1.default_factory = list
self.assertEqual(d1.__missing__(42), [])
def test_repr(self):
d1 = defaultdict()
self.assertEqual(d1.default_factory, None)
self.assertEqual(repr(d1), "defaultdict(None, {})")
d1[11] = 41
self.assertEqual(repr(d1), "defaultdict(None, {11: 41})")
d2 = defaultdict(int)
self.assertEqual(d2.default_factory, int)
d2[12] = 42
self.assertEqual(repr(d2), "defaultdict(<type 'int'>, {12: 42})")
def foo(): return 43
d3 = defaultdict(foo)
self.assert_(d3.default_factory is foo)
d3[13]
self.assertEqual(repr(d3), "defaultdict(%s, {13: 43})" % repr(foo))
def test_print(self):
d1 = defaultdict()
def foo(): return 42
d2 = defaultdict(foo, {1: 2})
# NOTE: We can't use tempfile.[Named]TemporaryFile since this
# code must exercise the tp_print C code, which only gets
# invoked for *real* files.
tfn = tempfile.mktemp()
try:
f = open(tfn, "w+")
try:
print >>f, d1
print >>f, d2
f.seek(0)
self.assertEqual(f.readline(), repr(d1) + "\n")
self.assertEqual(f.readline(), repr(d2) + "\n")
finally:
f.close()
finally:
os.remove(tfn)
def test_copy(self):
d1 = defaultdict()
d2 = d1.copy()
self.assertEqual(type(d2), defaultdict)
self.assertEqual(d2.default_factory, None)
self.assertEqual(d2, {})
d1.default_factory = list
d3 = d1.copy()
self.assertEqual(type(d3), defaultdict)
self.assertEqual(d3.default_factory, list)
self.assertEqual(d3, {})
d1[42]
d4 = d1.copy()
self.assertEqual(type(d4), defaultdict)
self.assertEqual(d4.default_factory, list)
self.assertEqual(d4, {42: []})
d4[12]
self.assertEqual(d4, {42: [], 12: []})
def test_shallow_copy(self):
d1 = defaultdict(foobar, {1: 1})
d2 = copy.copy(d1)
self.assertEqual(d2.default_factory, foobar)
self.assertEqual(d2, d1)
d1.default_factory = list
d2 = copy.copy(d1)
self.assertEqual(d2.default_factory, list)
self.assertEqual(d2, d1)
def test_deep_copy(self):
d1 = defaultdict(foobar, {1: [1]})
d2 = copy.deepcopy(d1)
self.assertEqual(d2.default_factory, foobar)
self.assertEqual(d2, d1)
self.assert_(d1[1] is not d2[1])
d1.default_factory = list
d2 = copy.deepcopy(d1)
self.assertEqual(d2.default_factory, list)
self.assertEqual(d2, d1)
def test_keyerror_without_factory(self):
d1 = defaultdict()
try:
d1[(1,)]
except KeyError, err:
self.assertEqual(err.args[0], (1,))
else:
self.fail("expected KeyError")
def test_recursive_repr(self):
# Issue2045: stack overflow when default_factory is a bound method
class sub(defaultdict):
def __init__(self):
self.default_factory = self._factory
def _factory(self):
return []
d = sub()
self.assert_(repr(d).startswith(
"defaultdict(<bound method sub._factory of defaultdict(..."))
# NOTE: printing a subclass of a builtin type does not call its
# tp_print slot. So this part is essentially the same test as above.
tfn = tempfile.mktemp()
try:
f = open(tfn, "w+")
try:
print >>f, d
finally:
f.close()
finally:
os.remove(tfn)
def test_main():
test_support.run_unittest(TestDefaultDict)
if __name__ == "__main__":
test_main()
| lgpl-2.1 |
jjack15/CS402-Project-UTK | OnlinePythonTutor/v1-v2/cgi-bin/pg_logger.py | 1 | 10968 | # Online Python Tutor
# https://github.com/pgbovine/OnlinePythonTutor/
#
# Copyright (C) 2010-2012 Philip J. Guo (philip@pgbovine.net)
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
# This is the meat of the Online Python Tutor back-end. It implements a
# full logger for Python program execution (based on pdb, the standard
# Python debugger imported via the bdb module), printing out the values
# of all in-scope data structures after each executed instruction.
# Note that I've only tested this logger on Python 2.6, so it will
# probably fail in subtle ways on other Python 2.X (and will DEFINITELY
# fail on Python 3.X).
# upper-bound on the number of executed lines, in order to guard against
# infinite loops
MAX_EXECUTED_LINES = 200
def set_max_executed_lines(m):
global MAX_EXECUTED_LINES
MAX_EXECUTED_LINES = m
import sys
import bdb # the KEY import here!
import os
import re
import traceback
import cStringIO
import pg_encoder
IGNORE_VARS = set(('__stdout__', '__builtins__', '__name__', '__exception__'))
def get_user_stdout(frame):
return frame.f_globals['__stdout__'].getvalue()
def get_user_globals(frame):
d = filter_var_dict(frame.f_globals)
# also filter out __return__ for globals only, but NOT for locals
if '__return__' in d:
del d['__return__']
return d
def get_user_locals(frame):
return filter_var_dict(frame.f_locals)
def filter_var_dict(d):
ret = {}
for (k,v) in d.iteritems():
if k not in IGNORE_VARS:
ret[k] = v
return ret
class PGLogger(bdb.Bdb):
def __init__(self, finalizer_func, ignore_id=False):
bdb.Bdb.__init__(self)
self.mainpyfile = ''
self._wait_for_mainpyfile = 0
# a function that takes the output trace as a parameter and
# processes it
self.finalizer_func = finalizer_func
# each entry contains a dict with the information for a single
# executed line
self.trace = []
# don't print out a custom ID for each object
# (for regression testing)
self.ignore_id = ignore_id
def reset(self):
bdb.Bdb.reset(self)
self.forget()
def forget(self):
self.lineno = None
self.stack = []
self.curindex = 0
self.curframe = None
def setup(self, f, t):
self.forget()
self.stack, self.curindex = self.get_stack(f, t)
self.curframe = self.stack[self.curindex][0]
# Override Bdb methods
def user_call(self, frame, argument_list):
"""This method is called when there is the remote possibility
that we ever need to stop in this function."""
if self._wait_for_mainpyfile:
return
if self.stop_here(frame):
self.interaction(frame, None, 'call')
def user_line(self, frame):
"""This function is called when we stop or break at this line."""
if self._wait_for_mainpyfile:
if (self.canonic(frame.f_code.co_filename) != "<string>" or
frame.f_lineno <= 0):
return
self._wait_for_mainpyfile = 0
self.interaction(frame, None, 'step_line')
def user_return(self, frame, return_value):
"""This function is called when a return trap is set here."""
frame.f_locals['__return__'] = return_value
self.interaction(frame, None, 'return')
def user_exception(self, frame, exc_info):
exc_type, exc_value, exc_traceback = exc_info
"""This function is called if an exception occurs,
but only if we are to stop at or just below this level."""
frame.f_locals['__exception__'] = exc_type, exc_value
if type(exc_type) == type(''):
exc_type_name = exc_type
else: exc_type_name = exc_type.__name__
self.interaction(frame, exc_traceback, 'exception')
# General interaction function
def interaction(self, frame, traceback, event_type):
self.setup(frame, traceback)
tos = self.stack[self.curindex]
lineno = tos[1]
# each element is a pair of (function name, ENCODED locals dict)
encoded_stack_locals = []
# climb up until you find '<module>', which is (hopefully) the global scope
i = self.curindex
while True:
cur_frame = self.stack[i][0]
cur_name = cur_frame.f_code.co_name
if cur_name == '<module>':
break
# special case for lambdas - grab their line numbers too
if cur_name == '<lambda>':
cur_name = 'lambda on line ' + str(cur_frame.f_code.co_firstlineno)
elif cur_name == '':
cur_name = 'unnamed function'
# encode in a JSON-friendly format now, in order to prevent ill
# effects of aliasing later down the line ...
encoded_locals = {}
for (k, v) in get_user_locals(cur_frame).iteritems():
# don't display some built-in locals ...
if k != '__module__':
encoded_locals[k] = pg_encoder.encode(v, self.ignore_id)
encoded_stack_locals.append((cur_name, encoded_locals))
i -= 1
# encode in a JSON-friendly format now, in order to prevent ill
# effects of aliasing later down the line ...
encoded_globals = {}
for (k, v) in get_user_globals(tos[0]).iteritems():
encoded_globals[k] = pg_encoder.encode(v, self.ignore_id)
trace_entry = dict(line=lineno,
event=event_type,
func_name=tos[0].f_code.co_name,
globals=encoded_globals,
stack_locals=encoded_stack_locals,
stdout=get_user_stdout(tos[0]))
# if there's an exception, then record its info:
if event_type == 'exception':
# always check in f_locals
exc = frame.f_locals['__exception__']
trace_entry['exception_msg'] = exc[0].__name__ + ': ' + str(exc[1])
self.trace.append(trace_entry)
if len(self.trace) >= MAX_EXECUTED_LINES:
self.trace.append(dict(event='instruction_limit_reached', exception_msg='(stopped after ' + str(MAX_EXECUTED_LINES) + ' steps to prevent possible infinite loop)'))
self.force_terminate()
self.forget()
def _runscript(self, script_str):
# When bdb sets tracing, a number of call and line events happens
# BEFORE debugger even reaches user's code (and the exact sequence of
# events depends on python version). So we take special measures to
# avoid stopping before we reach the main script (see user_line and
# user_call for details).
self._wait_for_mainpyfile = 1
# ok, let's try to sorta 'sandbox' the user script by not
# allowing certain potentially dangerous operations:
user_builtins = {}
for (k,v) in __builtins__.iteritems():
if k in ('reload', 'input', 'apply', 'open', 'compile',
'__import__', 'file', 'eval', 'execfile',
'exit', 'quit', 'raw_input',
'dir', 'globals', 'locals', 'vars',
'compile'):
continue
user_builtins[k] = v
# redirect stdout of the user program to a memory buffer
user_stdout = cStringIO.StringIO()
sys.stdout = user_stdout
user_globals = {"__name__" : "__main__",
"__builtins__" : user_builtins,
"__stdout__" : user_stdout}
try:
self.run(script_str, user_globals, user_globals)
# sys.exit ...
except SystemExit:
sys.exit(0)
except:
#traceback.print_exc() # uncomment this to see the REAL exception msg
trace_entry = dict(event='uncaught_exception')
exc = sys.exc_info()[1]
if hasattr(exc, 'lineno'):
trace_entry['line'] = exc.lineno
if hasattr(exc, 'offset'):
trace_entry['offset'] = exc.offset
if hasattr(exc, 'msg'):
trace_entry['exception_msg'] = "Error: " + exc.msg
else:
trace_entry['exception_msg'] = "Unknown error"
self.trace.append(trace_entry)
self.finalize()
sys.exit(0) # need to forcibly STOP execution
def force_terminate(self):
self.finalize()
sys.exit(0) # need to forcibly STOP execution
def finalize(self):
sys.stdout = sys.__stdout__
assert len(self.trace) <= (MAX_EXECUTED_LINES + 1)
# filter all entries after 'return' from '<module>', since they
# seem extraneous:
res = []
for e in self.trace:
res.append(e)
if e['event'] == 'return' and e['func_name'] == '<module>':
break
# another hack: if the SECOND to last entry is an 'exception'
# and the last entry is return from <module>, then axe the last
# entry, for aesthetic reasons :)
if len(res) >= 2 and \
res[-2]['event'] == 'exception' and \
res[-1]['event'] == 'return' and res[-1]['func_name'] == '<module>':
res.pop()
self.trace = res
#for e in self.trace: print e
self.finalizer_func(self.trace)
# the MAIN meaty function!!!
def exec_script_str(script_str, finalizer_func, ignore_id=False):
logger = PGLogger(finalizer_func, ignore_id)
logger._runscript(script_str)
logger.finalize()
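# A minimal sketch of a finalizer callback (a hypothetical helper, not part
# of the original API). exec_script_str() hands the completed trace list to
# whatever callable is supplied, so serializing it to JSON is one natural
# choice for a web back-end.
def dump_trace_as_json(output_lst):
    import json  # stdlib; imported locally since the module doesn't use it elsewhere
    print json.dumps(output_lst)
# e.g.: exec_script_str("x = 1\ny = x + 1", dump_trace_as_json)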
def exec_file_and_pretty_print(mainpyfile):
import pprint
if not os.path.exists(mainpyfile):
print 'Error:', mainpyfile, 'does not exist'
sys.exit(1)
def pretty_print(output_lst):
for e in output_lst:
pprint.pprint(e)
output_lst = exec_script_str(open(mainpyfile).read(), pretty_print)
if __name__ == '__main__':
# need this round-about import to get __builtins__ to work :0
import pg_logger
pg_logger.exec_file_and_pretty_print(sys.argv[1])
| agpl-3.0 |
heeraj123/oh-mainline | mysite/profile/migrations/0031_remove_dead_things.py | 17 | 7035 | # This file is part of OpenHatch.
# Copyright (C) 2009 OpenHatch, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from south.db import db
from django.db import models
from mysite.profile.models import *
class Migration:
def forwards(self, orm):
# Deleting field 'Link_ProjectExp_Tag.time_record_was_created'
db.delete_column('profile_link_projectexp_tag', 'time_record_was_created')
# Deleting field 'Link_Project_Tag.time_record_was_created'
db.delete_column('profile_link_project_tag', 'time_record_was_created')
# Deleting field 'Link_Person_Tag.time_record_was_created'
db.delete_column('profile_link_person_tag', 'time_record_was_created')
# Deleting field 'ProjectExp.last_touched'
db.delete_column('profile_projectexp', 'last_touched')
# Deleting field 'ProjectExp.time_record_was_created'
db.delete_column('profile_projectexp', 'time_record_was_created')
def backwards(self, orm):
# Adding field 'Link_ProjectExp_Tag.time_record_was_created'
db.add_column('profile_link_projectexp_tag', 'time_record_was_created', models.DateTimeField(default=datetime.datetime(2009, 7, 10, 15, 24, 43, 964291)))
# Adding field 'Link_Project_Tag.time_record_was_created'
db.add_column('profile_link_project_tag', 'time_record_was_created', models.DateTimeField(default=datetime.datetime(2009, 7, 10, 15, 24, 45, 653482)))
# Adding field 'Link_Person_Tag.time_record_was_created'
db.add_column('profile_link_person_tag', 'time_record_was_created', models.DateTimeField(default=datetime.datetime(2009, 7, 10, 15, 24, 44, 730680)))
# Adding field 'ProjectExp.last_touched'
db.add_column('profile_projectexp', 'last_touched', models.DateTimeField(null=True))
# Adding field 'ProjectExp.time_record_was_created'
db.add_column('profile_projectexp', 'time_record_was_created', models.DateTimeField(null=True))
models = {
'profile.person': {
'gotten_name_from_ohloh': ('models.BooleanField', [], {'default': 'False'}),
'id': ('models.AutoField', [], {'primary_key': 'True'}),
'interested_in_working_on': ('models.CharField', [], {'default': "''", 'max_length': '1024'}),
'ohloh_grab_completed': ('models.BooleanField', [], {'default': 'False'}),
'poll_on_next_web_view': ('models.BooleanField', [], {'default': 'True'}),
'user': ('models.ForeignKey', ["orm['auth.User']"], {'unique': 'True'})
},
'profile.link_person_tag': {
'id': ('models.AutoField', [], {'primary_key': 'True'}),
'person': ('models.ForeignKey', ["orm['profile.Person']"], {}),
'source': ('models.CharField', [], {'max_length': '200'}),
'tag': ('models.ForeignKey', ["orm['profile.Tag']"], {})
},
'profile.tag': {
'id': ('models.AutoField', [], {'primary_key': 'True'}),
'tag_type': ('models.ForeignKey', ["orm['profile.TagType']"], {}),
'text': ('models.CharField', [], {'max_length': '50'})
},
'profile.link_projectexp_tag': {
'Meta': {'unique_together': "[('tag','project_exp','source'),]"},
'favorite': ('models.BooleanField', [], {'default': 'False'}),
'id': ('models.AutoField', [], {'primary_key': 'True'}),
'project_exp': ('models.ForeignKey', ["orm['profile.ProjectExp']"], {}),
'source': ('models.CharField', [], {'max_length': '200'}),
'tag': ('models.ForeignKey', ["orm['profile.Tag']"], {})
},
'profile.sourceforgeperson': {
'id': ('models.AutoField', [], {'primary_key': 'True'}),
'username': ('models.CharField', [], {'max_length': '200'})
},
'profile.link_sf_proj_dude_fm': {
'Meta': {'unique_together': "[('person','project'),]"},
'date_collected': ('models.DateTimeField', [], {}),
'id': ('models.AutoField', [], {'primary_key': 'True'}),
'is_admin': ('models.BooleanField', [], {'default': 'False'}),
'person': ('models.ForeignKey', ["orm['profile.SourceForgePerson']"], {}),
'position': ('models.CharField', [], {'max_length': '200'}),
'project': ('models.ForeignKey', ["orm['profile.SourceForgeProject']"], {})
},
'profile.sourceforgeproject': {
'id': ('models.AutoField', [], {'primary_key': 'True'}),
'unixname': ('models.CharField', [], {'max_length': '200'})
},
'search.project': {
'_stub': True,
'id': ('models.AutoField', [], {'primary_key': 'True'})
},
'auth.user': {
'_stub': True,
'id': ('models.AutoField', [], {'primary_key': 'True'})
},
'profile.link_project_tag': {
'id': ('models.AutoField', [], {'primary_key': 'True'}),
'project': ('models.ForeignKey', ["orm['search.Project']"], {}),
'source': ('models.CharField', [], {'max_length': '200'}),
'tag': ('models.ForeignKey', ["orm['profile.Tag']"], {})
},
'profile.tagtype': {
'id': ('models.AutoField', [], {'primary_key': 'True'}),
'name': ('models.CharField', [], {'max_length': '100'}),
'prefix': ('models.CharField', [], {'max_length': '20'})
},
'profile.projectexp': {
'description': ('models.TextField', [], {}),
'favorite': ('models.BooleanField', [], {'default': '0'}),
'id': ('models.AutoField', [], {'primary_key': 'True'}),
'man_months': ('models.PositiveIntegerField', [], {'null': 'True'}),
'person': ('models.ForeignKey', ["orm['profile.Person']"], {}),
'person_role': ('models.CharField', [], {'max_length': '200'}),
'primary_language': ('models.CharField', [], {'max_length': '200', 'null': 'True'}),
'project': ('models.ForeignKey', ["orm['search.Project']"], {}),
'source': ('models.CharField', [], {'max_length': '100', 'null': 'True'}),
'url': ('models.URLField', [], {'max_length': '200', 'null': 'True'})
}
}
complete_apps = ['profile']
| agpl-3.0 |
alivecor/tensorflow | tensorflow/contrib/learn/python/learn/datasets/load_csv_test.py | 137 | 1348 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib.learn.python.learn import datasets
from tensorflow.python.platform import test
class LoadCsvTest(test.TestCase):
"""Test load csv functions."""
def testIris(self):
iris = datasets.load_iris()
self.assertTupleEqual(iris.data.shape, (150, 4))
self.assertTupleEqual(iris.target.shape, (150,))
def testBoston(self):
boston = datasets.load_boston()
self.assertTupleEqual(boston.data.shape, (506, 13))
self.assertTupleEqual(boston.target.shape, (506,))
if __name__ == "__main__":
test.main()
| apache-2.0 |
purpleidea/macaronic-net | django/core/management/commands/makemessages.py | 154 | 16507 | import fnmatch
import glob
import os
import re
import sys
from itertools import dropwhile
from optparse import make_option
from subprocess import PIPE, Popen
from django.core.management.base import CommandError, NoArgsCommand
from django.utils.text import get_text_list
pythonize_re = re.compile(r'(?:^|\n)\s*//')
plural_forms_re = re.compile(r'^(?P<value>"Plural-Forms.+?\\n")\s*$', re.MULTILINE | re.DOTALL)
def handle_extensions(extensions=('html',)):
"""
organizes multiple extensions that are separated with commas or passed by
using --extension/-e multiple times.
for example: running 'django-admin makemessages -e js,txt -e xhtml -a'
would result in an extension list: ['.js', '.txt', '.xhtml']
>>> handle_extensions(['.html', 'html,js,py,py,py,.py', 'py,.py'])
['.html', '.js']
>>> handle_extensions(['.html, txt,.tpl'])
['.html', '.tpl', '.txt']
"""
ext_list = []
for ext in extensions:
ext_list.extend(ext.replace(' ','').split(','))
for i, ext in enumerate(ext_list):
if not ext.startswith('.'):
ext_list[i] = '.%s' % ext_list[i]
# we don't want *.py files here because of the way non-*.py files
# are handled in make_messages() (they are copied to file.ext.py files to
# trick xgettext to parse them as Python files)
return set([x for x in ext_list if x != '.py'])
def _popen(cmd):
"""
Friendly wrapper around Popen for Windows
"""
p = Popen(cmd, shell=True, stdout=PIPE, stderr=PIPE, close_fds=os.name != 'nt', universal_newlines=True)
return p.communicate()
def walk(root, topdown=True, onerror=None, followlinks=False):
"""
A version of os.walk that can follow symlinks for Python < 2.6
"""
for dirpath, dirnames, filenames in os.walk(root, topdown, onerror):
yield (dirpath, dirnames, filenames)
if followlinks:
for d in dirnames:
p = os.path.join(dirpath, d)
if os.path.islink(p):
for link_dirpath, link_dirnames, link_filenames in walk(p):
yield (link_dirpath, link_dirnames, link_filenames)
def is_ignored(path, ignore_patterns):
"""
Helper function to check if the given path should be ignored or not.
"""
for pattern in ignore_patterns:
if fnmatch.fnmatchcase(path, pattern):
return True
return False
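# For example, with the normalized paths produced by find_files():
#   is_ignored('docs/README.txt', ['docs/*']) -> True
#   is_ignored('docs/README.txt', ['*.py'])   -> False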
def find_files(root, ignore_patterns, verbosity, symlinks=False):
"""
Helper function to get all files in the given root.
"""
all_files = []
for (dirpath, dirnames, filenames) in walk(".", followlinks=symlinks):
for f in filenames:
norm_filepath = os.path.normpath(os.path.join(dirpath, f))
if is_ignored(norm_filepath, ignore_patterns):
if verbosity > 1:
sys.stdout.write('ignoring file %s in %s\n' % (f, dirpath))
else:
all_files.extend([(dirpath, f)])
all_files.sort()
return all_files
def copy_plural_forms(msgs, locale, domain, verbosity):
"""
Copies plural forms header contents from a Django catalog of locale to
the msgs string, inserting it at the right place. msgs should be the
contents of a newly created .po file.
"""
import django
django_dir = os.path.normpath(os.path.join(os.path.dirname(django.__file__)))
if domain == 'djangojs':
domains = ('djangojs', 'django')
else:
domains = ('django',)
for domain in domains:
django_po = os.path.join(django_dir, 'conf', 'locale', locale, 'LC_MESSAGES', '%s.po' % domain)
if os.path.exists(django_po):
m = plural_forms_re.search(open(django_po, 'rU').read())
if m:
if verbosity > 1:
sys.stderr.write("copying plural forms: %s\n" % m.group('value'))
lines = []
seen = False
for line in msgs.split('\n'):
if not line and not seen:
line = '%s\n' % m.group('value')
seen = True
lines.append(line)
msgs = '\n'.join(lines)
break
return msgs
def make_messages(locale=None, domain='django', verbosity='1', all=False,
extensions=None, symlinks=False, ignore_patterns=[], no_wrap=False,
no_obsolete=False):
"""
Uses the locale directory from the Django SVN tree or an application/
project to process all files with translatable literals.
"""
# Need to ensure that the i18n framework is enabled
from django.conf import settings
if settings.configured:
settings.USE_I18N = True
else:
settings.configure(USE_I18N = True)
from django.utils.translation import templatize
invoked_for_django = False
if os.path.isdir(os.path.join('conf', 'locale')):
localedir = os.path.abspath(os.path.join('conf', 'locale'))
invoked_for_django = True
# Ignoring all contrib apps
ignore_patterns += ['contrib/*']
elif os.path.isdir('locale'):
localedir = os.path.abspath('locale')
else:
raise CommandError("This script should be run from the Django SVN tree or your project or app tree. If you did indeed run it from the SVN checkout or your project or application, maybe you are just missing the conf/locale (in the django tree) or locale (for project and application) directory? It is not created automatically, you have to create it by hand if you want to enable i18n for your project or application.")
if domain not in ('django', 'djangojs'):
raise CommandError("currently makemessages only supports domains 'django' and 'djangojs'")
if (locale is None and not all) or domain is None:
message = "Type '%s help %s' for usage information." % (os.path.basename(sys.argv[0]), sys.argv[1])
raise CommandError(message)
# We require gettext version 0.15 or newer.
output = _popen('xgettext --version')[0]
match = re.search(r'(?P<major>\d+)\.(?P<minor>\d+)', output)
if match:
xversion = (int(match.group('major')), int(match.group('minor')))
if xversion < (0, 15):
raise CommandError("Django internationalization requires GNU gettext 0.15 or newer. You are using version %s, please upgrade your gettext toolset." % match.group())
languages = []
if locale is not None:
languages.append(locale)
elif all:
locale_dirs = filter(os.path.isdir, glob.glob('%s/*' % localedir))
languages = [os.path.basename(l) for l in locale_dirs]
wrap = no_wrap and '--no-wrap' or ''
for locale in languages:
if verbosity > 0:
print "processing language", locale
basedir = os.path.join(localedir, locale, 'LC_MESSAGES')
if not os.path.isdir(basedir):
os.makedirs(basedir)
pofile = os.path.join(basedir, '%s.po' % domain)
potfile = os.path.join(basedir, '%s.pot' % domain)
if os.path.exists(potfile):
os.unlink(potfile)
for dirpath, file in find_files(".", ignore_patterns, verbosity, symlinks=symlinks):
file_base, file_ext = os.path.splitext(file)
if domain == 'djangojs' and file_ext in extensions:
if verbosity > 1:
sys.stdout.write('processing file %s in %s\n' % (file, dirpath))
src = open(os.path.join(dirpath, file), "rU").read()
src = pythonize_re.sub('\n#', src)
thefile = '%s.py' % file
f = open(os.path.join(dirpath, thefile), "w")
try:
f.write(src)
finally:
f.close()
cmd = (
'xgettext -d %s -L Perl %s --keyword=gettext_noop '
'--keyword=gettext_lazy --keyword=ngettext_lazy:1,2 '
'--keyword=pgettext:1c,2 --keyword=npgettext:1c,2,3 '
'--from-code UTF-8 --add-comments=Translators -o - "%s"' % (
domain, wrap, os.path.join(dirpath, thefile)
)
)
msgs, errors = _popen(cmd)
if errors:
os.unlink(os.path.join(dirpath, thefile))
if os.path.exists(potfile):
os.unlink(potfile)
raise CommandError(
"errors happened while running xgettext on %s\n%s" %
(file, errors))
if msgs:
old = '#: ' + os.path.join(dirpath, thefile)[2:]
new = '#: ' + os.path.join(dirpath, file)[2:]
msgs = msgs.replace(old, new)
if os.path.exists(potfile):
# Strip the header
msgs = '\n'.join(dropwhile(len, msgs.split('\n')))
else:
msgs = msgs.replace('charset=CHARSET', 'charset=UTF-8')
f = open(potfile, 'ab')
try:
f.write(msgs)
finally:
f.close()
os.unlink(os.path.join(dirpath, thefile))
elif domain == 'django' and (file_ext == '.py' or file_ext in extensions):
thefile = file
orig_file = os.path.join(dirpath, file)
if file_ext in extensions:
src = open(orig_file, "rU").read()
thefile = '%s.py' % file
f = open(os.path.join(dirpath, thefile), "w")
try:
f.write(templatize(src, orig_file[2:]))
finally:
f.close()
if verbosity > 1:
sys.stdout.write('processing file %s in %s\n' % (file, dirpath))
cmd = (
'xgettext -d %s -L Python %s --keyword=gettext_noop '
'--keyword=gettext_lazy --keyword=ngettext_lazy:1,2 '
'--keyword=ugettext_noop --keyword=ugettext_lazy '
'--keyword=ungettext_lazy:1,2 --keyword=pgettext:1c,2 '
'--keyword=npgettext:1c,2,3 --keyword=pgettext_lazy:1c,2 '
'--keyword=npgettext_lazy:1c,2,3 --from-code UTF-8 '
'--add-comments=Translators -o - "%s"' % (
domain, wrap, os.path.join(dirpath, thefile))
)
msgs, errors = _popen(cmd)
if errors:
if thefile != file:
os.unlink(os.path.join(dirpath, thefile))
if os.path.exists(potfile):
os.unlink(potfile)
raise CommandError(
"errors happened while running xgettext on %s\n%s" %
(file, errors))
if msgs:
if thefile != file:
old = '#: ' + os.path.join(dirpath, thefile)[2:]
new = '#: ' + orig_file[2:]
msgs = msgs.replace(old, new)
if os.path.exists(potfile):
# Strip the header
msgs = '\n'.join(dropwhile(len, msgs.split('\n')))
else:
msgs = msgs.replace('charset=CHARSET', 'charset=UTF-8')
f = open(potfile, 'ab')
try:
f.write(msgs)
finally:
f.close()
if thefile != file:
os.unlink(os.path.join(dirpath, thefile))
if os.path.exists(potfile):
msgs, errors = _popen('msguniq %s --to-code=utf-8 "%s"' %
(wrap, potfile))
if errors:
os.unlink(potfile)
raise CommandError(
"errors happened while running msguniq\n%s" % errors)
if os.path.exists(pofile):
f = open(potfile, 'w')
try:
f.write(msgs)
finally:
f.close()
msgs, errors = _popen('msgmerge %s -q "%s" "%s"' %
(wrap, pofile, potfile))
if errors:
os.unlink(potfile)
raise CommandError(
"errors happened while running msgmerge\n%s" % errors)
elif not invoked_for_django:
msgs = copy_plural_forms(msgs, locale, domain, verbosity)
msgs = msgs.replace(
"#. #-#-#-#-# %s.pot (PACKAGE VERSION) #-#-#-#-#\n" % domain, "")
f = open(pofile, 'wb')
try:
f.write(msgs)
finally:
f.close()
os.unlink(potfile)
if no_obsolete:
msgs, errors = _popen('msgattrib %s -o "%s" --no-obsolete "%s"' %
(wrap, pofile, pofile))
if errors:
raise CommandError(
"errors happened while running msgattrib\n%s" % errors)
class Command(NoArgsCommand):
option_list = NoArgsCommand.option_list + (
make_option('--locale', '-l', default=None, dest='locale',
help='Creates or updates the message files for the given locale (e.g. pt_BR).'),
make_option('--domain', '-d', default='django', dest='domain',
help='The domain of the message files (default: "django").'),
make_option('--all', '-a', action='store_true', dest='all',
default=False, help='Updates the message files for all existing locales.'),
make_option('--extension', '-e', dest='extensions',
help='The file extension(s) to examine (default: ".html", separate multiple extensions with commas, or use -e multiple times)',
action='append'),
make_option('--symlinks', '-s', action='store_true', dest='symlinks',
default=False, help='Follows symlinks to directories when examining source code and templates for translation strings.'),
make_option('--ignore', '-i', action='append', dest='ignore_patterns',
default=[], metavar='PATTERN', help='Ignore files or directories matching this glob-style pattern. Use multiple times to ignore more.'),
make_option('--no-default-ignore', action='store_false', dest='use_default_ignore_patterns',
default=True, help="Don't ignore the common glob-style patterns 'CVS', '.*' and '*~'."),
make_option('--no-wrap', action='store_true', dest='no_wrap',
default=False, help="Don't break long message lines into several lines"),
make_option('--no-obsolete', action='store_true', dest='no_obsolete',
default=False, help="Remove obsolete message strings"),
)
help = ( "Runs over the entire source tree of the current directory and "
"pulls out all strings marked for translation. It creates (or updates) a message "
"file in the conf/locale (in the django tree) or locale (for projects and "
"applications) directory.\n\nYou must run this command with one of either the "
"--locale or --all options.")
requires_model_validation = False
can_import_settings = False
def handle_noargs(self, *args, **options):
locale = options.get('locale')
domain = options.get('domain')
verbosity = int(options.get('verbosity'))
process_all = options.get('all')
extensions = options.get('extensions')
symlinks = options.get('symlinks')
ignore_patterns = options.get('ignore_patterns')
if options.get('use_default_ignore_patterns'):
ignore_patterns += ['CVS', '.*', '*~']
ignore_patterns = list(set(ignore_patterns))
no_wrap = options.get('no_wrap')
no_obsolete = options.get('no_obsolete')
if domain == 'djangojs':
extensions = handle_extensions(extensions or ['js'])
else:
extensions = handle_extensions(extensions or ['html'])
if verbosity > 1:
sys.stdout.write('examining files with the extensions: %s\n'
% get_text_list(list(extensions), 'and'))
make_messages(locale, domain, verbosity, process_all, extensions, symlinks, ignore_patterns, no_wrap, no_obsolete)
| agpl-3.0 |
BeeeOn/server | t/xmlui/t2003-myprofile.py | 1 | 2181 | #! /usr/bin/env python3
import config
config.import_libs()
import unittest
from xmlui import Connector, Response, Login, Logout
from xmlui import GetMyProfile
class TestGetMyProfile(unittest.TestCase):
"""
Create 2 sessions for 2 different users.
"""
def setUp(self):
c = Connector(config.xmlui_host, config.xmlui_port, config.xmlui_ssl)
response = c.request(config.PERMIT_LOGIN)
self.assertTrue(response.is_data())
self.session = response.sessionid()
response = c.request(config.PERMIT_LOGIN2)
self.assertTrue(response.is_data())
self.session2 = response.sessionid()
"""
Destroy the sessions.
"""
def tearDown(self):
c = Connector(config.xmlui_host, config.xmlui_port, config.xmlui_ssl)
response = c.request(Logout(self.session2))
self.assertTrue(response.is_ok())
response = c.request(Logout(self.session))
self.assertTrue(response.is_ok())
def test0_getmyprofile_first(self):
c = Connector(config.xmlui_host, config.xmlui_port, config.xmlui_ssl)
response = c.request(GetMyProfile(self.session))
self.assertTrue(response.is_data())
self.assertEqual("joe.doe@example.org",
response.root[0].get("email"))
self.assertEqual("Joe", response.root[0].get("name"))
self.assertEqual("Joe", response.root[0].get("first_name"))
self.assertEqual("Doe", response.root[0].get("surname"))
self.assertEqual("Doe", response.root[0].get("last_name"))
self.assertEqual("unknown", response.root[0].get("gender"))
def test1_getmyprofile_second(self):
c = Connector(config.xmlui_host, config.xmlui_port, config.xmlui_ssl)
response = c.request(GetMyProfile(self.session2))
self.assertTrue(response.is_data())
self.assertEqual("john.smith@example.org",
response.root[0].get("email"))
self.assertEqual("John", response.root[0].get("name"))
self.assertEqual("John", response.root[0].get("first_name"))
self.assertEqual("Smith", response.root[0].get("surname"))
self.assertEqual("Smith", response.root[0].get("last_name"))
self.assertEqual("unknown", response.root[0].get("gender"))
if __name__ == '__main__':
import sys
import taprunner
unittest.main(testRunner=taprunner.TAPTestRunner(stream = sys.stdout))
| bsd-3-clause |
WorldViews/Spirals | scripts/TwitterWatcherPeace.py | 1 | 2822 | from tweepy import Stream
from tweepy import OAuthHandler
from tweepy.streaming import StreamListener
import os, urllib2, json
import ImageResizer
#IMAGE_DIR = "C:/kimber/WorldViews/twitter_images"
IMAGE_DIR = "../images/twitter_images"
CONFIG_PATH = "C:/kimber/WorldViews/twitter_auth_config.py"
"""
You can get authentication values at twitter developer website https://dev.twitter.com/
"""
config = {}
execfile(CONFIG_PATH, config)
ckey = config['ckey']
csecret = config['csecret']
atoken = config['atoken']
asecret = config['asecret']
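# The config file is plain Python executed via execfile(); it is expected to
# define the four credentials read above, e.g. (placeholder values only):
#
#   ckey = 'your-consumer-key'
#   csecret = 'your-consumer-secret'
#   atoken = 'your-access-token'
#   asecret = 'your-access-token-secret'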
print "ckey", ckey
print "csecret", csecret
def saveImage(url, id):
path = "%s/%s.jpg" % (IMAGE_DIR, id)
pow2path = "%s/%s_p2.jpg" % (IMAGE_DIR, id)
print "Saving to", path
try:
uos = urllib2.urlopen(url)
except:
print "Couldn't open", url
return None
try:
file(path, "wb").write(uos.read())
except:
print "Couldn't save", path
return None
ImageResizer.resizePow2(path, pow2path)
return path
class listener(StreamListener):
n = 0
def on_data(self, data):
#print data
obj = json.loads(data)
#if "geo" not in obj:
# return True
text = obj.get('text', None)
geo = obj.get('geo', None)
try:
print text
except:
print "text wouldn't print"
media_urls = []
try:
ents = obj['entities']
media = ents['media']
for med in media:
if 'media_url' in med:
media_urls.append(med['media_url'])
except KeyError:
pass
if geo and media_urls:
try:
print "text", text
except:
print "text ****"
print "geo", geo
print "media_urls", media_urls
self.n += 1
url = media_urls[0]
id = "%07d" % self.n
path = saveImage(url, id)
if path:
jsonPath = "%s/%s.json" % (IMAGE_DIR, id)
json.dump(obj, file(jsonPath, "w"))
print
return True
def on_error(self, status):
print "on_error:"
print status
def verifyDir(path):
if not os.path.exists(path):
print "Creating", path
os.makedirs(path)
class TwitterWatcher:
def __init__(self):
auth = OAuthHandler(ckey, csecret)
auth.set_access_token(atoken, asecret)
self.twitterStream = Stream(auth, listener())
verifyDir(IMAGE_DIR)
def run(self):
#self.twitterStream.filter(locations=[-180.0, -90.0, 180.0, 90.0])
self.twitterStream.filter(track=["#peace", "#peacetree"])
def run():
tw = TwitterWatcher()
tw.run()
if __name__ == '__main__':
run()
| mit |
sda2b/youtube-dl | youtube_dl/extractor/syfy.py | 159 | 1827 | from __future__ import unicode_literals
import re
from .common import InfoExtractor
class SyfyIE(InfoExtractor):
_VALID_URL = r'https?://www\.syfy\.com/(?:videos/.+?vid:(?P<id>[0-9]+)|(?!videos)(?P<video_name>[^/]+)(?:$|[?#]))'
_TESTS = [{
'url': 'http://www.syfy.com/videos/Robot%20Combat%20League/Behind%20the%20Scenes/vid:2631458',
'info_dict': {
'id': 'NmqMrGnXvmO1',
'ext': 'flv',
'title': 'George Lucas has Advice for his Daughter',
'description': 'Listen to what insights George Lucas give his daughter Amanda.',
},
'add_ie': ['ThePlatform'],
}, {
'url': 'http://www.syfy.com/wilwheaton',
'md5': '94dfa54ee3ccb63295b276da08c415f6',
'info_dict': {
'id': '4yoffOOXC767',
'ext': 'flv',
'title': 'The Wil Wheaton Project - Premiering May 27th at 10/9c.',
'description': 'The Wil Wheaton Project premieres May 27th at 10/9c. Don\'t miss it.',
},
'add_ie': ['ThePlatform'],
'skip': 'Blocked outside the US',
}]
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
video_name = mobj.group('video_name')
if video_name:
generic_webpage = self._download_webpage(url, video_name)
video_id = self._search_regex(
r'<iframe.*?class="video_iframe_page"\s+src="/_utils/video/thP_video_controller.php.*?_vid([0-9]+)">',
generic_webpage, 'video ID')
url = 'http://www.syfy.com/videos/%s/%s/vid:%s' % (
video_name, video_name, video_id)
else:
video_id = mobj.group('id')
webpage = self._download_webpage(url, video_id)
return self.url_result(self._og_search_video_url(webpage))
| unlicense |
christianblunden/googmuze | resources/lib/google/protobuf/compiler/plugin_pb2.py | 24 | 6750 | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/protobuf/compiler/plugin.proto
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
import google.protobuf.descriptor_pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='google/protobuf/compiler/plugin.proto',
package='google.protobuf.compiler',
serialized_pb='\n%google/protobuf/compiler/plugin.proto\x12\x18google.protobuf.compiler\x1a google/protobuf/descriptor.proto\"}\n\x14\x43odeGeneratorRequest\x12\x18\n\x10\x66ile_to_generate\x18\x01 \x03(\t\x12\x11\n\tparameter\x18\x02 \x01(\t\x12\x38\n\nproto_file\x18\x0f \x03(\x0b\x32$.google.protobuf.FileDescriptorProto\"\xaa\x01\n\x15\x43odeGeneratorResponse\x12\r\n\x05\x65rror\x18\x01 \x01(\t\x12\x42\n\x04\x66ile\x18\x0f \x03(\x0b\x32\x34.google.protobuf.compiler.CodeGeneratorResponse.File\x1a>\n\x04\x46ile\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x17\n\x0finsertion_point\x18\x02 \x01(\t\x12\x0f\n\x07\x63ontent\x18\x0f \x01(\tB,\n\x1c\x63om.google.protobuf.compilerB\x0cPluginProtos')
_CODEGENERATORREQUEST = _descriptor.Descriptor(
name='CodeGeneratorRequest',
full_name='google.protobuf.compiler.CodeGeneratorRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='file_to_generate', full_name='google.protobuf.compiler.CodeGeneratorRequest.file_to_generate', index=0,
number=1, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='parameter', full_name='google.protobuf.compiler.CodeGeneratorRequest.parameter', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='proto_file', full_name='google.protobuf.compiler.CodeGeneratorRequest.proto_file', index=2,
number=15, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=101,
serialized_end=226,
)
_CODEGENERATORRESPONSE_FILE = _descriptor.Descriptor(
name='File',
full_name='google.protobuf.compiler.CodeGeneratorResponse.File',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='google.protobuf.compiler.CodeGeneratorResponse.File.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='insertion_point', full_name='google.protobuf.compiler.CodeGeneratorResponse.File.insertion_point', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='content', full_name='google.protobuf.compiler.CodeGeneratorResponse.File.content', index=2,
number=15, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=337,
serialized_end=399,
)
_CODEGENERATORRESPONSE = _descriptor.Descriptor(
name='CodeGeneratorResponse',
full_name='google.protobuf.compiler.CodeGeneratorResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='error', full_name='google.protobuf.compiler.CodeGeneratorResponse.error', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=unicode("", "utf-8"),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='file', full_name='google.protobuf.compiler.CodeGeneratorResponse.file', index=1,
number=15, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[_CODEGENERATORRESPONSE_FILE, ],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
serialized_start=229,
serialized_end=399,
)
_CODEGENERATORREQUEST.fields_by_name['proto_file'].message_type = google.protobuf.descriptor_pb2._FILEDESCRIPTORPROTO
_CODEGENERATORRESPONSE_FILE.containing_type = _CODEGENERATORRESPONSE;
_CODEGENERATORRESPONSE.fields_by_name['file'].message_type = _CODEGENERATORRESPONSE_FILE
DESCRIPTOR.message_types_by_name['CodeGeneratorRequest'] = _CODEGENERATORREQUEST
DESCRIPTOR.message_types_by_name['CodeGeneratorResponse'] = _CODEGENERATORRESPONSE
class CodeGeneratorRequest(_message.Message):
__metaclass__ = _reflection.GeneratedProtocolMessageType
DESCRIPTOR = _CODEGENERATORREQUEST
# @@protoc_insertion_point(class_scope:google.protobuf.compiler.CodeGeneratorRequest)
class CodeGeneratorResponse(_message.Message):
__metaclass__ = _reflection.GeneratedProtocolMessageType
class File(_message.Message):
__metaclass__ = _reflection.GeneratedProtocolMessageType
DESCRIPTOR = _CODEGENERATORRESPONSE_FILE
# @@protoc_insertion_point(class_scope:google.protobuf.compiler.CodeGeneratorResponse.File)
DESCRIPTOR = _CODEGENERATORRESPONSE
# @@protoc_insertion_point(class_scope:google.protobuf.compiler.CodeGeneratorResponse)
DESCRIPTOR.has_options = True
DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), '\n\034com.google.protobuf.compilerB\014PluginProtos')
# @@protoc_insertion_point(module_scope)
| gpl-3.0 |
tmerrick1/spack | var/spack/repos/builtin/packages/minighost/package.py | 5 | 3454 | ##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import tarfile
from spack import *
class Minighost(MakefilePackage):
"""Proxy Application. A Finite Difference proxy
application which implements a difference stencil
across a homogeneous three-dimensional domain.
"""
homepage = "http://mantevo.org"
url = "http://mantevo.org/downloads/releaseTarballs/miniapps/MiniGhost/miniGhost_1.0.1.tar.gz"
tags = ['proxy-app']
version('1.0.1', '2a4ac4383e9be00f87b6067c3cfe6463')
variant('mpi', default=True, description='Enable MPI Support')
depends_on('mpi', when='+mpi')
parallel = False
@property
def build_targets(self):
targets = ['--directory=miniGhost_ref']
if '+mpi' in self.spec:
targets.append('PROTOCOL=-D_MG_MPI')
targets.append('FC={0}'.format(self.spec['mpi'].mpif77))
# CC is only used for linking, use it to pull in the right f77 libs
targets.append('CC={0}'.format(self.spec['mpi'].mpif77))
else:
targets.append('PROTOCOL=-D_MG_SERIAL')
targets.append('FC=f77')
targets.append('CC=cc')
if '%gcc' in self.spec:
targets.append('COMPILER_SUITE=gnu')
targets.append('LIBS=-lm -lgfortran')
elif '%cce' in self.spec:
targets.append('COMPILER_SUITE=cray')
elif '%intel' in self.spec:
targets.append('COMPILER_SUITE=intel')
elif '%pgi' in self.spec:
targets.append('COMPILER_SUITE=pgi')
return targets
def edit(self, spec, prefix):
inner_tar = tarfile.open(
'miniGhost_ref_{0}.tar.gz'.format(self.version.up_to(3)))
inner_tar.extractall()
def install(self, spec, prefix):
# Manual Installation
mkdirp(prefix.bin)
mkdirp(prefix.doc)
install('miniGhost_ref/miniGhost.x', prefix.bin)
install('miniGhost_ref/default-settings.h', prefix.bin)
if '+mpi' in spec:
install('miniGhost_ref/runtest.mpi', prefix.bin)
install('miniGhost_ref/runtest.mpi.ds', prefix.bin)
else:
install('miniGhost_ref/runtest.serial', prefix.bin)
install('README', prefix.doc)
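    # Typical command-line usage once this package file is in a Spack repo
    # (illustrative; the MPI provider is whatever your Spack setup resolves):
    #
    #   spack install minighost +mpi
    #   spack install minighost ~mpi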
| lgpl-2.1 |
huggingface/pytorch-transformers | utils/check_tf_ops.py | 2 | 3574 | # coding=utf-8
# Copyright 2020 The HuggingFace Inc. team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import json
import os
from tensorflow.core.protobuf.saved_model_pb2 import SavedModel
# All paths are set with the intent you should run this script from the root of the repo with the command
# python utils/check_tf_ops.py
REPO_PATH = "."
# Internal TensorFlow ops that can be safely ignored (mostly specific to a saved model)
INTERNAL_OPS = [
"Assert",
"AssignVariableOp",
"EmptyTensorList",
"MergeV2Checkpoints",
"ReadVariableOp",
"ResourceGather",
"RestoreV2",
"SaveV2",
"ShardedFilename",
"StatefulPartitionedCall",
"StaticRegexFullMatch",
"VarHandleOp",
]
def onnx_compliancy(saved_model_path, strict, opset):
saved_model = SavedModel()
onnx_ops = []
with open(os.path.join(REPO_PATH, "utils", "tf_ops", "onnx.json")) as f:
onnx_opsets = json.load(f)["opsets"]
for i in range(1, opset + 1):
onnx_ops.extend(onnx_opsets[str(i)])
with open(saved_model_path, "rb") as f:
saved_model.ParseFromString(f.read())
model_op_names = set()
# Iterate over every metagraph in case there is more than one (a saved model can contain multiple graphs)
for meta_graph in saved_model.meta_graphs:
# Add operations in the graph definition
model_op_names.update(node.op for node in meta_graph.graph_def.node)
# Go through the functions in the graph definition
for func in meta_graph.graph_def.library.function:
# Add operations in each function
model_op_names.update(node.op for node in func.node_def)
# Convert to a sorted list for deterministic output
model_op_names = sorted(model_op_names)
incompatible_ops = []
for op in model_op_names:
if op not in onnx_ops and op not in INTERNAL_OPS:
incompatible_ops.append(op)
if strict and len(incompatible_ops) > 0:
raise Exception(f"Found the following incompatible ops for the opset {opset}:\n" + "\n".join(incompatible_ops))
elif len(incompatible_ops) > 0:
print(f"Found the following incompatible ops for the opset {opset}:")
print(*incompatible_ops, sep="\n")
else:
print(f"The saved model {saved_model_path} can properly be converted with ONNX.")
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument("--saved_model_path", help="Path of the saved model to check (the .pb file).")
parser.add_argument(
"--opset", default=12, type=int, help="The ONNX opset against which the model has to be tested."
)
parser.add_argument(
"--framework", choices=["onnx"], default="onnx", help="Frameworks against which to test the saved model."
)
parser.add_argument(
"--strict", action="store_true", help="Whether make the checking strict (raise errors) or not (raise warnings)"
)
args = parser.parse_args()
if args.framework == "onnx":
onnx_compliancy(args.saved_model_path, args.strict, args.opset)
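# Example invocation from the repo root (the SavedModel path is illustrative):
#
#   python utils/check_tf_ops.py --saved_model_path saved_model/saved_model.pb --opset 12 --strict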
| apache-2.0 |
byterom/android_external_chromium_org | chrome/common/extensions/docs/server2/whats_new_data_source.py | 41 | 3499 | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from itertools import groupby
from operator import itemgetter
import posixpath
from data_source import DataSource
from extensions_paths import JSON_TEMPLATES, PUBLIC_TEMPLATES
from future import Future
from platform_util import GetPlatforms
class WhatsNewDataSource(DataSource):
''' This class creates a list of "what is new" by chrome version.
'''
def __init__(self, server_instance, _):
self._parse_cache = server_instance.compiled_fs_factory.ForJson(
server_instance.host_file_system_provider.GetMaster())
self._object_store = server_instance.object_store_creator.Create(
WhatsNewDataSource)
self._platform_bundle = server_instance.platform_bundle
def _GenerateChangesListWithVersion(self, platform, whats_new_json):
return [{
'id': change_id,
'type': change['type'],
'description': change['description'],
'version': change['version']
} for change_id, change in whats_new_json.iteritems()]
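# Judging from the fields read above, each whats_new.json entry is keyed by a
# change id and carries 'type', 'description' and 'version' values, e.g.
# (hypothetical contents):
#
#   {"42": {"type": "apis", "description": "...", "version": 28}}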
def _GetAPIVersion(self, platform, api_name):
version = None
category = self._platform_bundle.GetAPICategorizer(platform).GetCategory(
api_name)
if category == 'chrome':
channel_info = self._platform_bundle.GetAvailabilityFinder(
platform).GetAPIAvailability(api_name).channel_info
channel = channel_info.channel
if channel == 'stable':
version = channel_info.version
return version
def _GenerateAPIListWithVersion(self, platform):
data = []
for api_name, api_model in self._platform_bundle.GetAPIModels(
platform).IterModels():
version = self._GetAPIVersion(platform, api_name)
if version:
api = {
'name': api_name,
'description': api_model.description,
'version' : version,
'type': 'apis',
}
data.append(api)
data.sort(key=itemgetter('version'))
return data
def _GenerateWhatsNewDict(self):
whats_new_json_future = self._parse_cache.GetFromFile(
posixpath.join(JSON_TEMPLATES, 'whats_new.json'))
def _MakeDictByPlatform(platform):
whats_new_json = whats_new_json_future.Get()
platform_list = []
apis = self._GenerateAPIListWithVersion(platform)
apis.extend(self._GenerateChangesListWithVersion(platform,
whats_new_json))
apis.sort(key=itemgetter('version'), reverse=True)
for version, group in groupby(apis, key=itemgetter('version')):
whats_new_by_version = {
'version': version,
}
for item in group:
item_type = item['type']
if item_type not in whats_new_by_version:
whats_new_by_version[item_type] = []
whats_new_by_version[item_type].append(item)
platform_list.append(whats_new_by_version)
return platform_list
def resolve():
return dict((platform, _MakeDictByPlatform(platform))
for platform in GetPlatforms())
return Future(callback=resolve)
def _GetCachedWhatsNewData(self):
data = self._object_store.Get('whats_new_data').Get()
if data is None:
data = self._GenerateWhatsNewDict().Get()
self._object_store.Set('whats_new_data', data)
return data
def get(self, key):
return self._GetCachedWhatsNewData().get(key)
def Refresh(self, path):
return self._GenerateWhatsNewDict()
| bsd-3-clause |
xiaotangyuan/django-rest-framework | tests/test_filters.py | 38 | 30783 | from __future__ import unicode_literals
import datetime
from decimal import Decimal
from django.conf.urls import url
from django.core.urlresolvers import reverse
from django.db import models
from django.test import TestCase
from django.test.utils import override_settings
from django.utils import unittest
from django.utils.dateparse import parse_date
from django.utils.six.moves import reload_module
from rest_framework import filters, generics, serializers, status
from rest_framework.compat import django_filters
from rest_framework.test import APIRequestFactory
from .models import BaseFilterableItem, BasicModel, FilterableItem
factory = APIRequestFactory()
if django_filters:
class FilterableItemSerializer(serializers.ModelSerializer):
class Meta:
model = FilterableItem
# Basic filter on a list view.
class FilterFieldsRootView(generics.ListCreateAPIView):
queryset = FilterableItem.objects.all()
serializer_class = FilterableItemSerializer
filter_fields = ['decimal', 'date']
filter_backends = (filters.DjangoFilterBackend,)
    # These classes are used to test a filter class.
class SeveralFieldsFilter(django_filters.FilterSet):
text = django_filters.CharFilter(lookup_type='icontains')
decimal = django_filters.NumberFilter(lookup_type='lt')
date = django_filters.DateFilter(lookup_type='gt')
class Meta:
model = FilterableItem
fields = ['text', 'decimal', 'date']
class FilterClassRootView(generics.ListCreateAPIView):
queryset = FilterableItem.objects.all()
serializer_class = FilterableItemSerializer
filter_class = SeveralFieldsFilter
filter_backends = (filters.DjangoFilterBackend,)
# These classes are used to test a misconfigured filter class.
class MisconfiguredFilter(django_filters.FilterSet):
text = django_filters.CharFilter(lookup_type='icontains')
class Meta:
model = BasicModel
fields = ['text']
class IncorrectlyConfiguredRootView(generics.ListCreateAPIView):
queryset = FilterableItem.objects.all()
serializer_class = FilterableItemSerializer
filter_class = MisconfiguredFilter
filter_backends = (filters.DjangoFilterBackend,)
class FilterClassDetailView(generics.RetrieveAPIView):
queryset = FilterableItem.objects.all()
serializer_class = FilterableItemSerializer
filter_class = SeveralFieldsFilter
filter_backends = (filters.DjangoFilterBackend,)
# These classes are used to test base model filter support
class BaseFilterableItemFilter(django_filters.FilterSet):
text = django_filters.CharFilter()
class Meta:
model = BaseFilterableItem
class BaseFilterableItemFilterRootView(generics.ListCreateAPIView):
queryset = FilterableItem.objects.all()
serializer_class = FilterableItemSerializer
filter_class = BaseFilterableItemFilter
filter_backends = (filters.DjangoFilterBackend,)
# Regression test for #814
class FilterFieldsQuerysetView(generics.ListCreateAPIView):
queryset = FilterableItem.objects.all()
serializer_class = FilterableItemSerializer
filter_fields = ['decimal', 'date']
filter_backends = (filters.DjangoFilterBackend,)
class GetQuerysetView(generics.ListCreateAPIView):
serializer_class = FilterableItemSerializer
filter_class = SeveralFieldsFilter
filter_backends = (filters.DjangoFilterBackend,)
def get_queryset(self):
return FilterableItem.objects.all()
urlpatterns = [
url(r'^(?P<pk>\d+)/$', FilterClassDetailView.as_view(), name='detail-view'),
url(r'^$', FilterClassRootView.as_view(), name='root-view'),
url(r'^get-queryset/$', GetQuerysetView.as_view(),
name='get-queryset-view'),
]
class CommonFilteringTestCase(TestCase):
def _serialize_object(self, obj):
return {'id': obj.id, 'text': obj.text, 'decimal': str(obj.decimal), 'date': obj.date.isoformat()}
def setUp(self):
"""
Create 10 FilterableItem instances.
"""
base_data = ('a', Decimal('0.25'), datetime.date(2012, 10, 8))
for i in range(10):
            text = chr(i + ord(base_data[0])) * 3 # Produces strings 'aaa', 'bbb', etc.
decimal = base_data[1] + i
date = base_data[2] - datetime.timedelta(days=i * 2)
FilterableItem(text=text, decimal=decimal, date=date).save()
self.objects = FilterableItem.objects
self.data = [
self._serialize_object(obj)
for obj in self.objects.all()
]
class IntegrationTestFiltering(CommonFilteringTestCase):
"""
Integration tests for filtered list views.
"""
@unittest.skipUnless(django_filters, 'django-filter not installed')
def test_get_filtered_fields_root_view(self):
"""
GET requests to paginated ListCreateAPIView should return paginated results.
"""
view = FilterFieldsRootView.as_view()
# Basic test with no filter.
request = factory.get('/')
response = view(request).render()
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data, self.data)
# Tests that the decimal filter works.
search_decimal = Decimal('2.25')
request = factory.get('/', {'decimal': '%s' % search_decimal})
response = view(request).render()
self.assertEqual(response.status_code, status.HTTP_200_OK)
expected_data = [f for f in self.data if Decimal(f['decimal']) == search_decimal]
self.assertEqual(response.data, expected_data)
# Tests that the date filter works.
search_date = datetime.date(2012, 9, 22)
request = factory.get('/', {'date': '%s' % search_date}) # search_date str: '2012-09-22'
response = view(request).render()
self.assertEqual(response.status_code, status.HTTP_200_OK)
expected_data = [f for f in self.data if parse_date(f['date']) == search_date]
self.assertEqual(response.data, expected_data)
@unittest.skipUnless(django_filters, 'django-filter not installed')
def test_filter_with_queryset(self):
"""
Regression test for #814.
"""
view = FilterFieldsQuerysetView.as_view()
# Tests that the decimal filter works.
search_decimal = Decimal('2.25')
request = factory.get('/', {'decimal': '%s' % search_decimal})
response = view(request).render()
self.assertEqual(response.status_code, status.HTTP_200_OK)
expected_data = [f for f in self.data if Decimal(f['decimal']) == search_decimal]
self.assertEqual(response.data, expected_data)
@unittest.skipUnless(django_filters, 'django-filter not installed')
def test_filter_with_get_queryset_only(self):
"""
Regression test for #834.
"""
view = GetQuerysetView.as_view()
request = factory.get('/get-queryset/')
view(request).render()
# Used to raise "issubclass() arg 2 must be a class or tuple of classes"
# here when neither `model' nor `queryset' was specified.
@unittest.skipUnless(django_filters, 'django-filter not installed')
def test_get_filtered_class_root_view(self):
"""
GET requests to filtered ListCreateAPIView that have a filter_class set
should return filtered results.
"""
view = FilterClassRootView.as_view()
# Basic test with no filter.
request = factory.get('/')
response = view(request).render()
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data, self.data)
# Tests that the decimal filter set with 'lt' in the filter class works.
search_decimal = Decimal('4.25')
request = factory.get('/', {'decimal': '%s' % search_decimal})
response = view(request).render()
self.assertEqual(response.status_code, status.HTTP_200_OK)
expected_data = [f for f in self.data if Decimal(f['decimal']) < search_decimal]
self.assertEqual(response.data, expected_data)
# Tests that the date filter set with 'gt' in the filter class works.
search_date = datetime.date(2012, 10, 2)
request = factory.get('/', {'date': '%s' % search_date}) # search_date str: '2012-10-02'
response = view(request).render()
self.assertEqual(response.status_code, status.HTTP_200_OK)
expected_data = [f for f in self.data if parse_date(f['date']) > search_date]
self.assertEqual(response.data, expected_data)
# Tests that the text filter set with 'icontains' in the filter class works.
search_text = 'ff'
request = factory.get('/', {'text': '%s' % search_text})
response = view(request).render()
self.assertEqual(response.status_code, status.HTTP_200_OK)
expected_data = [f for f in self.data if search_text in f['text'].lower()]
self.assertEqual(response.data, expected_data)
        # Tests that multiple filters work.
search_decimal = Decimal('5.25')
search_date = datetime.date(2012, 10, 2)
request = factory.get('/', {
'decimal': '%s' % (search_decimal,),
'date': '%s' % (search_date,)
})
response = view(request).render()
self.assertEqual(response.status_code, status.HTTP_200_OK)
expected_data = [f for f in self.data if parse_date(f['date']) > search_date and
Decimal(f['decimal']) < search_decimal]
self.assertEqual(response.data, expected_data)
@unittest.skipUnless(django_filters, 'django-filter not installed')
def test_incorrectly_configured_filter(self):
"""
An error should be displayed when the filter class is misconfigured.
"""
view = IncorrectlyConfiguredRootView.as_view()
request = factory.get('/')
self.assertRaises(AssertionError, view, request)
@unittest.skipUnless(django_filters, 'django-filter not installed')
def test_base_model_filter(self):
"""
The `get_filter_class` model checks should allow base model filters.
"""
view = BaseFilterableItemFilterRootView.as_view()
request = factory.get('/?text=aaa')
response = view(request).render()
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(len(response.data), 1)
@unittest.skipUnless(django_filters, 'django-filter not installed')
def test_unknown_filter(self):
"""
GET requests with filters that aren't configured should return 200.
"""
view = FilterFieldsRootView.as_view()
search_integer = 10
request = factory.get('/', {'integer': '%s' % search_integer})
response = view(request).render()
self.assertEqual(response.status_code, status.HTTP_200_OK)
class IntegrationTestDetailFiltering(CommonFilteringTestCase):
"""
Integration tests for filtered detail views.
"""
urls = 'tests.test_filters'
def _get_url(self, item):
return reverse('detail-view', kwargs=dict(pk=item.pk))
@unittest.skipUnless(django_filters, 'django-filter not installed')
def test_get_filtered_detail_view(self):
"""
GET requests to filtered RetrieveAPIView that have a filter_class set
should return filtered results.
"""
item = self.objects.all()[0]
data = self._serialize_object(item)
# Basic test with no filter.
response = self.client.get(self._get_url(item))
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data, data)
        # Tests a decimal filter lookup that should fail to match the item.
search_decimal = Decimal('4.25')
high_item = self.objects.filter(decimal__gt=search_decimal)[0]
response = self.client.get(
'{url}'.format(url=self._get_url(high_item)),
{'decimal': '{param}'.format(param=search_decimal)})
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
        # Tests a decimal filter lookup that should match the item.
search_decimal = Decimal('4.25')
low_item = self.objects.filter(decimal__lt=search_decimal)[0]
low_item_data = self._serialize_object(low_item)
response = self.client.get(
'{url}'.format(url=self._get_url(low_item)),
{'decimal': '{param}'.format(param=search_decimal)})
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data, low_item_data)
        # Tests that multiple filters work.
search_decimal = Decimal('5.25')
search_date = datetime.date(2012, 10, 2)
valid_item = self.objects.filter(decimal__lt=search_decimal, date__gt=search_date)[0]
valid_item_data = self._serialize_object(valid_item)
response = self.client.get(
'{url}'.format(url=self._get_url(valid_item)), {
'decimal': '{decimal}'.format(decimal=search_decimal),
'date': '{date}'.format(date=search_date)
})
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(response.data, valid_item_data)
class SearchFilterModel(models.Model):
title = models.CharField(max_length=20)
text = models.CharField(max_length=100)
class SearchFilterSerializer(serializers.ModelSerializer):
class Meta:
model = SearchFilterModel
class SearchFilterTests(TestCase):
def setUp(self):
# Sequence of title/text is:
#
# z abc
# zz bcd
# zzz cde
# ...
for idx in range(10):
title = 'z' * (idx + 1)
text = (
chr(idx + ord('a')) +
chr(idx + ord('b')) +
chr(idx + ord('c'))
)
SearchFilterModel(title=title, text=text).save()
def test_search(self):
class SearchListView(generics.ListAPIView):
queryset = SearchFilterModel.objects.all()
serializer_class = SearchFilterSerializer
filter_backends = (filters.SearchFilter,)
search_fields = ('title', 'text')
view = SearchListView.as_view()
request = factory.get('/', {'search': 'b'})
response = view(request)
self.assertEqual(
response.data,
[
{'id': 1, 'title': 'z', 'text': 'abc'},
{'id': 2, 'title': 'zz', 'text': 'bcd'}
]
)
def test_exact_search(self):
class SearchListView(generics.ListAPIView):
queryset = SearchFilterModel.objects.all()
serializer_class = SearchFilterSerializer
filter_backends = (filters.SearchFilter,)
search_fields = ('=title', 'text')
view = SearchListView.as_view()
request = factory.get('/', {'search': 'zzz'})
response = view(request)
self.assertEqual(
response.data,
[
{'id': 3, 'title': 'zzz', 'text': 'cde'}
]
)
def test_startswith_search(self):
class SearchListView(generics.ListAPIView):
queryset = SearchFilterModel.objects.all()
serializer_class = SearchFilterSerializer
filter_backends = (filters.SearchFilter,)
search_fields = ('title', '^text')
view = SearchListView.as_view()
request = factory.get('/', {'search': 'b'})
response = view(request)
self.assertEqual(
response.data,
[
{'id': 2, 'title': 'zz', 'text': 'bcd'}
]
)
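    # Search field prefixes exercised by these tests: '=' exact match,
    # '^' starts-with, '$' regex; unprefixed fields use a contains lookup.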
def test_regexp_search(self):
class SearchListView(generics.ListAPIView):
queryset = SearchFilterModel.objects.all()
serializer_class = SearchFilterSerializer
filter_backends = (filters.SearchFilter,)
search_fields = ('$title', '$text')
view = SearchListView.as_view()
request = factory.get('/', {'search': 'z{2} ^b'})
response = view(request)
self.assertEqual(
response.data,
[
{'id': 2, 'title': 'zz', 'text': 'bcd'}
]
)
def test_search_with_nonstandard_search_param(self):
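        # The search param is read when the filters module is imported, so the
        # module is reloaded under the overridden settings and reloaded again
        # afterwards to restore the default.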
with override_settings(REST_FRAMEWORK={'SEARCH_PARAM': 'query'}):
reload_module(filters)
class SearchListView(generics.ListAPIView):
queryset = SearchFilterModel.objects.all()
serializer_class = SearchFilterSerializer
filter_backends = (filters.SearchFilter,)
search_fields = ('title', 'text')
view = SearchListView.as_view()
request = factory.get('/', {'query': 'b'})
response = view(request)
self.assertEqual(
response.data,
[
{'id': 1, 'title': 'z', 'text': 'abc'},
{'id': 2, 'title': 'zz', 'text': 'bcd'}
]
)
reload_module(filters)
class AttributeModel(models.Model):
label = models.CharField(max_length=32)
class SearchFilterModelM2M(models.Model):
title = models.CharField(max_length=20)
text = models.CharField(max_length=100)
attributes = models.ManyToManyField(AttributeModel)
class SearchFilterM2MSerializer(serializers.ModelSerializer):
class Meta:
model = SearchFilterModelM2M
class SearchFilterM2MTests(TestCase):
def setUp(self):
        # Sequence of title/text follows the same pattern as above; only the
        # 'zz' row is linked to all three attributes:
        #
        # z abc
        # zz bcd [1, 2, 3]
        # zzz cde
        # ...
for idx in range(3):
label = 'w' * (idx + 1)
            AttributeModel(label=label).save()
for idx in range(10):
title = 'z' * (idx + 1)
text = (
chr(idx + ord('a')) +
chr(idx + ord('b')) +
chr(idx + ord('c'))
)
SearchFilterModelM2M(title=title, text=text).save()
SearchFilterModelM2M.objects.get(title='zz').attributes.add(1, 2, 3)
def test_m2m_search(self):
class SearchListView(generics.ListAPIView):
queryset = SearchFilterModelM2M.objects.all()
serializer_class = SearchFilterM2MSerializer
filter_backends = (filters.SearchFilter,)
search_fields = ('=title', 'text', 'attributes__label')
view = SearchListView.as_view()
request = factory.get('/', {'search': 'zz'})
response = view(request)
self.assertEqual(len(response.data), 1)
class OrderingFilterModel(models.Model):
title = models.CharField(max_length=20)
text = models.CharField(max_length=100)
class OrderingFilterRelatedModel(models.Model):
related_object = models.ForeignKey(OrderingFilterModel,
related_name="relateds")
class OrderingFilterSerializer(serializers.ModelSerializer):
class Meta:
model = OrderingFilterModel
class DjangoFilterOrderingModel(models.Model):
date = models.DateField()
text = models.CharField(max_length=10)
class Meta:
ordering = ['-date']
class DjangoFilterOrderingSerializer(serializers.ModelSerializer):
class Meta:
model = DjangoFilterOrderingModel
class DjangoFilterOrderingTests(TestCase):
def setUp(self):
data = [{
'date': datetime.date(2012, 10, 8),
'text': 'abc'
}, {
'date': datetime.date(2013, 10, 8),
'text': 'bcd'
}, {
'date': datetime.date(2014, 10, 8),
'text': 'cde'
}]
for d in data:
DjangoFilterOrderingModel.objects.create(**d)
@unittest.skipUnless(django_filters, 'django-filter not installed')
def test_default_ordering(self):
class DjangoFilterOrderingView(generics.ListAPIView):
serializer_class = DjangoFilterOrderingSerializer
queryset = DjangoFilterOrderingModel.objects.all()
filter_backends = (filters.DjangoFilterBackend,)
filter_fields = ['text']
ordering = ('-date',)
view = DjangoFilterOrderingView.as_view()
request = factory.get('/')
response = view(request)
self.assertEqual(
response.data,
[
{'id': 3, 'date': '2014-10-08', 'text': 'cde'},
{'id': 2, 'date': '2013-10-08', 'text': 'bcd'},
{'id': 1, 'date': '2012-10-08', 'text': 'abc'}
]
)
class OrderingFilterTests(TestCase):
def setUp(self):
# Sequence of title/text is:
#
# zyx abc
# yxw bcd
# xwv cde
for idx in range(3):
title = (
chr(ord('z') - idx) +
chr(ord('y') - idx) +
chr(ord('x') - idx)
)
text = (
chr(idx + ord('a')) +
chr(idx + ord('b')) +
chr(idx + ord('c'))
)
OrderingFilterModel(title=title, text=text).save()
def test_ordering(self):
class OrderingListView(generics.ListAPIView):
queryset = OrderingFilterModel.objects.all()
serializer_class = OrderingFilterSerializer
filter_backends = (filters.OrderingFilter,)
ordering = ('title',)
ordering_fields = ('text',)
view = OrderingListView.as_view()
request = factory.get('/', {'ordering': 'text'})
response = view(request)
self.assertEqual(
response.data,
[
{'id': 1, 'title': 'zyx', 'text': 'abc'},
{'id': 2, 'title': 'yxw', 'text': 'bcd'},
{'id': 3, 'title': 'xwv', 'text': 'cde'},
]
)
def test_reverse_ordering(self):
class OrderingListView(generics.ListAPIView):
queryset = OrderingFilterModel.objects.all()
serializer_class = OrderingFilterSerializer
filter_backends = (filters.OrderingFilter,)
ordering = ('title',)
ordering_fields = ('text',)
view = OrderingListView.as_view()
request = factory.get('/', {'ordering': '-text'})
response = view(request)
self.assertEqual(
response.data,
[
{'id': 3, 'title': 'xwv', 'text': 'cde'},
{'id': 2, 'title': 'yxw', 'text': 'bcd'},
{'id': 1, 'title': 'zyx', 'text': 'abc'},
]
)
def test_incorrectfield_ordering(self):
class OrderingListView(generics.ListAPIView):
queryset = OrderingFilterModel.objects.all()
serializer_class = OrderingFilterSerializer
filter_backends = (filters.OrderingFilter,)
ordering = ('title',)
ordering_fields = ('text',)
view = OrderingListView.as_view()
request = factory.get('/', {'ordering': 'foobar'})
response = view(request)
self.assertEqual(
response.data,
[
{'id': 3, 'title': 'xwv', 'text': 'cde'},
{'id': 2, 'title': 'yxw', 'text': 'bcd'},
{'id': 1, 'title': 'zyx', 'text': 'abc'},
]
)
def test_default_ordering(self):
class OrderingListView(generics.ListAPIView):
queryset = OrderingFilterModel.objects.all()
serializer_class = OrderingFilterSerializer
filter_backends = (filters.OrderingFilter,)
ordering = ('title',)
            ordering_fields = ('text',)
view = OrderingListView.as_view()
request = factory.get('')
response = view(request)
self.assertEqual(
response.data,
[
{'id': 3, 'title': 'xwv', 'text': 'cde'},
{'id': 2, 'title': 'yxw', 'text': 'bcd'},
{'id': 1, 'title': 'zyx', 'text': 'abc'},
]
)
def test_default_ordering_using_string(self):
class OrderingListView(generics.ListAPIView):
queryset = OrderingFilterModel.objects.all()
serializer_class = OrderingFilterSerializer
filter_backends = (filters.OrderingFilter,)
ordering = 'title'
ordering_fields = ('text',)
view = OrderingListView.as_view()
request = factory.get('')
response = view(request)
self.assertEqual(
response.data,
[
{'id': 3, 'title': 'xwv', 'text': 'cde'},
{'id': 2, 'title': 'yxw', 'text': 'bcd'},
{'id': 1, 'title': 'zyx', 'text': 'abc'},
]
)
def test_ordering_by_aggregate_field(self):
# create some related models to aggregate order by
num_objs = [2, 5, 3]
for obj, num_relateds in zip(OrderingFilterModel.objects.all(),
num_objs):
for _ in range(num_relateds):
new_related = OrderingFilterRelatedModel(
related_object=obj
)
new_related.save()
class OrderingListView(generics.ListAPIView):
serializer_class = OrderingFilterSerializer
filter_backends = (filters.OrderingFilter,)
ordering = 'title'
ordering_fields = '__all__'
queryset = OrderingFilterModel.objects.all().annotate(
models.Count("relateds"))
view = OrderingListView.as_view()
request = factory.get('/', {'ordering': 'relateds__count'})
response = view(request)
self.assertEqual(
response.data,
[
{'id': 1, 'title': 'zyx', 'text': 'abc'},
{'id': 3, 'title': 'xwv', 'text': 'cde'},
{'id': 2, 'title': 'yxw', 'text': 'bcd'},
]
)
def test_ordering_with_nonstandard_ordering_param(self):
with override_settings(REST_FRAMEWORK={'ORDERING_PARAM': 'order'}):
reload_module(filters)
class OrderingListView(generics.ListAPIView):
queryset = OrderingFilterModel.objects.all()
serializer_class = OrderingFilterSerializer
filter_backends = (filters.OrderingFilter,)
ordering = ('title',)
ordering_fields = ('text',)
view = OrderingListView.as_view()
request = factory.get('/', {'order': 'text'})
response = view(request)
self.assertEqual(
response.data,
[
{'id': 1, 'title': 'zyx', 'text': 'abc'},
{'id': 2, 'title': 'yxw', 'text': 'bcd'},
{'id': 3, 'title': 'xwv', 'text': 'cde'},
]
)
reload_module(filters)
class SensitiveOrderingFilterModel(models.Model):
username = models.CharField(max_length=20)
password = models.CharField(max_length=100)
# Three different styles of serializer.
# All should allow ordering by username, but not by password.
class SensitiveDataSerializer1(serializers.ModelSerializer):
username = serializers.CharField()
class Meta:
model = SensitiveOrderingFilterModel
fields = ('id', 'username')
class SensitiveDataSerializer2(serializers.ModelSerializer):
username = serializers.CharField()
password = serializers.CharField(write_only=True)
class Meta:
model = SensitiveOrderingFilterModel
fields = ('id', 'username', 'password')
class SensitiveDataSerializer3(serializers.ModelSerializer):
user = serializers.CharField(source='username')
class Meta:
model = SensitiveOrderingFilterModel
fields = ('id', 'user')
class SensitiveOrderingFilterTests(TestCase):
def setUp(self):
for idx in range(3):
username = {0: 'userA', 1: 'userB', 2: 'userC'}[idx]
password = {0: 'passA', 1: 'passC', 2: 'passB'}[idx]
SensitiveOrderingFilterModel(username=username, password=password).save()
def test_order_by_serializer_fields(self):
for serializer_cls in [
SensitiveDataSerializer1,
SensitiveDataSerializer2,
SensitiveDataSerializer3
]:
class OrderingListView(generics.ListAPIView):
queryset = SensitiveOrderingFilterModel.objects.all().order_by('username')
filter_backends = (filters.OrderingFilter,)
serializer_class = serializer_cls
view = OrderingListView.as_view()
request = factory.get('/', {'ordering': '-username'})
response = view(request)
if serializer_cls == SensitiveDataSerializer3:
username_field = 'user'
else:
username_field = 'username'
# Note: Inverse username ordering correctly applied.
self.assertEqual(
response.data,
[
{'id': 3, username_field: 'userC'},
{'id': 2, username_field: 'userB'},
{'id': 1, username_field: 'userA'},
]
)
def test_cannot_order_by_non_serializer_fields(self):
for serializer_cls in [
SensitiveDataSerializer1,
SensitiveDataSerializer2,
SensitiveDataSerializer3
]:
class OrderingListView(generics.ListAPIView):
queryset = SensitiveOrderingFilterModel.objects.all().order_by('username')
filter_backends = (filters.OrderingFilter,)
serializer_class = serializer_cls
view = OrderingListView.as_view()
request = factory.get('/', {'ordering': 'password'})
response = view(request)
if serializer_cls == SensitiveDataSerializer3:
username_field = 'user'
else:
username_field = 'username'
# Note: The passwords are not in order. Default ordering is used.
self.assertEqual(
response.data,
[
                    {'id': 1, username_field: 'userA'}, # PassA
                    {'id': 2, username_field: 'userB'}, # PassC
                    {'id': 3, username_field: 'userC'}, # PassB
]
)
| bsd-2-clause |
terbolous/SickRage | lib/github/RepositoryKey.py | 72 | 5483 | # -*- coding: utf-8 -*-
# ########################## Copyrights and license ############################
# #
# Copyright 2012 Vincent Jacques <vincent@vincent-jacques.net> #
# Copyright 2012 Zearin <zearin@gonk.net> #
# Copyright 2013 AKFish <akfish@gmail.com> #
# Copyright 2013 Srijan Choudhary <srijan4@gmail.com> #
# Copyright 2013 Vincent Jacques <vincent@vincent-jacques.net> #
# Copyright 2013 martinqt <m.ki2@laposte.net> #
# #
# This file is part of PyGithub. http://jacquev6.github.com/PyGithub/ #
# #
# PyGithub is free software: you can redistribute it and/or modify it under #
# the terms of the GNU Lesser General Public License as published by the Free #
# Software Foundation, either version 3 of the License, or (at your option) #
# any later version. #
# #
# PyGithub is distributed in the hope that it will be useful, but WITHOUT ANY #
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS #
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more #
# details. #
# #
# You should have received a copy of the GNU Lesser General Public License #
# along with PyGithub. If not, see <http://www.gnu.org/licenses/>. #
# #
# ##############################################################################
import github.GithubObject
class RepositoryKey(github.GithubObject.CompletableGithubObject):
"""
This class represents RepositoryKeys. The reference can be found here http://developer.github.com/v3/repos/keys/
"""
def __init__(self, requester, headers, attributes, completed, repoUrl):
github.GithubObject.CompletableGithubObject.__init__(self, requester, headers, attributes, completed)
self.__repoUrl = repoUrl
@property
def __customUrl(self):
return self.__repoUrl + "/keys/" + str(self.id)
@property
def id(self):
"""
:type: integer
"""
self._completeIfNotSet(self._id)
return self._id.value
@property
def key(self):
"""
:type: string
"""
self._completeIfNotSet(self._key)
return self._key.value
@property
def title(self):
"""
:type: string
"""
self._completeIfNotSet(self._title)
return self._title.value
@property
def url(self):
"""
:type: string
"""
self._completeIfNotSet(self._url)
return self._url.value
@property
def verified(self):
"""
:type: bool
"""
self._completeIfNotSet(self._verified)
return self._verified.value
def delete(self):
"""
:calls: `DELETE /repos/:owner/:repo/keys/:id <http://developer.github.com/v3/repos/keys>`_
:rtype: None
"""
headers, data = self._requester.requestJsonAndCheck(
"DELETE",
self.__customUrl
)
def edit(self, title=github.GithubObject.NotSet, key=github.GithubObject.NotSet):
"""
:calls: `PATCH /repos/:owner/:repo/keys/:id <http://developer.github.com/v3/repos/keys>`_
:param title: string
:param key: string
:rtype: None
"""
assert title is github.GithubObject.NotSet or isinstance(title, (str, unicode)), title
assert key is github.GithubObject.NotSet or isinstance(key, (str, unicode)), key
post_parameters = dict()
if title is not github.GithubObject.NotSet:
post_parameters["title"] = title
if key is not github.GithubObject.NotSet:
post_parameters["key"] = key
headers, data = self._requester.requestJsonAndCheck(
"PATCH",
self.__customUrl,
input=post_parameters
)
self._useAttributes(data)
def _initAttributes(self):
self._id = github.GithubObject.NotSet
self._key = github.GithubObject.NotSet
self._title = github.GithubObject.NotSet
self._url = github.GithubObject.NotSet
self._verified = github.GithubObject.NotSet
def _useAttributes(self, attributes):
if "id" in attributes: # pragma no branch
self._id = self._makeIntAttribute(attributes["id"])
if "key" in attributes: # pragma no branch
self._key = self._makeStringAttribute(attributes["key"])
if "title" in attributes: # pragma no branch
self._title = self._makeStringAttribute(attributes["title"])
if "url" in attributes: # pragma no branch
self._url = self._makeStringAttribute(attributes["url"])
if "verified" in attributes: # pragma no branch
self._verified = self._makeBoolAttribute(attributes["verified"])
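# Illustrative usage sketch (not part of PyGithub itself; the repository name
# and key id below are hypothetical):
#
#   key = gh.get_repo("owner/repo").get_key(42)
#   key.edit(title="deploy key (rotated)")
#   key.delete()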
| gpl-3.0 |
apanju/GMIO_Odoo | addons/website_event/tests/__init__.py | 413 | 1072 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Business Applications
# Copyright (C) 2012-TODAY OpenERP S.A. <http://www.openerp.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import test_ui
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
vmax-feihu/hue | desktop/core/ext-py/Pygments-1.3.1/pygments/styles/vim.py | 75 | 1976 | # -*- coding: utf-8 -*-
"""
pygments.styles.vim
~~~~~~~~~~~~~~~~~~~
A highlighting style for Pygments, inspired by vim.
:copyright: Copyright 2006-2010 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.style import Style
from pygments.token import Keyword, Name, Comment, String, Error, \
Number, Operator, Generic, Whitespace, Token
class VimStyle(Style):
"""
Styles somewhat like vim 7.0
"""
background_color = "#000000"
highlight_color = "#222222"
default_style = "#cccccc"
styles = {
Token: "#cccccc",
Whitespace: "",
Comment: "#000080",
Comment.Preproc: "",
Comment.Special: "bold #cd0000",
Keyword: "#cdcd00",
Keyword.Declaration: "#00cd00",
Keyword.Namespace: "#cd00cd",
Keyword.Pseudo: "",
Keyword.Type: "#00cd00",
Operator: "#3399cc",
Operator.Word: "#cdcd00",
Name: "",
Name.Class: "#00cdcd",
Name.Builtin: "#cd00cd",
Name.Exception: "bold #666699",
Name.Variable: "#00cdcd",
String: "#cd0000",
Number: "#cd00cd",
Generic.Heading: "bold #000080",
Generic.Subheading: "bold #800080",
Generic.Deleted: "#cd0000",
Generic.Inserted: "#00cd00",
Generic.Error: "#FF0000",
Generic.Emph: "italic",
Generic.Strong: "bold",
Generic.Prompt: "bold #000080",
Generic.Output: "#888",
Generic.Traceback: "#04D",
Error: "border:#FF0000"
}
| apache-2.0 |
uclouvain/OSIS-Louvain | attribution/models/attribution_new.py | 1 | 4493 | ##############################################################################
#
# OSIS stands for Open Student Information System. It's an application
# designed to manage the core business of higher education institutions,
# such as universities, faculties, institutes and professional schools.
# The core business involves the administration of students, teachers,
# courses, programs and so on.
#
# Copyright (C) 2015-2019 Université catholique de Louvain (http://www.uclouvain.be)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# A copy of this license - GNU General Public License - is available
# at the root of the source code of this program. If not,
# see http://www.gnu.org/licenses/.
#
##############################################################################
from django.contrib import admin
from django.db import models
from django.utils.translation import gettext_lazy as _
from attribution.models.enums.function import Functions
from base.models.utils.utils import filter_with_list_or_object
class AttributionNewAdmin(admin.ModelAdmin):
list_display = ('tutor', 'score_responsible', 'function', 'learning_container_year', 'start_year', 'end_year',
'changed', 'substitute')
list_filter = ('learning_container_year__academic_year', 'score_responsible')
fieldsets = ((None, {'fields': ('learning_container_year', 'tutor', 'function', 'score_responsible',
'start_year', 'end_year', 'substitute')}),)
raw_id_fields = ('learning_container_year', 'tutor', 'substitute')
search_fields = ['tutor__person__first_name', 'tutor__person__last_name', 'learning_container_year__acronym',
'tutor__person__global_id', 'function']
actions = ['publish_attribution_to_portal']
def publish_attribution_to_portal(self, request, queryset):
from attribution.business import attribution_json
global_ids = list(queryset.values_list('tutor__person__global_id', flat=True))
return attribution_json.publish_to_portal(global_ids)
publish_attribution_to_portal.short_description = _("Publish attribution to portal")
class AttributionNew(models.Model):
external_id = models.CharField(max_length=100, blank=True, null=True, db_index=True)
changed = models.DateTimeField(null=True, auto_now=True)
learning_container_year = models.ForeignKey('base.LearningContainerYear', on_delete=models.CASCADE)
tutor = models.ForeignKey('base.Tutor', on_delete=models.CASCADE)
function = models.CharField(max_length=35, choices=Functions.choices(), db_index=True, verbose_name=_("Function"))
start_date = models.DateField(blank=True, null=True)
end_date = models.DateField(blank=True, null=True)
start_year = models.IntegerField(blank=True, null=True, verbose_name=_("Start"))
end_year = models.IntegerField(blank=True, null=True)
score_responsible = models.BooleanField(default=False)
substitute = models.ForeignKey('base.Person', blank=True, null=True, on_delete=models.CASCADE)
def __str__(self):
return u"%s - %s" % (self.tutor.person, self.function)
@property
def duration(self):
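        # e.g. start_year=2015, end_year=2017 gives duration == 3, since the
        # span is inclusive of both years.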
if self.start_year and self.end_year:
return (self.end_year - self.start_year) + 1
return None
def search(*args, **kwargs):
qs = AttributionNew.objects.all()
if "learning_container_year" in kwargs:
qs = filter_with_list_or_object('learning_container_year', AttributionNew, **kwargs)
if "tutor" in kwargs:
qs = qs.filter(tutor=kwargs['tutor'])
if "score_responsible" in kwargs:
qs = qs.filter(score_responsible=kwargs['score_responsible'])
if "global_id" in kwargs:
if isinstance(kwargs['global_id'], list):
qs = qs.filter(tutor__person__global_id__in=kwargs['global_id'])
else:
qs = qs.filter(tutor__person__global_id=kwargs['global_id'])
return qs.select_related('tutor__person', 'learning_container_year')
| agpl-3.0 |
KylinUI/android_kernel_samsung_hlte | tools/perf/scripts/python/check-perf-trace.py | 11214 | 2503 | # perf script event handlers, generated by perf script -g python
# (c) 2010, Tom Zanussi <tzanussi@gmail.com>
# Licensed under the terms of the GNU GPL License version 2
#
# This script tests basic functionality such as flag and symbol
# strings, common_xxx() calls back into perf, begin, end, unhandled
# events, etc. Basically, if this script runs successfully and
# displays expected results, Python scripting support should be ok.
import os
import sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from Core import *
from perf_trace_context import *
unhandled = autodict()
def trace_begin():
print "trace_begin"
pass
def trace_end():
print_unhandled()
def irq__softirq_entry(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
vec):
print_header(event_name, common_cpu, common_secs, common_nsecs,
common_pid, common_comm)
print_uncommon(context)
print "vec=%s\n" % \
(symbol_str("irq__softirq_entry", "vec", vec)),
def kmem__kmalloc(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
call_site, ptr, bytes_req, bytes_alloc,
gfp_flags):
print_header(event_name, common_cpu, common_secs, common_nsecs,
common_pid, common_comm)
print_uncommon(context)
print "call_site=%u, ptr=%u, bytes_req=%u, " \
"bytes_alloc=%u, gfp_flags=%s\n" % \
(call_site, ptr, bytes_req, bytes_alloc,
flag_str("kmem__kmalloc", "gfp_flags", gfp_flags)),
def trace_unhandled(event_name, context, event_fields_dict):
try:
unhandled[event_name] += 1
except TypeError:
unhandled[event_name] = 1
def print_header(event_name, cpu, secs, nsecs, pid, comm):
print "%-20s %5u %05u.%09u %8u %-20s " % \
(event_name, cpu, secs, nsecs, pid, comm),
# print trace fields not included in handler args
def print_uncommon(context):
print "common_preempt_count=%d, common_flags=%s, common_lock_depth=%d, " \
% (common_pc(context), trace_flag_str(common_flags(context)), \
common_lock_depth(context))
def print_unhandled():
keys = unhandled.keys()
if not keys:
return
print "\nunhandled events:\n\n",
print "%-40s %10s\n" % ("event", "count"),
print "%-40s %10s\n" % ("----------------------------------------", \
"-----------"),
for event_name in keys:
print "%-40s %10d\n" % (event_name, unhandled[event_name])
| gpl-2.0 |
wijjo/cmdo | cmdo/publish_text.py | 1 | 22377 | #===============================================================================
#===============================================================================
# publish_text - text documentation generator for Cmdo
#
# Text publisher (simplistic)
#
# Note: You will probably get better results by publishing as HTML and then
# filtering through lynx or w3m, i.e. using the -dump option.
#
# Author Steve Cooper steve@wijjo.com
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#===============================================================================
import sys
import math
import inspect
import text_utility
from urlparse import urlparse
#===============================================================================
# Tunable style parameters
#===============================================================================
class Frame(object):
def __init__(self, tl, t, tr, l, r, bl, b, br):
self.tl = tl
self.t = t
self.tr = tr
self.l = l
self.r = r
self.bl = bl
self.b = b
self.br = br
if (self.tl or self.tr) and not self.t:
self.t = ' '
if (self.bl or self.br) and not self.b:
self.b = ' '
def build(self, sIn):
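        # Compose the framed middle line first, then tile the top/bottom fill
        # strings across the remaining width, trimming each tiled run to the
        # exact number of columns needed.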
sm = self.l + sIn + self.r
ntc = len(sm) - (len(self.tl) + len(self.tr))
nbc = len(sm) - (len(self.bl) + len(self.br))
st = self.tl
if self.t and ntc > 0:
st += ((self.t * ((ntc + len(self.t) - 1) / len(self.t)))[:ntc])
st += self.tr
sb = self.bl
if self.b and nbc > 0:
sb += ((self.b * ((nbc + len(self.b) - 1) / len(self.b)))[:nbc])
sb += self.br
sOut = st
if sOut:
sOut += '\n'
sOut += sm
if sb:
sOut += '\n'
sOut += sb
return sOut
# Heading frames. Last one is used if level >= length of list.
framesHeading = [
Frame('+', '=', '+', '| ', ' |', '+', '=', '+'),
Frame('' , '-', '' , ' ' , ' ' , '' , '-', '' ),
Frame('' , '' , '' , '' , '' , '' , '-', '' ),
Frame('' , '' , '' , '= ', ' =', '' , '' , '' ),
]
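# For example, the first heading frame above renders Frame.build('Title') as:
#
#   +=======+
#   | Title |
#   +=======+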
# How numbered list items are prefixed.
# The format is applied twice: first with the maximum digit count, then with
# the item number.
formatNumberedItem = '%%%dd: '
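# e.g. formatNumberedItem % 2 yields '%2d: ', so item 7 renders as ' 7: '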
# What precedes a bullet list item
prefixBullet = '* '
# Indent size in characters
widthIndent = 2
# The block forms/types that produce text chunks that are accumulated until
# flushed by other block forms/types.
formsText = ['text', 'link']
# The string to repeat for table borders
borderTable = '- '
# Max width for wrapping
maxWidth = 80
#===============================================================================
class Publisher(object):
class Cell(object):
def __init__(self):
self.text = ''
class Row(object):
def __init__(self):
self.cells = []
def addCell(self):
self.cells.append(Publisher.Cell())
def lastCell(self):
assert len(self.cells) > 0
return self.cells[-1]
class Table(object):
def __init__(self, headers):
self.headers = headers
self.rows = []
if not self.headers:
self.headers = [] # headers should always be a list
def addRow(self):
self.rows.append(Publisher.Row())
def lastRow(self):
assert len(self.rows) > 0
return self.rows[-1]
class PendingTables(object):
def __init__(self):
self.tables = []
def addTable(self, headers):
self.tables.append(Publisher.Table(headers))
def lastTable(self):
assert len(self.tables) > 0
return self.tables[-1]
def popTable(self):
assert len(self.tables) > 0
return self.tables.pop()
#---------------------------------------------------------------------------
def __init__(self):
self.dumper = Dumper()
self.nNest = 0
self.nCur = [0]
self.nFlushes = 0
self.plaintextDoc = False
self.types = 'text/plain'
self.extension = '.txt'
# Table data is cached in raw form before flushing as plain text
# Handle nested tables (someday).
self.tablesPending = Publisher.PendingTables()
#---------------------------------------------------------------------------
def docBegin(self, context, text, style):
pass
#---------------------------------------------------------------------------
def docEnd(self, context):
# Add final linefeed, if not all plaintext.
if not self.plaintextDoc:
context.write('\n')
#---------------------------------------------------------------------------
def nodeBegin(self, context):
form = context.getProp('form', default = '')
context.setCache('traceLabel', 'nodeBegin:1')
self.nCur[self.nNest] += 1
self.nCur.append(0)
self.nNest += 1
# Work from cache, not node properties
context.cacheProp('text')
context.cacheProp('heading')
# Process by form
if form == 'block':
self.blockBegin(context)
elif form == 'list':
self.listBegin(context)
elif form == 'item':
self.itemBegin(context)
elif form == 'table':
self.tableBegin(context)
elif form == 'row':
self.rowBegin(context)
elif form == 'cell':
self.cellBegin(context)
elif form == 'plaintext':
self.plaintextBegin(context)
elif form == 'link':
self.linkBegin(context)
else:
self.otherBegin(context)
context.setCache('traceLabel', 'nodeBegin:2')
# Insert a gap whenever the block form changes
formPrev = context.getCache('formPrev', level = 1)
if formPrev != form:
#self.trace(context, '%s=>%s' % (formPrev, form))
if formPrev is not None:
self.setGapBefore(context, 1)
context.setCache('formPrev', form, level = 1)
# Consolidate text upward, if not at a form requiring flushing.
# Also consolidate table cell text as we go.
if not form or form in formsText:
if context.hasCache('text') and context.getDepth() > 1:
self.consolidateText(context)
#---------------------------------------------------------------------------
def nodeEnd(self, context):
form = context.getProp('form', default = '')
context.setCache('traceLabel', 'nodeEnd:1')
# Process by form
if form == 'block':
self.blockEnd(context)
elif form == 'list':
self.listEnd(context)
elif form == 'item':
self.itemEnd(context)
elif form == 'table':
self.tableEnd(context)
elif form == 'row':
self.rowEnd(context)
elif form == 'cell':
self.cellEnd(context)
elif form == 'plaintext':
self.plaintextEnd(context)
elif form == 'link':
self.linkEnd(context)
else:
self.otherEnd(context)
# Flush all pending text if the current node has text ready to go
if context.hasCache('text') or context.hasCache('heading'):
if self.plaintextDoc and form and form != 'plaintext':
self.plaintextDoc = False
self.flushPending(context)
# Flush pending bottom border
borderBottom = context.getCache('borderBottom')
if borderBottom:
indent = context.getCache('indent', '', inherit = True)
context.write('%s%s\n' % (indent, borderBottom))
context.setCache('borderBottom', '')
context.setCache('traceLabel', 'nodeEnd:2')
self.nNest -= 1
self.nCur.pop()
#---------------------------------------------------------------------------
def blockBegin(self, context):
if context.hasProp('text') or context.hasProp('heading'):
self.setGapBefore(context, 1)
#---------------------------------------------------------------------------
def listBegin(self, context):
# Indent a nested list
if context.getProp('form', level = 1) == 'item':
context.appendCacheString('indent', ' ' * widthIndent)
else:
self.setGapBefore(context, 1)
#---------------------------------------------------------------------------
def itemBegin(self, context):
style = context.getProp('style', inherit = True)
if style == 'bullet':
context.setCache('prefix', prefixBullet)
elif style == 'number':
nItem = context.getCache('nItem', default = 0, level = 1)
context.setCache('nItem', nItem + 1, level = 1)
format = formatNumberedItem % numWidth(context.getBreadth())
context.setCache('prefix', format % (nItem + 1))
#---------------------------------------------------------------------------
def tableBegin(self, context):
self.tablesPending.addTable(context.getProp('headers'))
#---------------------------------------------------------------------------
def rowBegin(self, context):
self.tablesPending.lastTable().addRow()
#---------------------------------------------------------------------------
def cellBegin(self, context):
self.tablesPending.lastTable().lastRow().addCell()
#---------------------------------------------------------------------------
def plaintextBegin(self, context):
if self.nFlushes == 0 and not context.hasProp('heading', inherit = True):
self.plaintextDoc = True
else:
context.appendCacheString('indent', ' ' * widthIndent, inherit = True)
self.setGapBefore(context, 1)
#---------------------------------------------------------------------------
def linkBegin(self, context):
url = context.getProp('url')
label = context.getProp('text')
if not label:
context.setCache('text', url)
else:
if urlparse(url)[0] != '':
context.setCache('text', '%s (%s)' % (label, url))
else:
context.setCache('text', label)
#---------------------------------------------------------------------------
def otherBegin(self, context):
pass
#---------------------------------------------------------------------------
def blockEnd(self, context):
self.setGapBefore(context, 1, inherit = True)
#---------------------------------------------------------------------------
def listEnd(self, context):
self.setGapBefore(context, 1, inherit = True)
#---------------------------------------------------------------------------
def itemEnd(self, context):
pass
#---------------------------------------------------------------------------
def tableEnd(self, context):
table = self.tablesPending.popTable()
indent = 1 # number of spaces before header line or row
# First determine the maximum column widths (for headers and cells
widths = [len(header)+2 for header in table.headers]
for row in table.rows:
for iCell in range(len(row.cells)):
width = len(row.cells[iCell].text)
if iCell >= len(widths):
widths.append(1) # minimum width is 1
if width > widths[iCell]:
widths[iCell] = width
widthTotal = sum(widths)
# If it fits build a plaintext table with columns
if widthTotal <= maxWidth:
sLines = []
if table.headers:
iPos = 0
sLines.append('')
for iHeader in range(len(table.headers)):
gap = iPos - len(sLines[-1])
if gap > 0:
sLines[-1] += (' ' * gap)
sLines[-1] += '-%s-' % table.headers[iHeader]
iPos += (widths[iHeader] + 2)
for row in table.rows:
iPos = 0
sLines.append('')
for iCell in range(len(row.cells)):
gap = iPos - len(sLines[-1])
if gap > 0:
sLines[-1] += (' ' * gap)
sLines[-1] += row.cells[iCell].text
iPos += (widths[iCell] + 2)
# If it's too wide use indentation instead of columns
else:
sLines = []
widthTotal = 0 # Re-calculated below
for iHeader in range(len(table.headers)):
sLines.append('%s-%s-' % (' ' * iHeader, table.headers[iHeader]))
if len(sLines[-1]) > widthTotal:
widthTotal = len(sLines[-1])
for row in table.rows:
for iCell in range(len(row.cells)):
sLines.append('%s%s' % (' ' * iCell, row.cells[iCell].text))
if len(sLines[-1]) > widthTotal:
widthTotal = len(sLines[-1])
# Generate a border
lenBorder = (widthTotal + ((len(widths) - 1) * 2) + (2 * indent))
border = (borderTable * (((lenBorder - 1) / len(borderTable)) + 1))[:lenBorder]
# Make sure an alternating pattern (like "- ") doesn't look too short.
if border[-1] == ' ':
border += borderTable
context.setCache('borderTop' , border)
context.setCache('borderBottom', border)
context.setCache('text', '\n'.join(sLines))
context.setCache('plaintext', True)
context.setCache('indentInside', ' ' * indent)
self.setGapBefore(context, 1, inherit = True)
#---------------------------------------------------------------------------
def rowEnd(self, context):
pass
#---------------------------------------------------------------------------
def cellEnd(self, context):
if context.hasCache('text'):
text = context.takeCache('text').replace('\n', ' ')
self.tablesPending.lastTable().lastRow().lastCell().text += text
#---------------------------------------------------------------------------
def plaintextEnd(self, context):
pass
#---------------------------------------------------------------------------
def linkEnd(self, context):
pass
#---------------------------------------------------------------------------
def otherEnd(self, context):
pass
#---------------------------------------------------------------------------
def setGapBefore(self, context, gap, level = 0, inherit = False):
#self.trace(context, 'setGapBefore(%d, level=%d, inherit=%s)' % (gap, level, inherit))
gapCur = context.getCache('gapBefore', default = 0, level = level, inherit = inherit)
if gap > gapCur:
context.setCache('gapBefore', gap, level = level, inherit = inherit)
#---------------------------------------------------------------------------
# Consolidate text chunks in parent node unless this is a block that
# flushes text (done in nodeEnd()).
def consolidateText(self, context):
form = context.getProp('form', level = 1, inherit = True)
# Move the heading up
context.consolidateCacheText('heading', '\n')
# Consolidate text
if form == 'plaintext':
# Plaintext requires removal of excess whitespace before consolidation.
text = context.getCache('text', default = '')
text2 = '\n'.join([line for line in text_utility.textFormatPlain(text, '')])
context.setCache('text', text2)
context.consolidateCacheText('text', '\n\n')
else:
# Move normal text up
context.consolidateCacheText('text', '\n')
# Move the gap requirement up too
gap = context.takeCache('gapBefore')
if gap:
context.setCache('gapBefore', gap, level = 1)
#---------------------------------------------------------------------------
def flushPending(self, context):
# Make sure a non-plaintext document starts with at least one blank line
depth = context.getDepth()
if self.nFlushes == 0 and not self.plaintextDoc:
gap = 1
else:
gap = 0
self.nFlushes += 1
# Start from the topmost node with text and work down. There should be
# no remaining cached text, gaps, headings or borders when done.
for level in range(depth-1, -1, -1):
gapCur = context.takeCache('gapBefore', default = 0, level = level)
if gapCur > gap:
gap = gapCur
indent = context.getCache('indent', '', level = level, inherit = True)
indentInside = context.getCache('indentInside', '', level = level, inherit = True)
# Heading?
heading = context.takeCache('heading', level = level)
if heading:
hdLevel = len(context.getPropsStack('heading', level = level + 1))
frame = framesHeading[min(hdLevel, len(framesHeading) - 1)]
if frame.t and gap < 2:
gap = 2
elif gap < 1:
gap = 1
self._writeln(context, gap, indent, frame.build(heading))
gap = 1
# Top border?
borderTop = context.takeCache('borderTop', level = level)
if borderTop:
self._writeln(context, gap, indent, borderTop)
gap = 0
# Text?
indent += indentInside
text = context.takeCache('text', default = '', level = level)
if text:
if (context.getProp('form', level = level, inherit = True) == 'plaintext' or
context.getCache('plaintext')):
for line in text_utility.textFormatPlain(text, indent):
self._writeln(context, gap, '', line)
gap = 0
self.setGapBefore(context, 1, level = level, inherit = True)
else:
prefix = context.getCache('prefix', default = '', level = level, inherit = True)
for line in text_utility.textFormatWrapped(text, indent, prefix, maxWidth):
self._writeln(context, gap, '', line)
gap = 0
#---------------------------------------------------------------------------
def _writeln(self, context, gap, indent, s):
iStart = 0
while gap > 0 and iStart < len(s):
if s[iStart] == '\n':
iStart += 1
else:
context.write('\n')
gap -= 1
if indent:
context.write(indent)
context.write(s)
context.write('\n')
#---------------------------------------------------------------------------
def trace(self, context, s):
stackLines = []
fileName = None
stack = inspect.stack()
for stackItem in stack[1:]:
if fileName is None:
fileName = stackItem[1]
elif fileName != stackItem[1]:
break
stackLines.append(stackItem[2])
print ':::{%s(%s,%d,%d)}: %s [stack=%s]' % (
context.getCache('traceLabel'),
context.getProp('form'),
self.nNest,
self.nCur[self.nNest-1],
s,
','.join([str(n) for n in stackLines]),
)
#===============================================================================
# Utility functions and classes
#
# May be redundant to have these here, but trying to keep this module
# independently useful without cmdo.
#===============================================================================
def numWidth(n):
try:
return int(math.log10(abs(n))) + 1
except:
return 1
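# e.g. numWidth(7) == 1, numWidth(999) == 3, numWidth(-42) == 2; numWidth(0)
# falls back to 1 via the except clause.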
class Dumper:
@staticmethod
def abstractDict(d):
s = ''
keys = d.keys()
keys.sort()
sep = ''
for key in keys:
val = d[key]
if val is not None:
if text_utility.isString(val):
sVal = '"%s' % val[:20]
if len(sVal) < len(val):
sVal += '...(%d)' % len(val)
sVal += '"'
else:
sVal = str(val)
s += '%s%s=%s' % (sep, key, sVal)
if not sep:
sep = ' '
if not s:
s = '(empty)'
return s
def __init__(self):
self.indent = ''
def writeln(self, s):
print '%s%s' % (self.indent[:-2], s)
def dump(self, context):
form = context.getProp('form')
label = context.getCache('traceLabel')
self.writeln('>>>>> %s form=%s' % (label, form))
indent2 = ''
for level in context.levels:
self.writeln('%sOPTIONS: %s' % (indent2, Dumper.abstractDict(level.props)))
self.writeln('%sCACHE..: %s' % (indent2, Dumper.abstractDict(level.cache)))
indent2 += (' ' * widthIndent)
self.writeln('<<<<<')
def begin(self):
self.indent += (' ' * widthIndent)
def end(self):
self.indent = self.indent[:-2]
def message(self, s):
self.writeln('!!!%s' % s)
| gpl-2.0 |
thodoris/djangoPharma | djangoPharma/env/Lib/site-packages/debug_toolbar/panels/sql/utils.py | 11 | 1987 | from __future__ import absolute_import, unicode_literals
import re
import sqlparse
from django.utils.html import escape
from sqlparse import tokens as T
class BoldKeywordFilter:
"""sqlparse filter to bold SQL keywords"""
def process(self, stream):
"""Process the token stream"""
for token_type, value in stream:
is_keyword = token_type in T.Keyword
if is_keyword:
yield T.Text, '<strong>'
yield token_type, escape(value)
if is_keyword:
yield T.Text, '</strong>'
def reformat_sql(sql):
stack = sqlparse.engine.FilterStack()
stack.preprocess.append(BoldKeywordFilter()) # add our custom filter
stack.postprocess.append(sqlparse.filters.SerializerUnicode()) # tokens -> strings
return swap_fields(''.join(stack.run(sql)))
def swap_fields(sql):
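    # Field lists of 11 or more characters between SELECT and FROM (the eleven
    # dots in the pattern) are collapsed behind an expandable toggle; shorter
    # lists are left untouched.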
expr = r'SELECT</strong> (...........*?) <strong>FROM'
subs = (r'SELECT</strong> '
r'<a class="djDebugUncollapsed djDebugToggle" href="#">•••</a> '
r'<a class="djDebugCollapsed djDebugToggle" href="#">\1</a> '
r'<strong>FROM')
return re.sub(expr, subs, sql)
def contrasting_color_generator():
"""
    Generate contrasting colors by varying the most significant bit of RGB first,
and then vary subsequent bits systematically.
"""
def rgb_to_hex(rgb):
return '#%02x%02x%02x' % tuple(rgb)
triples = [(1, 0, 0), (0, 1, 0), (0, 0, 1),
(1, 1, 0), (0, 1, 1), (1, 0, 1), (1, 1, 1)]
n = 1 << 7
so_far = [[0, 0, 0]]
while True:
if n == 0: # This happens after 2**24 colours; presumably, never
yield "#000000" # black
copy_so_far = list(so_far)
for triple in triples:
for previous in copy_so_far:
rgb = [n * triple[i] + previous[i] for i in range(3)]
so_far.append(rgb)
yield rgb_to_hex(rgb)
n >>= 1
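# Derived example (not in the original module): the first colours yielded are
# '#800000', '#008000', '#000080', '#808000', '#008080', '#800080', '#808080',
# i.e. every combination of the high bit per channel before lower bits vary.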
| apache-2.0 |
j5shi/Thruster | pylibs/json/tests/test_encode_basestring_ascii.py | 17 | 2045 | from collections import OrderedDict
from json.tests import PyTest, CTest
CASES = [
(u'/\\"\ucafe\ubabe\uab98\ufcde\ubcda\uef4a\x08\x0c\n\r\t`1~!@#$%^&*()_+-=[]{}|;:\',./<>?', '"/\\\\\\"\\ucafe\\ubabe\\uab98\\ufcde\\ubcda\\uef4a\\b\\f\\n\\r\\t`1~!@#$%^&*()_+-=[]{}|;:\',./<>?"'),
(u'\u0123\u4567\u89ab\ucdef\uabcd\uef4a', '"\\u0123\\u4567\\u89ab\\ucdef\\uabcd\\uef4a"'),
(u'controls', '"controls"'),
(u'\x08\x0c\n\r\t', '"\\b\\f\\n\\r\\t"'),
(u'{"object with 1 member":["array with 1 element"]}', '"{\\"object with 1 member\\":[\\"array with 1 element\\"]}"'),
(u' s p a c e d ', '" s p a c e d "'),
(u'\U0001d120', '"\\ud834\\udd20"'),
(u'\u03b1\u03a9', '"\\u03b1\\u03a9"'),
('\xce\xb1\xce\xa9', '"\\u03b1\\u03a9"'),
(u'\u03b1\u03a9', '"\\u03b1\\u03a9"'),
('\xce\xb1\xce\xa9', '"\\u03b1\\u03a9"'),
(u'\u03b1\u03a9', '"\\u03b1\\u03a9"'),
(u'\u03b1\u03a9', '"\\u03b1\\u03a9"'),
(u"`1~!@#$%^&*()_+-={':[,]}|;.</>?", '"`1~!@#$%^&*()_+-={\':[,]}|;.</>?"'),
(u'\x08\x0c\n\r\t', '"\\b\\f\\n\\r\\t"'),
(u'\u0123\u4567\u89ab\ucdef\uabcd\uef4a', '"\\u0123\\u4567\\u89ab\\ucdef\\uabcd\\uef4a"'),
]
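# Note: the alpha/omega entries above appear both as unicode literals and as
# their UTF-8 encoded str form, so the Python 2 encoder is exercised on both
# input types; the apparent duplicates are part of the original test data.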
class TestEncodeBasestringAscii(object):
def test_encode_basestring_ascii(self):
fname = self.json.encoder.encode_basestring_ascii.__name__
for input_string, expect in CASES:
result = self.json.encoder.encode_basestring_ascii(input_string)
self.assertEqual(result, expect,
'{0!r} != {1!r} for {2}({3!r})'.format(
result, expect, fname, input_string))
def test_ordered_dict(self):
# See issue 6105
items = [('one', 1), ('two', 2), ('three', 3), ('four', 4), ('five', 5)]
s = self.dumps(OrderedDict(items))
self.assertEqual(s, '{"one": 1, "two": 2, "three": 3, "four": 4, "five": 5}')
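# The expected string relies on json.dumps iterating the OrderedDict in
# insertion order, which is the behaviour that issue 6105 (cited above) fixed.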
class TestPyEncodeBasestringAscii(TestEncodeBasestringAscii, PyTest): pass
class TestCEncodeBasestringAscii(TestEncodeBasestringAscii, CTest): pass
| gpl-2.0 |
sthirugn/robottelo | tests/foreman/ui/test_bookmark.py | 1 | 25041 | # -*- encoding: utf-8 -*-
"""Test classes for Bookmark tests
@Requirement: Bookmark
@CaseAutomation: Automated
@CaseLevel: Acceptance
@CaseComponent: UI
@TestType: Functional
@CaseImportance: High
@Upstream: No
"""
import random
from fauxfactory import gen_string
from nailgun import entities
from robottelo.constants import BOOKMARK_ENTITIES, STRING_TYPES
from robottelo.decorators import (
bz_bug_is_open,
run_in_one_thread,
skip_if_bug_open,
tier1,
tier2,
)
from robottelo.test import UITestCase
from robottelo.ui.base import UIError
from robottelo.ui.locators import common_locators, locators
from robottelo.ui.session import Session
@run_in_one_thread
class BookmarkTestCase(UITestCase):
"""Test for common Bookmark operations in UI"""
@classmethod
def setUpClass(cls):
"""Display all the bookmarks on the same page, create user and entities
for testing.
"""
super(BookmarkTestCase, cls).setUpClass()
cls.per_page = entities.Setting().search(
query={'search': 'name="entries_per_page"'})[0]
cls.saved_per_page = str(cls.per_page.value)
cls.per_page.value = '100000'
cls.per_page.update({'value'})
cls.entities = []
# Custom user for bookmark visibility testing
role = entities.Role().search(query={'search': 'name="Viewer"'})[0]
cls.custom_password = gen_string('alphanumeric')
cls.custom_user = entities.User(
role=[role],
password=cls.custom_password,
).create()
for entity in BOOKMARK_ENTITIES:
# Skip the entities, which can't be tested ATM (require framework
# update)
skip = entity.get('skip_for_ui')
if skip and (skip is True or bz_bug_is_open(skip)):
continue
cls.entities.append(entity)
# Some pages require at least 1 existing entity for search bar to
# appear. Creating 1 entity for such pages
if entity.get('setup'):
# entities with 1 organization
if entity['name'] in ('Hosts',):
entity['setup'](organization=cls.session_org).create()
# entities with no organizations
elif entity['name'] in (
'Compute_Profile',
'ConfigGroups',
'HardwareModel',
'PuppetClasses',
'UserGroup'):
entity['setup']().create()
# entities with multiple organizations
else:
entity['setup'](organization=[cls.session_org]).create()
@classmethod
def set_session_org(cls):
cls.session_org = entities.Organization(
name=gen_string('alphanumeric')).create()
@classmethod
def tearDownClass(cls):
"""Restore previous 'entries_per_page' value"""
cls.per_page.value = cls.saved_per_page
cls.per_page.update({'value'})
super(BookmarkTestCase, cls).tearDownClass()
@classmethod
def getOneEntity(cls):
"""Return 1 entity to test"""
return [cls.entities[random.randint(0, len(cls.entities)-1)]]
# CREATE TESTS
@tier1
def test_positive_create_bookmark_populate_auto(self):
"""Create a bookmark with auto-populating of the query
@id: 6a51a8d4-b641-4148-9ee8-a62f09aaa4af
@Steps:
1. Navigate to the entity page
2. Input a random text into the search field
3. Choose "bookmark this search" from the search drop-down menu
4. Input a random name for a bookmark name
5. Verify the query field is automatically populated and the public
option is checked
6. Click the create button
7. Verify that bookmark's name appears in the search dropdown
8. List the bookmarks (Navigate to Administer -> Bookmarks)
@Assert: No errors, Bookmark is displayed, controller matches the
entity the bookmark was created for
"""
for entity in self.getOneEntity():
with self.subTest(entity):
with Session(self.browser):
name = gen_string(random.choice(STRING_TYPES))
ui_lib = getattr(self, entity['name'].lower())
ui_lib.create_a_bookmark(
name=name,
public=True,
searchbox_query=gen_string(
random.choice(STRING_TYPES)
),
)
self.assertIsNotNone(
self.bookmark.search(entity['controller'], name))
@tier1
def test_positive_create_bookmark_populate_manual(self):
"""Create a bookmark with manually populating the name and query
@id: 6ab2221d-8fd5-484f-ac99-b856db9fa70a
@Steps:
1. Navigate to the entity page
2. Choose "bookmark this search" from the search drop-down menu
3. Input a random name for a bookmark name
4. Enter random text into Query field
5. Click the create button
6. Verify that bookmark's name appears in the search dropdown
7. List the bookmarks (Navigate to Administer -> Bookmarks)
@Assert: No errors, Bookmark is displayed, controller matches the
entity the bookmark was created for
"""
for entity in self.getOneEntity():
with self.subTest(entity):
with Session(self.browser):
name = gen_string(random.choice(STRING_TYPES))
ui_lib = getattr(self, entity['name'].lower())
ui_lib.create_a_bookmark(
name=name,
public=True,
query=gen_string(random.choice(STRING_TYPES)),
)
self.assertIsNotNone(
self.bookmark.search(entity['controller'], name))
@tier2
def test_positive_create_bookmark_public(self):
"""Create and check visibility of the (non)public bookmarks
@id: 93139529-7690-429b-83fe-3dcbac4f91dc
@Setup:
1. Create a non-admin user with 'viewer' role
@Steps:
1. Navigate to the entity page
2. Input a random text into the search field
3. Choose "bookmark this search" from the search drop-down menu
4. Input a random name for a bookmark name
5. Verify the query field is automatically populated and the public
option is checked
6. Click the create button
7. Choose "bookmark this search" from the search drop-down menu
8. Input a random name for a bookmark name
9. Verify the query field is automatically populated and the public
option is unchecked
10. Click the create button
11. Verify that bookmark's name appears in the search dropdown
12. List the bookmarks (Navigate to Administer -> Bookmarks)
13. Login as the pre-created user
14. Navigate to the entity
15. Click the dropdown
16. Verify that the non-public bookmark is not listed
@Assert: No errors, Bookmark is displayed, controller matches the
entity the bookmark was created for
@CaseLevel: Integration
"""
for entity in self.getOneEntity():
with self.subTest(entity):
with Session(self.browser):
name = gen_string(random.choice(STRING_TYPES))
ui_lib = getattr(self, entity['name'].lower())
ui_lib.create_a_bookmark(
name=name,
public=False,
searchbox_query=gen_string(
random.choice(STRING_TYPES)
),
)
self.assertIsNotNone(
self.bookmark.search(entity['controller'], name))
with Session(self.browser, user=self.custom_user.login,
password=self.custom_password):
self.assertIsNone(
self.bookmark.search(entity['controller'], name))
@skip_if_bug_open('bugzilla', 1326633)
@tier1
def test_negative_create_bookmark_no_name(self):
"""Create a bookmark with empty name
@id: ebb64459-a865-4029-bc7e-93e8d13dd877
@Steps:
1. Navigate to the entity page
2. Choose "bookmark this search" from the search drop-down menu
3. Input empty string for name
4. Enter random text into Query field
5. Click the create button
6. List the bookmarks (Navigate to Administer -> Bookmarks)
@Assert: Error notification - name cannot be empty, Bookmark is not
created (not listed)
"""
for entity in self.getOneEntity():
with self.subTest(entity):
with Session(self.browser) as session:
name = ''
ui_lib = getattr(self, entity['name'].lower())
ui_lib.create_a_bookmark(
name=name,
public=True,
query=gen_string(random.choice(STRING_TYPES)),
)
# Not sure what kind of validation will be added when
# BZ1326633 is fixed. Need to double check that when BZ is
# closed.
self.assertIsNotNone(
session.nav.wait_until_element(
common_locators['notif.error'])
)
@tier1
def test_negative_create_bookmark_no_query(self):
"""Create a bookmark with empty query
@id: 2c22ba18-a465-4977-8013-9336d1f648e8
@Steps:
1. Navigate to the entity page
2. Choose "bookmark this search" from the search drop-down menu
3. Enter random text into name field
4. Input empty string for search query
5. Click the create button
6. List the bookmarks (Navigate to Administer -> Bookmarks)
@Assert: Error notification - search query cannot be empty, Bookmark is
not created (not listed)
"""
for entity in self.getOneEntity():
with self.subTest(entity):
with Session(self.browser):
name = gen_string(random.choice(STRING_TYPES))
ui_lib = getattr(self, entity['name'].lower())
ui_lib.create_a_bookmark(
name=name,
public=True,
query='',
)
self.assertIsNone(
self.bookmark.search(entity['controller'], name))
@tier1
def test_negative_create_bookmark_same_name(self):
"""Create bookmarks with the same names
@id: 210c36b2-29bd-40d9-b120-16a1a031b20c
@Setup:
1. Create a bookmark of a random name
@Steps:
1. Navigate to the entity page
2. Choose "bookmark this search" from the search drop-down menu
3. Input the same name as the pre-created bookmark
4. Enter random text into Query field
5. Click the create button
6. List the bookmarks (Navigate to Administer -> Bookmarks)
@Assert: Error notification - name already taken, Bookmark is not
created (not listed)
"""
for entity in self.getOneEntity():
with self.subTest(entity):
with Session(self.browser):
name = gen_string(random.choice(STRING_TYPES))
ui_lib = getattr(self, entity['name'].lower())
for _ in range(2):
ui_lib.create_a_bookmark(
name=name,
public=True,
query=gen_string(random.choice(STRING_TYPES)),
)
self.bookmark.navigate_to_entity()
strategy, value = locators['bookmark.select_name']
bms = self.browser.find_elements(
strategy, value % (entity['controller'], name))
self.assertEqual(len(bms), 1)
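# Creating a bookmark under an already-taken name must leave exactly one
# (controller, name) match behind, which is what the find_elements count
# above verifies.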
# UPDATE TESTS
@tier1
def test_positive_update_bookmark_name(self):
"""Update and save a bookmark
@id: 095ba7c5-82bd-4ed3-ae6d-f6ba0ad7480c
@Setup:
1. Create a bookmark of a random name with random query
@Steps:
1. List the bookmarks (Navigate to Administer -> Bookmarks)
2. Click the pre-created bookmark
3. Edit the name
4. Submit
5. Navigate to the entity page
6. Click the search dropdown
@Assert: The new bookmark name is listed
"""
for entity in self.getOneEntity():
with self.subTest(entity):
with Session(self.browser):
name = gen_string(random.choice(STRING_TYPES))
query = gen_string(random.choice(STRING_TYPES))
ui_lib = getattr(self, entity['name'].lower())
ui_lib.create_a_bookmark(
name=name,
public=True,
query=query,
)
new_name = gen_string(random.choice(STRING_TYPES))
self.bookmark.update(
entity['controller'], name, new_name, query)
self.assertIsNotNone(
self.bookmark.search(entity['controller'], new_name))
@tier1
def test_negative_update_bookmark_name(self):
"""Update and save a bookmark with name already taken
@id: 3e74cf60-2863-4ca3-9440-7081547f3c4f
@Setup:
1. Create 2 bookmarks of random names with random query
@Steps:
1. List the bookmarks (Navigate to Administer -> Bookmarks)
2. Select the first pre-created bookmark
3. Edit the name to one of the other pre-created bookmarks
4. Submit
@Assert: Error - name already taken, bookmark not updated
"""
for entity in self.getOneEntity():
with self.subTest(entity):
with Session(self.browser):
bm1_name = gen_string(random.choice(STRING_TYPES))
bm2_name = gen_string(random.choice(STRING_TYPES))
ui_lib = getattr(self, entity['name'].lower())
for name in (bm1_name, bm2_name):
ui_lib.create_a_bookmark(
name=name,
public=True,
query=gen_string(random.choice(STRING_TYPES)),
)
self.bookmark.update(
entity['controller'],
bm2_name,
bm1_name,
gen_string(random.choice(STRING_TYPES)),
)
self.assertTrue(self.bookmark.wait_until_element(
common_locators['name_haserror']))
self.assertIsNotNone(
self.bookmark.search(entity['controller'], bm2_name))
@tier1
def test_negative_update_bookmark_name_empty(self):
"""Update and save a bookmark with an empty name
@id: 7d7f713d-e377-446e-a9e9-06364bcc25c0
@Setup:
1. Create a bookmark of a random name with random query
@Steps:
1. List the bookmarks (Navigate to Administer -> Bookmarks)
2. Click the pre-created bookmark
3. Delete the name
4. Submit
5. Navigate to the entity page
6. Click the search dropdown
@Assert: Error - name cannot be empty, bookmark not updated
"""
for entity in self.getOneEntity():
with self.subTest(entity):
with Session(self.browser):
name = gen_string(random.choice(STRING_TYPES))
query = gen_string(random.choice(STRING_TYPES))
ui_lib = getattr(self, entity['name'].lower())
ui_lib.create_a_bookmark(
name=name,
public=True,
query=query,
)
self.bookmark.update(
entity['controller'], name, '', query)
self.assertTrue(self.bookmark.wait_until_element(
common_locators['name_haserror']))
self.assertIsNotNone(
self.bookmark.search(entity['controller'], name))
@skip_if_bug_open('bugzilla', 1324484)
@tier1
def test_positive_update_bookmark_query(self):
"""Update and save a bookmark query
@id: 19c994f0-2567-47bb-8486-bc441602bc7a
@Setup:
1. Create a bookmark of a random name with random query
@Steps:
1. List the bookmarks (Navigate to Administer -> Bookmarks)
2. Click the pre-created bookmark
3. Edit the Search query field
4. Submit
5. Navigate to the entity page
6. Select the updated bookmark from the query
@Assert: The updated query is populated and submitted
"""
for entity in self.getOneEntity():
with self.subTest(entity):
with Session(self.browser):
name = gen_string(random.choice(STRING_TYPES))
ui_lib = getattr(self, entity['name'].lower())
ui_lib.create_a_bookmark(
name=name,
public=True,
query=gen_string(random.choice(STRING_TYPES)),
)
new_query = gen_string(random.choice(STRING_TYPES))
self.bookmark.update(
entity['controller'], name, new_query=new_query)
self.assertTrue(
self.bookmark.validate_field(
entity['controller'], name, 'query', new_query)
)
@skip_if_bug_open('bugzilla', 1324484)
@tier1
def test_negative_update_bookmark_query_empty(self):
"""Update and save a bookmark with an empty query
@id: 516b314b-7712-455a-b1d4-d09730acbec9
@Setup:
1. Create a bookmark of a random name with random query
@Steps:
1. List the bookmarks (Navigate to Administer -> Bookmarks)
2. Click the pre-created bookmark
3. Delete the search query
4. Submit
5. Navigate to the entity page
6. Click the search dropdown
@Assert: Error - search query cannot be empty, bookmark not updated
"""
for entity in self.getOneEntity():
with self.subTest(entity):
with Session(self.browser):
name = gen_string(random.choice(STRING_TYPES))
query = gen_string(random.choice(STRING_TYPES))
ui_lib = getattr(self, entity['name'].lower())
ui_lib.create_a_bookmark(
name=name,
public=True,
query=query,
)
self.bookmark.update(
entity['controller'], name, new_query='')
self.assertTrue(self.bookmark.wait_until_element(
common_locators['haserror']))
self.assertTrue(
self.bookmark.validate_field(
entity['controller'], name, 'query', query)
)
@tier2
def test_positive_update_bookmark_public(self):
"""Update and save a bookmark public state
@id: 63646c41-5441-4547-a4d0-744286122405
@Setup:
1. Create 2 bookmarks of a random name with random query, one public
and one private
2. Create a non-admin user with 'viewer' role
@Steps:
1. Login to Satellite server (establish a UI session) as
the pre-created user
2. Navigate to the entity
3. List the bookmarks by clicking the drop down menu
4. Verify that only the public bookmark is listed
5. Log out
6. Login to Satellite server (establish a UI session) as the admin
user
7. List the bookmarks (Navigate to Administer -> Bookmarks)
8. Click the public pre-created bookmark
9. Uncheck 'public'
10. Submit
11. Click the private pre-created bookmark
12. Check 'public'
13. Submit
14. Logout
15. Login to Satellite server (establish a UI session) as the
pre-created user
16. Navigate to the entity
17. List the bookmarks by clicking the drop down menu
@Assert: New public bookmark is listed, and the private
one is hidden
@CaseLevel: Integration
"""
with Session(self.browser):
bm1_name = gen_string(random.choice(STRING_TYPES))
bm1_entity = self.getOneEntity()[0]
bm2_name = gen_string(random.choice(STRING_TYPES))
bm2_entity = self.getOneEntity()[0]
bm1_page = getattr(self, bm1_entity['name'].lower())
bm1_page.create_a_bookmark(
name=bm1_name,
public=True,
query=gen_string('alphanumeric'),
)
bm2_page = getattr(self, bm2_entity['name'].lower())
bm2_page.create_a_bookmark(
name=bm2_name,
public=False,
query=gen_string('alphanumeric'),
)
with Session(self.browser, user=self.custom_user.login,
password=self.custom_password):
self.assertIsNotNone(
self.bookmark.search(bm1_entity['controller'], bm1_name))
self.assertIsNone(
self.bookmark.search(bm2_entity['controller'], bm2_name))
with Session(self.browser):
self.bookmark.update(
bm1_entity['controller'], bm1_name, new_public=False)
self.bookmark.update(
bm2_entity['controller'], bm2_name, new_public=True)
with Session(self.browser, user=self.custom_user.login,
password=self.custom_password):
self.assertIsNone(
self.bookmark.search(bm1_entity['controller'], bm1_name))
self.assertIsNotNone(
self.bookmark.search(bm2_entity['controller'], bm2_name))
# DELETE TESTS
@tier1
def test_positive_delete_bookmark(self):
"""Simple removal of a bookmark query
@id: 46c7cf47-7e86-4d81-ba07-4c2405801552
@Setup:
1. Create a bookmark of a random name with random query
@Steps:
1. List the bookmarks (Navigate to Administer -> Bookmarks)
2. Click Delete next to a pre-created bookmark
3. Verify the bookmark is no longer listed
@Assert: The bookmark is deleted
"""
for entity in self.entities:
with self.subTest(entity):
with Session(self.browser):
name = gen_string(random.choice(STRING_TYPES))
ui_lib = getattr(self, entity['name'].lower())
ui_lib.create_a_bookmark(
name=name,
public=True,
query=gen_string(random.choice(STRING_TYPES)),
)
self.assertIsNotNone(
self.bookmark.search(entity['controller'], name))
self.bookmark.delete(entity['controller'], name)
@tier2
def test_negative_delete_bookmark(self):
"""Simple removal of a bookmark query without permissions
@id: 1a94bf2b-bcc6-4663-b70d-e13244a0783b
@Setup:
1. Create a bookmark of a random name with random query
2. Create a non-admin user without destroy_bookmark role (e.g. viewer)
@Steps:
1. Login to Satellite server (establish a UI session) as a non-admin
user
2. List the bookmarks (Navigate to Administer -> Bookmarks)
@Assert: The delete buttons are not displayed
@CaseLevel: Integration
"""
bm = entities.Bookmark(
controller=self.getOneEntity()[0]['controller'],
public=True,
).create()
with Session(self.browser, user=self.custom_user.login,
password=self.custom_password):
with self.assertRaises(UIError):
self.bookmark.delete(bm.controller, bm.name)
| gpl-3.0 |
seaotterman/tensorflow | tensorflow/contrib/layers/python/layers/layers_test.py | 10 | 144946 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tf.contrib.layers."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import math
import numpy as np
from tensorflow.contrib import layers as layers_lib
from tensorflow.contrib.framework.python.ops import arg_scope
from tensorflow.contrib.framework.python.ops import variables
from tensorflow.contrib.layers.python.layers import layers as _layers
from tensorflow.contrib.layers.python.layers import regularizers
from tensorflow.contrib.losses.python.losses import loss_ops
from tensorflow.python.client import session
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors_impl
from tensorflow.python.framework import ops
from tensorflow.python.framework import random_seed
from tensorflow.python.framework import sparse_tensor
from tensorflow.python.framework import tensor_shape
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import gradients_impl
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn_ops
from tensorflow.python.ops import partitioned_variables
from tensorflow.python.ops import random_ops
from tensorflow.python.ops import state_ops
from tensorflow.python.ops import template
from tensorflow.python.ops import variable_scope
from tensorflow.python.ops import variables as variables_lib
from tensorflow.python.platform import test
class AvgPool2DTest(test.TestCase):
def testInvalidDataFormat(self):
height, width = 3, 6
images = np.random.uniform(size=(5, height, width, 3))
with self.assertRaisesRegexp(ValueError,
'data_format has to be either NCHW or NHWC.'):
_layers.avg_pool2d(images, [3, 3], data_format='CHWN')
def testCreateAvgPool(self):
height, width = 3, 6
images = np.random.uniform(size=(5, height, width, 3))
output = _layers.avg_pool2d(images, [3, 3])
self.assertEqual(output.op.name, 'AvgPool2D/AvgPool')
self.assertListEqual(output.get_shape().as_list(), [5, 1, 2, 3])
def testCreateAvgPoolNCHW(self):
height, width = 3, 6
images = np.random.uniform(size=(5, 2, height, width))
output = _layers.avg_pool2d(images, [3, 3], data_format='NCHW')
self.assertEqual(output.op.name, 'AvgPool2D/AvgPool')
self.assertListEqual(output.get_shape().as_list(), [5, 2, 1, 2])
def testCollectOutputs(self):
height, width = 3, 6
images = random_ops.random_uniform((5, height, width, 3), seed=1)
output = _layers.avg_pool2d(images, [3, 3], outputs_collections='outputs')
output_collected = ops.get_collection('outputs')[0]
self.assertEqual(output_collected.aliases, ['AvgPool2D'])
self.assertEqual(output_collected, output)
def testCreateSquareAvgPool(self):
height, width = 3, 6
images = random_ops.random_uniform((5, height, width, 3), seed=1)
output = _layers.avg_pool2d(images, 3)
self.assertEqual(output.op.name, 'AvgPool2D/AvgPool')
self.assertListEqual(output.get_shape().as_list(), [5, 1, 2, 3])
def testCreateAvgPoolWithScope(self):
height, width = 3, 6
images = random_ops.random_uniform((5, height, width, 3), seed=1)
output = _layers.avg_pool2d(images, [3, 3], scope='pool1')
self.assertEqual(output.op.name, 'pool1/AvgPool')
def testCreateAvgPoolWithSamePadding(self):
height, width = 3, 6
images = random_ops.random_uniform((5, height, width, 3), seed=1)
output = _layers.avg_pool2d(images, [3, 3], padding='SAME')
self.assertListEqual(output.get_shape().as_list(), [5, 2, 3, 3])
def testCreateAvgPoolWithSamePaddingNCHW(self):
height, width = 3, 6
images = random_ops.random_uniform((5, 3, height, width), seed=1)
output = _layers.avg_pool2d(
images, [3, 3], padding='SAME', data_format='NCHW')
self.assertListEqual(output.get_shape().as_list(), [5, 3, 2, 3])
def testCreateAvgPoolStrideWithSamePadding(self):
height, width = 3, 6
images = random_ops.random_uniform((5, height, width, 3), seed=1)
output = _layers.avg_pool2d(images, [3, 3], stride=1, padding='SAME')
self.assertListEqual(output.get_shape().as_list(), [5, height, width, 3])
def testGlobalAvgPool(self):
height, width = 3, 6
images = random_ops.random_uniform((5, height, width, 3), seed=1)
output = _layers.avg_pool2d(images, images.get_shape()[1:3], stride=1)
self.assertListEqual(output.get_shape().as_list(), [5, 1, 1, 3])
class PoolTest(test.TestCase):
def testCreatePool(self):
height, width = 3, 3
images = np.random.uniform(size=(5, height, width, 3))
output = _layers.pool(images, [3, 3], pooling_type='AVG')
self.assertEqual(output.op.name, 'avg_pool')
self.assertListEqual(output.get_shape().as_list(), [5, 1, 1, 3])
def testCreatePoolNCHW(self):
height, width = 3, 3
images = np.random.uniform(size=(5, 3, height, width))
output = _layers.pool(
images, [3, 3], pooling_type='AVG', data_format='NCHW')
self.assertEqual(output.op.name, 'avg_pool')
self.assertListEqual(output.get_shape().as_list(), [5, 3, 1, 1])
def testCollectOutputs(self):
height, width = 3, 3
images = random_ops.random_uniform((5, height, width, 3), seed=1)
output = _layers.pool(
images, [3, 3], pooling_type='AVG', outputs_collections='outputs')
output_collected = ops.get_collection('outputs')[0]
self.assertEqual(output_collected.aliases, ['avg_pool'])
self.assertEqual(output_collected, output)
def testCreateSquareAvgPool(self):
height, width = 3, 3
images = random_ops.random_uniform((5, height, width, 3), seed=1)
output = _layers.pool(images, 3, pooling_type='AVG')
self.assertEqual(output.op.name, 'avg_pool')
self.assertEqual(output.get_shape().as_list(), [5, 1, 1, 3])
def testCreateMaxPoolWithScope(self):
height, width = 3, 3
images = random_ops.random_uniform((5, height, width, 3), seed=1)
output = _layers.pool(images, [3, 3], pooling_type='MAX', scope='pool1')
self.assertEqual(output.op.name, 'pool1')
def testCreateMaxPoolWithSamePadding(self):
height, width = 3, 3
images = random_ops.random_uniform((5, height, width, 3), seed=1)
output = _layers.pool(images, [3, 3], pooling_type='MAX', padding='SAME')
self.assertEqual(output.get_shape().as_list(), [5, 3, 3, 3])
def testCreateAvgPoolStrideWithSamePadding(self):
height, width = 3, 3
images = random_ops.random_uniform((5, height, width, 3), seed=1)
output = _layers.pool(
images, [3, 3], stride=1, padding='SAME', pooling_type='AVG')
self.assertEqual(output.get_shape().as_list(), [5, height, width, 3])
def testGlobalAvgPool(self):
height, width = 3, 3
images = random_ops.random_uniform((5, height, width, 3), seed=1)
output = _layers.pool(
images, images.get_shape()[1:3], stride=1, pooling_type='AVG')
self.assertEqual(output.get_shape().as_list(), [5, 1, 1, 3])
def testAvgPoolWithStride(self):
height, width = 5, 8
images = random_ops.random_uniform((5, height, width, 3), seed=1)
output = _layers.pool(images, [2, 3], stride=[1, 2], pooling_type='AVG')
self.assertEqual(output.get_shape().as_list(), [5, 4, 3, 3])
def testAvgPoolWithDilation(self):
height, width = 5, 8
images = random_ops.random_uniform((5, height, width, 3), seed=1)
output = _layers.pool(
images, [2, 3], dilation_rate=[1, 2], pooling_type='AVG')
self.assertEqual(output.get_shape().as_list(), [5, 4, 4, 3])
def testAvgPoolWithDilationNCHW(self):
height, width = 5, 8
images = random_ops.random_uniform((5, 3, height, width), seed=1)
output = _layers.pool(
images, [2, 3],
dilation_rate=[1, 2],
pooling_type='AVG',
data_format='NCHW')
self.assertEqual(output.get_shape().as_list(), [5, 3, 4, 4])
class BiasAddTest(test.TestCase):
def testCreate(self):
height, width = 3, 3
with self.test_session():
images = np.random.uniform(size=(5, height, width, 3))
output = _layers.bias_add(images)
self.assertEqual(output.op.name, 'BiasAdd/BiasAdd')
self.assertListEqual(output.get_shape().as_list(), [5, height, width, 3])
def testCreateWithActivation(self):
height, width = 3, 3
with self.test_session():
images = random_ops.random_uniform((5, height, width, 3), seed=1)
output = _layers.bias_add(images, activation_fn=nn_ops.relu)
self.assertEqual(output.op.name, 'BiasAdd/Relu')
self.assertListEqual(output.get_shape().as_list(), [5, height, width, 3])
def testCreateDimensions(self):
dims = (2, 3, 4)
shape = [5, 2, 3, 4]
with self.test_session():
for d in dims:
input_shape = shape[:d]
inputs = random_ops.random_uniform(input_shape, seed=1)
output = _layers.bias_add(inputs)
self.assertListEqual(output.get_shape().as_list(), input_shape)
biases = variables.get_variables_by_name('biases')[-1]
self.assertListEqual(biases.get_shape().as_list(), [input_shape[-1]])
class ConvolutionTest(test.TestCase):
def testInvalidDataFormat(self):
height, width = 7, 9
with self.test_session():
images = random_ops.random_uniform((5, height, width, 3), seed=1)
with self.assertRaisesRegexp(ValueError, 'data_format'):
layers_lib.convolution2d(images, 32, 3, data_format='CHWN')
def testCreateConv(self):
height, width = 7, 9
with self.test_session():
images = np.random.uniform(size=(5, height, width, 4)).astype(np.float32)
output = layers_lib.convolution2d(images, 32, [3, 3])
self.assertEqual(output.op.name, 'Conv/Relu')
self.assertListEqual(output.get_shape().as_list(), [5, height, width, 32])
weights = variables.get_variables_by_name('weights')[0]
self.assertListEqual(weights.get_shape().as_list(), [3, 3, 4, 32])
biases = variables.get_variables_by_name('biases')[0]
self.assertListEqual(biases.get_shape().as_list(), [32])
def testCreateConvNCHW(self):
height, width = 7, 9
with self.test_session():
images = np.random.uniform(size=(5, 4, height, width)).astype(np.float32)
output = layers_lib.convolution2d(images, 32, [3, 3], data_format='NCHW')
self.assertEqual(output.op.name, 'Conv/Relu')
self.assertListEqual(output.get_shape().as_list(), [5, 32, height, width])
weights = variables.get_variables_by_name('weights')[0]
self.assertListEqual(weights.get_shape().as_list(), [3, 3, 4, 32])
biases = variables.get_variables_by_name('biases')[0]
self.assertListEqual(biases.get_shape().as_list(), [32])
def testCreateSquareConv(self):
height, width = 7, 9
with self.test_session():
images = random_ops.random_uniform((5, height, width, 3), seed=1)
output = layers_lib.convolution2d(images, 32, 3)
self.assertEqual(output.op.name, 'Conv/Relu')
self.assertListEqual(output.get_shape().as_list(), [5, height, width, 32])
def testCreateConvWithTensorShape(self):
height, width = 7, 9
with self.test_session():
images = random_ops.random_uniform((5, height, width, 3), seed=1)
output = layers_lib.convolution2d(images, 32, images.get_shape()[1:3])
self.assertEqual(output.op.name, 'Conv/Relu')
self.assertListEqual(output.get_shape().as_list(), [5, height, width, 32])
def testCreateFullyConv(self):
height, width = 7, 9
with self.test_session():
images = random_ops.random_uniform((5, height, width, 32), seed=1)
output = layers_lib.convolution2d(
images, 64, images.get_shape()[1:3], padding='VALID')
self.assertEqual(output.op.name, 'Conv/Relu')
self.assertListEqual(output.get_shape().as_list(), [5, 1, 1, 64])
biases = variables.get_variables_by_name('biases')[0]
self.assertListEqual(biases.get_shape().as_list(), [64])
def testFullyConvWithCustomGetter(self):
height, width = 7, 9
with self.test_session():
called = [0]
def custom_getter(getter, *args, **kwargs):
called[0] += 1
return getter(*args, **kwargs)
with variable_scope.variable_scope('test', custom_getter=custom_getter):
images = random_ops.random_uniform((5, height, width, 32), seed=1)
layers_lib.convolution2d(images, 64, images.get_shape()[1:3])
self.assertEqual(called[0], 2) # Custom getter called twice.
def testCreateVerticalConv(self):
height, width = 7, 9
with self.test_session():
images = random_ops.random_uniform((5, height, width, 4), seed=1)
output = layers_lib.convolution2d(images, 32, [3, 1])
self.assertEqual(output.op.name, 'Conv/Relu')
self.assertListEqual(output.get_shape().as_list(), [5, height, width, 32])
weights = variables.get_variables_by_name('weights')[0]
self.assertListEqual(weights.get_shape().as_list(), [3, 1, 4, 32])
biases = variables.get_variables_by_name('biases')[0]
self.assertListEqual(biases.get_shape().as_list(), [32])
def testCreateHorizontalConv(self):
height, width = 7, 9
with self.test_session():
images = random_ops.random_uniform((5, height, width, 4), seed=1)
output = layers_lib.convolution2d(images, 32, [1, 3])
self.assertEqual(output.op.name, 'Conv/Relu')
self.assertListEqual(output.get_shape().as_list(), [5, height, width, 32])
weights = variables.get_variables_by_name('weights')[0]
self.assertListEqual(weights.get_shape().as_list(), [1, 3, 4, 32])
def testCreateConvWithStride(self):
height, width = 6, 8
with self.test_session():
images = random_ops.random_uniform((5, height, width, 3), seed=1)
output = layers_lib.convolution2d(images, 32, [3, 3], stride=2)
self.assertEqual(output.op.name, 'Conv/Relu')
self.assertListEqual(output.get_shape().as_list(),
[5, height / 2, width / 2, 32])
def testCreateConvCreatesWeightsAndBiasesVars(self):
height, width = 7, 9
images = random_ops.random_uniform((5, height, width, 3), seed=1)
with self.test_session():
self.assertFalse(variables.get_variables('conv1/weights'))
self.assertFalse(variables.get_variables('conv1/biases'))
layers_lib.convolution2d(images, 32, [3, 3], scope='conv1')
self.assertTrue(variables.get_variables('conv1/weights'))
self.assertTrue(variables.get_variables('conv1/biases'))
def testCreateConvWithScope(self):
height, width = 7, 9
with self.test_session():
images = random_ops.random_uniform((5, height, width, 3), seed=1)
output = layers_lib.convolution2d(images, 32, [3, 3], scope='conv1')
self.assertEqual(output.op.name, 'conv1/Relu')
def testCreateConvWithCollection(self):
height, width = 7, 9
images = random_ops.random_uniform((5, height, width, 3), seed=1)
with ops.name_scope('fe'):
conv = layers_lib.convolution2d(
images, 32, [3, 3], outputs_collections='outputs', scope='Conv')
output_collected = ops.get_collection('outputs')[0]
self.assertEqual(output_collected.aliases, ['fe/Conv'])
self.assertEqual(output_collected, conv)
def testCreateConvWithoutActivation(self):
height, width = 7, 9
with self.test_session():
images = random_ops.random_uniform((5, height, width, 3), seed=1)
output = layers_lib.convolution2d(images, 32, [3, 3], activation_fn=None)
self.assertEqual(output.op.name, 'Conv/BiasAdd')
def testCreateConvValid(self):
height, width = 7, 9
with self.test_session():
images = random_ops.random_uniform((5, height, width, 3), seed=1)
output = layers_lib.convolution2d(images, 32, [3, 3], padding='VALID')
self.assertListEqual(output.get_shape().as_list(), [5, 5, 7, 32])
def testCreateConvWithWD(self):
height, width = 7, 9
weight_decay = 0.01
with self.test_session() as sess:
images = random_ops.random_uniform((5, height, width, 3), seed=1)
regularizer = regularizers.l2_regularizer(weight_decay)
layers_lib.convolution2d(
images, 32, [3, 3], weights_regularizer=regularizer)
l2_loss = nn_ops.l2_loss(variables.get_variables_by_name('weights')[0])
wd = ops.get_collection(ops.GraphKeys.REGULARIZATION_LOSSES)[0]
self.assertEqual(wd.op.name, 'Conv/kernel/Regularizer/l2_regularizer')
sess.run(variables_lib.global_variables_initializer())
self.assertAlmostEqual(sess.run(wd), weight_decay * l2_loss.eval())
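# regularizers.l2_regularizer(scale) builds a loss of scale * l2_loss(w),
# where l2_loss(w) = sum(w ** 2) / 2, which is exactly what the assertion
# above recomputes by hand from the weights variable.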
def testCreateConvNoRegularizers(self):
height, width = 7, 9
with self.test_session():
images = random_ops.random_uniform((5, height, width, 3), seed=1)
layers_lib.convolution2d(images, 32, [3, 3])
self.assertEqual(
ops.get_collection(ops.GraphKeys.REGULARIZATION_LOSSES), [])
def testReuseVars(self):
height, width = 7, 9
with self.test_session():
images = random_ops.random_uniform((5, height, width, 3), seed=1)
layers_lib.convolution2d(images, 32, [3, 3], scope='conv1')
self.assertEqual(len(variables.get_variables()), 2)
layers_lib.convolution2d(images, 32, [3, 3], scope='conv1', reuse=True)
self.assertEqual(len(variables.get_variables()), 2)
def testNonReuseVars(self):
height, width = 7, 9
with self.test_session():
images = random_ops.random_uniform((5, height, width, 3), seed=1)
layers_lib.convolution2d(images, 32, [3, 3])
self.assertEqual(len(variables.get_variables()), 2)
layers_lib.convolution2d(images, 32, [3, 3])
self.assertEqual(len(variables.get_variables()), 4)
def testReuseConvWithWD(self):
height, width = 7, 9
with self.test_session():
images = random_ops.random_uniform((5, height, width, 3), seed=1)
weight_decay = regularizers.l2_regularizer(0.01)
with arg_scope(
[layers_lib.convolution2d], weights_regularizer=weight_decay):
layers_lib.convolution2d(images, 32, [3, 3], scope='conv1')
self.assertEqual(len(variables.get_variables()), 2)
self.assertEqual(
len(ops.get_collection(ops.GraphKeys.REGULARIZATION_LOSSES)), 1)
layers_lib.convolution2d(images, 32, [3, 3], scope='conv1', reuse=True)
self.assertEqual(len(variables.get_variables()), 2)
self.assertEqual(
len(ops.get_collection(ops.GraphKeys.REGULARIZATION_LOSSES)), 1)
def testConvWithBatchNorm(self):
height, width = 7, 9
with self.test_session():
images = random_ops.random_uniform((5, height, width, 32), seed=1)
with arg_scope(
[layers_lib.convolution2d],
normalizer_fn=_layers.batch_norm,
normalizer_params={'decay': 0.9}):
net = layers_lib.convolution2d(images, 32, [3, 3])
net = layers_lib.convolution2d(net, 32, [3, 3])
self.assertEqual(len(variables.get_variables()), 8)
self.assertEqual(len(variables.get_variables('Conv/BatchNorm')), 3)
self.assertEqual(len(variables.get_variables('Conv_1/BatchNorm')), 3)
def testReuseConvWithBatchNorm(self):
height, width = 7, 9
with self.test_session():
images = random_ops.random_uniform((5, height, width, 32), seed=1)
with arg_scope(
[layers_lib.convolution2d],
normalizer_fn=_layers.batch_norm,
normalizer_params={'decay': 0.9}):
net = layers_lib.convolution2d(images, 32, [3, 3], scope='Conv')
net = layers_lib.convolution2d(
net, 32, [3, 3], scope='Conv', reuse=True)
self.assertEqual(len(variables.get_variables()), 4)
self.assertEqual(len(variables.get_variables('Conv/BatchNorm')), 3)
self.assertEqual(len(variables.get_variables('Conv_1/BatchNorm')), 0)
def testCreateConvCreatesWeightsAndBiasesVarsWithRateTwo(self):
height, width = 7, 9
images = random_ops.random_uniform((5, height, width, 3), seed=1)
with self.test_session():
self.assertFalse(variables.get_variables('conv1/weights'))
self.assertFalse(variables.get_variables('conv1/biases'))
layers_lib.convolution2d(images, 32, [3, 3], rate=2, scope='conv1')
self.assertTrue(variables.get_variables('conv1/weights'))
self.assertTrue(variables.get_variables('conv1/biases'))
def testOutputSizeWithRateTwoSamePadding(self):
num_filters = 32
input_size = [5, 10, 12, 3]
expected_size = [5, 10, 12, num_filters]
images = random_ops.random_uniform(input_size, seed=1)
output = layers_lib.convolution2d(
images, num_filters, [3, 3], rate=2, padding='SAME')
self.assertListEqual(list(output.get_shape().as_list()), expected_size)
with self.test_session() as sess:
sess.run(variables_lib.global_variables_initializer())
self.assertEqual(output.op.name, 'Conv/Relu')
self.assertListEqual(list(output.eval().shape), expected_size)
def testOutputSizeWithRateTwoValidPadding(self):
num_filters = 32
input_size = [5, 10, 12, 3]
expected_size = [5, 6, 8, num_filters]
images = random_ops.random_uniform(input_size, seed=1)
output = layers_lib.convolution2d(
images, num_filters, [3, 3], rate=2, padding='VALID')
self.assertListEqual(list(output.get_shape().as_list()), expected_size)
with self.test_session() as sess:
sess.run(variables_lib.global_variables_initializer())
self.assertEqual(output.op.name, 'Conv/Relu')
self.assertListEqual(list(output.eval().shape), expected_size)
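# With dilation rate r the effective kernel extent is k + (k - 1) * (r - 1);
# here 3 + 2 * 1 = 5, so VALID padding yields 10 - 5 + 1 = 6 and
# 12 - 5 + 1 = 8, matching expected_size above.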
def testOutputSizeWithRateTwoThreeValidPadding(self):
num_filters = 32
input_size = [5, 10, 12, 3]
expected_size = [5, 6, 6, num_filters]
images = random_ops.random_uniform(input_size, seed=1)
output = layers_lib.convolution2d(
images, num_filters, [3, 3], rate=[2, 3], padding='VALID')
self.assertListEqual(list(output.get_shape().as_list()), expected_size)
with self.test_session() as sess:
sess.run(variables_lib.global_variables_initializer())
self.assertEqual(output.op.name, 'Conv/Relu')
self.assertListEqual(list(output.eval().shape), expected_size)
def testDynamicOutputSizeWithRateOneValidPadding(self):
num_filters = 32
input_size = [5, 9, 11, 3]
expected_size = [None, None, None, num_filters]
expected_size_dynamic = [5, 7, 9, num_filters]
with self.test_session():
images = array_ops.placeholder(np.float32,
[None, None, None, input_size[3]])
output = layers_lib.convolution2d(
images, num_filters, [3, 3], rate=1, padding='VALID')
variables_lib.global_variables_initializer().run()
self.assertEqual(output.op.name, 'Conv/Relu')
self.assertListEqual(output.get_shape().as_list(), expected_size)
eval_output = output.eval({images: np.zeros(input_size, np.float32)})
self.assertListEqual(list(eval_output.shape), expected_size_dynamic)
def testDynamicOutputSizeWithRateOneValidPaddingNCHW(self):
if test.is_gpu_available(cuda_only=True):
num_filters = 32
input_size = [5, 3, 9, 11]
expected_size = [None, num_filters, None, None]
expected_size_dynamic = [5, num_filters, 7, 9]
with self.test_session(use_gpu=True):
images = array_ops.placeholder(np.float32,
[None, input_size[1], None, None])
output = layers_lib.convolution2d(
images,
num_filters, [3, 3],
rate=1,
padding='VALID',
data_format='NCHW')
variables_lib.global_variables_initializer().run()
self.assertEqual(output.op.name, 'Conv/Relu')
self.assertListEqual(output.get_shape().as_list(), expected_size)
eval_output = output.eval({images: np.zeros(input_size, np.float32)})
self.assertListEqual(list(eval_output.shape), expected_size_dynamic)
def testDynamicOutputSizeWithRateTwoValidPadding(self):
num_filters = 32
input_size = [5, 9, 11, 3]
expected_size = [None, None, None, num_filters]
expected_size_dynamic = [5, 5, 7, num_filters]
with self.test_session():
images = array_ops.placeholder(np.float32,
[None, None, None, input_size[3]])
output = layers_lib.convolution2d(
images, num_filters, [3, 3], rate=2, padding='VALID')
variables_lib.global_variables_initializer().run()
self.assertEqual(output.op.name, 'Conv/Relu')
self.assertListEqual(output.get_shape().as_list(), expected_size)
eval_output = output.eval({images: np.zeros(input_size, np.float32)})
self.assertListEqual(list(eval_output.shape), expected_size_dynamic)
def testWithScope(self):
num_filters = 32
input_size = [5, 9, 11, 3]
expected_size = [5, 5, 7, num_filters]
images = random_ops.random_uniform(input_size, seed=1)
output = layers_lib.convolution2d(
images, num_filters, [3, 3], rate=2, padding='VALID', scope='conv7')
with self.test_session() as sess:
sess.run(variables_lib.global_variables_initializer())
self.assertEqual(output.op.name, 'conv7/Relu')
self.assertListEqual(list(output.eval().shape), expected_size)
def testWithScopeWithoutActivation(self):
num_filters = 32
input_size = [5, 9, 11, 3]
expected_size = [5, 5, 7, num_filters]
images = random_ops.random_uniform(input_size, seed=1)
output = layers_lib.convolution2d(
images,
num_filters, [3, 3],
rate=2,
padding='VALID',
activation_fn=None,
scope='conv7')
with self.test_session() as sess:
sess.run(variables_lib.global_variables_initializer())
self.assertEqual(output.op.name, 'conv7/BiasAdd')
self.assertListEqual(list(output.eval().shape), expected_size)
class Convolution2dTransposeTests(test.TestCase):
def testTrainableFlagIsPassedOn(self):
for trainable in [True, False]:
with ops.Graph().as_default():
num_filters = 32
input_size = [5, 10, 12, 3]
images = random_ops.random_uniform(input_size, seed=1)
layers_lib.conv2d_transpose(
images, num_filters, [3, 3], stride=1, trainable=trainable)
model_variables = variables.get_model_variables()
trainable_variables = variables_lib.trainable_variables()
for model_variable in model_variables:
self.assertEqual(trainable, model_variable in trainable_variables)
def testInvalidDataFormat(self):
height, width = 7, 9
with self.test_session():
images = random_ops.random_uniform((5, height, width, 3), seed=1)
with self.assertRaisesRegexp(
ValueError, 'data_format has to be either NCHW or NHWC.'):
_layers.convolution2d_transpose(images, 32, 3, data_format='CHWN')
def testOutputSizeWithStrideOneSamePaddingNCHW(self):
# `NCHW` data format is only supported on GPU devices.
if test.is_gpu_available(cuda_only=True):
with self.test_session(use_gpu=True) as sess:
num_filters = 32
input_size = [5, 3, 10, 12]
expected_size = [5, num_filters, 10, 12]
images = random_ops.random_uniform(input_size, seed=1)
output = layers_lib.conv2d_transpose(
images,
num_filters, [3, 3],
stride=1,
padding='SAME',
data_format='NCHW')
self.assertEqual(output.op.name, 'Conv2d_transpose/Relu')
sess.run(variables_lib.global_variables_initializer())
self.assertListEqual(list(output.eval().shape), expected_size)
def testOutputSizeWithStrideOneValidPaddingNCHW(self):
if test.is_gpu_available(cuda_only=True):
with self.test_session(use_gpu=True) as sess:
num_filters = 32
input_size = [5, 3, 10, 12]
expected_size = [5, num_filters, 12, 14]
images = random_ops.random_uniform(input_size, seed=1)
output = layers_lib.conv2d_transpose(
images,
num_filters, [3, 3],
stride=1,
padding='VALID',
data_format='NCHW')
self.assertEqual(output.op.name, 'Conv2d_transpose/Relu')
sess.run(variables_lib.global_variables_initializer())
self.assertListEqual(list(output.eval().shape), expected_size)
def testOutputSizeWithStrideTwoValidPaddingNCHW(self):
if test.is_gpu_available(cuda_only=True):
with self.test_session(use_gpu=True) as sess:
num_filters = 32
input_size = [5, 3, 9, 11]
expected_size = [5, num_filters, 19, 23]
images = random_ops.random_uniform(input_size, seed=1)
output = layers_lib.conv2d_transpose(
images,
num_filters, [3, 3],
stride=[2, 2],
padding='VALID',
data_format='NCHW')
self.assertEqual(output.op.name, 'Conv2d_transpose/Relu')
self.assertListEqual(list(output.get_shape().as_list()), expected_size)
sess.run(variables_lib.global_variables_initializer())
self.assertListEqual(list(output.eval().shape), expected_size)
def testOutputSizeWith1x1StrideTwoSamePaddingNCHW(self):
if test.is_gpu_available(cuda_only=True):
with self.test_session(use_gpu=True) as sess:
num_filters = 1
input_size = [1, 1, 1, 1]
expected_size = [1, num_filters, 2, 2]
images = random_ops.random_uniform(input_size, seed=1)
output = layers_lib.conv2d_transpose(
images,
num_filters, [2, 2],
stride=[2, 2],
padding='SAME',
data_format='NCHW')
self.assertListEqual(list(output.get_shape().as_list()), expected_size)
sess.run(variables_lib.global_variables_initializer())
self.assertEqual(output.op.name, 'Conv2d_transpose/Relu')
self.assertListEqual(list(output.eval().shape), expected_size)
def testOutputSizeWith1x1StrideTwoValidPaddingNCHW(self):
if test.is_gpu_available(cuda_only=True):
with self.test_session(use_gpu=True) as sess:
num_filters = 1
input_size = [1, 1, 1, 1]
expected_size = [1, num_filters, 2, 2]
images = random_ops.random_uniform(input_size, seed=1)
output = layers_lib.conv2d_transpose(
images,
num_filters, [2, 2],
stride=[2, 2],
padding='VALID',
data_format='NCHW')
sess.run(variables_lib.global_variables_initializer())
self.assertEqual(output.op.name, 'Conv2d_transpose/Relu')
self.assertListEqual(list(output.eval().shape), expected_size)
def testOutputSizeWith2x2StrideTwoSamePaddingNCHW(self):
if test.is_gpu_available(cuda_only=True):
with self.test_session(use_gpu=True) as sess:
num_filters = 1
input_size = [1, 1, 2, 2]
expected_size = [1, num_filters, 4, 4]
images = random_ops.random_uniform(input_size, seed=1)
output = layers_lib.conv2d_transpose(
images,
num_filters, [2, 2],
stride=[2, 2],
padding='SAME',
data_format='NCHW')
sess.run(variables_lib.global_variables_initializer())
self.assertEqual(output.op.name, 'Conv2d_transpose/Relu')
self.assertListEqual(list(output.eval().shape), expected_size)
def testOutputSizeWith2x2StrideTwoValidPaddingNCHW(self):
if test.is_gpu_available(cuda_only=True):
with self.test_session(use_gpu=True) as sess:
num_filters = 1
input_size = [1, 1, 2, 2]
expected_size = [1, num_filters, 4, 4]
images = random_ops.random_uniform(input_size, seed=1)
output = layers_lib.conv2d_transpose(
images,
num_filters, [2, 2],
stride=[2, 2],
padding='VALID',
data_format='NCHW')
sess.run(variables_lib.global_variables_initializer())
self.assertEqual(output.op.name, 'Conv2d_transpose/Relu')
self.assertListEqual(list(output.eval().shape), expected_size)
def testOutputSizeWithStride2x1NCHW(self):
if test.is_gpu_available(cuda_only=True):
with self.test_session(use_gpu=True) as sess:
num_filters = 1
input_size = [1, 1, 3, 2]
expected_size = [1, num_filters, 6, 5]
images = random_ops.random_uniform(input_size, seed=1)
output = layers_lib.conv2d_transpose(
images,
num_filters, [2, 4],
stride=[2, 1],
padding='VALID',
data_format='NCHW')
sess.run(variables_lib.global_variables_initializer())
self.assertEqual(output.op.name, 'Conv2d_transpose/Relu')
self.assertListEqual(list(output.eval().shape), expected_size)
def testOutputSizeWithStride2x4NCHW(self):
if test.is_gpu_available(cuda_only=True):
with self.test_session(use_gpu=True) as sess:
num_filters = 1
input_size = [1, 1, 3, 2]
expected_size = [1, num_filters, 6, 8]
images = random_ops.random_uniform(input_size, seed=1)
output = layers_lib.conv2d_transpose(
images,
num_filters, [2, 4],
stride=[2, 4],
padding='VALID',
data_format='NCHW')
sess.run(variables_lib.global_variables_initializer())
self.assertEqual(output.op.name, 'Conv2d_transpose/Relu')
self.assertListEqual(list(output.eval().shape), expected_size)
def testOutputSizeWithStride2x5NCHW(self):
if test.is_gpu_available(cuda_only=True):
with self.test_session(use_gpu=True) as sess:
num_filters = 1
input_size = [1, 1, 3, 2]
expected_size = [1, num_filters, 6, 10]
images = random_ops.random_uniform(input_size, seed=1)
output = layers_lib.conv2d_transpose(
images,
num_filters, [2, 4],
stride=[2, 5],
padding='VALID',
data_format='NCHW')
sess.run(variables_lib.global_variables_initializer())
self.assertEqual(output.op.name, 'Conv2d_transpose/Relu')
self.assertListEqual(list(output.eval().shape), expected_size)
def testOutputSizeWithStrideOneSamePadding(self):
num_filters = 32
input_size = [5, 10, 12, 3]
expected_size = [5, 10, 12, num_filters]
images = random_ops.random_uniform(input_size, seed=1)
output = layers_lib.conv2d_transpose(
images, num_filters, [3, 3], stride=1, padding='SAME')
self.assertEqual(output.op.name, 'Conv2d_transpose/Relu')
with self.test_session() as sess:
sess.run(variables_lib.global_variables_initializer())
self.assertListEqual(list(output.eval().shape), expected_size)
def testOutputSizeWithStrideOneValidPadding(self):
num_filters = 32
input_size = [5, 10, 12, 3]
expected_size = [5, 12, 14, num_filters]
images = random_ops.random_uniform(input_size, seed=1)
output = layers_lib.conv2d_transpose(
images, num_filters, [3, 3], stride=1, padding='VALID')
self.assertEqual(output.op.name, 'Conv2d_transpose/Relu')
with self.test_session() as sess:
sess.run(variables_lib.global_variables_initializer())
self.assertListEqual(list(output.eval().shape), expected_size)
def testOutputSizeWithStrideTwoValidPadding(self):
num_filters = 32
input_size = [5, 9, 11, 3]
expected_size = [5, 19, 23, num_filters]
images = random_ops.random_uniform(input_size, seed=1)
output = layers_lib.conv2d_transpose(
images, num_filters, [3, 3], stride=[2, 2], padding='VALID')
self.assertEqual(output.op.name, 'Conv2d_transpose/Relu')
self.assertListEqual(list(output.get_shape().as_list()), expected_size)
with self.test_session() as sess:
sess.run(variables_lib.global_variables_initializer())
self.assertListEqual(list(output.eval().shape), expected_size)
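# For conv2d_transpose, VALID padding gives out = (in - 1) * stride + kernel
# (here (9 - 1) * 2 + 3 = 19 and (11 - 1) * 2 + 3 = 23), while SAME padding
# gives out = in * stride, as the SAME-padding tests in this class expect.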
def testOutputSizeWith1x1StrideTwoSamePadding(self):
num_filters = 1
input_size = [1, 1, 1, 1]
expected_size = [1, 2, 2, num_filters]
images = random_ops.random_uniform(input_size, seed=1)
output = layers_lib.conv2d_transpose(
images, num_filters, [2, 2], stride=[2, 2], padding='SAME')
self.assertListEqual(list(output.get_shape().as_list()), expected_size)
with self.test_session() as sess:
sess.run(variables_lib.global_variables_initializer())
self.assertEqual(output.op.name, 'Conv2d_transpose/Relu')
self.assertListEqual(list(output.eval().shape), expected_size)
def testOutputSizeWith1x1StrideTwoValidPadding(self):
num_filters = 1
input_size = [1, 1, 1, 1]
expected_size = [1, 2, 2, num_filters]
images = random_ops.random_uniform(input_size, seed=1)
output = layers_lib.conv2d_transpose(
images, num_filters, [2, 2], stride=[2, 2], padding='VALID')
with self.test_session() as sess:
sess.run(variables_lib.global_variables_initializer())
self.assertEqual(output.op.name, 'Conv2d_transpose/Relu')
self.assertListEqual(list(output.eval().shape), expected_size)
def testOutputSizeWith2x2StrideTwoSamePadding(self):
num_filters = 1
input_size = [1, 2, 2, 1]
expected_size = [1, 4, 4, num_filters]
images = random_ops.random_uniform(input_size, seed=1)
output = layers_lib.conv2d_transpose(
images, num_filters, [2, 2], stride=[2, 2], padding='SAME')
with self.test_session() as sess:
sess.run(variables_lib.global_variables_initializer())
self.assertEqual(output.op.name, 'Conv2d_transpose/Relu')
self.assertListEqual(list(output.eval().shape), expected_size)
def testOutputSizeWith2x2StrideTwoValidPadding(self):
num_filters = 1
input_size = [1, 2, 2, 1]
expected_size = [1, 4, 4, num_filters]
images = random_ops.random_uniform(input_size, seed=1)
output = layers_lib.conv2d_transpose(
images, num_filters, [2, 2], stride=[2, 2], padding='VALID')
with self.test_session() as sess:
sess.run(variables_lib.global_variables_initializer())
self.assertEqual(output.op.name, 'Conv2d_transpose/Relu')
self.assertListEqual(list(output.eval().shape), expected_size)
def testOutputSizeWithStride2x1(self):
num_filters = 1
input_size = [1, 3, 2, 1]
expected_size = [1, 6, 5, num_filters]
images = random_ops.random_uniform(input_size, seed=1)
output = layers_lib.conv2d_transpose(
images, num_filters, [2, 4], stride=[2, 1], padding='VALID')
with self.test_session() as sess:
sess.run(variables_lib.global_variables_initializer())
self.assertEqual(output.op.name, 'Conv2d_transpose/Relu')
self.assertListEqual(list(output.eval().shape), expected_size)
def testOutputSizeWithStride2x4(self):
num_filters = 1
input_size = [1, 3, 2, 1]
expected_size = [1, 6, 8, num_filters]
images = random_ops.random_uniform(input_size, seed=1)
output = layers_lib.conv2d_transpose(
images, num_filters, [2, 4], stride=[2, 4], padding='VALID')
with self.test_session() as sess:
sess.run(variables_lib.global_variables_initializer())
self.assertEqual(output.op.name, 'Conv2d_transpose/Relu')
self.assertListEqual(list(output.eval().shape), expected_size)
def testOutputSizeWithStride2x5(self):
num_filters = 1
input_size = [1, 3, 2, 1]
expected_size = [1, 6, 10, num_filters]
images = random_ops.random_uniform(input_size, seed=1)
output = layers_lib.conv2d_transpose(
images, num_filters, [2, 4], stride=[2, 5], padding='VALID')
with self.test_session() as sess:
sess.run(variables_lib.global_variables_initializer())
self.assertEqual(output.op.name, 'Conv2d_transpose/Relu')
self.assertListEqual(list(output.eval().shape), expected_size)
def testOutputSizeRandomSizesAndStridesValidPadding(self):
np.random.seed(0)
max_image_size = 10
for _ in range(10):
num_filters = 1
input_size = [
1, np.random.randint(1, max_image_size),
np.random.randint(1, max_image_size), 1
]
filter_size = [
np.random.randint(1, input_size[1] + 1),
np.random.randint(1, input_size[2] + 1)
]
stride = [np.random.randint(1, 3), np.random.randint(1, 3)]
ops.reset_default_graph()
graph = ops.Graph()
with graph.as_default():
images = random_ops.random_uniform(input_size, seed=1)
transpose = layers_lib.conv2d_transpose(
images, num_filters, filter_size, stride=stride, padding='VALID')
conv = layers_lib.conv2d(
transpose, num_filters, filter_size, stride=stride, padding='VALID')
with self.test_session(graph=graph) as sess:
sess.run(variables_lib.global_variables_initializer())
self.assertListEqual(list(conv.eval().shape), input_size)
def testDynamicOutputSizeWithStrideTwoValidPadding(self):
num_filters = 32
input_size = [5, 9, 11, 3]
expected_size = [None, None, None, num_filters]
expected_size_dynamic = [5, 19, 23, num_filters]
images = array_ops.placeholder(np.float32,
[None, None, None, input_size[3]])
output = layers_lib.conv2d_transpose(
images, num_filters, [3, 3], stride=[2, 2], padding='VALID')
self.assertListEqual(output.get_shape().as_list(), expected_size)
with self.test_session() as sess:
sess.run(variables_lib.global_variables_initializer())
self.assertEqual(output.op.name, 'Conv2d_transpose/Relu')
eval_output = output.eval({images: np.zeros(input_size, np.float32)})
self.assertListEqual(list(eval_output.shape), expected_size_dynamic)
def testDynamicOutputSizeWithStrideTwoSamePadding(self):
num_filters = 32
input_size = [5, 9, 11, 3]
expected_size = [None, None, None, num_filters]
expected_size_dynamic = [5, 18, 22, num_filters]
with self.test_session():
images = array_ops.placeholder(np.float32,
[None, None, None, input_size[3]])
output = layers_lib.conv2d_transpose(
images, num_filters, [3, 3], stride=[2, 2], padding='SAME')
variables_lib.global_variables_initializer().run()
self.assertEqual(output.op.name, 'Conv2d_transpose/Relu')
self.assertListEqual(output.get_shape().as_list(), expected_size)
eval_output = output.eval({images: np.zeros(input_size, np.float32)})
self.assertListEqual(list(eval_output.shape), expected_size_dynamic)
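# With padding='SAME' the transpose scales each spatial dimension by the
# stride alone, out = in * stride, hence 9 -> 18 and 11 -> 22 here; the
# VALID variant above additionally adds the kernel overhang,
# 9 * 2 + max(3 - 2, 0) = 19 and 11 * 2 + max(3 - 2, 0) = 23.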
def testWithScope(self):
num_filters = 32
input_size = [5, 9, 11, 3]
expected_size = [5, 19, 23, num_filters]
images = random_ops.random_uniform(input_size, seed=1)
output = layers_lib.conv2d_transpose(
images, num_filters, [3, 3], stride=2, padding='VALID', scope='conv7')
self.assertEqual(output.op.name, 'conv7/Relu')
with self.test_session() as sess:
sess.run(variables_lib.global_variables_initializer())
self.assertListEqual(list(output.eval().shape), expected_size)
def testWithScopeWithoutActivation(self):
num_filters = 32
input_size = [5, 9, 11, 3]
expected_size = [5, 19, 23, num_filters]
images = random_ops.random_uniform(input_size, seed=1)
output = layers_lib.conv2d_transpose(
images,
num_filters, [3, 3],
stride=2,
padding='VALID',
activation_fn=None,
scope='conv7')
self.assertEqual(output.op.name, 'conv7/BiasAdd')
with self.test_session() as sess:
sess.run(variables_lib.global_variables_initializer())
self.assertListEqual(list(output.eval().shape), expected_size)
def testDeconvWithoutBiasesProducesConv2dTranspose(self):
num_filters = 32
input_size = [5, 9, 11, 3]
expected_size = [5, 19, 23, num_filters]
stride = 2
padding = 'VALID'
with self.test_session() as sess:
images = random_ops.random_uniform(input_size, seed=1)
output_deconv = layers_lib.conv2d_transpose(
images,
num_filters, [3, 3],
stride=stride,
padding=padding,
activation_fn=None,
scope='conv7')
weights = variables.get_variables_by_name('conv7/weights')[0]
output_conv2d_transpose = nn_ops.conv2d_transpose(
images,
weights,
expected_size, [1, stride, stride, 1],
padding=padding)
sess.run(variables_lib.global_variables_initializer())
output_deconv, output_conv2d_transpose = sess.run(
[output_deconv, output_conv2d_transpose])
self.assertTrue(
np.isclose(output_deconv, output_conv2d_transpose, 1e-5, 1e-5).all())
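# A note on the equivalence tested above: the layer still creates a bias
# variable (the test is 'WithoutBiases' only in spirit), but the default
# zeros initializer leaves that bias at 0 right after
# global_variables_initializer(), so with activation_fn=None the layer
# output matches the raw nn_ops.conv2d_transpose applied with the same
# weights, up to numerical tolerance.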
class ConvolutionInPlaneTest(test.TestCase):
def testHorzConvWithBlankImage(self):
image = array_ops.ones((1, 10, 10, 1))
horz_gradients = layers_lib.conv2d_in_plane(
image,
weights_initializer=init_ops.constant_initializer([1, -1]),
kernel_size=[1, 2],
padding='VALID',
activation_fn=None)
init_op = variables_lib.global_variables_initializer()
with self.test_session() as sess:
sess.run(init_op)
result = sess.run(horz_gradients)
expected = np.zeros((1, 10, 9, 1))
self.assertAllEqual(result, expected)
def testHorzConvWithBlankImageAndPlaceholder(self):
image = array_ops.placeholder(dtypes.float32, shape=(None, None, None, 1))
horz_gradients = layers_lib.conv2d_in_plane(
image,
weights_initializer=init_ops.constant_initializer([1, -1]),
kernel_size=[1, 2],
padding='VALID',
activation_fn=None)
init_op = variables_lib.global_variables_initializer()
with self.test_session() as sess:
sess.run(init_op)
result = sess.run(horz_gradients,
feed_dict={image: np.ones((1, 10, 10, 1))})
expected = np.zeros((1, 10, 9, 1))
self.assertAllEqual(result, expected)
def testHorzConvWithRandomImageMultiBatch(self):
np.random.seed(1)
image = np.random.rand(5, 10, 10, 1)
expected = image[:, :, 0:-1, :] - image[:, :, 1:, :]
tf_image = constant_op.constant(image, dtype=dtypes.float32)
horz_gradients = layers_lib.conv2d_in_plane(
tf_image,
weights_initializer=init_ops.constant_initializer([1, -1]),
kernel_size=[1, 2],
padding='VALID',
activation_fn=None)
init_op = variables_lib.global_variables_initializer()
with self.test_session() as sess:
sess.run(init_op)
result = sess.run(horz_gradients)
self.assertAllClose(result, expected, rtol=1e-5, atol=1e-5)
def testHorzConvWithRandomImageMultiBatchMultiChannel(self):
np.random.seed(1)
image = np.random.rand(5, 10, 10, 7)
expected = image[:, :, 0:-1, :] - image[:, :, 1:, :]
tf_image = constant_op.constant(image, dtype=dtypes.float32)
horz_gradients = layers_lib.conv2d_in_plane(
tf_image,
weights_initializer=init_ops.constant_initializer([1, -1]),
kernel_size=[1, 2],
padding='VALID',
activation_fn=None)
init_op = variables_lib.global_variables_initializer()
with self.test_session() as sess:
sess.run(init_op)
result = sess.run(horz_gradients)
self.assertAllClose(result, expected, rtol=1e-5, atol=1e-5)
def testHorzConvWithVaryingImage(self):
image = np.asmatrix(('1.0 2.0 3.0;' '1.1 2.0 4.0;' '-4.3 0.0 8.9'))
expected = np.asmatrix(('-1.0 -1.0;' '-0.9 -2.0;' '-4.3 -8.9'))
expected = np.reshape(np.asarray(expected), (1, 3, 2, 1))
tf_image = constant_op.constant(
image, shape=(1, 3, 3, 1), dtype=dtypes.float32)
horz_gradients = layers_lib.conv2d_in_plane(
tf_image,
weights_initializer=init_ops.constant_initializer([1, -1]),
kernel_size=[1, 2],
padding='VALID',
activation_fn=None)
init_op = variables_lib.global_variables_initializer()
with self.test_session() as sess:
sess.run(init_op)
result = sess.run(horz_gradients)
self.assertAllClose(result, expected, rtol=1e-5, atol=1e-5)
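# The in-plane convolutions above implement a horizontal finite difference:
# with kernel_size=[1, 2] and weights [1, -1],
# output[i, j] = image[i, j] - image[i, j + 1]. For the varying image the
# first row '1.0 2.0 3.0' gives 1.0 - 2.0 = -1.0 and 2.0 - 3.0 = -1.0,
# matching the expected '-1.0 -1.0'; the vertical tests below do the same
# along rows with a [2, 1] kernel.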
def testVertConvWithBlankImage(self):
image = array_ops.ones((1, 10, 10, 1))
vert_gradients = layers_lib.conv2d_in_plane(
image,
weights_initializer=init_ops.constant_initializer([1, -1]),
kernel_size=[2, 1],
padding='VALID',
activation_fn=None)
init_op = variables_lib.global_variables_initializer()
with self.test_session() as sess:
sess.run(init_op)
result = sess.run(vert_gradients)
expected = np.zeros((1, 9, 10, 1))
self.assertAllEqual(result, expected)
def testVertConvWithVaryingImage(self):
image = np.asmatrix(('1.0 2.0 3.0;' '1.1 2.0 4.0;' '-4.3 0.0 8.9'))
expected = np.asmatrix(('-0.1 0.0 -1.0;' ' 5.4 2.0 -4.9'))
expected = np.reshape(np.asarray(expected), (1, 2, 3, 1))
tf_image = constant_op.constant(
image, shape=(1, 3, 3, 1), dtype=dtypes.float32)
vert_gradients = layers_lib.conv2d_in_plane(
tf_image,
weights_initializer=init_ops.constant_initializer([1, -1]),
kernel_size=[2, 1],
padding='VALID',
activation_fn=None)
init_op = variables_lib.global_variables_initializer()
with self.test_session() as sess:
sess.run(init_op)
result = sess.run(vert_gradients)
self.assertAllClose(result, expected, rtol=1e-5, atol=1e-5)
class DropoutTest(test.TestCase):
def testCreateDropout(self):
height, width = 3, 3
with self.test_session():
images = np.random.uniform(size=(5, height, width, 3))
output = _layers.dropout(images)
self.assertEqual(output.op.name, 'Dropout/dropout/mul')
output.get_shape().assert_is_compatible_with(
ops.convert_to_tensor(images).get_shape())
def testCreateDropoutWithConstantTrue(self):
height, width = 3, 3
with self.test_session():
is_training = constant_op.constant(True)
images = random_ops.random_uniform((5, height, width, 3), seed=1)
output = _layers.dropout(images, is_training=is_training)
output.get_shape().assert_is_compatible_with(images.get_shape())
def testCreateDropoutWithConstantFalse(self):
height, width = 3, 3
with self.test_session():
is_training = constant_op.constant(False)
images = random_ops.random_uniform((5, height, width, 3), seed=1)
output = _layers.dropout(images, is_training=is_training)
output.get_shape().assert_is_compatible_with(images.get_shape())
def testCreateDropoutWithPlaceholder(self):
height, width = 3, 3
with self.test_session():
is_training = array_ops.placeholder(dtype=dtypes.bool, shape=[])
images = random_ops.random_uniform((5, height, width, 3), seed=1)
output = _layers.dropout(images, is_training=is_training)
self.assertEqual(output.op.name, 'Dropout/cond/Merge')
output.get_shape().assert_is_compatible_with(images.get_shape())
def testCollectOutputs(self):
height, width = 3, 3
with self.test_session():
images = random_ops.random_uniform((5, height, width, 3), seed=1)
output = _layers.dropout(images, outputs_collections='outputs')
c_output = ops.get_collection('outputs')[0]
self.assertEqual(c_output.aliases, ['Dropout'])
self.assertEqual(c_output, output)
def testDropout(self):
height, width = 10, 10
with self.test_session() as sess:
images = random_ops.random_uniform(
(5, height, width, 3), seed=1, name='images')
num_elem_initial = math_ops.reduce_mean(math_ops.to_float(images > 0))
output = _layers.dropout(images)
num_elem = math_ops.reduce_mean(math_ops.to_float(output > 0))
sess.run(variables_lib.global_variables_initializer())
num_elem, num_elem_initial = sess.run([num_elem, num_elem_initial])
self.assertLess(num_elem, num_elem_initial / 2 + 0.1)
self.assertGreater(num_elem, num_elem_initial / 2 - 0.1)
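# Sanity check on the bounds above: _layers.dropout keeps units with the
# default keep probability of 0.5, so about half of the strictly positive
# uniform inputs are zeroed and the surviving fraction should land near
# num_elem_initial / 2, within sampling noise on a 5x10x10x3 tensor.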
def testCreateDropoutNoTraining(self):
height, width = 3, 3
with self.test_session() as sess:
images = random_ops.random_uniform(
(5, height, width, 3), seed=1, name='images')
num_elem_initial = math_ops.reduce_mean(math_ops.to_float(images > 0))
output = _layers.dropout(images, is_training=False)
num_elem = math_ops.reduce_mean(math_ops.to_float(output > 0))
sess.run(variables_lib.global_variables_initializer())
num_elem, num_elem_initial = sess.run([num_elem, num_elem_initial])
self.assertEqual(num_elem, num_elem_initial)
outputs, inputs = sess.run([output, images])
self.assertAllClose(outputs, inputs)
def testCreateFCFollowByDropout(self):
height, width = 3, 3
with self.test_session() as sess:
images = random_ops.random_uniform(
(5, height, width, 3), seed=1, name='images')
output = _layers.fully_connected(images, 50)
num_elem_initial = math_ops.reduce_mean(math_ops.to_float(output > 0))
output = _layers.dropout(output)
num_elem = math_ops.reduce_mean(math_ops.to_float(output > 0))
sess.run(variables_lib.global_variables_initializer())
num_elem, num_elem_initial = sess.run([num_elem, num_elem_initial])
self.assertLess(num_elem, num_elem_initial / 2 + 0.1)
self.assertGreater(num_elem, num_elem_initial / 2 - 0.1)
def testCreateFCWithDropout(self):
height, width = 3, 3
with self.test_session() as sess:
images = random_ops.random_uniform(
(5, height, width, 3), seed=1, name='images')
output = _layers.fully_connected(
images, 50, normalizer_fn=_layers.dropout)
num_elem = math_ops.reduce_mean(math_ops.to_float(output > 0))
sess.run(variables_lib.global_variables_initializer())
num_elem = sess.run(num_elem)
self.assertLess(num_elem, 0.5)
self.assertGreater(num_elem, 0.1)
class FlattenTest(test.TestCase):
def testInvalidRank(self):
with ops.Graph().as_default() as g, self.test_session(g):
inputs = array_ops.placeholder(dtype=dtypes.float32)
inputs.set_shape(tensor_shape.TensorShape((5,)))
with self.assertRaisesRegexp(ValueError,
'must have a least 2 dimensions'):
_layers.flatten(inputs)
def testUnknownLastDim(self):
with ops.Graph().as_default() as g, self.test_session(g):
inputs = array_ops.placeholder(dtype=dtypes.float32)
inputs.set_shape(tensor_shape.TensorShape((5, None)))
output = _layers.flatten(inputs)
self.assertEqual(output.get_shape().as_list(), [5, None])
def testCollectOutputs(self):
height, width = 3, 3
with self.test_session():
images = np.random.uniform(size=(5, height, width, 3))
output = _layers.flatten(images, outputs_collections='outputs')
c_output = ops.get_collection('outputs')[0]
self.assertEqual(c_output.aliases, ['Flatten'])
self.assertEqual(c_output, output)
def testFlatten4D(self):
height, width = 3, 3
with self.test_session():
images = random_ops.random_uniform(
(5, height, width, 3), seed=1, name='images')
output = _layers.flatten(images)
self.assertEqual(output.get_shape().num_elements(),
images.get_shape().num_elements())
self.assertEqual(output.get_shape()[0], images.get_shape()[0])
def testFlatten3D(self):
height, width = 3, 3
with self.test_session():
images = random_ops.random_uniform(
(5, height, width), seed=1, name='images')
output = _layers.flatten(images)
self.assertEqual(output.get_shape().num_elements(),
images.get_shape().num_elements())
self.assertEqual(output.get_shape()[0], images.get_shape()[0])
def testFlattenBatchSize(self):
height, width = 3, 3
with self.test_session() as sess:
images = random_ops.random_uniform(
(5, height, width, 3), seed=1, name='images')
inputs = array_ops.placeholder(dtypes.int32, (None, height, width, 3))
output = _layers.flatten(inputs)
self.assertEqual(output.get_shape().as_list(), [None, height * width * 3])
output = sess.run(output, {inputs: images.eval()})
self.assertEqual(output.size, images.get_shape().num_elements())
self.assertEqual(output.shape[0], images.get_shape()[0])
def testUnknownDims(self):
height = width = depth = 3
with self.test_session() as sess:
images = random_ops.random_uniform(
(5, height, width, depth), seed=1, name='images')
inputs = array_ops.placeholder(dtypes.int32, (None, None, None, None))
output = _layers.flatten(inputs)
output = sess.run(output, {inputs: images.eval()})
self.assertEqual(output.size, images.get_shape().num_elements())
self.assertEqual(output.shape[0], images.get_shape()[0])
def _sparsify(array, threshold=0.5):
array[array < threshold] = 0
non_zero = np.where(array)
indices = np.vstack(non_zero).T
values = array[non_zero]
shape = array.shape
return indices, values, shape
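# A minimal usage sketch for _sparsify (the values are illustrative, not
# taken from any test): entries below the threshold are zeroed in place and
# the survivors are returned in COO form.
#
#   dense = np.array([[0.2, 0.9],
#                     [0.7, 0.1]])
#   indices, values, shape = _sparsify(dense)
#   # indices -> [[0, 1], [1, 0]], values -> [0.9, 0.7], shape -> (2, 2)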
class PartialFlattenTest(test.TestCase):
def testDensePartialFlatten(self):
"""Test `_inner_flatten` on `Tensor`s."""
shape = [2, 3, 4, 5, 6]
np.random.seed(5446)
inputs = np.random.randint(0, 100, size=shape)
for new_rank in [1, 2, 3, 4, 5]:
expected_new_shape = (
shape[:new_rank - 1] + [np.prod(shape[new_rank - 1:])])
expected_flattened = np.reshape(inputs, expected_new_shape)
flattened_t = _layers._inner_flatten(inputs, new_rank)
static_shape = flattened_t.get_shape().as_list()
self.assertEqual(static_shape, expected_new_shape)
with self.test_session() as sess:
flattened = sess.run(flattened_t)
np.testing.assert_array_equal(expected_flattened, flattened)
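# Worked example of the expected shape above: for shape [2, 3, 4, 5, 6] and
# new_rank = 3, the first new_rank - 1 = 2 dimensions are kept and the rest
# collapse into their product, giving [2, 3, 4 * 5 * 6] = [2, 3, 120].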
def testSparsePartialFlatten(self):
"""Test `_inner_flatten` on `SparseTensor`s."""
shape = [4, 3, 11, 6, 1, 3]
np.random.seed(10301)
random_ = np.random.rand(*shape)
indices, values, _ = _sparsify(random_)
for new_rank in [1, 2, 3, 4, 5]:
expected_shape = (shape[:new_rank - 1] + [np.prod(shape[new_rank - 1:])])
reshaped_random_ = np.reshape(random_, expected_shape)
expected_indices, expected_values, _ = _sparsify(reshaped_random_)
inputs_t = sparse_tensor.SparseTensor(indices, values, shape)
flattened_t = _layers._inner_flatten(inputs_t, new_rank)
with self.test_session() as sess:
flattened = sess.run(flattened_t)
np.testing.assert_array_equal(expected_indices, flattened.indices)
np.testing.assert_array_equal(expected_values, flattened.values)
np.testing.assert_array_equal(expected_shape, flattened.dense_shape)
def testIncompleteShape(self):
"""Test `_inner_flatten` shape inference for incomplete shapes."""
shape = [2, None, 4, None, 5, 6]
inputs = array_ops.placeholder(dtypes.int32)
inputs.set_shape(shape)
flattened1 = _layers._inner_flatten(inputs, 1)
self.assertEqual([None], flattened1.get_shape().as_list())
flattened2 = _layers._inner_flatten(inputs, 2)
self.assertEqual([2, None], flattened2.get_shape().as_list())
flattened3 = _layers._inner_flatten(inputs, 3)
self.assertEqual([2, None, None], flattened3.get_shape().as_list())
flattened4 = _layers._inner_flatten(inputs, 4)
self.assertEqual([2, None, 4, None], flattened4.get_shape().as_list())
flattened5 = _layers._inner_flatten(inputs, 5)
self.assertEqual([2, None, 4, None, 30], flattened5.get_shape().as_list())
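# The same rule drives the partial-shape cases above: kept leading
# dimensions pass through unchanged (including None), the collapsed tail
# becomes a concrete product when every merged dimension is known
# (5 * 6 = 30 for new_rank = 5), and it stays None as soon as any merged
# dimension is unknown (e.g. new_rank = 3 merges [4, None, 5, 6]).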
def testDenseFlattenRankAssertion(self):
"""Test `_inner_flatten` rank assertion for dense tensors."""
shape = [2, 3]
new_rank = 3
inputs = array_ops.placeholder(dtypes.int32)
inputs.set_shape(shape)
with self.assertRaisesRegexp(ValueError,
'inputs has rank less than new_rank'):
_layers._inner_flatten(inputs, new_rank)
def testSparseFlattenRankAssertion(self):
"""Test `_inner_flatten` rank assertion for sparse tensors."""
shape = [2, 3]
new_rank = 3
np.random.seed(10301)
random_ = np.random.rand(*shape)
indices, values, _ = _sparsify(random_)
inputs = sparse_tensor.SparseTensor(indices, values, shape)
with self.assertRaisesRegexp(ValueError,
'Inputs has rank less than new_rank'):
_layers._inner_flatten(inputs, new_rank)
class FCTest(test.TestCase):
def testCreateFC(self):
height, width = 3, 3
for layer_fn in (_layers.fully_connected, layers_lib.relu):
with ops.Graph().as_default() as g, self.test_session(g):
inputs = np.random.uniform(size=(5, height * width * 3))
output = layer_fn(inputs, 32)
self.assertEqual(output.op.name, 'fully_connected/Relu')
self.assertListEqual(output.get_shape().as_list(), [5, 32])
weights = variables.get_variables_by_name('weights')[0]
self.assertListEqual(weights.get_shape().as_list(), [3 * 3 * 3, 32])
biases = variables.get_variables_by_name('biases')[0]
self.assertListEqual(biases.get_shape().as_list(), [32])
def testCreateFCWithScope(self):
height, width = 3, 3
with self.test_session():
inputs = random_ops.random_uniform((5, height * width * 3), seed=1)
output = _layers.fully_connected(inputs, 32, scope='fc1')
self.assertEqual(output.op.name, 'fc1/Relu')
def testCreateFCWithCollection(self):
height, width = 3, 3
inputs = random_ops.random_uniform((5, height * width * 3), seed=1)
with ops.name_scope('fe'):
fc = _layers.fully_connected(
inputs, 7, outputs_collections='outputs', scope='fc')
output_collected = ops.get_collection('outputs')[0]
self.assertEqual(output_collected.aliases, ['fe/fc'])
self.assertEqual(output_collected, fc)
def testCreateFcCreatesWeightsAndBiasesVars(self):
height, width = 3, 3
inputs = random_ops.random_uniform((5, height * width * 3), seed=1)
with self.test_session():
self.assertFalse(variables.get_variables('fc1/weights'))
self.assertFalse(variables.get_variables('fc1/biases'))
_layers.fully_connected(inputs, 32, scope='fc1')
self.assertTrue(variables.get_variables('fc1/weights'))
self.assertTrue(variables.get_variables('fc1/biases'))
def testReuseVars(self):
height, width = 3, 3
inputs = random_ops.random_uniform((5, height * width * 3), seed=1)
with self.test_session():
_layers.fully_connected(inputs, 32, scope='fc1')
self.assertEqual(len(variables.get_variables('fc1')), 2)
_layers.fully_connected(inputs, 32, scope='fc1', reuse=True)
self.assertEqual(len(variables.get_variables('fc1')), 2)
def testNonReuseVars(self):
height, width = 3, 3
inputs = random_ops.random_uniform((5, height * width * 3), seed=1)
with self.test_session():
_layers.fully_connected(inputs, 32)
self.assertEqual(len(variables.get_variables('fully_connected')), 2)
_layers.fully_connected(inputs, 32)
self.assertEqual(len(variables.get_variables('fully_connected')), 4)
def testReuseWithRegularizer(self):
height, width = 3, 3
regularizer = lambda x: math_ops.reduce_sum(x) * 1e-3
inputs = random_ops.random_uniform((5, height * width * 3), seed=1)
_layers.fully_connected(
inputs, 32, scope='fc1', weights_regularizer=regularizer)
self.assertEqual(
len(ops.get_collection(ops.GraphKeys.REGULARIZATION_LOSSES)), 1)
self.assertEqual(len(loss_ops.get_regularization_losses()), 1)
_layers.fully_connected(
inputs, 32, scope='fc1', weights_regularizer=regularizer, reuse=True)
self.assertEqual(
len(ops.get_collection(ops.GraphKeys.REGULARIZATION_LOSSES)), 1)
self.assertEqual(len(loss_ops.get_regularization_losses()), 1)
with variable_scope.variable_scope('outer', reuse=False):
_layers.fully_connected(inputs, 32, weights_regularizer=regularizer)
self.assertEqual(
len(ops.get_collection(ops.GraphKeys.REGULARIZATION_LOSSES)), 2)
self.assertEqual(len(loss_ops.get_regularization_losses()), 2)
with variable_scope.variable_scope('outer', reuse=True):
_layers.fully_connected(inputs, 32, weights_regularizer=regularizer)
self.assertEqual(
len(ops.get_collection(ops.GraphKeys.REGULARIZATION_LOSSES)), 2)
self.assertEqual(len(loss_ops.get_regularization_losses()), 2)
def testCreateFCWithoutActivation(self):
height, width = 3, 3
with self.test_session():
inputs = random_ops.random_uniform((5, height * width * 3), seed=1)
output = _layers.fully_connected(inputs, 32, activation_fn=None)
self.assertEqual(output.op.name, 'fully_connected/BiasAdd')
def testCreateFCWithWD(self):
height, width = 3, 3
with self.test_session() as sess:
inputs = random_ops.random_uniform((5, height * width * 3), seed=1)
weight_decay = regularizers.l2_regularizer(0.01)
_layers.fully_connected(inputs, 32, weights_regularizer=weight_decay)
wd = ops.get_collection(ops.GraphKeys.REGULARIZATION_LOSSES)[0]
self.assertEqual(wd.op.name,
'fully_connected/kernel/Regularizer/l2_regularizer')
sess.run(variables_lib.global_variables_initializer())
self.assertLess(sess.run(wd), 0.4)
def testCreateFCWithBD(self):
height, width = 3, 3
with self.test_session() as sess:
inputs = random_ops.random_uniform((5, height * width * 3), seed=1)
bias_decay = regularizers.l2_regularizer(0.01)
_layers.fully_connected(inputs, 32, biases_regularizer=bias_decay)
wd = ops.get_collection(ops.GraphKeys.REGULARIZATION_LOSSES)[0]
self.assertEqual(wd.op.name,
'fully_connected/bias/Regularizer/l2_regularizer')
sess.run(variables_lib.global_variables_initializer())
self.assertLess(sess.run(wd), 0.4)
def testCreateNoRegularizers(self):
height, width = 3, 3
with self.test_session():
inputs = random_ops.random_uniform((5, height * width * 3), seed=1)
_layers.fully_connected(inputs, 32)
self.assertEqual(
ops.get_collection(ops.GraphKeys.REGULARIZATION_LOSSES), [])
def testReuseFCWithWD(self):
height, width = 3, 3
with self.test_session():
inputs = random_ops.random_uniform((5, height * width * 3), seed=1)
weight_decay = regularizers.l2_regularizer(0.01)
_layers.fully_connected(
inputs, 32, weights_regularizer=weight_decay, scope='FC')
self.assertEqual(len(variables.get_variables()), 2)
self.assertEqual(
len(ops.get_collection(ops.GraphKeys.REGULARIZATION_LOSSES)), 1)
_layers.fully_connected(
inputs, 32, weights_regularizer=weight_decay, scope='FC', reuse=True)
self.assertEqual(len(variables.get_variables()), 2)
self.assertEqual(
len(ops.get_collection(ops.GraphKeys.REGULARIZATION_LOSSES)), 1)
def testFCWithBatchNorm(self):
height, width = 3, 3
with self.test_session():
images = random_ops.random_uniform((5, height * width * 3), seed=1)
with arg_scope(
[_layers.fully_connected],
normalizer_fn=_layers.batch_norm,
normalizer_params={'decay': 0.9}):
net = _layers.fully_connected(images, 27)
net = _layers.fully_connected(net, 27)
self.assertEqual(len(variables.get_variables()), 8)
self.assertEqual(
len(variables.get_variables('fully_connected/BatchNorm')), 3)
self.assertEqual(
len(variables.get_variables('fully_connected_1/BatchNorm')), 3)
def testReuseFCWithBatchNorm(self):
height, width = 3, 3
with self.test_session():
images = random_ops.random_uniform((5, height * width * 3), seed=1)
with arg_scope(
[_layers.fully_connected],
normalizer_fn=_layers.batch_norm,
normalizer_params={'decay': 0.9}):
net = _layers.fully_connected(images, 27, scope='fc1')
net = _layers.fully_connected(net, 27, scope='fc1', reuse=True)
self.assertEqual(len(variables.get_variables()), 4)
self.assertEqual(len(variables.get_variables('fc1/BatchNorm')), 3)
class BatchNormTest(test.TestCase):
def _addBesselsCorrection(self, sample_size, expected_var):
correction_factor = sample_size / (sample_size - 1)
expected_var *= correction_factor
return expected_var, correction_factor
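# Bessel's correction converts the biased (population) variance into the
# unbiased (sample) estimate: var_unbiased = var_biased * n / (n - 1). In
# the fused tests below, n = batch_size * height * width (e.g.
# 10 * 2 * 2 = 40, a factor of 40 / 39), because the fused batch-norm
# kernel stores the corrected variance while np.var computes the biased one.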
def testUnknownShape(self):
with ops.Graph().as_default() as g, self.test_session(g):
inputs = array_ops.placeholder(dtype=dtypes.float32)
with self.assertRaisesRegexp(ValueError, 'undefined rank'):
_layers.batch_norm(inputs)
def testInvalidDataFormat(self):
with ops.Graph().as_default() as g, self.test_session(g):
inputs = array_ops.placeholder(dtype=dtypes.float32)
with self.assertRaisesRegexp(
ValueError, 'data_format has to be either NCHW or NHWC.'):
_layers.batch_norm(inputs, data_format='CHWN')
def testUnknownChannelsDimNHWC(self):
with ops.Graph().as_default() as g, self.test_session(g):
inputs = array_ops.placeholder(dtype=dtypes.float32)
inputs.set_shape(tensor_shape.TensorShape((5, 3, 3, None)))
with self.assertRaisesRegexp(ValueError, 'undefined'):
_layers.batch_norm(inputs, data_format='NHWC')
def testUnknownChannelsDimNCHW(self):
with ops.Graph().as_default() as g, self.test_session(g):
inputs = array_ops.placeholder(dtype=dtypes.float32)
inputs.set_shape(tensor_shape.TensorShape((5, None, 3, 3)))
with self.assertRaisesRegexp(ValueError, 'undefined'):
_layers.batch_norm(inputs, data_format='NCHW')
def testWeightedMomentsFused(self):
with ops.Graph().as_default() as g, self.test_session(g):
inputs = array_ops.placeholder(dtype=dtypes.float32, shape=(5, 3, 3, 7))
batch_weights = array_ops.placeholder(dtype=dtypes.float32)
with self.assertRaisesRegexp(ValueError, 'Weighted mean and variance'):
_layers.batch_norm(inputs, batch_weights=batch_weights, fused=True)
def testParamRegularizersFused(self):
with ops.Graph().as_default() as g, self.test_session(g):
inputs = array_ops.placeholder(dtype=dtypes.float32, shape=(5, 3, 3, 7))
with self.assertRaisesRegexp(ValueError,
'Regularizers are not currently'):
_layers.batch_norm(inputs, param_regularizers={}, fused=True)
def _testCreateOp(self, fused):
height, width = 3, 3
with self.test_session():
images = np.random.uniform(size=(5, height, width, 3)).astype('f')
output = _layers.batch_norm(images, fused=fused)
expected_name = ('BatchNorm/FusedBatchNorm' if fused else
'BatchNorm/batchnorm')
self.assertTrue(output.op.name.startswith(expected_name))
self.assertListEqual(output.get_shape().as_list(), [5, height, width, 3])
self.assertEqual(
ops.get_collection(ops.GraphKeys.REGULARIZATION_LOSSES), [])
def testCreateOpDefault(self):
self._testCreateOp(False)
def testCreateOpFused(self):
self._testCreateOp(True)
def testCreateOpBetaRegularizer(self):
height, width = 3, 3
with self.test_session():
reg = lambda x: 0.1 * math_ops.reduce_sum(x)
images = np.random.uniform(size=(5, height, width, 3)).astype('f')
_layers.batch_norm(images, param_regularizers={'beta': reg})
self.assertEqual(
len(ops.get_collection(ops.GraphKeys.REGULARIZATION_LOSSES)), 1)
beta_decay = ops.get_collection(ops.GraphKeys.REGULARIZATION_LOSSES)[0]
self.assertEqual(beta_decay.op.name, 'BatchNorm/beta/Regularizer/mul')
def testCreateOpGammaRegularizer(self):
height, width = 3, 3
with self.test_session():
reg = lambda x: 0.1 * math_ops.reduce_sum(x)
images = np.random.uniform(size=(5, height, width, 3)).astype('f')
_layers.batch_norm(
images, param_regularizers={'gamma': reg}, scale=True)
self.assertEqual(
len(ops.get_collection(ops.GraphKeys.REGULARIZATION_LOSSES)), 1)
gamma_decay = ops.get_collection(ops.GraphKeys.REGULARIZATION_LOSSES)[0]
self.assertEqual(gamma_decay.op.name, 'BatchNorm/gamma/Regularizer/mul')
def testCreateVariables(self):
height, width = 3, 3
with self.test_session():
images = random_ops.random_uniform((5, height, width, 3), seed=1)
_layers.batch_norm(images, scale=True)
beta = variables.get_variables_by_name('beta')[0]
gamma = variables.get_variables_by_name('gamma')[0]
self.assertEqual(beta.op.name, 'BatchNorm/beta')
self.assertEqual(gamma.op.name, 'BatchNorm/gamma')
moving_mean = variables.get_variables_by_name('moving_mean')[0]
moving_variance = variables.get_variables_by_name('moving_variance')[0]
self.assertEqual(moving_mean.op.name, 'BatchNorm/moving_mean')
self.assertEqual(moving_variance.op.name, 'BatchNorm/moving_variance')
def testMovingAverageVariables(self):
height, width = 3, 3
with self.test_session():
images = random_ops.random_uniform((5, height, width, 3), seed=1)
_layers.batch_norm(images, scale=True)
self.assertEqual(len(variables.get_model_variables()), 4)
moving_mean = variables.get_variables_by_name('moving_mean')[0]
moving_variance = variables.get_variables_by_name('moving_variance')[0]
self.assertEqual(moving_mean.op.name, 'BatchNorm/moving_mean')
self.assertEqual(moving_variance.op.name, 'BatchNorm/moving_variance')
def testMovingAverageVariablesZeroDebias(self):
height, width = 3, 3
with self.test_session():
images = random_ops.random_uniform((5, height, width, 3), seed=1)
_layers.batch_norm(images, scale=True, zero_debias_moving_mean=True)
self.assertEqual(len(variables.get_model_variables()), 6)
moving_mean = variables.get_variables_by_name('moving_mean')[0]
moving_variance = variables.get_variables_by_name('moving_variance')[0]
biased = variables.get_variables_by_name('biased')[0]
local_step = variables.get_variables_by_name('local_step')[0]
self.assertEqual(moving_mean.op.name, 'BatchNorm/moving_mean')
self.assertEqual(moving_variance.op.name, 'BatchNorm/moving_variance')
self.assertEqual(biased.op.name, 'BatchNorm/BatchNorm/moving_mean/biased')
self.assertEqual(local_step.op.name,
'BatchNorm/BatchNorm/moving_mean/local_step')
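# The two extra model variables above come from zero-debiasing: the moving
# mean is kept as a raw 'biased' accumulator together with a 'local_step'
# counter, and the debiased estimate divides by (1 - decay**local_step)
# when read; this is the same correction used by Adam-style moment
# estimators.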
def testUpdatesCollection(self):
height, width = 3, 3
with self.test_session():
images = random_ops.random_uniform((5, height, width, 3), seed=1)
_layers.batch_norm(images, updates_collections='my_update_ops')
update_layers = ops.get_collection('my_update_ops')
update_moving_mean = update_layers[0]
update_moving_variance = update_layers[1]
self.assertEqual(update_moving_mean.op.name, 'BatchNorm/AssignMovingAvg')
self.assertEqual(update_moving_variance.op.name,
'BatchNorm/AssignMovingAvg_1')
def testVariablesCollections(self):
variables_collections = {
'beta': ['beta'],
'gamma': ['gamma'],
'moving_mean': ['moving_mean'],
'moving_variance': ['moving_variance'],
}
images = random_ops.random_uniform((5, 5, 5, 3), seed=1)
_layers.batch_norm(
images, scale=True, variables_collections=variables_collections)
for var_name, collection_names in variables_collections.items():
collection = ops.get_collection(collection_names[0])
self.assertEqual(len(collection), 1)
var_name_in_collection = collection[0].op.name
self.assertEqual(var_name_in_collection, 'BatchNorm/' + var_name)
def testReuseVariables(self):
height, width = 3, 3
with self.test_session():
images = random_ops.random_uniform((5, height, width, 3), seed=1)
_layers.batch_norm(images, scale=True, scope='bn')
_layers.batch_norm(images, scale=True, scope='bn', reuse=True)
beta = variables.get_variables_by_name('beta')
gamma = variables.get_variables_by_name('gamma')
self.assertEqual(len(beta), 1)
self.assertEqual(len(gamma), 1)
moving_mean = variables.get_variables_by_name('moving_mean')
moving_variance = variables.get_variables_by_name('moving_variance')
moving_vars = moving_mean + moving_variance
self.assertEqual(len(moving_vars), 2)
def testReuseUpdateOps(self):
height, width = 3, 3
with self.test_session():
images = random_ops.random_uniform((5, height, width, 3), seed=1)
with arg_scope([_layers.batch_norm], updates_collections='update_ops'):
_layers.batch_norm(images, scope='bn')
self.assertEqual(len(ops.get_collection('update_ops')), 2)
_layers.batch_norm(images, scope='bn', reuse=True)
self.assertEqual(len(ops.get_collection('update_ops')), 4)
def testCreateMovingVars(self):
height, width = 3, 3
with self.test_session():
images = random_ops.random_uniform((5, height, width, 3), seed=1)
_ = _layers.batch_norm(images)
moving_mean = variables.get_variables('BatchNorm/moving_mean')
self.assertEqual(len(moving_mean), 1)
self.assertEqual(moving_mean[0].op.name, 'BatchNorm/moving_mean')
moving_variance = variables.get_variables('BatchNorm/moving_variance')
self.assertEqual(len(moving_variance), 1)
self.assertEqual(moving_variance[0].op.name, 'BatchNorm/moving_variance')
def testZeroDebiasMovingMean(self):
height, width = 3, 3
batch_size = 10
channels = 3
np.random.seed(1)
image_shape = (batch_size, height, width, channels)
axis = (0, 1, 2)
image_values = np.random.rand(*image_shape)
expected_mean = np.mean(image_values, axis=axis)
expected_var = np.var(image_values, axis=axis)
images = constant_op.constant(
image_values, shape=image_shape, dtype=dtypes.float32)
output = _layers.batch_norm(
images,
decay=0.1,
updates_collections=None,
zero_debias_moving_mean=True)
moving_mean = variables.get_variables_by_name('BatchNorm/moving_mean')[0]
moving_variance = variables.get_variables_by_name('moving_variance')[0]
biased = variables.get_variables_by_name('biased')[0]
local_step = variables.get_variables_by_name('local_step')[0]
with self.test_session() as sess:
sess.run(variables_lib.global_variables_initializer())
self.assertAllClose(local_step.eval(), 0)
self.assertAllClose(moving_mean.eval(), [0] * channels)
self.assertAllClose(biased.eval(), [0] * channels)
self.assertAllClose(moving_variance.eval(), [1] * channels)
for i in range(10):
self.assertAllClose(local_step.eval(), i)
sess.run([output])
# In this case moving_mean == expected_mean after each update
self.assertAllClose(moving_mean.eval(), expected_mean)
# After 10 updates with decay 0.1 moving_mean == expected_mean,
# biased == expected_mean and moving_variance == expected_var.
self.assertAllClose(moving_mean.eval(), expected_mean)
self.assertAllClose(moving_variance.eval(), expected_var)
self.assertAllClose(biased.eval(), expected_mean)
def _testNoneUpdatesCollections(self,
fused,
data_format='NHWC',
zero_debias_moving_mean=False):
height, width = 2, 2
batch_size = 10
channels = 3
np.random.seed(1)
use_gpu = fused
with self.test_session(use_gpu=use_gpu) as sess:
if data_format == 'NHWC':
image_shape = (batch_size, height, width, channels)
axis = (0, 1, 2)
else:
image_shape = (batch_size, channels, height, width)
axis = (0, 2, 3)
image_values = np.random.rand(*image_shape)
expected_mean = np.mean(image_values, axis=axis)
expected_var = np.var(image_values, axis=axis)
if fused:
# Add Bessel's correction
expected_var, _ = self._addBesselsCorrection(batch_size * height *
width, expected_var)
images = constant_op.constant(
image_values, shape=image_shape, dtype=dtypes.float32)
output = _layers.batch_norm(
images,
decay=0.1,
updates_collections=None,
fused=fused,
data_format=data_format,
zero_debias_moving_mean=zero_debias_moving_mean)
# update ops are not added to the UPDATE_OPS collection.
self.assertEqual(ops.get_collection(ops.GraphKeys.UPDATE_OPS), [])
# Initialize all variables
sess.run(variables_lib.global_variables_initializer())
moving_mean = variables.get_variables('BatchNorm/moving_mean')[0]
moving_variance = variables.get_variables('BatchNorm/moving_variance')[0]
mean, variance = sess.run([moving_mean, moving_variance])
# After initialization moving_mean == 0 and moving_variance == 1.
self.assertAllClose(mean, [0] * channels)
self.assertAllClose(variance, [1] * channels)
for _ in range(10):
sess.run([output])
if zero_debias_moving_mean:
# In this case moving_mean == expected_mean after update
self.assertAllClose(moving_mean.eval(), expected_mean)
mean = moving_mean.eval()
variance = moving_variance.eval()
# After 10 updates with decay 0.1 moving_mean == expected_mean and
# moving_variance == expected_var.
self.assertAllClose(mean, expected_mean)
self.assertAllClose(variance, expected_var)
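# Why ten updates suffice above: assign_moving_average with decay = 0.1
# computes moving = 0.1 * moving + 0.9 * batch_stat, so the weight left on
# the initial value after k steps is 0.1**k; after 10 steps that residual
# is 1e-10, far below the assertAllClose tolerance, and since every step
# sees the same constant batch, the moving statistics converge to the batch
# statistics.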
def testNoneUpdatesCollectionsNHWC(self):
self._testNoneUpdatesCollections(False, data_format='NHWC')
def testNoneUpdatesCollectionsNCHW(self):
self._testNoneUpdatesCollections(False, data_format='NCHW')
def testNoneUpdatesCollectionsNHWCZeroDebias(self):
self._testNoneUpdatesCollections(
False, data_format='NHWC', zero_debias_moving_mean=True)
def testNoneUpdatesCollectionsNCHWZeroDebias(self):
self._testNoneUpdatesCollections(
False, data_format='NCHW', zero_debias_moving_mean=True)
def testNoneUpdatesCollectionsFusedNCHW(self):
if test.is_gpu_available(cuda_only=True):
self._testNoneUpdatesCollections(True, data_format='NCHW')
def testNoneUpdatesCollectionsFusedNHWC(self):
self._testNoneUpdatesCollections(True, data_format='NHWC')
def testNoneUpdatesCollectionsFusedNCHWZeroDebias(self):
if test.is_gpu_available(cuda_only=True):
self._testNoneUpdatesCollections(
True, data_format='NCHW', zero_debias_moving_mean=True)
def testNoneUpdatesCollectionsFusedNHWCZeroDebias(self):
self._testNoneUpdatesCollections(
True, data_format='NHWC', zero_debias_moving_mean=True)
def _testDelayedUpdateMovingVars(self,
fused,
data_format='NHWC',
zero_debias_moving_mean=False):
height, width = 2, 2
batch_size = 10
channels = 3
np.random.seed(1)
use_gpu = fused
with self.test_session(use_gpu=use_gpu) as sess:
if data_format == 'NHWC':
image_shape = (batch_size, height, width, channels)
axis = (0, 1, 2)
else:
image_shape = (batch_size, channels, height, width)
axis = (0, 2, 3)
image_values = np.random.rand(*image_shape)
expected_mean = np.mean(image_values, axis=axis)
expected_var = np.var(image_values, axis=axis)
if fused:
# Add Bessel's correction
expected_var, correction_factor = self._addBesselsCorrection(
batch_size * height * width, expected_var)
images = constant_op.constant(
image_values, shape=image_shape, dtype=dtypes.float32)
output = _layers.batch_norm(
images,
decay=0.1,
fused=fused,
data_format=data_format,
zero_debias_moving_mean=zero_debias_moving_mean)
update_ops = ops.get_collection(ops.GraphKeys.UPDATE_OPS)
# update ops are added to the UPDATE_OPS collection.
self.assertEqual(len(update_ops), 2)
with ops.control_dependencies(update_ops):
barrier = control_flow_ops.no_op(name='barrier')
output = control_flow_ops.with_dependencies([barrier], output)
# Initialize all variables
sess.run(variables_lib.global_variables_initializer())
moving_mean = variables.get_variables('BatchNorm/moving_mean')[0]
moving_variance = variables.get_variables('BatchNorm/moving_variance')[0]
mean, variance = sess.run([moving_mean, moving_variance])
# After initialization moving_mean == 0 and moving_variance == 1.
self.assertAllClose(mean, [0] * channels)
self.assertAllClose(variance, [1] * channels)
for _ in range(10):
sess.run([output])
if zero_debias_moving_mean:
# In this case moving_mean == expected_mean after update
self.assertAllClose(moving_mean.eval(), expected_mean)
mean = moving_mean.eval()
variance = moving_variance.eval()
# After 10 updates with decay 0.1 moving_mean == expected_mean and
# moving_variance == expected_var.
self.assertAllClose(mean, expected_mean)
if fused:
# Add Bessel's correction
moving_variance_corrected = moving_variance / correction_factor
correct_moving_variance = state_ops.assign(moving_variance,
moving_variance_corrected)
sess.run(correct_moving_variance)
self.assertAllClose(variance, expected_var)
def testDelayedUpdateMovingVarsNHWC(self):
self._testDelayedUpdateMovingVars(False, data_format='NHWC')
def testDelayedUpdateMovingVarsNCHW(self):
self._testDelayedUpdateMovingVars(False, data_format='NCHW')
def testDelayedUpdateMovingVarsFusedNCHW(self):
if test.is_gpu_available(cuda_only=True):
self._testDelayedUpdateMovingVars(True, data_format='NCHW')
def testDelayedUpdateMovingVarsFusedNHWC(self):
self._testDelayedUpdateMovingVars(True, data_format='NHWC')
def testDelayedUpdateMovingVars(self):
self._testDelayedUpdateMovingVars(False)
def _testEvalMovingVars(self, zero_debias_moving_mean=False):
height, width = 3, 3
with self.test_session() as sess:
image_shape = (10, height, width, 3)
image_values = np.random.rand(*image_shape)
expected_mean = np.mean(image_values, axis=(0, 1, 2))
expected_var = np.var(image_values, axis=(0, 1, 2))
images = constant_op.constant(
image_values, shape=image_shape, dtype=dtypes.float32)
output = _layers.batch_norm(images, decay=0.1, is_training=False)
self.assertEqual(ops.get_collection(ops.GraphKeys.UPDATE_OPS), [])
# Initialize all variables
sess.run(variables_lib.global_variables_initializer())
moving_mean = variables.get_variables('BatchNorm/moving_mean')[0]
moving_variance = variables.get_variables('BatchNorm/moving_variance')[0]
mean, variance = sess.run([moving_mean, moving_variance])
# After initialization moving_mean == 0 and moving_variance == 1.
self.assertAllClose(mean, [0] * 3)
self.assertAllClose(variance, [1] * 3)
# Simulate assignment from saver restore.
init_assigns = [
state_ops.assign(moving_mean, expected_mean),
state_ops.assign(moving_variance, expected_var)
]
sess.run(init_assigns)
for _ in range(10):
sess.run([output], {images: np.random.rand(*image_shape)})
mean = moving_mean.eval()
variance = moving_variance.eval()
# Although we feed different images, the moving_mean and moving_variance
# shouldn't change.
self.assertAllClose(mean, expected_mean)
self.assertAllClose(variance, expected_var)
def testEvalMovingVars(self):
self._testEvalMovingVars()
def testEvalMovingVarsZeroDebias(self):
self._testEvalMovingVars(True)
def testEvalMovingVarsWithPartitioner(self):
# This test makes sure that the moving-mean and moving-variance logic works
# when `batch_norm` is called within a variable-scope that has a variable
# partitioner.
partitioner = partitioned_variables.fixed_size_partitioner(2, axis=0)
with variable_scope.variable_scope(
variable_scope.get_variable_scope(), partitioner=partitioner):
self.testEvalMovingVars()
def _testReuseVars(self, fused, zero_debias_moving_mean=False):
height, width = 3, 3
batch_size = 10
channels = 3
with self.test_session() as sess:
image_shape = (batch_size, height, width, channels)
image_values = np.random.rand(*image_shape)
expected_mean = np.mean(image_values, axis=(0, 1, 2))
expected_var = np.var(image_values, axis=(0, 1, 2))
if fused:
# Add Bessel's correction
expected_var, correction_factor = self._addBesselsCorrection(
batch_size * height * width, expected_var)
images = constant_op.constant(
image_values, shape=image_shape, dtype=dtypes.float32)
output_train = _layers.batch_norm(
images,
decay=0.1,
is_training=True,
scope='BN',
fused=fused,
zero_debias_moving_mean=zero_debias_moving_mean)
output_eval = _layers.batch_norm(
images,
decay=0.1,
is_training=False,
scope='BN',
reuse=True,
fused=fused,
zero_debias_moving_mean=zero_debias_moving_mean)
# Initialize all variables
sess.run(variables_lib.global_variables_initializer())
moving_mean = variables.get_variables('BN/moving_mean')[0]
moving_variance = variables.get_variables('BN/moving_variance')[0]
mean, variance = sess.run([moving_mean, moving_variance])
# After initialization moving_mean == 0 and moving_variance == 1.
self.assertAllClose(mean, [0] * channels)
self.assertAllClose(variance, [1] * channels)
update_ops = ops.get_collection(ops.GraphKeys.UPDATE_OPS)
with ops.control_dependencies(update_ops):
barrier = control_flow_ops.no_op(name='barrier')
train_op = control_flow_ops.with_dependencies([barrier], output_train)
# Before updates the outputs are different for train and eval.
self.assertFalse(
np.allclose(sess.run([output_train]), sess.run([output_eval])))
for _ in range(10):
sess.run([train_op])
mean = moving_mean.eval()
variance = moving_variance.eval()
# After 10 updates with decay 0.1 moving_mean == expected_mean and
# moving_variance == expected_var.
self.assertAllClose(mean, expected_mean)
if fused:
# Add Bessel's correction
moving_variance_corrected = moving_variance / correction_factor
correct_moving_variance = state_ops.assign(moving_variance,
moving_variance_corrected)
sess.run(correct_moving_variance)
self.assertAllClose(variance, expected_var)
# After convergence output_train and output_eval should be the same.
self.assertAllClose(sess.run([output_train]), sess.run([output_eval]))
def testReuseVarsDefault(self):
self._testReuseVars(False)
def testReuseVarsFused(self):
self._testReuseVars(True)
def testReuseVarsDefaultZeroDebias(self):
self._testReuseVars(False, True)
def testReuseVarsFusedZeroDebias(self):
self._testReuseVars(True, True)
def _testIsTrainingVariable(self,
fused,
data_format='NHWC',
zero_debias_moving_mean=False):
height, width = 2, 2
batch_size = 10
channels = 3
np.random.seed(1)
use_gpu = fused
with self.test_session(use_gpu=use_gpu) as sess:
if data_format == 'NHWC':
image_shape = (batch_size, height, width, channels)
axis = (0, 1, 2)
else:
image_shape = (batch_size, channels, height, width)
axis = (0, 2, 3)
image_values = np.random.rand(*image_shape)
expected_mean = np.mean(image_values, axis=axis)
expected_var = np.var(image_values, axis=axis)
if fused:
# Add Bessel's correction
expected_var, correction_factor = self._addBesselsCorrection(
batch_size * height * width, expected_var)
images = constant_op.constant(
image_values, shape=image_shape, dtype=dtypes.float32)
is_training = variables_lib.Variable(True)
output = _layers.batch_norm(
images,
decay=0.1,
is_training=is_training,
fused=fused,
data_format=data_format,
zero_debias_moving_mean=zero_debias_moving_mean)
# Initialize all variables
sess.run(variables_lib.global_variables_initializer())
moving_mean = variables.get_variables('BatchNorm/moving_mean')[0]
moving_variance = variables.get_variables('BatchNorm/moving_variance')[0]
mean, variance = sess.run([moving_mean, moving_variance])
# After initialization moving_mean == 0 and moving_variance == 1.
self.assertAllClose(mean, [0] * channels)
self.assertAllClose(variance, [1] * channels)
# Before updates the outputs differ depending on is_training.
output_true = sess.run([output], {is_training: True})
output_false = sess.run([output], {is_training: False})
self.assertFalse(np.allclose(output_true, output_false))
update_ops = ops.get_collection(ops.GraphKeys.UPDATE_OPS)
with ops.control_dependencies(update_ops):
barrier = control_flow_ops.no_op(name='barrier')
train_op = control_flow_ops.with_dependencies([barrier], output)
for _ in range(10):
sess.run([train_op])
mean = moving_mean.eval()
variance = moving_variance.eval()
# After 10 updates with decay 0.1 moving_mean == expected_mean and
# moving_variance == expected_var.
self.assertAllClose(mean, expected_mean)
self.assertAllClose(variance, expected_var)
# After the updates converge, the outputs no longer depend on is_training.
output_true = sess.run([output], {is_training: True})
if fused:
# Add Bessel's correction
moving_variance_corrected = moving_variance / correction_factor
correct_moving_variance = state_ops.assign(moving_variance,
moving_variance_corrected)
sess.run(correct_moving_variance)
output_false = sess.run([output], {is_training: False})
self.assertAllClose(output_true, output_false)
def testIsTrainingVariableNHWC(self):
self._testIsTrainingVariable(False, data_format='NHWC')
def testIsTrainingVariableNCHW(self):
self._testIsTrainingVariable(False, data_format='NCHW')
def testIsTrainingVariableNHWCZeroDebias(self):
self._testIsTrainingVariable(
False, data_format='NHWC', zero_debias_moving_mean=True)
def testIsTrainingVariableNCHWZeroDebias(self):
self._testIsTrainingVariable(
False, data_format='NCHW', zero_debias_moving_mean=True)
def testIsTrainingVariableFusedNCHW(self):
if test.is_gpu_available(cuda_only=True):
self._testIsTrainingVariable(True, data_format='NCHW')
def testIsTrainingVariableFusedNHWC(self):
self._testIsTrainingVariable(True, data_format='NHWC')
def testIsTrainingVariableFusedNCHWZeroDebias(self):
if test.is_gpu_available(cuda_only=True):
self._testIsTrainingVariable(
True, data_format='NCHW', zero_debias_moving_mean=True)
def testIsTrainingVariableFusedNHWCZeroDebias(self):
self._testIsTrainingVariable(
True, data_format='NHWC', zero_debias_moving_mean=True)
def testNoUpdatesWhenIsTrainingFalse(self):
height, width = 3, 3
with self.test_session() as sess:
image_shape = (10, height, width, 3)
image_values = np.random.rand(*image_shape)
images = constant_op.constant(
image_values, shape=image_shape, dtype=dtypes.float32)
output = _layers.batch_norm(images, decay=0.1, is_training=False)
update_ops = ops.get_collection(ops.GraphKeys.UPDATE_OPS)
# update ops are not added to the UPDATE_OPS collection.
self.assertEqual(len(update_ops), 0)
# Initialize all variables
sess.run(variables_lib.global_variables_initializer())
moving_mean = variables.get_variables('BatchNorm/moving_mean')[0]
moving_variance = variables.get_variables('BatchNorm/moving_variance')[0]
mean, variance = sess.run([moving_mean, moving_variance])
# After initialization moving_mean == 0 and moving_variance == 1.
self.assertAllClose(mean, [0] * 3)
self.assertAllClose(variance, [1] * 3)
# When is_training is False batch_norm doesn't update moving_vars.
for _ in range(10):
sess.run([output])
self.assertAllClose(moving_mean.eval(), [0] * 3)
self.assertAllClose(moving_variance.eval(), [1] * 3)
def testNoneUpdatesCollectionNoTraining(self):
height, width = 3, 3
with self.test_session() as sess:
image_shape = (10, height, width, 3)
image_values = np.random.rand(*image_shape)
images = constant_op.constant(
image_values, shape=image_shape, dtype=dtypes.float32)
output = _layers.batch_norm(
images, decay=0.1, updates_collections=None, is_training=False)
# update ops are not added to the UPDATE_OPS collection.
self.assertEqual(ops.get_collection(ops.GraphKeys.UPDATE_OPS), [])
# Initialize all variables
sess.run(variables_lib.global_variables_initializer())
moving_mean = variables.get_variables('BatchNorm/moving_mean')[0]
moving_variance = variables.get_variables('BatchNorm/moving_variance')[0]
mean, variance = sess.run([moving_mean, moving_variance])
# After initialization moving_mean == 0 and moving_variance == 1.
self.assertAllClose(mean, [0] * 3)
self.assertAllClose(variance, [1] * 3)
# When is_training is False batch_norm doesn't update moving_vars.
for _ in range(10):
sess.run([output])
self.assertAllClose(moving_mean.eval(), [0] * 3)
self.assertAllClose(moving_variance.eval(), [1] * 3)
def _testNoneUpdatesCollectionIsTrainingVariable(self,
fused,
data_format='NHWC'):
height, width = 2, 2
batch_size = 10
channels = 3
np.random.seed(1)
use_gpu = fused
with self.test_session(use_gpu=use_gpu) as sess:
if data_format == 'NHWC':
image_shape = (batch_size, height, width, channels)
axis = (0, 1, 2)
else:
image_shape = (batch_size, channels, height, width)
axis = (0, 2, 3)
image_values = np.random.rand(*image_shape)
expected_mean = np.mean(image_values, axis=axis)
expected_var = np.var(image_values, axis=axis)
if fused:
# Add Bessel's correction
expected_var, correction_factor = self._addBesselsCorrection(
batch_size * height * width, expected_var)
images = constant_op.constant(
image_values, shape=image_shape, dtype=dtypes.float32)
is_training = variables_lib.Variable(True)
output = _layers.batch_norm(
images,
decay=0.1,
updates_collections=None,
is_training=is_training,
fused=fused,
data_format=data_format)
# update ops are not added to the UPDATE_OPS collection.
self.assertEqual(ops.get_collection(ops.GraphKeys.UPDATE_OPS), [])
# Initialize all variables
sess.run(variables_lib.global_variables_initializer())
moving_mean = variables.get_variables('BatchNorm/moving_mean')[0]
moving_variance = variables.get_variables('BatchNorm/moving_variance')[0]
mean, variance = sess.run([moving_mean, moving_variance])
# After initialization moving_mean == 0 and moving_variance == 1.
self.assertAllClose(mean, [0] * channels)
self.assertAllClose(variance, [1] * channels)
# When is_training is False batch_norm doesn't update moving_vars.
for _ in range(10):
sess.run([output], {is_training: False})
self.assertAllClose(moving_mean.eval(), [0] * channels)
self.assertAllClose(moving_variance.eval(), [1] * channels)
# Before updates the outputs differ depending on is_training.
output_true = sess.run([output], {is_training: True})
output_false = sess.run([output], {is_training: False})
self.assertFalse(np.allclose(output_true, output_false))
# When is_training is True update moving_vars.
for _ in range(10):
sess.run([output], {is_training: True})
# After 10 updates with decay 0.1 moving_mean == expected_mean and
# moving_variance == expected_var.
self.assertAllClose(moving_mean.eval(), expected_mean)
self.assertAllClose(moving_variance.eval(), expected_var)
# After the updates converge, the outputs no longer depend on is_training.
output_true = sess.run([output], {is_training: True})
if fused:
# Add Bessel's correction
moving_variance_corrected = moving_variance / correction_factor
correct_moving_variance = state_ops.assign(moving_variance,
moving_variance_corrected)
sess.run(correct_moving_variance)
output_false = sess.run([output], {is_training: False})
self.assertTrue(np.allclose(output_true, output_false))
def testNoneUpdatesCollectionIsTrainingVariableNHWC(self):
self._testNoneUpdatesCollectionIsTrainingVariable(False, data_format='NHWC')
def testNoneUpdatesCollectionIsTrainingVariableNCHW(self):
self._testNoneUpdatesCollectionIsTrainingVariable(False, data_format='NCHW')
def testNoneUpdatesCollectionIsTrainingVariableFusedNCHW(self):
if test.is_gpu_available(cuda_only=True):
self._testNoneUpdatesCollectionIsTrainingVariable(
True, data_format='NCHW')
def testNoneUpdatesCollectionIsTrainingVariableFusedNHWC(self):
self._testNoneUpdatesCollectionIsTrainingVariable(True, data_format='NHWC')
def _testTrainMovingVars(self, fused, data_format='NHWC'):
# Test that the gradients are stable while the moving_mean is updated.
# Since the moving_mean is used as a shift to compute tf.moments, the
# gradients could diverge; this test checks that the gradients remain
# stable while the moving_mean is updated.
height, width = 7, 7
batch_size = 10
channels = 32
np.random.seed(1)
use_gpu = fused
with self.test_session(use_gpu=use_gpu) as sess:
if data_format == 'NHWC':
image_shape = (batch_size, height, width, channels)
axis = (0, 1, 2)
else:
image_shape = (batch_size, channels, height, width)
axis = (0, 2, 3)
image_values = np.random.rand(*image_shape) + 256
expected_mean = np.mean(image_values, axis=axis)
expected_var = np.var(image_values, axis=axis)
if fused:
# Add Bessel's correction
expected_var, _ = self._addBesselsCorrection(batch_size * height *
width, expected_var)
images = constant_op.constant(
image_values, shape=image_shape, dtype=dtypes.float32)
output = _layers.batch_norm(
images,
decay=0.2,
updates_collections=None,
is_training=True,
fused=fused,
data_format=data_format)
self.assertEqual(ops.get_collection(ops.GraphKeys.UPDATE_OPS), [])
objective = math_ops.reduce_sum(output)
[images_gradients] = gradients_impl.gradients(objective, images)
# Initialize all variables
sess.run(variables_lib.global_variables_initializer())
moving_mean = variables.get_variables('BatchNorm/moving_mean')[0]
moving_variance = variables.get_variables('BatchNorm/moving_variance')[0]
mean, variance = sess.run([moving_mean, moving_variance])
# After initialization moving_mean == 0 and moving_variance == 1.
self.assertAllClose(mean, [0] * channels)
self.assertAllClose(variance, [1] * channels)
# Initial input gradients.
images_gradients_value = sess.run(images_gradients)
for _ in range(10):
np_output, new_images_gradients = sess.run([output, images_gradients])
# The outputs should be close to 0.0 mean and 1.0 variance
self.assertAllClose(
np.mean(
np_output, axis=axis), [0] * channels, rtol=0.001, atol=0.001)
self.assertAllClose(
np.var(np_output, axis=axis), [1] * channels, rtol=0.01, atol=0.01)
# The gradients should change slowly while updating moving_mean.
max_diff = np.max(np.abs(images_gradients_value - new_images_gradients))
self.assertGreaterEqual(max_diff, 0.0)
self.assertLess(max_diff, 5e-5)
self.assertAllClose(moving_mean.eval(), expected_mean)
self.assertAllClose(moving_variance.eval(), expected_var)
def testTrainMovingVarsNHWC(self):
self._testTrainMovingVars(False, data_format='NHWC')
def testTrainMovingVarsNCHW(self):
self._testTrainMovingVars(False, data_format='NCHW')
def testTrainMovingVarsFusedNCHW(self):
if test.is_gpu_available(cuda_only=True):
self._testTrainMovingVars(True, data_format='NCHW')
def testTrainMovingVarsFusedNHWC(self):
self._testTrainMovingVars(True, data_format='NHWC')
def testCustomInitializer(self):
height, width = 3, 3
channels = 3
with self.test_session() as sess:
images = (np.ones((5, height, width, channels)) * 9.0).astype('f')
beta = init_ops.constant_initializer((np.ones(channels) * 5.0).astype(
'f'))
gamma = init_ops.constant_initializer((np.ones(channels) * 2.0).astype(
'f'))
mean = init_ops.constant_initializer((np.ones(channels) * 5.0).astype(
'f'))
variance = init_ops.constant_initializer((np.ones(channels) * 4.0).astype(
'f'))
output = _layers.batch_norm(
images,
is_training=False,
scale=True,
epsilon=0.0,
param_initializers={
'beta': beta,
'gamma': gamma,
'moving_mean': mean,
'moving_variance': variance,
})
sess.run(variables_lib.global_variables_initializer())
outs = sess.run(output)
self.assertAllClose(outs, images)
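# The identity assertion above is just the inference-mode batch-norm
# formula evaluated with these initializers:
#   output = gamma * (x - mean) / sqrt(var + eps) + beta
#          = 2.0 * (9.0 - 5.0) / sqrt(4.0 + 0.0) + 5.0 = 9.0,
# so the constant input is reproduced exactly.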
def _runBatchNormalizationWithFormat(self, shape, data_format, is_training):
channels = shape[-1]
with self.test_session() as sess:
images = np.arange(np.prod(shape), dtype=np.float32).reshape(shape)
beta = init_ops.constant_initializer(
np.arange(
2, channels + 2, dtype=np.float32))
gamma = init_ops.constant_initializer(
np.arange(
10, channels + 10, dtype=np.float32) * 2.0)
mean = init_ops.constant_initializer(
np.arange(
3, channels + 3, dtype=np.float32) * 5.0)
variance = init_ops.constant_initializer(
np.arange(
1, channels + 1, dtype=np.float32) * 4.0)
if data_format == 'NCHW':
# Reshape inputs from NHWC to NCHW format.
images = array_ops.transpose(
images, [0, len(shape) - 1] + list(range(1, len(shape) - 1)))
output = _layers.batch_norm(
images,
is_training=is_training,
scale=True,
epsilon=0.5,
param_initializers={
'beta': beta,
'gamma': gamma,
'moving_mean': mean,
'moving_variance': variance,
},
data_format=data_format)
if data_format == 'NCHW':
# Reshape outputs from NCHW back to NHWC format.
output = array_ops.transpose(output,
[0] + list(range(2, len(shape))) + [1])
sess.run(variables_lib.global_variables_initializer())
return sess.run(output)
def testNHWCAndNCHWInferenceProduceSameOutput(self):
for shape in [[7, 3, 5], [5, 2, 3, 4], [11, 3, 2, 4, 5]]:
nhwc = self._runBatchNormalizationWithFormat(
data_format='NHWC', shape=shape, is_training=False)
nchw = self._runBatchNormalizationWithFormat(
data_format='NCHW', shape=shape, is_training=False)
self.assertAllClose(nhwc, nchw, atol=1e-4, rtol=1e-4)
def testNHWCAndNCHWTrainingProduceSameOutput(self):
for shape in [[7, 3, 5], [5, 2, 3, 4], [11, 3, 2, 4, 5]]:
nhwc = self._runBatchNormalizationWithFormat(
data_format='NHWC', shape=shape, is_training=True)
nchw = self._runBatchNormalizationWithFormat(
data_format='NCHW', shape=shape, is_training=True)
self.assertAllClose(nhwc, nchw, atol=1e-4, rtol=1e-4)
class LayerNormTest(test.TestCase):
def testUnknownShape(self):
with ops.Graph().as_default() as g, self.test_session(g):
inputs = array_ops.placeholder(dtype=dtypes.float32)
with self.assertRaisesRegexp(ValueError, 'undefined rank'):
_layers.layer_norm(inputs)
def testUnknownLastDim(self):
with ops.Graph().as_default() as g, self.test_session(g):
inputs = array_ops.placeholder(dtype=dtypes.float32)
inputs.set_shape(tensor_shape.TensorShape((5, 3, 3, None)))
with self.assertRaisesRegexp(ValueError, 'undefined last dimension'):
_layers.layer_norm(inputs)
def testCreateOp(self):
height, width = 3, 3
with self.test_session():
images = np.random.uniform(size=(5, height, width, 3))
output = _layers.layer_norm(images)
self.assertTrue(output.op.name.startswith('LayerNorm/batchnorm'))
self.assertListEqual(output.get_shape().as_list(), [5, height, width, 3])
def testCreateVariables(self):
height, width = 3, 3
with self.test_session():
images = random_ops.random_uniform((5, height, width, 3), seed=1)
_layers.layer_norm(images)
beta = variables.get_variables_by_name('beta')[0]
gamma = variables.get_variables_by_name('gamma')[0]
self.assertEqual(beta.op.name, 'LayerNorm/beta')
self.assertEqual(gamma.op.name, 'LayerNorm/gamma')
def testReuseVariables(self):
height, width = 3, 3
with self.test_session():
images = random_ops.random_uniform((5, height, width, 3), seed=1)
_layers.layer_norm(images, scope='ln')
_layers.layer_norm(images, scope='ln', reuse=True)
beta = variables.get_variables_by_name('beta')
gamma = variables.get_variables_by_name('gamma')
self.assertEqual(len(beta), 1)
self.assertEqual(len(gamma), 1)
def testReuseVars(self):
height, width = 3, 3
with self.test_session() as sess:
image_shape = (10, height, width, 3)
image_values = np.random.rand(*image_shape)
images = constant_op.constant(
image_values, shape=image_shape, dtype=dtypes.float32)
output_train = _layers.layer_norm(images, scope='LN')
output_eval = _layers.layer_norm(images, scope='LN', reuse=True)
# Initialize all variables
sess.run(variables_lib.global_variables_initializer())
# output_train and output_eval should be the same.
self.assertAllClose(sess.run([output_train]), sess.run([output_eval]))
def doOutputTest(self, input_shape, tol=1e-3):
for mu in [0.0, 1e2]:
for sigma in [1.0, 0.1]:
input_values = np.random.rand(*input_shape) * sigma + mu
expected_mean = np.zeros(input_shape[0])
expected_var = np.ones(input_shape[0])
with ops.Graph().as_default() as g:
with self.test_session(graph=g) as sess:
inputs = constant_op.constant(input_values, shape=input_shape,
dtype=dtypes.float32)
output_op = _layers.layer_norm(inputs, scope='LN')
# Initialize all variables
sess.run(variables_lib.global_variables_initializer())
# The mean and variance of the output should be close to 0 and 1
# respectively.
moments_axis = tuple([i for i in range(1, len(input_shape))])
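# E.g. for input_shape (10, 10, 10, 30) the moments are computed over axes (1, 2, 3).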
outputs = sess.run(output_op)
# Make sure that there are no NaNs
self.assertFalse(np.isnan(outputs).any())
mean = np.mean(outputs, axis=moments_axis)
var = np.var(outputs, axis=moments_axis)
self.assertAllClose(mean, expected_mean, rtol=tol, atol=tol)
self.assertAllClose(var, expected_var, rtol=tol, atol=tol)
def testOutput2DInput(self):
self.doOutputTest((10, 300))
def testOutput4DInput(self):
self.doOutputTest((100, 10, 10, 3))
def testOutputSmallInput(self):
self.doOutputTest((10, 10, 10, 30))
def testOutputBigInput(self):
self.doOutputTest((1, 100, 100, 1))
class MaxPool2DTest(test.TestCase):
def testInvalidDataFormat(self):
height, width = 3, 6
images = np.random.uniform(size=(5, height, width, 3))
with self.assertRaisesRegexp(ValueError,
'data_format has to be either NCHW or NHWC.'):
_layers.max_pool2d(images, [3, 3], data_format='CHWN')
def testCreateMaxPool(self):
height, width = 3, 6
images = np.random.uniform(size=(5, height, width, 3)).astype(np.float32)
output = _layers.max_pool2d(images, [3, 3])
self.assertEqual(output.op.name, 'MaxPool2D/MaxPool')
self.assertListEqual(output.get_shape().as_list(), [5, 1, 2, 3])
def testCreateMaxPoolNCHW(self):
height, width = 3, 6
images = np.random.uniform(size=(5, 3, height, width)).astype(np.float32)
output = _layers.max_pool2d(images, [3, 3], data_format='NCHW')
self.assertEqual(output.op.name, 'MaxPool2D/MaxPool')
self.assertListEqual(output.get_shape().as_list(), [5, 3, 1, 2])
def testCollectOutputs(self):
height, width = 3, 6
images = random_ops.random_uniform((5, height, width, 3), seed=1)
output = _layers.max_pool2d(images, [3, 3], outputs_collections='outputs')
output_collected = ops.get_collection('outputs')[0]
self.assertEqual(output_collected.aliases, ['MaxPool2D'])
self.assertEqual(output_collected, output)
def testCreateSquareMaxPool(self):
height, width = 3, 6
images = random_ops.random_uniform((5, height, width, 3), seed=1)
output = _layers.max_pool2d(images, 3)
self.assertEqual(output.op.name, 'MaxPool2D/MaxPool')
self.assertListEqual(output.get_shape().as_list(), [5, 1, 2, 3])
def testCreateMaxPoolWithScope(self):
height, width = 3, 6
images = random_ops.random_uniform((5, height, width, 3), seed=1)
output = _layers.max_pool2d(images, [3, 3], scope='pool1')
self.assertEqual(output.op.name, 'pool1/MaxPool')
def testCreateMaxPoolWithSamePadding(self):
height, width = 3, 6
images = random_ops.random_uniform((5, height, width, 3), seed=1)
output = _layers.max_pool2d(images, [3, 3], padding='SAME')
self.assertListEqual(output.get_shape().as_list(), [5, 2, 3, 3])
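# With the layer's default stride of 2, SAME padding yields
# ceil(3 / 2) x ceil(6 / 2) = 2 x 3 spatial output.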
def testCreateMaxPoolWithSamePaddingNCHW(self):
height, width = 3, 6
images = random_ops.random_uniform((5, 3, height, width), seed=1)
output = _layers.max_pool2d(
images, [3, 3], padding='SAME', data_format='NCHW')
self.assertListEqual(output.get_shape().as_list(), [5, 3, 2, 3])
def testCreateMaxPoolStrideWithSamePadding(self):
height, width = 3, 6
images = random_ops.random_uniform((5, height, width, 3), seed=1)
output = _layers.max_pool2d(images, [3, 3], stride=1, padding='SAME')
self.assertListEqual(output.get_shape().as_list(), [5, height, width, 3])
def testGlobalMaxPool(self):
height, width = 3, 6
images = random_ops.random_uniform((5, height, width, 3), seed=1)
output = _layers.max_pool2d(images, images.get_shape()[1:3], stride=1)
self.assertListEqual(output.get_shape().as_list(), [5, 1, 1, 3])
class OneHotEncodingTest(test.TestCase):
def testOneHotEncodingCreate(self):
with self.test_session():
labels = np.array([0, 1, 2])
output = _layers.one_hot_encoding(labels, num_classes=3)
self.assertEqual(output.op.name, 'OneHotEncoding/one_hot')
self.assertListEqual(output.get_shape().as_list(), [3, 3])
def testCollectOutputs(self):
with self.test_session():
labels = constant_op.constant([0, 1, 2])
output = _layers.one_hot_encoding(
labels, num_classes=3, outputs_collections='outputs')
c_output = ops.get_collection('outputs')[0]
self.assertEqual(c_output.aliases, ['OneHotEncoding'])
self.assertEqual(c_output, output)
def testOneHotEncoding(self):
with self.test_session():
labels = constant_op.constant([0, 1, 2])
one_hot_labels = constant_op.constant([[1, 0, 0], [0, 1, 0], [0, 0, 1]])
output = _layers.one_hot_encoding(labels, num_classes=3)
self.assertAllClose(output.eval(), one_hot_labels.eval())
def testOneHotEncodingInt32(self):
with self.test_session():
labels = constant_op.constant([0, 1, 2], dtype=dtypes.int32)
one_hot_labels = constant_op.constant([[1, 0, 0], [0, 1, 0], [0, 0, 1]])
output = _layers.one_hot_encoding(labels, num_classes=3)
self.assertAllClose(output.eval(), one_hot_labels.eval())
class RepeatTests(test.TestCase):
def testRepeat(self):
height, width = 3, 3
with self.test_session():
images = np.random.uniform(size=(5, height, width, 3)).astype(np.float32)
output = _layers.repeat(images, 3, layers_lib.conv2d, 32, [3, 3])
self.assertEqual(output.op.name, 'Repeat/convolution_3/Relu')
self.assertListEqual(output.get_shape().as_list(), [5, 3, 3, 32])
def testRepeatWithScope(self):
height, width = 3, 3
with self.test_session():
images = random_ops.random_uniform(
(5, height, width, 3), seed=1, name='images')
output = _layers.repeat(
images, 3, layers_lib.conv2d, 32, [3, 3], scope='conv1')
self.assertEqual(output.op.name, 'conv1/conv1_3/Relu')
self.assertListEqual(output.get_shape().as_list(), [5, 3, 3, 32])
class SeparableConv2dTest(test.TestCase):
def testCreateConvInt32(self):
height, width = 3, 3
with self.test_session():
images = random_ops.random_uniform(
(5, height, width, 3), seed=1, dtype=dtypes.int32, maxval=12345)
with self.assertRaisesRegexp(TypeError, 'non-floating point type'):
layers_lib.separable_conv2d(images, 32, [3, 3], 2)
def testCreateConvFloat32(self):
height, width = 3, 3
with self.test_session():
images = random_ops.random_uniform(
(5, height, width, 3), seed=1, dtype=dtypes.float32)
output = layers_lib.separable_conv2d(images, 32, [3, 3], 2)
self.assertEqual(output.op.name, 'SeparableConv2d/Relu')
self.assertListEqual(output.get_shape().as_list(), [5, height, width, 32])
def testCreateDepthwiseConv(self):
height, width = 3, 3
with self.test_session():
images = random_ops.random_uniform((5, height, width, 3), seed=1)
output = layers_lib.separable_conv2d(images, None, [3, 3], 2)
self.assertEqual(output.op.name, 'SeparableConv2d/Relu')
self.assertListEqual(output.get_shape().as_list(), [5, height, width, 6])
def testCreateConvCreatesWeightsAndBiasesVars(self):
height, width = 3, 3
images = random_ops.random_uniform((5, height, width, 3), seed=1)
with self.test_session():
self.assertFalse(variables.get_variables('conv1/depthwise_weights'))
self.assertFalse(variables.get_variables('conv1/pointwise_weights'))
self.assertFalse(variables.get_variables('conv1/biases'))
layers_lib.separable_conv2d(images, 32, [3, 3], 4, scope='conv1')
self.assertTrue(variables.get_variables('conv1/depthwise_weights'))
self.assertTrue(variables.get_variables('conv1/pointwise_weights'))
self.assertTrue(variables.get_variables('conv1/biases'))
def testCreateAtrousConvCreatesWeightsAndBiasesVars(self):
height, width = 3, 3
images = random_ops.random_uniform((5, height, width, 3), seed=1)
with self.test_session():
self.assertFalse(variables.get_variables('conv1/depthwise_weights'))
self.assertFalse(variables.get_variables('conv1/pointwise_weights'))
self.assertFalse(variables.get_variables('conv1/biases'))
layers_lib.separable_conv2d(images, 32, [3, 3], 4, rate=2, scope='conv1')
self.assertTrue(variables.get_variables('conv1/depthwise_weights'))
self.assertTrue(variables.get_variables('conv1/pointwise_weights'))
self.assertTrue(variables.get_variables('conv1/biases'))
def testCreateDepthwiseConvCreatesWeightsAndBiasesVars(self):
height, width = 3, 3
images = random_ops.random_uniform((5, height, width, 3), seed=1)
with self.test_session():
self.assertFalse(variables.get_variables('conv1/depthwise_weights'))
self.assertFalse(variables.get_variables('conv1/pointwise_weights'))
self.assertFalse(variables.get_variables('conv1/biases'))
layers_lib.separable_conv2d(images, None, [3, 3], 4, scope='conv1')
self.assertTrue(variables.get_variables('conv1/depthwise_weights'))
self.assertFalse(variables.get_variables('conv1/pointwise_weights'))
self.assertTrue(variables.get_variables('conv1/biases'))
def testCreateConvWithScope(self):
height, width = 3, 3
with self.test_session():
images = random_ops.random_uniform((5, height, width, 3), seed=1)
output = layers_lib.separable_conv2d(images, 32, [3, 3], 6, scope='conv1')
self.assertEqual(output.op.name, 'conv1/Relu')
def testCreateConvWithoutActivation(self):
height, width = 3, 3
with self.test_session():
images = random_ops.random_uniform((5, height, width, 3), seed=1)
output = layers_lib.separable_conv2d(
images, 32, [3, 3], 8, activation_fn=None)
self.assertEqual(output.op.name, 'SeparableConv2d/BiasAdd')
def testCreateConvValid(self):
height, width = 3, 3
with self.test_session():
images = random_ops.random_uniform((5, height, width, 3), seed=1)
output = layers_lib.separable_conv2d(
images, 32, [3, 3], 2, padding='VALID')
self.assertListEqual(output.get_shape().as_list(), [5, 1, 1, 32])
def testCreateAtrousConvValid(self):
height, width = 5, 5
with self.test_session():
images = random_ops.random_uniform((5, height, width, 3), seed=1)
output = layers_lib.separable_conv2d(
images, 32, [3, 3], 2, padding='VALID', rate=2)
self.assertListEqual(output.get_shape().as_list(), [5, 1, 1, 32])
def testCreateDepthwiseConvValid(self):
height, width = 3, 3
with self.test_session():
images = random_ops.random_uniform((5, height, width, 3), seed=1)
output = layers_lib.separable_conv2d(
images, None, [3, 3], 2, padding='VALID')
self.assertListEqual(output.get_shape().as_list(), [5, 1, 1, 6])
def testCreateAtrousDepthwiseConvValid(self):
height, width = 5, 5
with self.test_session():
images = random_ops.random_uniform((5, height, width, 3), seed=1)
output = layers_lib.separable_conv2d(
images, None, [3, 3], 2, padding='VALID', rate=2)
self.assertListEqual(output.get_shape().as_list(), [5, 1, 1, 6])
def testCreateConvWithWeightDecay(self):
random_seed.set_random_seed(0)
height, width = 3, 3
with self.test_session() as sess:
images = random_ops.random_uniform((5, height, width, 3), seed=1)
regularizer = regularizers.l2_regularizer(0.01)
layers_lib.separable_conv2d(
images, 32, [3, 3], 2, weights_regularizer=regularizer)
self.assertEqual(
len(ops.get_collection(ops.GraphKeys.REGULARIZATION_LOSSES)), 2)
weight_decay = ops.get_collection(ops.GraphKeys.REGULARIZATION_LOSSES)[0]
self.assertEqual(
weight_decay.op.name,
'SeparableConv2d/depthwise_kernel/Regularizer/l2_regularizer')
sess.run(variables_lib.global_variables_initializer())
self.assertLessEqual(sess.run(weight_decay), 0.05)
weight_decay = ops.get_collection(ops.GraphKeys.REGULARIZATION_LOSSES)[1]
self.assertEqual(
weight_decay.op.name,
'SeparableConv2d/pointwise_kernel/Regularizer/l2_regularizer')
self.assertLessEqual(sess.run(weight_decay), 0.05)
def testReuseConvWithWeightDecay(self):
height, width = 3, 3
with self.test_session():
images = random_ops.random_uniform((5, height, width, 3), seed=1)
regularizer = regularizers.l2_regularizer(0.01)
layers_lib.separable_conv2d(
images, 32, [3, 3], 2, weights_regularizer=regularizer, scope='conv1')
self.assertEqual(
len(ops.get_collection(ops.GraphKeys.REGULARIZATION_LOSSES)), 2)
layers_lib.separable_conv2d(
images,
32, [3, 3],
2,
weights_regularizer=regularizer,
scope='conv1',
reuse=True)
self.assertEqual(
len(ops.get_collection(ops.GraphKeys.REGULARIZATION_LOSSES)), 2)
def testConvWithBatchNorm(self):
height, width = 3, 3
batch_norm_collection = 'moving_vars'
normalizer_params = {
'variables_collections': {
'beta': [batch_norm_collection],
'gamma': [batch_norm_collection],
'moving_mean': [batch_norm_collection],
'moving_variance': [batch_norm_collection],
}
}
images = random_ops.random_uniform((5, height, width, 3), seed=1)
net = layers_lib.separable_conv2d(
images,
8, [3, 3],
2,
normalizer_fn=_layers.batch_norm,
normalizer_params=normalizer_params,
scope='conv1')
net = layers_lib.separable_conv2d(
net,
32, [3, 3],
2,
normalizer_fn=_layers.batch_norm,
normalizer_params=normalizer_params,
scope='conv2')
self.assertEqual(len(ops.get_collection(batch_norm_collection)), 6)
self.assertEqual(len(variables.get_variables('conv1/BatchNorm')), 3)
self.assertEqual(len(variables.get_variables('conv2/BatchNorm')), 3)
def testConvWithInputsViaPlaceHolder(self):
height, width = 3, 3
images_placeholder = array_ops.placeholder(
dtypes.float32, shape=(None, None, None, 3))
net = layers_lib.separable_conv2d(
images_placeholder,
8, [3, 3],
2,
normalizer_fn=_layers.batch_norm,
normalizer_params={},
scope='conv1')
init_op = variables_lib.global_variables_initializer()
with self.test_session() as sess:
images = np.random.rand(5, height, width, 3)
sess.run(init_op)
sess.run(net, feed_dict={images_placeholder: images})
def testTrainableFlagIsPassedOn(self):
for trainable in [True, False]:
for num_filters in [None, 8]:
with ops.Graph().as_default():
input_size = [5, 10, 12, 3]
images = random_ops.random_uniform(input_size, seed=1)
layers_lib.separable_conv2d(
images, num_filters, [3, 3], 1, trainable=trainable)
model_variables = variables.get_model_variables()
trainable_variables = variables_lib.trainable_variables()
for model_variable in model_variables:
self.assertEqual(trainable, model_variable in trainable_variables)
class ScaleGradientTests(test.TestCase):
"""Simple tests of the scale_gradient function."""
def testBasic(self):
with self.test_session():
x = np.array([42], np.float32)
gradient_scale = np.array([2], np.float32)
x = ops.convert_to_tensor(x)
y = layers_lib.scale_gradient(x, gradient_scale)
np.testing.assert_array_equal(x.eval(), y.eval())
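# scale_gradient leaves the forward value untouched (checked above); only
# the backward pass is scaled: upstream gradient 3 * gradient_scale 2 == 6.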
g_x, = gradients_impl.gradients(y, [x], [np.array([3], np.float32)])
np.testing.assert_array_equal([3 * 2], g_x.eval())
class SoftmaxTests(test.TestCase):
def setUp(self):
self.low = 1 / (1 + math.e)
self.high = math.e / (1 + math.e)
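# These are softmax([0, 1]): low == 1 / (1 + e) and high == e / (1 + e),
# the values the 2-D and 3-D assertions below expect.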
def testSoftmax2D(self):
logits = constant_op.constant([[0.0, 1], [1, 1], [1, 0]])
prediction = _layers.softmax(logits)
exp_prediction = np.array([[self.low, self.high], [0.5, 0.5],
[self.high, self.low]])
with self.test_session() as sess:
prediction = sess.run(prediction)
self.assertAllClose(exp_prediction, prediction)
def testSoftmax3D(self):
logits = np.ones((2, 3, 2))
logits[0, 0, 0] = 0
logits[1, 1, 1] = 0
logits = constant_op.constant(logits)
exp_prediction = 0.5 * np.ones((2, 3, 2))
exp_prediction[0, 0, 0] = self.low
exp_prediction[0, 0, 1] = self.high
exp_prediction[1, 1, 0] = self.high
exp_prediction[1, 1, 1] = self.low
prediction = _layers.softmax(logits)
with self.test_session() as sess:
prediction = sess.run(prediction)
self.assertAllClose(exp_prediction, prediction)
def testSoftmax3DUnknownSize(self):
logits = np.ones((2, 3, 2))
logits[0, 0, 0] = 0
logits[1, 1, 1] = 0
logit_placeholder = array_ops.placeholder(
dtypes.float32, shape=(None, None, 2))
feed_dict = {logit_placeholder: logits}
exp_prediction = 0.5 * np.ones((2, 3, 2))
exp_prediction[0, 0, 0] = self.low
exp_prediction[0, 0, 1] = self.high
exp_prediction[1, 1, 0] = self.high
exp_prediction[1, 1, 1] = self.low
prediction = _layers.softmax(logit_placeholder)
with self.test_session() as sess:
prediction = sess.run(prediction, feed_dict=feed_dict)
self.assertAllClose(exp_prediction, prediction)
def testSoftmaxUndefinedNthDimension(self):
logits = array_ops.placeholder(dtypes.float32)
with self.assertRaises(ValueError):
_layers.softmax(logits)
class StackTests(test.TestCase):
def testStackFullyConnected(self):
height, width = 3, 3
with self.test_session():
images = np.random.uniform(size=(5, height * width * 3))
output = _layers.stack(images, _layers.fully_connected, [10, 20, 30])
self.assertEqual(output.op.name, 'Stack/fully_connected_3/Relu')
self.assertListEqual(output.get_shape().as_list(), [5, 30])
def testStackFullyConnectedFailOnReuse(self):
height, width = 3, 3
with self.test_session():
with variable_scope.variable_scope('test', reuse=True):
images = np.random.uniform(size=(5, height * width * 3))
with self.assertRaises(ValueError):
_layers.stack(images, _layers.fully_connected, [10, 20, 30])
def testStackRelu(self):
height, width = 3, 3
with self.test_session():
images = random_ops.random_uniform(
(5, height * width * 3), seed=1, name='images')
output = _layers.stack(images, layers_lib.relu, [10, 20, 30])
self.assertEqual(output.op.name, 'Stack/fully_connected_3/Relu')
self.assertListEqual(output.get_shape().as_list(), [5, 30])
def testStackElu(self):
height, width = 3, 3
with self.test_session():
images = random_ops.random_uniform(
(5, height * width * 3), seed=1, name='images')
output = _layers.stack(images, layers_lib.elu, [10, 20, 30])
self.assertEqual(output.op.name, 'Stack/fully_connected_3/Elu')
self.assertListEqual(output.get_shape().as_list(), [5, 30])
def testStackConvolution2d(self):
height, width = 3, 3
with self.test_session():
images = random_ops.random_uniform(
(5, height, width, 3), seed=1, name='images')
output = _layers.stack(
images,
layers_lib.convolution2d, [10, 20, 30],
kernel_size=[3, 3],
padding='SAME')
self.assertEqual(output.op.name, 'Stack/convolution_3/Relu')
self.assertListEqual(output.get_shape().as_list(), [5, 3, 3, 30])
def testStackWithScope(self):
height, width = 3, 3
with self.test_session():
images = random_ops.random_uniform(
(5, height, width, 3), seed=1, name='images')
output = _layers.stack(
images,
layers_lib.convolution2d, [10, 20, 30],
kernel_size=[3, 3],
padding='SAME',
scope='conv1')
self.assertEqual(output.op.name, 'conv1/conv1_3/Relu')
self.assertListEqual(output.get_shape().as_list(), [5, 3, 3, 30])
class UnitNormTests(test.TestCase):
def testUnitNormWithRandomMatrix(self):
height, width = 2, 3
for dim in range(3):
random_seed.set_random_seed(0)
image = random_ops.random_uniform((height, width, 3))
output = _layers.unit_norm(image, dim=dim, epsilon=1e-6)
norms = math_ops.sqrt(
math_ops.reduce_sum(
math_ops.square(output), reduction_indices=dim))
shape = [height, width, 3]
del shape[dim]
expected = np.ones(shape)
with self.test_session():
actual = norms.eval()
self.assertAllClose(expected, actual, 1e-4, 1e-4)
def testDimEqualToRankRaisesError(self):
height, width = 2, 3
random_seed.set_random_seed(0)
image = random_ops.random_uniform((height, width, 3))
with self.assertRaises(ValueError):
_layers.unit_norm(image, dim=3, epsilon=1e-6)
def testUnknownRankRaisesError(self):
image = array_ops.placeholder(dtypes.float32)
with self.assertRaises(ValueError):
_layers.unit_norm(image, dim=2)
def testKnownRankUnknownDimsSucceeds(self):
height, width = 2, 3
for dim in range(3):
placeholder_value = np.ones((height, width, 3))
shape = [height, width, 3]
del shape[dim]
expected = np.ones(shape)
image = array_ops.placeholder(dtypes.float32, (None, None, 3))
output = _layers.unit_norm(image, dim=dim, epsilon=1e-6)
norms = math_ops.sqrt(
math_ops.reduce_sum(
math_ops.square(output), reduction_indices=dim))
with self.test_session():
actual = norms.eval({image: placeholder_value})
self.assertAllClose(expected, actual, 1e-4, 1e-4)
# TODO(b/28426988): Add separate tests for non-legacy versions.
class LegacyFullyConnectedTest(test.TestCase):
def setUp(self):
test.TestCase.setUp(self)
random_seed.set_random_seed(1234)
self.input = constant_op.constant([[1., 2., 3.], [-4., 15., -6.]])
self.input_3_dim_arr = [[[1., 1.1, 1.2],
[2., 2.1, 2.2],
[3., 3.1, 3.2],
[4., 4.1, 4.2]],
[[5., 5.1, 5.2],
[6., 6.1, 6.2],
[7., 7.1, 7.2],
[8., 8.1, 8.2]]]
self.input_3_dim = constant_op.constant(self.input_3_dim_arr)
assert not ops.get_collection(ops.GraphKeys.SUMMARIES)
def _fully_connected_basic_use(self, x, num_output_units, expected_shape):
output = _layers.legacy_fully_connected(
x, num_output_units, activation_fn=nn_ops.relu)
with session.Session() as sess:
with self.assertRaises(errors_impl.FailedPreconditionError):
sess.run(output)
variables_lib.global_variables_initializer().run()
out_value, shape_value = sess.run([output, array_ops.shape(output)])
self.assertAllClose(shape_value, expected_shape)
self.assertEqual(output.get_shape().as_list(), expected_shape)
self.assertTrue(np.all(out_value >= 0), 'Relu should have all values >= 0.')
self.assertEqual(2,
len(ops.get_collection(ops.GraphKeys.TRAINABLE_VARIABLES)))
self.assertEqual(
0, len(ops.get_collection(ops.GraphKeys.REGULARIZATION_LOSSES)))
def test_fully_connected_basic_use(self):
self._fully_connected_basic_use(self.input, 8, [2, 8])
def test_fully_connected_basic_use_multi_dim(self):
for last_dim in [1, 3]:
self.setUp()
self._fully_connected_basic_use(self.input_3_dim, last_dim,
[2, 4, last_dim])
def test_relu_layer_basic_use(self):
output = layers_lib.legacy_relu(self.input, 8)
with session.Session() as sess:
with self.assertRaises(errors_impl.FailedPreconditionError):
sess.run(output)
variables_lib.global_variables_initializer().run()
out_value = sess.run(output)
self.assertEqual(output.get_shape().as_list(), [2, 8])
self.assertTrue(np.all(out_value >= 0), 'Relu should have all values >= 0.')
self.assertEqual(2,
len(ops.get_collection(ops.GraphKeys.TRAINABLE_VARIABLES)))
self.assertEqual(
0, len(ops.get_collection(ops.GraphKeys.REGULARIZATION_LOSSES)))
def test_variable_reuse_with_scope(self):
with variable_scope.variable_scope('test') as vs:
output1 = layers_lib.legacy_relu(self.input, 8)
output2 = layers_lib.legacy_relu(self.input, 8)
with variable_scope.variable_scope(vs, reuse=True):
output3 = layers_lib.legacy_relu(self.input, 8)
with session.Session() as sess:
variables_lib.global_variables_initializer().run()
out_value1, out_value2, out_value3 = sess.run([output1, output2, output3])
self.assertFalse(np.allclose(out_value1, out_value2))
self.assertAllClose(out_value1, out_value3)
def test_variable_reuse_with_template(self):
tmpl1 = template.make_template(
'test', _layers.legacy_fully_connected, num_output_units=8)
output1 = tmpl1(self.input)
output2 = tmpl1(self.input)
with session.Session() as sess:
variables_lib.global_variables_initializer().run()
out_value1, out_value2 = sess.run([output1, output2])
self.assertAllClose(out_value1, out_value2)
def _custom_initializers(self, x, num_output_units, expected_outputs):
output = layers_lib.legacy_relu(
x,
num_output_units,
weight_init=init_ops.constant_initializer(2.0),
bias_init=init_ops.constant_initializer(1.0))
with session.Session() as sess:
variables_lib.global_variables_initializer().run()
out_value = sess.run(output)
self.assertAllClose(np.array(expected_outputs), out_value)
def test_custom_initializers(self):
self._custom_initializers(self.input, 2, [[13.0, 13.0], [11.0, 11.0]])
def test_custom_initializers_multi_dim(self):
self._custom_initializers(self.input_3_dim, 2,
[[[7.6, 7.6],
[13.6, 13.6],
[19.6, 19.6],
[25.6, 25.6]],
[[31.6, 31.6],
[37.6, 37.6],
[43.6, 43.6],
[49.6, 49.6]]])
def test_custom_collections(self):
layers_lib.legacy_relu(
self.input,
2,
weight_collections=['unbiased'],
bias_collections=['biased'],
output_collections=['output'])
self.assertEqual(1, len(ops.get_collection('unbiased')))
self.assertEqual(1, len(ops.get_collection('biased')))
self.assertEqual(1, len(ops.get_collection('output')))
self.assertEqual(2, len(ops.get_collection(ops.GraphKeys.GLOBAL_VARIABLES)))
def test_all_custom_collections(self):
layers_lib.legacy_relu(
self.input,
2,
weight_collections=['unbiased', 'all'],
bias_collections=['biased', 'all'])
self.assertEqual(1, len(ops.get_collection('unbiased')))
self.assertEqual(1, len(ops.get_collection('biased')))
self.assertEqual(
ops.get_collection(ops.GraphKeys.TRAINABLE_VARIABLES),
ops.get_collection('all'))
def test_no_bias(self):
layers_lib.legacy_relu(self.input, 2, bias_init=None)
self.assertEqual(1, len(ops.get_collection(ops.GraphKeys.GLOBAL_VARIABLES)))
def test_no_activation(self):
y = _layers.legacy_fully_connected(self.input, 2)
self.assertEqual(2, len(ops.get_collection(ops.GraphKeys.GLOBAL_VARIABLES)))
self.assertEqual('BiasAdd', y.op.type)
def test_no_activation_no_bias(self):
y = _layers.legacy_fully_connected(self.input, 2, bias_init=None)
self.assertEqual(1, len(ops.get_collection(ops.GraphKeys.GLOBAL_VARIABLES)))
self.assertEqual('MatMul', y.op.type)
def test_regularizer(self):
cnt = [0]
tensor = constant_op.constant(5.0)
def test_fn(_):
cnt[0] += 1
return tensor
_layers.legacy_fully_connected(self.input, 2, weight_regularizer=test_fn)
self.assertEqual([tensor],
ops.get_collection(ops.GraphKeys.REGULARIZATION_LOSSES))
self.assertEqual(1, cnt[0])
def test_regularizer_with_multiple_variables(self):
cnt = [0]
tensor = constant_op.constant(5.0)
def test_fn(_):
cnt[0] += 1
return tensor
_layers.legacy_fully_connected(self.input, 2, weight_regularizer=test_fn)
_layers.legacy_fully_connected(self.input, 2, weight_regularizer=test_fn)
self.assertEqual([tensor, tensor],
ops.get_collection(ops.GraphKeys.REGULARIZATION_LOSSES))
self.assertEqual(2, cnt[0])
def test_regularizer_with_variable_reuse(self):
cnt = [0]
tensor = constant_op.constant(5.0)
def test_fn(_):
cnt[0] += 1
return tensor
with variable_scope.variable_scope('test') as vs:
_layers.legacy_fully_connected(self.input, 2, weight_regularizer=test_fn)
with variable_scope.variable_scope(vs, reuse=True):
_layers.legacy_fully_connected(self.input, 2, weight_regularizer=test_fn)
self.assertEqual([tensor],
ops.get_collection(ops.GraphKeys.REGULARIZATION_LOSSES))
self.assertEqual(1, cnt[0])
def test_empty_x_results_in_empty_output(self):
# Empty x is common if someone masks their input with tf.boolean_mask in
# order to drop missing entries, and in a particular batch all entries are
# missing.
with self.test_session():
x = np.array([]).reshape(0, 3)
self.assertEqual(0, array_ops.size(x).eval())
y = _layers.legacy_fully_connected(x, 2, activation_fn=nn_ops.softmax)
variables_lib.global_variables_initializer().run()
expected_y = np.array([]).reshape(0, 2)
np.testing.assert_array_equal(expected_y, y.eval())
def test_shapes_variable_first_dim(self):
# first dimension is not known statically.
x = array_ops.placeholder(dtypes.float32, shape=[None, 4, 3])
y = _layers.legacy_fully_connected(x, 1)
# in the output we still only know the 2nd and 3rd dimensions statically.
self.assertEqual(y.get_shape().as_list(), [None, 4, 1])
with self.test_session() as sess:
variables_lib.global_variables_initializer().run()
# we can feed in input with first dimension 2
shape_value = sess.run(array_ops.shape(y),
feed_dict={x: self.input_3_dim_arr})
self.assertAllClose(shape_value, [2, 4, 1])
# we can feed in input with first dimension 1
shape_value = sess.run(array_ops.shape(y),
feed_dict={x: [self.input_3_dim_arr[0]]})
self.assertAllClose(shape_value, [1, 4, 1])
# we cannot feed in input with inconsistent dimensions
with self.assertRaises(ValueError):
sess.run(array_ops.shape(y), feed_dict={x: [[[]]]})
def _unknown_dim_invalid_input(self, last_dim):
x = array_ops.placeholder(dtypes.float32, shape=[3, last_dim])
_layers.legacy_fully_connected(x, 2, activation_fn=None)
def test_known_dim_valid_input(self):
self._unknown_dim_invalid_input(last_dim=3)
def test_unknown_dim_invalid_input(self):
with self.assertRaisesRegexp(
ValueError, 'last dimension of x must be known but is None'):
self._unknown_dim_invalid_input(last_dim=None)
def test_1d_invalid_input(self):
with self.test_session():
with self.assertRaisesRegexp(ValueError,
'rank of x must be at least 2 not: 1'):
x = constant_op.constant([[]], shape=[0])
_layers.legacy_fully_connected(x, 2, activation_fn=nn_ops.softmax)
if __name__ == '__main__':
test.main()
| apache-2.0 |
mozilla/iacomus-alerts | ansible/inventory/ec2.py | 24 | 51233 | #!/usr/bin/env python
'''
EC2 external inventory script
=================================
Generates inventory that Ansible can understand by making API requests to
AWS EC2 using the Boto library.
NOTE: This script assumes Ansible is being executed where the environment
variables needed for Boto have already been set:
export AWS_ACCESS_KEY_ID='AK123'
export AWS_SECRET_ACCESS_KEY='abc123'
This script also assumes there is an ec2.ini file alongside it. To specify a
different path to ec2.ini, define the EC2_INI_PATH environment variable:
export EC2_INI_PATH=/path/to/my_ec2.ini
If you're using Eucalyptus, you need to set the above variables and
you need to define:
export EC2_URL=http://hostname_of_your_cc:port/services/Eucalyptus
For more details, see: http://docs.pythonboto.org/en/latest/boto_config_tut.html
When run against a specific host, this script returns the following variables:
- ec2_ami_launch_index
- ec2_architecture
- ec2_association
- ec2_attachTime
- ec2_attachment
- ec2_attachmentId
- ec2_client_token
- ec2_deleteOnTermination
- ec2_description
- ec2_deviceIndex
- ec2_dns_name
- ec2_eventsSet
- ec2_group_name
- ec2_hypervisor
- ec2_id
- ec2_image_id
- ec2_instanceState
- ec2_instance_type
- ec2_ipOwnerId
- ec2_ip_address
- ec2_item
- ec2_kernel
- ec2_key_name
- ec2_launch_time
- ec2_monitored
- ec2_monitoring
- ec2_networkInterfaceId
- ec2_ownerId
- ec2_persistent
- ec2_placement
- ec2_platform
- ec2_previous_state
- ec2_private_dns_name
- ec2_private_ip_address
- ec2_publicIp
- ec2_public_dns_name
- ec2_ramdisk
- ec2_reason
- ec2_region
- ec2_requester_id
- ec2_root_device_name
- ec2_root_device_type
- ec2_security_group_ids
- ec2_security_group_names
- ec2_shutdown_state
- ec2_sourceDestCheck
- ec2_spot_instance_request_id
- ec2_state
- ec2_state_code
- ec2_state_reason
- ec2_status
- ec2_subnet_id
- ec2_tenancy
- ec2_virtualization_type
- ec2_vpc_id
These variables are pulled out of a boto.ec2.instance object. There is a lack of
consistency with variable spellings (camelCase and underscores) since this
just loops through all variables the object exposes. It is preferred to use the
ones with underscores when multiple exist.
In addition, if an instance has AWS Tags associated with it, each tag is a new
variable named:
- ec2_tag_[Key] = [Value]
Security groups are comma-separated in 'ec2_security_group_ids' and
'ec2_security_group_names'.
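
Typical invocations (illustrative host address; the flags are defined in
parse_cli_args below):

    ./ec2.py --list           # print the whole inventory as JSON
    ./ec2.py --host 10.0.0.1  # print the variables for one instance
    ./ec2.py --refresh-cache  # bypass the cache files and query AWS again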
'''
# (c) 2012, Peter Sankauskas
#
# This file is part of Ansible,
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
######################################################################
import sys
import os
import argparse
import re
from time import time
import boto
from boto import ec2
from boto import rds
from boto import elasticache
from boto import route53
import six
from six.moves import configparser
from collections import defaultdict
try:
import json
except ImportError:
import simplejson as json
class Ec2Inventory(object):
def _empty_inventory(self):
return {"_meta" : {"hostvars" : {}}}
def __init__(self):
''' Main execution path '''
# Inventory grouped by instance IDs, tags, security groups, regions,
# and availability zones
self.inventory = self._empty_inventory()
# Index of hostname (address) to instance ID
self.index = {}
# Read settings and parse CLI arguments
self.read_settings()
self.parse_cli_args()
# Cache
if self.args.refresh_cache:
self.do_api_calls_update_cache()
elif not self.is_cache_valid():
self.do_api_calls_update_cache()
# Data to print
if self.args.host:
data_to_print = self.get_host_info()
elif self.args.list:
# Display list of instances for inventory
if self.inventory == self._empty_inventory():
data_to_print = self.get_inventory_from_cache()
else:
data_to_print = self.json_format_dict(self.inventory, True)
print(data_to_print)
def is_cache_valid(self):
''' Determines whether the cache files have expired or are still valid '''
if os.path.isfile(self.cache_path_cache):
mod_time = os.path.getmtime(self.cache_path_cache)
current_time = time()
if (mod_time + self.cache_max_age) > current_time:
if os.path.isfile(self.cache_path_index):
return True
return False
def read_settings(self):
''' Reads the settings from the ec2.ini file '''
if six.PY2:
config = configparser.SafeConfigParser()
else:
config = configparser.ConfigParser()
ec2_default_ini_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'ec2.ini')
ec2_ini_path = os.environ.get('EC2_INI_PATH', ec2_default_ini_path)
config.read(ec2_ini_path)
# is eucalyptus?
self.eucalyptus_host = None
self.eucalyptus = False
if config.has_option('ec2', 'eucalyptus'):
self.eucalyptus = config.getboolean('ec2', 'eucalyptus')
if self.eucalyptus and config.has_option('ec2', 'eucalyptus_host'):
self.eucalyptus_host = config.get('ec2', 'eucalyptus_host')
# Regions
self.regions = []
configRegions = config.get('ec2', 'regions')
configRegions_exclude = config.get('ec2', 'regions_exclude')
if (configRegions == 'all'):
if self.eucalyptus_host:
self.regions.append(boto.connect_euca(host=self.eucalyptus_host).region.name)
else:
for regionInfo in ec2.regions():
if regionInfo.name not in configRegions_exclude:
self.regions.append(regionInfo.name)
else:
self.regions = configRegions.split(",")
# Destination addresses
self.destination_variable = config.get('ec2', 'destination_variable')
self.vpc_destination_variable = config.get('ec2', 'vpc_destination_variable')
# Route53
self.route53_enabled = config.getboolean('ec2', 'route53')
self.route53_excluded_zones = []
if config.has_option('ec2', 'route53_excluded_zones'):
self.route53_excluded_zones.extend(
config.get('ec2', 'route53_excluded_zones').split(','))
# Include RDS instances?
self.rds_enabled = True
if config.has_option('ec2', 'rds'):
self.rds_enabled = config.getboolean('ec2', 'rds')
# Include ElastiCache instances?
self.elasticache_enabled = True
if config.has_option('ec2', 'elasticache'):
self.elasticache_enabled = config.getboolean('ec2', 'elasticache')
# Return all EC2 instances?
if config.has_option('ec2', 'all_instances'):
self.all_instances = config.getboolean('ec2', 'all_instances')
else:
self.all_instances = False
# Return all RDS instances? (if RDS is enabled)
if config.has_option('ec2', 'all_rds_instances') and self.rds_enabled:
self.all_rds_instances = config.getboolean('ec2', 'all_rds_instances')
else:
self.all_rds_instances = False
# Return all ElastiCache replication groups? (if ElastiCache is enabled)
if config.has_option('ec2', 'all_elasticache_replication_groups') and self.elasticache_enabled:
self.all_elasticache_replication_groups = config.getboolean('ec2', 'all_elasticache_replication_groups')
else:
self.all_elasticache_replication_groups = False
# Return all ElastiCache clusters? (if ElastiCache is enabled)
if config.has_option('ec2', 'all_elasticache_clusters') and self.elasticache_enabled:
self.all_elasticache_clusters = config.getboolean('ec2', 'all_elasticache_clusters')
else:
self.all_elasticache_clusters = False
# Return all ElastiCache nodes? (if ElastiCache is enabled)
if config.has_option('ec2', 'all_elasticache_nodes') and self.elasticache_enabled:
self.all_elasticache_nodes = config.getboolean('ec2', 'all_elasticache_nodes')
else:
self.all_elasticache_nodes = False
# Cache related
cache_dir = os.path.expanduser(config.get('ec2', 'cache_path'))
if not os.path.exists(cache_dir):
os.makedirs(cache_dir)
self.cache_path_cache = cache_dir + "/ansible-ec2.cache"
self.cache_path_index = cache_dir + "/ansible-ec2.index"
self.cache_max_age = config.getint('ec2', 'cache_max_age')
# Configure nested groups instead of flat namespace.
if config.has_option('ec2', 'nested_groups'):
self.nested_groups = config.getboolean('ec2', 'nested_groups')
else:
self.nested_groups = False
# Configure which groups should be created.
group_by_options = [
'group_by_instance_id',
'group_by_region',
'group_by_availability_zone',
'group_by_ami_id',
'group_by_instance_type',
'group_by_key_pair',
'group_by_vpc_id',
'group_by_security_group',
'group_by_tag_keys',
'group_by_tag_none',
'group_by_route53_names',
'group_by_rds_engine',
'group_by_rds_parameter_group',
'group_by_elasticache_engine',
'group_by_elasticache_cluster',
'group_by_elasticache_parameter_group',
'group_by_elasticache_replication_group',
]
for option in group_by_options:
if config.has_option('ec2', option):
setattr(self, option, config.getboolean('ec2', option))
else:
setattr(self, option, True)
# Do we need to just include hosts that match a pattern?
try:
pattern_include = config.get('ec2', 'pattern_include')
if pattern_include and len(pattern_include) > 0:
self.pattern_include = re.compile(pattern_include)
else:
self.pattern_include = None
except configparser.NoOptionError as e:
self.pattern_include = None
# Do we need to exclude hosts that match a pattern?
try:
pattern_exclude = config.get('ec2', 'pattern_exclude')
if pattern_exclude and len(pattern_exclude) > 0:
self.pattern_exclude = re.compile(pattern_exclude)
else:
self.pattern_exclude = None
except configparser.NoOptionError as e:
self.pattern_exclude = None
# Instance filters (see boto and EC2 API docs). Ignore invalid filters.
self.ec2_instance_filters = defaultdict(list)
if config.has_option('ec2', 'instance_filters'):
for instance_filter in config.get('ec2', 'instance_filters').split(','):
instance_filter = instance_filter.strip()
if not instance_filter or '=' not in instance_filter:
continue
filter_key, filter_value = [x.strip() for x in instance_filter.split('=', 1)]
if not filter_key:
continue
self.ec2_instance_filters[filter_key].append(filter_value)
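# Illustrative ec2.ini entry this loop parses:
#   instance_filters = tag:Environment=staging,instance-state-name=running
# producing {'tag:Environment': ['staging'], 'instance-state-name': ['running']}.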
def parse_cli_args(self):
''' Command line argument processing '''
parser = argparse.ArgumentParser(description='Produce an Ansible Inventory file based on EC2')
parser.add_argument('--list', action='store_true', default=True,
help='List instances (default: True)')
parser.add_argument('--host', action='store',
help='Get all the variables about a specific instance')
parser.add_argument('--refresh-cache', action='store_true', default=False,
help='Force refresh of cache by making API requests to EC2 (default: False - use cache files)')
self.args = parser.parse_args()
def do_api_calls_update_cache(self):
''' Do API calls to each region, and save data in cache files '''
if self.route53_enabled:
self.get_route53_records()
for region in self.regions:
self.get_instances_by_region(region)
if self.rds_enabled:
self.get_rds_instances_by_region(region)
if self.elasticache_enabled:
self.get_elasticache_clusters_by_region(region)
self.get_elasticache_replication_groups_by_region(region)
self.write_to_cache(self.inventory, self.cache_path_cache)
self.write_to_cache(self.index, self.cache_path_index)
def connect(self, region):
''' Create a connection to the API server '''
if self.eucalyptus:
conn = boto.connect_euca(host=self.eucalyptus_host)
conn.APIVersion = '2010-08-31'
else:
conn = ec2.connect_to_region(region)
# connect_to_region will fail "silently" by returning None if the region name is wrong or not supported
if conn is None:
self.fail_with_error("region name: %s likely not supported, or AWS is down. connection to region failed." % region)
return conn
def get_instances_by_region(self, region):
''' Makes an AWS EC2 API call to the list of instances in a particular
region '''
try:
conn = self.connect(region)
reservations = []
if self.ec2_instance_filters:
for filter_key, filter_values in self.ec2_instance_filters.items():
reservations.extend(conn.get_all_instances(filters = { filter_key : filter_values }))
else:
reservations = conn.get_all_instances()
for reservation in reservations:
for instance in reservation.instances:
self.add_instance(instance, region)
except boto.exception.BotoServerError as e:
if e.error_code == 'AuthFailure':
error = self.get_auth_error_message()
else:
backend = 'Eucalyptus' if self.eucalyptus else 'AWS'
error = "Error connecting to %s backend.\n%s" % (backend, e.message)
self.fail_with_error(error)
def get_rds_instances_by_region(self, region):
''' Makes an AWS API call to the list of RDS instances in a particular
region '''
try:
conn = rds.connect_to_region(region)
if conn:
instances = conn.get_all_dbinstances()
for instance in instances:
self.add_rds_instance(instance, region)
except boto.exception.BotoServerError as e:
error = e.reason
if e.error_code == 'AuthFailure':
error = self.get_auth_error_message()
if not e.reason == "Forbidden":
error = "Looks like AWS RDS is down:\n%s" % e.message
self.fail_with_error(error)
def get_elasticache_clusters_by_region(self, region):
''' Makes an AWS API call to the list of ElastiCache clusters (with
nodes' info) in a particular region.'''
# ElastiCache boto module doesn't provide a get_all_instances method,
# that's why we need to call describe directly (it would be called by
# the shorthand method anyway...)
try:
conn = elasticache.connect_to_region(region)
if conn:
# show_cache_node_info = True
# because we also want nodes' information
response = conn.describe_cache_clusters(None, None, None, True)
except boto.exception.BotoServerError as e:
error = e.reason
if e.error_code == 'AuthFailure':
error = self.get_auth_error_message()
if not e.reason == "Forbidden":
error = "Looks like AWS ElastiCache is down:\n%s" % e.message
self.fail_with_error(error)
try:
# Boto also doesn't provide wrapper classes to CacheClusters or
# CacheNodes. Because of that we can't make use of the get_list
# method in the AWSQueryConnection. Let's do the work manually
clusters = response['DescribeCacheClustersResponse']['DescribeCacheClustersResult']['CacheClusters']
except KeyError as e:
error = "ElastiCache query to AWS failed (unexpected format)."
self.fail_with_error(error)
for cluster in clusters:
self.add_elasticache_cluster(cluster, region)
def get_elasticache_replication_groups_by_region(self, region):
''' Makes an AWS API call to the list of ElastiCache replication groups
in a particular region.'''
# ElastiCache boto module doesn't provide a get_all_instances method,
# that's why we need to call describe directly (it would be called by
# the shorthand method anyway...)
try:
conn = elasticache.connect_to_region(region)
if conn:
response = conn.describe_replication_groups()
except boto.exception.BotoServerError as e:
error = e.reason
if e.error_code == 'AuthFailure':
error = self.get_auth_error_message()
if not e.reason == "Forbidden":
error = "Looks like AWS ElastiCache [Replication Groups] is down:\n%s" % e.message
self.fail_with_error(error)
try:
# Boto also doesn't provide wrapper classes to ReplicationGroups
# Because of that we can't make use of the get_list method in the
# AWSQueryConnection. Let's do the work manually
replication_groups = response['DescribeReplicationGroupsResponse']['DescribeReplicationGroupsResult']['ReplicationGroups']
except KeyError as e:
error = "ElastiCache [Replication Groups] query to AWS failed (unexpected format)."
self.fail_with_error(error)
for replication_group in replication_groups:
self.add_elasticache_replication_group(replication_group, region)
def get_auth_error_message(self):
''' create an informative error message if there is an issue authenticating'''
errors = ["Authentication error retrieving ec2 inventory."]
if None in [os.environ.get('AWS_ACCESS_KEY_ID'), os.environ.get('AWS_SECRET_ACCESS_KEY')]:
errors.append(' - No AWS_ACCESS_KEY_ID or AWS_SECRET_ACCESS_KEY environment vars found')
else:
errors.append(' - AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY environment vars found but may not be correct')
boto_paths = ['/etc/boto.cfg', '~/.boto', '~/.aws/credentials']
boto_config_found = list(p for p in boto_paths if os.path.isfile(os.path.expanduser(p)))
if len(boto_config_found) > 0:
errors.append(" - Boto configs found at '%s', but the credentials contained may not be correct" % ', '.join(boto_config_found))
else:
errors.append(" - No Boto config found at any expected location '%s'" % ', '.join(boto_paths))
return '\n'.join(errors)
def fail_with_error(self, err_msg):
''' Log an error to stderr for ansible-playbook to consume and exit '''
sys.stderr.write(err_msg)
sys.exit(1)
def get_instance(self, region, instance_id):
conn = self.connect(region)
reservations = conn.get_all_instances([instance_id])
for reservation in reservations:
for instance in reservation.instances:
return instance
def add_instance(self, instance, region):
''' Adds an instance to the inventory and index, as long as it is
addressable '''
# Only want running instances unless all_instances is True
if not self.all_instances and instance.state != 'running':
return
# Select the best destination address
if instance.subnet_id:
dest = getattr(instance, self.vpc_destination_variable, None)
if dest is None:
dest = getattr(instance, 'tags').get(self.vpc_destination_variable, None)
else:
dest = getattr(instance, self.destination_variable, None)
if dest is None:
dest = getattr(instance, 'tags').get(self.destination_variable, None)
if not dest:
# Skip instances we cannot address (e.g. private VPC subnet)
return
# if we only want to include hosts that match a pattern, skip those that don't
if self.pattern_include and not self.pattern_include.match(dest):
return
# if we need to exclude hosts that match a pattern, skip those
if self.pattern_exclude and self.pattern_exclude.match(dest):
return
# Add to index
self.index[dest] = [region, instance.id]
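# e.g. self.index['10.0.0.1'] == ['us-east-1', 'i-0123456789abcdef0']
# (illustrative address and instance ID)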
# Inventory: Group by instance ID (always a group of 1)
if self.group_by_instance_id:
self.inventory[instance.id] = [dest]
if self.nested_groups:
self.push_group(self.inventory, 'instances', instance.id)
# Inventory: Group by region
if self.group_by_region:
self.push(self.inventory, region, dest)
if self.nested_groups:
self.push_group(self.inventory, 'regions', region)
# Inventory: Group by availability zone
if self.group_by_availability_zone:
self.push(self.inventory, instance.placement, dest)
if self.nested_groups:
if self.group_by_region:
self.push_group(self.inventory, region, instance.placement)
self.push_group(self.inventory, 'zones', instance.placement)
# Inventory: Group by Amazon Machine Image (AMI) ID
if self.group_by_ami_id:
ami_id = self.to_safe(instance.image_id)
self.push(self.inventory, ami_id, dest)
if self.nested_groups:
self.push_group(self.inventory, 'images', ami_id)
# Inventory: Group by instance type
if self.group_by_instance_type:
type_name = self.to_safe('type_' + instance.instance_type)
self.push(self.inventory, type_name, dest)
if self.nested_groups:
self.push_group(self.inventory, 'types', type_name)
# Inventory: Group by key pair
if self.group_by_key_pair and instance.key_name:
key_name = self.to_safe('key_' + instance.key_name)
self.push(self.inventory, key_name, dest)
if self.nested_groups:
self.push_group(self.inventory, 'keys', key_name)
# Inventory: Group by VPC
if self.group_by_vpc_id and instance.vpc_id:
vpc_id_name = self.to_safe('vpc_id_' + instance.vpc_id)
self.push(self.inventory, vpc_id_name, dest)
if self.nested_groups:
self.push_group(self.inventory, 'vpcs', vpc_id_name)
# Inventory: Group by security group
if self.group_by_security_group:
try:
for group in instance.groups:
key = self.to_safe("security_group_" + group.name)
self.push(self.inventory, key, dest)
if self.nested_groups:
self.push_group(self.inventory, 'security_groups', key)
except AttributeError:
self.fail_with_error('\n'.join(['Package boto seems a bit older.',
'Please upgrade boto >= 2.3.0.']))
# Inventory: Group by tag keys
if self.group_by_tag_keys:
for k, v in instance.tags.items():
if v:
key = self.to_safe("tag_" + k + "=" + v)
else:
key = self.to_safe("tag_" + k)
self.push(self.inventory, key, dest)
if self.nested_groups:
self.push_group(self.inventory, 'tags', self.to_safe("tag_" + k))
self.push_group(self.inventory, self.to_safe("tag_" + k), key)
# Inventory: Group by Route53 domain names if enabled
if self.route53_enabled and self.group_by_route53_names:
route53_names = self.get_instance_route53_names(instance)
for name in route53_names:
self.push(self.inventory, name, dest)
if self.nested_groups:
self.push_group(self.inventory, 'route53', name)
# Global Tag: instances without tags
if self.group_by_tag_none and len(instance.tags) == 0:
self.push(self.inventory, 'tag_none', dest)
if self.nested_groups:
self.push_group(self.inventory, 'tags', 'tag_none')
# Global Tag: tag all EC2 instances
self.push(self.inventory, 'ec2', dest)
self.inventory["_meta"]["hostvars"][dest] = self.get_host_info_dict_from_instance(instance)
def add_rds_instance(self, instance, region):
''' Adds an RDS instance to the inventory and index, as long as it is
addressable '''
# Only want available instances unless all_rds_instances is True
if not self.all_rds_instances and instance.status != 'available':
return
# Select the best destination address
dest = instance.endpoint[0]
if not dest:
# Skip instances we cannot address (e.g. private VPC subnet)
return
# Add to index
self.index[dest] = [region, instance.id]
# Inventory: Group by instance ID (always a group of 1)
if self.group_by_instance_id:
self.inventory[instance.id] = [dest]
if self.nested_groups:
self.push_group(self.inventory, 'instances', instance.id)
# Inventory: Group by region
if self.group_by_region:
self.push(self.inventory, region, dest)
if self.nested_groups:
self.push_group(self.inventory, 'regions', region)
# Inventory: Group by availability zone
if self.group_by_availability_zone:
self.push(self.inventory, instance.availability_zone, dest)
if self.nested_groups:
if self.group_by_region:
self.push_group(self.inventory, region, instance.availability_zone)
self.push_group(self.inventory, 'zones', instance.availability_zone)
# Inventory: Group by instance type
if self.group_by_instance_type:
type_name = self.to_safe('type_' + instance.instance_class)
self.push(self.inventory, type_name, dest)
if self.nested_groups:
self.push_group(self.inventory, 'types', type_name)
# Inventory: Group by VPC
if self.group_by_vpc_id and instance.subnet_group and instance.subnet_group.vpc_id:
vpc_id_name = self.to_safe('vpc_id_' + instance.subnet_group.vpc_id)
self.push(self.inventory, vpc_id_name, dest)
if self.nested_groups:
self.push_group(self.inventory, 'vpcs', vpc_id_name)
# Inventory: Group by security group
if self.group_by_security_group:
try:
if instance.security_group:
key = self.to_safe("security_group_" + instance.security_group.name)
self.push(self.inventory, key, dest)
if self.nested_groups:
self.push_group(self.inventory, 'security_groups', key)
except AttributeError:
self.fail_with_error('\n'.join(['Package boto seems a bit older.',
'Please upgrade boto >= 2.3.0.']))
# Inventory: Group by engine
if self.group_by_rds_engine:
self.push(self.inventory, self.to_safe("rds_" + instance.engine), dest)
if self.nested_groups:
self.push_group(self.inventory, 'rds_engines', self.to_safe("rds_" + instance.engine))
# Inventory: Group by parameter group
if self.group_by_rds_parameter_group:
self.push(self.inventory, self.to_safe("rds_parameter_group_" + instance.parameter_group.name), dest)
if self.nested_groups:
self.push_group(self.inventory, 'rds_parameter_groups', self.to_safe("rds_parameter_group_" + instance.parameter_group.name))
# Global Tag: all RDS instances
self.push(self.inventory, 'rds', dest)
self.inventory["_meta"]["hostvars"][dest] = self.get_host_info_dict_from_instance(instance)
def add_elasticache_cluster(self, cluster, region):
''' Adds an ElastiCache cluster to the inventory and index, as long as
its nodes are addressable '''
# Only want available clusters unless all_elasticache_clusters is True
if not self.all_elasticache_clusters and cluster['CacheClusterStatus'] != 'available':
return
# Select the best destination address
if 'ConfigurationEndpoint' in cluster and cluster['ConfigurationEndpoint']:
# Memcached cluster
dest = cluster['ConfigurationEndpoint']['Address']
is_redis = False
else:
# Redis single-node cluster
# Because all Redis clusters are single nodes, we'll merge the
# info from the cluster with info about the node
dest = cluster['CacheNodes'][0]['Endpoint']['Address']
is_redis = True
if not dest:
# Skip clusters we cannot address (e.g. private VPC subnet)
return
# Add to index
self.index[dest] = [region, cluster['CacheClusterId']]
# Inventory: Group by instance ID (always a group of 1)
if self.group_by_instance_id:
self.inventory[cluster['CacheClusterId']] = [dest]
if self.nested_groups:
self.push_group(self.inventory, 'instances', cluster['CacheClusterId'])
# Inventory: Group by region
if self.group_by_region and not is_redis:
self.push(self.inventory, region, dest)
if self.nested_groups:
self.push_group(self.inventory, 'regions', region)
# Inventory: Group by availability zone
if self.group_by_availability_zone and not is_redis:
self.push(self.inventory, cluster['PreferredAvailabilityZone'], dest)
if self.nested_groups:
if self.group_by_region:
self.push_group(self.inventory, region, cluster['PreferredAvailabilityZone'])
self.push_group(self.inventory, 'zones', cluster['PreferredAvailabilityZone'])
# Inventory: Group by node type
if self.group_by_instance_type and not is_redis:
type_name = self.to_safe('type_' + cluster['CacheNodeType'])
self.push(self.inventory, type_name, dest)
if self.nested_groups:
self.push_group(self.inventory, 'types', type_name)
# Inventory: Group by VPC (information not available in the current
# AWS API version for ElastiCache)
# Inventory: Group by security group
if self.group_by_security_group and not is_redis:
# Check for the existence of the 'SecurityGroups' key and also if
# this key has some value. When the cluster is not placed in a SG
# the query can return None here and cause an error.
if 'SecurityGroups' in cluster and cluster['SecurityGroups'] is not None:
for security_group in cluster['SecurityGroups']:
key = self.to_safe("security_group_" + security_group['SecurityGroupId'])
self.push(self.inventory, key, dest)
if self.nested_groups:
self.push_group(self.inventory, 'security_groups', key)
# Inventory: Group by engine
if self.group_by_elasticache_engine and not is_redis:
self.push(self.inventory, self.to_safe("elasticache_" + cluster['Engine']), dest)
if self.nested_groups:
self.push_group(self.inventory, 'elasticache_engines', self.to_safe(cluster['Engine']))
# Inventory: Group by parameter group
if self.group_by_elasticache_parameter_group:
self.push(self.inventory, self.to_safe("elasticache_parameter_group_" + cluster['CacheParameterGroup']['CacheParameterGroupName']), dest)
if self.nested_groups:
self.push_group(self.inventory, 'elasticache_parameter_groups', self.to_safe(cluster['CacheParameterGroup']['CacheParameterGroupName']))
# Inventory: Group by replication group
if self.group_by_elasticache_replication_group and 'ReplicationGroupId' in cluster and cluster['ReplicationGroupId']:
self.push(self.inventory, self.to_safe("elasticache_replication_group_" + cluster['ReplicationGroupId']), dest)
if self.nested_groups:
self.push_group(self.inventory, 'elasticache_replication_groups', self.to_safe(cluster['ReplicationGroupId']))
# Global Tag: all ElastiCache clusters
self.push(self.inventory, 'elasticache_clusters', cluster['CacheClusterId'])
host_info = self.get_host_info_dict_from_describe_dict(cluster)
self.inventory["_meta"]["hostvars"][dest] = host_info
# Add the nodes
for node in cluster['CacheNodes']:
self.add_elasticache_node(node, cluster, region)
def add_elasticache_node(self, node, cluster, region):
''' Adds an ElastiCache node to the inventory and index, as long as
it is addressable '''
# Only want available nodes unless all_elasticache_nodes is True
if not self.all_elasticache_nodes and node['CacheNodeStatus'] != 'available':
return
# Select the best destination address
dest = node['Endpoint']['Address']
if not dest:
# Skip nodes we cannot address (e.g. private VPC subnet)
return
node_id = self.to_safe(cluster['CacheClusterId'] + '_' + node['CacheNodeId'])
# Add to index
self.index[dest] = [region, node_id]
# Inventory: Group by node ID (always a group of 1)
if self.group_by_instance_id:
self.inventory[node_id] = [dest]
if self.nested_groups:
self.push_group(self.inventory, 'instances', node_id)
# Inventory: Group by region
if self.group_by_region:
self.push(self.inventory, region, dest)
if self.nested_groups:
self.push_group(self.inventory, 'regions', region)
# Inventory: Group by availability zone
if self.group_by_availability_zone:
self.push(self.inventory, cluster['PreferredAvailabilityZone'], dest)
if self.nested_groups:
if self.group_by_region:
self.push_group(self.inventory, region, cluster['PreferredAvailabilityZone'])
self.push_group(self.inventory, 'zones', cluster['PreferredAvailabilityZone'])
# Inventory: Group by node type
if self.group_by_instance_type:
type_name = self.to_safe('type_' + cluster['CacheNodeType'])
self.push(self.inventory, type_name, dest)
if self.nested_groups:
self.push_group(self.inventory, 'types', type_name)
# Inventory: Group by VPC (information not available in the current
# AWS API version for ElastiCache)
# Inventory: Group by security group
if self.group_by_security_group:
# Check for the existence of the 'SecurityGroups' key and also if
# this key has some value. When the cluster is not placed in a SG
# the query can return None here and cause an error.
if 'SecurityGroups' in cluster and cluster['SecurityGroups'] is not None:
for security_group in cluster['SecurityGroups']:
key = self.to_safe("security_group_" + security_group['SecurityGroupId'])
self.push(self.inventory, key, dest)
if self.nested_groups:
self.push_group(self.inventory, 'security_groups', key)
# Inventory: Group by engine
if self.group_by_elasticache_engine:
self.push(self.inventory, self.to_safe("elasticache_" + cluster['Engine']), dest)
if self.nested_groups:
self.push_group(self.inventory, 'elasticache_engines', self.to_safe("elasticache_" + cluster['Engine']))
# Inventory: Group by parameter group (done at cluster level)
# Inventory: Group by replication group (done at cluster level)
# Inventory: Group by ElastiCache Cluster
if self.group_by_elasticache_cluster:
self.push(self.inventory, self.to_safe("elasticache_cluster_" + cluster['CacheClusterId']), dest)
# Global Tag: all ElastiCache nodes
self.push(self.inventory, 'elasticache_nodes', dest)
host_info = self.get_host_info_dict_from_describe_dict(node)
if dest in self.inventory["_meta"]["hostvars"]:
self.inventory["_meta"]["hostvars"][dest].update(host_info)
else:
self.inventory["_meta"]["hostvars"][dest] = host_info
def add_elasticache_replication_group(self, replication_group, region):
''' Adds an ElastiCache replication group to the inventory and index '''
# Only want available clusters unless all_elasticache_replication_groups is True
if not self.all_elasticache_replication_groups and replication_group['Status'] != 'available':
return
# Select the best destination address (PrimaryEndpoint)
dest = replication_group['NodeGroups'][0]['PrimaryEndpoint']['Address']
if not dest:
# Skip clusters we cannot address (e.g. private VPC subnet)
return
# Add to index
self.index[dest] = [region, replication_group['ReplicationGroupId']]
# Inventory: Group by ID (always a group of 1)
if self.group_by_instance_id:
self.inventory[replication_group['ReplicationGroupId']] = [dest]
if self.nested_groups:
self.push_group(self.inventory, 'instances', replication_group['ReplicationGroupId'])
# Inventory: Group by region
if self.group_by_region:
self.push(self.inventory, region, dest)
if self.nested_groups:
self.push_group(self.inventory, 'regions', region)
# Inventory: Group by availability zone (doesn't apply to replication groups)
# Inventory: Group by node type (doesn't apply to replication groups)
# Inventory: Group by VPC (information not available in the current
# AWS API version for replication groups
# Inventory: Group by security group (doesn't apply to replication groups)
# Check this value in cluster level
# Inventory: Group by engine (replication groups are always Redis)
if self.group_by_elasticache_engine:
self.push(self.inventory, 'elasticache_redis', dest)
if self.nested_groups:
self.push_group(self.inventory, 'elasticache_engines', 'redis')
# Global Tag: all ElastiCache clusters
self.push(self.inventory, 'elasticache_replication_groups', replication_group['ReplicationGroupId'])
host_info = self.get_host_info_dict_from_describe_dict(replication_group)
self.inventory["_meta"]["hostvars"][dest] = host_info
def get_route53_records(self):
''' Get and store the map of resource records to domain names that
point to them. '''
r53_conn = route53.Route53Connection()
all_zones = r53_conn.get_zones()
route53_zones = [ zone for zone in all_zones if zone.name[:-1]
not in self.route53_excluded_zones ]
self.route53_records = {}
for zone in route53_zones:
rrsets = r53_conn.get_all_rrsets(zone.id)
for record_set in rrsets:
record_name = record_set.name
if record_name.endswith('.'):
record_name = record_name[:-1]
for resource in record_set.resource_records:
self.route53_records.setdefault(resource, set())
self.route53_records[resource].add(record_name)
def get_instance_route53_names(self, instance):
''' Check if an instance is referenced in the records we have from
Route53. If it is, return the list of domain names pointing to said
instance. If nothing points to it, return an empty list. '''
instance_attributes = [ 'public_dns_name', 'private_dns_name',
'ip_address', 'private_ip_address' ]
name_list = set()
for attrib in instance_attributes:
try:
value = getattr(instance, attrib)
except AttributeError:
continue
if value in self.route53_records:
name_list.update(self.route53_records[value])
return list(name_list)
def get_host_info_dict_from_instance(self, instance):
instance_vars = {}
for key in vars(instance):
value = getattr(instance, key)
key = self.to_safe('ec2_' + key)
# Handle complex types
# state/previous_state changed to properties in boto in https://github.com/boto/boto/commit/a23c379837f698212252720d2af8dec0325c9518
if key == 'ec2__state':
instance_vars['ec2_state'] = instance.state or ''
instance_vars['ec2_state_code'] = instance.state_code
elif key == 'ec2__previous_state':
instance_vars['ec2_previous_state'] = instance.previous_state or ''
instance_vars['ec2_previous_state_code'] = instance.previous_state_code
elif type(value) in [int, bool]:
instance_vars[key] = value
elif isinstance(value, six.string_types):
instance_vars[key] = value.strip()
elif type(value) == type(None):
instance_vars[key] = ''
elif key == 'ec2_region':
instance_vars[key] = value.name
elif key == 'ec2__placement':
instance_vars['ec2_placement'] = value.zone
elif key == 'ec2_tags':
for k, v in value.items():
key = self.to_safe('ec2_tag_' + k)
instance_vars[key] = v
elif key == 'ec2_groups':
group_ids = []
group_names = []
for group in value:
group_ids.append(group.id)
group_names.append(group.name)
instance_vars["ec2_security_group_ids"] = ','.join([str(i) for i in group_ids])
instance_vars["ec2_security_group_names"] = ','.join([str(i) for i in group_names])
else:
pass
# TODO Product codes if someone finds them useful
#print key
#print type(value)
#print value
return instance_vars
def get_host_info_dict_from_describe_dict(self, describe_dict):
''' Parses the dictionary returned by the API call into a flat list
of parameters. This method should be used only when 'describe' is
used directly because Boto doesn't provide specific classes. '''
# I really don't agree with prefixing everything with 'ec2'
# because EC2, RDS and ElastiCache are different services.
# I'm just following the existing pattern so as not to break
# compatibility.
host_info = {}
for key in describe_dict:
value = describe_dict[key]
key = self.to_safe('ec2_' + self.uncammelize(key))
# Handle complex types
# Target: Memcached Cache Clusters
if key == 'ec2_configuration_endpoint' and value:
host_info['ec2_configuration_endpoint_address'] = value['Address']
host_info['ec2_configuration_endpoint_port'] = value['Port']
# Target: Cache Nodes and Redis Cache Clusters (single node)
if key == 'ec2_endpoint' and value:
host_info['ec2_endpoint_address'] = value['Address']
host_info['ec2_endpoint_port'] = value['Port']
# Target: Redis Replication Groups
if key == 'ec2_node_groups' and value:
host_info['ec2_endpoint_address'] = value[0]['PrimaryEndpoint']['Address']
host_info['ec2_endpoint_port'] = value[0]['PrimaryEndpoint']['Port']
replica_count = 0
for node in value[0]['NodeGroupMembers']:
if node['CurrentRole'] == 'primary':
host_info['ec2_primary_cluster_address'] = node['ReadEndpoint']['Address']
host_info['ec2_primary_cluster_port'] = node['ReadEndpoint']['Port']
host_info['ec2_primary_cluster_id'] = node['CacheClusterId']
elif node['CurrentRole'] == 'replica':
host_info['ec2_replica_cluster_address_'+ str(replica_count)] = node['ReadEndpoint']['Address']
host_info['ec2_replica_cluster_port_'+ str(replica_count)] = node['ReadEndpoint']['Port']
host_info['ec2_replica_cluster_id_'+ str(replica_count)] = node['CacheClusterId']
replica_count += 1
# Target: Redis Replication Groups
if key == 'ec2_member_clusters' and value:
host_info['ec2_member_clusters'] = ','.join([str(i) for i in value])
# Target: All Cache Clusters
elif key == 'ec2_cache_parameter_group':
host_info["ec2_cache_node_ids_to_reboot"] = ','.join([str(i) for i in value['CacheNodeIdsToReboot']])
host_info['ec2_cache_parameter_group_name'] = value['CacheParameterGroupName']
host_info['ec2_cache_parameter_apply_status'] = value['ParameterApplyStatus']
# Target: Almost everything
elif key == 'ec2_security_groups':
# Skip if SecurityGroups is None
# (it is possible to have the key defined but no value in it).
if value is not None:
sg_ids = []
for sg in value:
sg_ids.append(sg['SecurityGroupId'])
host_info["ec2_security_group_ids"] = ','.join([str(i) for i in sg_ids])
# Target: Everything
# Preserve booleans and integers
elif type(value) in [int, bool]:
host_info[key] = value
# Target: Everything
# Sanitize string values
elif isinstance(value, six.string_types):
host_info[key] = value.strip()
# Target: Everything
# Replace None by an empty string
elif type(value) == type(None):
host_info[key] = ''
else:
# Remove non-processed complex types
pass
return host_info
def get_host_info(self):
''' Get variables about a specific host '''
if len(self.index) == 0:
# Need to load index from cache
self.load_index_from_cache()
if self.args.host not in self.index:
# try updating the cache
self.do_api_calls_update_cache()
if self.args.host not in self.index:
# host might not exist anymore
return self.json_format_dict({}, True)
(region, instance_id) = self.index[self.args.host]
instance = self.get_instance(region, instance_id)
return self.json_format_dict(self.get_host_info_dict_from_instance(instance), True)
def push(self, my_dict, key, element):
''' Push an element onto an array that may not have been defined in
the dict '''
group_info = my_dict.setdefault(key, [])
if isinstance(group_info, dict):
host_list = group_info.setdefault('hosts', [])
host_list.append(element)
else:
group_info.append(element)
def push_group(self, my_dict, key, element):
''' Push a group as a child of another group. '''
parent_group = my_dict.setdefault(key, {})
if not isinstance(parent_group, dict):
parent_group = my_dict[key] = {'hosts': parent_group}
child_groups = parent_group.setdefault('children', [])
if element not in child_groups:
child_groups.append(element)
def get_inventory_from_cache(self):
''' Reads the inventory from the cache file and returns it as a JSON
object '''
with open(self.cache_path_cache, 'r') as cache:
    json_inventory = cache.read()
return json_inventory
def load_index_from_cache(self):
''' Reads the index from the cache file and sets self.index '''
with open(self.cache_path_index, 'r') as cache:
    self.index = json.loads(cache.read())
def write_to_cache(self, data, filename):
''' Writes data in JSON format to a file '''
json_data = self.json_format_dict(data, True)
with open(filename, 'w') as cache:
    cache.write(json_data)
def uncammelize(self, key):
temp = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', key)
return re.sub('([a-z0-9])([A-Z])', r'\1_\2', temp).lower()
def to_safe(self, word):
''' Converts 'bad' characters in a string to underscores so they can be
used as Ansible groups '''
return re.sub(r"[^A-Za-z0-9_]", "_", word)
def json_format_dict(self, data, pretty=False):
''' Converts a dict to a JSON object and dumps it as a formatted
string '''
if pretty:
return json.dumps(data, sort_keys=True, indent=2)
else:
return json.dumps(data)
# Run the script
Ec2Inventory()
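
# --- Illustrative sketch (not part of the original script) ---
# The push()/push_group() helpers above shape the inventory JSON that
# Ansible consumes: push() appends a host to a flat group list, while
# push_group() records a child group, promoting a flat host list to the
# {'hosts': [...], 'children': [...]} form used when nested_groups is on.
# A standalone rendition, using a hypothetical host and instance type:

def _demo_push(inv, key, element):
    # Append a host to a group, creating the group on first use.
    inv.setdefault(key, []).append(element)

def _demo_push_group(inv, key, element):
    # Record 'element' as a child group of 'key'.
    parent = inv.setdefault(key, {})
    if not isinstance(parent, dict):
        parent = inv[key] = {'hosts': parent}
    children = parent.setdefault('children', [])
    if element not in children:
        children.append(element)

_demo_inv = {}
_demo_push(_demo_inv, 'type_db_r3_large', 'db1.example.com')
_demo_push_group(_demo_inv, 'types', 'type_db_r3_large')
assert _demo_inv == {'type_db_r3_large': ['db1.example.com'],
                     'types': {'children': ['type_db_r3_large']}}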
| epl-1.0 |
nhejazi/scikit-learn | sklearn/feature_selection/tests/test_chi2.py | 49 | 3080 | """
Tests for chi2, currently the only feature selection function designed
specifically to work with sparse matrices.
"""
import warnings
import numpy as np
from scipy.sparse import coo_matrix, csr_matrix
import scipy.stats
from sklearn.feature_selection import SelectKBest, chi2
from sklearn.feature_selection.univariate_selection import _chisquare
from sklearn.utils.testing import assert_raises
from sklearn.utils.testing import assert_array_almost_equal
from sklearn.utils.testing import assert_array_equal
from sklearn.utils.testing import clean_warning_registry
# Feature 0 is highly informative for class 1;
# feature 1 is the same everywhere;
# feature 2 is a bit informative for class 2.
X = [[2, 1, 2],
[9, 1, 1],
[6, 1, 2],
[0, 1, 2]]
y = [0, 1, 2, 2]
def mkchi2(k):
"""Make k-best chi2 selector"""
return SelectKBest(chi2, k=k)
def test_chi2():
# Test Chi2 feature extraction
chi2 = mkchi2(k=1).fit(X, y)
assert_array_equal(chi2.get_support(indices=True), [0])
assert_array_equal(chi2.transform(X), np.array(X)[:, [0]])
chi2 = mkchi2(k=2).fit(X, y)
assert_array_equal(sorted(chi2.get_support(indices=True)), [0, 2])
Xsp = csr_matrix(X, dtype=np.float64)
chi2 = mkchi2(k=2).fit(Xsp, y)
assert_array_equal(sorted(chi2.get_support(indices=True)), [0, 2])
Xtrans = chi2.transform(Xsp)
assert_array_equal(Xtrans.shape, [Xsp.shape[0], 2])
# == doesn't work on scipy.sparse matrices
Xtrans = Xtrans.toarray()
Xtrans2 = mkchi2(k=2).fit_transform(Xsp, y).toarray()
assert_array_equal(Xtrans, Xtrans2)
def test_chi2_coo():
# Check that chi2 works with a COO matrix
# (as returned by CountVectorizer, DictVectorizer)
Xcoo = coo_matrix(X)
mkchi2(k=2).fit_transform(Xcoo, y)
# if we got here without an exception, we're safe
def test_chi2_negative():
# Check for proper error on negative numbers in the input X.
X, y = [[0, 1], [-1e-20, 1]], [0, 1]
for X in (X, np.array(X), csr_matrix(X)):
assert_raises(ValueError, chi2, X, y)
def test_chi2_unused_feature():
# Unused feature should evaluate to NaN
# and should issue no runtime warning
clean_warning_registry()
with warnings.catch_warnings(record=True) as warned:
warnings.simplefilter('always')
chi, p = chi2([[1, 0], [0, 0]], [1, 0])
for w in warned:
if 'divide by zero' in repr(w):
raise AssertionError('Found unexpected warning %s' % w)
assert_array_equal(chi, [1, np.nan])
assert_array_equal(p[1], np.nan)
def test_chisquare():
# Test replacement for scipy.stats.chisquare against the original.
obs = np.array([[2., 2.],
[1., 1.]])
exp = np.array([[1.5, 1.5],
[1.5, 1.5]])
# call SciPy first because our version overwrites obs
chi_scp, p_scp = scipy.stats.chisquare(obs, exp)
chi_our, p_our = _chisquare(obs, exp)
assert_array_almost_equal(chi_scp, chi_our)
assert_array_almost_equal(p_scp, p_our)
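
# --- Illustrative sketch (not part of the test suite) ---
# chi2 treats each feature column as a count and compares, per class, the
# observed per-class feature totals against the totals expected if the
# feature were independent of y. A hand computation on the toy X/y above,
# checked against the library function:

def _demo_chi2_by_hand():
    X_arr = np.array(X, dtype=np.float64)
    Y = np.zeros((len(y), 3))
    Y[np.arange(len(y)), y] = 1                      # one-hot class labels
    observed = Y.T.dot(X_arr)                        # per-class feature totals
    expected = np.outer(Y.mean(axis=0), X_arr.sum(axis=0))
    chi_manual = ((observed - expected) ** 2 / expected).sum(axis=0)
    chi_lib, _ = chi2(X_arr, y)
    assert_array_almost_equal(chi_manual, chi_lib)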
| bsd-3-clause |
kutenai/django | tests/model_validation/tests.py | 15 | 4004 | from django.core import management
from django.core.checks import Error
from django.core.checks.model_checks import _check_lazy_references
from django.db import models
from django.db.models.signals import post_init
from django.test import SimpleTestCase
from django.test.utils import isolate_apps, override_settings
from django.utils import six
@override_settings(
INSTALLED_APPS=['django.contrib.auth', 'django.contrib.contenttypes'],
SILENCED_SYSTEM_CHECKS=['fields.W342'], # ForeignKey(unique=True)
)
class ModelValidationTest(SimpleTestCase):
def test_models_validate(self):
# All our models should validate properly
# Validation Tests:
# * choices= Iterable of Iterables
# See: https://code.djangoproject.com/ticket/20430
# * related_name='+' doesn't clash with another '+'
# See: https://code.djangoproject.com/ticket/21375
management.call_command("check", stdout=six.StringIO())
@isolate_apps('django.contrib.auth', kwarg_name='apps')
def test_lazy_reference_checks(self, apps):
class DummyModel(models.Model):
author = models.ForeignKey('Author', models.CASCADE)
class Meta:
app_label = "model_validation"
class DummyClass(object):
def __call__(self, **kwargs):
pass
def dummy_method(self):
pass
def dummy_function(*args, **kwargs):
pass
apps.lazy_model_operation(dummy_function, ('auth', 'imaginarymodel'))
apps.lazy_model_operation(dummy_function, ('fanciful_app', 'imaginarymodel'))
post_init.connect(dummy_function, sender='missing-app.Model', apps=apps)
post_init.connect(DummyClass(), sender='missing-app.Model', apps=apps)
post_init.connect(DummyClass().dummy_method, sender='missing-app.Model', apps=apps)
errors = _check_lazy_references(apps)
expected = [
Error(
"%r contains a lazy reference to auth.imaginarymodel, "
"but app 'auth' doesn't provide model 'imaginarymodel'." % dummy_function,
obj=dummy_function,
id='models.E022',
),
Error(
"%r contains a lazy reference to fanciful_app.imaginarymodel, "
"but app 'fanciful_app' isn't installed." % dummy_function,
obj=dummy_function,
id='models.E022',
),
Error(
"An instance of class 'DummyClass' was connected to "
"the 'post_init' signal with a lazy reference to the sender "
"'missing-app.model', but app 'missing-app' isn't installed.",
hint=None,
obj='model_validation.tests',
id='signals.E001',
),
Error(
"Bound method 'DummyClass.dummy_method' was connected to the "
"'post_init' signal with a lazy reference to the sender "
"'missing-app.model', but app 'missing-app' isn't installed.",
hint=None,
obj='model_validation.tests',
id='signals.E001',
),
Error(
"The field model_validation.DummyModel.author was declared "
"with a lazy reference to 'model_validation.author', but app "
"'model_validation' isn't installed.",
hint=None,
obj=DummyModel.author.field,
id='fields.E307',
),
Error(
"The function 'dummy_function' was connected to the 'post_init' "
"signal with a lazy reference to the sender "
"'missing-app.model', but app 'missing-app' isn't installed.",
hint=None,
obj='model_validation.tests',
id='signals.E001',
),
]
self.assertEqual(errors, expected)
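
# Illustrative note (not part of the test file): the E022/E001 errors above
# arise only because the lazy references name apps or models that are never
# installed. The same mechanism resolves silently for a model that exists in
# an installed app, e.g. (sketch, reusing the fixtures above):
#
#   apps.lazy_model_operation(dummy_function, ('auth', 'user'))
#   # _check_lazy_references(apps) reports no error for this reference.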
| bsd-3-clause |
nhicher/ansible | lib/ansible/modules/files/blockinfile.py | 16 | 11779 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2014, 2015 YAEGASHI Takeshi <yaegashi@debian.org>
# Copyright: (c) 2017, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'core'}
DOCUMENTATION = r'''
---
module: blockinfile
short_description: Insert/update/remove a text block surrounded by marker lines
version_added: '2.0'
description:
- This module will insert/update/remove a block of multi-line text
surrounded by customizable marker lines.
author:
- Yaegashi Takeshi (@yaegashi)
options:
path:
description:
- The file to modify.
- Before Ansible 2.3 this option was only usable as I(dest), I(destfile) and I(name).
required: yes
type: path
aliases: [ dest, destfile, name ]
state:
description:
- Whether the block should be there or not.
type: str
choices: [ absent, present ]
default: present
marker:
description:
- The marker line template.
- C({mark}) will be replaced with the values in C(marker_begin) (default="BEGIN") and C(marker_end) (default="END").
- Using a custom marker without the C({mark}) variable may result in the block being repeatedly inserted on subsequent playbook runs.
type: str
default: '# {mark} ANSIBLE MANAGED BLOCK'
block:
description:
- The text to insert inside the marker lines.
- If it is missing or an empty string, the block will be removed as if C(state) were set to C(absent).
type: str
aliases: [ content ]
default: ''
insertafter:
description:
- If specified, the block will be inserted after the last match of specified regular expression.
- A special value is available: C(EOF) for inserting the block at the end of the file.
- If specified regular expression has no matches, C(EOF) will be used instead.
type: str
default: EOF
choices: [ EOF, '*regex*' ]
insertbefore:
description:
- If specified, the block will be inserted before the last match of specified regular expression.
- A special value is available: C(BOF) for inserting the block at the beginning of the file.
- If specified regular expression has no matches, the block will be inserted at the end of the file.
type: str
choices: [ BOF, '*regex*' ]
create:
description:
- Create a new file if it does not exist.
type: bool
default: no
backup:
description:
- Create a backup file including the timestamp information so you can
get the original file back if you somehow clobbered it incorrectly.
type: bool
default: no
marker_begin:
description:
- This will be inserted at C({mark}) in the opening ansible block marker.
type: str
default: BEGIN
version_added: '2.5'
marker_end:
required: false
description:
- This will be inserted at C({mark}) in the closing ansible block marker.
type: str
default: END
version_added: '2.5'
notes:
- This module supports check mode.
- When using 'with_*' loops, be aware that if you do not set a unique I(marker) the block will be overwritten on each iteration.
- As of Ansible 2.3, the I(dest) option has been changed to I(path) as default, but I(dest) still works as well.
- Option I(follow) has been removed in Ansible 2.5, because this module modifies the contents of the file so I(follow=no) doesn't make sense.
- When more than one block should be handled in the same file, you must use a different I(marker) per task.
extends_documentation_fragment:
- files
- validate
'''
EXAMPLES = r'''
# Before Ansible 2.3, option 'dest' or 'name' was used instead of 'path'
- name: Insert/Update "Match User" configuration block in /etc/ssh/sshd_config
blockinfile:
path: /etc/ssh/sshd_config
block: |
Match User ansible-agent
PasswordAuthentication no
- name: Insert/Update eth0 configuration stanza in /etc/network/interfaces
(it might be better to copy files into /etc/network/interfaces.d/)
blockinfile:
path: /etc/network/interfaces
block: |
iface eth0 inet static
address 192.0.2.23
netmask 255.255.255.0
- name: Insert/Update configuration using a local file and validate it
blockinfile:
block: "{{ lookup('file', './local/ssh_config') }}"
dest: /etc/ssh/ssh_config
backup: yes
validate: /usr/sbin/sshd -T -f %s
- name: Insert/Update HTML surrounded by custom markers after <body> line
blockinfile:
path: /var/www/html/index.html
marker: "<!-- {mark} ANSIBLE MANAGED BLOCK -->"
insertafter: "<body>"
content: |
<h1>Welcome to {{ ansible_hostname }}</h1>
<p>Last updated on {{ ansible_date_time.iso8601 }}</p>
- name: Remove HTML as well as surrounding markers
blockinfile:
path: /var/www/html/index.html
marker: "<!-- {mark} ANSIBLE MANAGED BLOCK -->"
content: ""
- name: Add mappings to /etc/hosts
blockinfile:
path: /etc/hosts
block: |
{{ item.ip }} {{ item.name }}
marker: "# {mark} ANSIBLE MANAGED BLOCK {{ item.name }}"
with_items:
- { name: host1, ip: 10.10.1.10 }
- { name: host2, ip: 10.10.1.11 }
- { name: host3, ip: 10.10.1.12 }
'''
import re
import os
import tempfile
from ansible.module_utils.six import b
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils._text import to_bytes
def write_changes(module, contents, path):
tmpfd, tmpfile = tempfile.mkstemp(dir=module.tmpdir)
f = os.fdopen(tmpfd, 'wb')
f.write(contents)
f.close()
validate = module.params.get('validate', None)
valid = not validate
if validate:
if "%s" not in validate:
module.fail_json(msg="validate must contain %%s: %s" % (validate))
(rc, out, err) = module.run_command(validate % tmpfile)
valid = rc == 0
if rc != 0:
module.fail_json(msg='failed to validate: '
'rc:%s error:%s' % (rc, err))
if valid:
module.atomic_move(tmpfile, path, unsafe_writes=module.params['unsafe_writes'])
def check_file_attrs(module, changed, message, diff):
file_args = module.load_file_common_arguments(module.params)
if module.set_file_attributes_if_different(file_args, False, diff=diff):
if changed:
message += " and "
changed = True
message += "ownership, perms or SE linux context changed"
return message, changed
def main():
module = AnsibleModule(
argument_spec=dict(
path=dict(type='path', required=True, aliases=['dest', 'destfile', 'name']),
state=dict(type='str', default='present', choices=['absent', 'present']),
marker=dict(type='str', default='# {mark} ANSIBLE MANAGED BLOCK'),
block=dict(type='str', default='', aliases=['content']),
insertafter=dict(type='str'),
insertbefore=dict(type='str'),
create=dict(type='bool', default=False),
backup=dict(type='bool', default=False),
validate=dict(type='str'),
marker_begin=dict(type='str', default='BEGIN'),
marker_end=dict(type='str', default='END'),
),
mutually_exclusive=[['insertbefore', 'insertafter']],
add_file_common_args=True,
supports_check_mode=True
)
params = module.params
path = params['path']
if os.path.isdir(path):
module.fail_json(rc=256,
msg='Path %s is a directory !' % path)
path_exists = os.path.exists(path)
if not path_exists:
if not module.boolean(params['create']):
module.fail_json(rc=257,
msg='Path %s does not exist !' % path)
destpath = os.path.dirname(path)
if not os.path.exists(destpath) and not module.check_mode:
try:
os.makedirs(destpath)
except Exception as e:
module.fail_json(msg='Error creating %s: %s' % (destpath, str(e)))
original = None
lines = []
else:
f = open(path, 'rb')
original = f.read()
f.close()
lines = original.splitlines()
diff = {'before': '',
'after': '',
'before_header': '%s (content)' % path,
'after_header': '%s (content)' % path}
if module._diff and original:
diff['before'] = original
insertbefore = params['insertbefore']
insertafter = params['insertafter']
block = to_bytes(params['block'])
marker = to_bytes(params['marker'])
present = params['state'] == 'present'
if not present and not path_exists:
module.exit_json(changed=False, msg="File %s not present" % path)
if insertbefore is None and insertafter is None:
insertafter = 'EOF'
if insertafter not in (None, 'EOF'):
insertre = re.compile(to_bytes(insertafter, errors='surrogate_or_strict'))
elif insertbefore not in (None, 'BOF'):
insertre = re.compile(to_bytes(insertbefore, errors='surrogate_or_strict'))
else:
insertre = None
marker0 = re.sub(b(r'{mark}'), b(params['marker_begin']), marker)
marker1 = re.sub(b(r'{mark}'), b(params['marker_end']), marker)
if present and block:
# Escape sequences like '\n' need to be handled in Ansible 1.x;
# re.sub() expands backslash escapes in its replacement argument,
# which is what turns the literal sequences into real characters here.
if module.ansible_version.startswith('1.'):
block = re.sub('', block, '')
blocklines = [marker0] + block.splitlines() + [marker1]
else:
blocklines = []
n0 = n1 = None
for i, line in enumerate(lines):
if line == marker0:
n0 = i
if line == marker1:
n1 = i
if None in (n0, n1):
n0 = None
if insertre is not None:
for i, line in enumerate(lines):
if insertre.search(line):
n0 = i
if n0 is None:
n0 = len(lines)
elif insertafter is not None:
n0 += 1
elif insertbefore is not None:
n0 = 0 # insertbefore=BOF
else:
n0 = len(lines) # insertafter=EOF
elif n0 < n1:
lines[n0:n1 + 1] = []
else:
lines[n1:n0 + 1] = []
n0 = n1
lines[n0:n0] = blocklines
if lines:
result = b('\n').join(lines)
if original is None or original.endswith(b('\n')):
result += b('\n')
else:
result = b''
if module._diff:
diff['after'] = result
if original == result:
msg = ''
changed = False
elif original is None:
msg = 'File created'
changed = True
elif not blocklines:
msg = 'Block removed'
changed = True
else:
msg = 'Block inserted'
changed = True
if changed and not module.check_mode:
if module.boolean(params['backup']) and path_exists:
module.backup_local(path)
# We should always follow symlinks so that we change the real file
real_path = os.path.realpath(params['path'])
write_changes(module, result, real_path)
if module.check_mode and not path_exists:
module.exit_json(changed=changed, msg=msg, diff=diff)
attr_diff = {}
msg, changed = check_file_attrs(module, changed, msg, attr_diff)
attr_diff['before_header'] = '%s (file attributes)' % path
attr_diff['after_header'] = '%s (file attributes)' % path
difflist = [diff, attr_diff]
module.exit_json(changed=changed, msg=msg, diff=difflist)
if __name__ == '__main__':
main()
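
# --- Illustrative sketch (not part of the module) ---
# The heart of main() above is a list splice: locate the begin/end markers
# (or fall back to the insertafter=EOF default), drop the old block, and
# insert [marker0, *block, marker1] in its place. A standalone rendition:

def _demo_splice(lines, blocklines):
    n0 = n1 = None
    for i, line in enumerate(lines):
        if line == blocklines[0]:
            n0 = i
        if line == blocklines[-1]:
            n1 = i
    if n0 is None or n1 is None:
        n0 = len(lines)              # no existing block: append at EOF
    else:
        lines[n0:n1 + 1] = []        # remove the old block
    lines[n0:n0] = blocklines
    return lines

_demo = _demo_splice(['a', 'b'], ['# BEGIN ANSIBLE MANAGED BLOCK', 'x=1', '# END ANSIBLE MANAGED BLOCK'])
assert _demo == ['a', 'b', '# BEGIN ANSIBLE MANAGED BLOCK', 'x=1', '# END ANSIBLE MANAGED BLOCK']
_demo = _demo_splice(_demo, ['# BEGIN ANSIBLE MANAGED BLOCK', 'x=2', '# END ANSIBLE MANAGED BLOCK'])
assert _demo[3] == 'x=2' and len(_demo) == 5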
| gpl-3.0 |
orgito/ansible | lib/ansible/modules/remote_management/ucs/ucs_ip_pool.py | 46 | 11474 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'certified'}
DOCUMENTATION = r'''
---
module: ucs_ip_pool
short_description: Configures IP address pools on Cisco UCS Manager
description:
- Configures IP address pools and blocks of IP addresses on Cisco UCS Manager.
- Examples can be used with the UCS Platform Emulator U(https://communities.cisco.com/ucspe).
extends_documentation_fragment: ucs
options:
state:
description:
- If C(present), will verify IP pool is present and will create if needed.
- If C(absent), will verify IP pool is absent and will delete if needed.
choices: [present, absent]
default: present
name:
description:
- The name of the IP address pool.
- This name can be between 1 and 32 alphanumeric characters.
- "You cannot use spaces or any special characters other than - (hyphen), \"_\" (underscore), : (colon), and . (period)."
- You cannot change this name after the IP address pool is created.
required: yes
descr:
description:
- The user-defined description of the IP address pool.
- Enter up to 256 characters.
- "You can use any characters or spaces except the following:"
- "` (accent mark), \ (backslash), ^ (caret), \" (double quote), = (equal sign), > (greater than), < (less than), or ' (single quote)."
order:
description:
- The Assignment Order field.
- "This can be one of the following:"
- "default - Cisco UCS Manager selects a random identity from the pool."
- "sequential - Cisco UCS Manager selects the lowest available identity from the pool."
choices: [default, sequential]
default: default
first_addr:
description:
- The first IPv4 address in the IPv4 addresses block.
- This is the From field in the UCS Manager Add IPv4 Blocks menu.
last_addr:
description:
- The last IPv4 address in the IPv4 addresses block.
- This is the To field in the UCS Manager Add IPv4 Blocks menu.
subnet_mask:
description:
- The subnet mask associated with the IPv4 addresses in the block.
default: 255.255.255.0
default_gw:
description:
- The default gateway associated with the IPv4 addresses in the block.
default: 0.0.0.0
primary_dns:
description:
- The primary DNS server that this block of IPv4 addresses should access.
default: 0.0.0.0
secondary_dns:
description:
- The secondary DNS server that this block of IPv4 addresses should access.
default: 0.0.0.0
ipv6_first_addr:
description:
- The first IPv6 address in the IPv6 addresses block.
- This is the From field in the UCS Manager Add IPv6 Blocks menu.
ipv6_last_addr:
description:
- The last IPv6 address in the IPv6 addresses block.
- This is the To field in the UCS Manager Add IPv6 Blocks menu.
ipv6_prefix:
description:
- The network address prefix associated with the IPv6 addresses in the block.
default: '64'
ipv6_default_gw:
description:
- The default gateway associated with the IPv6 addresses in the block.
default: '::'
ipv6_primary_dns:
description:
- The primary DNS server that this block of IPv6 addresses should access.
default: '::'
ipv6_secondary_dns:
description:
- The secondary DNS server that this block of IPv6 addresses should access.
default: '::'
org_dn:
description:
- Org dn (distinguished name)
default: org-root
requirements:
- ucsmsdk
author:
- David Soper (@dsoper2)
- CiscoUcs (@CiscoUcs)
version_added: '2.5'
'''
EXAMPLES = r'''
- name: Configure IPv4 address pools
ucs_ip_pool:
hostname: 172.16.143.150
username: admin
password: password
name: ip-A
order: sequential
first_addr: 192.168.0.10
last_addr: 192.168.0.19
subnet_mask: 255.255.255.0
default_gw: 192.168.0.1
primary_dns: 172.16.143.136
- name: Configure IPv6 address pools
ucs_ip_pool:
hostname: 172.16.143.150
username: admin
password: password
name: ipv6-B
ipv6_first_addr: fe80::1cae:7992:d7a1:ed07
ipv6_last_addr: fe80::1cae:7992:d7a1:edfe
ipv6_default_gw: fe80::1cae:7992:d7a1:ecff
- name: Remove IPv4 address pools
ucs_ip_pool:
hostname: 172.16.143.150
username: admin
password: password
name: ip-A
state: absent
- name: Remove IPv6 address pools
ucs_ip_pool:
hostname: 172.16.143.150
username: admin
password: password
name: ipv6-B
state: absent
'''
RETURN = r'''
#
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.remote_management.ucs import UCSModule, ucs_argument_spec
def main():
argument_spec = ucs_argument_spec
argument_spec.update(
org_dn=dict(type='str', default='org-root'),
name=dict(type='str', required=True),
descr=dict(type='str', default=''),
order=dict(type='str', default='default', choices=['default', 'sequential']),
first_addr=dict(type='str'),
last_addr=dict(type='str'),
subnet_mask=dict(type='str', default='255.255.255.0'),
default_gw=dict(type='str', default='0.0.0.0'),
primary_dns=dict(type='str', default='0.0.0.0'),
secondary_dns=dict(type='str', default='0.0.0.0'),
ipv6_first_addr=dict(type='str'),
ipv6_last_addr=dict(type='str'),
ipv6_prefix=dict(type='str', default='64'),
ipv6_default_gw=dict(type='str', default='::'),
ipv6_primary_dns=dict(type='str', default='::'),
ipv6_secondary_dns=dict(type='str', default='::'),
state=dict(type='str', default='present', choices=['present', 'absent']),
)
module = AnsibleModule(
argument_spec,
supports_check_mode=True,
)
# UCSModule verifies ucsmsdk is present and exits on failure. Imports are below ucs object creation.
ucs = UCSModule(module)
err = False
from ucsmsdk.mometa.ippool.IppoolPool import IppoolPool
from ucsmsdk.mometa.ippool.IppoolBlock import IppoolBlock
from ucsmsdk.mometa.ippool.IppoolIpV6Block import IppoolIpV6Block
changed = False
try:
mo_exists = False
props_match = False
# dn is <org_dn>/ip-pool-<name>
dn = module.params['org_dn'] + '/ip-pool-' + module.params['name']
mo = ucs.login_handle.query_dn(dn)
if mo:
mo_exists = True
if module.params['state'] == 'absent':
if mo_exists:
if not module.check_mode:
ucs.login_handle.remove_mo(mo)
ucs.login_handle.commit()
changed = True
else:
if mo_exists:
# check top-level mo props
kwargs = dict(assignment_order=module.params['order'])
kwargs['descr'] = module.params['descr']
if (mo.check_prop_match(**kwargs)):
# top-level props match, check next level mo/props
if module.params['last_addr'] and module.params['first_addr']:
# ipv4 block specified, check properties
block_dn = dn + '/block-' + module.params['first_addr'] + '-' + module.params['last_addr']
mo_1 = ucs.login_handle.query_dn(block_dn)
if mo_1:
kwargs = dict(subnet=module.params['subnet_mask'])
kwargs['def_gw'] = module.params['default_gw']
kwargs['prim_dns'] = module.params['primary_dns']
kwargs['sec_dns'] = module.params['secondary_dns']
if (mo_1.check_prop_match(**kwargs)):
# ipv4 block exists and properties match
props_match = True
else:
# no ipv4 block specified, but top-level props matched
props_match = True
# only check ipv6 props if the top-level and ipv4 props matched
if props_match and module.params['ipv6_last_addr'] and module.params['ipv6_first_addr']:
# ipv6 block specified, check properties
block_dn = dn + '/v6block-' + module.params['ipv6_first_addr'].lower() + '-' + module.params['ipv6_last_addr'].lower()
mo_1 = ucs.login_handle.query_dn(block_dn)
if mo_1:
kwargs = dict(prefix=module.params['ipv6_prefix'])
kwargs['def_gw'] = module.params['ipv6_default_gw']
kwargs['prim_dns'] = module.params['ipv6_primary_dns']
kwargs['sec_dns'] = module.params['ipv6_secondary_dns']
if (mo_1.check_prop_match(**kwargs)):
# ipv6 block exists and properties match
props_match = True
else:
# no ipv6 block specified, but previous checks matched
props_match = True
if not props_match:
if not module.check_mode:
# create if mo does not already exist
mo = IppoolPool(
parent_mo_or_dn=module.params['org_dn'],
name=module.params['name'],
descr=module.params['descr'],
assignment_order=module.params['order'],
)
if module.params['last_addr'] and module.params['first_addr']:
mo_1 = IppoolBlock(
parent_mo_or_dn=mo,
to=module.params['last_addr'],
r_from=module.params['first_addr'],
subnet=module.params['subnet_mask'],
def_gw=module.params['default_gw'],
prim_dns=module.params['primary_dns'],
sec_dns=module.params['secondary_dns'],
)
if module.params['ipv6_last_addr'] and module.params['ipv6_first_addr']:
mo_1 = IppoolIpV6Block(
parent_mo_or_dn=mo,
to=module.params['ipv6_last_addr'],
r_from=module.params['ipv6_first_addr'],
prefix=module.params['ipv6_prefix'],
def_gw=module.params['ipv6_default_gw'],
prim_dns=module.params['ipv6_primary_dns'],
sec_dns=module.params['ipv6_secondary_dns'],
)
ucs.login_handle.add_mo(mo, True)
ucs.login_handle.commit()
changed = True
except Exception as e:
err = True
ucs.result['msg'] = "setup error: %s " % str(e)
ucs.result['changed'] = changed
if err:
module.fail_json(**ucs.result)
module.exit_json(**ucs.result)
if __name__ == '__main__':
main()
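
# Illustrative sketch (not part of the module): the idempotency pattern used
# above -- query the DN, compare the desired properties with
# check_prop_match(), and commit only on a difference -- reduces to:
#
#   mo = ucs.login_handle.query_dn(dn)
#   if mo and mo.check_prop_match(**desired_props):
#       changed = False                        # already in the desired state
#   else:
#       ucs.login_handle.add_mo(new_mo, True)  # True = modify if present
#       ucs.login_handle.commit()
#       changed = True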
| gpl-3.0 |
chouseknecht/ansible | test/units/mock/procenv.py | 113 | 2694 | # (c) 2016, Matt Davis <mdavis@ansible.com>
# (c) 2016, Toshio Kuratomi <tkuratomi@ansible.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import sys
import json
from contextlib import contextmanager
from io import BytesIO, StringIO
from units.compat import unittest
from ansible.module_utils.six import PY3
from ansible.module_utils._text import to_bytes
@contextmanager
def swap_stdin_and_argv(stdin_data='', argv_data=tuple()):
"""
context manager that temporarily masks the test runner's values for stdin and argv
"""
real_stdin = sys.stdin
real_argv = sys.argv
if PY3:
fake_stream = StringIO(stdin_data)
fake_stream.buffer = BytesIO(to_bytes(stdin_data))
else:
fake_stream = BytesIO(to_bytes(stdin_data))
try:
sys.stdin = fake_stream
sys.argv = argv_data
yield
finally:
sys.stdin = real_stdin
sys.argv = real_argv
@contextmanager
def swap_stdout():
"""
context manager that temporarily replaces stdout for tests that need to verify output
"""
old_stdout = sys.stdout
if PY3:
fake_stream = StringIO()
else:
fake_stream = BytesIO()
try:
sys.stdout = fake_stream
yield fake_stream
finally:
sys.stdout = old_stdout
class ModuleTestCase(unittest.TestCase):
def setUp(self, module_args=None):
if module_args is None:
module_args = {'_ansible_remote_tmp': '/tmp', '_ansible_keep_remote_files': False}
args = json.dumps(dict(ANSIBLE_MODULE_ARGS=module_args))
# unittest doesn't have a clean place to use a context manager, so we have to enter/exit manually
self.stdin_swap = swap_stdin_and_argv(stdin_data=args)
self.stdin_swap.__enter__()
def tearDown(self):
# unittest doesn't have a clean place to use a context manager, so we have to enter/exit manually
self.stdin_swap.__exit__(None, None, None)
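
# Illustrative usage (not part of this helper file): a test case that feeds
# module arguments via the faked stdin and captures the JSON a module would
# print, assuming a hypothetical my_module with a main() entry point:
#
#   class MyModuleTest(ModuleTestCase):
#       def test_main_output(self):
#           with swap_stdout() as fake_out:
#               with self.assertRaises(SystemExit):
#                   my_module.main()   # exit_json()/fail_json() raise SystemExit
#           self.assertIn('"changed"', fake_out.getvalue())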
| gpl-3.0 |
FiloSottile/youtube-dl | youtube_dl/extractor/veehd.py | 13 | 3040 | from __future__ import unicode_literals
import re
import json
from .common import InfoExtractor
from ..compat import (
compat_urlparse,
)
from ..utils import (
ExtractorError,
clean_html,
get_element_by_id,
)
class VeeHDIE(InfoExtractor):
_VALID_URL = r'https?://veehd\.com/video/(?P<id>\d+)'
_TEST = {
'url': 'http://veehd.com/video/4639434_Solar-Sinter',
'info_dict': {
'id': '4639434',
'ext': 'mp4',
'title': 'Solar Sinter',
'uploader_id': 'VideoEyes',
'description': 'md5:46a840e8692ddbaffb5f81d9885cb457',
},
}
def _real_extract(self, url):
video_id = self._match_id(url)
# VeeHD seems to send garbage on the first request.
# See https://github.com/rg3/youtube-dl/issues/2102
self._download_webpage(url, video_id, 'Requesting webpage')
webpage = self._download_webpage(url, video_id)
if 'This video has been removed<' in webpage:
raise ExtractorError('Video %s has been removed' % video_id, expected=True)
player_path = self._search_regex(
r'\$\("#playeriframe"\).attr\({src : "(.+?)"',
webpage, 'player path')
player_url = compat_urlparse.urljoin(url, player_path)
self._download_webpage(player_url, video_id, 'Requesting player page')
player_page = self._download_webpage(
player_url, video_id, 'Downloading player page')
config_json = self._search_regex(
r'value=\'config=({.+?})\'', player_page, 'config json', default=None)
if config_json:
config = json.loads(config_json)
video_url = compat_urlparse.unquote(config['clip']['url'])
else:
iframe_src = self._search_regex(
r'<iframe[^>]+src="/?([^"]+)"', player_page, 'iframe url')
iframe_url = 'http://veehd.com/%s' % iframe_src
self._download_webpage(iframe_url, video_id, 'Requesting iframe page')
iframe_page = self._download_webpage(
iframe_url, video_id, 'Downloading iframe page')
video_url = self._search_regex(
r"file\s*:\s*'([^']+)'", iframe_page, 'video url')
title = clean_html(get_element_by_id('videoName', webpage).rpartition('|')[0])
uploader_id = self._html_search_regex(
r'<a href="/profile/\d+">(.+?)</a>',
webpage, 'uploader')
thumbnail = self._search_regex(
r'<img id="veehdpreview" src="(.+?)"',
webpage, 'thumbnail')
description = self._html_search_regex(
r'<td class="infodropdown".*?<div>(.*?)<ul',
webpage, 'description', flags=re.DOTALL)
return {
'_type': 'video',
'id': video_id,
'title': title,
'url': video_url,
'ext': 'mp4',
'uploader_id': uploader_id,
'thumbnail': thumbnail,
'description': description,
}
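
# Illustrative check (not part of the extractor): _VALID_URL both routes a
# URL to this extractor and captures the id consumed by _match_id(), e.g.
# for the _TEST URL above:
_demo_match = re.match(VeeHDIE._VALID_URL, 'http://veehd.com/video/4639434_Solar-Sinter')
assert _demo_match is not None and _demo_match.group('id') == '4639434'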
| unlicense |
2014c2g9/c2g9 | wsgi/static/reeborg/src/libraries/brython/Lib/collections/abc.py | 739 | 16026 | # Copyright 2007 Google, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.
"""Abstract Base Classes (ABCs) for collections, according to PEP 3119.
Unit tests are in test_collections.
"""
from abc import ABCMeta, abstractmethod
import sys
__all__ = ["Hashable", "Iterable", "Iterator",
"Sized", "Container", "Callable",
"Set", "MutableSet",
"Mapping", "MutableMapping",
"MappingView", "KeysView", "ItemsView", "ValuesView",
"Sequence", "MutableSequence",
"ByteString",
]
# Private list of types that we want to register with the various ABCs
# so that they will pass tests like:
# it = iter(somebytearray)
# assert isinstance(it, Iterable)
# Note: in other implementations, these types may not be distinct,
# and they may have their own implementation-specific types that
# are not included in this list.
bytes_iterator = type(iter(b''))
bytearray_iterator = type(iter(bytearray()))
#callable_iterator = ???
dict_keyiterator = type(iter({}.keys()))
dict_valueiterator = type(iter({}.values()))
dict_itemiterator = type(iter({}.items()))
list_iterator = type(iter([]))
list_reverseiterator = type(iter(reversed([])))
range_iterator = type(iter(range(0)))
set_iterator = type(iter(set()))
str_iterator = type(iter(""))
tuple_iterator = type(iter(()))
zip_iterator = type(iter(zip()))
## views ##
dict_keys = type({}.keys())
dict_values = type({}.values())
dict_items = type({}.items())
## misc ##
mappingproxy = type(type.__dict__)
### ONE-TRICK PONIES ###
class Hashable(metaclass=ABCMeta):
__slots__ = ()
@abstractmethod
def __hash__(self):
return 0
@classmethod
def __subclasshook__(cls, C):
if cls is Hashable:
for B in C.__mro__:
if "__hash__" in B.__dict__:
if B.__dict__["__hash__"]:
return True
break
return NotImplemented
class Iterable(metaclass=ABCMeta):
__slots__ = ()
@abstractmethod
def __iter__(self):
while False:
yield None
@classmethod
def __subclasshook__(cls, C):
if cls is Iterable:
if any("__iter__" in B.__dict__ for B in C.__mro__):
return True
return NotImplemented
class Iterator(Iterable):
__slots__ = ()
@abstractmethod
def __next__(self):
raise StopIteration
def __iter__(self):
return self
@classmethod
def __subclasshook__(cls, C):
if cls is Iterator:
if (any("__next__" in B.__dict__ for B in C.__mro__) and
any("__iter__" in B.__dict__ for B in C.__mro__)):
return True
return NotImplemented
Iterator.register(bytes_iterator)
Iterator.register(bytearray_iterator)
#Iterator.register(callable_iterator)
Iterator.register(dict_keyiterator)
Iterator.register(dict_valueiterator)
Iterator.register(dict_itemiterator)
Iterator.register(list_iterator)
Iterator.register(list_reverseiterator)
Iterator.register(range_iterator)
Iterator.register(set_iterator)
Iterator.register(str_iterator)
Iterator.register(tuple_iterator)
Iterator.register(zip_iterator)
class Sized(metaclass=ABCMeta):
__slots__ = ()
@abstractmethod
def __len__(self):
return 0
@classmethod
def __subclasshook__(cls, C):
if cls is Sized:
if any("__len__" in B.__dict__ for B in C.__mro__):
return True
return NotImplemented
class Container(metaclass=ABCMeta):
__slots__ = ()
@abstractmethod
def __contains__(self, x):
return False
@classmethod
def __subclasshook__(cls, C):
if cls is Container:
if any("__contains__" in B.__dict__ for B in C.__mro__):
return True
return NotImplemented
class Callable(metaclass=ABCMeta):
__slots__ = ()
@abstractmethod
def __call__(self, *args, **kwds):
return False
@classmethod
def __subclasshook__(cls, C):
if cls is Callable:
if any("__call__" in B.__dict__ for B in C.__mro__):
return True
return NotImplemented
### SETS ###
class Set(Sized, Iterable, Container):
"""A set is a finite, iterable container.
This class provides concrete generic implementations of all
methods except for __contains__, __iter__ and __len__.
To override the comparisons (presumably for speed, as the
semantics are fixed), all you have to do is redefine __le__ and
then the other operations will automatically follow suit.
"""
__slots__ = ()
def __le__(self, other):
if not isinstance(other, Set):
return NotImplemented
if len(self) > len(other):
return False
for elem in self:
if elem not in other:
return False
return True
def __lt__(self, other):
if not isinstance(other, Set):
return NotImplemented
return len(self) < len(other) and self.__le__(other)
def __gt__(self, other):
if not isinstance(other, Set):
return NotImplemented
return other < self
def __ge__(self, other):
if not isinstance(other, Set):
return NotImplemented
return other <= self
def __eq__(self, other):
if not isinstance(other, Set):
return NotImplemented
return len(self) == len(other) and self.__le__(other)
def __ne__(self, other):
return not (self == other)
@classmethod
def _from_iterable(cls, it):
'''Construct an instance of the class from any iterable input.
Must override this method if the class constructor signature
does not accept an iterable for an input.
'''
return cls(it)
def __and__(self, other):
if not isinstance(other, Iterable):
return NotImplemented
return self._from_iterable(value for value in other if value in self)
def isdisjoint(self, other):
for value in other:
if value in self:
return False
return True
def __or__(self, other):
if not isinstance(other, Iterable):
return NotImplemented
chain = (e for s in (self, other) for e in s)
return self._from_iterable(chain)
def __sub__(self, other):
if not isinstance(other, Set):
if not isinstance(other, Iterable):
return NotImplemented
other = self._from_iterable(other)
return self._from_iterable(value for value in self
if value not in other)
def __xor__(self, other):
if not isinstance(other, Set):
if not isinstance(other, Iterable):
return NotImplemented
other = self._from_iterable(other)
return (self - other) | (other - self)
def _hash(self):
"""Compute the hash value of a set.
Note that we don't define __hash__: not all sets are hashable.
But if you define a hashable set type, its __hash__ should
call this function.
This must be compatible with __eq__.
All sets ought to compare equal if they contain the same
elements, regardless of how they are implemented, and
regardless of the order of the elements; so there's not much
freedom for __eq__ or __hash__. We match the algorithm used
by the built-in frozenset type.
"""
MAX = sys.maxsize
MASK = 2 * MAX + 1
n = len(self)
h = 1927868237 * (n + 1)
h &= MASK
for x in self:
hx = hash(x)
h ^= (hx ^ (hx << 16) ^ 89869747) * 3644798167
h &= MASK
h = h * 69069 + 907133923
h &= MASK
if h > MAX:
h -= MASK + 1
if h == -1:
h = 590923713
return h
Set.register(frozenset)
class MutableSet(Set):
__slots__ = ()
@abstractmethod
def add(self, value):
"""Add an element."""
raise NotImplementedError
@abstractmethod
def discard(self, value):
"""Remove an element. Do not raise an exception if absent."""
raise NotImplementedError
def remove(self, value):
"""Remove an element. If not a member, raise a KeyError."""
if value not in self:
raise KeyError(value)
self.discard(value)
def pop(self):
"""Return the popped value. Raise KeyError if empty."""
it = iter(self)
try:
value = next(it)
except StopIteration:
raise KeyError
self.discard(value)
return value
def clear(self):
"""This is slow (creates N new iterators!) but effective."""
try:
while True:
self.pop()
except KeyError:
pass
def __ior__(self, it):
for value in it:
self.add(value)
return self
def __iand__(self, it):
for value in (self - it):
self.discard(value)
return self
def __ixor__(self, it):
if it is self:
self.clear()
else:
if not isinstance(it, Set):
it = self._from_iterable(it)
for value in it:
if value in self:
self.discard(value)
else:
self.add(value)
return self
def __isub__(self, it):
if it is self:
self.clear()
else:
for value in it:
self.discard(value)
return self
MutableSet.register(set)
### MAPPINGS ###
class Mapping(Sized, Iterable, Container):
__slots__ = ()
@abstractmethod
def __getitem__(self, key):
raise KeyError
def get(self, key, default=None):
try:
return self[key]
except KeyError:
return default
def __contains__(self, key):
try:
self[key]
except KeyError:
return False
else:
return True
def keys(self):
return KeysView(self)
def items(self):
return ItemsView(self)
def values(self):
return ValuesView(self)
def __eq__(self, other):
if not isinstance(other, Mapping):
return NotImplemented
return dict(self.items()) == dict(other.items())
def __ne__(self, other):
return not (self == other)
Mapping.register(mappingproxy)
class MappingView(Sized):
def __init__(self, mapping):
self._mapping = mapping
def __len__(self):
return len(self._mapping)
def __repr__(self):
return '{0.__class__.__name__}({0._mapping!r})'.format(self)
class KeysView(MappingView, Set):
@classmethod
def _from_iterable(self, it):
return set(it)
def __contains__(self, key):
return key in self._mapping
def __iter__(self):
for key in self._mapping:
yield key
KeysView.register(dict_keys)
class ItemsView(MappingView, Set):
@classmethod
def _from_iterable(self, it):
return set(it)
def __contains__(self, item):
key, value = item
try:
v = self._mapping[key]
except KeyError:
return False
else:
return v == value
def __iter__(self):
for key in self._mapping:
yield (key, self._mapping[key])
ItemsView.register(dict_items)
class ValuesView(MappingView):
def __contains__(self, value):
for key in self._mapping:
if value == self._mapping[key]:
return True
return False
def __iter__(self):
for key in self._mapping:
yield self._mapping[key]
ValuesView.register(dict_values)
class MutableMapping(Mapping):
__slots__ = ()
@abstractmethod
def __setitem__(self, key, value):
raise KeyError
@abstractmethod
def __delitem__(self, key):
raise KeyError
__marker = object()
def pop(self, key, default=__marker):
try:
value = self[key]
except KeyError:
if default is self.__marker:
raise
return default
else:
del self[key]
return value
def popitem(self):
try:
key = next(iter(self))
except StopIteration:
raise KeyError
value = self[key]
del self[key]
return key, value
def clear(self):
try:
while True:
self.popitem()
except KeyError:
pass
def update(*args, **kwds):
if len(args) > 2:
raise TypeError("update() takes at most 2 positional "
"arguments ({} given)".format(len(args)))
elif not args:
raise TypeError("update() takes at least 1 argument (0 given)")
self = args[0]
other = args[1] if len(args) >= 2 else ()
if isinstance(other, Mapping):
for key in other:
self[key] = other[key]
elif hasattr(other, "keys"):
for key in other.keys():
self[key] = other[key]
else:
for key, value in other:
self[key] = value
for key, value in kwds.items():
self[key] = value
def setdefault(self, key, default=None):
try:
return self[key]
except KeyError:
self[key] = default
return default
MutableMapping.register(dict)
### SEQUENCES ###
class Sequence(Sized, Iterable, Container):
"""All the operations on a read-only sequence.
Concrete subclasses must override __new__ or __init__,
__getitem__, and __len__.
"""
__slots__ = ()
@abstractmethod
def __getitem__(self, index):
raise IndexError
def __iter__(self):
i = 0
try:
while True:
v = self[i]
yield v
i += 1
except IndexError:
return
def __contains__(self, value):
for v in self:
if v == value:
return True
return False
def __reversed__(self):
for i in reversed(range(len(self))):
yield self[i]
def index(self, value):
for i, v in enumerate(self):
if v == value:
return i
raise ValueError
def count(self, value):
return sum(1 for v in self if v == value)
Sequence.register(tuple)
Sequence.register(str)
Sequence.register(range)
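# Editorial sketch (not part of the original module): a concrete Sequence
# only has to supply __getitem__ and __len__; the mixin methods above then
# provide iteration, membership tests, index() and count() for free.
class _SquaresExample(Sequence):
    def __init__(self, n):
        self._n = n
    def __len__(self):
        return self._n
    def __getitem__(self, index):
        # only simple non-negative integer indexing is supported here
        if not 0 <= index < self._n:
            raise IndexError(index)
        return index * index
assert list(_SquaresExample(4)) == [0, 1, 4, 9]
assert 9 in _SquaresExample(4) and _SquaresExample(4).index(4) == 2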
class ByteString(Sequence):
"""This unifies bytes and bytearray.
XXX Should add all their methods.
"""
__slots__ = ()
ByteString.register(bytes)
ByteString.register(bytearray)
class MutableSequence(Sequence):
__slots__ = ()
@abstractmethod
def __setitem__(self, index, value):
raise IndexError
@abstractmethod
def __delitem__(self, index):
raise IndexError
@abstractmethod
def insert(self, index, value):
raise IndexError
def append(self, value):
self.insert(len(self), value)
def clear(self):
try:
while True:
self.pop()
except IndexError:
pass
def reverse(self):
n = len(self)
for i in range(n//2):
self[i], self[n-i-1] = self[n-i-1], self[i]
def extend(self, values):
for v in values:
self.append(v)
def pop(self, index=-1):
v = self[index]
del self[index]
return v
def remove(self, value):
del self[self.index(value)]
def __iadd__(self, values):
self.extend(values)
return self
MutableSequence.register(list)
MutableSequence.register(bytearray) # Multiply inheriting, see ByteString
| gpl-2.0 |
pexip/os-chardet | chardet/langhungarianmodel.py | 269 | 12592 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
# 255: Control characters that usually do not exist in any text
# 254: Carriage Return / Line Feed
# 253: symbols (punctuation) that do not belong to a word
# 252: 0 - 9
# Character Mapping Table:
Latin2_HungarianCharToOrderMap = (
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
253, 28, 40, 54, 45, 32, 50, 49, 38, 39, 53, 36, 41, 34, 35, 47,
46, 71, 43, 33, 37, 57, 48, 64, 68, 55, 52,253,253,253,253,253,
253, 2, 18, 26, 17, 1, 27, 12, 20, 9, 22, 7, 6, 13, 4, 8,
23, 67, 10, 5, 3, 21, 19, 65, 62, 16, 11,253,253,253,253,253,
159,160,161,162,163,164,165,166,167,168,169,170,171,172,173,174,
175,176,177,178,179,180,181,182,183,184,185,186,187,188,189,190,
191,192,193,194,195,196,197, 75,198,199,200,201,202,203,204,205,
79,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,
221, 51, 81,222, 78,223,224,225,226, 44,227,228,229, 61,230,231,
232,233,234, 58,235, 66, 59,236,237,238, 60, 69, 63,239,240,241,
82, 14, 74,242, 70, 80,243, 72,244, 15, 83, 77, 84, 30, 76, 85,
245,246,247, 25, 73, 42, 24,248,249,250, 31, 56, 29,251,252,253,
)
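# Editorial illustration (not part of the upstream tables): a Latin-2 byte
# value indexes directly into the map above, yielding a frequency order
# (lower order = more frequent letter in the Hungarian training corpus).
# For example, the byte for 'e' (0x65) maps to order 1:
assert Latin2_HungarianCharToOrderMap[ord('e')] == 1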
win1250HungarianCharToOrderMap = (
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
253, 28, 40, 54, 45, 32, 50, 49, 38, 39, 53, 36, 41, 34, 35, 47,
46, 72, 43, 33, 37, 57, 48, 64, 68, 55, 52,253,253,253,253,253,
253, 2, 18, 26, 17, 1, 27, 12, 20, 9, 22, 7, 6, 13, 4, 8,
23, 67, 10, 5, 3, 21, 19, 65, 62, 16, 11,253,253,253,253,253,
161,162,163,164,165,166,167,168,169,170,171,172,173,174,175,176,
177,178,179,180, 78,181, 69,182,183,184,185,186,187,188,189,190,
191,192,193,194,195,196,197, 76,198,199,200,201,202,203,204,205,
81,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,
221, 51, 83,222, 80,223,224,225,226, 44,227,228,229, 61,230,231,
232,233,234, 58,235, 66, 59,236,237,238, 60, 70, 63,239,240,241,
84, 14, 75,242, 71, 82,243, 73,244, 15, 85, 79, 86, 30, 77, 87,
245,246,247, 25, 74, 42, 24,248,249,250, 31, 56, 29,251,252,253,
)
# Model Table:
# total sequences: 100%
# first 512 sequences: 94.7368%
# first 1024 sequences: 5.2623%
# rest sequences: 0.8894%
# negative sequences: 0.0009%
HungarianLangModel = (
0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,
3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,2,2,3,3,1,1,2,2,2,2,2,1,2,
3,2,2,3,3,3,3,3,2,3,3,3,3,3,3,1,2,3,3,3,3,2,3,3,1,1,3,3,0,1,1,1,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,
3,2,1,3,3,3,3,3,2,3,3,3,3,3,1,1,2,3,3,3,3,3,3,3,1,1,3,2,0,1,1,1,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,3,3,3,1,1,2,3,3,3,1,3,3,3,3,3,1,3,3,2,2,0,3,2,3,
0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,
3,3,3,3,3,3,2,3,3,3,2,3,3,2,3,3,3,3,3,2,3,3,2,2,3,2,3,2,0,3,2,2,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,
3,3,3,3,3,3,2,3,3,3,3,3,2,3,3,3,1,2,3,2,2,3,1,2,3,3,2,2,0,3,3,3,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,3,3,2,2,3,3,3,3,3,3,2,3,3,3,3,2,3,3,3,3,0,2,3,2,
0,0,0,1,1,0,0,0,0,0,3,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,3,3,3,1,1,1,3,3,2,1,3,2,2,3,2,1,3,2,2,1,0,3,3,1,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
3,2,2,3,3,3,3,3,1,2,3,3,3,3,1,2,1,3,3,3,3,2,2,3,1,1,3,2,0,1,1,1,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,2,2,3,3,3,3,3,2,1,3,3,3,3,3,2,2,1,3,3,3,0,1,1,2,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,2,3,3,2,3,3,3,2,0,3,2,3,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,1,0,
3,3,3,3,3,3,2,3,3,3,2,3,2,3,3,3,1,3,2,2,2,3,1,1,3,3,1,1,0,3,3,2,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,2,3,3,3,2,3,2,3,3,3,2,3,3,3,3,3,1,2,3,2,2,0,2,2,2,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
3,3,3,2,2,2,3,1,3,3,2,2,1,3,3,3,1,1,3,1,2,3,2,3,2,2,2,1,0,2,2,2,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,
3,1,1,3,3,3,3,3,1,2,3,3,3,3,1,2,1,3,3,3,2,2,3,2,1,0,3,2,0,1,1,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,1,1,3,3,3,3,3,1,2,3,3,3,3,1,1,0,3,3,3,3,0,2,3,0,0,2,1,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,2,2,3,3,2,2,2,2,3,3,0,1,2,3,2,3,2,2,3,2,1,2,0,2,2,2,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,
3,3,3,3,3,3,1,2,3,3,3,2,1,2,3,3,2,2,2,3,2,3,3,1,3,3,1,1,0,2,3,2,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
3,3,3,1,2,2,2,2,3,3,3,1,1,1,3,3,1,1,3,1,1,3,2,1,2,3,1,1,0,2,2,2,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
3,3,3,2,1,2,1,1,3,3,1,1,1,1,3,3,1,1,2,2,1,2,1,1,2,2,1,1,0,2,2,1,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
3,3,3,1,1,2,1,1,3,3,1,0,1,1,3,3,2,0,1,1,2,3,1,0,2,2,1,0,0,1,3,2,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
3,2,1,3,3,3,3,3,1,2,3,2,3,3,2,1,1,3,2,3,2,1,2,2,0,1,2,1,0,0,1,1,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
3,3,3,3,2,2,2,2,3,1,2,2,1,1,3,3,0,3,2,1,2,3,2,1,3,3,1,1,0,2,1,3,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
3,3,3,2,2,2,3,2,3,3,3,2,1,1,3,3,1,1,1,2,2,3,2,3,2,2,2,1,0,2,2,1,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
1,0,0,3,3,3,3,3,0,0,3,3,2,3,0,0,0,2,3,3,1,0,1,2,0,0,1,1,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,1,2,3,3,3,3,3,1,2,3,3,2,2,1,1,0,3,3,2,2,1,2,2,1,0,2,2,0,1,1,1,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,2,2,1,3,1,2,3,3,2,2,1,1,2,2,1,1,1,1,3,2,1,1,1,1,2,1,0,1,2,1,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,
2,3,3,1,1,1,1,1,3,3,3,0,1,1,3,3,1,1,1,1,1,2,2,0,3,1,1,2,0,2,1,1,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,
3,1,0,1,2,1,2,2,0,1,2,3,1,2,0,0,0,2,1,1,1,1,1,2,0,0,1,1,0,0,0,0,
1,2,1,2,2,2,1,2,1,2,0,2,0,2,2,1,1,2,1,1,2,1,1,1,0,1,0,0,0,1,1,0,
1,1,1,2,3,2,3,3,0,1,2,2,3,1,0,1,0,2,1,2,2,0,1,1,0,0,1,1,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,0,0,3,3,2,2,1,0,0,3,2,3,2,0,0,0,1,1,3,0,0,1,1,0,0,2,1,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,1,1,2,2,3,3,1,0,1,3,2,3,1,1,1,0,1,1,1,1,1,3,1,0,0,2,2,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,1,1,1,2,2,2,1,0,1,2,3,3,2,0,0,0,2,1,1,1,2,1,1,1,0,1,1,1,0,0,0,
1,2,2,2,2,2,1,1,1,2,0,2,1,1,1,1,1,2,1,1,1,1,1,1,0,1,1,1,0,0,1,1,
3,2,2,1,0,0,1,1,2,2,0,3,0,1,2,1,1,0,0,1,1,1,0,1,1,1,1,0,2,1,1,1,
2,2,1,1,1,2,1,2,1,1,1,1,1,1,1,2,1,1,1,2,3,1,1,1,1,1,1,1,1,1,0,1,
2,3,3,0,1,0,0,0,3,3,1,0,0,1,2,2,1,0,0,0,0,2,0,0,1,1,1,0,2,1,1,1,
2,1,1,1,1,1,1,2,1,1,0,1,1,0,1,1,1,0,1,2,1,1,0,1,1,1,1,1,1,1,0,1,
2,3,3,0,1,0,0,0,2,2,0,0,0,0,1,2,2,0,0,0,0,1,0,0,1,1,0,0,2,0,1,0,
2,1,1,1,1,2,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,1,1,2,0,1,1,1,1,1,0,1,
3,2,2,0,1,0,1,0,2,3,2,0,0,1,2,2,1,0,0,1,1,1,0,0,2,1,0,1,2,2,1,1,
2,1,1,1,1,1,1,2,1,1,1,1,1,1,0,2,1,0,1,1,0,1,1,1,0,1,1,2,1,1,0,1,
2,2,2,0,0,1,0,0,2,2,1,1,0,0,2,1,1,0,0,0,1,2,0,0,2,1,0,0,2,1,1,1,
2,1,1,1,1,2,1,2,1,1,1,2,2,1,1,2,1,1,1,2,1,1,1,1,1,1,1,1,1,1,0,1,
1,2,3,0,0,0,1,0,3,2,1,0,0,1,2,1,1,0,0,0,0,2,1,0,1,1,0,0,2,1,2,1,
1,1,0,0,0,1,0,1,1,1,1,1,2,0,0,1,0,0,0,2,0,0,1,1,1,1,1,1,1,1,0,1,
3,0,0,2,1,2,2,1,0,0,2,1,2,2,0,0,0,2,1,1,1,0,1,1,0,0,1,1,2,0,0,0,
1,2,1,2,2,1,1,2,1,2,0,1,1,1,1,1,1,1,1,1,2,1,1,0,0,1,1,1,1,0,0,1,
1,3,2,0,0,0,1,0,2,2,2,0,0,0,2,2,1,0,0,0,0,3,1,1,1,1,0,0,2,1,1,1,
2,1,0,1,1,1,0,1,1,1,1,1,1,1,0,2,1,0,0,1,0,1,1,0,1,1,1,1,1,1,0,1,
2,3,2,0,0,0,1,0,2,2,0,0,0,0,2,1,1,0,0,0,0,2,1,0,1,1,0,0,2,1,1,0,
2,1,1,1,1,2,1,2,1,2,0,1,1,1,0,2,1,1,1,2,1,1,1,1,0,1,1,1,1,1,0,1,
3,1,1,2,2,2,3,2,1,1,2,2,1,1,0,1,0,2,2,1,1,1,1,1,0,0,1,1,0,1,1,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,2,2,0,0,0,0,0,2,2,0,0,0,0,2,2,1,0,0,0,1,1,0,0,1,2,0,0,2,1,1,1,
2,2,1,1,1,2,1,2,1,1,0,1,1,1,1,2,1,1,1,2,1,1,1,1,0,1,2,1,1,1,0,1,
1,0,0,1,2,3,2,1,0,0,2,0,1,1,0,0,0,1,1,1,1,0,1,1,0,0,1,0,0,0,0,0,
1,2,1,2,1,2,1,1,1,2,0,2,1,1,1,0,1,2,0,0,1,1,1,0,0,0,0,0,0,0,0,0,
2,3,2,0,0,0,0,0,1,1,2,1,0,0,1,1,1,0,0,0,0,2,0,0,1,1,0,0,2,1,1,1,
2,1,1,1,1,1,1,2,1,0,1,1,1,1,0,2,1,1,1,1,1,1,0,1,0,1,1,1,1,1,0,1,
1,2,2,0,1,1,1,0,2,2,2,0,0,0,3,2,1,0,0,0,1,1,0,0,1,1,0,1,1,1,0,0,
1,1,0,1,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,2,1,1,1,0,0,1,1,1,0,1,0,1,
2,1,0,2,1,1,2,2,1,1,2,1,1,1,0,0,0,1,1,0,1,1,1,1,0,0,1,1,1,0,0,0,
1,2,2,2,2,2,1,1,1,2,0,2,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,0,1,0,
1,2,3,0,0,0,1,0,2,2,0,0,0,0,2,2,0,0,0,0,0,1,0,0,1,0,0,0,2,0,1,0,
2,1,1,1,1,1,0,2,0,0,0,1,2,1,1,1,1,0,1,2,0,1,0,1,0,1,1,1,0,1,0,1,
2,2,2,0,0,0,1,0,2,1,2,0,0,0,1,1,2,0,0,0,0,1,0,0,1,1,0,0,2,1,0,1,
2,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,2,0,1,1,1,1,1,0,1,
1,2,2,0,0,0,1,0,2,2,2,0,0,0,1,1,0,0,0,0,0,1,1,0,2,0,0,1,1,1,0,1,
1,0,1,1,1,1,1,1,0,1,1,1,1,0,0,1,0,0,1,1,0,1,0,1,1,1,1,1,0,0,0,1,
1,0,0,1,0,1,2,1,0,0,1,1,1,2,0,0,0,1,1,0,1,0,1,1,0,0,1,0,0,0,0,0,
0,2,1,2,1,1,1,1,1,2,0,2,0,1,1,0,1,2,1,0,1,1,1,0,0,0,0,0,0,1,0,0,
2,1,1,0,1,2,0,0,1,1,1,0,0,0,1,1,0,0,0,0,0,1,0,0,1,0,0,0,2,1,0,1,
2,2,1,1,1,1,1,2,1,1,0,1,1,1,1,2,1,1,1,2,1,1,0,1,0,1,1,1,1,1,0,1,
1,2,2,0,0,0,0,0,1,1,0,0,0,0,2,1,0,0,0,0,0,2,0,0,2,2,0,0,2,0,0,1,
2,1,1,1,1,1,1,1,0,1,1,0,1,1,0,1,0,0,0,1,1,1,1,0,0,1,1,1,1,0,0,1,
1,1,2,0,0,3,1,0,2,1,1,1,0,0,1,1,1,0,0,0,1,1,0,0,0,1,0,0,1,0,1,0,
1,2,1,0,1,1,1,2,1,1,0,1,1,1,1,1,0,0,0,1,1,1,1,1,0,1,0,0,0,1,0,0,
2,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,2,0,0,0,
2,1,1,1,1,1,1,1,1,1,0,1,1,1,1,1,1,1,1,1,2,1,1,0,0,1,1,1,1,1,0,1,
2,1,1,1,2,1,1,1,0,1,1,2,1,0,0,0,0,1,1,1,1,0,1,0,0,0,0,1,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,1,0,1,1,1,1,1,0,0,1,1,2,1,0,0,0,1,1,0,0,0,1,1,0,0,1,0,1,0,0,0,
1,2,1,1,1,1,1,1,1,1,0,1,0,1,1,1,1,1,1,0,1,1,1,0,0,0,0,0,0,1,0,0,
2,0,0,0,1,1,1,1,0,0,1,1,0,0,0,0,0,1,1,1,2,0,0,1,0,0,1,0,1,0,0,0,
0,1,1,1,1,1,1,1,1,2,0,1,1,1,1,0,1,1,1,0,1,1,1,0,0,0,0,0,0,0,0,0,
1,0,0,1,1,1,1,1,0,0,2,1,0,1,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0,
0,1,1,1,1,1,1,0,1,1,0,1,0,1,1,0,1,1,0,0,1,1,1,0,0,0,0,0,0,0,0,0,
1,0,0,1,1,1,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,
0,1,1,1,1,1,0,0,1,1,0,1,0,1,0,0,1,1,1,0,1,1,1,0,0,0,0,0,0,0,0,0,
0,0,0,1,0,0,0,0,0,0,1,1,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,1,1,1,0,1,0,0,1,1,0,1,0,1,1,0,1,1,1,0,1,1,1,0,0,0,0,0,0,0,0,0,
2,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,0,0,1,0,0,1,0,1,0,1,1,1,0,0,1,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,0,0,1,1,1,1,0,0,0,1,1,1,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,0,0,0,
0,1,1,1,1,1,1,0,1,1,0,1,0,1,0,0,1,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,
)
Latin2HungarianModel = {
'char_to_order_map': Latin2_HungarianCharToOrderMap,
'precedence_matrix': HungarianLangModel,
'typical_positive_ratio': 0.947368,
'keep_english_letter': True,
'charset_name': "ISO-8859-2",
'language': 'Hungarian',
}
Win1250HungarianModel = {
'char_to_order_map': win1250HungarianCharToOrderMap,
'precedence_matrix': HungarianLangModel,
'typical_positive_ratio': 0.947368,
'keep_english_letter': True,
'charset_name': "windows-1250",
'language': 'Hungarian',
}
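# Editorial usage sketch: chardet feeds these model dicts to its
# single-byte charset prober.  The import path below reflects the package
# layout this file ships in and is shown for orientation only:
#
#     from .sbcharsetprober import SingleByteCharSetProber
#     prober = SingleByteCharSetProber(Win1250HungarianModel)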
| lgpl-2.1 |
alinbalutoiu/tempest | tempest/api/image/v1/test_images.py | 8 | 12353 | # Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from six import moves
from tempest.api.image import base
from tempest.common.utils import data_utils
from tempest import config
from tempest import test
CONF = config.CONF
class CreateRegisterImagesTest(base.BaseV1ImageTest):
"""Here we test the registration and creation of images."""
@test.idempotent_id('3027f8e6-3492-4a11-8575-c3293017af4d')
def test_register_then_upload(self):
# Register, then upload an image
properties = {'prop1': 'val1'}
body = self.create_image(name='New Name',
container_format='bare',
disk_format='raw',
is_public=False,
properties=properties)
self.assertIn('id', body)
image_id = body.get('id')
self.assertEqual('New Name', body.get('name'))
self.assertFalse(body.get('is_public'))
self.assertEqual('queued', body.get('status'))
for key, val in properties.items():
self.assertEqual(val, body.get('properties')[key])
# Now try uploading an image file
# random_bytes() defaults to 1024 bytes, matching the size assertion below
image_file = moves.cStringIO(data_utils.random_bytes())
body = self.client.update_image(image_id, data=image_file)
self.assertIn('size', body)
self.assertEqual(1024, body.get('size'))
@test.idempotent_id('69da74d9-68a9-404b-9664-ff7164ccb0f5')
def test_register_remote_image(self):
# Register a new remote image
body = self.create_image(name='New Remote Image',
container_format='bare',
disk_format='raw', is_public=False,
location=CONF.image.http_image,
properties={'key1': 'value1',
'key2': 'value2'})
self.assertIn('id', body)
self.assertEqual('New Remote Image', body.get('name'))
self.assertFalse(body.get('is_public'))
self.assertEqual('active', body.get('status'))
properties = body.get('properties')
self.assertEqual(properties['key1'], 'value1')
self.assertEqual(properties['key2'], 'value2')
@test.idempotent_id('6d0e13a7-515b-460c-b91f-9f4793f09816')
def test_register_http_image(self):
body = self.create_image(name='New Http Image',
container_format='bare',
disk_format='raw', is_public=False,
copy_from=CONF.image.http_image)
self.assertIn('id', body)
image_id = body.get('id')
self.assertEqual('New Http Image', body.get('name'))
self.assertFalse(body.get('is_public'))
self.client.wait_for_image_status(image_id, 'active')
self.client.show_image(image_id)
@test.idempotent_id('05b19d55-140c-40d0-b36b-fafd774d421b')
def test_register_image_with_min_ram(self):
# Register an image with min ram
properties = {'prop1': 'val1'}
body = self.create_image(name='New_image_with_min_ram',
container_format='bare',
disk_format='raw',
is_public=False,
min_ram=40,
properties=properties)
self.assertIn('id', body)
self.assertEqual('New_image_with_min_ram', body.get('name'))
self.assertFalse(body.get('is_public'))
self.assertEqual('queued', body.get('status'))
self.assertEqual(40, body.get('min_ram'))
for key, val in properties.items():
self.assertEqual(val, body.get('properties')[key])
self.client.delete_image(body['id'])
class ListImagesTest(base.BaseV1ImageTest):
"""
Here we test the listing of image information
"""
@classmethod
def resource_setup(cls):
super(ListImagesTest, cls).resource_setup()
# We add a few images here to test the listing functionality of
# the images API
img1 = cls._create_remote_image('one', 'bare', 'raw')
img2 = cls._create_remote_image('two', 'ami', 'ami')
img3 = cls._create_remote_image('dup', 'bare', 'raw')
img4 = cls._create_remote_image('dup', 'bare', 'raw')
img5 = cls._create_standard_image('1', 'ami', 'ami', 42)
img6 = cls._create_standard_image('2', 'ami', 'ami', 142)
img7 = cls._create_standard_image('33', 'bare', 'raw', 142)
img8 = cls._create_standard_image('33', 'bare', 'raw', 142)
cls.created_set = set(cls.created_images)
# 4 remote images (img1-img4) and 4 standard images (img5-img8)
cls.remote_set = set((img1, img2, img3, img4))
cls.standard_set = set((img5, img6, img7, img8))
# 5x bare, 3x ami
cls.bare_set = set((img1, img3, img4, img7, img8))
cls.ami_set = set((img2, img5, img6))
# 1x with size 42
cls.size42_set = set((img5,))
# 3x with size 142
cls.size142_set = set((img6, img7, img8,))
# dup named
cls.dup_set = set((img3, img4))
@classmethod
def _create_remote_image(cls, name, container_format, disk_format):
"""
Create a new remote image and return the ID of the newly-registered
image
"""
name = 'New Remote Image %s' % name
location = CONF.image.http_image
image = cls.create_image(name=name,
container_format=container_format,
disk_format=disk_format,
is_public=False,
location=location)
image_id = image['id']
return image_id
@classmethod
def _create_standard_image(cls, name, container_format,
disk_format, size):
"""
Create a new standard image and return the ID of the newly-registered
image. The image payload is ``size`` random bytes, so the resulting image
size is controlled by the ``size`` argument.
"""
image_file = moves.cStringIO(data_utils.random_bytes(size))
name = 'New Standard Image %s' % name
image = cls.create_image(name=name,
container_format=container_format,
disk_format=disk_format,
is_public=False, data=image_file)
image_id = image['id']
return image_id
@test.idempotent_id('246178ab-3b33-4212-9a4b-a7fe8261794d')
def test_index_no_params(self):
# Simple test to see all fixture images returned
images_list = self.client.list_images()
image_list = map(lambda x: x['id'], images_list)
for image_id in self.created_images:
self.assertIn(image_id, image_list)
@test.idempotent_id('f1755589-63d6-4468-b098-589820eb4031')
def test_index_disk_format(self):
images_list = self.client.list_images(disk_format='ami')
for image in images_list:
self.assertEqual(image['disk_format'], 'ami')
result_set = set(map(lambda x: x['id'], images_list))
self.assertTrue(self.ami_set <= result_set)
self.assertFalse(self.created_set - self.ami_set <= result_set)
@test.idempotent_id('2143655d-96d9-4bec-9188-8674206b4b3b')
def test_index_container_format(self):
images_list = self.client.list_images(container_format='bare')
for image in images_list:
self.assertEqual(image['container_format'], 'bare')
result_set = set(map(lambda x: x['id'], images_list))
self.assertTrue(self.bare_set <= result_set)
self.assertFalse(self.created_set - self.bare_set <= result_set)
@test.idempotent_id('feb32ac6-22bb-4a16-afd8-9454bb714b14')
def test_index_max_size(self):
images_list = self.client.list_images(size_max=42)
for image in images_list:
self.assertTrue(image['size'] <= 42)
result_set = set(map(lambda x: x['id'], images_list))
self.assertTrue(self.size42_set <= result_set)
self.assertFalse(self.created_set - self.size42_set <= result_set)
@test.idempotent_id('6ffc16d0-4cbf-4401-95c8-4ac63eac34d8')
def test_index_min_size(self):
images_list = self.client.list_images(size_min=142)
for image in images_list:
self.assertTrue(image['size'] >= 142)
result_set = set(map(lambda x: x['id'], images_list))
self.assertTrue(self.size142_set <= result_set)
self.assertFalse(self.size42_set <= result_set)
@test.idempotent_id('e5dc26d9-9aa2-48dd-bda5-748e1445da98')
def test_index_status_active_detail(self):
images_list = self.client.list_images(detail=True,
status='active',
sort_key='size',
sort_dir='desc')
top_size = images_list[0]['size'] # We have non-zero sized images
for image in images_list:
size = image['size']
self.assertTrue(size <= top_size)
top_size = size
self.assertEqual(image['status'], 'active')
@test.idempotent_id('097af10a-bae8-4342-bff4-edf89969ed2a')
def test_index_name(self):
images_list = self.client.list_images(
detail=True,
name='New Remote Image dup')
result_set = set(map(lambda x: x['id'], images_list))
for image in images_list:
self.assertEqual(image['name'], 'New Remote Image dup')
self.assertTrue(self.dup_set <= result_set)
self.assertFalse(self.created_set - self.dup_set <= result_set)
class UpdateImageMetaTest(base.BaseV1ImageTest):
@classmethod
def resource_setup(cls):
super(UpdateImageMetaTest, cls).resource_setup()
cls.image_id = cls._create_standard_image('1', 'ami', 'ami', 42)
@classmethod
def _create_standard_image(cls, name, container_format,
disk_format, size):
"""
Create a new standard image and return the ID of the newly-registered
image.
"""
image_file = moves.cStringIO(data_utils.random_bytes(size))
name = 'New Standard Image %s' % name
image = cls.create_image(name=name,
container_format=container_format,
disk_format=disk_format,
is_public=False, data=image_file,
properties={'key1': 'value1'})
image_id = image['id']
return image_id
@test.idempotent_id('01752c1c-0275-4de3-9e5b-876e44541928')
def test_list_image_metadata(self):
# All metadata key/value pairs for an image should be returned
resp_metadata = self.client.get_image_meta(self.image_id)
expected = {'key1': 'value1'}
self.assertEqual(expected, resp_metadata['properties'])
@test.idempotent_id('d6d7649c-08ce-440d-9ea7-e3dda552f33c')
def test_update_image_metadata(self):
# The metadata for the image should match the updated values
req_metadata = {'key1': 'alt1', 'key2': 'value2'}
metadata = self.client.get_image_meta(self.image_id)
self.assertEqual(metadata['properties'], {'key1': 'value1'})
metadata['properties'].update(req_metadata)
metadata = self.client.update_image(
self.image_id, properties=metadata['properties'])
resp_metadata = self.client.get_image_meta(self.image_id)
expected = {'key1': 'alt1', 'key2': 'value2'}
self.assertEqual(expected, resp_metadata['properties'])
| apache-2.0 |
npe9/depot_tools | third_party/gsutil/gslib/commands/mv.py | 51 | 6213 | # Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from gslib.command import Command
from gslib.command import COMMAND_NAME
from gslib.command import COMMAND_NAME_ALIASES
from gslib.command import CONFIG_REQUIRED
from gslib.command import FILE_URIS_OK
from gslib.command import MAX_ARGS
from gslib.command import MIN_ARGS
from gslib.command import PROVIDER_URIS_OK
from gslib.command import SUPPORTED_SUB_ARGS
from gslib.command import URIS_START_ARG
from gslib.exception import CommandException
from gslib.help_provider import HELP_NAME
from gslib.help_provider import HELP_NAME_ALIASES
from gslib.help_provider import HELP_ONE_LINE_SUMMARY
from gslib.help_provider import HELP_TEXT
from gslib.help_provider import HelpType
from gslib.help_provider import HELP_TYPE
from gslib.util import NO_MAX
_detailed_help_text = ("""
<B>SYNOPSIS</B>
gsutil mv [-p] src_uri dst_uri
- or -
gsutil mv [-p] uri... dst_uri
<B>DESCRIPTION</B>
The gsutil mv command allows you to move data between your local file
system and the cloud, move data within the cloud, and move data between
cloud storage providers. For example, to move all objects from a
bucket to a local directory you could use:
gsutil mv gs://my_bucket dir
Similarly, to move all objects from a local directory to a bucket you could
use:
gsutil mv ./dir gs://my_bucket
<B>RENAMING BUCKET SUBDIRECTORIES</B>
You can use the gsutil mv command to rename subdirectories. For example,
the command:
gsutil mv gs://my_bucket/olddir gs://my_bucket/newdir
would rename all objects and subdirectories under gs://my_bucket/olddir to be
under gs://my_bucket/newdir, otherwise preserving the subdirectory structure.
If you do a rename as specified above and you want to preserve ACLs, you
should use the -p option (see OPTIONS).
Note that when using mv to rename bucket subdirectories you cannot specify
the source URI using wildcards. You need to spell out the complete name:
gsutil mv gs://my_bucket/olddir gs://my_bucket/newdir
If you have a large number of files to move you might want to use the
gsutil -m option, to perform a multi-threaded/multi-processing move:
gsutil -m mv gs://my_bucket/olddir gs://my_bucket/newdir
<B>NON-ATOMIC OPERATION</B>
Unlike the case with many file systems, the gsutil mv command does not
perform a single atomic operation. Rather, it performs a copy from source
to destination followed by removing the source for each object.
<B>OPTIONS</B>
-p Causes ACL to be preserved when moving in the cloud. Note that
this option has performance and cost implications, because it
is essentially performing three requests (getacl, cp, setacl).
(The performance issue can be mitigated to some degree by
using gsutil -m cp to cause multi-threaded/multi-processing
copying.)
""")
class MvCommand(Command):
"""Implementation of gsutil mv command.
Note that there is no atomic rename operation - this command is simply
a shorthand for 'cp' followed by 'rm'.
"""
# Command specification (processed by parent class).
command_spec = {
# Name of command.
COMMAND_NAME : 'mv',
# List of command name aliases.
COMMAND_NAME_ALIASES : ['move', 'ren', 'rename'],
# Min number of args required by this command.
MIN_ARGS : 2,
# Max number of args required by this command, or NO_MAX.
MAX_ARGS : NO_MAX,
# Getopt-style string specifying acceptable sub args.
SUPPORTED_SUB_ARGS : 'pv',
# True if file URIs acceptable for this command.
FILE_URIS_OK : True,
# True if provider-only URIs acceptable for this command.
PROVIDER_URIS_OK : False,
# Index in args of first URI arg.
URIS_START_ARG : 0,
# True if must configure gsutil before running command.
CONFIG_REQUIRED : True,
}
help_spec = {
# Name of command or auxiliary help info for which this help applies.
HELP_NAME : 'mv',
# List of help name aliases.
HELP_NAME_ALIASES : ['move', 'rename'],
# Type of help:
HELP_TYPE : HelpType.COMMAND_HELP,
# One line summary of this help.
HELP_ONE_LINE_SUMMARY : 'Move/rename objects and/or subdirectories',
# The full help text.
HELP_TEXT : _detailed_help_text,
}
# Command entry point.
def RunCommand(self):
# Check each source arg up front, refusing to move a bucket source URI
# (force users to explicitly do that as a separate operation).
for arg_to_check in self.args[0:-1]:
if self.suri_builder.StorageUri(arg_to_check).names_bucket():
raise CommandException('You cannot move a source bucket using the mv '
'command. If you meant to move\nall objects in '
'the bucket, you can use a command like:\n'
'\tgsutil mv %s/* %s' %
(arg_to_check, self.args[-1]))
# Insert command-line opts in front of args so they'll be picked up by cp
# and rm commands (e.g., for -p option). Use undocumented (internal
# use-only) cp -M option, which causes each original object to be deleted
# after successfully copying to its destination, and also causes naming
# behavior consistent with Unix mv naming behavior (see comments in
# _ConstructDstUri in cp.py).
unparsed_args = ['-M']
if self.recursion_requested:
unparsed_args.append('-R')
unparsed_args.extend(self.unparsed_args)
self.command_runner.RunNamedCommand('cp', unparsed_args, self.headers,
self.debug, self.parallel_operations)
return 0
| bsd-3-clause |
knehez/configuration | playbooks/callback_plugins/hipchat_plugin.py | 51 | 9800 | import os
import time
from ansible import utils
try:
import prettytable
except ImportError:
prettytable = None
try:
import hipchat
except ImportError:
hipchat = None
class CallbackModule(object):
"""Send status updates to a HipChat channel during playbook execution.
This plugin makes use of the following environment variables:
HIPCHAT_TOKEN (required): HipChat API token
HIPCHAT_ROOM (optional): HipChat room to post in. Default: ansible
HIPCHAT_FROM (optional): Name to post as. Default: ansible
HIPCHAT_NOTIFY (optional): Add notify flag to important messages ("true" or "false"). Default: true
HIPCHAT_MSG_PREFIX (option): Optional prefix to add to all hipchat messages
HIPCHAT_MSG_COLOR (option): Optional color for hipchat messages
HIPCHAT_CONDENSED (option): Condense the task summary output
Requires:
prettytable
"""
def __init__(self):
self.enabled = "HIPCHAT_TOKEN" in os.environ
if not self.enabled:
return
# make sure we got our imports
if not hipchat:
raise ImportError(
"The hipchat plugin requires the hipchat Python module, "
"which is not installed or was not found."
)
if not prettytable:
raise ImportError(
"The hipchat plugin requires the prettytable Python module, "
"which is not installed or was not found."
)
self.start_time = time.time()
self.task_report = []
self.last_task = None
self.last_task_changed = False
self.last_task_count = 0
self.last_task_delta = 0
self.last_task_start = time.time()
# the environment variable arrives as a string; default to condensed output
self.condensed_task_report = os.getenv('HIPCHAT_CONDENSED', 'true').lower() == 'true'
self.room = os.getenv('HIPCHAT_ROOM', 'ansible')
self.from_name = os.getenv('HIPCHAT_FROM', 'ansible')
self.allow_notify = (os.getenv('HIPCHAT_NOTIFY') != 'false')
try:
self.hipchat_conn = hipchat.HipChat(token=os.getenv('HIPCHAT_TOKEN'))
except Exception as e:
utils.warning("Unable to connect to hipchat: {}".format(e))
self.hipchat_msg_prefix = os.getenv('HIPCHAT_MSG_PREFIX', '')
self.hipchat_msg_color = os.getenv('HIPCHAT_MSG_COLOR', '')
self.printed_playbook = False
self.playbook_name = None
def _send_hipchat(self, message, room=None, from_name=None, color=None, message_format='text'):
if not room:
room = self.room
if not from_name:
from_name = self.from_name
if not color:
color = self.hipchat_msg_color
try:
self.hipchat_conn.message_room(room, from_name, message, color=color, message_format=message_format)
except Exception as e:
utils.warning("Could not submit message to hipchat: {}".format(e))
def _flush_last_task(self):
if self.last_task:
delta = time.time() - self.last_task_start
self.task_report.append(dict(
changed=self.last_task_changed,
count=self.last_task_count,
delta="{:0>.1f}".format(self.last_task_delta),
task=self.last_task))
self.last_task_count = 0
self.last_task_changed = False
self.last_task = None
self.last_task_delta = 0
def _process_message(self, msg, msg_type='STATUS'):
if msg_type == 'OK' and self.last_task:
if msg.get('changed', True):
self.last_task_changed = True
if msg.get('delta', False):
(hour, minute, sec) = msg['delta'].split(':')
# convert the h:m:s delta string to seconds (3600 seconds per hour)
total = float(hour) * 3600 + float(minute) * 60 + float(sec)
self.last_task_delta += total
self.last_task_count += 1
else:
self._flush_last_task()
if msg_type == 'TASK_START':
self.last_task = msg
self.last_task_start = time.time()
elif msg_type == 'FAILED':
self.last_task_start = time.time()
if 'msg' in msg:
self._send_hipchat('/code {}: The ansible run returned the following error:\n\n {}'.format(
self.hipchat_msg_prefix, msg['msg']), color='red', message_format='text')
else:
# move forward the last task start time
self.last_task_start = time.time()
def on_any(self, *args, **kwargs):
pass
def runner_on_failed(self, host, res, ignore_errors=False):
if self.enabled:
self._process_message(res, 'FAILED')
def runner_on_ok(self, host, res):
if self.enabled:
# don't send the setup results
if res['invocation']['module_name'] != "setup":
self._process_message(res, 'OK')
def runner_on_error(self, host, msg):
if self.enabled:
self._process_message(msg, 'ERROR')
def runner_on_skipped(self, host, item=None):
if self.enabled:
self._process_message(item, 'SKIPPED')
def runner_on_unreachable(self, host, res):
pass
def runner_on_no_hosts(self):
pass
def runner_on_async_poll(self, host, res, jid, clock):
if self.enabled:
self._process_message(res, 'ASYNC_POLL')
def runner_on_async_ok(self, host, res, jid):
if self.enabled:
self._process_message(res, 'ASYNC_OK')
def runner_on_async_failed(self, host, res, jid):
if self.enabled:
self._process_message(res, 'ASYNC_FAILED')
def playbook_on_start(self):
pass
def playbook_on_notify(self, host, handler):
pass
def playbook_on_no_hosts_matched(self):
pass
def playbook_on_no_hosts_remaining(self):
pass
def playbook_on_task_start(self, name, is_conditional):
if self.enabled:
self._process_message(name, 'TASK_START')
def playbook_on_vars_prompt(self, varname, private=True, prompt=None,
encrypt=None, confirm=False, salt_size=None,
salt=None, default=None):
pass
def playbook_on_setup(self):
pass
def playbook_on_import_for_host(self, host, imported_file):
pass
def playbook_on_not_import_for_host(self, host, missing_file):
pass
def playbook_on_play_start(self, pattern):
if self.enabled:
"""Display Playbook and play start messages"""
self.start_time = time.time()
self.playbook_name, _ = os.path.splitext(os.path.basename(self.play.playbook.filename))
host_list = self.play.playbook.inventory.host_list
inventory = os.path.basename(os.path.realpath(host_list))
subset = self.play.playbook.inventory._subset
msg = "<b>{description}</b>: Starting ansible run for play <b><i>{play}</i></b>".format(description=self.hipchat_msg_prefix, play=self.playbook_name)
if self.play.playbook.only_tags and 'all' not in self.play.playbook.only_tags:
msg = msg + " with tags <b><i>{}</i></b>".format(','.join(self.play.playbook.only_tags))
if subset:
msg = msg + " on hosts <b><i>{}</i></b>".format(','.join(subset))
self._send_hipchat(msg, message_format='html')
def playbook_on_stats(self, stats):
if self.enabled:
self._flush_last_task()
delta = time.time() - self.start_time
self.start_time = time.time()
"""Display info about playbook statistics"""
hosts = sorted(stats.processed.keys())
task_column = '{} - Task'.format(self.hipchat_msg_prefix)
task_summary = prettytable.PrettyTable([task_column, 'Time', 'Count', 'Changed'])
task_summary.align[task_column] = "l"
task_summary.align['Time'] = "r"
task_summary.align['Count'] = "r"
task_summary.align['Changed'] = "r"
for task in self.task_report:
if self.condensed_task_report:
# for the condensed task report skip all tasks
# that are not marked as changed and that have
# a time delta less than 1
if not task['changed'] and float(task['delta']) < 1:
continue
task_summary.add_row([task['task'], task['delta'], str(task['count']), str(task['changed'])])
self._send_hipchat("/code " + str(task_summary))
summary_all_host_output = []
for host in hosts:
# bind the per-host summary to its own name so the stats object stays
# intact for the remaining hosts in the loop
host_summary = stats.summarize(host)
summary_output = "<b>{}</b>: <i>{}</i> - ".format(self.hipchat_msg_prefix, host)
for summary_item in ['ok', 'changed', 'unreachable', 'failures']:
if host_summary[summary_item] != 0:
summary_output += "<b>{}</b> - {} ".format(summary_item, host_summary[summary_item])
summary_all_host_output.append(summary_output)
self._send_hipchat("<br />".join(summary_all_host_output), message_format='html')
msg = "<b>{description}</b>: Finished Ansible run for <b><i>{play}</i> in {min:02} minutes, {sec:02} seconds</b><br /><br />".format(
description=self.hipchat_msg_prefix,
play=self.playbook_name,
min=int(delta / 60),
sec=int(delta % 60))
self._send_hipchat(msg, message_format='html')
| agpl-3.0 |
CTSRD-SOAAP/chromium-42.0.2311.135 | v8/tools/release/auto_roll.py | 1 | 4496 | #!/usr/bin/env python
# Copyright 2014 the V8 project authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import argparse
import json
import os
import sys
import urllib
from common_includes import *
import chromium_roll
class CheckActiveRoll(Step):
MESSAGE = "Check active roll."
@staticmethod
def ContainsChromiumRoll(changes):
for change in changes:
if change["subject"].startswith("Update V8 to"):
return True
return False
def RunStep(self):
params = {
"closed": 3,
"owner": self._options.author,
"limit": 30,
"format": "json",
}
params = urllib.urlencode(params)
search_url = "https://codereview.chromium.org/search"
result = self.ReadURL(search_url, params, wait_plan=[5, 20])
if self.ContainsChromiumRoll(json.loads(result)["results"]):
print "Stop due to existing Chromium roll."
return True
class DetectLastRoll(Step):
MESSAGE = "Detect commit ID of the last Chromium roll."
def RunStep(self):
# The revision that should be rolled.
latest_release = self.GetLatestRelease()
# Interpret the DEPS file to retrieve the v8 revision.
# TODO(machenbach): This should be part or the roll-deps api of
# depot_tools.
Var = lambda var: '%s'
exec(FileToText(os.path.join(self._options.chromium, "DEPS")))
# The revision rolled last.
self["last_roll"] = vars['v8_revision']
# TODO(machenbach): It is possible that the auto-push script made a new
# fast-forward release (e.g. 4.2.3) while somebody patches the last
# candidate (e.g. 4.2.2.1). In this case, the auto-roller would pick
# the fast-forward release. Should there be a way to prioritize the
# patched version?
if latest_release == self["last_roll"]:
# We always try to roll if the latest revision is not the revision in
# chromium.
print("There is no newer v8 revision than the one in Chromium (%s)."
% self["last_roll"])
return True
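# Editorial reference (invented excerpt, not the real file): the DEPS file
# interpreted above pins V8 with an entry of the form
#
#   vars = {
#     'v8_revision': '4ad1b5a63af57d8bc43fd9bd63b77df49e3291c5',
#   }
#
# which is what makes vars['v8_revision'] available after the exec() call.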
class CheckClusterFuzz(Step):
MESSAGE = "Check ClusterFuzz api for new problems."
def RunStep(self):
if not os.path.exists(self.Config("CLUSTERFUZZ_API_KEY_FILE")):
print "Skipping ClusterFuzz check. No api key file found."
return False
api_key = FileToText(self.Config("CLUSTERFUZZ_API_KEY_FILE"))
# Check for open, reproducible issues that have no associated bug.
result = self._side_effect_handler.ReadClusterFuzzAPI(
api_key, job_type="linux_asan_d8_dbg", reproducible="True",
open="True", bug_information="",
revision_greater_or_equal=str(self["last_push"]))
if result:
print "Stop due to pending ClusterFuzz issues."
return True
class RollChromium(Step):
MESSAGE = "Roll V8 into Chromium."
def RunStep(self):
if self._options.roll:
args = [
"--author", self._options.author,
"--reviewer", self._options.reviewer,
"--chromium", self._options.chromium,
"--last-roll", self["last_roll"],
"--use-commit-queue",
]
if self._options.sheriff:
args.extend([
"--sheriff", "--googlers-mapping", self._options.googlers_mapping])
if self._options.dry_run:
args.extend(["--dry-run"])
if self._options.work_dir:
args.extend(["--work-dir", self._options.work_dir])
self._side_effect_handler.Call(chromium_roll.ChromiumRoll().Run, args)
class AutoRoll(ScriptsBase):
def _PrepareOptions(self, parser):
parser.add_argument("-c", "--chromium", required=True,
help=("The path to your Chromium src/ "
"directory to automate the V8 roll."))
parser.add_argument("--roll", help="Call Chromium roll script.",
default=False, action="store_true")
def _ProcessOptions(self, options): # pragma: no cover
if not options.reviewer:
print "A reviewer (-r) is required."
return False
if not options.author:
print "An author (-a) is required."
return False
return True
def _Config(self):
return {
"PERSISTFILE_BASENAME": "/tmp/v8-auto-roll-tempfile",
"CLUSTERFUZZ_API_KEY_FILE": ".cf_api_key",
}
def _Steps(self):
return [
CheckActiveRoll,
DetectLastRoll,
CheckClusterFuzz,
RollChromium,
]
if __name__ == "__main__": # pragma: no cover
sys.exit(AutoRoll().Run())
| bsd-3-clause |
nedlowe/amaas-core-sdk-python | amaascore/assets/automobile.py | 3 | 7568 | from datetime import date
from dateutil.parser import parse
from amaascore.assets.real_asset import RealAsset
from amaascore.assets.enums import *
class Automobile(RealAsset):
def __init__(self, asset_manager_id, asset_id, client_id, asset_issuer_id=None,
country_id=None, display_name='', description='',
venue_id=None, issue_date=None, maturity_date=date.max, comments=None,
links=None, references=None, additional=None, currency=None,
asset_status='Active', model_year=None,
vehicle_id=None, make=None, model=None, color=None,
style=None, genre=None, rarity=None,
condition=None, imported=None, imported_country_id=None,
engine_id=None, chassis_id=None, steering=None, gearbox=None, gears=None,
drive=None, engine_cap=None, power=None, weight=None,
speed=None, fuel_type=None, petrol_grade=None, *args, **kwargs):
super(Automobile, self).__init__(asset_manager_id=asset_manager_id,
asset_id=asset_id, client_id=client_id,
asset_issuer_id=asset_issuer_id, asset_status=asset_status,
display_name=display_name,
description=description, country_id=country_id,
venue_id=venue_id, issue_date=issue_date,
currency=currency, maturity_date=maturity_date,
comments=comments, links=links, references=references,
additional=additional,
*args, **kwargs)
self.model_year = model_year
self.vehicle_id = vehicle_id
self.make = make
self.model = model
self.color = color
self.style = style
self.genre = genre
self.rarity = rarity
self.condition = condition
self.imported = imported
self.imported_country_id = imported_country_id
self.engine_id = engine_id
self.chassis_id = chassis_id
self.steering = steering
self.gearbox = gearbox
self.gears = gears
self.drive = drive
self.engine_cap = engine_cap
self.power = power
self.weight = weight
self.speed = speed
self.fuel_type = fuel_type
self.petrol_grade = petrol_grade
@property
def model_year(self):
return self._model_year
@model_year.setter
def model_year(self, model_year):
if isinstance(model_year, str):
model_year = int(model_year)
self._model_year = model_year
@property
def make(self):
return self._make
@make.setter
def make(self, make):
if not make:
self._make = None
elif make in CAR_MAKE_RECORD:
self._make = make
else:
raise ValueError("Invalid input for car make: %s" % make)
@property
def color(self):
return self._color
@color.setter
def color(self, color):
if not color:
self._color = None
elif color in CAR_COLOR_RECORD:
self._color = color
else:
raise ValueError("Invalid input for car color: %s" % color)
@property
def style(self):
return self._style
@style.setter
def style(self, style):
if not style:
self._style = None
elif style in set(CAR_STYLE.keys()):
self._style = style
elif style in set(CAR_STYLE.values()):
self._style = list(CAR_STYLE.keys())[list(CAR_STYLE.values()).index(style)]
else:
raise ValueError("Invalid input for car style: %s" % style)
@property
def genre(self):
return self._genre
@genre.setter
def genre(self, genre):
if not genre:
self._genre = None
elif genre in set(CAR_GENRE.keys()):
self._genre = genre
elif genre in set(CAR_GENRE.values()):
self._genre = list(CAR_GENRE.keys())[list(CAR_GENRE.values()).index(genre)]
else:
raise ValueError("Invalid input for car genre: %s" % genre)
@property
def rarity(self):
return self._rarity
@rarity.setter
def rarity(self, rarity):
if not rarity:
self._rarity = None
elif rarity in CAR_RARITY:
self._rarity = rarity
else:
raise ValueError("Invalid input for car rarity: %s" % rarity)
@property
def condition(self):
return self._condition
@condition.setter
def condition(self, condition):
if not condition:
self._condition = None
elif condition in set(CAR_CONDITION.keys()):
self._condition = condition
elif condition in set(CAR_CONDITION.values()):
self._condition = list(CAR_CONDITION.keys())[list(CAR_CONDITION.values()).index(condition)]
else:
raise ValueError("Invalid input for car condition: %s" % condition)
@property
def imported(self):
return self._imported
@imported.setter
def imported(self, imported):
# explicit None check so an explicit False value is preserved
if imported is None:
self._imported = None
else:
if imported not in [True, False]:
raise ValueError("imported should be True or False: %s" % imported)
self._imported = imported
@property
def steering(self):
return self._steering
@steering.setter
def steering(self, steering):
if not steering:
self._steering = None
else:
if steering not in ['LHD', 'RHD']:
raise ValueError("Invalid steering LHD/RHD: %s" % steering)
self._steering = steering
@property
def gearbox(self):
return self._gearbox
@gearbox.setter
def gearbox(self, gearbox):
if not gearbox:
self._gearbox = None
else:
if gearbox not in ['A', 'M']:
raise ValueError("Invalid gearbox A/M: %s" % gearbox)
self._gearbox = gearbox
@property
def drive(self):
return self._drive
@drive.setter
def drive(self, drive):
if not drive:
self._drive = None
else:
if drive not in ['RWD', 'FWD', 'AWD']:
raise ValueError("Invalid drive RWD/FWD/AWD: %s" % drive)
self._drive = drive
@property
def fuel_type(self):
return self._fuel_type
@fuel_type.setter
def fuel_type(self, fuel_type):
if not fuel_type:
self._fuel_type = None
else:
if fuel_type not in ['Petrol', 'Diesel']:
raise ValueError("Invalid fuel_type Petrol/Diesel: %s" % fuel_type)
self._fuel_type = fuel_type
@property
def petrol_grade(self):
return self._petrol_grade
@petrol_grade.setter
def petrol_grade(self, petrol_grade):
if not petrol_grade:
self._petrol_grade = None
else:
if isinstance(petrol_grade, str):
petrol_grade = int(float(petrol_grade))
if petrol_grade in set(CAR_PETROL_GRADE.keys()):
self._petrol_grade = CAR_PETROL_GRADE[petrol_grade]
else:
raise ValueError("Invalid data type for petrol_grade (87~94): %s" % petrol_grade)
| apache-2.0 |
vertcoin/vertcoin | test/util/rpcauth-test.py | 71 | 1719 | #!/usr/bin/env python3
# Copyright (c) 2015-2018 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test share/rpcauth/rpcauth.py
"""
import base64
import configparser
import hmac
import importlib
import os
import sys
import unittest
class TestRPCAuth(unittest.TestCase):
def setUp(self):
config = configparser.ConfigParser()
config_path = os.path.abspath(
os.path.join(os.sep, os.path.abspath(os.path.dirname(__file__)),
"../config.ini"))
with open(config_path, encoding="utf8") as config_file:
config.read_file(config_file)
sys.path.insert(0, os.path.dirname(config['environment']['RPCAUTH']))
self.rpcauth = importlib.import_module('rpcauth')
def test_generate_salt(self):
for i in range(16, 32 + 1):
self.assertEqual(len(self.rpcauth.generate_salt(i)), i * 2)
def test_generate_password(self):
password = self.rpcauth.generate_password()
expected_password = base64.urlsafe_b64encode(
base64.urlsafe_b64decode(password)).decode('utf-8')
self.assertEqual(expected_password, password)
def test_check_password_hmac(self):
salt = self.rpcauth.generate_salt(16)
password = self.rpcauth.generate_password()
password_hmac = self.rpcauth.password_to_hmac(salt, password)
m = hmac.new(bytearray(salt, 'utf-8'),
bytearray(password, 'utf-8'), 'SHA256')
expected_password_hmac = m.hexdigest()
self.assertEqual(expected_password_hmac, password_hmac)
if __name__ == '__main__':
unittest.main()
| mit |
heytcass/homeassistant-config | deps/google/protobuf/internal/descriptor_pool_test2_pb2.py | 7 | 11886 | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/protobuf/internal/descriptor_pool_test2.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from google.protobuf.internal import descriptor_pool_test1_pb2 as google_dot_protobuf_dot_internal_dot_descriptor__pool__test1__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='google/protobuf/internal/descriptor_pool_test2.proto',
package='google.protobuf.python.internal',
syntax='proto2',
serialized_pb=_b('\n4google/protobuf/internal/descriptor_pool_test2.proto\x12\x1fgoogle.protobuf.python.internal\x1a\x34google/protobuf/internal/descriptor_pool_test1.proto\"\xef\x06\n\x13\x44\x65scriptorPoolTest3\x12X\n\x0bnested_enum\x18\x01 \x01(\x0e\x32?.google.protobuf.python.internal.DescriptorPoolTest3.NestedEnum:\x02XI\x12Z\n\x0enested_message\x18\x02 \x01(\x0b\x32\x42.google.protobuf.python.internal.DescriptorPoolTest3.NestedMessage\x1a\xf7\x03\n\rNestedMessage\x12\x66\n\x0bnested_enum\x18\x01 \x01(\x0e\x32M.google.protobuf.python.internal.DescriptorPoolTest3.NestedMessage.NestedEnum:\x02PI\x12\x18\n\x0cnested_field\x18\x02 \x01(\t:\x02nu\x12q\n\x13\x64\x65\x65p_nested_message\x18\x03 \x01(\x0b\x32T.google.protobuf.python.internal.DescriptorPoolTest3.NestedMessage.DeepNestedMessage\x1a\xcd\x01\n\x11\x44\x65\x65pNestedMessage\x12y\n\x0bnested_enum\x18\x01 \x01(\x0e\x32_.google.protobuf.python.internal.DescriptorPoolTest3.NestedMessage.DeepNestedMessage.NestedEnum:\x03RHO\x12\x1b\n\x0cnested_field\x18\x02 \x01(\t:\x05sigma\" \n\nNestedEnum\x12\x07\n\x03RHO\x10\x11\x12\t\n\x05SIGMA\x10\x12\"!\n\nNestedEnum\x12\x0b\n\x07OMICRON\x10\x0f\x12\x06\n\x02PI\x10\x10\"\x1c\n\nNestedEnum\x12\x06\n\x02NU\x10\r\x12\x06\n\x02XI\x10\x0e\x32\x89\x01\n\x14\x64\x65scriptor_pool_test\x12\x34.google.protobuf.python.internal.DescriptorPoolTest1\x18\xe9\x07 \x01(\x0b\x32\x34.google.protobuf.python.internal.DescriptorPoolTest3')
,
dependencies=[google_dot_protobuf_dot_internal_dot_descriptor__pool__test1__pb2.DESCRIPTOR,])
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
_DESCRIPTORPOOLTEST3_NESTEDMESSAGE_DEEPNESTEDMESSAGE_NESTEDENUM = _descriptor.EnumDescriptor(
name='NestedEnum',
full_name='google.protobuf.python.internal.DescriptorPoolTest3.NestedMessage.DeepNestedMessage.NestedEnum',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='RHO', index=0, number=17,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='SIGMA', index=1, number=18,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=786,
serialized_end=818,
)
_sym_db.RegisterEnumDescriptor(_DESCRIPTORPOOLTEST3_NESTEDMESSAGE_DEEPNESTEDMESSAGE_NESTEDENUM)
_DESCRIPTORPOOLTEST3_NESTEDMESSAGE_NESTEDENUM = _descriptor.EnumDescriptor(
name='NestedEnum',
full_name='google.protobuf.python.internal.DescriptorPoolTest3.NestedMessage.NestedEnum',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='OMICRON', index=0, number=15,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='PI', index=1, number=16,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=820,
serialized_end=853,
)
_sym_db.RegisterEnumDescriptor(_DESCRIPTORPOOLTEST3_NESTEDMESSAGE_NESTEDENUM)
_DESCRIPTORPOOLTEST3_NESTEDENUM = _descriptor.EnumDescriptor(
name='NestedEnum',
full_name='google.protobuf.python.internal.DescriptorPoolTest3.NestedEnum',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='NU', index=0, number=13,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='XI', index=1, number=14,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=855,
serialized_end=883,
)
_sym_db.RegisterEnumDescriptor(_DESCRIPTORPOOLTEST3_NESTEDENUM)
_DESCRIPTORPOOLTEST3_NESTEDMESSAGE_DEEPNESTEDMESSAGE = _descriptor.Descriptor(
name='DeepNestedMessage',
full_name='google.protobuf.python.internal.DescriptorPoolTest3.NestedMessage.DeepNestedMessage',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='nested_enum', full_name='google.protobuf.python.internal.DescriptorPoolTest3.NestedMessage.DeepNestedMessage.nested_enum', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=True, default_value=17,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='nested_field', full_name='google.protobuf.python.internal.DescriptorPoolTest3.NestedMessage.DeepNestedMessage.nested_field', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=True, default_value=_b("sigma").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
_DESCRIPTORPOOLTEST3_NESTEDMESSAGE_DEEPNESTEDMESSAGE_NESTEDENUM,
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=613,
serialized_end=818,
)
_DESCRIPTORPOOLTEST3_NESTEDMESSAGE = _descriptor.Descriptor(
name='NestedMessage',
full_name='google.protobuf.python.internal.DescriptorPoolTest3.NestedMessage',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='nested_enum', full_name='google.protobuf.python.internal.DescriptorPoolTest3.NestedMessage.nested_enum', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=True, default_value=16,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='nested_field', full_name='google.protobuf.python.internal.DescriptorPoolTest3.NestedMessage.nested_field', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=True, default_value=_b("nu").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='deep_nested_message', full_name='google.protobuf.python.internal.DescriptorPoolTest3.NestedMessage.deep_nested_message', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[_DESCRIPTORPOOLTEST3_NESTEDMESSAGE_DEEPNESTEDMESSAGE, ],
enum_types=[
_DESCRIPTORPOOLTEST3_NESTEDMESSAGE_NESTEDENUM,
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=350,
serialized_end=853,
)
_DESCRIPTORPOOLTEST3 = _descriptor.Descriptor(
name='DescriptorPoolTest3',
full_name='google.protobuf.python.internal.DescriptorPoolTest3',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='nested_enum', full_name='google.protobuf.python.internal.DescriptorPoolTest3.nested_enum', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=True, default_value=14,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='nested_message', full_name='google.protobuf.python.internal.DescriptorPoolTest3.nested_message', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
_descriptor.FieldDescriptor(
name='descriptor_pool_test', full_name='google.protobuf.python.internal.DescriptorPoolTest3.descriptor_pool_test', index=0,
number=1001, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=True, extension_scope=None,
options=None),
],
nested_types=[_DESCRIPTORPOOLTEST3_NESTEDMESSAGE, ],
enum_types=[
_DESCRIPTORPOOLTEST3_NESTEDENUM,
],
options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=144,
serialized_end=1023,
)
_DESCRIPTORPOOLTEST3_NESTEDMESSAGE_DEEPNESTEDMESSAGE.fields_by_name['nested_enum'].enum_type = _DESCRIPTORPOOLTEST3_NESTEDMESSAGE_DEEPNESTEDMESSAGE_NESTEDENUM
_DESCRIPTORPOOLTEST3_NESTEDMESSAGE_DEEPNESTEDMESSAGE.containing_type = _DESCRIPTORPOOLTEST3_NESTEDMESSAGE
_DESCRIPTORPOOLTEST3_NESTEDMESSAGE_DEEPNESTEDMESSAGE_NESTEDENUM.containing_type = _DESCRIPTORPOOLTEST3_NESTEDMESSAGE_DEEPNESTEDMESSAGE
_DESCRIPTORPOOLTEST3_NESTEDMESSAGE.fields_by_name['nested_enum'].enum_type = _DESCRIPTORPOOLTEST3_NESTEDMESSAGE_NESTEDENUM
_DESCRIPTORPOOLTEST3_NESTEDMESSAGE.fields_by_name['deep_nested_message'].message_type = _DESCRIPTORPOOLTEST3_NESTEDMESSAGE_DEEPNESTEDMESSAGE
_DESCRIPTORPOOLTEST3_NESTEDMESSAGE.containing_type = _DESCRIPTORPOOLTEST3
_DESCRIPTORPOOLTEST3_NESTEDMESSAGE_NESTEDENUM.containing_type = _DESCRIPTORPOOLTEST3_NESTEDMESSAGE
_DESCRIPTORPOOLTEST3.fields_by_name['nested_enum'].enum_type = _DESCRIPTORPOOLTEST3_NESTEDENUM
_DESCRIPTORPOOLTEST3.fields_by_name['nested_message'].message_type = _DESCRIPTORPOOLTEST3_NESTEDMESSAGE
_DESCRIPTORPOOLTEST3_NESTEDENUM.containing_type = _DESCRIPTORPOOLTEST3
DESCRIPTOR.message_types_by_name['DescriptorPoolTest3'] = _DESCRIPTORPOOLTEST3
DescriptorPoolTest3 = _reflection.GeneratedProtocolMessageType('DescriptorPoolTest3', (_message.Message,), dict(
NestedMessage = _reflection.GeneratedProtocolMessageType('NestedMessage', (_message.Message,), dict(
DeepNestedMessage = _reflection.GeneratedProtocolMessageType('DeepNestedMessage', (_message.Message,), dict(
DESCRIPTOR = _DESCRIPTORPOOLTEST3_NESTEDMESSAGE_DEEPNESTEDMESSAGE,
__module__ = 'google.protobuf.internal.descriptor_pool_test2_pb2'
# @@protoc_insertion_point(class_scope:google.protobuf.python.internal.DescriptorPoolTest3.NestedMessage.DeepNestedMessage)
))
,
DESCRIPTOR = _DESCRIPTORPOOLTEST3_NESTEDMESSAGE,
__module__ = 'google.protobuf.internal.descriptor_pool_test2_pb2'
# @@protoc_insertion_point(class_scope:google.protobuf.python.internal.DescriptorPoolTest3.NestedMessage)
))
,
DESCRIPTOR = _DESCRIPTORPOOLTEST3,
__module__ = 'google.protobuf.internal.descriptor_pool_test2_pb2'
# @@protoc_insertion_point(class_scope:google.protobuf.python.internal.DescriptorPoolTest3)
))
_sym_db.RegisterMessage(DescriptorPoolTest3)
_sym_db.RegisterMessage(DescriptorPoolTest3.NestedMessage)
_sym_db.RegisterMessage(DescriptorPoolTest3.NestedMessage.DeepNestedMessage)
_DESCRIPTORPOOLTEST3.extensions_by_name['descriptor_pool_test'].message_type = _DESCRIPTORPOOLTEST3
google_dot_protobuf_dot_internal_dot_descriptor__pool__test1__pb2.DescriptorPoolTest1.RegisterExtension(_DESCRIPTORPOOLTEST3.extensions_by_name['descriptor_pool_test'])
# @@protoc_insertion_point(module_scope)
| mit |
shedskin/shedskin | examples/c64/sprite.py | 7 | 1556 | #!/usr/bin/env python2
# I, Danny Milosavljevic, hereby place this file into the public domain.
# public: calculate_pixbuf_data
import sys
SPRITE_COUNT = 8
#sprite = open(sys.argv[1], "rb").read()[2:]
WIDTH = 24
HEIGHT = 21
#def from_high_resolution_sprite(sprite, primary_color):
# result = []
# for cell in sprite:
# for item in [
# primary_color if (cell & (1 << (7 - column_i))) != 0
# else [0,0,0,0] for column_i in range(8)]:
# result += item
# return(result)
#def from_multi_color_sprite(sprite, primary_color, multicolor_0, multicolor_1):
# result = []
# #for i in range(8):
# # result += [0xFF,0,0,0xFF]
# masks = [0x03, 0x0C, 0x30, 0xC0]
# colors = [
# [0x00, 0x00, 0x00, 0x00],
# multicolor_0, # $D025
# primary_color, # $D027..$D02E
# multicolor_1, # $D026
# ]
# for cell in sprite:
# for item in reversed([colors[(cell & masks[column_i]) >> (column_i * 2)] for column_i in range(4)]):
# result += item * 2
# return(result)
#data = from_high_resolution_sprite(sprite)
#frame_size = 4 * 64 * 8 # WIDTH * 4 * HEIGHT + 4
#data = from_multi_color_sprite(sprite)
#def calculate_pixbuf_data(sprite_data, primary_color, B_multicolor, multicolor_0, multicolor_1):
# if B_multicolor:
# data = from_multi_color_sprite(map(ord, sprite_data), primary_color, multicolor_0, multicolor_1)
# else:
# data = from_high_resolution_sprite(map(ord, sprite_data), primary_color)
# return("".join(map(chr, data)))
| gpl-3.0 |
DueLaser/OctoPrint | src/octoprint/plugins/softwareupdate/updaters/update_script.py | 19 | 3510 | # coding=utf-8
from __future__ import absolute_import
__author__ = "Gina Häußge <osd@foosel.net>"
__license__ = 'GNU Affero General Public License http://www.gnu.org/licenses/agpl.html'
__copyright__ = "Copyright (C) 2014 The OctoPrint Project - Released under terms of the AGPLv3 License"
import sys
import logging
from ..exceptions import ScriptError, ConfigurationInvalid, UpdateError
from ..util import execute
def can_perform_update(target, check):
import os
script_configured = bool("update_script" in check and check["update_script"])
folder = None
if "update_folder" in check:
folder = check["update_folder"]
elif "checkout_folder" in check:
folder = check["checkout_folder"]
folder_configured = bool(folder and os.path.isdir(folder))
return script_configured and folder_configured
def perform_update(target, check, target_version):
logger = logging.getLogger("octoprint.plugins.softwareupdate.updaters.update_script")
if not can_perform_update(target, check):
raise ConfigurationInvalid("checkout_folder and update_folder are missing for update target %s, one is needed" % target)
update_script = check["update_script"]
folder = check["update_folder"] if "update_folder" in check else check["checkout_folder"]
pre_update_script = check["pre_update_script"] if "pre_update_script" in check else None
post_update_script = check["post_update_script"] if "post_update_script" in check else None
update_stdout = ""
update_stderr = ""
### pre update
if pre_update_script is not None:
logger.debug("Target: %s, running pre-update script: %s" % (target, pre_update_script))
try:
returncode, stdout, stderr = execute(pre_update_script, cwd=folder)
update_stdout += stdout
update_stderr += stderr
except ScriptError as e:
logger.exception("Target: %s, error while executing pre update script, got returncode %r" % (target, e.returncode))
logger.warn("Target: %s, pre-update stdout:\n%s" % (target, e.stdout))
logger.warn("Target: %s, pre-update stderr:\n%s" % (target, e.stderr))
### update
try:
update_command = update_script.format(python=sys.executable, folder=folder, target=target_version)
logger.debug("Target %s, running update script: %s" % (target, update_command))
returncode, stdout, stderr = execute(update_command, cwd=folder)
update_stdout += stdout
update_stderr += stderr
except ScriptError as e:
logger.exception("Target: %s, error while executing update script, got returncode %r" % (target, e.returncode))
logger.warn("Target: %s, update stdout:\n%s" % (target, e.stdout))
logger.warn("Target: %s, update stderr:\n%s" % (target, e.stderr))
raise UpdateError("Error while executing update script for %s", (e.stdout, e.stderr))
### post update
if post_update_script is not None:
logger.debug("Target: %s, running post-update script %s..." % (target, post_update_script))
try:
returncode, stdout, stderr = execute(post_update_script, cwd=folder)
update_stdout += stdout
update_stderr += stderr
except ScriptError as e:
logger.exception("Target: %s, error while executing post update script, got returncode %r" % (target, e.returncode))
logger.warn("Target: %s, post-update stdout:\n%s" % (target, e.stdout))
logger.warn("Target: %s, post-update stderr:\n%s" % (target, e.stderr))
logger.debug("Target: %s, update stdout:\n%s" % (target, update_stdout))
logger.debug("Target: %s, update stderr:\n%s" % (target, update_stderr))
### result
return update_stdout, update_stderr
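# Illustrative check configuration (hypothetical values, sketch only) showing
# the keys consumed by can_perform_update/perform_update above:
# check = {
#     "update_script": "{python} update.py --branch=master --target={target}",
#     "checkout_folder": "/home/pi/OctoPrint",  # or "update_folder"
#     "pre_update_script": "/home/pi/scripts/pre_update.sh",  # optional
#     "post_update_script": "/home/pi/scripts/post_update.sh",  # optional
# }
# perform_update("octoprint", check, "1.2.0") formats the update script with
# {python}, {folder} and {target}, runs the optional pre/post scripts, and
# returns the collected stdout/stderr.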
| gpl-3.0 |
kmcnellis/twitter_nlp | python/twokenize_wrapper.py | 8 | 1647 | #!/homes/gws/aritter/local/bin/python
###############################################################################
# Tokenizes strings using the 'twokenize' module, but also splits up
# contractions, which 'twokenize' fails to do.
#
###############################################################################
import sys
import twokenize
def tokenize(tweet):
tokens = twokenize.tokenize(tweet)
return split_contractions(tokens)
def split_contractions(tokens):
# Fix "n't", "I'm", "'re", "'s", "'ve", "'ll" cases
new_token_list = []
for token in tokens:
new_tk = None
if token[-3:] == 'n\'t':
new_tk = token[:-3]
new_token_list.append('n\'t')
elif token == 'I\'m' or token == 'i\'m':
new_token_list.append('I')
new_token_list.append('\'m')
elif token[-3:] == '\'re':
new_tk = token[:-3]
new_token_list.append('\'re')
elif token[-2:] == '\'s':
new_tk = token[:-2]
new_token_list.append('\'s')
elif token[-3:] == '\'ve':
new_tk = token[:-3]
new_token_list.append('\'ve')
elif token[-3:] == '\'ll':
new_tk = token[:-3]
new_token_list.append('\'ll')
else:
new_token_list.append(token)
# Add new token if one exists
if new_tk:
#sys.stderr.write('Split following contraction: %s\n' % token)
new_token_list.insert(-1, new_tk)
return new_token_list
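# Illustrative behaviour (sketch, derived from the branches above):
# split_contractions(["I'm", "can't", "they'll"])
# -> ['I', "'m", 'ca', "n't", 'they', "'ll"]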
if __name__=='__main__':
for line in sys.stdin:
print u" ".join(tokenize(line[:-1])).encode('utf-8')
| gpl-3.0 |
381426068/MissionPlanner | Lib/site-packages/numpy/dual.py | 87 | 1783 | """
Aliases for functions which may be accelerated by Scipy.
Scipy_ can be built to use accelerated or otherwise improved libraries
for FFTs, linear algebra, and special functions. This module allows
developers to transparently support these accelerated functions when
scipy is available but still support users who have only installed
Numpy.
.. _Scipy : http://www.scipy.org
"""
# This module should be used for functions both in numpy and scipy if
# you want to use the numpy version if available but the scipy version
# otherwise.
# Usage --- from numpy.dual import fft, inv
__all__ = ['fft','ifft','fftn','ifftn','fft2','ifft2',
'norm','inv','svd','solve','det','eig','eigvals',
'eigh','eigvalsh','lstsq', 'pinv','cholesky','i0']
import numpy.linalg as linpkg
import numpy.fft as fftpkg
from numpy.lib import i0
import sys
fft = fftpkg.fft
ifft = fftpkg.ifft
fftn = fftpkg.fftn
ifftn = fftpkg.ifftn
fft2 = fftpkg.fft2
ifft2 = fftpkg.ifft2
norm = linpkg.norm
inv = linpkg.inv
svd = linpkg.svd
solve = linpkg.solve
det = linpkg.det
eig = linpkg.eig
eigvals = linpkg.eigvals
eigh = linpkg.eigh
eigvalsh = linpkg.eigvalsh
lstsq = linpkg.lstsq
pinv = linpkg.pinv
cholesky = linpkg.cholesky
_restore_dict = {}
def register_func(name, func):
if name not in __all__:
raise ValueError, "%s not a dual function." % name
f = sys._getframe(0).f_globals
_restore_dict[name] = f[name]
f[name] = func
def restore_func(name):
if name not in __all__:
raise ValueError, "%s not a dual function." % name
try:
val = _restore_dict[name]
except KeyError:
return
else:
sys._getframe(0).f_globals[name] = val
def restore_all():
for name in _restore_dict.keys():
restore_func(name)
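# Illustrative usage (sketch only; assumes scipy is installed):
# import scipy.linalg
# import numpy.dual as dual
# dual.register_func('inv', scipy.linalg.inv)  # dual.inv now uses scipy
# dual.restore_func('inv')                     # back to numpy.linalg.inv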
| gpl-3.0 |
makielab/django-oscar | oscar/apps/order/migrations/0011_auto__del_field_paymentevent_date__add_field_paymentevent_date_created.py | 16 | 33742 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
from oscar.core.compat import AUTH_USER_MODEL, AUTH_USER_MODEL_NAME
class Migration(SchemaMigration):
def forwards(self, orm):
db.rename_column('order_communicationevent', 'date', 'date_created')
db.rename_column('order_shippingevent', 'date', 'date_created')
db.rename_column('order_paymentevent', 'date', 'date_created')
def backwards(self, orm):
db.rename_column('order_communicationevent', 'date_created', 'date')
db.rename_column('order_shippingevent', 'date_created', 'date')
db.rename_column('order_paymentevent', 'date_created', 'date')
models = {
'address.country': {
'Meta': {'ordering': "('-is_highlighted', 'name')", 'object_name': 'Country'},
'is_highlighted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'is_shipping_country': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'iso_3166_1_a2': ('django.db.models.fields.CharField', [], {'max_length': '2', 'primary_key': 'True'}),
'iso_3166_1_a3': ('django.db.models.fields.CharField', [], {'max_length': '3', 'null': 'True', 'db_index': 'True'}),
'iso_3166_1_numeric': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'db_index': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'printable_name': ('django.db.models.fields.CharField', [], {'max_length': '128'})
},
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
AUTH_USER_MODEL: {
'Meta': {'object_name': AUTH_USER_MODEL_NAME},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'catalogue.attributeentity': {
'Meta': {'object_name': 'AttributeEntity'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255', 'blank': 'True'}),
'type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'entities'", 'to': "orm['catalogue.AttributeEntityType']"})
},
'catalogue.attributeentitytype': {
'Meta': {'object_name': 'AttributeEntityType'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255', 'blank': 'True'})
},
'catalogue.attributeoption': {
'Meta': {'object_name': 'AttributeOption'},
'group': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'options'", 'to': "orm['catalogue.AttributeOptionGroup']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'option': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'catalogue.attributeoptiongroup': {
'Meta': {'object_name': 'AttributeOptionGroup'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'})
},
'catalogue.category': {
'Meta': {'ordering': "['full_name']", 'object_name': 'Category'},
'depth': ('django.db.models.fields.PositiveIntegerField', [], {}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'full_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'numchild': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'path': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255'})
},
'catalogue.option': {
'Meta': {'object_name': 'Option'},
'code': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '128'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'type': ('django.db.models.fields.CharField', [], {'default': "'Required'", 'max_length': '128'})
},
'catalogue.product': {
'Meta': {'ordering': "['-date_created']", 'object_name': 'Product'},
'attributes': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['catalogue.ProductAttribute']", 'through': "orm['catalogue.ProductAttributeValue']", 'symmetrical': 'False'}),
'categories': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['catalogue.Category']", 'through': "orm['catalogue.ProductCategory']", 'symmetrical': 'False'}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'date_updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'db_index': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_discountable': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'variants'", 'null': 'True', 'to': "orm['catalogue.Product']"}),
'product_class': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.ProductClass']", 'null': 'True'}),
'product_options': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['catalogue.Option']", 'symmetrical': 'False', 'blank': 'True'}),
'rating': ('django.db.models.fields.FloatField', [], {'null': 'True'}),
'recommended_products': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['catalogue.Product']", 'symmetrical': 'False', 'through': "orm['catalogue.ProductRecommendation']", 'blank': 'True'}),
'related_products': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'relations'", 'blank': 'True', 'to': "orm['catalogue.Product']"}),
'score': ('django.db.models.fields.FloatField', [], {'default': '0.0', 'db_index': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255'}),
'status': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '128', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'upc': ('django.db.models.fields.CharField', [], {'max_length': '64', 'unique': 'True', 'null': 'True', 'blank': 'True'})
},
'catalogue.productattribute': {
'Meta': {'ordering': "['code']", 'object_name': 'ProductAttribute'},
'code': ('django.db.models.fields.SlugField', [], {'max_length': '128'}),
'entity_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.AttributeEntityType']", 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'option_group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.AttributeOptionGroup']", 'null': 'True', 'blank': 'True'}),
'product_class': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'attributes'", 'null': 'True', 'to': "orm['catalogue.ProductClass']"}),
'required': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'type': ('django.db.models.fields.CharField', [], {'default': "'text'", 'max_length': '20'})
},
'catalogue.productattributevalue': {
'Meta': {'object_name': 'ProductAttributeValue'},
'attribute': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.ProductAttribute']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'attribute_values'", 'to': "orm['catalogue.Product']"}),
'value_boolean': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'value_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'value_entity': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.AttributeEntity']", 'null': 'True', 'blank': 'True'}),
'value_float': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'value_integer': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'value_option': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.AttributeOption']", 'null': 'True', 'blank': 'True'}),
'value_richtext': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'value_text': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'})
},
'catalogue.productcategory': {
'Meta': {'ordering': "['-is_canonical']", 'object_name': 'ProductCategory'},
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.Category']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_canonical': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.Product']"})
},
'catalogue.productclass': {
'Meta': {'ordering': "['name']", 'object_name': 'ProductClass'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'options': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['catalogue.Option']", 'symmetrical': 'False', 'blank': 'True'}),
'requires_shipping': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '128'}),
'track_stock': ('django.db.models.fields.BooleanField', [], {'default': 'True'})
},
'catalogue.productrecommendation': {
'Meta': {'object_name': 'ProductRecommendation'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'primary': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'primary_recommendations'", 'to': "orm['catalogue.Product']"}),
'ranking': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'recommendation': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.Product']"})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'customer.communicationeventtype': {
'Meta': {'object_name': 'CommunicationEventType'},
'category': ('django.db.models.fields.CharField', [], {'default': "u'Order related'", 'max_length': '255'}),
'code': ('django.db.models.fields.SlugField', [], {'max_length': '128'}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'date_updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'email_body_html_template': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'email_body_template': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'email_subject_template': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'sms_template': ('django.db.models.fields.CharField', [], {'max_length': '170', 'blank': 'True'})
},
'order.billingaddress': {
'Meta': {'object_name': 'BillingAddress'},
'country': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['address.Country']"}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'line1': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'line2': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'line3': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'line4': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'postcode': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}),
'search_text': ('django.db.models.fields.CharField', [], {'max_length': '1000'}),
'state': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'})
},
'order.communicationevent': {
'Meta': {'ordering': "['-date_created']", 'object_name': 'CommunicationEvent'},
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'event_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['customer.CommunicationEventType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'order': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'communication_events'", 'to': "orm['order.Order']"})
},
'order.line': {
'Meta': {'object_name': 'Line'},
'est_dispatch_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'line_price_before_discounts_excl_tax': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),
'line_price_before_discounts_incl_tax': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),
'line_price_excl_tax': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),
'line_price_incl_tax': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),
'order': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'lines'", 'to': "orm['order.Order']"}),
'partner': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'order_lines'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['partner.Partner']"}),
'partner_line_notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'partner_line_reference': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'partner_name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'partner_sku': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.Product']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'quantity': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'status': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'unit_cost_price': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '12', 'decimal_places': '2', 'blank': 'True'}),
'unit_price_excl_tax': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '12', 'decimal_places': '2', 'blank': 'True'}),
'unit_price_incl_tax': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '12', 'decimal_places': '2', 'blank': 'True'}),
'unit_retail_price': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '12', 'decimal_places': '2', 'blank': 'True'}),
'upc': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'})
},
'order.lineattribute': {
'Meta': {'object_name': 'LineAttribute'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'line': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'attributes'", 'to': "orm['order.Line']"}),
'option': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'line_attributes'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['catalogue.Option']"}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'order.lineprice': {
'Meta': {'ordering': "('id',)", 'object_name': 'LinePrice'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'line': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'prices'", 'to': "orm['order.Line']"}),
'order': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'line_prices'", 'to': "orm['order.Order']"}),
'price_excl_tax': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),
'price_incl_tax': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),
'quantity': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'shipping_excl_tax': ('django.db.models.fields.DecimalField', [], {'default': '0', 'max_digits': '12', 'decimal_places': '2'}),
'shipping_incl_tax': ('django.db.models.fields.DecimalField', [], {'default': '0', 'max_digits': '12', 'decimal_places': '2'})
},
'order.order': {
'Meta': {'ordering': "['-date_placed']", 'object_name': 'Order'},
'basket_id': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'billing_address': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['order.BillingAddress']", 'null': 'True', 'blank': 'True'}),
'date_placed': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}),
'guest_email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'number': ('django.db.models.fields.CharField', [], {'max_length': '128', 'db_index': 'True'}),
'shipping_address': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['order.ShippingAddress']", 'null': 'True', 'blank': 'True'}),
'shipping_excl_tax': ('django.db.models.fields.DecimalField', [], {'default': '0', 'max_digits': '12', 'decimal_places': '2'}),
'shipping_incl_tax': ('django.db.models.fields.DecimalField', [], {'default': '0', 'max_digits': '12', 'decimal_places': '2'}),
'shipping_method': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'site': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sites.Site']"}),
'status': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'total_excl_tax': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),
'total_incl_tax': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'orders'", 'null': 'True', 'to': "orm['{0}']".format(AUTH_USER_MODEL)})
},
'order.orderdiscount': {
'Meta': {'object_name': 'OrderDiscount'},
'amount': ('django.db.models.fields.DecimalField', [], {'default': '0', 'max_digits': '12', 'decimal_places': '2'}),
'category': ('django.db.models.fields.CharField', [], {'default': "'Basket'", 'max_length': '64'}),
'frequency': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'message': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'offer_id': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'offer_name': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'db_index': 'True'}),
'order': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'discounts'", 'to': "orm['order.Order']"}),
'voucher_code': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'db_index': 'True'}),
'voucher_id': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'})
},
'order.ordernote': {
'Meta': {'object_name': 'OrderNote'},
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'date_updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'message': ('django.db.models.fields.TextField', [], {}),
'note_type': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True'}),
'order': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'notes'", 'to': "orm['order.Order']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['{0}']".format(AUTH_USER_MODEL), 'null': 'True'})
},
'order.paymentevent': {
'Meta': {'ordering': "['-date_created']", 'object_name': 'PaymentEvent'},
'amount': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'event_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['order.PaymentEventType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'lines': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['order.Line']", 'through': "orm['order.PaymentEventQuantity']", 'symmetrical': 'False'}),
'order': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'payment_events'", 'to': "orm['order.Order']"})
},
'order.paymenteventquantity': {
'Meta': {'object_name': 'PaymentEventQuantity'},
'event': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'line_quantities'", 'to': "orm['order.PaymentEvent']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'line': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'payment_event_quantities'", 'to': "orm['order.Line']"}),
'quantity': ('django.db.models.fields.PositiveIntegerField', [], {})
},
'order.paymenteventtype': {
'Meta': {'ordering': "('sequence_number',)", 'object_name': 'PaymentEventType'},
'code': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '128'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '128'}),
'sequence_number': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
'order.shippingaddress': {
'Meta': {'object_name': 'ShippingAddress'},
'country': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['address.Country']"}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'line1': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'line2': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'line3': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'line4': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'phone_number': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'blank': 'True'}),
'postcode': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}),
'search_text': ('django.db.models.fields.CharField', [], {'max_length': '1000'}),
'state': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'})
},
'order.shippingevent': {
'Meta': {'ordering': "['-date_created']", 'object_name': 'ShippingEvent'},
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'event_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['order.ShippingEventType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'lines': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'shipping_events'", 'symmetrical': 'False', 'through': "orm['order.ShippingEventQuantity']", 'to': "orm['order.Line']"}),
'notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'order': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'shipping_events'", 'to': "orm['order.Order']"})
},
'order.shippingeventquantity': {
'Meta': {'object_name': 'ShippingEventQuantity'},
'event': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'line_quantities'", 'to': "orm['order.ShippingEvent']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'line': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'shipping_event_quantities'", 'to': "orm['order.Line']"}),
'quantity': ('django.db.models.fields.PositiveIntegerField', [], {})
},
'order.shippingeventtype': {
'Meta': {'ordering': "('sequence_number',)", 'object_name': 'ShippingEventType'},
'code': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '128'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_required': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'sequence_number': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
'partner.partner': {
'Meta': {'object_name': 'Partner'},
'code': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '128'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'users': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'partners'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['{0}']".format(AUTH_USER_MODEL)})
},
'sites.site': {
'Meta': {'ordering': "('domain',)", 'object_name': 'Site', 'db_table': "'django_site'"},
'domain': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
}
}
complete_apps = ['order']
| bsd-3-clause |
goldeneye-source/ges-python | lib/encodings/cp1253.py | 272 | 13094 | """ Python Character Mapping Codec cp1253 generated from 'MAPPINGS/VENDORS/MICSFT/WINDOWS/CP1253.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
def encode(self,input,errors='strict'):
return codecs.charmap_encode(input,errors,encoding_table)
def decode(self,input,errors='strict'):
return codecs.charmap_decode(input,errors,decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
def encode(self, input, final=False):
return codecs.charmap_encode(input,self.errors,encoding_table)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
def decode(self, input, final=False):
return codecs.charmap_decode(input,self.errors,decoding_table)[0]
class StreamWriter(Codec,codecs.StreamWriter):
pass
class StreamReader(Codec,codecs.StreamReader):
pass
### encodings module API
def getregentry():
return codecs.CodecInfo(
name='cp1253',
encode=Codec().encode,
decode=Codec().decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamreader=StreamReader,
streamwriter=StreamWriter,
)
### Decoding Table
decoding_table = (
'\x00' # 0x00 -> NULL
'\x01' # 0x01 -> START OF HEADING
'\x02' # 0x02 -> START OF TEXT
'\x03' # 0x03 -> END OF TEXT
'\x04' # 0x04 -> END OF TRANSMISSION
'\x05' # 0x05 -> ENQUIRY
'\x06' # 0x06 -> ACKNOWLEDGE
'\x07' # 0x07 -> BELL
'\x08' # 0x08 -> BACKSPACE
'\t' # 0x09 -> HORIZONTAL TABULATION
'\n' # 0x0A -> LINE FEED
'\x0b' # 0x0B -> VERTICAL TABULATION
'\x0c' # 0x0C -> FORM FEED
'\r' # 0x0D -> CARRIAGE RETURN
'\x0e' # 0x0E -> SHIFT OUT
'\x0f' # 0x0F -> SHIFT IN
'\x10' # 0x10 -> DATA LINK ESCAPE
'\x11' # 0x11 -> DEVICE CONTROL ONE
'\x12' # 0x12 -> DEVICE CONTROL TWO
'\x13' # 0x13 -> DEVICE CONTROL THREE
'\x14' # 0x14 -> DEVICE CONTROL FOUR
'\x15' # 0x15 -> NEGATIVE ACKNOWLEDGE
'\x16' # 0x16 -> SYNCHRONOUS IDLE
'\x17' # 0x17 -> END OF TRANSMISSION BLOCK
'\x18' # 0x18 -> CANCEL
'\x19' # 0x19 -> END OF MEDIUM
'\x1a' # 0x1A -> SUBSTITUTE
'\x1b' # 0x1B -> ESCAPE
'\x1c' # 0x1C -> FILE SEPARATOR
'\x1d' # 0x1D -> GROUP SEPARATOR
'\x1e' # 0x1E -> RECORD SEPARATOR
'\x1f' # 0x1F -> UNIT SEPARATOR
' ' # 0x20 -> SPACE
'!' # 0x21 -> EXCLAMATION MARK
'"' # 0x22 -> QUOTATION MARK
'#' # 0x23 -> NUMBER SIGN
'$' # 0x24 -> DOLLAR SIGN
'%' # 0x25 -> PERCENT SIGN
'&' # 0x26 -> AMPERSAND
"'" # 0x27 -> APOSTROPHE
'(' # 0x28 -> LEFT PARENTHESIS
')' # 0x29 -> RIGHT PARENTHESIS
'*' # 0x2A -> ASTERISK
'+' # 0x2B -> PLUS SIGN
',' # 0x2C -> COMMA
'-' # 0x2D -> HYPHEN-MINUS
'.' # 0x2E -> FULL STOP
'/' # 0x2F -> SOLIDUS
'0' # 0x30 -> DIGIT ZERO
'1' # 0x31 -> DIGIT ONE
'2' # 0x32 -> DIGIT TWO
'3' # 0x33 -> DIGIT THREE
'4' # 0x34 -> DIGIT FOUR
'5' # 0x35 -> DIGIT FIVE
'6' # 0x36 -> DIGIT SIX
'7' # 0x37 -> DIGIT SEVEN
'8' # 0x38 -> DIGIT EIGHT
'9' # 0x39 -> DIGIT NINE
':' # 0x3A -> COLON
';' # 0x3B -> SEMICOLON
'<' # 0x3C -> LESS-THAN SIGN
'=' # 0x3D -> EQUALS SIGN
'>' # 0x3E -> GREATER-THAN SIGN
'?' # 0x3F -> QUESTION MARK
'@' # 0x40 -> COMMERCIAL AT
'A' # 0x41 -> LATIN CAPITAL LETTER A
'B' # 0x42 -> LATIN CAPITAL LETTER B
'C' # 0x43 -> LATIN CAPITAL LETTER C
'D' # 0x44 -> LATIN CAPITAL LETTER D
'E' # 0x45 -> LATIN CAPITAL LETTER E
'F' # 0x46 -> LATIN CAPITAL LETTER F
'G' # 0x47 -> LATIN CAPITAL LETTER G
'H' # 0x48 -> LATIN CAPITAL LETTER H
'I' # 0x49 -> LATIN CAPITAL LETTER I
'J' # 0x4A -> LATIN CAPITAL LETTER J
'K' # 0x4B -> LATIN CAPITAL LETTER K
'L' # 0x4C -> LATIN CAPITAL LETTER L
'M' # 0x4D -> LATIN CAPITAL LETTER M
'N' # 0x4E -> LATIN CAPITAL LETTER N
'O' # 0x4F -> LATIN CAPITAL LETTER O
'P' # 0x50 -> LATIN CAPITAL LETTER P
'Q' # 0x51 -> LATIN CAPITAL LETTER Q
'R' # 0x52 -> LATIN CAPITAL LETTER R
'S' # 0x53 -> LATIN CAPITAL LETTER S
'T' # 0x54 -> LATIN CAPITAL LETTER T
'U' # 0x55 -> LATIN CAPITAL LETTER U
'V' # 0x56 -> LATIN CAPITAL LETTER V
'W' # 0x57 -> LATIN CAPITAL LETTER W
'X' # 0x58 -> LATIN CAPITAL LETTER X
'Y' # 0x59 -> LATIN CAPITAL LETTER Y
'Z' # 0x5A -> LATIN CAPITAL LETTER Z
'[' # 0x5B -> LEFT SQUARE BRACKET
'\\' # 0x5C -> REVERSE SOLIDUS
']' # 0x5D -> RIGHT SQUARE BRACKET
'^' # 0x5E -> CIRCUMFLEX ACCENT
'_' # 0x5F -> LOW LINE
'`' # 0x60 -> GRAVE ACCENT
'a' # 0x61 -> LATIN SMALL LETTER A
'b' # 0x62 -> LATIN SMALL LETTER B
'c' # 0x63 -> LATIN SMALL LETTER C
'd' # 0x64 -> LATIN SMALL LETTER D
'e' # 0x65 -> LATIN SMALL LETTER E
'f' # 0x66 -> LATIN SMALL LETTER F
'g' # 0x67 -> LATIN SMALL LETTER G
'h' # 0x68 -> LATIN SMALL LETTER H
'i' # 0x69 -> LATIN SMALL LETTER I
'j' # 0x6A -> LATIN SMALL LETTER J
'k' # 0x6B -> LATIN SMALL LETTER K
'l' # 0x6C -> LATIN SMALL LETTER L
'm' # 0x6D -> LATIN SMALL LETTER M
'n' # 0x6E -> LATIN SMALL LETTER N
'o' # 0x6F -> LATIN SMALL LETTER O
'p' # 0x70 -> LATIN SMALL LETTER P
'q' # 0x71 -> LATIN SMALL LETTER Q
'r' # 0x72 -> LATIN SMALL LETTER R
's' # 0x73 -> LATIN SMALL LETTER S
't' # 0x74 -> LATIN SMALL LETTER T
'u' # 0x75 -> LATIN SMALL LETTER U
'v' # 0x76 -> LATIN SMALL LETTER V
'w' # 0x77 -> LATIN SMALL LETTER W
'x' # 0x78 -> LATIN SMALL LETTER X
'y' # 0x79 -> LATIN SMALL LETTER Y
'z' # 0x7A -> LATIN SMALL LETTER Z
'{' # 0x7B -> LEFT CURLY BRACKET
'|' # 0x7C -> VERTICAL LINE
'}' # 0x7D -> RIGHT CURLY BRACKET
'~' # 0x7E -> TILDE
'\x7f' # 0x7F -> DELETE
'\u20ac' # 0x80 -> EURO SIGN
'\ufffe' # 0x81 -> UNDEFINED
'\u201a' # 0x82 -> SINGLE LOW-9 QUOTATION MARK
'\u0192' # 0x83 -> LATIN SMALL LETTER F WITH HOOK
'\u201e' # 0x84 -> DOUBLE LOW-9 QUOTATION MARK
'\u2026' # 0x85 -> HORIZONTAL ELLIPSIS
'\u2020' # 0x86 -> DAGGER
'\u2021' # 0x87 -> DOUBLE DAGGER
'\ufffe' # 0x88 -> UNDEFINED
'\u2030' # 0x89 -> PER MILLE SIGN
'\ufffe' # 0x8A -> UNDEFINED
'\u2039' # 0x8B -> SINGLE LEFT-POINTING ANGLE QUOTATION MARK
'\ufffe' # 0x8C -> UNDEFINED
'\ufffe' # 0x8D -> UNDEFINED
'\ufffe' # 0x8E -> UNDEFINED
'\ufffe' # 0x8F -> UNDEFINED
'\ufffe' # 0x90 -> UNDEFINED
'\u2018' # 0x91 -> LEFT SINGLE QUOTATION MARK
'\u2019' # 0x92 -> RIGHT SINGLE QUOTATION MARK
'\u201c' # 0x93 -> LEFT DOUBLE QUOTATION MARK
'\u201d' # 0x94 -> RIGHT DOUBLE QUOTATION MARK
'\u2022' # 0x95 -> BULLET
'\u2013' # 0x96 -> EN DASH
'\u2014' # 0x97 -> EM DASH
'\ufffe' # 0x98 -> UNDEFINED
'\u2122' # 0x99 -> TRADE MARK SIGN
'\ufffe' # 0x9A -> UNDEFINED
'\u203a' # 0x9B -> SINGLE RIGHT-POINTING ANGLE QUOTATION MARK
'\ufffe' # 0x9C -> UNDEFINED
'\ufffe' # 0x9D -> UNDEFINED
'\ufffe' # 0x9E -> UNDEFINED
'\ufffe' # 0x9F -> UNDEFINED
'\xa0' # 0xA0 -> NO-BREAK SPACE
'\u0385' # 0xA1 -> GREEK DIALYTIKA TONOS
'\u0386' # 0xA2 -> GREEK CAPITAL LETTER ALPHA WITH TONOS
'\xa3' # 0xA3 -> POUND SIGN
'\xa4' # 0xA4 -> CURRENCY SIGN
'\xa5' # 0xA5 -> YEN SIGN
'\xa6' # 0xA6 -> BROKEN BAR
'\xa7' # 0xA7 -> SECTION SIGN
'\xa8' # 0xA8 -> DIAERESIS
'\xa9' # 0xA9 -> COPYRIGHT SIGN
'\ufffe' # 0xAA -> UNDEFINED
'\xab' # 0xAB -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
'\xac' # 0xAC -> NOT SIGN
'\xad' # 0xAD -> SOFT HYPHEN
'\xae' # 0xAE -> REGISTERED SIGN
'\u2015' # 0xAF -> HORIZONTAL BAR
'\xb0' # 0xB0 -> DEGREE SIGN
'\xb1' # 0xB1 -> PLUS-MINUS SIGN
'\xb2' # 0xB2 -> SUPERSCRIPT TWO
'\xb3' # 0xB3 -> SUPERSCRIPT THREE
'\u0384' # 0xB4 -> GREEK TONOS
'\xb5' # 0xB5 -> MICRO SIGN
'\xb6' # 0xB6 -> PILCROW SIGN
'\xb7' # 0xB7 -> MIDDLE DOT
'\u0388' # 0xB8 -> GREEK CAPITAL LETTER EPSILON WITH TONOS
'\u0389' # 0xB9 -> GREEK CAPITAL LETTER ETA WITH TONOS
'\u038a' # 0xBA -> GREEK CAPITAL LETTER IOTA WITH TONOS
'\xbb' # 0xBB -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
'\u038c' # 0xBC -> GREEK CAPITAL LETTER OMICRON WITH TONOS
'\xbd' # 0xBD -> VULGAR FRACTION ONE HALF
'\u038e' # 0xBE -> GREEK CAPITAL LETTER UPSILON WITH TONOS
'\u038f' # 0xBF -> GREEK CAPITAL LETTER OMEGA WITH TONOS
'\u0390' # 0xC0 -> GREEK SMALL LETTER IOTA WITH DIALYTIKA AND TONOS
'\u0391' # 0xC1 -> GREEK CAPITAL LETTER ALPHA
'\u0392' # 0xC2 -> GREEK CAPITAL LETTER BETA
'\u0393' # 0xC3 -> GREEK CAPITAL LETTER GAMMA
'\u0394' # 0xC4 -> GREEK CAPITAL LETTER DELTA
'\u0395' # 0xC5 -> GREEK CAPITAL LETTER EPSILON
'\u0396' # 0xC6 -> GREEK CAPITAL LETTER ZETA
'\u0397' # 0xC7 -> GREEK CAPITAL LETTER ETA
'\u0398' # 0xC8 -> GREEK CAPITAL LETTER THETA
'\u0399' # 0xC9 -> GREEK CAPITAL LETTER IOTA
'\u039a' # 0xCA -> GREEK CAPITAL LETTER KAPPA
'\u039b' # 0xCB -> GREEK CAPITAL LETTER LAMDA
'\u039c' # 0xCC -> GREEK CAPITAL LETTER MU
'\u039d' # 0xCD -> GREEK CAPITAL LETTER NU
'\u039e' # 0xCE -> GREEK CAPITAL LETTER XI
'\u039f' # 0xCF -> GREEK CAPITAL LETTER OMICRON
'\u03a0' # 0xD0 -> GREEK CAPITAL LETTER PI
'\u03a1' # 0xD1 -> GREEK CAPITAL LETTER RHO
'\ufffe' # 0xD2 -> UNDEFINED
'\u03a3' # 0xD3 -> GREEK CAPITAL LETTER SIGMA
'\u03a4' # 0xD4 -> GREEK CAPITAL LETTER TAU
'\u03a5' # 0xD5 -> GREEK CAPITAL LETTER UPSILON
'\u03a6' # 0xD6 -> GREEK CAPITAL LETTER PHI
'\u03a7' # 0xD7 -> GREEK CAPITAL LETTER CHI
'\u03a8' # 0xD8 -> GREEK CAPITAL LETTER PSI
'\u03a9' # 0xD9 -> GREEK CAPITAL LETTER OMEGA
'\u03aa' # 0xDA -> GREEK CAPITAL LETTER IOTA WITH DIALYTIKA
'\u03ab' # 0xDB -> GREEK CAPITAL LETTER UPSILON WITH DIALYTIKA
'\u03ac' # 0xDC -> GREEK SMALL LETTER ALPHA WITH TONOS
'\u03ad' # 0xDD -> GREEK SMALL LETTER EPSILON WITH TONOS
'\u03ae' # 0xDE -> GREEK SMALL LETTER ETA WITH TONOS
'\u03af' # 0xDF -> GREEK SMALL LETTER IOTA WITH TONOS
'\u03b0' # 0xE0 -> GREEK SMALL LETTER UPSILON WITH DIALYTIKA AND TONOS
'\u03b1' # 0xE1 -> GREEK SMALL LETTER ALPHA
'\u03b2' # 0xE2 -> GREEK SMALL LETTER BETA
'\u03b3' # 0xE3 -> GREEK SMALL LETTER GAMMA
'\u03b4' # 0xE4 -> GREEK SMALL LETTER DELTA
'\u03b5' # 0xE5 -> GREEK SMALL LETTER EPSILON
'\u03b6' # 0xE6 -> GREEK SMALL LETTER ZETA
'\u03b7' # 0xE7 -> GREEK SMALL LETTER ETA
'\u03b8' # 0xE8 -> GREEK SMALL LETTER THETA
'\u03b9' # 0xE9 -> GREEK SMALL LETTER IOTA
'\u03ba' # 0xEA -> GREEK SMALL LETTER KAPPA
'\u03bb' # 0xEB -> GREEK SMALL LETTER LAMDA
'\u03bc' # 0xEC -> GREEK SMALL LETTER MU
'\u03bd' # 0xED -> GREEK SMALL LETTER NU
'\u03be' # 0xEE -> GREEK SMALL LETTER XI
'\u03bf' # 0xEF -> GREEK SMALL LETTER OMICRON
'\u03c0' # 0xF0 -> GREEK SMALL LETTER PI
'\u03c1' # 0xF1 -> GREEK SMALL LETTER RHO
'\u03c2' # 0xF2 -> GREEK SMALL LETTER FINAL SIGMA
'\u03c3' # 0xF3 -> GREEK SMALL LETTER SIGMA
'\u03c4' # 0xF4 -> GREEK SMALL LETTER TAU
'\u03c5' # 0xF5 -> GREEK SMALL LETTER UPSILON
'\u03c6' # 0xF6 -> GREEK SMALL LETTER PHI
'\u03c7' # 0xF7 -> GREEK SMALL LETTER CHI
'\u03c8' # 0xF8 -> GREEK SMALL LETTER PSI
'\u03c9' # 0xF9 -> GREEK SMALL LETTER OMEGA
'\u03ca' # 0xFA -> GREEK SMALL LETTER IOTA WITH DIALYTIKA
'\u03cb' # 0xFB -> GREEK SMALL LETTER UPSILON WITH DIALYTIKA
'\u03cc' # 0xFC -> GREEK SMALL LETTER OMICRON WITH TONOS
'\u03cd' # 0xFD -> GREEK SMALL LETTER UPSILON WITH TONOS
'\u03ce' # 0xFE -> GREEK SMALL LETTER OMEGA WITH TONOS
'\ufffe' # 0xFF -> UNDEFINED
)
### Encoding table
encoding_table=codecs.charmap_build(decoding_table)
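# Illustrative round trip (sketch; assumes this codec is registered under the
# standard 'cp1253' name):
# '\u03b1\u03b2\u03b3'.encode('cp1253') == b'\xe1\xe2\xe3'
# b'\xe1\xe2\xe3'.decode('cp1253') == '\u03b1\u03b2\u03b3'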
| gpl-3.0 |
bala4901/odoo | addons/l10n_us/__init__.py | 893 | 1045 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
awkspace/ansible | lib/ansible/modules/cloud/digital_ocean/digital_ocean_image_facts.py | 29 | 4212 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2018, Ansible Project
# Copyright: (c) 2018, Abhijeet Kasurde <akasurde@redhat.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'
}
DOCUMENTATION = '''
---
module: digital_ocean_image_facts
short_description: Gather facts about DigitalOcean images
description:
- This module can be used to gather facts about DigitalOcean provided images.
- These images can be either of type C(distribution), C(application) and C(private).
author: "Abhijeet Kasurde (@Akasurde)"
version_added: "2.6"
options:
image_type:
description:
- Specifies the type of image facts to be retrieved.
- If set to C(application), then facts are gathered related to all application images.
- If set to C(distribution), then facts are gathered related to all distribution images.
- If set to C(private), then facts are gathered related to all private images.
- If not set to any of the above, then facts are gathered related to all images.
default: 'all'
choices: [ 'all', 'application', 'distribution', 'private' ]
required: false
requirements:
- "python >= 2.6"
extends_documentation_fragment: digital_ocean.documentation
'''
EXAMPLES = '''
- name: Gather facts about all images
digital_ocean_image_facts:
image_type: all
oauth_token: "{{ oauth_token }}"
- name: Gather facts about application images
digital_ocean_image_facts:
image_type: application
oauth_token: "{{ oauth_token }}"
- name: Gather facts about distribution images
digital_ocean_image_facts:
image_type: distribution
oauth_token: "{{ oauth_token }}"
- name: Get the distribution of the image with slug coreos-beta
digital_ocean_image_facts:
register: resp_out
- set_fact:
distribution_name: "{{ item.distribution }}"
loop: "{{ resp_out.data|json_query(name) }}"
vars:
name: "[?slug=='coreos-beta']"
- debug: var=distribution_name
'''
RETURN = '''
data:
description: DigitalOcean image facts
returned: success
type: list
sample: [
{
"created_at": "2018-02-02T07:11:43Z",
"distribution": "CoreOS",
"id": 31434061,
"min_disk_size": 20,
"name": "1662.1.0 (beta)",
"public": true,
"regions": [
"nyc1",
"sfo1",
"nyc2",
"ams2",
"sgp1",
"lon1",
"nyc3",
"ams3",
"fra1",
"tor1",
"sfo2",
"blr1"
],
"size_gigabytes": 0.42,
"slug": "coreos-beta",
"type": "snapshot"
},
]
'''
from traceback import format_exc
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.digital_ocean import DigitalOceanHelper
from ansible.module_utils._text import to_native
def core(module):
image_type = module.params['image_type']
rest = DigitalOceanHelper(module)
base_url = 'images?'
if image_type == 'distribution':
base_url += "type=distribution&"
elif image_type == 'application':
base_url += "type=application&"
elif image_type == 'private':
base_url += "private=true&"
images = rest.get_paginated_data(base_url=base_url, data_key_name='images')
module.exit_json(changed=False, data=images)
def main():
argument_spec = DigitalOceanHelper.digital_ocean_argument_spec()
argument_spec.update(
image_type=dict(type='str',
required=False,
choices=['all', 'application', 'distribution', 'private'],
default='all'
)
)
module = AnsibleModule(argument_spec=argument_spec)
try:
core(module)
except Exception as e:
module.fail_json(msg=to_native(e), exception=format_exc())
if __name__ == '__main__':
main()
| gpl-3.0 |
double12gzh/nova | nova/db/sqlalchemy/migrate_repo/versions/253_add_pci_requests_to_instance_extra_table.py | 81 | 1112 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy import Column
from sqlalchemy import MetaData
from sqlalchemy import Table
from sqlalchemy import Text
BASE_TABLE_NAME = 'instance_extra'
NEW_COLUMN_NAME = 'pci_requests'
def upgrade(migrate_engine):
meta = MetaData()
meta.bind = migrate_engine
for prefix in ('', 'shadow_'):
table = Table(prefix + BASE_TABLE_NAME, meta, autoload=True)
new_column = Column(NEW_COLUMN_NAME, Text, nullable=True)
if not hasattr(table.c, NEW_COLUMN_NAME):
table.create_column(new_column)
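# Note: Table(..., autoload=True) reflects the live schema, and
# create_column() issues an ALTER TABLE adding the nullable Text column; the
# hasattr() guard keeps the migration idempotent for both the base and
# shadow tables.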
| apache-2.0 |
ossdemura/django-miniblog | src/Lib/site-packages/django/contrib/contenttypes/forms.py | 116 | 3620 | from __future__ import unicode_literals
from django.contrib.contenttypes.models import ContentType
from django.db import models
from django.forms import ModelForm, modelformset_factory
from django.forms.models import BaseModelFormSet
class BaseGenericInlineFormSet(BaseModelFormSet):
"""
A formset for generic inline objects to a parent.
"""
def __init__(self, data=None, files=None, instance=None, save_as_new=None,
prefix=None, queryset=None, **kwargs):
opts = self.model._meta
self.instance = instance
self.rel_name = '-'.join((
opts.app_label, opts.model_name,
self.ct_field.name, self.ct_fk_field.name,
))
if self.instance is None or self.instance.pk is None:
qs = self.model._default_manager.none()
else:
if queryset is None:
queryset = self.model._default_manager
qs = queryset.filter(**{
self.ct_field.name: ContentType.objects.get_for_model(
self.instance, for_concrete_model=self.for_concrete_model),
self.ct_fk_field.name: self.instance.pk,
})
super(BaseGenericInlineFormSet, self).__init__(
queryset=qs, data=data, files=files,
prefix=prefix,
**kwargs
)
@classmethod
def get_default_prefix(cls):
opts = cls.model._meta
return '-'.join((opts.app_label, opts.model_name, cls.ct_field.name, cls.ct_fk_field.name))
def save_new(self, form, commit=True):
setattr(form.instance, self.ct_field.get_attname(), ContentType.objects.get_for_model(self.instance).pk)
setattr(form.instance, self.ct_fk_field.get_attname(), self.instance.pk)
return form.save(commit=commit)
def generic_inlineformset_factory(model, form=ModelForm,
formset=BaseGenericInlineFormSet,
ct_field="content_type", fk_field="object_id",
fields=None, exclude=None,
extra=3, can_order=False, can_delete=True,
max_num=None, formfield_callback=None,
validate_max=False, for_concrete_model=True,
min_num=None, validate_min=False):
"""
Returns a ``GenericInlineFormSet`` for the given kwargs.
You must provide ``ct_field`` and ``fk_field`` if they are different from
the defaults ``content_type`` and ``object_id`` respectively.
"""
opts = model._meta
# if there is no field called `ct_field` let the exception propagate
ct_field = opts.get_field(ct_field)
if not isinstance(ct_field, models.ForeignKey) or ct_field.remote_field.model != ContentType:
        raise Exception("ct_field '%s' is not a ForeignKey to ContentType" % ct_field)
fk_field = opts.get_field(fk_field) # let the exception propagate
if exclude is not None:
exclude = list(exclude)
exclude.extend([ct_field.name, fk_field.name])
else:
exclude = [ct_field.name, fk_field.name]
FormSet = modelformset_factory(
model, form=form, formfield_callback=formfield_callback,
formset=formset, extra=extra, can_delete=can_delete,
can_order=can_order, fields=fields, exclude=exclude, max_num=max_num,
validate_max=validate_max, min_num=min_num, validate_min=validate_min,
)
FormSet.ct_field = ct_field
FormSet.ct_fk_field = fk_field
FormSet.for_concrete_model = for_concrete_model
return FormSet
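# A minimal usage sketch (the ``Comment`` model below is hypothetical, not
# part of this module; it is assumed to use the default ``content_type`` and
# ``object_id`` fields):
#
#     CommentFormSet = generic_inlineformset_factory(Comment, extra=1)
#     formset = CommentFormSet(instance=some_parent_object)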
| mit |
davidszotten/pytest | src/_pytest/pastebin.py | 34 | 3631 | """ submit failure or test session information to a pastebin service. """
from __future__ import absolute_import, division, print_function
import pytest
import six
import sys
import tempfile
def pytest_addoption(parser):
group = parser.getgroup("terminal reporting")
group._addoption(
"--pastebin",
metavar="mode",
action="store",
dest="pastebin",
default=None,
choices=["failed", "all"],
help="send failed|all info to bpaste.net pastebin service.",
)
@pytest.hookimpl(trylast=True)
def pytest_configure(config):
if config.option.pastebin == "all":
tr = config.pluginmanager.getplugin("terminalreporter")
# if no terminal reporter plugin is present, nothing we can do here;
# this can happen when this function executes in a slave node
# when using pytest-xdist, for example
if tr is not None:
# pastebin file will be utf-8 encoded binary file
config._pastebinfile = tempfile.TemporaryFile("w+b")
oldwrite = tr._tw.write
def tee_write(s, **kwargs):
oldwrite(s, **kwargs)
if isinstance(s, six.text_type):
s = s.encode("utf-8")
config._pastebinfile.write(s)
tr._tw.write = tee_write
def pytest_unconfigure(config):
if hasattr(config, "_pastebinfile"):
# get terminal contents and delete file
config._pastebinfile.seek(0)
sessionlog = config._pastebinfile.read()
config._pastebinfile.close()
del config._pastebinfile
# undo our patching in the terminal reporter
tr = config.pluginmanager.getplugin("terminalreporter")
del tr._tw.__dict__["write"]
# write summary
tr.write_sep("=", "Sending information to Paste Service")
pastebinurl = create_new_paste(sessionlog)
tr.write_line("pastebin session-log: %s\n" % pastebinurl)
def create_new_paste(contents):
"""
Creates a new paste using bpaste.net service.
:contents: paste contents as utf-8 encoded bytes
:returns: url to the pasted contents
"""
import re
if sys.version_info < (3, 0):
from urllib import urlopen, urlencode
else:
from urllib.request import urlopen
from urllib.parse import urlencode
params = {
"code": contents,
"lexer": "python3" if sys.version_info[0] == 3 else "python",
"expiry": "1week",
}
url = "https://bpaste.net"
response = urlopen(url, data=urlencode(params).encode("ascii")).read()
m = re.search(r'href="/raw/(\w+)"', response.decode("utf-8"))
if m:
return "%s/show/%s" % (url, m.group(1))
else:
return "bad response: " + response
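# Sketch of calling create_new_paste() directly (assumes network access to
# bpaste.net; contents must be utf-8 encoded bytes, as documented above):
#
#     url = create_new_paste(b"assert 1 + 1 == 2")
#     print(url)  # e.g. https://bpaste.net/show/<paste_id>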
def pytest_terminal_summary(terminalreporter):
import _pytest.config
if terminalreporter.config.option.pastebin != "failed":
return
tr = terminalreporter
if "failed" in tr.stats:
terminalreporter.write_sep("=", "Sending information to Paste Service")
for rep in terminalreporter.stats.get("failed"):
try:
msg = rep.longrepr.reprtraceback.reprentries[-1].reprfileloc
except AttributeError:
msg = tr._getfailureheadline(rep)
tw = _pytest.config.create_terminal_writer(
terminalreporter.config, stringio=True
)
rep.toterminal(tw)
s = tw.stringio.getvalue()
assert len(s)
pastebinurl = create_new_paste(s)
tr.write_line("%s --> %s" % (msg, pastebinurl))
| mit |
joeyjojo/django_offline | src/django/contrib/messages/storage/base.py | 399 | 6134 | from django.conf import settings
from django.utils.encoding import force_unicode, StrAndUnicode
from django.contrib.messages import constants, utils
LEVEL_TAGS = utils.get_level_tags()
class Message(StrAndUnicode):
"""
Represents an actual message that can be stored in any of the supported
storage classes (typically session- or cookie-based) and rendered in a view
or template.
"""
def __init__(self, level, message, extra_tags=None):
self.level = int(level)
self.message = message
self.extra_tags = extra_tags
def _prepare(self):
"""
Prepares the message for serialization by forcing the ``message``
and ``extra_tags`` to unicode in case they are lazy translations.
Known "safe" types (None, int, etc.) are not converted (see Django's
``force_unicode`` implementation for details).
"""
self.message = force_unicode(self.message, strings_only=True)
self.extra_tags = force_unicode(self.extra_tags, strings_only=True)
def __eq__(self, other):
return isinstance(other, Message) and self.level == other.level and \
self.message == other.message
def __unicode__(self):
return force_unicode(self.message)
def _get_tags(self):
label_tag = force_unicode(LEVEL_TAGS.get(self.level, ''),
strings_only=True)
extra_tags = force_unicode(self.extra_tags, strings_only=True)
if extra_tags and label_tag:
return u' '.join([extra_tags, label_tag])
elif extra_tags:
return extra_tags
elif label_tag:
return label_tag
return ''
tags = property(_get_tags)
class BaseStorage(object):
"""
This is the base backend for temporary message storage.
This is not a complete class; to be a usable storage backend, it must be
subclassed and the two methods ``_get`` and ``_store`` overridden.
"""
def __init__(self, request, *args, **kwargs):
self.request = request
self._queued_messages = []
self.used = False
self.added_new = False
super(BaseStorage, self).__init__(*args, **kwargs)
def __len__(self):
return len(self._loaded_messages) + len(self._queued_messages)
def __iter__(self):
self.used = True
if self._queued_messages:
self._loaded_messages.extend(self._queued_messages)
self._queued_messages = []
return iter(self._loaded_messages)
def __contains__(self, item):
return item in self._loaded_messages or item in self._queued_messages
@property
def _loaded_messages(self):
"""
Returns a list of loaded messages, retrieving them first if they have
not been loaded yet.
"""
if not hasattr(self, '_loaded_data'):
messages, all_retrieved = self._get()
self._loaded_data = messages or []
return self._loaded_data
def _get(self, *args, **kwargs):
"""
Retrieves a list of stored messages. Returns a tuple of the messages
and a flag indicating whether or not all the messages originally
intended to be stored in this storage were, in fact, stored and
retrieved; e.g., ``(messages, all_retrieved)``.
**This method must be implemented by a subclass.**
If it is possible to tell if the backend was not used (as opposed to
just containing no messages) then ``None`` should be returned in
place of ``messages``.
"""
raise NotImplementedError()
def _store(self, messages, response, *args, **kwargs):
"""
Stores a list of messages, returning a list of any messages which could
not be stored.
One type of object must be able to be stored, ``Message``.
**This method must be implemented by a subclass.**
"""
raise NotImplementedError()
def _prepare_messages(self, messages):
"""
Prepares a list of messages for storage.
"""
for message in messages:
message._prepare()
def update(self, response):
"""
Stores all unread messages.
If the backend has yet to be iterated, previously stored messages will
be stored again. Otherwise, only messages added after the last
iteration will be stored.
"""
self._prepare_messages(self._queued_messages)
if self.used:
return self._store(self._queued_messages, response)
elif self.added_new:
messages = self._loaded_messages + self._queued_messages
return self._store(messages, response)
def add(self, level, message, extra_tags=''):
"""
Queues a message to be stored.
The message is only queued if it contained something and its level is
not less than the recording level (``self.level``).
"""
if not message:
return
# Check that the message level is not less than the recording level.
level = int(level)
if level < self.level:
return
# Add the message.
self.added_new = True
message = Message(level, message, extra_tags=extra_tags)
self._queued_messages.append(message)
def _get_level(self):
"""
Returns the minimum recorded level.
The default level is the ``MESSAGE_LEVEL`` setting. If this is
not found, the ``INFO`` level is used.
"""
if not hasattr(self, '_level'):
self._level = getattr(settings, 'MESSAGE_LEVEL', constants.INFO)
return self._level
def _set_level(self, value=None):
"""
Sets a custom minimum recorded level.
If set to ``None``, the default level will be used (see the
``_get_level`` method).
"""
if value is None and hasattr(self, '_level'):
del self._level
else:
self._level = int(value)
level = property(_get_level, _set_level, _set_level)
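# A minimal sketch of a concrete backend, assuming messages only need to
# survive within a single request (illustrative only, not part of Django):
#
#     class MemoryStorage(BaseStorage):
#         def _get(self, *args, **kwargs):
#             return getattr(self.request, '_messages', None), True
#
#         def _store(self, messages, response, *args, **kwargs):
#             self.request._messages = messages
#             return []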
| mit |
edx-solutions/edx-platform | common/djangoapps/third_party_auth/migrations/0008_auto_20170413_1455.py | 5 | 1465 | # -*- coding: utf-8 -*-
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('third_party_auth', '0007_auto_20170406_0912'),
]
operations = [
migrations.AddField(
model_name='ltiproviderconfig',
name='drop_existing_session',
field=models.BooleanField(default=False, help_text='Whether to drop an existing session when accessing a view decorated with third_party_auth.decorators.tpa_hint_ends_existing_session when a tpa_hint URL query parameter mapping to this provider is included in the request.'),
),
migrations.AddField(
model_name='oauth2providerconfig',
name='drop_existing_session',
field=models.BooleanField(default=False, help_text='Whether to drop an existing session when accessing a view decorated with third_party_auth.decorators.tpa_hint_ends_existing_session when a tpa_hint URL query parameter mapping to this provider is included in the request.'),
),
migrations.AddField(
model_name='samlproviderconfig',
name='drop_existing_session',
field=models.BooleanField(default=False, help_text='Whether to drop an existing session when accessing a view decorated with third_party_auth.decorators.tpa_hint_ends_existing_session when a tpa_hint URL query parameter mapping to this provider is included in the request.'),
),
]
| agpl-3.0 |
user-none/calibre | src/calibre/gui2/tweak_book/file_list.py | 3 | 35970 | #!/usr/bin/env python2
# vim:fileencoding=utf-8
from __future__ import (unicode_literals, division, absolute_import,
print_function)
__license__ = 'GPL v3'
__copyright__ = '2013, Kovid Goyal <kovid at kovidgoyal.net>'
import os
from binascii import hexlify
from collections import OrderedDict, defaultdict
from functools import partial
import sip
from PyQt5.Qt import (
QWidget, QTreeWidget, QGridLayout, QSize, Qt, QTreeWidgetItem, QIcon, QFont,
QStyledItemDelegate, QStyle, QPixmap, QPainter, pyqtSignal, QMenu, QTimer,
QDialogButtonBox, QDialog, QLabel, QLineEdit, QVBoxLayout, QScrollArea,
QRadioButton, QFormLayout, QSpinBox, QListWidget, QListWidgetItem, QCheckBox)
from calibre import human_readable, sanitize_file_name_unicode, plugins
from calibre.ebooks.oeb.base import OEB_STYLES, OEB_DOCS
from calibre.ebooks.oeb.polish.container import guess_type, OEB_FONTS
from calibre.ebooks.oeb.polish.replace import get_recommended_folders
from calibre.ebooks.oeb.polish.cover import (
get_cover_page_name, get_raster_cover_name, is_raster_image)
from calibre.gui2 import error_dialog, choose_files, question_dialog, elided_text, choose_save_file
from calibre.gui2.tweak_book import current_container, tprefs
from calibre.gui2.tweak_book.editor import syntax_from_mime
from calibre.gui2.tweak_book.templates import template_for
from calibre.utils.icu import sort_key
TOP_ICON_SIZE = 24
NAME_ROLE = Qt.UserRole
CATEGORY_ROLE = NAME_ROLE + 1
LINEAR_ROLE = CATEGORY_ROLE + 1
MIME_ROLE = LINEAR_ROLE + 1
NBSP = '\xa0'
def name_is_ok(name, show_error):
if not name or not name.strip():
return show_error('') and False
ext = name.rpartition('.')[-1]
if not ext or ext == name:
return show_error(_('The file name must have an extension')) and False
norm = name.replace('\\', '/')
    parts = norm.split('/')
for x in parts:
if sanitize_file_name_unicode(x) != x:
return show_error(_('The file name contains invalid characters')) and False
if current_container().has_name(norm):
return show_error(_('This file name already exists in the book')) and False
show_error('')
return True
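# For example, name_is_ok('text/chapter1.html', show_error) is True for a
# book that does not already contain that name, while 'chapter1' (no
# extension) fails with the corresponding error message.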
def get_bulk_rename_settings(parent, number, msg=None, sanitize=sanitize_file_name_unicode, leading_zeros=True, prefix=None): # {{{
d = QDialog(parent)
d.setWindowTitle(_('Bulk rename items'))
d.l = l = QFormLayout(d)
d.setLayout(l)
d.prefix = p = QLineEdit(d)
p.setText(prefix or _('Chapter-'))
p.selectAll()
d.la = la = QLabel(msg or _(
'All selected files will be renamed to the form prefix-number'))
l.addRow(la)
l.addRow(_('&Prefix:'), p)
d.num = num = QSpinBox(d)
num.setMinimum(0), num.setValue(1), num.setMaximum(1000)
l.addRow(_('Starting &number:'), num)
d.bb = bb = QDialogButtonBox(QDialogButtonBox.Ok | QDialogButtonBox.Cancel)
bb.accepted.connect(d.accept), bb.rejected.connect(d.reject)
l.addRow(bb)
if d.exec_() == d.Accepted:
prefix = sanitize(unicode(d.prefix.text()))
num = d.num.value()
fmt = '%d'
if leading_zeros:
largest = num + number - 1
fmt = '%0{0}d'.format(len(str(largest)))
return prefix + fmt, num
return None, None
# }}}
class ItemDelegate(QStyledItemDelegate): # {{{
rename_requested = pyqtSignal(object, object)
def setEditorData(self, editor, index):
name = unicode(index.data(NAME_ROLE) or '')
# We do this because Qt calls selectAll() unconditionally on the
# editor, and we want only a part of the file name to be selected
QTimer.singleShot(0, partial(self.set_editor_data, name, editor))
def set_editor_data(self, name, editor):
if sip.isdeleted(editor):
return
editor.setText(name)
ext_pos = name.rfind('.')
slash_pos = name.rfind('/')
if ext_pos > -1 and slash_pos > -1 and ext_pos > slash_pos + 1:
editor.setSelection(slash_pos+1, ext_pos - slash_pos - 1)
else:
editor.selectAll()
def setModelData(self, editor, model, index):
newname = unicode(editor.text())
oldname = unicode(index.data(NAME_ROLE) or '')
if newname != oldname:
self.rename_requested.emit(oldname, newname)
def sizeHint(self, option, index):
ans = QStyledItemDelegate.sizeHint(self, option, index)
top_level = not index.parent().isValid()
ans += QSize(0, 20 if top_level else 10)
return ans
def paint(self, painter, option, index):
top_level = not index.parent().isValid()
hover = option.state & QStyle.State_MouseOver
if hover:
if top_level:
suffix = '%s(%d)' % (NBSP, index.model().rowCount(index))
else:
try:
suffix = NBSP + human_readable(current_container().filesize(unicode(index.data(NAME_ROLE) or '')))
except EnvironmentError:
suffix = NBSP + human_readable(0)
br = painter.boundingRect(option.rect, Qt.AlignRight|Qt.AlignVCenter, suffix)
if top_level and index.row() > 0:
option.rect.adjust(0, 5, 0, 0)
painter.drawLine(option.rect.topLeft(), option.rect.topRight())
option.rect.adjust(0, 1, 0, 0)
if hover:
option.rect.adjust(0, 0, -br.width(), 0)
QStyledItemDelegate.paint(self, painter, option, index)
if hover:
option.rect.adjust(0, 0, br.width(), 0)
painter.drawText(option.rect, Qt.AlignRight|Qt.AlignVCenter, suffix)
# }}}
class FileList(QTreeWidget):
delete_requested = pyqtSignal(object, object)
reorder_spine = pyqtSignal(object)
rename_requested = pyqtSignal(object, object)
bulk_rename_requested = pyqtSignal(object)
edit_file = pyqtSignal(object, object, object)
merge_requested = pyqtSignal(object, object, object)
mark_requested = pyqtSignal(object, object)
export_requested = pyqtSignal(object, object)
replace_requested = pyqtSignal(object, object, object, object)
link_stylesheets_requested = pyqtSignal(object, object, object)
def __init__(self, parent=None):
QTreeWidget.__init__(self, parent)
self.ordered_selected_indexes = False
pi = plugins['progress_indicator'][0]
if hasattr(pi, 'set_no_activate_on_click'):
pi.set_no_activate_on_click(self)
self.current_edited_name = None
self.delegate = ItemDelegate(self)
self.delegate.rename_requested.connect(self.rename_requested)
self.setTextElideMode(Qt.ElideMiddle)
self.setItemDelegate(self.delegate)
self.setIconSize(QSize(16, 16))
self.header().close()
self.setDragEnabled(True)
self.setEditTriggers(self.EditKeyPressed)
self.setSelectionMode(self.ExtendedSelection)
self.viewport().setAcceptDrops(True)
self.setDropIndicatorShown(True)
self.setDragDropMode(self.InternalMove)
self.setAutoScroll(True)
self.setAutoScrollMargin(TOP_ICON_SIZE*2)
self.setDefaultDropAction(Qt.MoveAction)
self.setAutoExpandDelay(1000)
self.setAnimated(True)
self.setMouseTracking(True)
self.setContextMenuPolicy(Qt.CustomContextMenu)
self.customContextMenuRequested.connect(self.show_context_menu)
self.root = self.invisibleRootItem()
self.emblem_cache = {}
self.rendered_emblem_cache = {}
self.top_level_pixmap_cache = {
name : QPixmap(I(icon)).scaled(TOP_ICON_SIZE, TOP_ICON_SIZE, transformMode=Qt.SmoothTransformation)
for name, icon in {
'text':'keyboard-prefs.png',
'styles':'lookfeel.png',
'fonts':'font.png',
'misc':'mimetypes/dir.png',
'images':'view-image.png',
}.iteritems()}
self.itemActivated.connect(self.item_double_clicked)
def get_state(self):
s = {'pos':self.verticalScrollBar().value()}
s['expanded'] = {c for c, item in self.categories.iteritems() if item.isExpanded()}
s['selected'] = {unicode(i.data(0, NAME_ROLE) or '') for i in self.selectedItems()}
return s
def set_state(self, state):
for category, item in self.categories.iteritems():
item.setExpanded(category in state['expanded'])
self.verticalScrollBar().setValue(state['pos'])
for parent in self.categories.itervalues():
for c in (parent.child(i) for i in xrange(parent.childCount())):
name = unicode(c.data(0, NAME_ROLE) or '')
if name in state['selected']:
c.setSelected(True)
def item_from_name(self, name):
for parent in self.categories.itervalues():
for c in (parent.child(i) for i in xrange(parent.childCount())):
q = unicode(c.data(0, NAME_ROLE) or '')
if q == name:
return c
def select_name(self, name):
for parent in self.categories.itervalues():
for c in (parent.child(i) for i in xrange(parent.childCount())):
q = unicode(c.data(0, NAME_ROLE) or '')
c.setSelected(q == name)
if q == name:
self.scrollToItem(c)
def mark_name_as_current(self, name):
current = self.item_from_name(name)
if current is not None:
if self.current_edited_name is not None:
ci = self.item_from_name(self.current_edited_name)
if ci is not None:
ci.setData(0, Qt.FontRole, None)
self.current_edited_name = name
self.mark_item_as_current(current)
def mark_item_as_current(self, item):
font = QFont(self.font())
font.setItalic(True)
font.setBold(True)
item.setData(0, Qt.FontRole, font)
def clear_currently_edited_name(self):
if self.current_edited_name:
ci = self.item_from_name(self.current_edited_name)
if ci is not None:
ci.setData(0, Qt.FontRole, None)
self.current_edited_name = None
def build(self, container, preserve_state=True):
if preserve_state:
state = self.get_state()
self.clear()
self.root = self.invisibleRootItem()
self.root.setFlags(Qt.ItemIsDragEnabled)
self.categories = {}
for category, text in (
('text', _('Text')),
('styles', _('Styles')),
('images', _('Images')),
('fonts', _('Fonts')),
('misc', _('Miscellaneous')),
):
self.categories[category] = i = QTreeWidgetItem(self.root, 0)
i.setText(0, text)
i.setData(0, Qt.DecorationRole, self.top_level_pixmap_cache[category])
f = i.font(0)
f.setBold(True)
i.setFont(0, f)
i.setData(0, NAME_ROLE, category)
flags = Qt.ItemIsEnabled
if category == 'text':
flags |= Qt.ItemIsDropEnabled
i.setFlags(flags)
processed, seen = {}, {}
cover_page_name = get_cover_page_name(container)
cover_image_name = get_raster_cover_name(container)
manifested_names = set()
for names in container.manifest_type_map.itervalues():
manifested_names |= set(names)
def get_category(name, mt):
category = 'misc'
if mt.startswith('image/'):
category = 'images'
elif mt in OEB_FONTS:
category = 'fonts'
elif mt in OEB_STYLES:
category = 'styles'
elif mt in OEB_DOCS:
category = 'text'
ext = name.rpartition('.')[-1].lower()
if ext in {'ttf', 'otf', 'woff'}:
# Probably wrong mimetype in the OPF
category = 'fonts'
return category
def set_display_name(name, item):
if name in processed:
# We have an exact duplicate (can happen if there are
# duplicates in the spine)
item.setText(0, processed[name].text(0))
item.setText(1, processed[name].text(1))
return
parts = name.split('/')
text = parts[-1]
while text in seen and parts:
text = parts.pop() + '/' + text
seen[text] = item
item.setText(0, text)
item.setText(1, hexlify(sort_key(text)))
def render_emblems(item, emblems):
emblems = tuple(emblems)
if not emblems:
return
icon = self.rendered_emblem_cache.get(emblems, None)
if icon is None:
pixmaps = []
for emblem in emblems:
pm = self.emblem_cache.get(emblem, None)
if pm is None:
pm = self.emblem_cache[emblem] = QPixmap(
I(emblem)).scaled(self.iconSize(), transformMode=Qt.SmoothTransformation)
pixmaps.append(pm)
num = len(pixmaps)
w, h = pixmaps[0].width(), pixmaps[0].height()
if num == 1:
icon = self.rendered_emblem_cache[emblems] = QIcon(pixmaps[0])
else:
canvas = QPixmap((num * w) + ((num-1)*2), h)
canvas.fill(Qt.transparent)
painter = QPainter(canvas)
for i, pm in enumerate(pixmaps):
painter.drawPixmap(i * (w + 2), 0, pm)
painter.end()
icon = self.rendered_emblem_cache[emblems] = canvas
item.setData(0, Qt.DecorationRole, icon)
cannot_be_renamed = container.names_that_must_not_be_changed
ncx_mime = guess_type('a.ncx')
def create_item(name, linear=None):
imt = container.mime_map.get(name, guess_type(name))
icat = get_category(name, imt)
category = 'text' if linear is not None else ({'text':'misc'}.get(icat, icat))
item = QTreeWidgetItem(self.categories['text' if linear is not None else category], 1)
flags = Qt.ItemIsEnabled | Qt.ItemIsSelectable
if category == 'text':
flags |= Qt.ItemIsDragEnabled
if name not in cannot_be_renamed:
flags |= Qt.ItemIsEditable
item.setFlags(flags)
item.setStatusTip(0, _('Full path: ') + name)
item.setData(0, NAME_ROLE, name)
item.setData(0, CATEGORY_ROLE, category)
item.setData(0, LINEAR_ROLE, bool(linear))
item.setData(0, MIME_ROLE, imt)
set_display_name(name, item)
tooltips = []
emblems = []
if name in {cover_page_name, cover_image_name}:
emblems.append('default_cover.png')
tooltips.append(_('This file is the cover %s for this book') % (_('image') if name == cover_image_name else _('page')))
            if name == container.opf_name:
emblems.append('metadata.png')
tooltips.append(_('This file contains all the metadata and book structure information'))
if imt == ncx_mime:
emblems.append('toc.png')
tooltips.append(_('This file contains the metadata table of contents'))
if name not in manifested_names and not container.ok_to_be_unmanifested(name):
emblems.append('dialog_question.png')
tooltips.append(_('This file is not listed in the book manifest'))
if linear is False:
emblems.append('arrow-down.png')
tooltips.append(_('This file is marked as non-linear in the spine\nDrag it to the top to make it linear'))
if linear is None and icat == 'text':
# Text item outside spine
emblems.append('dialog_warning.png')
tooltips.append(_('This file is a text file that is not referenced in the spine'))
if category == 'text' and name in processed:
# Duplicate entry in spine
emblems.append('dialog_error.png')
tooltips.append(_('This file occurs more than once in the spine'))
render_emblems(item, emblems)
if tooltips:
item.setData(0, Qt.ToolTipRole, '\n'.join(tooltips))
return item
for name, linear in container.spine_names:
processed[name] = create_item(name, linear=linear)
for name in container.name_path_map:
if name in processed:
continue
processed[name] = create_item(name)
for name, c in self.categories.iteritems():
c.setExpanded(True)
if name != 'text':
c.sortChildren(1, Qt.AscendingOrder)
if preserve_state:
self.set_state(state)
if self.current_edited_name:
item = self.item_from_name(self.current_edited_name)
if item is not None:
self.mark_item_as_current(item)
def show_context_menu(self, point):
item = self.itemAt(point)
if item is None or item in set(self.categories.itervalues()):
return
m = QMenu(self)
sel = self.selectedItems()
num = len(sel)
container = current_container()
ci = self.currentItem()
if ci is not None:
cn = unicode(ci.data(0, NAME_ROLE) or '')
mt = unicode(ci.data(0, MIME_ROLE) or '')
cat = unicode(ci.data(0, CATEGORY_ROLE) or '')
n = elided_text(cn.rpartition('/')[-1])
m.addAction(QIcon(I('save.png')), _('Export %s') % n, partial(self.export, cn))
if cn not in container.names_that_must_not_be_changed and cn not in container.names_that_must_not_be_removed and mt not in OEB_FONTS:
m.addAction(_('Replace %s with file...') % n, partial(self.replace, cn))
m.addSeparator()
m.addAction(QIcon(I('modified.png')), _('&Rename %s') % n, self.edit_current_item)
if is_raster_image(mt):
m.addAction(QIcon(I('default_cover.png')), _('Mark %s as cover image') % n, partial(self.mark_as_cover, cn))
elif current_container().SUPPORTS_TITLEPAGES and mt in OEB_DOCS and cat == 'text':
m.addAction(QIcon(I('default_cover.png')), _('Mark %s as cover page') % n, partial(self.mark_as_titlepage, cn))
m.addSeparator()
if num > 0:
m.addSeparator()
if num > 1:
m.addAction(QIcon(I('modified.png')), _('&Bulk rename selected files'), self.request_bulk_rename)
m.addAction(QIcon(I('trash.png')), _('&Delete the %d selected file(s)') % num, self.request_delete)
m.addSeparator()
selected_map = defaultdict(list)
for item in sel:
selected_map[unicode(item.data(0, CATEGORY_ROLE) or '')].append(unicode(item.data(0, NAME_ROLE) or ''))
for items in selected_map.itervalues():
items.sort(key=self.index_of_name)
if selected_map['text']:
m.addAction(QIcon(I('format-text-color.png')), _('Link &stylesheets...'), partial(self.link_stylesheets, selected_map['text']))
if len(selected_map['text']) > 1:
m.addAction(QIcon(I('merge.png')), _('&Merge selected text files'), partial(self.start_merge, 'text', selected_map['text']))
if len(selected_map['styles']) > 1:
m.addAction(QIcon(I('merge.png')), _('&Merge selected style files'), partial(self.start_merge, 'styles', selected_map['styles']))
if len(list(m.actions())) > 0:
m.popup(self.mapToGlobal(point))
def index_of_name(self, name):
for category, parent in self.categories.iteritems():
for i in xrange(parent.childCount()):
item = parent.child(i)
if unicode(item.data(0, NAME_ROLE) or '') == name:
return (category, i)
return (None, -1)
def start_merge(self, category, names):
d = MergeDialog(names, self)
if d.exec_() == d.Accepted and d.ans:
self.merge_requested.emit(category, names, d.ans)
def edit_current_item(self):
if self.currentItem() is not None:
self.editItem(self.currentItem())
def mark_as_cover(self, name):
self.mark_requested.emit(name, 'cover')
def mark_as_titlepage(self, name):
first = unicode(self.categories['text'].child(0).data(0, NAME_ROLE) or '') == name
move_to_start = False
if not first:
move_to_start = question_dialog(self, _('Not first item'), _(
'%s is not the first text item. You should only mark the'
' first text item as cover. Do you want to make it the'
' first item?') % elided_text(name))
self.mark_requested.emit(name, 'titlepage:%r' % move_to_start)
def keyPressEvent(self, ev):
if ev.key() in (Qt.Key_Delete, Qt.Key_Backspace):
ev.accept()
self.request_delete()
else:
return QTreeWidget.keyPressEvent(self, ev)
def request_bulk_rename(self):
names = {unicode(item.data(0, NAME_ROLE) or '') for item in self.selectedItems()}
bad = names & current_container().names_that_must_not_be_changed
if bad:
return error_dialog(self, _('Cannot rename'),
_('The file(s) %s cannot be renamed.') % ('<b>%s</b>' % ', '.join(bad)), show=True)
names = sorted(names, key=self.index_of_name)
fmt, num = get_bulk_rename_settings(self, len(names))
if fmt is not None:
def change_name(name, num):
parts = name.split('/')
base, ext = parts[-1].rpartition('.')[0::2]
parts[-1] = (fmt % num) + '.' + ext
return '/'.join(parts)
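            # e.g. (hypothetical values) fmt='Chapter-%03d' and num=1 rename
            # 'text/old.html' to 'text/Chapter-001.html', keeping the folder
            # and extension intact.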
name_map = {n:change_name(n, num + i) for i, n in enumerate(names)}
self.bulk_rename_requested.emit(name_map)
def request_delete(self):
names = {unicode(item.data(0, NAME_ROLE) or '') for item in self.selectedItems()}
bad = names & current_container().names_that_must_not_be_removed
if bad:
return error_dialog(self, _('Cannot delete'),
_('The file(s) %s cannot be deleted.') % ('<b>%s</b>' % ', '.join(bad)), show=True)
text = self.categories['text']
children = (text.child(i) for i in xrange(text.childCount()))
spine_removals = [(unicode(item.data(0, NAME_ROLE) or ''), item.isSelected()) for item in children]
other_removals = {unicode(item.data(0, NAME_ROLE) or '') for item in self.selectedItems()
if unicode(item.data(0, CATEGORY_ROLE) or '') != 'text'}
self.delete_requested.emit(spine_removals, other_removals)
def delete_done(self, spine_removals, other_removals):
removals = []
for i, (name, remove) in enumerate(spine_removals):
if remove:
removals.append(self.categories['text'].child(i))
for category, parent in self.categories.iteritems():
if category != 'text':
for i in xrange(parent.childCount()):
child = parent.child(i)
if unicode(child.data(0, NAME_ROLE) or '') in other_removals:
removals.append(child)
# The sorting by index is necessary otherwise Qt crashes with recursive
# repaint detected message
for c in sorted(removals, key=lambda x:x.parent().indexOfChild(x), reverse=True):
sip.delete(c)
# A bug in the raster paint engine on linux causes a crash if the scrollbar
# is at the bottom and the delete happens to cause the scrollbar to
# update
b = self.verticalScrollBar()
if b.value() == b.maximum():
b.setValue(b.minimum())
QTimer.singleShot(0, lambda : b.setValue(b.maximum()))
def __enter__(self):
self.ordered_selected_indexes = True
def __exit__(self, *args):
self.ordered_selected_indexes = False
def selectedIndexes(self):
ans = QTreeWidget.selectedIndexes(self)
if self.ordered_selected_indexes:
# The reverse is needed because Qt's implementation of dropEvent
# reverses the selectedIndexes when dropping.
ans = list(sorted(ans, key=lambda idx:idx.row(), reverse=True))
return ans
def dropEvent(self, event):
with self:
text = self.categories['text']
pre_drop_order = {text.child(i):i for i in xrange(text.childCount())}
super(FileList, self).dropEvent(event)
current_order = {text.child(i):i for i in xrange(text.childCount())}
if current_order != pre_drop_order:
order = []
for child in (text.child(i) for i in xrange(text.childCount())):
name = unicode(child.data(0, NAME_ROLE) or '')
linear = bool(child.data(0, LINEAR_ROLE))
order.append([name, linear])
# Ensure that all non-linear items are at the end, any non-linear
# items not at the end will be made linear
for i, (name, linear) in tuple(enumerate(order)):
if not linear and i < len(order) - 1 and order[i+1][1]:
order[i][1] = True
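                # e.g. [['a.html', True], ['b.html', False], ['c.html', True]]
                # is normalized so that 'b.html' becomes linear, because a
                # linear item follows it.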
self.reorder_spine.emit(order)
def item_double_clicked(self, item, column):
category = unicode(item.data(0, CATEGORY_ROLE) or '')
if category:
self._request_edit(item)
def _request_edit(self, item):
category = unicode(item.data(0, CATEGORY_ROLE) or '')
mime = unicode(item.data(0, MIME_ROLE) or '')
name = unicode(item.data(0, NAME_ROLE) or '')
syntax = {'text':'html', 'styles':'css'}.get(category, None)
self.edit_file.emit(name, syntax, mime)
def request_edit(self, name):
item = self.item_from_name(name)
if item is not None:
self._request_edit(item)
else:
error_dialog(self, _('Cannot edit'),
_('No item with the name: %s was found') % name, show=True)
@property
def all_files(self):
return (category.child(i) for category in self.categories.itervalues() for i in xrange(category.childCount()))
@property
def searchable_names(self):
ans = {'text':OrderedDict(), 'styles':OrderedDict(), 'selected':OrderedDict()}
for item in self.all_files:
category = unicode(item.data(0, CATEGORY_ROLE) or '')
mime = unicode(item.data(0, MIME_ROLE) or '')
name = unicode(item.data(0, NAME_ROLE) or '')
ok = category in {'text', 'styles'}
if ok:
ans[category][name] = syntax_from_mime(name, mime)
if not ok and category == 'misc':
ok = mime in {guess_type('a.'+x) for x in ('opf', 'ncx', 'txt', 'xml')}
if ok and item.isSelected():
ans['selected'][name] = syntax_from_mime(name, mime)
return ans
def export(self, name):
path = choose_save_file(self, 'tweak_book_export_file', _('Choose location'), filters=[
(_('Files'), [name.rpartition('.')[-1].lower()])], all_files=False, initial_filename=name.split('/')[-1])
if path:
self.export_requested.emit(name, path)
def replace(self, name):
c = current_container()
mt = c.mime_map[name]
oext = name.rpartition('.')[-1].lower()
filters = [oext]
fname = _('Files')
if mt in OEB_DOCS:
fname = _('HTML Files')
filters = 'html htm xhtm xhtml shtml'.split()
elif is_raster_image(mt):
fname = _('Images')
filters = 'jpeg jpg gif png'.split()
path = choose_files(self, 'tweak_book_import_file', _('Choose file'), filters=[(fname, filters)], select_only_single_file=True)
if not path:
return
path = path[0]
ext = path.rpartition('.')[-1].lower()
force_mt = None
if mt in OEB_DOCS:
force_mt = c.guess_type('a.html')
nname = os.path.basename(path)
nname, ext = nname.rpartition('.')[0::2]
nname = nname + '.' + ext.lower()
self.replace_requested.emit(name, path, nname, force_mt)
def link_stylesheets(self, names):
s = self.categories['styles']
sheets = [unicode(s.child(i).data(0, NAME_ROLE) or '') for i in xrange(s.childCount())]
if not sheets:
return error_dialog(self, _('No stylesheets'), _(
'This book currently has no stylesheets. You must first create a stylesheet'
' before linking it.'), show=True)
d = QDialog(self)
d.l = l = QVBoxLayout(d)
d.setLayout(l)
d.setWindowTitle(_('Choose stylesheets'))
d.la = la = QLabel(_('Choose the stylesheets to link. Drag and drop to re-arrange'))
la.setWordWrap(True)
l.addWidget(la)
d.s = s = QListWidget(d)
l.addWidget(s)
s.setDragEnabled(True)
s.setDropIndicatorShown(True)
s.setDragDropMode(self.InternalMove)
s.setAutoScroll(True)
s.setDefaultDropAction(Qt.MoveAction)
for name in sheets:
i = QListWidgetItem(name, s)
flags = Qt.ItemIsEnabled | Qt.ItemIsUserCheckable | Qt.ItemIsDragEnabled | Qt.ItemIsSelectable
i.setFlags(flags)
i.setCheckState(Qt.Checked)
d.r = r = QCheckBox(_('Remove existing links to stylesheets'))
r.setChecked(tprefs['remove_existing_links_when_linking_sheets'])
l.addWidget(r)
d.bb = bb = QDialogButtonBox(QDialogButtonBox.Ok | QDialogButtonBox.Cancel)
bb.accepted.connect(d.accept), bb.rejected.connect(d.reject)
l.addWidget(bb)
if d.exec_() == d.Accepted:
tprefs['remove_existing_links_when_linking_sheets'] = r.isChecked()
sheets = [unicode(s.item(il).text()) for il in xrange(s.count()) if s.item(il).checkState() == Qt.Checked]
if sheets:
self.link_stylesheets_requested.emit(names, sheets, r.isChecked())
class NewFileDialog(QDialog): # {{{
def __init__(self, parent=None):
QDialog.__init__(self, parent)
self.l = l = QVBoxLayout()
self.setLayout(l)
self.la = la = QLabel(_(
'Choose a name for the new (blank) file. To place the file in a'
            ' specific folder in the book, include the folder name, for example: <i>text/chapter1.html</i>'))
la.setWordWrap(True)
self.setWindowTitle(_('Choose file'))
l.addWidget(la)
self.name = n = QLineEdit(self)
n.textChanged.connect(self.update_ok)
l.addWidget(n)
self.err_label = la = QLabel('')
la.setWordWrap(True)
l.addWidget(la)
self.bb = bb = QDialogButtonBox(QDialogButtonBox.Ok | QDialogButtonBox.Cancel)
l.addWidget(bb)
bb.accepted.connect(self.accept)
bb.rejected.connect(self.reject)
self.imp_button = b = bb.addButton(_('Import resource file (image/font/etc.)'), bb.ActionRole)
b.setIcon(QIcon(I('view-image.png')))
b.setToolTip(_('Import a file from your computer as a new'
' file into the book.'))
b.clicked.connect(self.import_file)
self.ok_button = bb.button(bb.Ok)
self.file_data = b''
self.using_template = False
self.setMinimumWidth(350)
def show_error(self, msg):
self.err_label.setText('<p style="color:red">' + msg)
return False
def import_file(self):
path = choose_files(self, 'tweak-book-new-resource-file', _('Choose file'), select_only_single_file=True)
if path:
self.do_import_file(path[0])
def do_import_file(self, path, hide_button=False):
with open(path, 'rb') as f:
self.file_data = f.read()
name = os.path.basename(path)
fmap = get_recommended_folders(current_container(), (name,))
if fmap[name]:
name = '/'.join((fmap[name], name))
self.name.setText(name)
self.la.setText(_('Choose a name for the imported file'))
if hide_button:
self.imp_button.setVisible(False)
@property
def name_is_ok(self):
return name_is_ok(unicode(self.name.text()), self.show_error)
def update_ok(self, *args):
self.ok_button.setEnabled(self.name_is_ok)
def accept(self):
if not self.name_is_ok:
return error_dialog(self, _('No name specified'), _(
'You must specify a name for the new file, with an extension, for example, chapter1.html'), show=True)
name = unicode(self.name.text())
name, ext = name.rpartition('.')[0::2]
name = (name + '.' + ext.lower()).replace('\\', '/')
mt = guess_type(name)
if not self.file_data:
if mt in OEB_DOCS:
self.file_data = template_for('html').encode('utf-8')
self.using_template = True
elif mt in OEB_STYLES:
self.file_data = template_for('css').encode('utf-8')
self.using_template = True
self.file_name = name
QDialog.accept(self)
# }}}
class MergeDialog(QDialog): # {{{
def __init__(self, names, parent=None):
QDialog.__init__(self, parent)
self.setWindowTitle(_('Choose master file'))
self.l = l = QVBoxLayout()
self.setLayout(l)
self.la = la = QLabel(_('Choose the master file. All selected files will be merged into the master file:'))
la.setWordWrap(True)
l.addWidget(la)
self.sa = sa = QScrollArea(self)
l.addWidget(sa)
self.bb = bb = QDialogButtonBox(QDialogButtonBox.Ok | QDialogButtonBox.Cancel)
l.addWidget(bb)
bb.accepted.connect(self.accept)
bb.rejected.connect(self.reject)
self.w = w = QWidget(self)
w.l = QVBoxLayout()
w.setLayout(w.l)
buttons = self.buttons = [QRadioButton(n) for n in names]
buttons[0].setChecked(True)
map(w.l.addWidget, buttons)
sa.setWidget(w)
self.resize(self.sizeHint() + QSize(150, 20))
@property
def ans(self):
for b in self.buttons:
if b.isChecked():
return unicode(b.text())
# }}}
class FileListWidget(QWidget):
def __init__(self, parent=None):
QWidget.__init__(self, parent)
self.setLayout(QGridLayout(self))
self.file_list = FileList(self)
self.layout().addWidget(self.file_list)
self.layout().setContentsMargins(0, 0, 0, 0)
self.forwarded_signals = {k for k, o in vars(self.file_list.__class__).iteritems() if isinstance(o, pyqtSignal) and '_' in k and not hasattr(self, k)}
for x in ('delete_done', 'select_name', 'request_edit', 'mark_name_as_current', 'clear_currently_edited_name'):
setattr(self, x, getattr(self.file_list, x))
def build(self, container, preserve_state=True):
self.file_list.build(container, preserve_state=preserve_state)
@property
def searchable_names(self):
return self.file_list.searchable_names
def __getattr__(self, name):
if name in self.forwarded_signals:
return getattr(self.file_list, name)
return QWidget.__getattr__(self, name)
| gpl-3.0 |
gabelula/b-counted | django/contrib/gis/geos/prototypes/predicates.py | 623 | 1777 | """
This module houses the GEOS ctypes prototype functions for the
unary and binary predicate operations on geometries.
"""
from ctypes import c_char, c_char_p, c_double
from django.contrib.gis.geos.libgeos import GEOM_PTR
from django.contrib.gis.geos.prototypes.errcheck import check_predicate
from django.contrib.gis.geos.prototypes.threadsafe import GEOSFunc
## Binary & unary predicate functions ##
def binary_predicate(func, *args):
"For GEOS binary predicate functions."
argtypes = [GEOM_PTR, GEOM_PTR]
if args: argtypes += args
func.argtypes = argtypes
func.restype = c_char
func.errcheck = check_predicate
return func
def unary_predicate(func):
"For GEOS unary predicate functions."
func.argtypes = [GEOM_PTR]
func.restype = c_char
func.errcheck = check_predicate
return func
## Unary Predicates ##
geos_hasz = unary_predicate(GEOSFunc('GEOSHasZ'))
geos_isempty = unary_predicate(GEOSFunc('GEOSisEmpty'))
geos_isring = unary_predicate(GEOSFunc('GEOSisRing'))
geos_issimple = unary_predicate(GEOSFunc('GEOSisSimple'))
geos_isvalid = unary_predicate(GEOSFunc('GEOSisValid'))
## Binary Predicates ##
geos_contains = binary_predicate(GEOSFunc('GEOSContains'))
geos_crosses = binary_predicate(GEOSFunc('GEOSCrosses'))
geos_disjoint = binary_predicate(GEOSFunc('GEOSDisjoint'))
geos_equals = binary_predicate(GEOSFunc('GEOSEquals'))
geos_equalsexact = binary_predicate(GEOSFunc('GEOSEqualsExact'), c_double)
geos_intersects = binary_predicate(GEOSFunc('GEOSIntersects'))
geos_overlaps = binary_predicate(GEOSFunc('GEOSOverlaps'))
geos_relatepattern = binary_predicate(GEOSFunc('GEOSRelatePattern'), c_char_p)
geos_touches = binary_predicate(GEOSFunc('GEOSTouches'))
geos_within = binary_predicate(GEOSFunc('GEOSWithin'))
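# Usage sketch (geometry pointers are GEOM_PTR values owned by GEOSGeometry
# objects elsewhere in django.contrib.gis.geos; the names below are assumed):
#
#     inside = geos_within(point.ptr, polygon.ptr)  # True/False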
| apache-2.0 |
VishvajitP/python-social-auth | social/storage/mongoengine_orm.py | 83 | 5914 | import base64
import six
from mongoengine import DictField, IntField, StringField, \
EmailField, BooleanField
from mongoengine.queryset import OperationError
from social.storage.base import UserMixin, AssociationMixin, NonceMixin, \
CodeMixin, BaseStorage
UNUSABLE_PASSWORD = '!' # Borrowed from django 1.4
class MongoengineUserMixin(UserMixin):
"""Social Auth association model"""
user = None
provider = StringField(max_length=32)
uid = StringField(max_length=255, unique_with='provider')
extra_data = DictField()
def str_id(self):
return str(self.id)
@classmethod
def get_social_auth_for_user(cls, user, provider=None, id=None):
qs = cls.objects
if provider:
qs = qs.filter(provider=provider)
if id:
qs = qs.filter(id=id)
return qs.filter(user=user.id)
@classmethod
def create_social_auth(cls, user, uid, provider):
        if not isinstance(uid, six.string_types):
uid = str(uid)
return cls.objects.create(user=user.id, uid=uid, provider=provider)
@classmethod
def username_max_length(cls):
username_field = cls.username_field()
field = getattr(cls.user_model(), username_field)
return field.max_length
@classmethod
def username_field(cls):
return getattr(cls.user_model(), 'USERNAME_FIELD', 'username')
@classmethod
def create_user(cls, *args, **kwargs):
kwargs['password'] = UNUSABLE_PASSWORD
if 'email' in kwargs:
# Empty string makes email regex validation fail
kwargs['email'] = kwargs['email'] or None
return cls.user_model().objects.create(*args, **kwargs)
@classmethod
def allowed_to_disconnect(cls, user, backend_name, association_id=None):
if association_id is not None:
qs = cls.objects.filter(id__ne=association_id)
else:
qs = cls.objects.filter(provider__ne=backend_name)
qs = qs.filter(user=user)
if hasattr(user, 'has_usable_password'):
valid_password = user.has_usable_password()
else:
valid_password = True
return valid_password or qs.count() > 0
@classmethod
def changed(cls, user):
user.save()
def set_extra_data(self, extra_data=None):
if super(MongoengineUserMixin, self).set_extra_data(extra_data):
self.save()
@classmethod
def disconnect(cls, entry):
entry.delete()
@classmethod
def user_exists(cls, *args, **kwargs):
"""
Return True/False if a User instance exists with the given arguments.
Arguments are directly passed to filter() manager method.
"""
if 'username' in kwargs:
kwargs[cls.username_field()] = kwargs.pop('username')
return cls.user_model().objects.filter(*args, **kwargs).count() > 0
@classmethod
def get_username(cls, user):
return getattr(user, cls.username_field(), None)
@classmethod
def get_user(cls, pk):
try:
return cls.user_model().objects.get(id=pk)
except cls.user_model().DoesNotExist:
return None
@classmethod
def get_users_by_email(cls, email):
return cls.user_model().objects.filter(email__iexact=email)
@classmethod
def get_social_auth(cls, provider, uid):
if not isinstance(uid, six.string_types):
uid = str(uid)
try:
return cls.objects.get(provider=provider, uid=uid)
except cls.DoesNotExist:
return None
class MongoengineNonceMixin(NonceMixin):
"""One use numbers"""
server_url = StringField(max_length=255)
timestamp = IntField()
salt = StringField(max_length=40)
@classmethod
def use(cls, server_url, timestamp, salt):
return cls.objects.get_or_create(server_url=server_url,
timestamp=timestamp,
salt=salt)[1]
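# Note: use() returns the ``created`` flag from get_or_create(), so a nonce
# is accepted only the first time a given (server_url, timestamp, salt)
# triple is seen; replays return False.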
class MongoengineAssociationMixin(AssociationMixin):
"""OpenId account association"""
server_url = StringField(max_length=255)
handle = StringField(max_length=255)
secret = StringField(max_length=255) # Stored base64 encoded
issued = IntField()
lifetime = IntField()
assoc_type = StringField(max_length=64)
@classmethod
def store(cls, server_url, association):
# Don't use get_or_create because issued cannot be null
try:
assoc = cls.objects.get(server_url=server_url,
handle=association.handle)
except cls.DoesNotExist:
assoc = cls(server_url=server_url,
handle=association.handle)
assoc.secret = base64.encodestring(association.secret)
assoc.issued = association.issued
assoc.lifetime = association.lifetime
assoc.assoc_type = association.assoc_type
assoc.save()
@classmethod
def get(cls, *args, **kwargs):
return cls.objects.filter(*args, **kwargs)
@classmethod
def remove(cls, ids_to_delete):
cls.objects.filter(pk__in=ids_to_delete).delete()
class MongoengineCodeMixin(CodeMixin):
email = EmailField()
code = StringField(max_length=32)
verified = BooleanField(default=False)
@classmethod
def get_code(cls, code):
try:
return cls.objects.get(code=code)
except cls.DoesNotExist:
return None
class BaseMongoengineStorage(BaseStorage):
user = MongoengineUserMixin
nonce = MongoengineNonceMixin
association = MongoengineAssociationMixin
code = MongoengineCodeMixin
@classmethod
def is_integrity_error(cls, exception):
return exception.__class__ is OperationError and \
'E11000' in exception.message
| bsd-3-clause |
marc-sensenich/ansible | lib/ansible/modules/network/f5/bigip_profile_client_ssl.py | 9 | 34018 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright: (c) 2017, F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'certified'}
DOCUMENTATION = r'''
---
module: bigip_profile_client_ssl
short_description: Manages client SSL profiles on a BIG-IP
description:
- Manages client SSL profiles on a BIG-IP.
version_added: 2.5
options:
name:
description:
- Specifies the name of the profile.
required: True
parent:
description:
      - The parent template of this profile. Once this value has
been set, it cannot be changed. By default, this value is the C(clientssl)
parent on the C(Common) partition.
default: /Common/clientssl
ciphers:
description:
- Specifies the list of ciphers that the system supports. When creating a new
profile, the default cipher list is provided by the parent profile.
cert_key_chain:
description:
- One or more certificates and keys to associate with the SSL profile. This
option is always a list. The keys in the list dictate the details of the
client/key/chain combination. Note that BIG-IPs can only have one of each
type of each certificate/key type. This means that you can only have one
RSA, one DSA, and one ECDSA per profile. If you attempt to assign two
        RSA, DSA, or ECDSA certificate/key combos, the device will reject this.
- This list is a complex list that specifies a number of keys.
suboptions:
cert:
description:
- Specifies a cert name for use.
required: True
key:
description:
- Contains a key name.
required: True
chain:
description:
- Contains a certificate chain that is relevant to the certificate and key
mentioned earlier.
- This key is optional.
passphrase:
description:
- Contains the passphrase of the key file, should it require one.
- Passphrases are encrypted on the remote BIG-IP device. Therefore, there is no way
to compare them when updating a client SSL profile. Due to this, if you specify a
passphrase, this module will always register a C(changed) event.
partition:
description:
- Device partition to manage resources on.
default: Common
version_added: 2.5
options:
description:
- Options that the system uses for SSL processing in the form of a list. When
creating a new profile, the list is provided by the parent profile.
      - When a C('') or C(none) value is provided, all options for SSL processing are disabled.
choices:
- netscape-reuse-cipher-change-bug
- microsoft-big-sslv3-buffer
- msie-sslv2-rsa-padding
- ssleay-080-client-dh-bug
- tls-d5-bug
- tls-block-padding-bug
- dont-insert-empty-fragments
- no-ssl
- no-dtls
- no-session-resumption-on-renegotiation
- no-tlsv1.1
- no-tlsv1.2
- single-dh-use
- ephemeral-rsa
- cipher-server-preference
- tls-rollback-bug
- no-sslv2
- no-sslv3
- no-tls
- no-tlsv1
- pkcs1-check-1
- pkcs1-check-2
- netscape-ca-dn-bug
- netscape-demo-cipher-change-bug
- "none"
version_added: 2.7
secure_renegotiation:
description:
      - Specifies the method of secure renegotiation for SSL connections. When
        creating a new profile, the setting is provided by the parent profile.
      - When C(request) is set, the system requests secure renegotiation of SSL
        connections.
      - C(require) is the default setting; when set, the system permits initial SSL
        handshakes from clients but terminates renegotiations from unpatched clients.
      - When C(require-strict) is set, the system requires strict renegotiation of
        SSL connections. In this mode the system refuses connections to insecure
        servers and terminates existing SSL connections to insecure servers.
choices:
- require
- require-strict
- request
version_added: 2.7
allow_non_ssl:
description:
- Enables or disables acceptance of non-SSL connections.
- When creating a new profile, the setting is provided by the parent profile.
type: bool
version_added: 2.7
server_name:
description:
- Specifies the fully qualified DNS hostname of the server used in Server Name Indication communications.
When creating a new profile, the setting is provided by the parent profile.
- The server name can also be a wildcard string containing the asterisk C(*) character.
version_added: 2.8
sni_default:
description:
- Indicates that the system uses this profile as the default SSL profile when there is no match to the
server name, or when the client provides no SNI extension support.
- When creating a new profile, the setting is provided by the parent profile.
- There can be only one SSL profile with this setting enabled.
type: bool
version_added: 2.8
sni_require:
description:
- Requires that the network peers also provide SNI support. This setting only takes effect when C(sni_default) is
set to C(true). When creating a new profile, the setting is provided by the parent profile.
type: bool
version_added: 2.8
client_certificate:
description:
- Specifies the way the system handles client certificates.
- When C(ignore), specifies that the system ignores certificates from client
systems.
- When C(require), specifies that the system requires a client to present a
valid certificate.
- When C(request), specifies that the system requests a valid certificate from a
        client but always authenticates the client.
choices:
- ignore
- require
- request
version_added: 2.8
client_auth_frequency:
description:
- Specifies the frequency of client authentication for an SSL session.
- When C(once), specifies that the system authenticates the client once for an
SSL session.
- When C(always), specifies that the system authenticates the client once for an
SSL session and also upon reuse of that session.
choices:
- once
- always
version_added: 2.8
retain_certificate:
description:
- When C(yes), client certificate is retained in SSL session.
type: bool
version_added: 2.8
cert_auth_depth:
description:
- Specifies the maximum number of certificates to be traversed in a client
certificate chain.
version_added: 2.8
trusted_cert_authority:
description:
- Specifies a client CA that the system trusts.
version_added: 2.8
advertised_cert_authority:
description:
      - Specifies the certificate authorities that the system advertises to clients
        as being trusted by the profile.
version_added: 2.8
client_auth_crl:
description:
- Specifies the name of a file containing a list of revoked client certificates.
version_added: 2.8
allow_expired_crl:
description:
- Instructs the system to use the specified CRL file even if it has expired.
type: bool
version_added: 2.8
state:
description:
- When C(present), ensures that the profile exists.
- When C(absent), ensures the profile is removed.
default: present
choices:
- present
- absent
version_added: 2.5
notes:
- Requires BIG-IP software version >= 12
extends_documentation_fragment: f5
author:
- Tim Rupp (@caphrim007)
- Wojciech Wypior (@wojtek0806)
'''
EXAMPLES = r'''
- name: Create client SSL profile
bigip_profile_client_ssl:
state: present
name: my_profile
provider:
server: lb.mydomain.com
user: admin
password: secret
delegate_to: localhost
- name: Create client SSL profile with specific ciphers
bigip_profile_client_ssl:
state: present
name: my_profile
ciphers: "!SSLv3:!SSLv2:ECDHE+AES-GCM+SHA256:ECDHE-RSA-AES128-CBC-SHA"
provider:
server: lb.mydomain.com
user: admin
password: secret
delegate_to: localhost
- name: Create client SSL profile with specific SSL options
bigip_profile_client_ssl:
state: present
name: my_profile
options:
- no-sslv2
- no-sslv3
provider:
server: lb.mydomain.com
user: admin
password: secret
delegate_to: localhost
- name: Create client SSL profile that requests secure renegotiation
bigip_profile_client_ssl:
state: present
name: my_profile
    secure_renegotiation: request
provider:
server: lb.mydomain.com
user: admin
password: secret
delegate_to: localhost
- name: Create a client SSL profile with a cert/key/chain setting
bigip_profile_client_ssl:
state: present
name: my_profile
cert_key_chain:
- cert: bigip_ssl_cert1
key: bigip_ssl_key1
chain: bigip_ssl_cert1
provider:
server: lb.mydomain.com
user: admin
password: secret
delegate_to: localhost
'''
RETURN = r'''
ciphers:
description: The ciphers applied to the profile.
returned: changed
type: str
sample: "!SSLv3:!SSLv2:ECDHE+AES-GCM+SHA256:ECDHE-RSA-AES128-CBC-SHA"
options:
description: The list of options for SSL processing.
returned: changed
type: list
sample: ['no-sslv2', 'no-sslv3']
secure_renegotiation:
description: The method of secure SSL renegotiation.
returned: changed
type: str
sample: request
allow_non_ssl:
description: Acceptance of non-SSL connections.
returned: changed
type: bool
sample: yes
'''
import os
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.basic import env_fallback
from ansible.module_utils.six import iteritems
try:
from library.module_utils.network.f5.bigip import F5RestClient
from library.module_utils.network.f5.common import F5ModuleError
from library.module_utils.network.f5.common import AnsibleF5Parameters
from library.module_utils.network.f5.common import cleanup_tokens
from library.module_utils.network.f5.common import fq_name
from library.module_utils.network.f5.common import f5_argument_spec
from library.module_utils.network.f5.common import flatten_boolean
from library.module_utils.network.f5.common import transform_name
from library.module_utils.network.f5.common import exit_json
from library.module_utils.network.f5.common import fail_json
from library.module_utils.network.f5.common import is_empty_list
except ImportError:
from ansible.module_utils.network.f5.bigip import F5RestClient
from ansible.module_utils.network.f5.common import F5ModuleError
from ansible.module_utils.network.f5.common import AnsibleF5Parameters
from ansible.module_utils.network.f5.common import cleanup_tokens
from ansible.module_utils.network.f5.common import fq_name
from ansible.module_utils.network.f5.common import f5_argument_spec
from ansible.module_utils.network.f5.common import flatten_boolean
from ansible.module_utils.network.f5.common import transform_name
from ansible.module_utils.network.f5.common import exit_json
from ansible.module_utils.network.f5.common import fail_json
from ansible.module_utils.network.f5.common import is_empty_list
class Parameters(AnsibleF5Parameters):
api_map = {
'certKeyChain': 'cert_key_chain',
'defaultsFrom': 'parent',
'allowNonSsl': 'allow_non_ssl',
'secureRenegotiation': 'secure_renegotiation',
'tmOptions': 'options',
'sniDefault': 'sni_default',
'sniRequire': 'sni_require',
'serverName': 'server_name',
'peerCertMode': 'client_certificate',
'authenticate': 'client_auth_frequency',
'retainCertificate': 'retain_certificate',
'authenticateDepth': 'cert_auth_depth',
'caFile': 'trusted_cert_authority',
'clientCertCa': 'advertised_cert_authority',
'crlFile': 'client_auth_crl',
'allowExpiredCrl': 'allow_expired_crl',
}
api_attributes = [
'ciphers',
'certKeyChain',
'defaultsFrom',
'tmOptions',
'secureRenegotiation',
'allowNonSsl',
'sniDefault',
'sniRequire',
'serverName',
'peerCertMode',
'authenticate',
'retainCertificate',
'authenticateDepth',
'caFile',
'clientCertCa',
'crlFile',
'allowExpiredCrl',
]
returnables = [
'ciphers',
'allow_non_ssl',
'options',
'secure_renegotiation',
'cert_key_chain',
'parent',
'sni_default',
'sni_require',
'server_name',
'client_certificate',
'client_auth_frequency',
'retain_certificate',
'cert_auth_depth',
'trusted_cert_authority',
'advertised_cert_authority',
'client_auth_crl',
'allow_expired_crl',
]
updatables = [
'ciphers',
'cert_key_chain',
'allow_non_ssl',
'options',
'secure_renegotiation',
'sni_default',
'sni_require',
'server_name',
'client_certificate',
'client_auth_frequency',
'retain_certificate',
'cert_auth_depth',
'trusted_cert_authority',
'advertised_cert_authority',
'client_auth_crl',
'allow_expired_crl',
]
@property
def retain_certificate(self):
return flatten_boolean(self._values['retain_certificate'])
@property
def allow_expired_crl(self):
return flatten_boolean(self._values['allow_expired_crl'])
class ModuleParameters(Parameters):
def _key_filename(self, name):
if name.endswith('.key'):
return name
else:
return name + '.key'
def _cert_filename(self, name):
if name.endswith('.crt'):
return name
else:
return name + '.crt'
def _get_chain_value(self, item):
if 'chain' not in item or item['chain'] == 'none':
result = 'none'
else:
result = self._cert_filename(fq_name(self.partition, item['chain']))
return result
@property
def parent(self):
if self._values['parent'] is None:
return None
if self._values['parent'] == 'clientssl':
return '/Common/clientssl'
result = fq_name(self.partition, self._values['parent'])
return result
@property
def cert_key_chain(self):
if self._values['cert_key_chain'] is None:
return None
result = []
for item in self._values['cert_key_chain']:
if 'key' in item and 'cert' not in item:
raise F5ModuleError(
"When providing a 'key', you must also provide a 'cert'"
)
if 'cert' in item and 'key' not in item:
raise F5ModuleError(
"When providing a 'cert', you must also provide a 'key'"
)
key = self._key_filename(item['key'])
cert = self._cert_filename(item['cert'])
chain = self._get_chain_value(item)
name = os.path.basename(cert)
filename, ex = os.path.splitext(name)
tmp = {
'name': filename,
'cert': fq_name(self.partition, cert),
'key': fq_name(self.partition, key),
'chain': chain
}
if 'passphrase' in item:
tmp['passphrase'] = item['passphrase']
result.append(tmp)
result = sorted(result, key=lambda x: x['name'])
return result
@property
def allow_non_ssl(self):
result = flatten_boolean(self._values['allow_non_ssl'])
if result is None:
return None
if result == 'yes':
return 'enabled'
return 'disabled'
@property
def options(self):
options = self._values['options']
if options is None:
return None
if is_empty_list(options):
return []
return options
@property
def sni_require(self):
require = flatten_boolean(self._values['sni_require'])
default = self.sni_default
if require is None:
return None
if default in [None, False]:
if require == 'yes':
raise F5ModuleError(
"Cannot set 'sni_require' to {0} if 'sni_default' is set as {1}".format(require, default))
if require == 'yes':
return True
else:
return False
@property
def trusted_cert_authority(self):
if self._values['trusted_cert_authority'] is None:
return None
if self._values['trusted_cert_authority'] in ['', 'none']:
return ''
result = fq_name(self.partition, self._values['trusted_cert_authority'])
return result
@property
def advertised_cert_authority(self):
if self._values['advertised_cert_authority'] is None:
return None
if self._values['advertised_cert_authority'] in ['', 'none']:
return ''
result = fq_name(self.partition, self._values['advertised_cert_authority'])
return result
@property
def client_auth_crl(self):
if self._values['client_auth_crl'] is None:
return None
if self._values['client_auth_crl'] in ['', 'none']:
return ''
result = fq_name(self.partition, self._values['client_auth_crl'])
return result
class ApiParameters(Parameters):
@property
def cert_key_chain(self):
if self._values['cert_key_chain'] is None:
return None
result = []
for item in self._values['cert_key_chain']:
tmp = dict(
name=item['name'],
)
for x in ['cert', 'key', 'chain', 'passphrase']:
if x in item:
tmp[x] = item[x]
if 'chain' not in item:
tmp['chain'] = 'none'
result.append(tmp)
result = sorted(result, key=lambda y: y['name'])
return result
@property
def sni_default(self):
result = self._values['sni_default']
if result is None:
return None
if result == 'true':
return True
else:
return False
@property
def sni_require(self):
result = self._values['sni_require']
if result is None:
return None
if result == 'true':
return True
else:
return False
@property
def trusted_cert_authority(self):
if self._values['trusted_cert_authority'] in [None, 'none']:
return None
return self._values['trusted_cert_authority']
@property
def advertised_cert_authority(self):
if self._values['advertised_cert_authority'] in [None, 'none']:
return None
return self._values['advertised_cert_authority']
@property
def client_auth_crl(self):
if self._values['client_auth_crl'] in [None, 'none']:
return None
return self._values['client_auth_crl']
class Changes(Parameters):
def to_return(self):
result = {}
try:
for returnable in self.returnables:
result[returnable] = getattr(self, returnable)
result = self._filter_params(result)
except Exception:
pass
return result
class UsableChanges(Changes):
@property
def retain_certificate(self):
if self._values['retain_certificate'] is None:
return None
elif self._values['retain_certificate'] == 'yes':
return 'true'
return 'false'
@property
def allow_expired_crl(self):
if self._values['allow_expired_crl'] is None:
return None
elif self._values['allow_expired_crl'] == 'yes':
return 'enabled'
return 'disabled'
class ReportableChanges(Changes):
@property
def allow_non_ssl(self):
if self._values['allow_non_ssl'] is None:
return None
elif self._values['allow_non_ssl'] == 'enabled':
return 'yes'
return 'no'
@property
def retain_certificate(self):
return flatten_boolean(self._values['retain_certificate'])
@property
def allow_expired_crl(self):
return flatten_boolean(self._values['allow_expired_crl'])
class Difference(object):
def __init__(self, want, have=None):
self.want = want
self.have = have
def compare(self, param):
try:
result = getattr(self, param)
return result
except AttributeError:
result = self.__default(param)
return result
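    # compare() dispatches to a property named after the parameter when one
    # is defined on this class (e.g. `options`, `parent`); otherwise
    # __default() falls back to a plain want-vs-have attribute comparison.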
def __default(self, param):
attr1 = getattr(self.want, param)
try:
attr2 = getattr(self.have, param)
if attr1 != attr2:
return attr1
except AttributeError:
return attr1
def to_tuple(self, items):
result = []
for x in items:
tmp = [(str(k), str(v)) for k, v in iteritems(x)]
result += tmp
return result
def _diff_complex_items(self, want, have):
if want == [] and have is None:
return None
if want is None:
return None
w = self.to_tuple(want)
h = self.to_tuple(have)
if set(w).issubset(set(h)):
return None
else:
return want
@property
def parent(self):
if self.want.parent != self.have.parent:
raise F5ModuleError(
"The parent profile cannot be changed"
)
@property
def cert_key_chain(self):
result = self._diff_complex_items(self.want.cert_key_chain, self.have.cert_key_chain)
return result
@property
def options(self):
if self.want.options is None:
return None
if not self.want.options:
if self.have.options is None:
return None
if not self.have.options:
return None
if self.have.options is not None:
return self.want.options
if self.have.options is None:
return self.want.options
if set(self.want.options) != set(self.have.options):
return self.want.options
@property
def sni_require(self):
if self.want.sni_require is None:
return None
if self.want.sni_require is False:
if self.have.sni_default is True and self.want.sni_default is None:
raise F5ModuleError(
"Cannot set 'sni_require' to {0} if 'sni_default' is {1}".format(
self.want.sni_require, self.have.sni_default)
)
if self.want.sni_require == self.have.sni_require:
return None
return self.want.sni_require
@property
def trusted_cert_authority(self):
if self.want.trusted_cert_authority is None:
return None
if self.want.trusted_cert_authority == '' and self.have.trusted_cert_authority is None:
return None
if self.want.trusted_cert_authority != self.have.trusted_cert_authority:
return self.want.trusted_cert_authority
@property
def advertised_cert_authority(self):
if self.want.advertised_cert_authority is None:
return None
if self.want.advertised_cert_authority == '' and self.have.advertised_cert_authority is None:
return None
if self.want.advertised_cert_authority != self.have.advertised_cert_authority:
return self.want.advertised_cert_authority
@property
def client_auth_crl(self):
if self.want.client_auth_crl is None:
return None
if self.want.client_auth_crl == '' and self.have.client_auth_crl is None:
return None
if self.want.client_auth_crl != self.have.client_auth_crl:
return self.want.client_auth_crl
class ModuleManager(object):
def __init__(self, *args, **kwargs):
self.module = kwargs.get('module', None)
self.client = kwargs.get('client', None)
self.want = ModuleParameters(params=self.module.params)
self.have = ApiParameters()
self.changes = UsableChanges()
def _set_changed_options(self):
changed = {}
for key in Parameters.returnables:
if getattr(self.want, key) is not None:
changed[key] = getattr(self.want, key)
if changed:
self.changes = UsableChanges(params=changed)
def _update_changed_options(self):
diff = Difference(self.want, self.have)
updatables = Parameters.updatables
changed = dict()
for k in updatables:
change = diff.compare(k)
if change is None:
continue
else:
if isinstance(change, dict):
changed.update(change)
else:
changed[k] = change
if changed:
self.changes = UsableChanges(params=changed)
return True
return False
def should_update(self):
result = self._update_changed_options()
if result:
return True
return False
def exec_module(self):
changed = False
result = dict()
state = self.want.state
if state == "present":
changed = self.present()
elif state == "absent":
changed = self.absent()
reportable = ReportableChanges(params=self.changes.to_return())
changes = reportable.to_return()
result.update(**changes)
result.update(dict(changed=changed))
self._announce_deprecations(result)
return result
def _announce_deprecations(self, result):
warnings = result.pop('__warnings', [])
for warning in warnings:
self.client.module.deprecate(
msg=warning['msg'],
version=warning['version']
)
def present(self):
if self.exists():
return self.update()
else:
return self.create()
def exists(self):
uri = "https://{0}:{1}/mgmt/tm/ltm/profile/client-ssl/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(self.want.partition, self.want.name)
)
resp = self.client.api.get(uri)
try:
response = resp.json()
except ValueError:
return False
if resp.status == 404 or 'code' in response and response['code'] == 404:
return False
return True
def update(self):
self.have = self.read_current_from_device()
if not self.should_update():
return False
if self.module.check_mode:
return True
self.update_on_device()
return True
def remove(self):
if self.module.check_mode:
return True
self.remove_from_device()
if self.exists():
raise F5ModuleError("Failed to delete the resource.")
return True
def create(self):
self._set_changed_options()
if self.module.check_mode:
return True
self.create_on_device()
return True
def create_on_device(self):
params = self.changes.api_params()
params['name'] = self.want.name
params['partition'] = self.want.partition
uri = "https://{0}:{1}/mgmt/tm/ltm/profile/client-ssl/".format(
self.client.provider['server'],
self.client.provider['server_port']
)
resp = self.client.api.post(uri, json=params)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if 'code' in response and response['code'] in [400, 403, 404]:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp.content)
def update_on_device(self):
params = self.changes.api_params()
uri = "https://{0}:{1}/mgmt/tm/ltm/profile/client-ssl/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(self.want.partition, self.want.name)
)
resp = self.client.api.patch(uri, json=params)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if 'code' in response and response['code'] in [400, 404]:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp.content)
def absent(self):
if self.exists():
return self.remove()
return False
def remove_from_device(self):
uri = "https://{0}:{1}/mgmt/tm/ltm/profile/client-ssl/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(self.want.partition, self.want.name)
)
response = self.client.api.delete(uri)
if response.status == 200:
return True
raise F5ModuleError(response.content)
def read_current_from_device(self):
uri = "https://{0}:{1}/mgmt/tm/ltm/profile/client-ssl/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(self.want.partition, self.want.name)
)
resp = self.client.api.get(uri)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if 'code' in response and response['code'] == 400:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp.content)
return ApiParameters(params=response)
class ArgumentSpec(object):
def __init__(self):
self.supports_check_mode = True
argument_spec = dict(
name=dict(required=True),
parent=dict(default='/Common/clientssl'),
ciphers=dict(),
allow_non_ssl=dict(type='bool'),
secure_renegotiation=dict(
choices=['require', 'require-strict', 'request']
),
options=dict(
type='list',
choices=[
'netscape-reuse-cipher-change-bug',
'microsoft-big-sslv3-buffer',
'msie-sslv2-rsa-padding',
'ssleay-080-client-dh-bug',
'tls-d5-bug',
'tls-block-padding-bug',
'dont-insert-empty-fragments',
'no-ssl',
'no-dtls',
'no-session-resumption-on-renegotiation',
'no-tlsv1.1',
'no-tlsv1.2',
'single-dh-use',
'ephemeral-rsa',
'cipher-server-preference',
'tls-rollback-bug',
'no-sslv2',
'no-sslv3',
'no-tls',
'no-tlsv1',
'pkcs1-check-1',
'pkcs1-check-2',
'netscape-ca-dn-bug',
'netscape-demo-cipher-change-bug',
'none',
]
),
cert_key_chain=dict(
type='list',
options=dict(
cert=dict(required=True),
key=dict(required=True),
chain=dict(),
passphrase=dict()
)
),
state=dict(
default='present',
choices=['present', 'absent']
),
sni_default=dict(type='bool'),
sni_require=dict(type='bool'),
server_name=dict(),
client_certificate=dict(
choices=['require', 'ignore', 'request']
),
client_auth_frequency=dict(
choices=['once', 'always']
),
cert_auth_depth=dict(type='int'),
retain_certificate=dict(type='bool'),
trusted_cert_authority=dict(),
advertised_cert_authority=dict(),
client_auth_crl=dict(),
allow_expired_crl=dict(type='bool'),
partition=dict(
default='Common',
fallback=(env_fallback, ['F5_PARTITION'])
)
)
self.argument_spec = {}
self.argument_spec.update(f5_argument_spec)
self.argument_spec.update(argument_spec)
def main():
spec = ArgumentSpec()
module = AnsibleModule(
argument_spec=spec.argument_spec,
supports_check_mode=spec.supports_check_mode,
)
client = F5RestClient(**module.params)
try:
mm = ModuleManager(module=module, client=client)
results = mm.exec_module()
cleanup_tokens(client)
exit_json(module, results, client)
except F5ModuleError as ex:
cleanup_tokens(client)
fail_json(module, ex, client)
if __name__ == '__main__':
main()
| gpl-3.0 |
freephys/python_ase | ase/transport/selfenergy.py | 14 | 2861 | import numpy as np
class LeadSelfEnergy:
conv = 1e-8 # Convergence criteria for surface Green function
def __init__(self, hs_dii, hs_dij, hs_dim, eta=1e-4):
self.h_ii, self.s_ii = hs_dii # onsite principal layer
self.h_ij, self.s_ij = hs_dij # coupling between principal layers
self.h_im, self.s_im = hs_dim # coupling to the central region
self.nbf = self.h_im.shape[1] # nbf for the scattering region
self.eta = eta
self.energy = None
self.bias = 0
self.sigma_mm = np.empty((self.nbf, self.nbf), complex)
def retarded(self, energy):
"""Return self-energy (sigma) evaluated at specified energy."""
if energy != self.energy:
self.energy = energy
z = energy - self.bias + self.eta * 1.j
tau_im = z * self.s_im - self.h_im
a_im = np.linalg.solve(self.get_sgfinv(energy), tau_im)
tau_mi = z * self.s_im.T.conj() - self.h_im.T.conj()
self.sigma_mm[:] = np.dot(tau_mi, a_im)
return self.sigma_mm
def set_bias(self, bias):
self.bias = bias
def get_lambda(self, energy):
"""Return the lambda (aka Gamma) defined by i(S-S^d).
Here S is the retarded selfenergy, and d denotes the hermitian
conjugate.
"""
sigma_mm = self.retarded(energy)
return 1.j * (sigma_mm - sigma_mm.T.conj())
def get_sgfinv(self, energy):
"""The inverse of the retarded surface Green function"""
z = energy - self.bias + self.eta * 1.j
v_00 = z * self.s_ii.T.conj() - self.h_ii.T.conj()
v_11 = v_00.copy()
v_10 = z * self.s_ij - self.h_ij
v_01 = z * self.s_ij.T.conj() - self.h_ij.T.conj()
delta = self.conv + 1
while delta > self.conv:
a = np.linalg.solve(v_11, v_01)
b = np.linalg.solve(v_11, v_10)
v_01_dot_b = np.dot(v_01, b)
v_00 -= v_01_dot_b
v_11 -= np.dot(v_10, a)
v_11 -= v_01_dot_b
v_01 = -np.dot(v_01, a)
v_10 = -np.dot(v_10, b)
delta = abs(v_01).max()
return v_00
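# Usage sketch (illustrative toy values, not from the original module): the
# three (H, S) pairs describe the lead onsite block, the coupling between
# principal layers, and the coupling to the scattering region.
#
#     import numpy as np
#     h = np.array([[0.0]]); s = np.array([[1.0]])
#     t = np.array([[-1.0]]); st = np.array([[0.0]])
#     se = LeadSelfEnergy((h, s), (t, st), (t, st))
#     sigma = se.retarded(0.5)    # retarded self-energy at E = 0.5
#     gamma = se.get_lambda(0.5)  # broadening matrix i(S - S^dagger)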
class BoxProbe:
"""Box shaped Buttinger probe.
Kramers-kroning: real = H(imag); imag = -H(real)
"""
def __init__(self, eta, a, b, energies, S, T=0.3):
from Transport.Hilbert import hilbert
se = np.empty(len(energies), complex)
se.imag = .5 * (np.tanh(.5 * (energies - a) / T) -
np.tanh(.5 * (energies - b) / T))
se.real = hilbert(se.imag)
se.imag -= 1
self.selfenergy_e = eta * se
self.energies = energies
self.S = S
def retarded(self, energy):
return self.selfenergy_e[self.energies.searchsorted(energy)] * self.S
| gpl-3.0 |
shipci/sympy | sympy/polys/tests/test_polyoptions.py | 24 | 11998 | """Tests for options manager for :class:`Poly` and public API functions. """
from sympy.polys.polyoptions import (
Options, Expand, Gens, Wrt, Sort, Order, Field, Greedy, Domain,
Split, Gaussian, Extension, Modulus, Symmetric, Strict, Auto,
Frac, Formal, Polys, Include, All, Gen, Symbols, Method)
from sympy.polys.orderings import lex
from sympy.polys.domains import FF, GF, ZZ, QQ, RR, EX
from sympy.polys.polyerrors import OptionError, GeneratorsError
from sympy import Integer, Symbol, I, sqrt
from sympy.utilities.pytest import raises
from sympy.abc import x, y, z
def test_Options_clone():
opt = Options((x, y, z), {'domain': 'ZZ'})
assert opt.gens == (x, y, z)
assert opt.domain == ZZ
assert ('order' in opt) is False
new_opt = opt.clone({'gens': (x, y), 'order': 'lex'})
assert opt.gens == (x, y, z)
assert opt.domain == ZZ
assert ('order' in opt) is False
assert new_opt.gens == (x, y)
assert new_opt.domain == ZZ
assert ('order' in new_opt) is True
def test_Expand_preprocess():
assert Expand.preprocess(False) is False
assert Expand.preprocess(True) is True
assert Expand.preprocess(0) is False
assert Expand.preprocess(1) is True
raises(OptionError, lambda: Expand.preprocess(x))
def test_Expand_postprocess():
opt = {'expand': True}
Expand.postprocess(opt)
assert opt == {'expand': True}
def test_Gens_preprocess():
assert Gens.preprocess((None,)) == ()
assert Gens.preprocess((x, y, z)) == (x, y, z)
assert Gens.preprocess(((x, y, z),)) == (x, y, z)
a = Symbol('a', commutative=False)
raises(GeneratorsError, lambda: Gens.preprocess((x, x, y)))
raises(GeneratorsError, lambda: Gens.preprocess((x, y, a)))
def test_Gens_postprocess():
opt = {'gens': (x, y)}
Gens.postprocess(opt)
assert opt == {'gens': (x, y)}
def test_Wrt_preprocess():
assert Wrt.preprocess(x) == ['x']
assert Wrt.preprocess('') == []
assert Wrt.preprocess(' ') == []
assert Wrt.preprocess('x,y') == ['x', 'y']
assert Wrt.preprocess('x y') == ['x', 'y']
assert Wrt.preprocess('x, y') == ['x', 'y']
assert Wrt.preprocess('x , y') == ['x', 'y']
assert Wrt.preprocess(' x, y') == ['x', 'y']
assert Wrt.preprocess(' x, y') == ['x', 'y']
assert Wrt.preprocess([x, y]) == ['x', 'y']
raises(OptionError, lambda: Wrt.preprocess(','))
raises(OptionError, lambda: Wrt.preprocess(0))
def test_Wrt_postprocess():
opt = {'wrt': ['x']}
Wrt.postprocess(opt)
assert opt == {'wrt': ['x']}
def test_Sort_preprocess():
assert Sort.preprocess([x, y, z]) == ['x', 'y', 'z']
assert Sort.preprocess((x, y, z)) == ['x', 'y', 'z']
assert Sort.preprocess('x > y > z') == ['x', 'y', 'z']
assert Sort.preprocess('x>y>z') == ['x', 'y', 'z']
raises(OptionError, lambda: Sort.preprocess(0))
raises(OptionError, lambda: Sort.preprocess(set([x, y, z])))
def test_Sort_postprocess():
opt = {'sort': 'x > y'}
Sort.postprocess(opt)
assert opt == {'sort': 'x > y'}
def test_Order_preprocess():
assert Order.preprocess('lex') == lex
def test_Order_postprocess():
opt = {'order': True}
Order.postprocess(opt)
assert opt == {'order': True}
def test_Field_preprocess():
assert Field.preprocess(False) is False
assert Field.preprocess(True) is True
assert Field.preprocess(0) is False
assert Field.preprocess(1) is True
raises(OptionError, lambda: Field.preprocess(x))
def test_Field_postprocess():
opt = {'field': True}
Field.postprocess(opt)
assert opt == {'field': True}
def test_Greedy_preprocess():
assert Greedy.preprocess(False) is False
assert Greedy.preprocess(True) is True
assert Greedy.preprocess(0) is False
assert Greedy.preprocess(1) is True
raises(OptionError, lambda: Greedy.preprocess(x))
def test_Greedy_postprocess():
opt = {'greedy': True}
Greedy.postprocess(opt)
assert opt == {'greedy': True}
def test_Domain_preprocess():
assert Domain.preprocess(ZZ) == ZZ
assert Domain.preprocess(QQ) == QQ
assert Domain.preprocess(EX) == EX
assert Domain.preprocess(FF(2)) == FF(2)
assert Domain.preprocess(ZZ[x, y]) == ZZ[x, y]
assert Domain.preprocess('Z') == ZZ
assert Domain.preprocess('Q') == QQ
assert Domain.preprocess('ZZ') == ZZ
assert Domain.preprocess('QQ') == QQ
assert Domain.preprocess('EX') == EX
assert Domain.preprocess('FF(23)') == FF(23)
assert Domain.preprocess('GF(23)') == GF(23)
raises(OptionError, lambda: Domain.preprocess('Z[]'))
assert Domain.preprocess('Z[x]') == ZZ[x]
assert Domain.preprocess('Q[x]') == QQ[x]
assert Domain.preprocess('ZZ[x]') == ZZ[x]
assert Domain.preprocess('QQ[x]') == QQ[x]
assert Domain.preprocess('Z[x,y]') == ZZ[x, y]
assert Domain.preprocess('Q[x,y]') == QQ[x, y]
assert Domain.preprocess('ZZ[x,y]') == ZZ[x, y]
assert Domain.preprocess('QQ[x,y]') == QQ[x, y]
raises(OptionError, lambda: Domain.preprocess('Z()'))
assert Domain.preprocess('Z(x)') == ZZ.frac_field(x)
assert Domain.preprocess('Q(x)') == QQ.frac_field(x)
assert Domain.preprocess('ZZ(x)') == ZZ.frac_field(x)
assert Domain.preprocess('QQ(x)') == QQ.frac_field(x)
assert Domain.preprocess('Z(x,y)') == ZZ.frac_field(x, y)
assert Domain.preprocess('Q(x,y)') == QQ.frac_field(x, y)
assert Domain.preprocess('ZZ(x,y)') == ZZ.frac_field(x, y)
assert Domain.preprocess('QQ(x,y)') == QQ.frac_field(x, y)
assert Domain.preprocess('Q<I>') == QQ.algebraic_field(I)
assert Domain.preprocess('QQ<I>') == QQ.algebraic_field(I)
assert Domain.preprocess('Q<sqrt(2), I>') == QQ.algebraic_field(sqrt(2), I)
assert Domain.preprocess(
'QQ<sqrt(2), I>') == QQ.algebraic_field(sqrt(2), I)
raises(OptionError, lambda: Domain.preprocess('abc'))
def test_Domain_postprocess():
raises(GeneratorsError, lambda: Domain.postprocess({'gens': (x, y),
'domain': ZZ[y, z]}))
raises(GeneratorsError, lambda: Domain.postprocess({'gens': (),
'domain': EX}))
raises(GeneratorsError, lambda: Domain.postprocess({'domain': EX}))
def test_Split_preprocess():
assert Split.preprocess(False) is False
assert Split.preprocess(True) is True
assert Split.preprocess(0) is False
assert Split.preprocess(1) is True
raises(OptionError, lambda: Split.preprocess(x))
def test_Split_postprocess():
raises(NotImplementedError, lambda: Split.postprocess({'split': True}))
def test_Gaussian_preprocess():
assert Gaussian.preprocess(False) is False
assert Gaussian.preprocess(True) is True
assert Gaussian.preprocess(0) is False
assert Gaussian.preprocess(1) is True
raises(OptionError, lambda: Gaussian.preprocess(x))
def test_Gaussian_postprocess():
opt = {'gaussian': True}
Gaussian.postprocess(opt)
assert opt == {
'gaussian': True,
'extension': set([I]),
'domain': QQ.algebraic_field(I),
}
def test_Extension_preprocess():
assert Extension.preprocess(True) is True
assert Extension.preprocess(1) is True
assert Extension.preprocess([]) is None
assert Extension.preprocess(sqrt(2)) == set([sqrt(2)])
assert Extension.preprocess([sqrt(2)]) == set([sqrt(2)])
assert Extension.preprocess([sqrt(2), I]) == set([sqrt(2), I])
raises(OptionError, lambda: Extension.preprocess(False))
raises(OptionError, lambda: Extension.preprocess(0))
def test_Extension_postprocess():
opt = {'extension': set([sqrt(2)])}
Extension.postprocess(opt)
assert opt == {
'extension': set([sqrt(2)]),
'domain': QQ.algebraic_field(sqrt(2)),
}
opt = {'extension': True}
Extension.postprocess(opt)
assert opt == {'extension': True}
def test_Modulus_preprocess():
assert Modulus.preprocess(23) == 23
assert Modulus.preprocess(Integer(23)) == 23
raises(OptionError, lambda: Modulus.preprocess(0))
raises(OptionError, lambda: Modulus.preprocess(x))
def test_Modulus_postprocess():
opt = {'modulus': 5}
Modulus.postprocess(opt)
assert opt == {
'modulus': 5,
'domain': FF(5),
}
opt = {'modulus': 5, 'symmetric': False}
Modulus.postprocess(opt)
assert opt == {
'modulus': 5,
'domain': FF(5, False),
'symmetric': False,
}
def test_Symmetric_preprocess():
assert Symmetric.preprocess(False) is False
assert Symmetric.preprocess(True) is True
assert Symmetric.preprocess(0) is False
assert Symmetric.preprocess(1) is True
raises(OptionError, lambda: Symmetric.preprocess(x))
def test_Symmetric_postprocess():
opt = {'symmetric': True}
Symmetric.postprocess(opt)
assert opt == {'symmetric': True}
def test_Strict_preprocess():
assert Strict.preprocess(False) is False
assert Strict.preprocess(True) is True
assert Strict.preprocess(0) is False
assert Strict.preprocess(1) is True
raises(OptionError, lambda: Strict.preprocess(x))
def test_Strict_postprocess():
opt = {'strict': True}
Strict.postprocess(opt)
assert opt == {'strict': True}
def test_Auto_preprocess():
assert Auto.preprocess(False) is False
assert Auto.preprocess(True) is True
assert Auto.preprocess(0) is False
assert Auto.preprocess(1) is True
raises(OptionError, lambda: Auto.preprocess(x))
def test_Auto_postprocess():
opt = {'auto': True}
Auto.postprocess(opt)
assert opt == {'auto': True}
def test_Frac_preprocess():
assert Frac.preprocess(False) is False
assert Frac.preprocess(True) is True
assert Frac.preprocess(0) is False
assert Frac.preprocess(1) is True
raises(OptionError, lambda: Frac.preprocess(x))
def test_Frac_postprocess():
opt = {'frac': True}
Frac.postprocess(opt)
assert opt == {'frac': True}
def test_Formal_preprocess():
assert Formal.preprocess(False) is False
assert Formal.preprocess(True) is True
assert Formal.preprocess(0) is False
assert Formal.preprocess(1) is True
raises(OptionError, lambda: Formal.preprocess(x))
def test_Formal_postprocess():
opt = {'formal': True}
Formal.postprocess(opt)
assert opt == {'formal': True}
def test_Polys_preprocess():
assert Polys.preprocess(False) is False
assert Polys.preprocess(True) is True
assert Polys.preprocess(0) is False
assert Polys.preprocess(1) is True
raises(OptionError, lambda: Polys.preprocess(x))
def test_Polys_postprocess():
opt = {'polys': True}
Polys.postprocess(opt)
assert opt == {'polys': True}
def test_Include_preprocess():
assert Include.preprocess(False) is False
assert Include.preprocess(True) is True
assert Include.preprocess(0) is False
assert Include.preprocess(1) is True
raises(OptionError, lambda: Include.preprocess(x))
def test_Include_postprocess():
opt = {'include': True}
Include.postprocess(opt)
assert opt == {'include': True}
def test_All_preprocess():
assert All.preprocess(False) is False
assert All.preprocess(True) is True
assert All.preprocess(0) is False
assert All.preprocess(1) is True
raises(OptionError, lambda: All.preprocess(x))
def test_All_postprocess():
opt = {'all': True}
All.postprocess(opt)
assert opt == {'all': True}
def test_Gen_postprocess():
opt = {'gen': x}
Gen.postprocess(opt)
assert opt == {'gen': x}
def test_Symbols_preprocess():
raises(OptionError, lambda: Symbols.preprocess(x))
def test_Symbols_postprocess():
opt = {'symbols': [x, y, z]}
Symbols.postprocess(opt)
assert opt == {'symbols': [x, y, z]}
def test_Method_preprocess():
raises(OptionError, lambda: Method.preprocess(10))
def test_Method_postprocess():
opt = {'method': 'f5b'}
Method.postprocess(opt)
assert opt == {'method': 'f5b'}
| bsd-3-clause |
tellesnobrega/horizon | horizon/conf/default.py | 49 | 1588 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
# Default configuration dictionary. Do not mutate.
HORIZON_CONFIG = {
# Allow for ordering dashboards; list or tuple if provided.
'dashboards': None,
# Name of a default dashboard; defaults to first alphabetically if None
'default_dashboard': None,
# Default redirect url for users' home
'user_home': settings.LOGIN_REDIRECT_URL,
# AJAX settings for JavaScript
'ajax_queue_limit': 10,
'ajax_poll_interval': 2500,
# URL for additional help with this site.
'help_url': None,
# Exception configuration.
'exceptions': {'unauthorized': [],
'not_found': [],
'recoverable': []},
# Password configuration.
'password_validator': {'regex': '.*',
'help_text': _("Password is not accepted")},
'password_autocomplete': 'off',
# Enable or disable simplified floating IP address management.
'simple_ip_management': True
}
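# Example override (illustrative): projects typically replace individual
# defaults by defining HORIZON_CONFIG in their Django settings module, e.g.
#
#     HORIZON_CONFIG = {
#         'user_home': '/home/',
#         'ajax_poll_interval': 5000,
#     }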
| apache-2.0 |
alanplotko/CoREdash | app.py | 2 | 5680 | # Flask
from flask import Flask, render_template, request, redirect, url_for, session, abort, make_response
# Authentication
from authomatic.adapters import WerkzeugAdapter
from authomatic import Authomatic
from config import CONFIG
# MongoDB and Sessions
from flask.ext.session import Session
from pymongo import MongoClient
from functools import wraps
from datetime import datetime
from time import time
# Miscellaneous
import os, logging, json, sys
tmpl_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'templates')
# MongoDB Setup
client = MongoClient(os.getenv('COREDASH_MONGOHQ_URL'))
db = client.core
# MongoDB Session Setup
SESSION_TYPE = 'mongodb'
SESSION_MONGODB = client
SESSION_MONGODB_DB = os.getenv('COREDASH_MONGOHQ_DB')
SESSION_MONGODB_COLLECT = os.getenv('COREDASH_MONGOHQ_SESSIONS')
SESSION_USE_SIGNER = True
SESSION_KEY_PREFIX = os.getenv('COREDASH_MONGOHQ_PREFIX')
# Instantiate Authomatic Object and set up app
app = Flask(__name__)
app.secret_key = os.getenv('COREDASH_APP_SECRET')
authomatic = Authomatic(config=CONFIG, secret=app.secret_key)
app.config.from_object(__name__)
Session(app)
@app.before_first_request
def setup_logging():
if not app.debug:
# In production mode, add log handler to sys.stderr.
app.logger.addHandler(logging.StreamHandler())
app.logger.setLevel(logging.INFO)
def getCredentials():
credentials = session.get('credentials', None)
if credentials:
credentials = authomatic.credentials(credentials)
return credentials
return None
def login_required(f):
@wraps(f)
def decorated_function(*args, **kwargs):
credentials = getCredentials()
if not credentials or not credentials.valid:
return redirect(url_for('login', next=request.url))
        # If credentials are valid but expire within 30 minutes, refresh them
elif credentials and credentials.valid and credentials.expire_soon(30 * 60):
response = credentials.refresh()
return f(*args, **kwargs)
return decorated_function
@app.route('/')
def index():
credentials = getCredentials()
if credentials and credentials.valid:
return redirect(url_for('dashboard'))
return render_template('index.html', template_folder=tmpl_dir)
@app.route('/login')
def login():
credentials = getCredentials()
if credentials and credentials.valid:
return redirect(url_for('dashboard'))
return render_template('login.html', template_folder=tmpl_dir, credentials=credentials)
@app.route('/oauth2callback', methods=['GET', 'POST'])
def authenticate():
# We need response object for the WerkzeugAdapter.
response = make_response()
# Log the user in, pass it the adapter and the provider name.
result = authomatic.login(
WerkzeugAdapter(request, response),
"google",
session=session,
        session_saver=lambda: app.save_session(session, response)  # must be a callable, not the result of calling it
)
# If there is no LoginResult object, the login procedure is still pending
if result:
if result.user:
# We need to update the user to get more info
result.user.update()
# Store authomatic credentials in session
session['credentials'] = authomatic.credentials(result.user.credentials).serialize()
# Create new account if user is not found
account = db.users.find_one({'email': result.user.email })
if account == None:
del session['credentials']
return make_response(render_template('error.html', template_folder=tmpl_dir, error=401, error_msg="Unauthorized",
return_home="We couldn't find you on the CoRE member list. You must be a CoRE member to access \
CoREdash. Check with the secretary if you believe this is a mistake."), 401)
else:
# Store user information in session
session['username'] = result.user.email
if account.get('name') is None:
db.users.update({ 'email': result.user.email }, { '$set': { 'name': result.user.name } }, upsert=False)
session['display_name'] = result.user.name.split(' ')[0]
credentials = getCredentials()
return render_template('process_login.html')
# Don't forget to return the response
return response
@app.route('/logout')
def logout():
credentials = getCredentials()
if credentials and credentials.valid:
db.sessions.remove({ "id": app.config.get('SESSION_KEY_PREFIX') + session.sid })
session.clear()
return redirect(url_for('index'))
@app.route('/dashboard')
@login_required
def dashboard():
credentials = getCredentials()
return render_template('dashboard.html', template_folder=tmpl_dir, credentials=credentials)
@app.errorhandler(401)
def unauthorized(error):
return render_template('error.html', template_folder=tmpl_dir, error=401, error_msg="Unauthorized",
return_home="You must be a CoRE member to access this page!"
)
@app.errorhandler(500)
def internal_server(e):
return render_template('error.html', template_folder=tmpl_dir, error=500, error_msg="Internal Server Error",
return_home="The gears must have gotten stuck. Let us know if it happens again!"
)
@app.errorhandler(404)
def page_not_found(e):
return render_template('error.html', template_folder=tmpl_dir, error=404, error_msg="Page Not Found",
return_home="We can't find what you're looking for."
)
if __name__ == "__main__":
port = int(os.environ.get("PORT", 5000))
app.run(host='0.0.0.0', port=port)
| bsd-3-clause |
jdsimcoe/briansimcoe | workspace/grunt/node_modules/grunt-growl/node_modules/grunt/node_modules/gzip-js/node_modules/deflate-js/test/deflate.py | 177 | 2329 | import os
from colorama import Fore
from helpers import deflate, inflate, run_cmd
outDirDefault = 'test-outs'
testDirDefault = 'test-files'
"""
Run a single test
@param tFile- required; the full path to the file to run
@param level- optional (default: all); the compression level [1-9]
@param delete- optional (default: True); whether to delete the gzipped files
@return True if all tests passed; False if at least one test failed
"""
def runTest(tFile, level=None, delete=True, outDir=outDirDefault):
passed = True
if level == None:
for x in range(1, 10):
if runTest(tFile, x, delete) == False:
passed = False
return passed
# make the test-outs directory
try:
os.mkdir(outDir)
except:
pass
out1 = os.path.join(outDir, '%(file)s.%(level)d.deflate' % {'file': os.path.basename(tFile), 'level' : level})
out2 = os.path.join(outDir, '%(file)s.%(level)d.out.deflate' % {'file': os.path.basename(tFile), 'level' : level})
outData = deflate(tFile, outfile=out1, level=level)
run_cmd('../bin/deflate.js --level %(level)d --file %(file)s --output %(output)s' % {'level' : level, 'file' : tFile, 'output' : out2})
result = run_cmd('diff %(file1)s %(file2)s' % {'file1' : out1, 'file2' : out2})
if result['returncode'] == 0:
status = Fore.GREEN + 'PASSED' + Fore.RESET
else:
passed = False
status = Fore.RED + 'FAILED' + Fore.RESET
print 'Level %(level)d: %(status)s' % {'level' : level, 'status' : status}
if delete == True:
os.remove(out1)
os.remove(out2)
return passed
"""
Runs all tests on the given level. This iterates throuth the testDir directory defined above.
@param level- The level to run on [1-9] (default: None, runs on all levels all)
@param delete- Whether to delete output files after the test is run
@return True if all levels passed, False if at least one failed
"""
def runAll(level=None, delete=True, testDir=testDirDefault, outDir=outDirDefault):
# make the test-outs directory
try:
os.mkdir(outDir)
except:
pass
passed = True
for tFile in os.listdir(testDir):
fullPath = os.path.join(testDir, tFile)
print Fore.YELLOW + tFile + Fore.RESET
if runTest(fullPath, level, delete) == False:
passed = False
print ''
	# if we deleted all the files that were created, delete the directory
if delete == True:
os.rmdir(outDir)
return passed
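# Example usage (assumes the default test-files/ layout described above):
#
#     runAll()                                # every file, every level
#     runTest('test-files/foo.txt', level=6)  # one file, one level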
| mit |
resmo/ansible | test/units/modules/network/fortios/test_fortios_system_fm.py | 21 | 6794 | # Copyright 2019 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <https://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import json
import pytest
from mock import ANY
from ansible.module_utils.network.fortios.fortios import FortiOSHandler
try:
from ansible.modules.network.fortios import fortios_system_fm
except ImportError:
pytest.skip("Could not load required modules for testing", allow_module_level=True)
@pytest.fixture(autouse=True)
def connection_mock(mocker):
connection_class_mock = mocker.patch('ansible.modules.network.fortios.fortios_system_fm.Connection')
return connection_class_mock
fos_instance = FortiOSHandler(connection_mock)
def test_system_fm_creation(mocker):
schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
set_method_result = {'status': 'success', 'http_method': 'POST', 'http_status': 200}
set_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)
input_data = {
'username': 'admin',
'state': 'present',
'system_fm': {
'auto_backup': 'enable',
'id': '4',
'ip': 'test_value_5',
'ipsec': 'enable',
'scheduled_config_restore': 'enable',
'status': 'enable',
'vdom': 'test_value_9'
},
'vdom': 'root'}
is_error, changed, response = fortios_system_fm.fortios_system(input_data, fos_instance)
expected_data = {
'auto-backup': 'enable',
'id': '4',
'ip': 'test_value_5',
'ipsec': 'enable',
'scheduled-config-restore': 'enable',
'status': 'enable',
'vdom': 'test_value_9'
}
set_method_mock.assert_called_with('system', 'fm', data=expected_data, vdom='root')
schema_method_mock.assert_not_called()
assert not is_error
assert changed
assert response['status'] == 'success'
assert response['http_status'] == 200
def test_system_fm_creation_fails(mocker):
schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
set_method_result = {'status': 'error', 'http_method': 'POST', 'http_status': 500}
set_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)
input_data = {
'username': 'admin',
'state': 'present',
'system_fm': {
'auto_backup': 'enable',
'id': '4',
'ip': 'test_value_5',
'ipsec': 'enable',
'scheduled_config_restore': 'enable',
'status': 'enable',
'vdom': 'test_value_9'
},
'vdom': 'root'}
is_error, changed, response = fortios_system_fm.fortios_system(input_data, fos_instance)
expected_data = {
'auto-backup': 'enable',
'id': '4',
'ip': 'test_value_5',
'ipsec': 'enable',
'scheduled-config-restore': 'enable',
'status': 'enable',
'vdom': 'test_value_9'
}
set_method_mock.assert_called_with('system', 'fm', data=expected_data, vdom='root')
schema_method_mock.assert_not_called()
assert is_error
assert not changed
assert response['status'] == 'error'
assert response['http_status'] == 500
def test_system_fm_idempotent(mocker):
schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
set_method_result = {'status': 'error', 'http_method': 'DELETE', 'http_status': 404}
set_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)
input_data = {
'username': 'admin',
'state': 'present',
'system_fm': {
'auto_backup': 'enable',
'id': '4',
'ip': 'test_value_5',
'ipsec': 'enable',
'scheduled_config_restore': 'enable',
'status': 'enable',
'vdom': 'test_value_9'
},
'vdom': 'root'}
is_error, changed, response = fortios_system_fm.fortios_system(input_data, fos_instance)
expected_data = {
'auto-backup': 'enable',
'id': '4',
'ip': 'test_value_5',
'ipsec': 'enable',
'scheduled-config-restore': 'enable',
'status': 'enable',
'vdom': 'test_value_9'
}
set_method_mock.assert_called_with('system', 'fm', data=expected_data, vdom='root')
schema_method_mock.assert_not_called()
assert not is_error
assert not changed
assert response['status'] == 'error'
assert response['http_status'] == 404
def test_system_fm_filter_foreign_attributes(mocker):
schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
set_method_result = {'status': 'success', 'http_method': 'POST', 'http_status': 200}
set_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)
input_data = {
'username': 'admin',
'state': 'present',
'system_fm': {
'random_attribute_not_valid': 'tag',
'auto_backup': 'enable',
'id': '4',
'ip': 'test_value_5',
'ipsec': 'enable',
'scheduled_config_restore': 'enable',
'status': 'enable',
'vdom': 'test_value_9'
},
'vdom': 'root'}
is_error, changed, response = fortios_system_fm.fortios_system(input_data, fos_instance)
expected_data = {
'auto-backup': 'enable',
'id': '4',
'ip': 'test_value_5',
'ipsec': 'enable',
'scheduled-config-restore': 'enable',
'status': 'enable',
'vdom': 'test_value_9'
}
set_method_mock.assert_called_with('system', 'fm', data=expected_data, vdom='root')
schema_method_mock.assert_not_called()
assert not is_error
assert changed
assert response['status'] == 'success'
assert response['http_status'] == 200
| gpl-3.0 |
state-hiu/cybergis-scripts | bin/cybergis-script-geoshape-configure.py | 1 | 2429 | #!/usr/bin/python
from base64 import b64encode
from optparse import make_option
import json
import urllib
import urllib2
import argparse
import time
import os
import sys
import subprocess
#==#
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), '..', 'lib', 'cybergis')))
import cybergis_geoshape._geoshape_configure
#==#
parser = argparse.ArgumentParser(description='Initialize GeoGig repository and optionally add to GeoServer instance. If you want to add the GeoGig repo include the optional parameters.')
#==#
parser.add_argument('--verbose', '-v', default=0, action='count', help="Print out intermediate status messages.")
#==#
parser.add_argument("--env", default="standalone", help="The environment (standalone, application, or aws).")
parser.add_argument("--repo_url", default="https://github.com/state-hiu/rogue-chef-repo.git", help="The source GeoSHAPE Chef repo.")
parser.add_argument("--repo_branch", default="hiu_baseline", help="The branch of the source GeoSHAPE Chef repo.")
#==#
parser.add_argument("--fqdn", default="localhost", help="The fqdn of GeoSHAPE.")
#==#
parser.add_argument("--gn_project_url", default="https://github.com/state-hiu/rogue_geonode.git", help="The downstream GeoNode repo.")
parser.add_argument("--gn_project_branch", default="master", help="The branch of the downstream GeoNode repo.")
#==#
parser.add_argument("--gs_data_url", default="https://github.com/state-hiu/geoserver_data.git", help="The baseline geoserver_data repo.")
parser.add_argument("--gs_data_branch", default="master", help="The baseline geoserver_data branch.")
#==#
parser.add_argument('--banner', default=0, action='count', help="Display a banner")
parser.add_argument("--banner_text", default="", help="The banner text.")
parser.add_argument("--banner_color_text", default="", help="The foreground/font color of the banner text.")
parser.add_argument("--banner_color_background", default="", help="The background color of the banner.")
#==#
#When env equals application or aws
parser.add_argument("--db_host", help="The database host")
parser.add_argument("--db_ip", help="The database ip")
parser.add_argument("--db_port", default="5432", help="The database port")
parser.add_argument("--db_user", default="postgres", help="The database user")
parser.add_argument("--db_pass", help="The database password")
#==#
args = parser.parse_args()
#==#
cybergis_geoshape._geoshape_configure.run(args)
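# Example invocation (hypothetical values):
#
#     ./cybergis-script-geoshape-configure.py --env standalone \
#         --fqdn geoshape.example.com --banner --banner_text "STAGING" -v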
| mit |
carnell69/kuma | vendor/packages/logilab/common/ureports/docbook_writer.py | 93 | 5706 | # copyright 2003-2011 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
#
# This file is part of logilab-common.
#
# logilab-common is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 2.1 of the License, or (at your option) any
# later version.
#
# logilab-common is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License along
# with logilab-common. If not, see <http://www.gnu.org/licenses/>.
"""HTML formatting drivers for ureports"""
__docformat__ = "restructuredtext en"
from six.moves import range
from logilab.common.ureports import HTMLWriter
class DocbookWriter(HTMLWriter):
"""format layouts as HTML"""
def begin_format(self, layout):
"""begin to format a layout"""
super(HTMLWriter, self).begin_format(layout)
if self.snippet is None:
self.writeln('<?xml version="1.0" encoding="ISO-8859-1"?>')
self.writeln("""
<book xmlns:xi='http://www.w3.org/2001/XInclude'
lang='fr'>
""")
def end_format(self, layout):
"""finished to format a layout"""
if self.snippet is None:
self.writeln('</book>')
def visit_section(self, layout):
"""display a section (using <chapter> (level 0) or <section>)"""
if self.section == 0:
tag = "chapter"
else:
tag = "section"
self.section += 1
self.writeln(self._indent('<%s%s>' % (tag, self.handle_attrs(layout))))
self.format_children(layout)
self.writeln(self._indent('</%s>'% tag))
self.section -= 1
def visit_title(self, layout):
"""display a title using <title>"""
self.write(self._indent(' <title%s>' % self.handle_attrs(layout)))
self.format_children(layout)
self.writeln('</title>')
def visit_table(self, layout):
"""display a table as html"""
self.writeln(self._indent(' <table%s><title>%s</title>' \
% (self.handle_attrs(layout), layout.title)))
self.writeln(self._indent(' <tgroup cols="%s">'% layout.cols))
for i in range(layout.cols):
self.writeln(self._indent(' <colspec colname="c%s" colwidth="1*"/>' % i))
table_content = self.get_table_content(layout)
# write headers
if layout.cheaders:
self.writeln(self._indent(' <thead>'))
self._write_row(table_content[0])
self.writeln(self._indent(' </thead>'))
table_content = table_content[1:]
elif layout.rcheaders:
self.writeln(self._indent(' <thead>'))
self._write_row(table_content[-1])
self.writeln(self._indent(' </thead>'))
table_content = table_content[:-1]
# write body
self.writeln(self._indent(' <tbody>'))
for i in range(len(table_content)):
row = table_content[i]
self.writeln(self._indent(' <row>'))
for j in range(len(row)):
cell = row[j] or ' '
self.writeln(self._indent(' <entry>%s</entry>' % cell))
self.writeln(self._indent(' </row>'))
self.writeln(self._indent(' </tbody>'))
self.writeln(self._indent(' </tgroup>'))
self.writeln(self._indent(' </table>'))
def _write_row(self, row):
"""write content of row (using <row> <entry>)"""
self.writeln(' <row>')
for j in range(len(row)):
cell = row[j] or ' '
self.writeln(' <entry>%s</entry>' % cell)
self.writeln(self._indent(' </row>'))
def visit_list(self, layout):
"""display a list (using <itemizedlist>)"""
self.writeln(self._indent(' <itemizedlist%s>' % self.handle_attrs(layout)))
for row in list(self.compute_content(layout)):
self.writeln(' <listitem><para>%s</para></listitem>' % row)
self.writeln(self._indent(' </itemizedlist>'))
def visit_paragraph(self, layout):
"""display links (using <para>)"""
self.write(self._indent(' <para>'))
self.format_children(layout)
self.writeln('</para>')
def visit_span(self, layout):
"""display links (using <p>)"""
#TODO: translate in docbook
self.write('<literal %s>' % self.handle_attrs(layout))
self.format_children(layout)
self.write('</literal>')
def visit_link(self, layout):
"""display links (using <ulink>)"""
self.write('<ulink url="%s"%s>%s</ulink>' % (layout.url,
self.handle_attrs(layout),
layout.label))
def visit_verbatimtext(self, layout):
"""display verbatim text (using <programlisting>)"""
self.writeln(self._indent(' <programlisting>'))
self.write(layout.data.replace('&', '&').replace('<', '<'))
self.writeln(self._indent(' </programlisting>'))
def visit_text(self, layout):
"""add some text"""
self.write(layout.data.replace('&', '&').replace('<', '<'))
def _indent(self, string):
"""correctly indent string according to section"""
return ' ' * 2*(self.section) + string
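# Usage sketch (node constructors assumed from logilab.common.ureports;
# values are illustrative):
#
#     import sys
#     from logilab.common.ureports import Section, Title, Text
#     layout = Section(children=[Title(children=[Text('Report')]),
#                                Text('body text')])
#     DocbookWriter().format(layout, stream=sys.stdout)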
| mpl-2.0 |
retomerz/intellij-community | python/helpers/pydev/third_party/pep8/lib2to3/lib2to3/fixes/fix_intern.py | 315 | 1405 | # Copyright 2006 Georg Brandl.
# Licensed to PSF under a Contributor Agreement.
"""Fixer for intern().
intern(s) -> sys.intern(s)"""
# Local imports
from .. import pytree
from .. import fixer_base
from ..fixer_util import Name, Attr, touch_import
class FixIntern(fixer_base.BaseFix):
BM_compatible = True
order = "pre"
PATTERN = """
power< 'intern'
trailer< lpar='('
( not(arglist | argument<any '=' any>) obj=any
| obj=arglist<(not argument<any '=' any>) any ','> )
rpar=')' >
after=any*
>
"""
def transform(self, node, results):
syms = self.syms
obj = results["obj"].clone()
if obj.type == syms.arglist:
newarglist = obj.clone()
else:
newarglist = pytree.Node(syms.arglist, [obj.clone()])
after = results["after"]
if after:
after = [n.clone() for n in after]
new = pytree.Node(syms.power,
Attr(Name(u"sys"), Name(u"intern")) +
[pytree.Node(syms.trailer,
[results["lpar"].clone(),
newarglist,
results["rpar"].clone()])] + after)
new.prefix = node.prefix
touch_import(None, u'sys', node)
return new
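# Effect of this fixer (illustrative):
#
#     before: x = intern(s)
#     after:  x = sys.intern(s)   # touch_import adds "import sys" if missing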
| apache-2.0 |
krishnazure/ansible | v1/ansible/runner/action_plugins/assemble.py | 109 | 6150 | # (c) 2013-2014, Michael DeHaan <michael.dehaan@gmail.com>
# Stephen Fromm <sfromm@gmail.com>
# Brian Coca <briancoca+dev@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
import os
import os.path
import pipes
import shutil
import tempfile
import base64
import re
from ansible import utils
from ansible.runner.return_data import ReturnData
class ActionModule(object):
TRANSFERS_FILES = True
def __init__(self, runner):
self.runner = runner
def _assemble_from_fragments(self, src_path, delimiter=None, compiled_regexp=None):
''' assemble a file from a directory of fragments '''
tmpfd, temp_path = tempfile.mkstemp()
tmp = os.fdopen(tmpfd,'w')
delimit_me = False
add_newline = False
for f in sorted(os.listdir(src_path)):
if compiled_regexp and not compiled_regexp.search(f):
continue
fragment = "%s/%s" % (src_path, f)
if not os.path.isfile(fragment):
continue
fragment_content = file(fragment).read()
# always put a newline between fragments if the previous fragment didn't end with a newline.
if add_newline:
tmp.write('\n')
# delimiters should only appear between fragments
if delimit_me:
if delimiter:
# un-escape anything like newlines
delimiter = delimiter.decode('unicode-escape')
tmp.write(delimiter)
# always make sure there's a newline after the
# delimiter, so lines don't run together
if delimiter[-1] != '\n':
tmp.write('\n')
tmp.write(fragment_content)
delimit_me = True
if fragment_content.endswith('\n'):
add_newline = False
else:
add_newline = True
tmp.close()
return temp_path
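    # Fragments are concatenated in sorted filename order; `delimiter`
    # (escape sequences such as \n are un-escaped first) is written between
    # fragments, and a newline is inserted between fragments that do not
    # already end in one, so lines never run together.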
def run(self, conn, tmp, module_name, module_args, inject, complex_args=None, **kwargs):
# load up options
options = {}
if complex_args:
options.update(complex_args)
options.update(utils.parse_kv(module_args))
src = options.get('src', None)
dest = options.get('dest', None)
delimiter = options.get('delimiter', None)
remote_src = utils.boolean(options.get('remote_src', 'yes'))
regexp = options.get('regexp', None)
if src is None or dest is None:
result = dict(failed=True, msg="src and dest are required")
return ReturnData(conn=conn, comm_ok=False, result=result)
if remote_src:
return self.runner._execute_module(conn, tmp, 'assemble', module_args, inject=inject, complex_args=complex_args)
elif '_original_file' in inject:
src = utils.path_dwim_relative(inject['_original_file'], 'files', src, self.runner.basedir)
else:
# the source is local, so expand it here
src = os.path.expanduser(src)
_re = None
if regexp is not None:
_re = re.compile(regexp)
# Does all work assembling the file
path = self._assemble_from_fragments(src, delimiter, _re)
path_checksum = utils.checksum_s(path)
dest = self.runner._remote_expand_user(conn, dest, tmp)
remote_checksum = self.runner._remote_checksum(conn, tmp, dest, inject)
if path_checksum != remote_checksum:
resultant = file(path).read()
if self.runner.diff:
dest_result = self.runner._execute_module(conn, tmp, 'slurp', "path=%s" % dest, inject=inject, persist_files=True)
if 'content' in dest_result.result:
dest_contents = dest_result.result['content']
if dest_result.result['encoding'] == 'base64':
dest_contents = base64.b64decode(dest_contents)
else:
raise Exception("unknown encoding, failed: %s" % dest_result.result)
xfered = self.runner._transfer_str(conn, tmp, 'src', resultant)
# fix file permissions when the copy is done as a different user
if self.runner.become and self.runner.become_user != 'root':
self.runner._remote_chmod(conn, 'a+r', xfered, tmp)
# run the copy module
new_module_args = dict(
src=xfered,
dest=dest,
original_basename=os.path.basename(src),
)
module_args_tmp = utils.merge_module_args(module_args, new_module_args)
if self.runner.noop_on_check(inject):
return ReturnData(conn=conn, comm_ok=True, result=dict(changed=True), diff=dict(before_header=dest, after_header=src, after=resultant))
else:
res = self.runner._execute_module(conn, tmp, 'copy', module_args_tmp, inject=inject)
res.diff = dict(after=resultant)
return res
else:
new_module_args = dict(
src=xfered,
dest=dest,
original_basename=os.path.basename(src),
)
# make sure checkmod is passed on correctly
if self.runner.noop_on_check(inject):
new_module_args['CHECKMODE'] = True
module_args_tmp = utils.merge_module_args(module_args, new_module_args)
return self.runner._execute_module(conn, tmp, 'file', module_args_tmp, inject=inject)
| gpl-3.0 |
ahernp/DMCM | mpages/management/commands/delete_page_reads.py | 1 | 1243 | from datetime import timedelta
from django.core.management.base import BaseCommand
from django.utils import timezone
from django.conf import settings
from ...models import PageRead
import logging
logger = logging.getLogger(__name__)
class Command(BaseCommand):
help = "Deletes older PageRead rows."
def add_arguments(self, parser):
parser.add_argument(
"keep_delta", nargs="?", default=settings.KEEP_PAGEREAD_FOR_DAYS, type=int
)
parser.add_argument(
"--verbose",
action="store_true",
dest="verbose",
default=False,
help="Print progress on command line",
)
def handle(self, *args, **options):
verbose = options["verbose"]
keep_delta = options["keep_delta"]
delete_before = timezone.now() - timedelta(days=keep_delta)
pageread_count = PageRead.objects.filter(created__lt=delete_before).count()
if verbose:
print(
f"{pageread_count} PageRead entries to delete (older than {delete_before} days)"
)
PageRead.objects.filter(created__lt=delete_before).delete()
logger.info("%s older PageRead rows deleted", pageread_count)
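# Example invocations (run from the project root; command name comes from
# this file's name):
#
#     python manage.py delete_page_reads               # settings default
#     python manage.py delete_page_reads 30 --verbose  # keep 30 days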
| bsd-3-clause |