| repo_name | path | copies | size | content | license |
|---|---|---|---|---|---|
sveetch/boussole | boussole/cli/console_script.py | 1 | 1784 | """
Main entrance to commandline actions
"""
import click
from ..logs import init_logger
from .compile import compile_command
from .startproject import startproject_command
from .version import version_command
from .watch import watch_command
# Help alias on "-h" argument
CONTEXT_SETTINGS = dict(help_option_names=["-h", "--help"])
# Default logger conf
BOUSSOLE_LOGGER_CONF = ("DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL", None)
@click.group(context_settings=CONTEXT_SETTINGS)
@click.option(
"-v",
"--verbose",
type=click.IntRange(min=0, max=5),
default=4,
metavar="INTEGER",
help=(
"An integer between 0 and 5, where '0' make a totaly "
"silent output and '5' set level to DEBUG (the most verbose "
"level). Default to '4' (Info level)."
)
)
@click.pass_context
def cli_frontend(ctx, verbose):
"""
Boussole is a commandline interface to build Sass projects using libsass.
Every project will need a settings file containing all needed settings to
build it.
"""
printout = True
if verbose == 0:
verbose = 1
printout = False
# Verbosity is the inverse of logging levels
levels = [item for item in BOUSSOLE_LOGGER_CONF]
levels.reverse()
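# (added note) With the reversed tuple, levels[verbose] maps
# verbose=5 -> "DEBUG", verbose=4 -> "INFO" (the default) and
# verbose=1 -> "CRITICAL"; verbose=0 was bumped to 1 above and only
# disables the printout handler.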
# Init the logger config
root_logger = init_logger(levels[verbose], printout=printout)
# Init the default context that will be passed to commands
ctx.obj = {
"verbosity": verbose,
"logger": root_logger,
}
# Attach commands methods to the main grouper
cli_frontend.add_command(version_command, name="version")
cli_frontend.add_command(compile_command, name="compile")
cli_frontend.add_command(watch_command, name="watch")
cli_frontend.add_command(startproject_command, name="startproject")
| mit |
nimzco/Environment | Sublime/Packages/markupsafe/all/markupsafe/_constants.py | 1535 | 4795 | # -*- coding: utf-8 -*-
"""
markupsafe._constants
~~~~~~~~~~~~~~~~~~~~~
Highlevel implementation of the Markup string.
:copyright: (c) 2010 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
HTML_ENTITIES = {
'AElig': 198,
'Aacute': 193,
'Acirc': 194,
'Agrave': 192,
'Alpha': 913,
'Aring': 197,
'Atilde': 195,
'Auml': 196,
'Beta': 914,
'Ccedil': 199,
'Chi': 935,
'Dagger': 8225,
'Delta': 916,
'ETH': 208,
'Eacute': 201,
'Ecirc': 202,
'Egrave': 200,
'Epsilon': 917,
'Eta': 919,
'Euml': 203,
'Gamma': 915,
'Iacute': 205,
'Icirc': 206,
'Igrave': 204,
'Iota': 921,
'Iuml': 207,
'Kappa': 922,
'Lambda': 923,
'Mu': 924,
'Ntilde': 209,
'Nu': 925,
'OElig': 338,
'Oacute': 211,
'Ocirc': 212,
'Ograve': 210,
'Omega': 937,
'Omicron': 927,
'Oslash': 216,
'Otilde': 213,
'Ouml': 214,
'Phi': 934,
'Pi': 928,
'Prime': 8243,
'Psi': 936,
'Rho': 929,
'Scaron': 352,
'Sigma': 931,
'THORN': 222,
'Tau': 932,
'Theta': 920,
'Uacute': 218,
'Ucirc': 219,
'Ugrave': 217,
'Upsilon': 933,
'Uuml': 220,
'Xi': 926,
'Yacute': 221,
'Yuml': 376,
'Zeta': 918,
'aacute': 225,
'acirc': 226,
'acute': 180,
'aelig': 230,
'agrave': 224,
'alefsym': 8501,
'alpha': 945,
'amp': 38,
'and': 8743,
'ang': 8736,
'apos': 39,
'aring': 229,
'asymp': 8776,
'atilde': 227,
'auml': 228,
'bdquo': 8222,
'beta': 946,
'brvbar': 166,
'bull': 8226,
'cap': 8745,
'ccedil': 231,
'cedil': 184,
'cent': 162,
'chi': 967,
'circ': 710,
'clubs': 9827,
'cong': 8773,
'copy': 169,
'crarr': 8629,
'cup': 8746,
'curren': 164,
'dArr': 8659,
'dagger': 8224,
'darr': 8595,
'deg': 176,
'delta': 948,
'diams': 9830,
'divide': 247,
'eacute': 233,
'ecirc': 234,
'egrave': 232,
'empty': 8709,
'emsp': 8195,
'ensp': 8194,
'epsilon': 949,
'equiv': 8801,
'eta': 951,
'eth': 240,
'euml': 235,
'euro': 8364,
'exist': 8707,
'fnof': 402,
'forall': 8704,
'frac12': 189,
'frac14': 188,
'frac34': 190,
'frasl': 8260,
'gamma': 947,
'ge': 8805,
'gt': 62,
'hArr': 8660,
'harr': 8596,
'hearts': 9829,
'hellip': 8230,
'iacute': 237,
'icirc': 238,
'iexcl': 161,
'igrave': 236,
'image': 8465,
'infin': 8734,
'int': 8747,
'iota': 953,
'iquest': 191,
'isin': 8712,
'iuml': 239,
'kappa': 954,
'lArr': 8656,
'lambda': 955,
'lang': 9001,
'laquo': 171,
'larr': 8592,
'lceil': 8968,
'ldquo': 8220,
'le': 8804,
'lfloor': 8970,
'lowast': 8727,
'loz': 9674,
'lrm': 8206,
'lsaquo': 8249,
'lsquo': 8216,
'lt': 60,
'macr': 175,
'mdash': 8212,
'micro': 181,
'middot': 183,
'minus': 8722,
'mu': 956,
'nabla': 8711,
'nbsp': 160,
'ndash': 8211,
'ne': 8800,
'ni': 8715,
'not': 172,
'notin': 8713,
'nsub': 8836,
'ntilde': 241,
'nu': 957,
'oacute': 243,
'ocirc': 244,
'oelig': 339,
'ograve': 242,
'oline': 8254,
'omega': 969,
'omicron': 959,
'oplus': 8853,
'or': 8744,
'ordf': 170,
'ordm': 186,
'oslash': 248,
'otilde': 245,
'otimes': 8855,
'ouml': 246,
'para': 182,
'part': 8706,
'permil': 8240,
'perp': 8869,
'phi': 966,
'pi': 960,
'piv': 982,
'plusmn': 177,
'pound': 163,
'prime': 8242,
'prod': 8719,
'prop': 8733,
'psi': 968,
'quot': 34,
'rArr': 8658,
'radic': 8730,
'rang': 9002,
'raquo': 187,
'rarr': 8594,
'rceil': 8969,
'rdquo': 8221,
'real': 8476,
'reg': 174,
'rfloor': 8971,
'rho': 961,
'rlm': 8207,
'rsaquo': 8250,
'rsquo': 8217,
'sbquo': 8218,
'scaron': 353,
'sdot': 8901,
'sect': 167,
'shy': 173,
'sigma': 963,
'sigmaf': 962,
'sim': 8764,
'spades': 9824,
'sub': 8834,
'sube': 8838,
'sum': 8721,
'sup': 8835,
'sup1': 185,
'sup2': 178,
'sup3': 179,
'supe': 8839,
'szlig': 223,
'tau': 964,
'there4': 8756,
'theta': 952,
'thetasym': 977,
'thinsp': 8201,
'thorn': 254,
'tilde': 732,
'times': 215,
'trade': 8482,
'uArr': 8657,
'uacute': 250,
'uarr': 8593,
'ucirc': 251,
'ugrave': 249,
'uml': 168,
'upsih': 978,
'upsilon': 965,
'uuml': 252,
'weierp': 8472,
'xi': 958,
'yacute': 253,
'yen': 165,
'yuml': 255,
'zeta': 950,
'zwj': 8205,
'zwnj': 8204
}
| mit |
w1ll1am23/home-assistant | homeassistant/components/somfy/sensor.py | 4 | 1731 | """Support for Somfy Thermostat Battery."""
from pymfy.api.devices.category import Category
from pymfy.api.devices.thermostat import Thermostat
from homeassistant.components.sensor import SensorEntity
from homeassistant.const import DEVICE_CLASS_BATTERY, PERCENTAGE
from . import SomfyEntity
from .const import API, COORDINATOR, DOMAIN
SUPPORTED_CATEGORIES = {Category.HVAC.value}
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Somfy sensor platform."""
domain_data = hass.data[DOMAIN]
coordinator = domain_data[COORDINATOR]
api = domain_data[API]
sensors = [
SomfyThermostatBatterySensor(coordinator, device_id, api)
for device_id, device in coordinator.data.items()
if SUPPORTED_CATEGORIES & set(device.categories)
]
async_add_entities(sensors)
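# (added note) only devices whose categories intersect SUPPORTED_CATEGORIES
# (i.e. Category.HVAC) receive a battery sensor entity in the comprehension above.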
class SomfyThermostatBatterySensor(SomfyEntity, SensorEntity):
"""Representation of a Somfy thermostat battery."""
def __init__(self, coordinator, device_id, api):
"""Initialize the Somfy device."""
super().__init__(coordinator, device_id, api)
self._climate = None
self._create_device()
def _create_device(self):
"""Update the device with the latest data."""
self._climate = Thermostat(self.device, self.api)
@property
def state(self) -> int:
"""Return the state of the sensor."""
return self._climate.get_battery()
@property
def device_class(self) -> str:
"""Return the device class of the sensor."""
return DEVICE_CLASS_BATTERY
@property
def unit_of_measurement(self) -> str:
"""Return the unit of measurement of the sensor."""
return PERCENTAGE
| apache-2.0 |
akshmakov/Dolfin-Fijee-Fork | demo/undocumented/refinement/python/demo_refinement.py | 2 | 1508 | "This demo illustrates mesh refinement."
# Copyright (C) 2007-2009 Anders Logg
#
# This file is part of DOLFIN.
#
# DOLFIN is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# DOLFIN is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with DOLFIN. If not, see <http://www.gnu.org/licenses/>.
#
# First added: 2007-06-01
# Last changed: 2012-11-12
from dolfin import *
# Create mesh of unit square
mesh = UnitSquareMesh(8, 8)
plot(mesh)
info(mesh)
print
# Uniform refinement
mesh = refine(mesh)
plot(mesh)
info(mesh)
print
# Uniform refinement
mesh = refine(mesh)
plot(mesh)
info(mesh)
print
# Refine mesh close to x = (0.5, 0.5)
p = Point(0.5, 0.5)
for i in range(5):
print "marking for refinement"
# Mark cells for refinement
cell_markers = CellFunction("bool", mesh)
for c in cells(mesh):
if c.midpoint().distance(p) < 0.1:
cell_markers[c] = True
else:
cell_markers[c] = False
# Refine mesh
mesh = refine(mesh, cell_markers)
# Plot mesh
plot(mesh)
interactive()
| gpl-3.0 |
EmmanuelJohnson/ssquiz | flask/lib/python2.7/site-packages/whoosh/lang/snowball/hungarian.py | 96 | 11694 | from whoosh.compat import u
class HungarianStemmer(object):
"""
The Hungarian Snowball stemmer.
:cvar __vowels: The Hungarian vowels.
:type __vowels: unicode
:cvar __digraphs: The Hungarian digraphs.
:type __digraphs: tuple
:cvar __double_consonants: The Hungarian double consonants.
:type __double_consonants: tuple
:cvar __step1_suffixes: Suffixes to be deleted in step 1 of the algorithm.
:type __step1_suffixes: tuple
:cvar __step2_suffixes: Suffixes to be deleted in step 2 of the algorithm.
:type __step2_suffixes: tuple
:cvar __step3_suffixes: Suffixes to be deleted in step 3 of the algorithm.
:type __step3_suffixes: tuple
:cvar __step4_suffixes: Suffixes to be deleted in step 4 of the algorithm.
:type __step4_suffixes: tuple
:cvar __step5_suffixes: Suffixes to be deleted in step 5 of the algorithm.
:type __step5_suffixes: tuple
:cvar __step6_suffixes: Suffixes to be deleted in step 6 of the algorithm.
:type __step6_suffixes: tuple
:cvar __step7_suffixes: Suffixes to be deleted in step 7 of the algorithm.
:type __step7_suffixes: tuple
:cvar __step8_suffixes: Suffixes to be deleted in step 8 of the algorithm.
:type __step8_suffixes: tuple
:cvar __step9_suffixes: Suffixes to be deleted in step 9 of the algorithm.
:type __step9_suffixes: tuple
:note: A detailed description of the Hungarian
stemming algorithm can be found under
http://snowball.tartarus.org/algorithms/hungarian/stemmer.html
"""
__vowels = u("aeiou\xF6\xFC\xE1\xE9\xED\xF3\xF5\xFA\xFB")
__digraphs = ("cs", "dz", "dzs", "gy", "ly", "ny", "ty", "zs")
__double_consonants = ("bb", "cc", "ccs", "dd", "ff", "gg",
"ggy", "jj", "kk", "ll", "lly", "mm",
"nn", "nny", "pp", "rr", "ss", "ssz",
"tt", "tty", "vv", "zz", "zzs")
__step1_suffixes = ("al", "el")
__step2_suffixes = (u('k\xE9ppen'), u('onk\xE9nt'), u('enk\xE9nt'),
u('ank\xE9nt'), u('k\xE9pp'), u('k\xE9nt'), 'ban',
'ben', 'nak', 'nek', 'val', 'vel', u('t\xF3l'),
u('t\xF5l'), u('r\xF3l'), u('r\xF5l'), u('b\xF3l'),
u('b\xF5l'), 'hoz', 'hez', u('h\xF6z'),
u('n\xE1l'), u('n\xE9l'), u('\xE9rt'), 'kor',
'ba', 'be', 'ra', 're', 'ig', 'at', 'et',
'ot', u('\xF6t'), 'ul', u('\xFCl'), u('v\xE1'),
u('v\xE9'), 'en', 'on', 'an', u('\xF6n'),
'n', 't')
__step3_suffixes = (u("\xE1nk\xE9nt"), u("\xE1n"), u("\xE9n"))
__step4_suffixes = ('astul', u('est\xFCl'), u('\xE1stul'),
u('\xE9st\xFCl'), 'stul', u('st\xFCl'))
__step5_suffixes = (u("\xE1"), u("\xE9"))
__step6_suffixes = (u('ok\xE9'), u('\xF6k\xE9'), u('ak\xE9'),
u('ek\xE9'), u('\xE1k\xE9'), u('\xE1\xE9i'),
u('\xE9k\xE9'), u('\xE9\xE9i'), u('k\xE9'),
u('\xE9i'), u('\xE9\xE9'), u('\xE9'))
__step7_suffixes = (u('\xE1juk'), u('\xE9j\xFCk'), u('\xFCnk'),
'unk', 'juk', u('j\xFCk'), u('\xE1nk'),
u('\xE9nk'), 'nk', 'uk', u('\xFCk'), 'em',
'om', 'am', 'od', 'ed', 'ad', u('\xF6d'),
'ja', 'je', u('\xE1m'), u('\xE1d'), u('\xE9m'),
u('\xE9d'), 'm', 'd', 'a', 'e', 'o',
u('\xE1'), u('\xE9'))
__step8_suffixes = ('jaitok', 'jeitek', 'jaink', 'jeink', 'aitok',
'eitek', u('\xE1itok'), u('\xE9itek'), 'jaim',
'jeim', 'jaid', 'jeid', 'eink', 'aink',
'itek', 'jeik', 'jaik', u('\xE1ink'),
u('\xE9ink'), 'aim', 'eim', 'aid', 'eid',
'jai', 'jei', 'ink', 'aik', 'eik',
u('\xE1im'), u('\xE1id'), u('\xE1ik'), u('\xE9im'),
u('\xE9id'), u('\xE9ik'), 'im', 'id', 'ai',
'ei', 'ik', u('\xE1i'), u('\xE9i'), 'i')
__step9_suffixes = (u("\xE1k"), u("\xE9k"), u("\xF6k"), "ok",
"ek", "ak", "k")
def stem(self, word):
"""
Stem a Hungarian word and return the stemmed form.
:param word: The word that is stemmed.
:type word: str or unicode
:return: The stemmed form.
:rtype: unicode
"""
word = word.lower()
r1 = self.__r1_hungarian(word, self.__vowels, self.__digraphs)
# STEP 1: Remove instrumental case
if r1.endswith(self.__step1_suffixes):
for double_cons in self.__double_consonants:
if word[-2 - len(double_cons):-2] == double_cons:
word = "".join((word[:-4], word[-3]))
if r1[-2 - len(double_cons):-2] == double_cons:
r1 = "".join((r1[:-4], r1[-3]))
break
# STEP 2: Remove frequent cases
for suffix in self.__step2_suffixes:
if word.endswith(suffix):
if r1.endswith(suffix):
word = word[:-len(suffix)]
r1 = r1[:-len(suffix)]
if r1.endswith(u("\xE1")):
word = "".join((word[:-1], "a"))
r1 = "".join((r1[:-1], "a"))
elif r1.endswith(u("\xE9")):
word = "".join((word[:-1], "e"))
r1 = "".join((r1[:-1], "e"))
break
# STEP 3: Remove special cases
for suffix in self.__step3_suffixes:
if r1.endswith(suffix):
if suffix == u("\xE9n"):
word = "".join((word[:-2], "e"))
r1 = "".join((r1[:-2], "e"))
else:
word = "".join((word[:-len(suffix)], "a"))
r1 = "".join((r1[:-len(suffix)], "a"))
break
# STEP 4: Remove other cases
for suffix in self.__step4_suffixes:
if r1.endswith(suffix):
if suffix == u("\xE1stul"):
word = "".join((word[:-5], "a"))
r1 = "".join((r1[:-5], "a"))
elif suffix == u("\xE9st\xFCl"):
word = "".join((word[:-5], "e"))
r1 = "".join((r1[:-5], "e"))
else:
word = word[:-len(suffix)]
r1 = r1[:-len(suffix)]
break
# STEP 5: Remove factive case
for suffix in self.__step5_suffixes:
if r1.endswith(suffix):
for double_cons in self.__double_consonants:
if word[-1 - len(double_cons):-1] == double_cons:
word = "".join((word[:-3], word[-2]))
if r1[-1 - len(double_cons):-1] == double_cons:
r1 = "".join((r1[:-3], r1[-2]))
break
# STEP 6: Remove owned
for suffix in self.__step6_suffixes:
if r1.endswith(suffix):
if suffix in (u("\xE1k\xE9"), u("\xE1\xE9i")):
word = "".join((word[:-3], "a"))
r1 = "".join((r1[:-3], "a"))
elif suffix in (u("\xE9k\xE9"), u("\xE9\xE9i"),
u("\xE9\xE9")):
word = "".join((word[:-len(suffix)], "e"))
r1 = "".join((r1[:-len(suffix)], "e"))
else:
word = word[:-len(suffix)]
r1 = r1[:-len(suffix)]
break
# STEP 7: Remove singular owner suffixes
for suffix in self.__step7_suffixes:
if word.endswith(suffix):
if r1.endswith(suffix):
if suffix in (u("\xE1nk"), u("\xE1juk"), u("\xE1m"),
u("\xE1d"), u("\xE1")):
word = "".join((word[:-len(suffix)], "a"))
r1 = "".join((r1[:-len(suffix)], "a"))
elif suffix in (u("\xE9nk"), u("\xE9j\xFCk"),
u("\xE9m"), u("\xE9d"), u("\xE9")):
word = "".join((word[:-len(suffix)], "e"))
r1 = "".join((r1[:-len(suffix)], "e"))
else:
word = word[:-len(suffix)]
r1 = r1[:-len(suffix)]
break
# STEP 8: Remove plural owner suffixes
for suffix in self.__step8_suffixes:
if word.endswith(suffix):
if r1.endswith(suffix):
if suffix in (u("\xE1im"), u("\xE1id"), u("\xE1i"),
u("\xE1ink"), u("\xE1itok"), u("\xE1ik")):
word = "".join((word[:-len(suffix)], "a"))
r1 = "".join((r1[:-len(suffix)], "a"))
elif suffix in (u("\xE9im"), u("\xE9id"), u("\xE9i"),
u("\xE9ink"), u("\xE9itek"), u("\xE9ik")):
word = "".join((word[:-len(suffix)], "e"))
r1 = "".join((r1[:-len(suffix)], "e"))
else:
word = word[:-len(suffix)]
r1 = r1[:-len(suffix)]
break
# STEP 9: Remove plural suffixes
for suffix in self.__step9_suffixes:
if word.endswith(suffix):
if r1.endswith(suffix):
if suffix == u("\xE1k"):
word = "".join((word[:-2], "a"))
elif suffix == u("\xE9k"):
word = "".join((word[:-2], "e"))
else:
word = word[:-len(suffix)]
break
return word
def __r1_hungarian(self, word, vowels, digraphs):
"""
Return the region R1 that is used by the Hungarian stemmer.
If the word begins with a vowel, R1 is defined as the region
after the first consonant or digraph (= two letters stand for
one phoneme) in the word. If the word begins with a consonant,
it is defined as the region after the first vowel in the word.
If the word does not contain both a vowel and consonant, R1
is the null region at the end of the word.
:param word: The Hungarian word whose region R1 is determined.
:type word: str or unicode
:param vowels: The Hungarian vowels that are used to determine
the region R1.
:type vowels: unicode
:param digraphs: The digraphs that are used to determine the
region R1.
:type digraphs: tuple
:return: the region R1 for the respective word.
:rtype: unicode
:note: This helper method is invoked by the stem method of the subclass
HungarianStemmer. It is not to be invoked directly!
"""
r1 = ""
if word[0] in vowels:
for digraph in digraphs:
if digraph in word[1:]:
r1 = word[word.index(digraph[-1]) + 1:]
return r1
for i in range(1, len(word)):
if word[i] not in vowels:
r1 = word[i + 1:]
break
else:
for i in range(1, len(word)):
if word[i] in vowels:
r1 = word[i + 1:]
break
return r1
| bsd-3-clause |
RadioFreeAsia/RDacity | lib-src/lv2/lv2/plugins/eg-amp.lv2/waflib/Tools/dmd.py | 316 | 1511 | #! /usr/bin/env python
# encoding: utf-8
# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
import sys
from waflib.Tools import ar,d
from waflib.Configure import conf
@conf
def find_dmd(conf):
conf.find_program(['dmd','dmd2','ldc'],var='D')
out=conf.cmd_and_log([conf.env.D,'--help'])
if out.find("D Compiler v")==-1:
out=conf.cmd_and_log([conf.env.D,'-version'])
if out.find("based on DMD v1.")==-1:
conf.fatal("detected compiler is not dmd/ldc")
@conf
def common_flags_ldc(conf):
v=conf.env
v['DFLAGS']=['-d-version=Posix']
v['LINKFLAGS']=[]
v['DFLAGS_dshlib']=['-relocation-model=pic']
@conf
def common_flags_dmd(conf):
v=conf.env
v['D_SRC_F']=['-c']
v['D_TGT_F']='-of%s'
v['D_LINKER']=v['D']
v['DLNK_SRC_F']=''
v['DLNK_TGT_F']='-of%s'
v['DINC_ST']='-I%s'
v['DSHLIB_MARKER']=v['DSTLIB_MARKER']=''
v['DSTLIB_ST']=v['DSHLIB_ST']='-L-l%s'
v['DSTLIBPATH_ST']=v['DLIBPATH_ST']='-L-L%s'
v['LINKFLAGS_dprogram']=['-quiet']
v['DFLAGS_dshlib']=['-fPIC']
v['LINKFLAGS_dshlib']=['-L-shared']
v['DHEADER_ext']='.di'
v.DFLAGS_d_with_header=['-H','-Hf']
v['D_HDR_F']='%s'
def configure(conf):
conf.find_dmd()
if sys.platform=='win32':
out=conf.cmd_and_log([conf.env.D,'--help'])
if out.find("D Compiler v2.")>-1:
conf.fatal('dmd2 on Windows is not supported, use gdc or ldc2 instead')
conf.load('ar')
conf.load('d')
conf.common_flags_dmd()
conf.d_platform_flags()
if str(conf.env.D).find('ldc')>-1:
conf.common_flags_ldc()
| gpl-2.0 |
gusai-francelabs/datafari | windows/python/Lib/test/test_exceptions.py | 12 | 26271 | # Python test set -- part 5, built-in exceptions
import os
import sys
import unittest
import pickle, cPickle
from test.test_support import (TESTFN, unlink, run_unittest, captured_output,
check_warnings, cpython_only)
from test.test_pep352 import ignore_deprecation_warnings
# XXX This is not really enough, each *operation* should be tested!
class ExceptionTests(unittest.TestCase):
def testReload(self):
# Reloading the built-in exceptions module failed prior to Py2.2, while it
# should act the same as reloading built-in sys.
try:
from imp import reload
import exceptions
reload(exceptions)
except ImportError, e:
self.fail("reloading exceptions: %s" % e)
def raise_catch(self, exc, excname):
try:
raise exc, "spam"
except exc, err:
buf1 = str(err)
try:
raise exc("spam")
except exc, err:
buf2 = str(err)
self.assertEqual(buf1, buf2)
self.assertEqual(exc.__name__, excname)
def testRaising(self):
self.raise_catch(AttributeError, "AttributeError")
self.assertRaises(AttributeError, getattr, sys, "undefined_attribute")
self.raise_catch(EOFError, "EOFError")
fp = open(TESTFN, 'w')
fp.close()
fp = open(TESTFN, 'r')
savestdin = sys.stdin
try:
try:
sys.stdin = fp
x = raw_input()
except EOFError:
pass
finally:
sys.stdin = savestdin
fp.close()
unlink(TESTFN)
self.raise_catch(IOError, "IOError")
self.assertRaises(IOError, open, 'this file does not exist', 'r')
self.raise_catch(ImportError, "ImportError")
self.assertRaises(ImportError, __import__, "undefined_module")
self.raise_catch(IndexError, "IndexError")
x = []
self.assertRaises(IndexError, x.__getitem__, 10)
self.raise_catch(KeyError, "KeyError")
x = {}
self.assertRaises(KeyError, x.__getitem__, 'key')
self.raise_catch(KeyboardInterrupt, "KeyboardInterrupt")
self.raise_catch(MemoryError, "MemoryError")
self.raise_catch(NameError, "NameError")
try: x = undefined_variable
except NameError: pass
self.raise_catch(OverflowError, "OverflowError")
x = 1
for dummy in range(128):
x += x # this simply shouldn't blow up
self.raise_catch(RuntimeError, "RuntimeError")
self.raise_catch(SyntaxError, "SyntaxError")
try: exec '/\n'
except SyntaxError: pass
self.raise_catch(IndentationError, "IndentationError")
self.raise_catch(TabError, "TabError")
# can only be tested under -tt, and is the only test for -tt
#try: compile("try:\n\t1.0/0.0\n \t1.0/0.0\nfinally:\n pass\n", '<string>', 'exec')
#except TabError: pass
#else: self.fail("TabError not raised")
self.raise_catch(SystemError, "SystemError")
self.raise_catch(SystemExit, "SystemExit")
self.assertRaises(SystemExit, sys.exit, 0)
self.raise_catch(TypeError, "TypeError")
try: [] + ()
except TypeError: pass
self.raise_catch(ValueError, "ValueError")
self.assertRaises(ValueError, chr, 10000)
self.raise_catch(ZeroDivisionError, "ZeroDivisionError")
try: x = 1 // 0
except ZeroDivisionError: pass
self.raise_catch(Exception, "Exception")
try: x = 1 // 0
except Exception, e: pass
def testSyntaxErrorMessage(self):
# make sure the right exception message is raised for each of
# these code fragments
def ckmsg(src, msg):
try:
compile(src, '<fragment>', 'exec')
except SyntaxError, e:
if e.msg != msg:
self.fail("expected %s, got %s" % (msg, e.msg))
else:
self.fail("failed to get expected SyntaxError")
s = '''while 1:
try:
pass
finally:
continue'''
if not sys.platform.startswith('java'):
ckmsg(s, "'continue' not supported inside 'finally' clause")
s = '''if 1:
try:
continue
except:
pass'''
ckmsg(s, "'continue' not properly in loop")
ckmsg("continue\n", "'continue' not properly in loop")
@cpython_only
def testSettingException(self):
# test that setting an exception at the C level works even if the
# exception object can't be constructed.
class BadException:
def __init__(self_):
raise RuntimeError, "can't instantiate BadException"
def test_capi1():
import _testcapi
try:
_testcapi.raise_exception(BadException, 1)
except TypeError, err:
exc, err, tb = sys.exc_info()
co = tb.tb_frame.f_code
self.assertEqual(co.co_name, "test_capi1")
self.assertTrue(co.co_filename.endswith('test_exceptions'+os.extsep+'py'))
else:
self.fail("Expected exception")
def test_capi2():
import _testcapi
try:
_testcapi.raise_exception(BadException, 0)
except RuntimeError, err:
exc, err, tb = sys.exc_info()
co = tb.tb_frame.f_code
self.assertEqual(co.co_name, "__init__")
self.assertTrue(co.co_filename.endswith('test_exceptions'+os.extsep+'py'))
co2 = tb.tb_frame.f_back.f_code
self.assertEqual(co2.co_name, "test_capi2")
else:
self.fail("Expected exception")
if not sys.platform.startswith('java'):
test_capi1()
test_capi2()
def test_WindowsError(self):
try:
WindowsError
except NameError:
pass
else:
self.assertEqual(str(WindowsError(1001)),
"1001")
self.assertEqual(str(WindowsError(1001, "message")),
"[Error 1001] message")
self.assertEqual(WindowsError(1001, "message").errno, 22)
self.assertEqual(WindowsError(1001, "message").winerror, 1001)
@ignore_deprecation_warnings
def testAttributes(self):
# test that exception attributes are happy
exceptionList = [
(BaseException, (), {'message' : '', 'args' : ()}),
(BaseException, (1, ), {'message' : 1, 'args' : (1,)}),
(BaseException, ('foo',),
{'message' : 'foo', 'args' : ('foo',)}),
(BaseException, ('foo', 1),
{'message' : '', 'args' : ('foo', 1)}),
(SystemExit, ('foo',),
{'message' : 'foo', 'args' : ('foo',), 'code' : 'foo'}),
(IOError, ('foo',),
{'message' : 'foo', 'args' : ('foo',), 'filename' : None,
'errno' : None, 'strerror' : None}),
(IOError, ('foo', 'bar'),
{'message' : '', 'args' : ('foo', 'bar'), 'filename' : None,
'errno' : 'foo', 'strerror' : 'bar'}),
(IOError, ('foo', 'bar', 'baz'),
{'message' : '', 'args' : ('foo', 'bar'), 'filename' : 'baz',
'errno' : 'foo', 'strerror' : 'bar'}),
(IOError, ('foo', 'bar', 'baz', 'quux'),
{'message' : '', 'args' : ('foo', 'bar', 'baz', 'quux')}),
(EnvironmentError, ('errnoStr', 'strErrorStr', 'filenameStr'),
{'message' : '', 'args' : ('errnoStr', 'strErrorStr'),
'strerror' : 'strErrorStr', 'errno' : 'errnoStr',
'filename' : 'filenameStr'}),
(EnvironmentError, (1, 'strErrorStr', 'filenameStr'),
{'message' : '', 'args' : (1, 'strErrorStr'), 'errno' : 1,
'strerror' : 'strErrorStr', 'filename' : 'filenameStr'}),
(SyntaxError, (), {'message' : '', 'msg' : None, 'text' : None,
'filename' : None, 'lineno' : None, 'offset' : None,
'print_file_and_line' : None}),
(SyntaxError, ('msgStr',),
{'message' : 'msgStr', 'args' : ('msgStr',), 'text' : None,
'print_file_and_line' : None, 'msg' : 'msgStr',
'filename' : None, 'lineno' : None, 'offset' : None}),
(SyntaxError, ('msgStr', ('filenameStr', 'linenoStr', 'offsetStr',
'textStr')),
{'message' : '', 'offset' : 'offsetStr', 'text' : 'textStr',
'args' : ('msgStr', ('filenameStr', 'linenoStr',
'offsetStr', 'textStr')),
'print_file_and_line' : None, 'msg' : 'msgStr',
'filename' : 'filenameStr', 'lineno' : 'linenoStr'}),
(SyntaxError, ('msgStr', 'filenameStr', 'linenoStr', 'offsetStr',
'textStr', 'print_file_and_lineStr'),
{'message' : '', 'text' : None,
'args' : ('msgStr', 'filenameStr', 'linenoStr', 'offsetStr',
'textStr', 'print_file_and_lineStr'),
'print_file_and_line' : None, 'msg' : 'msgStr',
'filename' : None, 'lineno' : None, 'offset' : None}),
(UnicodeError, (), {'message' : '', 'args' : (),}),
(UnicodeEncodeError, ('ascii', u'a', 0, 1, 'ordinal not in range'),
{'message' : '', 'args' : ('ascii', u'a', 0, 1,
'ordinal not in range'),
'encoding' : 'ascii', 'object' : u'a',
'start' : 0, 'reason' : 'ordinal not in range'}),
(UnicodeDecodeError, ('ascii', '\xff', 0, 1, 'ordinal not in range'),
{'message' : '', 'args' : ('ascii', '\xff', 0, 1,
'ordinal not in range'),
'encoding' : 'ascii', 'object' : '\xff',
'start' : 0, 'reason' : 'ordinal not in range'}),
(UnicodeTranslateError, (u"\u3042", 0, 1, "ouch"),
{'message' : '', 'args' : (u'\u3042', 0, 1, 'ouch'),
'object' : u'\u3042', 'reason' : 'ouch',
'start' : 0, 'end' : 1}),
]
try:
exceptionList.append(
(WindowsError, (1, 'strErrorStr', 'filenameStr'),
{'message' : '', 'args' : (1, 'strErrorStr'),
'strerror' : 'strErrorStr', 'winerror' : 1,
'errno' : 22, 'filename' : 'filenameStr'})
)
except NameError:
pass
for exc, args, expected in exceptionList:
try:
raise exc(*args)
except BaseException, e:
if type(e) is not exc:
raise
# Verify module name
self.assertEqual(type(e).__module__, 'exceptions')
# Verify no ref leaks in Exc_str()
s = str(e)
for checkArgName in expected:
self.assertEqual(repr(getattr(e, checkArgName)),
repr(expected[checkArgName]),
'exception "%s", attribute "%s"' %
(repr(e), checkArgName))
# test for pickling support
for p in pickle, cPickle:
for protocol in range(p.HIGHEST_PROTOCOL + 1):
new = p.loads(p.dumps(e, protocol))
for checkArgName in expected:
got = repr(getattr(new, checkArgName))
want = repr(expected[checkArgName])
self.assertEqual(got, want,
'pickled "%r", attribute "%s"' %
(e, checkArgName))
def testDeprecatedMessageAttribute(self):
# Accessing BaseException.message and relying on its value set by
# BaseException.__init__ triggers a deprecation warning.
exc = BaseException("foo")
with check_warnings(("BaseException.message has been deprecated "
"as of Python 2.6", DeprecationWarning)) as w:
self.assertEqual(exc.message, "foo")
self.assertEqual(len(w.warnings), 1)
def testRegularMessageAttribute(self):
# Accessing BaseException.message after explicitly setting a value
# for it does not trigger a deprecation warning.
exc = BaseException("foo")
exc.message = "bar"
with check_warnings(quiet=True) as w:
self.assertEqual(exc.message, "bar")
self.assertEqual(len(w.warnings), 0)
# Deleting the message is supported, too.
del exc.message
with self.assertRaises(AttributeError):
exc.message
@ignore_deprecation_warnings
def testPickleMessageAttribute(self):
# Pickling with message attribute must work, as well.
e = Exception("foo")
f = Exception("foo")
f.message = "bar"
for p in pickle, cPickle:
ep = p.loads(p.dumps(e))
self.assertEqual(ep.message, "foo")
fp = p.loads(p.dumps(f))
self.assertEqual(fp.message, "bar")
@ignore_deprecation_warnings
def testSlicing(self):
# Test that you can slice an exception directly instead of requiring
# going through the 'args' attribute.
args = (1, 2, 3)
exc = BaseException(*args)
self.assertEqual(exc[:], args)
self.assertEqual(exc.args[:], args)
def testKeywordArgs(self):
# test that builtin exceptions don't take keyword args,
# but user-defined subclasses can if they want
self.assertRaises(TypeError, BaseException, a=1)
class DerivedException(BaseException):
def __init__(self, fancy_arg):
BaseException.__init__(self)
self.fancy_arg = fancy_arg
x = DerivedException(fancy_arg=42)
self.assertEqual(x.fancy_arg, 42)
def testInfiniteRecursion(self):
def f():
return f()
self.assertRaises(RuntimeError, f)
def g():
try:
return g()
except ValueError:
return -1
# The test prints an unraisable recursion error when
# doing "except ValueError", this is because subclass
# checking has recursion checking too.
with captured_output("stderr"):
try:
g()
except RuntimeError:
pass
except:
self.fail("Should have raised KeyError")
else:
self.fail("Should have raised KeyError")
def testUnicodeStrUsage(self):
# Make sure both instances and classes have a str and unicode
# representation.
self.assertTrue(str(Exception))
self.assertTrue(unicode(Exception))
self.assertTrue(str(Exception('a')))
self.assertTrue(unicode(Exception(u'a')))
self.assertTrue(unicode(Exception(u'\xe1')))
def testUnicodeChangeAttributes(self):
# See issue 7309. This was a crasher.
u = UnicodeEncodeError('baz', u'xxxxx', 1, 5, 'foo')
self.assertEqual(str(u), "'baz' codec can't encode characters in position 1-4: foo")
u.end = 2
self.assertEqual(str(u), "'baz' codec can't encode character u'\\x78' in position 1: foo")
u.end = 5
u.reason = 0x345345345345345345
self.assertEqual(str(u), "'baz' codec can't encode characters in position 1-4: 965230951443685724997")
u.encoding = 4000
self.assertEqual(str(u), "'4000' codec can't encode characters in position 1-4: 965230951443685724997")
u.start = 1000
self.assertEqual(str(u), "'4000' codec can't encode characters in position 1000-4: 965230951443685724997")
u = UnicodeDecodeError('baz', 'xxxxx', 1, 5, 'foo')
self.assertEqual(str(u), "'baz' codec can't decode bytes in position 1-4: foo")
u.end = 2
self.assertEqual(str(u), "'baz' codec can't decode byte 0x78 in position 1: foo")
u.end = 5
u.reason = 0x345345345345345345
self.assertEqual(str(u), "'baz' codec can't decode bytes in position 1-4: 965230951443685724997")
u.encoding = 4000
self.assertEqual(str(u), "'4000' codec can't decode bytes in position 1-4: 965230951443685724997")
u.start = 1000
self.assertEqual(str(u), "'4000' codec can't decode bytes in position 1000-4: 965230951443685724997")
u = UnicodeTranslateError(u'xxxx', 1, 5, 'foo')
self.assertEqual(str(u), "can't translate characters in position 1-4: foo")
u.end = 2
self.assertEqual(str(u), "can't translate character u'\\x78' in position 1: foo")
u.end = 5
u.reason = 0x345345345345345345
self.assertEqual(str(u), "can't translate characters in position 1-4: 965230951443685724997")
u.start = 1000
self.assertEqual(str(u), "can't translate characters in position 1000-4: 965230951443685724997")
def test_unicode_errors_no_object(self):
# See issue #21134.
klasses = UnicodeEncodeError, UnicodeDecodeError, UnicodeTranslateError
for klass in klasses:
self.assertEqual(str(klass.__new__(klass)), "")
def test_badisinstance(self):
# Bug #2542: if issubclass(e, MyException) raises an exception,
# it should be ignored
class Meta(type):
def __subclasscheck__(cls, subclass):
raise ValueError()
class MyException(Exception):
__metaclass__ = Meta
pass
with captured_output("stderr") as stderr:
try:
raise KeyError()
except MyException, e:
self.fail("exception should not be a MyException")
except KeyError:
pass
except:
self.fail("Should have raised KeyError")
else:
self.fail("Should have raised KeyError")
with captured_output("stderr") as stderr:
def g():
try:
return g()
except RuntimeError:
return sys.exc_info()
e, v, tb = g()
self.assertTrue(e is RuntimeError, e)
self.assertIn("maximum recursion depth exceeded", str(v))
def test_new_returns_invalid_instance(self):
# See issue #11627.
class MyException(Exception):
def __new__(cls, *args):
return object()
with self.assertRaises(TypeError):
raise MyException
def test_assert_with_tuple_arg(self):
try:
assert False, (3,)
except AssertionError as e:
self.assertEqual(str(e), "(3,)")
def test_bad_exception_clearing(self):
# See issue 16445: use of Py_XDECREF instead of Py_CLEAR in
# BaseException_set_message gave a possible way to segfault the
# interpreter.
class Nasty(str):
def __del__(message):
del e.message
e = ValueError(Nasty("msg"))
e.args = ()
del e.message
# Helper class used by TestSameStrAndUnicodeMsg
class ExcWithOverriddenStr(Exception):
"""Subclass of Exception that accepts a keyword 'msg' arg that is
returned by __str__. 'msg' won't be included in self.args"""
def __init__(self, *args, **kwargs):
self.msg = kwargs.pop('msg') # msg should always be present
super(ExcWithOverriddenStr, self).__init__(*args, **kwargs)
def __str__(self):
return self.msg
class TestSameStrAndUnicodeMsg(unittest.TestCase):
"""unicode(err) should return the same message of str(err). See #6108"""
def check_same_msg(self, exc, msg):
"""Helper function that checks if str(exc) == unicode(exc) == msg"""
self.assertEqual(str(exc), msg)
self.assertEqual(str(exc), unicode(exc))
def test_builtin_exceptions(self):
"""Check same msg for built-in exceptions"""
# These exceptions implement a __str__ method that uses the args
# to create a better error message. unicode(e) should return the same
# message.
exceptions = [
SyntaxError('invalid syntax', ('<string>', 1, 3, '2+*3')),
IOError(2, 'No such file or directory'),
KeyError('both should have the same quotes'),
UnicodeDecodeError('ascii', '\xc3\xa0', 0, 1,
'ordinal not in range(128)'),
UnicodeEncodeError('ascii', u'\u1234', 0, 1,
'ordinal not in range(128)')
]
for exception in exceptions:
self.assertEqual(str(exception), unicode(exception))
def test_0_args(self):
"""Check same msg for Exception with 0 args"""
# str() and unicode() on an Exception with no args should return an
# empty string
self.check_same_msg(Exception(), '')
def test_0_args_with_overridden___str__(self):
"""Check same msg for exceptions with 0 args and overridden __str__"""
# str() and unicode() on an exception with overridden __str__ that
# returns an ascii-only string should return the same string
for msg in ('foo', u'foo'):
self.check_same_msg(ExcWithOverriddenStr(msg=msg), msg)
# if __str__ returns a non-ascii unicode string str() should fail
# but unicode() should return the unicode string
e = ExcWithOverriddenStr(msg=u'f\xf6\xf6') # no args
self.assertRaises(UnicodeEncodeError, str, e)
self.assertEqual(unicode(e), u'f\xf6\xf6')
def test_1_arg(self):
"""Check same msg for Exceptions with 1 arg"""
for arg in ('foo', u'foo'):
self.check_same_msg(Exception(arg), arg)
# if __str__ is not overridden and self.args[0] is a non-ascii unicode
# string, str() should try to return str(self.args[0]) and fail.
# unicode() should return unicode(self.args[0]) and succeed.
e = Exception(u'f\xf6\xf6')
self.assertRaises(UnicodeEncodeError, str, e)
self.assertEqual(unicode(e), u'f\xf6\xf6')
def test_1_arg_with_overridden___str__(self):
"""Check same msg for exceptions with overridden __str__ and 1 arg"""
# when __str__ is overridden and __unicode__ is not implemented
# unicode(e) returns the same as unicode(e.__str__()).
for msg in ('foo', u'foo'):
self.check_same_msg(ExcWithOverriddenStr('arg', msg=msg), msg)
# if __str__ returns a non-ascii unicode string, str() should fail
# but unicode() should succeed.
e = ExcWithOverriddenStr('arg', msg=u'f\xf6\xf6') # 1 arg
self.assertRaises(UnicodeEncodeError, str, e)
self.assertEqual(unicode(e), u'f\xf6\xf6')
def test_many_args(self):
"""Check same msg for Exceptions with many args"""
argslist = [
(3, 'foo'),
(1, u'foo', 'bar'),
(4, u'f\xf6\xf6', u'bar', 'baz')
]
# both str() and unicode() should return a repr() of the args
for args in argslist:
self.check_same_msg(Exception(*args), repr(args))
def test_many_args_with_overridden___str__(self):
"""Check same msg for exceptions with overridden __str__ and many args"""
# if __str__ returns an ascii string / ascii unicode string
# both str() and unicode() should succeed
for msg in ('foo', u'foo'):
e = ExcWithOverriddenStr('arg1', u'arg2', u'f\xf6\xf6', msg=msg)
self.check_same_msg(e, msg)
# if __str__ returns a non-ascii unicode string, str() should fail
# but unicode() should succeed
e = ExcWithOverriddenStr('arg1', u'f\xf6\xf6', u'arg3', # 3 args
msg=u'f\xf6\xf6')
self.assertRaises(UnicodeEncodeError, str, e)
self.assertEqual(unicode(e), u'f\xf6\xf6')
@cpython_only
def test_exception_with_doc(self):
import _testcapi
doc2 = "This is a test docstring."
doc4 = "This is another test docstring."
self.assertRaises(SystemError, _testcapi.make_exception_with_doc,
"error1")
# test basic usage of PyErr_NewException
error1 = _testcapi.make_exception_with_doc("_testcapi.error1")
self.assertIs(type(error1), type)
self.assertTrue(issubclass(error1, Exception))
self.assertIsNone(error1.__doc__)
# test with given docstring
error2 = _testcapi.make_exception_with_doc("_testcapi.error2", doc2)
self.assertEqual(error2.__doc__, doc2)
# test with explicit base (without docstring)
error3 = _testcapi.make_exception_with_doc("_testcapi.error3",
base=error2)
self.assertTrue(issubclass(error3, error2))
# test with explicit base tuple
class C(object):
pass
error4 = _testcapi.make_exception_with_doc("_testcapi.error4", doc4,
(error3, C))
self.assertTrue(issubclass(error4, error3))
self.assertTrue(issubclass(error4, C))
self.assertEqual(error4.__doc__, doc4)
# test with explicit dictionary
error5 = _testcapi.make_exception_with_doc("_testcapi.error5", "",
error4, {'a': 1})
self.assertTrue(issubclass(error5, error4))
self.assertEqual(error5.a, 1)
self.assertEqual(error5.__doc__, "")
def test_main():
run_unittest(ExceptionTests, TestSameStrAndUnicodeMsg)
if __name__ == '__main__':
test_main()
| apache-2.0 |
GarySparrow/mFlaskWeb | venv/Lib/site-packages/pygments/lexers/ecl.py | 47 | 5875 | # -*- coding: utf-8 -*-
"""
pygments.lexers.ecl
~~~~~~~~~~~~~~~~~~~
Lexers for the ECL language.
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from pygments.lexer import RegexLexer, include, bygroups, words
from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
Number, Punctuation, Error
__all__ = ['ECLLexer']
class ECLLexer(RegexLexer):
"""
Lexer for the declarative big-data `ECL
<http://hpccsystems.com/community/docs/ecl-language-reference/html>`_
language.
.. versionadded:: 1.5
"""
name = 'ECL'
aliases = ['ecl']
filenames = ['*.ecl']
mimetypes = ['application/x-ecl']
flags = re.IGNORECASE | re.MULTILINE
tokens = {
'root': [
include('whitespace'),
include('statements'),
],
'whitespace': [
(r'\s+', Text),
(r'\/\/.*', Comment.Single),
(r'/(\\\n)?\*(.|\n)*?\*(\\\n)?/', Comment.Multiline),
],
'statements': [
include('types'),
include('keywords'),
include('functions'),
include('hash'),
(r'"', String, 'string'),
(r'\'', String, 'string'),
(r'(\d+\.\d*|\.\d+|\d+)e[+-]?\d+[lu]*', Number.Float),
(r'(\d+\.\d*|\.\d+|\d+f)f?', Number.Float),
(r'0x[0-9a-f]+[lu]*', Number.Hex),
(r'0[0-7]+[lu]*', Number.Oct),
(r'\d+[lu]*', Number.Integer),
(r'\*/', Error),
(r'[~!%^&*+=|?:<>/-]+', Operator),
(r'[{}()\[\],.;]', Punctuation),
(r'[a-z_]\w*', Name),
],
'hash': [
(r'^#.*$', Comment.Preproc),
],
'types': [
(r'(RECORD|END)\D', Keyword.Declaration),
(r'((?:ASCII|BIG_ENDIAN|BOOLEAN|DATA|DECIMAL|EBCDIC|INTEGER|PATTERN|'
r'QSTRING|REAL|RECORD|RULE|SET OF|STRING|TOKEN|UDECIMAL|UNICODE|'
r'UNSIGNED|VARSTRING|VARUNICODE)\d*)(\s+)',
bygroups(Keyword.Type, Text)),
],
'keywords': [
(words((
'APPLY', 'ASSERT', 'BUILD', 'BUILDINDEX', 'EVALUATE', 'FAIL',
'KEYDIFF', 'KEYPATCH', 'LOADXML', 'NOTHOR', 'NOTIFY', 'OUTPUT',
'PARALLEL', 'SEQUENTIAL', 'SOAPCALL', 'CHECKPOINT', 'DEPRECATED',
'FAILCODE', 'FAILMESSAGE', 'FAILURE', 'GLOBAL', 'INDEPENDENT',
'ONWARNING', 'PERSIST', 'PRIORITY', 'RECOVERY', 'STORED', 'SUCCESS',
'WAIT', 'WHEN'), suffix=r'\b'),
Keyword.Reserved),
# These are classed differently, check later
(words((
'ALL', 'AND', 'ANY', 'AS', 'ATMOST', 'BEFORE', 'BEGINC++', 'BEST', 'BETWEEN', 'CASE',
'CONST', 'COUNTER', 'CSV', 'DESCEND', 'ENCRYPT', 'ENDC++', 'ENDMACRO', 'EXCEPT',
'EXCLUSIVE', 'EXPIRE', 'EXPORT', 'EXTEND', 'FALSE', 'FEW', 'FIRST', 'FLAT', 'FULL',
'FUNCTION', 'GROUP', 'HEADER', 'HEADING', 'HOLE', 'IFBLOCK', 'IMPORT', 'IN', 'JOINED',
'KEEP', 'KEYED', 'LAST', 'LEFT', 'LIMIT', 'LOAD', 'LOCAL', 'LOCALE', 'LOOKUP', 'MACRO',
'MANY', 'MAXCOUNT', 'MAXLENGTH', 'MIN SKEW', 'MODULE', 'INTERFACE', 'NAMED', 'NOCASE',
'NOROOT', 'NOSCAN', 'NOSORT', 'NOT', 'OF', 'ONLY', 'OPT', 'OR', 'OUTER', 'OVERWRITE',
'PACKED', 'PARTITION', 'PENALTY', 'PHYSICALLENGTH', 'PIPE', 'QUOTE', 'RELATIONSHIP',
'REPEAT', 'RETURN', 'RIGHT', 'SCAN', 'SELF', 'SEPARATOR', 'SERVICE', 'SHARED', 'SKEW',
'SKIP', 'SQL', 'STORE', 'TERMINATOR', 'THOR', 'THRESHOLD', 'TOKEN', 'TRANSFORM', 'TRIM',
'TRUE', 'TYPE', 'UNICODEORDER', 'UNSORTED', 'VALIDATE', 'VIRTUAL', 'WHOLE', 'WILD',
'WITHIN', 'XML', 'XPATH', '__COMPRESSED__'), suffix=r'\b'),
Keyword.Reserved),
],
'functions': [
(words((
'ABS', 'ACOS', 'ALLNODES', 'ASCII', 'ASIN', 'ASSTRING', 'ATAN', 'ATAN2', 'AVE', 'CASE',
'CHOOSE', 'CHOOSEN', 'CHOOSESETS', 'CLUSTERSIZE', 'COMBINE', 'CORRELATION', 'COS',
'COSH', 'COUNT', 'COVARIANCE', 'CRON', 'DATASET', 'DEDUP', 'DEFINE', 'DENORMALIZE',
'DISTRIBUTE', 'DISTRIBUTED', 'DISTRIBUTION', 'EBCDIC', 'ENTH', 'ERROR', 'EVALUATE',
'EVENT', 'EVENTEXTRA', 'EVENTNAME', 'EXISTS', 'EXP', 'FAILCODE', 'FAILMESSAGE',
'FETCH', 'FROMUNICODE', 'GETISVALID', 'GLOBAL', 'GRAPH', 'GROUP', 'HASH', 'HASH32',
'HASH64', 'HASHCRC', 'HASHMD5', 'HAVING', 'IF', 'INDEX', 'INTFORMAT', 'ISVALID',
'ITERATE', 'JOIN', 'KEYUNICODE', 'LENGTH', 'LIBRARY', 'LIMIT', 'LN', 'LOCAL', 'LOG', 'LOOP',
'MAP', 'MATCHED', 'MATCHLENGTH', 'MATCHPOSITION', 'MATCHTEXT', 'MATCHUNICODE',
'MAX', 'MERGE', 'MERGEJOIN', 'MIN', 'NOLOCAL', 'NONEMPTY', 'NORMALIZE', 'PARSE', 'PIPE',
'POWER', 'PRELOAD', 'PROCESS', 'PROJECT', 'PULL', 'RANDOM', 'RANGE', 'RANK', 'RANKED',
'REALFORMAT', 'RECORDOF', 'REGEXFIND', 'REGEXREPLACE', 'REGROUP', 'REJECTED',
'ROLLUP', 'ROUND', 'ROUNDUP', 'ROW', 'ROWDIFF', 'SAMPLE', 'SET', 'SIN', 'SINH', 'SIZEOF',
'SOAPCALL', 'SORT', 'SORTED', 'SQRT', 'STEPPED', 'STORED', 'SUM', 'TABLE', 'TAN', 'TANH',
'THISNODE', 'TOPN', 'TOUNICODE', 'TRANSFER', 'TRIM', 'TRUNCATE', 'TYPEOF', 'UNGROUP',
'UNICODEORDER', 'VARIANCE', 'WHICH', 'WORKUNIT', 'XMLDECODE', 'XMLENCODE',
'XMLTEXT', 'XMLUNICODE'), suffix=r'\b'),
Name.Function),
],
'string': [
(r'"', String, '#pop'),
(r'\'', String, '#pop'),
(r'[^"\']+', String),
],
}
| mit |
ofekd/servo | components/script/dom/bindings/codegen/parser/tests/test_mozmap.py | 75 | 1163 | import WebIDL
def WebIDLTest(parser, harness):
parser.parse("""
dictionary Dict {};
interface MozMapArg {
void foo(MozMap<Dict> arg);
};
""")
results = parser.finish()
harness.check(len(results), 2, "Should know about two things");
harness.ok(isinstance(results[1], WebIDL.IDLInterface),
"Should have an interface here");
members = results[1].members
harness.check(len(members), 1, "Should have one member")
harness.ok(members[0].isMethod(), "Should have method")
signature = members[0].signatures()[0]
args = signature[1]
harness.check(len(args), 1, "Should have one arg")
harness.ok(args[0].type.isMozMap(), "Should have a MozMap type here")
harness.ok(args[0].type.inner.isDictionary(),
"Should have a dictionary inner type")
parser = parser.reset()
threw = False
try:
parser.parse("""
interface MozMapVoidArg {
void foo(MozMap<void> arg);
};
""")
results = parser.finish()
except Exception,x:
threw = True
harness.ok(threw, "Should have thrown.")
| mpl-2.0 |
TsubameDono/codecombat | scripts/devSetup/mongo.py | 67 | 4203 | from __future__ import print_function
__author__ = u'schmatz'
from downloader import Downloader
import tarfile
from errors import DownloadCorruptionError
import warnings
import os
from configuration import Configuration
from dependency import Dependency
import sys
import shutil
class MongoDB(Dependency):
def __init__(self,configuration):
super(self.__class__, self).__init__(configuration)
operating_system = configuration.system.operating_system
self.config.directory.create_directory_in_tmp(u"mongo")
if operating_system == u"mac":
self.downloader = MacMongoDBDownloader(self)
elif operating_system == u"win":
self.downloader = WindowsMongoDBDownloader(self)
elif operating_system == u"linux":
self.downloader = LinuxMongoDBDownloader(self)
@property
def tmp_directory(self):
return self.config.directory.tmp_directory
@property
def bin_directory(self):
return self.config.directory.bin_directory
def bashrc_string(self):
return "COCO_MONGOD_PATH=" + self.config.directory.bin_directory + os.sep + u"mongo" + os.sep +"bin" + os.sep + "mongod"
def download_dependencies(self):
install_directory = self.config.directory.bin_directory + os.sep + u"mongo"
if os.path.exists(install_directory):
print(u"Skipping MongoDB download because " + install_directory + " exists.")
else:
self.downloader.download()
self.downloader.decompress()
def install_dependencies(self):
install_directory = self.config.directory.bin_directory + os.sep + u"mongo"
if os.path.exists(install_directory):
print(u"Skipping creation of " + install_directory + " because it exists.")
else:
shutil.copytree(self.findUnzippedMongoBinPath(),install_directory)
def findUnzippedMongoBinPath(self):
return self.downloader.download_directory + os.sep + \
(next(os.walk(self.downloader.download_directory))[1])[0] + os.sep + u"bin"
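# (added note) the extracted tarball contains a single top-level directory,
# e.g. "mongodb-linux-x86_64-3.0.2", so the helper above walks the download
# directory once and returns that directory's "bin" subfolder.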
class MongoDBDownloader(Downloader):
@property
def download_url(self):
raise NotImplementedError
@property
def download_directory(self):
return self.dependency.tmp_directory + os.sep + u"mongo"
@property
def downloaded_file_path(self):
return self.download_directory + os.sep + u"mongodb.tgz"
def download(self):
print(u"Downloading MongoDB from URL " + self.download_url)
self.download_file(self.download_url,self.downloaded_file_path)
self.check_download()
def decompress(self):
print(u"Decompressing MongoDB...")
tfile = tarfile.open(self.downloaded_file_path)
#TODO: make directory handler class
tfile.extractall(self.download_directory)
print(u"Decompressed MongoDB into " + self.download_directory)
def check_download(self):
isFileValid = tarfile.is_tarfile(self.downloaded_file_path)
if not isFileValid:
raise DownloadCorruptionError(u"MongoDB download was corrupted.")
class LinuxMongoDBDownloader(MongoDBDownloader):
@property
def download_url(self):
if self.dependency.config.mem_width == 64:
return u"http://fastdl.mongodb.org/linux/mongodb-linux-x86_64-3.0.2.tgz"
else:
warnings.warn(u"MongoDB *really* doesn't run well on 32 bit systems. You have been warned.")
return u"http://fastdl.mongodb.org/linux/mongodb-linux-i686-3.0.2.tgz"
class WindowsMongoDBDownloader(MongoDBDownloader):
@property
def download_url(self):
#TODO: Implement Windows Vista detection
warnings.warn(u"If you have a version of Windows older than 7, MongoDB may not function properly!")
if self.dependency.config.mem_width == 64:
return u"http://fastdl.mongodb.org/win32/mongodb-win32-x86_64-2008plus-3.0.2.zip"
else:
return u"http://fastdl.mongodb.org/win32/mongodb-win32-i386-3.0.2.zip"
class MacMongoDBDownloader(MongoDBDownloader):
@property
def download_url(self):
return u"http://fastdl.mongodb.org/osx/mongodb-osx-x86_64-3.0.2.tgz"
| mit |
cdsteinkuehler/MachineKit | lib/python/gladevcp/makepins.py | 12 | 2268 | #!/usr/bin/python2.4
# -*- encoding: utf-8 -*-
# GLADE_VCP
# Copyright 2010 Chris Morley
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
import sys
import gtk
import hal
import gtk.glade
import gobject
import getopt
from hal_widgets import _HalWidgetBase
from led import HAL_LED
from hal_glib import GComponent
from gladevcp.gladebuilder import widget_name
class GladePanel():
def on_window_destroy(self, widget, data=None):
self.hal.exit()
gobject.source_remove(self.timer)
gtk.main_quit()
def __init__(self,halcomp,xmlname,builder,buildertype):
self.builder = builder
self.hal = GComponent(halcomp)
self.widgets = {}
for widget in builder.get_objects():
idname = widget_name(widget)
if idname is None:
continue
if isinstance(widget, _HalWidgetBase):
widget.hal_init(self.hal, idname)
self.widgets[idname] = widget
self.timer = gobject.timeout_add(100, self.update)
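# (added note) the timeout above invokes self.update every 100 ms, which in
# turn calls hal_update() on every HAL-aware widget collected from the builder.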
def update(self):
for obj in self.widgets.values():
obj.hal_update()
return True
def __getitem__(self, item):
return self.widgets[item]
def __setitem__(self, item, value):
self.widgets[item] = value
if __name__ == "__main__":
print "Gladevcp_make_pins cannot be run on its own"
print "It must be called by gladevcp or a python program"
print "that loads and displays the glade panel and creates a HAL component"
# vim: sts=4 sw=4 et
| lgpl-2.1 |
Yarrick13/hwasp | tests/asp/AllAnswerSets/nontight/example.hamiltonian.7.asp.gringo.test.py | 4 | 1438 | input = """
1 2 2 1 3 4
1 3 2 1 2 4
1 4 0 0
1 5 2 1 6 7
1 6 2 1 5 7
1 7 0 0
1 8 2 1 9 10
1 9 2 1 8 10
1 10 0 0
1 11 2 1 12 13
1 12 2 1 11 13
1 13 0 0
1 14 2 1 15 16
1 15 2 1 14 16
1 16 0 0
1 17 2 1 18 19
1 18 2 1 17 19
1 19 0 0
1 20 2 1 21 22
1 21 2 1 20 22
1 22 0 0
1 23 2 1 24 25
1 24 2 1 23 25
1 25 0 0
1 26 2 1 27 28
1 27 2 1 26 28
1 28 0 0
1 29 2 1 30 31
1 30 2 1 29 31
1 31 0 0
1 32 2 1 33 34
1 33 2 1 32 34
1 34 0 0
1 35 2 1 36 37
1 36 2 1 35 37
1 37 0 0
1 38 1 0 11
1 39 1 0 14
1 38 2 0 39 26
1 40 2 0 38 2
1 38 2 0 40 11
1 40 2 0 39 29
1 41 2 0 38 5
1 38 2 0 41 17
1 41 2 0 39 32
1 39 2 0 38 8
1 39 2 0 40 14
1 39 2 0 41 20
1 42 2 0 41 23
1 41 2 0 42 35
1 1 2 0 2 8
1 1 2 0 2 5
1 1 2 0 2 29
1 1 2 0 5 8
1 1 2 0 5 32
1 1 2 0 5 35
1 1 2 0 8 20
1 1 2 0 8 14
1 1 2 0 11 14
1 1 2 0 11 26
1 1 2 0 11 17
1 1 2 0 14 20
1 1 2 0 17 23
1 1 2 0 17 26
1 1 2 0 20 17
1 1 2 0 20 23
1 1 2 0 26 29
1 1 2 0 26 32
1 1 2 0 29 32
1 1 2 0 32 35
1 1 1 1 38
1 1 1 1 40
1 1 1 1 41
1 1 1 1 39
1 1 1 1 42
0
38 reached(0)
39 reached(3)
40 reached(1)
41 reached(2)
42 reached(4)
3 out_hm(0,1)
6 out_hm(0,2)
9 out_hm(0,3)
12 out_hm(1,0)
15 out_hm(1,3)
18 out_hm(2,0)
21 out_hm(2,3)
24 out_hm(2,4)
27 out_hm(3,0)
30 out_hm(3,1)
33 out_hm(3,2)
36 out_hm(4,2)
2 in_hm(0,1)
5 in_hm(0,2)
8 in_hm(0,3)
11 in_hm(1,0)
14 in_hm(1,3)
17 in_hm(2,0)
20 in_hm(2,3)
23 in_hm(2,4)
26 in_hm(3,0)
29 in_hm(3,1)
32 in_hm(3,2)
35 in_hm(4,2)
0
B+
0
B-
1
0
1
"""
output = """
"""
| apache-2.0 |
stephane-martin/salt-debian-packaging | salt-2016.3.3/tests/unit/modules/status_test.py | 2 | 3076 | # -*- coding: utf-8 -*-
# Import Python libs
from __future__ import absolute_import
# Import Salt Libs
from salt.modules import status
from salt.exceptions import CommandExecutionError
# Import Salt Testing Libs
from salttesting import TestCase
from salttesting.helpers import ensure_in_syspath
from salttesting.mock import (
MagicMock,
patch,
)
ensure_in_syspath('../../')
# Globals
status.__salt__ = {}
class StatusTestCase(TestCase):
'''
test modules.status functions
'''
def test_uptime(self):
'''
Test modules.status.uptime function, new version
:return:
'''
class ProcUptime(object):
def __init__(self, *args, **kwargs):
self.data = "773865.18 1003405.46"
def read(self):
return self.data
with patch.dict(status.__salt__, {'cmd.run': MagicMock(return_value="1\n2\n3")}):
with patch('os.path.exists', MagicMock(return_value=True)):
with patch('time.time', MagicMock(return_value=1458821523.72)):
status.open = ProcUptime
u_time = status.uptime()
self.assertEqual(u_time['users'], 3)
self.assertEqual(u_time['seconds'], 773865)
self.assertEqual(u_time['days'], 8)
self.assertEqual(u_time['time'], '22:57')
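# (added note) 773865 s is 8 days plus 82665 s, and 82665 s is roughly
# 22 h 57 min, which is where the expected 'days': 8 and 'time': '22:57'
# values come from.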
def test_uptime_failure(self):
'''
Test modules.status.uptime function should raise an exception if /proc/uptime does not exists.
:return:
'''
with patch('os.path.exists', MagicMock(return_value=False)):
with self.assertRaises(CommandExecutionError):
status.uptime()
def test_deprecated_uptime(self):
'''
test modules.status.uptime function, deprecated version
'''
mock_uptime = 'very often'
mock_run = MagicMock(return_value=mock_uptime)
with patch.dict(status.__salt__, {'cmd.run': mock_run}):
self.assertEqual(status._uptime(), mock_uptime)
mock_uptime = 'very idle'
mock_run = MagicMock(return_value=mock_uptime)
with patch.dict(status.__salt__, {'cmd.run': mock_run}):
with patch('os.path.exists', MagicMock(return_value=True)):
self.assertEqual(status._uptime(human_readable=False), mock_uptime.split()[0])
mock_uptime = ''
mock_return = 'unexpected format in /proc/uptime'
mock_run = MagicMock(return_value=mock_uptime)
with patch.dict(status.__salt__, {'cmd.run': mock_run}):
with patch('os.path.exists', MagicMock(return_value=True)):
self.assertEqual(status._uptime(human_readable=False), mock_return)
mock_return = 'cannot find /proc/uptime'
with patch('os.path.exists', MagicMock(return_value=False)):
self.assertEqual(status._uptime(human_readable=False), mock_return)
if __name__ == '__main__':
from integration import run_tests
run_tests(StatusTestCase, needs_daemon=False)
| apache-2.0 |
andmos/ansible | lib/ansible/modules/network/f5/bigip_monitor_udp.py | 14 | 20573 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright: (c) 2017, F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'certified'}
DOCUMENTATION = r'''
---
module: bigip_monitor_udp
short_description: Manages F5 BIG-IP LTM udp monitors
description: Manages F5 BIG-IP LTM udp monitors.
version_added: 2.5
options:
name:
description:
- Monitor name.
required: True
parent:
description:
- The parent template of this monitor template. Once this value has
been set, it cannot be changed. By default, this value is the C(udp)
parent on the C(Common) partition.
default: /Common/udp
description:
description:
- The description of the monitor.
version_added: 2.7
send:
description:
- The send string for the monitor call. When creating a new monitor, if
this value is not provided, the default C(default send string) will be used.
receive:
description:
- The receive string for the monitor call.
receive_disable:
description:
- This setting works like C(receive), except that the system marks the node
or pool member disabled when its response matches the C(receive_disable)
string but not C(receive). To use this setting, you must specify both
C(receive_disable) and C(receive).
ip:
description:
- IP address part of the IP/port definition. If this parameter is not
provided when creating a new monitor, then the default value will be
'*'.
port:
description:
- Port address part of the IP/port definition. If this parameter is not
provided when creating a new monitor, then the default value will be
'*'. Note that if specifying an IP address, a value between 1 and 65535
must be specified.
interval:
description:
- The interval specifying how frequently the monitor instance of this
template will run. If this parameter is not provided when creating
a new monitor, then the default value will be 5. This value B(must)
be less than the C(timeout) value.
timeout:
description:
- The number of seconds in which the node or service must respond to
the monitor request. If the target responds within the set time
period, it is considered up. If the target does not respond within
the set time period, it is considered down. You can change this
number to any number you want, however, it should be 3 times the
interval number of seconds plus 1 second. If this parameter is not
provided when creating a new monitor, then the default value will be 16.
time_until_up:
description:
- Specifies the amount of time in seconds after the first successful
response before a node will be marked up. A value of 0 will cause a
node to be marked up immediately after a valid response is received
from the node. If this parameter is not provided when creating
a new monitor, then the default value will be 0.
partition:
description:
- Device partition to manage resources on.
default: Common
version_added: 2.5
state:
description:
- When C(present), ensures that the monitor exists.
- When C(absent), ensures the monitor is removed.
default: present
choices:
- present
- absent
version_added: 2.5
notes:
- Requires BIG-IP software version >= 12
extends_documentation_fragment: f5
author:
- Tim Rupp (@caphrim007)
- Wojciech Wypior (@wojtek0806)
'''
EXAMPLES = r'''
- name: Create UDP Monitor
bigip_monitor_udp:
state: present
ip: 10.10.10.10
name: my_udp_monitor
provider:
server: lb.mydomain.com
user: admin
password: secret
delegate_to: localhost
- name: Remove UDP Monitor
bigip_monitor_udp:
state: absent
name: my_udp_monitor
provider:
server: lb.mydomain.com
user: admin
password: secret
delegate_to: localhost
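# Illustrative sketch only: the monitor name, port, and the interval/timeout
# values below are assumptions chosen for the example, not module defaults.
- name: Create UDP Monitor with explicit timing (illustrative)
  bigip_monitor_udp:
    state: present
    name: my_udp_monitor_timed
    ip: 10.10.10.10
    port: 53
    interval: 5
    timeout: 16
    provider:
      server: lb.mydomain.com
      user: admin
      password: secret
  delegate_to: localhost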
'''
RETURN = r'''
parent:
description: New parent template of the monitor.
returned: changed
type: str
sample: http
description:
description: The description of the monitor.
returned: changed
type: str
sample: Important Monitor
ip:
description: The new IP of IP/port definition.
returned: changed
type: str
sample: 10.12.13.14
interval:
description: The new interval in which to run the monitor check.
returned: changed
type: int
sample: 2
timeout:
description: The new timeout in which the remote system must respond to the monitor.
returned: changed
type: int
sample: 10
time_until_up:
description: The new time in which to mark a system as up after first successful response.
returned: changed
type: int
sample: 2
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.basic import env_fallback
try:
from library.module_utils.network.f5.bigip import F5RestClient
from library.module_utils.network.f5.common import F5ModuleError
from library.module_utils.network.f5.common import AnsibleF5Parameters
from library.module_utils.network.f5.common import cleanup_tokens
from library.module_utils.network.f5.common import fq_name
from library.module_utils.network.f5.common import f5_argument_spec
from library.module_utils.network.f5.common import transform_name
from library.module_utils.network.f5.common import exit_json
from library.module_utils.network.f5.common import fail_json
from library.module_utils.network.f5.compare import cmp_str_with_none
from library.module_utils.network.f5.ipaddress import is_valid_ip
except ImportError:
from ansible.module_utils.network.f5.bigip import F5RestClient
from ansible.module_utils.network.f5.common import F5ModuleError
from ansible.module_utils.network.f5.common import AnsibleF5Parameters
from ansible.module_utils.network.f5.common import cleanup_tokens
from ansible.module_utils.network.f5.common import fq_name
from ansible.module_utils.network.f5.common import f5_argument_spec
from ansible.module_utils.network.f5.common import transform_name
from ansible.module_utils.network.f5.common import exit_json
from ansible.module_utils.network.f5.common import fail_json
from ansible.module_utils.network.f5.compare import cmp_str_with_none
from ansible.module_utils.network.f5.ipaddress import is_valid_ip
class Parameters(AnsibleF5Parameters):
api_map = {
'timeUntilUp': 'time_until_up',
'defaultsFrom': 'parent',
'recv': 'receive',
}
api_attributes = [
'timeUntilUp',
'defaultsFrom',
'interval',
'timeout',
'recv',
'send',
'destination',
'description',
]
returnables = [
'parent',
'send',
'receive',
'ip',
'port',
'interval',
'timeout',
'time_until_up',
'description',
]
updatables = [
'destination',
'send',
'receive',
'interval',
'timeout',
'time_until_up',
'description',
]
@property
def destination(self):
if self.ip is None and self.port is None:
return None
destination = '{0}:{1}'.format(self.ip, self.port)
return destination
@destination.setter
def destination(self, value):
ip, port = value.split(':')
self._values['ip'] = ip
self._values['port'] = port
@property
def interval(self):
if self._values['interval'] is None:
return None
        # Per BZ617284, the BIG-IP UI does not raise a warning about this,
        # so we validate the range here instead.
        if int(self._values['interval']) < 1 or int(self._values['interval']) > 86400:
raise F5ModuleError(
"Interval value must be between 1 and 86400"
)
return int(self._values['interval'])
@property
def timeout(self):
if self._values['timeout'] is None:
return None
return int(self._values['timeout'])
@property
def ip(self):
if self._values['ip'] is None:
return None
if self._values['ip'] in ['*', '0.0.0.0']:
return '*'
elif is_valid_ip(self._values['ip']):
return self._values['ip']
else:
raise F5ModuleError(
"The provided 'ip' parameter is not an IP address."
)
@property
def port(self):
if self._values['port'] is None:
return None
elif self._values['port'] == '*':
return '*'
return int(self._values['port'])
@property
def time_until_up(self):
if self._values['time_until_up'] is None:
return None
return int(self._values['time_until_up'])
@property
def parent(self):
if self._values['parent'] is None:
return None
result = fq_name(self.partition, self._values['parent'])
return result
@property
def type(self):
return 'udp'
class ApiParameters(Parameters):
@property
def description(self):
if self._values['description'] in [None, 'none']:
return None
return self._values['description']
class ModuleParameters(Parameters):
@property
def description(self):
if self._values['description'] is None:
return None
elif self._values['description'] in ['none', '']:
return ''
return self._values['description']
class Changes(Parameters):
def to_return(self):
result = {}
try:
for returnable in self.returnables:
result[returnable] = getattr(self, returnable)
result = self._filter_params(result)
except Exception:
pass
return result
class UsableChanges(Changes):
pass
class ReportableChanges(Changes):
pass
class Difference(object):
def __init__(self, want, have=None):
self.want = want
self.have = have
def compare(self, param):
try:
result = getattr(self, param)
return result
except AttributeError:
result = self.__default(param)
return result
@property
def parent(self):
if self.want.parent != self.have.parent:
raise F5ModuleError(
"The parent monitor cannot be changed"
)
@property
def destination(self):
if self.want.ip is None and self.want.port is None:
return None
if self.want.port is None:
self.want.update({'port': self.have.port})
if self.want.ip is None:
self.want.update({'ip': self.have.ip})
if self.want.port in [None, '*'] and self.want.ip != '*':
raise F5ModuleError(
"Specifying an IP address requires that a port number be specified"
)
if self.want.destination != self.have.destination:
return self.want.destination
@property
def interval(self):
if self.want.timeout is not None and self.want.interval is not None:
if self.want.interval >= self.want.timeout:
raise F5ModuleError(
"Parameter 'interval' must be less than 'timeout'."
)
elif self.want.timeout is not None:
if self.have.interval >= self.want.timeout:
raise F5ModuleError(
"Parameter 'interval' must be less than 'timeout'."
)
elif self.want.interval is not None:
if self.want.interval >= self.have.timeout:
raise F5ModuleError(
"Parameter 'interval' must be less than 'timeout'."
)
if self.want.interval != self.have.interval:
return self.want.interval
def __default(self, param):
attr1 = getattr(self.want, param)
try:
attr2 = getattr(self.have, param)
if attr1 != attr2:
return attr1
except AttributeError:
return attr1
@property
def description(self):
return cmp_str_with_none(self.want.description, self.have.description)
class ModuleManager(object):
def __init__(self, *args, **kwargs):
self.module = kwargs.get('module', None)
self.client = kwargs.get('client', None)
self.want = ModuleParameters(params=self.module.params)
self.have = ApiParameters()
self.changes = UsableChanges()
def _set_changed_options(self):
changed = {}
for key in Parameters.returnables:
if getattr(self.want, key) is not None:
changed[key] = getattr(self.want, key)
if changed:
self.changes = UsableChanges(params=changed)
def _update_changed_options(self):
diff = Difference(self.want, self.have)
updatables = Parameters.updatables
changed = dict()
for k in updatables:
change = diff.compare(k)
if change is None:
continue
else:
if isinstance(change, dict):
changed.update(change)
else:
changed[k] = change
if changed:
self.changes = UsableChanges(params=changed)
return True
return False
def should_update(self):
result = self._update_changed_options()
if result:
return True
return False
def exec_module(self):
changed = False
result = dict()
state = self.want.state
if state == "present":
changed = self.present()
elif state == "absent":
changed = self.absent()
reportable = ReportableChanges(params=self.changes.to_return())
changes = reportable.to_return()
result.update(**changes)
result.update(dict(changed=changed))
self._announce_deprecations(result)
return result
def _announce_deprecations(self, result):
warnings = result.pop('__warnings', [])
for warning in warnings:
self.client.module.deprecate(
msg=warning['msg'],
version=warning['version']
)
def present(self):
if self.exists():
return self.update()
else:
return self.create()
def exists(self):
uri = "https://{0}:{1}/mgmt/tm/ltm/monitor/udp/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(self.want.partition, self.want.name)
)
resp = self.client.api.get(uri)
try:
response = resp.json()
except ValueError:
return False
if resp.status == 404 or 'code' in response and response['code'] == 404:
return False
return True
def update(self):
self.have = self.read_current_from_device()
if not self.should_update():
return False
if self.module.check_mode:
return True
self.update_on_device()
return True
def remove(self):
if self.module.check_mode:
return True
self.remove_from_device()
if self.exists():
raise F5ModuleError("Failed to delete the resource.")
return True
def create(self):
self._set_changed_options()
self._set_default_creation_values()
if self.module.check_mode:
return True
self.create_on_device()
return True
def _set_default_creation_values(self):
if self.want.timeout is None:
self.want.update({'timeout': 16})
if self.want.interval is None:
self.want.update({'interval': 5})
if self.want.time_until_up is None:
self.want.update({'time_until_up': 0})
if self.want.ip is None:
self.want.update({'ip': '*'})
if self.want.port is None:
self.want.update({'port': '*'})
if self.want.send is None:
self.want.update({'send': 'default send string'})
def create_on_device(self):
params = self.changes.api_params()
params['name'] = self.want.name
params['partition'] = self.want.partition
uri = "https://{0}:{1}/mgmt/tm/ltm/monitor/udp/".format(
self.client.provider['server'],
self.client.provider['server_port']
)
resp = self.client.api.post(uri, json=params)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if 'code' in response and response['code'] in [400, 403]:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp.content)
def update_on_device(self):
params = self.changes.api_params()
uri = "https://{0}:{1}/mgmt/tm/ltm/monitor/udp/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(self.want.partition, self.want.name)
)
resp = self.client.api.patch(uri, json=params)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if 'code' in response and response['code'] == 400:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp.content)
def absent(self):
if self.exists():
return self.remove()
return False
def remove_from_device(self):
uri = "https://{0}:{1}/mgmt/tm/ltm/monitor/udp/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(self.want.partition, self.want.name)
)
resp = self.client.api.delete(uri)
if resp.status == 200:
return True
def read_current_from_device(self):
uri = "https://{0}:{1}/mgmt/tm/ltm/monitor/udp/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(self.want.partition, self.want.name)
)
resp = self.client.api.get(uri)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if 'code' in response and response['code'] == 400:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp.content)
return ApiParameters(params=response)
class ArgumentSpec(object):
def __init__(self):
self.supports_check_mode = True
argument_spec = dict(
name=dict(required=True),
parent=dict(default='/Common/udp'),
description=dict(),
send=dict(),
receive=dict(),
receive_disable=dict(required=False),
ip=dict(),
port=dict(),
interval=dict(type='int'),
timeout=dict(type='int'),
time_until_up=dict(type='int'),
state=dict(
default='present',
choices=['present', 'absent']
),
partition=dict(
default='Common',
fallback=(env_fallback, ['F5_PARTITION'])
)
)
self.argument_spec = {}
self.argument_spec.update(f5_argument_spec)
self.argument_spec.update(argument_spec)
def main():
spec = ArgumentSpec()
module = AnsibleModule(
argument_spec=spec.argument_spec,
supports_check_mode=spec.supports_check_mode,
)
client = F5RestClient(**module.params)
try:
mm = ModuleManager(module=module, client=client)
results = mm.exec_module()
cleanup_tokens(client)
exit_json(module, results, client)
except F5ModuleError as ex:
cleanup_tokens(client)
fail_json(module, ex, client)
if __name__ == '__main__':
main()
| gpl-3.0 |
pong3489/TEST_Mission | Lib/encodings/iso8859_4.py | 93 | 13939 | """ Python Character Mapping Codec iso8859_4 generated from 'MAPPINGS/ISO8859/8859-4.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
def encode(self,input,errors='strict'):
return codecs.charmap_encode(input,errors,encoding_table)
def decode(self,input,errors='strict'):
return codecs.charmap_decode(input,errors,decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
def encode(self, input, final=False):
return codecs.charmap_encode(input,self.errors,encoding_table)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
def decode(self, input, final=False):
return codecs.charmap_decode(input,self.errors,decoding_table)[0]
class StreamWriter(Codec,codecs.StreamWriter):
pass
class StreamReader(Codec,codecs.StreamReader):
pass
### encodings module API
def getregentry():
return codecs.CodecInfo(
name='iso8859-4',
encode=Codec().encode,
decode=Codec().decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamreader=StreamReader,
streamwriter=StreamWriter,
)
### Decoding Table
decoding_table = (
u'\x00' # 0x00 -> NULL
u'\x01' # 0x01 -> START OF HEADING
u'\x02' # 0x02 -> START OF TEXT
u'\x03' # 0x03 -> END OF TEXT
u'\x04' # 0x04 -> END OF TRANSMISSION
u'\x05' # 0x05 -> ENQUIRY
u'\x06' # 0x06 -> ACKNOWLEDGE
u'\x07' # 0x07 -> BELL
u'\x08' # 0x08 -> BACKSPACE
u'\t' # 0x09 -> HORIZONTAL TABULATION
u'\n' # 0x0A -> LINE FEED
u'\x0b' # 0x0B -> VERTICAL TABULATION
u'\x0c' # 0x0C -> FORM FEED
u'\r' # 0x0D -> CARRIAGE RETURN
u'\x0e' # 0x0E -> SHIFT OUT
u'\x0f' # 0x0F -> SHIFT IN
u'\x10' # 0x10 -> DATA LINK ESCAPE
u'\x11' # 0x11 -> DEVICE CONTROL ONE
u'\x12' # 0x12 -> DEVICE CONTROL TWO
u'\x13' # 0x13 -> DEVICE CONTROL THREE
u'\x14' # 0x14 -> DEVICE CONTROL FOUR
u'\x15' # 0x15 -> NEGATIVE ACKNOWLEDGE
u'\x16' # 0x16 -> SYNCHRONOUS IDLE
u'\x17' # 0x17 -> END OF TRANSMISSION BLOCK
u'\x18' # 0x18 -> CANCEL
u'\x19' # 0x19 -> END OF MEDIUM
u'\x1a' # 0x1A -> SUBSTITUTE
u'\x1b' # 0x1B -> ESCAPE
u'\x1c' # 0x1C -> FILE SEPARATOR
u'\x1d' # 0x1D -> GROUP SEPARATOR
u'\x1e' # 0x1E -> RECORD SEPARATOR
u'\x1f' # 0x1F -> UNIT SEPARATOR
u' ' # 0x20 -> SPACE
u'!' # 0x21 -> EXCLAMATION MARK
u'"' # 0x22 -> QUOTATION MARK
u'#' # 0x23 -> NUMBER SIGN
u'$' # 0x24 -> DOLLAR SIGN
u'%' # 0x25 -> PERCENT SIGN
u'&' # 0x26 -> AMPERSAND
u"'" # 0x27 -> APOSTROPHE
u'(' # 0x28 -> LEFT PARENTHESIS
u')' # 0x29 -> RIGHT PARENTHESIS
u'*' # 0x2A -> ASTERISK
u'+' # 0x2B -> PLUS SIGN
u',' # 0x2C -> COMMA
u'-' # 0x2D -> HYPHEN-MINUS
u'.' # 0x2E -> FULL STOP
u'/' # 0x2F -> SOLIDUS
u'0' # 0x30 -> DIGIT ZERO
u'1' # 0x31 -> DIGIT ONE
u'2' # 0x32 -> DIGIT TWO
u'3' # 0x33 -> DIGIT THREE
u'4' # 0x34 -> DIGIT FOUR
u'5' # 0x35 -> DIGIT FIVE
u'6' # 0x36 -> DIGIT SIX
u'7' # 0x37 -> DIGIT SEVEN
u'8' # 0x38 -> DIGIT EIGHT
u'9' # 0x39 -> DIGIT NINE
u':' # 0x3A -> COLON
u';' # 0x3B -> SEMICOLON
u'<' # 0x3C -> LESS-THAN SIGN
u'=' # 0x3D -> EQUALS SIGN
u'>' # 0x3E -> GREATER-THAN SIGN
u'?' # 0x3F -> QUESTION MARK
u'@' # 0x40 -> COMMERCIAL AT
u'A' # 0x41 -> LATIN CAPITAL LETTER A
u'B' # 0x42 -> LATIN CAPITAL LETTER B
u'C' # 0x43 -> LATIN CAPITAL LETTER C
u'D' # 0x44 -> LATIN CAPITAL LETTER D
u'E' # 0x45 -> LATIN CAPITAL LETTER E
u'F' # 0x46 -> LATIN CAPITAL LETTER F
u'G' # 0x47 -> LATIN CAPITAL LETTER G
u'H' # 0x48 -> LATIN CAPITAL LETTER H
u'I' # 0x49 -> LATIN CAPITAL LETTER I
u'J' # 0x4A -> LATIN CAPITAL LETTER J
u'K' # 0x4B -> LATIN CAPITAL LETTER K
u'L' # 0x4C -> LATIN CAPITAL LETTER L
u'M' # 0x4D -> LATIN CAPITAL LETTER M
u'N' # 0x4E -> LATIN CAPITAL LETTER N
u'O' # 0x4F -> LATIN CAPITAL LETTER O
u'P' # 0x50 -> LATIN CAPITAL LETTER P
u'Q' # 0x51 -> LATIN CAPITAL LETTER Q
u'R' # 0x52 -> LATIN CAPITAL LETTER R
u'S' # 0x53 -> LATIN CAPITAL LETTER S
u'T' # 0x54 -> LATIN CAPITAL LETTER T
u'U' # 0x55 -> LATIN CAPITAL LETTER U
u'V' # 0x56 -> LATIN CAPITAL LETTER V
u'W' # 0x57 -> LATIN CAPITAL LETTER W
u'X' # 0x58 -> LATIN CAPITAL LETTER X
u'Y' # 0x59 -> LATIN CAPITAL LETTER Y
u'Z' # 0x5A -> LATIN CAPITAL LETTER Z
u'[' # 0x5B -> LEFT SQUARE BRACKET
u'\\' # 0x5C -> REVERSE SOLIDUS
u']' # 0x5D -> RIGHT SQUARE BRACKET
u'^' # 0x5E -> CIRCUMFLEX ACCENT
u'_' # 0x5F -> LOW LINE
u'`' # 0x60 -> GRAVE ACCENT
u'a' # 0x61 -> LATIN SMALL LETTER A
u'b' # 0x62 -> LATIN SMALL LETTER B
u'c' # 0x63 -> LATIN SMALL LETTER C
u'd' # 0x64 -> LATIN SMALL LETTER D
u'e' # 0x65 -> LATIN SMALL LETTER E
u'f' # 0x66 -> LATIN SMALL LETTER F
u'g' # 0x67 -> LATIN SMALL LETTER G
u'h' # 0x68 -> LATIN SMALL LETTER H
u'i' # 0x69 -> LATIN SMALL LETTER I
u'j' # 0x6A -> LATIN SMALL LETTER J
u'k' # 0x6B -> LATIN SMALL LETTER K
u'l' # 0x6C -> LATIN SMALL LETTER L
u'm' # 0x6D -> LATIN SMALL LETTER M
u'n' # 0x6E -> LATIN SMALL LETTER N
u'o' # 0x6F -> LATIN SMALL LETTER O
u'p' # 0x70 -> LATIN SMALL LETTER P
u'q' # 0x71 -> LATIN SMALL LETTER Q
u'r' # 0x72 -> LATIN SMALL LETTER R
u's' # 0x73 -> LATIN SMALL LETTER S
u't' # 0x74 -> LATIN SMALL LETTER T
u'u' # 0x75 -> LATIN SMALL LETTER U
u'v' # 0x76 -> LATIN SMALL LETTER V
u'w' # 0x77 -> LATIN SMALL LETTER W
u'x' # 0x78 -> LATIN SMALL LETTER X
u'y' # 0x79 -> LATIN SMALL LETTER Y
u'z' # 0x7A -> LATIN SMALL LETTER Z
u'{' # 0x7B -> LEFT CURLY BRACKET
u'|' # 0x7C -> VERTICAL LINE
u'}' # 0x7D -> RIGHT CURLY BRACKET
u'~' # 0x7E -> TILDE
u'\x7f' # 0x7F -> DELETE
u'\x80' # 0x80 -> <control>
u'\x81' # 0x81 -> <control>
u'\x82' # 0x82 -> <control>
u'\x83' # 0x83 -> <control>
u'\x84' # 0x84 -> <control>
u'\x85' # 0x85 -> <control>
u'\x86' # 0x86 -> <control>
u'\x87' # 0x87 -> <control>
u'\x88' # 0x88 -> <control>
u'\x89' # 0x89 -> <control>
u'\x8a' # 0x8A -> <control>
u'\x8b' # 0x8B -> <control>
u'\x8c' # 0x8C -> <control>
u'\x8d' # 0x8D -> <control>
u'\x8e' # 0x8E -> <control>
u'\x8f' # 0x8F -> <control>
u'\x90' # 0x90 -> <control>
u'\x91' # 0x91 -> <control>
u'\x92' # 0x92 -> <control>
u'\x93' # 0x93 -> <control>
u'\x94' # 0x94 -> <control>
u'\x95' # 0x95 -> <control>
u'\x96' # 0x96 -> <control>
u'\x97' # 0x97 -> <control>
u'\x98' # 0x98 -> <control>
u'\x99' # 0x99 -> <control>
u'\x9a' # 0x9A -> <control>
u'\x9b' # 0x9B -> <control>
u'\x9c' # 0x9C -> <control>
u'\x9d' # 0x9D -> <control>
u'\x9e' # 0x9E -> <control>
u'\x9f' # 0x9F -> <control>
u'\xa0' # 0xA0 -> NO-BREAK SPACE
u'\u0104' # 0xA1 -> LATIN CAPITAL LETTER A WITH OGONEK
u'\u0138' # 0xA2 -> LATIN SMALL LETTER KRA
u'\u0156' # 0xA3 -> LATIN CAPITAL LETTER R WITH CEDILLA
u'\xa4' # 0xA4 -> CURRENCY SIGN
u'\u0128' # 0xA5 -> LATIN CAPITAL LETTER I WITH TILDE
u'\u013b' # 0xA6 -> LATIN CAPITAL LETTER L WITH CEDILLA
u'\xa7' # 0xA7 -> SECTION SIGN
u'\xa8' # 0xA8 -> DIAERESIS
u'\u0160' # 0xA9 -> LATIN CAPITAL LETTER S WITH CARON
u'\u0112' # 0xAA -> LATIN CAPITAL LETTER E WITH MACRON
u'\u0122' # 0xAB -> LATIN CAPITAL LETTER G WITH CEDILLA
u'\u0166' # 0xAC -> LATIN CAPITAL LETTER T WITH STROKE
u'\xad' # 0xAD -> SOFT HYPHEN
u'\u017d' # 0xAE -> LATIN CAPITAL LETTER Z WITH CARON
u'\xaf' # 0xAF -> MACRON
u'\xb0' # 0xB0 -> DEGREE SIGN
u'\u0105' # 0xB1 -> LATIN SMALL LETTER A WITH OGONEK
u'\u02db' # 0xB2 -> OGONEK
u'\u0157' # 0xB3 -> LATIN SMALL LETTER R WITH CEDILLA
u'\xb4' # 0xB4 -> ACUTE ACCENT
u'\u0129' # 0xB5 -> LATIN SMALL LETTER I WITH TILDE
u'\u013c' # 0xB6 -> LATIN SMALL LETTER L WITH CEDILLA
u'\u02c7' # 0xB7 -> CARON
u'\xb8' # 0xB8 -> CEDILLA
u'\u0161' # 0xB9 -> LATIN SMALL LETTER S WITH CARON
u'\u0113' # 0xBA -> LATIN SMALL LETTER E WITH MACRON
u'\u0123' # 0xBB -> LATIN SMALL LETTER G WITH CEDILLA
u'\u0167' # 0xBC -> LATIN SMALL LETTER T WITH STROKE
u'\u014a' # 0xBD -> LATIN CAPITAL LETTER ENG
u'\u017e' # 0xBE -> LATIN SMALL LETTER Z WITH CARON
u'\u014b' # 0xBF -> LATIN SMALL LETTER ENG
u'\u0100' # 0xC0 -> LATIN CAPITAL LETTER A WITH MACRON
u'\xc1' # 0xC1 -> LATIN CAPITAL LETTER A WITH ACUTE
u'\xc2' # 0xC2 -> LATIN CAPITAL LETTER A WITH CIRCUMFLEX
u'\xc3' # 0xC3 -> LATIN CAPITAL LETTER A WITH TILDE
u'\xc4' # 0xC4 -> LATIN CAPITAL LETTER A WITH DIAERESIS
u'\xc5' # 0xC5 -> LATIN CAPITAL LETTER A WITH RING ABOVE
u'\xc6' # 0xC6 -> LATIN CAPITAL LETTER AE
u'\u012e' # 0xC7 -> LATIN CAPITAL LETTER I WITH OGONEK
u'\u010c' # 0xC8 -> LATIN CAPITAL LETTER C WITH CARON
u'\xc9' # 0xC9 -> LATIN CAPITAL LETTER E WITH ACUTE
u'\u0118' # 0xCA -> LATIN CAPITAL LETTER E WITH OGONEK
u'\xcb' # 0xCB -> LATIN CAPITAL LETTER E WITH DIAERESIS
u'\u0116' # 0xCC -> LATIN CAPITAL LETTER E WITH DOT ABOVE
u'\xcd' # 0xCD -> LATIN CAPITAL LETTER I WITH ACUTE
u'\xce' # 0xCE -> LATIN CAPITAL LETTER I WITH CIRCUMFLEX
u'\u012a' # 0xCF -> LATIN CAPITAL LETTER I WITH MACRON
u'\u0110' # 0xD0 -> LATIN CAPITAL LETTER D WITH STROKE
u'\u0145' # 0xD1 -> LATIN CAPITAL LETTER N WITH CEDILLA
u'\u014c' # 0xD2 -> LATIN CAPITAL LETTER O WITH MACRON
u'\u0136' # 0xD3 -> LATIN CAPITAL LETTER K WITH CEDILLA
u'\xd4' # 0xD4 -> LATIN CAPITAL LETTER O WITH CIRCUMFLEX
u'\xd5' # 0xD5 -> LATIN CAPITAL LETTER O WITH TILDE
u'\xd6' # 0xD6 -> LATIN CAPITAL LETTER O WITH DIAERESIS
u'\xd7' # 0xD7 -> MULTIPLICATION SIGN
u'\xd8' # 0xD8 -> LATIN CAPITAL LETTER O WITH STROKE
u'\u0172' # 0xD9 -> LATIN CAPITAL LETTER U WITH OGONEK
u'\xda' # 0xDA -> LATIN CAPITAL LETTER U WITH ACUTE
u'\xdb' # 0xDB -> LATIN CAPITAL LETTER U WITH CIRCUMFLEX
u'\xdc' # 0xDC -> LATIN CAPITAL LETTER U WITH DIAERESIS
u'\u0168' # 0xDD -> LATIN CAPITAL LETTER U WITH TILDE
u'\u016a' # 0xDE -> LATIN CAPITAL LETTER U WITH MACRON
u'\xdf' # 0xDF -> LATIN SMALL LETTER SHARP S
u'\u0101' # 0xE0 -> LATIN SMALL LETTER A WITH MACRON
u'\xe1' # 0xE1 -> LATIN SMALL LETTER A WITH ACUTE
u'\xe2' # 0xE2 -> LATIN SMALL LETTER A WITH CIRCUMFLEX
u'\xe3' # 0xE3 -> LATIN SMALL LETTER A WITH TILDE
u'\xe4' # 0xE4 -> LATIN SMALL LETTER A WITH DIAERESIS
u'\xe5' # 0xE5 -> LATIN SMALL LETTER A WITH RING ABOVE
u'\xe6' # 0xE6 -> LATIN SMALL LETTER AE
u'\u012f' # 0xE7 -> LATIN SMALL LETTER I WITH OGONEK
u'\u010d' # 0xE8 -> LATIN SMALL LETTER C WITH CARON
u'\xe9' # 0xE9 -> LATIN SMALL LETTER E WITH ACUTE
u'\u0119' # 0xEA -> LATIN SMALL LETTER E WITH OGONEK
u'\xeb' # 0xEB -> LATIN SMALL LETTER E WITH DIAERESIS
u'\u0117' # 0xEC -> LATIN SMALL LETTER E WITH DOT ABOVE
u'\xed' # 0xED -> LATIN SMALL LETTER I WITH ACUTE
u'\xee' # 0xEE -> LATIN SMALL LETTER I WITH CIRCUMFLEX
u'\u012b' # 0xEF -> LATIN SMALL LETTER I WITH MACRON
u'\u0111' # 0xF0 -> LATIN SMALL LETTER D WITH STROKE
u'\u0146' # 0xF1 -> LATIN SMALL LETTER N WITH CEDILLA
u'\u014d' # 0xF2 -> LATIN SMALL LETTER O WITH MACRON
u'\u0137' # 0xF3 -> LATIN SMALL LETTER K WITH CEDILLA
u'\xf4' # 0xF4 -> LATIN SMALL LETTER O WITH CIRCUMFLEX
u'\xf5' # 0xF5 -> LATIN SMALL LETTER O WITH TILDE
u'\xf6' # 0xF6 -> LATIN SMALL LETTER O WITH DIAERESIS
u'\xf7' # 0xF7 -> DIVISION SIGN
u'\xf8' # 0xF8 -> LATIN SMALL LETTER O WITH STROKE
u'\u0173' # 0xF9 -> LATIN SMALL LETTER U WITH OGONEK
u'\xfa' # 0xFA -> LATIN SMALL LETTER U WITH ACUTE
u'\xfb' # 0xFB -> LATIN SMALL LETTER U WITH CIRCUMFLEX
u'\xfc' # 0xFC -> LATIN SMALL LETTER U WITH DIAERESIS
u'\u0169' # 0xFD -> LATIN SMALL LETTER U WITH TILDE
u'\u016b' # 0xFE -> LATIN SMALL LETTER U WITH MACRON
u'\u02d9' # 0xFF -> DOT ABOVE
)
### Encoding table
encoding_table=codecs.charmap_build(decoding_table)
| gpl-3.0 |
HopeFOAM/HopeFOAM | ThirdParty-0.1/ParaView-5.0.1/VTK/ThirdParty/Twisted/twisted/python/logfile.py | 60 | 9711 | # -*- test-case-name: twisted.test.test_logfile -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
A rotating, browsable log file.
"""
# System Imports
import os, glob, time, stat
from twisted.python import threadable
class BaseLogFile:
"""
The base class for a log file that can be rotated.
"""
synchronized = ["write", "rotate"]
def __init__(self, name, directory, defaultMode=None):
"""
Create a log file.
@param name: name of the file
@param directory: directory holding the file
        @param defaultMode: permissions used to create the file. Defaults to
current permissions of the file if the file exists.
"""
self.directory = directory
self.name = name
self.path = os.path.join(directory, name)
if defaultMode is None and os.path.exists(self.path):
self.defaultMode = stat.S_IMODE(os.stat(self.path)[stat.ST_MODE])
else:
self.defaultMode = defaultMode
self._openFile()
def fromFullPath(cls, filename, *args, **kwargs):
"""
Construct a log file from a full file path.
"""
logPath = os.path.abspath(filename)
return cls(os.path.basename(logPath),
os.path.dirname(logPath), *args, **kwargs)
fromFullPath = classmethod(fromFullPath)
def shouldRotate(self):
"""
        Override with a method that returns true if the log
should be rotated.
"""
raise NotImplementedError
def _openFile(self):
"""
Open the log file.
"""
self.closed = False
if os.path.exists(self.path):
self._file = file(self.path, "r+", 1)
self._file.seek(0, 2)
else:
if self.defaultMode is not None:
# Set the lowest permissions
oldUmask = os.umask(0777)
try:
self._file = file(self.path, "w+", 1)
finally:
os.umask(oldUmask)
else:
self._file = file(self.path, "w+", 1)
if self.defaultMode is not None:
try:
os.chmod(self.path, self.defaultMode)
except OSError:
# Probably /dev/null or something?
pass
def __getstate__(self):
state = self.__dict__.copy()
del state["_file"]
return state
def __setstate__(self, state):
self.__dict__ = state
self._openFile()
def write(self, data):
"""
Write some data to the file.
"""
if self.shouldRotate():
self.flush()
self.rotate()
self._file.write(data)
def flush(self):
"""
Flush the file.
"""
self._file.flush()
def close(self):
"""
Close the file.
The file cannot be used once it has been closed.
"""
self.closed = True
self._file.close()
self._file = None
def reopen(self):
"""
Reopen the log file. This is mainly useful if you use an external log
        rotation tool, which moves the file under your feet.
Note that on Windows you probably need a specific API to rename the
file, as it's not supported to simply use os.rename, for example.
"""
self.close()
self._openFile()
def getCurrentLog(self):
"""
Return a LogReader for the current log file.
"""
return LogReader(self.path)
class LogFile(BaseLogFile):
"""
A log file that can be rotated.
A rotateLength of None disables automatic log rotation.
"""
def __init__(self, name, directory, rotateLength=1000000, defaultMode=None,
maxRotatedFiles=None):
"""
Create a log file rotating on length.
@param name: file name.
@type name: C{str}
@param directory: path of the log file.
@type directory: C{str}
        @param rotateLength: size of the log file at which it rotates. Defaults to
1M.
@type rotateLength: C{int}
@param defaultMode: mode used to create the file.
@type defaultMode: C{int}
@param maxRotatedFiles: if not None, max number of log files the class
creates. Warning: it removes all log files above this number.
@type maxRotatedFiles: C{int}
"""
BaseLogFile.__init__(self, name, directory, defaultMode)
self.rotateLength = rotateLength
self.maxRotatedFiles = maxRotatedFiles
def _openFile(self):
BaseLogFile._openFile(self)
self.size = self._file.tell()
def shouldRotate(self):
"""
Rotate when the log file size is larger than rotateLength.
"""
return self.rotateLength and self.size >= self.rotateLength
def getLog(self, identifier):
"""
Given an integer, return a LogReader for an old log file.
"""
filename = "%s.%d" % (self.path, identifier)
if not os.path.exists(filename):
raise ValueError, "no such logfile exists"
return LogReader(filename)
def write(self, data):
"""
Write some data to the file.
"""
BaseLogFile.write(self, data)
self.size += len(data)
def rotate(self):
"""
Rotate the file and create a new one.
        If it's not possible to open the new logfile, this will fail silently,
        and continue logging to the old logfile.
"""
if not (os.access(self.directory, os.W_OK) and os.access(self.path, os.W_OK)):
return
logs = self.listLogs()
logs.reverse()
for i in logs:
if self.maxRotatedFiles is not None and i >= self.maxRotatedFiles:
os.remove("%s.%d" % (self.path, i))
else:
os.rename("%s.%d" % (self.path, i), "%s.%d" % (self.path, i + 1))
self._file.close()
os.rename(self.path, "%s.1" % self.path)
self._openFile()
def listLogs(self):
"""
Return sorted list of integers - the old logs' identifiers.
"""
result = []
for name in glob.glob("%s.*" % self.path):
try:
counter = int(name.split('.')[-1])
if counter:
result.append(counter)
except ValueError:
pass
result.sort()
return result
def __getstate__(self):
state = BaseLogFile.__getstate__(self)
del state["size"]
return state
threadable.synchronize(LogFile)
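# Minimal usage sketch (the path, sizes, and file name here are illustrative
# assumptions, not values used elsewhere in this module):
#   log = LogFile("app.log", "/tmp/logs", rotateLength=10000, maxRotatedFiles=5)
#   log.write("some event\n")   # rotates to app.log.1, app.log.2, ... once rotateLength is exceeded
#   log.close()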
class DailyLogFile(BaseLogFile):
"""A log file that is rotated daily (at or after midnight localtime)
"""
def _openFile(self):
BaseLogFile._openFile(self)
self.lastDate = self.toDate(os.stat(self.path)[8])
def shouldRotate(self):
"""Rotate when the date has changed since last write"""
return self.toDate() > self.lastDate
def toDate(self, *args):
"""Convert a unixtime to (year, month, day) localtime tuple,
or return the current (year, month, day) localtime tuple.
This function primarily exists so you may overload it with
gmtime, or some cruft to make unit testing possible.
"""
# primarily so this can be unit tested easily
return time.localtime(*args)[:3]
def suffix(self, tupledate):
"""Return the suffix given a (year, month, day) tuple or unixtime"""
try:
return '_'.join(map(str, tupledate))
except:
# try taking a float unixtime
return '_'.join(map(str, self.toDate(tupledate)))
def getLog(self, identifier):
"""Given a unix time, return a LogReader for an old log file."""
if self.toDate(identifier) == self.lastDate:
return self.getCurrentLog()
filename = "%s.%s" % (self.path, self.suffix(identifier))
if not os.path.exists(filename):
raise ValueError, "no such logfile exists"
return LogReader(filename)
def write(self, data):
"""Write some data to the log file"""
BaseLogFile.write(self, data)
# Guard against a corner case where time.time()
# could potentially run backwards to yesterday.
# Primarily due to network time.
self.lastDate = max(self.lastDate, self.toDate())
def rotate(self):
"""Rotate the file and create a new one.
        If it's not possible to open the new logfile, this will fail silently,
        and continue logging to the old logfile.
"""
if not (os.access(self.directory, os.W_OK) and os.access(self.path, os.W_OK)):
return
newpath = "%s.%s" % (self.path, self.suffix(self.lastDate))
if os.path.exists(newpath):
return
self._file.close()
os.rename(self.path, newpath)
self._openFile()
def __getstate__(self):
state = BaseLogFile.__getstate__(self)
del state["lastDate"]
return state
threadable.synchronize(DailyLogFile)
class LogReader:
"""Read from a log file."""
def __init__(self, name):
self._file = file(name, "r")
def readLines(self, lines=10):
"""Read a list of lines from the log file.
        This doesn't return all of the file's lines - call it multiple times.
"""
result = []
for i in range(lines):
line = self._file.readline()
if not line:
break
result.append(line)
return result
def close(self):
self._file.close()
| gpl-3.0 |
lz1988/company-site | django/utils/http.py | 27 | 8316 | from __future__ import unicode_literals
import calendar
import datetime
import re
import sys
try:
from urllib import parse as urllib_parse
except ImportError: # Python 2
import urllib as urllib_parse
import urlparse
urllib_parse.urlparse = urlparse.urlparse
from email.utils import formatdate
from django.utils.datastructures import MultiValueDict
from django.utils.encoding import force_str, force_text
from django.utils.functional import allow_lazy
from django.utils import six
ETAG_MATCH = re.compile(r'(?:W/)?"((?:\\.|[^"])*)"')
MONTHS = 'jan feb mar apr may jun jul aug sep oct nov dec'.split()
__D = r'(?P<day>\d{2})'
__D2 = r'(?P<day>[ \d]\d)'
__M = r'(?P<mon>\w{3})'
__Y = r'(?P<year>\d{4})'
__Y2 = r'(?P<year>\d{2})'
__T = r'(?P<hour>\d{2}):(?P<min>\d{2}):(?P<sec>\d{2})'
RFC1123_DATE = re.compile(r'^\w{3}, %s %s %s %s GMT$' % (__D, __M, __Y, __T))
RFC850_DATE = re.compile(r'^\w{6,9}, %s-%s-%s %s GMT$' % (__D, __M, __Y2, __T))
ASCTIME_DATE = re.compile(r'^\w{3} %s %s %s %s$' % (__M, __D2, __T, __Y))
def urlquote(url, safe='/'):
"""
A version of Python's urllib.quote() function that can operate on unicode
strings. The url is first UTF-8 encoded before quoting. The returned string
can safely be used as part of an argument to a subsequent iri_to_uri() call
without double-quoting occurring.
"""
return force_text(urllib_parse.quote(force_str(url), force_str(safe)))
urlquote = allow_lazy(urlquote, six.text_type)
def urlquote_plus(url, safe=''):
"""
A version of Python's urllib.quote_plus() function that can operate on
unicode strings. The url is first UTF-8 encoded before quoting. The
returned string can safely be used as part of an argument to a subsequent
iri_to_uri() call without double-quoting occurring.
"""
return force_text(urllib_parse.quote_plus(force_str(url), force_str(safe)))
urlquote_plus = allow_lazy(urlquote_plus, six.text_type)
def urlunquote(quoted_url):
"""
A wrapper for Python's urllib.unquote() function that can operate on
the result of django.utils.http.urlquote().
"""
return force_text(urllib_parse.unquote(force_str(quoted_url)))
urlunquote = allow_lazy(urlunquote, six.text_type)
def urlunquote_plus(quoted_url):
"""
A wrapper for Python's urllib.unquote_plus() function that can operate on
the result of django.utils.http.urlquote_plus().
"""
return force_text(urllib_parse.unquote_plus(force_str(quoted_url)))
urlunquote_plus = allow_lazy(urlunquote_plus, six.text_type)
def urlencode(query, doseq=0):
"""
A version of Python's urllib.urlencode() function that can operate on
    unicode strings. The parameters are first cast to UTF-8 encoded strings and
then encoded as per normal.
"""
if isinstance(query, MultiValueDict):
query = query.lists()
elif hasattr(query, 'items'):
query = query.items()
return urllib_parse.urlencode(
[(force_str(k),
[force_str(i) for i in v] if isinstance(v, (list,tuple)) else force_str(v))
for k, v in query],
doseq)
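# Illustrative only -- the query dict below is an assumption, not data used here:
#   urlencode({'q': u'caf\xe9'}) would yield 'q=caf%C3%A9' (values UTF-8 encoded, then quoted).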
def cookie_date(epoch_seconds=None):
"""
Formats the time to ensure compatibility with Netscape's cookie standard.
Accepts a floating point number expressed in seconds since the epoch, in
UTC - such as that outputted by time.time(). If set to None, defaults to
the current time.
Outputs a string in the format 'Wdy, DD-Mon-YYYY HH:MM:SS GMT'.
"""
rfcdate = formatdate(epoch_seconds)
return '%s-%s-%s GMT' % (rfcdate[:7], rfcdate[8:11], rfcdate[12:25])
def http_date(epoch_seconds=None):
"""
Formats the time to match the RFC1123 date format as specified by HTTP
RFC2616 section 3.3.1.
Accepts a floating point number expressed in seconds since the epoch, in
UTC - such as that outputted by time.time(). If set to None, defaults to
the current time.
Outputs a string in the format 'Wdy, DD Mon YYYY HH:MM:SS GMT'.
"""
rfcdate = formatdate(epoch_seconds)
return '%s GMT' % rfcdate[:25]
def parse_http_date(date):
"""
Parses a date format as specified by HTTP RFC2616 section 3.3.1.
The three formats allowed by the RFC are accepted, even if only the first
one is still in widespread use.
Returns an integer expressed in seconds since the epoch, in UTC.
"""
# emails.Util.parsedate does the job for RFC1123 dates; unfortunately
# RFC2616 makes it mandatory to support RFC850 dates too. So we roll
# our own RFC-compliant parsing.
for regex in RFC1123_DATE, RFC850_DATE, ASCTIME_DATE:
m = regex.match(date)
if m is not None:
break
else:
raise ValueError("%r is not in a valid HTTP date format" % date)
try:
year = int(m.group('year'))
if year < 100:
if year < 70:
year += 2000
else:
year += 1900
month = MONTHS.index(m.group('mon').lower()) + 1
day = int(m.group('day'))
hour = int(m.group('hour'))
min = int(m.group('min'))
sec = int(m.group('sec'))
result = datetime.datetime(year, month, day, hour, min, sec)
return calendar.timegm(result.utctimetuple())
except Exception:
raise ValueError("%r is not a valid date" % date)
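# Illustrative only (RFC 1123 sample date, shown for clarity):
#   parse_http_date('Sun, 06 Nov 1994 08:49:37 GMT') returns 784111777.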
def parse_http_date_safe(date):
"""
Same as parse_http_date, but returns None if the input is invalid.
"""
try:
return parse_http_date(date)
except Exception:
pass
# Base 36 functions: useful for generating compact URLs
def base36_to_int(s):
"""
    Converts a base 36 string to an ``int``. Raises ``ValueError`` if the
input won't fit into an int.
"""
# To prevent overconsumption of server resources, reject any
# base36 string that is long than 13 base36 digits (13 digits
# is sufficient to base36-encode any 64-bit integer)
if len(s) > 13:
raise ValueError("Base36 input too large")
value = int(s, 36)
# ... then do a final check that the value will fit into an int to avoid
# returning a long (#15067). The long type was removed in Python 3.
if not six.PY3 and value > sys.maxint:
raise ValueError("Base36 input too large")
return value
def int_to_base36(i):
"""
Converts an integer to a base36 string
"""
digits = "0123456789abcdefghijklmnopqrstuvwxyz"
factor = 0
if i < 0:
raise ValueError("Negative base36 conversion input.")
if not six.PY3:
if not isinstance(i, six.integer_types):
raise TypeError("Non-integer base36 conversion input.")
if i > sys.maxint:
raise ValueError("Base36 conversion input too large.")
# Find starting factor
while True:
factor += 1
if i < 36 ** factor:
factor -= 1
break
base36 = []
# Construct base36 representation
while factor >= 0:
j = 36 ** factor
base36.append(digits[i // j])
i = i % j
factor -= 1
return ''.join(base36)
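# Illustrative round trip (values chosen only for the example):
#   int_to_base36(33) == 'x' and base36_to_int('x') == 33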
def parse_etags(etag_str):
"""
Parses a string with one or several etags passed in If-None-Match and
If-Match headers by the rules in RFC 2616. Returns a list of etags
without surrounding double quotes (") and unescaped from \<CHAR>.
"""
etags = ETAG_MATCH.findall(etag_str)
if not etags:
# etag_str has wrong format, treat it as an opaque string then
return [etag_str]
etags = [e.encode('ascii').decode('unicode_escape') for e in etags]
return etags
def quote_etag(etag):
"""
    Wraps a string in double quotes escaping contents as necessary.
"""
return '"%s"' % etag.replace('\\', '\\\\').replace('"', '\\"')
def same_origin(url1, url2):
"""
Checks if two URLs are 'same-origin'
"""
p1, p2 = urllib_parse.urlparse(url1), urllib_parse.urlparse(url2)
return (p1.scheme, p1.hostname, p1.port) == (p2.scheme, p2.hostname, p2.port)
def is_safe_url(url, host=None):
"""
Return ``True`` if the url is a safe redirection (i.e. it doesn't point to
a different host).
Always returns ``False`` on an empty url.
"""
if not url:
return False
netloc = urllib_parse.urlparse(url)[1]
return not netloc or netloc == host
| bsd-3-clause |
ericholscher/django | tests/model_forms/models.py | 1 | 9652 | """
XX. Generating HTML forms from models
This is mostly just a reworking of the ``form_for_model``/``form_for_instance``
tests to use ``ModelForm``. As such, the text may not make sense in all cases,
and the examples are probably a poor fit for the ``ModelForm`` syntax. In other
words, most of these tests should be rewritten.
"""
from __future__ import unicode_literals
import os
import tempfile
from django.core import validators
from django.core.exceptions import ImproperlyConfigured, ValidationError
from django.core.files.storage import FileSystemStorage
from django.db import models
from django.utils import six
from django.utils.encoding import python_2_unicode_compatible
temp_storage_dir = tempfile.mkdtemp(dir=os.environ['DJANGO_TEST_TEMP_DIR'])
temp_storage = FileSystemStorage(temp_storage_dir)
ARTICLE_STATUS = (
(1, 'Draft'),
(2, 'Pending'),
(3, 'Live'),
)
ARTICLE_STATUS_CHAR = (
('d', 'Draft'),
('p', 'Pending'),
('l', 'Live'),
)
@python_2_unicode_compatible
class Category(models.Model):
name = models.CharField(max_length=20)
slug = models.SlugField(max_length=20)
url = models.CharField('The URL', max_length=40)
def __str__(self):
return self.name
def __repr__(self):
return self.__str__()
@python_2_unicode_compatible
class Writer(models.Model):
name = models.CharField(max_length=50, help_text='Use both first and last names.')
class Meta:
ordering = ('name',)
def __str__(self):
return self.name
@python_2_unicode_compatible
class Article(models.Model):
headline = models.CharField(max_length=50)
slug = models.SlugField()
pub_date = models.DateField()
created = models.DateField(editable=False)
writer = models.ForeignKey(Writer)
article = models.TextField()
categories = models.ManyToManyField(Category, blank=True)
status = models.PositiveIntegerField(choices=ARTICLE_STATUS, blank=True, null=True)
def save(self):
import datetime
if not self.id:
self.created = datetime.date.today()
return super(Article, self).save()
def __str__(self):
return self.headline
class ImprovedArticle(models.Model):
article = models.OneToOneField(Article)
class ImprovedArticleWithParentLink(models.Model):
article = models.OneToOneField(Article, parent_link=True)
class BetterWriter(Writer):
score = models.IntegerField()
@python_2_unicode_compatible
class WriterProfile(models.Model):
writer = models.OneToOneField(Writer, primary_key=True)
age = models.PositiveIntegerField()
def __str__(self):
return "%s is %s" % (self.writer, self.age)
@python_2_unicode_compatible
class TextFile(models.Model):
description = models.CharField(max_length=20)
file = models.FileField(storage=temp_storage, upload_to='tests', max_length=15)
def __str__(self):
return self.description
try:
from django.utils.image import Image
test_images = True
@python_2_unicode_compatible
class ImageFile(models.Model):
def custom_upload_path(self, filename):
path = self.path or 'tests'
return '%s/%s' % (path, filename)
description = models.CharField(max_length=20)
# Deliberately put the image field *after* the width/height fields to
# trigger the bug in #10404 with width/height not getting assigned.
width = models.IntegerField(editable=False)
height = models.IntegerField(editable=False)
image = models.ImageField(storage=temp_storage, upload_to=custom_upload_path,
width_field='width', height_field='height')
path = models.CharField(max_length=16, blank=True, default='')
def __str__(self):
return self.description
@python_2_unicode_compatible
class OptionalImageFile(models.Model):
def custom_upload_path(self, filename):
path = self.path or 'tests'
return '%s/%s' % (path, filename)
description = models.CharField(max_length=20)
image = models.ImageField(storage=temp_storage, upload_to=custom_upload_path,
width_field='width', height_field='height',
blank=True, null=True)
width = models.IntegerField(editable=False, null=True)
height = models.IntegerField(editable=False, null=True)
path = models.CharField(max_length=16, blank=True, default='')
def __str__(self):
return self.description
except ImproperlyConfigured:
test_images = False
@python_2_unicode_compatible
class CommaSeparatedInteger(models.Model):
field = models.CommaSeparatedIntegerField(max_length=20)
def __str__(self):
return self.field
@python_2_unicode_compatible
class Product(models.Model):
slug = models.SlugField(unique=True)
def __str__(self):
return self.slug
@python_2_unicode_compatible
class Price(models.Model):
price = models.DecimalField(max_digits=10, decimal_places=2)
quantity = models.PositiveIntegerField()
def __str__(self):
return "%s for %s" % (self.quantity, self.price)
class Meta:
unique_together = (('price', 'quantity'),)
class ArticleStatus(models.Model):
status = models.CharField(max_length=2, choices=ARTICLE_STATUS_CHAR, blank=True, null=True)
@python_2_unicode_compatible
class Inventory(models.Model):
barcode = models.PositiveIntegerField(unique=True)
parent = models.ForeignKey('self', to_field='barcode', blank=True, null=True)
name = models.CharField(blank=False, max_length=20)
class Meta:
ordering = ('name',)
def __str__(self):
return self.name
def __repr__(self):
return self.__str__()
class Book(models.Model):
title = models.CharField(max_length=40)
author = models.ForeignKey(Writer, blank=True, null=True)
special_id = models.IntegerField(blank=True, null=True, unique=True)
class Meta:
unique_together = ('title', 'author')
class BookXtra(models.Model):
isbn = models.CharField(max_length=16, unique=True)
suffix1 = models.IntegerField(blank=True, default=0)
suffix2 = models.IntegerField(blank=True, default=0)
class Meta:
unique_together = (('suffix1', 'suffix2'))
abstract = True
class DerivedBook(Book, BookXtra):
pass
@python_2_unicode_compatible
class ExplicitPK(models.Model):
key = models.CharField(max_length=20, primary_key=True)
desc = models.CharField(max_length=20, blank=True, unique=True)
class Meta:
unique_together = ('key', 'desc')
def __str__(self):
return self.key
@python_2_unicode_compatible
class Post(models.Model):
title = models.CharField(max_length=50, unique_for_date='posted', blank=True)
slug = models.CharField(max_length=50, unique_for_year='posted', blank=True)
subtitle = models.CharField(max_length=50, unique_for_month='posted', blank=True)
posted = models.DateField()
def __str__(self):
return self.title
@python_2_unicode_compatible
class DateTimePost(models.Model):
title = models.CharField(max_length=50, unique_for_date='posted', blank=True)
slug = models.CharField(max_length=50, unique_for_year='posted', blank=True)
subtitle = models.CharField(max_length=50, unique_for_month='posted', blank=True)
posted = models.DateTimeField(editable=False)
def __str__(self):
return self.title
class DerivedPost(Post):
pass
@python_2_unicode_compatible
class BigInt(models.Model):
biggie = models.BigIntegerField()
def __str__(self):
return six.text_type(self.biggie)
class MarkupField(models.CharField):
def __init__(self, *args, **kwargs):
kwargs["max_length"] = 20
super(MarkupField, self).__init__(*args, **kwargs)
def formfield(self, **kwargs):
# don't allow this field to be used in form (real use-case might be
# that you know the markup will always be X, but it is among an app
# that allows the user to say it could be something else)
# regressed at r10062
return None
class CustomFieldForExclusionModel(models.Model):
name = models.CharField(max_length=10)
markup = MarkupField()
class FlexibleDatePost(models.Model):
title = models.CharField(max_length=50, unique_for_date='posted', blank=True)
slug = models.CharField(max_length=50, unique_for_year='posted', blank=True)
subtitle = models.CharField(max_length=50, unique_for_month='posted', blank=True)
posted = models.DateField(blank=True, null=True)
@python_2_unicode_compatible
class Colour(models.Model):
name = models.CharField(max_length=50)
def __iter__(self):
        for number in range(5):
yield number
def __str__(self):
return self.name
class ColourfulItem(models.Model):
name = models.CharField(max_length=50)
colours = models.ManyToManyField(Colour)
class ArticleStatusNote(models.Model):
name = models.CharField(max_length=20)
status = models.ManyToManyField(ArticleStatus)
class CustomErrorMessage(models.Model):
name1 = models.CharField(max_length=50,
validators=[validators.validate_slug],
error_messages={'invalid': 'Model custom error message.'})
name2 = models.CharField(max_length=50,
validators=[validators.validate_slug],
error_messages={'invalid': 'Model custom error message.'})
def clean(self):
if self.name1 == 'FORBIDDEN_VALUE':
raise ValidationError({'name1': [ValidationError('Model.clean() error messages.')]})
| bsd-3-clause |
mrahnis/orangery | orangery/cli/segment.py | 1 | 2695 | import sys
import logging
import time
import json
import click
import pandas as pnd
import matplotlib.pyplot as plt
import orangery as o
@click.command(options_metavar='<options>')
@click.argument('areas_f', nargs=1, type=click.Path(exists=True), metavar='<areas_file>')
@click.argument('materials_f', nargs=1, type=click.Path(exists=True), metavar='<materials_file>')
def segment(areas_f, materials_f):
"""Prompt the user to assign materials to polygon areas listed in a csv file.
The segment subcommand takes two arguments: A path to a csv file listing cut-and-fill polygon areas and a path to a JSON file listing possible materials.
The csv file listing the cut-and-fill polygon areas is created with the --save option of the cutfill subcommand.
\b
Example:
orangery segment XS-3-20130514-20170609.csv materials.json
"""
def __assign_material(p, low, high):
prompt = 'Enter a material no. for area {0}: '.format(p)
err = 'Input must be an integer number between {0} and {1}.'.format(low, high)
while True:
try:
m = int(input(prompt))
if low <= m <= high:
return m
else:
print(err)
except ValueError:
print(err)
areas = pnd.read_csv(areas_f, index_col=0)
# materials list and array to track assignment of material to polygon
materials = json.load(open(materials_f, 'r'))
materials = materials['materials']
assignments = []
print('\n')
print('Areas')
print('--------------------')
print(areas)
print('-------------------')
print('\n')
print("No. Material")
print('-------------------')
for i, material in enumerate(materials):
print(i, " ", material['name'])
print('\n')
print("Assign a material, by number, to each area")
print('-------------------')
for i, area in areas.iterrows():
m = __assign_material(i, 0, len(materials)-1)
assignments.append([i, m, materials[m]['name'], materials[m]['density'], materials[m]['fines']])
assignments_df = pnd.DataFrame(assignments, columns=['polygon', 'material', 'name', 'density', 'fines'])
result = assignments_df.join(areas)
result['mass_fines'] = result['density']*result['fines']/100*result['area']
print('\n')
print('Results ')
print('-------------------')
print(result)
print('-------------------')
print('Net change in mass of fines: ', result['mass_fines'].sum())
print('\n')
input("Press Enter to exit")
outfile = areas_f.split('.')[0] + '-sgmt.' + areas_f.split('.')[1]
result.to_csv(outfile) | bsd-3-clause |
morissette/devopsdays-hackathon-2016 | venv/lib/python2.7/site-packages/werkzeug/useragents.py | 257 | 5418 | # -*- coding: utf-8 -*-
"""
werkzeug.useragents
~~~~~~~~~~~~~~~~~~~
This module provides a helper to inspect user agent strings. This module
is far from complete but should work for most of the currently available
browsers.
:copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
import re
class UserAgentParser(object):
"""A simple user agent parser. Used by the `UserAgent`."""
platforms = (
('cros', 'chromeos'),
('iphone|ios', 'iphone'),
('ipad', 'ipad'),
(r'darwin|mac|os\s*x', 'macos'),
('win', 'windows'),
(r'android', 'android'),
(r'x11|lin(\b|ux)?', 'linux'),
('(sun|i86)os', 'solaris'),
(r'nintendo\s+wii', 'wii'),
('irix', 'irix'),
('hp-?ux', 'hpux'),
('aix', 'aix'),
('sco|unix_sv', 'sco'),
('bsd', 'bsd'),
('amiga', 'amiga'),
('blackberry|playbook', 'blackberry'),
('symbian', 'symbian')
)
browsers = (
('googlebot', 'google'),
('msnbot', 'msn'),
('yahoo', 'yahoo'),
('ask jeeves', 'ask'),
(r'aol|america\s+online\s+browser', 'aol'),
('opera', 'opera'),
('chrome', 'chrome'),
('firefox|firebird|phoenix|iceweasel', 'firefox'),
('galeon', 'galeon'),
('safari|version', 'safari'),
('webkit', 'webkit'),
('camino', 'camino'),
('konqueror', 'konqueror'),
('k-meleon', 'kmeleon'),
('netscape', 'netscape'),
(r'msie|microsoft\s+internet\s+explorer|trident/.+? rv:', 'msie'),
('lynx', 'lynx'),
('links', 'links'),
('seamonkey|mozilla', 'seamonkey')
)
_browser_version_re = r'(?:%s)[/\sa-z(]*(\d+[.\da-z]+)?(?i)'
_language_re = re.compile(
r'(?:;\s*|\s+)(\b\w{2}\b(?:-\b\w{2}\b)?)\s*;|'
r'(?:\(|\[|;)\s*(\b\w{2}\b(?:-\b\w{2}\b)?)\s*(?:\]|\)|;)'
)
def __init__(self):
self.platforms = [(b, re.compile(a, re.I)) for a, b in self.platforms]
self.browsers = [(b, re.compile(self._browser_version_re % a))
for a, b in self.browsers]
def __call__(self, user_agent):
for platform, regex in self.platforms:
match = regex.search(user_agent)
if match is not None:
break
else:
platform = None
for browser, regex in self.browsers:
match = regex.search(user_agent)
if match is not None:
version = match.group(1)
break
else:
browser = version = None
match = self._language_re.search(user_agent)
if match is not None:
language = match.group(1) or match.group(2)
else:
language = None
return platform, browser, version, language
class UserAgent(object):
"""Represents a user agent. Pass it a WSGI environment or a user agent
string and you can inspect some of the details from the user agent
string via the attributes. The following attributes exist:
.. attribute:: string
the raw user agent string
.. attribute:: platform
the browser platform. The following platforms are currently
recognized:
- `aix`
- `amiga`
- `android`
- `bsd`
- `chromeos`
- `hpux`
- `iphone`
- `ipad`
- `irix`
- `linux`
- `macos`
- `sco`
- `solaris`
- `wii`
- `windows`
.. attribute:: browser
the name of the browser. The following browsers are currently
recognized:
- `aol` *
- `ask` *
- `camino`
- `chrome`
- `firefox`
- `galeon`
- `google` *
- `kmeleon`
- `konqueror`
- `links`
- `lynx`
- `msie`
- `msn`
- `netscape`
- `opera`
- `safari`
- `seamonkey`
- `webkit`
- `yahoo` *
    (Browsers marked with a star (``*``) are crawlers.)
.. attribute:: version
the version of the browser
.. attribute:: language
the language of the browser
"""
_parser = UserAgentParser()
def __init__(self, environ_or_string):
if isinstance(environ_or_string, dict):
environ_or_string = environ_or_string.get('HTTP_USER_AGENT', '')
self.string = environ_or_string
self.platform, self.browser, self.version, self.language = \
self._parser(environ_or_string)
def to_header(self):
return self.string
def __str__(self):
return self.string
def __nonzero__(self):
return bool(self.browser)
__bool__ = __nonzero__
def __repr__(self):
return '<%s %r/%s>' % (
self.__class__.__name__,
self.browser,
self.version
)
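# Illustrative sketch only; the user agent string and the parsed values are
# assumptions written for this example, not captured parser output:
#   ua = UserAgent("Mozilla/5.0 (X11; Linux x86_64; rv:45.0) Gecko/20100101 Firefox/45.0")
#   ua.platform == 'linux', ua.browser == 'firefox', ua.version == '45.0'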
# conceptually this belongs in this module but because we want to lazily
# load the user agent module (which happens in wrappers.py) we have to import
# it afterwards. The class itself has the module set to this module so
# pickle, inspect and similar modules treat the object as if it was really
# implemented here.
from werkzeug.wrappers import UserAgentMixin # noqa
| gpl-3.0 |
golismero/golismero | golismero/api/data/vulnerability/ssl/obsolete_protocol.py | 8 | 2487 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
__license__= """
GoLismero 2.0 - The web knife - Copyright (C) 2011-2014
Golismero project site: https://github.com/golismero
Golismero project mail: contact@golismero-project.com
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""
__all__ = ["ObsoleteProtocol"]
from . import SSLVulnerability
from .. import Vulnerability
from ... import identity
#------------------------------------------------------------------------------
class ObsoleteProtocol(SSLVulnerability):
"""
Obsolete SSL/TLS Protocol.
A server has been found that uses an obsolete version of the SSL/TLS
protocol. This may allow a strategically located attacker to snoop on
network traffic, or perform a Man-In-The-Middle attack against
unsuspecting users connecting to this host.
Upgrade your server software to a current version of SSL/TLS.
"""
DEFAULTS = SSLVulnerability.DEFAULTS.copy()
DEFAULTS["cwe"] = "CWE-327"
DEFAULTS["cvss_base"] = "4"
#--------------------------------------------------------------------------
def __init__(self, target, protocol, **kwargs):
"""
:param target: Domain where the vulnerability was found.
:type target: Domain
:param protocol: Protocol name.
:type protocol: str
"""
# Protocol name.
self.__protocol = protocol
# Parent constructor.
super(ObsoleteProtocol, self).__init__(target, **kwargs)
__init__.__doc__ += Vulnerability.__init__.__doc__[
Vulnerability.__init__.__doc__.find("\n :keyword"):]
#--------------------------------------------------------------------------
@identity
def protocol(self):
"""
:returns: Protocol name.
:rtype: str
"""
return self.__protocol
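# Illustrative construction sketch (not part of the original module); the
# Domain instance and the protocol value below are placeholders:
#
#   vuln = ObsoleteProtocol(vulnerable_domain, "SSLv2")
#   vuln.protocol  # -> "SSLv2"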
| gpl-2.0 |
sdague/home-assistant | tests/components/hvv_departures/test_config_flow.py | 6 | 12742 | """Test the HVV Departures config flow."""
import json
from pygti.exceptions import CannotConnect, InvalidAuth
from homeassistant import data_entry_flow
from homeassistant.components.hvv_departures.const import (
CONF_FILTER,
CONF_REAL_TIME,
CONF_STATION,
DOMAIN,
)
from homeassistant.config_entries import CONN_CLASS_CLOUD_POLL, SOURCE_USER
from homeassistant.const import CONF_HOST, CONF_OFFSET, CONF_PASSWORD, CONF_USERNAME
from tests.async_mock import patch
from tests.common import MockConfigEntry, load_fixture
FIXTURE_INIT = json.loads(load_fixture("hvv_departures/init.json"))
FIXTURE_CHECK_NAME = json.loads(load_fixture("hvv_departures/check_name.json"))
FIXTURE_STATION_INFORMATION = json.loads(
load_fixture("hvv_departures/station_information.json")
)
FIXTURE_CONFIG_ENTRY = json.loads(load_fixture("hvv_departures/config_entry.json"))
FIXTURE_OPTIONS = json.loads(load_fixture("hvv_departures/options.json"))
FIXTURE_DEPARTURE_LIST = json.loads(load_fixture("hvv_departures/departure_list.json"))
async def test_user_flow(hass):
"""Test that config flow works."""
with patch(
"homeassistant.components.hvv_departures.hub.GTI.init",
return_value=FIXTURE_INIT,
), patch(
"homeassistant.components.hvv_departures.hub.GTI.checkName",
return_value=FIXTURE_CHECK_NAME,
), patch(
"homeassistant.components.hvv_departures.hub.GTI.stationInformation",
return_value=FIXTURE_STATION_INFORMATION,
), patch(
"homeassistant.components.hvv_departures.async_setup", return_value=True
), patch(
"homeassistant.components.hvv_departures.async_setup_entry",
return_value=True,
):
# step: user
result_user = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_USER},
data={
CONF_HOST: "api-test.geofox.de",
CONF_USERNAME: "test-username",
CONF_PASSWORD: "test-password",
},
)
assert result_user["step_id"] == "station"
# step: station
result_station = await hass.config_entries.flow.async_configure(
result_user["flow_id"],
{CONF_STATION: "Wartenau"},
)
assert result_station["step_id"] == "station_select"
# step: station_select
result_station_select = await hass.config_entries.flow.async_configure(
result_user["flow_id"],
{CONF_STATION: "Wartenau"},
)
assert result_station_select["type"] == "create_entry"
assert result_station_select["title"] == "Wartenau"
assert result_station_select["data"] == {
CONF_HOST: "api-test.geofox.de",
CONF_USERNAME: "test-username",
CONF_PASSWORD: "test-password",
CONF_STATION: {
"name": "Wartenau",
"city": "Hamburg",
"combinedName": "Wartenau",
"id": "Master:10901",
"type": "STATION",
"coordinate": {"x": 10.035515, "y": 53.56478},
"serviceTypes": ["bus", "u"],
"hasStationInformation": True,
},
}
async def test_user_flow_no_results(hass):
"""Test that config flow works when there are no results."""
with patch(
"homeassistant.components.hvv_departures.hub.GTI.init",
return_value=FIXTURE_INIT,
), patch(
"homeassistant.components.hvv_departures.hub.GTI.checkName",
return_value={"returnCode": "OK", "results": []},
), patch(
"homeassistant.components.hvv_departures.async_setup", return_value=True
), patch(
"homeassistant.components.hvv_departures.async_setup_entry",
return_value=True,
):
# step: user
result_user = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_USER},
data={
CONF_HOST: "api-test.geofox.de",
CONF_USERNAME: "test-username",
CONF_PASSWORD: "test-password",
},
)
assert result_user["step_id"] == "station"
# step: station
result_station = await hass.config_entries.flow.async_configure(
result_user["flow_id"],
{CONF_STATION: "non_existing_station"},
)
assert result_station["step_id"] == "station"
assert result_station["errors"]["base"] == "no_results"
async def test_user_flow_invalid_auth(hass):
"""Test that config flow handles invalid auth."""
with patch(
"homeassistant.components.hvv_departures.hub.GTI.init",
side_effect=InvalidAuth(
"ERROR_TEXT",
"Bei der Verarbeitung der Anfrage ist ein technisches Problem aufgetreten.",
"Authentication failed!",
),
):
# step: user
result_user = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_USER},
data={
CONF_HOST: "api-test.geofox.de",
CONF_USERNAME: "test-username",
CONF_PASSWORD: "test-password",
},
)
assert result_user["type"] == "form"
assert result_user["errors"] == {"base": "invalid_auth"}
async def test_user_flow_cannot_connect(hass):
"""Test that config flow handles connection errors."""
with patch(
"homeassistant.components.hvv_departures.hub.GTI.init",
side_effect=CannotConnect(),
):
# step: user
result_user = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_USER},
data={
CONF_HOST: "api-test.geofox.de",
CONF_USERNAME: "test-username",
CONF_PASSWORD: "test-password",
},
)
assert result_user["type"] == "form"
assert result_user["errors"] == {"base": "cannot_connect"}
async def test_user_flow_station(hass):
"""Test that config flow handles empty data on step station."""
with patch(
"homeassistant.components.hvv_departures.hub.GTI.init",
return_value=True,
), patch(
"homeassistant.components.hvv_departures.hub.GTI.checkName",
return_value={"returnCode": "OK", "results": []},
):
# step: user
result_user = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_USER},
data={
CONF_HOST: "api-test.geofox.de",
CONF_USERNAME: "test-username",
CONF_PASSWORD: "test-password",
},
)
assert result_user["step_id"] == "station"
# step: station
result_station = await hass.config_entries.flow.async_configure(
result_user["flow_id"],
None,
)
assert result_station["type"] == "form"
assert result_station["step_id"] == "station"
async def test_user_flow_station_select(hass):
"""Test that config flow handles empty data on step station_select."""
with patch(
"homeassistant.components.hvv_departures.hub.GTI.init",
return_value=True,
), patch(
"homeassistant.components.hvv_departures.hub.GTI.checkName",
return_value=FIXTURE_CHECK_NAME,
):
result_user = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_USER},
data={
CONF_HOST: "api-test.geofox.de",
CONF_USERNAME: "test-username",
CONF_PASSWORD: "test-password",
},
)
result_station = await hass.config_entries.flow.async_configure(
result_user["flow_id"],
{CONF_STATION: "Wartenau"},
)
# step: station_select
result_station_select = await hass.config_entries.flow.async_configure(
result_station["flow_id"],
None,
)
assert result_station_select["type"] == "form"
assert result_station_select["step_id"] == "station_select"
async def test_options_flow(hass):
"""Test that options flow works."""
config_entry = MockConfigEntry(
version=1,
domain=DOMAIN,
title="Wartenau",
data=FIXTURE_CONFIG_ENTRY,
source="user",
connection_class=CONN_CLASS_CLOUD_POLL,
system_options={"disable_new_entities": False},
options=FIXTURE_OPTIONS,
unique_id="1234",
)
config_entry.add_to_hass(hass)
with patch("homeassistant.components.hvv_departures.PLATFORMS", new=[]), patch(
"homeassistant.components.hvv_departures.hub.GTI.init",
return_value=True,
), patch(
"homeassistant.components.hvv_departures.hub.GTI.departureList",
return_value=FIXTURE_DEPARTURE_LIST,
):
assert await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
result = await hass.config_entries.options.async_init(config_entry.entry_id)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "init"
result = await hass.config_entries.options.async_configure(
result["flow_id"],
user_input={CONF_FILTER: ["0"], CONF_OFFSET: 15, CONF_REAL_TIME: False},
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert config_entry.options == {
CONF_FILTER: [
{
"serviceID": "HHA-U:U1_HHA-U",
"stationIDs": ["Master:10902"],
"label": "Fuhlsbüttel Nord / Ochsenzoll / Norderstedt Mitte / Kellinghusenstraße / Ohlsdorf / Garstedt",
"serviceName": "U1",
}
],
CONF_OFFSET: 15,
CONF_REAL_TIME: False,
}
async def test_options_flow_invalid_auth(hass):
"""Test that options flow works."""
config_entry = MockConfigEntry(
version=1,
domain=DOMAIN,
title="Wartenau",
data=FIXTURE_CONFIG_ENTRY,
source="user",
connection_class=CONN_CLASS_CLOUD_POLL,
system_options={"disable_new_entities": False},
options=FIXTURE_OPTIONS,
unique_id="1234",
)
config_entry.add_to_hass(hass)
with patch("homeassistant.components.hvv_departures.PLATFORMS", new=[]), patch(
"homeassistant.components.hvv_departures.hub.GTI.init", return_value=True
), patch(
"homeassistant.components.hvv_departures.hub.GTI.departureList",
return_value=FIXTURE_DEPARTURE_LIST,
):
assert await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
with patch(
"homeassistant.components.hvv_departures.hub.GTI.departureList",
side_effect=InvalidAuth(
"ERROR_TEXT",
"Bei der Verarbeitung der Anfrage ist ein technisches Problem aufgetreten.",
"Authentication failed!",
),
):
result = await hass.config_entries.options.async_init(config_entry.entry_id)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "init"
assert result["errors"] == {"base": "invalid_auth"}
async def test_options_flow_cannot_connect(hass):
"""Test that options flow works."""
config_entry = MockConfigEntry(
version=1,
domain=DOMAIN,
title="Wartenau",
data=FIXTURE_CONFIG_ENTRY,
source="user",
connection_class=CONN_CLASS_CLOUD_POLL,
system_options={"disable_new_entities": False},
options=FIXTURE_OPTIONS,
unique_id="1234",
)
config_entry.add_to_hass(hass)
with patch("homeassistant.components.hvv_departures.PLATFORMS", new=[]), patch(
"homeassistant.components.hvv_departures.hub.GTI.init", return_value=True
), patch(
"homeassistant.components.hvv_departures.hub.GTI.departureList",
return_value=FIXTURE_DEPARTURE_LIST,
):
assert await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
with patch(
"homeassistant.components.hvv_departures.hub.GTI.departureList",
side_effect=CannotConnect(),
):
result = await hass.config_entries.options.async_init(config_entry.entry_id)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "init"
assert result["errors"] == {"base": "cannot_connect"}
| apache-2.0 |
agnisparsha/aws-addsg-ec2 | add_sg.py | 1 | 1656 | #!/usr/bin/python
__author__ = 'agnisparsha'
import requests
from boto.ec2.connection import EC2Connection
AWS_ACCESS_KEY_ID = '<access_key_id>'
AWS_SECRET_ACCESS_KEY = '<access_secret_key>'
AWS_REGION = '<e.g. us-east-1c>'
def get_my_ip():
request = requests.get(r'http://jsonip.com')
ip = request.json()['ip']
return ip
def create_connection():
    conn = EC2Connection(AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY)
return conn
def get_instances(conn):
instances = conn.get_all_instances()
return instances
def print_instances(inst_array):
index = 0
    for instance in inst_array:
        print '%d. %s' % (index, instance)
        index += 1
def get_sg_print(instance):
sg_array = instance.groups
index = 0
for sg in sg_array:
        print '%d. Name: %s' % (index, sg.name)
        index += 1
return sg_array
if __name__ == '__main__':
# Create Connection
connection = create_connection()
instances = get_instances(connection)
print_instances(instances)
chosen_instance = int(raw_input('Choose Instance: '))
instance_obj = instances[chosen_instance]
# Get the instance group
sgs = get_sg_print(instance_obj)
sg_chosen_num = int(raw_input('Choose SG: '))
sg_chosen = sgs[sg_chosen_num]
    # Choose the security group from the instance group name
groups = [sg for sg in connection.get_all_security_groups() if sg.name == sg_chosen.name]
group = groups[0] if groups else None
from_port = int(raw_input('From Port: '))
to_port = int(raw_input('To Port: '))
my_ip = get_my_ip()
cidr = '%s0/24' % my_ip[:-len(my_ip.split('.')[-1])]
# Add the rule by authorizing it
group.authorize(ip_protocol='tcp', from_port=from_port,
to_port=to_port, cidr_ip=cidr)
| mit |
blueboxgroup/neutron | neutron/tests/unit/ibm/test_sdnve_api.py | 33 | 5707 | # Copyright 2014 IBM Corp.
#
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from neutron.openstack.common import uuidutils
from neutron.plugins.ibm.common import constants
from neutron.plugins.ibm import sdnve_api
from neutron.tests import base
RESOURCE_PATH = {
'network': "ln/networks/",
}
RESOURCE = 'network'
HTTP_OK = 200
TENANT_ID = uuidutils.generate_uuid()
class TestSdnveApi(base.BaseTestCase):
def setUp(self):
super(TestSdnveApi, self).setUp()
class MockKeystoneClient(object):
def __init__(self, **kwargs):
pass
def get_tenant_name(self, id):
return 'test tenant name'
with mock.patch('neutron.plugins.ibm.sdnve_api.'
'KeystoneClient',
new=MockKeystoneClient):
self.api = sdnve_api.Client()
def mock_do_request(self, method, url, body=None, headers=None,
params=None, connection_type=None):
return (HTTP_OK, url)
def mock_do_request_tenant(self, method, url, body=None, headers=None,
params=None, connection_type=None):
return (HTTP_OK, {'id': TENANT_ID,
'network_type': constants.TENANT_TYPE_OF})
def mock_do_request_no_tenant(self, method, url, body=None, headers=None,
params=None, connection_type=None):
return (None, None)
def mock_process_request(self, body):
return body
def test_sdnve_api_list(self):
with mock.patch('neutron.plugins.ibm.sdnve_api.'
'Client.do_request',
new=self.mock_do_request):
result = self.api.sdnve_list(RESOURCE)
self.assertEqual(result, (HTTP_OK, RESOURCE_PATH[RESOURCE]))
def test_sdnve_api_show(self):
with mock.patch('neutron.plugins.ibm.sdnve_api.'
'Client.do_request',
new=self.mock_do_request):
result = self.api.sdnve_show(RESOURCE, TENANT_ID)
self.assertEqual(result,
(HTTP_OK, RESOURCE_PATH[RESOURCE] + TENANT_ID))
def test_sdnve_api_create(self):
with mock.patch('neutron.plugins.ibm.sdnve_api.'
'Client.do_request',
new=self.mock_do_request):
with mock.patch('neutron.plugins.ibm.sdnve_api.'
'Client.process_request',
new=self.mock_process_request):
result = self.api.sdnve_create(RESOURCE, '')
self.assertEqual(result, (HTTP_OK, RESOURCE_PATH[RESOURCE]))
def test_sdnve_api_update(self):
with mock.patch('neutron.plugins.ibm.sdnve_api.'
'Client.do_request',
new=self.mock_do_request):
with mock.patch('neutron.plugins.ibm.sdnve_api.'
'Client.process_request',
new=self.mock_process_request):
result = self.api.sdnve_update(RESOURCE, TENANT_ID, '')
self.assertEqual(result,
(HTTP_OK,
RESOURCE_PATH[RESOURCE] + TENANT_ID))
def test_sdnve_api_delete(self):
with mock.patch('neutron.plugins.ibm.sdnve_api.'
'Client.do_request',
new=self.mock_do_request):
result = self.api.sdnve_delete(RESOURCE, TENANT_ID)
self.assertEqual(result,
(HTTP_OK, RESOURCE_PATH[RESOURCE] + TENANT_ID))
def test_sdnve_get_tenant_by_id(self):
with mock.patch('neutron.plugins.ibm.sdnve_api.'
'Client.do_request',
new=self.mock_do_request_tenant):
id = TENANT_ID
result = self.api.sdnve_get_tenant_byid(id)
self.assertEqual(result,
(TENANT_ID, constants.TENANT_TYPE_OF))
def test_sdnve_check_and_create_tenant(self):
with mock.patch('neutron.plugins.ibm.sdnve_api.'
'Client.do_request',
new=self.mock_do_request_tenant):
id = TENANT_ID
result = self.api.sdnve_check_and_create_tenant(id)
self.assertEqual(result, TENANT_ID)
def test_sdnve_check_and_create_tenant_fail(self):
with mock.patch('neutron.plugins.ibm.sdnve_api.'
'Client.do_request',
new=self.mock_do_request_no_tenant):
id = TENANT_ID
result = self.api.sdnve_check_and_create_tenant(
id, constants.TENANT_TYPE_OF)
self.assertIsNone(result)
def test_process_request(self):
my_request = {'key_1': 'value_1', 'router:external': 'True',
'key_2': 'value_2'}
expected = {'key_1': 'value_1', 'router_external': 'True',
'key_2': 'value_2'}
result = self.api.process_request(my_request)
self.assertEqual(expected, result)
| apache-2.0 |
Evervolv/android_external_chromium_org | tools/android/remove_strings.py | 183 | 1426 | #!/usr/bin/python
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Remove strings by name from a GRD file."""
import optparse
import re
import sys
def RemoveStrings(grd_path, string_names):
"""Removes strings with the given names from a GRD file. Overwrites the file.
Args:
grd_path: path to the GRD file.
string_names: a list of string names to be removed.
"""
with open(grd_path, 'r') as f:
grd = f.read()
names_pattern = '|'.join(map(re.escape, string_names))
pattern = r'<message [^>]*name="(%s)".*?</message>\s*' % names_pattern
grd = re.sub(pattern, '', grd, flags=re.DOTALL)
with open(grd_path, 'w') as f:
f.write(grd)
def ParseArgs(args):
usage = 'usage: %prog GRD_PATH...'
parser = optparse.OptionParser(
usage=usage, description='Remove strings from GRD files. Reads string '
'names from stdin, and removes strings with those names from the listed '
'GRD files.')
options, args = parser.parse_args(args=args)
if not args:
parser.error('must provide GRD_PATH argument(s)')
return args
def main(args=None):
grd_paths = ParseArgs(args)
strings_to_remove = filter(None, map(str.strip, sys.stdin.readlines()))
for grd_path in grd_paths:
RemoveStrings(grd_path, strings_to_remove)
if __name__ == '__main__':
main()
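# Example invocation (illustrative; the string name and GRD path are made up):
#   echo IDS_SOME_OLD_STRING | tools/android/remove_strings.py \
#       chrome/app/generated_resources.grd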
| bsd-3-clause |
kemalakyol48/python-for-android | python-modules/twisted/twisted/conch/test/test_default.py | 60 | 6384 | # Copyright (c) 2009 Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Tests for L{twisted.conch.client.default}.
"""
try:
import Crypto.Cipher.DES3
import pyasn1
except ImportError:
skip = "PyCrypto and PyASN1 required for twisted.conch.client.default."
else:
from twisted.conch.client.agent import SSHAgentClient
from twisted.conch.client.default import SSHUserAuthClient
from twisted.conch.client.options import ConchOptions
from twisted.conch.ssh.keys import Key
from twisted.trial.unittest import TestCase
from twisted.python.filepath import FilePath
from twisted.conch.test import keydata
from twisted.test.proto_helpers import StringTransport
class SSHUserAuthClientTest(TestCase):
"""
Tests for L{SSHUserAuthClient}.
@type rsaPublic: L{Key}
@ivar rsaPublic: A public RSA key.
"""
def setUp(self):
self.rsaPublic = Key.fromString(keydata.publicRSA_openssh)
self.tmpdir = FilePath(self.mktemp())
self.tmpdir.makedirs()
self.rsaFile = self.tmpdir.child('id_rsa')
self.rsaFile.setContent(keydata.privateRSA_openssh)
self.tmpdir.child('id_rsa.pub').setContent(keydata.publicRSA_openssh)
def test_signDataWithAgent(self):
"""
When connected to an agent, L{SSHUserAuthClient} can use it to
request signatures of particular data with a particular L{Key}.
"""
client = SSHUserAuthClient("user", ConchOptions(), None)
agent = SSHAgentClient()
transport = StringTransport()
agent.makeConnection(transport)
client.keyAgent = agent
cleartext = "Sign here"
client.signData(self.rsaPublic, cleartext)
self.assertEquals(
transport.value(),
"\x00\x00\x00\x8b\r\x00\x00\x00u" + self.rsaPublic.blob() +
"\x00\x00\x00\t" + cleartext +
"\x00\x00\x00\x00")
def test_agentGetPublicKey(self):
"""
L{SSHUserAuthClient} looks up public keys from the agent using the
L{SSHAgentClient} class. That L{SSHAgentClient.getPublicKey} returns a
L{Key} object with one of the public keys in the agent. If no more
keys are present, it returns C{None}.
"""
agent = SSHAgentClient()
agent.blobs = [self.rsaPublic.blob()]
key = agent.getPublicKey()
self.assertEquals(key.isPublic(), True)
self.assertEquals(key, self.rsaPublic)
self.assertEquals(agent.getPublicKey(), None)
def test_getPublicKeyFromFile(self):
"""
L{SSHUserAuthClient.getPublicKey()} is able to get a public key from
the first file described by its options' C{identitys} list, and return
the corresponding public L{Key} object.
"""
options = ConchOptions()
options.identitys = [self.rsaFile.path]
client = SSHUserAuthClient("user", options, None)
key = client.getPublicKey()
self.assertEquals(key.isPublic(), True)
self.assertEquals(key, self.rsaPublic)
def test_getPublicKeyAgentFallback(self):
"""
If an agent is present, but doesn't return a key,
L{SSHUserAuthClient.getPublicKey} continue with the normal key lookup.
"""
options = ConchOptions()
options.identitys = [self.rsaFile.path]
agent = SSHAgentClient()
client = SSHUserAuthClient("user", options, None)
client.keyAgent = agent
key = client.getPublicKey()
self.assertEquals(key.isPublic(), True)
self.assertEquals(key, self.rsaPublic)
def test_getPublicKeyBadKeyError(self):
"""
If L{keys.Key.fromFile} raises a L{keys.BadKeyError}, the
L{SSHUserAuthClient.getPublicKey} tries again to get a public key by
calling itself recursively.
"""
options = ConchOptions()
self.tmpdir.child('id_dsa.pub').setContent(keydata.publicDSA_openssh)
dsaFile = self.tmpdir.child('id_dsa')
dsaFile.setContent(keydata.privateDSA_openssh)
options.identitys = [self.rsaFile.path, dsaFile.path]
self.tmpdir.child('id_rsa.pub').setContent('not a key!')
client = SSHUserAuthClient("user", options, None)
key = client.getPublicKey()
self.assertEquals(key.isPublic(), True)
self.assertEquals(key, Key.fromString(keydata.publicDSA_openssh))
self.assertEquals(client.usedFiles, [self.rsaFile.path, dsaFile.path])
def test_getPrivateKey(self):
"""
L{SSHUserAuthClient.getPrivateKey} will load a private key from the
last used file populated by L{SSHUserAuthClient.getPublicKey}, and
return a L{Deferred} which fires with the corresponding private L{Key}.
"""
rsaPrivate = Key.fromString(keydata.privateRSA_openssh)
options = ConchOptions()
options.identitys = [self.rsaFile.path]
client = SSHUserAuthClient("user", options, None)
# Populate the list of used files
client.getPublicKey()
def _cbGetPrivateKey(key):
self.assertEquals(key.isPublic(), False)
self.assertEquals(key, rsaPrivate)
return client.getPrivateKey().addCallback(_cbGetPrivateKey)
def test_getPrivateKeyPassphrase(self):
"""
L{SSHUserAuthClient} can get a private key from a file, and return a
Deferred called back with a private L{Key} object, even if the key is
encrypted.
"""
rsaPrivate = Key.fromString(keydata.privateRSA_openssh)
passphrase = 'this is the passphrase'
self.rsaFile.setContent(rsaPrivate.toString('openssh', passphrase))
options = ConchOptions()
options.identitys = [self.rsaFile.path]
client = SSHUserAuthClient("user", options, None)
# Populate the list of used files
client.getPublicKey()
def _getPassword(prompt):
self.assertEquals(prompt,
"Enter passphrase for key '%s': " % (
self.rsaFile.path,))
return passphrase
def _cbGetPrivateKey(key):
self.assertEquals(key.isPublic(), False)
self.assertEquals(key, rsaPrivate)
self.patch(client, '_getPassword', _getPassword)
return client.getPrivateKey().addCallback(_cbGetPrivateKey)
| apache-2.0 |
factorlibre/carrier-delivery | delivery_carrier_label_fedex/models/stock.py | 1 | 11695 | ##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2015 FactorLibre (http://www.factorlibre.com)
# Hugo Santos <hugo.santos@factorlibre.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from fedex.base_service import FedexError, SchemaValidationError
from fedex.config import FedexConfig
from fedex.services.ship_service import FedexProcessShipmentRequest
from openerp import models, fields, api, exceptions, _
from openerp.addons.delivery_carrier_label_fedex.models.fedex_config import \
FEDEX_SERVICE_TYPES
class ShippingLabel(models.Model):
_inherit = 'shipping.label'
@api.model
def _get_file_type_selection(self):
res = super(ShippingLabel, self)._get_file_type_selection()
fedex_file_types = [
('pdf', 'pdf'),
('zplii', 'zplii'),
('epl2', 'epl2'),
('dpl', 'dpl'),
('png', 'png')
]
res += fedex_file_types
res = list(set(res))
return res
class StockPicking(models.Model):
_inherit = 'stock.picking'
fedex_service_type = fields.Selection(
FEDEX_SERVICE_TYPES, string="Fedex Service")
fedex_commodity_description = fields.Char(
'Commodity Description',
help='Description of items for customs clearance',
size=20)
@api.onchange('carrier_id')
def carrier_id_change(self):
result = super(StockPicking, self).carrier_id_change()
        if not self.carrier_id or self.carrier_id.type != 'fedex' or (
                not self.carrier_id.fedex_config_id):
return result
if self.carrier_id.fedex_config_id.default_service:
self.fedex_service_type =\
self.carrier_id.fedex_config_id.default_service
return result
@api.multi
def _generate_fedex_label(self, package_ids=None):
self.ensure_one()
if not self.carrier_id.fedex_config_id:
raise exceptions.Warning(_('No Fedex config defined in carrier'))
if not self.fedex_service_type:
raise exceptions.Warning(
_('A Fedex Service type is required to generate a shipment'
' request and label'))
if not self.picking_type_id.warehouse_id.partner_id:
raise exceptions.Warning(
_('Please define an address in the %s warehouse') % (
self.warehouse_id.name))
warehouse_partner = self.picking_type_id.warehouse_id.partner_id
odoo_fedex_config = self.carrier_id.fedex_config_id
fedex_config = FedexConfig(
key=odoo_fedex_config.key,
password=odoo_fedex_config.password,
account_number=odoo_fedex_config.account_number,
meter_number=odoo_fedex_config.meter_number,
freight_account_number=(
odoo_fedex_config.freight_account_number),
use_test_server=odoo_fedex_config.is_test)
shipment = FedexProcessShipmentRequest(fedex_config)
company_currency = self.company_id.currency_id
requested_shipment = shipment.RequestedShipment
shipment.RequestedShipment.DropoffType = 'REGULAR_PICKUP'
shipment.RequestedShipment.ServiceType = self.fedex_service_type
shipment.RequestedShipment.PackagingType = 'FEDEX_PAK'
requested_shipment.TotalInsuredValue.Currency = company_currency.name
requested_shipment.TotalInsuredValue.Amount = 0.0
shipment.RequestedShipment.Shipper.AccountNumber =\
odoo_fedex_config.account_number
# Shipper contact info.
shipper_contact = shipment.RequestedShipment.Shipper.Contact
shipper_contact.CompanyName = warehouse_partner.name
shipper_contact.PhoneNumber = warehouse_partner.phone
# Shipper address.
shipper_address = shipment.RequestedShipment.Shipper.Address
shipper_street_lines = [warehouse_partner.street]
if warehouse_partner.street2:
shipper_street_lines.append(warehouse_partner.street2)
shipper_address.StreetLines = shipper_street_lines
shipper_address.City = warehouse_partner.city
shipper_address.StateOrProvinceCode = warehouse_partner.country_id.code
shipper_address.PostalCode = warehouse_partner.zip
shipper_address.CountryCode = warehouse_partner.country_id.code
shipper_address.Residential = True
# Recipient contact info.
recipient_contact = shipment.RequestedShipment.Recipient.Contact
recipient_contact.PersonName = self.partner_id.name
recipient_contact.CompanyName = self.partner_id.parent_id.name or ''
recipient_contact.PhoneNumber = self.partner_id.phone
# Recipient address
recipient_address = shipment.RequestedShipment.Recipient.Address
recipient_street_lines = [self.partner_id.street]
if self.partner_id.street2:
recipient_street_lines.append(self.partner_id.street2)
recipient_address.StreetLines = recipient_street_lines
recipient_address.City = self.partner_id.city
recipient_address.StateOrProvinceCode = self.partner_id.country_id.code
recipient_address.PostalCode = self.partner_id.zip
recipient_address.CountryCode = self.partner_id.country_id.code
recipient_address.Residential = True
shipment.RequestedShipment.EdtRequestType = 'NONE'
shipping_charges_payment =\
shipment.RequestedShipment.ShippingChargesPayment
payor_responsible_party =\
shipping_charges_payment.Payor.ResponsibleParty
payor_responsible_party.AccountNumber = fedex_config.account_number
payor_responsible_party.Address.CountryCode =\
warehouse_partner.country_id.code
shipping_charges_payment.PaymentType = 'SENDER'
# Label Specification
requested_shipment.LabelSpecification.LabelFormatType = 'COMMON2D'
requested_shipment.LabelSpecification.ImageType =\
odoo_fedex_config.label_type
requested_shipment.LabelSpecification.LabelStockType =\
odoo_fedex_config.label_template
requested_shipment.LabelSpecification.LabelPrintingOrientation =\
'BOTTOM_EDGE_OF_TEXT_FIRST'
# Sale amount and currency
total_amount = 0.0
if self.sale_id:
total_amount = self.sale_id.amount_total
if self.sale_id.currency_id.id != company_currency.id:
total_amount = self.sale_id.currency_id.with_context(
date=self.sale_id.date_order).compute(
self.sale_id.amount_total, company_currency)
number_of_packages = self.number_of_packages or 1
picking_weight = self.weight or 1.0
add_commodities = False
commodities = []
if warehouse_partner.country_id.code != \
self.partner_id.country_id.code:
# Add customs Value
customs_detail = requested_shipment.CustomsClearanceDetail
duties_payment = customs_detail.DutiesPayment
duties_payment.PaymentType = 'RECIPIENT'
customs_detail.CustomsValue.Currency = company_currency.name
customs_detail.CustomsValue.Amount = total_amount
customs_detail.ClearanceBrokerage = None
customs_detail.DocumentContent = None
customs_detail.FreightOnValue = None
add_commodities = True
# Create a package
# requested_shipment.PackageCount = number_of_packages
pack_weight = picking_weight / float(number_of_packages)
package_weight = shipment.create_wsdl_object_of_type('Weight')
package_weight.Value = pack_weight
package_weight.Units = odoo_fedex_config.weight_uom
package = shipment.create_wsdl_object_of_type(
'RequestedPackageLineItem')
package.PhysicalPackaging = 'BOX'
package.Weight = package_weight
package.SequenceNumber = 1
package.GroupPackageCount = 1
shipment.add_package(package)
# for package_seq in range(number_of_packages):
if add_commodities:
commodity = shipment.create_wsdl_object_of_type('Commodity')
commodity.NumberOfPieces = 1
commodity.CountryOfManufacture =\
warehouse_partner.country_id.code
commodity.Description = self.fedex_commodity_description
commodity.Weight.Units = odoo_fedex_config.weight_uom
commodity.Weight.Value = pack_weight
commodity.Quantity = 1
commodity.QuantityUnits = 'PCE'
commodity.UnitPrice.Currency = company_currency.name
commodity.UnitPrice.Amount = total_amount
commodities.append(commodity)
if commodities:
requested_shipment.CustomsClearanceDetail.Commodities = commodities
requested_shipment.TotalWeight.Units = odoo_fedex_config.weight_uom
requested_shipment.TotalWeight.Value = picking_weight
labels = []
label_extension = odoo_fedex_config.label_type.lower()
try:
shipment.send_request()
if shipment.response.HighestSeverity == 'ERROR':
error_response = shipment.response.Notifications[0]
raise exceptions.Warning(_('Fedex Error: {} {}').format(
error_response.Code, error_response.Message))
completed_shipment = shipment.response.CompletedShipmentDetail
tracking_number = completed_shipment.CompletedPackageDetails[0].\
TrackingIds[0].TrackingNumber
for package_response in completed_shipment.CompletedPackageDetails:
label = {
'file': str(package_response.Label.Parts[0].Image).decode(
'base64'),
'file_type': label_extension,
'name': "{}.{}".format(
package_response.TrackingIds[0].TrackingNumber,
label_extension)
}
labels.append(label)
self.write({'carrier_tracking_ref': tracking_number})
except FedexError, e:
raise exceptions.Warning(_('Fedex Error {}: {}').format(
e.error_code, e.value))
except SchemaValidationError, e:
raise exceptions.Warning(_('Fedex Schema Error {}: {}').format(
e.error_code, e.value))
return labels
@api.multi
def generate_shipping_labels(self, package_ids=None):
""" Add label generation for Fedex """
self.ensure_one()
if self.carrier_id.type == 'fedex':
return self._generate_fedex_label(package_ids=package_ids)
return super(StockPicking, self).generate_shipping_labels(
package_ids=package_ids)
| agpl-3.0 |
AICP/external_chromium_org | build/android/gyp/jar_toc.py | 10 | 3101 | #!/usr/bin/env python
#
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Creates a TOC file from a Java jar.
The TOC file contains the non-package API of the jar. This includes all
public/protected/package classes/functions/members and the values of static
final variables (members with package access are kept because in some cases we
have multiple libraries with the same package, particularly test+non-test). Some
other information (major/minor javac version) is also included.
This TOC file then can be used to determine if a dependent library should be
rebuilt when this jar changes. I.e. any change to the jar that would require a
rebuild, will have a corresponding change in the TOC file.
"""
import optparse
import re
import sys
import zipfile
from util import build_utils
from util import md5_check
def GetClassesInZipFile(zip_file):
classes = []
files = zip_file.namelist()
for f in files:
if f.endswith('.class'):
# f is of the form org/chromium/base/Class$Inner.class
classes.append(f.replace('/', '.')[:-6])
return classes
def CallJavap(classpath, classes):
javap_cmd = [
'javap',
'-package', # Show public/protected/package.
# -verbose is required to get constant values (which can be inlined in
# dependents).
'-verbose',
'-classpath', classpath
] + classes
return build_utils.CheckOutput(javap_cmd)
def ExtractToc(disassembled_classes):
# javap output is structured by indent (2-space) levels.
good_patterns = [
'^[^ ]', # This includes all class/function/member signatures.
'^ SourceFile:',
'^ minor version:',
'^ major version:',
'^ Constant value:',
]
bad_patterns = [
'^const #', # Matches the constant pool (i.e. literals used in the class).
]
def JavapFilter(line):
return (re.match('|'.join(good_patterns), line) and
not re.match('|'.join(bad_patterns), line))
toc = filter(JavapFilter, disassembled_classes.split('\n'))
return '\n'.join(toc)
def UpdateToc(jar_path, toc_path):
classes = GetClassesInZipFile(zipfile.ZipFile(jar_path))
javap_output = CallJavap(classpath=jar_path, classes=classes)
toc = ExtractToc(javap_output)
with open(toc_path, 'w') as tocfile:
tocfile.write(toc)
def DoJarToc(options):
jar_path = options.jar_path
toc_path = options.toc_path
record_path = '%s.md5.stamp' % toc_path
md5_check.CallAndRecordIfStale(
lambda: UpdateToc(jar_path, toc_path),
record_path=record_path,
input_paths=[jar_path],
)
build_utils.Touch(toc_path)
def main():
parser = optparse.OptionParser()
parser.add_option('--jar-path', help='Input .jar path.')
parser.add_option('--toc-path', help='Output .jar.TOC path.')
parser.add_option('--stamp', help='Path to touch on success.')
options, _ = parser.parse_args()
DoJarToc(options)
if options.stamp:
build_utils.Touch(options.stamp)
if __name__ == '__main__':
sys.exit(main())
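# Example invocation (illustrative paths; the flags are the ones defined in
# main() above):
#   build/android/gyp/jar_toc.py --jar-path out/Debug/lib.java/foo.jar \
#       --toc-path out/Debug/lib.java/foo.jar.TOC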
| bsd-3-clause |
gtko/CouchPotatoServer | libs/pytwitter/__init__.py | 104 | 175429 | #!/usr/bin/env python
#
# vim: sw=2 ts=2 sts=2
#
# Copyright 2007 The Python-Twitter Developers
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''A library that provides a Python interface to the Twitter API'''
__author__ = 'python-twitter@googlegroups.com'
__version__ = '1.0.1'
import calendar
import datetime
import httplib
import os
import rfc822
import sys
import tempfile
import textwrap
import time
import urllib
import urllib2
import urlparse
import gzip
import StringIO
try:
# Python >= 2.6
import json as simplejson
except ImportError:
try:
# Python < 2.6
import simplejson
except ImportError:
try:
# Google App Engine
from django.utils import simplejson
except ImportError:
raise ImportError, "Unable to load a json library"
# parse_qsl moved to urlparse module in v2.6
try:
from urlparse import parse_qsl, parse_qs
except ImportError:
from cgi import parse_qsl, parse_qs
try:
from hashlib import md5
except ImportError:
from md5 import md5
import oauth2 as oauth
CHARACTER_LIMIT = 140
# A singleton representing a lazily instantiated FileCache.
DEFAULT_CACHE = object()
REQUEST_TOKEN_URL = 'https://api.twitter.com/oauth/request_token'
ACCESS_TOKEN_URL = 'https://api.twitter.com/oauth/access_token'
AUTHORIZATION_URL = 'https://api.twitter.com/oauth/authorize'
SIGNIN_URL = 'https://api.twitter.com/oauth/authenticate'
class TwitterError(Exception):
'''Base class for Twitter errors'''
@property
def message(self):
'''Returns the first argument used to construct this error.'''
return self.args[0]
class Status(object):
'''A class representing the Status structure used by the twitter API.
The Status structure exposes the following properties:
status.created_at
status.created_at_in_seconds # read only
status.favorited
status.favorite_count
status.in_reply_to_screen_name
status.in_reply_to_user_id
status.in_reply_to_status_id
status.truncated
status.source
status.id
status.text
status.location
status.relative_created_at # read only
status.user
status.urls
status.user_mentions
status.hashtags
status.geo
status.place
status.coordinates
status.contributors
'''
def __init__(self,
created_at = None,
favorited = None,
favorite_count = None,
id = None,
text = None,
location = None,
user = None,
in_reply_to_screen_name = None,
in_reply_to_user_id = None,
in_reply_to_status_id = None,
truncated = None,
source = None,
now = None,
urls = None,
user_mentions = None,
hashtags = None,
media = None,
geo = None,
place = None,
coordinates = None,
contributors = None,
retweeted = None,
retweeted_status = None,
current_user_retweet = None,
retweet_count = None,
possibly_sensitive = None,
scopes = None,
withheld_copyright = None,
withheld_in_countries = None,
withheld_scope = None):
'''An object to hold a Twitter status message.
This class is normally instantiated by the twitter.Api class and
returned in a sequence.
Note: Dates are posted in the form "Sat Jan 27 04:17:38 +0000 2007"
Args:
created_at:
The time this status message was posted. [Optional]
favorited:
Whether this is a favorite of the authenticated user. [Optional]
favorite_count:
Number of times this status message has been favorited. [Optional]
id:
The unique id of this status message. [Optional]
text:
The text of this status message. [Optional]
location:
the geolocation string associated with this message. [Optional]
relative_created_at:
A human readable string representing the posting time. [Optional]
user:
A twitter.User instance representing the person posting the
message. [Optional]
now:
The current time, if the client chooses to set it.
Defaults to the wall clock time. [Optional]
urls:
user_mentions:
hashtags:
geo:
place:
coordinates:
contributors:
retweeted:
retweeted_status:
current_user_retweet:
retweet_count:
possibly_sensitive:
scopes:
withheld_copyright:
withheld_in_countries:
withheld_scope:
'''
self.created_at = created_at
self.favorited = favorited
self.favorite_count = favorite_count
self.id = id
self.text = text
self.location = location
self.user = user
self.now = now
self.in_reply_to_screen_name = in_reply_to_screen_name
self.in_reply_to_user_id = in_reply_to_user_id
self.in_reply_to_status_id = in_reply_to_status_id
self.truncated = truncated
self.retweeted = retweeted
self.source = source
self.urls = urls
self.user_mentions = user_mentions
self.hashtags = hashtags
self.media = media
self.geo = geo
self.place = place
self.coordinates = coordinates
self.contributors = contributors
self.retweeted_status = retweeted_status
self.current_user_retweet = current_user_retweet
self.retweet_count = retweet_count
self.possibly_sensitive = possibly_sensitive
self.scopes = scopes
self.withheld_copyright = withheld_copyright
self.withheld_in_countries = withheld_in_countries
self.withheld_scope = withheld_scope
def GetCreatedAt(self):
'''Get the time this status message was posted.
Returns:
The time this status message was posted
'''
return self._created_at
def SetCreatedAt(self, created_at):
'''Set the time this status message was posted.
Args:
created_at:
The time this status message was created
'''
self._created_at = created_at
created_at = property(GetCreatedAt, SetCreatedAt,
doc = 'The time this status message was posted.')
def GetCreatedAtInSeconds(self):
'''Get the time this status message was posted, in seconds since the epoch.
Returns:
The time this status message was posted, in seconds since the epoch.
'''
return calendar.timegm(rfc822.parsedate(self.created_at))
created_at_in_seconds = property(GetCreatedAtInSeconds,
doc = "The time this status message was "
"posted, in seconds since the epoch")
def GetFavorited(self):
'''Get the favorited setting of this status message.
Returns:
True if this status message is favorited; False otherwise
'''
return self._favorited
def SetFavorited(self, favorited):
'''Set the favorited state of this status message.
Args:
favorited:
boolean True/False favorited state of this status message
'''
self._favorited = favorited
favorited = property(GetFavorited, SetFavorited,
doc = 'The favorited state of this status message.')
def GetFavoriteCount(self):
'''Get the favorite count of this status message.
Returns:
number of times this status message has been favorited
'''
return self._favorite_count
def SetFavoriteCount(self, favorite_count):
'''Set the favorited state of this status message.
Args:
favorite_count:
int number of favorites for this status message
'''
self._favorite_count = favorite_count
favorite_count = property(GetFavoriteCount, SetFavoriteCount,
doc = 'The number of favorites for this status message.')
def GetId(self):
'''Get the unique id of this status message.
Returns:
The unique id of this status message
'''
return self._id
def SetId(self, id):
'''Set the unique id of this status message.
Args:
id:
The unique id of this status message
'''
self._id = id
id = property(GetId, SetId,
doc = 'The unique id of this status message.')
def GetInReplyToScreenName(self):
return self._in_reply_to_screen_name
def SetInReplyToScreenName(self, in_reply_to_screen_name):
self._in_reply_to_screen_name = in_reply_to_screen_name
in_reply_to_screen_name = property(GetInReplyToScreenName, SetInReplyToScreenName,
doc = '')
def GetInReplyToUserId(self):
return self._in_reply_to_user_id
def SetInReplyToUserId(self, in_reply_to_user_id):
self._in_reply_to_user_id = in_reply_to_user_id
in_reply_to_user_id = property(GetInReplyToUserId, SetInReplyToUserId,
doc = '')
def GetInReplyToStatusId(self):
return self._in_reply_to_status_id
def SetInReplyToStatusId(self, in_reply_to_status_id):
self._in_reply_to_status_id = in_reply_to_status_id
in_reply_to_status_id = property(GetInReplyToStatusId, SetInReplyToStatusId,
doc = '')
def GetTruncated(self):
return self._truncated
def SetTruncated(self, truncated):
self._truncated = truncated
truncated = property(GetTruncated, SetTruncated,
doc = '')
def GetRetweeted(self):
return self._retweeted
def SetRetweeted(self, retweeted):
self._retweeted = retweeted
retweeted = property(GetRetweeted, SetRetweeted,
doc = '')
def GetSource(self):
return self._source
def SetSource(self, source):
self._source = source
source = property(GetSource, SetSource,
doc = '')
def GetText(self):
'''Get the text of this status message.
Returns:
The text of this status message.
'''
return self._text
def SetText(self, text):
'''Set the text of this status message.
Args:
text:
The text of this status message
'''
self._text = text
text = property(GetText, SetText,
doc = 'The text of this status message')
def GetLocation(self):
'''Get the geolocation associated with this status message
Returns:
The geolocation string of this status message.
'''
return self._location
def SetLocation(self, location):
'''Set the geolocation associated with this status message
Args:
location:
The geolocation string of this status message
'''
self._location = location
location = property(GetLocation, SetLocation,
doc = 'The geolocation string of this status message')
def GetRelativeCreatedAt(self):
'''Get a human readable string representing the posting time
Returns:
A human readable string representing the posting time
'''
fudge = 1.25
delta = long(self.now) - long(self.created_at_in_seconds)
if delta < (1 * fudge):
return 'about a second ago'
elif delta < (60 * (1 / fudge)):
return 'about %d seconds ago' % (delta)
elif delta < (60 * fudge):
return 'about a minute ago'
elif delta < (60 * 60 * (1 / fudge)):
return 'about %d minutes ago' % (delta / 60)
elif delta < (60 * 60 * fudge) or delta / (60 * 60) == 1:
return 'about an hour ago'
elif delta < (60 * 60 * 24 * (1 / fudge)):
return 'about %d hours ago' % (delta / (60 * 60))
elif delta < (60 * 60 * 24 * fudge) or delta / (60 * 60 * 24) == 1:
return 'about a day ago'
else:
return 'about %d days ago' % (delta / (60 * 60 * 24))
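  # Worked example (illustrative): with now - created_at_in_seconds == 70 the
  # branches above return 'about a minute ago' (70 < 60 * 1.25), while a delta
  # of 7200 seconds returns 'about 2 hours ago' (7200 / (60 * 60) == 2).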
relative_created_at = property(GetRelativeCreatedAt,
doc = 'Get a human readable string representing '
'the posting time')
def GetUser(self):
'''Get a twitter.User representing the entity posting this status message.
Returns:
A twitter.User representing the entity posting this status message
'''
return self._user
def SetUser(self, user):
'''Set a twitter.User representing the entity posting this status message.
Args:
user:
A twitter.User representing the entity posting this status message
'''
self._user = user
user = property(GetUser, SetUser,
doc = 'A twitter.User representing the entity posting this '
'status message')
def GetNow(self):
'''Get the wallclock time for this status message.
Used to calculate relative_created_at. Defaults to the time
the object was instantiated.
Returns:
Whatever the status instance believes the current time to be,
in seconds since the epoch.
'''
if self._now is None:
self._now = time.time()
return self._now
def SetNow(self, now):
'''Set the wallclock time for this status message.
Used to calculate relative_created_at. Defaults to the time
the object was instantiated.
Args:
now:
The wallclock time for this instance.
'''
self._now = now
now = property(GetNow, SetNow,
doc = 'The wallclock time for this status instance.')
def GetGeo(self):
return self._geo
def SetGeo(self, geo):
self._geo = geo
geo = property(GetGeo, SetGeo,
doc = '')
def GetPlace(self):
return self._place
def SetPlace(self, place):
self._place = place
place = property(GetPlace, SetPlace,
doc = '')
def GetCoordinates(self):
return self._coordinates
def SetCoordinates(self, coordinates):
self._coordinates = coordinates
coordinates = property(GetCoordinates, SetCoordinates,
doc = '')
def GetContributors(self):
return self._contributors
def SetContributors(self, contributors):
self._contributors = contributors
contributors = property(GetContributors, SetContributors,
doc = '')
def GetRetweeted_status(self):
return self._retweeted_status
def SetRetweeted_status(self, retweeted_status):
self._retweeted_status = retweeted_status
retweeted_status = property(GetRetweeted_status, SetRetweeted_status,
doc = '')
def GetRetweetCount(self):
return self._retweet_count
def SetRetweetCount(self, retweet_count):
self._retweet_count = retweet_count
retweet_count = property(GetRetweetCount, SetRetweetCount,
doc = '')
def GetCurrent_user_retweet(self):
return self._current_user_retweet
def SetCurrent_user_retweet(self, current_user_retweet):
self._current_user_retweet = current_user_retweet
current_user_retweet = property(GetCurrent_user_retweet, SetCurrent_user_retweet,
doc = '')
def GetPossibly_sensitive(self):
return self._possibly_sensitive
def SetPossibly_sensitive(self, possibly_sensitive):
self._possibly_sensitive = possibly_sensitive
possibly_sensitive = property(GetPossibly_sensitive, SetPossibly_sensitive,
doc = '')
def GetScopes(self):
return self._scopes
def SetScopes(self, scopes):
self._scopes = scopes
scopes = property(GetScopes, SetScopes, doc = '')
def GetWithheld_copyright(self):
return self._withheld_copyright
def SetWithheld_copyright(self, withheld_copyright):
self._withheld_copyright = withheld_copyright
withheld_copyright = property(GetWithheld_copyright, SetWithheld_copyright,
doc = '')
def GetWithheld_in_countries(self):
return self._withheld_in_countries
def SetWithheld_in_countries(self, withheld_in_countries):
self._withheld_in_countries = withheld_in_countries
withheld_in_countries = property(GetWithheld_in_countries, SetWithheld_in_countries,
doc = '')
def GetWithheld_scope(self):
return self._withheld_scope
def SetWithheld_scope(self, withheld_scope):
self._withheld_scope = withheld_scope
withheld_scope = property(GetWithheld_scope, SetWithheld_scope,
doc = '')
def __ne__(self, other):
return not self.__eq__(other)
def __eq__(self, other):
try:
return other and \
self.created_at == other.created_at and \
self.id == other.id and \
self.text == other.text and \
self.location == other.location and \
self.user == other.user and \
self.in_reply_to_screen_name == other.in_reply_to_screen_name and \
self.in_reply_to_user_id == other.in_reply_to_user_id and \
self.in_reply_to_status_id == other.in_reply_to_status_id and \
self.truncated == other.truncated and \
self.retweeted == other.retweeted and \
self.favorited == other.favorited and \
self.favorite_count == other.favorite_count and \
self.source == other.source and \
self.geo == other.geo and \
self.place == other.place and \
self.coordinates == other.coordinates and \
self.contributors == other.contributors and \
self.retweeted_status == other.retweeted_status and \
self.retweet_count == other.retweet_count and \
self.current_user_retweet == other.current_user_retweet and \
self.possibly_sensitive == other.possibly_sensitive and \
self.scopes == other.scopes and \
self.withheld_copyright == other.withheld_copyright and \
self.withheld_in_countries == other.withheld_in_countries and \
self.withheld_scope == other.withheld_scope
except AttributeError:
return False
def __str__(self):
'''A string representation of this twitter.Status instance.
The return value is the same as the JSON string representation.
Returns:
A string representation of this twitter.Status instance.
'''
return self.AsJsonString()
def AsJsonString(self):
'''A JSON string representation of this twitter.Status instance.
Returns:
A JSON string representation of this twitter.Status instance
'''
return simplejson.dumps(self.AsDict(), sort_keys = True)
def AsDict(self):
'''A dict representation of this twitter.Status instance.
The return value uses the same key names as the JSON representation.
Return:
A dict representing this twitter.Status instance
'''
data = {}
if self.created_at:
data['created_at'] = self.created_at
if self.favorited:
data['favorited'] = self.favorited
if self.favorite_count:
data['favorite_count'] = self.favorite_count
if self.id:
data['id'] = self.id
if self.text:
data['text'] = self.text
if self.location:
data['location'] = self.location
if self.user:
data['user'] = self.user.AsDict()
if self.in_reply_to_screen_name:
data['in_reply_to_screen_name'] = self.in_reply_to_screen_name
if self.in_reply_to_user_id:
data['in_reply_to_user_id'] = self.in_reply_to_user_id
if self.in_reply_to_status_id:
data['in_reply_to_status_id'] = self.in_reply_to_status_id
if self.truncated is not None:
data['truncated'] = self.truncated
if self.retweeted is not None:
data['retweeted'] = self.retweeted
if self.favorited is not None:
data['favorited'] = self.favorited
if self.source:
data['source'] = self.source
if self.geo:
data['geo'] = self.geo
if self.place:
data['place'] = self.place
if self.coordinates:
data['coordinates'] = self.coordinates
if self.contributors:
data['contributors'] = self.contributors
if self.hashtags:
data['hashtags'] = [h.text for h in self.hashtags]
if self.retweeted_status:
data['retweeted_status'] = self.retweeted_status.AsDict()
if self.retweet_count:
data['retweet_count'] = self.retweet_count
if self.urls:
data['urls'] = dict([(url.url, url.expanded_url) for url in self.urls])
if self.user_mentions:
data['user_mentions'] = [um.AsDict() for um in self.user_mentions]
if self.current_user_retweet:
data['current_user_retweet'] = self.current_user_retweet
if self.possibly_sensitive:
data['possibly_sensitive'] = self.possibly_sensitive
if self.scopes:
data['scopes'] = self.scopes
if self.withheld_copyright:
data['withheld_copyright'] = self.withheld_copyright
if self.withheld_in_countries:
data['withheld_in_countries'] = self.withheld_in_countries
if self.withheld_scope:
data['withheld_scope'] = self.withheld_scope
return data
@staticmethod
def NewFromJsonDict(data):
'''Create a new instance based on a JSON dict.
Args:
data: A JSON dict, as converted from the JSON in the twitter API
Returns:
A twitter.Status instance
'''
if 'user' in data:
user = User.NewFromJsonDict(data['user'])
else:
user = None
if 'retweeted_status' in data:
retweeted_status = Status.NewFromJsonDict(data['retweeted_status'])
else:
retweeted_status = None
if 'current_user_retweet' in data:
current_user_retweet = data['current_user_retweet']['id']
else:
current_user_retweet = None
urls = None
user_mentions = None
hashtags = None
media = None
if 'entities' in data:
if 'urls' in data['entities']:
urls = [Url.NewFromJsonDict(u) for u in data['entities']['urls']]
if 'user_mentions' in data['entities']:
user_mentions = [User.NewFromJsonDict(u) for u in data['entities']['user_mentions']]
if 'hashtags' in data['entities']:
hashtags = [Hashtag.NewFromJsonDict(h) for h in data['entities']['hashtags']]
if 'media' in data['entities']:
media = data['entities']['media']
else:
media = []
return Status(created_at = data.get('created_at', None),
favorited = data.get('favorited', None),
favorite_count = data.get('favorite_count', None),
id = data.get('id', None),
text = data.get('text', None),
location = data.get('location', None),
in_reply_to_screen_name = data.get('in_reply_to_screen_name', None),
in_reply_to_user_id = data.get('in_reply_to_user_id', None),
in_reply_to_status_id = data.get('in_reply_to_status_id', None),
truncated = data.get('truncated', None),
retweeted = data.get('retweeted', None),
source = data.get('source', None),
user = user,
urls = urls,
user_mentions = user_mentions,
hashtags = hashtags,
media = media,
geo = data.get('geo', None),
place = data.get('place', None),
coordinates = data.get('coordinates', None),
contributors = data.get('contributors', None),
retweeted_status = retweeted_status,
current_user_retweet = current_user_retweet,
retweet_count = data.get('retweet_count', None),
possibly_sensitive = data.get('possibly_sensitive', None),
scopes = data.get('scopes', None),
withheld_copyright = data.get('withheld_copyright', None),
withheld_in_countries = data.get('withheld_in_countries', None),
withheld_scope = data.get('withheld_scope', None))
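# Minimal usage sketch (illustrative, not part of the original library): the
# dict below is a hand-written stand-in for the JSON the REST API returns, so
# its field values are made up.
#
#   raw = {'id': 1,
#          'text': 'hello world',
#          'created_at': 'Sat Jan 27 04:17:38 +0000 2007',
#          'user': {'id': 2, 'screen_name': 'example'}}
#   status = Status.NewFromJsonDict(raw)
#   status.text              # -> 'hello world'
#   status.user.screen_name  # -> 'example'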
class User(object):
'''A class representing the User structure used by the twitter API.
The User structure exposes the following properties:
user.id
user.name
user.screen_name
user.location
user.description
user.profile_image_url
user.profile_background_tile
user.profile_background_image_url
user.profile_sidebar_fill_color
user.profile_background_color
user.profile_link_color
user.profile_text_color
user.protected
user.utc_offset
user.time_zone
user.url
user.status
user.statuses_count
user.followers_count
user.friends_count
user.favourites_count
user.geo_enabled
user.verified
user.lang
user.notifications
user.contributors_enabled
user.created_at
user.listed_count
'''
def __init__(self,
id = None,
name = None,
screen_name = None,
location = None,
description = None,
profile_image_url = None,
profile_background_tile = None,
profile_background_image_url = None,
profile_sidebar_fill_color = None,
profile_background_color = None,
profile_link_color = None,
profile_text_color = None,
protected = None,
utc_offset = None,
time_zone = None,
followers_count = None,
friends_count = None,
statuses_count = None,
favourites_count = None,
url = None,
status = None,
geo_enabled = None,
verified = None,
lang = None,
notifications = None,
contributors_enabled = None,
created_at = None,
listed_count = None):
self.id = id
self.name = name
self.screen_name = screen_name
self.location = location
self.description = description
self.profile_image_url = profile_image_url
self.profile_background_tile = profile_background_tile
self.profile_background_image_url = profile_background_image_url
self.profile_sidebar_fill_color = profile_sidebar_fill_color
self.profile_background_color = profile_background_color
self.profile_link_color = profile_link_color
self.profile_text_color = profile_text_color
self.protected = protected
self.utc_offset = utc_offset
self.time_zone = time_zone
self.followers_count = followers_count
self.friends_count = friends_count
self.statuses_count = statuses_count
self.favourites_count = favourites_count
self.url = url
self.status = status
self.geo_enabled = geo_enabled
self.verified = verified
self.lang = lang
self.notifications = notifications
self.contributors_enabled = contributors_enabled
self.created_at = created_at
self.listed_count = listed_count
def GetId(self):
'''Get the unique id of this user.
Returns:
The unique id of this user
'''
return self._id
def SetId(self, id):
'''Set the unique id of this user.
Args:
id: The unique id of this user.
'''
self._id = id
id = property(GetId, SetId,
doc = 'The unique id of this user.')
def GetName(self):
'''Get the real name of this user.
Returns:
The real name of this user
'''
return self._name
def SetName(self, name):
'''Set the real name of this user.
Args:
name: The real name of this user
'''
self._name = name
name = property(GetName, SetName,
doc = 'The real name of this user.')
def GetScreenName(self):
'''Get the short twitter name of this user.
Returns:
The short twitter name of this user
'''
return self._screen_name
def SetScreenName(self, screen_name):
'''Set the short twitter name of this user.
Args:
screen_name: the short twitter name of this user
'''
self._screen_name = screen_name
screen_name = property(GetScreenName, SetScreenName,
doc = 'The short twitter name of this user.')
def GetLocation(self):
'''Get the geographic location of this user.
Returns:
The geographic location of this user
'''
return self._location
def SetLocation(self, location):
'''Set the geographic location of this user.
Args:
location: The geographic location of this user
'''
self._location = location
location = property(GetLocation, SetLocation,
doc = 'The geographic location of this user.')
def GetDescription(self):
'''Get the short text description of this user.
Returns:
The short text description of this user
'''
return self._description
def SetDescription(self, description):
'''Set the short text description of this user.
Args:
description: The short text description of this user
'''
self._description = description
description = property(GetDescription, SetDescription,
doc = 'The short text description of this user.')
def GetUrl(self):
'''Get the homepage url of this user.
Returns:
The homepage url of this user
'''
return self._url
def SetUrl(self, url):
'''Set the homepage url of this user.
Args:
url: The homepage url of this user
'''
self._url = url
url = property(GetUrl, SetUrl,
doc = 'The homepage url of this user.')
def GetProfileImageUrl(self):
'''Get the url of the thumbnail of this user.
Returns:
The url of the thumbnail of this user
'''
return self._profile_image_url
def SetProfileImageUrl(self, profile_image_url):
'''Set the url of the thumbnail of this user.
Args:
profile_image_url: The url of the thumbnail of this user
'''
self._profile_image_url = profile_image_url
profile_image_url = property(GetProfileImageUrl, SetProfileImageUrl,
doc = 'The url of the thumbnail of this user.')
def GetProfileBackgroundTile(self):
'''Boolean for whether to tile the profile background image.
Returns:
True if the background is to be tiled, False if not, None if unset.
'''
return self._profile_background_tile
def SetProfileBackgroundTile(self, profile_background_tile):
'''Set the boolean flag for whether to tile the profile background image.
Args:
profile_background_tile: Boolean flag for whether to tile or not.
'''
self._profile_background_tile = profile_background_tile
profile_background_tile = property(GetProfileBackgroundTile, SetProfileBackgroundTile,
doc = 'Boolean for whether to tile the background image.')
def GetProfileBackgroundImageUrl(self):
return self._profile_background_image_url
def SetProfileBackgroundImageUrl(self, profile_background_image_url):
self._profile_background_image_url = profile_background_image_url
profile_background_image_url = property(GetProfileBackgroundImageUrl, SetProfileBackgroundImageUrl,
doc = 'The url of the profile background of this user.')
def GetProfileSidebarFillColor(self):
return self._profile_sidebar_fill_color
def SetProfileSidebarFillColor(self, profile_sidebar_fill_color):
self._profile_sidebar_fill_color = profile_sidebar_fill_color
profile_sidebar_fill_color = property(GetProfileSidebarFillColor, SetProfileSidebarFillColor)
def GetProfileBackgroundColor(self):
return self._profile_background_color
def SetProfileBackgroundColor(self, profile_background_color):
self._profile_background_color = profile_background_color
profile_background_color = property(GetProfileBackgroundColor, SetProfileBackgroundColor)
def GetProfileLinkColor(self):
return self._profile_link_color
def SetProfileLinkColor(self, profile_link_color):
self._profile_link_color = profile_link_color
profile_link_color = property(GetProfileLinkColor, SetProfileLinkColor)
def GetProfileTextColor(self):
return self._profile_text_color
def SetProfileTextColor(self, profile_text_color):
self._profile_text_color = profile_text_color
profile_text_color = property(GetProfileTextColor, SetProfileTextColor)
def GetProtected(self):
return self._protected
def SetProtected(self, protected):
self._protected = protected
protected = property(GetProtected, SetProtected)
def GetUtcOffset(self):
return self._utc_offset
def SetUtcOffset(self, utc_offset):
self._utc_offset = utc_offset
utc_offset = property(GetUtcOffset, SetUtcOffset)
def GetTimeZone(self):
'''Returns the current time zone string for the user.
Returns:
The descriptive time zone string for the user.
'''
return self._time_zone
def SetTimeZone(self, time_zone):
'''Sets the user's time zone string.
Args:
time_zone:
The descriptive time zone to assign for the user.
'''
self._time_zone = time_zone
time_zone = property(GetTimeZone, SetTimeZone)
def GetStatus(self):
'''Get the latest twitter.Status of this user.
Returns:
The latest twitter.Status of this user
'''
return self._status
def SetStatus(self, status):
'''Set the latest twitter.Status of this user.
Args:
status:
The latest twitter.Status of this user
'''
self._status = status
status = property(GetStatus, SetStatus,
doc = 'The latest twitter.Status of this user.')
def GetFriendsCount(self):
'''Get the friend count for this user.
Returns:
The number of users this user has befriended.
'''
return self._friends_count
def SetFriendsCount(self, count):
'''Set the friend count for this user.
Args:
count:
The number of users this user has befriended.
'''
self._friends_count = count
friends_count = property(GetFriendsCount, SetFriendsCount,
doc = 'The number of friends for this user.')
def GetListedCount(self):
'''Get the listed count for this user.
Returns:
The number of lists this user belongs to.
'''
return self._listed_count
def SetListedCount(self, count):
'''Set the listed count for this user.
Args:
count:
The number of lists this user belongs to.
'''
self._listed_count = count
listed_count = property(GetListedCount, SetListedCount,
doc = 'The number of lists this user belongs to.')
def GetFollowersCount(self):
'''Get the follower count for this user.
Returns:
The number of users following this user.
'''
return self._followers_count
def SetFollowersCount(self, count):
'''Set the follower count for this user.
Args:
count:
The number of users following this user.
'''
self._followers_count = count
followers_count = property(GetFollowersCount, SetFollowersCount,
doc = 'The number of users following this user.')
def GetStatusesCount(self):
'''Get the number of status updates for this user.
Returns:
The number of status updates for this user.
'''
return self._statuses_count
def SetStatusesCount(self, count):
'''Set the status update count for this user.
Args:
count:
The number of updates for this user.
'''
self._statuses_count = count
statuses_count = property(GetStatusesCount, SetStatusesCount,
doc = 'The number of updates for this user.')
def GetFavouritesCount(self):
'''Get the number of favourites for this user.
Returns:
The number of favourites for this user.
'''
return self._favourites_count
def SetFavouritesCount(self, count):
'''Set the favourite count for this user.
Args:
count:
The number of favourites for this user.
'''
self._favourites_count = count
favourites_count = property(GetFavouritesCount, SetFavouritesCount,
doc = 'The number of favourites for this user.')
def GetGeoEnabled(self):
'''Get the setting of geo_enabled for this user.
Returns:
True/False if Geo tagging is enabled
'''
return self._geo_enabled
def SetGeoEnabled(self, geo_enabled):
    '''Set twitter.geo_enabled for this user.
Args:
geo_enabled:
True/False if Geo tagging is to be enabled
'''
self._geo_enabled = geo_enabled
geo_enabled = property(GetGeoEnabled, SetGeoEnabled,
doc = 'The value of twitter.geo_enabled for this user.')
def GetVerified(self):
'''Get the setting of verified for this user.
Returns:
True/False if user is a verified account
'''
return self._verified
def SetVerified(self, verified):
'''Set twitter.verified for this user.
Args:
verified:
True/False if user is a verified account
'''
self._verified = verified
verified = property(GetVerified, SetVerified,
doc = 'The value of twitter.verified for this user.')
def GetLang(self):
'''Get the setting of lang for this user.
Returns:
language code of the user
'''
return self._lang
def SetLang(self, lang):
'''Set twitter.lang for this user.
Args:
lang:
language code for the user
'''
self._lang = lang
lang = property(GetLang, SetLang,
doc = 'The value of twitter.lang for this user.')
def GetNotifications(self):
'''Get the setting of notifications for this user.
Returns:
True/False for the notifications setting of the user
'''
return self._notifications
def SetNotifications(self, notifications):
'''Set twitter.notifications for this user.
Args:
notifications:
True/False notifications setting for the user
'''
self._notifications = notifications
notifications = property(GetNotifications, SetNotifications,
doc = 'The value of twitter.notifications for this user.')
def GetContributorsEnabled(self):
'''Get the setting of contributors_enabled for this user.
Returns:
True/False contributors_enabled of the user
'''
return self._contributors_enabled
def SetContributorsEnabled(self, contributors_enabled):
'''Set twitter.contributors_enabled for this user.
Args:
contributors_enabled:
True/False contributors_enabled setting for the user
'''
self._contributors_enabled = contributors_enabled
contributors_enabled = property(GetContributorsEnabled, SetContributorsEnabled,
doc = 'The value of twitter.contributors_enabled for this user.')
def GetCreatedAt(self):
'''Get the setting of created_at for this user.
Returns:
created_at value of the user
'''
return self._created_at
def SetCreatedAt(self, created_at):
'''Set twitter.created_at for this user.
Args:
created_at:
created_at value for the user
'''
self._created_at = created_at
created_at = property(GetCreatedAt, SetCreatedAt,
doc = 'The value of twitter.created_at for this user.')
def __ne__(self, other):
return not self.__eq__(other)
def __eq__(self, other):
try:
return other and \
self.id == other.id and \
self.name == other.name and \
self.screen_name == other.screen_name and \
self.location == other.location and \
self.description == other.description and \
self.profile_image_url == other.profile_image_url and \
self.profile_background_tile == other.profile_background_tile and \
self.profile_background_image_url == other.profile_background_image_url and \
self.profile_sidebar_fill_color == other.profile_sidebar_fill_color and \
self.profile_background_color == other.profile_background_color and \
self.profile_link_color == other.profile_link_color and \
self.profile_text_color == other.profile_text_color and \
self.protected == other.protected and \
self.utc_offset == other.utc_offset and \
self.time_zone == other.time_zone and \
self.url == other.url and \
self.statuses_count == other.statuses_count and \
self.followers_count == other.followers_count and \
self.favourites_count == other.favourites_count and \
self.friends_count == other.friends_count and \
self.status == other.status and \
self.geo_enabled == other.geo_enabled and \
self.verified == other.verified and \
self.lang == other.lang and \
self.notifications == other.notifications and \
self.contributors_enabled == other.contributors_enabled and \
self.created_at == other.created_at and \
self.listed_count == other.listed_count
except AttributeError:
return False
def __str__(self):
'''A string representation of this twitter.User instance.
The return value is the same as the JSON string representation.
Returns:
A string representation of this twitter.User instance.
'''
return self.AsJsonString()
def AsJsonString(self):
'''A JSON string representation of this twitter.User instance.
Returns:
A JSON string representation of this twitter.User instance
'''
return simplejson.dumps(self.AsDict(), sort_keys = True)
def AsDict(self):
'''A dict representation of this twitter.User instance.
The return value uses the same key names as the JSON representation.
Return:
A dict representing this twitter.User instance
'''
data = {}
if self.id:
data['id'] = self.id
if self.name:
data['name'] = self.name
if self.screen_name:
data['screen_name'] = self.screen_name
if self.location:
data['location'] = self.location
if self.description:
data['description'] = self.description
if self.profile_image_url:
data['profile_image_url'] = self.profile_image_url
if self.profile_background_tile is not None:
data['profile_background_tile'] = self.profile_background_tile
if self.profile_background_image_url:
      data['profile_background_image_url'] = self.profile_background_image_url
if self.profile_background_color:
data['profile_background_color'] = self.profile_background_color
if self.profile_link_color:
data['profile_link_color'] = self.profile_link_color
if self.profile_text_color:
data['profile_text_color'] = self.profile_text_color
if self.protected is not None:
data['protected'] = self.protected
if self.utc_offset:
data['utc_offset'] = self.utc_offset
if self.time_zone:
data['time_zone'] = self.time_zone
if self.url:
data['url'] = self.url
if self.status:
data['status'] = self.status.AsDict()
if self.friends_count:
data['friends_count'] = self.friends_count
if self.followers_count:
data['followers_count'] = self.followers_count
if self.statuses_count:
data['statuses_count'] = self.statuses_count
if self.favourites_count:
data['favourites_count'] = self.favourites_count
if self.geo_enabled:
data['geo_enabled'] = self.geo_enabled
if self.verified:
data['verified'] = self.verified
if self.lang:
data['lang'] = self.lang
if self.notifications:
data['notifications'] = self.notifications
if self.contributors_enabled:
data['contributors_enabled'] = self.contributors_enabled
if self.created_at:
data['created_at'] = self.created_at
if self.listed_count:
data['listed_count'] = self.listed_count
return data
@staticmethod
def NewFromJsonDict(data):
'''Create a new instance based on a JSON dict.
Args:
data:
A JSON dict, as converted from the JSON in the twitter API
Returns:
A twitter.User instance
'''
if 'status' in data:
status = Status.NewFromJsonDict(data['status'])
else:
status = None
return User(id = data.get('id', None),
name = data.get('name', None),
screen_name = data.get('screen_name', None),
location = data.get('location', None),
description = data.get('description', None),
statuses_count = data.get('statuses_count', None),
followers_count = data.get('followers_count', None),
favourites_count = data.get('favourites_count', None),
friends_count = data.get('friends_count', None),
profile_image_url = data.get('profile_image_url_https', data.get('profile_image_url', None)),
profile_background_tile = data.get('profile_background_tile', None),
profile_background_image_url = data.get('profile_background_image_url', None),
profile_sidebar_fill_color = data.get('profile_sidebar_fill_color', None),
profile_background_color = data.get('profile_background_color', None),
profile_link_color = data.get('profile_link_color', None),
profile_text_color = data.get('profile_text_color', None),
protected = data.get('protected', None),
utc_offset = data.get('utc_offset', None),
time_zone = data.get('time_zone', None),
url = data.get('url', None),
status = status,
geo_enabled = data.get('geo_enabled', None),
verified = data.get('verified', None),
lang = data.get('lang', None),
notifications = data.get('notifications', None),
contributors_enabled = data.get('contributors_enabled', None),
created_at = data.get('created_at', None),
listed_count = data.get('listed_count', None))
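# Usage sketch for User.NewFromJsonDict (placeholder values only). Note that
# profile_image_url prefers the 'profile_image_url_https' key when present:
#
#   raw = {'id': 2, 'name': 'Example User', 'screen_name': 'example',
#          'profile_image_url_https': 'https://example.com/avatar.png',
#          'followers_count': 10}
#   user = User.NewFromJsonDict(raw)
#   assert user.screen_name == 'example'
#   assert user.AsDict()['followers_count'] == 10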
class List(object):
'''A class representing the List structure used by the twitter API.
The List structure exposes the following properties:
list.id
list.name
list.slug
list.description
list.full_name
list.mode
list.uri
list.member_count
list.subscriber_count
list.following
'''
def __init__(self,
id = None,
name = None,
slug = None,
description = None,
full_name = None,
mode = None,
uri = None,
member_count = None,
subscriber_count = None,
following = None,
user = None):
self.id = id
self.name = name
self.slug = slug
self.description = description
self.full_name = full_name
self.mode = mode
self.uri = uri
self.member_count = member_count
self.subscriber_count = subscriber_count
self.following = following
self.user = user
def GetId(self):
'''Get the unique id of this list.
Returns:
The unique id of this list
'''
return self._id
def SetId(self, id):
'''Set the unique id of this list.
Args:
id:
The unique id of this list.
'''
self._id = id
id = property(GetId, SetId,
doc = 'The unique id of this list.')
def GetName(self):
'''Get the real name of this list.
Returns:
The real name of this list
'''
return self._name
def SetName(self, name):
'''Set the real name of this list.
Args:
name:
The real name of this list
'''
self._name = name
name = property(GetName, SetName,
doc = 'The real name of this list.')
def GetSlug(self):
'''Get the slug of this list.
Returns:
The slug of this list
'''
return self._slug
def SetSlug(self, slug):
'''Set the slug of this list.
Args:
slug:
The slug of this list.
'''
self._slug = slug
slug = property(GetSlug, SetSlug,
doc = 'The slug of this list.')
def GetDescription(self):
'''Get the description of this list.
Returns:
The description of this list
'''
return self._description
def SetDescription(self, description):
'''Set the description of this list.
Args:
description:
The description of this list.
'''
self._description = description
description = property(GetDescription, SetDescription,
doc = 'The description of this list.')
def GetFull_name(self):
'''Get the full_name of this list.
Returns:
The full_name of this list
'''
return self._full_name
def SetFull_name(self, full_name):
'''Set the full_name of this list.
Args:
full_name:
The full_name of this list.
'''
self._full_name = full_name
full_name = property(GetFull_name, SetFull_name,
doc = 'The full_name of this list.')
def GetMode(self):
'''Get the mode of this list.
Returns:
The mode of this list
'''
return self._mode
def SetMode(self, mode):
'''Set the mode of this list.
Args:
mode:
The mode of this list.
'''
self._mode = mode
mode = property(GetMode, SetMode,
doc = 'The mode of this list.')
def GetUri(self):
'''Get the uri of this list.
Returns:
The uri of this list
'''
return self._uri
def SetUri(self, uri):
'''Set the uri of this list.
Args:
uri:
The uri of this list.
'''
self._uri = uri
uri = property(GetUri, SetUri,
doc = 'The uri of this list.')
def GetMember_count(self):
'''Get the member_count of this list.
Returns:
The member_count of this list
'''
return self._member_count
def SetMember_count(self, member_count):
'''Set the member_count of this list.
Args:
member_count:
The member_count of this list.
'''
self._member_count = member_count
member_count = property(GetMember_count, SetMember_count,
doc = 'The member_count of this list.')
def GetSubscriber_count(self):
'''Get the subscriber_count of this list.
Returns:
The subscriber_count of this list
'''
return self._subscriber_count
def SetSubscriber_count(self, subscriber_count):
'''Set the subscriber_count of this list.
Args:
subscriber_count:
The subscriber_count of this list.
'''
self._subscriber_count = subscriber_count
subscriber_count = property(GetSubscriber_count, SetSubscriber_count,
doc = 'The subscriber_count of this list.')
def GetFollowing(self):
'''Get the following status of this list.
Returns:
The following status of this list
'''
return self._following
def SetFollowing(self, following):
'''Set the following status of this list.
Args:
following:
        The following status of this list.
'''
self._following = following
following = property(GetFollowing, SetFollowing,
doc = 'The following status of this list.')
def GetUser(self):
'''Get the user of this list.
Returns:
The owner of this list
'''
return self._user
def SetUser(self, user):
'''Set the user of this list.
Args:
user:
The owner of this list.
'''
self._user = user
user = property(GetUser, SetUser,
doc = 'The owner of this list.')
def __ne__(self, other):
return not self.__eq__(other)
def __eq__(self, other):
try:
return other and \
self.id == other.id and \
self.name == other.name and \
self.slug == other.slug and \
self.description == other.description and \
self.full_name == other.full_name and \
self.mode == other.mode and \
self.uri == other.uri and \
self.member_count == other.member_count and \
self.subscriber_count == other.subscriber_count and \
self.following == other.following and \
self.user == other.user
except AttributeError:
return False
def __str__(self):
'''A string representation of this twitter.List instance.
The return value is the same as the JSON string representation.
Returns:
A string representation of this twitter.List instance.
'''
return self.AsJsonString()
def AsJsonString(self):
'''A JSON string representation of this twitter.List instance.
Returns:
A JSON string representation of this twitter.List instance
'''
return simplejson.dumps(self.AsDict(), sort_keys = True)
def AsDict(self):
'''A dict representation of this twitter.List instance.
The return value uses the same key names as the JSON representation.
Return:
A dict representing this twitter.List instance
'''
data = {}
if self.id:
data['id'] = self.id
if self.name:
data['name'] = self.name
if self.slug:
data['slug'] = self.slug
if self.description:
data['description'] = self.description
if self.full_name:
data['full_name'] = self.full_name
if self.mode:
data['mode'] = self.mode
if self.uri:
data['uri'] = self.uri
if self.member_count is not None:
data['member_count'] = self.member_count
if self.subscriber_count is not None:
data['subscriber_count'] = self.subscriber_count
if self.following is not None:
data['following'] = self.following
if self.user is not None:
data['user'] = self.user.AsDict()
return data
@staticmethod
def NewFromJsonDict(data):
'''Create a new instance based on a JSON dict.
Args:
data:
A JSON dict, as converted from the JSON in the twitter API
Returns:
A twitter.List instance
'''
if 'user' in data:
user = User.NewFromJsonDict(data['user'])
else:
user = None
return List(id = data.get('id', None),
name = data.get('name', None),
slug = data.get('slug', None),
description = data.get('description', None),
full_name = data.get('full_name', None),
mode = data.get('mode', None),
uri = data.get('uri', None),
member_count = data.get('member_count', None),
subscriber_count = data.get('subscriber_count', None),
following = data.get('following', None),
user = user)
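# Usage sketch for List.NewFromJsonDict (placeholder values). AsDict keeps
# zero-valued counts because it checks 'is not None' rather than truthiness:
#
#   raw = {'id': 3, 'name': 'reading', 'slug': 'reading',
#          'mode': 'public', 'member_count': 0, 'subscriber_count': 0}
#   reading = List.NewFromJsonDict(raw)
#   assert reading.slug == 'reading'
#   assert reading.AsDict()['member_count'] == 0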
class DirectMessage(object):
'''A class representing the DirectMessage structure used by the twitter API.
The DirectMessage structure exposes the following properties:
direct_message.id
direct_message.created_at
direct_message.created_at_in_seconds # read only
direct_message.sender_id
direct_message.sender_screen_name
direct_message.recipient_id
direct_message.recipient_screen_name
direct_message.text
'''
def __init__(self,
id = None,
created_at = None,
sender_id = None,
sender_screen_name = None,
recipient_id = None,
recipient_screen_name = None,
text = None):
'''An object to hold a Twitter direct message.
This class is normally instantiated by the twitter.Api class and
returned in a sequence.
Note: Dates are posted in the form "Sat Jan 27 04:17:38 +0000 2007"
Args:
id:
The unique id of this direct message. [Optional]
created_at:
The time this direct message was posted. [Optional]
sender_id:
The id of the twitter user that sent this message. [Optional]
sender_screen_name:
The name of the twitter user that sent this message. [Optional]
recipient_id:
        The id of the twitter user that received this message. [Optional]
      recipient_screen_name:
        The name of the twitter user that received this message. [Optional]
text:
The text of this direct message. [Optional]
'''
self.id = id
self.created_at = created_at
self.sender_id = sender_id
self.sender_screen_name = sender_screen_name
self.recipient_id = recipient_id
self.recipient_screen_name = recipient_screen_name
self.text = text
def GetId(self):
'''Get the unique id of this direct message.
Returns:
The unique id of this direct message
'''
return self._id
def SetId(self, id):
'''Set the unique id of this direct message.
Args:
id:
The unique id of this direct message
'''
self._id = id
id = property(GetId, SetId,
doc = 'The unique id of this direct message.')
def GetCreatedAt(self):
'''Get the time this direct message was posted.
Returns:
The time this direct message was posted
'''
return self._created_at
def SetCreatedAt(self, created_at):
'''Set the time this direct message was posted.
Args:
created_at:
The time this direct message was created
'''
self._created_at = created_at
created_at = property(GetCreatedAt, SetCreatedAt,
doc = 'The time this direct message was posted.')
def GetCreatedAtInSeconds(self):
'''Get the time this direct message was posted, in seconds since the epoch.
Returns:
The time this direct message was posted, in seconds since the epoch.
'''
return calendar.timegm(rfc822.parsedate(self.created_at))
created_at_in_seconds = property(GetCreatedAtInSeconds,
doc = "The time this direct message was "
"posted, in seconds since the epoch")
def GetSenderId(self):
'''Get the unique sender id of this direct message.
Returns:
The unique sender id of this direct message
'''
return self._sender_id
def SetSenderId(self, sender_id):
'''Set the unique sender id of this direct message.
Args:
sender_id:
The unique sender id of this direct message
'''
self._sender_id = sender_id
sender_id = property(GetSenderId, SetSenderId,
doc = 'The unique sender id of this direct message.')
def GetSenderScreenName(self):
'''Get the unique sender screen name of this direct message.
Returns:
The unique sender screen name of this direct message
'''
return self._sender_screen_name
def SetSenderScreenName(self, sender_screen_name):
'''Set the unique sender screen name of this direct message.
Args:
sender_screen_name:
The unique sender screen name of this direct message
'''
self._sender_screen_name = sender_screen_name
sender_screen_name = property(GetSenderScreenName, SetSenderScreenName,
doc = 'The unique sender screen name of this direct message.')
def GetRecipientId(self):
'''Get the unique recipient id of this direct message.
Returns:
The unique recipient id of this direct message
'''
return self._recipient_id
def SetRecipientId(self, recipient_id):
'''Set the unique recipient id of this direct message.
Args:
recipient_id:
The unique recipient id of this direct message
'''
self._recipient_id = recipient_id
recipient_id = property(GetRecipientId, SetRecipientId,
doc = 'The unique recipient id of this direct message.')
def GetRecipientScreenName(self):
'''Get the unique recipient screen name of this direct message.
Returns:
The unique recipient screen name of this direct message
'''
return self._recipient_screen_name
def SetRecipientScreenName(self, recipient_screen_name):
'''Set the unique recipient screen name of this direct message.
Args:
recipient_screen_name:
The unique recipient screen name of this direct message
'''
self._recipient_screen_name = recipient_screen_name
recipient_screen_name = property(GetRecipientScreenName, SetRecipientScreenName,
doc = 'The unique recipient screen name of this direct message.')
def GetText(self):
'''Get the text of this direct message.
Returns:
The text of this direct message.
'''
return self._text
def SetText(self, text):
'''Set the text of this direct message.
Args:
text:
The text of this direct message
'''
self._text = text
text = property(GetText, SetText,
doc = 'The text of this direct message')
def __ne__(self, other):
return not self.__eq__(other)
def __eq__(self, other):
try:
return other and \
self.id == other.id and \
self.created_at == other.created_at and \
self.sender_id == other.sender_id and \
self.sender_screen_name == other.sender_screen_name and \
self.recipient_id == other.recipient_id and \
self.recipient_screen_name == other.recipient_screen_name and \
self.text == other.text
except AttributeError:
return False
def __str__(self):
'''A string representation of this twitter.DirectMessage instance.
The return value is the same as the JSON string representation.
Returns:
A string representation of this twitter.DirectMessage instance.
'''
return self.AsJsonString()
def AsJsonString(self):
'''A JSON string representation of this twitter.DirectMessage instance.
Returns:
A JSON string representation of this twitter.DirectMessage instance
'''
return simplejson.dumps(self.AsDict(), sort_keys = True)
def AsDict(self):
'''A dict representation of this twitter.DirectMessage instance.
The return value uses the same key names as the JSON representation.
Return:
A dict representing this twitter.DirectMessage instance
'''
data = {}
if self.id:
data['id'] = self.id
if self.created_at:
data['created_at'] = self.created_at
if self.sender_id:
data['sender_id'] = self.sender_id
if self.sender_screen_name:
data['sender_screen_name'] = self.sender_screen_name
if self.recipient_id:
data['recipient_id'] = self.recipient_id
if self.recipient_screen_name:
data['recipient_screen_name'] = self.recipient_screen_name
if self.text:
data['text'] = self.text
return data
@staticmethod
def NewFromJsonDict(data):
'''Create a new instance based on a JSON dict.
Args:
data:
A JSON dict, as converted from the JSON in the twitter API
Returns:
A twitter.DirectMessage instance
'''
return DirectMessage(created_at = data.get('created_at', None),
recipient_id = data.get('recipient_id', None),
sender_id = data.get('sender_id', None),
text = data.get('text', None),
sender_screen_name = data.get('sender_screen_name', None),
id = data.get('id', None),
recipient_screen_name = data.get('recipient_screen_name', None))
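# Usage sketch for DirectMessage.NewFromJsonDict (placeholder values). The
# created_at string must follow the "Sat Jan 27 04:17:38 +0000 2007" form so
# that created_at_in_seconds can parse it with rfc822/calendar:
#
#   raw = {'id': 4, 'sender_screen_name': 'alice',
#          'recipient_screen_name': 'bob', 'text': 'hi there',
#          'created_at': 'Sat Jan 27 04:17:38 +0000 2007'}
#   dm = DirectMessage.NewFromJsonDict(raw)
#   assert dm.text == 'hi there'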
class Hashtag(object):
''' A class representing a twitter hashtag
'''
def __init__(self,
text = None):
self.text = text
@staticmethod
def NewFromJsonDict(data):
'''Create a new instance based on a JSON dict.
Args:
data:
A JSON dict, as converted from the JSON in the twitter API
Returns:
A twitter.Hashtag instance
'''
return Hashtag(text = data.get('text', None))
class Trend(object):
''' A class representing a trending topic
'''
def __init__(self, name = None, query = None, timestamp = None, url = None):
self.name = name
self.query = query
self.timestamp = timestamp
self.url = url
def __str__(self):
return 'Name: %s\nQuery: %s\nTimestamp: %s\nSearch URL: %s\n' % (self.name, self.query, self.timestamp, self.url)
def __ne__(self, other):
return not self.__eq__(other)
def __eq__(self, other):
try:
return other and \
self.name == other.name and \
self.query == other.query and \
self.timestamp == other.timestamp and \
             self.url == other.url
except AttributeError:
return False
@staticmethod
def NewFromJsonDict(data, timestamp = None):
'''Create a new instance based on a JSON dict
Args:
data:
A JSON dict
timestamp:
Gets set as the timestamp property of the new object
Returns:
A twitter.Trend object
'''
return Trend(name = data.get('name', None),
query = data.get('query', None),
url = data.get('url', None),
timestamp = timestamp)
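# Usage sketch for Trend.NewFromJsonDict (placeholder values). The timestamp
# is not part of the trend dict itself; it is passed through from the
# surrounding payload's 'as_of' field (see GetTrendsWoeid below):
#
#   raw = {'name': '#python', 'query': '%23python',
#          'url': 'https://twitter.com/search?q=%23python'}
#   trend = Trend.NewFromJsonDict(raw, timestamp = '2013-01-01T00:00:00Z')
#   assert trend.name == '#python'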
class Url(object):
  '''A class representing a URL contained in a tweet'''
def __init__(self,
url = None,
expanded_url = None):
self.url = url
self.expanded_url = expanded_url
@staticmethod
def NewFromJsonDict(data):
'''Create a new instance based on a JSON dict.
Args:
data:
A JSON dict, as converted from the JSON in the twitter API
Returns:
A twitter.Url instance
'''
return Url(url = data.get('url', None),
expanded_url = data.get('expanded_url', None))
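# Usage sketch for Url.NewFromJsonDict (placeholder values). Status.AsDict
# uses these objects to build its {short url: expanded url} mapping:
#
#   u = Url.NewFromJsonDict({'url': 'http://t.co/xyz',
#                            'expanded_url': 'http://example.com/page'})
#   assert u.expanded_url == 'http://example.com/page'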
class Api(object):
'''A python interface into the Twitter API
By default, the Api caches results for 1 minute.
Example usage:
To create an instance of the twitter.Api class, with no authentication:
>>> import twitter
>>> api = twitter.Api()
To fetch the most recently posted public twitter status messages:
>>> statuses = api.GetPublicTimeline()
>>> print [s.user.name for s in statuses]
[u'DeWitt', u'Kesuke Miyagi', u'ev', u'Buzz Andersen', u'Biz Stone'] #...
To fetch a single user's public status messages, where "user" is either
a Twitter "short name" or their user id.
>>> statuses = api.GetUserTimeline(user)
>>> print [s.text for s in statuses]
To use authentication, instantiate the twitter.Api class with a
consumer key and secret; and the oAuth key and secret:
>>> api = twitter.Api(consumer_key='twitter consumer key',
consumer_secret='twitter consumer secret',
access_token_key='the_key_given',
access_token_secret='the_key_secret')
To fetch your friends (after being authenticated):
>>> users = api.GetFriends()
>>> print [u.name for u in users]
To post a twitter status message (after being authenticated):
>>> status = api.PostUpdate('I love python-twitter!')
>>> print status.text
I love python-twitter!
There are many other methods, including:
>>> api.PostUpdates(status)
>>> api.PostDirectMessage(user, text)
>>> api.GetUser(user)
>>> api.GetReplies()
>>> api.GetUserTimeline(user)
    >>> api.GetHomeTimeline()
>>> api.GetStatus(id)
>>> api.DestroyStatus(id)
>>> api.GetFriends(user)
>>> api.GetFollowers()
>>> api.GetFeatured()
>>> api.GetDirectMessages()
>>> api.GetSentDirectMessages()
>>> api.PostDirectMessage(user, text)
>>> api.DestroyDirectMessage(id)
>>> api.DestroyFriendship(user)
>>> api.CreateFriendship(user)
>>> api.GetUserByEmail(email)
>>> api.VerifyCredentials()
'''
DEFAULT_CACHE_TIMEOUT = 60 # cache for 1 minute
_API_REALM = 'Twitter API'
def __init__(self,
consumer_key = None,
consumer_secret = None,
access_token_key = None,
access_token_secret = None,
input_encoding = None,
request_headers = None,
cache = DEFAULT_CACHE,
shortner = None,
base_url = None,
use_gzip_compression = False,
debugHTTP = False):
'''Instantiate a new twitter.Api object.
Args:
consumer_key:
Your Twitter user's consumer_key.
consumer_secret:
Your Twitter user's consumer_secret.
access_token_key:
The oAuth access token key value you retrieved
from running get_access_token.py.
access_token_secret:
The oAuth access token's secret, also retrieved
from the get_access_token.py run.
input_encoding:
The encoding used to encode input strings. [Optional]
request_header:
A dictionary of additional HTTP request headers. [Optional]
cache:
The cache instance to use. Defaults to DEFAULT_CACHE.
Use None to disable caching. [Optional]
shortner:
The shortner instance to use. Defaults to None.
See shorten_url.py for an example shortner. [Optional]
base_url:
The base URL to use to contact the Twitter API.
Defaults to https://api.twitter.com. [Optional]
use_gzip_compression:
        Set to True to enable gzip compression for any call
made to Twitter. Defaults to False. [Optional]
debugHTTP:
Set to True to enable debug output from urllib2 when performing
any HTTP requests. Defaults to False. [Optional]
'''
self.SetCache(cache)
self._urllib = urllib2
self._cache_timeout = Api.DEFAULT_CACHE_TIMEOUT
self._input_encoding = input_encoding
self._use_gzip = use_gzip_compression
self._debugHTTP = debugHTTP
self._oauth_consumer = None
self._shortlink_size = 19
self._InitializeRequestHeaders(request_headers)
self._InitializeUserAgent()
self._InitializeDefaultParameters()
if base_url is None:
self.base_url = 'https://api.twitter.com/1.1'
else:
self.base_url = base_url
if consumer_key is not None and (access_token_key is None or
access_token_secret is None):
print >> sys.stderr, 'Twitter now requires an oAuth Access Token for API calls.'
      print >> sys.stderr, 'If you are using this library from a command line utility, please'
      print >> sys.stderr, 'run the included get_access_token.py tool to generate one.'
raise TwitterError('Twitter requires oAuth Access Token for all API access')
self.SetCredentials(consumer_key, consumer_secret, access_token_key, access_token_secret)
def SetCredentials(self,
consumer_key,
consumer_secret,
access_token_key = None,
access_token_secret = None):
'''Set the consumer_key and consumer_secret for this instance
Args:
consumer_key:
The consumer_key of the twitter account.
consumer_secret:
The consumer_secret for the twitter account.
access_token_key:
The oAuth access token key value you retrieved
from running get_access_token.py.
access_token_secret:
The oAuth access token's secret, also retrieved
from the get_access_token.py run.
'''
self._consumer_key = consumer_key
self._consumer_secret = consumer_secret
self._access_token_key = access_token_key
self._access_token_secret = access_token_secret
self._oauth_consumer = None
if consumer_key is not None and consumer_secret is not None and \
access_token_key is not None and access_token_secret is not None:
self._signature_method_plaintext = oauth.SignatureMethod_PLAINTEXT()
self._signature_method_hmac_sha1 = oauth.SignatureMethod_HMAC_SHA1()
self._oauth_token = oauth.Token(key = access_token_key, secret = access_token_secret)
self._oauth_consumer = oauth.Consumer(key = consumer_key, secret = consumer_secret)
def ClearCredentials(self):
'''Clear the any credentials for this instance
'''
self._consumer_key = None
self._consumer_secret = None
self._access_token_key = None
self._access_token_secret = None
self._oauth_consumer = None
def GetSearch(self,
term = None,
geocode = None,
since_id = None,
max_id = None,
until = None,
count = 15,
lang = None,
locale = None,
result_type = "mixed",
include_entities = None):
'''Return twitter search results for a given term.
Args:
term:
Term to search by. Optional if you include geocode.
since_id:
Returns results with an ID greater than (that is, more recent
than) the specified ID. There are limits to the number of
Tweets which can be accessed through the API. If the limit of
Tweets has occurred since the since_id, the since_id will be
forced to the oldest ID available. [Optional]
max_id:
Returns only statuses with an ID less than (that is, older
than) or equal to the specified ID. [Optional]
until:
Returns tweets generated before the given date. Date should be
formatted as YYYY-MM-DD. [Optional]
geocode:
Geolocation information in the form (latitude, longitude, radius)
[Optional]
count:
Number of results to return. Default is 15 [Optional]
lang:
Language for results as ISO 639-1 code. Default is None (all languages)
[Optional]
locale:
Language of the search query. Currently only 'ja' is effective. This is
intended for language-specific consumers and the default should work in
the majority of cases.
result_type:
Type of result which should be returned. Default is "mixed". Other
valid options are "recent" and "popular". [Optional]
include_entities:
        If True, each tweet will include a node called "entities".
This node offers a variety of metadata about the tweet in a
discrete structure, including: user_mentions, urls, and
hashtags. [Optional]
Returns:
A sequence of twitter.Status instances, one for each message containing
the term
'''
# Build request parameters
parameters = {}
if since_id:
try:
parameters['since_id'] = long(since_id)
except:
raise TwitterError("since_id must be an integer")
if max_id:
try:
parameters['max_id'] = long(max_id)
except:
raise TwitterError("max_id must be an integer")
if until:
parameters['until'] = until
if lang:
parameters['lang'] = lang
if locale:
parameters['locale'] = locale
if term is None and geocode is None:
return []
if term is not None:
parameters['q'] = term
if geocode is not None:
parameters['geocode'] = ','.join(map(str, geocode))
if include_entities:
parameters['include_entities'] = 1
try:
parameters['count'] = int(count)
except:
raise TwitterError("count must be an integer")
if result_type in ["mixed", "popular", "recent"]:
parameters['result_type'] = result_type
# Make and send requests
url = '%s/search/tweets.json' % self.base_url
json = self._FetchUrl(url, parameters = parameters)
data = self._ParseAndCheckTwitter(json)
# Return built list of statuses
return [Status.NewFromJsonDict(x) for x in data['statuses']]
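  # Usage sketch for GetSearch, assuming an authenticated Api instance built
  # with placeholder credentials (see the class docstring above):
  #
  #   results = api.GetSearch(term = 'python', count = 5,
  #                           result_type = 'recent')
  #   print [s.text for s in results]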
def GetUsersSearch(self,
term = None,
page = 1,
count = 20,
include_entities = None):
'''Return twitter user search results for a given term.
Args:
term:
Term to search by.
page:
Page of results to return. Default is 1
[Optional]
count:
Number of results to return. Default is 20
[Optional]
include_entities:
        If True, each tweet will include a node called "entities".
This node offers a variety of metadata about the tweet in a
discrete structure, including: user_mentions, urls, and hashtags.
[Optional]
Returns:
      A sequence of twitter.User instances, one for each user matching
      the term
'''
# Build request parameters
parameters = {}
if term is not None:
parameters['q'] = term
if include_entities:
parameters['include_entities'] = 1
try:
parameters['count'] = int(count)
except:
raise TwitterError("count must be an integer")
# Make and send requests
url = '%s/users/search.json' % self.base_url
json = self._FetchUrl(url, parameters = parameters)
data = self._ParseAndCheckTwitter(json)
return [User.NewFromJsonDict(x) for x in data]
def GetTrendsCurrent(self, exclude = None):
'''Get the current top trending topics (global)
Args:
exclude:
Appends the exclude parameter as a request parameter.
Currently only exclude=hashtags is supported. [Optional]
Returns:
A list with 10 entries. Each entry contains a trend.
'''
return self.GetTrendsWoeid(id = 1, exclude = exclude)
def GetTrendsWoeid(self, id, exclude = None):
'''Return the top 10 trending topics for a specific WOEID, if trending
information is available for it.
Args:
      id:
the Yahoo! Where On Earth ID for a location.
exclude:
Appends the exclude parameter as a request parameter.
Currently only exclude=hashtags is supported. [Optional]
Returns:
A list with 10 entries. Each entry contains a trend.
'''
url = '%s/trends/place.json' % (self.base_url)
parameters = {'id': id}
if exclude:
parameters['exclude'] = exclude
json = self._FetchUrl(url, parameters = parameters)
data = self._ParseAndCheckTwitter(json)
trends = []
timestamp = data[0]['as_of']
for trend in data[0]['trends']:
trends.append(Trend.NewFromJsonDict(trend, timestamp = timestamp))
return trends
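  # Usage sketch for the trend helpers. GetTrendsCurrent simply calls
  # GetTrendsWoeid with the global WOEID 1; any other Yahoo! WOEID can be
  # passed explicitly (the value below is illustrative):
  #
  #   trends = api.GetTrendsWoeid(id = 23424977)
  #   print [t.name for t in trends]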
def GetHomeTimeline(self,
count = None,
since_id = None,
max_id = None,
trim_user = False,
exclude_replies = False,
contributor_details = False,
include_entities = True):
'''
Fetch a collection of the most recent Tweets and retweets posted by the
authenticating user and the users they follow.
The home timeline is central to how most users interact with the Twitter
service.
The twitter.Api instance must be authenticated.
Args:
count:
Specifies the number of statuses to retrieve. May not be
greater than 200. Defaults to 20. [Optional]
since_id:
Returns results with an ID greater than (that is, more recent
than) the specified ID. There are limits to the number of
Tweets which can be accessed through the API. If the limit of
Tweets has occurred since the since_id, the since_id will be
forced to the oldest ID available. [Optional]
max_id:
Returns results with an ID less than (that is, older than) or
equal to the specified ID. [Optional]
trim_user:
When True, each tweet returned in a timeline will include a user
        object including only the status author's numerical ID. Omit this
parameter to receive the complete user object. [Optional]
exclude_replies:
This parameter will prevent replies from appearing in the
returned timeline. Using exclude_replies with the count
        parameter will mean you will receive up to count tweets -
this is because the count parameter retrieves that many
tweets before filtering out retweets and replies.
[Optional]
contributor_details:
This parameter enhances the contributors element of the
status response to include the screen_name of the contributor.
By default only the user_id of the contributor is included.
[Optional]
include_entities:
        The entities node will be omitted when set to False.
        This node offers a variety of metadata about the tweet in a
        discrete structure, including: user_mentions, urls, and
hashtags. [Optional]
Returns:
A sequence of twitter.Status instances, one for each message
'''
url = '%s/statuses/home_timeline.json' % self.base_url
if not self._oauth_consumer:
raise TwitterError("API must be authenticated.")
parameters = {}
if count is not None:
try:
if int(count) > 200:
raise TwitterError("'count' may not be greater than 200")
except ValueError:
raise TwitterError("'count' must be an integer")
parameters['count'] = count
if since_id:
try:
parameters['since_id'] = long(since_id)
except ValueError:
raise TwitterError("'since_id' must be an integer")
if max_id:
try:
parameters['max_id'] = long(max_id)
except ValueError:
raise TwitterError("'max_id' must be an integer")
if trim_user:
parameters['trim_user'] = 1
if exclude_replies:
parameters['exclude_replies'] = 1
if contributor_details:
parameters['contributor_details'] = 1
if not include_entities:
parameters['include_entities'] = 'false'
json = self._FetchUrl(url, parameters = parameters)
data = self._ParseAndCheckTwitter(json)
return [Status.NewFromJsonDict(x) for x in data]
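  # Usage sketch for GetHomeTimeline (requires authentication; parameters
  # shown are illustrative):
  #
  #   home = api.GetHomeTimeline(count = 20, exclude_replies = True)
  #   print ['%s: %s' % (s.user.screen_name, s.text) for s in home]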
def GetUserTimeline(self,
user_id = None,
screen_name = None,
since_id = None,
max_id = None,
count = None,
include_rts = None,
trim_user = None,
exclude_replies = None):
'''Fetch the sequence of public Status messages for a single user.
The twitter.Api instance must be authenticated if the user is private.
Args:
user_id:
Specifies the ID of the user for whom to return the
user_timeline. Helpful for disambiguating when a valid user ID
is also a valid screen name. [Optional]
screen_name:
Specifies the screen name of the user for whom to return the
user_timeline. Helpful for disambiguating when a valid screen
name is also a user ID. [Optional]
since_id:
Returns results with an ID greater than (that is, more recent
than) the specified ID. There are limits to the number of
Tweets which can be accessed through the API. If the limit of
Tweets has occurred since the since_id, the since_id will be
forced to the oldest ID available. [Optional]
max_id:
Returns only statuses with an ID less than (that is, older
than) or equal to the specified ID. [Optional]
count:
Specifies the number of statuses to retrieve. May not be
greater than 200. [Optional]
include_rts:
If True, the timeline will contain native retweets (if they
exist) in addition to the standard stream of tweets. [Optional]
trim_user:
        If True, statuses will contain only the numerical user ID.
Otherwise a full user object will be returned for each status.
[Optional]
exclude_replies:
If True, this will prevent replies from appearing in the returned
timeline. Using exclude_replies with the count parameter will mean you
        will receive up to count tweets - this is because the count parameter
retrieves that many tweets before filtering out retweets and replies.
This parameter is only supported for JSON and XML responses. [Optional]
Returns:
A sequence of Status instances, one for each message up to count
'''
parameters = {}
url = '%s/statuses/user_timeline.json' % (self.base_url)
if user_id:
parameters['user_id'] = user_id
elif screen_name:
parameters['screen_name'] = screen_name
if since_id:
try:
parameters['since_id'] = long(since_id)
except:
raise TwitterError("since_id must be an integer")
if max_id:
try:
parameters['max_id'] = long(max_id)
except:
raise TwitterError("max_id must be an integer")
if count:
try:
parameters['count'] = int(count)
except:
raise TwitterError("count must be an integer")
if include_rts:
parameters['include_rts'] = 1
if trim_user:
parameters['trim_user'] = 1
if exclude_replies:
parameters['exclude_replies'] = 1
json = self._FetchUrl(url, parameters = parameters)
data = self._ParseAndCheckTwitter(json)
return [Status.NewFromJsonDict(x) for x in data]
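  # Usage sketch for GetUserTimeline, fetching another user's public tweets
  # by screen name ('twitter' is just an example account):
  #
  #   timeline = api.GetUserTimeline(screen_name = 'twitter', count = 10,
  #                                  include_rts = True)
  #   print [s.text for s in timeline]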
def GetStatus(self,
id,
trim_user = False,
include_my_retweet = True,
include_entities = True):
'''Returns a single status message, specified by the id parameter.
The twitter.Api instance must be authenticated.
Args:
id:
The numeric ID of the status you are trying to retrieve.
trim_user:
When set to True, each tweet returned in a timeline will include
        a user object including only the status author's numerical ID.
Omit this parameter to receive the complete user object.
[Optional]
include_my_retweet:
When set to True, any Tweets returned that have been retweeted by
the authenticating user will include an additional
current_user_retweet node, containing the ID of the source status
for the retweet. [Optional]
include_entities:
        If False, the entities node will be omitted.
        This node offers a variety of metadata about the tweet in a
        discrete structure, including: user_mentions, urls, and
hashtags. [Optional]
Returns:
A twitter.Status instance representing that status message
'''
url = '%s/statuses/show.json' % (self.base_url)
if not self._oauth_consumer:
raise TwitterError("API must be authenticated.")
parameters = {}
try:
parameters['id'] = long(id)
except ValueError:
raise TwitterError("'id' must be an integer.")
if trim_user:
parameters['trim_user'] = 1
if include_my_retweet:
parameters['include_my_retweet'] = 1
if not include_entities:
parameters['include_entities'] = 'none'
json = self._FetchUrl(url, parameters = parameters)
data = self._ParseAndCheckTwitter(json)
return Status.NewFromJsonDict(data)
def DestroyStatus(self, id, trim_user = False):
'''Destroys the status specified by the required ID parameter.
The twitter.Api instance must be authenticated and the
authenticating user must be the author of the specified status.
Args:
id:
The numerical ID of the status you're trying to destroy.
Returns:
A twitter.Status instance representing the destroyed status message
'''
if not self._oauth_consumer:
raise TwitterError("API must be authenticated.")
try:
post_data = {'id': long(id)}
except:
raise TwitterError("id must be an integer")
url = '%s/statuses/destroy/%s.json' % (self.base_url, id)
if trim_user:
post_data['trim_user'] = 1
json = self._FetchUrl(url, post_data = post_data)
data = self._ParseAndCheckTwitter(json)
return Status.NewFromJsonDict(data)
@classmethod
def _calculate_status_length(cls, status, linksize = 19):
dummy_link_replacement = 'https://-%d-chars%s/' % (linksize, '-' * (linksize - 18))
shortened = ' '.join([x if not (x.startswith('http://') or
x.startswith('https://'))
else
dummy_link_replacement
for x in status.split(' ')])
return len(shortened)
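  # Worked example for _calculate_status_length: every whitespace-separated
  # token starting with http:// or https:// is replaced by a dummy link of
  # exactly `linksize` characters (modelling t.co wrapping), so
  #
  #   Api._calculate_status_length('see http://example.com/a/very/long/path', 19)
  #
  # counts 3 ('see') + 1 (space) + 19 (dummy link) = 23, regardless of how
  # long the real URL is.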
def PostUpdate(self, status, in_reply_to_status_id = None, latitude = None, longitude = None, place_id = None, display_coordinates = False, trim_user = False):
'''Post a twitter status message from the authenticated user.
The twitter.Api instance must be authenticated.
https://dev.twitter.com/docs/api/1.1/post/statuses/update
Args:
status:
The message text to be posted.
Must be less than or equal to 140 characters.
in_reply_to_status_id:
The ID of an existing status that the status to be posted is
in reply to. This implicitly sets the in_reply_to_user_id
attribute of the resulting status to the user ID of the
message being replied to. Invalid/missing status IDs will be
ignored. [Optional]
latitude:
Latitude coordinate of the tweet in degrees. Will only work
in conjunction with longitude argument. Both longitude and
latitude will be ignored by twitter if the user has a false
geo_enabled setting. [Optional]
longitude:
Longitude coordinate of the tweet in degrees. Will only work
in conjunction with latitude argument. Both longitude and
latitude will be ignored by twitter if the user has a false
geo_enabled setting. [Optional]
place_id:
A place in the world. These IDs can be retrieved from
GET geo/reverse_geocode. [Optional]
display_coordinates:
Whether or not to put a pin on the exact coordinates a tweet
has been sent from. [Optional]
trim_user:
If True the returned payload will only contain the user IDs,
otherwise the payload will contain the full user data item.
[Optional]
Returns:
A twitter.Status instance representing the message posted.
'''
if not self._oauth_consumer:
raise TwitterError("The twitter.Api instance must be authenticated.")
url = '%s/statuses/update.json' % self.base_url
if isinstance(status, unicode) or self._input_encoding is None:
u_status = status
else:
u_status = unicode(status, self._input_encoding)
#if self._calculate_status_length(u_status, self._shortlink_size) > CHARACTER_LIMIT:
# raise TwitterError("Text must be less than or equal to %d characters. "
# "Consider using PostUpdates." % CHARACTER_LIMIT)
data = {'status': status}
if in_reply_to_status_id:
data['in_reply_to_status_id'] = in_reply_to_status_id
if latitude is not None and longitude is not None:
data['lat'] = str(latitude)
data['long'] = str(longitude)
if place_id is not None:
data['place_id'] = str(place_id)
if display_coordinates:
data['display_coordinates'] = 'true'
if trim_user:
data['trim_user'] = 'true'
json = self._FetchUrl(url, post_data = data)
data = self._ParseAndCheckTwitter(json)
return Status.NewFromJsonDict(data)
def PostUpdates(self, status, continuation = None, **kwargs):
'''Post one or more twitter status messages from the authenticated user.
Unlike api.PostUpdate, this method will post multiple status updates
if the message is longer than 140 characters.
The twitter.Api instance must be authenticated.
Args:
status:
The message text to be posted.
May be longer than 140 characters.
continuation:
The character string, if any, to be appended to all but the
last message. Note that Twitter strips trailing '...' strings
from messages. Consider using the unicode \u2026 character
(horizontal ellipsis) instead. [Defaults to None]
**kwargs:
See api.PostUpdate for a list of accepted parameters.
Returns:
      A list of twitter.Status instances representing the messages posted.
'''
results = list()
if continuation is None:
continuation = ''
line_length = CHARACTER_LIMIT - len(continuation)
lines = textwrap.wrap(status, line_length)
for line in lines[0:-1]:
results.append(self.PostUpdate(line + continuation, **kwargs))
results.append(self.PostUpdate(lines[-1], **kwargs))
return results
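  # Usage sketch for PostUpdates with a message longer than CHARACTER_LIMIT;
  # the horizontal-ellipsis continuation is appended to every chunk except
  # the last (text below is a placeholder):
  #
  #   long_text = 'lorem ipsum ' * 30
  #   statuses = api.PostUpdates(long_text, continuation = u'\u2026')
  #   print [s.text for s in statuses]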
def PostRetweet(self, original_id, trim_user = False):
'''Retweet a tweet with the Retweet API.
The twitter.Api instance must be authenticated.
Args:
original_id:
The numerical id of the tweet that will be retweeted
trim_user:
If True the returned payload will only contain the user IDs,
otherwise the payload will contain the full user data item.
[Optional]
Returns:
A twitter.Status instance representing the original tweet with retweet details embedded.
'''
if not self._oauth_consumer:
raise TwitterError("The twitter.Api instance must be authenticated.")
try:
if int(original_id) <= 0:
raise TwitterError("'original_id' must be a positive number")
except ValueError:
raise TwitterError("'original_id' must be an integer")
url = '%s/statuses/retweet/%s.json' % (self.base_url, original_id)
data = {'id': original_id}
if trim_user:
data['trim_user'] = 'true'
json = self._FetchUrl(url, post_data = data)
data = self._ParseAndCheckTwitter(json)
return Status.NewFromJsonDict(data)
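# Illustrative usage sketch (added); assumes an authenticated `api` and a
# placeholder tweet id.
#   retweet = api.PostRetweet(123456789012345678, trim_user=True)
#   print retweet.id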
def GetUserRetweets(self, count = None, since_id = None, max_id = None, trim_user = False):
'''Fetch the sequence of retweets made by the authenticated user.
The twitter.Api instance must be authenticated.
Args:
count:
The number of status messages to retrieve. [Optional]
since_id:
Returns results with an ID greater than (that is, more recent
than) the specified ID. There are limits to the number of
Tweets which can be accessed through the API. If the limit of
Tweets has occurred since the since_id, the since_id will be
forced to the oldest ID available. [Optional]
max_id:
Returns results with an ID less than (that is, older than) or
equal to the specified ID. [Optional]
trim_user:
If True the returned payload will only contain the user IDs,
otherwise the payload will contain the full user data item.
[Optional]
Returns:
A sequence of twitter.Status instances, one for each message up to count
'''
return self.GetUserTimeline(since_id = since_id, count = count, max_id = max_id, trim_user = trim_user, exclude_replies = True, include_rts = True)
def GetReplies(self, since_id = None, count = None, max_id = None, trim_user = False):
'''Get a sequence of status messages representing the 20 most
recent replies (status updates prefixed with @twitterID) to the
authenticating user.
Args:
since_id:
Returns results with an ID greater than (that is, more recent
than) the specified ID. There are limits to the number of
Tweets which can be accessed through the API. If the limit of
Tweets has occurred since the since_id, the since_id will be
forced to the oldest ID available. [Optional]
max_id:
Returns results with an ID less than (that is, older than) or
equal to the specified ID. [Optional]
trim_user:
If True the returned payload will only contain the user IDs,
otherwise the payload will contain the full user data item.
[Optional]
Returns:
A sequence of twitter.Status instances, one for each reply to the user.
'''
return self.GetUserTimeline(since_id = since_id, count = count, max_id = max_id, trim_user = trim_user, exclude_replies = False, include_rts = False)
def GetRetweets(self, statusid, count = None, trim_user = False):
'''Returns up to 100 of the first retweets of the tweet identified
by statusid
Args:
statusid:
The ID of the tweet for which retweets should be searched for
count:
The number of status messages to retrieve. [Optional]
trim_user:
If True the returned payload will only contain the user IDs,
otherwise the payload will contain the full user data item.
[Optional]
Returns:
A list of twitter.Status instances, which are retweets of statusid
'''
if not self._oauth_consumer:
raise TwitterError("The twitter.Api instsance must be authenticated.")
url = '%s/statuses/retweets/%s.json' % (self.base_url, statusid)
parameters = {}
if trim_user:
parameters['trim_user'] = 'true'
if count:
try:
parameters['count'] = int(count)
except:
raise TwitterError("count must be an integer")
json = self._FetchUrl(url, parameters = parameters)
data = self._ParseAndCheckTwitter(json)
return [Status.NewFromJsonDict(s) for s in data]
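# Illustrative usage sketch (added); assumes an authenticated `api` and a
# placeholder status id. Fetches up to 50 retweets of that tweet.
#   retweets = api.GetRetweets(123456789012345678, count=50)
#   for rt in retweets:
#       print rt.user.screen_name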
def GetRetweetsOfMe(self,
count = None,
since_id = None,
max_id = None,
trim_user = False,
include_entities = True,
include_user_entities = True):
'''Returns up to 100 of the most recent tweets of the user that have been
retweeted by others.
Args:
count:
The number of retweets to retrieve, up to 100. If omitted, 20 is
assumed.
since_id:
Returns results with an ID greater than (newer than) this ID.
max_id:
Returns results with an ID less than or equal to this ID.
trim_user:
When True, the user object for each tweet will only be an ID.
include_entities:
When True, the tweet entities will be included.
include_user_entities:
When True, the user entities will be included.
'''
if not self._oauth_consumer:
raise TwitterError("The twitter.Api instance must be authenticated.")
url = '%s/statuses/retweets_of_me.json' % self.base_url
parameters = {}
if count is not None:
try:
if int(count) > 100:
raise TwitterError("'count' may not be greater than 100")
except ValueError:
raise TwitterError("'count' must be an integer")
if count:
parameters['count'] = count
if since_id:
parameters['since_id'] = since_id
if max_id:
parameters['max_id'] = max_id
if trim_user:
parameters['trim_user'] = trim_user
if not include_entities:
parameters['include_entities'] = include_entities
if not include_user_entities:
parameters['include_user_entities'] = include_user_entities
json = self._FetchUrl(url, parameters = parameters)
data = self._ParseAndCheckTwitter(json)
return [Status.NewFromJsonDict(s) for s in data]
def GetFriends(self, user_id = None, screen_name = None, cursor = -1, skip_status = False, include_user_entities = False):
'''Fetch the sequence of twitter.User instances, one for each friend.
The twitter.Api instance must be authenticated.
Args:
user_id:
The twitter id of the user whose friends you are fetching.
If not specified, defaults to the authenticated user. [Optional]
screen_name:
The twitter name of the user whose friends you are fetching.
If not specified, defaults to the authenticated user. [Optional]
cursor:
Should be set to -1 for the initial call and then is used to
control what result page Twitter returns [Optional(ish)]
skip_status:
If True the statuses will not be returned in the user items.
[Optional]
include_user_entities:
When True, the user entities will be included.
Returns:
A sequence of twitter.User instances, one for each friend
'''
if not self._oauth_consumer:
raise TwitterError("twitter.Api instance must be authenticated")
url = '%s/friends/list.json' % self.base_url
result = []
parameters = {}
if user_id is not None:
parameters['user_id'] = user_id
if screen_name is not None:
parameters['screen_name'] = screen_name
if skip_status:
parameters['skip_status'] = True
if include_user_entities:
parameters['include_user_entities'] = True
while True:
parameters['cursor'] = cursor
json = self._FetchUrl(url, parameters = parameters)
data = self._ParseAndCheckTwitter(json)
result += [User.NewFromJsonDict(x) for x in data['users']]
if 'next_cursor' in data:
if data['next_cursor'] == 0 or data['next_cursor'] == data['previous_cursor']:
break
else:
cursor = data['next_cursor']
else:
break
return result
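# Illustrative usage sketch (added); assumes an authenticated `api`. The method
# follows next_cursor internally, so a single call collects every page of
# friends for the placeholder account below.
#   friends = api.GetFriends(screen_name='example_user', skip_status=True)
#   print [f.screen_name for f in friends]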
def GetFriendIDs(self, user_id = None, screen_name = None, cursor = -1, stringify_ids = False, count = None):
'''Returns a list of Twitter user IDs for every person
the specified user is following.
Args:
user_id:
The id of the user to retrieve the id list for
[Optional]
screen_name:
The screen_name of the user to retrieve the id list for
[Optional]
cursor:
Specifies the Twitter API Cursor location to start at.
Note: there are pagination limits.
[Optional]
stringify_ids:
if True then twitter will return the ids as strings instead of integers.
[Optional]
count:
The number of status messages to retrieve. [Optional]
Returns:
A list of integers, one for each user id.
'''
url = '%s/friends/ids.json' % self.base_url
if not self._oauth_consumer:
raise TwitterError("twitter.Api instance must be authenticated")
parameters = {}
if user_id is not None:
parameters['user_id'] = user_id
if screen_name is not None:
parameters['screen_name'] = screen_name
if stringify_ids:
parameters['stringify_ids'] = True
if count is not None:
parameters['count'] = count
result = []
while True:
parameters['cursor'] = cursor
json = self._FetchUrl(url, parameters = parameters)
data = self._ParseAndCheckTwitter(json)
result += [x for x in data['ids']]
if 'next_cursor' in data:
if data['next_cursor'] == 0 or data['next_cursor'] == data['previous_cursor']:
break
else:
cursor = data['next_cursor']
else:
break
return result
def GetFollowerIDs(self, user_id = None, screen_name = None, cursor = -1, stringify_ids = False, count = None, total_count = None):
'''Returns a list of Twitter user IDs for every person
that is following the specified user.
Args:
user_id:
The id of the user to retrieve the id list for
[Optional]
screen_name:
The screen_name of the user to retrieve the id list for
[Optional]
cursor:
Specifies the Twitter API Cursor location to start at.
Note: there are pagination limits.
[Optional]
stringify_ids:
if True then twitter will return the ids as strings instead of integers.
[Optional]
count:
The number of user id's to retrieve per API request. Please be aware that
this might get you rate-limited if set to a small number. By default Twitter
will retrieve 5000 UIDs per call.
[Optional]
total_count:
The total amount of UIDs to retrieve. Good if the account has many followers
and you don't want to get rate limited. The data returned might contain more
UIDs if total_count is not a multiple of count (5000 by default).
[Optional]
Returns:
A list of integers, one for each user id.
'''
url = '%s/followers/ids.json' % self.base_url
if not self._oauth_consumer:
raise TwitterError("twitter.Api instance must be authenticated")
parameters = {}
if user_id is not None:
parameters['user_id'] = user_id
if screen_name is not None:
parameters['screen_name'] = screen_name
if stringify_ids:
parameters['stringify_ids'] = True
if count is not None:
parameters['count'] = count
result = []
while True:
if total_count and total_count < count:
parameters['count'] = total_count
parameters['cursor'] = cursor
json = self._FetchUrl(url, parameters = parameters)
data = self._ParseAndCheckTwitter(json)
result += [x for x in data['ids']]
if 'next_cursor' in data:
if data['next_cursor'] == 0 or data['next_cursor'] == data['previous_cursor']:
break
else:
cursor = data['next_cursor']
total_count -= len(data['ids'])
if total_count < 1:
break
else:
break
return result
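# Illustrative usage sketch (added); assumes an authenticated `api` and a
# placeholder account. Note that the paging loop above subtracts each page's
# size from total_count, so when more than one page of followers is expected
# it is safest to pass both count (page size) and total_count.
#   follower_ids = api.GetFollowerIDs(screen_name='example_user',
#                                     count=5000, total_count=10000)
#   print len(follower_ids)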
def GetFollowers(self, user_id = None, screen_name = None, cursor = -1, skip_status = False, include_user_entities = False):
'''Fetch the sequence of twitter.User instances, one for each follower
The twitter.Api instance must be authenticated.
Args:
user_id:
The twitter id of the user whose followers you are fetching.
If not specified, defaults to the authenticated user. [Optional]
screen_name:
The twitter name of the user whose followers you are fetching.
If not specified, defaults to the authenticated user. [Optional]
cursor:
Should be set to -1 for the initial call and then is used to
control what result page Twitter returns [Optional(ish)]
skip_status:
If True the statuses will not be returned in the user items.
[Optional]
include_user_entities:
When True, the user entities will be included.
Returns:
A sequence of twitter.User instances, one for each follower
'''
if not self._oauth_consumer:
raise TwitterError("twitter.Api instance must be authenticated")
url = '%s/followers/list.json' % self.base_url
result = []
parameters = {}
if user_id is not None:
parameters['user_id'] = user_id
if screen_name is not None:
parameters['screen_name'] = screen_name
if skip_status:
parameters['skip_status'] = True
if include_user_entities:
parameters['include_user_entities'] = True
while True:
parameters['cursor'] = cursor
json = self._FetchUrl(url, parameters = parameters)
data = self._ParseAndCheckTwitter(json)
result += [User.NewFromJsonDict(x) for x in data['users']]
if 'next_cursor' in data:
if data['next_cursor'] == 0 or data['next_cursor'] == data['previous_cursor']:
break
else:
cursor = data['next_cursor']
else:
break
return result
def UsersLookup(self, user_id = None, screen_name = None, users = None, include_entities = True):
'''Fetch extended information for the specified users.
Users may be specified as lists of user_ids,
screen_names, or twitter.User objects. The list of users that
are queried is the union of all specified parameters.
The twitter.Api instance must be authenticated.
Args:
user_id:
A list of user_ids to retrieve extended information.
[Optional]
screen_name:
A list of screen_names to retrieve extended information.
[Optional]
users:
A list of twitter.User objects to retrieve extended information.
[Optional]
include_entities:
The entities node that may appear within embedded statuses will be
omitted when set to False.
[Optional]
Returns:
A list of twitter.User objects for the requested users
'''
if not self._oauth_consumer:
raise TwitterError("The twitter.Api instance must be authenticated.")
if not user_id and not screen_name and not users:
raise TwitterError("Specify at least one of user_id, screen_name, or users.")
url = '%s/users/lookup.json' % self.base_url
parameters = {}
uids = list()
if user_id:
uids.extend(user_id)
if users:
uids.extend([u.id for u in users])
if len(uids):
parameters['user_id'] = ','.join(["%s" % u for u in uids])
if screen_name:
parameters['screen_name'] = ','.join(screen_name)
if not include_entities:
parameters['include_entities'] = 'false'
json = self._FetchUrl(url, parameters = parameters)
try:
data = self._ParseAndCheckTwitter(json)
except TwitterError, e:
_, e, _ = sys.exc_info()
t = e.args[0]
if len(t) == 1 and ('code' in t[0]) and (t[0]['code'] == 34):
data = []
else:
raise
return [User.NewFromJsonDict(u) for u in data]
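# Illustrative usage sketch (added); assumes an authenticated `api`. Both
# arguments take lists, and the queried set is the union of everything given;
# the names and id below are placeholders.
#   users = api.UsersLookup(screen_name=['example_one', 'example_two'],
#                           user_id=[783214])
#   print [u.name for u in users]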
def GetUser(self, user_id = None, screen_name = None, include_entities = True):
'''Returns a single user.
The twitter.Api instance must be authenticated.
Args:
user_id:
The id of the user to retrieve.
[Optional]
screen_name:
The screen name of the user for whom to return results. Either a
user_id or screen_name is required for this method.
[Optional]
include_entities:
if set to False, the 'entities' node will not be included.
[Optional]
Returns:
A twitter.User instance representing that user
'''
url = '%s/users/show.json' % (self.base_url)
parameters = {}
if not self._oauth_consumer:
raise TwitterError("The twitter.Api instance must be authenticated.")
if user_id:
parameters['user_id'] = user_id
elif screen_name:
parameters['screen_name'] = screen_name
else:
raise TwitterError("Specify at least one of user_id or screen_name.")
if not include_entities:
parameters['include_entities'] = 'false'
json = self._FetchUrl(url, parameters = parameters)
data = self._ParseAndCheckTwitter(json)
return User.NewFromJsonDict(data)
def GetDirectMessages(self, since_id = None, max_id = None, count = None, include_entities = True, skip_status = False):
'''Returns a list of the direct messages sent to the authenticating user.
The twitter.Api instance must be authenticated.
Args:
since_id:
Returns results with an ID greater than (that is, more recent
than) the specified ID. There are limits to the number of
Tweets which can be accessed through the API. If the limit of
Tweets has occurred since the since_id, the since_id will be
forced to the oldest ID available. [Optional]
max_id:
Returns results with an ID less than (that is, older than) or
equal to the specified ID. [Optional]
count:
Specifies the number of direct messages to try and retrieve, up to a
maximum of 200. The value of count is best thought of as a limit to the
number of Tweets to return because suspended or deleted content is
removed after the count has been applied. [Optional]
include_entities:
The entities node will not be included when set to False.
[Optional]
skip_status:
When set to True statuses will not be included in the returned user
objects. [Optional]
Returns:
A sequence of twitter.DirectMessage instances
'''
url = '%s/direct_messages.json' % self.base_url
if not self._oauth_consumer:
raise TwitterError("The twitter.Api instance must be authenticated.")
parameters = {}
if since_id:
parameters['since_id'] = since_id
if max_id:
parameters['max_id'] = max_id
if count:
try:
parameters['count'] = int(count)
except:
raise TwitterError("count must be an integer")
if not include_entities:
parameters['include_entities'] = 'false'
if skip_status:
parameters['skip_status'] = 1
json = self._FetchUrl(url, parameters = parameters)
data = self._ParseAndCheckTwitter(json)
return [DirectMessage.NewFromJsonDict(x) for x in data]
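# Illustrative usage sketch (added); assumes an authenticated `api`.
#   messages = api.GetDirectMessages(count=50, skip_status=True)
#   for dm in messages:
#       print dm.sender_screen_name, dm.text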
def GetSentDirectMessages(self, since_id = None, max_id = None, count = None, page = None, include_entities = True):
'''Returns a list of the direct messages sent by the authenticating user.
The twitter.Api instance must be authenticated.
Args:
since_id:
Returns results with an ID greater than (that is, more recent
than) the specified ID. There are limits to the number of
Tweets which can be accessed through the API. If the limit of
Tweets has occurred since the since_id, the since_id will be
forced to the oldest ID available. [Optional]
max_id:
Returns results with an ID less than (that is, older than) or
equal to the specified ID. [Optional]
count:
Specifies the number of direct messages to try and retrieve, up to a
maximum of 200. The value of count is best thought of as a limit to the
number of Tweets to return because suspended or deleted content is
removed after the count has been applied. [Optional]
page:
Specifies the page of results to retrieve.
Note: there are pagination limits. [Optional]
include_entities:
The entities node will not be included when set to False.
[Optional]
Returns:
A sequence of twitter.DirectMessage instances
'''
url = '%s/direct_messages/sent.json' % self.base_url
if not self._oauth_consumer:
raise TwitterError("The twitter.Api instance must be authenticated.")
parameters = {}
if since_id:
parameters['since_id'] = since_id
if page:
parameters['page'] = page
if max_id:
parameters['max_id'] = max_id
if count:
try:
parameters['count'] = int(count)
except:
raise TwitterError("count must be an integer")
if not include_entities:
parameters['include_entities'] = 'false'
json = self._FetchUrl(url, parameters = parameters)
data = self._ParseAndCheckTwitter(json)
return [DirectMessage.NewFromJsonDict(x) for x in data]
def PostDirectMessage(self, text, user_id = None, screen_name = None):
'''Post a twitter direct message from the authenticated user
The twitter.Api instance must be authenticated. user_id or screen_name
must be specified.
Args:
text: The message text to be posted. Must be less than 140 characters.
user_id:
The ID of the user who should receive the direct message.
[Optional]
screen_name:
The screen name of the user who should receive the direct message.
[Optional]
Returns:
A twitter.DirectMessage instance representing the message posted
'''
if not self._oauth_consumer:
raise TwitterError("The twitter.Api instance must be authenticated.")
url = '%s/direct_messages/new.json' % self.base_url
data = {'text': text}
if user_id:
data['user_id'] = user_id
elif screen_name:
data['screen_name'] = screen_name
else:
raise TwitterError("Specify at least one of user_id or screen_name.")
json = self._FetchUrl(url, post_data = data)
data = self._ParseAndCheckTwitter(json)
return DirectMessage.NewFromJsonDict(data)
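# Illustrative usage sketch (added); assumes an authenticated `api` and a
# placeholder recipient.
#   dm = api.PostDirectMessage('See you at 10', screen_name='example_user')
#   print dm.id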
def DestroyDirectMessage(self, id, include_entities = True):
'''Destroys the direct message specified in the required ID parameter.
The twitter.Api instance must be authenticated, and the
authenticating user must be the recipient of the specified direct
message.
Args:
id: The id of the direct message to be destroyed
Returns:
A twitter.DirectMessage instance representing the message destroyed
'''
url = '%s/direct_messages/destroy.json' % self.base_url
data = {'id': id}
if not include_entities:
data['include_entities'] = 'false'
json = self._FetchUrl(url, post_data = data)
data = self._ParseAndCheckTwitter(json)
return DirectMessage.NewFromJsonDict(data)
def CreateFriendship(self, user_id = None, screen_name = None, follow = True):
'''Befriends the user specified by the user_id or screen_name.
The twitter.Api instance must be authenticated.
Args:
user_id:
A user_id to follow [Optional]
screen_name:
A screen_name to follow [Optional]
follow:
Set to False to disable notifications for the target user
Returns:
A twitter.User instance representing the befriended user.
'''
url = '%s/friendships/create.json' % (self.base_url)
data = {}
if user_id:
data['user_id'] = user_id
elif screen_name:
data['screen_name'] = screen_name
else:
raise TwitterError("Specify at least one of user_id or screen_name.")
if follow:
data['follow'] = 'true'
else:
data['follow'] = 'false'
json = self._FetchUrl(url, post_data = data)
data = self._ParseAndCheckTwitter(json)
return User.NewFromJsonDict(data)
def DestroyFriendship(self, user_id = None, screen_name = None):
'''Discontinues friendship with a user_id or screen_name.
The twitter.Api instance must be authenticated.
Args:
user_id:
A user_id to unfollow [Optional]
screen_name:
A screen_name to unfollow [Optional]
Returns:
A twitter.User instance representing the discontinued friend.
'''
url = '%s/friendships/destroy.json' % self.base_url
data = {}
if user_id:
data['user_id'] = user_id
elif screen_name:
data['screen_name'] = screen_name
else:
raise TwitterError("Specify at least one of user_id or screen_name.")
json = self._FetchUrl(url, post_data = data)
data = self._ParseAndCheckTwitter(json)
return User.NewFromJsonDict(data)
def CreateFavorite(self, status = None, id = None, include_entities = True):
'''Favorites the specified status object or id as the authenticating user.
Returns the favorite status when successful.
The twitter.Api instance must be authenticated.
Args:
id:
The id of the twitter status to mark as a favorite.
[Optional]
status:
The twitter.Status object to mark as a favorite.
[Optional]
include_entities:
The entities node will be omitted when set to False.
Returns:
A twitter.Status instance representing the newly-marked favorite.
'''
url = '%s/favorites/create.json' % self.base_url
data = {}
if id:
data['id'] = id
elif status:
data['id'] = status.id
else:
raise TwitterError("Specify id or status")
if not include_entities:
data['include_entities'] = 'false'
json = self._FetchUrl(url, post_data = data)
data = self._ParseAndCheckTwitter(json)
return Status.NewFromJsonDict(data)
def DestroyFavorite(self, status = None, id = None, include_entities = True):
'''Un-Favorites the specified status object or id as the authenticating user.
Returns the un-favorited status when successful.
The twitter.Api instance must be authenticated.
Args:
id:
The id of the twitter status to unmark as a favorite.
[Optional]
status:
The twitter.Status object to unmark as a favorite.
[Optional]
include_entities:
The entities node will be omitted when set to False.
Returns:
A twitter.Status instance representing the newly-unmarked favorite.
'''
url = '%s/favorites/destroy.json' % self.base_url
data = {}
if id:
data['id'] = id
elif status:
data['id'] = status.id
else:
raise TwitterError("Specify id or status")
if not include_entities:
data['include_entities'] = 'false'
json = self._FetchUrl(url, post_data = data)
data = self._ParseAndCheckTwitter(json)
return Status.NewFromJsonDict(data)
def GetFavorites(self,
user_id = None,
screen_name = None,
count = None,
since_id = None,
max_id = None,
include_entities = True):
'''Return a list of Status objects representing favorited tweets.
By default, returns the (up to) 20 most recent tweets for the
authenticated user.
Args:
user_id:
The twitter id of the user whose favorites you are fetching.
If not specified, defaults to the authenticated user. [Optional]
screen_name:
The twitter name of the user whose favorites you are fetching.
If not specified, defaults to the authenticated user. [Optional]
count:
The number of status messages to retrieve. [Optional]
since_id:
Returns results with an ID greater than (that is, more recent
than) the specified ID. [Optional]
max_id:
Returns results with an ID less than (that is, older than) or
equal to the specified ID. [Optional]
include_entities:
When True, the tweet entities will be included. [Optional]
Returns:
A sequence of twitter.Status instances, one for each favorited tweet.
'''
parameters = {}
url = '%s/favorites/list.json' % self.base_url
if user_id:
parameters['user_id'] = user_id
elif screen_name:
parameters['screen_name'] = screen_name
if since_id:
try:
parameters['since_id'] = long(since_id)
except:
raise TwitterError("since_id must be an integer")
if max_id:
try:
parameters['max_id'] = long(max_id)
except:
raise TwitterError("max_id must be an integer")
if count:
try:
parameters['count'] = int(count)
except:
raise TwitterError("count must be an integer")
if include_entities:
parameters['include_entities'] = True
json = self._FetchUrl(url, parameters = parameters)
data = self._ParseAndCheckTwitter(json)
return [Status.NewFromJsonDict(x) for x in data]
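# Illustrative usage sketch (added); assumes an authenticated `api` and a
# placeholder account.
#   favorites = api.GetFavorites(screen_name='example_user', count=20)
#   print [s.id for s in favorites]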
def GetMentions(self,
count = None,
since_id = None,
max_id = None,
trim_user = False,
contributor_details = False,
include_entities = True):
'''Returns the 20 most recent mentions (status containing @screen_name)
for the authenticating user.
Args:
count:
Specifies the number of tweets to try and retrieve, up to a maximum of
200. The value of count is best thought of as a limit to the number of
tweets to return because suspended or deleted content is removed after
the count has been applied. [Optional]
since_id:
Returns results with an ID greater than (that is, more recent
than) the specified ID. There are limits to the number of
Tweets which can be accessed through the API. If the limit of
Tweets has occurred since the since_id, the since_id will be
forced to the oldest ID available. [Optional]
max_id:
Returns only statuses with an ID less than
(that is, older than) the specified ID. [Optional]
trim_user:
When set to True, each tweet returned in a timeline will include a user
object including only the status authors numerical ID. Omit this
parameter to receive the complete user object.
contributor_details:
If set to True, this parameter enhances the contributors element of the
status response to include the screen_name of the contributor. By
default only the user_id of the contributor is included.
include_entities:
The entities node will be omitted when set to False.
Returns:
A sequence of twitter.Status instances, one for each mention of the user.
'''
url = '%s/statuses/mentions_timeline.json' % self.base_url
if not self._oauth_consumer:
raise TwitterError("The twitter.Api instance must be authenticated.")
parameters = {}
if count:
try:
parameters['count'] = int(count)
except:
raise TwitterError("count must be an integer")
if since_id:
try:
parameters['since_id'] = long(since_id)
except:
raise TwitterError("since_id must be an integer")
if max_id:
try:
parameters['max_id'] = long(max_id)
except:
raise TwitterError("max_id must be an integer")
if trim_user:
parameters['trim_user'] = 1
if contributor_details:
parameters['contributor_details'] = 'true'
if not include_entities:
parameters['include_entities'] = 'false'
json = self._FetchUrl(url, parameters = parameters)
data = self._ParseAndCheckTwitter(json)
return [Status.NewFromJsonDict(x) for x in data]
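# Illustrative usage sketch (added); assumes an authenticated `api`.
# `last_seen_id` is a placeholder for a status id remembered from an earlier poll.
#   mentions = api.GetMentions(count=50, since_id=last_seen_id)
#   for m in mentions:
#       print m.user.screen_name, m.text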
def CreateList(self, name, mode = None, description = None):
'''Creates a new list with the given name for the authenticated user.
The twitter.Api instance must be authenticated.
Args:
name:
New name for the list
mode:
'public' or 'private'.
Defaults to 'public'. [Optional]
description:
Description of the list. [Optional]
Returns:
A twitter.List instance representing the new list
'''
url = '%s/lists/create.json' % self.base_url
if not self._oauth_consumer:
raise TwitterError("The twitter.Api instance must be authenticated.")
parameters = {'name': name}
if mode is not None:
parameters['mode'] = mode
if description is not None:
parameters['description'] = description
json = self._FetchUrl(url, post_data = parameters)
data = self._ParseAndCheckTwitter(json)
return List.NewFromJsonDict(data)
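# Illustrative usage sketch (added); assumes an authenticated `api`. The list
# name and description are placeholders.
#   reading_list = api.CreateList('reading-list', mode='private',
#                                 description='Accounts I read daily')
#   print reading_list.slug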
def DestroyList(self,
owner_screen_name = False,
owner_id = False,
list_id = None,
slug = None):
'''
Destroys the list identified by list_id or owner_screen_name/owner_id and
slug.
The twitter.Api instance must be authenticated.
Args:
owner_screen_name:
The screen_name of the user who owns the list being requested by a slug.
owner_id:
The user ID of the user who owns the list being requested by a slug.
list_id:
The numerical id of the list.
slug:
You can identify a list by its slug instead of its numerical id. If you
decide to do so, note that you'll also have to specify the list owner
using the owner_id or owner_screen_name parameters.
Returns:
A twitter.List instance representing the removed list.
'''
url = '%s/lists/destroy.json' % self.base_url
data = {}
if list_id:
try:
data['list_id'] = long(list_id)
except:
raise TwitterError("list_id must be an integer")
elif slug:
data['slug'] = slug
if owner_id:
try:
data['owner_id'] = long(owner_id)
except:
raise TwitterError("owner_id must be an integer")
elif owner_screen_name:
data['owner_screen_name'] = owner_screen_name
else:
raise TwitterError("Identify list by list_id or owner_screen_name/owner_id and slug")
else:
raise TwitterError("Identify list by list_id or owner_screen_name/owner_id and slug")
json = self._FetchUrl(url, post_data = data)
data = self._ParseAndCheckTwitter(json)
return List.NewFromJsonDict(data)
def CreateSubscription(self,
owner_screen_name = False,
owner_id = False,
list_id = None,
slug = None):
'''Creates a subscription to a list by the authenticated user
The twitter.Api instance must be authenticated.
Args:
owner_screen_name:
The screen_name of the user who owns the list being requested by a slug.
owner_id:
The user ID of the user who owns the list being requested by a slug.
list_id:
The numerical id of the list.
slug:
You can identify a list by its slug instead of its numerical id. If you
decide to do so, note that you'll also have to specify the list owner
using the owner_id or owner_screen_name parameters.
Returns:
A twitter.List instance representing the list subscribed to
'''
url = '%s/lists/subscribers/create.json' % (self.base_url)
if not self._oauth_consumer:
raise TwitterError("The twitter.Api instance must be authenticated.")
data = {}
if list_id:
try:
data['list_id'] = long(list_id)
except:
raise TwitterError("list_id must be an integer")
elif slug:
data['slug'] = slug
if owner_id:
try:
data['owner_id'] = long(owner_id)
except:
raise TwitterError("owner_id must be an integer")
elif owner_screen_name:
data['owner_screen_name'] = owner_screen_name
else:
raise TwitterError("Identify list by list_id or owner_screen_name/owner_id and slug")
else:
raise TwitterError("Identify list by list_id or owner_screen_name/owner_id and slug")
json = self._FetchUrl(url, post_data = data)
data = self._ParseAndCheckTwitter(json)
return List.NewFromJsonDict(data)
def DestroySubscription(self,
owner_screen_name = False,
owner_id = False,
list_id = None,
slug = None):
'''Destroys the subscription to a list for the authenticated user
The twitter.Api instance must be authenticated.
Args:
owner_screen_name:
The screen_name of the user who owns the list being requested by a slug.
owner_id:
The user ID of the user who owns the list being requested by a slug.
list_id:
The numerical id of the list.
slug:
You can identify a list by its slug instead of its numerical id. If you
decide to do so, note that you'll also have to specify the list owner
using the owner_id or owner_screen_name parameters.
Returns:
A twitter.List instance representing the removed list.
'''
url = '%s/lists/subscribers/destroy.json' % (self.base_url)
if not self._oauth_consumer:
raise TwitterError("The twitter.Api instance must be authenticated.")
data = {}
if list_id:
try:
data['list_id'] = long(list_id)
except:
raise TwitterError("list_id must be an integer")
elif slug:
data['slug'] = slug
if owner_id:
try:
data['owner_id'] = long(owner_id)
except:
raise TwitterError("owner_id must be an integer")
elif owner_screen_name:
data['owner_screen_name'] = owner_screen_name
else:
raise TwitterError("Identify list by list_id or owner_screen_name/owner_id and slug")
else:
raise TwitterError("Identify list by list_id or owner_screen_name/owner_id and slug")
json = self._FetchUrl(url, post_data = data)
data = self._ParseAndCheckTwitter(json)
return List.NewFromJsonDict(data)
def GetSubscriptions(self, user_id = None, screen_name = None, count = 20, cursor = -1):
'''
Obtain a collection of the lists the specified user is subscribed to, 20
lists per page by default. Does not include the user's own lists.
The twitter.Api instance must be authenticated.
Args:
user_id:
The ID of the user for whom to return results for. [Optional]
screen_name:
The screen name of the user for whom to return results for.
[Optional]
count:
The amount of results to return per page. Defaults to 20.
No more than 1000 results will ever be returned in a single page.
cursor:
"page" value that Twitter will use to start building the
list sequence from. -1 to start at the beginning.
Twitter will return in the result the values for next_cursor
and previous_cursor. [Optional]
Returns:
A sequence of twitter.List instances, one for each list
'''
if not self._oauth_consumer:
raise TwitterError("twitter.Api instance must be authenticated")
url = '%s/lists/subscriptions.json' % (self.base_url)
parameters = {}
try:
parameters['cursor'] = int(cursor)
except:
raise TwitterError("cursor must be an integer")
try:
parameters['count'] = int(count)
except:
raise TwitterError("count must be an integer")
if user_id is not None:
try:
parameters['user_id'] = long(user_id)
except:
raise TwitterError('user_id must be an integer')
elif screen_name is not None:
parameters['screen_name'] = screen_name
else:
raise TwitterError('Specify user_id or screen_name')
json = self._FetchUrl(url, parameters = parameters)
data = self._ParseAndCheckTwitter(json)
return [List.NewFromJsonDict(x) for x in data['lists']]
def GetLists(self, user_id = None, screen_name = None, count = None, cursor = -1):
'''Fetch the sequence of lists for a user.
The twitter.Api instance must be authenticated.
Args:
user_id:
The ID of the user for whom to return results for. [Optional]
screen_name:
The screen name of the user for whom to return results for.
[Optional]
count:
The amount of results to return per page. Defaults to 20. No more than
1000 results will ever be returned in a single page.
[Optional]
cursor:
"page" value that Twitter will use to start building the
list sequence from. -1 to start at the beginning.
Twitter will return in the result the values for next_cursor
and previous_cursor. [Optional]
Returns:
A sequence of twitter.List instances, one for each list
'''
if not self._oauth_consumer:
raise TwitterError("twitter.Api instance must be authenticated")
url = '%s/lists/ownerships.json' % self.base_url
result = []
parameters = {}
if user_id is not None:
try:
parameters['user_id'] = long(user_id)
except:
raise TwitterError('user_id must be an integer')
elif screen_name is not None:
parameters['screen_name'] = screen_name
else:
raise TwitterError('Specify user_id or screen_name')
if count is not None:
parameters['count'] = count
while True:
parameters['cursor'] = cursor
json = self._FetchUrl(url, parameters = parameters)
data = self._ParseAndCheckTwitter(json)
result += [List.NewFromJsonDict(x) for x in data['lists']]
if 'next_cursor' in data:
if data['next_cursor'] == 0 or data['next_cursor'] == data['previous_cursor']:
break
else:
cursor = data['next_cursor']
else:
break
return result
def VerifyCredentials(self):
'''Returns a twitter.User instance if the authenticating user is valid.
Returns:
A twitter.User instance representing that user if the
credentials are valid, None otherwise.
'''
if not self._oauth_consumer:
raise TwitterError("Api instance must first be given user credentials.")
url = '%s/account/verify_credentials.json' % self.base_url
try:
json = self._FetchUrl(url, no_cache = True)
except urllib2.HTTPError, http_error:
if http_error.code == httplib.UNAUTHORIZED:
return None
else:
raise http_error
data = self._ParseAndCheckTwitter(json)
return User.NewFromJsonDict(data)
def SetCache(self, cache):
'''Override the default cache. Set to None to prevent caching.
Args:
cache:
An instance that supports the same API as the twitter._FileCache
'''
if cache == DEFAULT_CACHE:
self._cache = _FileCache()
else:
self._cache = cache
def SetUrllib(self, urllib):
'''Override the default urllib implementation.
Args:
urllib:
An instance that supports the same API as the urllib2 module
'''
self._urllib = urllib
def SetCacheTimeout(self, cache_timeout):
'''Override the default cache timeout.
Args:
cache_timeout:
Time, in seconds, that responses should be reused.
'''
self._cache_timeout = cache_timeout
def SetUserAgent(self, user_agent):
'''Override the default user agent
Args:
user_agent:
A string that should be sent to the server as the User-Agent header.
'''
self._request_headers['User-Agent'] = user_agent
def SetXTwitterHeaders(self, client, url, version):
'''Set the X-Twitter HTTP headers that will be sent to the server.
Args:
client:
The client name as a string. Will be sent to the server as
the 'X-Twitter-Client' header.
url:
The URL of the meta.xml as a string. Will be sent to the server
as the 'X-Twitter-Client-URL' header.
version:
The client version as a string. Will be sent to the server
as the 'X-Twitter-Client-Version' header.
'''
self._request_headers['X-Twitter-Client'] = client
self._request_headers['X-Twitter-Client-URL'] = url
self._request_headers['X-Twitter-Client-Version'] = version
def SetSource(self, source):
'''Suggest the "from source" value to be displayed on the Twitter web site.
The value of the 'source' parameter must be first recognized by
the Twitter server. New source values are authorized on a case by
case basis by the Twitter development team.
Args:
source:
The source name as a string. Will be sent to the server as
the 'source' parameter.
'''
self._default_params['source'] = source
def GetRateLimitStatus(self, resources = None):
'''Fetch the rate limit status for the currently authorized user.
Args:
resources:
A comma-separated list of resource families you want to know the current
rate limit disposition of.
[Optional]
Returns:
A dictionary containing the time the limit will reset (reset_time),
the number of remaining hits allowed before the reset (remaining_hits),
the number of hits allowed in a 60-minute period (hourly_limit), and
the time of the reset in seconds since The Epoch (reset_time_in_seconds).
'''
parameters = {}
if resources is not None:
parameters['resources'] = resources
url = '%s/application/rate_limit_status.json' % self.base_url
json = self._FetchUrl(url, parameters = parameters, no_cache = True)
data = self._ParseAndCheckTwitter(json)
return data
def MaximumHitFrequency(self):
'''Determines the minimum number of seconds that a program must wait
before hitting the server again without exceeding the rate_limit
imposed for the currently authenticated user.
Returns:
The minimum second interval that a program must use so as to not
exceed the rate_limit imposed for the user.
'''
rate_status = self.GetRateLimitStatus()
reset_time = rate_status.get('reset_time', None)
limit = rate_status.get('remaining_hits', None)
if reset_time:
# put the reset time into a datetime object
reset = datetime.datetime(*rfc822.parsedate(reset_time)[:7])
# find the difference in time between now and the reset time + 1 hour
delta = reset + datetime.timedelta(hours = 1) - datetime.datetime.utcnow()
if not limit:
return int(delta.seconds)
# determine the minimum number of seconds allowed as a regular interval
max_frequency = int(delta.seconds / limit) + 1
# return the number of seconds
return max_frequency
return 60
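# Illustrative usage sketch (added); assumes an authenticated `api` and the
# standard-library `time` module. A polling loop can respect the rate limit by
# sleeping for the interval this method computes between calls.
#   import time
#   while True:
#       print len(api.GetMentions())
#       time.sleep(api.MaximumHitFrequency())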
def _BuildUrl(self, url, path_elements = None, extra_params = None):
# Break url into constituent parts
(scheme, netloc, path, params, query, fragment) = urlparse.urlparse(url)
# Add any additional path elements to the path
if path_elements:
# Filter out the path elements that have a value of None
p = [i for i in path_elements if i]
if not path.endswith('/'):
path += '/'
path += '/'.join(p)
# Add any additional query parameters to the query string
if extra_params and len(extra_params) > 0:
extra_query = self._EncodeParameters(extra_params)
# Add it to the existing query
if query:
query += '&' + extra_query
else:
query = extra_query
# Return the rebuilt URL
return urlparse.urlunparse((scheme, netloc, path, params, query, fragment))
def _InitializeRequestHeaders(self, request_headers):
if request_headers:
self._request_headers = request_headers
else:
self._request_headers = {}
def _InitializeUserAgent(self):
user_agent = 'Python-urllib/%s (python-twitter/%s)' % \
(self._urllib.__version__, __version__)
self.SetUserAgent(user_agent)
def _InitializeDefaultParameters(self):
self._default_params = {}
def _DecompressGzippedResponse(self, response):
raw_data = response.read()
if response.headers.get('content-encoding', None) == 'gzip':
url_data = gzip.GzipFile(fileobj = StringIO.StringIO(raw_data)).read()
else:
url_data = raw_data
return url_data
def _Encode(self, s):
if self._input_encoding:
return unicode(s, self._input_encoding).encode('utf-8')
else:
return unicode(s).encode('utf-8')
def _EncodeParameters(self, parameters):
'''Return a string in key=value&key=value form
Values of None are not included in the output string.
Args:
parameters:
A dict of (key, value) tuples, where value is encoded as
specified by self._encoding
Returns:
A URL-encoded string in "key=value&key=value" form
'''
if parameters is None:
return None
else:
return urllib.urlencode(dict([(k, self._Encode(v)) for k, v in parameters.items() if v is not None]))
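# Illustrative behaviour sketch (added); _EncodeParameters is an internal
# helper, shown here only to document that None values are dropped before
# URL-encoding. With an Api instance `api`:
#   api._EncodeParameters({'count': 20, 'since_id': None})  # -> 'count=20'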
def _EncodePostData(self, post_data):
'''Return a string in key=value&key=value form
Values are assumed to be encoded in the format specified by self._encoding,
and are subsequently URL encoded.
Args:
post_data:
A dict of (key, value) tuples, where value is encoded as
specified by self._encoding
Returns:
A URL-encoded string in "key=value&key=value" form
'''
if post_data is None:
return None
else:
return urllib.urlencode(dict([(k, self._Encode(v)) for k, v in post_data.items()]))
def _ParseAndCheckTwitter(self, json):
"""Try and parse the JSON returned from Twitter and return
an empty dictionary if there is any error. This is a purely
defensive check because during some Twitter network outages
it will return an HTML failwhale page."""
try:
data = simplejson.loads(json)
self._CheckForTwitterError(data)
except ValueError:
if "<title>Twitter / Over capacity</title>" in json:
raise TwitterError("Capacity Error")
if "<title>Twitter / Error</title>" in json:
raise TwitterError("Technical Error")
raise TwitterError("json decoding")
return data
def _CheckForTwitterError(self, data):
"""Raises a TwitterError if twitter returns an error message.
Args:
data:
A python dict created from the Twitter json response
Raises:
TwitterError wrapping the twitter error message if one exists.
"""
# Twitter errors are relatively unlikely, so it is faster
# to check first, rather than try and catch the exception
if 'error' in data:
raise TwitterError(data['error'])
if 'errors' in data:
raise TwitterError(data['errors'])
def _FetchUrl(self,
url,
post_data = None,
parameters = None,
no_cache = None,
use_gzip_compression = None):
'''Fetch a URL, optionally caching for a specified time.
Args:
url:
The URL to retrieve
post_data:
A dict of (str, unicode) key/value pairs.
If set, POST will be used.
parameters:
A dict whose key/value pairs should be encoded and added
to the query string. [Optional]
no_cache:
If true, overrides the cache on the current request
use_gzip_compression:
If True, tells the server to gzip-compress the response.
It does not apply to POST requests.
Defaults to None, which will get the value to use from
the instance variable self._use_gzip [Optional]
Returns:
A string containing the body of the response.
'''
# Build the extra parameters dict
extra_params = {}
if self._default_params:
extra_params.update(self._default_params)
if parameters:
extra_params.update(parameters)
if post_data:
http_method = "POST"
else:
http_method = "GET"
if self._debugHTTP:
_debug = 1
else:
_debug = 0
http_handler = self._urllib.HTTPHandler(debuglevel = _debug)
https_handler = self._urllib.HTTPSHandler(debuglevel = _debug)
http_proxy = os.environ.get('http_proxy')
https_proxy = os.environ.get('https_proxy')
if http_proxy is None or https_proxy is None :
proxy_status = False
else :
proxy_status = True
opener = self._urllib.OpenerDirector()
opener.add_handler(http_handler)
opener.add_handler(https_handler)
if proxy_status is True :
proxy_handler = self._urllib.ProxyHandler({'http':str(http_proxy), 'https': str(https_proxy)})
opener.add_handler(proxy_handler)
if use_gzip_compression is None:
use_gzip = self._use_gzip
else:
use_gzip = use_gzip_compression
# Set up compression
if use_gzip and not post_data:
opener.addheaders.append(('Accept-Encoding', 'gzip'))
if self._oauth_consumer is not None:
if post_data and http_method == "POST":
parameters = post_data.copy()
req = oauth.Request.from_consumer_and_token(self._oauth_consumer,
token = self._oauth_token,
http_method = http_method,
http_url = url, parameters = parameters)
req.sign_request(self._signature_method_hmac_sha1, self._oauth_consumer, self._oauth_token)
headers = req.to_header()
if http_method == "POST":
encoded_post_data = req.to_postdata()
else:
encoded_post_data = None
url = req.to_url()
else:
url = self._BuildUrl(url, extra_params = extra_params)
encoded_post_data = self._EncodePostData(post_data)
# Open and return the URL immediately if we're not going to cache
if encoded_post_data or no_cache or not self._cache or not self._cache_timeout:
response = opener.open(url, encoded_post_data)
url_data = self._DecompressGzippedResponse(response)
opener.close()
else:
# Unique keys are a combination of the url and the oAuth Consumer Key
if self._consumer_key:
key = self._consumer_key + ':' + url
else:
key = url
# See if it has been cached before
last_cached = self._cache.GetCachedTime(key)
# If the cached version is outdated then fetch another and store it
if not last_cached or time.time() >= last_cached + self._cache_timeout:
try:
response = opener.open(url, encoded_post_data)
url_data = self._DecompressGzippedResponse(response)
self._cache.Set(key, url_data)
except urllib2.HTTPError, e:
print e
opener.close()
else:
url_data = self._cache.Get(key)
# Always return the latest version
return url_data
class _FileCacheError(Exception):
'''Base exception class for FileCache related errors'''
class _FileCache(object):
DEPTH = 3
def __init__(self, root_directory = None):
self._InitializeRootDirectory(root_directory)
def Get(self, key):
path = self._GetPath(key)
if os.path.exists(path):
return open(path).read()
else:
return None
def Set(self, key, data):
path = self._GetPath(key)
directory = os.path.dirname(path)
if not os.path.exists(directory):
os.makedirs(directory)
if not os.path.isdir(directory):
raise _FileCacheError('%s exists but is not a directory' % directory)
temp_fd, temp_path = tempfile.mkstemp()
temp_fp = os.fdopen(temp_fd, 'w')
temp_fp.write(data)
temp_fp.close()
if not path.startswith(self._root_directory):
raise _FileCacheError('%s does not appear to live under %s' %
(path, self._root_directory))
if os.path.exists(path):
os.remove(path)
os.rename(temp_path, path)
def Remove(self, key):
path = self._GetPath(key)
if not path.startswith(self._root_directory):
raise _FileCacheError('%s does not appear to live under %s' %
(path, self._root_directory))
if os.path.exists(path):
os.remove(path)
def GetCachedTime(self, key):
path = self._GetPath(key)
if os.path.exists(path):
return os.path.getmtime(path)
else:
return None
def _GetUsername(self):
'''Attempt to find the username in a cross-platform fashion.'''
try:
return os.getenv('USER') or \
os.getenv('LOGNAME') or \
os.getenv('USERNAME') or \
os.getlogin() or \
'nobody'
except (AttributeError, IOError, OSError), e:
return 'nobody'
def _GetTmpCachePath(self):
username = self._GetUsername()
cache_directory = 'python.cache_' + username
return os.path.join(tempfile.gettempdir(), cache_directory)
def _InitializeRootDirectory(self, root_directory):
if not root_directory:
root_directory = self._GetTmpCachePath()
root_directory = os.path.abspath(root_directory)
if not os.path.exists(root_directory):
os.mkdir(root_directory)
if not os.path.isdir(root_directory):
raise _FileCacheError('%s exists but is not a directory' %
root_directory)
self._root_directory = root_directory
def _GetPath(self, key):
try:
hashed_key = md5(key).hexdigest()
except TypeError:
hashed_key = md5.new(key).hexdigest()
return os.path.join(self._root_directory,
self._GetPrefix(hashed_key),
hashed_key)
def _GetPrefix(self, hashed_key):
return os.path.sep.join(hashed_key[0:_FileCache.DEPTH])
| gpl-3.0 |
TOC-Shard/moul-scripts | Python/tldnEmgrPhase0.py | 6 | 5064 | # -*- coding: utf-8 -*-
""" *==LICENSE==*
CyanWorlds.com Engine - MMOG client, server and tools
Copyright (C) 2011 Cyan Worlds, Inc.
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
Additional permissions under GNU GPL version 3 section 7
If you modify this Program, or any covered work, by linking or
combining it with any of RAD Game Tools Bink SDK, Autodesk 3ds Max SDK,
NVIDIA PhysX SDK, Microsoft DirectX SDK, OpenSSL library, Independent
JPEG Group JPEG library, Microsoft Windows Media SDK, or Apple QuickTime SDK
(or a modified version of those libraries),
containing parts covered by the terms of the Bink SDK EULA, 3ds Max EULA,
PhysX SDK EULA, DirectX SDK EULA, OpenSSL and SSLeay licenses, IJG
JPEG Library README, Windows Media SDK EULA, or QuickTime SDK EULA, the
licensors of this Program grant you additional
permission to convey the resulting work. Corresponding Source for a
non-source form of such a combination shall include the source code for
the parts of OpenSSL and IJG JPEG Library used as well as that of the covered
work.
You can contact Cyan Worlds, Inc. by email legal@cyan.com
or by snail mail at:
Cyan Worlds, Inc.
14617 N Newport Hwy
Mead, WA 99021
*==LICENSE==* """
"""
Module: tldnEmgrPhase0.py
Age: Teledahn
Date: January 2002
Event Manager interface for Teledahn Phase 0 content
"""
from Plasma import *
from PlasmaTypes import *
import string
#globals
variable = None
BooleanVARs = [
"tldnPumpSwitchFunc"
]
AgeStartedIn = None
#This identifies the maximum valid value for INT Variables
#The range is always from 00 to the value specified here
def OutOfRange(VARname, NewSDLValue, myMaxINT):
PtDebugPrint("nb01EmgrPhase0.OutOfRange:\tERROR: Variable %s expected range from 0 - %d. Received value of %d" % (VARname,myMaxINT,NewSDLValue))
# the expected range of these integers is defined in the list above. Convention is "variablename" + "MaxINT"
StateVARs = {
}
class tldnEmgrPhase0(ptResponder):
def __init__(self):
ptResponder.__init__(self)
self.id = 5230
version = 3
self.version = version
print "__init__tldnEmgrPhase0 v.", version
def OnFirstUpdate(self):
global AgeStartedIn
AgeStartedIn = PtGetAgeName()
def OnServerInitComplete(self):
if AgeStartedIn == PtGetAgeName():
ageSDL = PtGetAgeSDL()
for variable in BooleanVARs:
#~ print "Tying together BOOL variable", variable
ageSDL.setNotify(self.key,variable,0.0)
self.IManageBOOLs(variable, "")
for variable in StateVARs:
#~ print "Tying together INT", variable
ageSDL.setNotify(self.key,variable,0.0)
StateVARs[variable](variable, ageSDL[variable][0])
def OnSDLNotify(self,VARname,SDLname,PlayerID,tag):
global variable
global sdlvalue
if AgeStartedIn == PtGetAgeName():
ageSDL = PtGetAgeSDL()
PtDebugPrint("tldnEmgrPhase0.SDLNotify - name = %s, SDLname = %s" % (VARname,SDLname))
if VARname in BooleanVARs:
print "tldnEmgrPhase0.OnSDLNotify : %s is a BOOLEAN Variable" % (VARname)
self.IManageBOOLs(VARname,SDLname)
elif VARname in StateVARs.keys():
ageSDL = PtGetAgeSDL()
NewSDLValue = ageSDL[VARname][0]
StateVARs[VARname](VARname, NewSDLValue)
else:
PtDebugPrint("tldnEmgrPhase0.OnSDLNotify:\tERROR: Variable %s was not recognized as a Boolean, Performance, or State Variable. " % (VARname))
pass
def IManageBOOLs(self,VARname,SDLname):
if AgeStartedIn == PtGetAgeName():
ageSDL = PtGetAgeSDL()
if ageSDL[VARname][0] == 1: # are we paging things in?
PtDebugPrint("tldnEmgrPhase0.OnSDLNotify:\tPaging in room %s" % (VARname))
PtPageInNode(VARname)
elif ageSDL[VARname][0] == 0: #are we paging things out?
print "variable = ", VARname
PtDebugPrint("tldnEmgrPhase0.OnSDLNotify:\tPaging out room %s" % (VARname))
PtPageOutNode(VARname)
else:
sdlvalue = ageSDL[VARname][0]
PtDebugPrint("tldnEmgrPhase0.OnSDLNotify:\tERROR: Variable %s had unexpected SDL value of %s" % (VARname,sdlvalue))
| gpl-3.0 |
johanvdw/rasterio | rasterio/rio/bands.py | 1 | 4139 | import logging
import click
from cligj import files_inout_arg, format_opt
from .helpers import resolve_inout
from . import options
import rasterio
from rasterio.five import zip_longest
PHOTOMETRIC_CHOICES = [val.lower() for val in [
'MINISBLACK',
'MINISWHITE',
'RGB',
'CMYK',
'YCBCR',
'CIELAB',
'ICCLAB',
'ITULAB']]
# Stack command.
@click.command(short_help="Stack a number of bands into a multiband dataset.")
@files_inout_arg
@options.output_opt
@format_opt
@options.bidx_mult_opt
@click.option('--photometric', default=None,
type=click.Choice(PHOTOMETRIC_CHOICES),
help="Photometric interpretation")
@click.pass_context
def stack(ctx, files, output, driver, bidx, photometric):
"""Stack a number of bands from one or more input files into a
multiband dataset.
Input datasets must be of a kind: same data type, dimensions, etc. The
output is cloned from the first input.
By default, rio-stack will take all bands from each input and write them
in the same order to the output. Optionally, bands for each input may be
specified using a simple syntax:
--bidx N takes the Nth band from the input (first band is 1).
--bidx M,N,O takes bands M, N, and O.
--bidx M..O takes bands M-O, inclusive.
--bidx ..N takes all bands up to and including N.
--bidx N.. takes all bands from N to the end.
Examples using the Rasterio testing dataset, each of which produces a copy.
rio stack RGB.byte.tif -o stacked.tif
rio stack RGB.byte.tif --bidx 1,2,3 -o stacked.tif
rio stack RGB.byte.tif --bidx 1..3 -o stacked.tif
rio stack RGB.byte.tif --bidx ..2 RGB.byte.tif --bidx 3.. -o stacked.tif
"""
verbosity = (ctx.obj and ctx.obj.get('verbosity')) or 2
logger = logging.getLogger('rio')
try:
with rasterio.drivers(CPL_DEBUG=verbosity>2):
output, files = resolve_inout(files=files, output=output)
output_count = 0
indexes = []
for path, item in zip_longest(files, bidx, fillvalue=None):
with rasterio.open(path) as src:
src_indexes = src.indexes
if item is None:
indexes.append(src_indexes)
output_count += len(src_indexes)
elif '..' in item:
start, stop = map(
lambda x: int(x) if x else None, item.split('..'))
if start is None:
start = 1
indexes.append(src_indexes[slice(start-1, stop)])
output_count += len(src_indexes[slice(start-1, stop)])
else:
parts = list(map(int, item.split(',')))
if len(parts) == 1:
indexes.append(parts[0])
output_count += 1
else:
parts = list(parts)
indexes.append(parts)
output_count += len(parts)
with rasterio.open(files[0]) as first:
kwargs = first.meta
kwargs['transform'] = kwargs.pop('affine')
kwargs.update(
driver=driver,
count=output_count)
if photometric:
kwargs['photometric'] = photometric
with rasterio.open(output, 'w', **kwargs) as dst:
dst_idx = 1
for path, index in zip(files, indexes):
with rasterio.open(path) as src:
if isinstance(index, int):
data = src.read(index)
dst.write(data, dst_idx)
dst_idx += 1
elif isinstance(index, list):
data = src.read(index)
dst.write(data, range(dst_idx, dst_idx+len(index)))
dst_idx += len(index)
except Exception:
logger.exception("Exception caught during processing")
raise click.Abort()
| bsd-3-clause |
simongoffin/my_odoo_tutorial | addons/l10n_ae/__openerp__.py | 337 | 1579 | # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2014 Tech Receptives (<http://techreceptives.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'U.A.E. - Accounting',
'version': '1.0',
'author': 'Tech Receptives',
'website': 'http://www.techreceptives.com',
'category': 'Localization/Account Charts',
'description': """
United Arab Emirates accounting chart and localization.
=======================================================
""",
'depends': ['base', 'account', 'account_chart'],
'demo': [ ],
'data': [
'l10n_ae_chart.xml',
'l10n_ae_wizard.xml',
],
'installable': True,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
legalsylvain/OpenUpgrade | addons/account/wizard/account_fiscalyear_close.py | 38 | 15050 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
from openerp.tools.translate import _
class account_fiscalyear_close(osv.osv_memory):
"""
Closes Account Fiscalyear and Generate Opening entries for New Fiscalyear
"""
_name = "account.fiscalyear.close"
_description = "Fiscalyear Close"
_columns = {
'fy_id': fields.many2one('account.fiscalyear', \
'Fiscal Year to close', required=True, help="Select a Fiscal year to close"),
'fy2_id': fields.many2one('account.fiscalyear', \
'New Fiscal Year', required=True),
'journal_id': fields.many2one('account.journal', 'Opening Entries Journal', domain="[('type','=','situation')]", required=True, help='The best practice here is to use a journal dedicated to contain the opening entries of all fiscal years. Note that you should define it with default debit/credit accounts, of type \'situation\' and with a centralized counterpart.'),
'period_id': fields.many2one('account.period', 'Opening Entries Period', required=True),
'report_name': fields.char('Name of new entries',size=64, required=True, help="Give name of the new entries"),
}
_defaults = {
'report_name': lambda self, cr, uid, context: _('End of Fiscal Year Entry'),
}
def data_save(self, cr, uid, ids, context=None):
"""
This function closes the account fiscal year and creates entries in the new fiscal year
@param cr: the current row, from the database cursor,
@param uid: the current user’s ID for security checks,
@param ids: List of Account fiscalyear close state’s IDs
"""
def _reconcile_fy_closing(cr, uid, ids, context=None):
"""
This private function manually does the reconciliation on the account_move_line records given as `ids´, directly
through psql. It is necessary to do it this way because the usual `reconcile()´ function on the account.move.line
object is really resource greedy (it is not meant to reconcile thousands of records at once) and
it does a lot of different computations that are useless in this particular case.
"""
# check that the reconciliation concerns journal entries from only one company
cr.execute('select distinct(company_id) from account_move_line where id in %s',(tuple(ids),))
if len(cr.fetchall()) > 1:
raise osv.except_osv(_('Warning!'), _('The entries to reconcile should belong to the same company.'))
r_id = self.pool.get('account.move.reconcile').create(cr, uid, {'type': 'auto', 'opening_reconciliation': True})
cr.execute('update account_move_line set reconcile_id = %s where id in %s',(r_id, tuple(ids),))
return r_id
obj_acc_period = self.pool.get('account.period')
obj_acc_fiscalyear = self.pool.get('account.fiscalyear')
obj_acc_journal = self.pool.get('account.journal')
obj_acc_move = self.pool.get('account.move')
obj_acc_move_line = self.pool.get('account.move.line')
obj_acc_account = self.pool.get('account.account')
obj_acc_journal_period = self.pool.get('account.journal.period')
currency_obj = self.pool.get('res.currency')
data = self.browse(cr, uid, ids, context=context)
if context is None:
context = {}
fy_id = data[0].fy_id.id
cr.execute("SELECT id FROM account_period WHERE date_stop < (SELECT date_start FROM account_fiscalyear WHERE id = %s)", (str(data[0].fy2_id.id),))
fy_period_set = ','.join(map(lambda id: str(id[0]), cr.fetchall()))
cr.execute("SELECT id FROM account_period WHERE date_start > (SELECT date_stop FROM account_fiscalyear WHERE id = %s)", (str(fy_id),))
fy2_period_set = ','.join(map(lambda id: str(id[0]), cr.fetchall()))
if not fy_period_set or not fy2_period_set:
raise osv.except_osv(_('User Error!'), _('The periods to generate opening entries cannot be found.'))
period = obj_acc_period.browse(cr, uid, data[0].period_id.id, context=context)
new_fyear = obj_acc_fiscalyear.browse(cr, uid, data[0].fy2_id.id, context=context)
old_fyear = obj_acc_fiscalyear.browse(cr, uid, fy_id, context=context)
new_journal = data[0].journal_id.id
new_journal = obj_acc_journal.browse(cr, uid, new_journal, context=context)
company_id = new_journal.company_id.id
if not new_journal.default_credit_account_id or not new_journal.default_debit_account_id:
raise osv.except_osv(_('User Error!'),
_('The journal must have default credit and debit account.'))
if (not new_journal.centralisation) or new_journal.entry_posted:
raise osv.except_osv(_('User Error!'),
_('The journal must have centralized counterpart without the Skipping draft state option checked.'))
#delete existing move and move lines if any
move_ids = obj_acc_move.search(cr, uid, [
('journal_id', '=', new_journal.id), ('period_id', '=', period.id)])
if move_ids:
move_line_ids = obj_acc_move_line.search(cr, uid, [('move_id', 'in', move_ids)])
obj_acc_move_line._remove_move_reconcile(cr, uid, move_line_ids, opening_reconciliation=True, context=context)
obj_acc_move_line.unlink(cr, uid, move_line_ids, context=context)
obj_acc_move.unlink(cr, uid, move_ids, context=context)
cr.execute("SELECT id FROM account_fiscalyear WHERE date_stop < %s", (str(new_fyear.date_start),))
result = cr.dictfetchall()
fy_ids = ','.join([str(x['id']) for x in result])
query_line = obj_acc_move_line._query_get(cr, uid,
obj='account_move_line', context={'fiscalyear': fy_ids})
#create the opening move
vals = {
'name': '/',
'ref': '',
'period_id': period.id,
'date': period.date_start,
'journal_id': new_journal.id,
}
move_id = obj_acc_move.create(cr, uid, vals, context=context)
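# Note (added for clarity): the three numbered blocks below generate the
# opening entries according to each account type's deferral (close) method:
#   'unreconciled' -> carry over only the move lines that are not reconciled,
#   'detail'       -> carry over every move line of the closed fiscal year,
#   'balance'      -> carry over a single line holding the account balance.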
# 1. report of the accounts with deferral method == 'unreconciled'
cr.execute('''
SELECT a.id
FROM account_account a
LEFT JOIN account_account_type t ON (a.user_type = t.id)
WHERE a.active
AND a.type != 'view'
AND a.company_id = %s
AND t.close_method = %s''', (company_id, 'unreconciled', ))
account_ids = map(lambda x: x[0], cr.fetchall())
if account_ids:
cr.execute('''
INSERT INTO account_move_line (
name, create_uid, create_date, write_uid, write_date,
statement_id, journal_id, currency_id, date_maturity,
partner_id, blocked, credit, state, debit,
ref, account_id, period_id, date, move_id, amount_currency,
quantity, product_id, company_id)
(SELECT name, create_uid, create_date, write_uid, write_date,
statement_id, %s,currency_id, date_maturity, partner_id,
blocked, credit, 'draft', debit, ref, account_id,
%s, (%s) AS date, %s, amount_currency, quantity, product_id, company_id
FROM account_move_line
WHERE account_id IN %s
AND ''' + query_line + '''
AND reconcile_id IS NULL)''', (new_journal.id, period.id, period.date_start, move_id, tuple(account_ids),))
# We also have to consider all move lines that were reconciled
# in another fiscal year, and report them too
cr.execute('''
INSERT INTO account_move_line (
name, create_uid, create_date, write_uid, write_date,
statement_id, journal_id, currency_id, date_maturity,
partner_id, blocked, credit, state, debit,
ref, account_id, period_id, date, move_id, amount_currency,
quantity, product_id, company_id)
(SELECT
b.name, b.create_uid, b.create_date, b.write_uid, b.write_date,
b.statement_id, %s, b.currency_id, b.date_maturity,
b.partner_id, b.blocked, b.credit, 'draft', b.debit,
b.ref, b.account_id, %s, (%s) AS date, %s, b.amount_currency,
b.quantity, b.product_id, b.company_id
FROM account_move_line b
WHERE b.account_id IN %s
AND b.reconcile_id IS NOT NULL
AND b.period_id IN ('''+fy_period_set+''')
AND b.reconcile_id IN (SELECT DISTINCT(reconcile_id)
FROM account_move_line a
WHERE a.period_id IN ('''+fy2_period_set+''')))''', (new_journal.id, period.id, period.date_start, move_id, tuple(account_ids),))
# 2. report of the accounts with deferral method == 'detail'
cr.execute('''
SELECT a.id
FROM account_account a
LEFT JOIN account_account_type t ON (a.user_type = t.id)
WHERE a.active
AND a.type != 'view'
AND a.company_id = %s
AND t.close_method = %s''', (company_id, 'detail', ))
account_ids = map(lambda x: x[0], cr.fetchall())
if account_ids:
cr.execute('''
INSERT INTO account_move_line (
name, create_uid, create_date, write_uid, write_date,
statement_id, journal_id, currency_id, date_maturity,
partner_id, blocked, credit, state, debit,
ref, account_id, period_id, date, move_id, amount_currency,
quantity, product_id, company_id)
(SELECT name, create_uid, create_date, write_uid, write_date,
statement_id, %s,currency_id, date_maturity, partner_id,
blocked, credit, 'draft', debit, ref, account_id,
%s, (%s) AS date, %s, amount_currency, quantity, product_id, company_id
FROM account_move_line
WHERE account_id IN %s
AND ''' + query_line + ''')
''', (new_journal.id, period.id, period.date_start, move_id, tuple(account_ids),))
# 3. report of the accounts with deferral method == 'balance'
cr.execute('''
SELECT a.id
FROM account_account a
LEFT JOIN account_account_type t ON (a.user_type = t.id)
WHERE a.active
AND a.type != 'view'
AND a.company_id = %s
AND t.close_method = %s''', (company_id, 'balance', ))
account_ids = map(lambda x: x[0], cr.fetchall())
query_1st_part = """
INSERT INTO account_move_line (
debit, credit, name, date, move_id, journal_id, period_id,
account_id, currency_id, amount_currency, company_id, state) VALUES
"""
query_2nd_part = ""
query_2nd_part_args = []
for account in obj_acc_account.browse(cr, uid, account_ids, context={'fiscalyear': fy_id}):
company_currency_id = self.pool.get('res.users').browse(cr, uid, uid).company_id.currency_id
if not currency_obj.is_zero(cr, uid, company_currency_id, abs(account.balance)):
if query_2nd_part:
query_2nd_part += ','
query_2nd_part += "(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)"
query_2nd_part_args += (account.balance > 0 and account.balance or 0.0,
account.balance < 0 and -account.balance or 0.0,
data[0].report_name,
period.date_start,
move_id,
new_journal.id,
period.id,
account.id,
account.currency_id and account.currency_id.id or None,
account.foreign_balance if account.currency_id else 0.0,
account.company_id.id,
'draft')
if query_2nd_part:
cr.execute(query_1st_part + query_2nd_part, tuple(query_2nd_part_args))
#validate and centralize the opening move
obj_acc_move.validate(cr, uid, [move_id], context=context)
#reconcile all the move.line of the opening move
ids = obj_acc_move_line.search(cr, uid, [('journal_id', '=', new_journal.id),
('period_id.fiscalyear_id','=',new_fyear.id)])
if ids:
reconcile_id = _reconcile_fy_closing(cr, uid, ids, context=context)
# set the creation date of the reconciliation to the first day of the new fiscal year, in order to have good figures in the aged trial balance
self.pool.get('account.move.reconcile').write(cr, uid, [reconcile_id], {'create_date': new_fyear.date_start}, context=context)
#create the journal.period object and link it to the old fiscalyear
new_period = data[0].period_id.id
ids = obj_acc_journal_period.search(cr, uid, [('journal_id', '=', new_journal.id), ('period_id', '=', new_period)])
if not ids:
ids = [obj_acc_journal_period.create(cr, uid, {
'name': (new_journal.name or '') + ':' + (period.code or ''),
'journal_id': new_journal.id,
'period_id': period.id
})]
cr.execute('UPDATE account_fiscalyear ' \
'SET end_journal_period_id = %s ' \
'WHERE id = %s', (ids[0], old_fyear.id))
return {'type': 'ir.actions.act_window_close'}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
The-Compiler/pytest | src/_pytest/warnings.py | 12 | 4496 | import sys
import warnings
from contextlib import contextmanager
from typing import Generator
from typing import Optional
import pytest
from _pytest.compat import TYPE_CHECKING
from _pytest.config import apply_warning_filters
from _pytest.config import Config
from _pytest.config import parse_warning_filter
from _pytest.main import Session
from _pytest.nodes import Item
from _pytest.terminal import TerminalReporter
if TYPE_CHECKING:
from typing_extensions import Literal
def pytest_configure(config: Config) -> None:
config.addinivalue_line(
"markers",
"filterwarnings(warning): add a warning filter to the given test. "
"see https://docs.pytest.org/en/stable/warnings.html#pytest-mark-filterwarnings ",
)
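# Illustrative usage of the marker registered above (example added here, not
# part of the original module): a test can attach warning filters with
#
#   @pytest.mark.filterwarnings("ignore::DeprecationWarning")
#   def test_quiet():
#       ...
#
# Each argument uses the same syntax as Python's -W option / warnings.filterwarnings().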
@contextmanager
def catch_warnings_for_item(
config: Config,
ihook,
when: "Literal['config', 'collect', 'runtest']",
item: Optional[Item],
) -> Generator[None, None, None]:
"""Context manager that catches warnings generated in the contained execution block.
``item`` can be None if we are not in the context of an item execution.
Each warning captured triggers the ``pytest_warning_recorded`` hook.
"""
config_filters = config.getini("filterwarnings")
cmdline_filters = config.known_args_namespace.pythonwarnings or []
with warnings.catch_warnings(record=True) as log:
# mypy can't infer that record=True means log is not None; help it.
assert log is not None
if not sys.warnoptions:
# If user is not explicitly configuring warning filters, show deprecation warnings by default (#2908).
warnings.filterwarnings("always", category=DeprecationWarning)
warnings.filterwarnings("always", category=PendingDeprecationWarning)
apply_warning_filters(config_filters, cmdline_filters)
# apply filters from "filterwarnings" marks
nodeid = "" if item is None else item.nodeid
if item is not None:
for mark in item.iter_markers(name="filterwarnings"):
for arg in mark.args:
warnings.filterwarnings(*parse_warning_filter(arg, escape=False))
yield
for warning_message in log:
ihook.pytest_warning_captured.call_historic(
kwargs=dict(
warning_message=warning_message,
when=when,
item=item,
location=None,
)
)
ihook.pytest_warning_recorded.call_historic(
kwargs=dict(
warning_message=warning_message,
nodeid=nodeid,
when=when,
location=None,
)
)
def warning_record_to_str(warning_message: warnings.WarningMessage) -> str:
"""Convert a warnings.WarningMessage to a string."""
warn_msg = warning_message.message
msg = warnings.formatwarning(
str(warn_msg),
warning_message.category,
warning_message.filename,
warning_message.lineno,
warning_message.line,
)
return msg
@pytest.hookimpl(hookwrapper=True, tryfirst=True)
def pytest_runtest_protocol(item: Item) -> Generator[None, None, None]:
with catch_warnings_for_item(
config=item.config, ihook=item.ihook, when="runtest", item=item
):
yield
@pytest.hookimpl(hookwrapper=True, tryfirst=True)
def pytest_collection(session: Session) -> Generator[None, None, None]:
config = session.config
with catch_warnings_for_item(
config=config, ihook=config.hook, when="collect", item=None
):
yield
@pytest.hookimpl(hookwrapper=True)
def pytest_terminal_summary(
terminalreporter: TerminalReporter,
) -> Generator[None, None, None]:
config = terminalreporter.config
with catch_warnings_for_item(
config=config, ihook=config.hook, when="config", item=None
):
yield
@pytest.hookimpl(hookwrapper=True)
def pytest_sessionfinish(session: Session) -> Generator[None, None, None]:
config = session.config
with catch_warnings_for_item(
config=config, ihook=config.hook, when="config", item=None
):
yield
@pytest.hookimpl(hookwrapper=True)
def pytest_load_initial_conftests(
early_config: "Config",
) -> Generator[None, None, None]:
with catch_warnings_for_item(
config=early_config, ihook=early_config.hook, when="config", item=None
):
yield
| mit |
unho/pootle | tests/models/directory.py | 5 | 1789 | # -*- coding: utf-8 -*-
#
# Copyright (C) Pootle contributors.
#
# This file is a part of the Pootle project. It is distributed under the GPL3
# or later license. See the LICENSE file for a copy of the license and the
# AUTHORS file for copyright and authorship information.
import pytest
from django.core.exceptions import ValidationError
from pootle_app.models.directory import Directory
@pytest.mark.django_db
def test_directory_create_name_with_slashes_or_backslashes(root):
"""Test Directories are not created with (back)slashes on their name."""
with pytest.raises(ValidationError):
Directory.objects.create(name="slashed/name", parent=root)
with pytest.raises(ValidationError):
Directory.objects.create(name="backslashed\\name", parent=root)
@pytest.mark.django_db
def test_directory_create_bad(root):
"""Test directory cannot be created with name and no parent or without name
but no parent.
"""
with pytest.raises(ValidationError):
Directory.objects.create(name="name", parent=None)
with pytest.raises(ValidationError):
Directory.objects.create(name="", parent=root)
@pytest.mark.django_db
def test_dir_get_or_make_subdir(project0, language0, tp0, subdir0):
foo = project0.directory.get_or_make_subdir("foo")
assert not foo.tp
assert foo == project0.directory.get_or_make_subdir("foo")
foo = language0.directory.get_or_make_subdir("foo")
assert not foo.tp
assert foo == language0.directory.get_or_make_subdir("foo")
foo = tp0.directory.get_or_make_subdir("foo")
assert foo.tp == tp0
assert foo == tp0.directory.get_or_make_subdir("foo")
foo = subdir0.get_or_make_subdir("foo")
assert foo.tp == subdir0.tp
assert foo == subdir0.get_or_make_subdir("foo")
| gpl-3.0 |
2014cdbg7/2014cdbg7 | wsgi/static/Brython2.1.0-20140419-113919/Lib/io.py | 58 | 9385 | import builtins
open = builtins.open
# for seek()
SEEK_SET = 0
SEEK_CUR = 1
SEEK_END = 2
r"""File-like objects that read from or write to a string buffer.
This implements (nearly) all stdio methods.
f = StringIO() # ready for writing
f = StringIO(buf) # ready for reading
f.close() # explicitly release resources held
flag = f.isatty() # always false
pos = f.tell() # get current position
f.seek(pos) # set current position
f.seek(pos, mode) # mode 0: absolute; 1: relative; 2: relative to EOF
buf = f.read() # read until EOF
buf = f.read(n) # read up to n bytes
buf = f.readline() # read until end of line ('\n') or EOF
list = f.readlines()# list of f.readline() results until EOF
f.truncate([size]) # truncate file to at most size (default: current pos)
f.write(buf) # write at current position
f.writelines(list) # for line in list: f.write(line)
f.getvalue() # return whole file's contents as a string
Notes:
- Using a real file is often faster (but less convenient).
- There's also a much faster implementation in C, called cStringIO, but
it's not subclassable.
- fileno() is left unimplemented so that code which uses it triggers
an exception early.
- Seeking far beyond EOF and then writing will insert real null
bytes that occupy space in the buffer.
- There's a simple test set (see end of this file).
"""
try:
from errno import EINVAL
except ImportError:
EINVAL = 22
__all__ = ["StringIO"]
def _complain_ifclosed(closed):
if closed:
raise ValueError("I/O operation on closed file")
class StringIO:
"""class StringIO([buffer])
When a StringIO object is created, it can be initialized to an existing
string by passing the string to the constructor. If no string is given,
the StringIO will start empty.
The StringIO object can accept either Unicode or 8-bit strings, but
mixing the two may take some care. If both are used, 8-bit strings that
cannot be interpreted as 7-bit ASCII (that use the 8th bit) will cause
a UnicodeError to be raised when getvalue() is called.
"""
def __init__(self, buf = ''):
self.buf = buf
self.len = len(buf)
self.buflist = []
self.pos = 0
self.closed = False
self.softspace = 0
def __iter__(self):
return self
def next(self):
"""A file object is its own iterator, for example iter(f) returns f
(unless f is closed). When a file is used as an iterator, typically
in a for loop (for example, for line in f: print line), the next()
method is called repeatedly. This method returns the next input line,
or raises StopIteration when EOF is hit.
"""
_complain_ifclosed(self.closed)
r = self.readline()
if not r:
raise StopIteration
return r
def close(self):
"""Free the memory buffer.
"""
if not self.closed:
self.closed = True
del self.buf, self.pos
def isatty(self):
"""Returns False because StringIO objects are not connected to a
tty-like device.
"""
_complain_ifclosed(self.closed)
return False
def seek(self, pos, mode = 0):
"""Set the file's current position.
The mode argument is optional and defaults to 0 (absolute file
positioning); other values are 1 (seek relative to the current
position) and 2 (seek relative to the file's end).
There is no return value.
"""
_complain_ifclosed(self.closed)
if self.buflist:
self.buf += ''.join(self.buflist)
self.buflist = []
if mode == 1:
pos += self.pos
elif mode == 2:
pos += self.len
self.pos = max(0, pos)
def tell(self):
"""Return the file's current position."""
_complain_ifclosed(self.closed)
return self.pos
def read(self, n = -1):
"""Read at most size bytes from the file
(less if the read hits EOF before obtaining size bytes).
If the size argument is negative or omitted, read all data until EOF
is reached. The bytes are returned as a string object. An empty
string is returned when EOF is encountered immediately.
"""
_complain_ifclosed(self.closed)
if self.buflist:
self.buf += ''.join(self.buflist)
self.buflist = []
if n is None or n < 0:
newpos = self.len
else:
newpos = min(self.pos+n, self.len)
r = self.buf[self.pos:newpos]
self.pos = newpos
return r
def readline(self, length=None):
r"""Read one entire line from the file.
A trailing newline character is kept in the string (but may be absent
when a file ends with an incomplete line). If the size argument is
present and non-negative, it is a maximum byte count (including the
trailing newline) and an incomplete line may be returned.
An empty string is returned only when EOF is encountered immediately.
Note: Unlike stdio's fgets(), the returned string contains null
characters ('\0') if they occurred in the input.
"""
_complain_ifclosed(self.closed)
if self.buflist:
self.buf += ''.join(self.buflist)
self.buflist = []
i = self.buf.find('\n', self.pos)
if i < 0:
newpos = self.len
else:
newpos = i+1
if length is not None and length >= 0:
if self.pos + length < newpos:
newpos = self.pos + length
r = self.buf[self.pos:newpos]
self.pos = newpos
return r
def readlines(self, sizehint = 0):
"""Read until EOF using readline() and return a list containing the
lines thus read.
If the optional sizehint argument is present, instead of reading up
to EOF, whole lines totalling approximately sizehint bytes are read
(possibly more, to accommodate a final whole line).
"""
total = 0
lines = []
line = self.readline()
while line:
lines.append(line)
total += len(line)
if 0 < sizehint <= total:
break
line = self.readline()
return lines
def truncate(self, size=None):
"""Truncate the file's size.
If the optional size argument is present, the file is truncated to
(at most) that size. The size defaults to the current position.
The current file position is not changed unless the position
is beyond the new file size.
If the specified size exceeds the file's current size, the
file remains unchanged.
"""
_complain_ifclosed(self.closed)
if size is None:
size = self.pos
elif size < 0:
raise IOError(EINVAL, "Negative size not allowed")
elif size < self.pos:
self.pos = size
self.buf = self.getvalue()[:size]
self.len = size
def write(self, s):
"""Write a string to the file.
There is no return value.
"""
_complain_ifclosed(self.closed)
if not s: return
spos = self.pos
slen = self.len
if spos == slen:
self.buflist.append(s)
self.len = self.pos = spos + len(s)
return
if spos > slen:
self.buflist.append('\0'*(spos - slen))
slen = spos
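# Illustrative example (added): seeking past EOF and then writing pads the
# buffer with NULs, e.g. on an empty StringIO, f.seek(5); f.write("x") stores
# '\0\0\0\0\0x', matching the note in the module docstring above.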
newpos = spos + len(s)
if spos < slen:
if self.buflist:
self.buf += ''.join(self.buflist)
self.buflist = [self.buf[:spos], s, self.buf[newpos:]]
self.buf = ''
if newpos > slen:
slen = newpos
else:
self.buflist.append(s)
slen = newpos
self.len = slen
self.pos = newpos
def writelines(self, iterable):
"""Write a sequence of strings to the file. The sequence can be any
iterable object producing strings, typically a list of strings. There
is no return value.
(The name is intended to match readlines(); writelines() does not add
line separators.)
"""
write = self.write
for line in iterable:
write(line)
def flush(self):
"""Flush the internal buffer
"""
_complain_ifclosed(self.closed)
def getvalue(self):
"""
Retrieve the entire contents of the "file" at any time before
the StringIO object's close() method is called.
The StringIO object can accept either Unicode or 8-bit strings,
but mixing the two may take some care. If both are used, 8-bit
strings that cannot be interpreted as 7-bit ASCII (that use the
8th bit) will cause a UnicodeError to be raised when getvalue()
is called.
"""
_complain_ifclosed(self.closed)
if self.buflist:
self.buf += ''.join(self.buflist)
self.buflist = []
return self.buf
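# Minimal usage sketch (added for illustration; mirrors the module docstring):
#
#   f = StringIO()
#   f.write("hello ")
#   f.write("world")
#   assert f.getvalue() == "hello world"
#   f.seek(0)
#   assert f.readline() == "hello world"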
TextIOWrapper = StringIO
class RawIOBase:
def read(self,n=-1):
pass
def readall(self):
pass
def readinto(self,b):
pass
def write(self,b):
pass
BufferedReader = RawIOBase
| gpl-2.0 |
saurabh6790/test-med-lib | webnotes/model/workflow.py | 34 | 1073 | # Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import webnotes
def get_workflow_name(doctype):
if getattr(webnotes.local, "workflow_names", None) is None:
webnotes.local.workflow_names = {}
if doctype not in webnotes.local.workflow_names:
workflow_name = webnotes.conn.get_value("Workflow", {"document_type": doctype,
"is_active": "1"}, "name")
# no active? get default workflow
if not workflow_name:
workflow_name = webnotes.conn.get_value("Workflow", {"document_type": doctype},
"name")
webnotes.local.workflow_names[doctype] = workflow_name
return webnotes.local.workflow_names[doctype]
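# Illustrative call (hypothetical doctype name): get_workflow_name("Sales Invoice")
# returns the active workflow for that doctype, falling back to any workflow
# defined for it; results are cached on webnotes.local for the current request.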
def get_default_state(doctype):
workflow_name = get_workflow_name(doctype)
return webnotes.conn.get_value("Workflow Document State", {"parent": workflow_name,
"idx":1}, "state")
def get_state_fieldname(doctype):
workflow_name = get_workflow_name(doctype)
return webnotes.conn.get_value("Workflow", workflow_name, "workflow_state_field")
| mit |
savoirfairelinux/odoo | addons/l10n_fr/__init__.py | 424 | 1447 | # -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (c) 2008 JAILLET Simon - CrysaLEAD - www.crysalead.fr
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsibility of assessing all potential
# consequences resulting from its eventual inadequacies and bugs.
# End users who are looking for a ready-to-use solution with commercial
# guarantees and support are strongly advised to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
##############################################################################
import l10n_fr
import report
import wizard
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
jpshort/odoo | openerp/addons/base/res/res_company.py | 138 | 21310 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import os
import re
import openerp
from openerp import SUPERUSER_ID, tools
from openerp.osv import fields, osv
from openerp.tools.translate import _
from openerp.tools.safe_eval import safe_eval as eval
from openerp.tools import image_resize_image
class multi_company_default(osv.osv):
"""
Manage multi company default value
"""
_name = 'multi_company.default'
_description = 'Default multi company'
_order = 'company_id,sequence,id'
_columns = {
'sequence': fields.integer('Sequence'),
'name': fields.char('Name', required=True, help='Name it to easily find a record'),
'company_id': fields.many2one('res.company', 'Main Company', required=True,
help='Company where the user is connected'),
'company_dest_id': fields.many2one('res.company', 'Default Company', required=True,
help='Company to store the current record'),
'object_id': fields.many2one('ir.model', 'Object', required=True,
help='Object affected by this rule'),
'expression': fields.char('Expression', required=True,
help='Expression, must be True to match\nuse context.get or user (browse)'),
'field_id': fields.many2one('ir.model.fields', 'Field', help='Select field property'),
}
_defaults = {
'expression': 'True',
'sequence': 100,
}
def copy(self, cr, uid, id, default=None, context=None):
"""
Add '(copy)' to the name when duplicating a record
"""
if not context:
context = {}
if not default:
default = {}
company = self.browse(cr, uid, id, context=context)
default = default.copy()
default['name'] = company.name + _(' (copy)')
return super(multi_company_default, self).copy(cr, uid, id, default, context=context)
multi_company_default()
class res_company(osv.osv):
_name = "res.company"
_description = 'Companies'
_order = 'name'
def _get_address_data(self, cr, uid, ids, field_names, arg, context=None):
""" Read the 'address' functional fields. """
result = {}
part_obj = self.pool.get('res.partner')
for company in self.browse(cr, uid, ids, context=context):
result[company.id] = {}.fromkeys(field_names, False)
if company.partner_id:
address_data = part_obj.address_get(cr, openerp.SUPERUSER_ID, [company.partner_id.id], adr_pref=['default'])
if address_data['default']:
address = part_obj.read(cr, openerp.SUPERUSER_ID, [address_data['default']], field_names, context=context)[0]
for field in field_names:
result[company.id][field] = address[field] or False
return result
def _set_address_data(self, cr, uid, company_id, name, value, arg, context=None):
""" Write the 'address' functional fields. """
company = self.browse(cr, uid, company_id, context=context)
if company.partner_id:
part_obj = self.pool.get('res.partner')
address_data = part_obj.address_get(cr, uid, [company.partner_id.id], adr_pref=['default'])
address = address_data['default']
if address:
part_obj.write(cr, uid, [address], {name: value or False}, context=context)
else:
part_obj.create(cr, uid, {name: value or False, 'parent_id': company.partner_id.id}, context=context)
return True
def _get_logo_web(self, cr, uid, ids, _field_name, _args, context=None):
result = dict.fromkeys(ids, False)
for record in self.browse(cr, uid, ids, context=context):
size = (180, None)
result[record.id] = image_resize_image(record.partner_id.image, size)
return result
def _get_companies_from_partner(self, cr, uid, ids, context=None):
return self.pool['res.company'].search(cr, uid, [('partner_id', 'in', ids)], context=context)
_columns = {
'name': fields.related('partner_id', 'name', string='Company Name', size=128, required=True, store=True, type='char'),
'parent_id': fields.many2one('res.company', 'Parent Company', select=True),
'child_ids': fields.one2many('res.company', 'parent_id', 'Child Companies'),
'partner_id': fields.many2one('res.partner', 'Partner', required=True),
'rml_header': fields.text('RML Header', required=True),
'rml_header1': fields.char('Company Tagline', help="Appears by default on the top right corner of your printed documents (report header)."),
'rml_header2': fields.text('RML Internal Header', required=True),
'rml_header3': fields.text('RML Internal Header for Landscape Reports', required=True),
'rml_footer': fields.text('Report Footer', help="Footer text displayed at the bottom of all reports."),
'rml_footer_readonly': fields.related('rml_footer', type='text', string='Report Footer', readonly=True),
'custom_footer': fields.boolean('Custom Footer', help="Check this to define the report footer manually. Otherwise it will be filled in automatically."),
'font': fields.many2one('res.font', string="Font", domain=[('mode', 'in', ('Normal', 'Regular', 'all', 'Book'))],
help="Set the font into the report header, it will be used as default font in the RML reports of the user company"),
'logo': fields.related('partner_id', 'image', string="Logo", type="binary"),
'logo_web': fields.function(_get_logo_web, string="Logo Web", type="binary", store={
'res.company': (lambda s, c, u, i, x: i, ['partner_id'], 10),
'res.partner': (_get_companies_from_partner, ['image'], 10),
}),
'currency_id': fields.many2one('res.currency', 'Currency', required=True),
'currency_ids': fields.one2many('res.currency', 'company_id', 'Currency'),
'user_ids': fields.many2many('res.users', 'res_company_users_rel', 'cid', 'user_id', 'Accepted Users'),
'account_no':fields.char('Account No.'),
'street': fields.function(_get_address_data, fnct_inv=_set_address_data, size=128, type='char', string="Street", multi='address'),
'street2': fields.function(_get_address_data, fnct_inv=_set_address_data, size=128, type='char', string="Street2", multi='address'),
'zip': fields.function(_get_address_data, fnct_inv=_set_address_data, size=24, type='char', string="Zip", multi='address'),
'city': fields.function(_get_address_data, fnct_inv=_set_address_data, size=24, type='char', string="City", multi='address'),
'state_id': fields.function(_get_address_data, fnct_inv=_set_address_data, type='many2one', relation='res.country.state', string="Fed. State", multi='address'),
'bank_ids': fields.one2many('res.partner.bank','company_id', 'Bank Accounts', help='Bank accounts related to this company'),
'country_id': fields.function(_get_address_data, fnct_inv=_set_address_data, type='many2one', relation='res.country', string="Country", multi='address'),
'email': fields.related('partner_id', 'email', size=64, type='char', string="Email", store=True),
'phone': fields.related('partner_id', 'phone', size=64, type='char', string="Phone", store=True),
'fax': fields.function(_get_address_data, fnct_inv=_set_address_data, size=64, type='char', string="Fax", multi='address'),
'website': fields.related('partner_id', 'website', string="Website", type="char", size=64),
'vat': fields.related('partner_id', 'vat', string="Tax ID", type="char", size=32),
'company_registry': fields.char('Company Registry', size=64),
'rml_paper_format': fields.selection([('a4', 'A4'), ('us_letter', 'US Letter')], "Paper Format", required=True, oldname='paper_format'),
}
_sql_constraints = [
('name_uniq', 'unique (name)', 'The company name must be unique !')
]
def onchange_footer(self, cr, uid, ids, custom_footer, phone, fax, email, website, vat, company_registry, bank_ids, context=None):
if custom_footer:
return {}
# first line (notice that missing elements are filtered out before the join)
res = ' | '.join(filter(bool, [
phone and '%s: %s' % (_('Phone'), phone),
fax and '%s: %s' % (_('Fax'), fax),
email and '%s: %s' % (_('Email'), email),
website and '%s: %s' % (_('Website'), website),
vat and '%s: %s' % (_('TIN'), vat),
company_registry and '%s: %s' % (_('Reg'), company_registry),
]))
# second line: bank accounts
res_partner_bank = self.pool.get('res.partner.bank')
account_data = self.resolve_2many_commands(cr, uid, 'bank_ids', bank_ids, context=context)
account_names = res_partner_bank._prepare_name_get(cr, uid, account_data, context=context)
if account_names:
title = _('Bank Accounts') if len(account_names) > 1 else _('Bank Account')
res += '\n%s: %s' % (title, ', '.join(name for id, name in account_names))
return {'value': {'rml_footer': res, 'rml_footer_readonly': res}}
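# Illustrative result (assumed sample values): with phone='+32 81 81 37 00' and
# email='info@example.com' and no other fields set, the computed footer reads
# "Phone: +32 81 81 37 00 | Email: info@example.com"; bank accounts, if any,
# are appended on a second line labelled 'Bank Account' or 'Bank Accounts'.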
def onchange_state(self, cr, uid, ids, state_id, context=None):
if state_id:
return {'value':{'country_id': self.pool.get('res.country.state').browse(cr, uid, state_id, context).country_id.id }}
return {}
def onchange_font_name(self, cr, uid, ids, font, rml_header, rml_header2, rml_header3, context=None):
""" To change default header style of all <para> and drawstring. """
def _change_header(header,font):
""" Replace default fontname use in header and setfont tag """
default_para = re.sub('fontName.?=.?".*"', 'fontName="%s"'% font, header)
return re.sub('(<setFont.?name.?=.?)(".*?")(.)', '\g<1>"%s"\g<3>'% font, default_para)
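# Example (illustrative): with fontname 'Helvetica', the substitutions above turn
#   <setFont name="DejaVuSans" size="8"/>  into  <setFont name="Helvetica" size="8"/>
# and rewrite fontName="..." attributes of <paraStyle> tags the same way.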
if not font:
return True
fontname = self.pool.get('res.font').browse(cr, uid, font, context=context).name
return {'value':{
'rml_header': _change_header(rml_header, fontname),
'rml_header2':_change_header(rml_header2, fontname),
'rml_header3':_change_header(rml_header3, fontname)
}}
def on_change_country(self, cr, uid, ids, country_id, context=None):
res = {'domain': {'state_id': []}}
currency_id = self._get_euro(cr, uid, context=context)
if country_id:
currency_id = self.pool.get('res.country').browse(cr, uid, country_id, context=context).currency_id.id
res['domain'] = {'state_id': [('country_id','=',country_id)]}
res['value'] = {'currency_id': currency_id}
return res
def name_search(self, cr, uid, name='', args=None, operator='ilike', context=None, limit=100):
context = dict(context or {})
if context.pop('user_preference', None):
# We browse as superuser. Otherwise, the user would be able to
# select only the currently visible companies (according to rules,
# which are probably to allow to see the child companies) even if
# she belongs to some other companies.
user = self.pool.get('res.users').browse(cr, SUPERUSER_ID, uid, context=context)
cmp_ids = list(set([user.company_id.id] + [cmp.id for cmp in user.company_ids]))
uid = SUPERUSER_ID
args = (args or []) + [('id', 'in', cmp_ids)]
return super(res_company, self).name_search(cr, uid, name=name, args=args, operator=operator, context=context, limit=limit)
def _company_default_get(self, cr, uid, object=False, field=False, context=None):
"""
Check if the object for this company has a default value
"""
if not context:
context = {}
proxy = self.pool.get('multi_company.default')
args = [
('object_id.model', '=', object),
('field_id', '=', field),
]
ids = proxy.search(cr, uid, args, context=context)
user = self.pool.get('res.users').browse(cr, SUPERUSER_ID, uid, context=context)
for rule in proxy.browse(cr, uid, ids, context):
if eval(rule.expression, {'context': context, 'user': user}):
return rule.company_dest_id.id
return user.company_id.id
@tools.ormcache()
def _get_company_children(self, cr, uid=None, company=None):
if not company:
return []
ids = self.search(cr, uid, [('parent_id','child_of',[company])])
return ids
def _get_partner_hierarchy(self, cr, uid, company_id, context=None):
if company_id:
parent_id = self.browse(cr, uid, company_id)['parent_id']
if parent_id:
return self._get_partner_hierarchy(cr, uid, parent_id.id, context)
else:
return self._get_partner_descendance(cr, uid, company_id, [], context)
return []
def _get_partner_descendance(self, cr, uid, company_id, descendance, context=None):
descendance.append(self.browse(cr, uid, company_id).partner_id.id)
for child_id in self._get_company_children(cr, uid, company_id):
if child_id != company_id:
descendance = self._get_partner_descendance(cr, uid, child_id, descendance)
return descendance
#
# This function restarts the cache on the _get_company_children method
#
def cache_restart(self, cr):
self._get_company_children.clear_cache(self)
def create(self, cr, uid, vals, context=None):
if not vals.get('name', False) or vals.get('partner_id', False):
self.cache_restart(cr)
return super(res_company, self).create(cr, uid, vals, context=context)
obj_partner = self.pool.get('res.partner')
partner_id = obj_partner.create(cr, uid, {'name': vals['name'], 'is_company':True, 'image': vals.get('logo', False)}, context=context)
vals.update({'partner_id': partner_id})
self.cache_restart(cr)
company_id = super(res_company, self).create(cr, uid, vals, context=context)
obj_partner.write(cr, uid, [partner_id], {'company_id': company_id}, context=context)
return company_id
def write(self, cr, uid, ids, values, context=None):
self.cache_restart(cr)
return super(res_company, self).write(cr, uid, ids, values, context=context)
def _get_euro(self, cr, uid, context=None):
rate_obj = self.pool.get('res.currency.rate')
rate_id = rate_obj.search(cr, uid, [('rate', '=', 1)], context=context)
return rate_id and rate_obj.browse(cr, uid, rate_id[0], context=context).currency_id.id or False
def _get_logo(self, cr, uid, ids):
return open(os.path.join(tools.config['root_path'], 'addons', 'base', 'res', 'res_company_logo.png'), 'rb').read().encode('base64')
def _get_font(self, cr, uid, ids):
font_obj = self.pool.get('res.font')
res = font_obj.search(cr, uid, [('family', '=', 'Helvetica'), ('mode', '=', 'all')], limit=1)
return res and res[0] or False
_header = """
<header>
<pageTemplate>
<frame id="first" x1="28.0" y1="28.0" width="%s" height="%s"/>
<stylesheet>
<!-- Set here the default font to use for all <para> tags -->
<paraStyle name='Normal' fontName="DejaVuSans"/>
</stylesheet>
<pageGraphics>
<fill color="black"/>
<stroke color="black"/>
<setFont name="DejaVuSans" size="8"/>
<drawString x="%s" y="%s"> [[ formatLang(time.strftime("%%Y-%%m-%%d"), date=True) ]] [[ time.strftime("%%H:%%M") ]]</drawString>
<setFont name="DejaVuSans-Bold" size="10"/>
<drawCentredString x="%s" y="%s">[[ company.partner_id.name ]]</drawCentredString>
<stroke color="#000000"/>
<lines>%s</lines>
<!-- Set here the default font to use for all <drawString> tags -->
<!-- don't forget to change the 2 other occurrences of <setFont> above if needed -->
<setFont name="DejaVuSans" size="8"/>
</pageGraphics>
</pageTemplate>
</header>"""
_header2 = _header % (539, 772, "1.0cm", "28.3cm", "11.1cm", "28.3cm", "1.0cm 28.1cm 20.1cm 28.1cm")
_header3 = _header % (786, 525, 25, 555, 440, 555, "25 550 818 550")
def _get_header(self,cr,uid,ids):
try :
header_file = tools.file_open(os.path.join('base', 'report', 'corporate_rml_header.rml'))
try:
return header_file.read()
finally:
header_file.close()
except:
return self._header_a4
_header_main = """
<header>
<pageTemplate>
<frame id="first" x1="1.3cm" y1="3.0cm" height="%s" width="19.0cm"/>
<stylesheet>
<!-- Set here the default font to use for all <para> tags -->
<paraStyle name='Normal' fontName="DejaVuSans"/>
<paraStyle name="main_footer" fontSize="8.0" alignment="CENTER"/>
<paraStyle name="main_header" fontSize="8.0" leading="10" alignment="LEFT" spaceBefore="0.0" spaceAfter="0.0"/>
</stylesheet>
<pageGraphics>
<!-- Set here the default font to use for all <drawString> tags -->
<setFont name="DejaVuSans" size="8"/>
<!-- You Logo - Change X,Y,Width and Height -->
<image x="1.3cm" y="%s" height="40.0" >[[ company.logo or removeParentNode('image') ]]</image>
<fill color="black"/>
<stroke color="black"/>
<!-- page header -->
<lines>1.3cm %s 20cm %s</lines>
<drawRightString x="20cm" y="%s">[[ company.rml_header1 ]]</drawRightString>
<drawString x="1.3cm" y="%s">[[ company.partner_id.name ]]</drawString>
<place x="1.3cm" y="%s" height="1.8cm" width="15.0cm">
<para style="main_header">[[ display_address(company.partner_id) or '' ]]</para>
</place>
<drawString x="1.3cm" y="%s">Phone:</drawString>
<drawRightString x="7cm" y="%s">[[ company.partner_id.phone or '' ]]</drawRightString>
<drawString x="1.3cm" y="%s">Mail:</drawString>
<drawRightString x="7cm" y="%s">[[ company.partner_id.email or '' ]]</drawRightString>
<lines>1.3cm %s 7cm %s</lines>
<!-- left margin -->
<rotate degrees="90"/>
<fill color="grey"/>
<drawString x="2.65cm" y="-0.4cm">generated by Odoo.com</drawString>
<fill color="black"/>
<rotate degrees="-90"/>
<!--page bottom-->
<lines>1.2cm 2.65cm 19.9cm 2.65cm</lines>
<place x="1.3cm" y="0cm" height="2.55cm" width="19.0cm">
<para style="main_footer">[[ company.rml_footer ]]</para>
<para style="main_footer">Contact : [[ user.name ]] - Page: <pageNumber/></para>
</place>
</pageGraphics>
</pageTemplate>
</header>"""
_header_a4 = _header_main % ('21.7cm', '27.7cm', '27.7cm', '27.7cm', '27.8cm', '27.3cm', '25.3cm', '25.0cm', '25.0cm', '24.6cm', '24.6cm', '24.5cm', '24.5cm')
_header_letter = _header_main % ('20cm', '26.0cm', '26.0cm', '26.0cm', '26.1cm', '25.6cm', '23.6cm', '23.3cm', '23.3cm', '22.9cm', '22.9cm', '22.8cm', '22.8cm')
def onchange_rml_paper_format(self, cr, uid, ids, rml_paper_format, context=None):
if rml_paper_format == 'us_letter':
return {'value': {'rml_header': self._header_letter}}
return {'value': {'rml_header': self._header_a4}}
def act_discover_fonts(self, cr, uid, ids, context=None):
return self.pool.get("res.font").font_scan(cr, uid, context=context)
_defaults = {
'currency_id': _get_euro,
'rml_paper_format': 'a4',
'rml_header':_get_header,
'rml_header2': _header2,
'rml_header3': _header3,
'logo':_get_logo,
'font':_get_font,
}
_constraints = [
(osv.osv._check_recursion, 'Error! You can not create recursive companies.', ['parent_id'])
]
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
Brickstertwo/git-commands | tests/functional/test_upstream.py | 1 | 8926 | import os
import shutil
import subprocess
import tempfile
import unittest
import git
import testutils
class TestGitUpstream(unittest.TestCase):
def _output(self, command):
proc = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
return proc.communicate()[0].strip()
def setUp(self):
# init repo
self.dirpath = tempfile.mkdtemp()
os.chdir(self.dirpath)
self.repo = git.Repo.init(self.dirpath)
testutils.init_local_config(self.repo)
subprocess.call('touch README.md'.split())
subprocess.call('git add -A'.split())
subprocess.call(['git', 'commit', '--quiet', '-m', 'Initial commit'])
subprocess.call('git checkout -b develop --quiet'.split())
subprocess.call('git branch --set-upstream-to=master --quiet'.split())
def tearDown(self):
shutil.rmtree(self.dirpath)
def test_upstream(self):
self.assertEqual('master', subprocess.check_output('git upstream'.split()).strip())
def test_upstream_includeRemote_shortOption(self):
self.assertEqual('./master', subprocess.check_output('git upstream -r'.split()).strip())
def test_upstream_includeRemote_longOption(self):
self.assertEqual('./master', subprocess.check_output('git upstream --include-remote'.split()).strip())
def test_upstream_excludeRemote_shortOption(self):
self.assertEqual('master', subprocess.check_output('git upstream -R'.split()).strip())
def test_upstream_excludeRemote_longOption(self):
self.assertEqual('master', subprocess.check_output('git upstream --no-include-remote'.split()).strip())
def test_upstream_showRemoteProperty_never(self):
# setup
subprocess.call('git config --local git-upstream.include-remote NEVER'.split())
# run
upstream_result = subprocess.check_output('git upstream'.split()).strip()
# verify
self.assertEqual('master', upstream_result)
def test_upstream_showRemoteProperty_always(self):
# setup
subprocess.call('git config --local git-upstream.include-remote ALWAYS'.split())
# run
upstream_result = subprocess.check_output('git upstream'.split()).strip()
# verify
self.assertEqual('./master', upstream_result)
def test_upstream_showRemoteProperty_noneLocal_isLocal(self):
# setup
subprocess.call('git config --local branch.develop.remote origin'.split())
subprocess.call('git config --local git-upstream.include-remote NONE_LOCAL'.split())
# run
upstream_result = subprocess.check_output('git upstream'.split()).strip()
# verify
self.assertEqual('origin/master', upstream_result)
def test_upstream_showRemoteProperty_noneLocal_notLocal(self):
# setup
subprocess.call('git config --local git-upstream.include-remote NONE_LOCAL'.split())
# run
upstream_result = subprocess.check_output('git upstream'.split()).strip()
# verify
self.assertEqual('master', upstream_result)
def test_upstream_ignoreShowRemotePropertyWithFlag_includeRemote_shortOption(self):
# setup
subprocess.call('git config --local git-upstream.include-remote NEVER'.split())
# run
upstream_result = subprocess.check_output('git upstream -r'.split()).strip()
# verify
self.assertEqual('./master', upstream_result)
def test_upstream_ignoreShowRemotePropertyWithFlag_includeRemote_longOption(self):
# setup
subprocess.call('git config --local git-upstream.include-remote NEVER'.split())
# run
upstream_result = subprocess.check_output('git upstream --include-remote'.split()).strip()
# verify
self.assertEqual('./master', upstream_result)
def test_upstream_ignoreShowRemotePropertyWithFlag_excludeRemote_shortOption(self):
# setup
subprocess.call('git config --local git-upstream.include-remote ALWAYS'.split())
# run
upstream_result = subprocess.check_output('git upstream -R'.split()).strip()
# verify
self.assertEqual('master', upstream_result)
def test_upstream_ignoreShowRemotePropertyWithFlag_excludeRemote_longOption(self):
# setup
subprocess.call('git config --local git-upstream.include-remote ALWAYS'.split())
# run
upstream_result = subprocess.check_output('git upstream --no-include-remote'.split()).strip()
# verify
self.assertEqual('master', upstream_result)
def test_upstream_specifyBranch_shortOption(self):
# setup
subprocess.call('git checkout -b new-feature --quiet'.split())
# run
upstream_result = subprocess.check_output('git upstream -b develop'.split()).strip()
# verify
self.assertEqual('master', upstream_result)
def test_upstream_specifyBranch_longOption(self):
# setup
subprocess.call('git checkout -b new-feature --quiet'.split())
# run
upstream_result = subprocess.check_output('git upstream --branch develop'.split()).strip()
# verify
self.assertEqual('master', upstream_result)
def test_upstream_specifyBranch_longOptionEqual(self):
# setup
subprocess.call('git checkout -b new-feature --quiet'.split())
# run
upstream_result = subprocess.check_output('git upstream --branch=develop'.split()).strip()
# verify
self.assertEqual('master', upstream_result)
def test_upstream_noUpstream(self):
# setup
subprocess.call('git branch --unset-upstream'.split())
# run
upstream_result = subprocess.check_output('git upstream'.split())
# verify
self.assertFalse(upstream_result)
def test_upstream_invalidBranch(self):
# setup
branch = 'invalidbranch'
# run
p = subprocess.Popen(['git', 'upstream', '-b', branch], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, stderr = p.communicate()
# verify
self.assertEqual('error: {0!r} is not a valid branch'.format(branch), stderr.strip())
self.assertFalse(stdout)
def test_upstream_nonGitRepository(self):
# setup
os.mkdir(self.dirpath + '/dir')
os.chdir(self.dirpath + '/dir')
# run
p = subprocess.Popen('git upstream'.split(), stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, stderr = p.communicate()
# verify
expected = "error: '{}' not a git repository".format(os.path.realpath(self.dirpath) + '/dir')
self.assertEqual(expected, stderr.strip())
self.assertFalse(stdout)
def test_upstream_emptyRepository(self):
# setup
# create a new repo in a sub-directory (lazy)
os.mkdir(self.dirpath + '/dir')
os.chdir(self.dirpath + '/dir')
subprocess.check_output('git init'.split())
# when
p = subprocess.Popen('git upstream'.split(), stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdout, stderr = p.communicate()
# then
self.assertFalse(stdout)
self.assertFalse(stderr)
def test_upstream_includeRemoteAndNoIncludeRemote(self):
expected = """usage: git upstream [-h] [-v] [-r | -R] [-b BRANCH]
git upstream: error: argument -R/--no-include-remote: not allowed with argument -r/--include-remote
"""
# run 1
stdout, stderr = subprocess.Popen('git upstream --include-remote --no-include-remote'.split(), stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate()
self.assertFalse(stdout)
self.assertEqual(stderr, expected)
# run 2
stdout, stderr = subprocess.Popen('git upstream --include-remote -R'.split(), stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate()
self.assertFalse(stdout)
self.assertEqual(stderr, expected)
# run 3
stdout, stderr = subprocess.Popen('git upstream -r --no-include-remote'.split(), stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate()
self.assertFalse(stdout)
self.assertEqual(stderr, expected)
# run 4
stdout, stderr = subprocess.Popen('git upstream -r -R'.split(), stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate()
self.assertFalse(stdout)
self.assertEqual(stderr, expected)
def test_upstream_version(self):
# expect
self.assertRegexpMatches(self._output('git upstream -v'.split()), 'git-upstream \\d+\\.\\d+\\.\\d+')
self.assertRegexpMatches(self._output('git upstream --version'.split()), 'git-upstream \\d+\\.\\d+\\.\\d+')
def test_upstream_help(self):
# expect
self.assertTrue(self._output('git upstream -h'.split()))
self.assertTrue(self._output('git upstream --help'.split()))
| mit |
TeachAtTUM/edx-platform | cms/djangoapps/contentstore/tests/test_contentstore.py | 2 | 96426 | # -*- coding: utf-8 -*-
from __future__ import print_function
import copy
import shutil
from datetime import timedelta
from functools import wraps
from json import loads
from textwrap import dedent
from unittest import SkipTest
from uuid import uuid4
import ddt
import django
import lxml.html
import mock
from django.conf import settings
from django.contrib.auth.models import User
from django.test import TestCase
from django.test.utils import override_settings
from edxval.api import create_video, get_videos_for_course
from fs.osfs import OSFS
from lxml import etree
from opaque_keys import InvalidKeyError
from opaque_keys.edx.keys import CourseKey, UsageKey
from opaque_keys.edx.locations import AssetLocation, CourseLocator
from path import Path as path
from six import text_type
from waffle.testutils import override_switch
from contentstore.tests.utils import AjaxEnabledTestClient, CourseTestCase, get_url, parse_json
from contentstore.utils import delete_course, reverse_course_url, reverse_url
from contentstore.views.component import ADVANCED_COMPONENT_TYPES
from contentstore.config import waffle
from course_action_state.managers import CourseActionStateItemNotFoundError
from course_action_state.models import CourseRerunState, CourseRerunUIStateManager
from django_comment_common.utils import are_permissions_roles_seeded
from openedx.core.lib.tempdir import mkdtemp_clean
from student import auth
from student.models import CourseEnrollment
from student.roles import CourseCreatorRole, CourseInstructorRole
from xmodule.capa_module import CapaDescriptor
from xmodule.contentstore.content import StaticContent
from xmodule.contentstore.django import contentstore
from xmodule.contentstore.utils import empty_asset_trashcan, restore_asset_from_trashcan
from xmodule.course_module import CourseDescriptor, Textbook
from xmodule.exceptions import InvalidVersionError
from xmodule.modulestore import ModuleStoreEnum
from xmodule.modulestore.django import modulestore
from xmodule.modulestore.exceptions import ItemNotFoundError
from xmodule.modulestore.inheritance import own_metadata
from xmodule.modulestore.tests.factories import CourseFactory, ItemFactory, check_mongo_calls
from xmodule.modulestore.xml_exporter import export_course_to_xml
from xmodule.modulestore.xml_importer import import_course_from_xml, perform_xlint
from xmodule.seq_module import SequenceDescriptor
TEST_DATA_CONTENTSTORE = copy.deepcopy(settings.CONTENTSTORE)
TEST_DATA_CONTENTSTORE['DOC_STORE_CONFIG']['db'] = 'test_xcontent_%s' % uuid4().hex
TEST_DATA_DIR = settings.COMMON_TEST_DATA_ROOT
def requires_pillow_jpeg(func):
"""
A decorator to indicate that the function requires JPEG support in Pillow;
otherwise it cannot be run
"""
@wraps(func)
def decorated_func(*args, **kwargs):
"""
Execute the function if we have JPEG support in Pillow.
"""
try:
from PIL import Image
except ImportError:
raise SkipTest("Pillow is not installed (or not found)")
if not getattr(Image.core, "jpeg_decoder", False):
raise SkipTest("Pillow cannot open JPEG files")
return func(*args, **kwargs)
return decorated_func
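# Usage note (illustrative): apply the decorator directly above any test method that
# needs to open JPEG fixtures, as `test_asset_import` does further below.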
@override_settings(CONTENTSTORE=TEST_DATA_CONTENTSTORE)
class ContentStoreTestCase(CourseTestCase):
"""
Base class for Content Store Test Cases
"""
class ImportRequiredTestCases(ContentStoreTestCase):
"""
Tests which legitimately need to import a course
"""
def test_no_static_link_rewrites_on_import(self):
course_items = import_course_from_xml(
self.store, self.user.id, TEST_DATA_DIR, ['toy'], create_if_not_present=True
)
course = course_items[0]
handouts_usage_key = course.id.make_usage_key('course_info', 'handouts')
handouts = self.store.get_item(handouts_usage_key)
self.assertIn('/static/', handouts.data)
handouts_usage_key = course.id.make_usage_key('html', 'toyhtml')
handouts = self.store.get_item(handouts_usage_key)
self.assertIn('/static/', handouts.data)
def test_xlint_fails(self):
err_cnt = perform_xlint(TEST_DATA_DIR, ['toy'])
self.assertGreater(err_cnt, 0)
def test_invalid_asset_overwrite(self):
"""
Tests that an asset with invalid displayname can be overwritten if multiple assets have same displayname.
It verifies that:
During import, if ('/') or ('\\') is present in the displayname of an asset, it is replaced with underscores '_'.
Export does not fail when an asset has '/' in its displayname. If the converted display name matches
any other asset, then it will be overwritten.
Asset name in XML: "/invalid\\displayname/subs-esLhHcdKGWvKs.srt"
"""
content_store = contentstore()
expected_displayname = u'_invalid_displayname_subs-esLhHcdKGWvKs.srt'
import_course_from_xml(
self.store,
self.user.id,
TEST_DATA_DIR,
['import_draft_order'],
static_content_store=content_store,
verbose=True,
create_if_not_present=True
)
# Verify the course has imported successfully
course = self.store.get_course(self.store.make_course_key(
'test_org',
'import_draft_order',
'import_draft_order'
))
self.assertIsNotNone(course)
# Add a new asset in the course, and make sure to name it such that it overwrites the one existing
# asset in the course. (i.e. _invalid_displayname_subs-esLhHcdKGWvKs.srt)
asset_key = course.id.make_asset_key('asset', 'sample_asset.srt')
content = StaticContent(
asset_key, expected_displayname, 'application/text', 'test',
)
content_store.save(content)
# Get & verify that course actually has two assets
assets, count = content_store.get_all_content_for_course(course.id)
self.assertEqual(count, 2)
# Verify both assets have similar `displayname` after saving.
for asset in assets:
self.assertEquals(asset['displayname'], expected_displayname)
# Test course export does not fail
root_dir = path(mkdtemp_clean())
print('Exporting to tempdir = {0}'.format(root_dir))
export_course_to_xml(self.store, content_store, course.id, root_dir, u'test_export')
filesystem = OSFS(text_type(root_dir / 'test_export/static'))
exported_static_files = filesystem.listdir(u'/')
# Verify that asset have been overwritten during export.
self.assertEqual(len(exported_static_files), 1)
self.assertTrue(filesystem.exists(expected_displayname))
self.assertEqual(exported_static_files[0], expected_displayname)
# Remove exported course
shutil.rmtree(root_dir)
def test_about_overrides(self):
"""
This test case verifies that a course can use specialized override for about data,
e.g. /about/Fall_2012/effort.html
while there is a base definition in /about/effort.html
"""
course_items = import_course_from_xml(
self.store, self.user.id, TEST_DATA_DIR, ['toy'], create_if_not_present=True
)
course_key = course_items[0].id
effort = self.store.get_item(course_key.make_usage_key('about', 'effort'))
self.assertEqual(effort.data, '6 hours')
# this one should be in a non-override folder
effort = self.store.get_item(course_key.make_usage_key('about', 'end_date'))
self.assertEqual(effort.data, 'TBD')
@requires_pillow_jpeg
def test_asset_import(self):
"""
This test validates that an image asset is imported and a thumbnail was generated for a .gif
"""
content_store = contentstore()
import_course_from_xml(
self.store, self.user.id, TEST_DATA_DIR, ['toy'], static_content_store=content_store, verbose=True,
create_if_not_present=True
)
course = self.store.get_course(self.store.make_course_key('edX', 'toy', '2012_Fall'))
self.assertIsNotNone(course)
# make sure we have some assets in our contentstore
all_assets, __ = content_store.get_all_content_for_course(course.id)
self.assertGreater(len(all_assets), 0)
# make sure we have some thumbnails in our contentstore
all_thumbnails = content_store.get_all_content_thumbnails_for_course(course.id)
self.assertGreater(len(all_thumbnails), 0)
location = AssetLocation.from_deprecated_string('/c4x/edX/toy/asset/just_a_test.jpg')
content = content_store.find(location)
self.assertIsNotNone(content)
self.assertIsNotNone(content.thumbnail_location)
thumbnail = content_store.find(content.thumbnail_location)
self.assertIsNotNone(thumbnail)
def test_course_info_updates_import_export(self):
"""
Test that course info updates are imported and exported with all content fields ('data', 'items')
"""
content_store = contentstore()
data_dir = TEST_DATA_DIR
courses = import_course_from_xml(
self.store, self.user.id, data_dir, ['course_info_updates'],
static_content_store=content_store, verbose=True, create_if_not_present=True
)
course = courses[0]
self.assertIsNotNone(course)
course_updates = self.store.get_item(course.id.make_usage_key('course_info', 'updates'))
self.assertIsNotNone(course_updates)
# check that course which is imported has files 'updates.html' and 'updates.items.json'
filesystem = OSFS(text_type(data_dir + '/course_info_updates/info'))
self.assertTrue(filesystem.exists(u'updates.html'))
self.assertTrue(filesystem.exists(u'updates.items.json'))
# verify that course info update module has same data content as in data file from which it is imported
# check 'data' field content
with filesystem.open(u'updates.html', 'r') as course_policy:
on_disk = course_policy.read()
self.assertEqual(course_updates.data, on_disk)
# check 'items' field content
with filesystem.open(u'updates.items.json', 'r') as course_policy:
on_disk = loads(course_policy.read())
self.assertEqual(course_updates.items, on_disk)
# now export the course to a tempdir and test that it contains files 'updates.html' and 'updates.items.json'
# with same content as in course 'info' directory
root_dir = path(mkdtemp_clean())
print('Exporting to tempdir = {0}'.format(root_dir))
export_course_to_xml(self.store, content_store, course.id, root_dir, u'test_export')
# check that exported course has files 'updates.html' and 'updates.items.json'
filesystem = OSFS(text_type(root_dir / 'test_export/info'))
self.assertTrue(filesystem.exists(u'updates.html'))
self.assertTrue(filesystem.exists(u'updates.items.json'))
# verify that exported course has same data content as in course_info_update module
with filesystem.open(u'updates.html', 'r') as grading_policy:
on_disk = grading_policy.read()
self.assertEqual(on_disk, course_updates.data)
with filesystem.open(u'updates.items.json', 'r') as grading_policy:
on_disk = loads(grading_policy.read())
self.assertEqual(on_disk, course_updates.items)
def test_rewrite_nonportable_links_on_import(self):
content_store = contentstore()
import_course_from_xml(
self.store, self.user.id, TEST_DATA_DIR, ['toy'],
static_content_store=content_store, create_if_not_present=True
)
# first check a static asset link
course_key = self.store.make_course_key('edX', 'toy', 'run')
html_module_location = course_key.make_usage_key('html', 'nonportable')
html_module = self.store.get_item(html_module_location)
self.assertIn('/static/foo.jpg', html_module.data)
# then check an intra-courseware link
html_module_location = course_key.make_usage_key('html', 'nonportable_link')
html_module = self.store.get_item(html_module_location)
self.assertIn('/jump_to_id/nonportable_link', html_module.data)
def verify_content_existence(self, store, root_dir, course_id, dirname, category_name, filename_suffix=''):
filesystem = OSFS(root_dir / 'test_export')
self.assertTrue(filesystem.exists(dirname))
items = store.get_items(course_id, qualifiers={'category': category_name})
for item in items:
filesystem = OSFS(root_dir / ('test_export/' + dirname))
self.assertTrue(filesystem.exists(item.location.block_id + filename_suffix))
@mock.patch('xmodule.course_module.requests.get')
def test_export_course_roundtrip(self, mock_get):
mock_get.return_value.text = dedent("""
<?xml version="1.0"?><table_of_contents>
<entry page="5" page_label="ii" name="Table of Contents"/>
</table_of_contents>
""").strip()
content_store = contentstore()
course_id = self.import_and_populate_course()
root_dir = path(mkdtemp_clean())
print('Exporting to tempdir = {0}'.format(root_dir))
# export out to a tempdir
export_course_to_xml(self.store, content_store, course_id, root_dir, u'test_export')
# check for static tabs
self.verify_content_existence(self.store, root_dir, course_id, u'tabs', 'static_tab', '.html')
# check for about content
self.verify_content_existence(self.store, root_dir, course_id, u'about', 'about', '.html')
# assert that there is an html and video directory in drafts:
draft_dir = OSFS(root_dir / 'test_export/drafts')
self.assertTrue(draft_dir.exists(u'html'))
self.assertTrue(draft_dir.exists(u'video'))
# and assert that they contain the created modules
self.assertIn(self.DRAFT_HTML + ".xml", draft_dir.listdir(u'html'))
self.assertIn(self.DRAFT_VIDEO + ".xml", draft_dir.listdir(u'video'))
# and assert the child of the orphaned draft wasn't exported
self.assertNotIn(self.ORPHAN_DRAFT_HTML + ".xml", draft_dir.listdir(u'html'))
# check for grading_policy.json
filesystem = OSFS(root_dir / 'test_export/policies/2012_Fall')
self.assertTrue(filesystem.exists(u'grading_policy.json'))
course = self.store.get_course(course_id)
# compare what's on disk compared to what we have in our course
with filesystem.open(u'grading_policy.json', 'r') as grading_policy:
on_disk = loads(grading_policy.read())
self.assertEqual(on_disk, course.grading_policy)
# check for policy.json
self.assertTrue(filesystem.exists(u'policy.json'))
# compare what's on disk to what we have in the course module
with filesystem.open(u'policy.json', 'r') as course_policy:
on_disk = loads(course_policy.read())
self.assertIn('course/2012_Fall', on_disk)
self.assertEqual(on_disk['course/2012_Fall'], own_metadata(course))
# remove old course
self.store.delete_course(course_id, self.user.id)
# reimport over old course
self.check_import(root_dir, content_store, course_id)
# import to different course id
new_course_id = self.store.make_course_key('anotherX', 'anotherToy', 'Someday')
self.check_import(root_dir, content_store, new_course_id)
self.assertCoursesEqual(course_id, new_course_id)
shutil.rmtree(root_dir)
def check_import(self, root_dir, content_store, course_id):
"""Imports the course in root_dir into the given course_id and verifies its content"""
# reimport
import_course_from_xml(
self.store,
self.user.id,
root_dir,
['test_export'],
static_content_store=content_store,
target_id=course_id,
)
# verify content of the course
self.check_populated_course(course_id)
# verify additional export attributes
def verify_export_attrs_removed(attributes):
"""Verifies all temporary attributes added during export are removed"""
self.assertNotIn('index_in_children_list', attributes)
self.assertNotIn('parent_sequential_url', attributes)
self.assertNotIn('parent_url', attributes)
vertical = self.store.get_item(course_id.make_usage_key('vertical', self.TEST_VERTICAL))
verify_export_attrs_removed(vertical.xml_attributes)
for child in vertical.get_children():
verify_export_attrs_removed(child.xml_attributes)
if hasattr(child, 'data'):
verify_export_attrs_removed(child.data)
def test_export_course_with_metadata_only_video(self):
content_store = contentstore()
import_course_from_xml(self.store, self.user.id, TEST_DATA_DIR, ['toy'], create_if_not_present=True)
course_id = self.store.make_course_key('edX', 'toy', '2012_Fall')
# create a new video module and add it as a child to a vertical
# this re-creates a bug where, because the video template has nothing in its
# 'data' field, the export was blowing up
verticals = self.store.get_items(course_id, qualifiers={'category': 'vertical'})
self.assertGreater(len(verticals), 0)
parent = verticals[0]
ItemFactory.create(parent_location=parent.location, category="video", display_name="untitled")
root_dir = path(mkdtemp_clean())
print('Exporting to tempdir = {0}'.format(root_dir))
# export out to a tempdir
export_course_to_xml(self.store, content_store, course_id, root_dir, u'test_export')
shutil.rmtree(root_dir)
def test_export_course_with_metadata_only_word_cloud(self):
"""
Similar to `test_export_course_with_metadata_only_video`.
"""
content_store = contentstore()
import_course_from_xml(self.store, self.user.id, TEST_DATA_DIR, ['word_cloud'], create_if_not_present=True)
course_id = self.store.make_course_key('HarvardX', 'ER22x', '2013_Spring')
verticals = self.store.get_items(course_id, qualifiers={'category': 'vertical'})
self.assertGreater(len(verticals), 0)
parent = verticals[0]
ItemFactory.create(parent_location=parent.location, category="word_cloud", display_name="untitled")
root_dir = path(mkdtemp_clean())
print('Exporting to tempdir = {0}'.format(root_dir))
# export out to a tempdir
export_course_to_xml(self.store, content_store, course_id, root_dir, u'test_export')
shutil.rmtree(root_dir)
def test_import_after_renaming_xml_data(self):
"""
Test that import works fine on split mongo after renaming the blocks url.
"""
split_store = modulestore()._get_modulestore_by_type(ModuleStoreEnum.Type.split) # pylint: disable=W0212
import_course_from_xml(
split_store, self.user.id, TEST_DATA_DIR,
['course_before_rename'],
create_if_not_present=True
)
course_after_rename = import_course_from_xml(
split_store, self.user.id, TEST_DATA_DIR,
['course_after_rename'],
create_if_not_present=True
)
all_items = split_store.get_items(course_after_rename[0].id, qualifiers={'category': 'chapter'})
renamed_chapter = [item for item in all_items if item.location.block_id == 'renamed_chapter'][0]
self.assertIsNotNone(renamed_chapter.published_on)
self.assertIsNotNone(renamed_chapter.parent)
self.assertIn(renamed_chapter.location, course_after_rename[0].children)
original_chapter = [item for item in all_items
if item.location.block_id == 'b9870b9af59841a49e6e02765d0e3bbf'][0]
self.assertIsNone(original_chapter.published_on)
self.assertIsNone(original_chapter.parent)
self.assertNotIn(original_chapter.location, course_after_rename[0].children)
def test_empty_data_roundtrip(self):
"""
Test that an empty `data` field is preserved through
export/import.
"""
content_store = contentstore()
import_course_from_xml(self.store, self.user.id, TEST_DATA_DIR, ['toy'], create_if_not_present=True)
course_id = self.store.make_course_key('edX', 'toy', '2012_Fall')
verticals = self.store.get_items(course_id, qualifiers={'category': 'vertical'})
self.assertGreater(len(verticals), 0)
parent = verticals[0]
# Create a module, and ensure that its `data` field is empty
word_cloud = ItemFactory.create(parent_location=parent.location, category="word_cloud", display_name="untitled")
del word_cloud.data
self.assertEquals(word_cloud.data, '')
# Export the course
root_dir = path(mkdtemp_clean())
export_course_to_xml(self.store, content_store, course_id, root_dir, u'test_roundtrip')
# Reimport and get the word cloud back
import_course_from_xml(self.store, self.user.id, root_dir)
imported_word_cloud = self.store.get_item(course_id.make_usage_key('word_cloud', 'untitled'))
# It should now contain empty data
self.assertEquals(imported_word_cloud.data, '')
def test_html_export_roundtrip(self):
"""
Test that a course which has HTML that has style formatting is preserved in export/import
"""
content_store = contentstore()
import_course_from_xml(self.store, self.user.id, TEST_DATA_DIR, ['toy'], create_if_not_present=True)
course_id = self.store.make_course_key('edX', 'toy', '2012_Fall')
# Export the course
root_dir = path(mkdtemp_clean())
export_course_to_xml(self.store, content_store, course_id, root_dir, u'test_roundtrip')
# Reimport and get the HTML modules back
import_course_from_xml(self.store, self.user.id, root_dir, create_if_not_present=True)
# get the sample HTML with styling information
html_module = self.store.get_item(course_id.make_usage_key('html', 'with_styling'))
self.assertIn('<p style="font:italic bold 72px/30px Georgia, serif; color: red; ">', html_module.data)
# get the sample HTML with just a simple <img> tag information
html_module = self.store.get_item(course_id.make_usage_key('html', 'just_img'))
self.assertIn('<img src="/static/foo_bar.jpg" />', html_module.data)
def test_export_course_without_content_store(self):
# Create toy course
course_items = import_course_from_xml(
self.store, self.user.id, TEST_DATA_DIR, ['toy'], create_if_not_present=True
)
course_id = course_items[0].id
root_dir = path(mkdtemp_clean())
print('Exporting to tempdir = {0}'.format(root_dir))
export_course_to_xml(self.store, None, course_id, root_dir, u'test_export_no_content_store')
# Delete the course from module store and reimport it
self.store.delete_course(course_id, self.user.id)
import_course_from_xml(
self.store, self.user.id, root_dir, ['test_export_no_content_store'],
static_content_store=None,
target_id=course_id
)
# Verify reimported course
items = self.store.get_items(
course_id,
qualifiers={
'category': 'sequential',
'name': 'vertical_sequential',
}
)
self.assertEqual(len(items), 1)
def test_export_course_no_xml_attributes(self):
"""
Test that a module without an `xml_attributes` attr will still be
exported successfully
"""
content_store = contentstore()
import_course_from_xml(self.store, self.user.id, TEST_DATA_DIR, ['toy'], create_if_not_present=True)
course_id = self.store.make_course_key('edX', 'toy', '2012_Fall')
verticals = self.store.get_items(course_id, qualifiers={'category': 'vertical'})
vertical = verticals[0]
# create OpenAssessmentBlock:
open_assessment = ItemFactory.create(
parent_location=vertical.location,
category="openassessment",
display_name="untitled",
)
# convert it to draft
draft_open_assessment = self.store.convert_to_draft(
open_assessment.location, self.user.id
)
# note that it has no `xml_attributes` attribute
self.assertFalse(hasattr(draft_open_assessment, "xml_attributes"))
# export should still complete successfully
root_dir = path(mkdtemp_clean())
export_course_to_xml(
self.store,
content_store,
course_id,
root_dir,
u'test_no_xml_attributes'
)
@ddt.ddt
class MiscCourseTests(ContentStoreTestCase):
"""
Tests that rely on the toy courses.
"""
def setUp(self):
super(MiscCourseTests, self).setUp()
# save locs not items b/c the items won't have the subsequently created children in them until refetched
self.chapter_loc = self.store.create_child(
self.user.id, self.course.location, 'chapter', 'test_chapter'
).location
self.seq_loc = self.store.create_child(
self.user.id, self.chapter_loc, 'sequential', 'test_seq'
).location
self.vert_loc = self.store.create_child(self.user.id, self.seq_loc, 'vertical', 'test_vert').location
# now create some things roughly like what the toy course had
self.problem = self.store.create_child(
self.user.id, self.vert_loc, 'problem', 'test_problem', fields={
"data": "<problem>Test</problem>"
}
)
self.store.create_child(
self.user.id, self.vert_loc, 'video', fields={
"youtube_id_0_75": "JMD_ifUUfsU",
"youtube_id_1_0": "OEoXaMPEzfM",
"youtube_id_1_25": "AKqURZnYqpk",
"youtube_id_1_5": "DYpADpL7jAY",
"name": "sample_video",
}
)
self.store.create_child(
self.user.id, self.vert_loc, 'video', fields={
"youtube_id_0_75": "JMD_ifUUfsU",
"youtube_id_1_0": "OEoXaMPEzfM",
"youtube_id_1_25": "AKqURZnYqpk",
"youtube_id_1_5": "DYpADpL7jAY",
"name": "truncated_video",
"end_time": timedelta(hours=10),
}
)
self.store.create_child(
self.user.id, self.vert_loc, 'poll_question', fields={
"name": "T1_changemind_poll_foo_2",
"display_name": "Change your answer",
"question": "Have you changed your mind?",
"answers": [{"id": "yes", "text": "Yes"}, {"id": "no", "text": "No"}],
}
)
self.course = self.store.publish(self.course.location, self.user.id)
def check_components_on_page(self, component_types, expected_types):
"""
Ensure that the right types end up on the page.
component_types is the list of advanced components.
expected_types is the list of elements that should appear on the page.
expected_types and component_types should be similar, but not
exactly the same -- for example, 'video' in
component_types should cause 'Video' to be present.
"""
self.course.advanced_modules = component_types
self.store.update_item(self.course, self.user.id)
# just pick one vertical
resp = self.client.get_html(get_url('container_handler', self.vert_loc))
self.assertEqual(resp.status_code, 200)
for expected in expected_types:
self.assertIn(expected, resp.content)
@ddt.data("<script>alert(1)</script>", "alert('hi')", "</script><script>alert(1)</script>")
def test_container_handler_xss_prevent(self, malicious_code):
"""
Test that XSS attack is prevented
"""
resp = self.client.get_html(get_url('container_handler', self.vert_loc) + '?action=' + malicious_code)
self.assertEqual(resp.status_code, 200)
# Test that malicious code does not appear in html
self.assertNotIn(malicious_code, resp.content)
def test_advanced_components_in_edit_unit(self):
# This could be made better, but for now let's just assert that we see the advanced modules mentioned in the
# page response HTML
self.check_components_on_page(
ADVANCED_COMPONENT_TYPES,
['Word cloud', 'Annotation', 'Text Annotation', 'Video Annotation', 'Image Annotation',
'split_test'],
)
@ddt.data('/Fake/asset/displayname', '\\Fake\\asset\\displayname')
def test_export_on_invalid_displayname(self, invalid_displayname):
""" Tests that assets with invalid 'displayname' does not cause export to fail """
content_store = contentstore()
exported_asset_name = u'_Fake_asset_displayname'
# Create an asset whose display name is the invalid `invalid_displayname` value
asset_key = self.course.id.make_asset_key('asset', "fake_asset.txt")
content = StaticContent(
asset_key, invalid_displayname, 'application/text', 'test',
)
content_store.save(content)
# Verify that the course has only one asset and it has been added with an invalid asset name.
assets, count = content_store.get_all_content_for_course(self.course.id)
self.assertEqual(count, 1)
display_name = assets[0]['displayname']
self.assertEqual(display_name, invalid_displayname)
# Now export the course to a tempdir and test that it contains assets. The export should pass
root_dir = path(mkdtemp_clean())
print('Exporting to tempdir = {0}'.format(root_dir))
export_course_to_xml(self.store, content_store, self.course.id, root_dir, u'test_export')
filesystem = OSFS(root_dir / 'test_export/static')
exported_static_files = filesystem.listdir(u'/')
# Verify that only a single asset has been exported, with the expected asset name.
self.assertTrue(filesystem.exists(exported_asset_name))
self.assertEqual(len(exported_static_files), 1)
# Remove tempdir
shutil.rmtree(root_dir)
@mock.patch(
'lms.djangoapps.ccx.modulestore.CCXModulestoreWrapper.get_item',
mock.Mock(return_value=mock.Mock(children=[]))
)
def test_export_with_orphan_vertical(self):
"""
Tests that export does not fail when a parent xblock does not have draft child xblock
information but the draft child xblock has parent information.
"""
# Make an existing unit a draft
self.store.convert_to_draft(self.problem.location, self.user.id)
root_dir = path(mkdtemp_clean())
export_course_to_xml(self.store, None, self.course.id, root_dir, u'test_export')
# Verify that no draft is exported. This is expected because get_item is mocked
# for drafts, so the exporter finds no draft children to export.
# Specifically get_item is used in `xmodule.modulestore.xml_exporter._export_drafts`
export_draft_dir = OSFS(root_dir / 'test_export/drafts')
self.assertEqual(len(export_draft_dir.listdir(u'/')), 0)
# Remove tempdir
shutil.rmtree(root_dir)
def test_assets_overwrite(self):
""" Tests that assets will similar 'displayname' will be overwritten during export """
content_store = contentstore()
asset_displayname = u'Fake_asset.txt'
# Create two assets with similar 'displayname'
for i in range(2):
asset_path = 'sample_asset_{}.txt'.format(i)
asset_key = self.course.id.make_asset_key('asset', asset_path)
content = StaticContent(
asset_key, asset_displayname, 'application/text', 'test',
)
content_store.save(content)
# Fetch & verify course assets to be equal to 2.
assets, count = content_store.get_all_content_for_course(self.course.id)
self.assertEqual(count, 2)
# Verify both assets have similar 'displayname' after saving.
for asset in assets:
self.assertEquals(asset['displayname'], asset_displayname)
# Now export the course to a tempdir and test that it contains assets.
root_dir = path(mkdtemp_clean())
print('Exporting to tempdir = {0}'.format(root_dir))
export_course_to_xml(self.store, content_store, self.course.id, root_dir, u'test_export')
# Verify that asset have been overwritten during export.
filesystem = OSFS(root_dir / 'test_export/static')
exported_static_files = filesystem.listdir(u'/')
self.assertTrue(filesystem.exists(asset_displayname))
self.assertEqual(len(exported_static_files), 1)
# Remove tempdir
shutil.rmtree(root_dir)
def test_advanced_components_require_two_clicks(self):
self.check_components_on_page(['word_cloud'], ['Word cloud'])
def test_malformed_edit_unit_request(self):
# just pick one vertical
usage_key = self.course.id.make_usage_key('vertical', None)
resp = self.client.get_html(get_url('container_handler', usage_key))
self.assertEqual(resp.status_code, 400)
def test_edit_unit(self):
"""Verifies rendering the editor in all the verticals in the given test course"""
self._check_verticals([self.vert_loc])
def _get_draft_counts(self, item):
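"""Recursively count the modules in `item`'s subtree that are flagged as drafts."""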
cnt = 1 if getattr(item, 'is_draft', False) else 0
for child in item.get_children():
cnt = cnt + self._get_draft_counts(child)
return cnt
def test_get_items(self):
"""
This verifies a bug we had where the None setting in get_items() meant 'wildcard'
Unfortunately, None = published for the revision field, so get_items() would return
both draft and non-draft copies.
"""
self.store.convert_to_draft(self.problem.location, self.user.id)
# Query get_items() and find the problem item. This should just return back a single item (not 2).
direct_store_items = self.store.get_items(
self.course.id, revision=ModuleStoreEnum.RevisionOption.published_only
)
items_from_direct_store = [item for item in direct_store_items if item.location == self.problem.location]
self.assertEqual(len(items_from_direct_store), 1)
self.assertFalse(getattr(items_from_direct_store[0], 'is_draft', False))
# Fetch from the draft store.
draft_store_items = self.store.get_items(
self.course.id, revision=ModuleStoreEnum.RevisionOption.draft_only
)
items_from_draft_store = [item for item in draft_store_items if item.location == self.problem.location]
self.assertEqual(len(items_from_draft_store), 1)
# TODO the below won't work for split mongo
self.assertTrue(getattr(items_from_draft_store[0], 'is_draft', False))
def test_draft_metadata(self):
"""
This verifies a bug we had where inherited metadata was getting written to the
module as 'own-metadata' when publishing. Also verifies the metadata inheritance is
properly computed
"""
# refetch course so it has all the children correct
course = self.store.update_item(self.course, self.user.id)
course.graceperiod = timedelta(days=1, hours=5, minutes=59, seconds=59)
course = self.store.update_item(course, self.user.id)
problem = self.store.get_item(self.problem.location)
self.assertEqual(problem.graceperiod, course.graceperiod)
self.assertNotIn('graceperiod', own_metadata(problem))
self.store.convert_to_draft(problem.location, self.user.id)
# refetch to check metadata
problem = self.store.get_item(problem.location)
self.assertEqual(problem.graceperiod, course.graceperiod)
self.assertNotIn('graceperiod', own_metadata(problem))
# publish module
self.store.publish(problem.location, self.user.id)
# refetch to check metadata
problem = self.store.get_item(problem.location)
self.assertEqual(problem.graceperiod, course.graceperiod)
self.assertNotIn('graceperiod', own_metadata(problem))
# put back in draft and change metadata and see if it's now marked as 'own_metadata'
self.store.convert_to_draft(problem.location, self.user.id)
problem = self.store.get_item(problem.location)
new_graceperiod = timedelta(hours=1)
self.assertNotIn('graceperiod', own_metadata(problem))
problem.graceperiod = new_graceperiod
# Save the data that we've just changed to the underlying
# MongoKeyValueStore before we update the mongo datastore.
problem.save()
self.assertIn('graceperiod', own_metadata(problem))
self.assertEqual(problem.graceperiod, new_graceperiod)
self.store.update_item(problem, self.user.id)
# read back to make sure it reads as 'own-metadata'
problem = self.store.get_item(problem.location)
self.assertIn('graceperiod', own_metadata(problem))
self.assertEqual(problem.graceperiod, new_graceperiod)
# republish
self.store.publish(problem.location, self.user.id)
# and re-read and verify 'own-metadata'
self.store.convert_to_draft(problem.location, self.user.id)
problem = self.store.get_item(problem.location)
self.assertIn('graceperiod', own_metadata(problem))
self.assertEqual(problem.graceperiod, new_graceperiod)
def test_get_depth_with_drafts(self):
# make sure no draft items have been returned
num_drafts = self._get_draft_counts(self.course)
self.assertEqual(num_drafts, 0)
# put into draft
self.store.convert_to_draft(self.problem.location, self.user.id)
# make sure we can query that item and verify that it is a draft
draft_problem = self.store.get_item(self.problem.location)
self.assertTrue(getattr(draft_problem, 'is_draft', False))
# now requery with depth
course = self.store.get_course(self.course.id, depth=None)
# make sure just one draft item has been returned
num_drafts = self._get_draft_counts(course)
self.assertEqual(num_drafts, 1)
@mock.patch('xmodule.course_module.requests.get')
def test_import_textbook_as_content_element(self, mock_get):
mock_get.return_value.text = dedent("""
<?xml version="1.0"?><table_of_contents>
<entry page="5" page_label="ii" name="Table of Contents"/>
</table_of_contents>
""").strip()
self.course.textbooks = [Textbook("Textbook", "https://s3.amazonaws.com/edx-textbooks/guttag_computation_v3/")]
course = self.store.update_item(self.course, self.user.id)
self.assertGreater(len(course.textbooks), 0)
def test_import_polls(self):
items = self.store.get_items(self.course.id, qualifiers={'category': 'poll_question'})
self.assertGreater(len(items), 0)
# check that there's actually content in the 'question' field
self.assertGreater(len(items[0].question), 0)
def test_module_preview_in_whitelist(self):
"""
Tests the ajax callback to render an XModule
"""
with override_settings(COURSES_WITH_UNSAFE_CODE=[text_type(self.course.id)]):
# also try a custom response which will trigger the 'is this course in whitelist' logic
resp = self.client.get_json(
get_url('xblock_view_handler', self.vert_loc, kwargs={'view_name': 'container_preview'})
)
self.assertEqual(resp.status_code, 200)
vertical = self.store.get_item(self.vert_loc)
for child in vertical.children:
self.assertContains(resp, text_type(child))
def test_delete(self):
# make sure the parent points to the child object which is to be deleted
# need to refetch chapter b/c at the time it was assigned it had no children
chapter = self.store.get_item(self.chapter_loc)
self.assertIn(self.seq_loc, chapter.children)
self.client.delete(get_url('xblock_handler', self.seq_loc))
with self.assertRaises(ItemNotFoundError):
self.store.get_item(self.seq_loc)
chapter = self.store.get_item(self.chapter_loc)
# make sure the parent no longer points to the child object which was deleted
self.assertNotIn(self.seq_loc, chapter.children)
def test_asset_delete_and_restore(self):
"""
This test will exercise the soft delete/restore functionality of the assets
"""
asset_key = self._delete_asset_in_course()
# now try to find it in store, but they should not be there any longer
content = contentstore().find(asset_key, throw_on_not_found=False)
self.assertIsNone(content)
# now try to find it and the thumbnail in trashcan - should be in there
content = contentstore('trashcan').find(asset_key, throw_on_not_found=False)
self.assertIsNotNone(content)
# let's restore the asset
restore_asset_from_trashcan(text_type(asset_key))
# now try to find it in courseware store, and they should be back after restore
content = contentstore().find(asset_key, throw_on_not_found=False)
self.assertIsNotNone(content)
def _delete_asset_in_course(self):
"""
Helper method that:
1) creates a static asset in the course's contentstore
2) deletes the asset through the assets_handler view (soft delete)
3) returns the asset key so callers can make further assertions
"""
asset_key = self.course.id.make_asset_key('asset', 'sample_static.html')
content = StaticContent(
asset_key, "Fake asset", "application/text", "test",
)
contentstore().save(content)
# go through the website to do the delete, since the soft-delete logic is in the view
url = reverse_course_url(
'assets_handler',
self.course.id,
kwargs={'asset_key_string': text_type(asset_key)}
)
resp = self.client.delete(url)
self.assertEqual(resp.status_code, 204)
return asset_key
def test_empty_trashcan(self):
"""
This test will exercise the emptying of the asset trashcan
"""
self._delete_asset_in_course()
# make sure there's something in the trashcan
all_assets, __ = contentstore('trashcan').get_all_content_for_course(self.course.id)
self.assertGreater(len(all_assets), 0)
# empty the trashcan
empty_asset_trashcan([self.course.id])
# make sure trashcan is empty
all_assets, count = contentstore('trashcan').get_all_content_for_course(self.course.id)
self.assertEqual(len(all_assets), 0)
self.assertEqual(count, 0)
def test_illegal_draft_crud_ops(self):
# this test presumes old mongo and split_draft not full split
with self.assertRaises(InvalidVersionError):
self.store.convert_to_draft(self.chapter_loc, self.user.id)
chapter = self.store.get_item(self.chapter_loc)
chapter.data = 'chapter data'
self.store.update_item(chapter, self.user.id)
newobject = self.store.get_item(self.chapter_loc)
self.assertFalse(getattr(newobject, 'is_draft', False))
with self.assertRaises(InvalidVersionError):
self.store.unpublish(self.chapter_loc, self.user.id)
def test_bad_contentstore_request(self):
"""
Test that the user gets proper responses for urls with an invalid url or
asset/course key
"""
resp = self.client.get_html('/c4x/CDX/123123/asset/&invalid.png')
self.assertEqual(resp.status_code, 400)
resp = self.client.get_html('/c4x/CDX/123123/asset/invalid.png')
self.assertEqual(resp.status_code, 404)
# Now test that 404 response is returned when user tries to access
# asset of some invalid course from split ModuleStore
with self.store.default_store(ModuleStoreEnum.Type.split):
resp = self.client.get_html('/c4x/InvalidOrg/InvalidCourse/asset/invalid.png')
self.assertEqual(resp.status_code, 404)
@override_switch(
'{}.{}'.format(waffle.WAFFLE_NAMESPACE, waffle.ENABLE_ACCESSIBILITY_POLICY_PAGE),
active=False)
def test_disabled_accessibility_page(self):
"""
Test that accessibility page returns 404 when waffle switch is disabled
"""
resp = self.client.get_html('/accessibility')
self.assertEqual(resp.status_code, 404)
def test_delete_course(self):
"""
This test creates a course, makes a draft item, and deletes the course. This will also assert that the
draft content is also deleted
"""
# add an asset
asset_key = self.course.id.make_asset_key('asset', 'sample_static.html')
content = StaticContent(
asset_key, "Fake asset", "application/text", "test",
)
contentstore().save(content)
assets, count = contentstore().get_all_content_for_course(self.course.id)
self.assertGreater(len(assets), 0)
self.assertGreater(count, 0)
self.store.convert_to_draft(self.vert_loc, self.user.id)
# delete the course
self.store.delete_course(self.course.id, self.user.id)
# assert that there are absolutely no modules (draft or published) left in the course
items = self.store.get_items(self.course.id)
self.assertEqual(len(items), 0)
# assert that all content in the asset library is also deleted
assets, count = contentstore().get_all_content_for_course(self.course.id)
self.assertEqual(len(assets), 0)
self.assertEqual(count, 0)
def test_course_handouts_rewrites(self):
"""
Test that the xblock_handler rewrites static handout links
"""
handouts = self.store.create_item(
self.user.id, self.course.id, 'course_info', 'handouts', fields={
"data": "<a href='/static/handouts/sample_handout.txt'>Sample</a>",
}
)
# get module info (json)
resp = self.client.get(get_url('xblock_handler', handouts.location))
# make sure we got a successful response
self.assertEqual(resp.status_code, 200)
# check that /static/ has been converted to the full path
# note, we know what the link should be because that's what's in the 'toy' course in the test data
asset_key = self.course.id.make_asset_key('asset', 'handouts_sample_handout.txt')
self.assertContains(resp, text_type(asset_key))
def test_prefetch_children(self):
# make sure we haven't done too many round trips to DB:
# 1) the course,
# 2 & 3) for the chapters and sequentials
# Because we're querying from the top of the tree, we cache information needed for inheritance,
# so we don't need to make an extra query to compute it.
# set the branch to 'publish' in order to prevent extra lookups of draft versions
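# (The `3` passed to check_mongo_calls below corresponds to those three round trips:
# the course, the chapters, and the sequentials.)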
with self.store.branch_setting(ModuleStoreEnum.Branch.published_only, self.course.id):
with check_mongo_calls(3):
course = self.store.get_course(self.course.id, depth=2)
# make sure we pre-fetched a known sequential which should be at depth=2
self.assertIn(self.seq_loc, course.system.module_data)
# make sure we don't have a specific vertical which should be at depth=3
self.assertNotIn(self.vert_loc, course.system.module_data)
# Now, test with the branch set to draft. No extra round trips b/c it doesn't go deep enough to get
# beyond direct only categories
with self.store.branch_setting(ModuleStoreEnum.Branch.draft_preferred, self.course.id):
with check_mongo_calls(3):
self.store.get_course(self.course.id, depth=2)
def _check_verticals(self, locations):
""" Test getting the editing HTML for each vertical. """
# Assert is here to make sure that the course being tested actually has verticals (units) to check.
self.assertGreater(len(locations), 0)
for loc in locations:
resp = self.client.get_html(get_url('container_handler', loc))
self.assertEqual(resp.status_code, 200)
@ddt.ddt
class ContentStoreTest(ContentStoreTestCase):
"""
Tests for the CMS ContentStore application.
"""
duplicate_course_error = ("There is already a course defined with the same organization and course number. "
"Please change either organization or course number to be unique.")
def setUp(self):
super(ContentStoreTest, self).setUp()
self.course_data = {
'org': 'MITx',
'number': '111',
'display_name': 'Robot Super Course',
'run': '2013_Spring'
}
def assert_created_course(self, number_suffix=None):
"""
Checks that the course was created properly.
"""
test_course_data = {}
test_course_data.update(self.course_data)
if number_suffix:
test_course_data['number'] = '{0}_{1}'.format(test_course_data['number'], number_suffix)
course_key = _get_course_id(self.store, test_course_data)
_create_course(self, course_key, test_course_data)
# Verify that the creator is now registered in the course.
self.assertTrue(CourseEnrollment.is_enrolled(self.user, course_key))
return test_course_data
def assert_create_course_failed(self, error_message):
"""
Checks that the course was not created.
"""
resp = self.client.ajax_post('/course/', self.course_data)
self.assertEqual(resp.status_code, 400)
data = parse_json(resp)
self.assertEqual(data['error'], error_message)
def test_create_course(self):
"""Test new course creation - happy path"""
self.assert_created_course()
@override_settings(DEFAULT_COURSE_LANGUAGE='hr')
def test_create_course_default_language(self):
"""Test new course creation and verify default language"""
test_course_data = self.assert_created_course()
course_id = _get_course_id(self.store, test_course_data)
course_module = self.store.get_course(course_id)
self.assertEquals(course_module.language, 'hr')
def test_create_course_with_dots(self):
"""Test new course creation with dots in the name"""
self.course_data['org'] = 'org.foo.bar'
self.course_data['number'] = 'course.number'
self.course_data['run'] = 'run.name'
self.assert_created_course()
@ddt.data(ModuleStoreEnum.Type.split, ModuleStoreEnum.Type.mongo)
def test_course_with_different_cases(self, default_store):
"""
Tests that a course cannot be created when its number differs only in case from an
existing course, using an AJAX request to the course handler.
"""
course_number = '99x'
with self.store.default_store(default_store):
# Verify that creating a course passes with a lower-case number.
self.course_data['number'] = course_number.lower()
self.assert_created_course()
# Verify that course creation fails when the same course number is provided with a different case.
self.course_data['number'] = course_number.upper()
self.assert_course_creation_failed(self.duplicate_course_error)
def test_create_course_check_forum_seeding(self):
"""Test new course creation and verify forum seeding """
test_course_data = self.assert_created_course(number_suffix=uuid4().hex)
self.assertTrue(are_permissions_roles_seeded(_get_course_id(self.store, test_course_data)))
def test_forum_unseeding_on_delete(self):
"""Test new course creation and verify forum unseeding """
test_course_data = self.assert_created_course(number_suffix=uuid4().hex)
course_id = _get_course_id(self.store, test_course_data)
self.assertTrue(are_permissions_roles_seeded(course_id))
delete_course(course_id, self.user.id)
# should raise an exception for checking permissions on deleted course
with self.assertRaises(ItemNotFoundError):
are_permissions_roles_seeded(course_id)
def test_forum_unseeding_with_multiple_courses(self):
"""Test new course creation and verify forum unseeding when there are multiple courses"""
test_course_data = self.assert_created_course(number_suffix=uuid4().hex)
second_course_data = self.assert_created_course(number_suffix=uuid4().hex)
# unseed the forums for the first course
course_id = _get_course_id(self.store, test_course_data)
delete_course(course_id, self.user.id)
# should raise an exception for checking permissions on deleted course
with self.assertRaises(ItemNotFoundError):
are_permissions_roles_seeded(course_id)
second_course_id = _get_course_id(self.store, second_course_data)
# permissions should still be there for the other course
self.assertTrue(are_permissions_roles_seeded(second_course_id))
def test_course_enrollments_and_roles_on_delete(self):
"""
Test that course deletion doesn't remove course enrollments or user's roles
"""
test_course_data = self.assert_created_course(number_suffix=uuid4().hex)
course_id = _get_course_id(self.store, test_course_data)
# test that a user gets his enrollment and its 'student' role as default on creating a course
self.assertTrue(CourseEnrollment.is_enrolled(self.user, course_id))
self.assertTrue(self.user.roles.filter(name="Student", course_id=course_id))
delete_course(course_id, self.user.id)
# check that user's enrollment for this course is not deleted
self.assertTrue(CourseEnrollment.is_enrolled(self.user, course_id))
# check that user has form role "Student" for this course even after deleting it
self.assertTrue(self.user.roles.filter(name="Student", course_id=course_id))
def test_course_access_groups_on_delete(self):
"""
Test that course deletion removes users from 'instructor' and 'staff' groups of this course
of all format e.g, 'instructor_edX/Course/Run', 'instructor_edX.Course.Run', 'instructor_Course'
"""
test_course_data = self.assert_created_course(number_suffix=uuid4().hex)
course_id = _get_course_id(self.store, test_course_data)
# Add user in possible groups and check that user in instructor groups of this course
instructor_role = CourseInstructorRole(course_id)
auth.add_users(self.user, instructor_role, self.user)
self.assertGreater(len(instructor_role.users_with_role()), 0)
# Now delete course and check that user not in instructor groups of this course
delete_course(course_id, self.user.id)
# Update our cached user since its roles have changed
self.user = User.objects.get_by_natural_key(self.user.natural_key()[0])
self.assertFalse(instructor_role.has_user(self.user))
self.assertEqual(len(instructor_role.users_with_role()), 0)
def test_delete_course_with_keep_instructors(self):
"""
Tests that when you delete a course with 'keep_instructors',
it does not remove any permissions of users/groups from the course
"""
test_course_data = self.assert_created_course(number_suffix=uuid4().hex)
course_id = _get_course_id(self.store, test_course_data)
# Add and verify instructor role for the course
instructor_role = CourseInstructorRole(course_id)
instructor_role.add_users(self.user)
self.assertTrue(instructor_role.has_user(self.user))
delete_course(course_id, self.user.id, keep_instructors=True)
# Update our cached user so if any change in roles can be captured
self.user = User.objects.get_by_natural_key(self.user.natural_key()[0])
self.assertTrue(instructor_role.has_user(self.user))
def test_create_course_after_delete(self):
"""
Test that course creation works after deleting a course with the same URL
"""
test_course_data = self.assert_created_course()
course_id = _get_course_id(self.store, test_course_data)
delete_course(course_id, self.user.id)
self.assert_created_course()
def test_create_course_duplicate_course(self):
"""Test new course creation - error path"""
self.client.ajax_post('/course/', self.course_data)
self.assert_course_creation_failed(self.duplicate_course_error)
def assert_course_creation_failed(self, error_message):
"""
Checks that the course did not get created
"""
test_enrollment = False
try:
course_id = _get_course_id(self.store, self.course_data)
initially_enrolled = CourseEnrollment.is_enrolled(self.user, course_id)
test_enrollment = True
except InvalidKeyError:
# b/c the intent of the test with bad chars isn't to test auth but to test the handler, ignore
pass
resp = self.client.ajax_post('/course/', self.course_data)
self.assertEqual(resp.status_code, 200)
data = parse_json(resp)
self.assertRegexpMatches(data['ErrMsg'], error_message)
if test_enrollment:
# One test case involves trying to create the same course twice. Hence for that course,
# the user will be enrolled. In the other cases, initially_enrolled will be False.
self.assertEqual(initially_enrolled, CourseEnrollment.is_enrolled(self.user, course_id))
def test_create_course_duplicate_number(self):
"""Test new course creation - error path"""
self.client.ajax_post('/course/', self.course_data)
self.course_data['display_name'] = 'Robot Super Course Two'
self.course_data['run'] = '2013_Summer'
self.assert_course_creation_failed(self.duplicate_course_error)
@ddt.data(ModuleStoreEnum.Type.split, ModuleStoreEnum.Type.mongo)
def test_create_course_case_change(self, default_store):
"""Test new course creation - error path due to case insensitive name equality"""
self.course_data['number'] = '99x'
with self.store.default_store(default_store):
# Verify that the course was created properly.
self.assert_created_course()
# Keep the copy of original org
cache_current = self.course_data['org']
# Change `org` to lower case and verify that course did not get created
self.course_data['org'] = self.course_data['org'].lower()
self.assert_course_creation_failed(self.duplicate_course_error)
# Replace the org with its actual value, and keep the copy of course number.
self.course_data['org'] = cache_current
cache_current = self.course_data['number']
self.course_data['number'] = self.course_data['number'].upper()
self.assert_course_creation_failed(self.duplicate_course_error)
# Replace the number with its actual value, and keep the copy of course run.
self.course_data['number'] = cache_current
__ = self.course_data['run']
self.course_data['run'] = self.course_data['run'].upper()
self.assert_course_creation_failed(self.duplicate_course_error)
def test_course_substring(self):
"""
Test that a new course can be created whose name is a substring of an existing course
"""
self.client.ajax_post('/course/', self.course_data)
cache_current = self.course_data['number']
self.course_data['number'] = '{}a'.format(self.course_data['number'])
resp = self.client.ajax_post('/course/', self.course_data)
self.assertEqual(resp.status_code, 200)
self.course_data['number'] = cache_current
self.course_data['org'] = 'a{}'.format(self.course_data['org'])
resp = self.client.ajax_post('/course/', self.course_data)
self.assertEqual(resp.status_code, 200)
def test_create_course_with_bad_organization(self):
"""Test new course creation - error path for bad organization name"""
self.course_data['org'] = 'University of California, Berkeley'
self.assert_course_creation_failed(r"(?s)Unable to create course 'Robot Super Course'.*")
def test_create_course_with_course_creation_disabled_staff(self):
"""Test new course creation -- course creation disabled, but staff access."""
with mock.patch.dict('django.conf.settings.FEATURES', {'DISABLE_COURSE_CREATION': True}):
self.assert_created_course()
def test_create_course_with_course_creation_disabled_not_staff(self):
"""Test new course creation -- error path for course creation disabled, not staff access."""
with mock.patch.dict('django.conf.settings.FEATURES', {'DISABLE_COURSE_CREATION': True}):
self.user.is_staff = False
self.user.save()
self.assert_course_permission_denied()
def test_create_course_no_course_creators_staff(self):
"""Test new course creation -- course creation group enabled, staff, group is empty."""
with mock.patch.dict('django.conf.settings.FEATURES', {'ENABLE_CREATOR_GROUP': True}):
self.assert_created_course()
def test_create_course_no_course_creators_not_staff(self):
"""Test new course creation -- error path for course creator group enabled, not staff, group is empty."""
with mock.patch.dict('django.conf.settings.FEATURES', {"ENABLE_CREATOR_GROUP": True}):
self.user.is_staff = False
self.user.save()
self.assert_course_permission_denied()
def test_create_course_with_course_creator(self):
"""Test new course creation -- use course creator group"""
with mock.patch.dict('django.conf.settings.FEATURES', {"ENABLE_CREATOR_GROUP": True}):
auth.add_users(self.user, CourseCreatorRole(), self.user)
self.assert_created_course()
def test_create_course_with_unicode_in_id_disabled(self):
"""
Test new course creation with feature setting: ALLOW_UNICODE_COURSE_ID disabled.
"""
with mock.patch.dict('django.conf.settings.FEATURES', {'ALLOW_UNICODE_COURSE_ID': False}):
error_message = "Special characters not allowed in organization, course number, and course run."
self.course_data['org'] = u'Юникода'
self.assert_create_course_failed(error_message)
self.course_data['number'] = u'échantillon'
self.assert_create_course_failed(error_message)
self.course_data['run'] = u'όνομα'
self.assert_create_course_failed(error_message)
def assert_course_permission_denied(self):
"""
Checks that the course did not get created due to a PermissionError.
"""
resp = self.client.ajax_post('/course/', self.course_data)
self.assertEqual(resp.status_code, 403)
def test_course_index_view_with_no_courses(self):
"""Test viewing the index page with no courses"""
resp = self.client.get_html('/home/')
self.assertContains(
resp,
'<h1 class="page-header">Studio Home</h1>',
status_code=200,
html=True
)
def test_course_factory(self):
"""Test that the course factory works correctly."""
course = CourseFactory.create()
self.assertIsInstance(course, CourseDescriptor)
def test_item_factory(self):
"""Test that the item factory works correctly."""
course = CourseFactory.create()
item = ItemFactory.create(parent_location=course.location)
self.assertIsInstance(item, SequenceDescriptor)
def test_course_overview_view_with_course(self):
"""Test viewing the course overview page with an existing course"""
course = CourseFactory.create()
resp = self._show_course_overview(course.id)
self.assertContains(
resp,
'<article class="outline outline-complex outline-course" data-locator="{locator}" data-course-key="{course_key}">'.format(
locator=text_type(course.location),
course_key=text_type(course.id),
),
status_code=200,
html=True
)
def test_create_item(self):
"""Test creating a new xblock instance."""
course = CourseFactory.create()
section_data = {
'parent_locator': text_type(course.location),
'category': 'chapter',
'display_name': 'Section One',
}
resp = self.client.ajax_post(reverse_url('xblock_handler'), section_data)
self.assertEqual(resp.status_code, 200)
data = parse_json(resp)
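# Build a regex for the expected locator: take a template usage key and swap the
# 'REPLACE' placeholder for a short hex block-id pattern.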
retarget = text_type(course.id.make_usage_key('chapter', 'REPLACE')).replace('REPLACE', r'([0-9]|[a-f]){3,}')
self.assertRegexpMatches(data['locator'], retarget)
def test_capa_module(self):
"""Test that a problem treats markdown specially."""
course = CourseFactory.create()
problem_data = {
'parent_locator': text_type(course.location),
'category': 'problem'
}
resp = self.client.ajax_post(reverse_url('xblock_handler'), problem_data)
self.assertEqual(resp.status_code, 200)
payload = parse_json(resp)
problem_loc = UsageKey.from_string(payload['locator'])
problem = self.store.get_item(problem_loc)
# should be a CapaDescriptor
self.assertIsInstance(problem, CapaDescriptor, "New problem is not a CapaDescriptor")
context = problem.get_context()
self.assertIn('markdown', context, "markdown is missing from context")
self.assertNotIn('markdown', problem.editable_metadata_fields, "Markdown slipped into the editable metadata fields")
def test_cms_imported_course_walkthrough(self):
"""
Import a course and walk through some common URL endpoints. This just verifies that the pages
do not return 500s; it does not check any other behavior, so it is not a deep test
"""
def test_get_html(handler):
# Helper function for getting HTML for a page in Studio and
# checking that it does not error.
resp = self.client.get_html(
get_url(handler, course_key, 'course_key_string')
)
self.assertEqual(resp.status_code, 200)
course_items = import_course_from_xml(
self.store, self.user.id, TEST_DATA_DIR, ['simple'], create_if_not_present=True
)
course_key = course_items[0].id
resp = self._show_course_overview(course_key)
self.assertEqual(resp.status_code, 200)
self.assertContains(resp, 'Chapter 2')
# go to various pages
test_get_html('import_handler')
test_get_html('export_handler')
test_get_html('course_team_handler')
test_get_html('course_info_handler')
test_get_html('assets_handler')
test_get_html('tabs_handler')
test_get_html('settings_handler')
test_get_html('grading_handler')
test_get_html('advanced_settings_handler')
test_get_html('textbooks_list_handler')
# go look at the Edit page
unit_key = course_key.make_usage_key('vertical', 'test_vertical')
resp = self.client.get_html(get_url('container_handler', unit_key))
self.assertEqual(resp.status_code, 200)
def delete_item(category, name):
""" Helper method for testing the deletion of an xblock item. """
item_key = course_key.make_usage_key(category, name)
resp = self.client.delete(get_url('xblock_handler', item_key))
self.assertEqual(resp.status_code, 204)
# delete a component
delete_item(category='html', name='test_html')
# delete a unit
delete_item(category='vertical', name='test_vertical')
# delete a subsection (sequential)
delete_item(category='sequential', name='test_sequence')
# delete a chapter
delete_item(category='chapter', name='chapter_2')
def test_import_into_new_course_id(self):
target_id = _get_course_id(self.store, self.course_data)
_create_course(self, target_id, self.course_data)
import_course_from_xml(self.store, self.user.id, TEST_DATA_DIR, ['toy'], target_id=target_id)
modules = self.store.get_items(target_id)
# we should have a number of modules in there
# we can't specify an exact number since it'll always be changing
self.assertGreater(len(modules), 10)
#
# test various re-namespacing elements
#
# first check PDF textbooks, to make sure the url paths got updated
course_module = self.store.get_course(target_id)
self.assertEqual(len(course_module.pdf_textbooks), 1)
self.assertEqual(len(course_module.pdf_textbooks[0]["chapters"]), 2)
self.assertEqual(course_module.pdf_textbooks[0]["chapters"][0]["url"], '/static/Chapter1.pdf')
self.assertEqual(course_module.pdf_textbooks[0]["chapters"][1]["url"], '/static/Chapter2.pdf')
def test_import_into_new_course_id_wiki_slug_renamespacing(self):
# If reimporting into the same course do not change the wiki_slug.
target_id = self.store.make_course_key('edX', 'toy', '2012_Fall')
course_data = {
'org': target_id.org,
'number': target_id.course,
'display_name': 'Robot Super Course',
'run': target_id.run
}
_create_course(self, target_id, course_data)
course_module = self.store.get_course(target_id)
course_module.wiki_slug = 'toy'
course_module.save()
# Import a course with wiki_slug == location.course
import_course_from_xml(self.store, self.user.id, TEST_DATA_DIR, ['toy'], target_id=target_id)
course_module = self.store.get_course(target_id)
self.assertEquals(course_module.wiki_slug, 'toy')
# But change the wiki_slug if it is a different course.
target_id = self.store.make_course_key('MITx', '111', '2013_Spring')
course_data = {
'org': target_id.org,
'number': target_id.course,
'display_name': 'Robot Super Course',
'run': target_id.run
}
_create_course(self, target_id, course_data)
# Import a course with wiki_slug == location.course
import_course_from_xml(self.store, self.user.id, TEST_DATA_DIR, ['toy'], target_id=target_id)
course_module = self.store.get_course(target_id)
self.assertEquals(course_module.wiki_slug, 'MITx.111.2013_Spring')
# Now try importing a course with wiki_slug == '{0}.{1}.{2}'.format(location.org, location.course, location.run)
import_course_from_xml(self.store, self.user.id, TEST_DATA_DIR, ['two_toys'], target_id=target_id)
course_module = self.store.get_course(target_id)
self.assertEquals(course_module.wiki_slug, 'MITx.111.2013_Spring')
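# Note (illustrative comment, not part of the original test): the assertions above imply that
# importing into a *different* course renamespaces the wiki_slug to '{org}.{course}.{run}' of the
# target key, e.g. '{0}.{1}.{2}'.format(target_id.org, target_id.course, target_id.run)
# == 'MITx.111.2013_Spring', while re-importing into the same course leaves the original
# slug ('toy') untouched.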
def test_import_metadata_with_attempts_empty_string(self):
import_course_from_xml(self.store, self.user.id, TEST_DATA_DIR, ['simple'], create_if_not_present=True)
did_load_item = False
try:
course_key = self.store.make_course_key('edX', 'simple', 'problem')
usage_key = course_key.make_usage_key('problem', 'ps01-simple')
self.store.get_item(usage_key)
did_load_item = True
except ItemNotFoundError:
pass
# make sure we found the item (e.g. it didn't error while loading)
self.assertTrue(did_load_item)
@ddt.data(ModuleStoreEnum.Type.split, ModuleStoreEnum.Type.mongo)
def test_forum_id_generation(self, default_store):
"""
Test that a discussion item, even if it doesn't set its discussion_id,
consistently generates the same one
"""
course = CourseFactory.create(default_store=default_store)
# create a discussion item
discussion_item = self.store.create_item(self.user.id, course.id, 'discussion', 'new_component')
# now fetch it from the modulestore to instantiate its descriptor
fetched = self.store.get_item(discussion_item.location)
# refetch it to be safe
refetched = self.store.get_item(discussion_item.location)
# and make sure the same discussion items have the same discussion ids
self.assertEqual(fetched.discussion_id, discussion_item.discussion_id)
self.assertEqual(fetched.discussion_id, refetched.discussion_id)
# and make sure that the id isn't the old "$$GUID$$"
self.assertNotEqual(discussion_item.discussion_id, '$$GUID$$')
def test_metadata_inheritance(self):
course_items = import_course_from_xml(
self.store, self.user.id, TEST_DATA_DIR, ['toy'], create_if_not_present=True
)
course = course_items[0]
verticals = self.store.get_items(course.id, qualifiers={'category': 'vertical'})
# let's assert on the metadata_inheritance on an existing vertical
for vertical in verticals:
self.assertEqual(course.xqa_key, vertical.xqa_key)
self.assertEqual(course.start, vertical.start)
self.assertGreater(len(verticals), 0)
# create a new module and add it as a child to a vertical
parent = verticals[0]
new_block = self.store.create_child(
self.user.id, parent.location, 'html', 'new_component'
)
# flush the cache
new_block = self.store.get_item(new_block.location)
# check for grace period definition which should be defined at the course level
self.assertEqual(parent.graceperiod, new_block.graceperiod)
self.assertEqual(parent.start, new_block.start)
self.assertEqual(course.start, new_block.start)
self.assertEqual(course.xqa_key, new_block.xqa_key)
#
# now let's define an override at the leaf node level
#
new_block.graceperiod = timedelta(1)
self.store.update_item(new_block, self.user.id)
# flush the cache and refetch
new_block = self.store.get_item(new_block.location)
self.assertEqual(timedelta(1), new_block.graceperiod)
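# Note (descriptive comment added for clarity): the assertions above exercise two layers of
# behaviour -- inheritable fields (start, graceperiod, xqa_key) flow from the course down to a
# newly created child until the child defines its own value, at which point the leaf-level
# override (graceperiod = timedelta(1)) wins and survives a round trip through the store.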
def test_default_metadata_inheritance(self):
course = CourseFactory.create()
vertical = ItemFactory.create(parent_location=course.location)
course.children.append(vertical)
# in memory
self.assertIsNotNone(course.start)
self.assertEqual(course.start, vertical.start)
self.assertEqual(course.textbooks, [])
self.assertIn('GRADER', course.grading_policy)
self.assertIn('GRADE_CUTOFFS', course.grading_policy)
# by fetching
fetched_course = self.store.get_item(course.location)
fetched_item = self.store.get_item(vertical.location)
self.assertIsNotNone(fetched_course.start)
self.assertEqual(course.start, fetched_course.start)
self.assertEqual(fetched_course.start, fetched_item.start)
self.assertEqual(course.textbooks, fetched_course.textbooks)
def test_image_import(self):
"""Test backwards compatibilty of course image."""
content_store = contentstore()
# Use conditional_and_poll, as it's got an image already
courses = import_course_from_xml(
self.store,
self.user.id,
TEST_DATA_DIR,
['conditional_and_poll'],
static_content_store=content_store,
create_if_not_present=True
)
course = courses[0]
# Make sure the course image is set to the right place
self.assertEqual(course.course_image, 'images_course_image.jpg')
# Ensure that the imported course image is present -- this shouldn't raise an exception
asset_key = course.id.make_asset_key('asset', course.course_image)
content_store.find(asset_key)
def _show_course_overview(self, course_key):
"""
Show the course overview page.
"""
resp = self.client.get_html(get_url('course_handler', course_key, 'course_key_string'))
return resp
def test_wiki_slug(self):
"""When creating a course a unique wiki_slug should be set."""
course_key = _get_course_id(self.store, self.course_data)
_create_course(self, course_key, self.course_data)
course_module = self.store.get_course(course_key)
self.assertEquals(course_module.wiki_slug, 'MITx.111.2013_Spring')
def test_course_handler_with_invalid_course_key_string(self):
"""Test viewing the course overview page with invalid course id"""
response = self.client.get_html('/course/edX/test')
self.assertEquals(response.status_code, 404)
class MetadataSaveTestCase(ContentStoreTestCase):
"""Test that metadata is correctly cached and decached."""
def setUp(self):
super(MetadataSaveTestCase, self).setUp()
course = CourseFactory.create()
video_sample_xml = """
<video display_name="Test Video"
youtube="1.0:p2Q6BrNhdh8,0.75:izygArpw-Qo,1.25:1EeWXzPdhSA,1.5:rABDYkeK0x8"
show_captions="false"
from="00:00:01"
to="00:01:00">
<source src="http://www.example.com/file.mp4"/>
<track src="http://www.example.com/track"/>
</video>
"""
self.video_descriptor = ItemFactory.create(
parent_location=course.location, category='video',
data={'data': video_sample_xml}
)
def test_metadata_not_persistence(self):
"""
Test that metadata fields which a descriptor sets in its
constructor can be deleted and stay deleted once the descriptor is persisted and re-fetched.
"""
self.assertIn('html5_sources', own_metadata(self.video_descriptor))
attrs_to_strip = {
'show_captions',
'youtube_id_1_0',
'youtube_id_0_75',
'youtube_id_1_25',
'youtube_id_1_5',
'start_time',
'end_time',
'source',
'html5_sources',
'track'
}
location = self.video_descriptor.location
for field_name in attrs_to_strip:
delattr(self.video_descriptor, field_name)
self.assertNotIn('html5_sources', own_metadata(self.video_descriptor))
self.store.update_item(self.video_descriptor, self.user.id)
module = self.store.get_item(location)
self.assertNotIn('html5_sources', own_metadata(module))
def test_metadata_persistence(self):
# TODO: create the same test as `test_metadata_not_persistence`,
# but check persistence for some other module.
pass
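# Illustrative sketch only (hypothetical, not part of the original suite): the TODO above could be
# exercised with a field that is expected to persist, along these lines:
#
# def test_metadata_persistence_sketch(self):
# self.video_descriptor.display_name = 'Persist Me'
# self.store.update_item(self.video_descriptor, self.user.id)
# module = self.store.get_item(self.video_descriptor.location)
# self.assertEqual(module.display_name, 'Persist Me')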
class RerunCourseTest(ContentStoreTestCase):
"""
Tests for Rerunning a course via the view handler
"""
def setUp(self):
super(RerunCourseTest, self).setUp()
self.destination_course_data = {
'org': 'MITx',
'number': '111',
'display_name': 'Robot Super Course',
'run': '2013_Spring'
}
def post_rerun_request(
self, source_course_key, destination_course_data=None, response_code=200, expect_error=False
):
"""Create and send an ajax post for the rerun request"""
# create data to post
rerun_course_data = {'source_course_key': text_type(source_course_key)}
if not destination_course_data:
destination_course_data = self.destination_course_data
rerun_course_data.update(destination_course_data)
destination_course_key = _get_course_id(self.store, destination_course_data)
# post the request
course_url = get_url('course_handler', destination_course_key, 'course_key_string')
response = self.client.ajax_post(course_url, rerun_course_data)
# verify response
self.assertEqual(response.status_code, response_code)
if not expect_error:
json_resp = parse_json(response)
self.assertNotIn('ErrMsg', json_resp)
destination_course_key = CourseKey.from_string(json_resp['destination_course_key'])
return destination_course_key
def get_unsucceeded_course_action_elements(self, html, course_key):
"""Returns the elements in the unsucceeded course action section that have the given course_key"""
return html.cssselect('.courses-processing li[data-course-key="{}"]'.format(text_type(course_key)))
def assertInCourseListing(self, course_key):
"""
Asserts that the given course key is NOT in the unsucceeded course action section of the html.
"""
course_listing = lxml.html.fromstring(self.client.get_html('/home/').content)
self.assertEqual(len(self.get_unsucceeded_course_action_elements(course_listing, course_key)), 0)
def assertInUnsucceededCourseActions(self, course_key):
"""
Asserts that the given course key is in the unsucceeded course action section of the html.
"""
course_listing = lxml.html.fromstring(self.client.get_html('/home/').content)
self.assertEqual(len(self.get_unsucceeded_course_action_elements(course_listing, course_key)), 1)
def verify_rerun_course(self, source_course_key, destination_course_key, destination_display_name):
"""
Verify the contents of the course rerun action
"""
rerun_state = CourseRerunState.objects.find_first(course_key=destination_course_key)
expected_states = {
'state': CourseRerunUIStateManager.State.SUCCEEDED,
'display_name': destination_display_name,
'source_course_key': source_course_key,
'course_key': destination_course_key,
'should_display': True,
}
for field_name, expected_value in expected_states.iteritems():
self.assertEquals(getattr(rerun_state, field_name), expected_value)
# Verify that the creator is now enrolled in the course.
self.assertTrue(CourseEnrollment.is_enrolled(self.user, destination_course_key))
# Verify both courses are in the course listing section
self.assertInCourseListing(source_course_key)
self.assertInCourseListing(destination_course_key)
def test_rerun_course_no_videos_in_val(self):
"""
Test that when rerunning a course with no videos, VAL copies nothing
"""
source_course = CourseFactory.create()
destination_course_key = self.post_rerun_request(source_course.id)
self.verify_rerun_course(source_course.id, destination_course_key, self.destination_course_data['display_name'])
videos = list(get_videos_for_course(text_type(destination_course_key)))
self.assertEqual(0, len(videos))
self.assertInCourseListing(destination_course_key)
def test_rerun_course_success(self):
source_course = CourseFactory.create()
create_video(
dict(
edx_video_id="tree-hugger",
courses=[text_type(source_course.id)],
status='test',
duration=2,
encoded_videos=[]
)
)
destination_course_key = self.post_rerun_request(source_course.id)
self.verify_rerun_course(source_course.id, destination_course_key, self.destination_course_data['display_name'])
# Verify that the VAL copies videos to the rerun
source_videos = list(get_videos_for_course(text_type(source_course.id)))
target_videos = list(get_videos_for_course(text_type(destination_course_key)))
self.assertEqual(1, len(source_videos))
self.assertEqual(source_videos, target_videos)
def test_rerun_course_resets_advertised_date(self):
source_course = CourseFactory.create(advertised_start="01-12-2015")
destination_course_key = self.post_rerun_request(source_course.id)
destination_course = self.store.get_course(destination_course_key)
self.assertEqual(None, destination_course.advertised_start)
def test_rerun_of_rerun(self):
source_course = CourseFactory.create()
rerun_course_key = self.post_rerun_request(source_course.id)
rerun_of_rerun_data = {
'org': rerun_course_key.org,
'number': rerun_course_key.course,
'display_name': 'rerun of rerun',
'run': 'rerun2'
}
rerun_of_rerun_course_key = self.post_rerun_request(rerun_course_key, rerun_of_rerun_data)
self.verify_rerun_course(rerun_course_key, rerun_of_rerun_course_key, rerun_of_rerun_data['display_name'])
def test_rerun_course_fail_no_source_course(self):
existent_course_key = CourseFactory.create().id
non_existent_course_key = CourseLocator("org", "non_existent_course", "non_existent_run")
destination_course_key = self.post_rerun_request(non_existent_course_key)
# Verify that the course rerun action is marked failed
rerun_state = CourseRerunState.objects.find_first(course_key=destination_course_key)
self.assertEquals(rerun_state.state, CourseRerunUIStateManager.State.FAILED)
self.assertIn("Cannot find a course at", rerun_state.message)
# Verify that the creator is not enrolled in the course.
self.assertFalse(CourseEnrollment.is_enrolled(self.user, non_existent_course_key))
# Verify that the existing course continues to be in the course listings
self.assertInCourseListing(existent_course_key)
# Verify that the failed course is NOT in the course listings
self.assertInUnsucceededCourseActions(destination_course_key)
def test_rerun_course_fail_duplicate_course(self):
existent_course_key = CourseFactory.create().id
destination_course_data = {
'org': existent_course_key.org,
'number': existent_course_key.course,
'display_name': 'existing course',
'run': existent_course_key.run
}
destination_course_key = self.post_rerun_request(
existent_course_key, destination_course_data, expect_error=True
)
# Verify that the course rerun action doesn't exist
with self.assertRaises(CourseActionStateItemNotFoundError):
CourseRerunState.objects.find_first(course_key=destination_course_key)
# Verify that the existing course continues to be in the course listing
self.assertInCourseListing(existent_course_key)
def test_rerun_with_permission_denied(self):
with mock.patch.dict('django.conf.settings.FEATURES', {"ENABLE_CREATOR_GROUP": True}):
source_course = CourseFactory.create()
auth.add_users(self.user, CourseCreatorRole(), self.user)
self.user.is_staff = False
self.user.save()
self.post_rerun_request(source_course.id, response_code=403, expect_error=True)
def test_rerun_error(self):
error_message = "Mock Error Message"
with mock.patch(
'xmodule.modulestore.mixed.MixedModuleStore.clone_course',
mock.Mock(side_effect=Exception(error_message))
):
source_course = CourseFactory.create()
destination_course_key = self.post_rerun_request(source_course.id)
rerun_state = CourseRerunState.objects.find_first(course_key=destination_course_key)
self.assertEquals(rerun_state.state, CourseRerunUIStateManager.State.FAILED)
self.assertIn(error_message, rerun_state.message)
def test_rerun_error_trunc_message(self):
"""
CourseActionUIState.message is sometimes populated with the contents
of Python tracebacks. This test ensures we don't crash when attempting
to insert a value exceeding its max_length (note that sqlite does not
complain if this happens, but MySQL throws an error).
"""
with mock.patch(
'xmodule.modulestore.mixed.MixedModuleStore.clone_course',
mock.Mock(side_effect=Exception()),
):
source_course = CourseFactory.create()
message_too_long = "traceback".rjust(CourseRerunState.MAX_MESSAGE_LENGTH * 2, '-')
with mock.patch('traceback.format_exc', return_value=message_too_long):
destination_course_key = self.post_rerun_request(source_course.id)
rerun_state = CourseRerunState.objects.find_first(course_key=destination_course_key)
self.assertEquals(rerun_state.state, CourseRerunUIStateManager.State.FAILED)
self.assertTrue(rerun_state.message.endswith("traceback"))
self.assertEqual(len(rerun_state.message), CourseRerunState.MAX_MESSAGE_LENGTH)
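# Note (descriptive comment added for clarity): the assertions above pin down the truncation
# contract exercised here -- when the failure message exceeds CourseRerunState.MAX_MESSAGE_LENGTH,
# the stored message is cut down to exactly MAX_MESSAGE_LENGTH characters and keeps the *tail*
# of the original text, which is why it still ends with "traceback" once the leading '-' padding
# has been dropped.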
def test_rerun_course_wiki_slug(self):
"""
Test that unique wiki_slug is assigned to rerun course.
"""
course_data = {
'org': 'edX',
'number': '123',
'display_name': 'Rerun Course',
'run': '2013'
}
source_wiki_slug = '{0}.{1}.{2}'.format(course_data['org'], course_data['number'], course_data['run'])
source_course_key = _get_course_id(self.store, course_data)
_create_course(self, source_course_key, course_data)
source_course = self.store.get_course(source_course_key)
# Verify created course's wiki_slug.
self.assertEquals(source_course.wiki_slug, source_wiki_slug)
destination_course_data = course_data
destination_course_data['run'] = '2013_Rerun'
destination_course_key = self.post_rerun_request(
source_course.id, destination_course_data=destination_course_data
)
self.verify_rerun_course(source_course.id, destination_course_key, destination_course_data['display_name'])
destination_course = self.store.get_course(destination_course_key)
destination_wiki_slug = '{0}.{1}.{2}'.format(
destination_course.id.org, destination_course.id.course, destination_course.id.run
)
# Verify rerun course's wiki_slug.
self.assertEquals(destination_course.wiki_slug, destination_wiki_slug)
class ContentLicenseTest(ContentStoreTestCase):
"""
Tests around content licenses
"""
def test_course_license_export(self):
content_store = contentstore()
root_dir = path(mkdtemp_clean())
self.course.license = "creative-commons: BY SA"
self.store.update_item(self.course, None)
export_course_to_xml(self.store, content_store, self.course.id, root_dir, u'test_license')
fname = "{block}.xml".format(block=self.course.scope_ids.usage_id.block_id)
run_file_path = root_dir / "test_license" / "course" / fname
run_xml = etree.parse(run_file_path.open())
self.assertEqual(run_xml.getroot().get("license"), "creative-commons: BY SA")
def test_video_license_export(self):
content_store = contentstore()
root_dir = path(mkdtemp_clean())
video_descriptor = ItemFactory.create(
parent_location=self.course.location, category='video',
license="all-rights-reserved"
)
export_course_to_xml(self.store, content_store, self.course.id, root_dir, u'test_license')
fname = "{block}.xml".format(block=video_descriptor.scope_ids.usage_id.block_id)
video_file_path = root_dir / "test_license" / "video" / fname
video_xml = etree.parse(video_file_path.open())
self.assertEqual(video_xml.getroot().get("license"), "all-rights-reserved")
def test_license_import(self):
course_items = import_course_from_xml(
self.store, self.user.id, TEST_DATA_DIR, ['toy'], create_if_not_present=True
)
course = course_items[0]
self.assertEqual(course.license, "creative-commons: BY")
videos = self.store.get_items(course.id, qualifiers={'category': 'video'})
self.assertEqual(videos[0].license, "all-rights-reserved")
class EntryPageTestCase(TestCase):
"""
Tests entry pages that aren't specific to a course.
"""
def setUp(self):
super(EntryPageTestCase, self).setUp()
self.client = AjaxEnabledTestClient()
def _test_page(self, page, status_code=200):
resp = self.client.get_html(page)
self.assertEqual(resp.status_code, status_code)
def test_how_it_works(self):
self._test_page("/howitworks")
def test_signup(self):
self._test_page("/signup")
def test_login(self):
self._test_page("/signin")
def test_logout(self):
# Logout redirects.
self._test_page("/logout", 302)
@override_switch(
'{}.{}'.format(waffle.WAFFLE_NAMESPACE, waffle.ENABLE_ACCESSIBILITY_POLICY_PAGE),
active=True)
def test_accessibility(self):
self._test_page('/accessibility')
class SigninPageTestCase(TestCase):
"""
Tests that the CSRF token is directly included in the signin form. This is
important to make sure that the script is functional independently of any
other script.
"""
def test_csrf_token_is_present_in_form(self):
# Expected html:
# <form>
# ...
# <fieldset>
# ...
# <input name="csrfmiddlewaretoken" value="...">
# ...
# </fieldset>
# ...
# </form>
response = self.client.get("/signin")
csrf_token = response.cookies.get("csrftoken")
form = lxml.html.fromstring(response.content).get_element_by_id("login_form")
csrf_input_field = form.find(".//input[@name='csrfmiddlewaretoken']")
self.assertIsNotNone(csrf_token)
self.assertIsNotNone(csrf_token.value)
self.assertIsNotNone(csrf_input_field)
# TODO: Remove Django 1.11 upgrade shim
# SHIM: _compare_salted_tokens was introduced in 1.10. Move the import and use only that branch post-upgrade.
if django.VERSION < (1, 10):
self.assertEqual(csrf_token.value, csrf_input_field.attrib["value"])
else:
from django.middleware.csrf import _compare_salted_tokens
self.assertTrue(_compare_salted_tokens(csrf_token.value, csrf_input_field.attrib["value"]))
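# Note (descriptive comment added for clarity): from Django 1.10 onward the CSRF token rendered
# into forms is masked with a per-request salt, so it no longer matches the csrftoken cookie
# byte-for-byte; _compare_salted_tokens() unmasks both values before comparing, which is why the
# branch above switches away from plain string equality.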
def _create_course(test, course_key, course_data):
"""
Creates a course via an AJAX request and verifies the URL returned in the response.
"""
course_url = get_url('course_handler', course_key, 'course_key_string')
response = test.client.ajax_post(course_url, course_data)
test.assertEqual(response.status_code, 200)
data = parse_json(response)
test.assertNotIn('ErrMsg', data)
test.assertEqual(data['url'], course_url)
def _get_course_id(store, course_data):
"""Returns the course ID."""
return store.make_course_key(course_data['org'], course_data['number'], course_data['run'])
| agpl-3.0 |
Aasmi/scikit-learn | sklearn/decomposition/tests/test_nmf.py | 32 | 6044 | import numpy as np
from scipy import linalg
from sklearn.decomposition import nmf
from sklearn.utils.testing import assert_true
from sklearn.utils.testing import assert_false
from sklearn.utils.testing import raises
from sklearn.utils.testing import assert_array_almost_equal
from sklearn.utils.testing import assert_greater
from sklearn.utils.testing import assert_less
random_state = np.random.mtrand.RandomState(0)
@raises(ValueError)
def test_initialize_nn_input():
# Test NNDSVD behaviour on negative input
nmf._initialize_nmf(-np.ones((2, 2)), 2)
def test_initialize_nn_output():
# Test that NNDSVD does not return negative values
data = np.abs(random_state.randn(10, 10))
for var in (None, 'a', 'ar'):
W, H = nmf._initialize_nmf(data, 10, variant=var, random_state=0)
assert_false((W < 0).any() or (H < 0).any())
def test_initialize_close():
# Test NNDSVD error
# Test that _initialize_nmf error is less than the standard deviation of
# the entries in the matrix.
A = np.abs(random_state.randn(10, 10))
W, H = nmf._initialize_nmf(A, 10)
error = linalg.norm(np.dot(W, H) - A)
sdev = linalg.norm(A - A.mean())
assert_true(error <= sdev)
def test_initialize_variants():
# Test NNDSVD variants correctness
# Test that the variants 'a' and 'ar' differ from basic NNDSVD only where
# the basic version has zeros.
data = np.abs(random_state.randn(10, 10))
W0, H0 = nmf._initialize_nmf(data, 10, variant=None)
Wa, Ha = nmf._initialize_nmf(data, 10, variant='a')
War, Har = nmf._initialize_nmf(data, 10, variant='ar', random_state=0)
for ref, evl in ((W0, Wa), (W0, War), (H0, Ha), (H0, Har)):
assert_true(np.allclose(evl[ref != 0], ref[ref != 0]))
@raises(ValueError)
def test_projgrad_nmf_fit_nn_input():
# Test model fit behaviour on negative input
A = -np.ones((2, 2))
m = nmf.ProjectedGradientNMF(n_components=2, init=None, random_state=0)
m.fit(A)
def test_projgrad_nmf_fit_nn_output():
# Test that the decomposition does not contain negative values
A = np.c_[5 * np.ones(5) - np.arange(1, 6),
5 * np.ones(5) + np.arange(1, 6)]
for init in (None, 'nndsvd', 'nndsvda', 'nndsvdar'):
model = nmf.ProjectedGradientNMF(n_components=2, init=init,
random_state=0)
transf = model.fit_transform(A)
assert_false((model.components_ < 0).any() or
(transf < 0).any())
def test_projgrad_nmf_fit_close():
# Test that the fit is not too far away
pnmf = nmf.ProjectedGradientNMF(5, init='nndsvda', random_state=0)
X = np.abs(random_state.randn(6, 5))
assert_less(pnmf.fit(X).reconstruction_err_, 0.05)
def test_nls_nn_output():
# Test that NLS solver doesn't return negative values
A = np.arange(1, 5).reshape(1, -1)
Ap, _, _ = nmf._nls_subproblem(np.dot(A.T, -A), A.T, A, 0.001, 100)
assert_false((Ap < 0).any())
def test_nls_close():
# Test that the NLS results should be close
A = np.arange(1, 5).reshape(1, -1)
Ap, _, _ = nmf._nls_subproblem(np.dot(A.T, A), A.T, np.zeros_like(A),
0.001, 100)
assert_true((np.abs(Ap - A) < 0.01).all())
def test_projgrad_nmf_transform():
# Test that NMF.transform returns close values
# (transform uses scipy.optimize.nnls for now)
A = np.abs(random_state.randn(6, 5))
m = nmf.ProjectedGradientNMF(n_components=5, init='nndsvd', random_state=0)
transf = m.fit_transform(A)
assert_true(np.allclose(transf, m.transform(A), atol=1e-2, rtol=0))
def test_n_components_greater_n_features():
# Smoke test for the case of more components than features.
A = np.abs(random_state.randn(30, 10))
nmf.ProjectedGradientNMF(n_components=15, sparseness='data',
random_state=0).fit(A)
def test_projgrad_nmf_sparseness():
# Test sparseness
# Test that sparsity constraints actually increase sparseness in the
# part where they are applied.
A = np.abs(random_state.randn(10, 10))
m = nmf.ProjectedGradientNMF(n_components=5, random_state=0).fit(A)
data_sp = nmf.ProjectedGradientNMF(n_components=5, sparseness='data',
random_state=0).fit(A).data_sparseness_
comp_sp = nmf.ProjectedGradientNMF(n_components=5, sparseness='components',
random_state=0).fit(A).comp_sparseness_
assert_greater(data_sp, m.data_sparseness_)
assert_greater(comp_sp, m.comp_sparseness_)
def test_sparse_input():
# Test that sparse matrices are accepted as input
from scipy.sparse import csc_matrix
A = np.abs(random_state.randn(10, 10))
A[:, 2 * np.arange(5)] = 0
T1 = nmf.ProjectedGradientNMF(n_components=5, init='random',
random_state=999).fit_transform(A)
A_sparse = csc_matrix(A)
pg_nmf = nmf.ProjectedGradientNMF(n_components=5, init='random',
random_state=999)
T2 = pg_nmf.fit_transform(A_sparse)
assert_array_almost_equal(pg_nmf.reconstruction_err_,
linalg.norm(A - np.dot(T2, pg_nmf.components_),
'fro'))
assert_array_almost_equal(T1, T2)
# same with sparseness
T2 = nmf.ProjectedGradientNMF(
n_components=5, init='random', sparseness='data',
random_state=999).fit_transform(A_sparse)
T1 = nmf.ProjectedGradientNMF(
n_components=5, init='random', sparseness='data',
random_state=999).fit_transform(A)
def test_sparse_transform():
# Test that transform works on sparse data. Issue #2124
from scipy.sparse import csc_matrix
A = np.abs(random_state.randn(5, 4))
A[A > 1.0] = 0
A = csc_matrix(A)
model = nmf.NMF()
A_fit_tr = model.fit_transform(A)
A_tr = model.transform(A)
# This solver seems pretty inconsistent
assert_array_almost_equal(A_fit_tr, A_tr, decimal=2)
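# Illustrative usage sketch (not part of the original test module), based only on the API
# exercised above: ProjectedGradientNMF is fit on a non-negative matrix and exposes
# components_ and reconstruction_err_ after fitting, e.g. (hypothetical):
#
# X = np.abs(random_state.randn(8, 6))
# model = nmf.ProjectedGradientNMF(n_components=3, init='nndsvd', random_state=0)
# W = model.fit_transform(X) # (8, 3) non-negative factor
# H = model.components_ # (3, 6) non-negative factor
# err = model.reconstruction_err_ # Frobenius-norm reconstruction error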
| bsd-3-clause |
kishorvpatil/incubator-storm | storm-client/src/py/storm/Nimbus.py | 2 | 370569 | #!/usr/bin/env python
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Autogenerated by Thrift Compiler (0.13.0)
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
# options string: py:utf8strings
#
from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException
from thrift.protocol.TProtocol import TProtocolException
from thrift.TRecursive import fix_spec
import sys
import logging
from .ttypes import *
from thrift.Thrift import TProcessor
from thrift.transport import TTransport
all_structs = []
class Iface(object):
def submitTopology(self, name, uploadedJarLocation, jsonConf, topology):
"""
Parameters:
- name
- uploadedJarLocation
- jsonConf
- topology
"""
pass
def submitTopologyWithOpts(self, name, uploadedJarLocation, jsonConf, topology, options):
"""
Parameters:
- name
- uploadedJarLocation
- jsonConf
- topology
- options
"""
pass
def killTopology(self, name):
"""
Parameters:
- name
"""
pass
def killTopologyWithOpts(self, name, options):
"""
Parameters:
- name
- options
"""
pass
def activate(self, name):
"""
Parameters:
- name
"""
pass
def deactivate(self, name):
"""
Parameters:
- name
"""
pass
def rebalance(self, name, options):
"""
Parameters:
- name
- options
"""
pass
def setLogConfig(self, name, config):
"""
Parameters:
- name
- config
"""
pass
def getLogConfig(self, name):
"""
Parameters:
- name
"""
pass
def debug(self, name, component, enable, samplingPercentage):
"""
Enable/disable logging the tuples generated in a topology via an internal EventLogger bolt. The component name is optional
and if null or empty, the debug flag will apply to the entire topology.
The 'samplingPercentage' will limit logging to a percentage of generated tuples.
Parameters:
- name
- component
- enable
- samplingPercentage
"""
pass
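# Illustrative usage (not part of the generated interface): a caller holding the Client defined
# later in this module might enable topology-wide event logging at roughly 10% sampling with
# something like client.debug('my-topology', '', True, 10.0), per the docstring above; passing a
# component name instead limits logging to that component.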
def setWorkerProfiler(self, id, profileRequest):
"""
Parameters:
- id
- profileRequest
"""
pass
def getComponentPendingProfileActions(self, id, component_id, action):
"""
Parameters:
- id
- component_id
- action
"""
pass
def uploadNewCredentials(self, name, creds):
"""
Parameters:
- name
- creds
"""
pass
def beginCreateBlob(self, key, meta):
"""
Parameters:
- key
- meta
"""
pass
def beginUpdateBlob(self, key):
"""
Parameters:
- key
"""
pass
def uploadBlobChunk(self, session, chunk):
"""
Parameters:
- session
- chunk
"""
pass
def finishBlobUpload(self, session):
"""
Parameters:
- session
"""
pass
def cancelBlobUpload(self, session):
"""
Parameters:
- session
"""
pass
def getBlobMeta(self, key):
"""
Parameters:
- key
"""
pass
def setBlobMeta(self, key, meta):
"""
Parameters:
- key
- meta
"""
pass
def beginBlobDownload(self, key):
"""
Parameters:
- key
"""
pass
def downloadBlobChunk(self, session):
"""
Parameters:
- session
"""
pass
def deleteBlob(self, key):
"""
Parameters:
- key
"""
pass
def listBlobs(self, session):
"""
Parameters:
- session
"""
pass
def getBlobReplication(self, key):
"""
Parameters:
- key
"""
pass
def updateBlobReplication(self, key, replication):
"""
Parameters:
- key
- replication
"""
pass
def createStateInZookeeper(self, key):
"""
Parameters:
- key
"""
pass
def beginFileUpload(self):
pass
def uploadChunk(self, location, chunk):
"""
Parameters:
- location
- chunk
"""
pass
def finishFileUpload(self, location):
"""
Parameters:
- location
"""
pass
def downloadChunk(self, id):
"""
Parameters:
- id
"""
pass
def getNimbusConf(self):
pass
def getClusterInfo(self):
pass
def getLeader(self):
pass
def isTopologyNameAllowed(self, name):
"""
Parameters:
- name
"""
pass
def getTopologyInfo(self, id):
"""
Parameters:
- id
"""
pass
def getTopologyInfoWithOpts(self, id, options):
"""
Parameters:
- id
- options
"""
pass
def getTopologyPageInfo(self, id, window, is_include_sys):
"""
Parameters:
- id
- window
- is_include_sys
"""
pass
def getSupervisorPageInfo(self, id, host, is_include_sys):
"""
Parameters:
- id
- host
- is_include_sys
"""
pass
def getComponentPageInfo(self, topology_id, component_id, window, is_include_sys):
"""
Parameters:
- topology_id
- component_id
- window
- is_include_sys
"""
pass
def getTopologyConf(self, id):
"""
Parameters:
- id
"""
pass
def getTopology(self, id):
"""
Returns the compiled topology that contains ackers and metrics consumers. Compare {@link #getUserTopology(String id)}.
Parameters:
- id
"""
pass
def getUserTopology(self, id):
"""
Returns the user specified topology as submitted originally. Compare {@link #getTopology(String id)}.
Parameters:
- id
"""
pass
def getTopologyHistory(self, user):
"""
Parameters:
- user
"""
pass
def getOwnerResourceSummaries(self, owner):
"""
Parameters:
- owner
"""
pass
def getSupervisorAssignments(self, node):
"""
Get the assignments for a specific supervisor
Parameters:
- node
"""
pass
def sendSupervisorWorkerHeartbeats(self, heartbeats):
"""
Send supervisor worker heartbeats for a specific supervisor
Parameters:
- heartbeats
"""
pass
def sendSupervisorWorkerHeartbeat(self, heatbeat):
"""
Send supervisor local worker heartbeat when a supervisor is unreachable
Parameters:
- heatbeat
"""
pass
def processWorkerMetrics(self, metrics):
"""
Parameters:
- metrics
"""
pass
def isRemoteBlobExists(self, blobKey):
"""
Decide if the blob has been removed from the cluster.
Parameters:
- blobKey
"""
pass
class Client(Iface):
def __init__(self, iprot, oprot=None):
self._iprot = self._oprot = iprot
if oprot is not None:
self._oprot = oprot
self._seqid = 0
def submitTopology(self, name, uploadedJarLocation, jsonConf, topology):
"""
Parameters:
- name
- uploadedJarLocation
- jsonConf
- topology
"""
self.send_submitTopology(name, uploadedJarLocation, jsonConf, topology)
self.recv_submitTopology()
def send_submitTopology(self, name, uploadedJarLocation, jsonConf, topology):
self._oprot.writeMessageBegin('submitTopology', TMessageType.CALL, self._seqid)
args = submitTopology_args()
args.name = name
args.uploadedJarLocation = uploadedJarLocation
args.jsonConf = jsonConf
args.topology = topology
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_submitTopology(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = submitTopology_result()
result.read(iprot)
iprot.readMessageEnd()
if result.e is not None:
raise result.e
if result.ite is not None:
raise result.ite
if result.aze is not None:
raise result.aze
return
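# Note (descriptive comment added for clarity): every RPC on this client follows the same
# generated pattern shown above -- send_<method>() serializes the corresponding *_args struct and
# flushes the transport, while recv_<method>() reads the matching *_result struct, re-raises any
# declared Thrift exceptions carried in its fields (e.g. the e / ite / aze fields here), and
# returns the success value for non-void methods.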
def submitTopologyWithOpts(self, name, uploadedJarLocation, jsonConf, topology, options):
"""
Parameters:
- name
- uploadedJarLocation
- jsonConf
- topology
- options
"""
self.send_submitTopologyWithOpts(name, uploadedJarLocation, jsonConf, topology, options)
self.recv_submitTopologyWithOpts()
def send_submitTopologyWithOpts(self, name, uploadedJarLocation, jsonConf, topology, options):
self._oprot.writeMessageBegin('submitTopologyWithOpts', TMessageType.CALL, self._seqid)
args = submitTopologyWithOpts_args()
args.name = name
args.uploadedJarLocation = uploadedJarLocation
args.jsonConf = jsonConf
args.topology = topology
args.options = options
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_submitTopologyWithOpts(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = submitTopologyWithOpts_result()
result.read(iprot)
iprot.readMessageEnd()
if result.e is not None:
raise result.e
if result.ite is not None:
raise result.ite
if result.aze is not None:
raise result.aze
return
def killTopology(self, name):
"""
Parameters:
- name
"""
self.send_killTopology(name)
self.recv_killTopology()
def send_killTopology(self, name):
self._oprot.writeMessageBegin('killTopology', TMessageType.CALL, self._seqid)
args = killTopology_args()
args.name = name
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_killTopology(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = killTopology_result()
result.read(iprot)
iprot.readMessageEnd()
if result.e is not None:
raise result.e
if result.aze is not None:
raise result.aze
return
def killTopologyWithOpts(self, name, options):
"""
Parameters:
- name
- options
"""
self.send_killTopologyWithOpts(name, options)
self.recv_killTopologyWithOpts()
def send_killTopologyWithOpts(self, name, options):
self._oprot.writeMessageBegin('killTopologyWithOpts', TMessageType.CALL, self._seqid)
args = killTopologyWithOpts_args()
args.name = name
args.options = options
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_killTopologyWithOpts(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = killTopologyWithOpts_result()
result.read(iprot)
iprot.readMessageEnd()
if result.e is not None:
raise result.e
if result.aze is not None:
raise result.aze
return
def activate(self, name):
"""
Parameters:
- name
"""
self.send_activate(name)
self.recv_activate()
def send_activate(self, name):
self._oprot.writeMessageBegin('activate', TMessageType.CALL, self._seqid)
args = activate_args()
args.name = name
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_activate(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = activate_result()
result.read(iprot)
iprot.readMessageEnd()
if result.e is not None:
raise result.e
if result.aze is not None:
raise result.aze
return
def deactivate(self, name):
"""
Parameters:
- name
"""
self.send_deactivate(name)
self.recv_deactivate()
def send_deactivate(self, name):
self._oprot.writeMessageBegin('deactivate', TMessageType.CALL, self._seqid)
args = deactivate_args()
args.name = name
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_deactivate(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = deactivate_result()
result.read(iprot)
iprot.readMessageEnd()
if result.e is not None:
raise result.e
if result.aze is not None:
raise result.aze
return
def rebalance(self, name, options):
"""
Parameters:
- name
- options
"""
self.send_rebalance(name, options)
self.recv_rebalance()
def send_rebalance(self, name, options):
self._oprot.writeMessageBegin('rebalance', TMessageType.CALL, self._seqid)
args = rebalance_args()
args.name = name
args.options = options
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_rebalance(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = rebalance_result()
result.read(iprot)
iprot.readMessageEnd()
if result.e is not None:
raise result.e
if result.ite is not None:
raise result.ite
if result.aze is not None:
raise result.aze
return
def setLogConfig(self, name, config):
"""
Parameters:
- name
- config
"""
self.send_setLogConfig(name, config)
self.recv_setLogConfig()
def send_setLogConfig(self, name, config):
self._oprot.writeMessageBegin('setLogConfig', TMessageType.CALL, self._seqid)
args = setLogConfig_args()
args.name = name
args.config = config
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_setLogConfig(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = setLogConfig_result()
result.read(iprot)
iprot.readMessageEnd()
return
def getLogConfig(self, name):
"""
Parameters:
- name
"""
self.send_getLogConfig(name)
return self.recv_getLogConfig()
def send_getLogConfig(self, name):
self._oprot.writeMessageBegin('getLogConfig', TMessageType.CALL, self._seqid)
args = getLogConfig_args()
args.name = name
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_getLogConfig(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = getLogConfig_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "getLogConfig failed: unknown result")
def debug(self, name, component, enable, samplingPercentage):
"""
Enable/disable logging the tuples generated in a topology via an internal EventLogger bolt. The component name is optional
and if null or empty, the debug flag will apply to the entire topology.
The 'samplingPercentage' will limit logging to a percentage of generated tuples.
Parameters:
- name
- component
- enable
- samplingPercentage
"""
self.send_debug(name, component, enable, samplingPercentage)
self.recv_debug()
def send_debug(self, name, component, enable, samplingPercentage):
self._oprot.writeMessageBegin('debug', TMessageType.CALL, self._seqid)
args = debug_args()
args.name = name
args.component = component
args.enable = enable
args.samplingPercentage = samplingPercentage
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_debug(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = debug_result()
result.read(iprot)
iprot.readMessageEnd()
if result.e is not None:
raise result.e
if result.aze is not None:
raise result.aze
return
def setWorkerProfiler(self, id, profileRequest):
"""
Parameters:
- id
- profileRequest
"""
self.send_setWorkerProfiler(id, profileRequest)
self.recv_setWorkerProfiler()
def send_setWorkerProfiler(self, id, profileRequest):
self._oprot.writeMessageBegin('setWorkerProfiler', TMessageType.CALL, self._seqid)
args = setWorkerProfiler_args()
args.id = id
args.profileRequest = profileRequest
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_setWorkerProfiler(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = setWorkerProfiler_result()
result.read(iprot)
iprot.readMessageEnd()
return
def getComponentPendingProfileActions(self, id, component_id, action):
"""
Parameters:
- id
- component_id
- action
"""
self.send_getComponentPendingProfileActions(id, component_id, action)
return self.recv_getComponentPendingProfileActions()
def send_getComponentPendingProfileActions(self, id, component_id, action):
self._oprot.writeMessageBegin('getComponentPendingProfileActions', TMessageType.CALL, self._seqid)
args = getComponentPendingProfileActions_args()
args.id = id
args.component_id = component_id
args.action = action
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_getComponentPendingProfileActions(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = getComponentPendingProfileActions_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "getComponentPendingProfileActions failed: unknown result")
def uploadNewCredentials(self, name, creds):
"""
Parameters:
- name
- creds
"""
self.send_uploadNewCredentials(name, creds)
self.recv_uploadNewCredentials()
def send_uploadNewCredentials(self, name, creds):
self._oprot.writeMessageBegin('uploadNewCredentials', TMessageType.CALL, self._seqid)
args = uploadNewCredentials_args()
args.name = name
args.creds = creds
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_uploadNewCredentials(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = uploadNewCredentials_result()
result.read(iprot)
iprot.readMessageEnd()
if result.e is not None:
raise result.e
if result.ite is not None:
raise result.ite
if result.aze is not None:
raise result.aze
return
def beginCreateBlob(self, key, meta):
"""
Parameters:
- key
- meta
"""
self.send_beginCreateBlob(key, meta)
return self.recv_beginCreateBlob()
def send_beginCreateBlob(self, key, meta):
self._oprot.writeMessageBegin('beginCreateBlob', TMessageType.CALL, self._seqid)
args = beginCreateBlob_args()
args.key = key
args.meta = meta
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_beginCreateBlob(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = beginCreateBlob_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.aze is not None:
raise result.aze
if result.kae is not None:
raise result.kae
raise TApplicationException(TApplicationException.MISSING_RESULT, "beginCreateBlob failed: unknown result")
def beginUpdateBlob(self, key):
"""
Parameters:
- key
"""
self.send_beginUpdateBlob(key)
return self.recv_beginUpdateBlob()
def send_beginUpdateBlob(self, key):
self._oprot.writeMessageBegin('beginUpdateBlob', TMessageType.CALL, self._seqid)
args = beginUpdateBlob_args()
args.key = key
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_beginUpdateBlob(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = beginUpdateBlob_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.aze is not None:
raise result.aze
if result.knf is not None:
raise result.knf
raise TApplicationException(TApplicationException.MISSING_RESULT, "beginUpdateBlob failed: unknown result")
def uploadBlobChunk(self, session, chunk):
"""
Parameters:
- session
- chunk
"""
self.send_uploadBlobChunk(session, chunk)
self.recv_uploadBlobChunk()
def send_uploadBlobChunk(self, session, chunk):
self._oprot.writeMessageBegin('uploadBlobChunk', TMessageType.CALL, self._seqid)
args = uploadBlobChunk_args()
args.session = session
args.chunk = chunk
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_uploadBlobChunk(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = uploadBlobChunk_result()
result.read(iprot)
iprot.readMessageEnd()
if result.aze is not None:
raise result.aze
return
def finishBlobUpload(self, session):
"""
Parameters:
- session
"""
self.send_finishBlobUpload(session)
self.recv_finishBlobUpload()
def send_finishBlobUpload(self, session):
self._oprot.writeMessageBegin('finishBlobUpload', TMessageType.CALL, self._seqid)
args = finishBlobUpload_args()
args.session = session
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_finishBlobUpload(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = finishBlobUpload_result()
result.read(iprot)
iprot.readMessageEnd()
if result.aze is not None:
raise result.aze
return
def cancelBlobUpload(self, session):
"""
Parameters:
- session
"""
self.send_cancelBlobUpload(session)
self.recv_cancelBlobUpload()
def send_cancelBlobUpload(self, session):
self._oprot.writeMessageBegin('cancelBlobUpload', TMessageType.CALL, self._seqid)
args = cancelBlobUpload_args()
args.session = session
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_cancelBlobUpload(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = cancelBlobUpload_result()
result.read(iprot)
iprot.readMessageEnd()
if result.aze is not None:
raise result.aze
return
def getBlobMeta(self, key):
"""
Parameters:
- key
"""
self.send_getBlobMeta(key)
return self.recv_getBlobMeta()
def send_getBlobMeta(self, key):
self._oprot.writeMessageBegin('getBlobMeta', TMessageType.CALL, self._seqid)
args = getBlobMeta_args()
args.key = key
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_getBlobMeta(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = getBlobMeta_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.aze is not None:
raise result.aze
if result.knf is not None:
raise result.knf
raise TApplicationException(TApplicationException.MISSING_RESULT, "getBlobMeta failed: unknown result")
def setBlobMeta(self, key, meta):
"""
Parameters:
- key
- meta
"""
self.send_setBlobMeta(key, meta)
self.recv_setBlobMeta()
def send_setBlobMeta(self, key, meta):
self._oprot.writeMessageBegin('setBlobMeta', TMessageType.CALL, self._seqid)
args = setBlobMeta_args()
args.key = key
args.meta = meta
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_setBlobMeta(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = setBlobMeta_result()
result.read(iprot)
iprot.readMessageEnd()
if result.aze is not None:
raise result.aze
if result.knf is not None:
raise result.knf
return
def beginBlobDownload(self, key):
"""
Parameters:
- key
"""
self.send_beginBlobDownload(key)
return self.recv_beginBlobDownload()
def send_beginBlobDownload(self, key):
self._oprot.writeMessageBegin('beginBlobDownload', TMessageType.CALL, self._seqid)
args = beginBlobDownload_args()
args.key = key
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_beginBlobDownload(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = beginBlobDownload_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.aze is not None:
raise result.aze
if result.knf is not None:
raise result.knf
raise TApplicationException(TApplicationException.MISSING_RESULT, "beginBlobDownload failed: unknown result")
def downloadBlobChunk(self, session):
"""
Parameters:
- session
"""
self.send_downloadBlobChunk(session)
return self.recv_downloadBlobChunk()
def send_downloadBlobChunk(self, session):
self._oprot.writeMessageBegin('downloadBlobChunk', TMessageType.CALL, self._seqid)
args = downloadBlobChunk_args()
args.session = session
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_downloadBlobChunk(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = downloadBlobChunk_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.aze is not None:
raise result.aze
raise TApplicationException(TApplicationException.MISSING_RESULT, "downloadBlobChunk failed: unknown result")
def deleteBlob(self, key):
"""
Parameters:
- key
"""
self.send_deleteBlob(key)
self.recv_deleteBlob()
def send_deleteBlob(self, key):
self._oprot.writeMessageBegin('deleteBlob', TMessageType.CALL, self._seqid)
args = deleteBlob_args()
args.key = key
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_deleteBlob(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = deleteBlob_result()
result.read(iprot)
iprot.readMessageEnd()
if result.aze is not None:
raise result.aze
if result.knf is not None:
raise result.knf
if result.ise is not None:
raise result.ise
return
def listBlobs(self, session):
"""
Parameters:
- session
"""
self.send_listBlobs(session)
return self.recv_listBlobs()
def send_listBlobs(self, session):
self._oprot.writeMessageBegin('listBlobs', TMessageType.CALL, self._seqid)
args = listBlobs_args()
args.session = session
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_listBlobs(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = listBlobs_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "listBlobs failed: unknown result")
def getBlobReplication(self, key):
"""
Parameters:
- key
"""
self.send_getBlobReplication(key)
return self.recv_getBlobReplication()
def send_getBlobReplication(self, key):
self._oprot.writeMessageBegin('getBlobReplication', TMessageType.CALL, self._seqid)
args = getBlobReplication_args()
args.key = key
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_getBlobReplication(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = getBlobReplication_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.aze is not None:
raise result.aze
if result.knf is not None:
raise result.knf
raise TApplicationException(TApplicationException.MISSING_RESULT, "getBlobReplication failed: unknown result")
def updateBlobReplication(self, key, replication):
"""
Parameters:
- key
- replication
"""
self.send_updateBlobReplication(key, replication)
return self.recv_updateBlobReplication()
def send_updateBlobReplication(self, key, replication):
self._oprot.writeMessageBegin('updateBlobReplication', TMessageType.CALL, self._seqid)
args = updateBlobReplication_args()
args.key = key
args.replication = replication
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_updateBlobReplication(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = updateBlobReplication_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.aze is not None:
raise result.aze
if result.knf is not None:
raise result.knf
raise TApplicationException(TApplicationException.MISSING_RESULT, "updateBlobReplication failed: unknown result")
def createStateInZookeeper(self, key):
"""
Parameters:
- key
"""
self.send_createStateInZookeeper(key)
self.recv_createStateInZookeeper()
def send_createStateInZookeeper(self, key):
self._oprot.writeMessageBegin('createStateInZookeeper', TMessageType.CALL, self._seqid)
args = createStateInZookeeper_args()
args.key = key
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_createStateInZookeeper(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = createStateInZookeeper_result()
result.read(iprot)
iprot.readMessageEnd()
return
def beginFileUpload(self):
self.send_beginFileUpload()
return self.recv_beginFileUpload()
def send_beginFileUpload(self):
self._oprot.writeMessageBegin('beginFileUpload', TMessageType.CALL, self._seqid)
args = beginFileUpload_args()
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_beginFileUpload(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = beginFileUpload_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.aze is not None:
raise result.aze
raise TApplicationException(TApplicationException.MISSING_RESULT, "beginFileUpload failed: unknown result")
def uploadChunk(self, location, chunk):
"""
Parameters:
- location
- chunk
"""
self.send_uploadChunk(location, chunk)
self.recv_uploadChunk()
def send_uploadChunk(self, location, chunk):
self._oprot.writeMessageBegin('uploadChunk', TMessageType.CALL, self._seqid)
args = uploadChunk_args()
args.location = location
args.chunk = chunk
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_uploadChunk(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = uploadChunk_result()
result.read(iprot)
iprot.readMessageEnd()
if result.aze is not None:
raise result.aze
return
def finishFileUpload(self, location):
"""
Parameters:
- location
"""
self.send_finishFileUpload(location)
self.recv_finishFileUpload()
def send_finishFileUpload(self, location):
self._oprot.writeMessageBegin('finishFileUpload', TMessageType.CALL, self._seqid)
args = finishFileUpload_args()
args.location = location
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_finishFileUpload(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = finishFileUpload_result()
result.read(iprot)
iprot.readMessageEnd()
if result.aze is not None:
raise result.aze
return
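# --- Editorial note: illustrative usage sketch, not part of the generated code. ---
# The calls above (beginFileUpload / uploadChunk / finishFileUpload) form the legacy
# file-upload flow. A minimal, hedged sketch of how a caller might drive it, assuming
# `client` is a connected Client instance and `jar_path` is a hypothetical local file
# path; the chunk size is an arbitrary choice:
#
#   location = client.beginFileUpload()
#   with open(jar_path, 'rb') as f:
#       while True:
#           chunk = f.read(1024 * 1024)   # read 1 MiB at a time
#           if not chunk:
#               break
#           client.uploadChunk(location, chunk)
#   client.finishFileUpload(location)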
def downloadChunk(self, id):
"""
Parameters:
- id
"""
self.send_downloadChunk(id)
return self.recv_downloadChunk()
def send_downloadChunk(self, id):
self._oprot.writeMessageBegin('downloadChunk', TMessageType.CALL, self._seqid)
args = downloadChunk_args()
args.id = id
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_downloadChunk(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = downloadChunk_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.aze is not None:
raise result.aze
raise TApplicationException(TApplicationException.MISSING_RESULT, "downloadChunk failed: unknown result")
def getNimbusConf(self):
self.send_getNimbusConf()
return self.recv_getNimbusConf()
def send_getNimbusConf(self):
self._oprot.writeMessageBegin('getNimbusConf', TMessageType.CALL, self._seqid)
args = getNimbusConf_args()
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_getNimbusConf(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = getNimbusConf_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.aze is not None:
raise result.aze
raise TApplicationException(TApplicationException.MISSING_RESULT, "getNimbusConf failed: unknown result")
def getClusterInfo(self):
self.send_getClusterInfo()
return self.recv_getClusterInfo()
def send_getClusterInfo(self):
self._oprot.writeMessageBegin('getClusterInfo', TMessageType.CALL, self._seqid)
args = getClusterInfo_args()
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_getClusterInfo(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = getClusterInfo_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.aze is not None:
raise result.aze
raise TApplicationException(TApplicationException.MISSING_RESULT, "getClusterInfo failed: unknown result")
def getLeader(self):
self.send_getLeader()
return self.recv_getLeader()
def send_getLeader(self):
self._oprot.writeMessageBegin('getLeader', TMessageType.CALL, self._seqid)
args = getLeader_args()
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_getLeader(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = getLeader_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.aze is not None:
raise result.aze
raise TApplicationException(TApplicationException.MISSING_RESULT, "getLeader failed: unknown result")
def isTopologyNameAllowed(self, name):
"""
Parameters:
- name
"""
self.send_isTopologyNameAllowed(name)
return self.recv_isTopologyNameAllowed()
def send_isTopologyNameAllowed(self, name):
self._oprot.writeMessageBegin('isTopologyNameAllowed', TMessageType.CALL, self._seqid)
args = isTopologyNameAllowed_args()
args.name = name
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_isTopologyNameAllowed(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = isTopologyNameAllowed_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.aze is not None:
raise result.aze
raise TApplicationException(TApplicationException.MISSING_RESULT, "isTopologyNameAllowed failed: unknown result")
def getTopologyInfo(self, id):
"""
Parameters:
- id
"""
self.send_getTopologyInfo(id)
return self.recv_getTopologyInfo()
def send_getTopologyInfo(self, id):
self._oprot.writeMessageBegin('getTopologyInfo', TMessageType.CALL, self._seqid)
args = getTopologyInfo_args()
args.id = id
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_getTopologyInfo(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = getTopologyInfo_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.e is not None:
raise result.e
if result.aze is not None:
raise result.aze
raise TApplicationException(TApplicationException.MISSING_RESULT, "getTopologyInfo failed: unknown result")
def getTopologyInfoWithOpts(self, id, options):
"""
Parameters:
- id
- options
"""
self.send_getTopologyInfoWithOpts(id, options)
return self.recv_getTopologyInfoWithOpts()
def send_getTopologyInfoWithOpts(self, id, options):
self._oprot.writeMessageBegin('getTopologyInfoWithOpts', TMessageType.CALL, self._seqid)
args = getTopologyInfoWithOpts_args()
args.id = id
args.options = options
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_getTopologyInfoWithOpts(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = getTopologyInfoWithOpts_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.e is not None:
raise result.e
if result.aze is not None:
raise result.aze
raise TApplicationException(TApplicationException.MISSING_RESULT, "getTopologyInfoWithOpts failed: unknown result")
def getTopologyPageInfo(self, id, window, is_include_sys):
"""
Parameters:
- id
- window
- is_include_sys
"""
self.send_getTopologyPageInfo(id, window, is_include_sys)
return self.recv_getTopologyPageInfo()
def send_getTopologyPageInfo(self, id, window, is_include_sys):
self._oprot.writeMessageBegin('getTopologyPageInfo', TMessageType.CALL, self._seqid)
args = getTopologyPageInfo_args()
args.id = id
args.window = window
args.is_include_sys = is_include_sys
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_getTopologyPageInfo(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = getTopologyPageInfo_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.e is not None:
raise result.e
if result.aze is not None:
raise result.aze
raise TApplicationException(TApplicationException.MISSING_RESULT, "getTopologyPageInfo failed: unknown result")
def getSupervisorPageInfo(self, id, host, is_include_sys):
"""
Parameters:
- id
- host
- is_include_sys
"""
self.send_getSupervisorPageInfo(id, host, is_include_sys)
return self.recv_getSupervisorPageInfo()
def send_getSupervisorPageInfo(self, id, host, is_include_sys):
self._oprot.writeMessageBegin('getSupervisorPageInfo', TMessageType.CALL, self._seqid)
args = getSupervisorPageInfo_args()
args.id = id
args.host = host
args.is_include_sys = is_include_sys
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_getSupervisorPageInfo(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = getSupervisorPageInfo_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.e is not None:
raise result.e
if result.aze is not None:
raise result.aze
raise TApplicationException(TApplicationException.MISSING_RESULT, "getSupervisorPageInfo failed: unknown result")
def getComponentPageInfo(self, topology_id, component_id, window, is_include_sys):
"""
Parameters:
- topology_id
- component_id
- window
- is_include_sys
"""
self.send_getComponentPageInfo(topology_id, component_id, window, is_include_sys)
return self.recv_getComponentPageInfo()
def send_getComponentPageInfo(self, topology_id, component_id, window, is_include_sys):
self._oprot.writeMessageBegin('getComponentPageInfo', TMessageType.CALL, self._seqid)
args = getComponentPageInfo_args()
args.topology_id = topology_id
args.component_id = component_id
args.window = window
args.is_include_sys = is_include_sys
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_getComponentPageInfo(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = getComponentPageInfo_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.e is not None:
raise result.e
if result.aze is not None:
raise result.aze
raise TApplicationException(TApplicationException.MISSING_RESULT, "getComponentPageInfo failed: unknown result")
def getTopologyConf(self, id):
"""
Parameters:
- id
"""
self.send_getTopologyConf(id)
return self.recv_getTopologyConf()
def send_getTopologyConf(self, id):
self._oprot.writeMessageBegin('getTopologyConf', TMessageType.CALL, self._seqid)
args = getTopologyConf_args()
args.id = id
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_getTopologyConf(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = getTopologyConf_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.e is not None:
raise result.e
if result.aze is not None:
raise result.aze
raise TApplicationException(TApplicationException.MISSING_RESULT, "getTopologyConf failed: unknown result")
def getTopology(self, id):
"""
Returns the compiled topology that contains ackers and metrics consumers. Compare {@link #getUserTopology(String id)}.
Parameters:
- id
"""
self.send_getTopology(id)
return self.recv_getTopology()
def send_getTopology(self, id):
self._oprot.writeMessageBegin('getTopology', TMessageType.CALL, self._seqid)
args = getTopology_args()
args.id = id
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_getTopology(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = getTopology_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.e is not None:
raise result.e
if result.aze is not None:
raise result.aze
raise TApplicationException(TApplicationException.MISSING_RESULT, "getTopology failed: unknown result")
def getUserTopology(self, id):
"""
Returns the user specified topology as submitted originally. Compare {@link #getTopology(String id)}.
Parameters:
- id
"""
self.send_getUserTopology(id)
return self.recv_getUserTopology()
def send_getUserTopology(self, id):
self._oprot.writeMessageBegin('getUserTopology', TMessageType.CALL, self._seqid)
args = getUserTopology_args()
args.id = id
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_getUserTopology(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = getUserTopology_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.e is not None:
raise result.e
if result.aze is not None:
raise result.aze
raise TApplicationException(TApplicationException.MISSING_RESULT, "getUserTopology failed: unknown result")
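# --- Editorial note: illustrative, not part of the generated code. ---
# getTopology returns the compiled topology (including system components such as
# ackers and metrics consumers), while getUserTopology returns the topology exactly
# as it was submitted. A hedged sketch, assuming `client` is a connected Client and
# `topo_id` is a hypothetical topology id:
#
#   compiled = client.getTopology(topo_id)      # includes system components
#   original = client.getUserTopology(topo_id)  # as originally submitted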
def getTopologyHistory(self, user):
"""
Parameters:
- user
"""
self.send_getTopologyHistory(user)
return self.recv_getTopologyHistory()
def send_getTopologyHistory(self, user):
self._oprot.writeMessageBegin('getTopologyHistory', TMessageType.CALL, self._seqid)
args = getTopologyHistory_args()
args.user = user
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_getTopologyHistory(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = getTopologyHistory_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.aze is not None:
raise result.aze
raise TApplicationException(TApplicationException.MISSING_RESULT, "getTopologyHistory failed: unknown result")
def getOwnerResourceSummaries(self, owner):
"""
Parameters:
- owner
"""
self.send_getOwnerResourceSummaries(owner)
return self.recv_getOwnerResourceSummaries()
def send_getOwnerResourceSummaries(self, owner):
self._oprot.writeMessageBegin('getOwnerResourceSummaries', TMessageType.CALL, self._seqid)
args = getOwnerResourceSummaries_args()
args.owner = owner
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_getOwnerResourceSummaries(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = getOwnerResourceSummaries_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.aze is not None:
raise result.aze
raise TApplicationException(TApplicationException.MISSING_RESULT, "getOwnerResourceSummaries failed: unknown result")
def getSupervisorAssignments(self, node):
"""
Get the assignments currently assigned to a specific supervisor
Parameters:
- node
"""
self.send_getSupervisorAssignments(node)
return self.recv_getSupervisorAssignments()
def send_getSupervisorAssignments(self, node):
self._oprot.writeMessageBegin('getSupervisorAssignments', TMessageType.CALL, self._seqid)
args = getSupervisorAssignments_args()
args.node = node
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_getSupervisorAssignments(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = getSupervisorAssignments_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.aze is not None:
raise result.aze
raise TApplicationException(TApplicationException.MISSING_RESULT, "getSupervisorAssignments failed: unknown result")
def sendSupervisorWorkerHeartbeats(self, heartbeats):
"""
Send supervisor worker heartbeats for a specific supervisor
Parameters:
- heartbeats
"""
self.send_sendSupervisorWorkerHeartbeats(heartbeats)
self.recv_sendSupervisorWorkerHeartbeats()
def send_sendSupervisorWorkerHeartbeats(self, heartbeats):
self._oprot.writeMessageBegin('sendSupervisorWorkerHeartbeats', TMessageType.CALL, self._seqid)
args = sendSupervisorWorkerHeartbeats_args()
args.heartbeats = heartbeats
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_sendSupervisorWorkerHeartbeats(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = sendSupervisorWorkerHeartbeats_result()
result.read(iprot)
iprot.readMessageEnd()
if result.aze is not None:
raise result.aze
return
def sendSupervisorWorkerHeartbeat(self, heatbeat):
"""
Send supervisor local worker heartbeat when a supervisor is unreachable
Parameters:
- heatbeat
"""
self.send_sendSupervisorWorkerHeartbeat(heatbeat)
self.recv_sendSupervisorWorkerHeartbeat()
def send_sendSupervisorWorkerHeartbeat(self, heatbeat):
self._oprot.writeMessageBegin('sendSupervisorWorkerHeartbeat', TMessageType.CALL, self._seqid)
args = sendSupervisorWorkerHeartbeat_args()
args.heatbeat = heatbeat
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_sendSupervisorWorkerHeartbeat(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = sendSupervisorWorkerHeartbeat_result()
result.read(iprot)
iprot.readMessageEnd()
if result.aze is not None:
raise result.aze
if result.e is not None:
raise result.e
return
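# --- Editorial note: illustrative, not part of the generated code. ---
# sendSupervisorWorkerHeartbeats sends a supervisor's batch of worker heartbeats,
# while sendSupervisorWorkerHeartbeat is the per-worker path used when the local
# supervisor is unreachable; neither returns a value (only exceptions are raised).
# A hedged sketch, assuming `client` is a connected Client and the heartbeat
# objects are built elsewhere:
#
#   client.sendSupervisorWorkerHeartbeats(batch_heartbeats)   # normal path
#   client.sendSupervisorWorkerHeartbeat(single_heartbeat)    # fallback path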
def processWorkerMetrics(self, metrics):
"""
Parameters:
- metrics
"""
self.send_processWorkerMetrics(metrics)
self.recv_processWorkerMetrics()
def send_processWorkerMetrics(self, metrics):
self._oprot.writeMessageBegin('processWorkerMetrics', TMessageType.CALL, self._seqid)
args = processWorkerMetrics_args()
args.metrics = metrics
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_processWorkerMetrics(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = processWorkerMetrics_result()
result.read(iprot)
iprot.readMessageEnd()
return
def isRemoteBlobExists(self, blobKey):
"""
Decide if the blob has been removed from the cluster.
Parameters:
- blobKey
"""
self.send_isRemoteBlobExists(blobKey)
return self.recv_isRemoteBlobExists()
def send_isRemoteBlobExists(self, blobKey):
self._oprot.writeMessageBegin('isRemoteBlobExists', TMessageType.CALL, self._seqid)
args = isRemoteBlobExists_args()
args.blobKey = blobKey
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_isRemoteBlobExists(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = isRemoteBlobExists_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
if result.aze is not None:
raise result.aze
raise TApplicationException(TApplicationException.MISSING_RESULT, "isRemoteBlobExists failed: unknown result")
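# --- Editorial note: illustrative usage sketch, not part of the generated code. ---
# A minimal sketch of constructing this Client over the standard Thrift Python
# transports and protocols. The host, port, and use of a framed transport are
# assumptions (Nimbus commonly listens on 6627 with framed transport), not
# something this file defines:
#
#   from thrift.transport import TSocket, TTransport
#   from thrift.protocol import TBinaryProtocol
#
#   socket = TSocket.TSocket('nimbus.example.com', 6627)   # hypothetical host/port
#   transport = TTransport.TFramedTransport(socket)
#   protocol = TBinaryProtocol.TBinaryProtocol(transport)
#   client = Client(protocol)
#   transport.open()
#   try:
#       print(client.getClusterInfo())
#   finally:
#       transport.close()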
class Processor(Iface, TProcessor):
def __init__(self, handler):
self._handler = handler
self._processMap = {}
self._processMap["submitTopology"] = Processor.process_submitTopology
self._processMap["submitTopologyWithOpts"] = Processor.process_submitTopologyWithOpts
self._processMap["killTopology"] = Processor.process_killTopology
self._processMap["killTopologyWithOpts"] = Processor.process_killTopologyWithOpts
self._processMap["activate"] = Processor.process_activate
self._processMap["deactivate"] = Processor.process_deactivate
self._processMap["rebalance"] = Processor.process_rebalance
self._processMap["setLogConfig"] = Processor.process_setLogConfig
self._processMap["getLogConfig"] = Processor.process_getLogConfig
self._processMap["debug"] = Processor.process_debug
self._processMap["setWorkerProfiler"] = Processor.process_setWorkerProfiler
self._processMap["getComponentPendingProfileActions"] = Processor.process_getComponentPendingProfileActions
self._processMap["uploadNewCredentials"] = Processor.process_uploadNewCredentials
self._processMap["beginCreateBlob"] = Processor.process_beginCreateBlob
self._processMap["beginUpdateBlob"] = Processor.process_beginUpdateBlob
self._processMap["uploadBlobChunk"] = Processor.process_uploadBlobChunk
self._processMap["finishBlobUpload"] = Processor.process_finishBlobUpload
self._processMap["cancelBlobUpload"] = Processor.process_cancelBlobUpload
self._processMap["getBlobMeta"] = Processor.process_getBlobMeta
self._processMap["setBlobMeta"] = Processor.process_setBlobMeta
self._processMap["beginBlobDownload"] = Processor.process_beginBlobDownload
self._processMap["downloadBlobChunk"] = Processor.process_downloadBlobChunk
self._processMap["deleteBlob"] = Processor.process_deleteBlob
self._processMap["listBlobs"] = Processor.process_listBlobs
self._processMap["getBlobReplication"] = Processor.process_getBlobReplication
self._processMap["updateBlobReplication"] = Processor.process_updateBlobReplication
self._processMap["createStateInZookeeper"] = Processor.process_createStateInZookeeper
self._processMap["beginFileUpload"] = Processor.process_beginFileUpload
self._processMap["uploadChunk"] = Processor.process_uploadChunk
self._processMap["finishFileUpload"] = Processor.process_finishFileUpload
self._processMap["downloadChunk"] = Processor.process_downloadChunk
self._processMap["getNimbusConf"] = Processor.process_getNimbusConf
self._processMap["getClusterInfo"] = Processor.process_getClusterInfo
self._processMap["getLeader"] = Processor.process_getLeader
self._processMap["isTopologyNameAllowed"] = Processor.process_isTopologyNameAllowed
self._processMap["getTopologyInfo"] = Processor.process_getTopologyInfo
self._processMap["getTopologyInfoWithOpts"] = Processor.process_getTopologyInfoWithOpts
self._processMap["getTopologyPageInfo"] = Processor.process_getTopologyPageInfo
self._processMap["getSupervisorPageInfo"] = Processor.process_getSupervisorPageInfo
self._processMap["getComponentPageInfo"] = Processor.process_getComponentPageInfo
self._processMap["getTopologyConf"] = Processor.process_getTopologyConf
self._processMap["getTopology"] = Processor.process_getTopology
self._processMap["getUserTopology"] = Processor.process_getUserTopology
self._processMap["getTopologyHistory"] = Processor.process_getTopologyHistory
self._processMap["getOwnerResourceSummaries"] = Processor.process_getOwnerResourceSummaries
self._processMap["getSupervisorAssignments"] = Processor.process_getSupervisorAssignments
self._processMap["sendSupervisorWorkerHeartbeats"] = Processor.process_sendSupervisorWorkerHeartbeats
self._processMap["sendSupervisorWorkerHeartbeat"] = Processor.process_sendSupervisorWorkerHeartbeat
self._processMap["processWorkerMetrics"] = Processor.process_processWorkerMetrics
self._processMap["isRemoteBlobExists"] = Processor.process_isRemoteBlobExists
self._on_message_begin = None
def on_message_begin(self, func):
self._on_message_begin = func
def process(self, iprot, oprot):
(name, type, seqid) = iprot.readMessageBegin()
if self._on_message_begin:
self._on_message_begin(name, type, seqid)
if name not in self._processMap:
iprot.skip(TType.STRUCT)
iprot.readMessageEnd()
x = TApplicationException(TApplicationException.UNKNOWN_METHOD, 'Unknown function %s' % (name))
oprot.writeMessageBegin(name, TMessageType.EXCEPTION, seqid)
x.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
return
else:
self._processMap[name](self, seqid, iprot, oprot)
return True
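# --- Editorial note: illustrative, not part of the generated code. ---
# process() dispatches each incoming message to the matching process_* handler via
# self._processMap; unknown method names are answered with an UNKNOWN_METHOD
# exception. A hedged sketch of serving this Processor with a stock Thrift server,
# assuming `handler` implements the Iface methods and the port is hypothetical:
#
#   from thrift.transport import TSocket, TTransport
#   from thrift.protocol import TBinaryProtocol
#   from thrift.server import TServer
#
#   processor = Processor(handler)
#   server = TServer.TThreadedServer(
#       processor,
#       TSocket.TServerSocket(port=6627),
#       TTransport.TFramedTransportFactory(),
#       TBinaryProtocol.TBinaryProtocolFactory())
#   server.serve()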
def process_submitTopology(self, seqid, iprot, oprot):
args = submitTopology_args()
args.read(iprot)
iprot.readMessageEnd()
result = submitTopology_result()
try:
self._handler.submitTopology(args.name, args.uploadedJarLocation, args.jsonConf, args.topology)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except AlreadyAliveException as e:
msg_type = TMessageType.REPLY
result.e = e
except InvalidTopologyException as ite:
msg_type = TMessageType.REPLY
result.ite = ite
except AuthorizationException as aze:
msg_type = TMessageType.REPLY
result.aze = aze
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("submitTopology", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_submitTopologyWithOpts(self, seqid, iprot, oprot):
args = submitTopologyWithOpts_args()
args.read(iprot)
iprot.readMessageEnd()
result = submitTopologyWithOpts_result()
try:
self._handler.submitTopologyWithOpts(args.name, args.uploadedJarLocation, args.jsonConf, args.topology, args.options)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except AlreadyAliveException as e:
msg_type = TMessageType.REPLY
result.e = e
except InvalidTopologyException as ite:
msg_type = TMessageType.REPLY
result.ite = ite
except AuthorizationException as aze:
msg_type = TMessageType.REPLY
result.aze = aze
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("submitTopologyWithOpts", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_killTopology(self, seqid, iprot, oprot):
args = killTopology_args()
args.read(iprot)
iprot.readMessageEnd()
result = killTopology_result()
try:
self._handler.killTopology(args.name)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except NotAliveException as e:
msg_type = TMessageType.REPLY
result.e = e
except AuthorizationException as aze:
msg_type = TMessageType.REPLY
result.aze = aze
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("killTopology", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_killTopologyWithOpts(self, seqid, iprot, oprot):
args = killTopologyWithOpts_args()
args.read(iprot)
iprot.readMessageEnd()
result = killTopologyWithOpts_result()
try:
self._handler.killTopologyWithOpts(args.name, args.options)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except NotAliveException as e:
msg_type = TMessageType.REPLY
result.e = e
except AuthorizationException as aze:
msg_type = TMessageType.REPLY
result.aze = aze
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("killTopologyWithOpts", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_activate(self, seqid, iprot, oprot):
args = activate_args()
args.read(iprot)
iprot.readMessageEnd()
result = activate_result()
try:
self._handler.activate(args.name)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except NotAliveException as e:
msg_type = TMessageType.REPLY
result.e = e
except AuthorizationException as aze:
msg_type = TMessageType.REPLY
result.aze = aze
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("activate", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_deactivate(self, seqid, iprot, oprot):
args = deactivate_args()
args.read(iprot)
iprot.readMessageEnd()
result = deactivate_result()
try:
self._handler.deactivate(args.name)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except NotAliveException as e:
msg_type = TMessageType.REPLY
result.e = e
except AuthorizationException as aze:
msg_type = TMessageType.REPLY
result.aze = aze
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("deactivate", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_rebalance(self, seqid, iprot, oprot):
args = rebalance_args()
args.read(iprot)
iprot.readMessageEnd()
result = rebalance_result()
try:
self._handler.rebalance(args.name, args.options)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except NotAliveException as e:
msg_type = TMessageType.REPLY
result.e = e
except InvalidTopologyException as ite:
msg_type = TMessageType.REPLY
result.ite = ite
except AuthorizationException as aze:
msg_type = TMessageType.REPLY
result.aze = aze
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("rebalance", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_setLogConfig(self, seqid, iprot, oprot):
args = setLogConfig_args()
args.read(iprot)
iprot.readMessageEnd()
result = setLogConfig_result()
try:
self._handler.setLogConfig(args.name, args.config)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("setLogConfig", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_getLogConfig(self, seqid, iprot, oprot):
args = getLogConfig_args()
args.read(iprot)
iprot.readMessageEnd()
result = getLogConfig_result()
try:
result.success = self._handler.getLogConfig(args.name)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("getLogConfig", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_debug(self, seqid, iprot, oprot):
args = debug_args()
args.read(iprot)
iprot.readMessageEnd()
result = debug_result()
try:
self._handler.debug(args.name, args.component, args.enable, args.samplingPercentage)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except NotAliveException as e:
msg_type = TMessageType.REPLY
result.e = e
except AuthorizationException as aze:
msg_type = TMessageType.REPLY
result.aze = aze
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("debug", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_setWorkerProfiler(self, seqid, iprot, oprot):
args = setWorkerProfiler_args()
args.read(iprot)
iprot.readMessageEnd()
result = setWorkerProfiler_result()
try:
self._handler.setWorkerProfiler(args.id, args.profileRequest)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("setWorkerProfiler", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_getComponentPendingProfileActions(self, seqid, iprot, oprot):
args = getComponentPendingProfileActions_args()
args.read(iprot)
iprot.readMessageEnd()
result = getComponentPendingProfileActions_result()
try:
result.success = self._handler.getComponentPendingProfileActions(args.id, args.component_id, args.action)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("getComponentPendingProfileActions", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_uploadNewCredentials(self, seqid, iprot, oprot):
args = uploadNewCredentials_args()
args.read(iprot)
iprot.readMessageEnd()
result = uploadNewCredentials_result()
try:
self._handler.uploadNewCredentials(args.name, args.creds)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except NotAliveException as e:
msg_type = TMessageType.REPLY
result.e = e
except InvalidTopologyException as ite:
msg_type = TMessageType.REPLY
result.ite = ite
except AuthorizationException as aze:
msg_type = TMessageType.REPLY
result.aze = aze
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("uploadNewCredentials", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_beginCreateBlob(self, seqid, iprot, oprot):
args = beginCreateBlob_args()
args.read(iprot)
iprot.readMessageEnd()
result = beginCreateBlob_result()
try:
result.success = self._handler.beginCreateBlob(args.key, args.meta)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except AuthorizationException as aze:
msg_type = TMessageType.REPLY
result.aze = aze
except KeyAlreadyExistsException as kae:
msg_type = TMessageType.REPLY
result.kae = kae
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("beginCreateBlob", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_beginUpdateBlob(self, seqid, iprot, oprot):
args = beginUpdateBlob_args()
args.read(iprot)
iprot.readMessageEnd()
result = beginUpdateBlob_result()
try:
result.success = self._handler.beginUpdateBlob(args.key)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except AuthorizationException as aze:
msg_type = TMessageType.REPLY
result.aze = aze
except KeyNotFoundException as knf:
msg_type = TMessageType.REPLY
result.knf = knf
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("beginUpdateBlob", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_uploadBlobChunk(self, seqid, iprot, oprot):
args = uploadBlobChunk_args()
args.read(iprot)
iprot.readMessageEnd()
result = uploadBlobChunk_result()
try:
self._handler.uploadBlobChunk(args.session, args.chunk)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except AuthorizationException as aze:
msg_type = TMessageType.REPLY
result.aze = aze
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("uploadBlobChunk", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_finishBlobUpload(self, seqid, iprot, oprot):
args = finishBlobUpload_args()
args.read(iprot)
iprot.readMessageEnd()
result = finishBlobUpload_result()
try:
self._handler.finishBlobUpload(args.session)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except AuthorizationException as aze:
msg_type = TMessageType.REPLY
result.aze = aze
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("finishBlobUpload", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_cancelBlobUpload(self, seqid, iprot, oprot):
args = cancelBlobUpload_args()
args.read(iprot)
iprot.readMessageEnd()
result = cancelBlobUpload_result()
try:
self._handler.cancelBlobUpload(args.session)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except AuthorizationException as aze:
msg_type = TMessageType.REPLY
result.aze = aze
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("cancelBlobUpload", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_getBlobMeta(self, seqid, iprot, oprot):
args = getBlobMeta_args()
args.read(iprot)
iprot.readMessageEnd()
result = getBlobMeta_result()
try:
result.success = self._handler.getBlobMeta(args.key)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except AuthorizationException as aze:
msg_type = TMessageType.REPLY
result.aze = aze
except KeyNotFoundException as knf:
msg_type = TMessageType.REPLY
result.knf = knf
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("getBlobMeta", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_setBlobMeta(self, seqid, iprot, oprot):
args = setBlobMeta_args()
args.read(iprot)
iprot.readMessageEnd()
result = setBlobMeta_result()
try:
self._handler.setBlobMeta(args.key, args.meta)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except AuthorizationException as aze:
msg_type = TMessageType.REPLY
result.aze = aze
except KeyNotFoundException as knf:
msg_type = TMessageType.REPLY
result.knf = knf
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("setBlobMeta", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_beginBlobDownload(self, seqid, iprot, oprot):
args = beginBlobDownload_args()
args.read(iprot)
iprot.readMessageEnd()
result = beginBlobDownload_result()
try:
result.success = self._handler.beginBlobDownload(args.key)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except AuthorizationException as aze:
msg_type = TMessageType.REPLY
result.aze = aze
except KeyNotFoundException as knf:
msg_type = TMessageType.REPLY
result.knf = knf
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("beginBlobDownload", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_downloadBlobChunk(self, seqid, iprot, oprot):
args = downloadBlobChunk_args()
args.read(iprot)
iprot.readMessageEnd()
result = downloadBlobChunk_result()
try:
result.success = self._handler.downloadBlobChunk(args.session)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except AuthorizationException as aze:
msg_type = TMessageType.REPLY
result.aze = aze
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("downloadBlobChunk", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_deleteBlob(self, seqid, iprot, oprot):
args = deleteBlob_args()
args.read(iprot)
iprot.readMessageEnd()
result = deleteBlob_result()
try:
self._handler.deleteBlob(args.key)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except AuthorizationException as aze:
msg_type = TMessageType.REPLY
result.aze = aze
except KeyNotFoundException as knf:
msg_type = TMessageType.REPLY
result.knf = knf
except IllegalStateException as ise:
msg_type = TMessageType.REPLY
result.ise = ise
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("deleteBlob", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_listBlobs(self, seqid, iprot, oprot):
args = listBlobs_args()
args.read(iprot)
iprot.readMessageEnd()
result = listBlobs_result()
try:
result.success = self._handler.listBlobs(args.session)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("listBlobs", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_getBlobReplication(self, seqid, iprot, oprot):
args = getBlobReplication_args()
args.read(iprot)
iprot.readMessageEnd()
result = getBlobReplication_result()
try:
result.success = self._handler.getBlobReplication(args.key)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except AuthorizationException as aze:
msg_type = TMessageType.REPLY
result.aze = aze
except KeyNotFoundException as knf:
msg_type = TMessageType.REPLY
result.knf = knf
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("getBlobReplication", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_updateBlobReplication(self, seqid, iprot, oprot):
args = updateBlobReplication_args()
args.read(iprot)
iprot.readMessageEnd()
result = updateBlobReplication_result()
try:
result.success = self._handler.updateBlobReplication(args.key, args.replication)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except AuthorizationException as aze:
msg_type = TMessageType.REPLY
result.aze = aze
except KeyNotFoundException as knf:
msg_type = TMessageType.REPLY
result.knf = knf
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("updateBlobReplication", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_createStateInZookeeper(self, seqid, iprot, oprot):
args = createStateInZookeeper_args()
args.read(iprot)
iprot.readMessageEnd()
result = createStateInZookeeper_result()
try:
self._handler.createStateInZookeeper(args.key)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("createStateInZookeeper", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_beginFileUpload(self, seqid, iprot, oprot):
args = beginFileUpload_args()
args.read(iprot)
iprot.readMessageEnd()
result = beginFileUpload_result()
try:
result.success = self._handler.beginFileUpload()
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except AuthorizationException as aze:
msg_type = TMessageType.REPLY
result.aze = aze
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("beginFileUpload", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_uploadChunk(self, seqid, iprot, oprot):
args = uploadChunk_args()
args.read(iprot)
iprot.readMessageEnd()
result = uploadChunk_result()
try:
self._handler.uploadChunk(args.location, args.chunk)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except AuthorizationException as aze:
msg_type = TMessageType.REPLY
result.aze = aze
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("uploadChunk", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_finishFileUpload(self, seqid, iprot, oprot):
args = finishFileUpload_args()
args.read(iprot)
iprot.readMessageEnd()
result = finishFileUpload_result()
try:
self._handler.finishFileUpload(args.location)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except AuthorizationException as aze:
msg_type = TMessageType.REPLY
result.aze = aze
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("finishFileUpload", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_downloadChunk(self, seqid, iprot, oprot):
args = downloadChunk_args()
args.read(iprot)
iprot.readMessageEnd()
result = downloadChunk_result()
try:
result.success = self._handler.downloadChunk(args.id)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except AuthorizationException as aze:
msg_type = TMessageType.REPLY
result.aze = aze
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("downloadChunk", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_getNimbusConf(self, seqid, iprot, oprot):
args = getNimbusConf_args()
args.read(iprot)
iprot.readMessageEnd()
result = getNimbusConf_result()
try:
result.success = self._handler.getNimbusConf()
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except AuthorizationException as aze:
msg_type = TMessageType.REPLY
result.aze = aze
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("getNimbusConf", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_getClusterInfo(self, seqid, iprot, oprot):
args = getClusterInfo_args()
args.read(iprot)
iprot.readMessageEnd()
result = getClusterInfo_result()
try:
result.success = self._handler.getClusterInfo()
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except AuthorizationException as aze:
msg_type = TMessageType.REPLY
result.aze = aze
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("getClusterInfo", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_getLeader(self, seqid, iprot, oprot):
args = getLeader_args()
args.read(iprot)
iprot.readMessageEnd()
result = getLeader_result()
try:
result.success = self._handler.getLeader()
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except AuthorizationException as aze:
msg_type = TMessageType.REPLY
result.aze = aze
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("getLeader", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_isTopologyNameAllowed(self, seqid, iprot, oprot):
args = isTopologyNameAllowed_args()
args.read(iprot)
iprot.readMessageEnd()
result = isTopologyNameAllowed_result()
try:
result.success = self._handler.isTopologyNameAllowed(args.name)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except AuthorizationException as aze:
msg_type = TMessageType.REPLY
result.aze = aze
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("isTopologyNameAllowed", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_getTopologyInfo(self, seqid, iprot, oprot):
args = getTopologyInfo_args()
args.read(iprot)
iprot.readMessageEnd()
result = getTopologyInfo_result()
try:
result.success = self._handler.getTopologyInfo(args.id)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except NotAliveException as e:
msg_type = TMessageType.REPLY
result.e = e
except AuthorizationException as aze:
msg_type = TMessageType.REPLY
result.aze = aze
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("getTopologyInfo", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_getTopologyInfoWithOpts(self, seqid, iprot, oprot):
args = getTopologyInfoWithOpts_args()
args.read(iprot)
iprot.readMessageEnd()
result = getTopologyInfoWithOpts_result()
try:
result.success = self._handler.getTopologyInfoWithOpts(args.id, args.options)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except NotAliveException as e:
msg_type = TMessageType.REPLY
result.e = e
except AuthorizationException as aze:
msg_type = TMessageType.REPLY
result.aze = aze
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("getTopologyInfoWithOpts", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_getTopologyPageInfo(self, seqid, iprot, oprot):
args = getTopologyPageInfo_args()
args.read(iprot)
iprot.readMessageEnd()
result = getTopologyPageInfo_result()
try:
result.success = self._handler.getTopologyPageInfo(args.id, args.window, args.is_include_sys)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except NotAliveException as e:
msg_type = TMessageType.REPLY
result.e = e
except AuthorizationException as aze:
msg_type = TMessageType.REPLY
result.aze = aze
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("getTopologyPageInfo", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_getSupervisorPageInfo(self, seqid, iprot, oprot):
args = getSupervisorPageInfo_args()
args.read(iprot)
iprot.readMessageEnd()
result = getSupervisorPageInfo_result()
try:
result.success = self._handler.getSupervisorPageInfo(args.id, args.host, args.is_include_sys)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except NotAliveException as e:
msg_type = TMessageType.REPLY
result.e = e
except AuthorizationException as aze:
msg_type = TMessageType.REPLY
result.aze = aze
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("getSupervisorPageInfo", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_getComponentPageInfo(self, seqid, iprot, oprot):
args = getComponentPageInfo_args()
args.read(iprot)
iprot.readMessageEnd()
result = getComponentPageInfo_result()
try:
result.success = self._handler.getComponentPageInfo(args.topology_id, args.component_id, args.window, args.is_include_sys)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except NotAliveException as e:
msg_type = TMessageType.REPLY
result.e = e
except AuthorizationException as aze:
msg_type = TMessageType.REPLY
result.aze = aze
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("getComponentPageInfo", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_getTopologyConf(self, seqid, iprot, oprot):
args = getTopologyConf_args()
args.read(iprot)
iprot.readMessageEnd()
result = getTopologyConf_result()
try:
result.success = self._handler.getTopologyConf(args.id)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except NotAliveException as e:
msg_type = TMessageType.REPLY
result.e = e
except AuthorizationException as aze:
msg_type = TMessageType.REPLY
result.aze = aze
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("getTopologyConf", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_getTopology(self, seqid, iprot, oprot):
args = getTopology_args()
args.read(iprot)
iprot.readMessageEnd()
result = getTopology_result()
try:
result.success = self._handler.getTopology(args.id)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except NotAliveException as e:
msg_type = TMessageType.REPLY
result.e = e
except AuthorizationException as aze:
msg_type = TMessageType.REPLY
result.aze = aze
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("getTopology", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_getUserTopology(self, seqid, iprot, oprot):
args = getUserTopology_args()
args.read(iprot)
iprot.readMessageEnd()
result = getUserTopology_result()
try:
result.success = self._handler.getUserTopology(args.id)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except NotAliveException as e:
msg_type = TMessageType.REPLY
result.e = e
except AuthorizationException as aze:
msg_type = TMessageType.REPLY
result.aze = aze
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("getUserTopology", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_getTopologyHistory(self, seqid, iprot, oprot):
args = getTopologyHistory_args()
args.read(iprot)
iprot.readMessageEnd()
result = getTopologyHistory_result()
try:
result.success = self._handler.getTopologyHistory(args.user)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except AuthorizationException as aze:
msg_type = TMessageType.REPLY
result.aze = aze
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("getTopologyHistory", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_getOwnerResourceSummaries(self, seqid, iprot, oprot):
args = getOwnerResourceSummaries_args()
args.read(iprot)
iprot.readMessageEnd()
result = getOwnerResourceSummaries_result()
try:
result.success = self._handler.getOwnerResourceSummaries(args.owner)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except AuthorizationException as aze:
msg_type = TMessageType.REPLY
result.aze = aze
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("getOwnerResourceSummaries", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_getSupervisorAssignments(self, seqid, iprot, oprot):
args = getSupervisorAssignments_args()
args.read(iprot)
iprot.readMessageEnd()
result = getSupervisorAssignments_result()
try:
result.success = self._handler.getSupervisorAssignments(args.node)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except AuthorizationException as aze:
msg_type = TMessageType.REPLY
result.aze = aze
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("getSupervisorAssignments", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_sendSupervisorWorkerHeartbeats(self, seqid, iprot, oprot):
args = sendSupervisorWorkerHeartbeats_args()
args.read(iprot)
iprot.readMessageEnd()
result = sendSupervisorWorkerHeartbeats_result()
try:
self._handler.sendSupervisorWorkerHeartbeats(args.heartbeats)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except AuthorizationException as aze:
msg_type = TMessageType.REPLY
result.aze = aze
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("sendSupervisorWorkerHeartbeats", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_sendSupervisorWorkerHeartbeat(self, seqid, iprot, oprot):
args = sendSupervisorWorkerHeartbeat_args()
args.read(iprot)
iprot.readMessageEnd()
result = sendSupervisorWorkerHeartbeat_result()
try:
self._handler.sendSupervisorWorkerHeartbeat(args.heatbeat)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except AuthorizationException as aze:
msg_type = TMessageType.REPLY
result.aze = aze
except NotAliveException as e:
msg_type = TMessageType.REPLY
result.e = e
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("sendSupervisorWorkerHeartbeat", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
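# Note: 'heatbeat' (rather than 'heartbeat') mirrors the field name generated from the
# service IDL and the attribute on sendSupervisorWorkerHeartbeat_args; it is left
# unchanged here so the call matches the struct definition.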
def process_processWorkerMetrics(self, seqid, iprot, oprot):
args = processWorkerMetrics_args()
args.read(iprot)
iprot.readMessageEnd()
result = processWorkerMetrics_result()
try:
self._handler.processWorkerMetrics(args.metrics)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("processWorkerMetrics", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_isRemoteBlobExists(self, seqid, iprot, oprot):
args = isRemoteBlobExists_args()
args.read(iprot)
iprot.readMessageEnd()
result = isRemoteBlobExists_result()
try:
result.success = self._handler.isRemoteBlobExists(args.blobKey)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except AuthorizationException as aze:
msg_type = TMessageType.REPLY
result.aze = aze
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("isRemoteBlobExists", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
# HELPER FUNCTIONS AND STRUCTURES
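# Editorial note: each RPC gets a pair of generated structs -- <method>_args holding the
# request fields and <method>_result holding an optional 'success' value (field id 0)
# plus one field per exception declared in the IDL. Both share the same read()/write()
# shape: use the accelerated C codec via _fast_decode/_fast_encode when the struct's
# thrift_spec is available, otherwise walk the fields one by one.
# Illustrative (not generator output) round-trip through an in-memory transport:
#
#     from thrift.transport import TTransport
#     from thrift.protocol import TBinaryProtocol
#     buf = TTransport.TMemoryBuffer()
#     proto = TBinaryProtocol.TBinaryProtocol(buf)
#     submitTopology_args(name="wordcount", uploadedJarLocation="/tmp/topo.jar",
#                         jsonConf="{}", topology=StormTopology()).write(proto)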
class submitTopology_args(object):
"""
Attributes:
- name
- uploadedJarLocation
- jsonConf
- topology
"""
def __init__(self, name=None, uploadedJarLocation=None, jsonConf=None, topology=None,):
self.name = name
self.uploadedJarLocation = uploadedJarLocation
self.jsonConf = jsonConf
self.topology = topology
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.name = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRING:
self.uploadedJarLocation = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.STRING:
self.jsonConf = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.STRUCT:
self.topology = StormTopology()
self.topology.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('submitTopology_args')
if self.name is not None:
oprot.writeFieldBegin('name', TType.STRING, 1)
oprot.writeString(self.name.encode('utf-8') if sys.version_info[0] == 2 else self.name)
oprot.writeFieldEnd()
if self.uploadedJarLocation is not None:
oprot.writeFieldBegin('uploadedJarLocation', TType.STRING, 2)
oprot.writeString(self.uploadedJarLocation.encode('utf-8') if sys.version_info[0] == 2 else self.uploadedJarLocation)
oprot.writeFieldEnd()
if self.jsonConf is not None:
oprot.writeFieldBegin('jsonConf', TType.STRING, 3)
oprot.writeString(self.jsonConf.encode('utf-8') if sys.version_info[0] == 2 else self.jsonConf)
oprot.writeFieldEnd()
if self.topology is not None:
oprot.writeFieldBegin('topology', TType.STRUCT, 4)
self.topology.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(submitTopology_args)
submitTopology_args.thrift_spec = (
None, # 0
(1, TType.STRING, 'name', 'UTF8', None, ), # 1
(2, TType.STRING, 'uploadedJarLocation', 'UTF8', None, ), # 2
(3, TType.STRING, 'jsonConf', 'UTF8', None, ), # 3
(4, TType.STRUCT, 'topology', [StormTopology, None], None, ), # 4
)
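# Editorial note: thrift_spec is indexed by field id, which is why slot 0 is None for
# _args structs (field id 0 is reserved for the RPC return value). Each entry is a tuple
# of (field id, TType, field name, type-specific spec, default value); the accelerated
# encoder/decoder reads this table instead of the hand-written loops above.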
class submitTopology_result(object):
"""
Attributes:
- e
- ite
- aze
"""
def __init__(self, e=None, ite=None, aze=None,):
self.e = e
self.ite = ite
self.aze = aze
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.e = AlreadyAliveException()
self.e.read(iprot)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.ite = InvalidTopologyException()
self.ite.read(iprot)
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.STRUCT:
self.aze = AuthorizationException()
self.aze.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('submitTopology_result')
if self.e is not None:
oprot.writeFieldBegin('e', TType.STRUCT, 1)
self.e.write(oprot)
oprot.writeFieldEnd()
if self.ite is not None:
oprot.writeFieldBegin('ite', TType.STRUCT, 2)
self.ite.write(oprot)
oprot.writeFieldEnd()
if self.aze is not None:
oprot.writeFieldBegin('aze', TType.STRUCT, 3)
self.aze.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(submitTopology_result)
submitTopology_result.thrift_spec = (
None, # 0
(1, TType.STRUCT, 'e', [AlreadyAliveException, None], None, ), # 1
(2, TType.STRUCT, 'ite', [InvalidTopologyException, None], None, ), # 2
(3, TType.STRUCT, 'aze', [AuthorizationException, None], None, ), # 3
)
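# Editorial note: result structs carry the declared exceptions as ordinary fields --
# here e (AlreadyAliveException), ite (InvalidTopologyException) and aze
# (AuthorizationException). The generated client raises whichever of these comes back
# non-None; submitTopology itself is void, so there is no 'success' slot at field id 0.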
class submitTopologyWithOpts_args(object):
"""
Attributes:
- name
- uploadedJarLocation
- jsonConf
- topology
- options
"""
def __init__(self, name=None, uploadedJarLocation=None, jsonConf=None, topology=None, options=None,):
self.name = name
self.uploadedJarLocation = uploadedJarLocation
self.jsonConf = jsonConf
self.topology = topology
self.options = options
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.name = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRING:
self.uploadedJarLocation = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.STRING:
self.jsonConf = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.STRUCT:
self.topology = StormTopology()
self.topology.read(iprot)
else:
iprot.skip(ftype)
elif fid == 5:
if ftype == TType.STRUCT:
self.options = SubmitOptions()
self.options.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('submitTopologyWithOpts_args')
if self.name is not None:
oprot.writeFieldBegin('name', TType.STRING, 1)
oprot.writeString(self.name.encode('utf-8') if sys.version_info[0] == 2 else self.name)
oprot.writeFieldEnd()
if self.uploadedJarLocation is not None:
oprot.writeFieldBegin('uploadedJarLocation', TType.STRING, 2)
oprot.writeString(self.uploadedJarLocation.encode('utf-8') if sys.version_info[0] == 2 else self.uploadedJarLocation)
oprot.writeFieldEnd()
if self.jsonConf is not None:
oprot.writeFieldBegin('jsonConf', TType.STRING, 3)
oprot.writeString(self.jsonConf.encode('utf-8') if sys.version_info[0] == 2 else self.jsonConf)
oprot.writeFieldEnd()
if self.topology is not None:
oprot.writeFieldBegin('topology', TType.STRUCT, 4)
self.topology.write(oprot)
oprot.writeFieldEnd()
if self.options is not None:
oprot.writeFieldBegin('options', TType.STRUCT, 5)
self.options.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(submitTopologyWithOpts_args)
submitTopologyWithOpts_args.thrift_spec = (
None, # 0
(1, TType.STRING, 'name', 'UTF8', None, ), # 1
(2, TType.STRING, 'uploadedJarLocation', 'UTF8', None, ), # 2
(3, TType.STRING, 'jsonConf', 'UTF8', None, ), # 3
(4, TType.STRUCT, 'topology', [StormTopology, None], None, ), # 4
(5, TType.STRUCT, 'options', [SubmitOptions, None], None, ), # 5
)
class submitTopologyWithOpts_result(object):
"""
Attributes:
- e
- ite
- aze
"""
def __init__(self, e=None, ite=None, aze=None,):
self.e = e
self.ite = ite
self.aze = aze
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.e = AlreadyAliveException()
self.e.read(iprot)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.ite = InvalidTopologyException()
self.ite.read(iprot)
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.STRUCT:
self.aze = AuthorizationException()
self.aze.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('submitTopologyWithOpts_result')
if self.e is not None:
oprot.writeFieldBegin('e', TType.STRUCT, 1)
self.e.write(oprot)
oprot.writeFieldEnd()
if self.ite is not None:
oprot.writeFieldBegin('ite', TType.STRUCT, 2)
self.ite.write(oprot)
oprot.writeFieldEnd()
if self.aze is not None:
oprot.writeFieldBegin('aze', TType.STRUCT, 3)
self.aze.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(submitTopologyWithOpts_result)
submitTopologyWithOpts_result.thrift_spec = (
None, # 0
(1, TType.STRUCT, 'e', [AlreadyAliveException, None], None, ), # 1
(2, TType.STRUCT, 'ite', [InvalidTopologyException, None], None, ), # 2
(3, TType.STRUCT, 'aze', [AuthorizationException, None], None, ), # 3
)
class killTopology_args(object):
"""
Attributes:
- name
"""
def __init__(self, name=None,):
self.name = name
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.name = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('killTopology_args')
if self.name is not None:
oprot.writeFieldBegin('name', TType.STRING, 1)
oprot.writeString(self.name.encode('utf-8') if sys.version_info[0] == 2 else self.name)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(killTopology_args)
killTopology_args.thrift_spec = (
None, # 0
(1, TType.STRING, 'name', 'UTF8', None, ), # 1
)
class killTopology_result(object):
"""
Attributes:
- e
- aze
"""
def __init__(self, e=None, aze=None,):
self.e = e
self.aze = aze
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.e = NotAliveException()
self.e.read(iprot)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.aze = AuthorizationException()
self.aze.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('killTopology_result')
if self.e is not None:
oprot.writeFieldBegin('e', TType.STRUCT, 1)
self.e.write(oprot)
oprot.writeFieldEnd()
if self.aze is not None:
oprot.writeFieldBegin('aze', TType.STRUCT, 2)
self.aze.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(killTopology_result)
killTopology_result.thrift_spec = (
None, # 0
(1, TType.STRUCT, 'e', [NotAliveException, None], None, ), # 1
(2, TType.STRUCT, 'aze', [AuthorizationException, None], None, ), # 2
)
class killTopologyWithOpts_args(object):
"""
Attributes:
- name
- options
"""
def __init__(self, name=None, options=None,):
self.name = name
self.options = options
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.name = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.options = KillOptions()
self.options.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('killTopologyWithOpts_args')
if self.name is not None:
oprot.writeFieldBegin('name', TType.STRING, 1)
oprot.writeString(self.name.encode('utf-8') if sys.version_info[0] == 2 else self.name)
oprot.writeFieldEnd()
if self.options is not None:
oprot.writeFieldBegin('options', TType.STRUCT, 2)
self.options.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(killTopologyWithOpts_args)
killTopologyWithOpts_args.thrift_spec = (
None, # 0
(1, TType.STRING, 'name', 'UTF8', None, ), # 1
(2, TType.STRUCT, 'options', [KillOptions, None], None, ), # 2
)
class killTopologyWithOpts_result(object):
"""
Attributes:
- e
- aze
"""
def __init__(self, e=None, aze=None,):
self.e = e
self.aze = aze
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.e = NotAliveException()
self.e.read(iprot)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.aze = AuthorizationException()
self.aze.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('killTopologyWithOpts_result')
if self.e is not None:
oprot.writeFieldBegin('e', TType.STRUCT, 1)
self.e.write(oprot)
oprot.writeFieldEnd()
if self.aze is not None:
oprot.writeFieldBegin('aze', TType.STRUCT, 2)
self.aze.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(killTopologyWithOpts_result)
killTopologyWithOpts_result.thrift_spec = (
None, # 0
(1, TType.STRUCT, 'e', [NotAliveException, None], None, ), # 1
(2, TType.STRUCT, 'aze', [AuthorizationException, None], None, ), # 2
)
class activate_args(object):
"""
Attributes:
- name
"""
def __init__(self, name=None,):
self.name = name
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.name = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('activate_args')
if self.name is not None:
oprot.writeFieldBegin('name', TType.STRING, 1)
oprot.writeString(self.name.encode('utf-8') if sys.version_info[0] == 2 else self.name)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(activate_args)
activate_args.thrift_spec = (
None, # 0
(1, TType.STRING, 'name', 'UTF8', None, ), # 1
)
class activate_result(object):
"""
Attributes:
- e
- aze
"""
def __init__(self, e=None, aze=None,):
self.e = e
self.aze = aze
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.e = NotAliveException()
self.e.read(iprot)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.aze = AuthorizationException()
self.aze.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('activate_result')
if self.e is not None:
oprot.writeFieldBegin('e', TType.STRUCT, 1)
self.e.write(oprot)
oprot.writeFieldEnd()
if self.aze is not None:
oprot.writeFieldBegin('aze', TType.STRUCT, 2)
self.aze.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(activate_result)
activate_result.thrift_spec = (
None, # 0
(1, TType.STRUCT, 'e', [NotAliveException, None], None, ), # 1
(2, TType.STRUCT, 'aze', [AuthorizationException, None], None, ), # 2
)
class deactivate_args(object):
"""
Attributes:
- name
"""
def __init__(self, name=None,):
self.name = name
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.name = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('deactivate_args')
if self.name is not None:
oprot.writeFieldBegin('name', TType.STRING, 1)
oprot.writeString(self.name.encode('utf-8') if sys.version_info[0] == 2 else self.name)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(deactivate_args)
deactivate_args.thrift_spec = (
None, # 0
(1, TType.STRING, 'name', 'UTF8', None, ), # 1
)
class deactivate_result(object):
"""
Attributes:
- e
- aze
"""
def __init__(self, e=None, aze=None,):
self.e = e
self.aze = aze
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.e = NotAliveException()
self.e.read(iprot)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.aze = AuthorizationException()
self.aze.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('deactivate_result')
if self.e is not None:
oprot.writeFieldBegin('e', TType.STRUCT, 1)
self.e.write(oprot)
oprot.writeFieldEnd()
if self.aze is not None:
oprot.writeFieldBegin('aze', TType.STRUCT, 2)
self.aze.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(deactivate_result)
deactivate_result.thrift_spec = (
None, # 0
(1, TType.STRUCT, 'e', [NotAliveException, None], None, ), # 1
(2, TType.STRUCT, 'aze', [AuthorizationException, None], None, ), # 2
)
class rebalance_args(object):
"""
Attributes:
- name
- options
"""
def __init__(self, name=None, options=None,):
self.name = name
self.options = options
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.name = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.options = RebalanceOptions()
self.options.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('rebalance_args')
if self.name is not None:
oprot.writeFieldBegin('name', TType.STRING, 1)
oprot.writeString(self.name.encode('utf-8') if sys.version_info[0] == 2 else self.name)
oprot.writeFieldEnd()
if self.options is not None:
oprot.writeFieldBegin('options', TType.STRUCT, 2)
self.options.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(rebalance_args)
rebalance_args.thrift_spec = (
None, # 0
(1, TType.STRING, 'name', 'UTF8', None, ), # 1
(2, TType.STRUCT, 'options', [RebalanceOptions, None], None, ), # 2
)
class rebalance_result(object):
"""
Attributes:
- e
- ite
- aze
"""
def __init__(self, e=None, ite=None, aze=None,):
self.e = e
self.ite = ite
self.aze = aze
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.e = NotAliveException()
self.e.read(iprot)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.ite = InvalidTopologyException()
self.ite.read(iprot)
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.STRUCT:
self.aze = AuthorizationException()
self.aze.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('rebalance_result')
if self.e is not None:
oprot.writeFieldBegin('e', TType.STRUCT, 1)
self.e.write(oprot)
oprot.writeFieldEnd()
if self.ite is not None:
oprot.writeFieldBegin('ite', TType.STRUCT, 2)
self.ite.write(oprot)
oprot.writeFieldEnd()
if self.aze is not None:
oprot.writeFieldBegin('aze', TType.STRUCT, 3)
self.aze.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(rebalance_result)
rebalance_result.thrift_spec = (
None, # 0
(1, TType.STRUCT, 'e', [NotAliveException, None], None, ), # 1
(2, TType.STRUCT, 'ite', [InvalidTopologyException, None], None, ), # 2
(3, TType.STRUCT, 'aze', [AuthorizationException, None], None, ), # 3
)
class setLogConfig_args(object):
"""
Attributes:
- name
- config
"""
def __init__(self, name=None, config=None,):
self.name = name
self.config = config
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.name = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.config = LogConfig()
self.config.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('setLogConfig_args')
if self.name is not None:
oprot.writeFieldBegin('name', TType.STRING, 1)
oprot.writeString(self.name.encode('utf-8') if sys.version_info[0] == 2 else self.name)
oprot.writeFieldEnd()
if self.config is not None:
oprot.writeFieldBegin('config', TType.STRUCT, 2)
self.config.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(setLogConfig_args)
setLogConfig_args.thrift_spec = (
None, # 0
(1, TType.STRING, 'name', 'UTF8', None, ), # 1
(2, TType.STRUCT, 'config', [LogConfig, None], None, ), # 2
)
class setLogConfig_result(object):
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('setLogConfig_result')
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(setLogConfig_result)
setLogConfig_result.thrift_spec = (
)
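# Editorial note: setLogConfig is void and declares no exceptions, so its result struct
# is empty and its thrift_spec is an empty tuple; the struct still exists so the server
# can send a REPLY acknowledging that the call completed.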
class getLogConfig_args(object):
"""
Attributes:
- name
"""
def __init__(self, name=None,):
self.name = name
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.name = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('getLogConfig_args')
if self.name is not None:
oprot.writeFieldBegin('name', TType.STRING, 1)
oprot.writeString(self.name.encode('utf-8') if sys.version_info[0] == 2 else self.name)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(getLogConfig_args)
getLogConfig_args.thrift_spec = (
None, # 0
(1, TType.STRING, 'name', 'UTF8', None, ), # 1
)
class getLogConfig_result(object):
"""
Attributes:
- success
"""
def __init__(self, success=None,):
self.success = success
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRUCT:
self.success = LogConfig()
self.success.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('getLogConfig_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRUCT, 0)
self.success.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(getLogConfig_result)
getLogConfig_result.thrift_spec = (
(0, TType.STRUCT, 'success', [LogConfig, None], None, ), # 0
)
class debug_args(object):
"""
Attributes:
- name
- component
- enable
- samplingPercentage
"""
def __init__(self, name=None, component=None, enable=None, samplingPercentage=None,):
self.name = name
self.component = component
self.enable = enable
self.samplingPercentage = samplingPercentage
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.name = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRING:
self.component = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.BOOL:
self.enable = iprot.readBool()
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.DOUBLE:
self.samplingPercentage = iprot.readDouble()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('debug_args')
if self.name is not None:
oprot.writeFieldBegin('name', TType.STRING, 1)
oprot.writeString(self.name.encode('utf-8') if sys.version_info[0] == 2 else self.name)
oprot.writeFieldEnd()
if self.component is not None:
oprot.writeFieldBegin('component', TType.STRING, 2)
oprot.writeString(self.component.encode('utf-8') if sys.version_info[0] == 2 else self.component)
oprot.writeFieldEnd()
if self.enable is not None:
oprot.writeFieldBegin('enable', TType.BOOL, 3)
oprot.writeBool(self.enable)
oprot.writeFieldEnd()
if self.samplingPercentage is not None:
oprot.writeFieldBegin('samplingPercentage', TType.DOUBLE, 4)
oprot.writeDouble(self.samplingPercentage)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(debug_args)
debug_args.thrift_spec = (
None, # 0
(1, TType.STRING, 'name', 'UTF8', None, ), # 1
(2, TType.STRING, 'component', 'UTF8', None, ), # 2
(3, TType.BOOL, 'enable', None, None, ), # 3
(4, TType.DOUBLE, 'samplingPercentage', None, None, ), # 4
)
class debug_result(object):
"""
Attributes:
- e
- aze
"""
def __init__(self, e=None, aze=None,):
self.e = e
self.aze = aze
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.e = NotAliveException()
self.e.read(iprot)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.aze = AuthorizationException()
self.aze.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('debug_result')
if self.e is not None:
oprot.writeFieldBegin('e', TType.STRUCT, 1)
self.e.write(oprot)
oprot.writeFieldEnd()
if self.aze is not None:
oprot.writeFieldBegin('aze', TType.STRUCT, 2)
self.aze.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(debug_result)
debug_result.thrift_spec = (
None, # 0
(1, TType.STRUCT, 'e', [NotAliveException, None], None, ), # 1
(2, TType.STRUCT, 'aze', [AuthorizationException, None], None, ), # 2
)
class setWorkerProfiler_args(object):
"""
Attributes:
- id
- profileRequest
"""
def __init__(self, id=None, profileRequest=None,):
self.id = id
self.profileRequest = profileRequest
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.id = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.profileRequest = ProfileRequest()
self.profileRequest.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('setWorkerProfiler_args')
if self.id is not None:
oprot.writeFieldBegin('id', TType.STRING, 1)
oprot.writeString(self.id.encode('utf-8') if sys.version_info[0] == 2 else self.id)
oprot.writeFieldEnd()
if self.profileRequest is not None:
oprot.writeFieldBegin('profileRequest', TType.STRUCT, 2)
self.profileRequest.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(setWorkerProfiler_args)
setWorkerProfiler_args.thrift_spec = (
None, # 0
(1, TType.STRING, 'id', 'UTF8', None, ), # 1
(2, TType.STRUCT, 'profileRequest', [ProfileRequest, None], None, ), # 2
)
class setWorkerProfiler_result(object):
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('setWorkerProfiler_result')
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(setWorkerProfiler_result)
setWorkerProfiler_result.thrift_spec = (
)
class getComponentPendingProfileActions_args(object):
"""
Attributes:
- id
- component_id
- action
"""
def __init__(self, id=None, component_id=None, action=None,):
self.id = id
self.component_id = component_id
self.action = action
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.id = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRING:
self.component_id = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.I32:
self.action = iprot.readI32()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('getComponentPendingProfileActions_args')
if self.id is not None:
oprot.writeFieldBegin('id', TType.STRING, 1)
oprot.writeString(self.id.encode('utf-8') if sys.version_info[0] == 2 else self.id)
oprot.writeFieldEnd()
if self.component_id is not None:
oprot.writeFieldBegin('component_id', TType.STRING, 2)
oprot.writeString(self.component_id.encode('utf-8') if sys.version_info[0] == 2 else self.component_id)
oprot.writeFieldEnd()
if self.action is not None:
oprot.writeFieldBegin('action', TType.I32, 3)
oprot.writeI32(self.action)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(getComponentPendingProfileActions_args)
getComponentPendingProfileActions_args.thrift_spec = (
None, # 0
(1, TType.STRING, 'id', 'UTF8', None, ), # 1
(2, TType.STRING, 'component_id', 'UTF8', None, ), # 2
(3, TType.I32, 'action', None, None, ), # 3
)
class getComponentPendingProfileActions_result(object):
"""
Attributes:
- success
"""
def __init__(self, success=None,):
self.success = success
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.LIST:
self.success = []
(_etype869, _size866) = iprot.readListBegin()
for _i870 in range(_size866):
_elem871 = ProfileRequest()
_elem871.read(iprot)
self.success.append(_elem871)
iprot.readListEnd()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('getComponentPendingProfileActions_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.LIST, 0)
oprot.writeListBegin(TType.STRUCT, len(self.success))
for iter872 in self.success:
iter872.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(getComponentPendingProfileActions_result)
getComponentPendingProfileActions_result.thrift_spec = (
(0, TType.LIST, 'success', (TType.STRUCT, [ProfileRequest, None], False), None, ), # 0
)
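# getComponentPendingProfileActions_result carries a list<ProfileRequest> as its
# 'success' field.  The numbered temporaries (_etype869, _size866, _elem871, ...)
# are just the compiler's generated loop variables for reading the list element
# by element, and iter872 is the matching write-side loop variable.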
class uploadNewCredentials_args(object):
"""
Attributes:
- name
- creds
"""
def __init__(self, name=None, creds=None,):
self.name = name
self.creds = creds
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.name = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.creds = Credentials()
self.creds.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('uploadNewCredentials_args')
if self.name is not None:
oprot.writeFieldBegin('name', TType.STRING, 1)
oprot.writeString(self.name.encode('utf-8') if sys.version_info[0] == 2 else self.name)
oprot.writeFieldEnd()
if self.creds is not None:
oprot.writeFieldBegin('creds', TType.STRUCT, 2)
self.creds.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(uploadNewCredentials_args)
uploadNewCredentials_args.thrift_spec = (
None, # 0
(1, TType.STRING, 'name', 'UTF8', None, ), # 1
(2, TType.STRUCT, 'creds', [Credentials, None], None, ), # 2
)
class uploadNewCredentials_result(object):
"""
Attributes:
- e
- ite
- aze
"""
def __init__(self, e=None, ite=None, aze=None,):
self.e = e
self.ite = ite
self.aze = aze
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.e = NotAliveException()
self.e.read(iprot)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.ite = InvalidTopologyException()
self.ite.read(iprot)
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.STRUCT:
self.aze = AuthorizationException()
self.aze.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('uploadNewCredentials_result')
if self.e is not None:
oprot.writeFieldBegin('e', TType.STRUCT, 1)
self.e.write(oprot)
oprot.writeFieldEnd()
if self.ite is not None:
oprot.writeFieldBegin('ite', TType.STRUCT, 2)
self.ite.write(oprot)
oprot.writeFieldEnd()
if self.aze is not None:
oprot.writeFieldBegin('aze', TType.STRUCT, 3)
self.aze.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(uploadNewCredentials_result)
uploadNewCredentials_result.thrift_spec = (
None, # 0
(1, TType.STRUCT, 'e', [NotAliveException, None], None, ), # 1
(2, TType.STRUCT, 'ite', [InvalidTopologyException, None], None, ), # 2
(3, TType.STRUCT, 'aze', [AuthorizationException, None], None, ), # 3
)
class beginCreateBlob_args(object):
"""
Attributes:
- key
- meta
"""
def __init__(self, key=None, meta=None,):
self.key = key
self.meta = meta
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.key = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.meta = SettableBlobMeta()
self.meta.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('beginCreateBlob_args')
if self.key is not None:
oprot.writeFieldBegin('key', TType.STRING, 1)
oprot.writeString(self.key.encode('utf-8') if sys.version_info[0] == 2 else self.key)
oprot.writeFieldEnd()
if self.meta is not None:
oprot.writeFieldBegin('meta', TType.STRUCT, 2)
self.meta.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(beginCreateBlob_args)
beginCreateBlob_args.thrift_spec = (
None, # 0
(1, TType.STRING, 'key', 'UTF8', None, ), # 1
(2, TType.STRUCT, 'meta', [SettableBlobMeta, None], None, ), # 2
)
class beginCreateBlob_result(object):
"""
Attributes:
- success
- aze
- kae
"""
def __init__(self, success=None, aze=None, kae=None,):
self.success = success
self.aze = aze
self.kae = kae
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRING:
self.success = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
elif fid == 1:
if ftype == TType.STRUCT:
self.aze = AuthorizationException()
self.aze.read(iprot)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.kae = KeyAlreadyExistsException()
self.kae.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('beginCreateBlob_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRING, 0)
oprot.writeString(self.success.encode('utf-8') if sys.version_info[0] == 2 else self.success)
oprot.writeFieldEnd()
if self.aze is not None:
oprot.writeFieldBegin('aze', TType.STRUCT, 1)
self.aze.write(oprot)
oprot.writeFieldEnd()
if self.kae is not None:
oprot.writeFieldBegin('kae', TType.STRUCT, 2)
self.kae.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(beginCreateBlob_result)
beginCreateBlob_result.thrift_spec = (
(0, TType.STRING, 'success', 'UTF8', None, ), # 0
(1, TType.STRUCT, 'aze', [AuthorizationException, None], None, ), # 1
(2, TType.STRUCT, 'kae', [KeyAlreadyExistsException, None], None, ), # 2
)
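# The beginCreateBlob structs above, together with the upload structs that
# follow, back the blob-store creation flow: beginCreateBlob returns a session
# string, uploadBlobChunk streams bytes against that session, and
# finishBlobUpload (or cancelBlobUpload) closes it.  A minimal client-side
# sketch, assuming an already-connected service `client` and that each chunk
# fits the server's size limits (both assumptions, not stated in this file):
#
#     meta = SettableBlobMeta(acl=[])
#     session = client.beginCreateBlob('my-key', meta)
#     client.uploadBlobChunk(session, b'payload bytes')
#     client.finishBlobUpload(session)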
class beginUpdateBlob_args(object):
"""
Attributes:
- key
"""
def __init__(self, key=None,):
self.key = key
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.key = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('beginUpdateBlob_args')
if self.key is not None:
oprot.writeFieldBegin('key', TType.STRING, 1)
oprot.writeString(self.key.encode('utf-8') if sys.version_info[0] == 2 else self.key)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(beginUpdateBlob_args)
beginUpdateBlob_args.thrift_spec = (
None, # 0
(1, TType.STRING, 'key', 'UTF8', None, ), # 1
)
class beginUpdateBlob_result(object):
"""
Attributes:
- success
- aze
- knf
"""
def __init__(self, success=None, aze=None, knf=None,):
self.success = success
self.aze = aze
self.knf = knf
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRING:
self.success = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
elif fid == 1:
if ftype == TType.STRUCT:
self.aze = AuthorizationException()
self.aze.read(iprot)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.knf = KeyNotFoundException()
self.knf.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('beginUpdateBlob_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRING, 0)
oprot.writeString(self.success.encode('utf-8') if sys.version_info[0] == 2 else self.success)
oprot.writeFieldEnd()
if self.aze is not None:
oprot.writeFieldBegin('aze', TType.STRUCT, 1)
self.aze.write(oprot)
oprot.writeFieldEnd()
if self.knf is not None:
oprot.writeFieldBegin('knf', TType.STRUCT, 2)
self.knf.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(beginUpdateBlob_result)
beginUpdateBlob_result.thrift_spec = (
(0, TType.STRING, 'success', 'UTF8', None, ), # 0
(1, TType.STRUCT, 'aze', [AuthorizationException, None], None, ), # 1
(2, TType.STRUCT, 'knf', [KeyNotFoundException, None], None, ), # 2
)
class uploadBlobChunk_args(object):
"""
Attributes:
- session
- chunk
"""
def __init__(self, session=None, chunk=None,):
self.session = session
self.chunk = chunk
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.session = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRING:
self.chunk = iprot.readBinary()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('uploadBlobChunk_args')
if self.session is not None:
oprot.writeFieldBegin('session', TType.STRING, 1)
oprot.writeString(self.session.encode('utf-8') if sys.version_info[0] == 2 else self.session)
oprot.writeFieldEnd()
if self.chunk is not None:
oprot.writeFieldBegin('chunk', TType.STRING, 2)
oprot.writeBinary(self.chunk)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(uploadBlobChunk_args)
uploadBlobChunk_args.thrift_spec = (
None, # 0
(1, TType.STRING, 'session', 'UTF8', None, ), # 1
(2, TType.STRING, 'chunk', 'BINARY', None, ), # 2
)
class uploadBlobChunk_result(object):
"""
Attributes:
- aze
"""
def __init__(self, aze=None,):
self.aze = aze
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.aze = AuthorizationException()
self.aze.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('uploadBlobChunk_result')
if self.aze is not None:
oprot.writeFieldBegin('aze', TType.STRUCT, 1)
self.aze.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(uploadBlobChunk_result)
uploadBlobChunk_result.thrift_spec = (
None, # 0
(1, TType.STRUCT, 'aze', [AuthorizationException, None], None, ), # 1
)
class finishBlobUpload_args(object):
"""
Attributes:
- session
"""
def __init__(self, session=None,):
self.session = session
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.session = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('finishBlobUpload_args')
if self.session is not None:
oprot.writeFieldBegin('session', TType.STRING, 1)
oprot.writeString(self.session.encode('utf-8') if sys.version_info[0] == 2 else self.session)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(finishBlobUpload_args)
finishBlobUpload_args.thrift_spec = (
None, # 0
(1, TType.STRING, 'session', 'UTF8', None, ), # 1
)
class finishBlobUpload_result(object):
"""
Attributes:
- aze
"""
def __init__(self, aze=None,):
self.aze = aze
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.aze = AuthorizationException()
self.aze.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('finishBlobUpload_result')
if self.aze is not None:
oprot.writeFieldBegin('aze', TType.STRUCT, 1)
self.aze.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(finishBlobUpload_result)
finishBlobUpload_result.thrift_spec = (
None, # 0
(1, TType.STRUCT, 'aze', [AuthorizationException, None], None, ), # 1
)
class cancelBlobUpload_args(object):
"""
Attributes:
- session
"""
def __init__(self, session=None,):
self.session = session
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.session = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('cancelBlobUpload_args')
if self.session is not None:
oprot.writeFieldBegin('session', TType.STRING, 1)
oprot.writeString(self.session.encode('utf-8') if sys.version_info[0] == 2 else self.session)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(cancelBlobUpload_args)
cancelBlobUpload_args.thrift_spec = (
None, # 0
(1, TType.STRING, 'session', 'UTF8', None, ), # 1
)
class cancelBlobUpload_result(object):
"""
Attributes:
- aze
"""
def __init__(self, aze=None,):
self.aze = aze
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.aze = AuthorizationException()
self.aze.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('cancelBlobUpload_result')
if self.aze is not None:
oprot.writeFieldBegin('aze', TType.STRUCT, 1)
self.aze.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(cancelBlobUpload_result)
cancelBlobUpload_result.thrift_spec = (
None, # 0
(1, TType.STRUCT, 'aze', [AuthorizationException, None], None, ), # 1
)
class getBlobMeta_args(object):
"""
Attributes:
- key
"""
def __init__(self, key=None,):
self.key = key
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.key = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('getBlobMeta_args')
if self.key is not None:
oprot.writeFieldBegin('key', TType.STRING, 1)
oprot.writeString(self.key.encode('utf-8') if sys.version_info[0] == 2 else self.key)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(getBlobMeta_args)
getBlobMeta_args.thrift_spec = (
None, # 0
(1, TType.STRING, 'key', 'UTF8', None, ), # 1
)
class getBlobMeta_result(object):
"""
Attributes:
- success
- aze
- knf
"""
def __init__(self, success=None, aze=None, knf=None,):
self.success = success
self.aze = aze
self.knf = knf
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRUCT:
self.success = ReadableBlobMeta()
self.success.read(iprot)
else:
iprot.skip(ftype)
elif fid == 1:
if ftype == TType.STRUCT:
self.aze = AuthorizationException()
self.aze.read(iprot)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.knf = KeyNotFoundException()
self.knf.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('getBlobMeta_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRUCT, 0)
self.success.write(oprot)
oprot.writeFieldEnd()
if self.aze is not None:
oprot.writeFieldBegin('aze', TType.STRUCT, 1)
self.aze.write(oprot)
oprot.writeFieldEnd()
if self.knf is not None:
oprot.writeFieldBegin('knf', TType.STRUCT, 2)
self.knf.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(getBlobMeta_result)
getBlobMeta_result.thrift_spec = (
(0, TType.STRUCT, 'success', [ReadableBlobMeta, None], None, ), # 0
(1, TType.STRUCT, 'aze', [AuthorizationException, None], None, ), # 1
(2, TType.STRUCT, 'knf', [KeyNotFoundException, None], None, ), # 2
)
class setBlobMeta_args(object):
"""
Attributes:
- key
- meta
"""
def __init__(self, key=None, meta=None,):
self.key = key
self.meta = meta
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.key = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.meta = SettableBlobMeta()
self.meta.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('setBlobMeta_args')
if self.key is not None:
oprot.writeFieldBegin('key', TType.STRING, 1)
oprot.writeString(self.key.encode('utf-8') if sys.version_info[0] == 2 else self.key)
oprot.writeFieldEnd()
if self.meta is not None:
oprot.writeFieldBegin('meta', TType.STRUCT, 2)
self.meta.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(setBlobMeta_args)
setBlobMeta_args.thrift_spec = (
None, # 0
(1, TType.STRING, 'key', 'UTF8', None, ), # 1
(2, TType.STRUCT, 'meta', [SettableBlobMeta, None], None, ), # 2
)
class setBlobMeta_result(object):
"""
Attributes:
- aze
- knf
"""
def __init__(self, aze=None, knf=None,):
self.aze = aze
self.knf = knf
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.aze = AuthorizationException()
self.aze.read(iprot)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.knf = KeyNotFoundException()
self.knf.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('setBlobMeta_result')
if self.aze is not None:
oprot.writeFieldBegin('aze', TType.STRUCT, 1)
self.aze.write(oprot)
oprot.writeFieldEnd()
if self.knf is not None:
oprot.writeFieldBegin('knf', TType.STRUCT, 2)
self.knf.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(setBlobMeta_result)
setBlobMeta_result.thrift_spec = (
None, # 0
(1, TType.STRUCT, 'aze', [AuthorizationException, None], None, ), # 1
(2, TType.STRUCT, 'knf', [KeyNotFoundException, None], None, ), # 2
)
class beginBlobDownload_args(object):
"""
Attributes:
- key
"""
def __init__(self, key=None,):
self.key = key
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.key = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('beginBlobDownload_args')
if self.key is not None:
oprot.writeFieldBegin('key', TType.STRING, 1)
oprot.writeString(self.key.encode('utf-8') if sys.version_info[0] == 2 else self.key)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(beginBlobDownload_args)
beginBlobDownload_args.thrift_spec = (
None, # 0
(1, TType.STRING, 'key', 'UTF8', None, ), # 1
)
class beginBlobDownload_result(object):
"""
Attributes:
- success
- aze
- knf
"""
def __init__(self, success=None, aze=None, knf=None,):
self.success = success
self.aze = aze
self.knf = knf
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRUCT:
self.success = BeginDownloadResult()
self.success.read(iprot)
else:
iprot.skip(ftype)
elif fid == 1:
if ftype == TType.STRUCT:
self.aze = AuthorizationException()
self.aze.read(iprot)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.knf = KeyNotFoundException()
self.knf.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('beginBlobDownload_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRUCT, 0)
self.success.write(oprot)
oprot.writeFieldEnd()
if self.aze is not None:
oprot.writeFieldBegin('aze', TType.STRUCT, 1)
self.aze.write(oprot)
oprot.writeFieldEnd()
if self.knf is not None:
oprot.writeFieldBegin('knf', TType.STRUCT, 2)
self.knf.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(beginBlobDownload_result)
beginBlobDownload_result.thrift_spec = (
(0, TType.STRUCT, 'success', [BeginDownloadResult, None], None, ), # 0
(1, TType.STRUCT, 'aze', [AuthorizationException, None], None, ), # 1
(2, TType.STRUCT, 'knf', [KeyNotFoundException, None], None, ), # 2
)
class downloadBlobChunk_args(object):
"""
Attributes:
- session
"""
def __init__(self, session=None,):
self.session = session
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.session = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('downloadBlobChunk_args')
if self.session is not None:
oprot.writeFieldBegin('session', TType.STRING, 1)
oprot.writeString(self.session.encode('utf-8') if sys.version_info[0] == 2 else self.session)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(downloadBlobChunk_args)
downloadBlobChunk_args.thrift_spec = (
None, # 0
(1, TType.STRING, 'session', 'UTF8', None, ), # 1
)
class downloadBlobChunk_result(object):
"""
Attributes:
- success
- aze
"""
def __init__(self, success=None, aze=None,):
self.success = success
self.aze = aze
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRING:
self.success = iprot.readBinary()
else:
iprot.skip(ftype)
elif fid == 1:
if ftype == TType.STRUCT:
self.aze = AuthorizationException()
self.aze.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('downloadBlobChunk_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRING, 0)
oprot.writeBinary(self.success)
oprot.writeFieldEnd()
if self.aze is not None:
oprot.writeFieldBegin('aze', TType.STRUCT, 1)
self.aze.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(downloadBlobChunk_result)
downloadBlobChunk_result.thrift_spec = (
(0, TType.STRING, 'success', 'BINARY', None, ), # 0
(1, TType.STRUCT, 'aze', [AuthorizationException, None], None, ), # 1
)
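# The download side mirrors the upload flow above: beginBlobDownload returns a
# BeginDownloadResult holding a session id, and downloadBlobChunk is then called
# repeatedly with that session, returning raw bytes on each call.  A hedged
# sketch (it assumes the caller stops on an empty chunk, which is the usual
# convention but is not stated in this file, and that BeginDownloadResult
# exposes a `session` attribute):
#
#     dl = client.beginBlobDownload('my-key')
#     data = b''
#     while True:
#         chunk = client.downloadBlobChunk(dl.session)
#         if not chunk:
#             break
#         data += chunk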
class deleteBlob_args(object):
"""
Attributes:
- key
"""
def __init__(self, key=None,):
self.key = key
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.key = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('deleteBlob_args')
if self.key is not None:
oprot.writeFieldBegin('key', TType.STRING, 1)
oprot.writeString(self.key.encode('utf-8') if sys.version_info[0] == 2 else self.key)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(deleteBlob_args)
deleteBlob_args.thrift_spec = (
None, # 0
(1, TType.STRING, 'key', 'UTF8', None, ), # 1
)
class deleteBlob_result(object):
"""
Attributes:
- aze
- knf
- ise
"""
def __init__(self, aze=None, knf=None, ise=None,):
self.aze = aze
self.knf = knf
self.ise = ise
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.aze = AuthorizationException()
self.aze.read(iprot)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.knf = KeyNotFoundException()
self.knf.read(iprot)
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.STRUCT:
self.ise = IllegalStateException()
self.ise.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('deleteBlob_result')
if self.aze is not None:
oprot.writeFieldBegin('aze', TType.STRUCT, 1)
self.aze.write(oprot)
oprot.writeFieldEnd()
if self.knf is not None:
oprot.writeFieldBegin('knf', TType.STRUCT, 2)
self.knf.write(oprot)
oprot.writeFieldEnd()
if self.ise is not None:
oprot.writeFieldBegin('ise', TType.STRUCT, 3)
self.ise.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(deleteBlob_result)
deleteBlob_result.thrift_spec = (
None, # 0
(1, TType.STRUCT, 'aze', [AuthorizationException, None], None, ), # 1
(2, TType.STRUCT, 'knf', [KeyNotFoundException, None], None, ), # 2
(3, TType.STRUCT, 'ise', [IllegalStateException, None], None, ), # 3
)
class listBlobs_args(object):
"""
Attributes:
- session
"""
def __init__(self, session=None,):
self.session = session
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.session = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('listBlobs_args')
if self.session is not None:
oprot.writeFieldBegin('session', TType.STRING, 1)
oprot.writeString(self.session.encode('utf-8') if sys.version_info[0] == 2 else self.session)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(listBlobs_args)
listBlobs_args.thrift_spec = (
None, # 0
(1, TType.STRING, 'session', 'UTF8', None, ), # 1
)
class listBlobs_result(object):
"""
Attributes:
- success
"""
def __init__(self, success=None,):
self.success = success
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRUCT:
self.success = ListBlobsResult()
self.success.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('listBlobs_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRUCT, 0)
self.success.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(listBlobs_result)
listBlobs_result.thrift_spec = (
(0, TType.STRUCT, 'success', [ListBlobsResult, None], None, ), # 0
)
class getBlobReplication_args(object):
"""
Attributes:
- key
"""
def __init__(self, key=None,):
self.key = key
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.key = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('getBlobReplication_args')
if self.key is not None:
oprot.writeFieldBegin('key', TType.STRING, 1)
oprot.writeString(self.key.encode('utf-8') if sys.version_info[0] == 2 else self.key)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(getBlobReplication_args)
getBlobReplication_args.thrift_spec = (
None, # 0
(1, TType.STRING, 'key', 'UTF8', None, ), # 1
)
class getBlobReplication_result(object):
"""
Attributes:
- success
- aze
- knf
"""
def __init__(self, success=None, aze=None, knf=None,):
self.success = success
self.aze = aze
self.knf = knf
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.I32:
self.success = iprot.readI32()
else:
iprot.skip(ftype)
elif fid == 1:
if ftype == TType.STRUCT:
self.aze = AuthorizationException()
self.aze.read(iprot)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.knf = KeyNotFoundException()
self.knf.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('getBlobReplication_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.I32, 0)
oprot.writeI32(self.success)
oprot.writeFieldEnd()
if self.aze is not None:
oprot.writeFieldBegin('aze', TType.STRUCT, 1)
self.aze.write(oprot)
oprot.writeFieldEnd()
if self.knf is not None:
oprot.writeFieldBegin('knf', TType.STRUCT, 2)
self.knf.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(getBlobReplication_result)
getBlobReplication_result.thrift_spec = (
(0, TType.I32, 'success', None, None, ), # 0
(1, TType.STRUCT, 'aze', [AuthorizationException, None], None, ), # 1
(2, TType.STRUCT, 'knf', [KeyNotFoundException, None], None, ), # 2
)
class updateBlobReplication_args(object):
"""
Attributes:
- key
- replication
"""
def __init__(self, key=None, replication=None,):
self.key = key
self.replication = replication
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.key = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.I32:
self.replication = iprot.readI32()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('updateBlobReplication_args')
if self.key is not None:
oprot.writeFieldBegin('key', TType.STRING, 1)
oprot.writeString(self.key.encode('utf-8') if sys.version_info[0] == 2 else self.key)
oprot.writeFieldEnd()
if self.replication is not None:
oprot.writeFieldBegin('replication', TType.I32, 2)
oprot.writeI32(self.replication)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(updateBlobReplication_args)
updateBlobReplication_args.thrift_spec = (
None, # 0
(1, TType.STRING, 'key', 'UTF8', None, ), # 1
(2, TType.I32, 'replication', None, None, ), # 2
)
class updateBlobReplication_result(object):
"""
Attributes:
- success
- aze
- knf
"""
def __init__(self, success=None, aze=None, knf=None,):
self.success = success
self.aze = aze
self.knf = knf
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.I32:
self.success = iprot.readI32()
else:
iprot.skip(ftype)
elif fid == 1:
if ftype == TType.STRUCT:
self.aze = AuthorizationException()
self.aze.read(iprot)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.knf = KeyNotFoundException()
self.knf.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('updateBlobReplication_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.I32, 0)
oprot.writeI32(self.success)
oprot.writeFieldEnd()
if self.aze is not None:
oprot.writeFieldBegin('aze', TType.STRUCT, 1)
self.aze.write(oprot)
oprot.writeFieldEnd()
if self.knf is not None:
oprot.writeFieldBegin('knf', TType.STRUCT, 2)
self.knf.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(updateBlobReplication_result)
updateBlobReplication_result.thrift_spec = (
(0, TType.I32, 'success', None, None, ), # 0
(1, TType.STRUCT, 'aze', [AuthorizationException, None], None, ), # 1
(2, TType.STRUCT, 'knf', [KeyNotFoundException, None], None, ), # 2
)
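# getBlobReplication and updateBlobReplication both return an i32 'success'
# value, presumably the blob's current (respectively, newly effective)
# replication count; per the _result specs above, both can raise
# AuthorizationException or KeyNotFoundException.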
class createStateInZookeeper_args(object):
"""
Attributes:
- key
"""
def __init__(self, key=None,):
self.key = key
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.key = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('createStateInZookeeper_args')
if self.key is not None:
oprot.writeFieldBegin('key', TType.STRING, 1)
oprot.writeString(self.key.encode('utf-8') if sys.version_info[0] == 2 else self.key)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(createStateInZookeeper_args)
createStateInZookeeper_args.thrift_spec = (
None, # 0
(1, TType.STRING, 'key', 'UTF8', None, ), # 1
)
class createStateInZookeeper_result(object):
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('createStateInZookeeper_result')
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(createStateInZookeeper_result)
createStateInZookeeper_result.thrift_spec = (
)
class beginFileUpload_args(object):
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('beginFileUpload_args')
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(beginFileUpload_args)
beginFileUpload_args.thrift_spec = (
)
class beginFileUpload_result(object):
"""
Attributes:
- success
- aze
"""
def __init__(self, success=None, aze=None,):
self.success = success
self.aze = aze
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRING:
self.success = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
elif fid == 1:
if ftype == TType.STRUCT:
self.aze = AuthorizationException()
self.aze.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('beginFileUpload_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRING, 0)
oprot.writeString(self.success.encode('utf-8') if sys.version_info[0] == 2 else self.success)
oprot.writeFieldEnd()
if self.aze is not None:
oprot.writeFieldBegin('aze', TType.STRUCT, 1)
self.aze.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(beginFileUpload_result)
beginFileUpload_result.thrift_spec = (
(0, TType.STRING, 'success', 'UTF8', None, ), # 0
(1, TType.STRUCT, 'aze', [AuthorizationException, None], None, ), # 1
)
class uploadChunk_args(object):
"""
Attributes:
- location
- chunk
"""
def __init__(self, location=None, chunk=None,):
self.location = location
self.chunk = chunk
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.location = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRING:
self.chunk = iprot.readBinary()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('uploadChunk_args')
if self.location is not None:
oprot.writeFieldBegin('location', TType.STRING, 1)
oprot.writeString(self.location.encode('utf-8') if sys.version_info[0] == 2 else self.location)
oprot.writeFieldEnd()
if self.chunk is not None:
oprot.writeFieldBegin('chunk', TType.STRING, 2)
oprot.writeBinary(self.chunk)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(uploadChunk_args)
uploadChunk_args.thrift_spec = (
None, # 0
(1, TType.STRING, 'location', 'UTF8', None, ), # 1
(2, TType.STRING, 'chunk', 'BINARY', None, ), # 2
)
class uploadChunk_result(object):
"""
Attributes:
- aze
"""
def __init__(self, aze=None,):
self.aze = aze
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.aze = AuthorizationException()
self.aze.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('uploadChunk_result')
if self.aze is not None:
oprot.writeFieldBegin('aze', TType.STRUCT, 1)
self.aze.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(uploadChunk_result)
uploadChunk_result.thrift_spec = (
None, # 0
(1, TType.STRUCT, 'aze', [AuthorizationException, None], None, ), # 1
)
class finishFileUpload_args(object):
"""
Attributes:
- location
"""
def __init__(self, location=None,):
self.location = location
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.location = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('finishFileUpload_args')
if self.location is not None:
oprot.writeFieldBegin('location', TType.STRING, 1)
oprot.writeString(self.location.encode('utf-8') if sys.version_info[0] == 2 else self.location)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(finishFileUpload_args)
finishFileUpload_args.thrift_spec = (
None, # 0
(1, TType.STRING, 'location', 'UTF8', None, ), # 1
)
class finishFileUpload_result(object):
"""
Attributes:
- aze
"""
def __init__(self, aze=None,):
self.aze = aze
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.aze = AuthorizationException()
self.aze.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('finishFileUpload_result')
if self.aze is not None:
oprot.writeFieldBegin('aze', TType.STRUCT, 1)
self.aze.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(finishFileUpload_result)
finishFileUpload_result.thrift_spec = (
None, # 0
(1, TType.STRUCT, 'aze', [AuthorizationException, None], None, ), # 1
)
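# beginFileUpload / uploadChunk / finishFileUpload together form Nimbus' streamed blob
# upload: beginFileUpload returns an upload location string, uploadChunk sends raw binary
# chunks keyed by that location, and finishFileUpload closes it. An illustrative
# client-side loop (sketch only; 'client' is assumed to be a connected Nimbus.Client and
# 'jar_path' a local file path):
#
#   location = client.beginFileUpload()
#   with open(jar_path, 'rb') as f:
#       for chunk in iter(lambda: f.read(1024 * 1024), b''):
#           client.uploadChunk(location, chunk)
#   client.finishFileUpload(location)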
class downloadChunk_args(object):
"""
Attributes:
- id
"""
def __init__(self, id=None,):
self.id = id
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.id = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('downloadChunk_args')
if self.id is not None:
oprot.writeFieldBegin('id', TType.STRING, 1)
oprot.writeString(self.id.encode('utf-8') if sys.version_info[0] == 2 else self.id)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(downloadChunk_args)
downloadChunk_args.thrift_spec = (
None, # 0
(1, TType.STRING, 'id', 'UTF8', None, ), # 1
)
class downloadChunk_result(object):
"""
Attributes:
- success
- aze
"""
def __init__(self, success=None, aze=None,):
self.success = success
self.aze = aze
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRING:
self.success = iprot.readBinary()
else:
iprot.skip(ftype)
elif fid == 1:
if ftype == TType.STRUCT:
self.aze = AuthorizationException()
self.aze.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('downloadChunk_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRING, 0)
oprot.writeBinary(self.success)
oprot.writeFieldEnd()
if self.aze is not None:
oprot.writeFieldBegin('aze', TType.STRUCT, 1)
self.aze.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(downloadChunk_result)
downloadChunk_result.thrift_spec = (
(0, TType.STRING, 'success', 'BINARY', None, ), # 0
(1, TType.STRUCT, 'aze', [AuthorizationException, None], None, ), # 1
)
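# Binary fields share TType.STRING on the wire but are marked 'BINARY' rather than 'UTF8'
# in thrift_spec: downloadChunk_result.success is handled with readBinary()/writeBinary()
# and surfaces to the caller as bytes, whereas 'UTF8' fields are decoded to text
# (explicitly under Python 2, natively under Python 3).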
class getNimbusConf_args(object):
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('getNimbusConf_args')
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(getNimbusConf_args)
getNimbusConf_args.thrift_spec = (
)
class getNimbusConf_result(object):
"""
Attributes:
- success
- aze
"""
def __init__(self, success=None, aze=None,):
self.success = success
self.aze = aze
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRING:
self.success = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
elif fid == 1:
if ftype == TType.STRUCT:
self.aze = AuthorizationException()
self.aze.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('getNimbusConf_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRING, 0)
oprot.writeString(self.success.encode('utf-8') if sys.version_info[0] == 2 else self.success)
oprot.writeFieldEnd()
if self.aze is not None:
oprot.writeFieldBegin('aze', TType.STRUCT, 1)
self.aze.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(getNimbusConf_result)
getNimbusConf_result.thrift_spec = (
(0, TType.STRING, 'success', 'UTF8', None, ), # 0
(1, TType.STRUCT, 'aze', [AuthorizationException, None], None, ), # 1
)
class getClusterInfo_args(object):
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('getClusterInfo_args')
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(getClusterInfo_args)
getClusterInfo_args.thrift_spec = (
)
class getClusterInfo_result(object):
"""
Attributes:
- success
- aze
"""
def __init__(self, success=None, aze=None,):
self.success = success
self.aze = aze
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRUCT:
self.success = ClusterSummary()
self.success.read(iprot)
else:
iprot.skip(ftype)
elif fid == 1:
if ftype == TType.STRUCT:
self.aze = AuthorizationException()
self.aze.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('getClusterInfo_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRUCT, 0)
self.success.write(oprot)
oprot.writeFieldEnd()
if self.aze is not None:
oprot.writeFieldBegin('aze', TType.STRUCT, 1)
self.aze.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(getClusterInfo_result)
getClusterInfo_result.thrift_spec = (
(0, TType.STRUCT, 'success', [ClusterSummary, None], None, ), # 0
(1, TType.STRUCT, 'aze', [AuthorizationException, None], None, ), # 1
)
class getLeader_args(object):
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('getLeader_args')
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(getLeader_args)
getLeader_args.thrift_spec = (
)
class getLeader_result(object):
"""
Attributes:
- success
- aze
"""
def __init__(self, success=None, aze=None,):
self.success = success
self.aze = aze
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRUCT:
self.success = NimbusSummary()
self.success.read(iprot)
else:
iprot.skip(ftype)
elif fid == 1:
if ftype == TType.STRUCT:
self.aze = AuthorizationException()
self.aze.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('getLeader_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRUCT, 0)
self.success.write(oprot)
oprot.writeFieldEnd()
if self.aze is not None:
oprot.writeFieldBegin('aze', TType.STRUCT, 1)
self.aze.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(getLeader_result)
getLeader_result.thrift_spec = (
(0, TType.STRUCT, 'success', [NimbusSummary, None], None, ), # 0
(1, TType.STRUCT, 'aze', [AuthorizationException, None], None, ), # 1
)
class isTopologyNameAllowed_args(object):
"""
Attributes:
- name
"""
def __init__(self, name=None,):
self.name = name
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.name = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('isTopologyNameAllowed_args')
if self.name is not None:
oprot.writeFieldBegin('name', TType.STRING, 1)
oprot.writeString(self.name.encode('utf-8') if sys.version_info[0] == 2 else self.name)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(isTopologyNameAllowed_args)
isTopologyNameAllowed_args.thrift_spec = (
None, # 0
(1, TType.STRING, 'name', 'UTF8', None, ), # 1
)
class isTopologyNameAllowed_result(object):
"""
Attributes:
- success
- aze
"""
def __init__(self, success=None, aze=None,):
self.success = success
self.aze = aze
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.BOOL:
self.success = iprot.readBool()
else:
iprot.skip(ftype)
elif fid == 1:
if ftype == TType.STRUCT:
self.aze = AuthorizationException()
self.aze.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('isTopologyNameAllowed_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.BOOL, 0)
oprot.writeBool(self.success)
oprot.writeFieldEnd()
if self.aze is not None:
oprot.writeFieldBegin('aze', TType.STRUCT, 1)
self.aze.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(isTopologyNameAllowed_result)
isTopologyNameAllowed_result.thrift_spec = (
(0, TType.BOOL, 'success', None, None, ), # 0
(1, TType.STRUCT, 'aze', [AuthorizationException, None], None, ), # 1
)
class getTopologyInfo_args(object):
"""
Attributes:
- id
"""
def __init__(self, id=None,):
self.id = id
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.id = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('getTopologyInfo_args')
if self.id is not None:
oprot.writeFieldBegin('id', TType.STRING, 1)
oprot.writeString(self.id.encode('utf-8') if sys.version_info[0] == 2 else self.id)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(getTopologyInfo_args)
getTopologyInfo_args.thrift_spec = (
None, # 0
(1, TType.STRING, 'id', 'UTF8', None, ), # 1
)
class getTopologyInfo_result(object):
"""
Attributes:
- success
- e
- aze
"""
def __init__(self, success=None, e=None, aze=None,):
self.success = success
self.e = e
self.aze = aze
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRUCT:
self.success = TopologyInfo()
self.success.read(iprot)
else:
iprot.skip(ftype)
elif fid == 1:
if ftype == TType.STRUCT:
self.e = NotAliveException()
self.e.read(iprot)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.aze = AuthorizationException()
self.aze.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('getTopologyInfo_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRUCT, 0)
self.success.write(oprot)
oprot.writeFieldEnd()
if self.e is not None:
oprot.writeFieldBegin('e', TType.STRUCT, 1)
self.e.write(oprot)
oprot.writeFieldEnd()
if self.aze is not None:
oprot.writeFieldBegin('aze', TType.STRUCT, 2)
self.aze.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(getTopologyInfo_result)
getTopologyInfo_result.thrift_spec = (
(0, TType.STRUCT, 'success', [TopologyInfo, None], None, ), # 0
(1, TType.STRUCT, 'e', [NotAliveException, None], None, ), # 1
(2, TType.STRUCT, 'aze', [AuthorizationException, None], None, ), # 2
)
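# Result structs reserve field id 0 for the return value and ids >= 1 for declared
# exceptions; getTopologyInfo can come back with either a NotAliveException ('e', field 1)
# or an AuthorizationException ('aze', field 2), and the generated client raises whichever
# one is present in the received result.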
class getTopologyInfoWithOpts_args(object):
"""
Attributes:
- id
- options
"""
def __init__(self, id=None, options=None,):
self.id = id
self.options = options
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.id = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.options = GetInfoOptions()
self.options.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('getTopologyInfoWithOpts_args')
if self.id is not None:
oprot.writeFieldBegin('id', TType.STRING, 1)
oprot.writeString(self.id.encode('utf-8') if sys.version_info[0] == 2 else self.id)
oprot.writeFieldEnd()
if self.options is not None:
oprot.writeFieldBegin('options', TType.STRUCT, 2)
self.options.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(getTopologyInfoWithOpts_args)
getTopologyInfoWithOpts_args.thrift_spec = (
None, # 0
(1, TType.STRING, 'id', 'UTF8', None, ), # 1
(2, TType.STRUCT, 'options', [GetInfoOptions, None], None, ), # 2
)
class getTopologyInfoWithOpts_result(object):
"""
Attributes:
- success
- e
- aze
"""
def __init__(self, success=None, e=None, aze=None,):
self.success = success
self.e = e
self.aze = aze
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRUCT:
self.success = TopologyInfo()
self.success.read(iprot)
else:
iprot.skip(ftype)
elif fid == 1:
if ftype == TType.STRUCT:
self.e = NotAliveException()
self.e.read(iprot)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.aze = AuthorizationException()
self.aze.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('getTopologyInfoWithOpts_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRUCT, 0)
self.success.write(oprot)
oprot.writeFieldEnd()
if self.e is not None:
oprot.writeFieldBegin('e', TType.STRUCT, 1)
self.e.write(oprot)
oprot.writeFieldEnd()
if self.aze is not None:
oprot.writeFieldBegin('aze', TType.STRUCT, 2)
self.aze.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(getTopologyInfoWithOpts_result)
getTopologyInfoWithOpts_result.thrift_spec = (
(0, TType.STRUCT, 'success', [TopologyInfo, None], None, ), # 0
(1, TType.STRUCT, 'e', [NotAliveException, None], None, ), # 1
(2, TType.STRUCT, 'aze', [AuthorizationException, None], None, ), # 2
)
class getTopologyPageInfo_args(object):
"""
Attributes:
- id
- window
- is_include_sys
"""
def __init__(self, id=None, window=None, is_include_sys=None,):
self.id = id
self.window = window
self.is_include_sys = is_include_sys
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.id = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRING:
self.window = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.BOOL:
self.is_include_sys = iprot.readBool()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('getTopologyPageInfo_args')
if self.id is not None:
oprot.writeFieldBegin('id', TType.STRING, 1)
oprot.writeString(self.id.encode('utf-8') if sys.version_info[0] == 2 else self.id)
oprot.writeFieldEnd()
if self.window is not None:
oprot.writeFieldBegin('window', TType.STRING, 2)
oprot.writeString(self.window.encode('utf-8') if sys.version_info[0] == 2 else self.window)
oprot.writeFieldEnd()
if self.is_include_sys is not None:
oprot.writeFieldBegin('is_include_sys', TType.BOOL, 3)
oprot.writeBool(self.is_include_sys)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(getTopologyPageInfo_args)
getTopologyPageInfo_args.thrift_spec = (
None, # 0
(1, TType.STRING, 'id', 'UTF8', None, ), # 1
(2, TType.STRING, 'window', 'UTF8', None, ), # 2
(3, TType.BOOL, 'is_include_sys', None, None, ), # 3
)
class getTopologyPageInfo_result(object):
"""
Attributes:
- success
- e
- aze
"""
def __init__(self, success=None, e=None, aze=None,):
self.success = success
self.e = e
self.aze = aze
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRUCT:
self.success = TopologyPageInfo()
self.success.read(iprot)
else:
iprot.skip(ftype)
elif fid == 1:
if ftype == TType.STRUCT:
self.e = NotAliveException()
self.e.read(iprot)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.aze = AuthorizationException()
self.aze.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('getTopologyPageInfo_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRUCT, 0)
self.success.write(oprot)
oprot.writeFieldEnd()
if self.e is not None:
oprot.writeFieldBegin('e', TType.STRUCT, 1)
self.e.write(oprot)
oprot.writeFieldEnd()
if self.aze is not None:
oprot.writeFieldBegin('aze', TType.STRUCT, 2)
self.aze.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(getTopologyPageInfo_result)
getTopologyPageInfo_result.thrift_spec = (
(0, TType.STRUCT, 'success', [TopologyPageInfo, None], None, ), # 0
(1, TType.STRUCT, 'e', [NotAliveException, None], None, ), # 1
(2, TType.STRUCT, 'aze', [AuthorizationException, None], None, ), # 2
)
class getSupervisorPageInfo_args(object):
"""
Attributes:
- id
- host
- is_include_sys
"""
def __init__(self, id=None, host=None, is_include_sys=None,):
self.id = id
self.host = host
self.is_include_sys = is_include_sys
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.id = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRING:
self.host = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.BOOL:
self.is_include_sys = iprot.readBool()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('getSupervisorPageInfo_args')
if self.id is not None:
oprot.writeFieldBegin('id', TType.STRING, 1)
oprot.writeString(self.id.encode('utf-8') if sys.version_info[0] == 2 else self.id)
oprot.writeFieldEnd()
if self.host is not None:
oprot.writeFieldBegin('host', TType.STRING, 2)
oprot.writeString(self.host.encode('utf-8') if sys.version_info[0] == 2 else self.host)
oprot.writeFieldEnd()
if self.is_include_sys is not None:
oprot.writeFieldBegin('is_include_sys', TType.BOOL, 3)
oprot.writeBool(self.is_include_sys)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(getSupervisorPageInfo_args)
getSupervisorPageInfo_args.thrift_spec = (
None, # 0
(1, TType.STRING, 'id', 'UTF8', None, ), # 1
(2, TType.STRING, 'host', 'UTF8', None, ), # 2
(3, TType.BOOL, 'is_include_sys', None, None, ), # 3
)
class getSupervisorPageInfo_result(object):
"""
Attributes:
- success
- e
- aze
"""
def __init__(self, success=None, e=None, aze=None,):
self.success = success
self.e = e
self.aze = aze
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRUCT:
self.success = SupervisorPageInfo()
self.success.read(iprot)
else:
iprot.skip(ftype)
elif fid == 1:
if ftype == TType.STRUCT:
self.e = NotAliveException()
self.e.read(iprot)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.aze = AuthorizationException()
self.aze.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('getSupervisorPageInfo_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRUCT, 0)
self.success.write(oprot)
oprot.writeFieldEnd()
if self.e is not None:
oprot.writeFieldBegin('e', TType.STRUCT, 1)
self.e.write(oprot)
oprot.writeFieldEnd()
if self.aze is not None:
oprot.writeFieldBegin('aze', TType.STRUCT, 2)
self.aze.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(getSupervisorPageInfo_result)
getSupervisorPageInfo_result.thrift_spec = (
(0, TType.STRUCT, 'success', [SupervisorPageInfo, None], None, ), # 0
(1, TType.STRUCT, 'e', [NotAliveException, None], None, ), # 1
(2, TType.STRUCT, 'aze', [AuthorizationException, None], None, ), # 2
)
class getComponentPageInfo_args(object):
"""
Attributes:
- topology_id
- component_id
- window
- is_include_sys
"""
def __init__(self, topology_id=None, component_id=None, window=None, is_include_sys=None,):
self.topology_id = topology_id
self.component_id = component_id
self.window = window
self.is_include_sys = is_include_sys
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.topology_id = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRING:
self.component_id = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
elif fid == 3:
if ftype == TType.STRING:
self.window = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
elif fid == 4:
if ftype == TType.BOOL:
self.is_include_sys = iprot.readBool()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('getComponentPageInfo_args')
if self.topology_id is not None:
oprot.writeFieldBegin('topology_id', TType.STRING, 1)
oprot.writeString(self.topology_id.encode('utf-8') if sys.version_info[0] == 2 else self.topology_id)
oprot.writeFieldEnd()
if self.component_id is not None:
oprot.writeFieldBegin('component_id', TType.STRING, 2)
oprot.writeString(self.component_id.encode('utf-8') if sys.version_info[0] == 2 else self.component_id)
oprot.writeFieldEnd()
if self.window is not None:
oprot.writeFieldBegin('window', TType.STRING, 3)
oprot.writeString(self.window.encode('utf-8') if sys.version_info[0] == 2 else self.window)
oprot.writeFieldEnd()
if self.is_include_sys is not None:
oprot.writeFieldBegin('is_include_sys', TType.BOOL, 4)
oprot.writeBool(self.is_include_sys)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(getComponentPageInfo_args)
getComponentPageInfo_args.thrift_spec = (
None, # 0
(1, TType.STRING, 'topology_id', 'UTF8', None, ), # 1
(2, TType.STRING, 'component_id', 'UTF8', None, ), # 2
(3, TType.STRING, 'window', 'UTF8', None, ), # 3
(4, TType.BOOL, 'is_include_sys', None, None, ), # 4
)
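# The page-info queries (getTopologyPageInfo, getSupervisorPageInfo, getComponentPageInfo)
# take optional filters: 'window' selects a metrics time window and 'is_include_sys'
# controls whether system streams/components are included. Fields left as None are simply
# omitted, because write() only emits fields whose value is not None.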
class getComponentPageInfo_result(object):
"""
Attributes:
- success
- e
- aze
"""
def __init__(self, success=None, e=None, aze=None,):
self.success = success
self.e = e
self.aze = aze
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRUCT:
self.success = ComponentPageInfo()
self.success.read(iprot)
else:
iprot.skip(ftype)
elif fid == 1:
if ftype == TType.STRUCT:
self.e = NotAliveException()
self.e.read(iprot)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.aze = AuthorizationException()
self.aze.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('getComponentPageInfo_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRUCT, 0)
self.success.write(oprot)
oprot.writeFieldEnd()
if self.e is not None:
oprot.writeFieldBegin('e', TType.STRUCT, 1)
self.e.write(oprot)
oprot.writeFieldEnd()
if self.aze is not None:
oprot.writeFieldBegin('aze', TType.STRUCT, 2)
self.aze.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(getComponentPageInfo_result)
getComponentPageInfo_result.thrift_spec = (
(0, TType.STRUCT, 'success', [ComponentPageInfo, None], None, ), # 0
(1, TType.STRUCT, 'e', [NotAliveException, None], None, ), # 1
(2, TType.STRUCT, 'aze', [AuthorizationException, None], None, ), # 2
)
class getTopologyConf_args(object):
"""
Attributes:
- id
"""
def __init__(self, id=None,):
self.id = id
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.id = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('getTopologyConf_args')
if self.id is not None:
oprot.writeFieldBegin('id', TType.STRING, 1)
oprot.writeString(self.id.encode('utf-8') if sys.version_info[0] == 2 else self.id)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(getTopologyConf_args)
getTopologyConf_args.thrift_spec = (
None, # 0
(1, TType.STRING, 'id', 'UTF8', None, ), # 1
)
class getTopologyConf_result(object):
"""
Attributes:
- success
- e
- aze
"""
def __init__(self, success=None, e=None, aze=None,):
self.success = success
self.e = e
self.aze = aze
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRING:
self.success = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
elif fid == 1:
if ftype == TType.STRUCT:
self.e = NotAliveException()
self.e.read(iprot)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.aze = AuthorizationException()
self.aze.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('getTopologyConf_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRING, 0)
oprot.writeString(self.success.encode('utf-8') if sys.version_info[0] == 2 else self.success)
oprot.writeFieldEnd()
if self.e is not None:
oprot.writeFieldBegin('e', TType.STRUCT, 1)
self.e.write(oprot)
oprot.writeFieldEnd()
if self.aze is not None:
oprot.writeFieldBegin('aze', TType.STRUCT, 2)
self.aze.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(getTopologyConf_result)
getTopologyConf_result.thrift_spec = (
(0, TType.STRING, 'success', 'UTF8', None, ), # 0
(1, TType.STRUCT, 'e', [NotAliveException, None], None, ), # 1
(2, TType.STRUCT, 'aze', [AuthorizationException, None], None, ), # 2
)
class getTopology_args(object):
"""
Attributes:
- id
"""
def __init__(self, id=None,):
self.id = id
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.id = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('getTopology_args')
if self.id is not None:
oprot.writeFieldBegin('id', TType.STRING, 1)
oprot.writeString(self.id.encode('utf-8') if sys.version_info[0] == 2 else self.id)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(getTopology_args)
getTopology_args.thrift_spec = (
None, # 0
(1, TType.STRING, 'id', 'UTF8', None, ), # 1
)
class getTopology_result(object):
"""
Attributes:
- success
- e
- aze
"""
def __init__(self, success=None, e=None, aze=None,):
self.success = success
self.e = e
self.aze = aze
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRUCT:
self.success = StormTopology()
self.success.read(iprot)
else:
iprot.skip(ftype)
elif fid == 1:
if ftype == TType.STRUCT:
self.e = NotAliveException()
self.e.read(iprot)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.aze = AuthorizationException()
self.aze.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('getTopology_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRUCT, 0)
self.success.write(oprot)
oprot.writeFieldEnd()
if self.e is not None:
oprot.writeFieldBegin('e', TType.STRUCT, 1)
self.e.write(oprot)
oprot.writeFieldEnd()
if self.aze is not None:
oprot.writeFieldBegin('aze', TType.STRUCT, 2)
self.aze.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(getTopology_result)
getTopology_result.thrift_spec = (
(0, TType.STRUCT, 'success', [StormTopology, None], None, ), # 0
(1, TType.STRUCT, 'e', [NotAliveException, None], None, ), # 1
(2, TType.STRUCT, 'aze', [AuthorizationException, None], None, ), # 2
)
class getUserTopology_args(object):
"""
Attributes:
- id
"""
def __init__(self, id=None,):
self.id = id
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.id = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('getUserTopology_args')
if self.id is not None:
oprot.writeFieldBegin('id', TType.STRING, 1)
oprot.writeString(self.id.encode('utf-8') if sys.version_info[0] == 2 else self.id)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(getUserTopology_args)
getUserTopology_args.thrift_spec = (
None, # 0
(1, TType.STRING, 'id', 'UTF8', None, ), # 1
)
class getUserTopology_result(object):
"""
Attributes:
- success
- e
- aze
"""
def __init__(self, success=None, e=None, aze=None,):
self.success = success
self.e = e
self.aze = aze
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRUCT:
self.success = StormTopology()
self.success.read(iprot)
else:
iprot.skip(ftype)
elif fid == 1:
if ftype == TType.STRUCT:
self.e = NotAliveException()
self.e.read(iprot)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.aze = AuthorizationException()
self.aze.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('getUserTopology_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRUCT, 0)
self.success.write(oprot)
oprot.writeFieldEnd()
if self.e is not None:
oprot.writeFieldBegin('e', TType.STRUCT, 1)
self.e.write(oprot)
oprot.writeFieldEnd()
if self.aze is not None:
oprot.writeFieldBegin('aze', TType.STRUCT, 2)
self.aze.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(getUserTopology_result)
getUserTopology_result.thrift_spec = (
(0, TType.STRUCT, 'success', [StormTopology, None], None, ), # 0
(1, TType.STRUCT, 'e', [NotAliveException, None], None, ), # 1
(2, TType.STRUCT, 'aze', [AuthorizationException, None], None, ), # 2
)
class getTopologyHistory_args(object):
"""
Attributes:
- user
"""
def __init__(self, user=None,):
self.user = user
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.user = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('getTopologyHistory_args')
if self.user is not None:
oprot.writeFieldBegin('user', TType.STRING, 1)
oprot.writeString(self.user.encode('utf-8') if sys.version_info[0] == 2 else self.user)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(getTopologyHistory_args)
getTopologyHistory_args.thrift_spec = (
None, # 0
(1, TType.STRING, 'user', 'UTF8', None, ), # 1
)
class getTopologyHistory_result(object):
"""
Attributes:
- success
- aze
"""
def __init__(self, success=None, aze=None,):
self.success = success
self.aze = aze
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRUCT:
self.success = TopologyHistoryInfo()
self.success.read(iprot)
else:
iprot.skip(ftype)
elif fid == 1:
if ftype == TType.STRUCT:
self.aze = AuthorizationException()
self.aze.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('getTopologyHistory_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRUCT, 0)
self.success.write(oprot)
oprot.writeFieldEnd()
if self.aze is not None:
oprot.writeFieldBegin('aze', TType.STRUCT, 1)
self.aze.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(getTopologyHistory_result)
getTopologyHistory_result.thrift_spec = (
(0, TType.STRUCT, 'success', [TopologyHistoryInfo, None], None, ), # 0
(1, TType.STRUCT, 'aze', [AuthorizationException, None], None, ), # 1
)
class getOwnerResourceSummaries_args(object):
"""
Attributes:
- owner
"""
def __init__(self, owner=None,):
self.owner = owner
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.owner = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('getOwnerResourceSummaries_args')
if self.owner is not None:
oprot.writeFieldBegin('owner', TType.STRING, 1)
oprot.writeString(self.owner.encode('utf-8') if sys.version_info[0] == 2 else self.owner)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(getOwnerResourceSummaries_args)
getOwnerResourceSummaries_args.thrift_spec = (
None, # 0
(1, TType.STRING, 'owner', 'UTF8', None, ), # 1
)
class getOwnerResourceSummaries_result(object):
"""
Attributes:
- success
- aze
"""
def __init__(self, success=None, aze=None,):
self.success = success
self.aze = aze
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.LIST:
self.success = []
(_etype876, _size873) = iprot.readListBegin()
for _i877 in range(_size873):
_elem878 = OwnerResourceSummary()
_elem878.read(iprot)
self.success.append(_elem878)
iprot.readListEnd()
else:
iprot.skip(ftype)
elif fid == 1:
if ftype == TType.STRUCT:
self.aze = AuthorizationException()
self.aze.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('getOwnerResourceSummaries_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.LIST, 0)
oprot.writeListBegin(TType.STRUCT, len(self.success))
for iter879 in self.success:
iter879.write(oprot)
oprot.writeListEnd()
oprot.writeFieldEnd()
if self.aze is not None:
oprot.writeFieldBegin('aze', TType.STRUCT, 1)
self.aze.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(getOwnerResourceSummaries_result)
getOwnerResourceSummaries_result.thrift_spec = (
(0, TType.LIST, 'success', (TType.STRUCT, [OwnerResourceSummary, None], False), None, ), # 0
(1, TType.STRUCT, 'aze', [AuthorizationException, None], None, ), # 1
)
class getSupervisorAssignments_args(object):
"""
Attributes:
- node
"""
def __init__(self, node=None,):
self.node = node
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.node = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('getSupervisorAssignments_args')
if self.node is not None:
oprot.writeFieldBegin('node', TType.STRING, 1)
oprot.writeString(self.node.encode('utf-8') if sys.version_info[0] == 2 else self.node)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(getSupervisorAssignments_args)
getSupervisorAssignments_args.thrift_spec = (
None, # 0
(1, TType.STRING, 'node', 'UTF8', None, ), # 1
)
class getSupervisorAssignments_result(object):
"""
Attributes:
- success
- aze
"""
def __init__(self, success=None, aze=None,):
self.success = success
self.aze = aze
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRUCT:
self.success = SupervisorAssignments()
self.success.read(iprot)
else:
iprot.skip(ftype)
elif fid == 1:
if ftype == TType.STRUCT:
self.aze = AuthorizationException()
self.aze.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('getSupervisorAssignments_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRUCT, 0)
self.success.write(oprot)
oprot.writeFieldEnd()
if self.aze is not None:
oprot.writeFieldBegin('aze', TType.STRUCT, 1)
self.aze.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(getSupervisorAssignments_result)
getSupervisorAssignments_result.thrift_spec = (
(0, TType.STRUCT, 'success', [SupervisorAssignments, None], None, ), # 0
(1, TType.STRUCT, 'aze', [AuthorizationException, None], None, ), # 1
)
class sendSupervisorWorkerHeartbeats_args(object):
"""
Attributes:
- heartbeats
"""
def __init__(self, heartbeats=None,):
self.heartbeats = heartbeats
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.heartbeats = SupervisorWorkerHeartbeats()
self.heartbeats.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('sendSupervisorWorkerHeartbeats_args')
if self.heartbeats is not None:
oprot.writeFieldBegin('heartbeats', TType.STRUCT, 1)
self.heartbeats.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(sendSupervisorWorkerHeartbeats_args)
sendSupervisorWorkerHeartbeats_args.thrift_spec = (
None, # 0
(1, TType.STRUCT, 'heartbeats', [SupervisorWorkerHeartbeats, None], None, ), # 1
)
class sendSupervisorWorkerHeartbeats_result(object):
"""
Attributes:
- aze
"""
def __init__(self, aze=None,):
self.aze = aze
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.aze = AuthorizationException()
self.aze.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('sendSupervisorWorkerHeartbeats_result')
if self.aze is not None:
oprot.writeFieldBegin('aze', TType.STRUCT, 1)
self.aze.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(sendSupervisorWorkerHeartbeats_result)
sendSupervisorWorkerHeartbeats_result.thrift_spec = (
None, # 0
(1, TType.STRUCT, 'aze', [AuthorizationException, None], None, ), # 1
)
class sendSupervisorWorkerHeartbeat_args(object):
"""
Attributes:
- heatbeat
"""
def __init__(self, heatbeat=None,):
self.heatbeat = heatbeat
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.heatbeat = SupervisorWorkerHeartbeat()
self.heatbeat.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('sendSupervisorWorkerHeartbeat_args')
if self.heatbeat is not None:
oprot.writeFieldBegin('heatbeat', TType.STRUCT, 1)
self.heatbeat.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(sendSupervisorWorkerHeartbeat_args)
sendSupervisorWorkerHeartbeat_args.thrift_spec = (
None, # 0
(1, TType.STRUCT, 'heatbeat', [SupervisorWorkerHeartbeat, None], None, ), # 1
)
class sendSupervisorWorkerHeartbeat_result(object):
"""
Attributes:
- aze
- e
"""
def __init__(self, aze=None, e=None,):
self.aze = aze
self.e = e
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.aze = AuthorizationException()
self.aze.read(iprot)
else:
iprot.skip(ftype)
elif fid == 2:
if ftype == TType.STRUCT:
self.e = NotAliveException()
self.e.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('sendSupervisorWorkerHeartbeat_result')
if self.aze is not None:
oprot.writeFieldBegin('aze', TType.STRUCT, 1)
self.aze.write(oprot)
oprot.writeFieldEnd()
if self.e is not None:
oprot.writeFieldBegin('e', TType.STRUCT, 2)
self.e.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(sendSupervisorWorkerHeartbeat_result)
sendSupervisorWorkerHeartbeat_result.thrift_spec = (
None, # 0
(1, TType.STRUCT, 'aze', [AuthorizationException, None], None, ), # 1
(2, TType.STRUCT, 'e', [NotAliveException, None], None, ), # 2
)
class processWorkerMetrics_args(object):
"""
Attributes:
- metrics
"""
def __init__(self, metrics=None,):
self.metrics = metrics
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRUCT:
self.metrics = WorkerMetrics()
self.metrics.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('processWorkerMetrics_args')
if self.metrics is not None:
oprot.writeFieldBegin('metrics', TType.STRUCT, 1)
self.metrics.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(processWorkerMetrics_args)
processWorkerMetrics_args.thrift_spec = (
None, # 0
(1, TType.STRUCT, 'metrics', [WorkerMetrics, None], None, ), # 1
)
class processWorkerMetrics_result(object):
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('processWorkerMetrics_result')
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(processWorkerMetrics_result)
processWorkerMetrics_result.thrift_spec = (
)
class isRemoteBlobExists_args(object):
"""
Attributes:
- blobKey
"""
def __init__(self, blobKey=None,):
self.blobKey = blobKey
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.blobKey = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('isRemoteBlobExists_args')
if self.blobKey is not None:
oprot.writeFieldBegin('blobKey', TType.STRING, 1)
oprot.writeString(self.blobKey.encode('utf-8') if sys.version_info[0] == 2 else self.blobKey)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(isRemoteBlobExists_args)
isRemoteBlobExists_args.thrift_spec = (
None, # 0
(1, TType.STRING, 'blobKey', 'UTF8', None, ), # 1
)
class isRemoteBlobExists_result(object):
"""
Attributes:
- success
- aze
"""
def __init__(self, success=None, aze=None,):
self.success = success
self.aze = aze
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.BOOL:
self.success = iprot.readBool()
else:
iprot.skip(ftype)
elif fid == 1:
if ftype == TType.STRUCT:
self.aze = AuthorizationException()
self.aze.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('isRemoteBlobExists_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.BOOL, 0)
oprot.writeBool(self.success)
oprot.writeFieldEnd()
if self.aze is not None:
oprot.writeFieldBegin('aze', TType.STRUCT, 1)
self.aze.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(isRemoteBlobExists_result)
isRemoteBlobExists_result.thrift_spec = (
(0, TType.BOOL, 'success', None, None, ), # 0
(1, TType.STRUCT, 'aze', [AuthorizationException, None], None, ), # 1
)
fix_spec(all_structs)
del all_structs
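# Illustrative sketch (not part of the generated service code): with the standard
# Thrift Python runtime, one of these structs can be round-tripped by hand for
# debugging roughly as follows. The topology id value is made up.
#
#   from thrift.transport import TTransport
#   from thrift.protocol import TBinaryProtocol
#
#   buf = TTransport.TMemoryBuffer()
#   getUserTopology_args(id='my-topology').write(TBinaryProtocol.TBinaryProtocol(buf))
#
#   reader = TBinaryProtocol.TBinaryProtocol(TTransport.TMemoryBuffer(buf.getvalue()))
#   parsed = getUserTopology_args()
#   parsed.read(reader)
#   assert parsed.id == 'my-topology'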
| apache-2.0 |
matthewbauer/wesnoth | data/tools/unit_tree/overview.py | 20 | 6049 | #!/usr/bin/env python
import glob, os, sys, time, re
sys.path.append(os.path.join(os.path.dirname(__file__), ".."))
import html_output
def write_addon_overview(folder, addon):
out = open(os.path.join(folder, "index.html"), "wb")
def w(x): out.write(x.encode("utf8") + "\n")
name = addon["name"]
path = "../"
title = name + " Overview"
generation_note = "generated on " + time.ctime()
w(html_output.html_header % locals())
w(html_output.top_bar % locals())
w('<div class="overview">')
eras = addon.get("eras", [])
w("<h2>" + name + "</h2>")
if eras:
w("<h3>Eras</h3><ul>")
for era in eras:
epath = os.path.join("en_US", era["id"] + ".html")
w('<li><a href="' + epath + '">' + era["name"] + '</a></li>')
w("</ul>")
campaigns = addon.get("campaigns", [])
if campaigns:
w("<h3>Campaigns</h3><ul>")
for campaign in campaigns:
cpath = os.path.join("en_US", campaign["id"] + ".html")
w('<li><a href="' + cpath + '">' + campaign["name"] + '</a></li>')
w("</ul>")
w("<div>")
if os.path.exists(os.path.join(folder, "error.log")):
w('<p><b>Warnings or errors were found: <a href="error.html">log</a></b></p>')
w('<p><a href="../overview.html">back to overview</a></p>')
w("</div>")
w('</div> <!-- overview -->')
w(html_output.html_footer % locals())
def main(folder):
out = open(os.path.join(folder, "overview.html"), "wb")
def w(x): out.write(x.encode("utf8") + "\n")
path = ""
title = "Wesnoth Unit Database Overview"
generation_note = "generated on " + time.ctime()
w(html_output.html_header % locals())
w(html_output.top_bar % locals())
w('<div class="overview">')
w('<table class="overview">')
w("<tr><th>")
w("Addon")
w("</th><th>")
w("Output Files")
w("</th><th>")
w("Error Log")
w("</th></tr>")
count = 0
total_n = 0
total_error_logs = 0
total_lines = 0
for f in sorted(glob.glob(os.path.join(folder, "*"))):
if not os.path.isdir(f): continue
if f.endswith("/pics"): continue
error_log = os.path.abspath(os.path.join(f, "error.log"))
error_html = os.path.abspath(os.path.join(f, "error.html"))
try:
n = len(os.listdir(os.path.join(f, "en_US")))
except OSError:
n = 0
total_n += n
name = f[len(folder):].lstrip("/")
error_name = os.path.join(name, "error.html")
w('<tr><td>')
w('<a href="' + os.path.join(name, "index.html") + '">' + name + '</a>')
w('</td><td>')
w(str(n))
w('</td><td>')
if os.path.exists(error_log):
text = open(error_log).read()
error_kind = "warnings"
if "<INTERNAL ERROR>" in text:
error_kind = "internal error"
elif "<WML ERROR>" in text:
error_kind = "wml error"
elif "<PARSE ERROR>" in text:
error_kind = "parse error"
elif "<TIMEOUT ERROR>" in text:
error_kind = "timeout"
source = []
def postprocess(line):
if line == "WMLError:": return ""
if line == "?": return ""
if line == "Preprocessor error:": return ""
if line.startswith("Automatically found a possible data directory"): return ""
if line.startswith("Overriding data directory with"): return ""
if line == "'SKIP_CORE' defined.": return ""
if re.match("added .* defines.", line): return ""
if line.startswith("skipped 'data/core'"): return ""
if line.startswith("preprocessing specified resource:"): return ""
mo = re.match(r"\d+ /tmp(?:/wmlparser_.*?/|/)(.*\.cfg).*", line)
if mo:
source.append("/tmp/" + mo.group(1))
return ""
mo = re.match(".*--preprocess-defines(.*)", line)
if mo: return "Defines: " + mo.group(1) + "<br />"
for s in source:
line = line.replace(s, "WML")
line = line.replace("included from WML:1", "")
rows = line.replace("included from", "\n included from").splitlines()
out = ""
for row in rows:
row = row.strip()
out += row + "<br />"
return out
htmlerr = open(error_html, "w")
htmlerr.write("<html><body>")
lines_count = 0
for line in text.splitlines():
line = line.strip()
if line in ["<INTERNAL ERROR>", "<WML ERROR>", "<PARSE ERROR>", "<TIMEOUT ERROR>"]:
htmlerr.write("<p>")
elif line in ["</INTERNAL ERROR>", "</WML ERROR>", "</PARSE ERROR>", "</TIMEOUT ERROR>"]:
htmlerr.write("</p>")
else:
err_html = postprocess(line)
lines_count += err_html.count("<br")
htmlerr.write(err_html)
htmlerr.write("</body></html>")
total_lines += lines_count
total_error_logs += 1
w('<a class="error" href="%s">%s (%d lines)</a>' % (error_name, error_kind, lines_count))
w("</td></tr>")
count += 1
w("<tr><td>")
w("Total (for %d addons):" % count)
w("</td><td>")
w(str(total_n))
w("</td><td>")
w(str(total_error_logs) + " (" + str(total_lines) + " lines)")
w("</td></tr>")
w("</table>")
w('</div> <!-- overview -->')
w(html_output.html_footer % locals())
if __name__ == "__main__":
main(sys.argv[1])
| gpl-2.0 |
pombredanne/pants | contrib/android/src/python/pants/contrib/android/tasks/aapt_gen.py | 4 | 8374 | # coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import logging
import os
import subprocess
from pants.backend.jvm.targets.jar_library import JarLibrary
from pants.backend.jvm.targets.java_library import JavaLibrary
from pants.base.build_environment import get_buildroot
from pants.base.exceptions import TaskError
from pants.base.workunit import WorkUnitLabel
from pants.build_graph.address import Address
from pants.java.jar.jar_dependency import JarDependency
from pants.util.dirutil import safe_mkdir
from pants.contrib.android.targets.android_library import AndroidLibrary
from pants.contrib.android.targets.android_resources import AndroidResources
from pants.contrib.android.tasks.aapt_task import AaptTask
logger = logging.getLogger(__name__)
class AaptGen(AaptTask):
"""Process resources for Android targets with the Android Asset Packaging Tool (aapt).
The aapt tool supports 6 major commands: [dump, list, add, remove, crunch, package]
For now, pants supports only the 'package' command.
Commands and flags for aapt can be seen here:
https://android.googlesource.com/platform/frameworks/base/+/master/tools/aapt/Command.cpp
The resources are processed against a set of APIs found in the android.jar that corresponds to
the target's target_sdk. AndroidBinary files must declare a target_sdk in their manifest.
AndroidLibrary targets are processed with the target_sdk of the dependee AndroidBinary.
An AndroidLibrary will need to be processed once for every target_sdk that it supports.
Each AndroidLibrary is processed individually. AndroidBinary targets are processed along with
all of the AndroidLibrary targets in its transitive closure. The output of an AaptGen invocation
is an R.java file that allows programmatic access to resources, one each for all AndroidBinary
and AndroidLibrary targets.
"""
@classmethod
def _relative_genfile(cls, target):
"""Name of the file produced by aapt."""
return os.path.join(cls.package_path(target.manifest.package_name), 'R.java')
@classmethod
def prepare(cls, options, round_manager):
super(AaptGen, cls).prepare(options, round_manager)
round_manager.require_data('unpacked_libraries')
def __init__(self, *args, **kwargs):
super(AaptGen, self).__init__(*args, **kwargs)
self._jar_library_by_sdk = {}
self._created_library_targets = {}
def create_sdk_jar_deps(self, binaries):
"""Create a JarLibrary target for every sdk in play.
:param list binaries: A list of AndroidBinary targets.
"""
# Prepare exactly N android jar targets where N is the number of SDKs in-play.
for binary in binaries:
sdk = binary.target_sdk
if sdk not in self._jar_library_by_sdk:
jar_url = 'file://{0}'.format(self.android_jar(binary))
jar = JarDependency(org='com.google', name='android', rev=sdk, url=jar_url)
address = Address(os.path.relpath(self.workdir, get_buildroot()),
'android-{0}.jar'.format(sdk))
self._jar_library_by_sdk[sdk] = self.context.add_new_target(address, JarLibrary, jars=[jar])
binary.inject_dependency(self._jar_library_by_sdk[sdk].address)
def _render_args(self, binary, manifest, resource_dirs):
"""Compute the args that will be passed to the aapt tool.
:param AndroidBinary binary: The target that depends on the processed resources.
:param AndroidManifest manifest: Manifest of the target that owns the resources.
:param list resource_dirs: List of resource_dirs to include in this invocation of the aapt tool.
"""
# Glossary of used aapt flags.
# : 'package' is the main aapt operation (see class docstring for more info).
# : '-m' is to "make" a package directory under location '-J'.
# : '-J' Points to the output directory.
# : '-M' is the AndroidManifest.xml of the project.
# : '--auto-add-overlay' automatically add resources that are only in overlays.
# : '-S' points to each dir in resource_dirs, aapt 'scans' them in order while
# collecting resources (resource priority is left -> right).
# : '-I' packages to add to base 'include' set, here it is the android.jar of the target sdk.
# : '--ignore-assets' the aapt tool will disregard any files matching that pattern.
args = [self.aapt_tool(binary)]
args.extend(['package', '-m', '-J', self.aapt_out(binary)])
args.extend(['-M', manifest.path])
args.append('--auto-add-overlay')
for resource_dir in resource_dirs:
args.extend(['-S', resource_dir])
args.extend(['-I', self.android_jar(binary)])
args.extend(['--ignore-assets', self.ignored_assets])
logger.debug('Executing: {0}'.format(' '.join(args)))
return args
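# Illustrative example (hypothetical paths, not captured from a real run): for a
# binary whose target_sdk is 19 and two resource dirs, the rendered command is
# along the lines of:
#   <sdk>/build-tools/aapt package -m -J <workdir>/19 -M src/main/AndroidManifest.xml \
#       --auto-add-overlay -S src/main/res -S lib/res \
#       -I <sdk>/platforms/android-19/android.jar --ignore-assets <ignored_assets pattern>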
def execute(self):
# The number of R.java files produced from each library is == |sdks in play for its dependees|.
# The number of R.java files produced for each android_binary == |android_library deps| + 1
binaries = self.context.targets(self.is_android_binary)
self.create_sdk_jar_deps(binaries)
for binary in binaries:
# TODO(mateo) add invalidation framework. Adding it here doesn't work right now because the
# framework cannot differentiate between the same library being compiled against multiple SDKs.
gentargets = [binary]
def gather_gentargets(tgt):
"""Gather all AndroidLibrary targets that have a manifest."""
if isinstance(tgt, AndroidLibrary) and tgt.manifest:
gentargets.append(tgt)
binary.walk(gather_gentargets)
for gen in gentargets:
aapt_output = self._relative_genfile(gen)
aapt_file = os.path.join(self.aapt_out(binary), aapt_output)
resource_deps = self.context.build_graph.transitive_subgraph_of_addresses([gen.address])
resource_dirs = [t.resource_dir for t in resource_deps if isinstance(t, AndroidResources)]
if resource_dirs:
if aapt_file not in self._created_library_targets:
# Priority for resources is left->right, so dependency order matters (see TODO in aapt_builder).
args = self._render_args(binary, gen.manifest, resource_dirs)
with self.context.new_workunit(name='aaptgen', labels=[WorkUnitLabel.MULTITOOL]) as workunit:
returncode = subprocess.call(args,
stdout=workunit.output('stdout'),
stderr=workunit.output('stderr'))
if returncode:
raise TaskError('The AaptGen process exited non-zero: {}'.format(returncode))
new_target = self.create_target(binary, gen)
self._created_library_targets[aapt_file] = new_target
gen.inject_dependency(self._created_library_targets[aapt_file].address)
def create_target(self, binary, gentarget):
"""Create a JavaLibrary target for the R.java files created by the aapt tool.
:param AndroidBinary binary: AndroidBinary target whose target_sdk is used.
:param AndroidTarget gentarget: AndroidBinary or Library that owns the processed resources.
:returns new_target: Synthetic target for the R.java output of the aapt tool.
:rtype: :class:`pants.backend.jvm.targets.java_library.JavaLibrary`
"""
spec_path = os.path.join(os.path.relpath(self.aapt_out(binary), get_buildroot()))
address = Address(spec_path=spec_path, target_name=gentarget.id)
new_target = self.context.add_new_target(address,
JavaLibrary,
derived_from=gentarget,
sources=[self._relative_genfile(gentarget)],
dependencies=[])
return new_target
def aapt_out(self, binary):
"""Location for the output of an aapt invocation.
:param AndroidBinary binary: AndroidBinary target that depends upon the aapt output.
:returns outdir: full path of output directory
:rtype: string
"""
outdir = os.path.join(self.workdir, binary.target_sdk)
safe_mkdir(outdir)
return outdir
| apache-2.0 |
RongxinZhang/bitcoinxt | qa/rpc-tests/bipdersig-p2p.py | 20 | 6588 | #!/usr/bin/env python2
#
# Distributed under the MIT/X11 software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
from test_framework.test_framework import ComparisonTestFramework
from test_framework.util import *
from test_framework.mininode import CTransaction, NetworkThread
from test_framework.blocktools import create_coinbase, create_block
from test_framework.comptool import TestInstance, TestManager
from test_framework.script import CScript
from binascii import hexlify, unhexlify
import cStringIO
import time
# A canonical signature consists of:
# <30> <total len> <02> <len R> <R> <02> <len S> <S> <hashtype>
def unDERify(tx):
'''
Make the signature in vin 0 of a tx non-DER-compliant,
by adding padding after the S-value.
'''
scriptSig = CScript(tx.vin[0].scriptSig)
newscript = []
for i in scriptSig:
if (len(newscript) == 0):
newscript.append(i[0:-1] + '\0' + i[-1])
else:
newscript.append(i)
tx.vin[0].scriptSig = CScript(newscript)
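# Illustrative sketch (byte values are made up): a strictly DER-encoded signature
# push looks like
#   30 44 02 20 <32-byte R> 02 20 <32-byte S> 01
# where the trailing 01 is the SIGHASH_ALL hashtype byte. unDERify() inserts a
# 0x00 byte between S and the hashtype without updating the 0x44 length byte:
#   30 44 02 20 <32-byte R> 02 20 <32-byte S> 00 01
# which a BIP66-enforcing node must reject once the rule is active.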
'''
This test is meant to exercise BIP66 (DER SIG).
Connect to a single node.
Mine 2 (version 2) blocks (save the coinbases for later).
Generate 98 more version 2 blocks, verify the node accepts.
Mine 74 version 3 blocks, verify the node accepts.
Check that the new DERSIG rules are not enforced on the 75th version 3 block.
Check that the new DERSIG rules are enforced on the 76th version 3 block.
Mine 19 new version blocks.
Mine 1 old-version block.
Mine 1 new version block.
Mine 1 old version block, see that the node rejects.
'''
class BIP66Test(ComparisonTestFramework):
def __init__(self):
self.num_nodes = 1
def setup_network(self):
# Must set the blockversion for this test
self.nodes = start_nodes(1, self.options.tmpdir,
extra_args=[['-debug', '-whitelist=127.0.0.1', '-blockversion=2']],
binary=[self.options.testbinary])
def run_test(self):
test = TestManager(self, self.options.tmpdir)
test.add_all_connections(self.nodes)
NetworkThread().start() # Start up network handling in another thread
test.run()
def create_transaction(self, node, coinbase, to_address, amount):
from_txid = node.getblock(coinbase)['tx'][0]
inputs = [{ "txid" : from_txid, "vout" : 0}]
outputs = { to_address : amount }
rawtx = node.createrawtransaction(inputs, outputs)
signresult = node.signrawtransaction(rawtx)
tx = CTransaction()
f = cStringIO.StringIO(unhexlify(signresult['hex']))
tx.deserialize(f)
return tx
def get_tests(self):
self.coinbase_blocks = self.nodes[0].generate(2)
self.tip = int ("0x" + self.nodes[0].getbestblockhash() + "L", 0)
self.nodeaddress = self.nodes[0].getnewaddress()
self.last_block_time = time.time()
''' 98 more version 2 blocks '''
test_blocks = []
for i in xrange(98):
block = create_block(self.tip, create_coinbase(2), self.last_block_time + 1)
block.nVersion = 2
block.rehash()
block.solve()
test_blocks.append([block, True])
self.last_block_time += 1
self.tip = block.sha256
yield TestInstance(test_blocks, sync_every_block=False)
''' Mine 74 version 3 blocks '''
test_blocks = []
for i in xrange(74):
block = create_block(self.tip, create_coinbase(2), self.last_block_time + 1)
block.nVersion = 3
block.rehash()
block.solve()
test_blocks.append([block, True])
self.last_block_time += 1
self.tip = block.sha256
yield TestInstance(test_blocks, sync_every_block=False)
'''
Check that the new DERSIG rules are not enforced in the 75th
version 3 block.
'''
spendtx = self.create_transaction(self.nodes[0],
self.coinbase_blocks[0], self.nodeaddress, 1.0)
unDERify(spendtx)
spendtx.rehash()
block = create_block(self.tip, create_coinbase(2), self.last_block_time + 1)
block.nVersion = 3
block.vtx.append(spendtx)
block.hashMerkleRoot = block.calc_merkle_root()
block.rehash()
block.solve()
self.last_block_time += 1
self.tip = block.sha256
yield TestInstance([[block, True]])
'''
Check that the new DERSIG rules are enforced in the 76th version 3
block.
'''
spendtx = self.create_transaction(self.nodes[0],
self.coinbase_blocks[1], self.nodeaddress, 1.0)
unDERify(spendtx)
spendtx.rehash()
block = create_block(self.tip, create_coinbase(1), self.last_block_time + 1)
block.nVersion = 3
block.vtx.append(spendtx)
block.hashMerkleRoot = block.calc_merkle_root()
block.rehash()
block.solve()
self.last_block_time += 1
yield TestInstance([[block, False]])
''' Mine 19 new version blocks on last valid tip '''
test_blocks = []
for i in xrange(19):
block = create_block(self.tip, create_coinbase(1), self.last_block_time + 1)
block.nVersion = 3
block.rehash()
block.solve()
test_blocks.append([block, True])
self.last_block_time += 1
self.tip = block.sha256
yield TestInstance(test_blocks, sync_every_block=False)
''' Mine 1 old version block '''
block = create_block(self.tip, create_coinbase(1), self.last_block_time + 1)
block.nVersion = 2
block.rehash()
block.solve()
self.last_block_time += 1
self.tip = block.sha256
yield TestInstance([[block, True]])
''' Mine 1 new version block '''
block = create_block(self.tip, create_coinbase(1), self.last_block_time + 1)
block.nVersion = 3
block.rehash()
block.solve()
self.last_block_time += 1
self.tip = block.sha256
yield TestInstance([[block, True]])
''' Mine 1 old version block, should be invalid '''
block = create_block(self.tip, create_coinbase(1), self.last_block_time + 1)
block.nVersion = 2
block.rehash()
block.solve()
self.last_block_time += 1
yield TestInstance([[block, False]])
if __name__ == '__main__':
BIP66Test().main()
| mit |
corvorepack/REPOIVAN | plugin.video.movie.ultra.7k/resources/regex/shidurlive.py | 2 | 3960 | # -*- coding: utf-8 -*-
#------------------------------------------------------------
# Movie Ultra 7K regex for Shidurlive
# Version 0.1 (15.10.2014)
#------------------------------------------------------------
# License: GPL (http://www.gnu.org/licenses/gpl-3.0.html)
# Thanks to the plugintools library by Jesús (www.mimediacenter.info)
import os
import sys
import urllib
import urllib2
import re
import shutil
import zipfile
import time
import xbmc
import xbmcgui
import xbmcaddon
import xbmcplugin
import plugintools
import json
addonName = xbmcaddon.Addon().getAddonInfo("name")
addonVersion = xbmcaddon.Addon().getAddonInfo("version")
addonId = xbmcaddon.Addon().getAddonInfo("id")
addonPath = xbmcaddon.Addon().getAddonInfo("path")
# Function that drives the process of building the original URL
def shidurlive(params):
plugintools.log("[movie.ultra.7k-0.3.0].shidurlive "+repr(params))
url_user = {}
# Build the dictionary...
url = params.get("url")
url_extracted = url.split(" ")
for entry in url_extracted:
if entry.startswith("rtmp"):
entry = entry.replace("rtmp=", "")
url_user["rtmp"]=entry
elif entry.startswith("playpath"):
entry = entry.replace("playpath=", "")
url_user["playpath"]=entry
elif entry.startswith("swfUrl"):
entry = entry.replace("swfUrl=", "")
url_user["swfurl"]=entry
elif entry.startswith("pageUrl"):
entry = entry.replace("pageUrl=", "")
url_user["pageurl"]=entry
elif entry.startswith("token"):
entry = entry.replace("token=", "")
url_user["token"]=entry
elif entry.startswith("referer"):
entry = entry.replace("referer=", "")
url_user["referer"]=entry
plugintools.log("URL_user dict= "+repr(url_user))
pageurl = url_user.get("pageurl")
# Handle both URL cases: a single link (pageUrl only) or a full rtmp://... link
if pageurl is None:
pageurl = url_user.get("url")
referer= url_user.get("referer")
url_user["pageurl"]=pageurl
print 'pageurl',pageurl
print 'referer',referer
body = gethttp_headers(pageurl, referer)
plugintools.log("body= "+body)
#src=http://www.shidurlive.com/stream/4e6a51324f54637a4e6a4d325a6a63324e6a55334d6a63354d7a453d/706c381d1202
src = re.compile('src=\"(.*?)\"').findall(body)
print 'src',src
url_user["pageurl"]=src[0]
pageurl = url_user.get("pageurl")
referer = url_user.get("referer")
body = gethttp_headers(pageurl, referer)
plugintools.log("body= "+body)
getparams_shidurlive(url_user, body)
url = url_user.get("rtmp") + ' playpath=' + url_user.get("playpath") + ' swfUrl=http://cdn.shidurlive.com/player.swf pageUrl=' + url_user.get("pageurl") + ' live=true timeout=15'
plugintools.play_resolved_url(url)
# Make an HTTP request to the pageUrl
def gethttp_headers(pageurl, referer):
request_headers=[]
request_headers.append(["User-Agent","Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_3) AppleWebKit/537.31 (KHTML, like Gecko) Chrome/26.0.1410.65 Safari/537.31"])
request_headers.append(["Referer",referer])
body,response_headers = plugintools.read_body_and_headers(pageurl, headers=request_headers)
plugintools.log("body= "+body)
return body
# Start the procedure for building the original URL
# Capture the correct parameters
def getparams_shidurlive(url_user, body):
plugintools.log("[movie.ultra.7k-0.3.0].getparams_shidurlive " + repr(url_user) )
# Build the stream parameter dictionary (streamer, playpath)
streamer = re.compile("'streamer', '([^']*)").findall(body)
url_user["rtmp"]=streamer[0]
file = re.compile("'file', '([^']*)").findall(body)
url_user["playpath"]=file[0]
| gpl-2.0 |
scrollback/kuma | vendor/packages/logilab-common/test/unittest_changelog.py | 6 | 1354 | # copyright 2003-2010 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
#
# This file is part of logilab-common.
#
# logilab-common is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 2.1 of the License, or (at your option) any
# later version.
#
# logilab-common is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License along
# with logilab-common. If not, see <http://www.gnu.org/licenses/>.
from os.path import join, dirname
from cStringIO import StringIO
from logilab.common.testlib import TestCase, unittest_main
from logilab.common.changelog import ChangeLog
class ChangeLogTC(TestCase):
cl_class = ChangeLog
cl_file = join(dirname(__file__), 'data', 'ChangeLog')
def test_round_trip(self):
cl = self.cl_class(self.cl_file)
out = StringIO()
cl.write(out)
self.assertStreamEquals(open(self.cl_file), out)
if __name__ == '__main__':
unittest_main()
| mpl-2.0 |
bigswitch/nova | nova/conductor/tasks/migrate.py | 1 | 3844 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_serialization import jsonutils
from nova.conductor.tasks import base
from nova import objects
from nova.scheduler import utils as scheduler_utils
class MigrationTask(base.TaskBase):
def __init__(self, context, instance, flavor,
request_spec, reservations, clean_shutdown, compute_rpcapi,
scheduler_client):
super(MigrationTask, self).__init__(context, instance)
self.clean_shutdown = clean_shutdown
self.request_spec = request_spec
self.reservations = reservations
self.flavor = flavor
self.quotas = None
self.compute_rpcapi = compute_rpcapi
self.scheduler_client = scheduler_client
def _execute(self):
image = self.request_spec.image
self.quotas = objects.Quotas.from_reservations(self.context,
self.reservations,
instance=self.instance)
# TODO(sbauza): Remove that once prep_resize() accepts a RequestSpec
# object in the signature and all the scheduler.utils methods too
legacy_spec = self.request_spec.to_legacy_request_spec_dict()
legacy_props = self.request_spec.to_legacy_filter_properties_dict()
scheduler_utils.setup_instance_group(self.context, legacy_spec,
legacy_props)
scheduler_utils.populate_retry(legacy_props,
self.instance.uuid)
# TODO(sbauza): Remove that RequestSpec rehydration once
# scheduler.utils methods use directly the NovaObject.
self.request_spec = objects.RequestSpec.from_components(
self.context, self.instance.uuid, image,
self.instance.flavor, self.instance.numa_topology,
self.instance.pci_requests, legacy_props, None,
self.instance.availability_zone)
# NOTE(sbauza): Force_hosts/nodes needs to be reset
# if we want to make sure that the next destination
# is not forced to be the original host
self.request_spec.reset_forced_destinations()
hosts = self.scheduler_client.select_destinations(
self.context, self.request_spec)
host_state = hosts[0]
scheduler_utils.populate_filter_properties(legacy_props,
host_state)
# context is not serializable
legacy_props.pop('context', None)
(host, node) = (host_state['host'], host_state['nodename'])
# FIXME(sbauza): Serialize/Unserialize the legacy dict because of
# oslo.messaging #1529084 to transform datetime values into strings.
# tl;dr: datetimes in dicts are not accepted as correct values by the
# rpc fake driver.
legacy_spec = jsonutils.loads(jsonutils.dumps(legacy_spec))
self.compute_rpcapi.prep_resize(
self.context, legacy_spec['image'], self.instance,
self.flavor, host, self.reservations,
request_spec=legacy_spec, filter_properties=legacy_props,
node=node, clean_shutdown=self.clean_shutdown)
def rollback(self):
if self.quotas:
self.quotas.rollback()
| apache-2.0 |
idovear/odoo | addons/base_iban/base_iban.py | 278 | 8657 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import string
from openerp.osv import fields, osv
from openerp.tools.translate import _
# Reference Examples of IBAN
_ref_iban = { 'al':'ALkk BBBS SSSK CCCC CCCC CCCC CCCC', 'ad':'ADkk BBBB SSSS CCCC CCCC CCCC',
'at':'ATkk BBBB BCCC CCCC CCCC', 'be': 'BEkk BBBC CCCC CCKK', 'ba': 'BAkk BBBS SSCC CCCC CCKK',
'bg': 'BGkk BBBB SSSS DDCC CCCC CC', 'bh': 'BHkk BBBB SSSS SSSS SSSS SS',
'cr': 'CRkk BBBC CCCC CCCC CCCC C',
'hr': 'HRkk BBBB BBBC CCCC CCCC C', 'cy': 'CYkk BBBS SSSS CCCC CCCC CCCC CCCC',
'cz': 'CZkk BBBB SSSS SSCC CCCC CCCC', 'dk': 'DKkk BBBB CCCC CCCC CC',
'do': 'DOkk BBBB CCCC CCCC CCCC CCCC CCCC',
'ee': 'EEkk BBSS CCCC CCCC CCCK', 'fo': 'FOkk CCCC CCCC CCCC CC',
'fi': 'FIkk BBBB BBCC CCCC CK', 'fr': 'FRkk BBBB BGGG GGCC CCCC CCCC CKK',
'ge': 'GEkk BBCC CCCC CCCC CCCC CC', 'de': 'DEkk BBBB BBBB CCCC CCCC CC',
'gi': 'GIkk BBBB CCCC CCCC CCCC CCC', 'gr': 'GRkk BBBS SSSC CCCC CCCC CCCC CCC',
'gl': 'GLkk BBBB CCCC CCCC CC', 'hu': 'HUkk BBBS SSSC CCCC CCCC CCCC CCCC',
'is':'ISkk BBBB SSCC CCCC XXXX XXXX XX', 'ie': 'IEkk BBBB SSSS SSCC CCCC CC',
'il': 'ILkk BBBS SSCC CCCC CCCC CCC', 'it': 'ITkk KBBB BBSS SSSC CCCC CCCC CCC',
'kz': 'KZkk BBBC CCCC CCCC CCCC', 'kw': 'KWkk BBBB CCCC CCCC CCCC CCCC CCCC CC',
'lv': 'LVkk BBBB CCCC CCCC CCCC C',
'lb': 'LBkk BBBB CCCC CCCC CCCC CCCC CCCC', 'li': 'LIkk BBBB BCCC CCCC CCCC C',
'lt': 'LTkk BBBB BCCC CCCC CCCC', 'lu': 'LUkk BBBC CCCC CCCC CCCC' ,
'mk': 'MKkk BBBC CCCC CCCC CKK', 'mt': 'MTkk BBBB SSSS SCCC CCCC CCCC CCCC CCC',
'mr': 'MRkk BBBB BSSS SSCC CCCC CCCC CKK',
'mu': 'MUkk BBBB BBSS CCCC CCCC CCCC CCCC CC', 'mc': 'MCkk BBBB BGGG GGCC CCCC CCCC CKK',
'me': 'MEkk BBBC CCCC CCCC CCCC KK',
'nl': 'NLkk BBBB CCCC CCCC CC', 'no': 'NOkk BBBB CCCC CCK',
'pl':'PLkk BBBS SSSK CCCC CCCC CCCC CCCC',
'pt': 'PTkk BBBB SSSS CCCC CCCC CCCK K', 'ro': 'ROkk BBBB CCCC CCCC CCCC CCCC',
'sm': 'SMkk KBBB BBSS SSSC CCCC CCCC CCC', 'sa': 'SAkk BBCC CCCC CCCC CCCC CCCC',
'rs': 'RSkk BBBC CCCC CCCC CCCC KK', 'sk': 'SKkk BBBB SSSS SSCC CCCC CCCC',
'si': 'SIkk BBSS SCCC CCCC CKK', 'es': 'ESkk BBBB SSSS KKCC CCCC CCCC',
'se': 'SEkk BBBB CCCC CCCC CCCC CCCC', 'ch': 'CHkk BBBB BCCC CCCC CCCC C',
'tn': 'TNkk BBSS SCCC CCCC CCCC CCCC', 'tr': 'TRkk BBBB BRCC CCCC CCCC CCCC CC',
'ae': 'AEkk BBBC CCCC CCCC CCCC CCC',
'gb': 'GBkk BBBB SSSS SSCC CCCC CC',
}
def _format_iban(iban_str):
'''
This function removes all characters from given 'iban_str' that isn't a alpha numeric and converts it to upper case.
'''
res = ""
if iban_str:
for char in iban_str:
if char.isalnum():
res += char.upper()
return res
def _pretty_iban(iban_str):
"return iban_str in groups of four characters separated by a single space"
res = []
while iban_str:
res.append(iban_str[:4])
iban_str = iban_str[4:]
return ' '.join(res)
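# Worked example (illustrative): _format_iban('be68 5390-0754.7034') returns
# 'BE68539007547034', and _pretty_iban('BE68539007547034') then regroups it as
# 'BE68 5390 0754 7034'.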
class res_partner_bank(osv.osv):
_inherit = "res.partner.bank"
def create(self, cr, uid, vals, context=None):
#overwrite to format the iban number correctly
if (vals.get('state',False)=='iban') and vals.get('acc_number', False):
vals['acc_number'] = _format_iban(vals['acc_number'])
vals['acc_number'] = _pretty_iban(vals['acc_number'])
return super(res_partner_bank, self).create(cr, uid, vals, context)
def write(self, cr, uid, ids, vals, context=None):
#overwrite to format the iban number correctly
if (vals.get('state',False)=='iban') and vals.get('acc_number', False):
vals['acc_number'] = _format_iban(vals['acc_number'])
vals['acc_number'] = _pretty_iban(vals['acc_number'])
return super(res_partner_bank, self).write(cr, uid, ids, vals, context)
def is_iban_valid(self, cr, uid, iban, context=None):
""" Check if IBAN is valid or not
@param iban: IBAN as string
@return: True if IBAN is valid, False otherwise
"""
if not iban:
return False
iban = _format_iban(iban).lower()
if iban[:2] in _ref_iban and len(iban) != len(_format_iban(_ref_iban[iban[:2]])):
return False
#the four first digits have to be shifted to the end
iban = iban[4:] + iban[:4]
#letters have to be transformed into numbers (a = 10, b = 11, ...)
iban2 = ""
for char in iban:
if char.isalpha():
iban2 += str(ord(char)-87)
else:
iban2 += char
#iban is correct if modulo 97 == 1
return int(iban2) % 97 == 1
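# Worked example (well-known sample IBAN, shown for illustration): for
# 'GB82 WEST 1234 5698 7654 32' the formatted value is 'gb82west12345698765432';
# moving the first four characters to the end gives 'west12345698765432gb82';
# mapping letters to numbers (a=10 ... z=35) gives
# 3214282912345698765432161182, and that integer modulo 97 is 1, so the
# IBAN is reported as valid.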
def check_iban(self, cr, uid, ids, context=None):
'''
Check the IBAN number
'''
for bank_acc in self.browse(cr, uid, ids, context=context):
if bank_acc.state != 'iban':
continue
if not self.is_iban_valid(cr, uid, bank_acc.acc_number, context=context):
return False
return True
def _construct_constraint_msg(self, cr, uid, ids, context=None):
def default_iban_check(iban_cn):
return iban_cn and iban_cn[0] in string.ascii_lowercase and iban_cn[1] in string.ascii_lowercase
iban_country = self.browse(cr, uid, ids)[0].acc_number and self.browse(cr, uid, ids)[0].acc_number[:2].lower()
if default_iban_check(iban_country):
if iban_country in _ref_iban:
return _('The IBAN does not seem to be correct. You should have entered something like this %s'), \
('%s \nWhere B = National bank code, S = Branch code,'\
' C = Account No, K = Check digit' % _ref_iban[iban_country])
return _('This IBAN does not pass the validation check, please verify it'), ()
return _('The IBAN is invalid, it should begin with the country code'), ()
def _check_bank(self, cr, uid, ids, context=None):
for partner_bank in self.browse(cr, uid, ids, context=context):
if partner_bank.state == 'iban' and not partner_bank.bank.bic:
return False
return True
def get_bban_from_iban(self, cr, uid, ids, context=None):
'''
This function returns the bank account number (BBAN) computed from the IBAN account number, using the mapping_list dictionary that contains the rules associated with each country.
'''
res = {}
mapping_list = {
#TODO add rules for other countries
'be': lambda x: x[4:],
'fr': lambda x: x[14:],
'ch': lambda x: x[9:],
'gb': lambda x: x[14:],
}
for record in self.browse(cr, uid, ids, context=context):
if not record.acc_number:
res[record.id] = False
continue
res[record.id] = False
for code, function in mapping_list.items():
if record.acc_number.lower().startswith(code):
res[record.id] = function(record.acc_number)
break
return res
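# Example of the mapping above (sketch): for the sample Belgian IBAN
# 'BE68539007547034', the 'be' rule x[4:] yields the BBAN '539007547034'.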
_columns = {
# Deprecated: we keep it for backward compatibility, to be removed in v7
# We use acc_number instead of IBAN since v6.1, but we keep this field
# to not break community modules.
'iban': fields.related('acc_number', string='IBAN', size=34, readonly=True, help="International Bank Account Number", type="char"),
}
_constraints = [
(check_iban, _construct_constraint_msg, ["iban", "acc_number", "state"]),
(_check_bank, '\nPlease define BIC/Swift code on bank for bank type IBAN Account to make valid payments', ['bic'])
]
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
poondog/kangaroo-m7-mkII | tools/perf/scripts/python/check-perf-trace.py | 11214 | 2503 | # perf script event handlers, generated by perf script -g python
# (c) 2010, Tom Zanussi <tzanussi@gmail.com>
# Licensed under the terms of the GNU GPL License version 2
#
# This script tests basic functionality such as flag and symbol
# strings, common_xxx() calls back into perf, begin, end, unhandled
# events, etc. Basically, if this script runs successfully and
# displays expected results, Python scripting support should be ok.
import os
import sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from Core import *
from perf_trace_context import *
unhandled = autodict()
def trace_begin():
print "trace_begin"
pass
def trace_end():
print_unhandled()
def irq__softirq_entry(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
vec):
print_header(event_name, common_cpu, common_secs, common_nsecs,
common_pid, common_comm)
print_uncommon(context)
print "vec=%s\n" % \
(symbol_str("irq__softirq_entry", "vec", vec)),
def kmem__kmalloc(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
call_site, ptr, bytes_req, bytes_alloc,
gfp_flags):
print_header(event_name, common_cpu, common_secs, common_nsecs,
common_pid, common_comm)
print_uncommon(context)
print "call_site=%u, ptr=%u, bytes_req=%u, " \
"bytes_alloc=%u, gfp_flags=%s\n" % \
(call_site, ptr, bytes_req, bytes_alloc,
flag_str("kmem__kmalloc", "gfp_flags", gfp_flags)),
def trace_unhandled(event_name, context, event_fields_dict):
try:
unhandled[event_name] += 1
except TypeError:
unhandled[event_name] = 1
def print_header(event_name, cpu, secs, nsecs, pid, comm):
print "%-20s %5u %05u.%09u %8u %-20s " % \
(event_name, cpu, secs, nsecs, pid, comm),
# print trace fields not included in handler args
def print_uncommon(context):
print "common_preempt_count=%d, common_flags=%s, common_lock_depth=%d, " \
% (common_pc(context), trace_flag_str(common_flags(context)), \
common_lock_depth(context))
def print_unhandled():
keys = unhandled.keys()
if not keys:
return
print "\nunhandled events:\n\n",
print "%-40s %10s\n" % ("event", "count"),
print "%-40s %10s\n" % ("----------------------------------------", \
"-----------"),
for event_name in keys:
print "%-40s %10d\n" % (event_name, unhandled[event_name])
| gpl-2.0 |
eruffaldi/python-docx | docx/oxml/shared.py | 17 | 1623 | # encoding: utf-8
"""
Objects shared by modules in the docx.oxml subpackage.
"""
from __future__ import absolute_import
from . import OxmlElement
from .ns import qn
from .simpletypes import ST_DecimalNumber, ST_OnOff, ST_String
from .xmlchemy import BaseOxmlElement, OptionalAttribute, RequiredAttribute
class CT_DecimalNumber(BaseOxmlElement):
"""
Used for ``<w:numId>``, ``<w:ilvl>``, ``<w:abstractNumId>`` and several
others, containing a text representation of a decimal number (e.g. 42) in
its ``val`` attribute.
"""
val = RequiredAttribute('w:val', ST_DecimalNumber)
@classmethod
def new(cls, nsptagname, val):
"""
Return a new ``CT_DecimalNumber`` element having tagname *nsptagname*
and ``val`` attribute set to *val*.
"""
return OxmlElement(nsptagname, attrs={qn('w:val'): str(val)})
class CT_OnOff(BaseOxmlElement):
"""
Used for ``<w:b>``, ``<w:i>`` elements and others, containing a bool-ish
string in its ``val`` attribute, xsd:boolean plus 'on' and 'off'.
"""
val = OptionalAttribute('w:val', ST_OnOff, default=True)
class CT_String(BaseOxmlElement):
"""
Used for ``<w:pStyle>`` and ``<w:tblStyle>`` elements and others,
containing a style name in its ``val`` attribute.
"""
val = RequiredAttribute('w:val', ST_String)
@classmethod
def new(cls, nsptagname, val):
"""
Return a new ``CT_String`` element with tagname *nsptagname* and
``val`` attribute set to *val*.
"""
elm = OxmlElement(nsptagname)
elm.val = val
return elm
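# Usage sketch (illustrative only, not part of python-docx; 'Heading1' is an
# arbitrary example style name): build a <w:pStyle w:val="Heading1"/> element
# with the helper above.
def _example_pstyle():
    return CT_String.new('w:pStyle', 'Heading1')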
| mit |
DavidTingley/ephys-processing-pipeline | installation/klustaviewa-0.3.0/build/lib.linux-x86_64-2.7/kwiklib/dataio/tests/test_kwik.py | 2 | 14009 | """HDF5 tools tests."""
# -----------------------------------------------------------------------------
# Imports
# -----------------------------------------------------------------------------
import os
import tempfile
import numpy as np
import tables as tb
from nose import with_setup
from kwiklib.utils.six import itervalues
from kwiklib.dataio.kwik import *
# -----------------------------------------------------------------------------
# Fixtures
# -----------------------------------------------------------------------------
DIRPATH = tempfile.mkdtemp()
def setup_create(create_default=False):
prm = {'nfeatures': 3, 'waveforms_nsamples': 20, 'has_masks': False,
'nchannels': 3}
prb = {0:
{
'channels': [4, 6, 8],
'graph': [[4, 6], [8, 4]],
'geometry': {4: [0.4, 0.6], 6: [0.6, 0.8], 8: [0.8, 0.0]},
}
}
create_files('myexperiment', dir=DIRPATH, prm=prm, prb=prb,
create_default_info=create_default)
def setup_create_default():
setup_create(True)
def teardown_create():
files = get_filenames('myexperiment', dir=DIRPATH)
[os.remove(path) for path in itervalues(files)]
# -----------------------------------------------------------------------------
# Filename tests
# -----------------------------------------------------------------------------
def test_get_filenames():
filenames = get_filenames('myexperiment')
assert os.path.basename(filenames['kwik']) == 'myexperiment.kwik'
assert os.path.basename(filenames['kwx']) == 'myexperiment.kwx'
assert os.path.basename(filenames['raw.kwd']) == 'myexperiment.raw.kwd'
assert os.path.basename(filenames['low.kwd']) == 'myexperiment.low.kwd'
assert os.path.basename(filenames['high.kwd']) == 'myexperiment.high.kwd'
def test_basename_1():
bn = 'myexperiment'
filenames = get_filenames(bn)
kwik = filenames['kwik']
kwx = filenames['kwx']
kwdraw = filenames['raw.kwd']
assert get_basename(kwik) == bn
assert get_basename(kwx) == bn
assert get_basename(kwdraw) == bn
def test_basename_2():
kwik = '/my/path/experiment.kwik'
kwx = '/my/path/experiment.kwx'
kwdhigh = '/my/path/experiment.high.kwd'
assert get_basename(kwik) == 'experiment'
assert get_basename(kwx) == 'experiment'
assert get_basename(kwdhigh) == 'experiment'
# -----------------------------------------------------------------------------
# HDF5 creation functions tests
# -----------------------------------------------------------------------------
def test_create_kwik():
path = os.path.join(DIRPATH, 'myexperiment.kwik')
prm = {
'waveforms_nsamples': 20,
'nfeatures': 3*32,
}
prb = {0:
{
'channels': [4, 6, 8],
'graph': [[4, 6], [8, 4]],
'geometry': {4: [0.4, 0.6], 6: [0.6, 0.8], 8: [0.8, 0.0]},
}
}
create_kwik(path, prm=prm, prb=prb)
f = tb.openFile(path, 'r')
channel = f.root.channel_groups.__getattr__('0').channels.__getattr__('4')
assert channel._v_attrs.name == 'channel_4'
f.close()
os.remove(path)
def test_create_kwx():
path = os.path.join(DIRPATH, 'myexperiment.kwx')
# Create the KWX file.
waveforms_nsamples = 20
nchannels = 32
nchannels2 = 24
nfeatures = 3*nchannels
prm = {
'waveforms_nsamples': waveforms_nsamples,
'nfeatures': 3*nchannels,
}
prb = {0:
{
'channels': np.arange(nchannels),
},
1: {
'channels': nchannels + np.arange(nchannels2),
'nfeatures': 3*nchannels2
},
2: {
'channels': nchannels + nchannels2 + np.arange(nchannels),
'nfeatures': 2*nchannels
},
}
create_kwx(path, prb=prb, prm=prm)
# Open the KWX file.
f = tb.openFile(path, 'r')
# Group 1
fm1 = f.root.channel_groups.__getattr__('1').features_masks
wr1 = f.root.channel_groups.__getattr__('1').waveforms_raw
wf1 = f.root.channel_groups.__getattr__('1').waveforms_filtered
assert fm1.shape[1:] == (3*nchannels2, 2)
assert wr1.shape[1:] == (waveforms_nsamples, nchannels2)
assert wf1.shape[1:] == (waveforms_nsamples, nchannels2)
# Group 2
fm2 = f.root.channel_groups.__getattr__('2').features_masks
wr2 = f.root.channel_groups.__getattr__('2').waveforms_raw
wf2 = f.root.channel_groups.__getattr__('2').waveforms_filtered
assert fm2.shape[1:] == (2*nchannels, 2)
assert wr2.shape[1:] == (waveforms_nsamples, nchannels)
assert wf2.shape[1:] == (waveforms_nsamples, nchannels)
f.close()
# Delete the file.
os.remove(path)
def test_create_kwd():
path = os.path.join(DIRPATH, 'myexperiment.raw.kwd')
# Create the KWD file.
nchannels_tot = 32*3
prm = {'nchannels': nchannels_tot}
create_kwd(path, type='raw', prm=prm,)
# Open the KWX file.
f = tb.openFile(path, 'r')
assert f.root.recordings
f.close()
# Delete the file.
os.remove(path)
def test_create_empty():
files = create_files('myexperiment', dir=DIRPATH)
[os.remove(path) for path in itervalues(files)]
@with_setup(setup_create_default, teardown_create)
def test_create_default():
path = os.path.join(DIRPATH, 'myexperiment.kwik')
prm = {
'waveforms_nsamples': 20,
'nfeatures': 3*32,
}
prb = {0:
{
'channels': [4, 6, 8],
'graph': [[4, 6], [8, 4]],
'geometry': {4: [0.4, 0.6], 6: [0.6, 0.8], 8: [0.8, 0.0]},
}
}
files = open_files('myexperiment', dir=DIRPATH)
f = files['kwik']
assert f.root.channel_groups.__getattr__('0').cluster_groups.main.__getattr__('0')._f_getAttr('name') == 'Noise'
assert hasattr(f.root.channel_groups.__getattr__('0').clusters.main, '0')
close_files(files)
# -----------------------------------------------------------------------------
# Item creation functions tests
# -----------------------------------------------------------------------------
@with_setup(setup_create, teardown_create)
def test_add_recording():
files = open_files('myexperiment', dir=DIRPATH, mode='a')
sample_rate = 20000.
start_time = 10.
start_sample = 200000.
bit_depth = 16
band_high = 100.
band_low = 500.
nchannels = 32
nsamples = 0
add_recording(files,
sample_rate=sample_rate,
start_time=start_time,
start_sample=start_sample,
bit_depth=bit_depth,
band_high=band_high,
band_low=band_low,
nchannels=nchannels,
nsamples=nsamples,
)
rec = files['kwik'].root.recordings.__getattr__('0')
assert rec._v_attrs.sample_rate == sample_rate
assert rec._v_attrs.start_time == start_time
assert rec._v_attrs.start_sample == start_sample
assert rec._v_attrs.bit_depth == bit_depth
assert rec._v_attrs.band_high == band_high
assert rec._v_attrs.band_low == band_low
close_files(files)
@with_setup(setup_create, teardown_create)
def test_add_event_type():
files = open_files('myexperiment', dir=DIRPATH, mode='a')
add_event_type(files, 'myevents')
events = files['kwik'].root.event_types.myevents.events
assert isinstance(events.time_samples, tb.EArray)
assert isinstance(events.recording, tb.EArray)
events.user_data
close_files(files)
@with_setup(setup_create, teardown_create)
def test_add_cluster_group():
files = open_files('myexperiment', dir=DIRPATH, mode='a')
add_cluster_group(files, channel_group_id='0', id='0', name='Noise')
noise = files['kwik'].root.channel_groups.__getattr__('0').cluster_groups.main.__getattr__('0')
assert noise._v_attrs.name == 'Noise'
noise.application_data.klustaviewa._v_attrs.color
noise.user_data
remove_cluster_group(files, channel_group_id='0', id='0')
assert not hasattr(
files['kwik'].root.channel_groups.__getattr__('0').cluster_groups.main,
'0')
close_files(files)
@with_setup(setup_create, teardown_create)
def test_add_cluster():
files = open_files('myexperiment', dir=DIRPATH, mode='a')
add_cluster(files, channel_group_id='0',)
cluster = files['kwik'].root.channel_groups.__getattr__('0').clusters.main.__getattr__('0')
cluster._v_attrs.cluster_group
cluster._v_attrs.mean_waveform_raw
cluster._v_attrs.mean_waveform_filtered
cluster.quality_measures
cluster.application_data.klustaviewa._v_attrs.color
cluster.user_data
remove_cluster(files, channel_group_id='0', id='0')
assert not hasattr(
files['kwik'].root.channel_groups.__getattr__('0').clusters.main,
'0')
close_files(files)
@with_setup(setup_create, teardown_create)
def test_add_clustering():
files = open_files('myexperiment', dir=DIRPATH, mode='a')
nspikes = 100
add_spikes(files, channel_group_id='0',
time_samples=np.arange(nspikes),
features=np.random.randn(nspikes, 3),
fill_empty=False,
)
spike_clusters = np.random.randint(size=nspikes, low=3, high=20)
add_clustering(files, name='myclustering', spike_clusters=spike_clusters)
clusters = files['kwik'].root.channel_groups.__getattr__('0').spikes.clusters.myclustering[:]
assert np.allclose(spike_clusters, clusters)
clustering = files['kwik'].root.channel_groups.__getattr__('0').clusters.myclustering
assert not hasattr(clustering, '0')
for i in np.unique(spike_clusters):
assert clustering.__getattr__(str(i)).application_data. \
klustaviewa._f_getAttr('color') > 0
close_files(files)
@with_setup(setup_create, teardown_create)
def test_add_clustering_overwrite():
files = open_files('myexperiment', dir=DIRPATH, mode='a')
nspikes = 100
add_spikes(files, channel_group_id='0',
time_samples=np.arange(nspikes),
features=np.random.randn(nspikes, 3),
fill_empty=False,
)
spike_clusters = np.random.randint(size=nspikes, low=3, high=20)
add_clustering(files, name='main', spike_clusters=spike_clusters,
overwrite=True)
clusters = files['kwik'].root.channel_groups.__getattr__('0').spikes.clusters.main[:]
assert np.allclose(spike_clusters, clusters)
clustering = files['kwik'].root.channel_groups.__getattr__('0').clusters.main
assert not hasattr(clustering, '0')
for i in np.unique(spike_clusters):
assert clustering.__getattr__(str(i)).application_data. \
klustaviewa._f_getAttr('color') > 0
close_files(files)
@with_setup(setup_create, teardown_create)
def test_add_spikes():
files = open_files('myexperiment', dir=DIRPATH, mode='a')
nspikes = 7
add_spikes(files, channel_group_id='0',
time_samples=1,
)
add_spikes(files, channel_group_id='0',
time_samples=np.arange(1),
)
add_spikes(files, channel_group_id='0',
time_samples=np.arange(2),
masks=np.random.randn(2, 3),
)
add_spikes(files, channel_group_id='0',
time_samples=np.arange(2),
waveforms_raw=np.random.randn(2, 20, 3),
)
add_spikes(files, channel_group_id='0',
time_samples=4,
waveforms_raw=np.random.randn(20, 3),
waveforms_filtered=np.random.randn(20, 3),
)
spikes = files['kwx'].root.channel_groups.__getattr__('0')
assert spikes.waveforms_raw.shape == (nspikes, 20, 3)
assert spikes.waveforms_filtered.shape == (nspikes, 20, 3)
close_files(files)
@with_setup(setup_create, teardown_create)
def test_add_spikes_fm():
files = open_files('myexperiment', dir=DIRPATH, mode='a')
nspikes = 7
add_spikes(files, channel_group_id='0',
time_samples=np.arange(nspikes),
features=np.random.randn(nspikes, 3),
fill_empty=False,
)
spikes = files['kwx'].root.channel_groups.__getattr__('0')
assert spikes.waveforms_raw.shape == (0, 20, 3)
assert spikes.waveforms_filtered.shape == (0, 20, 3)
assert spikes.features_masks.shape == (nspikes, 3)
close_files(files)
@with_setup(setup_create, teardown_create)
def test_to_contiguous():
"""Convert an EArray to contiguous Array."""
files = open_files('myexperiment', dir=DIRPATH, mode='a')
n = 100000
fm = files['kwx'].root.channel_groups.__getattr__('0').features_masks
s = fm.shape[1:]
a = fm.atom
X = np.random.rand(n, *s)
fm.append(X)
assert isinstance(fm, tb.EArray)
assert fm.shape[0] == n
assert fm.shape[1:] == s
assert fm.atom == a
to_contiguous(fm, nspikes=n)
fm = files['kwx'].root.channel_groups.__getattr__('0').features_masks
assert isinstance(fm, tb.Array) and not isinstance(fm, tb.EArray)
assert fm.shape[0] == n
assert fm.shape[1:] == s
assert fm.atom == a
Y = fm[...]
assert np.allclose(X, Y)
close_files(files)
| gpl-3.0 |
sloanyang/gyp | test/mac/gyptest-app-error.py | 164 | 1172 | #!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies that invalid strings files cause the build to fail.
"""
import TestCmd
import TestGyp
import sys
if sys.platform == 'darwin':
expected_error = 'Old-style plist parser: missing semicolon in dictionary'
saw_expected_error = [False] # Python2 has no "nonlocal" keyword.
def match(a, b):
if a == b:
return True
if not TestCmd.is_List(a):
a = a.split('\n')
if not TestCmd.is_List(b):
b = b.split('\n')
if expected_error in '\n'.join(a) + '\n'.join(b):
saw_expected_error[0] = True
return True
return False
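# Side demo (sketch, not used by the test itself): the single-element list
# above is the usual Python 2 substitute for `nonlocal`; the closure mutates
# the cell instead of rebinding a name.
def _nonlocal_workaround_demo():
    counter = [0]
    def bump():
        counter[0] += 1  # mutate, don't rebind, so no `nonlocal` is needed
    bump()
    return counter[0]  # -> 1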
test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'], match=match)
test.run_gyp('test-error.gyp', chdir='app-bundle')
test.build('test-error.gyp', test.ALL, chdir='app-bundle')
# Ninja pipes stderr of subprocesses to stdout.
if test.format == 'ninja' and expected_error in test.stdout():
saw_expected_error[0] = True
if saw_expected_error[0]:
test.pass_test()
else:
test.fail_test()
| bsd-3-clause |
mchristopher/PokemonGo-DesktopMap | app/pylibs/osx64/gevent/monkey.py | 5 | 24241 | # Copyright (c) 2009-2012 Denis Bilenko. See LICENSE for details.
# pylint: disable=redefined-outer-name
"""
Make the standard library cooperative.
Patching
========
The primary purpose of this module is to carefully patch, in place,
portions of the standard library with gevent-friendly functions that
behave in the same way as the original (at least as closely as possible).
The primary interface to this is the :func:`patch_all` function, which
performs all the available patches. It accepts arguments to limit the
patching to certain modules, but most programs will want to use the
default values as they receive the most wide-spread testing.
Patching *should be done as early as possible* in the lifecycle of the
program. For example, the main module (the one that tests against
``__main__`` or is otherwise the first imported) should begin with
this code, ideally before any other imports::
from gevent import monkey
monkey.patch_all()
.. tip::
Some frameworks, such as gunicorn, handle monkey-patching for you.
Check their documentation to be sure.
Querying
--------
Sometimes it is helpful to know if objects have been monkey-patched, and in
advanced cases even to have access to the original standard library functions. This
module provides functions for that purpose.
- :func:`is_module_patched`
- :func:`is_object_patched`
- :func:`get_original`
Use as a module
===============
Sometimes it is useful to run existing python scripts or modules that
were not built to be gevent aware under gevent. To do so, this module
can be run as the main module, passing the script and its arguments.
For details, see the :func:`main` function.
Functions
=========
"""
from __future__ import absolute_import
from __future__ import print_function
import sys
__all__ = [
'patch_all',
'patch_builtins',
'patch_dns',
'patch_os',
'patch_select',
'patch_signal',
'patch_socket',
'patch_ssl',
'patch_subprocess',
'patch_sys',
'patch_thread',
'patch_time',
# query functions
'get_original',
'is_module_patched',
'is_object_patched',
# module functions
'main',
]
if sys.version_info[0] >= 3:
string_types = str,
PY3 = True
else:
import __builtin__
string_types = __builtin__.basestring
PY3 = False
if sys.platform.startswith("win"):
WIN = True
else:
WIN = False
# maps module name -> {attribute name: original item}
# e.g. "time" -> {"sleep": built-in function sleep}
saved = {}
def is_module_patched(modname):
"""Check if a module has been replaced with a cooperative version."""
return modname in saved
def is_object_patched(modname, objname):
"""Check if an object in a module has been replaced with a cooperative version."""
return is_module_patched(modname) and objname in saved[modname]
def _get_original(name, items):
d = saved.get(name, {})
values = []
module = None
for item in items:
if item in d:
values.append(d[item])
else:
if module is None:
module = __import__(name)
values.append(getattr(module, item))
return values
def get_original(mod_name, item_name):
"""Retrieve the original object from a module.
If the object has not been patched, then that object will still be retrieved.
:param item_name: A string or sequence of strings naming the attribute(s) on the module
``mod_name`` to return.
:return: The original value if a string was given for ``item_name`` or a sequence
of original values if a sequence was passed.
"""
if isinstance(item_name, string_types):
return _get_original(mod_name, [item_name])[0]
else:
return _get_original(mod_name, item_name)
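def _example_call_original_sleep(seconds):
    # Hypothetical helper (not part of gevent): even after monkey-patching,
    # fetch and call the original, blocking time.sleep via get_original().
    real_sleep = get_original('time', 'sleep')
    real_sleep(seconds)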
def patch_item(module, attr, newitem):
NONE = object()
olditem = getattr(module, attr, NONE)
if olditem is not NONE:
saved.setdefault(module.__name__, {}).setdefault(attr, olditem)
setattr(module, attr, newitem)
def remove_item(module, attr):
NONE = object()
olditem = getattr(module, attr, NONE)
if olditem is NONE:
return
saved.setdefault(module.__name__, {}).setdefault(attr, olditem)
delattr(module, attr)
def patch_module(name, items=None):
gevent_module = getattr(__import__('gevent.' + name), name)
module_name = getattr(gevent_module, '__target__', name)
module = __import__(module_name)
if items is None:
items = getattr(gevent_module, '__implements__', None)
if items is None:
raise AttributeError('%r does not have __implements__' % gevent_module)
for attr in items:
patch_item(module, attr, getattr(gevent_module, attr))
return module
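# For orientation, a cooperative gevent submodule consumed by patch_module()
# is expected to look roughly like this (sketch, names illustrative):
#
#     __implements__ = ['sleep']  # attributes to copy onto the stdlib module
#     __target__ = 'time'         # optional: name of the stdlib module to patch
#     def sleep(seconds): ...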
def _queue_warning(message, _warnings):
# Queues a warning to show after the monkey-patching process is all done.
# Done this way to avoid extra imports during the process itself, just
# in case. If we're calling a function one-off (unusual) go ahead and do it
if _warnings is None:
_process_warnings([message])
else:
_warnings.append(message)
def _process_warnings(_warnings):
import warnings
for warning in _warnings:
warnings.warn(warning, RuntimeWarning, stacklevel=3)
def _patch_sys_std(name):
from gevent.fileobject import FileObjectThread
orig = getattr(sys, name)
if not isinstance(orig, FileObjectThread):
patch_item(sys, name, FileObjectThread(orig))
def patch_sys(stdin=True, stdout=True, stderr=True):
"""Patch sys.std[in,out,err] to use a cooperative IO via a threadpool.
This is relatively dangerous and can have unintended consequences such as hanging
the process or `misinterpreting control keys`_ when ``input`` and ``raw_input``
are used.
This method does nothing on Python 3. The Python 3 interpreter wants to flush
the TextIOWrapper objects that make up stderr/stdout at shutdown time, but
using a threadpool at that time leads to a hang.
.. _`misinterpreting control keys`: https://github.com/gevent/gevent/issues/274
"""
# test__issue6.py demonstrates the hang if these lines are removed;
# strangely enough that test passes even without monkey-patching sys
if PY3:
return
if stdin:
_patch_sys_std('stdin')
if stdout:
_patch_sys_std('stdout')
if stderr:
_patch_sys_std('stderr')
def patch_os():
"""
Replace :func:`os.fork` with :func:`gevent.fork`, and, on POSIX,
:func:`os.waitpid` with :func:`gevent.os.waitpid` (if the
environment variable ``GEVENT_NOWAITPID`` is not defined). Does
nothing if fork is not available.
This method must be used with :func:`patch_signal` to have proper SIGCHLD
handling. :func:`patch_all` calls both by default.
"""
patch_module('os')
def patch_time():
"""Replace :func:`time.sleep` with :func:`gevent.sleep`."""
from gevent.hub import sleep
import time
patch_item(time, 'sleep', sleep)
def _patch_existing_locks(threading):
if len(list(threading.enumerate())) != 1:
return
try:
tid = threading.get_ident()
except AttributeError:
tid = threading._get_ident()
rlock_type = type(threading.RLock())
try:
import importlib._bootstrap
except ImportError:
class _ModuleLock(object):
pass
else:
_ModuleLock = importlib._bootstrap._ModuleLock
# It might be possible to walk up all the existing stack frames to find
# locked objects...at least if they use `with`. To be sure, we look at every object
# Since we're supposed to be done very early in the process, there shouldn't be
# too many.
# By definition there's only one thread running, so the various
# owner attributes were the old (native) thread id. Make it our
# current greenlet id so that when it wants to unlock and compare
# self.__owner with _get_ident(), they match.
gc = __import__('gc')
for o in gc.get_objects():
if isinstance(o, rlock_type):
if hasattr(o, '_owner'): # Py3
if o._owner is not None:
o._owner = tid
else:
if o._RLock__owner is not None:
o._RLock__owner = tid
elif isinstance(o, _ModuleLock):
if o.owner is not None:
o.owner = tid
def patch_thread(threading=True, _threading_local=True, Event=False, logging=True,
existing_locks=True,
_warnings=None):
"""
Replace the standard :mod:`thread` module to make it greenlet-based.
- If *threading* is true (the default), also patch ``threading``.
- If *_threading_local* is true (the default), also patch ``_threading_local.local``.
- If *logging* is True (the default), also patch locks taken if the logging module has
been configured.
- If *existing_locks* is True (the default), and the process is still single threaded,
make sure than any :class:`threading.RLock` (and, under Python 3, :class:`importlib._bootstrap._ModuleLock`)
instances that are currently locked can be properly unlocked.
.. caution::
Monkey-patching :mod:`thread` and using
:class:`multiprocessing.Queue` or
:class:`concurrent.futures.ProcessPoolExecutor` (which uses a
``Queue``) will hang the process.
.. versionchanged:: 1.1b1
Add *logging* and *existing_locks* params.
"""
# Description of the hang:
# There is an incompatibility with patching 'thread' and the 'multiprocessing' module:
# The problem is that multiprocessing.queues.Queue uses a half-duplex multiprocessing.Pipe,
# which is implemented with os.pipe() and _multiprocessing.Connection. os.pipe isn't patched
# by gevent, as it returns just a fileno. _multiprocessing.Connection is an internal implementation
# class implemented in C, which exposes a 'poll(timeout)' method; under the covers, this issues a
# (blocking) select() call: hence the need for a real thread. Except for that method, we could
# almost replace Connection with gevent.fileobject.SocketAdapter, plus a trivial
# patch to os.pipe (below). Sigh, so close. (With a little work, we could replicate that method)
# import os
# import fcntl
# os_pipe = os.pipe
# def _pipe():
# r, w = os_pipe()
# fcntl.fcntl(r, fcntl.F_SETFL, os.O_NONBLOCK)
# fcntl.fcntl(w, fcntl.F_SETFL, os.O_NONBLOCK)
# return r, w
# os.pipe = _pipe
# The 'threading' module copies some attributes from the
# thread module the first time it is imported. If we patch 'thread'
# before that happens, then we store the wrong values in 'saved',
# So if we're going to patch threading, we either need to import it
# before we patch thread, or manually clean up the attributes that
# are in trouble. The latter is tricky because of the different names
# on different versions.
if threading:
__import__('threading')
patch_module('thread')
if threading:
threading = patch_module('threading')
if Event:
from gevent.event import Event
patch_item(threading, 'Event', Event)
if existing_locks:
_patch_existing_locks(threading)
if logging and 'logging' in sys.modules:
logging = __import__('logging')
patch_item(logging, '_lock', threading.RLock())
for wr in logging._handlerList:
# In py26, these are actual handlers, not weakrefs
handler = wr() if callable(wr) else wr
if handler is None:
continue
if not hasattr(handler, 'lock'):
raise TypeError("Unknown/unsupported handler %r" % handler)
handler.lock = threading.RLock()
if _threading_local:
_threading_local = __import__('_threading_local')
from gevent.local import local
patch_item(_threading_local, 'local', local)
if sys.version_info[:2] >= (3, 4):
# Issue 18808 changes the nature of Thread.join() to use
# locks. This means that a greenlet spawned in the main thread
# (which is already running) cannot wait for the main thread---it
# hangs forever. We patch around this if possible. See also
# gevent.threading.
threading = __import__('threading')
greenlet = __import__('greenlet')
if threading.current_thread() == threading.main_thread():
main_thread = threading.main_thread()
_greenlet = main_thread._greenlet = greenlet.getcurrent()
from gevent.hub import sleep
def join(timeout=None):
if threading.current_thread() is main_thread:
raise RuntimeError("Cannot join current thread")
if _greenlet.dead or not main_thread.is_alive():
return
elif timeout:
raise ValueError("Cannot use a timeout to join the main thread")
# XXX: Make that work
else:
while main_thread.is_alive():
sleep(0.01)
main_thread.join = join
# Patch up the ident of the main thread to match. This
# matters if threading was imported before monkey-patching
# thread
oldid = main_thread.ident
main_thread._ident = threading.get_ident()
if oldid in threading._active:
threading._active[main_thread.ident] = threading._active[oldid]
if oldid != main_thread.ident:
del threading._active[oldid]
else:
_queue_warning("Monkey-patching not on the main thread; "
"threading.main_thread().join() will hang from a greenlet",
_warnings)
def patch_socket(dns=True, aggressive=True):
"""Replace the standard socket object with gevent's cooperative sockets.
If ``dns`` is true, also patch dns functions in :mod:`socket`.
"""
from gevent import socket
# Note: although it seems like it's not strictly necessary to monkey patch 'create_connection',
# it's better to do it. If 'create_connection' was not monkey patched, but the rest of socket module
# was, create_connection would still use "green" getaddrinfo and "green" socket.
# However, because gevent.socket.socket.connect is a Python function, the exception raised by it causes
# _socket object to be referenced by the frame, thus causing the next invocation of bind(source_address) to fail.
if dns:
items = socket.__implements__
else:
items = set(socket.__implements__) - set(socket.__dns__)
patch_module('socket', items=items)
if aggressive:
if 'ssl' not in socket.__implements__:
remove_item(socket, 'ssl')
def patch_dns():
"""Replace DNS functions in :mod:`socket` with cooperative versions.
This is only useful if :func:`patch_socket` has been called and is done automatically
by that method if requested.
"""
from gevent import socket
patch_module('socket', items=socket.__dns__)
def patch_ssl():
"""Replace SSLSocket object and socket wrapping functions in :mod:`ssl` with cooperative versions.
This is only useful if :func:`patch_socket` has been called.
"""
patch_module('ssl')
def patch_select(aggressive=True):
"""
Replace :func:`select.select` with :func:`gevent.select.select`
and :func:`select.poll` with :class:`gevent.select.poll` (where available).
If ``aggressive`` is true (the default), also remove other
blocking functions from :mod:`select` and (on Python 3.4 and
above) :mod:`selectors`:
- :func:`select.epoll`
- :func:`select.kqueue`
- :func:`select.kevent`
- :func:`select.devpoll` (Python 3.5+)
- :class:`selectors.EpollSelector`
- :class:`selectors.KqueueSelector`
- :class:`selectors.DevpollSelector` (Python 3.5+)
"""
patch_module('select')
if aggressive:
select = __import__('select')
# since these are blocking we're removing them here. This makes some other
# modules (e.g. asyncore) non-blocking, as they use select that we provide
# when none of these are available.
remove_item(select, 'epoll')
remove_item(select, 'kqueue')
remove_item(select, 'kevent')
remove_item(select, 'devpoll')
if sys.version_info[:2] >= (3, 4):
# Python 3 wants to use `select.select` as a member function,
# leading to this error in selectors.py (because gevent.select.select is
# not a builtin and doesn't get the magic auto-static that they do)
# r, w, _ = self._select(self._readers, self._writers, [], timeout)
# TypeError: select() takes from 3 to 4 positional arguments but 5 were given
# Note that this obviously only happens if selectors was imported after we had patched
# select; but there is a code path that leads to it being imported first (but now we've
# patched select---so we can't compare them identically)
select = __import__('select') # Should be gevent-patched now
orig_select_select = get_original('select', 'select')
assert select.select is not orig_select_select
selectors = __import__('selectors')
if selectors.SelectSelector._select in (select.select, orig_select_select):
def _select(self, *args, **kwargs): # pylint:disable=unused-argument
return select.select(*args, **kwargs)
selectors.SelectSelector._select = _select
_select._gevent_monkey = True
if aggressive:
# If `selectors` had already been imported before we removed
# select.epoll|kqueue|devpoll, these may have been defined in terms
# of those functions. They'll fail at runtime.
remove_item(selectors, 'EpollSelector')
remove_item(selectors, 'KqueueSelector')
remove_item(selectors, 'DevpollSelector')
selectors.DefaultSelector = selectors.SelectSelector
def patch_subprocess():
"""
Replace :func:`subprocess.call`, :func:`subprocess.check_call`,
:func:`subprocess.check_output` and :class:`subprocess.Popen` with
:mod:`cooperative versions <gevent.subprocess>`.
.. note::
On Windows under Python 3, the API support may not completely match
the standard library.
"""
patch_module('subprocess')
def patch_builtins():
"""
Make the builtin __import__ function `greenlet safe`_ under Python 2.
.. note::
This does nothing under Python 3 as it is not necessary. Python 3 features
improved import locks that are per-module, not global.
.. _greenlet safe: https://github.com/gevent/gevent/issues/108
"""
if sys.version_info[:2] < (3, 3):
patch_module('builtins')
def patch_signal():
"""
Make the signal.signal function work with a monkey-patched os.
This method must be used with :func:`patch_os` to have proper SIGCHLD
handling. :func:`patch_all` calls both by default.
.. seealso:: :mod:`gevent.signal`
"""
patch_module("signal")
def _check_repatching(**module_settings):
_warnings = []
key = '_gevent_saved_patch_all'
if saved.get(key, module_settings) != module_settings:
_queue_warning("Patching more than once will result in the union of all True"
" parameters being patched",
_warnings)
first_time = key not in saved
saved[key] = module_settings
return _warnings, first_time
def patch_all(socket=True, dns=True, time=True, select=True, thread=True, os=True, ssl=True, httplib=False,
subprocess=True, sys=False, aggressive=True, Event=False,
builtins=True, signal=True):
"""
Do all of the default monkey patching (calls every other applicable
function in this module).
.. versionchanged:: 1.1
Issue a :mod:`warning <warnings>` if this function is called multiple times
with different arguments. The second and subsequent calls will only add more
patches, they can never remove existing patches by setting an argument to ``False``.
.. versionchanged:: 1.1
Issue a :mod:`warning <warnings>` if this function is called with ``os=False``
and ``signal=True``. This will cause SIGCHLD handlers to not be called. This may
be an error in the future.
"""
# Check to see if they're changing the patched list
_warnings, first_time = _check_repatching(**locals())
if not _warnings and not first_time:
# Nothing to do, identical args to what we just
# did
return
# order is important
if os:
patch_os()
if time:
patch_time()
if thread:
patch_thread(Event=Event)
# sys must be patched after thread. in other cases threading._shutdown will be
# initiated to _MainThread with real thread ident
if sys:
patch_sys()
if socket:
patch_socket(dns=dns, aggressive=aggressive)
if select:
patch_select(aggressive=aggressive)
if ssl:
patch_ssl()
if httplib:
raise ValueError('gevent.httplib is no longer provided, httplib must be False')
if subprocess:
patch_subprocess()
if builtins:
patch_builtins()
if signal:
if not os:
_queue_warning('Patching signal but not os will result in SIGCHLD handlers'
' installed after this not being called and os.waitpid may not'
' function correctly if gevent.subprocess is used. This may raise an'
' error in the future.',
_warnings)
patch_signal()
_process_warnings(_warnings)
def main():
args = {}
argv = sys.argv[1:]
verbose = False
script_help, patch_all_args, modules = _get_script_help()
while argv and argv[0].startswith('--'):
option = argv[0][2:]
if option == 'verbose':
verbose = True
elif option.startswith('no-') and option.replace('no-', '') in patch_all_args:
args[option[3:]] = False
elif option in patch_all_args:
args[option] = True
if option in modules:
for module in modules:
args.setdefault(module, False)
else:
sys.exit(script_help + '\n\n' + 'Cannot patch %r' % option)
del argv[0]
# TODO: break on --
if verbose:
import pprint
import os
print('gevent.monkey.patch_all(%s)' % ', '.join('%s=%s' % item for item in args.items()))
print('sys.version=%s' % (sys.version.strip().replace('\n', ' '), ))
print('sys.path=%s' % pprint.pformat(sys.path))
print('sys.modules=%s' % pprint.pformat(sorted(sys.modules.keys())))
print('cwd=%s' % os.getcwd())
patch_all(**args)
if argv:
sys.argv = argv
__package__ = None
assert __package__ is None
globals()['__file__'] = sys.argv[0] # issue #302
with open(sys.argv[0]) as f:
exec(f.read())
else:
print(script_help)
def _get_script_help():
from inspect import getargspec
patch_all_args = getargspec(patch_all)[0]
modules = [x for x in patch_all_args if 'patch_' + x in globals()]
script_help = """gevent.monkey - monkey patch the standard modules to use gevent.
USAGE: python -m gevent.monkey [MONKEY OPTIONS] script [SCRIPT OPTIONS]
If no OPTIONS present, monkey patches all the modules it can patch.
You can exclude a module with --no-module, e.g. --no-thread. You can
specify a module to patch with --module, e.g. --socket. In the latter
case only the modules specified on the command line will be patched.
MONKEY OPTIONS: --verbose %s""" % ', '.join('--[no-]%s' % m for m in modules)
return script_help, patch_all_args, modules
main.__doc__ = _get_script_help()[0]
if __name__ == '__main__':
main()
| mit |
kwlzn/pex | pex/link.py | 1 | 4085 | # Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import absolute_import
import os
import posixpath
from collections import Iterable
from .compatibility import PY3, WINDOWS, pathname2url
from .compatibility import string as compatible_string
from .compatibility import url2pathname
from .util import Memoizer
if PY3:
import urllib.parse as urlparse
else:
import urlparse
class Link(object):
"""Wrapper around a URL."""
@classmethod
def wrap(cls, url):
"""Given a url that is either a string or :class:`Link`, return a :class:`Link`.
:param url: A string-like or :class:`Link` object to wrap.
:returns: A :class:`Link` object wrapping the url.
"""
if isinstance(url, cls):
return url
elif isinstance(url, compatible_string):
return cls(url)
else:
raise ValueError('url must be either a string or Link.')
@classmethod
def wrap_iterable(cls, url_or_urls):
"""Given a string or :class:`Link` or iterable, return an iterable of :class:`Link` objects.
:param url_or_urls: A string or :class:`Link` object, or iterable of string or :class:`Link`
objects.
:returns: A list of :class:`Link` objects.
"""
try:
return [cls.wrap(url_or_urls)]
except ValueError:
pass
if isinstance(url_or_urls, Iterable):
return [cls.wrap(url) for url in url_or_urls]
raise ValueError('url_or_urls must be string/Link or iterable of strings/Links')
@classmethod
def _normalize(cls, filename):
return urlparse.urljoin('file:', pathname2url(
os.path.realpath(os.path.expanduser(filename))))
# A cache for the result of from_filename
_FROM_FILENAME_CACHE = Memoizer()
@classmethod
def from_filename(cls, filename):
"""Return a :class:`Link` wrapping the local filename."""
result = cls._FROM_FILENAME_CACHE.get(filename)
if result is None:
result = cls(cls._normalize(filename))
cls._FROM_FILENAME_CACHE.store(filename, result)
return result
def __init__(self, url):
"""Construct a :class:`Link` from a url.
:param url: A string-like object representing a url.
"""
purl = urlparse.urlparse(url)
if purl.scheme == '' or (
WINDOWS and len(purl.scheme) == 1): # This is likely a drive letter.
purl = urlparse.urlparse(self._normalize(url))
self._url = purl
def __ne__(self, other):
return not self.__eq__(other)
def __eq__(self, link):
return self.__class__ == link.__class__ and self._url == link._url
def __hash__(self):
return hash(self._url)
def join(self, href):
"""Given a href relative to this link, return the :class:`Link` of the absolute url.
:param href: A string-like path relative to this link.
"""
return self.wrap(urlparse.urljoin(self.url, href))
@property
def filename(self):
"""The basename of this url."""
return urlparse.unquote(posixpath.basename(self._url.path))
@property
def path(self):
"""The full path of this url with any hostname and scheme components removed."""
return urlparse.unquote(self._url.path)
@property
def local_path(self):
"""Returns the local filesystem path (only works for file:// urls)."""
assert self.local, 'local_path called on a non-file:// url %s' % (self.url,)
return url2pathname(self.path)
@property
def url(self):
"""The url string to which this link points."""
return urlparse.urlunparse(self._url)
@property
def fragment(self):
"""The url fragment following '#' if any."""
return urlparse.unquote(self._url.fragment)
@property
def scheme(self):
"""The URI scheme used by this Link."""
return self._url.scheme
@property
def local(self):
"""Is the url a local file?"""
return self._url.scheme in ('', 'file')
@property
def remote(self):
"""Is the url a remote file?"""
return self._url.scheme in ('http', 'https')
def __repr__(self):
return '%s(%r)' % (self.__class__.__name__, self.url)
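def _usage_sketch():
    # Illustrative only, not part of pex (the example.com URL is made up):
    # resolve a relative href against a base Link and inspect the pieces.
    base = Link('https://example.com/simple/pex/')
    child = base.join('pex-1.0.tar.gz')
    # -> ('https://example.com/simple/pex/pex-1.0.tar.gz', 'pex-1.0.tar.gz')
    return child.url, child.filename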
| apache-2.0 |
ChromiumWebApps/chromium | build/android/buildbot/bb_run_bot.py | 1 | 10537 | #!/usr/bin/env python
#
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import collections
import copy
import json
import os
import pipes
import re
import subprocess
import sys
import bb_utils
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
from pylib import constants
CHROMIUM_COVERAGE_BUCKET = 'chromium-code-coverage'
_BotConfig = collections.namedtuple(
'BotConfig', ['bot_id', 'host_obj', 'test_obj'])
HostConfig = collections.namedtuple(
'HostConfig',
['script', 'host_steps', 'extra_args', 'extra_gyp_defines', 'target_arch'])
TestConfig = collections.namedtuple('Tests', ['script', 'tests', 'extra_args'])
def BotConfig(bot_id, host_object, test_object=None):
return _BotConfig(bot_id, host_object, test_object)
def DictDiff(d1, d2):
diff = []
for key in sorted(set(d1.keys() + d2.keys())):
if key in d1 and d1[key] != d2.get(key):
diff.append('- %s=%s' % (key, pipes.quote(d1[key])))
if key in d2 and d2[key] != d1.get(key):
diff.append('+ %s=%s' % (key, pipes.quote(d2[key])))
return '\n'.join(diff)
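# Example (sketch): DictDiff({'A': '1', 'B': '2'}, {'B': '3'}) returns
# "- A=1\n- B=2\n+ B=3".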
def GetEnvironment(host_obj, testing, extra_env_vars=None):
init_env = dict(os.environ)
init_env['GYP_GENERATORS'] = 'ninja'
if extra_env_vars:
init_env.update(extra_env_vars)
envsetup_cmd = '. build/android/envsetup.sh'
if testing:
# Skip envsetup to avoid presubmit dependence on android deps.
print 'Testing mode - skipping "%s"' % envsetup_cmd
envsetup_cmd = ':'
else:
print 'Running %s' % envsetup_cmd
proc = subprocess.Popen(['bash', '-exc',
envsetup_cmd + ' >&2; python build/android/buildbot/env_to_json.py'],
stdout=subprocess.PIPE, stderr=subprocess.PIPE,
cwd=bb_utils.CHROME_SRC, env=init_env)
json_env, envsetup_output = proc.communicate()
if proc.returncode != 0:
print >> sys.stderr, 'FATAL Failure in envsetup.'
print >> sys.stderr, envsetup_output
sys.exit(1)
env = json.loads(json_env)
env['GYP_DEFINES'] = env.get('GYP_DEFINES', '') + \
' fastbuild=1 use_goma=1 gomadir=%s' % bb_utils.GOMA_DIR
if host_obj.target_arch:
gyp_target_arch = { 'mips': 'mipsel', 'x86': 'ia32' }.get(
host_obj.target_arch, host_obj.target_arch)
env['GYP_DEFINES'] += ' target_arch=%s' % gyp_target_arch
extra_gyp = host_obj.extra_gyp_defines
if extra_gyp:
env['GYP_DEFINES'] += ' %s' % extra_gyp
if re.search('(asan|clang)=1', extra_gyp):
env.pop('CXX_target', None)
# Bots checkout chrome in /b/build/slave/<name>/build/src
build_internal_android = os.path.abspath(os.path.join(
bb_utils.CHROME_SRC, '..', '..', '..', '..', '..', 'build_internal',
'scripts', 'slave', 'android'))
if os.path.exists(build_internal_android):
env['PATH'] = os.pathsep.join([build_internal_android, env['PATH']])
return env
def GetCommands(options, bot_config):
"""Get a formatted list of commands.
Args:
options: Options object.
bot_config: A BotConfig named tuple.
Returns:
list of Command objects.
"""
property_args = bb_utils.EncodeProperties(options)
commands = [[bot_config.host_obj.script,
'--steps=%s' % ','.join(bot_config.host_obj.host_steps)] +
property_args + (bot_config.host_obj.extra_args or [])]
test_obj = bot_config.test_obj
if test_obj:
run_test_cmd = [test_obj.script] + property_args
for test in test_obj.tests:
run_test_cmd.extend(['-f', test])
if test_obj.extra_args:
run_test_cmd.extend(test_obj.extra_args)
commands.append(run_test_cmd)
return commands
def GetBotStepMap():
compile_step = ['compile']
std_host_tests = ['check_webview_licenses', 'findbugs']
std_build_steps = ['compile', 'zip_build']
std_test_steps = ['extract_build']
std_tests = ['ui', 'unit']
flakiness_server = (
'--flakiness-server=%s' % constants.UPSTREAM_FLAKINESS_SERVER)
experimental = ['--experimental']
B = BotConfig
H = (lambda steps, extra_args=None, extra_gyp=None, target_arch=None :
HostConfig('build/android/buildbot/bb_host_steps.py', steps, extra_args,
extra_gyp, target_arch))
T = (lambda tests, extra_args=None :
TestConfig('build/android/buildbot/bb_device_steps.py', tests,
extra_args))
bot_configs = [
# Main builders
B('main-builder-dbg', H(std_build_steps + std_host_tests)),
B('main-builder-rel', H(std_build_steps)),
B('main-clang-builder',
H(compile_step, extra_gyp='clang=1 component=shared_library')),
B('main-clobber', H(compile_step)),
B('main-tests', H(std_test_steps), T(std_tests, [flakiness_server])),
# Other waterfalls
B('asan-builder-tests', H(compile_step,
extra_gyp='asan=1 component=shared_library'),
T(std_tests, ['--asan', '--asan-symbolize'])),
B('blink-try-builder', H(compile_step)),
B('chromedriver-fyi-tests-dbg', H(std_test_steps),
T(['chromedriver'], ['--install=ChromiumTestShell'])),
B('fyi-x86-builder-dbg',
H(compile_step + std_host_tests, experimental, target_arch='x86')),
B('fyi-builder-dbg',
H(std_build_steps + std_host_tests, experimental,
extra_gyp='emma_coverage=1 android_lint=1')),
B('x86-builder-dbg',
H(compile_step + std_host_tests, target_arch='x86')),
B('fyi-builder-rel', H(std_build_steps, experimental)),
B('fyi-tests', H(std_test_steps),
T(std_tests, ['--experimental', flakiness_server,
'--coverage-bucket', CHROMIUM_COVERAGE_BUCKET])),
B('fyi-component-builder-tests-dbg',
H(compile_step, extra_gyp='component=shared_library'),
T(std_tests, ['--experimental', flakiness_server])),
B('gpu-builder-tests-dbg', H(compile_step), T(['gpu'])),
# Pass empty T([]) so that logcat monitor and device status check are run.
B('perf-bisect-builder-tests-dbg', H(['bisect_perf_regression']), T([])),
B('perf-tests-rel', H(std_test_steps),
T([], ['--install=ChromiumTestShell'])),
B('webkit-latest-webkit-tests', H(std_test_steps),
T(['webkit_layout', 'webkit'], ['--auto-reconnect'])),
B('webkit-latest-contentshell', H(compile_step),
T(['webkit_layout'], ['--auto-reconnect'])),
B('builder-unit-tests', H(compile_step), T(['unit'])),
B('webrtc-chromium-builder',
H(std_build_steps,
extra_args=['--build-targets=android_builder_chromium_webrtc'])),
B('webrtc-native-builder',
H(std_build_steps,
extra_args=['--build-targets=android_builder_webrtc'],
extra_gyp='include_tests=1 enable_tracing=1')),
B('webrtc-chromium-tests', H(std_test_steps),
T(['webrtc_chromium'],
[flakiness_server, '--gtest-filter=WebRtc*'])),
B('webrtc-native-tests', H(std_test_steps),
T(['webrtc_native'], [flakiness_server])),
# Generic builder config (for substring match).
B('builder', H(std_build_steps)),
]
bot_map = dict((config.bot_id, config) for config in bot_configs)
# These bots have identical configuration to ones defined earlier.
copy_map = [
('lkgr-clobber', 'main-clobber'),
('try-builder-dbg', 'main-builder-dbg'),
('try-builder-rel', 'main-builder-rel'),
('try-clang-builder', 'main-clang-builder'),
('try-fyi-builder-dbg', 'fyi-builder-dbg'),
('try-x86-builder-dbg', 'x86-builder-dbg'),
('try-tests', 'main-tests'),
('try-fyi-tests', 'fyi-tests'),
('webkit-latest-tests', 'main-tests'),
]
for to_id, from_id in copy_map:
assert to_id not in bot_map
# pylint: disable=W0212
bot_map[to_id] = copy.deepcopy(bot_map[from_id])._replace(bot_id=to_id)
# Trybots do not upload to flakiness dashboard. They should be otherwise
# identical in configuration to their trunk building counterparts.
test_obj = bot_map[to_id].test_obj
if to_id.startswith('try') and test_obj:
extra_args = test_obj.extra_args
if extra_args and flakiness_server in extra_args:
extra_args.remove(flakiness_server)
return bot_map
# Return an object from the map, looking first for an exact id match.
# If this fails, look for an id which is a substring of the specified id.
# Choose the longest of all substring matches.
# pylint: disable=W0622
def GetBestMatch(id_map, id):
config = id_map.get(id)
if not config:
substring_matches = filter(lambda x: x in id, id_map.iterkeys())
if substring_matches:
max_id = max(substring_matches, key=len)
print 'Using config from id="%s" (substring match).' % max_id
config = id_map[max_id]
return config
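def _get_best_match_demo():
    # Hypothetical ids, showing the longest-substring rule implemented above.
    id_map = {'builder': 'generic config', 'fyi-builder-dbg': 'fyi config'}
    return GetBestMatch(id_map, 'fyi-builder-dbg-triggered')  # -> 'fyi config'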
def GetRunBotOptParser():
parser = bb_utils.GetParser()
parser.add_option('--bot-id', help='Specify bot id directly.')
parser.add_option('--testing', action='store_true',
help='For testing: print, but do not run commands')
return parser
def GetBotConfig(options, bot_step_map):
bot_id = options.bot_id or options.factory_properties.get('android_bot_id')
if not bot_id:
print >> sys.stderr, (
'A bot id must be specified through option or factory_props.')
return
bot_config = GetBestMatch(bot_step_map, bot_id)
if not bot_config:
print 'Error: config for id="%s" cannot be inferred.' % bot_id
return bot_config
def RunBotCommands(options, commands, env):
print 'Environment changes:'
print DictDiff(dict(os.environ), env)
for command in commands:
print bb_utils.CommandToString(command)
sys.stdout.flush()
if options.testing:
env['BUILDBOT_TESTING'] = '1'
return_code = subprocess.call(command, cwd=bb_utils.CHROME_SRC, env=env)
if return_code != 0:
return return_code
def main(argv):
parser = GetRunBotOptParser()
options, args = parser.parse_args(argv[1:])
if args:
parser.error('Unused args: %s' % args)
bot_config = GetBotConfig(options, GetBotStepMap())
if not bot_config:
sys.exit(1)
print 'Using config:', bot_config
commands = GetCommands(options, bot_config)
for command in commands:
print 'Will run: ', bb_utils.CommandToString(command)
print
env = GetEnvironment(bot_config.host_obj, options.testing)
return RunBotCommands(options, commands, env)
if __name__ == '__main__':
sys.exit(main(sys.argv))
| bsd-3-clause |
40223123/raven | static/Brython3.1.0-20150301-090019/Lib/socket.py | 730 | 14913 | # Wrapper module for _socket, providing some additional facilities
# implemented in Python.
"""\
This module provides socket operations and some related functions.
On Unix, it supports IP (Internet Protocol) and Unix domain sockets.
On other systems, it only supports IP. Functions specific for a
socket are available as methods of the socket object.
Functions:
socket() -- create a new socket object
socketpair() -- create a pair of new socket objects [*]
fromfd() -- create a socket object from an open file descriptor [*]
fromshare() -- create a socket object from data received from socket.share() [*]
gethostname() -- return the current hostname
gethostbyname() -- map a hostname to its IP number
gethostbyaddr() -- map an IP number or hostname to DNS info
getservbyname() -- map a service name and a protocol name to a port number
getprotobyname() -- map a protocol name (e.g. 'tcp') to a number
ntohs(), ntohl() -- convert 16, 32 bit int from network to host byte order
htons(), htonl() -- convert 16, 32 bit int from host to network byte order
inet_aton() -- convert IP addr string (123.45.67.89) to 32-bit packed format
inet_ntoa() -- convert 32-bit packed format IP to string (123.45.67.89)
socket.getdefaulttimeout() -- get the default timeout value
socket.setdefaulttimeout() -- set the default timeout value
create_connection() -- connects to an address, with an optional timeout and
optional source address.
[*] not available on all platforms!
Special objects:
SocketType -- type object for socket objects
error -- exception raised for I/O errors
has_ipv6 -- boolean value indicating if IPv6 is supported
Integer constants:
AF_INET, AF_UNIX -- socket domains (first argument to socket() call)
SOCK_STREAM, SOCK_DGRAM, SOCK_RAW -- socket types (second argument)
Many other constants may be defined; these may be used in calls to
the setsockopt() and getsockopt() methods.
"""
import _socket
from _socket import *
import os, sys, io
try:
import errno
except ImportError:
errno = None
EBADF = getattr(errno, 'EBADF', 9)
EAGAIN = getattr(errno, 'EAGAIN', 11)
EWOULDBLOCK = getattr(errno, 'EWOULDBLOCK', 11)
__all__ = ["getfqdn", "create_connection"]
__all__.extend(os._get_exports_list(_socket))
_realsocket = socket
# WSA error codes
if sys.platform.lower().startswith("win"):
errorTab = {}
errorTab[10004] = "The operation was interrupted."
errorTab[10009] = "A bad file handle was passed."
errorTab[10013] = "Permission denied."
errorTab[10014] = "A fault occurred on the network??" # WSAEFAULT
errorTab[10022] = "An invalid operation was attempted."
errorTab[10035] = "The socket operation would block"
errorTab[10036] = "A blocking operation is already in progress."
errorTab[10048] = "The network address is in use."
errorTab[10054] = "The connection has been reset."
errorTab[10058] = "The network has been shut down."
errorTab[10060] = "The operation timed out."
errorTab[10061] = "Connection refused."
errorTab[10063] = "The name is too long."
errorTab[10064] = "The host is down."
errorTab[10065] = "The host is unreachable."
__all__.append("errorTab")
class socket(_socket.socket):
"""A subclass of _socket.socket adding the makefile() method."""
__slots__ = ["__weakref__", "_io_refs", "_closed"]
def __init__(self, family=AF_INET, type=SOCK_STREAM, proto=0, fileno=None):
_socket.socket.__init__(self, family, type, proto, fileno)
self._io_refs = 0
self._closed = False
def __enter__(self):
return self
def __exit__(self, *args):
if not self._closed:
self.close()
def __repr__(self):
"""Wrap __repr__() to reveal the real class name."""
s = _socket.socket.__repr__(self)
if s.startswith("<socket object"):
s = "<%s.%s%s%s" % (self.__class__.__module__,
self.__class__.__name__,
getattr(self, '_closed', False) and " [closed] " or "",
s[7:])
return s
def __getstate__(self):
raise TypeError("Cannot serialize socket object")
def dup(self):
"""dup() -> socket object
Return a new socket object connected to the same system resource.
"""
fd = dup(self.fileno())
sock = self.__class__(self.family, self.type, self.proto, fileno=fd)
sock.settimeout(self.gettimeout())
return sock
def accept(self):
"""accept() -> (socket object, address info)
Wait for an incoming connection. Return a new socket
representing the connection, and the address of the client.
For IP sockets, the address info is a pair (hostaddr, port).
"""
fd, addr = self._accept()
sock = socket(self.family, self.type, self.proto, fileno=fd)
# Issue #7995: if no default timeout is set and the listening
# socket had a (non-zero) timeout, force the new socket in blocking
# mode to override platform-specific socket flags inheritance.
if getdefaulttimeout() is None and self.gettimeout():
sock.setblocking(True)
return sock, addr
def makefile(self, mode="r", buffering=None, *,
encoding=None, errors=None, newline=None):
"""makefile(...) -> an I/O stream connected to the socket
The arguments are as for io.open() after the filename,
except the only mode characters supported are 'r', 'w' and 'b'.
The semantics are similar too. (XXX refactor to share code?)
"""
for c in mode:
if c not in {"r", "w", "b"}:
raise ValueError("invalid mode %r (only r, w, b allowed)")
writing = "w" in mode
reading = "r" in mode or not writing
assert reading or writing
binary = "b" in mode
rawmode = ""
if reading:
rawmode += "r"
if writing:
rawmode += "w"
raw = SocketIO(self, rawmode)
self._io_refs += 1
if buffering is None:
buffering = -1
if buffering < 0:
buffering = io.DEFAULT_BUFFER_SIZE
if buffering == 0:
if not binary:
raise ValueError("unbuffered streams must be binary")
return raw
if reading and writing:
buffer = io.BufferedRWPair(raw, raw, buffering)
elif reading:
buffer = io.BufferedReader(raw, buffering)
else:
assert writing
buffer = io.BufferedWriter(raw, buffering)
if binary:
return buffer
text = io.TextIOWrapper(buffer, encoding, errors, newline)
text.mode = mode
return text
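# Editor's illustrative sketch (not part of the original module): typical use of
# makefile() to wrap an established connection in a buffered, file-like reader.
# Host and port below are placeholders.
#
#     conn = create_connection(("example.com", 80), timeout=5.0)
#     conn.sendall(b"HEAD / HTTP/1.0\r\nHost: example.com\r\n\r\n")
#     reader = conn.makefile("rb")      # BufferedReader backed by a SocketIO object
#     status_line = reader.readline()   # e.g. b"HTTP/1.1 200 OK\r\n"
#     reader.close()                    # drops an _io_refs reference; socket stays open
#     conn.close()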
def _decref_socketios(self):
if self._io_refs > 0:
self._io_refs -= 1
if self._closed:
self.close()
def _real_close(self, _ss=_socket.socket):
# This function should not reference any globals. See issue #808164.
_ss.close(self)
def close(self):
# This function should not reference any globals. See issue #808164.
self._closed = True
if self._io_refs <= 0:
self._real_close()
def detach(self):
"""detach() -> file descriptor
Close the socket object without closing the underlying file descriptor.
The object cannot be used after this call, but the file descriptor
can be reused for other purposes. The file descriptor is returned.
"""
self._closed = True
return super().detach()
def fromfd(fd, family, type, proto=0):
""" fromfd(fd, family, type[, proto]) -> socket object
Create a socket object from a duplicate of the given file
descriptor. The remaining arguments are the same as for socket().
"""
nfd = dup(fd)
return socket(family, type, proto, nfd)
if hasattr(_socket.socket, "share"):
def fromshare(info):
""" fromshare(info) -> socket object
Create a socket object from the bytes object returned by
socket.share(pid).
"""
return socket(0, 0, 0, info)
if hasattr(_socket, "socketpair"):
def socketpair(family=None, type=SOCK_STREAM, proto=0):
"""socketpair([family[, type[, proto]]]) -> (socket object, socket object)
Create a pair of socket objects from the sockets returned by the platform
socketpair() function.
The arguments are the same as for socket() except the default family is
AF_UNIX if defined on the platform; otherwise, the default is AF_INET.
"""
if family is None:
try:
family = AF_UNIX
except NameError:
family = AF_INET
a, b = _socket.socketpair(family, type, proto)
a = socket(family, type, proto, a.detach())
b = socket(family, type, proto, b.detach())
return a, b
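# Editor's illustrative sketch (not part of the original module): socketpair()
# returns two already-connected sockets, convenient for in-process or
# parent/child IPC.
#
#     a, b = socketpair()
#     a.sendall(b"ping")
#     assert b.recv(4) == b"ping"
#     a.close(); b.close()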
_blocking_errnos = { EAGAIN, EWOULDBLOCK }
class SocketIO(io.RawIOBase):
"""Raw I/O implementation for stream sockets.
This class supports the makefile() method on sockets. It provides
the raw I/O interface on top of a socket object.
"""
# One might wonder why not let FileIO do the job instead. There are two
# main reasons why FileIO is not adapted:
# - it wouldn't work under Windows (where you can't use read() and
# write() on a socket handle)
# - it wouldn't work with socket timeouts (FileIO would ignore the
# timeout and consider the socket non-blocking)
# XXX More docs
def __init__(self, sock, mode):
if mode not in ("r", "w", "rw", "rb", "wb", "rwb"):
raise ValueError("invalid mode: %r" % mode)
io.RawIOBase.__init__(self)
self._sock = sock
if "b" not in mode:
mode += "b"
self._mode = mode
self._reading = "r" in mode
self._writing = "w" in mode
self._timeout_occurred = False
def readinto(self, b):
"""Read up to len(b) bytes into the writable buffer *b* and return
the number of bytes read. If the socket is non-blocking and no bytes
are available, None is returned.
If *b* is non-empty, a 0 return value indicates that the connection
was shutdown at the other end.
"""
self._checkClosed()
self._checkReadable()
if self._timeout_occurred:
raise IOError("cannot read from timed out object")
while True:
try:
return self._sock.recv_into(b)
except timeout:
self._timeout_occurred = True
raise
except InterruptedError:
continue
except error as e:
if e.args[0] in _blocking_errnos:
return None
raise
def write(self, b):
"""Write the given bytes or bytearray object *b* to the socket
and return the number of bytes written. This can be less than
len(b) if not all data could be written. If the socket is
non-blocking and no bytes could be written None is returned.
"""
self._checkClosed()
self._checkWritable()
try:
return self._sock.send(b)
except error as e:
# XXX what about EINTR?
if e.args[0] in _blocking_errnos:
return None
raise
def readable(self):
"""True if the SocketIO is open for reading.
"""
if self.closed:
raise ValueError("I/O operation on closed socket.")
return self._reading
def writable(self):
"""True if the SocketIO is open for writing.
"""
if self.closed:
raise ValueError("I/O operation on closed socket.")
return self._writing
def seekable(self):
"""True if the SocketIO is open for seeking.
"""
if self.closed:
raise ValueError("I/O operation on closed socket.")
return super().seekable()
def fileno(self):
"""Return the file descriptor of the underlying socket.
"""
self._checkClosed()
return self._sock.fileno()
@property
def name(self):
if not self.closed:
return self.fileno()
else:
return -1
@property
def mode(self):
return self._mode
def close(self):
"""Close the SocketIO object. This doesn't close the underlying
socket, except if all references to it have disappeared.
"""
if self.closed:
return
io.RawIOBase.close(self)
self._sock._decref_socketios()
self._sock = None
def getfqdn(name=''):
"""Get fully qualified domain name from name.
An empty argument is interpreted as meaning the local host.
First the hostname returned by gethostbyaddr() is checked, then
possibly existing aliases. In case no FQDN is available, hostname
from gethostname() is returned.
"""
name = name.strip()
if not name or name == '0.0.0.0':
name = gethostname()
try:
hostname, aliases, ipaddrs = gethostbyaddr(name)
except error:
pass
else:
aliases.insert(0, hostname)
for name in aliases:
if '.' in name:
break
else:
name = hostname
return name
_GLOBAL_DEFAULT_TIMEOUT = object()
def create_connection(address, timeout=_GLOBAL_DEFAULT_TIMEOUT,
source_address=None):
"""Connect to *address* and return the socket object.
Convenience function. Connect to *address* (a 2-tuple ``(host,
port)``) and return the socket object. Passing the optional
*timeout* parameter will set the timeout on the socket instance
before attempting to connect. If no *timeout* is supplied, the
global default timeout setting returned by :func:`getdefaulttimeout`
is used. If *source_address* is set it must be a tuple of (host, port)
for the socket to bind as a source address before making the connection.
A host of '' or port 0 tells the OS to use the default.
"""
host, port = address
err = None
for res in getaddrinfo(host, port, 0, SOCK_STREAM):
af, socktype, proto, canonname, sa = res
sock = None
try:
sock = socket(af, socktype, proto)
if timeout is not _GLOBAL_DEFAULT_TIMEOUT:
sock.settimeout(timeout)
if source_address:
sock.bind(source_address)
sock.connect(sa)
return sock
except error as _:
err = _
if sock is not None:
sock.close()
if err is not None:
raise err
else:
raise error("getaddrinfo returns an empty list")
| gpl-3.0 |
hyperized/ansible | lib/ansible/modules/network/f5/bigip_asm_dos_application.py | 2 | 44296 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright: (c) 2019, F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'certified'}
DOCUMENTATION = r'''
---
module: bigip_asm_dos_application
short_description: Manage application settings for DOS profile
description:
- Manages Application settings for ASM/AFM DOS profile.
version_added: 2.9
options:
profile:
description:
- Specifies the name of the profile to manage application settings in.
type: str
required: True
rtbh_duration:
description:
- Specifies the duration of the RTBH BGP route advertisement, in seconds.
- The accepted range is between 0 and 4294967295 inclusive.
type: int
rtbh_enable:
description:
- Specifies whether to enable Remote Triggered Black Hole C(RTBH) of attacking IPs by advertising BGP routes.
type: bool
scrubbing_duration:
description:
- Specifies the duration of the Traffic Scrubbing BGP route advertisement, in seconds.
- The accepted range is between 0 and 4294967295 inclusive.
type: int
scrubbing_enable:
description:
- Specifies whether to enable Traffic Scrubbing during attacks by advertising BGP routes.
type: bool
single_page_application:
description:
- Specifies, when C(yes), that the system supports Single Page Applications.
type: bool
trigger_irule:
description:
- Specifies, when C(yes), that the system activates an Application DoS iRule event.
type: bool
geolocations:
description:
- Manages the geolocations countries whitelist, blacklist.
type: dict
suboptions:
whitelist:
description:
- A list of countries to be put on whitelist, must not have overlapping elements with C(blacklist).
type: list
blacklist:
description:
- A list of countries to be put on blacklist, must not have overlapping elements with C(whitelist).
type: list
heavy_urls:
description:
- Manages Heavy URL protection.
- Heavy URLs are a small number of site URLs that might consume considerable server resources per request.
type: dict
suboptions:
auto_detect:
description:
- Enables or disables automatic heavy URL detection.
type: bool
latency_threshold:
description:
- Specifies the latency threshold for automatic heavy URL detection.
- The accepted range is between 0 and 4294967295 milliseconds inclusive.
type: int
exclude:
description:
- Specifies a list of URLs or wildcards to exclude from the heavy URLs.
type: list
include:
description:
- Configures additional URLs to include in the heavy URLs that were auto detected.
type: list
suboptions:
url:
description:
- Specifies the URL to be added to the list of heavy URLs, in addition to the automatically detected ones.
type: str
threshold:
description:
- Specifies the threshold of requests per second, where the URL in question is considered under attack.
- The accepted range is between 1 and 4294967295 inclusive, or C(auto).
type: str
mobile_detection:
description:
- Configures detection of mobile applications built with the Anti-Bot Mobile SDK and defines how requests
from these mobile application clients are handled.
type: dict
suboptions:
enabled:
description:
- When C(yes), requests from mobile applications built with Anti-Bot Mobile SDK will be detected and handled
according to the parameters set.
- When C(no), these requests will be handled like any other request which may let attacks in, or cause false
positives.
type: bool
allow_android_rooted_device:
description:
- When C(yes), allows traffic from rooted Android devices.
type: bool
allow_any_android_package:
description:
- When C(yes) allows any application publisher.
- A publisher is identified by the certificate used to sign the application.
type: bool
allow_any_ios_package:
description:
- When C(yes) allows any iOS package.
- A package name is the unique identifier of the mobile application.
type: bool
allow_jailbroken_devices:
description:
- When C(yes) allows traffic from jailbroken iOS devices.
type: bool
allow_emulators:
description:
- When C(yes) allows traffic from applications run on emulators.
type: bool
client_side_challenge_mode:
description:
- Action to take when a CAPTCHA or Client Side Integrity challenge needs to be presented.
- The mobile application user will not see a CAPTCHA challenge and the mobile application will not be
presented with the Client Side Integrity challenge. The available options for mobile applications are C(pass)
or C(cshui).
- When C(pass) the traffic is passed without incident.
- When C(cshui) the SDK checks for human interactions with the screen in the last few seconds.
If none are detected, the traffic is blocked.
type: str
choices:
- pass
- cshui
ios_allowed_package_names:
description:
- Specifies the names of iOS packages to allow traffic on.
- This option has no effect when C(allow_any_ios_package) is set to C(yes).
type: list
android_publishers:
description:
- This option has no effect when C(allow_any_android_package) is set to C(yes).
- Specifies the allowed publisher certificates for android applications.
- The publisher certificate needs to be installed on the BIG-IP beforehand.
- "The certificate name located on a different partition than the one specified
in C(partition) parameter needs to be provided in C(full_path) format C(/Foo/cert.crt)."
type: list
partition:
description:
- Device partition to manage resources on.
type: str
default: Common
state:
description:
- When C(state) is C(present), ensures that the Application object exists.
- When C(state) is C(absent), ensures that the Application object is removed.
type: str
choices:
- present
- absent
default: present
notes:
- Requires BIG-IP >= 13.1.0
extends_documentation_fragment: f5
author:
- Wojciech Wypior (@wojtek0806)
'''
EXAMPLES = r'''
- name: Create an ASM dos application profile
bigip_asm_dos_application:
profile: dos_foo
geolocations:
blacklist:
- Afghanistan
- Andorra
whitelist:
- Cuba
heavy_urls:
auto_detect: yes
latency_threshold: 1000
rtbh_duration: 3600
rtbh_enable: yes
single_page_application: yes
provider:
password: secret
server: lb.mydomain.com
user: admin
delegate_to: localhost
- name: Update an ASM dos application profile
bigip_asm_dos_application:
profile: dos_foo
mobile_detection:
enabled: yes
allow_any_ios_package: yes
allow_emulators: yes
provider:
password: secret
server: lb.mydomain.com
user: admin
delegate_to: localhost
- name: Remove an ASM dos application profile
bigip_asm_dos_application:
profile: dos_foo
state: absent
provider:
password: secret
server: lb.mydomain.com
user: admin
delegate_to: localhost
'''
RETURN = r'''
rtbh_enable:
description: Enables Remote Triggered Black Hole of attacking IPs.
returned: changed
type: bool
sample: no
rtbh_duration:
description: The duration of the RTBH BGP route advertisement.
returned: changed
type: int
sample: 3600
scrubbing_enable:
description: Enables Traffic Scrubbing during attacks.
returned: changed
type: bool
sample: yes
scrubbing_duration:
description: The duration of the Traffic Scrubbing BGP route advertisement.
returned: changed
type: int
sample: 3600
single_page_application:
description: Enables support of Single Page Applications.
returned: changed
type: bool
sample: no
trigger_irule:
description: Activates an Application DoS iRule event.
returned: changed
type: bool
sample: yes
geolocations:
description: Specifies geolocations countries whitelist, blacklist.
type: complex
returned: changed
contains:
whitelist:
description: A list of countries to be put on whitelist.
returned: changed
type: list
sample: ['United States, United Kingdom']
blacklist:
description: A list of countries to be put on blacklist.
returned: changed
type: list
sample: ['Russia', 'Germany']
sample: hash/dictionary of values
heavy_urls:
description: Manages Heavy URL protection.
type: complex
returned: changed
contains:
auto_detect:
description: Enables or disables automatic heavy URL detection.
returned: changed
type: bool
sample: yes
latency_threshold:
description: Specifies the latency threshold for automatic heavy URL detection.
returned: changed
type: int
sample: 2000
exclude:
description: Specifies a list of URLs or wildcards to exclude from the heavy URLs.
returned: changed
type: list
sample: ['/exclude.html', '/exclude2.html']
include:
description: Configures additional URLs to include in the heavy URLs.
type: complex
returned: changed
contains:
url:
description: The URL to be added to the list of heavy URLs.
returned: changed
type: str
sample: /include.html
threshold:
description: The threshold of requests per second.
returned: changed
type: str
sample: auto
sample: hash/dictionary of values
sample: hash/dictionary of values
mobile_detection:
description: Configures detection of mobile applications built with the Anti-Bot Mobile SDK.
type: complex
returned: changed
contains:
enabled:
description: Enables or disables automatic mobile detection.
returned: changed
type: bool
sample: yes
allow_android_rooted_device:
description: Allows traffic from rooted Android devices.
returned: changed
type: bool
sample: no
allow_any_android_package:
description: Allows any application publisher.
returned: changed
type: bool
sample: no
allow_any_ios_package:
description: Allows any iOS package.
returned: changed
type: bool
sample: yes
allow_jailbroken_devices:
description: Allows traffic from jailbroken iOS devices.
returned: changed
type: bool
sample: no
allow_emulators:
description: Allows traffic from applications run on emulators.
returned: changed
type: bool
sample: yes
client_side_challenge_mode:
description: Action to take when a CAPTCHA or Client Side Integrity challenge needs to be presented.
returned: changed
type: str
sample: pass
ios_allowed_package_names:
description: The names of iOS packages to allow traffic on.
returned: changed
type: list
sample: ['package1','package2']
android_publishers:
description: The allowed publisher certificates for android applications.
returned: changed
type: list
sample: ['/Common/cert1.crt', '/Common/cert2.crt']
sample: hash/dictionary of values
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.basic import env_fallback
from distutils.version import LooseVersion
try:
from library.module_utils.network.f5.bigip import F5RestClient
from library.module_utils.network.f5.common import F5ModuleError
from library.module_utils.network.f5.common import AnsibleF5Parameters
from library.module_utils.network.f5.common import fq_name
from library.module_utils.network.f5.common import transform_name
from library.module_utils.network.f5.common import flatten_boolean
from library.module_utils.network.f5.common import f5_argument_spec
from library.module_utils.network.f5.compare import compare_complex_list
from library.module_utils.network.f5.compare import cmp_simple_list
from library.module_utils.network.f5.icontrol import tmos_version
from library.module_utils.network.f5.icontrol import module_provisioned
except ImportError:
from ansible.module_utils.network.f5.bigip import F5RestClient
from ansible.module_utils.network.f5.common import F5ModuleError
from ansible.module_utils.network.f5.common import AnsibleF5Parameters
from ansible.module_utils.network.f5.common import fq_name
from ansible.module_utils.network.f5.common import transform_name
from ansible.module_utils.network.f5.common import flatten_boolean
from ansible.module_utils.network.f5.common import f5_argument_spec
from ansible.module_utils.network.f5.compare import compare_complex_list
from ansible.module_utils.network.f5.compare import cmp_simple_list
from ansible.module_utils.network.f5.icontrol import tmos_version
from ansible.module_utils.network.f5.icontrol import module_provisioned
class Parameters(AnsibleF5Parameters):
api_map = {
'rtbhDurationSec': 'rtbh_duration',
'rtbhEnable': 'rtbh_enable',
'scrubbingDurationSec': 'scrubbing_duration',
'scrubbingEnable': 'scrubbing_enable',
'singlePageApplication': 'single_page_application',
'triggerIrule': 'trigger_irule',
'heavyUrls': 'heavy_urls',
'mobileDetection': 'mobile_detection',
}
api_attributes = [
'geolocations',
'rtbhDurationSec',
'rtbhEnable',
'scrubbingDurationSec',
'scrubbingEnable',
'singlePageApplication',
'triggerIrule',
'heavyUrls',
'mobileDetection',
]
returnables = [
'rtbh_duration',
'rtbh_enable',
'scrubbing_duration',
'scrubbing_enable',
'single_page_application',
'trigger_irule',
'enable_mobile_detection',
'allow_android_rooted_device',
'allow_any_android_package',
'allow_any_ios_package',
'allow_jailbroken_devices',
'allow_emulators',
'client_side_challenge_mode',
'ios_allowed_package_names',
'android_publishers',
'auto_detect',
'latency_threshold',
'hw_url_exclude',
'hw_url_include',
'geo_blacklist',
'geo_whitelist',
]
updatables = [
'rtbh_duration',
'rtbh_enable',
'scrubbing_duration',
'scrubbing_enable',
'single_page_application',
'trigger_irule',
'enable_mobile_detection',
'allow_android_rooted_device',
'allow_any_android_package',
'allow_any_ios_package',
'allow_jailbroken_devices',
'allow_emulators',
'client_side_challenge_mode',
'ios_allowed_package_names',
'android_publishers',
'auto_detect',
'latency_threshold',
'hw_url_exclude',
'hw_url_include',
'geo_blacklist',
'geo_whitelist',
]
class ApiParameters(Parameters):
@property
def enable_mobile_detection(self):
if self._values['mobile_detection'] is None:
return None
return self._values['mobile_detection']['enabled']
@property
def allow_android_rooted_device(self):
if self._values['mobile_detection'] is None:
return None
return self._values['mobile_detection']['allowAndroidRootedDevice']
@property
def allow_any_android_package(self):
if self._values['mobile_detection'] is None:
return None
return self._values['mobile_detection']['allowAnyAndroidPackage']
@property
def allow_any_ios_package(self):
if self._values['mobile_detection'] is None:
return None
return self._values['mobile_detection']['allowAnyIosPackage']
@property
def allow_jailbroken_devices(self):
if self._values['mobile_detection'] is None:
return None
return self._values['mobile_detection']['allowJailbrokenDevices']
@property
def allow_emulators(self):
if self._values['mobile_detection'] is None:
return None
return self._values['mobile_detection']['allowEmulators']
@property
def client_side_challenge_mode(self):
if self._values['mobile_detection'] is None:
return None
return self._values['mobile_detection']['clientSideChallengeMode']
@property
def ios_allowed_package_names(self):
if self._values['mobile_detection'] is None:
return None
return self._values['mobile_detection'].get('iosAllowedPackageNames', None)
@property
def android_publishers(self):
if self._values['mobile_detection'] is None or 'androidPublishers' not in self._values['mobile_detection']:
return None
result = [fq_name(publisher['partition'], publisher['name'])
for publisher in self._values['mobile_detection']['androidPublishers']]
return result
@property
def auto_detect(self):
if self._values['heavy_urls'] is None:
return None
return self._values['heavy_urls']['automaticDetection']
@property
def latency_threshold(self):
if self._values['heavy_urls'] is None:
return None
return self._values['heavy_urls']['latencyThreshold']
@property
def hw_url_exclude(self):
if self._values['heavy_urls'] is None:
return None
return self._values['heavy_urls'].get('exclude', None)
@property
def hw_url_include(self):
if self._values['heavy_urls'] is None:
return None
return self._values['heavy_urls'].get('includeList', None)
@property
def geo_blacklist(self):
if self._values['geolocations'] is None:
return None
result = list()
for item in self._values['geolocations']:
if 'blackListed' in item and item['blackListed'] is True:
result.append(item['name'])
if result:
return result
@property
def geo_whitelist(self):
if self._values['geolocations'] is None:
return None
result = list()
for item in self._values['geolocations']:
if 'whiteListed' in item and item['whiteListed'] is True:
result.append(item['name'])
if result:
return result
class ModuleParameters(Parameters):
@property
def rtbh_duration(self):
if self._values['rtbh_duration'] is None:
return None
if 0 <= self._values['rtbh_duration'] <= 4294967295:
return self._values['rtbh_duration']
raise F5ModuleError(
"Valid 'rtbh_duration' must be in range 0 - 4294967295 seconds."
)
@property
def rtbh_enable(self):
result = flatten_boolean(self._values['rtbh_enable'])
if result == 'yes':
return 'enabled'
if result == 'no':
return 'disabled'
return result
@property
def scrubbing_duration(self):
if self._values['scrubbing_duration'] is None:
return None
if 0 <= self._values['scrubbing_duration'] <= 4294967295:
return self._values['scrubbing_duration']
raise F5ModuleError(
"Valid 'scrubbing_duration' must be in range 0 - 4294967295 seconds."
)
@property
def scrubbing_enable(self):
result = flatten_boolean(self._values['scrubbing_enable'])
if result == 'yes':
return 'enabled'
if result == 'no':
return 'disabled'
return result
@property
def single_page_application(self):
result = flatten_boolean(self._values['single_page_application'])
if result == 'yes':
return 'enabled'
if result == 'no':
return 'disabled'
return result
@property
def trigger_irule(self):
result = flatten_boolean(self._values['trigger_irule'])
if result == 'yes':
return 'enabled'
if result == 'no':
return 'disabled'
return result
@property
def enable_mobile_detection(self):
if self._values['mobile_detection'] is None:
return None
result = flatten_boolean(self._values['mobile_detection']['enabled'])
if result == 'yes':
return 'enabled'
if result == 'no':
return 'disabled'
return result
@property
def allow_android_rooted_device(self):
if self._values['mobile_detection'] is None:
return None
result = flatten_boolean(self._values['mobile_detection']['allow_android_rooted_device'])
if result == 'yes':
return 'true'
if result == 'no':
return 'false'
return result
@property
def allow_any_android_package(self):
if self._values['mobile_detection'] is None:
return None
result = flatten_boolean(self._values['mobile_detection']['allow_any_android_package'])
if result == 'yes':
return 'true'
if result == 'no':
return 'false'
return result
@property
def allow_any_ios_package(self):
if self._values['mobile_detection'] is None:
return None
result = flatten_boolean(self._values['mobile_detection']['allow_any_ios_package'])
if result == 'yes':
return 'true'
if result == 'no':
return 'false'
return result
@property
def allow_jailbroken_devices(self):
if self._values['mobile_detection'] is None:
return None
result = flatten_boolean(self._values['mobile_detection']['allow_jailbroken_devices'])
if result == 'yes':
return 'true'
if result == 'no':
return 'false'
return result
@property
def allow_emulators(self):
if self._values['mobile_detection'] is None:
return None
result = flatten_boolean(self._values['mobile_detection']['allow_emulators'])
if result == 'yes':
return 'true'
if result == 'no':
return 'false'
return result
@property
def client_side_challenge_mode(self):
if self._values['mobile_detection'] is None:
return None
return self._values['mobile_detection']['client_side_challenge_mode']
@property
def ios_allowed_package_names(self):
if self._values['mobile_detection'] is None:
return None
return self._values['mobile_detection']['ios_allowed_package_names']
@property
def android_publishers(self):
if self._values['mobile_detection'] is None or self._values['mobile_detection']['android_publishers'] is None:
return None
result = [fq_name(self.partition, item) for item in self._values['mobile_detection']['android_publishers']]
return result
@property
def auto_detect(self):
if self._values['heavy_urls'] is None:
return None
result = flatten_boolean(self._values['heavy_urls']['auto_detect'])
if result == 'yes':
return 'enabled'
if result == 'no':
return 'disabled'
return result
@property
def latency_threshold(self):
if self._values['heavy_urls'] is None or self._values['heavy_urls']['latency_threshold'] is None:
return None
if 0 <= self._values['heavy_urls']['latency_threshold'] <= 4294967295:
return self._values['heavy_urls']['latency_threshold']
raise F5ModuleError(
"Valid 'latency_threshold' must be in range 0 - 4294967295 milliseconds."
)
@property
def hw_url_exclude(self):
if self._values['heavy_urls'] is None:
return None
return self._values['heavy_urls']['exclude']
@property
def hw_url_include(self):
if self._values['heavy_urls'] is None or self._values['heavy_urls']['include'] is None:
return None
result = list()
for item in self._values['heavy_urls']['include']:
element = dict()
element['url'] = self._correct_url(item['url'])
element['name'] = 'URL{0}'.format(self._correct_url(item['url']))
if 'threshold' in item:
element['threshold'] = self._validate_threshold(item['threshold'])
result.append(element)
return result
def _validate_threshold(self, item):
if item == 'auto':
return item
if 1 <= int(item) <= 4294967295:
return item
raise F5ModuleError(
"Valid 'url threshold' must be in range 1 - 4294967295 requests per second or 'auto'."
)
def _correct_url(self, item):
if item.startswith('/'):
return item
return "/{0}".format(item)
@property
def geo_blacklist(self):
if self._values['geolocations'] is None:
return None
whitelist = self.geo_whitelist
blacklist = self._values['geolocations']['blacklist']
if whitelist and blacklist:
if not set(whitelist).isdisjoint(set(blacklist)):
raise F5ModuleError('Cannot specify the same element in blacklist and whitelist.')
return blacklist
@property
def geo_whitelist(self):
if self._values['geolocations'] is None:
return None
return self._values['geolocations']['whitelist']
class Changes(Parameters):
def to_return(self):
result = {}
try:
for returnable in self.returnables:
result[returnable] = getattr(self, returnable)
result = self._filter_params(result)
except Exception:
pass
return result
class UsableChanges(Changes):
@property
def geolocations(self):
if self._values['geo_blacklist'] is None and self._values['geo_whitelist'] is None:
return None
result = list()
if self._values['geo_blacklist']:
for item in self._values['geo_blacklist']:
element = dict()
element['name'] = item
element['blackListed'] = True
result.append(element)
if self._values['geo_whitelist']:
for item in self._values['geo_whitelist']:
element = dict()
element['name'] = item
element['whiteListed'] = True
result.append(element)
if result:
return result
@property
def heavy_urls(self):
tmp = dict()
tmp['automaticDetection'] = self._values['auto_detect']
tmp['latencyThreshold'] = self._values['latency_threshold']
tmp['exclude'] = self._values['hw_url_exclude']
tmp['includeList'] = self._values['hw_url_include']
result = self._filter_params(tmp)
if result:
return result
@property
def mobile_detection(self):
tmp = dict()
tmp['enabled'] = self._values['enable_mobile_detection']
tmp['allowAndroidRootedDevice'] = self._values['allow_android_rooted_device']
tmp['allowAnyAndroidPackage'] = self._values['allow_any_android_package']
tmp['allowAnyIosPackage'] = self._values['allow_any_ios_package']
tmp['allowJailbrokenDevices'] = self._values['allow_jailbroken_devices']
tmp['allowEmulators'] = self._values['allow_emulators']
tmp['clientSideChallengeMode'] = self._values['client_side_challenge_mode']
tmp['iosAllowedPackageNames'] = self._values['ios_allowed_package_names']
tmp['androidPublishers'] = self._values['android_publishers']
result = self._filter_params(tmp)
if result:
return result
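# Editor's note (illustrative, not part of the module): UsableChanges is what gets
# serialized towards the REST API, so a task setting
#   mobile_detection: {enabled: yes, allow_emulators: yes}
# is posted as
#   {"mobileDetection": {"enabled": "enabled", "allowEmulators": "true"}}
# after the boolean flattening done in ModuleParameters above.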
class ReportableChanges(Changes):
returnables = [
'rtbh_duration',
'rtbh_enable',
'scrubbing_duration',
'scrubbing_enable',
'single_page_application',
'trigger_irule',
'heavy_urls',
'mobile_detection',
'geolocations',
]
def _convert_include_list(self, items):
result = list()
for item in items:
element = dict()
element['url'] = item['url']
if 'threshold' in item:
element['threshold'] = item['threshold']
result.append(element)
if result:
return result
@property
def geolocations(self):
tmp = dict()
tmp['blacklist'] = self._values['geo_blacklist']
tmp['whitelist'] = self._values['geo_whitelist']
result = self._filter_params(tmp)
if result:
return result
@property
def heavy_urls(self):
tmp = dict()
tmp['auto_detect'] = flatten_boolean(self._values['auto_detect'])
tmp['latency_threshold'] = self._values['latency_threshold']
tmp['exclude'] = self._values['hw_url_exclude']
tmp['include'] = self._convert_include_list(self._values['hw_url_include'])
result = self._filter_params(tmp)
if result:
return result
@property
def mobile_detection(self):
tmp = dict()
tmp['enabled'] = flatten_boolean(self._values['enable_mobile_detection'])
tmp['allow_android_rooted_device'] = flatten_boolean(self._values['allow_android_rooted_device'])
tmp['allow_any_android_package'] = flatten_boolean(self._values['allow_any_android_package'])
tmp['allow_any_ios_package'] = flatten_boolean(self._values['allow_any_ios_package'])
tmp['allow_jailbroken_devices'] = flatten_boolean(self._values['allow_jailbroken_devices'])
tmp['allow_emulators'] = flatten_boolean(self._values['allow_emulators'])
tmp['client_side_challenge_mode'] = self._values['client_side_challenge_mode']
tmp['ios_allowed_package_names'] = self._values['ios_allowed_package_names']
tmp['android_publishers'] = self._values['android_publishers']
result = self._filter_params(tmp)
if result:
return result
@property
def rtbh_enable(self):
result = flatten_boolean(self._values['rtbh_enable'])
return result
@property
def scrubbing_enable(self):
result = flatten_boolean(self._values['scrubbing_enable'])
return result
@property
def single_page_application(self):
result = flatten_boolean(self._values['single_page_application'])
return result
@property
def trigger_irule(self):
result = flatten_boolean(self._values['trigger_irule'])
return result
class Difference(object):
def __init__(self, want, have=None):
self.want = want
self.have = have
def compare(self, param):
try:
result = getattr(self, param)
return result
except AttributeError:
return self.__default(param)
def __default(self, param):
attr1 = getattr(self.want, param)
try:
attr2 = getattr(self.have, param)
if attr1 != attr2:
return attr1
except AttributeError:
return attr1
@property
def hw_url_include(self):
if self.want.hw_url_include is None:
return None
if self.have.hw_url_include is None and self.want.hw_url_include == []:
return None
if self.have.hw_url_include is None:
return self.want.hw_url_include
wants = self.want.hw_url_include
haves = list()
# First we remove extra keys in have for the same elements
for want in wants:
for have in self.have.hw_url_include:
if want['url'] == have['url']:
entry = self._filter_have(want, have)
haves.append(entry)
# Next we do compare the lists as normal
result = compare_complex_list(wants, haves)
return result
def _filter_have(self, want, have):
to_check = set(want.keys()).intersection(set(have.keys()))
result = dict()
for k in list(to_check):
result[k] = have[k]
return result
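# Editor's note (illustrative, not part of the module): the comparison above first
# projects each existing ("have") URL entry onto the keys supplied by the user, so
# extra API-side attributes do not register as spurious differences, and only then
# runs compare_complex_list(). For example, a desired {'url': '/a', 'name': 'URL/a'}
# checked against an existing {'url': '/a', 'name': 'URL/a', 'threshold': 'auto'}
# is treated as unchanged because 'threshold' was never specified by the user.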
@property
def hw_url_exclude(self):
result = cmp_simple_list(self.want.hw_url_exclude, self.have.hw_url_exclude)
return result
@property
def geo_blacklist(self):
result = cmp_simple_list(self.want.geo_blacklist, self.have.geo_blacklist)
return result
@property
def geo_whitelist(self):
result = cmp_simple_list(self.want.geo_whitelist, self.have.geo_whitelist)
return result
@property
def android_publishers(self):
result = cmp_simple_list(self.want.android_publishers, self.have.android_publishers)
return result
@property
def ios_allowed_package_names(self):
result = cmp_simple_list(self.want.ios_allowed_package_names, self.have.ios_allowed_package_names)
return result
class ModuleManager(object):
def __init__(self, *args, **kwargs):
self.module = kwargs.get('module', None)
self.client = F5RestClient(**self.module.params)
self.want = ModuleParameters(params=self.module.params)
self.have = ApiParameters()
self.changes = UsableChanges()
def _set_changed_options(self):
changed = {}
for key in Parameters.returnables:
if getattr(self.want, key) is not None:
changed[key] = getattr(self.want, key)
if changed:
self.changes = UsableChanges(params=changed)
def _update_changed_options(self):
diff = Difference(self.want, self.have)
updatables = Parameters.updatables
changed = dict()
for k in updatables:
change = diff.compare(k)
if change is None:
continue
else:
if isinstance(change, dict):
changed.update(change)
else:
changed[k] = change
if changed:
self.changes = UsableChanges(params=changed)
return True
return False
def _announce_deprecations(self, result):
warnings = result.pop('__warnings', [])
for warning in warnings:
self.client.module.deprecate(
msg=warning['msg'],
version=warning['version']
)
def exec_module(self):
if not module_provisioned(self.client, 'asm'):
raise F5ModuleError(
"ASM must be provisioned to use this module."
)
if self.version_less_than_13_1():
raise F5ModuleError('Module supported on TMOS versions 13.1.x and above')
changed = False
result = dict()
state = self.want.state
if state == "present":
changed = self.present()
elif state == "absent":
changed = self.absent()
reportable = ReportableChanges(params=self.changes.to_return())
changes = reportable.to_return()
result.update(**changes)
result.update(dict(changed=changed))
self._announce_deprecations(result)
return result
def version_less_than_13_1(self):
version = tmos_version(self.client)
if LooseVersion(version) < LooseVersion('13.1.0'):
return True
return False
def present(self):
if self.exists():
return self.update()
else:
return self.create()
def absent(self):
if self.exists():
return self.remove()
return False
def should_update(self):
result = self._update_changed_options()
if result:
return True
return False
def update(self):
self.have = self.read_current_from_device()
if not self.should_update():
return False
if self.module.check_mode:
return True
self.update_on_device()
return True
def remove(self):
if self.module.check_mode:
return True
self.remove_from_device()
if self.exists():
raise F5ModuleError("Failed to delete the resource.")
return True
def create(self):
self._set_changed_options()
if self.module.check_mode:
return True
self.create_on_device()
return True
def profile_exists(self):
uri = "https://{0}:{1}/mgmt/tm/security/dos/profile/{2}/".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(self.want.partition, self.want.profile),
)
resp = self.client.api.get(uri)
try:
response = resp.json()
except ValueError:
return False
if resp.status == 404 or 'code' in response and response['code'] == 404:
return False
return True
def exists(self):
if not self.profile_exists():
raise F5ModuleError(
'Specified DOS profile: {0} on partition: {1} does not exist.'.format(
self.want.profile, self.want.partition)
)
uri = "https://{0}:{1}/mgmt/tm/security/dos/profile/{2}/application/{3}".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(self.want.partition, self.want.profile),
self.want.profile
)
resp = self.client.api.get(uri)
try:
response = resp.json()
except ValueError:
return False
if resp.status == 404 or 'code' in response and response['code'] == 404:
return False
return True
def create_on_device(self):
params = self.changes.api_params()
params['name'] = self.want.profile
uri = "https://{0}:{1}/mgmt/tm/security/dos/profile/{2}/application/".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(self.want.partition, self.want.profile),
)
resp = self.client.api.post(uri, json=params)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if 'code' in response and response['code'] in [400, 409]:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp.content)
return True
def update_on_device(self):
params = self.changes.api_params()
uri = "https://{0}:{1}/mgmt/tm/security/dos/profile/{2}/application/{3}".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(self.want.partition, self.want.profile),
self.want.profile
)
resp = self.client.api.patch(uri, json=params)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if 'code' in response and response['code'] == 400:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp.content)
def remove_from_device(self):
uri = "https://{0}:{1}/mgmt/tm/security/dos/profile/{2}/application/{3}".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(self.want.partition, self.want.profile),
self.want.profile
)
response = self.client.api.delete(uri)
if response.status == 200:
return True
raise F5ModuleError(response.content)
def read_current_from_device(self):
uri = "https://{0}:{1}/mgmt/tm/security/dos/profile/{2}/application/{3}".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(self.want.partition, self.want.profile),
self.want.profile
)
resp = self.client.api.get(uri)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if 'code' in response and response['code'] == 400:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp.content)
return ApiParameters(params=response)
class ArgumentSpec(object):
def __init__(self):
self.supports_check_mode = True
argument_spec = dict(
profile=dict(
required=True,
),
geolocations=dict(
type='dict',
options=dict(
blacklist=dict(type='list'),
whitelist=dict(type='list'),
),
),
heavy_urls=dict(
type='dict',
options=dict(
auto_detect=dict(type='bool'),
latency_threshold=dict(type='int'),
exclude=dict(type='list'),
include=dict(
type='list',
elements='dict',
options=dict(
url=dict(required=True),
threshold=dict(),
),
)
),
),
mobile_detection=dict(
type='dict',
options=dict(
enabled=dict(type='bool'),
allow_android_rooted_device=dict(type='bool'),
allow_any_android_package=dict(type='bool'),
allow_any_ios_package=dict(type='bool'),
allow_jailbroken_devices=dict(type='bool'),
allow_emulators=dict(type='bool'),
client_side_challenge_mode=dict(choices=['cshui', 'pass']),
ios_allowed_package_names=dict(type='list'),
android_publishers=dict(type='list')
)
),
rtbh_duration=dict(type='int'),
rtbh_enable=dict(type='bool'),
scrubbing_duration=dict(type='int'),
scrubbing_enable=dict(type='bool'),
single_page_application=dict(type='bool'),
trigger_irule=dict(type='bool'),
partition=dict(
default='Common',
fallback=(env_fallback, ['F5_PARTITION'])
),
state=dict(
default='present',
choices=['present', 'absent']
)
)
self.argument_spec = {}
self.argument_spec.update(f5_argument_spec)
self.argument_spec.update(argument_spec)
def main():
spec = ArgumentSpec()
module = AnsibleModule(
argument_spec=spec.argument_spec,
supports_check_mode=spec.supports_check_mode,
)
try:
mm = ModuleManager(module=module)
results = mm.exec_module()
module.exit_json(**results)
except F5ModuleError as ex:
module.fail_json(msg=str(ex))
if __name__ == '__main__':
main()
| gpl-3.0 |
eonpatapon/nova | nova/api/openstack/compute/plugins/v3/server_password.py | 36 | 2525 | # Copyright (c) 2012 Nebula, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""The server password extension."""
from nova.api.metadata import password
from nova.api.openstack import common
from nova.api.openstack import extensions
from nova.api.openstack import wsgi
from nova import compute
ALIAS = 'os-server-password'
authorize = extensions.os_compute_authorizer(ALIAS)
class ServerPasswordController(wsgi.Controller):
"""The Server Password API controller for the OpenStack API."""
def __init__(self):
self.compute_api = compute.API(skip_policy_check=True)
@extensions.expected_errors(404)
def index(self, req, server_id):
context = req.environ['nova.context']
authorize(context)
instance = common.get_instance(self.compute_api, context, server_id)
passw = password.extract_password(instance)
return {'password': passw or ''}
@extensions.expected_errors(404)
@wsgi.response(204)
def clear(self, req, server_id):
"""Removes the encrypted server password from the metadata server
Note that this does not actually change the instance server
password.
"""
context = req.environ['nova.context']
authorize(context)
instance = common.get_instance(self.compute_api, context, server_id)
meta = password.convert_password(context, None)
instance.system_metadata.update(meta)
instance.save()
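# Editor's illustrative sketch (not part of the original extension): with the
# resource wiring declared below, the password metadata can be read or cleared
# with requests of roughly this shape (URL prefix assumed, not taken from this file):
#
#     GET    /v2.1/servers/{server_id}/os-server-password   -> {"password": "..."}
#     DELETE /v2.1/servers/{server_id}/os-server-password   -> 204, clears metadata only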
class ServerPassword(extensions.V3APIExtensionBase):
"""Server password support."""
name = "ServerPassword"
alias = ALIAS
version = 1
def get_resources(self):
resources = [
extensions.ResourceExtension(
ALIAS, ServerPasswordController(),
collection_actions={'clear': 'DELETE'},
parent=dict(member_name='server', collection_name='servers'))]
return resources
def get_controller_extensions(self):
return []
| apache-2.0 |
apark263/tensorflow | tensorflow/python/framework/tensor_shape_div_test.py | 178 | 1495 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Test that old style division works for Dimension."""
from __future__ import absolute_import
# from __future__ import division # Intentionally skip this import
from __future__ import print_function
import six
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import test_util
from tensorflow.python.platform import googletest
class DimensionDivTest(test_util.TensorFlowTestCase):
def testDivSucceeds(self):
"""Without from __future__ import division, __div__ should work."""
if six.PY2: # Old division exists only in Python 2
values = [tensor_shape.Dimension(x) for x in (3, 7, 11, None)]
for x in values:
for y in values:
self.assertEqual((x / y).value, (x // y).value)
if __name__ == "__main__":
googletest.main()
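# Editor's note (illustrative, not part of the test): what the assertion above checks,
# spelled out for one pair of values under Python 2 semantics:
#
#     x = tensor_shape.Dimension(7)
#     y = tensor_shape.Dimension(3)
#     (x / y).value    # 2 -- old-style __div__ behaves like floor division here
#     (x // y).value   # 2 -- identical, so the test passes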
| apache-2.0 |
SamYaple/neutron | neutron/plugins/ml2/drivers/brocade/db/models.py | 63 | 4420 | # Copyright 2014 Brocade Communications System, Inc.
# All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Brocade specific database schema/model."""
import sqlalchemy as sa
from neutron.db import model_base
from neutron.db import models_v2
class ML2_BrocadeNetwork(model_base.BASEV2, models_v2.HasId,
models_v2.HasTenant):
"""Schema for brocade network."""
vlan = sa.Column(sa.String(10))
segment_id = sa.Column(sa.String(36))
network_type = sa.Column(sa.String(10))
class ML2_BrocadePort(model_base.BASEV2, models_v2.HasId,
models_v2.HasTenant):
"""Schema for brocade port."""
network_id = sa.Column(sa.String(36),
sa.ForeignKey("ml2_brocadenetworks.id"),
nullable=False)
admin_state_up = sa.Column(sa.Boolean, nullable=False)
physical_interface = sa.Column(sa.String(36))
vlan_id = sa.Column(sa.String(36))
def create_network(context, net_id, vlan, segment_id, network_type, tenant_id):
"""Create a brocade specific network/port-profiles."""
# only network_type of vlan is supported
session = context.session
with session.begin(subtransactions=True):
net = get_network(context, net_id, None)
if not net:
net = ML2_BrocadeNetwork(id=net_id, vlan=vlan,
segment_id=segment_id,
network_type='vlan',
tenant_id=tenant_id)
session.add(net)
return net
def delete_network(context, net_id):
"""Delete a brocade specific network/port-profiles."""
session = context.session
with session.begin(subtransactions=True):
net = get_network(context, net_id, None)
if net:
session.delete(net)
def get_network(context, net_id, fields=None):
"""Get brocade specific network, with vlan extension."""
session = context.session
return session.query(ML2_BrocadeNetwork).filter_by(id=net_id).first()
def get_networks(context, filters=None, fields=None):
"""Get all brocade specific networks."""
session = context.session
return session.query(ML2_BrocadeNetwork).all()
def create_port(context, port_id, network_id, physical_interface,
vlan_id, tenant_id, admin_state_up):
"""Create a brocade specific port, has policy like vlan."""
session = context.session
with session.begin(subtransactions=True):
port = get_port(context, port_id)
if not port:
port = ML2_BrocadePort(id=port_id,
network_id=network_id,
physical_interface=physical_interface,
vlan_id=vlan_id,
admin_state_up=admin_state_up,
tenant_id=tenant_id)
session.add(port)
return port
def get_port(context, port_id):
"""get a brocade specific port."""
session = context.session
return session.query(ML2_BrocadePort).filter_by(id=port_id).first()
def get_ports(context, network_id=None):
"""get a brocade specific port."""
session = context.session
return session.query(ML2_BrocadePort).filter_by(
network_id=network_id).all()
def delete_port(context, port_id):
"""delete brocade specific port."""
session = context.session
with session.begin(subtransactions=True):
port = get_port(context, port_id)
if port:
session.delete(port)
def update_port_state(context, port_id, admin_state_up):
"""Update port attributes."""
session = context.session
with session.begin(subtransactions=True):
session.query(ML2_BrocadePort).filter_by(
id=port_id).update({'admin_state_up': admin_state_up})
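# Editor's illustrative sketch (not part of the module): typical call pattern from
# a mechanism driver, assuming a Neutron request context whose session backs the
# queries above. All identifiers are placeholders.
#
#     net = create_network(context, net_id, vlan='100', segment_id='100',
#                          network_type='vlan', tenant_id=tenant)
#     port = create_port(context, port_id, net_id, physical_interface='eth0',
#                        vlan_id='100', tenant_id=tenant, admin_state_up=True)
#     update_port_state(context, port_id, admin_state_up=False)
#     delete_port(context, port_id)
#     delete_network(context, net_id)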
| apache-2.0 |
martonw/phantomjs | src/qt/qtwebkit/Source/ThirdParty/gtest/test/run_tests_util_test.py | 233 | 23693 | #!/usr/bin/env python
#
# Copyright 2009 Google Inc. All Rights Reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Tests for run_tests_util.py test runner script."""
__author__ = 'vladl@google.com (Vlad Losev)'
import os
import re
import sets
import unittest
import run_tests_util
GTEST_DBG_DIR = 'scons/build/dbg/gtest/scons'
GTEST_OPT_DIR = 'scons/build/opt/gtest/scons'
GTEST_OTHER_DIR = 'scons/build/other/gtest/scons'
def AddExeExtension(path):
"""Appends .exe to the path on Windows or Cygwin."""
if run_tests_util.IS_WINDOWS or run_tests_util.IS_CYGWIN:
return path + '.exe'
else:
return path
class FakePath(object):
"""A fake os.path module for testing."""
def __init__(self, current_dir=os.getcwd(), known_paths=None):
self.current_dir = current_dir
self.tree = {}
self.path_separator = os.sep
# known_paths contains either absolute or relative paths. Relative paths
# are absolutized with self.current_dir.
if known_paths:
self._AddPaths(known_paths)
def _AddPath(self, path):
ends_with_slash = path.endswith('/')
path = self.abspath(path)
if ends_with_slash:
path += self.path_separator
name_list = path.split(self.path_separator)
tree = self.tree
for name in name_list[:-1]:
if not name:
continue
if name in tree:
tree = tree[name]
else:
tree[name] = {}
tree = tree[name]
name = name_list[-1]
if name:
if name in tree:
assert tree[name] == 1
else:
tree[name] = 1
def _AddPaths(self, paths):
for path in paths:
self._AddPath(path)
def PathElement(self, path):
"""Returns an internal representation of directory tree entry for path."""
tree = self.tree
name_list = self.abspath(path).split(self.path_separator)
for name in name_list:
if not name:
continue
tree = tree.get(name, None)
if tree is None:
break
return tree
# Silences pylint warning about using standard names.
# pylint: disable-msg=C6409
def normpath(self, path):
return os.path.normpath(path)
def abspath(self, path):
return self.normpath(os.path.join(self.current_dir, path))
def isfile(self, path):
return self.PathElement(self.abspath(path)) == 1
def isdir(self, path):
return type(self.PathElement(self.abspath(path))) == type(dict())
def basename(self, path):
return os.path.basename(path)
def dirname(self, path):
return os.path.dirname(path)
def join(self, *kargs):
return os.path.join(*kargs)
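# Editor's note (illustrative, not part of the test): how FakePath behaves, mirroring
# its use in setUp() further down. Relative known_paths are rooted at current_dir;
# files map to 1 and directories to nested dicts, so:
#
#     fake = FakePath(current_dir='/repo',
#                     known_paths=['scons/build/dbg/gtest_unittest', 'test/'])
#     fake.isfile('scons/build/dbg/gtest_unittest')   # True
#     fake.isdir('/repo/test')                        # True
#     fake.isfile('missing')                          # False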
class FakeOs(object):
"""A fake os module for testing."""
P_WAIT = os.P_WAIT
def __init__(self, fake_path_module):
self.path = fake_path_module
# Some methods/attributes are delegated to the real os module.
self.environ = os.environ
# pylint: disable-msg=C6409
def listdir(self, path):
assert self.path.isdir(path)
return self.path.PathElement(path).iterkeys()
def spawnv(self, wait, executable, *kargs):
assert wait == FakeOs.P_WAIT
return self.spawn_impl(executable, kargs)
class GetTestsToRunTest(unittest.TestCase):
"""Exercises TestRunner.GetTestsToRun."""
def NormalizeGetTestsToRunResults(self, results):
"""Normalizes path data returned from GetTestsToRun for comparison."""
def NormalizePythonTestPair(pair):
"""Normalizes path data in the (directory, python_script) pair."""
return (os.path.normpath(pair[0]), os.path.normpath(pair[1]))
def NormalizeBinaryTestPair(pair):
"""Normalizes path data in the (directory, binary_executable) pair."""
directory, executable = map(os.path.normpath, pair)
# On Windows and Cygwin, the test file names have the .exe extension, but
# they can be invoked either by name or by name+extension. Our test must
# accommodate both situations.
if run_tests_util.IS_WINDOWS or run_tests_util.IS_CYGWIN:
executable = re.sub(r'\.exe$', '', executable)
return (directory, executable)
python_tests = sets.Set(map(NormalizePythonTestPair, results[0]))
binary_tests = sets.Set(map(NormalizeBinaryTestPair, results[1]))
return (python_tests, binary_tests)
def AssertResultsEqual(self, results, expected):
"""Asserts results returned by GetTestsToRun equal to expected results."""
self.assertEqual(self.NormalizeGetTestsToRunResults(results),
self.NormalizeGetTestsToRunResults(expected),
'Incorrect set of tests returned:\n%s\nexpected:\n%s' %
(results, expected))
def setUp(self):
self.fake_os = FakeOs(FakePath(
current_dir=os.path.abspath(os.path.dirname(run_tests_util.__file__)),
known_paths=[AddExeExtension(GTEST_DBG_DIR + '/gtest_unittest'),
AddExeExtension(GTEST_OPT_DIR + '/gtest_unittest'),
'test/gtest_color_test.py']))
self.fake_configurations = ['dbg', 'opt']
self.test_runner = run_tests_util.TestRunner(script_dir='.',
injected_os=self.fake_os,
injected_subprocess=None)
def testBinaryTestsOnly(self):
"""Exercises GetTestsToRun with parameters designating binary tests only."""
# A default build.
self.AssertResultsEqual(
self.test_runner.GetTestsToRun(
['gtest_unittest'],
'',
False,
available_configurations=self.fake_configurations),
([],
[(GTEST_DBG_DIR, GTEST_DBG_DIR + '/gtest_unittest')]))
# An explicitly specified directory.
self.AssertResultsEqual(
self.test_runner.GetTestsToRun(
[GTEST_DBG_DIR, 'gtest_unittest'],
'',
False,
available_configurations=self.fake_configurations),
([],
[(GTEST_DBG_DIR, GTEST_DBG_DIR + '/gtest_unittest')]))
# A particular configuration.
self.AssertResultsEqual(
self.test_runner.GetTestsToRun(
['gtest_unittest'],
'other',
False,
available_configurations=self.fake_configurations),
([],
[(GTEST_OTHER_DIR, GTEST_OTHER_DIR + '/gtest_unittest')]))
# All available configurations
self.AssertResultsEqual(
self.test_runner.GetTestsToRun(
['gtest_unittest'],
'all',
False,
available_configurations=self.fake_configurations),
([],
[(GTEST_DBG_DIR, GTEST_DBG_DIR + '/gtest_unittest'),
(GTEST_OPT_DIR, GTEST_OPT_DIR + '/gtest_unittest')]))
# All built configurations (unbuilt don't cause failure).
self.AssertResultsEqual(
self.test_runner.GetTestsToRun(
['gtest_unittest'],
'',
True,
available_configurations=self.fake_configurations + ['unbuilt']),
([],
[(GTEST_DBG_DIR, GTEST_DBG_DIR + '/gtest_unittest'),
(GTEST_OPT_DIR, GTEST_OPT_DIR + '/gtest_unittest')]))
# A combination of an explicit directory and a configuration.
self.AssertResultsEqual(
self.test_runner.GetTestsToRun(
[GTEST_DBG_DIR, 'gtest_unittest'],
'opt',
False,
available_configurations=self.fake_configurations),
([],
[(GTEST_DBG_DIR, GTEST_DBG_DIR + '/gtest_unittest'),
(GTEST_OPT_DIR, GTEST_OPT_DIR + '/gtest_unittest')]))
# Same test specified in an explicit directory and via a configuration.
self.AssertResultsEqual(
self.test_runner.GetTestsToRun(
[GTEST_DBG_DIR, 'gtest_unittest'],
'dbg',
False,
available_configurations=self.fake_configurations),
([],
[(GTEST_DBG_DIR, GTEST_DBG_DIR + '/gtest_unittest')]))
# All built configurations + explicit directory + explicit configuration.
self.AssertResultsEqual(
self.test_runner.GetTestsToRun(
[GTEST_DBG_DIR, 'gtest_unittest'],
'opt',
True,
available_configurations=self.fake_configurations),
([],
[(GTEST_DBG_DIR, GTEST_DBG_DIR + '/gtest_unittest'),
(GTEST_OPT_DIR, GTEST_OPT_DIR + '/gtest_unittest')]))
def testPythonTestsOnly(self):
"""Exercises GetTestsToRun with parameters designating Python tests only."""
# A default build.
self.AssertResultsEqual(
self.test_runner.GetTestsToRun(
['gtest_color_test.py'],
'',
False,
available_configurations=self.fake_configurations),
([(GTEST_DBG_DIR, 'test/gtest_color_test.py')],
[]))
# An explicitly specified directory.
self.AssertResultsEqual(
self.test_runner.GetTestsToRun(
[GTEST_DBG_DIR, 'test/gtest_color_test.py'],
'',
False,
available_configurations=self.fake_configurations),
([(GTEST_DBG_DIR, 'test/gtest_color_test.py')],
[]))
# A particular configuration.
self.AssertResultsEqual(
self.test_runner.GetTestsToRun(
['gtest_color_test.py'],
'other',
False,
available_configurations=self.fake_configurations),
([(GTEST_OTHER_DIR, 'test/gtest_color_test.py')],
[]))
# All available configurations
self.AssertResultsEqual(
self.test_runner.GetTestsToRun(
['test/gtest_color_test.py'],
'all',
False,
available_configurations=self.fake_configurations),
([(GTEST_DBG_DIR, 'test/gtest_color_test.py'),
(GTEST_OPT_DIR, 'test/gtest_color_test.py')],
[]))
# All built configurations (unbuilt don't cause failure).
self.AssertResultsEqual(
self.test_runner.GetTestsToRun(
['gtest_color_test.py'],
'',
True,
available_configurations=self.fake_configurations + ['unbuilt']),
([(GTEST_DBG_DIR, 'test/gtest_color_test.py'),
(GTEST_OPT_DIR, 'test/gtest_color_test.py')],
[]))
# A combination of an explicit directory and a configuration.
self.AssertResultsEqual(
self.test_runner.GetTestsToRun(
[GTEST_DBG_DIR, 'gtest_color_test.py'],
'opt',
False,
available_configurations=self.fake_configurations),
([(GTEST_DBG_DIR, 'test/gtest_color_test.py'),
(GTEST_OPT_DIR, 'test/gtest_color_test.py')],
[]))
# Same test specified in an explicit directory and via a configuration.
self.AssertResultsEqual(
self.test_runner.GetTestsToRun(
[GTEST_DBG_DIR, 'gtest_color_test.py'],
'dbg',
False,
available_configurations=self.fake_configurations),
([(GTEST_DBG_DIR, 'test/gtest_color_test.py')],
[]))
# All built configurations + explicit directory + explicit configuration.
self.AssertResultsEqual(
self.test_runner.GetTestsToRun(
[GTEST_DBG_DIR, 'gtest_color_test.py'],
'opt',
True,
available_configurations=self.fake_configurations),
([(GTEST_DBG_DIR, 'test/gtest_color_test.py'),
(GTEST_OPT_DIR, 'test/gtest_color_test.py')],
[]))
def testCombinationOfBinaryAndPythonTests(self):
"""Exercises GetTestsToRun with mixed binary/Python tests."""
# Use only default configuration for this test.
# Neither binary nor Python tests are specified so find all.
self.AssertResultsEqual(
self.test_runner.GetTestsToRun(
[],
'',
False,
available_configurations=self.fake_configurations),
([(GTEST_DBG_DIR, 'test/gtest_color_test.py')],
[(GTEST_DBG_DIR, GTEST_DBG_DIR + '/gtest_unittest')]))
# Specifying both binary and Python tests.
self.AssertResultsEqual(
self.test_runner.GetTestsToRun(
['gtest_unittest', 'gtest_color_test.py'],
'',
False,
available_configurations=self.fake_configurations),
([(GTEST_DBG_DIR, 'test/gtest_color_test.py')],
[(GTEST_DBG_DIR, GTEST_DBG_DIR + '/gtest_unittest')]))
# Specifying binary tests suppresses Python tests.
self.AssertResultsEqual(
self.test_runner.GetTestsToRun(
['gtest_unittest'],
'',
False,
available_configurations=self.fake_configurations),
([],
[(GTEST_DBG_DIR, GTEST_DBG_DIR + '/gtest_unittest')]))
# Specifying Python tests suppresses binary tests.
self.AssertResultsEqual(
self.test_runner.GetTestsToRun(
['gtest_color_test.py'],
'',
False,
available_configurations=self.fake_configurations),
([(GTEST_DBG_DIR, 'test/gtest_color_test.py')],
[]))
def testIgnoresNonTestFiles(self):
"""Verifies that GetTestsToRun ignores non-test files in the filesystem."""
self.fake_os = FakeOs(FakePath(
current_dir=os.path.abspath(os.path.dirname(run_tests_util.__file__)),
known_paths=[AddExeExtension(GTEST_DBG_DIR + '/gtest_nontest'),
'test/']))
self.test_runner = run_tests_util.TestRunner(script_dir='.',
injected_os=self.fake_os,
injected_subprocess=None)
self.AssertResultsEqual(
self.test_runner.GetTestsToRun(
[],
'',
True,
available_configurations=self.fake_configurations),
([], []))
def testWorksFromDifferentDir(self):
"""Exercises GetTestsToRun from a directory different from run_test.py's."""
    # Here we simulate a test script in directory /d/ called from the
# directory /a/b/c/.
self.fake_os = FakeOs(FakePath(
current_dir=os.path.abspath('/a/b/c'),
known_paths=[
'/a/b/c/',
AddExeExtension('/d/' + GTEST_DBG_DIR + '/gtest_unittest'),
AddExeExtension('/d/' + GTEST_OPT_DIR + '/gtest_unittest'),
'/d/test/gtest_color_test.py']))
self.fake_configurations = ['dbg', 'opt']
self.test_runner = run_tests_util.TestRunner(script_dir='/d/',
injected_os=self.fake_os,
injected_subprocess=None)
# A binary test.
self.AssertResultsEqual(
self.test_runner.GetTestsToRun(
['gtest_unittest'],
'',
False,
available_configurations=self.fake_configurations),
([],
[('/d/' + GTEST_DBG_DIR, '/d/' + GTEST_DBG_DIR + '/gtest_unittest')]))
# A Python test.
self.AssertResultsEqual(
self.test_runner.GetTestsToRun(
['gtest_color_test.py'],
'',
False,
available_configurations=self.fake_configurations),
([('/d/' + GTEST_DBG_DIR, '/d/test/gtest_color_test.py')], []))
def testNonTestBinary(self):
"""Exercises GetTestsToRun with a non-test parameter."""
self.assert_(
not self.test_runner.GetTestsToRun(
['gtest_unittest_not_really'],
'',
False,
available_configurations=self.fake_configurations))
def testNonExistingPythonTest(self):
"""Exercises GetTestsToRun with a non-existent Python test parameter."""
self.assert_(
not self.test_runner.GetTestsToRun(
['nonexistent_test.py'],
'',
False,
available_configurations=self.fake_configurations))
if run_tests_util.IS_WINDOWS or run_tests_util.IS_CYGWIN:
def testDoesNotPickNonExeFilesOnWindows(self):
"""Verifies that GetTestsToRun does not find _test files on Windows."""
self.fake_os = FakeOs(FakePath(
current_dir=os.path.abspath(os.path.dirname(run_tests_util.__file__)),
known_paths=['/d/' + GTEST_DBG_DIR + '/gtest_test', 'test/']))
self.test_runner = run_tests_util.TestRunner(script_dir='.',
injected_os=self.fake_os,
injected_subprocess=None)
self.AssertResultsEqual(
self.test_runner.GetTestsToRun(
[],
'',
True,
available_configurations=self.fake_configurations),
([], []))
class RunTestsTest(unittest.TestCase):
"""Exercises TestRunner.RunTests."""
def SpawnSuccess(self, unused_executable, unused_argv):
"""Fakes test success by returning 0 as an exit code."""
self.num_spawn_calls += 1
return 0
def SpawnFailure(self, unused_executable, unused_argv):
"""Fakes test success by returning 1 as an exit code."""
self.num_spawn_calls += 1
return 1
def setUp(self):
self.fake_os = FakeOs(FakePath(
current_dir=os.path.abspath(os.path.dirname(run_tests_util.__file__)),
known_paths=[
AddExeExtension(GTEST_DBG_DIR + '/gtest_unittest'),
AddExeExtension(GTEST_OPT_DIR + '/gtest_unittest'),
'test/gtest_color_test.py']))
self.fake_configurations = ['dbg', 'opt']
self.test_runner = run_tests_util.TestRunner(
script_dir=os.path.dirname(__file__) or '.',
injected_os=self.fake_os,
injected_subprocess=None)
    self.num_spawn_calls = 0 # Number of spawn calls made so far.
def testRunPythonTestSuccess(self):
"""Exercises RunTests to handle a Python test success."""
self.fake_os.spawn_impl = self.SpawnSuccess
self.assertEqual(
self.test_runner.RunTests(
[(GTEST_DBG_DIR, 'test/gtest_color_test.py')],
[]),
0)
self.assertEqual(self.num_spawn_calls, 1)
def testRunBinaryTestSuccess(self):
"""Exercises RunTests to handle a binary test success."""
self.fake_os.spawn_impl = self.SpawnSuccess
self.assertEqual(
self.test_runner.RunTests(
[],
[(GTEST_DBG_DIR, GTEST_DBG_DIR + '/gtest_unittest')]),
0)
self.assertEqual(self.num_spawn_calls, 1)
  def testRunPythonTestFailure(self):
"""Exercises RunTests to handle a Python test failure."""
self.fake_os.spawn_impl = self.SpawnFailure
self.assertEqual(
self.test_runner.RunTests(
[(GTEST_DBG_DIR, 'test/gtest_color_test.py')],
[]),
1)
self.assertEqual(self.num_spawn_calls, 1)
def testRunBinaryTestFailure(self):
"""Exercises RunTests to handle a binary test failure."""
self.fake_os.spawn_impl = self.SpawnFailure
self.assertEqual(
self.test_runner.RunTests(
[],
[(GTEST_DBG_DIR, GTEST_DBG_DIR + '/gtest_unittest')]),
1)
self.assertEqual(self.num_spawn_calls, 1)
def testCombinedTestSuccess(self):
"""Exercises RunTests to handle a success of both Python and binary test."""
self.fake_os.spawn_impl = self.SpawnSuccess
self.assertEqual(
self.test_runner.RunTests(
[(GTEST_DBG_DIR, GTEST_DBG_DIR + '/gtest_unittest')],
[(GTEST_DBG_DIR, GTEST_DBG_DIR + '/gtest_unittest')]),
0)
self.assertEqual(self.num_spawn_calls, 2)
def testCombinedTestSuccessAndFailure(self):
"""Exercises RunTests to handle a success of both Python and binary test."""
def SpawnImpl(executable, argv):
self.num_spawn_calls += 1
# Simulates failure of a Python test and success of a binary test.
if '.py' in executable or '.py' in argv[0]:
return 1
else:
return 0
self.fake_os.spawn_impl = SpawnImpl
self.assertEqual(
self.test_runner.RunTests(
[(GTEST_DBG_DIR, GTEST_DBG_DIR + '/gtest_unittest')],
[(GTEST_DBG_DIR, GTEST_DBG_DIR + '/gtest_unittest')]),
0)
self.assertEqual(self.num_spawn_calls, 2)
class ParseArgsTest(unittest.TestCase):
"""Exercises ParseArgs."""
def testNoOptions(self):
options, args = run_tests_util.ParseArgs('gtest', argv=['script.py'])
self.assertEqual(args, ['script.py'])
self.assert_(options.configurations is None)
self.assertFalse(options.built_configurations)
def testOptionC(self):
options, args = run_tests_util.ParseArgs(
'gtest', argv=['script.py', '-c', 'dbg'])
self.assertEqual(args, ['script.py'])
self.assertEqual(options.configurations, 'dbg')
self.assertFalse(options.built_configurations)
def testOptionA(self):
options, args = run_tests_util.ParseArgs('gtest', argv=['script.py', '-a'])
self.assertEqual(args, ['script.py'])
self.assertEqual(options.configurations, 'all')
self.assertFalse(options.built_configurations)
def testOptionB(self):
options, args = run_tests_util.ParseArgs('gtest', argv=['script.py', '-b'])
self.assertEqual(args, ['script.py'])
self.assert_(options.configurations is None)
self.assertTrue(options.built_configurations)
def testOptionCAndOptionB(self):
options, args = run_tests_util.ParseArgs(
'gtest', argv=['script.py', '-c', 'dbg', '-b'])
self.assertEqual(args, ['script.py'])
self.assertEqual(options.configurations, 'dbg')
self.assertTrue(options.built_configurations)
def testOptionH(self):
help_called = [False]
# Suppresses lint warning on unused arguments. These arguments are
# required by optparse, even though they are unused.
# pylint: disable-msg=W0613
def VerifyHelp(option, opt, value, parser):
help_called[0] = True
# Verifies that -h causes the help callback to be called.
help_called[0] = False
_, args = run_tests_util.ParseArgs(
'gtest', argv=['script.py', '-h'], help_callback=VerifyHelp)
self.assertEqual(args, ['script.py'])
self.assertTrue(help_called[0])
# Verifies that --help causes the help callback to be called.
help_called[0] = False
_, args = run_tests_util.ParseArgs(
'gtest', argv=['script.py', '--help'], help_callback=VerifyHelp)
self.assertEqual(args, ['script.py'])
self.assertTrue(help_called[0])
if __name__ == '__main__':
unittest.main()
| bsd-3-clause |
lancezlin/ml_template_py | lib/python2.7/site-packages/pygments/styles/vs.py | 50 | 1073 | # -*- coding: utf-8 -*-
"""
pygments.styles.vs
~~~~~~~~~~~~~~~~~~
Simple style with MS Visual Studio colors.
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.style import Style
from pygments.token import Keyword, Name, Comment, String, Error, \
Operator, Generic
class VisualStudioStyle(Style):
background_color = "#ffffff"
default_style = ""
styles = {
Comment: "#008000",
Comment.Preproc: "#0000ff",
Keyword: "#0000ff",
Operator.Word: "#0000ff",
Keyword.Type: "#2b91af",
Name.Class: "#2b91af",
String: "#a31515",
Generic.Heading: "bold",
Generic.Subheading: "bold",
Generic.Emph: "italic",
Generic.Strong: "bold",
Generic.Prompt: "bold",
Error: "border:#FF0000"
}
| mit |
gacarrillor/QGIS | python/plugins/db_manager/db_plugins/postgis/plugins/qgis_topoview/__init__.py | 30 | 12901 | # -*- coding: utf-8 -*-
"""
/***************************************************************************
Name : TopoViewer plugin for DB Manager
Description : Create a project to display topology schema on Qgis
Date : Sep 23, 2011
copyright : (C) 2011 by Giuseppe Sucameli
(C) 2019 by Sandro Santilli
email : strk@kbt.io
Based on qgis_pgis_topoview by Sandro Santilli <strk@kbt.io>
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
from builtins import str
from qgis.PyQt.QtWidgets import QAction
from qgis.PyQt.QtCore import Qt
from qgis.PyQt.QtGui import QIcon
from qgis.core import Qgis, QgsProject, QgsVectorLayer, QgsWkbTypes, QgsLayerTreeGroup
from qgis.gui import QgsMessageBar
import os
current_path = os.path.dirname(__file__)
# The load function is called when the "db" database or either one of its
# children db objects (table o schema) is selected by the user.
# @param db is the selected database
# @param mainwindow is the DBManager mainwindow
def load(db, mainwindow):
# check whether the selected database supports topology
# (search for topology.topology)
sql = u"""SELECT count(*)
FROM pg_class AS cls JOIN pg_namespace AS nsp ON nsp.oid = cls.relnamespace
WHERE cls.relname = 'topology' AND nsp.nspname = 'topology'"""
res = db.executeSql(sql)
if res is None or len(res) < 1 or int(res[0][0]) <= 0:
return
# add the action to the DBManager menu
action = QAction(QIcon(), "&TopoViewer", db)
mainwindow.registerAction(action, "&Schema", run)
# The run function is called once the user clicks on the action TopoViewer
# (look above at the load function) from the DBManager menu/toolbar.
# @param item is the selected db item (either db, schema or table)
# @param action is the clicked action on the DBManager menu/toolbar
# @param mainwindow is the DBManager mainwindow
def run(item, action, mainwindow):
db = item.database()
uri = db.uri()
iface = mainwindow.iface
quoteId = db.connector.quoteId
quoteStr = db.connector.quoteString
# check if the selected item is a topology schema
isTopoSchema = False
if not hasattr(item, 'schema'):
mainwindow.infoBar.pushMessage("Invalid topology", u'Select a topology schema to continue.', Qgis.Info,
mainwindow.iface.messageTimeout())
return False
if item.schema() is not None:
sql = u"SELECT srid FROM topology.topology WHERE name = %s" % quoteStr(item.schema().name)
res = db.executeSql(sql)
isTopoSchema = len(res) > 0
if not isTopoSchema:
mainwindow.infoBar.pushMessage("Invalid topology",
u'Schema "{0}" is not registered in topology.topology.'.format(
item.schema().name), Qgis.Warning,
mainwindow.iface.messageTimeout())
return False
if (res[0][0] < 0):
mainwindow.infoBar.pushMessage("WARNING", u'Topology "{0}" is registered as having a srid of {1} in topology.topology, we will assume 0 (for unknown)'.format(item.schema().name, res[0]), Qgis.Warning, mainwindow.iface.messageTimeout())
toposrid = '0'
else:
toposrid = str(res[0][0])
# load layers into the current project
toponame = item.schema().name
template_dir = os.path.join(current_path, 'templates')
# do not refresh the canvas until all the layers are added
wasFrozen = iface.mapCanvas().isFrozen()
iface.mapCanvas().freeze()
try:
provider = db.dbplugin().providerName()
uri = db.uri()
# Force use of estimated metadata (topologies can be big)
uri.setUseEstimatedMetadata(True)
# FACES
# face mbr
uri.setDataSource(toponame, 'face', 'mbr', '', 'face_id')
uri.setSrid(toposrid)
uri.setWkbType(QgsWkbTypes.Polygon)
layerFaceMbr = QgsVectorLayer(uri.uri(False), u'%s.face_mbr' % toponame, provider)
layerFaceMbr.loadNamedStyle(os.path.join(template_dir, 'face_mbr.qml'))
face_extent = layerFaceMbr.extent()
# face geometry
sql = u'SELECT face_id, mbr, topology.ST_GetFaceGeometry(%s,' \
'face_id)::geometry(polygon, %s) as geom ' \
'FROM %s.face WHERE face_id > 0' % \
(quoteStr(toponame), toposrid, quoteId(toponame))
uri.setDataSource('', u'(%s\n)' % sql, 'geom', '', 'face_id')
uri.setParam('bbox', 'mbr')
uri.setSrid(toposrid)
uri.setWkbType(QgsWkbTypes.Polygon)
layerFaceGeom = QgsVectorLayer(uri.uri(False), u'%s.face' % toponame, provider)
layerFaceGeom.setExtent(face_extent)
layerFaceGeom.loadNamedStyle(os.path.join(template_dir, 'face.qml'))
# face_seed
sql = u'SELECT face_id, mbr, ST_PointOnSurface(' \
'topology.ST_GetFaceGeometry(%s,' \
'face_id))::geometry(point, %s) as geom ' \
'FROM %s.face WHERE face_id > 0' % \
(quoteStr(toponame), toposrid, quoteId(toponame))
uri.setDataSource('', u'(%s)' % sql, 'geom', '', 'face_id')
uri.setParam('bbox', 'mbr')
uri.setSrid(toposrid)
uri.setWkbType(QgsWkbTypes.Point)
layerFaceSeed = QgsVectorLayer(uri.uri(False), u'%s.face_seed' % toponame, provider)
layerFaceSeed.setExtent(face_extent)
layerFaceSeed.loadNamedStyle(os.path.join(template_dir, 'face_seed.qml'))
# TODO: add polygon0, polygon1 and polygon2 ?
# NODES
# node
uri.setDataSource(toponame, 'node', 'geom', '', 'node_id')
uri.removeParam('bbox')
uri.setSrid(toposrid)
uri.setWkbType(QgsWkbTypes.Point)
layerNode = QgsVectorLayer(uri.uri(False), u'%s.node' % toponame, provider)
layerNode.loadNamedStyle(os.path.join(template_dir, 'node.qml'))
node_extent = layerNode.extent()
# node labels
uri.setDataSource(toponame, 'node', 'geom', '', 'node_id')
uri.setSrid(toposrid)
uri.setWkbType(QgsWkbTypes.Point)
uri.removeParam('bbox')
layerNodeLabel = QgsVectorLayer(uri.uri(False), u'%s.node_id' % toponame, provider)
layerNodeLabel.setExtent(node_extent)
layerNodeLabel.loadNamedStyle(os.path.join(template_dir, 'node_label.qml'))
# EDGES
# edge
uri.setDataSource(toponame, 'edge_data', 'geom', '', 'edge_id')
uri.setSrid(toposrid)
uri.setWkbType(QgsWkbTypes.LineString)
uri.removeParam('bbox')
layerEdge = QgsVectorLayer(uri.uri(False), u'%s.edge' % toponame, provider)
edge_extent = layerEdge.extent()
# directed edge
uri.setDataSource(toponame, 'edge_data', 'geom', '', 'edge_id')
uri.setSrid(toposrid)
uri.setWkbType(QgsWkbTypes.LineString)
uri.removeParam('bbox')
layerDirectedEdge = QgsVectorLayer(uri.uri(False), u'%s.directed_edge' % toponame, provider)
layerDirectedEdge.setExtent(edge_extent)
layerDirectedEdge.loadNamedStyle(os.path.join(template_dir, 'edge.qml'))
# edge labels
uri.setDataSource(toponame, 'edge_data', 'geom', '', 'edge_id')
uri.setSrid(toposrid)
uri.setWkbType(QgsWkbTypes.LineString)
uri.removeParam('bbox')
layerEdgeLabel = QgsVectorLayer(uri.uri(False), u'%s.edge_id' % toponame, provider)
layerEdgeLabel.setExtent(edge_extent)
layerEdgeLabel.loadNamedStyle(os.path.join(template_dir, 'edge_label.qml'))
# face_left
uri.setDataSource(toponame, 'edge_data', 'geom', '', 'edge_id')
uri.setSrid(toposrid)
uri.setWkbType(QgsWkbTypes.LineString)
uri.removeParam('bbox')
layerFaceLeft = QgsVectorLayer(uri.uri(False), u'%s.face_left' % toponame, provider)
layerFaceLeft.setExtent(edge_extent)
layerFaceLeft.loadNamedStyle(os.path.join(template_dir, 'face_left.qml'))
# face_right
uri.setDataSource(toponame, 'edge_data', 'geom', '', 'edge_id')
uri.setSrid(toposrid)
uri.setWkbType(QgsWkbTypes.LineString)
uri.removeParam('bbox')
layerFaceRight = QgsVectorLayer(uri.uri(False), u'%s.face_right' % toponame, provider)
layerFaceRight.setExtent(edge_extent)
layerFaceRight.loadNamedStyle(os.path.join(template_dir, 'face_right.qml'))
# next_left
uri.setDataSource(toponame, 'edge_data', 'geom', '', 'edge_id')
uri.setSrid(toposrid)
uri.setWkbType(QgsWkbTypes.LineString)
uri.removeParam('bbox')
layerNextLeft = QgsVectorLayer(uri.uri(False), u'%s.next_left' % toponame, provider)
layerNextLeft.setExtent(edge_extent)
layerNextLeft.loadNamedStyle(os.path.join(template_dir, 'next_left.qml'))
# next_right
uri.setDataSource(toponame, 'edge_data', 'geom', '', 'edge_id')
uri.setSrid(toposrid)
uri.setWkbType(QgsWkbTypes.LineString)
uri.removeParam('bbox')
layerNextRight = QgsVectorLayer(uri.uri(False), u'%s.next_right' % toponame, provider)
layerNextRight.setExtent(edge_extent)
layerNextRight.loadNamedStyle(os.path.join(template_dir, 'next_right.qml'))
# Add layers to the layer tree
faceLayers = [layerFaceMbr, layerFaceGeom, layerFaceSeed]
nodeLayers = [layerNode, layerNodeLabel]
edgeLayers = [layerEdge, layerDirectedEdge, layerEdgeLabel, layerFaceLeft, layerFaceRight, layerNextLeft, layerNextRight]
QgsProject.instance().addMapLayers(faceLayers, False)
QgsProject.instance().addMapLayers(nodeLayers, False)
QgsProject.instance().addMapLayers(edgeLayers, False)
# Organize layers in groups
groupFaces = QgsLayerTreeGroup(u'Faces')
for layer in faceLayers:
nodeLayer = groupFaces.addLayer(layer)
nodeLayer.setItemVisibilityChecked(False)
nodeLayer.setExpanded(False)
groupNodes = QgsLayerTreeGroup(u'Nodes')
for layer in nodeLayers:
nodeLayer = groupNodes.addLayer(layer)
nodeLayer.setItemVisibilityChecked(False)
nodeLayer.setExpanded(False)
groupEdges = QgsLayerTreeGroup(u'Edges')
for layer in edgeLayers:
nodeLayer = groupEdges.addLayer(layer)
nodeLayer.setItemVisibilityChecked(False)
nodeLayer.setExpanded(False)
supergroup = QgsLayerTreeGroup(u'Topology "%s"' % toponame)
supergroup.insertChildNodes(-1, [groupFaces, groupNodes, groupEdges])
layerTree = QgsProject.instance().layerTreeRoot()
layerTree.addChildNode(supergroup)
# Set layers rendering order
order = layerTree.layerOrder()
order.insert(0, order.pop(order.index(layerFaceMbr)))
order.insert(0, order.pop(order.index(layerFaceGeom)))
order.insert(0, order.pop(order.index(layerEdge)))
order.insert(0, order.pop(order.index(layerDirectedEdge)))
order.insert(0, order.pop(order.index(layerNode)))
order.insert(0, order.pop(order.index(layerFaceSeed)))
order.insert(0, order.pop(order.index(layerNodeLabel)))
order.insert(0, order.pop(order.index(layerEdgeLabel)))
order.insert(0, order.pop(order.index(layerNextLeft)))
order.insert(0, order.pop(order.index(layerNextRight)))
order.insert(0, order.pop(order.index(layerFaceLeft)))
order.insert(0, order.pop(order.index(layerFaceRight)))
layerTree.setHasCustomLayerOrder(True)
layerTree.setCustomLayerOrder(order)
finally:
# Set canvas extent to topology extent, if not yet initialized
canvas = iface.mapCanvas()
if (canvas.fullExtent().isNull()):
ext = node_extent
ext.combineExtentWith(edge_extent)
# Grow by 1/20 of largest side
ext = ext.buffered(max(ext.width(), ext.height()) / 20)
canvas.setExtent(ext)
# restore canvas render flag
if not wasFrozen:
iface.mapCanvas().freeze(False)
return True
| gpl-2.0 |
gregbanks/suds | suds/bindings/document.py | 204 | 5792 | # This program is free software; you can redistribute it and/or modify
# it under the terms of the (LGPL) GNU Lesser General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library Lesser General Public License for more details at
# ( http://www.gnu.org/licenses/lgpl.html ).
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
# written by: Jeff Ortel ( jortel@redhat.com )
"""
Provides classes for the (WS) SOAP I{document/literal}.
"""
from logging import getLogger
from suds import *
from suds.bindings.binding import Binding
from suds.sax.element import Element
log = getLogger(__name__)
class Document(Binding):
"""
The document/literal style. Literal is the only (@use) supported
since document/encoded is pretty much dead.
Although the soap specification supports multiple documents within the soap
<body/>, it is very uncommon. As such, suds presents an I{RPC} view of
service methods defined with a single document parameter. This is done so
    that the user can pass individual parameters instead of one single document.
    To support the complete specification, service methods defined with multiple
    documents (multiple message parts) must present a I{document} view for that method.
"""
def bodycontent(self, method, args, kwargs):
#
# The I{wrapped} vs I{bare} style is detected in 2 ways.
# If there is 2+ parts in the message then it is I{bare}.
# If there is only (1) part and that part resolves to a builtin then
# it is I{bare}. Otherwise, it is I{wrapped}.
#
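        # Illustrative example (element names are hypothetical): a message with
        # a single part resolving to a complex element such as <GetQuote> is
        # wrapped, so the caller passes its children (e.g. symbol=...) as
        # individual arguments; a single part resolving to a builtin such as
        # xs:string is bare and the caller passes the document value itself.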
if not len(method.soap.input.body.parts):
return ()
wrapped = method.soap.input.body.wrapped
if wrapped:
pts = self.bodypart_types(method)
root = self.document(pts[0])
else:
root = []
n = 0
for pd in self.param_defs(method):
if n < len(args):
value = args[n]
else:
value = kwargs.get(pd[0])
n += 1
p = self.mkparam(method, pd, value)
if p is None:
continue
if not wrapped:
ns = pd[1].namespace('ns0')
p.setPrefix(ns[0], ns[1])
root.append(p)
return root
def replycontent(self, method, body):
wrapped = method.soap.output.body.wrapped
if wrapped:
return body[0].children
else:
return body.children
def document(self, wrapper):
"""
Get the document root. For I{document/literal}, this is the
        name of the wrapper element qualified by the schema tns.
@param wrapper: The method name.
@type wrapper: L{xsd.sxbase.SchemaObject}
@return: A root element.
@rtype: L{Element}
"""
tag = wrapper[1].name
ns = wrapper[1].namespace('ns0')
d = Element(tag, ns=ns)
return d
def mkparam(self, method, pdef, object):
#
# Expand list parameters into individual parameters
# each with the type information. This is because in document
# arrays are simply unbounded elements.
#
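        # e.g. a caller passing items=[a, b] (hypothetical element name) yields
        # two sibling <items> elements rather than a single list wrapper.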
if isinstance(object, (list, tuple)):
tags = []
for item in object:
tags.append(self.mkparam(method, pdef, item))
return tags
else:
return Binding.mkparam(self, method, pdef, object)
def param_defs(self, method):
#
# Get parameter definitions for document literal.
# The I{wrapped} vs I{bare} style is detected in 2 ways.
# If there is 2+ parts in the message then it is I{bare}.
# If there is only (1) part and that part resolves to a builtin then
# it is I{bare}. Otherwise, it is I{wrapped}.
#
pts = self.bodypart_types(method)
wrapped = method.soap.input.body.wrapped
if not wrapped:
return pts
result = []
# wrapped
for p in pts:
resolved = p[1].resolve()
for child, ancestry in resolved:
if child.isattr():
continue
if self.bychoice(ancestry):
log.debug(
'%s\ncontained by <choice/>, excluded as param for %s()',
child,
method.name)
continue
result.append((child.name, child))
return result
def returned_types(self, method):
result = []
wrapped = method.soap.output.body.wrapped
rts = self.bodypart_types(method, input=False)
if wrapped:
for pt in rts:
resolved = pt.resolve(nobuiltin=True)
for child, ancestry in resolved:
result.append(child)
break
else:
result += rts
return result
def bychoice(self, ancestry):
"""
The ancestry contains a <choice/>
@param ancestry: A list of ancestors.
@type ancestry: list
@return: True if contains <choice/>
@rtype: boolean
"""
for x in ancestry:
if x.choice():
return True
return False | lgpl-3.0 |
jrior001/android_kernel_samsung_d2 | Documentation/networking/cxacru-cf.py | 14668 | 1626 | #!/usr/bin/env python
# Copyright 2009 Simon Arlott
#
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the Free
# Software Foundation; either version 2 of the License, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 59
# Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
# Usage: cxacru-cf.py < cxacru-cf.bin
# Output: values string suitable for the sysfs adsl_config attribute
#
# Warning: cxacru-cf.bin with MD5 hash cdbac2689969d5ed5d4850f117702110
# contains mis-aligned values which will stop the modem from being able
# to make a connection. If the first and last two bytes are removed then
# the values become valid, but the modulation will be forced to ANSI
# T1.413 only which may not be appropriate.
#
# The original binary format is a packed list of le32 values.
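#
# For example, an input of three little-endian 32-bit values 1, 0 and 5 is
# printed as "0=1 1=0 2=5" (index in hexadecimal, value in decimal).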
import sys
import struct
i = 0
while True:
buf = sys.stdin.read(4)
if len(buf) == 0:
break
elif len(buf) != 4:
sys.stdout.write("\n")
sys.stderr.write("Error: read {0} not 4 bytes\n".format(len(buf)))
sys.exit(1)
if i > 0:
sys.stdout.write(" ")
sys.stdout.write("{0:x}={1}".format(i, struct.unpack("<I", buf)[0]))
i += 1
sys.stdout.write("\n")
| gpl-2.0 |
whatsthehubbub/playpilots | ebi/stereoscoop/views.py | 1 | 4573 | from django.http import HttpResponse, HttpResponseRedirect, Http404, HttpResponseBadRequest
from django.shortcuts import render_to_response, get_object_or_404
from django.template import RequestContext, Template
from django.template.loader import get_template, render_to_string
from stereoscoop.models import StereoscoopUnlock, StereoscoopCode, StereoscoopBadge, StereoscoopMovie
import actstream
import datetime
import logging
from metagame.services import send_tweet
import json
def stereoscoop_code(request):
if request.user.is_authenticated() and request.method=="POST":
player = request.user.get_profile()
code = request.POST.get('codeinput', '')
if not StereoscoopCode.objects.filter(code=code).exists():
# Check for unlock
try:
unlock = StereoscoopUnlock.objects.get(code=code)
# Only create the code object if the unlock exists
StereoscoopCode.objects.create(player=player, code=code)
if player.get_twitter_name():
send_tweet('@%(player)s heeft %(badgetitle)s gevonden bij De Stereoscoop %(badgelink)s #NFF' % {
'player': player.get_twitter_name(),
'badgetitle': unlock.badge.title,
'badgelink': 'http://playpilots.nl/de-stereoscoop/badge/%s/' % unlock.badge.slug
})
actstream.action.send(request.user.get_profile(), verb="scratchte op de Stereoscoop net de", target=unlock.badge)
except StereoscoopUnlock.DoesNotExist:
logging.error('we do not have an unlock for code: %s', code)
return HttpResponse(json.dumps({
'result': 0,
'error': 'Sorry, maar onze robots kunnen deze code niet ontcijferen. Weet je zeker dat je geen typefout hebt gemaakt?'
}))
return HttpResponse(json.dumps({'result': 1}))
else:
logging.error('code %s aready exists in the database', code)
return HttpResponse(json.dumps({
'result': 0,
'error': 'Iemand anders heeft deze code al geclaimed. Of was jij het?'
}))
def token_catcher(request):
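    # Expected POST payload (inferred from the parsing below): 'token',
    # 'datetime' formatted as '%Y-%m-%d %H:%M:%S', a numeric 'badge' id,
    # 'movie1'/'movie2' titles, and optional integer 'scene1'/'scene2' and
    # 'cue1'/'cue2' values.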
if request.method == "POST":
receivedParams = str(request.POST)
logging.debug('stereoscoop catcher received %s', receivedParams)
token = request.POST.get('token')
dt = datetime.datetime.strptime(request.POST.get('datetime', ''), '%Y-%m-%d %H:%M:%S')
logging.debug('got token %s and datetime %s', token, str(dt))
badgeid = int(request.POST.get('badge', ''))
badge = StereoscoopBadge.objects.get(badgeid=badgeid)
logging.debug('got badgeid %d and badge %s', badgeid, str(badge))
movie1Title = request.POST.get('movie1', '')
movie2Title = request.POST.get('movie2', '')
logging.debug('got movie titles %s and %s', movie1Title, movie2Title)
movie1 = StereoscoopMovie.objects.get(title=movie1Title)
movie2 = StereoscoopMovie.objects.get(title=movie2Title)
logging.debug('resolved to movies %s and %s', str(movie1), str(movie2))
s = StereoscoopUnlock.objects.create(code=token, time=dt, badge=badge, movie1=movie1, movie2=movie2)
logging.debug('created stereoscoop unlock %d', s.id)
scene1 = request.POST.get('scene1', '')
scene2 = request.POST.get('scene2', '')
if scene1 and scene2:
try:
s.scene1 = int(scene1)
s.scene2 = int(scene2)
except:
pass
cue1 = request.POST.get('cue1', '')
cue2 = request.POST.get('cue2', '')
if cue1 and cue2:
try:
s.cue1 = int(cue1)
s.cue2 = int(cue2)
except:
pass
s.save()
return HttpResponse('success\r\n' + receivedParams, mimetype='text/plain')
return HttpResponseBadRequest()
def stereoscoop_badge(request, slug=''):
convars = {
'current': 'games'
}
if slug:
badge = StereoscoopBadge.objects.get(slug=slug)
convars['badge'] = badge
return render_to_response('stereoscoop/badge.html', convars, context_instance=RequestContext(request)) | mit |
abstract-open-solutions/OCB | addons/auth_signup/controllers/main.py | 165 | 6011 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2012-today OpenERP SA (<http://www.openerp.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
#
##############################################################################
import logging
import werkzeug
import openerp
from openerp.addons.auth_signup.res_users import SignupError
from openerp.addons.web.controllers.main import ensure_db
from openerp import http
from openerp.http import request
from openerp.tools.translate import _
_logger = logging.getLogger(__name__)
class AuthSignupHome(openerp.addons.web.controllers.main.Home):
@http.route()
def web_login(self, *args, **kw):
ensure_db()
response = super(AuthSignupHome, self).web_login(*args, **kw)
response.qcontext.update(self.get_auth_signup_config())
if request.httprequest.method == 'GET' and request.session.uid and request.params.get('redirect'):
# Redirect if already logged in and redirect param is present
return http.redirect_with_hash(request.params.get('redirect'))
return response
@http.route('/web/signup', type='http', auth='public', website=True)
def web_auth_signup(self, *args, **kw):
qcontext = self.get_auth_signup_qcontext()
if not qcontext.get('token') and not qcontext.get('signup_enabled'):
raise werkzeug.exceptions.NotFound()
if 'error' not in qcontext and request.httprequest.method == 'POST':
try:
self.do_signup(qcontext)
return super(AuthSignupHome, self).web_login(*args, **kw)
except (SignupError, AssertionError), e:
qcontext['error'] = _(e.message)
return request.render('auth_signup.signup', qcontext)
@http.route('/web/reset_password', type='http', auth='public', website=True)
def web_auth_reset_password(self, *args, **kw):
qcontext = self.get_auth_signup_qcontext()
if not qcontext.get('token') and not qcontext.get('reset_password_enabled'):
raise werkzeug.exceptions.NotFound()
if 'error' not in qcontext and request.httprequest.method == 'POST':
try:
if qcontext.get('token'):
self.do_signup(qcontext)
return super(AuthSignupHome, self).web_login(*args, **kw)
else:
login = qcontext.get('login')
assert login, "No login provided."
res_users = request.registry.get('res.users')
res_users.reset_password(request.cr, openerp.SUPERUSER_ID, login)
qcontext['message'] = _("An email has been sent with credentials to reset your password")
except SignupError:
qcontext['error'] = _("Could not reset your password")
_logger.exception('error when resetting password')
except Exception, e:
qcontext['error'] = _(e.message)
return request.render('auth_signup.reset_password', qcontext)
def get_auth_signup_config(self):
"""retrieve the module config (which features are enabled) for the login page"""
icp = request.registry.get('ir.config_parameter')
return {
'signup_enabled': icp.get_param(request.cr, openerp.SUPERUSER_ID, 'auth_signup.allow_uninvited') == 'True',
'reset_password_enabled': icp.get_param(request.cr, openerp.SUPERUSER_ID, 'auth_signup.reset_password') == 'True',
}
def get_auth_signup_qcontext(self):
""" Shared helper returning the rendering context for signup and reset password """
qcontext = request.params.copy()
qcontext.update(self.get_auth_signup_config())
if qcontext.get('token'):
try:
# retrieve the user info (name, login or email) corresponding to a signup token
res_partner = request.registry.get('res.partner')
token_infos = res_partner.signup_retrieve_info(request.cr, openerp.SUPERUSER_ID, qcontext.get('token'))
for k, v in token_infos.items():
qcontext.setdefault(k, v)
except:
qcontext['error'] = _("Invalid signup token")
return qcontext
def do_signup(self, qcontext):
""" Shared helper that creates a res.partner out of a token """
values = dict((key, qcontext.get(key)) for key in ('login', 'name', 'password'))
assert any([k for k in values.values()]), "The form was not properly filled in."
assert values.get('password') == qcontext.get('confirm_password'), "Passwords do not match; please retype them."
self._signup_with_values(qcontext.get('token'), values)
request.cr.commit()
def _signup_with_values(self, token, values):
db, login, password = request.registry['res.users'].signup(request.cr, openerp.SUPERUSER_ID, values, token)
request.cr.commit() # as authenticate will use its own cursor we need to commit the current transaction
uid = request.session.authenticate(db, login, password)
if not uid:
raise SignupError(_('Authentification Failed.'))
# vim:expandtab:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
bemineni/eldam | docs/conf.py | 1 | 4905 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# eldam documentation build configuration file, created by
# sphinx-quickstart on Wed Mar 22 23:22:44 2017.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
from recommonmark.parser import CommonMarkParser
import sphinx_rtd_theme
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = []
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
source_suffix = ['.rst', '.md']
#source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'eldam'
copyright = '2017, Srikanth Bemineni'
author = 'Srikanth Bemineni'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '1.2.0'
# The full version, including alpha/beta/rc tags.
release = '1.2.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = "sphinx_rtd_theme"
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# -- Options for HTMLHelp output ------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'eldamdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'eldam.tex', 'eldam Documentation',
'Srikanth Bemineni', 'manual'),
]
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'eldam', 'eldam Documentation',
[author], 1)
]
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'eldam', 'eldam Documentation',
author, 'eldam', 'One line description of project.',
'Miscellaneous'),
]
source_parsers = {
'.md': CommonMarkParser,
}
| mit |
teamfx/openjfx-8u-dev-rt | modules/web/src/main/native/Source/JavaScriptCore/inspector/scripts/codegen/generate_objc_frontend_dispatcher_implementation.py | 1 | 7937 | #!/usr/bin/env python
#
# Copyright (c) 2014-2016 Apple Inc. All rights reserved.
# Copyright (c) 2014 University of Washington. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS''
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS
# BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
# THE POSSIBILITY OF SUCH DAMAGE.
import logging
import string
from string import Template
from cpp_generator import CppGenerator
from generator import Generator, ucfirst
from objc_generator import ObjCGenerator
from objc_generator_templates import ObjCGeneratorTemplates as ObjCTemplates
log = logging.getLogger('global')
class ObjCFrontendDispatcherImplementationGenerator(ObjCGenerator):
def __init__(self, *args, **kwargs):
ObjCGenerator.__init__(self, *args, **kwargs)
def output_filename(self):
return '%sEventDispatchers.mm' % self.protocol_name()
def domains_to_generate(self):
return filter(self.should_generate_events_for_domain, Generator.domains_to_generate(self))
def generate_output(self):
secondary_headers = [
'"%sTypeConversions.h"' % self.protocol_name(),
'<wtf/JSONValues.h>',
]
header_args = {
'primaryInclude': '"%sInternal.h"' % self.protocol_name(),
'secondaryIncludes': '\n'.join(['#import %s' % header for header in secondary_headers]),
}
domains = self.domains_to_generate()
sections = []
sections.append(self.generate_license())
sections.append(Template(ObjCTemplates.ImplementationPrelude).substitute(None, **header_args))
sections.extend(map(self._generate_event_dispatcher_implementations, domains))
sections.append(Template(ObjCTemplates.ImplementationPostlude).substitute(None, **header_args))
return '\n\n'.join(sections)
def _generate_event_dispatcher_implementations(self, domain):
if not self.should_generate_events_for_domain(domain):
return ''
lines = []
objc_name = '%s%sDomainEventDispatcher' % (self.objc_prefix(), domain.domain_name)
lines.append('@implementation %s' % objc_name)
lines.append('{')
lines.append(' AugmentableInspectorController* _controller;')
lines.append('}')
lines.append('')
lines.append('- (instancetype)initWithController:(AugmentableInspectorController*)controller;')
lines.append('{')
lines.append(' self = [super init];')
lines.append(' if (!self)')
lines.append(' return nil;')
lines.append(' ASSERT(controller);')
lines.append(' _controller = controller;')
lines.append(' return self;')
lines.append('}')
lines.append('')
for event in self.events_for_domain(domain):
lines.append(self._generate_event(domain, event))
lines.append('')
lines.append('@end')
return '\n'.join(lines)
def _generate_event(self, domain, event):
lines = []
lines.append(self._generate_event_signature(domain, event))
lines.append('{')
lines.append(' const FrontendRouter& router = _controller->frontendRouter();')
lines.append('')
required_pointer_parameters = filter(lambda parameter: not parameter.is_optional and ObjCGenerator.is_type_objc_pointer_type(parameter.type), event.event_parameters)
for parameter in required_pointer_parameters:
var_name = ObjCGenerator.identifier_to_objc_identifier(parameter.parameter_name)
lines.append(' THROW_EXCEPTION_FOR_REQUIRED_PARAMETER(%s, @"%s");' % (var_name, var_name))
objc_array_class = self.objc_class_for_array_type(parameter.type)
if objc_array_class and objc_array_class.startswith(self.objc_prefix()):
lines.append(' THROW_EXCEPTION_FOR_BAD_TYPE_IN_ARRAY(%s, [%s class]);' % (var_name, objc_array_class))
optional_pointer_parameters = filter(lambda parameter: parameter.is_optional and ObjCGenerator.is_type_objc_pointer_type(parameter.type), event.event_parameters)
for parameter in optional_pointer_parameters:
var_name = ObjCGenerator.identifier_to_objc_identifier(parameter.parameter_name)
lines.append(' THROW_EXCEPTION_FOR_BAD_OPTIONAL_PARAMETER(%s, @"%s");' % (var_name, var_name))
objc_array_class = self.objc_class_for_array_type(parameter.type)
if objc_array_class and objc_array_class.startswith(self.objc_prefix()):
lines.append(' THROW_EXCEPTION_FOR_BAD_TYPE_IN_OPTIONAL_ARRAY(%s, [%s class]);' % (var_name, objc_array_class))
if required_pointer_parameters or optional_pointer_parameters:
lines.append('')
lines.append(' Ref<JSON::Object> jsonMessage = JSON::Object::create();')
lines.append(' jsonMessage->setString(ASCIILiteral("method"), ASCIILiteral("%s.%s"));' % (domain.domain_name, event.event_name))
if event.event_parameters:
lines.extend(self._generate_event_out_parameters(domain, event))
lines.append(' router.sendEvent(jsonMessage->toJSONString());')
lines.append('}')
return '\n'.join(lines)
def _generate_event_signature(self, domain, event):
if not event.event_parameters:
return '- (void)%s' % event.event_name
pairs = []
for parameter in event.event_parameters:
param_name = parameter.parameter_name
pairs.append('%s:(%s)%s' % (param_name, self.objc_type_for_param(domain, event.event_name, parameter), param_name))
pairs[0] = ucfirst(pairs[0])
return '- (void)%sWith%s' % (event.event_name, ' '.join(pairs))
def _generate_event_out_parameters(self, domain, event):
lines = []
lines.append(' Ref<JSON::Object> paramsObject = JSON::Object::create();')
for parameter in event.event_parameters:
keyed_set_method = CppGenerator.cpp_setter_method_for_type(parameter.type)
var_name = parameter.parameter_name
safe_var_name = '(*%s)' % var_name if parameter.is_optional else var_name
export_expression = self.objc_protocol_export_expression_for_variable(parameter.type, safe_var_name)
if not parameter.is_optional:
lines.append(' paramsObject->%s(ASCIILiteral("%s"), %s);' % (keyed_set_method, parameter.parameter_name, export_expression))
else:
lines.append(' if (%s)' % (parameter.parameter_name))
lines.append(' paramsObject->%s(ASCIILiteral("%s"), %s);' % (keyed_set_method, parameter.parameter_name, export_expression))
lines.append(' jsonMessage->setObject(ASCIILiteral("params"), WTFMove(paramsObject));')
return lines
| gpl-2.0 |
tomkralidis/QGIS | tests/src/python/test_qgsalignmentcombobox.py | 18 | 2010 | # -*- coding: utf-8 -*-
"""QGIS Unit tests for QgsAlignmentComboBox
From build dir, run: ctest -R PyQgsAlignmentComboBox -V
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
__author__ = 'Nyall Dawson'
__date__ = '26/06/2019'
__copyright__ = 'Copyright 2019, The QGIS Project'
import qgis # NOQA
from qgis.PyQt.QtCore import Qt
from qgis.gui import QgsAlignmentComboBox
from qgis.PyQt.QtTest import QSignalSpy
from qgis.testing import start_app, unittest
start_app()
class TestQgsAlignmentComboBox(unittest.TestCase):
def testGettersSetters(self):
""" test widget getters/setters """
w = QgsAlignmentComboBox()
w.setAvailableAlignments(Qt.AlignRight | Qt.AlignJustify)
w.setCurrentAlignment(Qt.AlignRight)
self.assertEqual(w.currentAlignment(), Qt.AlignRight)
w.setCurrentAlignment(Qt.AlignJustify)
self.assertEqual(w.currentAlignment(), Qt.AlignJustify)
# not a choice
w.setCurrentAlignment(Qt.AlignLeft)
self.assertEqual(w.currentAlignment(), Qt.AlignJustify)
def test_ChangedSignals(self):
""" test that signals are correctly emitted when setting alignment"""
w = QgsAlignmentComboBox()
spy = QSignalSpy(w.changed)
w.setCurrentAlignment(Qt.AlignRight)
self.assertEqual(len(spy), 1)
w.setCurrentAlignment(Qt.AlignRight)
self.assertEqual(len(spy), 1)
w.setCurrentAlignment(Qt.AlignLeft)
self.assertEqual(len(spy), 2)
w.setAvailableAlignments(Qt.AlignRight | Qt.AlignJustify)
self.assertEqual(len(spy), 3)
self.assertEqual(w.currentAlignment(), Qt.AlignRight)
w.setAvailableAlignments(Qt.AlignLeft | Qt.AlignRight)
self.assertEqual(len(spy), 3)
if __name__ == '__main__':
unittest.main()
| gpl-2.0 |
pombredanne/mitmproxy | examples/tls_passthrough.py | 15 | 4470 | """
This inline script allows conditional TLS Interception based
on a user-defined strategy.
Example:
> mitmdump -s tls_passthrough.py
1. curl --proxy http://localhost:8080 https://example.com --insecure
// works - we'll also see the contents in mitmproxy
2. curl --proxy http://localhost:8080 https://example.com --insecure
// still works - we'll also see the contents in mitmproxy
3. curl --proxy http://localhost:8080 https://example.com
// fails with a certificate error, which we will also see in mitmproxy
4. curl --proxy http://localhost:8080 https://example.com
// works again, but mitmproxy does not intercept and we do *not* see the contents
Authors: Maximilian Hils, Matthew Tuusberg
"""
from __future__ import (absolute_import, print_function, division)
import collections
import random
from enum import Enum
from libmproxy.exceptions import TlsProtocolException
from libmproxy.protocol import TlsLayer, RawTCPLayer
class InterceptionResult(Enum):
success = True
failure = False
skipped = None
class _TlsStrategy(object):
"""
Abstract base class for interception strategies.
"""
def __init__(self):
# A server_address -> interception results mapping
self.history = collections.defaultdict(lambda: collections.deque(maxlen=200))
def should_intercept(self, server_address):
"""
Returns:
True, if we should attempt to intercept the connection.
False, if we want to employ pass-through instead.
"""
raise NotImplementedError()
def record_success(self, server_address):
self.history[server_address].append(InterceptionResult.success)
def record_failure(self, server_address):
self.history[server_address].append(InterceptionResult.failure)
def record_skipped(self, server_address):
self.history[server_address].append(InterceptionResult.skipped)
class ConservativeStrategy(_TlsStrategy):
"""
Conservative Interception Strategy - only intercept if there haven't been any failed attempts
in the history.
"""
def should_intercept(self, server_address):
if InterceptionResult.failure in self.history[server_address]:
return False
return True
class ProbabilisticStrategy(_TlsStrategy):
"""
Fixed probability that we intercept a given connection.
"""
def __init__(self, p):
self.p = p
super(ProbabilisticStrategy, self).__init__()
def should_intercept(self, server_address):
return random.uniform(0, 1) < self.p
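    # Usage (see start() below): "mitmdump -s tls_passthrough.py 0.25" selects
    # this strategy with p=0.25, so roughly a quarter of connections are
    # intercepted; with no extra argument ConservativeStrategy is used instead.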
class TlsFeedback(TlsLayer):
"""
Monkey-patch _establish_tls_with_client to get feedback if TLS could be established
successfully on the client connection (which may fail due to cert pinning).
"""
def _establish_tls_with_client(self):
server_address = self.server_conn.address
tls_strategy = self.script_context.tls_strategy
try:
super(TlsFeedback, self)._establish_tls_with_client()
except TlsProtocolException as e:
tls_strategy.record_failure(server_address)
raise e
else:
tls_strategy.record_success(server_address)
# inline script hooks below.
def start(context, argv):
if len(argv) == 2:
context.tls_strategy = ProbabilisticStrategy(float(argv[1]))
else:
context.tls_strategy = ConservativeStrategy()
def next_layer(context, next_layer):
"""
This hook does the actual magic - if the next layer is planned to be a TLS layer,
we check if we want to enter pass-through mode instead.
"""
if isinstance(next_layer, TlsLayer) and next_layer._client_tls:
server_address = next_layer.server_conn.address
if context.tls_strategy.should_intercept(server_address):
# We try to intercept.
# Monkey-Patch the layer to get feedback from the TLSLayer if interception worked.
next_layer.__class__ = TlsFeedback
next_layer.script_context = context
else:
# We don't intercept - reply with a pass-through layer and add a "skipped" entry.
context.log("TLS passthrough for %s" % repr(next_layer.server_conn.address), "info")
next_layer_replacement = RawTCPLayer(next_layer.ctx, logging=False)
next_layer.reply(next_layer_replacement)
context.tls_strategy.record_skipped(server_address)
| mit |
switchboardOp/ansible | lib/ansible/modules/cloud/linode/linode.py | 7 | 26096 | #!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: linode
short_description: create / delete / stop / restart an instance in Linode Public Cloud
description:
- creates / deletes a Linode Public Cloud instance and optionally waits for it to be 'running'.
version_added: "1.3"
options:
state:
description:
- Indicate desired state of the resource
choices: ['present', 'active', 'started', 'absent', 'deleted', 'stopped', 'restarted']
default: present
api_key:
description:
- Linode API key
default: null
name:
description:
- Name to give the instance (alphanumeric, dashes, underscore)
- To keep sanity on the Linode Web Console, name is prepended with LinodeID_
default: null
displaygroup:
description:
- Add the instance to a Display Group in Linode Manager
default: null
version_added: "2.3"
linode_id:
description:
- Unique ID of a linode server
aliases: [ 'lid' ]
default: null
additional_disks:
description: >
List of dictionaries for creating additional disks that are added to the Linode configuration settings.
Dictionary takes Size, Label, Type. Size is in MB.
default: null
version_added: "2.3"
alert_bwin_enabled:
description:
- Set status of bandwidth in alerts.
default: "True"
choices: [ "True", "False" ]
version_added: "2.3"
alert_bwin_threshold:
description:
- Set threshold in MB of bandwidth in alerts.
default: null
version_added: "2.3"
alert_bwout_enabled:
description:
- Set status of bandwidth out alerts.
default: "True"
choices: [ "True", "False" ]
version_added: "2.3"
alert_bwout_threshold:
description:
- Set threshold in MB of bandwidth out alerts.
default: null
version_added: "2.3"
alert_bwquota_enabled:
description:
- Set status of bandwidth quota alerts as percentage of network transfer quota.
default: "True"
choices: [ "True", "False" ]
version_added: "2.3"
alert_bwquota_threshold:
description:
- Set threshold in MB of bandwidth quota alerts.
default: null
version_added: "2.3"
alert_cpu_enabled:
description:
- Set status of receiving CPU usage alerts.
default: "True"
choices: [ "True", "False" ]
version_added: "2.3"
alert_cpu_threshold:
description:
- Set percentage threshold for receiving CPU usage alerts. Each CPU core adds 100% to total.
default: null
version_added: "2.3"
alert_diskio_enabled:
description:
- Set status of receiving disk IO alerts.
default: "True"
choices: [ "True", "False" ]
version_added: "2.3"
alert_diskio_threshold:
description:
- Set threshold for average IO ops/sec over 2 hour period.
default: null
version_added: "2.3"
backupweeklyday:
description:
- Integer value for what day of the week to store weekly backups.
default: null
version_added: "2.3"
plan:
description:
- plan to use for the instance (Linode plan)
default: null
payment_term:
description:
- payment term to use for the instance (payment term in months)
default: 1
choices: [1, 12, 24]
password:
description:
- root password to apply to a new server (auto generated if missing)
default: null
private_ip:
description:
- Add private IPv4 address when Linode is created.
default: "no"
choices: [ "yes", "no" ]
version_added: "2.3"
ssh_pub_key:
description:
- SSH public key applied to root user
default: null
swap:
description:
- swap size in MB
default: 512
distribution:
description:
- distribution to use for the instance (Linode Distribution)
default: null
datacenter:
description:
- datacenter to create an instance in (Linode Datacenter)
default: null
kernel_id:
description:
- kernel to use for the instance (Linode Kernel)
default: null
version_added: "2.4"
wait:
description:
- wait for the instance to be in state 'running' before returning
default: "no"
choices: [ "yes", "no" ]
wait_timeout:
description:
- how long before wait gives up, in seconds
default: 300
watchdog:
description:
- Set status of Lassie watchdog.
default: "True"
choices: [ "True", "False" ]
version_added: "2.2"
requirements:
- "python >= 2.6"
- "linode-python"
- "pycurl"
author: "Vincent Viallet (@zbal)"
notes:
- LINODE_API_KEY env variable can be used instead
'''
EXAMPLES = '''
# Create a server with a private IP Address
- local_action:
module: linode
api_key: 'longStringFromLinodeApi'
name: linode-test1
plan: 1
datacenter: 2
distribution: 99
password: 'superSecureRootPassword'
private_ip: yes
ssh_pub_key: 'ssh-rsa qwerty'
swap: 768
wait: yes
wait_timeout: 600
state: present
# Fully configure new server
- local_action:
module: linode
api_key: 'longStringFromLinodeApi'
name: linode-test1
plan: 4
datacenter: 2
distribution: 99
kernel_id: 138
password: 'superSecureRootPassword'
private_ip: yes
ssh_pub_key: 'ssh-rsa qwerty'
swap: 768
wait: yes
wait_timeout: 600
state: present
alert_bwquota_enabled: True
alert_bwquota_threshold: 80
alert_bwin_enabled: True
alert_bwin_threshold: 10
alert_cpu_enabled: True
alert_cpu_threshold: 210
alert_diskio_enabled: True
alert_bwout_enabled: True
alert_bwout_threshold: 10
alert_diskio_enabled: True
alert_diskio_threshold: 10000
backupweeklyday: 1
backupwindow: 2
displaygroup: 'test'
additional_disks:
- {Label: 'disk1', Size: 2500, Type: 'raw'}
- {Label: 'newdisk', Size: 2000}
watchdog: True
# Ensure a running server (create if missing)
- local_action:
module: linode
api_key: 'longStringFromLinodeApi'
name: linode-test1
linode_id: 12345678
plan: 1
datacenter: 2
distribution: 99
password: 'superSecureRootPassword'
ssh_pub_key: 'ssh-rsa qwerty'
swap: 768
wait: yes
wait_timeout: 600
state: present
# Delete a server
- local_action:
module: linode
api_key: 'longStringFromLinodeApi'
name: linode-test1
linode_id: 12345678
state: absent
# Stop a server
- local_action:
module: linode
api_key: 'longStringFromLinodeApi'
name: linode-test1
linode_id: 12345678
state: stopped
# Reboot a server
- local_action:
module: linode
api_key: 'longStringFromLinodeApi'
name: linode-test1
linode_id: 12345678
state: restarted
'''
import time
import os
try:
import pycurl
HAS_PYCURL = True
except ImportError:
HAS_PYCURL = False
try:
from linode import api as linode_api
HAS_LINODE = True
except ImportError:
HAS_LINODE = False
def randompass():
'''
Generate a long random password that complies with Linode requirements
'''
# Linode API currently requires the following:
# It must contain at least two of these four character classes:
# lower case letters - upper case letters - numbers - punctuation
# we play it safe :)
import random
import string
# as of python 2.4, this reseeds the PRNG from urandom
random.seed()
lower = ''.join(random.choice(string.ascii_lowercase) for x in range(6))
upper = ''.join(random.choice(string.ascii_uppercase) for x in range(6))
number = ''.join(random.choice(string.digits) for x in range(6))
punct = ''.join(random.choice(string.punctuation) for x in range(6))
p = lower + upper + number + punct
return ''.join(random.sample(p, len(p)))
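# Hedged illustration (not in the original module): randompass() is pure Python,
# so its guarantees can be sanity-checked in isolation.
def _demo_randompass():
    pw = randompass()
    assert len(pw) == 24                                   # 4 classes x 6 chars
    assert any(c.islower() for c in pw) and any(c.isupper() for c in pw)
    assert any(c.isdigit() for c in pw)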
def getInstanceDetails(api, server):
'''
Return the details of an instance, populating IPs, etc.
'''
instance = {'id': server['LINODEID'],
'name': server['LABEL'],
'public': [],
'private': []}
# Populate with ips
for ip in api.linode_ip_list(LinodeId=server['LINODEID']):
if ip['ISPUBLIC'] and 'ipv4' not in instance:
instance['ipv4'] = ip['IPADDRESS']
instance['fqdn'] = ip['RDNS_NAME']
if ip['ISPUBLIC']:
instance['public'].append({'ipv4': ip['IPADDRESS'],
'fqdn': ip['RDNS_NAME'],
'ip_id': ip['IPADDRESSID']})
else:
instance['private'].append({'ipv4': ip['IPADDRESS'],
'fqdn': ip['RDNS_NAME'],
'ip_id': ip['IPADDRESSID']})
return instance
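# Hedged illustration (not in the original module): getInstanceDetails() returns
# a plain dict, roughly of this shape (all values below are hypothetical):
#   {'id': 12345678, 'name': 'linode-test1',
#    'ipv4': '203.0.113.10', 'fqdn': 'li-12345678.example.com',
#    'public': [{'ipv4': '203.0.113.10', 'fqdn': '...', 'ip_id': 111}],
#    'private': []}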
def linodeServers(module, api, state, name, alert_bwin_enabled, alert_bwin_threshold, alert_bwout_enabled, alert_bwout_threshold,
alert_bwquota_enabled, alert_bwquota_threshold, alert_cpu_enabled, alert_cpu_threshold, alert_diskio_enabled,
alert_diskio_threshold,backupweeklyday, backupwindow, displaygroup, plan, additional_disks, distribution,
datacenter, kernel_id, linode_id, payment_term, password, private_ip, ssh_pub_key, swap, wait, wait_timeout, watchdog):
instances = []
changed = False
new_server = False
servers = []
disks = []
configs = []
jobs = []
disk_size = 0
# See if we can match an existing server details with the provided linode_id
if linode_id:
# For the moment we only consider linode_id as criteria for match
# Later we can use more (size, name, etc.) and update existing
servers = api.linode_list(LinodeId=linode_id)
# Attempt to fetch details about disks and configs only if servers are
# found with linode_id
if servers:
disks = api.linode_disk_list(LinodeId=linode_id)
configs = api.linode_config_list(LinodeId=linode_id)
# Act on the state
if state in ('active', 'present', 'started'):
# TODO: validate all the plan / distribution / datacenter are valid
# Multi step process/validation:
# - need linode_id (entity)
# - need disk_id for linode_id - create disk from distrib
# - need config_id for linode_id - create config (need kernel)
# Any create step triggers a job that need to be waited for.
if not servers:
for arg in (name, plan, distribution, datacenter):
if not arg:
module.fail_json(msg='%s is required for %s state' % (arg, state))
# Create linode entity
new_server = True
# Get size of all individually listed disks to subtract from Distribution disk
used_disk_space = 0 if additional_disks is None else sum(disk['Size'] for disk in additional_disks)
try:
res = api.linode_create(DatacenterID=datacenter, PlanID=plan,
PaymentTerm=payment_term)
linode_id = res['LinodeID']
# Update linode Label to match name
api.linode_update(LinodeId=linode_id, Label='%s_%s' % (linode_id, name))
# Update Linode with Ansible configuration options
api.linode_update(LinodeId=linode_id, ALERT_BWIN_ENABLED=alert_bwin_enabled,
ALERT_BWIN_THRESHOLD=alert_bwin_threshold, ALERT_BWOUT_ENABLED=alert_bwout_enabled,
ALERT_BWOUT_THRESHOLD=alert_bwout_threshold, ALERT_BWQUOTA_ENABLED=alert_bwquota_enabled,
ALERT_BWQUOTA_THRESHOLD=alert_bwquota_threshold, ALERT_CPU_ENABLED=alert_cpu_enabled,
ALERT_CPU_THRESHOLD=alert_cpu_threshold, ALERT_DISKIO_ENABLED=alert_diskio_enabled,
ALERT_DISKIO_THRESHOLD=alert_diskio_threshold, BACKUPWEEKLYDAY=backupweeklyday,
BACKUPWINDOW=backupwindow, LPM_DISPLAYGROUP=displaygroup, WATCHDOG=watchdog)
# Save server
servers = api.linode_list(LinodeId=linode_id)
except Exception as e:
module.fail_json(msg = '%s' % e.value[0]['ERRORMESSAGE'])
#Add private IP to Linode
if private_ip:
try:
res = api.linode_ip_addprivate(LinodeID=linode_id)
except Exception as e:
module.fail_json(msg = '%s' % e.value[0]['ERRORMESSAGE'])
if not disks:
for arg in (name, linode_id, distribution):
if not arg:
module.fail_json(msg='%s is required for %s state' % (arg, state))
# Create disks (1 from distrib, 1 for SWAP)
new_server = True
try:
if not password:
# Password is required on creation, if not provided generate one
password = randompass()
if not swap:
swap = 512
# Create data disk
size = servers[0]['TOTALHD'] - used_disk_space - swap
if ssh_pub_key:
res = api.linode_disk_createfromdistribution(
LinodeId=linode_id, DistributionID=distribution,
rootPass=password, rootSSHKey=ssh_pub_key,
Label='%s data disk (lid: %s)' % (name, linode_id),
Size=size)
else:
res = api.linode_disk_createfromdistribution(
LinodeId=linode_id, DistributionID=distribution,
rootPass=password,
Label='%s data disk (lid: %s)' % (name, linode_id),
Size=size)
jobs.append(res['JobID'])
# Create SWAP disk
res = api.linode_disk_create(LinodeId=linode_id, Type='swap',
Label='%s swap disk (lid: %s)' % (name, linode_id),
Size=swap)
# Create individually listed disks at specified size
if additional_disks:
for disk in additional_disks:
# If a disk Type is not passed in, default to ext4
if disk.get('Type') is None:
disk['Type'] = 'ext4'
res = api.linode_disk_create(LinodeID=linode_id, Label=disk['Label'], Size=disk['Size'], Type=disk['Type'])
jobs.append(res['JobID'])
except Exception as e:
# TODO: destroy linode ?
module.fail_json(msg = '%s' % e.value[0]['ERRORMESSAGE'])
if not configs:
for arg in (name, linode_id, distribution):
if not arg:
module.fail_json(msg='%s is required for %s state' % (arg, state))
# Check architecture
for distrib in api.avail_distributions():
if distrib['DISTRIBUTIONID'] != distribution:
continue
arch = '32'
if distrib['IS64BIT']:
arch = '64'
break
# Get latest kernel matching arch if kernel_id is not specified
if not kernel_id:
for kernel in api.avail_kernels():
if not kernel['LABEL'].startswith('Latest %s' % arch):
continue
kernel_id = kernel['KERNELID']
break
# Get disk list
disks_id = []
for disk in api.linode_disk_list(LinodeId=linode_id):
if disk['TYPE'] == 'ext3':
disks_id.insert(0, str(disk['DISKID']))
continue
disks_id.append(str(disk['DISKID']))
# Pad the list out to the 9 disk slots the Linode config API expects
while len(disks_id) < 9:
disks_id.append('')
disks_list = ','.join(disks_id)
# Create config
new_server = True
try:
api.linode_config_create(LinodeId=linode_id, KernelId=kernel_id,
Disklist=disks_list, Label='%s config' % name)
configs = api.linode_config_list(LinodeId=linode_id)
except Exception as e:
module.fail_json(msg = '%s' % e.value[0]['ERRORMESSAGE'])
# Start / Ensure servers are running
for server in servers:
# Refresh server state
server = api.linode_list(LinodeId=server['LINODEID'])[0]
# Ensure existing servers are up and running, boot if necessary
if server['STATUS'] != 1:
res = api.linode_boot(LinodeId=linode_id)
jobs.append(res['JobID'])
changed = True
# wait here until the instances are up
wait_timeout = time.time() + wait_timeout
while wait and wait_timeout > time.time():
# refresh the server details
server = api.linode_list(LinodeId=server['LINODEID'])[0]
# status:
# -2: Boot failed
# 1: Running
if server['STATUS'] in (-2, 1):
break
time.sleep(5)
if wait and wait_timeout <= time.time():
# waiting took too long
module.fail_json(msg = 'Timeout waiting on %s (lid: %s)' %
(server['LABEL'], server['LINODEID']))
# Get a fresh copy of the server details
server = api.linode_list(LinodeId=server['LINODEID'])[0]
if server['STATUS'] == -2:
module.fail_json(msg = '%s (lid: %s) failed to boot' %
(server['LABEL'], server['LINODEID']))
# From now on we know the task is a success
# Build instance report
instance = getInstanceDetails(api, server)
# depending on wait flag select the status
if wait:
instance['status'] = 'Running'
else:
instance['status'] = 'Starting'
# Return the root password if this is a new box and no SSH key
# has been provided
if new_server and not ssh_pub_key:
instance['password'] = password
instances.append(instance)
elif state in ('stopped',):
if not linode_id:
module.fail_json(msg='linode_id is required for stopped state')
if not servers:
module.fail_json(msg = 'Server (lid: %s) not found' % (linode_id))
for server in servers:
instance = getInstanceDetails(api, server)
if server['STATUS'] != 2:
try:
res = api.linode_shutdown(LinodeId=linode_id)
except Exception as e:
module.fail_json(msg = '%s' % e.value[0]['ERRORMESSAGE'])
instance['status'] = 'Stopping'
changed = True
else:
instance['status'] = 'Stopped'
instances.append(instance)
elif state in ('restarted',):
if not linode_id:
module.fail_json(msg='linode_id is required for restarted state')
if not servers:
module.fail_json(msg = 'Server (lid: %s) not found' % (linode_id))
for server in servers:
instance = getInstanceDetails(api, server)
try:
res = api.linode_reboot(LinodeId=server['LINODEID'])
except Exception as e:
module.fail_json(msg = '%s' % e.value[0]['ERRORMESSAGE'])
instance['status'] = 'Restarting'
changed = True
instances.append(instance)
elif state in ('absent', 'deleted'):
for server in servers:
instance = getInstanceDetails(api, server)
try:
api.linode_delete(LinodeId=server['LINODEID'], skipChecks=True)
except Exception as e:
module.fail_json(msg = '%s' % e.value[0]['ERRORMESSAGE'])
instance['status'] = 'Deleting'
changed = True
instances.append(instance)
# Ease parsing if only 1 instance
if len(instances) == 1:
module.exit_json(changed=changed, instance=instances[0])
module.exit_json(changed=changed, instances=instances)
def main():
module = AnsibleModule(
argument_spec = dict(
state = dict(default='present', choices=['active', 'present', 'started',
'deleted', 'absent', 'stopped',
'restarted']),
api_key = dict(no_log=True),
name = dict(type='str'),
alert_bwin_enabled = dict(type='bool', default=True),
alert_bwin_threshold = dict(type='int'),
alert_bwout_enabled = dict(type='bool', default=True),
alert_bwout_threshold = dict(type='int'),
alert_bwquota_enabled = dict(type='bool', default=True),
alert_bwquota_threshold = dict(type='int'),
alert_cpu_enabled = dict(type='bool', default=True),
alert_cpu_threshold = dict(type='int'),
alert_diskio_enabled = dict(type='bool', default=True),
alert_diskio_threshold = dict(type='int'),
backupweeklyday = dict(type='int'),
backupwindow = dict(type='int'),
displaygroup = dict(type='str', default=''),
plan = dict(type='int'),
additional_disks= dict(type='list'),
distribution = dict(type='int'),
datacenter = dict(type='int'),
kernel_id = dict(type='int'),
linode_id = dict(type='int', aliases=['lid']),
payment_term = dict(type='int', default=1, choices=[1, 12, 24]),
password = dict(type='str', no_log=True),
private_ip = dict(type='bool'),
ssh_pub_key = dict(type='str'),
swap = dict(type='int', default=512),
wait = dict(type='bool', default=True),
wait_timeout = dict(default=300),
watchdog = dict(type='bool', default=True),
)
)
if not HAS_PYCURL:
module.fail_json(msg='pycurl required for this module')
if not HAS_LINODE:
module.fail_json(msg='linode-python required for this module')
state = module.params.get('state')
api_key = module.params.get('api_key')
name = module.params.get('name')
alert_bwin_enabled = int(module.params.get('alert_bwin_enabled'))
alert_bwin_threshold = module.params.get('alert_bwin_threshold')
alert_bwout_enabled = int(module.params.get('alert_bwout_enabled'))
alert_bwout_threshold = module.params.get('alert_bwout_threshold')
alert_bwquota_enabled = int(module.params.get('alert_bwquota_enabled'))
alert_bwquota_threshold = module.params.get('alert_bwquota_threshold')
alert_cpu_enabled = int(module.params.get('alert_cpu_enabled'))
alert_cpu_threshold = module.params.get('alert_cpu_threshold')
alert_diskio_enabled = int(module.params.get('alert_diskio_enabled'))
alert_diskio_threshold = module.params.get('alert_diskio_threshold')
backupsenabled = module.params.get('backupsenabled')
backupweeklyday = module.params.get('backupweeklyday')
backupwindow = module.params.get('backupwindow')
displaygroup = module.params.get('displaygroup')
plan = module.params.get('plan')
additional_disks = module.params.get('additional_disks')
distribution = module.params.get('distribution')
datacenter = module.params.get('datacenter')
kernel_id = module.params.get('kernel_id')
linode_id = module.params.get('linode_id')
payment_term = module.params.get('payment_term')
password = module.params.get('password')
private_ip = module.params.get('private_ip')
ssh_pub_key = module.params.get('ssh_pub_key')
swap = module.params.get('swap')
wait = module.params.get('wait')
wait_timeout = int(module.params.get('wait_timeout'))
watchdog = int(module.params.get('watchdog'))
# Setup the api_key
if not api_key:
try:
api_key = os.environ['LINODE_API_KEY']
except KeyError as e:
module.fail_json(msg = 'Unable to load %s' % e.message)
# setup the auth
try:
api = linode_api.Api(api_key)
api.test_echo()
except Exception as e:
module.fail_json(msg = '%s' % e.value[0]['ERRORMESSAGE'])
linodeServers(module, api, state, name, alert_bwin_enabled,
alert_bwin_threshold, alert_bwout_enabled, alert_bwout_threshold,
alert_bwquota_enabled, alert_bwquota_threshold, alert_cpu_enabled,
alert_cpu_threshold, alert_diskio_enabled, alert_diskio_threshold,
backupweeklyday, backupwindow, displaygroup, plan,
additional_disks, distribution, datacenter, kernel_id, linode_id,
payment_term, password, private_ip, ssh_pub_key, swap, wait,
wait_timeout, watchdog)
# import module snippets
from ansible.module_utils.basic import *
if __name__ == '__main__':
main()
| gpl-3.0 |
Sodki/ansible-modules-extras | network/citrix/netscaler.py | 143 | 5056 | #!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Ansible module to manage Citrix NetScaler entities
(c) 2013, Nandor Sivok <nandor@gawker.com>
This file is part of Ansible
Ansible is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Ansible is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Ansible. If not, see <http://www.gnu.org/licenses/>.
"""
DOCUMENTATION = '''
---
module: netscaler
version_added: "1.1"
short_description: Manages Citrix NetScaler entities
description:
- Manages Citrix NetScaler server and service entities.
options:
nsc_host:
description:
- hostname or ip of your netscaler
required: true
default: null
aliases: []
nsc_protocol:
description:
- protocol used to access netscaler
required: false
default: https
aliases: []
user:
description:
- username
required: true
default: null
aliases: []
password:
description:
- password
required: true
default: null
aliases: []
action:
description:
- the action you want to perform on the entity
required: false
default: disable
choices: ["enable", "disable"]
aliases: []
name:
description:
- name of the entity
required: true
default: hostname
aliases: []
type:
description:
- type of the entity
required: false
default: server
choices: ["server", "service"]
aliases: []
validate_certs:
description:
- If C(no), SSL certificates for the target url will not be validated. This should only be used
on personally controlled sites using self-signed certificates.
required: false
default: 'yes'
choices: ['yes', 'no']
requirements: []
author: "Nandor Sivok (@dominis)"
'''
EXAMPLES = '''
# Disable the server
ansible host -m netscaler -a "nsc_host=nsc.example.com user=apiuser password=apipass"
# Enable the server
ansible host -m netscaler -a "nsc_host=nsc.example.com user=apiuser password=apipass action=enable"
# Disable the service local:8080
ansible host -m netscaler -a "nsc_host=nsc.example.com user=apiuser password=apipass name=local:8080 type=service action=disable"
'''
import base64
import socket
import urllib
class netscaler(object):
_nitro_base_url = '/nitro/v1/'
def __init__(self, module):
self.module = module
def http_request(self, api_endpoint, data_json={}):
request_url = self._nsc_protocol + '://' + self._nsc_host + self._nitro_base_url + api_endpoint
data_json = urllib.urlencode(data_json)
if not len(data_json):
data_json = None
auth = base64.encodestring('%s:%s' % (self._nsc_user, self._nsc_pass)).replace('\n', '').strip()
headers = {
'Authorization': 'Basic %s' % auth,
'Content-Type' : 'application/x-www-form-urlencoded',
}
response, info = fetch_url(self.module, request_url, data=data_json, headers=headers)
return json.load(response)
def prepare_request(self, action):
resp = self.http_request(
'config',
{
"object":
{
"params": {"action": action},
self._type: {"name": self._name}
}
}
)
return resp
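# Hedged illustration (not part of the module): for type='server' and
# name='web01' (hypothetical values), prepare_request('disable') builds a
# request body of the form
#   {"object": {"params": {"action": "disable"}, "server": {"name": "web01"}}}
# which http_request() url-encodes and posts to
# <nsc_protocol>://<nsc_host>/nitro/v1/config.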
def core(module):
n = netscaler(module)
n._nsc_host = module.params.get('nsc_host')
n._nsc_user = module.params.get('user')
n._nsc_pass = module.params.get('password')
n._nsc_protocol = module.params.get('nsc_protocol')
n._name = module.params.get('name')
n._type = module.params.get('type')
action = module.params.get('action')
r = n.prepare_request(action)
return r['errorcode'], r
def main():
module = AnsibleModule(
argument_spec = dict(
nsc_host = dict(required=True),
nsc_protocol = dict(default='https'),
user = dict(required=True),
password = dict(required=True),
action = dict(default='enable', choices=['enable','disable']),
name = dict(default=socket.gethostname()),
type = dict(default='server', choices=['service', 'server']),
validate_certs=dict(default='yes', type='bool'),
)
)
rc = 0
try:
rc, result = core(module)
except Exception as e:
module.fail_json(msg=str(e))
if rc != 0:
module.fail_json(rc=rc, msg=result)
else:
result['changed'] = True
module.exit_json(**result)
# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.urls import *
main()
| gpl-3.0 |
druuu/django | tests/template_tests/filter_tests/test_length_is.py | 360 | 3204 | from django.template.defaultfilters import length_is
from django.test import SimpleTestCase
from ..utils import setup
class LengthIsTests(SimpleTestCase):
@setup({'length_is01': '{% if some_list|length_is:"4" %}Four{% endif %}'})
def test_length_is01(self):
output = self.engine.render_to_string('length_is01', {'some_list': ['4', None, True, {}]})
self.assertEqual(output, 'Four')
@setup({'length_is02': '{% if some_list|length_is:"4" %}Four{% else %}Not Four{% endif %}'})
def test_length_is02(self):
output = self.engine.render_to_string('length_is02', {'some_list': ['4', None, True, {}, 17]})
self.assertEqual(output, 'Not Four')
@setup({'length_is03': '{% if mystring|length_is:"4" %}Four{% endif %}'})
def test_length_is03(self):
output = self.engine.render_to_string('length_is03', {'mystring': 'word'})
self.assertEqual(output, 'Four')
@setup({'length_is04': '{% if mystring|length_is:"4" %}Four{% else %}Not Four{% endif %}'})
def test_length_is04(self):
output = self.engine.render_to_string('length_is04', {'mystring': 'Python'})
self.assertEqual(output, 'Not Four')
@setup({'length_is05': '{% if mystring|length_is:"4" %}Four{% else %}Not Four{% endif %}'})
def test_length_is05(self):
output = self.engine.render_to_string('length_is05', {'mystring': ''})
self.assertEqual(output, 'Not Four')
@setup({'length_is06': '{% with var|length as my_length %}{{ my_length }}{% endwith %}'})
def test_length_is06(self):
output = self.engine.render_to_string('length_is06', {'var': 'django'})
self.assertEqual(output, '6')
# Boolean return value from length_is should not be coerced to a string
@setup({'length_is07': '{% if "X"|length_is:0 %}Length is 0{% else %}Length not 0{% endif %}'})
def test_length_is07(self):
output = self.engine.render_to_string('length_is07', {})
self.assertEqual(output, 'Length not 0')
@setup({'length_is08': '{% if "X"|length_is:1 %}Length is 1{% else %}Length not 1{% endif %}'})
def test_length_is08(self):
output = self.engine.render_to_string('length_is08', {})
self.assertEqual(output, 'Length is 1')
# Invalid uses that should fail silently.
@setup({'length_is09': '{{ var|length_is:"fish" }}'})
def test_length_is09(self):
output = self.engine.render_to_string('length_is09', {'var': 'django'})
self.assertEqual(output, '')
@setup({'length_is10': '{{ int|length_is:"1" }}'})
def test_length_is10(self):
output = self.engine.render_to_string('length_is10', {'int': 7})
self.assertEqual(output, '')
@setup({'length_is11': '{{ none|length_is:"1" }}'})
def test_length_is11(self):
output = self.engine.render_to_string('length_is11', {'none': None})
self.assertEqual(output, '')
class FunctionTests(SimpleTestCase):
def test_empty_list(self):
self.assertEqual(length_is([], 0), True)
self.assertEqual(length_is([], 1), False)
def test_string(self):
self.assertEqual(length_is('a', 1), True)
self.assertEqual(length_is('a', 10), False)
| bsd-3-clause |
runekaagaard/django-contrib-locking | django/contrib/auth/management/commands/changepassword.py | 222 | 2100 | from __future__ import unicode_literals
import getpass
from django.contrib.auth import get_user_model
from django.core.management.base import BaseCommand, CommandError
from django.db import DEFAULT_DB_ALIAS
from django.utils.encoding import force_str
class Command(BaseCommand):
help = "Change a user's password for django.contrib.auth."
requires_system_checks = False
def _get_pass(self, prompt="Password: "):
p = getpass.getpass(prompt=force_str(prompt))
if not p:
raise CommandError("aborted")
return p
def add_arguments(self, parser):
parser.add_argument('username', nargs='?',
help='Username to change password for; by default, it\'s the current username.')
parser.add_argument('--database', action='store', dest='database',
default=DEFAULT_DB_ALIAS,
help='Specifies the database to use. Default is "default".')
def handle(self, *args, **options):
if options.get('username'):
username = options['username']
else:
username = getpass.getuser()
UserModel = get_user_model()
try:
u = UserModel._default_manager.using(options.get('database')).get(**{
UserModel.USERNAME_FIELD: username
})
except UserModel.DoesNotExist:
raise CommandError("user '%s' does not exist" % username)
self.stdout.write("Changing password for user '%s'\n" % u)
MAX_TRIES = 3
count = 0
p1, p2 = 1, 2 # To make them initially mismatch.
while p1 != p2 and count < MAX_TRIES:
p1 = self._get_pass()
p2 = self._get_pass("Password (again): ")
if p1 != p2:
self.stdout.write("Passwords do not match. Please try again.\n")
count = count + 1
if count == MAX_TRIES:
raise CommandError("Aborting password change for user '%s' after %s attempts" % (u, count))
u.set_password(p1)
u.save()
return "Password changed successfully for user '%s'" % u
| bsd-3-clause |
OhRly-net/ohrlycoin | share/qt/extract_strings_qt.py | 2945 | 1844 | #!/usr/bin/python
'''
Extract _("...") strings for translation and convert to Qt4 stringdefs so that
they can be picked up by Qt linguist.
'''
from subprocess import Popen, PIPE
import glob
import operator
OUT_CPP="src/qt/bitcoinstrings.cpp"
EMPTY=['""']
def parse_po(text):
"""
Parse 'po' format produced by xgettext.
Return a list of (msgid,msgstr) tuples.
"""
messages = []
msgid = []
msgstr = []
in_msgid = False
in_msgstr = False
for line in text.split('\n'):
line = line.rstrip('\r')
if line.startswith('msgid '):
if in_msgstr:
messages.append((msgid, msgstr))
in_msgstr = False
# message start
in_msgid = True
msgid = [line[6:]]
elif line.startswith('msgstr '):
in_msgid = False
in_msgstr = True
msgstr = [line[7:]]
elif line.startswith('"'):
if in_msgid:
msgid.append(line)
if in_msgstr:
msgstr.append(line)
if in_msgstr:
messages.append((msgid, msgstr))
return messages
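def _demo_parse_po():
    # Hedged illustration (not in the original script): parse_po() accepts any
    # string in xgettext's po format, so it can be exercised without xgettext.
    sample = 'msgid "hello"\nmsgstr ""\n'
    assert parse_po(sample) == [(['"hello"'], ['""'])]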
files = glob.glob('src/*.cpp') + glob.glob('src/*.h')
# xgettext -n --keyword=_ $FILES
child = Popen(['xgettext','--output=-','-n','--keyword=_'] + files, stdout=PIPE)
(out, err) = child.communicate()
messages = parse_po(out)
f = open(OUT_CPP, 'w')
f.write("""#include <QtGlobal>
// Automatically generated by extract_strings.py
#ifdef __GNUC__
#define UNUSED __attribute__((unused))
#else
#define UNUSED
#endif
""")
f.write('static const char UNUSED *bitcoin_strings[] = {\n')
messages.sort(key=operator.itemgetter(0))
for (msgid, msgstr) in messages:
if msgid != EMPTY:
f.write('QT_TRANSLATE_NOOP("bitcoin-core", %s),\n' % ('\n'.join(msgid)))
f.write('};')
f.close()
| mit |
arbrandes/edx-platform | lms/djangoapps/teams/serializers.py | 4 | 8054 | """
Defines serializers used by the Team API.
"""
from copy import deepcopy
from django.conf import settings
from django.contrib.auth.models import User # lint-amnesty, pylint: disable=imported-auth-user
from django_countries import countries
from rest_framework import serializers
from lms.djangoapps.teams.api import add_team_count, get_teams_accessible_by_user
from lms.djangoapps.teams.models import CourseTeam, CourseTeamMembership
from openedx.core.djangoapps.user_api.accounts.serializers import UserReadOnlySerializer
from openedx.core.lib.api.fields import ExpandableField
from openedx.core.lib.api.serializers import CollapsedReferenceSerializer
class CountryField(serializers.Field):
"""
Field to serialize a country code.
"""
COUNTRY_CODES = list(dict(countries).keys())
def to_representation(self, obj): # pylint: disable=arguments-differ
"""
Represent the country as a 2-character unicode identifier.
"""
return str(obj)
def to_internal_value(self, data):
"""
Check that the code is a valid country code.
We leave the data in its original format so that the Django model's
CountryField can convert it to the internal representation used
by the django-countries library.
"""
if data and data not in self.COUNTRY_CODES:
raise serializers.ValidationError(
f"{data} is not a valid country code"
)
return data
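# Hedged illustration (not part of the module): CountryField only checks that a
# submitted value is one of django-countries' two-letter codes, e.g.
#   CountryField().to_internal_value('US')   # -> 'US'
#   CountryField().to_internal_value('XX')   # -> raises ValidationError
#   CountryField().to_internal_value('')     # -> '' (empty values pass through)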
class UserMembershipSerializer(serializers.ModelSerializer):
"""Serializes CourseTeamMemberships with only user and date_joined
Used for listing team members.
"""
profile_configuration = deepcopy(settings.ACCOUNT_VISIBILITY_CONFIGURATION)
profile_configuration['bulk_shareable_fields'].append('url')
profile_configuration['public_fields'].append('url')
user = ExpandableField(
collapsed_serializer=CollapsedReferenceSerializer(
model_class=User,
id_source='username',
view_name='accounts_api',
read_only=True,
),
expanded_serializer=UserReadOnlySerializer(configuration=profile_configuration),
)
class Meta:
model = CourseTeamMembership
fields = ("user", "date_joined", "last_activity_at")
read_only_fields = ("date_joined", "last_activity_at")
class CourseTeamSerializer(serializers.ModelSerializer):
"""Serializes a CourseTeam with membership information."""
id = serializers.CharField(source='team_id', read_only=True) # pylint: disable=invalid-name
membership = UserMembershipSerializer(many=True, read_only=True)
country = CountryField()
class Meta:
model = CourseTeam
fields = (
"id",
"discussion_topic_id",
"name",
"course_id",
"topic_id",
"date_created",
"description",
"country",
"language",
"last_activity_at",
"membership",
"organization_protected",
)
read_only_fields = ("course_id", "date_created", "discussion_topic_id", "last_activity_at")
class CourseTeamCreationSerializer(serializers.ModelSerializer):
"""Deserializes a CourseTeam for creation."""
country = CountryField(required=False)
class Meta:
model = CourseTeam
fields = (
"name",
"course_id",
"description",
"topic_id",
"country",
"language",
"organization_protected",
)
def create(self, validated_data):
team = CourseTeam.create(
name=validated_data.get("name", ''),
course_id=validated_data.get("course_id"),
description=validated_data.get("description", ''),
topic_id=validated_data.get("topic_id", ''),
country=validated_data.get("country", ''),
language=validated_data.get("language", ''),
organization_protected=validated_data.get("organization_protected", False)
)
team.save()
return team
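# Hedged illustration (not part of the module): creating a team through this
# serializer mirrors the POST payload handled by the teams API, e.g.
#   serializer = CourseTeamCreationSerializer(data={
#       'name': 'Team A', 'course_id': course_key, 'topic_id': 'topic-1'})
#   if serializer.is_valid():
#       team = serializer.save()   # dispatches to create() above
# course_key, the team name and the topic id are all hypothetical values.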
class CourseTeamSerializerWithoutMembership(CourseTeamSerializer):
"""The same as the `CourseTeamSerializer`, but elides the membership field.
Intended to be used as a sub-serializer for serializing team
memberships, since the membership field is redundant in that case.
"""
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
del self.fields['membership']
class MembershipSerializer(serializers.ModelSerializer):
"""Serializes CourseTeamMemberships with information about both teams and users."""
profile_configuration = deepcopy(settings.ACCOUNT_VISIBILITY_CONFIGURATION)
profile_configuration['bulk_shareable_fields'].append('url')
profile_configuration['public_fields'].append('url')
user = ExpandableField(
collapsed_serializer=CollapsedReferenceSerializer(
model_class=User,
id_source='username',
view_name='accounts_api',
read_only=True,
),
expanded_serializer=UserReadOnlySerializer(configuration=profile_configuration)
)
team = ExpandableField(
collapsed_serializer=CollapsedReferenceSerializer(
model_class=CourseTeam,
id_source='team_id',
view_name='teams_detail',
read_only=True,
),
expanded_serializer=CourseTeamSerializerWithoutMembership(read_only=True),
)
class Meta:
model = CourseTeamMembership
fields = ("user", "team", "date_joined", "last_activity_at")
read_only_fields = ("date_joined", "last_activity_at")
class BaseTopicSerializer(serializers.Serializer): # pylint: disable=abstract-method
"""Serializes a topic without team_count."""
description = serializers.CharField()
name = serializers.CharField()
id = serializers.CharField() # pylint: disable=invalid-name
type = serializers.CharField()
max_team_size = serializers.IntegerField()
class TopicSerializer(BaseTopicSerializer): # pylint: disable=abstract-method
"""
Adds team_count to the basic topic serializer, checking if team_count
is already present in the topic data, and if not, querying the CourseTeam
model to get the count. Requires that `context` is provided with a valid course_id
in order to filter teams within the course.
"""
team_count = serializers.SerializerMethodField()
def get_team_count(self, topic):
"""Get the number of teams associated with this topic"""
# If team_count is already present (possible if topic data was pre-processed for sorting), return it.
if 'team_count' in topic:
return topic['team_count']
else:
return get_teams_accessible_by_user(
self.context.get('user'),
[topic['id']],
self.context['course_id'],
self.context.get('organization_protection_status')
).count()
class BulkTeamCountTopicListSerializer(serializers.ListSerializer): # pylint: disable=abstract-method
"""
List serializer for efficiently serializing a set of topics.
"""
def to_representation(self, obj): # pylint: disable=arguments-differ
"""Adds team_count to each topic. """
data = super().to_representation(obj)
add_team_count(
self.context['request'].user,
data,
self.context['course_id'],
self.context.get('organization_protection_status')
)
return data
class BulkTeamCountTopicSerializer(BaseTopicSerializer): # pylint: disable=abstract-method
"""
Serializes a set of topics, adding the team_count field to each topic as a bulk operation.
Requires that `context` is provided with a valid course_id in order to filter teams within the course.
"""
class Meta:
list_serializer_class = BulkTeamCountTopicListSerializer
| agpl-3.0 |
nkantar/Starminder | starminder/main/views.py | 1 | 1457 | from datetime import time
from django.contrib.auth.decorators import login_required
from django.urls import reverse_lazy
from django.utils.decorators import method_decorator
from django.views.generic import FormView, TemplateView
from starminder.main.forms import ProfileForm
class HomeView(TemplateView):
template_name = "home.html"
@method_decorator(login_required(login_url="/"), name="dispatch")
class DashboardView(FormView):
form_class = ProfileForm
template_name = "dashboard.html"
success_url = reverse_lazy("dashboard")
def get_initial(self):
initial = super().get_initial()
initial.update(
{
"number": self.request.user.profile.number,
"day": self.request.user.profile.day,
"time": self.request.user.profile.time,
"email": self.request.user.email,
}
)
return initial
def post(self, *args, **kwargs):
form = self.get_form()
if form.is_valid():
user = self.request.user
user.email = form.data["email"]
user.save()
profile = user.profile
profile.number = int(form.data["number"])
profile.day = int(form.data["day"])
profile.time = time.fromisoformat(form.data["time"])
profile.save()
return self.form_valid(form)
else:
return self.form_invalid(form)
| bsd-3-clause |
larsbs/nftablui | nftserver/app/validators/chain_validator.py | 1 | 1311 | from utils.nft_errors import NFTValidationError, abort
from wrappers import table_wrapper, chain_wrapper
def validate_new_chain(chain_json):
chain = chain_json['chain']
validation_error = NFTValidationError('chain')
# JSON errors
if not chain:
raise abort(400, 'No "chain" field in chain json')
if not chain['table']:
raise abort(400, 'Chain has no table associated')
if not chain['table'] in [t['id'] for t in table_wrapper.list_all_tables()]:
raise abort(400, 'The table {table_id} doesn\'t exist'.format(table_id=chain['table']))
# Validate hook and type?
# Validation errors
if not chain['name']:
validation_error.add_error('name', 'This field is required.')
# If no hook or type is specified, no priority is needed
# if chain['priority'] == None: # Use this instead of "not" because 0 is true
# validation_error.errors.append({'priority': 'This field is required.'})
# Return
if validation_error.has_errors():
raise validation_error
else:
return chain
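# Hedged illustration (not part of the validator): a minimal payload that gets
# past validate_new_chain(), assuming the referenced table id exists:
#   {'chain': {'name': 'input', 'table': '<existing-table-id>',
#              'hook': 'input', 'type': 'filter', 'priority': 0}}
# Only 'name' and a valid 'table' are actually enforced above; the other keys
# are shown for context.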
def validate_chain_delete(chain_id):
if not chain_id:
raise abort(400, 'No chain id specified.')
chain = chain_wrapper.get_chain(chain_id)
if not chain:
raise abort(400, 'The chain doesn\'t exists')
return chain
| mit |
TNT-Samuel/Coding-Projects | DNS Server/Source - Copy/Lib/site-packages/urllib3/contrib/ntlmpool.py | 312 | 4478 | """
NTLM authenticating pool, contributed by erikcederstran
Issue #10, see: http://code.google.com/p/urllib3/issues/detail?id=10
"""
from __future__ import absolute_import
from logging import getLogger
from ntlm import ntlm
from .. import HTTPSConnectionPool
from ..packages.six.moves.http_client import HTTPSConnection
log = getLogger(__name__)
class NTLMConnectionPool(HTTPSConnectionPool):
"""
Implements an NTLM authentication version of an urllib3 connection pool
"""
scheme = 'https'
def __init__(self, user, pw, authurl, *args, **kwargs):
"""
authurl is a random URL on the server that is protected by NTLM.
user is the Windows user, probably in the DOMAIN\\username format.
pw is the password for the user.
"""
super(NTLMConnectionPool, self).__init__(*args, **kwargs)
self.authurl = authurl
self.rawuser = user
user_parts = user.split('\\', 1)
self.domain = user_parts[0].upper()
self.user = user_parts[1]
self.pw = pw
def _new_conn(self):
# Performs the NTLM handshake that secures the connection. The socket
# must be kept open while requests are performed.
self.num_connections += 1
log.debug('Starting NTLM HTTPS connection no. %d: https://%s%s',
self.num_connections, self.host, self.authurl)
headers = {}
headers['Connection'] = 'Keep-Alive'
req_header = 'Authorization'
resp_header = 'www-authenticate'
conn = HTTPSConnection(host=self.host, port=self.port)
# Send negotiation message
headers[req_header] = (
'NTLM %s' % ntlm.create_NTLM_NEGOTIATE_MESSAGE(self.rawuser))
log.debug('Request headers: %s', headers)
conn.request('GET', self.authurl, None, headers)
res = conn.getresponse()
reshdr = dict(res.getheaders())
log.debug('Response status: %s %s', res.status, res.reason)
log.debug('Response headers: %s', reshdr)
log.debug('Response data: %s [...]', res.read(100))
# Remove the reference to the socket, so that it can not be closed by
# the response object (we want to keep the socket open)
res.fp = None
# Server should respond with a challenge message
auth_header_values = reshdr[resp_header].split(', ')
auth_header_value = None
for s in auth_header_values:
if s[:5] == 'NTLM ':
auth_header_value = s[5:]
if auth_header_value is None:
raise Exception('Unexpected %s response header: %s' %
(resp_header, reshdr[resp_header]))
# Send authentication message
ServerChallenge, NegotiateFlags = \
ntlm.parse_NTLM_CHALLENGE_MESSAGE(auth_header_value)
auth_msg = ntlm.create_NTLM_AUTHENTICATE_MESSAGE(ServerChallenge,
self.user,
self.domain,
self.pw,
NegotiateFlags)
headers[req_header] = 'NTLM %s' % auth_msg
log.debug('Request headers: %s', headers)
conn.request('GET', self.authurl, None, headers)
res = conn.getresponse()
log.debug('Response status: %s %s', res.status, res.reason)
log.debug('Response headers: %s', dict(res.getheaders()))
log.debug('Response data: %s [...]', res.read()[:100])
if res.status != 200:
if res.status == 401:
raise Exception('Server rejected request: wrong '
'username or password')
raise Exception('Wrong server response: %s %s' %
(res.status, res.reason))
res.fp = None
log.debug('Connection established')
return conn
def urlopen(self, method, url, body=None, headers=None, retries=3,
redirect=True, assert_same_host=True):
if headers is None:
headers = {}
headers['Connection'] = 'Keep-Alive'
return super(NTLMConnectionPool, self).urlopen(method, url, body,
headers, retries,
redirect,
assert_same_host)
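# Hedged usage sketch (not part of the contrib module): the pool is built with
# the Windows credentials and an NTLM-protected URL on the target host, e.g.
#   pool = NTLMConnectionPool('DOMAIN\\user', 'secret', '/protected/',
#                             host='intranet.example.com', port=443)
#   response = pool.urlopen('GET', '/protected/page')
# The host, path and credentials above are hypothetical.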
| gpl-3.0 |
open-synergy/website | website_event_register_free_with_sale/__openerp__.py | 5 | 1591 | # -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (c) 2015 Serv. Tecnol. Avanzados (http://www.serviciosbaeza.com)
# Pedro M. Baeza <pedro.baeza@serviciosbaeza.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
"name": "Register for free events - Sale extension",
"version": "8.0.1.1.0",
"author": "Tecnativa, "
"Antiun Ingeniería S.L., "
"Odoo Community Association (OCA)",
"website": "https://www.tecnativa.com",
"license": "AGPL-3",
"category": "Website",
"summary": "Combine free and paid tickets on events",
"depends": [
'website_event_register_free',
'website_event_sale',
],
"data": [
"templates/website_sale.xml",
],
"auto_install": True,
"installable": True,
}
| agpl-3.0 |
linjoahow/lego_Automatic-assembly | static/Brython3.1.1-20150328-091302/Lib/browser/indexed_db.py | 632 | 3008 | class EventListener:
def __init__(self, events=[]):
self._events=events
def append(self, event):
self._events.append(event)
def fire(self, e):
for _event in self._events:
_event(e)
class IndexedDB:
def __init__(self):
if not __BRYTHON__.has_indexedDB:
raise NotImplementedError("Your browser doesn't support indexedDB")
return
self._indexedDB=__BRYTHON__.indexedDB()
self._db=None
self._version=None
def _onsuccess(self, event):
self._db=event.target.result
def open(self, name, onsuccess, version=1.0, onerror=None,
onupgradeneeded=None):
self._version=version
_result=self._indexedDB.open(name, version)
_success=EventListener([self._onsuccess, onsuccess])
_result.onsuccess=_success.fire
_result.onupgradeneeded=onupgradeneeded
#if onerror is None:
def onerror(e):
print("onerror: %s:%s" % (e.type, e.target.result))
def onblocked(e):
print("blocked: %s:%s" % (e.type, e.result))
_result.onerror=onerror
_result.onblocked=onblocked
def transaction(self, entities, mode='read'):
return Transaction(self._db.transaction(entities, mode))
class Transaction:
def __init__(self, transaction):
self._transaction=transaction
def objectStore(self, name):
return ObjectStore(self._transaction.objectStore(name))
class ObjectStore:
def __init__(self, objectStore):
self._objectStore=objectStore
self._data=[]
def clear(self, onsuccess=None, onerror=None):
_result=self._objectStore.clear()
if onsuccess is not None:
_result.onsuccess=onsuccess
if onerror is not None:
_result.onerror=onerror
def _helper(self, func, object, onsuccess=None, onerror=None):
_result=func(object)
if onsuccess is not None:
_result.onsuccess=onsuccess
if onerror is not None:
_result.onerror=onerror
def put(self, obj, key=None, onsuccess=None, onerror=None):
_r = self._objectStore.put(obj, key)
_r.onsuccess = onsuccess
_r.onerror = onerror
def add(self, obj, key, onsuccess=None, onerror=None):
_r = self._objectStore.add(obj, key)
_r.onsuccess = onsuccess
_r.onerror = onerror
#self._helper(self._objectStore.add, object, onsuccess, onerror)
def delete(self, index, onsuccess=None, onerror=None):
self._helper(self._objectStore.delete, index, onsuccess, onerror)
def query(self, *args):
self._data=[]
def onsuccess(event):
cursor=event.target.result
if cursor is not None:
self._data.append(cursor.value)
getattr(cursor,"continue")() # cursor.continue() is illegal
self._objectStore.openCursor(args).onsuccess=onsuccess
def fetchall(self):
yield self._data
def get(self, key, onsuccess=None, onerror=None):
self._helper(self._objectStore.get, key, onsuccess, onerror)
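# Hedged usage sketch (not part of the original wrapper; browser-only code):
# a typical flow opens the database and touches a store from the onsuccess
# callback, e.g.
#   db = IndexedDB()
#   def on_open(evt):
#       store = db.transaction(['notes'], 'readwrite').objectStore('notes')
#       store.put({'text': 'hello'}, 1)
#   db.open('demo', on_open)
# The database name, store name and record are hypothetical.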
| gpl-3.0 |
shubhdev/edx-platform | cms/djangoapps/contentstore/views/tests/test_certificates.py | 17 | 19874 | #-*- coding: utf-8 -*-
"""
Group Configuration Tests.
"""
import json
import mock
from django.conf import settings
from django.test.utils import override_settings
from opaque_keys.edx.keys import AssetKey
from opaque_keys.edx.locations import AssetLocation
from contentstore.utils import reverse_course_url
from contentstore.views.certificates import CERTIFICATE_SCHEMA_VERSION
from contentstore.tests.utils import CourseTestCase
from xmodule.contentstore.django import contentstore
from xmodule.contentstore.content import StaticContent
from xmodule.exceptions import NotFoundError
from student.models import CourseEnrollment
from contentstore.views.certificates import CertificateManager
from django.test.utils import override_settings
from contentstore.utils import get_lms_link_for_certificate_web_view
FEATURES_WITH_CERTS_ENABLED = settings.FEATURES.copy()
FEATURES_WITH_CERTS_ENABLED['CERTIFICATES_HTML_VIEW'] = True
CERTIFICATE_JSON = {
u'name': u'Test certificate',
u'description': u'Test description',
u'version': CERTIFICATE_SCHEMA_VERSION,
}
CERTIFICATE_JSON_WITH_SIGNATORIES = {
u'name': u'Test certificate',
u'description': u'Test description',
u'version': CERTIFICATE_SCHEMA_VERSION,
u'course_title': 'Course Title Override',
u'signatories': [
{
"name": "Bob Smith",
"title": "The DEAN.",
"signature_image_path": "/c4x/test/CSS101/asset/Signature.png"
}
]
}
# pylint: disable=no-member
class HelperMethods(object):
"""
Mixin that provides useful methods for certificate configuration tests.
"""
def _create_fake_images(self, asset_keys):
"""
Creates fake image files for a list of asset_keys.
"""
for asset_key_string in asset_keys:
asset_key = AssetKey.from_string(asset_key_string)
content = StaticContent(
asset_key, "Fake asset", "image/png", "data",
)
contentstore().save(content)
def _add_course_certificates(self, count=1, signatory_count=0):
"""
Create certificate for the course.
"""
signatories = [
{
'name': 'Name ' + str(i),
'title': 'Title ' + str(i),
'signature_image_path': '/c4x/test/CSS101/asset/Signature{}.png'.format(i),
'id': i
} for i in xrange(0, signatory_count)
]
# create images for signatory signatures except the last signatory
for idx, signatory in enumerate(signatories):
if len(signatories) > 2 and idx == len(signatories) - 1:
continue
else:
self._create_fake_images([signatory['signature_image_path']])
certificates = [
{
'id': i,
'name': 'Name ' + str(i),
'description': 'Description ' + str(i),
'org_logo_path': '/c4x/test/CSS101/asset/org_logo{}.png'.format(i),
'signatories': signatories,
'version': CERTIFICATE_SCHEMA_VERSION,
'is_active': False
} for i in xrange(0, count)
]
self._create_fake_images([certificate['org_logo_path'] for certificate in certificates])
self.course.certificates = {'certificates': certificates}
self.save_course()
# pylint: disable=no-member
class CertificatesBaseTestCase(object):
"""
Mixin with base test cases for the certificates.
"""
def _remove_ids(self, content):
"""
Remove ids from the response. We cannot predict IDs, because they're
generated randomly.
We use this method to clean up response when creating new certificate.
"""
certificate_id = content.pop("id")
return certificate_id
def test_required_fields_are_absent(self):
"""
Test required fields are absent.
"""
bad_jsons = [
# must have name of the certificate
{
u'description': 'Test description',
u'version': CERTIFICATE_SCHEMA_VERSION
},
# an empty json
{},
]
for bad_json in bad_jsons:
response = self.client.post(
self._url(),
data=json.dumps(bad_json),
content_type="application/json",
HTTP_ACCEPT="application/json",
HTTP_X_REQUESTED_WITH="XMLHttpRequest"
)
self.assertEqual(response.status_code, 400)
self.assertNotIn("Location", response)
content = json.loads(response.content)
self.assertIn("error", content)
def test_invalid_json(self):
"""
Test invalid json handling.
"""
# Invalid JSON.
invalid_json = "{u'name': 'Test Name', u'description': 'Test description'," \
" u'version': " + str(CERTIFICATE_SCHEMA_VERSION) + ", []}"
response = self.client.post(
self._url(),
data=invalid_json,
content_type="application/json",
HTTP_ACCEPT="application/json",
HTTP_X_REQUESTED_WITH="XMLHttpRequest"
)
self.assertEqual(response.status_code, 400)
self.assertNotIn("Location", response)
content = json.loads(response.content)
self.assertIn("error", content)
def test_certificate_data_validation(self):
#Test certificate schema version
json_data_1 = {
u'version': 100,
u'name': u'Test certificate',
u'description': u'Test description'
}
with self.assertRaises(Exception) as context:
CertificateManager.validate(json_data_1)
self.assertTrue("Unsupported certificate schema version: 100. Expected version: 1." in context.exception)
#Test certificate name is missing
json_data_2 = {
u'version': CERTIFICATE_SCHEMA_VERSION,
u'description': u'Test description'
}
with self.assertRaises(Exception) as context:
CertificateManager.validate(json_data_2)
self.assertTrue('must have name of the certificate' in context.exception)
# pylint: disable=no-member
@override_settings(FEATURES=FEATURES_WITH_CERTS_ENABLED)
class CertificatesListHandlerTestCase(CourseTestCase, CertificatesBaseTestCase, HelperMethods):
"""
Test cases for certificates_list_handler.
"""
def setUp(self):
"""
Set up CertificatesListHandlerTestCase.
"""
super(CertificatesListHandlerTestCase, self).setUp()
def _url(self):
"""
Return url for the handler.
"""
return reverse_course_url('certificates.certificates_list_handler', self.course.id)
def test_can_create_certificate(self):
"""
Test that you can create a certificate.
"""
expected = {
u'version': CERTIFICATE_SCHEMA_VERSION,
u'name': u'Test certificate',
u'description': u'Test description',
u'org_logo_path': '',
u'signatories': []
}
response = self.client.ajax_post(
self._url(),
data=CERTIFICATE_JSON
)
self.assertEqual(response.status_code, 201)
self.assertIn("Location", response)
content = json.loads(response.content)
self._remove_ids(content) # pylint: disable=unused-variable
self.assertEqual(content, expected)
@override_settings(LMS_BASE=None)
def test_no_lms_base_for_certificate_web_view_link(self):
test_link = get_lms_link_for_certificate_web_view(
user_id=self.user.id,
course_key=self.course.id,
mode='honor'
)
self.assertEquals(test_link, None)
@override_settings(LMS_BASE="lms_base_url")
def test_lms_link_for_certificate_web_view(self):
test_url = "//lms_base_url/certificates/user/" \
+ str(self.user.id) + "/course/" + unicode(self.course.id) + '?preview=honor'
link = get_lms_link_for_certificate_web_view(
user_id=self.user.id,
course_key=self.course.id,
mode='honor'
)
self.assertEquals(link, test_url)
@mock.patch.dict('django.conf.settings.FEATURES', {'CERTIFICATES_HTML_VIEW': True})
def test_certificate_info_in_response(self):
"""
Test that certificate has been created and rendered properly.
"""
response = self.client.ajax_post(
self._url(),
data=CERTIFICATE_JSON_WITH_SIGNATORIES
)
self.assertEqual(response.status_code, 201)
# in html response
result = self.client.get_html(self._url())
self.assertIn('Test certificate', result.content)
self.assertIn('Test description', result.content)
# in JSON response
response = self.client.get_json(self._url())
data = json.loads(response.content)
self.assertEquals(len(data), 1)
self.assertEqual(data[0]['name'], 'Test certificate')
self.assertEqual(data[0]['description'], 'Test description')
self.assertEqual(data[0]['version'], CERTIFICATE_SCHEMA_VERSION)
def test_unsupported_http_accept_header(self):
"""
        Test that a request with an unsupported Accept header is rejected.
"""
response = self.client.get(
self._url(),
HTTP_ACCEPT="text/plain",
)
self.assertEqual(response.status_code, 406)
def test_certificate_unsupported_method(self):
"""
        Test that an unsupported HTTP method (PUT) returns a 405 response.
"""
resp = self.client.put(self._url())
self.assertEqual(resp.status_code, 405)
def test_not_permitted(self):
"""
        Test that a user without access to the course gets a permission denied response.
"""
test_user_client, test_user = self.create_non_staff_authed_user_client()
CourseEnrollment.enroll(test_user, self.course.id)
response = test_user_client.ajax_post(
self._url(),
data=CERTIFICATE_JSON
)
self.assertEqual(response.status_code, 403)
self.assertIn("error", response.content)
def test_assign_unique_identifier_to_certificates(self):
"""
Test certificates have unique ids
"""
self._add_course_certificates(count=2)
json_data = {
u'version': CERTIFICATE_SCHEMA_VERSION,
u'name': u'New test certificate',
u'description': u'New test description',
u'signatories': []
}
response = self.client.post(
self._url(),
data=json.dumps(json_data),
content_type="application/json",
HTTP_ACCEPT="application/json",
HTTP_X_REQUESTED_WITH="XMLHttpRequest",
)
new_certificate = json.loads(response.content)
for prev_certificate in self.course.certificates['certificates']:
self.assertNotEqual(new_certificate.get('id'), prev_certificate.get('id'))
@override_settings(FEATURES=FEATURES_WITH_CERTS_ENABLED)
class CertificatesDetailHandlerTestCase(CourseTestCase, CertificatesBaseTestCase, HelperMethods):
"""
    Test cases for certificates_detail_handler.
"""
_id = 0
def _url(self, cid=-1):
"""
Return url for the handler.
"""
cid = cid if cid > 0 else self._id
return reverse_course_url(
'certificates.certificates_detail_handler',
self.course.id,
kwargs={'certificate_id': cid},
)
def test_can_create_new_certificate_if_it_does_not_exist(self):
"""
PUT/POST new certificate.
"""
expected = {
u'id': 666,
u'version': CERTIFICATE_SCHEMA_VERSION,
u'name': u'Test certificate',
u'description': u'Test description',
u'course_title': u'Course Title Override',
u'org_logo_path': '',
u'signatories': []
}
response = self.client.put(
self._url(cid=666),
data=json.dumps(expected),
content_type="application/json",
HTTP_ACCEPT="application/json",
HTTP_X_REQUESTED_WITH="XMLHttpRequest",
)
content = json.loads(response.content)
self.assertEqual(content, expected)
def test_can_edit_certificate(self):
"""
Edit certificate, check its id and modified fields.
"""
self._add_course_certificates(count=2)
expected = {
u'id': 1,
u'version': CERTIFICATE_SCHEMA_VERSION,
u'name': u'New test certificate',
u'description': u'New test description',
u'course_title': u'Course Title Override',
u'org_logo_path': '',
u'signatories': []
}
response = self.client.put(
self._url(cid=1),
data=json.dumps(expected),
content_type="application/json",
HTTP_ACCEPT="application/json",
HTTP_X_REQUESTED_WITH="XMLHttpRequest",
)
content = json.loads(response.content)
self.assertEqual(content, expected)
self.reload_course()
# Verify that certificate is properly updated in the course.
course_certificates = self.course.certificates['certificates']
self.assertEqual(len(course_certificates), 2)
self.assertEqual(course_certificates[1].get('name'), u'New test certificate')
self.assertEqual(course_certificates[1].get('description'), 'New test description')
def test_can_delete_certificate_with_signatories(self):
"""
        Delete a certificate with signatories and verify its org logo asset is removed too.
"""
self._add_course_certificates(count=2, signatory_count=1)
certificates = self.course.certificates['certificates']
org_logo_url = certificates[1]['org_logo_path']
image_asset_location = AssetLocation.from_deprecated_string(org_logo_url)
content = contentstore().find(image_asset_location)
self.assertIsNotNone(content)
response = self.client.delete(
self._url(cid=1),
content_type="application/json",
HTTP_ACCEPT="application/json",
HTTP_X_REQUESTED_WITH="XMLHttpRequest",
)
self.assertEqual(response.status_code, 204)
self.reload_course()
# Verify that certificates are properly updated in the course.
certificates = self.course.certificates['certificates']
self.assertEqual(len(certificates), 1)
# make sure certificate org logo is deleted too
self.assertRaises(NotFoundError, contentstore().find, image_asset_location)
self.assertEqual(certificates[0].get('name'), 'Name 0')
self.assertEqual(certificates[0].get('description'), 'Description 0')
def test_delete_non_existing_certificate(self):
"""
        Try to delete a nonexistent certificate. It should return status code 404 Not Found.
"""
self._add_course_certificates(count=2)
response = self.client.delete(
self._url(cid=100),
content_type="application/json",
HTTP_ACCEPT="application/json",
HTTP_X_REQUESTED_WITH="XMLHttpRequest",
)
self.assertEqual(response.status_code, 404)
def test_can_delete_signatory(self):
"""
Delete an existing certificate signatory
"""
self._add_course_certificates(count=2, signatory_count=3)
certificates = self.course.certificates['certificates']
signatory = certificates[1].get("signatories")[1]
image_asset_location = AssetLocation.from_deprecated_string(signatory['signature_image_path'])
content = contentstore().find(image_asset_location)
self.assertIsNotNone(content)
test_url = '{}/signatories/1'.format(self._url(cid=1))
response = self.client.delete(
test_url,
content_type="application/json",
HTTP_ACCEPT="application/json",
HTTP_X_REQUESTED_WITH="XMLHttpRequest",
)
self.assertEqual(response.status_code, 204)
self.reload_course()
# Verify that certificates are properly updated in the course.
certificates = self.course.certificates['certificates']
self.assertEqual(len(certificates[1].get("signatories")), 2)
# make sure signatory signature image is deleted too
self.assertRaises(NotFoundError, contentstore().find, image_asset_location)
def test_deleting_signatory_without_signature(self):
"""
        Delete a signatory whose signature image has already been removed or does not exist.
"""
self._add_course_certificates(count=2, signatory_count=4)
test_url = '{}/signatories/3'.format(self._url(cid=1))
response = self.client.delete(
test_url,
content_type="application/json",
HTTP_ACCEPT="application/json",
HTTP_X_REQUESTED_WITH="XMLHttpRequest",
)
self.assertEqual(response.status_code, 204)
def test_delete_signatory_non_existing_certificate(self):
"""
        Try to delete a signatory of a nonexistent certificate. It should return status code 404 Not Found.
"""
self._add_course_certificates(count=2)
test_url = '{}/signatories/1'.format(self._url(cid=100))
response = self.client.delete(
test_url,
content_type="application/json",
HTTP_ACCEPT="application/json",
HTTP_X_REQUESTED_WITH="XMLHttpRequest",
)
self.assertEqual(response.status_code, 404)
def test_certificate_activation_success(self):
"""
        Activate and deactivate the course certificate.
"""
test_url = reverse_course_url('certificates.certificate_activation_handler', self.course.id)
self._add_course_certificates(count=1, signatory_count=2)
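        # The first pass activates the certificate and the second deactivates it;
        # each pass verifies that the flag is persisted on the course.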
is_active = True
for i in range(2):
if i == 1:
is_active = not is_active
response = self.client.post(
test_url,
data=json.dumps({"is_active": is_active}),
content_type="application/json",
HTTP_ACCEPT="application/json",
HTTP_X_REQUESTED_WITH="XMLHttpRequest"
)
self.assertEquals(response.status_code, 200)
course = self.store.get_course(self.course.id)
certificates = course.certificates['certificates']
self.assertEqual(certificates[0].get('is_active'), is_active)
def test_certificate_activation_failure(self):
"""
        Certificate activation should fail with a permission denied response when the user
        does not have access to the course.
"""
test_url = reverse_course_url('certificates.certificate_activation_handler', self.course.id)
test_user_client, test_user = self.create_non_staff_authed_user_client()
CourseEnrollment.enroll(test_user, self.course.id)
self._add_course_certificates(count=1, signatory_count=2)
response = test_user_client.post(
test_url,
data=json.dumps({"is_active": True}),
content_type="application/json",
HTTP_ACCEPT="application/json",
HTTP_X_REQUESTED_WITH="XMLHttpRequest",
)
self.assertEquals(response.status_code, 403)
course = self.store.get_course(self.course.id)
certificates = course.certificates['certificates']
self.assertEqual(certificates[0].get('is_active'), False)
| agpl-3.0 |
eerwitt/tensorflow | tensorflow/contrib/distributions/python/ops/bijectors/bijector_test_util.py | 27 | 7584 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Bijector unit-test utilities."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.contrib.distributions.python.ops import uniform as uniform_lib
from tensorflow.python.framework import ops
from tensorflow.python.ops import math_ops
def assert_finite(array):
if not np.isfinite(array).all():
raise AssertionError("array was not all finite. %s" % array[:15])
def assert_strictly_increasing(array):
np.testing.assert_array_less(0., np.diff(array))
def assert_strictly_decreasing(array):
np.testing.assert_array_less(np.diff(array), 0.)
def assert_strictly_monotonic(array):
if array[0] < array[-1]:
assert_strictly_increasing(array)
else:
assert_strictly_decreasing(array)
def assert_scalar_congruency(bijector,
lower_x,
upper_x,
n=int(10e3),
rtol=0.01,
sess=None):
"""Assert `bijector`'s forward/inverse/inverse_log_det_jacobian are congruent.
We draw samples `X ~ U(lower_x, upper_x)`, then feed these through the
`bijector` in order to check that:
1. the forward is strictly monotonic.
2. the forward/inverse methods are inverses of each other.
3. the jacobian is the correct change of measure.
This can only be used for a Bijector mapping open subsets of the real line
to themselves. This is due to the fact that this test compares the `prob`
before/after transformation with the Lebesgue measure on the line.
Args:
bijector: Instance of Bijector
lower_x: Python scalar.
upper_x: Python scalar. Must have `lower_x < upper_x`, and both must be in
the domain of the `bijector`. The `bijector` should probably not produce
huge variation in values in the interval `(lower_x, upper_x)`, or else
the variance based check of the Jacobian will require small `rtol` or
huge `n`.
n: Number of samples to draw for the checks.
rtol: Positive number. Used for the Jacobian check.
sess: `tf.Session`. Defaults to the default session.
Raises:
AssertionError: If tests fail.
"""
# Checks and defaults.
assert bijector.event_ndims.eval() == 0
if sess is None:
sess = ops.get_default_session()
# Should be monotonic over this interval
ten_x_pts = np.linspace(lower_x, upper_x, num=10).astype(np.float32)
if bijector.dtype is not None:
ten_x_pts = ten_x_pts.astype(bijector.dtype.as_numpy_dtype)
forward_on_10_pts = bijector.forward(ten_x_pts)
# Set the lower/upper limits in the range of the bijector.
lower_y, upper_y = sess.run(
[bijector.forward(lower_x), bijector.forward(upper_x)])
if upper_y < lower_y: # If bijector.forward is a decreasing function.
lower_y, upper_y = upper_y, lower_y
# Uniform samples from the domain, range.
uniform_x_samps = uniform_lib.Uniform(
low=lower_x, high=upper_x).sample(n, seed=0)
uniform_y_samps = uniform_lib.Uniform(
low=lower_y, high=upper_y).sample(n, seed=1)
# These compositions should be the identity.
inverse_forward_x = bijector.inverse(bijector.forward(uniform_x_samps))
forward_inverse_y = bijector.forward(bijector.inverse(uniform_y_samps))
# For a < b, and transformation y = y(x),
# (b - a) = \int_a^b dx = \int_{y(a)}^{y(b)} |dx/dy| dy
# "change_measure_dy_dx" below is a Monte Carlo approximation to the right
# hand side, which should then be close to the left, which is (b - a).
dy_dx = math_ops.exp(bijector.inverse_log_det_jacobian(uniform_y_samps))
# E[|dx/dy|] under Uniform[lower_y, upper_y]
# = \int_{y(a)}^{y(b)} |dx/dy| dP(u), where dP(u) is the uniform measure
expectation_of_dy_dx_under_uniform = math_ops.reduce_mean(dy_dx)
# dy = dP(u) * (upper_y - lower_y)
change_measure_dy_dx = (
(upper_y - lower_y) * expectation_of_dy_dx_under_uniform)
# We'll also check that dy_dx = 1 / dx_dy.
dx_dy = math_ops.exp(
bijector.forward_log_det_jacobian(bijector.inverse(uniform_y_samps)))
[
forward_on_10_pts_v,
dy_dx_v,
dx_dy_v,
change_measure_dy_dx_v,
uniform_x_samps_v,
uniform_y_samps_v,
inverse_forward_x_v,
forward_inverse_y_v,
] = sess.run([
forward_on_10_pts,
dy_dx,
dx_dy,
change_measure_dy_dx,
uniform_x_samps,
uniform_y_samps,
inverse_forward_x,
forward_inverse_y,
])
assert_strictly_monotonic(forward_on_10_pts_v)
# Composition of forward/inverse should be the identity.
np.testing.assert_allclose(
inverse_forward_x_v, uniform_x_samps_v, atol=1e-5, rtol=1e-3)
np.testing.assert_allclose(
forward_inverse_y_v, uniform_y_samps_v, atol=1e-5, rtol=1e-3)
# Change of measure should be correct.
np.testing.assert_allclose(
upper_x - lower_x, change_measure_dy_dx_v, atol=0, rtol=rtol)
# Inverse Jacobian should be equivalent to the reciprocal of the forward
# Jacobian.
np.testing.assert_allclose(
dy_dx_v, np.divide(1., dx_dy_v), atol=1e-5, rtol=1e-3)
def assert_bijective_and_finite(bijector, x, y, atol=0, rtol=1e-5, sess=None):
"""Assert that forward/inverse (along with jacobians) are inverses and finite.
It is recommended to use x and y values that are very very close to the edge
of the Bijector's domain.
Args:
bijector: A Bijector instance.
x: np.array of values in the domain of bijector.forward.
y: np.array of values in the domain of bijector.inverse.
atol: Absolute tolerance.
rtol: Relative tolerance.
sess: TensorFlow session. Defaults to the default session.
Raises:
AssertionError: If tests fail.
"""
sess = sess or ops.get_default_session()
  # Sanity-check the incoming points: callers sometimes pass ranges that are
  # too extreme for the dtype (especially float16), making x or y non-finite.
assert_finite(x)
assert_finite(y)
f_x = bijector.forward(x)
g_y = bijector.inverse(y)
[
x_from_x,
y_from_y,
ildj_f_x,
fldj_x,
ildj_y,
fldj_g_y,
f_x_v,
g_y_v,
] = sess.run([
bijector.inverse(f_x),
bijector.forward(g_y),
bijector.inverse_log_det_jacobian(f_x),
bijector.forward_log_det_jacobian(x),
bijector.inverse_log_det_jacobian(y),
bijector.forward_log_det_jacobian(g_y),
f_x,
g_y,
])
assert_finite(x_from_x)
assert_finite(y_from_y)
assert_finite(ildj_f_x)
assert_finite(fldj_x)
assert_finite(ildj_y)
assert_finite(fldj_g_y)
assert_finite(f_x_v)
assert_finite(g_y_v)
np.testing.assert_allclose(x_from_x, x, atol=atol, rtol=rtol)
np.testing.assert_allclose(y_from_y, y, atol=atol, rtol=rtol)
np.testing.assert_allclose(-ildj_f_x, fldj_x, atol=atol, rtol=rtol)
np.testing.assert_allclose(-ildj_y, fldj_g_y, atol=atol, rtol=rtol)
| apache-2.0 |
miguelpalacio/python-for-android | python-build/python-libs/gdata/src/gdata/tlslite/TLSConnection.py | 278 | 70347 | """
MAIN CLASS FOR TLS LITE (START HERE!).
"""
from __future__ import generators
import socket
from utils.compat import formatExceptionTrace
from TLSRecordLayer import TLSRecordLayer
from Session import Session
from constants import *
from utils.cryptomath import getRandomBytes
from errors import *
from messages import *
from mathtls import *
from HandshakeSettings import HandshakeSettings
class TLSConnection(TLSRecordLayer):
"""
This class wraps a socket and provides TLS handshaking and data
transfer.
To use this class, create a new instance, passing a connected
socket into the constructor. Then call some handshake function.
If the handshake completes without raising an exception, then a TLS
connection has been negotiated. You can transfer data over this
connection as if it were a socket.
This class provides both synchronous and asynchronous versions of
its key functions. The synchronous versions should be used when
writing single-or multi-threaded code using blocking sockets. The
asynchronous versions should be used when performing asynchronous,
event-based I/O with non-blocking sockets.
Asynchronous I/O is a complicated subject; typically, you should
not use the asynchronous functions directly, but should use some
framework like asyncore or Twisted which TLS Lite integrates with
(see
L{tlslite.integration.TLSAsyncDispatcherMixIn.TLSAsyncDispatcherMixIn} or
L{tlslite.integration.TLSTwistedProtocolWrapper.TLSTwistedProtocolWrapper}).
"""
def __init__(self, sock):
"""Create a new TLSConnection instance.
@param sock: The socket data will be transmitted on. The
socket should already be connected. It may be in blocking or
non-blocking mode.
@type sock: L{socket.socket}
"""
TLSRecordLayer.__init__(self, sock)
def handshakeClientSRP(self, username, password, session=None,
settings=None, checker=None, async=False):
"""Perform an SRP handshake in the role of client.
This function performs a TLS/SRP handshake. SRP mutually
authenticates both parties to each other using only a
username and password. This function may also perform a
combined SRP and server-certificate handshake, if the server
chooses to authenticate itself with a certificate chain in
addition to doing SRP.
TLS/SRP is non-standard. Most TLS implementations don't
support it. See
U{http://www.ietf.org/html.charters/tls-charter.html} or
U{http://trevp.net/tlssrp/} for the latest information on
TLS/SRP.
Like any handshake function, this can be called on a closed
TLS connection, or on a TLS connection that is already open.
If called on an open connection it performs a re-handshake.
If the function completes without raising an exception, the
TLS connection will be open and available for data transfer.
If an exception is raised, the connection will have been
automatically closed (if it was ever open).
@type username: str
@param username: The SRP username.
@type password: str
@param password: The SRP password.
@type session: L{tlslite.Session.Session}
@param session: A TLS session to attempt to resume. This
session must be an SRP session performed with the same username
and password as were passed in. If the resumption does not
succeed, a full SRP handshake will be performed.
@type settings: L{tlslite.HandshakeSettings.HandshakeSettings}
@param settings: Various settings which can be used to control
the ciphersuites, certificate types, and SSL/TLS versions
offered by the client.
@type checker: L{tlslite.Checker.Checker}
@param checker: A Checker instance. This instance will be
invoked to examine the other party's authentication
        credentials, if the handshake completes successfully.
@type async: bool
@param async: If False, this function will block until the
handshake is completed. If True, this function will return a
generator. Successive invocations of the generator will
return 0 if it is waiting to read from the socket, 1 if it is
waiting to write to the socket, or will raise StopIteration if
the handshake operation is completed.
@rtype: None or an iterable
@return: If 'async' is True, a generator object will be
returned.
@raise socket.error: If a socket error occurs.
@raise tlslite.errors.TLSAbruptCloseError: If the socket is closed
without a preceding alert.
@raise tlslite.errors.TLSAlert: If a TLS alert is signalled.
@raise tlslite.errors.TLSAuthenticationError: If the checker
doesn't like the other party's authentication credentials.
"""
handshaker = self._handshakeClientAsync(srpParams=(username, password),
session=session, settings=settings, checker=checker)
if async:
return handshaker
for result in handshaker:
pass
def handshakeClientCert(self, certChain=None, privateKey=None,
session=None, settings=None, checker=None,
async=False):
"""Perform a certificate-based handshake in the role of client.
This function performs an SSL or TLS handshake. The server
will authenticate itself using an X.509 or cryptoID certificate
chain. If the handshake succeeds, the server's certificate
chain will be stored in the session's serverCertChain attribute.
Unless a checker object is passed in, this function does no
validation or checking of the server's certificate chain.
If the server requests client authentication, the
client will send the passed-in certificate chain, and use the
passed-in private key to authenticate itself. If no
certificate chain and private key were passed in, the client
will attempt to proceed without client authentication. The
server may or may not allow this.
Like any handshake function, this can be called on a closed
TLS connection, or on a TLS connection that is already open.
If called on an open connection it performs a re-handshake.
If the function completes without raising an exception, the
TLS connection will be open and available for data transfer.
If an exception is raised, the connection will have been
automatically closed (if it was ever open).
@type certChain: L{tlslite.X509CertChain.X509CertChain} or
L{cryptoIDlib.CertChain.CertChain}
@param certChain: The certificate chain to be used if the
server requests client authentication.
@type privateKey: L{tlslite.utils.RSAKey.RSAKey}
@param privateKey: The private key to be used if the server
requests client authentication.
@type session: L{tlslite.Session.Session}
@param session: A TLS session to attempt to resume. If the
resumption does not succeed, a full handshake will be
performed.
@type settings: L{tlslite.HandshakeSettings.HandshakeSettings}
@param settings: Various settings which can be used to control
the ciphersuites, certificate types, and SSL/TLS versions
offered by the client.
@type checker: L{tlslite.Checker.Checker}
@param checker: A Checker instance. This instance will be
invoked to examine the other party's authentication
        credentials, if the handshake completes successfully.
@type async: bool
@param async: If False, this function will block until the
handshake is completed. If True, this function will return a
generator. Successive invocations of the generator will
return 0 if it is waiting to read from the socket, 1 if it is
waiting to write to the socket, or will raise StopIteration if
the handshake operation is completed.
@rtype: None or an iterable
@return: If 'async' is True, a generator object will be
returned.
@raise socket.error: If a socket error occurs.
@raise tlslite.errors.TLSAbruptCloseError: If the socket is closed
without a preceding alert.
@raise tlslite.errors.TLSAlert: If a TLS alert is signalled.
@raise tlslite.errors.TLSAuthenticationError: If the checker
doesn't like the other party's authentication credentials.
"""
handshaker = self._handshakeClientAsync(certParams=(certChain,
privateKey), session=session, settings=settings,
checker=checker)
if async:
return handshaker
for result in handshaker:
pass
def handshakeClientUnknown(self, srpCallback=None, certCallback=None,
session=None, settings=None, checker=None,
async=False):
"""Perform a to-be-determined type of handshake in the role of client.
This function performs an SSL or TLS handshake. If the server
requests client certificate authentication, the
certCallback will be invoked and should return a (certChain,
privateKey) pair. If the callback returns None, the library
will attempt to proceed without client authentication. The
server may or may not allow this.
If the server requests SRP authentication, the srpCallback
will be invoked and should return a (username, password) pair.
If the callback returns None, the local implementation will
signal a user_canceled error alert.
After the handshake completes, the client can inspect the
connection's session attribute to determine what type of
authentication was performed.
Like any handshake function, this can be called on a closed
TLS connection, or on a TLS connection that is already open.
If called on an open connection it performs a re-handshake.
If the function completes without raising an exception, the
TLS connection will be open and available for data transfer.
If an exception is raised, the connection will have been
automatically closed (if it was ever open).
@type srpCallback: callable
@param srpCallback: The callback to be used if the server
requests SRP authentication. If None, the client will not
offer support for SRP ciphersuites.
@type certCallback: callable
@param certCallback: The callback to be used if the server
requests client certificate authentication.
@type session: L{tlslite.Session.Session}
@param session: A TLS session to attempt to resume. If the
resumption does not succeed, a full handshake will be
performed.
@type settings: L{tlslite.HandshakeSettings.HandshakeSettings}
@param settings: Various settings which can be used to control
the ciphersuites, certificate types, and SSL/TLS versions
offered by the client.
@type checker: L{tlslite.Checker.Checker}
@param checker: A Checker instance. This instance will be
invoked to examine the other party's authentication
        credentials, if the handshake completes successfully.
@type async: bool
@param async: If False, this function will block until the
handshake is completed. If True, this function will return a
generator. Successive invocations of the generator will
return 0 if it is waiting to read from the socket, 1 if it is
waiting to write to the socket, or will raise StopIteration if
the handshake operation is completed.
@rtype: None or an iterable
@return: If 'async' is True, a generator object will be
returned.
@raise socket.error: If a socket error occurs.
@raise tlslite.errors.TLSAbruptCloseError: If the socket is closed
without a preceding alert.
@raise tlslite.errors.TLSAlert: If a TLS alert is signalled.
@raise tlslite.errors.TLSAuthenticationError: If the checker
doesn't like the other party's authentication credentials.
"""
handshaker = self._handshakeClientAsync(unknownParams=(srpCallback,
certCallback), session=session, settings=settings,
checker=checker)
if async:
return handshaker
for result in handshaker:
pass
def handshakeClientSharedKey(self, username, sharedKey, settings=None,
checker=None, async=False):
"""Perform a shared-key handshake in the role of client.
This function performs a shared-key handshake. Using shared
symmetric keys of high entropy (128 bits or greater) mutually
authenticates both parties to each other.
TLS with shared-keys is non-standard. Most TLS
implementations don't support it. See
U{http://www.ietf.org/html.charters/tls-charter.html} for the
latest information on TLS with shared-keys. If the shared-keys
        Internet-Draft changes or is superseded, TLS Lite will track
those changes, so the shared-key support in later versions of
TLS Lite may become incompatible with this version.
Like any handshake function, this can be called on a closed
TLS connection, or on a TLS connection that is already open.
If called on an open connection it performs a re-handshake.
If the function completes without raising an exception, the
TLS connection will be open and available for data transfer.
If an exception is raised, the connection will have been
automatically closed (if it was ever open).
@type username: str
@param username: The shared-key username.
@type sharedKey: str
@param sharedKey: The shared key.
@type settings: L{tlslite.HandshakeSettings.HandshakeSettings}
@param settings: Various settings which can be used to control
the ciphersuites, certificate types, and SSL/TLS versions
offered by the client.
@type checker: L{tlslite.Checker.Checker}
@param checker: A Checker instance. This instance will be
invoked to examine the other party's authentication
        credentials, if the handshake completes successfully.
@type async: bool
@param async: If False, this function will block until the
handshake is completed. If True, this function will return a
generator. Successive invocations of the generator will
return 0 if it is waiting to read from the socket, 1 if it is
waiting to write to the socket, or will raise StopIteration if
the handshake operation is completed.
@rtype: None or an iterable
@return: If 'async' is True, a generator object will be
returned.
@raise socket.error: If a socket error occurs.
@raise tlslite.errors.TLSAbruptCloseError: If the socket is closed
without a preceding alert.
@raise tlslite.errors.TLSAlert: If a TLS alert is signalled.
@raise tlslite.errors.TLSAuthenticationError: If the checker
doesn't like the other party's authentication credentials.
"""
handshaker = self._handshakeClientAsync(sharedKeyParams=(username,
sharedKey), settings=settings, checker=checker)
if async:
return handshaker
for result in handshaker:
pass
def _handshakeClientAsync(self, srpParams=(), certParams=(),
unknownParams=(), sharedKeyParams=(),
session=None, settings=None, checker=None,
recursive=False):
handshaker = self._handshakeClientAsyncHelper(srpParams=srpParams,
certParams=certParams, unknownParams=unknownParams,
sharedKeyParams=sharedKeyParams, session=session,
settings=settings, recursive=recursive)
for result in self._handshakeWrapperAsync(handshaker, checker):
yield result
def _handshakeClientAsyncHelper(self, srpParams, certParams, unknownParams,
sharedKeyParams, session, settings, recursive):
if not recursive:
self._handshakeStart(client=True)
#Unpack parameters
srpUsername = None # srpParams
password = None # srpParams
clientCertChain = None # certParams
privateKey = None # certParams
srpCallback = None # unknownParams
certCallback = None # unknownParams
#session # sharedKeyParams (or session)
#settings # settings
if srpParams:
srpUsername, password = srpParams
elif certParams:
clientCertChain, privateKey = certParams
elif unknownParams:
srpCallback, certCallback = unknownParams
elif sharedKeyParams:
session = Session()._createSharedKey(*sharedKeyParams)
if not settings:
settings = HandshakeSettings()
settings = settings._filter()
#Validate parameters
if srpUsername and not password:
raise ValueError("Caller passed a username but no password")
if password and not srpUsername:
raise ValueError("Caller passed a password but no username")
if clientCertChain and not privateKey:
raise ValueError("Caller passed a certChain but no privateKey")
if privateKey and not clientCertChain:
raise ValueError("Caller passed a privateKey but no certChain")
if clientCertChain:
foundType = False
try:
import cryptoIDlib.CertChain
if isinstance(clientCertChain, cryptoIDlib.CertChain.CertChain):
if "cryptoID" not in settings.certificateTypes:
raise ValueError("Client certificate doesn't "\
"match Handshake Settings")
settings.certificateTypes = ["cryptoID"]
foundType = True
except ImportError:
pass
if not foundType and isinstance(clientCertChain,
X509CertChain):
if "x509" not in settings.certificateTypes:
raise ValueError("Client certificate doesn't match "\
"Handshake Settings")
settings.certificateTypes = ["x509"]
foundType = True
if not foundType:
raise ValueError("Unrecognized certificate type")
if session:
if not session.valid():
session = None #ignore non-resumable sessions...
elif session.resumable and \
(session.srpUsername != srpUsername):
raise ValueError("Session username doesn't match")
#Add Faults to parameters
if srpUsername and self.fault == Fault.badUsername:
srpUsername += "GARBAGE"
if password and self.fault == Fault.badPassword:
password += "GARBAGE"
if sharedKeyParams:
identifier = sharedKeyParams[0]
sharedKey = sharedKeyParams[1]
if self.fault == Fault.badIdentifier:
identifier += "GARBAGE"
session = Session()._createSharedKey(identifier, sharedKey)
elif self.fault == Fault.badSharedKey:
sharedKey += "GARBAGE"
session = Session()._createSharedKey(identifier, sharedKey)
#Initialize locals
serverCertChain = None
cipherSuite = 0
certificateType = CertificateType.x509
premasterSecret = None
#Get client nonce
clientRandom = getRandomBytes(32)
#Initialize acceptable ciphersuites
cipherSuites = []
if srpParams:
cipherSuites += CipherSuite.getSrpRsaSuites(settings.cipherNames)
cipherSuites += CipherSuite.getSrpSuites(settings.cipherNames)
elif certParams:
cipherSuites += CipherSuite.getRsaSuites(settings.cipherNames)
elif unknownParams:
if srpCallback:
cipherSuites += \
CipherSuite.getSrpRsaSuites(settings.cipherNames)
cipherSuites += \
CipherSuite.getSrpSuites(settings.cipherNames)
cipherSuites += CipherSuite.getRsaSuites(settings.cipherNames)
elif sharedKeyParams:
cipherSuites += CipherSuite.getRsaSuites(settings.cipherNames)
else:
cipherSuites += CipherSuite.getRsaSuites(settings.cipherNames)
#Initialize acceptable certificate types
certificateTypes = settings._getCertificateTypes()
#Tentatively set the version to the client's minimum version.
#We'll use this for the ClientHello, and if an error occurs
#parsing the Server Hello, we'll use this version for the response
self.version = settings.maxVersion
#Either send ClientHello (with a resumable session)...
if session:
#If it's a resumable (i.e. not a shared-key session), then its
#ciphersuite must be one of the acceptable ciphersuites
if (not sharedKeyParams) and \
session.cipherSuite not in cipherSuites:
raise ValueError("Session's cipher suite not consistent "\
"with parameters")
else:
clientHello = ClientHello()
clientHello.create(settings.maxVersion, clientRandom,
session.sessionID, cipherSuites,
certificateTypes, session.srpUsername)
#Or send ClientHello (without)
else:
clientHello = ClientHello()
clientHello.create(settings.maxVersion, clientRandom,
createByteArraySequence([]), cipherSuites,
certificateTypes, srpUsername)
for result in self._sendMsg(clientHello):
yield result
#Get ServerHello (or missing_srp_username)
for result in self._getMsg((ContentType.handshake,
ContentType.alert),
HandshakeType.server_hello):
if result in (0,1):
yield result
else:
break
msg = result
if isinstance(msg, ServerHello):
serverHello = msg
elif isinstance(msg, Alert):
alert = msg
#If it's not a missing_srp_username, re-raise
if alert.description != AlertDescription.missing_srp_username:
self._shutdown(False)
raise TLSRemoteAlert(alert)
#If we're not in SRP callback mode, we won't have offered SRP
#without a username, so we shouldn't get this alert
if not srpCallback:
for result in self._sendError(\
AlertDescription.unexpected_message):
yield result
srpParams = srpCallback()
#If the callback returns None, cancel the handshake
            if srpParams is None:
for result in self._sendError(AlertDescription.user_canceled):
yield result
#Recursively perform handshake
for result in self._handshakeClientAsyncHelper(srpParams,
None, None, None, None, settings, True):
yield result
return
#Get the server version. Do this before anything else, so any
#error alerts will use the server's version
self.version = serverHello.server_version
#Future responses from server must use this version
self._versionCheck = True
#Check ServerHello
if serverHello.server_version < settings.minVersion:
for result in self._sendError(\
AlertDescription.protocol_version,
"Too old version: %s" % str(serverHello.server_version)):
yield result
if serverHello.server_version > settings.maxVersion:
for result in self._sendError(\
AlertDescription.protocol_version,
"Too new version: %s" % str(serverHello.server_version)):
yield result
if serverHello.cipher_suite not in cipherSuites:
for result in self._sendError(\
AlertDescription.illegal_parameter,
"Server responded with incorrect ciphersuite"):
yield result
if serverHello.certificate_type not in certificateTypes:
for result in self._sendError(\
AlertDescription.illegal_parameter,
"Server responded with incorrect certificate type"):
yield result
if serverHello.compression_method != 0:
for result in self._sendError(\
AlertDescription.illegal_parameter,
"Server responded with incorrect compression method"):
yield result
#Get the server nonce
serverRandom = serverHello.random
#If the server agrees to resume
if session and session.sessionID and \
serverHello.session_id == session.sessionID:
#If a shared-key, we're flexible about suites; otherwise the
#server-chosen suite has to match the session's suite
if sharedKeyParams:
session.cipherSuite = serverHello.cipher_suite
elif serverHello.cipher_suite != session.cipherSuite:
for result in self._sendError(\
AlertDescription.illegal_parameter,\
"Server's ciphersuite doesn't match session"):
yield result
#Set the session for this connection
self.session = session
#Calculate pending connection states
self._calcPendingStates(clientRandom, serverRandom,
settings.cipherImplementations)
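            # On a resumed session no key exchange is performed and the server
            # sends its ChangeCipherSpec/Finished first, so the client reads the
            # server's Finished before sending its own (the reverse of the
            # full-handshake order used below).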
#Exchange ChangeCipherSpec and Finished messages
for result in self._getFinished():
yield result
for result in self._sendFinished():
yield result
#Mark the connection as open
self._handshakeDone(resumed=True)
#If server DOES NOT agree to resume
else:
if sharedKeyParams:
for result in self._sendError(\
AlertDescription.user_canceled,
"Was expecting a shared-key resumption"):
yield result
#We've already validated these
cipherSuite = serverHello.cipher_suite
certificateType = serverHello.certificate_type
#If the server chose an SRP suite...
if cipherSuite in CipherSuite.srpSuites:
#Get ServerKeyExchange, ServerHelloDone
for result in self._getMsg(ContentType.handshake,
HandshakeType.server_key_exchange, cipherSuite):
if result in (0,1):
yield result
else:
break
serverKeyExchange = result
for result in self._getMsg(ContentType.handshake,
HandshakeType.server_hello_done):
if result in (0,1):
yield result
else:
break
serverHelloDone = result
#If the server chose an SRP+RSA suite...
elif cipherSuite in CipherSuite.srpRsaSuites:
#Get Certificate, ServerKeyExchange, ServerHelloDone
for result in self._getMsg(ContentType.handshake,
HandshakeType.certificate, certificateType):
if result in (0,1):
yield result
else:
break
serverCertificate = result
for result in self._getMsg(ContentType.handshake,
HandshakeType.server_key_exchange, cipherSuite):
if result in (0,1):
yield result
else:
break
serverKeyExchange = result
for result in self._getMsg(ContentType.handshake,
HandshakeType.server_hello_done):
if result in (0,1):
yield result
else:
break
serverHelloDone = result
#If the server chose an RSA suite...
elif cipherSuite in CipherSuite.rsaSuites:
#Get Certificate[, CertificateRequest], ServerHelloDone
for result in self._getMsg(ContentType.handshake,
HandshakeType.certificate, certificateType):
if result in (0,1):
yield result
else:
break
serverCertificate = result
for result in self._getMsg(ContentType.handshake,
(HandshakeType.server_hello_done,
HandshakeType.certificate_request)):
if result in (0,1):
yield result
else:
break
msg = result
certificateRequest = None
if isinstance(msg, CertificateRequest):
certificateRequest = msg
for result in self._getMsg(ContentType.handshake,
HandshakeType.server_hello_done):
if result in (0,1):
yield result
else:
break
serverHelloDone = result
elif isinstance(msg, ServerHelloDone):
serverHelloDone = msg
else:
raise AssertionError()
#Calculate SRP premaster secret, if server chose an SRP or
#SRP+RSA suite
if cipherSuite in CipherSuite.srpSuites + \
CipherSuite.srpRsaSuites:
#Get and check the server's group parameters and B value
N = serverKeyExchange.srp_N
g = serverKeyExchange.srp_g
s = serverKeyExchange.srp_s
B = serverKeyExchange.srp_B
if (g,N) not in goodGroupParameters:
for result in self._sendError(\
AlertDescription.untrusted_srp_parameters,
"Unknown group parameters"):
yield result
if numBits(N) < settings.minKeySize:
for result in self._sendError(\
AlertDescription.untrusted_srp_parameters,
"N value is too small: %d" % numBits(N)):
yield result
if numBits(N) > settings.maxKeySize:
for result in self._sendError(\
AlertDescription.untrusted_srp_parameters,
"N value is too large: %d" % numBits(N)):
yield result
if B % N == 0:
for result in self._sendError(\
AlertDescription.illegal_parameter,
"Suspicious B value"):
yield result
#Check the server's signature, if server chose an
#SRP+RSA suite
if cipherSuite in CipherSuite.srpRsaSuites:
#Hash ServerKeyExchange/ServerSRPParams
hashBytes = serverKeyExchange.hash(clientRandom,
serverRandom)
#Extract signature bytes from ServerKeyExchange
sigBytes = serverKeyExchange.signature
if len(sigBytes) == 0:
for result in self._sendError(\
AlertDescription.illegal_parameter,
"Server sent an SRP ServerKeyExchange "\
"message without a signature"):
yield result
#Get server's public key from the Certificate message
for result in self._getKeyFromChain(serverCertificate,
settings):
if result in (0,1):
yield result
else:
break
publicKey, serverCertChain = result
#Verify signature
if not publicKey.verify(sigBytes, hashBytes):
for result in self._sendError(\
AlertDescription.decrypt_error,
"Signature failed to verify"):
yield result
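                # SRP premaster secret computation on the client side (x, u and
                # k are the hashes produced by makeX, makeU and makeK):
                #   v = g^x mod N                        (password verifier)
                #   S = (B - k*v) ^ (a + u*x) mod N      (shared secret)
                # The server derives the same S as (A * v^u) ^ b mod N, and S
                # becomes the premaster secret.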
#Calculate client's ephemeral DH values (a, A)
a = bytesToNumber(getRandomBytes(32))
A = powMod(g, a, N)
#Calculate client's static DH values (x, v)
x = makeX(bytesToString(s), srpUsername, password)
v = powMod(g, x, N)
#Calculate u
u = makeU(N, A, B)
#Calculate premaster secret
k = makeK(N, g)
S = powMod((B - (k*v)) % N, a+(u*x), N)
if self.fault == Fault.badA:
A = N
S = 0
premasterSecret = numberToBytes(S)
#Send ClientKeyExchange
for result in self._sendMsg(\
ClientKeyExchange(cipherSuite).createSRP(A)):
yield result
#Calculate RSA premaster secret, if server chose an RSA suite
elif cipherSuite in CipherSuite.rsaSuites:
#Handle the presence of a CertificateRequest
if certificateRequest:
if unknownParams and certCallback:
certParamsNew = certCallback()
if certParamsNew:
clientCertChain, privateKey = certParamsNew
#Get server's public key from the Certificate message
for result in self._getKeyFromChain(serverCertificate,
settings):
if result in (0,1):
yield result
else:
break
publicKey, serverCertChain = result
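                # The first two bytes of the 48-byte RSA premaster secret carry
                # the client's highest offered version (not the negotiated one),
                # which lets the server detect version-rollback attacks.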
#Calculate premaster secret
premasterSecret = getRandomBytes(48)
premasterSecret[0] = settings.maxVersion[0]
premasterSecret[1] = settings.maxVersion[1]
if self.fault == Fault.badPremasterPadding:
premasterSecret[0] = 5
if self.fault == Fault.shortPremasterSecret:
premasterSecret = premasterSecret[:-1]
#Encrypt premaster secret to server's public key
encryptedPreMasterSecret = publicKey.encrypt(premasterSecret)
#If client authentication was requested, send Certificate
#message, either with certificates or empty
if certificateRequest:
clientCertificate = Certificate(certificateType)
if clientCertChain:
#Check to make sure we have the same type of
#certificates the server requested
wrongType = False
if certificateType == CertificateType.x509:
if not isinstance(clientCertChain, X509CertChain):
wrongType = True
elif certificateType == CertificateType.cryptoID:
if not isinstance(clientCertChain,
cryptoIDlib.CertChain.CertChain):
wrongType = True
if wrongType:
for result in self._sendError(\
AlertDescription.handshake_failure,
"Client certificate is of wrong type"):
yield result
clientCertificate.create(clientCertChain)
for result in self._sendMsg(clientCertificate):
yield result
else:
#The server didn't request client auth, so we
#zeroize these so the clientCertChain won't be
#stored in the session.
privateKey = None
clientCertChain = None
#Send ClientKeyExchange
clientKeyExchange = ClientKeyExchange(cipherSuite,
self.version)
clientKeyExchange.createRSA(encryptedPreMasterSecret)
for result in self._sendMsg(clientKeyExchange):
yield result
#If client authentication was requested and we have a
#private key, send CertificateVerify
if certificateRequest and privateKey:
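                # SSL 3.0 mixes the master secret into the CertificateVerify
                # hashes, while TLS 1.0/1.1 sign the concatenated MD5 and SHA-1
                # handshake hashes directly.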
if self.version == (3,0):
#Create a temporary session object, just for the
#purpose of creating the CertificateVerify
session = Session()
session._calcMasterSecret(self.version,
premasterSecret,
clientRandom,
serverRandom)
verifyBytes = self._calcSSLHandshakeHash(\
session.masterSecret, "")
elif self.version in ((3,1), (3,2)):
verifyBytes = stringToBytes(\
self._handshake_md5.digest() + \
self._handshake_sha.digest())
if self.fault == Fault.badVerifyMessage:
verifyBytes[0] = ((verifyBytes[0]+1) % 256)
signedBytes = privateKey.sign(verifyBytes)
certificateVerify = CertificateVerify()
certificateVerify.create(signedBytes)
for result in self._sendMsg(certificateVerify):
yield result
#Create the session object
self.session = Session()
self.session._calcMasterSecret(self.version, premasterSecret,
clientRandom, serverRandom)
self.session.sessionID = serverHello.session_id
self.session.cipherSuite = cipherSuite
self.session.srpUsername = srpUsername
self.session.clientCertChain = clientCertChain
self.session.serverCertChain = serverCertChain
#Calculate pending connection states
self._calcPendingStates(clientRandom, serverRandom,
settings.cipherImplementations)
#Exchange ChangeCipherSpec and Finished messages
for result in self._sendFinished():
yield result
for result in self._getFinished():
yield result
#Mark the connection as open
self.session._setResumable(True)
self._handshakeDone(resumed=False)
def handshakeServer(self, sharedKeyDB=None, verifierDB=None,
certChain=None, privateKey=None, reqCert=False,
sessionCache=None, settings=None, checker=None):
"""Perform a handshake in the role of server.
This function performs an SSL or TLS handshake. Depending on
the arguments and the behavior of the client, this function can
perform a shared-key, SRP, or certificate-based handshake. It
can also perform a combined SRP and server-certificate
handshake.
Like any handshake function, this can be called on a closed
TLS connection, or on a TLS connection that is already open.
If called on an open connection it performs a re-handshake.
This function does not send a Hello Request message before
performing the handshake, so if re-handshaking is required,
the server must signal the client to begin the re-handshake
through some other means.
If the function completes without raising an exception, the
TLS connection will be open and available for data transfer.
If an exception is raised, the connection will have been
automatically closed (if it was ever open).
@type sharedKeyDB: L{tlslite.SharedKeyDB.SharedKeyDB}
@param sharedKeyDB: A database of shared symmetric keys
associated with usernames. If the client performs a
shared-key handshake, the session's sharedKeyUsername
attribute will be set.
@type verifierDB: L{tlslite.VerifierDB.VerifierDB}
@param verifierDB: A database of SRP password verifiers
associated with usernames. If the client performs an SRP
handshake, the session's srpUsername attribute will be set.
@type certChain: L{tlslite.X509CertChain.X509CertChain} or
L{cryptoIDlib.CertChain.CertChain}
@param certChain: The certificate chain to be used if the
client requests server certificate authentication.
@type privateKey: L{tlslite.utils.RSAKey.RSAKey}
@param privateKey: The private key to be used if the client
requests server certificate authentication.
@type reqCert: bool
@param reqCert: Whether to request client certificate
authentication. This only applies if the client chooses server
certificate authentication; if the client chooses SRP or
shared-key authentication, this will be ignored. If the client
performs a client certificate authentication, the sessions's
clientCertChain attribute will be set.
@type sessionCache: L{tlslite.SessionCache.SessionCache}
@param sessionCache: An in-memory cache of resumable sessions.
The client can resume sessions from this cache. Alternatively,
if the client performs a full handshake, a new session will be
added to the cache.
@type settings: L{tlslite.HandshakeSettings.HandshakeSettings}
@param settings: Various settings which can be used to control
the ciphersuites and SSL/TLS version chosen by the server.
@type checker: L{tlslite.Checker.Checker}
@param checker: A Checker instance. This instance will be
invoked to examine the other party's authentication
        credentials, if the handshake completes successfully.
@raise socket.error: If a socket error occurs.
@raise tlslite.errors.TLSAbruptCloseError: If the socket is closed
without a preceding alert.
@raise tlslite.errors.TLSAlert: If a TLS alert is signalled.
@raise tlslite.errors.TLSAuthenticationError: If the checker
doesn't like the other party's authentication credentials.
"""
for result in self.handshakeServerAsync(sharedKeyDB, verifierDB,
certChain, privateKey, reqCert, sessionCache, settings,
checker):
pass
def handshakeServerAsync(self, sharedKeyDB=None, verifierDB=None,
certChain=None, privateKey=None, reqCert=False,
sessionCache=None, settings=None, checker=None):
"""Start a server handshake operation on the TLS connection.
This function returns a generator which behaves similarly to
handshakeServer(). Successive invocations of the generator
will return 0 if it is waiting to read from the socket, 1 if it is
waiting to write to the socket, or it will raise StopIteration
if the handshake operation is complete.
@rtype: iterable
@return: A generator; see above for details.
"""
handshaker = self._handshakeServerAsyncHelper(\
sharedKeyDB=sharedKeyDB,
verifierDB=verifierDB, certChain=certChain,
privateKey=privateKey, reqCert=reqCert,
sessionCache=sessionCache, settings=settings)
for result in self._handshakeWrapperAsync(handshaker, checker):
yield result
def _handshakeServerAsyncHelper(self, sharedKeyDB, verifierDB,
certChain, privateKey, reqCert, sessionCache,
settings):
self._handshakeStart(client=False)
if (not sharedKeyDB) and (not verifierDB) and (not certChain):
raise ValueError("Caller passed no authentication credentials")
if certChain and not privateKey:
raise ValueError("Caller passed a certChain but no privateKey")
if privateKey and not certChain:
raise ValueError("Caller passed a privateKey but no certChain")
if not settings:
settings = HandshakeSettings()
settings = settings._filter()
#Initialize acceptable cipher suites
cipherSuites = []
if verifierDB:
if certChain:
cipherSuites += \
CipherSuite.getSrpRsaSuites(settings.cipherNames)
cipherSuites += CipherSuite.getSrpSuites(settings.cipherNames)
if sharedKeyDB or certChain:
cipherSuites += CipherSuite.getRsaSuites(settings.cipherNames)
#Initialize acceptable certificate type
certificateType = None
if certChain:
try:
import cryptoIDlib.CertChain
if isinstance(certChain, cryptoIDlib.CertChain.CertChain):
certificateType = CertificateType.cryptoID
except ImportError:
pass
if isinstance(certChain, X509CertChain):
certificateType = CertificateType.x509
        if certificateType is None:
raise ValueError("Unrecognized certificate type")
#Initialize locals
clientCertChain = None
serverCertChain = None #We may set certChain to this later
postFinishedError = None
#Tentatively set version to most-desirable version, so if an error
#occurs parsing the ClientHello, this is what we'll use for the
#error alert
self.version = settings.maxVersion
#Get ClientHello
for result in self._getMsg(ContentType.handshake,
HandshakeType.client_hello):
if result in (0,1):
yield result
else:
break
clientHello = result
#If client's version is too low, reject it
if clientHello.client_version < settings.minVersion:
self.version = settings.minVersion
for result in self._sendError(\
AlertDescription.protocol_version,
"Too old version: %s" % str(clientHello.client_version)):
yield result
#If client's version is too high, propose my highest version
elif clientHello.client_version > settings.maxVersion:
self.version = settings.maxVersion
else:
#Set the version to the client's version
self.version = clientHello.client_version
#Get the client nonce; create server nonce
clientRandom = clientHello.random
serverRandom = getRandomBytes(32)
#Calculate the first cipher suite intersection.
#This is the 'privileged' ciphersuite. We'll use it if we're
#doing a shared-key resumption or a new negotiation. In fact,
#the only time we won't use it is if we're resuming a non-sharedkey
#session, in which case we use the ciphersuite from the session.
#
#Given the current ciphersuite ordering, this means we prefer SRP
#over non-SRP.
for cipherSuite in cipherSuites:
if cipherSuite in clientHello.cipher_suites:
break
else:
for result in self._sendError(\
AlertDescription.handshake_failure):
yield result
#If resumption was requested...
if clientHello.session_id and (sharedKeyDB or sessionCache):
session = None
#Check in the sharedKeys container
if sharedKeyDB and len(clientHello.session_id)==16:
try:
#Trim off zero padding, if any
for x in range(16):
if clientHello.session_id[x]==0:
break
self.allegedSharedKeyUsername = bytesToString(\
clientHello.session_id[:x])
session = sharedKeyDB[self.allegedSharedKeyUsername]
if not session.sharedKey:
raise AssertionError()
#use privileged ciphersuite
session.cipherSuite = cipherSuite
except KeyError:
pass
#Then check in the session cache
if sessionCache and not session:
try:
session = sessionCache[bytesToString(\
clientHello.session_id)]
if session.sharedKey:
raise AssertionError()
if not session.resumable:
raise AssertionError()
#Check for consistency with ClientHello
if session.cipherSuite not in cipherSuites:
for result in self._sendError(\
AlertDescription.handshake_failure):
yield result
if session.cipherSuite not in clientHello.cipher_suites:
for result in self._sendError(\
AlertDescription.handshake_failure):
yield result
if clientHello.srp_username:
if clientHello.srp_username != session.srpUsername:
for result in self._sendError(\
AlertDescription.handshake_failure):
yield result
except KeyError:
pass
#If a session is found..
if session:
#Set the session
self.session = session
#Send ServerHello
serverHello = ServerHello()
serverHello.create(self.version, serverRandom,
session.sessionID, session.cipherSuite,
certificateType)
for result in self._sendMsg(serverHello):
yield result
#From here on, the client's messages must have the right version
self._versionCheck = True
#Calculate pending connection states
self._calcPendingStates(clientRandom, serverRandom,
settings.cipherImplementations)
#Exchange ChangeCipherSpec and Finished messages
for result in self._sendFinished():
yield result
for result in self._getFinished():
yield result
#Mark the connection as open
self._handshakeDone(resumed=True)
return
#If not a resumption...
#TRICKY: we might have chosen an RSA suite that was only deemed
#acceptable because of the shared-key resumption. If the shared-
#key resumption failed, because the identifier wasn't recognized,
#we might fall through to here, where we have an RSA suite
#chosen, but no certificate.
if cipherSuite in CipherSuite.rsaSuites and not certChain:
for result in self._sendError(\
AlertDescription.handshake_failure):
yield result
#If an RSA suite is chosen, check for certificate type intersection
#(We do this check down here because if the mismatch occurs but the
# client is using a shared-key session, it's okay)
if cipherSuite in CipherSuite.rsaSuites + \
CipherSuite.srpRsaSuites:
if certificateType not in clientHello.certificate_types:
for result in self._sendError(\
AlertDescription.handshake_failure,
"the client doesn't support my certificate type"):
yield result
#Move certChain -> serverCertChain, now that we're using it
serverCertChain = certChain
#Create sessionID
if sessionCache:
sessionID = getRandomBytes(32)
else:
sessionID = createByteArraySequence([])
#If we've selected an SRP suite, exchange keys and calculate
#premaster secret:
if cipherSuite in CipherSuite.srpSuites + CipherSuite.srpRsaSuites:
#If there's no SRP username...
if not clientHello.srp_username:
#Ask the client to re-send ClientHello with one
for result in self._sendMsg(Alert().create(\
AlertDescription.missing_srp_username,
AlertLevel.warning)):
yield result
#Get ClientHello
for result in self._getMsg(ContentType.handshake,
HandshakeType.client_hello):
if result in (0,1):
yield result
else:
break
clientHello = result
#Check ClientHello
#If client's version is too low, reject it (COPIED CODE; BAD!)
if clientHello.client_version < settings.minVersion:
self.version = settings.minVersion
for result in self._sendError(\
AlertDescription.protocol_version,
"Too old version: %s" % str(clientHello.client_version)):
yield result
#If client's version is too high, propose my highest version
elif clientHello.client_version > settings.maxVersion:
self.version = settings.maxVersion
else:
#Set the version to the client's version
self.version = clientHello.client_version
#Recalculate the privileged cipher suite, making sure to
#pick an SRP suite
cipherSuites = [c for c in cipherSuites if c in \
CipherSuite.srpSuites + \
CipherSuite.srpRsaSuites]
for cipherSuite in cipherSuites:
if cipherSuite in clientHello.cipher_suites:
break
else:
for result in self._sendError(\
AlertDescription.handshake_failure):
yield result
#Get the client nonce; create server nonce
clientRandom = clientHello.random
serverRandom = getRandomBytes(32)
#The username better be there, this time
if not clientHello.srp_username:
for result in self._sendError(\
AlertDescription.illegal_parameter,
"Client resent a hello, but without the SRP"\
" username"):
                        yield result
            #Get username
            self.allegedSrpUsername = clientHello.srp_username
            #Get parameters from username
            try:
                entry = verifierDB[self.allegedSrpUsername]
            except KeyError:
                for result in self._sendError(\
                        AlertDescription.unknown_srp_username):
                    yield result
            (N, g, s, v) = entry
            #Calculate server's ephemeral DH values (b, B)
            b = bytesToNumber(getRandomBytes(32))
            k = makeK(N, g)
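            #SRP: the server's public value B = (k*v + g^b) mod N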
            B = (powMod(g, b, N) + (k*v)) % N
            #Create ServerKeyExchange, signing it if necessary
            serverKeyExchange = ServerKeyExchange(cipherSuite)
            serverKeyExchange.createSRP(N, g, stringToBytes(s), B)
            if cipherSuite in CipherSuite.srpRsaSuites:
                hashBytes = serverKeyExchange.hash(clientRandom,
                                                   serverRandom)
                serverKeyExchange.signature = privateKey.sign(hashBytes)
#Send ServerHello[, Certificate], ServerKeyExchange,
#ServerHelloDone
msgs = []
serverHello = ServerHello()
serverHello.create(self.version, serverRandom, sessionID,
cipherSuite, certificateType)
msgs.append(serverHello)
if cipherSuite in CipherSuite.srpRsaSuites:
certificateMsg = Certificate(certificateType)
certificateMsg.create(serverCertChain)
msgs.append(certificateMsg)
msgs.append(serverKeyExchange)
msgs.append(ServerHelloDone())
for result in self._sendMsgs(msgs):
yield result
#From here on, the client's messages must have the right version
self._versionCheck = True
#Get and check ClientKeyExchange
for result in self._getMsg(ContentType.handshake,
HandshakeType.client_key_exchange,
cipherSuite):
if result in (0,1):
yield result
else:
break
clientKeyExchange = result
A = clientKeyExchange.srp_A
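            #A % N == 0 would force the shared secret S to 0, so treat it as
            #an attack (the error is deferred via postFinishedError)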
if A % N == 0:
postFinishedError = (AlertDescription.illegal_parameter,
"Suspicious A value")
#Calculate u
u = makeU(N, A, B)
#Calculate premaster secret
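            #S = (A * v^u) ^ b mod N, the same value the client derives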
S = powMod((A * powMod(v,u,N)) % N, b, N)
premasterSecret = numberToBytes(S)
#If we've selected an RSA suite, exchange keys and calculate
#premaster secret:
elif cipherSuite in CipherSuite.rsaSuites:
#Send ServerHello, Certificate[, CertificateRequest],
#ServerHelloDone
msgs = []
msgs.append(ServerHello().create(self.version, serverRandom,
sessionID, cipherSuite, certificateType))
msgs.append(Certificate(certificateType).create(serverCertChain))
if reqCert:
msgs.append(CertificateRequest())
msgs.append(ServerHelloDone())
for result in self._sendMsgs(msgs):
yield result
#From here on, the client's messages must have the right version
self._versionCheck = True
#Get [Certificate,] (if was requested)
if reqCert:
if self.version == (3,0):
for result in self._getMsg((ContentType.handshake,
ContentType.alert),
HandshakeType.certificate,
certificateType):
if result in (0,1):
yield result
else:
break
msg = result
if isinstance(msg, Alert):
#If it's not a no_certificate alert, re-raise
alert = msg
if alert.description != \
AlertDescription.no_certificate:
self._shutdown(False)
raise TLSRemoteAlert(alert)
elif isinstance(msg, Certificate):
clientCertificate = msg
if clientCertificate.certChain and \
clientCertificate.certChain.getNumCerts()!=0:
clientCertChain = clientCertificate.certChain
else:
raise AssertionError()
elif self.version in ((3,1), (3,2)):
for result in self._getMsg(ContentType.handshake,
HandshakeType.certificate,
certificateType):
if result in (0,1):
yield result
else:
break
clientCertificate = result
if clientCertificate.certChain and \
clientCertificate.certChain.getNumCerts()!=0:
clientCertChain = clientCertificate.certChain
else:
raise AssertionError()
#Get ClientKeyExchange
for result in self._getMsg(ContentType.handshake,
HandshakeType.client_key_exchange,
cipherSuite):
if result in (0,1):
yield result
else:
break
clientKeyExchange = result
#Decrypt ClientKeyExchange
premasterSecret = privateKey.decrypt(\
clientKeyExchange.encryptedPreMasterSecret)
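            #Bleichenbacher countermeasure: if the decrypted premaster secret
            #is malformed (bad length or version), substitute random bytes
            #below so the handshake fails later at Finished instead of
            #exposing a padding/format oracle here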
randomPreMasterSecret = getRandomBytes(48)
            if not premasterSecret:
                premasterSecret = randomPreMasterSecret
            elif len(premasterSecret)!=48:
                premasterSecret = randomPreMasterSecret
            else:
                #Only read the version bytes once the premaster secret is
                #known to be present and of the right length
                versionCheck = (premasterSecret[0], premasterSecret[1])
                if versionCheck != clientHello.client_version:
                    if versionCheck != self.version: #Tolerate buggy IE clients
                        premasterSecret = randomPreMasterSecret
#Get and check CertificateVerify, if relevant
if clientCertChain:
if self.version == (3,0):
#Create a temporary session object, just for the purpose
#of checking the CertificateVerify
session = Session()
session._calcMasterSecret(self.version, premasterSecret,
clientRandom, serverRandom)
verifyBytes = self._calcSSLHandshakeHash(\
session.masterSecret, "")
elif self.version in ((3,1), (3,2)):
verifyBytes = stringToBytes(self._handshake_md5.digest() +\
self._handshake_sha.digest())
for result in self._getMsg(ContentType.handshake,
HandshakeType.certificate_verify):
if result in (0,1):
yield result
else:
break
certificateVerify = result
publicKey = clientCertChain.getEndEntityPublicKey()
if len(publicKey) < settings.minKeySize:
postFinishedError = (AlertDescription.handshake_failure,
"Client's public key too small: %d" % len(publicKey))
if len(publicKey) > settings.maxKeySize:
postFinishedError = (AlertDescription.handshake_failure,
"Client's public key too large: %d" % len(publicKey))
if not publicKey.verify(certificateVerify.signature,
verifyBytes):
postFinishedError = (AlertDescription.decrypt_error,
"Signature failed to verify")
#Create the session object
self.session = Session()
self.session._calcMasterSecret(self.version, premasterSecret,
clientRandom, serverRandom)
self.session.sessionID = sessionID
self.session.cipherSuite = cipherSuite
self.session.srpUsername = self.allegedSrpUsername
self.session.clientCertChain = clientCertChain
self.session.serverCertChain = serverCertChain
#Calculate pending connection states
self._calcPendingStates(clientRandom, serverRandom,
settings.cipherImplementations)
#Exchange ChangeCipherSpec and Finished messages
for result in self._getFinished():
yield result
#If we were holding a post-finished error until receiving the client
#finished message, send it now. We delay the call until this point
#because calling sendError() throws an exception, and our caller might
#shut down the socket upon receiving the exception. If he did, and the
#client was still sending its ChangeCipherSpec or Finished messages, it
#would cause a socket error on the client side. This is a lot of
#consideration to show to misbehaving clients, but this would also
#cause problems with fault-testing.
if postFinishedError:
for result in self._sendError(*postFinishedError):
yield result
for result in self._sendFinished():
yield result
#Add the session object to the session cache
if sessionCache and sessionID:
sessionCache[bytesToString(sessionID)] = self.session
#Mark the connection as open
self.session._setResumable(True)
self._handshakeDone(resumed=False)
def _handshakeWrapperAsync(self, handshaker, checker):
if not self.fault:
try:
for result in handshaker:
yield result
if checker:
try:
checker(self)
except TLSAuthenticationError:
alert = Alert().create(AlertDescription.close_notify,
AlertLevel.fatal)
for result in self._sendMsg(alert):
yield result
raise
except:
self._shutdown(False)
raise
else:
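            #Fault-injection path (self.fault is set by test code): the
            #handshake is expected to fail with one of the alerts listed in
            #Fault.faultAlerts for this fault; any other outcome, including
            #success, is reported as a TLSFaultError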
try:
for result in handshaker:
yield result
if checker:
try:
checker(self)
except TLSAuthenticationError:
alert = Alert().create(AlertDescription.close_notify,
AlertLevel.fatal)
for result in self._sendMsg(alert):
yield result
raise
except socket.error, e:
raise TLSFaultError("socket error!")
except TLSAbruptCloseError, e:
raise TLSFaultError("abrupt close error!")
except TLSAlert, alert:
if alert.description not in Fault.faultAlerts[self.fault]:
raise TLSFaultError(str(alert))
else:
pass
except:
self._shutdown(False)
raise
else:
raise TLSFaultError("No error!")
def _getKeyFromChain(self, certificate, settings):
#Get and check cert chain from the Certificate message
certChain = certificate.certChain
if not certChain or certChain.getNumCerts() == 0:
for result in self._sendError(AlertDescription.illegal_parameter,
"Other party sent a Certificate message without "\
"certificates"):
yield result
#Get and check public key from the cert chain
publicKey = certChain.getEndEntityPublicKey()
if len(publicKey) < settings.minKeySize:
for result in self._sendError(AlertDescription.handshake_failure,
"Other party's public key too small: %d" % len(publicKey)):
yield result
if len(publicKey) > settings.maxKeySize:
for result in self._sendError(AlertDescription.handshake_failure,
"Other party's public key too large: %d" % len(publicKey)):
yield result
yield publicKey, certChain
| apache-2.0 |
turbokongen/home-assistant | homeassistant/components/motion_blinds/sensor.py | 2 | 6631 | """Support for Motion Blinds sensors."""
from motionblinds import BlindType
from homeassistant.const import (
DEVICE_CLASS_BATTERY,
DEVICE_CLASS_SIGNAL_STRENGTH,
PERCENTAGE,
SIGNAL_STRENGTH_DECIBELS_MILLIWATT,
)
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import DOMAIN, KEY_COORDINATOR, KEY_GATEWAY
ATTR_BATTERY_VOLTAGE = "battery_voltage"
TYPE_BLIND = "blind"
TYPE_GATEWAY = "gateway"
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Perform the setup for Motion Blinds."""
entities = []
motion_gateway = hass.data[DOMAIN][config_entry.entry_id][KEY_GATEWAY]
coordinator = hass.data[DOMAIN][config_entry.entry_id][KEY_COORDINATOR]
for blind in motion_gateway.device_list.values():
entities.append(MotionSignalStrengthSensor(coordinator, blind, TYPE_BLIND))
if blind.type == BlindType.TopDownBottomUp:
entities.append(MotionTDBUBatterySensor(coordinator, blind, "Bottom"))
entities.append(MotionTDBUBatterySensor(coordinator, blind, "Top"))
elif blind.battery_voltage > 0:
# Only add battery powered blinds
entities.append(MotionBatterySensor(coordinator, blind))
entities.append(
MotionSignalStrengthSensor(coordinator, motion_gateway, TYPE_GATEWAY)
)
async_add_entities(entities)
class MotionBatterySensor(CoordinatorEntity, Entity):
"""
Representation of a Motion Battery Sensor.
Updates are done by the cover platform.
"""
def __init__(self, coordinator, blind):
"""Initialize the Motion Battery Sensor."""
super().__init__(coordinator)
self._blind = blind
@property
def unique_id(self):
"""Return the unique id of the blind."""
return f"{self._blind.mac}-battery"
@property
def device_info(self):
"""Return the device info of the blind."""
return {"identifiers": {(DOMAIN, self._blind.mac)}}
@property
def name(self):
"""Return the name of the blind battery sensor."""
return f"{self._blind.blind_type}-battery-{self._blind.mac[12:]}"
@property
def available(self):
"""Return True if entity is available."""
return self._blind.available
@property
def unit_of_measurement(self):
"""Return the unit of measurement of this entity, if any."""
return PERCENTAGE
@property
def device_class(self):
"""Return the device class of this entity."""
return DEVICE_CLASS_BATTERY
@property
def state(self):
"""Return the state of the sensor."""
return self._blind.battery_level
@property
def device_state_attributes(self):
"""Return device specific state attributes."""
return {ATTR_BATTERY_VOLTAGE: self._blind.battery_voltage}
async def async_added_to_hass(self):
"""Subscribe to multicast pushes."""
self._blind.Register_callback(self.unique_id, self.schedule_update_ha_state)
await super().async_added_to_hass()
async def async_will_remove_from_hass(self):
"""Unsubscribe when removed."""
self._blind.Remove_callback(self.unique_id)
await super().async_will_remove_from_hass()
class MotionTDBUBatterySensor(MotionBatterySensor):
"""
Representation of a Motion Battery Sensor for a Top Down Bottom Up blind.
Updates are done by the cover platform.
"""
def __init__(self, coordinator, blind, motor):
"""Initialize the Motion Battery Sensor."""
super().__init__(coordinator, blind)
self._motor = motor
@property
def unique_id(self):
"""Return the unique id of the blind."""
return f"{self._blind.mac}-{self._motor}-battery"
@property
def name(self):
"""Return the name of the blind battery sensor."""
return f"{self._blind.blind_type}-{self._motor}-battery-{self._blind.mac[12:]}"
@property
def state(self):
"""Return the state of the sensor."""
if self._blind.battery_level is None:
return None
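        # battery_level of a TDBU blind is a mapping keyed by the motor's
        # first letter ("B" for Bottom, "T" for Top)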
return self._blind.battery_level[self._motor[0]]
@property
def device_state_attributes(self):
"""Return device specific state attributes."""
attributes = {}
if self._blind.battery_voltage is not None:
attributes[ATTR_BATTERY_VOLTAGE] = self._blind.battery_voltage[
self._motor[0]
]
return attributes
class MotionSignalStrengthSensor(CoordinatorEntity, Entity):
"""Representation of a Motion Signal Strength Sensor."""
def __init__(self, coordinator, device, device_type):
"""Initialize the Motion Signal Strength Sensor."""
super().__init__(coordinator)
self._device = device
self._device_type = device_type
@property
def unique_id(self):
"""Return the unique id of the blind."""
return f"{self._device.mac}-RSSI"
@property
def device_info(self):
"""Return the device info of the blind."""
return {"identifiers": {(DOMAIN, self._device.mac)}}
@property
def name(self):
"""Return the name of the blind signal strength sensor."""
if self._device_type == TYPE_GATEWAY:
return "Motion gateway signal strength"
return f"{self._device.blind_type} signal strength - {self._device.mac[12:]}"
@property
def available(self):
"""Return True if entity is available."""
return self._device.available
@property
def unit_of_measurement(self):
"""Return the unit of measurement of this entity, if any."""
return SIGNAL_STRENGTH_DECIBELS_MILLIWATT
@property
def device_class(self):
"""Return the device class of this entity."""
return DEVICE_CLASS_SIGNAL_STRENGTH
@property
def entity_registry_enabled_default(self):
"""Return if the entity should be enabled when first added to the entity registry."""
return False
@property
def state(self):
"""Return the state of the sensor."""
return self._device.RSSI
async def async_added_to_hass(self):
"""Subscribe to multicast pushes."""
self._device.Register_callback(self.unique_id, self.schedule_update_ha_state)
await super().async_added_to_hass()
async def async_will_remove_from_hass(self):
"""Unsubscribe when removed."""
self._device.Remove_callback(self.unique_id)
await super().async_will_remove_from_hass()
| apache-2.0 |
sobercoder/gem5 | src/arch/x86/isa/insts/general_purpose/rotate_and_shift/__init__.py | 91 | 2283 | # Copyright (c) 2007 The Hewlett-Packard Development Company
# All rights reserved.
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder. You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Gabe Black
categories = ["rotate",
"shift"]
microcode = ""
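# Each category module (rotate.py, shift.py) defines a "microcode" string;
# import each one dynamically and concatenate their microcode.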
for category in categories:
exec "import %s as cat" % category
microcode += cat.microcode
| bsd-3-clause |
zanph/zanph | flaskroulette/venv/lib/python2.7/site-packages/requests/packages/urllib3/packages/ordered_dict.py | 2040 | 8935 | # Backport of OrderedDict() class that runs on Python 2.4, 2.5, 2.6, 2.7 and pypy.
# Passes Python2.7's test suite and incorporates all the latest updates.
# Copyright 2009 Raymond Hettinger, released under the MIT License.
# http://code.activestate.com/recipes/576693/
try:
from thread import get_ident as _get_ident
except ImportError:
from dummy_thread import get_ident as _get_ident
try:
from _abcoll import KeysView, ValuesView, ItemsView
except ImportError:
pass
class OrderedDict(dict):
'Dictionary that remembers insertion order'
# An inherited dict maps keys to values.
# The inherited dict provides __getitem__, __len__, __contains__, and get.
# The remaining methods are order-aware.
# Big-O running times for all methods are the same as for regular dictionaries.
# The internal self.__map dictionary maps keys to links in a doubly linked list.
# The circular doubly linked list starts and ends with a sentinel element.
# The sentinel element never gets deleted (this simplifies the algorithm).
# Each link is stored as a list of length three: [PREV, NEXT, KEY].
def __init__(self, *args, **kwds):
'''Initialize an ordered dictionary. Signature is the same as for
regular dictionaries, but keyword arguments are not recommended
because their insertion order is arbitrary.
'''
if len(args) > 1:
raise TypeError('expected at most 1 arguments, got %d' % len(args))
try:
self.__root
except AttributeError:
self.__root = root = [] # sentinel node
root[:] = [root, root, None]
self.__map = {}
        self.__update(*args, **kwds)
def __setitem__(self, key, value, dict_setitem=dict.__setitem__):
'od.__setitem__(i, y) <==> od[i]=y'
# Setting a new item creates a new link which goes at the end of the linked
# list, and the inherited dictionary is updated with the new key/value pair.
if key not in self:
root = self.__root
last = root[0]
last[1] = root[0] = self.__map[key] = [last, root, key]
dict_setitem(self, key, value)
def __delitem__(self, key, dict_delitem=dict.__delitem__):
'od.__delitem__(y) <==> del od[y]'
# Deleting an existing item uses self.__map to find the link which is
# then removed by updating the links in the predecessor and successor nodes.
dict_delitem(self, key)
link_prev, link_next, key = self.__map.pop(key)
link_prev[1] = link_next
link_next[0] = link_prev
def __iter__(self):
'od.__iter__() <==> iter(od)'
root = self.__root
curr = root[1]
while curr is not root:
yield curr[2]
curr = curr[1]
def __reversed__(self):
'od.__reversed__() <==> reversed(od)'
root = self.__root
curr = root[0]
while curr is not root:
yield curr[2]
curr = curr[0]
def clear(self):
'od.clear() -> None. Remove all items from od.'
try:
for node in self.__map.itervalues():
del node[:]
root = self.__root
root[:] = [root, root, None]
self.__map.clear()
except AttributeError:
pass
dict.clear(self)
def popitem(self, last=True):
'''od.popitem() -> (k, v), return and remove a (key, value) pair.
Pairs are returned in LIFO order if last is true or FIFO order if false.
'''
if not self:
raise KeyError('dictionary is empty')
root = self.__root
if last:
link = root[0]
link_prev = link[0]
link_prev[1] = root
root[0] = link_prev
else:
link = root[1]
link_next = link[1]
root[1] = link_next
link_next[0] = root
key = link[2]
del self.__map[key]
value = dict.pop(self, key)
return key, value
# -- the following methods do not depend on the internal structure --
def keys(self):
'od.keys() -> list of keys in od'
return list(self)
def values(self):
'od.values() -> list of values in od'
return [self[key] for key in self]
def items(self):
'od.items() -> list of (key, value) pairs in od'
return [(key, self[key]) for key in self]
def iterkeys(self):
'od.iterkeys() -> an iterator over the keys in od'
return iter(self)
def itervalues(self):
        'od.itervalues() -> an iterator over the values in od'
for k in self:
yield self[k]
def iteritems(self):
        'od.iteritems() -> an iterator over the (key, value) items in od'
for k in self:
yield (k, self[k])
def update(*args, **kwds):
'''od.update(E, **F) -> None. Update od from dict/iterable E and F.
If E is a dict instance, does: for k in E: od[k] = E[k]
If E has a .keys() method, does: for k in E.keys(): od[k] = E[k]
Or if E is an iterable of items, does: for k, v in E: od[k] = v
In either case, this is followed by: for k, v in F.items(): od[k] = v
'''
if len(args) > 2:
raise TypeError('update() takes at most 2 positional '
'arguments (%d given)' % (len(args),))
elif not args:
raise TypeError('update() takes at least 1 argument (0 given)')
self = args[0]
# Make progressively weaker assumptions about "other"
other = ()
if len(args) == 2:
other = args[1]
if isinstance(other, dict):
for key in other:
self[key] = other[key]
elif hasattr(other, 'keys'):
for key in other.keys():
self[key] = other[key]
else:
for key, value in other:
self[key] = value
for key, value in kwds.items():
self[key] = value
__update = update # let subclasses override update without breaking __init__
__marker = object()
def pop(self, key, default=__marker):
'''od.pop(k[,d]) -> v, remove specified key and return the corresponding value.
If key is not found, d is returned if given, otherwise KeyError is raised.
'''
if key in self:
result = self[key]
del self[key]
return result
if default is self.__marker:
raise KeyError(key)
return default
def setdefault(self, key, default=None):
'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od'
if key in self:
return self[key]
self[key] = default
return default
def __repr__(self, _repr_running={}):
'od.__repr__() <==> repr(od)'
call_key = id(self), _get_ident()
if call_key in _repr_running:
return '...'
_repr_running[call_key] = 1
try:
if not self:
return '%s()' % (self.__class__.__name__,)
return '%s(%r)' % (self.__class__.__name__, self.items())
finally:
del _repr_running[call_key]
def __reduce__(self):
'Return state information for pickling'
items = [[k, self[k]] for k in self]
inst_dict = vars(self).copy()
for k in vars(OrderedDict()):
inst_dict.pop(k, None)
if inst_dict:
return (self.__class__, (items,), inst_dict)
return self.__class__, (items,)
def copy(self):
'od.copy() -> a shallow copy of od'
return self.__class__(self)
@classmethod
def fromkeys(cls, iterable, value=None):
'''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S
and values equal to v (which defaults to None).
'''
d = cls()
for key in iterable:
d[key] = value
return d
def __eq__(self, other):
'''od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive
while comparison to a regular mapping is order-insensitive.
'''
if isinstance(other, OrderedDict):
return len(self)==len(other) and self.items() == other.items()
return dict.__eq__(self, other)
def __ne__(self, other):
return not self == other
# -- the following methods are only used in Python 2.7 --
def viewkeys(self):
"od.viewkeys() -> a set-like object providing a view on od's keys"
return KeysView(self)
def viewvalues(self):
"od.viewvalues() -> an object providing a view on od's values"
return ValuesView(self)
def viewitems(self):
"od.viewitems() -> a set-like object providing a view on od's items"
return ItemsView(self)
| mit |
iulian787/spack | lib/spack/spack/cmd/pkg.py | 5 | 6418 | # Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from __future__ import print_function
import os
import re
import llnl.util.tty as tty
from llnl.util.tty.colify import colify
from llnl.util.filesystem import working_dir
import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.paths
import spack.repo
from spack.util.executable import which
description = "query packages associated with particular git revisions"
section = "developer"
level = "long"
def setup_parser(subparser):
sp = subparser.add_subparsers(
metavar='SUBCOMMAND', dest='pkg_command')
add_parser = sp.add_parser('add', help=pkg_add.__doc__)
arguments.add_common_arguments(add_parser, ['packages'])
list_parser = sp.add_parser('list', help=pkg_list.__doc__)
list_parser.add_argument('rev', default='HEAD', nargs='?',
help="revision to list packages for")
diff_parser = sp.add_parser('diff', help=pkg_diff.__doc__)
diff_parser.add_argument(
'rev1', nargs='?', default='HEAD^',
help="revision to compare against")
diff_parser.add_argument(
'rev2', nargs='?', default='HEAD',
help="revision to compare to rev1 (default is HEAD)")
add_parser = sp.add_parser('added', help=pkg_added.__doc__)
add_parser.add_argument(
'rev1', nargs='?', default='HEAD^',
help="revision to compare against")
add_parser.add_argument(
'rev2', nargs='?', default='HEAD',
help="revision to compare to rev1 (default is HEAD)")
add_parser = sp.add_parser('changed', help=pkg_changed.__doc__)
add_parser.add_argument(
'rev1', nargs='?', default='HEAD^',
help="revision to compare against")
add_parser.add_argument(
'rev2', nargs='?', default='HEAD',
help="revision to compare to rev1 (default is HEAD)")
add_parser.add_argument(
'-t', '--type', action='store', default='C',
help="Types of changes to show (A: added, R: removed, "
"C: changed); default is 'C'")
rm_parser = sp.add_parser('removed', help=pkg_removed.__doc__)
rm_parser.add_argument(
'rev1', nargs='?', default='HEAD^',
help="revision to compare against")
rm_parser.add_argument(
'rev2', nargs='?', default='HEAD',
help="revision to compare to rev1 (default is HEAD)")
def packages_path():
"""Get the test repo if it is active, otherwise the builtin repo."""
try:
return spack.repo.path.get_repo('builtin.mock').packages_path
except spack.repo.UnknownNamespaceError:
return spack.repo.path.get_repo('builtin').packages_path
class GitExe:
# Wrapper around Executable for git to set working directory for all
# invocations.
#
# Not using -C as that is not supported for git < 1.8.5.
def __init__(self):
self._git_cmd = which('git', required=True)
def __call__(self, *args, **kwargs):
with working_dir(packages_path()):
return self._git_cmd(*args, **kwargs)
_git = None
def get_git():
"""Get a git executable that runs *within* the packages path."""
global _git
if _git is None:
_git = GitExe()
return _git
def list_packages(rev):
git = get_git()
# git ls-tree does not support ... merge-base syntax, so do it manually
if rev.endswith('...'):
ref = rev.replace('...', '')
rev = git('merge-base', ref, 'HEAD', output=str).strip()
output = git('ls-tree', '--name-only', rev, output=str)
return sorted(line for line in output.split('\n')
if line and not line.startswith('.'))
def pkg_add(args):
"""add a package to the git stage with `git add`"""
git = get_git()
for pkg_name in args.packages:
filename = spack.repo.path.filename_for_package_name(pkg_name)
if not os.path.isfile(filename):
tty.die("No such package: %s. Path does not exist:" %
pkg_name, filename)
git('add', filename)
def pkg_list(args):
"""list packages associated with a particular spack git revision"""
colify(list_packages(args.rev))
def diff_packages(rev1, rev2):
p1 = set(list_packages(rev1))
p2 = set(list_packages(rev2))
return p1.difference(p2), p2.difference(p1)
def pkg_diff(args):
"""compare packages available in two different git revisions"""
u1, u2 = diff_packages(args.rev1, args.rev2)
if u1:
print("%s:" % args.rev1)
colify(sorted(u1), indent=4)
if u1:
print()
if u2:
print("%s:" % args.rev2)
colify(sorted(u2), indent=4)
def pkg_removed(args):
"""show packages removed since a commit"""
u1, u2 = diff_packages(args.rev1, args.rev2)
if u1:
colify(sorted(u1))
def pkg_added(args):
"""show packages added since a commit"""
u1, u2 = diff_packages(args.rev1, args.rev2)
if u2:
colify(sorted(u2))
def pkg_changed(args):
"""show packages changed since a commit"""
lower_type = args.type.lower()
if not re.match('^[arc]*$', lower_type):
tty.die("Invald change type: '%s'." % args.type,
"Can contain only A (added), R (removed), or C (changed)")
removed, added = diff_packages(args.rev1, args.rev2)
git = get_git()
out = git('diff', '--relative', '--name-only', args.rev1, args.rev2,
output=str).strip()
lines = [] if not out else re.split(r'\s+', out)
changed = set()
for path in lines:
pkg_name, _, _ = path.partition(os.sep)
if pkg_name not in added and pkg_name not in removed:
changed.add(pkg_name)
packages = set()
if 'a' in lower_type:
packages |= added
if 'r' in lower_type:
packages |= removed
if 'c' in lower_type:
packages |= changed
if packages:
colify(sorted(packages))
def pkg(parser, args):
if not spack.cmd.spack_is_git_repo():
tty.die("This spack is not a git clone. Can't use 'spack pkg'")
action = {'add': pkg_add,
'diff': pkg_diff,
'list': pkg_list,
'removed': pkg_removed,
'added': pkg_added,
'changed': pkg_changed}
action[args.pkg_command](args)
| lgpl-2.1 |
vallsv/pyqtgraph | examples/crosshair.py | 24 | 2649 | """
Demonstrates some customized mouse interaction by drawing a crosshair that follows
the mouse.
"""
import initExample ## Add path to library (just for examples; you do not need this)
import numpy as np
import pyqtgraph as pg
from pyqtgraph.Qt import QtGui, QtCore
from pyqtgraph.Point import Point
#generate layout
app = QtGui.QApplication([])
win = pg.GraphicsWindow()
win.setWindowTitle('pyqtgraph example: crosshair')
label = pg.LabelItem(justify='right')
win.addItem(label)
p1 = win.addPlot(row=1, col=0)
p2 = win.addPlot(row=2, col=0)
region = pg.LinearRegionItem()
region.setZValue(10)
# Add the LinearRegionItem to the ViewBox, but tell the ViewBox to exclude this
# item when doing auto-range calculations.
p2.addItem(region, ignoreBounds=True)
#pg.dbg()
p1.setAutoVisible(y=True)
#create numpy arrays
#make the numbers large to show that the x range covers data all the way from 10000 down to 0
data1 = 10000 + 15000 * pg.gaussianFilter(np.random.random(size=10000), 10) + 3000 * np.random.random(size=10000)
data2 = 15000 + 15000 * pg.gaussianFilter(np.random.random(size=10000), 10) + 3000 * np.random.random(size=10000)
p1.plot(data1, pen="r")
p1.plot(data2, pen="g")
p2.plot(data1, pen="w")
def update():
region.setZValue(10)
minX, maxX = region.getRegion()
p1.setXRange(minX, maxX, padding=0)
region.sigRegionChanged.connect(update)
def updateRegion(window, viewRange):
rgn = viewRange[0]
region.setRegion(rgn)
p1.sigRangeChanged.connect(updateRegion)
region.setRegion([1000, 2000])
#cross hair
vLine = pg.InfiniteLine(angle=90, movable=False)
hLine = pg.InfiniteLine(angle=0, movable=False)
p1.addItem(vLine, ignoreBounds=True)
p1.addItem(hLine, ignoreBounds=True)
vb = p1.vb
def mouseMoved(evt):
pos = evt[0] ## using signal proxy turns original arguments into a tuple
if p1.sceneBoundingRect().contains(pos):
mousePoint = vb.mapSceneToView(pos)
index = int(mousePoint.x())
if index > 0 and index < len(data1):
label.setText("<span style='font-size: 12pt'>x=%0.1f, <span style='color: red'>y1=%0.1f</span>, <span style='color: green'>y2=%0.1f</span>" % (mousePoint.x(), data1[index], data2[index]))
vLine.setPos(mousePoint.x())
hLine.setPos(mousePoint.y())
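# SignalProxy limits sigMouseMoved to at most 60 calls per second so the
# crosshair update doesn't flood the Qt event loop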
proxy = pg.SignalProxy(p1.scene().sigMouseMoved, rateLimit=60, slot=mouseMoved)
#p1.scene().sigMouseMoved.connect(mouseMoved)
## Start Qt event loop unless running in interactive mode or using pyside.
if __name__ == '__main__':
import sys
if (sys.flags.interactive != 1) or not hasattr(QtCore, 'PYQT_VERSION'):
QtGui.QApplication.instance().exec_()
| mit |
SchoolIdolTomodachi/SchoolIdolAPI | api/migrations/0069_verificationrequest.py | 4 | 1304 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('api', '0068_auto_20150914_1909'),
]
operations = [
migrations.CreateModel(
name='VerificationRequest',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('creation', models.DateTimeField(auto_now_add=True)),
('verification_date', models.DateTimeField(auto_now_add=True)),
('verification', models.PositiveIntegerField(default=0, choices=[(0, b''), (1, 'Silver Verified'), (2, 'Gold Verified'), (3, b'')])),
('status', models.PositiveIntegerField(default=0, choices=[(0, b'Pending'), (1, b'In Progress'), (2, b'Verified')])),
('account', models.ForeignKey(related_name='verificationrequests', to='api.Account')),
('verified_by', models.ForeignKey(related_name='verificationsdone', to=settings.AUTH_USER_MODEL)),
],
options={
},
bases=(models.Model,),
),
]
| apache-2.0 |
epam/DLab | infrastructure-provisioning/src/dataengine-service/fabfile.py | 1 | 4232 | #!/usr/bin/python
# *****************************************************************************
#
# Copyright (c) 2016, EPAM SYSTEMS INC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ******************************************************************************
import json
import time
from fabric.api import *
from dlab.fab import *
from dlab.meta_lib import *
from dlab.actions_lib import *
import sys
import os
import uuid
import logging
def run():
local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['edge_user_name'], os.environ['request_id'])
local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
logging.basicConfig(format='%(levelname)-8s [%(asctime)s] %(message)s',
level=logging.INFO,
filename=local_log_filepath)
dataengine_service_config = dict()
dataengine_service_config['uuid'] = str(uuid.uuid4())[:5]
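    # Short 5-character id passed to both the prepare and configure scripts
    # so they operate on the same cluster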
try:
local("~/scripts/{}.py --uuid {}".format('dataengine-service_prepare', dataengine_service_config['uuid']))
except Exception as err:
traceback.print_exc()
append_result("Failed preparing Data Engine service.", str(err))
sys.exit(1)
try:
local("~/scripts/{}.py --uuid {}".format('dataengine-service_configure', dataengine_service_config['uuid']))
except Exception as err:
traceback.print_exc()
append_result("Failed configuring Data Engine service.", str(err))
sys.exit(1)
# Main function for installing additional libraries for Dataengine
def install_libs():
local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['edge_user_name'],
os.environ['request_id'])
local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
logging.basicConfig(format='%(levelname)-8s [%(asctime)s] %(message)s',
level=logging.DEBUG,
filename=local_log_filepath)
try:
local("~/scripts/{}.py".format('dataengine-service_install_libs'))
except Exception as err:
traceback.print_exc()
append_result("Failed installing additional libs for DataEngine service.", str(err))
sys.exit(1)
# Main function for getting available libraries for Data Engine
def list_libs():
local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['edge_user_name'],
os.environ['request_id'])
local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
logging.basicConfig(format='%(levelname)-8s [%(asctime)s] %(message)s',
level=logging.DEBUG,
filename=local_log_filepath)
try:
local("~/scripts/{}.py".format('dataengine-service_list_libs'))
except Exception as err:
traceback.print_exc()
append_result("Failed get available libraries for Data Engine service.", str(err))
sys.exit(1)
def terminate():
local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['edge_user_name'], os.environ['request_id'])
local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
logging.basicConfig(format='%(levelname)-8s [%(asctime)s] %(message)s',
level=logging.DEBUG,
filename=local_log_filepath)
try:
local("~/scripts/{}.py".format('dataengine-service_terminate'))
except Exception as err:
traceback.print_exc()
append_result("Failed configuring Notebook node.", str(err))
sys.exit(1)
| apache-2.0 |
stadt-karlsruhe/ckanext-extractor | ckanext/extractor/tests/logic/test_action.py | 1 | 13849 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2016-2018 Stadt Karlsruhe (www.karlsruhe.de)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import absolute_import, print_function, unicode_literals
import uuid
import mock
from nose.tools import assert_false, assert_raises, assert_true
from ckan.logic import NotFound
from ckan.model import Resource
from ckan.tests.helpers import call_action, FunctionalTestBase
from ckan.tests import factories
from ...model import ResourceMetadata
from ..helpers import (assert_anonymous_access, assert_authorized, assert_equal,
assert_no_anonymous_access, assert_no_metadata,
assert_not_authorized, get_metadata, fake_process,
assert_validation_fails)
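# Test double for the background-job queue: each "enqueued" job is a Mock
# whose id is a fresh UUID string.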
def enqueue_job(*args, **kwargs):
return mock.Mock(id=str(uuid.uuid4()))
class TestExtractorList(FunctionalTestBase):
def test_extractor_list_empty(self):
"""
extractor_list when no metadata exist.
"""
assert_equal(call_action('extractor_list'), [])
def test_extractor_list_inprogress(self):
"""
extractor_list does not list metadata that is in progress.
"""
factories.Resource(format='pdf')
assert_equal(call_action('extractor_list'), [])
def test_extractor_list_some(self):
"""
extractor_list when some metadata exist.
"""
res_dict = factories.Resource(format='pdf')
fake_process(res_dict)
assert_equal(call_action('extractor_list'), [res_dict['id']])
def test_extractor_list_auth(self):
"""
Authorization for extractor_show.
"""
assert_authorized(factories.User(), 'extractor_list',
"Normal user wasn't allowed to extractor_list")
assert_anonymous_access('extractor_list')
@mock.patch('ckanext.extractor.logic.action.enqueue_job',
side_effect=enqueue_job)
class TestExtractorExtract(FunctionalTestBase):
def test_extractor_extract_new_indexed(self, enqueue_job):
"""
extractor_extract for a new resource with indexed format.
"""
res_dict = factories.Resource(format='pdf')
get_metadata(res_dict).delete().commit()
enqueue_job.reset_mock()
result = call_action('extractor_extract', id=res_dict['id'])
assert_equal(result['status'], 'new', 'Wrong state')
assert_false(result['task_id'] is None, 'Missing task ID')
assert_equal(result['task_id'], get_metadata(res_dict).task_id,
'Task IDs differ.')
assert_equal(enqueue_job.call_count, 1,
'Wrong number of extraction tasks.')
def test_extractor_extract_new_ignored(self, enqueue_job):
"""
extractor_extract for a new resource with ignored format.
"""
res_dict = factories.Resource(format='foo')
result = call_action('extractor_extract', id=res_dict['id'])
assert_equal(result['status'], 'ignored', 'Wrong state')
assert_true(result['task_id'] is None, 'Unexpected task ID')
assert_equal(enqueue_job.call_count, 0,
'Wrong number of extraction tasks.')
def test_extractor_extract_unchanged(self, enqueue_job):
"""
extractor_extract for a resource with unchanged format and URL.
"""
res_dict = factories.Resource(format='pdf')
enqueue_job.reset_mock()
fake_process(res_dict)
result = call_action('extractor_extract', id=res_dict['id'])
assert_equal(result['status'], 'unchanged', 'Wrong state')
assert_true(result['task_id'] is None, 'Unexpected task ID')
assert_equal(result['task_id'], get_metadata(res_dict).task_id,
'Task IDs differ.')
assert_equal(enqueue_job.call_count, 0,
'Wrong number of extraction tasks.')
def test_extractor_extract_update_indexed_format(self, enqueue_job):
"""
extractor_extract for a resource with updated, indexed format.
"""
res_dict = factories.Resource(format='pdf')
enqueue_job.reset_mock()
fake_process(res_dict)
resource = Resource.get(res_dict['id'])
resource.format = 'doc'
resource.save()
result = call_action('extractor_extract', id=res_dict['id'])
assert_equal(result['status'], 'update', 'Wrong state')
assert_false(result['task_id'] is None, 'Missing task ID')
assert_equal(result['task_id'], get_metadata(res_dict).task_id,
'Task IDs differ.')
assert_equal(enqueue_job.call_count, 1,
'Wrong number of extraction tasks.')
def test_extractor_extract_update_ignored_format(self, enqueue_job):
"""
extractor_extract for a resource with updated, ignored format.
"""
res_dict = factories.Resource(format='pdf')
enqueue_job.reset_mock()
fake_process(res_dict)
resource = Resource.get(res_dict['id'])
resource.format = 'foo'
resource.save()
result = call_action('extractor_extract', id=res_dict['id'])
assert_equal(result['status'], 'ignored', 'Wrong state')
assert_true(result['task_id'] is None, 'Unexpected task ID')
assert_equal(enqueue_job.call_count, 0,
'Wrong number of extraction tasks.')
assert_no_metadata(res_dict)
def test_extractor_extract_inprogress(self, enqueue_job):
"""
extractor_extract for a resource that's already being extracted.
"""
res_dict = factories.Resource(format='pdf')
enqueue_job.reset_mock()
old_task_id = get_metadata(res_dict).task_id
result = call_action('extractor_extract', id=res_dict['id'])
assert_equal(result['status'], 'inprogress', 'Wrong state')
assert_equal(result['task_id'], old_task_id, 'Task IDs differ.')
assert_equal(enqueue_job.call_count, 0,
'Wrong number of extraction tasks.')
def test_extractor_extract_unexisting(self, enqueue_job):
"""
extractor_extract for a resource that does not exist.
"""
assert_raises(
NotFound, lambda: call_action('extractor_extract',
id='does-not-exist'))
def test_extractor_extract_auth(self, enqueue_job):
"""
Authorization for extractor_extract.
"""
res_dict = factories.Resource(format='pdf')
assert_not_authorized(factories.User(), 'extractor_extract',
'Normal user was allowed to extractor_extract',
id=res_dict['id'])
assert_no_anonymous_access('extractor_extract', id=res_dict['id'])
assert_authorized(factories.Sysadmin(), 'extractor_extract',
"Sysadmin wasn't allowed to extractor_extract",
id=res_dict['id'])
def test_extractor_extract_validation(self, enqueue_job):
"""
Input validation for extractor_extract.
"""
assert_validation_fails('extractor_extract', 'ID was not required.')
assert_validation_fails('extractor_extract',
'Wrong force type was accepted',
force='maybe')
def test_extractor_extract_force_ignored_format(self, enqueue_job):
"""
Forcing extractor_extract with ignored format.
"""
res_dict = factories.Resource(format='foo')
result = call_action('extractor_extract', id=res_dict['id'],
force=True)
assert_equal(result['status'], 'ignored', 'Wrong state')
assert_true(result['task_id'] is None, 'Unexpected task ID')
assert_equal(enqueue_job.call_count, 0,
'Wrong number of extraction tasks.')
def test_extractor_extract_force_unchanged(self, enqueue_job):
"""
Forcing extractor_extract with unchanged resource.
"""
res_dict = factories.Resource(format='pdf')
enqueue_job.reset_mock()
fake_process(res_dict)
result = call_action('extractor_extract', id=res_dict['id'],
force=True)
assert_equal(result['status'], 'unchanged', 'Wrong state')
assert_false(result['task_id'] is None, 'Missing task ID')
assert_equal(result['task_id'], get_metadata(res_dict).task_id,
'Task IDs differ.')
assert_equal(enqueue_job.call_count, 1,
'Wrong number of extraction tasks.')
def test_extractor_extract_force_inprogress(self, enqueue_job):
"""
Forcing extractor_extract with existing task.
"""
res_dict = factories.Resource(format='pdf')
enqueue_job.reset_mock()
old_task_id = get_metadata(res_dict).task_id
result = call_action('extractor_extract', id=res_dict['id'],
force=True)
assert_equal(result['status'], 'inprogress', 'Wrong state')
assert_false(result['task_id'] is None, 'Missing task ID')
assert_equal(result['task_id'], get_metadata(res_dict).task_id,
'Task IDs differ.')
assert_false(result['task_id'] == old_task_id,
'Task ID was not updated.')
assert_equal(enqueue_job.call_count, 1,
'Wrong number of extraction tasks.')
@mock.patch('ckanext.extractor.logic.action.enqueue_job',
return_value=mock.Mock(id='test-id'))
class TestExtractorShow(FunctionalTestBase):
def test_extractor_show_unexisting(self, enqueue_job):
"""
extractor_show for a resource that does not exist.
"""
assert_raises(
NotFound, lambda: call_action('extractor_show',
id='does-not-exist'))
def test_extractor_show_inprogress(self, enqueue_job):
"""
extractor_show for metadata that is in progress.
"""
res_dict = factories.Resource(format='pdf')
result = call_action('extractor_show', id=res_dict['id'])
assert_equal(result['task_id'], 'test-id', 'Wrong task ID.')
def test_extractor_show_normal(self, enqueue_job):
"""
extractor_show for normal metadata.
"""
res_dict = factories.Resource(format='pdf')
fake_process(res_dict)
metadata = get_metadata(res_dict)
metadata.meta['fulltext'] = 'foobar'
metadata.meta['author'] = 'John Doe'
metadata.save()
result = call_action('extractor_show', id=res_dict['id'])
assert_equal(result['meta']['fulltext'], 'foobar', 'Wrong fulltext.')
assert_equal(result['meta']['author'], 'John Doe', 'Wrong author.')
assert_equal(result['resource_id'], res_dict['id'],
'Wrong resource ID.')
assert_true(result['task_id'] is None, 'Unexpected task ID.')
def test_extractor_show_auth(self, enqueue_job):
"""
Authorization for extractor_show.
"""
res_dict = factories.Resource(format='pdf')
assert_authorized(factories.User(), 'extractor_show',
"Normal user wasn't allowed to extractor_show",
id=res_dict['id'])
assert_anonymous_access('extractor_show', id=res_dict['id'])
def test_extractor_show_validation(self, enqueue_job):
"""
Input validation for extractor_show.
"""
assert_validation_fails('extractor_show',
'ID was not required.')
@mock.patch('ckanext.extractor.logic.action.enqueue_job',
side_effect=enqueue_job)
class TestExtractorDelete(FunctionalTestBase):
def test_extractor_delete_unexisting(self, enqueue_job):
"""
extractor_delete for a resource that does not exist.
"""
assert_raises(
NotFound, lambda: call_action('extractor_delete',
id='does-not-exist'))
def test_extractor_delete_normal(self, enqueue_job):
"""
extractor_delete for a normal resource.
"""
res_dict = factories.Resource(format='pdf')
fake_process(res_dict)
call_action('extractor_delete', id=res_dict['id'])
assert_no_metadata(res_dict)
def test_extractor_delete_auth(self, enqueue_job):
"""
Authorization for extractor_delete.
"""
res_dict = factories.Resource(format='pdf')
assert_not_authorized(factories.User(), 'extractor_delete',
'Normal user was allowed to extractor_delete',
id=res_dict['id'])
assert_no_anonymous_access('extractor_delete', id=res_dict['id'])
assert_authorized(factories.Sysadmin(), 'extractor_delete',
"Sysadmin wasn't allowed to extractor_delete",
id=res_dict['id'])
def test_extractor_delete_validation(self, enqueue_job):
"""
Input validation for extractor_delete.
"""
assert_validation_fails('extractor_delete', 'ID was not required.')
| agpl-3.0 |
clchiou/garage | shipyard2/shipyard2/rules/xars.py | 1 | 5976 | """Helpers for writing rules under //xars."""
__all__ = [
'define_xar',
'define_zipapp',
]
import dataclasses
import logging
from pathlib import Path
import foreman
from g1 import scripts
from g1.bases.assertions import ASSERT
from g1.containers import models as ctr_models
from g1.operations.cores import models as ops_models
import shipyard2
import shipyard2.rules
from shipyard2.rules import images
from shipyard2.rules import pythons
from shipyard2.rules import releases
LOG = logging.getLogger(__name__)
@dataclasses.dataclass(frozen=True)
class XarRules:
build: foreman.Rule
@dataclasses.dataclass(frozen=True)
class ZipappRules:
build: foreman.Rule
def _run_build(build_func, parameters, kind, name, version, **kwargs):
xar_dir_path = releases.get_output_dir_path(parameters, name, version)
metadata_path = xar_dir_path / shipyard2.XAR_DIR_RELEASE_METADATA_FILENAME
if metadata_path.exists():
LOG.info('skip: build %s: %s %s', kind, name, version)
return
LOG.info('build %s: %s %s', kind, name, version)
try:
scripts.mkdir(xar_dir_path)
releases.generate_release_metadata(parameters, metadata_path)
build_func(parameters, name, version, xar_dir_path, **kwargs)
except Exception:
# Roll back on error.
scripts.rm(xar_dir_path, recursive=True)
raise
def define_xar(
*,
name,
exec_relpath,
image,
):
"""Define a XAR.
This defines:
* Parameter: name/version.
* Rule: name/build. NOTE: This rule is generally run in the host
system, not inside a builder pod.
"""
ASSERT.not_predicate(Path(exec_relpath), Path.is_absolute)
# Let's require absolute image label for now as it is quite hard to
# derive label path from xar's relpath.
ASSERT.startswith(image, '//')
name_prefix = shipyard2.rules.canonicalize_name_prefix(name)
parameter_version = name_prefix + 'version'
rule_build = name_prefix + 'build'
(foreman.define_parameter(parameter_version)\
.with_doc('xar version'))
image = foreman.Label.parse(image)
@foreman.rule(rule_build)
@foreman.rule.depend('//releases:build')
@foreman.rule.depend('//xars/bases:build')
@foreman.rule.depend(str(images.derive_rule(image)))
def build(parameters):
_run_build(
_build_xar,
parameters,
'xar',
name,
ASSERT.not_none(parameters[parameter_version]),
exec_relpath=exec_relpath,
image=image,
)
return XarRules(build=build)
def _build_xar(
parameters, name, version, xar_dir_path, *, exec_relpath, image
):
releases.dump(
ops_models.XarDeployInstruction(
label=str(releases.get_output_label(name)),
version=version,
exec_relpath=exec_relpath,
image=ctr_models.PodConfig.Image(
name=str(image.name),
version=images.get_image_version(parameters, image),
),
),
xar_dir_path / shipyard2.XAR_DIR_DEPLOY_INSTRUCTION_FILENAME,
)
scripts.make_relative_symlink(
images.derive_image_path(parameters, image),
xar_dir_path / shipyard2.XAR_DIR_IMAGE_FILENAME,
)
def define_zipapp(
*,
name,
packages,
python_version='3',
):
"""Define a Python zipapp of first-party packages.
This defines:
* Parameter: name/version.
* Rule: name/build. NOTE: This rule is generally run in the host
system, not inside a builder pod.
"""
ASSERT.not_empty(packages)
ASSERT.all(packages, lambda p: not Path(p).is_absolute())
name_prefix = shipyard2.rules.canonicalize_name_prefix(name)
parameter_version = name_prefix + 'version'
rule_build = name_prefix + 'build'
(foreman.define_parameter(parameter_version)\
.with_doc('zipapp version'))
@foreman.rule(rule_build)
@foreman.rule.depend('//releases:build')
@foreman.rule.depend('//xars/bases:build')
def build(parameters):
_run_build(
_build_zipapp,
parameters,
'zipapp',
name,
ASSERT.not_none(parameters[parameter_version]),
python_exec='python%s' % python_version,
packages=packages,
)
return ZipappRules(build=build)
def _build_zipapp(
parameters, name, version, xar_dir_path, *, python_exec, packages
):
releases.dump(
ops_models.XarDeployInstruction(
label=str(releases.get_output_label(name)),
version=version,
exec_relpath=None,
image=None,
),
xar_dir_path / shipyard2.XAR_DIR_DEPLOY_INSTRUCTION_FILENAME,
)
_package_zipapp(
parameters,
python_exec,
packages,
xar_dir_path / shipyard2.XAR_DIR_ZIPAPP_FILENAME,
)
def _package_zipapp(parameters, python_exec, packages, zipapp_path):
# zipapp packaging might not work correctly on existing zipapp.
ASSERT.not_predicate(zipapp_path, Path.exists)
# We will change working directory when running setup.py so let's
# make sure that zipapp output path is absolute.
ASSERT.predicate(zipapp_path, Path.is_absolute)
scripts.export_path(
'PYTHONPATH',
pythons.find_package(parameters, 'py/g1/devtools/buildtools'),
)
# TODO: Remove this once startup is migrated to
# g1.devtools.buildtools.
scripts.export_path(
'PYTHONPATH',
pythons.find_package(parameters, 'py/buildtools'),
)
for package in packages:
with scripts.using_cwd(pythons.find_package(parameters, package)):
# Clean up any previous build, just in case.
scripts.rm('build', recursive=True)
scripts.run([
python_exec,
'setup.py',
'build',
'bdist_zipapp',
*('--output', zipapp_path),
])
| mit |
jasonseminara/OpenSourceFinal | lib/python3.5/site-packages/pip/_vendor/requests/packages/chardet/escprober.py | 2936 | 3187 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is mozilla.org code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from . import constants
from .escsm import (HZSMModel, ISO2022CNSMModel, ISO2022JPSMModel,
ISO2022KRSMModel)
from .charsetprober import CharSetProber
from .codingstatemachine import CodingStateMachine
from .compat import wrap_ord
class EscCharSetProber(CharSetProber):
def __init__(self):
CharSetProber.__init__(self)
self._mCodingSM = [
CodingStateMachine(HZSMModel),
CodingStateMachine(ISO2022CNSMModel),
CodingStateMachine(ISO2022JPSMModel),
CodingStateMachine(ISO2022KRSMModel)
]
self.reset()
def reset(self):
CharSetProber.reset(self)
for codingSM in self._mCodingSM:
if not codingSM:
continue
codingSM.active = True
codingSM.reset()
self._mActiveSM = len(self._mCodingSM)
self._mDetectedCharset = None
def get_charset_name(self):
return self._mDetectedCharset
def get_confidence(self):
if self._mDetectedCharset:
return 0.99
else:
return 0.00
def feed(self, aBuf):
for c in aBuf:
# PY3K: aBuf is a byte array, so c is an int, not a byte
for codingSM in self._mCodingSM:
if not codingSM:
continue
if not codingSM.active:
continue
codingState = codingSM.next_state(wrap_ord(c))
if codingState == constants.eError:
codingSM.active = False
self._mActiveSM -= 1
if self._mActiveSM <= 0:
self._mState = constants.eNotMe
return self.get_state()
elif codingState == constants.eItsMe:
self._mState = constants.eFoundIt
self._mDetectedCharset = codingSM.get_coding_state_machine() # nopep8
return self.get_state()
return self.get_state()
| mit |
xavierwu/scikit-learn | examples/applications/plot_species_distribution_modeling.py | 254 | 7434 | """
=============================
Species distribution modeling
=============================
Modeling species' geographic distributions is an important
problem in conservation biology. In this example we
model the geographic distribution of two South American
mammals given past observations and 14 environmental
variables. Since we have only positive examples (there are
no unsuccessful observations), we cast this problem as a
density estimation problem and use the `OneClassSVM` provided
by the package `sklearn.svm` as our modeling tool.
The dataset is provided by Phillips et. al. (2006).
If available, the example uses
`basemap <http://matplotlib.sourceforge.net/basemap/doc/html/>`_
to plot the coast lines and national boundaries of South America.
The two species are:
- `"Bradypus variegatus"
<http://www.iucnredlist.org/apps/redlist/details/3038/0>`_ ,
the Brown-throated Sloth.
- `"Microryzomys minutus"
<http://www.iucnredlist.org/apps/redlist/details/13408/0>`_ ,
also known as the Forest Small Rice Rat, a rodent that lives in Peru,
   Colombia, Ecuador, and Venezuela.
References
----------
* `"Maximum entropy modeling of species geographic distributions"
<http://www.cs.princeton.edu/~schapire/papers/ecolmod.pdf>`_
S. J. Phillips, R. P. Anderson, R. E. Schapire - Ecological Modelling,
190:231-259, 2006.
"""
# Authors: Peter Prettenhofer <peter.prettenhofer@gmail.com>
# Jake Vanderplas <vanderplas@astro.washington.edu>
#
# License: BSD 3 clause
from __future__ import print_function
from time import time
import numpy as np
import matplotlib.pyplot as plt
from sklearn.datasets.base import Bunch
from sklearn.datasets import fetch_species_distributions
from sklearn.datasets.species_distributions import construct_grids
from sklearn import svm, metrics
# if basemap is available, we'll use it.
# otherwise, we'll improvise later...
try:
from mpl_toolkits.basemap import Basemap
basemap = True
except ImportError:
basemap = False
print(__doc__)
def create_species_bunch(species_name, train, test, coverages, xgrid, ygrid):
"""Create a bunch with information about a particular organism
This will use the test/train record arrays to extract the
data specific to the given species name.
"""
bunch = Bunch(name=' '.join(species_name.split("_")[:2]))
species_name = species_name.encode('ascii')
points = dict(test=test, train=train)
for label, pts in points.items():
# choose points associated with the desired species
pts = pts[pts['species'] == species_name]
bunch['pts_%s' % label] = pts
# determine coverage values for each of the training & testing points
ix = np.searchsorted(xgrid, pts['dd long'])
iy = np.searchsorted(ygrid, pts['dd lat'])
bunch['cov_%s' % label] = coverages[:, -iy, ix].T
return bunch
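# Minimal sketch of the grid lookup used in create_species_bunch above: given
# an ascending coordinate grid, np.searchsorted returns the index of the cell
# each point falls into. Grid spacing and the sample points are assumptions
# chosen purely for illustration.
def _grid_lookup_sketch():
    xgrid_demo = np.arange(-100.0, -30.0, 0.5)      # ascending longitudes
    sample_longs = np.array([-65.25, -70.0])        # two hypothetical points
    ix = np.searchsorted(xgrid_demo, sample_longs)  # grid column per point
    return ix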
def plot_species_distribution(species=("bradypus_variegatus_0",
"microryzomys_minutus_0")):
"""
Plot the species distribution.
"""
if len(species) > 2:
print("Note: when more than two species are provided,"
" only the first two will be used")
t0 = time()
# Load the compressed data
data = fetch_species_distributions()
# Set up the data grid
xgrid, ygrid = construct_grids(data)
# The grid in x,y coordinates
X, Y = np.meshgrid(xgrid, ygrid[::-1])
# create a bunch for each species
BV_bunch = create_species_bunch(species[0],
data.train, data.test,
data.coverages, xgrid, ygrid)
MM_bunch = create_species_bunch(species[1],
data.train, data.test,
data.coverages, xgrid, ygrid)
# background points (grid coordinates) for evaluation
np.random.seed(13)
background_points = np.c_[np.random.randint(low=0, high=data.Ny,
size=10000),
np.random.randint(low=0, high=data.Nx,
size=10000)].T
# We'll make use of the fact that coverages[6] has measurements at all
# land points. This will help us decide between land and water.
land_reference = data.coverages[6]
# Fit, predict, and plot for each species.
for i, species in enumerate([BV_bunch, MM_bunch]):
print("_" * 80)
print("Modeling distribution of species '%s'" % species.name)
# Standardize features
mean = species.cov_train.mean(axis=0)
std = species.cov_train.std(axis=0)
train_cover_std = (species.cov_train - mean) / std
# Fit OneClassSVM
print(" - fit OneClassSVM ... ", end='')
clf = svm.OneClassSVM(nu=0.1, kernel="rbf", gamma=0.5)
clf.fit(train_cover_std)
print("done.")
# Plot map of South America
plt.subplot(1, 2, i + 1)
if basemap:
print(" - plot coastlines using basemap")
m = Basemap(projection='cyl', llcrnrlat=Y.min(),
urcrnrlat=Y.max(), llcrnrlon=X.min(),
urcrnrlon=X.max(), resolution='c')
m.drawcoastlines()
m.drawcountries()
else:
print(" - plot coastlines from coverage")
plt.contour(X, Y, land_reference,
levels=[-9999], colors="k",
linestyles="solid")
plt.xticks([])
plt.yticks([])
print(" - predict species distribution")
# Predict species distribution using the training data
Z = np.ones((data.Ny, data.Nx), dtype=np.float64)
# We'll predict only for the land points.
idx = np.where(land_reference > -9999)
coverages_land = data.coverages[:, idx[0], idx[1]].T
pred = clf.decision_function((coverages_land - mean) / std)[:, 0]
Z *= pred.min()
Z[idx[0], idx[1]] = pred
levels = np.linspace(Z.min(), Z.max(), 25)
Z[land_reference == -9999] = -9999
# plot contours of the prediction
plt.contourf(X, Y, Z, levels=levels, cmap=plt.cm.Reds)
plt.colorbar(format='%.2f')
# scatter training/testing points
plt.scatter(species.pts_train['dd long'], species.pts_train['dd lat'],
s=2 ** 2, c='black',
marker='^', label='train')
plt.scatter(species.pts_test['dd long'], species.pts_test['dd lat'],
s=2 ** 2, c='black',
marker='x', label='test')
plt.legend()
plt.title(species.name)
plt.axis('equal')
# Compute AUC with regards to background points
pred_background = Z[background_points[0], background_points[1]]
pred_test = clf.decision_function((species.cov_test - mean)
/ std)[:, 0]
scores = np.r_[pred_test, pred_background]
y = np.r_[np.ones(pred_test.shape), np.zeros(pred_background.shape)]
fpr, tpr, thresholds = metrics.roc_curve(y, scores)
roc_auc = metrics.auc(fpr, tpr)
plt.text(-35, -70, "AUC: %.3f" % roc_auc, ha="right")
print("\n Area under the ROC curve : %f" % roc_auc)
print("\ntime elapsed: %.2fs" % (time() - t0))
plot_species_distribution()
plt.show()
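# Distilled sketch of the core density-estimation step used above, shown in
# isolation: standardize the positive-only covariates, fit a OneClassSVM, and
# score points with decision_function (higher values mean a denser, more
# suitable region). The synthetic data shape and values are assumptions used
# only for illustration.
def _one_class_svm_sketch():
    rng = np.random.RandomState(0)
    cov_train = rng.normal(size=(200, 14))           # positive-only samples
    mean, std = cov_train.mean(axis=0), cov_train.std(axis=0)
    clf_sketch = svm.OneClassSVM(nu=0.1, kernel="rbf", gamma=0.5)
    clf_sketch.fit((cov_train - mean) / std)         # fit on standardized data
    return clf_sketch.decision_function((cov_train - mean) / std)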
| bsd-3-clause |
physycom/QGIS | tests/src/python/test_qgscheckablecombobox.py | 45 | 1824 | # -*- coding: utf-8 -*-
"""QGIS Unit tests for QgsCheckableComboBox
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
__author__ = 'Alexander Bruy'
__date__ = '22/03/2017'
__copyright__ = 'Copyright 2017, The QGIS Project'
import qgis # NOQA
from qgis.PyQt.QtCore import Qt
from qgis.PyQt.QtTest import QSignalSpy
from qgis.gui import QgsCheckableComboBox
from qgis.testing import start_app, unittest
start_app()
class TestQgsCheckableComboBox(unittest.TestCase):
def testGettersSetters(self):
""" test widget getters/setters """
w = qgis.gui.QgsCheckableComboBox()
w.setSeparator('|')
self.assertEqual(w.separator(), '|')
w.setDefaultText('Select items...')
self.assertEqual(w.defaultText(), 'Select items...')
w.addItems(['One', 'Two', 'Three'])
w.setCheckedItems(['Two'])
self.assertEqual(len(w.checkedItems()), 1)
self.assertEqual(w.checkedItems(), ['Two'])
w.setCheckedItems(['Three'])
self.assertEqual(len(w.checkedItems()), 2)
self.assertEqual(w.checkedItems(), ['Two', 'Three'])
w.setItemCheckState(2, Qt.Unchecked)
self.assertEqual(w.itemCheckState(2), Qt.Unchecked)
def test_ChangedSignals(self):
""" test that signals are correctly emitted when clearing"""
w = qgis.gui.QgsCheckableComboBox()
w.addItems(['One', 'Two', 'Three'])
checkedItemsChanged_spy = QSignalSpy(w.checkedItemsChanged)
w.setCheckedItems(['Two'])
self.assertEqual(len(checkedItemsChanged_spy), 1)
if __name__ == '__main__':
unittest.main()
| gpl-2.0 |
wangtuanjie/airflow | airflow/example_dags/example_python_operator.py | 25 | 1190 | from __future__ import print_function
from builtins import range
from airflow.operators import PythonOperator
from airflow.models import DAG
from datetime import datetime, timedelta
import time
from pprint import pprint
seven_days_ago = datetime.combine(datetime.today() - timedelta(7),
datetime.min.time())
args = {
'owner': 'airflow',
'start_date': seven_days_ago,
}
dag = DAG(dag_id='example_python_operator', default_args=args)
def my_sleeping_function(random_base):
'''This is a function that will run within the DAG execution'''
time.sleep(random_base)
def print_context(ds, **kwargs):
pprint(kwargs)
print(ds)
return 'Whatever you return gets printed in the logs'
run_this = PythonOperator(
task_id='print_the_context',
provide_context=True,
python_callable=print_context,
dag=dag)
for i in range(10):
'''
    Generating 10 sleeping tasks, sleeping from 0 to 9 seconds
respectively
'''
task = PythonOperator(
task_id='sleep_for_'+str(i),
python_callable=my_sleeping_function,
op_kwargs={'random_base': i},
dag=dag)
task.set_upstream(run_this)
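# The callables above are plain Python functions, so they can be exercised
# directly without the scheduler; the 'ds' value and the extra keyword below
# are assumptions used only for this quick local check.
if __name__ == "__main__":
    print(print_context('2015-01-01', conf=None))
    my_sleeping_function(0)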
| apache-2.0 |