repo_name stringlengths 5 100 | path stringlengths 4 375 | copies stringclasses 991 values | size stringlengths 4 7 | content stringlengths 666 1M | license stringclasses 15 values |
|---|---|---|---|---|---|
alfa-addon/addon | plugin.video.alfa/lib/cloudscraper/cf_assistant.py | 1 | 10179 | # -*- coding: utf-8 -*-
# ------------------------------------------------------------------------------
# cloudscaper+alfa_assistant
# ------------------------------------------------------------------------------
import sys
PY3 = False
if sys.version_info[0] >= 3: PY3 = True; unicode = str; unichr = chr; long = int
if PY3:
import urllib.parse as urlparse
from lib import alfaresolver_py3 as alfaresolver
else:
import urlparse
from lib import alfaresolver
import traceback
import xbmc
import time
from .exceptions import CloudflareChallengeError
from lib import alfa_assistant
from core import httptools, scrapertools, filetools, jsontools
from core.item import Item
from platformcode import logger, config, help_window
PATH_BL = filetools.join(config.get_runtime_path(), 'resources', 'cf_assistant_bl.json')
def get_cl(resp, timeout=20, debug=False, extraPostDelay=15, retry=False, blacklist=True, retryIfTimeout=True, **kwargs):
    """Try to solve a Cloudflare v2 challenge for ``resp.url`` by driving the
    external Alfa Assistant Android app (a WebView) and harvesting the
    ``cf_clearance`` cookie it obtains.

    On success the cookie is stored via httptools, ``resp.headers`` is marked
    with ``Server: Alfa`` and the (mutated) ``resp`` is returned.  On failure
    the domain is recorded in the blacklist file and
    ``CloudflareChallengeError`` is raised.

    NOTE(review): nesting below was reconstructed from an indentation-stripped
    dump of this module — verify against upstream before relying on edge cases.
    """
    blacklist_clear = True
    # Proxied URLs are never blacklisted: a failure may be the proxy's fault.
    if 'hideproxy' in resp.url or 'webproxy' in resp.url or kwargs.get('proxies'):
        blacklist_clear = False
        blacklist = False
    # Keep the WebView wait inside a sane window (>= 15s, <= 35s incl. delay).
    if timeout < 15: timeout = 20
    if timeout + extraPostDelay > 35: timeout = 20
    domain_full = urlparse.urlparse(resp.url).netloc
    domain = domain_full
    if blacklist and not retry:
        # Skip domains that failed recently (see check_blacklist()).
        blacklist_clear = check_blacklist(domain_full)
    if blacklist_clear:
        host = config.get_system_platform()[:1]
        # Stats record: [domain, 'CF2,<app_ver>,<webview_ver>,<host+android>,<result>']
        freequent_data = [domain, 'CF2,0.0.0,0,%s0,NoApp' % host]
        check_assistant = alfa_assistant.open_alfa_assistant(getWebViewInfo=True, retry=retry)
        if not isinstance(check_assistant, dict) and retry:
            # Unusable answer from the app: restart it and probe again.
            alfa_assistant.close_alfa_assistant()
            time.sleep(2)
            check_assistant = alfa_assistant.open_alfa_assistant(getWebViewInfo=True, retry=True)
            if not check_assistant:
                time.sleep(10)
                check_assistant = alfa_assistant.get_generic_call('getWebViewInfo', timeout=2, alfa_s=True)
        if check_assistant and isinstance(check_assistant, dict):
            # Notify the user when a newer Assistant version is available
            # (compare dotted versions component by component).
            if check_assistant.get('assistantLatestVersion') and check_assistant.get('assistantVersion'):
                installed_version = check_assistant['assistantVersion'].split('.')
                available_version = check_assistant['assistantLatestVersion'].split('.')
                newer = False
                for i, ver in enumerate(available_version):
                    if int(ver) > int(installed_version[i]):
                        newer = True
                        break
                    if int(ver) < int(installed_version[i]):
                        break
                if newer:
                    help_window.show_info('cf_2_02', wait=False)
            ua = get_ua(check_assistant)
            try:
                vers = int(scrapertools.find_single_match(ua, r"Android\s*(\d+)"))
            except:
                vers = 0
            wvbVersion = check_assistant.get('wvbVersion', '0.0.0').split('.')[0]
            if len(wvbVersion) > 3: wvbVersion = wvbVersion[:2]
            freequent_data[1] = 'CF2,%s,%s,%s%s,' % (check_assistant.get('assistantVersion', '0.0.0'), wvbVersion, host, vers)
            if vers:
                # Real Android UA detected: use it for this response's headers
                # and let the WebView keep its own UA (ua=None).
                dan = {'User-Agent': ua}
                resp.headers.update(dict(dan))
                ua = None
            else:
                ua = httptools.get_user_agent()
            logger.debug("UserAgent: %s || Android Vrs: %s" % (ua, vers))
            jscode = get_jscode(1, 'KEYCODE_ENTER', 1)
            # '<scheme>://<host>|cf_clearance' tells the app to return as soon
            # as the cf_clearance cookie shows up for the site.
            url_cf = scrapertools.find_single_match(resp.url, '(http.*\:\/\/(?:www\S*.)?\w+\.\w+(?:\.\w+)?)(?:\/)?') + '|cf_clearance'
            data_assistant = alfa_assistant.get_urls_by_page_finished(resp.url, timeout=timeout, getCookies=True, userAgent=ua,
                                                                      disableCache=True, debug=debug, jsCode=jscode,
                                                                      extraPostDelay=extraPostDelay, clearWebCache=True,
                                                                      removeAllCookies=True, returnWhenCookieNameFound=url_cf,
                                                                      retryIfTimeout=retryIfTimeout
                                                                      )
            logger.debug("data assistant: %s" % data_assistant)
            domain_ = domain
            split_lst = domain.split(".")
            # Reduce 'sub.example.com' to '.example.com' so the cookie covers
            # all subdomains.
            if len(split_lst) > 2:
                domain = domain.replace(split_lst[0], "")
            if not domain.startswith('.'):
                domain = "." + domain
            get_ua(data_assistant)  # persist the UA reported by the page load
            if isinstance(data_assistant, dict) and data_assistant.get("cookies", None):
                logger.debug("Lista cookies: %s" % data_assistant.get("cookies", []))
                for cookie in data_assistant["cookies"]:
                    cookieslist = cookie.get("cookiesList", None)
                    val = scrapertools.find_single_match(cookieslist, 'cf_clearance=([A-z0-9_-]+)')
                    dom = cookie.get("urls", None)
                    logger.debug("dominios: %s" % dom[0])
                    if 'cf_clearance' in cookieslist and val:
                        dict_cookie = {'domain': domain,
                                       'name': 'cf_clearance',
                                       'value': val}
                        if domain_ in dom[0]:
                            httptools.set_cookies(dict_cookie)
                            rin = {'Server': 'Alfa'}
                            resp.headers.update(dict(rin))
                            logger.debug("cf_clearence=%s" % val)
                            if not retry:
                                freequent_data[1] += 'OK'
                            else:
                                freequent_data[1] += 'OK_R'
                            freequency(freequent_data)
                            return resp
                    else:
                        logger.error("No cf_clearance")
            else:
                freequent_data[1] += 'NO-CFC'
        else:
            freequent_data[1] += 'ERR'
            logger.error("No Cookies o Error en conexión con Alfa Assistant")
        if not retry:
            # First attempt failed: reset the stored UA and retry once.
            config.set_setting('cf_assistant_ua', '')
            logger.debug("No se obtuvieron resultados, reintentando...")
            return get_cl(resp, timeout=timeout-5, extraPostDelay=extraPostDelay, \
                          retry=True, blacklist=True, retryIfTimeout=False, **kwargs)
        elif host == 'a':
            help_window.show_info('cf_2_01')
        freequency(freequent_data)
        # Record the failure so the domain is skipped until the entry expires.
        if filetools.exists(PATH_BL):
            bl_data = jsontools.load(filetools.read(PATH_BL))
        else:
            bl_data = {}
        bl_data[domain_full] = time.time()
        filetools.write(PATH_BL, jsontools.dump(bl_data))
    msg = 'Detected a Cloudflare version 2 Captcha challenge,\
This feature is not available in the opensource (free) version.'
    resp.status_code = msg
    raise CloudflareChallengeError(msg)
def get_ua(data_assistant):
    """Extract the user agent reported by the Assistant and persist it.

    Returns the literal 'Default' (and stores nothing) when the payload is
    missing or malformed; also normalizes to 'Default' when the reported UA
    equals the generic one from httptools.
    """
    if not isinstance(data_assistant, dict) or not data_assistant:
        return 'Default'
    user_agent = data_assistant.get("userAgent", 'Default')
    if user_agent == httptools.get_user_agent():
        user_agent = 'Default'
    config.set_setting('cf_assistant_ua', user_agent)
    return user_agent
def get_jscode(count, key, n_iframe, timeout=3):
    """Render the JavaScript snippet injected into the Assistant WebView.

    The snippet focuses iframe number *n_iframe*, presses TAB *count* times
    before sending *key* (a KEYCODE_* constant name), and after *timeout*
    seconds navigates the WebView back to its main URL.
    """
    template = '''((() => {
    const KEYCODE_ENTER = 'KEYCODE_ENTER';
    const KEYCODE_TAB = 'KEYCODE_TAB';
    function sendKeyAfterNTabs(count, key) {
        try {
            for (var i = 0; i <= count; i++) {
                if (i > 0) {
                    alfaAssistantAndroidPI.sendKey(KEYCODE_TAB);
                }
                if (i == count) {
                    alfaAssistantAndroidPI.sendKey(key);
                    break;
                }
            }
        } catch (e) {
            console.error('##Error sending key ' + key, e);
        };
    };
    function setFocusToIframeNumber(nmb) {
        document.querySelectorAll('iframe')[nmb - 1].focus();
    }
    try {
        setFocusToIframeNumber(%s);
        sendKeyAfterNTabs(%s, %s);
        setTimeout(function() {
            window.location.href = alfaAssistantAndroidPI.getMainURL();
        }, %s);
    }
    catch(e){
        console.error('##Error focus ', e);
    };
}))();
'''
    # The JS body contains no other '%' characters, so %-substitution is safe.
    return template % (n_iframe, count, key, timeout * 1000)
def freequency(freequent_data):
    """Report a usage-statistics record to the Alfa resolver in a background
    thread (fire-and-forget; the thread's result is ignored).

    Bug fix: the original assigned ``ret = True`` but never returned it (the
    function always returned None), and its bare ``except:`` also swallowed
    SystemExit/KeyboardInterrupt.

    :param freequent_data: ``[domain, stats_string]`` record built by get_cl().
    :return: True if the reporting thread was started, False on error.
    """
    import threading
    try:
        threading.Thread(target=alfaresolver.frequency_count, args=(Item(), [], freequent_data)).start()
        return True
    except Exception:
        # Telemetry must never break scraping: log and carry on.
        logger.error(traceback.format_exc())
        return False
def check_blacklist(domain):
    """Return True when *domain* may be retried, False while it is blacklisted.

    The blacklist file (PATH_BL) maps domain -> epoch time of the last failed
    attempt; entries older than the configured expiration are purged here as a
    side effect.  On any read/parse error the file is removed and the function
    fails open (returns True).
    """
    res = True
    if not filetools.exists(PATH_BL):
        return res
    try:
        bl_data = jsontools.load(filetools.read(PATH_BL))
        bl_data_clean = bl_data.copy()
        # Expiration is configured in minutes; default/fallback is 30 minutes.
        expiration = config.get_setting('cf_assistant_bl_expiration', default=30) * 60
        if not expiration:
            config.set_setting('cf_assistant_bl_expiration', 30)
            expiration = 30 * 60
        time_today = time.time()
        if bl_data:
            # Purge expired records: iterate the copy, mutate the original,
            # persisting after each deletion.
            for domain_reg, time_rec in list(bl_data_clean.items()):
                if time_today > time_rec + expiration:
                    del bl_data[domain_reg]
                    filetools.write(PATH_BL, jsontools.dump(bl_data))
            for domain_reg, time_rec in list(bl_data.items()):
                if domain in domain_reg:
                    res = False
                    break
            else:
                # for/else: no surviving blacklist entry matched this domain.
                res = True
    except:
        # Corrupt blacklist file: drop it and fail open.
        logger.error(traceback.format_exc())
        filetools.remove(PATH_BL)
        res = True
    return res
| gpl-3.0 |
laurenrevere/osf.io | addons/dataverse/serializer.py | 32 | 3736 | from addons.base.serializer import OAuthAddonSerializer
from addons.dataverse import client
from addons.dataverse.settings import DEFAULT_HOSTS
from website.util import api_url_for, web_url_for
class DataverseSerializer(OAuthAddonSerializer):
    """Serializer for the Dataverse addon.

    Extends the generic OAuth addon serializer with host information,
    node-level API/web endpoints and the saved dataverse/dataset state.
    """

    addon_short_name = 'dataverse'

    REQUIRED_URLS = []

    # Include host information with more informative labels / formatting
    def serialize_account(self, external_account):
        """Serialize *external_account*, adding its host and an https URL."""
        ret = super(DataverseSerializer, self).serialize_account(external_account)
        # For Dataverse the OAuth key field stores the host name.
        host = external_account.oauth_key
        ret.update({
            'host': host,
            'host_url': 'https://{0}'.format(host),
        })

        return ret

    @property
    def credentials_owner(self):
        # The user whose settings hold the credentials for this node.
        return self.node_settings.user_settings.owner

    @property
    def serialized_urls(self):
        """URLs common to all addons plus the Dataverse-specific ones."""
        external_account = self.node_settings.external_account
        ret = {
            'settings': web_url_for('user_addons'),  # TODO: Is this needed?
        }
        # Dataverse users do not currently have profile URLs
        if external_account and external_account.profile_url:
            ret['owner'] = external_account.profile_url
        addon_urls = self.addon_serialized_urls
        # Make sure developer returns set of needed urls
        for url in self.REQUIRED_URLS:
            assert url in addon_urls, "addon_serilized_urls must include key '{0}'".format(url)
        ret.update(addon_urls)
        return ret

    @property
    def addon_serialized_urls(self):
        """Dataverse-specific API/web endpoints for the owning node."""
        node = self.node_settings.owner
        external_account = self.node_settings.external_account
        host = external_account.oauth_key if external_account else ''
        return {
            'create': api_url_for('dataverse_add_user_account'),
            'set': node.api_url_for('dataverse_set_config'),
            'importAuth': node.api_url_for('dataverse_import_auth'),
            'deauthorize': node.api_url_for('dataverse_deauthorize_node'),
            'getDatasets': node.api_url_for('dataverse_get_datasets'),
            'datasetPrefix': 'https://doi.org/',
            'dataversePrefix': 'http://{0}/dataverse/'.format(host),
            'accounts': api_url_for('dataverse_account_list'),
        }

    @property
    def serialized_node_settings(self):
        """Node settings plus connection state and saved dataverse/dataset."""
        result = super(DataverseSerializer, self).serialized_node_settings
        result['hosts'] = DEFAULT_HOSTS

        # Update with Dataverse specific fields
        if self.node_settings.has_auth:
            external_account = self.node_settings.external_account
            dataverse_host = external_account.oauth_key
            # NOTE(review): performs a live API call on every serialization —
            # confirm there is no caching requirement.
            connection = client.connect_from_settings(self.node_settings)
            dataverses = client.get_dataverses(connection)
            result.update({
                'dataverseHost': dataverse_host,
                'connected': connection is not None,
                'dataverses': [
                    {'title': dataverse.title, 'alias': dataverse.alias}
                    for dataverse in dataverses
                ],
                'savedDataverse': {
                    'title': self.node_settings.dataverse,
                    'alias': self.node_settings.dataverse_alias,
                },
                'savedDataset': {
                    'title': self.node_settings.dataset,
                    'doi': self.node_settings.dataset_doi,
                }
            })

        return result

    def serialize_settings(self, node_settings, user):
        """Bind *node_settings*/*user* if not yet set, then serialize."""
        if not self.node_settings:
            self.node_settings = node_settings
        if not self.user_settings:
            self.user_settings = user.get_addon(self.addon_short_name)
        return self.serialized_node_settings
| apache-2.0 |
GoogleCloudPlatform/training-data-analyst | courses/machine_learning/deepdive2/end_to_end_ml/labs/serving/application/lib/werkzeug/security.py | 11 | 8106 | # -*- coding: utf-8 -*-
"""
werkzeug.security
~~~~~~~~~~~~~~~~~
Security related helpers such as secure password hashing tools.
:copyright: 2007 Pallets
:license: BSD-3-Clause
"""
import codecs
import hashlib
import hmac
import os
import posixpath
from random import SystemRandom
from struct import Struct
from ._compat import izip
from ._compat import PY2
from ._compat import range_type
from ._compat import text_type
from ._compat import to_bytes
from ._compat import to_native
SALT_CHARS = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"
DEFAULT_PBKDF2_ITERATIONS = 150000
_pack_int = Struct(">I").pack
_builtin_safe_str_cmp = getattr(hmac, "compare_digest", None)
_sys_rng = SystemRandom()
_os_alt_seps = list(
sep for sep in [os.path.sep, os.path.altsep] if sep not in (None, "/")
)
def pbkdf2_hex(
    data, salt, iterations=DEFAULT_PBKDF2_ITERATIONS, keylen=None, hashfunc=None
):
    """Like :func:`pbkdf2_bin`, but returns a hex-encoded string.

    .. versionadded:: 0.9

    :param data: the data to derive.
    :param salt: the salt for the derivation.
    :param iterations: the number of iterations.
    :param keylen: the length of the resulting key. If not provided,
                   the digest size will be used.
    :param hashfunc: the hash function to use. Either the string name of
                     a known hash function, or a function from the
                     hashlib module. Defaults to sha256.
    """
    raw_digest = pbkdf2_bin(data, salt, iterations, keylen, hashfunc)
    return to_native(codecs.encode(raw_digest, "hex_codec"))
def pbkdf2_bin(
    data, salt, iterations=DEFAULT_PBKDF2_ITERATIONS, keylen=None, hashfunc=None
):
    """Returns a binary digest for the PBKDF2 hash algorithm of `data`
    with the given `salt`. It iterates `iterations` times and produces a
    key of `keylen` bytes. By default, SHA-256 is used as hash function;
    a different hashlib `hashfunc` can be provided.

    .. versionadded:: 0.9

    :param data: the data to derive.
    :param salt: the salt for the derivation.
    :param iterations: the number of iterations.
    :param keylen: the length of the resulting key. If not provided
                   the digest size will be used.
    :param hashfunc: the hash function to use, either as a string name or
                     as a function from the hashlib module.
    """
    hashfunc = hashfunc or "sha256"
    if callable(hashfunc):
        # A hashlib constructor was passed: probe an instance for its name.
        hash_name = getattr(hashfunc(), "name", None)
    else:
        hash_name = hashfunc
    return hashlib.pbkdf2_hmac(
        hash_name, to_bytes(data), to_bytes(salt), iterations, keylen
    )
def safe_str_cmp(a, b):
    """This function compares strings in somewhat constant time. This
    requires that the length of at least one string is known in advance.

    Returns `True` if the two strings are equal, or `False` if they are not.

    .. versionadded:: 0.7
    """
    if isinstance(a, text_type):
        a = a.encode("utf-8")
    if isinstance(b, text_type):
        b = b.encode("utf-8")

    # Prefer the C implementation (hmac.compare_digest) when available.
    if _builtin_safe_str_cmp is not None:
        return _builtin_safe_str_cmp(a, b)

    if len(a) != len(b):
        return False

    # Pure-Python fallback: accumulate XOR differences without branching.
    diff = 0
    for x, y in izip(a, b):
        diff |= (ord(x) ^ ord(y)) if PY2 else (x ^ y)
    return diff == 0
def gen_salt(length):
    """Generate a random string of SALT_CHARS with specified ``length``."""
    if length <= 0:
        raise ValueError("Salt length must be positive")
    chars = [_sys_rng.choice(SALT_CHARS) for _ in range_type(length)]
    return "".join(chars)
def _hash_internal(method, salt, password):
    """Internal password hash helper.  Supports plaintext without salt,
    unsalted and salted passwords.  In case salted passwords are used
    hmac is used.

    Returns a ``(hexdigest, actual_method)`` tuple, where *actual_method*
    is self-describing (e.g. ``pbkdf2:sha256:150000``).
    """
    if method == "plain":
        return password, method

    if isinstance(password, text_type):
        password = password.encode("utf-8")

    if method.startswith("pbkdf2:"):
        # Method spec looks like 'pbkdf2:<digest>[:<iterations>]'.
        args = method[7:].split(":")
        if len(args) not in (1, 2):
            raise ValueError("Invalid number of arguments for PBKDF2")
        method = args.pop(0)
        iterations = args and int(args[0] or 0) or DEFAULT_PBKDF2_ITERATIONS
        is_pbkdf2 = True
        # Record the effective iteration count so the stored hash can be
        # verified later without external configuration.
        actual_method = "pbkdf2:%s:%d" % (method, iterations)
    else:
        is_pbkdf2 = False
        actual_method = method

    if is_pbkdf2:
        if not salt:
            raise ValueError("Salt is required for PBKDF2")
        rv = pbkdf2_hex(password, salt, iterations, hashfunc=method)
    elif salt:
        # Salted non-PBKDF2 hashes use HMAC with the salt as the key.
        if isinstance(salt, text_type):
            salt = salt.encode("utf-8")
        mac = _create_mac(salt, password, method)
        rv = mac.hexdigest()
    else:
        # Unsalted legacy hash (e.g. plain md5/sha1).
        rv = hashlib.new(method, password).hexdigest()
    return rv, actual_method
def _create_mac(key, msg, method):
    """Create an HMAC object for *method*, which is either a callable digest
    constructor or a hashlib algorithm name."""
    if callable(method):
        return hmac.HMAC(key, msg, method)

    def hashfunc(d=b""):
        return hashlib.new(method, d)

    # Python 2.7 used ``hasattr(digestmod, '__call__')``
    # to detect if hashfunc is callable
    hashfunc.__call__ = hashfunc
    return hmac.HMAC(key, msg, hashfunc)
def generate_password_hash(password, method="pbkdf2:sha256", salt_length=8):
    """Hash a password with the given method and salt with a string of
    the given length. The format of the string returned includes the method
    that was used so that :func:`check_password_hash` can check the hash.

    The format for the hashed string looks like this::

        method$salt$hash

    This method can **not** generate unsalted passwords but it is possible
    to set param method='plain' in order to enforce plaintext passwords.
    If a salt is used, hmac is used internally to salt the password.

    If PBKDF2 is wanted it can be enabled by setting the method to
    ``pbkdf2:method:iterations`` where iterations is optional::

        pbkdf2:sha256:80000$salt$hash
        pbkdf2:sha256$salt$hash

    :param password: the password to hash.
    :param method: the hash method to use (one that hashlib supports). Can
                   optionally be in the format ``pbkdf2:<method>[:iterations]``
                   to enable PBKDF2.
    :param salt_length: the length of the salt in letters.
    """
    # 'plain' is the only method that takes no salt.
    if method == "plain":
        salt = ""
    else:
        salt = gen_salt(salt_length)
    digest, actual_method = _hash_internal(method, salt, password)
    return "%s$%s$%s" % (actual_method, salt, digest)
def check_password_hash(pwhash, password):
    """check a password against a given salted and hashed password value.
    In order to support unsalted legacy passwords this method supports
    plain text passwords, md5 and sha1 hashes (both salted and unsalted).

    Returns `True` if the password matched, `False` otherwise.

    :param pwhash: a hashed string like returned by
                   :func:`generate_password_hash`.
    :param password: the plaintext password to compare against the hash.
    """
    try:
        # A valid hash has at least two '$' separators: method$salt$hash.
        method, salt, hashval = pwhash.split("$", 2)
    except ValueError:
        return False
    return safe_str_cmp(_hash_internal(method, salt, password)[0], hashval)
def safe_join(directory, *pathnames):
    """Safely join zero or more untrusted path components to a base
    directory to avoid escaping the base directory.

    :param directory: The trusted base directory.
    :param pathnames: The untrusted path components relative to the
                      base directory.
    :return: A safe path, otherwise ``None``.
    """
    parts = [directory]
    for component in pathnames:
        if component == "":
            # Empty components are simply dropped.
            continue
        component = posixpath.normpath(component)
        # Reject alternative platform separators, absolute paths and any
        # attempt to climb out of the base directory.
        unsafe = (
            any(sep in component for sep in _os_alt_seps)
            or os.path.isabs(component)
            or component == ".."
            or component.startswith("../")
        )
        if unsafe:
            return None
        parts.append(component)
    return posixpath.join(*parts)
| apache-2.0 |
noba3/KoTos | addons/script.module.youtube.dl/lib/youtube_dl/extractor/youku.py | 4 | 8214 | # coding: utf-8
from __future__ import unicode_literals
import base64
from .common import InfoExtractor
from ..compat import (
compat_urllib_parse,
compat_ord,
)
from ..utils import (
ExtractorError,
sanitized_Request,
)
class YoukuIE(InfoExtractor):
    # Extractor for Youku (优酷) videos.  Streams come split into segments;
    # each segment URL must be signed with a token derived via an RC4-style
    # keystream (see yk_t() in construct_video_urls()).
    IE_NAME = 'youku'
    IE_DESC = '优酷'
    _VALID_URL = r'''(?x)
        (?:
            http://(?:v|player)\.youku\.com/(?:v_show/id_|player\.php/sid/)|
            youku:)
        (?P<id>[A-Za-z0-9]+)(?:\.html|/v\.swf|)
    '''

    _TESTS = [{
        # MD5 is unstable
        'url': 'http://v.youku.com/v_show/id_XMTc1ODE5Njcy.html',
        'info_dict': {
            'id': 'XMTc1ODE5Njcy_part1',
            'title': '★Smile﹗♡ Git Fresh -Booty Music舞蹈.',
            'ext': 'flv'
        }
    }, {
        'url': 'http://player.youku.com/player.php/sid/XNDgyMDQ2NTQw/v.swf',
        'only_matching': True,
    }, {
        'url': 'http://v.youku.com/v_show/id_XODgxNjg1Mzk2_ev_1.html',
        'info_dict': {
            'id': 'XODgxNjg1Mzk2',
            'title': '武媚娘传奇 85',
        },
        'playlist_count': 11,
        'skip': 'Available in China only',
    }, {
        'url': 'http://v.youku.com/v_show/id_XMTI1OTczNDM5Mg==.html',
        'info_dict': {
            'id': 'XMTI1OTczNDM5Mg',
            'title': '花千骨 04',
        },
        'playlist_count': 13,
    }, {
        'url': 'http://v.youku.com/v_show/id_XNjA1NzA2Njgw.html',
        'note': 'Video protected with password',
        'info_dict': {
            'id': 'XNjA1NzA2Njgw',
            'title': '邢義田复旦讲座之想象中的胡人—从“左衽孔子”说起',
        },
        'playlist_count': 19,
        'params': {
            'videopassword': '100600',
        },
    }]

    def construct_video_urls(self, data):
        """Build ``{stream_type: [segment urls]}`` from the play/get.json
        payload in *data*."""
        # get sid, token
        def yk_t(s1, s2):
            # RC4 key scheduling + PRGA: decode s2 using key s1.
            ls = list(range(256))
            t = 0
            for i in range(256):
                t = (t + ls[i] + compat_ord(s1[i % len(s1)])) % 256
                ls[i], ls[t] = ls[t], ls[i]
            s = bytearray()
            x, y = 0, 0
            for i in range(len(s2)):
                y = (y + 1) % 256
                x = (x + ls[y]) % 256
                ls[x], ls[y] = ls[y], ls[x]
                s.append(compat_ord(s2[i]) ^ ls[(ls[x] + ls[y]) % 256])
            return bytes(s)

        # The decrypted security string has the form '<sid>_<token>'.
        sid, token = yk_t(
            b'becaf9be', base64.b64decode(data['security']['encrypt_string'].encode('ascii'))
        ).decode('ascii').split('_')

        # get oip
        oip = data['security']['ip']

        fileid_dict = {}
        for stream in data['stream']:
            format = stream.get('stream_type')
            fileid = stream['stream_fileid']
            fileid_dict[format] = fileid

        def get_fileid(format, n):
            # The segment number is spliced into the stream fileid as two
            # uppercase hex digits (positions 8-9).
            number = hex(int(str(n), 10))[2:].upper()
            if len(number) == 1:
                number = '0' + number
            streamfileids = fileid_dict[format]
            fileid = streamfileids[0:8] + number + streamfileids[10:]
            return fileid

        # get ep
        def generate_ep(format, n):
            # Per-segment signature over '<sid>_<fileid>_<token>'.
            fileid = get_fileid(format, n)
            ep_t = yk_t(
                b'bf7e5f01',
                ('%s_%s_%s' % (sid, fileid, token)).encode('ascii')
            )
            ep = base64.b64encode(ep_t).decode('ascii')
            return ep

        # generate video_urls
        video_urls_dict = {}
        for stream in data['stream']:
            format = stream.get('stream_type')
            video_urls = []
            for dt in stream['segs']:
                n = str(stream['segs'].index(dt))
                param = {
                    'K': dt['key'],
                    'hd': self.get_hd(format),
                    'myp': 0,
                    'ypp': 0,
                    'ctype': 12,
                    'ev': 1,
                    'token': token,
                    'oip': oip,
                    'ep': generate_ep(format, n)
                }
                video_url = \
                    'http://k.youku.com/player/getFlvPath/' + \
                    'sid/' + sid + \
                    '_00' + \
                    '/st/' + self.parse_ext_l(format) + \
                    '/fileid/' + get_fileid(format, n) + '?' + \
                    compat_urllib_parse.urlencode(param)
                video_urls.append(video_url)
            video_urls_dict[format] = video_urls

        return video_urls_dict

    def get_hd(self, fm):
        """Map a stream_type to the 'hd' URL quality parameter."""
        hd_id_dict = {
            '3gp': '0',
            '3gphd': '1',
            'flv': '0',
            'flvhd': '0',
            'mp4': '1',
            'mp4hd': '1',
            'mp4hd2': '1',
            'mp4hd3': '1',
            'hd2': '2',
            'hd3': '3',
        }
        return hd_id_dict[fm]

    def parse_ext_l(self, fm):
        """Map a stream_type to the container extension used in URLs."""
        ext_dict = {
            '3gp': 'flv',
            '3gphd': 'mp4',
            'flv': 'flv',
            'flvhd': 'flv',
            'mp4': 'mp4',
            'mp4hd': 'mp4',
            'mp4hd2': 'flv',
            'mp4hd3': 'flv',
            'hd2': 'flv',
            'hd3': 'flv',
        }
        return ext_dict[fm]

    def get_format_name(self, fm):
        """Map a stream_type to the youtube-dl format id (h1 best .. h6 worst)."""
        _dict = {
            '3gp': 'h6',
            '3gphd': 'h5',
            'flv': 'h4',
            'flvhd': 'h4',
            'mp4': 'h3',
            'mp4hd': 'h3',
            'mp4hd2': 'h4',
            'mp4hd3': 'h4',
            'hd2': 'h2',
            'hd3': 'h1',
        }
        return _dict[fm]

    def _real_extract(self, url):
        video_id = self._match_id(url)

        def retrieve_data(req_url, note):
            headers = {
                'Referer': req_url,
            }
            self._set_cookie('youku.com', 'xreferrer', 'http://www.youku.com')
            req = sanitized_Request(req_url, headers=headers)

            cn_verification_proxy = self._downloader.params.get('cn_verification_proxy')
            if cn_verification_proxy:
                req.add_header('Ytdl-request-proxy', cn_verification_proxy)

            raw_data = self._download_json(req, video_id, note=note)
            return raw_data['data']

        video_password = self._downloader.params.get('videopassword', None)

        # request basic data
        basic_data_url = "http://play.youku.com/play/get.json?vid=%s&ct=12" % video_id
        if video_password:
            basic_data_url += '&pwd=%s' % video_password

        data = retrieve_data(basic_data_url, 'Downloading JSON metadata')

        error = data.get('error')
        if error:
            error_note = error.get('note')
            # Geo-restriction message from the server (Chinese text).
            if error_note is not None and '因版权原因无法观看此视频' in error_note:
                raise ExtractorError(
                    'Youku said: Sorry, this video is available in China only', expected=True)
            else:
                msg = 'Youku server reported error %i' % error.get('code')
                if error_note is not None:
                    msg += ': ' + error_note
                raise ExtractorError(msg)

        # get video title
        title = data['video']['title']

        # generate video_urls_dict
        video_urls_dict = self.construct_video_urls(data)

        # construct info
        entries = [{
            'id': '%s_part%d' % (video_id, i + 1),
            'title': title,
            'formats': [],
            # some formats are not available for all parts, we have to detect
            # which one has all
        } for i in range(max(len(v.get('segs')) for v in data['stream']))]
        for stream in data['stream']:
            fm = stream.get('stream_type')
            video_urls = video_urls_dict[fm]
            for video_url, seg, entry in zip(video_urls, stream['segs'], entries):
                entry['formats'].append({
                    'url': video_url,
                    'format_id': self.get_format_name(fm),
                    'ext': self.parse_ext_l(fm),
                    'filesize': int(seg['size']),
                })

        return {
            '_type': 'multi_video',
            'id': video_id,
            'title': title,
            'entries': entries,
        }
| gpl-2.0 |
zengenti/ansible | lib/ansible/modules/cloud/ovirt/ovirt_snapshots_facts.py | 8 | 4358 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2016 Red Hat, Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = '''
---
module: ovirt_snapshots_facts
short_description: Retrieve facts about one or more oVirt virtual machine snapshots
author: "Ondra Machacek (@machacekondra)"
version_added: "2.3"
description:
- "Retrieve facts about one or more oVirt virtual machine snapshots."
notes:
- "This module creates a new top-level C(ovirt_snapshots) fact, which
contains a list of snapshots."
options:
vm:
description:
- "Name of the VM with snapshot."
required: true
description:
description:
- "Description of the snapshot, can be used as glob expression."
snapshot_id:
description:
- "Id of the snaphost we want to retrieve facts about."
extends_documentation_fragment: ovirt_facts
'''
EXAMPLES = '''
# Examples don't contain auth parameter for simplicity,
# look at ovirt_auth module to see how to reuse authentication:
# Gather facts about all snapshots which description start with C(update) for VM named C(centos7):
- ovirt_snapshots_facts:
vm: centos7
description: update*
- debug:
var: ovirt_snapshots
'''
RETURN = '''
ovirt_snapshots:
description: "List of dictionaries describing the snapshot. Snapshot attribtues are mapped to dictionary keys,
all snapshot attributes can be found at following url: https://ovirt.example.com/ovirt-engine/api/model#types/snapshot."
returned: On success.
type: list
'''
import fnmatch
import traceback
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.ovirt import (
check_sdk,
create_connection,
get_dict_of_struct,
ovirt_facts_full_argument_spec,
search_by_name,
)
def main():
    """Entry point: collect facts about the snapshots of one oVirt VM and
    expose them as the ``ovirt_snapshots`` Ansible fact.

    Bug fix: the original ``finally`` block referenced ``connection`` and
    ``auth`` unconditionally, raising NameError (masking the real error) when
    ``module.params.pop('auth')`` or ``create_connection()`` failed.
    """
    argument_spec = ovirt_facts_full_argument_spec(
        vm=dict(required=True),
        description=dict(default=None),
        snapshot_id=dict(default=None),
    )
    module = AnsibleModule(argument_spec)
    check_sdk(module)

    auth = None
    connection = None
    try:
        auth = module.params.pop('auth')
        connection = create_connection(auth)
        vms_service = connection.system_service().vms_service()
        vm_name = module.params['vm']
        vm = search_by_name(vms_service, vm_name)
        if vm is None:
            raise Exception("VM '%s' was not found." % vm_name)
        snapshots_service = vms_service.service(vm.id).snapshots_service()
        if module.params['description']:
            # 'description' acts as a glob pattern (fnmatch semantics).
            snapshots = [
                e for e in snapshots_service.list()
                if fnmatch.fnmatch(e.description, module.params['description'])
            ]
        elif module.params['snapshot_id']:
            snapshots = [
                snapshots_service.snapshot_service(module.params['snapshot_id']).get()
            ]
        else:
            snapshots = snapshots_service.list()
        module.exit_json(
            changed=False,
            ansible_facts=dict(
                ovirt_snapshots=[
                    get_dict_of_struct(
                        struct=c,
                        connection=connection,
                        fetch_nested=module.params.get('fetch_nested'),
                        attributes=module.params.get('nested_attributes'),
                    ) for c in snapshots
                ],
            ),
        )
    except Exception as e:
        module.fail_json(msg=str(e), exception=traceback.format_exc())
    finally:
        # Only close (and log out) a connection we actually opened; log out
        # only when we created the session ourselves (no token supplied).
        if connection is not None:
            connection.close(logout=auth.get('token') is None)
if __name__ == '__main__':
main()
| gpl-3.0 |
aronasorman/kolibri | kolibri/logger/migrations/0001_initial_redone.py | 7 | 5172 | # -*- coding: utf-8 -*-
# Generated by Django 1.9.7 on 2016-08-09 17:25
from __future__ import unicode_literals
import django.core.validators
import django.db.models.deletion
import kolibri.content.models
from django.db import migrations, models
class Migration(migrations.Migration):
    # Initial (squashed/redone) migration for the logger app: creates the
    # four usage-log models.  Auto-generated by Django 1.9.7 — do not edit
    # field definitions by hand.

    initial = True

    dependencies = [
        ('kolibriauth', '0001_initial_redone'),
    ]

    operations = [
        # 1-5 star feedback a user leaves for a piece of content.
        migrations.CreateModel(
            name='ContentRatingLog',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('content_id', kolibri.content.models.UUIDField(db_index=True)),
                ('channel_id', kolibri.content.models.UUIDField()),
                ('quality', models.IntegerField(blank=True, null=True, validators=[django.core.validators.MinValueValidator(1), django.core.validators.MaxValueValidator(5)])),
                ('ease', models.IntegerField(blank=True, null=True, validators=[django.core.validators.MinValueValidator(1), django.core.validators.MaxValueValidator(5)])),
                ('learning', models.IntegerField(blank=True, null=True, validators=[django.core.validators.MinValueValidator(1), django.core.validators.MaxValueValidator(5)])),
                ('feedback', models.TextField(blank=True)),
                ('dataset', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='kolibriauth.FacilityDataset')),
                ('user', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='kolibriauth.FacilityUser')),
            ],
            options={
                'abstract': False,
            },
        ),
        # One interaction session with a content item (user may be anonymous).
        migrations.CreateModel(
            name='ContentSessionLog',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('content_id', kolibri.content.models.UUIDField(db_index=True)),
                ('channel_id', kolibri.content.models.UUIDField()),
                ('start_timestamp', models.DateTimeField()),
                ('end_timestamp', models.DateTimeField(blank=True, null=True)),
                ('time_spent', models.FloatField(default=0.0, help_text='(in seconds)', validators=[django.core.validators.MinValueValidator(0)])),
                ('progress', models.FloatField(default=0, validators=[django.core.validators.MinValueValidator(0)])),
                ('kind', models.CharField(max_length=200)),
                ('extra_fields', models.TextField(default='{}')),
                ('dataset', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='kolibriauth.FacilityDataset')),
                ('user', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='kolibriauth.FacilityUser')),
            ],
            options={
                'abstract': False,
            },
        ),
        # Aggregate progress for a user on a content item; progress capped at 1.
        migrations.CreateModel(
            name='ContentSummaryLog',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('content_id', kolibri.content.models.UUIDField(db_index=True)),
                ('channel_id', kolibri.content.models.UUIDField()),
                ('start_timestamp', models.DateTimeField()),
                ('end_timestamp', models.DateTimeField(blank=True, null=True)),
                ('completion_timestamp', models.DateTimeField(blank=True, null=True)),
                ('time_spent', models.FloatField(default=0.0, help_text='(in seconds)', validators=[django.core.validators.MinValueValidator(0)])),
                ('progress', models.FloatField(default=0, validators=[django.core.validators.MinValueValidator(0), django.core.validators.MaxValueValidator(1)])),
                ('kind', models.CharField(max_length=200)),
                ('extra_fields', models.TextField(default='{}')),
                ('dataset', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='kolibriauth.FacilityDataset')),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='kolibriauth.FacilityUser')),
            ],
            options={
                'abstract': False,
            },
        ),
        # One login session: channels visited and pages viewed.
        migrations.CreateModel(
            name='UserSessionLog',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('channels', models.TextField(blank=True)),
                ('start_timestamp', models.DateTimeField(auto_now_add=True)),
                ('completion_timestamp', models.DateTimeField(blank=True, null=True)),
                ('pages', models.TextField(blank=True)),
                ('dataset', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='kolibriauth.FacilityDataset')),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='kolibriauth.FacilityUser')),
            ],
            options={
                'abstract': False,
            },
        ),
    ]
| mit |
takeshineshiro/django | tests/utils_tests/test_crypto.py | 447 | 4581 | from __future__ import unicode_literals
import binascii
import hashlib
import unittest
from django.utils.crypto import constant_time_compare, pbkdf2
class TestUtilsCryptoMisc(unittest.TestCase):
    """Tests for miscellaneous helpers in django.utils.crypto."""

    def test_constant_time_compare(self):
        # It's hard to test for constant time, just test the result.
        self.assertTrue(constant_time_compare(b'spam', b'spam'))
        self.assertFalse(constant_time_compare(b'spam', b'eggs'))
        self.assertTrue(constant_time_compare('spam', 'spam'))
        self.assertFalse(constant_time_compare('spam', 'eggs'))
class TestUtilsCryptoPBKDF2(unittest.TestCase):
    """Check pbkdf2() against the published RFC test vectors plus Django
    regression vectors (non-SHA1 digests, zero dklen, leading-zero output).
    """

    # http://tools.ietf.org/html/draft-josefsson-pbkdf2-test-vectors-06
    rfc_vectors = [
        {
            "args": {
                "password": "password",
                "salt": "salt",
                "iterations": 1,
                "dklen": 20,
                "digest": hashlib.sha1,
            },
            "result": "0c60c80f961f0e71f3a9b524af6012062fe037a6",
        },
        {
            "args": {
                "password": "password",
                "salt": "salt",
                "iterations": 2,
                "dklen": 20,
                "digest": hashlib.sha1,
            },
            "result": "ea6c014dc72d6f8ccd1ed92ace1d41f0d8de8957",
        },
        {
            "args": {
                "password": "password",
                "salt": "salt",
                "iterations": 4096,
                "dklen": 20,
                "digest": hashlib.sha1,
            },
            "result": "4b007901b765489abead49d926f721d065a429c1",
        },
        # # this takes way too long :(
        # {
        #     "args": {
        #         "password": "password",
        #         "salt": "salt",
        #         "iterations": 16777216,
        #         "dklen": 20,
        #         "digest": hashlib.sha1,
        #     },
        #     "result": "eefe3d61cd4da4e4e9945b3d6ba2158c2634e984",
        # },
        {
            "args": {
                "password": "passwordPASSWORDpassword",
                "salt": "saltSALTsaltSALTsaltSALTsaltSALTsalt",
                "iterations": 4096,
                "dklen": 25,
                "digest": hashlib.sha1,
            },
            "result": "3d2eec4fe41c849b80c8d83662c0e44a8b291a964cf2f07038",
        },
        {
            "args": {
                "password": "pass\0word",
                "salt": "sa\0lt",
                "iterations": 4096,
                "dklen": 16,
                "digest": hashlib.sha1,
            },
            "result": "56fa6aa75548099dcc37d7f03425e0c3",
        },
    ]

    regression_vectors = [
        {
            "args": {
                "password": "password",
                "salt": "salt",
                "iterations": 1,
                "dklen": 20,
                "digest": hashlib.sha256,
            },
            "result": "120fb6cffcf8b32c43e7225256c4f837a86548c9",
        },
        {
            "args": {
                "password": "password",
                "salt": "salt",
                "iterations": 1,
                "dklen": 20,
                "digest": hashlib.sha512,
            },
            "result": "867f70cf1ade02cff3752599a3a53dc4af34c7a6",
        },
        {
            "args": {
                "password": "password",
                "salt": "salt",
                "iterations": 1000,
                "dklen": 0,
                "digest": hashlib.sha512,
            },
            "result": ("afe6c5530785b6cc6b1c6453384731bd5ee432ee"
                       "549fd42fb6695779ad8a1c5bf59de69c48f774ef"
                       "c4007d5298f9033c0241d5ab69305e7b64eceeb8d"
                       "834cfec"),
        },
        # Check leading zeros are not stripped (#17481)
        {
            "args": {
                "password": b'\xba',
                "salt": "salt",
                "iterations": 1,
                "dklen": 20,
                "digest": hashlib.sha1,
            },
            "result": '0053d3b91a7f1e54effebd6d68771e8a6e0b2c5b',
        },
    ]

    def test_public_vectors(self):
        """pbkdf2() must reproduce every RFC test vector exactly."""
        for vector in self.rfc_vectors:
            result = pbkdf2(**vector['args'])
            # assertEqual, not the deprecated assertEquals alias (the alias
            # was removed in Python 3.12).
            self.assertEqual(binascii.hexlify(result).decode('ascii'),
                             vector['result'])

    def test_regression_vectors(self):
        """pbkdf2() must reproduce each recorded regression vector."""
        for vector in self.regression_vectors:
            result = pbkdf2(**vector['args'])
            self.assertEqual(binascii.hexlify(result).decode('ascii'),
                             vector['result'])
| bsd-3-clause |
LiberatorUSA/GUCEF | dependencies/Ogre/Tools/Blender2.6Export/ogre_mesh_exporter/mesh_properties.py | 16 | 7068 | # ##### BEGIN MIT LICENSE BLOCK #####
# Copyright (C) 2011 by Lih-Hern Pang
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# ##### END MIT LICENSE BLOCK #####
import bpy, os, sys, configparser
from bpy.props import *
class SubMeshProperties(bpy.types.PropertyGroup):
    """Per-submesh export settings (one instance per submesh of a mesh)."""

    # Flag to tell if this submesh should use shared vertices.
    useSharedVertices = BoolProperty(
        name = "Use Shared Vertices",
        description = "Use shared vertices with other submeshes.",
        default = True,
        options = set()
    )
    # Custom name of submesh.
    name = StringProperty(
        name = "Custom name",
        description = "Custom name of submesh.",
        default = "",
        options = set()
    )
# ##############################################
# Mesh Properties on the mesh objects

class MeshProperties(bpy.types.PropertyGroup):
    """Per-mesh export settings for the Ogre mesh exporter.

    NOTE(review): each "<name>_override" boolean appears to gate whether the
    sibling "<name>" value replaces the exporter's global setting for this
    mesh only (descriptions say "Override global setting.") -- confirm
    against the exporter UI code.
    """

    # Enable/Disable export of this mesh.
    exportEnabled = BoolProperty(
        name = "Export",
        description = "Export this mesh.",
        default = True,
        options = set()
    )

    # Active tab in the exporter UI; SKIP_SAVE means it is not persisted.
    exportTab = EnumProperty(
        items = (
            ("mesh", "Mesh", "Mesh tab"),
            ("animation", "Animation", "Animation tab"),
            ("settings", "Override Settings", "Override global settings tab")),
        default = "mesh",
        options = {'SKIP_SAVE'}
    )

    # Per-submesh settings collection (one SubMeshProperties per submesh).
    subMeshProperties = CollectionProperty(type = SubMeshProperties)

    # Active animation sub-tab in the exporter UI; not persisted.
    animationTab = EnumProperty(
        items = (
            ("skel", "Skeleton", "Skeleton animation tab"),
            ("pose", "Pose", "Vertex Pose animation tab"),
            ("morph", "Morph", "Vertex Morph animation tab")),
        default = "skel",
        options = {'SKIP_SAVE'}
    )

    # ##############################################
    # Export override specific Properties

    requireMaterials_override = BoolProperty(
        name = "Require Materials Override",
        description = "Override global setting.",
        default = False,
        options = set()
    )
    requireMaterials = BoolProperty(
        name = "Require Materials",
        description = "Generate Error message when part of this mesh is not assigned with a material.",
        default = True,
        options = set()
    )

    applyModifiers_override = BoolProperty(
        name = "Apply Modifiers Override",
        description = "Override global setting.",
        default = False,
        options = set()
    )
    applyModifiers = BoolProperty(
        name = "Apply Modifiers",
        description = "Apply mesh modifiers before export. (Slow and may break vertex order for morph targets!)",
        default = False,
        options = set()
    )

    skeletonNameFollowMesh_override = BoolProperty(
        name = "Skeleton Name Follow Mesh Override",
        description = "Override global setting.",
        default = False,
        options = set()
    )
    skeletonNameFollowMesh = BoolProperty(
        name = "Skeleton Name Follow Mesh",
        description = "Use mesh name for exported skeleton name instead of the armature name.",
        default = True,
        options = set()
    )

    # ##############################################
    # XML Converter specific Properties

    extremityPoints_override = BoolProperty(
        name = "Extremity Points Override",
        description = "Override global setting.",
        default = False,
        options = set()
    )
    extremityPoints = IntProperty(
        name = "Extremity Points",
        description = "Generate no more than num eXtremes for every submesh. (For submesh render sorting when using alpha materials on submesh)",
        soft_min = 0,
        soft_max = 65536,
        options = set()
    )

    edgeLists_override = BoolProperty(
        name = "Edge Lists Override",
        description = "Override global setting.",
        default = False,
        options = set()
    )
    edgeLists = BoolProperty(
        name = "Edge Lists",
        description = "Generate edge lists. (Useful for outlining or doing stencil shadows)",
        default = False,
        options = set()
    )

    tangent_override = BoolProperty(
        name = "Tangent Override",
        description = "Override global setting.",
        default = False,
        options = set()
    )
    tangent = BoolProperty(
        name = "Tangent",
        description = "Generate tangent.",
        default = False,
        options = set()
    )

    tangentSemantic_override = BoolProperty(
        name = "Tangent Semantic Override",
        description = "Override global setting.",
        default = False,
        options = set()
    )
    tangentSemantic = EnumProperty(
        name = "Tangent Semantic",
        description = "Tangent Semantic to use.",
        items=(("uvw", "uvw", "Use UV semantic."),
               ("tangent", "tangent", "Use tangent semantic."),
               ),
        default= "tangent",
        options = set()
    )

    tangentSize_override = BoolProperty(
        name = "Tangent Size Override",
        description = "Override global setting.",
        default = False,
        options = set()
    )
    tangentSize = EnumProperty(
        name = "Tangent Size",
        description = "Size of tangent.",
        items=(("4", "4 component (parity)", "Use 4 component tangent where 4th component is parity."),
               ("3", "3 component", "Use 3 component tangent."),
               ),
        default= "3",
        options = set()
    )

    splitMirrored_override = BoolProperty(
        name = "Split Mirrored Override",
        description = "Override global setting.",
        default = False,
        options = set()
    )
    splitMirrored = BoolProperty(
        name = "Split Mirrored",
        description = "Split tangent vertices at UV mirror points.",
        default = False,
        options = set()
    )

    splitRotated_override = BoolProperty(
        name = "Split Rotated Override",
        description = "Override global setting.",
        default = False,
        options = set()
    )
    splitRotated = BoolProperty(
        name = "Split Rotated",
        description = "Split tangent vertices where basis is rotated > 90 degrees.",
        default = False,
        options = set()
    )

    reorganiseVertBuff_override = BoolProperty(
        name = "Reorganise Vertex Buffers Override",
        description = "Override global setting.",
        default = False,
        options = set()
    )
    reorganiseVertBuff = BoolProperty(
        name = "Reorganise Vertex Buffers",
        description = "Reorganise vertex buffer to make it GPU vertex cache friendly.",
        default = True,
        options = set()
    )

    optimiseAnimation_override = BoolProperty(
        name = "Optimise Animation Override",
        description = "Override global setting.",
        default = False,
        options = set()
    )
    optimiseAnimation = BoolProperty(
        name = "Optimise Animation",
        description = "Optimise out redundant tracks & keyframes.",
        default = True,
        options = set()
    )
| apache-2.0 |
jwessel/meta-overc | meta-cube/recipes-support/overc-system-agent/files/overc-system-agent-1.2/Overc/package.py | 3 | 1167 | import sys, os
from Overc.utils import Process
class Package(object):
    """Helpers for package management via 'smart' and kernel queries via
    rpm, optionally executed inside a chroot."""

    def __init__(self):
        pass

    def _smartpm(self, args, chroot=None):
        """Run '/usr/bin/smart <args>' (chroot'ed when 'chroot' is given).

        Side effect: stores the command output in self.message.
        Returns the process exit status (0 on success).
        """
        cmd = []
        if chroot is not None:
            cmd.append("chroot")
            cmd.append(chroot)
        cmd.append("/usr/bin/smart")
        cmd.append(args)
        cmd_s = ' '.join(cmd)

        process = Process()
        retval = process.run(cmd_s)
        self.message = process.message
        # Bug fix: the original tested 'retval is not 0', which only worked
        # via CPython's small-int caching; compare by value instead.
        if retval != 0:
            # print() call form works on both Python 2 and 3.
            print("Error!: %s" % cmd_s)
        return retval

    def _get_kernel(self, path):
        """Return the kernel bzImage version string installed under 'path'.

        path == "/" queries the host rpm database directly; any other path
        runs the same query chroot'ed into that tree.
        """
        # Bug fix: 'subprocess' was never imported at module level, so both
        # branches raised NameError; a local import keeps the fix contained.
        import subprocess
        if path == "/":
            subp = subprocess.Popen("rpm -qa | grep kernel-image | xargs rpm -ql | grep bzImage | awk -F'/' '{print $3}'", shell=True, stdout=subprocess.PIPE)
        else:
            subp1 = subprocess.Popen("chroot %s rpm -qa | grep kernel-image" % path, shell=True, stdout=subprocess.PIPE)
            rpm_package = subp1.stdout.readline().strip()
            subp = subprocess.Popen("chroot %s rpm -ql %s | grep bzImage | awk -F'/' '{print $3}'" % (path, rpm_package), shell=True, stdout=subprocess.PIPE)
        c = subp.stdout.readline().strip()
        return c
| mit |
vasily-v-ryabov/pywinauto | pywinauto/tests/allcontrols.py | 5 | 2735 | # GUI Application automation and testing library
# Copyright (C) 2006-2018 Mark Mc Mahon and Contributors
# https://github.com/pywinauto/pywinauto/graphs/contributors
# http://pywinauto.readthedocs.io/en/latest/credits.html
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of pywinauto nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Get All Controls Test
**What is checked**
This test does no actual testing it just returns each control.
**How is it checked**
A loop over all the controls in the dialog is made and each control added to
the list of bugs
**When is a bug reported**
For each control.
**Bug Extra Information**
There is no extra information associated with this bug type
**Is Reference dialog needed**
No,but if available the reference control will be returned with the localised
control.
**False positive bug reports**
Not possible
**Test Identifier**
The identifier for this test/bug is "AllControls"
"""
testname = "AllControls"

#-----------------------------------------------------------------------------
def AllControlsTest(windows):
    """Return a list with exactly one bug entry per control in *windows*.

    Each entry is a (controls, extra_info, test_name, severity) tuple in the
    shape the test framework expects; no actual checking is performed.
    """
    return [([win], {}, testname, 0) for win in windows]
| bsd-3-clause |
OpenLinkedSocialData/fbEgo | JulianaSouza23022014_fb/scripts/rdfFBEgo.py | 3 | 2851 | import social as S, percolation as P, os
import importlib
# Force re-imports so in-session edits to the percolation/social libraries
# are picked up when this script is re-run interactively.
importlib.reload(P.rdf)
importlib.reload(S.fb)
importlib.reload(S.fb.gdf2rdf)
# NOTE(review): duplicated reload of S.fb.gdf2rdf -- looks like a
# copy/paste leftover; confirm before removing.
importlib.reload(S.fb.gdf2rdf)
# Short alias for the check helper (unused in this part of the script).
c = P.utils.check

# Batches of Facebook ego-network .gdf exports to triplify.
# Entry layout (by the positions used in the loop below):
# (gdf filename, unused/None, Facebook numeric uid, Facebook sid);
# 0 marks an unknown value.
# NOTE(review): this first list is dead data -- it is immediately
# overwritten by the second assignment below; presumably batches are
# switched by editing which list comes last. Confirm before deleting.
fnames_=[
    ("RenatoFabbri19112014.gdf",None,"781909429","renato.fabbri"),
    ("PedroPauloRocha10032013.gdf",None,"836944624","dpedropaulorocha"),
    ("AnaCelia18032014.gdf",None,"1450596979",0),
    ("FabiBorges08032014.gdf",None,"598339469","antennarush"),
    ("RicardoPoppi18032014.gdf",None,"100000099352333","ricardopoppi"),
    ("ElenaGarnelo04032014.gdf",None,"1361932044","elena.garnelo"),
    ("GeorgeSanders08032014.gdf",None,"1347483608","george.sander"),
    ("GraziMachado18032014.gdf",None,"1847090892","GrazielleMachado"),
    ("RenatoFabbri19032014.gdf",None,"781909429","renato.fabbri")
]
# The batch actually processed below.
fnames_=[
    ("CalebLuporini25022014.gdf", None,"1110305437","calebml"),
    ("DanielGonzales23022014.gdf", None,"100002080034739","daniel.gonzalezxavier"),
    ("JoaoMekitarian23022014.gdf", None,"100002080034739","joaopaulo.mekitarian"),
    ("MariliaPisani25022014.gdf", None,"100000812625301","marilia.pisani"),
    ("RenatoFabbri22022014.gdf", None,"781909429","renato.fabbri"),
    ("FelipeBrait23022014.gdf", None,"1420435978","felipe.brait"),
    ("JulianaSouza23022014.gdf", None,"520322516","juliana.desouza2"),
    ("NatachaRena22022014.gdf", None,"665770837","natacha.rena"),
    ("SarahLuporini25022014.gdf", None,"1528620900","sarah.cura"),
    ("CamilaBatista23022014.gdf", None,"100001707143512","camila.batista.3382"),
    ("KarinaGomes22022014.gdf", None,"100000176551181","karina.gomes.71"),
    ("OrlandoCoelho22022014.gdf", None,"1060234340","orlando.coelho.98"),
    ("SatoBrasil25022014.gdf", None,"1060234340","sato.dobrasil"),
    ("CarlosDiego25022014.gdf", None,"689266676","cdiegosr"),
    ("PalomaKliss25022014.gdf", None,"100008456088732",0),
    ("CristinaMekitarian23022014.gdf",None,"1771691370","cristina.mekitarian"),
    ("MarcelaLucatelli25022014.gdf", None,"520656478","justinamoira"),
    ("PedroRocha25022014.gdf",None,"836944624","dpedropaulorocha"),
]
# Entries parked outside the active batch:
#("JoaoMeirelles25022014.gdf", None,0,0),
#("LucasOliveira26022014.gdf", None,0,0),
#("FideoFuturalista22022014.gdf", None,0,0),

# Output/publishing directory for the generated RDF.
fpath="./publishing/fb3/"
# Input directory with the .gdf exports; alternatives kept commented out.
#dpath="../data/fb/gdf/ego/"
#dpath="../data/fb/gdf/posAvlab/"
dpath="../data/fb/gdf/avlab/"
umbrella_dir="fbEgo/"
# Absolute path of this script, passed through to triplifyGDF.
scriptpath=os.path.realpath(__file__)

# NOTE(review): the slice skips the first four entries -- presumably they
# were already processed in an earlier run; confirm before relying on it.
for fnames in fnames_[4:]:
    S.fb.triplifyGDF(dpath=dpath,
                     fname=fnames[0],
                     fnamei=None,
                     fpath=fpath,
                     scriptpath=scriptpath,
                     uid=fnames[2],
                     sid=fnames[3],
                     fb_link=None,
                     ego=True,
                     umbrella_dir=umbrella_dir)
| cc0-1.0 |
2013Commons/HUE-SHARK | desktop/core/ext-py/simplejson/build/lib.linux-i686-2.7/simplejson/tests/test_scanstring.py | 125 | 3835 | import sys
import decimal
from unittest import TestCase
import simplejson as json
import simplejson.decoder
class TestScanString(TestCase):
    """Exercise both the pure-Python and (optional) C scanstring."""

    def test_py_scanstring(self):
        self._test_scanstring(simplejson.decoder.py_scanstring)

    def test_c_scanstring(self):
        # The C extension is optional; pass trivially when it is absent.
        if not simplejson.decoder.c_scanstring:
            return
        self._test_scanstring(simplejson.decoder.c_scanstring)

    def _test_scanstring(self, scanstring):
        """Shared vector checks.

        scanstring(s, idx, encoding, strict) returns a pair of the decoded
        string content and the index just past the closing quote.

        assertEqual is used throughout, not the deprecated assertEquals
        alias (the alias was removed in Python 3.12).
        """
        self.assertEqual(
            scanstring('"z\\ud834\\udd20x"', 1, None, True),
            (u'z\U0001d120x', 16))

        # Surrogate-pair handling differs between narrow and wide builds.
        if sys.maxunicode == 65535:
            self.assertEqual(
                scanstring(u'"z\U0001d120x"', 1, None, True),
                (u'z\U0001d120x', 6))
        else:
            self.assertEqual(
                scanstring(u'"z\U0001d120x"', 1, None, True),
                (u'z\U0001d120x', 5))

        self.assertEqual(
            scanstring('"\\u007b"', 1, None, True),
            (u'{', 8))

        self.assertEqual(
            scanstring('"A JSON payload should be an object or array, not a string."', 1, None, True),
            (u'A JSON payload should be an object or array, not a string.', 60))

        self.assertEqual(
            scanstring('["Unclosed array"', 2, None, True),
            (u'Unclosed array', 17))

        self.assertEqual(
            scanstring('["extra comma",]', 2, None, True),
            (u'extra comma', 14))

        self.assertEqual(
            scanstring('["double extra comma",,]', 2, None, True),
            (u'double extra comma', 21))

        self.assertEqual(
            scanstring('["Comma after the close"],', 2, None, True),
            (u'Comma after the close', 24))

        self.assertEqual(
            scanstring('["Extra close"]]', 2, None, True),
            (u'Extra close', 14))

        self.assertEqual(
            scanstring('{"Extra comma": true,}', 2, None, True),
            (u'Extra comma', 14))

        self.assertEqual(
            scanstring('{"Extra value after close": true} "misplaced quoted value"', 2, None, True),
            (u'Extra value after close', 26))

        self.assertEqual(
            scanstring('{"Illegal expression": 1 + 2}', 2, None, True),
            (u'Illegal expression', 21))

        self.assertEqual(
            scanstring('{"Illegal invocation": alert()}', 2, None, True),
            (u'Illegal invocation', 21))

        self.assertEqual(
            scanstring('{"Numbers cannot have leading zeroes": 013}', 2, None, True),
            (u'Numbers cannot have leading zeroes', 37))

        self.assertEqual(
            scanstring('{"Numbers cannot be hex": 0x14}', 2, None, True),
            (u'Numbers cannot be hex', 24))

        self.assertEqual(
            scanstring('[[[[[[[[[[[[[[[[[[[["Too deep"]]]]]]]]]]]]]]]]]]]]', 21, None, True),
            (u'Too deep', 30))

        self.assertEqual(
            scanstring('{"Missing colon" null}', 2, None, True),
            (u'Missing colon', 16))

        self.assertEqual(
            scanstring('{"Double colon":: null}', 2, None, True),
            (u'Double colon', 15))

        self.assertEqual(
            scanstring('{"Comma instead of colon", null}', 2, None, True),
            (u'Comma instead of colon', 25))

        self.assertEqual(
            scanstring('["Colon instead of comma": false]', 2, None, True),
            (u'Colon instead of comma', 25))

        self.assertEqual(
            scanstring('["Bad value", truth]', 2, None, True),
            (u'Bad value', 12))

    def test_issue3623(self):
        """Regression: invalid inputs must raise instead of crashing."""
        # 'json' is the module-level alias for simplejson in this file.
        self.assertRaises(ValueError, json.decoder.scanstring, "xxx", 1,
                          "xxx")
        self.assertRaises(UnicodeDecodeError,
                          json.encoder.encode_basestring_ascii, "xx\xff")
| apache-2.0 |
alanjw/GreenOpenERP-Win-X86 | python/Lib/encodings/mac_roman.py | 93 | 14043 | """ Python Character Mapping Codec mac_roman generated from 'MAPPINGS/VENDORS/APPLE/ROMAN.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
    """Stateless mac-roman codec backed by the charmap tables below."""

    def encode(self,input,errors='strict'):
        # Returns (bytes, length_consumed); unmapped characters are handled
        # according to the 'errors' policy.
        return codecs.charmap_encode(input,errors,encoding_table)

    def decode(self,input,errors='strict'):
        # Returns (text, length_consumed) using the 256-entry decoding table.
        return codecs.charmap_decode(input,errors,decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
    def encode(self, input, final=False):
        # Charmap encoding needs no cross-call state, so 'final' is unused;
        # only the encoded bytes ([0]) are returned, per the API.
        return codecs.charmap_encode(input,self.errors,encoding_table)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
    def decode(self, input, final=False):
        # Each byte maps to exactly one character, so no partial-input
        # state is needed and 'final' is unused.
        return codecs.charmap_decode(input,self.errors,decoding_table)[0]
class StreamWriter(Codec,codecs.StreamWriter):
    # Codec supplies encode(); codecs.StreamWriter supplies the stream glue.
    pass
class StreamReader(Codec,codecs.StreamReader):
    # Codec supplies decode(); codecs.StreamReader supplies the stream glue.
    pass
### encodings module API

def getregentry():
    """Return the CodecInfo record the codecs registry expects."""
    # Codec is stateless, so one shared instance can provide both bound
    # methods.
    codec = Codec()
    return codecs.CodecInfo(
        name='mac-roman',
        encode=codec.encode,
        decode=codec.decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamreader=StreamReader,
        streamwriter=StreamWriter,
    )
### Decoding Table
decoding_table = (
u'\x00' # 0x00 -> CONTROL CHARACTER
u'\x01' # 0x01 -> CONTROL CHARACTER
u'\x02' # 0x02 -> CONTROL CHARACTER
u'\x03' # 0x03 -> CONTROL CHARACTER
u'\x04' # 0x04 -> CONTROL CHARACTER
u'\x05' # 0x05 -> CONTROL CHARACTER
u'\x06' # 0x06 -> CONTROL CHARACTER
u'\x07' # 0x07 -> CONTROL CHARACTER
u'\x08' # 0x08 -> CONTROL CHARACTER
u'\t' # 0x09 -> CONTROL CHARACTER
u'\n' # 0x0A -> CONTROL CHARACTER
u'\x0b' # 0x0B -> CONTROL CHARACTER
u'\x0c' # 0x0C -> CONTROL CHARACTER
u'\r' # 0x0D -> CONTROL CHARACTER
u'\x0e' # 0x0E -> CONTROL CHARACTER
u'\x0f' # 0x0F -> CONTROL CHARACTER
u'\x10' # 0x10 -> CONTROL CHARACTER
u'\x11' # 0x11 -> CONTROL CHARACTER
u'\x12' # 0x12 -> CONTROL CHARACTER
u'\x13' # 0x13 -> CONTROL CHARACTER
u'\x14' # 0x14 -> CONTROL CHARACTER
u'\x15' # 0x15 -> CONTROL CHARACTER
u'\x16' # 0x16 -> CONTROL CHARACTER
u'\x17' # 0x17 -> CONTROL CHARACTER
u'\x18' # 0x18 -> CONTROL CHARACTER
u'\x19' # 0x19 -> CONTROL CHARACTER
u'\x1a' # 0x1A -> CONTROL CHARACTER
u'\x1b' # 0x1B -> CONTROL CHARACTER
u'\x1c' # 0x1C -> CONTROL CHARACTER
u'\x1d' # 0x1D -> CONTROL CHARACTER
u'\x1e' # 0x1E -> CONTROL CHARACTER
u'\x1f' # 0x1F -> CONTROL CHARACTER
u' ' # 0x20 -> SPACE
u'!' # 0x21 -> EXCLAMATION MARK
u'"' # 0x22 -> QUOTATION MARK
u'#' # 0x23 -> NUMBER SIGN
u'$' # 0x24 -> DOLLAR SIGN
u'%' # 0x25 -> PERCENT SIGN
u'&' # 0x26 -> AMPERSAND
u"'" # 0x27 -> APOSTROPHE
u'(' # 0x28 -> LEFT PARENTHESIS
u')' # 0x29 -> RIGHT PARENTHESIS
u'*' # 0x2A -> ASTERISK
u'+' # 0x2B -> PLUS SIGN
u',' # 0x2C -> COMMA
u'-' # 0x2D -> HYPHEN-MINUS
u'.' # 0x2E -> FULL STOP
u'/' # 0x2F -> SOLIDUS
u'0' # 0x30 -> DIGIT ZERO
u'1' # 0x31 -> DIGIT ONE
u'2' # 0x32 -> DIGIT TWO
u'3' # 0x33 -> DIGIT THREE
u'4' # 0x34 -> DIGIT FOUR
u'5' # 0x35 -> DIGIT FIVE
u'6' # 0x36 -> DIGIT SIX
u'7' # 0x37 -> DIGIT SEVEN
u'8' # 0x38 -> DIGIT EIGHT
u'9' # 0x39 -> DIGIT NINE
u':' # 0x3A -> COLON
u';' # 0x3B -> SEMICOLON
u'<' # 0x3C -> LESS-THAN SIGN
u'=' # 0x3D -> EQUALS SIGN
u'>' # 0x3E -> GREATER-THAN SIGN
u'?' # 0x3F -> QUESTION MARK
u'@' # 0x40 -> COMMERCIAL AT
u'A' # 0x41 -> LATIN CAPITAL LETTER A
u'B' # 0x42 -> LATIN CAPITAL LETTER B
u'C' # 0x43 -> LATIN CAPITAL LETTER C
u'D' # 0x44 -> LATIN CAPITAL LETTER D
u'E' # 0x45 -> LATIN CAPITAL LETTER E
u'F' # 0x46 -> LATIN CAPITAL LETTER F
u'G' # 0x47 -> LATIN CAPITAL LETTER G
u'H' # 0x48 -> LATIN CAPITAL LETTER H
u'I' # 0x49 -> LATIN CAPITAL LETTER I
u'J' # 0x4A -> LATIN CAPITAL LETTER J
u'K' # 0x4B -> LATIN CAPITAL LETTER K
u'L' # 0x4C -> LATIN CAPITAL LETTER L
u'M' # 0x4D -> LATIN CAPITAL LETTER M
u'N' # 0x4E -> LATIN CAPITAL LETTER N
u'O' # 0x4F -> LATIN CAPITAL LETTER O
u'P' # 0x50 -> LATIN CAPITAL LETTER P
u'Q' # 0x51 -> LATIN CAPITAL LETTER Q
u'R' # 0x52 -> LATIN CAPITAL LETTER R
u'S' # 0x53 -> LATIN CAPITAL LETTER S
u'T' # 0x54 -> LATIN CAPITAL LETTER T
u'U' # 0x55 -> LATIN CAPITAL LETTER U
u'V' # 0x56 -> LATIN CAPITAL LETTER V
u'W' # 0x57 -> LATIN CAPITAL LETTER W
u'X' # 0x58 -> LATIN CAPITAL LETTER X
u'Y' # 0x59 -> LATIN CAPITAL LETTER Y
u'Z' # 0x5A -> LATIN CAPITAL LETTER Z
u'[' # 0x5B -> LEFT SQUARE BRACKET
u'\\' # 0x5C -> REVERSE SOLIDUS
u']' # 0x5D -> RIGHT SQUARE BRACKET
u'^' # 0x5E -> CIRCUMFLEX ACCENT
u'_' # 0x5F -> LOW LINE
u'`' # 0x60 -> GRAVE ACCENT
u'a' # 0x61 -> LATIN SMALL LETTER A
u'b' # 0x62 -> LATIN SMALL LETTER B
u'c' # 0x63 -> LATIN SMALL LETTER C
u'd' # 0x64 -> LATIN SMALL LETTER D
u'e' # 0x65 -> LATIN SMALL LETTER E
u'f' # 0x66 -> LATIN SMALL LETTER F
u'g' # 0x67 -> LATIN SMALL LETTER G
u'h' # 0x68 -> LATIN SMALL LETTER H
u'i' # 0x69 -> LATIN SMALL LETTER I
u'j' # 0x6A -> LATIN SMALL LETTER J
u'k' # 0x6B -> LATIN SMALL LETTER K
u'l' # 0x6C -> LATIN SMALL LETTER L
u'm' # 0x6D -> LATIN SMALL LETTER M
u'n' # 0x6E -> LATIN SMALL LETTER N
u'o' # 0x6F -> LATIN SMALL LETTER O
u'p' # 0x70 -> LATIN SMALL LETTER P
u'q' # 0x71 -> LATIN SMALL LETTER Q
u'r' # 0x72 -> LATIN SMALL LETTER R
u's' # 0x73 -> LATIN SMALL LETTER S
u't' # 0x74 -> LATIN SMALL LETTER T
u'u' # 0x75 -> LATIN SMALL LETTER U
u'v' # 0x76 -> LATIN SMALL LETTER V
u'w' # 0x77 -> LATIN SMALL LETTER W
u'x' # 0x78 -> LATIN SMALL LETTER X
u'y' # 0x79 -> LATIN SMALL LETTER Y
u'z' # 0x7A -> LATIN SMALL LETTER Z
u'{' # 0x7B -> LEFT CURLY BRACKET
u'|' # 0x7C -> VERTICAL LINE
u'}' # 0x7D -> RIGHT CURLY BRACKET
u'~' # 0x7E -> TILDE
u'\x7f' # 0x7F -> CONTROL CHARACTER
u'\xc4' # 0x80 -> LATIN CAPITAL LETTER A WITH DIAERESIS
u'\xc5' # 0x81 -> LATIN CAPITAL LETTER A WITH RING ABOVE
u'\xc7' # 0x82 -> LATIN CAPITAL LETTER C WITH CEDILLA
u'\xc9' # 0x83 -> LATIN CAPITAL LETTER E WITH ACUTE
u'\xd1' # 0x84 -> LATIN CAPITAL LETTER N WITH TILDE
u'\xd6' # 0x85 -> LATIN CAPITAL LETTER O WITH DIAERESIS
u'\xdc' # 0x86 -> LATIN CAPITAL LETTER U WITH DIAERESIS
u'\xe1' # 0x87 -> LATIN SMALL LETTER A WITH ACUTE
u'\xe0' # 0x88 -> LATIN SMALL LETTER A WITH GRAVE
u'\xe2' # 0x89 -> LATIN SMALL LETTER A WITH CIRCUMFLEX
u'\xe4' # 0x8A -> LATIN SMALL LETTER A WITH DIAERESIS
u'\xe3' # 0x8B -> LATIN SMALL LETTER A WITH TILDE
u'\xe5' # 0x8C -> LATIN SMALL LETTER A WITH RING ABOVE
u'\xe7' # 0x8D -> LATIN SMALL LETTER C WITH CEDILLA
u'\xe9' # 0x8E -> LATIN SMALL LETTER E WITH ACUTE
u'\xe8' # 0x8F -> LATIN SMALL LETTER E WITH GRAVE
u'\xea' # 0x90 -> LATIN SMALL LETTER E WITH CIRCUMFLEX
u'\xeb' # 0x91 -> LATIN SMALL LETTER E WITH DIAERESIS
u'\xed' # 0x92 -> LATIN SMALL LETTER I WITH ACUTE
u'\xec' # 0x93 -> LATIN SMALL LETTER I WITH GRAVE
u'\xee' # 0x94 -> LATIN SMALL LETTER I WITH CIRCUMFLEX
u'\xef' # 0x95 -> LATIN SMALL LETTER I WITH DIAERESIS
u'\xf1' # 0x96 -> LATIN SMALL LETTER N WITH TILDE
u'\xf3' # 0x97 -> LATIN SMALL LETTER O WITH ACUTE
u'\xf2' # 0x98 -> LATIN SMALL LETTER O WITH GRAVE
u'\xf4' # 0x99 -> LATIN SMALL LETTER O WITH CIRCUMFLEX
u'\xf6' # 0x9A -> LATIN SMALL LETTER O WITH DIAERESIS
u'\xf5' # 0x9B -> LATIN SMALL LETTER O WITH TILDE
u'\xfa' # 0x9C -> LATIN SMALL LETTER U WITH ACUTE
u'\xf9' # 0x9D -> LATIN SMALL LETTER U WITH GRAVE
u'\xfb' # 0x9E -> LATIN SMALL LETTER U WITH CIRCUMFLEX
u'\xfc' # 0x9F -> LATIN SMALL LETTER U WITH DIAERESIS
u'\u2020' # 0xA0 -> DAGGER
u'\xb0' # 0xA1 -> DEGREE SIGN
u'\xa2' # 0xA2 -> CENT SIGN
u'\xa3' # 0xA3 -> POUND SIGN
u'\xa7' # 0xA4 -> SECTION SIGN
u'\u2022' # 0xA5 -> BULLET
u'\xb6' # 0xA6 -> PILCROW SIGN
u'\xdf' # 0xA7 -> LATIN SMALL LETTER SHARP S
u'\xae' # 0xA8 -> REGISTERED SIGN
u'\xa9' # 0xA9 -> COPYRIGHT SIGN
u'\u2122' # 0xAA -> TRADE MARK SIGN
u'\xb4' # 0xAB -> ACUTE ACCENT
u'\xa8' # 0xAC -> DIAERESIS
u'\u2260' # 0xAD -> NOT EQUAL TO
u'\xc6' # 0xAE -> LATIN CAPITAL LETTER AE
u'\xd8' # 0xAF -> LATIN CAPITAL LETTER O WITH STROKE
u'\u221e' # 0xB0 -> INFINITY
u'\xb1' # 0xB1 -> PLUS-MINUS SIGN
u'\u2264' # 0xB2 -> LESS-THAN OR EQUAL TO
u'\u2265' # 0xB3 -> GREATER-THAN OR EQUAL TO
u'\xa5' # 0xB4 -> YEN SIGN
u'\xb5' # 0xB5 -> MICRO SIGN
u'\u2202' # 0xB6 -> PARTIAL DIFFERENTIAL
u'\u2211' # 0xB7 -> N-ARY SUMMATION
u'\u220f' # 0xB8 -> N-ARY PRODUCT
u'\u03c0' # 0xB9 -> GREEK SMALL LETTER PI
u'\u222b' # 0xBA -> INTEGRAL
u'\xaa' # 0xBB -> FEMININE ORDINAL INDICATOR
u'\xba' # 0xBC -> MASCULINE ORDINAL INDICATOR
u'\u03a9' # 0xBD -> GREEK CAPITAL LETTER OMEGA
u'\xe6' # 0xBE -> LATIN SMALL LETTER AE
u'\xf8' # 0xBF -> LATIN SMALL LETTER O WITH STROKE
u'\xbf' # 0xC0 -> INVERTED QUESTION MARK
u'\xa1' # 0xC1 -> INVERTED EXCLAMATION MARK
u'\xac' # 0xC2 -> NOT SIGN
u'\u221a' # 0xC3 -> SQUARE ROOT
u'\u0192' # 0xC4 -> LATIN SMALL LETTER F WITH HOOK
u'\u2248' # 0xC5 -> ALMOST EQUAL TO
u'\u2206' # 0xC6 -> INCREMENT
u'\xab' # 0xC7 -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
u'\xbb' # 0xC8 -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
u'\u2026' # 0xC9 -> HORIZONTAL ELLIPSIS
u'\xa0' # 0xCA -> NO-BREAK SPACE
u'\xc0' # 0xCB -> LATIN CAPITAL LETTER A WITH GRAVE
u'\xc3' # 0xCC -> LATIN CAPITAL LETTER A WITH TILDE
u'\xd5' # 0xCD -> LATIN CAPITAL LETTER O WITH TILDE
u'\u0152' # 0xCE -> LATIN CAPITAL LIGATURE OE
u'\u0153' # 0xCF -> LATIN SMALL LIGATURE OE
u'\u2013' # 0xD0 -> EN DASH
u'\u2014' # 0xD1 -> EM DASH
u'\u201c' # 0xD2 -> LEFT DOUBLE QUOTATION MARK
u'\u201d' # 0xD3 -> RIGHT DOUBLE QUOTATION MARK
u'\u2018' # 0xD4 -> LEFT SINGLE QUOTATION MARK
u'\u2019' # 0xD5 -> RIGHT SINGLE QUOTATION MARK
u'\xf7' # 0xD6 -> DIVISION SIGN
u'\u25ca' # 0xD7 -> LOZENGE
u'\xff' # 0xD8 -> LATIN SMALL LETTER Y WITH DIAERESIS
u'\u0178' # 0xD9 -> LATIN CAPITAL LETTER Y WITH DIAERESIS
u'\u2044' # 0xDA -> FRACTION SLASH
u'\u20ac' # 0xDB -> EURO SIGN
u'\u2039' # 0xDC -> SINGLE LEFT-POINTING ANGLE QUOTATION MARK
u'\u203a' # 0xDD -> SINGLE RIGHT-POINTING ANGLE QUOTATION MARK
u'\ufb01' # 0xDE -> LATIN SMALL LIGATURE FI
u'\ufb02' # 0xDF -> LATIN SMALL LIGATURE FL
u'\u2021' # 0xE0 -> DOUBLE DAGGER
u'\xb7' # 0xE1 -> MIDDLE DOT
u'\u201a' # 0xE2 -> SINGLE LOW-9 QUOTATION MARK
u'\u201e' # 0xE3 -> DOUBLE LOW-9 QUOTATION MARK
u'\u2030' # 0xE4 -> PER MILLE SIGN
u'\xc2' # 0xE5 -> LATIN CAPITAL LETTER A WITH CIRCUMFLEX
u'\xca' # 0xE6 -> LATIN CAPITAL LETTER E WITH CIRCUMFLEX
u'\xc1' # 0xE7 -> LATIN CAPITAL LETTER A WITH ACUTE
u'\xcb' # 0xE8 -> LATIN CAPITAL LETTER E WITH DIAERESIS
u'\xc8' # 0xE9 -> LATIN CAPITAL LETTER E WITH GRAVE
u'\xcd' # 0xEA -> LATIN CAPITAL LETTER I WITH ACUTE
u'\xce' # 0xEB -> LATIN CAPITAL LETTER I WITH CIRCUMFLEX
u'\xcf' # 0xEC -> LATIN CAPITAL LETTER I WITH DIAERESIS
u'\xcc' # 0xED -> LATIN CAPITAL LETTER I WITH GRAVE
u'\xd3' # 0xEE -> LATIN CAPITAL LETTER O WITH ACUTE
u'\xd4' # 0xEF -> LATIN CAPITAL LETTER O WITH CIRCUMFLEX
u'\uf8ff' # 0xF0 -> Apple logo
u'\xd2' # 0xF1 -> LATIN CAPITAL LETTER O WITH GRAVE
u'\xda' # 0xF2 -> LATIN CAPITAL LETTER U WITH ACUTE
u'\xdb' # 0xF3 -> LATIN CAPITAL LETTER U WITH CIRCUMFLEX
u'\xd9' # 0xF4 -> LATIN CAPITAL LETTER U WITH GRAVE
u'\u0131' # 0xF5 -> LATIN SMALL LETTER DOTLESS I
u'\u02c6' # 0xF6 -> MODIFIER LETTER CIRCUMFLEX ACCENT
u'\u02dc' # 0xF7 -> SMALL TILDE
u'\xaf' # 0xF8 -> MACRON
u'\u02d8' # 0xF9 -> BREVE
u'\u02d9' # 0xFA -> DOT ABOVE
u'\u02da' # 0xFB -> RING ABOVE
u'\xb8' # 0xFC -> CEDILLA
u'\u02dd' # 0xFD -> DOUBLE ACUTE ACCENT
u'\u02db' # 0xFE -> OGONEK
u'\u02c7' # 0xFF -> CARON
)
### Encoding table
# Inverse of decoding_table, built mechanically by the codecs helper.
encoding_table=codecs.charmap_build(decoding_table)
| agpl-3.0 |
jashank/rust | src/etc/mirror-all-snapshots.py | 53 | 1372 | #!/usr/bin/env python
#
# Copyright 2011-2013 The Rust Project Developers. See the COPYRIGHT
# file at the top-level directory of this distribution and at
# http://rust-lang.org/COPYRIGHT.
#
# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
# option. This file may not be copied, modified, or distributed
# except according to those terms.
import os
from snapshot import *
# Mirror every snapshot tarball listed in the manifest into the local
# download directory, verifying the recorded hash for each file.
date = None
rev = None
platform = None
snap = None

# "snapshot" records set the current (date, rev) pair; following "file"
# records name one tarball per platform belonging to that snapshot.
# 'with' closes the manifest handle (the original leaked it), and iterating
# the file directly avoids materializing it via readlines().
with open(snapshotfile) as f:
    for i, line in enumerate(f, start=1):
        parsed = parse_line(i, line)
        if not parsed:
            continue
        if parsed["type"] == "snapshot":
            date = parsed["date"]
            rev = parsed["rev"]
        elif rev is not None and parsed["type"] == "file":
            platform = parsed["platform"]
            hsh = parsed["hash"]
            snap = full_snapshot_name(date, rev, platform, hsh)
            dl = os.path.join(download_dir_base, snap)
            url = download_url_base + "/" + snap
            if not os.path.exists(dl):
                print("downloading " + url)
                get_url_to_file(url, dl)
            # Always re-verify the hash, even for files already on disk.
            if snap_filename_hash_part(snap) == hash_file(dl):
                print("got download with ok hash")
            else:
                raise Exception("bad hash on download")
| apache-2.0 |
cloxp/cloxp-install | win/node_modules/npm/node_modules/node-gyp/gyp/pylib/gyp/flock_tool.py | 604 | 1533 | #!/usr/bin/env python
# Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""These functions are executed via gyp-flock-tool when using the Makefile
generator. Used on systems that don't have a built-in flock."""
import fcntl
import os
import struct
import subprocess
import sys
def main(args):
    """Entry point: dispatch *args* through a FlockTool instance."""
    FlockTool().Dispatch(args)
class FlockTool(object):
    """This class emulates the 'flock' command."""

    def Dispatch(self, args):
        """Dispatches a string command to a method.

        args[0] is a hyphenated command name (e.g. 'flock'); the remaining
        items are forwarded to the matching Exec* method as positionals.
        """
        if len(args) < 1:
            raise Exception("Not enough arguments")

        method = "Exec%s" % self._CommandifyName(args[0])
        getattr(self, method)(*args[1:])

    def _CommandifyName(self, name_string):
        """Transforms a tool name like copy-info-plist to CopyInfoPlist."""
        return name_string.title().replace('-', '')

    def ExecFlock(self, lockfile, *cmd_list):
        """Emulates the most basic behavior of Linux's flock(1)."""
        # Rely on exception handling to report errors.
        # Note that the stock python on SunOS has a bug
        # where fcntl.flock(fd, LOCK_EX) always fails
        # with EBADF, that's why we use this F_SETLK
        # hack instead.
        # Bug fix: the original spelled the mode as '0666', which is a
        # SyntaxError on Python 3; '0o666' is valid on Python 2.6+ and 3.
        fd = os.open(lockfile, os.O_WRONLY|os.O_NOCTTY|os.O_CREAT, 0o666)
        op = struct.pack('hhllhhl', fcntl.F_WRLCK, 0, 0, 0, 0, 0, 0)
        fcntl.fcntl(fd, fcntl.F_SETLK, op)
        return subprocess.call(cmd_list)
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
| mit |
canwe/NewsBlur | apps/rss_feeds/migrations/0042_feed_classifier_counts.py | 18 | 6475 | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'FeedData.feed_classifier_counts'
db.add_column('rss_feeds_feeddata', 'feed_classifier_counts', self.gf('django.db.models.fields.TextField')(null=True, blank=True), keep_default=False)
def backwards(self, orm):
# Deleting field 'FeedData.feed_classifier_counts'
db.delete_column('rss_feeds_feeddata', 'feed_classifier_counts')
models = {
'rss_feeds.duplicatefeed': {
'Meta': {'object_name': 'DuplicateFeed'},
'duplicate_address': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'duplicate_feed_id': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}),
'feed': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'duplicate_addresses'", 'to': "orm['rss_feeds.Feed']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'rss_feeds.feed': {
'Meta': {'ordering': "['feed_title']", 'object_name': 'Feed', 'db_table': "'feeds'"},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}),
'active_subscribers': ('django.db.models.fields.IntegerField', [], {'default': '-1', 'db_index': 'True'}),
'average_stories_per_month': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'creation': ('django.db.models.fields.DateField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'days_to_trim': ('django.db.models.fields.IntegerField', [], {'default': '90'}),
'etag': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'exception_code': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'feed_address': ('django.db.models.fields.URLField', [], {'unique': 'True', 'max_length': '255'}),
'feed_link': ('django.db.models.fields.URLField', [], {'default': "''", 'max_length': '1000', 'null': 'True', 'blank': 'True'}),
'feed_title': ('django.db.models.fields.CharField', [], {'default': "'[Untitled]'", 'max_length': '255', 'null': 'True', 'blank': 'True'}),
'fetched_once': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'has_feed_exception': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'has_page_exception': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_load_time': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'last_modified': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'last_update': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'}),
'min_to_decay': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'next_scheduled_update': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'}),
'num_subscribers': ('django.db.models.fields.IntegerField', [], {'default': '-1'}),
'premium_subscribers': ('django.db.models.fields.IntegerField', [], {'default': '-1'}),
'queued_date': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'}),
'stories_last_month': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
'rss_feeds.feeddata': {
'Meta': {'object_name': 'FeedData'},
'feed': ('utils.fields.AutoOneToOneField', [], {'related_name': "'data'", 'unique': 'True', 'to': "orm['rss_feeds.Feed']"}),
'feed_classifier_counts': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'feed_tagline': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'popular_authors': ('django.db.models.fields.CharField', [], {'max_length': '2048', 'null': 'True', 'blank': 'True'}),
'popular_tags': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'null': 'True', 'blank': 'True'}),
'story_count_history': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'})
},
'rss_feeds.feedicon': {
'Meta': {'object_name': 'FeedIcon'},
'color': ('django.db.models.fields.CharField', [], {'max_length': '6', 'null': 'True', 'blank': 'True'}),
'data': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'feed': ('utils.fields.AutoOneToOneField', [], {'related_name': "'icon'", 'unique': 'True', 'primary_key': 'True', 'to': "orm['rss_feeds.Feed']"}),
'icon_url': ('django.db.models.fields.CharField', [], {'max_length': '2000', 'null': 'True', 'blank': 'True'}),
'not_found': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
'rss_feeds.feedloadtime': {
'Meta': {'object_name': 'FeedLoadtime'},
'date_accessed': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'feed': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['rss_feeds.Feed']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'loadtime': ('django.db.models.fields.FloatField', [], {})
},
'rss_feeds.feedupdatehistory': {
'Meta': {'object_name': 'FeedUpdateHistory'},
'average_per_feed': ('django.db.models.fields.DecimalField', [], {'max_digits': '4', 'decimal_places': '1'}),
'fetch_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'number_of_feeds': ('django.db.models.fields.IntegerField', [], {}),
'seconds_taken': ('django.db.models.fields.IntegerField', [], {})
}
}
complete_apps = ['rss_feeds']
| mit |
hoehnp/navit_test | lib/python2.7/site-packages/setuptools/glob.py | 242 | 5207 | """
Filename globbing utility. Mostly a copy of `glob` from Python 3.5.
Changes include:
* `yield from` and PEP3102 `*` removed.
* `bytes` changed to `six.binary_type`.
* Hidden files are not ignored.
"""
import os
import re
import fnmatch
from setuptools.extern.six import binary_type
__all__ = ["glob", "iglob", "escape"]
def glob(pathname, recursive=False):
"""Return a list of paths matching a pathname pattern.
The pattern may contain simple shell-style wildcards a la
fnmatch. However, unlike fnmatch, filenames starting with a
dot are special cases that are not matched by '*' and '?'
patterns.
If recursive is true, the pattern '**' will match any files and
zero or more directories and subdirectories.
"""
return list(iglob(pathname, recursive=recursive))
def iglob(pathname, recursive=False):
"""Return an iterator which yields the paths matching a pathname pattern.
The pattern may contain simple shell-style wildcards a la
fnmatch. However, unlike fnmatch, filenames starting with a
dot are special cases that are not matched by '*' and '?'
patterns.
If recursive is true, the pattern '**' will match any files and
zero or more directories and subdirectories.
"""
it = _iglob(pathname, recursive)
if recursive and _isrecursive(pathname):
s = next(it) # skip empty string
assert not s
return it
def _iglob(pathname, recursive):
dirname, basename = os.path.split(pathname)
if not has_magic(pathname):
if basename:
if os.path.lexists(pathname):
yield pathname
else:
# Patterns ending with a slash should match only directories
if os.path.isdir(dirname):
yield pathname
return
if not dirname:
if recursive and _isrecursive(basename):
for x in glob2(dirname, basename):
yield x
else:
for x in glob1(dirname, basename):
yield x
return
# `os.path.split()` returns the argument itself as a dirname if it is a
# drive or UNC path. Prevent an infinite recursion if a drive or UNC path
# contains magic characters (i.e. r'\\?\C:').
if dirname != pathname and has_magic(dirname):
dirs = _iglob(dirname, recursive)
else:
dirs = [dirname]
if has_magic(basename):
if recursive and _isrecursive(basename):
glob_in_dir = glob2
else:
glob_in_dir = glob1
else:
glob_in_dir = glob0
for dirname in dirs:
for name in glob_in_dir(dirname, basename):
yield os.path.join(dirname, name)
# These 2 helper functions non-recursively glob inside a literal directory.
# They return a list of basenames. `glob1` accepts a pattern while `glob0`
# takes a literal basename (so it only has to check for its existence).
def glob1(dirname, pattern):
if not dirname:
if isinstance(pattern, binary_type):
dirname = os.curdir.encode('ASCII')
else:
dirname = os.curdir
try:
names = os.listdir(dirname)
except OSError:
return []
return fnmatch.filter(names, pattern)
def glob0(dirname, basename):
if not basename:
# `os.path.split()` returns an empty basename for paths ending with a
# directory separator. 'q*x/' should match only directories.
if os.path.isdir(dirname):
return [basename]
else:
if os.path.lexists(os.path.join(dirname, basename)):
return [basename]
return []
# This helper function recursively yields relative pathnames inside a literal
# directory.
def glob2(dirname, pattern):
assert _isrecursive(pattern)
yield pattern[:0]
for x in _rlistdir(dirname):
yield x
# Recursively yields relative pathnames inside a literal directory.
def _rlistdir(dirname):
if not dirname:
if isinstance(dirname, binary_type):
dirname = binary_type(os.curdir, 'ASCII')
else:
dirname = os.curdir
try:
names = os.listdir(dirname)
except os.error:
return
for x in names:
yield x
path = os.path.join(dirname, x) if dirname else x
for y in _rlistdir(path):
yield os.path.join(x, y)
magic_check = re.compile('([*?[])')
magic_check_bytes = re.compile(b'([*?[])')
def has_magic(s):
if isinstance(s, binary_type):
match = magic_check_bytes.search(s)
else:
match = magic_check.search(s)
return match is not None
def _isrecursive(pattern):
if isinstance(pattern, binary_type):
return pattern == b'**'
else:
return pattern == '**'
def escape(pathname):
"""Escape all special characters.
"""
# Escaping is done by wrapping any of "*?[" between square brackets.
# Metacharacters do not work in the drive part and shouldn't be escaped.
drive, pathname = os.path.splitdrive(pathname)
if isinstance(pathname, binary_type):
pathname = magic_check_bytes.sub(br'[\1]', pathname)
else:
pathname = magic_check.sub(r'[\1]', pathname)
return drive + pathname
| gpl-2.0 |
y4n9squared/HEtest | hetest/python/common/array_view_test.py | 1 | 7993 | # *****************************************************************
# Copyright (c) 2013 Massachusetts Institute of Technology
#
# Developed exclusively at US Government expense under US Air Force contract
# FA8721-05-C-002. The rights of the United States Government to use, modify,
# reproduce, release, perform, display or disclose this computer software and
# computer software documentation in whole or in part, in any manner and for
# any purpose whatsoever, and to have or authorize others to do so, are
# Unrestricted and Unlimited.
#
# Licensed for use under the BSD License as described in the BSD-LICENSE.txt
# file in the root directory of this release.
#
# Project: SPAR
# Authors: OMD
# Description: Unit tests for ArrayView
#
# Modifications:
# Date Name Modification
# ---- ---- ------------
# 13 Jan 2012 omd Original Version
# *****************************************************************
import unittest
from array_view import ArrayView
from array_view import get_array_view_or_slice
import numpy
class ArrayViewTest(unittest.TestCase):
def test_is_view(self):
"""Make sure that a view is really a view: e.g. that changes to it are
reflected in the base array and vice versa."""
base = numpy.array([1, 2, 3, 4, 5])
view = ArrayView(base, [0, 2])
self.assertEqual(view[0], 1)
self.assertEqual(view[1], 3)
# Now modify base. The changes should be reflected in the view.
base[2] = 100
self.assertEqual(view[0], 1)
self.assertEqual(view[1], 100)
self.assertEqual(base[0], 1)
self.assertEqual(base[2], 100)
# Now modify the view. The changes should be reflected in base.
view[1] = 3
self.assertEqual(view[0], 1)
self.assertEqual(view[1], 3)
self.assertEqual(base[0], 1)
self.assertEqual(base[2], 3)
def test_view_slices(self):
"""We should be able to get and set via slices as well and still preserve
the view property."""
base = numpy.arange(0, 100)
view = ArrayView(base, [0, 20, 22, 50, 77])
self.assertEqual(view[0], 0)
self.assertEqual(view[1], 20)
self.assertEqual(view[2], 22)
self.assertEqual(view[3], 50)
self.assertEqual(view[4], 77)
# The [:] slice should just be the same view again.
same_view = view[:]
self.assertEqual(same_view[0], 0)
self.assertEqual(same_view[1], 20)
self.assertEqual(same_view[2], 22)
self.assertEqual(same_view[3], 50)
self.assertEqual(same_view[4], 77)
# And it should really be a view
same_view[1] = 1
self.assertEqual(same_view[1], 1)
self.assertEqual(view[1], 1)
self.assertEqual(base[20], 1)
same_view[1] = 20
base[77] = -1
self.assertEqual(view[4], -1)
self.assertEqual(same_view[4], -1)
base[77] = 77
# And other slices should work as well
middle_two = view[1:3]
self.assertEqual(middle_two[0], 20)
self.assertEqual(middle_two[1], 22)
# and it too should be a view
middle_two[0] = 0
self.assertEqual(middle_two[0], 0)
self.assertEqual(base[20], 0)
middle_two[0] = 20
def test_iteration(self):
base = numpy.arange(0, 100)
view = ArrayView(base, [0, 20, 22, 50, 77])
expected = [0, 20, 22, 50, 77]
for expected, observed in zip(expected, view):
self.assertEqual(expected, observed)
def test_len(self):
base = numpy.arange(0, 100)
view = ArrayView(base, [0, 20, 22, 50, 77])
self.assertEqual(len(view), 5)
def test_contains(self):
base = numpy.arange(0, 100)
view = ArrayView(base, [0, 20, 22, 50, 77])
self.assertTrue(20 in view)
self.assertTrue(22 in view)
self.assertFalse(1 in view)
self.assertFalse(19 in view)
def test_add(self):
"""Simple addition of arrays should work."""
base = numpy.array([0, 1, 1, 0])
# view = [1, 0]
view = ArrayView(base, [1, 3])
added = view + numpy.array([1, 1])
self.assertEqual(added[0], 2)
self.assertEqual(added[1], 1)
def test_plus_equal(self):
"""The += operator should work and since it's a view it should modify
both the view and the base array."""
base = numpy.array([0, 1, 1, 0])
# view = [1, 0]
view = ArrayView(base, [1, 3])
view += numpy.array([1, 1])
self.assertEqual(view[0], 2)
self.assertEqual(view[1], 1)
# make sure base was modified too
self.assertEqual(base[1], 2)
self.assertEqual(base[3], 1)
def test_plus_equal_two_views(self):
"""Test that += works correctly with a view on the left and right of the
assignment."""
base1 = numpy.array([0, 1, 1, 0])
base2 = numpy.array([1, 1, 1, 1])
# view1 == [1, 0]
view1 = ArrayView(base1, [1, 3])
# veiw2 == [1, 1]
view2 = ArrayView(base2, [0, 2])
view1 += view2
self.assertEqual(view1[0], 2)
self.assertEqual(view1[1], 1)
# make sure base was modified too
self.assertEqual(base1[1], 2)
self.assertEqual(base1[3], 1)
# view2 and base2 should be unmodified
self.assertEqual(view2[0], 1)
self.assertEqual(view2[1], 1)
self.assertTrue(numpy.all(base2 == numpy.array([1, 1, 1, 1])))
def test_other_in_place_math(self):
"""I've overrident most of the other "in place" math operators like -=,
%=, etc. Here we test some of them and, in parcticular, make sure
they're modifying the base array."""
base = numpy.array([0, 1, 1, 0])
# view = [1, 0]
view = ArrayView(base, [1, 3])
view -= numpy.array([1, 2])
self.assertEqual(view[0], 0)
self.assertEqual(view[1], -2)
self.assertEqual(base[0], 0)
self.assertEqual(base[1], 0)
self.assertEqual(base[2], 1)
self.assertEqual(base[3], -2)
base = numpy.array([0, 1, 1, 0])
# view = [1, 0]
view = ArrayView(base, [1, 3])
view *= numpy.array([2, 2])
self.assertEqual(view[0], 2)
self.assertEqual(view[1], 0)
self.assertEqual(base[0], 0)
self.assertEqual(base[1], 2)
self.assertEqual(base[2], 1)
self.assertEqual(base[3], 0)
def test_get_array_view_or_slice(self):
"""Make sure the get_array_view_or_slice method returns the right thing
and that the returned slice contains the right data."""
base = numpy.array([0, 1, 2, 3, 4, 5])
# contiguous indices should be a slice
s1 = get_array_view_or_slice(base, [0, 1, 2, 3])
self.assertEqual(type(s1), numpy.ndarray)
self.assertEqual(len(s1), 4)
self.assertEqual(s1[0], 0)
self.assertEqual(s1[1], 1)
self.assertEqual(s1[2], 2)
self.assertEqual(s1[3], 3)
# and the slice should act as a view
s1[0] = -1
self.assertEqual(base[0], -1)
base[0] = 100
self.assertEqual(s1[0], 100)
# Put it back the way it was.
base[0] = 0
# And make sure it works if the start index isn't 0
s2 = get_array_view_or_slice(base, [2, 3])
self.assertEqual(type(s2), numpy.ndarray)
self.assertEqual(len(s2), 2)
self.assertEqual(s2[0], 2)
self.assertEqual(s2[1], 3)
# And make sure an ArrayView is returned if the indices aren't
# contiguous
av = get_array_view_or_slice(base, [0, 1, 3])
self.assertEqual(type(av), ArrayView)
self.assertEqual(len(av), 3)
self.assertEqual(av[0], 0)
self.assertEqual(av[1], 1)
self.assertEqual(av[2], 3)
| bsd-2-clause |
pku9104038/edx-platform | lms/lib/comment_client/thread.py | 5 | 5458 | from .utils import merge_dict, strip_blank, strip_none, extract, perform_request
from .utils import CommentClientRequestError
import models
import settings
class Thread(models.Model):
accessible_fields = [
'id', 'title', 'body', 'anonymous', 'anonymous_to_peers', 'course_id',
'closed', 'tags', 'votes', 'commentable_id', 'username', 'user_id',
'created_at', 'updated_at', 'comments_count', 'unread_comments_count',
'at_position_list', 'children', 'type', 'highlighted_title',
'highlighted_body', 'endorsed', 'read', 'group_id', 'group_name', 'pinned',
'abuse_flaggers', 'resp_skip', 'resp_limit', 'resp_total'
]
updatable_fields = [
'title', 'body', 'anonymous', 'anonymous_to_peers', 'course_id',
'closed', 'user_id', 'commentable_id', 'group_id', 'group_name', 'pinned'
]
initializable_fields = updatable_fields
base_url = "{prefix}/threads".format(prefix=settings.PREFIX)
default_retrieve_params = {'recursive': False}
type = 'thread'
@classmethod
def search(cls, query_params, *args, **kwargs):
default_params = {'page': 1,
'per_page': 20,
'course_id': query_params['course_id'],
'recursive': False}
params = merge_dict(default_params, strip_blank(strip_none(query_params)))
if query_params.get('text'):
url = cls.url(action='search')
else:
url = cls.url(action='get_all', params=extract(params, 'commentable_id'))
if params.get('commentable_id'):
del params['commentable_id']
response = perform_request('get', url, params, *args, **kwargs)
return response.get('collection', []), response.get('page', 1), response.get('num_pages', 1)
@classmethod
def url_for_threads(cls, params={}):
if params.get('commentable_id'):
return "{prefix}/{commentable_id}/threads".format(prefix=settings.PREFIX, commentable_id=params['commentable_id'])
else:
return "{prefix}/threads".format(prefix=settings.PREFIX)
@classmethod
def url_for_search_threads(cls, params={}):
return "{prefix}/search/threads".format(prefix=settings.PREFIX)
@classmethod
def url(cls, action, params={}):
if action in ['get_all', 'post']:
return cls.url_for_threads(params)
elif action == 'search':
return cls.url_for_search_threads(params)
else:
return super(Thread, cls).url(action, params)
# TODO: This is currently overriding Model._retrieve only to add parameters
# for the request. Model._retrieve should be modified to handle this such
# that subclasses don't need to override for this.
def _retrieve(self, *args, **kwargs):
url = self.url(action='get', params=self.attributes)
request_params = {
'recursive': kwargs.get('recursive'),
'user_id': kwargs.get('user_id'),
'mark_as_read': kwargs.get('mark_as_read', True),
'resp_skip': kwargs.get('response_skip'),
'resp_limit': kwargs.get('response_limit'),
}
request_params = strip_none(request_params)
response = perform_request('get', url, request_params)
self.update_attributes(**response)
def flagAbuse(self, user, voteable):
if voteable.type == 'thread':
url = _url_for_flag_abuse_thread(voteable.id)
elif voteable.type == 'comment':
url = _url_for_flag_comment(voteable.id)
else:
raise CommentClientRequestError("Can only flag/unflag threads or comments")
params = {'user_id': user.id}
request = perform_request('put', url, params)
voteable.update_attributes(request)
def unFlagAbuse(self, user, voteable, removeAll):
if voteable.type == 'thread':
url = _url_for_unflag_abuse_thread(voteable.id)
elif voteable.type == 'comment':
url = _url_for_unflag_comment(voteable.id)
else:
raise CommentClientRequestError("Can only flag/unflag for threads or comments")
params = {'user_id': user.id}
#if you're an admin, when you unflag, remove ALL flags
if removeAll:
params['all'] = True
request = perform_request('put', url, params)
voteable.update_attributes(request)
def pin(self, user, thread_id):
url = _url_for_pin_thread(thread_id)
params = {'user_id': user.id}
request = perform_request('put', url, params)
self.update_attributes(request)
def un_pin(self, user, thread_id):
url = _url_for_un_pin_thread(thread_id)
params = {'user_id': user.id}
request = perform_request('put', url, params)
self.update_attributes(request)
def _url_for_flag_abuse_thread(thread_id):
return "{prefix}/threads/{thread_id}/abuse_flag".format(prefix=settings.PREFIX, thread_id=thread_id)
def _url_for_unflag_abuse_thread(thread_id):
return "{prefix}/threads/{thread_id}/abuse_unflag".format(prefix=settings.PREFIX, thread_id=thread_id)
def _url_for_pin_thread(thread_id):
return "{prefix}/threads/{thread_id}/pin".format(prefix=settings.PREFIX, thread_id=thread_id)
def _url_for_un_pin_thread(thread_id):
return "{prefix}/threads/{thread_id}/unpin".format(prefix=settings.PREFIX, thread_id=thread_id)
| agpl-3.0 |
Ensighten/Diamond | src/collectors/xen_collector/test/testxen.py | 31 | 2972 | #!/usr/bin/python
# coding=utf-8
###############################################################################
from test import CollectorTestCase
from test import get_collector_config
from test import unittest
from test import run_only
from mock import Mock
from mock import patch
from diamond.collector import Collector
from xen_collector import XENCollector
###############################################################################
def run_only_if_libvirt_is_available(func):
try:
import libvirt
except ImportError:
libvirt = None
pred = lambda: libvirt is not None
return run_only(func, pred)
class TestXENCollector(CollectorTestCase):
def setUp(self):
config = get_collector_config('XENCollector', {
})
self.collector = XENCollector(config, None)
def test_import(self):
self.assertTrue(XENCollector)
@run_only_if_libvirt_is_available
@patch('os.statvfs')
@patch('libvirt.openReadOnly')
@patch.object(Collector, 'publish')
def test_centos6(self, publish_mock, libvirt_mock, os_mock):
class info:
def __init__(self, id):
self.id = id
def info(self):
if self.id == 0:
return [1, 49420888L, 49420888L, 8, 911232000000000L]
if self.id == 1:
return [1, 2097152L, 2097152L, 2, 310676150000000L]
if self.id == 2:
return [1, 2097152L, 2097152L, 2, 100375300000000L]
if self.id == 3:
return [1, 10485760L, 10485760L, 2, 335312040000000L]
if self.id == 4:
return [1, 10485760L, 10485760L, 2, 351313480000000L]
libvirt_m = Mock()
libvirt_m.getInfo.return_value = ['x86_64', 48262, 8, 1200, 2, 1, 4, 1]
libvirt_m.listDomainsID.return_value = [0, 2, 1, 4, 3]
def lookupByIdMock(id):
lookup = info(id)
return lookup
libvirt_m.lookupByID = lookupByIdMock
libvirt_mock.return_value = libvirt_m
statsvfs_mock = Mock()
statsvfs_mock.f_bavail = 74492145
statsvfs_mock.f_frsize = 4096
os_mock.return_value = statsvfs_mock
self.collector.collect()
metrics = {
'TotalCores': 8.000000,
'InstalledMem': 48262.000000,
'MemAllocated': 24576.000000,
'MemFree': 23686.000000,
'DiskFree': 297968580.000000,
'FreeCores': 0.000000,
'AllocatedCores': 8.000000,
}
self.setDocExample(collector=self.collector.__class__.__name__,
metrics=metrics,
defaultpath=self.collector.config['path'])
self.assertPublishedMany(publish_mock, metrics)
###############################################################################
if __name__ == "__main__":
unittest.main()
| mit |
SSDS-Croatia/SSDS-2017 | Day-2/segmentation/data.py | 1 | 2603 | import math
import numpy as np
import pickle
from os.path import join
import PIL.Image as pimg
def _shuffle_data(data):
idx = np.arange(data[0].shape[0])
np.random.shuffle(idx)
shuffled_data = []
for d in data:
if type(d) == np.ndarray:
d = d[idx]
else:
d = [d[i] for i in idx]
shuffled_data.append(d)
return shuffled_data
class Dataset():
class_info = [['road', [128,64,128]],
['building', [70,70,70]],
['infrastructure', [220,220,0]],
['nature', [107,142,35]],
['sky', [70,130,180]],
['person', [220,20,60]],
['vehicle', [0,0,142]]]
num_classes = len(class_info)
def __init__(self, split_name, batch_size, downsample=None, shuffle=True):
self.mean = np.array([75.205, 85.014, 75.089])
self.std = np.array([46.894, 47.633, 46.471])
self.batch_size = batch_size
self.shuffle = shuffle
# load the dataset
data_dir = 'local/data/'
data = pickle.load(open(join(data_dir, split_name+'.pickle'), 'rb'))
self.x = data['rgb']
self.y = data['labels']
self.names = data['names']
if downsample is not None and downsample > 1:
new_x = []
new_y = []
img_width = round(self.x.shape[2] / downsample)
img_height = round(self.x.shape[1] / downsample)
for i in range(self.x.shape[0]):
img = pimg.fromarray(self.x[i]).resize((img_width, img_height), pimg.BILINEAR)
labels = pimg.fromarray(self.y[i]).resize((img_width, img_height), pimg.NEAREST)
new_x.append(img)
new_y.append(labels)
self.x = np.stack(new_x)
self.y = np.stack(new_y)
self.num_examples = self.x.shape[0]
self.height = self.x.shape[1]
self.width = self.x.shape[2]
self.channels = self.x.shape[3]
self.num_batches = math.ceil(self.num_examples / self.batch_size)
def __iter__(self):
if self.shuffle:
self.x, self.y, self.names = _shuffle_data([self.x, self.y, self.names])
self.cnt = 0
return self
def __next__(self):
if self.cnt >= self.num_batches:
raise StopIteration
offset = self.cnt * self.batch_size
x = self.x[offset:offset+self.batch_size]
y = self.y[offset:offset+self.batch_size]
x = (np.ascontiguousarray(x).astype(np.float32) - self.mean) / self.std
y = np.ascontiguousarray(y).astype(np.int32)
names = self.names[offset:offset+self.batch_size]
self.cnt += 1
return x, y, names
def get_img(self, name):
i = self.names.index(name)
img = self.x[i]
return img.astype(np.uint8) | mit |
vukae/eve-wspace | evewspace/core/searches.py | 17 | 1198 | # Eve W-Space
# Copyright (C) 2013 Andrew Austin and other contributors
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version. An additional term under section
# 7 of the GPL is included in the LICENSE file.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from search import registry
from models import Type, Corporation, Alliance
registry.register(Corporation, 'corp', 'name')
registry.register(Alliance, 'alliance', 'name')
registry.register(Type, 'item', 'name',
Type.objects.filter(published=1).all())
registry.register(Type, 'tower', 'name',
Type.objects.filter(published=1, marketgroup__pk=478).all())
| gpl-3.0 |
PaulWay/spacewalk | client/rhel/rhn-client-tools/src/up2date_client/debUtils.py | 8 | 2514 | # Client code for Update Agent
# Copyright (c) 2011--2012 Red Hat, Inc. Distributed under GPLv2.
#
# Author: Simon Lukasik
# Lukas Durfina
#
import os
import apt
import gettext
t = gettext.translation('rhn-client-tools', fallback=True)
_ = t.ugettext
# FIXME: After Debian bug 187019 is resolved
def verifyPackages(packages):
cache = apt.Cache()
missing_packages = []
for package in packages:
pkg = cache[package[0]]
if pkg == None or not pkg.is_installed:
missing_packages.append(package)
return [], missing_packages
def parseVRE(version):
epoch = ''
release = '0'
if version.find(':') != -1:
epoch, version = version.split(':')
if version.find('-') != -1:
tmp = version.split('-')
version = '-'.join(tmp[:-1])
release = tmp[-1]
return version, release, epoch
def installTime(pkg_name, pkg_arch):
dir = '/var/lib/dpkg/info'
files = [ '%s.list' % pkg_name,
'%s:%s.list' % (pkg_name, pkg_arch) ]
for f in files:
path = os.path.join(dir,f)
if os.path.isfile(path):
return os.path.getmtime(path)
return None
#FIXME: Using Apt cache might not be an ultimate solution.
# It could be better to parse /var/lib/dpkg/status manually.
# Apt cache might not contain all the packages.
def getInstalledPackageList(msgCallback = None, progressCallback = None,
getArch=None, getInfo = None):
""" Return list of packages. Package is dict with following keys:
name, epoch, version, release and optionaly arch.
"""
if msgCallback != None:
msgCallback(_("Getting list of packages installed on the system"))
cache = apt.Cache()
total = 0
for pkg in cache:
if pkg.installed != None:
total += 1
count = 0
pkg_list = []
for pkg in cache:
if pkg.installed == None:
continue
version, release, epoch = parseVRE(pkg.installed.version)
package = {
'name': pkg.name,
'epoch': epoch,
'version': version,
'release': release,
'arch': pkg.installed.architecture + '-deb',
'installtime': installTime(pkg.name, pkg.installed.architecture)
}
pkg_list.append(package)
if progressCallback != None:
progressCallback(count, total)
count = count + 1
pkg_list.sort()
return pkg_list
def setDebugVerbosity():
pass
| gpl-2.0 |
kamyu104/django | tests/messages_tests/base.py | 319 | 14243 | from django import http
from django.contrib.messages import constants, get_level, set_level, utils
from django.contrib.messages.api import MessageFailure
from django.contrib.messages.constants import DEFAULT_LEVELS
from django.contrib.messages.storage import base, default_storage
from django.contrib.messages.storage.base import Message
from django.core.urlresolvers import reverse
from django.test import modify_settings, override_settings
from django.utils.translation import ugettext_lazy
def add_level_messages(storage):
"""
Adds 6 messages from different levels (including a custom one) to a storage
instance.
"""
storage.add(constants.INFO, 'A generic info message')
storage.add(29, 'Some custom level')
storage.add(constants.DEBUG, 'A debugging message', extra_tags='extra-tag')
storage.add(constants.WARNING, 'A warning')
storage.add(constants.ERROR, 'An error')
storage.add(constants.SUCCESS, 'This was a triumph.')
class override_settings_tags(override_settings):
def enable(self):
super(override_settings_tags, self).enable()
# LEVEL_TAGS is a constant defined in the
# django.contrib.messages.storage.base module, so after changing
# settings.MESSAGE_TAGS, we need to update that constant too.
self.old_level_tags = base.LEVEL_TAGS
base.LEVEL_TAGS = utils.get_level_tags()
def disable(self):
super(override_settings_tags, self).disable()
base.LEVEL_TAGS = self.old_level_tags
class BaseTests(object):
    """Storage-backend-agnostic test suite for django.contrib.messages.

    Concrete subclasses set ``storage_class`` and implement
    ``stored_messages_count`` / ``test_get`` for their backend.
    """
    storage_class = default_storage
    # Maps the level names used in test URLs to their numeric constants.
    levels = {
        'debug': constants.DEBUG,
        'info': constants.INFO,
        'success': constants.SUCCESS,
        'warning': constants.WARNING,
        'error': constants.ERROR,
    }

    def setUp(self):
        # Point MESSAGE_STORAGE at the backend under test and keep
        # base.LEVEL_TAGS in sync via override_settings_tags.
        self.settings_override = override_settings_tags(
            TEMPLATES=[{
                'BACKEND': 'django.template.backends.django.DjangoTemplates',
                'DIRS': [],
                'APP_DIRS': True,
                'OPTIONS': {
                    'context_processors': (
                        'django.contrib.auth.context_processors.auth',
                        'django.contrib.messages.context_processors.messages',
                    ),
                },
            }],
            ROOT_URLCONF='messages_tests.urls',
            MESSAGE_TAGS='',
            MESSAGE_STORAGE='%s.%s' % (self.storage_class.__module__,
                                       self.storage_class.__name__),
            SESSION_SERIALIZER='django.contrib.sessions.serializers.JSONSerializer',
        )
        self.settings_override.enable()

    def tearDown(self):
        self.settings_override.disable()

    def get_request(self):
        # Bare request object; tests attach storage to it as needed.
        return http.HttpRequest()

    def get_response(self):
        return http.HttpResponse()

    def get_storage(self, data=None):
        """
        Returns the storage backend, setting its loaded data to the ``data``
        argument.
        This method avoids the storage ``_get`` method from getting called so
        that other parts of the storage backend can be tested independent of
        the message retrieval logic.
        """
        storage = self.storage_class(self.get_request())
        storage._loaded_data = data or []
        return storage

    def test_add(self):
        storage = self.get_storage()
        self.assertFalse(storage.added_new)
        storage.add(constants.INFO, 'Test message 1')
        self.assertTrue(storage.added_new)
        storage.add(constants.INFO, 'Test message 2', extra_tags='tag')
        self.assertEqual(len(storage), 2)

    def test_add_lazy_translation(self):
        # Lazy ugettext_lazy proxies must survive serialization by the backend.
        storage = self.get_storage()
        response = self.get_response()
        storage.add(constants.INFO, ugettext_lazy('lazy message'))
        storage.update(response)
        storing = self.stored_messages_count(storage, response)
        self.assertEqual(storing, 1)

    def test_no_update(self):
        storage = self.get_storage()
        response = self.get_response()
        storage.update(response)
        storing = self.stored_messages_count(storage, response)
        self.assertEqual(storing, 0)

    def test_add_update(self):
        storage = self.get_storage()
        response = self.get_response()
        storage.add(constants.INFO, 'Test message 1')
        storage.add(constants.INFO, 'Test message 1', extra_tags='tag')
        storage.update(response)
        storing = self.stored_messages_count(storage, response)
        self.assertEqual(storing, 2)

    def test_existing_add_read_update(self):
        # Adding then reading means everything was consumed: nothing stored.
        storage = self.get_existing_storage()
        response = self.get_response()
        storage.add(constants.INFO, 'Test message 3')
        list(storage)   # Simulates a read
        storage.update(response)
        storing = self.stored_messages_count(storage, response)
        self.assertEqual(storing, 0)

    def test_existing_read_add_update(self):
        # A message added *after* the read is still pending: one stored.
        storage = self.get_existing_storage()
        response = self.get_response()
        list(storage)   # Simulates a read
        storage.add(constants.INFO, 'Test message 3')
        storage.update(response)
        storing = self.stored_messages_count(storage, response)
        self.assertEqual(storing, 1)

    @override_settings(MESSAGE_LEVEL=constants.DEBUG)
    def test_full_request_response_cycle(self):
        """
        With the message middleware enabled, tests that messages are properly
        stored and then retrieved across the full request/redirect/response
        cycle.
        """
        data = {
            'messages': ['Test message %d' % x for x in range(5)],
        }
        show_url = reverse('show_message')
        for level in ('debug', 'info', 'success', 'warning', 'error'):
            add_url = reverse('add_message', args=(level,))
            response = self.client.post(add_url, data, follow=True)
            self.assertRedirects(response, show_url)
            self.assertIn('messages', response.context)
            messages = [Message(self.levels[level], msg) for msg in data['messages']]
            self.assertEqual(list(response.context['messages']), messages)
            for msg in data['messages']:
                self.assertContains(response, msg)

    @override_settings(MESSAGE_LEVEL=constants.DEBUG)
    def test_with_template_response(self):
        data = {
            'messages': ['Test message %d' % x for x in range(5)],
        }
        show_url = reverse('show_template_response')
        for level in self.levels.keys():
            add_url = reverse('add_template_response', args=(level,))
            response = self.client.post(add_url, data, follow=True)
            self.assertRedirects(response, show_url)
            self.assertIn('messages', response.context)
            for msg in data['messages']:
                self.assertContains(response, msg)
            # there shouldn't be any messages on second GET request
            response = self.client.get(show_url)
            for msg in data['messages']:
                self.assertNotContains(response, msg)

    def test_context_processor_message_levels(self):
        show_url = reverse('show_template_response')
        response = self.client.get(show_url)
        self.assertIn('DEFAULT_MESSAGE_LEVELS', response.context)
        self.assertEqual(response.context['DEFAULT_MESSAGE_LEVELS'], DEFAULT_LEVELS)

    @override_settings(MESSAGE_LEVEL=constants.DEBUG)
    def test_multiple_posts(self):
        """
        Tests that messages persist properly when multiple POSTs are made
        before a GET.
        """
        data = {
            'messages': ['Test message %d' % x for x in range(5)],
        }
        show_url = reverse('show_message')
        messages = []
        for level in ('debug', 'info', 'success', 'warning', 'error'):
            messages.extend(Message(self.levels[level], msg) for msg in data['messages'])
            add_url = reverse('add_message', args=(level,))
            self.client.post(add_url, data)
        response = self.client.get(show_url)
        self.assertIn('messages', response.context)
        self.assertEqual(list(response.context['messages']), messages)
        for msg in data['messages']:
            self.assertContains(response, msg)

    @modify_settings(
        INSTALLED_APPS={'remove': 'django.contrib.messages'},
        MIDDLEWARE_CLASSES={'remove': 'django.contrib.messages.middleware.MessageMiddleware'},
    )
    @override_settings(
        MESSAGE_LEVEL=constants.DEBUG,
        TEMPLATES=[{
            'BACKEND': 'django.template.backends.django.DjangoTemplates',
            'DIRS': [],
            'APP_DIRS': True,
        }],
    )
    def test_middleware_disabled(self):
        """
        Tests that, when the middleware is disabled, an exception is raised
        when one attempts to store a message.
        """
        data = {
            'messages': ['Test message %d' % x for x in range(5)],
        }
        reverse('show_message')
        for level in ('debug', 'info', 'success', 'warning', 'error'):
            add_url = reverse('add_message', args=(level,))
            self.assertRaises(MessageFailure, self.client.post, add_url,
                              data, follow=True)

    @modify_settings(
        INSTALLED_APPS={'remove': 'django.contrib.messages'},
        MIDDLEWARE_CLASSES={'remove': 'django.contrib.messages.middleware.MessageMiddleware'},
    )
    @override_settings(
        TEMPLATES=[{
            'BACKEND': 'django.template.backends.django.DjangoTemplates',
            'DIRS': [],
            'APP_DIRS': True,
        }],
    )
    def test_middleware_disabled_fail_silently(self):
        """
        Tests that, when the middleware is disabled, an exception is not
        raised if 'fail_silently' = True
        """
        data = {
            'messages': ['Test message %d' % x for x in range(5)],
            'fail_silently': True,
        }
        show_url = reverse('show_message')
        for level in ('debug', 'info', 'success', 'warning', 'error'):
            add_url = reverse('add_message', args=(level,))
            response = self.client.post(add_url, data, follow=True)
            self.assertRedirects(response, show_url)
            self.assertNotIn('messages', response.context)

    def stored_messages_count(self, storage, response):
        """
        Returns the number of messages being stored after a
        ``storage.update()`` call.
        """
        raise NotImplementedError('This method must be set by a subclass.')

    def test_get(self):
        # Backend-specific: subclasses verify retrieval of stored messages.
        raise NotImplementedError('This method must be set by a subclass.')

    def get_existing_storage(self):
        # Storage preloaded with two messages, as if left by a prior request.
        return self.get_storage([Message(constants.INFO, 'Test message 1'),
                                 Message(constants.INFO, 'Test message 2',
                                         extra_tags='tag')])

    def test_existing_read(self):
        """
        Tests that reading the existing storage doesn't cause the data to be
        lost.
        """
        storage = self.get_existing_storage()
        self.assertFalse(storage.used)
        # After iterating the storage engine directly, the used flag is set.
        data = list(storage)
        self.assertTrue(storage.used)
        # The data does not disappear because it has been iterated.
        self.assertEqual(data, list(storage))

    def test_existing_add(self):
        storage = self.get_existing_storage()
        self.assertFalse(storage.added_new)
        storage.add(constants.INFO, 'Test message 3')
        self.assertTrue(storage.added_new)

    def test_default_level(self):
        # get_level works even with no storage on the request.
        request = self.get_request()
        self.assertEqual(get_level(request), constants.INFO)
        # get_level returns the default level if it hasn't been set.
        storage = self.get_storage()
        request._messages = storage
        self.assertEqual(get_level(request), constants.INFO)
        # Only messages of sufficient level get recorded.
        add_level_messages(storage)
        self.assertEqual(len(storage), 5)

    def test_low_level(self):
        # Level 5 is below DEBUG, so all six fixture messages are recorded.
        request = self.get_request()
        storage = self.storage_class(request)
        request._messages = storage
        self.assertTrue(set_level(request, 5))
        self.assertEqual(get_level(request), 5)
        add_level_messages(storage)
        self.assertEqual(len(storage), 6)

    def test_high_level(self):
        # Level 30 (WARNING) filters out INFO/DEBUG/custom-29/SUCCESS? — the
        # two recorded ones are those at or above 30 per the fixture levels.
        request = self.get_request()
        storage = self.storage_class(request)
        request._messages = storage
        self.assertTrue(set_level(request, 30))
        self.assertEqual(get_level(request), 30)
        add_level_messages(storage)
        self.assertEqual(len(storage), 2)

    @override_settings(MESSAGE_LEVEL=29)
    def test_settings_level(self):
        request = self.get_request()
        storage = self.storage_class(request)
        self.assertEqual(get_level(request), 29)
        add_level_messages(storage)
        self.assertEqual(len(storage), 3)

    def test_tags(self):
        storage = self.get_storage()
        storage.level = 0
        add_level_messages(storage)
        tags = [msg.tags for msg in storage]
        self.assertEqual(tags,
                         ['info', '', 'extra-tag debug', 'warning', 'error',
                          'success'])

    def test_level_tag(self):
        storage = self.get_storage()
        storage.level = 0
        add_level_messages(storage)
        tags = [msg.level_tag for msg in storage]
        self.assertEqual(tags,
                         ['info', '', 'debug', 'warning', 'error',
                          'success'])

    @override_settings_tags(MESSAGE_TAGS={
        constants.INFO: 'info',
        constants.DEBUG: '',
        constants.WARNING: '',
        constants.ERROR: 'bad',
        29: 'custom',
    }
    )
    def test_custom_tags(self):
        storage = self.get_storage()
        storage.level = 0
        add_level_messages(storage)
        tags = [msg.tags for msg in storage]
        self.assertEqual(tags,
                         ['info', 'custom', 'extra-tag', '', 'bad', 'success'])
| bsd-3-clause |
mandx/django-extensions | conftest.py | 24 | 2501 | from django.conf import settings
def pytest_configure():
    """Verify test-suite dependencies and configure minimal Django settings.

    Exits the process with status 1 (after printing install hints) when any
    required library is missing, mirroring the original behavior.
    """
    import sys

    # (import name, pip install name) pairs required to run the test suite.
    # The original repeated an identical try/except block four times; the
    # table keeps the messages and exit behavior while removing the copies.
    requirements = [
        ('django', 'django'),
        ('shortuuid', 'shortuuid'),
        ('dateutil', 'python-dateutil'),
        ('six', 'six'),
    ]
    for module_name, pip_name in requirements:
        try:
            __import__(module_name)
        except ImportError:
            print("Error: missing test dependency:")
            print("  %s library is needed to run test suite" % module_name)
            print("  you can install it with 'pip install %s'" % pip_name)
            print("  or use tox to automatically handle test dependencies")
            sys.exit(1)

    # Dynamically configure the Django settings with the minimum necessary to
    # get Django running tests.
    settings.configure(
        INSTALLED_APPS=[
            'django.contrib.auth',
            'django.contrib.contenttypes',
            'django.contrib.admin',
            'django.contrib.sessions',
            'tests.testapp',
            'django_extensions',
        ],
        MIDDLEWARE_CLASSES=(
            'django.contrib.sessions.middleware.SessionMiddleware',
            'django.contrib.auth.middleware.AuthenticationMiddleware',
            'django.contrib.messages.middleware.MessageMiddleware',
        ),
        # Django replaces this, but it still wants it. *shrugs*
        DATABASE_ENGINE='django.db.backends.sqlite3',
        DATABASES={
            'default': {
                'ENGINE': 'django.db.backends.sqlite3',
                'NAME': ':memory:',
            }
        },
        MEDIA_ROOT='/tmp/django_extensions_test_media/',
        MEDIA_PATH='/media/',
        ROOT_URLCONF='tests.urls',
        DEBUG=True,
        TEMPLATE_DEBUG=True,
    )
| mit |
abhijeet-dev/ll-arndale-octa | tools/perf/scripts/python/Perf-Trace-Util/lib/Perf/Trace/SchedGui.py | 12980 | 5411 | # SchedGui.py - Python extension for perf script, basic GUI code for
# traces drawing and overview.
#
# Copyright (C) 2010 by Frederic Weisbecker <fweisbec@gmail.com>
#
# This software is distributed under the terms of the GNU General
# Public License ("GPL") version 2 as published by the Free Software
# Foundation.
try:
import wx
except ImportError:
raise ImportError, "You need to install the wxpython lib for this script"
class RootFrame(wx.Frame):
    """Top-level wx window drawing scheduler traces as rows of rectangles.

    Python 2 code: the `/` divisions below are integer divisions when both
    operands are ints, which the pixel math relies on.
    """
    Y_OFFSET = 100            # top margin, in pixels, above the first row
    RECT_HEIGHT = 100         # height of one trace row
    RECT_SPACE = 50           # vertical gap between rows
    EVENT_MARKING_WIDTH = 5   # height of the event-marker strip atop a rect

    def __init__(self, sched_tracer, title, parent = None, id = -1):
        wx.Frame.__init__(self, parent, id, title)

        (self.screen_width, self.screen_height) = wx.GetDisplaySize()
        self.screen_width -= 10
        self.screen_height -= 10
        self.zoom = 0.5                 # pixels per microsecond scale factor
        self.scroll_scale = 20          # scrollbar units -> pixels
        self.sched_tracer = sched_tracer
        self.sched_tracer.set_root_win(self)
        (self.ts_start, self.ts_end) = sched_tracer.interval()
        self.update_width_virtual()
        self.nr_rects = sched_tracer.nr_rectangles() + 1
        self.height_virtual = RootFrame.Y_OFFSET + (self.nr_rects * (RootFrame.RECT_HEIGHT + RootFrame.RECT_SPACE))

        # whole window panel
        self.panel = wx.Panel(self, size=(self.screen_width, self.screen_height))

        # scrollable container
        self.scroll = wx.ScrolledWindow(self.panel)
        self.scroll.SetScrollbars(self.scroll_scale, self.scroll_scale, self.width_virtual / self.scroll_scale, self.height_virtual / self.scroll_scale)
        self.scroll.EnableScrolling(True, True)
        self.scroll.SetFocus()

        # scrollable drawing area
        self.scroll_panel = wx.Panel(self.scroll, size=(self.screen_width - 15, self.screen_height / 2))
        self.scroll_panel.Bind(wx.EVT_PAINT, self.on_paint)
        self.scroll_panel.Bind(wx.EVT_KEY_DOWN, self.on_key_press)
        self.scroll_panel.Bind(wx.EVT_LEFT_DOWN, self.on_mouse_down)

        # Paint/key/mouse are bound on both the scroll window and the inner
        # panel so events are caught wherever the focus happens to land.
        self.scroll.Bind(wx.EVT_PAINT, self.on_paint)
        self.scroll.Bind(wx.EVT_KEY_DOWN, self.on_key_press)
        self.scroll.Bind(wx.EVT_LEFT_DOWN, self.on_mouse_down)

        self.scroll.Fit()
        self.Fit()

        self.scroll_panel.SetDimensions(-1, -1, self.width_virtual, self.height_virtual, wx.SIZE_USE_EXISTING)

        self.txt = None   # summary StaticText, created lazily by update_summary

        self.Show(True)

    def us_to_px(self, val):
        # Convert microseconds to pixels at the current zoom.
        return val / (10 ** 3) * self.zoom

    def px_to_us(self, val):
        # Inverse of us_to_px.
        return (val / self.zoom) * (10 ** 3)

    def scroll_start(self):
        # Current scroll origin, in pixels.
        (x, y) = self.scroll.GetViewStart()
        return (x * self.scroll_scale, y * self.scroll_scale)

    def scroll_start_us(self):
        # Current horizontal scroll origin, in microseconds.
        (x, y) = self.scroll_start()
        return self.px_to_us(x)

    def paint_rectangle_zone(self, nr, color, top_color, start, end):
        """Draw one event rectangle in row *nr* spanning [start, end] (us).

        When *top_color* is given, a thin marker strip of that color is drawn
        along the rectangle's top edge first.
        """
        offset_px = self.us_to_px(start - self.ts_start)
        width_px = self.us_to_px(end - self.ts_start)

        offset_py = RootFrame.Y_OFFSET + (nr * (RootFrame.RECT_HEIGHT + RootFrame.RECT_SPACE))
        width_py = RootFrame.RECT_HEIGHT

        dc = self.dc

        if top_color is not None:
            (r, g, b) = top_color
            top_color = wx.Colour(r, g, b)
            brush = wx.Brush(top_color, wx.SOLID)
            dc.SetBrush(brush)
            dc.DrawRectangle(offset_px, offset_py, width_px, RootFrame.EVENT_MARKING_WIDTH)
            width_py -= RootFrame.EVENT_MARKING_WIDTH
            offset_py += RootFrame.EVENT_MARKING_WIDTH

        (r ,g, b) = color
        color = wx.Colour(r, g, b)
        brush = wx.Brush(color, wx.SOLID)
        dc.SetBrush(brush)
        dc.DrawRectangle(offset_px, offset_py, width_px, width_py)

    def update_rectangles(self, dc, start, end):
        # Delegate the actual drawing of the visible interval to the tracer.
        start += self.ts_start
        end += self.ts_start
        self.sched_tracer.fill_zone(start, end)

    def on_paint(self, event):
        # Redraw only the currently visible time window.
        dc = wx.PaintDC(self.scroll_panel)
        self.dc = dc   # stashed for paint_rectangle_zone callbacks

        width = min(self.width_virtual, self.screen_width)
        (x, y) = self.scroll_start()
        start = self.px_to_us(x)
        end = self.px_to_us(x + width)
        self.update_rectangles(dc, start, end)

    def rect_from_ypixel(self, y):
        # Map a y pixel back to a row index; -1 if it falls in a gap/margin.
        y -= RootFrame.Y_OFFSET
        rect = y / (RootFrame.RECT_HEIGHT + RootFrame.RECT_SPACE)
        height = y % (RootFrame.RECT_HEIGHT + RootFrame.RECT_SPACE)

        if rect < 0 or rect > self.nr_rects - 1 or height > RootFrame.RECT_HEIGHT:
            return -1

        return rect

    def update_summary(self, txt):
        # Replace the summary text below the drawing area.
        if self.txt:
            self.txt.Destroy()
        self.txt = wx.StaticText(self.panel, -1, txt, (0, (self.screen_height / 2) + 50))

    def on_mouse_down(self, event):
        (x, y) = event.GetPositionTuple()
        rect = self.rect_from_ypixel(y)
        if rect == -1:
            return

        t = self.px_to_us(x) + self.ts_start

        self.sched_tracer.mouse_down(rect, t)

    def update_width_virtual(self):
        self.width_virtual = self.us_to_px(self.ts_end - self.ts_start)

    def __zoom(self, x):
        # Re-anchor the scrollbars so time *x* (us) stays at the view start.
        self.update_width_virtual()
        (xpos, ypos) = self.scroll.GetViewStart()
        xpos = self.us_to_px(x) / self.scroll_scale
        self.scroll.SetScrollbars(self.scroll_scale, self.scroll_scale, self.width_virtual / self.scroll_scale, self.height_virtual / self.scroll_scale, xpos, ypos)
        self.Refresh()

    def zoom_in(self):
        x = self.scroll_start_us()
        self.zoom *= 2
        self.__zoom(x)

    def zoom_out(self):
        x = self.scroll_start_us()
        self.zoom /= 2
        self.__zoom(x)

    def on_key_press(self, event):
        # '+'/'-' zoom; arrow keys scroll one unit in the given direction.
        key = event.GetRawKeyCode()
        if key == ord("+"):
            self.zoom_in()
            return
        if key == ord("-"):
            self.zoom_out()
            return

        key = event.GetKeyCode()
        (x, y) = self.scroll.GetViewStart()
        if key == wx.WXK_RIGHT:
            self.scroll.Scroll(x + 1, y)
        elif key == wx.WXK_LEFT:
            self.scroll.Scroll(x - 1, y)
        elif key == wx.WXK_DOWN:
            self.scroll.Scroll(x, y + 1)
        elif key == wx.WXK_UP:
            self.scroll.Scroll(x, y - 1)
| gpl-2.0 |
ohmini/thaifoodapi | lib/django/contrib/gis/db/backends/postgis/adapter.py | 373 | 1695 | """
This object provides quoting for GEOS geometries into PostgreSQL/PostGIS.
"""
from __future__ import unicode_literals
from psycopg2 import Binary
from psycopg2.extensions import ISQLQuote
class PostGISAdapter(object):
    """Quotes GEOS geometries for embedding in PostgreSQL/PostGIS SQL."""

    def __init__(self, geom, geography=False):
        "Initializes on the geometry."
        # Hold the WKB as a plain byte string (keeps the adapter easily
        # picklable) together with the geometry's SRID.
        self.ewkb = bytes(geom.ewkb)
        self.srid = geom.srid
        self.geography = geography
        self._adapter = Binary(self.ewkb)

    def __conform__(self, proto):
        # Psycopg2 probes for the ISQLQuote protocol; reject anything else.
        if proto != ISQLQuote:
            raise Exception('Error implementing psycopg2 protocol. Is psycopg2 installed?')
        return self

    def __eq__(self, other):
        return (
            isinstance(other, PostGISAdapter)
            and self.ewkb == other.ewkb
            and self.srid == other.srid
        )

    def __hash__(self):
        # Consistent with __eq__: identity is (WKB bytes, SRID).
        return hash((self.ewkb, self.srid))

    def __str__(self):
        return self.getquoted()

    def prepare(self, conn):
        """
        This method allows escaping the binary in the style required by the
        server's `standard_conforming_string` setting.
        """
        self._adapter.prepare(conn)

    def getquoted(self):
        "Returns a properly quoted string for use in PostgreSQL/PostGIS."
        # psycopg will figure out whether to use E'\\000' or '\000'
        func = 'ST_GeogFromWKB' if self.geography else 'ST_GeomFromEWKB'
        payload = self._adapter.getquoted().decode()
        return str('%s(%s)' % (func, payload))
| bsd-3-clause |
c0defreak/python-for-android | python3-alpha/python3-src/Lib/tkinter/test/runtktests.py | 67 | 2312 | """
Use this module to get and run all tk tests.
tkinter tests should live in a package inside the directory where this file
lives, like test_tkinter.
Extensions also should live in packages following the same rule as above.
"""
import os
import sys
import unittest
import importlib
import test.support
this_dir_path = os.path.abspath(os.path.dirname(__file__))
def is_package(path):
    """Return True if directory *path* contains a package marker, i.e. an
    ``__init__`` module in source or compiled form."""
    # Bug fix: the original listed '__init.pyo' (missing underscore), so a
    # directory containing only an __init__.pyo was never detected.
    markers = ('__init__.py', '__init__.pyc', '__init__.pyo')
    for name in os.listdir(path):
        if name in markers:
            return True
    return False
def get_tests_modules(basepath=this_dir_path, gui=True, packages=None):
    """This will import and yield modules whose names start with test_
    and are inside packages found in the path starting at basepath.

    If packages is specified it should contain package names that
    want their tests collected.
    """
    suffix = '.py'
    for dirpath, dirnames, filenames in os.walk(basepath):
        # Prune hidden directories in place so os.walk skips them.
        dirnames[:] = [d for d in dirnames if not d.startswith('.')]
        if not (is_package(dirpath) and filenames):
            continue
        pkg_name = dirpath[len(basepath) + len(os.sep):].replace('/', '.')
        if packages and pkg_name not in packages:
            continue
        for fname in filenames:
            if not (fname.startswith('test_') and fname.endswith(suffix)):
                continue
            try:
                yield importlib.import_module(
                    ".%s.%s" % (pkg_name, fname[:-len(suffix)]),
                    "tkinter.test")
            except test.support.ResourceDenied:
                # Without a GUI these tests are simply skipped.
                if gui:
                    raise
def get_tests(text=True, gui=True, packages=None):
    """Yield all the tests in the modules found by get_tests_modules.

    If gui is false, only tests that do not require a GUI are yielded.
    """
    # Non-GUI tests are collected first, matching the original append order.
    wanted_attrs = [
        name
        for enabled, name in ((text, 'tests_nogui'), (gui, 'tests_gui'))
        if enabled
    ]
    for module in get_tests_modules(gui=gui, packages=packages):
        for attr in wanted_attrs:
            for test in getattr(module, attr, ()):
                yield test
if __name__ == "__main__":
test.support.use_resources = ['gui']
test.support.run_unittest(*get_tests())
| apache-2.0 |
alxgu/ansible | lib/ansible/modules/cloud/ovirt/ovirt_group_facts.py | 55 | 3393 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2016 Red Hat, Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: ovirt_group_facts
short_description: Retrieve facts about one or more oVirt/RHV groups
author: "Ondra Machacek (@machacekondra)"
version_added: "2.3"
description:
- "Retrieve facts about one or more oVirt/RHV groups."
notes:
- "This module creates a new top-level C(ovirt_groups) fact, which
contains a list of groups."
options:
pattern:
description:
- "Search term which is accepted by oVirt/RHV search backend."
- "For example to search group X use following pattern: name=X"
extends_documentation_fragment: ovirt_facts
'''
EXAMPLES = '''
# Examples don't contain auth parameter for simplicity,
# look at ovirt_auth module to see how to reuse authentication:
# Gather facts about all groups which names start with C(admin):
- ovirt_group_facts:
pattern: name=admin*
- debug:
var: ovirt_groups
'''
RETURN = '''
ovirt_groups:
description: "List of dictionaries describing the groups. Group attributes are mapped to dictionary keys,
all groups attributes can be found at following url: http://ovirt.github.io/ovirt-engine-api-model/master/#types/group."
returned: On success.
type: list
'''
import traceback
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.ovirt import (
check_sdk,
create_connection,
get_dict_of_struct,
ovirt_facts_full_argument_spec,
)
def main():
    """Ansible entry point: gather facts about oVirt/RHV groups matching
    ``pattern`` and publish them as the ``ovirt_groups`` fact."""
    argument_spec = ovirt_facts_full_argument_spec(
        pattern=dict(default='', required=False),
    )
    module = AnsibleModule(argument_spec)
    check_sdk(module)

    # Bug fix: previously `connection` (and `auth`) were first bound inside
    # the try block, so if create_connection() raised, the finally clause
    # crashed with an unbound-name error that masked the real failure.
    auth = None
    connection = None
    try:
        auth = module.params.pop('auth')
        connection = create_connection(auth)
        groups_service = connection.system_service().groups_service()
        groups = groups_service.list(search=module.params['pattern'])
        module.exit_json(
            changed=False,
            ansible_facts=dict(
                ovirt_groups=[
                    get_dict_of_struct(
                        struct=c,
                        connection=connection,
                        fetch_nested=module.params.get('fetch_nested'),
                        attributes=module.params.get('nested_attributes'),
                    ) for c in groups
                ],
            ),
        )
    except Exception as e:
        module.fail_json(msg=str(e), exception=traceback.format_exc())
    finally:
        # Only log out if a connection was actually established; keep the
        # session alive when the caller supplied a reusable token.
        if connection is not None:
            connection.close(logout=auth.get('token') is None)
# Ansible executes modules as standalone scripts.
if __name__ == '__main__':
    main()
| gpl-3.0 |
qedi-r/home-assistant | homeassistant/components/aladdin_connect/cover.py | 5 | 3509 | """Platform for the Aladdin Connect cover component."""
import logging
from aladdin_connect import AladdinConnectClient
import voluptuous as vol
from homeassistant.components.cover import (
PLATFORM_SCHEMA,
SUPPORT_CLOSE,
SUPPORT_OPEN,
CoverDevice,
)
from homeassistant.const import (
CONF_PASSWORD,
CONF_USERNAME,
STATE_CLOSED,
STATE_CLOSING,
STATE_OPEN,
STATE_OPENING,
)
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)

# Identifier and title for the persistent notification shown on setup failure.
NOTIFICATION_ID = "aladdin_notification"
NOTIFICATION_TITLE = "Aladdin Connect Cover Setup"

# Maps the door status strings reported by the Aladdin Connect service onto
# Home Assistant cover states.
STATES_MAP = {
    "open": STATE_OPEN,
    "opening": STATE_OPENING,
    "closed": STATE_CLOSED,
    "closing": STATE_CLOSING,
}

# These doors support open/close only — no position or tilt control.
SUPPORTED_FEATURES = SUPPORT_OPEN | SUPPORT_CLOSE

PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
    {vol.Required(CONF_USERNAME): cv.string, vol.Required(CONF_PASSWORD): cv.string}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
    """Set up one AladdinDevice cover per door on the configured account."""
    client = AladdinConnectClient(
        config.get(CONF_USERNAME), config.get(CONF_PASSWORD)
    )

    try:
        if not client.login():
            raise ValueError("Username or Password is incorrect")
        add_entities(AladdinDevice(client, door) for door in client.get_doors())
    except (TypeError, KeyError, NameError, ValueError) as ex:
        # Surface the failure both in the log and as a persistent UI note.
        _LOGGER.error("%s", ex)
        hass.components.persistent_notification.create(
            "Error: {}<br />"
            "You will need to restart hass after fixing."
            "".format(ex),
            title=NOTIFICATION_TITLE,
            notification_id=NOTIFICATION_ID,
        )
class AladdinDevice(CoverDevice):
    """A single Aladdin Connect garage door exposed as a cover entity."""

    def __init__(self, acc, device):
        """Store the client handle and the door's identifying attributes."""
        self._acc = acc
        self._device_id = device["device_id"]
        self._number = device["door_number"]
        self._name = device["name"]
        self._status = STATES_MAP.get(device["status"])

    @property
    def device_class(self):
        """Report this entity as a garage door."""
        return "garage"

    @property
    def supported_features(self):
        """Open and close only — no position control."""
        return SUPPORTED_FEATURES

    @property
    def unique_id(self):
        """Stable ID built from the device id and door number."""
        return f"{self._device_id}-{self._number}"

    @property
    def name(self):
        """Door name as reported by the service."""
        return self._name

    @property
    def is_opening(self):
        """True while the door is opening."""
        return self._status == STATE_OPENING

    @property
    def is_closing(self):
        """True while the door is closing."""
        return self._status == STATE_CLOSING

    @property
    def is_closed(self):
        """True when closed, False when not, None when the state is unknown."""
        return None if self._status is None else self._status == STATE_CLOSED

    def open_cover(self, **kwargs):
        """Ask the service to open this door."""
        self._acc.open_door(self._device_id, self._number)

    def close_cover(self, **kwargs):
        """Ask the service to close this door."""
        self._acc.close_door(self._device_id, self._number)

    def update(self):
        """Refresh the cached door state from the service."""
        self._status = STATES_MAP.get(
            self._acc.get_door_status(self._device_id, self._number)
        )
| apache-2.0 |
foufou55/Sick-Beard | sickbeard/name_parser/regexes.py | 23 | 14128 | # Author: Nic Wolfe <nic@wolfeden.ca>
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of Sick Beard.
#
# Sick Beard is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Sick Beard is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Sick Beard. If not, see <http://www.gnu.org/licenses/>.
# all regexes are case insensitive
from sickbeard.common import showLanguages
ep_regexes = [
('season only_year',
# Show.Name.2010.S01.Source.Quality.Etc-Group
'''
^((?P<series_name>.+?)[. _-]+)? # Show_Name and separator
(?P<air_year>\d{4})[. _-]+ # 2010 and separator
s((ea|ai)son[. _-])? # S01/Season 01
(?P<season_num>\d+)[. _-]* # S01 and optional separator
[. _-]*((?P<extra_info>.+?) # Source_Quality_Etc-
((?<![. _-])(?<!WEB) # Make sure this is really the release group
-(?P<release_group>[^- ]+))?)?$ # Group
'''),
('standard_repeat',
# Show.Name.S01E02.S01E03.Source.Quality.Etc-Group
# Show Name - S01E02 - S01E03 - S01E04 - Ep Name
'''
^(?P<series_name>.+?)[. _-]+ # Show_Name and separator
s(?P<season_num>\d+)[. _-]* # S01 and optional separator
e(?P<ep_num>\d+) # E02 and separator
([. _-]+s(?P=season_num)[. _-]* # S01 and optional separator
e(?P<extra_ep_num>\d+))+ # E03/etc and separator
[. _-]*((?P<extra_info>.+?) # Source_Quality_Etc-
((?<![. _-])(?<!WEB) # Make sure this is really the release group
-(?P<release_group>[^- ]+))?)?$ # Group
'''),
('scene_date_format_bis',
# Show.Name.2010.S01E01.Source.Quality.Etc-Group
'''
^(?P<series_name>.+?)[. _-]+ # Show_Name and separator
(?P<air_year>\d{4})[. _-]+ # 2010 and separator
s(?P<season_num>\d+)[. _-]* # S01 and optional separator
e(?P<ep_num>\d+)
'''),
('fov_repeat',
# Show.Name.1x02.1x03.Source.Quality.Etc-Group
# Show Name - 1x02 - 1x03 - 1x04 - Ep Name
'''
^(?P<series_name>.+?)[. _-]+ # Show_Name and separator
(?P<season_num>\d+)x # 1x
(?P<ep_num>\d+) # 02 and separator
([. _-]+(?P=season_num)x # 1x
(?P<extra_ep_num>\d+))+ # 03/etc and separator
[. _-]*((?P<extra_info>.+?) # Source_Quality_Etc-
((?<![. _-])(?<!WEB) # Make sure this is really the release group
-(?P<release_group>[^- ]+))?)?$ # Group
'''),
('standard_cpas_bien',
# [www.Cpasbien.me] Dexter.S07E04.FRENCH.LD.HDTV.XviD-MiNDe
'''
\[[a-zA-Z0-9\.]{2,20}\][. _-]+
(?P<series_name>.+?)[. _-]+ # Show_Name and separator
s(?P<season_num>\d+)[. _-]* # S01 and optional separator
e(?P<ep_num>\d+) # E02 and separator
[. _-]*((?P<extra_info>.+?) # Source_Quality_Etc-
((?<![. _-])(?<!WEB) # Make sure this is really the release group
-(?P<release_group>[^- ]+))?)?$ # Group
'''),
('standard_ep',
# Show.Name.S01EP02.Source.Quality.Etc-Group
# Show Name - S01EP02 - My Ep Name
# Show.Name.S01.EP03.My.Ep.Name
'''
^((?P<series_name>.+?)[. _-]+)? # Show_Name and separator
s(?P<season_num>\d+)[. _-]* # S01 and optional separator
ep(?P<ep_num>\d+) # E02 and separator
[. _-]*((?P<extra_info>.+?) # Source_Quality_Etc-
((?<![. _-])(?<!WEB) # Make sure this is really the release group
-(?P<release_group>[^- ]+))?)?$ # Group
'''),
('standard',
# Show.Name.S01E02.Source.Quality.Etc-Group
# Show Name - S01E02 - My Ep Name
# Show.Name.S01.E03.My.Ep.Name
# Show.Name.S01E02E03.Source.Quality.Etc-Group
# Show Name - S01E02-03 - My Ep Name
# Show.Name.S01.E02.E03
'''
^((?P<series_name>.+?)[. _-]+)? # Show_Name and separator
s(?P<season_num>\d+)[. _-]* # S01 and optional separator
e(?P<ep_num>\d+) # E02 and separator
(([. _-]*e|-) # linking e/- char
(?P<extra_ep_num>(?!(1080|720)[pi])\d+))* # additional E03/etc
[. _-]*((?P<extra_info>.+?) # Source_Quality_Etc-
((?<![. _-])(?<!WEB) # Make sure this is really the release group
-(?P<release_group>[^- ]+))?)?$ # Group
'''),
('fov',
# Show_Name.1x02.Source_Quality_Etc-Group
# Show Name - 1x02 - My Ep Name
# Show_Name.1x02x03x04.Source_Quality_Etc-Group
# Show Name - 1x02-03-04 - My Ep Name
'''
^((?P<series_name>.+?)[\[. _-]+)? # Show_Name and separator
(?P<season_num>\d+)x # 1x
(?P<ep_num>\d+) # 02 and separator
(([. _-]*x|-) # linking x/- char
(?P<extra_ep_num>
(?!(1080|720)[pi])(?!(?<=x)264) # ignore obviously wrong multi-eps
\d+))* # additional x03/etc
[\]. _-]*((?P<extra_info>.+?) # Source_Quality_Etc-
((?<![. _-])(?<!WEB) # Make sure this is really the release group
-(?P<release_group>[^- ]+))?)?$ # Group
'''),
('scene_date_format',
# Show.Name.2010.11.23.Source.Quality.Etc-Group
# Show Name - 2010-11-23 - Ep Name
'''
^((?P<series_name>.+?)[. _-]+)? # Show_Name and separator
(?P<air_year>\d{4})[. _-]+ # 2010 and separator
(?P<air_month>\d{2})[. _-]+ # 11 and separator
(?P<air_day>\d{2}) # 23 and separator
[. _-]*((?P<extra_info>.+?) # Source_Quality_Etc-
((?<![. _-])(?<!WEB) # Make sure this is really the release group
-(?P<release_group>[^- ]+))?)?$ # Group
'''),
('stupid-mix',
# tpz-show102Source_Quality_Etc
'''
[a-zA-Z0-9]{2,6}[. _-]+ # tpz-abc
(?P<series_name>.+?)[. _-]+ # Show Name and separator
(?!264) # don't count x264
(?P<season_num>\d{1,2}) # 1
(?P<ep_num>\d{2})[. _-]+ # 02
(?P<extra_info>.+)$ # Source_Quality_Etc-
'''),
('stupid',
# tpz-abc102
'''
(?P<release_group>.+?)-\w+?[\. ]? # tpz-abc
(?!264) # don't count x264
(?P<season_num>\d{1,2}) # 1
(?P<ep_num>\d{2})$ # 02
'''),
('verbose',
# Show Name Season 1 Episode 2 Ep Name
'''
^(?P<series_name>.+?)[. _-]+ # Show Name and separator
(sea|sai)son[. _-]+ # season and separator
(?P<season_num>\d+)[. _-]+ # 1
episode[. _-]+ # episode and separator
(?P<ep_num>\d+)[. _-]+ # 02 and separator
(?P<extra_info>.+)$ # Source_Quality_Etc-
'''),
('season_only',
# Show.Name.S01.Source.Quality.Etc-Group
'''
^((?P<series_name>.+?)[. _-]+)? # Show_Name and separator
s((ea|ai)son[. _-])? # S01/Season 01
(?P<season_num>\d+)[. _-]* # S01 and optional separator
[. _-]*((?P<extra_info>.+?) # Source_Quality_Etc-
((?<![. _-])(?<!WEB) # Make sure this is really the release group
-(?P<release_group>[^- ]+))?)?$ # Group
'''
),
('no_season_multi_ep',
# Show.Name.E02-03
# Show.Name.E02.2010
'''
^((?P<series_name>.+?)[. _-]+)? # Show_Name and separator
(e(p(isode)?)?|part|pt)[. _-]? # e, ep, episode, or part
(?P<ep_num>(\d+|[ivx]+)) # first ep num
((([. _-]+(and|&|to)[. _-]+)|-) # and/&/to joiner
(?P<extra_ep_num>(?!(1080|720)[pi])(\d+|[ivx]+))[. _-]) # second ep num
([. _-]*(?P<extra_info>.+?) # Source_Quality_Etc-
((?<![. _-])(?<!WEB) # Make sure this is really the release group
-(?P<release_group>[^- ]+))?)?$ # Group
'''
),
('no_season_general',
# Show.Name.E23.Test
# Show.Name.Part.3.Source.Quality.Etc-Group
# Show.Name.Part.1.and.Part.2.Blah-Group
'''
^((?P<series_name>.+?)[. _-]+)? # Show_Name and separator
(e(p(isode)?)?|part|pt)[. _-]? # e, ep, episode, or part
(?P<ep_num>(\d+|([ivx]+(?=[. _-])))) # first ep num
([. _-]+((and|&|to)[. _-]+)? # and/&/to joiner
((e(p(isode)?)?|part|pt)[. _-]?) # e, ep, episode, or part
(?P<extra_ep_num>(?!(1080|720)[pi])
(\d+|([ivx]+(?=[. _-]))))[. _-])* # second ep num
([. _-]*(?P<extra_info>.+?) # Source_Quality_Etc-
((?<![. _-])(?<!WEB) # Make sure this is really the release group
-(?P<release_group>[^- ]+))?)?$ # Group
'''
),
('bare',
# Show.Name.102.Source.Quality.Etc-Group
'''
^(?P<series_name>.+?)[. _-]+ # Show_Name and separator
(?P<season_num>\d{1,2}) # 1
(?P<ep_num>\d{2}) # 02 and separator
([. _-]+(?P<extra_info>(?!\d{3}[. _-]+)[^-]+) # Source_Quality_Etc-
(-(?P<release_group>.+))?)?$ # Group
'''),
('no_season',
# Show Name - 01 - Ep Name
# 01 - Ep Name
# 01 - Ep Name
'''
^((?P<series_name>.+?)(?:[. _-]{2,}|[. _]))? # Show_Name and separator
(?P<ep_num>\d{1,2}) # 02
(?:-(?P<extra_ep_num>\d{1,2}))* # 02
[. _-]+((?P<extra_info>.+?) # Source_Quality_Etc-
((?<![. _-])(?<!WEB) # Make sure this is really the release group
-(?P<release_group>[^- ]+))?)?$ # Group
'''
),
('mm',
# engrenages S0311 HDTV Divx
'''
^(?P<series_name>.+?)[. _-]+ # Show_Name and separator
s(?P<season_num>\d+)[. _-]* # S01 and optional separator
(?P<ep_num>\d+) # 02 and separator
'''
),
]
# One pattern per known language: the language word (optionally followed by
# "dubbed") embedded between separators or word characters.
language_regexes = dict(
    (code, '(^|\w|[. _-])*(' + word + ')(([. _-])(dubbed))?\w*([. _-]|$)')
    for code, word in showLanguages.iteritems()
)
| gpl-3.0 |
Cornices/cornice.ext.swagger | tests/converters/test_schema.py | 1 | 9298 | import unittest
import colander
from cornice_swagger.converters import convert_schema as convert
from cornice_swagger.converters import TypeConversionDispatcher
from cornice_swagger.converters.exceptions import NoSuchConverter
from ..support import AnyType, AnyTypeConverter
class ConversionTest(unittest.TestCase):
    """Tests for the top-level convert() dispatcher and converter registration."""

    def test_validate_all(self):
        # colander.All should merge each member validator's constraints
        # into a single swagger property dict.
        node = colander.SchemaNode(colander.String(),
                                   validator=colander.All(
                                       colander.Length(12, 42),
                                       colander.Regex(r'foo*bar')
                                   ))
        ret = convert(node)
        self.assertDictEqual(ret, {
            'type': 'string',
            'pattern': 'foo*bar',
            'maxLength': 42,
            'minLength': 12,
        })

    def test_support_custom_converters(self):
        node = colander.SchemaNode(AnyType())
        custom_converters = {AnyType: AnyTypeConverter}
        converter = TypeConversionDispatcher(custom_converters)
        ret = converter(node)
        # assertEquals is a deprecated alias of assertEqual (removed in
        # Python 3.12); use the canonical name.
        self.assertEqual(ret, {})

    def test_support_default_converter(self):
        node = colander.SchemaNode(AnyType())
        converter = TypeConversionDispatcher(default_converter=AnyTypeConverter)
        ret = converter(node)
        self.assertEqual(ret, {})

    def test_raise_no_such_converter_on_invalid_type(self):
        # A schema type with no registered converter must raise NoSuchConverter.
        node = colander.SchemaNode(dict)
        self.assertRaises(NoSuchConverter, convert, node)
class StringConversionTest(unittest.TestCase):
    """String nodes map to swagger 'string' properties; validators add facets."""

    def test_sanity(self):
        schema = colander.SchemaNode(colander.String())
        self.assertDictEqual(convert(schema), {'type': 'string'})

    def test_validate_default(self):
        schema = colander.SchemaNode(colander.String(), default='foo')
        expected = {'type': 'string', 'default': 'foo'}
        self.assertDictEqual(convert(schema), expected)

    def test_validate_length(self):
        schema = colander.SchemaNode(colander.String(),
                                     validator=colander.Length(12, 42))
        expected = {'type': 'string', 'minLength': 12, 'maxLength': 42}
        self.assertDictEqual(convert(schema), expected)

    def test_validate_regex(self):
        schema = colander.SchemaNode(colander.String(),
                                     validator=colander.Regex(r'foo*bar'))
        expected = {'type': 'string', 'pattern': 'foo*bar'}
        self.assertDictEqual(convert(schema), expected)

    def test_validate_regex_email(self):
        # Email is a Regex subclass that should become format: email,
        # not a raw pattern.
        schema = colander.SchemaNode(colander.String(),
                                     validator=colander.Email())
        expected = {'type': 'string', 'format': 'email'}
        self.assertDictEqual(convert(schema), expected)

    def test_validate_regex_url(self):
        schema = colander.SchemaNode(colander.String(),
                                     validator=colander.url)
        expected = {'type': 'string', 'format': 'url'}
        self.assertDictEqual(convert(schema), expected)

    def test_validate_oneof(self):
        schema = colander.SchemaNode(colander.String(),
                                     validator=colander.OneOf(["one", "two"]))
        expected = {'type': 'string', 'enum': ['one', 'two']}
        self.assertDictEqual(convert(schema), expected)

    def test_title(self):
        schema = colander.SchemaNode(colander.String(), title='foo')
        expected = {'type': 'string', 'title': 'foo'}
        self.assertDictEqual(convert(schema), expected)

    def test_description(self):
        schema = colander.SchemaNode(colander.String(),
                                     description='bar')
        expected = {'type': 'string', 'description': 'bar'}
        self.assertDictEqual(convert(schema), expected)
class IntegerConversionTest(unittest.TestCase):
    """Integer nodes map to swagger 'integer' properties; validators add facets."""

    def test_sanity(self):
        schema = colander.SchemaNode(colander.Integer())
        self.assertDictEqual(convert(schema), {'type': 'integer'})

    def test_default(self):
        schema = colander.SchemaNode(colander.Integer(), default=1)
        expected = {'type': 'integer', 'default': 1}
        self.assertDictEqual(convert(schema), expected)

    def test_enum(self):
        schema = colander.SchemaNode(colander.Integer(),
                                     validator=colander.OneOf([1, 2, 3, 4]))
        expected = {'type': 'integer', 'enum': [1, 2, 3, 4]}
        self.assertDictEqual(convert(schema), expected)

    def test_range(self):
        schema = colander.SchemaNode(colander.Integer(),
                                     validator=colander.Range(111, 555))
        expected = {'type': 'integer', 'minimum': 111, 'maximum': 555}
        self.assertDictEqual(convert(schema), expected)
class DateTimeConversionTest(unittest.TestCase):
    """DateTime nodes become string properties with the 'date-time' format."""

    def test_sanity(self):
        schema = colander.SchemaNode(colander.DateTime())
        expected = {'type': 'string', 'format': 'date-time'}
        self.assertDictEqual(convert(schema), expected)
class MappingConversionTest(unittest.TestCase):
    """Mapping schemas convert to swagger objects with per-field properties."""
    def test_sanity(self):
        # An empty mapping is just a bare object with no properties key.
        node = colander.MappingSchema()
        ret = convert(node)
        self.assertDictEqual(ret, {
            'type': 'object',
        })
    def test_required(self):
        # A field without `missing` is required and must be listed so.
        class Mapping(colander.MappingSchema):
            foo = colander.SchemaNode(colander.String())
        node = Mapping()
        ret = convert(node)
        self.assertDictEqual(ret, {
            'type': 'object',
            'properties': {
                'foo': {
                    'title': 'Foo',
                    'type': 'string'
                }
            },
            'required': ['foo']
        })
    def test_not_required(self):
        # missing=colander.drop makes the field optional: no 'required' key.
        class Mapping(colander.MappingSchema):
            foo = colander.SchemaNode(colander.String(),
                                      missing=colander.drop)
        node = Mapping()
        ret = convert(node)
        self.assertDictEqual(ret, {
            'type': 'object',
            'properties': {
                'foo': {
                    'title': 'Foo',
                    'type': 'string'
                }
            },
        })
    def test_nested_schema(self):
        # Nested mappings are converted recursively as sub-objects.
        class BaseMapping(colander.MappingSchema):
            foo = colander.SchemaNode(colander.String(),
                                      missing=colander.drop)
        class TopMapping(colander.MappingSchema):
            bar = BaseMapping(missing=colander.drop)
        node = TopMapping()
        ret = convert(node)
        self.assertDictEqual(ret, {
            'type': 'object',
            'properties': {
                'bar': {
                    'title': 'Bar',
                    'type': 'object',
                    'properties': {
                        'foo': {
                            'title': 'Foo',
                            'type': 'string'
                        }
                    }
                }
            },
        })
    def test_open_schema(self):
        # unknown='preserve' on the colander Mapping type should surface as
        # swagger's additionalProperties (an open object).
        class Mapping(colander.MappingSchema):
            foo = colander.SchemaNode(colander.String(),
                                      missing=colander.drop)
            @staticmethod
            def schema_type():
                return colander.Mapping(unknown='preserve')
        node = Mapping()
        ret = convert(node)
        self.assertDictEqual(ret, {
            'type': 'object',
            'properties': {
                'foo': {
                    'title': 'Foo',
                    'type': 'string'
                }
            },
            'additionalProperties': {}
        })
class SequenceConversionTest(unittest.TestCase):
    """Sequence schemas convert to swagger arrays with an 'items' sub-schema."""

    # NOTE: these methods were previously named primitive_sequence_test /
    # mapping_sequence_test; unittest (and pytest) only collect methods whose
    # names start with "test", so they silently never ran.  Renamed with the
    # test_ prefix, consistent with every other test class in this module.
    def test_primitive_sequence(self):
        class Integers(colander.SequenceSchema):
            num = colander.SchemaNode(colander.Integer())
        # Convert an instance (the old code passed the class itself, which
        # has no .typ and cannot be converted).
        ret = convert(Integers())
        self.assertDictEqual(ret, {
            'type': 'array',
            'items': {
                'type': 'integer',
            },
        })

    def test_mapping_sequence(self):
        class BaseMapping(colander.MappingSchema):
            name = colander.SchemaNode(colander.String())
            number = colander.SchemaNode(colander.Integer())

        class BaseMappings(colander.SequenceSchema):
            base_mapping = BaseMapping()

        schema = BaseMappings()
        ret = convert(schema)
        self.assertDictEqual(ret, {
            'type': 'array',
            'items': {
                'type': 'object',
                'properties': {
                    'name': {
                        'type': 'string',
                        'title': 'Name',
                    },
                    'number': {
                        'type': 'integer',
                        'title': 'Number',
                    }
                },
                'required': ['name', 'number'],
                'title': 'Base Mapping',
            },
        })
| apache-2.0 |
radzhome/AWS-ElasticBeanstalk-CLI | eb/macosx/python2.7/scli/cli_parse.py | 4 | 8732 | #!/usr/bin/env python
# ==============================================================================
# Copyright 2012 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Amazon Software License (the "License"). You may not use
# this file except in compliance with the License. A copy of the License is
# located at
#
# http://aws.amazon.com/asl/
#
# or in the "license" file accompanying this file. This file is distributed on
# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, express or
# implied. See the License for the specific language governing permissions
# and limitations under the License.
#==============================================================================
import argparse
from contextlib import closing
import logging
from StringIO import StringIO
from lib.utility import misc
from lib.elasticbeanstalk.model import EnvironmentTier
from scli.resources import CLISwitch, CLISwitchMsg, EBSCliAttr
from scli.constants import CommandType, ServiceDefault, ServiceRegionId, \
ParameterName, ParameterSource
from scli.parameter import Parameter
from scli.exception import ArgumentError
log = logging.getLogger('cli')
def _word_join(word_list, separator=u''):
    """Coerce every element of *word_list* to unicode and join with *separator*."""
    return separator.join([misc.to_unicode(word) for word in word_list])
def command(string):
    """argparse 'type' hook: resolve *string* to its canonical CommandType item.

    Matching is case-insensitive and ignores surrounding whitespace.
    Raises AttributeError for unrecognised commands.
    """
    cmd = misc.to_unicode(string)
    wanted = cmd.lower().strip()
    for candidate in CommandType:
        if candidate.lower() == wanted:
            return candidate
    raise AttributeError(EBSCliAttr.InvalidCommand.format(cmd))
def subcommand(string):
    """argparse 'type' hook: pass sub-command tokens through as unicode."""
    return misc.to_unicode(string)
def _init_parser(parser):
    """Register every supported command-line switch on *parser*.

    Switch spellings, dest names and help strings come from the CLISwitch /
    CLISwitchMsg / ParameterName tables so they stay consistent with the
    resources module; the dest of each option is the ParameterName itself,
    which parse() relies on when copying args into the parameter pool.
    """
    commands = u', '.join(map(unicode.lower, CommandType))
    # Positional: the command itself (validated/canonicalised by command()).
    parser.add_argument(CLISwitch[ParameterName.Command],
                        type=command,
                        metavar=u'COMMAND', help=commands)
    # Positional: zero or more sub-command tokens.
    parser.add_argument(CLISwitch[ParameterName.SubCommand],
                        type=subcommand, nargs='*',
                        default=None,
                        metavar=u'SUBCOMMAND', )
    # AWS credential
    parser.add_argument(u'-I', u'--' + CLISwitch[ParameterName.AwsAccessKeyId],
                        dest=ParameterName.AwsAccessKeyId,
                        metavar=u'ACCESS_KEY_ID',
                        help=CLISwitchMsg[ParameterName.AwsAccessKeyId])
    parser.add_argument(u'-S', '--' + CLISwitch[ParameterName.AwsSecretAccessKey],
                        dest=ParameterName.AwsSecretAccessKey,
                        metavar=u'SECRET_ACCESS_KEY',
                        help=CLISwitchMsg[ParameterName.AwsSecretAccessKey])
    parser.add_argument(u'--' + CLISwitch[ParameterName.AwsCredentialFile],
                        dest=ParameterName.AwsCredentialFile,
                        metavar=u'FILE_PATH_NAME',
                        help=CLISwitchMsg[ParameterName.AwsCredentialFile])
    # Application/environment
    parser.add_argument(u'-t', u'--' + CLISwitch[ParameterName.EnvironmentTier],
                        dest=ParameterName.EnvironmentTier,
                        metavar=u'ENVIRONMENT_TIER',
                        help=CLISwitchMsg[ParameterName.EnvironmentTier])
    # nargs='+': solution stack names contain spaces; parse() re-joins them.
    parser.add_argument(u'-s', u'--' + CLISwitch[ParameterName.SolutionStack],
                        dest=ParameterName.SolutionStack, nargs='+',
                        metavar=u'',
                        help=CLISwitchMsg[ParameterName.SolutionStack])
    parser.add_argument(u'-a', u'--' + CLISwitch[ParameterName.ApplicationName],
                        dest=ParameterName.ApplicationName,
                        metavar=u'APPLICATION_NAME',
                        help=CLISwitchMsg[ParameterName.ApplicationName])
    parser.add_argument(u'-l', u'--' + CLISwitch[ParameterName.ApplicationVersionName],
                        dest=ParameterName.ApplicationVersionName,
                        metavar=u'VERSION_LABEL',
                        help=CLISwitchMsg[ParameterName.ApplicationVersionName])
    parser.add_argument(u'-e', u'--' + CLISwitch[ParameterName.EnvironmentName],
                        dest=ParameterName.EnvironmentName,
                        metavar=u'ENVIRONMENT_NAME',
                        help=CLISwitchMsg[ParameterName.EnvironmentName])
    # Output flags (store a sentinel constant rather than True)
    parser.add_argument(u'--' + CLISwitch[ParameterName.Verbose],
                        action='store_const', const=ServiceDefault.ENABLED,
                        dest=ParameterName.Verbose,
                        metavar=u'',
                        help=CLISwitchMsg[ParameterName.Verbose])
    parser.add_argument(u'-f', u'--' + CLISwitch[ParameterName.Force],
                        action='store_const', const=ServiceDefault.ENABLED,
                        dest=ParameterName.Force,
                        metavar=u'',
                        help=CLISwitchMsg[ParameterName.Force])
    # Service
    parser.add_argument(u'--' + CLISwitch[ParameterName.WaitForFinishTimeout], type=int,
                        dest=ParameterName.WaitForFinishTimeout,
                        metavar=u'TIMEOUT_IN_SEC',
                        help=unicode.format(CLISwitchMsg[ParameterName.WaitForFinishTimeout],
                                            ServiceDefault.WAIT_TIMEOUT_IN_SEC))
    parser.add_argument(u'--' + CLISwitch[ParameterName.Region],
                        dest=ParameterName.Region,
                        metavar=u'REGION',
                        help=CLISwitchMsg[ParameterName.Region])
    parser.add_argument(u'--' + CLISwitch[ParameterName.ServiceEndpoint],
                        dest=ParameterName.ServiceEndpoint,
                        metavar=u'ENDPOINT',
                        help=CLISwitchMsg[ParameterName.ServiceEndpoint])
    # SCli Helper switch
    parser.add_argument(u'--version', action=u'version', version=EBSCliAttr.Version)
# Parameters whose parsed values are not plain strings; parse() skips the
# misc.to_unicode() conversion for these.
NON_STRING_PARAMETERS = [ParameterName.EnvironmentTier]
def parse(parameter_pool, line=None):
    '''Parse command-line arguments into *parameter_pool*.

    If *line* is given it is split on whitespace and parsed instead of
    sys.argv (useful for tests/interactive use).  Returns the raw args dict.
    '''
    parser = ArgumentParser(description=EBSCliAttr.Name,
                            usage=EBSCliAttr.Usage)
    _init_parser(parser)
    if line is not None:
        args = vars(parser.parse_args(line.split()))
    else:
        args = vars(parser.parse_args())
    # Post-processing of values that argparse leaves in raw form.
    if args[ParameterName.EnvironmentTier] is not None:
        tier_serialized = args[ParameterName.EnvironmentTier]
        args[ParameterName.EnvironmentTier] = EnvironmentTier.from_serialized_string(tier_serialized)
    if args[ParameterName.SolutionStack] is not None:
        # nargs='+' gave us a token list; re-join into the full stack name.
        solution_stack = _word_join(args[ParameterName.SolutionStack], u' ')
        args[ParameterName.SolutionStack] = solution_stack
    if args[ParameterName.Region] is not None:
        # Reverse-map region id -> region name.  Relies on keys() and
        # values() returning corresponding orders (true in Python 2).
        region_id = args[ParameterName.Region]
        region = ServiceRegionId.keys()[ServiceRegionId.values().index(region_id)]
        args[ParameterName.Region] = region
    # Store command line arguments into parameter pool
    for arg, value in args.iteritems():
        arg = misc.to_unicode(arg, convert_none=False)
        # Try to convert string/list-of-string parameters to unicode
        if arg not in NON_STRING_PARAMETERS:
            if isinstance(value, list):
                value = [misc.to_unicode(item) for item in value]
            else:
                value = misc.to_unicode(value, convert_none=False)
        if arg == CLISwitch[ParameterName.Command]:
            parameter_pool.put(Parameter(ParameterName.Command,
                                         value,
                                         ParameterSource.CliArgument))
        elif arg == CLISwitch[ParameterName.SubCommand]:
            parameter_pool.put(Parameter(ParameterName.SubCommand,
                                         value,
                                         ParameterSource.CliArgument))
        elif value is not None:
            # Other switches are stored only when actually supplied.
            parameter_pool.put(Parameter(arg,
                                         value,
                                         ParameterSource.CliArgument))
    log.info(u'Finished parsing command line arguments')
    if log.isEnabledFor(logging.DEBUG):
        log.debug(u'Received arguments: {0}'. \
                  format(misc.collection_to_string(parameter_pool.parameter_names)))
    return args
class ArgumentParser(argparse.ArgumentParser):
    '''Subclass of argparse.ArgumentParser to override behavior of error()'''
    def error(self, error_message):
        # argparse's default error() prints usage and calls sys.exit(2);
        # raise ArgumentError instead (with the usage text captured into the
        # message) so the CLI can handle bad arguments itself.
        with closing(StringIO()) as usage:
            self.print_usage(usage)
            message = EBSCliAttr.ErrorMsg.format(error_message, usage.getvalue(), self.prog)
            raise ArgumentError(message)
| apache-2.0 |
liorvh/infernal-twin | build/reportlab/build/lib.linux-i686-2.7/reportlab/graphics/charts/axes.py | 25 | 90090 | #Copyright ReportLab Europe Ltd. 2000-2012
#see license.txt for license details
__version__=''' $Id$ '''
__doc__="""Collection of axes for charts.
The current collection comprises axes for charts using cartesian
coordinate systems. All axes might have tick marks and labels.
There are two dichotomies for axes: one of X and Y flavours and
another of category and value flavours.
Category axes have an ordering but no metric. They are divided
into a number of equal-sized buckets. Their tick marks or labels,
if available, go BETWEEN the buckets, and the labels are placed
below to/left of the X/Y-axis, respectively.
Value axes have an ordering AND metric. They correspond to a nu-
meric quantity. Value axis have a real number quantity associated
with it. The chart tells it where to go.
The most basic axis divides the number line into equal spaces
and has tickmarks and labels associated with each; later we
will add variants where you can specify the sampling
interval.
The charts using axis tell them where the labels should be placed.
Axes of complementary X/Y flavours can be connected to each other
in various ways, i.e. with a specific reference point, like an
x/value axis to a y/value (or category) axis. In this case the
connection can be either at the top or bottom of the former or
at any absolute value (specified in points) or at some value of
the former axes in its own coordinate system.
"""
from reportlab.lib.validators import isNumber, isNumberOrNone, isListOfStringsOrNone, isListOfNumbers, \
isListOfNumbersOrNone, isColorOrNone, OneOf, isBoolean, SequenceOf, \
isString, EitherOr, Validator, NoneOr, isInstanceOf, \
isNormalDate, isNoneOrCallable
from reportlab.lib.attrmap import *
from reportlab.lib import normalDate
from reportlab.graphics.shapes import Drawing, Line, PolyLine, Rect, Group, STATE_DEFAULTS, _textBoxLimits, _rotatedBoxLimits
from reportlab.graphics.widgetbase import Widget, TypedPropertyCollection
from reportlab.graphics.charts.textlabels import Label, PMVLabel
from reportlab.graphics.charts.utils import nextRoundNumber
from reportlab.graphics.widgets.grids import ShadedRect
from reportlab.lib.colors import Color
from reportlab.lib.utils import isSeq
import copy
try:
reduce # Python 2.x
except NameError:
from functools import reduce
# Helpers.
def _findMinMaxValue(V, x, default, func, special=None):
    """Apply *func* (min or max) over the x-th components of the rows of V.

    If the rows hold point sequences (V[0][0] is itself a sequence), each
    point is first reduced to its x-th element, or via special(point, x, func)
    when *special* is given.  None values are ignored; *default* is returned
    when nothing is left to compare.
    """
    if isSeq(V[0][0]):
        if special:
            extract = lambda point: special(point, x, func)
        else:
            extract = lambda point: point[x]
        V = [[extract(point) for point in row] for row in V]
    rows = [[v for v in row if v is not None] for row in V]
    rows = [row for row in rows if len(row)]
    if not rows:
        return default
    return func([func(row) for row in rows])
def _findMin(V, x, default, special=None):
    """Minimum over V[i][x]; *default* when no comparable values exist."""
    return _findMinMaxValue(V, x, default, min, special=special)
def _findMax(V, x, default, special=None):
    """Maximum over V[i][x]; *default* when no comparable values exist."""
    return _findMinMaxValue(V, x, default, max, special=special)
def _allInt(values):
'''true if all values are int'''
for v in values:
try:
if int(v)!=v: return 0
except:
return 0
return 1
class AxisLineAnnotation:
    '''Create a grid like line using the given user value to draw the line
    kwds may contain
    startOffset if true v is offset from the default grid start position
    endOffset   if true v is offset from the default grid end position
    scaleValue  True/not given --> scale the value
                otherwise use the absolute value
    lo          lowest coordinate to draw default 0
    hi          highest coordinate to draw at default = length
    drawAtLimit True draw line at appropriate limit if its coordinate exceeds the lo, hi range
                False ignore if it's outside the range
    all Line keywords are acceptable
    '''
    def __init__(self,v,**kwds):
        self._v = v
        self._kwds = kwds
    def __call__(self,axis):
        kwds = self._kwds.copy()
        scaleValue = kwds.pop('scaleValue',True)
        endOffset = kwds.pop('endOffset',False)
        # BUGFIX: this previously popped 'endOffset' a second time (always
        # False after the pop above), so startOffset never took effect and a
        # supplied 'startOffset' kwd leaked through onto the Line attributes.
        startOffset = kwds.pop('startOffset',False)
        if axis.isYAxis:
            offs = axis._x
            d0 = axis._y
        else:
            offs = axis._y
            d0 = axis._x
        s = kwds.pop('start',None)
        e = kwds.pop('end',None)
        if s is None or e is None:
            # Fall back to the joined axis' grid extent when available.
            dim = getattr(getattr(axis,'joinAxis',None),'getGridDims',None)
            if dim and hasattr(dim,'__call__'):
                dim = dim()
            if dim:
                if s is None: s = dim[0]
                if e is None: e = dim[1]
            else:
                if s is None: s = 0
                if e is None: e = 0
        hi = kwds.pop('hi',axis._length)+d0
        lo = kwds.pop('lo',0)+d0
        lo,hi=min(lo,hi),max(lo,hi)
        drawAtLimit = kwds.pop('drawAtLimit',False)
        oaglp = axis._get_line_pos
        if not scaleValue:
            # Bypass scaling: treat v as an absolute coordinate.
            axis._get_line_pos = lambda x: x
        try:
            v = self._v
            if endOffset:
                v = v + hi
            elif startOffset:
                v = v + lo
            func = axis._getLineFunc(s-offs,e-offs,kwds.pop('parent',None))
            if not hasattr(axis,'_tickValues'):
                axis._pseudo_configure()
            d = axis._get_line_pos(v)
            if d<lo or d>hi:
                # Outside the drawable band: clamp to it or skip entirely.
                if not drawAtLimit: return None
                if d<lo:
                    d = lo
                else:
                    d = hi
                axis._get_line_pos = lambda x: d
            L = func(v)
            # Remaining kwds are applied verbatim as Line attributes.
            for k,v in kwds.items():
                setattr(L,k,v)
        finally:
            # Always restore the axis' real position function.
            axis._get_line_pos = oaglp
        return L
class AxisBackgroundAnnotation:
    '''Create a set of coloured bars on the background of a chart using axis ticks as the bar borders
    colors is a set of colors to use for the background bars. A colour of None is just a skip.
    Special effects if you pass a rect or Shaded rect instead.
    '''
    def __init__(self,colors,**kwds):
        self._colors = colors
        self._kwds = kwds
    def __call__(self,axis):
        colors = self._colors
        if not colors: return
        kwds = self._kwds.copy()
        isYAxis = axis.isYAxis
        if isYAxis:
            offs = axis._x
            d0 = axis._y
        else:
            offs = axis._y
            d0 = axis._x
        s = kwds.pop('start',None)
        e = kwds.pop('end',None)
        if s is None or e is None:
            # Fall back to the joined axis' grid extent when available.
            dim = getattr(getattr(axis,'joinAxis',None),'getGridDims',None)
            if dim and hasattr(dim,'__call__'):
                dim = dim()
            if dim:
                if s is None: s = dim[0]
                if e is None: e = dim[1]
            else:
                if s is None: s = 0
                if e is None: e = 0
        if not hasattr(axis,'_tickValues'):
            axis._pseudo_configure()
        tv = getattr(axis,'_tickValues',None)
        if not tv: return
        G = Group()
        ncolors = len(colors)
        # Walk consecutive tick pairs; each pair bounds one background bar,
        # coloured by cycling through the supplied colors.
        v0 = axis._get_line_pos(tv[0])
        for i in range(1,len(tv)):
            v1 = axis._get_line_pos(tv[i])
            c = colors[(i-1)%ncolors]
            if c:
                if isYAxis:
                    y = v0
                    x = s
                    height = v1-v0
                    width = e-s
                else:
                    x = v0
                    y = s
                    width = v1-v0
                    height = e-s
                if isinstance(c,Color):
                    # Plain colour: a borderless filled rectangle.
                    r = Rect(x,y,width,height,fillColor=c,strokeColor=None)
                elif isinstance(c,Rect):
                    # Template Rect: clone its styling onto our geometry.
                    r = Rect(x,y,width,height)
                    for k in c.__dict__:
                        if k not in ('x','y','width','height'):
                            setattr(r,k,getattr(c,k))
                elif isinstance(c,ShadedRect):
                    # Template ShadedRect: likewise clone styling only.
                    r = ShadedRect(x=x,y=y,width=width,height=height)
                    for k in c.__dict__:
                        if k not in ('x','y','width','height'):
                            setattr(r,k,getattr(c,k))
                G.add(r)
            v0 = v1
        return G
class TickLU:
    '''lookup special cases for tick values

    Holds (tickValue, replacement) pairs and matches candidate tick values
    against them within an absolute *accuracy* (default 1e-8), so float
    ticks that are "equal enough" still hit their special case.
    '''
    def __init__(self,*T,**kwds):
        self.accuracy = kwds.pop('accuracy',1e-8)
        self.T = T
    def __contains__(self,t):
        accuracy = self.accuracy
        for x,v in self.T:
            if abs(x-t)<accuracy:
                return True
        return False
    def __getitem__(self,t):
        # Use the hoisted accuracy (it was previously assigned but unused,
        # with self.accuracy re-read on every iteration instead).
        accuracy = self.accuracy
        for x,v in self.T:
            if abs(x-t)<accuracy:
                return v
        raise IndexError('cannot locate index %r' % t)
class _AxisG(Widget):
    '''Shared machinery for axes: tick/grid line construction, X/Y flavour
    detection and annotation-aware drawing.'''
    def _get_line_pos(self,v):
        # Map a data value to a drawing coordinate; scale() may return a
        # (position, width) pair, in which case only the position is wanted.
        v = self.scale(v)
        try:
            v = v[0]
        except:
            pass
        return v
    def _cxLine(self,x,start,end):
        # Vertical line at data value x spanning [start,end] above the axis y.
        x = self._get_line_pos(x)
        return Line(x, self._y + start, x, self._y + end)
    def _cyLine(self,y,start,end):
        # Horizontal line at data value y spanning [start,end] right of axis x.
        y = self._get_line_pos(y)
        return Line(self._x + start, y, self._x + end, y)
    def _cxLine3d(self,x,start,end,_3d_dx,_3d_dy):
        # As _cxLine but with a sheared segment for the 3d depth offset.
        x = self._get_line_pos(x)
        y0 = self._y + start
        y1 = self._y + end
        y0, y1 = min(y0,y1),max(y0,y1)
        x1 = x + _3d_dx
        return PolyLine([x,y0,x1,y0+_3d_dy,x1,y1+_3d_dy],strokeLineJoin=1)
    def _cyLine3d(self,y,start,end,_3d_dx,_3d_dy):
        # As _cyLine but with a sheared segment for the 3d depth offset.
        y = self._get_line_pos(y)
        x0 = self._x + start
        x1 = self._x + end
        x0, x1 = min(x0,x1),max(x0,x1)
        y1 = y + _3d_dy
        return PolyLine([x0,y,x0+_3d_dx,y1,x1+_3d_dx,y1],strokeLineJoin=1)
    def _getLineFunc(self, start, end, parent=None):
        # Choose the 2d or 3d line builder depending on the parent chart's
        # 3d offsets, bound to this axis' orientation.
        _3d_dx = getattr(parent,'_3d_dx',None)
        if _3d_dx is not None:
            _3d_dy = getattr(parent,'_3d_dy',None)
            f = self.isYAxis and self._cyLine3d or self._cxLine3d
            return lambda v, s=start, e=end, f=f,_3d_dx=_3d_dx,_3d_dy=_3d_dy: f(v,s,e,_3d_dx=_3d_dx,_3d_dy=_3d_dy)
        else:
            f = self.isYAxis and self._cyLine or self._cxLine
            return lambda v, s=start, e=end, f=f: f(v,s,e)
    def _makeLines(self,g,start,end,strokeColor,strokeWidth,strokeDashArray,strokeLineJoin,strokeLineCap,strokeMiterLimit,parent=None,exclude=[],specials={}):
        # Add one styled line per tick value to group g; coordinates in
        # `exclude` are skipped and per-tick overrides come from `specials`.
        func = self._getLineFunc(start,end,parent)
        if not hasattr(self,'_tickValues'):
            self._pseudo_configure()
        if exclude:
            exf = self.isYAxis and (lambda l: l.y1 in exclude) or (lambda l: l.x1 in exclude)
        else:
            exf = None
        for t in self._tickValues:
            L = func(t)
            if exf and exf(L): continue
            L.strokeColor = strokeColor
            L.strokeWidth = strokeWidth
            L.strokeDashArray = strokeDashArray
            L.strokeLineJoin = strokeLineJoin
            L.strokeLineCap = strokeLineCap
            L.strokeMiterLimit = strokeMiterLimit
            if t in specials:
                for a,v in specials[t].items():
                    setattr(L,a,v)
            g.add(L)
    def makeGrid(self,g,dim=None,parent=None,exclude=[]):
        '''this is only called by a container object'''
        c = self.gridStrokeColor
        w = self.gridStrokeWidth or 0
        if w and c and self.visibleGrid:
            s = self.gridStart
            e = self.gridEnd
            if s is None or e is None:
                # dim may be a callable returning (start,end) from the chart.
                if dim and hasattr(dim,'__call__'):
                    dim = dim()
                if dim:
                    if s is None: s = dim[0]
                    if e is None: e = dim[1]
                else:
                    if s is None: s = 0
                    if e is None: e = 0
            if s or e:
                if self.isYAxis: offs = self._x
                else: offs = self._y
                self._makeLines(g,s-offs,e-offs,c,w,self.gridStrokeDashArray,self.gridStrokeLineJoin,self.gridStrokeLineCap,self.gridStrokeMiterLimit,parent=parent,exclude=exclude,specials=getattr(self,'_gridSpecials',{}))
        self._makeSubGrid(g,dim,parent,exclude=[])
    def _makeSubGrid(self,g,dim=None,parent=None,exclude=[]):
        '''this is only called by a container object'''
        if not (getattr(self,'visibleSubGrid',0) and self.subTickNum>0): return
        c = self.subGridStrokeColor
        w = self.subGridStrokeWidth or 0
        if not(w and c): return
        s = self.subGridStart
        e = self.subGridEnd
        if s is None or e is None:
            if dim and hasattr(dim,'__call__'):
                dim = dim()
            if dim:
                if s is None: s = dim[0]
                if e is None: e = dim[1]
            else:
                if s is None: s = 0
                if e is None: e = 0
        if s or e:
            if self.isYAxis: offs = self._x
            else: offs = self._y
            # _calcSubTicks temporarily swaps _tickValues; restore afterwards.
            otv = self._calcSubTicks()
            try:
                self._makeLines(g,s-offs,e-offs,c,w,self.subGridStrokeDashArray,self.subGridStrokeLineJoin,self.subGridStrokeLineCap,self.subGridStrokeMiterLimit,parent=parent,exclude=exclude)
            finally:
                self._tickValues = otv
    def getGridDims(self,start=None,end=None):
        if start is None: start = (self._x,self._y)[self.isYAxis]
        if end is None: end = start+self._length
        return start,end
    def isYAxis(self):
        # Flavour by explicit _dataIndex, else by class-name convention.
        if getattr(self,'_dataIndex',None)==1: return True
        acn = self.__class__.__name__
        return acn[0]=='Y' or acn[:4]=='AdjY'
    isYAxis = property(isYAxis)
    def isXAxis(self):
        if getattr(self,'_dataIndex',None)==0: return True
        acn = self.__class__.__name__
        return acn[0]=='X' or acn[:11]=='NormalDateX'
    isXAxis = property(isXAxis)
    def addAnnotations(self,g,A=None):
        # BUGFIX: the result of getattr was previously discarded, leaving A
        # as None and making the loop below fail when called without A.
        if A is None: A = getattr(self,'annotations',[])
        for x in A:
            g.add(x(self))
    def _splitAnnotations(self):
        # Bucket annotations by their phase flag; unflagged ones run 'late'.
        A = getattr(self,'annotations',[])[:]
        D = {}
        for v in ('early','beforeAxis','afterAxis','beforeTicks',
                'afterTicks','beforeTickLabels',
                'afterTickLabels','late'):
            R = [].append
            P = [].append
            for a in A:
                if getattr(a,v,0):
                    R(a)
                else:
                    P(a)
            D[v] = R.__self__
            A[:] = P.__self__
        D['late'] += A
        return D
    def draw(self):
        g = Group()
        A = self._splitAnnotations()
        self.addAnnotations(g,A['early'])
        if self.visible:
            self.addAnnotations(g,A['beforeAxis'])
            g.add(self.makeAxis())
            self.addAnnotations(g,A['afterAxis'])
            self.addAnnotations(g,A['beforeTicks'])
            g.add(self.makeTicks())
            self.addAnnotations(g,A['afterTicks'])
            self.addAnnotations(g,A['beforeTickLabels'])
            g.add(self.makeTickLabels())
            self.addAnnotations(g,A['afterTickLabels'])
        self.addAnnotations(g,A['late'])
        return g
class CALabel(PMVLabel):
    # Category-axis label: a PMVLabel plus a fractional position saying
    # where within a category's width the label is anchored.
    _attrMap = AttrMap(BASE=PMVLabel,
        labelPosFrac = AttrMapValue(isNumber, desc='where in the category range [0,1] the labels should be anchored'),
        )
    def __init__(self,**kw):
        PMVLabel.__init__(self,**kw)
        self._setKeywords(
            labelPosFrac = 0.5,  # default: centre of the category
            )
# Category axes.
class CategoryAxis(_AxisG):
    "Abstract category axis, unusable in itself."
    _nodoc = 1
    _attrMap = AttrMap(
        visible = AttrMapValue(isBoolean, desc='Display entire object, if true.'),
        visibleAxis = AttrMapValue(isBoolean, desc='Display axis line, if true.'),
        visibleTicks = AttrMapValue(isBoolean, desc='Display axis ticks, if true.'),
        visibleLabels = AttrMapValue(isBoolean, desc='Display axis labels, if true.'),
        visibleGrid = AttrMapValue(isBoolean, desc='Display axis grid, if true.'),
        strokeWidth = AttrMapValue(isNumber, desc='Width of axis line and ticks.'),
        strokeColor = AttrMapValue(isColorOrNone, desc='Color of axis line and ticks.'),
        strokeDashArray = AttrMapValue(isListOfNumbersOrNone, desc='Dash array used for axis line.'),
        strokeLineCap = AttrMapValue(OneOf(0,1,2),desc="Line cap 0=butt, 1=round & 2=square"),
        strokeLineJoin = AttrMapValue(OneOf(0,1,2),desc="Line join 0=miter, 1=round & 2=bevel"),
        strokeMiterLimit = AttrMapValue(isNumber,desc="miter limit control miter line joins"),
        gridStrokeWidth = AttrMapValue(isNumber, desc='Width of grid lines.'),
        gridStrokeColor = AttrMapValue(isColorOrNone, desc='Color of grid lines.'),
        gridStrokeDashArray = AttrMapValue(isListOfNumbersOrNone, desc='Dash array used for grid lines.'),
        gridStrokeLineCap = AttrMapValue(OneOf(0,1,2),desc="Grid Line cap 0=butt, 1=round & 2=square"),
        gridStrokeLineJoin = AttrMapValue(OneOf(0,1,2),desc="Grid Line join 0=miter, 1=round & 2=bevel"),
        gridStrokeMiterLimit = AttrMapValue(isNumber,desc="Grid miter limit control miter line joins"),
        gridStart = AttrMapValue(isNumberOrNone, desc='Start of grid lines wrt axis origin'),
        gridEnd = AttrMapValue(isNumberOrNone, desc='End of grid lines wrt axis origin'),
        drawGridLast = AttrMapValue(isBoolean, desc='if true draw gridlines after everything else.'),
        labels = AttrMapValue(None, desc='Handle of the axis labels.'),
        categoryNames = AttrMapValue(isListOfStringsOrNone, desc='List of category names.'),
        joinAxis = AttrMapValue(None, desc='Join both axes if true.'),
        joinAxisPos = AttrMapValue(isNumberOrNone, desc='Position at which to join with other axis.'),
        reverseDirection = AttrMapValue(isBoolean, desc='If true reverse category direction.'),
        style = AttrMapValue(OneOf('parallel','stacked','parallel_3d'),"How common category bars are plotted"),
        labelAxisMode = AttrMapValue(OneOf('high','low','axis', 'axispmv'), desc="Like joinAxisMode, but for the axis labels"),
        tickShift = AttrMapValue(isBoolean, desc='Tick shift typically'),
        loPad = AttrMapValue(isNumber, desc='extra inner space before start of the axis'),
        hiPad = AttrMapValue(isNumber, desc='extra inner space after end of the axis'),
        annotations = AttrMapValue(None,desc='list of annotations'),
        loLLen = AttrMapValue(isNumber, desc='extra line length before start of the axis'),
        hiLLen = AttrMapValue(isNumber, desc='extra line length after end of the axis'),
        skipGrid = AttrMapValue(OneOf('none','top','both','bottom'),"grid lines to skip top bottom both none"),
        innerTickDraw = AttrMapValue(isNoneOrCallable, desc="Callable to replace _drawInnerTicks"),
        )
    def __init__(self):
        assert self.__class__.__name__!='CategoryAxis', "Abstract Class CategoryAxis Instantiated"
        # private properties set by methods. The initial values
        # here are to make demos easy; they would always be
        # overridden in real life.
        self._x = 50
        self._y = 50
        self._length = 100
        self._catCount = 0
        # public properties
        self.visible = 1
        self.visibleAxis = 1
        self.visibleTicks = 1
        self.visibleLabels = 1
        self.visibleGrid = 0
        self.drawGridLast = False
        self.strokeWidth = 1
        self.strokeColor = STATE_DEFAULTS['strokeColor']
        self.strokeDashArray = STATE_DEFAULTS['strokeDashArray']
        self.gridStrokeLineJoin = self.strokeLineJoin = STATE_DEFAULTS['strokeLineJoin']
        self.gridStrokeLineCap = self.strokeLineCap = STATE_DEFAULTS['strokeLineCap']
        self.gridStrokeMiterLimit = self.strokeMiterLimit = STATE_DEFAULTS['strokeMiterLimit']
        self.gridStrokeWidth = 0.25
        self.gridStrokeColor = STATE_DEFAULTS['strokeColor']
        self.gridStrokeDashArray = STATE_DEFAULTS['strokeDashArray']
        self.gridStart = self.gridEnd = None
        self.strokeLineJoin = STATE_DEFAULTS['strokeLineJoin']
        self.strokeLineCap = STATE_DEFAULTS['strokeLineCap']
        self.strokeMiterLimit = STATE_DEFAULTS['strokeMiterLimit']
        self.labels = TypedPropertyCollection(CALabel)
        # if None, they don't get labels. If provided,
        # you need one name per data point and they are
        # used for label text.
        self.categoryNames = None
        self.joinAxis = None
        self.joinAxisPos = None
        self.joinAxisMode = None
        self.labelAxisMode = 'axis'
        self.reverseDirection = 0
        self.style = 'parallel'
        #various private things which need to be initialized
        self._labelTextFormat = None
        self.tickShift = 0
        self.loPad = 0
        self.hiPad = 0
        self.loLLen = 0
        self.hiLLen = 0
    def setPosition(self, x, y, length):
        # ensure floating point
        self._x = float(x)
        self._y = float(y)
        self._length = float(length)
    def configure(self, multiSeries,barWidth=None):
        # Size categories from the longest series; padding shrinks the
        # usable length before dividing it into equal buckets.
        self._catCount = max(list(map(len,multiSeries)))
        self._barWidth = barWidth or ((self._length-self.loPad-self.hiPad)/float(self._catCount or 1))
        self._calcTickmarkPositions()
        if self.labelAxisMode == 'axispmv':
            # Per-category totals, used to position labels against values.
            self._pmv = [sum([series[i] for series in multiSeries]) for i in xrange(self._catCount)]
    def _calcTickmarkPositions(self):
        # tickShift centres ticks inside buckets; otherwise ticks sit on
        # the bucket boundaries (one more tick than categories).
        n = self._catCount
        if self.tickShift:
            self._tickValues = [t+0.5 for t in range(n)]
        else:
            if self.reverseDirection:
                self._tickValues = list(range(-1,n))
            else:
                self._tickValues = list(range(n+1))
    def _scale(self,idx):
        if self.reverseDirection: idx = self._catCount-idx-1
        return idx
# Guard helpers for joinAxis wiring; assert-based (stripped under -O) by
# long-standing convention in this module.
def _assertYAxis(axis):
    assert axis.isYAxis, "Cannot connect to other axes (%s), but Y- ones." % axis.__class__.__name__
def _assertXAxis(axis):
    assert axis.isXAxis, "Cannot connect to other axes (%s), but X- ones." % axis.__class__.__name__
class _XTicks:
    '''Mixin providing tick and sub-tick drawing for X-flavoured axes.'''
    _tickTweaks = 0 #try 0.25-0.5
    def _drawTicksInner(self,tU,tD,g):
        # innerTickDraw, when set, fully replaces the default tick drawing.
        itd = getattr(self,'innerTickDraw',None)
        if itd:
            itd(self,tU,tD,g)
        elif tU or tD:
            sW = self.strokeWidth
            tW = self._tickTweaks
            if tW:
                # Give one-sided ticks a small opposite-side stub so the
                # joint with the axis line looks clean.
                if tU and not tD:
                    tD = tW*sW
                elif tD and not tU:
                    tU = tW*sW
            self._makeLines(g,tU,-tD,self.strokeColor,sW,self.strokeDashArray,self.strokeLineJoin,self.strokeLineCap,self.strokeMiterLimit)
    def _drawTicks(self,tU,tD,g=None):
        g = g or Group()
        if self.visibleTicks:
            self._drawTicksInner(tU,tD,g)
        return g
    def _calcSubTicks(self):
        # Compute (and cache) sub-tick positions, temporarily installing
        # them as _tickValues; returns the original tick values so the
        # caller can restore them.
        if not hasattr(self,'_tickValues'):
            self._pseudo_configure()
        otv = self._tickValues
        if not hasattr(self,'_subTickValues'):
            acn = self.__class__.__name__
            if acn[:11]=='NormalDateX':
                # date axes work in integer day steps, no float fuzz
                iFuzz = 0
                dCnv = int
            else:
                iFuzz = 1e-8
                dCnv = lambda x:x
            OTV = [tv for tv in otv if getattr(tv,'_doSubTicks',1)]
            T = [].append
            nst = int(self.subTickNum)
            i = len(OTV)
            if i<2:
                self._subTickValues = []
            else:
                # Estimate the main tick spacing from a middle pair.
                if i==2:
                    dst = OTV[1]-OTV[0]
                elif i==3:
                    dst = max(OTV[1]-OTV[0],OTV[2]-OTV[1])
                else:
                    i >>= 1
                    dst = OTV[i+1] - OTV[i]
                fuzz = dst*iFuzz
                vn = self._valueMin+fuzz
                vx = self._valueMax-fuzz
                # Extend one spacing past each end so edge intervals get
                # sub-ticks too; out-of-range values are filtered below.
                if OTV[0]>vn: OTV.insert(0,OTV[0]-dst)
                if OTV[-1]<vx: OTV.append(OTV[-1]+dst)
                dst /= float(nst+1)
                for i,x in enumerate(OTV[:-1]):
                    for j in range(nst):
                        t = x+dCnv((j+1)*dst)
                        if t<=vn or t>=vx: continue
                        T(t)
                self._subTickValues = T.__self__
        self._tickValues = self._subTickValues
        return otv
    def _drawSubTicks(self,tU,tD,g):
        if getattr(self,'visibleSubTicks',0) and self.subTickNum>0:
            otv = self._calcSubTicks()
            try:
                # flag consulted by styling code while sub-ticks are drawn
                self._subTicking = 1
                self._drawTicksInner(tU,tD,g)
            finally:
                del self._subTicking
            self._tickValues = otv
    def makeTicks(self):
        # Ticks may be anchored on a joined axis; shift _y for the duration.
        yold=self._y
        try:
            self._y = self._labelAxisPos(getattr(self,'tickAxisMode','axis'))
            g = self._drawTicks(self.tickUp,self.tickDown)
            self._drawSubTicks(getattr(self,'subTickHi',0),getattr(self,'subTickLo',0),g)
            return g
        finally:
            self._y = yold
    def _labelAxisPos(self,mode=None):
        # Y coordinate labels/ticks anchor to: our own axis, or the low/high
        # edge of the joined axis.
        axis = self.joinAxis
        if axis:
            mode = mode or self.labelAxisMode
            if mode == 'low':
                return axis._y
            elif mode == 'high':
                return axis._y + axis._length
        return self._y
class _YTicks(_XTicks):
    '''Tick drawing for vertical (Y) axes: identical machinery to _XTicks
    but the x coordinate is the one temporarily shifted while drawing.'''
    def _labelAxisPos(self, mode=None):
        """Return the x position at which ticks/labels sit for *mode*."""
        joined = self.joinAxis
        if joined:
            effective = mode or self.labelAxisMode
            if effective == 'low':
                return joined._x
            if effective == 'high':
                return joined._x + joined._length
        return self._x
    def makeTicks(self):
        """Build and return a Group holding tick (and sub-tick) lines."""
        saved_x = self._x
        try:
            self._x = self._labelAxisPos(getattr(self, 'tickAxisMode', 'axis'))
            group = self._drawTicks(self.tickRight, self.tickLeft)
            self._drawSubTicks(getattr(self, 'subTickHi', 0), getattr(self, 'subTickLo', 0), group)
            return group
        finally:
            self._x = saved_x
class XCategoryAxis(_XTicks,CategoryAxis):
    "X/category axis"
    _attrMap = AttrMap(BASE=CategoryAxis,
        tickUp = AttrMapValue(isNumber,
            desc='Tick length up the axis.'),
        tickDown = AttrMapValue(isNumber,
            desc='Tick length down the axis.'),
        joinAxisMode = AttrMapValue(OneOf('bottom', 'top', 'value', 'points', None),
            desc="Mode used for connecting axis ('bottom', 'top', 'value', 'points', None)."),
        )
    # this axis indexes dimension 0 (x) of each data point
    _dataIndex = 0
    def __init__(self):
        CategoryAxis.__init__(self)
        self.labels.boxAnchor = 'n' #north - top edge
        self.labels.dy = -5
        # ultra-simple tick marks for now go between categories
        # and have same line style as axis - need more
        self.tickUp = 0   # how far into chart does tick go?
        self.tickDown = 5  # how far below axis does tick go?
    def demo(self):
        """Return a small self-contained Drawing showing this axis."""
        self.setPosition(30, 70, 140)
        self.configure([(10,20,30,40,50)])
        self.categoryNames = ['One','Two','Three','Four','Five']
        # all labels top-centre aligned apart from the last
        self.labels.boxAnchor = 'n'
        self.labels[4].boxAnchor = 'e'
        self.labels[4].angle = 90
        d = Drawing(200, 100)
        d.add(self)
        return d
    def joinToAxis(self, yAxis, mode='bottom', pos=None):
        "Join with y-axis using some mode."
        _assertYAxis(yAxis)
        if mode == 'bottom':
            self._y = yAxis._y
        elif mode == 'top':
            self._y = yAxis._y + yAxis._length
        elif mode == 'value':
            # place at the y-axis position of the given data value
            self._y = yAxis.scale(pos)
        elif mode == 'points':
            self._y = pos
    def _joinToAxis(self):
        # apply joinAxis/joinAxisMode if both are configured
        ja = self.joinAxis
        if ja:
            jam = self.joinAxisMode
            if jam in ('bottom', 'top'):
                self.joinToAxis(ja, mode=jam)
            elif jam in ('value', 'points'):
                self.joinToAxis(ja, mode=jam, pos=self.joinAxisPos)
    def scale(self, idx):
        """returns the x position and width in drawing units of the slice"""
        return (self._x + self.loPad + self._scale(idx)*self._barWidth, self._barWidth)
    def makeAxis(self):
        # Draw the axis line itself (with optional lo/hi extensions).
        g = Group()
        self._joinToAxis()
        if not self.visibleAxis: return g
        axis = Line(self._x-self.loLLen, self._y, self._x + self._length+self.hiLLen, self._y)
        axis.strokeColor = self.strokeColor
        axis.strokeWidth = self.strokeWidth
        axis.strokeDashArray = self.strokeDashArray
        g.add(axis)
        return g
    def makeTickLabels(self):
        """Return a Group of positioned category-name labels."""
        g = Group()
        if not self.visibleLabels: return g
        categoryNames = self.categoryNames
        if categoryNames is not None:
            catCount = self._catCount
            n = len(categoryNames)
            reverseDirection = self.reverseDirection
            barWidth = self._barWidth
            _y = self._labelAxisPos()
            _x = self._x
            # NOTE(review): 'axispmv' is not among the labelAxisMode values
            # declared in this file's attr maps — presumably set elsewhere;
            # when active, labels get per-category value info via _pmv.
            pmv = self._pmv if self.labelAxisMode=='axispmv' else None
            for i in range(catCount):
                if reverseDirection: ic = catCount-i-1
                else: ic = i
                if ic>=n: continue
                # look up per-label overrides by negative index (counting
                # from the end) first, then by positive index
                label=i-catCount
                if label in self.labels:
                    label = self.labels[label]
                else:
                    label = self.labels[i]
                if pmv:
                    _dy = label.dy
                    v = label._pmv = pmv[ic]
                    # flip label offset to the other side for negative values
                    if v<0: _dy *= -2
                else:
                    _dy = 0
                lpf = label.labelPosFrac
                x = _x + (i+lpf) * barWidth
                label.setOrigin(x,_y+_dy)
                label.setText(categoryNames[ic] or '')
                g.add(label)
        return g
class YCategoryAxis(_YTicks,CategoryAxis):
    "Y/category axis"
    _attrMap = AttrMap(BASE=CategoryAxis,
        tickLeft = AttrMapValue(isNumber,
            desc='Tick length left of the axis.'),
        tickRight = AttrMapValue(isNumber,
            desc='Tick length right of the axis.'),
        joinAxisMode = AttrMapValue(OneOf(('left', 'right', 'value', 'points', None)),
            desc="Mode used for connecting axis ('left', 'right', 'value', 'points', None)."),
        )
    # this axis indexes dimension 1 (y) of each data point
    _dataIndex = 1
    def __init__(self):
        CategoryAxis.__init__(self)
        self.labels.boxAnchor = 'e' #east - right edge
        self.labels.dx = -5
        # ultra-simple tick marks for now go between categories
        # and have same line style as axis - need more
        self.tickLeft = 5  # how far left of axis does tick go?
        self.tickRight = 0  # how far right of axis does tick go?
    def demo(self):
        """Return a small self-contained Drawing showing this axis."""
        self.setPosition(50, 10, 80)
        self.configure([(10,20,30)])
        self.categoryNames = ['One','Two','Three']
        # all labels top-centre aligned apart from the last
        self.labels.boxAnchor = 'e'
        self.labels[2].boxAnchor = 's'
        self.labels[2].angle = 90
        d = Drawing(200, 100)
        d.add(self)
        return d
    def joinToAxis(self, xAxis, mode='left', pos=None):
        "Join with x-axis using some mode."
        # multiply by 1.0 to force a float position
        _assertXAxis(xAxis)
        if mode == 'left':
            self._x = xAxis._x * 1.0
        elif mode == 'right':
            self._x = (xAxis._x + xAxis._length) * 1.0
        elif mode == 'value':
            # place at the x-axis position of the given data value
            self._x = xAxis.scale(pos) * 1.0
        elif mode == 'points':
            self._x = pos * 1.0
    def _joinToAxis(self):
        # apply joinAxis/joinAxisMode if both are configured
        ja = self.joinAxis
        if ja:
            jam = self.joinAxisMode
            if jam in ('left', 'right'):
                self.joinToAxis(ja, mode=jam)
            elif jam in ('value', 'points'):
                self.joinToAxis(ja, mode=jam, pos=self.joinAxisPos)
    def scale(self, idx):
        "Returns the y position and width in drawing units of the slice."
        return (self._y + self._scale(idx)*self._barWidth, self._barWidth)
    def makeAxis(self):
        # Draw the axis line itself (with optional lo/hi extensions).
        g = Group()
        self._joinToAxis()
        if not self.visibleAxis: return g
        axis = Line(self._x, self._y-self.loLLen, self._x, self._y + self._length+self.hiLLen)
        axis.strokeColor = self.strokeColor
        axis.strokeWidth = self.strokeWidth
        axis.strokeDashArray = self.strokeDashArray
        g.add(axis)
        return g
    def makeTickLabels(self):
        """Return a Group of positioned category-name labels."""
        g = Group()
        if not self.visibleLabels: return g
        categoryNames = self.categoryNames
        if categoryNames is not None:
            catCount = self._catCount
            n = len(categoryNames)
            reverseDirection = self.reverseDirection
            barWidth = self._barWidth
            labels = self.labels
            _x = self._labelAxisPos()
            _y = self._y
            # NOTE(review): 'axispmv' is not among the labelAxisMode values
            # declared in this file's attr maps — presumably set elsewhere.
            pmv = self._pmv if self.labelAxisMode=='axispmv' else None
            for i in range(catCount):
                if reverseDirection: ic = catCount-i-1
                else: ic = i
                if ic>=n: continue
                # look up per-label overrides by negative index (counting
                # from the end) first, then by positive index
                label=i-catCount
                if label in self.labels:
                    label = self.labels[label]
                else:
                    label = self.labels[i]
                lpf = label.labelPosFrac
                y = _y + (i+lpf) * barWidth
                if pmv:
                    _dx = label.dx
                    v = label._pmv = pmv[ic]
                    # flip label offset to the other side for negative values
                    if v<0: _dx *= -2
                else:
                    _dx = 0
                label.setOrigin(_x+_dx, y)
                label.setText(categoryNames[ic] or '')
                g.add(label)
        return g
class TickLabeller:
    '''Marker base class: subclass this so that callable label formats are
    invoked with the (axis, value) signature instead of just (value,).
    '''
    def __call__(self, axis, value):
        # Subclasses override this; the base merely flags itself as abstract.
        return 'Abstract class instance called'
# Value axes.
class ValueAxis(_AxisG):
    """Abstract value axis, unusable in itself.

    Concrete subclasses (XValueAxis/YValueAxis) supply the data index and
    drawing direction.  This class handles range determination, tick value
    computation and label generation.

    Review fixes applied:
    - NameError typo ``bubbleMAx`` in the bubble-plot fallback of _setRange.
    - ``rangeRound is not 'none'`` replaced with ``!=`` (identity comparison
      with a str literal is implementation-dependent).
    - ``_getValueStepAndTicks`` no longer uses a mutable default ``cache={}``
      which incorrectly shared cached steps across axis instances.
    """
    _attrMap = AttrMap(
        forceZero = AttrMapValue(EitherOr((isBoolean,OneOf('near'))), desc='Ensure zero in range if true.'),
        visible = AttrMapValue(isBoolean, desc='Display entire object, if true.'),
        visibleAxis = AttrMapValue(isBoolean, desc='Display axis line, if true.'),
        visibleLabels = AttrMapValue(isBoolean, desc='Display axis labels, if true.'),
        visibleTicks = AttrMapValue(isBoolean, desc='Display axis ticks, if true.'),
        visibleGrid = AttrMapValue(isBoolean, desc='Display axis grid, if true.'),
        strokeWidth = AttrMapValue(isNumber, desc='Width of axis line and ticks.'),
        strokeColor = AttrMapValue(isColorOrNone, desc='Color of axis line and ticks.'),
        strokeDashArray = AttrMapValue(isListOfNumbersOrNone, desc='Dash array used for axis line.'),
        strokeLineCap = AttrMapValue(OneOf(0,1,2),desc="Line cap 0=butt, 1=round & 2=square"),
        strokeLineJoin = AttrMapValue(OneOf(0,1,2),desc="Line join 0=miter, 1=round & 2=bevel"),
        strokeMiterLimit = AttrMapValue(isNumber,desc="miter limit control miter line joins"),
        gridStrokeWidth = AttrMapValue(isNumber, desc='Width of grid lines.'),
        gridStrokeColor = AttrMapValue(isColorOrNone, desc='Color of grid lines.'),
        gridStrokeDashArray = AttrMapValue(isListOfNumbersOrNone, desc='Dash array used for grid lines.'),
        gridStrokeLineCap = AttrMapValue(OneOf(0,1,2),desc="Grid Line cap 0=butt, 1=round & 2=square"),
        gridStrokeLineJoin = AttrMapValue(OneOf(0,1,2),desc="Grid Line join 0=miter, 1=round & 2=bevel"),
        gridStrokeMiterLimit = AttrMapValue(isNumber,desc="Grid miter limit control miter line joins"),
        gridStart = AttrMapValue(isNumberOrNone, desc='Start of grid lines wrt axis origin'),
        gridEnd = AttrMapValue(isNumberOrNone, desc='End of grid lines wrt axis origin'),
        drawGridLast = AttrMapValue(isBoolean, desc='if true draw gridlines after everything else.'),
        minimumTickSpacing = AttrMapValue(isNumber, desc='Minimum value for distance between ticks.'),
        maximumTicks = AttrMapValue(isNumber, desc='Maximum number of ticks.'),
        labels = AttrMapValue(None, desc='Handle of the axis labels.'),
        labelAxisMode = AttrMapValue(OneOf('high','low','axis'), desc="Like joinAxisMode, but for the axis labels"),
        labelTextFormat = AttrMapValue(None, desc='Formatting string or function used for axis labels.'),
        labelTextPostFormat = AttrMapValue(None, desc='Extra Formatting string.'),
        labelTextScale = AttrMapValue(isNumberOrNone, desc='Scaling for label tick values.'),
        valueMin = AttrMapValue(isNumberOrNone, desc='Minimum value on axis.'),
        valueMax = AttrMapValue(isNumberOrNone, desc='Maximum value on axis.'),
        valueStep = AttrMapValue(isNumberOrNone, desc='Step size used between ticks.'),
        valueSteps = AttrMapValue(isListOfNumbersOrNone, desc='List of step sizes used between ticks.'),
        avoidBoundFrac = AttrMapValue(EitherOr((isNumberOrNone,SequenceOf(isNumber,emptyOK=0,lo=2,hi=2))), desc='Fraction of interval to allow above and below.'),
        avoidBoundSpace = AttrMapValue(EitherOr((isNumberOrNone,SequenceOf(isNumber,emptyOK=0,lo=2,hi=2))), desc='Space to allow above and below.'),
        abf_ignore_zero = AttrMapValue(EitherOr((NoneOr(isBoolean),SequenceOf(isBoolean,emptyOK=0,lo=2,hi=2))), desc='Set to True to make the avoidBoundFrac calculations treat zero as non-special'),
        rangeRound=AttrMapValue(OneOf('none','both','ceiling','floor'),'How to round the axis limits'),
        zrangePref = AttrMapValue(isNumberOrNone, desc='Zero range axis limit preference.'),
        style = AttrMapValue(OneOf('normal','stacked','parallel_3d'),"How values are plotted!"),
        skipEndL = AttrMapValue(OneOf('none','start','end','both'), desc='Skip high/low tick labels'),
        origShiftIPC = AttrMapValue(isNumberOrNone, desc='Lowest label shift interval ratio.'),
        origShiftMin = AttrMapValue(isNumberOrNone, desc='Minimum amount to shift.'),
        origShiftSpecialValue = AttrMapValue(isNumberOrNone, desc='special value for shift'),
        tickAxisMode = AttrMapValue(OneOf('high','low','axis'), desc="Like joinAxisMode, but for the ticks"),
        reverseDirection = AttrMapValue(isBoolean, desc='If true reverse category direction.'),
        annotations = AttrMapValue(None,desc='list of annotations'),
        loLLen = AttrMapValue(isNumber, desc='extra line length before start of the axis'),
        hiLLen = AttrMapValue(isNumber, desc='extra line length after end of the axis'),
        subTickNum = AttrMapValue(isNumber, desc='Number of axis sub ticks, if >0'),
        subTickLo = AttrMapValue(isNumber, desc='sub tick down or left'),
        subTickHi = AttrMapValue(isNumber, desc='sub tick up or right'),
        visibleSubTicks = AttrMapValue(isBoolean, desc='Display axis sub ticks, if true.'),
        visibleSubGrid = AttrMapValue(isBoolean, desc='Display axis sub grid, if true.'),
        subGridStrokeWidth = AttrMapValue(isNumber, desc='Width of grid lines.'),
        subGridStrokeColor = AttrMapValue(isColorOrNone, desc='Color of grid lines.'),
        subGridStrokeDashArray = AttrMapValue(isListOfNumbersOrNone, desc='Dash array used for grid lines.'),
        subGridStrokeLineCap = AttrMapValue(OneOf(0,1,2),desc="Grid Line cap 0=butt, 1=round & 2=square"),
        subGridStrokeLineJoin = AttrMapValue(OneOf(0,1,2),desc="Grid Line join 0=miter, 1=round & 2=bevel"),
        subGridStrokeMiterLimit = AttrMapValue(isNumber,desc="Grid miter limit control miter line joins"),
        subGridStart = AttrMapValue(isNumberOrNone, desc='Start of grid lines wrt axis origin'),
        subGridEnd = AttrMapValue(isNumberOrNone, desc='End of grid lines wrt axis origin'),
        keepTickLabelsInside = AttrMapValue(isBoolean, desc='Ensure tick labels do not project beyond bounds of axis if true'),
        skipGrid = AttrMapValue(OneOf('none','top','both','bottom'),"grid lines to skip top bottom both none"),
        requiredRange = AttrMapValue(isNumberOrNone, desc='Minimum required value range.'),
        innerTickDraw = AttrMapValue(isNoneOrCallable, desc="Callable to replace _drawInnerTicks"),
        )

    def __init__(self,**kw):
        assert self.__class__.__name__!='ValueAxis', 'Abstract Class ValueAxis Instantiated'
        self._setKeywords(**kw)
        self._setKeywords(
                _configured = 0,
                # private properties set by methods.  The initial values
                # here are to make demos easy; they would always be
                # overridden in real life.
                _x = 50,
                _y = 50,
                _length = 100,

                # public properties
                visible = 1,
                visibleAxis = 1,
                visibleLabels = 1,
                visibleTicks = 1,
                visibleGrid = 0,
                forceZero = 0,

                strokeWidth = 1,
                strokeColor = STATE_DEFAULTS['strokeColor'],
                strokeDashArray = STATE_DEFAULTS['strokeDashArray'],
                strokeLineJoin = STATE_DEFAULTS['strokeLineJoin'],
                strokeLineCap = STATE_DEFAULTS['strokeLineCap'],
                strokeMiterLimit = STATE_DEFAULTS['strokeMiterLimit'],
                gridStrokeWidth = 0.25,
                gridStrokeColor = STATE_DEFAULTS['strokeColor'],
                gridStrokeDashArray = STATE_DEFAULTS['strokeDashArray'],
                gridStrokeLineJoin = STATE_DEFAULTS['strokeLineJoin'],
                gridStrokeLineCap = STATE_DEFAULTS['strokeLineCap'],
                gridStrokeMiterLimit = STATE_DEFAULTS['strokeMiterLimit'],
                gridStart = None,
                gridEnd = None,
                drawGridLast = False,

                visibleSubGrid = 0,
                visibleSubTicks = 0,
                subTickNum = 0,
                subTickLo = 0,
                subTickHi = 0,
                subGridStrokeLineJoin = STATE_DEFAULTS['strokeLineJoin'],
                subGridStrokeLineCap = STATE_DEFAULTS['strokeLineCap'],
                subGridStrokeMiterLimit = STATE_DEFAULTS['strokeMiterLimit'],
                subGridStrokeWidth = 0.25,
                subGridStrokeColor = STATE_DEFAULTS['strokeColor'],
                subGridStrokeDashArray = STATE_DEFAULTS['strokeDashArray'],
                subGridStart = None,
                subGridEnd = None,

                labels = TypedPropertyCollection(Label),
                keepTickLabelsInside = 0,

                # how close can the ticks be?
                minimumTickSpacing = 10,
                maximumTicks = 7,

                # a format string like '%0.2f'
                # or a function which takes the value as an argument and returns a string
                _labelTextFormat = None,
                labelAxisMode = 'axis',
                labelTextFormat = None,
                labelTextPostFormat = None,
                labelTextScale = None,

                # if set to None, these will be worked out for you.
                # if you override any or all of them, your values
                # will be used.
                valueMin = None,
                valueMax = None,
                valueStep = None,
                avoidBoundFrac = None,
                avoidBoundSpace = None,
                abf_ignore_zero = False,
                rangeRound = 'none',
                zrangePref = 0,
                style = 'normal',
                skipEndL='none',
                origShiftIPC = None,
                origShiftMin = None,
                origShiftSpecialValue = None,
                tickAxisMode = 'axis',
                reverseDirection=0,
                loLLen=0,
                hiLLen=0,
                requiredRange=0,
                )
        self.labels.angle = 0

    def setPosition(self, x, y, length):
        # ensure floating point
        self._x = float(x)
        self._y = float(y)
        self._length = float(length)

    def configure(self, dataSeries):
        """Let the axis configure its scale and range based on the data.

        Called after setPosition. Let it look at a list of lists of
        numbers determine the tick mark intervals.  If valueMin,
        valueMax and valueStep are configured then it
        will use them; if any of them are set to None it
        will look at the data and make some sensible decision.
        You may override this to build custom axes with
        irregular intervals.  It creates an internal
        variable self._values, which is a list of numbers
        to use in plotting.
        """
        self._setRange(dataSeries)
        self._configure_end()

    def _configure_end(self):
        self._calcTickmarkPositions()
        self._calcScaleFactor()
        self._configured = 1

    def _getValueStepAndTicks(self, valueMin, valueMax, cache=None):
        '''Return (valueStep, tickValues, fuzz) for the given range.

        *cache* is a per-call dict used by _setRange to avoid recomputing
        during its iteration.  It must not default to a shared mutable
        (the step depends on per-instance state such as _length and
        maximumTicks, so cross-instance sharing would be wrong).
        '''
        if cache is None: cache = {}
        try:
            K = (valueMin,valueMax)
            r = cache[K]
        except KeyError:
            self._valueMin = valueMin
            self._valueMax = valueMax
            valueStep,T = self._calcStepAndTickPositions()
            r = cache[K] = valueStep, T, valueStep*1e-8
        return r

    def _preRangeAdjust(self,valueMin,valueMax):
        # Enforce a minimum span (requiredRange) by symmetrically widening
        # the range about its midpoint, with special handling so that
        # all-positive data is never pushed below 0 (or below 100).
        rr = self.requiredRange
        if rr>0:
            r = valueMax - valueMin
            if r<rr:
                m = 0.5*(valueMax+valueMin)
                rr *= 0.5
                y1 = min(m-rr,valueMin)
                y2 = max(m+rr,valueMax)
                if valueMin>=100 and y1<100:
                    y2 = y2 + 100 - y1
                    y1 = 100
                elif valueMin>=0 and y1<0:
                    y2 = y2 - y1
                    y1 = 0
                valueMin = self._cValueMin = y1
                valueMax = self._cValueMax = y2
        return valueMin,valueMax

    def _setRange(self, dataSeries):
        """Set minimum and maximum axis values.

        The dataSeries argument is assumed to be a list of data
        vectors. Each vector is itself a list or tuple of numbers.

        Returns a min, max tuple.
        """
        oMin = valueMin = self.valueMin
        oMax = valueMax = self.valueMax
        if valueMin is None: valueMin = self._cValueMin = _findMin(dataSeries,self._dataIndex,0)
        if valueMax is None: valueMax = self._cValueMax = _findMax(dataSeries,self._dataIndex,0)
        if valueMin == valueMax:
            # degenerate (flat) range: open it up around the single value
            if valueMax==0:
                if oMin is None and oMax is None:
                    zrp = getattr(self,'zrangePref',0)
                    if zrp>0:
                        valueMax = zrp
                        valueMin = 0
                    elif zrp<0:
                        valueMax = 0
                        valueMin = zrp
                    else:
                        valueMax = 0.01
                        valueMin = -0.01
                elif self.valueMin is None:
                    valueMin = -0.01
                else:
                    valueMax = 0.01
            else:
                if valueMax>0:
                    valueMax = 1.2*valueMax
                    valueMin = 0.0
                else:
                    valueMax = 0.0
                    valueMin = 1.2*valueMin

        if getattr(self,'_bubblePlot',None):
            # bubble plots: widen the range so bubble radii fit inside it
            bubbleMax = float(_findMax(dataSeries,2,0))
            frac=.25
            bubbleV=frac*(valueMax-valueMin)
            self._bubbleV = bubbleV
            self._bubbleMax = bubbleMax
            self._bubbleRadius = frac*self._length
            def special(T,x,func,bubbleV=bubbleV,bubbleMax=bubbleMax):
                try:
                    v = T[2]
                except IndexError:
                    # FIX: was 'bubbleMAx' — a NameError whenever a point
                    # had no explicit bubble size
                    v = bubbleMax*0.1
                bubbleV *= (v/bubbleMax)**0.5
                return func(T[x]+bubbleV,T[x]-bubbleV)
            if oMin is None: valueMin = self._cValueMin = _findMin(dataSeries,self._dataIndex,0,special=special)
            if oMax is None: valueMax = self._cValueMax = _findMax(dataSeries,self._dataIndex,0,special=special)

        valueMin, valueMax = self._preRangeAdjust(valueMin,valueMax)

        rangeRound = self.rangeRound
        cMin = valueMin
        cMax = valueMax
        forceZero = self.forceZero
        if forceZero:
            if forceZero=='near':
                # only force zero when it is reasonably close to the data
                forceZero = min(abs(valueMin),abs(valueMax)) <= 5*(valueMax-valueMin)
            if forceZero:
                if valueMax<0: valueMax=0
                elif valueMin>0: valueMin = 0

        abf = self.avoidBoundFrac
        do_rr = not getattr(self,'valueSteps',None)
        do_abf = abf and do_rr
        if not isSeq(abf):
            abf = abf, abf
        abfiz = getattr(self,'abf_ignore_zero', False)
        if not isSeq(abfiz):
            abfiz = abfiz, abfiz
        # FIX: was "rangeRound is not 'none'" — identity comparison with a
        # str literal is implementation-dependent; equality is intended
        do_rr = rangeRound != 'none' and do_rr
        if do_rr:
            rrn = rangeRound in ['both','floor']
            rrx = rangeRound in ['both','ceiling']
        else:
            rrn = rrx = 0
        abS = self.avoidBoundSpace
        do_abs = abS
        if do_abs:
            if not isSeq(abS):
                abS = abS, abS
        aL = float(self._length)

        # iterate range rounding / bound avoidance until stable (max 10 passes)
        go = do_rr or do_abf or do_abs
        cache = {}
        niter = 0
        while go and niter<=10:
            niter += 1
            go = 0
            if do_abf or do_abs:
                valueStep, T, fuzz = self._getValueStepAndTicks(valueMin, valueMax, cache)
                if do_abf:
                    i0 = valueStep*abf[0]
                    i1 = valueStep*abf[1]
                else:
                    i0 = i1 = 0
                if do_abs:
                    sf = (valueMax-valueMin)/aL
                    i0 = max(i0,abS[0]*sf)
                    i1 = max(i1,abS[1]*sf)
                if rrn: v = T[0]
                else: v = valueMin
                u = cMin-i0
                if (abfiz[0] or abs(v)>fuzz) and v>=u+fuzz:
                    valueMin = u
                    go = 1
                if rrx: v = T[-1]
                else: v = valueMax
                u = cMax+i1
                if (abfiz[1] or abs(v)>fuzz) and v<=u-fuzz:
                    valueMax = u
                    go = 1
            if do_rr:
                valueStep, T, fuzz = self._getValueStepAndTicks(valueMin, valueMax, cache)
                if rrn:
                    if valueMin<T[0]-fuzz:
                        valueMin = T[0]-valueStep
                        go = 1
                    else:
                        go = valueMin>=T[0]+fuzz
                        valueMin = T[0]
                if rrx:
                    if valueMax>T[-1]+fuzz:
                        valueMax = T[-1]+valueStep
                        go = 1
                    else:
                        go = valueMax<=T[-1]-fuzz
                        valueMax = T[-1]
        if niter and not go:
            # converged: the step computed on the last pass is still valid
            self._computedValueStep = valueStep
        else:
            self._computedValueStep = None

        self._valueMin = valueMin
        self._valueMax = valueMax

        # optionally shift the origin downwards so the lowest data point
        # does not sit exactly on the axis
        origShiftIPC = self.origShiftIPC
        origShiftMin = self.origShiftMin
        if origShiftMin is not None or origShiftIPC is not None:
            origShiftSpecialValue = self.origShiftSpecialValue
            self._calcValueStep()
            valueMax, valueMin = self._valueMax, self._valueMin
            if origShiftSpecialValue is None or abs(origShiftSpecialValue-valueMin)<1e-6:
                if origShiftIPC:
                    m = origShiftIPC*self._valueStep
                else:
                    m = 0
                if origShiftMin:
                    m = max(m,(valueMax-valueMin)*origShiftMin/self._length)
                self._valueMin -= m

        self._rangeAdjust()

    def _pseudo_configure(self):
        # configure using the pre-set valueMin/valueMax without examining data
        self._valueMin = self.valueMin
        self._valueMax = self.valueMax
        self._configure_end()

    def _rangeAdjust(self):
        """Override this if you want to alter the calculated range.

        E.g. if want a minumamum range of 30% or don't want 100%
        as the first point.
        """
        pass

    def _adjustAxisTicks(self):
        '''Override if you want to put slack at the ends of the axis
        eg if you don't want the last tick to be at the bottom etc
        '''
        pass

    def _calcScaleFactor(self):
        """Calculate the axis' scale factor.

        This should be called only *after* the axis' range is set.
        Returns a number.
        """
        self._scaleFactor = self._length / float(self._valueMax - self._valueMin)
        return self._scaleFactor

    def _calcStepAndTickPositions(self):
        # Determine the tick step (honouring a step computed by _setRange)
        # and the tick values spanning [valueMin, valueMax], with the ends
        # rounded outwards/inwards per rangeRound.
        valueStep = getattr(self,'_computedValueStep',None)
        if valueStep:
            del self._computedValueStep
            self._valueStep = valueStep
        else:
            self._calcValueStep()
            valueStep = self._valueStep
        valueMin = self._valueMin
        valueMax = self._valueMax
        fuzz = 1e-8*valueStep
        rangeRound = self.rangeRound
        i0 = int(float(valueMin)/valueStep)
        v = i0*valueStep
        if rangeRound in ('both','floor'):
            if v>valueMin+fuzz: i0 -= 1
        elif v<valueMin-fuzz: i0 += 1
        i1 = int(float(valueMax)/valueStep)
        v = i1*valueStep
        if rangeRound in ('both','ceiling'):
            if v<valueMax-fuzz: i1 += 1
        elif v>valueMax+fuzz: i1 -= 1
        return valueStep,[i*valueStep for i in range(i0,i1+1)]

    def _calcTickPositions(self):
        return self._calcStepAndTickPositions()[1]

    def _calcTickmarkPositions(self):
        """Calculate a list of tick positions on the axis.  Returns a list of numbers."""
        # explicit valueSteps always wins over computed positions
        self._tickValues = getattr(self,'valueSteps',None)
        if self._tickValues: return self._tickValues
        self._tickValues = self._calcTickPositions()
        self._adjustAxisTicks()
        return self._tickValues

    def _calcValueStep(self):
        '''Calculate _valueStep for the axis or get from valueStep.'''
        if self.valueStep is None:
            rawRange = self._valueMax - self._valueMin
            rawInterval = rawRange / min(float(self.maximumTicks-1),(float(self._length)/self.minimumTickSpacing))
            self._valueStep = nextRoundNumber(rawInterval)
        else:
            self._valueStep = self.valueStep

    def _allIntTicks(self):
        # True when every computed tick value is an integer
        return _allInt(self._tickValues)

    def makeTickLabels(self):
        """Return a Group of formatted, positioned tick labels."""
        g = Group()
        if not self.visibleLabels: return g
        f = self._labelTextFormat       # perhaps someone already set it
        if f is None:
            f = self.labelTextFormat or (self._allIntTicks() and '%.0f' or str)
        elif f is str and self._allIntTicks(): f = '%.0f'
        elif hasattr(f,'calcPlaces'):
            # formatter wants to see all tick values to pick decimal places
            f.calcPlaces(self._tickValues)
        post = self.labelTextPostFormat
        scl = self.labelTextScale
        pos = [self._x, self._y]
        d = self._dataIndex
        pos[1-d] = self._labelAxisPos()
        labels = self.labels
        # optionally suppress the labels at the start/end of the axis
        if self.skipEndL!='none':
            if self.isXAxis:
                sk = self._x
            else:
                sk = self._y
            if self.skipEndL=='start':
                sk = [sk]
            else:
                sk = [sk,sk+self._length]
                if self.skipEndL=='end':
                    del sk[0]
        else:
            sk = []

        nticks = len(self._tickValues)
        nticks1 = nticks - 1
        for i,tick in enumerate(self._tickValues):
            # per-label overrides may be addressed by negative index
            # (counting from the end) or positive index
            label = i-nticks
            if label in labels:
                label = labels[label]
            else:
                label = labels[i]
            if f and label.visible:
                v = self.scale(tick)
                if sk:
                    for skv in sk:
                        if abs(skv-v)<1e-6:
                            v = None
                            break
                if v is not None:
                    if scl is not None:
                        t = tick*scl
                    else:
                        t = tick
                    if isinstance(f, str): txt = f % t
                    elif isSeq(f):
                        # it's a list, use as many items as we get
                        if i < len(f):
                            txt = f[i]
                        else:
                            txt = ''
                    elif hasattr(f,'__call__'):
                        if isinstance(f,TickLabeller):
                            txt = f(self,t)
                        else:
                            txt = f(t)
                    else:
                        raise ValueError('Invalid labelTextFormat %s' % f)
                    if post: txt = post % txt
                    pos[d] = v
                    label.setOrigin(*pos)
                    label.setText(txt)

                    # special property to ensure a label doesn't project beyond the bounds of an x-axis
                    if self.keepTickLabelsInside:
                        if isinstance(self, XValueAxis):  # not done yet for y axes
                            a_x = self._x
                            if not i:  # first one
                                x0, y0, x1, y1 = label.getBounds()
                                if x0 < a_x:
                                    label = label.clone(dx=label.dx + a_x - x0)
                            if i==nticks1:  # final one
                                a_x1 = a_x +self._length
                                x0, y0, x1, y1 = label.getBounds()
                                if x1 > a_x1:
                                    label=label.clone(dx=label.dx-x1+a_x1)
                    g.add(label)
        return g

    def scale(self, value):
        """Converts a numeric value to a plotarea position.

        The chart first configures the axis, then asks it to map data
        values into drawing coordinates along the axis' direction.
        """
        assert self._configured, "Axis cannot scale numbers before it is configured"
        if value is None: value = 0
        # this could be made more efficient by moving the definition of org and sf into the configuration
        org = (self._x, self._y)[self._dataIndex]
        sf = self._scaleFactor
        if self.reverseDirection:
            sf = -sf
            org += self._length
        return org + sf*(value - self._valueMin)
class XValueAxis(_XTicks,ValueAxis):
    "X/value axis"
    _attrMap = AttrMap(BASE=ValueAxis,
        tickUp = AttrMapValue(isNumber,
            desc='Tick length up the axis.'),
        tickDown = AttrMapValue(isNumber,
            desc='Tick length down the axis.'),
        joinAxis = AttrMapValue(None,
            desc='Join both axes if true.'),
        joinAxisMode = AttrMapValue(OneOf('bottom', 'top', 'value', 'points', None),
            desc="Mode used for connecting axis ('bottom', 'top', 'value', 'points', None)."),
        joinAxisPos = AttrMapValue(isNumberOrNone,
            desc='Position at which to join with other axis.'),
        )
    # Indicate the dimension of the data we're interested in.
    _dataIndex = 0
    def __init__(self,**kw):
        ValueAxis.__init__(self,**kw)
        # labels hang below the axis line by default
        self.labels.boxAnchor = 'n'
        self.labels.dx = 0
        self.labels.dy = -5
        self.tickUp = 0
        self.tickDown = 5
        self.joinAxis = None
        self.joinAxisMode = None
        self.joinAxisPos = None
    def demo(self):
        """Return a small self-contained Drawing showing this axis."""
        self.setPosition(20, 50, 150)
        self.configure([(10,20,30,40,50)])
        d = Drawing(200, 100)
        d.add(self)
        return d
    def joinToAxis(self, yAxis, mode='bottom', pos=None):
        "Join with y-axis using some mode."
        # multiply by 1.0 to force a float position
        _assertYAxis(yAxis)
        if mode == 'bottom':
            self._y = yAxis._y * 1.0
        elif mode == 'top':
            self._y = (yAxis._y + yAxis._length) * 1.0
        elif mode == 'value':
            # place at the y-axis position of the given data value
            self._y = yAxis.scale(pos) * 1.0
        elif mode == 'points':
            self._y = pos * 1.0
    def _joinToAxis(self):
        # apply joinAxis/joinAxisMode if configured; default mode is 'bottom'
        ja = self.joinAxis
        if ja:
            jam = self.joinAxisMode or 'bottom'
            if jam in ('bottom', 'top'):
                self.joinToAxis(ja, mode=jam)
            elif jam in ('value', 'points'):
                self.joinToAxis(ja, mode=jam, pos=self.joinAxisPos)
    def makeAxis(self):
        # Draw the axis line itself (with optional lo/hi extensions).
        g = Group()
        self._joinToAxis()
        if not self.visibleAxis: return g
        axis = Line(self._x-self.loLLen, self._y, self._x + self._length+self.hiLLen, self._y)
        axis.strokeColor = self.strokeColor
        axis.strokeWidth = self.strokeWidth
        axis.strokeDashArray = self.strokeDashArray
        g.add(axis)
        return g
#additional utilities to help specify calendar dates on which tick marks
#are to be plotted. After some thought, when the magic algorithm fails,
#we can let them specify a number of days-of-the-year to tick in any given
#year.
#################################################################################
#
# Preliminary support objects/functions for the axis used in time series charts
#
#################################################################################
# lower-case month abbreviations; index+1 gives the month number
_months = ['jan','feb','mar','apr','may','jun','jul','aug','sep','oct','nov','dec']
# maximum valid day for each month; Feb allows 29 since these dates
# recur in unspecified years
_maxDays = [31, 29, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]

def parseDayAndMonth(dmstr):
    """Parse and validate a year-less date string like "31-Dec".

    29 Feb is accepted, so the result can describe recurring dates.
    Returns a (dd, mm) pair with mm as the month number (1-12).
    Invalid text raises ValueError (bad number/month) or
    AssertionError (day out of range for the month).
    """
    day_part, month_part = dmstr.split('-')
    dd = int(day_part)
    mm = _months.index(month_part.lower()) + 1
    assert dd <= _maxDays[mm-1]
    return (dd, mm)
class _isListOfDaysAndMonths(Validator):
    """This accepts and validates lists of strings like "31-Dec" i.e. dates
    of no particular year.  29 Feb is allowed.  These can be used
    for recurring dates.
    """
    def test(self,x):
        # Accept any sequence whose every element parses as a day-month
        # string.  NOTE(review): an empty sequence is accepted (answer
        # stays True), and the bare except also swallows unexpected
        # errors from parseDayAndMonth — presumably intentional for a
        # validator, but worth confirming.
        if isSeq(x):
            answer = True
            for element in x:
                try:
                    dd, mm = parseDayAndMonth(element)
                except:
                    answer = False
            return answer
        else:
            return False
    def normalize(self,x):
        #we store them as presented, it's the most presentable way
        return x
# shared validator instance used by axis attribute maps
isListOfDaysAndMonths = _isListOfDaysAndMonths()
# candidate tick intervals — presumably month counts used when choosing
# date tick spacing in NormalDateXValueAxis; TODO confirm against the ticker
_NDINTM = 1,2,3,6,12,24,60,120,180,240,300,360,420,480,540,600,720,840,960,1080,1200,2400
class NormalDateXValueAxis(XValueAxis):
"""An X axis applying additional rules.
Depending on the data and some built-in rules, the axis
displays normalDate values as nicely formatted dates.
The client chart should have NormalDate X values.
"""
_attrMap = AttrMap(BASE = XValueAxis,
bottomAxisLabelSlack = AttrMapValue(isNumber, desc="Fractional amount used to adjust label spacing"),
niceMonth = AttrMapValue(isBoolean, desc="Flag for displaying months 'nicely'."),
forceEndDate = AttrMapValue(isBoolean, desc='Flag for enforced displaying of last date value.'),
forceFirstDate = AttrMapValue(isBoolean, desc='Flag for enforced displaying of first date value.'),
forceDatesEachYear = AttrMapValue(isListOfDaysAndMonths, desc='List of dates in format "31-Dec",' +
'"1-Jan". If present they will always be used for tick marks in the current year, rather ' +
'than the dates chosen by the automatic algorithm. Hyphen compulsory, case of month optional.'),
xLabelFormat = AttrMapValue(None, desc="Label format string (e.g. '{mm}/{yy}') or function."),
dayOfWeekName = AttrMapValue(SequenceOf(isString,emptyOK=0,lo=7,hi=7), desc='Weekday names.'),
monthName = AttrMapValue(SequenceOf(isString,emptyOK=0,lo=12,hi=12), desc='Month names.'),
dailyFreq = AttrMapValue(isBoolean, desc='True if we are to assume daily data to be ticked at end of month.'),
specifiedTickDates = AttrMapValue(NoneOr(SequenceOf(isNormalDate)), desc='Actual tick values to use; no calculations done'),
specialTickClear = AttrMapValue(isBoolean, desc='clear rather than delete close ticks when forced first/end dates'),
skipGrid = AttrMapValue(OneOf('none','top','both','bottom'),"grid lines to skip top bottom both none"),
)
_valueClass = normalDate.ND
    def __init__(self,**kw):
        """Set up XValueAxis state plus the date-formatting defaults."""
        XValueAxis.__init__(self,**kw)
        # some global variables still used...
        self.bottomAxisLabelSlack = 0.1     # fractional extra spacing allowed between labels
        self.niceMonth = 1                  # snap ticks to 'nice' month boundaries
        self.forceEndDate = 0               # always tick the last date if set
        self.forceFirstDate = 0             # always tick the first date if set
        self.forceDatesEachYear = []        # explicit recurring "dd-Mon" tick dates
        self.dailyFreq = 0                  # treat data as daily, tick at month ends
        self.xLabelFormat = "{mm}/{yy}"     # default date label format
        self.dayOfWeekName = ['Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday', 'Sunday']
        self.monthName = ['January', 'February', 'March', 'April', 'May', 'June', 'July',
                          'August', 'September', 'October', 'November', 'December']
        self.specialTickClear = 0           # blank (rather than delete) crowded ticks
        self.valueSteps = self.specifiedTickDates = None
def _scalar2ND(self, x):
"Convert a scalar to a NormalDate value."
d = self._valueClass()
d.normalize(x)
return d
    def _dateFormatter(self, v):
        "Create a formatted label for some value."
        if not isinstance(v,normalDate.NormalDate):
            # scalars are promoted to NormalDate first
            v = self._scalar2ND(v)
        d, m = normalDate._dayOfWeekName, normalDate._monthName
        try:
            # temporarily install this axis' day/month names into the
            # normalDate module so formatMS picks them up...
            normalDate._dayOfWeekName, normalDate._monthName = self.dayOfWeekName, self.monthName
            return v.formatMS(self.xLabelFormat)
        finally:
            # ...and always restore the module-level originals
            normalDate._dayOfWeekName, normalDate._monthName = d, m
def _xAxisTicker(self, xVals):
    """Select tick dates and label strings for the sorted date list *xVals*.

    Three strategies are tried, in order:
      1. self.specifiedTickDates  -- explicit tick values, no calculation;
      2. self.forceDatesEachYear  -- recurring day/month ticks in every year
                                     of the data range;
      3. a spacing search over the _NDINTM candidate intervals, sized so
         that rotated labels (measured in the current label font) do not
         collide.
    forceFirstDate/forceEndDate add boundary ticks; when a forced boundary
    crowds its neighbour, the neighbour is deleted -- or merely blanked when
    self.specialTickClear is set.

    Returns (ticks, labels).
    """
    axisLength = self._length
    formatter = self._dateFormatter
    if isinstance(formatter,TickLabeller):
        # adapt a TickLabeller instance to the plain callable used below
        def formatter(tick):
            return self._dateFormatter(self,tick)
    firstDate = xVals[0]
    endDate = xVals[-1]
    labels = self.labels
    fontName, fontSize, leading = labels.fontName, labels.fontSize, labels.leading
    textAnchor, boxAnchor, angle = labels.textAnchor, labels.boxAnchor, labels.angle
    # measure the (possibly rotated) bounding box of a representative label
    RBL = _textBoxLimits(formatter(firstDate).split('\n'),fontName,
                fontSize,leading or 1.2*fontSize,textAnchor,boxAnchor)
    RBL = _rotatedBoxLimits(RBL[0],RBL[1],RBL[2],RBL[3], angle)
    xLabelW = RBL[1]-RBL[0]
    xLabelH = RBL[3]-RBL[2]
    w = max(xLabelW,labels.width,self.minimumTickSpacing)
    W = w+w*self.bottomAxisLabelSlack   # minimum gap (in points) between adjacent ticks
    n = len(xVals)
    ticks = []
    labels = []  # NB: rebinds 'labels' from the label group to the output list
    maximumTicks = self.maximumTicks
    # --- strategy 1: fully explicit tick dates --------------------------
    if self.specifiedTickDates:
        VC = self._valueClass
        ticks = [VC(x) for x in self.specifiedTickDates]
        labels = [formatter(d) for d in ticks]
        # drop (or blank) the tick next to a forced boundary if it crowds it
        if self.forceFirstDate and firstDate==ticks[0] and (axisLength/float(ticks[-1]-ticks[0]))*(ticks[1]-ticks[0])<=W:
            if self.specialTickClear:
                labels[1] = ''
            else:
                del ticks[1], labels[1]
        if self.forceEndDate and endDate==ticks[-1] and (axisLength/float(ticks[-1]-ticks[0]))*(ticks[-1]-ticks[-2])<=W:
            if self.specialTickClear:
                labels[-2] = ''
            else:
                del ticks[-2], labels[-2]
        return ticks, labels

    def addTick(i, xVals=xVals, formatter=formatter, ticks=ticks, labels=labels):
        # prepend tick i; the defaults bind the enclosing state at definition time
        ticks.insert(0,xVals[i])
        labels.insert(0,formatter(xVals[i]))

    #AR 20060619 - first we try the approach where the user has explicitly
    #specified the days of year to be ticked.  Other explicit routes may
    #be added.
    # --- strategy 2: recurring forced dates each year -------------------
    if self.forceDatesEachYear:
        forcedPartialDates = list(map(parseDayAndMonth, self.forceDatesEachYear))
        #generate the list of dates in the range.
        firstYear = firstDate.year()
        lastYear = endDate.year()
        ticks = []
        labels = []
        yyyy = firstYear
        #generate all forced dates between the year it starts and the year it
        #ends, adding them if within range.
        while yyyy <= lastYear:
            for (dd, mm) in forcedPartialDates:
                theDate = normalDate.ND((yyyy, mm, dd))
                if theDate >= firstDate and theDate <= endDate:
                    ticks.append(theDate)
                    labels.append(formatter(theDate))
            yyyy += 1
        #first and last may still be forced in.
        if self.forceFirstDate and firstDate!=ticks[0]:
            ticks.insert(0, firstDate)
            labels.insert(0,formatter(firstDate))
            if (axisLength/float(ticks[-1]-ticks[0]))*(ticks[1]-ticks[0])<=W:
                if self.specialTickClear:
                    labels[1] = ''
                else:
                    del ticks[1], labels[1]
        if self.forceEndDate and endDate!=ticks[-1]:
            ticks.append(endDate)
            labels.append(formatter(endDate))
            if (axisLength/float(ticks[-1]-ticks[0]))*(ticks[-1]-ticks[-2])<=W:
                if self.specialTickClear:
                    labels[-2] = ''
                else:
                    del ticks[-2], labels[-2]
        return ticks, labels

    #otherwise, we apply the 'magic algorithm...' which looks for nice spacing
    #based on the size and separation of the labels.
    # --- strategy 3: search candidate intervals for one that fits -------
    for d in _NDINTM:
        k = n/d
        if k<=maximumTicks and k*W <= axisLength:
            i = n-1
            if self.niceMonth:
                j = endDate.month() % (d<=12 and d or 12)
                if j:
                    if self.forceEndDate:
                        addTick(i)
                        ticks[0]._doSubTicks=0
                    i -= j
            #weird first date ie not at end of month
            try:
                wfd = firstDate.month() == xVals[1].month()
            except:
                wfd = 0
            # walk backwards from the end, one tick every d samples
            while i>=wfd:
                addTick(i)
                i -= d
            if self.forceFirstDate and ticks[0]!=firstDate:
                addTick(0)
                ticks[0]._doSubTicks=0
                if (axisLength/float(ticks[-1]-ticks[0]))*(ticks[1]-ticks[0])<=W:
                    if self.specialTickClear:
                        labels[1] = ''
                    else:
                        del ticks[1], labels[1]
            # NOTE(review): 'j' is only bound when self.niceMonth is true;
            # with niceMonth false and forceEndDate true this would raise
            # NameError -- presumably niceMonth is always set in that mode.
            if self.forceEndDate and self.niceMonth and j:
                if (axisLength/float(ticks[-1]-ticks[0]))*(ticks[-1]-ticks[-2])<=W:
                    if self.specialTickClear:
                        labels[-2] = ''
                    else:
                        del ticks[-2], labels[-2]
            try:
                # collapse a duplicated leading label
                if labels[0] and labels[0]==labels[1]:
                    del ticks[1], labels[1]
            except IndexError:
                pass
            return ticks, labels

    raise ValueError('Problem selecting NormalDate value axis tick positions')
def _convertXV(self,data):
'''Convert all XValues to a standard normalDate type'''
VC = self._valueClass
for D in data:
for i in range(len(D)):
x, y = D[i]
if not isinstance(x,VC):
D[i] = (VC(x),y)
def _getStepsAndLabels(self,xVals):
if self.dailyFreq:
xEOM = []
pm = 0
px = xVals[0]
for x in xVals:
m = x.month()
if pm!=m:
if pm: xEOM.append(px)
pm = m
px = x
px = xVals[-1]
if xEOM[-1]!=x: xEOM.append(px)
steps, labels = self._xAxisTicker(xEOM)
else:
steps, labels = self._xAxisTicker(xVals)
return steps, labels
def configure(self, data):
    """Configure the axis from *data* (a list of series of (x, y) pairs).

    Normalizes all x values to the axis date type, derives the sorted set
    of distinct x values, computes tick positions/labels, and sets the
    internal _valueMin/_valueMax/_scaleFactor used for drawing.
    """
    self._convertXV(data)
    xVals = set()
    for x in data:
        for dv in x:
            xVals.add(dv[0])
    xVals = list(xVals)
    xVals.sort()
    steps,labels = self._getStepsAndLabels(xVals)
    valueMin, valueMax = self.valueMin, self.valueMax
    if valueMin is None: valueMin = xVals[0]
    if valueMax is None: valueMax = xVals[-1]
    self._valueMin, self._valueMax = valueMin, valueMax
    # (the original assigned self._tickValues twice; the duplicate is removed)
    self._tickValues = steps
    self._labelTextFormat = labels
    # NOTE(review): raises ZeroDivisionError when valueMax == valueMin
    # (a single distinct x value); presumably callers guarantee a range.
    self._scaleFactor = self._length / float(valueMax - valueMin)
    self._configured = 1
class YValueAxis(_YTicks,ValueAxis):
    """Y/value axis.

    A vertical ValueAxis (data dimension 1) that can optionally be joined
    to an x axis via the declarative joinAxis/joinAxisMode/joinAxisPos
    attributes.
    """
    _attrMap = AttrMap(BASE=ValueAxis,
        tickLeft = AttrMapValue(isNumber,
            desc='Tick length left of the axis.'),
        tickRight = AttrMapValue(isNumber,
            desc='Tick length right of the axis.'),
        joinAxis = AttrMapValue(None,
            desc='Join both axes if true.'),
        joinAxisMode = AttrMapValue(OneOf(('left', 'right', 'value', 'points', None)),
            desc="Mode used for connecting axis ('left', 'right', 'value', 'points', None)."),
        joinAxisPos = AttrMapValue(isNumberOrNone,
            desc='Position at which to join with other axis.'),
        )

    # Indicate the dimension of the data we're interested in.
    _dataIndex = 1

    def __init__(self):
        ValueAxis.__init__(self)
        self.labels.boxAnchor = 'e'     # labels sit to the left of the axis line
        self.labels.dx = -5
        self.labels.dy = 0
        self.tickRight = 0
        self.tickLeft = 5
        self.joinAxis = None
        self.joinAxisMode = None
        self.joinAxisPos = None

    def demo(self):
        """Return a small Drawing showing this axis with sample data."""
        data = [(10, 20, 30, 42)]
        self.setPosition(100, 10, 80)
        self.configure(data)
        drawing = Drawing(200, 100)
        drawing.add(self)
        return drawing

    def joinToAxis(self, xAxis, mode='left', pos=None):
        """Join with x-axis using some mode.

        'left'/'right' snap to the x axis' ends, 'value' converts *pos*
        through the x axis' scale, 'points' uses *pos* directly.
        Unrecognized modes are silently ignored.
        """
        _assertXAxis(xAxis)
        if mode == 'left':
            self._x = xAxis._x * 1.0
        elif mode == 'right':
            self._x = (xAxis._x + xAxis._length) * 1.0
        elif mode == 'value':
            self._x = xAxis.scale(pos) * 1.0
        elif mode == 'points':
            self._x = pos * 1.0

    def _joinToAxis(self):
        # Apply the declarative joinAxis/joinAxisMode/joinAxisPos attributes.
        ja = self.joinAxis
        if ja:
            jam = self.joinAxisMode
            if jam in ('left', 'right'):
                self.joinToAxis(ja, mode=jam)
            elif jam in ('value', 'points'):
                self.joinToAxis(ja, mode=jam, pos=self.joinAxisPos)

    def makeAxis(self):
        """Build the axis line itself as a Group (empty when invisible)."""
        g = Group()
        self._joinToAxis()
        if not self.visibleAxis: return g
        axis = Line(self._x, self._y-self.loLLen, self._x, self._y + self._length+self.hiLLen)
        axis.strokeColor = self.strokeColor
        axis.strokeWidth = self.strokeWidth
        axis.strokeDashArray = self.strokeDashArray
        g.add(axis)
        return g
class AdjYValueAxis(YValueAxis):
    """A Y-axis applying additional rules.

    Depending on the data and some built-in rules, the axis
    may choose to adjust its range and origin.
    """
    _attrMap = AttrMap(BASE = YValueAxis,
        leftAxisPercent = AttrMapValue(isBoolean, desc='When true add percent sign to label values.'),
        leftAxisOrigShiftIPC = AttrMapValue(isNumber, desc='Lowest label shift interval ratio.'),
        leftAxisOrigShiftMin = AttrMapValue(isNumber, desc='Minimum amount to shift.'),
        leftAxisSkipLL0 = AttrMapValue(EitherOr((isBoolean,isListOfNumbers)), desc='Skip/Keep lowest tick label when true/false.\nOr skiplist'),
        labelVOffset = AttrMapValue(isNumber, desc='add this to the labels'),
        )

    def __init__(self,**kw):
        YValueAxis.__init__(self,**kw)
        self.requiredRange = 30          # minimum span the axis range is widened to
        self.leftAxisPercent = 1
        self.leftAxisOrigShiftIPC = 0.15
        self.leftAxisOrigShiftMin = 12
        self.leftAxisSkipLL0 = self.labelVOffset = 0
        self.valueSteps = None

    def _rangeAdjust(self):
        """Adjusts the value range of the axis.

        Widens the range to self.requiredRange if needed (with special
        casing so ranges starting at >=100 or >=0 are not pushed below
        those bounds), computes nice tick values/labels via
        charts.utils.ticks, optionally pads the bounds by avoidBoundFrac,
        and finally applies the origin-shift and label-skipping rules.
        """
        from reportlab.graphics.charts.utils import find_good_grid, ticks
        y_min, y_max = self._valueMin, self._valueMax
        m = self.maximumTicks
        # candidate tick counts not exceeding maximumTicks
        n = list(filter(lambda x,m=m: x<=m,[4,5,6,7,8,9]))
        if not n: n = [m]
        valueStep, requiredRange = self.valueStep, self.requiredRange
        if requiredRange and y_max - y_min < requiredRange:
            # widen the range symmetrically around its midpoint
            y1, y2 = find_good_grid(y_min, y_max,n=n,grid=valueStep)[:2]
            if y2 - y1 < requiredRange:
                ym = (y1+y2)*0.5
                y1 = min(ym-requiredRange*0.5,y_min)
                y2 = max(ym+requiredRange*0.5,y_max)
                # keep the widened range from dropping below 100 (resp. 0)
                # when the data itself never does
                if y_min>=100 and y1<100:
                    y2 = y2 + 100 - y1
                    y1 = 100
                elif y_min>=0 and y1<0:
                    y2 = y2 - y1
                    y1 = 0
            self._valueMin, self._valueMax = y1, y2
        T, L = ticks(self._valueMin, self._valueMax, split=1, n=n, percent=self.leftAxisPercent,grid=valueStep, labelVOffset=self.labelVOffset)
        abf = self.avoidBoundFrac
        if abf:
            # pad the bounds so data points do not sit on the axis ends;
            # abf may be a single fraction or a (low, high) pair
            i1 = (T[1]-T[0])
            if not isSeq(abf):
                i0 = i1 = i1*abf
            else:
                i0 = i1*abf[0]
                i1 = i1*abf[1]
            _n = getattr(self,'_cValueMin',T[0])
            _x = getattr(self,'_cValueMax',T[-1])
            if _n - T[0] < i0: self._valueMin = self._valueMin - i0
            if T[-1]-_x < i1: self._valueMax = self._valueMax + i1
            T, L = ticks(self._valueMin, self._valueMax, split=1, n=n, percent=self.leftAxisPercent,grid=valueStep, labelVOffset=self.labelVOffset)
        self._valueMin = T[0]
        self._valueMax = T[-1]
        self._tickValues = T
        if self.labelTextFormat is None:
            self._labelTextFormat = L
        else:
            self._labelTextFormat = self.labelTextFormat
        # NOTE(review): magic constant -- when the axis bottoms out at
        # exactly 100 the origin is shifted down a little; rationale not
        # evident from this file.
        if abs(self._valueMin-100)<1e-6:
            self._calcValueStep()
            vMax, vMin = self._valueMax, self._valueMin
            m = max(self.leftAxisOrigShiftIPC*self._valueStep,
                    (vMax-vMin)*self.leftAxisOrigShiftMin/self._length)
            self._valueMin = self._valueMin - m
        if self.leftAxisSkipLL0:
            # blank the listed label indices (when a sequence) and always
            # the lowest label
            if isSeq(self.leftAxisSkipLL0):
                for x in self.leftAxisSkipLL0:
                    try:
                        L[x] = ''
                    except IndexError:
                        pass
            L[0] = ''
# Sample functions.
#
# Each sample builds and returns a standalone Drawing exercising one axis
# configuration; they double as visual regression demos for this module.

def sample0a():
    "Sample drawing with one xcat axis and two buckets."

    drawing = Drawing(400, 200)

    data = [(10, 20)]

    xAxis = XCategoryAxis()
    xAxis.setPosition(75, 75, 300)
    xAxis.configure(data)
    xAxis.categoryNames = ['Ying', 'Yang']
    xAxis.labels.boxAnchor = 'n'

    drawing.add(xAxis)

    return drawing

def sample0b():
    "Sample drawing with one xcat axis and one bucket only."

    drawing = Drawing(400, 200)

    data = [(10,)]

    xAxis = XCategoryAxis()
    xAxis.setPosition(75, 75, 300)
    xAxis.configure(data)
    xAxis.categoryNames = ['Ying']
    xAxis.labels.boxAnchor = 'n'

    drawing.add(xAxis)

    return drawing

def sample1():
    "Sample drawing containing two unconnected axes."
    from reportlab.graphics.shapes import _baseGFontNameB

    drawing = Drawing(400, 200)

    data = [(10, 20, 30, 42)]

    xAxis = XCategoryAxis()
    xAxis.setPosition(75, 75, 300)
    xAxis.configure(data)
    xAxis.categoryNames = ['Beer','Wine','Meat','Cannelloni']
    xAxis.labels.boxAnchor = 'n'
    # individually styled label: dropped, rotated and bold
    xAxis.labels[3].dy = -15
    xAxis.labels[3].angle = 30
    xAxis.labels[3].fontName = _baseGFontNameB

    yAxis = YValueAxis()
    yAxis.setPosition(50, 50, 125)
    yAxis.configure(data)

    drawing.add(xAxis)
    drawing.add(yAxis)

    return drawing
# sample4*: a YValueAxis with an XValueAxis joined to it in each mode.

def sample4a():
    "Sample drawing, xvalue/yvalue axes, y connected at 100 pts to x."

    drawing = Drawing(400, 200)

    data = [(10, 20, 30, 42)]

    yAxis = YValueAxis()
    yAxis.setPosition(50, 50, 125)
    yAxis.configure(data)

    xAxis = XValueAxis()
    xAxis._length = 300
    xAxis.joinAxis = yAxis
    xAxis.joinAxisMode = 'points'
    xAxis.joinAxisPos = 100
    xAxis.configure(data)

    drawing.add(xAxis)
    drawing.add(yAxis)

    return drawing

def sample4b():
    "Sample drawing, xvalue/yvalue axes, y connected at value 35 of x."

    drawing = Drawing(400, 200)

    data = [(10, 20, 30, 42)]

    yAxis = YValueAxis()
    yAxis.setPosition(50, 50, 125)
    yAxis.configure(data)

    xAxis = XValueAxis()
    xAxis._length = 300
    xAxis.joinAxis = yAxis
    xAxis.joinAxisMode = 'value'
    xAxis.joinAxisPos = 35
    xAxis.configure(data)

    drawing.add(xAxis)
    drawing.add(yAxis)

    return drawing

def sample4c():
    "Sample drawing, xvalue/yvalue axes, y connected to bottom of x."

    drawing = Drawing(400, 200)

    data = [(10, 20, 30, 42)]

    yAxis = YValueAxis()
    yAxis.setPosition(50, 50, 125)
    yAxis.configure(data)

    xAxis = XValueAxis()
    xAxis._length = 300
    xAxis.joinAxis = yAxis
    xAxis.joinAxisMode = 'bottom'
    xAxis.configure(data)

    drawing.add(xAxis)
    drawing.add(yAxis)

    return drawing

def sample4c1():
    "xvalue/yvalue axes, without drawing axis lines/ticks."

    drawing = Drawing(400, 200)

    data = [(10, 20, 30, 42)]

    yAxis = YValueAxis()
    yAxis.setPosition(50, 50, 125)
    yAxis.configure(data)
    # hide the axis furniture; only the labels remain visible
    yAxis.visibleAxis = 0
    yAxis.visibleTicks = 0

    xAxis = XValueAxis()
    xAxis._length = 300
    xAxis.joinAxis = yAxis
    xAxis.joinAxisMode = 'bottom'
    xAxis.configure(data)
    xAxis.visibleAxis = 0
    xAxis.visibleTicks = 0

    drawing.add(xAxis)
    drawing.add(yAxis)

    return drawing

def sample4d():
    "Sample drawing, xvalue/yvalue axes, y connected to top of x."

    drawing = Drawing(400, 200)

    data = [(10, 20, 30, 42)]

    yAxis = YValueAxis()
    yAxis.setPosition(50, 50, 125)
    yAxis.configure(data)

    xAxis = XValueAxis()
    xAxis._length = 300
    xAxis.joinAxis = yAxis
    xAxis.joinAxisMode = 'top'
    xAxis.configure(data)

    drawing.add(xAxis)
    drawing.add(yAxis)

    return drawing
# sample5*: mirror images of sample4* -- the YValueAxis is the one joined.

def sample5a():
    "Sample drawing, xvalue/yvalue axes, y connected at 100 pts to x."

    drawing = Drawing(400, 200)

    data = [(10, 20, 30, 42)]

    xAxis = XValueAxis()
    xAxis.setPosition(50, 50, 300)
    xAxis.configure(data)

    yAxis = YValueAxis()
    yAxis.setPosition(50, 50, 125)
    yAxis.joinAxis = xAxis
    yAxis.joinAxisMode = 'points'
    yAxis.joinAxisPos = 100
    yAxis.configure(data)

    drawing.add(xAxis)
    drawing.add(yAxis)

    return drawing

def sample5b():
    "Sample drawing, xvalue/yvalue axes, y connected at value 35 of x."

    drawing = Drawing(400, 200)

    data = [(10, 20, 30, 42)]

    xAxis = XValueAxis()
    xAxis.setPosition(50, 50, 300)
    xAxis.configure(data)

    yAxis = YValueAxis()
    yAxis.setPosition(50, 50, 125)
    yAxis.joinAxis = xAxis
    yAxis.joinAxisMode = 'value'
    yAxis.joinAxisPos = 35
    yAxis.configure(data)

    drawing.add(xAxis)
    drawing.add(yAxis)

    return drawing

def sample5c():
    "Sample drawing, xvalue/yvalue axes, y connected at right of x."

    drawing = Drawing(400, 200)

    data = [(10, 20, 30, 42)]

    xAxis = XValueAxis()
    xAxis.setPosition(50, 50, 300)
    xAxis.configure(data)

    yAxis = YValueAxis()
    yAxis.setPosition(50, 50, 125)
    yAxis.joinAxis = xAxis
    yAxis.joinAxisMode = 'right'
    yAxis.configure(data)

    drawing.add(xAxis)
    drawing.add(yAxis)

    return drawing

def sample5d():
    "Sample drawing, xvalue/yvalue axes, y connected at left of x."

    drawing = Drawing(400, 200)

    data = [(10, 20, 30, 42)]

    xAxis = XValueAxis()
    xAxis.setPosition(50, 50, 300)
    xAxis.configure(data)

    yAxis = YValueAxis()
    yAxis.setPosition(50, 50, 125)
    yAxis.joinAxis = xAxis
    yAxis.joinAxisMode = 'left'
    yAxis.configure(data)

    drawing.add(xAxis)
    drawing.add(yAxis)

    return drawing
# sample6*: an XCategoryAxis joined to a YValueAxis in each mode.

def sample6a():
    "Sample drawing, xcat/yvalue axes, x connected at top of y."

    drawing = Drawing(400, 200)

    data = [(10, 20, 30, 42)]

    yAxis = YValueAxis()
    yAxis.setPosition(50, 50, 125)
    yAxis.configure(data)

    xAxis = XCategoryAxis()
    xAxis._length = 300
    xAxis.configure(data)
    xAxis.joinAxis = yAxis
    xAxis.joinAxisMode = 'top'
    xAxis.categoryNames = ['Beer', 'Wine', 'Meat', 'Cannelloni']
    xAxis.labels.boxAnchor = 'n'

    drawing.add(xAxis)
    drawing.add(yAxis)

    return drawing

def sample6b():
    "Sample drawing, xcat/yvalue axes, x connected at bottom of y."

    drawing = Drawing(400, 200)

    data = [(10, 20, 30, 42)]

    yAxis = YValueAxis()
    yAxis.setPosition(50, 50, 125)
    yAxis.configure(data)

    xAxis = XCategoryAxis()
    xAxis._length = 300
    xAxis.configure(data)
    xAxis.joinAxis = yAxis
    xAxis.joinAxisMode = 'bottom'
    xAxis.categoryNames = ['Beer', 'Wine', 'Meat', 'Cannelloni']
    xAxis.labels.boxAnchor = 'n'

    drawing.add(xAxis)
    drawing.add(yAxis)

    return drawing

def sample6c():
    "Sample drawing, xcat/yvalue axes, x connected at 100 pts to y."

    drawing = Drawing(400, 200)

    data = [(10, 20, 30, 42)]

    yAxis = YValueAxis()
    yAxis.setPosition(50, 50, 125)
    yAxis.configure(data)

    xAxis = XCategoryAxis()
    xAxis._length = 300
    xAxis.configure(data)
    xAxis.joinAxis = yAxis
    xAxis.joinAxisMode = 'points'
    xAxis.joinAxisPos = 100
    xAxis.categoryNames = ['Beer', 'Wine', 'Meat', 'Cannelloni']
    xAxis.labels.boxAnchor = 'n'

    drawing.add(xAxis)
    drawing.add(yAxis)

    return drawing

def sample6d():
    "Sample drawing, xcat/yvalue axes, x connected at value 20 of y."

    drawing = Drawing(400, 200)

    data = [(10, 20, 30, 42)]

    yAxis = YValueAxis()
    yAxis.setPosition(50, 50, 125)
    yAxis.configure(data)

    xAxis = XCategoryAxis()
    xAxis._length = 300
    xAxis.configure(data)
    xAxis.joinAxis = yAxis
    xAxis.joinAxisMode = 'value'
    xAxis.joinAxisPos = 20
    xAxis.categoryNames = ['Beer', 'Wine', 'Meat', 'Cannelloni']
    xAxis.labels.boxAnchor = 'n'

    drawing.add(xAxis)
    drawing.add(yAxis)

    return drawing
# sample7*: a YCategoryAxis joined to an XValueAxis in each mode.

def sample7a():
    "Sample drawing, xvalue/ycat axes, y connected at right of x."

    drawing = Drawing(400, 200)

    data = [(10, 20, 30, 42)]

    xAxis = XValueAxis()
    xAxis._length = 300
    xAxis.configure(data)

    yAxis = YCategoryAxis()
    yAxis.setPosition(50, 50, 125)
    yAxis.joinAxis = xAxis
    yAxis.joinAxisMode = 'right'
    yAxis.categoryNames = ['Beer', 'Wine', 'Meat', 'Cannelloni']
    yAxis.labels.boxAnchor = 'e'
    yAxis.configure(data)

    drawing.add(xAxis)
    drawing.add(yAxis)

    return drawing

def sample7b():
    "Sample drawing, xvalue/ycat axes, y connected at left of x."

    drawing = Drawing(400, 200)

    data = [(10, 20, 30, 42)]

    xAxis = XValueAxis()
    xAxis._length = 300
    xAxis.configure(data)

    yAxis = YCategoryAxis()
    yAxis.setPosition(50, 50, 125)
    yAxis.joinAxis = xAxis
    yAxis.joinAxisMode = 'left'
    yAxis.categoryNames = ['Beer', 'Wine', 'Meat', 'Cannelloni']
    yAxis.labels.boxAnchor = 'e'
    yAxis.configure(data)

    drawing.add(xAxis)
    drawing.add(yAxis)

    return drawing

def sample7c():
    "Sample drawing, xvalue/ycat axes, y connected at value 30 of x."

    drawing = Drawing(400, 200)

    data = [(10, 20, 30, 42)]

    xAxis = XValueAxis()
    xAxis._length = 300
    xAxis.configure(data)

    yAxis = YCategoryAxis()
    yAxis.setPosition(50, 50, 125)
    yAxis.joinAxis = xAxis
    yAxis.joinAxisMode = 'value'
    yAxis.joinAxisPos = 30
    yAxis.categoryNames = ['Beer', 'Wine', 'Meat', 'Cannelloni']
    yAxis.labels.boxAnchor = 'e'
    yAxis.configure(data)

    drawing.add(xAxis)
    drawing.add(yAxis)

    return drawing

def sample7d():
    "Sample drawing, xvalue/ycat axes, y connected at 200 pts to x."

    drawing = Drawing(400, 200)

    data = [(10, 20, 30, 42)]

    xAxis = XValueAxis()
    xAxis._length = 300
    xAxis.configure(data)

    yAxis = YCategoryAxis()
    yAxis.setPosition(50, 50, 125)
    yAxis.joinAxis = xAxis
    yAxis.joinAxisMode = 'points'
    yAxis.joinAxisPos = 200
    yAxis.categoryNames = ['Beer', 'Wine', 'Meat', 'Cannelloni']
    yAxis.labels.boxAnchor = 'e'
    yAxis.configure(data)

    drawing.add(xAxis)
    drawing.add(yAxis)

    return drawing
| gpl-3.0 |
taedla01/MissionPlanner | Lib/distutils/config.py | 53 | 4336 | """distutils.pypirc
Provides the PyPIRCCommand class, the base class for the command classes
that uses .pypirc in the distutils.command package.
"""
import os
from ConfigParser import ConfigParser
from distutils.cmd import Command
DEFAULT_PYPIRC = """\
[distutils]
index-servers =
pypi
[pypi]
username:%s
password:%s
"""
class PyPIRCCommand(Command):
    """Base command that knows how to handle the .pypirc file
    """
    DEFAULT_REPOSITORY = 'http://pypi.python.org/pypi'
    DEFAULT_REALM = 'pypi'
    repository = None
    realm = None

    user_options = [
        ('repository=', 'r',
         "url of repository [default: %s]" % \
            DEFAULT_REPOSITORY),
        ('show-response', None,
         'display full response text from server')]

    boolean_options = ['show-response']

    def _get_rc_file(self):
        """Returns rc file path."""
        return os.path.join(os.path.expanduser('~'), '.pypirc')

    def _store_pypirc(self, username, password):
        """Creates a default .pypirc file."""
        rc = self._get_rc_file()
        f = open(rc, 'w')
        try:
            f.write(DEFAULT_PYPIRC % (username, password))
        finally:
            f.close()
        try:
            # restrict the credentials file to owner read/write (rw-------)
            os.chmod(rc, 0600)
        except OSError:
            # should do something better here
            pass

    def _read_pypirc(self):
        """Reads the .pypirc file.

        Returns the configuration dict for the server matching
        self.repository (or the default repository) -- keys: 'server',
        'username', 'password', 'repository', 'realm' -- or an empty
        dict when no usable configuration is found.  Understands both
        the current [distutils]/index-servers layout and the legacy
        [server-login] format.
        """
        rc = self._get_rc_file()
        if os.path.exists(rc):
            self.announce('Using PyPI login from %s' % rc)
            repository = self.repository or self.DEFAULT_REPOSITORY
            config = ConfigParser()
            config.read(rc)
            sections = config.sections()
            if 'distutils' in sections:
                # let's get the list of servers
                index_servers = config.get('distutils', 'index-servers')
                _servers = [server.strip() for server in
                            index_servers.split('\n')
                            if server.strip() != '']
                if _servers == []:
                    # nothing set, let's try to get the default pypi
                    if 'pypi' in sections:
                        _servers = ['pypi']
                    else:
                        # the file is not properly defined, returning
                        # an empty dict
                        return {}
                for server in _servers:
                    current = {'server': server}
                    current['username'] = config.get(server, 'username')

                    # optional params
                    for key, default in (('repository',
                                          self.DEFAULT_REPOSITORY),
                                         ('realm', self.DEFAULT_REALM),
                                         ('password', None)):
                        if config.has_option(server, key):
                            current[key] = config.get(server, key)
                        else:
                            current[key] = default
                    # first server whose name or repository URL matches wins
                    if (current['server'] == repository or
                        current['repository'] == repository):
                        return current
            elif 'server-login' in sections:
                # old format
                server = 'server-login'
                if config.has_option(server, 'repository'):
                    repository = config.get(server, 'repository')
                else:
                    repository = self.DEFAULT_REPOSITORY
                return {'username': config.get(server, 'username'),
                        'password': config.get(server, 'password'),
                        'repository': repository,
                        'server': server,
                        'realm': self.DEFAULT_REALM}

        return {}

    def initialize_options(self):
        """Initialize options."""
        self.repository = None
        self.realm = None
        self.show_response = 0

    def finalize_options(self):
        """Finalizes options."""
        if self.repository is None:
            self.repository = self.DEFAULT_REPOSITORY
        if self.realm is None:
            self.realm = self.DEFAULT_REALM
| gpl-3.0 |
michalliu/OpenWrt-Firefly-Libraries | staging_dir/host/lib/python2.7/idlelib/ToolTip.py | 36 | 3173 | # general purpose 'tooltip' routines - currently unused in idlefork
# (although the 'calltips' extension is partly based on this code)
# may be useful for some purposes in (or almost in ;) the current project scope
# Ideas gleaned from PySol
from Tkinter import *
class ToolTipBase:
    """Abstract tooltip: shows a small undecorated window near *button*
    1.5s after the mouse enters it; subclasses override showcontents().
    """
    def __init__(self, button):
        self.button = button
        self.tipwindow = None
        self.id = None          # pending 'after' timer id, if any
        self.x = self.y = 0
        self._id1 = self.button.bind("<Enter>", self.enter)
        self._id2 = self.button.bind("<Leave>", self.leave)
        self._id3 = self.button.bind("<ButtonPress>", self.leave)

    def enter(self, event=None):
        self.schedule()

    def leave(self, event=None):
        self.unschedule()
        self.hidetip()

    def schedule(self):
        # restart the 1.5s delay before the tip appears
        self.unschedule()
        self.id = self.button.after(1500, self.showtip)

    def unschedule(self):
        id = self.id
        self.id = None
        if id:
            self.button.after_cancel(id)

    def showtip(self):
        """Create and place the borderless tip window, then delegate the
        contents to showcontents()."""
        if self.tipwindow:
            return
        # The tip window must be completely outside the button;
        # otherwise when the mouse enters the tip window we get
        # a leave event and it disappears, and then we get an enter
        # event and it reappears, and so on forever :-(
        x = self.button.winfo_rootx() + 20
        y = self.button.winfo_rooty() + self.button.winfo_height() + 1
        self.tipwindow = tw = Toplevel(self.button)
        tw.wm_overrideredirect(1)   # no window-manager decorations
        tw.wm_geometry("+%d+%d" % (x, y))
        self.showcontents()

    def showcontents(self, text="Your text here"):
        # Override this in derived class
        label = Label(self.tipwindow, text=text, justify=LEFT,
                      background="#ffffe0", relief=SOLID, borderwidth=1)
        label.pack()

    def hidetip(self):
        tw = self.tipwindow
        self.tipwindow = None
        if tw:
            tw.destroy()
class ToolTip(ToolTipBase):
    """Tooltip showing a fixed text string."""
    def __init__(self, button, text):
        ToolTipBase.__init__(self, button)
        self.text = text

    def showcontents(self):
        ToolTipBase.showcontents(self, self.text)

class ListboxToolTip(ToolTipBase):
    """Tooltip showing a Listbox with one row per item."""
    def __init__(self, button, items):
        ToolTipBase.__init__(self, button)
        self.items = items

    def showcontents(self):
        listbox = Listbox(self.tipwindow, background="#ffffe0")
        listbox.pack()
        for item in self.items:
            listbox.insert(END, item)
def _tooltip(parent):
    """htest entry point: demo window showing both tooltip flavours,
    positioned just below *parent*."""
    # Bug fix: 're' was used without ever being imported in this module
    # ('from Tkinter import *' at the top does not provide it), so this
    # function raised NameError at runtime.  Import it locally.
    import re
    root = Tk()
    root.title("Test tooltip")
    # parse 'WxH+X+Y' from the parent's geometry string
    width, height, x, y = list(map(int, re.split('[x+]', parent.geometry())))
    root.geometry("+%d+%d"%(x, y + 150))
    label = Label(root, text="Place your mouse over buttons")
    label.pack()
    button1 = Button(root, text="Button 1")
    button2 = Button(root, text="Button 2")
    button1.pack()
    button2.pack()
    ToolTip(button1, "This is tooltip text for button1.")
    ListboxToolTip(button2, ["This is","multiple line",
                             "tooltip text","for button2"])
    root.mainloop()
if __name__ == '__main__':
    # Run the human-verified test (htest) for this widget.
    from idlelib.idle_test.htest import run
    run(_tooltip)
| gpl-2.0 |
Wolfenstein-Inc/Goldcoin | contrib/pyminer/pyminer.py | 1257 | 6438 | #!/usr/bin/python
#
# Copyright (c) 2011 The Bitcoin developers
# Distributed under the MIT/X11 software license, see the accompanying
# file license.txt or http://www.opensource.org/licenses/mit-license.php.
#
import time
import json
import pprint
import hashlib
import struct
import re
import base64
import httplib
import sys
from multiprocessing import Process
ERR_SLEEP = 15          # seconds to back off after an RPC failure
MAX_NONCE = 1000000L    # nonces scanned per getwork before refreshing work

settings = {}           # populated from the config file in __main__
pp = pprint.PrettyPrinter(indent=4)
class BitcoinRPC:
    """Minimal JSON-RPC 1.1 client for a bitcoind node over HTTP basic auth.

    (Python 2 code: uses httplib and str bodies.)
    """
    OBJID = 1   # request-id counter

    def __init__(self, host, port, username, password):
        authpair = "%s:%s" % (username, password)
        self.authhdr = "Basic %s" % (base64.b64encode(authpair))
        # plain HTTP, 30 second timeout
        self.conn = httplib.HTTPConnection(host, port, False, 30)

    def rpc(self, method, params=None):
        """Issue one JSON-RPC call.

        Returns the call's 'result' on success, the server's error object
        on RPC-level failure, or None on transport/decoding problems.
        """
        # NB: 'self.OBJID += 1' creates an instance attribute shadowing the
        # class counter, so ids are per-connection, not global.
        self.OBJID += 1
        obj = { 'version' : '1.1',
                'method' : method,
                'id' : self.OBJID }
        if params is None:
            obj['params'] = []
        else:
            obj['params'] = params
        self.conn.request('POST', '/', json.dumps(obj),
            { 'Authorization' : self.authhdr,
              'Content-type' : 'application/json' })

        resp = self.conn.getresponse()
        if resp is None:
            print "JSON-RPC: no response"
            return None

        body = resp.read()
        resp_obj = json.loads(body)
        if resp_obj is None:
            print "JSON-RPC: cannot JSON-decode body"
            return None
        if 'error' in resp_obj and resp_obj['error'] != None:
            return resp_obj['error']
        if 'result' not in resp_obj:
            print "JSON-RPC: no result in object"
            return None

        return resp_obj['result']

    def getblockcount(self):
        return self.rpc('getblockcount')

    def getwork(self, data=None):
        return self.rpc('getwork', data)
def uint32(x):
    """Truncate *x* to an unsigned 32-bit value."""
    return x & 0xffffffffL

def bytereverse(x):
    """Byte-swap a 32-bit word (endianness flip)."""
    return uint32(( ((x) << 24) | (((x) << 8) & 0x00ff0000) |
            (((x) >> 8) & 0x0000ff00) | ((x) >> 24) ))

def bufreverse(in_buf):
    """Byte-swap every 32-bit word of a buffer (getwork byte ordering)."""
    out_words = []
    for i in range(0, len(in_buf), 4):
        word = struct.unpack('@I', in_buf[i:i+4])[0]
        out_words.append(struct.pack('@I', bytereverse(word)))
    return ''.join(out_words)

def wordreverse(in_buf):
    """Reverse the order of the 32-bit words in a buffer."""
    out_words = []
    for i in range(0, len(in_buf), 4):
        out_words.append(in_buf[i:i+4])
    out_words.reverse()
    return ''.join(out_words)
class Miner:
    """One CPU mining worker: fetches getwork, scans nonces, submits hits."""
    def __init__(self, id):
        self.id = id
        self.max_nonce = MAX_NONCE  # adapted each round to target 'scantime'

    def work(self, datastr, targetstr):
        """Scan nonces 0..max_nonce against one getwork job.

        Returns (hashes_done, nonce_bin) where nonce_bin is the packed
        winning nonce, or None if the scan range was exhausted.
        """
        # decode work data hex string to binary
        static_data = datastr.decode('hex')
        static_data = bufreverse(static_data)

        # the first 76b of 80b do not change
        blk_hdr = static_data[:76]

        # decode 256-bit target value
        targetbin = targetstr.decode('hex')
        targetbin = targetbin[::-1]	# byte-swap and dword-swap
        targetbin_str = targetbin.encode('hex')
        target = long(targetbin_str, 16)

        # pre-hash first 76b of block header
        static_hash = hashlib.sha256()
        static_hash.update(blk_hdr)

        for nonce in xrange(self.max_nonce):

            # encode 32-bit nonce value
            nonce_bin = struct.pack("<I", nonce)

            # hash final 4b, the nonce value
            hash1_o = static_hash.copy()
            hash1_o.update(nonce_bin)
            hash1 = hash1_o.digest()

            # sha256 hash of sha256 hash
            hash_o = hashlib.sha256()
            hash_o.update(hash1)
            hash = hash_o.digest()

            # quick test for winning solution: high 32 bits zero?
            if hash[-4:] != '\0\0\0\0':
                continue

            # convert binary hash to 256-bit Python long
            hash = bufreverse(hash)
            hash = wordreverse(hash)

            hash_str = hash.encode('hex')
            l = long(hash_str, 16)

            # proof-of-work test:  hash < target
            if l < target:
                print time.asctime(), "PROOF-OF-WORK found: %064x" % (l,)
                return (nonce + 1, nonce_bin)
            else:
                print time.asctime(), "PROOF-OF-WORK false positive %064x" % (l,)
#				return (nonce + 1, nonce_bin)

        return (nonce + 1, None)

    def submit_work(self, rpc, original_data, nonce_bin):
        """Splice the winning nonce back into the work and submit via getwork."""
        nonce_bin = bufreverse(nonce_bin)
        nonce = nonce_bin.encode('hex')
        solution = original_data[:152] + nonce + original_data[160:256]
        param_arr = [ solution ]
        result = rpc.getwork(param_arr)
        print time.asctime(), "--> Upstream RPC result:", result

    def iterate(self, rpc):
        """One mining round: fetch work, scan, retune max_nonce, submit."""
        work = rpc.getwork()
        if work is None:
            time.sleep(ERR_SLEEP)
            return
        if 'data' not in work or 'target' not in work:
            time.sleep(ERR_SLEEP)
            return

        time_start = time.time()

        (hashes_done, nonce_bin) = self.work(work['data'],
                                             work['target'])

        time_end = time.time()
        time_diff = time_end - time_start

        # resize the scan range so a round takes roughly settings['scantime']
        self.max_nonce = long(
            (hashes_done * settings['scantime']) / time_diff)
        if self.max_nonce > 0xfffffffaL:
            self.max_nonce = 0xfffffffaL

        if settings['hashmeter']:
            print "HashMeter(%d): %d hashes, %.2f Khash/sec" % (
                  self.id, hashes_done,
                  (hashes_done / 1000.0) / time_diff)

        if nonce_bin is not None:
            self.submit_work(rpc, work['data'], nonce_bin)

    def loop(self):
        """Mine forever against the configured node."""
        rpc = BitcoinRPC(settings['host'], settings['port'],
                         settings['rpcuser'], settings['rpcpass'])
        if rpc is None:
            return

        while True:
            self.iterate(rpc)
def miner_thread(id):
    """Worker-process entry point: run one Miner forever."""
    miner = Miner(id)
    miner.loop()
if __name__ == '__main__':
    if len(sys.argv) != 2:
        print "Usage: pyminer.py CONFIG-FILE"
        sys.exit(1)

    # Parse the 'key = value' config file, ignoring '#' comment lines.
    f = open(sys.argv[1])
    for line in f:
        # skip comment lines
        m = re.search('^\s*#', line)
        if m:
            continue

        # parse key=value lines
        m = re.search('^(\w+)\s*=\s*(\S.*)$', line)
        if m is None:
            continue
        settings[m.group(1)] = m.group(2)
    f.close()

    # Defaults for everything except the mandatory RPC credentials.
    if 'host' not in settings:
        settings['host'] = '127.0.0.1'
    if 'port' not in settings:
        settings['port'] = 8332
    if 'threads' not in settings:
        settings['threads'] = 1
    if 'hashmeter' not in settings:
        settings['hashmeter'] = 0
    if 'scantime' not in settings:
        settings['scantime'] = 30L
    if 'rpcuser' not in settings or 'rpcpass' not in settings:
        print "Missing username and/or password in cfg file"
        sys.exit(1)

    settings['port'] = int(settings['port'])
    settings['threads'] = int(settings['threads'])
    settings['hashmeter'] = int(settings['hashmeter'])
    settings['scantime'] = long(settings['scantime'])

    # One OS process per configured mining thread.
    thr_list = []
    for thr_id in range(settings['threads']):
        p = Process(target=miner_thread, args=(thr_id,))
        p.start()
        thr_list.append(p)
        time.sleep(1)			# stagger threads

    print settings['threads'], "mining threads started"

    print time.asctime(), "Miner Starts - %s:%s" % (settings['host'], settings['port'])
    try:
        for thr_proc in thr_list:
            thr_proc.join()
    except KeyboardInterrupt:
        pass
    print time.asctime(), "Miner Stops - %s:%s" % (settings['host'], settings['port'])
| mit |
ofer43211/unisubs | apps/teams/migrations/0036_auto__add_field_team_last_notification_time.py | 5 | 20459 | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
    """Apply: add Team.last_notification_time (DateTime, default now)."""
    # Adding field 'Team.last_notification_time'
    db.add_column('teams_team', 'last_notification_time', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime.now), keep_default=False)
def backwards(self, orm):
# Deleting field 'Team.last_notification_time'
db.delete_column('teams_team', 'last_notification_time')
models = {
'auth.customuser': {
'Meta': {'object_name': 'CustomUser', '_ormbases': ['auth.User']},
'autoplay_preferences': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'award_points': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'biography': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'changes_notification': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'follow_new_video': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'homepage': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'last_ip': ('django.db.models.fields.IPAddressField', [], {'max_length': '15', 'null': 'True', 'blank': 'True'}),
'new_message_notification': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'picture': ('utils.amazon.fields.S3EnabledImageField', [], {'max_length': '100', 'blank': 'True'}),
'preferred_language': ('django.db.models.fields.CharField', [], {'max_length': '16', 'blank': 'True'}),
'user_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True', 'primary_key': 'True'}),
'valid_email': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'videos': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['videos.Video']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'teams.application': {
'Meta': {'unique_together': "(('team', 'user'),)", 'object_name': 'Application'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'note': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'team': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'applications'", 'to': "orm['teams.Team']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'team_applications'", 'to': "orm['auth.CustomUser']"})
},
'teams.invite': {
'Meta': {'unique_together': "(('team', 'user'),)", 'object_name': 'Invite'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'note': ('django.db.models.fields.TextField', [], {'max_length': '200', 'blank': 'True'}),
'team': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'invitations'", 'to': "orm['teams.Team']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'team_invitations'", 'to': "orm['auth.CustomUser']"})
},
'teams.team': {
'Meta': {'object_name': 'Team'},
'applicants': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'applicated_teams'", 'symmetrical': 'False', 'through': "orm['teams.Application']", 'to': "orm['auth.CustomUser']"}),
'application_text': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'header_html_text': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
'highlight': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'invited': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.CustomUser']", 'through': "orm['teams.Invite']", 'symmetrical': 'False'}),
'is_moderated': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'is_visible': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'last_notification_time': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'logo': ('utils.amazon.fields.S3EnabledImageField', [], {'max_length': '100', 'blank': 'True'}),
'membership_policy': ('django.db.models.fields.IntegerField', [], {'default': '4'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '250'}),
'page_content': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'points': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50', 'db_index': 'True'}),
'users': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'teams'", 'symmetrical': 'False', 'through': "orm['teams.TeamMember']", 'to': "orm['auth.CustomUser']"}),
'video': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'intro_for_teams'", 'null': 'True', 'to': "orm['videos.Video']"}),
'video_policy': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'videos': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['videos.Video']", 'through': "orm['teams.TeamVideo']", 'symmetrical': 'False'})
},
'teams.teammember': {
'Meta': {'unique_together': "(('team', 'user'),)", 'object_name': 'TeamMember'},
'changes_notification': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_manager': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'team': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'members'", 'to': "orm['teams.Team']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.CustomUser']"})
},
'teams.teamvideo': {
'Meta': {'unique_together': "(('team', 'video'),)", 'object_name': 'TeamVideo'},
'added_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.CustomUser']"}),
'all_languages': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'completed_languages': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['videos.SubtitleLanguage']", 'symmetrical': 'False', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'team': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['teams.Team']"}),
'thumbnail': ('utils.amazon.fields.S3EnabledImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '2048', 'blank': 'True'}),
'video': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['videos.Video']"})
},
'teams.teamvideolanguage': {
'Meta': {'unique_together': "(('team_video', 'subtitle_language'),)", 'object_name': 'TeamVideoLanguage'},
'forked': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_complete': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True', 'blank': 'True'}),
'is_lingua_franca': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True', 'blank': 'True'}),
'is_original': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True', 'blank': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'max_length': '16', 'db_index': 'True'}),
'percent_done': ('django.db.models.fields.IntegerField', [], {'default': '0', 'db_index': 'True'}),
'subtitle_language': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['videos.SubtitleLanguage']", 'null': 'True'}),
'team': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['teams.Team']"}),
'team_video': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'languages'", 'to': "orm['teams.TeamVideo']"}),
'video': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['videos.Video']"})
},
'teams.teamvideolanguagepair': {
'Meta': {'object_name': 'TeamVideoLanguagePair'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language_0': ('django.db.models.fields.CharField', [], {'max_length': '16', 'db_index': 'True'}),
'language_1': ('django.db.models.fields.CharField', [], {'max_length': '16', 'db_index': 'True'}),
'language_pair': ('django.db.models.fields.CharField', [], {'max_length': '16', 'db_index': 'True'}),
'percent_complete': ('django.db.models.fields.IntegerField', [], {'default': '0', 'db_index': 'True'}),
'subtitle_language_0': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'team_video_language_pairs_0'", 'to': "orm['videos.SubtitleLanguage']"}),
'subtitle_language_1': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'team_video_language_pairs_1'", 'null': 'True', 'to': "orm['videos.SubtitleLanguage']"}),
'team': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['teams.Team']"}),
'team_video': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['teams.TeamVideo']"}),
'video': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['videos.Video']"})
},
'videos.subtitlelanguage': {
'Meta': {'unique_together': "(('video', 'language', 'standard_language'),)", 'object_name': 'SubtitleLanguage'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'followers': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'followed_languages'", 'blank': 'True', 'to': "orm['auth.CustomUser']"}),
'had_version': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'has_version': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_complete': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'is_forked': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'is_original': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'max_length': '16', 'blank': 'True'}),
'last_version': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['videos.SubtitleVersion']", 'null': 'True', 'blank': 'True'}),
'percent_done': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'standard_language': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['videos.SubtitleLanguage']", 'null': 'True', 'blank': 'True'}),
'subtitle_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'subtitles_fetched_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '2048', 'blank': 'True'}),
'video': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['videos.Video']"}),
'writelock_owner': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.CustomUser']", 'null': 'True', 'blank': 'True'}),
'writelock_session_key': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'writelock_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True'})
},
'videos.subtitleversion': {
'Meta': {'unique_together': "(('language', 'version_no'),)", 'object_name': 'SubtitleVersion'},
'datetime_started': ('django.db.models.fields.DateTimeField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_forked': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'language': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['videos.SubtitleLanguage']"}),
'note': ('django.db.models.fields.CharField', [], {'max_length': '512', 'blank': 'True'}),
'notification_sent': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'result_of_rollback': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'text_change': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'time_change': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.CustomUser']", 'null': 'True'}),
'version_no': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
'videos.video': {
'Meta': {'object_name': 'Video'},
'allow_community_edits': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'allow_video_urls_edit': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
'complete_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'duration': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'edited': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'followers': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'followed_videos'", 'blank': 'True', 'to': "orm['auth.CustomUser']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_subtitled': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
'languages_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'db_index': 'True'}),
's3_thumbnail': ('utils.amazon.fields.S3EnabledImageField', [], {'max_length': '100', 'blank': 'True'}),
'subtitles_fetched_count': ('django.db.models.fields.IntegerField', [], {'default': '0', 'db_index': 'True'}),
'thumbnail': ('django.db.models.fields.CharField', [], {'max_length': '500', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '2048', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.CustomUser']", 'null': 'True', 'blank': 'True'}),
'video_id': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'view_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'db_index': 'True'}),
'was_subtitled': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True', 'blank': 'True'}),
'widget_views_count': ('django.db.models.fields.IntegerField', [], {'default': '0', 'db_index': 'True'}),
'writelock_owner': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'writelock_owners'", 'null': 'True', 'to': "orm['auth.CustomUser']"}),
'writelock_session_key': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'writelock_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True'})
}
}
complete_apps = ['teams']
| agpl-3.0 |
amiguez/youtube-dl | youtube_dl/extractor/mixcloud.py | 91 | 4042 | from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..compat import compat_urllib_parse_unquote
from ..utils import (
ExtractorError,
HEADRequest,
str_to_int,
)
class MixcloudIE(InfoExtractor):
    """youtube-dl extractor for individual Mixcloud cloudcast pages.

    Mixcloud does not expose a direct stream URL; the page embeds a preview
    URL from which the full-length URL is derived by string substitution and
    validated with a HEAD request (mp3 first, then an m4a fallback).
    """

    _VALID_URL = r'^(?:https?://)?(?:www\.)?mixcloud\.com/([^/]+)/([^/]+)'
    IE_NAME = 'mixcloud'

    _TESTS = [{
        'url': 'http://www.mixcloud.com/dholbach/cryptkeeper/',
        'info_dict': {
            'id': 'dholbach-cryptkeeper',
            'ext': 'mp3',
            'title': 'Cryptkeeper',
            'description': 'After quite a long silence from myself, finally another Drum\'n\'Bass mix with my favourite current dance floor bangers.',
            'uploader': 'Daniel Holbach',
            'uploader_id': 'dholbach',
            'thumbnail': 're:https?://.*\.jpg',
            'view_count': int,
            'like_count': int,
        },
    }, {
        'url': 'http://www.mixcloud.com/gillespeterson/caribou-7-inch-vinyl-mix-chat/',
        'info_dict': {
            'id': 'gillespeterson-caribou-7-inch-vinyl-mix-chat',
            'ext': 'mp3',
            'title': 'Caribou 7 inch Vinyl Mix & Chat',
            'description': 'md5:2b8aec6adce69f9d41724647c65875e8',
            'uploader': 'Gilles Peterson Worldwide',
            'uploader_id': 'gillespeterson',
            'thumbnail': 're:https?://.*/images/',
            'view_count': int,
            'like_count': int,
        },
    }]

    def _check_url(self, url, track_id, ext):
        """Return True iff *url* answers a HEAD request without error."""
        try:
            # We only want to know if the request succeed
            # don't download the whole file
            self._request_webpage(
                HEADRequest(url), track_id,
                'Trying %s URL' % ext)
            return True
        except ExtractorError:
            return False

    def _real_extract(self, url):
        """Download the cloudcast page and return the info dict for it."""
        mobj = re.match(self._VALID_URL, url)
        uploader = mobj.group(1)
        cloudcast_name = mobj.group(2)
        # Track id is "<uploader>-<cloudcast>", URL-unquoted.
        track_id = compat_urllib_parse_unquote('-'.join((uploader, cloudcast_name)))

        webpage = self._download_webpage(url, track_id)

        preview_url = self._search_regex(
            r'\s(?:data-preview-url|m-preview)="([^"]+)"', webpage, 'preview url')
        # Derive the full-length mp3 URL from the preview URL; if it does not
        # exist, fall back to the 64kbps m4a variant before giving up.
        song_url = preview_url.replace('/previews/', '/c/originals/')
        if not self._check_url(song_url, track_id, 'mp3'):
            song_url = song_url.replace('.mp3', '.m4a').replace('originals/', 'm4a/64/')
            if not self._check_url(song_url, track_id, 'm4a'):
                raise ExtractorError('Unable to extract track url')

        # The metadata attributes all hang off the play-button element, so
        # every lookup shares this prefix.
        PREFIX = (
            r'm-play-on-spacebar[^>]+'
            r'(?:\s+[a-zA-Z0-9-]+(?:="[^"]+")?)*?\s+')
        title = self._html_search_regex(
            PREFIX + r'm-title="([^"]+)"', webpage, 'title')
        thumbnail = self._proto_relative_url(self._html_search_regex(
            PREFIX + r'm-thumbnail-url="([^"]+)"', webpage, 'thumbnail',
            fatal=False))
        uploader = self._html_search_regex(
            PREFIX + r'm-owner-name="([^"]+)"',
            webpage, 'uploader', fatal=False)
        uploader_id = self._search_regex(
            r'\s+"profile": "([^"]+)",', webpage, 'uploader id', fatal=False)
        description = self._og_search_description(webpage)
        like_count = str_to_int(self._search_regex(
            r'\bbutton-favorite\b[^>]+m-ajax-toggle-count="([^"]+)"',
            webpage, 'like count', fatal=False))
        view_count = str_to_int(self._search_regex(
            [r'<meta itemprop="interactionCount" content="UserPlays:([0-9]+)"',
             r'/listeners/?">([0-9,.]+)</a>'],
            webpage, 'play count', fatal=False))

        return {
            'id': track_id,
            'title': title,
            'url': song_url,
            'description': description,
            'thumbnail': thumbnail,
            'uploader': uploader,
            'uploader_id': uploader_id,
            'view_count': view_count,
            'like_count': like_count,
        }
| unlicense |
youprofit/servo | python/mach/mach/test/test_logger.py | 128 | 1244 | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import absolute_import, unicode_literals
import logging
import time
import unittest
from mach.logging import StructuredHumanFormatter
from mozunit import main
class DummyLogger(logging.Logger):
    """Logger that hands every record to a callback instead of handlers."""

    def __init__(self, cb):
        super(DummyLogger, self).__init__('test')
        self._cb = cb

    def handle(self, record):
        # Bypass the normal handler chain entirely.
        self._cb(record)
class TestStructuredHumanFormatter(unittest.TestCase):
    def test_non_ascii_logging(self):
        # Ensures the formatter doesn't choke when non-ASCII characters are
        # present in printed parameters.
        formatter = StructuredHumanFormatter(time.time())
        value = 's\xe9curit\xe9'

        def on_record(record):
            formatted = formatter.format(record)
            # Drop the leading elapsed-time column (first 9 characters).
            self.assertEqual(formatted[9:], 'Test: s\xe9curit\xe9')

        DummyLogger(on_record).log(
            logging.INFO, 'Test: {utf}',
            extra={'action': 'action', 'params': {'utf': value}})
if __name__ == '__main__':
    # mozunit's main() runs the tests in this module when executed directly.
    main()
| mpl-2.0 |
SalesforceEng/Providence | db.py | 1 | 3022 | '''
Copyright (c) 2015, Salesforce.com, Inc.
All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
* Neither the name of Salesforce.com nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
'''
from sqlalchemy.engine import create_engine
import config
configuration = config.Configuration()
def get_engine():
    """Build a SQLAlchemy engine for the configured PostgreSQL database.

    Credentials are optional: when a credential identifier is configured,
    the username/password pair is fetched from the credential manager and
    embedded in the connection URL.
    """
    credentials_id = configuration.get(('postgresql', 'credential-identifier'))
    server_name = configuration.get(('postgresql', 'server'))
    database_name = configuration.get(('postgresql', 'database'))

    auth_part = ""
    if credentials_id is not None:
        creds = config.credential_manager.get_or_create_credentials_for(
            credentials_id, "password")
        auth_part = "%s:%s@" % (creds.username, creds.password)

    return create_engine('postgresql://%s%s/%s' % (auth_part, server_name, database_name))
# Shared engine and session factory for the rest of the application.
engine = get_engine()

from sqlalchemy.orm import sessionmaker
from sqlalchemy.orm.exc import NoResultFound
from sqlalchemy.exc import IntegrityError

Session = sessionmaker(bind=engine)
# NOTE(review): this configure() call looks redundant — sessionmaker(bind=engine)
# already bound the engine; confirm before removing.
Session.configure(bind=engine)

# NOTE(review): 'copy' appears unused in this portion of the file; it may be
# used further down — verify before deleting.
import copy
def get_one_or_create(session,
                      model,
                      create_method='',
                      create_method_kwargs=None,
                      **kwargs):
    """Fetch the single *model* row matching **kwargs, creating it if absent.

    Returns a ``(instance, created)`` tuple where ``created`` is True only
    when this call inserted the row.  ``create_method`` optionally names an
    alternate constructor attribute on *model*; ``create_method_kwargs`` are
    extra keyword arguments used only at creation time.
    """
    try:
        return session.query(model).filter_by(**kwargs).one(), False
    except NoResultFound:
        kwargs.update(create_method_kwargs or {})
        # Fall back to the model class itself when create_method is '' / missing.
        created = getattr(model, create_method, model)(**kwargs)
        try:
            session.add(created)
            session.flush()
            return created, True
        except IntegrityError:
            # Lost a race with a concurrent insert: roll back and return the
            # row the other writer created.
            session.rollback()
            return session.query(model).filter_by(**kwargs).one(), False
ToontownUprising/src | toontown/ai/DistributedWinterCarolingTarget.py | 1 | 1681 | from otp.speedchat import SpeedChatGlobals
from direct.directnotify import DirectNotifyGlobal
from direct.distributed import DistributedObject
from direct.interval.IntervalGlobal import *
class DistributedWinterCarolingTarget(DistributedObject.DistributedObject):
    """Client-side object that listens for caroling SpeedChat phrases and
    triggers a scavenger-hunt request, rate-limited by a cooldown."""

    notify = DirectNotifyGlobal.directNotify.newCategory('DistributedWinterCarolingTarget')

    def __init__(self, cr):
        DistributedObject.DistributedObject.__init__(self, cr)
        self.triggered = False
        # Seconds before another phrase may trigger the hunt again.
        self.triggerDelay = 15

    def announceGenerate(self):
        DistributedObject.DistributedObject.announceGenerate(self)
        DistributedWinterCarolingTarget.notify.debug('announceGenerate')
        self.accept(SpeedChatGlobals.SCStaticTextMsgEvent, self.phraseSaid)

    def phraseSaid(self, phraseId):
        self.notify.debug('Checking if phrase was said')
        # The six consecutive caroling phrase ids.
        helpPhrases = [30220 + offset for offset in xrange(6)]

        def reset():
            self.triggered = False

        if phraseId in helpPhrases and not self.triggered:
            self.triggered = True
            self.d_requestScavengerHunt()
            taskMgr.doMethodLater(self.triggerDelay, reset, 'ScavengerHunt-phrase-reset', extraArgs=[])

    def delete(self):
        self.ignore(SpeedChatGlobals.SCStaticTextMsgEvent)
        DistributedObject.DistributedObject.delete(self)

    def d_requestScavengerHunt(self):
        self.sendUpdate('requestScavengerHunt', [])

    def doScavengerHunt(self, amount):
        DistributedWinterCarolingTarget.notify.debug('doScavengerHunt')
        av = base.localAvatar
        av.winterCarolingTargetMet(amount)
mancoast/CPythonPyc_test | cpython/279_test_applesingle.py | 136 | 1810 | # Copyright (C) 2003 Python Software Foundation
import unittest
import os
from test import test_support
import struct
MacOS = test_support.import_module('MacOS')
# The following should exist if MacOS does.
import applesingle
# AppleSingle container magic number and format version (big-endian).
AS_MAGIC=0x00051600
AS_VERSION=0x00020000

# Payloads for the two forks stored in the synthetic test file.
dataforkdata = 'hello\r\0world\n'
resourceforkdata = 'goodbye\ncruel\0world\r'

# Hand-built AppleSingle image: header (magic, version, 16 filler bytes,
# entry count = 2), then two entry descriptors of (entry id, offset, length),
# then the raw fork bytes.  Header is 4+4+16+2 = 26 bytes and the descriptor
# block is 6*4 = 24 bytes, so the first fork starts at offset 50.
# Entry id 1 carries the data fork, id 2 the resource fork (matches the
# compareData() checks below).
applesingledata = struct.pack(">ll16sh", AS_MAGIC, AS_VERSION, "foo", 2) + \
    struct.pack(">llllll", 1, 50, len(dataforkdata),
                2, 50+len(dataforkdata), len(resourceforkdata)) + \
    dataforkdata + \
    resourceforkdata

# Second scratch filename used as the decode output path.
TESTFN2 = test_support.TESTFN + '2'
class TestApplesingle(unittest.TestCase):
    """Exercise applesingle.decode() against a hand-built AppleSingle file."""

    def setUp(self):
        fp = open(test_support.TESTFN, 'w')
        fp.write(applesingledata)
        fp.close()

    def tearDown(self):
        self._remove(test_support.TESTFN)
        self._remove(TESTFN2)

    @staticmethod
    def _remove(path):
        # Best-effort cleanup.  Only swallow OS-level errors (e.g. the file
        # was never created); the old bare except also hid KeyboardInterrupt
        # and programming errors.
        try:
            os.unlink(path)
        except OSError:
            pass

    def compareData(self, isrf, data):
        """Assert that TESTFN2's resource fork (isrf=True) or data fork
        (isrf=False) starts with *data*."""
        if isrf:
            fp = MacOS.openrf(TESTFN2, '*rb')
        else:
            fp = open(TESTFN2, 'rb')
        filedata = fp.read(1000)
        self.assertEqual(data, filedata)

    def test_applesingle(self):
        # Decode writes both forks of the AppleSingle image to TESTFN2.
        self._remove(TESTFN2)
        applesingle.decode(test_support.TESTFN, TESTFN2)
        self.compareData(False, dataforkdata)
        self.compareData(True, resourceforkdata)

    def test_applesingle_resonly(self):
        # With resonly=True the resource fork is written as plain file data.
        self._remove(TESTFN2)
        applesingle.decode(test_support.TESTFN, TESTFN2, resonly=True)
        self.compareData(False, resourceforkdata)
def test_main():
    # Entry point used by regrtest: run the whole TestApplesingle suite.
    test_support.run_unittest(TestApplesingle)
if __name__ == '__main__':
    # Allow running this test file directly.
    test_main()
| gpl-3.0 |
archhurd/archweb | main/migrations/0053_auto__add_field_package_pgp_signature.py | 4 | 11962 | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    def forwards(self, orm):
        """Apply: add a nullable ``pgp_signature`` text column to ``packages``."""
        db.add_column('packages', 'pgp_signature', self.gf('django.db.models.fields.TextField')(null=True, blank=True), keep_default=False)
def backwards(self, orm):
db.delete_column('packages', 'pgp_signature')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'main.arch': {
'Meta': {'ordering': "['name']", 'object_name': 'Arch', 'db_table': "'arches'"},
'agnostic': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'})
},
'main.donor': {
'Meta': {'ordering': "['name']", 'object_name': 'Donor', 'db_table': "'donors'"},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'visible': ('django.db.models.fields.BooleanField', [], {'default': 'True'})
},
'main.package': {
'Meta': {'ordering': "('pkgname',)", 'object_name': 'Package', 'db_table': "'packages'"},
'arch': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'packages'", 'to': "orm['main.Arch']"}),
'build_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'compressed_size': ('main.models.PositiveBigIntegerField', [], {}),
'epoch': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'filename': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'files_last_update': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'flag_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'installed_size': ('main.models.PositiveBigIntegerField', [], {}),
'last_update': ('django.db.models.fields.DateTimeField', [], {}),
'packager': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}),
'packager_str': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'pgp_signature': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'pkgbase': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'pkgdesc': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'}),
'pkgname': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'pkgrel': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'pkgver': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'repo': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'packages'", 'to': "orm['main.Repo']"}),
'url': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True'})
},
'main.packagedepend': {
'Meta': {'object_name': 'PackageDepend', 'db_table': "'package_depends'"},
'depname': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'depvcmp': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'optional': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'pkg': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Package']"})
},
'main.packagefile': {
'Meta': {'object_name': 'PackageFile', 'db_table': "'package_files'"},
'directory': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'filename': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_directory': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'pkg': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Package']"})
},
'main.repo': {
'Meta': {'ordering': "['name']", 'object_name': 'Repo', 'db_table': "'repos'"},
'bugs_category': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'bugs_project': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'staging': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'svn_root': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'testing': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
'main.todolist': {
'Meta': {'object_name': 'Todolist', 'db_table': "'todolists'"},
'creator': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'}),
'description': ('django.db.models.fields.TextField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'main.todolistpkg': {
'Meta': {'unique_together': "(('list', 'pkg'),)", 'object_name': 'TodolistPkg', 'db_table': "'todolist_pkgs'"},
'complete': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'list': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Todolist']"}),
'pkg': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['main.Package']"})
},
'main.userprofile': {
'Meta': {'object_name': 'UserProfile', 'db_table': "'user_profiles'"},
'alias': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'allowed_repos': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['main.Repo']", 'symmetrical': 'False', 'blank': 'True'}),
'favorite_distros': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'interests': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'languages': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'location': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'notify': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'occupation': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'other_contact': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'pgp_key': ('devel.fields.PGPKeyField', [], {'max_length': '40', 'null': 'True', 'blank': 'True'}),
'picture': ('django.db.models.fields.files.FileField', [], {'default': "'devs/silhouette.png'", 'max_length': '100'}),
'public_email': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'roles': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'time_zone': ('django.db.models.fields.CharField', [], {'default': "'UTC'", 'max_length': '100'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'userprofile'", 'unique': 'True', 'to': "orm['auth.User']"}),
'website': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'yob': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'})
}
}
complete_apps = ['main']
| gpl-2.0 |
ahellander/pyurdme | examples/yeast_polarization/G_protein_cycle.py | 5 | 4520 | #!/usr/bin/env python
""" pyURDME model file for the polarization 1D example. """
import os
import sys
import pyurdme
import dolfin
import math
import matplotlib.pyplot as plt
import numpy
# Sub domain for Periodic boundary condition
class PeriodicBoundary1D(dolfin.SubDomain):
    """Identify the endpoints x=a and x=b of a 1D interval so dolfin
    treats the mesh as periodic."""

    def __init__(self, a=0.0, b=1.0):
        """ 1D domain from a to b. """
        dolfin.SubDomain.__init__(self)
        self.a = a
        self.b = b

    def inside(self, x, on_boundary):
        # True everywhere except the right endpoint x=b, which is the
        # "mapped" side of the periodic pair.
        return not bool((dolfin.near(x[0], self.b)) and on_boundary)

    def map(self, x, y):
        # Map the right endpoint onto the left one: x=b -> y=a.
        # NOTE(review): y is only assigned when x is near b; presumably
        # dolfin calls map() only for mapped-boundary points -- confirm
        # against the dolfin SubDomain contract.
        if dolfin.near(x[0], self.b):
            y[0] = self.a + (x[0] - self.b)
class PheromoneGradient(pyurdme.URDMEDataFunction):
    """Spatial pheromone level over a 1D domain [a, b].

    The profile is a cosine ramp between L_min and L_max, scaled by the
    MOLAR conversion factor.
    """

    def __init__(self, a=0.0, b=1.0, L_min=0, L_max=4, MOLAR=1.0):
        """ 1D domain from a to b. """
        pyurdme.URDMEDataFunction.__init__(self, name="PheromoneGradient")
        self.a = a
        self.b = b
        self.L_min = L_min
        self.L_max = L_max
        self.MOLAR = MOLAR

    def map(self, x):
        # Cosine profile: maximal at x = 0, falling toward L_min as
        # cos(x/2) decreases.
        span = self.L_max - self.L_min
        level = span * 0.5 * (1 + math.cos(0.5 * x[0])) + self.L_min
        return level * self.MOLAR
class G_protein_cycle_1D(pyurdme.URDMEModel):
    """G-protein cycle model on a periodic 1D interval (yeast polarization)."""

    def __init__(self, model_name="G_protein_cycle_1D"):
        pyurdme.URDMEModel.__init__(self, model_name)

        # Species
        # R RL G Ga Gbg Gd
        R   = pyurdme.Species(name="R",   diffusion_constant=0.01)
        RL  = pyurdme.Species(name="RL",  diffusion_constant=0.01)
        G   = pyurdme.Species(name="G",   diffusion_constant=0.01)
        Ga  = pyurdme.Species(name="Ga",  diffusion_constant=0.01)
        Gbg = pyurdme.Species(name="Gbg", diffusion_constant=0.01)
        Gd  = pyurdme.Species(name="Gd",  diffusion_constant=0.01)

        self.add_species([R, RL, G, Ga, Gbg, Gd])

        # Periodic 1D mesh over [-2*pi, 2*pi] with 200 voxels.
        L = 4*3.14159
        NUM_VOXEL = 200
        # Concentration -> per-voxel molecule-count conversion factor
        # (assumes a voxel volume of (L/NUM_VOXEL)**3 -- TODO confirm units).
        MOLAR = 6.02e-01*((L/NUM_VOXEL)**3)
        self.mesh = pyurdme.URDMEMesh.generate_interval_mesh(nx=NUM_VOXEL, a=-2*3.14159, b=2*3.14159, periodic=True)

        # Rate parameters.
        SA    = pyurdme.Parameter(name="SA",    expression=201.056)
        V     = pyurdme.Parameter(name="V",     expression=33.5)
        k_RL  = pyurdme.Parameter(name="k_RL",  expression=2e-03/MOLAR)
        k_RLm = pyurdme.Parameter(name="k_RLm", expression=1e-02)
        k_Rs  = pyurdme.Parameter(name="k_Rs",  expression="4.0/SA")
        k_Rd0 = pyurdme.Parameter(name="k_Rd0", expression=4e-04)
        k_Rd1 = pyurdme.Parameter(name="k_Rd1", expression=4e-04)
        k_G1  = pyurdme.Parameter(name="k_G1",  expression="1.0*SA")
        k_Ga  = pyurdme.Parameter(name="k_Ga",  expression="1e-06*SA")
        k_Gd  = pyurdme.Parameter(name="k_Gd",  expression=0.1)
        self.add_parameter([SA, V, k_RL, k_RLm, k_Rs, k_Rd0, k_Rd1, k_G1, k_Ga, k_Gd])

        # Add Data Function to model the mating pheromone gradient.
        self.add_data_function(PheromoneGradient(a=-2*3.14159, b=2*3.14159, MOLAR=MOLAR))

        # Reactions
        # NOTE(review): k_Rd1 and k_Gd are declared above but never used:
        # R4 reuses k_RLm and R6 reuses k_Ga. Possibly R4 was meant to use
        # k_Rd1 and R6 k_Gd -- confirm against the published model before
        # changing anything.
        R0 = pyurdme.Reaction(name="R0", reactants={}, products={R:1}, massaction=True, rate=k_Rs)
        R1 = pyurdme.Reaction(name="R1", reactants={R:1}, products={}, massaction=True, rate=k_Rd0)
        R2 = pyurdme.Reaction(name="R2", reactants={R:1}, products={RL:1}, propensity_function="k_RL*R*PheromoneGradient/vol")
        R3 = pyurdme.Reaction(name="R3", reactants={RL:1}, products={R:1}, massaction=True, rate=k_RLm)
        R4 = pyurdme.Reaction(name="R4", reactants={RL:1}, products={}, massaction=True, rate=k_RLm)
        R5 = pyurdme.Reaction(name="R5", reactants={G:1}, products={Ga:1, Gbg:1}, propensity_function="k_Ga*RL*G/vol")
        R6 = pyurdme.Reaction(name="R6", reactants={Ga:1}, products={Gd:1}, massaction=True, rate=k_Ga)
        R7 = pyurdme.Reaction(name="R7", reactants={Gd:1, Gbg:1}, products={G:1}, massaction=True, rate=k_G1)
        self.add_reaction([R0, R1, R2, R3, R4, R5, R6, R7])

        # Distribute molecules randomly over the mesh according to their initial values
        self.set_initial_condition_scatter({R:10000})
        self.set_initial_condition_scatter({G:10000})

        # Output times 0..200.
        self.timespan(range(201))
if __name__=="__main__":
    """ Dump model to a file. """
    # Build and simulate the model, then plot the Gbg profile at t=49.
    model = G_protein_cycle_1D()
    result = model.run()

    x_vals = model.mesh.coordinates()[:, 0]
    # NOTE(review): G is fetched but never used below -- verify intent.
    G = result.get_species("G", timepoints=49)
    Gbg = result.get_species("Gbg", timepoints=49)
    plt.plot(x_vals, Gbg)
    plt.title('Gbg at t=49')
    plt.xlabel('Space')
    plt.ylabel('Number of Molecules')
    plt.show()
| gpl-3.0 |
hryamzik/ansible | lib/ansible/modules/messaging/rabbitmq_policy.py | 23 | 4534 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2013, John Dewey <john@dewey.ws>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: rabbitmq_policy
short_description: Manage the state of policies in RabbitMQ.
description:
- Manage the state of a policy in RabbitMQ.
version_added: "1.5"
author: "John Dewey (@retr0h)"
options:
name:
description:
- The name of the policy to manage.
required: true
vhost:
description:
- The name of the vhost to apply to.
default: /
apply_to:
description:
- What the policy applies to. Requires RabbitMQ 3.2.0 or later.
default: all
choices: [all, exchanges, queues]
version_added: "2.1"
pattern:
description:
- A regex of queues to apply the policy to.
required: true
tags:
description:
- A dict or string describing the policy.
required: true
priority:
description:
- The priority of the policy.
default: 0
node:
description:
- Erlang node name of the rabbit we wish to configure.
default: rabbit
state:
description:
- The state of the policy.
default: present
choices: [present, absent]
'''
EXAMPLES = '''
- name: ensure the default vhost contains the HA policy via a dict
rabbitmq_policy:
name: HA
pattern: .*
args:
tags:
ha-mode: all
- name: ensure the default vhost contains the HA policy
rabbitmq_policy:
name: HA
pattern: .*
tags:
ha-mode: all
'''
import json
from ansible.module_utils.basic import AnsibleModule
class RabbitMqPolicy(object):
    """Thin wrapper around ``rabbitmqctl`` for managing one policy."""

    def __init__(self, module, name):
        params = module.params
        self._module = module
        self._name = name
        self._vhost = params['vhost']
        self._pattern = params['pattern']
        self._apply_to = params['apply_to']
        self._tags = params['tags']
        self._priority = params['priority']
        self._node = params['node']
        self._rabbitmqctl = module.get_bin_path('rabbitmqctl', True)

    def _exec(self, args, run_in_check_mode=False):
        """Run a rabbitmqctl subcommand; return its stdout as lines.

        Returns an empty list when skipped because of Ansible check mode.
        """
        if self._module.check_mode and not run_in_check_mode:
            return list()
        cmd = [self._rabbitmqctl, '-q', '-n', self._node]
        # Inject the vhost flag right after the subcommand name.
        args.insert(1, '-p')
        args.insert(2, self._vhost)
        dummy_rc, out, dummy_err = self._module.run_command(cmd + args,
                                                            check_rc=True)
        return out.splitlines()

    def list(self):
        """Return True if a policy with our name exists on the vhost."""
        for line in self._exec(['list_policies'], True):
            if line and line.split('\t')[1] == self._name:
                return True
        return False

    def set(self):
        """Create or update the policy via 'rabbitmqctl set_policy'."""
        args = ['set_policy', self._name, self._pattern,
                json.dumps(self._tags), '--priority', self._priority]
        if self._apply_to != 'all':
            args.extend(['--apply-to', self._apply_to])
        return self._exec(args)

    def clear(self):
        """Delete the policy via 'rabbitmqctl clear_policy'."""
        return self._exec(['clear_policy', self._name])
def main():
    """Ansible entry point: converge the policy to the requested state."""
    arg_spec = dict(
        name=dict(required=True),
        vhost=dict(default='/'),
        pattern=dict(required=True),
        apply_to=dict(default='all', choices=['all', 'exchanges', 'queues']),
        tags=dict(type='dict', required=True),
        priority=dict(default='0'),
        node=dict(default='rabbit'),
        state=dict(default='present', choices=['present', 'absent']),
    )
    module = AnsibleModule(argument_spec=arg_spec, supports_check_mode=True)

    name = module.params['name']
    state = module.params['state']
    policy = RabbitMqPolicy(module, name)
    result = dict(changed=False, name=name, state=state)

    exists = policy.list()
    if exists and state == 'absent':
        # Policy present but unwanted: remove it.
        policy.clear()
        result['changed'] = True
    elif not exists and state == 'present':
        # Policy missing but wanted: create it.
        policy.set()
        result['changed'] = True

    module.exit_json(**result)
| gpl-3.0 |
josh-willis/pycbc | pycbc/io/hdf.py | 2 | 47604 | # convenience classes for accessing hdf5 trigger files
# the 'get_column()' method is implemented parallel to
# the existing pylal.SnglInspiralUtils functions
import h5py
import numpy as np
import logging
import inspect
from itertools import chain
from six.moves import range
from six.moves import cPickle as pickle
from io import BytesIO
from lal import LIGOTimeGPS, YRJUL_SI
from glue.ligolw import ligolw
from glue.ligolw import lsctables
from glue.ligolw import utils as ligolw_utils
from glue.ligolw.utils import process as ligolw_process
from pycbc import version as pycbc_version
from pycbc.tmpltbank import return_search_summary
from pycbc.tmpltbank import return_empty_sngl
from pycbc import events, conversions, pnutils
from pycbc.events import ranking, veto
from pycbc.events.stat import sngl_statistic_dict
class HFile(h5py.File):
    """ Low level extensions to the capabilities of reading an hdf5 File
    """
    def select(self, fcn, *args, **kwds):
        """ Return arrays from an hdf5 file that satisfy the given function

        Parameters
        ----------
        fcn : a function
            A function that accepts the same number of argument as keys given
            and returns a boolean array of the same length.
        args : strings
            A variable number of strings that are keys into the hdf5. These must
            refer to arrays of equal length.
        chunksize : {1e6, int}, optional
            Number of elements to read and process at a time.
        return_indices : bool, optional
            If True, also return the indices of elements passing the function.

        Returns
        -------
        values : np.ndarrays
            A variable number of arrays depending on the number of keys into
            the hdf5 file that are given. If return_indices is True, the first
            element is an array of indices of elements passing the function.

        Raises
        ------
        ValueError
            If no dataset keys are given.

        >>> f = HFile(filename)
        >>> snr = f.select(lambda snr: snr > 6, 'H1/snr')
        """
        # The original code sized the loop with the leaked loop variable
        # 'arg', which raised NameError for empty args; guard explicitly.
        if not args:
            raise ValueError("select() requires at least one dataset key")

        # get references to each array
        refs = {arg: self[arg] for arg in args}
        data = {arg: [] for arg in args}

        return_indices = kwds.get('return_indices', False)
        indices = np.array([], dtype=np.uint64)

        # To conserve memory read the arrays in chunks
        chunksize = kwds.get('chunksize', int(1e6))
        size = len(refs[args[0]])
        i = 0
        while i < size:
            r = min(i + chunksize, size)

            # Read each chunk's worth of data and find where it passes the function
            partial = [refs[arg][i:r] for arg in args]
            keep = fcn(*partial)

            if return_indices:
                indices = np.concatenate([indices, np.flatnonzero(keep) + i])

            # store only the results that pass the function
            for arg, part in zip(args, partial):
                data[arg].append(part[keep])

            i += chunksize

        # Combine the partial results into full arrays
        if len(args) == 1:
            res = np.concatenate(data[args[0]])
            if return_indices:
                return indices.astype(np.uint64), res
            return res

        res = tuple(np.concatenate(data[arg]) for arg in args)
        if return_indices:
            return (indices.astype(np.uint64),) + res
        return res
class DictArray(object):
    """ Utility for organizing sets of arrays of equal length.

    Manages a dictionary of arrays of equal length. This can also
    be instantiated with a set of hdf5 files and the key values. The full
    data is always in memory and all operations create new instances of the
    DictArray.
    """
    def __init__(self, data=None, files=None, groups=None):
        """ Create a DictArray

        Parameters
        ----------
        data: dict, optional
            Dictionary of equal length numpy arrays
        files: list of filenames, optional
            List of hdf5 file filenames. Incompatibile with the `data` option.
        groups: list of strings
            List of keys into each file. Required by the files option.
        """
        # Check that input fits with how the DictArray is set up
        if data and files:
            raise RuntimeError('DictArray can only have data or files as '
                               'input, not both.')
        if files and not groups:
            raise RuntimeError('If files are given then need groups.')

        self.data = data
        self.groups = groups
        if files:
            # Collect each group's arrays file by file, then concatenate.
            self.data = {g: [] for g in groups}
            for f in files:
                d = HFile(f)
                for g in groups:
                    if g in d:
                        self.data[g].append(d[g][:])
                d.close()

            for k in self.data:
                if len(self.data[k]):
                    self.data[k] = np.concatenate(self.data[k])

        # Mirror each column as an attribute for convenient access.
        for k in self.data:
            setattr(self, k, self.data[k])

    def _return(self, data):
        # Subclasses override this to propagate extra metadata.
        return self.__class__(data=data)

    def __len__(self):
        # All arrays have equal length by construction; measure the first.
        return len(self.data[tuple(self.data.keys())[0]])

    def __add__(self, other):
        """Concatenate matching columns; keys missing in `other` are dropped."""
        data = {}
        for k in self.data:
            try:
                data[k] = np.concatenate([self.data[k], other.data[k]])
            except KeyError:
                logging.info('%s does not exist in other data' % k)
        return self._return(data=data)

    def select(self, idx):
        """ Return a new DictArray containing only the indexed values
        """
        data = {}
        for k in self.data:
            data[k] = self.data[k][idx]
        return self._return(data=data)

    def remove(self, idx):
        """ Return a new DictArray that does not contain the indexed values
        """
        data = {}
        for k in self.data:
            data[k] = np.delete(self.data[k], idx)
        return self._return(data=data)

    def save(self, outname):
        """Write all columns (and attrs, when defined) to an hdf5 file."""
        # Use a context manager so the handle is closed even on error.
        with HFile(outname, "w") as f:
            # The base class never sets 'attrs' (only subclasses do); the
            # original unconditional access raised AttributeError here.
            for k in getattr(self, 'attrs', {}):
                f.attrs[k] = self.attrs[k]

            for k in self.data:
                f.create_dataset(k, data=self.data[k],
                                 compression='gzip',
                                 compression_opts=9,
                                 shuffle=True)
class StatmapData(DictArray):
    """DictArray of coincident triggers plus segment/attribute metadata."""

    def __init__(self, data=None, seg=None, attrs=None, files=None,
                 groups=('stat', 'time1', 'time2', 'trigger_id1',
                         'trigger_id2', 'template_id', 'decimation_factor',
                         'timeslide_id')):
        super(StatmapData, self).__init__(data=data, files=files,
                                          groups=groups)

        if data:
            self.seg = seg
            self.attrs = attrs
        elif files:
            f = HFile(files[0], "r")
            self.seg = f['segments']
            self.attrs = f.attrs

    def _return(self, data):
        # Carry the segment and attribute metadata into derived instances.
        return self.__class__(data=data, attrs=self.attrs, seg=self.seg)

    def cluster(self, window):
        """ Cluster the dict array, assuming it has the relevant Coinc colums,
        time1, time2, stat, and timeslide_id
        """
        # If no events, do nothing
        if len(self.time1) == 0 or len(self.time2) == 0:
            return self
        from pycbc.events import cluster_coincs
        interval = self.attrs['timeslide_interval']
        cid = cluster_coincs(self.stat, self.time1, self.time2,
                             self.timeslide_id, interval, window)
        return self.select(cid)

    def save(self, outname):
        """Write columns (via DictArray.save) and then the segment lists."""
        super(StatmapData, self).save(outname)
        # Reopen in append mode: the original used mode "w" here, which
        # truncates the file and discards the datasets that the
        # DictArray.save() call above just wrote.
        with HFile(outname, "a") as f:
            for key in self.seg.keys():
                f['segments/%s/start' % key] = self.seg[key]['start'][:]
                f['segments/%s/end' % key] = self.seg[key]['end'][:]
class MultiifoStatmapData(StatmapData):
    """StatmapData variant whose per-detector columns are keyed by ifo name."""

    def __init__(self, data=None, seg=None, attrs=None,
                 files=None, ifos=None):
        # Common columns plus a time/trigger_id pair per detector.
        base = ['decimation_factor', 'stat', 'template_id', 'timeslide_id']
        per_ifo = ['%s/%s' % (ifo, field)
                   for ifo in ifos
                   for field in ('time', 'trigger_id')]
        super(MultiifoStatmapData, self).__init__(data=data, files=files,
                                                  groups=base + per_ifo,
                                                  attrs=attrs, seg=seg)

    def _return(self, data):
        # The detector list is recovered from the space-separated attribute.
        detectors = self.attrs['ifos'].split(' ')
        return self.__class__(data=data, attrs=self.attrs, seg=self.seg,
                              ifos=detectors)

    def cluster(self, window):
        """ Cluster the dict array, assuming it has the relevant Coinc colums,
        time1, time2, stat, and timeslide_id
        """
        # If no events, do nothing
        pivot_times = self.data['%s/time' % self.attrs['pivot']]
        fixed_times = self.data['%s/time' % self.attrs['fixed']]
        if len(pivot_times) == 0 or len(fixed_times) == 0:
            return self
        from pycbc.events import cluster_coincs
        cid = cluster_coincs(self.stat,
                             pivot_times,
                             fixed_times,
                             self.timeslide_id,
                             self.attrs['timeslide_interval'],
                             window)
        return self.select(cid)
class FileData(object):
    """Read access to one group of an hdf5 file with optional row filtering."""

    def __init__(self, fname, group=None, columnlist=None, filter_func=None):
        """
        Parameters
        ----------
        group : string
            Name of group to be read from the file
        columnlist : list of strings
            Names of columns to be read; if None, use all existing columns
        filter_func : string
            String should evaluate to a Boolean expression using attributes
            of the class instance derived from columns: ex. 'self.snr < 6.5'
        """
        if not fname: raise RuntimeError("Didn't get a file!")
        self.fname = fname
        self.h5file = HFile(fname, "r")
        if group is None:
            # A single top-level group can be inferred; more than one
            # is ambiguous and the caller must choose.
            if len(self.h5file.keys()) == 1:
                group, = self.h5file.keys()
            else:
                raise RuntimeError("Didn't get a group!")
        self.group_key = group
        self.group = self.h5file[group]
        self.columns = columnlist if columnlist is not None \
                       else list(self.group.keys())
        self.filter_func = filter_func
        # Boolean filter mask is computed lazily and cached here.
        self._mask = None

    def close(self):
        # Release the underlying hdf5 handle.
        self.h5file.close()

    @property
    def mask(self):
        """
        Create a mask implementing the requested filter on the datasets

        Returns
        -------
        array of Boolean
            True for dataset indices to be returned by the get_column method
        """
        if self.filter_func is None:
            raise RuntimeError("Can't get a mask without a filter function!")
        else:
            # only evaluate if no previous calculation was done
            if self._mask is None:
                # get required columns into the namespace as numpy arrays
                # (the filter string references them as 'self.<column>')
                for column in self.columns:
                    if column in self.filter_func:
                        setattr(self, column, self.group[column][:])
                # NOTE: filter_func is eval'd verbatim and can execute
                # arbitrary code -- only pass trusted expressions.
                self._mask = eval(self.filter_func)
        return self._mask

    def get_column(self, col):
        """
        Parameters
        ----------
        col : string
            Name of the dataset to be returned

        Returns
        -------
        numpy array
            Values from the dataset, filtered if requested
        """
        # catch corner case with an empty file (group with no datasets)
        if not len(self.group.keys()):
            return np.array([])
        vals = self.group[col]
        if self.filter_func:
            return vals[self.mask]
        else:
            return vals[:]
class DataFromFiles(object):
    """Aggregate one group's columns across several hdf5 files via FileData."""

    def __init__(self, filelist, group=None, columnlist=None, filter_func=None):
        self.files = filelist
        self.group = group
        self.columns = columnlist
        self.filter_func = filter_func

    def get_column(self, col):
        """
        Loop over files getting the requested dataset values from each

        Parameters
        ----------
        col : string
            Name of the dataset to be returned

        Returns
        -------
        numpy array
            Values from the dataset, filtered if requested and
            concatenated in order of file list
        """
        logging.info('getting %s' % col)
        vals = []
        for fname in self.files:
            fdata = FileData(fname, group=self.group, columnlist=self.columns,
                             filter_func=self.filter_func)
            vals.append(fdata.get_column(col))
            # h5py caps the number of simultaneously open file objects
            # (approx. 1000), so close each file as soon as it is read.
            fdata.close()
        logging.info('- got %i values' % sum(len(v) for v in vals))
        return np.concatenate(vals)
class SingleDetTriggers(object):
"""
Provides easy access to the parameters of single-detector CBC triggers.
"""
# FIXME: Some of these are optional and should be kwargs.
def __init__(self, trig_file, bank_file, veto_file,
segment_name, filter_func, detector, premask=None):
logging.info('Loading triggers')
self.trigs_f = HFile(trig_file, 'r')
self.trigs = self.trigs_f[detector]
self.ifo = detector # convenience attributes
self.detector = detector
if bank_file:
logging.info('Loading bank')
self.bank = HFile(bank_file, 'r')
else:
logging.info('No bank file given')
# empty dict in place of non-existent hdf file
self.bank = {}
if premask is not None:
self.mask = premask
else:
self.mask = np.ones(len(self.trigs['end_time']), dtype=bool)
if veto_file:
logging.info('Applying veto segments')
# veto_mask is an array of indices into the trigger arrays
# giving the surviving triggers
logging.info('%i triggers before vetoes', self.mask.sum())
self.veto_mask, _ = events.veto.indices_outside_segments(
self.end_time, [veto_file],
ifo=detector, segment_name=segment_name)
idx = np.flatnonzero(self.mask)[self.veto_mask]
self.mask[:] = False
self.mask[idx] = True
logging.info('%i triggers remain after vetoes',
len(self.veto_mask))
# FIXME this should use the hfile select interface to avoid
# memory and processing limitations.
if filter_func:
# get required columns into the namespace with dummy attribute
# names to avoid confusion with other class properties
logging.info('Setting up filter function')
for c in self.trigs.keys():
if c in filter_func:
setattr(self, '_'+c, self.trigs[c][:])
for c in self.bank.keys():
if c in filter_func:
# get template parameters corresponding to triggers
setattr(self, '_'+c,
np.array(self.bank[c])[self.trigs['template_id'][:]])
self.filter_mask = eval(filter_func.replace('self.', 'self._'))
# remove the dummy attributes
for c in chain(self.trigs.keys(), self.bank.keys()):
if c in filter_func: delattr(self, '_'+c)
self.mask = self.mask & self.filter_mask
logging.info('%i triggers remain after cut on %s',
sum(self.mask), filter_func)
def checkbank(self, param):
if self.bank == {}:
return RuntimeError("Can't get %s values without a bank file"
% param)
def trig_dict(self):
"""Returns dict of the masked trigger valuse """
mtrigs = {}
for k in self.trigs:
if len(self.trigs[k]) == len(self.trigs['end_time']):
if self.mask is not None:
mtrigs[k] = self.trigs[k][self.mask]
else:
mtrigs[k] = self.trigs[k][:]
return mtrigs
@classmethod
def get_param_names(cls):
"""Returns a list of plottable CBC parameter variables"""
return [m[0] for m in inspect.getmembers(cls) \
if type(m[1]) == property]
def apply_mask(self, logic_mask):
"""Apply a boolean array to the set of triggers"""
if hasattr(self.mask, 'dtype') and (self.mask.dtype == 'bool'):
orig_indices = self.mask.nonzero()[0][logic_mask]
self.mask[:] = False
self.mask[orig_indices] = True
else:
self.mask = list(np.array(self.mask)[logic_mask])
def mask_to_n_loudest_clustered_events(self, n_loudest=10,
ranking_statistic="newsnr",
cluster_window=10,
statistic_files=None):
"""Edits the mask property of the class to point to the N loudest
single detector events as ranked by ranking statistic. Events are
clustered so that no more than 1 event within +/- cluster-window will
be considered."""
if statistic_files is None:
statistic_files = []
# If this becomes memory intensive we can optimize
stat_instance = sngl_statistic_dict[ranking_statistic](statistic_files)
stat = stat_instance.single(self.trig_dict())
# Used for naming in plots ... Seems an odd place for this to live!
if ranking_statistic == "newsnr":
self.stat_name = "Reweighted SNR"
elif ranking_statistic == "newsnr_sgveto":
self.stat_name = "Reweighted SNR (+sgveto)"
elif ranking_statistic == "newsnr_sgveto_psdvar":
self.stat_name = "Reweighted SNR (+sgveto+psdvar)"
elif ranking_statistic == "snr":
self.stat_name = "SNR"
else:
self.stat_name = ranking_statistic
times = self.end_time
index = stat.argsort()[::-1]
new_times = []
new_index = []
for curr_idx in index:
curr_time = times[curr_idx]
for time in new_times:
if abs(curr_time - time) < cluster_window:
break
else:
# Only get here if no other triggers within cluster window
new_index.append(curr_idx)
new_times.append(curr_time)
if len(new_index) >= n_loudest:
break
index = np.array(new_index)
index.sort()
self.stat = stat[index]
if hasattr(self.mask, 'dtype') and self.mask.dtype == 'bool':
orig_indices = np.flatnonzero(self.mask)[index]
self.mask = list(orig_indices)
elif isinstance(self.mask, list):
self.mask = list(np.array(self.mask)[index])
    # ------------------------------------------------------------------
    # Template-bank parameters: each property below maps the per-trigger
    # template_id into the matching column of the attached bank file.
    # checkbank() (defined outside this view) presumably raises when the
    # bank or the requested column is unavailable — TODO confirm.
    # ------------------------------------------------------------------
    @property
    def template_id(self):
        # Index of the bank template associated with each trigger.
        return self.get_column('template_id')
    @property
    def mass1(self):
        self.checkbank('mass1')
        return self.bank['mass1'][:][self.template_id]
    @property
    def mass2(self):
        self.checkbank('mass2')
        return self.bank['mass2'][:][self.template_id]
    @property
    def spin1z(self):
        self.checkbank('spin1z')
        return self.bank['spin1z'][:][self.template_id]
    @property
    def spin2z(self):
        self.checkbank('spin2z')
        return self.bank['spin2z'][:][self.template_id]
    @property
    def spin2x(self):
        self.checkbank('spin2x')
        return self.bank['spin2x'][:][self.template_id]
    @property
    def spin2y(self):
        self.checkbank('spin2y')
        return self.bank['spin2y'][:][self.template_id]
    @property
    def spin1x(self):
        self.checkbank('spin1x')
        return self.bank['spin1x'][:][self.template_id]
    @property
    def spin1y(self):
        self.checkbank('spin1y')
        return self.bank['spin1y'][:][self.template_id]
    @property
    def inclination(self):
        self.checkbank('inclination')
        return self.bank['inclination'][:][self.template_id]
    @property
    def f_lower(self):
        self.checkbank('f_lower')
        return self.bank['f_lower'][:][self.template_id]
    # ------------------------------------------------------------------
    # Quantities derived from the bank parameters above.
    # ------------------------------------------------------------------
    @property
    def mtotal(self):
        # Total mass per trigger.
        return self.mass1 + self.mass2
    @property
    def mchirp(self):
        # Chirp mass, computed via the conversions helper module.
        return conversions.mchirp_from_mass1_mass2(self.mass1, self.mass2)
    @property
    def eta(self):
        # Symmetric mass ratio.
        return conversions.eta_from_mass1_mass2(self.mass1, self.mass2)
    @property
    def effective_spin(self):
        # FIXME assumes aligned spins
        return conversions.chi_eff(self.mass1, self.mass2,
                                   self.spin1z, self.spin2z)
    # IMPROVEME: would like to have a way to access all get_freq and/or
    # other pnutils.* names rather than hard-coding each one
    # - eg make this part of a fancy interface to the bank file ?
    @property
    def f_seobnrv2_peak(self):
        return pnutils.get_freq('fSEOBNRv2Peak', self.mass1, self.mass2,
                                self.spin1z, self.spin2z)
    @property
    def f_seobnrv4_peak(self):
        return pnutils.get_freq('fSEOBNRv4Peak', self.mass1, self.mass2,
                                self.spin1z, self.spin2z)
    # ------------------------------------------------------------------
    # Columns read directly from the trigger file, plus ranking statistics
    # built from them via the ranking helper module.
    # ------------------------------------------------------------------
    @property
    def end_time(self):
        return self.get_column('end_time')
    @property
    def template_duration(self):
        return self.get_column('template_duration')
    @property
    def snr(self):
        return self.get_column('snr')
    @property
    def sgchisq(self):
        return self.get_column('sg_chisq')
    @property
    def u_vals(self):
        return self.get_column('u_vals')
    @property
    def rchisq(self):
        # Reduced chi-squared: chisq / (2 * chisq_dof - 2).
        return self.get_column('chisq') \
            / (self.get_column('chisq_dof') * 2 - 2)
    @property
    def psd_var_val(self):
        return self.get_column('psd_var_val')
    @property
    def newsnr(self):
        # SNR reweighted by the reduced chi-squared.
        return ranking.newsnr(self.snr, self.rchisq)
    @property
    def newsnr_sgveto(self):
        # Reweighted SNR including the sine-Gaussian veto term.
        return ranking.newsnr_sgveto(self.snr, self.rchisq, self.sgchisq)
    @property
    def newsnr_sgveto_psdvar(self):
        # Reweighted SNR including sg-veto and PSD-variation terms.
        return ranking.newsnr_sgveto_psdvar(self.snr, self.rchisq,
                                            self.sgchisq, self.psd_var_val)
def get_column(self, cname):
# Fiducial value that seems to work, not extensively tuned.
MFRAC = 0.3
# If the mask accesses few enough elements then directly use it
# This can be slower than reading in all the elements if most of them
# will be read.
if self.mask is not None and (isinstance(self.mask, list) or \
(len(self.mask.nonzero()[0]) < (len(self.mask) * MFRAC))):
return self.trigs[cname][self.mask]
# We have a lot of elements to read so we resort to readin the entire
# array before masking.
elif self.mask is not None:
return self.trigs[cname][:][self.mask]
else:
return self.trigs[cname][:]
class ForegroundTriggers(object):
    # FIXME: A lot of this is hardcoded to expect two ifos
    """View of the foreground coincident events in a statmap file.

    Joins the coincidence file with the template bank and, when given, the
    per-detector single trigger files.  Accessors return events sorted by
    decreasing IFAR, truncated to ``n_loudest`` when that was provided.
    """
    def __init__(self, coinc_file, bank_file, sngl_files=None, n_loudest=None,
                 group='foreground'):
        self.coinc_file = FileData(coinc_file, group=group)
        # Newer files carry a space-separated 'ifos' attribute; older
        # two-detector files carry 'detector_1'/'detector_2' instead.
        if 'ifos' in self.coinc_file.h5file.attrs:
            self.ifos = self.coinc_file.h5file.attrs['ifos'].split(' ')
        else:
            self.ifos = [self.coinc_file.h5file.attrs['detector_1'],
                         self.coinc_file.h5file.attrs['detector_2']]
        self.sngl_files = {}
        if sngl_files is not None:
            for sngl_file in sngl_files:
                curr_dat = FileData(sngl_file)
                curr_ifo = curr_dat.group_key
                self.sngl_files[curr_ifo] = curr_dat
            # Every ifo in the statmap file must be covered by a trigger
            # file; extra trigger files only produce a warning below.
            if not all([ifo in self.sngl_files.keys() for ifo in self.ifos]):
                print("sngl_files: {}".format(sngl_files))
                print("self.ifos: {}".format(self.ifos))
                raise RuntimeError("IFOs in statmap file not all represented "
                                   "by single-detector trigger files.")
            if not sorted(self.sngl_files.keys()) == sorted(self.ifos):
                logging.warning("WARNING: Single-detector trigger files "
                                "given for IFOs not in the statmap file")
        self.bank_file = HFile(bank_file, "r")
        self.n_loudest = n_loudest
        # Lazily-computed caches backing the properties below.
        self._sort_arr = None
        self._template_id = None
        self._trig_ids = None
    @property
    def sort_arr(self):
        # Indices ordering events by decreasing IFAR, truncated to n_loudest
        # when set.  NOTE(review): truthiness check, so n_loudest=0 behaves
        # like None — confirm that is intended.
        if self._sort_arr is None:
            ifar = self.coinc_file.get_column('ifar')
            sorting = ifar.argsort()[::-1]
            if self.n_loudest:
                sorting = sorting[:self.n_loudest]
            self._sort_arr = sorting
        return self._sort_arr
    @property
    def template_id(self):
        # Bank template id for each (sorted) foreground event.
        if self._template_id is None:
            template_id = self.get_coincfile_array('template_id')
            self._template_id = template_id
        return self._template_id
    @property
    def trig_id(self):
        # Dict mapping ifo -> single-detector trigger ids per (sorted) event.
        if self._trig_ids is not None:
            return self._trig_ids
        self._trig_ids = {}
        try: # New style multi-ifo file
            ifos = self.coinc_file.h5file.attrs['ifos'].split(' ')
            for ifo in ifos:
                trigid = self.get_coincfile_array(ifo + '/trigger_id')
                self._trig_ids[ifo] = trigid
        except KeyError: # Old style two-ifo file
            ifo1 = self.coinc_file.h5file.attrs['detector_1']
            ifo2 = self.coinc_file.h5file.attrs['detector_2']
            trigid1 = self.get_coincfile_array('trigger_id1')
            trigid2 = self.get_coincfile_array('trigger_id2')
            self._trig_ids[ifo1] = trigid1
            self._trig_ids[ifo2] = trigid2
        return self._trig_ids
    def get_coincfile_array(self, variable):
        # Column from the statmap file, IFAR-sorted and truncated.
        return self.coinc_file.get_column(variable)[self.sort_arr]
    def get_bankfile_array(self, variable):
        # Bank column looked up per event via template_id.
        try:
            return self.bank_file[variable][:][self.template_id]
        except IndexError:
            # With zero events the fancy-indexing can raise; return empty.
            if len(self.template_id) == 0:
                return np.array([])
            raise
    def get_snglfile_array_dict(self, variable):
        # For each ifo return (values, valid): `valid` is False where the
        # ifo did not participate in the coincidence (trigger id == -1).
        return_dict = {}
        for ifo in self.ifos:
            try:
                tid = self.trig_id[ifo]
                lgc = tid == -1
                # Put in *some* value for the invalid points to avoid failure
                # Make sure this doesn't change the cached internal array!
                tid = np.copy(tid)
                tid[lgc] = 0
                # If small number of points don't read the full file
                if len(tid) < 1000:
                    curr = []
                    hdf_dataset = self.sngl_files[ifo].group[variable]
                    for idx in tid:
                        curr.append(hdf_dataset[idx])
                    curr = np.array(curr)
                else:
                    curr = self.sngl_files[ifo].get_column(variable)[tid]
            except IndexError:
                if len(self.trig_id[ifo]) == 0:
                    curr = np.array([])
                    lgc = curr == 0
                else:
                    raise
            return_dict[ifo] = (curr, np.logical_not(lgc))
        return return_dict
    def get_end_time(self):
        # Representative end time per event.  Negative entries (presumably
        # -1 sentinels for an absent ifo — TODO confirm) are filled in from
        # the next participating ifo's times.
        try: # First try new-style format
            ifos = self.coinc_file.h5file.attrs['ifos'].split(' ')
            ref_times = None
            for ifo in ifos:
                times = self.get_coincfile_array('{}/time'.format(ifo))
                if ref_times is None:
                    ref_times = times
                else:
                    ref_times[ref_times < 0] = times[ref_times < 0]
        except KeyError: # Else fall back on old two-det format
            ref_times = self.get_coincfile_array('time1')
        return ref_times
    def to_coinc_xml_object(self, file_name):
        """Write the foreground events to ``file_name`` as a LIGOLW XML
        coincidence document (sngl_inspiral plus the coinc_* tables)."""
        outdoc = ligolw.Document()
        outdoc.appendChild(ligolw.LIGO_LW())
        ifos = list(self.sngl_files.keys())
        proc_id = ligolw_process.register_to_xmldoc(outdoc, 'pycbc',
            {}, ifos=ifos, comment='', version=pycbc_version.git_hash,
            cvs_repository='pycbc/'+pycbc_version.git_branch,
            cvs_entry_time=pycbc_version.date).process_id
        search_summ_table = lsctables.New(lsctables.SearchSummaryTable)
        coinc_h5file = self.coinc_file.h5file
        # Analysis start/end: prefer the coincident segment; otherwise scan
        # all per-combination segment groups (skipping the veto group).
        try:
            start_time = coinc_h5file['segments']['coinc']['start'][:].min()
            end_time = coinc_h5file['segments']['coinc']['end'][:].max()
        except KeyError:
            start_times = []
            end_times = []
            for ifo_comb in coinc_h5file['segments']:
                if ifo_comb == 'foreground_veto':
                    continue
                seg_group = coinc_h5file['segments'][ifo_comb]
                start_times.append(seg_group['start'][:].min())
                end_times.append(seg_group['end'][:].max())
            start_time = min(start_times)
            end_time = max(end_times)
        num_trigs = len(self.sort_arr)
        search_summary = return_search_summary(start_time, end_time,
                                               num_trigs, ifos)
        search_summ_table.append(search_summary)
        outdoc.childNodes[0].appendChild(search_summ_table)
        sngl_inspiral_table = lsctables.New(lsctables.SnglInspiralTable)
        coinc_def_table = lsctables.New(lsctables.CoincDefTable)
        coinc_event_table = lsctables.New(lsctables.CoincTable)
        coinc_inspiral_table = lsctables.New(lsctables.CoincInspiralTable)
        coinc_event_map_table = lsctables.New(lsctables.CoincMapTable)
        time_slide_table = lsctables.New(lsctables.TimeSlideTable)
        # Set up time_slide table
        time_slide_id = lsctables.TimeSlideID(0)
        for ifo in ifos:
            time_slide_row = lsctables.TimeSlide()
            time_slide_row.instrument = ifo
            time_slide_row.time_slide_id = time_slide_id
            time_slide_row.offset = 0
            time_slide_row.process_id = proc_id
            time_slide_table.append(time_slide_row)
        # Set up coinc_definer table
        coinc_def_id = lsctables.CoincDefID(0)
        coinc_def_row = lsctables.CoincDef()
        coinc_def_row.search = "inspiral"
        coinc_def_row.description = \
            "sngl_inspiral<-->sngl_inspiral coincidences"
        coinc_def_row.coinc_def_id = coinc_def_id
        coinc_def_row.search_coinc_type = 0
        coinc_def_table.append(coinc_def_row)
        # Gather per-event columns from bank, statmap and trigger files.
        bank_col_names = ['mass1', 'mass2', 'spin1z', 'spin2z']
        bank_col_vals = {}
        for name in bank_col_names:
            bank_col_vals[name] = self.get_bankfile_array(name)
        coinc_event_names = ['ifar', 'time', 'fap', 'stat']
        coinc_event_vals = {}
        for name in coinc_event_names:
            if name == 'time':
                coinc_event_vals[name] = self.get_end_time()
            else:
                coinc_event_vals[name] = self.get_coincfile_array(name)
        sngl_col_names = ['snr', 'chisq', 'chisq_dof', 'bank_chisq',
                          'bank_chisq_dof', 'cont_chisq', 'cont_chisq_dof',
                          'end_time', 'template_duration', 'coa_phase',
                          'sigmasq']
        sngl_col_vals = {}
        for name in sngl_col_names:
            sngl_col_vals[name] = self.get_snglfile_array_dict(name)
        sngl_event_count = 0
        for idx in range(len(self.sort_arr)):
            # Set up IDs and mapping values
            coinc_id = lsctables.CoincID(idx)
            # Set up sngls
            # FIXME: As two-ifo is hardcoded loop over all ifos
            sngl_combined_mchirp = 0
            sngl_combined_mtot = 0
            net_snrsq = 0
            for ifo in ifos:
                # If this ifo is not participating in this coincidence then
                # ignore it and move on.
                if not sngl_col_vals['snr'][ifo][1][idx]:
                    continue
                event_id = lsctables.SnglInspiralID(sngl_event_count)
                sngl_event_count += 1
                sngl = return_empty_sngl()
                sngl.event_id = event_id
                sngl.ifo = ifo
                net_snrsq += sngl_col_vals['snr'][ifo][0][idx]**2
                for name in sngl_col_names:
                    val = sngl_col_vals[name][ifo][0][idx]
                    if name == 'end_time':
                        sngl.set_end(LIGOTimeGPS(val))
                    else:
                        setattr(sngl, name, val)
                for name in bank_col_names:
                    val = bank_col_vals[name][idx]
                    setattr(sngl, name, val)
                sngl.mtotal, sngl.eta = pnutils.mass1_mass2_to_mtotal_eta(
                    sngl.mass1, sngl.mass2)
                sngl.mchirp, _ = pnutils.mass1_mass2_to_mchirp_eta(
                    sngl.mass1, sngl.mass2)
                sngl.eff_distance = (sngl.sigmasq)**0.5 / sngl.snr
                sngl_combined_mchirp += sngl.mchirp
                sngl_combined_mtot += sngl.mtotal
                sngl_inspiral_table.append(sngl)
                # Set up coinc_map entry
                coinc_map_row = lsctables.CoincMap()
                coinc_map_row.table_name = 'sngl_inspiral'
                coinc_map_row.coinc_event_id = coinc_id
                coinc_map_row.event_id = event_id
                coinc_event_map_table.append(coinc_map_row)
            # NOTE(review): averages divide by the number of ifos, not the
            # number of participating ifos — confirm intended.
            sngl_combined_mchirp = sngl_combined_mchirp / len(ifos)
            sngl_combined_mtot = sngl_combined_mtot / len(ifos)
            # Set up coinc inspiral and coinc event tables
            coinc_event_row = lsctables.Coinc()
            coinc_inspiral_row = lsctables.CoincInspiral()
            coinc_event_row.coinc_def_id = coinc_def_id
            coinc_event_row.nevents = len(ifos)
            coinc_event_row.instruments = ','.join(ifos)
            coinc_inspiral_row.set_ifos(ifos)
            coinc_event_row.time_slide_id = time_slide_id
            coinc_event_row.process_id = proc_id
            coinc_event_row.coinc_event_id = coinc_id
            coinc_inspiral_row.coinc_event_id = coinc_id
            coinc_inspiral_row.mchirp = sngl_combined_mchirp
            coinc_inspiral_row.mass = sngl_combined_mtot
            coinc_inspiral_row.set_end(
                LIGOTimeGPS(coinc_event_vals['time'][idx])
            )
            coinc_inspiral_row.snr = net_snrsq**0.5
            coinc_inspiral_row.false_alarm_rate = coinc_event_vals['fap'][idx]
            coinc_inspiral_row.combined_far = 1./coinc_event_vals['ifar'][idx]
            # Transform to Hz
            coinc_inspiral_row.combined_far = \
                coinc_inspiral_row.combined_far / YRJUL_SI
            coinc_event_row.likelihood = coinc_event_vals['stat'][idx]
            coinc_inspiral_row.minimum_duration = 0.
            coinc_event_table.append(coinc_event_row)
            coinc_inspiral_table.append(coinc_inspiral_row)
        outdoc.childNodes[0].appendChild(coinc_def_table)
        outdoc.childNodes[0].appendChild(coinc_event_table)
        outdoc.childNodes[0].appendChild(coinc_event_map_table)
        outdoc.childNodes[0].appendChild(time_slide_table)
        outdoc.childNodes[0].appendChild(coinc_inspiral_table)
        outdoc.childNodes[0].appendChild(sngl_inspiral_table)
        ligolw_utils.write_filename(outdoc, file_name)
class ReadByTemplate(object):
    """Random access to single-detector triggers grouped by template.

    Wraps a single-detector trigger hdf file and (optionally) a template
    bank, applying veto segments to restrict which trigger times count as
    valid.  Call :meth:`set_template` to select a template, then index the
    object like a dictionary to read its (veto-filtered) trigger columns.
    """
    def __init__(self, filename, bank=None, segment_name=None, veto_files=None):
        """
        Parameters
        ----------
        filename : str
            Path of the single-detector trigger hdf file.
        bank : str, optional
            Path of the template bank hdf file.
        segment_name : list, optional
            Veto segment definer names, one per entry of ``veto_files``.
        veto_files : list, optional
            Paths of veto definer files to subtract from the valid segments.
        """
        self.filename = filename
        self.file = h5py.File(filename, 'r')
        self.ifo = tuple(self.file.keys())[0]
        self.valid = None
        self.bank = h5py.File(bank, 'r') if bank else {}
        # No template selected yet.  Initializing this here lets
        # __getitem__ raise its intended ValueError instead of an
        # AttributeError when called before set_template().
        self.template_num = None
        # Determine the segments which define the boundaries of valid times
        # to use triggers
        key = '%s/search/' % self.ifo
        s, e = self.file[key + 'start_time'][:], self.file[key + 'end_time'][:]
        self.segs = veto.start_end_to_segments(s, e).coalesce()
        if segment_name is None:
            segment_name = []
        if veto_files is None:
            veto_files = []
        for vfile, name in zip(veto_files, segment_name):
            veto_segs = veto.select_segments_by_definer(vfile, ifo=self.ifo,
                                                        segment_name=name)
            self.segs = (self.segs - veto_segs).coalesce()
            self.valid = veto.segments_to_start_end(self.segs)
    def get_data(self, col, num):
        """ Get a column of data for template with id 'num'
        Parameters
        ----------
        col: str
            Name of column to read
        num: int
            The template id to read triggers for
        Returns
        -------
        data: numpy.ndarray
            The requested column of data
        """
        # The '<col>_template' dataset stores, per template, a reference to
        # that template's slice of the flat '<col>' dataset.
        ref = self.file['%s/%s_template' % (self.ifo, col)][num]
        return self.file['%s/%s' % (self.ifo, col)][ref]
    def set_template(self, num):
        """ Set the active template to read from
        Parameters
        ----------
        num: int
            The template id to read triggers for
        Returns
        -------
        trigger_id: numpy.ndarray
            The indices of this template's triggers
        """
        self.template_num = num
        times = self.get_data('end_time', num)
        # Determine which of these template's triggers are kept after
        # applying vetoes
        if self.valid:
            self.keep = veto.indices_within_times(times, self.valid[0],
                                                  self.valid[1])
            # logging.info('applying vetoes')
        else:
            self.keep = np.arange(0, len(times))
        if self.bank != {}:
            self.param = {}
            if 'parameters' in self.bank.attrs:
                for col in self.bank.attrs['parameters']:
                    self.param[col] = self.bank[col][self.template_num]
            else:
                for col in self.bank:
                    self.param[col] = self.bank[col][self.template_num]
        # Calculate the trigger id by adding the relative offset in self.keep
        # to the absolute beginning index of this template's triggers stored
        # in 'template_boundaries'
        trigger_id = self.keep + \
            self.file['%s/template_boundaries' % self.ifo][num]
        return trigger_id
    def __getitem__(self, col):
        """ Return the column of data for current active template after
        applying vetoes
        Parameters
        ----------
        col: str
            Name of column to read
        Returns
        -------
        data: numpy.ndarray
            The requested column of data
        """
        if self.template_num is None:
            raise ValueError('You must call set_template to first pick the '
                             'template to read data from')
        data = self.get_data(col, self.template_num)
        data = data[self.keep] if self.valid else data
        return data
# Recognised values for the --chisq-choice option consumed by
# get_chisq_from_file_choice() below.
chisq_choices = ['traditional', 'cont', 'bank', 'max_cont_trad', 'sg',
                 'max_bank_cont', 'max_bank_trad', 'max_bank_cont_trad']
def get_chisq_from_file_choice(hdfile, chisq_choice):
    """Return the reduced chi-squared array selected by ``chisq_choice``.

    ``hdfile`` must provide the relevant ``*_chisq`` / ``*_chisq_dof``
    columns.  The ``max_*`` choices take the element-wise maximum of the
    named reduced chi-squared arrays.

    Raises
    ------
    ValueError
        If ``chisq_choice`` is not one of the recognised options.
    """
    f = hdfile
    # Work out up-front which base arrays this choice requires.
    wants_trad = chisq_choice in ['traditional', 'max_cont_trad',
                                  'max_bank_trad', 'max_bank_cont_trad']
    wants_cont = chisq_choice in ['cont', 'max_cont_trad', 'max_bank_cont',
                                  'max_bank_cont_trad']
    wants_bank = chisq_choice in ['bank', 'max_bank_cont', 'max_bank_trad',
                                  'max_bank_cont_trad']
    if wants_trad:
        trad_chisq = f['chisq'][:]
        # We now need to handle the case where chisq is not actually
        # calculated; 0 is used as a sentinel value.
        trad_chisq_dof = f['chisq_dof'][:]
        trad_chisq /= (trad_chisq_dof * 2 - 2)
    if wants_cont:
        cont_chisq = f['cont_chisq'][:]
        cont_chisq_dof = f['cont_chisq_dof'][:]
        cont_chisq /= cont_chisq_dof
    if wants_bank:
        bank_chisq = f['bank_chisq'][:]
        bank_chisq_dof = f['bank_chisq_dof'][:]
        bank_chisq /= bank_chisq_dof
    if chisq_choice == 'sg':
        chisq = f['sg_chisq'][:]
    elif chisq_choice == 'traditional':
        chisq = trad_chisq
    elif chisq_choice == 'cont':
        chisq = cont_chisq
    elif chisq_choice == 'bank':
        chisq = bank_chisq
    elif chisq_choice == 'max_cont_trad':
        chisq = np.maximum(trad_chisq, cont_chisq)
    elif chisq_choice == 'max_bank_cont':
        chisq = np.maximum(bank_chisq, cont_chisq)
    elif chisq_choice == 'max_bank_trad':
        chisq = np.maximum(bank_chisq, trad_chisq)
    elif chisq_choice == 'max_bank_cont_trad':
        chisq = np.maximum(np.maximum(bank_chisq, cont_chisq), trad_chisq)
    else:
        err_msg = "Do not recognize --chisq-choice %s" % chisq_choice
        raise ValueError(err_msg)
    return chisq
def save_dict_to_hdf5(dic, filename):
    """Serialise a (possibly nested) dictionary to a freshly created hdf5
    file.

    Parameters
    ----------
    dic:
        python dictionary to be converted to hdf5 format
    filename:
        desired name of hdf5 file
    """
    # Delegate the actual walking of the dictionary to the recursive helper,
    # rooted at the top-level '/' group.
    with h5py.File(filename, 'w') as outfile:
        recursively_save_dict_contents_to_group(outfile, '/', dic)
def recursively_save_dict_contents_to_group(h5file, path, dic):
    """Recursively write the contents of ``dic`` under ``path`` in ``h5file``.

    Parameters
    ----------
    h5file:
        h5py file (or file-like mapping) to be written to
    path:
        path within h5py file to saved dictionary; should end in '/'
    dic:
        python dictionary to be converted to hdf5 format

    Raises
    ------
    ValueError
        If a value's type cannot be stored.
    """
    for key, item in dic.items():
        if isinstance(item, (np.ndarray, np.int64, np.float64, str, int, float,
                             bytes, tuple, list)):
            h5file[path + str(key)] = item
        elif isinstance(item, dict):
            # Use str(key) here as well so non-string keys (e.g. ints) are
            # handled consistently with the dataset branch above instead of
            # raising a TypeError on concatenation.
            recursively_save_dict_contents_to_group(
                h5file, path + str(key) + '/', item)
        else:
            raise ValueError('Cannot save %s type' % type(item))
def load_hdf5_to_dict(h5file, path):
    """Load the hdf5 group at ``path`` into a nested python dictionary.

    Parameters
    ----------
    h5file:
        h5py file to be loaded as a dictionary
    path:
        path within h5py file to load: '/' for the whole h5py file

    Returns
    -------
    dic:
        dictionary with hdf5 file group content
    """
    dic = {}
    for key, item in h5file[path].items():
        if isinstance(item, h5py.Group):
            # Recurse into sub-groups, mirroring them as nested dicts.
            dic[key] = load_hdf5_to_dict(h5file, path + key + '/')
        elif isinstance(item, h5py.Dataset):
            # Datasets are read in full via the scalar () index.
            dic[key] = item[()]
        else:
            raise ValueError('Cannot load %s type' % type(item))
    return dic
def combine_and_copy(f, files, group):
    """ Combine the same column from multiple files and save to a third"""
    # ensure that the files input is stable for iteration order
    assert isinstance(files, (list, tuple))
    pieces = []
    for fi in files:
        if group in fi:
            pieces.append(fi[group][:])
        else:
            # Files missing the dataset contribute an empty uint32 array so
            # the concatenation still succeeds.
            pieces.append(np.array([], dtype=np.uint32))
    f[group] = np.concatenate(pieces)
def name_all_datasets(files):
    """Return the set of all dataset paths appearing in any of ``files``."""
    assert isinstance(files, (list, tuple))
    all_paths = []
    for fi in files:
        all_paths.extend(get_all_subkeys(fi, '/'))
    return set(all_paths)
def get_all_subkeys(grp, key):
    """Recursively collect every dataset path below ``key`` within ``grp``.

    Returned paths have any leading '/' stripped.  An empty list is
    returned if there is no dataset or subgroup within the group.
    """
    grpk = grp if key == '' else grp[key]
    found = []
    for sk in grpk.keys():
        path = key + '/' + sk
        if isinstance(grp[path], h5py.Dataset):
            found.append(path.lstrip('/'))
        else:
            # Non-dataset entries are groups: descend into them.
            found += get_all_subkeys(grp, path)
    return found
#
# =============================================================================
#
# Checkpointing utilities
#
# =============================================================================
#
def dump_state(state, fp, path=None, dsetname='state',
               protocol=pickle.HIGHEST_PROTOCOL):
    """Pickle ``state`` and store it in an open hdf5 file handler.

    The state is written as a raw binary array to ``{path}/{dsetname}``.
    An existing dataset of the same name and path is resized and
    overwritten with the new state data.

    Parameters
    ----------
    state : any picklable object
        The sampler state to dump to file. Can be the object returned by
        any of the samplers' `.state` attribute (a dictionary of
        dictionaries), or any picklable object.
    fp : h5py.File
        An open hdf5 file handler. Must have write capability enabled.
    path : str, optional
        The path (group name) to store the state dataset to. Default (None)
        will result in the array being stored to the top level.
    dsetname : str, optional
        The name of the dataset to store the binary array to. Default is
        ``state``.
    protocol : int, optional
        The protocol version to use for pickling. See the :py:mod:`pickle`
        module for more details.
    """
    # Pickle into an in-memory buffer, then hand off to the hdf writer.
    buffer = BytesIO()
    pickle.dump(state, buffer, protocol=protocol)
    dump_pickle_to_hdf(buffer, fp, path=path, dsetname=dsetname)
def dump_pickle_to_hdf(memfp, fp, path=None, dsetname='state'):
    """Dumps pickled data to an hdf5 file object.

    The bytes in ``memfp`` are stored as a raw ``S1`` array under
    ``{path}/{dsetname}``, resizing any existing dataset of that name.

    Parameters
    ----------
    memfp : file object
        Bytes stream of pickled data.
    fp : h5py.File
        An open hdf5 file handler. Must have write capability enabled.
    path : str, optional
        The path (group name) to store the state dataset to. Default (None)
        will result in the array being stored to the top level.
    dsetname : str, optional
        The name of the dataset to store the binary array to. Default is
        ``state``.
    """
    memfp.seek(0)
    raw = np.frombuffer(memfp.read(), dtype='S1')
    target = dsetname if path is None else path + '/' + dsetname
    if target not in fp:
        # First write: create a resizable 1D byte dataset.
        fp.create_dataset(target, shape=raw.shape, maxshape=(None,),
                          dtype=raw.dtype)
    elif raw.size != fp[target].shape[0]:
        # Existing dataset with a different length: resize before overwrite.
        fp[target].resize((raw.size,))
    fp[target][:] = raw
def load_state(fp, path=None, dsetname='state'):
    """Loads a sampler state from the given hdf5 file object.

    The sampler state is expected to be stored as a raw bytes array which
    can be loaded by pickle.

    Parameters
    ----------
    fp : h5py.File
        An open hdf5 file handler.
    path : str, optional
        The path (group name) that the state data is stored to. Default (None)
        is to read from the top level.
    dsetname : str, optional
        The name of the dataset that the state data is stored to. Default is
        ``state``.
    """
    group = fp if path is None else fp[path]
    raw_bytes = group[dsetname][()].tobytes()
    return pickle.load(BytesIO(raw_bytes))
| gpl-3.0 |
pipermerriam/flex | tests/validation/request/test_request_header_validation.py | 1 | 2831 | import pytest
from flex.validation.request import (
validate_request,
)
from flex.error_messages import MESSAGES
from flex.exceptions import ValidationError
from flex.constants import (
INTEGER,
HEADER,
ARRAY,
CSV,
SSV,
TSV,
PIPES,
)
from tests.factories import (
SchemaFactory,
RequestFactory,
)
from tests.utils import assert_message_in_errors
def test_request_header_validation():
    """An integer-typed header parameter must reject non-numeric values and
    accept numeric strings (headers always arrive as strings)."""
    auth_param = {
        'name': 'Authorization',
        'in': HEADER,
        'type': INTEGER,
    }
    schema = SchemaFactory(
        paths={
            '/get/': {
                'get': {
                    'responses': {'200': {'description': "Success"}},
                    'parameters': [auth_param],
                },
            },
        },
    )
    bad_request = RequestFactory(
        url='http://www.example.com/get/',
        headers={'Authorization': 'abc'},
    )
    with pytest.raises(ValidationError) as err:
        validate_request(
            request=bad_request,
            schema=schema,
        )
    assert_message_in_errors(
        MESSAGES['type']['invalid'],
        err.value.detail,
        'method.parameters.headers.Authorization.type',
    )
    # integers within strings since headers are strings + undeclared parameters:
    good_request = RequestFactory(
        url='http://www.example.com/get/?foo=1',
        headers={'Authorization': '123'},
    )
    validate_request(
        request=good_request,
        schema=schema,
    )
# Each parametrised case pairs a collectionFormat with a header value encoded
# in that format; every value decodes to the array [1, 2, 3].
@pytest.mark.parametrize(
    'format_,value',
    (
        (CSV, '1,2,3'),
        (SSV, '1 2 3'),
        (TSV, '1\t2\t3'),
        (PIPES, '1|2|3'),
    ),
)
def test_request_header_array_extraction(format_, value):
    # The schema declares an integer-array header whose item bounds and
    # length exactly match the decoded value, so validation must pass for
    # every supported collection format.
    schema = SchemaFactory(
        paths={
            '/get/': {
                'get': {
                    'responses': {200: {'description': "Success"}},
                    'parameters': [
                        {
                            'name': 'Authorization',
                            'in': HEADER,
                            'type': ARRAY,
                            'collectionFormat': format_,
                            'minItems': 3,
                            'maxItems': 3,
                            'items': {
                                'type': INTEGER,
                                'minimum': 1,
                                'maximum': 3,
                            },
                        },
                    ],
                },
            },
        },
    )
    request = RequestFactory(
        url='http://www.example.com/get/',
        headers={'Authorization': value},
    )
    # Must not raise.
    validate_request(
        request=request,
        schema=schema,
    )
| mit |
wooga/airflow | tests/providers/apache/hive/hooks/test_hive.py | 1 | 34850 | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import datetime
import itertools
import os
import unittest
from collections import OrderedDict, namedtuple
from unittest import mock
import pandas as pd
from hmsclient import HMSClient
from airflow.exceptions import AirflowException
from airflow.models.connection import Connection
from airflow.models.dag import DAG
from airflow.providers.apache.hive.hooks.hive import HiveMetastoreHook, HiveServer2Hook
from airflow.secrets.environment_variables import CONN_ENV_PREFIX
from airflow.utils import timezone
from airflow.utils.operator_helpers import AIRFLOW_VAR_NAME_FORMAT_MAPPING
from tests.test_utils.asserts import assert_equal_ignore_multiple_spaces
from tests.test_utils.mock_hooks import MockHiveCliHook, MockHiveServer2Hook
from tests.test_utils.mock_process import MockSubProcess
# Fixed reference date used throughout these tests, plus its ISO timestamp
# and date-stamp (YYYY-MM-DD) string forms.
DEFAULT_DATE = timezone.datetime(2015, 1, 1)
DEFAULT_DATE_ISO = DEFAULT_DATE.isoformat()
DEFAULT_DATE_DS = DEFAULT_DATE_ISO[:10]
class TestHiveEnvironment(unittest.TestCase):
    """Shared fixtures for Hive tests: common date/table attributes and a
    HiveMetastoreHook whose metastore client is mocked so construction does
    not attempt a real connection."""
    def setUp(self):
        # Day after DEFAULT_DATE as an ISO date string (YYYY-MM-DD).
        self.next_day = (DEFAULT_DATE +
                         datetime.timedelta(days=1)).isoformat()[:10]
        self.database = 'airflow'
        self.partition_by = 'ds'
        self.table = 'static_babynames_partitioned'
        # Patch get_metastore_client for the duration of hook construction.
        with mock.patch('airflow.providers.apache.hive.hooks.hive.HiveMetastoreHook.get_metastore_client'
                        ) as get_metastore_mock:
            get_metastore_mock.return_value = mock.MagicMock()
            self.hook = HiveMetastoreHook()
class TestHiveCliHook(unittest.TestCase):
    # Decorators apply bottom-up: mock_popen patches subprocess.Popen and
    # mock_temp_dir patches the tempfile name sequence; the tempfile.tempdir
    # patch pins the temp directory without injecting an argument.
    @mock.patch('tempfile.tempdir', '/tmp/')
    @mock.patch('tempfile._RandomNameSequence.__next__')
    @mock.patch('subprocess.Popen')
    def test_run_cli(self, mock_popen, mock_temp_dir):
        # run_cli should spawn beeline with every airflow context variable
        # forwarded as a -hiveconf pair, plus queue settings and the temp
        # file holding the HQL.
        mock_subprocess = MockSubProcess()
        mock_popen.return_value = mock_subprocess
        mock_temp_dir.return_value = "test_run_cli"
        with mock.patch.dict('os.environ', {
            'AIRFLOW_CTX_DAG_ID': 'test_dag_id',
            'AIRFLOW_CTX_TASK_ID': 'test_task_id',
            'AIRFLOW_CTX_EXECUTION_DATE': '2015-01-01T00:00:00+00:00',
            'AIRFLOW_CTX_DAG_RUN_ID': '55',
            'AIRFLOW_CTX_DAG_OWNER': 'airflow',
            'AIRFLOW_CTX_DAG_EMAIL': 'test@airflow.com',
        }):
            hook = MockHiveCliHook()
            hook.run_cli("SHOW DATABASES")
        # Expected full beeline invocation, including the generated temp
        # file name derived from the patched name sequence above.
        hive_cmd = ['beeline', '-u', '"jdbc:hive2://localhost:10000/default"', '-hiveconf',
                    'airflow.ctx.dag_id=test_dag_id', '-hiveconf', 'airflow.ctx.task_id=test_task_id',
                    '-hiveconf', 'airflow.ctx.execution_date=2015-01-01T00:00:00+00:00', '-hiveconf',
                    'airflow.ctx.dag_run_id=55', '-hiveconf', 'airflow.ctx.dag_owner=airflow',
                    '-hiveconf', 'airflow.ctx.dag_email=test@airflow.com', '-hiveconf',
                    'mapreduce.job.queuename=airflow', '-hiveconf', 'mapred.job.queue.name=airflow',
                    '-hiveconf', 'tez.queue.name=airflow', '-f',
                    '/tmp/airflow_hiveop_test_run_cli/tmptest_run_cli']
        mock_popen.assert_called_with(
            hive_cmd,
            stdout=mock_subprocess.PIPE,
            stderr=mock_subprocess.STDOUT,
            cwd="/tmp/airflow_hiveop_test_run_cli",
            close_fds=True
        )
    @mock.patch('subprocess.Popen')
    def test_run_cli_with_hive_conf(self, mock_popen):
        # run_cli must inject the airflow context variables (taken from the
        # environment) into the hive_conf, and return the raw beeline output.
        hql = "set key;\n" \
              "set airflow.ctx.dag_id;\nset airflow.ctx.dag_run_id;\n" \
              "set airflow.ctx.task_id;\nset airflow.ctx.execution_date;\n"
        # Environment-variable names for the context values, looked up from
        # the airflow mapping.
        dag_id_ctx_var_name = \
            AIRFLOW_VAR_NAME_FORMAT_MAPPING['AIRFLOW_CONTEXT_DAG_ID']['env_var_format']
        task_id_ctx_var_name = \
            AIRFLOW_VAR_NAME_FORMAT_MAPPING['AIRFLOW_CONTEXT_TASK_ID']['env_var_format']
        execution_date_ctx_var_name = \
            AIRFLOW_VAR_NAME_FORMAT_MAPPING['AIRFLOW_CONTEXT_EXECUTION_DATE'][
                'env_var_format']
        dag_run_id_ctx_var_name = \
            AIRFLOW_VAR_NAME_FORMAT_MAPPING['AIRFLOW_CONTEXT_DAG_RUN_ID'][
                'env_var_format']
        # Canned beeline session transcript fed to the mocked subprocess.
        mock_output = ['Connecting to jdbc:hive2://localhost:10000/default',
                       'log4j:WARN No appenders could be found for logger (org.apache.hive.jdbc.Utils).',
                       'log4j:WARN Please initialize the log4j system properly.',
                       'log4j:WARN See http://logging.apache.org/log4j/1.2/faq.html#noconfig for more info.',
                       'Connected to: Apache Hive (version 1.2.1.2.3.2.0-2950)',
                       'Driver: Hive JDBC (version 1.2.1.spark2)',
                       'Transaction isolation: TRANSACTION_REPEATABLE_READ',
                       '0: jdbc:hive2://localhost:10000/default> USE default;',
                       'No rows affected (0.37 seconds)',
                       '0: jdbc:hive2://localhost:10000/default> set key;',
                       '+------------+--+',
                       '|    set     |',
                       '+------------+--+',
                       '| key=value  |',
                       '+------------+--+',
                       '1 row selected (0.133 seconds)',
                       '0: jdbc:hive2://localhost:10000/default> set airflow.ctx.dag_id;',
                       '+---------------------------------+--+',
                       '|               set               |',
                       '+---------------------------------+--+',
                       '| airflow.ctx.dag_id=test_dag_id  |',
                       '+---------------------------------+--+',
                       '1 row selected (0.008 seconds)',
                       '0: jdbc:hive2://localhost:10000/default> set airflow.ctx.dag_run_id;',
                       '+-----------------------------------------+--+',
                       '|                   set                   |',
                       '+-----------------------------------------+--+',
                       '| airflow.ctx.dag_run_id=test_dag_run_id  |',
                       '+-----------------------------------------+--+',
                       '1 row selected (0.007 seconds)',
                       '0: jdbc:hive2://localhost:10000/default> set airflow.ctx.task_id;',
                       '+-----------------------------------+--+',
                       '|                set                |',
                       '+-----------------------------------+--+',
                       '| airflow.ctx.task_id=test_task_id  |',
                       '+-----------------------------------+--+',
                       '1 row selected (0.009 seconds)',
                       '0: jdbc:hive2://localhost:10000/default> set airflow.ctx.execution_date;',
                       '+-------------------------------------------------+--+',
                       '|                       set                       |',
                       '+-------------------------------------------------+--+',
                       '| airflow.ctx.execution_date=test_execution_date  |',
                       '+-------------------------------------------------+--+',
                       '1 row selected (0.006 seconds)',
                       '0: jdbc:hive2://localhost:10000/default> ',
                       '0: jdbc:hive2://localhost:10000/default> ',
                       'Closing: 0: jdbc:hive2://localhost:10000/default',
                       '']
        with mock.patch.dict('os.environ', {
            dag_id_ctx_var_name: 'test_dag_id',
            task_id_ctx_var_name: 'test_task_id',
            execution_date_ctx_var_name: 'test_execution_date',
            dag_run_id_ctx_var_name: 'test_dag_run_id',
        }):
            hook = MockHiveCliHook()
            mock_popen.return_value = MockSubProcess(output=mock_output)
            output = hook.run_cli(hql=hql, hive_conf={'key': 'value'})
            # The values must appear both in the spawned command line ...
            process_inputs = " ".join(mock_popen.call_args_list[0][0][0])
            self.assertIn('value', process_inputs)
            self.assertIn('test_dag_id', process_inputs)
            self.assertIn('test_task_id', process_inputs)
            self.assertIn('test_execution_date', process_inputs)
            self.assertIn('test_dag_run_id', process_inputs)
            # ... and in the captured beeline output.
            self.assertIn('value', output)
            self.assertIn('test_dag_id', output)
            self.assertIn('test_task_id', output)
            self.assertIn('test_execution_date', output)
            self.assertIn('test_dag_run_id', output)
@mock.patch('airflow.providers.apache.hive.hooks.hive.HiveCliHook.run_cli')
def test_load_file_without_create_table(self, mock_run_cli):
filepath = "/path/to/input/file"
table = "output_table"
hook = MockHiveCliHook()
hook.load_file(filepath=filepath, table=table, create=False)
query = (
"LOAD DATA LOCAL INPATH '{filepath}' "
"OVERWRITE INTO TABLE {table} ;\n"
.format(filepath=filepath, table=table)
)
calls = [
mock.call(query)
]
mock_run_cli.assert_has_calls(calls, any_order=True)
    @mock.patch('airflow.providers.apache.hive.hooks.hive.HiveCliHook.run_cli')
    def test_load_file_create_table(self, mock_run_cli):
        # With create=True and recreate=True, load_file must issue a
        # DROP+CREATE statement followed by the LOAD DATA statement.
        filepath = "/path/to/input/file"
        table = "output_table"
        field_dict = OrderedDict([("name", "string"), ("gender", "string")])
        # Column clause as the hook is expected to render it.
        fields = ",\n    ".join(
            ['`{k}` {v}'.format(k=k.strip('`'), v=v) for k, v in field_dict.items()])
        hook = MockHiveCliHook()
        hook.load_file(filepath=filepath, table=table,
                       field_dict=field_dict, create=True, recreate=True)
        create_table = (
            "DROP TABLE IF EXISTS {table};\n"
            "CREATE TABLE IF NOT EXISTS {table} (\n{fields})\n"
            "ROW FORMAT DELIMITED\n"
            "FIELDS TERMINATED BY ','\n"
            "STORED AS textfile\n;".format(table=table, fields=fields)
        )
        load_data = (
            "LOAD DATA LOCAL INPATH '{filepath}' "
            "OVERWRITE INTO TABLE {table} ;\n"
            .format(filepath=filepath, table=table)
        )
        # Both statements must have been passed to run_cli.
        calls = [
            mock.call(create_table),
            mock.call(load_data)
        ]
        mock_run_cli.assert_has_calls(calls, any_order=True)
@mock.patch('airflow.providers.apache.hive.hooks.hive.HiveCliHook.load_file')
@mock.patch('pandas.DataFrame.to_csv')
def test_load_df(self, mock_to_csv, mock_load_file):
    """load_df must serialize via to_csv and delegate to load_file with matching kwargs."""
    frame = pd.DataFrame({"c": ["foo", "bar", "baz"]})
    target_table = "t"
    sep = ","
    charset = "utf-8"

    hook = MockHiveCliHook()
    hook.load_df(df=frame, table=target_table, delimiter=sep, encoding=charset)

    # Exactly one CSV dump, headerless and index-free, using our delimiter.
    assert mock_to_csv.call_count == 1
    csv_kwargs = mock_to_csv.call_args[1]
    self.assertEqual(csv_kwargs["header"], False)
    self.assertEqual(csv_kwargs["index"], False)
    self.assertEqual(csv_kwargs["sep"], sep)

    # Exactly one delegation to load_file with an ordered STRING field map.
    assert mock_load_file.call_count == 1
    load_kwargs = mock_load_file.call_args[1]
    self.assertEqual(load_kwargs["delimiter"], sep)
    self.assertEqual(load_kwargs["field_dict"], {"c": "STRING"})
    self.assertTrue(isinstance(load_kwargs["field_dict"], OrderedDict))
    self.assertEqual(load_kwargs["table"], target_table)
@mock.patch('airflow.providers.apache.hive.hooks.hive.HiveCliHook.load_file')
@mock.patch('pandas.DataFrame.to_csv')
def test_load_df_with_optional_parameters(self, mock_to_csv, mock_load_file):
    """The create/recreate flags must be forwarded verbatim to load_file."""
    hook = MockHiveCliHook()
    # Same (create, recreate) order as itertools.product((True, False), repeat=2).
    for create_flag in (True, False):
        for recreate_flag in (True, False):
            mock_load_file.reset_mock()
            hook.load_df(df=pd.DataFrame({"c": range(0, 10)}),
                         table="t",
                         create=create_flag,
                         recreate=recreate_flag)

            assert mock_load_file.call_count == 1
            forwarded = mock_load_file.call_args[1]
            self.assertEqual(forwarded["create"], create_flag)
            self.assertEqual(forwarded["recreate"], recreate_flag)
@mock.patch('airflow.providers.apache.hive.hooks.hive.HiveCliHook.run_cli')
def test_load_df_with_data_types(self, mock_run_cli):
    """Each pandas column dtype must be mapped to the expected Hive column type."""
    # Column names hint at the NumPy dtype kind each value is meant to exercise.
    ord_dict = OrderedDict()
    ord_dict['b'] = [True]
    ord_dict['i'] = [-1]
    ord_dict['t'] = [1]
    ord_dict['f'] = [0.0]
    ord_dict['c'] = ['c']
    ord_dict['M'] = [datetime.datetime(2018, 1, 1)]
    ord_dict['O'] = [object()]
    ord_dict['S'] = [b'STRING']
    ord_dict['U'] = ['STRING']
    ord_dict['V'] = [None]
    df = pd.DataFrame(ord_dict)

    hook = MockHiveCliHook()
    hook.load_df(df, 't')

    # Whitespace differences are normalized away by the comparison helper below.
    query = """
        CREATE TABLE IF NOT EXISTS t (
        `b` BOOLEAN,
        `i` BIGINT,
        `t` BIGINT,
        `f` DOUBLE,
        `c` STRING,
        `M` TIMESTAMP,
        `O` STRING,
        `S` STRING,
        `U` STRING,
        `V` STRING)
        ROW FORMAT DELIMITED
        FIELDS TERMINATED BY ','
        STORED AS textfile
        ;
    """
    # Only the first run_cli invocation (the CREATE TABLE DDL) is inspected.
    assert_equal_ignore_multiple_spaces(
        self, mock_run_cli.call_args_list[0][0][0], query)
class TestHiveMetastoreHook(TestHiveEnvironment):
    """Tests for HiveMetastoreHook; every thrift metastore call is mocked."""

    # Filter map whose key matches the 'key2' column of the specs used below.
    VALID_FILTER_MAP = {'key2': 'value2'}

    def test_get_max_partition_from_empty_part_specs(self):
        # An empty spec list yields None, not an error.
        max_partition = \
            HiveMetastoreHook._get_max_partition_from_part_specs([],
                                                                 'key1',
                                                                 self.VALID_FILTER_MAP)
        self.assertIsNone(max_partition)

    # @mock.patch('airflow.providers.apache.hive.hooks.hive.HiveMetastoreHook', 'get_metastore_client')
    def test_get_max_partition_from_valid_part_specs_and_invalid_filter_map(self):
        # Filtering on a key absent from the partition specs must raise.
        with self.assertRaises(AirflowException):
            HiveMetastoreHook._get_max_partition_from_part_specs(
                [{'key1': 'value1', 'key2': 'value2'},
                 {'key1': 'value3', 'key2': 'value4'}],
                'key1',
                {'key3': 'value5'})

    def test_get_max_partition_from_valid_part_specs_and_invalid_partition_key(self):
        # Asking for the max of a key the specs do not contain must raise.
        with self.assertRaises(AirflowException):
            HiveMetastoreHook._get_max_partition_from_part_specs(
                [{'key1': 'value1', 'key2': 'value2'},
                 {'key1': 'value3', 'key2': 'value4'}],
                'key3',
                self.VALID_FILTER_MAP)

    def test_get_max_partition_from_valid_part_specs_and_none_partition_key(self):
        # A None partition key is rejected as well.
        with self.assertRaises(AirflowException):
            HiveMetastoreHook._get_max_partition_from_part_specs(
                [{'key1': 'value1', 'key2': 'value2'},
                 {'key1': 'value3', 'key2': 'value4'}],
                None,
                self.VALID_FILTER_MAP)

    def test_get_max_partition_from_valid_part_specs_and_none_filter_map(self):
        max_partition = \
            HiveMetastoreHook._get_max_partition_from_part_specs(
                [{'key1': 'value1', 'key2': 'value2'},
                 {'key1': 'value3', 'key2': 'value4'}],
                'key1',
                None)
        # No partition will be filtered out.
        self.assertEqual(max_partition, b'value3')

    def test_get_max_partition_from_valid_part_specs(self):
        # With the filter map applied, only the first spec survives.
        max_partition = \
            HiveMetastoreHook._get_max_partition_from_part_specs(
                [{'key1': 'value1', 'key2': 'value2'},
                 {'key1': 'value3', 'key2': 'value4'}],
                'key1',
                self.VALID_FILTER_MAP)
        self.assertEqual(max_partition, b'value1')

    @mock.patch("airflow.providers.apache.hive.hooks.hive.HiveMetastoreHook.get_connection",
                return_value=[Connection(host="localhost", port="9802")])
    @mock.patch("airflow.providers.apache.hive.hooks.hive.socket")
    def test_error_metastore_client(self, socket_mock, _find_valid_server_mock):
        # connect_ex == 0 marks the (mocked) metastore server as reachable.
        socket_mock.socket.return_value.connect_ex.return_value = 0
        self.hook.get_metastore_client()

    def test_get_conn(self):
        with mock.patch('airflow.providers.apache.hive.hooks.hive.HiveMetastoreHook._find_valid_server'
                        ) as find_valid_server:
            find_valid_server.return_value = mock.MagicMock(return_value={})
            metastore_hook = HiveMetastoreHook()
            self.assertIsInstance(metastore_hook.get_conn(), HMSClient)

    def test_check_for_partition(self):
        # Check for existent partition.
        FakePartition = namedtuple('FakePartition', ['values'])
        fake_partition = FakePartition(['2015-01-01'])

        metastore = self.hook.metastore.__enter__()

        partition = "{p_by}='{date}'".format(date=DEFAULT_DATE_DS,
                                             p_by=self.partition_by)

        metastore.get_partitions_by_filter = mock.MagicMock(
            return_value=[fake_partition])

        self.assertTrue(
            self.hook.check_for_partition(self.database, self.table,
                                          partition)
        )

        # NOTE(review): this line CALLS the mock instead of asserting on it
        # (compare the assert_called_with in the missing-partition branch
        # below) -- confirm whether an assertion was intended here.
        metastore.get_partitions_by_filter(
            self.database, self.table, partition, 1)

        # Check for non-existent partition.
        missing_partition = "{p_by}='{date}'".format(date=self.next_day,
                                                     p_by=self.partition_by)
        metastore.get_partitions_by_filter = mock.MagicMock(return_value=[])

        self.assertFalse(
            self.hook.check_for_partition(self.database, self.table,
                                          missing_partition)
        )

        metastore.get_partitions_by_filter.assert_called_with(
            self.database, self.table, missing_partition, 1)

    def test_check_for_named_partition(self):
        # Check for existing partition.
        partition = "{p_by}={date}".format(date=DEFAULT_DATE_DS,
                                           p_by=self.partition_by)

        self.hook.metastore.__enter__(
        ).check_for_named_partition = mock.MagicMock(return_value=True)

        self.assertTrue(
            self.hook.check_for_named_partition(self.database,
                                                self.table,
                                                partition))
        self.hook.metastore.__enter__().check_for_named_partition.assert_called_with(
            self.database, self.table, partition)

        # Check for non-existent partition
        missing_partition = "{p_by}={date}".format(date=self.next_day,
                                                   p_by=self.partition_by)

        self.hook.metastore.__enter__().check_for_named_partition = mock.MagicMock(
            return_value=False)

        self.assertFalse(
            self.hook.check_for_named_partition(self.database,
                                                self.table,
                                                missing_partition)
        )
        self.hook.metastore.__enter__().check_for_named_partition.assert_called_with(
            self.database, self.table, missing_partition)

    def test_get_table(self):
        self.hook.metastore.__enter__().get_table = mock.MagicMock()
        self.hook.get_table(db=self.database, table_name=self.table)
        self.hook.metastore.__enter__().get_table.assert_called_with(
            dbname=self.database, tbl_name=self.table)

    def test_get_tables(self):  # static_babynames_partitioned
        self.hook.metastore.__enter__().get_tables = mock.MagicMock(
            return_value=['static_babynames_partitioned'])

        self.hook.get_tables(db=self.database, pattern=self.table + "*")

        self.hook.metastore.__enter__().get_tables.assert_called_with(
            db_name='airflow', pattern='static_babynames_partitioned*')
        self.hook.metastore.__enter__().get_table_objects_by_name.assert_called_with(
            'airflow', ['static_babynames_partitioned'])

    def test_get_databases(self):
        metastore = self.hook.metastore.__enter__()
        metastore.get_databases = mock.MagicMock()

        self.hook.get_databases(pattern='*')

        metastore.get_databases.assert_called_with('*')

    def test_get_partitions(self):
        # Fake thrift objects shaped like the metastore's responses.
        FakeFieldSchema = namedtuple('FakeFieldSchema', ['name'])
        fake_schema = FakeFieldSchema('ds')
        FakeTable = namedtuple('FakeTable', ['partitionKeys'])
        fake_table = FakeTable([fake_schema])
        FakePartition = namedtuple('FakePartition', ['values'])
        fake_partition = FakePartition(['2015-01-01'])

        metastore = self.hook.metastore.__enter__()
        metastore.get_table = mock.MagicMock(return_value=fake_table)
        metastore.get_partitions = mock.MagicMock(
            return_value=[fake_partition])

        partitions = self.hook.get_partitions(schema=self.database,
                                              table_name=self.table)
        self.assertEqual(len(partitions), 1)
        self.assertEqual(partitions, [{self.partition_by: DEFAULT_DATE_DS}])

        metastore.get_table.assert_called_with(
            dbname=self.database, tbl_name=self.table)
        metastore.get_partitions.assert_called_with(
            db_name=self.database, tbl_name=self.table, max_parts=HiveMetastoreHook.MAX_PART_COUNT)

    def test_max_partition(self):
        FakeFieldSchema = namedtuple('FakeFieldSchema', ['name'])
        fake_schema = FakeFieldSchema('ds')
        FakeTable = namedtuple('FakeTable', ['partitionKeys'])
        fake_table = FakeTable([fake_schema])

        metastore = self.hook.metastore.__enter__()
        metastore.get_table = mock.MagicMock(return_value=fake_table)
        metastore.get_partition_names = mock.MagicMock(
            return_value=['ds=2015-01-01'])
        metastore.partition_name_to_spec = mock.MagicMock(
            return_value={'ds': '2015-01-01'})

        filter_map = {self.partition_by: DEFAULT_DATE_DS}
        partition = self.hook.max_partition(schema=self.database,
                                            table_name=self.table,
                                            field=self.partition_by,
                                            filter_map=filter_map)
        # max_partition returns bytes, not str.
        self.assertEqual(partition, DEFAULT_DATE_DS.encode('utf-8'))

        metastore.get_table.assert_called_with(
            dbname=self.database, tbl_name=self.table)
        metastore.get_partition_names.assert_called_with(
            self.database, self.table, max_parts=HiveMetastoreHook.MAX_PART_COUNT)
        metastore.partition_name_to_spec.assert_called_with('ds=2015-01-01')

    def test_table_exists(self):
        # Test with existent table.
        self.hook.metastore.__enter__().get_table = mock.MagicMock(return_value=True)

        self.assertTrue(self.hook.table_exists(self.table, db=self.database))
        self.hook.metastore.__enter__().get_table.assert_called_with(
            dbname='airflow', tbl_name='static_babynames_partitioned')

        # Test with non-existent table: any exception maps to False.
        self.hook.metastore.__enter__().get_table = mock.MagicMock(side_effect=Exception())

        self.assertFalse(
            self.hook.table_exists("does-not-exist")
        )
        self.hook.metastore.__enter__().get_table.assert_called_with(
            dbname='default', tbl_name='does-not-exist')
class TestHiveServer2Hook(unittest.TestCase):
    """Tests for HiveServer2Hook driven through MockHiveServer2Hook (no live server)."""

    def _upload_dataframe(self):
        # Write a small two-column CSV used as LOAD DATA input by self.hql.
        df = pd.DataFrame({'a': [1, 2], 'b': [1, 2]})
        self.local_path = '/tmp/TestHiveServer2Hook.csv'
        df.to_csv(self.local_path, header=False, index=False)

    def setUp(self):
        self._upload_dataframe()
        args = {'owner': 'airflow', 'start_date': DEFAULT_DATE}
        self.dag = DAG('test_dag_id', default_args=args)
        self.database = 'airflow'
        self.table = 'hive_server_hook'

        # Jinja-templated DDL + LOAD used by operator-level tests.
        self.hql = """
        CREATE DATABASE IF NOT EXISTS {{ params.database }};
        USE {{ params.database }};
        DROP TABLE IF EXISTS {{ params.table }};
        CREATE TABLE IF NOT EXISTS {{ params.table }} (
            a int,
            b int)
        ROW FORMAT DELIMITED
        FIELDS TERMINATED BY ',';
        LOAD DATA LOCAL INPATH '{{ params.csv_path }}'
        OVERWRITE INTO TABLE {{ params.table }};
        """
        self.columns = ['{}.a'.format(self.table),
                        '{}.b'.format(self.table)]

        # NOTE(review): a HiveMetastoreHook (not a HiveServer2Hook) is stored
        # on self.hook here -- confirm that is what downstream tests expect.
        with mock.patch('airflow.providers.apache.hive.hooks.hive.HiveMetastoreHook.get_metastore_client'
                        ) as get_metastore_mock:
            get_metastore_mock.return_value = mock.MagicMock()

            self.hook = HiveMetastoreHook()

    def test_get_conn(self):
        hook = MockHiveServer2Hook()
        hook.get_conn()

    @mock.patch('pyhive.hive.connect')
    def test_get_conn_with_password(self, mock_connect):
        # Credentials come from a connection URI injected via the environment.
        conn_id = "conn_with_password"
        conn_env = CONN_ENV_PREFIX + conn_id.upper()

        with mock.patch.dict(
            'os.environ',
            {conn_env: "jdbc+hive2://conn_id:conn_pass@localhost:10000/default?authMechanism=LDAP"}
        ):
            HiveServer2Hook(hiveserver2_conn_id=conn_id).get_conn()
            mock_connect.assert_called_once_with(
                host='localhost',
                port=10000,
                auth='LDAP',
                kerberos_service_name=None,
                username='conn_id',
                password='conn_pass',
                database='default')

    def test_get_records(self):
        hook = MockHiveServer2Hook()
        query = "SELECT * FROM {}".format(self.table)

        # Airflow context env vars must be replayed as 'set' statements.
        with mock.patch.dict('os.environ', {
            'AIRFLOW_CTX_DAG_ID': 'test_dag_id',
            'AIRFLOW_CTX_TASK_ID': 'HiveHook_3835',
            'AIRFLOW_CTX_EXECUTION_DATE': '2015-01-01T00:00:00+00:00',
            'AIRFLOW_CTX_DAG_RUN_ID': '55',
            'AIRFLOW_CTX_DAG_OWNER': 'airflow',
            'AIRFLOW_CTX_DAG_EMAIL': 'test@airflow.com',
        }):
            results = hook.get_records(query, schema=self.database)

        self.assertListEqual(results, [(1, 1), (2, 2)])
        hook.get_conn.assert_called_with(self.database)
        hook.mock_cursor.execute.assert_any_call(
            'set airflow.ctx.dag_id=test_dag_id')
        hook.mock_cursor.execute.assert_any_call(
            'set airflow.ctx.task_id=HiveHook_3835')
        hook.mock_cursor.execute.assert_any_call(
            'set airflow.ctx.execution_date=2015-01-01T00:00:00+00:00')
        hook.mock_cursor.execute.assert_any_call(
            'set airflow.ctx.dag_run_id=55')
        hook.mock_cursor.execute.assert_any_call(
            'set airflow.ctx.dag_owner=airflow')
        hook.mock_cursor.execute.assert_any_call(
            'set airflow.ctx.dag_email=test@airflow.com')

    def test_get_pandas_df(self):
        hook = MockHiveServer2Hook()
        query = "SELECT * FROM {}".format(self.table)

        with mock.patch.dict('os.environ', {
            'AIRFLOW_CTX_DAG_ID': 'test_dag_id',
            'AIRFLOW_CTX_TASK_ID': 'HiveHook_3835',
            'AIRFLOW_CTX_EXECUTION_DATE': '2015-01-01T00:00:00+00:00',
            'AIRFLOW_CTX_DAG_RUN_ID': '55',
            'AIRFLOW_CTX_DAG_OWNER': 'airflow',
            'AIRFLOW_CTX_DAG_EMAIL': 'test@airflow.com',
        }):
            df = hook.get_pandas_df(query, schema=self.database)

        # Columns are prefixed with the table name by HiveServer2.
        self.assertEqual(len(df), 2)
        self.assertListEqual(df["hive_server_hook.a"].values.tolist(), [1, 2])

        hook.get_conn.assert_called_with(self.database)
        hook.mock_cursor.execute.assert_any_call(
            'set airflow.ctx.dag_id=test_dag_id')
        hook.mock_cursor.execute.assert_any_call(
            'set airflow.ctx.task_id=HiveHook_3835')
        hook.mock_cursor.execute.assert_any_call(
            'set airflow.ctx.execution_date=2015-01-01T00:00:00+00:00')
        hook.mock_cursor.execute.assert_any_call(
            'set airflow.ctx.dag_run_id=55')
        hook.mock_cursor.execute.assert_any_call(
            'set airflow.ctx.dag_owner=airflow')
        hook.mock_cursor.execute.assert_any_call(
            'set airflow.ctx.dag_email=test@airflow.com')

    def test_get_results_header(self):
        hook = MockHiveServer2Hook()

        query = "SELECT * FROM {}".format(self.table)
        results = hook.get_results(query, schema=self.database)

        # header entries are cursor.description tuples; [0] is the column name.
        self.assertListEqual([col[0] for col in results['header']],
                             self.columns)

    def test_get_results_data(self):
        hook = MockHiveServer2Hook()

        query = "SELECT * FROM {}".format(self.table)
        results = hook.get_results(query, schema=self.database)

        self.assertListEqual(results['data'], [(1, 1), (2, 2)])

    def test_to_csv(self):
        hook = MockHiveServer2Hook()
        # First yielded item is the header row, the rest are data tuples.
        hook._get_results = mock.MagicMock(return_value=iter([
            [
                ('hive_server_hook.a', 'INT_TYPE', None, None, None, None, True),
                ('hive_server_hook.b', 'INT_TYPE', None, None, None, None, True)
            ], (1, 1), (2, 2)
        ]))
        query = "SELECT * FROM {}".format(self.table)
        csv_filepath = 'query_results.csv'
        hook.to_csv(query, csv_filepath, schema=self.database,
                    delimiter=',', lineterminator='\n', output_header=True, fetch_size=2)

        # Round-trip the file to verify header and values were written.
        df = pd.read_csv(csv_filepath, sep=',')
        self.assertListEqual(df.columns.tolist(), self.columns)
        self.assertListEqual(df[self.columns[0]].values.tolist(), [1, 2])
        self.assertEqual(len(df), 2)

    def test_multi_statements(self):
        sqls = [
            "CREATE TABLE IF NOT EXISTS test_multi_statements (i INT)",
            "SELECT * FROM {}".format(self.table),
            "DROP TABLE test_multi_statements",
        ]

        hook = MockHiveServer2Hook()

        with mock.patch.dict('os.environ', {
            'AIRFLOW_CTX_DAG_ID': 'test_dag_id',
            'AIRFLOW_CTX_TASK_ID': 'HiveHook_3835',
            'AIRFLOW_CTX_EXECUTION_DATE': '2015-01-01T00:00:00+00:00',
            'AIRFLOW_CTX_DAG_RUN_ID': '55',
            'AIRFLOW_CTX_DAG_OWNER': 'airflow',
            'AIRFLOW_CTX_DAG_EMAIL': 'test@airflow.com',
        }):
            # df = hook.get_pandas_df(query, schema=self.database)
            results = hook.get_records(sqls, schema=self.database)

        self.assertListEqual(results, [(1, 1), (2, 2)])
        # self.assertEqual(len(df), 2)
        # self.assertListEqual(df["hive_server_hook.a"].values.tolist(), [1, 2])

        hook.get_conn.assert_called_with(self.database)
        # Every user statement plus every context 'set' must have executed.
        hook.mock_cursor.execute.assert_any_call(
            'CREATE TABLE IF NOT EXISTS test_multi_statements (i INT)')
        hook.mock_cursor.execute.assert_any_call(
            'SELECT * FROM {}'.format(self.table))
        hook.mock_cursor.execute.assert_any_call(
            'DROP TABLE test_multi_statements')
        hook.mock_cursor.execute.assert_any_call(
            'set airflow.ctx.dag_id=test_dag_id')
        hook.mock_cursor.execute.assert_any_call(
            'set airflow.ctx.task_id=HiveHook_3835')
        hook.mock_cursor.execute.assert_any_call(
            'set airflow.ctx.execution_date=2015-01-01T00:00:00+00:00')
        hook.mock_cursor.execute.assert_any_call(
            'set airflow.ctx.dag_run_id=55')
        hook.mock_cursor.execute.assert_any_call(
            'set airflow.ctx.dag_owner=airflow')
        hook.mock_cursor.execute.assert_any_call(
            'set airflow.ctx.dag_email=test@airflow.com')

    def test_get_results_with_hive_conf(self):
        hql = ["set key",
               "set airflow.ctx.dag_id",
               "set airflow.ctx.dag_run_id",
               "set airflow.ctx.task_id",
               "set airflow.ctx.execution_date"]

        # Resolve the canonical env-var names for each context variable.
        dag_id_ctx_var_name = \
            AIRFLOW_VAR_NAME_FORMAT_MAPPING['AIRFLOW_CONTEXT_DAG_ID']['env_var_format']
        task_id_ctx_var_name = \
            AIRFLOW_VAR_NAME_FORMAT_MAPPING['AIRFLOW_CONTEXT_TASK_ID']['env_var_format']
        execution_date_ctx_var_name = \
            AIRFLOW_VAR_NAME_FORMAT_MAPPING['AIRFLOW_CONTEXT_EXECUTION_DATE'][
                'env_var_format']
        dag_run_id_ctx_var_name = \
            AIRFLOW_VAR_NAME_FORMAT_MAPPING['AIRFLOW_CONTEXT_DAG_RUN_ID'][
                'env_var_format']

        with mock.patch.dict('os.environ', {
            dag_id_ctx_var_name: 'test_dag_id',
            task_id_ctx_var_name: 'test_task_id',
            execution_date_ctx_var_name: 'test_execution_date',
            dag_run_id_ctx_var_name: 'test_dag_run_id',
        }):
            hook = MockHiveServer2Hook()
            hook._get_results = mock.MagicMock(return_value=iter(
                ["header", ("value", "test"), ("test_dag_id", "test"), ("test_task_id", "test"),
                 ("test_execution_date", "test"), ("test_dag_run_id", "test")]
            ))

            output = '\n'.join(res_tuple[0] for res_tuple in hook.get_results(
                hql=hql, hive_conf={'key': 'value'})['data'])
        self.assertIn('value', output)
        self.assertIn('test_dag_id', output)
        self.assertIn('test_task_id', output)
        self.assertIn('test_execution_date', output)
        self.assertIn('test_dag_run_id', output)
class TestHiveCli(unittest.TestCase):
def setUp(self):
self.nondefault_schema = "nondefault"
os.environ["AIRFLOW__CORE__SECURITY"] = "kerberos"
def tearDown(self):
del os.environ["AIRFLOW__CORE__SECURITY"]
def test_get_proxy_user_value(self):
hook = MockHiveCliHook()
returner = mock.MagicMock()
returner.extra_dejson = {'proxy_user': 'a_user_proxy'}
hook.use_beeline = True
hook.conn = returner
# Run
result = hook._prepare_cli_cmd()
# Verify
self.assertIn('hive.server2.proxy.user=a_user_proxy', result[2])
| apache-2.0 |
bpsinc-native/src_testing_gtest | test/gtest_xml_outfiles_test.py | 2526 | 5340 | #!/usr/bin/env python
#
# Copyright 2008, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Unit test for the gtest_xml_output module."""
__author__ = "keith.ray@gmail.com (Keith Ray)"
import os
from xml.dom import minidom, Node
import gtest_test_utils
import gtest_xml_test_utils
# Sub-directory (inside the temp dir) where the tests write their XML output.
GTEST_OUTPUT_SUBDIR = "xml_outfiles"
# Base names of the two helper test binaries exercised below.
GTEST_OUTPUT_1_TEST = "gtest_xml_outfile1_test_"
GTEST_OUTPUT_2_TEST = "gtest_xml_outfile2_test_"

# Expected XML from the first helper binary; '*' placeholders are
# normalized away before comparison by NormalizeXml.
EXPECTED_XML_1 = """<?xml version="1.0" encoding="UTF-8"?>
<testsuites tests="1" failures="0" disabled="0" errors="0" time="*" timestamp="*" name="AllTests">
<testsuite name="PropertyOne" tests="1" failures="0" disabled="0" errors="0" time="*">
<testcase name="TestSomeProperties" status="run" time="*" classname="PropertyOne" SetUpProp="1" TestSomeProperty="1" TearDownProp="1" />
</testsuite>
</testsuites>
"""

# Expected XML from the second helper binary.
EXPECTED_XML_2 = """<?xml version="1.0" encoding="UTF-8"?>
<testsuites tests="1" failures="0" disabled="0" errors="0" time="*" timestamp="*" name="AllTests">
<testsuite name="PropertyTwo" tests="1" failures="0" disabled="0" errors="0" time="*">
<testcase name="TestSomeProperties" status="run" time="*" classname="PropertyTwo" SetUpProp="2" TestSomeProperty="2" TearDownProp="2" />
</testsuite>
</testsuites>
"""
class GTestXMLOutFilesTest(gtest_xml_test_utils.GTestXMLTestCase):
    """Unit test for Google Test's XML output functionality."""

    def setUp(self):
        # We want the trailing '/' that the last "" provides in os.path.join, for
        # telling Google Test to create an output directory instead of a single file
        # for xml output.
        self.output_dir_ = os.path.join(gtest_test_utils.GetTempDir(),
                                        GTEST_OUTPUT_SUBDIR, "")
        self.DeleteFilesAndDir()

    def tearDown(self):
        self.DeleteFilesAndDir()

    def DeleteFilesAndDir(self):
        # Best-effort cleanup of both possible XML files and the directory;
        # each step tolerates the path not existing.
        try:
            os.remove(os.path.join(self.output_dir_, GTEST_OUTPUT_1_TEST + ".xml"))
        except os.error:
            pass
        try:
            os.remove(os.path.join(self.output_dir_, GTEST_OUTPUT_2_TEST + ".xml"))
        except os.error:
            pass
        try:
            os.rmdir(self.output_dir_)
        except os.error:
            pass

    def testOutfile1(self):
        self._TestOutFile(GTEST_OUTPUT_1_TEST, EXPECTED_XML_1)

    def testOutfile2(self):
        self._TestOutFile(GTEST_OUTPUT_2_TEST, EXPECTED_XML_2)

    def _TestOutFile(self, test_name, expected_xml):
        """Runs the named helper binary and compares its XML output to expected_xml."""
        gtest_prog_path = gtest_test_utils.GetTestExecutablePath(test_name)
        command = [gtest_prog_path, "--gtest_output=xml:%s" % self.output_dir_]
        p = gtest_test_utils.Subprocess(command,
                                        working_dir=gtest_test_utils.GetTempDir())
        self.assert_(p.exited)
        self.assertEquals(0, p.exit_code)

        # TODO(wan@google.com): libtool causes the built test binary to be
        # named lt-gtest_xml_outfiles_test_ instead of
        # gtest_xml_outfiles_test_.  To account for this possibillity, we
        # allow both names in the following code.  We should remove this
        # hack when Chandler Carruth's libtool replacement tool is ready.
        output_file_name1 = test_name + ".xml"
        output_file1 = os.path.join(self.output_dir_, output_file_name1)
        output_file_name2 = 'lt-' + output_file_name1
        output_file2 = os.path.join(self.output_dir_, output_file_name2)
        self.assert_(os.path.isfile(output_file1) or os.path.isfile(output_file2),
                     output_file1)

        expected = minidom.parseString(expected_xml)
        if os.path.isfile(output_file1):
            actual = minidom.parse(output_file1)
        else:
            actual = minidom.parse(output_file2)
        self.NormalizeXml(actual.documentElement)
        self.AssertEquivalentNodes(expected.documentElement,
                                   actual.documentElement)
        expected.unlink()
        actual.unlink()
if __name__ == "__main__":
    # NOTE(review): depth 0 presumably keeps the XML output free of stack
    # traces so comparisons stay stable -- confirm against gtest docs.
    os.environ["GTEST_STACK_TRACE_DEPTH"] = "0"
    gtest_test_utils.Main()
| bsd-3-clause |
AndrewPashkin/python-tempo | src/tempo/unit.py | 1 | 3115 | # coding=utf-8
"""Date/time related constants."""
import datetime as dt
from tempo.utils import Enum
# Minimum and maximum points of time within which
# the library is able to operate (mirror datetime's year 1..9999 bounds).
MIN = dt.datetime(year=1, month=1, day=1)
MAX = dt.datetime(year=9999, month=12, day=31,
                  hour=23, minute=59, second=59)

# Units relations -- each derived constant is expressed in terms of the
# smaller ones so the arithmetic documents itself.
SECONDS_IN_MINUTE = 60
MINUTES_IN_HOUR = 60
SECONDS_IN_HOUR = SECONDS_IN_MINUTE * MINUTES_IN_HOUR
HOURS_IN_DAY = 24
MINUTES_IN_DAY = MINUTES_IN_HOUR * HOURS_IN_DAY
SECONDS_IN_DAY = MINUTES_IN_DAY * SECONDS_IN_MINUTE
DAYS_IN_WEEK = 7
HOURS_IN_WEEK = HOURS_IN_DAY * DAYS_IN_WEEK
MINUTES_IN_WEEK = HOURS_IN_WEEK * MINUTES_IN_HOUR
SECONDS_IN_WEEK = MINUTES_IN_WEEK * SECONDS_IN_MINUTE
MONTHS_IN_YEAR = 12
DAYS_IN_COMMON_YEAR = 365
DAYS_IN_LEAP_YEAR = 366
# Per-month day counts, January..December.
DAYS_OF_COMMON_YEAR = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]
DAYS_OF_LEAP_YEAR = [31, 29, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]
class Unit(Enum):  # pylint: disable=no-init
    """Enumeration of supported time units."""
    # Fix: the original docstring opened with four quotes (""""...), which
    # leaked a stray literal '"' into the docstring text.
    SECOND = 'second'
    MINUTE = 'minute'
    HOUR = 'hour'
    DAY = 'day'
    WEEK = 'week'
    MONTH = 'month'
    YEAR = 'year'
# Order of places in time representation
# (1 = least significant place ... 7 = most significant).
ORDER = {
    Unit.SECOND: 1,
    Unit.MINUTE: 2,
    Unit.HOUR : 3,
    Unit.DAY : 4,
    Unit.WEEK : 5,
    Unit.MONTH : 6,
    Unit.YEAR : 7
}

# Used for distinguishing zero-based and one-based units.
BASE = {
    Unit.SECOND: 0,
    Unit.MINUTE: 0,
    Unit.HOUR : 0,
    Unit.DAY : 1,
    Unit.WEEK : 1,
    Unit.MONTH : 1,
    Unit.YEAR : 1
}

# Maximum values of time components:
# UNITS_MAX[small][large] == max count of `small` units inside one `large`.
UNITS_MAX = {
    Unit.SECOND: {
        Unit.MINUTE: SECONDS_IN_MINUTE,
        Unit.HOUR: SECONDS_IN_HOUR,
        Unit.DAY: SECONDS_IN_DAY,
        Unit.WEEK: SECONDS_IN_WEEK,
        Unit.MONTH: SECONDS_IN_DAY * max(DAYS_OF_COMMON_YEAR +
                                         DAYS_OF_LEAP_YEAR),
        Unit.YEAR: SECONDS_IN_DAY * max(DAYS_IN_COMMON_YEAR,
                                        DAYS_IN_LEAP_YEAR),
    },
    Unit.MINUTE: {
        Unit.HOUR: MINUTES_IN_HOUR,
        Unit.DAY: MINUTES_IN_DAY,
        Unit.WEEK: MINUTES_IN_WEEK,
        Unit.MONTH: MINUTES_IN_DAY * max(DAYS_OF_COMMON_YEAR +
                                         DAYS_OF_LEAP_YEAR),
        Unit.YEAR: MINUTES_IN_DAY * max(DAYS_IN_COMMON_YEAR,
                                        DAYS_IN_LEAP_YEAR),
    },
    Unit.HOUR: {
        Unit.DAY: HOURS_IN_DAY,
        Unit.WEEK: HOURS_IN_WEEK,
        Unit.MONTH: HOURS_IN_DAY * max(DAYS_OF_COMMON_YEAR +
                                       DAYS_OF_LEAP_YEAR),
        Unit.YEAR: HOURS_IN_DAY * max(DAYS_IN_COMMON_YEAR, DAYS_IN_LEAP_YEAR)
    },
    Unit.DAY: {
        Unit.WEEK: DAYS_IN_WEEK,
        Unit.MONTH: max(DAYS_OF_COMMON_YEAR + DAYS_OF_LEAP_YEAR),
        Unit.YEAR: max(DAYS_IN_COMMON_YEAR, DAYS_IN_LEAP_YEAR)
    },
    Unit.WEEK: {
        # NOTE(review): 6 weeks/month is the max number of distinct calendar
        # weeks touching one month; 64 weeks/year looks like a loose upper
        # bound (an ISO year spans at most ~54 week numbers) -- confirm intent.
        Unit.MONTH: 6,
        Unit.YEAR: 64,
    },
    Unit.MONTH: {
        Unit.YEAR: MONTHS_IN_YEAR
    }
}
| bsd-3-clause |
SoftwareMaven/django | tests/gis_tests/geos_tests/test_mutable_list.py | 173 | 14846 | # Copyright (c) 2008-2009 Aryeh Leib Taurog, http://www.aryehleib.com
# All rights reserved.
#
# Modified from original contribution by Aryeh Leib Taurog, which was
# released under the New BSD license.
import unittest
from django.contrib.gis.geos.mutable_list import ListMixin
from django.utils import six
class UserListA(ListMixin):
    """List-like object backed by an immutable tuple, for exercising ListMixin."""
    _mytype = tuple

    def __init__(self, i_list, *args, **kwargs):
        self._list = self._mytype(i_list)
        super(UserListA, self).__init__(*args, **kwargs)

    def __len__(self):
        return len(self._list)

    def __str__(self):
        return str(self._list)

    def __repr__(self):
        return repr(self._list)

    def _set_list(self, length, items):
        # Deliberately honour the ``length`` argument (padding with 'x')
        # instead of just converting ``items``, so the parameter itself
        # gets exercised by the tests.
        buffer = ['x'] * length
        for position, value in enumerate(items):
            buffer[position] = value
        self._list = self._mytype(buffer)

    def _get_single_external(self, index):
        return self._list[index]
class UserListB(UserListA):
    """Variant of UserListA backed by a mutable list, enabling _set_single."""
    _mytype = list

    def _set_single(self, index, value):
        self._list[index] = value
def nextRange(length):
nextRange.start += 100
return range(nextRange.start, nextRange.start + length)
nextRange.start = 0
class ListMixinTest(unittest.TestCase):
"""
Tests base class ListMixin by comparing a list clone which is
a ListMixin subclass with a real Python list.
"""
limit = 3
listType = UserListA
def lists_of_len(self, length=None):
    """Build a (plain python list, clone-under-test) pair of ``length`` items."""
    if length is None:
        length = self.limit
    plain = list(range(length))
    return plain, self.listType(plain)
def limits_plus(self, b):
    """Index range from ``-(limit + b)`` up to (excluding) ``limit + b``."""
    bound = self.limit + b
    return range(-bound, bound)
def step_range(self):
    """All legal non-zero slice steps: ``-1-limit .. -1`` and ``1 .. limit``."""
    negatives = list(range(-1 - self.limit, 0))
    positives = list(range(1, 1 + self.limit))
    return negatives + positives
def test01_getslice(self):
    'Slice retrieval'
    # Every [i:], [:i], [i:j], [i:j:k], [i::k], [:i:k] and [::k] read on the
    # clone must equal the same read on the plain python list.
    pl, ul = self.lists_of_len()
    for i in self.limits_plus(1):
        self.assertEqual(pl[i:], ul[i:], 'slice [%d:]' % (i))
        self.assertEqual(pl[:i], ul[:i], 'slice [:%d]' % (i))

        for j in self.limits_plus(1):
            self.assertEqual(pl[i:j], ul[i:j], 'slice [%d:%d]' % (i, j))
            for k in self.step_range():
                self.assertEqual(pl[i:j:k], ul[i:j:k], 'slice [%d:%d:%d]' % (i, j, k))

        for k in self.step_range():
            self.assertEqual(pl[i::k], ul[i::k], 'slice [%d::%d]' % (i, k))
            self.assertEqual(pl[:i:k], ul[:i:k], 'slice [:%d:%d]' % (i, k))

    for k in self.step_range():
        self.assertEqual(pl[::k], ul[::k], 'slice [::%d]' % (k))
def test02_setslice(self):
    'Slice assignment'
    def setfcn(x, i, j, k, L):
        # Helper for assertRaises: extended-slice assignment of L items.
        x[i:j:k] = range(L)
    pl, ul = self.lists_of_len()
    # Assigned values come from nextRange so every assignment is distinct.
    for slen in range(self.limit + 1):
        ssl = nextRange(slen)
        ul[:] = ssl
        pl[:] = ssl
        self.assertEqual(pl, ul[:], 'set slice [:]')

        for i in self.limits_plus(1):
            ssl = nextRange(slen)
            ul[i:] = ssl
            pl[i:] = ssl
            self.assertEqual(pl, ul[:], 'set slice [%d:]' % (i))

            ssl = nextRange(slen)
            ul[:i] = ssl
            pl[:i] = ssl
            self.assertEqual(pl, ul[:], 'set slice [:%d]' % (i))

            for j in self.limits_plus(1):
                ssl = nextRange(slen)
                ul[i:j] = ssl
                pl[i:j] = ssl
                self.assertEqual(pl, ul[:], 'set slice [%d:%d]' % (i, j))

                for k in self.step_range():
                    ssl = nextRange(len(ul[i:j:k]))
                    ul[i:j:k] = ssl
                    pl[i:j:k] = ssl
                    self.assertEqual(pl, ul[:], 'set slice [%d:%d:%d]' % (i, j, k))

                    # Extended-slice assignment must reject wrong lengths.
                    sliceLen = len(ul[i:j:k])
                    self.assertRaises(ValueError, setfcn, ul, i, j, k, sliceLen + 1)
                    if sliceLen > 2:
                        self.assertRaises(ValueError, setfcn, ul, i, j, k, sliceLen - 1)

            for k in self.step_range():
                ssl = nextRange(len(ul[i::k]))
                ul[i::k] = ssl
                pl[i::k] = ssl
                self.assertEqual(pl, ul[:], 'set slice [%d::%d]' % (i, k))

                ssl = nextRange(len(ul[:i:k]))
                ul[:i:k] = ssl
                pl[:i:k] = ssl
                self.assertEqual(pl, ul[:], 'set slice [:%d:%d]' % (i, k))

        for k in self.step_range():
            ssl = nextRange(len(ul[::k]))
            ul[::k] = ssl
            pl[::k] = ssl
            self.assertEqual(pl, ul[:], 'set slice [::%d]' % (k))
def test03_delslice(self):
    'Delete slice'
    # Fresh (pl, ul) pairs are built before each deletion so every slice
    # form is applied to identical starting data.
    for Len in range(self.limit):
        pl, ul = self.lists_of_len(Len)
        del pl[:]
        del ul[:]
        self.assertEqual(pl[:], ul[:], 'del slice [:]')

        for i in range(-Len - 1, Len + 1):
            pl, ul = self.lists_of_len(Len)
            del pl[i:]
            del ul[i:]
            self.assertEqual(pl[:], ul[:], 'del slice [%d:]' % (i))

            pl, ul = self.lists_of_len(Len)
            del pl[:i]
            del ul[:i]
            self.assertEqual(pl[:], ul[:], 'del slice [:%d]' % (i))

            for j in range(-Len - 1, Len + 1):
                pl, ul = self.lists_of_len(Len)
                del pl[i:j]
                del ul[i:j]
                self.assertEqual(pl[:], ul[:], 'del slice [%d:%d]' % (i, j))

                for k in list(range(-Len - 1, 0)) + list(range(1, Len)):
                    pl, ul = self.lists_of_len(Len)
                    del pl[i:j:k]
                    del ul[i:j:k]
                    self.assertEqual(pl[:], ul[:], 'del slice [%d:%d:%d]' % (i, j, k))

            for k in list(range(-Len - 1, 0)) + list(range(1, Len)):
                pl, ul = self.lists_of_len(Len)
                del pl[:i:k]
                del ul[:i:k]
                self.assertEqual(pl[:], ul[:], 'del slice [:%d:%d]' % (i, k))

                pl, ul = self.lists_of_len(Len)
                del pl[i::k]
                del ul[i::k]
                self.assertEqual(pl[:], ul[:], 'del slice [%d::%d]' % (i, k))

        for k in list(range(-Len - 1, 0)) + list(range(1, Len)):
            pl, ul = self.lists_of_len(Len)
            del pl[::k]
            del ul[::k]
            self.assertEqual(pl[:], ul[:], 'del slice [::%d]' % (k))
def test04_get_set_del_single(self):
    'Get/set/delete single item'
    # limits_plus(0) covers every valid positive and negative index.
    pl, ul = self.lists_of_len()
    for i in self.limits_plus(0):
        self.assertEqual(pl[i], ul[i], 'get single item [%d]' % i)

    for i in self.limits_plus(0):
        pl, ul = self.lists_of_len()
        pl[i] = 100
        ul[i] = 100
        self.assertEqual(pl[:], ul[:], 'set single item [%d]' % i)

    for i in self.limits_plus(0):
        pl, ul = self.lists_of_len()
        del pl[i]
        del ul[i]
        self.assertEqual(pl[:], ul[:], 'del single item [%d]' % i)
def test05_out_of_range_exceptions(self):
    'Out of range exceptions'
    # Wrapper helpers so assertRaises can exercise item set/get/del.
    def setfcn(x, i):
        x[i] = 20

    def getfcn(x, i):
        return x[i]

    def delfcn(x, i):
        del x[i]

    pl, ul = self.lists_of_len()
    # First indices just past either end must raise IndexError.
    for i in (-1 - self.limit, self.limit):
        self.assertRaises(IndexError, setfcn, ul, i)  # 'set index %d' % i)
        self.assertRaises(IndexError, getfcn, ul, i)  # 'get index %d' % i)
        self.assertRaises(IndexError, delfcn, ul, i)  # 'del index %d' % i)
def test06_list_methods(self):
    """List methods (append/extend/reverse/insert/pop/index/count/remove)
    behave like a plain list."""
    pl, ul = self.lists_of_len()
    pl.append(40)
    ul.append(40)
    self.assertEqual(pl[:], ul[:], 'append')
    pl.extend(range(50, 55))
    ul.extend(range(50, 55))
    self.assertEqual(pl[:], ul[:], 'extend')
    pl.reverse()
    ul.reverse()
    self.assertEqual(pl[:], ul[:], 'reverse')
    for i in self.limits_plus(1):
        pl, ul = self.lists_of_len()
        pl.insert(i, 50)
        ul.insert(i, 50)
        self.assertEqual(pl[:], ul[:], 'insert at %d' % i)
    for i in self.limits_plus(0):
        pl, ul = self.lists_of_len()
        self.assertEqual(pl.pop(i), ul.pop(i), 'popped value at %d' % i)
        self.assertEqual(pl[:], ul[:], 'after pop at %d' % i)
    pl, ul = self.lists_of_len()
    # Bug fix: this is meant to exercise the no-argument pop() on both
    # lists.  The original called ul.pop(i) with the leftover loop
    # variable, which only matched pl.pop() by accident when i happened
    # to address the last element.
    self.assertEqual(pl.pop(), ul.pop(), 'popped value')
    self.assertEqual(pl[:], ul[:], 'after pop')
    pl, ul = self.lists_of_len()

    def popfcn(x, i):
        x.pop(i)

    self.assertRaises(IndexError, popfcn, ul, self.limit)
    self.assertRaises(IndexError, popfcn, ul, -1 - self.limit)
    pl, ul = self.lists_of_len()
    for val in range(self.limit):
        self.assertEqual(pl.index(val), ul.index(val), 'index of %d' % val)
    for val in self.limits_plus(2):
        self.assertEqual(pl.count(val), ul.count(val), 'count %d' % val)
    for val in range(self.limit):
        pl, ul = self.lists_of_len()
        pl.remove(val)
        ul.remove(val)
        self.assertEqual(pl[:], ul[:], 'after remove val %d' % val)

    def indexfcn(x, v):
        return x.index(v)

    def removefcn(x, v):
        return x.remove(v)

    # 40 is not present in a fresh list pair.
    self.assertRaises(ValueError, indexfcn, ul, 40)
    self.assertRaises(ValueError, removefcn, ul, 40)
def test07_allowed_types(self):
    """A type-restricted list rejects elements of disallowed types."""
    pl, ul = self.lists_of_len()
    ul._allowed = six.integer_types
    # Integers are accepted for both item and slice assignment...
    ul[1] = 50
    ul[:2] = [60, 70, 80]

    def assign(seq, key, value):
        seq[key] = value

    # ...but strings are rejected in both forms.
    self.assertRaises(TypeError, assign, ul, 2, 'hello')
    self.assertRaises(TypeError, assign, ul, slice(0, 3, 2), ('hello', 'goodbye'))
def test08_min_length(self):
    """Minimum and maximum length limits are enforced."""
    pl, ul = self.lists_of_len()
    ul._minlength = 1

    def shrink_del(seq, upto):
        del seq[:upto]

    def shrink_set(seq, upto):
        seq[:upto] = []

    # Any slice removal that would drop below _minlength must fail.
    for upto in range(self.limit - ul._minlength + 1, self.limit + 1):
        self.assertRaises(ValueError, shrink_del, ul, upto)
        self.assertRaises(ValueError, shrink_set, ul, upto)
    del ul[:ul._minlength]
    ul._maxlength = 4
    # Fill exactly up to the maximum, then one more append must fail.
    for value in range(0, ul._maxlength - len(ul)):
        ul.append(value)
    self.assertRaises(ValueError, ul.append, 10)
def test09_iterable_check(self):
    """Assigning a non-iterable to an extended slice raises TypeError."""
    pl, ul = self.lists_of_len(self.limit + 1)

    def assign(seq, key, value):
        seq[key] = value

    self.assertRaises(TypeError, assign, ul, slice(0, 3, 2), 2)
def test10_checkindex(self):
    """_checkindex normalizes negative indexes and rejects out-of-range."""
    pl, ul = self.lists_of_len()
    for idx in self.limits_plus(0):
        if idx < 0:
            # Negative indexes are shifted into the positive range.
            self.assertEqual(idx + self.limit, ul._checkindex(idx),
                             '_checkindex(neg index)')
        else:
            self.assertEqual(idx, ul._checkindex(idx),
                             '_checkindex(pos index)')
    for idx in (-self.limit - 1, self.limit):
        self.assertRaises(IndexError, ul._checkindex, idx)
def test_11_sorting(self):
    """sort(), with and without key/reverse, matches a plain list."""
    def rotate(seq):
        # Move the last element to the front so the list starts unsorted.
        seq.insert(0, seq.pop())

    pl, ul = self.lists_of_len()
    rotate(pl)
    rotate(ul)
    pl.sort()
    ul.sort()
    self.assertEqual(pl[:], ul[:], 'sort')
    mid = pl[len(pl) // 2]
    pl.sort(key=lambda x: (mid - x) ** 2)
    ul.sort(key=lambda x: (mid - x) ** 2)
    self.assertEqual(pl[:], ul[:], 'sort w/ key')
    rotate(pl)
    rotate(ul)
    pl.sort(reverse=True)
    ul.sort(reverse=True)
    self.assertEqual(pl[:], ul[:], 'sort w/ reverse')
    mid = pl[len(pl) // 2]
    pl.sort(key=lambda x: (mid - x) ** 2)
    ul.sort(key=lambda x: (mid - x) ** 2)
    self.assertEqual(pl[:], ul[:], 'sort w/ key')
def test_12_arithmetic(self):
    """Concatenation, repetition, and rich comparisons match a plain list."""
    pl, ul = self.lists_of_len()
    al = list(range(10, 14))
    self.assertEqual(list(pl + al), list(ul + al), 'add')
    # __add__ on the user list must return an instance of the same type.
    self.assertEqual(type(ul), type(ul + al), 'type of add result')
    self.assertEqual(list(al + pl), list(al + ul), 'radd')
    # Reflected add onto a plain list yields a plain list.
    self.assertEqual(type(al), type(al + ul), 'type of radd result')
    objid = id(ul)
    pl += al
    ul += al
    self.assertEqual(pl[:], ul[:], 'in-place add')
    # In-place add must mutate the same object, not rebind to a new one.
    self.assertEqual(objid, id(ul), 'in-place add id')
    for n in (-1, 0, 1, 3):
        pl, ul = self.lists_of_len()
        self.assertEqual(list(pl * n), list(ul * n), 'mul by %d' % n)
        self.assertEqual(type(ul), type(ul * n), 'type of mul by %d result' % n)
        self.assertEqual(list(n * pl), list(n * ul), 'rmul by %d' % n)
        self.assertEqual(type(ul), type(n * ul), 'type of rmul by %d result' % n)
        objid = id(ul)
        pl *= n
        ul *= n
        self.assertEqual(pl[:], ul[:], 'in-place mul by %d' % n)
        # In-place repetition must also preserve object identity.
        self.assertEqual(objid, id(ul), 'in-place mul by %d id' % n)
    pl, ul = self.lists_of_len()
    self.assertEqual(pl, ul, 'cmp for equal')
    self.assertNotEqual(ul, pl + [2], 'cmp for not equal')
    self.assertGreaterEqual(pl, ul, 'cmp for gte self')
    self.assertLessEqual(pl, ul, 'cmp for lte self')
    self.assertGreaterEqual(ul, pl, 'cmp for self gte')
    self.assertLessEqual(ul, pl, 'cmp for self lte')
    self.assertGreater(pl + [5], ul, 'cmp')
    self.assertGreaterEqual(pl + [5], ul, 'cmp')
    self.assertLess(pl, ul + [2], 'cmp')
    self.assertLessEqual(pl, ul + [2], 'cmp')
    self.assertGreater(ul + [5], pl, 'cmp')
    self.assertGreaterEqual(ul + [5], pl, 'cmp')
    self.assertLess(ul, pl + [2], 'cmp')
    self.assertLessEqual(ul, pl + [2], 'cmp')
    # Also works with a custom IndexError
    ul_longer = ul + [2]
    ul_longer._IndexError = TypeError
    ul._IndexError = TypeError
    self.assertNotEqual(ul_longer, pl)
    self.assertGreater(ul_longer, ul)
    pl[1] = 20
    self.assertGreater(pl, ul, 'cmp for gt self')
    self.assertLess(ul, pl, 'cmp for self lt')
    pl[1] = -20
    self.assertLess(pl, ul, 'cmp for lt self')
    self.assertGreater(ul, pl, 'cmp for gt self')
class ListMixinTestSingle(ListMixinTest):
    """Repeat every ListMixinTest case against the UserListB list type."""
    listType = UserListB
| bsd-3-clause |
Inspq/ansible | lib/ansible/modules/storage/netapp/netapp_e_amg_sync.py | 6 | 10639 | #!/usr/bin/python
# (c) 2016, NetApp, Inc
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = """
---
module: netapp_e_amg_sync
short_description: Conduct synchronization actions on asynchronous member groups.
description:
- Allows for the initialization, suspension and resumption of an asynchronous mirror group's synchronization for NetApp E-series storage arrays.
version_added: '2.2'
author: Kevin Hulquest (@hulquest)
options:
api_username:
required: true
description:
- The username to authenticate with the SANtricity WebServices Proxy or embedded REST API.
api_password:
required: true
description:
- The password to authenticate with the SANtricity WebServices Proxy or embedded REST API.
api_url:
required: true
description:
- The url to the SANtricity WebServices Proxy or embedded REST API.
validate_certs:
required: false
default: true
description:
- Should https certificates be validated?
ssid:
description:
- The ID of the storage array containing the AMG you wish to target
name:
description:
- The name of the async mirror group you wish to target
required: yes
state:
description:
- The synchronization action you'd like to take.
- If C(running) then it will begin syncing if there is no active sync or will resume a suspended sync. If there is already a sync in progress, it will return with an OK status.
- If C(suspended) it will suspend any ongoing sync action, but return OK if there is no active sync or if the sync is already suspended
choices:
- running
- suspended
required: yes
delete_recovery_point:
description:
- Indicates whether the failures point can be deleted on the secondary if necessary to achieve the synchronization.
- If true, and if the amount of unsynchronized data exceeds the CoW repository capacity on the secondary for any member volume, the last failures point will be deleted and synchronization will continue.
- If false, the synchronization will be suspended if the amount of unsynchronized data exceeds the CoW Repository capacity on the secondary and the failures point will be preserved.
- "NOTE: This only has impact for newly launched syncs."
choices:
- yes
- no
default: no
"""
EXAMPLES = """
- name: start AMG async
netapp_e_amg_sync:
name: "{{ amg_sync_name }}"
state: running
ssid: "{{ ssid }}"
api_url: "{{ netapp_api_url }}"
api_username: "{{ netapp_api_username }}"
api_password: "{{ netapp_api_password }}"
"""
RETURN = """
json:
description: The object attributes of the AMG.
returned: success
type: string
example:
{
"changed": false,
"connectionType": "fc",
"groupRef": "3700000060080E5000299C24000006EF57ACAC70",
"groupState": "optimal",
"id": "3700000060080E5000299C24000006EF57ACAC70",
"label": "made_with_ansible",
"localRole": "primary",
"mirrorChannelRemoteTarget": "9000000060080E5000299C24005B06E557AC7EEC",
"orphanGroup": false,
"recoveryPointAgeAlertThresholdMinutes": 20,
"remoteRole": "secondary",
"remoteTarget": {
"nodeName": {
"ioInterfaceType": "fc",
"iscsiNodeName": null,
"remoteNodeWWN": "20040080E5299F1C"
},
"remoteRef": "9000000060080E5000299C24005B06E557AC7EEC",
"scsiinitiatorTargetBaseProperties": {
"ioInterfaceType": "fc",
"iscsiinitiatorTargetBaseParameters": null
}
},
"remoteTargetId": "ansible2",
"remoteTargetName": "Ansible2",
"remoteTargetWwn": "60080E5000299F880000000056A25D56",
"repositoryUtilizationWarnThreshold": 80,
"roleChangeProgress": "none",
"syncActivity": "idle",
"syncCompletionTimeAlertThresholdMinutes": 10,
"syncIntervalMinutes": 10,
"worldWideName": "60080E5000299C24000006EF57ACAC70"
}
"""
import json
from ansible.module_utils.api import basic_auth_argument_spec
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.pycompat24 import get_exception
from ansible.module_utils.urls import open_url
from ansible.module_utils.six.moves.urllib.error import HTTPError
def request(url, data=None, headers=None, method='GET', use_proxy=True,
            force=False, last_mod_time=None, timeout=10, validate_certs=True,
            url_username=None, url_password=None, http_agent=None,
            force_basic_auth=True, ignore_errors=False):
    """Issue an HTTP(S) request against the SANtricity REST API.

    Returns a ``(status_code, parsed_json_or_None)`` tuple.  A status of
    400 or above raises ``Exception(status, body)`` unless ``ignore_errors``
    is set, in which case the status/body are returned to the caller.
    """
    try:
        r = open_url(url=url, data=data, headers=headers, method=method,
                     use_proxy=use_proxy, force=force,
                     last_mod_time=last_mod_time, timeout=timeout,
                     validate_certs=validate_certs,
                     url_username=url_username, url_password=url_password,
                     http_agent=http_agent, force_basic_auth=force_basic_auth)
    except HTTPError:
        # open_url raises on 4xx/5xx, but the error object still carries
        # the response body; parse it below like a normal response.
        err = get_exception()
        r = err.fp
    # Bug fix: defined before the try so the except clause below cannot hit
    # a NameError when r.read() itself fails.
    raw_data = None
    try:
        raw_data = r.read()
        if raw_data:
            data = json.loads(raw_data)
        else:
            # Bug fix: with an empty response body the old code returned the
            # request payload (the ``data`` argument) back to the caller.
            data = None
    except Exception:
        if not ignore_errors:
            raise Exception(raw_data)
    resp_code = r.getcode()
    if resp_code >= 400 and not ignore_errors:
        raise Exception(resp_code, data)
    return resp_code, data
class AMGsync(object):
    """Drive synchronization actions on a NetApp E-series async mirror group.

    Parses the module parameters, locates the AMG by label, and exposes
    apply() to converge the group's sync activity to the requested state.
    """

    def __init__(self):
        argument_spec = basic_auth_argument_spec()
        argument_spec.update(dict(
            api_username=dict(type='str', required=True),
            api_password=dict(type='str', required=True, no_log=True),
            api_url=dict(type='str', required=True),
            name=dict(required=True, type='str'),
            ssid=dict(required=True, type='str'),
            state=dict(required=True, type='str',
                       choices=['running', 'suspended']),
            delete_recovery_point=dict(required=False, type='bool',
                                       default=False)
        ))
        self.module = AnsibleModule(argument_spec=argument_spec)
        args = self.module.params
        self.name = args['name']
        self.ssid = args['ssid']
        self.state = args['state']
        self.delete_recovery_point = args['delete_recovery_point']
        try:
            self.user = args['api_username']
            self.pwd = args['api_password']
            self.url = args['api_url']
        except KeyError:
            # Bug fix: the two literals previously concatenated without a
            # separating space ("api_usernameand").
            self.module.fail_json(msg="You must pass in api_username "
                                      "and api_password and api_url "
                                      "to the module.")
        self.certs = args['validate_certs']
        self.post_headers = {
            "Accept": "application/json",
            "Content-Type": "application/json"
        }
        self.amg_id, self.amg_obj = self.get_amg()

    def get_amg(self):
        """Return (id, object) of the AMG whose label matches self.name.

        Fails the module when no such group exists on the array.
        """
        endpoint = self.url + '/storage-systems/%s/async-mirrors' % self.ssid
        (rc, amg_objs) = request(endpoint, url_username=self.user,
                                 url_password=self.pwd,
                                 validate_certs=self.certs,
                                 headers=self.post_headers)
        # Bug fix: on Python 3 filter() returns an iterator, so the old
        # filter(...)[0] indexing raised TypeError.  Build the match list
        # once instead of scanning amg_objs twice.
        matches = [amg for amg in amg_objs if amg['label'] == self.name]
        try:
            amg_obj = matches[0]
        except IndexError:
            self.module.fail_json(
                msg="There is no async mirror group %s associated with storage array %s" % (self.name, self.ssid))
        return amg_obj['id'], amg_obj

    @property
    def current_state(self):
        # Always re-query the array so the state is fresh, not cached.
        amg_id, amg_obj = self.get_amg()
        return amg_obj['syncActivity']

    def run_sync_action(self):
        """POST the sync/resume/suspend action and return the API response."""
        # If we get to this point we know that the states differ, and there
        # is no 'err' state, so no need to revalidate.
        post_body = dict()
        if self.state == 'running':
            if self.current_state == 'idle':
                if self.delete_recovery_point:
                    post_body.update(dict(
                        deleteRecoveryPointIfNecessary=self.delete_recovery_point))
                suffix = 'sync'
            else:
                # In a suspended state
                suffix = 'resume'
        else:
            suffix = 'suspend'
        endpoint = self.url + "/storage-systems/%s/async-mirrors/%s/%s" % (
            self.ssid, self.amg_id, suffix)
        (rc, resp) = request(endpoint, method='POST', url_username=self.user,
                             url_password=self.pwd, validate_certs=self.certs,
                             data=json.dumps(post_body),
                             headers=self.post_headers, ignore_errors=True)
        if not str(rc).startswith('2'):
            self.module.fail_json(msg=str(resp['errorMessage']))
        return resp

    def apply(self):
        """Converge the AMG's sync activity to the requested state."""
        state_map = dict(
            running=['active'],
            suspended=['userSuspended', 'internallySuspended', 'paused'],
            # The corrected 'unknown' spelling is checked alongside the
            # historical 'unkown' typo so either reported value is treated
            # as an error state.
            err=['unknown', 'unkown', '_UNDEFINED'])
        if self.current_state not in state_map[self.state]:
            if self.current_state in state_map['err']:
                # Bug fix: the original applied '%' only to the second,
                # placeholder-free literal (precedence over '+'), raising
                # TypeError instead of producing this message.
                self.module.fail_json(
                    msg="The sync is a state of '%s', this requires manual "
                        "intervention. Please investigate and try again"
                        % self.current_state)
            else:
                self.amg_obj = self.run_sync_action()
        (ret, amg) = self.get_amg()
        self.module.exit_json(changed=False, **amg)
def main():
    """Module entry point: build the handler and converge the AMG state."""
    AMGsync().apply()


if __name__ == '__main__':
    main()
| gpl-3.0 |
horance-liu/tensorflow | tensorflow/python/keras/_impl/keras/layers/normalization.py | 28 | 5754 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Normalization layers.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.keras._impl.keras import backend as K
from tensorflow.python.keras._impl.keras import constraints
from tensorflow.python.keras._impl.keras import initializers
from tensorflow.python.keras._impl.keras import regularizers
from tensorflow.python.keras._impl.keras.engine import Layer
from tensorflow.python.layers import normalization as tf_normalization_layers
class BatchNormalization(tf_normalization_layers.BatchNormalization, Layer):
  """Batch normalization layer (Ioffe and Szegedy, 2014).

  Normalize the activations of the previous layer at each batch,
  i.e. applies a transformation that maintains the mean activation
  close to 0 and the activation standard deviation close to 1.

  Arguments:
      axis: Integer, the axis that should be normalized
          (typically the features axis).
          For instance, after a `Conv2D` layer with
          `data_format="channels_first"`,
          set `axis=1` in `BatchNormalization`.
      momentum: Momentum for the moving average.
      epsilon: Small float added to variance to avoid dividing by zero.
      center: If True, add offset of `beta` to normalized tensor.
          If False, `beta` is ignored.
      scale: If True, multiply by `gamma`.
          If False, `gamma` is not used.
          When the next layer is linear (also e.g. `nn.relu`),
          this can be disabled since the scaling
          will be done by the next layer.
      beta_initializer: Initializer for the beta weight.
      gamma_initializer: Initializer for the gamma weight.
      moving_mean_initializer: Initializer for the moving mean.
      moving_variance_initializer: Initializer for the moving variance.
      beta_regularizer: Optional regularizer for the beta weight.
      gamma_regularizer: Optional regularizer for the gamma weight.
      beta_constraint: Optional constraint for the beta weight.
      gamma_constraint: Optional constraint for the gamma weight.

  Input shape:
      Arbitrary. Use the keyword argument `input_shape`
      (tuple of integers, does not include the samples axis)
      when using this layer as the first layer in a model.

  Output shape:
      Same shape as input.

  References:
      - [Batch Normalization: Accelerating Deep Network Training by Reducing
        Internal Covariate Shift](https://arxiv.org/abs/1502.03167)
  """

  def __init__(self,
               axis=-1,
               momentum=0.99,
               epsilon=1e-3,
               center=True,
               scale=True,
               beta_initializer='zeros',
               gamma_initializer='ones',
               moving_mean_initializer='zeros',
               moving_variance_initializer='ones',
               beta_regularizer=None,
               gamma_regularizer=None,
               beta_constraint=None,
               gamma_constraint=None,
               **kwargs):
    self.supports_masking = True
    # Deserialize string/dict identifiers into concrete initializer,
    # regularizer and constraint objects before delegating to the
    # tf.layers implementation.
    super(BatchNormalization, self).__init__(
        axis=axis,
        momentum=momentum,
        epsilon=epsilon,
        center=center,
        scale=scale,
        beta_initializer=initializers.get(beta_initializer),
        gamma_initializer=initializers.get(gamma_initializer),
        moving_mean_initializer=initializers.get(moving_mean_initializer),
        moving_variance_initializer=initializers.get(
            moving_variance_initializer),
        beta_regularizer=regularizers.get(beta_regularizer),
        gamma_regularizer=regularizers.get(gamma_regularizer),
        beta_constraint=constraints.get(beta_constraint),
        gamma_constraint=constraints.get(gamma_constraint),
        **kwargs
    )

  def call(self, inputs, training=None):
    # Fall back to the global Keras learning phase when the caller did not
    # say whether this is a training pass.
    if training is None:
      training = K.learning_phase()
    output = super(BatchNormalization, self).call(inputs, training=training)
    # Identity (not equality) check: only mark the output as
    # learning-phase dependent when `training` is literally the symbolic
    # learning-phase tensor, i.e. the fallback above was taken.
    if training is K.learning_phase():
      output._uses_learning_phase = True  # pylint: disable=protected-access
    return output

  def get_config(self):
    # Serialize the configurable sub-objects back to their string/dict
    # identifiers so the layer can be rebuilt via `from_config`.
    config = {
        'axis': self.axis,
        'momentum': self.momentum,
        'epsilon': self.epsilon,
        'center': self.center,
        'scale': self.scale,
        'beta_initializer': initializers.serialize(self.beta_initializer),
        'gamma_initializer': initializers.serialize(self.gamma_initializer),
        'moving_mean_initializer':
            initializers.serialize(self.moving_mean_initializer),
        'moving_variance_initializer':
            initializers.serialize(self.moving_variance_initializer),
        'beta_regularizer': regularizers.serialize(self.beta_regularizer),
        'gamma_regularizer': regularizers.serialize(self.gamma_regularizer),
        'beta_constraint': constraints.serialize(self.beta_constraint),
        'gamma_constraint': constraints.serialize(self.gamma_constraint)
    }
    base_config = super(BatchNormalization, self).get_config()
    return dict(list(base_config.items()) + list(config.items()))
| apache-2.0 |
openstack/heat | heat/tests/openstack/nova/test_server.py | 1 | 246323 | #
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import collections
import contextlib
import copy
from unittest import mock
from keystoneauth1 import exceptions as ks_exceptions
from neutronclient.v2_0 import client as neutronclient
from novaclient import exceptions as nova_exceptions
from oslo_serialization import jsonutils
from oslo_utils import uuidutils
import requests
from urllib import parse as urlparse
from heat.common import exception
from heat.common.i18n import _
from heat.common import template_format
from heat.engine.clients.os import glance
from heat.engine.clients.os import heat_plugin
from heat.engine.clients.os import neutron
from heat.engine.clients.os import nova
from heat.engine.clients.os import swift
from heat.engine.clients.os import zaqar
from heat.engine import environment
from heat.engine import resource
from heat.engine.resources.openstack.nova import server as servers
from heat.engine.resources.openstack.nova import server_network_mixin
from heat.engine.resources import scheduler_hints as sh
from heat.engine import scheduler
from heat.engine import stack as parser
from heat.engine import template
from heat.objects import resource_data as resource_data_object
from heat.tests import common
from heat.tests.openstack.nova import fakes as fakes_nova
from heat.tests import utils
wp_template = '''
{
"AWSTemplateFormatVersion" : "2010-09-09",
"Description" : "WordPress",
"Parameters" : {
"key_name" : {
"Description" : "key_name",
"Type" : "String",
"Default" : "test"
}
},
"Resources" : {
"WebServer": {
"Type": "OS::Nova::Server",
"Properties": {
"image" : "F18-x86_64-gold",
"flavor" : "m1.large",
"key_name" : "test",
"user_data" : "wordpress"
}
}
}
}
'''
ns_template = '''
heat_template_version: 2015-04-30
resources:
server:
type: OS::Nova::Server
properties:
image: F17-x86_64-gold
flavor: m1.large
user_data: {get_file: a_file}
networks: [{'network': 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'}]
'''
with_port_template = '''
heat_template_version: 2015-04-30
resources:
port:
type: OS::Neutron::Port
properties:
network: 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
server:
type: OS::Nova::Server
properties:
image: F17-x86_64-gold
flavor: m1.small
networks:
- port: {get_resource: port}
fixed_ip: 10.0.0.99
'''
bdm_v2_template = '''
heat_template_version: 2015-04-30
resources:
server:
type: OS::Nova::Server
properties:
flavor: m1.tiny
block_device_mapping_v2:
- device_name: vda
delete_on_termination: true
image_id: F17-x86_64-gold
'''
subnet_template = '''
heat_template_version: 2013-05-23
resources:
server:
type: OS::Nova::Server
properties:
image: F17-x86_64-gold
flavor: m1.large
networks:
- { network: 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa' }
subnet:
type: OS::Neutron::Subnet
properties:
network: 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
subnet_unreferenced:
type: OS::Neutron::Subnet
properties:
network: 'bbccbbcc-bbcc-bbcc-bbcc-bbccbbccbbcc'
'''
multi_subnet_template = '''
heat_template_version: 2013-05-23
resources:
server:
type: OS::Nova::Server
properties:
image: F17-x86_64-gold
flavor: m1.large
networks:
- network: {get_resource: network}
network:
type: OS::Neutron::Net
properties:
name: NewNetwork
subnet1:
type: OS::Neutron::Subnet
properties:
network: {get_resource: network}
name: NewSubnet1
subnet2:
type: OS::Neutron::Subnet
properties:
network: {get_resource: network}
name: NewSubnet2
'''
no_subnet_template = '''
heat_template_version: 2013-05-23
resources:
server:
type: OS::Nova::Server
properties:
image: F17-x86_64-gold
flavor: m1.large
subnet:
type: OS::Neutron::Subnet
properties:
network: 12345
'''
tmpl_server_with_network_id = """
heat_template_version: 2015-10-15
resources:
server:
type: OS::Nova::Server
properties:
flavor: m1.small
image: F17-x86_64-gold
networks:
- network: 4321
"""
tmpl_server_with_sub_secu_group = """
heat_template_version: 2015-10-15
resources:
server:
type: OS::Nova::Server
properties:
flavor: m1.small
image: F17-x86_64-gold
networks:
- subnet: 2a60cbaa-3d33-4af6-a9ce-83594ac546fc
security_groups:
- my_seg
"""
server_with_sw_config_personality = """
heat_template_version: 2014-10-16
resources:
swconfig:
type: OS::Heat::SoftwareConfig
properties:
config: |
#!/bin/bash
echo -e "test"
server:
type: OS::Nova::Server
properties:
image: F17-x86_64-gold
flavor: m1.large
personality: { /tmp/test: { get_attr: [swconfig, config]}}
"""
def create_fake_iface(port=None, net=None, mac=None, ip=None, subnet=None):
    """Return a lightweight stand-in for a Nova interface attachment.

    Exposes the attributes the code under test reads: port_id, net_id,
    mac_addr and a single-entry fixed_ips list.
    """
    iface = type('fake_interface', (object,), {})()
    iface.port_id = port
    iface.net_id = net
    iface.mac_addr = mac
    iface.fixed_ips = [{'ip_address': ip, 'subnet_id': subnet}]
    return iface
class ServerStatus(object):
    """Callable stub for servers.get() that walks a fixed status sequence.

    Each call assigns the next status to the wrapped server and returns
    it; calling again after the sequence is exhausted fails the test.
    """

    def __init__(self, server, statuses):
        self._server = server
        self._status = iter(statuses)

    def __call__(self, server_id):
        for status in self._status:
            self._server.status = status
            return self._server
        raise AssertionError('Unexpected call to servers.get()')
class ServersTest(common.HeatTestCase):
def setUp(self):
super(ServersTest, self).setUp()
self.fc = fakes_nova.FakeClient()
self.limits = mock.Mock()
self.limits.absolute = self._limits_absolute()
self.mock_flavor = mock.Mock(ram=4, disk=4)
self.mock_image = mock.Mock(min_ram=1, min_disk=1, status='ACTIVE')
def flavor_side_effect(*args):
return 2 if args[0] == 'm1.small' else 1
def image_side_effect(*args):
return 2 if args[0] == 'F17-x86_64-gold' else 1
self.patchobject(nova.NovaClientPlugin, 'find_flavor_by_name_or_id',
side_effect=flavor_side_effect)
self.patchobject(glance.GlanceClientPlugin, 'find_image_by_name_or_id',
side_effect=image_side_effect)
self.port_show = self.patchobject(neutronclient.Client,
'show_port')
self.subnet_show = self.patchobject(neutronclient.Client,
'show_subnet')
self.network_show = self.patchobject(neutronclient.Client,
'show_network')
def _limits_absolute(self):
max_personality = mock.Mock()
max_personality.name = 'maxPersonality'
max_personality.value = 5
max_personality_size = mock.Mock()
max_personality_size.name = 'maxPersonalitySize'
max_personality_size.value = 10240
max_server_meta = mock.Mock()
max_server_meta.name = 'maxServerMeta'
max_server_meta.value = 3
yield max_personality
yield max_personality_size
yield max_server_meta
def _setup_test_stack(self, stack_name, test_templ=wp_template):
t = template_format.parse(test_templ)
files = {}
if test_templ == ns_template:
files = {'a_file': 'stub'}
templ = template.Template(t,
env=environment.Environment(
{'key_name': 'test'}),
files=files)
stack = parser.Stack(utils.dummy_context(region_name="RegionOne"),
stack_name, templ,
stack_id=uuidutils.generate_uuid(),
stack_user_project_id='8888')
return templ, stack
def _prepare_server_check(self, status='ACTIVE'):
templ, self.stack = self._setup_test_stack('server_check')
server = self.fc.servers.list()[1]
server.status = status
res = self.stack['WebServer']
res.state_set(res.CREATE, res.COMPLETE)
res.client = mock.Mock()
res.client().servers.get.return_value = server
return res
def test_check(self):
res = self._prepare_server_check()
scheduler.TaskRunner(res.check)()
self.assertEqual((res.CHECK, res.COMPLETE), res.state)
def test_check_fail(self):
res = self._prepare_server_check()
res.client().servers.get.side_effect = Exception('boom')
exc = self.assertRaises(exception.ResourceFailure,
scheduler.TaskRunner(res.check))
self.assertIn('boom', str(exc))
self.assertEqual((res.CHECK, res.FAILED), res.state)
def test_check_not_active(self):
res = self._prepare_server_check(status='FOO')
exc = self.assertRaises(exception.ResourceFailure,
scheduler.TaskRunner(res.check))
self.assertIn('FOO', str(exc))
def _get_test_template(self, stack_name, server_name=None,
image_id=None):
tmpl, stack = self._setup_test_stack(stack_name)
tmpl.t['Resources']['WebServer']['Properties'][
'image'] = image_id or 'CentOS 5.2'
tmpl.t['Resources']['WebServer']['Properties'][
'flavor'] = '256 MB Server'
if server_name is not None:
tmpl.t['Resources']['WebServer']['Properties'][
'name'] = server_name
return tmpl, stack
def _setup_test_server(self, return_server, name, image_id=None,
override_name=False, stub_create=True,
networks=None):
stack_name = '%s_s' % name
def _mock_find_id(resource, name_or_id, cmd_resource=None):
return name_or_id
mock_find = self.patchobject(neutron.NeutronClientPlugin,
'find_resourceid_by_name_or_id')
mock_find.side_effect = _mock_find_id
server_name = str(name) if override_name else None
tmpl, self.stack = self._get_test_template(stack_name, server_name,
image_id)
props = tmpl.t['Resources']['WebServer']['Properties']
# set old_networks for server
if networks is not None:
props['networks'] = networks
self.server_props = props
resource_defns = tmpl.resource_definitions(self.stack)
server = servers.Server(str(name), resource_defns['WebServer'],
self.stack)
self.patchobject(server, 'store_external_ports')
self.patchobject(nova.NovaClientPlugin, 'client',
return_value=self.fc)
self.patchobject(glance.GlanceClientPlugin, 'get_image',
return_value=self.mock_image)
if stub_create:
self.patchobject(self.fc.servers, 'create',
return_value=return_server)
# mock check_create_complete innards
self.patchobject(self.fc.servers, 'get',
return_value=return_server)
return server
def _create_test_server(self, return_server, name, override_name=False,
stub_create=True, networks=None):
server = self._setup_test_server(return_server, name,
stub_create=stub_create,
networks=networks)
scheduler.TaskRunner(server.create)()
return server
def test_subnet_dependency_by_network_id(self):
templ, stack = self._setup_test_stack('subnet-test',
subnet_template)
server_rsrc = stack['server']
subnet_rsrc = stack['subnet']
deps = []
server_rsrc.add_explicit_dependencies(deps)
server_rsrc.add_dependencies(deps)
self.assertEqual(4, len(deps))
self.assertEqual(subnet_rsrc, deps[3])
self.assertNotIn(stack['subnet_unreferenced'], deps)
def test_subnet_dependency_unknown_network_id(self):
# The use case here is creating a network + subnets + server
# from within one stack
templ, stack = self._setup_test_stack('subnet-test',
multi_subnet_template)
server_rsrc = stack['server']
subnet1_rsrc = stack['subnet1']
subnet2_rsrc = stack['subnet2']
deps = []
server_rsrc.add_explicit_dependencies(deps)
server_rsrc.add_dependencies(deps)
self.assertEqual(8, len(deps))
self.assertIn(subnet1_rsrc, deps)
self.assertIn(subnet2_rsrc, deps)
def test_subnet_nodeps(self):
templ, stack = self._setup_test_stack('subnet-test',
no_subnet_template)
server_rsrc = stack['server']
subnet_rsrc = stack['subnet']
deps = []
server_rsrc.add_explicit_dependencies(deps)
server_rsrc.add_dependencies(deps)
self.assertEqual(2, len(deps))
self.assertNotIn(subnet_rsrc, deps)
def test_server_create(self):
return_server = self.fc.servers.list()[1]
return_server.id = '5678'
return_server._info['os_collect_config'] = {}
server_name = 'test_server_create'
stack_name = '%s_s' % server_name
server = self._create_test_server(return_server, server_name)
self.patchobject(nova.NovaClientPlugin, 'is_version_supported',
return_value=True)
# this makes sure the auto increment worked on server creation
self.assertGreater(server.id, 0)
interfaces = [create_fake_iface(port='1234',
mac='fa:16:3e:8c:22:aa',
ip='4.5.6.7'),
create_fake_iface(port='5678',
mac='fa:16:3e:8c:33:bb',
ip='5.6.9.8'),
create_fake_iface(port='1013',
mac='fa:16:3e:8c:44:cc',
ip='10.13.12.13',
subnet='private_subnet_id')]
ports = [dict(id=interfaces[0].port_id,
mac_address=interfaces[0].mac_addr,
fixed_ips=interfaces[0].fixed_ips,
network_id='public_id'),
dict(id=interfaces[1].port_id,
mac_address=interfaces[1].mac_addr,
fixed_ips=interfaces[1].fixed_ips,
network_id='public_id'),
dict(id=interfaces[2].port_id,
mac_address=interfaces[2].mac_addr,
fixed_ips=interfaces[2].fixed_ips,
network_id='private_id')]
public_net = dict(id='public_id',
name='public',
mtu=1500,
subnets=['public_subnet_id'])
private_net = dict(id='private_id',
name='private',
mtu=1500,
subnets=['private_subnet_id'])
private_subnet = dict(id='private_subnet_id',
name='private_subnet',
cidr='private_cidr',
allocation_pools=[{'start': 'start_addr',
'end': 'end_addr'}],
gateway_ip='private_gateway',
network_id='private_id')
self.patchobject(self.fc.servers, 'get', return_value=return_server)
self.patchobject(neutronclient.Client, 'list_ports',
return_value={'ports': ports})
self.patchobject(neutronclient.Client, 'list_networks',
side_effect=[{'networks': [public_net]},
{'networks': [public_net]},
{'networks': [private_net]}])
self.patchobject(neutronclient.Client, 'list_floatingips',
return_value={'floatingips': []})
self.patchobject(self.fc.servers, 'tag_list', return_value=['test'])
self.subnet_show.return_value = {'subnet': private_subnet}
self.network_show.return_value = {'network': private_net}
public_ip = return_server.networks['public'][0]
self.assertEqual('1234',
server.FnGetAtt('addresses')['public'][0]['port'])
self.assertEqual('5678',
server.FnGetAtt('addresses')['public'][1]['port'])
self.assertEqual(public_ip,
server.FnGetAtt('addresses')['public'][0]['addr'])
self.assertEqual(public_ip,
server.FnGetAtt('networks')['public'][0])
private_ip = return_server.networks['private'][0]
self.assertEqual('1013',
server.FnGetAtt('addresses')['private'][0]['port'])
self.assertEqual(private_ip,
server.FnGetAtt('addresses')['private'][0]['addr'])
self.assertEqual([private_subnet],
server.FnGetAtt('addresses')['private'][0]['subnets'])
self.assertEqual(private_net,
server.FnGetAtt('addresses')['private'][0]['network'])
self.assertEqual(private_ip,
server.FnGetAtt('networks')['private'][0])
self.assertEqual(return_server._info, server.FnGetAtt('show'))
self.assertEqual('sample-server2', server.FnGetAtt('instance_name'))
self.assertEqual('192.0.2.0', server.FnGetAtt('accessIPv4'))
self.assertEqual('::babe:4317:0A83', server.FnGetAtt('accessIPv6'))
expected_name = utils.PhysName(stack_name, server.name)
self.assertEqual(expected_name, server.FnGetAtt('name'))
self.assertEqual(['test'], server.FnGetAtt('tags'))
# test with unsupported version
self.patchobject(nova.NovaClientPlugin, 'is_version_supported',
return_value=False)
if server.attributes._resolved_values.get('tags'):
del server.attributes._resolved_values['tags']
self.assertIsNone(server.FnGetAtt('tags'))
self.assertEqual({}, server.FnGetAtt('os_collect_config'))
def test_server_create_metadata(self):
stack_name = 'create_metadata_test_stack'
self.patchobject(nova.NovaClientPlugin, 'client',
return_value=self.fc)
return_server = self.fc.servers.list()[1]
(tmpl, stack) = self._setup_test_stack(stack_name)
tmpl['Resources']['WebServer']['Properties'][
'metadata'] = {'a': 1}
resource_defns = tmpl.resource_definitions(stack)
server = servers.Server('create_metadata_test_server',
resource_defns['WebServer'], stack)
self.patchobject(server, 'store_external_ports')
mock_create = self.patchobject(self.fc.servers, 'create',
return_value=return_server)
scheduler.TaskRunner(server.create)()
args, kwargs = mock_create.call_args
self.assertEqual({'a': "1"}, kwargs['meta'])
    def test_server_create_with_subnet_security_group(self):
        """Server with subnet + security group pre-creates a neutron port.

        The port must land in the subnet's network, carry the subnet
        fixed IP and the resolved security-group UUIDs, and the nova
        boot call must get empty metadata.
        """
        stack_name = 'server_with_subnet_security_group'
        self.patchobject(nova.NovaClientPlugin, 'client',
                         return_value=self.fc)
        return_server = self.fc.servers.list()[1]
        (tmpl, stack) = self._setup_test_stack(
            stack_name, test_templ=tmpl_server_with_sub_secu_group)
        resource_defns = tmpl.resource_definitions(stack)
        server = servers.Server('server_with_sub_secu',
                                resource_defns['server'], stack)
        # Subnet name resolution is pinned to a fixed UUID.
        mock_find = self.patchobject(
            neutron.NeutronClientPlugin,
            'find_resourceid_by_name_or_id',
            return_value='2a60cbaa-3d33-4af6-a9ce-83594ac546fc')
        sec_uuids = ['86c0f8ae-23a8-464f-8603-c54113ef5467']
        self.patchobject(neutron.NeutronClientPlugin,
                         'get_secgroup_uuids', return_value=sec_uuids)
        self.patchobject(server, 'store_external_ports')
        self.patchobject(neutron.NeutronClientPlugin,
                         'network_id_from_subnet_id',
                         return_value='05d8e681-4b37-4570-bc8d-810089f706b2')
        mock_create_port = self.patchobject(
            neutronclient.Client, 'create_port')
        mock_create = self.patchobject(
            self.fc.servers, 'create', return_value=return_server)
        scheduler.TaskRunner(server.create)()
        # Expected create_port payload, built from the stubbed IDs above.
        kwargs = {'network_id': '05d8e681-4b37-4570-bc8d-810089f706b2',
                  'fixed_ips': [
                      {'subnet_id': '2a60cbaa-3d33-4af6-a9ce-83594ac546fc'}],
                  'security_groups': sec_uuids,
                  'name': 'server_with_sub_secu-port-0',
                  }
        mock_create_port.assert_called_with({'port': kwargs})
        self.assertEqual(1, mock_find.call_count)
        args, kwargs = mock_create.call_args
        self.assertEqual({}, kwargs['meta'])
def test_server_create_with_str_network(self):
stack_name = 'server_with_str_network'
return_server = self.fc.servers.list()[1]
(tmpl, stack) = self._setup_test_stack(stack_name)
mock_nc = self.patchobject(nova.NovaClientPlugin, 'client',
return_value=self.fc)
self.patchobject(glance.GlanceClientPlugin, 'get_image',
return_value=self.mock_image)
self.patchobject(nova.NovaClientPlugin, 'get_flavor',
return_value=self.mock_flavor)
self.patchobject(neutron.NeutronClientPlugin,
'find_resourceid_by_name_or_id')
props = tmpl['Resources']['WebServer']['Properties']
props['networks'] = [{'allocate_network': 'none'}]
resource_defns = tmpl.resource_definitions(stack)
server = servers.Server('WebServer',
resource_defns['WebServer'], stack)
self.patchobject(server, 'store_external_ports')
create_mock = self.patchobject(self.fc.servers, 'create',
return_value=return_server)
scheduler.TaskRunner(server.create)()
mock_nc.assert_called_with()
self.assertEqual(3, mock_nc.call_count)
self.assertEqual('none', create_mock.call_args[1]['nics'])
    def test_server_create_with_image_id(self):
        """Address/network attributes resolve against the server's ports.

        Builds three fake interfaces (two on 'public', one on 'private')
        and checks the FnGetAtt views of addresses, networks and name.
        """
        return_server = self.fc.servers.list()[1]
        return_server.id = '5678'
        server_name = 'test_server_create_image_id'
        server = self._setup_test_server(return_server,
                                         server_name,
                                         override_name=True)
        server.resource_id = '1234'
        interfaces = [create_fake_iface(port='1234',
                                        mac='fa:16:3e:8c:22:aa',
                                        ip='4.5.6.7'),
                      create_fake_iface(port='5678',
                                        mac='fa:16:3e:8c:33:bb',
                                        ip='5.6.9.8'),
                      create_fake_iface(port='1013',
                                        mac='fa:16:3e:8c:44:cc',
                                        ip='10.13.12.13')]
        ports = [dict(id=interfaces[0].port_id,
                      mac_address=interfaces[0].mac_addr,
                      fixed_ips=interfaces[0].fixed_ips,
                      network_id='public_id'),
                 dict(id=interfaces[1].port_id,
                      mac_address=interfaces[1].mac_addr,
                      fixed_ips=interfaces[1].fixed_ips,
                      network_id='public_id'),
                 dict(id=interfaces[2].port_id,
                      mac_address=interfaces[2].mac_addr,
                      fixed_ips=interfaces[2].fixed_ips,
                      network_id='private_id')]
        public_net = dict(id='public_id', name='public')
        private_net = dict(id='private_id', name='private')
        self.patchobject(self.fc.servers, 'get', return_value=return_server)
        self.patchobject(neutronclient.Client, 'list_ports',
                         return_value={'ports': ports})
        # Three queued replies, one per port above — presumably one
        # list_networks lookup per port; confirm against resource code.
        self.patchobject(neutronclient.Client, 'list_networks',
                         side_effect=[{'networks': [public_net]},
                                      {'networks': [public_net]},
                                      {'networks': [private_net]}])
        self.patchobject(neutronclient.Client, 'list_floatingips',
                         return_value={'floatingips': []})
        self.patchobject(return_server, 'interface_detach')
        self.patchobject(return_server, 'interface_attach')
        public_ip = return_server.networks['public'][0]
        self.assertEqual('1234',
                         server.FnGetAtt('addresses')['public'][0]['port'])
        self.assertEqual('5678',
                         server.FnGetAtt('addresses')['public'][1]['port'])
        self.assertEqual(public_ip,
                         server.FnGetAtt('addresses')['public'][0]['addr'])
        self.assertEqual(public_ip,
                         server.FnGetAtt('networks')['public'][0])
        private_ip = return_server.networks['private'][0]
        self.assertEqual('1013',
                         server.FnGetAtt('addresses')['private'][0]['port'])
        self.assertEqual(private_ip,
                         server.FnGetAtt('addresses')['private'][0]['addr'])
        self.assertEqual(private_ip,
                         server.FnGetAtt('networks')['private'][0])
        self.assertEqual(server_name, server.FnGetAtt('name'))
def test_server_image_name_err(self):
stack_name = 'img_name_err'
(tmpl, stack) = self._setup_test_stack(stack_name)
mock_image = self.patchobject(glance.GlanceClientPlugin,
'find_image_by_name_or_id')
self.stub_KeypairConstraint_validate()
mock_image.side_effect = (
glance.client_exception.EntityMatchNotFound(
entity='image', args={'name': 'Slackware'}))
# Init a server with non exist image name
tmpl['Resources']['WebServer']['Properties']['image'] = 'Slackware'
resource_defns = tmpl.resource_definitions(stack)
server = servers.Server('WebServer',
resource_defns['WebServer'], stack)
error = self.assertRaises(exception.ResourceFailure,
scheduler.TaskRunner(server.create))
self.assertIn("No image matching {'name': 'Slackware'}.",
str(error))
def test_server_duplicate_image_name_err(self):
stack_name = 'img_dup_err'
(tmpl, stack) = self._setup_test_stack(stack_name)
mock_image = self.patchobject(glance.GlanceClientPlugin,
'find_image_by_name_or_id')
self.stub_KeypairConstraint_validate()
mock_image.side_effect = (
glance.client_exception.EntityUniqueMatchNotFound(
entity='image', args='CentOS 5.2'))
tmpl['Resources']['WebServer']['Properties']['image'] = 'CentOS 5.2'
resource_defns = tmpl.resource_definitions(stack)
server = servers.Server('WebServer',
resource_defns['WebServer'], stack)
error = self.assertRaises(exception.ResourceFailure,
scheduler.TaskRunner(server.create))
self.assertIn('No image unique match found for CentOS 5.2.',
str(error))
def test_server_create_unexpected_status(self):
# NOTE(pshchelo) checking is done only on check_create_complete
# level so not to mock out all delete/retry logic that kicks in
# on resource create failure
return_server = self.fc.servers.list()[1]
server = self._create_test_server(return_server,
'cr_unexp_sts')
return_server.status = 'BOGUS'
self.patchobject(self.fc.servers, 'get',
return_value=return_server)
e = self.assertRaises(exception.ResourceUnknownStatus,
server.check_create_complete,
server.resource_id)
self.assertEqual('Server is not active - Unknown status BOGUS due to '
'"Unknown"', str(e))
def test_server_create_error_status(self):
# NOTE(pshchelo) checking is done only on check_create_complete
# level so not to mock out all delete/retry logic that kicks in
# on resource create failure
return_server = self.fc.servers.list()[1]
server = self._create_test_server(return_server,
'cr_err_sts')
return_server.status = 'ERROR'
return_server.fault = {
'message': 'NoValidHost',
'code': 500,
'created': '2013-08-14T03:12:10Z'
}
self.patchobject(self.fc.servers, 'get',
return_value=return_server)
e = self.assertRaises(exception.ResourceInError,
server.check_create_complete,
server.resource_id)
self.assertEqual(
'Went to status ERROR due to "Message: NoValidHost, Code: 500"',
str(e))
def test_server_create_raw_userdata(self):
self.patchobject(nova.NovaClientPlugin, 'client',
return_value=self.fc)
return_server = self.fc.servers.list()[1]
stack_name = 'raw_userdata_s'
(tmpl, stack) = self._setup_test_stack(stack_name)
tmpl['Resources']['WebServer']['Properties'][
'user_data_format'] = 'RAW'
resource_defns = tmpl.resource_definitions(stack)
server = servers.Server('WebServer',
resource_defns['WebServer'], stack)
self.patchobject(server, 'store_external_ports')
mock_create = self.patchobject(self.fc.servers, 'create',
return_value=return_server)
scheduler.TaskRunner(server.create)()
args, kwargs = mock_create.call_args
self.assertEqual('wordpress', kwargs['userdata'])
self.assertEqual({}, kwargs['meta'])
    def test_server_create_raw_config_userdata(self):
        """RAW user_data holding a software-config id is dereferenced.

        When user_data is a config UUID, the stored config body is what
        must reach nova as userdata.
        """
        self.patchobject(nova.NovaClientPlugin, 'client',
                         return_value=self.fc)
        return_server = self.fc.servers.list()[1]
        stack_name = 'raw_userdata_s'
        (tmpl, stack) = self._setup_test_stack(stack_name)
        tmpl['Resources']['WebServer']['Properties'][
            'user_data_format'] = 'RAW'
        tmpl['Resources']['WebServer']['Properties'][
            'user_data'] = '8c813873-f6ee-4809-8eec-959ef39acb55'
        resource_defns = tmpl.resource_definitions(stack)
        server = servers.Server('WebServer',
                                resource_defns['WebServer'], stack)
        self.patchobject(server, 'store_external_ports')
        # Stub the RPC client so show_software_config returns our config.
        self.rpc_client = mock.MagicMock()
        server._rpc_client = self.rpc_client
        sc = {'config': 'wordpress from config'}
        self.rpc_client.show_software_config.return_value = sc
        mock_create = self.patchobject(self.fc.servers, 'create',
                                       return_value=return_server)
        scheduler.TaskRunner(server.create)()
        args, kwargs = mock_create.call_args
        self.assertEqual('wordpress from config', kwargs['userdata'])
        self.assertEqual({}, kwargs['meta'])
    def test_server_create_raw_config_userdata_None(self):
        """If the referenced software config is gone, the raw id is kept.

        show_software_config raising NotFound (filtered through
        ignore_error_by_name) must leave the original UUID as userdata.
        """
        self.patchobject(nova.NovaClientPlugin, 'client',
                         return_value=self.fc)
        return_server = self.fc.servers.list()[1]
        stack_name = 'raw_userdata_s'
        (tmpl, stack) = self._setup_test_stack(stack_name)
        sc_id = '8c813873-f6ee-4809-8eec-959ef39acb55'
        tmpl['Resources']['WebServer']['Properties'][
            'user_data_format'] = 'RAW'
        tmpl['Resources']['WebServer']['Properties']['user_data'] = sc_id
        resource_defns = tmpl.resource_definitions(stack)
        server = servers.Server('WebServer',
                                resource_defns['WebServer'], stack)
        self.patchobject(server, 'store_external_ports')
        self.rpc_client = mock.MagicMock()
        server._rpc_client = self.rpc_client
        # Mimic the real client's NotFound filter so the failed lookup
        # is swallowed instead of propagating out of create.
        @contextlib.contextmanager
        def exc_filter(*args):
            try:
                yield
            except exception.NotFound:
                pass
        self.rpc_client.ignore_error_by_name.side_effect = exc_filter
        self.rpc_client.show_software_config.side_effect = exception.NotFound
        mock_create = self.patchobject(self.fc.servers, 'create',
                                       return_value=return_server)
        scheduler.TaskRunner(server.create)()
        args, kwargs = mock_create.call_args
        self.assertEqual(sc_id, kwargs['userdata'])
        self.assertEqual({}, kwargs['meta'])
    def _server_create_software_config(self, md=None,
                                       stack_name='software_config_s',
                                       ret_tmpl=False):
        """Create a server with user_data_format SOFTWARE_CONFIG.

        Optionally injects resource metadata ``md``, then checks the
        generated stack-user credentials and stack access rules.
        Returns the server, or ``(server, tmpl)`` when ``ret_tmpl``.
        Side effects: sets ``self.stack`` and ``self.server_props``.
        """
        self.patchobject(nova.NovaClientPlugin, 'client',
                         return_value=self.fc)
        return_server = self.fc.servers.list()[1]
        (tmpl, stack) = self._setup_test_stack(stack_name)
        self.stack = stack
        self.server_props = tmpl['Resources']['WebServer']['Properties']
        self.server_props['user_data_format'] = 'SOFTWARE_CONFIG'
        if md is not None:
            tmpl['Resources']['WebServer']['Metadata'] = md
        stack.stack_user_project_id = '8888'
        resource_defns = tmpl.resource_definitions(stack)
        server = servers.Server('WebServer',
                                resource_defns['WebServer'], stack)
        self.patchobject(server, 'store_external_ports')
        self.patchobject(server, 'heat')
        self.patchobject(self.fc.servers, 'create',
                         return_value=return_server)
        scheduler.TaskRunner(server.create)()
        # Credentials created for the stack user during create.
        self.assertEqual('4567', server.access_key)
        self.assertEqual('8901', server.secret_key)
        self.assertEqual('1234', server._get_user_id())
        self.assertEqual('POLL_SERVER_CFN',
                         server.properties.get('software_config_transport'))
        # Only the exact (key, resource) pair grants stack access.
        self.assertTrue(stack.access_allowed('4567', 'WebServer'))
        self.assertFalse(stack.access_allowed('45678', 'WebServer'))
        self.assertFalse(stack.access_allowed('4567', 'wWebServer'))
        if ret_tmpl:
            return server, tmpl
        else:
            return server
@mock.patch.object(heat_plugin.HeatClientPlugin, 'url_for')
def test_server_create_software_config(self, fake_url):
fake_url.return_value = 'http://ip:8000/v1'
server = self._server_create_software_config()
self.assertEqual({
'os-collect-config': {
'cfn': {
'access_key_id': '4567',
'metadata_url': 'http://ip:8000/v1/',
'path': 'WebServer.Metadata',
'secret_access_key': '8901',
'stack_name': 'software_config_s'
},
'collectors': ['ec2', 'cfn', 'local']
},
'deployments': []
}, server.metadata_get())
@mock.patch.object(heat_plugin.HeatClientPlugin, 'url_for')
def test_resolve_attribute_os_collect_config(self, fake_url):
fake_url.return_value = 'http://ip/heat-api-cfn/v1'
server = self._server_create_software_config()
self.assertEqual({
'cfn': {
'access_key_id': '4567',
'metadata_url': 'http://ip/heat-api-cfn/v1/',
'path': 'WebServer.Metadata',
'secret_access_key': '8901',
'stack_name': 'software_config_s'
},
'collectors': ['ec2', 'cfn', 'local']
}, server.FnGetAtt('os_collect_config'))
@mock.patch.object(heat_plugin.HeatClientPlugin, 'url_for')
def test_server_create_software_config_metadata(self, fake_url):
md = {'os-collect-config': {'polling_interval': 10}}
fake_url.return_value = 'http://ip/heat-api-cfn/v1'
server = self._server_create_software_config(md=md)
self.assertEqual({
'os-collect-config': {
'cfn': {
'access_key_id': '4567',
'metadata_url': 'http://ip/heat-api-cfn/v1/',
'path': 'WebServer.Metadata',
'secret_access_key': '8901',
'stack_name': 'software_config_s'
},
'collectors': ['ec2', 'cfn', 'local'],
'polling_interval': 10
},
'deployments': []
}, server.metadata_get())
    def _server_create_software_config_poll_heat(self, md=None):
        """Create a server using the POLL_SERVER_HEAT transport.

        Optionally injects resource metadata ``md``; checks the created
        stack-user id and access rules. Returns ``(stack, server)``.
        Side effect: sets ``self.server_props``.
        """
        self.patchobject(nova.NovaClientPlugin, 'client',
                         return_value=self.fc)
        return_server = self.fc.servers.list()[1]
        stack_name = 'software_config_s'
        (tmpl, stack) = self._setup_test_stack(stack_name)
        props = tmpl.t['Resources']['WebServer']['Properties']
        props['user_data_format'] = 'SOFTWARE_CONFIG'
        props['software_config_transport'] = 'POLL_SERVER_HEAT'
        if md is not None:
            tmpl.t['Resources']['WebServer']['Metadata'] = md
        self.server_props = props
        resource_defns = tmpl.resource_definitions(stack)
        server = servers.Server('WebServer',
                                resource_defns['WebServer'], stack)
        self.patchobject(server, 'store_external_ports')
        self.patchobject(self.fc.servers, 'create',
                         return_value=return_server)
        scheduler.TaskRunner(server.create)()
        self.assertEqual('1234', server._get_user_id())
        # Only the exact (user, resource) pair grants stack access.
        self.assertTrue(stack.access_allowed('1234', 'WebServer'))
        self.assertFalse(stack.access_allowed('45678', 'WebServer'))
        self.assertFalse(stack.access_allowed('4567', 'wWebServer'))
        return stack, server
def test_server_create_software_config_poll_heat(self):
stack, server = self._server_create_software_config_poll_heat()
self.assertEqual({
'os-collect-config': {
'heat': {
'auth_url': 'http://server.test:5000/v2.0',
'password': server.password,
'project_id': '8888',
'region_name': 'RegionOne',
'resource_name': 'WebServer',
'stack_id': 'software_config_s/%s' % stack.id,
'user_id': '1234'
},
'collectors': ['ec2', 'heat', 'local']
},
'deployments': []
}, server.metadata_get())
def test_server_create_software_config_poll_heat_metadata(self):
md = {'os-collect-config': {'polling_interval': 10}}
stack, server = self._server_create_software_config_poll_heat(md=md)
self.assertEqual({
'os-collect-config': {
'heat': {
'auth_url': 'http://server.test:5000/v2.0',
'password': server.password,
'project_id': '8888',
'region_name': 'RegionOne',
'resource_name': 'WebServer',
'stack_id': 'software_config_s/%s' % stack.id,
'user_id': '1234'
},
'collectors': ['ec2', 'heat', 'local'],
'polling_interval': 10
},
'deployments': []
}, server.metadata_get())
    def _server_create_software_config_poll_temp_url(self, md=None):
        """Create a server using the POLL_TEMP_URL transport.

        Verifies the metadata object is written to swift, that the GET
        and PUT temp URLs differ but share the same object path, and
        that _delete_temp_url removes object and container. Returns
        ``(metadata_url, server)``. Side effect: sets self.server_props.
        """
        self.patchobject(nova.NovaClientPlugin, 'client',
                         return_value=self.fc)
        return_server = self.fc.servers.list()[1]
        stack_name = 'software_config_s'
        (tmpl, stack) = self._setup_test_stack(stack_name)
        props = tmpl.t['Resources']['WebServer']['Properties']
        props['user_data_format'] = 'SOFTWARE_CONFIG'
        props['software_config_transport'] = 'POLL_TEMP_URL'
        if md is not None:
            tmpl.t['Resources']['WebServer']['Metadata'] = md
        self.server_props = props
        resource_defns = tmpl.resource_definitions(stack)
        server = servers.Server('WebServer',
                                resource_defns['WebServer'], stack)
        self.patchobject(server, 'store_external_ports')
        # Fake swift client with a pre-set temp-url signing key.
        sc = mock.Mock()
        sc.head_account.return_value = {
            'x-account-meta-temp-url-key': 'secrit'
        }
        sc.url = 'http://192.0.2.2'
        self.patchobject(swift.SwiftClientPlugin, '_create',
                         return_value=sc)
        self.patchobject(self.fc.servers, 'create',
                         return_value=return_server)
        scheduler.TaskRunner(server.create)()
        metadata_put_url = server.data().get('metadata_put_url')
        md = server.metadata_get()
        metadata_url = md['os-collect-config']['request']['metadata_url']
        self.assertNotEqual(metadata_url, metadata_put_url)
        container_name = server.physical_resource_name()
        object_name = server.data().get('metadata_object_name')
        self.assertTrue(uuidutils.is_uuid_like(object_name))
        # GET and PUT URLs differ (signatures) but point at one object.
        test_path = '/v1/AUTH_test_tenant_id/%s/%s' % (
            server.physical_resource_name(), object_name)
        self.assertEqual(test_path, urlparse.urlparse(metadata_put_url).path)
        self.assertEqual(test_path, urlparse.urlparse(metadata_url).path)
        sc.put_object.assert_called_once_with(
            container_name, object_name, jsonutils.dumps(md))
        # Empty container -> cleanup must also delete the container.
        sc.head_container.return_value = {'x-container-object-count': '0'}
        server._delete_temp_url()
        sc.delete_object.assert_called_once_with(container_name, object_name)
        sc.head_container.assert_called_once_with(container_name)
        sc.delete_container.assert_called_once_with(container_name)
        return metadata_url, server
def test_server_create_software_config_poll_temp_url(self):
metadata_url, server = (
self._server_create_software_config_poll_temp_url())
self.assertEqual({
'os-collect-config': {
'request': {
'metadata_url': metadata_url
},
'collectors': ['ec2', 'request', 'local']
},
'deployments': []
}, server.metadata_get())
def test_server_create_software_config_poll_temp_url_metadata(self):
md = {'os-collect-config': {'polling_interval': 10}}
metadata_url, server = (
self._server_create_software_config_poll_temp_url(md=md))
self.assertEqual({
'os-collect-config': {
'request': {
'metadata_url': metadata_url
},
'collectors': ['ec2', 'request', 'local'],
'polling_interval': 10
},
'deployments': []
}, server.metadata_get())
    def test_delete_swift_service_removed(self):
        """Server delete still succeeds after the swift endpoint is gone."""
        self.patchobject(nova.NovaClientPlugin, 'client',
                         return_value=self.fc)
        return_server = self.fc.servers.list()[1]
        stack_name = 'software_config_s'
        (tmpl, stack) = self._setup_test_stack(stack_name)
        props = tmpl.t['Resources']['WebServer']['Properties']
        props['user_data_format'] = 'SOFTWARE_CONFIG'
        props['software_config_transport'] = 'POLL_TEMP_URL'
        self.server_props = props
        resource_defns = tmpl.resource_definitions(stack)
        server = servers.Server('WebServer',
                                resource_defns['WebServer'], stack)
        self.patchobject(server, 'store_external_ports')
        # Fake swift client with a pre-set temp-url signing key.
        sc = mock.Mock()
        sc.head_account.return_value = {
            'x-account-meta-temp-url-key': 'secrit'
        }
        sc.url = 'http://192.0.2.2'
        self.patchobject(swift.SwiftClientPlugin, '_create',
                         return_value=sc)
        self.patchobject(self.fc.servers, 'create',
                         return_value=return_server)
        scheduler.TaskRunner(server.create)()
        self.assertEqual((server.CREATE, server.COMPLETE), server.state)
        # Simulate the object-store endpoint disappearing before delete.
        self.patchobject(server.client_plugin(),
                         'does_endpoint_exist',
                         return_value=False)
        # First get() finds the server, the second reports it gone.
        side_effect = [server, fakes_nova.fake_exception()]
        self.patchobject(self.fc.servers, 'get', side_effect=side_effect)
        scheduler.TaskRunner(server.delete)()
        self.assertEqual((server.DELETE, server.COMPLETE), server.state)
    def _prepare_for_server_create(self, md=None):
        """Build a ZAQAR_MESSAGE-transport server ready to be created.

        Patches nova client and create; optionally injects resource
        metadata ``md``. Returns the uncreated ``(server, stack)`` pair.
        Side effect: sets ``self.server_props``.
        """
        self.patchobject(nova.NovaClientPlugin, 'client',
                         return_value=self.fc)
        return_server = self.fc.servers.list()[1]
        stack_name = 'software_config_s'
        (tmpl, stack) = self._setup_test_stack(stack_name)
        props = tmpl.t['Resources']['WebServer']['Properties']
        props['user_data_format'] = 'SOFTWARE_CONFIG'
        props['software_config_transport'] = 'ZAQAR_MESSAGE'
        if md is not None:
            tmpl.t['Resources']['WebServer']['Metadata'] = md
        self.server_props = props
        resource_defns = tmpl.resource_definitions(stack)
        server = servers.Server('WebServer',
                                resource_defns['WebServer'], stack)
        self.patchobject(server, 'store_external_ports')
        self.patchobject(self.fc.servers, 'create',
                         return_value=return_server)
        return server, stack
    def _server_create_software_config_zaqar(self, md=None):
        """Create a server using the ZAQAR_MESSAGE transport.

        Verifies the metadata is posted to the recorded queue and that
        _delete_queue removes it. Returns ``(queue_id, server)``.
        """
        server, stack = self._prepare_for_server_create(md)
        zcc = self.patchobject(zaqar.ZaqarClientPlugin, 'create_for_tenant')
        zc = mock.Mock()
        zcc.return_value = zc
        queue = mock.Mock()
        zc.queue.return_value = queue
        scheduler.TaskRunner(server.create)()
        metadata_queue_id = server.data().get('metadata_queue_id')
        md = server.metadata_get()
        queue_id = md['os-collect-config']['zaqar']['queue_id']
        # The queue id stored in resource data matches the metadata.
        self.assertEqual(queue_id, metadata_queue_id)
        zc.queue.assert_called_once_with(queue_id)
        queue.post.assert_called_once_with(
            {'body': server.metadata_get(), 'ttl': 3600})
        # Reset so the delete path's queue() call can be asserted alone.
        zc.queue.reset_mock()
        server._delete_queue()
        zc.queue.assert_called_once_with(queue_id)
        zc.queue(queue_id).delete.assert_called_once_with()
        return queue_id, server
def test_server_create_software_config_zaqar(self):
queue_id, server = self._server_create_software_config_zaqar()
self.assertEqual({
'os-collect-config': {
'zaqar': {
'user_id': '1234',
'password': server.password,
'auth_url': 'http://server.test:5000/v2.0',
'project_id': '8888',
'queue_id': queue_id,
'region_name': 'RegionOne',
},
'collectors': ['ec2', 'zaqar', 'local']
},
'deployments': []
}, server.metadata_get())
def test_create_delete_no_zaqar_service(self):
zcc = self.patchobject(zaqar.ZaqarClientPlugin, 'create_for_tenant')
zcc.side_effect = ks_exceptions.EndpointNotFound
server, stack = self._prepare_for_server_create()
creator = scheduler.TaskRunner(server.create)
self.assertRaises(exception.ResourceFailure, creator)
self.assertEqual((server.CREATE, server.FAILED), server.state)
self.assertEqual({
'os-collect-config': {
'zaqar': {
'user_id': '1234',
'password': server.password,
'auth_url': 'http://server.test:5000/v2.0',
'project_id': '8888',
'queue_id': mock.ANY,
'region_name': 'RegionOne',
},
'collectors': ['ec2', 'zaqar', 'local']
},
'deployments': []
}, server.metadata_get())
scheduler.TaskRunner(server.delete)()
self.assertEqual((server.DELETE, server.COMPLETE), server.state)
def test_delete_zaqar_service_removed(self):
zcc = self.patchobject(zaqar.ZaqarClientPlugin, 'create_for_tenant')
zcc.return_value = mock.Mock()
server, stack = self._prepare_for_server_create()
scheduler.TaskRunner(server.create)()
self.assertEqual((server.CREATE, server.COMPLETE), server.state)
self.patchobject(server.client_plugin(),
'does_endpoint_exist',
return_value=False)
side_effect = [server, fakes_nova.fake_exception()]
self.patchobject(self.fc.servers, 'get', side_effect=side_effect)
scheduler.TaskRunner(server.delete)()
self.assertEqual((server.DELETE, server.COMPLETE), server.state)
def test_server_create_software_config_zaqar_metadata(self):
md = {'os-collect-config': {'polling_interval': 10}}
queue_id, server = self._server_create_software_config_zaqar(md=md)
self.assertEqual({
'os-collect-config': {
'zaqar': {
'user_id': '1234',
'password': server.password,
'auth_url': 'http://server.test:5000/v2.0',
'project_id': '8888',
'queue_id': queue_id,
'region_name': 'RegionOne',
},
'collectors': ['ec2', 'zaqar', 'local'],
'polling_interval': 10
},
'deployments': []
}, server.metadata_get())
def test_server_create_default_admin_pass(self):
return_server = self.fc.servers.list()[1]
return_server.adminPass = 'autogenerated'
stack_name = 'admin_pass_s'
(tmpl, stack) = self._setup_test_stack(stack_name)
resource_defns = tmpl.resource_definitions(stack)
server = servers.Server('WebServer',
resource_defns['WebServer'], stack)
self.patchobject(server, 'store_external_ports')
self.patchobject(nova.NovaClientPlugin, 'client',
return_value=self.fc)
mock_create = self.patchobject(self.fc.servers, 'create',
return_value=return_server)
scheduler.TaskRunner(server.create)()
_, kwargs = mock_create.call_args
self.assertIsNone(kwargs['admin_pass'])
self.assertEqual({}, kwargs['meta'])
def test_server_create_custom_admin_pass(self):
return_server = self.fc.servers.list()[1]
return_server.adminPass = 'foo'
stack_name = 'admin_pass_s'
(tmpl, stack) = self._setup_test_stack(stack_name)
tmpl.t['Resources']['WebServer']['Properties']['admin_pass'] = 'foo'
resource_defns = tmpl.resource_definitions(stack)
server = servers.Server('WebServer',
resource_defns['WebServer'], stack)
self.patchobject(server, 'store_external_ports')
self.patchobject(nova.NovaClientPlugin, 'client',
return_value=self.fc)
mock_create = self.patchobject(self.fc.servers, 'create',
return_value=return_server)
scheduler.TaskRunner(server.create)()
_, kwargs = mock_create.call_args
self.assertEqual('foo', kwargs['admin_pass'])
self.assertEqual({}, kwargs['meta'])
    def test_server_create_with_stack_scheduler_hints(self):
        """stack_scheduler_hints adds heat identity data to nova hints."""
        self.patchobject(nova.NovaClientPlugin, 'client',
                         return_value=self.fc)
        return_server = self.fc.servers.list()[1]
        return_server.id = '5678'
        sh.cfg.CONF.set_override('stack_scheduler_hints', True)
        # Unroll _create_test_server, to enable check
        # for addition of heat ids (stack id, resource name)
        stack_name = 'test_server_w_stack_sched_hints_s'
        server_name = 'server_w_stack_sched_hints'
        (t, stack) = self._get_test_template(stack_name, server_name)
        self.patchobject(stack, 'path_in_stack',
                         return_value=[('parent', stack.name)])
        resource_defns = t.resource_definitions(stack)
        server = servers.Server(server_name,
                                resource_defns['WebServer'], stack)
        self.patchobject(server, 'store_external_ports')
        # server.uuid is only available once the resource has been added.
        stack.add_resource(server)
        self.assertIsNotNone(server.uuid)
        mock_create = self.patchobject(self.fc.servers, 'create',
                                       return_value=return_server)
        shm = sh.SchedulerHintsMixin
        # Expected hints assembled from the stack/resource identity.
        scheduler_hints = {shm.HEAT_ROOT_STACK_ID: stack.root_stack_id(),
                           shm.HEAT_STACK_ID: stack.id,
                           shm.HEAT_STACK_NAME: stack.name,
                           shm.HEAT_PATH_IN_STACK: [','.join(['parent',
                                                              stack.name])],
                           shm.HEAT_RESOURCE_NAME: server.name,
                           shm.HEAT_RESOURCE_UUID: server.uuid}
        scheduler.TaskRunner(server.create)()
        _, kwargs = mock_create.call_args
        self.assertEqual(scheduler_hints, kwargs['scheduler_hints'])
        self.assertEqual({}, kwargs['meta'])
        # this makes sure the auto increment worked on server creation
        self.assertGreater(server.id, 0)
def test_check_maximum(self):
msg = 'test_check_maximum'
self.assertIsNone(servers.Server._check_maximum(1, 1, msg))
self.assertIsNone(servers.Server._check_maximum(1000, -1, msg))
error = self.assertRaises(exception.StackValidationFailed,
servers.Server._check_maximum,
2, 1, msg)
self.assertEqual(msg, str(error))
def test_server_validate(self):
stack_name = 'srv_val'
(tmpl, stack) = self._setup_test_stack(stack_name)
self.patchobject(nova.NovaClientPlugin, 'client',
return_value=self.fc)
resource_defns = tmpl.resource_definitions(stack)
server = servers.Server('server_create_image',
resource_defns['WebServer'], stack)
self.patchobject(glance.GlanceClientPlugin, 'get_image',
return_value=self.mock_image)
self.patchobject(nova.NovaClientPlugin, 'get_flavor',
return_value=self.mock_flavor)
self.assertIsNone(server.validate())
    def test_server_validate_with_bootable_vol(self):
        """Validation of servers that boot from a volume, not an image.

        A 'vda' mapping counts as a bootable device; 'vdb' alone does
        not unless an image (even an empty string) is also set.
        """
        stack_name = 'srv_val_bootvol'
        (tmpl, stack) = self._setup_test_stack(stack_name)
        self.patchobject(nova.NovaClientPlugin, 'client',
                         return_value=self.fc)
        self.stub_VolumeConstraint_validate()
        # create a server with bootable volume
        web_server = tmpl.t['Resources']['WebServer']
        del web_server['Properties']['image']
        # Factory closure: mutates the shared template's block device
        # mapping, then builds a fresh resource from it.
        def create_server(device_name):
            web_server['Properties']['block_device_mapping'] = [{
                "device_name": device_name,
                "volume_id": "5d7e27da-6703-4f7e-9f94-1f67abef734c",
                "delete_on_termination": False
            }]
            resource_defns = tmpl.resource_definitions(stack)
            server = servers.Server('server_with_bootable_volume',
                                    resource_defns['WebServer'], stack)
            return server
        server = create_server(u'vda')
        self.assertIsNone(server.validate())
        server = create_server('vda')
        self.assertIsNone(server.validate())
        server = create_server('vdb')
        ex = self.assertRaises(exception.StackValidationFailed,
                               server.validate)
        self.assertEqual('Neither image nor bootable volume is specified for '
                         'instance server_with_bootable_volume',
                         str(ex))
        # An explicit (even empty) image property satisfies validation.
        web_server['Properties']['image'] = ''
        server = create_server('vdb')
        self.assertIsNone(server.validate())
    def test_server_validate_with_nova_keypair_resource(self):
        """A key_name referencing an OS::Nova::KeyPair resource validates."""
        stack_name = 'srv_val_test'
        # Inline template: the server's key_name is a Ref to a KeyPair
        # resource declared in the same stack.
        nova_keypair_template = '''
{
  "AWSTemplateFormatVersion" : "2010-09-09",
  "Description" : "WordPress",
  "Resources" : {
    "WebServer": {
      "Type": "OS::Nova::Server",
      "Properties": {
        "image" : "F17-x86_64-gold",
        "flavor" : "m1.large",
        "key_name" : { "Ref": "SSHKey" },
        "user_data" : "wordpress"
      }
    },
    "SSHKey": {
      "Type": "OS::Nova::KeyPair",
      "Properties": {
        "name": "my_key"
      }
    }
  }
}
'''
        t = template_format.parse(nova_keypair_template)
        templ = template.Template(t)
        self.patchobject(nova.NovaClientPlugin, 'client',
                         return_value=self.fc)
        stack = parser.Stack(utils.dummy_context(), stack_name, templ,
                             stack_id=uuidutils.generate_uuid())
        resource_defns = templ.resource_definitions(stack)
        server = servers.Server('server_validate_test',
                                resource_defns['WebServer'], stack)
        self.patchobject(glance.GlanceClientPlugin, 'get_image',
                         return_value=self.mock_image)
        self.patchobject(nova.NovaClientPlugin, 'get_flavor',
                         return_value=self.mock_flavor)
        self.assertIsNone(server.validate())
def test_server_validate_with_invalid_ssh_key(self):
stack_name = 'srv_val_test'
(tmpl, stack) = self._setup_test_stack(stack_name)
self.patchobject(nova.NovaClientPlugin, 'client',
return_value=self.fc)
web_server = tmpl['Resources']['WebServer']
# Make the ssh key have an invalid name
web_server['Properties']['key_name'] = 'test2'
resource_defns = tmpl.resource_definitions(stack)
server = servers.Server('WebServer',
resource_defns['WebServer'], stack)
self.patchobject(glance.GlanceClientPlugin, 'get_image',
return_value=self.mock_image)
self.patchobject(nova.NovaClientPlugin, 'get_flavor',
return_value=self.mock_flavor)
error = self.assertRaises(exception.StackValidationFailed,
server.validate)
self.assertEqual(
"Property error: Resources.WebServer.Properties.key_name: "
"Error validating value 'test2': The Key (test2) could not "
"be found.", str(error))
def test_server_validate_software_config_invalid_meta(self):
stack_name = 'srv_val_test'
(tmpl, stack) = self._setup_test_stack(stack_name)
self.patchobject(nova.NovaClientPlugin, 'client',
return_value=self.fc)
web_server = tmpl['Resources']['WebServer']
web_server['Properties']['user_data_format'] = 'SOFTWARE_CONFIG'
web_server['Metadata'] = {'deployments': 'notallowed'}
resource_defns = tmpl.resource_definitions(stack)
server = servers.Server('WebServer',
resource_defns['WebServer'], stack)
self.patchobject(glance.GlanceClientPlugin, 'get_image',
return_value=self.mock_image)
self.patchobject(nova.NovaClientPlugin, 'get_flavor',
return_value=self.mock_flavor)
error = self.assertRaises(exception.StackValidationFailed,
server.validate)
self.assertEqual(
"deployments key not allowed in resource metadata "
"with user_data_format of SOFTWARE_CONFIG", str(error))
def test_server_validate_with_networks(self):
stack_name = 'srv_net'
(tmpl, stack) = self._setup_test_stack(stack_name)
self.stub_KeypairConstraint_validate()
network_name = 'public'
# create a server with 'uuid' and 'network' properties
tmpl['Resources']['WebServer']['Properties']['networks'] = (
[{'uuid': 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa',
'network': network_name}])
resource_defns = tmpl.resource_definitions(stack)
server = servers.Server('server_validate_with_networks',
resource_defns['WebServer'], stack)
self.stub_NetworkConstraint_validate()
ex = self.assertRaises(exception.StackValidationFailed,
server.validate)
self.assertIn("Cannot define the following properties at "
"the same time: networks.network, networks.uuid",
str(ex))
    def test_server_validate_with_network_empty_ref(self):
        """An empty-string 'network' reference passes validation.

        Empty values can appear when a get_resource reference has not yet
        been resolved, so validate() must tolerate them.
        """
        stack_name = 'srv_net'
        (tmpl, stack) = self._setup_test_stack(stack_name)
        self.patchobject(nova.NovaClientPlugin, 'client',
                         return_value=self.fc)
        tmpl['Resources']['WebServer']['Properties']['networks'] = (
            [{'network': ''}])
        resource_defns = tmpl.resource_definitions(stack)
        server = servers.Server('server_validate_with_networks',
                                resource_defns['WebServer'], stack)
        self.patchobject(glance.GlanceClientPlugin, 'get_image',
                         return_value=self.mock_image)
        self.patchobject(nova.NovaClientPlugin, 'get_flavor',
                         return_value=self.mock_flavor)
        self.patchobject(neutron.NeutronClientPlugin,
                         'find_resourceid_by_name_or_id')
        self.assertIsNone(server.validate())
    def test_server_validate_with_only_fixed_ip(self):
        """A networks item with only 'fixed_ip' fails validation.

        A fixed IP alone cannot identify where to attach the server; one of
        network, port, allocate_network or subnet must also be given.
        """
        stack_name = 'srv_net'
        (tmpl, stack) = self._setup_test_stack(stack_name)
        # networks entry deliberately omits any network/port identifier
        tmpl['Resources']['WebServer']['Properties']['networks'] = (
            [{'fixed_ip': '10.0.0.99'}])
        self.patchobject(nova.NovaClientPlugin, 'client',
                         return_value=self.fc)
        resource_defns = tmpl.resource_definitions(stack)
        server = servers.Server('server_validate_with_networks',
                                resource_defns['WebServer'], stack)
        self.patchobject(glance.GlanceClientPlugin, 'get_image',
                         return_value=self.mock_image)
        self.patchobject(nova.NovaClientPlugin, 'get_flavor',
                         return_value=self.mock_flavor)
        self.patchobject(neutron.NeutronClientPlugin,
                         'find_resourceid_by_name_or_id')
        ex = self.assertRaises(exception.StackValidationFailed,
                               server.validate)
        self.assertIn(_('One of the properties "network", "port", '
                        '"allocate_network" or "subnet" should be set '
                        'for the specified network of '
                        'server "%s".') % server.name,
                      str(ex))
    def test_server_validate_with_network_floating_ip(self):
        """'floating_ip' with only 'network' fails validation.

        Associating a floating IP needs a concrete port; with only a
        network reference the port cannot be retrieved.
        """
        stack_name = 'srv_net_floating_ip'
        (tmpl, stack) = self._setup_test_stack(stack_name)
        # floating_ip combined with a bare network reference is rejected
        tmpl['Resources']['WebServer']['Properties']['networks'] = (
            [{'floating_ip': '172.24.4.14',
              'network': '6b1688bb-18a0-4754-ab05-19daaedc5871'}])
        self.patchobject(nova.NovaClientPlugin, 'client',
                         return_value=self.fc)
        resource_defns = tmpl.resource_definitions(stack)
        server = servers.Server('server_validate_net_floating_ip',
                                resource_defns['WebServer'], stack)
        self.patchobject(glance.GlanceClientPlugin, 'get_image',
                         return_value=self.mock_image)
        self.patchobject(nova.NovaClientPlugin, 'get_flavor',
                         return_value=self.mock_flavor)
        self.patchobject(neutron.NeutronClientPlugin,
                         'find_resourceid_by_name_or_id')
        ex = self.assertRaises(exception.StackValidationFailed,
                               server.validate)
        self.assertIn(_('Property "floating_ip" is not supported if '
                        'only "network" is specified, because the '
                        'corresponding port can not be retrieved.'),
                      str(ex))
    def test_server_validate_with_networks_str_net(self):
        """'allocate_network' may not be combined with other network keys."""
        stack_name = 'srv_networks_str_nets'
        (tmpl, stack) = self._setup_test_stack(stack_name)
        # allocate_network is exclusive with explicit network references
        tmpl['Resources']['WebServer']['Properties']['networks'] = (
            [{'network': '6b1688bb-18a0-4754-ab05-19daaedc5871',
              'allocate_network': 'auto'}])
        self.patchobject(nova.NovaClientPlugin, 'client',
                         return_value=self.fc)
        resource_defns = tmpl.resource_definitions(stack)
        server = servers.Server('server_validate_net_list_str',
                                resource_defns['WebServer'], stack)
        self.patchobject(glance.GlanceClientPlugin, 'get_image',
                         return_value=self.mock_image)
        self.patchobject(nova.NovaClientPlugin, 'get_flavor',
                         return_value=self.mock_flavor)
        self.patchobject(neutron.NeutronClientPlugin,
                         'find_resourceid_by_name_or_id')
        ex = self.assertRaises(exception.StackValidationFailed,
                               server.validate)
        self.assertIn(_('Can not specify "allocate_network" with '
                        'other keys of networks at the same time.'),
                      str(ex))
    def test_server_validate_port_fixed_ip(self):
        """'port' and 'fixed_ip' together are rejected in both forms.

        Checked twice: once with the port coming from a template resource
        reference (with_port_template) and once with a literal port UUID.
        """
        stack_name = 'port_with_fixed_ip'
        (tmpl, stack) = self._setup_test_stack(stack_name,
                                               test_templ=with_port_template)
        resource_defns = tmpl.resource_definitions(stack)
        server = servers.Server('validate_port_reference_fixed_ip',
                                resource_defns['server'], stack)
        self.patchobject(glance.GlanceClientPlugin, 'get_image',
                         return_value=self.mock_image)
        self.patchobject(nova.NovaClientPlugin, 'get_flavor',
                         return_value=self.mock_flavor)
        error = self.assertRaises(exception.ResourcePropertyConflict,
                                  server.validate)
        self.assertEqual("Cannot define the following properties at the same "
                         "time: networks/fixed_ip, networks/port.",
                         str(error))
        # test if the 'port' doesn't reference with non-created resource
        tmpl['Resources']['server']['Properties']['networks'] = (
            [{'port': 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa',
              'fixed_ip': '10.0.0.99'}])
        resource_defns = tmpl.resource_definitions(stack)
        server = servers.Server('with_port_fixed_ip',
                                resource_defns['server'], stack)
        self.patchobject(neutron.NeutronClientPlugin,
                         'find_resourceid_by_name_or_id')
        error = self.assertRaises(exception.ResourcePropertyConflict,
                                  server.validate)
        self.assertEqual("Cannot define the following properties at the same "
                         "time: networks/fixed_ip, networks/port.",
                         str(error))
def test_server_validate_with_uuid_fixed_ip(self):
stack_name = 'srv_net'
(tmpl, stack) = self._setup_test_stack(stack_name)
tmpl['Resources']['WebServer']['Properties']['networks'] = (
[{'uuid': 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa',
'fixed_ip': '10.0.0.99'}])
self.patchobject(nova.NovaClientPlugin, 'client',
return_value=self.fc)
resource_defns = tmpl.resource_definitions(stack)
server = servers.Server('server_validate_with_networks',
resource_defns['WebServer'], stack)
self.patchobject(neutron.NeutronClientPlugin,
'find_resourceid_by_name_or_id')
self.patchobject(glance.GlanceClientPlugin, 'get_image',
return_value=self.mock_image)
self.patchobject(nova.NovaClientPlugin, 'get_flavor',
return_value=self.mock_flavor)
self.assertIsNone(server.validate())
    def test_server_validate_with_network_fixed_ip(self):
        """A networks item with 'network' plus 'fixed_ip' validates cleanly."""
        stack_name = 'srv_net'
        (tmpl, stack) = self._setup_test_stack(stack_name)
        tmpl['Resources']['WebServer']['Properties']['networks'] = (
            [{'network': 'public',
              'fixed_ip': '10.0.0.99'}])
        self.patchobject(nova.NovaClientPlugin, 'client',
                         return_value=self.fc)
        resource_defns = tmpl.resource_definitions(stack)
        server = servers.Server('server_validate_with_networks',
                                resource_defns['WebServer'], stack)
        self.patchobject(glance.GlanceClientPlugin, 'get_image',
                         return_value=self.mock_image)
        self.patchobject(nova.NovaClientPlugin, 'get_flavor',
                         return_value=self.mock_flavor)
        self.patchobject(neutron.NeutronClientPlugin,
                         'find_resourceid_by_name_or_id')
        self.assertIsNone(server.validate())
    def test_server_validate_net_security_groups(self):
        """security_groups may not be combined with network 'port' entries.

        If network 'ports' are assigned, security groups must not be,
        because they would be silently ignored.
        """
        stack_name = 'srv_net_secgroups'
        (tmpl, stack) = self._setup_test_stack(stack_name)
        tmpl['Resources']['WebServer']['Properties']['networks'] = [
            {'port': ''}]
        tmpl['Resources']['WebServer']['Properties'][
            'security_groups'] = ['my_security_group']
        self.patchobject(nova.NovaClientPlugin, 'client',
                         return_value=self.fc)
        resource_defns = tmpl.resource_definitions(stack)
        server = servers.Server('server_validate_net_security_groups',
                                resource_defns['WebServer'], stack)
        self.patchobject(glance.GlanceClientPlugin, 'get_image',
                         return_value=self.mock_image)
        self.patchobject(nova.NovaClientPlugin, 'get_flavor',
                         return_value=self.mock_flavor)
        self.patchobject(neutron.NeutronClientPlugin,
                         'find_resourceid_by_name_or_id')
        error = self.assertRaises(exception.ResourcePropertyConflict,
                                  server.validate)
        self.assertEqual("Cannot define the following properties at the same "
                         "time: security_groups, networks/port.",
                         str(error))
def test_server_delete(self):
return_server = self.fc.servers.list()[1]
server = self._create_test_server(return_server,
'create_delete')
server.resource_id = '1234'
# this makes sure the auto increment worked on server creation
self.assertGreater(server.id, 0)
side_effect = [server, fakes_nova.fake_exception()]
self.patchobject(self.fc.servers, 'get', side_effect=side_effect)
scheduler.TaskRunner(server.delete)()
self.assertEqual((server.DELETE, server.COMPLETE), server.state)
def test_server_delete_notfound(self):
return_server = self.fc.servers.list()[1]
server = self._create_test_server(return_server,
'create_delete2')
server.resource_id = '1234'
# this makes sure the auto increment worked on server creation
self.assertGreater(server.id, 0)
self.patchobject(self.fc.client, 'delete_servers_1234',
side_effect=fakes_nova.fake_exception())
scheduler.TaskRunner(server.delete)()
self.assertEqual((server.DELETE, server.COMPLETE), server.state)
    def test_server_delete_error(self):
        """A server going to ERROR during delete fails the resource."""
        return_server = self.fc.servers.list()[1]
        server = self._create_test_server(return_server,
                                          'create_delete')
        server.resource_id = '1234'
        # this makes sure the auto increment worked on server creation
        self.assertGreater(server.id, 0)

        def make_error(*args):
            return_server.status = "ERROR"
            return return_server

        # NOTE(review): make_error() is invoked eagerly while building the
        # side_effect list, and every element is the same object, so the
        # status is already "ERROR" for all three polls — TODO confirm this
        # ordering is intentional.
        self.patchobject(self.fc.servers, 'get',
                         side_effect=[return_server, return_server,
                                      make_error()])
        resf = self.assertRaises(exception.ResourceFailure,
                                 scheduler.TaskRunner(server.delete))
        self.assertIn("Server %s delete failed" % return_server.name,
                      str(resf))
    def test_server_delete_error_task_in_progress(self):
        """Delete keeps polling while the ERROR server is still deleting.

        test server in 'ERROR', but task state in nova is 'deleting':
        the first poll must be tolerated; failure is only raised once the
        task state clears with the server still in ERROR.
        """
        return_server = self.fc.servers.list()[1]
        server = self._create_test_server(return_server,
                                          'create_delete')
        server.resource_id = '1234'

        def make_error(*args):
            # ERROR but nova is still working on the delete
            return_server.status = "ERROR"
            setattr(return_server, 'OS-EXT-STS:task_state', 'deleting')
            return return_server

        def make_error_done(*args):
            # ERROR with no task in progress -> delete has truly failed
            return_server.status = "ERROR"
            setattr(return_server, 'OS-EXT-STS:task_state', None)
            return return_server

        self.patchobject(self.fc.servers, 'get',
                         side_effect=[make_error(),
                                      make_error_done()])
        resf = self.assertRaises(exception.ResourceFailure,
                                 scheduler.TaskRunner(server.delete))
        self.assertIn("Server %s delete failed" % return_server.name,
                      str(resf))
    def test_server_soft_delete(self):
        """A server passing through SOFT_DELETED still deletes cleanly."""
        return_server = self.fc.servers.list()[1]
        server = self._create_test_server(return_server,
                                          'create_delete')
        server.resource_id = '1234'
        # this makes sure the auto increment worked on server creation
        self.assertGreater(server.id, 0)
        # status sequence: current -> current -> SOFT_DELETED -> DELETED
        self.patchobject(self.fc.servers, 'get',
                         side_effect=ServerStatus(return_server,
                                                  [return_server.status,
                                                   return_server.status,
                                                   "SOFT_DELETED",
                                                   "DELETED"]))
        scheduler.TaskRunner(server.delete)()
        self.assertEqual((server.DELETE, server.COMPLETE), server.state)
    def test_server_update_metadata(self):
        """Heat resource metadata updates via stack update and refresh.

        A stack update applies new Metadata; a subsequent change to the
        stored definition only takes effect after metadata_update().
        """
        return_server = self.fc.servers.list()[1]
        server = self._create_test_server(return_server,
                                          'md_update')

        ud_tmpl = self._get_test_template('update_stack')[0]
        ud_tmpl.t['Resources']['WebServer']['Metadata'] = {'test': 123}
        resource_defns = ud_tmpl.resource_definitions(server.stack)
        scheduler.TaskRunner(server.update, resource_defns['WebServer'])()
        self.assertEqual({'test': 123}, server.metadata_get())

        ud_tmpl.t['Resources']['WebServer']['Metadata'] = {'test': 456}
        server.t = ud_tmpl.resource_definitions(server.stack)['WebServer']
        # changing server.t alone does not refresh the stored metadata
        self.assertEqual({'test': 123}, server.metadata_get())

        server.metadata_update()
        self.assertEqual({'test': 456}, server.metadata_get())
    @mock.patch.object(heat_plugin.HeatClientPlugin, 'url_for')
    def test_server_update_metadata_software_config(self, fake_url):
        """SOFTWARE_CONFIG metadata merges user Metadata on update.

        The os-collect-config section generated at create time must be
        preserved and the user's Metadata merged in alongside it.
        """
        fake_url.return_value = 'http://ip:8000/v1'
        server, ud_tmpl = self._server_create_software_config(
            stack_name='update_meta_sc', ret_tmpl=True)

        # metadata generated for the CFN-based os-collect-config transport
        expected_md = {
            'os-collect-config': {
                'cfn': {
                    'access_key_id': '4567',
                    'metadata_url': 'http://ip:8000/v1/',
                    'path': 'WebServer.Metadata',
                    'secret_access_key': '8901',
                    'stack_name': 'update_meta_sc'
                },
                'collectors': ['ec2', 'cfn', 'local']
            },
            'deployments': []}
        self.assertEqual(expected_md, server.metadata_get())

        ud_tmpl.t['Resources']['WebServer']['Metadata'] = {'test': 123}
        resource_defns = ud_tmpl.resource_definitions(server.stack)
        scheduler.TaskRunner(server.update, resource_defns['WebServer'])()
        expected_md.update({'test': 123})
        self.assertEqual(expected_md, server.metadata_get())
        server.metadata_update()
        self.assertEqual(expected_md, server.metadata_get())
    @mock.patch.object(heat_plugin.HeatClientPlugin, 'url_for')
    def test_server_update_metadata_software_config_merge(self, fake_url):
        """Deployment metadata merges user-supplied os-collect-config keys.

        A user-provided polling_interval must survive alongside the
        Heat-generated cfn transport section.
        """
        md = {'os-collect-config': {'polling_interval': 10}}
        fake_url.return_value = 'http://ip/heat-api-cfn/v1'
        server, ud_tmpl = self._server_create_software_config(
            stack_name='update_meta_sc', ret_tmpl=True,
            md=md)

        expected_md = {
            'os-collect-config': {
                'cfn': {
                    'access_key_id': '4567',
                    'metadata_url': 'http://ip/heat-api-cfn/v1/',
                    'path': 'WebServer.Metadata',
                    'secret_access_key': '8901',
                    'stack_name': 'update_meta_sc'
                },
                'collectors': ['ec2', 'cfn', 'local'],
                # user-supplied key merged with the generated section
                'polling_interval': 10
            },
            'deployments': []}
        self.assertEqual(expected_md, server.metadata_get())

        ud_tmpl.t['Resources']['WebServer']['Metadata'] = {'test': 123}
        resource_defns = ud_tmpl.resource_definitions(server.stack)
        scheduler.TaskRunner(server.update, resource_defns['WebServer'])()
        expected_md.update({'test': 123})
        self.assertEqual(expected_md, server.metadata_get())
        server.metadata_update()
        self.assertEqual(expected_md, server.metadata_get())
    @mock.patch.object(heat_plugin.HeatClientPlugin, 'url_for')
    def test_server_update_software_config_transport(self, fake_url):
        """Switching transport CFN -> POLL_TEMP_URL rewrites the metadata.

        After the update the cfn section is nulled out and a 'request'
        section pointing at a Swift temp URL replaces it.
        """
        md = {'os-collect-config': {'polling_interval': 10}}
        fake_url.return_value = 'http://ip/heat-api-cfn/v1'
        server = self._server_create_software_config(
            stack_name='update_meta_sc', md=md)

        # metadata as generated for the initial CFN transport
        expected_md = {
            'os-collect-config': {
                'cfn': {
                    'access_key_id': '4567',
                    'metadata_url': 'http://ip/heat-api-cfn/v1/',
                    'path': 'WebServer.Metadata',
                    'secret_access_key': '8901',
                    'stack_name': 'update_meta_sc'
                },
                'collectors': ['ec2', 'cfn', 'local'],
                'polling_interval': 10
            },
            'deployments': []}
        self.assertEqual(expected_md, server.metadata_get())

        # fake Swift client for the temp-URL transport
        sc = mock.Mock()
        sc.head_account.return_value = {
            'x-account-meta-temp-url-key': 'secrit'
        }
        sc.url = 'http://192.0.2.2'

        self.patchobject(swift.SwiftClientPlugin, '_create',
                         return_value=sc)
        update_props = self.server_props.copy()
        update_props['software_config_transport'] = 'POLL_TEMP_URL'
        update_template = server.t.freeze(properties=update_props)

        self.rpc_client = mock.MagicMock()
        server._rpc_client = self.rpc_client
        self.rpc_client.create_software_config.return_value = None
        scheduler.TaskRunner(server.update, update_template)()
        self.assertEqual((server.UPDATE, server.COMPLETE), server.state)

        md = server.metadata_get()
        metadata_url = md['os-collect-config']['request']['metadata_url']
        self.assertTrue(metadata_url.startswith(
            'http://192.0.2.2/v1/AUTH_test_tenant_id/'))

        # old cfn keys are nulled out; the new request transport takes over
        expected_md = {
            'os-collect-config': {
                'cfn': {
                    'access_key_id': None,
                    'metadata_url': None,
                    'path': None,
                    'secret_access_key': None,
                    'stack_name': None,
                },
                'request': {
                    'metadata_url': 'the_url',
                },
                'collectors': ['ec2', 'request', 'local'],
                'polling_interval': 10
            },
            'deployments': []}
        # normalize the random temp URL before comparing
        md['os-collect-config']['request']['metadata_url'] = 'the_url'
        self.assertEqual(expected_md, server.metadata_get())
    def test_update_transport_heat_to_zaqar(self):
        """Switching transport POLL_SERVER_HEAT -> ZAQAR_MESSAGE.

        The heat section is nulled out, a zaqar section is created with a
        queue id, and the stack-user password is preserved across the
        update.
        """
        stack, server = self._server_create_software_config_poll_heat()
        password = server.password
        self.assertEqual({
            'os-collect-config': {
                'heat': {
                    'auth_url': 'http://server.test:5000/v2.0',
                    'password': password,
                    'project_id': '8888',
                    'region_name': 'RegionOne',
                    'resource_name': 'WebServer',
                    'stack_id': 'software_config_s/%s' % stack.id,
                    'user_id': '1234'
                },
                'collectors': ['ec2', 'heat', 'local'],
            },
            'deployments': []
        }, server.metadata_get())

        update_props = self.server_props.copy()
        update_props['software_config_transport'] = 'ZAQAR_MESSAGE'
        update_template = server.t.freeze(properties=update_props)

        # fake Zaqar client so no messaging service is needed
        zcc = self.patchobject(zaqar.ZaqarClientPlugin, 'create_for_tenant')
        zc = mock.Mock()
        zcc.return_value = zc
        queue = mock.Mock()
        zc.queue.return_value = queue

        self.rpc_client = mock.MagicMock()
        server._rpc_client = self.rpc_client
        self.rpc_client.create_software_config.return_value = None
        scheduler.TaskRunner(server.update, update_template)()
        self.assertEqual((server.UPDATE, server.COMPLETE), server.state)

        # the credential password must not be regenerated by the update
        password_1 = server.password
        self.assertEqual(password, password_1)
        self.assertEqual({
            'os-collect-config': {
                'zaqar': {
                    'user_id': '1234',
                    'password': password_1,
                    'auth_url': 'http://server.test:5000/v2.0',
                    'project_id': '8888',
                    'queue_id': server.data().get('metadata_queue_id'),
                    'region_name': 'RegionOne',
                },
                'heat': {
                    'auth_url': None,
                    'password': None,
                    'project_id': None,
                    'region_name': None,
                    'resource_name': None,
                    'stack_id': None,
                    'user_id': None
                },
                'collectors': ['ec2', 'zaqar', 'local']
            },
            'deployments': []
        }, server.metadata_get())
def test_server_update_nova_metadata(self):
return_server = self.fc.servers.list()[1]
server = self._create_test_server(return_server,
'md_update')
new_meta = {'test': 123}
self.patchobject(self.fc.servers, 'get',
return_value=return_server)
set_meta_mock = self.patchobject(self.fc.servers, 'set_meta')
update_props = self.server_props.copy()
update_props['metadata'] = new_meta
update_template = server.t.freeze(properties=update_props)
scheduler.TaskRunner(server.update, update_template)()
self.assertEqual((server.UPDATE, server.COMPLETE), server.state)
set_meta_mock.assert_called_with(
return_server, server.client_plugin().meta_serialize(new_meta))
    def test_server_update_nova_metadata_complex(self):
        """Test that complex metadata values are correctly serialized to JSON.

        Test that complex metadata values are correctly serialized to JSON when
        sent to Nova.
        """
        return_server = self.fc.servers.list()[1]
        server = self._create_test_server(return_server,
                                          'md_update')
        self.patchobject(self.fc.servers, 'get',
                         return_value=return_server)
        # nested dict value must be JSON-serialized before hitting Nova
        new_meta = {'test': {'testkey': 'testvalue'}}
        set_meta_mock = self.patchobject(self.fc.servers, 'set_meta')
        # If we're going to call set_meta() directly we
        # need to handle the serialization ourselves.
        update_props = self.server_props.copy()
        update_props['metadata'] = new_meta
        update_template = server.t.freeze(properties=update_props)
        scheduler.TaskRunner(server.update, update_template)()
        self.assertEqual((server.UPDATE, server.COMPLETE), server.state)
        set_meta_mock.assert_called_with(
            return_server, server.client_plugin().meta_serialize(new_meta))
    def test_server_update_nova_metadata_with_delete(self):
        """Replacing nova metadata deletes keys absent from the new set."""
        return_server = self.fc.servers.list()[1]
        server = self._create_test_server(return_server,
                                          'md_update')

        # part one, add some metadata
        new_meta = {'test': '123', 'this': 'that'}
        self.patchobject(self.fc.servers, 'get',
                         return_value=return_server)
        set_meta_mock = self.patchobject(self.fc.servers, 'set_meta')
        update_props = self.server_props.copy()
        update_props['metadata'] = new_meta
        update_template = server.t.freeze(properties=update_props)
        scheduler.TaskRunner(server.update, update_template)()
        self.assertEqual((server.UPDATE, server.COMPLETE), server.state)
        set_meta_mock.assert_called_with(
            return_server, server.client_plugin().meta_serialize(new_meta))

        # part two change the metadata (test removing the old key)
        new_meta = {'new_key': 'yeah'}

        # new fake with the correct metadata
        server.resource_id = '56789'
        new_return_server = self.fc.servers.list()[5]
        self.patchobject(self.fc.servers, 'get',
                         return_value=new_return_server)
        del_meta_mock = self.patchobject(self.fc.servers, 'delete_meta')
        update_props = self.server_props.copy()
        update_props['metadata'] = new_meta
        update_template = server.t.freeze(properties=update_props)
        scheduler.TaskRunner(server.update, update_template)()
        self.assertEqual((server.UPDATE, server.COMPLETE), server.state)
        # old keys are removed, then the new set is written
        del_meta_mock.assert_called_with(new_return_server,
                                         ['test', 'this'])
        set_meta_mock.assert_called_with(
            new_return_server, server.client_plugin().meta_serialize(new_meta))
def test_server_update_server_name(self):
"""Server.handle_update supports changing the name."""
return_server = self.fc.servers.list()[1]
return_server.id = '5678'
server = self._create_test_server(return_server,
'srv_update')
new_name = 'new_name'
update_props = self.server_props.copy()
update_props['name'] = new_name
update_template = server.t.freeze(properties=update_props)
self.patchobject(self.fc.servers, 'get',
return_value=return_server)
self.patchobject(return_server, 'update')
return_server.update(new_name).AndReturn(None)
scheduler.TaskRunner(server.update, update_template)()
self.assertEqual((server.UPDATE, server.COMPLETE), server.state)
def test_server_update_server_admin_password(self):
"""Server.handle_update supports changing the admin password."""
return_server = self.fc.servers.list()[1]
return_server.id = '5678'
server = self._create_test_server(return_server,
'change_password')
new_password = 'new_password'
update_props = self.server_props.copy()
update_props['admin_pass'] = new_password
update_template = server.t.freeze(properties=update_props)
self.patchobject(self.fc.servers, 'get', return_value=return_server)
self.patchobject(return_server, 'change_password')
scheduler.TaskRunner(server.update, update_template)()
self.assertEqual((server.UPDATE, server.COMPLETE), server.state)
return_server.change_password.assert_called_once_with(new_password)
self.assertEqual(1, return_server.change_password.call_count)
    def test_server_get_live_state(self):
        """get_live_state reconstructs properties from Nova/Neutron data.

        Builds three fake interfaces/ports (two on 'public', one on
        'private') and checks that the observed networks, flavor, image,
        name and metadata match the live resources.  The entry that matches
        the template-declared network/fixed_ip pair reports port=None.
        """
        return_server = self.fc.servers.list()[1]
        return_server.id = '5678'
        self.patchobject(nova.NovaClientPlugin, 'is_version_supported',
                         return_value=False)
        server = self._create_test_server(return_server,
                                          'get_live_state_stack')
        server.properties.data['networks'] = [{'network': 'public_id',
                                               'fixed_ip': '5.6.9.8'}]

        public_net = dict(id='public_id', name='public')
        private_net = dict(id='private_id', name='private')
        # interface matching the declared network/fixed_ip pair
        iface0 = create_fake_iface(port='aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa',
                                   net='public',
                                   ip='5.6.9.8',
                                   mac='fa:16:3e:8c:33:aa')
        port0 = dict(id=iface0.port_id,
                     network_id=iface0.net_id,
                     mac_address=iface0.mac_addr,
                     fixed_ips=iface0.fixed_ips)
        iface1 = create_fake_iface(port='bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb',
                                   net='public',
                                   ip='4.5.6.7',
                                   mac='fa:16:3e:8c:22:aa')
        port1 = dict(id=iface1.port_id,
                     network_id=iface1.net_id,
                     mac_address=iface1.mac_addr,
                     fixed_ips=iface1.fixed_ips)
        iface2 = create_fake_iface(port='cccccccc-cccc-cccc-cccc-cccccccccccc',
                                   net='private',
                                   ip='10.13.12.13',
                                   mac='fa:16:3e:8c:44:cc')
        port2 = dict(id=iface2.port_id,
                     network_id=iface2.net_id,
                     mac_address=iface2.mac_addr,
                     fixed_ips=iface2.fixed_ips)
        self.patchobject(return_server, 'interface_list',
                         return_value=[iface0, iface1, iface2])
        self.patchobject(neutronclient.Client, 'list_ports',
                         return_value={'ports': [port0, port1, port2]})
        # one list_networks call per interface, in interface order
        self.patchobject(neutronclient.Client, 'list_networks',
                         side_effect=[{'networks': [public_net]},
                                      {'networks': [public_net]},
                                      {'networks': [private_net]}])
        self.patchobject(neutronclient.Client, 'list_floatingips',
                         return_value={'floatingips': []})
        self.patchobject(neutron.NeutronClientPlugin,
                         'find_resourceid_by_name_or_id',
                         side_effect=['public_id',
                                      'private_id'])

        reality = server.get_live_state(server.properties.data)
        expected = {'flavor': '1',
                    'image': '2',
                    'name': 'sample-server2',
                    'networks': [
                        {'fixed_ip': '4.5.6.7',
                         'network': 'public',
                         'port': 'bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb'},
                        {'fixed_ip': '5.6.9.8',
                         'network': 'public',
                         'port': None},
                        {'fixed_ip': '10.13.12.13',
                         'network': 'private',
                         'port': 'cccccccc-cccc-cccc-cccc-cccccccccccc'}],
                    'metadata': {}}
        self.assertEqual(set(expected.keys()), set(reality.keys()))
        # networks may come back in any order, so compare them pairwise
        expected_nets = expected.pop('networks')
        reality_nets = reality.pop('networks')
        for net in reality_nets:
            for exp_net in expected_nets:
                if net == exp_net:
                    for key in net:
                        self.assertEqual(exp_net[key], net[key])
                    break
        for key in reality.keys():
            self.assertEqual(expected[key], reality[key])
    def test_server_update_server_flavor(self):
        """Tests update server changing the flavor.

        Server.handle_update supports changing the flavor, and makes
        the change making a resize API call against Nova.
        """
        return_server = self.fc.servers.list()[1]
        return_server.id = '1234'
        server = self._create_test_server(return_server,
                                          'srv_update')

        update_props = self.server_props.copy()
        update_props['flavor'] = 'm1.small'
        update_template = server.t.freeze(properties=update_props)

        # status transitions Heat polls through during a resize
        self.patchobject(self.fc.servers, 'get',
                         side_effect=ServerStatus(return_server,
                                                  ['ACTIVE',
                                                   'RESIZE',
                                                   'VERIFY_RESIZE',
                                                   'VERIFY_RESIZE',
                                                   'ACTIVE']))
        mock_post = self.patchobject(self.fc.client,
                                     'post_servers_1234_action',
                                     return_value=(202, None))

        scheduler.TaskRunner(server.update, update_template)()
        self.assertEqual((server.UPDATE, server.COMPLETE), server.state)
        # resize is requested, then confirmed once VERIFY_RESIZE is reached
        mock_post.assert_has_calls([
            mock.call(body={'resize': {'flavorRef': '2'}}),
            mock.call(body={'confirmResize': None}),
        ])
    def test_server_update_server_flavor_failed(self):
        """Check raising exception due to resize call failing.

        If the status after a resize is not VERIFY_RESIZE, it means the resize
        call failed, so we raise an explicit error.
        """
        return_server = self.fc.servers.list()[1]
        return_server.id = '1234'
        server = self._create_test_server(return_server,
                                          'srv_update2')

        update_props = self.server_props.copy()
        update_props['flavor'] = 'm1.small'
        update_template = server.t.freeze(properties=update_props)

        # server goes to ERROR instead of VERIFY_RESIZE
        self.patchobject(self.fc.servers, 'get',
                         side_effect=ServerStatus(return_server,
                                                  ['RESIZE', 'ERROR']))
        mock_post = self.patchobject(self.fc.client,
                                     'post_servers_1234_action',
                                     return_value=(202, None))
        updater = scheduler.TaskRunner(server.update, update_template)
        error = self.assertRaises(exception.ResourceFailure, updater)
        self.assertEqual(
            "Error: resources.srv_update2: Resizing to '2' failed, "
            "status 'ERROR'", str(error))
        self.assertEqual((server.UPDATE, server.FAILED), server.state)
        # only the resize was attempted; no confirmResize follows a failure
        mock_post.assert_called_once_with(body={'resize': {'flavorRef': '2'}})
    def test_server_update_flavor_resize_has_not_started(self):
        """Test update of server flavor if server resize has not started.

        Server resize is asynchronous operation in nova. So when heat is
        requesting resize and polling the server then the server may still be
        in ACTIVE state. So we need to wait some amount of time till the server
        status becomes RESIZE.
        """
        # create the server for resizing
        server = self.fc.servers.list()[1]
        server.id = '1234'
        server_resource = self._create_test_server(server,
                                                   'resize_server')
        # prepare template with resized server
        update_props = self.server_props.copy()
        update_props['flavor'] = 'm1.small'
        update_template = server_resource.t.freeze(properties=update_props)

        # define status transition when server resize
        # ACTIVE(initial) -> ACTIVE -> RESIZE -> VERIFY_RESIZE
        self.patchobject(self.fc.servers, 'get',
                         side_effect=ServerStatus(server,
                                                  ['ACTIVE',
                                                   'ACTIVE',
                                                   'RESIZE',
                                                   'VERIFY_RESIZE',
                                                   'VERIFY_RESIZE',
                                                   'ACTIVE']))
        mock_post = self.patchobject(self.fc.client,
                                     'post_servers_1234_action',
                                     return_value=(202, None))

        # check that server resize has finished correctly
        scheduler.TaskRunner(server_resource.update, update_template)()
        self.assertEqual((server_resource.UPDATE, server_resource.COMPLETE),
                         server_resource.state)
        mock_post.assert_has_calls([
            mock.call(body={'resize': {'flavorRef': '2'}}),
            mock.call(body={'confirmResize': None}),
        ])
@mock.patch.object(servers.Server, 'prepare_for_replace')
def test_server_update_server_flavor_replace(self, mock_replace):
stack_name = 'update_flvrep'
(tmpl, stack) = self._setup_test_stack(stack_name)
server_props = tmpl['Resources']['WebServer']['Properties']
server_props['flavor_update_policy'] = 'REPLACE'
resource_defns = tmpl.resource_definitions(stack)
server = servers.Server('server_server_update_flavor_replace',
resource_defns['WebServer'], stack)
update_props = server_props.copy()
update_props['flavor'] = 'm1.small'
update_template = server.t.freeze(properties=update_props)
updater = scheduler.TaskRunner(server.update, update_template)
self.assertRaises(resource.UpdateReplace, updater)
    @mock.patch.object(servers.Server, 'prepare_for_replace')
    def test_server_update_server_flavor_policy_update(self, mock_replace):
        """A flavor_update_policy changed in the same update is honoured."""
        stack_name = 'update_flvpol'
        (tmpl, stack) = self._setup_test_stack(stack_name)

        resource_defns = tmpl.resource_definitions(stack)
        server = servers.Server('server_server_update_flavor_replace',
                                resource_defns['WebServer'], stack)

        update_props = tmpl.t['Resources']['WebServer']['Properties'].copy()
        # confirm that when flavor_update_policy is changed during
        # the update then the updated policy is followed for a flavor
        # update
        update_props['flavor_update_policy'] = 'REPLACE'
        update_props['flavor'] = 'm1.small'
        update_template = server.t.freeze(properties=update_props)

        updater = scheduler.TaskRunner(server.update, update_template)
        self.assertRaises(resource.UpdateReplace, updater)
    @mock.patch.object(servers.Server, 'prepare_for_replace')
    @mock.patch.object(nova.NovaClientPlugin, 'client')
    def test_server_update_server_userdata_replace(self, mock_create,
                                                   mock_replace):
        """Changing user_data with the default policy replaces the server."""
        stack_name = 'update_udatrep'
        (tmpl, stack) = self._setup_test_stack(stack_name)

        resource_defns = tmpl.resource_definitions(stack)
        server = servers.Server('server_update_userdata_replace',
                                resource_defns['WebServer'], stack)

        update_props = tmpl.t['Resources']['WebServer']['Properties'].copy()
        update_props['user_data'] = 'changed'
        update_template = server.t.freeze(properties=update_props)
        # mark the resource as created so the update path is exercised
        server.action = server.CREATE
        updater = scheduler.TaskRunner(server.update, update_template)
        self.assertRaises(resource.UpdateReplace, updater)
    @mock.patch.object(nova.NovaClientPlugin, 'client')
    def test_update_failed_server_not_replace(self, mock_create):
        """A FAILED resource whose Nova server is ACTIVE updates in place."""
        stack_name = 'update_failed_server_not_replace'
        (tmpl, stack) = self._setup_test_stack(stack_name)
        resource_defns = tmpl.resource_definitions(stack)
        server = servers.Server('failed_not_replace',
                                resource_defns['WebServer'], stack)
        update_props = tmpl.t['Resources']['WebServer']['Properties'].copy()
        update_props['name'] = 'my_server'
        update_template = server.t.freeze(properties=update_props)
        # resource is in CREATE FAILED, but the backing server is healthy
        server.action = server.CREATE
        server.status = server.FAILED
        server.resource_id = '6a953104-b874-44d2-a29a-26e7c367dc5c'
        nova_server = self.fc.servers.list()[1]
        nova_server.status = 'ACTIVE'
        server.client = mock.Mock()
        server.client().servers.get.return_value = nova_server
        scheduler.TaskRunner(server.update, update_template)()
        self.assertEqual((server.UPDATE, server.COMPLETE), server.state)
    @mock.patch.object(servers.Server, 'prepare_for_replace')
    @mock.patch.object(nova.NovaClientPlugin, 'client')
    def test_server_update_server_userdata_ignore(self, mock_create,
                                                  mock_replace):
        """user_data_update_policy IGNORE completes without replacement."""
        stack_name = 'update_udatignore'
        (tmpl, stack) = self._setup_test_stack(stack_name)
        self.patchobject(servers.Server, 'check_update_complete',
                         return_value=True)
        resource_defns = tmpl.resource_definitions(stack)
        server = servers.Server('server_update_userdata_ignore',
                                resource_defns['WebServer'], stack)

        update_props = tmpl.t['Resources']['WebServer']['Properties'].copy()
        update_props['user_data'] = 'changed'
        # IGNORE means a user_data change neither replaces nor rebuilds
        update_props['user_data_update_policy'] = 'IGNORE'
        update_template = server.t.freeze(properties=update_props)
        server.action = server.CREATE
        scheduler.TaskRunner(server.update, update_template)()
        self.assertEqual((server.UPDATE, server.COMPLETE), server.state)
    @mock.patch.object(servers.Server, 'prepare_for_replace')
    def test_server_update_image_replace(self, mock_replace):
        """An image change under image_update_policy REPLACE raises
        UpdateReplace so the server resource is rebuilt from scratch.
        """
        stack_name = 'update_imgrep'
        (tmpl, stack) = self._setup_test_stack(stack_name)
        tmpl.t['Resources']['WebServer']['Properties'][
            'image_update_policy'] = 'REPLACE'
        resource_defns = tmpl.resource_definitions(stack)
        server = servers.Server('server_update_image_replace',
                                resource_defns['WebServer'], stack)
        image_id = self.getUniqueString()
        update_props = tmpl.t['Resources']['WebServer']['Properties'].copy()
        update_props['image'] = image_id
        update_template = server.t.freeze(properties=update_props)
        updater = scheduler.TaskRunner(server.update, update_template)
        self.assertRaises(resource.UpdateReplace, updater)
    def _test_server_update_image_rebuild(self, status, policy='REBUILD',
                                          password=None):
        """Common helper: change the image and expect a nova rebuild call.

        :param status: statuses the fake nova 'get' reports after the
            rebuild, e.g. ('REBUILD', 'ACTIVE').
        :param policy: image_update_policy under test; any value other
            than 'REBUILD' asserts preserve_ephemeral=True.
        :param password: optional admin_pass forwarded to rebuild().
        """
        # Server.handle_update supports changing the image, and makes
        # the change making a rebuild API call against Nova.
        return_server = self.fc.servers.list()[1]
        return_server.id = '1234'
        server = self._create_test_server(return_server,
                                          'srv_updimgrbld')
        new_image = 'F17-x86_64-gold'
        # current test demonstrate updating when image_update_policy was not
        # changed, so image_update_policy will be used from self.properties
        before_props = self.server_props.copy()
        before_props['image_update_policy'] = policy
        server.t = server.t.freeze(properties=before_props)
        server.reparse()
        update_props = before_props.copy()
        update_props['image'] = new_image
        if password:
            update_props['admin_pass'] = password
        update_template = server.t.freeze(properties=update_props)
        mock_rebuild = self.patchobject(self.fc.servers, 'rebuild')
        def get_sideeff(stat):
            # Build a side effect that flips the fake server into 'stat'.
            def sideeff(*args):
                return_server.status = stat
                return return_server
            return sideeff
        for stat in status:
            self.patchobject(self.fc.servers, 'get',
                             side_effect=get_sideeff(stat))
        scheduler.TaskRunner(server.update, update_template)()
        self.assertEqual((server.UPDATE, server.COMPLETE), server.state)
        if 'REBUILD' == policy:
            mock_rebuild.assert_called_once_with(
                return_server, '2', password=password,
                preserve_ephemeral=False,
                meta={}, files={})
        else:
            mock_rebuild.assert_called_once_with(
                return_server, '2', password=password,
                preserve_ephemeral=True,
                meta={}, files={})
def test_server_update_image_rebuild_status_rebuild(self):
# Normally we will see 'REBUILD' first and then 'ACTIVE".
self._test_server_update_image_rebuild(status=('REBUILD', 'ACTIVE'))
def test_server_update_image_rebuild_status_active(self):
# It is possible for us to miss the REBUILD status.
self._test_server_update_image_rebuild(status=('ACTIVE',))
def test_server_update_image_rebuild_status_rebuild_keep_ephemeral(self):
# Normally we will see 'REBUILD' first and then 'ACTIVE".
self._test_server_update_image_rebuild(
policy='REBUILD_PRESERVE_EPHEMERAL', status=('REBUILD', 'ACTIVE'))
def test_server_update_image_rebuild_status_active_keep_ephemeral(self):
# It is possible for us to miss the REBUILD status.
self._test_server_update_image_rebuild(
policy='REBUILD_PRESERVE_EPHEMERAL', status=('ACTIVE',))
def test_server_update_image_rebuild_with_new_password(self):
# Normally we will see 'REBUILD' first and then 'ACTIVE".
self._test_server_update_image_rebuild(password='new_admin_password',
status=('REBUILD', 'ACTIVE'))
    def test_server_update_image_rebuild_failed(self):
        """A post-rebuild status of ERROR fails the update resource."""
        # If the status after a rebuild is not REBUILD or ACTIVE, it means the
        # rebuild call failed, so we raise an explicit error.
        return_server = self.fc.servers.list()[1]
        return_server.id = '1234'
        server = self._create_test_server(return_server,
                                          'srv_updrbldfail')
        new_image = 'F17-x86_64-gold'
        # current test demonstrate updating when image_update_policy was not
        # changed, so image_update_policy will be used from self.properties
        before_props = self.server_props.copy()
        before_props['image_update_policy'] = 'REBUILD'
        update_props = before_props.copy()
        update_props['image'] = new_image
        update_template = server.t.freeze(properties=update_props)
        server.t = server.t.freeze(properties=before_props)
        server.reparse()
        mock_rebuild = self.patchobject(self.fc.servers, 'rebuild')
        self.patchobject(self.fc.servers, 'get',
                         side_effect=ServerStatus(return_server,
                                                  ['REBUILD', 'ERROR']))
        updater = scheduler.TaskRunner(server.update, update_template)
        error = self.assertRaises(exception.ResourceFailure, updater)
        self.assertEqual(
            "Error: resources.srv_updrbldfail: "
            "Rebuilding server failed, status 'ERROR'",
            str(error))
        self.assertEqual((server.UPDATE, server.FAILED), server.state)
        # The rebuild itself was still attempted exactly once.
        mock_rebuild.assert_called_once_with(
            return_server, '2', password=None, preserve_ephemeral=False,
            meta={}, files={})
    def test_server_update_properties(self):
        """Changing the image with policy REPLACE raises UpdateReplace."""
        return_server = self.fc.servers.list()[1]
        server = self._create_test_server(return_server,
                                          'update_prop')
        update_props = self.server_props.copy()
        update_props['image'] = 'F17-x86_64-gold'
        update_props['image_update_policy'] = 'REPLACE'
        update_template = server.t.freeze(properties=update_props)
        updater = scheduler.TaskRunner(server.update, update_template)
        self.assertRaises(resource.UpdateReplace, updater)
    def test_server_status_build(self):
        """Create completes once nova reports the server as ACTIVE."""
        return_server = self.fc.servers.list()[0]
        server = self._setup_test_server(return_server,
                                         'sts_build')
        server.resource_id = '1234'
        def status_active(*args):
            # Flip the fake server to ACTIVE and hand it back.
            return_server.status = 'ACTIVE'
            return return_server
        self.patchobject(self.fc.servers, 'get',
                         return_value=status_active())
        scheduler.TaskRunner(server.create)()
        self.assertEqual((server.CREATE, server.COMPLETE), server.state)
    def test_server_status_suspend_no_resource_id(self):
        """Suspending a server without a resource_id fails explicitly."""
        return_server = self.fc.servers.list()[1]
        server = self._create_test_server(return_server,
                                          'srv_sus1')
        server.resource_id = None
        ex = self.assertRaises(exception.ResourceFailure,
                               scheduler.TaskRunner(server.suspend))
        self.assertEqual('Error: resources.srv_sus1: '
                         'Cannot suspend srv_sus1, '
                         'resource_id not set',
                         str(ex))
        self.assertEqual((server.SUSPEND, server.FAILED), server.state)
    def test_server_status_suspend_not_found(self):
        """Suspend fails when nova cannot find the backing server."""
        return_server = self.fc.servers.list()[1]
        server = self._create_test_server(return_server,
                                          'srv_sus2')
        server.resource_id = '1234'
        self.patchobject(self.fc.servers, 'get',
                         side_effect=fakes_nova.fake_exception())
        ex = self.assertRaises(exception.ResourceFailure,
                               scheduler.TaskRunner(server.suspend))
        self.assertEqual('NotFound: resources.srv_sus2: '
                         'Failed to find server 1234',
                         str(ex))
        self.assertEqual((server.SUSPEND, server.FAILED), server.state)
    def _test_server_status_suspend(self, name, state=('CREATE', 'COMPLETE')):
        """Common helper: suspend from *state* and expect SUSPEND/COMPLETE.

        The fake nova reports ACTIVE twice before SUSPENDED, so the
        suspend polling loop is exercised.
        """
        return_server = self.fc.servers.list()[1]
        server = self._create_test_server(return_server, name)
        server.resource_id = '1234'
        server.state_set(state[0], state[1])
        self.patchobject(return_server, 'suspend')
        self.patchobject(self.fc.servers, 'get',
                         side_effect=ServerStatus(return_server,
                                                  ['ACTIVE',
                                                   'ACTIVE',
                                                   'SUSPENDED']))
        scheduler.TaskRunner(server.suspend)()
        self.assertEqual((server.SUSPEND, server.COMPLETE), server.state)
def test_server_suspend_in_create_complete(self):
self._test_server_status_suspend('test_suspend_in_create_complete')
def test_server_suspend_in_suspend_failed(self):
self._test_server_status_suspend(
name='test_suspend_in_suspend_failed',
state=('SUSPEND', 'FAILED'))
def test_server_suspend_in_suspend_complete(self):
self._test_server_status_suspend(
name='test_suspend_in_suspend_complete',
state=('SUSPEND', 'COMPLETE'))
    def test_server_status_suspend_unknown_status(self):
        """An unrecognised nova status during suspend fails the action."""
        return_server = self.fc.servers.list()[1]
        server = self._create_test_server(return_server,
                                          'srv_susp_uk')
        server.resource_id = '1234'
        self.patchobject(return_server, 'suspend')
        self.patchobject(self.fc.servers, 'get',
                         side_effect=ServerStatus(return_server,
                                                  ['ACTIVE',
                                                   'ACTIVE',
                                                   'TRANSMOGRIFIED']))
        ex = self.assertRaises(exception.ResourceFailure,
                               scheduler.TaskRunner(server.suspend))
        self.assertIsInstance(ex.exc, exception.ResourceUnknownStatus)
        self.assertEqual('Suspend of server %s failed - '
                         'Unknown status TRANSMOGRIFIED '
                         'due to "Unknown"' % return_server.name,
                         str(ex.exc.message))
        self.assertEqual((server.SUSPEND, server.FAILED), server.state)
    def _test_server_status_resume(self, name, state=('SUSPEND', 'COMPLETE')):
        """Common helper: resume from *state* and expect RESUME/COMPLETE.

        The fake nova reports SUSPENDED twice before ACTIVE, so the
        resume polling loop is exercised.
        """
        return_server = self.fc.servers.list()[1]
        server = self._create_test_server(return_server, name)
        server.resource_id = '1234'
        server.state_set(state[0], state[1])
        self.patchobject(return_server, 'resume')
        self.patchobject(self.fc.servers, 'get',
                         side_effect=ServerStatus(return_server,
                                                  ['SUSPENDED',
                                                   'SUSPENDED',
                                                   'ACTIVE']))
        scheduler.TaskRunner(server.resume)()
        self.assertEqual((server.RESUME, server.COMPLETE), server.state)
def test_server_resume_in_suspend_complete(self):
self._test_server_status_resume(
name='test_resume_in_suspend_complete')
def test_server_resume_in_resume_failed(self):
self._test_server_status_resume(
name='test_resume_in_resume_failed',
state=('RESUME', 'FAILED'))
def test_server_resume_in_resume_complete(self):
self._test_server_status_resume(
name='test_resume_in_resume_complete',
state=('RESUME', 'COMPLETE'))
    def test_server_status_resume_no_resource_id(self):
        """Resuming a server without a resource_id fails explicitly."""
        return_server = self.fc.servers.list()[1]
        server = self._create_test_server(return_server,
                                          'srv_susp_norid')
        server.resource_id = None
        server.state_set(server.SUSPEND, server.COMPLETE)
        ex = self.assertRaises(exception.ResourceFailure,
                               scheduler.TaskRunner(server.resume))
        self.assertEqual('Error: resources.srv_susp_norid: '
                         'Cannot resume srv_susp_norid, '
                         'resource_id not set',
                         str(ex))
        self.assertEqual((server.RESUME, server.FAILED), server.state)
    def test_server_status_resume_not_found(self):
        """Resume fails when nova cannot find the backing server."""
        return_server = self.fc.servers.list()[1]
        server = self._create_test_server(return_server,
                                          'srv_res_nf')
        server.resource_id = '1234'
        self.patchobject(self.fc.servers, 'get',
                         side_effect=fakes_nova.fake_exception())
        server.state_set(server.SUSPEND, server.COMPLETE)
        ex = self.assertRaises(exception.ResourceFailure,
                               scheduler.TaskRunner(server.resume))
        self.assertEqual('NotFound: resources.srv_res_nf: '
                         'Failed to find server 1234',
                         str(ex))
        self.assertEqual((server.RESUME, server.FAILED), server.state)
def test_server_status_build_spawning(self):
self._test_server_status_not_build_active('BUILD(SPAWNING)')
def test_server_status_hard_reboot(self):
self._test_server_status_not_build_active('HARD_REBOOT')
def test_server_status_password(self):
self._test_server_status_not_build_active('PASSWORD')
def test_server_status_reboot(self):
self._test_server_status_not_build_active('REBOOT')
def test_server_status_rescue(self):
self._test_server_status_not_build_active('RESCUE')
def test_server_status_resize(self):
self._test_server_status_not_build_active('RESIZE')
def test_server_status_revert_resize(self):
self._test_server_status_not_build_active('REVERT_RESIZE')
def test_server_status_shutoff(self):
self._test_server_status_not_build_active('SHUTOFF')
def test_server_status_suspended(self):
self._test_server_status_not_build_active('SUSPENDED')
def test_server_status_verify_resize(self):
self._test_server_status_not_build_active('VERIFY_RESIZE')
    def _test_server_status_not_build_active(self, uncommon_status):
        """Common helper: create completes despite a transient status.

        The fake nova reports *uncommon_status* once before ACTIVE, so
        create keeps polling instead of failing on the intermediate
        state.
        """
        return_server = self.fc.servers.list()[0]
        server = self._setup_test_server(return_server,
                                         'srv_sts_bld')
        server.resource_id = '1234'
        self.patchobject(self.fc.servers, 'get',
                         side_effect=ServerStatus(return_server,
                                                  [uncommon_status,
                                                   'ACTIVE']))
        scheduler.TaskRunner(server.create)()
        self.assertEqual((server.CREATE, server.COMPLETE), server.state)
    def test_build_nics(self):
        """_build_nics maps port/fixed_ip/network entries to nova NICs."""
        return_server = self.fc.servers.list()[1]
        server = self._create_test_server(return_server,
                                          'test_server_create')
        self.patchobject(neutronclient.Client, 'create_port',
                         return_value={'port': {'id': '4815162342'}})
        # Empty or absent network lists produce no NIC list at all.
        self.assertIsNone(server._build_nics([]))
        self.assertIsNone(server._build_nics(None))
        self.assertEqual([
            {'port-id': 'aaaabbbb', 'net-id': None, 'tag': 'nic1'},
            {'v4-fixed-ip': '192.0.2.0', 'net-id': None}],
            server._build_nics([
                {'port': 'aaaabbbb', 'tag': 'nic1'},
                {'fixed_ip': '192.0.2.0'}]))
        # When both port and fixed_ip are given, the port takes precedence.
        self.assertEqual([{'port-id': 'aaaabbbb', 'net-id': None},
                          {'port-id': 'aaaabbbb', 'net-id': None}],
                         server._build_nics([{'port': 'aaaabbbb',
                                              'fixed_ip': '192.0.2.0'},
                                             {'port': 'aaaabbbb',
                                              'fixed_ip': '2002::2'}]))
        # IPv6 fixed addresses are emitted under the v6-fixed-ip key.
        self.assertEqual([{'port-id': 'aaaabbbb', 'net-id': None},
                          {'v6-fixed-ip': '2002::2', 'net-id': None}],
                         server._build_nics([{'port': 'aaaabbbb'},
                                             {'fixed_ip': '2002::2'}]))
        self.assertEqual([{'net-id': 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'}],
                         server._build_nics(
                             [{'network':
                               'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'}]))
    def test_server_network_errors(self):
        """Neutron lookup errors surface as property/create failures."""
        stack_name = 'net_err'
        (tmpl, stack) = self._setup_test_stack(stack_name,
                                               test_templ=ns_template)
        resolver = self.patchobject(neutron.NeutronClientPlugin,
                                    'find_resourceid_by_name_or_id')
        resource_defns = tmpl.resource_definitions(stack)
        server = servers.Server('server',
                                resource_defns['server'], stack)
        # A missing network makes property resolution itself fail.
        resolver.side_effect = neutron.exceptions.NotFound()
        server.reparse()
        self.assertRaises(ValueError, server.properties.get, 'networks')
        # An ambiguous name fails create with a hint to use an ID.
        resolver.side_effect = neutron.exceptions.NeutronClientNoUniqueMatch()
        ex = self.assertRaises(exception.ResourceFailure,
                               scheduler.TaskRunner(server.create))
        self.assertIn('use an ID to be more specific.', str(ex))
    def test_server_without_ip_address(self):
        """Address attributes are empty lists for a port with no fixed IPs."""
        return_server = self.fc.servers.list()[3]
        return_server.id = '9102'
        server = self._create_test_server(return_server,
                                          'wo_ipaddr')
        self.patchobject(neutron.NeutronClientPlugin,
                         'find_resourceid_by_name_or_id',
                         return_value=None)
        self.patchobject(neutronclient.Client, 'list_ports',
                         return_value={'ports': [{'id': 'p_id',
                                                  'name': 'p_name',
                                                  'fixed_ips': [],
                                                  'network_id': 'n_id'}]})
        self.patchobject(neutronclient.Client, 'list_networks',
                         return_value={'networks': [{'id': 'n_id',
                                                     'name': 'empty_net'}]})
        self.patchobject(self.fc.servers, 'get', return_value=return_server)
        self.patchobject(return_server, 'interface_list', return_value=[])
        mock_detach = self.patchobject(return_server, 'interface_detach')
        mock_attach = self.patchobject(return_server, 'interface_attach')
        self.assertEqual({'empty_net': []}, server.FnGetAtt('addresses'))
        self.assertEqual({'empty_net': []}, server.FnGetAtt('networks'))
        # Reading attributes must not touch the server's interfaces.
        self.assertEqual(0, mock_detach.call_count)
        self.assertEqual(0, mock_attach.call_count)
    def test_build_block_device_mapping(self):
        """_build_block_device_mapping encodes entries into nova's v1
        colon-separated string form, keyed by device name.
        """
        # Empty or absent mappings produce no mapping at all.
        self.assertIsNone(servers.Server._build_block_device_mapping([]))
        self.assertIsNone(servers.Server._build_block_device_mapping(None))
        self.assertEqual({
            'vda': '1234::',
            'vdb': '1234:snap:',
        }, servers.Server._build_block_device_mapping([
            {'device_name': 'vda', 'volume_id': '1234'},
            {'device_name': 'vdb', 'snapshot_id': '1234'},
        ]))
        self.assertEqual({
            'vdc': '1234::10',
            'vdd': '1234:snap::True'
        }, servers.Server._build_block_device_mapping([
            {
                'device_name': 'vdc',
                'volume_id': '1234',
                'volume_size': 10
            },
            {
                'device_name': 'vdd',
                'snapshot_id': '1234',
                'delete_on_termination': True
            }
        ]))
    @mock.patch.object(nova.NovaClientPlugin, 'client')
    def test_validate_block_device_mapping_volume_size_valid_int(self,
                                                                 mock_create):
        """An integer volume_size in a BDM entry passes validation."""
        stack_name = 'val_vsize_valid'
        tmpl, stack = self._setup_test_stack(stack_name)
        bdm = [{'device_name': 'vda', 'volume_id': '1234',
                'volume_size': 10}]
        wsp = tmpl.t['Resources']['WebServer']['Properties']
        wsp['block_device_mapping'] = bdm
        resource_defns = tmpl.resource_definitions(stack)
        server = servers.Server('server_create_image_err',
                                resource_defns['WebServer'], stack)
        self.patchobject(glance.GlanceClientPlugin, 'get_image',
                         return_value=self.mock_image)
        self.patchobject(nova.NovaClientPlugin, 'get_flavor',
                         return_value=self.mock_flavor)
        self.stub_VolumeConstraint_validate()
        self.assertIsNone(server.validate())
    @mock.patch.object(nova.NovaClientPlugin, 'client')
    def test_validate_block_device_mapping_volume_size_valid_str(self,
                                                                 mock_create):
        """A numeric-string volume_size in a BDM entry passes validation."""
        stack_name = 'val_vsize_valid'
        tmpl, stack = self._setup_test_stack(stack_name)
        bdm = [{'device_name': 'vda', 'volume_id': '1234',
                'volume_size': '10'}]
        wsp = tmpl.t['Resources']['WebServer']['Properties']
        wsp['block_device_mapping'] = bdm
        resource_defns = tmpl.resource_definitions(stack)
        server = servers.Server('server_create_image_err',
                                resource_defns['WebServer'], stack)
        self.stub_VolumeConstraint_validate()
        self.patchobject(glance.GlanceClientPlugin, 'get_image',
                         return_value=self.mock_image)
        self.patchobject(nova.NovaClientPlugin, 'get_flavor',
                         return_value=self.mock_flavor)
        self.assertIsNone(server.validate())
    @mock.patch.object(nova.NovaClientPlugin, 'client')
    def test_validate_bd_mapping_volume_size_invalid_str(self, mock_create):
        """A non-numeric volume_size string fails validation."""
        stack_name = 'val_vsize_invalid'
        tmpl, stack = self._setup_test_stack(stack_name)
        bdm = [{'device_name': 'vda', 'volume_id': '1234',
                'volume_size': '10a'}]
        wsp = tmpl.t['Resources']['WebServer']['Properties']
        wsp['block_device_mapping'] = bdm
        resource_defns = tmpl.resource_definitions(stack)
        server = servers.Server('server_create_image_err',
                                resource_defns['WebServer'], stack)
        self.stub_VolumeConstraint_validate()
        exc = self.assertRaises(exception.StackValidationFailed,
                                server.validate)
        self.assertIn("Value '10a' is not an integer", str(exc))
    @mock.patch.object(nova.NovaClientPlugin, 'client')
    def test_validate_conflict_block_device_mapping_props(self, mock_create):
        """snapshot_id and volume_id in one BDM entry are a conflict."""
        stack_name = 'val_blkdev1'
        (tmpl, stack) = self._setup_test_stack(stack_name)
        bdm = [{'device_name': 'vdb', 'snapshot_id': '1234',
                'volume_id': '1234'}]
        wsp = tmpl.t['Resources']['WebServer']['Properties']
        wsp['block_device_mapping'] = bdm
        resource_defns = tmpl.resource_definitions(stack)
        server = servers.Server('server_create_image_err',
                                resource_defns['WebServer'], stack)
        self.stub_VolumeConstraint_validate()
        self.stub_SnapshotConstraint_validate()
        self.assertRaises(exception.ResourcePropertyConflict, server.validate)
    @mock.patch.object(nova.NovaClientPlugin, 'client')
    def test_validate_insufficient_block_device_mapping_props(self,
                                                              mock_create):
        """A BDM entry must name either volume_id or snapshot_id."""
        stack_name = 'val_blkdev2'
        (tmpl, stack) = self._setup_test_stack(stack_name)
        bdm = [{'device_name': 'vdb', 'volume_size': 1,
                'delete_on_termination': True}]
        wsp = tmpl.t['Resources']['WebServer']['Properties']
        wsp['block_device_mapping'] = bdm
        resource_defns = tmpl.resource_definitions(stack)
        server = servers.Server('server_create_image_err',
                                resource_defns['WebServer'], stack)
        ex = self.assertRaises(exception.StackValidationFailed,
                               server.validate)
        msg = ("Either volume_id or snapshot_id must be specified "
               "for device mapping vdb")
        self.assertEqual(msg, str(ex))
    @mock.patch.object(nova.NovaClientPlugin, 'client')
    def test_validate_block_device_mapping_with_empty_ref(self, mock_create):
        """An empty-string volume_id is tolerated by validation."""
        stack_name = 'val_blkdev2'
        (tmpl, stack) = self._setup_test_stack(stack_name)
        bdm = [{'device_name': 'vda', 'volume_id': '',
                'volume_size': '10'}]
        wsp = tmpl.t['Resources']['WebServer']['Properties']
        wsp['block_device_mapping'] = bdm
        resource_defns = tmpl.resource_definitions(stack)
        server = servers.Server('server_create_image_err',
                                resource_defns['WebServer'], stack)
        self.patchobject(glance.GlanceClientPlugin, 'get_image',
                         return_value=self.mock_image)
        self.patchobject(nova.NovaClientPlugin, 'get_flavor',
                         return_value=self.mock_flavor)
        self.stub_VolumeConstraint_validate()
        self.assertIsNone(server.validate())
    @mock.patch.object(nova.NovaClientPlugin, 'client')
    def test_validate_without_image_or_bootable_volume(self, mock_create):
        """Validation fails with no image and no bootable volume."""
        stack_name = 'val_imgvol'
        (tmpl, stack) = self._setup_test_stack(stack_name)
        del tmpl['Resources']['WebServer']['Properties']['image']
        # The only mapping is on vdb, so nothing is bootable.
        bdm = [{'device_name': 'vdb', 'volume_id': '1234'}]
        wsp = tmpl.t['Resources']['WebServer']['Properties']
        wsp['block_device_mapping'] = bdm
        resource_defns = tmpl.resource_definitions(stack)
        server = servers.Server('server_create_image_err',
                                resource_defns['WebServer'], stack)
        self.stub_VolumeConstraint_validate()
        ex = self.assertRaises(exception.StackValidationFailed,
                               server.validate)
        msg = ('Neither image nor bootable volume is specified '
               'for instance %s' % server.name)
        self.assertEqual(msg, str(ex))
    @mock.patch.object(nova.NovaClientPlugin, 'client')
    def test_validate_invalid_image_status(self, mock_create):
        """An image whose status is not 'active' fails validation."""
        stack_name = 'test_stack'
        tmpl, stack = self._setup_test_stack(stack_name)
        resource_defns = tmpl.resource_definitions(stack)
        server = servers.Server('server_inactive_image',
                                resource_defns['WebServer'], stack)
        mock_image = mock.Mock(min_ram=2, status='sdfsdf')
        self.patchobject(glance.GlanceClientPlugin, 'get_image',
                         return_value=mock_image)
        error = self.assertRaises(exception.StackValidationFailed,
                                  server.validate)
        self.assertEqual(
            'Image status is required to be active not sdfsdf.',
            str(error))
    @mock.patch.object(nova.NovaClientPlugin, 'client')
    def test_validate_insufficient_ram_flavor(self, mock_create):
        """A flavor with less RAM than the image's min_ram fails validation."""
        stack_name = 'test_stack'
        tmpl, stack = self._setup_test_stack(stack_name)
        resource_defns = tmpl.resource_definitions(stack)
        server = servers.Server('server_insufficient_ram_flavor',
                                resource_defns['WebServer'], stack)
        mock_image = mock.Mock(min_ram=100, status='active')
        self.patchobject(glance.GlanceClientPlugin, 'get_image',
                         return_value=mock_image)
        self.patchobject(nova.NovaClientPlugin, 'get_flavor',
                         return_value=self.mock_flavor)
        error = self.assertRaises(exception.StackValidationFailed,
                                  server.validate)
        self.assertEqual(
            'Image F18-x86_64-gold requires 100 minimum ram. Flavor m1.large '
            'has only 4.',
            str(error))
    @mock.patch.object(nova.NovaClientPlugin, 'client')
    def test_validate_image_flavor_not_found(self, mock_create):
        """A not-found image or flavor does not fail validation."""
        stack_name = 'test_stack'
        tmpl, stack = self._setup_test_stack(stack_name)
        resource_defns = tmpl.resource_definitions(stack)
        server = servers.Server('image_not_found',
                                resource_defns['WebServer'], stack)
        # First validate(): the image lookup misses; second validate():
        # the image resolves but the flavor lookup misses.
        self.patchobject(glance.GlanceClientPlugin, 'get_image',
                         side_effect=[
                             glance.client_exception.EntityMatchNotFound,
                             self.mock_image])
        self.patchobject(nova.NovaClientPlugin, 'get_flavor',
                         side_effect=nova.exceptions.NotFound(''))
        self.assertIsNone(server.validate())
        self.assertIsNone(server.validate())
    @mock.patch.object(nova.NovaClientPlugin, 'client')
    def test_validate_insufficient_disk_flavor(self, mock_create):
        """A flavor with less disk than the image's min_disk fails
        validation.
        """
        stack_name = 'test_stack'
        tmpl, stack = self._setup_test_stack(stack_name)
        resource_defns = tmpl.resource_definitions(stack)
        server = servers.Server('server_insufficient_disk_flavor',
                                resource_defns['WebServer'], stack)
        mock_image = mock.Mock(min_ram=1, status='active', min_disk=100)
        self.patchobject(glance.GlanceClientPlugin, 'get_image',
                         return_value=mock_image)
        self.patchobject(nova.NovaClientPlugin, 'get_flavor',
                         return_value=self.mock_flavor)
        error = self.assertRaises(exception.StackValidationFailed,
                                  server.validate)
        self.assertEqual(
            'Image F18-x86_64-gold requires 100 GB minimum disk space. '
            'Flavor m1.large has only 4 GB.',
            str(error))
    def test_build_block_device_mapping_v2(self):
        """_build_block_device_mapping_v2 emits nova BDM v2 dicts.

        Volume, snapshot and image sources become boot-index-0 volume
        mappings; swap and ephemeral entries become local blank devices.
        """
        # Empty or absent mappings produce no mapping at all.
        self.assertIsNone(servers.Server._build_block_device_mapping_v2([]))
        self.assertIsNone(servers.Server._build_block_device_mapping_v2(None))
        self.assertEqual([{
            'uuid': '1', 'source_type': 'volume',
            'destination_type': 'volume', 'boot_index': 0,
            'delete_on_termination': False}
        ], servers.Server._build_block_device_mapping_v2([
            {'volume_id': '1'}
        ]))
        self.assertEqual([{
            'uuid': '1', 'source_type': 'snapshot',
            'destination_type': 'volume', 'boot_index': 0,
            'delete_on_termination': False}
        ], servers.Server._build_block_device_mapping_v2([
            {'snapshot_id': '1'}
        ]))
        self.assertEqual([{
            'uuid': '1', 'source_type': 'image',
            'destination_type': 'volume', 'boot_index': 0,
            'delete_on_termination': False}
        ], servers.Server._build_block_device_mapping_v2([
            {'image': '1'}
        ]))
        self.assertEqual([{
            'source_type': 'blank', 'destination_type': 'local',
            'boot_index': -1, 'delete_on_termination': True,
            'guest_format': 'swap', 'volume_size': 1}
        ], servers.Server._build_block_device_mapping_v2([
            {'swap_size': 1}
        ]))
        # An entry carrying only a device_name is dropped entirely.
        self.assertEqual([], servers.Server._build_block_device_mapping_v2([
            {'device_name': ''}
        ]))
        self.assertEqual([
            {'source_type': 'blank',
             'destination_type': 'local',
             'boot_index': -1,
             'delete_on_termination': True,
             'volume_size': 1,
             'guest_format': 'ext4'}
        ], servers.Server._build_block_device_mapping_v2([
            {'ephemeral_size': 1,
             'ephemeral_format': 'ext4'}
        ]))
    def test_block_device_mapping_v2_image_resolve(self):
        """The bdm_v2 'image' entry is translated to a glance image ID."""
        (tmpl, stack) = self._setup_test_stack('mapping',
                                               test_templ=bdm_v2_template)
        resource_defns = tmpl.resource_definitions(stack)
        self.server = servers.Server('server',
                                     resource_defns['server'], stack)
        self.server.translate_properties(self.server.properties)
        self.assertEqual('2',
                         self.server.properties['block_device_mapping_v2'][
                             0]['image'])
    def test_block_device_mapping_v2_image_prop_conflict(self):
        """Validation reports the bdm_v2 image/image_id conflict."""
        test_templ = bdm_v2_template + "\n image: F17-x86_64-gold"
        (tmpl, stack) = self._setup_test_stack('mapping',
                                               test_templ=test_templ)
        resource_defns = tmpl.resource_definitions(stack)
        msg = ("Cannot define the following "
               "properties at the same time: block_device_mapping_v2.image, "
               "block_device_mapping_v2.image_id")
        server = servers.Server('server', resource_defns['server'], stack)
        exc = self.assertRaises(exception.StackValidationFailed,
                                server.validate)
        self.assertIn(msg, str(exc))
    @mock.patch.object(nova.NovaClientPlugin, 'client')
    def test_validate_with_both_blk_dev_map_and_blk_dev_map_v2(self,
                                                               mock_create):
        """block_device_mapping and block_device_mapping_v2 conflict."""
        stack_name = 'invalid_stack'
        tmpl, stack = self._setup_test_stack(stack_name)
        bdm = [{'device_name': 'vda', 'volume_id': '1234',
                'volume_size': '10'}]
        bdm_v2 = [{'volume_id': '1'}]
        wsp = tmpl.t['Resources']['WebServer']['Properties']
        wsp['block_device_mapping'] = bdm
        wsp['block_device_mapping_v2'] = bdm_v2
        resource_defns = tmpl.resource_definitions(stack)
        server = servers.Server('server_create_image_err',
                                resource_defns['WebServer'], stack)
        self.stub_VolumeConstraint_validate()
        exc = self.assertRaises(exception.ResourcePropertyConflict,
                                server.validate)
        msg = ('Cannot define the following properties at the same time: '
               'block_device_mapping, block_device_mapping_v2.')
        self.assertEqual(msg, str(exc))
    def _test_validate_bdm_v2(self, stack_name, bdm_v2, with_image=True,
                              error_msg=None, raise_exc=None):
        """Common helper to validate a block_device_mapping_v2 property.

        :param bdm_v2: the mapping list to install in the template.
        :param with_image: when False, drop the image property first.
        :param raise_exc: expected exception class; when None,
            validate() must succeed.
        :param error_msg: substring expected in the raised exception.
        """
        tmpl, stack = self._setup_test_stack(stack_name)
        if not with_image:
            del tmpl['Resources']['WebServer']['Properties']['image']
        wsp = tmpl.t['Resources']['WebServer']['Properties']
        wsp['block_device_mapping_v2'] = bdm_v2
        resource_defns = tmpl.resource_definitions(stack)
        server = servers.Server('server_create_image_err',
                                resource_defns['WebServer'], stack)
        self.patchobject(nova.NovaClientPlugin, 'get_flavor',
                         return_value=self.mock_flavor)
        self.patchobject(glance.GlanceClientPlugin, 'get_image',
                         return_value=self.mock_image)
        self.stub_VolumeConstraint_validate()
        if raise_exc:
            ex = self.assertRaises(raise_exc, server.validate)
            self.assertIn(error_msg, str(ex))
        else:
            self.assertIsNone(server.validate())
    @mock.patch.object(nova.NovaClientPlugin, 'client')
    def test_validate_conflict_block_device_mapping_v2_props(self,
                                                             mock_create):
        """volume_id and snapshot_id conflict within one bdm_v2 entry."""
        stack_name = 'val_blkdev2'
        bdm_v2 = [{'volume_id': '1', 'snapshot_id': 2}]
        error_msg = ('Cannot define the following properties at '
                     'the same time: volume_id, snapshot_id')
        self.stub_SnapshotConstraint_validate()
        self._test_validate_bdm_v2(
            stack_name, bdm_v2,
            raise_exc=exception.ResourcePropertyConflict,
            error_msg=error_msg)
    @mock.patch.object(nova.NovaClientPlugin, 'client')
    def test_validate_bdm_v2_with_empty_mapping(self, mock_create):
        """A completely empty bdm_v2 entry fails validation."""
        stack_name = 'val_blkdev2'
        bdm_v2 = [{}]
        msg = ('Either volume_id, snapshot_id, image_id, swap_size, '
               'ephemeral_size or ephemeral_format must be specified.')
        self._test_validate_bdm_v2(stack_name, bdm_v2,
                                   raise_exc=exception.StackValidationFailed,
                                   error_msg=msg)
@mock.patch.object(nova.NovaClientPlugin, 'client')
def test_validate_bdm_v2_properties_success(self, mock_create):
stack_name = 'bdm_v2_success'
bdm_v2 = [{'volume_id': '1', 'boot_index': -1}]
self._test_validate_bdm_v2(stack_name, bdm_v2)
@mock.patch.object(nova.NovaClientPlugin, 'client')
def test_validate_bdm_v2_with_unresolved_volume(self, mock_create):
stack_name = 'bdm_v2_with_unresolved_vol'
# empty string indicates that volume is unresolved
bdm_v2 = [{'volume_id': ''}]
self._test_validate_bdm_v2(stack_name, bdm_v2, with_image=False)
    @mock.patch.object(nova.NovaClientPlugin, 'client')
    def test_validate_bdm_v2_multiple_bootable_source(self, mock_create):
        """A bootable volume plus the image property is rejected."""
        stack_name = 'v2_multiple_bootable'
        # with two bootable sources: volume_id and image
        bdm_v2 = [{'volume_id': '1', 'boot_index': 0}]
        msg = ('Multiple bootable sources for instance')
        self._test_validate_bdm_v2(stack_name, bdm_v2,
                                   raise_exc=exception.StackValidationFailed,
                                   error_msg=msg)
    @mock.patch.object(nova.NovaClientPlugin, 'client')
    def test_validate_bdm_v2_properties_no_bootable_vol(self, mock_create):
        """A swap-only bdm_v2 with no image leaves nothing to boot from."""
        stack_name = 'bdm_v2_no_bootable'
        bdm_v2 = [{'swap_size': 10}]
        msg = ('Neither image nor bootable volume is specified for instance '
               'server_create_image_err')
        self._test_validate_bdm_v2(stack_name, bdm_v2,
                                   raise_exc=exception.StackValidationFailed,
                                   error_msg=msg,
                                   with_image=False)
    def test_validate_metadata_too_many(self):
        """Metadata above the nova limit (3 here) fails validation."""
        stack_name = 'srv_val_metadata'
        (tmpl, stack) = self._setup_test_stack(stack_name)
        tmpl.t['Resources']['WebServer']['Properties']['metadata'] = {'a': 1,
                                                                      'b': 2,
                                                                      'c': 3,
                                                                      'd': 4}
        self.patchobject(nova.NovaClientPlugin, 'client',
                         return_value=self.fc)
        resource_defns = tmpl.resource_definitions(stack)
        server = servers.Server('server_create_image_err',
                                resource_defns['WebServer'], stack)
        self.patchobject(glance.GlanceClientPlugin, 'get_image',
                         return_value=self.mock_image)
        self.patchobject(nova.NovaClientPlugin, 'get_flavor',
                         return_value=self.mock_flavor)
        ex = self.assertRaises(exception.StackValidationFailed,
                               server.validate)
        self.assertIn('Instance metadata must not contain greater than 3 '
                      'entries', str(ex))
    def test_validate_metadata_okay(self):
        """Metadata exactly at the nova limit (3 here) validates cleanly."""
        stack_name = 'srv_val_metadata'
        (tmpl, stack) = self._setup_test_stack(stack_name)
        tmpl.t['Resources']['WebServer']['Properties']['metadata'] = {'a': 1,
                                                                      'b': 2,
                                                                      'c': 3}
        self.patchobject(nova.NovaClientPlugin, 'client',
                         return_value=self.fc)
        resource_defns = tmpl.resource_definitions(stack)
        server = servers.Server('server_create_image_err',
                                resource_defns['WebServer'], stack)
        self.patchobject(glance.GlanceClientPlugin, 'get_image',
                         return_value=self.mock_image)
        self.patchobject(nova.NovaClientPlugin, 'get_flavor',
                         return_value=self.mock_flavor)
        self.assertIsNone(server.validate())
    def test_server_unsupported_microversion_tags(self):
        """The tags property is rejected when nova lacks the needed
        API microversion.
        """
        stack_name = 'srv_val_tags'
        (tmpl, stack) = self._setup_test_stack(stack_name)
        props = tmpl.t['Resources']['WebServer']['Properties']
        props['tags'] = ['a']
        # no need test with key_name
        props.pop('key_name')
        self.patchobject(nova.NovaClientPlugin, 'is_version_supported',
                         return_value=False)
        resource_defns = tmpl.resource_definitions(stack)
        server = servers.Server('server_create_image_err',
                                resource_defns['WebServer'], stack)
        self.patchobject(glance.GlanceClientPlugin, 'get_image',
                         return_value=self.mock_image)
        self.patchobject(nova.NovaClientPlugin, 'get_flavor',
                         return_value=self.mock_flavor)
        exc = self.assertRaises(exception.StackValidationFailed,
                                server.validate)
        self.assertEqual('Cannot use "tags" property - nova does not support '
                         'required api microversion.',
                         str(exc))
    def test_server_validate_too_many_personality(self):
        """More personality files than the limit (5 here) fails validation."""
        stack_name = 'srv_val'
        (tmpl, stack) = self._setup_test_stack(stack_name)
        self.patchobject(nova.NovaClientPlugin, 'client',
                         return_value=self.fc)
        self.patchobject(nova.NovaClientPlugin, 'is_version_supported',
                         return_value=False)
        tmpl.t['Resources']['WebServer']['Properties'][
            'personality'] = {"/fake/path1": "fake contents1",
                              "/fake/path2": "fake_contents2",
                              "/fake/path3": "fake_contents3",
                              "/fake/path4": "fake_contents4",
                              "/fake/path5": "fake_contents5",
                              "/fake/path6": "fake_contents6"}
        resource_defns = tmpl.resource_definitions(stack)
        server = servers.Server('server_create_image_err',
                                resource_defns['WebServer'], stack)
        self.patchobject(self.fc.limits, 'get', return_value=self.limits)
        self.patchobject(glance.GlanceClientPlugin, 'get_image',
                         return_value=self.mock_image)
        exc = self.assertRaises(exception.StackValidationFailed,
                                server.validate)
        self.assertEqual("The personality property may not contain "
                         "greater than 5 entries.", str(exc))
    def test_server_validate_personality_unsupported(self):
        """Validation fails for personality when nova has dropped support.

        Here is_version_supported returns True, meaning nova is new enough
        to have removed the personality parameter entirely.
        """
        stack_name = 'srv_val'
        (tmpl, stack) = self._setup_test_stack(stack_name)
        self.patchobject(nova.NovaClientPlugin, 'client',
                         return_value=self.fc)
        self.patchobject(nova.NovaClientPlugin, 'is_version_supported',
                         return_value=True)
        tmpl.t['Resources']['WebServer']['Properties'][
            'personality'] = {"/fake/path1": "fake contents1",
                              "/fake/path2": "fake_contents2",
                              "/fake/path3": "fake_contents3",
                              "/fake/path4": "fake_contents4",
                              "/fake/path5": "fake_contents5"}
        resource_defns = tmpl.resource_definitions(stack)
        server = servers.Server('server_create_image_err',
                                resource_defns['WebServer'], stack)
        self.patchobject(self.fc.limits, 'get', return_value=self.limits)
        self.patchobject(glance.GlanceClientPlugin, 'get_image',
                         return_value=self.mock_image)
        exc = self.assertRaises(exception.StackValidationFailed,
                                server.validate)
        self.assertEqual("Cannot use the personality parameter as nova "
                         "no longer supports it. Use user_data instead.",
                         str(exc))
    def test_server_validate_personality_okay(self):
        """Validation passes with exactly 5 personality entries."""
        stack_name = 'srv_val'
        (tmpl, stack) = self._setup_test_stack(stack_name)
        self.patchobject(nova.NovaClientPlugin, 'client',
                         return_value=self.fc)
        self.patchobject(nova.NovaClientPlugin, 'is_version_supported',
                         return_value=False)
        # five entries: at (not over) the limit checked by validate()
        tmpl.t['Resources']['WebServer']['Properties'][
            'personality'] = {"/fake/path1": "fake contents1",
                              "/fake/path2": "fake_contents2",
                              "/fake/path3": "fake_contents3",
                              "/fake/path4": "fake_contents4",
                              "/fake/path5": "fake_contents5"}
        resource_defns = tmpl.resource_definitions(stack)
        server = servers.Server('server_create_image_err',
                                resource_defns['WebServer'], stack)
        self.patchobject(self.fc.limits, 'get', return_value=self.limits)
        self.patchobject(glance.GlanceClientPlugin, 'get_image',
                         return_value=self.mock_image)
        self.assertIsNone(server.validate())
    def test_server_validate_personality_file_size_okay(self):
        """Validation passes with a personality file exactly at 10240 bytes."""
        stack_name = 'srv_val'
        (tmpl, stack) = self._setup_test_stack(stack_name)
        self.patchobject(nova.NovaClientPlugin, 'client',
                         return_value=self.fc)
        self.patchobject(nova.NovaClientPlugin, 'is_version_supported',
                         return_value=False)
        # 10240 bytes is the maximum allowed personality file size
        tmpl.t['Resources']['WebServer']['Properties'][
            'personality'] = {"/fake/path1": "a" * 10240}
        resource_defns = tmpl.resource_definitions(stack)
        server = servers.Server('server_create_image_err',
                                resource_defns['WebServer'], stack)
        self.patchobject(self.fc.limits, 'get', return_value=self.limits)
        self.patchobject(glance.GlanceClientPlugin, 'get_image',
                         return_value=self.mock_image)
        self.assertIsNone(server.validate())
    def test_server_validate_personality_file_size_too_big(self):
        """Validation fails with a personality file one byte over the limit."""
        stack_name = 'srv_val'
        (tmpl, stack) = self._setup_test_stack(stack_name)
        self.patchobject(nova.NovaClientPlugin, 'client',
                         return_value=self.fc)
        self.patchobject(nova.NovaClientPlugin, 'is_version_supported',
                         return_value=False)
        # 10241 bytes: one over the 10240-byte limit named in the message
        tmpl.t['Resources']['WebServer']['Properties'][
            'personality'] = {"/fake/path1": "a" * 10241}
        resource_defns = tmpl.resource_definitions(stack)
        server = servers.Server('server_create_image_err',
                                resource_defns['WebServer'], stack)
        self.patchobject(self.fc.limits, 'get', return_value=self.limits)
        self.patchobject(glance.GlanceClientPlugin, 'get_image',
                         return_value=self.mock_image)
        exc = self.assertRaises(exception.StackValidationFailed,
                                server.validate)
        self.assertEqual('The contents of personality file "/fake/path1" '
                         'is larger than the maximum allowed personality '
                         'file size (10240 bytes).', str(exc))
    def test_server_validate_personality_get_attr_return_none(self):
        """Validation passes when personality content resolves to None.

        Uses the server_with_sw_config_personality template, where the
        personality value comes from a get_attr that returns None.
        """
        stack_name = 'srv_val'
        (tmpl, stack) = self._setup_test_stack(
            stack_name, server_with_sw_config_personality)
        self.patchobject(nova.NovaClientPlugin, 'client',
                         return_value=self.fc)
        self.patchobject(nova.NovaClientPlugin, 'is_version_supported',
                         return_value=False)
        resource_defns = tmpl.resource_definitions(stack)
        server = servers.Server('server_create_image_err',
                                resource_defns['server'], stack)
        self.patchobject(self.fc.limits, 'get', return_value=self.limits)
        self.patchobject(glance.GlanceClientPlugin, 'get_image',
                         return_value=self.mock_image)
        self.assertIsNone(server.validate())
    def test_resolve_attribute_server_not_found(self):
        """Attribute resolution returns '' when nova cannot find the server."""
        return_server = self.fc.servers.list()[1]
        server = self._create_test_server(return_server,
                                          'srv_resolve_attr')
        server.resource_id = '1234'
        # nova 'get' raises NotFound; the attribute degrades to empty string
        self.patchobject(self.fc.servers, 'get',
                         side_effect=fakes_nova.fake_exception())
        self.assertEqual('', server._resolve_any_attribute("accessIPv4"))
    def test_resolve_attribute_console_url(self):
        """console_urls resolves to a mapping keyed by console type."""
        server = self.fc.servers.list()[0]
        tmpl, stack = self._setup_test_stack('console_url_stack')
        self.patchobject(nova.NovaClientPlugin, 'client',
                         return_value=self.fc)
        ws = servers.Server(
            'WebServer', tmpl.resource_definitions(stack)['WebServer'], stack)
        ws.resource_id = server.id
        self.patchobject(self.fc.servers, 'get', return_value=server)
        console_urls = ws._resolve_any_attribute('console_urls')
        self.assertIsInstance(console_urls, collections.abc.Mapping)
        # every supported console type must appear as a key
        supported_consoles = ('novnc', 'xvpvnc', 'spice-html5', 'rdp-html5',
                              'serial', 'webmks')
        self.assertEqual(set(supported_consoles),
                         set(console_urls))
    def test_resolve_attribute_networks(self):
        """networks attribute includes both the name and the resolved UUID."""
        return_server = self.fc.servers.list()[1]
        server = self._create_test_server(return_server,
                                          'srv_resolve_attr')
        server.resource_id = '1234'
        server.networks = {"fake_net": ["10.0.0.3"]}
        self.patchobject(self.fc.servers, 'get', return_value=server)
        self.patchobject(neutron.NeutronClientPlugin,
                         'find_resourceid_by_name_or_id',
                         return_value='fake_uuid')
        # expect the original name key plus a duplicate entry under the UUID
        expect_networks = {"fake_uuid": ["10.0.0.3"],
                           "fake_net": ["10.0.0.3"]}
        self.assertEqual(expect_networks,
                         server._resolve_any_attribute("networks"))
    def test_empty_instance_user(self):
        """Test Nova server doesn't set instance_user in build_userdata

        Launching the instance should not pass any user name to
        build_userdata. The default cloud-init user set up for the image
        will be used instead.
        """
        return_server = self.fc.servers.list()[1]
        server = self._setup_test_server(return_server, 'without_user')
        metadata = server.metadata_get()
        build_data = self.patchobject(nova.NovaClientPlugin, 'build_userdata')
        scheduler.TaskRunner(server.create)()
        # instance_user=None is the point of the test: no user name passed
        build_data.assert_called_with(metadata, 'wordpress',
                                      instance_user=None,
                                      user_data_format='HEAT_CFNTOOLS')
def create_old_net(self, port=None, net=None,
ip=None, uuid=None, subnet=None,
port_extra_properties=None, floating_ip=None,
str_network=None, tag=None):
return {'port': port, 'network': net, 'fixed_ip': ip, 'uuid': uuid,
'subnet': subnet, 'floating_ip': floating_ip,
'port_extra_properties': port_extra_properties,
'allocate_network': str_network,
'tag': tag}
    def test_get_network_id_neutron(self):
        """_get_network_id resolves by network name/UUID and ignores ports."""
        return_server = self.fc.servers.list()[3]
        server = self._create_test_server(return_server, 'networks_update')
        # a port-only definition has no network to resolve
        net = {'port': '2a60cbaa-3d33-4af6-a9ce-83594ac546fc'}
        net_id = server._get_network_id(net)
        self.assertIsNone(net_id)
        net = {'network': 'f3ef5d2f-d7ba-4b27-af66-58ca0b81e032',
               'fixed_ip': '1.2.3.4'}
        self.patchobject(neutron.NeutronClientPlugin,
                         'find_resourceid_by_name_or_id',
                         return_value='f3ef5d2f-d7ba-4b27-af66-58ca0b81e032')
        net_id = server._get_network_id(net)
        self.assertEqual('f3ef5d2f-d7ba-4b27-af66-58ca0b81e032', net_id)
        # an empty network string also yields no id
        net = {'network': '', 'fixed_ip': '1.2.3.4'}
        net_id = server._get_network_id(net)
        self.assertIsNone(net_id)
    def test_exclude_not_updated_networks_no_matching(self):
        """No networks are excluded when old and new nets share no entries.

        Runs twice: once with an empty new_nets list and once with a
        non-matching entry; in both cases old_nets and new_nets must be
        left intact (apart from fields filled in from the interfaces).
        """
        return_server = self.fc.servers.list()[3]
        server = self._create_test_server(return_server, 'networks_update')
        for new_nets in (
                [],
                [{'port': '952fd4ae-53b9-4b39-9e5f-8929c553b5ae',
                  'network': '450abbc9-9b6d-4d6f-8c3a-c47ac34100dd'}]):
            old_nets = [
                self.create_old_net(
                    port='2a60cbaa-3d33-4af6-a9ce-83594ac546fc'),
                self.create_old_net(
                    net='f3ef5d2f-d7ba-4b27-af66-58ca0b81e032', ip='1.2.3.4'),
                self.create_old_net(
                    net='0da8adbf-a7e2-4c59-a511-96b03d2da0d7')]
            interfaces = [
                create_fake_iface(
                    port='2a60cbaa-3d33-4af6-a9ce-83594ac546fc',
                    net='450abbc9-9b6d-4d6f-8c3a-c47ac34100aa',
                    ip='4.3.2.1',
                    subnet='subnetsu-bnet-subn-etsu-bnetsubnetsu'),
                create_fake_iface(
                    port='aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa',
                    net='f3ef5d2f-d7ba-4b27-af66-58ca0b81e032',
                    ip='1.2.3.4',
                    subnet='subnetsu-bnet-subn-etsu-bnetsubnetsu'),
                create_fake_iface(
                    port='bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb',
                    net='0da8adbf-a7e2-4c59-a511-96b03d2da0d7',
                    ip='4.2.3.1',
                    subnet='subnetsu-bnet-subn-etsu-bnetsubnetsu')]
            new_nets_cpy = copy.deepcopy(new_nets)
            old_nets_cpy = copy.deepcopy(old_nets)
            # Add values to old_nets_cpy that is populated in old_nets when
            # calling update_networks_matching_iface_port() in
            # _exclude_not_updated_networks()
            old_nets_cpy[0]['fixed_ip'] = '4.3.2.1'
            old_nets_cpy[0]['network'] = '450abbc9-9b6d-4d6f-8c3a-c47ac34100aa'
            old_nets_cpy[0]['subnet'] = 'subnetsu-bnet-subn-etsu-bnetsubnetsu'
            old_nets_cpy[1]['fixed_ip'] = '1.2.3.4'
            old_nets_cpy[1]['port'] = 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'
            old_nets_cpy[1]['subnet'] = 'subnetsu-bnet-subn-etsu-bnetsubnetsu'
            old_nets_cpy[2]['fixed_ip'] = '4.2.3.1'
            old_nets_cpy[2]['port'] = 'bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb'
            old_nets_cpy[2]['subnet'] = 'subnetsu-bnet-subn-etsu-bnetsubnetsu'
            # normalize new_nets_cpy so every legacy key is present
            for net in new_nets_cpy:
                for key in ('port', 'network', 'fixed_ip', 'uuid', 'subnet',
                            'port_extra_properties', 'floating_ip',
                            'allocate_network', 'tag'):
                    net.setdefault(key)
            server._exclude_not_updated_networks(old_nets, new_nets,
                                                 interfaces)
            self.assertEqual(old_nets_cpy, old_nets)
            self.assertEqual(new_nets_cpy, new_nets)
    def test_exclude_not_updated_networks_success(self):
        """Matching old/new network pairs are removed from both lists.

        The first two old networks match the first two new ones, so after
        exclusion only the third entry remains in each list.
        """
        return_server = self.fc.servers.list()[3]
        server = self._create_test_server(return_server, 'networks_update')
        old_nets = [
            self.create_old_net(
                port='2a60cbaa-3d33-4af6-a9ce-83594ac546fc'),
            self.create_old_net(
                net='f3ef5d2f-d7ba-4b27-af66-58ca0b81e032',
                ip='1.2.3.4'),
            self.create_old_net(
                net='0da8adbf-a7e2-4c59-a511-96b03d2da0d7')]
        new_nets = [
            {'port': '2a60cbaa-3d33-4af6-a9ce-83594ac546fc'},
            {'network': 'f3ef5d2f-d7ba-4b27-af66-58ca0b81e032',
             'fixed_ip': '1.2.3.4'},
            {'port': '952fd4ae-53b9-4b39-9e5f-8929c553b5ae'}]
        interfaces = [
            create_fake_iface(port='2a60cbaa-3d33-4af6-a9ce-83594ac546fc',
                              net='aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa',
                              ip='3.4.5.6'),
            create_fake_iface(port='bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb',
                              net='f3ef5d2f-d7ba-4b27-af66-58ca0b81e032',
                              ip='1.2.3.4'),
            create_fake_iface(port='cccccccc-cccc-cccc-cccc-cccccccccccc',
                              net='0da8adbf-a7e2-4c59-a511-96b03d2da0d7',
                              ip='2.3.4.5')]
        new_nets_copy = copy.deepcopy(new_nets)
        old_nets_copy = copy.deepcopy(old_nets)
        # Add values to old_nets_copy that is populated in old_nets when
        # calling update_networks_matching_iface_port() in
        # _exclude_not_updated_networks()
        old_nets_copy[2]['fixed_ip'] = '2.3.4.5'
        old_nets_copy[2]['port'] = 'cccccccc-cccc-cccc-cccc-cccccccccccc'
        # normalize new_nets_copy so every legacy key is present
        for net in new_nets_copy:
            for key in ('port', 'network', 'fixed_ip', 'uuid', 'subnet',
                        'port_extra_properties', 'floating_ip',
                        'allocate_network', 'tag'):
                net.setdefault(key)
        server._exclude_not_updated_networks(old_nets, new_nets, interfaces)
        self.assertEqual([old_nets_copy[2]], old_nets)
        self.assertEqual([new_nets_copy[2]], new_nets)
    def test_exclude_not_updated_networks_nothing_for_update(self):
        """Identical old and new definitions leave nothing to update.

        The single old network matches the single new one, so both lists
        end up empty after exclusion.
        """
        return_server = self.fc.servers.list()[3]
        server = self._create_test_server(return_server, 'networks_update')
        old_nets = [
            self.create_old_net(
                net='f3ef5d2f-d7ba-4b27-af66-58ca0b81e032',
                ip='',
                port='')]
        new_nets = [
            {'network': 'f3ef5d2f-d7ba-4b27-af66-58ca0b81e032',
             'fixed_ip': None,
             'port': None,
             'subnet': None,
             'uuid': None,
             'port_extra_properties': None,
             'floating_ip': None,
             'allocate_network': None,
             'tag': None}]
        interfaces = [
            create_fake_iface(port='',
                              net='f3ef5d2f-d7ba-4b27-af66-58ca0b81e032',
                              ip='')]
        server._exclude_not_updated_networks(old_nets, new_nets, interfaces)
        self.assertEqual([], old_nets)
        self.assertEqual([], new_nets)
    def test_update_networks_matching_iface_port(self):
        """Every legacy network entry gets a port id from the interfaces.

        Old network definitions (by port, network, network+ip, or subnet)
        are matched against the server's interface list presented in a
        different order; afterwards each entry must carry the port id,
        network id and fixed ip of its matching interface.
        """
        return_server = self.fc.servers.list()[3]
        server = self._create_test_server(return_server, 'networks_update')
        # old order 0 1 2 3 4 5 6
        nets = [
            self.create_old_net(port='aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa'),
            self.create_old_net(net='gggggggg-1111-1111-1111-gggggggggggg',
                                ip='1.2.3.4'),
            self.create_old_net(net='gggggggg-1111-1111-1111-gggggggggggg'),
            self.create_old_net(port='dddddddd-dddd-dddd-dddd-dddddddddddd'),
            self.create_old_net(net='gggggggg-1111-1111-1111-gggggggggggg',
                                ip='5.6.7.8'),
            self.create_old_net(net='gggggggg-1111-1111-1111-gggggggggggg',
                                subnet='hhhhhhhh-1111-1111-1111-hhhhhhhhhhhh'),
            self.create_old_net(subnet='iiiiiiii-1111-1111-1111-iiiiiiiiiiii')]
        # new order 2 3 0 1 4 6 5
        interfaces = [
            create_fake_iface(port='cccccccc-cccc-cccc-cccc-cccccccccccc',
                              net=nets[2]['network'],
                              ip='10.0.0.11'),
            create_fake_iface(port=nets[3]['port'],
                              net='gggggggg-1111-1111-1111-gggggggggggg',
                              ip='10.0.0.12'),
            create_fake_iface(port=nets[0]['port'],
                              net='gggggggg-1111-1111-1111-gggggggggggg',
                              ip='10.0.0.13'),
            create_fake_iface(port='bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb',
                              net=nets[1]['network'],
                              ip=nets[1]['fixed_ip']),
            create_fake_iface(port='eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee',
                              net=nets[4]['network'],
                              ip=nets[4]['fixed_ip']),
            create_fake_iface(port='gggggggg-gggg-gggg-gggg-gggggggggggg',
                              net='gggggggg-1111-1111-1111-gggggggggggg',
                              ip='10.0.0.14',
                              subnet=nets[6]['subnet']),
            create_fake_iface(port='ffffffff-ffff-ffff-ffff-ffffffffffff',
                              net=nets[5]['network'],
                              ip='10.0.0.15',
                              subnet=nets[5]['subnet'])]
        # all networks should get port id
        expected = [
            {'port': 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa',
             'network': 'gggggggg-1111-1111-1111-gggggggggggg',
             'fixed_ip': '10.0.0.13',
             'subnet': None,
             'floating_ip': None,
             'port_extra_properties': None,
             'uuid': None,
             'allocate_network': None,
             'tag': None},
            {'port': 'bbbbbbbb-bbbb-bbbb-bbbb-bbbbbbbbbbbb',
             'network': 'gggggggg-1111-1111-1111-gggggggggggg',
             'fixed_ip': '1.2.3.4',
             'subnet': None,
             'port_extra_properties': None,
             'floating_ip': None,
             'uuid': None,
             'allocate_network': None,
             'tag': None},
            {'port': 'cccccccc-cccc-cccc-cccc-cccccccccccc',
             'network': 'gggggggg-1111-1111-1111-gggggggggggg',
             'fixed_ip': '10.0.0.11',
             'subnet': None,
             'port_extra_properties': None,
             'floating_ip': None,
             'uuid': None,
             'allocate_network': None,
             'tag': None},
            {'port': 'dddddddd-dddd-dddd-dddd-dddddddddddd',
             'network': 'gggggggg-1111-1111-1111-gggggggggggg',
             'fixed_ip': '10.0.0.12',
             'subnet': None,
             'port_extra_properties': None,
             'floating_ip': None,
             'uuid': None,
             'allocate_network': None,
             'tag': None},
            {'port': 'eeeeeeee-eeee-eeee-eeee-eeeeeeeeeeee',
             'uuid': None,
             'fixed_ip': '5.6.7.8',
             'subnet': None,
             'port_extra_properties': None,
             'floating_ip': None,
             'network': 'gggggggg-1111-1111-1111-gggggggggggg',
             'allocate_network': None,
             'tag': None},
            {'port': 'ffffffff-ffff-ffff-ffff-ffffffffffff',
             'uuid': None,
             'fixed_ip': '10.0.0.15',
             'subnet': 'hhhhhhhh-1111-1111-1111-hhhhhhhhhhhh',
             'port_extra_properties': None,
             'floating_ip': None,
             'network': 'gggggggg-1111-1111-1111-gggggggggggg',
             'allocate_network': None,
             'tag': None},
            {'port': 'gggggggg-gggg-gggg-gggg-gggggggggggg',
             'uuid': None,
             'fixed_ip': '10.0.0.14',
             'subnet': 'iiiiiiii-1111-1111-1111-iiiiiiiiiiii',
             'port_extra_properties': None,
             'floating_ip': None,
             'network': 'gggggggg-1111-1111-1111-gggggggggggg',
             'allocate_network': None,
             'tag': None}]
        self.patchobject(neutron.NeutronClientPlugin,
                         'find_resourceid_by_name_or_id',
                         return_value='gggggggg-1111-1111-1111-gggggggggggg')
        self.patchobject(neutron.NeutronClientPlugin,
                         'network_id_from_subnet_id',
                         return_value='gggggggg-1111-1111-1111-gggggggggggg')
        server.update_networks_matching_iface_port(nets, interfaces)
        self.assertEqual(expected, nets)
    def test_server_update_None_networks_with_port(self):
        """Updating from no networks to a port detaches old, attaches new."""
        return_server = self.fc.servers.list()[3]
        return_server.id = '9102'
        server = self._create_test_server(return_server, 'networks_update')
        new_networks = [{'port': '2a60cbaa-3d33-4af6-a9ce-83594ac546fc'}]
        update_props = self.server_props.copy()
        # old_networks is None, and update to new_networks with port
        update_props['networks'] = new_networks
        update_template = server.t.freeze(properties=update_props)
        self.patchobject(self.fc.servers, 'get', return_value=return_server)
        iface = create_fake_iface(
            port='aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa',
            net='450abbc9-9b6d-4d6f-8c3a-c47ac34100ef',
            ip='1.2.3.4')
        self.patchobject(return_server, 'interface_list', return_value=[iface])
        mock_detach = self.patchobject(return_server, 'interface_detach')
        mock_attach = self.patchobject(return_server, 'interface_attach')
        mock_detach_check = self.patchobject(nova.NovaClientPlugin,
                                             'check_interface_detach',
                                             return_value=True)
        mock_attach_check = self.patchobject(nova.NovaClientPlugin,
                                             'check_interface_attach',
                                             return_value=True)
        scheduler.TaskRunner(server.update, update_template)()
        self.assertEqual((server.UPDATE, server.COMPLETE), server.state)
        # exactly one detach of the existing iface and one attach of the port
        self.assertEqual(1, mock_detach.call_count)
        self.assertEqual(1, mock_attach.call_count)
        self.assertEqual(1, mock_detach_check.call_count)
        self.assertEqual(1, mock_attach_check.call_count)
    def test_server_update_None_networks_with_network_id(self):
        """Updating from no networks to network+fixed_ip creates a port."""
        return_server = self.fc.servers.list()[3]
        return_server.id = '9102'
        # the update path creates a new neutron port for the network entry
        self.patchobject(neutronclient.Client, 'create_port',
                         return_value={'port': {'id': 'abcd1234'}})
        server = self._create_test_server(return_server, 'networks_update')
        # old_networks is None, and update to new_networks with port
        new_networks = [{'network': 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa',
                         'fixed_ip': '1.2.3.4'}]
        update_props = self.server_props.copy()
        update_props['networks'] = new_networks
        update_template = server.t.freeze(properties=update_props)
        self.patchobject(self.fc.servers, 'get', return_value=return_server)
        iface = create_fake_iface(
            port='aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa',
            net='450abbc9-9b6d-4d6f-8c3a-c47ac34100ef',
            ip='1.2.3.4')
        self.patchobject(return_server, 'interface_list', return_value=[iface])
        mock_detach = self.patchobject(return_server, 'interface_detach')
        mock_attach = self.patchobject(return_server, 'interface_attach')
        mock_detach_check = self.patchobject(nova.NovaClientPlugin,
                                             'check_interface_detach',
                                             return_value=True)
        mock_attach_check = self.patchobject(nova.NovaClientPlugin,
                                             'check_interface_attach',
                                             return_value=True)
        scheduler.TaskRunner(server.update, update_template)()
        self.assertEqual((server.UPDATE, server.COMPLETE), server.state)
        self.assertEqual(1, mock_detach.call_count)
        self.assertEqual(1, mock_attach.call_count)
        self.assertEqual(1, mock_detach_check.call_count)
        self.assertEqual(1, mock_attach_check.call_count)
    def test_server_update_subnet_with_security_group(self):
        """Changing the subnet re-creates the port with the security group.

        The replacement port must be created on the network derived from
        the new subnet and carry the resolved security-group UUIDs.
        """
        return_server = self.fc.servers.list()[3]
        return_server.id = '9102'
        server = self._create_test_server(return_server, 'update_subnet')
        # set old properties for 'networks' and 'security_groups'
        before_props = self.server_props.copy()
        before_props['networks'] = [
            {'subnet': 'aaa09d50-8c23-4498-a542-aa0deb24f73e'}
        ]
        before_props['security_groups'] = ['the_sg']
        # set new property 'networks'
        new_networks = [{'subnet': '2a60cbaa-3d33-4af6-a9ce-83594ac546fc'}]
        update_props = self.server_props.copy()
        update_props['networks'] = new_networks
        update_props['security_groups'] = ['the_sg']
        update_template = server.t.freeze(properties=update_props)
        server.t = server.t.freeze(properties=before_props)
        sec_uuids = ['86c0f8ae-23a8-464f-8603-c54113ef5467']
        self.patchobject(self.fc.servers, 'get', return_value=return_server)
        self.patchobject(neutron.NeutronClientPlugin,
                         'get_secgroup_uuids', return_value=sec_uuids)
        self.patchobject(neutron.NeutronClientPlugin,
                         'network_id_from_subnet_id',
                         return_value='05d8e681-4b37-4570-bc8d-810089f706b2')
        mock_create_port = self.patchobject(
            neutronclient.Client, 'create_port')
        iface = create_fake_iface(
            port='aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa',
            net='05d8e681-4b37-4570-bc8d-810089f706b2',
            subnet='aaa09d50-8c23-4498-a542-aa0deb24f73e',
            ip='1.2.3.4')
        self.patchobject(return_server, 'interface_list', return_value=[iface])
        mock_detach = self.patchobject(return_server, 'interface_detach')
        mock_attach = self.patchobject(return_server, 'interface_attach')
        mock_detach_check = self.patchobject(nova.NovaClientPlugin,
                                             'check_interface_detach',
                                             return_value=True)
        mock_attach_check = self.patchobject(nova.NovaClientPlugin,
                                             'check_interface_attach',
                                             return_value=True)
        scheduler.TaskRunner(server.update, update_template, before=server.t)()
        self.assertEqual((server.UPDATE, server.COMPLETE), server.state)
        self.assertEqual(1, mock_detach.call_count)
        self.assertEqual(1, mock_attach.call_count)
        self.assertEqual(1, mock_detach_check.call_count)
        self.assertEqual(1, mock_attach_check.call_count)
        # the replacement port is created with the new subnet and the sg uuids
        kwargs = {'network_id': '05d8e681-4b37-4570-bc8d-810089f706b2',
                  'fixed_ips': [
                      {'subnet_id': '2a60cbaa-3d33-4af6-a9ce-83594ac546fc'}],
                  'security_groups': sec_uuids,
                  'name': 'update_subnet-port-0',
                  }
        mock_create_port.assert_called_with({'port': kwargs})
    def test_server_update_subnet_to_network_with_security_group(self):
        """Switching subnet -> network keeps the security group on the port.

        After the new interface is attached, the attached port must be
        updated with the resolved security-group UUIDs.
        """
        return_server = self.fc.servers.list()[3]
        return_server.id = '9102'
        server = self._create_test_server(return_server, 'update_subnet')
        # set old properties for 'networks' and 'security_groups'
        before_props = self.server_props.copy()
        before_props['networks'] = [
            {'subnet': 'aaa09d50-8c23-4498-a542-aa0deb24f73e'}
        ]
        before_props['security_groups'] = ['the_sg']
        # set new property 'networks'
        new_networks = [{'network': '2a60cbaa-3d33-4af6-a9ce-83594ac546fc'}]
        update_props = self.server_props.copy()
        update_props['networks'] = new_networks
        update_props['security_groups'] = ['the_sg']
        update_template = server.t.freeze(properties=update_props)
        server.t = server.t.freeze(properties=before_props)
        sec_uuids = ['86c0f8ae-23a8-464f-8603-c54113ef5467']
        self.patchobject(self.fc.servers, 'get', return_value=return_server)
        self.patchobject(neutron.NeutronClientPlugin,
                         'get_secgroup_uuids', return_value=sec_uuids)
        self.patchobject(neutron.NeutronClientPlugin,
                         'network_id_from_subnet_id',
                         return_value='05d8e681-4b37-4570-bc8d-810089f706b2')
        iface = create_fake_iface(
            port='aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa',
            net='05d8e681-4b37-4570-bc8d-810089f706b2',
            subnet='aaa09d50-8c23-4498-a542-aa0deb24f73e',
            ip='1.2.3.4')
        self.patchobject(return_server, 'interface_list', return_value=[iface])
        mock_detach = self.patchobject(return_server, 'interface_detach')
        mock_attach = self.patchobject(return_server, 'interface_attach')

        # interface_attach must return an object exposing port_id/net_id,
        # mimicking the novaclient attachment result
        def interface_attach_mock(port, net):
            class attachment(object):
                def __init__(self, port_id, net_id):
                    self.port_id = port_id
                    self.net_id = net_id
            return attachment(port, net)
        mock_attach.return_value = interface_attach_mock(
            'ad4a231b-67f7-45fe-aee9-461176b48203',
            '2a60cbaa-3d33-4af6-a9ce-83594ac546fc')
        mock_detach_check = self.patchobject(nova.NovaClientPlugin,
                                             'check_interface_detach',
                                             return_value=True)
        mock_attach_check = self.patchobject(nova.NovaClientPlugin,
                                             'check_interface_attach',
                                             return_value=True)
        mock_update_port = self.patchobject(
            neutronclient.Client, 'update_port')
        scheduler.TaskRunner(server.update, update_template, before=server.t)()
        self.assertEqual((server.UPDATE, server.COMPLETE), server.state)
        self.assertEqual(1, mock_detach.call_count)
        self.assertEqual(1, mock_attach.call_count)
        self.assertEqual(1, mock_detach_check.call_count)
        self.assertEqual(1, mock_attach_check.call_count)
        # the newly-attached port gets the security groups applied
        kwargs = {'security_groups': sec_uuids}
        mock_update_port.assert_called_with(
            'ad4a231b-67f7-45fe-aee9-461176b48203',
            {'port': kwargs})
    def test_server_update_empty_networks_with_complex_parameters(self):
        """Update to a network entry combining network, fixed_ip and port."""
        return_server = self.fc.servers.list()[3]
        return_server.id = '9102'
        server = self._create_test_server(return_server, 'networks_update')
        new_networks = [{'network': 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa',
                         'fixed_ip': '1.2.3.4',
                         'port': '2a60cbaa-3d33-4af6-a9ce-83594ac546fc'}]
        update_props = self.server_props.copy()
        update_props['networks'] = new_networks
        update_template = server.t.freeze(properties=update_props)
        self.patchobject(self.fc.servers, 'get', return_value=return_server)
        iface = create_fake_iface(
            port='aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa',
            net='450abbc9-9b6d-4d6f-8c3a-c47ac34100ef',
            ip='1.2.3.4')
        self.patchobject(return_server, 'interface_list', return_value=[iface])
        mock_detach = self.patchobject(return_server, 'interface_detach')
        mock_attach = self.patchobject(return_server, 'interface_attach')
        mock_detach_check = self.patchobject(nova.NovaClientPlugin,
                                             'check_interface_detach',
                                             return_value=True)
        mock_attach_check = self.patchobject(nova.NovaClientPlugin,
                                             'check_interface_attach',
                                             return_value=True)
        scheduler.TaskRunner(server.update, update_template)()
        self.assertEqual((server.UPDATE, server.COMPLETE), server.state)
        # one detach of the existing iface, one attach of the new definition
        self.assertEqual(1, mock_detach.call_count)
        self.assertEqual(1, mock_attach.call_count)
        self.assertEqual(1, mock_detach_check.call_count)
        self.assertEqual(1, mock_attach_check.call_count)
    def test_server_update_empty_networks_to_None(self):
        """Removing an empty networks property must not touch interfaces."""
        return_server = self.fc.servers.list()[3]
        return_server.id = '9102'
        server = self._create_test_server(return_server, 'networks_update',
                                          networks=[])
        update_props = copy.deepcopy(self.server_props)
        update_props.pop('networks')
        update_template = server.t.freeze(properties=update_props)
        self.patchobject(self.fc.servers, 'get', return_value=return_server)
        iface = create_fake_iface(
            port='aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa',
            net='450abbc9-9b6d-4d6f-8c3a-c47ac34100ef',
            ip='1.2.3.4')
        self.patchobject(return_server, 'interface_list', return_value=[iface])
        mock_detach = self.patchobject(return_server, 'interface_detach')
        mock_attach = self.patchobject(return_server, 'interface_attach')
        mock_detach_check = self.patchobject(nova.NovaClientPlugin,
                                             'check_interface_detach',
                                             return_value=True)
        mock_attach_check = self.patchobject(nova.NovaClientPlugin,
                                             'check_interface_attach',
                                             return_value=True)
        scheduler.TaskRunner(server.update, update_template)()
        self.assertEqual((server.UPDATE, server.COMPLETE), server.state)
        # [] -> absent is a no-op: no interface is detached or attached
        self.assertEqual(0, mock_detach.call_count)
        self.assertEqual(0, mock_attach.call_count)
        self.assertEqual(0, mock_detach_check.call_count)
        self.assertEqual(0, mock_attach_check.call_count)
    def _test_server_update_to_auto(self, available_multi_nets=None):
        """Exercise updating networks to 'auto' allocation.

        :param available_multi_nets: list of networks visible to the tenant.
            With zero or one network the update succeeds via automatic
            allocation; with multiple networks the update must fail because
            'auto' is ambiguous.
        """
        multi_nets = available_multi_nets or []
        return_server = self.fc.servers.list()[1]
        return_server.id = '5678'
        old_networks = [
            {'port': '95e25541-d26a-478d-8f36-ae1c8f6b74dc'}]
        server = self._create_test_server(return_server, 'networks_update',
                                          networks=old_networks)
        update_props = self.server_props.copy()
        update_props['networks'] = [{'allocate_network': 'auto'}]
        update_template = server.t.freeze(properties=update_props)
        self.patchobject(self.fc.servers, 'get', return_value=return_server)
        poor_interfaces = [
            create_fake_iface(port='95e25541-d26a-478d-8f36-ae1c8f6b74dc',
                              net='aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa',
                              ip='11.12.13.14')]
        self.patchobject(return_server, 'interface_list',
                         return_value=poor_interfaces)
        self.patchobject(server, '_get_available_networks',
                         return_value=multi_nets)
        mock_detach = self.patchobject(return_server, 'interface_detach')
        mock_attach = self.patchobject(return_server, 'interface_attach')
        updater = scheduler.TaskRunner(server.update, update_template)
        if not multi_nets:
            # no ambiguity: auto-allocation kicks in and one net is attached
            self.patchobject(nova.NovaClientPlugin, 'check_interface_detach',
                             return_value=True)
            self.patchobject(nova.NovaClientPlugin,
                             'check_interface_attach',
                             return_value=True)
            auto_allocate_net = '9cfe6c74-c105-4906-9a1f-81d9064e9bca'
            self.patchobject(server, '_auto_allocate_network',
                             return_value=[auto_allocate_net])
            updater()
            self.assertEqual((server.UPDATE, server.COMPLETE), server.state)
            self.assertEqual(1, mock_detach.call_count)
            self.assertEqual(1, mock_attach.call_count)
            mock_attach.assert_called_once_with(None,
                                                [auto_allocate_net],
                                                None)
        else:
            # multiple candidate networks: 'auto' cannot pick one, so fail
            self.assertRaises(exception.ResourceFailure, updater)
            self.assertEqual(0, mock_detach.call_count)
            self.assertEqual(0, mock_attach.call_count)
    def test_server_update_str_networks_auto(self):
        """'auto' allocation succeeds when no tenant networks are visible."""
        self._test_server_update_to_auto()
    def test_server_update_str_networks_auto_multi_nets(self):
        """'auto' allocation fails when multiple tenant networks exist."""
        available_nets = ['net_1', 'net_2']
        self._test_server_update_to_auto(available_nets)
    def test_server_update_str_networks_none(self):
        """Updating networks to 'none' detaches everything, attaches nothing."""
        return_server = self.fc.servers.list()[1]
        return_server.id = '5678'
        old_networks = [
            {'port': '95e25541-d26a-478d-8f36-ae1c8f6b74dc'},
            {'port': '4121f61a-1b2e-4ab0-901e-eade9b1cb09d'},
            {'network': 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa',
             'fixed_ip': '31.32.33.34'}]
        server = self._create_test_server(return_server, 'networks_update',
                                          networks=old_networks)
        update_props = self.server_props.copy()
        update_props['networks'] = [{'allocate_network': 'none'}]
        update_template = server.t.freeze(properties=update_props)
        self.patchobject(self.fc.servers, 'get', return_value=return_server)
        port_interfaces = [
            create_fake_iface(port='95e25541-d26a-478d-8f36-ae1c8f6b74dc',
                              net='aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa',
                              ip='11.12.13.14'),
            create_fake_iface(port='4121f61a-1b2e-4ab0-901e-eade9b1cb09d',
                              net='aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa',
                              ip='21.22.23.24'),
            create_fake_iface(port='0907fa82-a024-43c2-9fc5-efa1bccaa74a',
                              net='aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa',
                              ip='31.32.33.34')]
        self.patchobject(return_server, 'interface_list',
                         return_value=port_interfaces)
        mock_detach = self.patchobject(return_server, 'interface_detach')
        self.patchobject(nova.NovaClientPlugin,
                         'check_interface_detach',
                         return_value=True)
        mock_attach = self.patchobject(return_server, 'interface_attach')
        scheduler.TaskRunner(server.update, update_template)()
        self.assertEqual((server.UPDATE, server.COMPLETE), server.state)
        # all three old interfaces removed, no replacements attached
        self.assertEqual(3, mock_detach.call_count)
        self.assertEqual(0, mock_attach.call_count)
    def test_server_update_networks_with_complex_parameters(self):
        """Only non-matching old networks are detached on update.

        One old entry (network+fixed_ip 1.2.3.4) survives unchanged in the
        new list, so three of the four old interfaces are detached and a
        single new one (the port entry) is attached.
        """
        return_server = self.fc.servers.list()[1]
        return_server.id = '5678'
        old_networks = [
            {'port': '95e25541-d26a-478d-8f36-ae1c8f6b74dc'},
            {'network': 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa',
             'fixed_ip': '1.2.3.4'},
            {'port': '4121f61a-1b2e-4ab0-901e-eade9b1cb09d'},
            {'network': 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa',
             'fixed_ip': '31.32.33.34'}]
        server = self._create_test_server(return_server, 'networks_update',
                                          networks=old_networks)
        new_networks = [
            {'network': 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa',
             'fixed_ip': '1.2.3.4'},
            {'port': '2a60cbaa-3d33-4af6-a9ce-83594ac546fc'}]
        update_props = copy.deepcopy(self.server_props)
        update_props['networks'] = new_networks
        update_template = server.t.freeze(properties=update_props)
        self.patchobject(self.fc.servers, 'get', return_value=return_server)
        poor_interfaces = [
            create_fake_iface(port='95e25541-d26a-478d-8f36-ae1c8f6b74dc',
                              net='aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa',
                              ip='11.12.13.14'),
            create_fake_iface(port='450abbc9-9b6d-4d6f-8c3a-c47ac34100ef',
                              net='aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa',
                              ip='1.2.3.4'),
            create_fake_iface(port='4121f61a-1b2e-4ab0-901e-eade9b1cb09d',
                              net='aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa',
                              ip='21.22.23.24'),
            create_fake_iface(port='0907fa82-a024-43c2-9fc5-efa1bccaa74a',
                              net='aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa',
                              ip='31.32.33.34')]
        self.patchobject(return_server, 'interface_list',
                         return_value=poor_interfaces)
        mock_detach = self.patchobject(return_server, 'interface_detach')
        mock_attach = self.patchobject(return_server, 'interface_attach')
        mock_detach_check = self.patchobject(nova.NovaClientPlugin,
                                             'check_interface_detach',
                                             return_value=True)
        mock_attach_check = self.patchobject(nova.NovaClientPlugin,
                                             'check_interface_attach',
                                             return_value=True)
        scheduler.TaskRunner(server.update, update_template)()
        self.assertEqual((server.UPDATE, server.COMPLETE), server.state)
        # we only detach the three old networks, and attach a new one
        self.assertEqual(3, mock_detach.call_count)
        self.assertEqual(1, mock_attach.call_count)
        self.assertEqual(3, mock_detach_check.call_count)
        self.assertEqual(1, mock_attach_check.call_count)
    def test_server_update_networks_with_None(self):
        """Setting ``networks`` to None detaches every existing interface.

        Three old networks (two by port id, one by network+fixed_ip) are
        replaced by ``networks=None``; all three interfaces must be
        detached and a single default interface attached instead.
        """
        return_server = self.fc.servers.list()[1]
        return_server.id = '5678'
        old_networks = [
            {'port': '95e25541-d26a-478d-8f36-ae1c8f6b74dc'},
            {'port': '4121f61a-1b2e-4ab0-901e-eade9b1cb09d'},
            {'network': 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa',
             'fixed_ip': '31.32.33.34'}]
        server = self._create_test_server(return_server, 'networks_update',
                                          networks=old_networks)
        update_props = self.server_props.copy()
        update_props['networks'] = None
        update_template = server.t.freeze(properties=update_props)
        self.patchobject(self.fc.servers, 'get', return_value=return_server)
        # Interfaces currently attached to the server, matching old_networks.
        poor_interfaces = [
            create_fake_iface(port='95e25541-d26a-478d-8f36-ae1c8f6b74dc',
                              net='aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa',
                              ip='11.12.13.14'),
            create_fake_iface(port='4121f61a-1b2e-4ab0-901e-eade9b1cb09d',
                              net='aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa',
                              ip='21.22.23.24'),
            create_fake_iface(port='0907fa82-a024-43c2-9fc5-efa1bccaa74a',
                              net='aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa',
                              ip='31.32.33.34')]
        self.patchobject(return_server, 'interface_list',
                         return_value=poor_interfaces)
        mock_detach = self.patchobject(return_server, 'interface_detach')
        mock_attach = self.patchobject(return_server, 'interface_attach')
        mock_detach_check = self.patchobject(nova.NovaClientPlugin,
                                             'check_interface_detach',
                                             return_value=True)
        mock_attach_check = self.patchobject(nova.NovaClientPlugin,
                                             'check_interface_attach',
                                             return_value=True)
        scheduler.TaskRunner(server.update, update_template, before=server.t)()
        self.assertEqual((server.UPDATE, server.COMPLETE), server.state)
        # All three old interfaces are detached; one default one is attached.
        self.assertEqual(3, mock_detach.call_count)
        self.assertEqual(1, mock_attach.call_count)
        self.assertEqual(3, mock_detach_check.call_count)
        self.assertEqual(1, mock_attach_check.call_count)
    def test_server_update_old_networks_to_empty_list(self):
        """Updating ``networks`` to [] behaves like None: detach everything."""
        return_server = self.fc.servers.list()[1]
        return_server.id = '5678'
        old_networks = [
            {'port': '95e25541-d26a-478d-8f36-ae1c8f6b74dc'},
            {'port': '4121f61a-1b2e-4ab0-901e-eade9b1cb09d'},
            {'network': 'aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa',
             'fixed_ip': '31.32.33.34'}]
        server = self._create_test_server(return_server, 'networks_update',
                                          networks=old_networks)
        update_props = self.server_props.copy()
        update_props['networks'] = []
        update_template = server.t.freeze(properties=update_props)
        self.patchobject(self.fc.servers, 'get', return_value=return_server)
        # Interfaces currently attached to the server, matching old_networks.
        poor_interfaces = [
            create_fake_iface(port='95e25541-d26a-478d-8f36-ae1c8f6b74dc',
                              net='aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa',
                              ip='11.12.13.14'),
            create_fake_iface(port='4121f61a-1b2e-4ab0-901e-eade9b1cb09d',
                              net='aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa',
                              ip='21.22.23.24'),
            create_fake_iface(port='0907fa82-a024-43c2-9fc5-efa1bccaa74a',
                              net='aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa',
                              ip='31.32.33.34')]
        self.patchobject(return_server, 'interface_list',
                         return_value=poor_interfaces)
        mock_detach = self.patchobject(return_server, 'interface_detach')
        mock_attach = self.patchobject(return_server, 'interface_attach')
        mock_detach_check = self.patchobject(nova.NovaClientPlugin,
                                             'check_interface_detach',
                                             return_value=True)
        mock_attach_check = self.patchobject(nova.NovaClientPlugin,
                                             'check_interface_attach',
                                             return_value=True)
        scheduler.TaskRunner(server.update, update_template)()
        self.assertEqual((server.UPDATE, server.COMPLETE), server.state)
        # All three old interfaces are detached; one default one is attached.
        self.assertEqual(3, mock_detach.call_count)
        self.assertEqual(1, mock_attach.call_count)
        self.assertEqual(3, mock_detach_check.call_count)
        self.assertEqual(1, mock_attach_check.call_count)
    def test_server_update_remove_network_non_empty(self):
        """Shrinking ``networks`` detaches only the removed port."""
        return_server = self.fc.servers.list()[1]
        return_server.id = '5678'
        old_networks = [
            {'port': '95e25541-d26a-478d-8f36-ae1c8f6b74dc'},
            {'port': '4121f61a-1b2e-4ab0-901e-eade9b1cb09d'}]
        new_networks = [
            {'port': '95e25541-d26a-478d-8f36-ae1c8f6b74dc'}]
        server = self._create_test_server(return_server, 'networks_update',
                                          networks=old_networks)
        update_props = self.server_props.copy()
        update_props['networks'] = new_networks
        update_template = server.t.freeze(properties=update_props)
        self.patchobject(self.fc.servers, 'get', return_value=return_server)
        poor_interfaces = [
            create_fake_iface(port='95e25541-d26a-478d-8f36-ae1c8f6b74dc',
                              net='aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa',
                              ip='11.12.13.14'),
            create_fake_iface(port='4121f61a-1b2e-4ab0-901e-eade9b1cb09d',
                              net='aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa',
                              ip='21.22.23.24')]
        self.patchobject(return_server, 'interface_list',
                         return_value=poor_interfaces)
        mock_detach = self.patchobject(return_server, 'interface_detach')
        mock_attach = self.patchobject(return_server, 'interface_attach')
        mock_detach_check = self.patchobject(nova.NovaClientPlugin,
                                             'check_interface_detach',
                                             return_value=True)
        scheduler.TaskRunner(server.update, update_template)()
        self.assertEqual((server.UPDATE, server.COMPLETE), server.state)
        # Only the port that disappeared from the list is detached; the
        # remaining port is kept, so nothing new is attached.
        self.assertEqual(1, mock_detach.call_count)
        self.assertEqual(1, mock_detach_check.call_count)
        self.assertEqual(0, mock_attach.call_count)
def test_server_properties_validation_create_and_update(self):
return_server = self.fc.servers.list()[1]
# create
# validation calls are already mocked there
server = self._create_test_server(return_server,
'my_server')
update_props = self.server_props.copy()
update_props['image'] = 'F17-x86_64-gold'
update_props['image_update_policy'] = 'REPLACE'
update_template = server.t.freeze(properties=update_props)
updater = scheduler.TaskRunner(server.update, update_template)
self.assertRaises(resource.UpdateReplace, updater)
    def test_server_properties_validation_create_and_update_fail(self):
        """Update validation fails when the new image cannot be resolved."""
        return_server = self.fc.servers.list()[1]
        # create
        # validation calls are already mocked there
        server = self._create_test_server(return_server,
                                          'my_server')
        # First find_image_by_name_or_id call (create) succeeds with id 1;
        # the second call (update validation) raises EntityMatchNotFound.
        ex = glance.client_exception.EntityMatchNotFound(entity='image',
                                                         args='Update Image')
        self.patchobject(glance.GlanceClientPlugin,
                         'find_image_by_name_or_id',
                         side_effect=[1, ex])
        update_props = self.server_props.copy()
        update_props['image'] = 'Update Image'
        update_template = server.t.freeze(properties=update_props)
        # update
        updater = scheduler.TaskRunner(server.update, update_template)
        err = self.assertRaises(exception.ResourceFailure,
                                updater)
        self.assertEqual("StackValidationFailed: resources.my_server: "
                         "Property error: Properties.image: Error validating "
                         "value '1': No image matching Update Image.",
                         str(err))
def test_server_snapshot(self):
return_server = self.fc.servers.list()[1]
return_server.id = '1234'
server = self._create_test_server(return_server,
'test_server_snapshot')
scheduler.TaskRunner(server.snapshot)()
self.assertEqual((server.SNAPSHOT, server.COMPLETE), server.state)
self.assertEqual({'snapshot_image_id': '456'},
resource_data_object.ResourceData.get_all(server))
def test_server_check_snapshot_complete_image_in_deleted(self):
self._test_server_check_snapshot_complete(image_status='DELETED')
def test_server_check_snapshot_complete_image_in_error(self):
self._test_server_check_snapshot_complete()
    def test_server_check_snapshot_complete_fail(self):
        # NOTE(review): identical to
        # test_server_check_snapshot_complete_image_in_error — both exercise
        # the default image_status='ERROR'; possibly a redundant test.
        self._test_server_check_snapshot_complete()
def test_server_check_snapshot_complete_with_not_complete_task_state(self):
for task_state in {'image_uploading', 'image_snapshot_pending',
'image_snapshot', 'image_pending_upload'}:
self._test_check_snapshot_complete_with_task_state(
task_state=task_state)
def test_server_check_snapshot_complete_with_active_task_state(self):
self._test_check_snapshot_complete_with_task_state()
    def _test_check_snapshot_complete_with_task_state(self,
                                                      task_state='active'):
        """Poll check_snapshot_complete while nova reports *task_state*.

        The glance image is already 'active', so completion hinges solely
        on the server's OS-EXT-STS:task_state attribute.
        """
        return_server = self.fc.servers.list()[1]
        return_server.id = '1234'
        server = self._create_test_server(return_server,
                                          'test_server_snapshot')
        image = mock.MagicMock(status='active')
        self.patchobject(glance.GlanceClientPlugin, 'get_image',
                         return_value=image)
        server_with_task_state = mock.Mock()
        # setattr() because the attribute name contains ':' and '-'.
        setattr(server_with_task_state, 'OS-EXT-STS:task_state', task_state)
        mock_get = self.patchobject(
            nova.NovaClientPlugin, 'get_server',
            return_value=server_with_task_state)
        if task_state not in {'image_uploading', 'image_snapshot_pending',
                              'image_snapshot', 'image_pending_upload'}:
            self.assertTrue(server.check_snapshot_complete('fake_iamge_id'))
        else:
            self.assertFalse(server.check_snapshot_complete('fake_iamge_id'))
        mock_get.assert_called_once_with(server.resource_id)
    def _test_server_check_snapshot_complete(self, image_status='ERROR'):
        """Snapshot a server whose image ends in *image_status*; expect failure."""
        return_server = self.fc.servers.list()[1]
        return_server.id = '1234'
        server = self._create_test_server(return_server,
                                          'test_server_snapshot')
        image_in_error = mock.MagicMock(status=image_status)
        self.patchobject(glance.GlanceClientPlugin, 'get_image',
                         return_value=image_in_error)
        self.assertRaises(exception.ResourceFailure,
                          scheduler.TaskRunner(server.snapshot))
        self.assertEqual((server.SNAPSHOT, server.FAILED), server.state)
        # test snapshot_image_id already set to resource data
        self.assertEqual({'snapshot_image_id': '456'},
                         resource_data_object.ResourceData.get_all(server))
    def test_server_dont_validate_personality_if_personality_isnt_set(self):
        """validate() must not query nova limits when personality is unset."""
        stack_name = 'srv_val'
        (tmpl, stack) = self._setup_test_stack(stack_name)
        resource_defns = tmpl.resource_definitions(stack)
        server = servers.Server('server_create_image_err',
                                resource_defns['WebServer'], stack)
        self.patchobject(nova.NovaClientPlugin, 'get_flavor',
                         return_value=self.mock_flavor)
        self.patchobject(glance.GlanceClientPlugin, 'get_image',
                         return_value=self.mock_image)
        mock_limits = self.patchobject(nova.NovaClientPlugin,
                                       'absolute_limits')
        self.patchobject(nova.NovaClientPlugin, 'client')
        # Assert here checks that server resource validates, but actually
        # this call is Act stage of this test. We calling server.validate()
        # to verify that no excessive calls to Nova are made during validation.
        self.assertIsNone(server.validate())
        # Check nova.NovaClientPlugin.absolute_limits is not called during
        # call to server.validate()
        self.assertFalse(mock_limits.called)
    def test_server_validate_connection_error_retry_successful(self):
        """validate() retries a transient ConnectionError from nova limits."""
        stack_name = 'srv_val'
        (tmpl, stack) = self._setup_test_stack(stack_name)
        # A personality forces validate() to fetch the limits.
        tmpl.t['Resources']['WebServer']['Properties'][
            'personality'] = {"/fake/path1": "a" * 10}
        self.patchobject(nova.NovaClientPlugin, 'client',
                         return_value=self.fc)
        self.patchobject(nova.NovaClientPlugin, 'is_version_supported',
                         return_value=False)
        resource_defns = tmpl.resource_definitions(stack)
        server = servers.Server('server_create_image_err',
                                resource_defns['WebServer'], stack)
        # First limits call fails; the retry succeeds.
        self.patchobject(self.fc.limits, 'get',
                         side_effect=[requests.ConnectionError(),
                                      self.limits])
        self.patchobject(glance.GlanceClientPlugin, 'get_image',
                         return_value=self.mock_image)
        self.assertIsNone(server.validate())
    def test_server_validate_connection_error_retry_failure(self):
        """validate() re-raises ConnectionError once retries are exhausted."""
        stack_name = 'srv_val'
        (tmpl, stack) = self._setup_test_stack(stack_name)
        # A personality forces validate() to fetch the limits.
        tmpl.t['Resources']['WebServer']['Properties'][
            'personality'] = {"/fake/path1": "a" * 10}
        self.patchobject(nova.NovaClientPlugin, 'client',
                         return_value=self.fc)
        self.patchobject(nova.NovaClientPlugin, 'is_version_supported',
                         return_value=False)
        resource_defns = tmpl.resource_definitions(stack)
        server = servers.Server('server_create_image_err',
                                resource_defns['WebServer'], stack)
        # Every limits call fails, so the retries run out.
        self.patchobject(self.fc.limits, 'get',
                         side_effect=[requests.ConnectionError(),
                                      requests.ConnectionError(),
                                      requests.ConnectionError()])
        self.patchobject(glance.GlanceClientPlugin, 'get_image',
                         return_value=self.mock_image)
        self.assertRaises(requests.ConnectionError, server.validate)
    def test_server_restore(self):
        """A stack snapshot can be taken and then restored onto the server."""
        t = template_format.parse(ns_template)
        tmpl = template.Template(t, files={'a_file': 'the content'})
        stack = parser.Stack(utils.dummy_context(), "server_restore", tmpl)
        stack.store()
        self.patchobject(nova.NovaClientPlugin, 'client',
                         return_value=self.fc)
        self.patchobject(glance.GlanceClientPlugin, 'get_image',
                         return_value=self.mock_image)
        self.patchobject(stack['server'], 'store_external_ports')
        return_server = self.fc.servers.list()[1]
        return_server.id = '1234'
        mock_create = self.patchobject(self.fc.servers, 'create',
                                       return_value=return_server)
        self.patchobject(self.fc.servers, 'get',
                         return_value=return_server)
        self.patchobject(neutron.NeutronClientPlugin,
                         'find_resourceid_by_name_or_id',
                         return_value='aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa')
        self.patchobject(return_server, 'get', return_value=None)
        scheduler.TaskRunner(stack.create)()
        self.assertEqual(1, mock_create.call_count)
        self.assertEqual((stack.CREATE, stack.COMPLETE), stack.state)
        scheduler.TaskRunner(stack.snapshot, None)()
        self.assertEqual((stack.SNAPSHOT, stack.COMPLETE), stack.state)
        data = stack.prepare_abandon()
        resource_data = data['resources']['server']['resource_data']
        # Point the snapshot at a known image name before restoring.
        resource_data['snapshot_image_id'] = 'CentOS 5.2'
        fake_snapshot = collections.namedtuple(
            'Snapshot', ('data', 'stack_id'))(data, stack.id)
        stack.restore(fake_snapshot)
        self.assertEqual((stack.RESTORE, stack.COMPLETE), stack.state)
    def test_snapshot_policy(self):
        """DeletionPolicy=Snapshot images the server before deleting it."""
        t = template_format.parse(wp_template)
        t['Resources']['WebServer']['DeletionPolicy'] = 'Snapshot'
        tmpl = template.Template(t)
        stack = parser.Stack(
            utils.dummy_context(), 'snapshot_policy', tmpl)
        stack.store()
        self.patchobject(stack['WebServer'], 'store_external_ports')
        mock_plugin = self.patchobject(nova.NovaClientPlugin, 'client')
        mock_plugin.return_value = self.fc
        return_server = self.fc.servers.list()[1]
        return_server.id = '1234'
        mock_create = self.patchobject(self.fc.servers, 'create')
        mock_create.return_value = return_server
        mock_get = self.patchobject(self.fc.servers, 'get')
        mock_get.return_value = return_server
        image = self.fc.servers.create_image('1234', 'name')
        create_image = self.patchobject(self.fc.servers, 'create_image')
        create_image.return_value = image
        self.patchobject(glance.GlanceClientPlugin, 'get_image',
                         return_value=self.mock_image)
        # NotFound from the final delete is tolerated: the stack delete
        # still completes (asserted below).
        delete_server = self.patchobject(self.fc.servers, 'delete')
        delete_server.side_effect = nova_exceptions.NotFound(404)
        scheduler.TaskRunner(stack.create)()
        self.assertEqual((stack.CREATE, stack.COMPLETE), stack.state)
        scheduler.TaskRunner(stack.delete)()
        self.assertEqual((stack.DELETE, stack.COMPLETE), stack.state)
        create_image.assert_called_once_with(
            '1234', utils.PhysName('snapshot_policy', 'WebServer'))
        delete_server.assert_called_once_with('1234')
def test_snapshot_policy_image_failed(self):
t = template_format.parse(wp_template)
t['Resources']['WebServer']['DeletionPolicy'] = 'Snapshot'
tmpl = template.Template(t)
stack = parser.Stack(
utils.dummy_context(), 'snapshot_policy', tmpl)
stack.store()
self.patchobject(stack['WebServer'], 'store_external_ports')
mock_plugin = self.patchobject(nova.NovaClientPlugin, 'client')
mock_plugin.return_value = self.fc
self.patchobject(glance.GlanceClientPlugin, 'get_image',
return_value=self.mock_image)
return_server = self.fc.servers.list()[1]
return_server.id = '1234'
mock_create = self.patchobject(self.fc.servers, 'create')
mock_create.return_value = return_server
mock_get = self.patchobject(self.fc.servers, 'get')
mock_get.return_value = return_server
image = self.fc.servers.create_image('1234', 'name')
create_image = self.patchobject(self.fc.servers, 'create_image')
create_image.return_value = image
delete_server = self.patchobject(self.fc.servers, 'delete')
delete_server.side_effect = nova_exceptions.NotFound(404)
scheduler.TaskRunner(stack.create)()
self.assertEqual((stack.CREATE, stack.COMPLETE), stack.state)
failed_image = mock.Mock(**{
'id': 456,
'name': 'CentOS 5.2',
'updated': '2010-10-10T12:00:00Z',
'created': '2010-08-10T12:00:00Z',
'status': 'ERROR'})
self.patchobject(glance.GlanceClientPlugin, 'get_image',
return_value=failed_image)
return_server = self.fc.servers.list()[1]
scheduler.TaskRunner(stack.delete)()
self.assertEqual((stack.DELETE, stack.FAILED), stack.state)
self.assertEqual(
'Resource DELETE failed: Error: resources.WebServer: ERROR',
stack.status_reason)
create_image.assert_called_once_with(
'1234', utils.PhysName('snapshot_policy', 'WebServer'))
delete_server.assert_not_called()
    def test_handle_snapshot_delete(self):
        """handle_snapshot_delete skips imaging for missing/failed servers."""
        t = template_format.parse(wp_template)
        t['Resources']['WebServer']['DeletionPolicy'] = 'Snapshot'
        tmpl = template.Template(t)
        stack = parser.Stack(
            utils.dummy_context(), 'snapshot_policy', tmpl)
        stack.store()
        rsrc = stack['WebServer']
        mock_plugin = self.patchobject(nova.NovaClientPlugin, 'client')
        mock_plugin.return_value = self.fc
        delete_server = self.patchobject(self.fc.servers, 'delete')
        delete_server.side_effect = nova_exceptions.NotFound(404)
        create_image = self.patchobject(self.fc.servers, 'create_image')
        # test resource_id is None
        self.patchobject(servers.Server, 'user_data_software_config',
                         return_value=True)
        delete_internal_ports = self.patchobject(servers.Server,
                                                 '_delete_internal_ports')
        delete_queue = self.patchobject(servers.Server, '_delete_queue')
        delete_user = self.patchobject(servers.Server, '_delete_user')
        delete_swift_object = self.patchobject(servers.Server,
                                               '_delete_temp_url')
        rsrc.handle_snapshot_delete((rsrc.CREATE, rsrc.FAILED))
        delete_server.assert_not_called()
        create_image.assert_not_called()
        # attempt to delete queue/user/swift_object/internal_ports
        # if no resource_id
        delete_internal_ports.assert_called_once_with()
        delete_queue.assert_called_once_with()
        delete_user.assert_called_once_with()
        delete_swift_object.assert_called_once_with()
        # test has resource_id but state is CREATE_FAILED
        rsrc.resource_id = '4567'
        rsrc.handle_snapshot_delete((rsrc.CREATE, rsrc.FAILED))
        # A failed create is deleted outright, without snapshotting first.
        delete_server.assert_called_once_with('4567')
        create_image.assert_not_called()
        # attempt to delete internal_ports if has resource_id
        self.assertEqual(2, delete_internal_ports.call_count)
    def test_handle_delete_without_resource_id(self):
        """handle_delete with no resource_id still cleans side artifacts."""
        t = template_format.parse(wp_template)
        tmpl = template.Template(t)
        stack = parser.Stack(
            utils.dummy_context(), 'without_resource_id', tmpl)
        rsrc = stack['WebServer']
        delete_server = self.patchobject(self.fc.servers, 'delete')
        # test resource_id is None
        self.patchobject(servers.Server, 'user_data_software_config',
                         return_value=True)
        delete_internal_ports = self.patchobject(servers.Server,
                                                 '_delete_internal_ports')
        delete_queue = self.patchobject(servers.Server, '_delete_queue')
        delete_user = self.patchobject(servers.Server, '_delete_user')
        delete_swift_object = self.patchobject(servers.Server,
                                               '_delete_temp_url')
        rsrc.handle_delete()
        # No nova server exists, so no delete call goes out.
        delete_server.assert_not_called()
        # attempt to delete queue/user/swift_object/internal_ports
        # if no resource_id
        delete_internal_ports.assert_called_once_with()
        delete_queue.assert_called_once_with()
        delete_user.assert_called_once_with()
        delete_swift_object.assert_called_once_with()
class ServerInternalPortTest(ServersTest):
def setUp(self):
super(ServerInternalPortTest, self).setUp()
self.resolve = self.patchobject(neutron.NeutronClientPlugin,
'find_resourceid_by_name_or_id')
self.port_create = self.patchobject(neutronclient.Client,
'create_port')
self.port_delete = self.patchobject(neutronclient.Client,
'delete_port')
self.port_show = self.patchobject(neutronclient.Client,
'show_port')
def neutron_side_effect(*args):
if args[0] == 'subnet':
return '1234'
if args[0] == 'network':
return '4321'
if args[0] == 'port':
return '12345'
self.resolve.side_effect = neutron_side_effect
def _return_template_stack_and_rsrc_defn(self, stack_name, temp):
templ = template.Template(template_format.parse(temp),
env=environment.Environment(
{'key_name': 'test'}))
stack = parser.Stack(utils.dummy_context(), stack_name, templ,
stack_id=uuidutils.generate_uuid(),
stack_user_project_id='8888')
resource_defns = templ.resource_definitions(stack)
server = servers.Server('server', resource_defns['server'],
stack)
return templ, stack, server
    def test_build_nics_without_internal_port(self):
        """A network entry that already names a port needs no internal port."""
        tmpl = """
        heat_template_version: 2015-10-15
        resources:
          server:
            type: OS::Nova::Server
            properties:
              flavor: m1.small
              image: F17-x86_64-gold
              networks:
                - port: 12345
                  network: 4321
        """
        t, stack, server = self._return_template_stack_and_rsrc_defn('test',
                                                                     tmpl)
        create_internal_port = self.patchobject(server,
                                                '_create_internal_port',
                                                return_value='12345')
        networks = [{'port': '12345', 'network': '4321'}]
        nics = server._build_nics(networks)
        self.assertEqual([{'port-id': '12345', 'net-id': '4321'}], nics)
        # The existing port is reused; no internal port gets created.
        self.assertEqual(0, create_internal_port.call_count)
    def test_validate_internal_port_subnet_not_this_network(self):
        """_build_nics rejects a subnet that resolves to another network."""
        tmpl = """
        heat_template_version: 2015-10-15
        resources:
          server:
            type: OS::Nova::Server
            properties:
              flavor: m1.small
              image: F17-x86_64-gold
              networks:
                - network: 4321
                  subnet: 1234
        """
        t, stack, server = self._return_template_stack_and_rsrc_defn('test',
                                                                     tmpl)
        networks = server.properties['networks']
        for network in networks:
            # validation passes at validate time
            server._validate_network(network)
        # At build time the subnet maps to a different network id, so the
        # mismatch is detected and reported.
        self.patchobject(neutron.NeutronClientPlugin,
                         'network_id_from_subnet_id',
                         return_value='not_this_network')
        ex = self.assertRaises(exception.StackValidationFailed,
                               server._build_nics, networks)
        self.assertEqual('Specified subnet 1234 does not belongs to '
                         'network 4321.', str(ex))
    def test_build_nics_create_internal_port_all_props_without_extras(self):
        """A net/subnet/fixed_ip entry creates a port with security groups."""
        tmpl = """
        heat_template_version: 2015-10-15
        resources:
          server:
            type: OS::Nova::Server
            properties:
              flavor: m1.small
              image: F17-x86_64-gold
              security_groups:
                - test_sec
              networks:
                - network: 4321
                  subnet: 1234
                  fixed_ip: 127.0.0.1
        """
        t, stack, server = self._return_template_stack_and_rsrc_defn('test',
                                                                     tmpl)
        self.patchobject(server, '_validate_belonging_subnet_to_net')
        self.patchobject(neutron.NeutronClientPlugin,
                         'get_secgroup_uuids', return_value=['5566'])
        self.port_create.return_value = {'port': {'id': '111222'}}
        data_set = self.patchobject(resource.Resource, 'data_set')
        network = [{'network': '4321', 'subnet': '1234',
                    'fixed_ip': '127.0.0.1'}]
        security_groups = ['test_sec']
        server._build_nics(network, security_groups)
        # Security group names are resolved to uuids in the create call.
        self.port_create.assert_called_once_with(
            {'port': {'name': 'server-port-0',
                      'network_id': '4321',
                      'fixed_ips': [{
                          'ip_address': '127.0.0.1',
                          'subnet_id': '1234'
                      }],
                      'security_groups': ['5566']}})
        # The created port id is persisted as resource data.
        data_set.assert_called_once_with('internal_ports',
                                         '[{"id": "111222"}]')
def test_build_nics_do_not_create_internal_port(self):
t, stack, server = self._return_template_stack_and_rsrc_defn(
'test', tmpl_server_with_network_id)
self.port_create.return_value = {'port': {'id': '111222'}}
data_set = self.patchobject(resource.Resource, 'data_set')
network = [{'network': '4321'}]
server._build_nics(network)
self.assertFalse(self.port_create.called)
self.assertFalse(data_set.called)
    def test_prepare_port_kwargs_with_extras(self):
        """port_extra_properties are merged into the port-create kwargs.

        The input contains empty value_specs and a None mac_address inside
        allowed_address_pairs; both are absent from the expected kwargs.
        """
        tmpl = """
        heat_template_version: 2015-10-15
        resources:
          server:
            type: OS::Nova::Server
            properties:
              flavor: m1.small
              image: F17-x86_64-gold
              networks:
                - network: 4321
                  subnet: 1234
                  fixed_ip: 127.0.0.1
                  port_extra_properties:
                    mac_address: 00:00:00:00:00:00
                    allowed_address_pairs:
                      - ip_address: 127.0.0.1
                        mac_address: None
                      - mac_address: 00:00:00:00:00:00
        """
        t, stack, server = self._return_template_stack_and_rsrc_defn('test',
                                                                     tmpl)
        network = {'network': '4321', 'subnet': '1234',
                   'fixed_ip': '127.0.0.1',
                   'port_extra_properties': {
                       'value_specs': {},
                       'mac_address': '00:00:00:00:00:00',
                       'allowed_address_pairs': [
                           {'ip_address': '127.0.0.1',
                            'mac_address': None},
                           {'mac_address': '00:00:00:00:00:00'}
                       ]
                   }}
        sec_uuids = ['8d94c72093284da88caaef5e985d96f7']
        self.patchobject(neutron.NeutronClientPlugin,
                         'get_secgroup_uuids', return_value=sec_uuids)
        kwargs = server._prepare_internal_port_kwargs(
            network, security_groups=['test_sec'])
        self.assertEqual({'network_id': '4321',
                          'security_groups': sec_uuids,
                          'fixed_ips': [
                              {'ip_address': '127.0.0.1', 'subnet_id': '1234'}
                          ],
                          'mac_address': '00:00:00:00:00:00',
                          'allowed_address_pairs': [
                              {'ip_address': '127.0.0.1'},
                              {'mac_address': '00:00:00:00:00:00'}]},
                         kwargs)
    def test_build_nics_create_internal_port_without_net(self):
        """A subnet-only entry resolves the network, then creates a port."""
        tmpl = """
        heat_template_version: 2015-10-15
        resources:
          server:
            type: OS::Nova::Server
            properties:
              flavor: m1.small
              image: F17-x86_64-gold
              networks:
                - subnet: 1234
        """
        t, stack, server = self._return_template_stack_and_rsrc_defn('test',
                                                                     tmpl)
        self.patchobject(neutron.NeutronClientPlugin,
                         'network_id_from_subnet_id',
                         return_value='4321')
        # _get_network_id resolves the net id without mutating the dict.
        net = {'subnet': '1234'}
        net_id = server._get_network_id(net)
        self.assertEqual('4321', net_id)
        self.assertEqual({'subnet': '1234'}, net)
        self.port_create.return_value = {'port': {'id': '111222'}}
        data_set = self.patchobject(resource.Resource, 'data_set')
        network = [{'subnet': '1234'}]
        server._build_nics(network)
        self.port_create.assert_called_once_with(
            {'port': {'name': 'server-port-0',
                      'network_id': '4321',
                      'fixed_ips': [{
                          'subnet_id': '1234'
                      }]}})
        # The created port id is persisted as resource data.
        data_set.assert_called_once_with('internal_ports',
                                         '[{"id": "111222"}]')
    def test_calculate_networks_internal_ports(self):
        """calculate_networks deletes obsolete internal ports, creates new ones."""
        tmpl = """
        heat_template_version: 2015-10-15
        resources:
          server:
            type: OS::Nova::Server
            properties:
              flavor: m1.small
              image: F17-x86_64-gold
              networks:
                - network: 4321
                  subnet: 1234
                  fixed_ip: 127.0.0.1
                - port: 3344
        """
        t, stack, server = self._return_template_stack_and_rsrc_defn('test',
                                                                     tmpl)
        # Stored internal-port data as observed before/during/after update.
        data_mock = self.patchobject(server, '_data_get_ports')
        data_mock.side_effect = [[{"id": "1122"}], [{"id": "1122"}], []]
        self.port_create.return_value = {'port': {'id': '7788'}}
        data_set = self.patchobject(resource.Resource, 'data_set')
        old_net = [self.create_old_net(net='4321',
                                       subnet='1234',
                                       ip='127.0.0.1'),
                   self.create_old_net(port='3344')]
        new_net = [{'port': '3344'},
                   {'port': '5566'},
                   {'network': '4321',
                    'subnet': '5678',
                    'fixed_ip': '10.0.0.1'}
                   ]
        interfaces = [create_fake_iface(port='1122', net='4321',
                                        ip='127.0.0.1', subnet='1234'),
                      create_fake_iface(port='3344', net='4321', ip='10.0.0.2',
                                        subnet='subnet')]
        server.calculate_networks(old_net, new_net, interfaces)
        # we can only delete the port 1122,
        # port 3344 is external port, cant delete it
        self.port_delete.assert_called_once_with('1122')
        self.port_create.assert_called_once_with(
            {'port': {'name': 'server-port-1',
                      'network_id': '4321',
                      'fixed_ips': [{'subnet_id': '5678',
                                     'ip_address': '10.0.0.1'}]}})
        # The stored list is emptied, then repopulated with the new port.
        self.assertEqual(2, data_set.call_count)
        data_set.assert_has_calls((
            mock.call('internal_ports', '[]'),
            mock.call('internal_ports', '[{"id": "7788"}]')))
    def test_calculate_networks_internal_ports_with_fipa(self):
        """Floating IPs follow their ports through a networks update."""
        tmpl = """
        heat_template_version: 2015-10-15
        resources:
          server:
            type: OS::Nova::Server
            properties:
              flavor: m1.small
              image: F17-x86_64-gold
              networks:
                - network: 4321
                  subnet: 1234
                  fixed_ip: 127.0.0.1
                  floating_ip: 1199
                - network: 8765
                  subnet: 5678
                  fixed_ip: 127.0.0.2
                  floating_ip: 9911
        """
        t, stack, server = self._return_template_stack_and_rsrc_defn('test',
                                                                     tmpl)
        # NOTE(prazumovsky): this method update old_net and new_net with
        # interfaces' ports. Because of uselessness of checking this method,
        # we can afford to give port as part of calculate_networks args.
        self.patchobject(server, 'update_networks_matching_iface_port')
        server._data = {'internal_ports': '[{"id": "1122"}]'}
        self.port_create.return_value = {'port': {'id': '5566'}}
        self.patchobject(resource.Resource, 'data_set')
        self.resolve.side_effect = ['0912', '9021']
        # The first disassociate raises NotFound; the asserted call list
        # below shows the remaining floating-ip updates still proceed.
        fipa = self.patchobject(neutronclient.Client, 'update_floatingip',
                                side_effect=[neutronclient.exceptions.NotFound,
                                             '9911',
                                             '11910',
                                             '1199'])
        old_net = [
            self.create_old_net(net='4321', subnet='1234', ip='127.0.0.1',
                                port='1122', floating_ip='1199'),
            self.create_old_net(net='8765', subnet='5678', ip='127.0.0.2',
                                port='3344', floating_ip='9911')
        ]
        interfaces = [create_fake_iface(port='1122', net='4321',
                                        ip='127.0.0.1', subnet='1234'),
                      create_fake_iface(port='3344', net='8765',
                                        ip='127.0.0.2', subnet='5678')]
        new_net = [{'network': '8765',
                    'subnet': '5678',
                    'fixed_ip': '127.0.0.2',
                    'port': '3344',
                    'floating_ip': '11910'},
                   {'network': '0912',
                    'subnet': '9021',
                    'fixed_ip': '127.0.0.1',
                    'floating_ip': '1199',
                    'port': '1122'}]
        server.calculate_networks(old_net, new_net, interfaces)
        # Both old floating IPs are disassociated, then the new ones are
        # associated with their ports and fixed IPs.
        fipa.assert_has_calls((
            mock.call('1199', {'floatingip': {'port_id': None}}),
            mock.call('9911', {'floatingip': {'port_id': None}}),
            mock.call('11910',
                      {'floatingip': {'port_id': '3344',
                                      'fixed_ip_address': '127.0.0.2'}}),
            mock.call('1199',
                      {'floatingip': {'port_id': '1122',
                                      'fixed_ip_address': '127.0.0.1'}})
        ))
    def test_delete_fipa_with_exception_not_found_neutron(self):
        """_floating_ip_disassociate swallows neutron NotFound."""
        tmpl = """
        heat_template_version: 2015-10-15
        resources:
          server:
            type: OS::Nova::Server
            properties:
              flavor: m1.small
              image: F17-x86_64-gold
              networks:
                - network: 4321
                  subnet: 1234
                  fixed_ip: 127.0.0.1
                  floating_ip: 1199
                - network: 8765
                  subnet: 5678
                  fixed_ip: 127.0.0.2
                  floating_ip: 9911
        """
        t, stack, server = self._return_template_stack_and_rsrc_defn('test',
                                                                     tmpl)
        delete_flip = mock.MagicMock(
            side_effect=[neutron.exceptions.NotFound(404)])
        server.client('neutron').update_floatingip = delete_flip
        # NotFound is absorbed; the call still happens exactly once.
        self.assertIsNone(server._floating_ip_disassociate('flip123'))
        self.assertEqual(1, delete_flip.call_count)
    def test_delete_internal_ports(self):
        """_delete_internal_ports removes every stored port, one at a time."""
        t, stack, server = self._return_template_stack_and_rsrc_defn(
            'test', tmpl_server_with_network_id)
        # Successive data() reads show the stored list shrinking after each
        # per-port deletion.
        get_data = [{'internal_ports': '[{"id": "1122"}, {"id": "3344"}, '
                                       '{"id": "5566"}]'},
                    {'internal_ports': '[{"id": "1122"}, {"id": "3344"}, '
                                       '{"id": "5566"}]'},
                    {'internal_ports': '[{"id": "3344"}, '
                                       '{"id": "5566"}]'},
                    {'internal_ports': '[{"id": "5566"}]'}]
        self.patchobject(server, 'data', side_effect=get_data)
        data_set = self.patchobject(server, 'data_set')
        data_delete = self.patchobject(server, 'data_delete')
        server._delete_internal_ports()
        self.assertEqual(3, self.port_delete.call_count)
        self.assertEqual(('1122',), self.port_delete.call_args_list[0][0])
        self.assertEqual(('3344',), self.port_delete.call_args_list[1][0])
        self.assertEqual(('5566',), self.port_delete.call_args_list[2][0])
        self.assertEqual(3, data_set.call_count)
        data_set.assert_has_calls((
            mock.call('internal_ports',
                      '[{"id": "3344"}, {"id": "5566"}]'),
            mock.call('internal_ports', '[{"id": "5566"}]'),
            mock.call('internal_ports', '[]')))
        # The data key itself is removed once the list is empty.
        data_delete.assert_called_once_with('internal_ports')
def test_get_data_internal_ports(self):
t, stack, server = self._return_template_stack_and_rsrc_defn(
'test', tmpl_server_with_network_id)
server._data = {"internal_ports": '[{"id": "1122"}]'}
data = server._data_get_ports()
self.assertEqual([{"id": "1122"}], data)
server._data = {"internal_ports": ''}
data = server._data_get_ports()
self.assertEqual([], data)
def test_store_external_ports(self):
t, stack, server = self._return_template_stack_and_rsrc_defn(
'test', tmpl_server_with_network_id)
class Fake(object):
def interface_list(self):
return [iface('1122'),
iface('1122'),
iface('2233'),
iface('3344')]
server.client = mock.Mock()
server.client().servers.get.return_value = Fake()
server.client_plugin = mock.Mock()
server._data = {"internal_ports": '[{"id": "1122"}]',
"external_ports": '[{"id": "3344"},{"id": "5566"}]'}
iface = collections.namedtuple('iface', ['port_id'])
update_data = self.patchobject(server, '_data_update_ports')
server.store_external_ports()
self.assertEqual(2, update_data.call_count)
self.assertEqual(('5566', 'delete',),
update_data.call_args_list[0][0])
self.assertEqual({'port_type': 'external_ports'},
update_data.call_args_list[0][1])
self.assertEqual(('2233', 'add',),
update_data.call_args_list[1][0])
self.assertEqual({'port_type': 'external_ports'},
update_data.call_args_list[1][1])
    def test_prepare_ports_for_replace_detach_failed(self):
        """An interface that never detaches raises InterfaceDetachFailed."""
        t, stack, server = self._return_template_stack_and_rsrc_defn(
            'test', tmpl_server_with_network_id)
        class Fake(object):
            def interface_list(self):
                return [iface(1122)]
        iface = collections.namedtuple('iface', ['port_id'])
        server.resource_id = 'ser-11'
        port_ids = [{'id': 1122}]
        server._data = {"internal_ports": jsonutils.dumps(port_ids)}
        self.patchobject(nova.NovaClientPlugin, 'client')
        self.patchobject(nova.NovaClientPlugin, 'interface_detach')
        self.patchobject(nova.NovaClientPlugin, 'fetch_server')
        # Patch out the retry sleep so exhausting the retries is fast.
        self.patchobject(nova.NovaClientPlugin.check_interface_detach.retry,
                         'sleep')
        # The server keeps reporting port 1122 attached on every poll.
        nova.NovaClientPlugin.fetch_server.side_effect = [Fake()] * 10
        exc = self.assertRaises(exception.InterfaceDetachFailed,
                                server.prepare_for_replace)
        self.assertIn('Failed to detach interface (1122) from server '
                      '(ser-11)',
                      str(exc))
def test_prepare_ports_for_replace(self):
    """prepare_for_replace detaches every stored internal and external
    port from the server before replacement."""
    t, stack, server = self._return_template_stack_and_rsrc_defn(
        'test', tmpl_server_with_network_id)
    server.resource_id = 'test_server'
    port_ids = [{'id': '1122'}, {'id': '3344'}]
    external_port_ids = [{'id': '5566'}]
    server._data = {"internal_ports": jsonutils.dumps(port_ids),
                    "external_ports": jsonutils.dumps(external_port_ids)}
    self.patchobject(nova.NovaClientPlugin, 'client')
    nova_server = self.fc.servers.list()[1]
    server.client().servers.get.return_value = nova_server
    # Detachment succeeds immediately for every port.
    self.patchobject(nova.NovaClientPlugin, 'interface_detach',
                     return_value=True)
    self.patchobject(nova.NovaClientPlugin, 'check_interface_detach',
                     return_value=True)
    server.prepare_for_replace()
    # check, that the ports were detached from server
    nova.NovaClientPlugin.interface_detach.assert_has_calls([
        mock.call('test_server', '1122'),
        mock.call('test_server', '3344'),
        mock.call('test_server', '5566')])
def test_prepare_ports_for_replace_not_found(self):
    """When the nova server is already gone (fetch raises NotFound and
    the stored server reports DELETED), detachment is still checked for
    every stored port but no port is deleted."""
    t, stack, server = self._return_template_stack_and_rsrc_defn(
        'test', tmpl_server_with_network_id)
    server.resource_id = 'test_server'
    port_ids = [{'id': '1122'}, {'id': '3344'}]
    external_port_ids = [{'id': '5566'}]
    server._data = {"internal_ports": jsonutils.dumps(port_ids),
                    "external_ports": jsonutils.dumps(external_port_ids)}
    self.patchobject(nova.NovaClientPlugin, 'client')
    self.patchobject(nova.NovaClientPlugin, 'fetch_server',
                     side_effect=nova_exceptions.NotFound(404))
    check_detach = self.patchobject(nova.NovaClientPlugin,
                                    'check_interface_detach')
    self.patchobject(nova.NovaClientPlugin, 'client')
    nova_server = self.fc.servers.list()[1]
    nova_server.status = 'DELETED'
    server.client().servers.get.return_value = nova_server
    server.prepare_for_replace()
    # One detach check per stored port (2 internal + 1 external) ...
    self.assertEqual(3, check_detach.call_count)
    # ... but no port deletion for a server that no longer exists.
    self.assertEqual(0, self.port_delete.call_count)
def test_prepare_ports_for_replace_error_state(self):
    """For a server in ERROR state the internal ports are deleted (and
    their stored data removed) rather than detached for reuse."""
    t, stack, server = self._return_template_stack_and_rsrc_defn(
        'test', tmpl_server_with_network_id)
    server.resource_id = 'test_server'
    port_ids = [{'id': '1122'}, {'id': '3344'}]
    external_port_ids = [{'id': '5566'}]
    server._data = {"internal_ports": jsonutils.dumps(port_ids),
                    "external_ports": jsonutils.dumps(external_port_ids)}
    self.patchobject(nova.NovaClientPlugin, 'client')
    nova_server = self.fc.servers.list()[1]
    nova_server.status = 'ERROR'
    server.client().servers.get.return_value = nova_server
    self.patchobject(nova.NovaClientPlugin, 'interface_detach',
                     return_value=True)
    self.patchobject(nova.NovaClientPlugin, 'check_interface_detach',
                     return_value=True)
    data_set = self.patchobject(server, 'data_set')
    data_delete = self.patchobject(server, 'data_delete')
    server.prepare_for_replace()
    # check, that the internal ports were deleted
    self.assertEqual(2, self.port_delete.call_count)
    self.assertEqual(('1122',), self.port_delete.call_args_list[0][0])
    self.assertEqual(('3344',), self.port_delete.call_args_list[1][0])
    # Internal-port data is rewritten around each deletion ...
    data_set.assert_has_calls((
        mock.call('internal_ports',
                  '[{"id": "3344"}]'),
        mock.call('internal_ports', '[{"id": "1122"}]')))
    # ... and dropped entirely once processing finishes.
    data_delete.assert_called_once_with('internal_ports')
def test_prepare_ports_for_replace_not_created(self):
    """Port preparation is skipped entirely for a server that was never
    created (its resource_id is still None)."""
    _, _, server = self._return_template_stack_and_rsrc_defn(
        'test', tmpl_server_with_network_id)
    ports_mock = self.patchobject(server, 'prepare_ports_for_replace')
    server.prepare_for_replace()
    self.assertIsNone(server.resource_id)
    self.assertEqual(0, ports_mock.call_count)
@mock.patch.object(server_network_mixin.ServerNetworkMixin,
                   'store_external_ports')
def test_restore_ports_after_rollback(self, store_ports):
    """restore_prev_rsrc detaches all ports from the replacement server
    (waiting until it reports active) and re-attaches them to the old
    server taken from the backup stack."""
    t, stack, server = self._return_template_stack_and_rsrc_defn(
        'test', tmpl_server_with_network_id)
    server.resource_id = 'existing_server'
    port_ids = [{'id': 1122}, {'id': 3344}]
    external_port_ids = [{'id': 5566}]
    server._data = {"internal_ports": jsonutils.dumps(port_ids),
                    "external_ports": jsonutils.dumps(external_port_ids)}
    self.patchobject(nova.NovaClientPlugin, '_check_active')
    # First poll: not active yet; second poll: active.
    nova.NovaClientPlugin._check_active.side_effect = [False, True]
    # add data to old server in backup stack
    old_server = mock.Mock()
    old_server.resource_id = 'old_server'
    stack._backup_stack = mock.Mock()
    stack._backup_stack().resources.get.return_value = old_server
    old_server._data_get_ports.side_effect = [port_ids, external_port_ids]
    self.patchobject(nova.NovaClientPlugin, 'interface_detach',
                     return_value=True)
    self.patchobject(nova.NovaClientPlugin, 'check_interface_detach',
                     return_value=True)
    self.patchobject(nova.NovaClientPlugin, 'interface_attach')
    self.patchobject(nova.NovaClientPlugin, 'check_interface_attach',
                     return_value=True)
    server.restore_prev_rsrc()
    # The active state was polled until the server reported active.
    self.assertEqual(2, nova.NovaClientPlugin._check_active.call_count)
    # check, that ports were detached from new server
    nova.NovaClientPlugin.interface_detach.assert_has_calls([
        mock.call('existing_server', 1122),
        mock.call('existing_server', 3344),
        mock.call('existing_server', 5566)])
    # check, that ports were attached to old server
    nova.NovaClientPlugin.interface_attach.assert_has_calls([
        mock.call('old_server', 1122),
        mock.call('old_server', 3344),
        mock.call('old_server', 5566)])
@mock.patch.object(server_network_mixin.ServerNetworkMixin,
                   'store_external_ports')
def test_restore_ports_after_rollback_attach_failed(self, store_ports):
    """restore_prev_rsrc raises InterfaceAttachFailed when a port never
    shows up on the old server within the attach-check retries."""
    t, stack, server = self._return_template_stack_and_rsrc_defn(
        'test', tmpl_server_with_network_id)
    server.resource_id = 'existing_server'
    port_ids = [{'id': 1122}, {'id': 3344}]
    server._data = {"internal_ports": jsonutils.dumps(port_ids)}
    self.patchobject(nova.NovaClientPlugin, '_check_active')
    nova.NovaClientPlugin._check_active.return_value = True
    # add data to old server in backup stack
    old_server = mock.Mock()
    old_server.resource_id = 'old_server'
    stack._backup_stack = mock.Mock()
    stack._backup_stack().resources.get.return_value = old_server
    old_server._data_get_ports.side_effect = [port_ids, []]

    class Fake(object):
        # Only port 1122 ever appears attached, so 3344 keeps failing
        # the attach check.
        def interface_list(self):
            return [iface(1122)]

    iface = collections.namedtuple('iface', ['port_id'])
    self.patchobject(nova.NovaClientPlugin, 'interface_detach')
    self.patchobject(nova.NovaClientPlugin, 'check_interface_detach',
                     return_value=True)
    self.patchobject(nova.NovaClientPlugin, 'interface_attach')
    self.patchobject(nova.NovaClientPlugin, 'fetch_server')
    # Skip the real retry back-off delays.
    self.patchobject(nova.NovaClientPlugin.check_interface_attach.retry,
                     'sleep')
    # need to mock 11 times: 1 for port 1122, 10 for port 3344
    nova.NovaClientPlugin.fetch_server.side_effect = [Fake()] * 11
    exc = self.assertRaises(exception.InterfaceAttachFailed,
                            server.restore_prev_rsrc)
    self.assertIn('Failed to attach interface (3344) to server '
                  '(old_server)',
                  str(exc))
@mock.patch.object(server_network_mixin.ServerNetworkMixin,
                   'store_external_ports')
def test_restore_ports_after_rollback_convergence(self, store_ports):
    """Convergence variant of the rollback: the replacement resource is
    located through prev_rsrc.replaced_by instead of the backup stack."""
    t = template_format.parse(tmpl_server_with_network_id)
    stack = utils.parse_stack(t)
    stack.store()
    self.patchobject(nova.NovaClientPlugin, '_check_active')
    nova.NovaClientPlugin._check_active.return_value = True
    # mock resource from previous template
    prev_rsrc = stack['server']
    # store in db
    prev_rsrc.state_set(prev_rsrc.UPDATE, prev_rsrc.COMPLETE)
    prev_rsrc.resource_id = 'prev_rsrc'
    # mock resource from existing template, store in db, and set _data
    resource_defns = stack.t.resource_definitions(stack)
    existing_rsrc = servers.Server('server', resource_defns['server'],
                                   stack)
    existing_rsrc.stack = stack
    existing_rsrc.current_template_id = stack.t.id
    existing_rsrc.resource_id = 'existing_rsrc'
    existing_rsrc.state_set(existing_rsrc.UPDATE, existing_rsrc.COMPLETE)
    port_ids = [{'id': 1122}, {'id': 3344}]
    external_port_ids = [{'id': 5566}]
    existing_rsrc.data_set("internal_ports", jsonutils.dumps(port_ids))
    existing_rsrc.data_set("external_ports",
                           jsonutils.dumps(external_port_ids))
    # mock previous resource was replaced by existing resource
    prev_rsrc.replaced_by = existing_rsrc.id
    self.patchobject(nova.NovaClientPlugin, 'interface_detach',
                     return_value=True)
    self.patchobject(nova.NovaClientPlugin, 'check_interface_detach',
                     return_value=True)
    self.patchobject(nova.NovaClientPlugin, 'interface_attach')
    self.patchobject(nova.NovaClientPlugin, 'check_interface_attach',
                     return_value=True)
    prev_rsrc.restore_prev_rsrc(convergence=True)
    # check, that ports were detached from existing server
    nova.NovaClientPlugin.interface_detach.assert_has_calls([
        mock.call('existing_rsrc', 1122),
        mock.call('existing_rsrc', 3344),
        mock.call('existing_rsrc', 5566)])
    # check, that ports were attached to old server
    nova.NovaClientPlugin.interface_attach.assert_has_calls([
        mock.call('prev_rsrc', 1122),
        mock.call('prev_rsrc', 3344),
        mock.call('prev_rsrc', 5566)])
| apache-2.0 |
cchurch/ansible | lib/ansible/modules/network/onyx/onyx_bgp.py | 30 | 19451 | #!/usr/bin/python
#
# Copyright: Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
# Standard Ansible metadata block: the module is a community-supported
# preview.
ANSIBLE_METADATA = {'metadata_version': '1.1',
                    'status': ['preview'],
                    'supported_by': 'community'}
DOCUMENTATION = """
---
module: onyx_bgp
version_added: "2.5"
author: "Samer Deeb (@samerd), Anas Badaha (@anasb)"
short_description: Configures BGP on Mellanox ONYX network devices
description:
- This module provides declarative management of BGP router and neighbors
on Mellanox ONYX network devices.
notes:
- Tested on ONYX 3.6.4000
options:
as_number:
description:
- Local AS number.
required: true
router_id:
description:
- Router IP address.
neighbors:
description:
- List of neighbors. Required if I(state=present).
suboptions:
remote_as:
description:
- Remote AS number.
required: true
neighbor:
description:
- Neighbor IP address.
required: true
multihop:
description:
- multihop number.
networks:
description:
- List of advertised networks.
fast_external_fallover:
description:
- will configure fast_external_fallover when it is True.
type: bool
version_added: 2.9
max_paths:
description:
- Maximum bgp paths.
version_added: 2.9
ecmp_bestpath:
description:
- Enables ECMP across AS paths.
type: bool
version_added: 2.9
evpn:
description:
- Configure evpn peer-group.
type: bool
version_added: 2.9
vrf:
description:
- vrf name.
version_added: 2.9
state:
description:
- BGP state.
default: present
choices: ['present', 'absent']
purge:
description:
- will remove all neighbors when it is True.
type: bool
default: false
version_added: 2.8
"""
EXAMPLES = """
- name: configure bgp
onyx_bgp:
as_number: 320
router_id: 10.3.3.3
neighbors:
- remote_as: 321
neighbor: 10.3.3.4
- remote_as: 322
neighbor: 10.3.3.5
multihop: 250
purge: True
state: present
networks:
- 172.16.1.0/24
vrf: default
evpn: yes
fast_external_fallover: yes
max_paths: 32
ecmp_bestpath: yes
"""
RETURN = """
commands:
description: The list of configuration mode commands to send to the device.
returned: always
type: list
sample:
- router bgp 320 vrf default
- exit
- router bgp 320 router-id 10.3.3.3 force
- router bgp 320 vrf default bgp fast-external-fallover
- router bgp 320 vrf default maximum-paths 32
- router bgp 320 vrf default bestpath as-path multipath-relax force
- router bgp 320 vrf default neighbor evpn peer-group
- router bgp 320 vrf default neighbor evpn send-community extended
- router bgp 320 vrf default address-family l2vpn-evpn neighbor evpn next-hop-unchanged
- router bgp 320 vrf default address-family l2vpn-evpn neighbor evpn activate
- router bgp 320 vrf default address-family l2vpn-evpn auto-create
- router bgp 320 vrf default neighbor 10.3.3.4 remote-as 321
- router bgp 320 vrf default neighbor 10.3.3.4 ebgp-multihop 250
- router bgp 320 vrf default neighbor 10.3.3.5 remote-as 322
- router bgp 320 vrf default network 172.16.1.0 /24
"""
import re
from ansible.module_utils.six import iteritems
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network.onyx.onyx import get_bgp_summary
from ansible.module_utils.network.onyx.onyx import BaseOnyxModule
class OnyxBgpModule(BaseOnyxModule):
    """Declarative BGP management for Mellanox ONYX devices.

    The module parses the device's BGP running configuration with the
    regexes below into ``_current_config`` and diffs it against the
    requested ``_required_config`` to build a minimal list of CLI
    commands in ``_commands``.
    """

    # Regexes recognizing individual BGP lines of the running config.
    LOCAL_AS_REGEX = re.compile(r'^\s.*router bgp\s+(\d+)\s+vrf\s+(\S+).*')
    ROUTER_ID_REGEX = re.compile(
        r'^\s.*router bgp\s+(\d+).*router-id\s+(\S+)\s+.*')
    NEIGHBOR_REGEX = re.compile(
        r'^\s.*router bgp\s+(\d+).*neighbor\s+(\S+)\s+remote\-as\s+(\d+).*')
    NEIGHBOR_MULTIHOP_REGEX = re.compile(
        r'^\s.*router bgp\s+(\d+).*neighbor\s+(\S+)\s+ebgp\-multihop\s+(\d+).*')
    NETWORK_REGEX = re.compile(
        r'^\s.*router bgp\s+(\d+).*network\s+(\S+)\s+(\S+).*')
    FAST_EXTERNAL_FALLOVER_REGEX = re.compile(
        r'^\s.*router bgp\s+(\d+)\s+vrf\s+(\S+)\s+bgp fast\-external\-fallover.*')
    MAX_PATHS_REGEX = re.compile(
        r'^\s.*router bgp\s+(\d+)\s+vrf\s+(\S+)\s+maximum\-paths\s+(\d+).*')
    ECMP_BESTPATH_REGEX = re.compile(
        r'^\s.*router bgp\s+(\d+)\s+vrf\s+(\S+)\s+bestpath as\-path multipath\-relax.*')
    NEIGHBOR_EVPN_REGEX = re.compile(
        r'^\s.*router bgp\s+(\d+)\s+vrf\s+(\S+)\s+neighbor\s+(\S+)\s+peer\-group evpn.*')
    EVPN_PEER_GROUP_REGEX = re.compile(
        r'^\s.*router bgp\s+(\d+)\s+vrf\s+(\S+)\s+neighbor evpn peer\-group.*')
    EVPN_SEND_COMMUNITY_EXTENDED_REGEX = re.compile(
        r'^\s.*router bgp\s+(\d+)\s+vrf\s+(\S+)\s+neighbor evpn send-community extended.*')
    EVPN_NEXT_HOP_UNCHANGED_REGEX = re.compile(
        r'^\s.*router bgp\s+(\d+)\s+vrf\s+(\S+)\s+address\-family l2vpn\-evpn neighbor evpn next\-hop-unchanged.*')
    EVPN_ACTIVATE_REGEX = re.compile(
        r'^\s.*router bgp\s+(\d+)\s+vrf\s+(\S+)\s+address-family l2vpn\-evpn neighbor evpn activate.*')
    EVPN_AUTO_CREATE_REGEX = re.compile(
        r'^\s.*router bgp\s+(\d+)\s+vrf\s+(\S+)\s+address-family l2vpn\-evpn auto-create.*')

    # Whether neighbors absent from the task should be removed.
    _purge = False

    # Keys under which per-feature evpn state is tracked in
    # _current_config.
    EVPN_PEER_GROUP_ATTR = "evpn_peer_group"
    EVPN_SEND_COMMUNITY_EXTENDED_ATTR = "evpn_send_community_extended"
    EVPN_NEXT_HOP_UNCHANGED_ATTR = "evpn_next_hop_unchanged"
    EVPN_ACTIVATE_ATTR = "evpn_activate"
    EVPN_AUTO_CREATE_ATTR = "evpn_auto_create"

    # Command templates for enabling each evpn feature (filled with
    # (as_number, vrf)).
    EVPN_PEER_GROUP_CMD = "router bgp %s vrf %s neighbor evpn peer-group"
    EVPN_SEND_COMMUNITY_EXTENDED_CMD = "router bgp %s vrf %s neighbor evpn send-community extended"
    EVPN_NEXT_HOP_UNCHANGED_CMD = "router bgp %s vrf %s address-family l2vpn-evpn neighbor evpn next-hop-unchanged"
    EVPN_ACTIVATE_CMD = "router bgp %s vrf %s address-family l2vpn-evpn neighbor evpn activate"
    EVPN_AUTO_CREATE_CMD = "router bgp %s vrf %s address-family l2vpn-evpn auto-create"

    # Features toggled when evpn is enabled / disabled.
    EVPN_ENABLE_ATTRS = [EVPN_PEER_GROUP_ATTR, EVPN_SEND_COMMUNITY_EXTENDED_ATTR,
                         EVPN_NEXT_HOP_UNCHANGED_ATTR, EVPN_ACTIVATE_ATTR, EVPN_AUTO_CREATE_ATTR]
    EVPN_DISABLE_ATTRS = [EVPN_PEER_GROUP_ATTR, EVPN_AUTO_CREATE_ATTR]

    # attr -> (detection regex, enable command template)
    EVPN_COMMANDS_REGEX_MAPPER = {
        EVPN_PEER_GROUP_ATTR: (EVPN_PEER_GROUP_REGEX, EVPN_PEER_GROUP_CMD),
        EVPN_SEND_COMMUNITY_EXTENDED_ATTR: (EVPN_SEND_COMMUNITY_EXTENDED_REGEX,
                                            EVPN_SEND_COMMUNITY_EXTENDED_CMD),
        EVPN_NEXT_HOP_UNCHANGED_ATTR: (EVPN_NEXT_HOP_UNCHANGED_REGEX,
                                       EVPN_NEXT_HOP_UNCHANGED_CMD),
        EVPN_ACTIVATE_ATTR: (EVPN_ACTIVATE_REGEX, EVPN_ACTIVATE_CMD),
        EVPN_AUTO_CREATE_ATTR: (EVPN_AUTO_CREATE_REGEX, EVPN_AUTO_CREATE_CMD)
    }

    def init_module(self):
        """ initialize module

        Declares the argument spec (see DOCUMENTATION) and creates the
        AnsibleModule instance.
        """
        neighbor_spec = dict(
            remote_as=dict(type='int', required=True),
            neighbor=dict(required=True),
            multihop=dict(type='int')
        )
        element_spec = dict(
            as_number=dict(type='int', required=True),
            router_id=dict(),
            neighbors=dict(type='list', elements='dict',
                           options=neighbor_spec),
            networks=dict(type='list', elements='str'),
            state=dict(choices=['present', 'absent'], default='present'),
            purge=dict(default=False, type='bool'),
            vrf=dict(),
            fast_external_fallover=dict(type='bool'),
            max_paths=dict(type='int'),
            ecmp_bestpath=dict(type='bool'),
            evpn=dict(type='bool')
        )
        argument_spec = dict()
        argument_spec.update(element_spec)
        self._module = AnsibleModule(
            argument_spec=argument_spec,
            supports_check_mode=True)

    def get_required_config(self):
        """Copy and validate the task parameters into _required_config."""
        module_params = self._module.params
        self._required_config = dict(module_params)
        self._purge = self._required_config.get('purge', False)
        self.validate_param_values(self._required_config)

    def _set_bgp_config(self, bgp_config):
        """Parse the device's BGP running-config text into
        _current_config (as_number, vrf, router_id, neighbors, networks
        and feature flags)."""
        lines = bgp_config.split('\n')
        self._current_config['router_id'] = None
        self._current_config['as_number'] = None
        self._current_config['fast_external_fallover'] = False
        self._current_config['ecmp_bestpath'] = False
        self._current_config[self.EVPN_PEER_GROUP_ATTR] = False
        self._current_config[self.EVPN_SEND_COMMUNITY_EXTENDED_ATTR] = False
        self._current_config[self.EVPN_NEXT_HOP_UNCHANGED_ATTR] = False
        self._current_config[self.EVPN_AUTO_CREATE_ATTR] = False
        self._current_config[self.EVPN_ACTIVATE_ATTR] = False
        neighbors = self._current_config['neighbors'] = dict()
        networks = self._current_config['networks'] = list()
        for line in lines:
            if line.startswith('#'):
                # Skip comment lines in the device output.
                continue
            if not self._current_config['as_number']:
                match = self.LOCAL_AS_REGEX.match(line)
                if match:
                    self._current_config['as_number'] = int(match.group(1))
                    self._current_config['vrf'] = match.group(2)
                    continue
            if not self._current_config['router_id']:
                match = self.ROUTER_ID_REGEX.match(line)
                if match:
                    self._current_config['router_id'] = match.group(2)
                    continue
            match = self.NEIGHBOR_REGEX.match(line)
            if match:
                neighbor = neighbors.setdefault(match.group(2), dict())
                neighbor['remote_as'] = int(match.group(3))
                continue
            match = self.NEIGHBOR_MULTIHOP_REGEX.match(line)
            if match:
                neighbor = neighbors.setdefault(match.group(2), dict())
                neighbor["multihop"] = int(match.group(3))
                continue
            match = self.NEIGHBOR_EVPN_REGEX.match(line)
            if match:
                neighbor = neighbors.setdefault(match.group(3), dict())
                neighbor["evpn"] = True
                continue
            match = self.NETWORK_REGEX.match(line)
            if match:
                # group(2) is the address, group(3) the '/<masklen>' part.
                network = match.group(2) + match.group(3)
                networks.append(network)
                continue
            match = self.FAST_EXTERNAL_FALLOVER_REGEX.match(line)
            if match:
                self._current_config['fast_external_fallover'] = True
                continue
            match = self.ECMP_BESTPATH_REGEX.match(line)
            if match:
                self._current_config['ecmp_bestpath'] = True
                continue
            match = self.MAX_PATHS_REGEX.match(line)
            if match:
                self._current_config['max_paths'] = int(match.group(3))
                continue
            # Finally check the evpn feature lines.
            for key, value in iteritems(self.EVPN_COMMANDS_REGEX_MAPPER):
                match = value[0].match(line)
                if match:
                    self._current_config[key] = True
                    break

    def _get_bgp_summary(self):
        """Fetch the BGP section of the running config from the device."""
        return get_bgp_summary(self._module)

    def load_current_config(self):
        """Populate _current_config from the device."""
        self._current_config = dict()
        bgp_config = self._get_bgp_summary()
        if bgp_config:
            self._set_bgp_config(bgp_config)

    def generate_commands(self):
        """Dispatch to command generation for state=present/absent."""
        state = self._required_config['state']
        if state == 'present':
            self._generate_bgp_cmds()
        else:
            self._generate_no_bgp_cmds()

    def _generate_bgp_cmds(self):
        """Build the commands that converge the device's BGP config to
        the requested one (router, router-id, feature flags, neighbors
        and networks)."""
        vrf = self._required_config.get('vrf')
        if vrf is None:
            vrf = "default"
        as_number = self._required_config['as_number']
        curr_as_num = self._current_config.get('as_number')
        curr_vrf = self._current_config.get("vrf")
        bgp_removed = False
        if curr_as_num != as_number or vrf != curr_vrf:
            # AS number or vrf changed: recreate the whole router.
            if curr_as_num:
                self._commands.append('no router bgp %d vrf %s' % (curr_as_num, curr_vrf))
                bgp_removed = True
            self._commands.append('router bgp %d vrf %s' % (as_number, vrf))
            self._commands.append('exit')
        req_router_id = self._required_config.get('router_id')
        if req_router_id is not None:
            curr_route_id = self._current_config.get('router_id')
            if bgp_removed or req_router_id != curr_route_id:
                self._commands.append('router bgp %d vrf %s router-id %s force' % (as_number, vrf, req_router_id))
        fast_external_fallover = self._required_config.get('fast_external_fallover')
        if fast_external_fallover is not None:
            current_fast_external_fallover = self._current_config.get('fast_external_fallover')
            if fast_external_fallover and (bgp_removed or fast_external_fallover != current_fast_external_fallover):
                self._commands.append('router bgp %d vrf %s bgp fast-external-fallover' % (as_number, vrf))
            elif not fast_external_fallover and (bgp_removed or fast_external_fallover != current_fast_external_fallover):
                self._commands.append('router bgp %d vrf %s no bgp fast-external-fallover' % (as_number, vrf))
        max_paths = self._required_config.get('max_paths')
        if max_paths is not None:
            current_max_paths = self._current_config.get('max_paths')
            if bgp_removed or max_paths != current_max_paths:
                self._commands.append('router bgp %d vrf %s maximum-paths %s' % (as_number, vrf, max_paths))
        ecmp_bestpath = self._required_config.get('ecmp_bestpath')
        if ecmp_bestpath is not None:
            current_ecmp_bestpath = self._current_config.get('ecmp_bestpath')
            if ecmp_bestpath and (bgp_removed or ecmp_bestpath != current_ecmp_bestpath):
                self._commands.append('router bgp %d vrf %s bestpath as-path multipath-relax force' % (as_number, vrf))
            elif not ecmp_bestpath and (bgp_removed or ecmp_bestpath != current_ecmp_bestpath):
                self._commands.append('router bgp %d vrf %s no bestpath as-path multipath-relax force' % (as_number, vrf))
        evpn = self._required_config.get('evpn')
        if evpn is not None:
            self._generate_evpn_cmds(evpn, as_number, vrf)
        self._generate_neighbors_cmds(as_number, vrf, bgp_removed)
        self._generate_networks_cmds(as_number, vrf, bgp_removed)

    def _generate_neighbors_cmds(self, as_number, vrf, bgp_removed):
        """Build neighbor add/update commands (and removals when
        purging)."""
        req_neighbors = self._required_config['neighbors']
        curr_neighbors = self._current_config.get('neighbors', {})
        evpn = self._required_config.get('evpn')
        if self._purge:
            # NOTE(review): purge removes *every* currently configured
            # neighbor, including ones also present in the request; a
            # requested neighbor whose settings already match is not
            # re-added below -- confirm this is the intended semantics.
            for neighbor in curr_neighbors:
                remote_as = curr_neighbors[neighbor].get("remote_as")
                self._commands.append('router bgp %s vrf %s no neighbor %s remote-as %s' % (
                    as_number, vrf, neighbor, remote_as))

        if req_neighbors is not None:
            for neighbor_data in req_neighbors:
                neighbor = neighbor_data.get("neighbor")
                curr_neighbor = curr_neighbors.get(neighbor)
                remote_as = neighbor_data.get("remote_as")
                multihop = neighbor_data.get("multihop")
                if bgp_removed or curr_neighbor is None:
                    # New neighbor (or router recreated): configure fully.
                    if remote_as is not None:
                        self._commands.append(
                            'router bgp %s vrf %s neighbor %s remote-as %s' % (as_number, vrf, neighbor, remote_as))
                    if multihop is not None:
                        self._commands.append(
                            'router bgp %s vrf %s neighbor %s ebgp-multihop %s' % (as_number, vrf, neighbor, multihop))
                    if evpn:
                        self._commands.append(
                            'router bgp %s vrf %s neighbor %s peer-group evpn' % (as_number, vrf, neighbor))
                elif curr_neighbor is not None:
                    # Existing neighbor: emit only the attributes that
                    # differ from the device config.
                    curr_remote_as = curr_neighbor.get("remote_as")
                    curr_multihop = curr_neighbor.get("multihop")
                    curr_neighbor_evpn = curr_neighbor.get("evpn")
                    if remote_as != curr_remote_as:
                        self._commands.append(
                            'router bgp %s vrf %s neighbor %s remote-as %s' % (as_number, vrf, neighbor, remote_as))
                    if multihop is not None and multihop != curr_multihop:
                        self._commands.append(
                            'router bgp %s vrf %s neighbor %s ebgp-multihop %s' % (as_number, vrf, neighbor, multihop))
                    if evpn and curr_neighbor_evpn is not True:
                        self._commands.append(
                            'router bgp %s vrf %s neighbor %s peer-group evpn' % (as_number, vrf, neighbor))

    def _generate_networks_cmds(self, as_number, vrf, bgp_removed):
        """Build commands that reconcile advertised networks: remove the
        ones no longer requested, add the requested ones not present."""
        req_networks = self._required_config['networks'] or []
        curr_networks = self._current_config.get('networks', [])
        if not bgp_removed:
            for network in curr_networks:
                if network not in req_networks:
                    net_attrs = network.split('/')
                    if len(net_attrs) != 2:
                        self._module.fail_json(
                            msg='Invalid network %s' % network)
                    net_address, netmask = net_attrs
                    # BUGFIX: include 'vrf %s' so the removal targets the
                    # same vrf context as the matching add command below;
                    # previously the vrf qualifier was omitted here only.
                    cmd = 'router bgp %s vrf %s no network %s /%s' % (
                        as_number, vrf, net_address, netmask)
                    self._commands.append(cmd)

        for network in req_networks:
            if bgp_removed or network not in curr_networks:
                net_attrs = network.split('/')
                if len(net_attrs) != 2:
                    self._module.fail_json(
                        msg='Invalid network %s' % network)
                net_address, netmask = net_attrs
                cmd = 'router bgp %s vrf %s network %s /%s' % (
                    as_number, vrf, net_address, netmask)
                self._commands.append(cmd)

    def _generate_no_bgp_cmds(self):
        """Build the command that removes the BGP router (state=absent)."""
        as_number = self._required_config['as_number']
        curr_as_num = self._current_config.get('as_number')
        if curr_as_num and curr_as_num == as_number:
            # NOTE(review): unlike the other commands this one omits the
            # vrf qualifier -- confirm whether 'no router bgp <as>' is
            # meant to remove the router regardless of vrf.
            self._commands.append('no router bgp %d' % as_number)

    def _generate_evpn_cmds(self, evpn, as_number, vrf):
        """Enable or disable the evpn peer-group feature set."""
        if evpn:
            for attr in self.EVPN_ENABLE_ATTRS:
                curr_attr = self._current_config.get(attr)
                if curr_attr is not True:
                    self._commands.append(self.EVPN_COMMANDS_REGEX_MAPPER.get(attr)[1] % (as_number, vrf))
        elif not evpn:
            for attr in self.EVPN_DISABLE_ATTRS:
                curr_attr = self._current_config.get(attr)
                if curr_attr is not False:
                    self._commands.append("no " + self.EVPN_COMMANDS_REGEX_MAPPER.get(attr)[1] % (as_number, vrf))
def main():
    """ main entry point for module execution
    """
    OnyxBgpModule.main()


if __name__ == '__main__':
    main()
| gpl-3.0 |
mscuthbert/abjad | abjad/tools/selectiontools/Selection.py | 2 | 16294 | # -*- encoding: utf-8 -*-
import collections
import types
from abjad.tools.topleveltools import iterate
class Selection(object):
r'''A selection of components.
'''
### CLASS VARIABLES ###
__slots__ = (
'_music',
)
### INITIALIZER ###
def __init__(self, music=None):
    # Normalize ``music`` (None, a single component, list/tuple,
    # generator or another selection) before storing it immutably.
    music = self._coerce_music(music)
    self._music = tuple(music)
### SPECIAL METHODS ###
def __add__(self, expr):
r'''Cocatenates `expr` to selection.
Returns new selection.
'''
assert isinstance(expr, (Selection, list, tuple))
if isinstance(expr, Selection):
music = self._music + expr._music
return type(self)(music)
elif isinstance(expr, (tuple, list)):
music = self._music + tuple(expr)
return type(self)(music)
def __contains__(self, expr):
    r'''Is true when `expr` is in selection. Otherwise false.

    Returns boolean.
    '''
    # Membership is delegated to the underlying tuple of components.
    return expr in self._music
def __eq__(self, expr):
r'''Is true when selection and `expr` are of the same type
and when music of selection equals music of `expr`.
Otherwise false.
Returns boolean.
'''
if isinstance(expr, type(self)):
return self._music == expr._music
elif isinstance(expr, collections.Sequence):
return self._music == tuple(expr)
return False
def __format__(self, format_specification=''):
    r'''Formats selection.

    Set `format_specification` to `''` or `'storage'`.
    Interprets `''` equal to `'storage'`.

    Any other specification falls back to ``str(self)``.

    Returns string.
    '''
    from abjad.tools import systemtools
    if format_specification in ('', 'storage'):
        return systemtools.StorageFormatManager.get_storage_format(self)
    return str(self)
def __getitem__(self, expr):
r'''Gets item `expr` from selection.
Returns component from selection.
'''
result = self._music.__getitem__(expr)
if isinstance(result, tuple):
selection = type(self)()
selection._music = result[:]
result = selection
return result
def __getstate__(self):
r'''Gets state of selection.
Returns dictionary.
'''
if hasattr(self, '__dict__'):
return vars(self)
state = {}
for class_ in type(self).__mro__:
for slot in getattr(class_, '__slots__', ()):
state[slot] = getattr(self, slot, None)
return state
def __hash__(self):
r'''Hashes selection.
Required to be explicitly re-defined on Python 3 if __eq__ changes.
Returns integer.
'''
hash_values = (type(self), self._music)
return hash(hash_values)
def __illustrate__(self):
    r'''Attempts to illustrate selection.

    Evaluates the storage format of the selection (to sever any references
    to the source score from which the selection was taken). Then tries to
    wrap the result in a staff; in the case that notes of only C4 are found
    then sets the staff context name to ``'RhythmicStaff'``. If this works
    then the staff is wrapped in a LilyPond file and the file is returned.
    If this doesn't work then the method raises an exception.

    The idea is that the illustration should work for simple selections of
    that represent an essentially contiguous snippet of a single voice of
    music.

    Returns LilyPond file.
    '''
    from abjad.tools import lilypondfiletools
    from abjad.tools import markuptools
    from abjad.tools import pitchtools
    from abjad.tools import scoretools
    from abjad.tools.topleveltools import mutate
    # Work on a copy so the source score is left untouched.
    music = mutate(self).copy()
    staff = scoretools.Staff(music)
    # If every pitch is C4 the music is assumed to be purely rhythmic.
    found_different_pitch = False
    for pitch in pitchtools.list_named_pitches_in_expr(staff):
        if pitch != pitchtools.NamedPitch("c'"):
            found_different_pitch = True
            break
    if not found_different_pitch:
        staff.context_name = 'RhythmicStaff'
    score = scoretools.Score([staff])
    lilypond_file = lilypondfiletools.make_basic_lilypond_file(score)
    # Suppress the default LilyPond tagline in the rendered output.
    lilypond_file.header_block.tagline = markuptools.Markup('""')
    return lilypond_file
def __len__(self):
    r'''Number of components in selection.

    Returns nonnegative integer.
    '''
    return len(self._music)
def __ne__(self, expr):
    r'''Is true when selection does not equal `expr`. Otherwise false.

    Returns boolean.
    '''
    # Defined as the strict negation of __eq__.
    return not self == expr
def __radd__(self, expr):
r'''Concatenates selection to `expr`.
Returns newly created selection.
'''
assert isinstance(expr, (type(self), list, tuple))
if isinstance(expr, type(self)):
music = expr._music + self._music
return Selection(music)
# eventually remove this permissive branch
# and force the use of selections only
elif isinstance(expr, (tuple, list)):
music = tuple(expr) + self._music
return Selection(music)
def __repr__(self):
r'''Gets interpreter representation of selection.
Returns string.
'''
return '{}{!r}'.format(type(self).__name__, self._music)
def __setstate__(self, state):
r'''Sets state of selection.
Returns none.
'''
for key, value in state.items():
setattr(self, key, value)
### PRIVATE PROPERTIES ###
@property
def _preprolated_duration(self):
    # Sum of the pre-prolation durations of all contained components.
    return sum(component._preprolated_duration for component in self)
@property
def _storage_format_specification(self):
    # Storage format takes the music tuple as the single positional
    # argument; an empty selection formats with no arguments at all.
    from abjad.tools import systemtools
    if self._music:
        positional_argument_values = (
            self._music,
            )
    else:
        positional_argument_values = ()
    return systemtools.StorageFormatSpecification(
        self,
        keyword_argument_names=(),
        positional_argument_values=positional_argument_values,
        )
### PRIVATE METHODS ###
@staticmethod
def _all_are_components_in_same_logical_voice(
    expr, prototype=None, allow_orphans=True):
    # True when every item of `expr` is a component of `prototype`
    # belonging to one and the same logical voice.  Depending on
    # `allow_orphans`, parentless components are tolerated or rejected.
    from abjad.tools import scoretools
    from abjad.tools import selectiontools
    allowable_types = (
        list,
        tuple,
        types.GeneratorType,
        selectiontools.Selection,
        )
    if not isinstance(expr, allowable_types):
        return False
    prototype = prototype or (scoretools.Component,)
    if not isinstance(prototype, tuple):
        prototype = (prototype, )
    assert isinstance(prototype, tuple)
    if len(expr) == 0:
        # An empty sequence is trivially valid.
        return True
    # Fast path: if orphans are allowed and every item is an orphan of
    # the correct type, the logical-voice comparison is moot.
    all_are_orphans_of_correct_type = True
    if allow_orphans:
        for component in expr:
            if not isinstance(component, prototype):
                all_are_orphans_of_correct_type = False
                break
            if not component._get_parentage().is_orphan:
                all_are_orphans_of_correct_type = False
                break
        if all_are_orphans_of_correct_type:
            return True
    first = expr[0]
    if not isinstance(first, prototype):
        return False
    orphan_components = True
    if not first._get_parentage().is_orphan:
        orphan_components = False
    # Compare every remaining component's logical voice to the first's.
    same_logical_voice = True
    first_signature = first._get_parentage().logical_voice
    for component in expr[1:]:
        parentage = component._get_parentage()
        if not parentage.is_orphan:
            orphan_components = False
        if not allow_orphans and orphan_components:
            return False
        if parentage.logical_voice != first_signature:
            same_logical_voice = False
        if not allow_orphans and not same_logical_voice:
            return False
        if allow_orphans and not orphan_components and \
            not same_logical_voice:
            return False
    return True
@staticmethod
def _all_are_contiguous_components_in_same_logical_voice(
    expr, prototype=None, allow_orphans=True):
    # True when the items of `expr` are components of `prototype` that
    # belong to the same logical voice *and* are temporally contiguous
    # (each component immediately follows the previous one).
    from abjad.tools import scoretools
    from abjad.tools import selectiontools
    allowable_types = (
        list,
        tuple,
        types.GeneratorType,
        selectiontools.Selection,
        )
    if not isinstance(expr, allowable_types):
        return False
    prototype = prototype or (scoretools.Component,)
    if not isinstance(prototype, tuple):
        prototype = (prototype, )
    assert isinstance(prototype, tuple)
    if len(expr) == 0:
        # An empty sequence is trivially valid.
        return True
    # Fast path: if orphans are allowed and every item is an orphan of
    # the correct type, contiguity cannot be checked and is assumed.
    all_are_orphans_of_correct_type = True
    if allow_orphans:
        for component in expr:
            if not isinstance(component, prototype):
                all_are_orphans_of_correct_type = False
                break
            if not component._get_parentage().is_orphan:
                all_are_orphans_of_correct_type = False
                break
        if all_are_orphans_of_correct_type:
            return True
    if not allow_orphans:
        if any(x._get_parentage().is_orphan for x in expr):
            return False
    first = expr[0]
    if not isinstance(first, prototype):
        return False
    first_parentage = first._get_parentage()
    first_logical_voice = first_parentage.logical_voice
    first_root = first_parentage.root
    previous = first
    for current in expr[1:]:
        current_parentage = current._get_parentage()
        current_logical_voice = \
            current_parentage.logical_voice
        # false if wrong type of component found
        if not isinstance(current, prototype):
            return False
        # false if in different logical voices
        if current_logical_voice != first_logical_voice:
            return False
        # false if components are in same score and are discontiguous
        if current_parentage.root == first_root:
            if not previous._is_immediate_temporal_successor_of(current):
                return False
        previous = current
    return True
@staticmethod
def _all_are_contiguous_components_in_same_parent(
expr, prototype=None, allow_orphans=True):
from abjad.tools import scoretools
from abjad.tools import selectiontools
allowable_types = (
list,
tuple,
types.GeneratorType,
selectiontools.Selection,
)
if not isinstance(expr, allowable_types):
return False
prototype = prototype or (scoretools.Component, )
if not isinstance(prototype, tuple):
prototype = (prototype, )
assert isinstance(prototype, tuple)
if len(expr) == 0:
return True
all_are_orphans_of_correct_type = True
if allow_orphans:
for component in expr:
if not isinstance(component, prototype):
all_are_orphans_of_correct_type = False
break
if not component._get_parentage().is_orphan:
all_are_orphans_of_correct_type = False
break
if all_are_orphans_of_correct_type:
return True
first = expr[0]
if not isinstance(first, prototype):
return False
first_parent = first._parent
if first_parent is None:
if allow_orphans:
orphan_components = True
else:
return False
same_parent = True
strictly_contiguous = True
previous = first
for current in expr[1:]:
if not isinstance(current, prototype):
return False
if not current._get_parentage().is_orphan:
orphan_components = False
if not current._parent is first_parent:
same_parent = False
if not previous._is_immediate_temporal_successor_of(current):
strictly_contiguous = False
if (not allow_orphans or
(allow_orphans and not orphan_components)) and \
(not same_parent or not strictly_contiguous):
return False
previous = current
return True
@staticmethod
def _coerce_music(music):
if music is None:
music = ()
elif isinstance(music, (tuple, list)):
music = tuple(music)
elif isinstance(music, Selection):
music = tuple(music)
elif isinstance(music, types.GeneratorType):
music = tuple(music)
else:
music = (music, )
return music
    def _get_component(self, prototype=None, n=0, recurse=True):
        """Return the `n`-th component matching `prototype`.

        Non-negative `n` counts forward; negative `n` counts from the end
        (``-1`` is the last match). Falls off the end and returns None
        implicitly when there are not enough matches.

        NOTE(review): when ``recurse=False`` and ``0 <= n``, the candidates
        come straight from ``self._music`` without a prototype filter,
        unlike every other branch — confirm whether this is intended.
        """
        from abjad.tools import scoretools
        prototype = prototype or (scoretools.Component,)
        if not isinstance(prototype, tuple):
            prototype = (prototype,)
        if 0 <= n:
            if recurse:
                components = iterate(self).by_class(prototype)
            else:
                components = self._music
            for i, x in enumerate(components):
                if i == n:
                    return x
        else:
            if recurse:
                # iterate in reverse so index abs(n)-1 is the n-th from the end
                components = iterate(self).by_class(
                    prototype, reverse=True)
            else:
                components = reversed(self._music)
            for i, x in enumerate(components):
                if i == abs(n) - 1:
                    return x
def _get_spanner(self, prototype=None):
spanners = self._get_spanners(prototype=prototype)
if not spanners:
message = 'no spanners found.'
raise MissingSpannerError(message)
elif len(spanners) == 1:
return spanners.pop()
else:
message = 'multiple spanners found.'
raise ExtraSpannerError(message)
def _get_spanners(self, prototype=None):
from abjad.tools import spannertools
prototype = prototype or (spannertools.Spanner,)
if not isinstance(prototype, tuple):
prototype = (prototype, )
assert isinstance(prototype, tuple)
result = set()
for component in self:
spanners = component._get_spanners(prototype)
result.update(spanners)
return result
def _iterate_components(self, recurse=True, reverse=False):
if recurse:
return iterate(self).by_class()
else:
return self._iterate_top_level_components(reverse=reverse)
def _iterate_top_level_components(self, reverse=False):
if reverse:
for component in reversed(self):
yield component
else:
for component in self:
yield component
def _set_parents(self, new_parent):
r'''Not composer-safe.
'''
for component in self._music:
component._set_parent(new_parent)
### PUBLIC METHODS ###
def get_duration(self, in_seconds=False):
r'''Gets duration of contiguous selection.
Returns duration.
'''
return sum(
component._get_duration(in_seconds=in_seconds)
for component in self
)
def get_spanners(self, prototype=None, in_parentage=False):
r'''Gets spanners attached to any component in selection.
Returns set.
'''
result = set()
for component in self:
spanners = component._get_spanners(
prototype=prototype,
in_parentage=in_parentage,
)
result.update(spanners)
return result
collections.Sequence.register(Selection) | gpl-3.0 |
mitnk/letsencrypt | tests/letstest/multitester.py | 3 | 21239 | """
Letsencrypt Integration Test Tool
- Configures (canned) boulder server
- Launches EC2 instances with a given list of AMIs for different distros
- Copies letsencrypt repo and puts it on the instances
- Runs letsencrypt tests (bash scripts) on all of these
- Logs execution and success/fail for debugging
Notes:
- Some AWS images, e.g. official CentOS and FreeBSD images
require acceptance of user terms on the AWS marketplace
website. This can't be automated.
- AWS EC2 has a default limit of 20 t2/t1 instances, if more
are needed, they need to be requested via online webform.
Usage:
- Requires AWS IAM secrets to be set up with aws cli
- Requires an AWS associated keyfile <keyname>.pem
>aws configure --profile HappyHacker
[interactive: enter secrets for IAM role]
>aws ec2 create-key-pair --profile HappyHacker --key-name MyKeyPair \
--query 'KeyMaterial' --output text > MyKeyPair.pem
then:
>python multitester.py targets.yaml MyKeyPair.pem HappyHacker scripts/test_letsencrypt_auto_venv_only.sh
see:
https://docs.aws.amazon.com/cli/latest/userguide/cli-chap-getting-started.html
https://docs.aws.amazon.com/cli/latest/userguide/cli-ec2-keypairs.html
"""
from __future__ import print_function
from __future__ import with_statement
import sys, os, time, argparse, socket
import multiprocessing as mp
from multiprocessing import Manager
import urllib2
import yaml
import boto3
import fabric
from fabric.api import run, execute, local, env, sudo, cd, lcd
from fabric.operations import get, put
from fabric.context_managers import shell_env
# Command line parser
#-------------------------------------------------------------------------------
parser = argparse.ArgumentParser(description='Builds EC2 cluster for testing.')
parser.add_argument('config_file',
                    help='yaml configuration file for AWS server cluster')
parser.add_argument('key_file',
                    help='key file (<keyname>.pem) for AWS')
parser.add_argument('aws_profile',
                    help='profile for AWS (i.e. as in ~/.aws/certificates)')
parser.add_argument('test_script',
                    default='test_letsencrypt_auto_certonly_standalone.sh',
                    help='path of bash script in to deploy and run')
#parser.add_argument('--script_args',
#                    nargs='+',
#                    help='space-delimited list of arguments to pass to the bash test script',
#                    required=False)
parser.add_argument('--repo',
                    default='https://github.com/letsencrypt/letsencrypt.git',
                    help='letsencrypt git repo to use')
# NOTE: '~' is used below as an "unset" sentinel for --branch/--pull_request;
# the dispatch logic further down compares against it literally.
parser.add_argument('--branch',
                    default='~',
                    help='letsencrypt git branch to trial')
parser.add_argument('--pull_request',
                    default='~',
                    help='letsencrypt/letsencrypt pull request to trial')
parser.add_argument('--merge_master',
                    action='store_true',
                    help="if set merges PR into master branch of letsencrypt/letsencrypt")
parser.add_argument('--saveinstances',
                    action='store_true',
                    help="don't kill EC2 instances after run, useful for debugging")
parser.add_argument('--alt_pip',
                    default='',
                    help="server from which to pull candidate release packages")
parser.add_argument('--killboulder',
                    action='store_true',
                    help="do not leave a persistent boulder server running")
parser.add_argument('--boulderonly',
                    action='store_true',
                    help="only make a boulder server")
parser.add_argument('--fast',
                    action='store_true',
                    help="use larger instance types to run faster (saves about a minute, probably not worth it)")
cl_args = parser.parse_args()

# Credential Variables
#-------------------------------------------------------------------------------
# assumes naming: <key_filename> = <keyname>.pem
KEYFILE = cl_args.key_file
KEYNAME = os.path.split(cl_args.key_file)[1].split('.pem')[0]
PROFILE = cl_args.aws_profile

# Globals
#-------------------------------------------------------------------------------
BOULDER_AMI = 'ami-5f490b35' # premade shared boulder AMI 14.04LTS us-east-1
LOGDIR = "" #points to logging / working directory
# boto3/AWS api globals, populated once the session is created below
AWS_SESSION = None
EC2 = None
def make_security_group():
    """Create the shared 'letsencrypt_test' EC2 security group and open the
    ports the tests need (ssh, http, https, boulder wfe, mosh).

    Raises if a group with that name already exists; callers check first.
    """
    # will fail if security group of GroupName already exists
    # cannot have duplicate SGs of the same name
    mysg = EC2.create_security_group(GroupName="letsencrypt_test",
                                     Description='security group for automated testing')
    mysg.authorize_ingress(IpProtocol="tcp", CidrIp="0.0.0.0/0", FromPort=22, ToPort=22)
    mysg.authorize_ingress(IpProtocol="tcp", CidrIp="0.0.0.0/0", FromPort=80, ToPort=80)
    mysg.authorize_ingress(IpProtocol="tcp", CidrIp="0.0.0.0/0", FromPort=443, ToPort=443)
    # for boulder wfe (http) server
    mysg.authorize_ingress(IpProtocol="tcp", CidrIp="0.0.0.0/0", FromPort=4000, ToPort=4000)
    # for mosh
    mysg.authorize_ingress(IpProtocol="udp", CidrIp="0.0.0.0/0", FromPort=60000, ToPort=61000)
    return mysg
def make_instance(instance_name,
                  ami_id,
                  keyname,
                  machine_type='t2.micro',
                  security_groups=None,
                  userdata=""): #userdata contains bash or cloud-init script
    """Launch one EC2 instance, tag it with `instance_name`, and return it.

    `security_groups=None` means the default ['letsencrypt_test'] group;
    the original used a mutable list as the default argument.
    """
    if security_groups is None:
        security_groups = ['letsencrypt_test']
    # BUG FIX: botocore was referenced in the except clause below but never
    # imported anywhere in this module, turning the retry path into a
    # NameError; import it locally (it always ships alongside boto3).
    import botocore.exceptions
    new_instance = EC2.create_instances(
        ImageId=ami_id,
        SecurityGroups=security_groups,
        KeyName=keyname,
        MinCount=1,
        MaxCount=1,
        UserData=userdata,
        InstanceType=machine_type)[0]
    # brief pause to prevent rare error on EC2 delay, should block until ready instead
    time.sleep(1.0)
    # give instance a name
    try:
        new_instance.create_tags(Tags=[{'Key': 'Name', 'Value': instance_name}])
    except botocore.exceptions.ClientError as e:
        if "InvalidInstanceID.NotFound" in str(e):
            # This seems to be ephemeral... retry
            time.sleep(1)
            new_instance.create_tags(Tags=[{'Key': 'Name', 'Value': instance_name}])
        else:
            raise
    return new_instance
def terminate_and_clean(instances):
    """
    Some AMIs specify EBS stores that won't delete on instance termination.
    These must be manually deleted after shutdown.

    Terminates every instance, polls until all reach the 'terminated'
    state, then deletes the leftover volumes. Returns the deleted volume ids.
    """
    # collect volumes that would otherwise outlive their instances
    volumes_to_delete = []
    for instance in instances:
        for bdmap in instance.block_device_mappings:
            if 'Ebs' in bdmap.keys():
                if not bdmap['Ebs']['DeleteOnTermination']:
                    volumes_to_delete.append(bdmap['Ebs']['VolumeId'])

    for instance in instances:
        instance.terminate()

    # can't delete volumes until all attaching instances are terminated
    _ids = [instance.id for instance in instances]
    all_terminated = False
    while not all_terminated:
        all_terminated = True
        for _id in _ids:
            # necessary to reinit object for boto3 to get true state
            inst = EC2.Instance(id=_id)
            if inst.state['Name'] != 'terminated':
                all_terminated = False
        time.sleep(5)

    for vol_id in volumes_to_delete:
        volume = EC2.Volume(id=vol_id)
        volume.delete()
    return volumes_to_delete
# Helper Routines
#-------------------------------------------------------------------------------
def block_until_http_ready(urlstring, wait_time=10, timeout=240):
    "Blocks until server at urlstring can respond to http requests"
    # Polls every wait_time seconds; gives up silently after timeout seconds
    # (a timeout is treated the same as success by callers).
    server_ready = False
    t_elapsed = 0
    while not server_ready and t_elapsed < timeout:
        try:
            # progress dots on one line
            sys.stdout.write('.')
            sys.stdout.flush()
            req = urllib2.Request(urlstring)
            response = urllib2.urlopen(req)
            #if response.code == 200:
            server_ready = True
        except urllib2.URLError:
            pass
        time.sleep(wait_time)
        t_elapsed += wait_time
def block_until_ssh_open(ipstring, wait_time=10, timeout=120):
    """Block until the server at `ipstring` accepts TCP connections on port 22.

    Polls every `wait_time` seconds and gives up silently after `timeout`
    seconds (callers treat a timeout the same as success, as before).
    """
    t_elapsed = 0
    while t_elapsed < timeout:
        # BUG FIX: the original created a fresh socket every iteration but
        # only closed the last one, leaking a file descriptor per failed
        # attempt; close each socket in a finally clause instead.
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        try:
            sock.connect((ipstring, 22))
            return
        except socket.error:
            time.sleep(wait_time)
            t_elapsed += wait_time
        finally:
            sock.close()
def block_until_instance_ready(booting_instance, wait_time=5, extra_wait_time=20):
    "Blocks booting_instance until AWS EC2 instance is ready to accept SSH connections"
    # the reinstantiation from id is necessary to force boto3
    # to correctly update the 'state' variable during init
    _id = booting_instance.id
    _instance = EC2.Instance(id=_id)
    _state = _instance.state['Name']
    _ip = _instance.public_ip_address
    # poll until the instance is running AND has a public IP assigned
    while _state != 'running' or _ip is None:
        time.sleep(wait_time)
        _instance = EC2.Instance(id=_id)
        _state = _instance.state['Name']
        _ip = _instance.public_ip_address
    # then wait for sshd itself, plus a grace period for slow images
    block_until_ssh_open(_ip)
    time.sleep(extra_wait_time)
    return _instance
# Fabric Routines
#-------------------------------------------------------------------------------
def local_git_clone(repo_url):
    "clones master of repo_url"
    # work inside the per-run log directory; any stale checkout is removed
    with lcd(LOGDIR):
        local('if [ -d letsencrypt ]; then rm -rf letsencrypt; fi')
        local('git clone %s letsencrypt'% repo_url)
        # tarball is what gets shipped to the remote hosts
        local('tar czf le.tar.gz letsencrypt')
def local_git_branch(repo_url, branch_name):
    "clones branch <branch_name> of repo_url"
    with lcd(LOGDIR):
        local('if [ -d letsencrypt ]; then rm -rf letsencrypt; fi')
        # single-branch keeps the clone small
        local('git clone %s letsencrypt --branch %s --single-branch'%(repo_url, branch_name))
        local('tar czf le.tar.gz letsencrypt')
def local_git_PR(repo_url, PRnumstr, merge_master=True):
    "clones specified pull request from repo_url and optionally merges into master"
    with lcd(LOGDIR):
        local('if [ -d letsencrypt ]; then rm -rf letsencrypt; fi')
        local('git clone %s letsencrypt'% repo_url)
        # fetch the PR head into a local branch and check it out
        local('cd letsencrypt && git fetch origin pull/%s/head:lePRtest'%PRnumstr)
        local('cd letsencrypt && git co lePRtest')
        if merge_master:
            local('cd letsencrypt && git remote update origin')
            local('cd letsencrypt && git merge origin/master -m "testmerge"')
        local('tar czf le.tar.gz letsencrypt')
def local_repo_to_remote():
    "copies local tarball of repo to remote"
    # relies on fabric's env.host_string having been set by the caller
    with lcd(LOGDIR):
        put(local_path='le.tar.gz', remote_path='')
        run('tar xzf le.tar.gz')
def local_repo_clean():
    "delete tarball"
    # removes the artifact created by the local_git_* helpers
    with lcd(LOGDIR):
        local('rm le.tar.gz')
def deploy_script(scriptpath, *args):
    "copies to remote and executes local script"
    #with lcd('scripts'):
    # mirror_local_mode keeps the script's executable bit on the remote side
    put(local_path=scriptpath, remote_path='', mirror_local_mode=True)
    scriptfile = os.path.split(scriptpath)[1]
    args_str = ' '.join(args)
    run('./'+scriptfile+' '+args_str)
def run_boulder():
    """Start boulder on the remote host, detached from this ssh session."""
    with cd('$GOPATH/src/github.com/letsencrypt/boulder'):
        run('go run cmd/rabbitmq-setup/main.go -server amqp://localhost')
        # nohup + redirections keep boulder alive after fabric disconnects
        run('nohup ./start.py >& /dev/null < /dev/null &')
def config_and_launch_boulder(instance):
    """Provision and start boulder on the current fabric host.

    NOTE(review): the `instance` argument is unused; the target host comes
    from fabric's env.host_string — confirm whether it can be dropped.
    """
    execute(deploy_script, 'scripts/boulder_config.sh')
    execute(run_boulder)
def install_and_launch_letsencrypt(instance, boulder_url, target):
    """Ship the repo tarball to the remote host and run the test script with
    the environment variables the bash scripts expect."""
    execute(local_repo_to_remote)
    with shell_env(BOULDER_URL=boulder_url,
                   PUBLIC_IP=instance.public_ip_address,
                   PRIVATE_IP=instance.private_ip_address,
                   PUBLIC_HOSTNAME=instance.public_dns_name,
                   PIP_EXTRA_INDEX_URL=cl_args.alt_pip,
                   OS_TYPE=target['type']):
        execute(deploy_script, cl_args.test_script)
def grab_letsencrypt_log():
    "grabs letsencrypt.log via cat into logged stdout"
    sudo('if [ -f /var/log/letsencrypt/letsencrypt.log ]; then \
    cat /var/log/letsencrypt/letsencrypt.log; else echo "[novarlog]"; fi')
    # fallback file if /var/log is unwriteable...? correct?
    sudo('if [ -f ./letsencrypt.log ]; then \
    cat ./letsencrypt.log; else echo "[nolocallog]"; fi')
def create_client_instances(targetlist):
    """Create a fleet of client instances, one per target in `targetlist`.

    Returns the list of boto3 Instance objects, ordered like `targetlist`.
    """
    instances = []
    print("Creating instances: ", end="")
    for target in targetlist:
        if target['virt'] == 'hvm':
            machine_type = 't2.medium' if cl_args.fast else 't2.micro'
        else:
            # 32 bit systems
            machine_type = 'c1.medium' if cl_args.fast else 't1.micro'
        # dict.get replaces the `'userdata' in target.keys()` membership test
        userdata = target.get('userdata', '')
        name = 'le-%s'%target['name']
        print(name, end=" ")
        instances.append(make_instance(name,
                                       target['ami'],
                                       KEYNAME,
                                       machine_type=machine_type,
                                       userdata=userdata))
    print()
    return instances
def test_client_process(inqueue, outqueue):
    """Worker-process entry point: take (index, target) work items from
    `inqueue` until SENTINEL, run the test script on each instance, and put
    (index, target, 'pass'|'fail') results on `outqueue`.

    All stdout is redirected to a per-machine log file under LOGDIR.
    """
    cur_proc = mp.current_process()
    for inreq in iter(inqueue.get, SENTINEL):
        ii, target = inreq

        #save all stdout to log file
        sys.stdout = open(LOGDIR+'/'+'%d_%s.log'%(ii,target['name']), 'w')

        print("[%s : client %d %s %s]" % (cur_proc.name, ii, target['ami'], target['name']))
        instances[ii] = block_until_instance_ready(instances[ii])
        print("server %s at %s"%(instances[ii], instances[ii].public_ip_address))
        env.host_string = "%s@%s"%(target['user'], instances[ii].public_ip_address)
        print(env.host_string)

        try:
            install_and_launch_letsencrypt(instances[ii], boulder_url, target)
            outqueue.put((ii, target, 'pass'))
            print("%s - %s SUCCESS"%(target['ami'], target['name']))
        except Exception:
            # BUG FIX: was a bare `except:` which also swallowed
            # KeyboardInterrupt/SystemExit; narrow to Exception.
            outqueue.put((ii, target, 'fail'))
            print("%s - %s FAIL"%(target['ami'], target['name']))

        # append server letsencrypt.log to each per-machine output log
        print("\n\nletsencrypt.log\n" + "-"*80 + "\n")
        try:
            execute(grab_letsencrypt_log)
        except Exception:
            # BUG FIX: bare `except:` narrowed here as well
            print("log fail\n")
def cleanup(cl_args, instances, targetlist):
    """Tear down (or report) the EC2 fleet at the end of a run.

    With --saveinstances the boxes are left running and their ssh logins
    are printed for debugging; otherwise everything is terminated and
    dangling EBS volumes are deleted. --killboulder also terminates the
    shared boulder server (a module-level global).
    """
    print('Logs in ', LOGDIR)
    if not cl_args.saveinstances:
        print('Terminating EC2 Instances and Cleaning Dangling EBS Volumes')
        if cl_args.killboulder:
            boulder_server.terminate()
        terminate_and_clean(instances)
    else:
        # print login information for the boxes for debugging
        for ii, target in enumerate(targetlist):
            print(target['name'],
                  target['ami'],
                  "%s@%s"%(target['user'], instances[ii].public_ip_address))
#-------------------------------------------------------------------------------
# SCRIPT BEGINS
#-------------------------------------------------------------------------------

# Fabric library controlled through global env parameters
env.key_filename = KEYFILE
env.shell = '/bin/bash -l -i -c'
env.connection_attempts = 5
env.timeout = 10

# replace default SystemExit thrown by fabric during trouble
class FabricException(Exception):
    pass
env['abort_exception'] = FabricException

# Set up local copy of git repo
#-------------------------------------------------------------------------------
LOGDIR = "letest-%d"%int(time.time())
print("Making local dir for test repo and logs: %s"%LOGDIR)
local('mkdir %s'%LOGDIR)

# figure out what git object to test and locally create it in LOGDIR
# ('~' is the "unset" sentinel for the optional --pull_request/--branch flags)
print("Making local git repo")
try:
    if cl_args.pull_request != '~':
        print('Testing PR %s '%cl_args.pull_request,
              "MERGING into master" if cl_args.merge_master else "")
        execute(local_git_PR, cl_args.repo, cl_args.pull_request, cl_args.merge_master)
    elif cl_args.branch != '~':
        print('Testing branch %s of %s'%(cl_args.branch, cl_args.repo))
        execute(local_git_branch, cl_args.repo, cl_args.branch)
    else:
        print('Testing master of %s'%cl_args.repo)
        execute(local_git_clone, cl_args.repo)
except FabricException:
    print("FAIL: trouble with git repo")
    exit()
# Set up EC2 instances
#-------------------------------------------------------------------------------
# BUG FIX: yaml.load() without an explicit Loader can construct arbitrary
# python objects from tagged YAML in the config file, and the file handle was
# never closed; use safe_load inside a context manager instead.
with open(cl_args.config_file, 'r') as config_fd:
    configdata = yaml.safe_load(config_fd)
targetlist = configdata['targets']
print('Testing against these images: [%d total]'%len(targetlist))
for target in targetlist:
    print(target['ami'], target['name'])
print("Connecting to EC2 using\n profile %s\n keyname %s\n keyfile %s"%(PROFILE, KEYNAME, KEYFILE))
AWS_SESSION = boto3.session.Session(profile_name=PROFILE)
EC2 = AWS_SESSION.resource('ec2')

print("Making Security Group")
# the security group name is unique per account; only create it once
sg_exists = False
for sg in EC2.security_groups.all():
    if sg.group_name == 'letsencrypt_test':
        sg_exists = True
        print("  %s already exists"%'letsencrypt_test')
if not sg_exists:
    make_security_group()
    # give AWS time to propagate the new group before launching instances
    time.sleep(30)

# reuse a running boulder server from a previous run when available
boulder_preexists = False
boulder_servers = EC2.instances.filter(Filters=[
    {'Name': 'tag:Name',            'Values': ['le-boulderserver']},
    {'Name': 'instance-state-name', 'Values': ['running']}])

boulder_server = next(iter(boulder_servers), None)

print("Requesting Instances...")
if boulder_server:
    print("Found existing boulder server:", boulder_server)
    boulder_preexists = True
else:
    print("Can't find a boulder server, starting one...")
    boulder_server = make_instance('le-boulderserver',
                                   BOULDER_AMI,
                                   KEYNAME,
                                   machine_type='t2.micro',
                                   #machine_type='t2.medium',
                                   security_groups=['letsencrypt_test'])
# BUG FIX: `instances` was only bound inside the try block when --boulderonly
# was NOT passed, so the finally-clause cleanup() raised NameError on
# --boulderonly runs (sys.exit there still triggers the finally). Binding it
# up front makes every exit path safe.
instances = []
try:
    if not cl_args.boulderonly:
        instances = create_client_instances(targetlist)

    # Configure and launch boulder server
    #-------------------------------------------------------------------------------
    print("Waiting on Boulder Server")
    boulder_server = block_until_instance_ready(boulder_server)
    print(" server %s"%boulder_server)

    # env.host_string defines the ssh user and host for connection
    env.host_string = "ubuntu@%s"%boulder_server.public_ip_address
    print("Boulder Server at (SSH):", env.host_string)
    if not boulder_preexists:
        print("Configuring and Launching Boulder")
        config_and_launch_boulder(boulder_server)
        # blocking often unnecessary, but cheap EC2 VMs can get very slow
        block_until_http_ready('http://%s:4000'%boulder_server.public_ip_address,
                               wait_time=10, timeout=500)

    boulder_url = "http://%s:4000/directory"%boulder_server.private_ip_address
    print("Boulder Server at (public ip): http://%s:4000/directory"%boulder_server.public_ip_address)
    print("Boulder Server at (EC2 private ip): %s"%boulder_url)

    if cl_args.boulderonly:
        sys.exit(0)

    # Install and launch client scripts in parallel
    #-------------------------------------------------------------------------------
    print("Uploading and running test script in parallel: %s"%cl_args.test_script)
    print("Output routed to log files in %s"%LOGDIR)
    # (Advice: always use Manager.Queue, never regular multiprocessing.Queue
    # the latter has implementation flaws that deadlock it in some circumstances)
    manager = Manager()
    outqueue = manager.Queue()
    inqueue = manager.Queue()
    SENTINEL = None #queue kill signal

    # launch as many processes as clients to test
    num_processes = len(targetlist)
    jobs = [] #keep a reference to current procs

    # initiate process execution
    for i in range(num_processes):
        p = mp.Process(target=test_client_process, args=(inqueue, outqueue))
        jobs.append(p)
        p.daemon = True  # kills subprocesses if parent is killed
        p.start()

    # fill up work queue
    for ii, target in enumerate(targetlist):
        inqueue.put((ii, target))

    # add SENTINELs to end client processes
    for i in range(num_processes):
        inqueue.put(SENTINEL)
    # wait on termination of client processes
    for p in jobs:
        p.join()
    # add SENTINEL to output queue
    outqueue.put(SENTINEL)

    # clean up
    execute(local_repo_clean)

    # print and save summary results
    results_file = open(LOGDIR+'/results', 'w')
    outputs = [outq for outq in iter(outqueue.get, SENTINEL)]
    outputs.sort(key=lambda x: x[0])
    for outq in outputs:
        ii, target, status = outq
        print('%d %s %s'%(ii, target['name'], status))
        results_file.write('%d %s %s\n'%(ii, target['name'], status))
    results_file.close()

finally:
    cleanup(cl_args, instances, targetlist)

    # kill any connections
    fabric.network.disconnect_all()
| apache-2.0 |
plockaby/dart | tool/lib/dart/tool/commands/assign.py | 1 | 3693 | from . import BaseCommand
from termcolor import colored
import urllib.parse
import traceback
import json
class AssignCommand(BaseCommand):
    def run(self, fqdn, process_name, process_environment, **kwargs):
        """Assign `process_name` in `process_environment` to host `fqdn`.

        Returns 0 on success, 1 on failure (shell exit-code style).
        """
        try:
            data = {
                "op": "add",
                "path": "/assignments",
                "value": {
                    "name": process_name,
                    "environment": process_environment
                }
            }

            # patch the configuration
            url = "{}/tool/v1/hosts/{}".format(self.dart_api_url, urllib.parse.quote(fqdn))
            response = self.dart_api.patch(url, data=json.dumps(data), timeout=10)

            # catch expected errors
            if (response.status_code in [400, 404]):
                # BUG FIX: the original read StatusDescription from `data` —
                # the request payload we just sent, which never contains that
                # key — so every failure printed "Unknown error." Read it
                # from the API response body instead (presumably the API
                # returns StatusDescription on error; matches original intent).
                try:
                    reason = response.json().get("StatusDescription") or "Unknown error."
                except ValueError:
                    reason = "Unknown error."
                print("{} Could not assign {} in {} to {}: {}".format(colored("FAILURE!", "red", attrs=["bold"]), process_name, process_environment, fqdn, reason))
                return 1

            # catch any other errors
            response.raise_for_status()

            # now tell the host to update its configurations
            try:
                url = "{}/coordination/v1/rewrite/{}".format(self.dart_api_url, urllib.parse.quote(fqdn))
                response = self.dart_api.post(url, timeout=10)
                response.raise_for_status()
            except Exception as e:
                # non-fatal: the assignment succeeded even if the nudge failed
                print("{} Could not send rewrite command to {}: {}".format(colored("WARNING!", "yellow", attrs=["bold"]), fqdn, e))

            print("{} Assigned {} in {} to {}.".format(colored("SUCCESS!", "green", attrs=["bold"]), process_name, process_environment, fqdn))
            return 0
        except Exception as e:
            print("{} Could not assign {} in {} to {}: {}".format(colored("FAILURE!", "red", attrs=["bold"]), process_name, process_environment, fqdn, e))
            self.logger.debug(traceback.format_exc())
            return 1
class UnassignCommand(BaseCommand):
    def run(self, fqdn, process_name, **kwargs):
        """Remove the assignment of `process_name` from host `fqdn`.

        Returns 0 on success, 1 on failure (shell exit-code style).
        """
        try:
            data = {
                "op": "remove",
                "path": "/assignments",
                "value": {
                    "name": process_name,
                }
            }

            # patch the configuration
            url = "{}/tool/v1/hosts/{}".format(self.dart_api_url, urllib.parse.quote(fqdn))
            response = self.dart_api.patch(url, data=json.dumps(data), timeout=10)

            # catch expected errors
            if (response.status_code in [400, 404]):
                # BUG FIX: the original read StatusDescription from `data` —
                # the request payload we just sent, which never contains that
                # key — so every failure printed "Unknown error." Read it
                # from the API response body instead (presumably the API
                # returns StatusDescription on error; matches original intent).
                try:
                    reason = response.json().get("StatusDescription") or "Unknown error."
                except ValueError:
                    reason = "Unknown error."
                print("{} Could not unassign {} from {}: {}".format(colored("FAILURE!", "red", attrs=["bold"]), process_name, fqdn, reason))
                return 1

            # catch any other errors
            response.raise_for_status()

            # now tell the host to update its configurations
            try:
                url = "{}/coordination/v1/rewrite/{}".format(self.dart_api_url, urllib.parse.quote(fqdn))
                response = self.dart_api.post(url, timeout=10)
                response.raise_for_status()
            except Exception as e:
                # non-fatal: the unassignment succeeded even if the nudge failed
                print("{} Could not send rewrite command to {}: {}".format(colored("WARNING!", "yellow", attrs=["bold"]), fqdn, e))

            print("{} Unassigned {} from {}.".format(colored("SUCCESS!", "green", attrs=["bold"]), process_name, fqdn))
            return 0
        except Exception as e:
            print("{} Could not unassign {} from {}: {}".format(colored("FAILURE!", "red", attrs=["bold"]), process_name, fqdn, e))
            self.logger.debug(traceback.format_exc())
            return 1
| artistic-2.0 |
isandlaTech/cohorte-demos | led/dump/led-demo-raspberry/cohorte/dist/cohorte-1.0.0-1.0.0-20141201.234602-19-python-distribution/repo/sleekxmpp/util/__init__.py | 9 | 1067 | # -*- coding: utf-8 -*-
"""
sleekxmpp.util
~~~~~~~~~~~~~~
Part of SleekXMPP: The Sleek XMPP Library
:copyright: (c) 2012 Nathanael C. Fritz, Lance J.T. Stout
:license: MIT, see LICENSE for more details
"""
from sleekxmpp.util.misc_ops import bytes, unicode, hashes, hash, \
num_to_bytes, bytes_to_num, quote, \
XOR, safedict
# =====================================================================
# Standardize import of Queue class:
import sys
def _gevent_threads_enabled():
if not 'gevent' in sys.modules:
return False
try:
from gevent import thread as green_thread
thread = __import__('thread')
return thread.LockType is green_thread.LockType
except ImportError:
return False
# Pick a Queue implementation that matches the active threading model:
# gevent's JoinableQueue when green threads are in use, otherwise the
# stdlib queue module (python 3 'queue', python 2 'Queue').
if _gevent_threads_enabled():
    import gevent.queue as queue
    Queue = queue.JoinableQueue
else:
    try:
        import queue
    except ImportError:
        import Queue as queue
    Queue = queue.Queue
QueueEmpty = queue.Empty
| apache-2.0 |
pgmillon/ansible | lib/ansible/modules/network/fortios/fortios_application_rule_settings.py | 24 | 7379 | #!/usr/bin/python
from __future__ import (absolute_import, division, print_function)
# Copyright 2018 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
# the lib use python logging can get it if the following is set in your
# Ansible config.
__metaclass__ = type
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.1'}
DOCUMENTATION = '''
---
module: fortios_application_rule_settings
short_description: Configure application rule settings in Fortinet's FortiOS and FortiGate.
description:
- This module is able to configure a FortiGate or FortiOS by
allowing the user to configure application feature and rule_settings category.
Examples includes all options and need to be adjusted to datasources before usage.
Tested with FOS v6.0.2
version_added: "2.8"
author:
- Miguel Angel Munoz (@mamunozgonzalez)
- Nicolas Thomas (@thomnico)
notes:
- Requires fortiosapi library developed by Fortinet
- Run as a local_action in your playbook
requirements:
- fortiosapi>=0.9.8
options:
host:
description:
- FortiOS or FortiGate ip address.
required: true
username:
description:
- FortiOS or FortiGate username.
required: true
password:
description:
- FortiOS or FortiGate password.
default: ""
vdom:
description:
- Virtual domain, among those defined previously. A vdom is a
virtual instance of the FortiGate that can be configured and
used as a different unit.
default: root
https:
description:
- Indicates if the requests towards FortiGate must use HTTPS
protocol
type: bool
default: false
application_rule_settings:
description:
- Configure application rule settings.
default: null
suboptions:
state:
description:
- Indicates whether to create or remove the object
choices:
- present
- absent
id:
description:
- Rule ID.
required: true
'''
EXAMPLES = '''
- hosts: localhost
vars:
host: "192.168.122.40"
username: "admin"
password: ""
vdom: "root"
tasks:
- name: Configure application rule settings.
fortios_application_rule_settings:
host: "{{ host }}"
username: "{{ username }}"
password: "{{ password }}"
vdom: "{{ vdom }}"
application_rule_settings:
state: "present"
id: "3"
'''
RETURN = '''
build:
description: Build number of the fortigate image
returned: always
type: str
sample: '1547'
http_method:
description: Last method used to provision the content into FortiGate
returned: always
type: str
sample: 'PUT'
http_status:
description: Last result given by FortiGate on last operation applied
returned: always
type: str
sample: "200"
mkey:
description: Master key (id) used in the last call to FortiGate
returned: success
type: str
sample: "id"
name:
description: Name of the table used to fulfill the request
returned: always
type: str
sample: "urlfilter"
path:
description: Path of the table used to fulfill the request
returned: always
type: str
sample: "webfilter"
revision:
description: Internal revision number
returned: always
type: str
sample: "17.0.2.10658"
serial:
description: Serial number of the unit
returned: always
type: str
sample: "FGVMEVYYQT3AB5352"
status:
description: Indication of the operation's result
returned: always
type: str
sample: "success"
vdom:
description: Virtual domain used
returned: always
type: str
sample: "root"
version:
description: Version of the FortiGate
returned: always
type: str
sample: "v5.6.3"
'''
from ansible.module_utils.basic import AnsibleModule
fos = None
def login(data):
    """Open an authenticated session on the module-level `fos` handle using
    the host/username/password from the module parameters."""
    host = data['host']
    username = data['username']
    password = data['password']
    fos.debug('on')
    # https defaults to on unless the caller explicitly disabled it
    if 'https' in data and not data['https']:
        fos.https('off')
    else:
        fos.https('on')
    fos.login(host, username, password)
def filter_application_rule_settings_data(json):
    """Keep only the API-recognised keys of the module parameters.

    Unknown keys and keys whose value is None are dropped.
    """
    option_list = ['id']
    return dict((attribute, json[attribute])
                for attribute in option_list
                if attribute in json and json[attribute] is not None)
def application_rule_settings(data, fos):
    """Apply the requested state for an application rule-settings entry.

    Returns the raw FortiOS API response dict (None for unknown states).
    """
    vdom = data['vdom']
    settings = data['application_rule_settings']
    payload = filter_application_rule_settings_data(settings)
    if settings['state'] == "present":
        return fos.set('application',
                       'rule-settings',
                       data=payload,
                       vdom=vdom)
    if settings['state'] == "absent":
        return fos.delete('application',
                          'rule-settings',
                          mkey=payload['id'],
                          vdom=vdom)
def fortios_application(data, fos):
    """Log in, dispatch to the matching configuration function, log out.

    Returns (is_error, has_changed, result).
    """
    login(data)

    # BUG FIX: the original dispatched with eval(method) and left `resp`
    # unbound (NameError on return) when no known key was set in `data`;
    # use an explicit dispatch table and report a clean error instead.
    dispatch = {
        'application_rule_settings': application_rule_settings,
    }
    resp = None
    for name, func in dispatch.items():
        if data.get(name):
            resp = func(data, fos)
            break

    fos.logout()
    if resp is None:
        return True, False, {'status': 'error',
                             'reason': 'no configuration supplied'}
    return not resp['status'] == "success", resp['status'] == "success", resp
def main():
    """Entry point: declare the module's argument spec and run it."""
    # Argument spec consumed by AnsibleModule; 'application_rule_settings'
    # is the single configuration section this module knows about.
    fields = {
        "host": {"required": True, "type": "str"},
        "username": {"required": True, "type": "str"},
        "password": {"required": False, "type": "str", "no_log": True},
        "vdom": {"required": False, "type": "str", "default": "root"},
        "https": {"required": False, "type": "bool", "default": "False"},
        "application_rule_settings": {
            "required": False, "type": "dict",
            "options": {
                "state": {"required": True, "type": "str",
                          "choices": ["present", "absent"]},
                "id": {"required": True, "type": "int"}
            }
        }
    }
    module = AnsibleModule(argument_spec=fields,
                           supports_check_mode=False)
    # fortiosapi is an optional dependency; fail cleanly when missing.
    try:
        from fortiosapi import FortiOSAPI
    except ImportError:
        module.fail_json(msg="fortiosapi module is required")
    # The API client is stored in a module-level global used by login().
    global fos
    fos = FortiOSAPI()
    is_error, has_changed, result = fortios_application(module.params, fos)
    if not is_error:
        module.exit_json(changed=has_changed, meta=result)
    else:
        module.fail_json(msg="Error in repo", meta=result)
# Run the module only when executed directly by Ansible.
if __name__ == '__main__':
    main()
| gpl-3.0 |
albertomurillo/ansible | test/units/modules/remote_management/oneview/test_oneview_network_set.py | 68 | 6324 | # Copyright (c) 2016-2017 Hewlett Packard Enterprise Development LP
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from units.compat import unittest, mock
from hpe_test_utils import OneViewBaseTestCase
from oneview_module_loader import NetworkSetModule
# Shared fixtures for the NetworkSetModule tests below.
FAKE_MSG_ERROR = 'Fake message error'
# Canonical network-set resource used as the "existing"/"created" object.
NETWORK_SET = dict(
    name='OneViewSDK Test Network Set',
    networkUris=['/rest/ethernet-networks/aaa-bbb-ccc']
)
NETWORK_SET_WITH_NEW_NAME = dict(name='OneViewSDK Test Network Set - Renamed')
# Module params for state=present with data identical to NETWORK_SET.
PARAMS_FOR_PRESENT = dict(
    config='config.json',
    state='present',
    data=dict(name=NETWORK_SET['name'],
              networkUris=['/rest/ethernet-networks/aaa-bbb-ccc'])
)
# Params that rename the set and reference one network by display name
# (must be resolved to a URI by the ethernet-networks client).
PARAMS_WITH_CHANGES = dict(
    config='config.json',
    state='present',
    data=dict(name=NETWORK_SET['name'],
              newName=NETWORK_SET['name'] + " - Renamed",
              networkUris=['/rest/ethernet-networks/aaa-bbb-ccc', 'Name of a Network'])
)
PARAMS_FOR_ABSENT = dict(
    config='config.json',
    state='absent',
    data=dict(name=NETWORK_SET['name'])
)
class NetworkSetModuleSpec(unittest.TestCase,
                           OneViewBaseTestCase):
    """
    Unit tests for NetworkSetModule.

    OneViewBaseTestCase has common tests for class constructor and main function,
    also provides the mocks used in this test case.
    """

    def setUp(self):
        # Wire the shared OneView client / AnsibleModule mocks to this module.
        self.configure_mocks(self, NetworkSetModule)
        self.resource = self.mock_ov_client.network_sets
        self.ethernet_network_client = self.mock_ov_client.ethernet_networks

    def _params_with_scope(self, scope_uris):
        # PARAMS_FOR_PRESENT.copy() is shallow, so mutating 'data' on the
        # copy would pollute the shared fixture and leak between tests.
        # Build a copy with its own nested 'data' dict instead.
        params = dict(PARAMS_FOR_PRESENT, data=dict(PARAMS_FOR_PRESENT['data']))
        params['data']['scopeUris'] = scope_uris
        return params

    def test_should_create_new_network_set(self):
        self.resource.get_by.return_value = []
        self.resource.create.return_value = NETWORK_SET
        self.mock_ansible_module.params = PARAMS_FOR_PRESENT
        NetworkSetModule().run()
        self.mock_ansible_module.exit_json.assert_called_once_with(
            changed=True,
            msg=NetworkSetModule.MSG_CREATED,
            ansible_facts=dict(network_set=NETWORK_SET)
        )

    def test_should_not_update_when_data_is_equals(self):
        self.resource.get_by.return_value = [NETWORK_SET]
        self.mock_ansible_module.params = PARAMS_FOR_PRESENT
        NetworkSetModule().run()
        self.mock_ansible_module.exit_json.assert_called_once_with(
            changed=False,
            msg=NetworkSetModule.MSG_ALREADY_PRESENT,
            ansible_facts=dict(network_set=NETWORK_SET)
        )

    def test_update_when_data_has_modified_attributes(self):
        data_merged = dict(name=NETWORK_SET['name'] + " - Renamed",
                           networkUris=['/rest/ethernet-networks/aaa-bbb-ccc',
                                        '/rest/ethernet-networks/ddd-eee-fff']
                           )
        # First get_by finds the set; second (lookup by new name) finds none.
        self.resource.get_by.side_effect = [NETWORK_SET], []
        self.resource.update.return_value = data_merged
        self.ethernet_network_client.get_by.return_value = [{'uri': '/rest/ethernet-networks/ddd-eee-fff'}]
        self.mock_ansible_module.params = PARAMS_WITH_CHANGES
        NetworkSetModule().run()
        self.mock_ansible_module.exit_json.assert_called_once_with(
            changed=True,
            msg=NetworkSetModule.MSG_UPDATED,
            ansible_facts=dict(network_set=data_merged)
        )

    def test_should_raise_exception_when_ethernet_network_not_found(self):
        self.resource.get_by.side_effect = [NETWORK_SET], []
        # Network name cannot be resolved to a URI -> module must fail.
        self.ethernet_network_client.get_by.return_value = []
        self.mock_ansible_module.params = PARAMS_WITH_CHANGES
        NetworkSetModule().run()
        self.mock_ansible_module.fail_json.assert_called_once_with(
            exception=mock.ANY,
            msg=NetworkSetModule.MSG_ETHERNET_NETWORK_NOT_FOUND + "Name of a Network"
        )

    def test_should_remove_network(self):
        self.resource.get_by.return_value = [NETWORK_SET]
        self.mock_ansible_module.params = PARAMS_FOR_ABSENT
        NetworkSetModule().run()
        self.mock_ansible_module.exit_json.assert_called_once_with(
            changed=True,
            msg=NetworkSetModule.MSG_DELETED
        )

    def test_should_do_nothing_when_network_set_not_exist(self):
        self.resource.get_by.return_value = []
        self.mock_ansible_module.params = PARAMS_FOR_ABSENT
        NetworkSetModule().run()
        self.mock_ansible_module.exit_json.assert_called_once_with(
            changed=False,
            msg=NetworkSetModule.MSG_ALREADY_ABSENT
        )

    def test_update_scopes_when_different(self):
        self.mock_ansible_module.params = self._params_with_scope(['test'])
        resource_data = NETWORK_SET.copy()
        resource_data['scopeUris'] = ['fake']
        resource_data['uri'] = 'rest/network-sets/fake'
        self.resource.get_by.return_value = [resource_data]
        patch_return = resource_data.copy()
        patch_return['scopeUris'] = ['test']
        self.resource.patch.return_value = patch_return
        NetworkSetModule().run()
        self.resource.patch.assert_called_once_with('rest/network-sets/fake',
                                                    operation='replace',
                                                    path='/scopeUris',
                                                    value=['test'])
        self.mock_ansible_module.exit_json.assert_called_once_with(
            changed=True,
            ansible_facts=dict(network_set=patch_return),
            msg=NetworkSetModule.MSG_UPDATED
        )

    def test_should_do_nothing_when_scopes_are_the_same(self):
        self.mock_ansible_module.params = self._params_with_scope(['test'])
        resource_data = NETWORK_SET.copy()
        resource_data['scopeUris'] = ['test']
        self.resource.get_by.return_value = [resource_data]
        NetworkSetModule().run()
        # Bug fix: the original called `self.resource.patch.not_been_called()`,
        # which is NOT a mock assertion -- it just invokes an auto-created
        # child mock and always "passes". assert_not_called() actually fails
        # if a patch request was issued.
        self.resource.patch.assert_not_called()
        self.mock_ansible_module.exit_json.assert_called_once_with(
            changed=False,
            ansible_facts=dict(network_set=resource_data),
            msg=NetworkSetModule.MSG_ALREADY_PRESENT
        )
# Allow running this test file directly with the stdlib test runner.
if __name__ == '__main__':
    unittest.main()
| gpl-3.0 |
mc-coal/mc-coal | lib/unidecode/x010.py | 252 | 4110 | data = (
'k', # 0x00
'kh', # 0x01
'g', # 0x02
'gh', # 0x03
'ng', # 0x04
'c', # 0x05
'ch', # 0x06
'j', # 0x07
'jh', # 0x08
'ny', # 0x09
'nny', # 0x0a
'tt', # 0x0b
'tth', # 0x0c
'dd', # 0x0d
'ddh', # 0x0e
'nn', # 0x0f
'tt', # 0x10
'th', # 0x11
'd', # 0x12
'dh', # 0x13
'n', # 0x14
'p', # 0x15
'ph', # 0x16
'b', # 0x17
'bh', # 0x18
'm', # 0x19
'y', # 0x1a
'r', # 0x1b
'l', # 0x1c
'w', # 0x1d
's', # 0x1e
'h', # 0x1f
'll', # 0x20
'a', # 0x21
'[?]', # 0x22
'i', # 0x23
'ii', # 0x24
'u', # 0x25
'uu', # 0x26
'e', # 0x27
'[?]', # 0x28
'o', # 0x29
'au', # 0x2a
'[?]', # 0x2b
'aa', # 0x2c
'i', # 0x2d
'ii', # 0x2e
'u', # 0x2f
'uu', # 0x30
'e', # 0x31
'ai', # 0x32
'[?]', # 0x33
'[?]', # 0x34
'[?]', # 0x35
'N', # 0x36
'\'', # 0x37
':', # 0x38
'', # 0x39
'[?]', # 0x3a
'[?]', # 0x3b
'[?]', # 0x3c
'[?]', # 0x3d
'[?]', # 0x3e
'[?]', # 0x3f
'0', # 0x40
'1', # 0x41
'2', # 0x42
'3', # 0x43
'4', # 0x44
'5', # 0x45
'6', # 0x46
'7', # 0x47
'8', # 0x48
'9', # 0x49
' / ', # 0x4a
' // ', # 0x4b
'n*', # 0x4c
'r*', # 0x4d
'l*', # 0x4e
'e*', # 0x4f
'sh', # 0x50
'ss', # 0x51
'R', # 0x52
'RR', # 0x53
'L', # 0x54
'LL', # 0x55
'R', # 0x56
'RR', # 0x57
'L', # 0x58
'LL', # 0x59
'[?]', # 0x5a
'[?]', # 0x5b
'[?]', # 0x5c
'[?]', # 0x5d
'[?]', # 0x5e
'[?]', # 0x5f
'[?]', # 0x60
'[?]', # 0x61
'[?]', # 0x62
'[?]', # 0x63
'[?]', # 0x64
'[?]', # 0x65
'[?]', # 0x66
'[?]', # 0x67
'[?]', # 0x68
'[?]', # 0x69
'[?]', # 0x6a
'[?]', # 0x6b
'[?]', # 0x6c
'[?]', # 0x6d
'[?]', # 0x6e
'[?]', # 0x6f
'[?]', # 0x70
'[?]', # 0x71
'[?]', # 0x72
'[?]', # 0x73
'[?]', # 0x74
'[?]', # 0x75
'[?]', # 0x76
'[?]', # 0x77
'[?]', # 0x78
'[?]', # 0x79
'[?]', # 0x7a
'[?]', # 0x7b
'[?]', # 0x7c
'[?]', # 0x7d
'[?]', # 0x7e
'[?]', # 0x7f
'[?]', # 0x80
'[?]', # 0x81
'[?]', # 0x82
'[?]', # 0x83
'[?]', # 0x84
'[?]', # 0x85
'[?]', # 0x86
'[?]', # 0x87
'[?]', # 0x88
'[?]', # 0x89
'[?]', # 0x8a
'[?]', # 0x8b
'[?]', # 0x8c
'[?]', # 0x8d
'[?]', # 0x8e
'[?]', # 0x8f
'[?]', # 0x90
'[?]', # 0x91
'[?]', # 0x92
'[?]', # 0x93
'[?]', # 0x94
'[?]', # 0x95
'[?]', # 0x96
'[?]', # 0x97
'[?]', # 0x98
'[?]', # 0x99
'[?]', # 0x9a
'[?]', # 0x9b
'[?]', # 0x9c
'[?]', # 0x9d
'[?]', # 0x9e
'[?]', # 0x9f
'A', # 0xa0
'B', # 0xa1
'G', # 0xa2
'D', # 0xa3
'E', # 0xa4
'V', # 0xa5
'Z', # 0xa6
'T`', # 0xa7
'I', # 0xa8
'K', # 0xa9
'L', # 0xaa
'M', # 0xab
'N', # 0xac
'O', # 0xad
'P', # 0xae
'Zh', # 0xaf
'R', # 0xb0
'S', # 0xb1
'T', # 0xb2
'U', # 0xb3
'P`', # 0xb4
'K`', # 0xb5
'G\'', # 0xb6
'Q', # 0xb7
'Sh', # 0xb8
'Ch`', # 0xb9
'C`', # 0xba
'Z\'', # 0xbb
'C', # 0xbc
'Ch', # 0xbd
'X', # 0xbe
'J', # 0xbf
'H', # 0xc0
'E', # 0xc1
'Y', # 0xc2
'W', # 0xc3
'Xh', # 0xc4
'OE', # 0xc5
'[?]', # 0xc6
'[?]', # 0xc7
'[?]', # 0xc8
'[?]', # 0xc9
'[?]', # 0xca
'[?]', # 0xcb
'[?]', # 0xcc
'[?]', # 0xcd
'[?]', # 0xce
'[?]', # 0xcf
'a', # 0xd0
'b', # 0xd1
'g', # 0xd2
'd', # 0xd3
'e', # 0xd4
'v', # 0xd5
'z', # 0xd6
't`', # 0xd7
'i', # 0xd8
'k', # 0xd9
'l', # 0xda
'm', # 0xdb
'n', # 0xdc
'o', # 0xdd
'p', # 0xde
'zh', # 0xdf
'r', # 0xe0
's', # 0xe1
't', # 0xe2
'u', # 0xe3
'p`', # 0xe4
'k`', # 0xe5
'g\'', # 0xe6
'q', # 0xe7
'sh', # 0xe8
'ch`', # 0xe9
'c`', # 0xea
'z\'', # 0xeb
'c', # 0xec
'ch', # 0xed
'x', # 0xee
'j', # 0xef
'h', # 0xf0
'e', # 0xf1
'y', # 0xf2
'w', # 0xf3
'xh', # 0xf4
'oe', # 0xf5
'f', # 0xf6
'[?]', # 0xf7
'[?]', # 0xf8
'[?]', # 0xf9
'[?]', # 0xfa
' // ', # 0xfb
'[?]', # 0xfc
'[?]', # 0xfd
'[?]', # 0xfe
)
| agpl-3.0 |
carquois/blobon | blobon/blogs/migrations/0037_auto__add_info_email.py | 1 | 17530 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    def forwards(self, orm):
        """Apply the migration: create the blogs_info_email table."""
        # Adding model 'Info_email'
        db.create_table('blogs_info_email', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('blog', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['blogs.Blog'], null=True)),
            ('author', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'], null=True)),
            ('status', self.gf('django.db.models.fields.CharField')(max_length=2)),
            ('content', self.gf('django.db.models.fields.TextField')(max_length=10000, blank=True)),
            ('name', self.gf('django.db.models.fields.TextField')(max_length=100, blank=True)),
            ('created', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
        ))
        db.send_create_signal('blogs', ['Info_email'])
    def backwards(self, orm):
        """Reverse the migration: drop the blogs_info_email table."""
        # Deleting model 'Info_email'
        db.delete_table('blogs_info_email')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'blogs.blog': {
'Meta': {'object_name': 'Blog'},
'creator': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}),
'custom_domain': ('django.db.models.fields.CharField', [], {'max_length': '300', 'blank': 'True'}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '500', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_open': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '140', 'blank': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '30'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '140'}),
'translation': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['blogs.Blog']", 'null': 'True', 'blank': 'True'})
},
'blogs.category': {
'Meta': {'object_name': 'Category'},
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'blog': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['blogs.Blog']", 'null': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '1000', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '140', 'null': 'True', 'blank': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '140'}),
'top_level_cat': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['blogs.Category']", 'null': 'True', 'blank': 'True'})
},
'blogs.comment': {
'Meta': {'object_name': 'Comment'},
'blog': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['blogs.Blog']", 'null': 'True'}),
'comment': ('django.db.models.fields.TextField', [], {'max_length': '10000'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '140'}),
'notify_me': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'post': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['blogs.Post']", 'null': 'True'}),
'website': ('django.db.models.fields.URLField', [], {'max_length': '300', 'blank': 'True'})
},
'blogs.info_email': {
'Meta': {'object_name': 'Info_email'},
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}),
'blog': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['blogs.Blog']", 'null': 'True'}),
'content': ('django.db.models.fields.TextField', [], {'max_length': '10000', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.TextField', [], {'max_length': '100', 'blank': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'max_length': '2'})
},
'blogs.page': {
'Meta': {'object_name': 'Page'},
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}),
'blog': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['blogs.Blog']", 'null': 'True'}),
'content': ('django.db.models.fields.TextField', [], {'max_length': '10000', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'blank': 'True'}),
'pub_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'max_length': '2'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '140', 'blank': 'True'})
},
'blogs.post': {
'Meta': {'object_name': 'Post'},
'artist': ('django.db.models.fields.CharField', [], {'max_length': '140', 'blank': 'True'}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}),
'base62id': ('django.db.models.fields.CharField', [], {'max_length': '140', 'blank': 'True'}),
'blog': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['blogs.Blog']", 'null': 'True'}),
'category': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['blogs.Category']", 'null': 'True', 'blank': 'True'}),
'content': ('django.db.models.fields.TextField', [], {'max_length': '10000', 'blank': 'True'}),
'content_0': ('django.db.models.fields.TextField', [], {'max_length': '10000', 'blank': 'True'}),
'content_01': ('django.db.models.fields.TextField', [], {'max_length': '10000', 'blank': 'True'}),
'content_1': ('django.db.models.fields.TextField', [], {'max_length': '10000', 'blank': 'True'}),
'content_2': ('django.db.models.fields.TextField', [], {'max_length': '10000', 'blank': 'True'}),
'content_3': ('django.db.models.fields.TextField', [], {'max_length': '10000', 'blank': 'True'}),
'content_4': ('django.db.models.fields.TextField', [], {'max_length': '10000', 'blank': 'True'}),
'content_5': ('django.db.models.fields.TextField', [], {'max_length': '10000', 'blank': 'True'}),
'content_6': ('django.db.models.fields.TextField', [], {'max_length': '10000', 'blank': 'True'}),
'content_video': ('django.db.models.fields.TextField', [], {'max_length': '10000', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_ready': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_top': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'karma': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'last_modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'null': 'True', 'blank': 'True'}),
'layout_type': ('django.db.models.fields.CharField', [], {'default': "'s'", 'max_length': '1'}),
'message': ('django.db.models.fields.TextField', [], {'max_length': '10000', 'blank': 'True'}),
'pic': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'pic_0': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'pic_04': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'pic_1': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'pic_10': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'pic_11': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'pic_12': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'pic_13': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'pic_14': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'pic_15': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'pic_16': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'pic_17': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'pic_18': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'pic_19': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'pic_2': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'pic_20': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'pic_21': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'pic_22': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'pic_23': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'pic_24': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'pic_3': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'pic_4': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'pic_5': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'pic_6': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'pic_7': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'pic_8': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'pic_9': ('sorl.thumbnail.fields.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'pub_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'publish_on_facebook': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '140', 'blank': 'True'}),
'source': ('django.db.models.fields.URLField', [], {'max_length': '300', 'blank': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'P'", 'max_length': '2', 'null': 'True'}),
'text': ('django.db.models.fields.TextField', [], {'max_length': '10000', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '140', 'blank': 'True'}),
'translated_content': ('django.db.models.fields.TextField', [], {'max_length': '10000', 'blank': 'True'}),
'translated_title': ('django.db.models.fields.CharField', [], {'max_length': '140', 'blank': 'True'}),
'views': ('django.db.models.fields.IntegerField', [], {'default': '0', 'blank': 'True'}),
'youtube_id': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'youtube_url': ('django.db.models.fields.URLField', [], {'max_length': '300', 'blank': 'True'})
},
'blogs.subscription': {
'Meta': {'object_name': 'Subscription'},
'blog': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['blogs.Blog']", 'null': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
'blogs.tag': {
'Meta': {'object_name': 'Tag'},
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'blog': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['blogs.Blog']", 'null': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '1000', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '140', 'null': 'True', 'blank': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '140'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
}
}
complete_apps = ['blogs'] | mit |
lecaoquochung/ddnb.django | django/forms/forms.py | 4 | 27651 | """
Form classes
"""
from __future__ import unicode_literals
from collections import OrderedDict
import copy
import datetime
import warnings
from django.core.exceptions import ValidationError, NON_FIELD_ERRORS
from django.forms.fields import Field, FileField
from django.forms.utils import flatatt, ErrorDict, ErrorList
from django.forms.widgets import Media, MediaDefiningClass, TextInput, Textarea
from django.utils.deprecation import RemovedInDjango19Warning
from django.utils.encoding import smart_text, force_text, python_2_unicode_compatible
from django.utils.html import conditional_escape, format_html
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext as _
from django.utils import six
__all__ = ('BaseForm', 'Form')
def pretty_name(name):
    """Converts 'first_name' to 'First name'"""
    if name:
        return name.replace('_', ' ').capitalize()
    return ''
def get_declared_fields(bases, attrs, with_base_fields=True):
    """
    Build an OrderedDict of form Field instances popped from 'attrs',
    merged with the fields declared on the base classes in 'bases'.
    Used by both the Form and ModelForm metaclasses.

    With 'with_base_fields' True every base's fields are included;
    otherwise only those in each base's 'declared_fields' attribute
    (the distinction matters for ModelForm subclassing). Media
    definitions on the bases are picked up as well.
    """
    warnings.warn(
        "get_declared_fields is deprecated and will be removed in Django 1.9.",
        RemovedInDjango19Warning,
        stacklevel=2,
    )
    # Pull the Field instances out of attrs, preserving declaration order.
    fields = [(field_name, attrs.pop(field_name))
              for field_name, obj in list(six.iteritems(attrs))
              if isinstance(obj, Field)]
    fields.sort(key=lambda item: item[1].creation_counter)
    # Prepend fields inherited from the bases. Walking the bases in
    # *reverse* keeps the correct field ordering.
    attr_name = 'base_fields' if with_base_fields else 'declared_fields'
    for base in reversed(bases):
        if hasattr(base, attr_name):
            fields = list(six.iteritems(getattr(base, attr_name))) + fields
    return OrderedDict(fields)
class DeclarativeFieldsMetaclass(MediaDefiningClass):
    """
    Metaclass that collects Fields declared on the base classes.
    """
    def __new__(mcs, name, bases, attrs):
        # Collect fields from current class. Each Field attribute is popped
        # from the class namespace and kept in declaration order (via the
        # per-instance creation_counter).
        current_fields = []
        for key, value in list(attrs.items()):
            if isinstance(value, Field):
                current_fields.append((key, value))
                attrs.pop(key)
        current_fields.sort(key=lambda x: x[1].creation_counter)
        attrs['declared_fields'] = OrderedDict(current_fields)
        new_class = (super(DeclarativeFieldsMetaclass, mcs)
            .__new__(mcs, name, bases, attrs))
        # Walk through the MRO.
        declared_fields = OrderedDict()
        for base in reversed(new_class.__mro__):
            # Collect fields from base class.
            if hasattr(base, 'declared_fields'):
                declared_fields.update(base.declared_fields)
            # Field shadowing: a subclass can remove an inherited field by
            # setting the attribute to None.
            for attr, value in base.__dict__.items():
                if value is None and attr in declared_fields:
                    declared_fields.pop(attr)
        # base_fields is the class-wide definition; instances copy it into
        # self.fields in BaseForm.__init__.
        new_class.base_fields = declared_fields
        new_class.declared_fields = declared_fields
        return new_class
@python_2_unicode_compatible
class BaseForm(object):
# This is the main implementation of all the Form logic. Note that this
# class is different than Form. See the comments by the Form class for more
# information. Any improvements to the form API should be made to *this*
# class, not to the Form class.
    def __init__(self, data=None, files=None, auto_id='id_%s', prefix=None,
                 initial=None, error_class=ErrorList, label_suffix=None,
                 empty_permitted=False):
        """
        Initialize the form. The form is "bound" when either data or files
        is provided; errors are computed lazily by full_clean().
        """
        self.is_bound = data is not None or files is not None
        self.data = data or {}
        self.files = files or {}
        self.auto_id = auto_id
        self.prefix = prefix
        self.initial = initial or {}
        self.error_class = error_class
        # Translators: This is the default suffix added to form field labels
        self.label_suffix = label_suffix if label_suffix is not None else _(':')
        self.empty_permitted = empty_permitted
        self._errors = None  # Stores the errors after clean() has been called.
        self._changed_data = None
        # The base_fields class attribute is the *class-wide* definition of
        # fields. Because a particular *instance* of the class might want to
        # alter self.fields, we create self.fields here by copying base_fields.
        # Instances should always modify self.fields; they should not modify
        # self.base_fields.
        self.fields = copy.deepcopy(self.base_fields)
    def __str__(self):
        # The default string rendering of a form is its <tr>-based layout.
        return self.as_table()
def __iter__(self):
for name in self.fields:
yield self[name]
def __getitem__(self, name):
"Returns a BoundField with the given name."
try:
field = self.fields[name]
except KeyError:
raise KeyError(
"Key %r not found in '%s'" % (name, self.__class__.__name__))
return BoundField(self, field, name)
    @property
    def errors(self):
        "Returns an ErrorDict for the data provided for the form"
        # Validation is lazy: run full_clean() only on first access.
        if self._errors is None:
            self.full_clean()
        return self._errors
def is_valid(self):
"""
Returns True if the form has no errors. Otherwise, False. If errors are
being ignored, returns False.
"""
return self.is_bound and not self.errors
def add_prefix(self, field_name):
"""
Returns the field name with a prefix appended, if this Form has a
prefix set.
Subclasses may wish to override.
"""
return '%s-%s' % (self.prefix, field_name) if self.prefix else field_name
def add_initial_prefix(self, field_name):
"""
Add a 'initial' prefix for checking dynamic initial values
"""
return 'initial-%s' % self.add_prefix(field_name)
    def _html_output(self, normal_row, error_row, row_ender, help_text_html, errors_on_separate_row):
        "Helper function for outputting HTML. Used by as_table(), as_ul(), as_p()."
        top_errors = self.non_field_errors()  # Errors that should be displayed above all fields.
        output, hidden_fields = [], []
        for name, field in self.fields.items():
            html_class_attr = ''
            bf = self[name]
            # Escape and cache in local variable.
            bf_errors = self.error_class([conditional_escape(error) for error in bf.errors])
            if bf.is_hidden:
                # Hidden fields render no row of their own; their errors are
                # hoisted to the top and their widgets appended to the last row.
                if bf_errors:
                    top_errors.extend(
                        [_('(Hidden field %(name)s) %(error)s') % {'name': name, 'error': force_text(e)}
                         for e in bf_errors])
                hidden_fields.append(six.text_type(bf))
            else:
                # Create a 'class="..."' attribute if the row should have any
                # CSS classes applied.
                css_classes = bf.css_classes()
                if css_classes:
                    html_class_attr = ' class="%s"' % css_classes
                if errors_on_separate_row and bf_errors:
                    output.append(error_row % force_text(bf_errors))
                if bf.label:
                    label = conditional_escape(force_text(bf.label))
                    label = bf.label_tag(label) or ''
                else:
                    label = ''
                if field.help_text:
                    help_text = help_text_html % force_text(field.help_text)
                else:
                    help_text = ''
                # Interpolate this field into the caller-supplied row template.
                output.append(normal_row % {
                    'errors': force_text(bf_errors),
                    'label': force_text(label),
                    'field': six.text_type(bf),
                    'help_text': help_text,
                    'html_class_attr': html_class_attr,
                    'field_name': bf.html_name,
                })
        if top_errors:
            output.insert(0, error_row % force_text(top_errors))
        if hidden_fields:  # Insert any hidden fields in the last row.
            str_hidden = ''.join(hidden_fields)
            if output:
                last_row = output[-1]
                # Chop off the trailing row_ender (e.g. '</td></tr>') and
                # insert the hidden fields.
                if not last_row.endswith(row_ender):
                    # This can happen in the as_p() case (and possibly others
                    # that users write): if there are only top errors, we may
                    # not be able to conscript the last row for our purposes,
                    # so insert a new, empty row.
                    last_row = (normal_row % {'errors': '', 'label': '',
                                              'field': '', 'help_text': '',
                                              'html_class_attr': html_class_attr})
                    output.append(last_row)
                output[-1] = last_row[:-len(row_ender)] + str_hidden + row_ender
            else:
                # If there aren't any rows in the output, just append the
                # hidden fields.
                output.append(str_hidden)
        return mark_safe('\n'.join(output))
def as_table(self):
    "Returns this form rendered as HTML <tr>s -- excluding the <table></table>."
    # Row templates for a tabular layout; field errors render inline
    # inside the <td> rather than on their own row.
    layout = {
        'normal_row': '<tr%(html_class_attr)s><th>%(label)s</th><td>%(errors)s%(field)s%(help_text)s</td></tr>',
        'error_row': '<tr><td colspan="2">%s</td></tr>',
        'row_ender': '</td></tr>',
        'help_text_html': '<br /><span class="helptext">%s</span>',
        'errors_on_separate_row': False,
    }
    return self._html_output(**layout)
def as_ul(self):
    "Returns this form rendered as HTML <li>s -- excluding the <ul></ul>."
    # Same rendering machinery as as_table(), with <li> row templates.
    layout = {
        'normal_row': '<li%(html_class_attr)s>%(errors)s%(label)s %(field)s%(help_text)s</li>',
        'error_row': '<li>%s</li>',
        'row_ender': '</li>',
        'help_text_html': ' <span class="helptext">%s</span>',
        'errors_on_separate_row': False,
    }
    return self._html_output(**layout)
def as_p(self):
    "Returns this form rendered as HTML <p>s."
    # Unlike the table/ul layouts, errors get a row of their own here.
    layout = {
        'normal_row': '<p%(html_class_attr)s>%(label)s %(field)s%(help_text)s</p>',
        'error_row': '%s',
        'row_ender': '</p>',
        'help_text_html': ' <span class="helptext">%s</span>',
        'errors_on_separate_row': True,
    }
    return self._html_output(**layout)
def non_field_errors(self):
    """
    Returns an ErrorList of errors that aren't associated with a particular
    field -- i.e., from Form.clean(). Returns an empty ErrorList if there
    are none.
    """
    # The fallback list is built eagerly, exactly as the original
    # positional default to dict.get() was.
    fallback = self.error_class(error_class='nonfield')
    return self.errors.get(NON_FIELD_ERRORS, fallback)
def _raw_value(self, fieldname):
"""
Returns the raw_value for a particular field name. This is just a
convenient wrapper around widget.value_from_datadict.
"""
field = self.fields[fieldname]
prefix = self.add_prefix(fieldname)
return field.widget.value_from_datadict(self.data, self.files, prefix)
def add_error(self, field, error):
    """
    Update the content of `self._errors`.
    The `field` argument is the name of the field to which the errors
    should be added. If its value is None the errors will be treated as
    NON_FIELD_ERRORS.
    The `error` argument can be a single error, a list of errors, or a
    dictionary that maps field names to lists of errors. What we define as
    an "error" can be either a simple string or an instance of
    ValidationError with its message attribute set and what we define as
    list or dictionary can be an actual `list` or `dict` or an instance
    of ValidationError with its `error_list` or `error_dict` attribute set.
    If `error` is a dictionary, the `field` argument *must* be None and
    errors will be added to the fields that correspond to the keys of the
    dictionary.
    """
    if not isinstance(error, ValidationError):
        # Normalize to ValidationError and let its constructor
        # do the hard work of making sense of the input.
        error = ValidationError(error)
    # After this branch, `error` is always a {field_name: error_list} dict.
    if hasattr(error, 'error_dict'):
        if field is not None:
            raise TypeError(
                "The argument `field` must be `None` when the `error` "
                "argument contains errors for multiple fields."
            )
        else:
            error = error.error_dict
    else:
        error = {field or NON_FIELD_ERRORS: error.error_list}
    for field, error_list in error.items():
        if field not in self.errors:
            # Reject unknown field names early (NON_FIELD_ERRORS is the
            # one pseudo-field that is always accepted).
            if field != NON_FIELD_ERRORS and field not in self.fields:
                raise ValueError(
                    "'%s' has no field named '%s'." % (self.__class__.__name__, field))
            if field == NON_FIELD_ERRORS:
                self._errors[field] = self.error_class(error_class='nonfield')
            else:
                self._errors[field] = self.error_class()
        self._errors[field].extend(error_list)
        # A field with errors must not expose a (stale) cleaned value.
        if field in self.cleaned_data:
            del self.cleaned_data[field]
def has_error(self, field, code=None):
    """Return True if `field` has any error, or -- when `code` is given --
    an error carrying that specific error code."""
    if code is None:
        return field in self.errors
    if field not in self.errors:
        return False
    return any(err.code == code for err in self.errors.as_data()[field])
def full_clean(self):
    """
    Cleans all of self.data and populates self._errors and
    self.cleaned_data.
    """
    self._errors = ErrorDict()
    if not self.is_bound:  # Stop further processing.
        return
    self.cleaned_data = {}
    # If the form is permitted to be empty, and none of the form data has
    # changed from the initial data, short circuit any validation.
    if self.empty_permitted and not self.has_changed():
        return
    # Three phases: per-field cleaning, the form-wide clean() hook,
    # then the subclass hook (model validation in ModelForms).
    self._clean_fields()
    self._clean_form()
    self._post_clean()
def _clean_fields(self):
    """Run each field's clean() plus any per-field clean_<name>() hook."""
    for name, field in self.fields.items():
        # value_from_datadict() gets the data from the data dictionaries.
        # Each widget type knows how to retrieve its own data, because some
        # widgets split data over several HTML fields.
        value = field.widget.value_from_datadict(self.data, self.files, self.add_prefix(name))
        try:
            if isinstance(field, FileField):
                # FileFields also need the initial value so they can fall
                # back to it when no new file was uploaded.
                initial = self.initial.get(name, field.initial)
                value = field.clean(value, initial)
            else:
                value = field.clean(value)
            self.cleaned_data[name] = value
            # The clean_<name>() hook's return value replaces the
            # cleaned value for this field.
            if hasattr(self, 'clean_%s' % name):
                value = getattr(self, 'clean_%s' % name)()
                self.cleaned_data[name] = value
        except ValidationError as e:
            self.add_error(name, e)
def _clean_form(self):
    """Run the form-wide clean() hook and record its errors."""
    try:
        cleaned_data = self.clean()
    except ValidationError as e:
        # Errors raised here are not tied to any single field.
        self.add_error(None, e)
    else:
        # clean() may return a replacement cleaned_data dict.
        if cleaned_data is not None:
            self.cleaned_data = cleaned_data
def _post_clean(self):
    """
    An internal hook for performing additional cleaning after form cleaning
    is complete. Used for model validation in model forms.
    """
    # Intentionally a no-op in the base form.
    pass
def clean(self):
    """
    Hook for doing any extra form-wide cleaning after Field.clean() been
    called on every field. Any ValidationError raised by this method will
    not be associated with a particular field; it will have a special-case
    association with the field named '__all__'.
    """
    # Default implementation returns the cleaned data unchanged.
    return self.cleaned_data
def has_changed(self):
    """
    Returns True if data differs from initial.
    """
    return len(self.changed_data) != 0
@property
def changed_data(self):
    """List of field names whose submitted value differs from the initial
    value. Computed lazily and cached in self._changed_data."""
    if self._changed_data is None:
        self._changed_data = []
        # XXX: For now we're asking the individual widgets whether or not the
        # data has changed. It would probably be more efficient to hash the
        # initial data, store it in a hidden field, and compare a hash of the
        # submitted data, but we'd need a way to easily get the string value
        # for a given field. Right now, that logic is embedded in the render
        # method of each widget.
        for name, field in self.fields.items():
            prefixed_name = self.add_prefix(name)
            data_value = field.widget.value_from_datadict(self.data, self.files, prefixed_name)
            if not field.show_hidden_initial:
                initial_value = self.initial.get(name, field.initial)
                if callable(initial_value):
                    initial_value = initial_value()
            else:
                # The initial value is carried in a hidden companion
                # widget; re-parse it through the field.
                initial_prefixed_name = self.add_initial_prefix(name)
                hidden_widget = field.hidden_widget()
                try:
                    initial_value = field.to_python(hidden_widget.value_from_datadict(
                        self.data, self.files, initial_prefixed_name))
                except ValidationError:
                    # Always assume data has changed if validation fails.
                    self._changed_data.append(name)
                    continue
            if field._has_changed(initial_value, data_value):
                self._changed_data.append(name)
    return self._changed_data
@property
def media(self):
    """
    Provide a description of all media required to render the widgets on this form
    """
    # Fold every widget's media into one combined Media object.
    combined = Media()
    for form_field in self.fields.values():
        combined = combined + form_field.widget.media
    return combined
def is_multipart(self):
    """
    Returns True if the form needs to be multipart-encoded, i.e. it has
    FileInput. Otherwise, False.
    """
    return any(field.widget.needs_multipart_form for field in self.fields.values())
def hidden_fields(self):
    """
    Returns a list of all the BoundField objects that are hidden fields.
    Useful for manual form layout in templates.
    """
    return list(filter(lambda bound_field: bound_field.is_hidden, self))
def visible_fields(self):
    """
    Returns a list of BoundField objects that aren't hidden fields.
    The opposite of the hidden_fields() method.
    """
    return list(filter(lambda bound_field: not bound_field.is_hidden, self))
class Form(six.with_metaclass(DeclarativeFieldsMetaclass, BaseForm)):
    "A collection of Fields, plus their associated data."
    # This is a separate class from BaseForm in order to abstract the way
    # self.fields is specified. This class (Form) is the one that does the
    # fancy metaclass stuff purely for the semantic sugar -- it allows one
    # to define a form using declarative syntax.
    # BaseForm itself has no way of designating self.fields.
    # (The metaclass collects declared Field attributes into base_fields.)
@python_2_unicode_compatible
class BoundField(object):
    "A Field plus data"
    def __init__(self, form, field, name):
        self.form = form
        self.field = field
        self.name = name
        # HTML name/id variants include the form's prefix.
        self.html_name = form.add_prefix(name)
        self.html_initial_name = form.add_initial_prefix(name)
        self.html_initial_id = form.add_initial_prefix(self.auto_id)
        if self.field.label is None:
            self.label = pretty_name(name)
        else:
            self.label = self.field.label
        self.help_text = field.help_text or ''

    def __str__(self):
        """Renders this field as an HTML widget."""
        if self.field.show_hidden_initial:
            # Also emit a hidden input carrying the initial value, so
            # change detection can compare against it on submit.
            return self.as_widget() + self.as_hidden(only_initial=True)
        return self.as_widget()

    def __iter__(self):
        """
        Yields rendered strings that comprise all widgets in this BoundField.
        This really is only useful for RadioSelect widgets, so that you can
        iterate over individual radio buttons in a template.
        """
        id_ = self.field.widget.attrs.get('id') or self.auto_id
        attrs = {'id': id_} if id_ else {}
        for subwidget in self.field.widget.subwidgets(self.html_name, self.value(), attrs):
            yield subwidget

    def __len__(self):
        # Number of subwidgets (see __iter__).
        return len(list(self.__iter__()))

    def __getitem__(self, idx):
        # Indexed access to subwidgets (see __iter__).
        return list(self.__iter__())[idx]

    @property
    def errors(self):
        """
        Returns an ErrorList for this field. Returns an empty ErrorList
        if there are none.
        """
        return self.form.errors.get(self.name, self.form.error_class())

    def as_widget(self, widget=None, attrs=None, only_initial=False):
        """
        Renders the field by rendering the passed widget, adding any HTML
        attributes passed as attrs. If no widget is specified, then the
        field's default widget will be used.
        """
        if not widget:
            widget = self.field.widget

        if self.field.localize:
            widget.is_localized = True

        attrs = attrs or {}
        auto_id = self.auto_id
        # Only set an id when neither the caller nor the widget supplied one.
        if auto_id and 'id' not in attrs and 'id' not in widget.attrs:
            if not only_initial:
                attrs['id'] = auto_id
            else:
                attrs['id'] = self.html_initial_id

        # only_initial renders the hidden companion input instead of the
        # visible field (used by show_hidden_initial).
        if not only_initial:
            name = self.html_name
        else:
            name = self.html_initial_name
        return force_text(widget.render(name, self.value(), attrs=attrs))

    def as_text(self, attrs=None, **kwargs):
        """
        Returns a string of HTML for representing this as an <input type="text">.
        """
        return self.as_widget(TextInput(), attrs, **kwargs)

    def as_textarea(self, attrs=None, **kwargs):
        "Returns a string of HTML for representing this as a <textarea>."
        return self.as_widget(Textarea(), attrs, **kwargs)

    def as_hidden(self, attrs=None, **kwargs):
        """
        Returns a string of HTML for representing this as an <input type="hidden">.
        """
        return self.as_widget(self.field.hidden_widget(), attrs, **kwargs)

    @property
    def data(self):
        """
        Returns the data for this BoundField, or None if it wasn't given.
        """
        return self.field.widget.value_from_datadict(self.form.data, self.form.files, self.html_name)

    def value(self):
        """
        Returns the value for this BoundField, using the initial value if
        the form is not bound or the data otherwise.
        """
        if not self.form.is_bound:
            data = self.form.initial.get(self.name, self.field.initial)
            if callable(data):
                data = data()
                # If this is an auto-generated default date, nix the
                # microseconds for standardized handling. See #22502.
                if (isinstance(data, (datetime.datetime, datetime.time)) and
                        not getattr(self.field.widget, 'supports_microseconds', True)):
                    data = data.replace(microsecond=0)
        else:
            data = self.field.bound_data(
                self.data, self.form.initial.get(self.name, self.field.initial)
            )
        return self.field.prepare_value(data)

    def label_tag(self, contents=None, attrs=None, label_suffix=None):
        """
        Wraps the given contents in a <label>, if the field has an ID attribute.
        contents should be 'mark_safe'd to avoid HTML escaping. If contents
        aren't given, uses the field's HTML-escaped label.
        If attrs are given, they're used as HTML attributes on the <label> tag.
        label_suffix allows overriding the form's label_suffix.
        """
        contents = contents or self.label
        if label_suffix is None:
            label_suffix = (self.field.label_suffix if self.field.label_suffix is not None
                            else self.form.label_suffix)
        # Only add the suffix if the label does not end in punctuation.
        # Translators: If found as last label character, these punctuation
        # characters will prevent the default label_suffix to be appended to the label
        if label_suffix and contents and contents[-1] not in _(':?.!'):
            contents = format_html('{0}{1}', contents, label_suffix)
        widget = self.field.widget
        id_ = widget.attrs.get('id') or self.auto_id
        if id_:
            id_for_label = widget.id_for_label(id_)
            if id_for_label:
                attrs = dict(attrs or {}, **{'for': id_for_label})
            # Tag required fields with the form's required_css_class, if any.
            if self.field.required and hasattr(self.form, 'required_css_class'):
                attrs = attrs or {}
                if 'class' in attrs:
                    attrs['class'] += ' ' + self.form.required_css_class
                else:
                    attrs['class'] = self.form.required_css_class
            attrs = flatatt(attrs) if attrs else ''
            contents = format_html('<label{0}>{1}</label>', attrs, contents)
        else:
            # No id: no <label> wrapper, just the (escaped) text.
            contents = conditional_escape(contents)
        return mark_safe(contents)

    def css_classes(self, extra_classes=None):
        """
        Returns a string of space-separated CSS classes for this field.
        """
        # extra_classes may be a space-separated string or an iterable.
        if hasattr(extra_classes, 'split'):
            extra_classes = extra_classes.split()
        extra_classes = set(extra_classes or [])
        if self.errors and hasattr(self.form, 'error_css_class'):
            extra_classes.add(self.form.error_css_class)
        if self.field.required and hasattr(self.form, 'required_css_class'):
            extra_classes.add(self.form.required_css_class)
        return ' '.join(extra_classes)

    @property
    def is_hidden(self):
        "Returns True if this BoundField's widget is hidden."
        return self.field.widget.is_hidden

    @property
    def auto_id(self):
        """
        Calculates and returns the ID attribute for this BoundField, if the
        associated Form has specified auto_id. Returns an empty string otherwise.
        """
        auto_id = self.form.auto_id
        # auto_id may be a format string containing '%s' (e.g. "id_%s"),
        # a plain truthy value (use the html_name as-is), or falsy (no id).
        if auto_id and '%s' in smart_text(auto_id):
            return smart_text(auto_id) % self.html_name
        elif auto_id:
            return self.html_name
        return ''

    @property
    def id_for_label(self):
        """
        Wrapper around the field widget's `id_for_label` method.
        Useful, for example, for focusing on this field regardless of whether
        it has a single widget or a MultiWidget.
        """
        widget = self.field.widget
        id_ = widget.attrs.get('id') or self.auto_id
        return widget.id_for_label(id_)
| bsd-3-clause |
mscuthbert/abjad | abjad/tools/pitchtools/test/test_pitchtools_NamedInterval___repr__.py | 2 | 1436 | # -*- encoding: utf-8 -*-
from abjad import *
def test_pitchtools_NamedInterval___repr___01():
    """Checks __repr__ of unison and ascending named intervals."""
    # Fix: the original rebound a local variable named `repr`,
    # shadowing the builtin; use the builtin repr() and drive the
    # repeated assertions from a case table instead.
    cases = [
        (('perfect', 1), "NamedInterval('P1')"),
        (('augmented', 1), "NamedInterval('+aug1')"),
        (('minor', 2), "NamedInterval('+m2')"),
        (('major', 2), "NamedInterval('+M2')"),
        (('minor', 3), "NamedInterval('+m3')"),
    ]
    for arguments, expected in cases:
        interval = pitchtools.NamedInterval(*arguments)
        assert repr(interval) == expected
def test_pitchtools_NamedInterval___repr___02():
    """Checks __repr__ of unison and descending named intervals."""
    # Fix: avoid shadowing the builtin repr() with a local variable,
    # and table-drive the repeated construct/assert pattern.
    cases = [
        (('perfect', -1), "NamedInterval('P1')"),
        (('augmented', -1), "NamedInterval('-aug1')"),
        (('minor', -2), "NamedInterval('-m2')"),
        (('major', -2), "NamedInterval('-M2')"),
        (('minor', -3), "NamedInterval('-m3')"),
    ]
    for arguments, expected in cases:
        interval = pitchtools.NamedInterval(*arguments)
        assert repr(interval) == expected
CVL-dev/cvl-fabric-launcher | pyinstaller-2.1/pyinstaller-gui.py | 7 | 3444 | #!/usr/bin/python
#-----------------------------------------------------------------------------
# Copyright (c) 2013, PyInstaller Development Team.
#
# Distributed under the terms of the GNU General Public License with exception
# for distributing bootloader.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
#
# Tkinter interface to PyInstaller.
#
import sys
import subprocess
from Tkinter import *
import tkFileDialog
import FileDialog
class PyInstallerGUI:
    """Minimal Tkinter front-end that collects build options and shells
    out to pyinstaller.py (legacy; see the guarded entry point below)."""

    def make_checkbutton(self, frame, text):
        # Create a checkbox bound to a fresh IntVar and return the
        # variable so its state can be read at build time.
        var = IntVar()
        widget = Checkbutton(frame, text=text, variable=var)
        widget.grid(sticky="NW")
        return var

    def __init__(self):
        root = Tk()
        root.title("PyInstaller GUI")
        # Three stacked frames: file picker, option checkboxes, buttons.
        fr1 = Frame(root, width=300, height=100)
        fr1.pack(side="top")
        fr2 = Frame(root, width=300, height=300,
                    borderwidth=2, relief="ridge")
        fr2.pack(ipadx=10, ipady=10)
        fr4 = Frame(root, width=300, height=100)
        fr4.pack(side="bottom", pady=10)
        getFileButton = Button(fr1, text="Script to bundle ...")
        getFileButton.bind("<Button>", self.GetFile)
        getFileButton.pack(side="left")
        self.filein = Entry(fr1)
        self.filein.pack(side="right")
        self.filetype = self.make_checkbutton(fr2, "One File Package")
        self.ascii = self.make_checkbutton(fr2, "Do NOT include decodings")
        self.debug = self.make_checkbutton(fr2, "Use debug versions")
        # Platform-specific options get a dummy IntVar (always 0) when
        # they do not apply, so makePackage can read them uniformly.
        if sys.platform.startswith('win'):
            self.noconsole = self.make_checkbutton(fr2, "No console (Windows only)")
        else:
            self.noconsole = IntVar()
        if not sys.platform.startswith('win'):
            self.strip = self.make_checkbutton(fr2, "Strip the exe and shared libs")
        else:
            self.strip = IntVar()
        okaybutton = Button(fr4, text="Okay ")
        okaybutton.bind("<Button>", self.makePackage)
        okaybutton.pack(side="left")
        cancelbutton = Button(fr4, text="Cancel")
        cancelbutton.bind("<Button>", self.killapp)
        cancelbutton.pack(side="right")
        self.fin = ''
        self.fout = ''
        # Center the 400x250 window on the screen, then block in the
        # Tk main loop until the window is closed.
        ws = root.winfo_screenwidth()
        hs = root.winfo_screenheight()
        x = (ws/2) - (400/2)
        y = (hs/2) - (250/2)
        root.geometry('%dx%d+%d+%d' % (400, 250, x, y))
        root.mainloop()

    def killapp(self, event):
        # Cancel button handler: quit immediately.
        sys.exit(0)

    def makePackage(self, event):
        # Translate checkbox state into pyinstaller.py CLI flags, run the
        # build, and exit with the subprocess return code.
        commands = ['python', 'pyinstaller.py']
        if self.filetype.get():
            commands.append('--onefile')
        if self.ascii.get():
            commands.append('--ascii')
        if self.debug.get():
            commands.append('--debug')
        if self.noconsole.get():
            commands.append('--noconsole')
        if self.strip.get():
            commands.append('--strip')
        commands.append(self.fin)
        retcode = subprocess.call(commands)
        sys.exit(retcode)

    def GetFile(self, event):
        # File-picker dialog; echo the chosen path into the entry box.
        self.fin = tkFileDialog.askopenfilename()
        self.filein.insert(0, self.fin)
if __name__ == "__main__":
    # The GUI is deprecated: abort immediately when run directly.
    raise SystemExit("Please use just 'pyinstaller.py'. Gui is not maintained.")
    # NOTE(review): everything below is unreachable because of the raise
    # above -- kept only as a record of the old entry point.
    try:
        app = PyInstallerGUI()
    except KeyboardInterrupt:
        raise SystemExit("Aborted by user request.")
| gpl-3.0 |
krischer/python-future | src/libpasteurize/fixes/fix_add_all_future_builtins.py | 60 | 1270 | """
For the ``future`` package.
Adds this import line::
from builtins import (ascii, bytes, chr, dict, filter, hex, input,
int, list, map, next, object, oct, open, pow,
range, round, str, super, zip)
to a module, irrespective of whether each definition is used.
Adds these imports after any other imports (in an initial block of them).
"""
from __future__ import unicode_literals
from lib2to3 import fixer_base
from libfuturize.fixer_util import touch_import_top
class FixAddAllFutureBuiltins(fixer_base.BaseFix):
    """lib2to3 fixer that inserts ``from builtins import *`` at the top of
    every processed module (after any initial import block)."""
    BM_compatible = True
    PATTERN = "file_input"  # match the whole module
    run_order = 1  # run early so the import lands before other fixers

    def transform(self, node, results):
        # import_str = """(ascii, bytes, chr, dict, filter, hex, input,
        #                  int, list, map, next, object, oct, open, pow,
        #                  range, round, str, super, zip)"""
        # A wildcard import is used instead of the explicit tuple above.
        touch_import_top(u'builtins', '*', node)
        # builtins = """ascii bytes chr dict filter hex input
        #               int list map next object oct open pow
        #               range round str super zip"""
        # for builtin in sorted(builtins.split(), reverse=True):
        #     touch_import_top(u'builtins', builtin, node)
| mit |
gregdek/ansible | lib/ansible/modules/remote_management/cpm/cpm_plugcontrol.py | 55 | 7369 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# (C) 2018 Red Hat Inc.
# Copyright (C) 2018 Western Telematic Inc. <kenp@wti.com>
#
# GNU General Public License v3.0+
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
# Module to execute WTI Plug Commands on WTI OOB and PDU devices.
# WTI remote_management
#
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'
}
DOCUMENTATION = """
---
module: cpm_plugcontrol
version_added: "2.8"
author: "Western Telematic Inc. (@wtinetworkgear)"
short_description: Get and Set Plug actions on WTI OOB and PDU power devices
description:
- "Get and Set Plug actions on WTI OOB and PDU devices"
options:
cpm_action:
description:
- This is the Action to send the module.
required: true
choices: [ "getplugcontrol", "setplugcontrol" ]
cpm_url:
description:
- This is the URL of the WTI device to send the module.
required: true
cpm_username:
description:
- This is the Username of the WTI device to send the module.
cpm_password:
description:
- This is the Password of the WTI device to send the module.
use_https:
description:
- Designates to use an https connection or http connection.
required: false
type: bool
default: true
validate_certs:
description:
- If false, SSL certificates will not be validated. This should only be used
- on personally controlled sites using self-signed certificates.
required: false
type: bool
default: true
use_proxy:
description: Flag to control if the lookup will observe HTTP proxy environment variables when present.
required: false
type: bool
default: false
plug_id:
description:
- This is the plug number or the plug name that is to be manipulated
For the plugget command, the plug_id 'all' will return the status of all the plugs the
user has rights to access.
required: true
plug_state:
description:
- This is what action to take on the plug.
required: false
choices: [ "on", "off", "boot", "default" ]
"""
EXAMPLES = """
# Get Plug status for all ports
- name: Get the Plug status for ALL ports of a WTI device
cpm_plugcontrol:
cpm_action: "getplugcontrol"
cpm_url: "rest.wti.com"
cpm_username: "restpower"
cpm_password: "restfulpowerpass12"
use_https: true
validate_certs: true
plug_id: "all"
# Get Plug status for port 2
- name: Get the Plug status for the given port of a WTI device
cpm_plugcontrol:
cpm_action: "getplugcontrol"
cpm_url: "rest.wti.com"
cpm_username: "restpower"
cpm_password: "restfulpowerpass12"
use_https: true
validate_certs: false
plug_id: "2"
# Reboot plug 5
- name: Reboot Plug 5 on a given WTI device
cpm_plugcontrol:
cpm_action: "setplugcontrol"
cpm_url: "rest.wti.com"
cpm_username: "restpower"
cpm_password: "restfulpowerpass12"
use_https: true
plug_id: "5"
plug_state: "boot"
"""
RETURN = """
data:
description: The output JSON returned from the commands sent
returned: always
type: str
"""
import base64
import json
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils._text import to_text, to_bytes, to_native
from ansible.module_utils.six.moves.urllib.error import HTTPError, URLError
from ansible.module_utils.urls import open_url, ConnectionError, SSLValidationError
def assemble_json(cpmmodule, cpmresult):
    """Build the JSON payload for a plug-control POST from module params.

    One ``{...}`` object is emitted per comma-separated plug id; the
    objects are concatenated back-to-back, matching what the WTI REST
    endpoint expects.
    """
    pieces = []
    plug_state = cpmmodule.params["plug_state"]
    for token in cpmmodule.params["plug_id"].split(","):
        # Numeric tokens address a plug by number, anything else by name.
        key = "plug" if token.isdigit() else "plugname"
        fragment = '{"%s": "%s"' % (key, to_native(token))
        if plug_state is not None:
            fragment += ',"state": "%s"' % to_native(plug_state)
        pieces.append(fragment + '}')
    return ''.join(pieces)
def run_module():
    """Build the REST request from the module parameters, send it to the
    WTI device and exit via AnsibleModule with the JSON response."""
    # define the available arguments/parameters that a user can pass to
    # the module
    module_args = dict(
        cpm_action=dict(choices=['getplugcontrol', 'setplugcontrol'], required=True),
        cpm_url=dict(type='str', required=True),
        cpm_username=dict(type='str', required=True),
        cpm_password=dict(type='str', required=True, no_log=True),
        plug_id=dict(type='str', required=True),
        plug_state=dict(choices=['on', 'off', 'boot', 'default'], required=False),
        use_https=dict(type='bool', default=True),
        validate_certs=dict(type='bool', default=True),
        use_proxy=dict(type='bool', default=False)
    )
    result = dict(
        changed=False,
        data=''
    )
    module = AnsibleModule(argument_spec=module_args, supports_check_mode=True)
    # In check mode nothing is sent to the device.
    if module.check_mode:
        return result
    # HTTP basic-auth header value from username:password.
    auth = to_text(base64.b64encode(to_bytes('{0}:{1}'.format(to_native(module.params['cpm_username']), to_native(module.params['cpm_password'])),
                                             errors='surrogate_or_strict')))
    if module.params['use_https'] is True:
        protocol = "https://"
    else:
        protocol = "http://"
    Payload = None
    if (module.params['cpm_action'] == 'getplugcontrol'):
        fullurl = ("%s%s/api/v2/config/powerplug" % (protocol, to_native(module.params['cpm_url'])))
        # 'all' queries every accessible plug; otherwise filter by id.
        if (module.params['plug_id'].lower() != 'all'):
            fullurl = '%s?plug=%s' % (fullurl, to_native(module.params['plug_id']))
        method = 'GET'
    elif (module.params['cpm_action'] == 'setplugcontrol'):
        Payload = assemble_json(module, result)
        fullurl = ("%s%s/api/v2/config/powerplug" % (protocol, to_native(module.params['cpm_url'])))
        method = 'POST'
    try:
        response = open_url(fullurl, data=Payload, method=method, validate_certs=module.params['validate_certs'], use_proxy=module.params['use_proxy'],
                            headers={'Content-Type': 'application/json', 'Authorization': "Basic %s" % auth})
        # Only state-changing (non-GET) requests mark the task changed.
        if (method != 'GET'):
            result['changed'] = True
    except HTTPError as e:
        fail_json = dict(msg='Received HTTP error for {0} : {1}'.format(fullurl, to_native(e)), changed=False)
        module.fail_json(**fail_json)
    except URLError as e:
        fail_json = dict(msg='Failed lookup url for {0} : {1}'.format(fullurl, to_native(e)), changed=False)
        module.fail_json(**fail_json)
    except SSLValidationError as e:
        fail_json = dict(msg='Error validating the server''s certificate for {0} : {1}'.format(fullurl, to_native(e)), changed=False)
        module.fail_json(**fail_json)
    except ConnectionError as e:
        fail_json = dict(msg='Error connecting to for {0} : {1}'.format(fullurl, to_native(e)), changed=False)
        module.fail_json(**fail_json)
    result['data'] = json.loads(response.read())
    module.exit_json(**result)
def main():
    """Ansible module entry point."""
    run_module()


if __name__ == '__main__':
    main()
| gpl-3.0 |
ikmaak/Printrun | printrun/pronsole.py | 12 | 68420 | #!/usr/bin/env python
# This file is part of the Printrun suite.
#
# Printrun is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Printrun is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Printrun. If not, see <http://www.gnu.org/licenses/>.
import cmd
import glob
import os
import time
import threading
import sys
import shutil
import subprocess
import codecs
import argparse
import locale
import logging
import traceback
import re
from serial import SerialException
from . import printcore
from .utils import install_locale, run_command, get_command_output, \
format_time, format_duration, RemainingTimeEstimator, \
get_home_pos, parse_build_dimensions, parse_temperature_report, \
setup_logging
install_locale('pronterface')
from .settings import Settings, BuildDimensionsSetting
from .power import powerset_print_start, powerset_print_stop
from printrun import gcoder
from .rpc import ProntRPC
if os.name == "nt":
try:
import _winreg
except:
pass
READLINE = True
try:
import readline
try:
readline.rl.mode.show_all_if_ambiguous = "on" # config pyreadline on windows
except:
pass
except:
READLINE = False # neither readline module is available
# Matches a temperature report anywhere in a received line
# (firmware output such as "ok T:210.0 /210.0 B:60.0 /60.0").
tempreading_exp = re.compile("(^T:| T:)")

# Bit flags selecting which periodic status reports are requested.
REPORT_NONE = 0
REPORT_POS = 1
REPORT_TEMP = 2
REPORT_MANUAL = 4
class Status(object):
    """Mutable snapshot of printer state: temperatures and current job."""

    def __init__(self):
        # Hotend reading / target (degrees Celsius)
        self.extruder_temp = 0
        self.extruder_temp_target = 0
        # Heated-bed reading / target
        self.bed_temp = 0
        self.bed_temp_target = 0
        # Currently printing job and its progress (1.0 == done/idle)
        self.print_job = None
        self.print_job_progress = 1.0

    def update_tempreading(self, tempstr):
        """Update readings from a firmware temperature report string."""
        temps = parse_temperature_report(tempstr)

        def pick(slot):
            # Prefer the explicit first-tool key "T0", fall back to "T".
            for key in ("T0", "T"):
                if key in temps and temps[key][slot]:
                    return float(temps[key][slot])
            return None

        reading = pick(0)
        if reading is not None:
            self.extruder_temp = reading
        target = pick(1)
        if target is not None:
            self.extruder_temp_target = target

        if "B" in temps and temps["B"][0]:
            self.bed_temp = float(temps["B"][0])
            bed_target = temps["B"][1]
            if bed_target:
                self.bed_temp_target = float(bed_target)

    @property
    def bed_enabled(self):
        """True while the bed reports a nonzero temperature."""
        return bool(self.bed_temp)

    @property
    def extruder_enabled(self):
        """True while the hotend reports a nonzero temperature."""
        return bool(self.extruder_temp)
class pronsole(cmd.Cmd):
def __init__(self):
    """Set up the printer core, its callbacks, defaults and settings."""
    cmd.Cmd.__init__(self)
    if not READLINE:
        # No readline module available: disable tab completion.
        self.completekey = None
    self.status = Status()
    self.dynamic_temp = False
    self.compute_eta = None
    self.statuscheck = False
    self.status_thread = None
    self.monitor_interval = 3
    # Printer core and its event callbacks.
    self.p = printcore.printcore()
    self.p.recvcb = self.recvcb
    self.p.startcb = self.startcb
    self.p.endcb = self.endcb
    self.p.layerchangecb = self.layer_change_cb
    self.p.process_host_command = self.process_host_command
    self.recvlisteners = []
    self.in_macro = False
    self.p.onlinecb = self.online
    self.p.errorcb = self.logError
    # Loaded G-code / print-job state.
    self.fgcode = None
    self.filename = None
    self.rpc_server = None
    self.curlayer = 0
    # SD-card listing and print state.
    self.sdlisting = 0
    self.sdlisting_echo = 0
    self.sdfiles = []
    self.paused = False
    self.sdprinting = 0
    self.uploading = 0  # Unused, just for pronterface generalization
    # Temperature presets (degrees C, kept as strings for G-code).
    self.temps = {"pla": "185", "abs": "230", "off": "0"}
    self.bedtemps = {"pla": "60", "abs": "110", "off": "0"}
    self.percentdone = 0
    self.posreport = ""
    self.tempreadings = ""
    self.userm114 = 0
    self.userm105 = 0
    self.m105_waitcycles = 0
    self.macros = {}
    self.history_file = "~/.pronsole-history"
    self.rc_loaded = False
    self.processing_rc = False
    self.processing_args = False
    # Settings registry plus change callbacks for the dynamic entries.
    self.settings = Settings(self)
    self.settings._add(BuildDimensionsSetting("build_dimensions", "200x200x100+0+0+0+0+0+0", _("Build dimensions"), _("Dimensions of Build Platform\n & optional offset of origin\n & optional switch position\n\nExamples:\n XXXxYYY\n XXX,YYY,ZZZ\n XXXxYYYxZZZ+OffX+OffY+OffZ\nXXXxYYYxZZZ+OffX+OffY+OffZ+HomeX+HomeY+HomeZ"), "Printer"), self.update_build_dimensions)
    self.settings._port_list = self.scanserial
    self.settings._temperature_abs_cb = self.set_temp_preset
    self.settings._temperature_pla_cb = self.set_temp_preset
    self.settings._bedtemp_abs_cb = self.set_temp_preset
    self.settings._bedtemp_pla_cb = self.set_temp_preset
    self.update_build_dimensions(None, self.settings.build_dimensions)
    self.update_tcp_streaming_mode(None, self.settings.tcp_streaming_mode)
    self.monitoring = 0
    self.starttime = 0
    self.extra_print_time = 0
    self.silent = False
    self.commandprefixes = 'MGT$'
    # Prompt templates; %(...)s placeholders are filled in by promptf().
    self.promptstrs = {"offline": "%(bold)soffline>%(normal)s ",
                       "fallback": "%(bold)sPC>%(normal)s ",
                       "macro": "%(bold)s..>%(normal)s ",
                       "online": "%(bold)sT:%(extruder_temp_fancy)s%(progress_fancy)s>%(normal)s "}
# --------------------------------------------------------------
# General console handling
# --------------------------------------------------------------
def postloop(self):
    # Make sure the printer connection is closed before the command
    # loop terminates, then defer to the base implementation.
    self.p.disconnect()
    cmd.Cmd.postloop(self)
def preloop(self):
    self.log(_("Welcome to the printer console! Type \"help\" for a list of available commands."))
    # The prompt is dynamic (temperature/progress), so compute it now.
    self.prompt = self.promptf()
    cmd.Cmd.preloop(self)
# We replace this function, defined in cmd.py .
# It's default behavior with regards to Ctr-C
# and Ctr-D doesn't make much sense...
def cmdloop(self, intro=None):
    """Repeatedly issue a prompt, accept input, parse an initial prefix
    off the received input, and dispatch to action methods, passing them
    the remainder of the line as argument.
    """
    # Adapted from cmd.Cmd.cmdloop: adds persistent readline history
    # and friendlier Ctrl-C / Ctrl-D handling.
    self.preloop()
    if self.use_rawinput and self.completekey:
        try:
            import readline
            self.old_completer = readline.get_completer()
            readline.set_completer(self.complete)
            readline.parse_and_bind(self.completekey + ": complete")
            # Restore command history from the previous session.
            history = os.path.expanduser(self.history_file)
            if os.path.exists(history):
                readline.read_history_file(history)
        except ImportError:
            pass
    try:
        if intro is not None:
            self.intro = intro
        if self.intro:
            self.stdout.write(str(self.intro) + "\n")
        stop = None
        while not stop:
            if self.cmdqueue:
                line = self.cmdqueue.pop(0)
            else:
                if self.use_rawinput:
                    try:
                        line = raw_input(self.prompt)
                    except EOFError:
                        # Ctrl-D: request a clean exit.
                        # NOTE(review): `line` is left unbound here;
                        # presumably do_exit terminates -- confirm.
                        self.log("")
                        self.do_exit("")
                    except KeyboardInterrupt:
                        # Ctrl-C: cancel the current input line only.
                        self.log("")
                        line = ""
                else:
                    self.stdout.write(self.prompt)
                    self.stdout.flush()
                    line = self.stdin.readline()
                    if not len(line):
                        line = ""
                    else:
                        line = line.rstrip('\r\n')
            line = self.precmd(line)
            stop = self.onecmd(line)
            stop = self.postcmd(stop, line)
        self.postloop()
    finally:
        if self.use_rawinput and self.completekey:
            try:
                import readline
                readline.set_completer(self.old_completer)
                # Persist this session's history for next time.
                readline.write_history_file(history)
            except ImportError:
                pass
def confirm(self):
y_or_n = raw_input("y/n: ")
if y_or_n == "y":
return True
elif y_or_n != "n":
return self.confirm()
return False
    def log(self, *msg):
        """Join all arguments into one unicode string and log it at INFO level."""
        msg = u"".join(unicode(i) for i in msg)
        logging.info(msg)
    def logError(self, *msg):
        """Log *msg* at ERROR level and, if configured, run the user's error command.

        The external command template receives the message via the $m placeholder.
        """
        msg = u"".join(unicode(i) for i in msg)
        logging.error(msg)
        if not self.settings.error_command:
            return
        output = get_command_output(self.settings.error_command, {"$m": msg})
        if output:
            self.log("Error command output:")
            self.log(output.rstrip())
def promptf(self):
"""A function to generate prompts so that we can do dynamic prompts. """
if self.in_macro:
promptstr = self.promptstrs["macro"]
elif not self.p.online:
promptstr = self.promptstrs["offline"]
elif self.status.extruder_enabled:
promptstr = self.promptstrs["online"]
else:
promptstr = self.promptstrs["fallback"]
if "%" not in promptstr:
return promptstr
else:
specials = {}
specials["extruder_temp"] = str(int(self.status.extruder_temp))
specials["extruder_temp_target"] = str(int(self.status.extruder_temp_target))
if self.status.extruder_temp_target == 0:
specials["extruder_temp_fancy"] = str(int(self.status.extruder_temp))
else:
specials["extruder_temp_fancy"] = "%s/%s" % (str(int(self.status.extruder_temp)), str(int(self.status.extruder_temp_target)))
if self.p.printing:
progress = int(1000 * float(self.p.queueindex) / len(self.p.mainqueue)) / 10
elif self.sdprinting:
progress = self.percentdone
else:
progress = 0.0
specials["progress"] = str(progress)
if self.p.printing or self.sdprinting:
specials["progress_fancy"] = " " + str(progress) + "%"
else:
specials["progress_fancy"] = ""
specials["bold"] = "\033[01m"
specials["normal"] = "\033[00m"
return promptstr % specials
def postcmd(self, stop, line):
""" A hook we override to generate prompts after
each command is executed, for the next prompt.
We also use it to send M105 commands so that
temp info gets updated for the prompt."""
if self.p.online and self.dynamic_temp:
self.p.send_now("M105")
self.prompt = self.promptf()
return stop
def kill(self):
self.statuscheck = False
if self.status_thread:
self.status_thread.join()
self.status_thread = None
if self.rpc_server is not None:
self.rpc_server.shutdown()
def write_prompt(self):
sys.stdout.write(self.promptf())
sys.stdout.flush()
def help_help(self, l = ""):
self.do_help("")
def do_gcodes(self, l = ""):
self.help_gcodes()
def help_gcodes(self):
self.log("Gcodes are passed through to the printer as they are")
def precmd(self, line):
if line.upper().startswith("M114"):
self.userm114 += 1
elif line.upper().startswith("M105"):
self.userm105 += 1
return line
def help_shell(self):
self.log("Executes a python command. Example:")
self.log("! os.listdir('.')")
    def do_shell(self, l):
        """Execute *l* as raw Python in the current process.

        NOTE(security): this runs arbitrary code with the console's
        privileges. It is an intentional power-user escape hatch, reachable
        only from the local interactive prompt via the '!' prefix.
        """
        exec(l)
    def emptyline(self):
        """Called when an empty line is entered - do not remove.

        Overrides cmd.Cmd's default, which would repeat the previous command.
        """
        pass
def default(self, l):
if l[0].upper() in self.commandprefixes.upper():
if self.p and self.p.online:
if not self.p.loud:
self.log("SENDING:" + l.upper())
self.p.send_now(l.upper())
else:
self.logError(_("Printer is not online."))
return
elif l[0] == "@":
if self.p and self.p.online:
if not self.p.loud:
self.log("SENDING:" + l[1:])
self.p.send_now(l[1:])
else:
self.logError(_("Printer is not online."))
return
else:
cmd.Cmd.default(self, l)
    def do_exit(self, l):
        """Cool down heaters, disconnect and terminate the program.

        If a print is active, asks for confirmation first. Does not return:
        ends with sys.exit().
        """
        # Turn the hotend off before leaving
        if self.status.extruder_temp_target != 0:
            self.log("Setting extruder temp to 0")
        self.p.send_now("M104 S0.0")
        if self.status.bed_enabled:
            if self.status.bed_temp_target != 0:
                self.log("Setting bed temp to 0")
            self.p.send_now("M140 S0.0")
        self.log("Disconnecting from printer...")
        if self.p.printing:
            self.log(_("Are you sure you want to exit while printing?\n\
(this will terminate the print)."))
            if not self.confirm():
                return
        self.log(_("Exiting program. Goodbye!"))
        self.p.disconnect()
        self.kill()
        sys.exit()
    def help_exit(self):
        """Help text for the exit command."""
        self.log(_("Disconnects from the printer and exits the program."))
# --------------------------------------------------------------
# Macro handling
# --------------------------------------------------------------
def complete_macro(self, text, line, begidx, endidx):
if (len(line.split()) == 2 and line[-1] != " ") or (len(line.split()) == 1 and line[-1] == " "):
return [i for i in self.macros.keys() if i.startswith(text)]
elif len(line.split()) == 3 or (len(line.split()) == 2 and line[-1] == " "):
return [i for i in ["/D", "/S"] + self.completenames(text) if i.startswith(text)]
else:
return []
def hook_macro(self, l):
l = l.rstrip()
ls = l.lstrip()
ws = l[:len(l) - len(ls)] # just leading whitespace
if len(ws) == 0:
self.end_macro()
# pass the unprocessed line to regular command processor to not require empty line in .pronsolerc
return self.onecmd(l)
self.cur_macro_def += l + "\n"
    def end_macro(self):
        """Finish macro recording: restore onecmd, compile and register the macro.

        Installs do_<name>/help_<name> handlers on the class and persists the
        definition to .pronsolerc unless we are replaying the rc file.
        """
        if "onecmd" in self.__dict__: del self.onecmd  # remove the hook_macro override
        self.in_macro = False
        self.prompt = self.promptf()
        if self.cur_macro_def != "":
            self.macros[self.cur_macro_name] = self.cur_macro_def
            macro = self.compile_macro(self.cur_macro_name, self.cur_macro_def)
            # Default-argument binding captures the current macro/name values
            setattr(self.__class__, "do_" + self.cur_macro_name, lambda self, largs, macro = macro: macro(self, *largs.split()))
            setattr(self.__class__, "help_" + self.cur_macro_name, lambda self, macro_name = self.cur_macro_name: self.subhelp_macro(macro_name))
            if not self.processing_rc:
                self.log("Macro '" + self.cur_macro_name + "' defined")
                # save it to the rc file (unless it came from -e on the command line)
                if not self.processing_args:
                    macro_key = "macro " + self.cur_macro_name
                    macro_def = macro_key
                    if "\n" in self.cur_macro_def:
                        macro_def += "\n"
                    else:
                        macro_def += " "
                    macro_def += self.cur_macro_def
                    self.save_in_rc(macro_key, macro_def)
        else:
            self.logError("Empty macro - cancelled")
        del self.cur_macro_name, self.cur_macro_def
def compile_macro_line(self, line):
line = line.rstrip()
ls = line.lstrip()
ws = line[:len(line) - len(ls)] # just leading whitespace
if ls == "" or ls.startswith('#'): return "" # no code
if ls.startswith('!'):
return ws + ls[1:] + "\n" # python mode
else:
ls = ls.replace('"', '\\"') # need to escape double quotes
ret = ws + 'self.precmd("' + ls + '".format(*arg))\n' # parametric command mode
return ret + ws + 'self.onecmd("' + ls + '".format(*arg))\n'
    def compile_macro(self, macro_name, macro_def):
        """Compile a macro definition into a callable `macro(self, *arg)`.

        Builds Python source via compile_macro_line and executes it; the
        Python 2 `exec` statement defines `macro` in the local namespace.
        NOTE: this relies on Python 2 exec-in-locals semantics and would need
        an explicit namespace dict under Python 3.
        """
        if macro_def.strip() == "":
            self.logError("Empty macro - cancelled")
            return
        macro = None
        pycode = "def macro(self,*arg):\n"
        if "\n" not in macro_def.strip():
            # single-line macro: indent it into the function body
            pycode += self.compile_macro_line("  " + macro_def.strip())
        else:
            lines = macro_def.split("\n")
            for l in lines:
                pycode += self.compile_macro_line(l)
        exec pycode
        return macro
    def start_macro(self, macro_name, prev_definition = "", suppress_instructions = False):
        """Begin interactive recording of a multi-line macro.

        Overrides self.onecmd with hook_macro so subsequent lines are captured
        until a non-indented line ends recording (see hook_macro/end_macro).
        """
        if not self.processing_rc and not suppress_instructions:
            self.logError("Enter macro using indented lines, end with empty line")
        self.cur_macro_name = macro_name
        self.cur_macro_def = ""
        self.onecmd = self.hook_macro  # override onecmd temporarily
        # NOTE(review): in_macro is set False here even though recording starts,
        # so promptf() does not show the macro prompt — looks intentional
        # upstream, but worth confirming.
        self.in_macro = False
        self.prompt = self.promptf()
def delete_macro(self, macro_name):
if macro_name in self.macros.keys():
delattr(self.__class__, "do_" + macro_name)
del self.macros[macro_name]
self.log("Macro '" + macro_name + "' removed")
if not self.processing_rc and not self.processing_args:
self.save_in_rc("macro " + macro_name, "")
else:
self.logError("Macro '" + macro_name + "' is not defined")
    def do_macro(self, args):
        """Define, list, show or delete user macros.

        No args: list macros. '<name> /d' deletes, '<name> /s' shows,
        '<name> <definition>' defines a one-liner, bare '<name>' starts
        interactive multi-line recording.
        """
        if args.strip() == "":
            self.print_topics("User-defined macros", map(str, self.macros.keys()), 15, 80)
            return
        arglist = args.split(None, 1)
        macro_name = arglist[0]
        # Refuse to shadow a built-in do_* command with a macro
        if macro_name not in self.macros and hasattr(self.__class__, "do_" + macro_name):
            self.logError("Name '" + macro_name + "' is being used by built-in command")
            return
        if len(arglist) == 2:
            macro_def = arglist[1]
            if macro_def.lower() == "/d":
                self.delete_macro(macro_name)
                return
            if macro_def.lower() == "/s":
                self.subhelp_macro(macro_name)
                return
            # single-line definition: register it immediately
            self.cur_macro_def = macro_def
            self.cur_macro_name = macro_name
            self.end_macro()
            return
        # bare name: start interactive recording (pre-filling if redefining)
        if macro_name in self.macros:
            self.start_macro(macro_name, self.macros[macro_name])
        else:
            self.start_macro(macro_name)
def help_macro(self):
self.log("Define single-line macro: macro <name> <definition>")
self.log("Define multi-line macro: macro <name>")
self.log("Enter macro definition in indented lines. Use {0} .. {N} to substitute macro arguments")
self.log("Enter python code, prefixed with ! Use arg[0] .. arg[N] to substitute macro arguments")
self.log("Delete macro: macro <name> /d")
self.log("Show macro definition: macro <name> /s")
self.log("'macro' without arguments displays list of defined macros")
def subhelp_macro(self, macro_name):
if macro_name in self.macros.keys():
macro_def = self.macros[macro_name]
if "\n" in macro_def:
self.log("Macro '" + macro_name + "' defined as:")
self.log(self.macros[macro_name] + "----------------")
else:
self.log("Macro '" + macro_name + "' defined as: '" + macro_def + "'")
else:
self.logError("Macro '" + macro_name + "' is not defined")
# --------------------------------------------------------------
# Configuration handling
# --------------------------------------------------------------
def set(self, var, str):
try:
t = type(getattr(self.settings, var))
value = self.settings._set(var, str)
if not self.processing_rc and not self.processing_args:
self.save_in_rc("set " + var, "set %s %s" % (var, value))
except AttributeError:
logging.debug(_("Unknown variable '%s'") % var)
except ValueError, ve:
if hasattr(ve, "from_validator"):
self.logError(_("Bad value %s for variable '%s': %s") % (str, var, ve.args[0]))
else:
self.logError(_("Bad value for variable '%s', expecting %s (%s)") % (var, repr(t)[1:-1], ve.args[0]))
    def do_set(self, argl):
        """List all settings (no args), validate one name (one arg), or assign (two args)."""
        args = argl.split(None, 1)
        if len(args) < 1:
            # no arguments: dump every public setting
            for k in [kk for kk in dir(self.settings) if not kk.startswith("_")]:
                self.log("%s = %s" % (k, str(getattr(self.settings, k))))
            return
        if len(args) < 2:
            # Try getting the default value of the setting to check whether it
            # actually exists
            try:
                getattr(self.settings, args[0])
            except AttributeError:
                logging.warning("Unknown variable '%s'" % args[0])
            return
        self.set(args[0], args[1])
def help_set(self):
self.log("Set variable: set <variable> <value>")
self.log("Show variable: set <variable>")
self.log("'set' without arguments displays all variables")
def complete_set(self, text, line, begidx, endidx):
if (len(line.split()) == 2 and line[-1] != " ") or (len(line.split()) == 1 and line[-1] == " "):
return [i for i in dir(self.settings) if not i.startswith("_") and i.startswith(text)]
elif len(line.split()) == 3 or (len(line.split()) == 2 and line[-1] == " "):
return [i for i in self.settings._tabcomplete(line.split()[1]) if i.startswith(text)]
else:
return []
    def load_rc(self, rc_filename):
        """Execute every non-comment line of *rc_filename* as a console command.

        Sets self.processing_rc while replaying so commands do not re-save
        themselves to the rc file.
        """
        self.processing_rc = True
        try:
            rc = codecs.open(rc_filename, "r", "utf-8")
            self.rc_filename = os.path.abspath(rc_filename)
            for rc_cmd in rc:
                if not rc_cmd.lstrip().startswith("#"):
                    self.onecmd(rc_cmd)
            rc.close()
            # A macro definition at EOF has no terminating line: close it here
            if hasattr(self, "cur_macro_def"):
                self.end_macro()
            self.rc_loaded = True
        finally:
            self.processing_rc = False
def load_default_rc(self, rc_filename = ".pronsolerc"):
if rc_filename == ".pronsolerc" and hasattr(sys, "frozen") and sys.frozen in ["windows_exe", "console_exe"]:
rc_filename = "printrunconf.ini"
try:
try:
self.load_rc(os.path.join(os.path.expanduser("~"), rc_filename))
except IOError:
self.load_rc(rc_filename)
except IOError:
# make sure the filename is initialized
self.rc_filename = os.path.abspath(os.path.join(os.path.expanduser("~"), rc_filename))
def save_in_rc(self, key, definition):
"""
Saves or updates macro or other definitions in .pronsolerc
key is prefix that determines what is being defined/updated (e.g. 'macro foo')
definition is the full definition (that is written to file). (e.g. 'macro foo move x 10')
Set key as empty string to just add (and not overwrite)
Set definition as empty string to remove it from .pronsolerc
To delete line from .pronsolerc, set key as the line contents, and definition as empty string
Only first definition with given key is overwritten.
Updates are made in the same file position.
Additions are made to the end of the file.
"""
rci, rco = None, None
if definition != "" and not definition.endswith("\n"):
definition += "\n"
try:
written = False
if os.path.exists(self.rc_filename):
shutil.copy(self.rc_filename, self.rc_filename + "~bak")
rci = codecs.open(self.rc_filename + "~bak", "r", "utf-8")
rco = codecs.open(self.rc_filename + "~new", "w", "utf-8")
if rci is not None:
overwriting = False
for rc_cmd in rci:
l = rc_cmd.rstrip()
ls = l.lstrip()
ws = l[:len(l) - len(ls)] # just leading whitespace
if overwriting and len(ws) == 0:
overwriting = False
if not written and key != "" and rc_cmd.startswith(key) and (rc_cmd + "\n")[len(key)].isspace():
overwriting = True
written = True
rco.write(definition)
if not overwriting:
rco.write(rc_cmd)
if not rc_cmd.endswith("\n"): rco.write("\n")
if not written:
rco.write(definition)
if rci is not None:
rci.close()
rco.close()
shutil.move(self.rc_filename + "~new", self.rc_filename)
# if definition != "":
# self.log("Saved '"+key+"' to '"+self.rc_filename+"'")
# else:
# self.log("Removed '"+key+"' from '"+self.rc_filename+"'")
except Exception, e:
self.logError("Saving failed for ", key + ":", str(e))
finally:
del rci, rco
# --------------------------------------------------------------
# Configuration update callbacks
# --------------------------------------------------------------
    def update_build_dimensions(self, param, value):
        """Settings callback: re-parse build dimensions and update the analyzer's home position."""
        self.build_dimensions_list = parse_build_dimensions(value)
        self.p.analyzer.home_pos = get_home_pos(self.build_dimensions_list)
def update_tcp_streaming_mode(self, param, value):
self.p.tcp_streaming_mode = self.settings.tcp_streaming_mode
    def update_rpc_server(self, param, value):
        """Settings callback: start or stop the RPC server to match the setting."""
        if value:
            if self.rpc_server is None:
                self.rpc_server = ProntRPC(self)
        else:
            if self.rpc_server is not None:
                self.rpc_server.shutdown()
                self.rpc_server = None
# --------------------------------------------------------------
# Command line options handling
# --------------------------------------------------------------
    def add_cmdline_arguments(self, parser):
        """Register pronsole's command-line options on *parser* (argparse)."""
        parser.add_argument('-v', '--verbose', help = _("increase verbosity"), action = "store_true")
        parser.add_argument('-c', '--conf', '--config', help = _("load this file on startup instead of .pronsolerc ; you may chain config files, if so settings auto-save will use the last specified file"), action = "append", default = [])
        parser.add_argument('-e', '--execute', help = _("executes command after configuration/.pronsolerc is loaded ; macros/settings from these commands are not autosaved"), action = "append", default = [])
        parser.add_argument('filename', nargs='?', help = _("file to load"))
    def process_cmdline_arguments(self, args):
        """Apply parsed options: verbosity, config files, -e commands, then the filename."""
        if args.verbose:
            logger = logging.getLogger()
            logger.setLevel(logging.DEBUG)
        for config in args.conf:
            self.load_rc(config)
        if not self.rc_loaded:
            self.load_default_rc()
        # Commands given with -e must not be auto-saved back to the rc file
        self.processing_args = True
        for command in args.execute:
            self.onecmd(command)
        self.processing_args = False
        self.update_rpc_server(None, self.settings.rpc_server)
        if args.filename:
            # Python 2: argv is bytes, decode with the user's locale encoding
            filename = args.filename.decode(locale.getpreferredencoding())
            self.cmdline_filename_callback(filename)
def cmdline_filename_callback(self, filename):
self.do_load(filename)
    def parse_cmdline(self, args):
        """Build the argument parser, parse and apply *args*, then set up logging."""
        parser = argparse.ArgumentParser(description = 'Printrun 3D printer interface')
        self.add_cmdline_arguments(parser)
        # -psn_* is injected by macOS when launched from the Finder; drop it
        args = [arg for arg in args if not arg.startswith("-psn")]
        args = parser.parse_args(args = args)
        self.process_cmdline_arguments(args)
        setup_logging(sys.stdout, self.settings.log_path, True)
# --------------------------------------------------------------
# Printer connection handling
# --------------------------------------------------------------
    def connect_to_printer(self, port, baud, dtr):
        """Open the serial connection and start the status-monitor thread.

        Returns True on success, False when opening the port failed (after
        logging a user-friendly explanation).
        """
        try:
            self.p.connect(port, baud, dtr)
        except SerialException as e:
            # Currently, there is no errno, but it should be there in the future
            if e.errno == 2:
                self.logError(_("Error: You are trying to connect to a non-existing port."))
            elif e.errno == 8:
                self.logError(_("Error: You don't have permission to open %s.") % port)
                self.logError(_("You might need to add yourself to the dialout group."))
            else:
                self.logError(traceback.format_exc())
            # Kill the scope anyway
            return False
        except OSError as e:
            if e.errno == 2:
                self.logError(_("Error: You are trying to connect to a non-existing port."))
            else:
                self.logError(traceback.format_exc())
            return False
        # Connection established: start polling printer status in the background
        self.statuscheck = True
        self.status_thread = threading.Thread(target = self.statuschecker)
        self.status_thread.start()
        return True
def do_connect(self, l):
a = l.split()
p = self.scanserial()
port = self.settings.port
if (port == "" or port not in p) and len(p) > 0:
port = p[0]
baud = self.settings.baudrate or 115200
if len(a) > 0:
port = a[0]
if len(a) > 1:
try:
baud = int(a[1])
except:
self.log("Bad baud value '" + a[1] + "' ignored")
if len(p) == 0 and not port:
self.log("No serial ports detected - please specify a port")
return
if len(a) == 0:
self.log("No port specified - connecting to %s at %dbps" % (port, baud))
if port != self.settings.port:
self.settings.port = port
self.save_in_rc("set port", "set port %s" % port)
if baud != self.settings.baudrate:
self.settings.baudrate = baud
self.save_in_rc("set baudrate", "set baudrate %d" % baud)
self.connect_to_printer(port, baud, self.settings.dtr)
def help_connect(self):
self.log("Connect to printer")
self.log("connect <port> <baudrate>")
self.log("If port and baudrate are not specified, connects to first detected port at 115200bps")
ports = self.scanserial()
if ports:
self.log("Available ports: ", " ".join(ports))
else:
self.log("No serial ports were automatically found.")
def complete_connect(self, text, line, begidx, endidx):
if (len(line.split()) == 2 and line[-1] != " ") or (len(line.split()) == 1 and line[-1] == " "):
return [i for i in self.scanserial() if i.startswith(text)]
elif len(line.split()) == 3 or (len(line.split()) == 2 and line[-1] == " "):
return [i for i in ["2400", "9600", "19200", "38400", "57600", "115200"] if i.startswith(text)]
else:
return []
    def scanserial(self):
        """scan for available ports. return a list of device names.

        On Windows the SERIALCOMM registry map is enumerated; on POSIX the
        usual /dev device globs are used. Bluetooth bridge ports are excluded
        (see _bluetoothSerialFilter). Python 2 filter() returns a list here.
        """
        baselist = []
        if os.name == "nt":
            try:
                key = _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, "HARDWARE\\DEVICEMAP\\SERIALCOMM")
                i = 0
                while(1):
                    baselist += [_winreg.EnumValue(key, i)[1]]
                    i += 1
            except:
                # EnumValue raises when we run past the last registry value;
                # a missing key is also treated as "no ports".
                pass
        for g in ['/dev/ttyUSB*', '/dev/ttyACM*', "/dev/tty.*", "/dev/cu.*", "/dev/rfcomm*"]:
            baselist += glob.glob(g)
        return filter(self._bluetoothSerialFilter, baselist)
def _bluetoothSerialFilter(self, serial):
return not ("Bluetooth" in serial or "FireFly" in serial)
def online(self):
self.log("\rPrinter is now online")
self.write_prompt()
def do_disconnect(self, l):
self.p.disconnect()
def help_disconnect(self):
self.log("Disconnects from the printer")
def do_block_until_online(self, l):
while not self.p.online:
time.sleep(0.1)
def help_block_until_online(self, l):
self.log("Blocks until printer is online")
self.log("Warning: if something goes wrong, this can block pronsole forever")
# --------------------------------------------------------------
# Printer status monitoring
# --------------------------------------------------------------
def statuschecker_inner(self, do_monitoring = True):
if self.p.online:
if self.p.writefailures >= 4:
self.logError(_("Disconnecting after 4 failed writes."))
self.status_thread = None
self.disconnect()
return
if do_monitoring:
if self.sdprinting and not self.paused:
self.p.send_now("M27")
if self.m105_waitcycles % 10 == 0:
self.p.send_now("M105")
self.m105_waitcycles += 1
cur_time = time.time()
wait_time = 0
while time.time() < cur_time + self.monitor_interval - 0.25:
if not self.statuscheck:
break
time.sleep(0.25)
# Safeguard: if system time changes and goes back in the past,
# we could get stuck almost forever
wait_time += 0.25
if wait_time > self.monitor_interval - 0.25:
break
# Always sleep at least a bit, if something goes wrong with the
# system time we'll avoid freezing the whole app this way
time.sleep(0.25)
def statuschecker(self):
while self.statuscheck:
self.statuschecker_inner()
# --------------------------------------------------------------
# File loading handling
# --------------------------------------------------------------
def do_load(self, filename):
self._do_load(filename)
    def _do_load(self, filename):
        """Validate *filename*, load it via load_gcode and report size/duration."""
        if not filename:
            self.logError("No file name given.")
            return
        self.log(_("Loading file: %s") % filename)
        if not os.path.exists(filename):
            self.logError("File not found!")
            return
        self.load_gcode(filename)
        self.log(_("Loaded %s, %d lines.") % (filename, len(self.fgcode)))
        self.log(_("Estimated duration: %d layers, %s") % self.fgcode.estimate_duration())
    def load_gcode(self, filename, layer_callback = None, gcode = None):
        """Parse *filename* into self.fgcode (a gcoder object) and remember the name.

        A pre-built gcode container may be supplied via *gcode*; otherwise a
        deferred LightGCode is created and prepared here.
        """
        if gcode is None:
            self.fgcode = gcoder.LightGCode(deferred = True)
        else:
            self.fgcode = gcode
        # "rU" = universal-newline text mode (Python 2)
        self.fgcode.prepare(open(filename, "rU"),
                            get_home_pos(self.build_dimensions_list),
                            layer_callback = layer_callback)
        self.fgcode.estimate_duration()
        self.filename = filename
def complete_load(self, text, line, begidx, endidx):
s = line.split()
if len(s) > 2:
return []
if (len(s) == 1 and line[-1] == " ") or (len(s) == 2 and line[-1] != " "):
if len(s) > 1:
return [i[len(s[1]) - len(text):] for i in glob.glob(s[1] + "*/") + glob.glob(s[1] + "*.g*")]
else:
return glob.glob("*/") + glob.glob("*.g*")
def help_load(self):
self.log("Loads a gcode file (with tab-completion)")
def do_slice(self, l):
l = l.split()
if len(l) == 0:
self.logError(_("No file name given."))
return
settings = 0
if l[0] == "set":
settings = 1
else:
self.log(_("Slicing file: %s") % l[0])
if not(os.path.exists(l[0])):
self.logError(_("File not found!"))
return
try:
if settings:
command = self.settings.sliceoptscommand
self.log(_("Entering slicer settings: %s") % command)
run_command(command, blocking = True)
else:
command = self.settings.slicecommand
stl_name = l[0]
gcode_name = stl_name.replace(".stl", "_export.gcode").replace(".STL", "_export.gcode")
run_command(command,
{"$s": stl_name,
"$o": gcode_name},
blocking = True)
self.log(_("Loading sliced file."))
self.do_load(l[0].replace(".stl", "_export.gcode"))
except Exception, e:
self.logError(_("Slicing failed: %s") % e)
def complete_slice(self, text, line, begidx, endidx):
s = line.split()
if len(s) > 2:
return []
if (len(s) == 1 and line[-1] == " ") or (len(s) == 2 and line[-1] != " "):
if len(s) > 1:
return [i[len(s[1]) - len(text):] for i in glob.glob(s[1] + "*/") + glob.glob(s[1] + "*.stl")]
else:
return glob.glob("*/") + glob.glob("*.stl")
    def help_slice(self):
        """Help text for the slice command and its 'view'/'set' variants."""
        self.log(_("Creates a gcode file from an stl model using the slicer (with tab-completion)"))
        self.log(_("slice filename.stl - create gcode file"))
        self.log(_("slice filename.stl view - create gcode file and view using skeiniso (if using skeinforge)"))
        self.log(_("slice set - adjust slicer settings"))
# --------------------------------------------------------------
# Print/upload handling
# --------------------------------------------------------------
    def do_upload(self, l):
        """Upload a local G-code file to the printer's SD card.

        Usage: upload <localfile> <targetname>, with targetname in 8.3 form.
        Streams the file between M28/M29, showing progress; Ctrl-C aborts the
        upload and cancels the partial SD write.
        """
        names = l.split()
        if len(names) == 2:
            filename = names[0]
            targetname = names[1]
        else:
            self.logError(_("Please enter target name in 8.3 format."))
            return
        if not self.p.online:
            self.logError(_("Not connected to printer."))
            return
        self._do_load(filename)
        self.log(_("Uploading as %s") % targetname)
        self.log(_("Uploading %s") % self.filename)
        # M28 switches the firmware into SD-write mode for targetname
        self.p.send_now("M28 " + targetname)
        self.log(_("Press Ctrl-C to interrupt upload."))
        self.p.startprint(self.fgcode)
        try:
            sys.stdout.write(_("Progress: ") + "00.0%")
            sys.stdout.flush()
            while self.p.printing:
                time.sleep(0.5)
                sys.stdout.write("\b\b\b\b\b%04.1f%%" % (100 * float(self.p.queueindex) / len(self.p.mainqueue),))
                sys.stdout.flush()
            # M29 closes the SD file; then refresh the card listing
            self.p.send_now("M29 " + targetname)
            time.sleep(0.2)
            self.p.clear = True
            self._do_ls(False)
            self.log("\b\b\b\b\b100%.")
            self.log(_("Upload completed. %s should now be on the card.") % targetname)
            return
        except (KeyboardInterrupt, Exception) as e:
            if isinstance(e, KeyboardInterrupt):
                self.logError(_("...interrupted!"))
            else:
                self.logError(_("Something wrong happened while uploading:")
                              + "\n" + traceback.format_exc())
            # Abort: stop streaming, close the SD file and cancel the print
            self.p.pause()
            self.p.send_now("M29 " + targetname)
            time.sleep(0.2)
            self.p.cancelprint()
            self.logError(_("A partial file named %s may have been written to the sd card.") % targetname)
def complete_upload(self, text, line, begidx, endidx):
s = line.split()
if len(s) > 2:
return []
if (len(s) == 1 and line[-1] == " ") or (len(s) == 2 and line[-1] != " "):
if len(s) > 1:
return [i[len(s[1]) - len(text):] for i in glob.glob(s[1] + "*/") + glob.glob(s[1] + "*.g*")]
else:
return glob.glob("*/") + glob.glob("*.g*")
def help_upload(self):
self.log("Uploads a gcode file to the sd card")
    def help_print(self):
        """Help for 'print'; mentions the currently loaded file when there is one."""
        if not self.fgcode:
            self.log(_("Send a loaded gcode file to the printer. Load a file with the load command first."))
        else:
            self.log(_("Send a loaded gcode file to the printer. You have %s loaded right now.") % self.filename)
    def do_print(self, l):
        """Start printing the currently loaded G-code file over the serial link."""
        if not self.fgcode:
            self.logError(_("No file loaded. Please use load first."))
            return
        if not self.p.online:
            self.logError(_("Not connected to printer."))
            return
        self.log(_("Printing %s") % self.filename)
        self.log(_("You can monitor the print with the monitor command."))
        # Host-driven print, not an SD print: clear the SD flag before starting
        self.sdprinting = False
        self.p.startprint(self.fgcode)
def do_pause(self, l):
if self.sdprinting:
self.p.send_now("M25")
else:
if not self.p.printing:
self.logError(_("Not printing, cannot pause."))
return
self.p.pause()
self.paused = True
    def help_pause(self):
        """Help text for the pause command."""
        self.log(_("Pauses a running print"))
def pause(self, event = None):
return self.do_pause(None)
def do_resume(self, l):
if not self.paused:
self.logError(_("Not paused, unable to resume. Start a print first."))
return
self.paused = False
if self.sdprinting:
self.p.send_now("M24")
return
else:
self.p.resume()
    def help_resume(self):
        """Help text for the resume command."""
        self.log(_("Resumes a paused print."))
    def listfiles(self, line):
        """recv listener collecting the M20 SD file-list reply.

        Lines between "Begin file list" and "End file list" are stored
        lowercased in self.sdfiles; on completion the listener unregisters
        itself and optionally echoes the listing (sdlisting_echo).
        """
        if "Begin file list" in line:
            self.sdlisting = 1
        elif "End file list" in line:
            self.sdlisting = 0
            self.recvlisteners.remove(self.listfiles)
            if self.sdlisting_echo:
                self.log(_("Files on SD card:"))
                self.log("\n".join(self.sdfiles))
        elif self.sdlisting:
            self.sdfiles.append(line.strip().lower())
def _do_ls(self, echo):
# FIXME: this was 2, but I think it should rather be 0 as in do_upload
self.sdlisting = 0
self.sdlisting_echo = echo
self.sdfiles = []
self.recvlisteners.append(self.listfiles)
self.p.send_now("M20")
    def do_ls(self, l):
        """List the files on the SD card (requires an online printer)."""
        if not self.p.online:
            self.logError(_("Printer is not online. Please connect to it first."))
            return
        self._do_ls(True)
    def help_ls(self):
        """Help text for the ls command."""
        self.log(_("Lists files on the SD card"))
    def waitforsdresponse(self, l):
        """recv listener driving an SD print started via do_sdprint.

        Reacts to the firmware's M23/M24/M27 responses: starts the print when
        the file is selected, tracks percent done, and unregisters itself on
        open failure or print completion.
        """
        if "file.open failed" in l:
            self.logError(_("Opening file failed."))
            self.recvlisteners.remove(self.waitforsdresponse)
            return
        if "File opened" in l:
            self.log(l)
        if "File selected" in l:
            self.log(_("Starting print"))
            self.p.send_now("M24")
            self.sdprinting = True
            # self.recvlisteners.remove(self.waitforsdresponse)
            return
        if "Done printing file" in l:
            self.log(l)
            self.sdprinting = False
            self.recvlisteners.remove(self.waitforsdresponse)
            return
        if "SD printing byte" in l:
            # M27 handler: reply ends with "<done>/<total>"
            try:
                resp = l.split()
                vals = resp[-1].split("/")
                self.percentdone = 100.0 * int(vals[0]) / int(vals[1])
            except:
                # malformed progress line: keep the previous value
                pass
def do_reset(self, l):
self.p.reset()
    def help_reset(self):
        """Help text for the reset command."""
        self.log(_("Resets the printer."))
    def do_sdprint(self, l):
        """Print a file that is already on the SD card.

        Refreshes the card listing first (blocking until it completes), checks
        the name exists, then selects it with M23; waitforsdresponse drives
        the rest of the start sequence.
        """
        if not self.p.online:
            self.log(_("Printer is not online. Please connect to it first."))
            return
        self._do_ls(False)
        # listfiles removes itself from recvlisteners once the listing is done
        while self.listfiles in self.recvlisteners:
            time.sleep(0.1)
        if l.lower() not in self.sdfiles:
            self.log(_("File is not present on card. Please upload it first."))
            return
        self.recvlisteners.append(self.waitforsdresponse)
        self.p.send_now("M23 " + l.lower())
        self.log(_("Printing file: %s from SD card.") % l.lower())
        self.log(_("Requesting SD print..."))
        time.sleep(1)
    def help_sdprint(self):
        """Help text for the sdprint command."""
        self.log(_("Print a file from the SD card. Tab completes with available file names."))
        self.log(_("sdprint filename.g"))
def complete_sdprint(self, text, line, begidx, endidx):
if not self.sdfiles and self.p.online:
self._do_ls(False)
while self.listfiles in self.recvlisteners:
time.sleep(0.1)
if (len(line.split()) == 2 and line[-1] != " ") or (len(line.split()) == 1 and line[-1] == " "):
return [i for i in self.sdfiles if i.startswith(text)]
# --------------------------------------------------------------
# Printcore callbacks
# --------------------------------------------------------------
    def startcb(self, resuming = False):
        """printcore callback for print start/resume.

        Records the start time, sets up the layer-based ETA estimator for
        host prints, runs the optional user start command and asks the OS not
        to sleep during the print.
        """
        self.starttime = time.time()
        if resuming:
            self.log(_("Print resumed at: %s") % format_time(self.starttime))
        else:
            self.log(_("Print started at: %s") % format_time(self.starttime))
        # SD prints report their own progress, so no layer-based estimator
        if not self.sdprinting:
            self.compute_eta = RemainingTimeEstimator(self.fgcode)
        else:
            self.compute_eta = None
        if self.settings.start_command:
            output = get_command_output(self.settings.start_command,
                                        {"$s": str(self.filename),
                                         "$t": format_time(time.time())})
            if output:
                self.log("Start command output:")
                self.log(output.rstrip())
        try:
            powerset_print_start(reason = "Preventing sleep during print")
        except:
            # Power management is best-effort; log and carry on
            self.logError(_("Failed to set power settings:")
                          + "\n" + traceback.format_exc())
    def endcb(self):
        """printcore callback for print end.

        Restores power settings, logs duration, accumulates total filament
        used and runs the optional user final command.
        """
        try:
            powerset_print_stop()
        except:
            # Power management is best-effort; log and carry on
            self.logError(_("Failed to set power settings:")
                          + "\n" + traceback.format_exc())
        # queueindex == 0 means the queue completed (not cancelled mid-print)
        if self.p.queueindex == 0:
            print_duration = int(time.time() - self.starttime + self.extra_print_time)
            self.log(_("Print ended at: %(end_time)s and took %(duration)s") % {"end_time": format_time(time.time()),
                                                                               "duration": format_duration(print_duration)})
            # Update total filament length used
            new_total = self.settings.total_filament_used + self.fgcode.filament_length
            self.set("total_filament_used", new_total)
            if not self.settings.final_command:
                return
            output = get_command_output(self.settings.final_command,
                                        {"$s": str(self.filename),
                                         "$t": format_duration(print_duration)})
            if output:
                self.log("Final command output:")
                self.log(output.rstrip())
    def recvcb_report(self, l):
        """Classify a received line as position/temperature report (bitmask).

        Returns a REPORT_* bitmask; REPORT_MANUAL is OR-ed in when the report
        answers an M114/M105 the user typed (counted in precmd), so recvcb
        knows to echo it.
        """
        isreport = REPORT_NONE
        if "ok C:" in l or "Count" in l \
           or ("X:" in l and len(gcoder.m114_exp.findall(l)) == 6):
            self.posreport = l
            isreport = REPORT_POS
            if self.userm114 > 0:
                self.userm114 -= 1
                isreport |= REPORT_MANUAL
        if "ok T:" in l or tempreading_exp.findall(l):
            self.tempreadings = l
            isreport = REPORT_TEMP
            if self.userm105 > 0:
                self.userm105 -= 1
                isreport |= REPORT_MANUAL
            else:
                # automatic poll answered: reset the M105 cycle counter
                self.m105_waitcycles = 0
        return isreport
    def recvcb_actions(self, l):
        """Handle firmware-initiated messages; return True when the line was consumed.

        "!!" lines are fatal errors (pause + show message); "// action:..."
        comment lines carry host action commands (pause/resume/disconnect).
        """
        if l.startswith("!!"):
            self.do_pause(None)
            msg = l.split(" ", 1)
            if len(msg) > 1 and self.silent is False: self.logError(msg[1].ljust(15))
            sys.stdout.write(self.promptf())
            sys.stdout.flush()
            return True
        elif l.startswith("//"):
            command = l.split(" ", 1)
            if len(command) > 1:
                command = command[1]
                self.log(_("Received command %s") % command)
                command = command.split(":")
                if len(command) == 2 and command[0] == "action":
                    command = command[1]
                    if command == "pause":
                        self.do_pause(None)
                        sys.stdout.write(self.promptf())
                        sys.stdout.flush()
                        return True
                    elif command == "resume":
                        self.do_resume(None)
                        sys.stdout.write(self.promptf())
                        sys.stdout.flush()
                        return True
                    elif command == "disconnect":
                        self.do_disconnect(None)
                        sys.stdout.write(self.promptf())
                        sys.stdout.flush()
                        return True
        return False
def recvcb(self, l):
    """Main receive callback: dispatch one printer reply line."""
    l = l.rstrip()
    # Feed transient listeners (e.g. tempcb registered by do_gettemp) first.
    for listener in self.recvlisteners:
        listener(l)
    if not self.recvcb_actions(l):
        report_type = self.recvcb_report(l)
        if report_type & REPORT_TEMP:
            self.status.update_tempreading(l)
        # Echo to the console anything that is not a routine ack or an
        # automatic report, unless we are listing SD files or monitoring.
        if l != "ok" and not self.sdlisting \
           and not self.monitoring and (report_type == REPORT_NONE or report_type & REPORT_MANUAL):
            if l[:5] == "echo:":
                # Strip the firmware's "echo:" prefix before display.
                l = l[5:].lstrip()
            if self.silent is False: self.log("\r" + l.ljust(15))
            sys.stdout.write(self.promptf())
            sys.stdout.flush()
def layer_change_cb(self, newlayer):
    """Callback fired when the print reaches a new layer.

    Records the layer's Z height and feeds elapsed time to the ETA model.
    """
    z = self.fgcode.all_layers[newlayer].z
    if z is not None:
        self.curlayer = z
    eta_model = self.compute_eta
    if eta_model:
        elapsed = int(time.time() - self.starttime + self.extra_print_time)
        eta_model.update_layer(newlayer, elapsed)
def get_eta(self):
    """Return (seconds_remaining, seconds_estimate, progress) for the job.

    Progress is a 0..1 fraction while uploading/SD printing, or the queue
    index when the layer-based ETA model is active.
    """
    if self.sdprinting or self.uploading:
        if self.uploading:
            done = float(self.p.queueindex) / len(self.p.mainqueue)
        else:
            done = float(self.percentdone / 100.0)
        elapsed = int(time.time() - self.starttime + self.extra_print_time)
        # Clamp the denominator to avoid division by zero at 0% progress.
        estimate = elapsed / max(done, 0.000001)
        return estimate - elapsed, estimate, done
    if self.compute_eta is not None:
        elapsed = int(time.time() - self.starttime + self.extra_print_time)
        remain, estimate = self.compute_eta(self.p.queueindex, elapsed)
        return remain, estimate, self.p.queueindex
    return 1, 1, 0
def do_eta(self, l):
    """Print the estimated remaining print time (only while printing)."""
    if not self.p.printing:
        self.logError(_("Printer is not currently printing. No ETA available."))
    else:
        secondsremain, secondsestimate, progress = self.get_eta()
        eta = _("Est: %s of %s remaining") % (format_duration(secondsremain),
                                              format_duration(secondsestimate))
        self.log(eta.strip())
def help_eta(self):
    # cmd.Cmd help hook for the "eta" command.
    self.log(_("Displays estimated remaining print time."))
# --------------------------------------------------------------
# Temperature handling
# --------------------------------------------------------------
def set_temp_preset(self, key, value):
    """Refresh the hotend or bed temperature presets from settings.

    Settings-change callback; *key* selects the bed presets when it starts
    with "bed", the hotend presets otherwise.
    """
    if key.startswith("bed"):
        self.bedtemps["pla"] = str(self.settings.bedtemp_pla)
        self.bedtemps["abs"] = str(self.settings.bedtemp_abs)
        self.log("Bed temperature presets updated, pla:%s, abs:%s" % (self.bedtemps["pla"], self.bedtemps["abs"]))
    else:
        self.temps["pla"] = str(self.settings.temperature_pla)
        self.temps["abs"] = str(self.settings.temperature_abs)
        self.log("Hotend temperature presets updated, pla:%s, abs:%s" % (self.temps["pla"], self.temps["abs"]))
def tempcb(self, l):
    """Recv listener used while waiting for an M105 temperature reply."""
    if "T:" in l:
        # NOTE(review): this blanket-replaces every "T" and "B" character,
        # not just the "T:"/"B:" field labels -- confirm replies never
        # contain other T/B letters that would be mangled.
        self.log(l.strip().replace("T", "Hotend").replace("B", "Bed").replace("ok ", ""))
def do_gettemp(self, l):
    """Report the hotend (and bed, when present) temperatures.

    With "dynamic" anywhere in the argument, also enables continuous
    temperature reporting mode.
    """
    if "dynamic" in l:
        self.dynamic_temp = True
    if self.p.online:
        self.p.send_now("M105")
        # Give the printer a moment to answer before reading the status.
        time.sleep(0.75)
        # The hotend line was duplicated verbatim in both branches of the
        # original if/else; log it once and append the bed line only when
        # a heated bed is present.
        self.log(_("Hotend: %s/%s") % (self.status.extruder_temp, self.status.extruder_temp_target))
        if self.status.bed_enabled:
            self.log(_("Bed: %s/%s") % (self.status.bed_temp, self.status.bed_temp_target))
def help_gettemp(self):
    # cmd.Cmd help hook for the "gettemp" command.
    self.log(_("Read the extruder and bed temperature."))
def do_settemp(self, l):
    """Set the hotend target temperature.

    Accepts a number in Celsius or a preset keyword (e.g. "pla", "abs"),
    asks for confirmation above 250 degrees, and sends M104.
    """
    l = l.lower().replace(", ", ".")
    # Expand preset keywords to their configured numeric values.
    for i in self.temps.keys():
        l = l.replace(i, self.temps[i])
    try:
        f = float(l)
    except ValueError:  # narrowed from a bare except: only parse errors expected
        self.logError(_("You must enter a temperature."))
        return
    if f >= 0:
        if f > 250:
            # Guard against accidental extreme temperatures.
            self.log(_("%s is a high temperature to set your extruder to. Are you sure you want to do that?") % f)
            if not self.confirm():
                return
        if self.p.online:
            self.p.send_now("M104 S" + l)
            self.log(_("Setting hotend temperature to %s degrees Celsius.") % f)
        else:
            self.logError(_("Printer is not online."))
    else:
        self.logError(_("You cannot set negative temperatures. To turn the hotend off entirely, set its temperature to 0."))
def help_settemp(self):
    # cmd.Cmd help hook for the "settemp" command.
    self.log(_("Sets the hotend temperature to the value entered."))
    self.log(_("Enter either a temperature in celsius or one of the following keywords"))
    # List each preset keyword with its configured value.
    self.log(", ".join([i + "(" + self.temps[i] + ")" for i in self.temps.keys()]))
def complete_settemp(self, text, line, begidx, endidx):
    """Tab-complete preset keywords for the "settemp" command."""
    words = line.split()
    # Only complete the first argument (keyword being typed, or about to be).
    typing_first_arg = (len(words) == 2 and line[-1] != " ") or \
                       (len(words) == 1 and line[-1] == " ")
    if typing_first_arg:
        return [name for name in self.temps.keys() if name.startswith(text)]
def do_bedtemp(self, l):
    """Set the bed target temperature.

    Accepts a number in Celsius or a preset keyword and sends M140.
    """
    l = l.lower().replace(", ", ".")
    # Expand preset keywords to their configured numeric values.
    for i in self.bedtemps.keys():
        l = l.replace(i, self.bedtemps[i])
    try:
        f = float(l)
    except ValueError:  # narrowed from a bare except
        self.logError(_("You must enter a temperature."))
        # Bug fix: execution previously fell through to the final else and
        # also printed the "negative temperatures" error after a parse
        # failure; stop after reporting the parse error.
        return
    if f >= 0:
        if self.p.online:
            self.p.send_now("M140 S" + l)
            self.log(_("Setting bed temperature to %s degrees Celsius.") % f)
        else:
            self.logError(_("Printer is not online."))
    else:
        self.logError(_("You cannot set negative temperatures. To turn the bed off entirely, set its temperature to 0."))
def help_bedtemp(self):
    # cmd.Cmd help hook for the "bedtemp" command.
    self.log(_("Sets the bed temperature to the value entered."))
    self.log(_("Enter either a temperature in celsius or one of the following keywords"))
    # List each preset keyword with its configured value.
    self.log(", ".join([i + "(" + self.bedtemps[i] + ")" for i in self.bedtemps.keys()]))
def complete_bedtemp(self, text, line, begidx, endidx):
    """Tab-complete preset keywords for the "bedtemp" command."""
    words = line.split()
    # Only complete the first argument (keyword being typed, or about to be).
    typing_first_arg = (len(words) == 2 and line[-1] != " ") or \
                       (len(words) == 1 and line[-1] == " ")
    if typing_first_arg:
        return [name for name in self.bedtemps.keys() if name.startswith(text)]
def do_monitor(self, l):
    """Poll temperatures (and SD progress, if SD printing) until ^C.

    Optional argument: polling interval in seconds (default 5).
    """
    interval = 5
    if not self.p.online:
        self.logError(_("Printer is not online. Please connect to it first."))
        return
    if not (self.p.printing or self.sdprinting):
        self.logError(_("Printer is not printing. Please print something before monitoring."))
        return
    self.log(_("Monitoring printer, use ^C to interrupt."))
    if len(l):
        try:
            interval = float(l)
        except:
            # NOTE(review): bare except -- a ValueError from float() is the
            # expected failure here; confirm nothing else should be caught.
            self.logError(_("Invalid period given."))
    self.log(_("Updating values every %f seconds.") % (interval,))
    self.monitoring = 1
    prev_msg_len = 0
    try:
        while True:
            self.p.send_now("M105")  # request a temperature report
            if self.sdprinting:
                self.p.send_now("M27")  # request SD print status
            time.sleep(interval)
            if self.p.printing:
                preface = _("Print progress: ")
                progress = 100 * float(self.p.queueindex) / len(self.p.mainqueue)
            elif self.sdprinting:
                preface = _("SD print progress: ")
                progress = self.percentdone
            prev_msg = preface + "%.1f%%" % progress
            if self.silent is False:
                # Pad to the previous message length so the old line is
                # fully overwritten in place.
                sys.stdout.write("\r" + prev_msg.ljust(prev_msg_len))
                sys.stdout.flush()
            prev_msg_len = len(prev_msg)
    except KeyboardInterrupt:
        # ^C is the normal way to leave monitoring mode.
        if self.silent is False: self.log(_("Done monitoring."))
    self.monitoring = 0
def help_monitor(self):
    # cmd.Cmd help hook for the "monitor" command.
    self.log(_("Monitor a machine's temperatures and an SD print's status."))
    self.log(_("monitor - Reports temperature and SD print status (if SD printing) every 5 seconds"))
    self.log(_("monitor 2 - Reports temperature and SD print status (if SD printing) every 2 seconds"))
# --------------------------------------------------------------
# Manual printer controls
# --------------------------------------------------------------
def do_tool(self, l):
    """Switch the active tool by sending a Tn G-code."""
    try:
        tool = int(l.lower().strip())
    except ValueError:  # narrowed from a bare except
        self.logError(_("You must specify the tool index as an integer."))
        # Bug fix: execution previously fell through and also printed the
        # "negative tool numbers" error after a parse failure.
        return
    if tool >= 0:
        if self.p.online:
            self.p.send_now("T%d" % tool)
            self.log(_("Using tool %d.") % tool)
        else:
            self.logError(_("Printer is not online."))
    else:
        self.logError(_("You cannot set negative tool numbers."))
def help_tool(self):
    # cmd.Cmd help hook for the "tool" command.
    self.log(_("Switches to the specified tool (e.g. doing tool 1 will emit a T1 G-Code)."))
def do_move(self, l):
    """Move an axis by a relative distance at an optional feedrate.

    Usage: move <axis> <distance> [feedrate].  Emits G91/G0/G90 so the
    printer is left in absolute positioning mode.
    """
    if len(l.split()) < 2:
        self.logError(_("No move specified."))
        return
    if self.p.printing:
        self.logError(_("Printer is currently printing. Please pause the print before you issue manual commands."))
        return
    if not self.p.online:
        self.logError(_("Printer is not online. Unable to move."))
        return
    l = l.split()
    # Axis -> default feedrate dispatch table replaces the if/elif chain.
    default_feeds = {"x": self.settings.xy_feedrate,
                     "y": self.settings.xy_feedrate,
                     "z": self.settings.z_feedrate,
                     "e": self.settings.e_feedrate}
    axis = l[0].lower()
    if axis not in default_feeds:
        self.logError(_("Unknown axis."))
        return
    feed = default_feeds[axis]
    try:
        float(l[1])  # check if distance can be a float
    except ValueError:  # narrowed from a bare except
        self.logError(_("Invalid distance"))
        return
    try:
        feed = int(l[2])
    except (IndexError, ValueError):
        # Optional feedrate not given or not numeric: keep the default.
        pass
    # Switch to relative mode for the move, then restore absolute mode.
    self.p.send_now("G91")
    self.p.send_now("G0 " + axis.upper() + str(l[1]) + " F" + str(feed))
    self.p.send_now("G90")
def help_move(self):
    # cmd.Cmd help hook for the "move" command.
    self.log(_("Move an axis. Specify the name of the axis and the amount. "))
    self.log(_("move X 10 will move the X axis forward by 10mm at %s mm/min (default XY speed)") % self.settings.xy_feedrate)
    self.log(_("move Y 10 5000 will move the Y axis forward by 10mm at 5000mm/min"))
    self.log(_("move Z -1 will move the Z axis down by 1mm at %s mm/min (default Z speed)") % self.settings.z_feedrate)
    self.log(_("Common amounts are in the tabcomplete list."))
def complete_move(self, text, line, begidx, endidx):
    """Tab-completion for "move": axis letter first, then common distances."""
    # First argument: axis name.
    if (len(line.split()) == 2 and line[-1] != " ") or (len(line.split()) == 1 and line[-1] == " "):
        return [i for i in ["X ", "Y ", "Z ", "E "] if i.lower().startswith(text)]
    # Second argument: common distances.  If the user already typed a
    # leading "-", strip it from the candidates so readline appends only
    # the remainder.
    elif len(line.split()) == 3 or (len(line.split()) == 2 and line[-1] == " "):
        base = line.split()[-1]
        rlen = 0
        if base.startswith("-"):
            rlen = 1
        if line[-1] == " ":
            base = ""
        return [i[rlen:] for i in ["-100", "-10", "-1", "-0.1", "100", "10", "1", "0.1", "-50", "-5", "-0.5", "50", "5", "0.5", "-200", "-20", "-2", "-0.2", "200", "20", "2", "0.2"] if i.startswith(base)]
    else:
        return []
def do_extrude(self, l, override = None, overridefeed = 300):
    """Extrude filament: "extrude [length] [feedrate]".

    Negative lengths retract.  *override*/*overridefeed* let do_reverse
    reuse this entry point with a forced length and feedrate.
    """
    length = self.settings.default_extrusion  # default extrusion length
    feed = self.settings.e_feedrate  # default speed
    if not self.p.online:
        # Consistency fix: message was the only one in this family not
        # wrapped for translation.
        self.logError(_("Printer is not online. Unable to extrude."))
        return
    if self.p.printing:
        self.logError(_("Printer is currently printing. Please pause the print before you issue manual commands."))
        return
    ls = l.split()
    if len(ls):
        try:
            length = float(ls[0])
        except ValueError:  # narrowed from a bare except
            self.logError(_("Invalid length given."))
    if len(ls) > 1:
        try:
            feed = int(ls[1])
        except ValueError:  # narrowed from a bare except
            self.logError(_("Invalid speed given."))
    if override is not None:
        length = override
        feed = overridefeed
    self.do_extrude_final(length, feed)
def do_extrude_final(self, length, feed):
    """Send the relative extrude/retract move (G91/G1 E/G90)."""
    if length > 0:
        self.log(_("Extruding %fmm of filament.") % (length,))
    elif length < 0:
        self.log(_("Reversing %fmm of filament.") % (-length,))
    else:
        self.log(_("Length is 0, not doing anything."))
        # Bug fix: the G-code sequence was still sent for a zero-length
        # move despite the message; honor it and skip the no-op commands.
        return
    self.p.send_now("G91")
    self.p.send_now("G1 E" + str(length) + " F" + str(feed))
    self.p.send_now("G90")
def help_extrude(self):
    # cmd.Cmd help hook for the "extrude" command.
    self.log(_("Extrudes a length of filament, 5mm by default, or the number of mm given as a parameter"))
    self.log(_("extrude - extrudes 5mm of filament at 300mm/min (5mm/s)"))
    self.log(_("extrude 20 - extrudes 20mm of filament at 300mm/min (5mm/s)"))
    self.log(_("extrude -5 - REVERSES 5mm of filament at 300mm/min (5mm/s)"))
    self.log(_("extrude 10 210 - extrudes 10mm of filament at 210mm/min (3.5mm/s)"))
def do_reverse(self, l):
    """Retract filament: parse length/speed, then delegate to do_extrude
    with the length negated."""
    length = self.settings.default_extrusion  # default extrusion length
    feed = self.settings.e_feedrate  # default speed
    if not self.p.online:
        self.logError(_("Printer is not online. Unable to reverse."))
        return
    if self.p.printing:
        self.logError(_("Printer is currently printing. Please pause the print before you issue manual commands."))
        return
    ls = l.split()
    if len(ls):
        try:
            length = float(ls[0])
        except ValueError:  # narrowed from a bare except
            self.logError(_("Invalid length given."))
    if len(ls) > 1:
        try:
            feed = int(ls[1])
        except ValueError:  # narrowed from a bare except
            self.logError(_("Invalid speed given."))
    self.do_extrude("", -length, feed)
def help_reverse(self):
    # cmd.Cmd help hook for the "reverse" command.
    self.log(_("Reverses the extruder, 5mm by default, or the number of mm given as a parameter"))
    self.log(_("reverse - reverses 5mm of filament at 300mm/min (5mm/s)"))
    self.log(_("reverse 20 - reverses 20mm of filament at 300mm/min (5mm/s)"))
    self.log(_("reverse 10 210 - extrudes 10mm of filament at 210mm/min (3.5mm/s)"))
    self.log(_("reverse -5 - EXTRUDES 5mm of filament at 300mm/min (5mm/s)"))
def do_home(self, l):
    """Home axes and/or zero the extruder based on letters in the argument.

    "x"/"y"/"z" send G28 for that axis, "e" zeroes the extruder with G92;
    with no argument, home everything and zero the extruder.
    """
    if not self.p.online:
        self.logError(_("Printer is not online. Unable to move."))
        return
    if self.p.printing:
        self.logError(_("Printer is currently printing. Please pause the print before you issue manual commands."))
        return
    wanted = l.lower()
    # Table-driven dispatch preserves the original x, y, z, e order.
    for letter, gcode in (("x", "G28 X0"), ("y", "G28 Y0"),
                          ("z", "G28 Z0"), ("e", "G92 E0")):
        if letter in wanted:
            self.p.send_now(gcode)
    if not len(l):
        self.p.send_now("G28")
        self.p.send_now("G92 E0")
def help_home(self):
    # cmd.Cmd help hook for the "home" command.
    self.log(_("Homes the printer"))
    self.log(_("home - homes all axes and zeroes the extruder(Using G28 and G92)"))
    self.log(_("home xy - homes x and y axes (Using G28)"))
    self.log(_("home z - homes z axis only (Using G28)"))
    self.log(_("home e - set extruder position to zero (Using G92)"))
    self.log(_("home xyze - homes all axes and zeroes the extruder (Using G28 and G92)"))
def do_off(self, l):
    # Thin cmd wrapper; the actual shutdown sequence lives in off().
    self.off()
def off(self, ignore = None):
    """Shut everything down: motors, hotend, bed, fan and power supply.

    Pauses first if a print is running.  *ignore* keeps the signature
    compatible with event-handler callers.
    """
    if self.p.online:
        if self.p.printing: self.pause(None)
        self.log(_("; Motors off"))
        self.onecmd("M84")
        self.log(_("; Extruder off"))
        self.onecmd("M104 S0")
        self.log(_("; Heatbed off"))
        self.onecmd("M140 S0")
        self.log(_("; Fan off"))
        self.onecmd("M107")
        self.log(_("; Power supply off"))
        self.onecmd("M81")
    else:
        self.logError(_("Printer is not online. Unable to turn it off."))
def help_off(self):
    # cmd.Cmd help hook for the "off" command.
    self.log(_("Turns off everything on the printer"))
# --------------------------------------------------------------
# Host commands handling
# --------------------------------------------------------------
def process_host_command(self, command):
    """Execute a host command embedded in G-code (a line starting ";@")."""
    command = command.lstrip()
    # Guard clause: anything that is not a ";@" host command is ignored.
    if not command.startswith(";@"):
        return
    command = command[2:]
    self.log(_("G-Code calling host command \"%s\"") % command)
    self.onecmd(command)
def do_run_script(self, l):
    """Run an external command and log its stdout.

    The token $s in the command expands to the current G-code filename.
    """
    p = run_command(l, {"$s": str(self.filename)}, stdout = subprocess.PIPE)
    for line in p.stdout.readlines():
        self.log("<< " + line.strip())
def help_run_script(self):
    # cmd.Cmd help hook for the "run_script" command.
    self.log(_("Runs a custom script. Current gcode filename can be given using $s token."))
def do_run_gcode_script(self, l):
    """Run an external command and execute each line of its stdout as a
    pronsole command (typically G-code).  $s expands to the filename."""
    p = run_command(l, {"$s": str(self.filename)}, stdout = subprocess.PIPE)
    for line in p.stdout.readlines():
        self.onecmd(line.strip())
def help_run_gcode_script(self):
    # cmd.Cmd help hook for the "run_gcode_script" command.
    self.log(_("Runs a custom script which output gcode which will in turn be executed. Current gcode filename can be given using $s token."))
| gpl-3.0 |
dodocat/git-repo | subcmds/manifest.py | 89 | 2770 | #
# Copyright (C) 2009 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import os
import sys
from command import PagedCommand
class Manifest(PagedCommand):
  """repo subcommand that exports the combined manifest for inspection."""
  common = False
  helpSummary = "Manifest inspection utility"
  helpUsage = """
%prog [-o {-|NAME.xml} [-r]]
"""
  _helpDescription = """
With the -o option, exports the current manifest for inspection.
The manifest and (if present) local_manifest.xml are combined
together to produce a single manifest file. This file can be stored
in a Git repository for use during future 'repo init' invocations.
"""

  @property
  def helpDescription(self):
    """Static description with docs/manifest-format.txt appended."""
    helptext = self._helpDescription + '\n'
    r = os.path.dirname(__file__)
    r = os.path.dirname(r)
    # Context manager ensures the docs file is closed even if reading
    # raises (it was previously leaked on error).
    with open(os.path.join(r, 'docs', 'manifest-format.txt')) as fd:
      for line in fd:
        helptext += line
    return helptext

  def _Options(self, p):
    p.add_option('-r', '--revision-as-HEAD',
                 dest='peg_rev', action='store_true',
                 help='Save revisions as current HEAD')
    p.add_option('--suppress-upstream-revision', dest='peg_rev_upstream',
                 default=True, action='store_false',
                 help='If in -r mode, do not write the upstream field. '
                 'Only of use if the branch names for a sha1 manifest are '
                 'sensitive.')
    p.add_option('-o', '--output-file',
                 dest='output_file',
                 default='-',
                 help='File to save the manifest to',
                 metavar='-|NAME.xml')

  def _Output(self, opt):
    """Write the merged manifest to the requested file, or stdout for -."""
    if opt.output_file == '-':
      fd = sys.stdout
    else:
      fd = open(opt.output_file, 'w')
    self.manifest.Save(fd,
                       peg_rev = opt.peg_rev,
                       peg_rev_upstream = opt.peg_rev_upstream)
    if fd is not sys.stdout:
      # Bug fix: fd.close() previously ran unconditionally, which closed
      # sys.stdout when "-o -" was used and broke any later output.
      fd.close()
      print('Saved manifest to %s' % opt.output_file, file=sys.stderr)

  def Execute(self, opt, args):
    """Entry point: validate arguments and export the manifest."""
    if args:
      self.Usage()
    if opt.output_file is not None:
      self._Output(opt)
      return
    print('error: no operation to perform', file=sys.stderr)
    print('error: see repo help manifest', file=sys.stderr)
    sys.exit(1)
| apache-2.0 |
anbangleo/NlsdeWeb | Python-3.6.0/Lib/lib2to3/fixes/fix_map.py | 170 | 3058 | # Copyright 2007 Google, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.
"""Fixer that changes map(F, ...) into list(map(F, ...)) unless there
exists a 'from future_builtins import map' statement in the top-level
namespace.
As a special case, map(None, X) is changed into list(X). (This is
necessary because the semantics are changed in this case -- the new
map(None, X) is equivalent to [(x,) for x in X].)
We avoid the transformation (except for the special case mentioned
above) if the map() call is directly contained in iter(<>), list(<>),
tuple(<>), sorted(<>), ...join(<>), or for V in <>:.
NOTE: This is still not correct if the original code was depending on
map(F, X, Y, ...) to go on until the longest argument is exhausted,
substituting None for missing values -- like zip(), it now stops as
soon as the shortest argument is exhausted.
"""
# Local imports
from ..pgen2 import token
from .. import fixer_base
from ..fixer_util import Name, Call, ListComp, in_special_context
from ..pygram import python_symbols as syms
class FixMap(fixer_base.ConditionalFix):
    """2to3 fixer wrapping map() calls for Python 3 (see module docstring)."""
    BM_compatible = True

    # Three alternatives: map(None, X), map(lambda ...: ..., it), and any
    # other map(...) call.
    PATTERN = """
    map_none=power<
        'map'
        trailer< '(' arglist< 'None' ',' arg=any [','] > ')' >
    >
    |
    map_lambda=power<
        'map'
        trailer<
            '('
            arglist<
                lambdef< 'lambda'
                         (fp=NAME | vfpdef< '(' fp=NAME ')'> ) ':' xp=any
                >
                ','
                it=any
            >
            ')'
        >
    >
    |
    power<
        'map' trailer< '(' [arglist=any] ')' >
    >
    """

    # Skip files that do "from future_builtins import map".
    skip_on = 'future_builtins.map'

    def transform(self, node, results):
        """Rewrite a matched map() call; return the replacement node."""
        if self.should_skip(node):
            return
        if node.parent.type == syms.simple_stmt:
            # map() used as a bare statement: its result is discarded, so
            # the author probably wanted a loop; still wrap for safety.
            self.warning(node, "You should use a for loop here")
            new = node.clone()
            new.prefix = ""
            new = Call(Name("list"), [new])
        elif "map_lambda" in results:
            # map(lambda x: expr, it) -> [expr for x in it]
            new = ListComp(results["xp"].clone(),
                           results["fp"].clone(),
                           results["it"].clone())
        else:
            if "map_none" in results:
                # map(None, X) -> list(X)  (single-argument special case)
                new = results["arg"].clone()
            else:
                if "arglist" in results:
                    args = results["arglist"]
                    if args.type == syms.arglist and \
                       args.children[0].type == token.NAME and \
                       args.children[0].value == "None":
                        # Multi-argument map(None, ...) cannot be converted
                        # faithfully; warn and leave the code untouched.
                        self.warning(node, "cannot convert map(None, ...) "
                                     "with multiple arguments because map() "
                                     "now truncates to the shortest sequence")
                        return
                if in_special_context(node):
                    # Already consumed by iter()/list()/for-in etc., where
                    # an iterator is fine: no wrapping needed.
                    return None
                new = node.clone()
                new.prefix = ""
                new = Call(Name("list"), [new])
        new.prefix = node.prefix
        return new
| mit |
tectronics/freeermind | src/test/draw2d_dnd_2.py | 1 | 2844 | from org.eclipse.draw2d import MouseMotionListener
from org.eclipse.draw2d import FigureCanvas
from org.eclipse.draw2d import Panel
from org.eclipse.draw2d import Figure
from org.eclipse.draw2d import XYLayout
from org.eclipse.swt.widgets import Display;
from org.eclipse.swt.widgets import Shell;
from org.eclipse.draw2d.text import *
from org.eclipse.draw2d.geometry import Rectangle
from org.eclipse.draw2d import LightweightSystem
import java.lang.System as sys_
def main():
    """Jython/SWT experiment: show a draw2d FlowPage with mouse listeners.

    Builds an SWT shell hosting a draw2d FigureCanvas, attaches a
    MouseMotionListener to a TextFlow, and runs the SWT event loop.
    """
    display = Display() #Display.getDefault()
    shell = Shell(display)
    shell.setMaximized(True)
    shell.setMinimumSize(800, 640)
    # start here
    lws = LightweightSystem();
    canvas = FigureCanvas(shell, lws)
    from org.eclipse.draw2d.FigureCanvas import AUTOMATIC
    # Let the viewport track the canvas size and show scrollbars on demand.
    canvas.getViewport().setContentsTracksWidth(True)
    canvas.getViewport().setContentsTracksHeight(True)
    canvas.setHorizontalScrollBarVisibility(AUTOMATIC)
    canvas.setVerticalScrollBarVisibility(AUTOMATIC)
    panel = Panel() # NOTE: Figure() or Panel() make no difference, it won't show the widgets
    panel.setLayoutManager(XYLayout())
    #lws = canvas.getLightweightSystem()
    #lws.setContents(panel)
    text = TextFlow('Fuck this shit!')
    flowPage = FlowPage()
    flowPage.add(text)
    def fn(event):
        # Shared handler for every mouse-motion event type below.
        print 'Mouse passed over me!'
    # Build a listener class by assigning the same handler to each hook.
    class MML(MouseMotionListener): pass
    MML.mouseEntered = fn
    MML.mouseDragged = fn
    MML.mouseExited = fn
    MML.mouseHover = fn
    MML.mouseMoved = fn
    mml = MML()
    flowPage.addMouseMotionListener(mml)
    text.addMouseMotionListener(mml)
    print 'flowPage.isEnabled: ', flowPage.isEnabled()
    print 'text.isCoordinateSystem: ', text.isCoordinateSystem()
    print 'text.isShowing: ', text.isShowing()
    print 'text.isVisible: ', text.isVisible()
    # for i in xrange(3):
    # flowPage.add(text) #, Rectangle(10*i,40*i,80,20))
    panel.add(flowPage, Rectangle(10,40,80,20))
    canvas.setContents(panel)
    # for i in xrange(3):
    # print 'test',9,i
    # panel.add(TextFlow("Fuck")) #, Rectangle(10*i,40*i,80,20))
    #
    # for i in xrange(3):
    # print 'test',10,i
    # panel.add(TextFlow("Shit"), Rectangle(10*-i, 40*-i, 80, 20))
    # end here
    shell.text = "Test draw2d"
    shell.pack()
    shell.open()
    try:
        # Standard SWT event loop: dispatch pending events, sleep otherwise.
        while not shell.isDisposed():
            if not display.readAndDispatch():
                display.sleep()
    finally:
        # Dispose native resources and force the JVM to exit.
        if not shell.isDisposed():
            shell.dispose()
        if not display.isDisposed():
            display.dispose()
        sys_.exit(0)
# Script entry point when run directly under Jython.
if __name__ == '__main__':
    main()
| gpl-3.0 |
sudosurootdev/external_chromium_org | net/tools/testserver/echo_message.py | 187 | 13195 | # Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Provides utility functions for TCP/UDP echo servers and clients.
This program has classes and functions to encode, decode, calculate checksum
and verify the "echo request" and "echo response" messages. "echo request"
message is an echo message sent from the client to the server. "echo response"
message is a response from the server to the "echo request" message from the
client.
The format of "echo request" message is
<version><checksum><payload_size><payload>. <version> is the version number
of the "echo request" protocol. <checksum> is the checksum of the <payload>.
<payload_size> is the size of the <payload>. <payload> is the echo message.
The format of "echo response" message is
<version><checksum><payload_size><key><encoded_payload>.<version>,
<checksum> and <payload_size> are same as what is in the "echo request" message.
<encoded_payload> is encoded version of the <payload>. <key> is a randomly
generated key that is used to encode/decode the <payload>.
"""
__author__ = 'rtenneti@google.com (Raman Tenneti)'
from itertools import cycle
from itertools import izip
import random
class EchoHeader(object):
    """Header shared by echo request and response messages.

    Serializes and parses the <version><checksum><payload_size> prefix that
    both message types start with, and holds the parsed values.
    """
    # Protocol version prefix.
    VERSION_STRING = '01'
    # <checksum> field: starts after the version, 10 zero-padded digits
    # (maximum value below 2 ** 31 - 1).
    CHECKSUM_START = 2
    CHECKSUM_LENGTH = 10
    CHECKSUM_FORMAT = '%010d'
    CHECKSUM_END = CHECKSUM_START + CHECKSUM_LENGTH
    # <payload_size> field: 7 zero-padded digits, so at most 9,999,999
    # payload bytes.
    PAYLOAD_SIZE_START = CHECKSUM_END
    PAYLOAD_SIZE_LENGTH = 7
    PAYLOAD_SIZE_FORMAT = '%07d'
    PAYLOAD_SIZE_END = PAYLOAD_SIZE_START + PAYLOAD_SIZE_LENGTH

    def __init__(self, checksum=0, payload_size=0):
        """Store the given checksum and payload size."""
        self.checksum = checksum
        self.payload_size = payload_size

    def ParseAndInitialize(self, echo_message):
        """Extract checksum and payload_size from a wire-format message.

        Raises ValueError when the message is empty or too short to hold
        a complete header.
        """
        header_length = EchoHeader.PAYLOAD_SIZE_END
        if not echo_message or len(echo_message) < header_length:
            raise ValueError('Invalid data:%s' % echo_message)
        checksum_field = echo_message[
            EchoHeader.CHECKSUM_START:EchoHeader.CHECKSUM_END]
        size_field = echo_message[
            EchoHeader.PAYLOAD_SIZE_START:EchoHeader.PAYLOAD_SIZE_END]
        self.checksum = int(checksum_field)
        self.payload_size = int(size_field)

    def InitializeFromPayload(self, payload):
        """Compute and store the checksum and size for *payload*.

        Raises ValueError when the payload is empty.
        """
        if not payload:
            raise ValueError('Invalid data:%s' % payload)
        self.payload_size = len(payload)
        self.checksum = Checksum(payload, self.payload_size)

    def __str__(self):
        """Serialize the header: version + checksum + payload size."""
        parts = [EchoHeader.VERSION_STRING,
                 EchoHeader.CHECKSUM_FORMAT % self.checksum,
                 EchoHeader.PAYLOAD_SIZE_FORMAT % self.payload_size]
        return ''.join(parts)
class EchoRequest(EchoHeader):
    """An "echo request" message: the common header plus the raw payload."""
    # The payload begins immediately after the fixed-size header.
    PAYLOAD_START = EchoHeader.PAYLOAD_SIZE_END

    def __init__(self):
        """Create an empty request."""
        EchoHeader.__init__(self)
        self.payload = ''

    def __str__(self):
        """Serialize: header followed by the raw payload."""
        return '%s%s' % (EchoHeader.__str__(self), self.payload)

    def InitializeFromPayload(self, payload):
        """Build a request for *payload*, computing its header fields.

        Used by test clients.
        """
        EchoHeader.InitializeFromPayload(self, payload)
        self.payload = payload

    def ParseAndInitialize(self, echo_request_data):
        """Populate header fields and payload from the wire string.

        Raises ValueError when no payload bytes follow the header.
        """
        EchoHeader.ParseAndInitialize(self, echo_request_data)
        if len(echo_request_data) <= EchoRequest.PAYLOAD_START:
            raise ValueError('Invalid data:%s' % echo_request_data)
        self.payload = echo_request_data[EchoRequest.PAYLOAD_START:]
class EchoResponse(EchoHeader):
    """An "echo response" message: header, random key, encoded payload."""
    # <key> field: 6 zero-padded digits directly after the header.
    KEY_START = EchoHeader.PAYLOAD_SIZE_END
    KEY_LENGTH = 6
    KEY_FORMAT = '%06d'
    KEY_END = KEY_START + KEY_LENGTH
    KEY_MIN_VALUE = 0
    KEY_MAX_VALUE = 999999
    # The encoded payload follows the key.
    ENCODED_PAYLOAD_START = KEY_END

    def __init__(self, key='', encoded_payload='', decoded_payload=''):
        """Create a response, optionally pre-populated."""
        EchoHeader.__init__(self)
        self.key = key
        self.encoded_payload = encoded_payload
        self.decoded_payload = decoded_payload

    def __str__(self):
        """Serialize: header, key, then the encoded payload."""
        return '%s%s%s' % (EchoHeader.__str__(self), self.key,
                           self.encoded_payload)

    def InitializeFromEchoRequest(self, echo_request):
        """Build a response for *echo_request* with a fresh random key.

        Copies the request's header fields, encodes its payload with the
        key and keeps the original payload as decoded_payload.
        """
        self.checksum = echo_request.checksum
        self.payload_size = echo_request.payload_size
        key_value = random.randrange(EchoResponse.KEY_MIN_VALUE,
                                     EchoResponse.KEY_MAX_VALUE)
        self.key = EchoResponse.KEY_FORMAT % key_value
        self.encoded_payload = Crypt(echo_request.payload, self.key)
        self.decoded_payload = echo_request.payload

    def ParseAndInitialize(self, echo_response_data=None):
        """Populate header, key and both payload forms from the wire string.

        Raises ValueError when no encoded payload follows the key.
        """
        EchoHeader.ParseAndInitialize(self, echo_response_data)
        if len(echo_response_data) <= EchoResponse.ENCODED_PAYLOAD_START:
            raise ValueError('Invalid echo_response_data:%s' % echo_response_data)
        self.key = echo_response_data[
            EchoResponse.KEY_START:EchoResponse.KEY_END]
        self.encoded_payload = echo_response_data[
            EchoResponse.ENCODED_PAYLOAD_START:]
        self.decoded_payload = Crypt(self.encoded_payload, self.key)
def Crypt(payload, key):
    """XOR *payload* with a repeating *key*; symmetric encode/decode.

    Each payload character is XORed with the next character of the key,
    with the key recycled as often as needed.

    Args:
      payload: (string) text to encode or decode.
      key: (string) key used for the XOR stream.

    Returns:
      The transformed string (applying Crypt twice with the same key
      returns the original payload).
    """
    key_stream = cycle(key)
    out = []
    for ch in payload:
        out.append(chr(ord(ch) ^ ord(next(key_stream))))
    return ''.join(out)
def Checksum(payload, payload_size):
    """Sum of character codes over the payload's leading bytes.

    Only the first min(payload_size, len(payload)) characters contribute,
    so a stated size larger than the actual payload is tolerated.

    Args:
      payload: (string) data to checksum.
      payload_size: (int) number of bytes the sender claims.

    Returns:
      The integer checksum.
    """
    effective = min(payload_size, len(payload))
    return sum(ord(ch) for ch in payload[:effective])
def GetEchoRequestData(payload):
    """Serialize *payload* into an "echo request" wire string.

    Used by the TCP/UDP echo clients to build the request they send.

    Args:
      payload: (string) the echo text.

    Returns:
      The wire-format request string.

    Raises:
      ValueError: when the payload cannot be encoded.
    """
    try:
        request = EchoRequest()
        request.InitializeFromPayload(payload)
        return str(request)
    except (IndexError, ValueError):
        raise ValueError('Invalid payload:%s' % payload)
def GetEchoResponseData(echo_request_data):
    """Verify *echo_request_data* and build the "echo response" message.

    Parses an EchoRequest from the wire data and recomputes the payload
    checksum.  On a checksum mismatch the request is rejected by returning
    None; otherwise an EchoResponse is built from the request and its wire
    representation is returned.  Used by the TCP/UDP echo servers.

    Args:
        echo_request_data: (string)
            The string that echo servers receive from the clients.

    Returns:
        A string representation of the EchoResponse object, or None when
        the embedded checksum does not match the payload.

    Raises:
        ValueError: Invalid echo_request_data
    """
    try:
        if not echo_request_data:
            raise ValueError('Invalid payload:%s' % echo_request_data)
        request = EchoRequest()
        request.ParseAndInitialize(echo_request_data)
        # Reject tampered/corrupted requests.
        if Checksum(request.payload, request.payload_size) != request.checksum:
            return None
        response = EchoResponse()
        response.InitializeFromEchoRequest(request)
        return str(response)
    except (IndexError, ValueError):
        raise ValueError('Invalid payload:%s' % echo_request_data)
def DecodeAndVerify(echo_request_data, echo_response_data):
    """Decode both messages and verify the response matches the request.

    Used by the TCP/UDP echo clients for testing purposes.

    Args:
        echo_request_data: (string)
            The request clients sent to echo servers.
        echo_response_data: (string)
            The response clients received from the echo servers.

    Returns:
        True if the response's checksum and decoded payload both equal the
        request's.

    Raises:
        ValueError: Invalid echo_request_data or Invalid echo_response
    """
    try:
        request = EchoRequest()
        request.ParseAndInitialize(echo_request_data)
    except (IndexError, ValueError):
        raise ValueError('Invalid echo_request:%s' % echo_request_data)
    try:
        response = EchoResponse()
        response.ParseAndInitialize(echo_response_data)
    except (IndexError, ValueError):
        raise ValueError('Invalid echo_response:%s' % echo_response_data)
    if request.checksum != response.checksum:
        return False
    return request.payload == response.decoded_payload
| bsd-3-clause |
andreyvit/pyjamas | examples/libtest/LoopTest.py | 12 | 2559 | from UnitTest import UnitTest
import time
from write import write, writebr
class A(object):
    """Trivial attribute holder exercised by the loop benchmarks below."""

    def __init__(self, x):
        # Value read back via getX() in the attribute-access benchmark.
        self.x = x

    def getX(self):
        """Return the value passed to the constructor."""
        return self.x
def fib(n):
    """Float Fibonacci: fib(1.0) == fib(2.0) == 1.0, defined recursively."""
    # Single-expression form of the base case + recursive step.
    return 1.0 if n < 3.0 else fib(n - 2.0) + fib(n - 1.0)
def int_fib(n):
    """Integer Fibonacci: int_fib(1) == int_fib(2) == 1, defined recursively."""
    return 1 if n < 3 else int_fib(n - 2) + int_fib(n - 1)
def long_fib(n):
    # Python-2 ``long`` Fibonacci used to benchmark long arithmetic; the
    # ``L`` literals are deliberate (Python 2 only) so the pyjamas compiler
    # exercises its long-integer code path.
    if n<3L:
        return 1L
    return long_fib(n-2L)+long_fib(n-1L)
class LoopTest(UnitTest):
    """Micro-benchmarks: each workload is repeated until the clock has
    advanced at least one tick, then iterations/second are reported via
    writebr()."""

    def testLoop1(self):
        # Benchmark a method call plus attribute access (A.getX).
        t1 = t0 = time.time()
        n = 1000
        a = A(1)
        m = 0;
        # Loop until t1 differs from t0 so the elapsed time dt is non-zero.
        while t1 - t0 == 0:
            m += 1
            for i in range(n):
                x = a.getX()
            t1 = time.time()
        dt = t1 - t0
        writebr("Loop1: %.2f/sec" % (n*m/dt))

    def testLoop2(self):
        # Benchmark recursive float arithmetic (fib on floats).
        t1 = t0 = time.time()
        n = 100
        m = 0.0
        while t1 - t0 == 0:
            m += 1.0
            for i in range(n):
                fib(10.0)
            t1 = time.time()
        dt = t1 - t0
        writebr("Loop2 (float): %.2f/sec" % (n*m/dt))

    def testLoop3(self):
        # Benchmark recursive int arithmetic.
        t1 = t0 = time.time()
        n = 100
        m = 0.0
        while t1 - t0 == 0:
            m += 1.0
            for i in range(n):
                int_fib(10)
            t1 = time.time()
        dt = t1 - t0
        writebr("Loop3 (int): %.2f/sec" % (n*m/dt))

    def testLoop4(self):
        # Benchmark recursive Python-2 long arithmetic (10L is intentional).
        t1 = t0 = time.time()
        n = 100
        m = 0.0
        while t1 - t0 == 0:
            m += 1.0
            for i in range(n):
                long_fib(10L)
            t1 = time.time()
        dt = t1 - t0
        writebr("Loop4 (long): %.2f/sec" % (n*m/dt))
'''
def testIterList(self):
lst = []
for i in xrange(1000):
lst.append(i)
t1 = t0 = time.time()
n = 100
m = 0.0
while t1 - t0 == 0:
m += 1.0
for x in xrange(20):
for i in lst:
pass
t1 = time.time()
dt = t1 - t0
writebr("IterList: %.2f/sec" % (n*m/dt))
def testEnumerateList(self):
lst = []
for i in xrange(1000):
lst.append(i)
t1 = t0 = time.time()
n = 100
m = 0.0
while t1 - t0 == 0:
m += 1.0
for x in xrange(2):
for i, j in enumerate(lst):
pass
t1 = time.time()
dt = t1 - t0
writebr("EnumerateList: %.2f/sec" % (n*m/dt))
'''
if __name__ == '__main__':
    # Run the benchmark suite when executed directly.
    LoopTest().run()
| apache-2.0 |
tylerjereddy/scipy | scipy/fft/_pocketfft/tests/test_real_transforms.py | 10 | 16426 | from os.path import join, dirname
from typing import Callable, Dict, Tuple, Union, Type
import numpy as np
from numpy.testing import (
assert_array_almost_equal, assert_equal, assert_allclose)
import pytest
from pytest import raises as assert_raises
from scipy.fft._pocketfft.realtransforms import (
dct, idct, dst, idst, dctn, idctn, dstn, idstn)
fftpack_test_dir = join(dirname(__file__), '..', '..', '..', 'fftpack', 'tests')
MDATA_COUNT = 8
FFTWDATA_COUNT = 14
def is_longdouble_binary_compatible():
    """Return True when '<f16' data is binary compatible with np.longdouble.

    The reference bytes are the 16-byte little-endian representation of 1.0
    as an x87 80-bit extended float; platforms whose longdouble has a
    different layout (or that reject the '<f16' dtype entirely) report
    False, and the longdouble reference files are then not used.
    """
    try:
        one = np.frombuffer(
            b'\x00\x00\x00\x00\x00\x00\x00\x80\xff\x3f\x00\x00\x00\x00\x00\x00',
            dtype='<f16')
        # np.longdouble replaces np.longfloat, an alias removed in NumPy 2.0.
        return one == np.longdouble(1.)
    except TypeError:
        # '<f16' is not a valid dtype on this platform.
        return False
def get_reference_data():
    """Load (once) and return the Matlab and FFTW reference datasets.

    The result is cached in this module's globals so repeated calls — one
    per parametrized test case — do not re-read the .npz files.

    Returns:
        Dict with keys 'FFTWDATA_LONGDOUBLE', 'FFTWDATA_DOUBLE',
        'FFTWDATA_SINGLE', 'FFTWDATA_SIZES', 'X' and 'Y'.
    """
    # BUG FIX: globals() returns a plain dict, so getattr() on it looked up
    # dict *attributes* and always returned None — the cache never hit and
    # every call reloaded the files.  dict.get() reads the stored entry.
    ref = globals().get('__reference_data', None)
    if ref is not None:
        return ref

    # Matlab reference data
    MDATA = np.load(join(fftpack_test_dir, 'test.npz'))
    X = [MDATA['x%d' % i] for i in range(MDATA_COUNT)]
    Y = [MDATA['y%d' % i] for i in range(MDATA_COUNT)]

    # FFTW reference data: the data are organized as follows:
    # * SIZES is an array containing all available sizes
    # * for every type (1, 2, 3, 4) and every size, the array dct_type_size
    #   contains the output of the DCT applied to the input
    #   np.linspace(0, size-1, size)
    FFTWDATA_DOUBLE = np.load(join(fftpack_test_dir, 'fftw_double_ref.npz'))
    FFTWDATA_SINGLE = np.load(join(fftpack_test_dir, 'fftw_single_ref.npz'))
    FFTWDATA_SIZES = FFTWDATA_DOUBLE['sizes']
    assert len(FFTWDATA_SIZES) == FFTWDATA_COUNT

    if is_longdouble_binary_compatible():
        FFTWDATA_LONGDOUBLE = np.load(
            join(fftpack_test_dir, 'fftw_longdouble_ref.npz'))
    else:
        # Fall back to widened double-precision data when the '<f16' files
        # are not binary compatible with this platform's longdouble.
        FFTWDATA_LONGDOUBLE = {k: v.astype(np.longfloat)
                               for k, v in FFTWDATA_DOUBLE.items()}

    ref = {
        'FFTWDATA_LONGDOUBLE': FFTWDATA_LONGDOUBLE,
        'FFTWDATA_DOUBLE': FFTWDATA_DOUBLE,
        'FFTWDATA_SINGLE': FFTWDATA_SINGLE,
        'FFTWDATA_SIZES': FFTWDATA_SIZES,
        'X': X,
        'Y': Y
    }
    globals()['__reference_data'] = ref
    return ref
@pytest.fixture(params=range(FFTWDATA_COUNT))
def fftwdata_size(request):
    # Parametrized over every available FFTW reference size.
    return get_reference_data()['FFTWDATA_SIZES'][request.param]


@pytest.fixture(params=range(MDATA_COUNT))
def mdata_x(request):
    # One Matlab reference input vector per parametrized case.
    return get_reference_data()['X'][request.param]


@pytest.fixture(params=range(MDATA_COUNT))
def mdata_xy(request):
    # Matching (input, expected DCT-II output) pair from the Matlab data.
    ref = get_reference_data()
    y = ref['Y'][request.param]
    x = ref['X'][request.param]
    return x, y
def fftw_dct_ref(type, size, dt):
    """Return (input, expected output, result dtype) for a DCT of *type*.

    The input is np.linspace(0, size-1, size) cast to *dt*; the expected
    output is read from the precomputed FFTW reference files, selected by
    the working precision.
    """
    x = np.linspace(0, size - 1, size).astype(dt)
    dt = np.result_type(np.float32, dt)
    ref = get_reference_data()
    if dt == np.double:
        data = ref['FFTWDATA_DOUBLE']
    elif dt == np.float32:
        data = ref['FFTWDATA_SINGLE']
    elif dt == np.longfloat:
        data = ref['FFTWDATA_LONGDOUBLE']
    else:
        raise ValueError()
    y = data['dct_%d_%d' % (type, size)].astype(dt)
    return x, y, dt
def fftw_dst_ref(type, size, dt):
    """Return (input, expected output, result dtype) for a DST of *type*.

    Mirror of fftw_dct_ref, reading the 'dst_<type>_<size>' reference
    arrays instead.
    """
    x = np.linspace(0, size - 1, size).astype(dt)
    dt = np.result_type(np.float32, dt)
    ref = get_reference_data()
    if dt == np.double:
        data = ref['FFTWDATA_DOUBLE']
    elif dt == np.float32:
        data = ref['FFTWDATA_SINGLE']
    elif dt == np.longfloat:
        data = ref['FFTWDATA_LONGDOUBLE']
    else:
        raise ValueError()
    y = data['dst_%d_%d' % (type, size)].astype(dt)
    return x, y, dt
def ref_2d(func, x, **kwargs):
    """Calculate 2-D reference data from a 1-D transform.

    Applies *func* to every row and then to every column of a copy of *x*;
    the input array is never mutated.
    """
    out = np.array(x, copy=True)
    n_rows, n_cols = out.shape[0], out.shape[1]
    for r in range(n_rows):
        out[r, :] = func(out[r, :], **kwargs)
    for c in range(n_cols):
        out[:, c] = func(out[:, c], **kwargs)
    return out
def naive_dct1(x, norm=None):
    """Textbook O(N^2) DCT-I used as a reference implementation.

    With norm='ortho' the orthonormal scaling is applied (interior scale
    sqrt(2/M), endpoint scale sqrt(1/M), first/last outputs divided by
    sqrt(2)); otherwise the unnormalized definition is used.
    """
    data = np.array(x, copy=True)
    N = len(data)
    M = N - 1
    if norm == 'ortho':
        edge_scale = np.sqrt(1.0 / M)
        mid_scale = np.sqrt(2.0 / M)
    else:
        edge_scale, mid_scale = 1, 2
    out = np.zeros(N)
    for k in range(N):
        # Interior terms first (same accumulation order as the loop form),
        # then the two endpoint contributions.
        acc = sum(mid_scale * data[n] * np.cos(np.pi * n * k / M)
                  for n in range(1, N - 1))
        acc += edge_scale * data[0]
        acc += edge_scale * data[N - 1] * (1 if k % 2 == 0 else -1)
        out[k] = acc
    if norm == 'ortho':
        out[0] *= 1 / np.sqrt(2)
        out[N - 1] *= 1 / np.sqrt(2)
    return out
def naive_dst1(x, norm=None):
    """Textbook O(N^2) DST-I used as a reference implementation."""
    data = np.array(x, copy=True)
    N = len(data)
    M = N + 1
    out = np.zeros(N)
    for k in range(N):
        for n in range(N):
            out[k] += 2 * data[n] * np.sin(np.pi * (n + 1.0) * (k + 1.0) / M)
    if norm == 'ortho':
        # Orthonormal scaling for DST-I.
        out *= np.sqrt(0.5 / M)
    return out
def naive_dct4(x, norm=None):
    """Textbook O(N^2) DCT-IV used as a reference implementation."""
    data = np.array(x, copy=True)
    N = len(data)
    out = np.zeros(N)
    for k in range(N):
        for n in range(N):
            out[k] += data[n] * np.cos(np.pi * (n + 0.5) * (k + 0.5) / (N))
    # 'ortho' applies the orthonormal factor, otherwise the plain factor 2.
    out *= np.sqrt(2.0 / N) if norm == 'ortho' else 2
    return out
def naive_dst4(x, norm=None):
    """Textbook O(N^2) DST-IV used as a reference implementation."""
    data = np.array(x, copy=True)
    N = len(data)
    out = np.zeros(N)
    for k in range(N):
        for n in range(N):
            out[k] += data[n] * np.sin(np.pi * (n + 0.5) * (k + 0.5) / (N))
    # 'ortho' applies the orthonormal factor, otherwise the plain factor 2.
    out *= np.sqrt(2.0 / N) if norm == 'ortho' else 2
    return out
@pytest.mark.parametrize('dtype', [np.complex64, np.complex128, np.longcomplex])
@pytest.mark.parametrize('transform', [dct, dst, idct, idst])
def test_complex(transform, dtype):
    # The real transforms act elementwise-linearly, so transforming a
    # purely imaginary vector must equal 1j times transforming its real part.
    y = transform(1j*np.arange(5, dtype=dtype))
    x = 1j*transform(np.arange(5))
    assert_array_almost_equal(x, y)


# Type alias: (transform, dtype, DCT/DST type) -> expected decimal accuracy.
DecMapType = Dict[
    Tuple[Callable[..., np.ndarray], Union[Type[np.floating], Type[int]], int],
    int,
]

# map (transform, dtype, type) -> decimal
dec_map: DecMapType = {
    # DCT
    (dct, np.double, 1): 13,
    (dct, np.float32, 1): 6,

    (dct, np.double, 2): 14,
    (dct, np.float32, 2): 5,

    (dct, np.double, 3): 14,
    (dct, np.float32, 3): 5,

    (dct, np.double, 4): 13,
    (dct, np.float32, 4): 6,

    # IDCT
    (idct, np.double, 1): 14,
    (idct, np.float32, 1): 6,

    (idct, np.double, 2): 14,
    (idct, np.float32, 2): 5,

    (idct, np.double, 3): 14,
    (idct, np.float32, 3): 5,

    (idct, np.double, 4): 14,
    (idct, np.float32, 4): 6,

    # DST
    (dst, np.double, 1): 13,
    (dst, np.float32, 1): 6,

    (dst, np.double, 2): 14,
    (dst, np.float32, 2): 6,

    (dst, np.double, 3): 14,
    (dst, np.float32, 3): 7,

    (dst, np.double, 4): 13,
    (dst, np.float32, 4): 6,

    # IDST
    (idst, np.double, 1): 14,
    (idst, np.float32, 1): 6,

    (idst, np.double, 2): 14,
    (idst, np.float32, 2): 6,

    (idst, np.double, 3): 14,
    (idst, np.float32, 3): 6,

    (idst, np.double, 4): 14,
    (idst, np.float32, 4): 6,
}

# longdouble inherits double's tolerances; int input inherits float32's.
for k, v in dec_map.copy().items():
    if k[1] == np.double:
        dec_map[(k[0], np.longdouble, k[2])] = v
    elif k[1] == np.float32:
        dec_map[(k[0], int, k[2])] = v
@pytest.mark.parametrize('rdt', [np.longfloat, np.double, np.float32, int])
@pytest.mark.parametrize('type', [1, 2, 3, 4])
class TestDCT:
    def test_definition(self, rdt, type, fftwdata_size):
        # Compare against the precomputed FFTW reference output for this
        # size/type, at the accuracy recorded in dec_map.
        x, yr, dt = fftw_dct_ref(type, fftwdata_size, rdt)
        y = dct(x, type=type)
        assert_equal(y.dtype, dt)
        dec = dec_map[(dct, rdt, type)]
        assert_allclose(y, yr, rtol=0., atol=np.max(yr)*10**(-dec))

    @pytest.mark.parametrize('size', [7, 8, 9, 16, 32, 64])
    def test_axis(self, rdt, type, size):
        # A 2-D transform along an axis must match per-row / per-column
        # 1-D transforms.
        nt = 2
        dec = dec_map[(dct, rdt, type)]
        x = np.random.randn(nt, size)
        y = dct(x, type=type)
        for j in range(nt):
            assert_array_almost_equal(y[j], dct(x[j], type=type),
                                      decimal=dec)

        x = x.T
        y = dct(x, axis=0, type=type)
        for j in range(nt):
            assert_array_almost_equal(y[:,j], dct(x[:,j], type=type),
                                      decimal=dec)
@pytest.mark.parametrize('rdt', [np.longfloat, np.double, np.float32, int])
def test_dct1_definition_ortho(rdt, mdata_x):
    # Test orthonormal mode against the naive textbook DCT-I.
    dec = dec_map[(dct, rdt, 1)]
    x = np.array(mdata_x, dtype=rdt)
    dt = np.result_type(np.float32, rdt)
    y = dct(x, norm='ortho', type=1)
    y2 = naive_dct1(x, norm='ortho')
    assert_equal(y.dtype, dt)
    assert_allclose(y, y2, rtol=0., atol=np.max(y2)*10**(-dec))


@pytest.mark.parametrize('rdt', [np.longfloat, np.double, np.float32, int])
def test_dct2_definition_matlab(mdata_xy, rdt):
    # Test correspondence with MATLAB (orthonormal mode).
    dt = np.result_type(np.float32, rdt)
    x = np.array(mdata_xy[0], dtype=dt)

    yr = mdata_xy[1]
    y = dct(x, norm="ortho", type=2)
    dec = dec_map[(dct, rdt, 2)]
    assert_equal(y.dtype, dt)
    assert_array_almost_equal(y, yr, decimal=dec)


@pytest.mark.parametrize('rdt', [np.longfloat, np.double, np.float32, int])
def test_dct3_definition_ortho(mdata_x, rdt):
    # Test orthonormal mode: DCT-III must invert DCT-II.
    x = np.array(mdata_x, dtype=rdt)
    dt = np.result_type(np.float32, rdt)
    y = dct(x, norm='ortho', type=2)
    xi = dct(y, norm="ortho", type=3)
    dec = dec_map[(dct, rdt, 3)]
    assert_equal(xi.dtype, dt)
    assert_array_almost_equal(xi, x, decimal=dec)


@pytest.mark.parametrize('rdt', [np.longfloat, np.double, np.float32, int])
def test_dct4_definition_ortho(mdata_x, rdt):
    # Test orthonormal mode against the naive textbook DCT-IV.
    x = np.array(mdata_x, dtype=rdt)
    dt = np.result_type(np.float32, rdt)
    y = dct(x, norm='ortho', type=4)
    y2 = naive_dct4(x, norm='ortho')
    dec = dec_map[(dct, rdt, 4)]
    assert_equal(y.dtype, dt)
    assert_allclose(y, y2, rtol=0., atol=np.max(y2)*10**(-dec))


@pytest.mark.parametrize('rdt', [np.longfloat, np.double, np.float32, int])
@pytest.mark.parametrize('type', [1, 2, 3, 4])
def test_idct_definition(fftwdata_size, rdt, type):
    # idct of the FFTW reference output must recover the reference input.
    xr, yr, dt = fftw_dct_ref(type, fftwdata_size, rdt)
    x = idct(yr, type=type)
    dec = dec_map[(idct, rdt, type)]
    assert_equal(x.dtype, dt)
    assert_allclose(x, xr, rtol=0., atol=np.max(xr)*10**(-dec))


@pytest.mark.parametrize('rdt', [np.longfloat, np.double, np.float32, int])
@pytest.mark.parametrize('type', [1, 2, 3, 4])
def test_definition(fftwdata_size, rdt, type):
    # DST against the precomputed FFTW reference output.
    xr, yr, dt = fftw_dst_ref(type, fftwdata_size, rdt)
    y = dst(xr, type=type)
    dec = dec_map[(dst, rdt, type)]
    assert_equal(y.dtype, dt)
    assert_allclose(y, yr, rtol=0., atol=np.max(yr)*10**(-dec))


@pytest.mark.parametrize('rdt', [np.longfloat, np.double, np.float32, int])
def test_dst1_definition_ortho(rdt, mdata_x):
    # Test orthonormal mode against the naive textbook DST-I.
    dec = dec_map[(dst, rdt, 1)]
    x = np.array(mdata_x, dtype=rdt)
    dt = np.result_type(np.float32, rdt)
    y = dst(x, norm='ortho', type=1)
    y2 = naive_dst1(x, norm='ortho')
    assert_equal(y.dtype, dt)
    assert_allclose(y, y2, rtol=0., atol=np.max(y2)*10**(-dec))


@pytest.mark.parametrize('rdt', [np.longfloat, np.double, np.float32, int])
def test_dst4_definition_ortho(rdt, mdata_x):
    # Test orthonormal mode against the naive textbook DST-IV.
    dec = dec_map[(dst, rdt, 4)]
    x = np.array(mdata_x, dtype=rdt)
    dt = np.result_type(np.float32, rdt)
    y = dst(x, norm='ortho', type=4)
    y2 = naive_dst4(x, norm='ortho')
    assert_equal(y.dtype, dt)
    assert_array_almost_equal(y, y2, decimal=dec)


@pytest.mark.parametrize('rdt', [np.longfloat, np.double, np.float32, int])
@pytest.mark.parametrize('type', [1, 2, 3, 4])
def test_idst_definition(fftwdata_size, rdt, type):
    # idst of the FFTW reference output must recover the reference input.
    xr, yr, dt = fftw_dst_ref(type, fftwdata_size, rdt)
    x = idst(yr, type=type)
    dec = dec_map[(idst, rdt, type)]
    assert_equal(x.dtype, dt)
    assert_allclose(x, xr, rtol=0., atol=np.max(xr)*10**(-dec))
@pytest.mark.parametrize('routine', [dct, dst, idct, idst])
@pytest.mark.parametrize('dtype', [np.float32, np.float64, np.longfloat])
@pytest.mark.parametrize('shape, axis', [
    ((16,), -1), ((16, 2), 0), ((2, 16), 1)
])
@pytest.mark.parametrize('type', [1, 2, 3, 4])
@pytest.mark.parametrize('overwrite_x', [True, False])
@pytest.mark.parametrize('norm', [None, 'ortho'])
def test_overwrite(routine, dtype, shape, axis, type, norm, overwrite_x):
    # Check input overwrite behavior
    np.random.seed(1234)
    if np.issubdtype(dtype, np.complexfloating):
        x = np.random.randn(*shape) + 1j*np.random.randn(*shape)
    else:
        x = np.random.randn(*shape)
    x = x.astype(dtype)
    x2 = x.copy()
    routine(x2, type, None, axis, norm, overwrite_x=overwrite_x)

    sig = "%s(%s%r, %r, axis=%r, overwrite_x=%r)" % (
        routine.__name__, x.dtype, x.shape, None, axis, overwrite_x)
    if not overwrite_x:
        # Without overwrite_x the input buffer must be left untouched.
        assert_equal(x2, x, err_msg="spurious overwrite in %s" % sig)
class Test_DCTN_IDCTN:
    """N-dimensional DCT/DST tests: round trips, agreement with the 2-D
    reference built from 1-D transforms, and axes/shape argument checks."""
    dec = 14                                   # decimals for round-trip checks
    dct_type = [1, 2, 3, 4]
    norms = [None, 'backward', 'ortho', 'forward']
    rstate = np.random.RandomState(1234)       # fixed seed: shared test data
    shape = (32, 16)
    data = rstate.randn(*shape)

    @pytest.mark.parametrize('fforward,finverse', [(dctn, idctn),
                                                   (dstn, idstn)])
    @pytest.mark.parametrize('axes', [None,
                                      1, (1,), [1],
                                      0, (0,), [0],
                                      (0, 1), [0, 1],
                                      (-2, -1), [-2, -1]])
    @pytest.mark.parametrize('dct_type', dct_type)
    @pytest.mark.parametrize('norm', ['ortho'])
    def test_axes_round_trip(self, fforward, finverse, axes, dct_type, norm):
        # forward followed by inverse must reproduce the input.
        tmp = fforward(self.data, type=dct_type, axes=axes, norm=norm)
        tmp = finverse(tmp, type=dct_type, axes=axes, norm=norm)
        assert_array_almost_equal(self.data, tmp, decimal=12)

    @pytest.mark.parametrize('funcn,func', [(dctn, dct), (dstn, dst)])
    @pytest.mark.parametrize('dct_type', dct_type)
    @pytest.mark.parametrize('norm', norms)
    def test_dctn_vs_2d_reference(self, funcn, func, dct_type, norm):
        y1 = funcn(self.data, type=dct_type, axes=None, norm=norm)
        y2 = ref_2d(func, self.data, type=dct_type, norm=norm)
        assert_array_almost_equal(y1, y2, decimal=11)

    @pytest.mark.parametrize('funcn,func', [(idctn, idct), (idstn, idst)])
    @pytest.mark.parametrize('dct_type', dct_type)
    @pytest.mark.parametrize('norm', norms)
    def test_idctn_vs_2d_reference(self, funcn, func, dct_type, norm):
        fdata = dctn(self.data, type=dct_type, norm=norm)
        y1 = funcn(fdata, type=dct_type, norm=norm)
        y2 = ref_2d(func, fdata, type=dct_type, norm=norm)
        assert_array_almost_equal(y1, y2, decimal=11)

    @pytest.mark.parametrize('fforward,finverse', [(dctn, idctn),
                                                   (dstn, idstn)])
    def test_axes_and_shape(self, fforward, finverse):
        # Mismatched lengths of `s` and `axes` must raise.
        with assert_raises(ValueError,
                           match="when given, axes and shape arguments"
                                 " have to be of the same length"):
            fforward(self.data, s=self.data.shape[0], axes=(0, 1))

        with assert_raises(ValueError,
                           match="when given, axes and shape arguments"
                                 " have to be of the same length"):
            fforward(self.data, s=self.data.shape, axes=0)

    @pytest.mark.parametrize('fforward', [dctn, dstn])
    def test_shape(self, fforward):
        # `s` larger than the input zero-pads up to the requested shape.
        tmp = fforward(self.data, s=(128, 128), axes=None)
        assert_equal(tmp.shape, (128, 128))

    @pytest.mark.parametrize('fforward,finverse', [(dctn, idctn),
                                                   (dstn, idstn)])
    @pytest.mark.parametrize('axes', [1, (1,), [1],
                                      0, (0,), [0]])
    def test_shape_is_none_with_axes(self, fforward, finverse, axes):
        tmp = fforward(self.data, s=None, axes=axes, norm='ortho')
        tmp = finverse(tmp, s=None, axes=axes, norm='ortho')
        assert_array_almost_equal(self.data, tmp, decimal=self.dec)
@pytest.mark.parametrize('func', [dct, dctn, idct, idctn,
dst, dstn, idst, idstn])
def test_swapped_byte_order(func):
rng = np.random.RandomState(1234)
x = rng.rand(10)
swapped_dt = x.dtype.newbyteorder('S')
assert_allclose(func(x.astype(swapped_dt)), func(x))
| bsd-3-clause |
mganeva/mantid | qt/applications/workbench/workbench/plotting/test/test_figureinteraction.py | 1 | 5158 | # Mantid Repository : https://github.com/mantidproject/mantid
#
# Copyright © 2019 ISIS Rutherford Appleton Laboratory UKRI,
# NScD Oak Ridge National Laboratory, European Spallation Source
# & Institut Laue - Langevin
# SPDX - License - Identifier: GPL - 3.0 +
# This file is part of the mantid workbench.
#
from __future__ import (absolute_import, division, print_function,
unicode_literals)
# system imports
import unittest
# third-party library imports
from mantid.py3compat.mock import MagicMock, PropertyMock, call, patch
from mantidqt.plotting.figuretype import FigureType
from qtpy.QtCore import Qt
# local package imports
from workbench.plotting.figureinteraction import FigureInteraction
class FigureInteractionTest(unittest.TestCase):
    """Unit tests for FigureInteraction: event-handler registration,
    disconnection, and the right-click context menu built per figure type."""

    # Success tests
    def test_construction_registers_handler_for_button_press_event(self):
        fig_manager = MagicMock()
        fig_manager.canvas = MagicMock()
        interactor = FigureInteraction(fig_manager)
        # Exactly one matplotlib event hook is expected: button presses.
        fig_manager.canvas.mpl_connect.assert_called_once_with('button_press_event',
                                                               interactor.on_mouse_button_press)

    def test_disconnect_called_for_each_registered_handler(self):
        fig_manager = MagicMock()
        canvas = MagicMock()
        fig_manager.canvas = canvas
        interactor = FigureInteraction(fig_manager)
        interactor.disconnect()
        # Every connected handler must be disconnected again.
        self.assertEqual(interactor.nevents, canvas.mpl_disconnect.call_count)

    @patch('workbench.plotting.figureinteraction.QMenu',
           autospec=True)
    @patch('workbench.plotting.figureinteraction.figure_type',
           autospec=True)
    def test_right_click_gives_no_context_menu_for_empty_figure(self, mocked_figure_type,
                                                                mocked_qmenu):
        fig_manager = self._create_mock_fig_manager_to_accept_right_click()
        interactor = FigureInteraction(fig_manager)
        mouse_event = self._create_mock_right_click()
        mocked_figure_type.return_value = FigureType.Empty

        interactor.on_mouse_button_press(mouse_event)
        # An empty figure offers nothing to act on -> no menu constructed.
        mocked_qmenu.assert_not_called()

    @patch('workbench.plotting.figureinteraction.QMenu',
           autospec=True)
    @patch('workbench.plotting.figureinteraction.figure_type',
           autospec=True)
    def test_right_click_gives_no_context_menu_for_color_plot(self, mocked_figure_type,
                                                              mocked_qmenu):
        fig_manager = self._create_mock_fig_manager_to_accept_right_click()
        interactor = FigureInteraction(fig_manager)
        mouse_event = self._create_mock_right_click()
        mocked_figure_type.return_value = FigureType.Image

        interactor.on_mouse_button_press(mouse_event)
        # Image (colorfill) plots also get no context menu.
        mocked_qmenu.assert_not_called()

    @patch('workbench.plotting.figureinteraction.QMenu',
           autospec=True)
    @patch('workbench.plotting.figureinteraction.figure_type',
           autospec=True)
    def test_right_click_gives_context_menu_for_plot_without_fit_enabled(self, mocked_figure_type,
                                                                         mocked_qmenu_cls):
        fig_manager = self._create_mock_fig_manager_to_accept_right_click()
        fig_manager.fit_browser.tool = None
        interactor = FigureInteraction(fig_manager)
        mouse_event = self._create_mock_right_click()
        mocked_figure_type.return_value = FigureType.Line

        # Expect a call to QMenu() for the outer menu followed by a call with the first
        # as its parent to generate the Axes menu.
        qmenu_call1 = MagicMock()
        qmenu_call2 = MagicMock()
        mocked_qmenu_cls.side_effect = [qmenu_call1, qmenu_call2]

        with patch('workbench.plotting.figureinteraction.QActionGroup',
                   autospec=True):
            interactor.on_mouse_button_press(mouse_event)
            # With fitting disabled the outer menu gets no extra entries.
            self.assertEqual(0, qmenu_call1.addSeparator.call_count)
            self.assertEqual(0, qmenu_call1.addAction.call_count)

        expected_qmenu_calls = [call(), call("Axes", qmenu_call1)]
        self.assertEqual(expected_qmenu_calls, mocked_qmenu_cls.call_args_list)
        # 4 actions in Axes submenu
        self.assertEqual(4, qmenu_call2.addAction.call_count)

    # Failure tests
    def test_construction_with_non_qt_canvas_raises_exception(self):
        class NotQtCanvas(object):
            pass

        class FigureManager(object):
            def __init__(self):
                self.canvas = NotQtCanvas()

        self.assertRaises(RuntimeError, FigureInteraction, FigureManager())

    # Private methods
    def _create_mock_fig_manager_to_accept_right_click(self):
        fig_manager = MagicMock()
        canvas = MagicMock()
        # Map Qt's right button onto matplotlib's button id 3.
        type(canvas).buttond = PropertyMock(return_value={Qt.RightButton: 3})
        fig_manager.canvas = canvas
        return fig_manager

    def _create_mock_right_click(self):
        mouse_event = MagicMock()
        # matplotlib encodes the right mouse button as 3.
        type(mouse_event).button = PropertyMock(return_value=3)
        return mouse_event
# Allow running this test module directly.
if __name__ == "__main__":
    unittest.main()
| gpl-3.0 |
yanchen036/tensorflow | tensorflow/python/kernel_tests/self_adjoint_eig_op_test.py | 22 | 9533 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tensorflow.ops.math_ops.matrix_inverse."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes as dtypes_lib
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import gradient_checker
from tensorflow.python.ops import linalg_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.platform import test
def _AddTest(test_class, op_name, testcase_name, fn):
test_name = "_".join(["test", op_name, testcase_name])
if hasattr(test_class, test_name):
raise RuntimeError("Test %s defined more than once" % test_name)
setattr(test_class, test_name, fn)
class SelfAdjointEigTest(test.TestCase):
    """Forward tests for self_adjoint_eig / self_adjoint_eigvals."""

    def testWrongDimensions(self):
        # The input to self_adjoint_eig should be a tensor of
        # at least rank 2.
        scalar = constant_op.constant(1.)
        with self.assertRaises(ValueError):
            linalg_ops.self_adjoint_eig(scalar)
        vector = constant_op.constant([1., 2.])
        with self.assertRaises(ValueError):
            linalg_ops.self_adjoint_eig(vector)

    def testConcurrentExecutesWithoutError(self):
        all_ops = []
        with self.test_session(use_gpu=True) as sess:
            for compute_v_ in True, False:
                # Same seed -> identical matrices, so eigen results must agree
                # even when the two ops run concurrently.
                matrix1 = random_ops.random_normal([5, 5], seed=42)
                matrix2 = random_ops.random_normal([5, 5], seed=42)
                if compute_v_:
                    e1, v1 = linalg_ops.self_adjoint_eig(matrix1)
                    e2, v2 = linalg_ops.self_adjoint_eig(matrix2)
                    all_ops += [e1, v1, e2, v2]
                else:
                    e1 = linalg_ops.self_adjoint_eigvals(matrix1)
                    e2 = linalg_ops.self_adjoint_eigvals(matrix2)
                    all_ops += [e1, e2]
            val = sess.run(all_ops)
            self.assertAllEqual(val[0], val[2])
            # The algorithm is slightly different for compute_v being True and False,
            # so require approximate equality only here.
            self.assertAllClose(val[2], val[4])
            self.assertAllEqual(val[4], val[5])
            self.assertAllEqual(val[1], val[3])

    def testMatrixThatFailsWhenFlushingDenormsToZero(self):
        # Test a 32x32 matrix which is known to fail if denorm floats are flushed to
        # zero.
        matrix = np.genfromtxt(
            test.test_src_dir_path(
                "python/kernel_tests/testdata/"
                "self_adjoint_eig_fail_if_denorms_flushed.txt")).astype(np.float32)
        self.assertEqual(matrix.shape, (32, 32))
        matrix_tensor = constant_op.constant(matrix)
        with self.test_session(use_gpu=True) as sess:
            (e, v) = sess.run(linalg_ops.self_adjoint_eig(matrix_tensor))
            self.assertEqual(e.size, 32)
            # Eigenvector matrix must be orthogonal and reconstruct the input.
            self.assertAllClose(
                np.matmul(v, v.transpose()), np.eye(32, dtype=np.float32), atol=2e-3)
            self.assertAllClose(matrix,
                                np.matmul(np.matmul(v, np.diag(e)), v.transpose()))
def SortEigenDecomposition(e, v):
    """Sort eigenvalues along the last axis ascending and permute the
    matching eigenvector columns; pass v through untouched when it has
    fewer than 2 dimensions."""
    if v.ndim < 2:
        return e, v
    order = np.argsort(e, -1)
    return np.take(e, order, -1), np.take(v, order, -1)
def EquilibrateEigenVectorPhases(x, y):
    """Equilibrate the phase of the Eigenvectors in the columns of `x` and `y`.

    Eigenvectors are only unique up to an arbitrary phase. This function
    rotates x such that it matches y. Precondition: the columns of x and y
    differ by a multiplicative complex phase factor only.

    Args:
        x: `np.ndarray` with Eigenvectors
        y: `np.ndarray` with Eigenvectors

    Returns:
        `np.ndarray` containing an equilibrated version of x.
    """
    # Per-column inner product <x, y>; its phase is exactly the rotation
    # that maps each column of x onto the corresponding column of y.
    inner = np.sum(np.conj(x) * y, -2, keepdims=True)
    unit_phases = inner / np.abs(inner)
    return unit_phases * x
def _GetSelfAdjointEigTest(dtype_, shape_, compute_v_):
    """Build a forward test for one (dtype, shape, compute_v) combination.

    The generated test compares TF's eigendecomposition of a random
    self-adjoint (batched) matrix against np.linalg.eigh.
    """

    def CompareEigenVectors(self, x, y, tol):
        # Phases are arbitrary; rotate x onto y before comparing.
        x = EquilibrateEigenVectorPhases(x, y)
        self.assertAllClose(x, y, atol=tol)

    def CompareEigenDecompositions(self, x_e, x_v, y_e, y_v, tol):
        # Flatten batch dimensions and compare each decomposition after
        # sorting by eigenvalue (ordering is not guaranteed).
        num_batches = int(np.prod(x_e.shape[:-1]))
        n = x_e.shape[-1]
        x_e = np.reshape(x_e, [num_batches] + [n])
        x_v = np.reshape(x_v, [num_batches] + [n, n])
        y_e = np.reshape(y_e, [num_batches] + [n])
        y_v = np.reshape(y_v, [num_batches] + [n, n])
        for i in range(num_batches):
            x_ei, x_vi = SortEigenDecomposition(x_e[i, :], x_v[i, :, :])
            y_ei, y_vi = SortEigenDecomposition(y_e[i, :], y_v[i, :, :])
            self.assertAllClose(x_ei, y_ei, atol=tol, rtol=tol)
            CompareEigenVectors(self, x_vi, y_vi, tol)

    def Test(self):
        np.random.seed(1)
        n = shape_[-1]
        batch_shape = shape_[:-2]
        np_dtype = dtype_.as_numpy_dtype
        # Random self-adjoint matrix: a + conj(a).T, tiled over the batch.
        a = np.random.uniform(
            low=-1.0, high=1.0, size=n * n).reshape([n, n]).astype(np_dtype)
        if dtype_.is_complex:
            a += 1j * np.random.uniform(
                low=-1.0, high=1.0, size=n * n).reshape([n, n]).astype(np_dtype)
        a += np.conj(a.T)
        a = np.tile(a, batch_shape + (1, 1))
        # Looser tolerance for single precision.
        if dtype_ in (dtypes_lib.float32, dtypes_lib.complex64):
            atol = 1e-4
        else:
            atol = 1e-12
        np_e, np_v = np.linalg.eigh(a)
        with self.test_session(use_gpu=True):
            if compute_v_:
                tf_e, tf_v = linalg_ops.self_adjoint_eig(constant_op.constant(a))

                # Check that V*diag(E)*V^T is close to A.
                a_ev = math_ops.matmul(
                    math_ops.matmul(tf_v, array_ops.matrix_diag(tf_e)),
                    tf_v,
                    adjoint_b=True)
                self.assertAllClose(a_ev.eval(), a, atol=atol)

                # Compare to numpy.linalg.eigh.
                CompareEigenDecompositions(self, np_e, np_v,
                                           tf_e.eval(), tf_v.eval(), atol)
            else:
                tf_e = linalg_ops.self_adjoint_eigvals(constant_op.constant(a))
                self.assertAllClose(
                    np.sort(np_e, -1), np.sort(tf_e.eval(), -1), atol=atol)

    return Test
class SelfAdjointEigGradTest(test.TestCase):
    pass  # Filled in below by _AddTest + _GetSelfAdjointEigGradTest.
def _GetSelfAdjointEigGradTest(dtype_, shape_, compute_v_):
    """Build a gradient test for one (dtype, shape, compute_v) combination.

    The generated test checks TF's analytic gradients of the
    eigendecomposition against central finite differences.
    """

    def Test(self):
        np.random.seed(1)
        n = shape_[-1]
        batch_shape = shape_[:-2]
        np_dtype = dtype_.as_numpy_dtype
        # Random self-adjoint matrix: a + conj(a).T, tiled over the batch.
        a = np.random.uniform(
            low=-1.0, high=1.0, size=n * n).reshape([n, n]).astype(np_dtype)
        if dtype_.is_complex:
            a += 1j * np.random.uniform(
                low=-1.0, high=1.0, size=n * n).reshape([n, n]).astype(np_dtype)
        a += np.conj(a.T)
        a = np.tile(a, batch_shape + (1, 1))
        # Optimal stepsize for central difference is O(epsilon^{1/3}).
        epsilon = np.finfo(np_dtype).eps
        delta = 0.1 * epsilon**(1.0 / 3.0)
        # tolerance obtained by looking at actual differences using
        # np.linalg.norm(theoretical-numerical, np.inf) on -mavx build
        if dtype_ in (dtypes_lib.float32, dtypes_lib.complex64):
            tol = 1e-2
        else:
            tol = 1e-7
        with self.test_session(use_gpu=True):
            tf_a = constant_op.constant(a)
            if compute_v_:
                tf_e, tf_v = linalg_ops.self_adjoint_eig(tf_a)

                # (complex) Eigenvectors are only unique up to an arbitrary phase
                # We normalize the vectors such that the first component has phase 0.
                top_rows = tf_v[..., 0:1, :]
                if tf_a.dtype.is_complex:
                    angle = -math_ops.angle(top_rows)
                    phase = math_ops.complex(math_ops.cos(angle), math_ops.sin(angle))
                else:
                    phase = math_ops.sign(top_rows)
                tf_v *= phase
                outputs = [tf_e, tf_v]
            else:
                tf_e = linalg_ops.self_adjoint_eigvals(tf_a)
                outputs = [tf_e]
            for b in outputs:
                # Perturb around a random self-adjoint starting point.
                x_init = np.random.uniform(
                    low=-1.0, high=1.0, size=n * n).reshape([n, n]).astype(np_dtype)
                if dtype_.is_complex:
                    x_init += 1j * np.random.uniform(
                        low=-1.0, high=1.0, size=n * n).reshape([n, n]).astype(np_dtype)
                x_init += np.conj(x_init.T)
                x_init = np.tile(x_init, batch_shape + (1, 1))
                theoretical, numerical = gradient_checker.compute_gradient(
                    tf_a,
                    tf_a.get_shape().as_list(),
                    b,
                    b.get_shape().as_list(),
                    x_init_value=x_init,
                    delta=delta)
                self.assertAllClose(theoretical, numerical, atol=tol, rtol=tol)

    return Test
if __name__ == "__main__":
    # Register one forward test and one gradient test per combination of
    # compute_v, dtype, matrix size and batch shape.
    for compute_v in True, False:
        for dtype in (dtypes_lib.float32, dtypes_lib.float64, dtypes_lib.complex64,
                      dtypes_lib.complex128):
            for size in 1, 2, 5, 10:
                # Batched shapes are only exercised for small matrices; the
                # original `max(size, size)` was a redundant spelling of `size`.
                for batch_dims in [(), (3,)] + [(3, 2)] * (size < 10):
                    shape = batch_dims + (size, size)
                    name = "%s_%s_%s" % (dtype, "_".join(map(str, shape)), compute_v)
                    _AddTest(SelfAdjointEigTest, "SelfAdjointEig", name,
                             _GetSelfAdjointEigTest(dtype, shape, compute_v))
                    _AddTest(SelfAdjointEigGradTest, "SelfAdjointEigGrad", name,
                             _GetSelfAdjointEigGradTest(dtype, shape, compute_v))
    test.main()
| apache-2.0 |
welex91/ansible-modules-core | network/nxos/nxos_feature.py | 9 | 8152 | #!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
DOCUMENTATION = '''
---
module: nxos_feature
version_added: "2.1"
short_description: Manage features in NX-OS switches
description:
- Offers ability to enable and disable features in NX-OS
extends_documentation_fragment: nxos
author: Jason Edelman (@jedelman8), Gabriele Gerbino (@GGabriele)
options:
feature:
description:
- Name of feature
required: true
state:
description:
- Desired state of the feature
required: false
default: 'enabled'
choices: ['enabled','disabled']
'''
EXAMPLES = '''
# Ensure lacp is enabled
- nxos_feature: feature=lacp state=enabled host={{ inventory_hostname }}
# Ensure ospf is disabled
- nxos_feature: feature=ospf state=disabled host={{ inventory_hostname }}
# Ensure vpc is enabled
- nxos_feature: feature=vpc state=enabled host={{ inventory_hostname }}
'''
RETURN = '''
proposed:
description: proposed feature state
returned: always
type: dict
sample: {"state": "disabled"}
existing:
description: existing state of feature
returned: always
type: dict
sample: {"state": "enabled"}
end_state:
description: feature state after executing module
returned: always
type: dict
sample: {"state": "disabled"}
state:
description: state as sent in from the playbook
returned: always
type: string
sample: "disabled"
updates:
description: commands sent to the device
returned: always
type: list
sample: ["no feature eigrp"]
changed:
description: check to see if a change was made on the device
returned: always
type: boolean
sample: true
feature:
description: the feature that has been examined
returned: always
type: string
sample: "vpc"
'''
def execute_config_command(commands, module):
    """Send a list of configuration commands to the device.

    On a shell error this calls ``module.fail_json`` (which raises and
    terminates the module run), so callers never observe a partial failure.

    :param commands: list of CLI configuration command strings
    :param module: AnsibleModule-like object exposing ``configure`` and
        ``fail_json``
    """
    try:
        module.configure(commands)
    except ShellError as clie:
        # Fix: the old ``except ShellError, clie`` form is Python-2-only
        # syntax (a SyntaxError on Python 3); ``as`` works on 2.6+ and 3.x.
        module.fail_json(msg='Error sending CLI commands',
                         error=str(clie), commands=commands)
def get_cli_body_ssh(command, response, module=None):
    """Get response for when transport=cli. This is kind of a hack and mainly
    needed because these modules were originally written for NX-API. And
    not every command supports "| json" when using cli/ssh. As such, we assume
    if | json returns an XML string, it is a valid command, but that the
    resource doesn't exist yet.

    :param command: the show command that was executed (used in error output)
    :param response: list whose first element is the raw device output
    :param module: optional AnsibleModule-like object; when given, JSON parse
        failures are reported via ``module.fail_json``. Optional so existing
        two-argument callers keep working.
    :return: ``[]`` for an XML (resource-missing) response, otherwise a
        one-element list with the parsed JSON body
    """
    if 'xml' in response[0]:
        # "| json" on a not-yet-existing resource yields an XML error string;
        # treat that as "no data" rather than a failure.
        body = []
    else:
        try:
            body = [json.loads(response[0])]
        except ValueError:
            # Bug fix: the original referenced an undefined global ``module``
            # here, so this branch raised NameError instead of failing with a
            # useful message. Fail cleanly when a module is supplied,
            # otherwise propagate the original ValueError.
            if module is not None:
                module.fail_json(msg='Command does not support JSON output',
                                 command=command)
            raise
    return body
def execute_show(cmds, module, command_type=None):
    """Run show commands through the module transport and return the raw
    response.

    :param cmds: list of show command strings
    :param module: AnsibleModule-like object exposing ``execute``/``fail_json``
    :param command_type: optional NX-API command type (e.g. 'cli_show');
        omitted entirely from the ``execute`` call when falsy
    """
    try:
        if command_type:
            response = module.execute(cmds, command_type=command_type)
        else:
            response = module.execute(cmds)
    except ShellError as clie:
        # Fix: Py3-compatible handler syntax (was ``except ShellError, clie``).
        # fail_json raises, so the ``return`` below is unreachable on error.
        module.fail_json(msg='Error sending {0}'.format(cmds),
                         error=str(clie))
    return response
def execute_show_command(command, module, command_type='cli_show'):
    """Run a single show command and return its parsed body, dispatching on
    the configured transport ('cli' over ssh vs. 'nxapi')."""
    transport = module.params['transport']
    if transport == 'cli':
        # ssh/cli path: force JSON output and normalize it ourselves.
        command += ' | json'
        body = get_cli_body_ssh(command, execute_show([command], module))
    elif transport == 'nxapi':
        # NX-API already returns structured data for the given command type.
        body = execute_show([command], module, command_type=command_type)
    return body
def apply_key_map(key_map, table):
    """Return a copy of ``table`` with its keys renamed through ``key_map``.

    Entries whose key is missing from ``key_map`` (or maps to a falsy name)
    are dropped. Truthy values are stringified; falsy values are kept as-is.
    """
    renamed = {}
    for old_key, value in table.items():
        new_key = key_map.get(old_key)
        if not new_key:
            continue
        renamed[new_key] = str(value) if value else value
    return renamed
def get_available_features(feature, module):
    """Return a dict mapping every feature reported by ``show feature`` to
    its aggregate state.

    A feature can appear multiple times in the table (per-instance rows);
    'enabled' wins over 'disabled' when any instance is enabled.

    :param feature: kept for signature compatibility with existing callers;
        the full table is always returned
    :param module: AnsibleModule-like object used to run the show command
    """
    available_features = {}
    command = 'show feature'
    body = execute_show_command(command, module)

    try:
        body = body[0]['TABLE_cfcFeatureCtrlTable']['ROW_cfcFeatureCtrlTable']
    except (TypeError, IndexError):
        # Unexpected/empty structure from the device: report no features.
        return available_features

    for each_feature in body:
        # Bug fix: the loop previously rebound the ``feature`` parameter,
        # silently shadowing the caller's argument; use a distinct local.
        name = each_feature['cfcFeatureCtrlName2']
        state = each_feature['cfcFeatureCtrlOpStatus2']

        # States like 'enabled(not-running)' still count as enabled.
        if 'enabled' in state:
            state = 'enabled'

        if name not in available_features:
            available_features[name] = state
        elif (available_features[name] == 'disabled' and
                state == 'enabled'):
            available_features[name] = state

    return available_features
def get_commands(proposed, existing, state, module):
    """Build the CLI commands needed to move the feature from ``existing``
    to ``proposed`` state. Returns an empty list when already in sync."""
    feature = validate_feature(module, mode='config')
    commands = []
    if proposed != existing:
        if state == 'enabled':
            commands.append('feature {0}'.format(feature))
        elif state == 'disabled':
            commands.append('no feature {0}'.format(feature))
    return commands
def validate_feature(module, mode='show'):
    '''Some features may need to be mapped due to inconsistency
    between how they appear from "show feature" output and
    how they are configured'''
    # Currently only 'nv overlay' <-> 'nve' differ between the two views;
    # anything else passes through unchanged.
    remap = {
        'show': {'nv overlay': 'nve'},
        'config': {'nve': 'nv overlay'},
    }
    feature = module.params['feature']
    return remap[mode].get(feature, feature)
def main():
    # Module entry point: gather args, compare desired vs. actual feature
    # state, push CLI changes (unless in check mode) and report results.
    argument_spec = dict(
        feature=dict(type='str', required=True),
        state=dict(choices=['enabled', 'disabled'], default='enabled',
                   required=False),
    )
    # get_module comes from the star imports below (Ansible 2.x nxos shim).
    module = get_module(argument_spec=argument_spec,
                        supports_check_mode=True)

    feature = validate_feature(module)
    state = module.params['state'].lower()

    available_features = get_available_features(feature, module)
    if feature not in available_features.keys():
        # fail_json raises, terminating the module run here.
        module.fail_json(
            msg='Invalid feature name.',
            features_currently_supported=available_features,
            invalid_feature=feature)
    else:
        existstate = available_features[feature]

        existing = dict(state=existstate)
        proposed = dict(state=state)
        changed = False
        end_state = existing

        cmds = get_commands(proposed, existing, state, module)

        if cmds:
            if module.check_mode:
                # Check mode: report what would change without touching
                # the device (exit_json raises SystemExit).
                module.exit_json(changed=True, commands=cmds)
            else:
                execute_config_command(cmds, module)
                changed = True
                # Re-read the device to report the post-change state.
                updated_features = get_available_features(feature, module)
                existstate = updated_features[feature]
                end_state = dict(state=existstate)

    # The names used here are only bound in the else-branch above; this is
    # safe because the fail_json branch never returns.
    results = {}
    results['proposed'] = proposed
    results['existing'] = existing
    results['end_state'] = end_state
    results['state'] = state
    results['updates'] = cmds
    results['changed'] = changed
    results['feature'] = module.params['feature']

    module.exit_json(**results)
# Ansible 1.x/2.0-era module boilerplate: star-imports at the *bottom* of the
# file are intentional here — the module runner textually appends/replaces
# these at execution time, and they provide get_module, ShellError, json, etc.
from ansible.module_utils.basic import *
from ansible.module_utils.urls import *
from ansible.module_utils.shell import *
from ansible.module_utils.netcfg import *
from ansible.module_utils.nxos import *

if __name__ == '__main__':
    main()
| gpl-3.0 |
mcus/SickRage | lib/github/GitTreeElement.py | 74 | 3880 | # -*- coding: utf-8 -*-
# ########################## Copyrights and license ############################
# #
# Copyright 2012 Vincent Jacques <vincent@vincent-jacques.net> #
# Copyright 2012 Zearin <zearin@gonk.net> #
# Copyright 2013 AKFish <akfish@gmail.com> #
# Copyright 2013 Vincent Jacques <vincent@vincent-jacques.net> #
# #
# This file is part of PyGithub. http://jacquev6.github.com/PyGithub/ #
# #
# PyGithub is free software: you can redistribute it and/or modify it under #
# the terms of the GNU Lesser General Public License as published by the Free #
# Software Foundation, either version 3 of the License, or (at your option) #
# any later version. #
# #
# PyGithub is distributed in the hope that it will be useful, but WITHOUT ANY #
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS #
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more #
# details. #
# #
# You should have received a copy of the GNU Lesser General Public License #
# along with PyGithub. If not, see <http://www.gnu.org/licenses/>. #
# #
# ##############################################################################
import github.GithubObject
class GitTreeElement(github.GithubObject.NonCompletableGithubObject):
    """
    This class represents GitTreeElements as returned for example by http://developer.github.com/v3/todo
    """

    @property
    def mode(self):
        """
        :type: string
        """
        return self._mode.value

    @property
    def path(self):
        """
        :type: string
        """
        return self._path.value

    @property
    def sha(self):
        """
        :type: string
        """
        return self._sha.value

    @property
    def size(self):
        """
        :type: integer
        """
        return self._size.value

    @property
    def type(self):
        """
        :type: string
        """
        return self._type.value

    @property
    def url(self):
        """
        :type: string
        """
        return self._url.value

    def _initAttributes(self):
        # Every backing attribute starts out as NotSet until _useAttributes
        # fills it from an API payload.
        for name in ("_mode", "_path", "_sha", "_size", "_type", "_url"):
            setattr(self, name, github.GithubObject.NotSet)

    def _useAttributes(self, attributes):
        # Map each payload key to its converter; all fields are strings
        # except "size", which is an integer.
        converters = {
            "mode": self._makeStringAttribute,
            "path": self._makeStringAttribute,
            "sha": self._makeStringAttribute,
            "size": self._makeIntAttribute,
            "type": self._makeStringAttribute,
            "url": self._makeStringAttribute,
        }
        for key, convert in converters.items():
            if key in attributes:  # pragma no branch
                setattr(self, "_" + key, convert(attributes[key]))
| gpl-3.0 |
shakamunyi/neutron-vrrp | neutron/tests/unit/test_db_plugin_level.py | 19 | 3370 | # Copyright (c) 2014 Red Hat, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron.api.v2 import attributes
from neutron.common import constants
from neutron.common import exceptions as n_exc
from neutron import context
from neutron import manager
from neutron.tests.unit import test_db_plugin
from neutron.tests.unit import testlib_api
from neutron.tests.unit import testlib_plugin
class TestNetworks(testlib_api.SqlTestCase,
                   testlib_plugin.PluginSetupHelper):
    # Exercises core-plugin behaviour when un-sharing a network that another
    # tenant's ports still use: gateway/floating-ip ports are allowed, while
    # ordinary ports must raise InvalidSharedSetting.

    def setUp(self):
        super(TestNetworks, self).setUp()
        self._tenant_id = 'test-tenant'

        # Update the plugin
        self.setup_coreplugin(test_db_plugin.DB_PLUGIN_KLASS)

    def _create_network(self, plugin, ctx, shared=True):
        # Returns the request body too, because the tests mutate it later to
        # build the update payload.
        network = {'network': {'name': 'net',
                               'shared': shared,
                               'admin_state_up': True,
                               'tenant_id': self._tenant_id}}
        created_network = plugin.create_network(ctx, network)
        return (network, created_network['id'])

    def _create_port(self, plugin, ctx, net_id, device_owner, tenant_id):
        # MAC and fixed IPs are left unspecified so the plugin allocates them.
        port = {'port': {'name': 'port',
                         'network_id': net_id,
                         'mac_address': attributes.ATTR_NOT_SPECIFIED,
                         'fixed_ips': attributes.ATTR_NOT_SPECIFIED,
                         'admin_state_up': True,
                         'device_id': 'device_id',
                         'device_owner': device_owner,
                         'tenant_id': tenant_id}}
        plugin.create_port(ctx, port)

    def _test_update_shared_net_used(self,
                                     device_owner,
                                     expected_exception=None):
        # Shared net owned by self._tenant_id, port owned by another tenant
        # (suffix '1'); then flip shared -> False and check the outcome.
        plugin = manager.NeutronManager.get_plugin()
        ctx = context.get_admin_context()
        network, net_id = self._create_network(plugin, ctx)

        self._create_port(plugin,
                          ctx,
                          net_id,
                          device_owner,
                          self._tenant_id + '1')

        network['network']['shared'] = False

        if (expected_exception):
            with testlib_api.ExpectedException(expected_exception):
                plugin.update_network(ctx, net_id, network)
        else:
            plugin.update_network(ctx, net_id, network)

    def test_update_shared_net_used_fails(self):
        # A plain (empty device_owner) port must block the update.
        self._test_update_shared_net_used('', n_exc.InvalidSharedSetting)

    def test_update_shared_net_used_as_router_gateway(self):
        # Router gateway ports do not block un-sharing.
        self._test_update_shared_net_used(
            constants.DEVICE_OWNER_ROUTER_GW)

    def test_update_shared_net_used_by_floating_ip(self):
        # Floating-ip ports do not block un-sharing either.
        self._test_update_shared_net_used(
            constants.DEVICE_OWNER_FLOATINGIP)
| apache-2.0 |
lyft/incubator-airflow | airflow/sensors/bash.py | 5 | 3435 | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import os
from subprocess import PIPE, STDOUT, Popen
from tempfile import NamedTemporaryFile, TemporaryDirectory, gettempdir
from airflow.sensors.base_sensor_operator import BaseSensorOperator
from airflow.utils.decorators import apply_defaults
class BashSensor(BaseSensorOperator):
    """
    Executes a bash command/script and returns True if and only if the
    return code is 0.

    :param bash_command: The command, set of commands or reference to a
        bash script (must be '.sh') to be executed.
    :type bash_command: str
    :param env: If env is not None, it must be a mapping that defines the
        environment variables for the new process; these are used instead
        of inheriting the current process environment, which is the default
        behavior. (templated)
    :type env: dict
    :param output_encoding: output encoding of bash command.
    :type output_encoding: str
    """

    template_fields = ('bash_command', 'env')

    @apply_defaults
    def __init__(self,
                 bash_command,
                 env=None,
                 output_encoding='utf-8',
                 *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.bash_command = bash_command
        self.env = env
        self.output_encoding = output_encoding

    def poke(self, context):
        """
        Execute the bash command in a temporary directory
        which will be cleaned afterwards
        """
        bash_command = self.bash_command
        self.log.info("Tmp dir root location: \n %s", gettempdir())
        with TemporaryDirectory(prefix='airflowtmp') as tmp_dir:
            with NamedTemporaryFile(dir=tmp_dir, prefix=self.task_id) as f:
                f.write(bytes(bash_command, 'utf_8'))
                f.flush()
                fname = f.name
                # Bug fix: ``fname`` is already the absolute path of the
                # temp file inside ``tmp_dir``; the previous
                # ``tmp_dir + "/" + fname`` logged a nonsensical doubled path.
                script_location = fname
                self.log.info("Temporary script location: %s", script_location)
                self.log.info("Running command: %s", bash_command)
                # preexec_fn=os.setsid puts the child in its own session so
                # the whole process group can be signalled if needed.
                resp = Popen(  # pylint: disable=subprocess-popen-preexec-fn
                    ['bash', fname],
                    stdout=PIPE, stderr=STDOUT,
                    close_fds=True, cwd=tmp_dir,
                    env=self.env, preexec_fn=os.setsid)

                self.log.info("Output:")
                for line in iter(resp.stdout.readline, b''):
                    line = line.decode(self.output_encoding).strip()
                    self.log.info(line)
                resp.wait()
                self.log.info("Command exited with return code %s", resp.returncode)

                # Sensor succeeds only on exit code 0.
                return not resp.returncode
| apache-2.0 |
schleichdi2/OpenNfr_E2_Gui-6.0 | lib/python/Screens/DVD.py | 1 | 20193 | import os
from enigma import eTimer, iPlayableService, iServiceInformation, eServiceReference, iServiceKeys, getDesktop
from Screens.Screen import Screen
from Screens.MessageBox import MessageBox
from Screens.ChoiceBox import ChoiceBox
from Screens.HelpMenu import HelpableScreen
from Screens.InfoBarGenerics import InfoBarSeek, InfoBarPVRState, InfoBarCueSheetSupport, InfoBarShowHide, InfoBarNotifications, InfoBarAudioSelection, InfoBarSubtitleSupport
from Components.ActionMap import ActionMap, NumberActionMap, HelpableActionMap
from Components.Label import Label
from Components.Pixmap import Pixmap
from Components.ServiceEventTracker import ServiceEventTracker, InfoBarBase
from Components.config import config
from Tools.Directories import pathExists
from Components.Harddisk import harddiskmanager
# Module-level state; not referenced within this portion of the file.
lastpath = ""


class DVDSummary(Screen):
    """LCD summary screen for the DVD player: shows title, elapsed time and
    current chapter."""

    def __init__(self, session, parent):
        Screen.__init__(self, session, parent)
        for widget in ("Title", "Time", "Chapter"):
            self[widget] = Label("")

    def updateChapter(self, chapter):
        self["Chapter"].setText(chapter)

    def setTitle(self, title):
        self["Title"].setText(title)
class DVDOverlay(Screen):
    """Borderless, transparent fullscreen surface the DVD subpicture/menus
    are rendered onto. The skin is rebuilt per instance so the overlay can
    match the desktop width and an optional explicit height."""

    def __init__(self, session, args = None, height = None):
        desktop = getDesktop(0).size()
        width = desktop.width()
        if height is None:
            height = desktop.height()
        # The skin must be set on the class *before* Screen.__init__ reads it.
        DVDOverlay.skin = """<screen name="DVDOverlay" position="0,0" size="%d,%d" flags="wfNoBorder" zPosition="-1" backgroundColor="transparent" />""" % (width, height)
        Screen.__init__(self, session)
class ChapterZap(Screen):
    """Small numeric input dialog for jumping straight to a chapter number.

    Auto-confirms after 3 seconds of inactivity or as soon as four digits
    have been entered; closes with 0 on cancel.
    """
    skin = """
<screen name="ChapterZap" position="235,255" size="250,60" title="Chapter" >
<widget name="chapter" position="35,15" size="110,25" font="Regular;23" />
<widget name="number" position="145,15" size="80,25" halign="right" font="Regular;23" />
</screen>"""

    def __init__(self, session, number):
        Screen.__init__(self, session)
        self.field = str(number)

        self["chapter"] = Label(_("Chapter:"))
        self["number"] = Label(self.field)

        actions = {"cancel": self.quit, "ok": self.keyOK}
        for digit in "1234567890":
            actions[digit] = self.keyNumberGlobal
        self["actions"] = NumberActionMap(["SetupActions"], actions)

        self.Timer = eTimer()
        self.Timer.callback.append(self.keyOK)
        self.Timer.start(3000, True)

    def quit(self):
        self.Timer.stop()
        self.close(0)

    def keyOK(self):
        self.Timer.stop()
        self.close(int(self["number"].getText()))

    def keyNumberGlobal(self, number):
        # Restart the 3 s auto-confirm window on every keypress.
        self.Timer.start(3000, True)
        self.field += str(number)
        self["number"].setText(self.field)
        if len(self.field) >= 4:
            self.keyOK()
class DVDPlayer(Screen, InfoBarBase, InfoBarNotifications, InfoBarSeek, InfoBarPVRState, InfoBarShowHide, HelpableScreen, InfoBarCueSheetSupport, InfoBarAudioSelection, InfoBarSubtitleSupport):
	"""Main enigma2 DVD playback screen.

	Combines the generic infobar mixins (seek, PVR state, cuesheet/resume,
	audio and subtitle selection) with DVD-specific handling: DVD menu
	navigation, chapter/title/angle OSD, physical-disc hotplug detection and
	direct chapter zapping via the numeric keys.
	"""
	ALLOW_SUSPEND = Screen.SUSPEND_PAUSES
	ENABLE_RESUME_SUPPORT = True

	def save_infobar_seek_config(self):
		# Remember the user's global seek settings so they can be restored
		# in restore_infobar_seek_config() when the player closes.
		self.saved_config_speeds_forward = config.seek.speeds_forward.value
		self.saved_config_speeds_backward = config.seek.speeds_backward.value
		self.saved_config_enter_forward = config.seek.enter_forward.value
		self.saved_config_enter_backward = config.seek.enter_backward.value
		self.saved_config_seek_on_pause = config.seek.on_pause.value
		self.saved_config_seek_speeds_slowmotion = config.seek.speeds_slowmotion.value

	def change_infobar_seek_config(self):
		# Install DVD-friendly seek speeds for the lifetime of this screen.
		config.seek.speeds_forward.value = [2, 4, 6, 8, 16, 32, 64]
		config.seek.speeds_backward.value = [2, 4, 6, 8, 16, 32, 64]
		config.seek.speeds_slowmotion.value = [ 2, 3, 4, 6 ]
		config.seek.enter_forward.value = "2"
		config.seek.enter_backward.value = "2"
		config.seek.on_pause.value = "play"

	def restore_infobar_seek_config(self):
		# Counterpart of save_infobar_seek_config(); called from __onClose.
		config.seek.speeds_forward.value = self.saved_config_speeds_forward
		config.seek.speeds_backward.value = self.saved_config_speeds_backward
		config.seek.speeds_slowmotion.value = self.saved_config_seek_speeds_slowmotion
		config.seek.enter_forward.value = self.saved_config_enter_forward
		config.seek.enter_backward.value = self.saved_config_enter_backward
		config.seek.on_pause.value = self.saved_config_seek_on_pause

	def __init__(self, session, dvd_device=None, dvd_filelist=None, args=None):
		# Mutable-default guard: never share a list between instances.
		if not dvd_filelist: dvd_filelist = []
		Screen.__init__(self, session)
		InfoBarBase.__init__(self)
		InfoBarNotifications.__init__(self)
		InfoBarCueSheetSupport.__init__(self, actionmap = "MediaPlayerCueSheetActions")
		InfoBarShowHide.__init__(self)
		InfoBarAudioSelection.__init__(self)
		InfoBarSubtitleSupport.__init__(self)
		HelpableScreen.__init__(self)
		# Seek config must be swapped before InfoBarSeek reads it.
		self.save_infobar_seek_config()
		self.change_infobar_seek_config()
		InfoBarSeek.__init__(self)
		InfoBarPVRState.__init__(self)

		# Remember and stop the current live service; restored in __onClose.
		self.oldService = self.session.nav.getCurrentlyPlayingServiceOrGroup()
		self.session.nav.stopService()
		self["audioLabel"] = Label("n/a")
		self["subtitleLabel"] = Label("")
		self["angleLabel"] = Label("")
		self["chapterLabel"] = Label("")
		self["anglePix"] = Pixmap()
		self["anglePix"].hide()
		# Cache the last OSD tuples so the infobar is only popped up when
		# the audio/subtitle/angle info actually changes.
		self.last_audioTuple = None
		self.last_subtitleTuple = None
		self.last_angleTuple = None
		self.totalChapters = 0
		self.currentChapter = 0
		self.totalTitles = 0
		self.currentTitle = 0

		# Service events (evUser+n) are DVD-specific notifications emitted
		# by the DVD service implementation.
		self.__event_tracker = ServiceEventTracker(screen=self, eventmap=
			{
				iPlayableService.evStopped: self.__serviceStopped,
				iPlayableService.evUser: self.__timeUpdated,
				iPlayableService.evUser+1: self.__statePlay,
				iPlayableService.evUser+2: self.__statePause,
				iPlayableService.evUser+3: self.__osdFFwdInfoAvail,
				iPlayableService.evUser+4: self.__osdFBwdInfoAvail,
				iPlayableService.evUser+5: self.__osdStringAvail,
				iPlayableService.evUser+6: self.__osdAudioInfoAvail,
				iPlayableService.evUser+7: self.__osdSubtitleInfoAvail,
				iPlayableService.evUser+8: self.__chapterUpdated,
				iPlayableService.evUser+9: self.__titleUpdated,
				iPlayableService.evUser+11: self.__menuOpened,
				iPlayableService.evUser+12: self.__menuClosed,
				iPlayableService.evUser+13: self.__osdAngleInfoAvail
			})

		self["DVDPlayerDirectionActions"] = ActionMap(["DirectionActions"],
			{
				#MENU KEY DOWN ACTIONS
				"left": self.keyLeft,
				"right": self.keyRight,
				"up": self.keyUp,
				"down": self.keyDown,

				#MENU KEY REPEATED ACTIONS
				"leftRepeated": self.doNothing,
				"rightRepeated": self.doNothing,
				"upRepeated": self.doNothing,
				"downRepeated": self.doNothing,

				#MENU KEY UP ACTIONS
				"leftUp": self.doNothing,
				"rightUp": self.doNothing,
				"upUp": self.doNothing,
				"downUp": self.doNothing,
			})

		self["OkCancelActions"] = ActionMap(["OkCancelActions"],
			{
				"ok": self.keyOk,
				"cancel": self.keyCancel,
			})

		self["DVDPlayerPlaybackActions"] = HelpableActionMap(self, "DVDPlayerActions",
			{
				#PLAYER ACTIONS
				"dvdMenu": (self.enterDVDMenu, _("show DVD main menu")),
				"toggleInfo": (self.toggleInfo, _("toggle time, chapter, audio, subtitle info")),
				"nextChapter": (self.nextChapter, _("forward to the next chapter")),
				"prevChapter": (self.prevChapter, _("rewind to the previous chapter")),
				"nextTitle": (self.nextTitle, _("jump forward to the next title")),
				"prevTitle": (self.prevTitle, _("jump back to the previous title")),
				"tv": (self.askLeavePlayer, _("exit DVD player or return to file browser")),
				"dvdAudioMenu": (self.enterDVDAudioMenu, _("(show optional DVD audio menu)")),
				"AudioSelection": (self.enterAudioSelection, _("Select audio track")),
				"nextAudioTrack": (self.nextAudioTrack, _("switch to the next audio track")),
				"nextSubtitleTrack": (self.nextSubtitleTrack, _("switch to the next subtitle language")),
				"nextAngle": (self.nextAngle, _("switch to the next angle")),
				"seekBeginning": self.seekBeginning,
			}, -2)

		self["NumberActions"] = NumberActionMap( [ "NumberActions"],
			{
				"1": self.keyNumberGlobal,
				"2": self.keyNumberGlobal,
				"3": self.keyNumberGlobal,
				"4": self.keyNumberGlobal,
				"5": self.keyNumberGlobal,
				"6": self.keyNumberGlobal,
				"7": self.keyNumberGlobal,
				"8": self.keyNumberGlobal,
				"9": self.keyNumberGlobal,
				"0": self.keyNumberGlobal,
			})

		self.onClose.append(self.__onClose)

		# Hotplug plugin is optional; without it physical-disc detection
		# only happens once at startup via scanHotplug().
		try:
			from Plugins.SystemPlugins.Hotplug.plugin import hotplugNotifier
			hotplugNotifier.append(self.hotplugCB)
		except:
			pass
		self.autoplay = dvd_device or dvd_filelist

		if dvd_device:
			self.physicalDVD = True
		else:
			self.scanHotplug()

		self.dvd_filelist = dvd_filelist
		self.onFirstExecBegin.append(self.opened)
		self.service = None
		self.in_menu = False

	def keyNumberGlobal(self, number):
		# Open the chapter-zap dialog pre-filled with the pressed digit.
		print "You pressed number " + str(number)
		self.session.openWithCallback(self.numberEntered, ChapterZap, number)

	def numberEntered(self, retval):
#		print self.servicelist
		if retval > 0:
			self.zapToNumber(retval)

	def getServiceInterface(self, iface):
		# Return the named interface ("info", "keys", "subtitle", ...) of the
		# current service, or None if unavailable.
		service = self.service
		if service:
			attr = getattr(service, iface, None)
			if callable(attr):
				return attr()
		return None

	def __serviceStopped(self):
		self.dvdScreen.hide()
		subs = self.getServiceInterface("subtitle")
		if subs:
			subs.disableSubtitles(self.session.current_dialog.instance)

	def serviceStarted(self): #override InfoBarShowHide function
		self.dvdScreen.show()

	def doEofInternal(self, playing):
		if self.in_menu:
			self.hide()

	def __menuOpened(self):
		# Inside the DVD menu: hide the infobar and disable chapter zapping
		# so digits reach the menu instead.
		self.hide()
		self.in_menu = True
		self["NumberActions"].setEnabled(False)

	def __menuClosed(self):
		self.show()
		self.in_menu = False
		self["NumberActions"].setEnabled(True)

	def setChapterLabel(self):
		# Title 0 means we are in the DVD menu rather than a title.
		chapterLCD = "Menu"
		chapterOSD = "DVD Menu"
		if self.currentTitle > 0:
			chapterLCD = "%s %d" % (_("Chap."), self.currentChapter)
			chapterOSD = "DVD %s %d/%d" % (_("Chapter"), self.currentChapter, self.totalChapters)
			chapterOSD += " (%s %d/%d)" % (_("Title"), self.currentTitle, self.totalTitles)
		self["chapterLabel"].setText(chapterOSD)
		try:
			self.session.summary.updateChapter(chapterLCD)
		except:
			pass

	def doNothing(self):
		# Sink for key-repeat/key-up events we deliberately ignore.
		pass

	def toggleInfo(self):
		if not self.in_menu:
			self.toggleShow()
			print "toggleInfo"

	def __timeUpdated(self):
		print "timeUpdated"

	def __statePlay(self):
		print "statePlay"

	def __statePause(self):
		print "statePause"

	def __osdFFwdInfoAvail(self):
		self.setChapterLabel()
		print "FFwdInfoAvail"

	def __osdFBwdInfoAvail(self):
		self.setChapterLabel()
		print "FBwdInfoAvail"

	def __osdStringAvail(self):
		print "StringAvail"

	def __osdAudioInfoAvail(self):
		# Update the audio label; pop up the infobar only on a real change.
		info = self.getServiceInterface("info")
		audioTuple = info and info.getInfoObject(iServiceInformation.sUser+6)
		print "AudioInfoAvail ", repr(audioTuple)
		if audioTuple:
#			audioString = "%d: %s (%s)" % (audioTuple[0], audioTuple[1],audioTuple[2])
			audioString = "%s (%s)" % (audioTuple[1],audioTuple[2])
			self["audioLabel"].setText(audioString)
			if audioTuple != self.last_audioTuple and not self.in_menu:
				self.doShow()
		self.last_audioTuple = audioTuple

	def __osdSubtitleInfoAvail(self):
		info = self.getServiceInterface("info")
		subtitleTuple = info and info.getInfoObject(iServiceInformation.sUser+7)
		print "SubtitleInfoAvail ", repr(subtitleTuple)
		if subtitleTuple:
			subtitleString = ""
			# NOTE(review): identity comparison with an int literal; works on
			# CPython via small-int caching but should be "!= 0".
			if subtitleTuple[0] is not 0:
#				subtitleString = "%d: %s" % (subtitleTuple[0], subtitleTuple[1])
				subtitleString = "%s" % subtitleTuple[1]
			self["subtitleLabel"].setText(subtitleString)
			if subtitleTuple != self.last_subtitleTuple and not self.in_menu:
				self.doShow()
		self.last_subtitleTuple = subtitleTuple

	def __osdAngleInfoAvail(self):
		info = self.getServiceInterface("info")
		angleTuple = info and info.getInfoObject(iServiceInformation.sUser+8)
		print "AngleInfoAvail ", repr(angleTuple)
		if angleTuple:
			angleString = ""
			# Only show the angle indicator when the title has multiple angles.
			if angleTuple[1] > 1:
				angleString = "%d / %d" % (angleTuple[0], angleTuple[1])
				self["anglePix"].show()
			else:
				self["anglePix"].hide()
			self["angleLabel"].setText(angleString)
			if angleTuple != self.last_angleTuple and not self.in_menu:
				self.doShow()
		self.last_angleTuple = angleTuple

	def __chapterUpdated(self):
		info = self.getServiceInterface("info")
		if info:
			self.currentChapter = info.getInfo(iServiceInformation.sCurrentChapter)
			self.totalChapters = info.getInfo(iServiceInformation.sTotalChapters)
			self.setChapterLabel()
			print "__chapterUpdated: %d/%d" % (self.currentChapter, self.totalChapters)

	def __titleUpdated(self):
		info = self.getServiceInterface("info")
		if info:
			self.currentTitle = info.getInfo(iServiceInformation.sCurrentTitle)
			self.totalTitles = info.getInfo(iServiceInformation.sTotalTitles)
			self.setChapterLabel()
			print "__titleUpdated: %d/%d" % (self.currentTitle, self.totalTitles)
			if not self.in_menu:
				self.doShow()

	def askLeavePlayer(self):
		# When started via autoplay there is no file browser to return to,
		# so leave immediately without asking.
		if self.autoplay:
			self.exitCB((None,"exit"))
			return
		choices = [(_("Exit"), "exit"), (_("Continue playing"), "play")]
		if self.physicalDVD:
			cur = self.session.nav.getCurrentlyPlayingServiceOrGroup()
			if cur and not cur.toString().endswith(harddiskmanager.getAutofsMountpoint(harddiskmanager.getCD())):
				choices.insert(0,(_("Play DVD"), "playPhysical" ))
		self.session.openWithCallback(self.exitCB, ChoiceBox, title=_("Leave DVD player?"), list = choices)

	def sendKey(self, key):
		# Forward a navigation/control key to the DVD service; returns the
		# keys interface (truthy) when the service accepted it.
		keys = self.getServiceInterface("keys")
		if keys:
			keys.keyPressed(key)
		return keys

	def enterAudioSelection(self):
		self.audioSelection()

	def nextAudioTrack(self):
		self.sendKey(iServiceKeys.keyUser)

	def nextSubtitleTrack(self):
		self.sendKey(iServiceKeys.keyUser+1)

	def enterDVDAudioMenu(self):
		self.sendKey(iServiceKeys.keyUser+2)

	def nextChapter(self):
		self.sendKey(iServiceKeys.keyUser+3)

	def prevChapter(self):
		self.sendKey(iServiceKeys.keyUser+4)

	def nextTitle(self):
		self.sendKey(iServiceKeys.keyUser+5)

	def prevTitle(self):
		self.sendKey(iServiceKeys.keyUser+6)

	def enterDVDMenu(self):
		self.sendKey(iServiceKeys.keyUser+7)

	def nextAngle(self):
		self.sendKey(iServiceKeys.keyUser+8)

	def seekBeginning(self):
		if self.service:
			seekable = self.getSeek()
			if seekable:
				seekable.seekTo(0)

	def zapToNumber(self, number):
		# Jump directly to the given chapter in the current title.
		if self.service:
			seekable = self.getSeek()
			if seekable:
				print "seek to chapter %d" % number
				seekable.seekChapter(number)

#	MENU ACTIONS
	def keyRight(self):
		self.sendKey(iServiceKeys.keyRight)

	def keyLeft(self):
		self.sendKey(iServiceKeys.keyLeft)

	def keyUp(self):
		self.sendKey(iServiceKeys.keyUp)

	def keyDown(self):
		self.sendKey(iServiceKeys.keyDown)

	def keyOk(self):
		if self.sendKey(iServiceKeys.keyOk) and not self.in_menu:
			self.toggleInfo()

	def keyCancel(self):
		self.askLeavePlayer()

	def opened(self):
		# First-exec hook: decide what to play based on how we were started.
		if self.autoplay and self.dvd_filelist:
			# opened via autoplay
			self.FileBrowserClosed(self.dvd_filelist[0])
		elif self.autoplay and self.physicalDVD:
			self.playPhysicalCB(True)
		elif self.physicalDVD:
			# opened from menu with dvd in drive
			self.session.openWithCallback(self.playPhysicalCB, MessageBox, text=_("Do you want to play DVD in drive?"), timeout=5 )

	def playPhysicalCB(self, answer):
		if answer:
			harddiskmanager.setDVDSpeed(harddiskmanager.getCD(), 1)
			self.FileBrowserClosed(harddiskmanager.getAutofsMountpoint(harddiskmanager.getCD()))

	def FileBrowserClosed(self, val):
		# Start playback of the selected path/ISO; builds the service
		# reference, derives a display name and sizes the overlay screen from
		# the IFO header (PAL/NTSC, resolution) before playing.
		curref = self.session.nav.getCurrentlyPlayingServiceOrGroup()
		print "FileBrowserClosed", val
		if val is None:
			self.askLeavePlayer()
		else:
			isopathname = "/VIDEO_TS.ISO"
			if os.path.exists(val + isopathname):
				val += isopathname
			# 4369 = 0x1111, the enigma2 DVD service type.
			newref = eServiceReference(4369, 0, val)
			print "play", newref.toString()
			if curref is None or curref != newref:
				if newref.toString().endswith("/VIDEO_TS") or newref.toString().endswith("/"):
					names = newref.toString().rsplit("/",3)
					if names[2].startswith("Disk ") or names[2].startswith("DVD "):
						name = str(names[1]) + " - " + str(names[2])
					else:
						name = names[2]
					print "setting name to: ", self.service
					newref.setName(str(name))

				# Construct a path for the IFO header assuming it exists
				ifofilename = val
				if not ifofilename.upper().endswith("/VIDEO_TS"):
					ifofilename += "/VIDEO_TS"
				files = [("/VIDEO_TS.IFO", 0x100), ("/VTS_01_0.IFO", 0x100), ("/VTS_01_0.IFO", 0x200)] # ( filename, offset )
				for name in files:
					(status, isNTSC, isLowResolution) = self.readVideoAtributes( ifofilename, name )
					if status:
						break

				height = getDesktop(0).size().height()
				print "[DVD] height:", height
				if isNTSC:
					# Scale the overlay so NTSC (480 line) material fills the
					# PAL-based desktop geometry.
					height = height * 576 / 480
					print "[DVD] NTSC height:", height
				if isLowResolution:
					height *= 2
					print "[DVD] LowResolution:", height

				self.dvdScreen = self.session.instantiateDialog(DVDOverlay, height=height)

				self.session.nav.playService(newref)
				self.service = self.session.nav.getCurrentService()
				print "self.service", self.service
				print "cur_dlg", self.session.current_dialog
				subs = self.getServiceInterface("subtitle")
				if subs:
					subs.enableSubtitles(self.dvdScreen.instance, None)

	def readVideoAtributes(self, isofilename, checked_file):
		# (Name kept as-is for compatibility, typo included.) Reads one IFO
		# header entry and returns (found, isNTSC, isLowResolution).
		(name, offset) = checked_file
		isofilename += name
		print "[DVD] file", name
		status = False
		isNTSC = False
		isLowResolution = False
		ifofile = None
		try:
			# Try to read the IFO header to determine PAL/NTSC format and the resolution
			ifofile = open(isofilename, "r")
			ifofile.seek(offset)
			video_attr_high = ord(ifofile.read(1))
			if video_attr_high != 0:
				status = True
			video_attr_low = ord(ifofile.read(1))
			print "[DVD] %s: video_attr_high = %x" % ( name, video_attr_high ), "video_attr_low = %x" % video_attr_low
			isNTSC = (video_attr_high & 0x10 == 0)
			isLowResolution = (video_attr_low & 0x18 == 0x18)
		except:
			# If the service is an .iso or .img file we assume it is PAL
			# Sorry we cannot open image files here.
			print "[DVD] Cannot read file or is ISO/IMG"
		finally:
			if ifofile is not None:
				ifofile.close()
		return status, isNTSC, isLowResolution

	def exitCB(self, answer):
		# ChoiceBox callback from askLeavePlayer().
		if answer is not None:
			if answer[1] == "exit":
				if self.service:
					self.service = None
				self.close()
			elif answer[1] == "playPhysical":
				if self.service:
					self.service = None
				self.playPhysicalCB(True)
			else:
				pass

	def __onClose(self):
		# Restore seek config and the previously playing service, and detach
		# from the hotplug notifier if it was available.
		self.restore_infobar_seek_config()
		self.session.nav.playService(self.oldService)
		try:
			from Plugins.SystemPlugins.Hotplug.plugin import hotplugNotifier
			hotplugNotifier.remove(self.hotplugCB)
		except:
			pass

	def playLastCB(self, answer): # overwrite infobar cuesheet function
		print "playLastCB", answer, self.resume_point
		if self.service:
			if answer:
				seekable = self.getSeek()
				if seekable:
					seekable.seekTo(self.resume_point)
			pause = self.service.pause()
			pause.unpause()
		self.hideAfterResume()

	def showAfterCuesheetOperation(self):
		if not self.in_menu:
			self.show()

	def createSummary(self):
		return DVDSummary

#override some InfoBarSeek functions
	def doEof(self):
		self.setSeekState(self.SEEK_STATE_PLAY)

	def calcRemainingTime(self):
		return 0

	def hotplugCB(self, dev, media_state):
		# Track insertion/removal of the physical DVD while the player runs.
		print "[hotplugCB]", dev, media_state
		if dev == harddiskmanager.getCD():
			if media_state == "1":
				self.scanHotplug()
			else:
				self.physicalDVD = False

	def scanHotplug(self):
		# One-shot scan of the CD/DVD mountpoint for DVD video content.
		devicepath = harddiskmanager.getAutofsMountpoint(harddiskmanager.getCD())
		if pathExists(devicepath):
			from Components.Scanner import scanDevice
			res = scanDevice(devicepath)
			list = [ (r.description, r, res[r], self.session) for r in res ]
			if list:
				(desc, scanner, files, session) = list[0]
				for file in files:
					print file
					if file.mimetype == "video/x-dvd":
						print "physical dvd found:", devicepath
						self.physicalDVD = True
						return
		self.physicalDVD = False
| gpl-2.0 |
alfredoavanzosc/odoomrp-wip-1 | partner_risk_insurance/models/res_partner.py | 29 | 2789 |
# -*- encoding: utf-8 -*-
##############################################################################
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see http://www.gnu.org/licenses/.
#
##############################################################################
from openerp import models, fields, api
class ResPartner(models.Model):
    """Extend res.partner with credit-insurance fields.

    The effective credit limit is computed as the sum of the limit granted by
    the company itself and the limit granted by the insurance company.
    """
    _inherit = 'res.partner'
    @api.one
    @api.depends('company_credit_limit', 'insurance_credit_limit')
    def _credit_limit(self):
        # Total limit = company-granted limit + insurance-granted limit.
        self.credit_limit = (self.company_credit_limit +
                             self.insurance_credit_limit)
    # Stored computed field; recomputed whenever either component changes.
    credit_limit = fields.Float('Credit Limit', store=True,
                                compute=_credit_limit)
    company_credit_limit = fields.Float("Company's Credit Limit",
                                        help='Credit limit granted by the '
                                             'company.')
    insurance_credit_limit = fields.Float("Insurance's Credit Limit",
                                          help='Credit limit granted by the '
                                               'insurance company.')
    risk_insurance_coverage_percent = fields.Float(
        "Insurance's Credit Coverage", help='Percentage of the credit covered '
                                            'by the insurance.')
    risk_insurance_requested = fields.Boolean(
        'Insurance Requested', help='Mark this field if an insurance was '
                                    'requested for the credit of this partner.')
    risk_insurance_grant_date = fields.Date('Insurance Grant Date',
                                            help='Date when the insurance was '
                                                 'granted by the insurance company.'
                                            )
    risk_insurance_code = fields.Char('Insurance Code',
                                      help='Code assigned to this partner by '
                                           'the risk insurance company.')
    risk_insurance_code_2 = fields.Char('Insurance Code 2',
                                        help='Secondary code assigned to this '
                                             'partner by the risk insurance '
                                             'company.')
| agpl-3.0 |
PhasesResearchLab/ESPEI | espei/optimizers/graph.py | 1 | 3616 | """
Defines an OptNode and an OptGraph to be used by OptimizerBase subclasses.
Together they define the path of one or more optimizations and can be used to
store and replay optimization history.
"""
import copy
class OptNode:
    """
    Node as the result of an optimization.

    Attributes
    ----------
    parameters : dict
        Optimized parameters, stored as a deep copy of the input.
    datasets : PickleableTinyDB
        Datasets used for this optimization, stored as a deep copy.
    id : int
        Identifier assigned by the OptGraph the node is added to.
    parent : OptNode
    children : set of OptNode

    Notes
    -----
    OptNodes are individual nodes in the graph that correspond to the result of
    a call to fit - they represent optimized parameters given the parent state
    and some data (also part of the OptNode).
    Each OptNode can only be derived from one set of parameters, however one
    parameter state may be a branching point to many new parameter states, so an
    OptNode can have only one parent, but many children.
    """
    def __init__(self, parameters, datasets, node_id=None):
        # Deep copies isolate the node from later mutation by the caller.
        self.parameters = copy.deepcopy(parameters)
        self.datasets = copy.deepcopy(datasets)
        self.id = node_id
        self.parent = None
        self.children = set()

    def __repr__(self):
        str_params = str(self.parameters)
        return "<OptNode({}, node_id={})>".format(str_params, self.id)

    def __str__(self):
        # Same representation as __repr__ (previously duplicated code).
        return self.__repr__()

    def __hash__(self):
        # Hash on the id via hash() so nodes stay hashable even before an
        # integer id has been assigned (self.id may be None). __hash__ must
        # return an int, so returning self.id directly would raise for None.
        return hash(self.id)

    def __eq__(self, other):
        # Nodes are identified solely by their graph-assigned id. Comparing
        # against a non-OptNode returns NotImplemented (so == falls back to
        # False) instead of raising AttributeError.
        if isinstance(other, OptNode):
            return self.id == other.id
        return NotImplemented
class OptGraph:
    """
    Directed acyclic graph of optimal parameters.

    Notes
    -----
    Every node is an OptNode (one "commit" of parameters). The root is
    intended to hold the fresh, unoptimized parameters from the database, so
    walking from the root to any node replays the sequence of optimizations
    that produced that node's parameters.
    """
    def __init__(self, root):
        # The root is always node 0 and has no parent.
        root.id = 0
        root.parent = None
        self._id_counter = 0
        self.root = root
        self.nodes = {root.id: root}

    def add_node(self, node, parent):
        """Attach *node* as a child of *parent* and register it in the graph."""
        fresh_id = self._get_next_id()
        node.id = fresh_id
        node.parent = parent
        parent.children.add(node)
        self.nodes[fresh_id] = node

    def _get_next_id(self):
        # Monotonically increasing ids; the root already took 0.
        self._id_counter = self._id_counter + 1
        return self._id_counter

    def __str__(self):
        pieces = []
        for node_id, graph_node in self.nodes.items():
            child_ids = ', '.join(str(child.id) for child in graph_node.children)
            pieces.append('{}: [{}], '.format(node_id, child_ids))
        return "<OptGraph({})>".format(''.join(pieces))

    @staticmethod
    def get_path_to_node(node):
        """
        Return the path from the root to the node.

        Parameters
        ----------
        node : OptNode

        Returns
        -------
        list of OptNode
        """
        path = []
        current = node
        while current is not None:
            path.insert(0, current)  # prepend so the root ends up first
            current = current.parent
        return path

    def get_transformation_dict(self, node):
        """
        Return a dictionary of parameters merged along the root-to-*node*
        path; parameters set closer to *node* override earlier ones.

        Parameters
        ----------
        node : OptNode

        Returns
        -------
        dict
        """
        merged = {}
        for step in self.get_path_to_node(node):
            merged.update(step.parameters)
        return merged
| mit |
amrdraz/brython | www/src/Lib/test/test_struct.py | 26 | 25262 | import array
import unittest
import struct
import sys
from test import support
# Platform facts used throughout the tests.
ISBIGENDIAN = (sys.byteorder == "big")
IS32BIT = (sys.maxsize == 0x7fffffff)

# All integer format codes and all byte-order prefixes.
integer_codes = ('b', 'B', 'h', 'H', 'i', 'I', 'l', 'L', 'q', 'Q', 'n', 'N')
byteorders = ('', '@', '=', '<', '>', '!')

def iter_integer_formats(byteorders=byteorders):
    """Yield every supported (code, byteorder) combination.

    Skips native 'q'/'Q' when the platform lacks C long long, and skips
    'n'/'N' for the standard (non-native) byte orders, which don't define
    them.
    """
    for code in integer_codes:
        for byteorder in byteorders:
            native = byteorder in ('', '@')
            if native and code in ('q', 'Q') and not HAVE_LONG_LONG:
                continue
            if not native and code in ('n', 'N'):
                continue
            yield code, byteorder
# Native 'q' packing isn't available on systems that don't have the C
# long long type; probe for it at import time.
try:
    struct.pack('q', 5)
    HAVE_LONG_LONG = True
except struct.error:
    HAVE_LONG_LONG = False
def string_reverse(s):
    """Return a copy of the sequence *s* with its elements reversed."""
    return s[slice(None, None, -1)]
def bigendian_to_native(value):
    """On big-endian hosts return *value* unchanged; otherwise byte-swap it."""
    if sys.byteorder == "big":
        return value
    return value[::-1]
class StructTest(unittest.TestCase):
    """Exercise struct.pack/unpack/calcsize across all codes and byte orders."""
    def test_isbigendian(self):
        # '=i' packs in native order: the high byte comes first iff big-endian.
        self.assertEqual((struct.pack('=i', 1)[0] == 0), ISBIGENDIAN)
    def test_consistence(self):
        # calcsize rejects unknown codes and scales linearly with repetition.
        self.assertRaises(struct.error, struct.calcsize, 'Z')
        sz = struct.calcsize('i')
        self.assertEqual(sz * 3, struct.calcsize('iii'))
        fmt = 'cbxxxxxxhhhhiillffd?'
        fmt3 = '3c3b18x12h6i6l6f3d3?'
        sz = struct.calcsize(fmt)
        sz3 = struct.calcsize(fmt3)
        self.assertEqual(sz * 3, sz3)
        self.assertRaises(struct.error, struct.pack, 'iii', 3)
        self.assertRaises(struct.error, struct.pack, 'i', 3, 3, 3)
        self.assertRaises((TypeError, struct.error), struct.pack, 'i', 'foo')
        self.assertRaises((TypeError, struct.error), struct.pack, 'P', 'foo')
        self.assertRaises(struct.error, struct.unpack, 'd', b'flap')
        s = struct.pack('ii', 1, 2)
        self.assertRaises(struct.error, struct.unpack, 'iii', s)
        self.assertRaises(struct.error, struct.unpack, 'i', s)
    def test_transitiveness(self):
        # Values must round-trip through pack/unpack for every prefix.
        c = b'a'
        b = 1
        h = 255
        i = 65535
        l = 65536
        f = 3.1415
        d = 3.1415
        t = True
        for prefix in ('', '@', '<', '>', '=', '!'):
            for format in ('xcbhilfd?', 'xcBHILfd?'):
                format = prefix + format
                s = struct.pack(format, c, b, h, i, l, f, d, t)
                cp, bp, hp, ip, lp, fp, dp, tp = struct.unpack(format, s)
                self.assertEqual(cp, c)
                self.assertEqual(bp, b)
                self.assertEqual(hp, h)
                self.assertEqual(ip, i)
                self.assertEqual(lp, l)
                self.assertEqual(int(100 * fp), int(100 * f))
                self.assertEqual(int(100 * dp), int(100 * d))
                self.assertEqual(tp, t)
    def test_new_features(self):
        # Test some of the new features in detail
        # (format, argument, big-endian result, little-endian result, asymmetric)
        tests = [
            ('c', b'a', b'a', b'a', 0),
            ('xc', b'a', b'\0a', b'\0a', 0),
            ('cx', b'a', b'a\0', b'a\0', 0),
            ('s', b'a', b'a', b'a', 0),
            ('0s', b'helloworld', b'', b'', 1),
            ('1s', b'helloworld', b'h', b'h', 1),
            ('9s', b'helloworld', b'helloworl', b'helloworl', 1),
            ('10s', b'helloworld', b'helloworld', b'helloworld', 0),
            ('11s', b'helloworld', b'helloworld\0', b'helloworld\0', 1),
            ('20s', b'helloworld', b'helloworld'+10*b'\0', b'helloworld'+10*b'\0', 1),
            ('b', 7, b'\7', b'\7', 0),
            ('b', -7, b'\371', b'\371', 0),
            ('B', 7, b'\7', b'\7', 0),
            ('B', 249, b'\371', b'\371', 0),
            ('h', 700, b'\002\274', b'\274\002', 0),
            ('h', -700, b'\375D', b'D\375', 0),
            ('H', 700, b'\002\274', b'\274\002', 0),
            ('H', 0x10000-700, b'\375D', b'D\375', 0),
            ('i', 70000000, b'\004,\035\200', b'\200\035,\004', 0),
            ('i', -70000000, b'\373\323\342\200', b'\200\342\323\373', 0),
            ('I', 70000000, b'\004,\035\200', b'\200\035,\004', 0),
            ('I', 0x100000000-70000000, b'\373\323\342\200', b'\200\342\323\373', 0),
            ('l', 70000000, b'\004,\035\200', b'\200\035,\004', 0),
            ('l', -70000000, b'\373\323\342\200', b'\200\342\323\373', 0),
            ('L', 70000000, b'\004,\035\200', b'\200\035,\004', 0),
            ('L', 0x100000000-70000000, b'\373\323\342\200', b'\200\342\323\373', 0),
            ('f', 2.0, b'@\000\000\000', b'\000\000\000@', 0),
            ('d', 2.0, b'@\000\000\000\000\000\000\000',
             b'\000\000\000\000\000\000\000@', 0),
            ('f', -2.0, b'\300\000\000\000', b'\000\000\000\300', 0),
            ('d', -2.0, b'\300\000\000\000\000\000\000\000',
             b'\000\000\000\000\000\000\000\300', 0),
            ('?', 0, b'\0', b'\0', 0),
            ('?', 3, b'\1', b'\1', 1),
            ('?', True, b'\1', b'\1', 0),
            ('?', [], b'\0', b'\0', 1),
            ('?', (1,), b'\1', b'\1', 1),
        ]
        for fmt, arg, big, lil, asy in tests:
            for (xfmt, exp) in [('>'+fmt, big), ('!'+fmt, big), ('<'+fmt, lil),
                                ('='+fmt, ISBIGENDIAN and big or lil)]:
                res = struct.pack(xfmt, arg)
                self.assertEqual(res, exp)
                self.assertEqual(struct.calcsize(xfmt), len(res))
                rev = struct.unpack(xfmt, res)[0]
                if rev != arg:
                    # Asymmetric rows (asy=1) do not round-trip exactly.
                    self.assertTrue(asy)
    def test_calcsize(self):
        # Standard (non-native) sizes are fixed by the struct documentation.
        expected_size = {
            'b': 1, 'B': 1,
            'h': 2, 'H': 2,
            'i': 4, 'I': 4,
            'l': 4, 'L': 4,
            'q': 8, 'Q': 8,
        }
        # standard integer sizes
        for code, byteorder in iter_integer_formats(('=', '<', '>', '!')):
            format = byteorder+code
            size = struct.calcsize(format)
            self.assertEqual(size, expected_size[code])
        # native integer sizes
        native_pairs = 'bB', 'hH', 'iI', 'lL', 'nN'
        if HAVE_LONG_LONG:
            native_pairs += 'qQ',
        for format_pair in native_pairs:
            for byteorder in '', '@':
                signed_size = struct.calcsize(byteorder + format_pair[0])
                unsigned_size = struct.calcsize(byteorder + format_pair[1])
                self.assertEqual(signed_size, unsigned_size)
        # bounds for native integer sizes
        self.assertEqual(struct.calcsize('b'), 1)
        self.assertLessEqual(2, struct.calcsize('h'))
        self.assertLessEqual(4, struct.calcsize('l'))
        self.assertLessEqual(struct.calcsize('h'), struct.calcsize('i'))
        self.assertLessEqual(struct.calcsize('i'), struct.calcsize('l'))
        if HAVE_LONG_LONG:
            self.assertLessEqual(8, struct.calcsize('q'))
            self.assertLessEqual(struct.calcsize('l'), struct.calcsize('q'))
        self.assertGreaterEqual(struct.calcsize('n'), struct.calcsize('i'))
        self.assertGreaterEqual(struct.calcsize('n'), struct.calcsize('P'))
    def test_integers(self):
        # Integer tests (bBhHiIlLqQnN).
        import binascii
        class IntTester(unittest.TestCase):
            # Helper that exhaustively round-trips one integer format.
            def __init__(self, format):
                super(IntTester, self).__init__(methodName='test_one')
                self.format = format
                self.code = format[-1]
                self.byteorder = format[:-1]
                if not self.byteorder in byteorders:
                    raise ValueError("unrecognized packing byteorder: %s" %
                                     self.byteorder)
                self.bytesize = struct.calcsize(format)
                self.bitsize = self.bytesize * 8
                if self.code in tuple('bhilqn'):
                    self.signed = True
                    self.min_value = -(2**(self.bitsize-1))
                    self.max_value = 2**(self.bitsize-1) - 1
                elif self.code in tuple('BHILQN'):
                    self.signed = False
                    self.min_value = 0
                    self.max_value = 2**self.bitsize - 1
                else:
                    raise ValueError("unrecognized format code: %s" %
                                     self.code)
            # pack/unpack/unhexlify bound as defaults (local-lookup speedup).
            def test_one(self, x, pack=struct.pack,
                         unpack=struct.unpack,
                         unhexlify=binascii.unhexlify):
                format = self.format
                if self.min_value <= x <= self.max_value:
                    expected = x
                    if self.signed and x < 0:
                        expected += 1 << self.bitsize
                    self.assertGreaterEqual(expected, 0)
                    expected = '%x' % expected
                    if len(expected) & 1:
                        expected = "0" + expected
                    expected = expected.encode('ascii')
                    expected = unhexlify(expected)
                    expected = (b"\x00" * (self.bytesize - len(expected)) +
                                expected)
                    if (self.byteorder == '<' or
                        self.byteorder in ('', '@', '=') and not ISBIGENDIAN):
                        expected = string_reverse(expected)
                    self.assertEqual(len(expected), self.bytesize)
                    # Pack work?
                    got = pack(format, x)
                    self.assertEqual(got, expected)
                    # Unpack work?
                    retrieved = unpack(format, got)[0]
                    self.assertEqual(x, retrieved)
                    # Adding any byte should cause a "too big" error.
                    self.assertRaises((struct.error, TypeError), unpack, format,
                                      b'\x01' + got)
                else:
                    # x is out of range -- verify pack realizes that.
                    self.assertRaises((OverflowError, ValueError, struct.error),
                                      pack, format, x)
            def run(self):
                from random import randrange
                # Create all interesting powers of 2.
                values = []
                for exp in range(self.bitsize + 3):
                    values.append(1 << exp)
                # Add some random values.
                for i in range(self.bitsize):
                    val = 0
                    for j in range(self.bytesize):
                        val = (val << 8) | randrange(256)
                    values.append(val)
                # Values absorbed from other tests
                values.extend([300, 700000, sys.maxsize*4])
                # Try all those, and their negations, and +-1 from
                # them.  Note that this tests all power-of-2
                # boundaries in range, and a few out of range, plus
                # +-(2**n +- 1).
                for base in values:
                    for val in -base, base:
                        for incr in -1, 0, 1:
                            x = val + incr
                            self.test_one(x)
                # Some error cases.
                class NotAnInt:
                    def __int__(self):
                        return 42
                # Objects with an '__index__' method should be allowed
                # to pack as integers.  That is assuming the implemented
                # '__index__' method returns an 'int'.
                class Indexable(object):
                    def __init__(self, value):
                        self._value = value
                    def __index__(self):
                        return self._value
                # If the '__index__' method raises a type error, then
                # '__int__' should be used with a deprecation warning.
                class BadIndex(object):
                    def __index__(self):
                        raise TypeError
                    def __int__(self):
                        return 42
                self.assertRaises((TypeError, struct.error),
                                  struct.pack, self.format,
                                  "a string")
                self.assertRaises((TypeError, struct.error),
                                  struct.pack, self.format,
                                  randrange)
                self.assertRaises((TypeError, struct.error),
                                  struct.pack, self.format,
                                  3+42j)
                self.assertRaises((TypeError, struct.error),
                                  struct.pack, self.format,
                                  NotAnInt())
                self.assertRaises((TypeError, struct.error),
                                  struct.pack, self.format,
                                  BadIndex())
                # Check for legitimate values from '__index__'.
                for obj in (Indexable(0), Indexable(10), Indexable(17),
                            Indexable(42), Indexable(100), Indexable(127)):
                    try:
                        struct.pack(format, obj)
                    except:
                        self.fail("integer code pack failed on object "
                                  "with '__index__' method")
                # Check for bogus values from '__index__'.
                for obj in (Indexable(b'a'), Indexable('b'), Indexable(None),
                            Indexable({'a': 1}), Indexable([1, 2, 3])):
                    self.assertRaises((TypeError, struct.error),
                                      struct.pack, self.format,
                                      obj)
        for code, byteorder in iter_integer_formats():
            format = byteorder+code
            t = IntTester(format)
            t.run()
    def test_nN_code(self):
        # n and N don't exist in standard sizes
        def assertStructError(func, *args, **kwargs):
            with self.assertRaises(struct.error) as cm:
                func(*args, **kwargs)
            self.assertIn("bad char in struct format", str(cm.exception))
        for code in 'nN':
            for byteorder in ('=', '<', '>', '!'):
                format = byteorder+code
                assertStructError(struct.calcsize, format)
                assertStructError(struct.pack, format, 0)
                assertStructError(struct.unpack, format, b"")
    def test_p_code(self):
        # Test p ("Pascal string") code.
        for code, input, expected, expectedback in [
                ('p', b'abc', b'\x00', b''),
                ('1p', b'abc', b'\x00', b''),
                ('2p', b'abc', b'\x01a', b'a'),
                ('3p', b'abc', b'\x02ab', b'ab'),
                ('4p', b'abc', b'\x03abc', b'abc'),
                ('5p', b'abc', b'\x03abc\x00', b'abc'),
                ('6p', b'abc', b'\x03abc\x00\x00', b'abc'),
                ('1000p', b'x'*1000, b'\xff' + b'x'*999, b'x'*255)]:
            got = struct.pack(code, input)
            self.assertEqual(got, expected)
            (got,) = struct.unpack(code, got)
            self.assertEqual(got, expectedback)
    def test_705836(self):
        # SF bug 705836.  "<f" and ">f" had a severe rounding bug, where a carry
        # from the low-order discarded bits could propagate into the exponent
        # field, causing the result to be wrong by a factor of 2.
        import math
        for base in range(1, 33):
            # smaller <- largest representable float less than base.
            delta = 0.5
            while base - delta / 2.0 != base:
                delta /= 2.0
            smaller = base - delta
            # Packing this rounds away a solid string of trailing 1 bits.
            packed = struct.pack("<f", smaller)
            unpacked = struct.unpack("<f", packed)[0]
            # This failed at base = 2, 4, and 32, with unpacked = 1, 2, and
            # 16, respectively.
            self.assertEqual(base, unpacked)
            bigpacked = struct.pack(">f", smaller)
            self.assertEqual(bigpacked, string_reverse(packed))
            unpacked = struct.unpack(">f", bigpacked)[0]
            self.assertEqual(base, unpacked)
        # Largest finite IEEE single.
        big = (1 << 24) - 1
        big = math.ldexp(big, 127 - 23)
        packed = struct.pack(">f", big)
        unpacked = struct.unpack(">f", packed)[0]
        self.assertEqual(big, unpacked)
        # The same, but tack on a 1 bit so it rounds up to infinity.
        big = (1 << 25) - 1
        big = math.ldexp(big, 127 - 24)
        self.assertRaises(OverflowError, struct.pack, ">f", big)
    def test_1530559(self):
        # Integer codes must reject floats (no silent truncation).
        for code, byteorder in iter_integer_formats():
            format = byteorder + code
            self.assertRaises(struct.error, struct.pack, format, 1.0)
            self.assertRaises(struct.error, struct.pack, format, 1.5)
        self.assertRaises(struct.error, struct.pack, 'P', 1.0)
        self.assertRaises(struct.error, struct.pack, 'P', 1.5)
    def test_unpack_from(self):
        test_string = b'abcd01234'
        fmt = '4s'
        s = struct.Struct(fmt)
        for cls in (bytes, bytearray):
            data = cls(test_string)
            self.assertEqual(s.unpack_from(data), (b'abcd',))
            self.assertEqual(s.unpack_from(data, 2), (b'cd01',))
            self.assertEqual(s.unpack_from(data, 4), (b'0123',))
            for i in range(6):
                self.assertEqual(s.unpack_from(data, i), (data[i:i+4],))
            for i in range(6, len(test_string) + 1):
                self.assertRaises(struct.error, s.unpack_from, data, i)
        for cls in (bytes, bytearray):
            data = cls(test_string)
            self.assertEqual(struct.unpack_from(fmt, data), (b'abcd',))
            self.assertEqual(struct.unpack_from(fmt, data, 2), (b'cd01',))
            self.assertEqual(struct.unpack_from(fmt, data, 4), (b'0123',))
            for i in range(6):
                self.assertEqual(struct.unpack_from(fmt, data, i), (data[i:i+4],))
            for i in range(6, len(test_string) + 1):
                self.assertRaises(struct.error, struct.unpack_from, fmt, data, i)
    def test_pack_into(self):
        test_string = b'Reykjavik rocks, eow!'
        writable_buf = array.array('b', b' '*100)
        fmt = '21s'
        s = struct.Struct(fmt)
        # Test without offset
        s.pack_into(writable_buf, 0, test_string)
        from_buf = writable_buf.tobytes()[:len(test_string)]
        self.assertEqual(from_buf, test_string)
        # Test with offset.
        s.pack_into(writable_buf, 10, test_string)
        from_buf = writable_buf.tobytes()[:len(test_string)+10]
        self.assertEqual(from_buf, test_string[:10] + test_string)
        # Go beyond boundaries.
        small_buf = array.array('b', b' '*10)
        self.assertRaises((ValueError, struct.error), s.pack_into, small_buf, 0,
                          test_string)
        self.assertRaises((ValueError, struct.error), s.pack_into, small_buf, 2,
                          test_string)
        # Test bogus offset (issue 3694)
        sb = small_buf
        self.assertRaises((TypeError, struct.error), struct.pack_into, b'', sb,
                          None)
    def test_pack_into_fn(self):
        # Same as test_pack_into, but via the module-level pack_into function.
        test_string = b'Reykjavik rocks, eow!'
        writable_buf = array.array('b', b' '*100)
        fmt = '21s'
        pack_into = lambda *args: struct.pack_into(fmt, *args)
        # Test without offset.
        pack_into(writable_buf, 0, test_string)
        from_buf = writable_buf.tobytes()[:len(test_string)]
        self.assertEqual(from_buf, test_string)
        # Test with offset.
        pack_into(writable_buf, 10, test_string)
        from_buf = writable_buf.tobytes()[:len(test_string)+10]
        self.assertEqual(from_buf, test_string[:10] + test_string)
        # Go beyond boundaries.
        small_buf = array.array('b', b' '*10)
        self.assertRaises((ValueError, struct.error), pack_into, small_buf, 0,
                          test_string)
        self.assertRaises((ValueError, struct.error), pack_into, small_buf, 2,
                          test_string)
    def test_unpack_with_buffer(self):
        # SF bug 1563759: struct.unpack doesn't support buffer protocol objects
        data1 = array.array('B', b'\x12\x34\x56\x78')
        data2 = memoryview(b'\x12\x34\x56\x78') # XXX b'......XXXX......', 6, 4
        for data in [data1, data2]:
            value, = struct.unpack('>I', data)
            self.assertEqual(value, 0x12345678)
    def test_bool(self):
        class ExplodingBool(object):
            def __bool__(self):
                raise IOError
        for prefix in tuple("<>!=")+('',):
            # NOTE(review): '[]' appears twice in this falsy tuple — probably
            # one of them was meant to be another falsy value (e.g. {}).
            false = (), [], [], '', 0
            true = [1], 'test', 5, -1, 0xffffffff+1, 0xffffffff/2
            falseFormat = prefix + '?' * len(false)
            packedFalse = struct.pack(falseFormat, *false)
            unpackedFalse = struct.unpack(falseFormat, packedFalse)
            trueFormat = prefix + '?' * len(true)
            packedTrue = struct.pack(trueFormat, *true)
            unpackedTrue = struct.unpack(trueFormat, packedTrue)
            self.assertEqual(len(true), len(unpackedTrue))
            self.assertEqual(len(false), len(unpackedFalse))
            for t in unpackedFalse:
                self.assertFalse(t)
            for t in unpackedTrue:
                self.assertTrue(t)
            packed = struct.pack(prefix+'?', 1)
            self.assertEqual(len(packed), struct.calcsize(prefix+'?'))
            if len(packed) != 1:
                # A multi-byte bool is only acceptable for the native format
                # (empty prefix); this asserts prefix is '' in that case.
                self.assertFalse(prefix, msg='encoded bool is not one byte: %r'
                                             %packed)
            try:
                struct.pack(prefix + '?', ExplodingBool())
            except IOError:
                pass
            else:
                self.fail("Expected IOError: struct.pack(%r, "
                          "ExplodingBool())" % (prefix + '?'))
        for c in [b'\x01', b'\x7f', b'\xff', b'\x0f', b'\xf0']:
            self.assertTrue(struct.unpack('>?', c)[0])
    def test_count_overflow(self):
        # Repeat counts near sys.maxsize must fail cleanly, not overflow.
        hugecount = '{}b'.format(sys.maxsize+1)
        self.assertRaises(struct.error, struct.calcsize, hugecount)
        hugecount2 = '{}b{}H'.format(sys.maxsize//2, sys.maxsize//2)
        self.assertRaises(struct.error, struct.calcsize, hugecount2)
    if IS32BIT:
        def test_crasher(self):
            self.assertRaises(MemoryError, struct.pack, "357913941b", "a")
    def test_trailing_counter(self):
        store = array.array('b', b' '*100)
        # format lists containing only count spec should result in an error
        self.assertRaises(struct.error, struct.pack, '12345')
        self.assertRaises(struct.error, struct.unpack, '12345', '')
        self.assertRaises(struct.error, struct.pack_into, '12345', store, 0)
        self.assertRaises(struct.error, struct.unpack_from, '12345', store, 0)
        # Format lists with trailing count spec should result in an error
        self.assertRaises(struct.error, struct.pack, 'c12345', 'x')
        self.assertRaises(struct.error, struct.unpack, 'c12345', 'x')
        self.assertRaises(struct.error, struct.pack_into, 'c12345', store, 0,
                          'x')
        self.assertRaises(struct.error, struct.unpack_from, 'c12345', store,
                          0)
        # Mixed format tests
        self.assertRaises(struct.error, struct.pack, '14s42', 'spam and eggs')
        self.assertRaises(struct.error, struct.unpack, '14s42',
                          'spam and eggs')
        self.assertRaises(struct.error, struct.pack_into, '14s42', store, 0,
                          'spam and eggs')
        self.assertRaises(struct.error, struct.unpack_from, '14s42', store, 0)
    def test_Struct_reinitialization(self):
        # Issue 9422: there was a memory leak when reinitializing a
        # Struct instance.  This test can be used to detect the leak
        # when running with regrtest -L.
        s = struct.Struct('i')
        s.__init__('ii')
    def check_sizeof(self, format_str, number_of_codes):
        # Helper: assert sys.getsizeof of a Struct matches its layout.
        # The size of 'PyStructObject'
        totalsize = support.calcobjsize('2n3P')
        # The size taken up by the 'formatcode' dynamic array
        totalsize += struct.calcsize('P2n0P') * (number_of_codes + 1)
        support.check_sizeof(self, struct.Struct(format_str), totalsize)
    @support.cpython_only
    def test__sizeof__(self):
        for code in integer_codes:
            self.check_sizeof(code, 1)
        self.check_sizeof('BHILfdspP', 9)
        self.check_sizeof('B' * 1234, 1234)
        self.check_sizeof('fd', 2)
        self.check_sizeof('xxxxxxxxxxxxxx', 0)
        self.check_sizeof('100H', 100)
        self.check_sizeof('187s', 1)
        self.check_sizeof('20p', 1)
        self.check_sizeof('0s', 1)
        self.check_sizeof('0c', 0)
def test_main():
    """Run the full StructTest suite via the regrtest helper."""
    support.run_unittest(StructTest)
# Allow running this test module directly.
if __name__ == '__main__':
    test_main()
| bsd-3-clause |
denys-duchier/django | django/db/backends/oracle/creation.py | 18 | 17682 | import sys
from django.conf import settings
from django.db.backends.base.creation import BaseDatabaseCreation
from django.db.utils import DatabaseError
from django.utils.crypto import get_random_string
from django.utils.functional import cached_property
TEST_DATABASE_PREFIX = 'test_'
class DatabaseCreation(BaseDatabaseCreation):
    @cached_property
    def _maindb_connection(self):
        """
        This is analogous to other backends' `_nodb_connection` property,
        which allows access to an "administrative" connection which can
        be used to manage the test databases.
        For Oracle, the only connection that can be used for that purpose
        is the main (non-test) connection.
        """
        settings_dict = settings.DATABASES[self.connection.alias]
        # SAVED_USER/SAVED_PASSWORD hold the original main credentials once
        # _switch_to_test_user() has replaced USER/PASSWORD with the test user.
        user = settings_dict.get('SAVED_USER') or settings_dict['USER']
        password = settings_dict.get('SAVED_PASSWORD') or settings_dict['PASSWORD']
        # Copy so the real settings dict is not mutated.
        settings_dict = settings_dict.copy()
        settings_dict.update(USER=user, PASSWORD=password)
        # Instantiate the same wrapper class as the current connection.
        DatabaseWrapper = type(self.connection)
        return DatabaseWrapper(settings_dict, alias=self.connection.alias)
    def _create_test_db(self, verbosity=1, autoclobber=False, keepdb=False):
        """
        Create the test tablespaces and test user, prompting interactively
        (unless autoclobber) when they already exist. Exits the process on
        unrecoverable errors. Returns the test database NAME from settings.
        """
        parameters = self._get_test_db_params()
        cursor = self._maindb_connection.cursor()
        if self._test_database_create():
            try:
                self._execute_test_db_creation(cursor, parameters, verbosity, keepdb)
            except Exception as e:
                if 'ORA-01543' not in str(e):
                    # All errors except "tablespace already exists" cancel tests
                    sys.stderr.write("Got an error creating the test database: %s\n" % e)
                    sys.exit(2)
                if not autoclobber:
                    confirm = input(
                        "It appears the test database, %s, already exists. "
                        "Type 'yes' to delete it, or 'no' to cancel: " % parameters['user'])
                if autoclobber or confirm == 'yes':
                    if verbosity >= 1:
                        print("Destroying old test database for alias '%s'..." % self.connection.alias)
                    try:
                        self._execute_test_db_destruction(cursor, parameters, verbosity)
                    except DatabaseError as e:
                        if 'ORA-29857' in str(e):
                            # Leftover objects block the drop; offer to remove them.
                            self._handle_objects_preventing_db_destruction(cursor, parameters,
                                                                           verbosity, autoclobber)
                        else:
                            # Ran into a database error that isn't about leftover objects in the tablespace
                            sys.stderr.write("Got an error destroying the old test database: %s\n" % e)
                            sys.exit(2)
                    except Exception as e:
                        sys.stderr.write("Got an error destroying the old test database: %s\n" % e)
                        sys.exit(2)
                    try:
                        self._execute_test_db_creation(cursor, parameters, verbosity, keepdb)
                    except Exception as e:
                        sys.stderr.write("Got an error recreating the test database: %s\n" % e)
                        sys.exit(2)
                else:
                    print("Tests cancelled.")
                    sys.exit(1)
        if self._test_user_create():
            if verbosity >= 1:
                print("Creating test user...")
            try:
                self._create_test_user(cursor, parameters, verbosity, keepdb)
            except Exception as e:
                if 'ORA-01920' not in str(e):
                    # All errors except "user already exists" cancel tests
                    sys.stderr.write("Got an error creating the test user: %s\n" % e)
                    sys.exit(2)
                if not autoclobber:
                    confirm = input(
                        "It appears the test user, %s, already exists. Type "
                        "'yes' to delete it, or 'no' to cancel: " % parameters['user'])
                if autoclobber or confirm == 'yes':
                    try:
                        if verbosity >= 1:
                            print("Destroying old test user...")
                        self._destroy_test_user(cursor, parameters, verbosity)
                        if verbosity >= 1:
                            print("Creating test user...")
                        self._create_test_user(cursor, parameters, verbosity, keepdb)
                    except Exception as e:
                        sys.stderr.write("Got an error recreating the test user: %s\n" % e)
                        sys.exit(2)
                else:
                    print("Tests cancelled.")
                    sys.exit(1)
        self._maindb_connection.close() # done with main user -- test user and tablespaces created
        self._switch_to_test_user(parameters)
        return self.connection.settings_dict['NAME']
def _switch_to_test_user(self, parameters):
"""
Switch to the user that's used for creating the test database.
Oracle doesn't have the concept of separate databases under the same
user, so a separate user is used; see _create_test_db(). The main user
is also needed for cleanup when testing is completed, so save its
credentials in the SAVED_USER/SAVED_PASSWORD key in the settings dict.
"""
real_settings = settings.DATABASES[self.connection.alias]
real_settings['SAVED_USER'] = self.connection.settings_dict['SAVED_USER'] = \
self.connection.settings_dict['USER']
real_settings['SAVED_PASSWORD'] = self.connection.settings_dict['SAVED_PASSWORD'] = \
self.connection.settings_dict['PASSWORD']
real_test_settings = real_settings['TEST']
test_settings = self.connection.settings_dict['TEST']
real_test_settings['USER'] = real_settings['USER'] = test_settings['USER'] = \
self.connection.settings_dict['USER'] = parameters['user']
real_settings['PASSWORD'] = self.connection.settings_dict['PASSWORD'] = parameters['password']
def set_as_test_mirror(self, primary_settings_dict):
"""
Set this database up to be used in testing as a mirror of a primary
database whose settings are given.
"""
self.connection.settings_dict['USER'] = primary_settings_dict['USER']
self.connection.settings_dict['PASSWORD'] = primary_settings_dict['PASSWORD']
    def _handle_objects_preventing_db_destruction(self, cursor, parameters, verbosity, autoclobber):
        """
        Called when ORA-29857 blocked dropping the test tablespace: offer to
        drop the test user (which usually owns the blocking objects), then
        retry the tablespace destruction. Exits the process on failure or
        when the user declines.
        """
        # There are objects in the test tablespace which prevent dropping it
        # The easy fix is to drop the test user -- but are we allowed to do so?
        print("There are objects in the old test database which prevent its destruction.")
        print("If they belong to the test user, deleting the user will allow the test "
              "database to be recreated.")
        print("Otherwise, you will need to find and remove each of these objects, "
              "or use a different tablespace.\n")
        if self._test_user_create():
            if not autoclobber:
                confirm = input("Type 'yes' to delete user %s: " % parameters['user'])
            if autoclobber or confirm == 'yes':
                try:
                    if verbosity >= 1:
                        print("Destroying old test user...")
                    self._destroy_test_user(cursor, parameters, verbosity)
                except Exception as e:
                    sys.stderr.write("Got an error destroying the test user: %s\n" % e)
                    sys.exit(2)
                try:
                    if verbosity >= 1:
                        print("Destroying old test database for alias '%s'..." % self.connection.alias)
                    self._execute_test_db_destruction(cursor, parameters, verbosity)
                except Exception as e:
                    sys.stderr.write("Got an error destroying the test database: %s\n" % e)
                    sys.exit(2)
            else:
                print("Tests cancelled -- test database cannot be recreated.")
                sys.exit(1)
        else:
            print("Django is configured to use pre-existing test user '%s',"
                  " and will not attempt to delete it.\n" % parameters['user'])
            print("Tests cancelled -- test database cannot be recreated.")
            sys.exit(1)
    def _destroy_test_db(self, test_database_name, verbosity=1):
        """
        Destroy the test user and the test tablespaces. The main credentials
        are restored on the connection first (from SAVED_USER/SAVED_PASSWORD)
        so the administrative connection can perform the drops.
        """
        self.connection.settings_dict['USER'] = self.connection.settings_dict['SAVED_USER']
        self.connection.settings_dict['PASSWORD'] = self.connection.settings_dict['SAVED_PASSWORD']
        self.connection.close()
        parameters = self._get_test_db_params()
        cursor = self._maindb_connection.cursor()
        if self._test_user_create():
            if verbosity >= 1:
                print('Destroying test user...')
            self._destroy_test_user(cursor, parameters, verbosity)
        if self._test_database_create():
            if verbosity >= 1:
                print('Destroying test database tables...')
            self._execute_test_db_destruction(cursor, parameters, verbosity)
        self._maindb_connection.close()
    def _execute_test_db_creation(self, cursor, parameters, verbosity, keepdb=False):
        """
        Create the test data tablespace and temporary tablespace.
        With keepdb, an already-existing tablespace (ORA-01543) is tolerated.
        """
        if verbosity >= 2:
            print("_create_test_db(): dbname = %s" % parameters['user'])
        statements = [
            """CREATE TABLESPACE %(tblspace)s
               DATAFILE '%(datafile)s' SIZE 20M
               REUSE AUTOEXTEND ON NEXT 10M MAXSIZE %(maxsize)s
            """,
            """CREATE TEMPORARY TABLESPACE %(tblspace_temp)s
               TEMPFILE '%(datafile_tmp)s' SIZE 20M
               REUSE AUTOEXTEND ON NEXT 10M MAXSIZE %(maxsize_tmp)s
            """,
        ]
        # Ignore "tablespace already exists" error when keepdb is on.
        acceptable_ora_err = 'ORA-01543' if keepdb else None
        self._execute_allow_fail_statements(cursor, statements, parameters, verbosity, acceptable_ora_err)
    def _create_test_user(self, cursor, parameters, verbosity, keepdb=False):
        """
        Create the Oracle test user and grant it the privileges the test
        suite needs. With keepdb, an already-existing user (ORA-01920) is
        tolerated, but its password is reset to the expected one.
        """
        if verbosity >= 2:
            print("_create_test_user(): username = %s" % parameters['user'])
        statements = [
            """CREATE USER %(user)s
               IDENTIFIED BY "%(password)s"
               DEFAULT TABLESPACE %(tblspace)s
               TEMPORARY TABLESPACE %(tblspace_temp)s
               QUOTA UNLIMITED ON %(tblspace)s
            """,
            """GRANT CREATE SESSION,
                     CREATE TABLE,
                     CREATE SEQUENCE,
                     CREATE PROCEDURE,
                     CREATE TRIGGER
               TO %(user)s""",
        ]
        # Ignore "user already exists" error when keepdb is on
        acceptable_ora_err = 'ORA-01920' if keepdb else None
        success = self._execute_allow_fail_statements(cursor, statements, parameters, verbosity, acceptable_ora_err)
        # If the password was randomly generated, change the user accordingly.
        if not success and self._test_settings_get('PASSWORD') is None:
            set_password = 'ALTER USER %(user)s IDENTIFIED BY "%(password)s"'
            self._execute_statements(cursor, [set_password], parameters, verbosity)
        # Most test-suites can be run without the create-view privilege. But some need it.
        extra = "GRANT CREATE VIEW TO %(user)s"
        success = self._execute_allow_fail_statements(cursor, [extra], parameters, verbosity, 'ORA-01031')
        if not success and verbosity >= 2:
            print("Failed to grant CREATE VIEW permission to test user. This may be ok.")
def _execute_test_db_destruction(self, cursor, parameters, verbosity):
if verbosity >= 2:
print("_execute_test_db_destruction(): dbname=%s" % parameters['user'])
statements = [
'DROP TABLESPACE %(tblspace)s INCLUDING CONTENTS AND DATAFILES CASCADE CONSTRAINTS',
'DROP TABLESPACE %(tblspace_temp)s INCLUDING CONTENTS AND DATAFILES CASCADE CONSTRAINTS',
]
self._execute_statements(cursor, statements, parameters, verbosity)
def _destroy_test_user(self, cursor, parameters, verbosity):
if verbosity >= 2:
print("_destroy_test_user(): user=%s" % parameters['user'])
print("Be patient. This can take some time...")
statements = [
'DROP USER %(user)s CASCADE',
]
self._execute_statements(cursor, statements, parameters, verbosity)
def _execute_statements(self, cursor, statements, parameters, verbosity, allow_quiet_fail=False):
for template in statements:
stmt = template % parameters
if verbosity >= 2:
print(stmt)
try:
cursor.execute(stmt)
except Exception as err:
if (not allow_quiet_fail) or verbosity >= 2:
sys.stderr.write("Failed (%s)\n" % (err))
raise
def _execute_allow_fail_statements(self, cursor, statements, parameters, verbosity, acceptable_ora_err):
"""
Execute statements which are allowed to fail silently if the Oracle
error code given by `acceptable_ora_err` is raised. Return True if the
statements execute without an exception, or False otherwise.
"""
try:
# Statement can fail when acceptable_ora_err is not None
allow_quiet_fail = acceptable_ora_err is not None and len(acceptable_ora_err) > 0
self._execute_statements(cursor, statements, parameters, verbosity, allow_quiet_fail=allow_quiet_fail)
return True
except DatabaseError as err:
description = str(err)
if acceptable_ora_err is None or acceptable_ora_err not in description:
raise
return False
def _get_test_db_params(self):
return {
'dbname': self._test_database_name(),
'user': self._test_database_user(),
'password': self._test_database_passwd(),
'tblspace': self._test_database_tblspace(),
'tblspace_temp': self._test_database_tblspace_tmp(),
'datafile': self._test_database_tblspace_datafile(),
'datafile_tmp': self._test_database_tblspace_tmp_datafile(),
'maxsize': self._test_database_tblspace_size(),
'maxsize_tmp': self._test_database_tblspace_tmp_size(),
}
def _test_settings_get(self, key, default=None, prefixed=None):
"""
Return a value from the test settings dict, or a given default, or a
prefixed entry from the main settings dict.
"""
settings_dict = self.connection.settings_dict
val = settings_dict['TEST'].get(key, default)
if val is None and prefixed:
val = TEST_DATABASE_PREFIX + settings_dict[prefixed]
return val
def _test_database_name(self):
return self._test_settings_get('NAME', prefixed='NAME')
def _test_database_create(self):
return self._test_settings_get('CREATE_DB', default=True)
def _test_user_create(self):
return self._test_settings_get('CREATE_USER', default=True)
def _test_database_user(self):
return self._test_settings_get('USER', prefixed='USER')
def _test_database_passwd(self):
password = self._test_settings_get('PASSWORD')
if password is None and self._test_user_create():
# Oracle passwords are limited to 30 chars and can't contain symbols.
password = get_random_string(length=30)
return password
def _test_database_tblspace(self):
return self._test_settings_get('TBLSPACE', prefixed='USER')
def _test_database_tblspace_tmp(self):
    """Temporary test tablespace name.

    Uses TEST['TBLSPACE_TMP'] when set; otherwise derives a default from the
    prefixed USER name with a '_temp' suffix. Delegates to
    ``_test_settings_get`` for consistency with every sibling
    ``_test_database_*`` getter (the original inlined the lookup here).
    """
    settings_dict = self.connection.settings_dict
    default = TEST_DATABASE_PREFIX + settings_dict['USER'] + '_temp'
    return self._test_settings_get('TBLSPACE_TMP', default=default)
def _test_database_tblspace_datafile(self):
tblspace = '%s.dbf' % self._test_database_tblspace()
return self._test_settings_get('DATAFILE', default=tblspace)
def _test_database_tblspace_tmp_datafile(self):
tblspace = '%s.dbf' % self._test_database_tblspace_tmp()
return self._test_settings_get('DATAFILE_TMP', default=tblspace)
def _test_database_tblspace_size(self):
return self._test_settings_get('DATAFILE_MAXSIZE', default='500M')
def _test_database_tblspace_tmp_size(self):
return self._test_settings_get('DATAFILE_TMP_MAXSIZE', default='500M')
def _get_test_db_name(self):
"""
Return the 'production' DB name to get the test DB creation machinery
to work. This isn't a great deal in this case because DB names as
handled by Django don't have real counterparts in Oracle.
"""
return self.connection.settings_dict['NAME']
def test_db_signature(self):
settings_dict = self.connection.settings_dict
return (
settings_dict['HOST'],
settings_dict['PORT'],
settings_dict['ENGINE'],
settings_dict['NAME'],
self._test_database_user(),
)
| bsd-3-clause |
rghe/ansible | lib/ansible/modules/web_infrastructure/apache2_module.py | 27 | 8137 | #!/usr/bin/python
# coding: utf-8 -*-
# (c) 2013-2014, Christian Berendt <berendt@b1-systems.de>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: apache2_module
version_added: 1.6
author:
- Christian Berendt (@berendt)
- Ralf Hertel (@n0trax)
- Robin Roth (@robinro)
short_description: Enables/disables a module of the Apache2 webserver.
description:
- Enables or disables a specified module of the Apache2 webserver.
options:
name:
description:
- Name of the module to enable/disable as given to C(a2enmod/a2dismod).
required: true
identifier:
description:
- Identifier of the module as listed by C(apache2ctl -M).
This is optional and usually determined automatically by the common convention of
appending C(_module) to I(name) as well as custom exception for popular modules.
required: False
version_added: "2.5"
force:
description:
- Force disabling of default modules and override Debian warnings.
required: false
type: bool
default: False
version_added: "2.1"
state:
description:
- Desired state of the module.
choices: ['present', 'absent']
default: present
ignore_configcheck:
description:
- Ignore configuration checks about inconsistent module configuration. Especially for mpm_* modules.
type: bool
default: False
version_added: "2.3"
requirements: ["a2enmod","a2dismod"]
'''
EXAMPLES = '''
# enables the Apache2 module "wsgi"
- apache2_module:
state: present
name: wsgi
# disables the Apache2 module "wsgi"
- apache2_module:
state: absent
name: wsgi
# disable default modules for Debian
- apache2_module:
state: absent
name: autoindex
force: True
# disable mpm_worker and ignore warnings about missing mpm module
- apache2_module:
state: absent
name: mpm_worker
ignore_configcheck: True
# enable dump_io module, which is identified as dumpio_module inside apache2
- apache2_module:
state: present
name: dump_io
identifier: dumpio_module
'''
RETURN = '''
result:
description: message about action taken
returned: always
type: string
warnings:
description: list of warning messages
returned: when needed
type: list
rc:
description: return code of underlying command
returned: failed
type: int
stdout:
description: stdout of underlying command
returned: failed
type: string
stderr:
description: stderr of underlying command
returned: failed
type: string
'''
import re
# import module snippets
from ansible.module_utils.basic import AnsibleModule
def _run_threaded(module):
    """Return True when the running Apache reports a threaded MPM.

    Parses the `apachectl -V` output, which contains "threaded: yes" for
    threaded MPMs such as worker/event.
    """
    control_binary = _get_ctl_binary(module)
    rc, stdout, stderr = module.run_command("%s -V" % control_binary)
    return re.search(r'threaded:[ ]*yes', stdout) is not None
def _get_ctl_binary(module):
for command in ['apache2ctl', 'apachectl']:
ctl_binary = module.get_bin_path(command)
if ctl_binary is not None:
return ctl_binary
module.fail_json(
msg="Neither of apache2ctl nor apachctl found."
" At least one apache control binary is necessary."
)
def _module_is_enabled(module):
    """Return True when the module's identifier appears in `apache2ctl -M`.

    A failing config check is either fatal (fail_json) or downgraded to a
    warning when ignore_configcheck is set; AH00534 ("no MPM loaded") gets a
    dedicated warning for mpm_* modules.
    """
    control_binary = _get_ctl_binary(module)
    rc, stdout, stderr = module.run_command("%s -M" % control_binary)
    if rc != 0:
        error_msg = "Error executing %s: %s" % (control_binary, stderr)
        if module.params['ignore_configcheck']:
            if 'AH00534' in stderr and 'mpm_' in module.params['name']:
                module.warnings.append(
                    "No MPM module loaded! apache2 reload AND other module actions"
                    " will fail if no MPM module is loaded immediately."
                )
            else:
                module.warnings.append(error_msg)
            return False
        # fail_json raises in a real AnsibleModule, ending execution here.
        module.fail_json(msg=error_msg)
    searchstring = ' ' + module.params['identifier']
    return searchstring in stdout
def create_apache_identifier(name):
    """Map an a2enmod-style module name to its `apache2ctl -M` identifier.

    By convention a module enabled as ``name`` is listed as ``name_module``;
    a handful of popular modules deviate and are special-cased here.
    """
    # Direct spelling deviations between a2enmod and apache2ctl -M.
    text_workarounds = [
        ('shib2', 'mod_shib'),
        ('evasive', 'evasive20_module'),
    ]
    # Names whose identifier derives from a sub-part of the name.
    re_workarounds = [
        ('php', r'^(php\d)\.'),
    ]
    for fragment, identifier in text_workarounds:
        if fragment in name:
            return identifier
    for fragment, pattern in re_workarounds:
        if fragment not in name:
            continue
        match = re.search(pattern, name)
        if match is not None:
            return match.group(1) + '_module'
    return name + '_module'
def _set_state(module, state):
    """Converge the Apache module to the requested state and exit the module.

    Runs a2enmod/a2dismod as needed, re-checks the result, and always
    terminates through module.exit_json()/fail_json() (which raise in a real
    AnsibleModule run).
    """
    name = module.params['name']
    force = module.params['force']
    want_enabled = state == 'present'
    state_string = {'present': 'enabled', 'absent': 'disabled'}[state]
    a2mod_binary = {'present': 'a2enmod', 'absent': 'a2dismod'}[state]
    success_msg = "Module %s %s" % (name, state_string)
    if _module_is_enabled(module) != want_enabled:
        if module.check_mode:
            # Check mode: report the pending change without touching the system.
            module.exit_json(changed=True,
                            result=success_msg,
                            warnings=module.warnings)
        # BUG FIX: previously the binary *name* was overwritten with
        # get_bin_path()'s result before the error message was built, so a
        # missing binary produced "None not found. Perhaps this system does
        # not use None ...". Keep the name and the resolved path separate.
        binary_path = module.get_bin_path(a2mod_binary)
        if binary_path is None:
            module.fail_json(msg="%s not found. Perhaps this system does not use %s to manage apache" % (a2mod_binary, a2mod_binary))
        if not want_enabled and force:
            # force exists only for a2dismod on debian
            binary_path += ' -f'
        result, stdout, stderr = module.run_command("%s %s" % (binary_path, name))
        if _module_is_enabled(module) == want_enabled:
            module.exit_json(changed=True,
                            result=success_msg,
                            warnings=module.warnings)
        else:
            msg = (
                'Failed to set module {name} to {state}:\n'
                '{stdout}\n'
                'Maybe the module identifier ({identifier}) was guessed incorrectly.'
                'Consider setting the "identifier" option.'
            ).format(
                name=name,
                state=state_string,
                stdout=stdout,
                identifier=module.params['identifier']
            )
            module.fail_json(msg=msg,
                             rc=result,
                             stdout=stdout,
                             stderr=stderr)
    else:
        module.exit_json(changed=False,
                         result=success_msg,
                         warnings=module.warnings)
def main():
    """Entry point: parse arguments and converge the requested module state."""
    spec = dict(
        name=dict(required=True),
        identifier=dict(required=False, type='str'),
        force=dict(required=False, type='bool', default=False),
        state=dict(default='present', choices=['absent', 'present']),
        ignore_configcheck=dict(required=False, type='bool', default=False),
    )
    module = AnsibleModule(argument_spec=spec, supports_check_mode=True)
    module.warnings = []
    params = module.params
    name = params['name']
    # Enabling mod_cgi under a threaded MPM is unsupported; bail out early.
    if name == 'cgi' and _run_threaded(module):
        module.fail_json(msg="Your MPM seems to be threaded. No automatic actions on module %s possible." % name)
    # Derive the apache2ctl -M identifier unless the caller supplied one.
    if not params['identifier']:
        params['identifier'] = create_apache_identifier(params['name'])
    if params['state'] in ['present', 'absent']:
        _set_state(module, params['state'])
if __name__ == '__main__':
main()
| gpl-3.0 |
jitseniesen/spyder-memory-profiler | setup.py | 2 | 2960 | # -*- coding: utf-8 -*-
#
# Copyright © 2013 Spyder Project Contributors
# Licensed under the terms of the MIT License
# (see LICENSE.txt for details)
from setuptools import setup, find_packages
import os
import os.path as osp
def get_version():
    """Read ``__version__`` from the package's __init__.py without importing it.

    Returns the version string with surrounding quotes stripped.

    Raises:
        RuntimeError: if no ``__version__`` assignment is found (previously
            this fell off the end and silently returned None).
    """
    import codecs
    with codecs.open("spyder_memory_profiler/__init__.py", encoding="utf-8") as f:
        for line in f.read().splitlines():
            if "__version__" in line:
                version = line.split("=")[1].strip()
                return version.replace("'", '').replace('"', '')
    raise RuntimeError("Unable to find __version__ in "
                       "spyder_memory_profiler/__init__.py")
def get_package_data(name, extlist):
    """Return data files under package *name* whose extension is in *extlist*.

    Paths are returned relative to the package directory; hidden files
    (leading dot) are skipped.
    """
    flist = []
    for dirpath, _dirnames, filenames in os.walk(name):
        for fname in filenames:
            if not fname.startswith('.') and osp.splitext(fname)[1] in extlist:
                # os.path.relpath replaces the old manual prefix-slicing
                # workaround, which used len(os.pathsep) (the PATH-list
                # separator) where len(os.sep) was intended — it only worked
                # because both happen to be single characters.
                flist.append(osp.relpath(osp.join(dirpath, fname), name))
    return flist
# Requirements
# Runtime dependencies: the profiler itself plus a Spyder with the plugin API.
REQUIREMENTS = ['memory_profiler', 'spyder>=4']
# Data-file extensions bundled into the wheel via get_package_data().
EXTLIST = ['.jpg', '.png', '.json', '.mo', '.ini']
LIBNAME = 'spyder_memory_profiler'
# Long description shown on PyPI.
LONG_DESCRIPTION = """
This is a plugin for the Spyder IDE that integrates the Python memory profiler.
It allows you to see the memory usage in every line.
Usage
-----
Add a ``@profile`` decorator to the functions that you wish to profile then
press Ctrl+Shift+F10 to run the profiler on the current script, or go to
``Run > Profile memory line by line``.
The results will be shown in a dockwidget, grouped by function. Lines with a
stronger color have the largest increments in memory usage.
"""
# Standard setuptools metadata; the version and bundled data files are
# computed by the helpers above rather than hard-coded.
setup(
    name=LIBNAME,
    version=get_version(),
    packages=find_packages(),
    package_data={LIBNAME: get_package_data(LIBNAME, EXTLIST)},
    keywords=["Qt PyQt4 PyQt5 PySide spyder plugins spyplugins profiler"],
    install_requires=REQUIREMENTS,
    url='https://github.com/spyder-ide/spyder-memory-profiler',
    license='MIT',
    author='Spyder Project Contributors',
    description='Plugin for the Spyder IDE that integrates the Python'
                ' memory profiler',
    long_description=LONG_DESCRIPTION,
    classifiers=[
        'Development Status :: 4 - Beta',
        'Environment :: X11 Applications :: Qt',
        'Environment :: Win32 (MS Windows)',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: MIT License',
        'Operating System :: OS Independent',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
        'Topic :: Software Development',
        'Topic :: Text Editors :: Integrated Development Environments (IDE)'])
| mit |
uber/pyro | tests/contrib/timeseries/test_lgssm.py | 1 | 3798 | # Copyright (c) 2017-2019 Uber Technologies, Inc.
# SPDX-License-Identifier: Apache-2.0
import torch
from tests.common import assert_equal
from pyro.contrib.timeseries import GenericLGSSM, GenericLGSSMWithGPNoiseModel
import pytest
@pytest.mark.parametrize('model_class', ['lgssm', 'lgssmgp'])
@pytest.mark.parametrize('state_dim', [2, 3])
@pytest.mark.parametrize('obs_dim', [2, 4])
@pytest.mark.parametrize('T', [11, 17])
def test_generic_lgssm_forecast(model_class, state_dim, obs_dim, T):
    """Compare model._forecast() with hand-computed 1/2/3-step-ahead
    predictive means and covariances of a linear-Gaussian state space model.

    For the 'lgssmgp' variant the GP noise component is effectively switched
    off (tiny kernel hyperparameters), so the same closed-form LGSSM algebra
    applies to the state-space portion alone.
    """
    # Double precision keeps assert_equal tolerances meaningful.
    torch.set_default_tensor_type('torch.DoubleTensor')
    if model_class == 'lgssm':
        model = GenericLGSSM(state_dim=state_dim, obs_dim=obs_dim,
                             obs_noise_scale_init=0.1 + torch.rand(obs_dim))
    elif model_class == 'lgssmgp':
        model = GenericLGSSMWithGPNoiseModel(state_dim=state_dim, obs_dim=obs_dim, nu=1.5,
                                             obs_noise_scale_init=0.1 + torch.rand(obs_dim))
        # with these hyperparameters we essentially turn off the GP contributions
        model.kernel.length_scale = 1.0e-6 * torch.ones(obs_dim)
        model.kernel.kernel_scale = 1.0e-6 * torch.ones(obs_dim)
    targets = torch.randn(T, obs_dim)
    filtering_state = model._filter(targets)
    actual_loc, actual_cov = model._forecast(3, filtering_state, include_observation_noise=False)
    obs_matrix = model.obs_matrix if model_class == 'lgssm' else model.z_obs_matrix
    trans_matrix = model.trans_matrix if model_class == 'lgssm' else model.z_trans_matrix
    # Powers of the transition matrix propagate the state k steps forward.
    trans_matrix_sq = torch.mm(trans_matrix, trans_matrix)
    trans_matrix_cubed = torch.mm(trans_matrix_sq, trans_matrix)
    trans_obs = torch.mm(trans_matrix, obs_matrix)
    trans_trans_obs = torch.mm(trans_matrix_sq, obs_matrix)
    trans_trans_trans_obs = torch.mm(trans_matrix_cubed, obs_matrix)
    # we only compute contributions for the state space portion for lgssmgp
    fs_loc = filtering_state.loc if model_class == 'lgssm' else filtering_state.loc[-state_dim:]
    predicted_mean1 = torch.mm(fs_loc.unsqueeze(-2), trans_obs).squeeze(-2)
    predicted_mean2 = torch.mm(fs_loc.unsqueeze(-2), trans_trans_obs).squeeze(-2)
    predicted_mean3 = torch.mm(fs_loc.unsqueeze(-2), trans_trans_trans_obs).squeeze(-2)
    # check predicted means for 3 timesteps
    assert_equal(actual_loc[0], predicted_mean1)
    assert_equal(actual_loc[1], predicted_mean2)
    assert_equal(actual_loc[2], predicted_mean3)
    # check predicted covariances for 3 timesteps
    fs_covar, process_covar = None, None
    if model_class == 'lgssm':
        process_covar = model._get_trans_dist().covariance_matrix
        fs_covar = filtering_state.covariance_matrix
    elif model_class == 'lgssmgp':
        # we only compute contributions for the state space portion
        process_covar = model.trans_noise_scale_sq.diag_embed()
        fs_covar = filtering_state.covariance_matrix[-state_dim:, -state_dim:]
    # k-step covariance: filtered covariance propagated k steps plus one
    # process-noise term per intermediate step, all pushed through the
    # observation matrix (standard LGSSM prediction recursion).
    predicted_covar1 = torch.mm(trans_obs.t(), torch.mm(fs_covar, trans_obs)) + \
        torch.mm(obs_matrix.t(), torch.mm(process_covar, obs_matrix))
    predicted_covar2 = torch.mm(trans_trans_obs.t(), torch.mm(fs_covar, trans_trans_obs)) + \
        torch.mm(trans_obs.t(), torch.mm(process_covar, trans_obs)) + \
        torch.mm(obs_matrix.t(), torch.mm(process_covar, obs_matrix))
    predicted_covar3 = torch.mm(trans_trans_trans_obs.t(), torch.mm(fs_covar, trans_trans_trans_obs)) + \
        torch.mm(trans_trans_obs.t(), torch.mm(process_covar, trans_trans_obs)) + \
        torch.mm(trans_obs.t(), torch.mm(process_covar, trans_obs)) + \
        torch.mm(obs_matrix.t(), torch.mm(process_covar, obs_matrix))
    assert_equal(actual_cov[0], predicted_covar1)
    assert_equal(actual_cov[1], predicted_covar2)
    assert_equal(actual_cov[2], predicted_covar3)
| apache-2.0 |
reinhrst/panda | usr/lib/python2.7/encodings/cp1140.py | 593 | 13361 | """ Python Character Mapping Codec cp1140 generated from 'python-mappings/CP1140.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
    """Stateless cp1140 codec backed by the charmap tables in this module."""

    def encode(self, input, errors='strict'):
        encoded = codecs.charmap_encode(input, errors, encoding_table)
        return encoded

    def decode(self, input, errors='strict'):
        decoded = codecs.charmap_decode(input, errors, decoding_table)
        return decoded
class IncrementalEncoder(codecs.IncrementalEncoder):
    """Incremental cp1140 encoder; charmap encoding carries no state."""

    def encode(self, input, final=False):
        data, _consumed = codecs.charmap_encode(input, self.errors, encoding_table)
        return data
class IncrementalDecoder(codecs.IncrementalDecoder):
    """Incremental cp1140 decoder; charmap decoding carries no state."""

    def decode(self, input, final=False):
        text, _consumed = codecs.charmap_decode(input, self.errors, decoding_table)
        return text
class StreamWriter(Codec,codecs.StreamWriter):
    # Stream API inherited wholesale from Codec + codecs.StreamWriter.
    pass
class StreamReader(Codec,codecs.StreamReader):
    # Stream API inherited wholesale from Codec + codecs.StreamReader.
    pass
### encodings module API
def getregentry():
    """Return the codecs.CodecInfo registration entry for cp1140."""
    codec = Codec()
    return codecs.CodecInfo(
        name='cp1140',
        encode=codec.encode,
        decode=codec.decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamreader=StreamReader,
        streamwriter=StreamWriter,
    )
### Decoding Table
decoding_table = (
u'\x00' # 0x00 -> NULL
u'\x01' # 0x01 -> START OF HEADING
u'\x02' # 0x02 -> START OF TEXT
u'\x03' # 0x03 -> END OF TEXT
u'\x9c' # 0x04 -> CONTROL
u'\t' # 0x05 -> HORIZONTAL TABULATION
u'\x86' # 0x06 -> CONTROL
u'\x7f' # 0x07 -> DELETE
u'\x97' # 0x08 -> CONTROL
u'\x8d' # 0x09 -> CONTROL
u'\x8e' # 0x0A -> CONTROL
u'\x0b' # 0x0B -> VERTICAL TABULATION
u'\x0c' # 0x0C -> FORM FEED
u'\r' # 0x0D -> CARRIAGE RETURN
u'\x0e' # 0x0E -> SHIFT OUT
u'\x0f' # 0x0F -> SHIFT IN
u'\x10' # 0x10 -> DATA LINK ESCAPE
u'\x11' # 0x11 -> DEVICE CONTROL ONE
u'\x12' # 0x12 -> DEVICE CONTROL TWO
u'\x13' # 0x13 -> DEVICE CONTROL THREE
u'\x9d' # 0x14 -> CONTROL
u'\x85' # 0x15 -> CONTROL
u'\x08' # 0x16 -> BACKSPACE
u'\x87' # 0x17 -> CONTROL
u'\x18' # 0x18 -> CANCEL
u'\x19' # 0x19 -> END OF MEDIUM
u'\x92' # 0x1A -> CONTROL
u'\x8f' # 0x1B -> CONTROL
u'\x1c' # 0x1C -> FILE SEPARATOR
u'\x1d' # 0x1D -> GROUP SEPARATOR
u'\x1e' # 0x1E -> RECORD SEPARATOR
u'\x1f' # 0x1F -> UNIT SEPARATOR
u'\x80' # 0x20 -> CONTROL
u'\x81' # 0x21 -> CONTROL
u'\x82' # 0x22 -> CONTROL
u'\x83' # 0x23 -> CONTROL
u'\x84' # 0x24 -> CONTROL
u'\n' # 0x25 -> LINE FEED
u'\x17' # 0x26 -> END OF TRANSMISSION BLOCK
u'\x1b' # 0x27 -> ESCAPE
u'\x88' # 0x28 -> CONTROL
u'\x89' # 0x29 -> CONTROL
u'\x8a' # 0x2A -> CONTROL
u'\x8b' # 0x2B -> CONTROL
u'\x8c' # 0x2C -> CONTROL
u'\x05' # 0x2D -> ENQUIRY
u'\x06' # 0x2E -> ACKNOWLEDGE
u'\x07' # 0x2F -> BELL
u'\x90' # 0x30 -> CONTROL
u'\x91' # 0x31 -> CONTROL
u'\x16' # 0x32 -> SYNCHRONOUS IDLE
u'\x93' # 0x33 -> CONTROL
u'\x94' # 0x34 -> CONTROL
u'\x95' # 0x35 -> CONTROL
u'\x96' # 0x36 -> CONTROL
u'\x04' # 0x37 -> END OF TRANSMISSION
u'\x98' # 0x38 -> CONTROL
u'\x99' # 0x39 -> CONTROL
u'\x9a' # 0x3A -> CONTROL
u'\x9b' # 0x3B -> CONTROL
u'\x14' # 0x3C -> DEVICE CONTROL FOUR
u'\x15' # 0x3D -> NEGATIVE ACKNOWLEDGE
u'\x9e' # 0x3E -> CONTROL
u'\x1a' # 0x3F -> SUBSTITUTE
u' ' # 0x40 -> SPACE
u'\xa0' # 0x41 -> NO-BREAK SPACE
u'\xe2' # 0x42 -> LATIN SMALL LETTER A WITH CIRCUMFLEX
u'\xe4' # 0x43 -> LATIN SMALL LETTER A WITH DIAERESIS
u'\xe0' # 0x44 -> LATIN SMALL LETTER A WITH GRAVE
u'\xe1' # 0x45 -> LATIN SMALL LETTER A WITH ACUTE
u'\xe3' # 0x46 -> LATIN SMALL LETTER A WITH TILDE
u'\xe5' # 0x47 -> LATIN SMALL LETTER A WITH RING ABOVE
u'\xe7' # 0x48 -> LATIN SMALL LETTER C WITH CEDILLA
u'\xf1' # 0x49 -> LATIN SMALL LETTER N WITH TILDE
u'\xa2' # 0x4A -> CENT SIGN
u'.' # 0x4B -> FULL STOP
u'<' # 0x4C -> LESS-THAN SIGN
u'(' # 0x4D -> LEFT PARENTHESIS
u'+' # 0x4E -> PLUS SIGN
u'|' # 0x4F -> VERTICAL LINE
u'&' # 0x50 -> AMPERSAND
u'\xe9' # 0x51 -> LATIN SMALL LETTER E WITH ACUTE
u'\xea' # 0x52 -> LATIN SMALL LETTER E WITH CIRCUMFLEX
u'\xeb' # 0x53 -> LATIN SMALL LETTER E WITH DIAERESIS
u'\xe8' # 0x54 -> LATIN SMALL LETTER E WITH GRAVE
u'\xed' # 0x55 -> LATIN SMALL LETTER I WITH ACUTE
u'\xee' # 0x56 -> LATIN SMALL LETTER I WITH CIRCUMFLEX
u'\xef' # 0x57 -> LATIN SMALL LETTER I WITH DIAERESIS
u'\xec' # 0x58 -> LATIN SMALL LETTER I WITH GRAVE
u'\xdf' # 0x59 -> LATIN SMALL LETTER SHARP S (GERMAN)
u'!' # 0x5A -> EXCLAMATION MARK
u'$' # 0x5B -> DOLLAR SIGN
u'*' # 0x5C -> ASTERISK
u')' # 0x5D -> RIGHT PARENTHESIS
u';' # 0x5E -> SEMICOLON
u'\xac' # 0x5F -> NOT SIGN
u'-' # 0x60 -> HYPHEN-MINUS
u'/' # 0x61 -> SOLIDUS
u'\xc2' # 0x62 -> LATIN CAPITAL LETTER A WITH CIRCUMFLEX
u'\xc4' # 0x63 -> LATIN CAPITAL LETTER A WITH DIAERESIS
u'\xc0' # 0x64 -> LATIN CAPITAL LETTER A WITH GRAVE
u'\xc1' # 0x65 -> LATIN CAPITAL LETTER A WITH ACUTE
u'\xc3' # 0x66 -> LATIN CAPITAL LETTER A WITH TILDE
u'\xc5' # 0x67 -> LATIN CAPITAL LETTER A WITH RING ABOVE
u'\xc7' # 0x68 -> LATIN CAPITAL LETTER C WITH CEDILLA
u'\xd1' # 0x69 -> LATIN CAPITAL LETTER N WITH TILDE
u'\xa6' # 0x6A -> BROKEN BAR
u',' # 0x6B -> COMMA
u'%' # 0x6C -> PERCENT SIGN
u'_' # 0x6D -> LOW LINE
u'>' # 0x6E -> GREATER-THAN SIGN
u'?' # 0x6F -> QUESTION MARK
u'\xf8' # 0x70 -> LATIN SMALL LETTER O WITH STROKE
u'\xc9' # 0x71 -> LATIN CAPITAL LETTER E WITH ACUTE
u'\xca' # 0x72 -> LATIN CAPITAL LETTER E WITH CIRCUMFLEX
u'\xcb' # 0x73 -> LATIN CAPITAL LETTER E WITH DIAERESIS
u'\xc8' # 0x74 -> LATIN CAPITAL LETTER E WITH GRAVE
u'\xcd' # 0x75 -> LATIN CAPITAL LETTER I WITH ACUTE
u'\xce' # 0x76 -> LATIN CAPITAL LETTER I WITH CIRCUMFLEX
u'\xcf' # 0x77 -> LATIN CAPITAL LETTER I WITH DIAERESIS
u'\xcc' # 0x78 -> LATIN CAPITAL LETTER I WITH GRAVE
u'`' # 0x79 -> GRAVE ACCENT
u':' # 0x7A -> COLON
u'#' # 0x7B -> NUMBER SIGN
u'@' # 0x7C -> COMMERCIAL AT
u"'" # 0x7D -> APOSTROPHE
u'=' # 0x7E -> EQUALS SIGN
u'"' # 0x7F -> QUOTATION MARK
u'\xd8' # 0x80 -> LATIN CAPITAL LETTER O WITH STROKE
u'a' # 0x81 -> LATIN SMALL LETTER A
u'b' # 0x82 -> LATIN SMALL LETTER B
u'c' # 0x83 -> LATIN SMALL LETTER C
u'd' # 0x84 -> LATIN SMALL LETTER D
u'e' # 0x85 -> LATIN SMALL LETTER E
u'f' # 0x86 -> LATIN SMALL LETTER F
u'g' # 0x87 -> LATIN SMALL LETTER G
u'h' # 0x88 -> LATIN SMALL LETTER H
u'i' # 0x89 -> LATIN SMALL LETTER I
u'\xab' # 0x8A -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
u'\xbb' # 0x8B -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
u'\xf0' # 0x8C -> LATIN SMALL LETTER ETH (ICELANDIC)
u'\xfd' # 0x8D -> LATIN SMALL LETTER Y WITH ACUTE
u'\xfe' # 0x8E -> LATIN SMALL LETTER THORN (ICELANDIC)
u'\xb1' # 0x8F -> PLUS-MINUS SIGN
u'\xb0' # 0x90 -> DEGREE SIGN
u'j' # 0x91 -> LATIN SMALL LETTER J
u'k' # 0x92 -> LATIN SMALL LETTER K
u'l' # 0x93 -> LATIN SMALL LETTER L
u'm' # 0x94 -> LATIN SMALL LETTER M
u'n' # 0x95 -> LATIN SMALL LETTER N
u'o' # 0x96 -> LATIN SMALL LETTER O
u'p' # 0x97 -> LATIN SMALL LETTER P
u'q' # 0x98 -> LATIN SMALL LETTER Q
u'r' # 0x99 -> LATIN SMALL LETTER R
u'\xaa' # 0x9A -> FEMININE ORDINAL INDICATOR
u'\xba' # 0x9B -> MASCULINE ORDINAL INDICATOR
u'\xe6' # 0x9C -> LATIN SMALL LIGATURE AE
u'\xb8' # 0x9D -> CEDILLA
u'\xc6' # 0x9E -> LATIN CAPITAL LIGATURE AE
u'\u20ac' # 0x9F -> EURO SIGN
u'\xb5' # 0xA0 -> MICRO SIGN
u'~' # 0xA1 -> TILDE
u's' # 0xA2 -> LATIN SMALL LETTER S
u't' # 0xA3 -> LATIN SMALL LETTER T
u'u' # 0xA4 -> LATIN SMALL LETTER U
u'v' # 0xA5 -> LATIN SMALL LETTER V
u'w' # 0xA6 -> LATIN SMALL LETTER W
u'x' # 0xA7 -> LATIN SMALL LETTER X
u'y' # 0xA8 -> LATIN SMALL LETTER Y
u'z' # 0xA9 -> LATIN SMALL LETTER Z
u'\xa1' # 0xAA -> INVERTED EXCLAMATION MARK
u'\xbf' # 0xAB -> INVERTED QUESTION MARK
u'\xd0' # 0xAC -> LATIN CAPITAL LETTER ETH (ICELANDIC)
u'\xdd' # 0xAD -> LATIN CAPITAL LETTER Y WITH ACUTE
u'\xde' # 0xAE -> LATIN CAPITAL LETTER THORN (ICELANDIC)
u'\xae' # 0xAF -> REGISTERED SIGN
u'^' # 0xB0 -> CIRCUMFLEX ACCENT
u'\xa3' # 0xB1 -> POUND SIGN
u'\xa5' # 0xB2 -> YEN SIGN
u'\xb7' # 0xB3 -> MIDDLE DOT
u'\xa9' # 0xB4 -> COPYRIGHT SIGN
u'\xa7' # 0xB5 -> SECTION SIGN
u'\xb6' # 0xB6 -> PILCROW SIGN
u'\xbc' # 0xB7 -> VULGAR FRACTION ONE QUARTER
u'\xbd' # 0xB8 -> VULGAR FRACTION ONE HALF
u'\xbe' # 0xB9 -> VULGAR FRACTION THREE QUARTERS
u'[' # 0xBA -> LEFT SQUARE BRACKET
u']' # 0xBB -> RIGHT SQUARE BRACKET
u'\xaf' # 0xBC -> MACRON
u'\xa8' # 0xBD -> DIAERESIS
u'\xb4' # 0xBE -> ACUTE ACCENT
u'\xd7' # 0xBF -> MULTIPLICATION SIGN
u'{' # 0xC0 -> LEFT CURLY BRACKET
u'A' # 0xC1 -> LATIN CAPITAL LETTER A
u'B' # 0xC2 -> LATIN CAPITAL LETTER B
u'C' # 0xC3 -> LATIN CAPITAL LETTER C
u'D' # 0xC4 -> LATIN CAPITAL LETTER D
u'E' # 0xC5 -> LATIN CAPITAL LETTER E
u'F' # 0xC6 -> LATIN CAPITAL LETTER F
u'G' # 0xC7 -> LATIN CAPITAL LETTER G
u'H' # 0xC8 -> LATIN CAPITAL LETTER H
u'I' # 0xC9 -> LATIN CAPITAL LETTER I
u'\xad' # 0xCA -> SOFT HYPHEN
u'\xf4' # 0xCB -> LATIN SMALL LETTER O WITH CIRCUMFLEX
u'\xf6' # 0xCC -> LATIN SMALL LETTER O WITH DIAERESIS
u'\xf2' # 0xCD -> LATIN SMALL LETTER O WITH GRAVE
u'\xf3' # 0xCE -> LATIN SMALL LETTER O WITH ACUTE
u'\xf5' # 0xCF -> LATIN SMALL LETTER O WITH TILDE
u'}' # 0xD0 -> RIGHT CURLY BRACKET
u'J' # 0xD1 -> LATIN CAPITAL LETTER J
u'K' # 0xD2 -> LATIN CAPITAL LETTER K
u'L' # 0xD3 -> LATIN CAPITAL LETTER L
u'M' # 0xD4 -> LATIN CAPITAL LETTER M
u'N' # 0xD5 -> LATIN CAPITAL LETTER N
u'O' # 0xD6 -> LATIN CAPITAL LETTER O
u'P' # 0xD7 -> LATIN CAPITAL LETTER P
u'Q' # 0xD8 -> LATIN CAPITAL LETTER Q
u'R' # 0xD9 -> LATIN CAPITAL LETTER R
u'\xb9' # 0xDA -> SUPERSCRIPT ONE
u'\xfb' # 0xDB -> LATIN SMALL LETTER U WITH CIRCUMFLEX
u'\xfc' # 0xDC -> LATIN SMALL LETTER U WITH DIAERESIS
u'\xf9' # 0xDD -> LATIN SMALL LETTER U WITH GRAVE
u'\xfa' # 0xDE -> LATIN SMALL LETTER U WITH ACUTE
u'\xff' # 0xDF -> LATIN SMALL LETTER Y WITH DIAERESIS
u'\\' # 0xE0 -> REVERSE SOLIDUS
u'\xf7' # 0xE1 -> DIVISION SIGN
u'S' # 0xE2 -> LATIN CAPITAL LETTER S
u'T' # 0xE3 -> LATIN CAPITAL LETTER T
u'U' # 0xE4 -> LATIN CAPITAL LETTER U
u'V' # 0xE5 -> LATIN CAPITAL LETTER V
u'W' # 0xE6 -> LATIN CAPITAL LETTER W
u'X' # 0xE7 -> LATIN CAPITAL LETTER X
u'Y' # 0xE8 -> LATIN CAPITAL LETTER Y
u'Z' # 0xE9 -> LATIN CAPITAL LETTER Z
u'\xb2' # 0xEA -> SUPERSCRIPT TWO
u'\xd4' # 0xEB -> LATIN CAPITAL LETTER O WITH CIRCUMFLEX
u'\xd6' # 0xEC -> LATIN CAPITAL LETTER O WITH DIAERESIS
u'\xd2' # 0xED -> LATIN CAPITAL LETTER O WITH GRAVE
u'\xd3' # 0xEE -> LATIN CAPITAL LETTER O WITH ACUTE
u'\xd5' # 0xEF -> LATIN CAPITAL LETTER O WITH TILDE
u'0' # 0xF0 -> DIGIT ZERO
u'1' # 0xF1 -> DIGIT ONE
u'2' # 0xF2 -> DIGIT TWO
u'3' # 0xF3 -> DIGIT THREE
u'4' # 0xF4 -> DIGIT FOUR
u'5' # 0xF5 -> DIGIT FIVE
u'6' # 0xF6 -> DIGIT SIX
u'7' # 0xF7 -> DIGIT SEVEN
u'8' # 0xF8 -> DIGIT EIGHT
u'9' # 0xF9 -> DIGIT NINE
u'\xb3' # 0xFA -> SUPERSCRIPT THREE
u'\xdb' # 0xFB -> LATIN CAPITAL LETTER U WITH CIRCUMFLEX
u'\xdc' # 0xFC -> LATIN CAPITAL LETTER U WITH DIAERESIS
u'\xd9' # 0xFD -> LATIN CAPITAL LETTER U WITH GRAVE
u'\xda' # 0xFE -> LATIN CAPITAL LETTER U WITH ACUTE
u'\x9f' # 0xFF -> CONTROL
)
### Encoding table
encoding_table=codecs.charmap_build(decoding_table)
| gpl-3.0 |
monash-merc/karaage | karaage/legacy/applications/south_migrations/0022_auto__add_field_projectapplication_project_tmp.py | 3 | 17279 | # -*- coding: utf-8 -*-
from south.db import db
from south.v2 import SchemaMigration
class Migration(SchemaMigration):
depends_on = (
('projects', '0014_move_projects'),
)
def forwards(self, orm):
    """Add the nullable ProjectApplication.project_tmp FK to projects.ProjectTmp."""
    # Adding field 'ProjectApplication.project_tmp'
    db.add_column('applications_projectapplication', 'project_tmp',
                  self.gf('django.db.models.fields.related.ForeignKey')(to=orm['projects.ProjectTmp'], null=True, blank=True),
                  keep_default=False)
def backwards(self, orm):
# Deleting field 'ProjectApplication.project_tmp'
db.delete_column('applications_projectapplication', 'project_tmp_id')
models = {
'applications.applicant': {
'Meta': {'object_name': 'Applicant'},
'address': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'city': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'country': ('django.db.models.fields.CharField', [], {'max_length': '2', 'null': 'True', 'blank': 'True'}),
'department': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'unique': 'True', 'max_length': '75'}),
'email_verified': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'fax': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'full_name': ('django.db.models.fields.CharField', [], {'max_length': '60'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'institute': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['institutes.Institute']", 'null': 'True', 'blank': 'True'}),
'mobile': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'position': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'postcode': ('django.db.models.fields.CharField', [], {'max_length': '8', 'null': 'True', 'blank': 'True'}),
'saml_id': ('django.db.models.fields.CharField', [], {'max_length': '200', 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'short_name': ('django.db.models.fields.CharField', [], {'max_length': '30'}),
'supervisor': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'telephone': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'max_length': '16', 'unique': 'True', 'null': 'True', 'blank': 'True'})
},
'applications.application': {
'Meta': {'object_name': 'Application'},
'_class': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'complete_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']", 'null': 'True', 'blank': 'True'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['people.Person']", 'null': 'True', 'blank': 'True'}),
'created_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'expires': ('django.db.models.fields.DateTimeField', [], {}),
'header_message': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'secret_token': ('django.db.models.fields.CharField', [], {'default': "'33073bf3cdbe36c36fd86db19b8c758065fa5272'", 'unique': 'True', 'max_length': '64'}),
'state': ('django.db.models.fields.CharField', [], {'max_length': '5'}),
'submitted_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'})
},
'applications.projectapplication': {
'Meta': {'object_name': 'ProjectApplication', '_ormbases': ['applications.Application']},
'additional_req': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'application_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['applications.Application']", 'unique': 'True', 'primary_key': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'institute': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['institutes.Institute']", 'null': 'True', 'blank': 'True'}),
'machine_categories': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['machines.MachineCategory']", 'null': 'True', 'blank': 'True'}),
'make_leader': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'needs_account': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'pid': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['projects.Project']", 'null': 'True', 'blank': 'True'}),
'project_tmp': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['projects.ProjectTmp']", 'null': 'True', 'blank': 'True'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'institutes.institute': {
'Meta': {'ordering': "['name']", 'object_name': 'Institute', 'db_table': "'institute'"},
'delegates': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'delegate'", 'to': "orm['people.Person']", 'through': "orm['institutes.InstituteDelegate']", 'blank': 'True', 'symmetrical': 'False', 'null': 'True'}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['people.Group']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'saml_entityid': ('django.db.models.fields.CharField', [], {'max_length': '200', 'unique': 'True', 'null': 'True', 'blank': 'True'})
},
'institutes.institutedelegate': {
'Meta': {'object_name': 'InstituteDelegate', 'db_table': "'institutedelegate'"},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'institute': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['institutes.Institute']"}),
'person': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['people.Person']"}),
'send_email': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
'machines.machinecategory': {
'Meta': {'object_name': 'MachineCategory', 'db_table': "'machine_category'"},
'datastore': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'people.group': {
'Meta': {'ordering': "['name']", 'object_name': 'Group'},
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'members': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'groups'", 'symmetrical': 'False', 'to': "orm['people.Person']"}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'})
},
'people.person': {
'Meta': {'ordering': "['full_name', 'short_name']", 'object_name': 'Person', 'db_table': "'person'"},
'address': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'approved_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'user_approver'", 'null': 'True', 'to': "orm['people.Person']"}),
'city': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'comment': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'country': ('django.db.models.fields.CharField', [], {'max_length': '2', 'null': 'True', 'blank': 'True'}),
'date_approved': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'date_deleted': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'deleted_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'user_deletor'", 'null': 'True', 'to': "orm['people.Person']"}),
'department': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'db_index': 'True'}),
'expires': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'fax': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'full_name': ('django.db.models.fields.CharField', [], {'max_length': '60'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'institute': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['institutes.Institute']"}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_admin': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_systemuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_usage': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'legacy_ldap_password': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'login_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'mobile': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'position': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'postcode': ('django.db.models.fields.CharField', [], {'max_length': '8', 'null': 'True', 'blank': 'True'}),
'saml_id': ('django.db.models.fields.CharField', [], {'max_length': '200', 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'short_name': ('django.db.models.fields.CharField', [], {'max_length': '30'}),
'state': ('django.db.models.fields.CharField', [], {'max_length': '4', 'null': 'True', 'blank': 'True'}),
'supervisor': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'telephone': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}),
'website': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'})
},
'projects.project': {
'Meta': {'ordering': "['pid']", 'object_name': 'Project', 'db_table': "'project'"},
'additional_req': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'approved_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'project_approver'", 'null': 'True', 'to': "orm['people.Person']"}),
'date_approved': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'date_deleted': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'deleted_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'project_deletor'", 'null': 'True', 'to': "orm['people.Person']"}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'end_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['people.Group']"}),
'institute': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['institutes.Institute']"}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_approved': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_usage': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'leaders': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'leaders'", 'symmetrical': 'False', 'to': "orm['people.Person']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'pid': ('django.db.models.fields.CharField', [], {'max_length': '50', 'primary_key': 'True'}),
'start_date': ('django.db.models.fields.DateField', [], {'default': 'datetime.datetime(2013, 9, 17, 0, 0)'})
},
'projects.projecttmp': {
'Meta': {'object_name': 'ProjectTmp'},
'additional_req': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'approved_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'project_approver_tmp'", 'null': 'True', 'to': "orm['people.Person']"}),
'date_approved': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'date_deleted': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'deleted_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'project_deletor_tmp'", 'null': 'True', 'to': "orm['people.Person']"}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'end_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['people.Group']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'institute': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'project_tmp'", 'to': "orm['institutes.Institute']"}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_approved': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_usage': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'leaders': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'leaders_tmp'", 'symmetrical': 'False', 'to': "orm['people.Person']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'pid': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'start_date': ('django.db.models.fields.DateField', [], {'default': 'datetime.datetime(2013, 9, 17, 0, 0)'})
}
}
complete_apps = ['applications']
| gpl-3.0 |
demon-ru/iml-crm | addons/account/wizard/account_move_line_reconcile_select.py | 385 | 2362 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
from openerp.tools.translate import _
class account_move_line_reconcile_select(osv.osv_memory):
    """Wizard: select a reconcilable account, then open the list of its
    unreconciled, posted journal items for manual reconciliation."""
    _name = "account.move.line.reconcile.select"
    _description = "Move line reconcile select"
    _columns = {
        # Only accounts flagged "Allow Reconciliation" may be chosen.
        'account_id': fields.many2one('account.account', 'Account', \
            domain = [('reconcile', '=', 1)], required=True),
    }

    def action_open_window(self, cr, uid, ids, context=None):
        """
        This function Open account move line window for reconcile on given account id
        @param cr: the current row, from the database cursor,
        @param uid: the current user’s ID for security checks,
        @param ids: account move line reconcile select’s ID or list of IDs
        @return: dictionary of Open account move line window for reconcile on given account id
        """
        data = self.read(cr, uid, ids, context=context)[0]
        # NOTE(review): '%d' assumes data['account_id'] is a bare integer id.
        # read() on a many2one commonly returns an (id, name) pair -- confirm
        # the osv_memory read() behaviour for this server version.
        return {
            # Only unreconciled, non-draft move lines of the chosen account.
            'domain': "[('account_id','=',%d),('reconcile_id','=',False),('state','<>','draft')]" % data['account_id'],
            'name': _('Reconciliation'),
            'view_type': 'form',
            'view_mode': 'tree,form',
            'view_id': False,
            'res_model': 'account.move.line',
            'type': 'ir.actions.act_window'
        }
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
kiszk/spark | python/pyspark/sql/tests/test_functions.py | 11 | 14255 | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import datetime
import sys
from pyspark.sql import Row
from pyspark.sql.functions import udf, input_file_name
from pyspark.testing.sqlutils import ReusedSQLTestCase
class FunctionsTests(ReusedSQLTestCase):
    """Tests for ``pyspark.sql.functions`` (column functions, DataFrame stat
    helpers and SQL expression utilities).

    Fix: the deprecated ``assertEquals`` alias (removed in Python 3.12) is
    replaced with the canonical ``assertEqual`` throughout; behaviour of the
    tests is unchanged.
    """

    def test_explode(self):
        from pyspark.sql.functions import explode, explode_outer, posexplode_outer

        d = [
            Row(a=1, intlist=[1, 2, 3], mapfield={"a": "b"}),
            Row(a=1, intlist=[], mapfield={}),
            Row(a=1, intlist=None, mapfield=None),
        ]
        rdd = self.sc.parallelize(d)
        data = self.spark.createDataFrame(rdd)

        result = data.select(explode(data.intlist).alias("a")).select("a").collect()
        self.assertEqual(result[0][0], 1)
        self.assertEqual(result[1][0], 2)
        self.assertEqual(result[2][0], 3)

        result = data.select(explode(data.mapfield).alias("a", "b")).select("a", "b").collect()
        self.assertEqual(result[0][0], "a")
        self.assertEqual(result[0][1], "b")

        # *_outer variants emit a null row for empty/NULL collections.
        result = [tuple(x) for x in data.select(posexplode_outer("intlist")).collect()]
        self.assertEqual(result, [(0, 1), (1, 2), (2, 3), (None, None), (None, None)])

        result = [tuple(x) for x in data.select(posexplode_outer("mapfield")).collect()]
        self.assertEqual(result, [(0, 'a', 'b'), (None, None, None), (None, None, None)])

        result = [x[0] for x in data.select(explode_outer("intlist")).collect()]
        self.assertEqual(result, [1, 2, 3, None, None])

        result = [tuple(x) for x in data.select(explode_outer("mapfield")).collect()]
        self.assertEqual(result, [('a', 'b'), (None, None), (None, None)])

    def test_basic_functions(self):
        rdd = self.sc.parallelize(['{"foo":"bar"}', '{"foo":"baz"}'])
        df = self.spark.read.json(rdd)
        df.count()
        df.collect()
        df.schema

        # cache and checkpoint
        self.assertFalse(df.is_cached)
        df.persist()
        df.unpersist(True)
        df.cache()
        self.assertTrue(df.is_cached)
        self.assertEqual(2, df.count())

        with self.tempView("temp"):
            df.createOrReplaceTempView("temp")
            df = self.spark.sql("select foo from temp")
            df.count()
            df.collect()

    def test_corr(self):
        import math
        df = self.sc.parallelize([Row(a=i, b=math.sqrt(i)) for i in range(10)]).toDF()
        corr = df.stat.corr(u"a", "b")
        self.assertTrue(abs(corr - 0.95734012) < 1e-6)

    def test_sampleby(self):
        df = self.sc.parallelize([Row(a=i, b=(i % 3)) for i in range(100)]).toDF()
        sampled = df.stat.sampleBy(u"b", fractions={0: 0.5, 1: 0.5}, seed=0)
        # Deterministic because of the fixed seed.
        self.assertTrue(sampled.count() == 35)

    def test_cov(self):
        df = self.sc.parallelize([Row(a=i, b=2 * i) for i in range(10)]).toDF()
        cov = df.stat.cov(u"a", "b")
        self.assertTrue(abs(cov - 55.0 / 3) < 1e-6)

    def test_crosstab(self):
        df = self.sc.parallelize([Row(a=i % 3, b=i % 2) for i in range(1, 7)]).toDF()
        ct = df.stat.crosstab(u"a", "b").collect()
        ct = sorted(ct, key=lambda x: x[0])
        for i, row in enumerate(ct):
            self.assertEqual(row[0], str(i))
            self.assertTrue(row[1], 1)
            self.assertTrue(row[2], 1)

    def test_math_functions(self):
        df = self.sc.parallelize([Row(a=i, b=2 * i) for i in range(10)]).toDF()
        from pyspark.sql import functions
        import math

        def get_values(l):
            return [j[0] for j in l]

        def assert_close(a, b):
            c = get_values(b)
            diff = [abs(v - c[k]) < 1e-6 for k, v in enumerate(a)]
            return sum(diff) == len(a)

        assert_close([math.cos(i) for i in range(10)],
                     df.select(functions.cos(df.a)).collect())
        assert_close([math.cos(i) for i in range(10)],
                     df.select(functions.cos("a")).collect())
        assert_close([math.sin(i) for i in range(10)],
                     df.select(functions.sin(df.a)).collect())
        assert_close([math.sin(i) for i in range(10)],
                     df.select(functions.sin(df['a'])).collect())
        assert_close([math.pow(i, 2 * i) for i in range(10)],
                     df.select(functions.pow(df.a, df.b)).collect())
        assert_close([math.pow(i, 2) for i in range(10)],
                     df.select(functions.pow(df.a, 2)).collect())
        assert_close([math.pow(i, 2) for i in range(10)],
                     df.select(functions.pow(df.a, 2.0)).collect())
        assert_close([math.hypot(i, 2 * i) for i in range(10)],
                     df.select(functions.hypot(df.a, df.b)).collect())
        assert_close([math.hypot(i, 2 * i) for i in range(10)],
                     df.select(functions.hypot("a", u"b")).collect())
        assert_close([math.hypot(i, 2) for i in range(10)],
                     df.select(functions.hypot("a", 2)).collect())
        assert_close([math.hypot(i, 2) for i in range(10)],
                     df.select(functions.hypot(df.a, 2)).collect())

    def test_rand_functions(self):
        df = self.df
        from pyspark.sql import functions
        rnd = df.select('key', functions.rand()).collect()
        for row in rnd:
            assert row[1] >= 0.0 and row[1] <= 1.0, "got: %s" % row[1]
        rndn = df.select('key', functions.randn(5)).collect()
        for row in rndn:
            assert row[1] >= -4.0 and row[1] <= 4.0, "got: %s" % row[1]

        # If the specified seed is 0, we should use it.
        # https://issues.apache.org/jira/browse/SPARK-9691
        rnd1 = df.select('key', functions.rand(0)).collect()
        rnd2 = df.select('key', functions.rand(0)).collect()
        self.assertEqual(sorted(rnd1), sorted(rnd2))

        rndn1 = df.select('key', functions.randn(0)).collect()
        rndn2 = df.select('key', functions.randn(0)).collect()
        self.assertEqual(sorted(rndn1), sorted(rndn2))

    def test_string_functions(self):
        from pyspark.sql import functions
        from pyspark.sql.functions import col, lit, _string_functions
        df = self.spark.createDataFrame([['nick']], schema=['name'])
        self.assertRaisesRegexp(
            TypeError,
            "must be the same type",
            lambda: df.select(col('name').substr(0, lit(1))))
        if sys.version_info.major == 2:
            # `long` only exists on Python 2; the branch is never hit on 3.
            self.assertRaises(
                TypeError,
                lambda: df.select(col('name').substr(long(0), long(1))))

        for name in _string_functions.keys():
            self.assertEqual(
                df.select(getattr(functions, name)("name")).first()[0],
                df.select(getattr(functions, name)(col("name"))).first()[0])

    def test_array_contains_function(self):
        from pyspark.sql.functions import array_contains

        df = self.spark.createDataFrame([(["1", "2", "3"],), ([],)], ['data'])
        actual = df.select(array_contains(df.data, "1").alias('b')).collect()
        self.assertEqual([Row(b=True), Row(b=False)], actual)

    def test_between_function(self):
        df = self.sc.parallelize([
            Row(a=1, b=2, c=3),
            Row(a=2, b=1, c=3),
            Row(a=4, b=1, c=4)]).toDF()
        self.assertEqual([Row(a=2, b=1, c=3), Row(a=4, b=1, c=4)],
                         df.filter(df.a.between(df.b, df.c)).collect())

    def test_dayofweek(self):
        from pyspark.sql.functions import dayofweek
        dt = datetime.datetime(2017, 11, 6)
        df = self.spark.createDataFrame([Row(date=dt)])
        row = df.select(dayofweek(df.date)).first()
        self.assertEqual(row[0], 2)

    def test_expr(self):
        from pyspark.sql import functions
        row = Row(a="length string", b=75)
        df = self.spark.createDataFrame([row])
        result = df.select(functions.expr("length(a)")).collect()[0].asDict()
        self.assertEqual(13, result["length(a)"])

    # add test for SPARK-10577 (test broadcast join hint)
    def test_functions_broadcast(self):
        from pyspark.sql.functions import broadcast

        df1 = self.spark.createDataFrame([(1, "1"), (2, "2")], ("key", "value"))
        df2 = self.spark.createDataFrame([(1, "1"), (2, "2")], ("key", "value"))

        # equijoin - should be converted into broadcast join
        plan1 = df1.join(broadcast(df2), "key")._jdf.queryExecution().executedPlan()
        self.assertEqual(1, plan1.toString().count("BroadcastHashJoin"))

        # no join key -- should not be a broadcast join
        plan2 = df1.crossJoin(broadcast(df2))._jdf.queryExecution().executedPlan()
        self.assertEqual(0, plan2.toString().count("BroadcastHashJoin"))

        # planner should not crash without a join
        broadcast(df1)._jdf.queryExecution().executedPlan()

    def test_first_last_ignorenulls(self):
        from pyspark.sql import functions
        df = self.spark.range(0, 100)
        df2 = df.select(functions.when(df.id % 3 == 0, None).otherwise(df.id).alias("id"))
        df3 = df2.select(functions.first(df2.id, False).alias('a'),
                         functions.first(df2.id, True).alias('b'),
                         functions.last(df2.id, False).alias('c'),
                         functions.last(df2.id, True).alias('d'))
        self.assertEqual([Row(a=None, b=1, c=None, d=98)], df3.collect())

    def test_approxQuantile(self):
        df = self.sc.parallelize([Row(a=i, b=i+10) for i in range(10)]).toDF()
        # str and unicode column names should both be accepted.
        for f in ["a", u"a"]:
            aq = df.stat.approxQuantile(f, [0.1, 0.5, 0.9], 0.1)
            self.assertTrue(isinstance(aq, list))
            self.assertEqual(len(aq), 3)
            self.assertTrue(all(isinstance(q, float) for q in aq))
        aqs = df.stat.approxQuantile(["a", u"b"], [0.1, 0.5, 0.9], 0.1)
        self.assertTrue(isinstance(aqs, list))
        self.assertEqual(len(aqs), 2)
        self.assertTrue(isinstance(aqs[0], list))
        self.assertEqual(len(aqs[0]), 3)
        self.assertTrue(all(isinstance(q, float) for q in aqs[0]))
        self.assertTrue(isinstance(aqs[1], list))
        self.assertEqual(len(aqs[1]), 3)
        self.assertTrue(all(isinstance(q, float) for q in aqs[1]))
        aqt = df.stat.approxQuantile((u"a", "b"), [0.1, 0.5, 0.9], 0.1)
        self.assertTrue(isinstance(aqt, list))
        self.assertEqual(len(aqt), 2)
        self.assertTrue(isinstance(aqt[0], list))
        self.assertEqual(len(aqt[0]), 3)
        self.assertTrue(all(isinstance(q, float) for q in aqt[0]))
        self.assertTrue(isinstance(aqt[1], list))
        self.assertEqual(len(aqt[1]), 3)
        self.assertTrue(all(isinstance(q, float) for q in aqt[1]))
        self.assertRaises(ValueError, lambda: df.stat.approxQuantile(123, [0.1, 0.9], 0.1))
        self.assertRaises(ValueError, lambda: df.stat.approxQuantile(("a", 123), [0.1, 0.9], 0.1))
        self.assertRaises(ValueError, lambda: df.stat.approxQuantile(["a", 123], [0.1, 0.9], 0.1))

    def test_sort_with_nulls_order(self):
        from pyspark.sql import functions

        df = self.spark.createDataFrame(
            [('Tom', 80), (None, 60), ('Alice', 50)], ["name", "height"])
        self.assertEqual(
            df.select(df.name).orderBy(functions.asc_nulls_first('name')).collect(),
            [Row(name=None), Row(name=u'Alice'), Row(name=u'Tom')])
        self.assertEqual(
            df.select(df.name).orderBy(functions.asc_nulls_last('name')).collect(),
            [Row(name=u'Alice'), Row(name=u'Tom'), Row(name=None)])
        self.assertEqual(
            df.select(df.name).orderBy(functions.desc_nulls_first('name')).collect(),
            [Row(name=None), Row(name=u'Tom'), Row(name=u'Alice')])
        self.assertEqual(
            df.select(df.name).orderBy(functions.desc_nulls_last('name')).collect(),
            [Row(name=u'Tom'), Row(name=u'Alice'), Row(name=None)])

    def test_input_file_name_reset_for_rdd(self):
        rdd = self.sc.textFile('python/test_support/hello/hello.txt').map(lambda x: {'data': x})
        df = self.spark.createDataFrame(rdd, "data STRING")
        df.select(input_file_name().alias('file')).collect()

        non_file_df = self.spark.range(100).select(input_file_name())

        results = non_file_df.collect()
        self.assertTrue(len(results) == 100)

        # [SPARK-24605]: if everything was properly reset after the last job, this should return
        # empty string rather than the file read in the last job.
        for result in results:
            self.assertEqual(result[0], '')

    def test_array_repeat(self):
        from pyspark.sql.functions import array_repeat, lit

        df = self.spark.range(1)

        # int count and Column count must produce identical results.
        self.assertEqual(
            df.select(array_repeat("id", 3)).toDF("val").collect(),
            df.select(array_repeat("id", lit(3))).toDF("val").collect(),
        )

    def test_input_file_name_udf(self):
        df = self.spark.read.text('python/test_support/hello/hello.txt')
        df = df.select(udf(lambda x: x)("value"), input_file_name().alias('file'))
        file_name = df.collect()[0].file
        self.assertTrue("python/test_support/hello/hello.txt" in file_name)
if __name__ == "__main__":
    import unittest
    from pyspark.sql.tests.test_functions import *

    # Prefer the XML test runner (JUnit-style reports for CI) when the
    # optional xmlrunner package is installed; otherwise fall back to the
    # default text runner.
    try:
        import xmlrunner
        testRunner = xmlrunner.XMLTestRunner(output='target/test-reports', verbosity=2)
    except ImportError:
        testRunner = None
    unittest.main(testRunner=testRunner, verbosity=2)
| apache-2.0 |
rmaz/buck | third-party/py/unittest2/unittest2/test/test_functiontestcase.py | 122 | 5570 | import unittest2
from unittest2.test.support import LoggingResult
class Test_FunctionTestCase(unittest2.TestCase):
# "Return the number of tests represented by the this test object. For
# unittest2.TestCase instances, this will always be 1"
def test_countTestCases(self):
test = unittest2.FunctionTestCase(lambda: None)
self.assertEqual(test.countTestCases(), 1)
# "When a setUp() method is defined, the test runner will run that method
# prior to each test. Likewise, if a tearDown() method is defined, the
# test runner will invoke that method after each test. In the example,
# setUp() was used to create a fresh sequence for each test."
#
# Make sure the proper call order is maintained, even if setUp() raises
# an exception.
def test_run_call_order__error_in_setUp(self):
events = []
result = LoggingResult(events)
def setUp():
events.append('setUp')
raise RuntimeError('raised by setUp')
def test():
events.append('test')
def tearDown():
events.append('tearDown')
expected = ['startTest', 'setUp', 'addError', 'stopTest']
unittest2.FunctionTestCase(test, setUp, tearDown).run(result)
self.assertEqual(events, expected)
# "When a setUp() method is defined, the test runner will run that method
# prior to each test. Likewise, if a tearDown() method is defined, the
# test runner will invoke that method after each test. In the example,
# setUp() was used to create a fresh sequence for each test."
#
# Make sure the proper call order is maintained, even if the test raises
# an error (as opposed to a failure).
def test_run_call_order__error_in_test(self):
events = []
result = LoggingResult(events)
def setUp():
events.append('setUp')
def test():
events.append('test')
raise RuntimeError('raised by test')
def tearDown():
events.append('tearDown')
expected = ['startTest', 'setUp', 'test', 'addError', 'tearDown',
'stopTest']
unittest2.FunctionTestCase(test, setUp, tearDown).run(result)
self.assertEqual(events, expected)
# "When a setUp() method is defined, the test runner will run that method
# prior to each test. Likewise, if a tearDown() method is defined, the
# test runner will invoke that method after each test. In the example,
# setUp() was used to create a fresh sequence for each test."
#
# Make sure the proper call order is maintained, even if the test signals
# a failure (as opposed to an error).
def test_run_call_order__failure_in_test(self):
events = []
result = LoggingResult(events)
def setUp():
events.append('setUp')
def test():
events.append('test')
self.fail('raised by test')
def tearDown():
events.append('tearDown')
expected = ['startTest', 'setUp', 'test', 'addFailure', 'tearDown',
'stopTest']
unittest2.FunctionTestCase(test, setUp, tearDown).run(result)
self.assertEqual(events, expected)
# "When a setUp() method is defined, the test runner will run that method
# prior to each test. Likewise, if a tearDown() method is defined, the
# test runner will invoke that method after each test. In the example,
# setUp() was used to create a fresh sequence for each test."
#
# Make sure the proper call order is maintained, even if tearDown() raises
# an exception.
def test_run_call_order__error_in_tearDown(self):
events = []
result = LoggingResult(events)
def setUp():
events.append('setUp')
def test():
events.append('test')
def tearDown():
events.append('tearDown')
raise RuntimeError('raised by tearDown')
expected = ['startTest', 'setUp', 'test', 'tearDown', 'addError',
'stopTest']
unittest2.FunctionTestCase(test, setUp, tearDown).run(result)
self.assertEqual(events, expected)
# "Return a string identifying the specific test case."
#
# Because of the vague nature of the docs, I'm not going to lock this
# test down too much. Really all that can be asserted is that the id()
# will be a string (either 8-byte or unicode -- again, because the docs
# just say "string")
    def test_id(self):
        """id() must return some kind of string identifying the test case."""
        # NOTE(review): ``basestring`` exists only on Python 2; this module
        # predates Python 3 support -- confirm the targeted interpreter
        # before changing this to ``str``.
        test = unittest2.FunctionTestCase(lambda: None)
        self.assertIsInstance(test.id(), basestring)
# "Returns a one-line description of the test, or None if no description
# has been provided. The default implementation of this method returns
# the first line of the test method's docstring, if available, or None."
    def test_shortDescription__no_docstring(self):
        """shortDescription() is None when no description was supplied."""
        test = unittest2.FunctionTestCase(lambda: None)
        self.assertEqual(test.shortDescription(), None)
# "Returns a one-line description of the test, or None if no description
# has been provided. The default implementation of this method returns
# the first line of the test method's docstring, if available, or None."
    def test_shortDescription__singleline_docstring(self):
        """shortDescription() returns an explicitly supplied description."""
        desc = "this tests foo"
        test = unittest2.FunctionTestCase(lambda: None, description=desc)
        self.assertEqual(test.shortDescription(), "this tests foo")
# Allow running this test module directly from the command line.
if __name__ == '__main__':
    unittest2.main()
| apache-2.0 |
antoviaque/edx-platform | lms/djangoapps/notifier_api/serializers.py | 61 | 2580 | from django.contrib.auth.models import User
from django.http import Http404
from rest_framework import serializers
from openedx.core.djangoapps.course_groups.cohorts import is_course_cohorted
from notification_prefs import NOTIFICATION_PREF_KEY
from lang_pref import LANGUAGE_KEY
class NotifierUserSerializer(serializers.ModelSerializer):
    """
    A serializer containing all information about a user needed by the notifier
    (namely the user's name, email address, notification and language
    preferences, and course enrollment and cohort information).

    Because these pieces of information reside in different tables, this is
    designed to work well with prefetch_related and select_related, which
    require the use of all() instead of get() or filter(). The following
    relations should be prefetched on the user objects being serialized:

     * profile
     * preferences
     * courseenrollment_set
     * course_groups
     * roles__permissions
    """
    # All three fields are computed by the get_<field> methods below.
    name = serializers.SerializerMethodField()
    preferences = serializers.SerializerMethodField()
    course_info = serializers.SerializerMethodField()

    def get_name(self, user):
        # Full name stored on the related user profile.
        return user.profile.name

    def get_preferences(self, user):
        # Only the language and notification preferences matter to the
        # notifier; iterate via all() so prefetch_related stays effective.
        return {
            pref.key: pref.value
            for pref
            in user.preferences.all()
            if pref.key in [LANGUAGE_KEY, NOTIFICATION_PREF_KEY]
        }

    def get_course_info(self, user):
        # Map each course id to the id of the cohort the user belongs to.
        cohort_id_map = {
            cohort.course_id: cohort.id
            for cohort in user.course_groups.all()
        }
        # Courses in which the user holds a role that grants the
        # "see_all_cohorts" permission.
        see_all_cohorts_set = {
            role.course_id
            for role in user.roles.all()
            for perm in role.permissions.all() if perm.name == "see_all_cohorts"
        }
        ret = {}
        for enrollment in user.courseenrollment_set.all():
            if enrollment.is_active:
                try:
                    ret[unicode(enrollment.course_id)] = {
                        "cohort_id": cohort_id_map.get(enrollment.course_id),
                        "see_all_cohorts": (
                            enrollment.course_id in see_all_cohorts_set or
                            not is_course_cohorted(enrollment.course_id)
                        ),
                    }
                except Http404:  # is_course_cohorted raises this if course does not exist
                    pass
        return ret

    class Meta(object):
        model = User
        fields = ("id", "email", "name", "preferences", "course_info")
        read_only_fields = ("id", "email")
| agpl-3.0 |
abstract-open-solutions/OCB | addons/mrp/company.py | 381 | 1383 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import osv,fields
class company(osv.osv):
    """Extend res.company with a manufacturing lead-time setting used by MRP."""
    _inherit = 'res.company'
    _columns = {
        # Safety margin (in days) added to every manufacturing operation.
        'manufacturing_lead': fields.float('Manufacturing Lead Time', required=True,
            help="Security days for each manufacturing operation."),
    }
    _defaults = {
        # One day of security margin by default.
        'manufacturing_lead': lambda *a: 1.0,
    }
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
ansible/ansible-modules-extras | system/lvol.py | 23 | 17012 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2013, Jeroen Hoekx <jeroen.hoekx@dsquare.be>, Alexander Bulimov <lazywolf0@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = '''
---
author:
- "Jeroen Hoekx (@jhoekx)"
- "Alexander Bulimov (@abulimov)"
module: lvol
short_description: Configure LVM logical volumes
description:
- This module creates, removes or resizes logical volumes.
version_added: "1.1"
options:
vg:
description:
- The volume group this logical volume is part of.
required: true
lv:
description:
- The name of the logical volume.
required: true
size:
description:
- The size of the logical volume, according to lvcreate(8) --size, by
default in megabytes or optionally with one of [bBsSkKmMgGtTpPeE] units; or
according to lvcreate(8) --extents as a percentage of [VG|PVS|FREE];
Float values must begin with a digit.
Resizing using percentage values was not supported prior to 2.1.
state:
choices: [ "present", "absent" ]
default: present
description:
- Control if the logical volume exists. If C(present) and the
volume does not already exist then the C(size) option is required.
required: false
active:
version_added: "2.2"
choices: [ "yes", "no" ]
default: "yes"
description:
- Whether the volume is activate and visible to the host.
required: false
force:
version_added: "1.5"
choices: [ "yes", "no" ]
default: "no"
description:
- Shrink or remove operations of volumes requires this switch. Ensures that
that filesystems get never corrupted/destroyed by mistake.
required: false
opts:
version_added: "2.0"
description:
- Free-form options to be passed to the lvcreate command
snapshot:
version_added: "2.1"
description:
- The name of the snapshot volume
required: false
pvs:
version_added: "2.2"
description:
- Comma separated list of physical volumes e.g. /dev/sda,/dev/sdb
required: false
shrink:
version_added: "2.2"
description:
- shrink if current size is higher than size requested
required: false
default: yes
notes:
- Filesystems on top of the volume are not resized.
'''
EXAMPLES = '''
# Create a logical volume of 512m.
- lvol:
vg: firefly
lv: test
size: 512
# Create a logical volume of 512m with disks /dev/sda and /dev/sdb
- lvol:
vg: firefly
lv: test
size: 512
pvs: /dev/sda,/dev/sdb
# Create cache pool logical volume
- lvol:
vg: firefly
lv: lvcache
size: 512m
opts: --type cache-pool
# Create a logical volume of 512g.
- lvol:
vg: firefly
lv: test
size: 512g
# Create a logical volume the size of all remaining space in the volume group
- lvol:
vg: firefly
lv: test
size: 100%FREE
# Create a logical volume with special options
- lvol:
vg: firefly
lv: test
size: 512g
opts: -r 16
# Extend the logical volume to 1024m.
- lvol:
vg: firefly
lv: test
size: 1024
# Extend the logical volume to consume all remaining space in the volume group
- lvol:
vg: firefly
lv: test
size: +100%FREE
# Extend the logical volume to take all remaining space of the PVs
- lvol:
vg: firefly
lv: test
size: 100%PVS
# Resize the logical volume to % of VG
- lvol:
vg: firefly
lv: test
size: 80%VG
force: yes
# Reduce the logical volume to 512m
- lvol:
vg: firefly
lv: test
size: 512
force: yes
# Set the logical volume to 512m and do not try to shrink if size is lower than current one
- lvol:
vg: firefly
lv: test
size: 512
shrink: no
# Remove the logical volume.
- lvol:
vg: firefly
lv: test
state: absent
force: yes
# Create a snapshot volume of the test logical volume.
- lvol:
vg: firefly
lv: test
snapshot: snap1
size: 100m
# Deactivate a logical volume
- lvol:
vg: firefly
lv: test
active: false
# Create a deactivated logical volume
- lvol:
vg: firefly
lv: test
size: 512g
active: false
'''
import re
# Matches the leading integer portion of a size field such as "512.00".
decimal_point = re.compile(r"(\d+)")


def mkversion(major, minor, patch):
    """Encode an LVM version triple as a single comparable integer.

    Each component occupies three decimal digits, e.g. 2.2.99 -> 2002099,
    so plain integer comparison orders versions correctly.
    """
    encoded = 0
    for component in (major, minor, patch):
        encoded = encoded * 1000 + int(component)
    return encoded


def parse_lvs(data):
    """Parse ``lvs --separator ';'`` output into a list of LV dicts.

    Each dict carries the LV ``name`` (with the ``[]`` markers of internal
    volumes stripped), its integral ``size`` in the requested units, and an
    ``active`` flag derived from the fifth character of ``lv_attr``.
    """
    parsed = []
    for row in data.splitlines():
        fields = row.strip().split(';')
        parsed.append({
            'name': fields[0].replace('[', '').replace(']', ''),
            'size': int(decimal_point.match(fields[1]).group(1)),
            'active': fields[2][4] == 'a',
        })
    return parsed


def parse_vgs(data):
    """Parse ``vgs --separator ';'`` output into a list of VG dicts.

    Each dict holds the VG ``name`` plus integral ``size``, ``free`` space
    and extent size (``ext_size``) in the requested units.
    """
    parsed = []
    for row in data.splitlines():
        fields = row.strip().split(';')
        parsed.append({
            'name': fields[0],
            'size': int(decimal_point.match(fields[1]).group(1)),
            'free': int(decimal_point.match(fields[2]).group(1)),
            'ext_size': int(decimal_point.match(fields[3]).group(1)),
        })
    return parsed
def get_lvm_version(module):
    """Return the installed LVM version encoded via mkversion(), or None.

    Runs ``lvm version`` through the module helpers; returns None when the
    command fails or its output does not contain a recognizable version line.
    """
    ver_cmd = module.get_bin_path("lvm", required=True)
    rc, out, err = module.run_command("%s version" % (ver_cmd))
    if rc != 0:
        return None
    # Raw string: the original non-raw literal contained invalid escape
    # sequences (\s, \d) which raise DeprecationWarning on Python >= 3.6.
    m = re.search(r"LVM version:\s+(\d+)\.(\d+)\.(\d+).*(\d{4}-\d{2}-\d{2})", out)
    if not m:
        return None
    return mkversion(m.group(1), m.group(2), m.group(3))
def main():
    """Create, remove, resize, activate or deactivate an LVM logical volume.

    Module entry point: reads parameters from AnsibleModule, inspects current
    VG/LV state via ``vgs``/``lvs`` and then runs the appropriate
    lvcreate/lvremove/lvextend/lvreduce/lvchange command.  Exits through
    module.exit_json()/fail_json() in every path.
    """
    module = AnsibleModule(
        argument_spec=dict(
            vg=dict(required=True),
            lv=dict(required=True),
            size=dict(type='str'),
            opts=dict(type='str'),
            state=dict(choices=["absent", "present"], default='present'),
            force=dict(type='bool', default='no'),
            shrink=dict(type='bool', default='yes'),
            active=dict(type='bool', default='yes'),
            snapshot=dict(type='str', default=None),
            pvs=dict(type='str')
        ),
        supports_check_mode=True,
    )

    # Determine if the "--yes" option should be used
    version_found = get_lvm_version(module)
    if version_found is None:  # `is None`, not `== None` (identity check)
        module.fail_json(msg="Failed to get LVM version number")
    version_yesopt = mkversion(2, 2, 99)  # First LVM with the "--yes" option
    if version_found >= version_yesopt:
        yesopt = "--yes"
    else:
        yesopt = ""

    vg = module.params['vg']
    lv = module.params['lv']
    size = module.params['size']
    opts = module.params['opts']
    state = module.params['state']
    force = module.boolean(module.params['force'])
    shrink = module.boolean(module.params['shrink'])
    active = module.boolean(module.params['active'])
    size_opt = 'L'   # lvcreate -L (absolute size); switched to 'l' for percentages
    size_unit = 'm'  # megabytes unless an explicit unit suffix is given
    snapshot = module.params['snapshot']
    pvs = module.params['pvs']

    if pvs is None:
        pvs = ""
    else:
        # lvcreate expects physical volumes space-separated.
        pvs = pvs.replace(",", " ")

    if opts is None:
        opts = ""

    # Add --test option when running in check-mode
    if module.check_mode:
        test_opt = ' --test'
    else:
        test_opt = ''

    if size:
        # LVCREATE(8) -l --extents option with percentage
        if '%' in size:
            size_parts = size.split('%', 1)
            size_percent = int(size_parts[0])
            if size_percent > 100:
                module.fail_json(msg="Size percentage cannot be larger than 100%")
            size_whole = size_parts[1]
            if size_whole == 'ORIGIN':
                module.fail_json(msg="Snapshot Volumes are not supported")
            elif size_whole not in ['VG', 'PVS', 'FREE']:
                module.fail_json(msg="Specify extents as a percentage of VG|PVS|FREE")
            size_opt = 'l'
            size_unit = ''

        if '%' not in size:
            # LVCREATE(8) -L --size option unit
            if size[-1].lower() in 'bskmgtpe':
                size_unit = size[-1].lower()
                size = size[0:-1]

            try:
                float(size)
                # Reject values like ".5" or "+1": must start with a digit.
                if not size[0].isdigit():
                    raise ValueError()
            except ValueError:
                module.fail_json(msg="Bad size specification of '%s'" % size)

    # when no unit was given, query sizes in megabytes
    if size_opt == 'l':
        unit = 'm'
    else:
        unit = size_unit

    # Get information on volume group requested
    vgs_cmd = module.get_bin_path("vgs", required=True)
    rc, current_vgs, err = module.run_command(
        "%s --noheadings -o vg_name,size,free,vg_extent_size --units %s --separator ';' %s" % (vgs_cmd, unit, vg))

    if rc != 0:
        if state == 'absent':
            module.exit_json(changed=False, stdout="Volume group %s does not exist." % vg)
        else:
            module.fail_json(msg="Volume group %s does not exist." % vg, rc=rc, err=err)

    vgs = parse_vgs(current_vgs)
    this_vg = vgs[0]

    # Get information on logical volume requested
    lvs_cmd = module.get_bin_path("lvs", required=True)
    rc, current_lvs, err = module.run_command(
        "%s -a --noheadings --nosuffix -o lv_name,size,lv_attr --units %s --separator ';' %s" % (lvs_cmd, unit, vg))

    if rc != 0:
        if state == 'absent':
            module.exit_json(changed=False, stdout="Volume group %s does not exist." % vg)
        else:
            module.fail_json(msg="Volume group %s does not exist." % vg, rc=rc, err=err)

    changed = False
    lvs = parse_lvs(current_lvs)

    # A snapshot is looked up (and created) under its own name.
    if snapshot is None:
        check_lv = lv
    else:
        check_lv = snapshot

    for test_lv in lvs:
        if test_lv['name'] == check_lv:
            this_lv = test_lv
            break
    else:
        this_lv = None

    if state == 'present' and not size:
        if this_lv is None:
            module.fail_json(msg="No size given.")

    msg = ''
    if this_lv is None:
        if state == 'present':
            # create LV
            lvcreate_cmd = module.get_bin_path("lvcreate", required=True)
            if snapshot is not None:
                cmd = "%s %s %s -%s %s%s -s -n %s %s %s/%s" % (lvcreate_cmd, test_opt, yesopt, size_opt, size, size_unit, snapshot, opts, vg, lv)
            else:
                cmd = "%s %s %s -n %s -%s %s%s %s %s %s" % (lvcreate_cmd, test_opt, yesopt, lv, size_opt, size, size_unit, opts, vg, pvs)
            rc, _, err = module.run_command(cmd)
            if rc == 0:
                changed = True
            else:
                module.fail_json(msg="Creating logical volume '%s' failed" % lv, rc=rc, err=err)
    else:
        if state == 'absent':
            # remove LV (destructive, so force is mandatory)
            if not force:
                module.fail_json(msg="Sorry, no removal of logical volume %s without force=yes." % (this_lv['name']))
            lvremove_cmd = module.get_bin_path("lvremove", required=True)
            rc, _, err = module.run_command("%s %s --force %s/%s" % (lvremove_cmd, test_opt, vg, this_lv['name']))
            if rc == 0:
                module.exit_json(changed=True)
            else:
                module.fail_json(msg="Failed to remove logical volume %s" % (lv), rc=rc, err=err)

        elif not size:
            # LV exists and no size requested: nothing to resize.
            pass

        elif size_opt == 'l':
            # Resize LV based on a % value
            tool = None
            size_free = this_vg['free']
            if size_whole == 'VG' or size_whole == 'PVS':
                size_requested = size_percent * this_vg['size'] / 100
            else:  # size_whole == 'FREE':
                size_requested = size_percent * this_vg['free'] / 100
            # A leading '+' means grow by that amount instead of resize-to.
            if '+' in size:
                size_requested += this_lv['size']
            if this_lv['size'] < size_requested:
                if (size_free > 0) and (('+' not in size) or (size_free >= (size_requested - this_lv['size']))):
                    tool = module.get_bin_path("lvextend", required=True)
                else:
                    module.fail_json(msg="Logical Volume %s could not be extended. Not enough free space left (%s%s required / %s%s available)" % (this_lv['name'], (size_requested - this_lv['size']), unit, size_free, unit))
            elif shrink and this_lv['size'] > size_requested + this_vg['ext_size']:  # more than an extent too large
                if size_requested == 0:
                    module.fail_json(msg="Sorry, no shrinking of %s to 0 permitted." % (this_lv['name']))
                elif not force:
                    module.fail_json(msg="Sorry, no shrinking of %s without force=yes" % (this_lv['name']))
                else:
                    tool = module.get_bin_path("lvreduce", required=True)
                    tool = '%s %s' % (tool, '--force')

            if tool:
                cmd = "%s %s -%s %s%s %s/%s %s" % (tool, test_opt, size_opt, size, size_unit, vg, this_lv['name'], pvs)
                rc, out, err = module.run_command(cmd)
                if "Reached maximum COW size" in out:
                    module.fail_json(msg="Unable to resize %s to %s%s" % (lv, size, size_unit), rc=rc, err=err, out=out)
                elif rc == 0:
                    changed = True
                    msg = "Volume %s resized to %s%s" % (this_lv['name'], size_requested, unit)
                elif "matches existing size" in err:
                    module.exit_json(changed=False, vg=vg, lv=this_lv['name'], size=this_lv['size'])
                elif "not larger than existing size" in err:
                    module.exit_json(changed=False, vg=vg, lv=this_lv['name'], size=this_lv['size'], msg="Original size is larger than requested size", err=err)
                else:
                    module.fail_json(msg="Unable to resize %s to %s%s" % (lv, size, size_unit), rc=rc, err=err)

        else:
            # resize LV based on absolute values
            tool = None
            if int(size) > this_lv['size']:
                tool = module.get_bin_path("lvextend", required=True)
            elif shrink and int(size) < this_lv['size']:
                if int(size) == 0:
                    module.fail_json(msg="Sorry, no shrinking of %s to 0 permitted." % (this_lv['name']))
                if not force:
                    module.fail_json(msg="Sorry, no shrinking of %s without force=yes." % (this_lv['name']))
                else:
                    tool = module.get_bin_path("lvreduce", required=True)
                    tool = '%s %s' % (tool, '--force')

            if tool:
                cmd = "%s %s -%s %s%s %s/%s %s" % (tool, test_opt, size_opt, size, size_unit, vg, this_lv['name'], pvs)
                rc, out, err = module.run_command(cmd)
                if "Reached maximum COW size" in out:
                    module.fail_json(msg="Unable to resize %s to %s%s" % (lv, size, size_unit), rc=rc, err=err, out=out)
                elif rc == 0:
                    changed = True
                elif "matches existing size" in err:
                    module.exit_json(changed=False, vg=vg, lv=this_lv['name'], size=this_lv['size'])
                elif "not larger than existing size" in err:
                    module.exit_json(changed=False, vg=vg, lv=this_lv['name'], size=this_lv['size'], msg="Original size is larger than requested size", err=err)
                else:
                    module.fail_json(msg="Unable to resize %s to %s%s" % (lv, size, size_unit), rc=rc, err=err)

    # Finally (de)activate an existing (or freshly resized) volume.
    if this_lv is not None:
        if active:
            lvchange_cmd = module.get_bin_path("lvchange", required=True)
            rc, _, err = module.run_command("%s -ay %s/%s" % (lvchange_cmd, vg, this_lv['name']))
            if rc == 0:
                module.exit_json(changed=((not this_lv['active']) or changed), vg=vg, lv=this_lv['name'], size=this_lv['size'])
            else:
                module.fail_json(msg="Failed to activate logical volume %s" % (lv), rc=rc, err=err)
        else:
            lvchange_cmd = module.get_bin_path("lvchange", required=True)
            rc, _, err = module.run_command("%s -an %s/%s" % (lvchange_cmd, vg, this_lv['name']))
            if rc == 0:
                module.exit_json(changed=(this_lv['active'] or changed), vg=vg, lv=this_lv['name'], size=this_lv['size'])
            else:
                module.fail_json(msg="Failed to deactivate logical volume %s" % (lv), rc=rc, err=err)

    module.exit_json(changed=changed, msg=msg)
# import module snippets
from ansible.module_utils.basic import *
# Run the module entry point when Ansible executes this file directly.
if __name__ == '__main__':
    main()
| gpl-3.0 |
sibskull/synaptiks | synaptiks/_bindings/xinput.py | 1 | 10525 | # -*- coding: utf-8 -*-
# Copyright (C) 2010, 2011 Sebastian Wiesner <lunaryorn@googlemail.com>
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""
synaptiks._bindings.xinput
==========================
ctypes-based libXi binding.
.. moduleauthor:: Sebastian Wiesner <lunaryorn@googlemail.com>
"""
from __future__ import (print_function, division, unicode_literals,
absolute_import)
from ctypes import (POINTER, Structure, byref, string_at, cast,
c_int, c_char_p, c_long, c_ulong, c_byte)
from synaptiks._bindings import xlib
from synaptiks._bindings.util import load_library, scoped_pointer
c_int_p = POINTER(c_int)
c_ulong_p = POINTER(c_ulong)
c_byte_p = POINTER(c_byte)
# XInput types
class XIAnyClassInfo(Structure):
    # Opaque forward declaration: the concrete input-class layouts are never
    # dereferenced from Python, so no _fields_ are declared.
    pass


XIAnyClassInfo_p = POINTER(XIAnyClassInfo)
class XIDeviceInfo(Structure):
    # Mirrors struct XIDeviceInfo from <X11/extensions/XInput2.h>.
    _fields_ = [
        ('deviceid', c_int),
        ('name', c_char_p),
        ('use', c_int),         # device type (see MASTER_*/SLAVE_* constants)
        ('attachment', c_int),  # paired/attached device id, per device type
        ('enabled', c_int),
        ('num_classes', c_int),
        ('classes', POINTER(XIAnyClassInfo_p))]


XIDeviceInfo_p = POINTER(XIDeviceInfo)
# Misc constants
PROP_MODE_REPLACE = 0
ANY_PROPERTY_TYPE = 0
# Special device IDs
#: Device id for all devices
ALL_DEVICES = 0
#: Device id for all master devices
ALL_MASTER_DEVICES = 1
# type types
#: Master pointer.
#:
#: With his type, :attr:`XIDeviceInfo.attachment` contains the device ID of the
#: paired master keyboard.
MASTER_POINTER = 1
#: Master keyboard
#:
#: With this type, :attr:`XIDeviceInfo.attachment` contains the device ID of the
#: paired master pointer.
MASTER_KEYBOARD = 2
#: Slave pointer
#:
#: With this type, :attr:`XIDeviceInfo.attachment` contains the device ID of the
#: the master pointer this device is attached to.
SLAVE_POINTER = 3
#: Slave keyboard
#:
#: With this type, :attr:`XIDeviceInfo.attachment` contains the device ID of the
#: the master keyboard this device is attached to.
SLAVE_KEYBOARD = 4
#: A slave device not attached to any master.
#:
#: With this type, :attr:`XIDeviceInfo.attachment` has an undefined value.
FLOATING_SLAVE = 5
SIGNATURES = dict(
XIQueryVersion=([xlib.Display_p, c_int_p, c_int_p], xlib.Status),
XIQueryDevice=([xlib.Display_p, c_int, c_int_p], XIDeviceInfo_p),
XIFreeDeviceInfo=([XIDeviceInfo_p], None),
XIListProperties=([xlib.Display_p, c_int, c_int_p], xlib.Atom_p),
XIGetProperty=([xlib.Display_p, c_int, xlib.Atom, c_long, c_long,
xlib.Bool, xlib.Atom, xlib.Atom_p, c_int_p,
c_ulong_p, c_ulong_p, POINTER(c_byte_p)], xlib.Status),
XIChangeProperty=([xlib.Display_p, c_int, xlib.Atom, xlib.Atom,
c_int, c_int, c_byte_p, c_int], None),
)
libXi = load_library('Xi', SIGNATURES)
# add libXi functions under pythonic names and with pythonic api to
# top-level namespace
def query_version(display, expected_version):
    """
    Ask the X server which XInput version it supports.

    ``display`` is a :class:`~synaptiks._bindings.xlib.Display_p` providing
    the server connection; ``expected_version`` is an integer tuple
    ``(major, minor)`` with the minimum version the caller requires.

    Return ``(matched, actual_version)``.  ``matched`` is ``True`` if the
    server supports at least ``expected_version``, ``False`` otherwise.
    ``actual_version`` is the ``(major, minor)`` tuple actually provided by
    the server.
    """
    expected_major, expected_minor = expected_version
    major = c_int(expected_major)
    minor = c_int(expected_minor)
    status = libXi.XIQueryVersion(display, byref(major), byref(minor))
    return (status == xlib.SUCCESS, (major.value, minor.value))
def query_device(display, deviceid):
    """
    Query one or more input devices.

    ``display`` is a :class:`~synaptiks._bindings.xlib.Display_p` providing
    the server connection.  ``deviceid`` is either a concrete integral device
    id or one of the :ref:`special IDs <xinput-special-ids>` to query several
    devices at once.

    Return ``(number_of_devices, devices)``.  ``devices`` is a
    :class:`XIDeviceInfo_p` pointing to a C array of :class:`XIDeviceInfo`
    records which must be released with :func:`free_device_info`; wrapping it
    in :func:`~synaptiks._bindings.util.scoped_pointer` inside a ``with``
    block is recommended.
    """
    device_count = c_int(0)
    device_array = libXi.XIQueryDevice(display, deviceid, byref(device_count))
    return (device_count.value, device_array)
#: Free a device array returned by :func:`query_device` (XIFreeDeviceInfo).
free_device_info = libXi.XIFreeDeviceInfo
def list_properties(display, deviceid):
    """
    List the atoms of all properties defined on a device.

    ``display`` is a :class:`~synaptiks._bindings.xlib.Display_p` providing
    the server connection; ``deviceid`` is an integral device id.  Resolve the
    returned atoms to names with
    :func:`~synaptiks._bindings.xlib.get_atom_name`.

    Return ``(number_of_properties, property_atoms)``.  ``property_atoms`` is
    an :class:`~synaptiks._bindings.xlib.Atom_p` pointing to a C array of
    atoms which must be released with :func:`synaptiks._bindings.xlib.free`;
    wrapping it in :func:`~synaptiks._bindings.util.scoped_pointer` inside a
    ``with`` block is recommended.
    """
    atom_count = c_int(0)
    atom_array = libXi.XIListProperties(display, deviceid, byref(atom_count))
    return (atom_count.value, atom_array)
def get_property(display, deviceid, property):
    """
    Get the given ``property`` from the device with the given id.

    ``display`` is a :class:`~synaptiks._bindings.xlib.Display_p` providing the
    server connection, ``deviceid`` is an integer with a device id.
    ``property`` is a :class:`~synaptiks._bindings.xlib.Atom` with the X11 atom
    of the property to get.

    Return a tuple ``(type, format, data)``.  ``type`` and ``format`` are
    integers, ``data`` is a byte string.  If the property exists on the
    device, ``type`` contains the type atom, ``format`` the format of the
    property (one of ``8``, ``16`` or ``32``) and ``data`` the property
    contents as bytes.  Otherwise ``type`` is
    :data:`~synaptiks._bindings.xlib.NONE`, ``format`` is ``0`` and ``data``
    is an empty string.
    """
    # ``length`` is the amount of data requested in 32-bit multiples; start
    # small and grow until the server reports no bytes remaining.
    length = 1
    while True:
        type_return = xlib.Atom(0)
        format_return = c_int(0)
        num_items_return = c_ulong(0)
        bytes_after_return = c_ulong(0)
        data = c_byte_p()
        state = libXi.XIGetProperty(
            display, deviceid, property, 0, length, False,
            ANY_PROPERTY_TYPE, byref(type_return), byref(format_return),
            byref(num_items_return), byref(bytes_after_return), byref(data))
        # The server allocates ``data``; scoped_pointer frees it on every path.
        with scoped_pointer(data, xlib.free):
            if state != xlib.SUCCESS:
                # XXX: better diagnostics (no error details are propagated)
                raise EnvironmentError()
            if bytes_after_return.value == 0:
                # The whole property fitted into ``length`` units: decode it.
                format = format_return.value
                type = type_return.value
                number_of_items = num_items_return.value
                # ``format`` is the per-item width in bits (8, 16 or 32).
                byte_length = number_of_items * format // 8
                return (type, format, string_at(data, byte_length))
            else:
                # Property larger than requested: retry with a bigger request.
                length += 1
def change_property(display, deviceid, property, type, format, data):
    """
    Change the given ``property`` on the device with the given id.

    Properties store binary ``data``; the X server interprets it through the
    accompanying ``type`` (an :class:`~synaptiks._bindings.xlib.Atom`, e.g.
    :data:`~synaptiks._bindings.xlib.INTEGER` or the interned ``'FLOAT'``
    atom) and ``format`` (bits per item: 8, 16 or 32).

    ``display`` is a :class:`~synaptiks._bindings.xlib.Display_p` providing
    the server connection, ``deviceid`` an integral device id, ``property``
    the atom of the property to change.

    Raise :exc:`~exceptions.ValueError` if ``format`` is not 8, 16 or 32.
    """
    if format not in (8, 16, 32):
        raise ValueError(format)
    # The item count follows directly from the payload size and item width.
    total_bits = len(data) * 8
    item_count = total_bits // format
    libXi.XIChangeProperty(display, deviceid, property, type, format,
                           PROP_MODE_REPLACE, cast(c_char_p(data), c_byte_p),
                           item_count)
| bsd-2-clause |
google/mirandum | alerts/streamjar/views.py | 1 | 2919 | # Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from datetime import datetime
from django.core.exceptions import ObjectDoesNotExist
from django.http import HttpResponseRedirect
from django.contrib.auth.decorators import login_required
from django.shortcuts import render
import django.forms as forms
from main.support import ac
from streamjar.models import StreamjarUpdate, StreamjarAlertConfig
from streamjar.signals import config_to_alert
MODULE_NAME = "streamjar"
@login_required
def home(request):
    """Render the Streamjar overview page for the signed-in user."""
    configs = StreamjarUpdate.objects.filter(user=request.user)
    alert_configs = StreamjarAlertConfig.objects.filter(user=request.user)
    context = {'configs': configs, 'alerts': alert_configs}
    return render(request, "streamjar/home.html", context)
class StreamjarForm(forms.Form):
    """Setup form collecting the user's Streamjar access token."""
    # Access token pasted by the user; stored on StreamjarUpdate in setup().
    access_token = forms.CharField()
@login_required
def setup(request):
    """Display and process the form that links a Streamjar access token.

    On a valid POST the token is stored as a StreamjarUpdate for the current
    user and the browser is redirected to the accounts page; otherwise the
    setup form (bound on invalid POST, empty on GET) is re-rendered.
    """
    if request.POST:
        form = StreamjarForm(request.POST)
        if form.is_valid():
            update = StreamjarUpdate(
                access_token=form.cleaned_data['access_token'],
                type="streamjar",
                user=request.user)
            update.save()
            return HttpResponseRedirect("/accounts/")
    else:
        form = StreamjarForm()
    return render(request, "streamjar/setup.html", {'form': form})
class AlertForm(forms.ModelForm):
    """ModelForm for editing the look/sound configuration of a Streamjar alert."""
    class Meta:
        model = StreamjarAlertConfig
        fields = ['image_url', 'sound_url', 'alert_text', 'blacklist', 'font', 'font_size', 'font_color', 'filter_type', 'filter_amount', 'layout', 'animation_in', 'animation_out', 'font_effect']
        # Widen the free-text inputs; the defaults are too narrow for URLs.
        widgets = {
            'image_url': forms.TextInput(attrs={'size': 50}),
            'sound_url': forms.TextInput(attrs={'size': 50}),
            'alert_text': forms.TextInput(attrs={'size': 50}),
        }
@login_required
def test_alert(request, alert_id=None):
    """Fire a test donation through the given alert config, then redirect.

    ``?ret=alerts`` sends the user back to the alert page; anything else
    returns to the Streamjar overview.
    """
    # Renamed from ``ac``: that name shadowed the ``main.support.ac`` helper
    # imported at module level (and used below to build ``alert_config``).
    config = StreamjarAlertConfig.objects.get(pk=int(alert_id), user=request.user)
    config_to_alert(config, {'name': 'Livestream Alerts', 'amount': '$17.32', 'comment': 'Test Streamjar Donation from Livestream Alerts'}, True)
    if request.GET.get('ret') == 'alerts':
        return HttpResponseRedirect("/alert_page")
    return HttpResponseRedirect("/streamjar/")
# Register this module with the shared alert framework (``ac`` imported from
# main.support); the trailing dict supplies the default alert settings.
alert_config = ac(
    MODULE_NAME,
    AlertForm,
    StreamjarAlertConfig,
    {"alert_text": "[[name]] has donated [[amount]]![[br]][[comment]]"})
| apache-2.0 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.