repo_name stringlengths 6 100 | path stringlengths 4 294 | copies stringlengths 1 5 | size stringlengths 4 6 | content stringlengths 606 896k | license stringclasses 15
values |
|---|---|---|---|---|---|
Thrameos/jpype | test/jpypetest/test_ref.py | 2 | 2285 | # *****************************************************************************
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# See NOTICE file for details.
#
# *****************************************************************************
import sys
import _jpype
import jpype
from jpype import JImplements, JOverride
from jpype.types import *
import common
class ReferenceQueueTestCase(common.JPypeTestCase):
    """Exercise the JPype reference queue that cleans up Java-side
    references held for Python objects (proxy path, direct-buffer path)."""

    def setUp(self):
        common.JPypeTestCase.setUp(self)
        self.refqueue = jpype.JClass(
            'org.jpype.ref.JPypeReferenceQueue').getInstance()

    def testAccess(self):
        # Make sure we can get the instance.
        # assertIsNotNone instead of assertTrue(x != None): clearer failure
        # message and avoids the != None comparison idiom.
        self.assertIsNotNone(self.refqueue)

    def testRunning(self):
        # The queue's reaper thread must be alive.
        self.assertTrue(self.refqueue.isRunning())

    def testRefs(self):
        # This routine will exercise each of the clean up paths once
        fixture = JClass("jpype.common.Fixture")()

        def f():
            # Create a proxy to test the proxy path
            @JImplements("java.util.function.Supplier")
            class MySupplier(object):
                @JOverride
                def get(self):
                    # Send a Python exc to trigger Python ref path
                    raise RuntimeError("foo")
            try:
                u = MySupplier()
                fixture.callSupplier(u)
            except RuntimeError:
                # Expected: the proxy's Python exception propagates back.
                pass
        f()
        # Force a direct buffer and then trash it
        b = bytearray([1, 2, 3])
        _jpype.convertToDirectBuffer(b)
        # Then force a GC to clean it up
        jpype.java.lang.System.gc()
        # We can't check the results here as the GC may chose not
        # to run which would trigger a failure
| apache-2.0 |
slank/ansible | contrib/inventory/ssh_config.py | 160 | 3979 | #!/usr/bin/env python
# (c) 2014, Tomas Karasek <tomas.karasek@digile.fi>
#
# This file is part of Ansible.
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Dynamic inventory script which lets you use aliases from ~/.ssh/config.
#
# There were some issues with various Paramiko versions. I took a deeper look
# and tested heavily. Now, ansible parses this alright with Paramiko versions
# 1.7.2 to 1.15.2.
#
# It prints inventory based on parsed ~/.ssh/config. You can refer to hosts
# with their alias, rather than with the IP or hostname. It takes advantage
# of the ansible_ssh_{host,port,user,private_key_file}.
#
# If you have in your .ssh/config:
# Host git
# HostName git.domain.org
# User tkarasek
# IdentityFile /home/tomk/keys/thekey
#
# You can do
# $ ansible git -m ping
#
# Example invocation:
# ssh_config.py --list
# ssh_config.py --host <alias>
import argparse
import os.path
import sys
import paramiko
try:
import json
except ImportError:
import simplejson as json
# Path of the ssh client configuration parsed for inventory.
SSH_CONF = '~/.ssh/config'

# Name of the single inventory group that every alias is placed in.
_key = 'ssh_config'

# ssh_config option -> ansible inventory variable it populates.
_ssh_to_ansible = [('user', 'ansible_ssh_user'),
                   ('hostname', 'ansible_ssh_host'),
                   ('identityfile', 'ansible_ssh_private_key_file'),
                   ('port', 'ansible_ssh_port')]
def get_config():
    """Parse ~/.ssh/config and return {alias: {option: value}}.

    Aliases containing wildcards ('*' or '?') are skipped because they do
    not name a single host. Returns an empty dict when the file is absent.
    """
    if not os.path.isfile(os.path.expanduser(SSH_CONF)):
        return {}
    with open(os.path.expanduser(SSH_CONF)) as f:
        cfg = paramiko.SSHConfig()
        cfg.parse(f)
    ret_dict = {}
    # NOTE(review): cfg._config is a private paramiko attribute; kept here
    # because the script was validated against paramiko 1.7.2-1.15.2 (see
    # header comment) whose public lookup API differs between versions.
    for d in cfg._config:
        # isinstance instead of `type(...) is list`: same behavior for the
        # plain lists paramiko produces, and the idiomatic type check.
        if isinstance(d['host'], list):
            alias = d['host'][0]
        else:
            alias = d['host']
        if ('?' in alias) or ('*' in alias):
            continue
        _copy = dict(d)
        del _copy['host']
        # Newer paramiko versions nest the options under a 'config' key.
        if 'config' in _copy:
            ret_dict[alias] = _copy['config']
        else:
            ret_dict[alias] = _copy
    return ret_dict
def print_list():
    """Emit the full inventory (one group plus _meta hostvars) as JSON."""
    cfg = get_config()
    meta = {'hostvars': {}}
    for alias, attributes in cfg.items():
        host_vars = {}
        for ssh_opt, ans_opt in _ssh_to_ansible:
            if ssh_opt not in attributes:
                continue
            # If the attribute is a list, just take the first element.
            # Private key is returned in a list for some reason.
            attr = attributes[ssh_opt]
            if type(attr) is list:
                attr = attr[0]
            host_vars[ans_opt] = attr
        if host_vars:
            meta['hostvars'][alias] = host_vars
    print(json.dumps({_key: list(set(meta['hostvars'].keys())), '_meta': meta}))
def print_host(host):
    """Print the parsed ssh options of a single alias as JSON (--host mode)."""
    config = get_config()
    print(json.dumps(config[host]))
def get_args(args_list):
    """Build the CLI parser and parse *args_list*.

    Exactly one of --list / --host is required (mutually exclusive group).
    """
    parser = argparse.ArgumentParser(
        description='ansible inventory script parsing .ssh/config')
    group = parser.add_mutually_exclusive_group(required=True)
    group.add_argument('--list', action='store_true',
                       help='list all hosts from .ssh/config inventory')
    group.add_argument('--host', help='display variables for a host')
    return parser.parse_args(args_list)
def main(args_list):
    # Dispatch on the (mutually exclusive) --list / --host options.
    args = get_args(args_list)
    if args.list:
        print_list()
    if args.host:
        print_host(args.host)
if __name__ == '__main__':
main(sys.argv[1:])
| gpl-3.0 |
gkarlin/django-jenkins | build/Django/django/contrib/messages/storage/__init__.py | 116 | 1185 | from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.utils.importlib import import_module
def get_storage(import_path):
    """Import and return the message storage class named by the dotted
    *import_path* (full Python path to the class).

    Raises ImproperlyConfigured when the path has no dot, the module cannot
    be imported, or the class is missing from the module.
    """
    module, sep, classname = import_path.rpartition('.')
    if not sep:
        raise ImproperlyConfigured("%s isn't a Python path." % import_path)
    try:
        mod = import_module(module)
    except ImportError as e:
        raise ImproperlyConfigured('Error importing module %s: "%s"' %
                                   (module, e))
    try:
        return getattr(mod, classname)
    except AttributeError:
        raise ImproperlyConfigured('Module "%s" does not define a "%s" '
                                   'class.' % (module, classname))
# Callable with the same interface as the storage classes i.e. accepts a
# 'request' object. It is wrapped in a lambda to stop 'settings' being used at
# the module level. Because settings.MESSAGE_STORAGE is read on every call,
# the configured backend can change (e.g. in tests) after import.
default_storage = lambda request: get_storage(settings.MESSAGE_STORAGE)(request)
| lgpl-3.0 |
MatthewWilkes/django-oscar | tests/unit/offer/results_tests.py | 62 | 1039 | from decimal import Decimal as D
from django.test import TestCase
from oscar.apps.offer import models, results
class TestOfferApplicationsObject(TestCase):
    """Tests for the bookkeeping done by results.OfferApplications."""

    def setUp(self):
        self.applications = results.OfferApplications()
        self.offer = models.ConditionalOffer()

    def test_is_countable(self):
        self.assertEqual(0, len(self.applications))

    def test_can_filter_shipping_discounts(self):
        discount = models.ShippingDiscount()
        self.applications.add(self.offer, discount)
        self.assertEqual(1, len(self.applications.shipping_discounts))

    def test_can_filter_offer_discounts(self):
        discount = models.BasketDiscount(D('2.00'))
        self.applications.add(self.offer, discount)
        self.assertEqual(1, len(self.applications.offer_discounts))

    def test_can_filter_post_order_actions(self):
        action = models.PostOrderAction("Something will happen")
        self.applications.add(self.offer, action)
        self.assertEqual(1, len(self.applications.post_order_actions))
| bsd-3-clause |
SGenheden/Scripts | Mol/parse_optq.py | 1 | 2134 | # Author: Samuel Genheden samuel.genheden@gmail.com
"""
Program to parse RESP charges and make Gromacs residue template file (.rtp)
Atoms in the PDB file need to be in the same order as in the charge file
The atom types file need to have an atomtype definition on each line
NAME1 TYPE1
NAME2 TYPE2
...
Used in membrane engineering project
Examples
--------
parse_optq.py -f model0_1.pdb -q qout -o model0.rtp -t atypes.txt
Make an rtp file based on model0_1 and qout
"""
import argparse
import parmed
if __name__ == '__main__':

    argparser = argparse.ArgumentParser(description="Script to parse optimal charges")
    argparser.add_argument('-f','--file',help="the PDB file")
    argparser.add_argument('-q','--qout',help="the output charges",default="qout")
    argparser.add_argument('-o','--out',help="the output RTP file")
    argparser.add_argument('-t','--types',help="a file with atom types")
    args = argparser.parse_args()

    struct = parmed.load_file(args.file)

    # Concatenate every line of the charge file into one whitespace-separated
    # string; RESP output wraps the charge list over several lines.
    qline = ""
    with open(args.qout, "r") as f :
        line = f.readline()
        while line :
            qline += line.strip() + " "
            line = f.readline()
    charges = map(float,qline.strip().split())

    # Echo name/charge pairs (Python 2 print statement). Atoms in the PDB are
    # assumed to be in the same order as the charges (see module docstring).
    for atom, charge in zip(struct.atoms, charges) :
        print "%4s%10.6f"%(atom.name, charge)

    if args.out is not None :
        # Read the atom-type map: one "NAME TYPE" pair per line.
        atype = {}
        with open(args.types, "r") as f :
            for line in f.readlines() :
                a, t = line.strip().split()
                atype[a] = t
        # Write a minimal Gromacs .rtp residue entry named UNK.
        with open(args.out, "w") as f :
            f.write("[ bondedtypes ]\n")
            f.write("1 5 9 2 1 3 1 0\n\n")
            f.write("[ UNK ]\n\n")
            f.write("[ atoms ]\n")
            for i, (atom, charge) in enumerate(zip(struct.atoms, charges)) :
                f.write("%5s %6s %10.6f %3d\n"%(atom.name,
                        atype[atom.name], charge, i))
            f.write("\n[ bonds ]\n")
            for bond in struct.bonds :
                f.write("%5s %5s\n"%(bond.atom1.name, bond.atom2.name))
            f.write("\n")
| mit |
joliva/wiki-appengine | main.py | 1 | 12161 | #!/usr/bin/env python
import cgi, re, os, logging, string
import hmac, random
from datetime import datetime
import webapp2, jinja2
from google.appengine.ext import db
from google.appengine.api import memcache
# Jinja2 environment rooted at ./templates. Autoescape is disabled
# (templates appear to receive pre-built HTML fragments -- confirm before
# rendering any user-supplied text through them).
template_dir = os.path.join(os.path.dirname(__file__), 'templates')
jinja_env = jinja2.Environment(loader = jinja2.FileSystemLoader(template_dir), autoescape=False)

# Validation patterns for the signup form fields.
UNAME_RE = re.compile(r"^[a-zA-Z0-9_-]{3,20}$")
UPASS_RE = re.compile(r"^.{3,20}$")
UEMAIL_RE = re.compile(r"^[\S]+@[\S]+\.[\S]+$")

# NOTE(review): hard-coded HMAC key used to sign session cookies; should
# live in secret configuration, not source code.
COOKIE_SALT = 'KISSMYGRITS'
def valid_username(username):
    # Truthy (a match object) when username is 3-20 chars of [a-zA-Z0-9_-].
    return UNAME_RE.match(username)

def valid_password(password):
    # Any 3-20 character password is accepted.
    return UPASS_RE.match(password)

def valid_email(email):
    # Email is optional: the empty string counts as valid.
    return email == "" or UEMAIL_RE.match(email)

def make_salt():
    # salt will be a random six character string (lowercase a-z)
    return ''.join([chr(random.randint(97,122)) for idx in xrange(6)])

def make_password_hash(password):
    # Returns "<hmac hexdigest>|<salt>", or None for a falsy password.
    # NOTE(review): hmac.new without an explicit digestmod defaults to MD5
    # on Python 2 -- consider a stronger digest.
    if password:
        salt = make_salt()
        return hmac.new(salt, password).hexdigest() + ('|%s' % salt)
    else:
        return None
class WikiUsers(db.Model):
    """Datastore model for a wiki account.

    password_hash stores "<hmac hexdigest>|<salt>" as produced by
    make_password_hash().
    """
    username = db.StringProperty(required = True)
    password_hash = db.StringProperty(required = True)
    email = db.StringProperty()
    created = db.DateTimeProperty(auto_now_add = True)

    @staticmethod
    def get_user(username):
        """Return the WikiUsers entity for *username*, or None."""
        user = None
        if username:
            # Bind the value as a GQL parameter instead of interpolating it
            # into the query string: user-controlled input (form field) must
            # not be able to alter or break the query.
            user = db.GqlQuery(
                "SELECT * FROM WikiUsers WHERE username = :1", username).get()
        return user

    @staticmethod
    def create_user(user):
        # assumes properties of user were previously validated
        if user:
            user = WikiUsers(**user)
            key = user.put()
class WikiEntry(db.Model):
    # One revision of a wiki page. A page's history is the set of entities
    # sharing the same name, ordered by 'created' (newest first).
    name = db.StringProperty(required = True, indexed = True)
    content = db.TextProperty(required = True)
    created = db.DateTimeProperty(auto_now_add = True, indexed = True)
class Handler(webapp2.RequestHandler):
    """Base request handler: template rendering, a memcache-backed page
    cache over WikiEntry, and HMAC-signed session cookies."""

    def update_cache(self, name, value):
        # store in cache, tagged with the insertion time so pages can show
        # the cache age
        logging.info('insert %s into cache', name)
        memcache.set(name, {'cached_time': datetime.now(), 'content': value})

    def store(self, name, content):
        # insert new wiki entry (a new revision) into datastore
        p = WikiEntry(name=name, content=content)
        key = p.put()
        # update cache
        self.update_cache(name, content)

    def retrieve(self, name, id=None):
        """Return {'cached_time', 'content'} for page *name*, or None.

        When a revision id is given, read that revision straight from the
        datastore and bypass the cache.
        """
        if id != None and id != '':
            value = WikiEntry.get_by_id(int(id)).content
            return {'cached_time': datetime.now(), 'content': value}
        else:
            # attempt first to get page from cache
            value = memcache.get(name)
            if value:
                return value
            else:
                logging.info('%s is not in the cache', name)
                # Bind 'name' as a GQL parameter rather than interpolating
                # it into the query string: the value comes from the URL
                # path and must not be able to alter the query.
                entry = db.GqlQuery(
                    "SELECT * FROM WikiEntry WHERE name = :1 "
                    "ORDER BY created DESC LIMIT 1", name).get()
                if entry:
                    self.update_cache(name, entry.content)
                    value = memcache.get(name)
                    return value
                else:
                    logging.info('%s is not in the DB', name)
                    return None

    def retrieve_all(self, name):
        """Return up to 100 revisions of *name*, newest first."""
        # Parameter binding for the same reason as in retrieve().
        entries = db.GqlQuery(
            "SELECT * FROM WikiEntry WHERE name = :1 "
            "ORDER BY created DESC", name).fetch(100)
        return entries

    def write(self, *a, **kw):
        self.response.out.write(*a, **kw)

    def render_str(self, template, **params):
        t = jinja_env.get_template(template)
        return t.render(params)

    def render(self, template, **kw):
        self.write(self.render_str(template, **kw))

    def create_cookie(self, value):
        # cookie format: value|salted hash
        if value:
            return '%s|' % value + hmac.new(COOKIE_SALT, value).hexdigest()
        else:
            return None

    def store_cookie(self, key, value):
        if key and value:
            self.response.set_cookie(key, value=self.create_cookie(value), path='/')

    def remove_cookie(self, key):
        if key:
            self.response.set_cookie(key, value='', path='/')
            #self.response.delete_cookie(key)

    def get_cookie(self, key):
        # cookie format: value|salted hash
        if key:
            hashed_value = self.request.cookies.get(key)
            if hashed_value:
                # partition instead of split: a tampered cookie without a
                # '|' (or with extras) now fails verification and returns
                # None instead of raising ValueError (500 error).
                value, sep, salted_hash = hashed_value.partition('|')
                if sep and hashed_value == ('%s|' % value) + hmac.new(COOKIE_SALT, value).hexdigest():
                    return value
        return None
class Signup(Handler):
    """GET renders the signup form; POST validates the fields and creates
    the account, then logs the new user in."""

    def get(self):
        self.render('signup.html')

    def post(self):
        username = self.request.get("username")
        password = self.request.get("password")
        verify = self.request.get("verify")
        email = self.request.get("email")

        err_name=""
        err_pass=""
        err_vpass=""
        err_email=""
        err = False

        if not valid_username(username):
            err_name = "That's not a valid username."
            err = True
        if WikiUsers.get_user(username) != None:
            err_name = "That user already exists"
            err = True
        if not valid_password(password):
            # Blank the password fields so they are not echoed back.
            password=""
            verify=""
            err_pass = "That's not a valid password."
            err = True
        elif verify != password:
            password=""
            verify=""
            err_vpass = "Your passwords didn't match."
            err = True
        if not valid_email(email):
            err_email = "That's not a valid email."
            err = True

        if err == True:
            # Re-render the form with the error messages filled in.
            args = {"username":username, "password":password, "verify":verify, "email":email, "err_name":err_name, "err_pass":err_pass, "err_vpass":err_vpass, "err_email":err_email}
            self.render('signup.html', **args)
        else:
            # save new user into DB
            user = {}
            user['username'] = username
            user['password_hash'] = make_password_hash(password)
            user['email'] = email
            WikiUsers.create_user(user)
            # save login session cookie
            self.store_cookie('username', username)
            self.redirect(FRONT_URL)
class Login(Handler):
    """GET renders the login form; POST verifies the credentials and sets
    the signed session cookie on success."""

    def get(self):
        self.render('login.html')

    def post(self):
        username = self.request.get("username")
        password = self.request.get("password")
        err = False
        if username and password:
            # validate login credentials
            user = WikiUsers.get_user(username)
            if user:
                # password hash: hmac.new(salt, password).hexdigest() + '|' + salt
                password_hash = user.password_hash.encode('ascii')
                logging.info('password_hash = %s', password_hash)
                hashval, salt = password_hash.split('|')
                logging.info('hashval = %s salt=%s', hashval, salt)
                # NOTE(review): '==' is not a constant-time comparison;
                # hmac.compare_digest would be preferable. Also note the
                # stored hash is logged above -- consider removing.
                if hashval == hmac.new(salt, password).hexdigest():
                    # save login session cookie
                    self.store_cookie('username', username)
                    self.redirect(FRONT_URL)
                    return
        # Fall through: bad credentials or missing fields.
        args = {"username":username, "password":password, "error":'Invalid Login'}
        self.render('login.html', **args)
class Logout(Handler):
    # Clears the session cookie and returns to the front page.
    def get(self):
        self.remove_cookie('username')
        self.redirect(FRONT_URL)
class WikiPage(Handler):
    """Displays a wiki topic; unknown topics redirect to the edit page so
    they can be created. The header links depend on login state."""

    def get(self, name):
        if name == '': name = '_front'
        logging.info('name=%s', name)
        id = self.request.get('id')
        # attempt to retrieve page from DB (or a specific revision via ?id=)
        value = self.retrieve(name, id)
        if value == None:
            # redirect to an edit page to create the new entry
            logging.info('redirect to page to add new wiki topic: %s', BASE_EDIT + name)
            self.redirect(BASE_EDIT + name)
        else:
            # display the page
            now = datetime.now()
            delta_secs = (now - value['cached_time']).seconds
            if self.request.get('cause') == 'logoff':
                self.remove_cookie('username')
                self.redirect(BASE_URL + name) # reload page
            # determine if user logged in to set header
            username = self.get_cookie('username')
            if username:
                # Logged in: show edit/history links and a logout link.
                edit_link=BASE_EDIT + name
                edit_status='edit'
                edit_user_sep=' | '
                hist_link=BASE_HIST + name
                hist_status='history'
                wiki_user='<%s>' % username
                login_link=BASE_URL + name + '?cause=logoff'
                login_status='logout'
                login_signup_sep=''
                signup_link=''
                signup_status=''
            else:
                # Anonymous: history only, plus login/signup links.
                edit_link=BASE_URL + name
                edit_status=''
                edit_user_sep=''
                hist_link=BASE_HIST + name
                hist_status='history'
                wiki_user=''
                login_link=BASE_URL + '/login'
                login_status='login'
                login_signup_sep=' | '
                signup_link=BASE_URL + '/signup'
                signup_status='signup'
            args = dict(topic=name,
                        content=value['content'],
                        cache_time=delta_secs,
                        edit_link=edit_link,
                        edit_status=edit_status,
                        edit_user_sep=edit_user_sep,
                        hist_link=hist_link,
                        hist_status=hist_status,
                        wiki_user=wiki_user,
                        login_link=login_link,
                        login_status=login_status,
                        login_signup_sep=login_signup_sep,
                        signup_link=signup_link,
                        signup_status=signup_status)
            self.render('entry.html', **args)
class HistPage(Handler):
    """Shows the revision history (up to 100 revisions) of a topic."""

    def get(self, name):
        if self.request.get('cause') == 'logoff':
            self.remove_cookie('username')
            self.redirect(BASE_HIST + name) # reload page
        # determine if user logged in to set header
        username = self.get_cookie('username')
        if username:
            # Logged in: offer an edit link and a logout link.
            edit_link=BASE_EDIT + name
            edit_status='edit'
            edit_user_sep=''
            wiki_user='<%s>' % username
            login_link=BASE_HIST + name + '?cause=logoff'
            login_status='logout'
            login_signup_sep=''
            signup_link=''
            signup_status=''
        else:
            # Anonymous: "view" link back to the page plus login/signup.
            edit_link=BASE_URL + name
            edit_status='view'
            edit_user_sep=''
            wiki_user=''
            login_link=BASE_URL + '/login'
            login_status='login'
            login_signup_sep=' | '
            signup_link=BASE_URL + '/signup'
            signup_status='signup'
        entries = self.retrieve_all(name)
        args = dict(topic=name,
                    edit_link=edit_link,
                    edit_status=edit_status,
                    edit_user_sep=edit_user_sep,
                    wiki_user=wiki_user,
                    login_link=login_link,
                    login_status=login_status,
                    login_signup_sep=login_signup_sep,
                    signup_link=signup_link,
                    signup_status=signup_status,
                    entries=entries)
        self.render('history.html', **args)
class EditPage(Handler):
    """Edit form for a topic. GET requires a login cookie (401 otherwise);
    POST saves a new revision."""

    def get(self, name):
        if self.request.get('cause') == 'logoff':
            self.remove_cookie('username')
            self.redirect(BASE_URL + name) # reload page
        # determine if user logged in to set header
        username = self.get_cookie('username')
        if username:
            edit_link=BASE_URL + name
            edit_status='view'
            edit_user_sep=''
            wiki_user='<%s>' % username
            login_link=BASE_URL + name + '?cause=logoff'
            login_status='logout'
            login_signup_sep=''
            signup_link=''
            signup_status=''
            id = self.request.get('id')
            # attempt to retrieve page from DB (?id= selects a revision)
            value = self.retrieve(name, id)
            if value:
                content = value['content']
            else:
                content = ''
            args = dict(topic=name,
                        content=content,
                        edit_link=edit_link,
                        edit_status=edit_status,
                        edit_user_sep=edit_user_sep,
                        wiki_user=wiki_user,
                        login_link=login_link,
                        login_status=login_status,
                        login_signup_sep=login_signup_sep,
                        signup_link=signup_link,
                        signup_status=signup_status)
            self.render('editentry.html', **args)
        else:
            # Anonymous user: render a 401 page instead of the edit form.
            edit_link=''
            edit_status=''
            edit_user_sep=''
            wiki_user=''
            login_link=BASE_URL + '/login'
            login_status='login'
            login_signup_sep=' | '
            signup_link=BASE_URL + '/signup'
            signup_status='signup'
            args = dict(topic=name,
                        msg='Not Authorized to create topic if not logged in.',
                        edit_link=edit_link,
                        edit_status=edit_status,
                        edit_user_sep=edit_user_sep,
                        wiki_user=wiki_user,
                        login_link=login_link,
                        login_status=login_status,
                        login_signup_sep=login_signup_sep,
                        signup_link=signup_link,
                        signup_status=signup_status)
            self.response.set_status(401)
            self.render('unauthorized.html', **args)

    def post(self, name):
        # NOTE(review): unlike get(), post() does not check the login
        # cookie, so an unauthenticated POST can save a revision -- confirm
        # whether that is intended.
        # validate field
        content = self.request.get('content')
        # save into datastore and cache
        self.store(name, content)
        # redirect to entry permalink
        self.redirect(BASE_URL + name)
class Flush(Handler):
    # Admin convenience endpoint: empties the whole memcache.
    def get(self):
        memcache.flush_all()
# URL layout: everything lives under /wiki; _edit and _history prefix the
# topic path.
BASE_URL = '/wiki'
FRONT_URL = BASE_URL + '/'
BASE_EDIT = BASE_URL + '/_edit'
BASE_HIST = BASE_URL + '/_history'

# Topic paths: slash-separated runs of [a-zA-Z0-9_-]; captured as one group.
PAGE_RE = r'(/(?:[a-zA-Z0-9_-]+/?)*)'

# Fixed routes are listed before the catch-all page routes so that
# /signup, /login, /logout and /flush are not treated as topics.
routes = [
    (BASE_URL + '/signup/?', Signup),
    (BASE_URL + '/login/?', Login),
    (BASE_URL + '/logout/?', Logout),
    (BASE_URL + '/flush/?', Flush),
    (BASE_EDIT + PAGE_RE + '/', EditPage),
    (BASE_EDIT + PAGE_RE, EditPage),
    (BASE_HIST + PAGE_RE + '/', HistPage),
    (BASE_HIST + PAGE_RE, HistPage),
    (BASE_URL + PAGE_RE + '/', WikiPage),
    (BASE_URL + PAGE_RE, WikiPage)
]
app = webapp2.WSGIApplication(routes, debug=True)
| bsd-3-clause |
huyang1532/python-data-mining-platform | pymining/classifier/naive_bayes.py | 8 | 5954 | import math
import pickle
from ..math.matrix import Matrix
from ..math.text2matrix import Text2Matrix
from ..nlp.segmenter import Segmenter
from ..common.global_info import GlobalInfo
from ..common.configuration import Configuration
class NaiveBayes:
    """Multinomial naive-Bayes classifier over a sparse term-count matrix
    (CSR-style: x.rows are row offsets into x.cols). Python 2 code.

    The trained model (vTable likelihoods + cPrior priors) is persisted to
    model_path as a pickled string that is itself pickled to the file.
    """

    def __init__(self, config, nodeName, loadFromFile = False):
        #store variable(term)'s likelihood to each class
        self.vTable = []
        #store prior of each class
        self.cPrior = []
        #store isTrained by data
        self.trained = loadFromFile
        self.curNode = config.GetChild(nodeName)
        self.modelPath = self.curNode.GetChild("model_path").GetValue()
        self.logPath = self.curNode.GetChild("log_path").GetValue()
        if (loadFromFile):
            # Model file holds a pickled string which is itself a pickle
            # (mirrors the double pickling done at the end of Train()).
            f = open(self.modelPath, "r")
            modelStr = pickle.load(f)
            [self.vTable, self.cPrior] = pickle.loads(modelStr)
            f.close()

    def Train(self, x, y):
        # x: sparse doc/term count matrix; y: class label per row.
        #check parameters
        if (x.nRow <> len(y)):
            print "ERROR!, x.nRow should == len(y)"
            return False
        #calculate prior of each class
        #1. init cPrior (indexed by class label, so labels must be small
        #   non-negative ints):
        yy = set(y)
        yy = list(yy)
        yy.sort()
        self.cPrior = [0 for i in range(yy[len(yy) - 1] + 1)]
        #2. fill cPrior with per-class document counts
        for i in y:
            self.cPrior[i] += 1
        #calculate likelihood of each term
        #1. init vTable (nCol x nClasses):
        self.vTable = [[0 for i in range(len(self.cPrior))] \
                for j in range(x.nCol)]
        #2. fill vTable with per-class term counts
        for r in range(x.nRow):
            for i in range(x.rows[r], x.rows[r + 1]):
                self.vTable[x.cols[i]][y[r]] += 1
        #normalize vTable by the class document counts
        for i in range(x.nCol):
            for j in range(len(self.cPrior)):
                if (self.cPrior[j] > 1e-10):
                    self.vTable[i][j] /= float(self.cPrior[j])
        #normalize cPrior to probabilities
        for i in range(len(self.cPrior)):
            self.cPrior[i] /= float(len(y))
        self.trained = True
        #dump model path (double pickling; see __init__)
        f = open(self.modelPath, "w")
        modelStr = pickle.dumps([self.vTable, self.cPrior], 1)
        pickle.dump(modelStr, f)
        f.close()
        return True

    def TestSample(self, cols, vals):
        # Classify one document given its term column ids.
        # NOTE(review): 'vals' is validated but otherwise unused, and unlike
        # Test() this method takes math.log(self.cPrior[target]) without
        # clamping, so a class with zero prior raises ValueError here.
        #check parameter
        if (not self.trained):
            print "Error!, not trained!"
            return False
        if (len(cols) <> len(vals)):
            print "Error! len of cols should == len of vals"
            return False
        #calculate best p (log-space accumulation per class)
        targetP = []
        maxP = -1000000000
        for target in range(len(self.cPrior)):
            curP = 0
            curP += math.log(self.cPrior[target])
            for c in range(0, len(cols)):
                # Zero likelihoods are floored at 1e-7 to avoid log(0).
                if (self.vTable[cols[c]][target] == 0):
                    curP += math.log(1e-7)
                else:
                    curP += math.log(self.vTable[cols[c]][target])
                #debug
                #if (self.logPath <> ""):
                #    term = GlobalInfo.idToTerm[cols[c]]
                #    prob = math.log(self.vTable[cols[c]][target] + 1e-7)
                #    f.write(term.encode("utf-8") + ":" + str(cols[c]) + ":" + str(prob) + "\n")
            targetP.append(curP)
            if (curP > maxP):
                bestY = target
                maxP = curP
        #normalize the per-class log scores into probabilities
        ret = []
        total = 0
        for i in range(len(targetP)):
            total += math.exp(targetP[i])
        for i in range(len(targetP)):
            ret.append((i, math.exp(targetP[i]) / total))
        return tuple(ret)

    def Test(self, x, y):
        # Classify every row of x and compare against labels y.
        # Returns [predicted labels, accuracy].
        #check parameter
        if (not self.trained):
            print "Error!, not trained!"
            return False
        if (x.nRow != len(y)):
            print "Error! x.nRow should == len(y)"
            return False
        retY = []
        correct = 0
        if (self.logPath <> ""):
            f = open(self.logPath, "w")
        #predict all doc one by one
        for r in range(x.nRow):
            bestY = -1
            maxP = -1000000000
            #debug
            if (self.logPath <> ""):
                f.write("\n ===============new doc=================")
            #calculate best p
            for target in range(len(self.cPrior)):
                curP = 0
                # Priors (and likelihoods below) are floored to keep log()
                # defined for zero counts.
                if (self.cPrior[target] > 1e-8):
                    curP += math.log(self.cPrior[target])
                else:
                    curP += math.log(1e-8)
                #debug
                #if (self.logPath <> ""):
                #    f.write("<target> : " + str(target) + "\n")
                for c in range(x.rows[r], x.rows[r + 1]):
                    if (self.vTable[x.cols[c]][target] == 0):
                        curP += math.log(1e-7)
                    else:
                        curP += math.log(self.vTable[x.cols[c]][target])
                    #debug
                    #if (self.logPath <> ""):
                    #    term = GlobalInfo.idToTerm[x.cols[c]]
                    #    prob = math.log(self.vTable[x.cols[c]][target] + 1e-7)
                    #    f.write(term.encode("utf-8") + ":" + str(x.cols[c]) + ":" + str(prob) + "\n")
                if (curP > maxP):
                    bestY = target
                    maxP = curP
                #debug
                #if (self.logPath <> ""):
                #    f.write("curP:" + str(curP) + "\n")
            if (bestY < 0):
                print "best y < 0, error!"
                return False
            if (bestY == y[r]):
                correct += 1
            #debug
            else:
                if (self.logPath <> ""):
                    f.write("predict error!")
            retY.append(bestY)
        if (self.logPath <> ""):
            f.close()
        return [retY, float(correct) / len(retY)]
| bsd-3-clause |
mm1ke/portage | pym/portage/package/ebuild/_config/features_set.py | 16 | 3844 | # Copyright 2010 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
__all__ = (
'features_set',
)
import logging
from portage.const import SUPPORTED_FEATURES
from portage.localization import _
from portage.output import colorize
from portage.util import writemsg_level
class features_set(object):
	"""
	Provides relevant set operations needed for access and modification of
	config.features. The FEATURES variable is automatically synchronized
	upon modification.

	Modifications result in a permanent override that will cause the change
	to propagate to the incremental stacking mechanism in config.regenerate().
	This eliminates the need to call config.backup_changes() when FEATURES
	is modified, since any overrides are guaranteed to persist despite calls
	to config.reset().
	"""

	def __init__(self, settings):
		self._settings = settings
		# Backing set of currently enabled FEATURES flags.
		self._features = set()

	def __contains__(self, k):
		return k in self._features

	def __iter__(self):
		return iter(self._features)

	def _sync_env_var(self):
		# Keep the FEATURES config variable in sync with the backing set.
		self._settings['FEATURES'] = ' '.join(sorted(self._features))

	def add(self, k):
		# The override is recorded even if the flag is already present, so
		# it persists through config.regenerate().
		self._settings.modifying()
		self._settings._features_overrides.append(k)
		if k not in self._features:
			self._features.add(k)
			self._sync_env_var()

	def update(self, values):
		self._settings.modifying()
		values = list(values)
		self._settings._features_overrides.extend(values)
		need_sync = False
		for k in values:
			if k in self._features:
				continue
			self._features.add(k)
			need_sync = True
		if need_sync:
			self._sync_env_var()

	def difference_update(self, values):
		self._settings.modifying()
		values = list(values)
		# Record negative overrides ('-flag') for every removed value.
		self._settings._features_overrides.extend('-' + k for k in values)
		remove_us = self._features.intersection(values)
		if remove_us:
			self._features.difference_update(values)
			self._sync_env_var()

	def remove(self, k):
		"""
		This never raises KeyError, since it records a permanent override
		that will prevent the given flag from ever being added again by
		incremental stacking in config.regenerate().
		"""
		self.discard(k)

	def discard(self, k):
		self._settings.modifying()
		self._settings._features_overrides.append('-' + k)
		if k in self._features:
			self._features.remove(k)
		self._sync_env_var()

	def _validate(self):
		"""
		Implements unknown-features-warn and unknown-features-filter.
		"""
		if 'unknown-features-warn' in self._features:
			unknown_features = \
				self._features.difference(SUPPORTED_FEATURES)
			if unknown_features:
				# Only warn about flags that have not been reported before.
				unknown_features = unknown_features.difference(
					self._settings._unknown_features)
				if unknown_features:
					self._settings._unknown_features.update(unknown_features)
					writemsg_level(colorize("BAD",
						_("FEATURES variable contains unknown value(s): %s") % \
						", ".join(sorted(unknown_features))) \
						+ "\n", level=logging.WARNING, noiselevel=-1)

		if 'unknown-features-filter' in self._features:
			unknown_features = \
				self._features.difference(SUPPORTED_FEATURES)
			if unknown_features:
				self.difference_update(unknown_features)
				self._prune_overrides()

	def _prune_overrides(self):
		"""
		If there are lots of invalid package.env FEATURES settings
		then unknown-features-filter can make _features_overrides
		grow larger and larger, so prune it. This performs incremental
		stacking with preservation of negative values since they need
		to persist for future config.regenerate() calls.
		"""
		# NOTE(review): overrides_set is computed but never used below.
		overrides_set = set(self._settings._features_overrides)
		positive = set()
		negative = set()
		for x in self._settings._features_overrides:
			if x[:1] == '-':
				positive.discard(x[1:])
				negative.add(x[1:])
			else:
				positive.add(x)
				negative.discard(x)
		self._settings._features_overrides[:] = \
			list(positive) + list('-' + x for x in negative)
| gpl-2.0 |
dmitry-sobolev/ansible | lib/ansible/modules/network/nxos/nxos_acl_interface.py | 19 | 9981 | #!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: nxos_acl_interface
extends_documentation_fragment: nxos
version_added: "2.2"
short_description: Manages applying ACLs to interfaces.
description:
- Manages applying ACLs to interfaces.
author:
- Jason Edelman (@jedelman8)
- Gabriele Gerbino (@GGabriele)
options:
name:
description:
- Case sensitive name of the access list (ACL).
required: true
interface:
description:
- Full name of interface, e.g. I(Ethernet1/1).
required: true
direction:
description:
- Direction ACL to be applied in on the interface.
required: true
choices: ['ingress', 'egress']
state:
description:
- Specify desired state of the resource.
required: false
default: present
choices: ['present','absent']
'''
EXAMPLES = '''
- name: apply egress acl to ethernet1/41
nxos_acl_interface:
name: ANSIBLE
interface: ethernet1/41
direction: egress
state: present
username: "{{ un }}"
password: "{{ pwd }}"
host: "{{ inventory_hostname }}"
'''
RETURN = '''
proposed:
description: k/v pairs of parameters passed into module
returned: always
type: dict
sample: {"direction": "egress", "interface": "ethernet1/41",
"name": "ANSIBLE"}
existing:
description: k/v pairs of existing ACL applied to the interface
type: dict
sample: {}
end_state:
description: k/v pairs of interface ACL after module execution
returned: always
type: dict
sample: {"direction": "egress", "interface": "ethernet1/41",
"name": "ANSIBLE"}
acl_applied_to:
description: list of interfaces the ACL is applied to
returned: always
type: list
sample: [{"acl_type": "Router ACL", "direction": "egress",
"interface": "Ethernet1/41", "name": "ANSIBLE"}]
updates:
description: commands sent to the device
returned: always
type: list
sample: ["interface ethernet1/41", "ip access-group ANSIBLE out"]
changed:
description: check to see if a change was made on the device
returned: always
type: boolean
sample: true
'''
import re
from ansible.module_utils.nxos import load_config, run_commands
from ansible.module_utils.nxos import nxos_argument_spec, check_args
from ansible.module_utils.basic import AnsibleModule
def execute_show_command(command, module, command_type='cli_show'):
    """Run *command* on the device and return the parsed output body."""
    transport = module.params['transport']
    if transport == 'cli':
        # Structured output over the CLI transport needs '| json';
        # 'show ... summary' output is consumed as plain text instead.
        if 'summary' not in command:
            command += ' | json'
        body = run_commands(module, [command])
    elif transport == 'nxapi':
        body = run_commands(module, [command])
    return body
def get_acl_interface(module, acl):
    """Return the interface bindings of ACL *acl* found on the device.

    Parses 'show ip access-list summary' plain-text output and returns a
    flat list of dicts, one per binding, each with the keys 'name',
    'interface', 'direction' and 'acl_type'.

    Fixes: regex patterns are now raw strings (the originals relied on
    Python passing unknown escapes like '\\s' through literally, which is
    a DeprecationWarning/SyntaxError in later Python versions); the
    unused local `intf_list` was dropped.
    """
    command = 'show ip access-list summary'
    name_regex = r'.*IPV4\s+ACL\s+(?P<name>\S+).*'
    interface_regex = (r'.*\s+(?P<interface>\w+(\d+)?\/?(\d+)?)\s-\s'
                       r'(?P<direction>\w+)\s+\W(?P<acl_type>\w+\s\w+)\W.*')
    acl_list = []
    body = execute_show_command(command, module, command_type='cli_show_ascii')

    # Each chunk after 'Active on interfaces:' describes one ACL and the
    # interfaces it is bound to.
    body_split = body[0].split('Active on interfaces:')

    for each_acl in body_split:
        temp = {}
        try:
            match_name = re.match(name_regex, each_acl, re.DOTALL)
            name_dict = match_name.groupdict()
            name = name_dict['name']
        except AttributeError:
            # re.match returned None: chunk has no ACL header.
            name = ''

        temp['interfaces'] = []
        for line in each_acl.split('\n'):
            intf_temp = {}
            try:
                match_interface = re.match(interface_regex, line, re.DOTALL)
                interface_dict = match_interface.groupdict()
                interface = interface_dict['interface']
                direction = interface_dict['direction']
                acl_type = interface_dict['acl_type']
            except AttributeError:
                # Line does not describe an interface binding.
                interface = ''
                direction = ''
                acl_type = ''

            if interface:
                intf_temp['interface'] = interface
            if acl_type:
                intf_temp['acl_type'] = acl_type
            if direction:
                intf_temp['direction'] = direction
            if intf_temp:
                temp['interfaces'].append(intf_temp)
        if name:
            temp['name'] = name
        if temp:
            acl_list.append(temp)

    # Flatten: keep only the requested ACL, one dict per binding.
    existing_no_null = []
    for each in acl_list:
        if each.get('name') == acl:
            interfaces = each.get('interfaces')
            for interface in interfaces:
                new_temp = {}
                new_temp['name'] = acl
                new_temp.update(interface)
                existing_no_null.append(new_temp)
    return existing_no_null
def other_existing_acl(get_existing, interface, direction):
    """Filter ACL bindings down to one interface and one direction.

    Returns a pair: every binding of the ACL on *interface* (the same
    ACL may legitimately be applied in both directions), and the single
    binding matching *direction* ({} when there is none).
    """
    matches = [entry for entry in (get_existing or [])
               if entry.get('interface').lower() == interface]

    acl_this_direction = {}
    for entry in matches:
        if entry.get('direction') == direction:
            acl_this_direction = entry
    return matches, acl_this_direction
def apply_acl(proposed):
    """Build the config commands that bind the proposed ACL.

    *proposed* carries 'interface', 'direction' ('ingress'/'egress') and
    'name'.  Returns the command list.

    Fix: the original if/elif left `cmd` unbound (NameError) for any
    unexpected direction; the explicit mapping now raises a clear
    KeyError instead, and removes the duplication with remove_acl().
    """
    # NX-OS spells the direction 'in'/'out' in the access-group command.
    keyword = {'egress': 'out', 'ingress': 'in'}[proposed.get('direction')]
    commands = ['interface ' + proposed.get('interface')]
    commands.append('ip access-group {0} {1}'.format(
        proposed.get('name'), keyword))
    return commands
def remove_acl(proposed):
    """Build the config commands that unbind the proposed ACL.

    Mirror image of apply_acl() with the 'no' form of the command.

    Fix: the original if/elif left `cmd` unbound (NameError) for any
    unexpected direction; the explicit mapping now raises a clear
    KeyError instead.
    """
    keyword = {'egress': 'out', 'ingress': 'in'}[proposed.get('direction')]
    commands = ['interface ' + proposed.get('interface')]
    commands.append('no ip access-group {0} {1}'.format(
        proposed.get('name'), keyword))
    return commands
def flatten_list(command_lists):
    """Flatten exactly one level of nesting.

    Sub-lists are spliced into the result; bare items are kept as-is.
    Deeper nesting is left untouched.
    """
    flattened = []
    for item in command_lists:
        # Wrap scalars so a single extend() handles both cases.
        flattened.extend(item if isinstance(item, list) else [item])
    return flattened
def main():
    """Ansible entry point: converge the ACL binding on one interface."""
    argument_spec = dict(
        name=dict(required=False, type='str'),
        interface=dict(required=True),
        direction=dict(required=True, choices=['egress', 'ingress']),
        state=dict(choices=['absent', 'present'],
                   default='present'),
        include_defaults=dict(default=True),
        config=dict(),
        save=dict(type='bool', default=False)
    )

    # Merge in the shared connection parameters (host, transport, ...).
    argument_spec.update(nxos_argument_spec)

    module = AnsibleModule(argument_spec=argument_spec,
                           supports_check_mode=True)

    warnings = list()
    check_args(module, warnings)

    state = module.params['state']
    name = module.params['name']
    # Lowercase for case-insensitive comparison against device output.
    interface = module.params['interface'].lower()
    direction = module.params['direction'].lower()

    proposed = dict(name=name, interface=interface, direction=direction)

    # includes all interfaces the ACL is applied to (list)
    get_existing = get_acl_interface(module, name)

    # interfaces_acls = entries of this ACL on the interface (list)
    # existing = dict - not empty if the binding already exists in
    # the requested direction
    interfaces_acls, existing = other_existing_acl(
        get_existing, interface, direction)

    end_state = existing
    end_state_acls = get_existing

    changed = False
    cmds = []
    commands = []

    # Idempotency: only emit commands when the device state differs
    # from the requested state.
    if state == 'present':
        if not existing:
            command = apply_acl(proposed)
            if command:
                commands.append(command)

    elif state == 'absent':
        if existing:
            command = remove_acl(proposed)
            if command:
                commands.append(command)

    if commands:
        cmds = flatten_list(commands)
        if cmds:
            if module.check_mode:
                # Check mode: report what would change without pushing it.
                module.exit_json(changed=True, commands=cmds)
            else:
                load_config(module, cmds)
                changed = True
                # Re-read the device so end_state reflects reality.
                end_state_acls = get_acl_interface(module, name)
                interfaces_acls, this_dir_acl_intf = other_existing_acl(
                    end_state_acls, interface, direction)
                end_state = this_dir_acl_intf
                # Drop the leading 'configure' echoed by some transports.
                if 'configure' in cmds:
                    cmds.pop(0)
    else:
        cmds = []

    results = {}
    results['proposed'] = proposed
    results['existing'] = existing
    results['updates'] = cmds
    results['changed'] = changed
    results['warnings'] = warnings
    results['end_state'] = end_state
    results['acl_applied_to'] = end_state_acls

    module.exit_json(**results)


if __name__ == '__main__':
    main()
| gpl-3.0 |
vveerava/Openstack | neutron/openstack/common/fixture/moxstubout.py | 73 | 1223 | # Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# Copyright 2013 Hewlett-Packard Development Company, L.P.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import fixtures
import mox
class MoxStubout(fixtures.Fixture):
    """Fixture that wires up mox/stubout and tears them down afterwards."""

    def setUp(self):
        super(MoxStubout, self).setUp()
        # mox's metaclass interferes with generator-based code, so build
        # the Mox instance by hand rather than relying on it.
        mocker = mox.Mox()
        self.mox = mocker
        self.stubs = mocker.stubs
        # addCleanup runs LIFO: expectations are verified first, then the
        # stubs are unset.
        self.addCleanup(mocker.UnsetStubs)
        self.addCleanup(mocker.VerifyAll)
| apache-2.0 |
cedriclaunay/gaffer | python/GafferImageUI/ImageViewToolbar.py | 2 | 5329 | ##########################################################################
#
# Copyright (c) 2013, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with
# the distribution.
#
# * Neither the name of John Haddon nor the names of
# any other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import Gaffer
import GafferUI
import GafferImageUI
# Toggles between default value and the last non-default value
class _TogglePlugValueWidget( GafferUI.PlugValueWidget ) :
    """Button widget that flips a plug between its default value and the
    last non-default value it was seen holding (or defaultToggleValue
    before any non-default value has been observed)."""

    def __init__( self, plug, imagePrefix, defaultToggleValue = None, **kw ) :

        row = GafferUI.ListContainer( GafferUI.ListContainer.Orientation.Horizontal, spacing = 2 )
        GafferUI.PlugValueWidget.__init__( self, row, plug, **kw )

        # Button image is imagePrefix + "On.png" / "Off.png".
        self.__imagePrefix = imagePrefix
        with row :
            self.__button = GafferUI.Button( "", self.__imagePrefix + "Off.png", hasFrame=False )
            self.__clickedConnection = self.__button.clickedSignal().connect( Gaffer.WeakMethod( self.__clicked ) )

            # Non-bool plugs also get a small numeric entry next to the button.
            if not isinstance( plug, Gaffer.BoolPlug ) :
                plugValueWidget = GafferUI.PlugValueWidget.create( plug, useTypeOnly=True )
                plugValueWidget.numericWidget().setFixedCharacterWidth( 5 )

        self.__toggleValue = defaultToggleValue
        self._updateFromPlug()

    def hasLabel( self ) :
        return True

    def getToolTip( self ) :
        result = GafferUI.PlugValueWidget.getToolTip( self )

        result += "<ul>"
        result += "<li>Click to toggle to/from default value</li>"
        # Fix: the original appended a second "<ul>" here, leaving the
        # tooltip HTML list unclosed.
        result += "</ul>"

        return result

    def _updateFromPlug( self ) :
        """Sync the button image (and remembered toggle value) with the plug."""
        with self.getContext() :
            value = self.getPlug().getValue()
            if value != self.getPlug().defaultValue() :
                # Remember the non-default value so a later toggle restores it.
                self.__toggleValue = value
                self.__button.setImage( self.__imagePrefix + "On.png" )
            else :
                self.__button.setImage( self.__imagePrefix + "Off.png" )

            self.setEnabled( self.getPlug().settable() )

    def __clicked( self, button ) :
        with self.getContext() :
            value = self.getPlug().getValue()

        if value == self.getPlug().defaultValue() and self.__toggleValue is not None :
            self.getPlug().setValue( self.__toggleValue )
        else :
            self.getPlug().setToDefault()
## Clipping, exposure and gamma

# Register the toggle widget for each tone-mapping plug of ImageView.
# defaultToggleValue is the value the first click switches to.
GafferUI.PlugValueWidget.registerCreator(
    GafferImageUI.ImageView,
    "clipping",
    _TogglePlugValueWidget,
    imagePrefix = "clipping",
    defaultToggleValue = True,
)

Gaffer.Metadata.registerPlugValue( GafferImageUI.ImageView, "clipping", "divider", True )

Gaffer.Metadata.registerPlugDescription( GafferImageUI.ImageView, "clipping",
    "Highlights the regions in which the colour values go above 1 or below 0."
)

GafferUI.PlugValueWidget.registerCreator(
    GafferImageUI.ImageView,
    "exposure",
    _TogglePlugValueWidget,
    imagePrefix = "exposure",
    defaultToggleValue = 1,
)

Gaffer.Metadata.registerPlugDescription( GafferImageUI.ImageView, "exposure",
    "Applies an exposure adjustment to the image."
)

GafferUI.PlugValueWidget.registerCreator(
    GafferImageUI.ImageView,
    "gamma",
    _TogglePlugValueWidget,
    imagePrefix = "gamma",
    defaultToggleValue = 2,
)

Gaffer.Metadata.registerPlugDescription( GafferImageUI.ImageView, "gamma",
    "Applies a gamma correction to the image."
)

## Display Transform

# An empty label: the enum widget below is self-explanatory.
Gaffer.Metadata.registerPlugValue( GafferImageUI.ImageView, "displayTransform", "label", "" )

def __displayTransformPlugValueWidgetCreator( plug ) :
    """Build an enum widget listing every registered display transform."""
    # Labels and values are identical: the registered transform names.
    widget = GafferUI.EnumPlugValueWidget(
        plug,
        labelsAndValues = zip(
            GafferImageUI.ImageView.registeredDisplayTransforms(),
            GafferImageUI.ImageView.registeredDisplayTransforms(),
        ),
    )
    # NOTE(review): reaches into the private Qt widget to pin the width;
    # presumably no public API existed for this — verify against GafferUI.
    widget.selectionMenu()._qtWidget().setFixedWidth( 100 )

    return widget

GafferUI.PlugValueWidget.registerCreator(
    GafferImageUI.ImageView,
    "displayTransform",
    __displayTransformPlugValueWidgetCreator
)

Gaffer.Metadata.registerPlugDescription( GafferImageUI.ImageView, "displayTransform",
    "Applies colour space transformations for viewing the image correctly."
)
| bsd-3-clause |
jamesr66a/cryptkernel | tools/perf/python/twatch.py | 7370 | 1334 | #! /usr/bin/python
# -*- python -*-
# -*- coding: utf-8 -*-
# twatch - Experimental use of the perf python interface
# Copyright (C) 2011 Arnaldo Carvalho de Melo <acme@redhat.com>
#
# This application is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; version 2.
#
# This application is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
import perf
def main():
    """Poll perf events on every CPU/thread and print them (Python 2)."""
    cpus = perf.cpu_map()
    threads = perf.thread_map()
    # NOTE(review): perf.SAMPLE_TID appears twice in the sample_type mask;
    # harmless (bitwise OR), but possibly one was meant to be another flag.
    evsel = perf.evsel(task = 1, comm = 1, mmap = 0,
                       wakeup_events = 1, watermark = 1,
                       sample_id_all = 1,
                       sample_type = perf.SAMPLE_PERIOD | perf.SAMPLE_TID | perf.SAMPLE_CPU | perf.SAMPLE_TID)
    evsel.open(cpus = cpus, threads = threads);
    evlist = perf.evlist(cpus, threads)
    evlist.add(evsel)
    evlist.mmap()
    # Block until at least one ring buffer has data, then drain each CPU.
    while True:
        evlist.poll(timeout = -1)
        for cpu in cpus:
            event = evlist.read_on_cpu(cpu)
            if not event:
                continue
            # Trailing comma: keep the event repr on the same output line.
            print "cpu: %2d, pid: %4d, tid: %4d" % (event.sample_cpu,
                                                    event.sample_pid,
                                                    event.sample_tid),
            print event

if __name__ == '__main__':
    main()
| gpl-2.0 |
youdonghai/intellij-community | plugins/hg4idea/testData/bin/mercurial/sshrepo.py | 88 | 8196 | # sshrepo.py - ssh repository proxy class for mercurial
#
# Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
from node import bin, hex
from i18n import _
import repo, util, error, encoding
import re, urllib
class remotelock(object):
    """Handle on a lock held in a remote repository.

    Releasing (explicitly, or implicitly on garbage collection) sends the
    unlock request through the owning repository proxy.
    """

    def __init__(self, repo):
        self.repo = repo

    def release(self):
        # Unlock first; only forget the repo once that has succeeded.
        self.repo.unlock()
        self.repo = None

    def __del__(self):
        # Safety net: release on GC if the caller forgot to.
        if self.repo:
            self.release()
class sshrepository(repo.repository):
    """Repository proxy speaking the hg wire protocol to a remote
    "hg serve --stdio" process spawned over ssh.

    Requests go out on pipeo, replies come back on pipei, and the
    remote's stderr is drained from pipee via readerr().
    """

    def __init__(self, ui, path, create=0):
        self._url = path
        self.ui = ui

        # ssh://[user@]host[:port][/path]; a missing path means ".".
        m = re.match(r'^ssh://(([^@]+)@)?([^:/]+)(:(\d+))?(/(.*))?$', path)
        if not m:
            self.abort(error.RepoError(_("couldn't parse location %s") % path))

        self.user = m.group(2)
        self.host = m.group(3)
        self.port = m.group(5)
        self.path = m.group(7) or "."

        sshcmd = self.ui.config("ui", "ssh", "ssh")
        remotecmd = self.ui.config("ui", "remotecmd", "hg")

        args = util.sshargs(sshcmd, self.host, self.user, self.port)

        if create:
            # Run "hg init" on the remote side before connecting to it.
            cmd = '%s %s "%s init %s"'
            cmd = cmd % (sshcmd, args, remotecmd, self.path)

            ui.note(_('running %s\n') % cmd)
            res = util.system(cmd)
            if res != 0:
                self.abort(error.RepoError(_("could not create remote repo")))

        self.validate_repo(ui, sshcmd, args, remotecmd)

    def url(self):
        """Return the URL this repository was opened with."""
        return self._url

    def validate_repo(self, ui, sshcmd, args, remotecmd):
        """Spawn the remote "hg serve --stdio" and perform the handshake."""
        # cleanup up previous run
        self.cleanup()

        cmd = '%s %s "%s -R %s serve --stdio"'
        cmd = cmd % (sshcmd, args, remotecmd, self.path)

        cmd = util.quotecommand(cmd)
        ui.note(_('running %s\n') % cmd)
        self.pipeo, self.pipei, self.pipee = util.popen3(cmd)

        # skip any noise generated by remote shell
        self.do_cmd("hello")
        r = self.do_cmd("between", pairs=("%s-%s" % ("0"*40, "0"*40)))

        lines = ["", "dummy"]
        max_noise = 500
        # Scan up to 500 lines of shell banner noise for the "1\n" + "\n"
        # reply to the "between" probe; the while/else aborts if it is
        # never seen.
        while lines[-1] and max_noise:
            l = r.readline()
            self.readerr()

            if lines[-1] == "1\n" and l == "\n":
                break
            if l:
                ui.debug("remote: ", l)
            lines.append(l)
            max_noise -= 1
        else:
            self.abort(error.RepoError(_("no suitable response from remote hg")))

        # The "hello" reply (scanned newest-first) advertises capabilities.
        self.capabilities = set()
        for l in reversed(lines):
            if l.startswith("capabilities:"):
                self.capabilities.update(l[:-1].split(":")[1].split())
                break

    def readerr(self):
        """Drain and display anything the remote wrote to stderr."""
        while 1:
            # fstat the pipe so reading never blocks when it is empty.
            size = util.fstat(self.pipee).st_size
            if size == 0:
                break
            l = self.pipee.readline()
            if not l:
                break
            self.ui.status(_("remote: "), l)

    def abort(self, exception):
        """Tear down the pipes, then raise *exception*."""
        self.cleanup()
        raise exception

    def cleanup(self):
        """Close all pipes, echoing any remaining stderr; never raises."""
        try:
            self.pipeo.close()
            self.pipei.close()
            # read the error descriptor until EOF
            for l in self.pipee:
                self.ui.status(_("remote: "), l)
            self.pipee.close()
        except:
            # NOTE(review): bare except also swallows KeyboardInterrupt;
            # kept because cleanup may run before the pipes ever existed.
            pass

    __del__ = cleanup

    def do_cmd(self, cmd, **args):
        """Send *cmd* with its keyword arguments; return the reply pipe.

        Each argument is framed as "<name> <length>\\n<value>".
        """
        self.ui.debug("sending %s command\n" % cmd)
        self.pipeo.write("%s\n" % cmd)
        for k, v in args.iteritems():
            self.pipeo.write("%s %d\n" % (k, len(v)))
            self.pipeo.write(v)
        self.pipeo.flush()

        return self.pipei

    def call(self, cmd, **args):
        """Send *cmd* and return its single length-prefixed reply."""
        self.do_cmd(cmd, **args)
        return self._recv()

    def _recv(self):
        """Read one length-prefixed reply frame from the remote."""
        l = self.pipei.readline()
        self.readerr()
        try:
            l = int(l)
        except:
            self.abort(error.ResponseError(_("unexpected response:"), l))
        return self.pipei.read(l)

    def _send(self, data, flush=False):
        """Write one length-prefixed data frame to the remote."""
        self.pipeo.write("%d\n" % len(data))
        if data:
            self.pipeo.write(data)
        if flush:
            self.pipeo.flush()
        self.readerr()

    def lock(self):
        """Take the remote repository lock; release via the returned handle."""
        self.call("lock")
        return remotelock(self)

    def unlock(self):
        self.call("unlock")

    def lookup(self, key):
        """Resolve *key* to a binary changeset id on the remote."""
        self.requirecap('lookup', _('look up remote revision'))
        d = self.call("lookup", key=key)
        # Reply: "<success> <data>\n".
        success, data = d[:-1].split(" ", 1)
        if int(success):
            return bin(data)
        else:
            self.abort(error.RepoError(data))

    def heads(self):
        """Return the remote head nodes as binary ids."""
        d = self.call("heads")
        try:
            return map(bin, d[:-1].split(" "))
        except:
            self.abort(error.ResponseError(_("unexpected response:"), d))

    def branchmap(self):
        """Return {branch name: [binary head nodes]} from the remote."""
        d = self.call("branchmap")
        try:
            branchmap = {}
            for branchpart in d.splitlines():
                branchheads = branchpart.split(' ')
                branchname = urllib.unquote(branchheads[0])
                # Earlier servers (1.3.x) send branch names in (their) local
                # charset. The best we can do is assume it's identical to our
                # own local charset, in case it's not utf-8.
                try:
                    branchname.decode('utf-8')
                except UnicodeDecodeError:
                    branchname = encoding.fromlocal(branchname)
                branchheads = [bin(x) for x in branchheads[1:]]
                branchmap[branchname] = branchheads
            return branchmap
        except:
            raise error.ResponseError(_("unexpected response:"), d)

    def branches(self, nodes):
        """Return branch tuples (4 binary nodes each) for *nodes*."""
        n = " ".join(map(hex, nodes))
        d = self.call("branches", nodes=n)
        try:
            br = [tuple(map(bin, b.split(" "))) for b in d.splitlines()]
            return br
        except:
            self.abort(error.ResponseError(_("unexpected response:"), d))

    def between(self, pairs):
        """Return the nodes between each (top, bottom) pair of *pairs*."""
        n = " ".join(["-".join(map(hex, p)) for p in pairs])
        d = self.call("between", pairs=n)
        try:
            p = [l and map(bin, l.split(" ")) or [] for l in d.splitlines()]
            return p
        except:
            self.abort(error.ResponseError(_("unexpected response:"), d))

    def changegroup(self, nodes, kind):
        """Stream a changegroup rooted at *nodes* (kind is unused here)."""
        n = " ".join(map(hex, nodes))
        return self.do_cmd("changegroup", roots=n)

    def changegroupsubset(self, bases, heads, kind):
        """Stream a changegroup limited to bases..heads."""
        self.requirecap('changegroupsubset', _('look up remote changes'))
        bases = " ".join(map(hex, bases))
        heads = " ".join(map(hex, heads))
        return self.do_cmd("changegroupsubset", bases=bases, heads=heads)

    def unbundle(self, cg, heads, source):
        """Push changegroup *cg*; remote checks *heads* before accepting."""
        d = self.call("unbundle", heads=' '.join(map(hex, heads)))
        if d:
            # remote may send "unsynced changes"
            self.abort(error.RepoError(_("push refused: %s") % d))

        # Stream the bundle in 4k frames, terminated by an empty frame.
        while 1:
            d = cg.read(4096)
            if not d:
                break
            self._send(d)

        self._send("", flush=True)

        r = self._recv()
        if r:
            # remote may send "unsynced changes"
            self.abort(error.RepoError(_("push failed: %s") % r))
        r = self._recv()
        try:
            return int(r)
        except:
            self.abort(error.ResponseError(_("unexpected response:"), r))

    def addchangegroup(self, cg, source, url):
        """Push via the older addchangegroup protocol (raw, unframed)."""
        d = self.call("addchangegroup")
        if d:
            self.abort(error.RepoError(_("push refused: %s") % d))
        while 1:
            d = cg.read(4096)
            if not d:
                break
            self.pipeo.write(d)
            self.readerr()

        self.pipeo.flush()

        self.readerr()
        r = self._recv()
        if not r:
            return 1
        try:
            return int(r)
        except:
            self.abort(error.ResponseError(_("unexpected response:"), r))

    def stream_out(self):
        """Stream the raw repository data (streaming clone)."""
        return self.do_cmd('stream_out')

# Factory hook used by the repository lookup machinery.
instance = sshrepository
| apache-2.0 |
constantinius/YaaGame | kytten/override.py | 2 | 4350 | # kytten/override.py
# Copyrighted (C) 2009 by Conrad "Lynx" Wong
import pyglet
# Cache of pyglet text-layout group tuples, keyed by parent group, plus a
# parallel reference count so ReleaseKyttenLayoutGroups can evict entries.
KYTTEN_LAYOUT_GROUPS = {}
KYTTEN_LAYOUT_GROUP_REFCOUNTS = {}

def GetKyttenLayoutGroups(group):
    """Return (and cache) the four text-layout groups parented to *group*.

    Returns (top, background, foreground, foreground_decoration).  Each
    call increments the reference count; pair every call with a later
    ReleaseKyttenLayoutGroups(group).

    Fix: replaced dict.has_key() (deprecated in Python 2.6, removed in
    Python 3) with the `in` operator.
    """
    if group not in KYTTEN_LAYOUT_GROUPS:
        top_group = pyglet.text.layout.TextLayoutGroup(group)
        background_group = pyglet.graphics.OrderedGroup(0, top_group)
        foreground_group = \
            pyglet.text.layout.TextLayoutForegroundGroup(1, top_group)
        foreground_decoration_group = \
            pyglet.text.layout.TextLayoutForegroundDecorationGroup(
                2, top_group)
        KYTTEN_LAYOUT_GROUPS[group] = (top_group,
                                       background_group,
                                       foreground_group,
                                       foreground_decoration_group)
        KYTTEN_LAYOUT_GROUP_REFCOUNTS[group] = 0
    KYTTEN_LAYOUT_GROUP_REFCOUNTS[group] += 1
    return KYTTEN_LAYOUT_GROUPS[group]
def ReleaseKyttenLayoutGroups(group):
    """Drop one reference to *group*'s cached layout groups.

    When the reference count reaches zero, both cache entries are
    deleted so the pyglet groups can be garbage collected.
    """
    KYTTEN_LAYOUT_GROUP_REFCOUNTS[group] -= 1
    if KYTTEN_LAYOUT_GROUP_REFCOUNTS[group] == 0:
        del KYTTEN_LAYOUT_GROUP_REFCOUNTS[group]
        del KYTTEN_LAYOUT_GROUPS[group]
class KyttenLabel(pyglet.text.Label):
    """pyglet Label that shares its layout groups through the kytten
    reference-counted cache instead of creating them per instance."""

    def _init_groups(self, group):
        if not group:
            return  # use the default groups
        self.top_group, self.background_group, self.foreground_group, \
            self.foreground_decoration_group = GetKyttenLayoutGroups(group)

    def teardown(self):
        """Release the shared layout groups and clear our references."""
        pyglet.text.Label.teardown(self)
        group = self.top_group.parent
        if group is not None:
            ReleaseKyttenLayoutGroups(group)
        # Fix: the original assigned to a misspelled `background_self`
        # attribute, so background_group was never cleared.
        self.top_group = self.background_group = self.foreground_group \
            = self.foreground_decoration_group = None
class KyttenInputLabel(KyttenLabel):
    """Single-line input label that right-aligns and clips its text on
    the left when the content is wider than the widget."""

    def _get_left(self):
        if self._multiline:
            width = self._width
        else:
            width = self.content_width

        if self.width and width > self.width:
            # align to right edge, clip left
            return self._x + self.width - width

        if self._anchor_x == 'left':
            return self._x
        elif self._anchor_x == 'center':
            return self._x - width // 2
        elif self._anchor_x == 'right':
            return self._x - width
        else:
            assert False, 'Invalid anchor_x'

    def _update(self):
        pyglet.text.Label._update(self)

        # Iterate through our vertex lists and clip/discard glyph quads
        # that fall left of self._x.  Vertex lists store 4 vertices (8
        # floats) and 4 texture coords (12 floats) per glyph quad.
        remove = []
        if self.width and not self._multiline:
            for vlist in self._vertex_lists:
                num_quads = len(vlist.vertices) / 8
                remove_quads = 0
                # NOTE(review): has_quads is set but never read.
                has_quads = False
                for n in xrange(0, num_quads):
                    x1, y1, x2, y2, x3, y3, x4, y4 = vlist.vertices[n*8:n*8+8]
                    tx1, ty1, tz1, tx2, ty2, tz2, \
                        tx3, ty3, tz3, tx4, ty4, tz4 = \
                        vlist.tex_coords[n*12:n*12+12]
                    if x2 >= self._x:
                        has_quads = True
                        m = n - remove_quads  # shift quads left
                        if x1 < self._x:  # clip on left side
                            # Scale the texture window to match the part of
                            # the quad that remains visible.
                            percent = (float(self._x) - float(x1)) / \
                                (float(x2) - float(x1))
                            x1 = x4 = max(self._x, x1)
                            tx1 = tx4 = (tx2 - tx1) * percent + tx1
                        vlist.vertices[m*8:m*8+8] = \
                            [x1, y1, x2, y2, x3, y3, x4, y4]
                        vlist.tex_coords[m*12:m*12+12] = \
                            [tx1, ty1, tz1, tx2, ty2, tz2,
                             tx3, ty3, tz3, tx4, ty4, tz4]
                    else:
                        # We'll delete quads entirely not visible
                        remove_quads = remove_quads + 1
                if remove_quads == num_quads:
                    remove.append(vlist)
                elif remove_quads > 0:
                    # Shrink the list to drop the now-unused trailing quads.
                    vlist.resize((num_quads - remove_quads) * 4)
        for vlist in remove:
            vlist.delete()
            self._vertex_lists.remove(vlist)
| mit |
schalkneethling/bedrock | bedrock/mozorg/tests/test_helper_misc.py | 3 | 24457 | # coding: utf-8
import os.path
from datetime import datetime
from django.conf import settings
from django.test.client import RequestFactory
from django.test.utils import override_settings
from django_jinja.backend import Jinja2
from jinja2 import Markup
from mock import patch
from nose.tools import eq_, ok_
from pyquery import PyQuery as pq
from bedrock.base.templatetags.helpers import static
from bedrock.mozorg.templatetags import misc
from bedrock.mozorg.tests import TestCase
TEST_FILES_ROOT = os.path.join(os.path.dirname(os.path.abspath(__file__)),
'test_files')
TEST_L10N_MEDIA_PATH = os.path.join(TEST_FILES_ROOT, 'media', '%s', 'l10n')
TEST_DONATE_LINK = ('https://donate.mozilla.org/{locale}/'
'?presets={presets}&amount={default}'
'&utm_source=mozilla.org&utm_medium=referral&utm_content={source}'
'¤cy={currency}')
TEST_DONATE_PARAMS = {
'en-US': {
'currency': 'usd',
'presets': '100,50,25,15',
'default': '50'
},
'es-MX': {
'currency': 'eur',
'presets': '100,50,25,15',
'default': '15'
},
}
TEST_FIREFOX_TWITTER_ACCOUNTS = {
'en-US': 'https://twitter.com/firefox',
'es-ES': 'https://twitter.com/firefox_es',
'pt-BR': 'https://twitter.com/firefoxbrasil',
}
jinja_env = Jinja2.get_default()
# Where should this function go?
def render(s, context=None):
    """Render template string *s* with *context* via the shared Jinja env."""
    return jinja_env.from_string(s).render(context or {})
def test_convert_to_high_res():
    """convert_to_high_res() inserts '-high-res' before the extension."""
    cases = (
        ('/media/img/the.dude.png', '/media/img/the.dude-high-res.png'),
        ('/media/thats-a-bummer-man.jpg',
         '/media/thats-a-bummer-man-high-res.jpg'),
    )
    for source, expected in cases:
        eq_(misc.convert_to_high_res(source), expected)
# The class decorator injects media_exists_mock as the extra argument of
# every test method; LANGUAGE_CODE is pinned so the fallback locale is
# predictable.
@patch('bedrock.mozorg.templatetags.misc._l10n_media_exists')
@patch('django.conf.settings.LANGUAGE_CODE', 'en-US')
class TestImgL10n(TestCase):
    rf = RequestFactory()

    def _render(self, locale, url):
        """Render the l10n_img helper for *url* under *locale*."""
        req = self.rf.get('/')
        req.locale = locale
        return render("{{{{ l10n_img('{0}') }}}}".format(url),
                      {'request': req})

    def test_works_for_default_lang(self, media_exists_mock):
        """Should output correct path for default lang always."""
        media_exists_mock.return_value = True
        eq_(self._render('en-US', 'dino/head.png'),
            static('img/l10n/en-US/dino/head.png'))

        eq_(self._render('en-US', 'dino/does-not-exist.png'),
            static('img/l10n/en-US/dino/does-not-exist.png'))

    def test_works_for_other_lang(self, media_exists_mock):
        """Should use the request lang if file exists."""
        media_exists_mock.return_value = True
        eq_(self._render('de', 'dino/head.png'),
            static('img/l10n/de/dino/head.png'))

    def test_defaults_when_lang_file_missing(self, media_exists_mock):
        """Should use default lang when file doesn't exist for lang."""
        media_exists_mock.return_value = False
        eq_(self._render('is', 'dino/head.png'),
            static('img/l10n/en-US/dino/head.png'))

    def test_latam_spanishes_fallback_to_european_spanish(self, media_exists_mock):
        """Should use es-ES image when file doesn't exist for lang."""
        # side_effect = [False, True]: first check (requested locale)
        # misses, second check (es-ES) hits.
        media_exists_mock.side_effect = [False, True]
        eq_(self._render('es-AR', 'dino/head.png'),
            static('img/l10n/es-ES/dino/head.png'))

        media_exists_mock.reset_mock()
        media_exists_mock.side_effect = [False, True]
        eq_(self._render('es-CL', 'dino/head.png'),
            static('img/l10n/es-ES/dino/head.png'))

        media_exists_mock.reset_mock()
        media_exists_mock.side_effect = [False, True]
        eq_(self._render('es-MX', 'dino/head.png'),
            static('img/l10n/es-ES/dino/head.png'))

        media_exists_mock.reset_mock()
        media_exists_mock.side_effect = [False, True]
        eq_(self._render('es', 'dino/head.png'),
            static('img/l10n/es-ES/dino/head.png'))

    def test_file_not_checked_for_default_lang(self, media_exists_mock):
        """
        Should not check filesystem for default lang, but should for others.
        """
        eq_(self._render('en-US', 'dino/does-not-exist.png'),
            static('img/l10n/en-US/dino/does-not-exist.png'))
        ok_(not media_exists_mock.called)

        self._render('is', 'dino/does-not-exist.png')
        media_exists_mock.assert_called_once_with('img', 'is', 'dino/does-not-exist.png')
# Exercises the l10n_css helper in both DEV and production configurations;
# the class decorator injects media_exists_mock into every test method.
@override_settings(DEBUG=False)
@patch('bedrock.mozorg.templatetags.misc._l10n_media_exists')
class TestL10nCSS(TestCase):
    rf = RequestFactory()
    # Static URL prefixes expected in dev vs. CDN-fronted production.
    static_url_dev = '/static/'
    cdn_url = '//mozorg.cdn.mozilla.net'
    static_url_prod = cdn_url + static_url_dev
    # Expected <link> markup; %s slots are (static prefix, locale).
    markup = ('<link rel="stylesheet" media="screen,projection,tv" href='
              '"%scss/l10n/%s/intl.css">')

    def _render(self, locale):
        """Render the l10n_css helper for *locale*."""
        req = self.rf.get('/')
        req.locale = locale
        return render('{{ l10n_css() }}', {'request': req})

    @override_settings(DEV=True)
    @patch('django.contrib.staticfiles.storage.staticfiles_storage.base_url', static_url_dev)
    def test_dev_when_css_file_exists(self, media_exists_mock):
        """Should output a path to the CSS file if exists."""
        media_exists_mock.return_value = True
        eq_(self._render('de'), self.markup % (self.static_url_dev, 'de'))
        eq_(self._render('es-ES'), self.markup % (self.static_url_dev, 'es-ES'))

    @override_settings(DEV=True)
    def test_dev_when_css_file_missing(self, media_exists_mock):
        """Should output nothing if the CSS file is missing."""
        media_exists_mock.return_value = False
        eq_(self._render('en-US'), '')
        eq_(self._render('fr'), '')

    @override_settings(DEV=False)
    @patch('django.contrib.staticfiles.storage.staticfiles_storage.base_url', static_url_prod)
    def test_prod_when_css_file_exists(self, media_exists_mock):
        """Should output a path to the CSS file if exists."""
        media_exists_mock.return_value = True
        eq_(self._render('de'), self.markup % (self.static_url_prod, 'de'))
        eq_(self._render('es-ES'), self.markup % (self.static_url_prod, 'es-ES'))

    @override_settings(DEV=False)
    def test_prod_when_css_file_missing(self, media_exists_mock):
        """Should output nothing if the CSS file is missing."""
        media_exists_mock.return_value = False
        eq_(self._render('en-US'), '')
        eq_(self._render('fr'), '')
class TestVideoTag(TestCase):
    """Tests for the video() template helper's generated markup."""
    rf = RequestFactory()

    # Video stubs: %s is replaced with the file extension under test.
    moz_video = 'http://videos.mozilla.org/serv/flux/example.%s'
    nomoz_video = 'http://example.org/example.%s'

    def _render(self, template):
        """Render a raw template snippet with an en-US request."""
        req = self.rf.get('/')
        req.locale = 'en-US'
        return render(template, {'request': req})

    def test_empty(self):
        # No video, no output.
        eq_(render('{{ video() }}'), '')

    def test_video(self):
        # A few common variations
        videos = [self.nomoz_video % ext for ext in ('ogv', 'mp4', 'webm')]
        doc = pq(self._render("{{ video%s }}" % str(tuple(videos))))

        # Tags generated?
        eq_(doc('video').length, 1)
        eq_(doc('video source').length, 3)

        # Extensions in the right order?
        for i, ext in enumerate(('webm', 'ogv', 'mp4')):
            ok_(doc('video source:eq(%s)' % i).attr('src').endswith(ext))

    def test_prefix(self):
        # Prefix should be applied to all videos.
        doc = pq(self._render("{{ video('meh.mp4', 'meh.ogv', "
                              "prefix='http://example.com/blah/') }}"))
        expected = ('http://example.com/blah/meh.ogv',
                    'http://example.com/blah/meh.mp4')
        eq_(doc('video source').length, 2)
        for i in xrange(2):
            eq_(doc('video source:eq(%s)' % i).attr('src'), expected[i])

    def test_fileformats(self):
        # URLs ending in strange extensions are ignored.
        videos = [self.nomoz_video % ext for ext in
                  ('ogv', 'exe', 'webm', 'txt')]
        videos.append('http://example.net/noextension')
        doc = pq(self._render("{{ video%s }}" % (str(tuple(videos)))))
        eq_(doc('video source').length, 2)
        for i, ext in enumerate(('webm', 'ogv')):
            ok_(doc('video source:eq(%s)' % i).attr('src').endswith(ext))
# The class decorator patches find_static to always report that the image
# exists, and injects the mock as each test method's extra argument.
@override_settings(STATIC_URL='/media/')
@patch('bedrock.mozorg.templatetags.misc.find_static', return_value=True)
class TestPlatformImg(TestCase):
    rf = RequestFactory()

    def _render(self, url, optional_attributes=None):
        """Render the platform_img helper for *url* with optional attrs."""
        req = self.rf.get('/')
        req.locale = 'en-US'
        return render("{{{{ platform_img('{0}', {1}) }}}}".format(url, optional_attributes),
                      {'request': req})

    def _render_l10n(self, url):
        """Render the l10n_img helper; used to predict localized paths."""
        req = self.rf.get('/')
        req.locale = 'en-US'
        return render("{{{{ l10n_img('{0}') }}}}".format(url),
                      {'request': req})

    def test_platform_img_no_optional_attributes(self, find_static):
        """Should return expected markup without optional attributes"""
        markup = self._render('test.png')
        self.assertIn(u'data-src-windows="/media/img/test-windows.png"', markup)
        self.assertIn(u'data-src-mac="/media/img/test-mac.png"', markup)

    def test_platform_img_with_optional_attributes(self, find_static):
        """Should return expected markup with optional attributes"""
        markup = self._render('test.png', {'data-test-attr': 'test'})
        self.assertIn(u'data-test-attr="test"', markup)

    def test_platform_img_with_high_res(self, find_static):
        """Should return expected markup with high resolution image attrs"""
        markup = self._render('test.png', {'high-res': True})
        self.assertIn(u'data-src-windows-high-res="/media/img/test-windows-high-res.png"', markup)
        self.assertIn(u'data-src-mac-high-res="/media/img/test-mac-high-res.png"', markup)
        self.assertIn(u'data-high-res="true"', markup)

    def test_platform_img_with_l10n(self, find_static):
        """Should return expected markup with l10n image path"""
        l10n_url_win = self._render_l10n('test-windows.png')
        l10n_url_mac = self._render_l10n('test-mac.png')
        markup = self._render('test.png', {'l10n': True})
        self.assertIn(u'data-src-windows="' + l10n_url_win + '"', markup)
        self.assertIn(u'data-src-mac="' + l10n_url_mac + '"', markup)

    def test_platform_img_with_l10n_and_optional_attributes(self, find_static):
        """
        Should return expected markup with l10n image path and optional
        attributes
        """
        l10n_url_win = self._render_l10n('test-windows.png')
        l10n_url_mac = self._render_l10n('test-mac.png')
        markup = self._render('test.png', {'l10n': True, 'data-test-attr': 'test'})
        self.assertIn(u'data-src-windows="' + l10n_url_win + '"', markup)
        self.assertIn(u'data-src-mac="' + l10n_url_mac + '"', markup)
        self.assertIn(u'data-test-attr="test"', markup)

    def test_platform_img_with_l10n_and_high_res(self, find_static):
        """
        Should return expected markup with l10n image path and high resolution
        attributes
        """
        l10n_url_win = self._render_l10n('test-windows.png')
        l10n_hr_url_win = misc.convert_to_high_res(l10n_url_win)
        l10n_url_mac = self._render_l10n('test-mac.png')
        l10n_hr_url_mac = misc.convert_to_high_res(l10n_url_mac)
        markup = self._render('test.png', {'l10n': True, 'high-res': True})
        self.assertIn(u'data-src-windows-high-res="' + l10n_hr_url_win + '"', markup)
        self.assertIn(u'data-src-mac-high-res="' + l10n_hr_url_mac + '"', markup)
        self.assertIn(u'data-high-res="true"', markup)
class TestPressBlogUrl(TestCase):
    """Tests for the press_blog_url() template helper."""
    rf = RequestFactory()

    def _render(self, locale):
        request = self.rf.get('/')
        request.locale = locale
        # The original "{{{{ press_blog_url() }}}}".format('/') collapses to
        # this literal template string.
        return render("{{ press_blog_url() }}", {'request': request})

    def test_press_blog_url_no_locale(self):
        """No locale, fallback to default press blog"""
        eq_(self._render(''), 'https://blog.mozilla.org/press/')

    def test_press_blog_url_english(self):
        """en-US locale, default press blog"""
        eq_(self._render('en-US'), 'https://blog.mozilla.org/press/')

    def test_press_blog_url_europe(self):
        """Major European locales have their own blog"""
        european_blogs = (
            ('es-ES', 'https://blog.mozilla.org/press-es/'),
            ('fr', 'https://blog.mozilla.org/press-fr/'),
            ('de', 'https://blog.mozilla.org/press-de/'),
            ('pl', 'https://blog.mozilla.org/press-pl/'),
            ('it', 'https://blog.mozilla.org/press-it/'),
            ('en-GB', 'https://blog.mozilla.org/press-uk/'),
        )
        for locale, blog_url in european_blogs:
            eq_(self._render(locale), blog_url)

    def test_press_blog_url_latam(self):
        """South American Spanishes have a specific blog"""
        for locale in ('es-AR', 'es-CL', 'es-MX'):
            eq_(self._render(locale), 'https://blog.mozilla.org/press-latam/')

    def test_press_blog_url_other_locale(self):
        """No blog for locale, fallback to default press blog"""
        eq_(self._render('oc'), 'https://blog.mozilla.org/press/')
@override_settings(DONATE_LINK=TEST_DONATE_LINK,
                   DONATE_PARAMS=TEST_DONATE_PARAMS)
class TestDonateUrl(TestCase):
    """Tests for the donate_url() template helper."""
    rf = RequestFactory()

    def _render(self, locale, source=''):
        request = self.rf.get('/')
        request.locale = locale
        return render("{{{{ donate_url('{0}') }}}}".format(source),
                      {'request': request})

    def test_donate_url_no_locale(self):
        """No locale, fallback to default page"""
        expected = ('https://donate.mozilla.org//'
                    '?presets=100,50,25,15&amount=50'
                    '&utm_source=mozilla.org&utm_medium=referral'
                    '&utm_content=mozillaorg_footer&currency=usd')
        eq_(self._render('', 'mozillaorg_footer'), expected)

    def test_donate_url_english(self):
        """en-US locale, default page"""
        expected = ('https://donate.mozilla.org/en-US/'
                    '?presets=100,50,25,15&amount=50'
                    '&utm_source=mozilla.org&utm_medium=referral'
                    '&utm_content=mozillaorg_footer&currency=usd')
        eq_(self._render('en-US', 'mozillaorg_footer'), expected)

    def test_donate_url_spanish(self):
        """es-MX locale, a localized page"""
        expected = ('https://donate.mozilla.org/es-MX/'
                    '?presets=100,50,25,15&amount=15'
                    '&utm_source=mozilla.org&utm_medium=referral'
                    '&utm_content=mozillaorg_footer&currency=eur')
        eq_(self._render('es-MX', 'mozillaorg_footer'), expected)

    def test_donate_url_other_locale(self):
        """No page for locale, fallback to default page"""
        expected = ('https://donate.mozilla.org/pt-PT/'
                    '?presets=100,50,25,15&amount=50'
                    '&utm_source=mozilla.org&utm_medium=referral'
                    '&utm_content=mozillaorg_footer&currency=usd')
        eq_(self._render('pt-PT', 'mozillaorg_footer'), expected)
@override_settings(FIREFOX_TWITTER_ACCOUNTS=TEST_FIREFOX_TWITTER_ACCOUNTS)
class TestFirefoxTwitterUrl(TestCase):
    """Tests for the firefox_twitter_url() template helper."""
    rf = RequestFactory()

    def _render(self, locale):
        request = self.rf.get('/')
        request.locale = locale
        return render('{{ firefox_twitter_url() }}', {'request': request})

    def test_firefox_twitter_url_no_locale(self):
        """No locale, fallback to default account"""
        eq_(self._render(''), 'https://twitter.com/firefox')

    def test_firefox_twitter_url_english(self):
        """en-US locale, default account"""
        eq_(self._render('en-US'), 'https://twitter.com/firefox')

    def test_firefox_twitter_url_spanish(self):
        """es-ES locale, a local account"""
        eq_(self._render('es-ES'), 'https://twitter.com/firefox_es')

    def test_firefox_twitter_url_portuguese(self):
        """pt-BR locale, a local account"""
        eq_(self._render('pt-BR'), 'https://twitter.com/firefoxbrasil')

    def test_firefox_twitter_url_other_locale(self):
        """No account for locale, fallback to default account"""
        for locale in ('es-AR', 'es-CL', 'es-MX', 'pt-PT'):
            eq_(self._render(locale), 'https://twitter.com/firefox')
@override_settings(STATIC_URL='/media/')
class TestHighResImg(TestCase):
    """Tests for the high_res_img() template helper."""
    rf = RequestFactory()

    def _render(self, url, optional_attributes=None):
        request = self.rf.get('/')
        request.locale = 'en-US'
        return render("{{{{ high_res_img('{0}', {1}) }}}}".format(url, optional_attributes),
                      {'request': request})

    def _render_l10n(self, url):
        request = self.rf.get('/')
        request.locale = 'en-US'
        return render("{{{{ l10n_img('{0}') }}}}".format(url),
                      {'request': request})

    def test_high_res_img_no_optional_attributes(self):
        """Should return expected markup without optional attributes"""
        expected = (
            u'<img class="" src="/media/img/test.png" '
            u'srcset="/media/img/test-high-res.png 1.5x">')
        self.assertEqual(self._render('test.png'), expected)

    def test_high_res_img_with_optional_attributes(self):
        """Should return expected markup with optional attributes"""
        expected = (
            u'<img class="logo" src="/media/img/test.png" '
            u'srcset="/media/img/test-high-res.png 1.5x" '
            u'data-test-attr="test">')
        self.assertEqual(
            self._render('test.png', {'data-test-attr': 'test', 'class': 'logo'}),
            expected)

    def test_high_res_img_with_l10n(self):
        """Should return expected markup with l10n image path"""
        src = self._render_l10n('test.png')
        hi_res_src = misc.convert_to_high_res(src)
        expected = u'<img class="" src="%s" srcset="%s 1.5x">' % (src, hi_res_src)
        self.assertEqual(self._render('test.png', {'l10n': True}), expected)

    def test_high_res_img_with_l10n_and_optional_attributes(self):
        """Should return expected markup with l10n image path"""
        src = self._render_l10n('test.png')
        hi_res_src = misc.convert_to_high_res(src)
        expected = (u'<img class="" src="%s" srcset="%s 1.5x" '
                    u'data-test-attr="test">' % (src, hi_res_src))
        self.assertEqual(
            self._render('test.png', {'l10n': True, 'data-test-attr': 'test'}),
            expected)
class TestAbsoluteURLFilter(TestCase):
    """Tests for the absolute_url template filter."""
    rf = RequestFactory()
    static_url_dev = '/static/'
    static_url_prod = '//mozorg.cdn.mozilla.net/static/'
    static_url_full = 'https://mozorg.cdn.mozilla.net/static/'
    image_path = 'img/mozorg/mozilla-256.jpg'
    inline_template = "{{ static('%s')|absolute_url }}" % image_path
    block_template = ("{% filter absolute_url %}{% block page_image %}" +
                      "{{ static('%s') }}" % image_path +
                      "{% endblock %}{% endfilter %}")

    def _render(self, template):
        return render(template, {'request': self.rf.get('/')})

    @patch('django.contrib.staticfiles.storage.staticfiles_storage.base_url', static_url_dev)
    def test_image_dev(self):
        """Should return a fully qualified URL including a protocol"""
        expected = settings.CANONICAL_URL + self.static_url_dev + self.image_path
        for template in (self.inline_template, self.block_template):
            eq_(self._render(template), expected)

    @patch('django.contrib.staticfiles.storage.staticfiles_storage.base_url', static_url_prod)
    def test_image_prod(self):
        """Should return a fully qualified URL including a protocol"""
        expected = 'https:' + self.static_url_prod + self.image_path
        for template in (self.inline_template, self.block_template):
            eq_(self._render(template), expected)

    @override_settings(DEV=False)
    def test_urls(self):
        """Should return a fully qualified URL including a protocol"""
        expected = 'https://www.mozilla.org/en-US/firefox/new/'
        for given in ('/en-US/firefox/new/',
                      '//www.mozilla.org/en-US/firefox/new/',
                      'https://www.mozilla.org/en-US/firefox/new/'):
            eq_(misc.absolute_url(given), expected)
class TestFirefoxIOSURL(TestCase):
    """Tests for the firefox_ios_url() template helper."""
    rf = RequestFactory()

    def _render(self, locale, ct_param=None):
        request = self.rf.get('/')
        request.locale = locale
        if ct_param:
            template = "{{ firefox_ios_url('%s') }}" % ct_param
        else:
            template = "{{ firefox_ios_url() }}"
        return render(template, {'request': request})

    def test_firefox_ios_url_no_locale(self):
        """No locale, fallback to default URL"""
        eq_(self._render(''),
            'https://itunes.apple.com/app/apple-store/id989804926?pt=373246&mt=8')

    def test_firefox_ios_url_default(self):
        """should fallback to default URL"""
        default_url = 'https://itunes.apple.com/app/apple-store/id989804926?pt=373246&mt=8'
        eq_(self._render('ar'), default_url)
        eq_(self._render('zu'), default_url)

    def test_firefox_ios_url_localized(self):
        """should return localized URL"""
        eq_(self._render('en-US'),
            'https://itunes.apple.com/us/app/apple-store/id989804926?pt=373246&mt=8')
        eq_(self._render('es-ES'),
            'https://itunes.apple.com/es/app/apple-store/id989804926?pt=373246&mt=8')
        eq_(self._render('ja'),
            'https://itunes.apple.com/jp/app/apple-store/id989804926?pt=373246&mt=8')

    def test_firefox_ios_url_param(self):
        """should return default or localized URL with ct param"""
        eq_(self._render('', 'mozorg'),
            'https://itunes.apple.com/app/apple-store/id989804926?pt=373246&mt=8&ct=mozorg')
        eq_(self._render('en-US', 'mozorg'),
            'https://itunes.apple.com/us/app/apple-store/id989804926?pt=373246&mt=8&ct=mozorg')
        eq_(self._render('es-ES', 'mozorg'),
            'https://itunes.apple.com/es/app/apple-store/id989804926?pt=373246&mt=8&ct=mozorg')
# from jingo
def test_f():
    """The |f filter applies str.format-style positional and keyword args."""
    eq_(render('{{ "{0} : {z}"|f("a", z="b") }}'), 'a : b')
def test_f_unicode():
    """The |f filter must handle non-ASCII values and format strings."""
    eq_(render('{{ "foo {0}"|f(bar) }}', {'bar': u'bar\xe9'}), u'foo bar\xe9')
    eq_(render('{{ t|f(bar) }}', {'t': u'\xe9 {0}', 'bar': 'baz'}), u'\xe9 baz')
def test_f_markup():
    """|f must escape neither side when Markup is involved (nose generator test)."""
    fmt_str = 'Hello <b>{0}</b>'
    fmt_markup = Markup(fmt_str)
    val_str = '<em>Steve</em>'
    val_markup = Markup(val_str)
    template = '{{ fmt|f(val) }}'
    expected = 'Hello <b><em>Steve</em></b>'

    def check(fmt, val):
        eq_(expected, render(template, {'fmt': fmt, 'val': val}))

    # All four str/Markup combinations must yield the same unescaped result.
    for fmt in (fmt_str, fmt_markup):
        for val in (val_str, val_markup):
            yield check, fmt, val
def test_datetime():
    """|datetime formats with a default pattern, a custom one, and '' for None."""
    moment = datetime(2009, 12, 25, 10, 11, 12)
    eq_(render('{{ d|datetime }}', {'d': moment}), 'December 25, 2009')
    eq_(render('{{ d|datetime("%Y-%m-%d %H:%M:%S") }}', {'d': moment}),
        '2009-12-25 10:11:12')
    eq_(render('{{ None|datetime }}'), '')
def test_datetime_unicode():
    """Formatting with a non-ASCII pattern must not raise."""
    misc.datetime(datetime.now(), u"%Y 年 %m 月 %e 日")
def test_ifeq():
    """|ifeq renders its argument when equal, the empty string otherwise."""
    template = '{{ a|ifeq(b, "<b>something</b>") }}'
    eq_(render(template, {'a': 1, 'b': 1}), '<b>something</b>')
    eq_(render(template, {'a': 1, 'b': 2}), '')
def test_csrf():
    """csrf() must emit the hidden CSRF token input."""
    rendered = render('{{ csrf() }}', {'csrf_token': 'fffuuu'})
    expected_input = "<input type='hidden' name='csrfmiddlewaretoken' value='fffuuu' />"
    assert expected_input in rendered
| mpl-2.0 |
Jmainguy/ansible-modules-extras | cloud/amazon/sts_session_token.py | 34 | 5257 | #!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Ansible module documentation (YAML) rendered by ansible-doc; the string
# content is parsed by Ansible tooling and must stay valid YAML.
DOCUMENTATION = '''
---
module: sts_session_token
short_description: Obtain a session token from the AWS Security Token Service
description:
- Obtain a session token from the AWS Security Token Service
version_added: "2.2"
author: Victor Costan (@pwnall)
options:
duration_seconds:
description:
- The duration, in seconds, of the session token. See http://docs.aws.amazon.com/STS/latest/APIReference/API_GetSessionToken.html#API_GetSessionToken_RequestParameters for acceptable and default values.
required: false
default: null
mfa_serial_number:
description:
- The identification number of the MFA device that is associated with the user who is making the GetSessionToken call.
required: false
default: null
mfa_token:
description:
- The value provided by the MFA device, if the trust policy of the user requires MFA.
required: false
default: null
notes:
- In order to use the session token in a following playbook task you must pass the I(access_key), I(access_secret) and I(access_token).
extends_documentation_fragment:
- aws
- ec2
requirements:
- boto3
- botocore
'''
# Declares the module's return values (YAML) for ansible-doc consumers.
RETURN = """
sts_creds:
description: The Credentials object returned by the AWS Security Token Service
returned: always
type: list
sample:
access_key: ASXXXXXXXXXXXXXXXXXX
expiration: "2016-04-08T11:59:47+00:00"
secret_key: XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
session_token: XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
changed:
description: True if obtaining the credentials succeeds
type: bool
returned: always
"""
# Playbook usage examples (YAML) shown by ansible-doc.
EXAMPLES = '''
# Note: These examples do not set authentication details, see the AWS Guide for details.
# Get a session token (more details: http://docs.aws.amazon.com/STS/latest/APIReference/API_GetSessionToken.html)
sts_session_token:
duration: 3600
register: session_credentials
# Use the session token obtained above to tag an instance in account 123456789012
ec2_tag:
aws_access_key: "{{ session_credentials.sts_creds.access_key }}"
aws_secret_key: "{{ session_credentials.sts_creds.secret_key }}"
security_token: "{{ session_credentials.sts_creds.session_token }}"
resource: i-xyzxyz01
state: present
tags:
MyNewTag: value
'''
# boto3/botocore are optional at import time: the HAS_BOTO3 flag is checked
# in main() so a missing library yields a clean module failure rather than an
# import traceback.
try:
import boto3
from botocore.exceptions import ClientError
HAS_BOTO3 = True
except ImportError:
HAS_BOTO3 = False
def normalize_credentials(credentials):
    """Map an AWS STS `Credentials` dict onto snake_case result keys.

    Missing source keys become None so callers always see all four keys.
    """
    key_map = (
        ('access_key', 'AccessKeyId'),
        ('secret_key', 'SecretAccessKey'),
        ('session_token', 'SessionToken'),
        ('expiration', 'Expiration'),
    )
    return dict((ours, credentials.get(theirs, None)) for ours, theirs in key_map)
def get_session_token(connection, module):
    """Call STS GetSessionToken and exit the module with normalized credentials.

    :param connection: a boto3 STS client
    :param module: the AnsibleModule; reads params duration_seconds,
                   mfa_serial_number and mfa_token, and exits via
                   exit_json()/fail_json()
    """
    duration_seconds = module.params.get('duration_seconds')
    mfa_serial_number = module.params.get('mfa_serial_number')
    mfa_token = module.params.get('mfa_token')
    changed = False

    # Only pass parameters the user actually supplied; STS applies its own
    # defaults for anything omitted.
    args = {}
    if duration_seconds is not None:
        args['DurationSeconds'] = duration_seconds
    if mfa_serial_number is not None:
        args['SerialNumber'] = mfa_serial_number
    if mfa_token is not None:
        args['TokenCode'] = mfa_token

    try:
        response = connection.get_session_token(**args)
        changed = True
    except ClientError as e:
        # `except ClientError, e` was Python-2-only syntax (SyntaxError on
        # Python 3); also stringify the exception since fail_json expects a
        # message string, not an exception object.
        module.fail_json(msg=str(e))

    credentials = normalize_credentials(response.get('Credentials', {}))
    module.exit_json(changed=changed, sts_creds=credentials)
def main():
    """Module entry point: parse arguments, build an STS client, fetch token."""
    argument_spec = ec2_argument_spec()
    argument_spec.update(dict(
        duration_seconds=dict(required=False, default=None, type='int'),
        mfa_serial_number=dict(required=False, default=None),
        mfa_token=dict(required=False, default=None),
    ))
    module = AnsibleModule(argument_spec=argument_spec)

    if not HAS_BOTO3:
        module.fail_json(msg='boto3 and botocore are required.')

    region, ec2_url, aws_connect_kwargs = get_aws_connection_info(module, boto3=True)
    if region:
        connection = boto3_conn(module, conn_type='client', resource='sts',
                                region=region, endpoint=ec2_url, **aws_connect_kwargs)
    else:
        module.fail_json(msg="region must be specified")

    get_session_token(connection, module)
# import module snippets
# Ansible's (pre-2.1) bottom-of-file boilerplate: these wildcard imports
# inject AnsibleModule, ec2_argument_spec, get_aws_connection_info and
# boto3_conn used above.
from ansible.module_utils.basic import *
from ansible.module_utils.ec2 import *
if __name__ == '__main__':
main()
| gpl-3.0 |
grap/OpenUpgrade | addons/marketing_campaign/marketing_campaign.py | 51 | 41800 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2013 OpenERP SA (<http://openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
import base64
import itertools
from datetime import datetime
from dateutil.relativedelta import relativedelta
from operator import itemgetter
from traceback import format_exception
from sys import exc_info
from openerp.tools.safe_eval import safe_eval as eval
import re
from openerp.addons.decimal_precision import decimal_precision as dp
from openerp import api
from openerp.osv import fields, osv
from openerp.report import render_report
from openerp.tools.translate import _
# Maps an interval unit name to a factory returning the matching
# relativedelta, e.g. _intervalTypes['days'](3) -> relativedelta(days=3).
_intervalTypes = {
'hours': lambda interval: relativedelta(hours=interval),
'days': lambda interval: relativedelta(days=interval),
'months': lambda interval: relativedelta(months=interval),
'years': lambda interval: relativedelta(years=interval),
}
# Server-side datetime string format used for scheduling dates in this module.
DT_FMT = '%Y-%m-%d %H:%M:%S'
class marketing_campaign(osv.osv):
    """A marketing campaign: a set of activities run over records of a
    user-chosen resource model, fed with workitems by its segments."""
    _name = "marketing.campaign"
    _description = "Marketing Campaign"

    def _count_segments(self, cr, uid, ids, field_name, arg, context=None):
        """Function field: number of segments attached to each campaign."""
        res = {}
        try:
            for segments in self.browse(cr, uid, ids, context=context):
                res[segments.id] = len(segments.segment_ids)
        except Exception:
            # Best-effort: return whatever was computed so far. The original
            # used a bare `except:`, which also swallowed SystemExit and
            # KeyboardInterrupt; Exception keeps the tolerant behavior
            # without masking interpreter-level exits.
            pass
        return res

    _columns = {
        'name': fields.char('Name', required=True),
        'object_id': fields.many2one('ir.model', 'Resource', required=True,
                                     help="Choose the resource on which you want \
this campaign to be run"),
        'partner_field_id': fields.many2one('ir.model.fields', 'Partner Field',
                                            domain="[('model_id', '=', object_id), ('ttype', '=', 'many2one'), ('relation', '=', 'res.partner')]",
                                            help="The generated workitems will be linked to the partner related to the record. "
                                                 "If the record is the partner itself leave this field empty. "
                                                 "This is useful for reporting purposes, via the Campaign Analysis or Campaign Follow-up views."),
        'unique_field_id': fields.many2one('ir.model.fields', 'Unique Field',
                                           domain="[('model_id', '=', object_id), ('ttype', 'in', ['char','int','many2one','text','selection'])]",
                                           help='If set, this field will help segments that work in "no duplicates" mode to avoid '
                                                'selecting similar records twice. Similar records are records that have the same value for '
                                                'this unique field. For example by choosing the "email_from" field for CRM Leads you would prevent '
                                                'sending the same campaign to the same email address again. If not set, the "no duplicates" segments '
                                                "will only avoid selecting the same record again if it entered the campaign previously. "
                                                "Only easily comparable fields like textfields, integers, selections or single relationships may be used."),
        'mode': fields.selection([('test', 'Test Directly'),
                                  ('test_realtime', 'Test in Realtime'),
                                  ('manual', 'With Manual Confirmation'),
                                  ('active', 'Normal')],
                                 'Mode', required=True, help=
"""Test - It creates and process all the activities directly (without waiting for the delay on transitions) but does not send emails or produce reports.
Test in Realtime - It creates and processes all the activities directly but does not send emails or produce reports.
With Manual Confirmation - the campaigns runs normally, but the user has to validate all workitem manually.
Normal - the campaign runs normally and automatically sends all emails and reports (be very careful with this mode, you're live!)"""),
        'state': fields.selection([('draft', 'New'),
                                   ('running', 'Running'),
                                   ('cancelled', 'Cancelled'),
                                   ('done', 'Done')],
                                  'Status', copy=False),
        'activity_ids': fields.one2many('marketing.campaign.activity',
                                        'campaign_id', 'Activities'),
        'fixed_cost': fields.float('Fixed Cost', help="Fixed cost for running this campaign. You may also specify variable cost and revenue on each campaign activity. Cost and Revenue statistics are included in Campaign Reporting.", digits_compute=dp.get_precision('Product Price')),
        'segment_ids': fields.one2many('marketing.campaign.segment', 'campaign_id', 'Segments', readonly=False),
        'segments_count': fields.function(_count_segments, type='integer', string='Segments')
    }

    _defaults = {
        'state': lambda *a: 'draft',
        'mode': lambda *a: 'test',
    }

    def state_running_set(self, cr, uid, ids, *args):
        """Start the campaign after checking it has at least one entry point."""
        # TODO check that all subcampaigns are running
        campaign = self.browse(cr, uid, ids[0])

        if not campaign.activity_ids:
            raise osv.except_osv(_("Error"), _("The campaign cannot be started. There are no activities in it."))

        has_start = False
        has_signal_without_from = False
        for activity in campaign.activity_ids:
            if activity.start:
                has_start = True
            if activity.signal and len(activity.from_ids) == 0:
                has_signal_without_from = True

        # A campaign is startable if it has a starting activity, or an
        # externally-triggered (signal) activity with no incoming transition.
        if not has_start and not has_signal_without_from:
            raise osv.except_osv(_("Error"), _("The campaign cannot be started. It does not have any starting activity. Modify campaign's activities to mark one as the starting point."))

        return self.write(cr, uid, ids, {'state': 'running'})

    def state_done_set(self, cr, uid, ids, *args):
        """Close the campaign; forbidden while any segment is still running."""
        # TODO check that this campaign is not a subcampaign in running mode.
        segment_ids = self.pool.get('marketing.campaign.segment').search(
            cr, uid, [('campaign_id', 'in', ids),
                      ('state', '=', 'running')])
        if segment_ids:
            raise osv.except_osv(_("Error"), _("The campaign cannot be marked as done before all segments are closed."))
        self.write(cr, uid, ids, {'state': 'done'})
        return True

    def state_cancel_set(self, cr, uid, ids, *args):
        """Cancel the campaign."""
        # TODO check that this campaign is not a subcampaign in running mode.
        self.write(cr, uid, ids, {'state': 'cancelled'})
        return True

    # dead code
    def signal(self, cr, uid, model, res_id, signal, run_existing=True, context=None):
        """Public wrapper around _signal() taking a model name and record id."""
        record = self.pool[model].browse(cr, uid, res_id, context)
        return self._signal(cr, uid, record, signal, run_existing, context)

    # dead code
    def _signal(self, cr, uid, record, signal, run_existing=True, context=None):
        """Create and process workitems for every running campaign whose
        activities listen to `signal` on `record`'s model.

        :param record: browse_record the signal applies to
        :param signal: name of the signal (must be truthy)
        :param run_existing: when False, existing matching workitems are
                             left untouched instead of being (re)processed
        """
        if not signal:
            raise ValueError('Signal cannot be False.')

        Workitems = self.pool.get('marketing.campaign.workitem')
        domain = [('object_id.model', '=', record._name),
                  ('state', '=', 'running')]
        campaign_ids = self.search(cr, uid, domain, context=context)
        for campaign in self.browse(cr, uid, campaign_ids, context=context):
            for activity in campaign.activity_ids:
                if activity.signal != signal:
                    continue

                data = dict(activity_id=activity.id,
                            res_id=record.id,
                            state='todo')
                wi_domain = [(k, '=', v) for k, v in data.items()]

                wi_ids = Workitems.search(cr, uid, wi_domain, context=context)
                if wi_ids:
                    if not run_existing:
                        continue
                else:
                    partner = self._get_partner_for(campaign, record)
                    if partner:
                        data['partner_id'] = partner.id
                    wi_id = Workitems.create(cr, uid, data, context=context)
                    wi_ids = [wi_id]
                Workitems.process(cr, uid, wi_ids, context=context)
        return True

    def _get_partner_for(self, campaign, record):
        """Return the partner browse_record associated with `record`, using
        the campaign's partner field, or the record itself when the campaign
        runs directly on res.partner. Returns None otherwise."""
        partner_field = campaign.partner_field_id.name
        if partner_field:
            return record[partner_field]
        elif campaign.object_id.model == 'res.partner':
            return record
        return None

    # prevent duplication until the server properly duplicates several levels of nested o2m
    def copy(self, cr, uid, id, default=None, context=None):
        raise osv.except_osv(_("Operation not supported"), _("You cannot duplicate a campaign, Not supported yet."))

    def _find_duplicate_workitems(self, cr, uid, record, campaign_rec, context=None):
        """Finds possible duplicates workitems for a record in this campaign,
        based on a uniqueness field.

        :param record: browse_record to find duplicates workitems for.
        :param campaign_rec: browse_record of campaign
        """
        Workitems = self.pool.get('marketing.campaign.workitem')
        duplicate_workitem_domain = [('res_id', '=', record.id),
                                     ('campaign_id', '=', campaign_rec.id)]
        unique_field = campaign_rec.unique_field_id
        if unique_field:
            unique_value = getattr(record, unique_field.name, None)
            if unique_value:
                if unique_field.ttype == 'many2one':
                    # compare many2one values by id, not by browse_record
                    unique_value = unique_value.id
                similar_res_ids = self.pool[campaign_rec.object_id.model].search(
                    cr, uid, [(unique_field.name, '=', unique_value)], context=context)
                if similar_res_ids:
                    duplicate_workitem_domain = [('res_id', 'in', similar_res_ids),
                                                 ('campaign_id', '=', campaign_rec.id)]
        return Workitems.search(cr, uid, duplicate_workitem_domain, context=context)
class marketing_campaign_segment(osv.osv):
    """A filtered selection of resource records that feeds a campaign with
    new workitems when synchronized."""
    _name = "marketing.campaign.segment"
    _description = "Campaign Segment"
    _order = "name"

    def _get_next_sync(self, cr, uid, ids, fn, args, context=None):
        # The next auto-sync date comes from the single shared cron job, so
        # it is identical for every segment.
        sync_job = self.pool.get('ir.model.data').get_object(cr, uid, 'marketing_campaign', 'ir_cron_marketing_campaign_every_day', context=context)
        next_sync = sync_job and sync_job.nextcall or False
        return dict.fromkeys(ids, next_sync)

    _columns = {
        'name': fields.char('Name', required=True),
        'campaign_id': fields.many2one('marketing.campaign', 'Campaign', required=True, select=1, ondelete="cascade"),
        'object_id': fields.related('campaign_id', 'object_id', type='many2one', relation='ir.model', string='Resource'),
        'ir_filter_id': fields.many2one('ir.filters', 'Filter', ondelete="restrict",
                                        help="Filter to select the matching resource records that belong to this segment. "
                                             "New filters can be created and saved using the advanced search on the list view of the Resource. "
                                             "If no filter is set, all records are selected without filtering. "
                                             "The synchronization mode may also add a criterion to the filter."),
        'sync_last_date': fields.datetime('Last Synchronization', help="Date on which this segment was synchronized last time (automatically or manually)"),
        'sync_mode': fields.selection([('create_date', 'Only records created after last sync'),
                                       ('write_date', 'Only records modified after last sync (no duplicates)'),
                                       ('all', 'All records (no duplicates)')],
                                      'Synchronization mode',
                                      help="Determines an additional criterion to add to the filter when selecting new records to inject in the campaign. "
                                           '"No duplicates" prevents selecting records which have already entered the campaign previously.'
                                           'If the campaign has a "unique field" set, "no duplicates" will also prevent selecting records which have '
                                           'the same value for the unique field as other records that already entered the campaign.'),
        'state': fields.selection([('draft', 'New'),
                                   ('cancelled', 'Cancelled'),
                                   ('running', 'Running'),
                                   ('done', 'Done')],
                                  'Status', copy=False),
        'date_run': fields.datetime('Launch Date', help="Initial start date of this segment."),
        'date_done': fields.datetime('End Date', help="Date this segment was last closed or cancelled."),
        'date_next_sync': fields.function(_get_next_sync, string='Next Synchronization', type='datetime', help="Next time the synchronization job is scheduled to run automatically"),
    }

    _defaults = {
        'state': lambda *a: 'draft',
        'sync_mode': lambda *a: 'create_date',
    }

    def _check_model(self, cr, uid, ids, context=None):
        # Constraint: a segment's filter must target the campaign's model.
        # NOTE(review): this returns True as soon as the first record without
        # a filter is seen, without checking the remaining ids — behavior
        # preserved from the original implementation.
        for segment in self.browse(cr, uid, ids, context=context):
            if not segment.ir_filter_id:
                return True
            if segment.campaign_id.object_id.model != segment.ir_filter_id.model_id:
                return False
        return True

    _constraints = [
        (_check_model, 'Model of filter must be same as resource model of Campaign ', ['ir_filter_id,campaign_id']),
    ]

    def onchange_campaign_id(self, cr, uid, ids, campaign_id):
        """Restrict the selectable filters to the campaign's resource model."""
        result = {'domain': {'ir_filter_id': []}}
        if campaign_id:
            campaign = self.pool.get('marketing.campaign').browse(cr, uid, campaign_id)
            model_data = self.pool.get('ir.model').read(cr, uid, [campaign.object_id.id], ['model'])
            if model_data:
                result['domain'] = {'ir_filter_id': [('model_id', '=', model_data[0]['model'])]}
        else:
            result['value'] = {'ir_filter_id': False}
        return result

    def state_running_set(self, cr, uid, ids, *args):
        """Start the segment, stamping date_run on the first launch only."""
        segment = self.browse(cr, uid, ids[0])
        values = {'state': 'running'}
        if not segment.date_run:
            values['date_run'] = time.strftime('%Y-%m-%d %H:%M:%S')
        self.write(cr, uid, ids, values)
        return True

    def state_done_set(self, cr, uid, ids, *args):
        """Close the segment, cancelling any of its still-pending workitems."""
        Workitems = self.pool.get("marketing.campaign.workitem")
        pending_ids = Workitems.search(cr, uid,
                                       [('state', '=', 'todo'), ('segment_id', 'in', ids)])
        Workitems.write(cr, uid, pending_ids, {'state': 'cancelled'})
        self.write(cr, uid, ids, {'state': 'done', 'date_done': time.strftime('%Y-%m-%d %H:%M:%S')})
        return True

    def state_cancel_set(self, cr, uid, ids, *args):
        """Cancel the segment, cancelling any of its still-pending workitems."""
        Workitems = self.pool.get("marketing.campaign.workitem")
        pending_ids = Workitems.search(cr, uid,
                                       [('state', '=', 'todo'), ('segment_id', 'in', ids)])
        Workitems.write(cr, uid, pending_ids, {'state': 'cancelled'})
        self.write(cr, uid, ids, {'state': 'cancelled', 'date_done': time.strftime('%Y-%m-%d %H:%M:%S')})
        return True

    def synchroniz(self, cr, uid, ids, *args):
        # Name kept (sic) for compatibility with existing view buttons.
        self.process_segment(cr, uid, ids)
        return True

    @api.cr_uid_ids_context
    def process_segment(self, cr, uid, segment_ids=None, context=None):
        """Inject new workitems for every matching record of each running
        segment, then process all affected campaigns. With no segment_ids,
        all running segments are synchronized."""
        Workitems = self.pool.get('marketing.campaign.workitem')
        Campaigns = self.pool.get('marketing.campaign')
        if not segment_ids:
            segment_ids = self.search(cr, uid, [('state', '=', 'running')], context=context)

        sync_date = time.strftime('%Y-%m-%d %H:%M:%S')
        touched_campaigns = set()
        for segment in self.browse(cr, uid, segment_ids, context=context):
            if segment.campaign_id.state != 'running':
                continue

            touched_campaigns.add(segment.campaign_id.id)
            start_activity_ids = self.pool.get('marketing.campaign.activity').search(
                cr, uid, [('start', '=', True), ('campaign_id', '=', segment.campaign_id.id)], context=context)

            ResourceModel = self.pool[segment.object_id.model]
            criteria = []
            if segment.sync_last_date and segment.sync_mode != 'all':
                criteria += [(segment.sync_mode, '>', segment.sync_last_date)]
            if segment.ir_filter_id:
                criteria += eval(segment.ir_filter_id.domain)
            matching_ids = ResourceModel.search(cr, uid, criteria, context=context)

            # XXX TODO: rewrite this loop more efficiently without doing 1 search per record!
            for resource in ResourceModel.browse(cr, uid, matching_ids, context=context):
                # avoid duplicate workitem for the same resource
                if segment.sync_mode in ('write_date', 'all'):
                    if Campaigns._find_duplicate_workitems(cr, uid, resource, segment.campaign_id, context=context):
                        continue

                workitem_vals = {
                    'segment_id': segment.id,
                    'date': sync_date,
                    'state': 'todo',
                    'res_id': resource.id
                }
                partner = self.pool.get('marketing.campaign')._get_partner_for(segment.campaign_id, resource)
                if partner:
                    workitem_vals['partner_id'] = partner.id
                for activity_id in start_activity_ids:
                    workitem_vals['activity_id'] = activity_id
                    Workitems.create(cr, uid, workitem_vals, context=context)

            self.write(cr, uid, segment.id, {'sync_last_date': sync_date}, context=context)
        Workitems.process_all(cr, uid, list(touched_campaigns), context=context)
        return True
class marketing_campaign_activity(osv.osv):
    """A node of a campaign: the action (email/report/server action) executed
    when a workitem reaches it, plus its incoming/outgoing transitions."""
    _name = "marketing.campaign.activity"
    _order = "name"
    _description = "Campaign Activity"
    # selectable kinds of action for an activity; dispatched at runtime
    # through _process_wi_<type> in process() below
    _action_types = [
        ('email', 'Email'),
        ('report', 'Report'),
        ('action', 'Custom Action'),
        # TODO implement the subcampaigns.
        # TODO implement the subcampaign out. disallow out transitions from
        # subcampaign activities ?
        #('subcampaign', 'Sub-Campaign'),
    ]
    _columns = {
        'name': fields.char('Name', required=True),
        'campaign_id': fields.many2one('marketing.campaign', 'Campaign',
                                            required = True, ondelete='cascade', select=1),
        'object_id': fields.related('campaign_id','object_id',
                                      type='many2one', relation='ir.model',
                                      string='Object', readonly=True),
        'start': fields.boolean('Start', help= "This activity is launched when the campaign starts.", select=True),
        'condition': fields.text('Condition', size=256, required=True,
                                 help="Python expression to decide whether the activity can be executed, otherwise it will be deleted or cancelled."
                                 "The expression may use the following [browsable] variables:\n"
                                 "   - activity: the campaign activity\n"
                                 "   - workitem: the campaign workitem\n"
                                 "   - resource: the resource object this campaign item represents\n"
                                 "   - transitions: list of campaign transitions outgoing from this activity\n"
                                 "...- re: Python regular expression module"),
        'type': fields.selection(_action_types, 'Type', required=True,
                                  help="""The type of action to execute when an item enters this activity, such as:
   - Email: send an email using a predefined email template
   - Report: print an existing Report defined on the resource item and save it into a specific directory
   - Custom Action: execute a predefined action, e.g. to modify the fields of the resource record
  """),
        'email_template_id': fields.many2one('email.template', "Email Template", help='The email to send when this activity is activated'),
        'report_id': fields.many2one('ir.actions.report.xml', "Report", help='The report to generate when this activity is activated', ),
        'report_directory_id': fields.many2one('document.directory','Directory',
                                help="This folder is used to store the generated reports"),
        'server_action_id': fields.many2one('ir.actions.server', string='Action',
                                help= "The action to perform when this activity is activated"),
        'to_ids': fields.one2many('marketing.campaign.transition',
                                            'activity_from_id',
                                            'Next Activities'),
        'from_ids': fields.one2many('marketing.campaign.transition',
                                            'activity_to_id',
                                            'Previous Activities'),
        'variable_cost': fields.float('Variable Cost', help="Set a variable cost if you consider that every campaign item that has reached this point has entailed a certain cost. You can get cost statistics in the Reporting section", digits_compute=dp.get_precision('Product Price')),
        'revenue': fields.float('Revenue', help="Set an expected revenue if you consider that every campaign item that has reached this point has generated a certain revenue. You can get revenue statistics in the Reporting section", digits_compute=dp.get_precision('Account')),
        'signal': fields.char('Signal',
                              help='An activity with a signal can be called programmatically. Be careful, the workitem is always created when a signal is sent'),
        'keep_if_condition_not_met': fields.boolean("Don't Delete Workitems",
                                                    help="By activating this option, workitems that aren't executed because the condition is not met are marked as cancelled instead of being deleted.")
    }
    _defaults = {
        'type': lambda *a: 'email',
        'condition': lambda *a: 'True',
    }
    def search(self, cr, uid, args, offset=0, limit=None, order=None,
                                        context=None, count=False):
        """Override: when 'segment_id' is passed through the context, return
        only the activities of that segment's campaign (ignoring the normal
        domain/offset/limit), otherwise fall back to the default search."""
        if context == None:
            context = {}
        if 'segment_id' in context  and context['segment_id']:
            segment_obj = self.pool.get('marketing.campaign.segment').browse(cr,
                                                    uid, context['segment_id'])
            act_ids = []
            for activity in segment_obj.campaign_id.activity_ids:
                act_ids.append(activity.id)
            return act_ids
        return super(marketing_campaign_activity, self).search(cr, uid, args,
                                           offset, limit, order, context, count)
    #dead code
    def _process_wi_report(self, cr, uid, activity, workitem, context=None):
        """Render the activity's report for the workitem and store the result
        as an ir.attachment in the configured document directory."""
        report_data, format = render_report(cr, uid, [], activity.report_id.report_name, {}, context=context)
        attach_vals = {
            'name': '%s_%s_%s'%(activity.report_id.report_name,
                                activity.name,workitem.partner_id.name),
            'datas_fname': '%s.%s'%(activity.report_id.report_name,
                                        activity.report_id.report_type),
            'parent_id': activity.report_directory_id.id,
            'datas': base64.encodestring(report_data),
            'file_type': format
        }
        self.pool.get('ir.attachment').create(cr, uid, attach_vals)
        return True
    def _process_wi_email(self, cr, uid, activity, workitem, context=None):
        """Send the activity's email template to the workitem's resource."""
        return self.pool.get('email.template').send_mail(cr, uid,
                                            activity.email_template_id.id,
                                            workitem.res_id, context=context)
    #dead code
    def _process_wi_action(self, cr, uid, activity, workitem, context=None):
        """Run the activity's ir.actions.server with the workitem's resource
        injected as active_id/active_ids/active_model in the context."""
        if context is None:
            context = {}
        server_obj = self.pool.get('ir.actions.server')
        action_context = dict(context,
                              active_id=workitem.res_id,
                              active_ids=[workitem.res_id],
                              active_model=workitem.object_id.model,
                              workitem=workitem)
        server_obj.run(cr, uid, [activity.server_action_id.id],
                             context=action_context)
        return True
    def process(self, cr, uid, act_id, wi_id, context=None):
        """Execute activity *act_id* for workitem *wi_id* by dispatching to
        the _process_wi_<type> handler matching the activity type."""
        activity = self.browse(cr, uid, act_id, context=context)
        method = '_process_wi_%s' % (activity.type,)
        action = getattr(self, method, None)
        if not action:
            raise NotImplementedError('Method %r is not implemented on %r object.' % (method, self))
        workitem_obj = self.pool.get('marketing.campaign.workitem')
        workitem = workitem_obj.browse(cr, uid, wi_id, context=context)
        return action(cr, uid, activity, workitem, context=context)
class marketing_campaign_transition(osv.osv):
    """Directed edge between two campaign activities, with a trigger mode
    (automatic, timed, or purely cosmetic) and an optional delay."""
    _name = "marketing.campaign.transition"
    _description = "Campaign Transition"
    # units available for timed transitions; fed to relativedelta in _delta()
    _interval_units = [
        ('hours', 'Hour(s)'),
        ('days', 'Day(s)'),
        ('months', 'Month(s)'),
        ('years', 'Year(s)'),
    ]
    def _get_name(self, cr, uid, ids, fn, args, context=None):
        """Function field: human-readable label depending on the trigger
        ('Automatic transition', 'After N <unit>', or 'Cosmetic')."""
        # name formatters that depend on trigger
        formatters = {
            'auto': _('Automatic transition'),
            'time': _('After %(interval_nbr)d %(interval_type)s'),
            'cosmetic': _('Cosmetic'),
        }
        # get the translations of the values of selection field 'interval_type'
        fields = self.fields_get(cr, uid, ['interval_type'], context=context)
        interval_type_selection = dict(fields['interval_type']['selection'])
        result = dict.fromkeys(ids, False)
        for trans in self.browse(cr, uid, ids, context=context):
            values = {
                'interval_nbr': trans.interval_nbr,
                'interval_type': interval_type_selection.get(trans.interval_type, ''),
            }
            result[trans.id] = formatters[trans.trigger] % values
        return result
    def _delta(self, cr, uid, ids, context=None):
        """Return the relativedelta represented by this (single) timed
        transition; raises ValueError for non-'time' triggers."""
        assert len(ids) == 1
        transition = self.browse(cr, uid, ids[0], context=context)
        if transition.trigger != 'time':
            raise ValueError('Delta is only relevant for timed transition.')
        return relativedelta(**{str(transition.interval_type): transition.interval_nbr})
    _columns = {
        'name': fields.function(_get_name, string='Name',
                                type='char', size=128),
        'activity_from_id': fields.many2one('marketing.campaign.activity',
                                            'Previous Activity', select=1,
                                            required=True, ondelete="cascade"),
        'activity_to_id': fields.many2one('marketing.campaign.activity',
                                          'Next Activity',
                                          required=True, ondelete="cascade"),
        'interval_nbr': fields.integer('Interval Value', required=True),
        'interval_type': fields.selection(_interval_units, 'Interval Unit',
                                          required=True),
        'trigger': fields.selection([('auto', 'Automatic'),
                                     ('time', 'Time'),
                                     ('cosmetic', 'Cosmetic'),  # fake plastic transition
                                    ],
                                    'Trigger', required=True,
                                    help="How is the destination workitem triggered"),
    }
    _defaults = {
        'interval_nbr': 1,
        'interval_type': 'days',
        'trigger': 'time',
    }
    def _check_campaign(self, cr, uid, ids, context=None):
        """Constraint helper: both endpoints must belong to the same campaign."""
        for obj in self.browse(cr, uid, ids, context=context):
            if obj.activity_from_id.campaign_id != obj.activity_to_id.campaign_id:
                return False
        return True
    _constraints = [
            (_check_campaign, 'The To/From Activity of transition must be of the same Campaign ', ['activity_from_id,activity_to_id']),
        ]
    _sql_constraints = [
        ('interval_positive', 'CHECK(interval_nbr >= 0)', 'The interval must be positive or zero')
    ]
class marketing_campaign_workitem(osv.osv):
    """One unit of work of a campaign: a (resource record, activity) pair in
    state todo/done/cancelled/exception, processed by _process_one()."""
    _name = "marketing.campaign.workitem"
    _description = "Campaign Workitem"
    def _res_name_get(self, cr, uid, ids, field_name, arg, context=None):
        """Function field: display name of the target resource record, or '/'
        when the record is missing or was deleted."""
        res = dict.fromkeys(ids, '/')
        for wi in self.browse(cr, uid, ids, context=context):
            if not wi.res_id:
                continue
            proxy = self.pool[wi.object_id.model]
            if not proxy.exists(cr, uid, [wi.res_id]):
                continue
            ng = proxy.name_get(cr, uid, [wi.res_id], context=context)
            if ng:
                res[wi.id] = ng[0][1]
        return res
    def _resource_search(self, cr, uid, obj, name, args, domain=None, context=None):
        """Returns id of workitem whose resource_name matches with the given name"""
        if not len(args):
            return []
        condition_name = None
        for domain_item in args:
            # we only use the first domain criterion and ignore all the rest including operators
            if isinstance(domain_item, (list,tuple)) and len(domain_item) == 3 and domain_item[0] == 'res_name':
                condition_name = [None, domain_item[1], domain_item[2]]
                break
        assert condition_name, "Invalid search domain for marketing_campaign_workitem.res_name. It should use 'res_name'"
        # fetch every workitem with its resource id and model, then match the
        # resources' _rec_name against the requested condition per model
        cr.execute("""select w.id, w.res_id, m.model  \
                                from marketing_campaign_workitem w \
                                    left join marketing_campaign_activity a on (a.id=w.activity_id)\
                                    left join marketing_campaign c on (c.id=a.campaign_id)\
                                    left join ir_model m on (m.id=c.object_id)
                                    """)
        res = cr.fetchall()
        workitem_map = {}
        matching_workitems = []
        for id, res_id, model in res:
            workitem_map.setdefault(model,{}).setdefault(res_id,set()).add(id)
        # NOTE: iteritems() is Python-2 only, consistent with the rest of this module
        for model, id_map in workitem_map.iteritems():
            model_pool = self.pool[model]
            condition_name[0] = model_pool._rec_name
            condition = [('id', 'in', id_map.keys()), condition_name]
            for res_id in model_pool.search(cr, uid, condition, context=context):
                matching_workitems.extend(id_map[res_id])
        return [('id', 'in', list(set(matching_workitems)))]
    _columns = {
        'segment_id': fields.many2one('marketing.campaign.segment', 'Segment', readonly=True),
        'activity_id': fields.many2one('marketing.campaign.activity','Activity',
                                       required=True, readonly=True),
        'campaign_id': fields.related('activity_id', 'campaign_id',
                                      type='many2one', relation='marketing.campaign', string='Campaign', readonly=True, store=True),
        'object_id': fields.related('activity_id', 'campaign_id', 'object_id',
                                    type='many2one', relation='ir.model', string='Resource', select=1, readonly=True, store=True),
        'res_id': fields.integer('Resource ID', select=1, readonly=True),
        'res_name': fields.function(_res_name_get, string='Resource Name', fnct_search=_resource_search, type="char", size=64),
        'date': fields.datetime('Execution Date', help='If date is not set, this workitem has to be run manually', readonly=True),
        'partner_id': fields.many2one('res.partner', 'Partner', select=1, readonly=True),
        'state': fields.selection([ ('todo', 'To Do'),
                                    ('cancelled', 'Cancelled'),
                                    ('exception', 'Exception'),
                                    ('done', 'Done'),
                                  ], 'Status', readonly=True, copy=False),
        'error_msg' : fields.text('Error Message', readonly=True)
    }
    _defaults = {
        'state': lambda *a: 'todo',
        'date': False,
    }
    @api.cr_uid_ids_context
    def button_draft(self, cr, uid, workitem_ids, context=None):
        """Reset failed/cancelled workitems back to 'todo'."""
        for wi in self.browse(cr, uid, workitem_ids, context=context):
            if wi.state in ('exception', 'cancelled'):
                self.write(cr, uid, [wi.id], {'state':'todo'}, context=context)
        return True
    @api.cr_uid_ids_context
    def button_cancel(self, cr, uid, workitem_ids, context=None):
        """Cancel pending/failed workitems."""
        for wi in self.browse(cr, uid, workitem_ids, context=context):
            if wi.state in ('todo','exception'):
                self.write(cr, uid, [wi.id], {'state':'cancelled'}, context=context)
        return True
    def _process_one(self, cr, uid, workitem, context=None):
        """Execute a single 'todo' workitem: evaluate its activity condition,
        run the activity (in manual/active campaign modes), then create the
        follow-up workitems along outgoing transitions, recursing into any
        that must run immediately.  Any exception marks the workitem as
        'exception' with the traceback stored in error_msg."""
        if workitem.state != 'todo':
            return False
        activity = workitem.activity_id
        proxy = self.pool[workitem.object_id.model]
        object_id = proxy.browse(cr, uid, workitem.res_id, context=context)
        eval_context = {
            'activity': activity,
            'workitem': workitem,
            'object': object_id,
            'resource': object_id,
            'transitions': activity.to_ids,
            're': re,
        }
        try:
            condition = activity.condition
            campaign_mode = workitem.campaign_id.mode
            if condition:
                # NOTE(review): eval of the admin-defined activity condition
                if not eval(condition, eval_context):
                    if activity.keep_if_condition_not_met:
                        workitem.write({'state': 'cancelled'})
                    else:
                        workitem.unlink()
                    return
            result = True
            if campaign_mode in ('manual', 'active'):
                Activities = self.pool.get('marketing.campaign.activity')
                result = Activities.process(cr, uid, activity.id, workitem.id,
                                            context=context)
            values = dict(state='done')
            if not workitem.date:
                values['date'] = datetime.now().strftime(DT_FMT)
            workitem.write(values)
            if result:
                # process _chain
                workitem.refresh()  # reload
                date = datetime.strptime(workitem.date, DT_FMT)
                for transition in activity.to_ids:
                    if transition.trigger == 'cosmetic':
                        continue
                    launch_date = False
                    if transition.trigger == 'auto':
                        launch_date = date
                    elif transition.trigger == 'time':
                        launch_date = date + transition._delta()
                    if launch_date:
                        launch_date = launch_date.strftime(DT_FMT)
                    values = {
                        'date': launch_date,
                        'segment_id': workitem.segment_id.id,
                        'activity_id': transition.activity_to_id.id,
                        'partner_id': workitem.partner_id.id,
                        'res_id': workitem.res_id,
                        'state': 'todo',
                    }
                    wi_id = self.create(cr, uid, values, context=context)
                    # Now, depending on the trigger and the campaign mode
                    # we know whether we must run the newly created workitem.
                    #
                    # rows = transition trigger \ colums = campaign mode
                    #
                    #           test    test_realtime     manual      normal (active)
                    # time       Y            N             N           N
                    # cosmetic   N            N             N           N
                    # auto       Y            Y             N           Y
                    #
                    run = (transition.trigger == 'auto' \
                            and campaign_mode != 'manual') \
                          or (transition.trigger == 'time' \
                              and campaign_mode == 'test')
                    if run:
                        new_wi = self.browse(cr, uid, wi_id, context)
                        self._process_one(cr, uid, new_wi, context)
        except Exception:
            tb = "".join(format_exception(*exc_info()))
            workitem.write({'state': 'exception', 'error_msg': tb})
    @api.cr_uid_ids_context
    def process(self, cr, uid, workitem_ids, context=None):
        """Process each of the given workitems independently."""
        for wi in self.browse(cr, uid, workitem_ids, context=context):
            self._process_one(cr, uid, wi, context=context)
        return True
    def process_all(self, cr, uid, camp_ids=None, context=None):
        """Process all due 'todo' workitems of the given campaigns (defaults
        to every running campaign); loops until no more are found because
        processing a workitem can create new due workitems."""
        camp_obj = self.pool.get('marketing.campaign')
        if camp_ids is None:
            camp_ids = camp_obj.search(cr, uid, [('state','=','running')], context=context)
        for camp in camp_obj.browse(cr, uid, camp_ids, context=context):
            if camp.mode == 'manual':
                # manual states are not processed automatically
                continue
            while True:
                domain = [('campaign_id', '=', camp.id), ('state', '=', 'todo'), ('date', '!=', False)]
                if camp.mode in ('test_realtime', 'active'):
                    domain += [('date','<=', time.strftime('%Y-%m-%d %H:%M:%S'))]
                workitem_ids = self.search(cr, uid, domain, context=context)
                if not workitem_ids:
                    break
                self.process(cr, uid, workitem_ids, context=context)
        return True
    def preview(self, cr, uid, ids, context=None):
        """Return an action to preview the workitem's email or report;
        raises when the activity has neither."""
        res = {}
        wi_obj = self.browse(cr, uid, ids[0], context=context)
        if wi_obj.activity_id.type == 'email':
            view_id = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'email_template', 'email_template_preview_form')
            res = {
                'name': _('Email Preview'),
                'view_type': 'form',
                'view_mode': 'form,tree',
                'res_model': 'email_template.preview',
                'view_id': False,
                'context': context,  # NOTE(review): duplicate key - overridden by the 'context' entry below
                'views': [(view_id and view_id[1] or 0, 'form')],
                'type': 'ir.actions.act_window',
                'target': 'new',
                'nodestroy':True,
                'context': "{'template_id':%d,'default_res_id':%d}"%
                                (wi_obj.activity_id.email_template_id.id,
                                 wi_obj.res_id)
            }
        elif wi_obj.activity_id.type == 'report':
            datas = {
                'ids': [wi_obj.res_id],
                'model': wi_obj.object_id.model
            }
            res = {
                'type' : 'ir.actions.report.xml',
                'report_name': wi_obj.activity_id.report_id.report_name,
                'datas' : datas,
            }
        else:
            raise osv.except_osv(_('No preview'),_('The current step for this item has no email or report to preview.'))
        return res
class email_template(osv.osv):
    """Extend email.template so that, when created from a campaign context,
    the template's model defaults to the campaign's target object."""
    _inherit = "email.template"
    _defaults = {
        # 'object_id' is injected in the context by the campaign views
        'model_id': lambda obj, cr, uid, context: context.get('object_id',False),
    }
# TODO: add constraint to prevent disabling / disapproving an email account used in a running campaign
class report_xml(osv.osv):
    """Extend ir.actions.report.xml so that, when searched from a campaign
    context, only reports defined on the campaign's target model are shown."""
    _inherit = 'ir.actions.report.xml'
    def search(self, cr, uid, args, offset=0, limit=None, order=None, context=None, count=False):
        """Override: restrict the domain to the model referenced by the
        'object_id' entry of the context (set by the campaign views)."""
        if context is None:
            context = {}
        object_id = context.get('object_id')
        if object_id:
            model = self.pool.get('ir.model').browse(cr, uid, object_id, context=context).model
            # work on a copy instead of mutating the caller's domain list in place
            args = args + [('model', '=', model)]
        return super(report_xml, self).search(cr, uid, args, offset=offset, limit=limit, order=order, context=context, count=count)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
mayblue9/bokeh | bokeh/server/models/convenience.py | 29 | 1566 |
def can_read_doc_api(doc, apikey):
    """True when *apikey* grants at least read access: either it is the
    read/write key, or it matches the document's read-only key."""
    return can_write_doc_api(doc, apikey) or apikey == doc.readonlyapikey
def can_write_doc_api(doc, apikey):
    """True when *apikey* is the document's read/write API key."""
    return doc.apikey == apikey
def can_read_doc(doc, bokehuser):
    """True when *bokehuser* appears in the document's read ACL."""
    readers = doc.r_users
    return bokehuser.username in readers
def can_write_doc(doc, bokehuser):
    """True when *bokehuser* appears in the document's read/write ACL."""
    writers = doc.rw_users
    return bokehuser.username in writers
#api keys are r/w only, no such thing as read only api keys yet
def can_write_from_request(doc, request, user, temporary_docid=None):
    """Decide write access for an HTTP request: temporary docs only need read
    access, an API-key header must match the doc's r/w key, otherwise fall
    back to the logged-in user's ACL."""
    # temporary_docid is a uuid - we're not too concerned about auth around it
    # since it's a UUID and disposable
    # hack - temporary workaround for multiuser server and bokeh applets,
    # to be removed once bokeh applet infrastructure uses copy on write functionality
    if temporary_docid:
        return can_read_from_request(doc, request, user)
    api_key = request.headers.get('BOKEH-API-KEY')
    if api_key:
        return doc.apikey == api_key
    if not user:
        return False
    return can_write_doc(doc, user)
def can_read_from_request(doc, request, user):
    """Decide read access for an HTTP request: published docs are world
    readable, write access implies read access, and an API-key header may
    match the doc's read-only key; otherwise check the user's ACL."""
    # No temporary docid here - temporary docid is about read-write permissions,
    # and has no impact on read permissions
    if doc.published:
        return True
    if can_write_from_request(doc, request, user):
        return True
    api_key = request.headers.get('BOKEH-API-KEY')
    if api_key:
        return doc.readonlyapikey == api_key
    if not user:
        return False
    return can_read_doc(doc, user)
| bsd-3-clause |
thesquelched/libcloud | libcloud/compute/drivers/ecp.py | 32 | 11705 | # Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Enomaly ECP driver
"""
import time
import base64
import os
import socket
import binascii
from libcloud.utils.py3 import httplib
from libcloud.utils.py3 import b
# JSON is included in the standard library starting with Python 2.6. For 2.5
# and 2.4, there's a simplejson egg at: http://pypi.python.org/pypi/simplejson
try:
import simplejson as json
except ImportError:
import json
from libcloud.common.base import Response, ConnectionUserAndKey
from libcloud.compute.base import NodeDriver, NodeSize, NodeLocation
from libcloud.compute.base import NodeImage, Node
from libcloud.compute.types import Provider, NodeState, InvalidCredsError
from libcloud.utils.networking import is_private_subnet
# Defaults
API_HOST = ''
API_PORT = (80, 443)
class ECPResponse(Response):
    """Response wrapper for the Enomaly ECP JSON API.

    A request is successful when HTTP says 200/201 *and* the JSON body
    reports errno == 0; any failure reason is kept in ``self.error``.
    """

    def success(self):
        """Return True on a successful API call, recording the failure
        reason in ``self.error`` otherwise; raises InvalidCredsError on 401."""
        if self.status not in (httplib.OK, httplib.CREATED):
            if self.status == httplib.UNAUTHORIZED:
                raise InvalidCredsError()
            self.error = "HTTP Error Code: %s" % self.status
            return False
        try:
            payload = json.loads(self.body)
        except ValueError:
            self.error = "JSON response cannot be decoded."
            return False
        if payload['errno'] != 0:
            self.error = "ECP error: %s" % payload['message']
            return False
        return True

    def parse_error(self):
        """Return the error message recorded by success()."""
        return self.error

    # Interpret the json responses - no error checking required
    def parse_body(self):
        """Decode the JSON body into Python objects."""
        return json.loads(self.body)

    def getheaders(self):
        """Return the raw response headers."""
        return self.headers
class ECPConnection(ConnectionUserAndKey):
    """
    Connection class for the Enomaly ECP driver
    """

    responseCls = ECPResponse
    host = API_HOST
    port = API_PORT

    def add_default_headers(self, headers):
        """Attach an HTTP Basic ``Authorization`` header built from the
        driver's user_id/key credentials and return the header dict."""
        # Authentication
        username = self.user_id
        password = self.key
        # base64.encodestring() was removed in Python 3.9; b64encode() is the
        # portable replacement and emits no trailing newline, so the old
        # [:-1] slice is unnecessary.  Decoding avoids producing the broken
        # "Basic b'...'" header on Python 3.
        base64string = base64.b64encode(
            b('%s:%s' % (username, password))).decode('utf-8')
        authheader = "Basic %s" % base64string
        headers['Authorization'] = authheader

        return headers

    def _encode_multipart_formdata(self, fields):
        """
        Encode *fields* (a dict of name -> value) as multipart/form-data.

        Returns a (headers, body) tuple suitable for a POST/PUT request.
        Based on Wade Leftwich's function:
        http://code.activestate.com/recipes/146306/
        """
        # use a random boundary that does not appear in the fields
        boundary = ''
        while boundary in ''.join(fields):
            boundary = binascii.hexlify(os.urandom(16)).decode('utf-8')
        L = []
        for i in fields:
            L.append('--' + boundary)
            L.append('Content-Disposition: form-data; name="%s"' % i)
            L.append('')
            L.append(fields[i])
        L.append('--' + boundary + '--')
        L.append('')
        body = '\r\n'.join(L)
        content_type = 'multipart/form-data; boundary=%s' % boundary
        header = {'Content-Type': content_type}
        return header, body
class ECPNodeDriver(NodeDriver):
    """
    Enomaly ECP node driver
    """
    name = "Enomaly Elastic Computing Platform"
    website = 'http://www.enomaly.com/'
    type = Provider.ECP
    connectionCls = ECPConnection

    def list_nodes(self):
        """
        Returns a list of all running Nodes

        :rtype: ``list`` of :class:`Node`
        """
        # Make the call
        res = self.connection.request('/rest/hosting/vm/list').parse_body()
        # Put together a list of node objects
        nodes = []
        for vm in res['vms']:
            node = self._to_node(vm)
            if node is not None:
                nodes.append(node)
        # And return it
        return nodes

    def _to_node(self, vm):
        """
        Turns a (json) dictionary into a Node object.
        This returns only running VMs.
        """
        # Check state
        if not vm['state'] == "running":
            return None
        # IPs: classify every non-loopback interface address as public or
        # private, skipping anything that is not a valid IPv4 address
        iplist = [interface['ip'] for interface in vm['interfaces'] if
                  interface['ip'] != '127.0.0.1']
        public_ips = []
        private_ips = []
        for ip in iplist:
            try:
                socket.inet_aton(ip)
            except socket.error:
                # not a valid ip
                continue
            if is_private_subnet(ip):
                private_ips.append(ip)
            else:
                public_ips.append(ip)
        # Create the node object
        n = Node(
            id=vm['uuid'],
            name=vm['name'],
            state=NodeState.RUNNING,
            public_ips=public_ips,
            private_ips=private_ips,
            driver=self,
        )
        return n

    def reboot_node(self, node):
        """
        Shuts down a VM and then starts it again.

        @inherits: :class:`NodeDriver.reboot_node`
        """
        # Turn the VM off
        # Black magic to make the POST requests work
        d = self.connection._encode_multipart_formdata({'action': 'stop'})
        self.connection.request(
            '/rest/hosting/vm/%s' % node.id,
            method='POST',
            headers=d[0],
            data=d[1]
        ).parse_body()
        node.state = NodeState.REBOOTING
        # Wait for it to turn off and then continue (to turn it on again)
        # NOTE(review): polls forever if the VM never reports 'off'
        while node.state == NodeState.REBOOTING:
            # Check if it's off.
            response = self.connection.request(
                '/rest/hosting/vm/%s' % node.id
            ).parse_body()
            if response['vm']['state'] == 'off':
                node.state = NodeState.TERMINATED
            else:
                time.sleep(5)
        # Turn the VM back on.
        # Black magic to make the POST requests work
        d = self.connection._encode_multipart_formdata({'action': 'start'})
        self.connection.request(
            '/rest/hosting/vm/%s' % node.id,
            method='POST',
            headers=d[0],
            data=d[1]
        ).parse_body()
        node.state = NodeState.RUNNING
        return True

    def destroy_node(self, node):
        """
        Shuts down and deletes a VM.

        @inherits: :class:`NodeDriver.destroy_node`
        """
        # Shut down first
        # Black magic to make the POST requests work
        d = self.connection._encode_multipart_formdata({'action': 'stop'})
        self.connection.request(
            '/rest/hosting/vm/%s' % node.id,
            method='POST',
            headers=d[0],
            data=d[1]
        ).parse_body()
        # Ensure there was no applicationl level error
        node.state = NodeState.PENDING
        # Wait for the VM to turn off before continuing
        # NOTE(review): polls forever if the VM never reports 'off'
        while node.state == NodeState.PENDING:
            # Check if it's off.
            response = self.connection.request(
                '/rest/hosting/vm/%s' % node.id
            ).parse_body()
            if response['vm']['state'] == 'off':
                node.state = NodeState.TERMINATED
            else:
                time.sleep(5)
        # Delete the VM
        # Black magic to make the POST requests work
        d = self.connection._encode_multipart_formdata({'action': 'delete'})
        self.connection.request(
            '/rest/hosting/vm/%s' % (node.id),
            method='POST',
            headers=d[0],
            data=d[1]
        ).parse_body()
        return True

    def list_images(self, location=None):
        """
        Returns a list of all package templates aka appiances aka images.

        @inherits: :class:`NodeDriver.list_images`
        """
        # Make the call
        response = self.connection.request(
            '/rest/hosting/ptemplate/list').parse_body()
        # Turn the response into an array of NodeImage objects
        images = []
        for ptemplate in response['packages']:
            images.append(NodeImage(
                id=ptemplate['uuid'],
                name='%s: %s' % (ptemplate['name'], ptemplate['description']),
                driver=self,)
            )
        return images

    def list_sizes(self, location=None):
        """
        Returns a list of all hardware templates

        @inherits: :class:`NodeDriver.list_sizes`
        """
        # Make the call
        response = self.connection.request(
            '/rest/hosting/htemplate/list').parse_body()
        # Turn the response into an array of NodeSize objects
        sizes = []
        for htemplate in response['templates']:
            sizes.append(NodeSize(
                id=htemplate['uuid'],
                name=htemplate['name'],
                ram=htemplate['memory'],
                disk=0,  # Disk is independent of hardware template.
                bandwidth=0,  # There is no way to keep track of bandwidth.
                price=0,  # The billing system is external.
                driver=self,)
            )
        return sizes

    def list_locations(self):
        """
        This feature does not exist in ECP. Returns hard coded dummy location.

        :rtype: ``list`` of :class:`NodeLocation`
        """
        return [NodeLocation(id=1,
                             name="Cloud",
                             country='',
                             driver=self),
                ]

    def create_node(self, **kwargs):
        """
        Creates a virtual machine.

        :keyword    name:   String with a name for this new node (required)
        :type       name:   ``str``

        :keyword    size:   The size of resources allocated to this node .
                            (required)
        :type       size:   :class:`NodeSize`

        :keyword    image:  OS Image to boot on node. (required)
        :type       image:  :class:`NodeImage`

        :rtype: :class:`Node`
        """
        # Find out what network to put the VM on.
        res = self.connection.request(
            '/rest/hosting/network/list').parse_body()
        # Use the first / default network because there is no way to specific
        # which one
        network = res['networks'][0]['uuid']
        # Prepare to make the VM
        data = {
            'name': str(kwargs['name']),
            'package': str(kwargs['image'].id),
            'hardware': str(kwargs['size'].id),
            'network_uuid': str(network),
            'disk': ''
        }
        # Black magic to make the POST requests work
        d = self.connection._encode_multipart_formdata(data)
        response = self.connection.request(
            '/rest/hosting/vm/',
            method='PUT',
            headers=d[0],
            data=d[1]
        ).parse_body()
        # Create a node object and return it.
        # NOTE(review): state is PENDING because provisioning is asynchronous
        n = Node(
            id=response['machine_id'],
            name=data['name'],
            state=NodeState.PENDING,
            public_ips=[],
            private_ips=[],
            driver=self,
        )
        return n
| apache-2.0 |
CamelBackNotation/CarnotKE | jyhton/Lib/test/zxjdbc/runner.py | 19 | 5996 | # Jython Database Specification API 2.0
#
# Copyright (c) 2001 brian zimmer <bzimmer@ziclix.com>
"""
To run the tests, simply invoke this script from the commandline:
jython runner.py <xml config file> [vendor, ...]
If no vendors are given, then all vendors will be tested. If a
vendor is given, then only that vendor will be tested.
"""
import unittest, os
import xmllib, __builtin__, re
def __imp__(module, attr=None):
    """Import *module* by dotted name.

    With *attr*, return that attribute of the top-level package; without it,
    return the innermost module object itself.
    """
    if attr:
        pkg = __import__(module, globals(), locals())
        return getattr(pkg, attr)
    leaf = module.split(".")[-1]
    return __import__(module, globals(), locals(), leaf)
class Factory:
    """How to build a test fixture: a class name, a factory method, and the
    positional/keyword arguments to call it with."""

    def __init__(self, classname, method):
        self.classname = classname
        self.method = method
        self.arguments, self.keywords = [], {}
class Testcase:
    """A test case to load: the module to import from, the name to import,
    and a list of test-method names to skip."""

    def __init__(self, frm, impt):
        self.frm = frm
        self.impt = impt
        self.ignore = []
class Test:
    """A group of test cases sharing one connection factory, restricted to a
    given operating system name."""

    def __init__(self, name, os):
        self.name = name
        self.os = os
        self.factory = None
        self.tests = []
class Vendor:
    """One database vendor under test: its tests, its table definitions, and
    an optional datahandler module name."""

    def __init__(self, name, datahandler=None):
        self.name = name
        self.datahandler = datahandler
        self.scroll = None
        self.tests = []
        self.tables = {}
class ConfigParser(xmllib.XMLParser):
    """
    A simple XML parser for the config file.
    """
    # NOTE(review): xmllib is Python-2 only (this runner targets Jython 2.x)

    def __init__(self, **kw):
        apply(xmllib.XMLParser.__init__, (self,), kw)
        self.vendors = []
        self.table_stack = []
        # matches ${name} placeholders expanded from Java system properties
        self.re_var = re.compile(r"\${(.*?)}")

    def vendor(self):
        """Return the vendor element currently being parsed."""
        assert len(self.vendors) > 0, "no vendors"
        return self.vendors[-1]

    def test(self):
        """Return the test element currently being parsed."""
        v = self.vendor()
        assert len(v.tests) > 0, "no tests"
        return v.tests[-1]

    def factory(self):
        """Return the factory of the current test element."""
        c = self.test()
        assert c.factory, "no factory"
        return c.factory

    def testcase(self):
        """Return the testcase element currently being parsed."""
        s = self.test()
        assert len(s.tests) > 0, "no testcases"
        return s.tests[-1]

    def value(self, value):
        """Expand ${prop} placeholders using Java system properties."""
        def repl(sub):
            from java.lang import System
            return System.getProperty(sub.group(1), sub.group(1))
        value = self.re_var.sub(repl, value)
        return value

    # --- element handlers invoked by xmllib for each start/end tag ---

    def start_vendor(self, attrs):
        if attrs.has_key('datahandler'):
            v = Vendor(attrs['name'], attrs['datahandler'])
        else:
            v = Vendor(attrs['name'])
        if attrs.has_key('scroll'):
            v.scroll = attrs['scroll']
        self.vendors.append(v)

    def start_test(self, attrs):
        v = self.vendor()
        c = Test(attrs['name'], attrs['os'])
        v.tests.append(c)

    def start_factory(self, attrs):
        c = self.test()
        f = Factory(attrs['class'], attrs['method'])
        c.factory = f

    def start_argument(self, attrs):
        f = self.factory()
        # optional 'type' attribute names a builtin used to coerce the value
        if attrs.has_key('type'):
            f.arguments.append((attrs['name'], getattr(__builtin__, attrs['type'])(self.value(attrs['value']))))
        else:
            f.arguments.append((attrs['name'], self.value(attrs['value'])))

    def start_keyword(self, attrs):
        f = self.factory()
        if attrs.has_key('type'):
            f.keywords[attrs['name']] = getattr(__builtin__, attrs['type'])(self.value(attrs['value']))
        else:
            f.keywords[attrs['name']] = self.value(attrs['value'])

    def start_ignore(self, attrs):
        t = self.testcase()
        t.ignore.append(attrs['name'])

    def start_testcase(self, attrs):
        c = self.test()
        c.tests.append(Testcase(attrs['from'], attrs['import']))

    def start_table(self, attrs):
        self.table_stack.append((attrs['ref'], attrs['name']))

    def end_table(self):
        del self.table_stack[-1]

    def handle_data(self, data):
        # character data inside <table> is the table's DDL; register it
        # under the current vendor keyed by the table's ref
        if len(self.table_stack):
            ref, tabname = self.table_stack[-1]
            self.vendor().tables[ref] = (tabname, data.strip())
class SQLTestCase(unittest.TestCase):
    """
    Base testing class.  It contains the list of table and factory information
    to run any tests.
    """

    def __init__(self, name, vendor, factory):
        """Bind the named test method plus the vendor config and connection
        factory it should use; loads the vendor's datahandler module if any."""
        unittest.TestCase.__init__(self, name)
        self.vendor = vendor
        self.factory = factory
        if self.vendor.datahandler:
            self.datahandler = __imp__(self.vendor.datahandler)

    def table(self, name):
        """Return the (tablename, ddl) pair registered under *name*."""
        return self.vendor.tables[name]

    def has_table(self, name):
        """True when a table is registered under *name*."""
        # dict.has_key() no longer exists in Python 3; the 'in' operator is
        # equivalent and works on Python 2 / Jython as well.
        return name in self.vendor.tables
def make_suite(vendor, testcase, factory, mask=None):
    """Build a TestSuite for *testcase*, skipping its ignored method names
    and, when *mask* is given, keeping only the method named *mask*."""
    clz = __imp__(testcase.frm, testcase.impt)
    names = [name for name in unittest.getTestCaseNames(clz, "test")
             if name not in testcase.ignore]
    if mask is not None:
        names = [name for name in names if name == mask]
    return unittest.TestSuite([clz(name, vendor, factory) for name in names])
def test(vendors, include=None, mask=None):
    """Run the configured suites for every vendor (or only those named in
    *include*), restricted to the current OS and, optionally, to the single
    test method named *mask*."""
    for vendor in vendors:
        if not include or vendor.name in include:
            print
            print "testing [%s]" % (vendor.name)
            for test in vendor.tests:
                # empty 'os' attribute means the test runs everywhere
                if not test.os or test.os == os.name:
                    for testcase in test.tests:
                        suite = make_suite(vendor, testcase, test.factory, mask)
                        unittest.TextTestRunner().run(suite)
        else:
            print
            print "skipping [%s]" % (vendor.name)
if __name__ == '__main__':
    # Command line: runner.py [-t testmask] <configfile.xml> [vendor ...]
    import sys, getopt
    try:
        opts, args = getopt.getopt(sys.argv[1:], "t:", [])
    except getopt.error, msg:
        # NOTE(review): the %s placeholders below are never filled in; the
        # usage line is printed literally
        print "%s -t [testmask] <vendor>[,<vendor>]"
        sys.exit(0)
    # -t restricts the run to a single test method name
    mask = None
    for a in opts:
        opt, arg = a
        if opt == '-t':
            mask = arg
    # first positional argument is the XML config file; the rest are vendors
    configParser = ConfigParser()
    fp = open(args[0], "r")
    configParser.feed(fp.read())
    fp.close()
    test(configParser.vendors, args[1:], mask=mask)
    sys.exit(0)
| apache-2.0 |
tinkerinestudio/Tinkerine-Suite | TinkerineSuite/python/Lib/numpy/polynomial/tests/test_hermite.py | 24 | 17928 | """Tests for hermite module.
"""
from __future__ import division
import numpy as np
import numpy.polynomial.hermite as herm
import numpy.polynomial.polynomial as poly
from numpy.testing import *
# Power-basis coefficients (lowest degree first) of the first ten
# "physicists'" Hermite polynomials H_0 .. H_9, used as reference data.
H0 = np.array([ 1])
H1 = np.array([0, 2])
H2 = np.array([ -2, 0, 4])
H3 = np.array([0, -12, 0, 8])
H4 = np.array([ 12, 0, -48, 0, 16])
H5 = np.array([0, 120, 0, -160, 0, 32])
H6 = np.array([-120, 0, 720, 0, -480, 0, 64])
H7 = np.array([0, -1680, 0, 3360, 0, -1344, 0, 128])
H8 = np.array([1680, 0, -13440, 0, 13440, 0, -3584, 0, 256])
H9 = np.array([0, 30240, 0, -80640, 0, 48384, 0, -9216, 0, 512])
# All of the above in one list, indexed by degree.
Hlist = [H0, H1, H2, H3, H4, H5, H6, H7, H8, H9]
def trim(x):
    """Trim trailing near-zero coefficients from a Hermite series.

    Thin wrapper around ``hermtrim`` with a fixed tolerance, used to make
    the comparisons in this module insensitive to tiny trailing terms.
    """
    tol = 1e-6
    return herm.hermtrim(x, tol=tol)
class TestConstants(TestCase):
    """Check the module-level constants exported by numpy.polynomial.hermite."""
    def test_hermdomain(self):
        """The default Hermite domain is [-1, 1]."""
        expected = [-1, 1]
        assert_equal(herm.hermdomain, expected)
    def test_hermzero(self):
        """The zero series is the single coefficient [0]."""
        expected = [0]
        assert_equal(herm.hermzero, expected)
    def test_hermone(self):
        """The unit series is the single coefficient [1]."""
        expected = [1]
        assert_equal(herm.hermone, expected)
    def test_hermx(self):
        """The identity x is represented as 0.5*H_1, i.e. [0, .5]."""
        expected = [0, .5]
        assert_equal(herm.hermx, expected)
class TestArithmetic(TestCase):
    """Exercise Hermite-series arithmetic (eval, add, sub, mulx, mul, div)."""
    # Sample abscissae and the power-basis values of H_0..H_9 at them,
    # used as reference results for hermval.
    x = np.linspace(-3, 3, 100)
    y0 = poly.polyval(x, H0)
    y1 = poly.polyval(x, H1)
    y2 = poly.polyval(x, H2)
    y3 = poly.polyval(x, H3)
    y4 = poly.polyval(x, H4)
    y5 = poly.polyval(x, H5)
    y6 = poly.polyval(x, H6)
    y7 = poly.polyval(x, H7)
    y8 = poly.polyval(x, H8)
    y9 = poly.polyval(x, H9)
    y = [y0, y1, y2, y3, y4, y5, y6, y7, y8, y9]
    def test_hermval(self):
        """hermval matches direct power-basis evaluation of each H_i."""
        # check empty input
        assert_equal(herm.hermval([], [1]).size, 0)
        # check normal input
        # (removed a leftover unused inner helper and a dead
        # ``ser = np.zeros`` assignment that was never read)
        for i in range(10):
            msg = "At i=%d" % i
            tgt = self.y[i]
            res = herm.hermval(self.x, [0]*i + [1])
            assert_almost_equal(res, tgt, err_msg=msg)
        # check that shape is preserved
        for i in range(3):
            dims = [2]*i
            x = np.zeros(dims)
            assert_equal(herm.hermval(x, [1]).shape, dims)
            assert_equal(herm.hermval(x, [1,0]).shape, dims)
            assert_equal(herm.hermval(x, [1,0,0]).shape, dims)
    def test_hermadd(self):
        """Addition of basis series: H_i + H_j has 1s at degrees i and j."""
        for i in range(5):
            for j in range(5):
                msg = "At i=%d, j=%d" % (i,j)
                tgt = np.zeros(max(i,j) + 1)
                tgt[i] += 1
                tgt[j] += 1
                res = herm.hermadd([0]*i + [1], [0]*j + [1])
                assert_equal(trim(res), trim(tgt), err_msg=msg)
    def test_hermsub(self):
        """Subtraction of basis series: H_i - H_j."""
        for i in range(5):
            for j in range(5):
                msg = "At i=%d, j=%d" % (i,j)
                tgt = np.zeros(max(i,j) + 1)
                tgt[i] += 1
                tgt[j] -= 1
                res = herm.hermsub([0]*i + [1], [0]*j + [1])
                assert_equal(trim(res), trim(tgt), err_msg=msg)
    def test_hermmulx(self):
        """Multiplication by x: x*H_i = i*H_{i-1} + .5*H_{i+1}."""
        assert_equal(herm.hermmulx([0]), [0])
        assert_equal(herm.hermmulx([1]), [0,.5])
        for i in range(1, 5):
            ser = [0]*i + [1]
            tgt = [0]*(i - 1) + [i, 0, .5]
            assert_equal(herm.hermmulx(ser), tgt)
    def test_hermmul(self):
        """Series product agrees with the pointwise product of values."""
        # check values of result
        for i in range(5):
            pol1 = [0]*i + [1]
            val1 = herm.hermval(self.x, pol1)
            for j in range(5):
                msg = "At i=%d, j=%d" % (i,j)
                pol2 = [0]*j + [1]
                val2 = herm.hermval(self.x, pol2)
                pol3 = herm.hermmul(pol1, pol2)
                val3 = herm.hermval(self.x, pol3)
                assert_(len(pol3) == i + j + 1, msg)
                assert_almost_equal(val3, val1*val2, err_msg=msg)
    def test_hermdiv(self):
        """divmod identity: quo*divisor + rem reconstructs the dividend."""
        for i in range(5):
            for j in range(5):
                msg = "At i=%d, j=%d" % (i,j)
                ci = [0]*i + [1]
                cj = [0]*j + [1]
                tgt = herm.hermadd(ci, cj)
                quo, rem = herm.hermdiv(tgt, ci)
                res = herm.hermadd(herm.hermmul(quo, ci), rem)
                assert_equal(trim(res), trim(tgt), err_msg=msg)
class TestCalculus(TestCase) :
    """Tests for Hermite-series integration (hermint) and derivation (hermder)."""
    def test_hermint(self) :
        """hermint: argument validation, constants of integration, lbnd, scl."""
        # check exceptions
        assert_raises(ValueError, herm.hermint, [0], .5)
        assert_raises(ValueError, herm.hermint, [0], -1)
        assert_raises(ValueError, herm.hermint, [0], 1, [0,0])
        # test integration of zero polynomial
        for i in range(2, 5):
            k = [0]*(i - 2) + [1]
            res = herm.hermint([0], m=i, k=k)
            assert_almost_equal(res, [0, .5])
        # check single integration with integration constant
        for i in range(5) :
            scl = i + 1
            pol = [0]*i + [1]
            tgt = [i] + [0]*i + [1/scl]
            hermpol = herm.poly2herm(pol)
            hermint = herm.hermint(hermpol, m=1, k=[i])
            res = herm.herm2poly(hermint)
            assert_almost_equal(trim(res), trim(tgt))
        # check single integration with integration constant and lbnd
        for i in range(5) :
            scl = i + 1
            pol = [0]*i + [1]
            hermpol = herm.poly2herm(pol)
            hermint = herm.hermint(hermpol, m=1, k=[i], lbnd=-1)
            assert_almost_equal(herm.hermval(-1, hermint), i)
        # check single integration with integration constant and scaling
        for i in range(5) :
            scl = i + 1
            pol = [0]*i + [1]
            tgt = [i] + [0]*i + [2/scl]
            hermpol = herm.poly2herm(pol)
            hermint = herm.hermint(hermpol, m=1, k=[i], scl=2)
            res = herm.herm2poly(hermint)
            assert_almost_equal(trim(res), trim(tgt))
        # check multiple integrations with default k
        for i in range(5) :
            for j in range(2,5) :
                pol = [0]*i + [1]
                tgt = pol[:]
                for k in range(j) :
                    tgt = herm.hermint(tgt, m=1)
                res = herm.hermint(pol, m=j)
                assert_almost_equal(trim(res), trim(tgt))
        # check multiple integrations with defined k
        for i in range(5) :
            for j in range(2,5) :
                pol = [0]*i + [1]
                tgt = pol[:]
                for k in range(j) :
                    tgt = herm.hermint(tgt, m=1, k=[k])
                res = herm.hermint(pol, m=j, k=range(j))
                assert_almost_equal(trim(res), trim(tgt))
        # check multiple integrations with lbnd
        for i in range(5) :
            for j in range(2,5) :
                pol = [0]*i + [1]
                tgt = pol[:]
                for k in range(j) :
                    tgt = herm.hermint(tgt, m=1, k=[k], lbnd=-1)
                res = herm.hermint(pol, m=j, k=range(j), lbnd=-1)
                assert_almost_equal(trim(res), trim(tgt))
        # check multiple integrations with scaling
        for i in range(5) :
            for j in range(2,5) :
                pol = [0]*i + [1]
                tgt = pol[:]
                for k in range(j) :
                    tgt = herm.hermint(tgt, m=1, k=[k], scl=2)
                res = herm.hermint(pol, m=j, k=range(j), scl=2)
                assert_almost_equal(trim(res), trim(tgt))
    def test_hermder(self) :
        """hermder: argument validation and inversion of hermint."""
        # check exceptions
        assert_raises(ValueError, herm.hermder, [0], .5)
        assert_raises(ValueError, herm.hermder, [0], -1)
        # check that zeroth derivative does nothing
        for i in range(5) :
            tgt = [1] + [0]*i
            res = herm.hermder(tgt, m=0)
            assert_equal(trim(res), trim(tgt))
        # check that derivation is the inverse of integration
        for i in range(5) :
            for j in range(2,5) :
                tgt = [1] + [0]*i
                res = herm.hermder(herm.hermint(tgt, m=j), m=j)
                assert_almost_equal(trim(res), trim(tgt))
        # check derivation with scaling
        for i in range(5) :
            for j in range(2,5) :
                tgt = [1] + [0]*i
                res = herm.hermder(herm.hermint(tgt, m=j, scl=2), m=j, scl=.5)
                assert_almost_equal(trim(res), trim(tgt))
class TestMisc(TestCase):
    """Round-trip, Vandermonde and fitting tests for the hermite helpers."""
    def test_hermfromroots(self):
        """A series built from roots is monic and vanishes at those roots."""
        res = herm.hermfromroots([])
        assert_almost_equal(trim(res), [1])
        for i in range(1,5):
            roots = np.cos(np.linspace(-np.pi, 0, 2*i + 1)[1::2])
            pol = herm.hermfromroots(roots)
            res = herm.hermval(roots, pol)
            tgt = 0
            assert_(len(pol) == i + 1)
            # leading power-basis coefficient must be 1 (monic)
            assert_almost_equal(herm.herm2poly(pol)[-1], 1)
            assert_almost_equal(res, tgt)
    def test_hermroots(self):
        """hermroots inverts hermfromroots."""
        assert_almost_equal(herm.hermroots([1]), [])
        assert_almost_equal(herm.hermroots([1, 1]), [-.5])
        for i in range(2,5):
            tgt = np.linspace(-1, 1, i)
            res = herm.hermroots(herm.hermfromroots(tgt))
            assert_almost_equal(trim(res), trim(tgt))
    def test_hermvander(self):
        """Vandermonde columns equal basis-series evaluations, for 1d and 2d x."""
        # check for 1d x
        x = np.arange(3)
        v = herm.hermvander(x, 3)
        assert_(v.shape == (3,4))
        for i in range(4):
            coef = [0]*i + [1]
            assert_almost_equal(v[...,i], herm.hermval(x, coef))
        # check for 2d x
        x = np.array([[1,2],[3,4],[5,6]])
        v = herm.hermvander(x, 3)
        assert_(v.shape == (3,2,4))
        for i in range(4):
            coef = [0]*i + [1]
            assert_almost_equal(v[...,i], herm.hermval(x, coef))
    def test_hermfit(self):
        """Least-squares fit recovers an exact cubic, with and without weights."""
        def f(x):
            return x*(x - 1)*(x - 2)
        # Test exceptions
        assert_raises(ValueError, herm.hermfit, [1], [1], -1)
        assert_raises(TypeError, herm.hermfit, [[1]], [1], 0)
        assert_raises(TypeError, herm.hermfit, [], [1], 0)
        assert_raises(TypeError, herm.hermfit, [1], [[[1]]], 0)
        assert_raises(TypeError, herm.hermfit, [1, 2], [1], 0)
        assert_raises(TypeError, herm.hermfit, [1], [1, 2], 0)
        assert_raises(TypeError, herm.hermfit, [1], [1], 0, w=[[1]])
        assert_raises(TypeError, herm.hermfit, [1], [1], 0, w=[1,1])
        # Test fit
        x = np.linspace(0,2)
        y = f(x)
        #
        coef3 = herm.hermfit(x, y, 3)
        assert_equal(len(coef3), 4)
        assert_almost_equal(herm.hermval(x, coef3), y)
        #
        coef4 = herm.hermfit(x, y, 4)
        assert_equal(len(coef4), 5)
        assert_almost_equal(herm.hermval(x, coef4), y)
        #
        coef2d = herm.hermfit(x, np.array([y,y]).T, 3)
        assert_almost_equal(coef2d, np.array([coef3,coef3]).T)
        # test weighting
        w = np.zeros_like(x)
        yw = y.copy()
        w[1::2] = 1
        # BUG FIX: zero out the unweighted points in the *weighted* copy
        # ``yw`` (this previously mutated ``y``, leaving the weighted data
        # untouched and defeating the purpose of the check; matches the
        # analogous TestHermiteClass.test_fit below).
        yw[0::2] = 0
        wcoef3 = herm.hermfit(x, yw, 3, w=w)
        assert_almost_equal(wcoef3, coef3)
        #
        wcoef2d = herm.hermfit(x, np.array([yw,yw]).T, 3, w=w)
        assert_almost_equal(wcoef2d, np.array([coef3,coef3]).T)
    def test_hermtrim(self):
        """hermtrim drops trailing coefficients below the tolerance."""
        coef = [2, -1, 1, 0]
        # Test exceptions
        assert_raises(ValueError, herm.hermtrim, coef, -1)
        # Test results
        assert_equal(herm.hermtrim(coef), coef[:-1])
        assert_equal(herm.hermtrim(coef, 1), coef[:-3])
        assert_equal(herm.hermtrim(coef, 2), [0])
    def test_hermline(self):
        """hermline(off, scl) is the Hermite series for off + scl*x."""
        assert_equal(herm.hermline(3,4), [3, 2])
    def test_herm2poly(self):
        """Basis series convert to the tabulated power-basis coefficients."""
        for i in range(10):
            assert_almost_equal(herm.herm2poly([0]*i + [1]), Hlist[i])
    def test_poly2herm(self):
        """poly2herm inverts herm2poly on the tabulated polynomials."""
        for i in range(10):
            assert_almost_equal(herm.poly2herm(Hlist[i]), [0]*i + [1])
def assert_poly_almost_equal(p1, p2):
    """Assert two polynomial objects match: coefficients (approximately)
    and domains (exactly)."""
    coef1, coef2 = p1.coef, p2.coef
    assert_almost_equal(coef1, coef2)
    assert_equal(p1.domain, p2.domain)
class TestHermiteClass(TestCase) :
    """Tests for the object-oriented herm.Hermite convenience class."""
    # shared fixtures: p2 is p1 mapped onto the domain [0, 1]
    p1 = herm.Hermite([1,2,3])
    p2 = herm.Hermite([1,2,3], [0,1])
    p3 = herm.Hermite([1,2])
    p4 = herm.Hermite([2,2,3])
    p5 = herm.Hermite([3,2,3])
    def test_equal(self) :
        """Equality requires matching coefficients AND domain."""
        assert_(self.p1 == self.p1)
        assert_(self.p2 == self.p2)
        assert_(not self.p1 == self.p2)
        assert_(not self.p1 == self.p3)
        assert_(not self.p1 == [1,2,3])
    def test_not_equal(self) :
        """!= is the negation of ==."""
        assert_(not self.p1 != self.p1)
        assert_(not self.p2 != self.p2)
        assert_(self.p1 != self.p2)
        assert_(self.p1 != self.p3)
        assert_(self.p1 != [1,2,3])
    def test_add(self) :
        """Addition, including mixed Hermite/list operands on both sides."""
        tgt = herm.Hermite([2,4,6])
        assert_(self.p1 + self.p1 == tgt)
        assert_(self.p1 + [1,2,3] == tgt)
        assert_([1,2,3] + self.p1 == tgt)
    def test_sub(self) :
        """Subtraction, including mixed operands on both sides."""
        tgt = herm.Hermite([1])
        assert_(self.p4 - self.p1 == tgt)
        assert_(self.p4 - [1,2,3] == tgt)
        assert_([2,2,3] - self.p1 == tgt)
    def test_mul(self) :
        """Multiplication against a precomputed product series."""
        tgt = herm.Hermite([ 81., 52., 82., 12., 9.])
        assert_poly_almost_equal(self.p1 * self.p1, tgt)
        assert_poly_almost_equal(self.p1 * [1,2,3], tgt)
        assert_poly_almost_equal([1,2,3] * self.p1, tgt)
    def test_floordiv(self) :
        """Floor division returns the quotient series."""
        tgt = herm.Hermite([1])
        assert_(self.p4 // self.p1 == tgt)
        assert_(self.p4 // [1,2,3] == tgt)
        assert_([2,2,3] // self.p1 == tgt)
    def test_mod(self) :
        """Modulo returns the remainder series."""
        tgt = herm.Hermite([1])
        assert_((self.p4 % self.p1) == tgt)
        assert_((self.p4 % [1,2,3]) == tgt)
        assert_(([2,2,3] % self.p1) == tgt)
    def test_divmod(self) :
        """divmod returns (quotient, remainder) for all operand mixes."""
        tquo = herm.Hermite([1])
        trem = herm.Hermite([2])
        quo, rem = divmod(self.p5, self.p1)
        assert_(quo == tquo and rem == trem)
        quo, rem = divmod(self.p5, [1,2,3])
        assert_(quo == tquo and rem == trem)
        quo, rem = divmod([3,2,3], self.p1)
        assert_(quo == tquo and rem == trem)
    def test_pow(self) :
        """p**i equals repeated multiplication."""
        tgt = herm.Hermite([1])
        for i in range(5) :
            res = self.p1**i
            assert_(res == tgt)
            tgt = tgt*self.p1
    def test_call(self) :
        """Calling evaluates the series, mapping x through the domain."""
        # domain = [-1, 1]
        x = np.linspace(-1, 1)
        # H0=1, H1=2x, H2=4x^2-2 with coefficients [1,2,3]
        tgt = 3*(4*x**2 - 2) + 2*(2*x) + 1
        assert_almost_equal(self.p1(x), tgt)
        # domain = [0, 1]
        x = np.linspace(0, 1)
        xx = 2*x - 1
        assert_almost_equal(self.p2(x), self.p1(xx))
    def test_degree(self) :
        """degree() is len(coef) - 1."""
        assert_equal(self.p1.degree(), 2)
    def test_cutdeg(self) :
        """cutdeg truncates to at most the given degree; rejects bad args."""
        assert_raises(ValueError, self.p1.cutdeg, .5)
        assert_raises(ValueError, self.p1.cutdeg, -1)
        assert_equal(len(self.p1.cutdeg(3)), 3)
        assert_equal(len(self.p1.cutdeg(2)), 3)
        assert_equal(len(self.p1.cutdeg(1)), 2)
        assert_equal(len(self.p1.cutdeg(0)), 1)
    def test_convert(self) :
        """Converting to a new domain preserves the evaluated values."""
        x = np.linspace(-1,1)
        p = self.p1.convert(domain=[0,1])
        assert_almost_equal(p(x), self.p1(x))
    def test_mapparms(self) :
        """Domain [0,1] maps to the window via x -> -1 + 2*x."""
        parms = self.p2.mapparms()
        assert_almost_equal(parms, [-1, 2])
    def test_trim(self) :
        """trim() drops trailing coefficients below the given tolerance."""
        coef = [1, 1e-6, 1e-12, 0]
        p = herm.Hermite(coef)
        assert_equal(p.trim().coef, coef[:3])
        assert_equal(p.trim(1e-10).coef, coef[:2])
        assert_equal(p.trim(1e-5).coef, coef[:1])
    def test_truncate(self) :
        """truncate(n) keeps at most n coefficients; rejects bad args."""
        assert_raises(ValueError, self.p1.truncate, .5)
        assert_raises(ValueError, self.p1.truncate, 0)
        assert_equal(len(self.p1.truncate(4)), 3)
        assert_equal(len(self.p1.truncate(3)), 3)
        assert_equal(len(self.p1.truncate(2)), 2)
        assert_equal(len(self.p1.truncate(1)), 1)
    def test_copy(self) :
        """copy() compares equal to the original."""
        p = self.p1.copy()
        assert_(self.p1 == p)
    def test_integ(self) :
        """integ() matches hermint, with the domain scaling (scl=.5)."""
        p = self.p2.integ()
        assert_almost_equal(p.coef, herm.hermint([1,2,3], 1, 0, scl=.5))
        p = self.p2.integ(lbnd=0)
        assert_almost_equal(p(0), 0)
        p = self.p2.integ(1, 1)
        assert_almost_equal(p.coef, herm.hermint([1,2,3], 1, 1, scl=.5))
        p = self.p2.integ(2, [1, 2])
        assert_almost_equal(p.coef, herm.hermint([1,2,3], 2, [1,2], scl=.5))
    def test_deriv(self) :
        """deriv() undoes integ()."""
        p = self.p2.integ(2, [1, 2])
        assert_almost_equal(p.deriv(1).coef, self.p2.integ(1, [1]).coef)
        assert_almost_equal(p.deriv(2).coef, self.p2.coef)
    def test_roots(self) :
        """roots() are mapped back into the series' domain."""
        p = herm.Hermite(herm.poly2herm([0, -1, 0, 1]), [0, 1])
        res = p.roots()
        tgt = [0, .5, 1]
        assert_almost_equal(res, tgt)
    def test_linspace(self):
        """linspace() returns sample points and the values there."""
        xdes = np.linspace(0, 1, 20)
        ydes = self.p2(xdes)
        xres, yres = self.p2.linspace(20)
        assert_almost_equal(xres, xdes)
        assert_almost_equal(yres, ydes)
    def test_fromroots(self) :
        """fromroots builds the monic series with the given roots."""
        roots = [0, .5, 1]
        p = herm.Hermite.fromroots(roots, domain=[0, 1])
        res = p.coef
        tgt = herm.poly2herm([0, -1, 0, 1])
        assert_almost_equal(res, tgt)
    def test_fit(self) :
        """fit() honors domain defaults, explicit domains and weights."""
        def f(x) :
            return x*(x - 1)*(x - 2)
        x = np.linspace(0,3)
        y = f(x)
        # test default value of domain
        p = herm.Hermite.fit(x, y, 3)
        assert_almost_equal(p.domain, [0,3])
        # test that fit works in given domains
        p = herm.Hermite.fit(x, y, 3, None)
        assert_almost_equal(p(x), y)
        assert_almost_equal(p.domain, [0,3])
        p = herm.Hermite.fit(x, y, 3, [])
        assert_almost_equal(p(x), y)
        assert_almost_equal(p.domain, [-1, 1])
        # test that fit accepts weights.
        w = np.zeros_like(x)
        yw = y.copy()
        w[1::2] = 1
        yw[0::2] = 0
        p = herm.Hermite.fit(x, yw, 3, w=w)
        assert_almost_equal(p(x), y)
    def test_identity(self) :
        """identity() evaluates to x on any domain."""
        x = np.linspace(0,3)
        p = herm.Hermite.identity()
        assert_almost_equal(p(x), x)
        p = herm.Hermite.identity([1,3])
        assert_almost_equal(p(x), x)
#
if __name__ == "__main__":
    # Run all tests in this module via numpy.testing's module-level runner.
    run_module_suite()
| agpl-3.0 |
psychopy/psychopy | psychopy/hardware/forp.py | 1 | 6704 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Part of the PsychoPy library
# Copyright (C) 2002-2018 Jonathan Peirce (C) 2019-2021 Open Science Tools Ltd.
# Distributed under the terms of the GNU General Public License (GPL).
"""fORP fibre optic (MR-compatible) response devices by CurrentDesigns:
http://www.curdes.com/
This class is only useful when the fORP is connected via the serial port.
If you're connecting via USB, just treat it like a standard keyboard.
E.g., use a Keyboard component, and typically listen for Allowed keys
``'1', '2', '3', '4', '5'``. Or use ``event.getKeys()``.
"""
# Jeremy Gray and Dan Grupe developed the asKeys and baud parameters
from __future__ import absolute_import, print_function
from builtins import object
from psychopy import logging, event
import sys
from collections import defaultdict
try:
import serial
except ImportError:
serial = False
# Integer codes reported for each fORP button.
BUTTON_BLUE = 1
BUTTON_YELLOW = 2
BUTTON_GREEN = 3
BUTTON_RED = 4
BUTTON_TRIGGER = 5
# Maps bit patterns (one bit per button in the status byte) to button codes
BUTTON_MAP = [
    (0x01, BUTTON_BLUE),
    (0x02, BUTTON_YELLOW),
    (0x04, BUTTON_GREEN),
    (0x08, BUTTON_RED),
    (0x10, BUTTON_TRIGGER)]
class ButtonBox(object):
    """Serial line interface to the fORP MRI response box.
    To use this object class, select the box use setting `serialPort`,
    and connect the serial line. To emulate key presses with a serial
    connection, use `getEvents(asKeys=True)` (e.g., to be able to use
    a RatingScale object during scanning). Alternatively connect the USB
    cable and use fORP to emulate a keyboard.
    fORP sends characters at 800Hz, so you should check the buffer
    frequently. Also note that the trigger event from the fORP is
    typically extremely short (occurs for a single 800Hz epoch).
    """
    def __init__(self, serialPort=1, baudrate=19200):
        """
        :Parameters:
            `serialPort` :
                should be a number (where 1=COM1, ...)
            `baud` :
                the communication rate (baud), eg, 57600
        """
        super(ButtonBox, self).__init__()
        if not serial:
            raise ImportError("The module serial is needed to connect to "
                              "fORP. On most systems this can be installed "
                              "with\n\t easy_install pyserial")
        # pyserial numbers ports from 0, hence serialPort - 1
        self.port = serial.Serial(serialPort - 1, baudrate=baudrate,
                                  bytesize=8, parity='N', stopbits=1,
                                  timeout=0.001)
        if not self.port.isOpen():
            self.port.open()
        # per-button pressed state; defaultdict(bool) defaults to False
        self.buttonStatus = defaultdict(bool)
        self.rawEvts = []
        self.pressEvents = []
    def clearBuffer(self):
        """Empty the input buffer of all characters"""
        self.port.flushInput()
    def clearStatus(self):
        """ Resets the pressed statuses, so getEvents will return pressed
        buttons, even if they were already pressed in the last call.
        """
        for k in self.buttonStatus:
            self.buttonStatus[k] = False
    def getEvents(self, returnRaw=False, asKeys=False, allowRepeats=False):
        """Returns a list of unique events (one event per button pressed)
        and also stores a copy of the full list of events since last
        getEvents() (stored as ForpBox.rawEvts)
        `returnRaw` :
            return (not just store) the full event list
        `asKeys` :
            If True, will also emulate pyglet keyboard events, so that
            button 1 will register as a keyboard event with value "1",
            and as such will be detectable using `event.getKeys()`
        `allowRepeats` :
            If True, this will return pressed buttons even if they were held
            down between calls to getEvents(). If the fORP is on the "Eprime"
            setting, you will get a stream of button presses while a button is
            held down. On the "Bitwise" setting, you will get a set of all
            currently pressed buttons every time a button is pressed or
            released.
            This option might be useful if you think your participant may be
            holding the button down before you start checking for presses.
        """
        nToGet = self.port.inWaiting()
        evtStr = self.port.read(nToGet)
        self.rawEvts = []
        self.pressEvents = []
        if allowRepeats:
            # forget held-down state so held buttons report again
            self.clearStatus()
        # for each character convert to an ordinal int value (from the ascii
        # chr)
        for thisChr in evtStr:
            pressCode = ord(thisChr)
            self.rawEvts.append(pressCode)
            decodedEvents = self._generateEvents(pressCode)
            self.pressEvents += decodedEvents
            if asKeys:
                for code in decodedEvents:
                    event._onPygletKey(symbol=code, modifiers=0)
                    # better as: emulated='fORP_bbox_asKey', but need to
                    # adjust event._onPygletKey and the symbol conversion
                    # pyglet.window.key.symbol_string(symbol).lower()
        # return the abbreviated list if necessary
        if returnRaw:
            return self.rawEvts
        else:
            return self.getUniqueEvents()
    def _generateEvents(self, pressCode):
        """For a given button press, returns a list buttons that went from
        unpressed to pressed.
        Also flags any unpressed buttons as unpressed.
        `pressCode` :
            a number with a bit set for every button currently pressed.
        """
        curStatuses = self.__class__._decodePress(pressCode)
        pressEvents = []
        for button, pressed in curStatuses:
            if pressed and not self.buttonStatus[button]:
                # We're transitioning to pressed...
                pressEvents.append(button)
                self.buttonStatus[button] = True
            if not pressed:
                self.buttonStatus[button] = False
        return pressEvents
    @classmethod
    def _decodePress(kls, pressCode):
        """Returns a list of buttons and whether they're pressed, given a
        character code.
        `pressCode` :
            A number with a bit set for every button currently pressed. Will
            be between 0 and 31.
        """
        return [(mapping[1], bool(mapping[0] & pressCode))
                for mapping in BUTTON_MAP]
    def getUniqueEvents(self, fullEvts=False):
        """Returns a Python set of the unique (unordered) events of either
        a list given or the current rawEvts buffer
        """
        if fullEvts:
            return set(self.rawEvts)
        return set(self.pressEvents)
| gpl-3.0 |
axbaretto/beam | sdks/python/.tox/lint/lib/python2.7/site-packages/google/auth/compute_engine/credentials.py | 14 | 4516 | # Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Google Compute Engine credentials.
This module provides authentication for application running on Google Compute
Engine using the Compute Engine metadata server.
"""
from google.auth import credentials
from google.auth import exceptions
from google.auth.compute_engine import _metadata
class Credentials(credentials.Scoped, credentials.Credentials):
    """Compute Engine Credentials.
    These credentials use the Google Compute Engine metadata server to obtain
    OAuth 2.0 access tokens associated with the instance's service account.
    For more information about Compute Engine authentication, including how
    to configure scopes, see the `Compute Engine authentication
    documentation`_.
    .. note:: Compute Engine instances can be created with scopes and therefore
        these credentials are considered to be 'scoped'. However, you can
        not use :meth:`~google.auth.credentials.ScopedCredentials.with_scopes`
        because it is not possible to change the scopes that the instance
        has. Also note that
        :meth:`~google.auth.credentials.ScopedCredentials.has_scopes` will not
        work until the credentials have been refreshed.
    .. _Compute Engine authentication documentation:
        https://cloud.google.com/compute/docs/authentication#using
    """
    def __init__(self, service_account_email='default'):
        """
        Args:
            service_account_email (str): The service account email to use, or
                'default'. A Compute Engine instance may have multiple service
                accounts.
        """
        super(Credentials, self).__init__()
        self._service_account_email = service_account_email
    def _retrieve_info(self, request):
        """Retrieve information about the service account.
        Updates the scopes and retrieves the full service account email.
        Args:
            request (google.auth.transport.Request): The object used to make
                HTTP requests.
        """
        info = _metadata.get_service_account_info(
            request,
            service_account=self._service_account_email)
        # 'default' is resolved to the actual account email here
        self._service_account_email = info['email']
        self._scopes = info['scopes']
    def refresh(self, request):
        """Refresh the access token and scopes.
        Args:
            request (google.auth.transport.Request): The object used to make
                HTTP requests.
        Raises:
            google.auth.exceptions.RefreshError: If the Compute Engine metadata
                service can't be reached or if the instance has no
                credentials.
        """
        try:
            self._retrieve_info(request)
            self.token, self.expiry = _metadata.get_service_account_token(
                request,
                service_account=self._service_account_email)
        except exceptions.TransportError as exc:
            raise exceptions.RefreshError(exc)
    @property
    def service_account_email(self):
        """The service account email.
        .. note: This is not guaranteed to be set until :meth`refresh` has been
            called.
        """
        return self._service_account_email
    @property
    def requires_scopes(self):
        """False: Compute Engine credentials can not be scoped."""
        return False
    def with_scopes(self, scopes):
        """Unavailable, Compute Engine credentials can not be scoped.
        Scopes can only be set at Compute Engine instance creation time.
        See the `Compute Engine authentication documentation`_ for details on
        how to configure instance scopes.
        .. _Compute Engine authentication documentation:
            https://cloud.google.com/compute/docs/authentication#using
        """
        raise NotImplementedError(
            'Compute Engine credentials can not set scopes. Scopes must be '
            'set when the Compute Engine instance is created.')
Thingee/cinder | cinder/common/config.py | 1 | 8272 | # Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
# Copyright 2012 Red Hat, Inc.
# Copyright 2013 NTT corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Command-line flag library.
Emulates gflags by wrapping cfg.ConfigOpts.
The idea is to move fully to cfg eventually, and this wrapper is a
stepping stone.
"""
import socket
from oslo.config import cfg
from cinder.openstack.common.gettextutils import _
CONF = cfg.CONF
def _get_my_ip():
"""
Returns the actual ip of the local machine.
This code figures out what source address would be used if some traffic
were to be sent out to some well known address on the Internet. In this
case, a Google DNS server is used, but the specific address does not
matter much. No traffic is actually sent.
"""
try:
csock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
csock.connect(('8.8.8.8', 80))
(addr, port) = csock.getsockname()
csock.close()
return addr
except socket.error:
return "127.0.0.1"
# Options that are registered as command-line flags (as well as config-file
# options) on the global CONF object.
core_opts = [
    cfg.StrOpt('api_paste_config',
               default="api-paste.ini",
               help='File name for the paste.deploy config for cinder-api'),
    cfg.StrOpt('state_path',
               default='/var/lib/cinder',
               deprecated_name='pybasedir',
               help="Top-level directory for maintaining cinder's state"), ]
# currently empty; kept so debug flags can be added without churn
debug_opts = [
]
CONF.register_cli_opts(core_opts)
CONF.register_cli_opts(debug_opts)
# General options registered on the global CONF object (config file only,
# not command-line flags).
global_opts = [
    cfg.StrOpt('my_ip',
               default=_get_my_ip(),
               help='IP address of this host'),
    cfg.StrOpt('glance_host',
               default='$my_ip',
               help='Default glance host name or IP'),
    cfg.IntOpt('glance_port',
               default=9292,
               help='Default glance port'),
    cfg.ListOpt('glance_api_servers',
                default=['$glance_host:$glance_port'],
                help='A list of the glance API servers available to cinder '
                     '([hostname|ip]:port)'),
    cfg.IntOpt('glance_api_version',
               default=1,
               help='Version of the glance API to use'),
    cfg.IntOpt('glance_num_retries',
               default=0,
               help='Number retries when downloading an image from glance'),
    cfg.BoolOpt('glance_api_insecure',
                default=False,
                help='Allow to perform insecure SSL (https) requests to '
                     'glance'),
    cfg.BoolOpt('glance_api_ssl_compression',
                default=False,
                help='Enables or disables negotiation of SSL layer '
                     'compression. In some cases disabling compression '
                     'can improve data throughput, such as when high '
                     'network bandwidth is available and you use '
                     'compressed image formats like qcow2.'),
    cfg.IntOpt('glance_request_timeout',
               default=None,
               help='http/https timeout value for glance operations. If no '
                    'value (None) is supplied here, the glanceclient default '
                    'value is used.'),
    cfg.StrOpt('scheduler_topic',
               default='cinder-scheduler',
               help='The topic that scheduler nodes listen on'),
    cfg.StrOpt('volume_topic',
               default='cinder-volume',
               help='The topic that volume nodes listen on'),
    cfg.StrOpt('backup_topic',
               default='cinder-backup',
               help='The topic that volume backup nodes listen on'),
    cfg.BoolOpt('enable_v1_api',
                default=True,
                help=_("Deploy v1 of the Cinder API.")),
    cfg.BoolOpt('enable_v2_api',
                default=True,
                help=_("Deploy v2 of the Cinder API.")),
    cfg.BoolOpt('api_rate_limit',
                default=True,
                help='Enables or disables rate limit of the API.'),
    cfg.ListOpt('osapi_volume_ext_list',
                default=[],
                help='Specify list of extensions to load when using osapi_'
                     'volume_extension option with cinder.api.contrib.'
                     'select_extensions'),
    cfg.MultiStrOpt('osapi_volume_extension',
                    default=['cinder.api.contrib.standard_extensions'],
                    help='osapi volume extension to load'),
    cfg.StrOpt('volume_manager',
               default='cinder.volume.manager.VolumeManager',
               help='Full class name for the Manager for volume'),
    cfg.StrOpt('backup_manager',
               default='cinder.backup.manager.BackupManager',
               help='Full class name for the Manager for volume backup'),
    cfg.StrOpt('scheduler_manager',
               default='cinder.scheduler.manager.SchedulerManager',
               help='Full class name for the Manager for scheduler'),
    cfg.StrOpt('host',
               default=socket.gethostname(),
               help='Name of this node. This can be an opaque identifier. '
                    'It is not necessarily a host name, FQDN, or IP address.'),
    # NOTE(vish): default to nova for compatibility with nova installs
    cfg.StrOpt('storage_availability_zone',
               default='nova',
               help='Availability zone of this node'),
    cfg.StrOpt('default_availability_zone',
               default=None,
               help='Default availability zone for new volumes. If not set, '
                    'the storage_availability_zone option value is used as '
                    'the default for new volumes.'),
    cfg.StrOpt('default_volume_type',
               default=None,
               help='Default volume type to use'),
    cfg.StrOpt('volume_usage_audit_period',
               help='Time period for which to generate volume usages. '
                    'The options are hour, day, month, or year.'),
    cfg.StrOpt('rootwrap_config',
               default='/etc/cinder/rootwrap.conf',
               help='Path to the rootwrap configuration file to use for '
                    'running commands as root'),
    cfg.BoolOpt('monkey_patch',
                default=False,
                help='Enable monkey patching'),
    cfg.ListOpt('monkey_patch_modules',
                default=[],
                help='List of modules/decorators to monkey patch'),
    cfg.IntOpt('service_down_time',
               default=60,
               help='Maximum time since last check-in for a service to be '
                    'considered up'),
    cfg.StrOpt('volume_api_class',
               default='cinder.volume.api.API',
               help='The full class name of the volume API class to use'),
    cfg.StrOpt('backup_api_class',
               default='cinder.backup.api.API',
               help='The full class name of the volume backup API class'),
    cfg.StrOpt('auth_strategy',
               default='noauth',
               help='The strategy to use for auth. Supports noauth, keystone, '
                    'and deprecated.'),
    cfg.ListOpt('enabled_backends',
                default=None,
                help='A list of backend names to use. These backend names '
                     'should be backed by a unique [CONFIG] group '
                     'with its options'),
    cfg.BoolOpt('no_snapshot_gb_quota',
                default=False,
                help='Whether snapshots count against GigaByte quota'),
    cfg.StrOpt('transfer_api_class',
               default='cinder.transfer.api.API',
               help='The full class name of the volume transfer API class'), ]
CONF.register_opts(global_opts)
tawsifkhan/scikit-learn | sklearn/datasets/tests/test_lfw.py | 230 | 7880 | """This test for the LFW require medium-size data dowloading and processing
If the data has not been already downloaded by running the examples,
the tests won't run (skipped).
If the test are run, the first execution will be long (typically a bit
more than a couple of minutes) but as the dataset loader is leveraging
joblib, successive runs will be fast (less than 200ms).
"""
import random
import os
import shutil
import tempfile
import numpy as np
from sklearn.externals import six
try:
try:
from scipy.misc import imsave
except ImportError:
from scipy.misc.pilutil import imsave
except ImportError:
imsave = None
from sklearn.datasets import load_lfw_pairs
from sklearn.datasets import load_lfw_people
from sklearn.datasets import fetch_lfw_pairs
from sklearn.datasets import fetch_lfw_people
from sklearn.utils.testing import assert_array_equal
from sklearn.utils.testing import assert_equal
from sklearn.utils.testing import assert_warns_message
from sklearn.utils.testing import SkipTest
from sklearn.utils.testing import raises
# Scratch directories serving as the (fake) scikit-learn data homes for these
# tests; removed again in teardown_module.
SCIKIT_LEARN_DATA = tempfile.mkdtemp(prefix="scikit_learn_lfw_test_")
SCIKIT_LEARN_EMPTY_DATA = tempfile.mkdtemp(prefix="scikit_learn_empty_test_")
LFW_HOME = os.path.join(SCIKIT_LEARN_DATA, 'lfw_home')
# Fake person names used to generate the random face images.
FAKE_NAMES = [
    'Abdelatif_Smith',
    'Abhati_Kepler',
    'Camara_Alvaro',
    'Chen_Dupont',
    'John_Lee',
    'Lin_Bauman',
    'Onur_Lopez',
]
def setup_module():
    """Test fixture run once and common to all tests of this module"""
    if imsave is None:
        raise SkipTest("PIL not installed.")
    if not os.path.exists(LFW_HOME):
        os.makedirs(LFW_HOME)
    # seeded RNGs so the fake dataset is reproducible across runs
    random_state = random.Random(42)
    np_rng = np.random.RandomState(42)
    # generate some random jpeg files for each person
    counts = {}
    for name in FAKE_NAMES:
        folder_name = os.path.join(LFW_HOME, 'lfw_funneled', name)
        if not os.path.exists(folder_name):
            os.makedirs(folder_name)
        n_faces = np_rng.randint(1, 5)
        counts[name] = n_faces
        for i in range(n_faces):
            file_path = os.path.join(folder_name, name + '_%04d.jpg' % i)
            # random 250x250 RGB noise image
            uniface = np_rng.randint(0, 255, size=(250, 250, 3))
            try:
                imsave(file_path, uniface)
            except ImportError:
                raise SkipTest("PIL not installed")
    # add some random file pollution to test robustness
    with open(os.path.join(LFW_HOME, 'lfw_funneled', '.test.swp'), 'wb') as f:
        f.write(six.b('Text file to be ignored by the dataset loader.'))
    # generate some pairing metadata files using the same format as LFW
    with open(os.path.join(LFW_HOME, 'pairsDevTrain.txt'), 'wb') as f:
        f.write(six.b("10\n"))
        more_than_two = [name for name, count in six.iteritems(counts)
                         if count >= 2]
        # 5 "same person" pairs (two distinct images of one person)...
        for i in range(5):
            name = random_state.choice(more_than_two)
            first, second = random_state.sample(range(counts[name]), 2)
            f.write(six.b('%s\t%d\t%d\n' % (name, first, second)))
        # ...and 5 "different person" pairs
        for i in range(5):
            first_name, second_name = random_state.sample(FAKE_NAMES, 2)
            first_index = random_state.choice(np.arange(counts[first_name]))
            second_index = random_state.choice(np.arange(counts[second_name]))
            f.write(six.b('%s\t%d\t%s\t%d\n' % (first_name, first_index,
                                                second_name, second_index)))
    with open(os.path.join(LFW_HOME, 'pairsDevTest.txt'), 'wb') as f:
        f.write(six.b("Fake place holder that won't be tested"))
    with open(os.path.join(LFW_HOME, 'pairs.txt'), 'wb') as f:
        f.write(six.b("Fake place holder that won't be tested"))
def teardown_module():
    """Test fixture (clean up) run once after all tests of this module"""
    # Wipe every scratch directory the fixture may have created.
    for scratch_dir in (SCIKIT_LEARN_DATA, SCIKIT_LEARN_EMPTY_DATA):
        if os.path.isdir(scratch_dir):
            shutil.rmtree(scratch_dir)
@raises(IOError)
def test_load_empty_lfw_people():
    # With an empty data home and downloads disabled, fetching must fail.
    fetch_lfw_people(download_if_missing=False,
                     data_home=SCIKIT_LEARN_EMPTY_DATA)
def test_load_lfw_people_deprecation():
    # The deprecated alias must emit exactly this warning text.
    expected_msg = (
        "Function 'load_lfw_people' has been deprecated in 0.17 and will be "
        "removed in 0.19."
        "Use fetch_lfw_people(download_if_missing=False) instead.")
    assert_warns_message(DeprecationWarning, expected_msg, load_lfw_people,
                         data_home=SCIKIT_LEARN_DATA)
def test_load_fake_lfw_people():
    """Check fetch_lfw_people against the synthetic fixture dataset."""
    people = fetch_lfw_people(data_home=SCIKIT_LEARN_DATA,
                              min_faces_per_person=3,
                              download_if_missing=False)

    # By default faces are cropped to a rectangular bounding box around the
    # center and converted to gray levels.
    assert_equal(people.images.shape, (10, 62, 47))
    assert_equal(people.data.shape, (10, 2914))

    # The target is an array of integer person ids.
    assert_array_equal(people.target, [2, 0, 1, 0, 2, 0, 2, 1, 1, 2])

    # Person names are recoverable through the target_names array.
    assert_array_equal(
        people.target_names,
        ['Abdelatif Smith', 'Abhati Kepler', 'Onur Lopez'])

    # The original data can also be requested without cropping, color
    # conversion, or a minimum number of pictures per person.
    people = fetch_lfw_people(data_home=SCIKIT_LEARN_DATA, resize=None,
                              slice_=None, color=True,
                              download_if_missing=False)
    assert_equal(people.images.shape, (17, 250, 250, 3))

    # The ids and class names are the same as previously.
    assert_array_equal(people.target,
                       [0, 0, 1, 6, 5, 6, 3, 6, 0, 3, 6, 1, 2, 4, 5, 1, 2])
    assert_array_equal(people.target_names,
                       ['Abdelatif Smith', 'Abhati Kepler', 'Camara Alvaro',
                        'Chen Dupont', 'John Lee', 'Lin Bauman', 'Onur Lopez'])
@raises(ValueError)
def test_load_fake_lfw_people_too_restrictive():
    # No fixture person has 100 faces, so this filter leaves nothing.
    fetch_lfw_people(data_home=SCIKIT_LEARN_DATA,
                     download_if_missing=False, min_faces_per_person=100)
@raises(IOError)
def test_load_empty_lfw_pairs():
    # With an empty data home and downloads disabled, fetching must fail.
    fetch_lfw_pairs(download_if_missing=False,
                    data_home=SCIKIT_LEARN_EMPTY_DATA)
def test_load_lfw_pairs_deprecation():
    # The deprecated alias must emit exactly this warning text.
    expected_msg = (
        "Function 'load_lfw_pairs' has been deprecated in 0.17 and will be "
        "removed in 0.19."
        "Use fetch_lfw_pairs(download_if_missing=False) instead.")
    assert_warns_message(DeprecationWarning, expected_msg, load_lfw_pairs,
                         data_home=SCIKIT_LEARN_DATA)
def test_load_fake_lfw_pairs():
    """Check fetch_lfw_pairs against the synthetic fixture dataset."""
    pairs_train = fetch_lfw_pairs(data_home=SCIKIT_LEARN_DATA,
                                  download_if_missing=False)

    # By default faces are cropped to a rectangular bounding box around the
    # center and converted to gray levels.
    assert_equal(pairs_train.pairs.shape, (10, 2, 62, 47))

    # The target says whether the two pictures show the same person.
    assert_array_equal(pairs_train.target, [1, 1, 1, 1, 1, 0, 0, 0, 0, 0])

    # Class names are exposed through target_names.
    expected_classes = ['Different persons', 'Same person']
    assert_array_equal(pairs_train.target_names, expected_classes)

    # The original data can also be requested without cropping or color
    # conversion.
    pairs_train = fetch_lfw_pairs(data_home=SCIKIT_LEARN_DATA, resize=None,
                                  slice_=None, color=True,
                                  download_if_missing=False)
    assert_equal(pairs_train.pairs.shape, (10, 2, 250, 250, 3))

    # The ids and class names are the same as previously.
    assert_array_equal(pairs_train.target, [1, 1, 1, 1, 1, 0, 0, 0, 0, 0])
    assert_array_equal(pairs_train.target_names, expected_classes)
| bsd-3-clause |
AnishShah/tensorflow | tensorflow/python/eager/tensor_test.py | 5 | 12902 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Unit tests for TensorFlow "Eager" Mode's Tensor class."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import copy
import re
import numpy as np
from tensorflow.python import pywrap_tensorflow
from tensorflow.python.eager import context
from tensorflow.python.eager import core
from tensorflow.python.eager import test
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
def _create_tensor(value, device=None, dtype=None):
    """Build an ops.EagerTensor directly, mapping runtime failures to
    Python exceptions.

    Args:
        value: the Python / numpy value to wrap.
        device: target device name; defaults to the current context device.
        dtype: optional dtypes.DType; converted to its enum for the C API.
    """
    ctx = context.context()
    if device is None:
        device = ctx.device_name
    if dtype is not None:
        dtype = dtype.as_datatype_enum
    try:
        return ops.EagerTensor(
            value, context=ctx._handle, device=device, dtype=dtype)
    except core._NotOkStatusException as e:  # pylint: disable=protected-access
        raise core._status_to_exception(e.code, e.message)
class TFETensorTest(test_util.TensorFlowTestCase):
    """Behavioral tests for eager-mode EagerTensor construction, dtype
    inference, numpy interop and str/repr formatting."""

    def testScalarTensor(self):
        t = _create_tensor(3, dtype=dtypes.int32)
        self.assertAllEqual(t, _create_tensor(np.array(3)))
        self.assertEqual(dtypes.int32, t.dtype)
        self.assertEqual(0, t.shape.ndims)
        self.assertAllEqual([], t.shape.as_list())
        self.assertIn("tf.Tensor", str(t))
        self.assertIn("tf.Tensor", repr(t))

    def testBadConstructorArgs(self):
        ctx = context.context()
        handle = ctx._handle
        device = ctx.device_name
        # Missing context.
        with self.assertRaisesRegexp(
                TypeError, r"Required argument 'context' \(pos 2\) not found"):
            ops.EagerTensor(1, device=device)
        # Missing device.
        with self.assertRaisesRegexp(
                TypeError, r"Required argument 'device' \(pos 3\) not found"):
            ops.EagerTensor(1, context=handle)
        # Bad dtype type.
        with self.assertRaisesRegexp(TypeError,
                                     "Expecting a DataType value for dtype. Got"):
            ops.EagerTensor(1, context=handle, device=device, dtype="1")
        # Following errors happen when trying to copy to GPU.
        if not context.context().num_gpus():
            self.skipTest("No GPUs found")
        with ops.device("/device:GPU:0"):
            device = ctx.device_name
            # Bad context.
            with self.assertRaisesRegexp(
                    TypeError, "Expecting a PyCapsule encoded context handle. Got"):
                ops.EagerTensor(1.0, context=1, device=device)
            # Bad device.
            with self.assertRaisesRegexp(
                    TypeError, "Error parsing device argument to CopyToDevice"):
                ops.EagerTensor(1.0, context=handle, device=1)

    def testNumpyValue(self):
        values = np.array([3.0])
        t = _create_tensor(values)
        self.assertAllEqual(values, t)

    def testNumpyValueWithCast(self):
        values = np.array([3.0], dtype=np.float32)
        t = _create_tensor(values, dtype=dtypes.float64)
        self.assertAllEqual(values, t)
        ctx = context.context()
        # Bad dtype value.
        with self.assertRaisesRegexp(TypeError, "Invalid dtype argument value"):
            ops.EagerTensor(
                values, context=ctx._handle, device=ctx.device_name, dtype=12345)

    def testNumpyOrderHandling(self):
        # Column-major (Fortran-order) input must still read back row-major.
        n = np.array([[1, 2], [3, 4]], order="F")
        t = _create_tensor(n)
        self.assertAllEqual([[1, 2], [3, 4]], t)

    def testNumpyArrayDtype(self):
        tensor = constant_op.constant([1.0, 2.0, 3.0])
        numpy_tensor = np.asarray(tensor, dtype=np.int32)
        self.assertAllEqual(numpy_tensor, [1, 2, 3])

    def testNdimsAgreesWithNumpy(self):
        numpy_tensor = np.asarray(1.0)
        tensor = constant_op.constant(numpy_tensor)
        self.assertAllEqual(numpy_tensor.ndim, tensor.ndim)

        numpy_tensor = np.asarray([1.0, 2.0, 3.0])
        tensor = constant_op.constant(numpy_tensor)
        self.assertAllEqual(numpy_tensor.ndim, tensor.ndim)

        numpy_tensor = np.asarray([[1.0, 2.0, 3.0], [1.0, 2.0, 3.0]])
        tensor = constant_op.constant(numpy_tensor)
        self.assertAllEqual(numpy_tensor.ndim, tensor.ndim)

    def testCopy(self):
        # Both shallow and deep copies must survive deletion of the copy.
        t = constant_op.constant(1.0)
        tt = copy.copy(t)
        self.assertAllEqual(tt, 1.0)
        del tt
        tt = copy.deepcopy(t)
        self.assertAllEqual(tt, 1.0)
        del tt
        self.assertAllEqual(t, 1.0)

    def testConstantDtype(self):
        self.assertEqual(constant_op.constant(1.0, dtype=np.int64).dtype,
                         dtypes.int64)

    def testTensorAndNumpyMatrix(self):
        expected = np.array([[1.0, 2.0], [3.0, 4.0]], np.float32)
        actual = _create_tensor([[1.0, 2.0], [3.0, 4.0]])
        self.assertAllEqual(expected, actual)
        self.assertEqual(np.float32, actual.dtype)
        self.assertEqual(dtypes.float32, actual.dtype)
        self.assertAllEqual([2, 2], actual.shape.as_list())

    def testFloatDowncast(self):
        # Unless explicitly specified, float64->float32
        t = _create_tensor(3.0)
        self.assertEqual(dtypes.float32, t.dtype)
        t = _create_tensor(3.0, dtype=dtypes.float64)
        self.assertEqual(dtypes.float64, t.dtype)

    def testBool(self):
        # A False scalar tensor must be falsy in a Python boolean context.
        t = _create_tensor(False)
        if t:
            self.assertFalse(True)

    def testIntDowncast(self):
        t = _create_tensor(3)
        self.assertEqual(dtypes.int32, t.dtype)
        t = _create_tensor(3, dtype=dtypes.int64)
        self.assertEqual(dtypes.int64, t.dtype)
        # Values too large for int32 are promoted automatically.
        t = _create_tensor(2**33)
        self.assertEqual(dtypes.int64, t.dtype)

    def testTensorCreationFailure(self):
        with self.assertRaises(ValueError):
            # Should fail because the each row of the Python object has a different
            # number of columns.
            self.assertEqual(None, _create_tensor([[1], [1, 2]]))

    def testMultiLineTensorStr(self):
        t = _create_tensor(np.eye(3))
        tensor_str = str(t)
        self.assertIn("shape=%s, dtype=%s" % (t.shape, t.dtype.name), tensor_str)
        self.assertIn(str(t), tensor_str)

    def testMultiLineTensorRepr(self):
        t = _create_tensor(np.eye(3))
        tensor_repr = repr(t)
        self.assertTrue(tensor_repr.startswith("<"))
        self.assertTrue(tensor_repr.endswith(">"))
        self.assertIn("id=%d, shape=%s, dtype=%s, numpy=\n%r" %
                      (t._id, t.shape, t.dtype.name, t.numpy()), tensor_repr)

    def testTensorStrReprObeyNumpyPrintOptions(self):
        orig_threshold = np.get_printoptions()["threshold"]
        orig_edgeitems = np.get_printoptions()["edgeitems"]
        np.set_printoptions(threshold=2, edgeitems=1)

        t = _create_tensor(np.arange(10, dtype=np.int32))
        self.assertTrue(re.match(r".*\[.*0.*\.\.\..*9.*\]", str(t)))
        self.assertTrue(re.match(r".*\[.*0.*\.\.\..*9.*\]", repr(t)))

        # Clean up: reset to previous printoptions.
        np.set_printoptions(threshold=orig_threshold, edgeitems=orig_edgeitems)

    def testZeroDimTensorStr(self):
        t = _create_tensor(42)
        self.assertIn("42, shape=(), dtype=int32", str(t))

    def testZeroDimTensorRepr(self):
        t = _create_tensor(42)
        self.assertTrue(repr(t).startswith("<"))
        self.assertTrue(repr(t).endswith(">"))
        self.assertIn("id=%d, shape=(), dtype=int32, numpy=42" % t._id, repr(t))

    def testZeroSizeTensorStr(self):
        t = _create_tensor(np.zeros(0, dtype=np.float32))
        self.assertIn("[], shape=(0,), dtype=float32", str(t))

    def testZeroSizeTensorRepr(self):
        t = _create_tensor(np.zeros(0, dtype=np.float32))
        self.assertTrue(repr(t).startswith("<"))
        self.assertTrue(repr(t).endswith(">"))
        self.assertIn("id=%d, shape=(0,), dtype=float32, numpy=%r" % (t._id,
                                                                     t.numpy()),
                      repr(t))

    def testStringTensor(self):
        t_np_orig = np.array([[b"a", b"ab"], [b"abc", b"abcd"]])
        t = _create_tensor(t_np_orig)
        t_np = t.numpy()
        self.assertTrue(np.all(t_np == t_np_orig), "%s vs %s" % (t_np, t_np_orig))

    def testIterateOverTensor(self):
        l = [[1, 2], [3, 4]]
        t = _create_tensor(l)
        for list_element, tensor_element in zip(l, t):
            self.assertAllEqual(list_element, tensor_element.numpy())

    def testStringTensorOnGPU(self):
        if not context.context().num_gpus():
            self.skipTest("No GPUs found")
        with ops.device("/device:GPU:0"):
            with self.assertRaisesRegexp(
                    RuntimeError, "Can't copy Tensor with type string to device"):
                _create_tensor("test string")
class TFETensorUtilTest(test_util.TensorFlowTestCase):
    """Tests for TFE_Py_TensorShapeSlice argument validation plus a few
    dtype-inference and dir() behaviors."""

    def testListOfThree(self):
        t1 = _create_tensor([[1, 2], [3, 4], [5, 6]], dtype=dtypes.int32)
        t2 = _create_tensor([[1, 2, 5], [3, 4, 5]], dtype=dtypes.int32)
        t3 = _create_tensor([[1], [3], [5], [6]], dtype=dtypes.int32)

        # Slicing dimension 0 collects each tensor's dim-0 size, etc.
        r = pywrap_tensorflow.TFE_Py_TensorShapeSlice([t1, t2, t3], 0)
        self.assertAllEqual(np.array([3, 2, 4]), r.numpy())

        r = pywrap_tensorflow.TFE_Py_TensorShapeSlice([t1, t2, t3], 1)
        self.assertAllEqual(np.array([2, 3, 1]), r.numpy())

    def testEmptyTensorList(self):
        a = pywrap_tensorflow.TFE_Py_TensorShapeSlice([], 0)
        self.assertTrue(isinstance(a, ops.EagerTensor))
        self.assertEqual(0, a.numpy().size)

    def testTensorListContainsNonTensors(self):
        t1 = _create_tensor([1, 2], dtype=dtypes.int32)

        with self.assertRaisesRegexp(
                TypeError,
                r"Expected a list of EagerTensors but element 1 has type \"str\""):
            pywrap_tensorflow.TFE_Py_TensorShapeSlice([t1, "abc"], 0)

        with self.assertRaisesRegexp(
                TypeError,
                r"Expected a list of EagerTensors but element 0 has type \"int\""):
            pywrap_tensorflow.TFE_Py_TensorShapeSlice([2, t1], 0)

    def testTensorListNotList(self):
        t1 = _create_tensor([1, 2], dtype=dtypes.int32)

        with self.assertRaisesRegexp(
                TypeError,
                r"tensors argument must be a list or a tuple. Got.*EagerTensor"):
            pywrap_tensorflow.TFE_Py_TensorShapeSlice(t1, -2)

    def testNegativeSliceDim(self):
        t1 = _create_tensor([1, 2], dtype=dtypes.int32)

        with self.assertRaisesRegexp(
                ValueError,
                r"Slice dimension must be non-negative. Got -2"):
            pywrap_tensorflow.TFE_Py_TensorShapeSlice([t1], -2)

    def testUnicode(self):
        self.assertEqual(constant_op.constant(u"asdf").numpy(), b"asdf")

    def testFloatTensor(self):
        self.assertEqual(dtypes.float64, _create_tensor(np.float64()).dtype)
        self.assertEqual(dtypes.float32, _create_tensor(np.float32()).dtype)
        self.assertEqual(dtypes.float16, _create_tensor(np.float16()).dtype)
        # Plain Python floats default to float32.
        self.assertEqual(dtypes.float32, _create_tensor(0.0).dtype)

    def testSliceDimOutOfRange(self):
        t1 = _create_tensor([[1, 2], [3, 4], [5, 6]], dtype=dtypes.int32)
        t2 = _create_tensor([1, 2], dtype=dtypes.int32)
        t3 = _create_tensor(2, dtype=dtypes.int32)

        with self.assertRaisesRegexp(
                IndexError,
                r"Slice dimension \(2\) must be smaller than rank of all tensors, "
                "but tensor at index 0 has rank 2"):
            pywrap_tensorflow.TFE_Py_TensorShapeSlice([t1], 2)

        with self.assertRaisesRegexp(
                IndexError,
                r"Slice dimension \(1\) must be smaller than rank of all tensors, "
                "but tensor at index 0 has rank 1"):
            pywrap_tensorflow.TFE_Py_TensorShapeSlice([t2], 1)

        with self.assertRaisesRegexp(
                IndexError,
                r"Slice dimension \(1\) must be smaller than rank of all tensors, "
                "but tensor at index 1 has rank 1"):
            pywrap_tensorflow.TFE_Py_TensorShapeSlice([t1, t2], 1)

        with self.assertRaisesRegexp(
                IndexError,
                r"Slice dimension \(0\) must be smaller than rank of all tensors, "
                "but tensor at index 0 has rank 0"):
            pywrap_tensorflow.TFE_Py_TensorShapeSlice([t3], 0)

        with self.assertRaisesRegexp(
                IndexError,
                r"Slice dimension \(0\) must be smaller than rank of all tensors, "
                "but tensor at index 2 has rank 0"):
            pywrap_tensorflow.TFE_Py_TensorShapeSlice([t2, t1, t3], 0)

    @test_util.assert_no_new_pyobjects_executing_eagerly
    def testTensorDir(self):
        t = array_ops.zeros(1)
        t.test_attr = "Test"

        instance_dir = dir(t)
        type_dir = dir(ops.EagerTensor)

        # Monkey patched attributes should show up in dir(t)
        self.assertIn("test_attr", instance_dir)
        instance_dir.remove("test_attr")
        self.assertEqual(instance_dir, type_dir)


if __name__ == "__main__":
    test.main()
| apache-2.0 |
ujenmr/ansible | lib/ansible/modules/storage/purestorage/purefa_ntp.py | 14 | 3550 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2018, Simon Dodsley (simon@purestorage.com)
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: purefa_ntp
version_added: '2.8'
short_description: Configure Pure Storage FlashArray NTP settings
description:
- Set or erase NTP configuration for Pure Storage FlashArrays.
author:
- Simon Dodsley (@sdodsley)
options:
state:
description:
- Create or delete NTP servers configuration
default: present
choices: [ absent, present ]
ntp_servers:
description:
- A list of up to 4 alternate NTP servers. These may include IPv4,
IPv6 or FQDNs. Invalid IP addresses will cause the module to fail.
No validation is performed for FQDNs.
- If more than 4 servers are provided, only the first 4 unique
nameservers will be used.
- if no servers are given a default of I(0.pool.ntp.org) will be used.
extends_documentation_fragment:
- purestorage.fa
'''
# Usage examples shown by ansible-doc.  Fixes the "exisitng" typo in the
# first task name.
EXAMPLES = r'''
- name: Delete existing NTP server entries
  purefa_ntp:
    state: absent
    fa_url: 10.10.10.2
    api_token: e31060a7-21fc-e277-6240-25983c6c4592

- name: Set array NTP servers
  purefa_ntp:
    state: present
    ntp_servers:
      - "0.pool.ntp.org"
      - "1.pool.ntp.org"
      - "2.pool.ntp.org"
      - "3.pool.ntp.org"
    fa_url: 10.10.10.2
    api_token: e31060a7-21fc-e277-6240-25983c6c4592
'''

# This module returns no facts.
RETURN = r'''
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.pure import get_system, purefa_argument_spec
def remove(duplicate):
    """Return a copy of *duplicate* with later repeats dropped.

    The order of first occurrences is preserved.  A membership set makes
    this O(n) instead of the O(n**2) list scan it replaces.
    """
    seen = set()
    final_list = []
    for num in duplicate:
        if num not in seen:
            seen.add(num)
            final_list.append(num)
    return final_list
def delete_ntp(module, array):
    """Delete NTP Servers"""
    changed = False
    current = array.get(ntpserver=True)['ntpserver']
    if current != []:
        try:
            array.set(ntpserver=[])
            changed = True
        except Exception:
            module.fail_json(msg='Deletion of NTP servers failed')
    module.exit_json(changed=changed)
def create_ntp(module, array):
    """Set NTP Servers"""
    changed = False
    # Fall back to a public pool server when nothing was supplied.
    if not module.params['ntp_servers']:
        module.params['ntp_servers'] = ['0.pool.ntp.org']
    try:
        # Only the first four servers are honoured by the array.
        array.set(ntpserver=module.params['ntp_servers'][:4])
        changed = True
    except Exception:
        module.fail_json(msg='Update of NTP servers failed')
    module.exit_json(changed=changed)
def main():
    """Entry point: parse module arguments and converge the array's NTP
    configuration to the requested state."""
    argument_spec = purefa_argument_spec()
    argument_spec.update(dict(
        ntp_servers=dict(type='list'),
        state=dict(type='str', default='present', choices=['absent', 'present']),
    ))

    # state=present needs at least one server; state=absent takes none.
    required_if = [['state', 'present', ['ntp_servers']]]

    module = AnsibleModule(argument_spec,
                           required_if=required_if,
                           supports_check_mode=False)

    array = get_system(module)

    if module.params['state'] == 'absent':
        delete_ntp(module, array)
    else:
        # De-duplicate first; only the first 4 servers are compared/applied.
        module.params['ntp_servers'] = remove(module.params['ntp_servers'])
        if sorted(array.get(ntpserver=True)['ntpserver']) != sorted(module.params['ntp_servers'][0:4]):
            create_ntp(module, array)

    # Reached only when nothing needed changing (the helpers exit themselves).
    module.exit_json(changed=False)


if __name__ == '__main__':
    main()
| gpl-3.0 |
cat9/shadowsocks | shadowsocks/encrypt.py | 990 | 5180 | #!/usr/bin/env python
#
# Copyright 2012-2015 clowwindy
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import absolute_import, division, print_function, \
with_statement
import os
import sys
import hashlib
import logging
from shadowsocks import common
from shadowsocks.crypto import rc4_md5, openssl, sodium, table
# Registry mapping cipher method names to (key_len, iv_len, factory)
# tuples, merged from every available crypto backend.
method_supported = {}
method_supported.update(rc4_md5.ciphers)
method_supported.update(openssl.ciphers)
method_supported.update(sodium.ciphers)
method_supported.update(table.ciphers)
def random_string(length):
    """Return *length* cryptographically random bytes (used for IVs)."""
    return os.urandom(length)
# Cache of derived (key, iv) pairs keyed by (password, key_len, iv_len).
cached_keys = {}


def try_cipher(key, method=None):
    # Constructing an Encryptor validates the method/key combination.
    Encryptor(key, method)


def EVP_BytesToKey(password, key_len, iv_len):
    # equivalent to OpenSSL's EVP_BytesToKey() with count 1
    # so that we make the same key and iv as nodejs version
    cache_id = '%s-%d-%d' % (password, key_len, iv_len)
    hit = cached_keys.get(cache_id, None)
    if hit:
        return hit
    digests = []
    needed = key_len + iv_len
    prev = b''
    # Chain MD5 digests (d1 = md5(pw), d_n = md5(d_{n-1} + pw)) until
    # enough key material has accumulated.
    while sum(len(d) for d in digests) < needed:
        prev = hashlib.md5(prev + password).digest()
        digests.append(prev)
    material = b''.join(digests)
    result = (material[:key_len], material[key_len:key_len + iv_len])
    cached_keys[cache_id] = result
    return result
class Encryptor(object):
    """Stateful stream encryptor/decryptor for a single connection.

    The encrypting cipher is created eagerly with a random IV, which is
    prepended to the first encrypted payload.  The decrypting cipher is
    created lazily once the peer's IV arrives at the head of the first
    received payload.
    """

    def __init__(self, key, method):
        self.key = key
        self.method = method
        self.iv = None
        self.iv_sent = False  # set once our IV has been prepended to output
        self.cipher_iv = b''
        self.decipher = None
        method = method.lower()
        self._method_info = self.get_method_info(method)
        if self._method_info:
            self.cipher = self.get_cipher(key, method, 1,
                                          random_string(self._method_info[1]))
        else:
            logging.error('method %s not supported' % method)
            sys.exit(1)

    def get_method_info(self, method):
        # Returns (key_len, iv_len, factory) or None for unknown methods.
        method = method.lower()
        m = method_supported.get(method)
        return m

    def iv_len(self):
        return len(self.cipher_iv)

    def get_cipher(self, password, method, op, iv):
        """Build the underlying cipher object; op == 1 encrypts, 0 decrypts."""
        password = common.to_bytes(password)
        m = self._method_info
        if m[0] > 0:
            key, iv_ = EVP_BytesToKey(password, m[0], m[1])
        else:
            # key_length == 0 indicates we should use the key directly
            key, iv = password, b''
        iv = iv[:m[1]]
        if op == 1:
            # this iv is for cipher not decipher
            self.cipher_iv = iv[:m[1]]
        return m[2](method, key, iv, op)

    def encrypt(self, buf):
        if len(buf) == 0:
            return buf
        if self.iv_sent:
            return self.cipher.update(buf)
        else:
            # First chunk: prepend our IV so the peer can decrypt.
            self.iv_sent = True
            return self.cipher_iv + self.cipher.update(buf)

    def decrypt(self, buf):
        if len(buf) == 0:
            return buf
        if self.decipher is None:
            # First chunk: peel off the peer's IV before decrypting.
            decipher_iv_len = self._method_info[1]
            decipher_iv = buf[:decipher_iv_len]
            self.decipher = self.get_cipher(self.key, self.method, 0,
                                            iv=decipher_iv)
            buf = buf[decipher_iv_len:]
            if len(buf) == 0:
                return buf
        return self.decipher.update(buf)
def encrypt_all(password, method, op, data):
    """One-shot encrypt (op truthy) or decrypt (op falsy) of *data*.

    When encrypting, a fresh random IV is prepended to the output; when
    decrypting, the IV is read from the head of *data*.
    """
    result = []
    method = method.lower()
    (key_len, iv_len, m) = method_supported[method]
    if key_len > 0:
        key, _ = EVP_BytesToKey(password, key_len, iv_len)
    else:
        # key_length == 0: the password itself is the key.
        key = password
    if op:
        iv = random_string(iv_len)
        result.append(iv)
    else:
        iv = data[:iv_len]
        data = data[iv_len:]
    cipher = m(method, key, iv, op)
    result.append(cipher.update(data))
    return b''.join(result)
# One representative cipher per backend, exercised by the self-tests below.
CIPHERS_TO_TEST = [
    'aes-128-cfb',
    'aes-256-cfb',
    'rc4-md5',
    'salsa20',
    'chacha20',
    'table',
]
def test_encryptor():
    """Round-trip a random payload through every cipher via Encryptor."""
    from os import urandom
    payload = urandom(10240)
    for method in CIPHERS_TO_TEST:
        logging.warn(method)
        sender = Encryptor(b'key', method)
        receiver = Encryptor(b'key', method)
        sealed = sender.encrypt(payload)
        opened = receiver.decrypt(sealed)
        assert payload == opened
def test_encrypt_all():
    """Round-trip a random payload through the one-shot encrypt_all helper."""
    from os import urandom
    payload = urandom(10240)
    for method in CIPHERS_TO_TEST:
        logging.warn(method)
        sealed = encrypt_all(b'key', method, 1, payload)
        opened = encrypt_all(b'key', method, 0, sealed)
        assert payload == opened


if __name__ == '__main__':
    test_encrypt_all()
    test_encryptor()
| apache-2.0 |
nuchi/httpserver | httpserver.py | 1 | 1065 | #!/usr/bin/env python
import socket
from http_handler import Handler_thread
# Default listen backlog for the server socket.
MAX_CONNECTIONS = 5


class HTTPserver(object):
    """Minimal TCP accept-loop server that hands each connection to a
    Handler_thread."""

    def __init__(self, localOnly=False, port=80, max_connections=MAX_CONNECTIONS):
        self.port = port
        self.max_connections = max_connections
        if localOnly:
            self.hostname = '127.0.0.1'
        else:
            self.hostname = socket.gethostname()
        self.server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        # Allow quick restarts without waiting for TIME_WAIT to expire.
        self.server.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)

    def serve(self):
        """Bind, listen, and dispatch incoming connections forever."""
        self.server.bind((self.hostname, self.port))
        self.server.listen(self.max_connections)
        while True:
            client_socket, address = self.server.accept()
            ht = Handler_thread()
            ht.daemon = True
            # NOTE(review): calling run() executes the handler synchronously
            # in this thread, so the daemon flag has no effect.  Presumably
            # Handler_thread.run takes the socket by design -- confirm
            # whether start() plus passing the socket differently was
            # intended.
            ht.run(client_socket)

    def close(self):
        self.server.close()
def create_and_run_server(localOnly=True, port=8000):
    """Construct an HTTPserver and run it until interrupted, always
    closing the listening socket on the way out."""
    server = HTTPserver(localOnly=localOnly, port=port)
    try:
        server.serve()
    except KeyboardInterrupt:
        print('\nClosing server.')
    finally:
        server.close()


if __name__ == '__main__':
    create_and_run_server()
# Unidecode transliteration table for Unicode codepoints U+3100..U+31FF
# (Bopomofo, Hangul Compatibility Jamo, Bopomofo Extended).
# '[?]' marks codepoints with no transliteration; '' transliterates to
# nothing.  The table is assembled from the three named sub-ranges plus
# runs of filler entries; the result is the same 256-element tuple as the
# original flat listing.

# U+3105..U+312C: Bopomofo letters.
_BOPOMOFO = (
    'B', 'P', 'M', 'F', 'D', 'T', 'N', 'L', 'G', 'K', 'H', 'J', 'Q', 'X',
    'ZH', 'CH', 'SH', 'R', 'Z', 'C', 'S', 'A', 'O', 'E', 'EH', 'AI', 'EI',
    'AU', 'OU', 'AN', 'EN', 'ANG', 'ENG', 'ER', 'I', 'U', 'IU', 'V', 'NG',
    'GN',
)

# U+3131..U+318E: Hangul compatibility jamo.
_HANGUL = (
    'g', 'gg', 'gs', 'n', 'nj', 'nh', 'd', 'dd', 'r', 'lg', 'lm', 'lb',
    'ls', 'lt', 'lp', 'rh', 'm', 'b', 'bb', 'bs', 's', 'ss', '', 'j', 'jj',
    'c', 'k', 't', 'p', 'h', 'a', 'ae', 'ya', 'yae', 'eo', 'e', 'yeo',
    'ye', 'o', 'wa', 'wae', 'oe', 'yo', 'u', 'weo', 'we', 'wi', 'yu', 'eu',
    'yi', 'i', '', 'nn', 'nd', 'ns', 'nZ', 'lgs', 'ld', 'lbs', 'lZ', 'lQ',
    'mb', 'ms', 'mZ', 'mN', 'bg', '', 'bsg', 'bst', 'bj', 'bt', 'bN',
    'bbN', 'sg', 'sn', 'sd', 'sb', 'sj', 'Z', '', 'N', 'Ns', 'NZ', 'pN',
    'hh', 'Q', 'yo-ya', 'yo-yae', 'yo-i', 'yu-yeo', 'yu-ye', 'yu-i', 'U',
    'U-i',
)

# U+31A0..U+31B7: Bopomofo Extended.
_BOPOMOFO_EXT = (
    'BU', 'ZI', 'JI', 'GU', 'EE', 'ENN', 'OO', 'ONN', 'IR', 'ANN', 'INN',
    'UNN', 'IM', 'NGG', 'AINN', 'AUNN', 'AM', 'OM', 'ONG', 'INNN', 'P',
    'T', 'K', 'H',
)

data = (
    ('[?]',) * 5        # U+3100..U+3104: unassigned
    + _BOPOMOFO         # U+3105..U+312C
    + ('[?]',) * 4      # U+312D..U+3130: unassigned
    + _HANGUL           # U+3131..U+318E
    + ('[?]',)          # U+318F: unassigned
    + ('',) * 16        # U+3190..U+319F: kanbun marks, not transliterated
    + _BOPOMOFO_EXT     # U+31A0..U+31B7
    + ('[?]',) * 72     # U+31B8..U+31FF: unassigned
)
| gpl-3.0 |
CoDEmanX/ArangoDB | 3rdParty/V8-4.3.61/third_party/python_26/Lib/site-packages/pythonwin/pywin/framework/editor/__init__.py | 17 | 2824 | # __init__ for the Pythonwin editor package.
#
# We used to support optional editors - eg, color or non-color.
#
# This really isnt necessary with Scintilla, and scintilla
# is getting so deeply embedded that it was too much work.
import win32ui, sys, win32con
# Fallback font tuple used when no preference is stored -- presumably a
# win32 CHARFORMAT-style tuple (see GetEditorFontOption); confirm field
# meanings against the MFC docs before editing.
defaultCharacterFormat = (-402653169, 0, 200, 0, 0, 0, 49, 'Courier New')
##def GetDefaultEditorModuleName():
## import pywin
## # If someone has set pywin.editormodulename, then this is what we use
## try:
## prefModule = pywin.editormodulename
## except AttributeError:
## prefModule = win32ui.GetProfileVal("Editor","Module", "")
## return prefModule
##
##def WriteDefaultEditorModule(module):
## try:
## module = module.__name__
## except:
## pass
## win32ui.WriteProfileVal("Editor", "Module", module)
def LoadDefaultEditor():
    # Historical hook for choosing between editor implementations; the
    # color editor is now always imported at the bottom of this module,
    # so this is intentionally a no-op (the old logic survives in the
    # commented block below).
    pass
## prefModule = GetDefaultEditorModuleName()
## restorePrefModule = None
## mod = None
## if prefModule:
## try:
## mod = __import__(prefModule)
## except 'xx':
## msg = "Importing your preferred editor ('%s') failed.\n\nError %s: %s\n\nAn attempt will be made to load the default editor.\n\nWould you like this editor disabled in the future?" % (prefModule, sys.exc_info()[0], sys.exc_info()[1])
## rc = win32ui.MessageBox(msg, "Error importing editor", win32con.MB_YESNO)
## if rc == win32con.IDNO:
## restorePrefModule = prefModule
## WriteDefaultEditorModule("")
## del rc
##
## try:
## # Try and load the default one - dont catch errors here.
## if mod is None:
## prefModule = "pywin.framework.editor.color.coloreditor"
## mod = __import__(prefModule)
##
## # Get at the real module.
## mod = sys.modules[prefModule]
##
## # Do a "from mod import *"
## globals().update(mod.__dict__)
##
## finally:
## # Restore the users default editor if it failed and they requested not to disable it.
## if restorePrefModule:
## WriteDefaultEditorModule(restorePrefModule)
def GetEditorOption(option, defaultValue, min=None, max=None):
    """Read an editor option from the profile; values outside the optional
    [min, max] bounds fall back to defaultValue (not clamped)."""
    value = win32ui.GetProfileVal("Editor", option, defaultValue)
    if min is not None and value < min:
        value = defaultValue
    if max is not None and value > max:
        value = defaultValue
    return value
return rc
def SetEditorOption(option, newValue):
    # Persist a single editor option to the registry profile.
    win32ui.WriteProfileVal("Editor", option, newValue)
def DeleteEditorOption(option):
    # Writing None removes the stored value; ignore failures (e.g. the
    # value was never set).
    try:
        win32ui.WriteProfileVal("Editor", option, None)
    except win32ui.error:
        pass
# Load and save font tuples
def GetEditorFontOption(option, default = None):
if default is None: default = defaultCharacterFormat
fmt = GetEditorOption( option, "" )
if fmt == "": return default
try:
return eval(fmt)
except:
print "WARNING: Invalid font setting in registry - setting ignored"
return default
def SetEditorFontOption(option, newValue):
    # Fonts are stored as the str() of the tuple and re-created with
    # eval() in GetEditorFontOption.
    SetEditorOption(option, str(newValue))
from pywin.framework.editor.color.coloreditor import *
| apache-2.0 |
elaeon/dsignature | creacion_firma/forms.py | 1 | 3487 | # -*- coding: utf-8 -*-
from django import forms
from django.forms import ModelForm
from creacion_firma.models import FirmarCertificado, NominaSubida, User
import datetime
class UserForm(forms.Form):
    # Basic account-creation form; the password is collected once with no
    # confirmation field.
    nombre = forms.CharField(max_length=150, widget=forms.TextInput(attrs={"style": "width: 400px"}))
    correo_electronico = forms.EmailField(max_length=100)
    password = forms.CharField(widget=forms.PasswordInput)
class FirmarCertificadoForm(ModelForm):
    # Certificate-signing form; users are listed alphabetically and the
    # certificate itself is produced elsewhere, hence the exclude.
    user = forms.ModelChoiceField(
        queryset=User.objects.all().order_by("username"),
        required=True)

    class Meta:
        model = FirmarCertificado
        exclude = ('certificado',)
class SubirNominaForm(forms.Form):
    """Upload form for a payroll (PDF + XML).  The payroll is identified
    either by picking an existing visible one or by naming a new one."""

    anteriores = forms.ModelChoiceField(
        queryset=NominaSubida.objects.filter(visible=True),
        required=False)
    nombre = forms.CharField(
        max_length=50,
        widget=forms.TextInput(attrs={"style": "width: 150px"}),
        help_text="QNA, Reyes, etc",
        required=False)
    numero = forms.IntegerField(required=False)
    year = forms.IntegerField(label=u"Año", required=False)
    tipo = forms.ChoiceField(choices=(("ord", "Ordinaria"), ("ext", "Extraordinaria")), required=False)
    pdf = forms.FileField()
    xml = forms.FileField()

    def clean(self):
        cleaned_data = super(SubirNominaForm, self).clean()
        # At least one of the two payroll identifiers must be present.
        if not (cleaned_data.get("anteriores") or cleaned_data.get("nombre")):
            msg = "Elija un nombre o escriba uno"
            self.add_error('anteriores', msg)
            self.add_error('nombre', msg)
class SubirNominaXMLForm(forms.Form):
    # Same as SubirNominaForm but accepts only the XML file (no PDF).
    anteriores = forms.ModelChoiceField(
        queryset=NominaSubida.objects.filter(visible=True),
        required=False)
    nombre = forms.CharField(
        max_length=50,
        widget=forms.TextInput(attrs={"style": "width: 150px"}),
        help_text="QNA, Reyes, etc",
        required=False)
    numero = forms.IntegerField(required=False)
    year = forms.IntegerField(label=u"Año", required=False)
    tipo = forms.ChoiceField(choices=(("ord", "Ordinaria"), ("ext", "Extraordinaria")), required=False)
    xml = forms.FileField()

    def clean(self):
        cleaned_data = super(SubirNominaXMLForm, self).clean()
        anteriores_nomina = cleaned_data.get("anteriores")
        nomina = cleaned_data.get("nombre")
        # Require one of the two payroll identifiers.
        if not (anteriores_nomina or nomina):
            msg = "Elija un nombre o escriba uno"
            self.add_error('anteriores', msg)
            self.add_error('nombre', msg)
class LoginForm(forms.Form):
    """Username/password login form."""
    usuario = forms.CharField(max_length=150)
    # Masked input widget for the password.
    password = forms.CharField(max_length=32, widget=forms.PasswordInput)
class SelectYearForm(forms.Form):
    """Single-field form to pick a year in [2015, 2020)."""
    # Use a list rather than a generator expression: a generator would be
    # exhausted after its first iteration, so any code path that iterates
    # the choices more than once would see an empty set of options.
    # ``range`` also keeps this working on Python 3 (``xrange`` is
    # Python-2-only).
    year = forms.ChoiceField(label="Año", choices=[(y, y) for y in range(2015, 2020)])
class FirmaOSinForm(forms.Form):
    """Filter form: choose between signed ('f') and unsigned ('nf') items."""
    tipo = forms.ChoiceField(label="Tipo", choices=(("f", "firmado"), ("nf", "no firmado")))
class NominasFilterYear(forms.Form):
    """Form exposing a 'nomina' choice limited to payrolls of one year.

    Accepts an optional ``year`` keyword argument (consumed before the
    base class sees it); defaults to the current year.
    """
    def __init__(self, *args, **kwargs):
        # Pop our custom kwarg so it is not forwarded to forms.Form,
        # which would reject an unexpected keyword argument. pop() with
        # a default replaces the original "in kwargs / del kwargs" dance.
        self.year = kwargs.pop("year", datetime.date.today().year)
        super(NominasFilterYear, self).__init__(*args, **kwargs)
        # The queryset depends on the instance's year, so the field must
        # be created per instance instead of being declared on the class.
        self.fields['nomina'] = forms.ModelChoiceField(
            queryset=NominaSubida.objects.filter(year=self.year).order_by("-numero", "nombre", "tipo")
        )
| gpl-3.0 |
kenshay/ImageScripter | ProgramData/SystemFiles/Python/Lib/site-packages/django/urls/resolvers.py | 39 | 21589 | """
This module converts requested URLs to callback view functions.
RegexURLResolver is the main class here. Its resolve() method takes a URL (as
a string) and returns a ResolverMatch object which provides access to all
attributes of the resolved URL match.
"""
from __future__ import unicode_literals
import functools
import re
import threading
from importlib import import_module
from django.conf import settings
from django.core.checks import Warning
from django.core.checks.urls import check_resolver
from django.core.exceptions import ImproperlyConfigured
from django.utils import lru_cache, six
from django.utils.datastructures import MultiValueDict
from django.utils.encoding import force_str, force_text
from django.utils.functional import cached_property
from django.utils.http import RFC3986_SUBDELIMS, urlquote
from django.utils.regex_helper import normalize
from django.utils.translation import get_language
from .exceptions import NoReverseMatch, Resolver404
from .utils import get_callable
class ResolverMatch(object):
    """Result of resolving a URL: the view callable, the captured
    arguments and the namespace/name metadata of the match."""

    def __init__(self, func, args, kwargs, url_name=None, app_names=None, namespaces=None):
        self.func = func
        self.args = args
        self.kwargs = kwargs
        self.url_name = url_name
        # A URLRegexResolver without a namespace or app_name passes in an
        # empty value; drop those before joining.
        self.app_names = [entry for entry in (app_names or []) if entry]
        self.app_name = ':'.join(self.app_names)
        self.namespaces = [entry for entry in (namespaces or []) if entry]
        self.namespace = ':'.join(self.namespaces)
        if hasattr(func, '__name__'):
            # A function-based view.
            self._func_path = func.__module__ + '.' + func.__name__
        else:
            # A class-based view: identify it by its class.
            view_cls = func.__class__
            self._func_path = view_cls.__module__ + '.' + view_cls.__name__
        self.view_name = ':'.join(self.namespaces + [url_name or self._func_path])

    def __getitem__(self, index):
        # Support tuple-style unpacking: func, args, kwargs = match.
        return (self.func, self.args, self.kwargs)[index]

    def __repr__(self):
        return "ResolverMatch(func=%s, args=%s, kwargs=%s, url_name=%s, app_names=%s, namespaces=%s)" % (
            self._func_path, self.args, self.kwargs, self.url_name,
            self.app_names, self.namespaces,
        )
@lru_cache.lru_cache(maxsize=None)
def get_resolver(urlconf=None):
    """Return a memoized root resolver for *urlconf* (defaults to
    settings.ROOT_URLCONF when None)."""
    if urlconf is not None:
        return RegexURLResolver(r'^/', urlconf)
    # Imported lazily so this module can be imported before settings are
    # configured.
    from django.conf import settings
    return RegexURLResolver(r'^/', settings.ROOT_URLCONF)
@lru_cache.lru_cache(maxsize=None)
def get_ns_resolver(ns_pattern, resolver):
    """Build (and cache) a resolver rooted at '/' wrapping *resolver*'s
    url_patterns under the namespaced parent pattern *ns_pattern*.

    This makes it possible to have captured parameters in the parent
    URLconf pattern.
    """
    inner = RegexURLResolver(ns_pattern, resolver.url_patterns)
    return RegexURLResolver(r'^/', [inner])
class LocaleRegexDescriptor(object):
    """Descriptor exposing a compiled ``regex`` attribute whose value can
    vary with the active translation language (URL patterns may be
    lazily-translated string proxies)."""

    def __get__(self, instance, cls=None):
        """
        Return a compiled regular expression based on the active language.
        """
        if instance is None:
            return self
        # As a performance optimization, if the given regex string is a regular
        # string (not a lazily-translated string proxy), compile it once and
        # avoid per-language compilation.
        if isinstance(instance._regex, six.string_types):
            # Writing into instance.__dict__ shadows this (non-data)
            # descriptor on later attribute lookups, so this branch only
            # runs once per instance.
            instance.__dict__['regex'] = self._compile(instance._regex)
            return instance.__dict__['regex']
        # Lazily-translated pattern: cache one compiled regex per language.
        language_code = get_language()
        if language_code not in instance._regex_dict:
            instance._regex_dict[language_code] = self._compile(force_text(instance._regex))
        return instance._regex_dict[language_code]

    def _compile(self, regex):
        """
        Compile and return the given regular expression.
        """
        try:
            return re.compile(regex, re.UNICODE)
        except re.error as e:
            # Surface bad patterns as a configuration error that shows the
            # offending pattern, instead of a bare re.error.
            raise ImproperlyConfigured(
                '"%s" is not a valid regular expression: %s' %
                (regex, six.text_type(e))
            )
class LocaleRegexProvider(object):
    """
    A mixin to provide a default regex property which can vary by active
    language.
    """
    def __init__(self, regex):
        # regex is either a string representing a regular expression, or a
        # translatable string (using ugettext_lazy) representing a regular
        # expression.
        self._regex = regex
        # Per-language cache of compiled patterns (used for lazy strings).
        self._regex_dict = {}

    # Compiled-pattern attribute; compilation and caching live in the
    # descriptor.
    regex = LocaleRegexDescriptor()

    def describe(self):
        """
        Format the URL pattern for display in warning messages.
        """
        description = "'{}'".format(self.regex.pattern)
        if getattr(self, 'name', False):
            description += " [name='{}']".format(self.name)
        return description

    def _check_pattern_startswith_slash(self):
        """
        Check that the pattern does not begin with a forward slash.

        Returns a list with a urls.W002 Warning, or an empty list.
        """
        regex_pattern = self.regex.pattern
        if not settings.APPEND_SLASH:
            # Skip check as it can be useful to start a URL pattern with a slash
            # when APPEND_SLASH=False.
            return []
        if (regex_pattern.startswith('/') or regex_pattern.startswith('^/')) and not regex_pattern.endswith('/'):
            warning = Warning(
                "Your URL pattern {} has a regex beginning with a '/'. Remove this "
                "slash as it is unnecessary. If this pattern is targeted in an "
                "include(), ensure the include() pattern has a trailing '/'.".format(
                    self.describe()
                ),
                id="urls.W002",
            )
            return [warning]
        else:
            return []
class RegexURLPattern(LocaleRegexProvider):
    """A single URL pattern: couples a regex to a view callable, plus
    optional default kwargs and a name used for reversing."""

    def __init__(self, regex, callback, default_args=None, name=None):
        LocaleRegexProvider.__init__(self, regex)
        self.callback = callback  # the view
        self.default_args = default_args or {}
        self.name = name

    def __repr__(self):
        return force_str('<%s %s %s>' % (self.__class__.__name__, self.name, self.regex.pattern))

    def check(self):
        """Run system checks for this pattern (name, then leading slash)."""
        warnings = self._check_pattern_name()
        if not warnings:
            warnings = self._check_pattern_startswith_slash()
        return warnings

    def _check_pattern_name(self):
        """
        Check that the pattern name does not contain a colon.

        Returns a list with a urls.W003 Warning, or an empty list.
        """
        if self.name is not None and ":" in self.name:
            warning = Warning(
                "Your URL pattern {} has a name including a ':'. Remove the colon, to "
                "avoid ambiguous namespace references.".format(self.describe()),
                id="urls.W003",
            )
            return [warning]
        else:
            return []

    def resolve(self, path):
        """Match *path* against this pattern; return a ResolverMatch on
        success, or fall through (None) when the regex does not match."""
        match = self.regex.search(path)
        if match:
            # If there are any named groups, use those as kwargs, ignoring
            # non-named groups. Otherwise, pass all non-named arguments as
            # positional arguments.
            kwargs = match.groupdict()
            args = () if kwargs else match.groups()
            # In both cases, pass any extra_kwargs as **kwargs.
            kwargs.update(self.default_args)
            return ResolverMatch(self.callback, args, kwargs, self.name)

    @cached_property
    def lookup_str(self):
        """
        A string that identifies the view (e.g. 'path.to.view_function' or
        'path.to.ClassBasedView').
        """
        callback = self.callback
        # Python 3.5 collapses nested partials, so can change "while" to "if"
        # when it's the minimum supported version.
        while isinstance(callback, functools.partial):
            callback = callback.func
        if not hasattr(callback, '__name__'):
            return callback.__module__ + "." + callback.__class__.__name__
        elif six.PY3:
            return callback.__module__ + "." + callback.__qualname__
        else:
            # PY2 does not support __qualname__
            return callback.__module__ + "." + callback.__name__
class RegexURLResolver(LocaleRegexProvider):
    """Resolves a path against a list of URL patterns (an included
    URLconf) and, inversely, reverses view names back into URLs.

    The reverse/namespace/app lookup tables are built lazily, per active
    language, by _populate(); population is guarded against recursion
    with a thread-local flag.
    """

    def __init__(self, regex, urlconf_name, default_kwargs=None, app_name=None, namespace=None):
        LocaleRegexProvider.__init__(self, regex)
        # urlconf_name is the dotted Python path to the module defining
        # urlpatterns. It may also be an object with an urlpatterns attribute
        # or urlpatterns itself.
        self.urlconf_name = urlconf_name
        self.callback = None
        self.default_kwargs = default_kwargs or {}
        self.namespace = namespace
        self.app_name = app_name
        # Per-language caches, filled lazily by _populate().
        self._reverse_dict = {}
        self._namespace_dict = {}
        self._app_dict = {}
        # set of dotted paths to all functions and classes that are used in
        # urlpatterns
        self._callback_strs = set()
        self._populated = False
        self._local = threading.local()

    def __repr__(self):
        if isinstance(self.urlconf_name, list) and len(self.urlconf_name):
            # Don't bother to output the whole list, it can be huge
            urlconf_repr = '<%s list>' % self.urlconf_name[0].__class__.__name__
        else:
            urlconf_repr = repr(self.urlconf_name)
        return str('<%s %s (%s:%s) %s>') % (
            self.__class__.__name__, urlconf_repr, self.app_name,
            self.namespace, self.regex.pattern,
        )

    def check(self):
        """Run system checks on this resolver and all included patterns."""
        warnings = self._check_include_trailing_dollar()
        for pattern in self.url_patterns:
            warnings.extend(check_resolver(pattern))
        if not warnings:
            warnings = self._check_pattern_startswith_slash()
        return warnings

    def _check_include_trailing_dollar(self):
        """
        Check that include is not used with a regex ending with a dollar.

        Returns a list with a urls.W001 Warning, or an empty list.
        """
        regex_pattern = self.regex.pattern
        if regex_pattern.endswith('$') and not regex_pattern.endswith(r'\$'):
            warning = Warning(
                "Your URL pattern {} uses include with a regex ending with a '$'. "
                "Remove the dollar from the regex to avoid problems including "
                "URLs.".format(self.describe()),
                id="urls.W001",
            )
            return [warning]
        else:
            return []

    def _populate(self):
        """Build the per-language reverse/namespace/app lookup tables by
        walking url_patterns (in reverse declaration order)."""
        # Short-circuit if called recursively in this thread to prevent
        # infinite recursion. Concurrent threads may call this at the same
        # time and will need to continue, so set 'populating' on a
        # thread-local variable.
        if getattr(self._local, 'populating', False):
            return
        self._local.populating = True
        lookups = MultiValueDict()
        namespaces = {}
        apps = {}
        language_code = get_language()
        for pattern in reversed(self.url_patterns):
            if isinstance(pattern, RegexURLPattern):
                self._callback_strs.add(pattern.lookup_str)
            p_pattern = pattern.regex.pattern
            if p_pattern.startswith('^'):
                p_pattern = p_pattern[1:]
            if isinstance(pattern, RegexURLResolver):
                if pattern.namespace:
                    namespaces[pattern.namespace] = (p_pattern, pattern)
                    if pattern.app_name:
                        apps.setdefault(pattern.app_name, []).append(pattern.namespace)
                else:
                    # Anonymous include: merge the child's lookup tables
                    # into ours, prefixing patterns with ours.
                    parent_pat = pattern.regex.pattern
                    for name in pattern.reverse_dict:
                        for matches, pat, defaults in pattern.reverse_dict.getlist(name):
                            new_matches = normalize(parent_pat + pat)
                            lookups.appendlist(
                                name,
                                (
                                    new_matches,
                                    p_pattern + pat,
                                    dict(defaults, **pattern.default_kwargs),
                                )
                            )
                    for namespace, (prefix, sub_pattern) in pattern.namespace_dict.items():
                        namespaces[namespace] = (p_pattern + prefix, sub_pattern)
                    for app_name, namespace_list in pattern.app_dict.items():
                        apps.setdefault(app_name, []).extend(namespace_list)
                if not getattr(pattern._local, 'populating', False):
                    pattern._populate()
                self._callback_strs.update(pattern._callback_strs)
            else:
                bits = normalize(p_pattern)
                lookups.appendlist(pattern.callback, (bits, p_pattern, pattern.default_args))
                if pattern.name is not None:
                    lookups.appendlist(pattern.name, (bits, p_pattern, pattern.default_args))
        self._reverse_dict[language_code] = lookups
        self._namespace_dict[language_code] = namespaces
        self._app_dict[language_code] = apps
        self._populated = True
        self._local.populating = False

    @property
    def reverse_dict(self):
        # Lazily (re)build the table for the active language.
        language_code = get_language()
        if language_code not in self._reverse_dict:
            self._populate()
        return self._reverse_dict[language_code]

    @property
    def namespace_dict(self):
        language_code = get_language()
        if language_code not in self._namespace_dict:
            self._populate()
        return self._namespace_dict[language_code]

    @property
    def app_dict(self):
        language_code = get_language()
        if language_code not in self._app_dict:
            self._populate()
        return self._app_dict[language_code]

    def _is_callback(self, name):
        """Return True if *name* is the dotted path of a registered view."""
        if not self._populated:
            self._populate()
        return name in self._callback_strs

    def resolve(self, path):
        """Resolve *path* to a ResolverMatch, merging this resolver's own
        captured kwargs into the sub-match; raise Resolver404 (with the
        list of patterns tried) when nothing matches."""
        path = force_text(path)  # path may be a reverse_lazy object
        tried = []
        match = self.regex.search(path)
        if match:
            new_path = path[match.end():]
            for pattern in self.url_patterns:
                try:
                    sub_match = pattern.resolve(new_path)
                except Resolver404 as e:
                    sub_tried = e.args[0].get('tried')
                    if sub_tried is not None:
                        tried.extend([pattern] + t for t in sub_tried)
                    else:
                        tried.append([pattern])
                else:
                    if sub_match:
                        # Merge captured arguments in match with submatch
                        sub_match_dict = dict(match.groupdict(), **self.default_kwargs)
                        sub_match_dict.update(sub_match.kwargs)
                        # If there are *any* named groups, ignore all non-named groups.
                        # Otherwise, pass all non-named arguments as positional arguments.
                        sub_match_args = sub_match.args
                        if not sub_match_dict:
                            sub_match_args = match.groups() + sub_match.args
                        return ResolverMatch(
                            sub_match.func,
                            sub_match_args,
                            sub_match_dict,
                            sub_match.url_name,
                            [self.app_name] + sub_match.app_names,
                            [self.namespace] + sub_match.namespaces,
                        )
                    tried.append([pattern])
            raise Resolver404({'tried': tried, 'path': new_path})
        raise Resolver404({'path': path})

    @cached_property
    def urlconf_module(self):
        """The URLconf module (imported from the dotted path if needed)."""
        if isinstance(self.urlconf_name, six.string_types):
            return import_module(self.urlconf_name)
        else:
            return self.urlconf_name

    @cached_property
    def url_patterns(self):
        # urlconf_module might be a valid set of patterns, so we default to it
        patterns = getattr(self.urlconf_module, "urlpatterns", self.urlconf_module)
        try:
            iter(patterns)
        except TypeError:
            msg = (
                "The included URLconf '{name}' does not appear to have any "
                "patterns in it. If you see valid patterns in the file then "
                "the issue is probably caused by a circular import."
            )
            raise ImproperlyConfigured(msg.format(name=self.urlconf_name))
        return patterns

    def resolve_error_handler(self, view_type):
        """Return (callable, kwargs) for handler400/403/404/500, falling
        back to Django's defaults when the URLconf defines none."""
        callback = getattr(self.urlconf_module, 'handler%s' % view_type, None)
        if not callback:
            # No handler specified in file; use lazy import, since
            # django.conf.urls imports this file.
            from django.conf import urls
            callback = getattr(urls, 'handler%s' % view_type)
        return get_callable(callback), {}

    def reverse(self, lookup_view, *args, **kwargs):
        return self._reverse_with_prefix(lookup_view, '', *args, **kwargs)

    def _reverse_with_prefix(self, lookup_view, _prefix, *args, **kwargs):
        """Try every registered possibility for *lookup_view*; return the
        first URL whose pattern accepts the given args/kwargs, otherwise
        raise NoReverseMatch with a descriptive message."""
        if args and kwargs:
            raise ValueError("Don't mix *args and **kwargs in call to reverse()!")
        text_args = [force_text(v) for v in args]
        text_kwargs = {k: force_text(v) for (k, v) in kwargs.items()}
        if not self._populated:
            self._populate()
        possibilities = self.reverse_dict.getlist(lookup_view)
        for possibility, pattern, defaults in possibilities:
            for result, params in possibility:
                if args:
                    # Positional call: argument count must match exactly.
                    if len(args) != len(params):
                        continue
                    candidate_subs = dict(zip(params, text_args))
                else:
                    # Keyword call: names (plus defaults) must cover the
                    # pattern's parameters, and defaults must not conflict.
                    if (set(kwargs.keys()) | set(defaults.keys()) != set(params) |
                            set(defaults.keys())):
                        continue
                    matches = True
                    for k, v in defaults.items():
                        if kwargs.get(k, v) != v:
                            matches = False
                            break
                    if not matches:
                        continue
                    candidate_subs = text_kwargs
                # WSGI provides decoded URLs, without %xx escapes, and the URL
                # resolver operates on such URLs. First substitute arguments
                # without quoting to build a decoded URL and look for a match.
                # Then, if we have a match, redo the substitution with quoted
                # arguments in order to return a properly encoded URL.
                candidate_pat = _prefix.replace('%', '%%') + result
                if re.search('^%s%s' % (re.escape(_prefix), pattern), candidate_pat % candidate_subs, re.UNICODE):
                    # safe characters from `pchar` definition of RFC 3986
                    url = urlquote(candidate_pat % candidate_subs, safe=RFC3986_SUBDELIMS + str('/~:@'))
                    # Don't allow construction of scheme relative urls.
                    if url.startswith('//'):
                        url = '/%%2F%s' % url[2:]
                    return url
        # lookup_view can be URL name or callable, but callables are not
        # friendly in error messages.
        m = getattr(lookup_view, '__module__', None)
        n = getattr(lookup_view, '__name__', None)
        if m is not None and n is not None:
            lookup_view_s = "%s.%s" % (m, n)
        else:
            lookup_view_s = lookup_view
        patterns = [pattern for (possibility, pattern, defaults) in possibilities]
        if patterns:
            if args:
                arg_msg = "arguments '%s'" % (args,)
            elif kwargs:
                arg_msg = "keyword arguments '%s'" % (kwargs,)
            else:
                arg_msg = "no arguments"
            msg = (
                "Reverse for '%s' with %s not found. %d pattern(s) tried: %s" %
                (lookup_view_s, arg_msg, len(patterns), patterns)
            )
        else:
            msg = (
                "Reverse for '%(view)s' not found. '%(view)s' is not "
                "a valid view function or pattern name." % {'view': lookup_view_s}
            )
        raise NoReverseMatch(msg)
class LocaleRegexURLResolver(RegexURLResolver):
    """
    A URL resolver that always matches the active language code as URL prefix.

    Rather than taking a regex argument, we just override the ``regex``
    function to always return the active language-code as regex.
    """
    def __init__(
        self, urlconf_name, default_kwargs=None, app_name=None, namespace=None,
        prefix_default_language=True,
    ):
        super(LocaleRegexURLResolver, self).__init__(
            None, urlconf_name, default_kwargs, app_name, namespace,
        )
        # When False, URLs in the default language are served unprefixed.
        self.prefix_default_language = prefix_default_language

    @property
    def regex(self):
        # One compiled pattern per language, cached in _regex_dict.
        language_code = get_language() or settings.LANGUAGE_CODE
        if language_code not in self._regex_dict:
            if language_code == settings.LANGUAGE_CODE and not self.prefix_default_language:
                # Default language without prefixing: match everything.
                regex_string = ''
            else:
                regex_string = '^%s/' % language_code
            self._regex_dict[language_code] = re.compile(regex_string, re.UNICODE)
        return self._regex_dict[language_code]
| gpl-3.0 |
junneyang/taskflow | taskflow/examples/example_utils.py | 4 | 3306 | # -*- coding: utf-8 -*-
# Copyright (C) 2013 Yahoo! Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import contextlib
import logging
import os
import shutil
import sys
import tempfile
from six.moves import urllib_parse
from taskflow import exceptions
from taskflow.persistence import backends
LOG = logging.getLogger(__name__)
# Optional-dependency probe: record whether SQLAlchemy is importable
# instead of failing at import time (some persistence backends need it).
try:
    import sqlalchemy as _sa # noqa
    SQLALCHEMY_AVAILABLE = True
except ImportError:
    SQLALCHEMY_AVAILABLE = False
def print_wrapped(text):
    """Print *text* framed above and below by dashed rules of the same
    length as the text."""
    rule = "-" * len(text)
    print(rule)
    print(text)
    print(rule)
def rm_path(persist_path):
    """Best-effort removal of *persist_path*.

    Directories are removed recursively and files are unlinked; a
    missing path is a no-op and removal errors are silently ignored.
    Anything that exists but is neither a file nor a directory raises
    ValueError.
    """
    if not os.path.exists(persist_path):
        return
    for predicate, remover in ((os.path.isdir, shutil.rmtree),
                               (os.path.isfile, os.unlink)):
        if predicate(persist_path):
            break
    else:
        raise ValueError("Unknown how to `rm` path: %s" % (persist_path))
    try:
        remover(persist_path)
    except (IOError, OSError):
        # Best-effort cleanup; failures are deliberately swallowed.
        pass
def _make_conf(backend_uri):
parsed_url = urllib_parse.urlparse(backend_uri)
backend_type = parsed_url.scheme.lower()
if not backend_type:
raise ValueError("Unknown backend type for uri: %s" % (backend_type))
if backend_type in ('file', 'dir'):
conf = {
'path': parsed_url.path,
'connection': backend_uri,
}
elif backend_type in ('zookeeper',):
conf = {
'path': parsed_url.path,
'hosts': parsed_url.netloc,
'connection': backend_uri,
}
else:
conf = {
'connection': backend_uri,
}
return conf
@contextlib.contextmanager
def get_backend(backend_uri=None):
    """Context manager yielding a ready-to-use persistence backend.

    The URI may be passed in, taken from sys.argv[1], or — when absent —
    a temporary directory-backed file backend is created. An unknown
    backend also falls back to a temporary file backend. Any temporary
    directory created here is removed on exit.
    """
    tmp_dir = None
    if not backend_uri:
        if len(sys.argv) > 1:
            backend_uri = str(sys.argv[1])
        if not backend_uri:
            tmp_dir = tempfile.mkdtemp()
            backend_uri = "file:///%s" % tmp_dir
    try:
        backend = backends.fetch(_make_conf(backend_uri))
    except exceptions.NotFound as e:
        # Fallback to one that will work if the provided backend is not found.
        if not tmp_dir:
            tmp_dir = tempfile.mkdtemp()
            backend_uri = "file:///%s" % tmp_dir
            LOG.exception("Falling back to file backend using temporary"
                          " directory located at: %s", tmp_dir)
            backend = backends.fetch(_make_conf(backend_uri))
        else:
            # We were already on the temporary fallback; nothing else to try.
            raise e
    try:
        # Ensure schema upgraded before we continue working.
        with contextlib.closing(backend.get_connection()) as conn:
            conn.upgrade()
        yield backend
    finally:
        # Make sure to cleanup the temporary path if one was created for us.
        if tmp_dir:
            rm_path(tmp_dir)
| apache-2.0 |
mdinacci/rtw | demos/proto2/src/proto2.py | 1 | 15023 | # -*- coding: utf-8-*-
"""
Author: Marco Dinacci <dev@dinointeractive.com>
Copyright © 2008-2009
"""
from pandac.PandaModules import *
loadPrcFile("../res/Config.prc")
#loadPrcFileData("", "want-directtools 1")
#loadPrcFileData("", "want-tk 1")
import direct.directbase.DirectStart
from direct.gui.OnscreenText import OnscreenText
from direct.directtools.DirectGeometry import LineNodePath
from direct.showbase.DirectObject import DirectObject
from pandac.PandaModules import *
from direct.task.Task import Task
from mdlib.panda.entity import *
from mdlib.panda.core import AbstractScene, AbstractLogic, AbstractApplication
from mdlib.panda.data import GOM
from mdlib.panda.input import *
from mdlib.panda.utils import *
from mdlib.types import Types
import sys, math
#base.wireframeOn()
class Camera(object):
    """Chase camera that trails a target entity at a fixed distance."""
    # Distance moved per zoomIn/zoomOut step, along the camera's own Y axis.
    ZOOM = 30
    # How far behind the target (along its forward vector) the camera sits.
    TARGET_DISTANCE = 10

    def __init__(self):
        # Take manual control of Panda3D's default camera.
        base.disableMouse()
        base.camera.setPos(0,0,0)

    def followTarget(self, target):
        """Set the entity to chase and immediately snap the camera to it."""
        self.target = target
        self.update()

    def getPos(self):
        return base.camera.getPos()

    def zoomOut(self):
        base.camera.setY(base.camera, - self.ZOOM)

    def zoomIn(self):
        base.camera.setY(base.camera, self.ZOOM)

    def update(self):
        """Reposition behind the target and aim at it, compensating for
        the target's jump height (jumpZ)."""
        base.camera.setPos(self.target.nodepath.getPos() - \
                           self.target.forward * self.TARGET_DISTANCE)
        z = self.target.jumpZ
        base.camera.setZ(self.target.nodepath.getZ() -z + 1)
        pos = self.target.nodepath.getPos()
        pos.setZ(pos.getZ() -z)
        base.camera.lookAt(pos)
        # Raise the camera after lookAt so it looks slightly down at the target.
        base.camera.setZ(self.target.nodepath.getZ() -z + 3)
HEIGHT_TRACK = 0.5
class GameLogic(AbstractLogic):
DUMMY_VALUE = -999
# the view is not really the view but just the scene for now.
def __init__(self, view):
super(GameLogic, self).__init__(view)
self.env = GOM.createEntity(environment_params)
self.view.addEntity(self.env)
self.track = GOM.createEntity(new_track_params)
self.track.nodepath.setCollideMask(BitMask32(1))
self.view.addEntity(self.track)
self.ball = GOM.createEntity(ball_params)
self.ball.nodepath.showTightBounds()
collSphere = self.ball.nodepath.find("**/ball")
collSphere.node().setIntoCollideMask(BitMask32(2))
collSphere.node().setFromCollideMask(BitMask32.allOff())
self.view.addEntity(self.ball)
self.player = GOM.createEntity(player_params)
self.player.nodepath.setPos(self.ball.nodepath.getPos())
self.player.nodepath.setQuat(self.track.nodepath,Quat(1,0,0,0))
self.ball.forward = Vec3(0,1,0)
self.view.addEntity(self.player)
# normally the view should create it
self.cam = Camera()
self.cam.followTarget(self.ball)
self.camGroundZ = -999
self.view.cam = self.cam
# HACK
self.view.player = self.player
self.view.ball = self.ball
self.view.track = self.track
self.lastTile = ""
self.tileType = "neutral"
self.lastTileType = "neutral"
self._setupCollisionDetection()
def update(self, task):
self.inputMgr.update()
return task.cont
def updatePhysics(self, task):
dt = globalClock.getDt()
if dt > .2: return task.cont
self.camGroundZ = self.DUMMY_VALUE
ballIsCollidingWithGround = False
# keep the collision node perpendicular to the track, this is necessary
# since the ball rolls all the time
self.ballCollNodeNp.setQuat(self.track.nodepath,Quat(1,0,0,0))
# check track collisions
# TODO must optimise this, no need to check the whole track,
# but only the current segment
self.picker.traverse(self.track.nodepath)
if self.pq.getNumEntries() > 0:
self.pq.sortEntries()
firstGroundContact = self.DUMMY_VALUE
firstTile = None
for i in range(self.pq.getNumEntries()):
entry = self.pq.getEntry(i)
z = entry.getSurfacePoint(render).getZ()
# check camera collision. There can be more than one
if entry.getFromNodePath() == self.cameraCollNodeNp:
if z > firstGroundContact:
firstGroundContact = z
firstTile = entry.getIntoNodePath()
# check ball's ray collision with ground
elif entry.getFromNodePath() == self.ballCollNodeNp:
np = entry.getIntoNodePath()
#print np
self.tileType = np.findAllTextures().getTexture(0).getName()
self.ball.RayGroundZ = z
ballIsCollidingWithGround = True
if entry != self.lastTile:
self.lastTile = entry
self.camGroundZ = firstGroundContact
if ballIsCollidingWithGround == False:
if self.ball.isJumping():
print "no ball-ground contact but jumping"
pass
else:
print "no ball-ground contact, losing"
self.ball.getLost()
self.view.gameIsAlive = False
return task.done # automatically stop the task
# check for rays colliding with the ball
self.picker.traverse(self.ball.nodepath)
if self.pq.getNumEntries() > 0:
self.pq.sortEntries()
if self.pq.getNumEntries() == 1:
entry = self.pq.getEntry(0)
if entry.getFromNodePath() == self.cameraCollNodeNp:
self.camBallZ = entry.getSurfacePoint(render).getZ()
else:
raise AssertionError("must always be 1")
#if self.camGroundZ > self.camBallZ:
# ground collision happened before ball collision, this means
# that the ball is descending a slope
# Get the row colliding with the cam's ray, get two rows after,
# set all of them transparent
# TODO store the rows in a list, as I have to set the transparency
# back to 0 after the ball has passed
#pass
#row = firstTile.getParent()
#row.setSa(0.8)
#row.setTransparency(TransparencyAttrib.MAlpha)
forward = self.view._rootNode.getRelativeVector(self.player.nodepath,
Vec3(0,1,0))
forward.setZ(0)
forward.normalize()
speedVec = forward * dt * self.ball.speed
self.ball.forward = forward
self.ball.speedVec = speedVec
self.player.nodepath.setPos(self.player.nodepath.getPos() + speedVec)
self.player.nodepath.setZ(self.ball.RayGroundZ + self.ball.jumpZ + \
self.ball.physics.radius + HEIGHT_TRACK)
# rotate the ball
self.ball.nodepath.setP(self.ball.nodepath.getP() -1 * dt * \
self.ball.speed * self.ball.spinningFactor)
# set the ball to the position of the controller node
self.ball.nodepath.setPos(self.player.nodepath.getPos())
# rotate the controller to follow the direction of the ball
self.player.nodepath.setH(self.ball.nodepath.getH())
return task.cont
def resetGame(self):
self.player.nodepath.setPos(Point3(12,7,.13))
self.ball.nodepath.setPos(Point3(12,7,.13))
self.ball.nodepath.setQuat(Quat(1,0,0,0))
self.view.gameIsAlive = True
def updateLogic(self, task):
# steer
if self.keyMap["right"] == True:
right = self.view._rootNode.getRelativeVector(self.player.nodepath,
Vec3(1,0,0))
if self.ball.speed > 0:
self.ball.turnRight()
if self.keyMap["left"] == True:
if self.ball.speed > 0:
self.ball.turnLeft()
if self.keyMap["forward"] == True:
self.ball.accelerate()
else:
self.ball.decelerate()
if self.keyMap["backward"] == True:
self.ball.brake()
if self.keyMap["jump"] == True:
self.ball.jump()
self.keyMap["jump"] = False
# special actions
if self.tileType == "neutral":
self.ball.neutral()
elif self.tileType == "jump":
if self.lastTileType != "jump":
self.ball.jump()
elif self.tileType == "accelerate":
self.ball.sprint()
elif self.tileType == "slow":
self.ball.slowDown()
self.lastTileType = self.tileType
if self.ball.speed < 0:
self.ball.speed = 0
return task.cont
def setKey(self, key, value):
self.keyMap[key] = value
def debugPosition(self):
for text in aspect2d.findAllMatches("**/text").asList():
text.getParent().removeNode()
OnscreenText(text="Camera's Ray-Ball: %s" % self.camBallZ,
style=1, fg=(1,1,1,1),
pos=(-0.9,-0.45), scale = .07)
OnscreenText(text="Camera's Ray-Ground : %s" % self.camGroundZ,
style=1, fg=(1,1,1,1),
pos=(-0.9,-0.55), scale = .07)
OnscreenText(text="Camera: %s" % base.camera.getZ(),
style=1, fg=(1,1,1,1),
pos=(-0.9,-0.65), scale = .07)
OnscreenText(text="Ball ray-plane: %s" % self.ball.RayGroundZ,
style=1, fg=(1,1,1,1),
pos=(-0.9,-0.75), scale = .07)
def _setupCollisionDetection(self):
self.pq = CollisionHandlerQueue();
# ball-ground collision setup
self.ballCollNodeNp = self.ball.nodepath.attachCollisionRay("ball-ground",
0,0,10, # origin
0,0,-1, # direction
BitMask32(1),BitMask32.allOff())
self.ballCollNodeNp.setQuat(self.track.nodepath, Quat(1,0,0,0))
self.ballCollNodeNp.show()
# camera-ball collision setup
bmFrom = BitMask32(1); bmFrom.setBit(1)
self.cameraCollNodeNp = base.camera.attachCollisionRay("camera-ball",
0,0,0,
0,1,0,
bmFrom,BitMask32.allOff())
self.cameraCollNodeNp.setQuat(base.camera.getQuat() + Quat(.1,0,0,0))
self.cameraCollNodeNp.show()
self.picker = CollisionTraverser()
self.picker.setRespectPrevTransform(True)
self.picker.addCollider(self.ballCollNodeNp, self.pq)
self.picker.addCollider(self.cameraCollNodeNp, self.pq)
def _subscribeToEvents(self):
self.keyMap = {"left":False, "right":False, "forward":False, \
"backward":False, "jump": False}
self.inputMgr = InputManager(base)
self.inputMgr.createSchemeAndSwitch("game")
self.inputMgr.bindCallback("arrow_left", self.setKey, ["left",True], scheme="game")
self.inputMgr.bindCallback("arrow_right", self.setKey, ["right",True])
self.inputMgr.bindCallback("arrow_up", self.setKey, ["forward",True])
self.inputMgr.bindCallback("arrow_left-up", self.setKey, ["left",False])
self.inputMgr.bindCallback("arrow_right-up", self.setKey, ["right",False])
self.inputMgr.bindCallback("arrow_up-up", self.setKey, ["forward",False])
self.inputMgr.bindCallback("arrow_down", self.setKey, ["backward",True])
self.inputMgr.bindCallback("arrow_down-up", self.setKey, ["backward",False])
self.inputMgr.bindCallback("space", self.setKey, ["jump",True])
self.inputMgr.bindCallback("c", self.view.switchCamera)
self.inputMgr.bindCallback("d", self.debugPosition)
class World(AbstractScene):
    """Scene container: root node, lights and per-frame camera update.

    NOTE(review): self.cam is injected externally (GameLogic assigns
    view.cam) — verify it is always set before update() first runs.
    """
    def __init__(self):
        super(World, self).__init__()
        self.lines = render.attachNewNode("lines")
        # Axis model purely as a visual debugging reference.
        loader.loadModelCopy("models/misc/xyzAxis").reparentTo(render)
        self.setSceneGraphNode(render)
        #self._setupCollisionDetection()
        self._setupLights()
        self.gameIsAlive = True

    def update(self, task):
        """Per-frame task: follow camera and reset the debug-lines node."""
        #dt = globalClock.getDt()
        #if dt > .2: return task.cont
        if self.gameIsAlive:
            self.cam.update()
        self.lines.removeNode()
        self.lines = render.attachNewNode("lines")
        return task.cont

    def switchCamera(self):
        # Toggle Panda3D's out-of-body free camera (debugging aid).
        base.oobe()

    def _setupLights(self):
        """Build ambient + directional light attributes for the scene."""
        lAttrib = LightAttrib.makeAllOff()
        ambientLight = AmbientLight( "ambientLight" )
        ambientLight.setColor( Vec4(.55, .55, .55, 1) )
        lAttrib = lAttrib.addLight( ambientLight )
        directionalLight = DirectionalLight( "directionalLight" )
        directionalLight.setDirection( Vec3( 0, 0, -1 ) )
        directionalLight.setColor( Vec4( 0.375, 0.375, 0.375, 1 ) )
        directionalLight.setSpecularColor(Vec4(1,1,1,1))
        lAttrib = lAttrib.addLight( directionalLight )
class GameApplication(AbstractApplication):
    """Application bootstrap: wires scene + logic together, schedules
    the per-frame tasks and binds global keys (escape quits, 'r'
    restarts)."""
    def _subscribeToEvents(self):
        base.accept("escape", self.shutdown)
        base.accept("r", self.restartGame)

    def _createLogicAndView(self):
        self.scene = World()
        self.logic = GameLogic(self.scene)

    def restartGame(self):
        """Stop all update tasks, reset entity positions, reschedule."""
        taskMgr.remove("update-input")
        taskMgr.remove("update-logic")
        taskMgr.remove("update-physics")
        taskMgr.remove("update-scene")
        self.logic.resetGame()
        self.start()

    def start(self):
        """Register the per-frame update tasks with the task manager."""
        taskMgr.add(self.logic.update, "update-input")
        taskMgr.add(self.logic.updateLogic, "update-logic")
        taskMgr.add(self.logic.updatePhysics, "update-physics")
        taskMgr.add(self.scene.update, "update-scene")

    def shutdown(self):
        sys.exit()
# set a fixed frame rate
from pandac.PandaModules import ClockObject
# Target frames-per-second; only consumed by the commented-out limiter below.
FPS = 40
globalClock = ClockObject.getGlobalClock()
#globalClock.setMode(ClockObject.MLimited)
#globalClock.setFrameRate(FPS)
if __name__ == '__main__':
    # Build the application (scene + logic), schedule its tasks and
    # enter Panda3D's main loop.
    GameApplication().start()
    run()
| mit |
ayushagrawal288/zamboni | mkt/webapps/cron.py | 3 | 19390 | import logging
import os
import shutil
import time
from datetime import datetime
from django.conf import settings
from django.db.models import Q
import commonware.log
import cronjobs
from celery import chord
import mkt
from lib.metrics import get_monolith_client
from mkt.api.models import Nonce
from mkt.developers.models import ActivityLog
from mkt.files.models import File, FileUpload
from mkt.site.decorators import use_master
from mkt.site.storage_utils import (private_storage, storage_is_remote,
walk_storage)
from mkt.site.utils import chunked, days_ago, walkfiles
from .indexers import WebappIndexer
from .models import Installed, Installs, Trending, Webapp
from .tasks import delete_logs, dump_user_installs, zip_users
log = commonware.log.getLogger('z.cron')
task_log = logging.getLogger('z.task')
def _change_last_updated(next):
    """Persist ``last_updated`` for every webapp whose stored value differs.

    ``next`` maps webapp id -> candidate last_updated value.  Only rows
    that are already known and whose timestamp actually changed are saved,
    which keeps the save/invalidation traffic to a minimum.
    """
    stored = dict(Webapp.objects.values_list('id', 'last_updated'))
    # Keep only ids we already know about whose timestamp really changed
    # (unknown ids are silently skipped, as before).
    pending = {app_id: stamp
               for app_id, stamp in next.items()
               if app_id in stored and stored[app_id] != stamp}
    if not pending:
        return

    log.debug('Updating %s add-ons' % len(pending))
    # Update + invalidate (the model save handles cache invalidation).
    for webapp in Webapp.objects.filter(id__in=pending).no_transforms():
        webapp.last_updated = pending[webapp.id]
        webapp.save()
@cronjobs.register
@use_master
def addon_last_updated():
    """Cron: recompute and store ``last_updated`` for every webapp."""
    next = {}
    # Collect (id, last_updated) pairs; when an id appears more than once
    # the later pair wins because the dict is overwritten in order.
    qs = Webapp._last_updated_queries().values()
    for addon, last_updated in qs.values_list('id', 'last_updated'):
        next[addon] = last_updated
    _change_last_updated(next)

    # Get anything that didn't match above.
    # Fall back to the creation date for apps with no last_updated at all.
    other = (Webapp.objects.filter(last_updated__isnull=True)
             .values_list('id', 'created'))
    _change_last_updated(dict(other))
@cronjobs.register
def hide_disabled_files():
    """Cron: move files belonging to disabled apps out of public storage."""
    # If an add-on or a file is disabled, it should be moved to
    # GUARDED_ADDONS_PATH so it's not publicly visible.
    #
    # We ignore deleted versions since we hide those files when deleted and
    # also due to bug 980916.
    ids = (File.objects
           .filter(version__deleted=False)
           .filter(Q(status=mkt.STATUS_DISABLED) |
                   Q(version__addon__status=mkt.STATUS_DISABLED) |
                   Q(version__addon__disabled_by_user=True))
           .values_list('id', flat=True))
    for chunk in chunked(ids, 300):
        # Re-fetch in small batches to keep memory bounded.
        qs = File.objects.filter(id__in=chunk)
        qs = qs.select_related('version')
        for f in qs:
            f.hide_disabled_file()
@cronjobs.register
def unhide_disabled_files():
    """Cron: move wrongly guarded files back into public storage.

    Files are getting stuck in /guarded-addons for some reason. This job
    makes sure guarded add-ons are supposed to be disabled, and unhides any
    file found there without a matching disabled File row.
    """
    # Deliberately shadows the module-level 'z.cron' logger with a more
    # specific one for this job.
    log = logging.getLogger('z.files.disabled')
    q = (Q(version__addon__status=mkt.STATUS_DISABLED) |
         Q(version__addon__disabled_by_user=True))
    # Set of (addon id, filename) pairs that legitimately belong in guarded.
    files = set(File.objects.filter(q | Q(status=mkt.STATUS_DISABLED))
                .values_list('version__addon', 'filename'))
    for filepath in walkfiles(settings.GUARDED_ADDONS_PATH):
        # Guarded layout is .../<addon id>/<filename>.
        addon, filename = filepath.split('/')[-2:]
        if tuple([int(addon), filename]) not in files:
            log.warning('File that should not be guarded: %s.' % filepath)
            try:
                file_ = (File.objects.select_related('version__addon')
                         .get(version__addon=addon, filename=filename))
                file_.unhide_disabled_file()
            except File.DoesNotExist:
                log.warning('File object does not exist for: %s.' % filepath)
            except Exception:
                # Best-effort sweep: log and keep going with the next file.
                log.error('Could not unhide file: %s.' % filepath,
                          exc_info=True)
@cronjobs.register
def clean_old_signed(seconds=60 * 60):
    """Clean out apps signed for reviewers.

    Deletes every reviewer-signed package older than ``seconds``
    (default: one hour).
    """
    log.info('Removing old apps signed for reviewers')
    root = settings.SIGNED_APPS_REVIEWER_PATH
    # Local storage uses local time for file modification. S3 uses UTC time.
    now = datetime.utcnow if storage_is_remote() else datetime.now
    for nextroot, dirs, files in walk_storage(root):
        for fn in files:
            full = os.path.join(nextroot, fn)
            age = now() - private_storage.modified_time(full)
            if age.total_seconds() > seconds:
                log.debug('Removing signed app: %s, %dsecs old.' % (
                    full, age.total_seconds()))
                private_storage.delete(full)
def _get_installs(app_id):
    """
    Calculate popularity of app for all regions and per region.

    Queries the Monolith metrics store for the sum of ``app_installs``
    over the last POPULARITY_PERIOD days, globally and bucketed by region.

    Returns value in the format of::

        {'all': <global installs>,
         <region_slug>: <regional installs>,
         ...}

    Returns ``{}`` when Monolith errors out or has no data for the app.
    """
    # How many days back do we include when calculating popularity.
    POPULARITY_PERIOD = 90

    client = get_monolith_client()

    # Shared sub-aggregation: total installs inside the popularity window.
    # Re-used both globally and per region bucket below.
    popular = {
        'filter': {
            'range': {
                'date': {
                    'gte': days_ago(POPULARITY_PERIOD).date().isoformat(),
                    'lte': days_ago(1).date().isoformat()
                }
            }
        },
        'aggs': {
            'total_installs': {
                'sum': {
                    'field': 'app_installs'
                }
            }
        }
    }

    query = {
        'query': {
            'filtered': {
                'query': {'match_all': {}},
                'filter': {'term': {'app-id': app_id}}
            }
        },
        'aggregations': {
            'popular': popular,
            'region': {
                'terms': {
                    'field': 'region',
                    # Add size so we get all regions, not just the top 10.
                    'size': len(mkt.regions.ALL_REGIONS)
                },
                'aggregations': {
                    'popular': popular
                }
            }
        },
        # Hits are irrelevant; we only want the aggregations.
        'size': 0
    }

    try:
        res = client.raw(query)
    except ValueError as e:
        task_log.error('Error response from Monolith: {0}'.format(e))
        return {}

    if 'aggregations' not in res:
        task_log.error('No installs for app {}'.format(app_id))
        return {}

    results = {
        'all': res['aggregations']['popular']['total_installs']['value']
    }

    if 'region' in res['aggregations']:
        for regional_res in res['aggregations']['region']['buckets']:
            region_slug = regional_res['key']
            popular = regional_res['popular']['total_installs']['value']
            results[region_slug] = popular
    return results
@cronjobs.register
@use_master
def update_app_installs():
    """
    Update app install counts for all published apps.

    We break these into chunks so we can bulk index them. Each chunk will
    process the apps in it and reindex them in bulk. After all the chunks are
    processed we find records that haven't been updated and purge/reindex those
    so we nullify their values.
    """
    chunk_size = 100
    ids = list(Webapp.objects.filter(status=mkt.STATUS_PUBLIC,
                                     disabled_by_user=False)
               .values_list('id', flat=True))

    for chunk in chunked(ids, chunk_size):
        count = 0
        times = []
        reindex_ids = []

        for app in Webapp.objects.filter(id__in=chunk).no_transforms():
            reindex = False
            count += 1
            now = datetime.now()
            t_start = time.time()
            scores = _get_installs(app.id)

            # Update global installs, then per-region installs below.
            # NOTE(review): scores.get() may return None; under Python 2
            # (this codebase) `None > 0` is False, so missing scores fall
            # through to the delete branch -- on Python 3 this would raise
            # a TypeError.
            value = scores.get('all')
            if value > 0:
                reindex = True
                installs, created = app.popularity.get_or_create(
                    region=0, defaults={'value': value})
                if not created:
                    installs.update(value=value, modified=now)
            else:
                # The value is <= 0 so we can just ignore it.
                app.popularity.filter(region=0).delete()

            for region in mkt.regions.REGIONS_DICT.values():
                value = scores.get(region.slug)
                if value > 0:
                    reindex = True
                    installs, created = app.popularity.get_or_create(
                        region=region.id, defaults={'value': value})
                    if not created:
                        installs.update(value=value, modified=now)
                else:
                    # The value is <= 0 so we can just ignore it.
                    app.popularity.filter(region=region.id).delete()

            if reindex:
                reindex_ids.append(app.id)

            times.append(time.time() - t_start)

        # Now reindex the apps that actually have a popularity value.
        if reindex_ids:
            WebappIndexer.run_indexing(reindex_ids)

        # NOTE(review): divides by `count` -- would ZeroDivisionError if a
        # chunk's apps all vanished between the two queries; confirm this
        # race is acceptable.
        log.info('Installs calculated for %s apps. Avg time overall: '
                 '%0.2fs' % (count, sum(times) / count))

    # Purge any records that were not updated.
    #
    # Note: We force update `modified` even if no data changes so any records
    # with older modified times can be purged.
    now = datetime.now()
    midnight = datetime(year=now.year, month=now.month, day=now.day)
    qs = Installs.objects.filter(modified__lte=midnight)
    # First get the IDs so we know what to reindex.
    purged_ids = qs.values_list('addon', flat=True).distinct()
    # Then delete them.
    qs.delete()
    for ids in chunked(purged_ids, chunk_size):
        WebappIndexer.run_indexing(ids)
def _get_trending(app_id):
    """
    Calculate trending for app for all regions and per region.

    a = installs from 8 days ago to 1 day ago
    b = installs from 29 days ago to 9 days ago, averaged per week
    trending = (a - b) / b if a > 100 and b > 1 else 0

    Returns value in the format of::

        {'all': <global trending score>,
         <region_slug>: <regional trending score>,
         ...}

    Returns ``{}`` on Monolith errors, missing data, or when global
    last-week installs are below the threshold.
    """
    # How many app installs are required in the prior week to be considered
    # "trending". Adjust this as total Marketplace app installs increases.
    #
    # Note: AMO uses 1000.0 for add-ons.
    PRIOR_WEEK_INSTALL_THRESHOLD = 100.0

    client = get_monolith_client()

    # Installs over the last week (8 days ago .. yesterday).
    week1 = {
        'filter': {
            'range': {
                'date': {
                    'gte': days_ago(8).date().isoformat(),
                    'lte': days_ago(1).date().isoformat()
                }
            }
        },
        'aggs': {
            'total_installs': {
                'sum': {
                    'field': 'app_installs'
                }
            }
        }
    }
    # Installs over the three weeks before that (29 .. 9 days ago).
    week3 = {
        'filter': {
            'range': {
                'date': {
                    'gte': days_ago(29).date().isoformat(),
                    'lte': days_ago(9).date().isoformat()
                }
            }
        },
        'aggs': {
            'total_installs': {
                'sum': {
                    'field': 'app_installs'
                }
            }
        }
    }

    query = {
        'query': {
            'filtered': {
                'query': {'match_all': {}},
                'filter': {'term': {'app-id': app_id}}
            }
        },
        'aggregations': {
            'week1': week1,
            'week3': week3,
            'region': {
                'terms': {
                    'field': 'region',
                    # Add size so we get all regions, not just the top 10.
                    'size': len(mkt.regions.ALL_REGIONS)
                },
                'aggregations': {
                    'week1': week1,
                    'week3': week3
                }
            }
        },
        # Hits are irrelevant; we only want the aggregations.
        'size': 0
    }

    try:
        res = client.raw(query)
    except ValueError as e:
        task_log.error('Error response from Monolith: {0}'.format(e))
        return {}

    if 'aggregations' not in res:
        task_log.error('No installs for app {}'.format(app_id))
        return {}

    def _score(week1, week3):
        # If last week app installs are < 100, this app isn't trending.
        if week1 < PRIOR_WEEK_INSTALL_THRESHOLD:
            return 0.0

        score = 0.0
        if week3 > 1.0:
            score = (week1 - week3) / week3
        if score < 0.0:
            score = 0.0

        return score

    # Global trending score.  week3 is divided by 3 to get a weekly average.
    week1 = res['aggregations']['week1']['total_installs']['value']
    week3 = res['aggregations']['week3']['total_installs']['value'] / 3.0

    if week1 < PRIOR_WEEK_INSTALL_THRESHOLD:
        # If global installs over the last week aren't over 100, we
        # short-circuit and return a zero-like value as this is not a trending
        # app by definition. Since global installs aren't above 100, per-region
        # installs won't be either.
        return {}

    results = {
        'all': _score(week1, week3)
    }

    if 'region' in res['aggregations']:
        for regional_res in res['aggregations']['region']['buckets']:
            region_slug = regional_res['key']
            week1 = regional_res['week1']['total_installs']['value']
            week3 = regional_res['week3']['total_installs']['value'] / 3.0
            results[region_slug] = _score(week1, week3)
    return results
@cronjobs.register
@use_master
def update_app_trending():
    """
    Update trending for all published apps.

    We break these into chunks so we can bulk index them. Each chunk will
    process the apps in it and reindex them in bulk. After all the chunks are
    processed we find records that haven't been updated and purge/reindex those
    so we nullify their values.
    """
    chunk_size = 100
    ids = list(Webapp.objects.filter(status=mkt.STATUS_PUBLIC,
                                     disabled_by_user=False)
               .values_list('id', flat=True))

    for chunk in chunked(ids, chunk_size):
        count = 0
        times = []
        reindex_ids = []

        for app in Webapp.objects.filter(id__in=chunk).no_transforms():
            reindex = False
            count += 1
            now = datetime.now()
            t_start = time.time()
            scores = _get_trending(app.id)

            # Update global trending, then per-region trending below.
            # NOTE(review): scores.get() may return None; under Python 2
            # `None > 0` is False, routing missing scores to the delete
            # branch -- on Python 3 this would raise a TypeError.
            value = scores.get('all')
            if value > 0:
                reindex = True
                trending, created = app.trending.get_or_create(
                    region=0, defaults={'value': value})
                if not created:
                    trending.update(value=value, modified=now)
            else:
                # The value is <= 0 so the app is not trending. Let's remove it
                # from the trending table.
                app.trending.filter(region=0).delete()

            for region in mkt.regions.REGIONS_DICT.values():
                value = scores.get(region.slug)
                if value > 0:
                    reindex = True
                    trending, created = app.trending.get_or_create(
                        region=region.id, defaults={'value': value})
                    if not created:
                        trending.update(value=value, modified=now)
                else:
                    # The value is <= 0 so the app is not trending.
                    # Let's remove it from the trending table.
                    app.trending.filter(region=region.id).delete()

            times.append(time.time() - t_start)

            if reindex:
                reindex_ids.append(app.id)

        # Now reindex the apps that actually have a trending score.
        if reindex_ids:
            WebappIndexer.run_indexing(reindex_ids)

        log.info('Trending calculated for %s apps. Avg time overall: '
                 '%0.2fs' % (count, sum(times) / count))

    # Purge any records that were not updated.
    #
    # Note: We force update `modified` even if no data changes so any records
    # with older modified times can be purged.
    now = datetime.now()
    midnight = datetime(year=now.year, month=now.month, day=now.day)
    qs = Trending.objects.filter(modified__lte=midnight)
    # First get the IDs so we know what to reindex.
    purged_ids = qs.values_list('addon', flat=True).distinct()
    # Then delete them.
    qs.delete()
    for ids in chunked(purged_ids, chunk_size):
        WebappIndexer.run_indexing(ids)
@cronjobs.register
def dump_user_installs_cron():
    """
    Sets up tasks to do user install dumps.

    Fans out one celery task per chunk of opted-in users, then zips all of
    the results once every chunk has finished (celery chord).
    """
    chunk_size = 100
    # Get valid users to dump: only those who opted into recommendations.
    user_ids = set(Installed.objects.filter(user__enable_recommendations=True)
                   .values_list('user', flat=True))

    # Remove old dump data before running.
    user_dir = os.path.join(settings.DUMPED_USERS_PATH, 'users')
    if os.path.exists(user_dir):
        shutil.rmtree(user_dir)

    grouping = []
    for chunk in chunked(user_ids, chunk_size):
        grouping.append(dump_user_installs.subtask(args=[chunk]))

    # immutable=True: the zip step ignores the chunk tasks' return values.
    post = zip_users.subtask(immutable=True)
    ts = chord(grouping, post)
    ts.apply_async()
def _remove_stale_files(path, age, msg):
    """Delete regular files under ``path`` older than ``age`` seconds.

    ``msg`` is a log format string with a single ``{0}`` placeholder for
    the path of each deleted file.

    Fixes: the original called os.remove() on every directory entry, so a
    subdirectory both raised OSError and aborted the rest of the sweep;
    non-regular entries are now skipped.  The cutoff is computed once so a
    long sweep uses one consistent reference time.
    """
    cutoff = time.time() - age
    for file_name in os.listdir(path):
        file_path = os.path.join(path, file_name)
        # Only regular files are dump artifacts; never unlink directories.
        if not os.path.isfile(file_path):
            continue
        if os.stat(file_path).st_mtime < cutoff:
            log.debug(msg.format(file_path))
            os.remove(file_path)
@cronjobs.register
def mkt_gc(**kw):
    """Site-wide garbage collections.

    Prunes old activity logs, expired OAuth nonces, stale dump tarballs,
    temporary preview/icon files and stale FileUpload rows.
    """
    log.debug('Collecting data to delete')
    logs = (ActivityLog.objects.filter(created__lt=days_ago(90))
            .exclude(action__in=mkt.LOG_KEEP).values_list('id', flat=True))

    for chunk in chunked(logs, 100):
        chunk.sort()
        log.debug('Deleting log entries: %s' % str(chunk))
        # Deletion happens asynchronously in a celery task.
        delete_logs.delay(chunk)

    # Clear oauth nonce rows. These expire after 10 minutes but we're just
    # clearing those that are more than 1 day old.
    Nonce.objects.filter(created__lt=days_ago(1)).delete()

    # Delete the dump apps over 30 days.
    _remove_stale_files(settings.DUMPED_APPS_PATH,
                        settings.DUMPED_APPS_DAYS_DELETE,
                        'Deleting old tarball: {0}')

    # Delete the dumped user installs over 30 days.
    _remove_stale_files(settings.DUMPED_USERS_PATH,
                        settings.DUMPED_USERS_DAYS_DELETE,
                        'Deleting old tarball: {0}')

    # Delete old files in select directories under TMP_PATH.
    _remove_stale_files(os.path.join(settings.TMP_PATH, 'preview'),
                        settings.TMP_PATH_DAYS_DELETE,
                        'Deleting TMP_PATH file: {0}')
    _remove_stale_files(os.path.join(settings.TMP_PATH, 'icon'),
                        settings.TMP_PATH_DAYS_DELETE,
                        'Deleting TMP_PATH file: {0}')

    # Delete stale FileUploads.
    for fu in FileUpload.objects.filter(created__lte=days_ago(90)):
        log.debug(u'[FileUpload:{uuid}] Removing file: {path}'
                  .format(uuid=fu.uuid, path=fu.path))
        if fu.path:
            try:
                os.remove(fu.path)
            except OSError:
                # File already gone; still drop the DB row below.
                pass
        fu.delete()
| bsd-3-clause |
jeffzheng1/tensorflow | tensorflow/python/kernel_tests/io_ops_test.py | 4 | 3342 | # -*- coding: utf-8 -*-
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tensorflow.python.ops.io_ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tempfile
import tensorflow as tf
class IoOpsTest(tf.test.TestCase):
    """Tests for the file ops: tf.read_file, tf.write_file, tf.matching_files."""

    def testReadFile(self):
        # Covers empty, ASCII, and non-ASCII (Cyrillic) file contents.
        cases = ['', 'Some contents', 'Неки садржаји на српском']
        for contents in cases:
            contents = tf.compat.as_bytes(contents)
            temp = tempfile.NamedTemporaryFile(
                prefix='ReadFileTest', dir=self.get_temp_dir())
            open(temp.name, 'wb').write(contents)
            with self.test_session():
                read = tf.read_file(temp.name)
                # read_file yields a scalar (shape []) string tensor.
                self.assertEqual([], read.get_shape())
                self.assertEqual(read.eval(), contents)

    def testWriteFile(self):
        cases = ['', 'Some contents']
        for contents in cases:
            contents = tf.compat.as_bytes(contents)
            temp = tempfile.NamedTemporaryFile(
                prefix='WriteFileTest', dir=self.get_temp_dir())
            with self.test_session() as sess:
                w = tf.write_file(temp.name, contents)
                sess.run(w)
                # Verify on disk, bypassing TensorFlow.
                file_contents = open(temp.name, 'rb').read()
                self.assertEqual(file_contents, contents)

    def _subset(self, files, indices):
        # Helper: names of the temp files whose position is in `indices`.
        return set(tf.compat.as_bytes(files[i].name)
                   for i in range(len(files)) if i in indices)

    def testMatchingFiles(self):
        cases = ['ABcDEF.GH', 'ABzDEF.GH', 'ABasdfjklDEF.GH', 'AB3DEF.GH',
                 'AB4DEF.GH', 'ABDEF.GH', 'XYZ']
        files = [tempfile.NamedTemporaryFile(
            prefix=c, dir=self.get_temp_dir()) for c in cases]

        with self.test_session():
            # Test exact match without wildcards.
            for f in files:
                self.assertEqual(tf.matching_files(f.name).eval(),
                                 tf.compat.as_bytes(f.name))

            # We will look for files matching "ABxDEF.GH*" where "x" is some wildcard.
            pos = files[0].name.find(cases[0])
            pattern = files[0].name[:pos] + 'AB%sDEF.GH*'

            self.assertEqual(set(tf.matching_files(pattern % 'z').eval()),
                             self._subset(files, [1]))
            self.assertEqual(set(tf.matching_files(pattern % '?').eval()),
                             self._subset(files, [0, 1, 3, 4]))
            self.assertEqual(set(tf.matching_files(pattern % '*').eval()),
                             self._subset(files, [0, 1, 2, 3, 4, 5]))
            self.assertEqual(set(tf.matching_files(pattern % '[cxz]').eval()),
                             self._subset(files, [0, 1]))
            self.assertEqual(set(tf.matching_files(pattern % '[0-9]').eval()),
                             self._subset(files, [3, 4]))


if __name__ == '__main__':
    tf.test.main()
| apache-2.0 |
wetek-enigma/enigma2 | lib/python/Components/HdmiCec.py | 20 | 13311 | import struct
import os
from fcntl import ioctl
from sys import maxint
from enigma import eTimer, eHdmiCEC, eActionMap
from config import config, ConfigSelection, ConfigYesNo, ConfigSubsection, ConfigText
from Tools.StbHardware import getFPWasTimerWakeup
from Tools.Directories import fileExists
# --- HDMI-CEC user configuration -------------------------------------------
config.hdmicec = ConfigSubsection()
config.hdmicec.enabled = ConfigYesNo(default = False)
# What we announce to the TV on our own power transitions.
config.hdmicec.control_tv_standby = ConfigYesNo(default = True)
config.hdmicec.control_tv_wakeup = ConfigYesNo(default = True)
config.hdmicec.report_active_source = ConfigYesNo(default = True)
config.hdmicec.report_active_menu = ConfigYesNo(default = True)
# How we react to the TV's power transitions.
config.hdmicec.handle_tv_standby = ConfigYesNo(default = True)
config.hdmicec.handle_tv_wakeup = ConfigYesNo(default = True)
# Which incoming CEC traffic counts as "the TV woke up" (see
# HdmiCec.messageReceived for the matching opcodes).
config.hdmicec.tv_wakeup_detection = ConfigSelection(
    choices = {
        "wakeup": _("Wakeup"),
        "tvreportphysicaladdress": _("TV physical address report"),
        "sourcerequest": _("Source request"),
        "streamrequest": _("Stream request"),
        "osdnamerequest": _("OSD name request"),
        "activity": _("Any activity"),
    },
    default = "streamrequest")
# "0.0.0.0" means: use the physical address reported by the driver.
config.hdmicec.fixed_physical_address = ConfigText(default = "0.0.0.0")
config.hdmicec.volume_forwarding = ConfigYesNo(default = False)
config.hdmicec.control_receiver_wakeup = ConfigYesNo(default = False)
config.hdmicec.control_receiver_standby = ConfigYesNo(default = False)
config.hdmicec.handle_deepstandby_events = ConfigYesNo(default = False)
config.hdmicec.preemphasis = ConfigYesNo(default = False)

# Minimum pause between outgoing messages: "Disabled" plus fixed steps in ms.
choicelist = []
for i in (10, 50, 100, 150, 250, 500, 750, 1000, 1500, 2000):
    choicelist.append(("%d" % i, "%d ms" % i))
config.hdmicec.minimum_send_interval = ConfigSelection(default = "0", choices = [("0", _("Disabled"))] + choicelist)
class HdmiCec:
    # Singleton guard: only one instance may ever exist (asserted below).
    instance = None

    def __init__(self):
        """Wire up CEC messaging, config notifiers and key forwarding.

        Everything is skipped when HDMI-CEC is disabled in the config.
        """
        if config.hdmicec.enabled.value:
            assert not HdmiCec.instance, "only one HdmiCec instance is allowed!"
            HdmiCec.instance = self

            # Rate-limited sends go through self.queue, drained by self.wait
            # (see sendMessage/sendCmd).
            self.wait = eTimer()
            self.wait.timeout.get().append(self.sendCmd)
            self.queue = []

            eHdmiCEC.getInstance().messageReceived.get().append(self.messageReceived)
            config.misc.standbyCounter.addNotifier(self.onEnterStandby, initial_call = False)
            config.misc.DeepStandby.addNotifier(self.onEnterDeepStandby, initial_call = False)
            self.setFixedPhysicalAddress(config.hdmicec.fixed_physical_address.value)

            self.volumeForwardingEnabled = False
            self.volumeForwardingDestination = 0
            # Lowest-priority global key hook so volume keys can be forwarded.
            eActionMap.getInstance().bindAction('', -maxint - 1, self.keyEvent)
            config.hdmicec.volume_forwarding.addNotifier(self.configVolumeForwarding)
            config.hdmicec.enabled.addNotifier(self.configVolumeForwarding)
            if config.hdmicec.handle_deepstandby_events.value:
                if not getFPWasTimerWakeup():
                    self.wakeupMessages()
            dummy = self.checkifPowerupWithoutWakingTv() # initially write 'False' to file, see below
            # if fileExists("/proc/stb/hdmi/preemphasis"):
            #     self.sethdmipreemphasis()
    def getPhysicalAddress(self):
        """Return the CEC physical address as dotted text, e.g. '1.0.0.0'."""
        physicaladdress = eHdmiCEC.getInstance().getPhysicalAddress()
        hexstring = '%04x' % physicaladdress
        return hexstring[0] + '.' + hexstring[1] + '.' + hexstring[2] + '.' + hexstring[3]

    def setFixedPhysicalAddress(self, address):
        """Persist a fixed physical address and push it to the driver.

        `address` is dotted text like '1.2.3.4'; '0.0.0.0' means automatic.
        """
        if address != config.hdmicec.fixed_physical_address.value:
            config.hdmicec.fixed_physical_address.value = address
            config.hdmicec.fixed_physical_address.save()
        # '1.2.3.4' -> the 16-bit value 0x1234.
        hexstring = address[0] + address[2] + address[4] + address[6]
        eHdmiCEC.getInstance().setFixedPhysicalAddress(int(float.fromhex(hexstring)))
    def sendMessage(self, address, message):
        """Translate a symbolic message name into a CEC opcode and send it.

        `address` is the logical destination (0 = TV, 5 = audio system,
        0x0f = broadcast); some messages override it.  Unknown message
        names are silently ignored (cmd stays 0).
        """
        if config.hdmicec.enabled.value:
            cmd = 0
            data = ''
            if message == "wakeup":
                cmd = 0x04          # <Image View On>
            elif message == "sourceactive":
                address = 0x0f # use broadcast for active source command
                cmd = 0x82          # <Active Source> + our physical address
                physicaladdress = eHdmiCEC.getInstance().getPhysicalAddress()
                data = str(struct.pack('BB', int(physicaladdress/256), int(physicaladdress%256)))
            elif message == "standby":
                cmd = 0x36          # <Standby>
            elif message == "sourceinactive":
                physicaladdress = eHdmiCEC.getInstance().getPhysicalAddress()
                cmd = 0x9d          # <Inactive Source>
                data = str(struct.pack('BB', int(physicaladdress/256), int(physicaladdress%256)))
            elif message == "menuactive":
                cmd = 0x8e          # <Menu Status>: activated
                data = str(struct.pack('B', 0x00))
            elif message == "menuinactive":
                cmd = 0x8e          # <Menu Status>: deactivated
                data = str(struct.pack('B', 0x01))
            elif message == "givesystemaudiostatus":
                cmd = 0x7d          # <Give System Audio Mode Status>
                address = 0x05      # always to the audio system
            elif message == "setsystemaudiomode":
                cmd = 0x70          # <System Audio Mode Request>
                address = 0x05      # always to the audio system
                physicaladdress = eHdmiCEC.getInstance().getPhysicalAddress()
                data = str(struct.pack('BB', int(physicaladdress/256), int(physicaladdress%256)))
            elif message == "osdname":
                cmd = 0x47          # <Set OSD Name>: hostname, max 14 chars
                data = os.uname()[1]
                data = data[:14]
            elif message == "poweractive":
                cmd = 0x90          # <Report Power Status>: on
                data = str(struct.pack('B', 0x00))
            elif message == "powerinactive":
                cmd = 0x90          # <Report Power Status>: standby
                data = str(struct.pack('B', 0x01))
            elif message == "reportaddress":
                address = 0x0f # use broadcast address
                cmd = 0x84          # <Report Physical Address> + device type
                physicaladdress = eHdmiCEC.getInstance().getPhysicalAddress()
                devicetype = eHdmiCEC.getInstance().getDeviceType()
                data = str(struct.pack('BBB', int(physicaladdress/256), int(physicaladdress%256), devicetype))
            elif message == "vendorid":
                cmd = 0x87          # <Device Vendor ID> (all zeros)
                data = '\x00\x00\x00'
            elif message == "keypoweron":
                cmd = 0x44          # <User Control Pressed>: power-on key
                data = str(struct.pack('B', 0x6d))
            elif message == "keypoweroff":
                cmd = 0x44          # <User Control Pressed>: power-off key
                data = str(struct.pack('B', 0x6c))
            if cmd:
                if config.hdmicec.minimum_send_interval.value != "0" and message != "standby": # Use no interval time when message is standby. usefull for Panasonic TV
                    # Rate-limited path: queue it and let the eTimer drain.
                    self.queue.append((address, cmd, data))
                    if not self.wait.isActive():
                        self.wait.start(int(config.hdmicec.minimum_send_interval.value), True)
                else:
                    eHdmiCEC.getInstance().sendMessage(address, cmd, data, len(data))

    def sendCmd(self):
        """eTimer callback: send the next queued message and re-arm the timer."""
        if len(self.queue):
            (address, cmd, data) = self.queue.pop(0)
            eHdmiCEC.getInstance().sendMessage(address, cmd, data, len(data))
            self.wait.start(int(config.hdmicec.minimum_send_interval.value), True)
    def sendMessages(self, address, messages):
        """Send several symbolic messages (see sendMessage) to one address."""
        for message in messages:
            self.sendMessage(address, message)

    def wakeupMessages(self):
        """Announce wake-up to the TV (and AV receiver) according to config."""
        if config.hdmicec.enabled.value:
            if self.checkifPowerupWithoutWakingTv() == 'True':
                print "[HdmiCec] Skip waking TV, found 'True' in '/tmp/powerup_without_waking_tv.txt' (usually written by openWebif)"
            else:
                messages = []
                if config.hdmicec.control_tv_wakeup.value:
                    messages.append("wakeup")
                if config.hdmicec.report_active_source.value:
                    messages.append("sourceactive")
                if config.hdmicec.report_active_menu.value:
                    messages.append("menuactive")
                if messages:
                    self.sendMessages(0, messages)

                if config.hdmicec.control_receiver_wakeup.value:
                    # Logical address 5 = the audio system / AV receiver.
                    self.sendMessage(5, "keypoweron")
                    self.sendMessage(5, "setsystemaudiomode")

    def standbyMessages(self):
        """Announce standby to the TV (and AV receiver) according to config."""
        if config.hdmicec.enabled.value:
            messages = []
            if config.hdmicec.control_tv_standby.value:
                messages.append("standby")
            else:
                # Not allowed to switch the TV off: just withdraw our
                # active-source/menu status instead.
                if config.hdmicec.report_active_source.value:
                    messages.append("sourceinactive")
                if config.hdmicec.report_active_menu.value:
                    messages.append("menuinactive")
            if messages:
                self.sendMessages(0, messages)

            if config.hdmicec.control_receiver_standby.value:
                self.sendMessage(5, "keypoweroff")
                self.sendMessage(5, "standby")
    def onLeaveStandby(self):
        """Standby screen closed -> tell the TV we are awake again."""
        self.wakeupMessages()

    def onEnterStandby(self, configElement):
        """standbyCounter notifier: hook leave-standby and announce standby."""
        from Screens.Standby import inStandby
        inStandby.onClose.append(self.onLeaveStandby)
        self.standbyMessages()

    def onEnterDeepStandby(self, configElement):
        """DeepStandby notifier: announce standby if configured to do so."""
        if config.hdmicec.handle_deepstandby_events.value:
            self.standbyMessages()

    def standby(self):
        """Put the local UI into standby (requested by the TV)."""
        from Screens.Standby import Standby, inStandby
        if not inStandby:
            from Tools import Notifications
            Notifications.AddNotification(Standby)

    def wakeup(self):
        """Wake the local UI from standby (requested by the TV)."""
        from Screens.Standby import Standby, inStandby
        if inStandby:
            inStandby.Power()
    def messageReceived(self, message):
        """Dispatch one incoming CEC message.

        Answers status queries from the TV/audio system, then (separately)
        translates standby/wakeup requests into local power changes
        according to the user's configuration.
        """
        if config.hdmicec.enabled.value:
            from Screens.Standby import inStandby
            cmd = message.getCommand()
            # 16-byte scratch buffer filled in-place by getData().
            data = 16 * '\x00'
            length = message.getData(data, len(data))
            if cmd == 0x00: # feature abort
                if data[0] == '\x44':
                    # Our <User Control Pressed> was rejected: stop forwarding.
                    print 'eHdmiCec: volume forwarding not supported by device %02x'%(message.getAddress())
                    self.volumeForwardingEnabled = False
            elif cmd == 0x46: # request name
                self.sendMessage(message.getAddress(), 'osdname')
            elif cmd == 0x7e or cmd == 0x72: # system audio mode status
                if data[0] == '\x01':
                    self.volumeForwardingDestination = 5 # on: send volume keys to receiver
                else:
                    self.volumeForwardingDestination = 0 # off: send volume keys to tv
                if config.hdmicec.volume_forwarding.value:
                    print 'eHdmiCec: volume forwarding to device %02x enabled'% self.volumeForwardingDestination
                    self.volumeForwardingEnabled = True
            elif cmd == 0x8f: # request power status
                if inStandby:
                    self.sendMessage(message.getAddress(), 'powerinactive')
                else:
                    self.sendMessage(message.getAddress(), 'poweractive')
            elif cmd == 0x83: # request address
                self.sendMessage(message.getAddress(), 'reportaddress')
            elif cmd == 0x86: # request streaming path
                # Only answer if the requested path is our own address.
                physicaladdress = ord(data[0]) * 256 + ord(data[1])
                ouraddress = eHdmiCEC.getInstance().getPhysicalAddress()
                if physicaladdress == ouraddress:
                    if not inStandby:
                        if config.hdmicec.report_active_source.value:
                            self.sendMessage(message.getAddress(), 'sourceactive')
            elif cmd == 0x85: # request active source
                if not inStandby:
                    if config.hdmicec.report_active_source.value:
                        self.sendMessage(message.getAddress(), 'sourceactive')
            elif cmd == 0x8c: # request vendor id
                self.sendMessage(message.getAddress(), 'vendorid')
            elif cmd == 0x8d: # menu request
                requesttype = ord(data[0])
                if requesttype == 2: # query
                    if inStandby:
                        self.sendMessage(message.getAddress(), 'menuinactive')
                    else:
                        self.sendMessage(message.getAddress(), 'menuactive')

            # handle standby request from the tv
            if cmd == 0x36 and config.hdmicec.handle_tv_standby.value:
                self.standby()

            # handle wakeup requests from the tv; which opcode qualifies is
            # chosen by the tv_wakeup_detection setting.
            if config.hdmicec.handle_tv_wakeup.value:
                if cmd == 0x04 and config.hdmicec.tv_wakeup_detection.value == "wakeup":
                    self.wakeup()
                elif cmd == 0x84 and config.hdmicec.tv_wakeup_detection.value == "tvreportphysicaladdress":
                    # Physical address 0.0.0.0 (+ type 0) means the TV itself.
                    if (ord(data[0]) * 256 + ord(data[1])) == 0 and ord(data[2]) == 0:
                        self.wakeup()
                elif cmd == 0x85 and config.hdmicec.tv_wakeup_detection.value == "sourcerequest":
                    self.wakeup()
                elif cmd == 0x86 and config.hdmicec.tv_wakeup_detection.value == "streamrequest":
                    physicaladdress = ord(data[0]) * 256 + ord(data[1])
                    ouraddress = eHdmiCEC.getInstance().getPhysicalAddress()
                    if physicaladdress == ouraddress:
                        self.wakeup()
                elif cmd == 0x46 and config.hdmicec.tv_wakeup_detection.value == "osdnamerequest":
                    self.wakeup()
                elif cmd != 0x36 and config.hdmicec.tv_wakeup_detection.value == "activity":
                    self.wakeup()
    def configVolumeForwarding(self, configElement):
        """Config notifier: toggle key forwarding and probe the audio system."""
        if config.hdmicec.enabled.value and config.hdmicec.volume_forwarding.value:
            self.volumeForwardingEnabled = True
            # Ask the audio system for its system-audio-mode status; the
            # reply (0x7e/0x72) sets volumeForwardingDestination.
            self.sendMessage(0x05, 'givesystemaudiostatus')
        else:
            self.volumeForwardingEnabled = False
def keyEvent(self, keyCode, keyEvent):
if not self.volumeForwardingEnabled: return
cmd = 0
data = ''
if keyEvent == 0:
if keyCode == 115:
cmd = 0x44
data = str(struct.pack('B', 0x41))
if keyCode == 114:
cmd = 0x44
data = str(struct.pack('B', 0x42))
if keyCode == 113:
cmd = 0x44
data = str(struct.pack('B', 0x43))
if keyEvent == 2:
if keyCode == 115:
cmd = 0x44
data = str(struct.pack('B', 0x41))
if keyCode == 114:
cmd = 0x44
data = str(struct.pack('B', 0x42))
if keyCode == 113:
cmd = 0x44
data = str(struct.pack('B', 0x43))
if keyEvent == 1:
if keyCode == 115 or keyCode == 114 or keyCode == 113:
cmd = 0x45
if cmd:
eHdmiCEC.getInstance().sendMessage(self.volumeForwardingDestination, cmd, data, len(data))
return 1
else:
return 0
    def sethdmipreemphasis(self):
        """Best effort: write the configured preemphasis setting to /proc."""
        try:
            if config.hdmicec.preemphasis.value == True:
                file = open("/proc/stb/hdmi/preemphasis", "w")
                file.write('on')
                file.close()
            else:
                file = open("/proc/stb/hdmi/preemphasis", "w")
                file.write('off')
                file.close()
        except:
            # Deliberately best-effort: a missing /proc entry is not fatal.
            return

    def checkifPowerupWithoutWakingTv(self):
        """Read (and reset) openWebif's "power on without TV" flag.

        Returns the string 'True' or 'False' (not a bool) -- callers
        compare against the literal 'True'.
        """
        try:
            #returns 'True' if openWebif function "Power on without TV" has written 'True' to this file:
            f = open("/tmp/powerup_without_waking_tv.txt", "r")
            powerupWithoutWakingTv = f.read()
            f.close()
        except:
            powerupWithoutWakingTv = 'False'
        try:
            #write 'False' to the file so that turning on the TV is only suppressed once
            #(and initially, so that openWebif knows that the image supports this feature)
            f = open("/tmp/powerup_without_waking_tv.txt", "w")
            f.write('False')
            f.close()
        except:
            print "[HdmiCec] failed writing /tmp/powerup_without_waking_tv.txt"
        return powerupWithoutWakingTv
# Module-level singleton; importing this module activates CEC handling.
hdmi_cec = HdmiCec()
| gpl-2.0 |
cherokee/webserver | qa/159-RuleHeader2.py | 8 | 1073 | from base import *
DIR = "header_test2_referer_match"
REFERER = "example.159com"
MAGIC = "Dealing with rule based headers (bis).."

# Two header-match rules: requests whose Referer matches *.159com go to the
# plain file handler, *.159net to the CGI handler.
CONF = """
vserver!1!rule!1590!match = header
vserver!1!rule!1590!match!header = Referer
vserver!1!rule!1590!match!match = .+\.159com
vserver!1!rule!1590!handler = file
vserver!1!rule!1591!match = header
vserver!1!rule!1591!match!header = Referer
vserver!1!rule!1591!match!match = .+\.159net
vserver!1!rule!1591!handler = cgi
"""

# Script placed on disk for the test; with a *.159com Referer it must be
# served as a plain file (raw source), not executed.
CGI = """#!/bin/sh
echo "Content-Type: text/plain"
echo
echo "%s"
""" % (MAGIC)
class Test (TestBase):
    """Expects HTTP 200 with the raw CGI source in the body, proving the
    *.159com header rule routed the request to the file handler."""

    def __init__ (self):
        TestBase.__init__ (self, __file__)
        self.name = "Rule header: match II"

        self.request = "GET /%s/test HTTP/1.0\r\n" % (DIR) + \
                       "Referer: %s\r\n" % (REFERER)
        self.conf = CONF
        self.expected_error = 200
        # Raw script text must come back: file handler, not CGI execution.
        self.required_content = ["/bin/sh", "echo"]

    def Prepare (self, www):
        d = self.Mkdir (www, DIR)
        f = self.WriteFile (d, 'test', 0755, CGI)
| gpl-2.0 |
xyzz/vcmi-build | project/jni/python/src/Lib/test/test_mimetypes.py | 77 | 2581 | import mimetypes
import StringIO
import unittest
from test import test_support
# Tell it we don't know about external files:
mimetypes.knownfiles = []
mimetypes.inited = False
mimetypes._default_mime_types()
class MimeTypesTestCase(unittest.TestCase):
    """Exercises mimetypes.MimeTypes guessing with only the built-in table."""

    def setUp(self):
        # A fresh private registry per test; does not touch module globals.
        self.db = mimetypes.MimeTypes()

    def test_default_data(self):
        eq = self.assertEqual
        eq(self.db.guess_type("foo.html"), ("text/html", None))
        eq(self.db.guess_type("foo.tgz"), ("application/x-tar", "gzip"))
        eq(self.db.guess_type("foo.tar.gz"), ("application/x-tar", "gzip"))
        eq(self.db.guess_type("foo.tar.Z"), ("application/x-tar", "compress"))

    def test_data_urls(self):
        eq = self.assertEqual
        guess_type = self.db.guess_type
        # data: URLs default to text/plain when no media type is given.
        eq(guess_type("data:,thisIsTextPlain"), ("text/plain", None))
        eq(guess_type("data:;base64,thisIsTextPlain"), ("text/plain", None))
        eq(guess_type("data:text/x-foo,thisIsTextXFoo"), ("text/x-foo", None))

    def test_file_parsing(self):
        eq = self.assertEqual
        # NOTE: StringIO module is Python 2 only (use io.StringIO on Python 3).
        sio = StringIO.StringIO("x-application/x-unittest pyunit\n")
        self.db.readfp(sio)
        eq(self.db.guess_type("foo.pyunit"),
           ("x-application/x-unittest", None))
        eq(self.db.guess_extension("x-application/x-unittest"), ".pyunit")

    def test_non_standard_types(self):
        eq = self.assertEqual
        # First try strict
        eq(self.db.guess_type('foo.xul', strict=True), (None, None))
        eq(self.db.guess_extension('image/jpg', strict=True), None)
        # And then non-strict
        eq(self.db.guess_type('foo.xul', strict=False), ('text/xul', None))
        eq(self.db.guess_extension('image/jpg', strict=False), '.jpg')

    def test_guess_all_types(self):
        eq = self.assertEqual
        unless = self.failUnless
        # First try strict. Use a set here for testing the results because if
        # test_urllib2 is run before test_mimetypes, global state is modified
        # such that the 'all' set will have more items in it.
        # (NOTE: 'all' shadows the builtin; kept as-is for fidelity.)
        all = set(self.db.guess_all_extensions('text/plain', strict=True))
        unless(all >= set(['.bat', '.c', '.h', '.ksh', '.pl', '.txt']))
        # And now non-strict
        all = self.db.guess_all_extensions('image/jpg', strict=False)
        all.sort()
        eq(all, ['.jpg'])
        # And now for no hits
        all = self.db.guess_all_extensions('image/jpg', strict=True)
        eq(all, [])
def test_main():
    # Entry point for Python 2's regrtest machinery.
    test_support.run_unittest(MimeTypesTestCase)


if __name__ == "__main__":
    test_main()
| lgpl-2.1 |
GunnerJnr/_CodeInstitute | Stream-3/Full-Stack-Development/19.Djangos-Testing-Framework/5.How-To-Test-Forms/we_are_social/threads/templatetags/thread_extras.py | 10 | 1438 | import arrow
from django import template
from django.core.urlresolvers import reverse
# Library instance through which the filters/tags below are registered.
register = template.Library()
@register.filter
def get_total_subject_posts(subject):
    """
    get_total_subject_posts(): return the total number of posts across
    every thread belonging to *subject*.
    """
    # sum() over a generator replaces the manual accumulator loop.
    return sum(thread.posts.count() for thread in subject.threads.all())
@register.filter
def started_time(created_at):
    """
    started_time(): render *created_at* as a human-friendly relative time
    (e.g. "2 hours ago") using the arrow library.
    """
    return arrow.get(created_at).humanize()
@register.simple_tag
def last_posted_user_name(thread):
    """
    last_posted_user_name(): username of the author of the most recent
    post in *thread* (newest by created_at).
    """
    newest_post = thread.posts.all().order_by('created_at').last()
    return newest_post.user.username
@register.simple_tag
def user_vote_button(thread, subject, user):
    """
    user_vote_button(): render an "Add my vote!" button for authenticated
    users who have not yet voted on the thread's poll; otherwise render
    nothing.
    """
    # Has this user already voted on the thread's poll?
    vote = thread.poll.votes.filter(user_id=user.id).first()
    if not vote:
        # NOTE: user.is_authenticated() as a method is pre-Django-1.10 usage.
        if user.is_authenticated():
            # Only numeric ids are interpolated, and via reverse(), so no
            # user-controlled text reaches the HTML.
            link = """
            <div class="col-md-3 btn-vote">
                <a href="%s" class="btn btn-default btn-sm">
                    Add my vote!
                </a>
            </div>
            """ % reverse('cast_vote', kwargs={
                'thread_id': thread.id,
                'subject_id': subject.id
            })
            return link
    return ""
@register.filter
def vote_percentage(subject):
    """
    vote_percentage(): percentage of the poll's total votes cast for
    *subject* (a poll option).

    Returns 0 when the subject has no votes, or when the poll has no
    votes at all (avoiding a ZeroDivisionError).
    """
    count = subject.votes.count()
    if count == 0:
        return 0
    total_votes = subject.poll.votes.count()
    if total_votes == 0:
        # Defensive: subject.votes and subject.poll.votes are different
        # querysets, so a non-zero count with zero total is possible.
        return 0
    # Multiply before dividing, as a float: the old '(100 / total_votes) *
    # count' truncated badly under Python 2 integer division (0 whenever
    # total_votes > 100).
    return 100.0 * count / total_votes
| mit |
sarthakmeh03/django | tests/model_options/test_tablespaces.py | 43 | 5370 | from __future__ import unicode_literals
from django.apps import apps
from django.conf import settings
from django.db import connection
from django.test import TestCase, skipIfDBFeature, skipUnlessDBFeature
from .models.tablespaces import (
Article, ArticleRef, Authors, Reviewers, Scientist, ScientistRef,
)
def sql_for_table(model):
    # collect_sql=True makes the schema editor record statements instead of
    # executing them; the CREATE TABLE is the first statement collected.
    with connection.schema_editor(collect_sql=True) as editor:
        editor.create_model(model)
    return editor.collected_sql[0]
def sql_for_index(model):
    # One statement per index; joined so callers can substring-count tokens.
    return '\n'.join(connection.schema_editor()._model_indexes_sql(model))
# We can't test the DEFAULT_TABLESPACE and DEFAULT_INDEX_TABLESPACE settings
# because they're evaluated when the model class is defined. As a consequence,
# @override_settings doesn't work, and the tests depend
class TablespacesTests(TestCase):
    """Checks how db_tablespace options appear in the generated schema SQL."""

    def setUp(self):
        # The unmanaged models need to be removed after the test in order to
        # prevent bad interactions with the flush operation in other tests.
        self._old_models = apps.app_configs['model_options'].models.copy()

        for model in Article, Authors, Reviewers, Scientist:
            model._meta.managed = True

    def tearDown(self):
        for model in Article, Authors, Reviewers, Scientist:
            model._meta.managed = False

        apps.app_configs['model_options'].models = self._old_models
        apps.all_models['model_options'] = self._old_models
        apps.clear_cache()

    def assertNumContains(self, haystack, needle, count):
        # Custom assertion: 'needle' occurs exactly 'count' times in 'haystack'.
        real_count = haystack.count(needle)
        self.assertEqual(real_count, count,
                         "Found %d instances of '%s', expected %d" % (real_count, needle, count))

    @skipUnlessDBFeature('supports_tablespaces')
    def test_tablespace_for_model(self):
        sql = sql_for_table(Scientist).lower()
        if settings.DEFAULT_INDEX_TABLESPACE:
            # 1 for the table
            self.assertNumContains(sql, 'tbl_tbsp', 1)
            # 1 for the index on the primary key
            self.assertNumContains(sql, settings.DEFAULT_INDEX_TABLESPACE, 1)
        else:
            # 1 for the table + 1 for the index on the primary key
            self.assertNumContains(sql, 'tbl_tbsp', 2)

    @skipIfDBFeature('supports_tablespaces')
    def test_tablespace_ignored_for_model(self):
        # No tablespace-related SQL
        self.assertEqual(sql_for_table(Scientist),
                         sql_for_table(ScientistRef))

    @skipUnlessDBFeature('supports_tablespaces')
    def test_tablespace_for_indexed_field(self):
        sql = sql_for_table(Article).lower()
        if settings.DEFAULT_INDEX_TABLESPACE:
            # 1 for the table
            self.assertNumContains(sql, 'tbl_tbsp', 1)
            # 1 for the primary key + 1 for the index on code
            self.assertNumContains(sql, settings.DEFAULT_INDEX_TABLESPACE, 2)
        else:
            # 1 for the table + 1 for the primary key + 1 for the index on code
            self.assertNumContains(sql, 'tbl_tbsp', 3)

        # 1 for the index on reference
        self.assertNumContains(sql, 'idx_tbsp', 1)

    @skipIfDBFeature('supports_tablespaces')
    def test_tablespace_ignored_for_indexed_field(self):
        # No tablespace-related SQL
        self.assertEqual(sql_for_table(Article),
                         sql_for_table(ArticleRef))

    @skipUnlessDBFeature('supports_tablespaces')
    def test_tablespace_for_many_to_many_field(self):
        sql = sql_for_table(Authors).lower()
        # The join table of the ManyToManyField goes to the model's tablespace,
        # and its indexes too, unless DEFAULT_INDEX_TABLESPACE is set.
        if settings.DEFAULT_INDEX_TABLESPACE:
            # 1 for the table
            self.assertNumContains(sql, 'tbl_tbsp', 1)
            # 1 for the primary key
            self.assertNumContains(sql, settings.DEFAULT_INDEX_TABLESPACE, 1)
        else:
            # 1 for the table + 1 for the index on the primary key
            self.assertNumContains(sql, 'tbl_tbsp', 2)
        self.assertNumContains(sql, 'idx_tbsp', 0)

        sql = sql_for_index(Authors).lower()
        # The ManyToManyField declares no db_tablespace, its indexes go to
        # the model's tablespace, unless DEFAULT_INDEX_TABLESPACE is set.
        if settings.DEFAULT_INDEX_TABLESPACE:
            self.assertNumContains(sql, settings.DEFAULT_INDEX_TABLESPACE, 2)
        else:
            self.assertNumContains(sql, 'tbl_tbsp', 2)
        self.assertNumContains(sql, 'idx_tbsp', 0)

        sql = sql_for_table(Reviewers).lower()
        # The join table of the ManyToManyField goes to the model's tablespace,
        # and its indexes too, unless DEFAULT_INDEX_TABLESPACE is set.
        if settings.DEFAULT_INDEX_TABLESPACE:
            # 1 for the table
            self.assertNumContains(sql, 'tbl_tbsp', 1)
            # 1 for the primary key
            self.assertNumContains(sql, settings.DEFAULT_INDEX_TABLESPACE, 1)
        else:
            # 1 for the table + 1 for the index on the primary key
            self.assertNumContains(sql, 'tbl_tbsp', 2)
        self.assertNumContains(sql, 'idx_tbsp', 0)

        sql = sql_for_index(Reviewers).lower()
        # The ManyToManyField declares db_tablespace, its indexes go there.
        self.assertNumContains(sql, 'tbl_tbsp', 0)
        self.assertNumContains(sql, 'idx_tbsp', 2)
| bsd-3-clause |
eXistenZNL/SickRage | lib/lockfile/pidlockfile.py | 488 | 6221 | # -*- coding: utf-8 -*-
# pidlockfile.py
#
# Copyright © 2008–2009 Ben Finney <ben+python@benfinney.id.au>
#
# This is free software: you may copy, modify, and/or distribute this work
# under the terms of the Python Software Foundation License, version 2 or
# later as published by the Python Software Foundation.
# No warranty expressed or implied. See the file LICENSE.PSF-2 for details.
""" Lockfile behaviour implemented via Unix PID files.
"""
from __future__ import absolute_import
import os
import sys
import errno
import time
from . import (LockBase, AlreadyLocked, LockFailed, NotLocked, NotMyLock,
LockTimeout)
class PIDLockFile(LockBase):
    """ Lockfile implemented as a Unix PID file.

    The lock file is a normal file named by the attribute `path`.
    A lock's PID file contains a single line of text, containing
    the process ID (PID) of the process that acquired the lock.

    >>> lock = PIDLockFile('somefile')
    >>> lock = PIDLockFile('somefile')
    """

    def __init__(self, path, threaded=False, timeout=None):
        # pid lockfiles don't support threaded operation, so always force
        # False as the threaded arg.
        LockBase.__init__(self, path, False, timeout)
        # (The previously computed but unused dirname/basename locals were
        # removed.)
        self.unique_name = self.path

    def read_pid(self):
        """ Get the PID from the lock file.
            """
        return read_pid_from_pidfile(self.path)

    def is_locked(self):
        """ Test if the lock is currently held.

            The lock is held if the PID file for this lock exists.
            """
        return os.path.exists(self.path)

    def i_am_locking(self):
        """ Test if the lock is held by the current process.

            Returns ``True`` if the current process ID matches the
            number stored in the PID file.
            """
        return self.is_locked() and os.getpid() == self.read_pid()

    def acquire(self, timeout=None):
        """ Acquire the lock.

            Creates the PID file for this lock, or raises an error if
            the lock could not be acquired.
            """
        # Bug fix: the old expression
        #     timeout = timeout is not None and timeout or self.timeout
        # silently replaced an explicit timeout of 0 with self.timeout.
        if timeout is None:
            timeout = self.timeout
        end_time = time.time()
        if timeout is not None and timeout > 0:
            end_time += timeout

        while True:
            try:
                write_pid_to_pidfile(self.path)
            except OSError as exc:
                if exc.errno == errno.EEXIST:
                    # The lock creation failed. Maybe sleep a bit.
                    if timeout is not None and time.time() > end_time:
                        if timeout > 0:
                            raise LockTimeout("Timeout waiting to acquire"
                                              " lock for %s" %
                                              self.path)
                        else:
                            raise AlreadyLocked("%s is already locked" %
                                                self.path)
                    # Poll at a tenth of the timeout, with a 0.1s fallback.
                    time.sleep(timeout / 10.0 if timeout else 0.1)
                else:
                    raise LockFailed("failed to create %s" % self.path)
            else:
                return

    def release(self):
        """ Release the lock.

            Removes the PID file to release the lock, or raises an
            error if the current process does not hold the lock.
            """
        if not self.is_locked():
            raise NotLocked("%s is not locked" % self.path)
        if not self.i_am_locking():
            raise NotMyLock("%s is locked, but not by me" % self.path)
        remove_existing_pidfile(self.path)

    def break_lock(self):
        """ Break an existing lock.

            Removes the PID file if it already exists, otherwise does
            nothing.
            """
        remove_existing_pidfile(self.path)
def read_pid_from_pidfile(pidfile_path):
    """ Read the PID recorded in the named PID file.

        Read and return the numeric PID recorded as text in the named
        PID file. If the PID file cannot be read, or if the content is
        not a valid PID, return ``None``.
        """
    try:
        pidfile = open(pidfile_path, 'r')
    except IOError:
        return None

    # According to the FHS 2.3 section on PID files in /var/run:
    #
    #   The file must consist of the process identifier in
    #   ASCII-encoded decimal, followed by a newline character.
    #
    #   Programs that read PID files should be somewhat flexible
    #   in what they accept; i.e., they should ignore extra
    #   whitespace, leading zeroes, absence of the trailing
    #   newline, or additional lines in the PID file.
    try:
        line = pidfile.readline().strip()
    finally:
        # Bug fix: the old code leaked the handle if readline() raised.
        pidfile.close()

    try:
        return int(line)
    except ValueError:
        return None
def write_pid_to_pidfile(pidfile_path):
    """ Write the PID in the named PID file.

        Get the numeric process ID (“PID”) of the current process
        and write it to the named file as a line of text.
        """
    # O_EXCL makes creation atomic: the call fails with EEXIST if the
    # file already exists, which is what the lock acquire path relies on.
    fd = os.open(pidfile_path, os.O_CREAT | os.O_EXCL | os.O_WRONLY, 0o644)
    with os.fdopen(fd, 'w') as pidfile:
        # According to the FHS 2.3 section on PID files in /var/run:
        #
        #   The file must consist of the process identifier in
        #   ASCII-encoded decimal, followed by a newline character. For
        #   example, if crond was process number 25, /var/run/crond.pid
        #   would contain three characters: two, five, and newline.
        pidfile.write("%d\n" % os.getpid())
def remove_existing_pidfile(pidfile_path):
    """ Remove the named PID file if it exists.

        Removing a PID file that doesn't already exist puts us in the
        desired state, so we ignore the condition if the file does not
        exist.
        """
    try:
        os.remove(pidfile_path)
    except OSError as exc:
        # A missing file already satisfies the post-condition; anything
        # else (permissions, it's a directory, ...) is a real error.
        if exc.errno != errno.ENOENT:
            raise
| gpl-3.0 |
dokterbob/django-shopkit | shopkit/price/advanced/__init__.py | 1 | 1338 | # Copyright (C) 2010-2011 Mathijs de Bruin <mathijs@mathijsfietst.nl>
#
# This file is part of django-shopkit.
#
# django-shopkit is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation; either version 2, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""
The model structure in this extension is very preliminary. Ideally, one would
want all of one's prices to reside in a single table.

One way to approach this would be to use a private function `_get_valid` for
`PriceBase` subclasses and then implement a `get_valid` in `PriceBase` which
calls the `_get_valid` functions of the direct parent classes that inherit from
`PriceBase`. This could then be collapsed into a single QuerySet using Q objects.

But perhaps this is too complicated. Any comments are welcome.
""" | agpl-3.0 |
xforce/diorama-native-modding | tools/gyp/pylib/gyp/generator/cmake.py | 148 | 44560 | # Copyright (c) 2013 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""cmake output module
This module is under development and should be considered experimental.
This module produces cmake (2.8.8+) input as its output. One CMakeLists.txt is
created for each configuration.
This module's original purpose was to support editing in IDEs like KDevelop
which use CMake for project management. It is also possible to use CMake to
generate projects for other IDEs such as eclipse cdt and code::blocks. QtCreator
will convert the CMakeLists.txt to a code::blocks cbp for the editor to read,
but build using CMake. As a result QtCreator editor is unaware of compiler
defines. The generated CMakeLists.txt can also be used to build on Linux. There
is currently no support for building on platforms other than Linux.
The generated CMakeLists.txt should properly compile all projects. However,
there is a mismatch between gyp and cmake with regard to linking. All attempts
are made to work around this, but CMake sometimes sees -Wl,--start-group as a
library and incorrectly repeats it. As a result the output of this generator
should not be relied on for building.
When using with kdevelop, use version 4.4+. Previous versions of kdevelop will
not be able to find the header file directories described in the generated
CMakeLists.txt file.
"""
import multiprocessing
import os
import signal
import string
import subprocess
import gyp.common
# Mapping from gyp generator expansion variables to their CMake spellings.
generator_default_variables = {
  'EXECUTABLE_PREFIX': '',
  'EXECUTABLE_SUFFIX': '',
  'STATIC_LIB_PREFIX': 'lib',
  'STATIC_LIB_SUFFIX': '.a',
  'SHARED_LIB_PREFIX': 'lib',
  'SHARED_LIB_SUFFIX': '.so',
  'SHARED_LIB_DIR': '${builddir}/lib.${TOOLSET}',
  'LIB_DIR': '${obj}.${TOOLSET}',
  'INTERMEDIATE_DIR': '${obj}.${TOOLSET}/${TARGET}/geni',
  'SHARED_INTERMEDIATE_DIR': '${obj}/gen',
  'PRODUCT_DIR': '${builddir}',
  'RULE_INPUT_PATH': '${RULE_INPUT_PATH}',
  'RULE_INPUT_DIRNAME': '${RULE_INPUT_DIRNAME}',
  'RULE_INPUT_NAME': '${RULE_INPUT_NAME}',
  'RULE_INPUT_ROOT': '${RULE_INPUT_ROOT}',
  'RULE_INPUT_EXT': '${RULE_INPUT_EXT}',
  'CONFIGURATION_NAME': '${configuration}',
}

# Variables that already denote a full path and must not be re-anchored
# relative to the CMakeLists.txt.
FULL_PATH_VARS = ('${CMAKE_SOURCE_DIR}', '${builddir}', '${obj}')

generator_supports_multiple_toolsets = True
generator_wants_static_library_dependencies_adjusted = True

# Source extensions CMake should compile, mapped to the language kind.
COMPILABLE_EXTENSIONS = {
  '.c': 'cc',
  '.cc': 'cxx',
  '.cpp': 'cxx',
  '.cxx': 'cxx',
  '.s': 's', # cc
  '.S': 's', # cc
}
def RemovePrefix(a, prefix):
  """Returns 'a' without 'prefix' if it starts with 'prefix'."""
  if a.startswith(prefix):
    return a[len(prefix):]
  return a
def CalculateVariables(default_variables, params):
  """Calculate additional variables for use in the build (called by gyp)."""
  # Only fills in 'OS' when the caller has not already provided it.
  default_variables.setdefault('OS', gyp.common.GetFlavor(params))
def Compilable(filename):
  """Return true if the file is compilable (should be in OBJS)."""
  # str.endswith accepts a tuple of suffixes; iterating the dict yields
  # its extension keys.
  return filename.endswith(tuple(COMPILABLE_EXTENSIONS))
def Linkable(filename):
  """Return true if the file is linkable (should be on the link line)."""
  return filename[-2:] == '.o'
def NormjoinPathForceCMakeSource(base_path, rel_path):
  """Resolves rel_path against base_path and returns the result.

  If rel_path is an absolute path it is returned unchanged.
  Otherwise it is resolved against base_path and normalized.
  If the result is a relative path, it is forced to be relative to the
  CMakeLists.txt.
  """
  if os.path.isabs(rel_path):
    return rel_path
  # str.startswith accepts a tuple of prefixes, which avoids building a
  # throwaway list for any().
  if rel_path.startswith(FULL_PATH_VARS):
    return rel_path
  # TODO: do we need to check base_path for absolute variables as well?
  return os.path.join('${CMAKE_SOURCE_DIR}',
                      os.path.normpath(os.path.join(base_path, rel_path)))
def NormjoinPath(base_path, rel_path):
  """Resolves rel_path against base_path and returns the result.

  TODO: what is this really used for?
  If rel_path begins with '$' it is returned unchanged.
  Otherwise it is resolved against base_path if relative, then normalized.
  """
  is_generator_variable = (rel_path.startswith('$') and
                           not rel_path.startswith('${configuration}'))
  if is_generator_variable:
    return rel_path
  return os.path.normpath(os.path.join(base_path, rel_path))
def CMakeStringEscape(a):
  """Escapes the string 'a' for use inside a CMake string.

  This means escaping
  '\' otherwise it may be seen as modifying the next character
  '"' otherwise it will end the string
  ';' otherwise the string becomes a list

  The following do not need to be escaped
  '#' when the lexer is in string state, this does not start a comment

  The following are yet unknown
  '$' generator variables (like ${obj}) must not be escaped,
      but text $ should be escaped
      what is wanted is to know which $ come from generator variables
  """
  # Backslash must be handled first, since it is the escape character itself.
  escaped = a.replace('\\', '\\\\')
  escaped = escaped.replace(';', '\\;')
  return escaped.replace('"', '\\"')
def SetFileProperty(output, source_name, property_name, values, sep):
  """Given a set of source file, sets the given property on them."""
  # Every value is followed by 'sep', including the last, matching the
  # original piecewise writes.
  escaped = ''.join(CMakeStringEscape(value) + sep for value in values)
  output.write('set_source_files_properties(%s PROPERTIES %s "%s")\n'
               % (source_name, property_name, escaped))
def SetFilesProperty(output, variable, property_name, values, sep):
  """Given a set of source files, sets the given property on them."""
  escaped = ''.join(CMakeStringEscape(value) + sep for value in values)
  output.write('set_source_files_properties(')
  WriteVariable(output, variable)
  output.write(' PROPERTIES %s "%s")\n' % (property_name, escaped))
def SetTargetProperty(output, target_name, property_name, values, sep=''):
  """Given a target, sets the given property."""
  joined = ''.join(CMakeStringEscape(value) + sep for value in values)
  output.write('set_target_properties(%s PROPERTIES %s "%s")\n'
               % (target_name, property_name, joined))
def SetVariable(output, variable_name, value):
  """Sets a CMake variable."""
  output.write('set(%s "%s")\n' % (variable_name, CMakeStringEscape(value)))
def SetVariableList(output, variable_name, values):
  """Sets a CMake variable to a list."""
  # Zero or one element degenerates to a plain set().
  if not values:
    return SetVariable(output, variable_name, "")
  if len(values) == 1:
    return SetVariable(output, variable_name, values[0])
  escaped = [CMakeStringEscape(value) for value in values]
  body = '"\n "'.join(escaped)
  output.write('list(APPEND %s\n "%s")\n' % (variable_name, body))
def UnsetVariable(output, variable_name):
  """Unsets a CMake variable."""
  output.write('unset(%s)\n' % variable_name)
def WriteVariable(output, variable_name, prepend=None):
  """Writes a ${variable_name} reference, optionally prefixed by 'prepend'."""
  output.write('%s${%s}' % (prepend or '', variable_name))
class CMakeTargetType(object):
  """Bundles how a gyp target type is declared in CMake."""
  def __init__(self, command, modifier, property_modifier):
    self.command = command  # e.g. 'add_library'
    self.modifier = modifier  # e.g. 'STATIC', or None
    self.property_modifier = property_modifier  # e.g. 'ARCHIVE', or None
# How each gyp target type maps onto a CMake target declaration.
cmake_target_type_from_gyp_target_type = {
  'executable': CMakeTargetType('add_executable', None, 'RUNTIME'),
  'static_library': CMakeTargetType('add_library', 'STATIC', 'ARCHIVE'),
  'shared_library': CMakeTargetType('add_library', 'SHARED', 'LIBRARY'),
  'loadable_module': CMakeTargetType('add_library', 'MODULE', 'LIBRARY'),
  'none': CMakeTargetType('add_custom_target', 'SOURCES', None),
}
def StringToCMakeTargetName(a):
  """Converts the given string 'a' to a valid CMake target name.

  All invalid characters are replaced by '_'.
  Invalid for cmake: ' ', '/', '(', ')', '"'
  Invalid for make: ':'
  Invalid for unknown reasons but cause failures: '.'
  """
  # Portability fix: string.maketrans() does not exist on Python 3 (and
  # str.maketrans is Python-3-only), so replace the translate() call with
  # a version that works on both Python 2 and 3.
  return ''.join('_' if ch in ' /():."' else ch for ch in a)
def WriteActions(target_name, actions, extra_sources, extra_deps,
                 path_to_gyp, output):
  """Write CMake for the 'actions' in the target.

  Each gyp action becomes an add_custom_command (does the work) plus an
  add_custom_target (gives it a buildable name), appended to extra_deps.

  Args:
    target_name: the name of the CMake target being generated.
    actions: the Gyp 'actions' dict for this target.
    extra_sources: [(<cmake_src>, <src>)] to append with generated source files.
    extra_deps: [<cmake_target>] to append with generated targets.
    path_to_gyp: relative path from CMakeLists.txt being generated to
        the Gyp file in which the target being generated is defined.
  """
  for action in actions:
    action_name = StringToCMakeTargetName(action['action_name'])
    action_target_name = '%s__%s' % (target_name, action_name)

    inputs = action['inputs']
    inputs_name = action_target_name + '__input'
    SetVariableList(output, inputs_name,
        [NormjoinPathForceCMakeSource(path_to_gyp, dep) for dep in inputs])

    outputs = action['outputs']
    cmake_outputs = [NormjoinPathForceCMakeSource(path_to_gyp, out)
                     for out in outputs]
    outputs_name = action_target_name + '__output'
    SetVariableList(output, outputs_name, cmake_outputs)

    # Build up a list of outputs.
    # Collect the output dirs we'll need.
    # (NOTE: 'dir' shadows the builtin; kept as-is.)
    dirs = set(dir for dir in (os.path.dirname(o) for o in outputs) if dir)

    if int(action.get('process_outputs_as_sources', False)):
      extra_sources.extend(zip(cmake_outputs, outputs))

    # add_custom_command
    output.write('add_custom_command(OUTPUT ')
    WriteVariable(output, outputs_name)
    output.write('\n')

    if len(dirs) > 0:
      for directory in dirs:
        # Ensure output directories exist before the action runs.
        output.write(' COMMAND ${CMAKE_COMMAND} -E make_directory ')
        output.write(directory)
        output.write('\n')

    output.write(' COMMAND ')
    output.write(gyp.common.EncodePOSIXShellList(action['action']))
    output.write('\n')

    output.write(' DEPENDS ')
    WriteVariable(output, inputs_name)
    output.write('\n')

    # gyp actions run relative to the directory of the defining .gyp file.
    output.write(' WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}/')
    output.write(path_to_gyp)
    output.write('\n')

    output.write(' COMMENT ')
    if 'message' in action:
      output.write(action['message'])
    else:
      output.write(action_target_name)
    output.write('\n')

    output.write(' VERBATIM\n')
    output.write(')\n')

    # add_custom_target
    output.write('add_custom_target(')
    output.write(action_target_name)
    output.write('\n DEPENDS ')
    WriteVariable(output, outputs_name)
    output.write('\n SOURCES ')
    WriteVariable(output, inputs_name)
    output.write('\n)\n')

    extra_deps.append(action_target_name)
def NormjoinRulePathForceCMakeSource(base_path, rel_path, rule_source):
  """Like NormjoinPathForceCMakeSource, but for rule outputs.

  A rel_path built from RULE_INPUT_PATH/RULE_INPUT_DIRNAME is left alone
  when the rule source itself is already anchored at a full-path variable.
  """
  if rel_path.startswith(("${RULE_INPUT_PATH}", "${RULE_INPUT_DIRNAME}")):
    # str.startswith accepts a tuple; no need for any() over a list.
    if rule_source.startswith(FULL_PATH_VARS):
      return rel_path
  return NormjoinPathForceCMakeSource(base_path, rel_path)
def WriteRules(target_name, rules, extra_sources, extra_deps,
               path_to_gyp, output):
  """Write CMake for the 'rules' in the target.

  Each rule is expanded once per rule source into an add_custom_command,
  then a single add_custom_target per rule collects all outputs.

  Args:
    target_name: the name of the CMake target being generated.
    rules: the Gyp 'rules' list for this target.
    extra_sources: [(<cmake_src>, <src>)] to append with generated source files.
    extra_deps: [<cmake_target>] to append with generated targets.
    path_to_gyp: relative path from CMakeLists.txt being generated to
        the Gyp file in which the target being generated is defined.
  """
  for rule in rules:
    rule_name = StringToCMakeTargetName(target_name + '__' + rule['rule_name'])

    inputs = rule.get('inputs', [])
    inputs_name = rule_name + '__input'
    SetVariableList(output, inputs_name,
        [NormjoinPathForceCMakeSource(path_to_gyp, dep) for dep in inputs])
    outputs = rule['outputs']
    var_outputs = []

    for count, rule_source in enumerate(rule.get('rule_sources', [])):
      action_name = rule_name + '_' + str(count)

      rule_source_dirname, rule_source_basename = os.path.split(rule_source)
      rule_source_root, rule_source_ext = os.path.splitext(rule_source_basename)

      # Expose the per-source RULE_INPUT_* variables that the rule's
      # outputs/action may reference.
      SetVariable(output, 'RULE_INPUT_PATH', rule_source)
      SetVariable(output, 'RULE_INPUT_DIRNAME', rule_source_dirname)
      SetVariable(output, 'RULE_INPUT_NAME', rule_source_basename)
      SetVariable(output, 'RULE_INPUT_ROOT', rule_source_root)
      SetVariable(output, 'RULE_INPUT_EXT', rule_source_ext)

      # Build up a list of outputs.
      # Collect the output dirs we'll need.
      dirs = set(dir for dir in (os.path.dirname(o) for o in outputs) if dir)

      # Create variables for the output, as 'local' variable will be unset.
      these_outputs = []
      for output_index, out in enumerate(outputs):
        output_name = action_name + '_' + str(output_index)
        SetVariable(output, output_name,
                    NormjoinRulePathForceCMakeSource(path_to_gyp, out,
                                                     rule_source))
        if int(rule.get('process_outputs_as_sources', False)):
          extra_sources.append(('${' + output_name + '}', out))
        these_outputs.append('${' + output_name + '}')
        var_outputs.append('${' + output_name + '}')

      # add_custom_command
      output.write('add_custom_command(OUTPUT\n')
      for out in these_outputs:
        output.write(' ')
        output.write(out)
        output.write('\n')

      for directory in dirs:
        output.write(' COMMAND ${CMAKE_COMMAND} -E make_directory ')
        output.write(directory)
        output.write('\n')

      output.write(' COMMAND ')
      output.write(gyp.common.EncodePOSIXShellList(rule['action']))
      output.write('\n')

      output.write(' DEPENDS ')
      WriteVariable(output, inputs_name)
      output.write(' ')
      output.write(NormjoinPath(path_to_gyp, rule_source))
      output.write('\n')

      # CMAKE_SOURCE_DIR is where the CMakeLists.txt lives.
      # The cwd is the current build directory.
      output.write(' WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}/')
      output.write(path_to_gyp)
      output.write('\n')

      output.write(' COMMENT ')
      if 'message' in rule:
        output.write(rule['message'])
      else:
        output.write(action_name)
      output.write('\n')

      output.write(' VERBATIM\n')
      output.write(')\n')

      # The RULE_INPUT_* variables are per-source; clear them again.
      UnsetVariable(output, 'RULE_INPUT_PATH')
      UnsetVariable(output, 'RULE_INPUT_DIRNAME')
      UnsetVariable(output, 'RULE_INPUT_NAME')
      UnsetVariable(output, 'RULE_INPUT_ROOT')
      UnsetVariable(output, 'RULE_INPUT_EXT')

    # add_custom_target
    output.write('add_custom_target(')
    output.write(rule_name)
    output.write(' DEPENDS\n')
    for out in var_outputs:
      output.write(' ')
      output.write(out)
      output.write('\n')
    output.write('SOURCES ')
    WriteVariable(output, inputs_name)
    output.write('\n')
    for rule_source in rule.get('rule_sources', []):
      output.write(' ')
      output.write(NormjoinPath(path_to_gyp, rule_source))
      output.write('\n')
    output.write(')\n')

    extra_deps.append(rule_name)
def WriteCopies(target_name, copies, extra_deps, path_to_gyp, output):
  """Write CMake for the 'copies' in the target.

  File copies and directory copies are batched separately (they need
  different 'cmake -E' subcommands), then emitted as one
  add_custom_command plus an add_custom_target appended to extra_deps.

  Args:
    target_name: the name of the CMake target being generated.
    copies: the Gyp 'copies' list for this target.
    extra_deps: [<cmake_target>] to append with generated targets.
    path_to_gyp: relative path from CMakeLists.txt being generated to
        the Gyp file in which the target being generated is defined.
  """
  copy_name = target_name + '__copies'

  # CMake gets upset with custom targets with OUTPUT which specify no output.
  have_copies = any(copy['files'] for copy in copies)
  if not have_copies:
    output.write('add_custom_target(')
    output.write(copy_name)
    output.write(')\n')
    extra_deps.append(copy_name)
    return

  class Copy(object):
    # Accumulates the inputs/outputs for one 'cmake -E <command>' flavor.
    def __init__(self, ext, command):
      self.cmake_inputs = []
      self.cmake_outputs = []
      self.gyp_inputs = []
      self.gyp_outputs = []
      self.ext = ext
      self.inputs_name = None
      self.outputs_name = None
      self.command = command

  file_copy = Copy('', 'copy')
  dir_copy = Copy('_dirs', 'copy_directory')

  for copy in copies:
    files = copy['files']
    destination = copy['destination']
    for src in files:
      path = os.path.normpath(src)
      basename = os.path.split(path)[1]
      dst = os.path.join(destination, basename)

      # A trailing '/' (empty basename) means 'src' is a directory.
      # (NOTE: this rebinds the 'copy' loop variable; kept as-is.)
      copy = file_copy if os.path.basename(src) else dir_copy

      copy.cmake_inputs.append(NormjoinPathForceCMakeSource(path_to_gyp, src))
      copy.cmake_outputs.append(NormjoinPathForceCMakeSource(path_to_gyp, dst))
      copy.gyp_inputs.append(src)
      copy.gyp_outputs.append(dst)

  for copy in (file_copy, dir_copy):
    if copy.cmake_inputs:
      copy.inputs_name = copy_name + '__input' + copy.ext
      SetVariableList(output, copy.inputs_name, copy.cmake_inputs)

      copy.outputs_name = copy_name + '__output' + copy.ext
      SetVariableList(output, copy.outputs_name, copy.cmake_outputs)

  # add_custom_command
  output.write('add_custom_command(\n')

  output.write('OUTPUT')
  for copy in (file_copy, dir_copy):
    if copy.outputs_name:
      WriteVariable(output, copy.outputs_name, ' ')
  output.write('\n')

  for copy in (file_copy, dir_copy):
    for src, dst in zip(copy.gyp_inputs, copy.gyp_outputs):
      # 'cmake -E copy src dst' will create the 'dst' directory if needed.
      output.write('COMMAND ${CMAKE_COMMAND} -E %s ' % copy.command)
      output.write(src)
      output.write(' ')
      output.write(dst)
      output.write("\n")

  output.write('DEPENDS')
  for copy in (file_copy, dir_copy):
    if copy.inputs_name:
      WriteVariable(output, copy.inputs_name, ' ')
  output.write('\n')

  output.write('WORKING_DIRECTORY ${CMAKE_SOURCE_DIR}/')
  output.write(path_to_gyp)
  output.write('\n')

  output.write('COMMENT Copying for ')
  output.write(target_name)
  output.write('\n')

  output.write('VERBATIM\n')
  output.write(')\n')

  # add_custom_target
  output.write('add_custom_target(')
  output.write(copy_name)
  output.write('\n DEPENDS')
  for copy in (file_copy, dir_copy):
    if copy.outputs_name:
      WriteVariable(output, copy.outputs_name, ' ')
  output.write('\n SOURCES')
  if file_copy.inputs_name:
    WriteVariable(output, file_copy.inputs_name, ' ')
  output.write('\n)\n')

  extra_deps.append(copy_name)
def CreateCMakeTargetBaseName(qualified_target):
  """This is the name we would like the target to have."""
  _, gyp_target_name, gyp_target_toolset = (
      gyp.common.ParseQualifiedTarget(qualified_target))

  base_name = gyp_target_name
  # Non-default toolsets are always qualified so variants stay distinct.
  if gyp_target_toolset and gyp_target_toolset != 'target':
    base_name = '%s_%s' % (base_name, gyp_target_toolset)
  return StringToCMakeTargetName(base_name)
def CreateCMakeTargetFullName(qualified_target):
  """An unambiguous name for the target."""
  gyp_file, gyp_target_name, gyp_target_toolset = (
      gyp.common.ParseQualifiedTarget(qualified_target))

  # Qualify with the defining gyp file so the name is globally unique.
  full_name = '%s:%s' % (gyp_file, gyp_target_name)
  if gyp_target_toolset and gyp_target_toolset != 'target':
    full_name = '%s_%s' % (full_name, gyp_target_toolset)
  return StringToCMakeTargetName(full_name)
class CMakeNamer(object):
  """Converts Gyp target names into CMake target names.

  CMake requires globally unique target names. Fully qualifying every
  name would guarantee that, but produces unfriendly names like
  "chrome_chrome_gyp_chrome" in IDEs. Since collisions are rare, names
  are only fully qualified when the short form actually collides.
  Toolset variants are always qualified from the base, as this is
  required for building (and sensible for IDEs, since defines may
  differ).
  """
  def __init__(self, target_list):
    self.cmake_target_base_names_conficting = set()
    seen_base_names = set()
    for qualified_target in target_list:
      base_name = CreateCMakeTargetBaseName(qualified_target)
      if base_name in seen_base_names:
        # Second sighting of this short name: mark it as conflicting.
        self.cmake_target_base_names_conficting.add(base_name)
      else:
        seen_base_names.add(base_name)

  def CreateCMakeTargetName(self, qualified_target):
    base_name = CreateCMakeTargetBaseName(qualified_target)
    if base_name in self.cmake_target_base_names_conficting:
      return CreateCMakeTargetFullName(qualified_target)
    return base_name
def WriteTarget(namer, qualified_target, target_dicts, build_dir, config_to_use,
                options, generator_flags, all_qualified_targets, output):
  """Write the CMake rules for a single gyp (target, toolset) pair.

  Emits the add_executable/add_library/custom-target command plus all
  supporting source-list variables, target properties, include dirs,
  defines, compile/link flags and dependency wiring for one configuration.

  `namer` resolves unique CMake target names, `target_dicts` holds the gyp
  specs, `all_qualified_targets` is the set of targets the implicit 'all'
  target should depend on, and everything is written to `output`.
  """
  # The make generator does this always.
  # TODO: It would be nice to be able to tell CMake all dependencies.
  circular_libs = generator_flags.get('circular', True)

  if not generator_flags.get('standalone', False):
    output.write('\n#')
    output.write(qualified_target)
    output.write('\n')

  gyp_file, _, _ = gyp.common.ParseQualifiedTarget(qualified_target)
  rel_gyp_file = gyp.common.RelativePath(gyp_file, options.toplevel_dir)
  rel_gyp_dir = os.path.dirname(rel_gyp_file)

  # Relative path from build dir to top dir.
  build_to_top = gyp.common.InvertRelativePath(build_dir, options.toplevel_dir)
  # Relative path from build dir to gyp dir.
  build_to_gyp = os.path.join(build_to_top, rel_gyp_dir)

  path_from_cmakelists_to_gyp = build_to_gyp

  spec = target_dicts.get(qualified_target, {})
  config = spec.get('configurations', {}).get(config_to_use, {})

  target_name = spec.get('target_name', '<missing target name>')
  target_type = spec.get('type', '<missing target type>')
  target_toolset = spec.get('toolset')

  cmake_target_type = cmake_target_type_from_gyp_target_type.get(target_type)
  if cmake_target_type is None:
    print ('Target %s has unknown target type %s, skipping.' %
          ( target_name, target_type ) )
    return

  SetVariable(output, 'TARGET', target_name)
  SetVariable(output, 'TOOLSET', target_toolset)

  cmake_target_name = namer.CreateCMakeTargetName(qualified_target)

  extra_sources = []
  extra_deps = []

  # Actions must come first, since they can generate more OBJs for use below.
  if 'actions' in spec:
    WriteActions(cmake_target_name, spec['actions'], extra_sources, extra_deps,
                 path_from_cmakelists_to_gyp, output)

  # Rules must be early like actions.
  if 'rules' in spec:
    WriteRules(cmake_target_name, spec['rules'], extra_sources, extra_deps,
               path_from_cmakelists_to_gyp, output)

  # Copies
  if 'copies' in spec:
    WriteCopies(cmake_target_name, spec['copies'], extra_deps,
                path_from_cmakelists_to_gyp, output)

  # Target and sources
  srcs = spec.get('sources', [])

  # Gyp separates the sheep from the goats based on file extensions.
  # A full separation is done here because of flag handing (see below).
  s_sources = []
  c_sources = []
  cxx_sources = []
  linkable_sources = []
  other_sources = []
  for src in srcs:
    _, ext = os.path.splitext(src)
    src_type = COMPILABLE_EXTENSIONS.get(ext, None)
    src_norm_path = NormjoinPath(path_from_cmakelists_to_gyp, src)

    if src_type == 's':
      s_sources.append(src_norm_path)
    elif src_type == 'cc':
      c_sources.append(src_norm_path)
    elif src_type == 'cxx':
      cxx_sources.append(src_norm_path)
    elif Linkable(ext):
      linkable_sources.append(src_norm_path)
    else:
      other_sources.append(src_norm_path)

  # Generated sources (from actions/rules) arrive as (normalized, real) pairs;
  # classify them by the real source's extension.
  for extra_source in extra_sources:
    src, real_source = extra_source
    _, ext = os.path.splitext(real_source)
    src_type = COMPILABLE_EXTENSIONS.get(ext, None)

    if src_type == 's':
      s_sources.append(src)
    elif src_type == 'cc':
      c_sources.append(src)
    elif src_type == 'cxx':
      cxx_sources.append(src)
    elif Linkable(ext):
      linkable_sources.append(src)
    else:
      other_sources.append(src)

  s_sources_name = None
  if s_sources:
    s_sources_name = cmake_target_name + '__asm_srcs'
    SetVariableList(output, s_sources_name, s_sources)

  c_sources_name = None
  if c_sources:
    c_sources_name = cmake_target_name + '__c_srcs'
    SetVariableList(output, c_sources_name, c_sources)

  cxx_sources_name = None
  if cxx_sources:
    cxx_sources_name = cmake_target_name + '__cxx_srcs'
    SetVariableList(output, cxx_sources_name, cxx_sources)

  linkable_sources_name = None
  if linkable_sources:
    linkable_sources_name = cmake_target_name + '__linkable_srcs'
    SetVariableList(output, linkable_sources_name, linkable_sources)

  other_sources_name = None
  if other_sources:
    other_sources_name = cmake_target_name + '__other_srcs'
    SetVariableList(output, other_sources_name, other_sources)

  # CMake gets upset when executable targets provide no sources.
  # http://www.cmake.org/pipermail/cmake/2010-July/038461.html
  dummy_sources_name = None
  has_sources = (s_sources_name or
                 c_sources_name or
                 cxx_sources_name or
                 linkable_sources_name or
                 other_sources_name)
  if target_type == 'executable' and not has_sources:
    dummy_sources_name = cmake_target_name + '__dummy_srcs'
    SetVariable(output, dummy_sources_name,
                "${obj}.${TOOLSET}/${TARGET}/genc/dummy.c")
    output.write('if(NOT EXISTS "')
    WriteVariable(output, dummy_sources_name)
    output.write('")\n')
    output.write(' file(WRITE "')
    WriteVariable(output, dummy_sources_name)
    output.write('" "")\n')
    output.write("endif()\n")

  # CMake is opposed to setting linker directories and considers the practice
  # of setting linker directories dangerous. Instead, it favors the use of
  # find_library and passing absolute paths to target_link_libraries.
  # However, CMake does provide the command link_directories, which adds
  # link directories to targets defined after it is called.
  # As a result, link_directories must come before the target definition.
  # CMake unfortunately has no means of removing entries from LINK_DIRECTORIES.
  library_dirs = config.get('library_dirs')
  if library_dirs is not None:
    output.write('link_directories(')
    for library_dir in library_dirs:
      output.write(' ')
      output.write(NormjoinPath(path_from_cmakelists_to_gyp, library_dir))
      output.write('\n')
    output.write(')\n')

  output.write(cmake_target_type.command)
  output.write('(')
  output.write(cmake_target_name)

  if cmake_target_type.modifier is not None:
    output.write(' ')
    output.write(cmake_target_type.modifier)

  if s_sources_name:
    WriteVariable(output, s_sources_name, ' ')
  if c_sources_name:
    WriteVariable(output, c_sources_name, ' ')
  if cxx_sources_name:
    WriteVariable(output, cxx_sources_name, ' ')
  if linkable_sources_name:
    WriteVariable(output, linkable_sources_name, ' ')
  if other_sources_name:
    WriteVariable(output, other_sources_name, ' ')
  if dummy_sources_name:
    WriteVariable(output, dummy_sources_name, ' ')

  output.write(')\n')

  # Let CMake know if the 'all' target should depend on this target.
  exclude_from_all = ('TRUE' if qualified_target not in all_qualified_targets
                      else 'FALSE')
  SetTargetProperty(output, cmake_target_name,
                    'EXCLUDE_FROM_ALL', exclude_from_all)
  for extra_target_name in extra_deps:
    SetTargetProperty(output, extra_target_name,
                      'EXCLUDE_FROM_ALL', exclude_from_all)

  # Output name and location.
  if target_type != 'none':
    # Link as 'C' if there are no other files
    if not c_sources and not cxx_sources:
      SetTargetProperty(output, cmake_target_name, 'LINKER_LANGUAGE', ['C'])

    # Mark uncompiled sources as uncompiled.
    if other_sources_name:
      output.write('set_source_files_properties(')
      WriteVariable(output, other_sources_name, '')
      output.write(' PROPERTIES HEADER_FILE_ONLY "TRUE")\n')

    # Mark object sources as linkable.
    if linkable_sources_name:
      output.write('set_source_files_properties(')
      # BUG FIX: the original passed other_sources_name here, which set
      # EXTERNAL_OBJECT on the wrong file list (overriding the
      # HEADER_FILE_ONLY marking above) and never on the object sources
      # this branch is guarded by.
      WriteVariable(output, linkable_sources_name, '')
      output.write(' PROPERTIES EXTERNAL_OBJECT "TRUE")\n')

    # Output directory
    target_output_directory = spec.get('product_dir')
    if target_output_directory is None:
      if target_type in ('executable', 'loadable_module'):
        target_output_directory = generator_default_variables['PRODUCT_DIR']
      elif target_type == 'shared_library':
        target_output_directory = '${builddir}/lib.${TOOLSET}'
      elif spec.get('standalone_static_library', False):
        target_output_directory = generator_default_variables['PRODUCT_DIR']
      else:
        base_path = gyp.common.RelativePath(os.path.dirname(gyp_file),
                                            options.toplevel_dir)
        target_output_directory = '${obj}.${TOOLSET}'
        target_output_directory = (
            os.path.join(target_output_directory, base_path))

    cmake_target_output_directory = NormjoinPathForceCMakeSource(
        path_from_cmakelists_to_gyp,
        target_output_directory)
    SetTargetProperty(output,
                      cmake_target_name,
                      cmake_target_type.property_modifier + '_OUTPUT_DIRECTORY',
                      cmake_target_output_directory)

    # Output name
    default_product_prefix = ''
    default_product_name = target_name
    default_product_ext = ''
    if target_type == 'static_library':
      static_library_prefix = generator_default_variables['STATIC_LIB_PREFIX']
      default_product_name = RemovePrefix(default_product_name,
                                          static_library_prefix)
      default_product_prefix = static_library_prefix
      default_product_ext = generator_default_variables['STATIC_LIB_SUFFIX']

    elif target_type in ('loadable_module', 'shared_library'):
      shared_library_prefix = generator_default_variables['SHARED_LIB_PREFIX']
      default_product_name = RemovePrefix(default_product_name,
                                          shared_library_prefix)
      default_product_prefix = shared_library_prefix
      default_product_ext = generator_default_variables['SHARED_LIB_SUFFIX']

    elif target_type != 'executable':
      print ('ERROR: What output file should be generated?',
             'type', target_type, 'target', target_name)

    product_prefix = spec.get('product_prefix', default_product_prefix)
    product_name = spec.get('product_name', default_product_name)
    product_ext = spec.get('product_extension')
    if product_ext:
      product_ext = '.' + product_ext
    else:
      product_ext = default_product_ext

    SetTargetProperty(output, cmake_target_name, 'PREFIX', product_prefix)
    SetTargetProperty(output, cmake_target_name,
                      cmake_target_type.property_modifier + '_OUTPUT_NAME',
                      product_name)
    SetTargetProperty(output, cmake_target_name, 'SUFFIX', product_ext)

    # Make the output of this target referenceable as a source.
    cmake_target_output_basename = product_prefix + product_name + product_ext
    cmake_target_output = os.path.join(cmake_target_output_directory,
                                       cmake_target_output_basename)
    SetFileProperty(output, cmake_target_output, 'GENERATED', ['TRUE'], '')

    # Includes
    includes = config.get('include_dirs')
    if includes:
      # This (target include directories) is what requires CMake 2.8.8
      includes_name = cmake_target_name + '__include_dirs'
      SetVariableList(output, includes_name,
                      [NormjoinPathForceCMakeSource(path_from_cmakelists_to_gyp,
                                                    include)
                       for include in includes])
      output.write('set_property(TARGET ')
      output.write(cmake_target_name)
      output.write(' APPEND PROPERTY INCLUDE_DIRECTORIES ')
      WriteVariable(output, includes_name, '')
      output.write(')\n')

    # Defines
    defines = config.get('defines')
    if defines is not None:
      SetTargetProperty(output,
                        cmake_target_name,
                        'COMPILE_DEFINITIONS',
                        defines,
                        ';')

    # Compile Flags - http://www.cmake.org/Bug/view.php?id=6493
    # CMake currently does not have target C and CXX flags.
    # So, instead of doing...
    # cflags_c = config.get('cflags_c')
    # if cflags_c is not None:
    #   SetTargetProperty(output, cmake_target_name,
    #                     'C_COMPILE_FLAGS', cflags_c, ' ')
    # cflags_cc = config.get('cflags_cc')
    # if cflags_cc is not None:
    #   SetTargetProperty(output, cmake_target_name,
    #                     'CXX_COMPILE_FLAGS', cflags_cc, ' ')
    # Instead we must...
    cflags = config.get('cflags', [])
    cflags_c = config.get('cflags_c', [])
    cflags_cxx = config.get('cflags_cc', [])
    if (not cflags_c or not c_sources) and (not cflags_cxx or not cxx_sources):
      SetTargetProperty(output, cmake_target_name, 'COMPILE_FLAGS', cflags, ' ')

    elif c_sources and not (s_sources or cxx_sources):
      flags = []
      flags.extend(cflags)
      flags.extend(cflags_c)
      SetTargetProperty(output, cmake_target_name, 'COMPILE_FLAGS', flags, ' ')

    elif cxx_sources and not (s_sources or c_sources):
      flags = []
      flags.extend(cflags)
      flags.extend(cflags_cxx)
      SetTargetProperty(output, cmake_target_name, 'COMPILE_FLAGS', flags, ' ')

    else:
      # TODO: This is broken, one cannot generally set properties on files,
      # as other targets may require different properties on the same files.
      if s_sources and cflags:
        SetFilesProperty(output, s_sources_name, 'COMPILE_FLAGS', cflags, ' ')

      if c_sources and (cflags or cflags_c):
        flags = []
        flags.extend(cflags)
        flags.extend(cflags_c)
        SetFilesProperty(output, c_sources_name, 'COMPILE_FLAGS', flags, ' ')

      if cxx_sources and (cflags or cflags_cxx):
        flags = []
        flags.extend(cflags)
        flags.extend(cflags_cxx)
        SetFilesProperty(output, cxx_sources_name, 'COMPILE_FLAGS', flags, ' ')

    # Linker flags
    ldflags = config.get('ldflags')
    if ldflags is not None:
      SetTargetProperty(output, cmake_target_name, 'LINK_FLAGS', ldflags, ' ')

  # Note on Dependencies and Libraries:
  # CMake wants to handle link order, resolving the link line up front.
  # Gyp does not retain or enforce specifying enough information to do so.
  # So do as other gyp generators and use --start-group and --end-group.
  # Give CMake as little information as possible so that it doesn't mess it up.

  # Dependencies
  rawDeps = spec.get('dependencies', [])

  static_deps = []
  shared_deps = []
  other_deps = []
  for rawDep in rawDeps:
    dep_cmake_name = namer.CreateCMakeTargetName(rawDep)
    dep_spec = target_dicts.get(rawDep, {})
    dep_target_type = dep_spec.get('type', None)

    if dep_target_type == 'static_library':
      static_deps.append(dep_cmake_name)
    elif dep_target_type == 'shared_library':
      shared_deps.append(dep_cmake_name)
    else:
      other_deps.append(dep_cmake_name)

  # ensure all external dependencies are complete before internal dependencies
  # extra_deps currently only depend on their own deps, so otherwise run early
  if static_deps or shared_deps or other_deps:
    for extra_dep in extra_deps:
      output.write('add_dependencies(')
      output.write(extra_dep)
      output.write('\n')
      for deps in (static_deps, shared_deps, other_deps):
        for dep in gyp.common.uniquer(deps):
          output.write(' ')
          output.write(dep)
          output.write('\n')
      output.write(')\n')

  linkable = target_type in ('executable', 'loadable_module', 'shared_library')
  other_deps.extend(extra_deps)
  if other_deps or (not linkable and (static_deps or shared_deps)):
    output.write('add_dependencies(')
    output.write(cmake_target_name)
    output.write('\n')
    for dep in gyp.common.uniquer(other_deps):
      output.write(' ')
      output.write(dep)
      output.write('\n')
    if not linkable:
      for deps in (static_deps, shared_deps):
        for lib_dep in gyp.common.uniquer(deps):
          output.write(' ')
          output.write(lib_dep)
          output.write('\n')
    output.write(')\n')

  # Libraries
  if linkable:
    external_libs = [lib for lib in spec.get('libraries', []) if len(lib) > 0]
    if external_libs or static_deps or shared_deps:
      output.write('target_link_libraries(')
      output.write(cmake_target_name)
      output.write('\n')
      if static_deps:
        write_group = circular_libs and len(static_deps) > 1
        if write_group:
          output.write('-Wl,--start-group\n')
        for dep in gyp.common.uniquer(static_deps):
          output.write(' ')
          output.write(dep)
          output.write('\n')
        if write_group:
          output.write('-Wl,--end-group\n')
      if shared_deps:
        for dep in gyp.common.uniquer(shared_deps):
          output.write(' ')
          output.write(dep)
          output.write('\n')
      if external_libs:
        for lib in gyp.common.uniquer(external_libs):
          output.write(' ')
          output.write(lib)
          output.write('\n')
      output.write(')\n')

  UnsetVariable(output, 'TOOLSET')
  UnsetVariable(output, 'TARGET')
def GenerateOutputForConfig(target_list, target_dicts, data,
                            params, config_to_use):
  """Write a complete CMakeLists.txt for a single gyp configuration."""
  options = params['options']
  generator_flags = params['generator_flags']

  # generator_dir: relative path from pwd to where make puts build files.
  # Makes migrating from make to cmake easier, cmake doesn't put anything here.
  # Each Gyp configuration creates a different CMakeLists.txt file
  # to avoid incompatibilities between Gyp and CMake configurations.
  generator_dir = os.path.relpath(options.generator_output or '.')

  # output_dir: relative path from generator_dir to the build directory.
  output_dir = generator_flags.get('output_dir', 'out')

  # build_dir: relative path from source root to our output files.
  # e.g. "out/Debug"
  build_dir = os.path.normpath(os.path.join(generator_dir,
                                            output_dir,
                                            config_to_use))

  toplevel_build = os.path.join(options.toplevel_dir, build_dir)

  output_file = os.path.join(toplevel_build, 'CMakeLists.txt')
  gyp.common.EnsureDirExists(output_file)

  # BUG FIX: use a context manager so the file handle is closed even if
  # generation raises part-way through (the original open()/close() pair
  # leaked the handle on any exception).
  with open(output_file, 'w') as output:
    output.write('cmake_minimum_required(VERSION 2.8.8 FATAL_ERROR)\n')
    output.write('cmake_policy(VERSION 2.8.8)\n')

    gyp_file, project_target, _ = \
        gyp.common.ParseQualifiedTarget(target_list[-1])
    output.write('project(')
    output.write(project_target)
    output.write(')\n')

    SetVariable(output, 'configuration', config_to_use)

    ar = None
    cc = None
    cxx = None

    make_global_settings = data[gyp_file].get('make_global_settings', [])
    build_to_top = gyp.common.InvertRelativePath(build_dir,
                                                 options.toplevel_dir)
    for key, value in make_global_settings:
      if key == 'AR':
        ar = os.path.join(build_to_top, value)
      if key == 'CC':
        cc = os.path.join(build_to_top, value)
      if key == 'CXX':
        cxx = os.path.join(build_to_top, value)

    # Environment variables override make_global_settings.
    ar = gyp.common.GetEnvironFallback(['AR_target', 'AR'], ar)
    cc = gyp.common.GetEnvironFallback(['CC_target', 'CC'], cc)
    cxx = gyp.common.GetEnvironFallback(['CXX_target', 'CXX'], cxx)

    if ar:
      SetVariable(output, 'CMAKE_AR', ar)
    if cc:
      SetVariable(output, 'CMAKE_C_COMPILER', cc)
    if cxx:
      SetVariable(output, 'CMAKE_CXX_COMPILER', cxx)

    # The following appears to be as-yet undocumented.
    # http://public.kitware.com/Bug/view.php?id=8392
    output.write('enable_language(ASM)\n')

    # ASM-ATT does not support .S files.
    # output.write('enable_language(ASM-ATT)\n')

    if cc:
      SetVariable(output, 'CMAKE_ASM_COMPILER', cc)

    SetVariable(output, 'builddir', '${CMAKE_BINARY_DIR}')
    SetVariable(output, 'obj', '${builddir}/obj')
    output.write('\n')

    # TODO: Undocumented/unsupported (the CMake Java generator depends on it).
    # CMake by default names the object resulting from foo.c to be foo.c.o.
    # Gyp traditionally names the object resulting from foo.c foo.o.
    # This should be irrelevant, but some targets extract .o files from .a
    # and depend on the name of the extracted .o files.
    output.write('set(CMAKE_C_OUTPUT_EXTENSION_REPLACE 1)\n')
    output.write('set(CMAKE_CXX_OUTPUT_EXTENSION_REPLACE 1)\n')
    output.write('\n')

    # Force ninja to use rsp files. Otherwise link and ar lines can get too
    # long, resulting in 'Argument list too long' errors.
    output.write('set(CMAKE_NINJA_FORCE_RESPONSE_FILE 1)\n')
    output.write('\n')

    namer = CMakeNamer(target_list)

    # The list of targets upon which the 'all' target should depend.
    # CMake has it's own implicit 'all' target, one is not created explicitly.
    all_qualified_targets = set()
    for build_file in params['build_files']:
      for qualified_target in gyp.common.AllTargets(target_list,
                                                    target_dicts,
                                                    os.path.normpath(
                                                        build_file)):
        all_qualified_targets.add(qualified_target)

    for qualified_target in target_list:
      WriteTarget(namer, qualified_target, target_dicts, build_dir,
                  config_to_use, options, generator_flags,
                  all_qualified_targets, output)
def PerformBuild(data, configurations, params):
  """Run CMake (Ninja generator) and then ninja for each configuration.

  Assumes GenerateOutput has already written a CMakeLists.txt into each
  configuration's build directory.
  """
  options = params['options']
  generator_flags = params['generator_flags']

  # generator_dir: relative path from pwd to where make puts build files.
  # Makes migrating from make to cmake easier, cmake doesn't put anything here.
  generator_dir = os.path.relpath(options.generator_output or '.')

  # output_dir: relative path from generator_dir to the build directory.
  output_dir = generator_flags.get('output_dir', 'out')

  for config_name in configurations:
    # build_dir: relative path from source root to our output files.
    # e.g. "out/Debug"
    build_dir = os.path.normpath(os.path.join(generator_dir,
                                              output_dir,
                                              config_name))
    arguments = ['cmake', '-G', 'Ninja']
    # FIX: Python-2-only `print expr` statements replaced with the
    # parenthesized form, which behaves identically on Python 2 and is
    # valid Python 3 syntax.
    print('Generating [%s]: %s' % (config_name, arguments))
    subprocess.check_call(arguments, cwd=build_dir)

    arguments = ['ninja', '-C', build_dir]
    print('Building [%s]: %s' % (config_name, arguments))
    subprocess.check_call(arguments)
def CallGenerateOutputForConfig(arglist):
  """Multiprocessing shim: unpack one argument tuple and generate output.

  `arglist` is (target_list, target_dicts, data, params, config_name).
  """
  # Ignore the interrupt signal so that the parent process catches it and
  # kills all multiprocessing children.
  signal.signal(signal.SIGINT, signal.SIG_IGN)
  GenerateOutputForConfig(*arglist)
def GenerateOutput(target_list, target_dicts, data, params):
  """Generator entry point: write CMakeLists.txt for every configuration.

  If the 'config' generator flag names a single configuration, only that
  one is generated; otherwise all configurations are generated, optionally
  in parallel via a multiprocessing pool.
  """
  user_config = params.get('generator_flags', {}).get('config', None)
  if user_config:
    GenerateOutputForConfig(target_list, target_dicts, data,
                            params, user_config)
  else:
    config_names = target_dicts[target_list[0]]['configurations'].keys()
    if params['parallel']:
      # FIX: create the pool before the try block so the interrupt handler
      # cannot hit an unbound `pool`, and use Python-2.6+/3-compatible
      # except syntax (the original `except KeyboardInterrupt, e` is
      # Python-2-only). A bare `raise` re-raises with the original
      # traceback intact, unlike `raise e`.
      pool = multiprocessing.Pool(len(config_names))
      try:
        arglists = []
        for config_name in config_names:
          arglists.append((target_list, target_dicts, data,
                           params, config_name))
        pool.map(CallGenerateOutputForConfig, arglists)
      except KeyboardInterrupt:
        pool.terminate()
        raise
    else:
      for config_name in config_names:
        GenerateOutputForConfig(target_list, target_dicts, data,
                                params, config_name)
| bsd-3-clause |
hantek/pylearn2 | pylearn2/scripts/tutorials/deep_trainer/run_deep_trainer.py | 44 | 9086 | #!/usr/bin/env python
from __future__ import print_function
__author__ = "Li Yao"
"""
See readme.txt
A small example of how to glue shining features of pylearn2 together
to train models layer by layer.
"""
MAX_EPOCHS_UNSUPERVISED = 1
MAX_EPOCHS_SUPERVISED = 2
from pylearn2.config import yaml_parse
from pylearn2.corruption import BinomialCorruptor
from pylearn2.corruption import GaussianCorruptor
from pylearn2.costs.mlp import Default
from pylearn2.models.autoencoder import Autoencoder, DenoisingAutoencoder
from pylearn2.models.rbm import GaussianBinaryRBM
from pylearn2.models.softmax_regression import SoftmaxRegression
from pylearn2.training_algorithms.sgd import SGD
from pylearn2.costs.autoencoder import MeanSquaredReconstructionError
from pylearn2.termination_criteria import EpochCounter
from pylearn2.datasets.dense_design_matrix import DenseDesignMatrix
from pylearn2.energy_functions.rbm_energy import GRBM_Type_1
from pylearn2.blocks import StackedBlocks
from pylearn2.datasets.transformer_dataset import TransformerDataset
from pylearn2.costs.ebm_estimation import SMD
from pylearn2.training_algorithms.sgd import MonitorBasedLRAdjuster
from pylearn2.train import Train
from optparse import OptionParser
import numpy
class ToyDataset(DenseDesignMatrix):
    """A small random dataset used only to exercise training pipelines."""

    def __init__(self):
        # simulated random dataset
        rng = numpy.random.RandomState(seed=42)
        data = rng.normal(size=(1000, 10))
        # BUG FIX: draw the labels from the same seeded generator so the
        # whole dataset is reproducible. The original used the unseeded
        # global numpy.random for y, which defeated the point of creating
        # a seeded RandomState.
        self.y = rng.binomial(1, 0.5, (1000, 1))
        super(ToyDataset, self).__init__(X=data, y=self.y, y_labels=2)
def get_dataset_toy():
    """Return (trainset, testset) built from fresh ToyDataset instances.

    The toy dataset is only meant to be used for testing pipelines.
    Do not try to visualize weights on it: it is not picture data and
    has no color channel info to support visualization.
    """
    return ToyDataset(), ToyDataset()
def get_dataset_cifar10():
    """Load CIFAR-10 and return (trainset, testset).

    The datasets are built by parsing YAML strings: the yaml parser tags
    each dataset with a yaml_src field holding the YAML used to create it,
    which the training algorithm later stores in the saved model to record
    exactly what data it was trained on.
    """
    print('loading CIFAR-10 dataset...')
    template = """!obj:pylearn2.datasets.cifar10.CIFAR10 {
which_set: %s,
center: 1,
rescale: 1,
}"""
    trainset = yaml_parse.load(template % "train")
    testset = yaml_parse.load(template % "test")
    print('...done loading CIFAR-10.')
    return trainset, testset
def get_dataset_mnist():
    """Load MNIST and return (trainset, testset).

    Uses YAML parsing so each dataset carries a yaml_src field describing
    how it was constructed (see get_dataset_cifar10 for the rationale).
    """
    print('loading MNIST dataset...')
    template = """!obj:pylearn2.datasets.mnist.MNIST {
which_set: %s,
}"""
    trainset = yaml_parse.load(template % "train")
    testset = yaml_parse.load(template % "test")
    print('...done loading MNIST.')
    return trainset, testset
def get_autoencoder(structure):
    """Build a tied-weight sigmoid Autoencoder.

    structure: (n_input, n_output) pair giving visible and hidden sizes.
    """
    n_input, n_output = structure
    return Autoencoder(
        nvis=n_input,
        nhid=n_output,
        tied_weights=True,
        act_enc='sigmoid',
        act_dec='sigmoid',
        irange=0.001,
    )
def get_denoising_autoencoder(structure):
    """Build a DenoisingAutoencoder with 50% binomial input corruption.

    structure: (n_input, n_output) pair giving visible and hidden sizes.
    """
    n_input, n_output = structure
    corruptor = BinomialCorruptor(corruption_level=0.5)
    return DenoisingAutoencoder(
        corruptor=corruptor,
        nvis=n_input,
        nhid=n_output,
        tied_weights=True,
        act_enc='sigmoid',
        act_dec='sigmoid',
        irange=0.001,
    )
def get_grbm(structure):
    """Build a Gaussian-binary RBM with a learned sigma.

    structure: (n_input, n_output) pair giving visible and hidden sizes.
    """
    n_input, n_output = structure
    return GaussianBinaryRBM(
        nvis=n_input,
        nhid=n_output,
        irange=0.05,
        energy_function_class=GRBM_Type_1,
        learn_sigma=True,
        init_sigma=.4,
        init_bias_hid=-2.,
        mean_vis=False,
        sigma_lr_scale=1e-3,
    )
def get_logistic_regressor(structure):
    """Build the softmax-regression output layer.

    structure: (n_input, n_classes) pair.
    """
    n_input, n_output = structure
    return SoftmaxRegression(n_classes=n_output, irange=0.02, nvis=n_input)
def get_layer_trainer_logistic(layer, trainset):
    """Wrap `layer` in a supervised SGD Train object over `trainset`."""
    # SGD settings for the supervised fine-tuning stage.
    train_algo = SGD(
        learning_rate=0.1,
        cost=Default(),
        batch_size=10,
        monitoring_batches=10,
        monitoring_dataset=trainset,
        termination_criterion=EpochCounter(max_epochs=MAX_EPOCHS_SUPERVISED),
        update_callbacks=None,
    )
    return Train(model=layer,
                 dataset=trainset,
                 algorithm=train_algo,
                 extensions=None)
def get_layer_trainer_sgd_autoencoder(layer, trainset):
    """Wrap an autoencoder layer in an unsupervised SGD Train object."""
    # Reconstruction-error SGD for the unsupervised pretraining stage.
    train_algo = SGD(
        learning_rate=0.1,
        cost=MeanSquaredReconstructionError(),
        batch_size=10,
        monitoring_batches=10,
        monitoring_dataset=trainset,
        termination_criterion=EpochCounter(
            max_epochs=MAX_EPOCHS_UNSUPERVISED),
        update_callbacks=None,
    )
    return Train(model=layer,
                 dataset=trainset,
                 algorithm=train_algo,
                 extensions=None)
def get_layer_trainer_sgd_rbm(layer, trainset):
    """Wrap a GRBM layer in an SGD Train object that checkpoints to disk."""
    train_algo = SGD(
        learning_rate=1e-1,
        batch_size=5,
        #"batches_per_iter" : 2000,
        monitoring_batches=20,
        monitoring_dataset=trainset,
        # Score-matching denoising cost with Gaussian input corruption.
        cost=SMD(corruptor=GaussianCorruptor(stdev=0.4)),
        termination_criterion=EpochCounter(
            max_epochs=MAX_EPOCHS_UNSUPERVISED),
    )
    return Train(model=layer,
                 dataset=trainset,
                 algorithm=train_algo,
                 save_path='grbm.pkl',
                 save_freq=1,
                 extensions=[MonitorBasedLRAdjuster()])
def main(args=None):
    """Train a deep model layer by layer, then fine-tune with supervision.

    args is the list of arguments that will be passed to the option parser.
    The default (None) means use sys.argv[1:].
    """
    parser = OptionParser()
    parser.add_option("-d", "--data", dest="dataset", default="toy",
                      help="specify the dataset, either cifar10, mnist or toy")
    (options, args) = parser.parse_args(args=args)

    if options.dataset == 'toy':
        trainset, testset = get_dataset_toy()
        n_output = 2
    elif options.dataset == 'cifar10':
        trainset, testset, = get_dataset_cifar10()
        n_output = 10
    elif options.dataset == 'mnist':
        trainset, testset, = get_dataset_mnist()
        n_output = 10
    else:
        # BUG FIX: the original built NotImplementedError() but never
        # raised it, so an unknown dataset name fell through and later
        # crashed with a confusing NameError on `trainset`.
        raise NotImplementedError()

    design_matrix = trainset.get_design_matrix()
    n_input = design_matrix.shape[1]

    # build layers
    layers = []
    structure = [[n_input, 10], [10, 50], [50, 100], [100, n_output]]
    # layer 0: gaussianRBM
    layers.append(get_grbm(structure[0]))
    # layer 1: denoising AE
    layers.append(get_denoising_autoencoder(structure[1]))
    # layer 2: AE
    layers.append(get_autoencoder(structure[2]))
    # layer 3: logistic regression used in supervised training
    layers.append(get_logistic_regressor(structure[3]))

    # construct training sets for different layers: each deeper layer
    # trains on the raw data pushed through the already-built lower stack.
    trainset = [trainset,
                TransformerDataset(raw=trainset, transformer=layers[0]),
                TransformerDataset(raw=trainset,
                                   transformer=StackedBlocks(layers[0:2])),
                TransformerDataset(raw=trainset,
                                   transformer=StackedBlocks(layers[0:3]))]

    # construct layer trainers
    layer_trainers = []
    layer_trainers.append(get_layer_trainer_sgd_rbm(layers[0], trainset[0]))
    layer_trainers.append(
        get_layer_trainer_sgd_autoencoder(layers[1], trainset[1]))
    layer_trainers.append(
        get_layer_trainer_sgd_autoencoder(layers[2], trainset[2]))
    layer_trainers.append(get_layer_trainer_logistic(layers[3], trainset[3]))

    # unsupervised pretraining
    for i, layer_trainer in enumerate(layer_trainers[0:3]):
        print('-----------------------------------')
        print(' Unsupervised training layer %d, %s' % (i, layers[i].__class__))
        print('-----------------------------------')
        layer_trainer.main_loop()

    print('\n')
    print('------------------------------------------------------')
    print(' Unsupervised training done! Start supervised training...')
    print('------------------------------------------------------')
    print('\n')

    # supervised training
    layer_trainers[-1].main_loop()
# Run the full layer-wise training demo when invoked as a script.
if __name__ == '__main__':
    main()
| bsd-3-clause |
postlund/home-assistant | homeassistant/components/opentherm_gw/sensor.py | 3 | 3684 | """Support for OpenTherm Gateway sensors."""
import logging
from homeassistant.components.sensor import ENTITY_ID_FORMAT
from homeassistant.const import CONF_ID
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity import Entity, async_generate_entity_id
from . import DOMAIN
from .const import DATA_GATEWAYS, DATA_OPENTHERM_GW, SENSOR_INFO
_LOGGER = logging.getLogger(__name__)
async def async_setup_entry(hass, config_entry, async_add_entities):
    """Set up the OpenTherm Gateway sensors."""
    # Look up the gateway hub this config entry belongs to, then create one
    # sensor entity per (variable, metadata) pair in SENSOR_INFO.
    gw_dev = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][config_entry.data[CONF_ID]]
    sensors = [
        OpenThermSensor(
            gw_dev,
            var,
            info[0],  # device class
            info[1],  # unit of measurement
            info[2],  # friendly name format
        )
        for var, info in SENSOR_INFO.items()
    ]
    async_add_entities(sensors)
class OpenThermSensor(Entity):
    """Representation of an OpenTherm Gateway sensor.

    Each instance reports a single status variable published by the gateway
    hub and refreshes itself via the dispatcher signal the hub fires after
    every status update (push model, no polling).
    """

    def __init__(self, gw_dev, var, device_class, unit, friendly_name_format):
        """Initialize the OpenTherm Gateway sensor.

        gw_dev: the gateway hub object this sensor belongs to.
        var: key into the gateway status dict that this sensor reports.
        device_class / unit: Home Assistant sensor metadata.
        friendly_name_format: format string taking the gateway's name.
        """
        self.entity_id = async_generate_entity_id(
            ENTITY_ID_FORMAT, f"{var}_{gw_dev.gw_id}", hass=gw_dev.hass
        )
        self._gateway = gw_dev
        self._var = var
        # Latest reported value; None until the first status report arrives.
        self._value = None
        self._device_class = device_class
        self._unit = unit
        self._friendly_name = friendly_name_format.format(gw_dev.name)
        # Dispatcher unsubscribe callback, set in async_added_to_hass.
        self._unsub_updates = None

    async def async_added_to_hass(self):
        """Subscribe to updates from the component."""
        _LOGGER.debug("Added OpenTherm Gateway sensor %s", self._friendly_name)
        self._unsub_updates = async_dispatcher_connect(
            self.hass, self._gateway.update_signal, self.receive_report
        )

    async def async_will_remove_from_hass(self):
        """Unsubscribe from updates from the component."""
        _LOGGER.debug("Removing OpenTherm Gateway sensor %s", self._friendly_name)
        self._unsub_updates()

    @property
    def entity_registry_enabled_default(self):
        """Disable sensors by default."""
        return False

    @callback
    def receive_report(self, status):
        """Handle status updates from the component.

        `status` is the gateway's full status dict; only this sensor's
        variable is read. Float values are formatted to one decimal place.
        """
        value = status.get(self._var)
        if isinstance(value, float):
            value = f"{value:2.1f}"
        self._value = value
        self.async_schedule_update_ha_state()

    @property
    def name(self):
        """Return the friendly name of the sensor."""
        return self._friendly_name

    @property
    def device_info(self):
        """Return device info tying this entity to the gateway device."""
        return {
            "identifiers": {(DOMAIN, self._gateway.gw_id)},
            "name": self._gateway.name,
            "manufacturer": "Schelte Bron",
            "model": "OpenTherm Gateway",
            "sw_version": self._gateway.gw_version,
        }

    @property
    def unique_id(self):
        """Return a unique ID."""
        return f"{self._gateway.gw_id}-{self._var}"

    @property
    def device_class(self):
        """Return the device class."""
        return self._device_class

    @property
    def state(self):
        """Return the state of the device."""
        return self._value

    @property
    def unit_of_measurement(self):
        """Return the unit of measurement."""
        return self._unit

    @property
    def should_poll(self):
        """Return False because entity pushes its state."""
        return False
| apache-2.0 |
UNINETT/nav | python/nav/eventengine/plugins/linkstate.py | 2 | 8822 | #
# Copyright (C) 2012 Uninett AS
#
# This file is part of Network Administration Visualized (NAV).
#
# NAV is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License version 3 as published by
# the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details. You should have received a copy of the GNU General Public License
# along with NAV. If not, see <http://www.gnu.org/licenses/>.
#
""""linkState event plugin"""
import copy
from nav.config import ConfigurationError
from nav.eventengine.alerts import AlertGenerator
from nav.eventengine.plugins import delayedstate
from nav.models.manage import Interface, Netbox
from nav.models.event import EventQueue as Event, EventQueueVar as EventVar
class LinkStateHandler(delayedstate.DelayedStateHandler):
    """Accepts linkState events"""

    # No preliminary warning alert is posted for link state changes; the
    # actual linkDown alert is delayed by the 'linkDown.alert' config option
    # (interpreted by the DelayedStateHandler base class).
    HAS_WARNING_ALERT = False
    ALERT_WAIT_TIME = 'linkDown.alert'

    handled_types = ('linkState',)
    _target = None  # lazily resolved Interface the event's subid refers to

    def __init__(self, *args, **kwargs):
        super(LinkStateHandler, self).__init__(*args, **kwargs)
        self.config = LinkStateConfiguration(self.engine.config)

    def get_target(self):
        """Returns (and caches) the Interface this event's subid points at."""
        if not self._target:
            self._target = Interface.objects.get(id=self.event.subid)
            # sanity check: the interface must belong to the event's netbox
            assert self._target.netbox_id == self.event.netbox.id
        return self._target

    def get_link_partner(self):
        """Returns the link partner of the target interface"""
        return self.get_target().to_netbox

    def handle(self):
        # When this netbox is a master for virtualized instances, the event
        # is first duplicated for each instance, then processed normally.
        if self._is_a_master_for_virtualized_instances():
            self._copy_event_for_instances()
        return super(LinkStateHandler, self).handle()

    def _handle_end(self):
        self._post_event_if_aggregate_restored()  # always verify aggregates
        return super(LinkStateHandler, self)._handle_end()

    def _set_internal_state_down(self):
        self._set_ifoperstatus(Interface.OPER_DOWN)

    def _set_internal_state_up(self):
        self._set_ifoperstatus(Interface.OPER_UP)

    def _set_ifoperstatus(self, ifoperstatus):
        """Updates the target interface's ifoperstatus, both on the cached
        object and directly in the database, but only if it changed.
        """
        ifc = self.get_target()
        if ifc.ifoperstatus != ifoperstatus:
            ifc.ifoperstatus = ifoperstatus
            Interface.objects.filter(id=ifc.id).update(
                ifoperstatus=ifoperstatus)

    def _get_up_alert(self):
        """Builds a linkUp alert for the target interface."""
        alert = AlertGenerator(self.event)
        alert.alert_type = "linkUp"
        self._logger.info("Posting %s alert for %s", alert.alert_type,
                          self.get_target())
        return alert

    def _get_down_alert(self):
        """Builds a linkDown alert, or None if config says to withhold it."""
        self._post_event_if_aggregate_degraded()  # always verify aggregates
        alert = AlertGenerator(self.event)
        alert.alert_type = "linkDown"
        # The alert may be withheld either because only redundancy-degrading
        # linkDowns should alert, or because alerts are limited to a set of
        # VLANs this interface is not part of.
        if any((self._hold_back_alert_due_to_vlan_mismatch(),
                self._hold_back_alert_due_to_redundancy_limit())):
            self._logger.info("%s: withholding %s alert because of unmatched "
                              "criteria", self.get_target(), alert.alert_type)
            return None
        return alert

    def _post_down_warning(self):
        # Intentionally empty: HAS_WARNING_ALERT is False for link events.
        pass

    def _hold_back_alert_due_to_redundancy_limit(self):
        """Returns True if the alert should be withheld because this
        linkDown did not degrade a redundant link setup.
        """
        if self.config.alert_only_on_redundancy_loss():
            partner = self.get_link_partner()
            redundancy_loss = partner and partner.up == Netbox.UP_UP
            if redundancy_loss:
                self._logger.info("likely link redundancy degradation: %s is "
                                  "down, but link partner %s is still up",
                                  self.get_target(), partner)
            else:
                return True
        return False

    def _hold_back_alert_due_to_vlan_mismatch(self):
        """Returns True if the alert should be withheld because the target
        interface's VLANs do not intersect the configured VLAN limit set.
        An interface with no known VLANs is never held back.
        """
        limited_to_vlans = self.config.get_vlan_limit_set()
        if limited_to_vlans:
            vlans = self._get_target_vlans()
            if vlans.intersection(limited_to_vlans):
                self._logger.info("%s vlans %r intersects with list of "
                                  "limited vlans %r",
                                  self.get_target(), vlans, limited_to_vlans)
            elif vlans:
                self._logger.info("%s vlans %r does not intersect with list "
                                  "of limited vlans %r",
                                  self.get_target(), vlans, limited_to_vlans)
                return True
        return False

    def _get_target_vlans(self):
        """Returns the set of untagged/tagged vlans configured on the target
        interface.
        """
        ifc = self.get_target()
        vlans = ifc.swportvlan_set.values('vlan__vlan')
        vlans = {row['vlan__vlan'] for row in vlans}
        return vlans

    #
    # Methods to handle aggregateLinkState event posting if this interface is
    # part of an aggregate
    #

    def _post_event_if_aggregate_degraded(self):
        if self.get_target().get_aggregator():
            self._logger.info(
                "down event for %s, posting linkDegraded event for %s",
                self.get_target(), self.get_target().get_aggregator())
            return self._get_aggregate_link_event(start=True)

    def _post_event_if_aggregate_restored(self):
        if self.get_target().get_aggregator():
            self._logger.info(
                "up event for %s, posting linkRestored event for %s",
                self.get_target(), self.get_target().get_aggregator())
            return self._get_aggregate_link_event(start=False)

    def _get_aggregate_link_event(self, start):
        """Posts an aggregateLinkState event for the target's aggregator.

        :param start: True posts a linkDegraded start event, False posts a
                      linkRestored end event.
        """
        target = self.get_target()
        aggregator = target.get_aggregator()
        event = Event()
        event.source_id = event.target_id = 'eventEngine'
        event.netbox_id = aggregator.netbox_id
        event.subid = aggregator.id
        event.event_type_id = 'aggregateLinkState'
        event.state = event.STATE_START if start else event.STATE_END
        event.save()
        # Attach details about the member port whose state changed:
        EventVar(event_queue=event, variable='alerttype',
                 value='linkDegraded' if start else 'linkRestored').save()
        EventVar(event_queue=event, variable='aggregate',
                 value=target.id).save()
        EventVar(event_queue=event, variable='aggregate_ifname',
                 value=target.ifname).save()
        EventVar(event_queue=event, variable='aggregate_ifalias',
                 value=target.ifalias or '').save()

    #
    # Methods to handle duplication of events for virtualized netbox instances
    #

    def _is_a_master_for_virtualized_instances(self):
        """Returns True if the event's netbox has virtualized instances."""
        ifc = self.get_target()
        return ifc and ifc.netbox and ifc.netbox.instances.count() > 0

    def _copy_event_for_instances(self):
        """Duplicates this event for every virtual instance of the netbox."""
        ifc = self.get_target()
        netbox = ifc.netbox
        for instance in netbox.instances.all():
            self._copy_event_for_instance(netbox, instance, ifc)

    def _copy_event_for_instance(self, netbox, instance, ifc):
        """Duplicates this event for an instance's interface with the same
        ifname as the master's, if one exists.
        """
        try:
            other_ifc = Interface.objects.get(netbox=instance,
                                              ifname=ifc.ifname)
        except Interface.DoesNotExist:
            self._logger.info("interface %s does not exist on instance %s",
                              ifc.ifname, instance)
            return
        new_event = copy.copy(self.event)  # type: nav.models.event.EventQueue
        new_event.pk = None  # force INSERT of a new event row on save()
        new_event.netbox = instance
        new_event.device = None
        new_event.subid = other_ifc.pk
        self._logger.info('duplicating linkState event for %s to %s',
                          ifc, instance)
        new_event.save()
class LinkStateConfiguration(object):
    """Reads linkState-related options out of the eventengine config."""

    def __init__(self, config):
        self.config = config

    def get_vlan_limit_set(self):
        """Returns the set of VLAN IDs that linkState alerts are limited to.

        An empty set means no limit is configured.
        """
        opt = ("linkdown", "limit_to_vlans")
        if not self.config.has_option(*opt):
            return set()
        vlanstring = self.config.get(*opt)
        try:
            return {int(vlan) for vlan in vlanstring.split()}
        except (TypeError, ValueError):
            raise ConfigurationError("Invalid config value for %s" % opt)

    def alert_only_on_redundancy_loss(self):
        """Returns True if linkState alerts are only to be sent on linkDown's
        that degrade a redundant link setup. Defaults to True when the
        option is absent.
        """
        opt = ("linkdown", "only_redundant")
        return self.config.getboolean(*opt) if self.config.has_option(*opt) else True
| gpl-2.0 |
40223137/2015abc | static/Brython3.1.0-20150301-090019/Lib/gc.py | 743 | 3548 | """This module provides access to the garbage collector for reference cycles.
enable() -- Enable automatic garbage collection.
disable() -- Disable automatic garbage collection.
isenabled() -- Returns true if automatic collection is enabled.
collect() -- Do a full collection right now.
get_count() -- Return the current collection counts.
set_debug() -- Set debugging flags.
get_debug() -- Get debugging flags.
set_threshold() -- Set the collection thresholds.
get_threshold() -- Return the current the collection thresholds.
get_objects() -- Return a list of all objects tracked by the collector.
is_tracked() -- Returns true if a given object is tracked.
get_referrers() -- Return the list of objects that refer to an object.
get_referents() -- Return the list of objects that an object refers to.
"""
DEBUG_COLLECTABLE = 2
DEBUG_LEAK = 38
DEBUG_SAVEALL = 32
DEBUG_STATS = 1
DEBUG_UNCOLLECTABLE = 4
class __loader__:
pass
callbacks = []
def collect(*args,**kw):
"""collect([generation]) -> n
With no arguments, run a full collection. The optional argument
may be an integer specifying which generation to collect. A ValueError
is raised if the generation number is invalid.
The number of unreachable objects is returned.
"""
pass
def disable(*args,**kw):
"""disable() -> None
Disable automatic garbage collection.
"""
pass
def enable(*args,**kw):
"""enable() -> None
Enable automatic garbage collection.
"""
pass
garbage = []
def get_count(*args,**kw):
"""get_count() -> (count0, count1, count2)
Return the current collection counts
"""
pass
def get_debug(*args,**kw):
"""get_debug() -> flags
Get the garbage collection debugging flags.
"""
pass
def get_objects(*args,**kw):
"""get_objects() -> [...]
Return a list of objects tracked by the collector (excluding the list
returned).
"""
pass
def get_referents(*args,**kw):
"""get_referents(*objs) -> list Return the list of objects that are directly referred to by objs."""
pass
def get_referrers(*args,**kw):
"""get_referrers(*objs) -> list Return the list of objects that directly refer to any of objs."""
pass
def get_threshold(*args,**kw):
"""get_threshold() -> (threshold0, threshold1, threshold2)
Return the current collection thresholds
"""
pass
def is_tracked(*args,**kw):
"""is_tracked(obj) -> bool
Returns true if the object is tracked by the garbage collector.
Simple atomic objects will return false.
"""
pass
def isenabled(*args,**kw):
"""isenabled() -> status
Returns true if automatic garbage collection is enabled.
"""
pass
def set_debug(*args,**kw):
"""set_debug(flags) -> None
Set the garbage collection debugging flags. Debugging information is
written to sys.stderr.
flags is an integer and can have the following bits turned on:
DEBUG_STATS - Print statistics during collection.
DEBUG_COLLECTABLE - Print collectable objects found.
DEBUG_UNCOLLECTABLE - Print unreachable but uncollectable objects found.
DEBUG_SAVEALL - Save objects to gc.garbage rather than freeing them.
DEBUG_LEAK - Debug leaking programs (everything but STATS).
"""
pass
def set_threshold(*args,**kw):
"""set_threshold(threshold0, [threshold1, threshold2]) -> None
Sets the collection thresholds. Setting threshold0 to zero disables
collection.
"""
pass
| gpl-3.0 |
Kazade/NeHe-Website | google_appengine/lib/django_1_2/tests/modeltests/model_forms/models.py | 38 | 52607 | """
XX. Generating HTML forms from models
This is mostly just a reworking of the ``form_for_model``/``form_for_instance``
tests to use ``ModelForm``. As such, the text may not make sense in all cases,
and the examples are probably a poor fit for the ``ModelForm`` syntax. In other
words, most of these tests should be rewritten.
"""
import os
import tempfile
from django.db import models
from django.core.files.storage import FileSystemStorage
temp_storage_dir = tempfile.mkdtemp()
temp_storage = FileSystemStorage(temp_storage_dir)
# Status choices for Article (integer keys) ...
ARTICLE_STATUS = (
    (1, 'Draft'),
    (2, 'Pending'),
    (3, 'Live'),
)

# ... and for ArticleStatus (single-character keys).
ARTICLE_STATUS_CHAR = (
    ('d', 'Draft'),
    ('p', 'Pending'),
    ('l', 'Live'),
)

class Category(models.Model):
    name = models.CharField(max_length=20)
    slug = models.SlugField(max_length=20)
    url = models.CharField('The URL', max_length=40)

    def __unicode__(self):
        return self.name

class Writer(models.Model):
    # The help_text is rendered by the ModelForm doctests in this module.
    name = models.CharField(max_length=50, help_text='Use both first and last names.')

    def __unicode__(self):
        return self.name
class Article(models.Model):
    """Test model exercising FK, M2M, choices and a non-editable field."""
    headline = models.CharField(max_length=50)
    slug = models.SlugField()
    pub_date = models.DateField()
    created = models.DateField(editable=False)
    writer = models.ForeignKey(Writer)
    article = models.TextField()
    categories = models.ManyToManyField(Category, blank=True)
    status = models.PositiveIntegerField(choices=ARTICLE_STATUS, blank=True, null=True)

    def save(self, *args, **kwargs):
        """Stamp `created` on first save, then defer to Model.save().

        Fix: the original signature was ``save(self)``, which silently broke
        callers passing Model.save()'s optional arguments (force_insert,
        force_update, using); accept and forward them instead.
        """
        import datetime
        if not self.id:
            # `created` is editable=False, so forms never supply it.
            self.created = datetime.date.today()
        return super(Article, self).save(*args, **kwargs)

    def __unicode__(self):
        return self.headline
class ImprovedArticle(models.Model):
    # plain one-to-one link to Article (separate primary key)
    article = models.OneToOneField(Article)

class ImprovedArticleWithParentLink(models.Model):
    # parent_link=True makes the O2O act as a multi-table-inheritance link
    article = models.OneToOneField(Article, parent_link=True)

class BetterWriter(Writer):
    # concrete (multi-table) inheritance from Writer
    score = models.IntegerField()

class WriterProfile(models.Model):
    # the related writer doubles as this model's primary key
    writer = models.OneToOneField(Writer, primary_key=True)
    age = models.PositiveIntegerField()

    def __unicode__(self):
        return "%s is %s" % (self.writer, self.age)

from django.contrib.localflavor.us.models import PhoneNumberField

class PhoneNumber(models.Model):
    phone = PhoneNumberField()
    description = models.CharField(max_length=20)

    def __unicode__(self):
        return self.phone

class TextFile(models.Model):
    description = models.CharField(max_length=20)
    # uploads go into the module-level temporary FileSystemStorage
    file = models.FileField(storage=temp_storage, upload_to='tests', max_length=15)

    def __unicode__(self):
        return self.description
try:
    # If PIL is available, try testing ImageFields. Checking for the existence
    # of Image is enough for CPython, but for PyPy, you need to check for the
    # underlying modules. If PIL is not available, ImageField tests are omitted.

    # Try to import PIL in either of the two ways it can end up installed.
    try:
        from PIL import Image, _imaging
    except ImportError:
        import Image, _imaging

    # Flag checked elsewhere to decide whether ImageField tests run.
    test_images = True

    class ImageFile(models.Model):
        def custom_upload_path(self, filename):
            # store under the instance's `path` value, or 'tests' if unset
            path = self.path or 'tests'
            return '%s/%s' % (path, filename)

        description = models.CharField(max_length=20)
        # Deliberately put the image field *after* the width/height fields to
        # trigger the bug in #10404 with width/height not getting assigned.
        width = models.IntegerField(editable=False)
        height = models.IntegerField(editable=False)
        image = models.ImageField(storage=temp_storage, upload_to=custom_upload_path,
                                  width_field='width', height_field='height')
        path = models.CharField(max_length=16, blank=True, default='')

        def __unicode__(self):
            return self.description

    class OptionalImageFile(models.Model):
        def custom_upload_path(self, filename):
            # same scheme as ImageFile.custom_upload_path
            path = self.path or 'tests'
            return '%s/%s' % (path, filename)

        description = models.CharField(max_length=20)
        # image may be absent, so the dimension fields must allow NULL too
        image = models.ImageField(storage=temp_storage, upload_to=custom_upload_path,
                                  width_field='width', height_field='height',
                                  blank=True, null=True)
        width = models.IntegerField(editable=False, null=True)
        height = models.IntegerField(editable=False, null=True)
        path = models.CharField(max_length=16, blank=True, default='')

        def __unicode__(self):
            return self.description
except ImportError:
    test_images = False
class CommaSeparatedInteger(models.Model):
    field = models.CommaSeparatedIntegerField(max_length=20)

    def __unicode__(self):
        return self.field

class Product(models.Model):
    slug = models.SlugField(unique=True)

    def __unicode__(self):
        return self.slug

class Price(models.Model):
    price = models.DecimalField(max_digits=10, decimal_places=2)
    quantity = models.PositiveIntegerField()

    def __unicode__(self):
        return u"%s for %s" % (self.quantity, self.price)

    class Meta:
        # exercises unique_together validation in ModelForms
        unique_together = (('price', 'quantity'),)

class ArticleStatus(models.Model):
    status = models.CharField(max_length=2, choices=ARTICLE_STATUS_CHAR, blank=True, null=True)

class Inventory(models.Model):
    barcode = models.PositiveIntegerField(unique=True)
    # self-referencing FK keyed on barcode rather than the primary key
    parent = models.ForeignKey('self', to_field='barcode', blank=True, null=True)
    name = models.CharField(blank=False, max_length=20)

    def __unicode__(self):
        return self.name

class Book(models.Model):
    title = models.CharField(max_length=40)
    author = models.ForeignKey(Writer, blank=True, null=True)
    special_id = models.IntegerField(blank=True, null=True, unique=True)

    class Meta:
        # unique_together involving a nullable FK
        unique_together = ('title', 'author')

class BookXtra(models.Model):
    isbn = models.CharField(max_length=16, unique=True)
    suffix1 = models.IntegerField(blank=True, default=0)
    suffix2 = models.IntegerField(blank=True, default=0)

    class Meta:
        unique_together = (('suffix1', 'suffix2'))
        abstract = True

class DerivedBook(Book, BookXtra):
    # inherits from one concrete and one abstract model
    pass

class ExplicitPK(models.Model):
    key = models.CharField(max_length=20, primary_key=True)
    desc = models.CharField(max_length=20, blank=True, unique=True)

    class Meta:
        unique_together = ('key', 'desc')

    def __unicode__(self):
        return self.key
class Post(models.Model):
    """Test model for unique_for_date/_month/_year validation."""
    title = models.CharField(max_length=50, unique_for_date='posted', blank=True)
    slug = models.CharField(max_length=50, unique_for_year='posted', blank=True)
    subtitle = models.CharField(max_length=50, unique_for_month='posted', blank=True)
    posted = models.DateField()

    def __unicode__(self):
        # Fix: the model has no `name` field, so the original
        # ``return self.name`` raised AttributeError; use the title.
        return self.title
class DerivedPost(Post):
    # concrete subclass used to test inherited unique_for_* validation
    pass

class BigInt(models.Model):
    biggie = models.BigIntegerField()

    def __unicode__(self):
        return unicode(self.biggie)

class MarkupField(models.CharField):
    def __init__(self, *args, **kwargs):
        # fixed length regardless of what the caller passes
        kwargs["max_length"] = 20
        super(MarkupField, self).__init__(*args, **kwargs)

    def formfield(self, **kwargs):
        # don't allow this field to be used in form (real use-case might be
        # that you know the markup will always be X, but it is among an app
        # that allows the user to say it could be something else)
        # regressed at r10062
        return None

class CustomFieldForExclusionModel(models.Model):
    name = models.CharField(max_length=10)
    markup = MarkupField()

class FlexibleDatePost(models.Model):
    title = models.CharField(max_length=50, unique_for_date='posted', blank=True)
    slug = models.CharField(max_length=50, unique_for_year='posted', blank=True)
    subtitle = models.CharField(max_length=50, unique_for_month='posted', blank=True)
    # posted may be NULL, exercising unique_for_* with a missing date
    posted = models.DateField(blank=True, null=True)
__test__ = {'API_TESTS': """
>>> from django import forms
>>> from django.forms.models import ModelForm, model_to_dict
>>> from django.core.files.uploadedfile import SimpleUploadedFile
The bare bones, absolutely nothing custom, basic case.
>>> class CategoryForm(ModelForm):
... class Meta:
... model = Category
>>> CategoryForm.base_fields.keys()
['name', 'slug', 'url']
Extra fields.
>>> class CategoryForm(ModelForm):
... some_extra_field = forms.BooleanField()
...
... class Meta:
... model = Category
>>> CategoryForm.base_fields.keys()
['name', 'slug', 'url', 'some_extra_field']
Extra field that has a name collision with a related object accessor.
>>> class WriterForm(ModelForm):
... book = forms.CharField(required=False)
...
... class Meta:
... model = Writer
>>> wf = WriterForm({'name': 'Richard Lockridge'})
>>> wf.is_valid()
True
Replacing a field.
>>> class CategoryForm(ModelForm):
... url = forms.BooleanField()
...
... class Meta:
... model = Category
>>> CategoryForm.base_fields['url'].__class__
<class 'django.forms.fields.BooleanField'>
Using 'fields'.
>>> class CategoryForm(ModelForm):
...
... class Meta:
... model = Category
... fields = ['url']
>>> CategoryForm.base_fields.keys()
['url']
Using 'exclude'
>>> class CategoryForm(ModelForm):
...
... class Meta:
... model = Category
... exclude = ['url']
>>> CategoryForm.base_fields.keys()
['name', 'slug']
Using 'fields' *and* 'exclude'. Not sure why you'd want to do this, but uh,
"be liberal in what you accept" and all.
>>> class CategoryForm(ModelForm):
...
... class Meta:
... model = Category
... fields = ['name', 'url']
... exclude = ['url']
>>> CategoryForm.base_fields.keys()
['name']
Using 'widgets'
>>> class CategoryForm(ModelForm):
...
... class Meta:
... model = Category
... fields = ['name', 'url', 'slug']
... widgets = {
... 'name': forms.Textarea,
... 'url': forms.TextInput(attrs={'class': 'url'})
... }
>>> str(CategoryForm()['name'])
'<textarea id="id_name" rows="10" cols="40" name="name"></textarea>'
>>> str(CategoryForm()['url'])
'<input id="id_url" type="text" class="url" name="url" maxlength="40" />'
>>> str(CategoryForm()['slug'])
'<input id="id_slug" type="text" name="slug" maxlength="20" />'
Don't allow more than one 'model' definition in the inheritance hierarchy.
Technically, it would generate a valid form, but the fact that the resulting
save method won't deal with multiple objects is likely to trip up people not
familiar with the mechanics.
>>> class CategoryForm(ModelForm):
... class Meta:
... model = Category
>>> class OddForm(CategoryForm):
... class Meta:
... model = Article
OddForm is now an Article-related thing, because BadForm.Meta overrides
CategoryForm.Meta.
>>> OddForm.base_fields.keys()
['headline', 'slug', 'pub_date', 'writer', 'article', 'status', 'categories']
>>> class ArticleForm(ModelForm):
... class Meta:
... model = Article
First class with a Meta class wins.
>>> class BadForm(ArticleForm, CategoryForm):
... pass
>>> OddForm.base_fields.keys()
['headline', 'slug', 'pub_date', 'writer', 'article', 'status', 'categories']
Subclassing without specifying a Meta on the class will use the parent's Meta
(or the first parent in the MRO if there are multiple parent classes).
>>> class CategoryForm(ModelForm):
... class Meta:
... model = Category
>>> class SubCategoryForm(CategoryForm):
... pass
>>> SubCategoryForm.base_fields.keys()
['name', 'slug', 'url']
We can also subclass the Meta inner class to change the fields list.
>>> class CategoryForm(ModelForm):
... checkbox = forms.BooleanField()
...
... class Meta:
... model = Category
>>> class SubCategoryForm(CategoryForm):
... class Meta(CategoryForm.Meta):
... exclude = ['url']
>>> print SubCategoryForm()
<tr><th><label for="id_name">Name:</label></th><td><input id="id_name" type="text" name="name" maxlength="20" /></td></tr>
<tr><th><label for="id_slug">Slug:</label></th><td><input id="id_slug" type="text" name="slug" maxlength="20" /></td></tr>
<tr><th><label for="id_checkbox">Checkbox:</label></th><td><input type="checkbox" name="checkbox" id="id_checkbox" /></td></tr>
# test using fields to provide ordering to the fields
>>> class CategoryForm(ModelForm):
... class Meta:
... model = Category
... fields = ['url', 'name']
>>> CategoryForm.base_fields.keys()
['url', 'name']
>>> print CategoryForm()
<tr><th><label for="id_url">The URL:</label></th><td><input id="id_url" type="text" name="url" maxlength="40" /></td></tr>
<tr><th><label for="id_name">Name:</label></th><td><input id="id_name" type="text" name="name" maxlength="20" /></td></tr>
>>> class CategoryForm(ModelForm):
... class Meta:
... model = Category
... fields = ['slug', 'url', 'name']
... exclude = ['url']
>>> CategoryForm.base_fields.keys()
['slug', 'name']
# Old form_for_x tests #######################################################
>>> from django.forms import ModelForm, CharField
>>> import datetime
>>> Category.objects.all()
[]
>>> class CategoryForm(ModelForm):
... class Meta:
... model = Category
>>> f = CategoryForm()
>>> print f
<tr><th><label for="id_name">Name:</label></th><td><input id="id_name" type="text" name="name" maxlength="20" /></td></tr>
<tr><th><label for="id_slug">Slug:</label></th><td><input id="id_slug" type="text" name="slug" maxlength="20" /></td></tr>
<tr><th><label for="id_url">The URL:</label></th><td><input id="id_url" type="text" name="url" maxlength="40" /></td></tr>
>>> print f.as_ul()
<li><label for="id_name">Name:</label> <input id="id_name" type="text" name="name" maxlength="20" /></li>
<li><label for="id_slug">Slug:</label> <input id="id_slug" type="text" name="slug" maxlength="20" /></li>
<li><label for="id_url">The URL:</label> <input id="id_url" type="text" name="url" maxlength="40" /></li>
>>> print f['name']
<input id="id_name" type="text" name="name" maxlength="20" />
>>> f = CategoryForm(auto_id=False)
>>> print f.as_ul()
<li>Name: <input type="text" name="name" maxlength="20" /></li>
<li>Slug: <input type="text" name="slug" maxlength="20" /></li>
<li>The URL: <input type="text" name="url" maxlength="40" /></li>
>>> f = CategoryForm({'name': 'Entertainment', 'slug': 'entertainment', 'url': 'entertainment'})
>>> f.is_valid()
True
>>> f.cleaned_data['url']
u'entertainment'
>>> f.cleaned_data['name']
u'Entertainment'
>>> f.cleaned_data['slug']
u'entertainment'
>>> obj = f.save()
>>> obj
<Category: Entertainment>
>>> Category.objects.all()
[<Category: Entertainment>]
>>> f = CategoryForm({'name': "It's a test", 'slug': 'its-test', 'url': 'test'})
>>> f.is_valid()
True
>>> f.cleaned_data['url']
u'test'
>>> f.cleaned_data['name']
u"It's a test"
>>> f.cleaned_data['slug']
u'its-test'
>>> obj = f.save()
>>> obj
<Category: It's a test>
>>> Category.objects.order_by('name')
[<Category: Entertainment>, <Category: It's a test>]
If you call save() with commit=False, then it will return an object that
hasn't yet been saved to the database. In this case, it's up to you to call
save() on the resulting model instance.
>>> f = CategoryForm({'name': 'Third test', 'slug': 'third-test', 'url': 'third'})
>>> f.is_valid()
True
>>> f.cleaned_data['url']
u'third'
>>> f.cleaned_data['name']
u'Third test'
>>> f.cleaned_data['slug']
u'third-test'
>>> obj = f.save(commit=False)
>>> obj
<Category: Third test>
>>> Category.objects.order_by('name')
[<Category: Entertainment>, <Category: It's a test>]
>>> obj.save()
>>> Category.objects.order_by('name')
[<Category: Entertainment>, <Category: It's a test>, <Category: Third test>]
If you call save() with invalid data, you'll get a ValueError.
>>> f = CategoryForm({'name': '', 'slug': 'not a slug!', 'url': 'foo'})
>>> f.errors['name']
[u'This field is required.']
>>> f.errors['slug']
[u"Enter a valid 'slug' consisting of letters, numbers, underscores or hyphens."]
>>> f.cleaned_data
Traceback (most recent call last):
...
AttributeError: 'CategoryForm' object has no attribute 'cleaned_data'
>>> f.save()
Traceback (most recent call last):
...
ValueError: The Category could not be created because the data didn't validate.
>>> f = CategoryForm({'name': '', 'slug': '', 'url': 'foo'})
>>> f.save()
Traceback (most recent call last):
...
ValueError: The Category could not be created because the data didn't validate.
Create a couple of Writers.
>>> w_royko = Writer(name='Mike Royko')
>>> w_royko.save()
>>> w_woodward = Writer(name='Bob Woodward')
>>> w_woodward.save()
ManyToManyFields are represented by a MultipleChoiceField, ForeignKeys and any
fields with the 'choices' attribute are represented by a ChoiceField.
>>> class ArticleForm(ModelForm):
... class Meta:
... model = Article
>>> f = ArticleForm(auto_id=False)
>>> print f
<tr><th>Headline:</th><td><input type="text" name="headline" maxlength="50" /></td></tr>
<tr><th>Slug:</th><td><input type="text" name="slug" maxlength="50" /></td></tr>
<tr><th>Pub date:</th><td><input type="text" name="pub_date" /></td></tr>
<tr><th>Writer:</th><td><select name="writer">
<option value="" selected="selected">---------</option>
<option value="...">Mike Royko</option>
<option value="...">Bob Woodward</option>
</select></td></tr>
<tr><th>Article:</th><td><textarea rows="10" cols="40" name="article"></textarea></td></tr>
<tr><th>Status:</th><td><select name="status">
<option value="" selected="selected">---------</option>
<option value="1">Draft</option>
<option value="2">Pending</option>
<option value="3">Live</option>
</select></td></tr>
<tr><th>Categories:</th><td><select multiple="multiple" name="categories">
<option value="1">Entertainment</option>
<option value="2">It's a test</option>
<option value="3">Third test</option>
</select><br /> Hold down "Control", or "Command" on a Mac, to select more than one.</td></tr>
You can restrict a form to a subset of the complete list of fields
by providing a 'fields' argument. If you try to save a
model created with such a form, you need to ensure that the fields
that are _not_ on the form have default values, or are allowed to have
a value of None. If a field isn't specified on a form, the object created
from the form can't provide a value for that field!
>>> class PartialArticleForm(ModelForm):
... class Meta:
... model = Article
... fields = ('headline','pub_date')
>>> f = PartialArticleForm(auto_id=False)
>>> print f
<tr><th>Headline:</th><td><input type="text" name="headline" maxlength="50" /></td></tr>
<tr><th>Pub date:</th><td><input type="text" name="pub_date" /></td></tr>
When the ModelForm is passed an instance, that instance's current values are
inserted as 'initial' data in each Field.
>>> w = Writer.objects.get(name='Mike Royko')
>>> class RoykoForm(ModelForm):
... class Meta:
... model = Writer
>>> f = RoykoForm(auto_id=False, instance=w)
>>> print f
<tr><th>Name:</th><td><input type="text" name="name" value="Mike Royko" maxlength="50" /><br />Use both first and last names.</td></tr>
>>> art = Article(headline='Test article', slug='test-article', pub_date=datetime.date(1988, 1, 4), writer=w, article='Hello.')
>>> art.save()
>>> art.id
1
>>> class TestArticleForm(ModelForm):
... class Meta:
... model = Article
>>> f = TestArticleForm(auto_id=False, instance=art)
>>> print f.as_ul()
<li>Headline: <input type="text" name="headline" value="Test article" maxlength="50" /></li>
<li>Slug: <input type="text" name="slug" value="test-article" maxlength="50" /></li>
<li>Pub date: <input type="text" name="pub_date" value="1988-01-04" /></li>
<li>Writer: <select name="writer">
<option value="">---------</option>
<option value="..." selected="selected">Mike Royko</option>
<option value="...">Bob Woodward</option>
</select></li>
<li>Article: <textarea rows="10" cols="40" name="article">Hello.</textarea></li>
<li>Status: <select name="status">
<option value="" selected="selected">---------</option>
<option value="1">Draft</option>
<option value="2">Pending</option>
<option value="3">Live</option>
</select></li>
<li>Categories: <select multiple="multiple" name="categories">
<option value="1">Entertainment</option>
<option value="2">It's a test</option>
<option value="3">Third test</option>
</select> Hold down "Control", or "Command" on a Mac, to select more than one.</li>
>>> f = TestArticleForm({'headline': u'Test headline', 'slug': 'test-headline', 'pub_date': u'1984-02-06', 'writer': unicode(w_royko.pk), 'article': 'Hello.'}, instance=art)
>>> f.errors
{}
>>> f.is_valid()
True
>>> test_art = f.save()
>>> test_art.id
1
>>> test_art = Article.objects.get(id=1)
>>> test_art.headline
u'Test headline'
You can create a form over a subset of the available fields
by specifying a 'fields' argument to form_for_instance.
>>> class PartialArticleForm(ModelForm):
... class Meta:
... model = Article
... fields=('headline', 'slug', 'pub_date')
>>> f = PartialArticleForm({'headline': u'New headline', 'slug': 'new-headline', 'pub_date': u'1988-01-04'}, auto_id=False, instance=art)
>>> print f.as_ul()
<li>Headline: <input type="text" name="headline" value="New headline" maxlength="50" /></li>
<li>Slug: <input type="text" name="slug" value="new-headline" maxlength="50" /></li>
<li>Pub date: <input type="text" name="pub_date" value="1988-01-04" /></li>
>>> f.is_valid()
True
>>> new_art = f.save()
>>> new_art.id
1
>>> new_art = Article.objects.get(id=1)
>>> new_art.headline
u'New headline'
Add some categories and test the many-to-many form output.
>>> new_art.categories.all()
[]
>>> new_art.categories.add(Category.objects.get(name='Entertainment'))
>>> new_art.categories.all()
[<Category: Entertainment>]
>>> class TestArticleForm(ModelForm):
... class Meta:
... model = Article
>>> f = TestArticleForm(auto_id=False, instance=new_art)
>>> print f.as_ul()
<li>Headline: <input type="text" name="headline" value="New headline" maxlength="50" /></li>
<li>Slug: <input type="text" name="slug" value="new-headline" maxlength="50" /></li>
<li>Pub date: <input type="text" name="pub_date" value="1988-01-04" /></li>
<li>Writer: <select name="writer">
<option value="">---------</option>
<option value="..." selected="selected">Mike Royko</option>
<option value="...">Bob Woodward</option>
</select></li>
<li>Article: <textarea rows="10" cols="40" name="article">Hello.</textarea></li>
<li>Status: <select name="status">
<option value="" selected="selected">---------</option>
<option value="1">Draft</option>
<option value="2">Pending</option>
<option value="3">Live</option>
</select></li>
<li>Categories: <select multiple="multiple" name="categories">
<option value="1" selected="selected">Entertainment</option>
<option value="2">It's a test</option>
<option value="3">Third test</option>
</select> Hold down "Control", or "Command" on a Mac, to select more than one.</li>
Initial values can be provided for model forms
>>> f = TestArticleForm(auto_id=False, initial={'headline': 'Your headline here', 'categories': ['1','2']})
>>> print f.as_ul()
<li>Headline: <input type="text" name="headline" value="Your headline here" maxlength="50" /></li>
<li>Slug: <input type="text" name="slug" maxlength="50" /></li>
<li>Pub date: <input type="text" name="pub_date" /></li>
<li>Writer: <select name="writer">
<option value="" selected="selected">---------</option>
<option value="...">Mike Royko</option>
<option value="...">Bob Woodward</option>
</select></li>
<li>Article: <textarea rows="10" cols="40" name="article"></textarea></li>
<li>Status: <select name="status">
<option value="" selected="selected">---------</option>
<option value="1">Draft</option>
<option value="2">Pending</option>
<option value="3">Live</option>
</select></li>
<li>Categories: <select multiple="multiple" name="categories">
<option value="1" selected="selected">Entertainment</option>
<option value="2" selected="selected">It's a test</option>
<option value="3">Third test</option>
</select> Hold down "Control", or "Command" on a Mac, to select more than one.</li>
>>> f = TestArticleForm({'headline': u'New headline', 'slug': u'new-headline', 'pub_date': u'1988-01-04',
... 'writer': unicode(w_royko.pk), 'article': u'Hello.', 'categories': [u'1', u'2']}, instance=new_art)
>>> new_art = f.save()
>>> new_art.id
1
>>> new_art = Article.objects.get(id=1)
>>> new_art.categories.order_by('name')
[<Category: Entertainment>, <Category: It's a test>]
Now, submit form data with no categories. This deletes the existing categories.
>>> f = TestArticleForm({'headline': u'New headline', 'slug': u'new-headline', 'pub_date': u'1988-01-04',
... 'writer': unicode(w_royko.pk), 'article': u'Hello.'}, instance=new_art)
>>> new_art = f.save()
>>> new_art.id
1
>>> new_art = Article.objects.get(id=1)
>>> new_art.categories.all()
[]
Create a new article, with categories, via the form.
>>> class ArticleForm(ModelForm):
... class Meta:
... model = Article
>>> f = ArticleForm({'headline': u'The walrus was Paul', 'slug': u'walrus-was-paul', 'pub_date': u'1967-11-01',
... 'writer': unicode(w_royko.pk), 'article': u'Test.', 'categories': [u'1', u'2']})
>>> new_art = f.save()
>>> new_art.id
2
>>> new_art = Article.objects.get(id=2)
>>> new_art.categories.order_by('name')
[<Category: Entertainment>, <Category: It's a test>]
Create a new article, with no categories, via the form.
>>> class ArticleForm(ModelForm):
... class Meta:
... model = Article
>>> f = ArticleForm({'headline': u'The walrus was Paul', 'slug': u'walrus-was-paul', 'pub_date': u'1967-11-01',
... 'writer': unicode(w_royko.pk), 'article': u'Test.'})
>>> new_art = f.save()
>>> new_art.id
3
>>> new_art = Article.objects.get(id=3)
>>> new_art.categories.all()
[]
Create a new article, with categories, via the form, but use commit=False.
The m2m data won't be saved until save_m2m() is invoked on the form.
>>> class ArticleForm(ModelForm):
... class Meta:
... model = Article
>>> f = ArticleForm({'headline': u'The walrus was Paul', 'slug': 'walrus-was-paul', 'pub_date': u'1967-11-01',
... 'writer': unicode(w_royko.pk), 'article': u'Test.', 'categories': [u'1', u'2']})
>>> new_art = f.save(commit=False)
# Manually save the instance
>>> new_art.save()
>>> new_art.id
4
# The instance doesn't have m2m data yet
>>> new_art = Article.objects.get(id=4)
>>> new_art.categories.all()
[]
# Save the m2m data on the form
>>> f.save_m2m()
>>> new_art.categories.order_by('name')
[<Category: Entertainment>, <Category: It's a test>]
Here, we define a custom ModelForm. Because it happens to have the same fields as
the Category model, we can just call the form's save() to apply its changes to an
existing Category instance.
>>> class ShortCategory(ModelForm):
... name = CharField(max_length=5)
... slug = CharField(max_length=5)
... url = CharField(max_length=3)
>>> cat = Category.objects.get(name='Third test')
>>> cat
<Category: Third test>
>>> cat.id
3
>>> form = ShortCategory({'name': 'Third', 'slug': 'third', 'url': '3rd'}, instance=cat)
>>> form.save()
<Category: Third>
>>> Category.objects.get(id=3)
<Category: Third>
Here, we demonstrate that choices for a ForeignKey ChoiceField are determined
at runtime, based on the data in the database when the form is displayed, not
the data in the database when the form is instantiated.
>>> class ArticleForm(ModelForm):
... class Meta:
... model = Article
>>> f = ArticleForm(auto_id=False)
>>> print f.as_ul()
<li>Headline: <input type="text" name="headline" maxlength="50" /></li>
<li>Slug: <input type="text" name="slug" maxlength="50" /></li>
<li>Pub date: <input type="text" name="pub_date" /></li>
<li>Writer: <select name="writer">
<option value="" selected="selected">---------</option>
<option value="...">Mike Royko</option>
<option value="...">Bob Woodward</option>
</select></li>
<li>Article: <textarea rows="10" cols="40" name="article"></textarea></li>
<li>Status: <select name="status">
<option value="" selected="selected">---------</option>
<option value="1">Draft</option>
<option value="2">Pending</option>
<option value="3">Live</option>
</select></li>
<li>Categories: <select multiple="multiple" name="categories">
<option value="1">Entertainment</option>
<option value="2">It's a test</option>
<option value="3">Third</option>
</select> Hold down "Control", or "Command" on a Mac, to select more than one.</li>
>>> Category.objects.create(name='Fourth', url='4th')
<Category: Fourth>
>>> Writer.objects.create(name='Carl Bernstein')
<Writer: Carl Bernstein>
>>> print f.as_ul()
<li>Headline: <input type="text" name="headline" maxlength="50" /></li>
<li>Slug: <input type="text" name="slug" maxlength="50" /></li>
<li>Pub date: <input type="text" name="pub_date" /></li>
<li>Writer: <select name="writer">
<option value="" selected="selected">---------</option>
<option value="...">Mike Royko</option>
<option value="...">Bob Woodward</option>
<option value="...">Carl Bernstein</option>
</select></li>
<li>Article: <textarea rows="10" cols="40" name="article"></textarea></li>
<li>Status: <select name="status">
<option value="" selected="selected">---------</option>
<option value="1">Draft</option>
<option value="2">Pending</option>
<option value="3">Live</option>
</select></li>
<li>Categories: <select multiple="multiple" name="categories">
<option value="1">Entertainment</option>
<option value="2">It's a test</option>
<option value="3">Third</option>
<option value="4">Fourth</option>
</select> Hold down "Control", or "Command" on a Mac, to select more than one.</li>
# ModelChoiceField ############################################################
>>> from django.forms import ModelChoiceField, ModelMultipleChoiceField
>>> f = ModelChoiceField(Category.objects.all())
>>> list(f.choices)
[(u'', u'---------'), (1, u'Entertainment'), (2, u"It's a test"), (3, u'Third'), (4, u'Fourth')]
>>> f.clean('')
Traceback (most recent call last):
...
ValidationError: [u'This field is required.']
>>> f.clean(None)
Traceback (most recent call last):
...
ValidationError: [u'This field is required.']
>>> f.clean(0)
Traceback (most recent call last):
...
ValidationError: [u'Select a valid choice. That choice is not one of the available choices.']
>>> f.clean(3)
<Category: Third>
>>> f.clean(2)
<Category: It's a test>
# Add a Category object *after* the ModelChoiceField has already been
# instantiated. This proves clean() checks the database during clean() rather
# than caching it at time of instantiation.
>>> Category.objects.create(name='Fifth', url='5th')
<Category: Fifth>
>>> f.clean(5)
<Category: Fifth>
# Delete a Category object *after* the ModelChoiceField has already been
# instantiated. This proves clean() checks the database during clean() rather
# than caching it at time of instantiation.
>>> Category.objects.get(url='5th').delete()
>>> f.clean(5)
Traceback (most recent call last):
...
ValidationError: [u'Select a valid choice. That choice is not one of the available choices.']
>>> f = ModelChoiceField(Category.objects.filter(pk=1), required=False)
>>> print f.clean('')
None
>>> f.clean('')
>>> f.clean('1')
<Category: Entertainment>
>>> f.clean('100')
Traceback (most recent call last):
...
ValidationError: [u'Select a valid choice. That choice is not one of the available choices.']
# queryset can be changed after the field is created.
>>> f.queryset = Category.objects.exclude(name='Fourth')
>>> list(f.choices)
[(u'', u'---------'), (1, u'Entertainment'), (2, u"It's a test"), (3, u'Third')]
>>> f.clean(3)
<Category: Third>
>>> f.clean(4)
Traceback (most recent call last):
...
ValidationError: [u'Select a valid choice. That choice is not one of the available choices.']
# check that we can safely iterate choices repeatedly
>>> gen_one = list(f.choices)
>>> gen_two = f.choices
>>> gen_one[2]
(2L, u"It's a test")
>>> list(gen_two)
[(u'', u'---------'), (1L, u'Entertainment'), (2L, u"It's a test"), (3L, u'Third')]
# check that we can override the label_from_instance method to print custom labels (#4620)
>>> f.queryset = Category.objects.all()
>>> f.label_from_instance = lambda obj: "category " + str(obj)
>>> list(f.choices)
[(u'', u'---------'), (1L, 'category Entertainment'), (2L, "category It's a test"), (3L, 'category Third'), (4L, 'category Fourth')]
# ModelMultipleChoiceField ####################################################
>>> f = ModelMultipleChoiceField(Category.objects.all())
>>> list(f.choices)
[(1, u'Entertainment'), (2, u"It's a test"), (3, u'Third'), (4, u'Fourth')]
>>> f.clean(None)
Traceback (most recent call last):
...
ValidationError: [u'This field is required.']
>>> f.clean([])
Traceback (most recent call last):
...
ValidationError: [u'This field is required.']
>>> f.clean([1])
[<Category: Entertainment>]
>>> f.clean([2])
[<Category: It's a test>]
>>> f.clean(['1'])
[<Category: Entertainment>]
>>> f.clean(['1', '2'])
[<Category: Entertainment>, <Category: It's a test>]
>>> f.clean([1, '2'])
[<Category: Entertainment>, <Category: It's a test>]
>>> f.clean((1, '2'))
[<Category: Entertainment>, <Category: It's a test>]
>>> f.clean(['100'])
Traceback (most recent call last):
...
ValidationError: [u'Select a valid choice. 100 is not one of the available choices.']
>>> f.clean('hello')
Traceback (most recent call last):
...
ValidationError: [u'Enter a list of values.']
>>> f.clean(['fail'])
Traceback (most recent call last):
...
ValidationError: [u'"fail" is not a valid value for a primary key.']
# Add a Category object *after* the ModelMultipleChoiceField has already been
# instantiated. This proves clean() checks the database during clean() rather
# than caching it at time of instantiation.
>>> Category.objects.create(id=6, name='Sixth', url='6th')
<Category: Sixth>
>>> f.clean([6])
[<Category: Sixth>]
# Delete a Category object *after* the ModelMultipleChoiceField has already been
# instantiated. This proves clean() checks the database during clean() rather
# than caching it at time of instantiation.
>>> Category.objects.get(url='6th').delete()
>>> f.clean([6])
Traceback (most recent call last):
...
ValidationError: [u'Select a valid choice. 6 is not one of the available choices.']
>>> f = ModelMultipleChoiceField(Category.objects.all(), required=False)
>>> f.clean([])
[]
>>> f.clean(())
[]
>>> f.clean(['10'])
Traceback (most recent call last):
...
ValidationError: [u'Select a valid choice. 10 is not one of the available choices.']
>>> f.clean(['3', '10'])
Traceback (most recent call last):
...
ValidationError: [u'Select a valid choice. 10 is not one of the available choices.']
>>> f.clean(['1', '10'])
Traceback (most recent call last):
...
ValidationError: [u'Select a valid choice. 10 is not one of the available choices.']
# queryset can be changed after the field is created.
>>> f.queryset = Category.objects.exclude(name='Fourth')
>>> list(f.choices)
[(1, u'Entertainment'), (2, u"It's a test"), (3, u'Third')]
>>> f.clean([3])
[<Category: Third>]
>>> f.clean([4])
Traceback (most recent call last):
...
ValidationError: [u'Select a valid choice. 4 is not one of the available choices.']
>>> f.clean(['3', '4'])
Traceback (most recent call last):
...
ValidationError: [u'Select a valid choice. 4 is not one of the available choices.']
>>> f.queryset = Category.objects.all()
>>> f.label_from_instance = lambda obj: "multicategory " + str(obj)
>>> list(f.choices)
[(1L, 'multicategory Entertainment'), (2L, "multicategory It's a test"), (3L, 'multicategory Third'), (4L, 'multicategory Fourth')]
# OneToOneField ###############################################################
>>> class ImprovedArticleForm(ModelForm):
... class Meta:
... model = ImprovedArticle
>>> ImprovedArticleForm.base_fields.keys()
['article']
>>> class ImprovedArticleWithParentLinkForm(ModelForm):
... class Meta:
... model = ImprovedArticleWithParentLink
>>> ImprovedArticleWithParentLinkForm.base_fields.keys()
[]
>>> bw = BetterWriter(name=u'Joe Better', score=10)
>>> bw.save()
>>> sorted(model_to_dict(bw).keys())
['id', 'name', 'score', 'writer_ptr']
>>> class BetterWriterForm(ModelForm):
... class Meta:
... model = BetterWriter
>>> form = BetterWriterForm({'name': 'Some Name', 'score': 12})
>>> form.is_valid()
True
>>> bw2 = form.save()
>>> bw2.delete()
>>> class WriterProfileForm(ModelForm):
... class Meta:
... model = WriterProfile
>>> form = WriterProfileForm()
>>> print form.as_p()
<p><label for="id_writer">Writer:</label> <select name="writer" id="id_writer">
<option value="" selected="selected">---------</option>
<option value="...">Mike Royko</option>
<option value="...">Bob Woodward</option>
<option value="...">Carl Bernstein</option>
<option value="...">Joe Better</option>
</select></p>
<p><label for="id_age">Age:</label> <input type="text" name="age" id="id_age" /></p>
>>> data = {
... 'writer': unicode(w_woodward.pk),
... 'age': u'65',
... }
>>> form = WriterProfileForm(data)
>>> instance = form.save()
>>> instance
<WriterProfile: Bob Woodward is 65>
>>> form = WriterProfileForm(instance=instance)
>>> print form.as_p()
<p><label for="id_writer">Writer:</label> <select name="writer" id="id_writer">
<option value="">---------</option>
<option value="...">Mike Royko</option>
<option value="..." selected="selected">Bob Woodward</option>
<option value="...">Carl Bernstein</option>
<option value="...">Joe Better</option>
</select></p>
<p><label for="id_age">Age:</label> <input type="text" name="age" value="65" id="id_age" /></p>
# PhoneNumberField ############################################################
>>> class PhoneNumberForm(ModelForm):
... class Meta:
... model = PhoneNumber
>>> f = PhoneNumberForm({'phone': '(312) 555-1212', 'description': 'Assistance'})
>>> f.is_valid()
True
>>> f.cleaned_data['phone']
u'312-555-1212'
>>> f.cleaned_data['description']
u'Assistance'
# FileField ###################################################################
# File forms.
>>> class TextFileForm(ModelForm):
... class Meta:
... model = TextFile
# Test conditions when files is either not given or empty.
>>> f = TextFileForm(data={'description': u'Assistance'})
>>> f.is_valid()
False
>>> f = TextFileForm(data={'description': u'Assistance'}, files={})
>>> f.is_valid()
False
# Upload a file and ensure it all works as expected.
>>> f = TextFileForm(data={'description': u'Assistance'}, files={'file': SimpleUploadedFile('test1.txt', 'hello world')})
>>> f.is_valid()
True
>>> type(f.cleaned_data['file'])
<class 'django.core.files.uploadedfile.SimpleUploadedFile'>
>>> instance = f.save()
>>> instance.file
<FieldFile: tests/test1.txt>
>>> instance.file.delete()
>>> f = TextFileForm(data={'description': u'Assistance'}, files={'file': SimpleUploadedFile('test1.txt', 'hello world')})
>>> f.is_valid()
True
>>> type(f.cleaned_data['file'])
<class 'django.core.files.uploadedfile.SimpleUploadedFile'>
>>> instance = f.save()
>>> instance.file
<FieldFile: tests/test1.txt>
# Check if the max_length attribute has been inherited from the model.
>>> f = TextFileForm(data={'description': u'Assistance'}, files={'file': SimpleUploadedFile('test-maxlength.txt', 'hello world')})
>>> f.is_valid()
False
# Edit an instance that already has the file defined in the model. This will not
# save the file again, but leave it exactly as it is.
>>> f = TextFileForm(data={'description': u'Assistance'}, instance=instance)
>>> f.is_valid()
True
>>> f.cleaned_data['file']
<FieldFile: tests/test1.txt>
>>> instance = f.save()
>>> instance.file
<FieldFile: tests/test1.txt>
# Delete the current file since this is not done by Django.
>>> instance.file.delete()
# Override the file by uploading a new one.
>>> f = TextFileForm(data={'description': u'Assistance'}, files={'file': SimpleUploadedFile('test2.txt', 'hello world')}, instance=instance)
>>> f.is_valid()
True
>>> instance = f.save()
>>> instance.file
<FieldFile: tests/test2.txt>
# Delete the current file since this is not done by Django.
>>> instance.file.delete()
>>> f = TextFileForm(data={'description': u'Assistance'}, files={'file': SimpleUploadedFile('test2.txt', 'hello world')})
>>> f.is_valid()
True
>>> instance = f.save()
>>> instance.file
<FieldFile: tests/test2.txt>
# Delete the current file since this is not done by Django.
>>> instance.file.delete()
>>> instance.delete()
# Test the non-required FileField
>>> f = TextFileForm(data={'description': u'Assistance'})
>>> f.fields['file'].required = False
>>> f.is_valid()
True
>>> instance = f.save()
>>> instance.file
<FieldFile: None>
>>> f = TextFileForm(data={'description': u'Assistance'}, files={'file': SimpleUploadedFile('test3.txt', 'hello world')}, instance=instance)
>>> f.is_valid()
True
>>> instance = f.save()
>>> instance.file
<FieldFile: tests/test3.txt>
# Instance can be edited w/out re-uploading the file and existing file should be preserved.
>>> f = TextFileForm(data={'description': u'New Description'}, instance=instance)
>>> f.fields['file'].required = False
>>> f.is_valid()
True
>>> instance = f.save()
>>> instance.description
u'New Description'
>>> instance.file
<FieldFile: tests/test3.txt>
# Delete the current file since this is not done by Django.
>>> instance.file.delete()
>>> instance.delete()
>>> f = TextFileForm(data={'description': u'Assistance'}, files={'file': SimpleUploadedFile('test3.txt', 'hello world')})
>>> f.is_valid()
True
>>> instance = f.save()
>>> instance.file
<FieldFile: tests/test3.txt>
# Delete the current file since this is not done by Django.
>>> instance.file.delete()
>>> instance.delete()
# BigIntegerField ################################################################
>>> class BigIntForm(forms.ModelForm):
... class Meta:
... model = BigInt
...
>>> bif = BigIntForm({'biggie': '-9223372036854775808'})
>>> bif.is_valid()
True
>>> bif = BigIntForm({'biggie': '-9223372036854775809'})
>>> bif.is_valid()
False
>>> bif.errors
{'biggie': [u'Ensure this value is greater than or equal to -9223372036854775808.']}
>>> bif = BigIntForm({'biggie': '9223372036854775807'})
>>> bif.is_valid()
True
>>> bif = BigIntForm({'biggie': '9223372036854775808'})
>>> bif.is_valid()
False
>>> bif.errors
{'biggie': [u'Ensure this value is less than or equal to 9223372036854775807.']}
"""}
if test_images:
__test__['API_TESTS'] += """
# ImageField ###################################################################
# ImageField and FileField are nearly identical, but they differ slighty when
# it comes to validation. This specifically tests that #6302 is fixed for
# both file fields and image fields.
>>> class ImageFileForm(ModelForm):
... class Meta:
... model = ImageFile
>>> image_data = open(os.path.join(os.path.dirname(__file__), "test.png"), 'rb').read()
>>> image_data2 = open(os.path.join(os.path.dirname(__file__), "test2.png"), 'rb').read()
>>> f = ImageFileForm(data={'description': u'An image'}, files={'image': SimpleUploadedFile('test.png', image_data)})
>>> f.is_valid()
True
>>> type(f.cleaned_data['image'])
<class 'django.core.files.uploadedfile.SimpleUploadedFile'>
>>> instance = f.save()
>>> instance.image
<...FieldFile: tests/test.png>
>>> instance.width
16
>>> instance.height
16
# Delete the current file since this is not done by Django, but don't save
# because the dimension fields are not null=True.
>>> instance.image.delete(save=False)
>>> f = ImageFileForm(data={'description': u'An image'}, files={'image': SimpleUploadedFile('test.png', image_data)})
>>> f.is_valid()
True
>>> type(f.cleaned_data['image'])
<class 'django.core.files.uploadedfile.SimpleUploadedFile'>
>>> instance = f.save()
>>> instance.image
<...FieldFile: tests/test.png>
>>> instance.width
16
>>> instance.height
16
# Edit an instance that already has the (required) image defined in the model. This will not
# save the image again, but leave it exactly as it is.
>>> f = ImageFileForm(data={'description': u'Look, it changed'}, instance=instance)
>>> f.is_valid()
True
>>> f.cleaned_data['image']
<...FieldFile: tests/test.png>
>>> instance = f.save()
>>> instance.image
<...FieldFile: tests/test.png>
>>> instance.height
16
>>> instance.width
16
# Delete the current file since this is not done by Django, but don't save
# because the dimension fields are not null=True.
>>> instance.image.delete(save=False)
# Override the file by uploading a new one.
>>> f = ImageFileForm(data={'description': u'Changed it'}, files={'image': SimpleUploadedFile('test2.png', image_data2)}, instance=instance)
>>> f.is_valid()
True
>>> instance = f.save()
>>> instance.image
<...FieldFile: tests/test2.png>
>>> instance.height
32
>>> instance.width
48
# Delete the current file since this is not done by Django, but don't save
# because the dimension fields are not null=True.
>>> instance.image.delete(save=False)
>>> instance.delete()
>>> f = ImageFileForm(data={'description': u'Changed it'}, files={'image': SimpleUploadedFile('test2.png', image_data2)})
>>> f.is_valid()
True
>>> instance = f.save()
>>> instance.image
<...FieldFile: tests/test2.png>
>>> instance.height
32
>>> instance.width
48
# Delete the current file since this is not done by Django, but don't save
# because the dimension fields are not null=True.
>>> instance.image.delete(save=False)
>>> instance.delete()
# Test the non-required ImageField
>>> class OptionalImageFileForm(ModelForm):
... class Meta:
... model = OptionalImageFile
>>> f = OptionalImageFileForm(data={'description': u'Test'})
>>> f.is_valid()
True
>>> instance = f.save()
>>> instance.image
<...FieldFile: None>
>>> instance.width
>>> instance.height
>>> f = OptionalImageFileForm(data={'description': u'And a final one'}, files={'image': SimpleUploadedFile('test3.png', image_data)}, instance=instance)
>>> f.is_valid()
True
>>> instance = f.save()
>>> instance.image
<...FieldFile: tests/test3.png>
>>> instance.width
16
>>> instance.height
16
# Editing the instance without re-uploading the image should not affect the image or its width/height properties
>>> f = OptionalImageFileForm(data={'description': u'New Description'}, instance=instance)
>>> f.is_valid()
True
>>> instance = f.save()
>>> instance.description
u'New Description'
>>> instance.image
<...FieldFile: tests/test3.png>
>>> instance.width
16
>>> instance.height
16
# Delete the current file since this is not done by Django.
>>> instance.image.delete()
>>> instance.delete()
>>> f = OptionalImageFileForm(data={'description': u'And a final one'}, files={'image': SimpleUploadedFile('test4.png', image_data2)})
>>> f.is_valid()
True
>>> instance = f.save()
>>> instance.image
<...FieldFile: tests/test4.png>
>>> instance.width
48
>>> instance.height
32
>>> instance.delete()
# Test callable upload_to behavior that's dependent on the value of another field in the model
>>> f = ImageFileForm(data={'description': u'And a final one', 'path': 'foo'}, files={'image': SimpleUploadedFile('test4.png', image_data)})
>>> f.is_valid()
True
>>> instance = f.save()
>>> instance.image
<...FieldFile: foo/test4.png>
>>> instance.delete()
"""
__test__['API_TESTS'] += """
# Media on a ModelForm ########################################################
# Similar to a regular Form class you can define custom media to be used on
# the ModelForm.
>>> class ModelFormWithMedia(ModelForm):
... class Media:
... js = ('/some/form/javascript',)
... css = {
... 'all': ('/some/form/css',)
... }
... class Meta:
... model = PhoneNumber
>>> f = ModelFormWithMedia()
>>> print f.media
<link href="/some/form/css" type="text/css" media="all" rel="stylesheet" />
<script type="text/javascript" src="/some/form/javascript"></script>
>>> class CommaSeparatedIntegerForm(ModelForm):
... class Meta:
... model = CommaSeparatedInteger
>>> f = CommaSeparatedIntegerForm({'field': '1,2,3'})
>>> f.is_valid()
True
>>> f.cleaned_data
{'field': u'1,2,3'}
>>> f = CommaSeparatedIntegerForm({'field': '1a,2'})
>>> f.errors
{'field': [u'Enter only digits separated by commas.']}
>>> f = CommaSeparatedIntegerForm({'field': ',,,,'})
>>> f.is_valid()
True
>>> f.cleaned_data
{'field': u',,,,'}
>>> f = CommaSeparatedIntegerForm({'field': '1.2'})
>>> f.errors
{'field': [u'Enter only digits separated by commas.']}
>>> f = CommaSeparatedIntegerForm({'field': '1,a,2'})
>>> f.errors
{'field': [u'Enter only digits separated by commas.']}
>>> f = CommaSeparatedIntegerForm({'field': '1,,2'})
>>> f.is_valid()
True
>>> f.cleaned_data
{'field': u'1,,2'}
>>> f = CommaSeparatedIntegerForm({'field': '1'})
>>> f.is_valid()
True
>>> f.cleaned_data
{'field': u'1'}
This Price instance generated by this form is not valid because the quantity
field is required, but the form is valid because the field is excluded from
the form. This is for backwards compatibility.
>>> class PriceForm(ModelForm):
... class Meta:
... model = Price
... exclude = ('quantity',)
>>> form = PriceForm({'price': '6.00'})
>>> form.is_valid()
True
>>> price = form.save(commit=False)
>>> price.full_clean()
Traceback (most recent call last):
...
ValidationError: {'quantity': [u'This field cannot be null.']}
The form should not validate fields that it doesn't contain even if they are
specified using 'fields', not 'exclude'.
>>> class PriceForm(ModelForm):
... class Meta:
... model = Price
... fields = ('price',)
>>> form = PriceForm({'price': '6.00'})
>>> form.is_valid()
True
The form should still have an instance of a model that is not complete and
not saved into a DB yet.
>>> form.instance.price
Decimal('6.00')
>>> form.instance.quantity is None
True
>>> form.instance.pk is None
True
# Choices on CharField and IntegerField
>>> class ArticleForm(ModelForm):
... class Meta:
... model = Article
>>> f = ArticleForm()
>>> f.fields['status'].clean('42')
Traceback (most recent call last):
...
ValidationError: [u'Select a valid choice. 42 is not one of the available choices.']
>>> class ArticleStatusForm(ModelForm):
... class Meta:
... model = ArticleStatus
>>> f = ArticleStatusForm()
>>> f.fields['status'].clean('z')
Traceback (most recent call last):
...
ValidationError: [u'Select a valid choice. z is not one of the available choices.']
# Foreign keys which use to_field #############################################
>>> apple = Inventory.objects.create(barcode=86, name='Apple')
>>> pear = Inventory.objects.create(barcode=22, name='Pear')
>>> core = Inventory.objects.create(barcode=87, name='Core', parent=apple)
>>> field = ModelChoiceField(Inventory.objects.all(), to_field_name='barcode')
>>> for choice in field.choices:
... print choice
(u'', u'---------')
(86, u'Apple')
(22, u'Pear')
(87, u'Core')
>>> class InventoryForm(ModelForm):
... class Meta:
... model = Inventory
>>> form = InventoryForm(instance=core)
>>> print form['parent']
<select name="parent" id="id_parent">
<option value="">---------</option>
<option value="86" selected="selected">Apple</option>
<option value="22">Pear</option>
<option value="87">Core</option>
</select>
>>> data = model_to_dict(core)
>>> data['parent'] = '22'
>>> form = InventoryForm(data=data, instance=core)
>>> core = form.save()
>>> core.parent
<Inventory: Pear>
>>> class CategoryForm(ModelForm):
... description = forms.CharField()
... class Meta:
... model = Category
... fields = ['description', 'url']
>>> CategoryForm.base_fields.keys()
['description', 'url']
>>> print CategoryForm()
<tr><th><label for="id_description">Description:</label></th><td><input type="text" name="description" id="id_description" /></td></tr>
<tr><th><label for="id_url">The URL:</label></th><td><input id="id_url" type="text" name="url" maxlength="40" /></td></tr>
# Model field that returns None to exclude itself with explicit fields ########
>>> class CustomFieldForExclusionForm(ModelForm):
... class Meta:
... model = CustomFieldForExclusionModel
... fields = ['name', 'markup']
>>> CustomFieldForExclusionForm.base_fields.keys()
['name']
>>> print CustomFieldForExclusionForm()
<tr><th><label for="id_name">Name:</label></th><td><input id="id_name" type="text" name="name" maxlength="10" /></td></tr>
# Clean up
>>> import shutil
>>> shutil.rmtree(temp_storage_dir)
"""
| bsd-3-clause |
valentin-krasontovitsch/ansible | lib/ansible/inventory/helpers.py | 120 | 1293 | # (c) 2017, Ansible by RedHat Inc,
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#############################################
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.utils.vars import combine_vars
def sort_groups(groups):
    """Return *groups* ordered by depth, then priority, then name."""
    def _group_rank(group):
        # Tuple comparison yields the lexicographic ordering we want.
        return (group.depth, group.priority, group.name)
    return sorted(groups, key=_group_rank)
def get_group_vars(groups):
    """
    Combine all the group vars from a list of inventory groups.

    :param groups: list of ansible.inventory.group.Group objects
    :rtype: dict
    """
    combined = {}
    # Merge in sorted order so deeper / higher-priority groups win.
    for group in sort_groups(groups):
        combined = combine_vars(combined, group.get_vars())
    return combined
| gpl-3.0 |
leppa/home-assistant | homeassistant/components/android_ip_webcam/switch.py | 6 | 2752 | """Support for Android IP Webcam settings."""
from homeassistant.components.switch import SwitchDevice
from . import (
CONF_HOST,
CONF_NAME,
CONF_SWITCHES,
DATA_IP_WEBCAM,
ICON_MAP,
KEY_MAP,
AndroidIPCamEntity,
)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
    """Set up the IP Webcam switch platform."""
    if discovery_info is None:
        # This platform is only ever set up through discovery by the
        # android_ip_webcam component.
        return

    host = discovery_info[CONF_HOST]
    name = discovery_info[CONF_NAME]
    ipcam = hass.data[DATA_IP_WEBCAM][host]

    entities = [
        IPWebcamSettingsSwitch(name, host, ipcam, setting)
        for setting in discovery_info[CONF_SWITCHES]
    ]
    async_add_entities(entities, True)
class IPWebcamSettingsSwitch(AndroidIPCamEntity, SwitchDevice):
    """Switch entity exposing one toggleable IP Webcam setting."""

    def __init__(self, name, host, ipcam, setting):
        """Initialize the settings switch.

        setting is the camera-side setting key; KEY_MAP provides a
        friendly display name for it when one exists.
        """
        super().__init__(host, ipcam)

        self._setting = setting
        self._mapped_name = KEY_MAP.get(self._setting, self._setting)
        self._name = f"{name} {self._mapped_name}"
        self._state = False

    @property
    def name(self):
        """Return the name of the node."""
        return self._name

    async def async_update(self):
        """Get the updated status of the switch."""
        self._state = bool(self._ipcam.current_settings.get(self._setting))

    @property
    def is_on(self):
        """Return the boolean response if the node is on."""
        return self._state

    async def _async_set_setting(self, value):
        """Apply *value* to the camera setting and mirror it locally.

        A few settings have dedicated API endpoints on the camera;
        everything else goes through the generic change_setting call.
        (Factored out of async_turn_on/async_turn_off, which previously
        duplicated this whole dispatch block.)
        """
        if self._setting == "torch":
            await self._ipcam.torch(activate=value)
        elif self._setting == "focus":
            await self._ipcam.focus(activate=value)
        elif self._setting == "video_recording":
            await self._ipcam.record(record=value)
        else:
            await self._ipcam.change_setting(self._setting, value)
        self._state = value
        self.async_schedule_update_ha_state()

    async def async_turn_on(self, **kwargs):
        """Turn device on."""
        await self._async_set_setting(True)

    async def async_turn_off(self, **kwargs):
        """Turn device off."""
        await self._async_set_setting(False)

    @property
    def icon(self):
        """Return the icon for the switch."""
        return ICON_MAP.get(self._setting, "mdi:flash")
| apache-2.0 |
UrusTeam/URUS | Tools/autotest/pysim/aircraft.py | 81 | 3985 | import math
import random
import time
import util
from rotmat import Vector3, Matrix3
class Aircraft(object):
    """A basic aircraft class.

    Holds the simulation state shared by all simulated vehicles: home
    location, attitude (as a DCM), body rotation rates, NED velocity and
    position, and frame-rate/noise bookkeeping.  Concrete vehicle models
    subclass this and integrate their own dynamics.
    """
    def __init__(self):
        # Home (reference) location: lat/lon in degrees, altitude in metres.
        self.home_latitude = 0
        self.home_longitude = 0
        self.home_altitude = 0
        self.ground_level = 0      # terrain altitude (m) used by on_ground()
        self.frame_height = 0.0    # height of the frame above ground when landed
        # Current geodetic position, initialised at home.
        self.latitude = self.home_latitude
        self.longitude = self.home_longitude
        self.altitude = self.home_altitude
        # Attitude as a direction cosine matrix (body -> earth, inferred from
        # its use with transposed() in update_position).
        self.dcm = Matrix3()
        # rotation rate in body frame
        self.gyro = Vector3(0, 0, 0) # rad/s
        self.velocity = Vector3(0, 0, 0) # m/s, North, East, Down
        self.position = Vector3(0, 0, 0) # m North, East, Down
        self.mass = 0.0
        self.update_frequency = 50 # in Hz
        self.gravity = 9.80665 # m/s/s
        # Accelerometer output; at rest the sensor reads -g on the Z axis.
        self.accelerometer = Vector3(0, 0, -self.gravity)
        self.wind = util.Wind('0,0,0')
        # time_base is the wall-clock start; time_now is the simulated clock.
        self.time_base = time.time()
        self.time_now = self.time_base + 100*1.0e-6
        # Noise magnitudes applied per unit throttle by add_noise().
        self.gyro_noise = math.radians(0.1)
        self.accel_noise = 0.3
    def on_ground(self, position=None):
        """Return true if we are on the ground."""
        if position is None:
            position = self.position
        # -position.z is height above home (z is Down); compare against the
        # terrain level plus the height of the landing gear/frame.
        return (-position.z) + self.home_altitude <= self.ground_level + self.frame_height
    def update_position(self):
        """Update lat/lon/alt from position."""
        # Convert the local NED offset into a bearing/distance from home and
        # project to a new lat/lon.
        bearing = math.degrees(math.atan2(self.position.y, self.position.x))
        distance = math.sqrt(self.position.x**2 + self.position.y**2)
        (self.latitude, self.longitude) = util.gps_newpos(self.home_latitude, self.home_longitude,
                                                          bearing, distance)
        self.altitude = self.home_altitude - self.position.z
        # NOTE(review): velocity_body is computed but never used, and
        # self.accel_body is not set in __init__ -- it appears to be provided
        # by subclasses before this is called; confirm.
        velocity_body = self.dcm.transposed() * self.velocity
        self.accelerometer = self.accel_body.copy()
    def set_yaw_degrees(self, yaw_degrees):
        """Rotate to the given yaw."""
        # Preserve roll and pitch; replace only the yaw component.
        (roll, pitch, yaw) = self.dcm.to_euler()
        yaw = math.radians(yaw_degrees)
        self.dcm.from_euler(roll, pitch, yaw)
    def time_advance(self, deltat):
        """Advance time by deltat in seconds."""
        self.time_now += deltat
    def setup_frame_time(self, rate, speedup):
        """Setup frame_time calculation."""
        self.rate = rate
        self.speedup = speedup
        self.frame_time = 1.0/rate
        # Wall-clock budget per simulated frame once speedup is applied.
        self.scaled_frame_time = self.frame_time/speedup
        self.last_wall_time = time.time()
        self.achieved_rate = rate
    def adjust_frame_time(self, rate):
        """Adjust frame_time calculation."""
        self.rate = rate
        self.frame_time = 1.0/rate
        self.scaled_frame_time = self.frame_time/self.speedup
    def sync_frame_time(self):
        """Try to synchronise simulation time with wall clock time, taking
        into account desired speedup."""
        now = time.time()
        if now < self.last_wall_time + self.scaled_frame_time:
            # Ahead of schedule: sleep off the surplus wall-clock time.
            time.sleep(self.last_wall_time+self.scaled_frame_time - now)
            now = time.time()
        if now > self.last_wall_time and now - self.last_wall_time < 0.1:
            rate = 1.0/(now - self.last_wall_time)
            # Low-pass filter the measured frame rate, then nudge the
            # per-frame budget towards the requested rate*speedup.
            self.achieved_rate = (0.98*self.achieved_rate) + (0.02*rate)
            if self.achieved_rate < self.rate*self.speedup:
                self.scaled_frame_time *= 0.999
            else:
                self.scaled_frame_time *= 1.001
        self.last_wall_time = now
    def add_noise(self, throttle):
        """Add noise based on throttle level (from 0..1)."""
        self.gyro += Vector3(random.gauss(0, 1),
                             random.gauss(0, 1),
                             random.gauss(0, 1)) * throttle * self.gyro_noise
        # NOTE(review): accel_body is set by subclasses, not __init__
        # (see update_position) -- confirm callers set it first.
        self.accel_body += Vector3(random.gauss(0, 1),
                                   random.gauss(0, 1),
                                   random.gauss(0, 1)) * throttle * self.accel_noise
| gpl-3.0 |
samueldotj/TeeRISC-Simulator | src/arch/x86/isa/insts/simd128/floating_point/logical/__init__.py | 91 | 2333 | # Copyright (c) 2007 The Hewlett-Packard Development Company
# All rights reserved.
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder. You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Gabe Black
# Names of the sibling microcode modules to aggregate for this package.
categories = ["andp",
              "orp",
              "exclusive_or"]
# Aggregate microcode string; each category module's microcode is appended.
microcode = '''
# SSE instructions
'''
for category in categories:
    # Python 2 'exec' statement: imports each category module under the
    # name 'cat' so its microcode can be concatenated below.
    exec "import %s as cat" % category
    microcode += cat.microcode
| bsd-3-clause |
robinro/ansible | lib/ansible/modules/monitoring/monit.py | 49 | 7071 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2013, Darryl Stoflet <stoflet@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: monit
short_description: Manage the state of a program monitored via Monit
description:
- Manage the state of a program monitored via I(Monit)
version_added: "1.2"
options:
name:
description:
- The name of the I(monit) program/process to manage
required: true
default: null
state:
description:
- The state of service
required: true
default: null
choices: [ "present", "started", "stopped", "restarted", "monitored", "unmonitored", "reloaded" ]
timeout:
description:
- If there are pending actions for the service monitored by monit, then Ansible will check
for up to this many seconds to verify the the requested action has been performed.
Ansible will sleep for five seconds between each check.
required: false
default: 300
version_added: "2.1"
requirements: [ ]
author: "Darryl Stoflet (@dstoflet)"
'''
EXAMPLES = '''
# Manage the state of program "httpd" to be in "started" state.
- monit:
name: httpd
state: started
'''
import time
def main():
    """Drive monit to bring the named process into the requested state.

    Communicates the result via module.exit_json()/fail_json(); this
    function never returns normally.
    """
    arg_spec = dict(
        name=dict(required=True),
        timeout=dict(default=300, type='int'),
        state=dict(required=True, choices=['present', 'started', 'restarted', 'stopped', 'monitored', 'unmonitored', 'reloaded'])
    )

    module = AnsibleModule(argument_spec=arg_spec, supports_check_mode=True)

    name = module.params['name']
    state = module.params['state']
    timeout = module.params['timeout']

    MONIT = module.get_bin_path('monit', True)

    def status():
        """Return the status of the process in monit, or the empty string if not present."""
        rc, out, err = module.run_command('%s summary' % MONIT, check_rc=True)
        for line in out.split('\n'):
            # Sample output lines:
            # Process 'name'            Running
            # Process 'name'            Running - restart pending
            parts = line.split()
            if len(parts) > 2 and parts[0].lower() == 'process' and parts[1] == "'%s'" % name:
                return ' '.join(parts[2:]).lower()
        return ''

    def run_command(command):
        """Run a monit command against the process and return the new status string."""
        module.run_command('%s %s %s' % (MONIT, command, name), check_rc=True)
        return status()

    def wait_for_monit_to_stop_pending():
        """Fail the run if the status stays absent/pending/initializing past the timeout."""
        timeout_time = time.time() + timeout
        sleep_time = 5

        running_status = status()
        while running_status == '' or 'pending' in running_status or 'initializing' in running_status:
            if time.time() >= timeout_time:
                # BUGFIX: the message previously said "initiating", a status
                # that is never actually checked; it now matches the loop.
                module.fail_json(
                    msg='waited too long for "pending", or "initializing" status to go away ({0})'.format(
                        running_status
                    ),
                    state=state
                )

            time.sleep(sleep_time)
            running_status = status()

    if state == 'reloaded':
        if module.check_mode:
            module.exit_json(changed=True)
        rc, out, err = module.run_command('%s reload' % MONIT)
        if rc != 0:
            module.fail_json(msg='monit reload failed', stdout=out, stderr=err)
        wait_for_monit_to_stop_pending()
        module.exit_json(changed=True, name=name, state=state)

    present = status() != ''

    # Every state other than 'present' requires monit to already know the process.
    if not present and not state == 'present':
        module.fail_json(msg='%s process not presently configured with monit' % name, name=name, state=state)

    if state == 'present':
        if not present:
            if module.check_mode:
                module.exit_json(changed=True)
            # BUGFIX: keep the string result in its own variable.  The
            # original code rebound the name 'status', shadowing the nested
            # status() helper and crashing wait_for_monit_to_stop_pending()
            # (which calls status()) with "'str' object is not callable".
            reload_status = run_command('reload')
            if reload_status == '':
                wait_for_monit_to_stop_pending()
            module.exit_json(changed=True, name=name, state=state)
        module.exit_json(changed=False, name=name, state=state)

    # Wait out any in-flight monit action before inspecting the current state.
    wait_for_monit_to_stop_pending()
    running = 'running' in status()

    if running and state in ['started', 'monitored']:
        module.exit_json(changed=False, name=name, state=state)

    if running and state == 'stopped':
        if module.check_mode:
            module.exit_json(changed=True)
        stop_status = run_command('stop')
        if stop_status in ['not monitored'] or 'stop pending' in stop_status:
            module.exit_json(changed=True, name=name, state=state)
        module.fail_json(msg='%s process not stopped' % name, status=stop_status)

    if running and state == 'unmonitored':
        if module.check_mode:
            module.exit_json(changed=True)
        unmonitor_status = run_command('unmonitor')
        if unmonitor_status in ['not monitored'] or 'unmonitor pending' in unmonitor_status:
            module.exit_json(changed=True, name=name, state=state)
        module.fail_json(msg='%s process not unmonitored' % name, status=unmonitor_status)

    elif state == 'restarted':
        if module.check_mode:
            module.exit_json(changed=True)
        restart_status = run_command('restart')
        if restart_status in ['initializing', 'running'] or 'restart pending' in restart_status:
            module.exit_json(changed=True, name=name, state=state)
        module.fail_json(msg='%s process not restarted' % name, status=restart_status)

    elif not running and state == 'started':
        if module.check_mode:
            module.exit_json(changed=True)
        start_status = run_command('start')
        if start_status in ['initializing', 'running'] or 'start pending' in start_status:
            module.exit_json(changed=True, name=name, state=state)
        module.fail_json(msg='%s process not started' % name, status=start_status)

    elif not running and state == 'monitored':
        if module.check_mode:
            module.exit_json(changed=True)
        monitor_status = run_command('monitor')
        if monitor_status not in ['not monitored']:
            module.exit_json(changed=True, name=name, state=state)
        module.fail_json(msg='%s process not monitored' % name, status=monitor_status)

    module.exit_json(changed=False, name=name, state=state)
# import module snippets (the wildcard import is required Ansible 2.x
# boilerplate; it provides AnsibleModule used by main() above)
from ansible.module_utils.basic import *
if __name__ == '__main__':
    main()
| gpl-3.0 |
duyetdev/openerp-6.1.1 | openerp/addons/crm_claim/__init__.py | 390 | 1078 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import crm_claim
import report
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
ryfeus/lambda-packs | Tensorflow/source/tensorflow/contrib/learn/python/learn/learn_io/generator_io.py | 37 | 5360 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Methods to allow generator of dict with numpy arrays."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from collections import Container
from types import FunctionType
from types import GeneratorType
from tensorflow.python.estimator.inputs.queues.feeding_functions import _enqueue_data as enqueue_data
def generator_input_fn(x,
                       target_key=None,
                       batch_size=128,
                       num_epochs=1,
                       shuffle=True,
                       queue_capacity=1000,
                       num_threads=1,
                       pad_value=None):
  """Returns input function that returns dicts of numpy arrays
     yielded from a generator.

  It is assumed that every dict of numpy arrays yielded from the dictionary
  represents a single sample. The generator should consume a single epoch of the
  data.

  This returns a function outputting `features` and `target` based on the dict
  of numpy arrays. The dict `features` has the same keys as an element yielded
  from x.

  Example:
    ```python
    def generator():
      for index in range(10):
        yield {'height': np.random.randint(32,36),
              'age': np.random.randint(18, 80),
              'label': np.ones(1)}

    with tf.Session() as session:
      input_fn = generator_io.generator_input_fn(
          generator, target_key="label", batch_size=2, shuffle=False,
          num_epochs=1)
    ```

  Args:
    x: Generator Function, returns a `Generator` that will yield the data
      in `dict` of numpy arrays
    target_key: String or Container of Strings, the key or Container of keys of
      the numpy arrays in x dictionaries to use as target.
    batch_size: Integer, size of batches to return.
    num_epochs: Integer, number of epochs to iterate over data. If `None` will
      run forever.
    shuffle: Boolean, if True shuffles the queue. Avoid shuffle at prediction
      time.
    queue_capacity: Integer, size of queue to accumulate.
    num_threads: Integer, number of threads used for reading and enqueueing.
    pad_value: default value for dynamic padding of data samples, if provided.

  Returns:
    Function, that returns a feature `dict` with `Tensors` and an optional
    label `dict` with `Tensors`, or if target_key is `str` label is a `Tensor`

  Raises:
    TypeError: `x` is not `FunctionType`.
    TypeError: `x()` is not `GeneratorType`.
    TypeError: `next(x())` is not `dict`.
    TypeError: `target_key` is not `str` or `target_key` is not `Container`
      of `str`.
    KeyError: `target_key` not a key or `target_key[index]` not in next(`x()`).
    KeyError: `key` mismatch between dicts emitted from `x()`
  """
  if not isinstance(x, FunctionType):
    raise TypeError(
        'x must be generator function; got {}'.format(type(x).__name__))
  generator = x()
  if not isinstance(generator, GeneratorType):
    raise TypeError(
        'x() must be generator; got {}'.format(type(generator).__name__))
  data = next(generator)
  if not isinstance(data, dict):
    raise TypeError('x() must yield dict; got {}'.format(type(data).__name__))
  # BUGFIX: reuse the element already pulled from the probe generator instead
  # of calling next(x()) again, which created and partially consumed a second
  # generator (re-running any side effects in the user's generator function).
  input_keys = sorted(data)
  if target_key is not None:
    if isinstance(target_key, str):
      target_key = [target_key]
    elif isinstance(target_key, Container):
      for item in target_key:
        if not isinstance(item, str):
          raise TypeError('target_key must be str or Container of str; got {}'.
                          format(type(item).__name__))
        if item not in input_keys:
          raise KeyError(
              'target_key not in yielded dict. Expected {} keys; got {}'.format(
                  input_keys, item))
    else:
      raise TypeError('target_key must be str or Container of str; got {}'.
                      format(type(target_key).__name__))

  def _generator_input_fn():
    """generator input function."""
    queue = enqueue_data(
        x,
        queue_capacity,
        shuffle=shuffle,
        num_threads=num_threads,
        enqueue_size=batch_size,
        num_epochs=num_epochs,
        pad_value=pad_value)

    features = (queue.dequeue_many(batch_size)
                if num_epochs is None else queue.dequeue_up_to(batch_size))
    if not isinstance(features, list):
      features = [features]
    features = dict(zip(input_keys, features))
    if target_key is not None:
      if len(target_key) > 1:
        target = {key: features.pop(key) for key in target_key}
      else:
        target = features.pop(target_key[0])
      return features, target
    return features

  return _generator_input_fn
| mit |
40223234/w16b_test | static/Brython3.1.3-20150514-095342/Lib/site-packages/pygame/locals.py | 603 | 1141 | ## pygame - Python Game Library
## Copyright (C) 2000-2003 Pete Shinners
##
## This library is free software; you can redistribute it and/or
## modify it under the terms of the GNU Library General Public
## License as published by the Free Software Foundation; either
## version 2 of the License, or (at your option) any later version.
##
## This library is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## Library General Public License for more details.
##
## You should have received a copy of the GNU Library General Public
## License along with this library; if not, write to the Free
## Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##
## Pete Shinners
## pete@shinners.org
"""Set of functions from PyGame that are handy to have in
the local namespace for your module"""
from pygame.constants import *
from pygame.rect import Rect
import pygame.color as color
# Convenience alias so "from pygame.locals import *" also exposes Color.
Color = color.Color
| agpl-3.0 |
mtnman38/Aggregate | Executables/Aggregate 0.8.8 for Macintosh.app/Contents/Resources/lib/python2.7/requests/packages/urllib3/__init__.py | 650 | 1701 | # urllib3/__init__.py
# Copyright 2008-2013 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
#
# This module is part of urllib3 and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""
urllib3 - Thread-safe connection pooling and re-using.
"""
__author__ = 'Andrey Petrov (andrey.petrov@shazow.net)'
__license__ = 'MIT'
__version__ = 'dev'
from .connectionpool import (
HTTPConnectionPool,
HTTPSConnectionPool,
connection_from_url
)
from . import exceptions
from .filepost import encode_multipart_formdata
from .poolmanager import PoolManager, ProxyManager, proxy_from_url
from .response import HTTPResponse
from .util import make_headers, get_host, Timeout
# Set default logging handler to avoid "No handler found" warnings.
import logging
try: # Python 2.7+ ships logging.NullHandler
    from logging import NullHandler
except ImportError:
    class NullHandler(logging.Handler):
        """Fallback no-op handler for Python < 2.7."""
        def emit(self, record):
            # Deliberately drop the record: a library should not emit log
            # output unless the application configures logging itself.
            pass
# Attach the no-op handler so "No handler found" warnings are not printed
# when the embedding application has not configured logging.
logging.getLogger(__name__).addHandler(NullHandler())
def add_stderr_logger(level=logging.DEBUG):
    """
    Helper for quickly adding a StreamHandler to the logger. Useful for
    debugging.

    :param level: logging level for the urllib3 logger (default DEBUG).
    :returns: the handler after adding it.
    """
    # This method needs to be in this __init__.py to get the __name__ correct
    # even if urllib3 is vendored within another package.
    logger = logging.getLogger(__name__)
    handler = logging.StreamHandler()
    handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s %(message)s'))
    logger.addHandler(handler)
    logger.setLevel(level)
    # Lazy %-style args so the message is only formatted if actually emitted
    # (the original eagerly interpolated with '%'); also fixes "an stderr".
    logger.debug('Added a stderr logging handler to logger: %s', __name__)
    return handler
# ... Clean up: NullHandler was only needed to attach the default handler
# above, so remove the name from the module namespace.
del NullHandler
| gpl-2.0 |
ObsidianBlk/GemRB--Unofficial- | gemrb/GUIScripts/bg2/QuitGame.py | 7 | 1090 | # -*-python-*-
# GemRB - Infinity Engine Emulator
# Copyright (C) 2007 The GemRB Project
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# QuitGame.py - display EndGame sequence
###################################################
import GemRB
def OnLoad ():
	"""Quit the running game and hand control to the next GUI script."""
	GemRB.HideGUI ()
	GemRB.QuitGame ()
	# QuitGame3 == -1 selects the demo-ending screen; any other value
	# returns to the start menu.
	next_script = "DemoEnd" if GemRB.GetVar ("QuitGame3") == -1 else "Start"
	GemRB.SetNextScript (next_script)
	return
silentfuzzle/calibre | src/calibre/ebooks/pdf/outline_writer.py | 14 | 2318 | #!/usr/bin/env python2
# vim:fileencoding=UTF-8:ts=4:sw=4:sta:et:sts=4:fdm=marker:ai
from __future__ import (unicode_literals, division, absolute_import,
print_function)
__license__ = 'GPL v3'
__copyright__ = '2012, Kovid Goyal <kovid at kovidgoyal.net>'
__docformat__ = 'restructuredtext en'
import os
from collections import defaultdict
class Outline(object):
    """Builds the PDF outline (bookmark tree) for a conversion from its TOC.

    Positions of anchors are recorded during rendering via set_pos(); the
    instance is then called with the rendered document to write the tree.
    """
    def __init__(self, toc, items):
        # toc: the TOC tree (may be None); items: spine item paths in order.
        self.toc = toc
        self.items = items
        # item path -> set of anchor names (URL fragments) used by the TOC.
        self.anchor_map = {}
        # item path -> {anchor or None -> (pagenum, ypos)}; filled by set_pos().
        self.pos_map = defaultdict(dict)
        self.toc_map = {}  # NOTE(review): never populated in this class
        for item in items:
            self.anchor_map[item] = anchors = set()
            item_path = os.path.abspath(item).replace('/', os.sep)
            if self.toc is not None:
                # Link every TOC node that points into this item back to it.
                for x in self.toc.flat():
                    if x.abspath != item_path: continue
                    x.outline_item_ = item
                    if x.fragment:
                        anchors.add(x.fragment)
    def set_pos(self, item, anchor, pagenum, ypos):
        """Record the rendered position of *anchor* within *item*."""
        self.pos_map[item][anchor] = (pagenum, ypos)
    def get_pos(self, toc):
        """Return the (page, ypos) for a TOC node, or (0, 0) if unknown."""
        page, ypos = 0, 0
        item = getattr(toc, 'outline_item_', None)
        if item is not None:
            # First use the item URL without fragment
            page, ypos = self.pos_map.get(item, {}).get(None, (0, 0))
        if toc.fragment:
            amap = self.pos_map.get(item, None)
            if amap is not None:
                # Prefer the exact anchor position when it was recorded.
                page, ypos = amap.get(toc.fragment, (page, ypos))
        return page, ypos
    def add_children(self, toc, parent):
        """Recursively create outline nodes for toc's children under *parent*."""
        for child in toc:
            page, ypos = self.get_pos(child)
            text = child.text or _('Page %d')%page
            if page >= self.page_count:
                # Clamp destinations that point past the last rendered page.
                page = self.page_count - 1
            cn = parent.create(text, page, True)
            self.add_children(child, cn)
    def __call__(self, doc):
        """Write the outline tree into *doc* (the rendered PDF document)."""
        self.pos_map = dict(self.pos_map)
        self.page_count = doc.page_count()
        for child in self.toc:
            page, ypos = self.get_pos(child)
            text = child.text or _('Page %d')%page
            if page >= self.page_count:
                page = self.page_count - 1
            node = doc.create_outline(text, page)
            self.add_children(child, node)
| gpl-3.0 |
ArcherSys/ArcherSys | Lib/site-packages/pycparser/_ast_gen.py | 78 | 8663 | #-----------------------------------------------------------------
# _ast_gen.py
#
# Generates the AST Node classes from a specification given in
# a configuration file
#
# The design of this module was inspired by astgen.py from the
# Python 2.5 code-base.
#
# Copyright (C) 2008-2015, Eli Bendersky
# License: BSD
#-----------------------------------------------------------------
import pprint
from string import Template
class ASTCodeGenerator(object):
    """Generates the c_ast.py source from the node configuration file."""
    def __init__(self, cfg_filename='_c_ast.cfg'):
        """ Initialize the code generator from a configuration
            file.
        """
        self.cfg_filename = cfg_filename
        # One NodeCfg per node declaration found in the config file.
        self.node_cfg = [NodeCfg(name, contents)
            for (name, contents) in self.parse_cfgfile(cfg_filename)]
    def generate(self, file=None):
        """ Generates the code into file, an open file buffer.

            NOTE(review): 'file' defaults to None but is required --
            generate() with no argument would fail on file.write().
        """
        # Static preamble (comment header + Node/NodeVisitor base classes),
        # then one generated class per configured node.
        src = Template(_PROLOGUE_COMMENT).substitute(
            cfg_filename=self.cfg_filename)
        src += _PROLOGUE_CODE
        for node_cfg in self.node_cfg:
            src += node_cfg.generate_source() + '\n\n'
        file.write(src)
    def parse_cfgfile(self, filename):
        """ Parse the configuration file and yield pairs of
            (name, contents) for each node.

            Expected line shape: Name: [entry, entry*, entry**]
            Blank lines and '#' comments are skipped.
        """
        with open(filename, "r") as f:
            for line in f:
                line = line.strip()
                if not line or line.startswith('#'):
                    continue
                # Locate the 'Name: [contents]' delimiters and validate order.
                colon_i = line.find(':')
                lbracket_i = line.find('[')
                rbracket_i = line.find(']')
                if colon_i < 1 or lbracket_i <= colon_i or rbracket_i <= lbracket_i:
                    raise RuntimeError("Invalid line in %s:\n%s\n" % (filename, line))
                name = line[:colon_i]
                val = line[lbracket_i + 1:rbracket_i]
                vallist = [v.strip() for v in val.split(',')] if val else []
                yield name, vallist
class NodeCfg(object):
    """ Node configuration.

        name: node name
        contents: a list of contents - attributes and child nodes
        See comment at the top of the configuration file for details.
    """
    def __init__(self, name, contents):
        self.name = name
        # Entry names with trailing '*' markers stripped, in declaration order.
        self.all_entries = []
        # Plain attributes, single children ('*'), child sequences ('**').
        self.attr = []
        self.child = []
        self.seq_child = []
        for entry in contents:
            stripped = entry.rstrip('*')
            self.all_entries.append(stripped)
            if entry.endswith('**'):
                self.seq_child.append(stripped)
            elif entry.endswith('*'):
                self.child.append(stripped)
            else:
                self.attr.append(entry)
    def generate_source(self):
        """Return the complete Python source for this node class."""
        parts = [self._gen_init(), self._gen_children(), self._gen_attr_names()]
        return '\n'.join(parts)
    def _gen_init(self):
        """Generate the class header, __slots__ and __init__."""
        lines = ["class %s(Node):" % self.name]
        if self.all_entries:
            args = ', '.join(self.all_entries)
            slots = ', '.join("'{0}'".format(e) for e in self.all_entries)
            slots += ", 'coord', '__weakref__'"
            arglist = '(self, %s, coord=None)' % args
        else:
            slots = "'coord', '__weakref__'"
            arglist = '(self, coord=None)'
        lines.append(" __slots__ = (%s)" % slots)
        lines.append(" def __init__%s:" % arglist)
        for name in self.all_entries + ['coord']:
            lines.append(" self.%s = %s" % (name, name))
        return '\n'.join(lines) + '\n'
    def _gen_children(self):
        """Generate the children() method for this node class."""
        pieces = [' def children(self):\n']
        if self.all_entries:
            pieces.append(' nodelist = []\n')
            for child in self.child:
                pieces.append(
                    (' if self.%(child)s is not None:' +
                     ' nodelist.append(("%(child)s", self.%(child)s))\n') %
                    dict(child=child))
            for seq_child in self.seq_child:
                pieces.append(
                    (' for i, child in enumerate(self.%(child)s or []):\n'
                     ' nodelist.append(("%(child)s[%%d]" %% i, child))\n') %
                    dict(child=seq_child))
            pieces.append(' return tuple(nodelist)\n')
        else:
            pieces.append(' return ()\n')
        return ''.join(pieces)
    def _gen_attr_names(self):
        """Generate the attr_names class attribute."""
        quoted = ''.join("%r, " % nm for nm in self.attr)
        return " attr_names = (" + quoted + ')'
_PROLOGUE_COMMENT = \
r'''#-----------------------------------------------------------------
# ** ATTENTION **
# This code was automatically generated from the file:
# $cfg_filename
#
# Do not modify it directly. Modify the configuration file and
# run the generator again.
# ** ** *** ** **
#
# pycparser: c_ast.py
#
# AST Node classes.
#
# Copyright (C) 2008-2015, Eli Bendersky
# License: BSD
#-----------------------------------------------------------------
'''
_PROLOGUE_CODE = r'''
import sys
class Node(object):
__slots__ = ()
""" Abstract base class for AST nodes.
"""
def children(self):
""" A sequence of all children that are Nodes
"""
pass
def show(self, buf=sys.stdout, offset=0, attrnames=False, nodenames=False, showcoord=False, _my_node_name=None):
""" Pretty print the Node and all its attributes and
children (recursively) to a buffer.
buf:
Open IO buffer into which the Node is printed.
offset:
Initial offset (amount of leading spaces)
attrnames:
True if you want to see the attribute names in
name=value pairs. False to only see the values.
nodenames:
True if you want to see the actual node names
within their parents.
showcoord:
Do you want the coordinates of each Node to be
displayed.
"""
lead = ' ' * offset
if nodenames and _my_node_name is not None:
buf.write(lead + self.__class__.__name__+ ' <' + _my_node_name + '>: ')
else:
buf.write(lead + self.__class__.__name__+ ': ')
if self.attr_names:
if attrnames:
nvlist = [(n, getattr(self,n)) for n in self.attr_names]
attrstr = ', '.join('%s=%s' % nv for nv in nvlist)
else:
vlist = [getattr(self, n) for n in self.attr_names]
attrstr = ', '.join('%s' % v for v in vlist)
buf.write(attrstr)
if showcoord:
buf.write(' (at %s)' % self.coord)
buf.write('\n')
for (child_name, child) in self.children():
child.show(
buf,
offset=offset + 2,
attrnames=attrnames,
nodenames=nodenames,
showcoord=showcoord,
_my_node_name=child_name)
class NodeVisitor(object):
""" A base NodeVisitor class for visiting c_ast nodes.
Subclass it and define your own visit_XXX methods, where
XXX is the class name you want to visit with these
methods.
For example:
class ConstantVisitor(NodeVisitor):
def __init__(self):
self.values = []
def visit_Constant(self, node):
self.values.append(node.value)
Creates a list of values of all the constant nodes
encountered below the given node. To use it:
cv = ConstantVisitor()
cv.visit(node)
Notes:
* generic_visit() will be called for AST nodes for which
no visit_XXX method was defined.
* The children of nodes for which a visit_XXX was
defined will not be visited - if you need this, call
generic_visit() on the node.
You can use:
NodeVisitor.generic_visit(self, node)
* Modeled after Python's own AST visiting facilities
(the ast module of Python 3.0)
"""
def visit(self, node):
""" Visit a node.
"""
method = 'visit_' + node.__class__.__name__
visitor = getattr(self, method, self.generic_visit)
return visitor(node)
def generic_visit(self, node):
""" Called if no explicit visitor function exists for a
node. Implements preorder visiting of the node.
"""
for c_name, c in node.children():
self.visit(c)
'''
if __name__ == "__main__":
    import sys
    # Regenerate c_ast.py from the _c_ast.cfg configuration file.
    ast_gen = ASTCodeGenerator('_c_ast.cfg')
    # Use a context manager so the output file is flushed and closed even if
    # generation raises (the original leaked the handle returned by open()).
    with open('c_ast.py', 'w') as out:
        ast_gen.generate(out)
| mit |
bop/hybrid | lib/python2.6/site-packages/django/contrib/formtools/tests/wizard/wizardtests/tests.py | 93 | 16277 | from __future__ import unicode_literals
import os
from django import forms
from django.test import TestCase
from django.test.client import RequestFactory
from django.conf import settings
from django.contrib.auth.models import User
from django.contrib.auth.tests.utils import skipIfCustomUser
from django.contrib.formtools.wizard.views import CookieWizardView
from django.utils._os import upath
class UserForm(forms.ModelForm):
    # ModelForm over the auth User model, used as the first wizard step.
    # NOTE(review): Meta defines neither ``fields`` nor ``exclude``, i.e. all
    # model fields are exposed (accepted by the Django version this targets).
    class Meta:
        model = User
# Model formset of UserForms with two extra blank forms; used by the
# queryset-override wizard test below.
UserFormSet = forms.models.modelformset_factory(User, form=UserForm, extra=2)
class WizardTests(object):
    """Mixin exercising a four-step contact wizard end to end.

    Concrete subclasses supply ``wizard_url``, ``wizard_step_1_data`` and
    ``wizard_step_data`` for a specific storage backend (session or cookie).
    """
    urls = 'django.contrib.formtools.tests.wizard.wizardtests.urls'

    def setUp(self):
        # Bind the created test user's pk into the step-1 POST data, since
        # form1 has a required 'user' model-choice field.
        self.testuser, created = User.objects.get_or_create(username='testuser1')
        self.wizard_step_data[0]['form1-user'] = self.testuser.pk

    def test_initial_call(self):
        # A plain GET shows step 1 with the expected step bookkeeping.
        response = self.client.get(self.wizard_url)
        wizard = response.context['wizard']
        self.assertEqual(response.status_code, 200)
        self.assertEqual(wizard['steps'].current, 'form1')
        self.assertEqual(wizard['steps'].step0, 0)
        self.assertEqual(wizard['steps'].step1, 1)
        self.assertEqual(wizard['steps'].last, 'form4')
        self.assertEqual(wizard['steps'].prev, None)
        self.assertEqual(wizard['steps'].next, 'form2')
        self.assertEqual(wizard['steps'].count, 4)

    def test_form_post_error(self):
        # Posting step 1 without its required fields re-renders step 1
        # with per-field errors.
        response = self.client.post(self.wizard_url, self.wizard_step_1_data)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.context['wizard']['steps'].current, 'form1')
        self.assertEqual(response.context['wizard']['form'].errors,
                         {'name': ['This field is required.'],
                          'user': ['This field is required.']})

    def test_form_post_success(self):
        # A valid step-1 POST advances the wizard to step 2.
        response = self.client.post(self.wizard_url, self.wizard_step_data[0])
        wizard = response.context['wizard']
        self.assertEqual(response.status_code, 200)
        self.assertEqual(wizard['steps'].current, 'form2')
        self.assertEqual(wizard['steps'].step0, 1)
        self.assertEqual(wizard['steps'].prev, 'form1')
        self.assertEqual(wizard['steps'].next, 'form3')

    def test_form_stepback(self):
        # 'wizard_goto_step' navigates back to the previous step.
        response = self.client.get(self.wizard_url)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.context['wizard']['steps'].current, 'form1')
        response = self.client.post(self.wizard_url, self.wizard_step_data[0])
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.context['wizard']['steps'].current, 'form2')
        response = self.client.post(self.wizard_url, {
            'wizard_goto_step': response.context['wizard']['steps'].prev})
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.context['wizard']['steps'].current, 'form1')

    def test_template_context(self):
        # Step-specific extra context ('another_var') appears only on form2;
        # ticket #19025: `form` should be included in context.
        response = self.client.get(self.wizard_url)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.context['wizard']['steps'].current, 'form1')
        self.assertEqual(response.context.get('another_var', None), None)
        response = self.client.post(self.wizard_url, self.wizard_step_data[0])
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.context['wizard']['steps'].current, 'form2')
        self.assertEqual(response.context.get('another_var', None), True)
        # ticket #19025: `form` should be included in context
        form = response.context_data['wizard']['form']
        self.assertEqual(response.context_data['form'], form)

    def test_form_finish(self):
        # Walk all four steps (step 2 includes a file upload) and verify the
        # done() view receives every step's cleaned data as 'form_list'.
        response = self.client.get(self.wizard_url)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.context['wizard']['steps'].current, 'form1')
        response = self.client.post(self.wizard_url, self.wizard_step_data[0])
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.context['wizard']['steps'].current, 'form2')
        post_data = self.wizard_step_data[1]
        post_data['form2-file1'] = open(upath(__file__), 'rb')
        response = self.client.post(self.wizard_url, post_data)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.context['wizard']['steps'].current, 'form3')
        response = self.client.post(self.wizard_url, self.wizard_step_data[2])
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.context['wizard']['steps'].current, 'form4')
        response = self.client.post(self.wizard_url, self.wizard_step_data[3])
        self.assertEqual(response.status_code, 200)
        all_data = response.context['form_list']
        # The uploaded file round-trips byte-for-byte (this test file itself).
        with open(upath(__file__), 'rb') as f:
            self.assertEqual(all_data[1]['file1'].read(), f.read())
        all_data[1]['file1'].close()
        del all_data[1]['file1']
        self.assertEqual(all_data, [
            {'name': 'Pony', 'thirsty': True, 'user': self.testuser},
            {'address1': '123 Main St', 'address2': 'Djangoland'},
            {'random_crap': 'blah blah'},
            [{'random_crap': 'blah blah'},
             {'random_crap': 'blah blah'}]])

    def test_cleaned_data(self):
        # Same walk as test_form_finish, but checks the merged
        # 'all_cleaned_data' dict (formset data keyed as 'formset-form4').
        response = self.client.get(self.wizard_url)
        self.assertEqual(response.status_code, 200)
        response = self.client.post(self.wizard_url, self.wizard_step_data[0])
        self.assertEqual(response.status_code, 200)
        post_data = self.wizard_step_data[1]
        with open(upath(__file__), 'rb') as post_file:
            post_data['form2-file1'] = post_file
            response = self.client.post(self.wizard_url, post_data)
        self.assertEqual(response.status_code, 200)
        response = self.client.post(self.wizard_url, self.wizard_step_data[2])
        self.assertEqual(response.status_code, 200)
        response = self.client.post(self.wizard_url, self.wizard_step_data[3])
        self.assertEqual(response.status_code, 200)
        all_data = response.context['all_cleaned_data']
        with open(upath(__file__), 'rb') as f:
            self.assertEqual(all_data['file1'].read(), f.read())
        all_data['file1'].close()
        del all_data['file1']
        self.assertEqual(all_data, {
            'name': 'Pony', 'thirsty': True, 'user': self.testuser,
            'address1': '123 Main St', 'address2': 'Djangoland',
            'random_crap': 'blah blah', 'formset-form4': [
                {'random_crap': 'blah blah'},
                {'random_crap': 'blah blah'}]})

    def test_manipulated_data(self):
        # Dropping the session/cookie storage mid-run resets the wizard back
        # to step 1 instead of crashing.
        response = self.client.get(self.wizard_url)
        self.assertEqual(response.status_code, 200)
        response = self.client.post(self.wizard_url, self.wizard_step_data[0])
        self.assertEqual(response.status_code, 200)
        post_data = self.wizard_step_data[1]
        post_data['form2-file1'].close()
        post_data['form2-file1'] = open(upath(__file__), 'rb')
        response = self.client.post(self.wizard_url, post_data)
        self.assertEqual(response.status_code, 200)
        response = self.client.post(self.wizard_url, self.wizard_step_data[2])
        self.assertEqual(response.status_code, 200)
        self.client.cookies.pop('sessionid', None)
        self.client.cookies.pop('wizard_cookie_contact_wizard', None)
        response = self.client.post(self.wizard_url, self.wizard_step_data[3])
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.context['wizard']['steps'].current, 'form1')

    def test_form_refresh(self):
        # Re-posting an earlier step's data mid-run simply re-renders the
        # step after it; the run can still be completed.
        response = self.client.get(self.wizard_url)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.context['wizard']['steps'].current, 'form1')
        response = self.client.post(self.wizard_url, self.wizard_step_data[0])
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.context['wizard']['steps'].current, 'form2')
        response = self.client.post(self.wizard_url, self.wizard_step_data[0])
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.context['wizard']['steps'].current, 'form2')
        post_data = self.wizard_step_data[1]
        post_data['form2-file1'].close()
        post_data['form2-file1'] = open(upath(__file__), 'rb')
        response = self.client.post(self.wizard_url, post_data)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.context['wizard']['steps'].current, 'form3')
        response = self.client.post(self.wizard_url, self.wizard_step_data[2])
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.context['wizard']['steps'].current, 'form4')
        response = self.client.post(self.wizard_url, self.wizard_step_data[0])
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.context['wizard']['steps'].current, 'form2')
        response = self.client.post(self.wizard_url, self.wizard_step_data[3])
        self.assertEqual(response.status_code, 200)
@skipIfCustomUser
class SessionWizardTests(WizardTests, TestCase):
    # Runs the shared WizardTests suite against the session-backed wizard.
    wizard_url = '/wiz_session/'
    wizard_step_1_data = {
        'session_contact_wizard-current_step': 'form1',
    }
    # Valid POST payloads for steps form1..form4 (form4 is a formset, hence
    # the management-form keys).
    wizard_step_data = (
        {
            'form1-name': 'Pony',
            'form1-thirsty': '2',
            'session_contact_wizard-current_step': 'form1',
        },
        {
            'form2-address1': '123 Main St',
            'form2-address2': 'Djangoland',
            'session_contact_wizard-current_step': 'form2',
        },
        {
            'form3-random_crap': 'blah blah',
            'session_contact_wizard-current_step': 'form3',
        },
        {
            'form4-INITIAL_FORMS': '0',
            'form4-TOTAL_FORMS': '2',
            'form4-MAX_NUM_FORMS': '0',
            'form4-0-random_crap': 'blah blah',
            'form4-1-random_crap': 'blah blah',
            'session_contact_wizard-current_step': 'form4',
        }
    )
@skipIfCustomUser
class CookieWizardTests(WizardTests, TestCase):
    # Runs the shared WizardTests suite against the cookie-backed wizard.
    wizard_url = '/wiz_cookie/'
    wizard_step_1_data = {
        'cookie_contact_wizard-current_step': 'form1',
    }
    # Valid POST payloads for steps form1..form4 (form4 is a formset).
    wizard_step_data = (
        {
            'form1-name': 'Pony',
            'form1-thirsty': '2',
            'cookie_contact_wizard-current_step': 'form1',
        },
        {
            'form2-address1': '123 Main St',
            'form2-address2': 'Djangoland',
            'cookie_contact_wizard-current_step': 'form2',
        },
        {
            'form3-random_crap': 'blah blah',
            'cookie_contact_wizard-current_step': 'form3',
        },
        {
            'form4-INITIAL_FORMS': '0',
            'form4-TOTAL_FORMS': '2',
            'form4-MAX_NUM_FORMS': '0',
            'form4-0-random_crap': 'blah blah',
            'form4-1-random_crap': 'blah blah',
            'cookie_contact_wizard-current_step': 'form4',
        }
    )
@skipIfCustomUser
class WizardTestKwargs(TestCase):
    """Checks that keyword arguments (here: a custom template name) passed
    to the wizard view in the URLconf are honoured."""
    wizard_url = '/wiz_other_template/'
    wizard_step_1_data = {
        'cookie_contact_wizard-current_step': 'form1',
    }
    wizard_step_data = (
        {
            'form1-name': 'Pony',
            'form1-thirsty': '2',
            'cookie_contact_wizard-current_step': 'form1',
        },
        {
            'form2-address1': '123 Main St',
            'form2-address2': 'Djangoland',
            'cookie_contact_wizard-current_step': 'form2',
        },
        {
            'form3-random_crap': 'blah blah',
            'cookie_contact_wizard-current_step': 'form3',
        },
        {
            'form4-INITIAL_FORMS': '0',
            'form4-TOTAL_FORMS': '2',
            'form4-MAX_NUM_FORMS': '0',
            'form4-0-random_crap': 'blah blah',
            'form4-1-random_crap': 'blah blah',
            'cookie_contact_wizard-current_step': 'form4',
        }
    )
    urls = 'django.contrib.formtools.tests.wizard.wizardtests.urls'

    def setUp(self):
        # Bind the created user's pk into the step-1 POST data.
        self.testuser, created = User.objects.get_or_create(username='testuser1')
        self.wizard_step_data[0]['form1-user'] = self.testuser.pk

    def test_template(self):
        # Add this app's templates dir so the alternate template resolves.
        templates = os.path.join(os.path.dirname(upath(__file__)), 'templates')
        with self.settings(
                TEMPLATE_DIRS=list(settings.TEMPLATE_DIRS) + [templates]):
            response = self.client.get(self.wizard_url)
            self.assertTemplateUsed(response, 'other_wizard_form.html')
class WizardTestGenericViewInterface(TestCase):
    """Regression tests for ticket #17148: the wizard's get_context_data
    must cooperate with the generic-view **kwargs protocol and MRO mixins."""

    def test_get_context_data_inheritance(self):
        class TestWizard(CookieWizardView):
            """
            A subclass that implements ``get_context_data`` using the standard
            protocol for generic views (accept only **kwargs).

            See ticket #17148.
            """
            def get_context_data(self, **kwargs):
                context = super(TestWizard, self).get_context_data(**kwargs)
                context['test_key'] = 'test_value'
                return context

        factory = RequestFactory()
        view = TestWizard.as_view([forms.Form])
        response = view(factory.get('/'))
        self.assertEqual(response.context_data['test_key'], 'test_value')

    def test_get_context_data_with_mixin(self):
        # Both the mixin's and the subclass's context keys must survive.
        class AnotherMixin(object):
            def get_context_data(self, **kwargs):
                context = super(AnotherMixin, self).get_context_data(**kwargs)
                context['another_key'] = 'another_value'
                return context

        class TestWizard(AnotherMixin, CookieWizardView):
            """
            A subclass that implements ``get_context_data`` using the standard
            protocol for generic views (accept only **kwargs).

            See ticket #17148.
            """
            def get_context_data(self, **kwargs):
                context = super(TestWizard, self).get_context_data(**kwargs)
                context['test_key'] = 'test_value'
                return context

        factory = RequestFactory()
        view = TestWizard.as_view([forms.Form])
        response = view(factory.get('/'))
        self.assertEqual(response.context_data['test_key'], 'test_value')
        self.assertEqual(response.context_data['another_key'], 'another_value')
@skipIfCustomUser
class WizardFormKwargsOverrideTests(TestCase):
    """Checks that kwargs returned by get_form_kwargs() (instance/queryset)
    reach the step's form or formset unchanged."""

    def setUp(self):
        super(WizardFormKwargsOverrideTests, self).setUp()
        self.rf = RequestFactory()
        # Create two users so we can filter by is_staff when handing our
        # wizard a queryset keyword argument.
        self.normal_user = User.objects.create(username='test1', email='normal@example.com')
        self.staff_user = User.objects.create(username='test2', email='staff@example.com', is_staff=True)

    def test_instance_is_maintained(self):
        self.assertEqual(2, User.objects.count())
        # Despite the name, .get() returns a single User instance; it is used
        # as the ModelForm's ``instance`` kwarg.
        queryset = User.objects.get(pk=self.staff_user.pk)

        class InstanceOverrideWizard(CookieWizardView):
            def get_form_kwargs(self, step):
                return {'instance': queryset}

        view = InstanceOverrideWizard.as_view([UserForm])
        response = view(self.rf.get('/'))
        form = response.context_data['wizard']['form']
        self.assertNotEqual(form.instance.pk, None)
        self.assertEqual(form.instance.pk, self.staff_user.pk)
        self.assertEqual('staff@example.com', form.initial.get('email', None))

    def test_queryset_is_maintained(self):
        queryset = User.objects.filter(pk=self.staff_user.pk)

        class QuerySetOverrideWizard(CookieWizardView):
            def get_form_kwargs(self, step):
                return {'queryset': queryset}

        view = QuerySetOverrideWizard.as_view([UserFormSet])
        response = view(self.rf.get('/'))
        formset = response.context_data['wizard']['form']
        self.assertNotEqual(formset.queryset, None)
        self.assertEqual(formset.initial_form_count(), 1)
        self.assertEqual(['staff@example.com'],
            list(formset.queryset.values_list('email', flat=True)))
| gpl-2.0 |
ojengwa/oh-mainline | vendor/packages/scrapy/scrapy/utils/conf.py | 40 | 2132 | import sys
import os
from ConfigParser import SafeConfigParser
from operator import itemgetter
def build_component_list(base, custom):
    """Compose an ordered component list from *base* overridden by *custom*.

    If *custom* is already a list or tuple it is returned unchanged.
    Otherwise the two dicts (typically middlewares or extensions) are merged
    with *custom* taking precedence, and the component names are returned
    sorted by their order value; entries whose value is None are dropped.
    """
    if isinstance(custom, (list, tuple)):
        return custom
    merged = dict(base)
    merged.update(custom)
    # Sort by the order value only (stable), then drop disabled (None) entries.
    ordered = sorted(merged.items(), key=lambda item: item[1])
    return [name for name, order in ordered if order is not None]
def arglist_to_dict(arglist):
    """Turn command-line style assignments into a dict.

    ``['arg1=val1', 'arg2=val2']`` becomes ``{'arg1': 'val1', 'arg2': 'val2'}``.
    Only the first '=' splits, so values may themselves contain '='; an entry
    without any '=' raises ValueError.
    """
    result = {}
    for entry in arglist:
        key, value = entry.split('=', 1)
        result[key] = value
    return result
def closest_scrapy_cfg(path='.', prevpath=None):
    """Return the path to the closest scrapy.cfg file by traversing the current
    directory and its parents
    """
    # Recursion terminates at the filesystem root: there dirname(path) == path,
    # so the next call sees path == prevpath and returns '' ("not found").
    if path == prevpath:
        return ''
    path = os.path.abspath(path)
    cfgfile = os.path.join(path, 'scrapy.cfg')
    if os.path.exists(cfgfile):
        return cfgfile
    return closest_scrapy_cfg(os.path.dirname(path), path)
def init_env(project='default', set_syspath=True):
    """Initialize environment to use command-line tool from inside a project
    dir. This sets the Scrapy settings module and modifies the Python path to
    be able to locate the project module.
    """
    cfg = get_config()
    # Export the settings module named for this project in scrapy.cfg.
    if cfg.has_option('settings', project):
        os.environ['SCRAPY_SETTINGS_MODULE'] = cfg.get('settings', project)
    closest = closest_scrapy_cfg()
    if closest:
        projdir = os.path.dirname(closest)
        if set_syspath and projdir not in sys.path:
            # Make the project package importable (e.g. myproject.settings).
            sys.path.append(projdir)
def get_config(use_closest=True):
    """Get Scrapy config file as a SafeConfigParser"""
    # Later sources override earlier ones (ConfigParser.read semantics),
    # so the project-local scrapy.cfg wins over system/user configs.
    sources = get_sources(use_closest)
    cfg = SafeConfigParser()
    cfg.read(sources)
    return cfg
def get_sources(use_closest=True):
    # Candidate scrapy.cfg locations in increasing precedence:
    # system-wide (POSIX and Windows), per-user, and optionally the closest
    # project config found by walking up from the current directory.
    sources = ['/etc/scrapy.cfg', r'c:\scrapy\scrapy.cfg', \
        os.path.expanduser('~/.scrapy.cfg')]
    if use_closest:
        sources.append(closest_scrapy_cfg())
    return sources
| agpl-3.0 |
hlzz/dotfiles | graphics/cgal/Documentation/conversion_tools/markup_replacement.py | 1 | 1846 | #!/usr/bin/python2
#replace markup #, ## ,### by \section, \subsection, \subsubsection.
#anchor names are preserved and generated from the section name otherwise
#The script is not perfect and might miss some specific cases
from sys import argv
from os import path
import string
import re
# Registry of anchors generated so far, mapping anchor name to the number of
# duplicates seen; used to suffix repeated anchors with "_<n>".
anchors = {}

def generate_anchor(chapter, text):
    """Build a unique doxygen anchor from *chapter* and the words of *text*.

    Non-alphanumeric characters (and underscores) are stripped from each
    word; words are appended until the anchor reaches 40 characters or the
    text is exhausted.  Repeated anchors get a "_<n>" suffix.  Mutates the
    module-level ``anchors`` registry.
    """
    pattern = re.compile(r'[\W_]+')
    words = text.split()
    i = 1
    res = chapter + pattern.sub('', words[0])
    while len(res) < 40 and i < len(words):
        word = pattern.sub('', words[i])
        res += word
        i += 1
    # BUG FIX: was ``anchors.has_key(res)`` -- dict.has_key() is Python 2
    # only (removed in Python 3); ``in`` works on both.
    if res in anchors:
        anchors[res] += 1
        res += "_" + str(anchors[res])
    else:
        anchors[res] = 0
    return res
# --- Script body (Python 2): rewrite markdown-style '#' headings in the file
# given as argv[1] into doxygen \section/\subsection/\subsubsection lines. ---
f = file(argv[1])
regexp_line = re.compile('^\s*#')
#~ regexp_section=re.compile('^\s*#\s*([ a-b().,]+)\s*#(.*)')
regexp_section = re.compile('^\s*(#+)\s*([0-9a-zA-Z (),.:?%-`\']+[0-9a-zA-Z.?`)])\s*#+(.*)')
regexp_anchor = re.compile('^\s*{#([0-9a-zA-Z_]+)}')
result = ""
diff = False
# Chapter name = parent directory of the processed file; it prefixes anchors.
chapter = path.abspath(argv[1]).split('/')[-2]
for line in f.readlines():
    if regexp_line.match(line):
        m = regexp_section.search(line)
        if m:
            values = m.groups()
            anchor = ''
            if len(values) == 2:
                anchor = generate_anchor(chapter, values[1])
            else:
                # Reuse an explicit {#anchor} after the heading if present,
                # otherwise derive one from the heading text.
                anchor = regexp_anchor.match(values[2])
                if anchor:
                    anchor = anchor.group(1)
                else:
                    anchor = generate_anchor(chapter, values[1])
            # Heading level = number of leading '#' characters (1..3).
            if len(values[0]) == 1:
                result += "\section " + anchor + " " + values[1] + "\n"
            elif len(values[0]) == 2:
                result += "\subsection " + anchor + " " + values[1] + "\n"
            elif len(values[0]) == 3:
                result += "\subsubsection " + anchor + " " + values[1] + "\n"
            else:
                print "Error while processing " + argv[1]
                assert False
            diff = True
        else:
            result += line
    else:
        result += line
f.close()
# Rewrite the file in place only if at least one heading was converted.
if diff:
    f = file(argv[1], 'w')
    f.write(result)
    f.close()
metaml/nupic | src/nupic/math/mvn.py | 50 | 6412 | # ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2013, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
# NOTE: this unconditional raise executes at import time, so the module is
# deliberately disabled ("removing for NuPIC 2" per the message) and
# everything below it is unreachable when imported.
raise Exception("XERROR -- removing for NuPIC 2")

import numpy
from numpy.linalg import svd

# log(2*pi), reused by the Gaussian log-density terms below.
log2pi = numpy.log(2.0 * numpy.pi)
def cov(data, mean=None, prior=None, dfOffset=-1):
    """Covariance matrix of the row-vector observations in *data*.

    mean: value subtracted before forming the scatter matrix.  Defaults to
        ``data.mean()`` -- NOTE(review): that is the grand mean over all
        entries, not per-column means; preserved from the original.
    prior: optional ``(n, SS)`` pair of pseudo-observation count and scatter
        matrix added to the empirical scatter.
    dfOffset: added to the divisor; the default -1 gives the unbiased
        (n-1) normalization.
    """
    x = numpy.asmatrix(data)
    if mean is None:
        mean = x.mean()
    xc = x - mean
    xxt = xc.T * xc
    n = x.shape[0]
    if prior is not None:
        assert len(prior) == 2, "Must be of the form (n, SS)"
        n += prior[0]
        # BUG FIX: removed stray Python 2 debug statement
        # ``print prior[1].shape`` (py2-only syntax, leaked debug output).
        xxt += prior[1]
    n += dfOffset
    assert n > 0
    return (1.0 / n) * xxt
def getRank(d, s):
    """Effective numerical rank for a d-dimensional matrix with singular
    values *s*.

    Returns ``d`` when every singular value is strictly positive; otherwise
    the position of the first non-positive singular value, floored at 1.
    """
    smallest = numpy.min(s)
    if smallest > 0:
        return d
    first_nonpositive = numpy.argmin(s > 0)
    return max(first_nonpositive, 1)
class mvn(object):
    """Multivariate normal with an SVD-factored covariance.

    The covariance is stored alongside its SVD (u, s, vt) so that
    (pseudo-)inverses, matrix square roots and whitening transforms can be
    formed even for rank-deficient covariances; ``rank`` controls how many
    singular values are treated as nonzero.
    """
    def __init__(self, mean, varcov):
        if mean is not None: self.mean = numpy.asarray(mean)
        else: self.mean = None
        varcov = numpy.asmatrix(varcov)
        self.d = varcov.shape[1]
        self.varcov = varcov
        # Keep the SVD; all derived quantities below are built from it.
        self.u, self.s, self.vt = svd(varcov, full_matrices=0, compute_uv=1)
        self.rank = getRank(self.d, self.s)
    def __str__(self):
        return "Mean:\n" + str(self.mean) + "\nCovariance:\n" + str(self.varcov)
    def __repr__(self):
        return "Mean:\n" + repr(self.mean) + "\nCovariance:\n" + repr(self.varcov)
    def limitRank(self, minvar):
        # Treat singular values <= minvar as numerically zero.
        if numpy.min(self.s) > minvar: self.rank = self.d
        else:
            self.rank = max(numpy.argmin(self.s > minvar), 1)
    def setRank(self, rank):
        assert rank <= self.d
        assert rank >= 1
        self.rank = rank
    def s0(self):
        # Singular values with everything past ``rank`` zeroed.
        s = numpy.zeros(len(self.s))
        s[0:self.rank] = self.s[0:self.rank]
        s[self.rank:] = 0
        return s
    def si(self):
        # Reciprocal singular values (spectrum of the pseudo-inverse).
        si = numpy.zeros(len(self.s))
        si[0:self.rank] = 1.0 / self.s[0:self.rank]
        si[self.rank:] = 0
        return si
    def sigma(self):
        # Rank-truncated covariance matrix.
        return self.u * numpy.asmatrix(numpy.diag(self.s0())) * self.vt
    def sigmai(self):
        # Pseudo-inverse of the covariance.
        # return self.vt.T * numpy.asmatrix(numpy.diag(self.si())) * self.u.T
        return self.u * numpy.asmatrix(numpy.diag(self.si())) * self.vt
    def rightRoot(self):
        return numpy.asmatrix(numpy.diag(numpy.sqrt(self.s0()))) * self.vt
    def leftRoot(self):
        return self.u * numpy.asmatrix(numpy.diag(numpy.sqrt(self.s0())))
    def leftInvRoot(self):
        # return self.vt.T * numpy.asmatrix(numpy.diag(numpy.sqrt(self.si())))
        return self.u * numpy.asmatrix(numpy.diag(numpy.sqrt(self.si())))
    def rightInvRoot(self):
        # return numpy.asmatrix(numpy.diag(numpy.sqrt(self.si()))) * self.u.T
        return numpy.asmatrix(numpy.diag(numpy.sqrt(self.si()))) * self.vt
    def sample(self, r=None, n=1):
        # Draw n samples: z ~ N(0, I) rows, affinely transformed by a
        # covariance root and shifted by the mean.
        if r is None: r = numpy.random
        z = r.normal(0, 1, (n, self.d))
        return z * self.rightRoot() + self.mean
    def center(self, x):
        x = numpy.asmatrix(x)
        assert x.shape[1] == self.d
        if self.mean is not None: return (x - self.mean)
        else: return x
    def whiten(self, x):
        xc = self.center(x)
        # Whiten.
        z = xc * self.leftInvRoot()
        return z
    def z2(self, x):
        # Squared Mahalanobis distance for each row of x.
        z = self.whiten(x)
        # Avoid matrix multiplication, just square the rows.
        z = numpy.asarray(z)
        z2 = z * z
        return numpy.sum(z2, axis=1)
    def logDetSigma(self):
        return numpy.sum(numpy.log(self.s[0:self.rank]))
    def logDetInvRoot(self):
        return -0.5 * self.logDetSigma()
    def logK(self):
        # Gaussian normalization constant for a rank-dimensional density.
        return -0.5 * self.rank * log2pi
    def logLikelihood(self, x):
        # Joint log-likelihood of all rows of x.
        z = numpy.asarray(self.whiten(x))
        # z2 = numpy.sum(z * z, axis=1)
        n = len(z)
        return -0.5 * ( n*(self.rank * log2pi + self.logDetSigma()) + numpy.sum(z*z) )
    def logLikelihoods(self, x):
        # Per-row log-density values.
        z2 = self.z2(x)
        return self.logK() + self.logDetInvRoot() - 0.5 * z2
    def logDensity(self, x):
        return self.logLikelihood(x)
class MaskBuilder(object):
    """Builds boolean masks over ``d`` positions via indexing syntax.

    ``MaskBuilder(d)[sel]`` returns a length-``d`` boolean array that is
    True at the positions selected by ``sel`` (an index, slice, or fancy
    index over ``arange(d)``).
    """

    def __init__(self, d):
        self.indices = numpy.arange(d)

    def __getitem__(self, *args):
        # BUG FIX: the original returned the undefined name ``bits`` and
        # always raised NameError.  Build the mask the same way
        # ConditionalMVN.__getitem__ does (selected positions -> True).
        selected = self.indices.__getitem__(*args)
        bits = numpy.zeros(len(self.indices), dtype=bool)
        bits[selected] = True
        return bits
class ConditionalMVN(object):
    """Conditional distributions of a multivariate normal.

    ``ConditionalMVN(dist)[sel]`` partitions *dist* on the variables selected
    by *sel* and returns an ``Impl`` that computes the distribution of the
    remaining variables given observed values for the selected ones.
    """

    def __init__(self, mvn):
        self.mvn = mvn

    class Impl(object):
        """Partitioned mean/covariance plus the pseudo-inverse of the
        conditioning block, precomputed once at construction."""

        def __init__(self, mean1, mean2, Sigma11, Sigma12, Sigma22):
            Sigma11 = numpy.asmatrix(Sigma11)
            Sigma12 = numpy.asmatrix(Sigma12)
            Sigma22 = numpy.asmatrix(Sigma22)
            # Pseudo-inverse of Sigma22 via SVD, truncated at numerical rank.
            u22, s22, vt22 = svd(Sigma22, full_matrices=0, compute_uv=1)
            rank22 = getRank(Sigma22.shape[1], s22)
            s22i = numpy.zeros(len(s22))
            s22i[0:rank22] = 1.0 / s22[0:rank22]
            # Rest are zeroes.
            Sigma22i = u22 * numpy.asmatrix(numpy.diag(s22i)) * vt22
            self.mean1 = mean1
            self.mean2 = mean2
            self.Sigma11 = Sigma11
            self.Sigma12 = Sigma12
            self.Sigma22i = Sigma22i

        def getDistribution(self, given):
            """Return the mvn of the free variables given observed values."""
            given_from_mean = given - self.mean2
            # Keep means in row form.
            # mean = self.mean1 + self.Sigma12 * self.Sigma22i * given_from_mean
            mean = self.mean1 + given_from_mean * self.Sigma22i * self.Sigma12.transpose()
            varcov = self.Sigma11 - (self.Sigma12 * self.Sigma22i * (self.Sigma12.transpose()))
            return mvn(mean, varcov)

        def logDensity(self, x, given):
            # BUG FIX: was ``getDistribution(given)`` -- an unqualified name
            # that raised NameError at call time; the method lives on self.
            return self.getDistribution(given).logDensity(x)

    def __getitem__(self, *args):
        # *args* selects the variables to condition ON; the remaining
        # variables are the ones whose conditional distribution is returned.
        d = self.mvn.d
        indices = numpy.arange(d).__getitem__(*args)
        bits = numpy.repeat(False, d)
        bits[indices] = True
        givenMask = bits  # Should it be this way, or the other way around?
        varMask = ~givenMask
        # Partition the covariance into (free, free), (free, given) and
        # (given, given) blocks.
        C22 = self.mvn.varcov[givenMask, ...][..., givenMask]
        C12 = self.mvn.varcov[varMask, ...][..., givenMask]
        C11 = self.mvn.varcov[varMask, ...][..., varMask]
        return ConditionalMVN.Impl(self.mvn.mean[varMask], self.mvn.mean[givenMask],
                                   C11, C12, C22)
| agpl-3.0 |
crosslinks/XlinkAnalyzer | pytests/XlaGuiTests.py | 1 | 2262 | import chimera
import unittest
from os import path
import xlinkanalyzer
from xlinkanalyzer import gui
# Flags consumed by the surrounding test harness: RUNME gates execution,
# description labels this module in test listings.
RUNME = False
description = "Base classes for testing gui"
class XlaBaseTest(unittest.TestCase):
    """Base test case: opens model files in Chimera, shows the XlinkAnalyzer
    dialog, and loads an assembly configuration from a file."""

    def setUp(self, mPaths, cPath):
        # Test data lives in pytests/test_data next to the xlinkanalyzer package.
        mPath = xlinkanalyzer.__path__[0]
        xlaTestPath = path.join(path.split(mPath)[0], 'pytests/test_data')
        self.xlaTestMPaths = [path.join(xlaTestPath, _path) for _path in mPaths]
        self.xlaTestCPath = path.join(xlaTestPath, cPath)
        # Open every model file in Chimera (list comp used for side effects).
        [chimera.openModels.open(_path) for _path in self.xlaTestMPaths]
        self.models = chimera.openModels.list()
        gui.show_dialog()
        guiWin = xlinkanalyzer.get_gui()
        # Load the assembly config and refresh the config frame UI.
        guiWin.configFrame.resMngr.loadAssembly(guiWin, self.xlaTestCPath)
        guiWin.configFrame.clear()
        guiWin.configFrame.update()
        guiWin.configFrame.mainWindow.setTitle(guiWin.configFrame.config.file)
        guiWin.configFrame.config.state = "unchanged"
        self.config = guiWin.configFrame.config
class TestLoadFromStructure(unittest.TestCase):
    """Variant base: derives the configuration from the last opened
    structure instead of reading a config file."""

    def setUp(self, mPaths):
        mPath = xlinkanalyzer.__path__[0]
        xlaTestPath = path.join(path.split(mPath)[0], 'pytests/test_data')
        self.xlaTestMPaths = [path.join(xlaTestPath, _path) for _path in mPaths]
        # Open every model file in Chimera (list comp used for side effects).
        [chimera.openModels.open(_path) for _path in self.xlaTestMPaths]
        self.models = chimera.openModels.list()
        gui.show_dialog()
        guiWin = xlinkanalyzer.get_gui()
        # Build the config from the most recently opened model.
        guiWin.configFrame.resMngr.config.loadFromStructure(self.models[-1])
        guiWin.configFrame.clear()
        guiWin.configFrame.update()
        guiWin.configFrame.config.state = "changed"
        self.config = guiWin.configFrame.config
class XlaJustOpenXlaTest(unittest.TestCase):
def setUp(self, mPaths, cPath):
mPath = xlinkanalyzer.__path__[0]
xlaTestPath = path.join(path.split(mPath)[0], 'pytests/test_data')
self.xlaTestMPaths = [path.join(xlaTestPath, _path) for _path in mPaths]
self.xlaTestCPath = path.join(xlaTestPath, cPath)
[chimera.openModels.open(_path) for _path in self.xlaTestMPaths]
self.models = chimera.openModels.list()
gui.show_dialog()
guiWin = xlinkanalyzer.get_gui() | gpl-2.0 |
AlexHill/django | tests/raw_query/models.py | 150 | 1034 | from django.db import models
class Author(models.Model):
    """Test model whose __init__ rejects unexpected kwargs, so the raw-query
    tests can prove that annotations are not passed as field values."""
    first_name = models.CharField(max_length=255)
    last_name = models.CharField(max_length=255)
    dob = models.DateField()

    def __init__(self, *args, **kwargs):
        super(Author, self).__init__(*args, **kwargs)
        # Protect against annotations being passed to __init__ --
        # this'll make the test suite get angry if annotations aren't
        # treated differently than fields.
        for k in kwargs:
            assert k in [f.attname for f in self._meta.fields], \
                "Author.__init__ got an unexpected parameter: %s" % k
class Book(models.Model):
    # Simple related model: each book has one author.
    title = models.CharField(max_length=255)
    author = models.ForeignKey(Author)
    paperback = models.BooleanField(default=False)
    opening_line = models.TextField()
class Coffee(models.Model):
    # db_column remaps the attribute 'brand' to a DB column named 'name',
    # letting raw-query tests exercise column/attribute translation.
    brand = models.CharField(max_length=255, db_column="name")
class Reviewer(models.Model):
    # Many-to-many relation used to test raw queries across join tables.
    reviewed = models.ManyToManyField(Book)
class FriendlyAuthor(Author):
    # Multi-table inheritance child of Author (no extra fields).
    pass
| bsd-3-clause |
Sofcom/treeio | treeio/events/models.py | 6 | 1430 | # encoding: utf-8
# Copyright 2011 Tree.io Limited
# This file is part of Treeio.
# License www.tree.io/license
"""
Events module objects.
Depends on: treeio.core, treeio.identities
"""
from django.db import models
from django.core.urlresolvers import reverse
from treeio.identities.models import Contact
from treeio.core.models import Object, Location
class Event(Object):
    """ Single Event """
    name = models.CharField(max_length=255)
    # Location is optional; SET_NULL keeps the event if its location is deleted.
    location = models.ForeignKey(
        Location, blank=True, null=True, on_delete=models.SET_NULL)
    details = models.TextField(max_length=255, null=True, blank=True)
    # Start is optional, end is mandatory; default ordering is latest-ending first.
    start = models.DateTimeField(null=True, blank=True)
    end = models.DateTimeField()

    class Meta:
        "Event"
        ordering = ['-end']

    def __unicode__(self):
        return self.name

    def get_absolute_url(self):
        "Returns absolute URL of the object"
        try:
            return reverse('events_event_view', args=[self.id])
        except Exception:
            # Deliberate best-effort: if the URL cannot be reversed,
            # fall back to an empty string rather than propagate.
            return ""
class Invitation(models.Model):
    """ Invitation to an Event """
    contact = models.ForeignKey(Contact)
    event = models.ForeignKey(Event)
    # RSVP state of the invited contact.
    status = models.CharField(max_length=255, choices=(('attending', 'Attending'),
                                                       ('pending', 'Pending'),
                                                       ('not-attending', 'Not Attending')))
| mit |
bgris/ODL_bgris | lib/python3.5/site-packages/odl/util/pytest_plugins.py | 2 | 5563 | # Copyright 2014-2016 The ODL development group
#
# This file is part of ODL.
#
# ODL is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ODL is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with ODL. If not, see <http://www.gnu.org/licenses/>.
"""Test configuration file."""
from __future__ import print_function, division, absolute_import
from future import standard_library
standard_library.install_aliases()
import numpy as np
import operator
import os
import odl
from odl.trafos.backends import PYFFTW_AVAILABLE, PYWT_AVAILABLE
from odl.util import dtype_repr, OptionalArgDecorator
try:
    from pytest import fixture
except ImportError:
    # Make trivial decorator
    # Fallback when pytest is not installed: ``fixture`` becomes a no-op
    # decorator so this module can still be imported.
    class fixture(OptionalArgDecorator):
        @staticmethod
        def _wrapper(f, *a, **kw):
            # Return the decorated function unchanged.
            return f
# --- Add numpy and ODL to all doctests ---


@fixture(autouse=True)
def add_doctest_np_odl(doctest_namespace):
    """Inject ``np`` and ``odl`` into every doctest's namespace."""
    doctest_namespace['np'] = np
    doctest_namespace['odl'] = odl
def pytest_addoption(parser):
    """Register ODL's custom command-line flags with pytest."""
    option_specs = [
        ('--largescale', 'Run large and slow tests'),
        ('--benchmark', 'Run benchmarks'),
        ('--examples', 'Run examples'),
        ('--doctest-doc', 'Run doctests in the documentation'),
    ]
    for flag, help_text in option_specs:
        parser.addoption(flag, action='store_true', help=help_text)
# --- Ignored tests due to missing modules ---

# Paths pytest should skip during collection.  setup.py is always skipped;
# backend bindings are skipped when their optional dependency is missing.
this_dir = os.path.dirname(__file__)
odl_root = os.path.abspath(os.path.join(this_dir, os.pardir, os.pardir))
collect_ignore = [os.path.join(odl_root, 'setup.py')]

if not PYFFTW_AVAILABLE:
    collect_ignore.append(
        os.path.join(odl_root, 'odl', 'trafos', 'backends',
                     'pyfftw_bindings.py'))

if not PYWT_AVAILABLE:
    collect_ignore.append(
        os.path.join(odl_root, 'odl', 'trafos', 'backends',
                     'pywt_bindings.py'))
    # Currently `pywt` is the only implementation
    collect_ignore.append(
        os.path.join(odl_root, 'odl', 'trafos', 'wavelet.py'))
def pytest_ignore_collect(path, config):
    # Pytest hook: skip collection of any path in the collect_ignore list.
    # NOTE(review): only the candidate path is normcase'd, not the entries
    # in collect_ignore -- verify this matches on case-insensitive platforms.
    return os.path.normcase(str(path)) in collect_ignore
# --- Reusable fixtures ---
# Parametrize over every registered FnBase implementation name.
fn_impl_params = odl.FN_IMPLS.keys()
fn_impl_ids = [" impl = '{}' ".format(p) for p in fn_impl_params]


@fixture(scope="module", ids=fn_impl_ids, params=fn_impl_params)
def fn_impl(request):
    """String with an available `FnBase` implementation name."""
    return request.param
# Parametrize over every registered NtuplesBase implementation name.
ntuples_impl_params = odl.NTUPLES_IMPLS.keys()
ntuples_impl_ids = [" impl = '{}' ".format(p) for p in ntuples_impl_params]


@fixture(scope="module", ids=ntuples_impl_ids, params=ntuples_impl_params)
def ntuples_impl(request):
    """String with an available `NtuplesBase` implementation name."""
    return request.param
# All real and complex floating-point scalar types known to numpy.
floating_dtype_params = np.sctypes['float'] + np.sctypes['complex']
floating_dtype_ids = [' dtype = {} '.format(dtype_repr(dt))
                      for dt in floating_dtype_params]


@fixture(scope="module", ids=floating_dtype_ids, params=floating_dtype_params)
def floating_dtype(request):
    """Floating point (real or complex) dtype."""
    return request.param
scalar_dtype_params = (floating_dtype_params +
                       np.sctypes['int'] +
                       np.sctypes['uint'])
scalar_dtype_ids = [' dtype = {} '.format(dtype_repr(dtype))
                    for dtype in scalar_dtype_params]
@fixture(scope="module", ids=scalar_dtype_ids, params=scalar_dtype_params)
def scalar_dtype(request):
    """An integer, real or complex scalar dtype."""
    return request.param
ufunc_params = odl.util.ufuncs.UFUNCS
ufunc_ids = [' ufunc = {} '.format(entry[0]) for entry in ufunc_params]
@fixture(scope="module", ids=ufunc_ids, params=ufunc_params)
def ufunc(request):
    """Tuple describing a single ufunc.
    Returns
    -------
    name : str
        Name of the ufunc.
    n_in : int
        Number of input values of the ufunc.
    n_out : int
        Number of output values of the ufunc.
    doc : str
        Docstring for the ufunc.
    """
    return request.param
reduction_params = odl.util.ufuncs.REDUCTIONS
reduction_ids = [' reduction = {} '.format(entry[0])
                 for entry in reduction_params]
@fixture(scope="module", ids=reduction_ids, params=reduction_params)
def reduction(request):
    """Tuple describing a single reduction.
    Returns
    -------
    name : str
        Name of the reduction.
    doc : str
        Docstring for the reduction.
    """
    return request.param
arithmetic_op_par = [operator.add, operator.truediv, operator.mul,
                     operator.sub, operator.iadd, operator.itruediv,
                     operator.imul, operator.isub]
arithmetic_op_ids = [' + ', ' / ', ' * ', ' - ',
                     ' += ', ' /= ', ' *= ', ' -= ']
@fixture(ids=arithmetic_op_ids, params=arithmetic_op_par)
def arithmetic_op(request):
    """A binary or in-place arithmetic operator, e.g. +, -, *= etc."""
    return request.param
| gpl-3.0 |
Huyuwei/tvm | tests/python/unittest/test_pass_attrs_hash_equal.py | 2 | 2138 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import tvm
def test_attrs_equal():
    """AttrsEqual: structural equality over attrs nodes, dicts and arrays."""
    attrs_a = tvm.make.node("attrs.TestAttrs", name="xx", padding=(3, 4))
    attrs_b = tvm.make.node("attrs.TestAttrs", name="xx", padding=(3, 4))
    attrs_c = tvm.make.node("attrs.TestAttrs", name="xx", padding=(3,4,1))
    assert tvm.ir_pass.AttrsEqual(attrs_a, attrs_b)
    assert not tvm.ir_pass.AttrsEqual(attrs_a, attrs_c)
    dict_attrs = tvm.make.node("DictAttrs", x=1, y=10, name="xyz", padding=(0,0))
    assert not tvm.ir_pass.AttrsEqual(dict_attrs, attrs_a)
    dict_attrs2 = tvm.make.node("DictAttrs", x=1, y=10, name="xyz", padding=(0,0))
    assert tvm.ir_pass.AttrsEqual(dict_attrs, dict_attrs2)
    assert tvm.ir_pass.AttrsEqual({"x": attrs_a}, {"x": attrs_b})
    # array related checks
    assert tvm.ir_pass.AttrsEqual({"x": [attrs_a, attrs_a]}, {"x": [attrs_b, attrs_a]})
    assert not tvm.ir_pass.AttrsEqual({"x": [attrs_a, 1]}, {"x": [attrs_b, 2]})
    sym = tvm.var("n")
    assert tvm.ir_pass.AttrsEqual({"x": sym+1}, {"x": sym+1})
def test_attrs_hash():
    """AttrsHash: equal attrs hash alike; different structures differ."""
    fhash = tvm.ir_pass.AttrsHash
    attrs_a = tvm.make.node("attrs.TestAttrs", name="xx", padding=(3, 4))
    attrs_b = tvm.make.node("attrs.TestAttrs", name="xx", padding=(3, 4))
    assert fhash({"x": attrs_a}) == fhash({"x": attrs_b})
    assert fhash({"x": attrs_a}) != fhash({"x": [attrs_b, 1]})
    assert fhash({"x": [attrs_a, 1]}) == fhash({"x": [attrs_b, 1]})
    assert fhash({"x": [attrs_a, 2]}) == fhash({"x": [attrs_b, 2]})
if __name__ == "__main__":
test_attrs_equal()
test_attrs_hash()
| apache-2.0 |
cognitiveclass/edx-platform | cms/envs/dev.py | 25 | 5958 | """
This config file runs the simplest dev environment"""
# We intentionally define lots of variables that aren't used, and
# want to import all variables from base settings files
# pylint: disable=wildcard-import, unused-wildcard-import
from .common import *
from openedx.core.lib.logsettings import get_logger_config
# import settings from LMS for consistent behavior with CMS
from lms.envs.dev import (WIKI_ENABLED)
# Development mode: full Django debug output, plain HTTP.
DEBUG = True
TEMPLATE_DEBUG = DEBUG
HTTPS = 'off'
# Verbose file + console logging under ENV_ROOT/log for the dev environment.
LOGGING = get_logger_config(ENV_ROOT / "log",
                            logging_env="dev",
                            tracking_filename="tracking.log",
                            dev_env=True,
                            debug=True)
# Serve course content straight out of the local git checkout root.
update_module_store_settings(
    MODULESTORE,
    module_store_options={
        'default_class': 'xmodule.raw_module.RawDescriptor',
        'fs_root': GITHUB_REPO_ROOT,
    }
)
# django-pyfs storage: plain OS filesystem under cms/static/djpyfs.
DJFS = {
    'type': 'osfs',
    'directory_root': 'cms/static/djpyfs',
    'url_root': '/static/djpyfs'
}
# cdodge: This is the specifier for the MongoDB (using GridFS) backed static content store
# This is for static content for courseware, not system static content (e.g. javascript, css, edX branding, etc)
CONTENTSTORE = {
    'ENGINE': 'xmodule.contentstore.mongo.MongoContentStore',
    'DOC_STORE_CONFIG': {
        'host': 'localhost',
        'db': 'xcontent',
    },
    # allow for additional options that can be keyed on a name, e.g. 'trashcan'
    'ADDITIONAL_OPTIONS': {
        'trashcan': {
            'bucket': 'trash_fs'
        }
    }
}
# Local SQLite database; ATOMIC_REQUESTS wraps each request in a transaction.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': ENV_ROOT / "db" / "edx.db",
        'ATOMIC_REQUESTS': True,
    }
}
# Point Studio at the locally running LMS instance.
LMS_BASE = "localhost:8000"
FEATURES['PREVIEW_LMS_BASE'] = "localhost:8000"
# Git repositories importable as course content in this dev setup.
REPOS = {
    'edx4edx': {
        'branch': 'master',
        'origin': 'git@github.com:MITx/edx4edx.git',
    },
    'content-mit-6002x': {
        'branch': 'master',
        # 'origin': 'git@github.com:MITx/6002x-fall-2012.git',
        'origin': 'git@github.com:MITx/content-mit-6002x.git',
    },
    '6.00x': {
        'branch': 'master',
        'origin': 'git@github.com:MITx/6.00x.git',
    },
    '7.00x': {
        'branch': 'master',
        'origin': 'git@github.com:MITx/7.00x.git',
    },
    '3.091x': {
        'branch': 'master',
        'origin': 'git@github.com:MITx/3.091x.git',
    },
}
CACHES = {
    # This is the cache used for most things. Askbot will not work without a
    # functioning cache -- it relies on caching to load its settings in places.
    # In staging/prod envs, the sessions also live here.
    'default': {
        'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
        'LOCATION': 'edx_loc_mem_cache',
        'KEY_FUNCTION': 'util.memcache.safe_key',
    },
    # The general cache is what you get if you use our util.cache. It's used for
    # things like caching the course.xml file for different A/B test groups.
    # We set it to be a DummyCache to force reloading of course.xml in dev.
    # In staging environments, we would grab VERSION from data uploaded by the
    # push process.
    'general': {
        'BACKEND': 'django.core.cache.backends.dummy.DummyCache',
        'KEY_PREFIX': 'general',
        'VERSION': 4,
        'KEY_FUNCTION': 'util.memcache.safe_key',
    },
    # File-backed cache so inheritance data survives process restarts.
    'mongo_metadata_inheritance': {
        'BACKEND': 'django.core.cache.backends.filebased.FileBasedCache',
        'LOCATION': '/var/tmp/mongo_metadata_inheritance',
        'TIMEOUT': 300,
        'KEY_FUNCTION': 'util.memcache.safe_key',
    },
    'loc_cache': {
        'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
        'LOCATION': 'edx_location_mem_cache',
    },
    'course_structure_cache': {
        'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
        'LOCATION': 'edx_course_structure_mem_cache',
    },
}
# Make the keyedcache startup warnings go away
CACHE_TIMEOUT = 0
# Dummy secret key for dev
SECRET_KEY = '85920908f28904ed733fe576320db18cabd7b6cd'
################################ PIPELINE #################################
PIPELINE_SASS_ARGUMENTS = '--debug-info --require {proj_dir}/static/sass/bourbon/lib/bourbon.rb'.format(proj_dir=PROJECT_ROOT)
################################# CELERY ######################################
# By default don't use a worker, execute tasks as if they were local functions
CELERY_ALWAYS_EAGER = True
################################ DEBUG TOOLBAR #################################
INSTALLED_APPS += ('debug_toolbar', 'debug_toolbar_mongo', 'djpyfs')
MIDDLEWARE_CLASSES += ('debug_toolbar.middleware.DebugToolbarMiddleware',)
INTERNAL_IPS = ('127.0.0.1',)
DEBUG_TOOLBAR_PANELS = (
    'debug_toolbar.panels.versions.VersionsPanel',
    'debug_toolbar.panels.timer.TimerPanel',
    'debug_toolbar.panels.settings.SettingsPanel',
    'debug_toolbar.panels.headers.HeadersPanel',
    'debug_toolbar.panels.request.RequestPanel',
    'debug_toolbar.panels.sql.SQLPanel',
    'debug_toolbar.panels.signals.SignalsPanel',
    'debug_toolbar.panels.logging.LoggingPanel',
    'debug_toolbar.panels.profiling.ProfilingPanel',
)
# To see stacktraces for MongoDB queries, set this to True.
# Stacktraces slow down page loads drastically (for pages with lots of queries).
DEBUG_TOOLBAR_MONGO_STACKTRACES = False
# Enable URL that shows information about the status of various services
FEATURES['ENABLE_SERVICE_STATUS'] = True
############################# SEGMENT-IO ##################################
# If there's an environment variable set, grab it to turn on Segment
# Note that this is the Studio key. There is a separate key for the LMS.
import os
CMS_SEGMENT_KEY = os.environ.get('SEGMENT_KEY')
#####################################################################
# Lastly, see if the developer has any local overrides.
try:
    from .private import *  # pylint: disable=import-error
except ImportError:
    # No private.py present -- run with the defaults above.
    pass
| agpl-3.0 |
WarrickJiang/linux-stable | scripts/analyze_suspend.py | 1537 | 120394 | #!/usr/bin/python
#
# Tool for analyzing suspend/resume timing
# Copyright (c) 2013, Intel Corporation.
#
# This program is free software; you can redistribute it and/or modify it
# under the terms and conditions of the GNU General Public License,
# version 2, as published by the Free Software Foundation.
#
# This program is distributed in the hope it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin St - Fifth Floor, Boston, MA 02110-1301 USA.
#
# Authors:
# Todd Brandt <todd.e.brandt@linux.intel.com>
#
# Description:
# This tool is designed to assist kernel and OS developers in optimizing
# their linux stack's suspend/resume time. Using a kernel image built
# with a few extra options enabled, the tool will execute a suspend and
# will capture dmesg and ftrace data until resume is complete. This data
# is transformed into a device timeline and a callgraph to give a quick
# and detailed view of which devices and callbacks are taking the most
# time in suspend/resume. The output is a single html file which can be
# viewed in firefox or chrome.
#
# The following kernel build options are required:
# CONFIG_PM_DEBUG=y
# CONFIG_PM_SLEEP_DEBUG=y
# CONFIG_FTRACE=y
# CONFIG_FUNCTION_TRACER=y
# CONFIG_FUNCTION_GRAPH_TRACER=y
#
# For kernel versions older than 3.15:
# The following additional kernel parameters are required:
# (e.g. in file /etc/default/grub)
# GRUB_CMDLINE_LINUX_DEFAULT="... initcall_debug log_buf_len=16M ..."
#
# ----------------- LIBRARIES --------------------
import sys
import time
import os
import string
import re
import platform
from datetime import datetime
import struct
# ----------------- CLASSES --------------------
# Class: SystemValues
# Description:
# A global, single-instance container used to
# store system values and test parameters
class SystemValues:
	"""Global container for system paths and test parameters (one instance)."""
	version = 3.0
	verbose = False
	testdir = '.'
	tpath = '/sys/kernel/debug/tracing/'
	fpdtpath = '/sys/firmware/acpi/tables/FPDT'
	epath = '/sys/kernel/debug/tracing/events/power/'
	traceevents = [
		'suspend_resume',
		'device_pm_callback_end',
		'device_pm_callback_start'
	]
	modename = {
		'freeze': 'Suspend-To-Idle (S0)',
		'standby': 'Power-On Suspend (S1)',
		'mem': 'Suspend-to-RAM (S3)',
		'disk': 'Suspend-to-disk (S4)'
	}
	mempath = '/dev/mem'
	powerfile = '/sys/power/state'
	suspendmode = 'mem'
	hostname = 'localhost'
	prefix = 'test'
	teststamp = ''
	dmesgfile = ''
	ftracefile = ''
	htmlfile = ''
	rtcwake = False
	rtcwaketime = 10
	rtcpath = ''
	android = False
	adb = 'adb'
	devicefilter = []
	stamp = 0
	execcount = 1
	x2delay = 0
	usecallgraph = False
	usetraceevents = False
	usetraceeventsonly = False
	notestrun = False
	altdevname = dict()
	postresumetime = 0
	# regex formats for parsing stamps/headers out of saved log files
	tracertypefmt = '# tracer: (?P<t>.*)'
	firmwarefmt = '# fwsuspend (?P<s>[0-9]*) fwresume (?P<r>[0-9]*)$'
	postresumefmt = '# post resume time (?P<t>[0-9]*)$'
	stampfmt = '# suspend-(?P<m>[0-9]{2})(?P<d>[0-9]{2})(?P<y>[0-9]{2})-'+\
				'(?P<H>[0-9]{2})(?P<M>[0-9]{2})(?P<S>[0-9]{2})'+\
				' (?P<host>.*) (?P<mode>.*) (?P<kernel>.*)$'
	def __init__(self):
		"""Detect the hostname and, if present, the rtc sysfs path."""
		self.hostname = platform.node()
		if(self.hostname == ''):
			self.hostname = 'localhost'
		rtc = "rtc0"
		if os.path.exists('/dev/rtc'):
			rtc = os.readlink('/dev/rtc')
		rtc = '/sys/class/rtc/'+rtc
		# only accept the rtc if all the needed sysfs entries exist
		if os.path.exists(rtc) and os.path.exists(rtc+'/date') and \
			os.path.exists(rtc+'/time') and os.path.exists(rtc+'/wakealarm'):
			self.rtcpath = rtc
	def setOutputFile(self):
		"""Derive the html output name from the dmesg/ftrace input names."""
		if((self.htmlfile == '') and (self.dmesgfile != '')):
			m = re.match('(?P<name>.*)_dmesg\.txt$', self.dmesgfile)
			if(m):
				self.htmlfile = m.group('name')+'.html'
		if((self.htmlfile == '') and (self.ftracefile != '')):
			m = re.match('(?P<name>.*)_ftrace\.txt$', self.ftracefile)
			if(m):
				self.htmlfile = m.group('name')+'.html'
		if(self.htmlfile == ''):
			self.htmlfile = 'output.html'
	def initTestOutput(self, subdir):
		"""Create the timestamped output dir and set all output file names."""
		if(not self.android):
			self.prefix = self.hostname
			v = open('/proc/version', 'r').read().strip()
			kver = string.split(v)[2]
		else:
			# android: read the kernel version over adb
			self.prefix = 'android'
			v = os.popen(self.adb+' shell cat /proc/version').read().strip()
			kver = string.split(v)[2]
		testtime = datetime.now().strftime('suspend-%m%d%y-%H%M%S')
		if(subdir != "."):
			self.testdir = subdir+"/"+testtime
		else:
			self.testdir = testtime
		self.teststamp = \
			'# '+testtime+' '+self.prefix+' '+self.suspendmode+' '+kver
		self.dmesgfile = \
			self.testdir+'/'+self.prefix+'_'+self.suspendmode+'_dmesg.txt'
		self.ftracefile = \
			self.testdir+'/'+self.prefix+'_'+self.suspendmode+'_ftrace.txt'
		self.htmlfile = \
			self.testdir+'/'+self.prefix+'_'+self.suspendmode+'.html'
		os.mkdir(self.testdir)
	def setDeviceFilter(self, devnames):
		"""Set the device filter list from a whitespace-separated string."""
		self.devicefilter = string.split(devnames)
	def rtcWakeAlarm(self):
		"""Program the rtc wakealarm to fire rtcwaketime seconds from now."""
		os.system('echo 0 > '+self.rtcpath+'/wakealarm')
		outD = open(self.rtcpath+'/date', 'r').read().strip()
		outT = open(self.rtcpath+'/time', 'r').read().strip()
		mD = re.match('^(?P<y>[0-9]*)-(?P<m>[0-9]*)-(?P<d>[0-9]*)', outD)
		mT = re.match('^(?P<h>[0-9]*):(?P<m>[0-9]*):(?P<s>[0-9]*)', outT)
		if(mD and mT):
			# get the current time from hardware
			utcoffset = int((datetime.now() - datetime.utcnow()).total_seconds())
			dt = datetime(\
				int(mD.group('y')), int(mD.group('m')), int(mD.group('d')),
				int(mT.group('h')), int(mT.group('m')), int(mT.group('s')))
			nowtime = int(dt.strftime('%s')) + utcoffset
		else:
			# if hardware time fails, use the software time
			nowtime = int(datetime.now().strftime('%s'))
		alarm = nowtime + self.rtcwaketime
		os.system('echo %d > %s/wakealarm' % (alarm, self.rtcpath))
sysvals = SystemValues()
# Class: DeviceNode
# Description:
# A container used to create a device hierachy, with a single root node
# and a tree of child nodes. Used by Data.deviceTopology()
class DeviceNode:
	"""One node of the device parent/child topology tree."""
	name = ''
	children = 0
	depth = 0
	def __init__(self, nodename, nodedepth):
		# store identity first, then start with an empty child list
		self.depth = nodedepth
		self.name = nodename
		self.children = []
# Class: Data
# Description:
# The primary container for suspend/resume test data. There is one for
# each test run. The data is organized into a cronological hierarchy:
# Data.dmesg {
# root structure, started as dmesg & ftrace, but now only ftrace
# contents: times for suspend start/end, resume start/end, fwdata
# phases {
# 10 sequential, non-overlapping phases of S/R
# contents: times for phase start/end, order/color data for html
# devlist {
# device callback or action list for this phase
# device {
# a single device callback or generic action
# contents: start/stop times, pid/cpu/driver info
# parents/children, html id for timeline/callgraph
# optionally includes an ftrace callgraph
# optionally includes intradev trace events
# }
# }
# }
# }
#
class Data:
	dmesg = {} # root data structure
	phases = [] # ordered list of phases
	start = 0.0 # test start
	end = 0.0 # test end
	tSuspended = 0.0 # low-level suspend start
	tResumed = 0.0 # low-level resume start
	tLow = 0.0 # time spent in low-level suspend (standby/freeze)
	fwValid = False # is firmware data available
	fwSuspend = 0 # time spent in firmware suspend
	fwResume = 0 # time spent in firmware resume
	dmesgtext = [] # dmesg text file in memory
	testnumber = 0
	idstr = ''
	html_device_id = 0
	stamp = 0
	outfile = ''
	def __init__(self, num):
		"""Create the empty 10-phase structure for test run number num."""
		idchar = 'abcdefghijklmnopqrstuvwxyz'
		self.testnumber = num
		# one letter per test run, used to build unique html element ids
		self.idstr = idchar[num]
		self.dmesgtext = []
		self.phases = []
		self.dmesg = { # fixed list of 10 phases
			'suspend_prepare': {'list': dict(), 'start': -1.0, 'end': -1.0,
								'row': 0, 'color': '#CCFFCC', 'order': 0},
			'suspend': {'list': dict(), 'start': -1.0, 'end': -1.0,
								'row': 0, 'color': '#88FF88', 'order': 1},
			'suspend_late': {'list': dict(), 'start': -1.0, 'end': -1.0,
								'row': 0, 'color': '#00AA00', 'order': 2},
			'suspend_noirq': {'list': dict(), 'start': -1.0, 'end': -1.0,
								'row': 0, 'color': '#008888', 'order': 3},
			'suspend_machine': {'list': dict(), 'start': -1.0, 'end': -1.0,
								'row': 0, 'color': '#0000FF', 'order': 4},
			'resume_machine': {'list': dict(), 'start': -1.0, 'end': -1.0,
								'row': 0, 'color': '#FF0000', 'order': 5},
			'resume_noirq': {'list': dict(), 'start': -1.0, 'end': -1.0,
								'row': 0, 'color': '#FF9900', 'order': 6},
			'resume_early': {'list': dict(), 'start': -1.0, 'end': -1.0,
								'row': 0, 'color': '#FFCC00', 'order': 7},
			'resume': {'list': dict(), 'start': -1.0, 'end': -1.0,
								'row': 0, 'color': '#FFFF88', 'order': 8},
			'resume_complete': {'list': dict(), 'start': -1.0, 'end': -1.0,
								'row': 0, 'color': '#FFFFCC', 'order': 9}
		}
		self.phases = self.sortedPhases()
	def getStart(self):
		"""Start time of the first phase."""
		return self.dmesg[self.phases[0]]['start']
	def setStart(self, time):
		"""Set the test start and the first phase's start time."""
		self.start = time
		self.dmesg[self.phases[0]]['start'] = time
	def getEnd(self):
		"""End time of the last phase."""
		return self.dmesg[self.phases[-1]]['end']
	def setEnd(self, time):
		"""Set the test end and the last phase's end time."""
		self.end = time
		self.dmesg[self.phases[-1]]['end'] = time
	def isTraceEventOutsideDeviceCalls(self, pid, time):
		"""True if no device callback of this pid covers the given time."""
		for phase in self.phases:
			list = self.dmesg[phase]['list']
			for dev in list:
				d = list[dev]
				if(d['pid'] == pid and time >= d['start'] and
					time <= d['end']):
					return False
		return True
	def addIntraDevTraceEvent(self, action, name, pid, time):
		"""Attach a trace event to the device callback that covers time."""
		if(action == 'mutex_lock_try'):
			color = 'red'
		elif(action == 'mutex_lock_pass'):
			color = 'green'
		elif(action == 'mutex_unlock'):
			color = 'blue'
		else:
			# create separate colors based on the name
			v1 = len(name)*10 % 256
			v2 = string.count(name, 'e')*100 % 256
			v3 = ord(name[0])*20 % 256
			color = '#%06X' % ((v1*0x10000) + (v2*0x100) + v3)
		for phase in self.phases:
			list = self.dmesg[phase]['list']
			for dev in list:
				d = list[dev]
				if(d['pid'] == pid and time >= d['start'] and
					time <= d['end']):
					e = TraceEvent(action, name, color, time)
					if('traceevents' not in d):
						d['traceevents'] = []
					d['traceevents'].append(e)
					return d
					break
		return 0
	def capIntraDevTraceEvent(self, action, name, pid, time):
		"""Close the still-open matching trace event, fixing its length."""
		for phase in self.phases:
			list = self.dmesg[phase]['list']
			for dev in list:
				d = list[dev]
				if(d['pid'] == pid and time >= d['start'] and
					time <= d['end']):
					if('traceevents' not in d):
						return
					for e in d['traceevents']:
						if(e.action == action and
							e.name == name and not e.ready):
							e.length = time - e.time
							e.ready = True
							break
					return
	def trimTimeVal(self, t, t0, dT, left):
		"""Shift time t by dT if it lies beyond the cut point t0."""
		if left:
			if(t > t0):
				if(t - dT < t0):
					return t0
				return t - dT
			else:
				return t
		else:
			if(t < t0 + dT):
				if(t > t0):
					return t0 + dT
				return t + dT
			else:
				return t
	def trimTime(self, t0, dT, left):
		"""Remove a dT-wide gap at t0 from every stored timestamp."""
		self.tSuspended = self.trimTimeVal(self.tSuspended, t0, dT, left)
		self.tResumed = self.trimTimeVal(self.tResumed, t0, dT, left)
		self.start = self.trimTimeVal(self.start, t0, dT, left)
		self.end = self.trimTimeVal(self.end, t0, dT, left)
		for phase in self.phases:
			p = self.dmesg[phase]
			p['start'] = self.trimTimeVal(p['start'], t0, dT, left)
			p['end'] = self.trimTimeVal(p['end'], t0, dT, left)
			list = p['list']
			for name in list:
				d = list[name]
				d['start'] = self.trimTimeVal(d['start'], t0, dT, left)
				d['end'] = self.trimTimeVal(d['end'], t0, dT, left)
				if('ftrace' in d):
					cg = d['ftrace']
					cg.start = self.trimTimeVal(cg.start, t0, dT, left)
					cg.end = self.trimTimeVal(cg.end, t0, dT, left)
					for line in cg.list:
						line.time = self.trimTimeVal(line.time, t0, dT, left)
				if('traceevents' in d):
					for e in d['traceevents']:
						e.time = self.trimTimeVal(e.time, t0, dT, left)
	def normalizeTime(self, tZero):
		"""Clip out low-power clock time and rebase everything to tZero."""
		# first trim out any standby or freeze clock time
		if(self.tSuspended != self.tResumed):
			if(self.tResumed > tZero):
				self.trimTime(self.tSuspended, \
					self.tResumed-self.tSuspended, True)
			else:
				self.trimTime(self.tSuspended, \
					self.tResumed-self.tSuspended, False)
		# shift the timeline so that tZero is the new 0
		self.tSuspended -= tZero
		self.tResumed -= tZero
		self.start -= tZero
		self.end -= tZero
		for phase in self.phases:
			p = self.dmesg[phase]
			p['start'] -= tZero
			p['end'] -= tZero
			list = p['list']
			for name in list:
				d = list[name]
				d['start'] -= tZero
				d['end'] -= tZero
				if('ftrace' in d):
					cg = d['ftrace']
					cg.start -= tZero
					cg.end -= tZero
					for line in cg.list:
						line.time -= tZero
				if('traceevents' in d):
					for e in d['traceevents']:
						e.time -= tZero
	def newPhaseWithSingleAction(self, phasename, devname, start, end, color):
		"""Prepend a new phase that contains a single synthetic device."""
		for phase in self.phases:
			self.dmesg[phase]['order'] += 1
		self.html_device_id += 1
		devid = '%s%d' % (self.idstr, self.html_device_id)
		list = dict()
		list[devname] = \
			{'start': start, 'end': end, 'pid': 0, 'par': '',
			'length': (end-start), 'row': 0, 'id': devid, 'drv': '' };
		self.dmesg[phasename] = \
			{'list': list, 'start': start, 'end': end,
			'row': 0, 'color': color, 'order': 0}
		self.phases = self.sortedPhases()
	def newPhase(self, phasename, start, end, color, order):
		"""Insert an empty phase at the given order (or append if < 0)."""
		if(order < 0):
			order = len(self.phases)
		for phase in self.phases[order:]:
			self.dmesg[phase]['order'] += 1
		if(order > 0):
			# clip the previous phase's end to our start
			p = self.phases[order-1]
			self.dmesg[p]['end'] = start
		if(order < len(self.phases)):
			# clip the next phase's start to our end
			p = self.phases[order]
			self.dmesg[p]['start'] = end
		list = dict()
		self.dmesg[phasename] = \
			{'list': list, 'start': start, 'end': end,
			'row': 0, 'color': color, 'order': order}
		self.phases = self.sortedPhases()
	def setPhase(self, phase, ktime, isbegin):
		"""Record a phase boundary time (start when isbegin, else end)."""
		if(isbegin):
			self.dmesg[phase]['start'] = ktime
		else:
			self.dmesg[phase]['end'] = ktime
	def dmesgSortVal(self, phase):
		"""Sort key: the phase's order field."""
		return self.dmesg[phase]['order']
	def sortedPhases(self):
		"""Phase names sorted chronologically by their order field."""
		return sorted(self.dmesg, key=self.dmesgSortVal)
	def sortedDevices(self, phase):
		"""Device names of a phase sorted by callback start time."""
		list = self.dmesg[phase]['list']
		slist = []
		tmp = dict()
		for devname in list:
			dev = list[devname]
			tmp[dev['start']] = devname
		for t in sorted(tmp):
			slist.append(tmp[t])
		return slist
	def fixupInitcalls(self, phase, end):
		# if any calls never returned, clip them at system resume end
		phaselist = self.dmesg[phase]['list']
		for devname in phaselist:
			dev = phaselist[devname]
			if(dev['end'] < 0):
				dev['end'] = end
				vprint('%s (%s): callback didnt return' % (devname, phase))
	def deviceFilter(self, devicefilter):
		"""Keep only the named devices plus their ancestors/descendants."""
		# remove all by the relatives of the filter devnames
		filter = []
		for phase in self.phases:
			list = self.dmesg[phase]['list']
			for name in devicefilter:
				dev = name
				while(dev in list):
					if(dev not in filter):
						filter.append(dev)
					dev = list[dev]['par']
				children = self.deviceDescendants(name, phase)
				for dev in children:
					if(dev not in filter):
						filter.append(dev)
		for phase in self.phases:
			list = self.dmesg[phase]['list']
			rmlist = []
			for name in list:
				pid = list[name]['pid']
				if(name not in filter and pid >= 0):
					rmlist.append(name)
			for name in rmlist:
				del list[name]
	def fixupInitcallsThatDidntReturn(self):
		# if any calls never returned, clip them at system resume end
		for phase in self.phases:
			self.fixupInitcalls(phase, self.getEnd())
	def newActionGlobal(self, name, start, end):
		# which phase is this device callback or action "in"
		targetphase = "none"
		overlap = 0.0
		for phase in self.phases:
			pstart = self.dmesg[phase]['start']
			pend = self.dmesg[phase]['end']
			# pick the phase with the largest time overlap
			o = max(0, min(end, pend) - max(start, pstart))
			if(o > overlap):
				targetphase = phase
				overlap = o
		if targetphase in self.phases:
			self.newAction(targetphase, name, -1, '', start, end, '')
			return True
		return False
	def newAction(self, phase, name, pid, parent, start, end, drv):
		# new device callback for a specific phase
		self.html_device_id += 1
		devid = '%s%d' % (self.idstr, self.html_device_id)
		list = self.dmesg[phase]['list']
		length = -1.0
		if(start >= 0 and end >= 0):
			length = end - start
		list[name] = {'start': start, 'end': end, 'pid': pid, 'par': parent,
					  'length': length, 'row': 0, 'id': devid, 'drv': drv }
	def deviceIDs(self, devlist, phase):
		"""Html ids of the given devices within a phase."""
		idlist = []
		list = self.dmesg[phase]['list']
		for devname in list:
			if devname in devlist:
				idlist.append(list[devname]['id'])
		return idlist
	def deviceParentID(self, devname, phase):
		"""Html id (or raw name) of a device's parent within a phase."""
		pdev = ''
		pdevid = ''
		list = self.dmesg[phase]['list']
		if devname in list:
			pdev = list[devname]['par']
		if pdev in list:
			return list[pdev]['id']
		return pdev
	def deviceChildren(self, devname, phase):
		"""Direct children of a device within a phase."""
		devlist = []
		list = self.dmesg[phase]['list']
		for child in list:
			if(list[child]['par'] == devname):
				devlist.append(child)
		return devlist
	def deviceDescendants(self, devname, phase):
		"""All descendants (recursive) of a device within a phase."""
		children = self.deviceChildren(devname, phase)
		family = children
		for child in children:
			family += self.deviceDescendants(child, phase)
		return family
	def deviceChildrenIDs(self, devname, phase):
		"""Html ids of a device's direct children within a phase."""
		devlist = self.deviceChildren(devname, phase)
		return self.deviceIDs(devlist, phase)
	def printDetails(self):
		"""Verbose dump of phase boundaries and device counts."""
		vprint('    test start: %f' % self.start)
		for phase in self.phases:
			dc = len(self.dmesg[phase]['list'])
			vprint('    %16s: %f - %f (%d devices)' % (phase, \
				self.dmesg[phase]['start'], self.dmesg[phase]['end'], dc))
		vprint('      test end: %f' % self.end)
	def masterTopology(self, name, list, depth):
		"""Recursively build a DeviceNode tree from the 'resume' phase."""
		node = DeviceNode(name, depth)
		for cname in list:
			clist = self.deviceChildren(cname, 'resume')
			cnode = self.masterTopology(cname, clist, depth+1)
			node.children.append(cnode)
		return node
	def printTopology(self, node):
		"""Render a DeviceNode tree as nested html lists."""
		html = ''
		if node.name:
			info = ''
			drv = ''
			for phase in self.phases:
				list = self.dmesg[phase]['list']
				if node.name in list:
					s = list[node.name]['start']
					e = list[node.name]['end']
					if list[node.name]['drv']:
						drv = ' {'+list[node.name]['drv']+'}'
					info += ('<li>%s: %.3fms</li>' % (phase, (e-s)*1000))
			html += '<li><b>'+node.name+drv+'</b>'
			if info:
				html += '<ul>'+info+'</ul>'
			html += '</li>'
		if len(node.children) > 0:
			html += '<ul>'
			for cnode in node.children:
				html += self.printTopology(cnode)
			html += '</ul>'
		return html
	def rootDeviceList(self):
		"""Top-most parent device names that are not themselves graphed."""
		# list of devices graphed
		real = []
		for phase in self.dmesg:
			list = self.dmesg[phase]['list']
			for dev in list:
				if list[dev]['pid'] >= 0 and dev not in real:
					real.append(dev)
		# list of top-most root devices
		rootlist = []
		for phase in self.dmesg:
			list = self.dmesg[phase]['list']
			for dev in list:
				pdev = list[dev]['par']
				if(re.match('[0-9]*-[0-9]*\.[0-9]*[\.0-9]*\:[\.0-9]*$', pdev)):
					continue
				if pdev and pdev not in real and pdev not in rootlist:
					rootlist.append(pdev)
		return rootlist
	def deviceTopology(self):
		"""Html rendering of the complete device topology."""
		rootlist = self.rootDeviceList()
		master = self.masterTopology('', rootlist, 0)
		return self.printTopology(master)
# Class: TraceEvent
# Description:
# A container for trace event data found in the ftrace file
class TraceEvent:
	"""A single trace event attached to a device callback."""
	ready = False
	name = ''
	time = 0.0
	color = '#FFFFFF'
	length = 0.0
	action = ''
	def __init__(self, a, n, c, t):
		# record the event identity and when it occurred
		self.time = t
		self.color = c
		self.name = n
		self.action = a
# Class: FTraceLine
# Description:
# A container for a single line of ftrace data. There are six basic types:
# callgraph line:
# call: " dpm_run_callback() {"
# return: " }"
# leaf: " dpm_run_callback();"
# trace event:
# tracing_mark_write: SUSPEND START or RESUME COMPLETE
# suspend_resume: phase or custom exec block data
# device_pm_callback: device callback info
class FTraceLine:
	time = 0.0
	length = 0.0
	fcall = False
	freturn = False
	fevent = False
	depth = 0
	name = ''
	type = ''
	def __init__(self, t, m, d):
		"""Parse one ftrace line: t = timestamp, m = message body,
		d = duration string, or 'traceevent' for nop-format events."""
		self.time = float(t)
		# is this a trace event
		if(d == 'traceevent' or re.match('^ *\/\* *(?P<msg>.*) \*\/ *$', m)):
			if(d == 'traceevent'):
				# nop format trace event
				msg = m
			else:
				# function_graph format trace event
				em = re.match('^ *\/\* *(?P<msg>.*) \*\/ *$', m)
				msg = em.group('msg')
			emm = re.match('^(?P<call>.*?): (?P<msg>.*)', msg)
			if(emm):
				self.name = emm.group('msg')
				self.type = emm.group('call')
			else:
				self.name = msg
			self.fevent = True
			return
		# convert the duration to seconds
		if(d):
			self.length = float(d)/1000000
		# the indentation determines the depth
		match = re.match('^(?P<d> *)(?P<o>.*)$', m)
		if(not match):
			return
		self.depth = self.getDepth(match.group('d'))
		m = match.group('o')
		# function return
		if(m[0] == '}'):
			self.freturn = True
			if(len(m) > 1):
				# includes comment with function name
				match = re.match('^} *\/\* *(?P<n>.*) *\*\/$', m)
				if(match):
					self.name = match.group('n')
		# function call
		else:
			self.fcall = True
			# function call with children
			if(m[-1] == '{'):
				match = re.match('^(?P<n>.*) *\(.*', m)
				if(match):
					self.name = match.group('n')
			# function call with no children (leaf)
			elif(m[-1] == ';'):
				self.freturn = True
				match = re.match('^(?P<n>.*) *\(.*', m)
				if(match):
					self.name = match.group('n')
			# something else (possibly a trace marker)
			else:
				self.name = m
	def getDepth(self, str):
		# two spaces of indent per call level
		# NOTE(review): '/' is true division under Python 3 and would
		# yield a float here; this script targets Python 2 -- confirm
		return len(str)/2
	def debugPrint(self, dev):
		"""Print this line in callgraph form, prefixed with dev."""
		if(self.freturn and self.fcall):
			print('%s -- %f (%02d): %s(); (%.3f us)' % (dev, self.time, \
				self.depth, self.name, self.length*1000000))
		elif(self.freturn):
			print('%s -- %f (%02d): %s} (%.3f us)' % (dev, self.time, \
				self.depth, self.name, self.length*1000000))
		else:
			print('%s -- %f (%02d): %s() { (%.3f us)' % (dev, self.time, \
				self.depth, self.name, self.length*1000000))
# Class: FTraceCallGraph
# Description:
# A container for the ftrace callgraph of a single recursive function.
# This can be a dpm_run_callback, dpm_prepare, or dpm_complete callgraph
# Each instance is tied to a single device in a single phase, and is
# comprised of an ordered list of FTraceLine objects
class FTraceCallGraph:
	"""Ftrace callgraph of a single recursive function.

	Tied to one device in one phase; holds an ordered list of FTraceLine
	objects from the function entry down to its final top-level return.
	"""
	start = -1.0	# timestamp of the first stored line
	end = -1.0		# timestamp of the closing top-level return
	list = []		# ordered FTraceLine objects
	invalid = False	# set when the graph overflowed or went negative-depth
	depth = 0		# running call depth while lines are added
	def __init__(self):
		self.start = -1.0
		self.end = -1.0
		self.list = []
		self.depth = 0
	def setDepth(self, line):
		"""Assign the running call depth to a line, updating the counter."""
		if(line.fcall and not line.freturn):
			# function entry: record depth, then descend
			line.depth = self.depth
			self.depth += 1
		elif(line.freturn and not line.fcall):
			# function exit: ascend, then record depth
			self.depth -= 1
			line.depth = self.depth
		else:
			# leaf call (entry and exit on one line): depth unchanged
			line.depth = self.depth
	def addLine(self, line, match):
		"""Append a line; return True when the callgraph is complete.

		line: FTraceLine to add
		match: regex match with 'pid'/'cpu' groups, used only to label
		       the overflow warning (may be 0/None)
		"""
		if(not self.invalid):
			self.setDepth(line)
		if(line.depth == 0 and line.freturn):
			# a return at depth 0 closes the callgraph
			if(self.start < 0):
				self.start = line.time
			self.end = line.time
			self.list.append(line)
			return True
		if(self.invalid):
			return False
		if(len(self.list) >= 1000000 or self.depth < 0):
			# runaway graph: keep only the first line and mark invalid
			if(len(self.list) > 0):
				first = self.list[0]
				self.list = []
				self.list.append(first)
			self.invalid = True
			if(not match):
				return False
			# renamed from 'id' to avoid shadowing the builtin
			callid = 'task %s cpu %s' % (match.group('pid'), match.group('cpu'))
			window = '(%f - %f)' % (self.start, line.time)
			if(self.depth < 0):
				print('Too much data for '+callid+\
					' (buffer overflow), ignoring this callback')
			else:
				print('Too much data for '+callid+\
					' '+window+', ignoring this callback')
			return False
		self.list.append(line)
		if(self.start < 0):
			self.start = line.time
		return False
	def slice(self, t0, tN):
		"""Return a new callgraph of only the lines inside [t0, tN]."""
		minicg = FTraceCallGraph()
		count = -1
		firstdepth = 0
		for l in self.list:
			if(l.time < t0 or l.time > tN):
				continue
			if(count < 0):
				# skip until the first real function call in the window
				if(not l.fcall or l.name == 'dev_driver_string'):
					continue
				firstdepth = l.depth
				count = 0
			# rebase depths so the slice starts at depth 0
			l.depth -= firstdepth
			minicg.addLine(l, 0)
			if((count == 0 and l.freturn and l.fcall) or
				(count > 0 and l.depth <= 0)):
				break
			count += 1
		return minicg
	def sanityCheck(self):
		"""Verify calls and returns balance; fold each return's length
		into its matching call line. Returns True when balanced."""
		stack = dict()
		cnt = 0
		for l in self.list:
			if(l.fcall and not l.freturn):
				stack[l.depth] = l
				cnt += 1
			elif(l.freturn and not l.fcall):
				if(l.depth not in stack):
					return False
				stack[l.depth].length = l.length
				stack[l.depth] = 0
				l.length = 0
				cnt -= 1
		if(cnt == 0):
			return True
		return False
	def debugPrint(self, filename):
		"""Dump the callgraph to stdout ('stdout') or to the given file."""
		if(filename == 'stdout'):
			# FIX: the original wrote "print('[%f - %f]') % (...)", which
			# only worked via Python 2 print-statement precedence and is a
			# TypeError under Python 3; format inside the call instead.
			print('[%f - %f]' % (self.start, self.end))
			for l in self.list:
				if(l.freturn and l.fcall):
					print('%f (%02d): %s(); (%.3f us)' % (l.time, \
						l.depth, l.name, l.length*1000000))
				elif(l.freturn):
					print('%f (%02d): %s} (%.3f us)' % (l.time, \
						l.depth, l.name, l.length*1000000))
				else:
					print('%f (%02d): %s() { (%.3f us)' % (l.time, \
						l.depth, l.name, l.length*1000000))
			print(' ')
		else:
			fp = open(filename, 'w')
			print(filename)
			for l in self.list:
				if(l.freturn and l.fcall):
					fp.write('%f (%02d): %s(); (%.3f us)\n' % (l.time, \
						l.depth, l.name, l.length*1000000))
				elif(l.freturn):
					fp.write('%f (%02d): %s} (%.3f us)\n' % (l.time, \
						l.depth, l.name, l.length*1000000))
				else:
					fp.write('%f (%02d): %s() { (%.3f us)\n' % (l.time, \
						l.depth, l.name, l.length*1000000))
			fp.close()
# Class: Timeline
# Description:
# A container for a suspend/resume html timeline. In older versions
# of the script there were multiple timelines, but in the latest
# there is only one.
class Timeline:
	"""Container for the single suspend/resume html timeline.

	Older versions of the script built multiple timelines; the latest
	builds only one. Holds the html fragments plus row geometry.
	"""
	html = {}
	scaleH = 0.0 # height of the row as a percent of the timeline height
	rowH = 0.0 # height of each row in percent of the timeline height
	row_height_pixels = 30
	maxrows = 0
	height = 0
	def __init__(self):
		self.html = dict(timeline='', legend='', scale='')
	def setRows(self, rows):
		# derive pixel height and per-row percentages from the row count
		self.maxrows = int(rows)
		self.scaleH = 100.0/float(self.maxrows)
		self.height = self.maxrows*self.row_height_pixels
		denom = max(float(self.maxrows - 1), 1.0)
		self.rowH = (100.0 - self.scaleH)/denom
# Class: TestRun
# Description:
# A container for a suspend/resume test run. This is necessary as
# there could be more than one, and they need to be separate.
class TestRun:
	"""Parse state for a single suspend/resume test run.

	Holds the regex used to parse ftrace lines (chosen by tracer type)
	plus temporary storage for callgraph (ftemp) and trace event
	(ttemp) data while a log is being parsed.
	"""
	# line format for function_graph tracer output (includes duration)
	ftrace_line_fmt_fg = \
		r'^ *(?P<time>[0-9\.]*) *\| *(?P<cpu>[0-9]*)\)'+\
		r' *(?P<proc>.*)-(?P<pid>[0-9]*) *\|'+\
		r'[ +!]*(?P<dur>[0-9\.]*) .*\| (?P<msg>.*)'
	# line format for nop tracer output (trace events only)
	ftrace_line_fmt_nop = \
		r' *(?P<proc>.*)-(?P<pid>[0-9]*) *\[(?P<cpu>[0-9]*)\] *'+\
		r'(?P<flags>.{4}) *(?P<time>[0-9\.]*): *'+\
		r'(?P<msg>.*)'
	ftrace_line_fmt = ftrace_line_fmt_nop
	cgformat = False
	ftemp = dict()
	ttemp = dict()
	inthepipe = False
	tracertype = ''
	data = 0
	def __init__(self, dataobj):
		self.data = dataobj
		self.ftemp = dict()
		self.ttemp = dict()
	def isReady(self):
		"""Return True once both a tracer type and a Data object are set.

		Fix: the original referenced the bare names 'tracertype' and
		'data' (NameError at runtime); they are instance attributes.
		"""
		if(self.tracertype == '' or not self.data):
			return False
		return True
	def setTracerType(self, tracer):
		"""Select the line-parsing regex for the given tracer type.

		tracer: 'function_graph' or 'nop'; anything else is fatal.
		"""
		self.tracertype = tracer
		if(tracer == 'function_graph'):
			self.cgformat = True
			self.ftrace_line_fmt = self.ftrace_line_fmt_fg
		elif(tracer == 'nop'):
			self.ftrace_line_fmt = self.ftrace_line_fmt_nop
		else:
			doError('Invalid tracer format: [%s]' % tracer, False)
# ----------------- FUNCTIONS --------------------
# Function: vprint
# Description:
# verbose print (prints only with -verbose option)
# Arguments:
# msg: the debug/log message to print
def vprint(msg):
	"""Verbose print: emit msg only when the -verbose option is set."""
	global sysvals
	if not sysvals.verbose:
		return
	print(msg)
# Function: initFtrace
# Description:
# Configure ftrace to use trace events and/or a callgraph
def initFtrace():
	"""Configure ftrace to use trace events and/or a callgraph.

	Writes directly to the tracing debugfs files under sysvals.tpath:
	disables tracing, sets the global clock, enlarges the buffer, and
	optionally sets up the function_graph tracer and trace events.
	"""
	global sysvals
	tp = sysvals.tpath
	# grep filter used to select the callgraph functions below
	cf = 'dpm_run_callback'
	if(sysvals.usetraceeventsonly):
		cf = '-e dpm_prepare -e dpm_complete -e dpm_run_callback'
	if(sysvals.usecallgraph or sysvals.usetraceevents):
		print('INITIALIZING FTRACE...')
		# turn trace off
		os.system('echo 0 > '+tp+'tracing_on')
		# set the trace clock to global
		os.system('echo global > '+tp+'trace_clock')
		# set trace buffer to a huge value
		os.system('echo nop > '+tp+'current_tracer')
		os.system('echo 100000 > '+tp+'buffer_size_kb')
		# initialize the callgraph trace, unless this is an x2 run
		if(sysvals.usecallgraph and sysvals.execcount == 1):
			# set trace type
			os.system('echo function_graph > '+tp+'current_tracer')
			os.system('echo "" > '+tp+'set_ftrace_filter')
			# set trace format options
			os.system('echo funcgraph-abstime > '+tp+'trace_options')
			os.system('echo funcgraph-proc > '+tp+'trace_options')
			# focus only on device suspend and resume
			os.system('cat '+tp+'available_filter_functions | grep '+\
				cf+' > '+tp+'set_graph_function')
		if(sysvals.usetraceevents):
			# turn trace events on
			events = iter(sysvals.traceevents)
			for e in events:
				os.system('echo 1 > '+sysvals.epath+e+'/enable')
		# clear the trace buffer
		os.system('echo "" > '+tp+'trace')
# Function: initFtraceAndroid
# Description:
# Configure ftrace to capture trace events
def initFtraceAndroid():
	"""Configure ftrace on an android device (via adb) to capture trace events.

	Same setup as initFtrace but every write is issued through
	'adb shell', and no callgraph configuration is done.
	"""
	global sysvals
	tp = sysvals.tpath
	if(sysvals.usetraceevents):
		print('INITIALIZING FTRACE...')
		# turn trace off
		os.system(sysvals.adb+" shell 'echo 0 > "+tp+"tracing_on'")
		# set the trace clock to global
		os.system(sysvals.adb+" shell 'echo global > "+tp+"trace_clock'")
		# set trace buffer to a huge value
		os.system(sysvals.adb+" shell 'echo nop > "+tp+"current_tracer'")
		os.system(sysvals.adb+" shell 'echo 10000 > "+tp+"buffer_size_kb'")
		# turn trace events on
		events = iter(sysvals.traceevents)
		for e in events:
			os.system(sysvals.adb+" shell 'echo 1 > "+\
				sysvals.epath+e+"/enable'")
		# clear the trace buffer
		os.system(sysvals.adb+" shell 'echo \"\" > "+tp+"trace'")
# Function: verifyFtrace
# Description:
# Check that ftrace is working on the system
# Output:
# True or False
def verifyFtrace():
	"""Check that ftrace is working on the system.

	Verifies the required tracing debugfs files exist (locally, or on
	the device via adb when sysvals.android is set). Returns True or
	False.
	"""
	global sysvals
	tp = sysvals.tpath
	# files needed for any trace data
	files = ['buffer_size_kb', 'current_tracer', 'trace', 'trace_clock',
		'trace_marker', 'trace_options', 'tracing_on']
	# extra files needed for callgraph trace data
	if(sysvals.usecallgraph):
		files += [
			'available_filter_functions',
			'set_ftrace_filter',
			'set_graph_function'
		]
	for f in files:
		path = tp+f
		if(sysvals.android):
			out = os.popen(sysvals.adb+' shell ls '+path).read().strip()
			if(out != path):
				return False
		elif(not os.path.exists(path)):
			return False
	return True
# Function: parseStamp
# Description:
# Pull in the stamp comment line from the data file(s),
# create the stamp, and add it to the global sysvals object
# Arguments:
# m: the valid re.match output for the stamp line
def parseStamp(m, data):
	"""Build the test stamp from a matched stamp line.

	m: a valid re.match for the stamp line (groups y/m/d/H/M/S plus
	   host, mode and kernel)
	data: the Data object whose stamp dict is filled in
	The first stamp seen also becomes the global sysvals.stamp.
	"""
	global sysvals
	# the stamp year is stored as two digits offset from 2000
	when = datetime(2000 + int(m.group('y')), int(m.group('m')),
		int(m.group('d')), int(m.group('H')), int(m.group('M')),
		int(m.group('S')))
	data.stamp = {
		'time': when.strftime('%B %d %Y, %I:%M:%S %p'),
		'host': m.group('host'),
		'mode': m.group('mode'),
	}
	data.stamp['kernel'] = m.group('kernel')
	sysvals.suspendmode = data.stamp['mode']
	if not sysvals.stamp:
		sysvals.stamp = data.stamp
# Function: diffStamp
# Description:
# compare the host, kernel, and mode fields in 3 stamps
# Arguments:
# stamp1: string array with mode, kernel, and host
# stamp2: string array with mode, kernel, and host
# Return:
# True if stamps differ, False if they're the same
def diffStamp(stamp1, stamp2):
	"""Compare the host, kernel, and mode fields of two stamps.

	A field is only compared when present in both stamps.
	Returns True if the stamps differ, False if they're the same.
	"""
	for field in ('host', 'kernel', 'mode'):
		if field in stamp1 and field in stamp2:
			if stamp1[field] != stamp2[field]:
				return True
	return False
# Function: doesTraceLogHaveTraceEvents
# Description:
# Quickly determine if the ftrace log has some or all of the trace events
# required for primary parsing. Set the usetraceevents and/or
# usetraceeventsonly flags in the global sysvals object
def doesTraceLogHaveTraceEvents():
	"""Scan the ftrace log for the trace events required for parsing.

	Sets sysvals.usetraceeventsonly (True only if every event in
	sysvals.traceevents appears) and sysvals.usetraceevents (True if
	at least the suspend_resume event appears).
	"""
	global sysvals
	sysvals.usetraceeventsonly = True
	sysvals.usetraceevents = False
	for e in sysvals.traceevents:
		# one grep of the log per event name
		out = os.popen('cat '+sysvals.ftracefile+' | grep "'+e+': "').read()
		if(not out):
			sysvals.usetraceeventsonly = False
		if(e == 'suspend_resume' and out):
			sysvals.usetraceevents = True
# Function: appendIncompleteTraceLog
# Description:
# [deprecated for kernel 3.15 or newer]
# Legacy support of ftrace outputs that lack the device_pm_callback
# and/or suspend_resume trace events. The primary data should be
# taken from dmesg, and this ftrace is used only for callgraph data
# or custom actions in the timeline. The data is appended to the Data
# objects provided.
# Arguments:
# testruns: the array of Data objects obtained from parseKernelLog
def appendIncompleteTraceLog(testruns):
	"""Append ftrace callgraph/trace event data to Data objects from dmesg.

	[deprecated for kernel 3.15 or newer]
	Legacy support of ftrace outputs that lack the device_pm_callback
	and/or suspend_resume trace events. The primary data should be
	taken from dmesg; this ftrace log supplies only callgraph data or
	custom actions for the timeline.

	testruns: the array of Data objects obtained from parseKernelLog
	"""
	global sysvals
	# create TestRun vessels for ftrace parsing
	testcnt = len(testruns)
	testidx = -1
	testrun = []
	for data in testruns:
		testrun.append(TestRun(data))
	# extract the callgraph and traceevent data
	vprint('Analyzing the ftrace data...')
	tf = open(sysvals.ftracefile, 'r')
	for line in tf:
		# remove any latent carriage returns
		line = line.replace('\r\n', '')
		# grab the time stamp first (signifies the start of the test run)
		m = re.match(sysvals.stampfmt, line)
		if(m):
			testidx += 1
			parseStamp(m, testrun[testidx].data)
			continue
		# pull out any firmware data
		if(re.match(sysvals.firmwarefmt, line)):
			continue
		# if we haven't found a test time stamp yet keep spinning til we do
		if(testidx < 0):
			continue
		# determine the trace data type (required for further parsing)
		m = re.match(sysvals.tracertypefmt, line)
		if(m):
			tracer = m.group('t')
			testrun[testidx].setTracerType(tracer)
			continue
		# parse only valid lines, if this isn't one move on
		m = re.match(testrun[testidx].ftrace_line_fmt, line)
		if(not m):
			continue
		# gather the basic message data from the line
		m_time = m.group('time')
		m_pid = m.group('pid')
		m_msg = m.group('msg')
		if(testrun[testidx].cgformat):
			m_param3 = m.group('dur')
		else:
			m_param3 = 'traceevent'
		if(m_time and m_pid and m_msg):
			t = FTraceLine(m_time, m_msg, m_param3)
			pid = int(m_pid)
		else:
			continue
		# the line should be a call, return, or event
		if(not t.fcall and not t.freturn and not t.fevent):
			continue
		# only parse the ftrace data during suspend/resume
		data = testrun[testidx].data
		if(not testrun[testidx].inthepipe):
			# look for the suspend start marker
			if(t.fevent):
				if(t.name == 'SUSPEND START'):
					testrun[testidx].inthepipe = True
					data.setStart(t.time)
			continue
		else:
			# trace event processing
			if(t.fevent):
				if(t.name == 'RESUME COMPLETE'):
					testrun[testidx].inthepipe = False
					data.setEnd(t.time)
					# stop once the final test run is closed out
					if(testidx == testcnt - 1):
						break
					continue
				# general trace events have two types, begin and end
				if(re.match('(?P<name>.*) begin$', t.name)):
					isbegin = True
				elif(re.match('(?P<name>.*) end$', t.name)):
					isbegin = False
				else:
					continue
				m = re.match('(?P<name>.*)\[(?P<val>[0-9]*)\] .*', t.name)
				if(m):
					val = m.group('val')
					if val == '0':
						name = m.group('name')
					else:
						name = m.group('name')+'['+val+']'
				else:
					m = re.match('(?P<name>.*) .*', t.name)
					name = m.group('name')
				# special processing for trace events
				if re.match('dpm_prepare\[.*', name):
					continue
				elif re.match('machine_suspend.*', name):
					continue
				elif re.match('suspend_enter\[.*', name):
					if(not isbegin):
						data.dmesg['suspend_prepare']['end'] = t.time
					continue
				elif re.match('dpm_suspend\[.*', name):
					if(not isbegin):
						data.dmesg['suspend']['end'] = t.time
					continue
				elif re.match('dpm_suspend_late\[.*', name):
					if(isbegin):
						data.dmesg['suspend_late']['start'] = t.time
					else:
						data.dmesg['suspend_late']['end'] = t.time
					continue
				elif re.match('dpm_suspend_noirq\[.*', name):
					if(isbegin):
						data.dmesg['suspend_noirq']['start'] = t.time
					else:
						data.dmesg['suspend_noirq']['end'] = t.time
					continue
				elif re.match('dpm_resume_noirq\[.*', name):
					if(isbegin):
						data.dmesg['resume_machine']['end'] = t.time
						data.dmesg['resume_noirq']['start'] = t.time
					else:
						data.dmesg['resume_noirq']['end'] = t.time
					continue
				elif re.match('dpm_resume_early\[.*', name):
					if(isbegin):
						data.dmesg['resume_early']['start'] = t.time
					else:
						data.dmesg['resume_early']['end'] = t.time
					continue
				elif re.match('dpm_resume\[.*', name):
					if(isbegin):
						data.dmesg['resume']['start'] = t.time
					else:
						data.dmesg['resume']['end'] = t.time
					continue
				elif re.match('dpm_complete\[.*', name):
					if(isbegin):
						data.dmesg['resume_complete']['start'] = t.time
					else:
						data.dmesg['resume_complete']['end'] = t.time
					continue
				# is this trace event outside of the devices calls
				if(data.isTraceEventOutsideDeviceCalls(pid, t.time)):
					# global events (outside device calls) are simply graphed
					if(isbegin):
						# store each trace event in ttemp
						if(name not in testrun[testidx].ttemp):
							testrun[testidx].ttemp[name] = []
						testrun[testidx].ttemp[name].append(\
							{'begin': t.time, 'end': t.time})
					else:
						# finish off matching trace event in ttemp
						if(name in testrun[testidx].ttemp):
							testrun[testidx].ttemp[name][-1]['end'] = t.time
				else:
					if(isbegin):
						data.addIntraDevTraceEvent('', name, pid, t.time)
					else:
						data.capIntraDevTraceEvent('', name, pid, t.time)
			# call/return processing
			elif sysvals.usecallgraph:
				# create a callgraph object for the data
				if(pid not in testrun[testidx].ftemp):
					testrun[testidx].ftemp[pid] = []
					testrun[testidx].ftemp[pid].append(FTraceCallGraph())
				# when the call is finished, see which device matches it
				cg = testrun[testidx].ftemp[pid][-1]
				if(cg.addLine(t, m)):
					testrun[testidx].ftemp[pid].append(FTraceCallGraph())
	tf.close()
	for test in testrun:
		# add the traceevent data to the device hierarchy
		if(sysvals.usetraceevents):
			for name in test.ttemp:
				for event in test.ttemp[name]:
					begin = event['begin']
					end = event['end']
					# if event starts before timeline start, expand timeline
					if(begin < test.data.start):
						test.data.setStart(begin)
					# if event ends after timeline end, expand the timeline
					if(end > test.data.end):
						test.data.setEnd(end)
					test.data.newActionGlobal(name, begin, end)
		# add the callgraph data to the device hierarchy
		for pid in test.ftemp:
			for cg in test.ftemp[pid]:
				if(not cg.sanityCheck()):
					# NOTE(review): 'm' here is the last match object
					# left over from the parse loop above; its 'cpu'
					# group may not belong to this callgraph - confirm
					id = 'task %s cpu %s' % (pid, m.group('cpu'))
					vprint('Sanity check failed for '+\
						id+', ignoring this callback')
					continue
				callstart = cg.start
				callend = cg.end
				# attach the callgraph to the device it spans
				for p in test.data.phases:
					if(test.data.dmesg[p]['start'] <= callstart and
						callstart <= test.data.dmesg[p]['end']):
						list = test.data.dmesg[p]['list']
						for devname in list:
							dev = list[devname]
							if(pid == dev['pid'] and
								callstart <= dev['start'] and
								callend >= dev['end']):
								dev['ftrace'] = cg
						break
		if(sysvals.verbose):
			test.data.printDetails()
	# add the time in between the tests as a new phase so we can see it
	if(len(testruns) > 1):
		t1e = testruns[0].getEnd()
		t2s = testruns[-1].getStart()
		testruns[-1].newPhaseWithSingleAction('user mode', \
			'user mode', t1e, t2s, '#FF9966')
# Function: parseTraceLog
# Description:
# Analyze an ftrace log output file generated from this app during
# the execution phase. Used when the ftrace log is the primary data source
# and includes the suspend_resume and device_pm_callback trace events
# The ftrace filename is taken from sysvals
# Output:
# An array of Data objects
def parseTraceLog():
	"""Analyze the ftrace log generated during the execution phase.

	Used when the ftrace log is the primary data source and includes
	the suspend_resume and device_pm_callback trace events. The
	ftrace filename is taken from sysvals.
	Returns an array of Data objects, one per test run in the log.
	"""
	global sysvals
	vprint('Analyzing the ftrace data...')
	if(os.path.exists(sysvals.ftracefile) == False):
		doError('%s doesnt exist' % sysvals.ftracefile, False)
	# extract the callgraph and traceevent data
	testruns = []
	testdata = []
	testrun = 0
	data = 0
	tf = open(sysvals.ftracefile, 'r')
	phase = 'suspend_prepare'
	for line in tf:
		# remove any latent carriage returns
		line = line.replace('\r\n', '')
		# stamp line: each stamp means a new test run
		m = re.match(sysvals.stampfmt, line)
		if(m):
			data = Data(len(testdata))
			testdata.append(data)
			testrun = TestRun(data)
			testruns.append(testrun)
			parseStamp(m, data)
			continue
		if(not data):
			continue
		# firmware line: pull out any firmware data
		m = re.match(sysvals.firmwarefmt, line)
		if(m):
			data.fwSuspend = int(m.group('s'))
			data.fwResume = int(m.group('r'))
			if(data.fwSuspend > 0 or data.fwResume > 0):
				data.fwValid = True
			continue
		# tracer type line: determine the trace data type
		m = re.match(sysvals.tracertypefmt, line)
		if(m):
			tracer = m.group('t')
			testrun.setTracerType(tracer)
			continue
		# post resume time line: did this test run include post-resume data
		m = re.match(sysvals.postresumefmt, line)
		if(m):
			t = int(m.group('t'))
			if(t > 0):
				sysvals.postresumetime = t
			continue
		# ftrace line: parse only valid lines
		m = re.match(testrun.ftrace_line_fmt, line)
		if(not m):
			continue
		# gather the basic message data from the line
		m_time = m.group('time')
		m_pid = m.group('pid')
		m_msg = m.group('msg')
		if(testrun.cgformat):
			m_param3 = m.group('dur')
		else:
			m_param3 = 'traceevent'
		if(m_time and m_pid and m_msg):
			t = FTraceLine(m_time, m_msg, m_param3)
			pid = int(m_pid)
		else:
			continue
		# the line should be a call, return, or event
		if(not t.fcall and not t.freturn and not t.fevent):
			continue
		# only parse the ftrace data during suspend/resume
		if(not testrun.inthepipe):
			# look for the suspend start marker
			if(t.fevent):
				if(t.name == 'SUSPEND START'):
					testrun.inthepipe = True
					data.setStart(t.time)
			continue
		# trace event processing
		if(t.fevent):
			if(t.name == 'RESUME COMPLETE'):
				if(sysvals.postresumetime > 0):
					phase = 'post_resume'
					data.newPhase(phase, t.time, t.time, '#FF9966', -1)
				else:
					testrun.inthepipe = False
				data.setEnd(t.time)
				continue
			if(phase == 'post_resume'):
				data.setEnd(t.time)
			if(t.type == 'suspend_resume'):
				# suspend_resume trace events have two types, begin and end
				if(re.match('(?P<name>.*) begin$', t.name)):
					isbegin = True
				elif(re.match('(?P<name>.*) end$', t.name)):
					isbegin = False
				else:
					continue
				m = re.match('(?P<name>.*)\[(?P<val>[0-9]*)\] .*', t.name)
				if(m):
					val = m.group('val')
					if val == '0':
						name = m.group('name')
					else:
						name = m.group('name')+'['+val+']'
				else:
					m = re.match('(?P<name>.*) .*', t.name)
					name = m.group('name')
				# ignore these events
				if(re.match('acpi_suspend\[.*', t.name) or
					re.match('suspend_enter\[.*', name)):
					continue
				# -- phase changes --
				# suspend_prepare start
				if(re.match('dpm_prepare\[.*', t.name)):
					phase = 'suspend_prepare'
					if(not isbegin):
						data.dmesg[phase]['end'] = t.time
					continue
				# suspend start
				elif(re.match('dpm_suspend\[.*', t.name)):
					phase = 'suspend'
					data.setPhase(phase, t.time, isbegin)
					continue
				# suspend_late start
				elif(re.match('dpm_suspend_late\[.*', t.name)):
					phase = 'suspend_late'
					data.setPhase(phase, t.time, isbegin)
					continue
				# suspend_noirq start
				elif(re.match('dpm_suspend_noirq\[.*', t.name)):
					phase = 'suspend_noirq'
					data.setPhase(phase, t.time, isbegin)
					if(not isbegin):
						phase = 'suspend_machine'
						data.dmesg[phase]['start'] = t.time
					continue
				# suspend_machine/resume_machine
				elif(re.match('machine_suspend\[.*', t.name)):
					if(isbegin):
						phase = 'suspend_machine'
						data.dmesg[phase]['end'] = t.time
						data.tSuspended = t.time
					else:
						if(sysvals.suspendmode in ['mem', 'disk']):
							data.dmesg['suspend_machine']['end'] = t.time
							data.tSuspended = t.time
						phase = 'resume_machine'
						data.dmesg[phase]['start'] = t.time
						data.tResumed = t.time
						data.tLow = data.tResumed - data.tSuspended
					continue
				# resume_noirq start
				elif(re.match('dpm_resume_noirq\[.*', t.name)):
					phase = 'resume_noirq'
					data.setPhase(phase, t.time, isbegin)
					if(isbegin):
						data.dmesg['resume_machine']['end'] = t.time
					continue
				# resume_early start
				elif(re.match('dpm_resume_early\[.*', t.name)):
					phase = 'resume_early'
					data.setPhase(phase, t.time, isbegin)
					continue
				# resume start
				elif(re.match('dpm_resume\[.*', t.name)):
					phase = 'resume'
					data.setPhase(phase, t.time, isbegin)
					continue
				# resume complete start
				elif(re.match('dpm_complete\[.*', t.name)):
					phase = 'resume_complete'
					if(isbegin):
						data.dmesg[phase]['start'] = t.time
					continue
				# is this trace event outside of the devices calls
				if(data.isTraceEventOutsideDeviceCalls(pid, t.time)):
					# global events (outside device calls) are simply graphed
					if(name not in testrun.ttemp):
						testrun.ttemp[name] = []
					if(isbegin):
						# create a new list entry
						testrun.ttemp[name].append(\
							{'begin': t.time, 'end': t.time})
					else:
						if(len(testrun.ttemp[name]) > 0):
							# if an entry exists, assume this is its end
							testrun.ttemp[name][-1]['end'] = t.time
						elif(phase == 'post_resume'):
							# post resume events can just have ends
							testrun.ttemp[name].append({
								'begin': data.dmesg[phase]['start'],
								'end': t.time})
				else:
					if(isbegin):
						data.addIntraDevTraceEvent('', name, pid, t.time)
					else:
						data.capIntraDevTraceEvent('', name, pid, t.time)
			# device callback start
			elif(t.type == 'device_pm_callback_start'):
				m = re.match('(?P<drv>.*) (?P<d>.*), parent: *(?P<p>.*), .*',\
					t.name);
				if(not m):
					continue
				drv = m.group('drv')
				n = m.group('d')
				p = m.group('p')
				if(n and p):
					data.newAction(phase, n, pid, p, t.time, -1, drv)
			# device callback finish
			elif(t.type == 'device_pm_callback_end'):
				m = re.match('(?P<drv>.*) (?P<d>.*), err.*', t.name);
				if(not m):
					continue
				n = m.group('d')
				list = data.dmesg[phase]['list']
				if(n in list):
					dev = list[n]
					dev['length'] = t.time - dev['start']
					dev['end'] = t.time
		# callgraph processing
		elif sysvals.usecallgraph:
			# this shouldn't happen, but JIC, ignore callgraph data post-res
			if(phase == 'post_resume'):
				continue
			# create a callgraph object for the data
			if(pid not in testrun.ftemp):
				testrun.ftemp[pid] = []
				testrun.ftemp[pid].append(FTraceCallGraph())
			# when the call is finished, see which device matches it
			cg = testrun.ftemp[pid][-1]
			if(cg.addLine(t, m)):
				testrun.ftemp[pid].append(FTraceCallGraph())
	tf.close()
	for test in testruns:
		# add the traceevent data to the device hierarchy
		if(sysvals.usetraceevents):
			for name in test.ttemp:
				for event in test.ttemp[name]:
					begin = event['begin']
					end = event['end']
					# if event starts before timeline start, expand timeline
					if(begin < test.data.start):
						test.data.setStart(begin)
					# if event ends after timeline end, expand the timeline
					if(end > test.data.end):
						test.data.setEnd(end)
					test.data.newActionGlobal(name, begin, end)
		# add the callgraph data to the device hierarchy
		borderphase = {
			'dpm_prepare': 'suspend_prepare',
			'dpm_complete': 'resume_complete'
		}
		for pid in test.ftemp:
			for cg in test.ftemp[pid]:
				if len(cg.list) < 2:
					continue
				if(not cg.sanityCheck()):
					# NOTE(review): 'm' here is left over from the
					# parse loop above; its 'cpu' group may be stale
					# for this callgraph - confirm before relying on it
					id = 'task %s cpu %s' % (pid, m.group('cpu'))
					vprint('Sanity check failed for '+\
						id+', ignoring this callback')
					continue
				callstart = cg.start
				callend = cg.end
				# graphs that start with dpm_prepare/dpm_complete get
				# sliced to each device's window in the border phase
				if(cg.list[0].name in borderphase):
					p = borderphase[cg.list[0].name]
					list = test.data.dmesg[p]['list']
					for devname in list:
						dev = list[devname]
						if(pid == dev['pid'] and
							callstart <= dev['start'] and
							callend >= dev['end']):
							dev['ftrace'] = cg.slice(dev['start'], dev['end'])
					continue
				if(cg.list[0].name != 'dpm_run_callback'):
					continue
				for p in test.data.phases:
					if(test.data.dmesg[p]['start'] <= callstart and
						callstart <= test.data.dmesg[p]['end']):
						list = test.data.dmesg[p]['list']
						for devname in list:
							dev = list[devname]
							if(pid == dev['pid'] and
								callstart <= dev['start'] and
								callend >= dev['end']):
								dev['ftrace'] = cg
						break
	# fill in any missing phases
	for data in testdata:
		lp = data.phases[0]
		for p in data.phases:
			if(data.dmesg[p]['start'] < 0 and data.dmesg[p]['end'] < 0):
				print('WARNING: phase "%s" is missing!' % p)
			if(data.dmesg[p]['start'] < 0):
				data.dmesg[p]['start'] = data.dmesg[lp]['end']
				if(p == 'resume_machine'):
					data.tSuspended = data.dmesg[lp]['end']
					data.tResumed = data.dmesg[lp]['end']
					data.tLow = 0
			if(data.dmesg[p]['end'] < 0):
				data.dmesg[p]['end'] = data.dmesg[p]['start']
			lp = p
		if(len(sysvals.devicefilter) > 0):
			data.deviceFilter(sysvals.devicefilter)
		data.fixupInitcallsThatDidntReturn()
		if(sysvals.verbose):
			data.printDetails()
	# add the time in between the tests as a new phase so we can see it
	if(len(testdata) > 1):
		t1e = testdata[0].getEnd()
		t2s = testdata[-1].getStart()
		testdata[-1].newPhaseWithSingleAction('user mode', \
			'user mode', t1e, t2s, '#FF9966')
	return testdata
# Function: loadKernelLog
# Description:
# [deprecated for kernel 3.15.0 or newer]
# load the dmesg file into memory and fix up any ordering issues
# The dmesg filename is taken from sysvals
# Output:
# An array of empty Data objects with only their dmesgtext attributes set
def loadKernelLog():
	"""Load the dmesg file into memory and fix up any ordering issues.

	[deprecated for kernel 3.15.0 or newer]
	The dmesg filename is taken from sysvals. Returns an array of
	otherwise-empty Data objects with only their dmesgtext set.
	"""
	global sysvals
	vprint('Analyzing the dmesg data...')
	if(os.path.exists(sysvals.dmesgfile) == False):
		doError('%s doesnt exist' % sysvals.dmesgfile, False)
	# there can be multiple test runs in a single file delineated by stamps
	testruns = []
	data = 0
	lf = open(sysvals.dmesgfile, 'r')
	for line in lf:
		line = line.replace('\r\n', '')
		# strip off anything preceding the dmesg timestamp bracket
		idx = line.find('[')
		if idx > 1:
			line = line[idx:]
		m = re.match(sysvals.stampfmt, line)
		if(m):
			if(data):
				testruns.append(data)
			data = Data(len(testruns))
			parseStamp(m, data)
			continue
		if(not data):
			continue
		m = re.match(sysvals.firmwarefmt, line)
		if(m):
			data.fwSuspend = int(m.group('s'))
			data.fwResume = int(m.group('r'))
			if(data.fwSuspend > 0 or data.fwResume > 0):
				data.fwValid = True
			continue
		m = re.match('[ \t]*(\[ *)(?P<ktime>[0-9\.]*)(\]) (?P<msg>.*)', line)
		if(m):
			data.dmesgtext.append(line)
			if(re.match('ACPI: resume from mwait', m.group('msg'))):
				print('NOTE: This suspend appears to be freeze rather than'+\
					' %s, it will be treated as such' % sysvals.suspendmode)
				sysvals.suspendmode = 'freeze'
		else:
			vprint('ignoring dmesg line: %s' % line.replace('\n', ''))
	testruns.append(data)
	lf.close()
	if(not data):
		print('ERROR: analyze_suspend header missing from dmesg log')
		sys.exit()
	# fix lines with same timestamp/function with the call and return swapped
	for data in testruns:
		last = ''
		for line in data.dmesgtext:
			mc = re.match('.*(\[ *)(?P<t>[0-9\.]*)(\]) calling '+\
				'(?P<f>.*)\+ @ .*, parent: .*', line)
			mr = re.match('.*(\[ *)(?P<t>[0-9\.]*)(\]) call '+\
				'(?P<f>.*)\+ returned .* after (?P<dt>.*) usecs', last)
			if(mc and mr and (mc.group('t') == mr.group('t')) and
				(mc.group('f') == mr.group('f'))):
				i = data.dmesgtext.index(last)
				j = data.dmesgtext.index(line)
				data.dmesgtext[i] = line
				data.dmesgtext[j] = last
			last = line
	return testruns
# Function: parseKernelLog
# Description:
# [deprecated for kernel 3.15.0 or newer]
# Analyse a dmesg log output file generated from this app during
# the execution phase. Create a set of device structures in memory
# for subsequent formatting in the html output file
# This call is only for legacy support on kernels where the ftrace
# data lacks the suspend_resume or device_pm_callbacks trace events.
# Arguments:
# data: an empty Data object (with dmesgtext) obtained from loadKernelLog
# Output:
# The filled Data object
def parseKernelLog(data):
	"""Analyze a dmesg log and fill the Data object with device info.

	[deprecated for kernel 3.15.0 or newer]
	Creates a set of device structures in memory for subsequent
	formatting in the html output file. Only for legacy support on
	kernels whose ftrace lacks the suspend_resume or
	device_pm_callbacks trace events.

	data: an empty Data object (with dmesgtext) from loadKernelLog
	Returns True.
	"""
	global sysvals
	phase = 'suspend_runtime'
	if(data.fwValid):
		vprint('Firmware Suspend = %u ns, Firmware Resume = %u ns' % \
			(data.fwSuspend, data.fwResume))
	# dmesg phase match table
	dm = {
		'suspend_prepare': 'PM: Syncing filesystems.*',
		'suspend': 'PM: Entering [a-z]* sleep.*',
		'suspend_late': 'PM: suspend of devices complete after.*',
		'suspend_noirq': 'PM: late suspend of devices complete after.*',
		'suspend_machine': 'PM: noirq suspend of devices complete after.*',
		'resume_machine': 'ACPI: Low-level resume complete.*',
		'resume_noirq': 'ACPI: Waking up from system sleep state.*',
		'resume_early': 'PM: noirq resume of devices complete after.*',
		'resume': 'PM: early resume of devices complete after.*',
		'resume_complete': 'PM: resume of devices complete after.*',
		'post_resume': '.*Restarting tasks \.\.\..*',
	}
	# some suspend modes emit different boundary messages
	if(sysvals.suspendmode == 'standby'):
		dm['resume_machine'] = 'PM: Restoring platform NVS memory'
	elif(sysvals.suspendmode == 'disk'):
		dm['suspend_late'] = 'PM: freeze of devices complete after.*'
		dm['suspend_noirq'] = 'PM: late freeze of devices complete after.*'
		dm['suspend_machine'] = 'PM: noirq freeze of devices complete after.*'
		dm['resume_machine'] = 'PM: Restoring platform NVS memory'
		dm['resume_early'] = 'PM: noirq restore of devices complete after.*'
		dm['resume'] = 'PM: early restore of devices complete after.*'
		dm['resume_complete'] = 'PM: restore of devices complete after.*'
	elif(sysvals.suspendmode == 'freeze'):
		dm['resume_machine'] = 'ACPI: resume from mwait'
	# action table (expected events that occur and show up in dmesg)
	at = {
		'sync_filesystems': {
			'smsg': 'PM: Syncing filesystems.*',
			'emsg': 'PM: Preparing system for mem sleep.*' },
		'freeze_user_processes': {
			'smsg': 'Freezing user space processes .*',
			'emsg': 'Freezing remaining freezable tasks.*' },
		'freeze_tasks': {
			'smsg': 'Freezing remaining freezable tasks.*',
			'emsg': 'PM: Entering (?P<mode>[a-z,A-Z]*) sleep.*' },
		'ACPI prepare': {
			'smsg': 'ACPI: Preparing to enter system sleep state.*',
			'emsg': 'PM: Saving platform NVS memory.*' },
		'PM vns': {
			'smsg': 'PM: Saving platform NVS memory.*',
			'emsg': 'Disabling non-boot CPUs .*' },
	}
	t0 = -1.0
	cpu_start = -1.0
	prevktime = -1.0
	actions = dict()
	for line in data.dmesgtext:
		# -- preprocessing --
		# parse each dmesg line into the time and message
		m = re.match('[ \t]*(\[ *)(?P<ktime>[0-9\.]*)(\]) (?P<msg>.*)', line)
		if(m):
			val = m.group('ktime')
			try:
				ktime = float(val)
			except:
				doWarning('INVALID DMESG LINE: '+\
					line.replace('\n', ''), 'dmesg')
				continue
			msg = m.group('msg')
			# initialize data start to first line time
			if t0 < 0:
				data.setStart(ktime)
				t0 = ktime
		else:
			continue
		# hack for determining resume_machine end for freeze
		if(not sysvals.usetraceevents and sysvals.suspendmode == 'freeze' \
			and phase == 'resume_machine' and \
			re.match('calling (?P<f>.*)\+ @ .*, parent: .*', msg)):
			data.dmesg['resume_machine']['end'] = ktime
			phase = 'resume_noirq'
			data.dmesg[phase]['start'] = ktime
		# -- phase changes --
		# suspend start
		if(re.match(dm['suspend_prepare'], msg)):
			phase = 'suspend_prepare'
			data.dmesg[phase]['start'] = ktime
			data.setStart(ktime)
		# suspend start
		elif(re.match(dm['suspend'], msg)):
			data.dmesg['suspend_prepare']['end'] = ktime
			phase = 'suspend'
			data.dmesg[phase]['start'] = ktime
		# suspend_late start
		elif(re.match(dm['suspend_late'], msg)):
			data.dmesg['suspend']['end'] = ktime
			phase = 'suspend_late'
			data.dmesg[phase]['start'] = ktime
		# suspend_noirq start
		elif(re.match(dm['suspend_noirq'], msg)):
			data.dmesg['suspend_late']['end'] = ktime
			phase = 'suspend_noirq'
			data.dmesg[phase]['start'] = ktime
		# suspend_machine start
		elif(re.match(dm['suspend_machine'], msg)):
			data.dmesg['suspend_noirq']['end'] = ktime
			phase = 'suspend_machine'
			data.dmesg[phase]['start'] = ktime
		# resume_machine start
		elif(re.match(dm['resume_machine'], msg)):
			# for freeze/standby the previous line's time is the
			# closest approximation of the actual suspend point
			if(sysvals.suspendmode in ['freeze', 'standby']):
				data.tSuspended = prevktime
				data.dmesg['suspend_machine']['end'] = prevktime
			else:
				data.tSuspended = ktime
				data.dmesg['suspend_machine']['end'] = ktime
			phase = 'resume_machine'
			data.tResumed = ktime
			data.tLow = data.tResumed - data.tSuspended
			data.dmesg[phase]['start'] = ktime
		# resume_noirq start
		elif(re.match(dm['resume_noirq'], msg)):
			data.dmesg['resume_machine']['end'] = ktime
			phase = 'resume_noirq'
			data.dmesg[phase]['start'] = ktime
		# resume_early start
		elif(re.match(dm['resume_early'], msg)):
			data.dmesg['resume_noirq']['end'] = ktime
			phase = 'resume_early'
			data.dmesg[phase]['start'] = ktime
		# resume start
		elif(re.match(dm['resume'], msg)):
			data.dmesg['resume_early']['end'] = ktime
			phase = 'resume'
			data.dmesg[phase]['start'] = ktime
		# resume complete start
		elif(re.match(dm['resume_complete'], msg)):
			data.dmesg['resume']['end'] = ktime
			phase = 'resume_complete'
			data.dmesg[phase]['start'] = ktime
		# post resume start
		elif(re.match(dm['post_resume'], msg)):
			data.dmesg['resume_complete']['end'] = ktime
			data.setEnd(ktime)
			phase = 'post_resume'
			break
		# -- device callbacks --
		if(phase in data.phases):
			# device init call
			if(re.match('calling (?P<f>.*)\+ @ .*, parent: .*', msg)):
				sm = re.match('calling (?P<f>.*)\+ @ '+\
					'(?P<n>.*), parent: (?P<p>.*)', msg);
				f = sm.group('f')
				n = sm.group('n')
				p = sm.group('p')
				if(f and n and p):
					data.newAction(phase, f, int(n), p, ktime, -1, '')
			# device init return
			elif(re.match('call (?P<f>.*)\+ returned .* after '+\
				'(?P<t>.*) usecs', msg)):
				sm = re.match('call (?P<f>.*)\+ returned .* after '+\
					'(?P<t>.*) usecs(?P<a>.*)', msg);
				f = sm.group('f')
				t = sm.group('t')
				list = data.dmesg[phase]['list']
				if(f in list):
					dev = list[f]
					dev['length'] = int(t)
					dev['end'] = ktime
		# -- non-devicecallback actions --
		# if trace events are not available, these are better than nothing
		if(not sysvals.usetraceevents):
			# look for known actions
			for a in at:
				if(re.match(at[a]['smsg'], msg)):
					if(a not in actions):
						actions[a] = []
					actions[a].append({'begin': ktime, 'end': ktime})
				if(re.match(at[a]['emsg'], msg)):
					actions[a][-1]['end'] = ktime
			# now look for CPU on/off events
			if(re.match('Disabling non-boot CPUs .*', msg)):
				# start of first cpu suspend
				cpu_start = ktime
			elif(re.match('Enabling non-boot CPUs .*', msg)):
				# start of first cpu resume
				cpu_start = ktime
			elif(re.match('smpboot: CPU (?P<cpu>[0-9]*) is now offline', msg)):
				# end of a cpu suspend, start of the next
				m = re.match('smpboot: CPU (?P<cpu>[0-9]*) is now offline', msg)
				cpu = 'CPU'+m.group('cpu')
				if(cpu not in actions):
					actions[cpu] = []
				actions[cpu].append({'begin': cpu_start, 'end': ktime})
				cpu_start = ktime
			elif(re.match('CPU(?P<cpu>[0-9]*) is up', msg)):
				# end of a cpu resume, start of the next
				m = re.match('CPU(?P<cpu>[0-9]*) is up', msg)
				cpu = 'CPU'+m.group('cpu')
				if(cpu not in actions):
					actions[cpu] = []
				actions[cpu].append({'begin': cpu_start, 'end': ktime})
				cpu_start = ktime
		prevktime = ktime
	# fill in any missing phases
	lp = data.phases[0]
	for p in data.phases:
		if(data.dmesg[p]['start'] < 0 and data.dmesg[p]['end'] < 0):
			print('WARNING: phase "%s" is missing, something went wrong!' % p)
			print('    In %s, this dmesg line denotes the start of %s:' % \
				(sysvals.suspendmode, p))
			print('        "%s"' % dm[p])
		if(data.dmesg[p]['start'] < 0):
			data.dmesg[p]['start'] = data.dmesg[lp]['end']
			if(p == 'resume_machine'):
				data.tSuspended = data.dmesg[lp]['end']
				data.tResumed = data.dmesg[lp]['end']
				data.tLow = 0
		if(data.dmesg[p]['end'] < 0):
			data.dmesg[p]['end'] = data.dmesg[p]['start']
		lp = p
	# fill in any actions we've found
	for name in actions:
		for event in actions[name]:
			begin = event['begin']
			end = event['end']
			# if event starts before timeline start, expand timeline
			if(begin < data.start):
				data.setStart(begin)
			# if event ends after timeline end, expand the timeline
			if(end > data.end):
				data.setEnd(end)
			data.newActionGlobal(name, begin, end)
	if(sysvals.verbose):
		data.printDetails()
	if(len(sysvals.devicefilter) > 0):
		data.deviceFilter(sysvals.devicefilter)
	data.fixupInitcallsThatDidntReturn()
	return True
# Function: setTimelineRows
# Description:
# Organize the timeline entries into the smallest
# number of rows possible, with no entry overlapping
# Arguments:
# list: the list of devices/actions for a single phase
#	 sortedkeys: chronologically sorted key list to use
# Output:
# The total number of rows needed to display this phase of the timeline
def setTimelineRows(list, sortedkeys):
	"""Pack the timeline entries of one phase into non-overlapping rows.

	Every entry gets a 'row' index assigned in place; entries that overlap
	in time are pushed onto later rows so nothing is drawn on top of
	anything else.

	Arguments:
		list: the dict of devices/actions for a single phase
		sortedkeys: chronologically sorted key list to use
	Output:
		The total number of rows needed to display this phase
	"""
	# mark every entry as not yet placed
	unplaced = len(list)
	for key in list:
		list[key]['row'] = -1
	placed_by_row = dict()
	row = 0
	# greedily fill one row at a time until every entry has a home
	while unplaced > 0:
		occupants = placed_by_row.setdefault(row, [])
		for key in sortedkeys:
			entry = list[key]
			if entry['row'] >= 0:
				continue
			s = entry['start']
			e = entry['end']
			# the entry fits only if it lies entirely on one side of
			# every range already placed in this row
			fits = all((((s <= o['start']) and (e <= o['start'])) or
				((s >= o['end']) and (e >= o['end']))) for o in occupants)
			if fits:
				occupants.append(entry)
				entry['row'] = row
				unplaced -= 1
		row += 1
	return row
# Function: createTimeScale
# Description:
# Create the timescale header for the html timeline
# Arguments:
# t0: start time (suspend begin)
# tMax: end time (resume end)
# tSuspend: time when suspend occurs, i.e. the zero time
# Output:
# The html code needed to display the time scale
def createTimeScale(t0, tMax, tSuspended):
timescale = '<div class="t" style="right:{0}%">{1}</div>\n'
output = '<div id="timescale">\n'
# set scale for timeline
tTotal = tMax - t0
tS = 0.1
if(tTotal <= 0):
return output
if(tTotal > 4):
tS = 1
if(tSuspended < 0):
for i in range(int(tTotal/tS)+1):
pos = '%0.3f' % (100 - ((float(i)*tS*100)/tTotal))
if(i > 0):
val = '%0.fms' % (float(i)*tS*1000)
else:
val = ''
output += timescale.format(pos, val)
else:
tSuspend = tSuspended - t0
divTotal = int(tTotal/tS) + 1
divSuspend = int(tSuspend/tS)
s0 = (tSuspend - tS*divSuspend)*100/tTotal
for i in range(divTotal):
pos = '%0.3f' % (100 - ((float(i)*tS*100)/tTotal) - s0)
if((i == 0) and (s0 < 3)):
val = ''
elif(i == divSuspend):
val = 'S/R'
else:
val = '%0.fms' % (float(i-divSuspend)*tS*1000)
output += timescale.format(pos, val)
output += '</div>\n'
return output
# Function: createHTMLSummarySimple
# Description:
# Create summary html file for a series of tests
# Arguments:
# testruns: array of Data objects from parseTraceLog
def createHTMLSummarySimple(testruns, htmlfile):
	"""Create summary html file for a series of tests.

	Writes one table row per test run with its suspend/resume times and a
	link to that run's detail page, followed by a row of averages. The
	host/kernel/mode/time columns are only emitted when the per-run stamps
	differ from the global stamp (see the diffStamp check below).

	Arguments:
		testruns: array of Data objects from parseTraceLog
		htmlfile: output path for the summary html file
	"""
	global sysvals
	# print out the basic summary of all the tests
	hf = open(htmlfile, 'w')
	# write the html header first (html head, css code, up to body start)
	html = '<!DOCTYPE html>\n<html>\n<head>\n\
	<meta http-equiv="content-type" content="text/html; charset=UTF-8">\n\
	<title>AnalyzeSuspend Summary</title>\n\
	<style type=\'text/css\'>\n\
		body {overflow-y: scroll;}\n\
		.stamp {width: 100%;text-align:center;background-color:#495E09;line-height:30px;color:white;font: 25px Arial;}\n\
		table {width:100%;border-collapse: collapse;}\n\
		.summary {font: 22px Arial;border:1px solid;}\n\
		th {border: 1px solid black;background-color:#A7C942;color:white;}\n\
		td {text-align: center;}\n\
		tr.alt td {background-color:#EAF2D3;}\n\
		tr.avg td {background-color:#BDE34C;}\n\
		a:link {color: #90B521;}\n\
		a:visited {color: #495E09;}\n\
		a:hover {color: #B1DF28;}\n\
		a:active {color: #FFFFFF;}\n\
	</style>\n</head>\n<body>\n'
	# group test header
	count = len(testruns)
	headline_stamp = '<div class="stamp">{0} {1} {2} {3} ({4} tests)</div>\n'
	html += headline_stamp.format(sysvals.stamp['host'],
		sysvals.stamp['kernel'], sysvals.stamp['mode'],
		sysvals.stamp['time'], count)
	# check to see if all the tests have the same value
	stampcolumns = False
	for data in testruns:
		if diffStamp(sysvals.stamp, data.stamp):
			stampcolumns = True
			break
	th = '\t<th>{0}</th>\n'
	td = '\t<td>{0}</td>\n'
	tdlink = '\t<td><a href="{0}">Click Here</a></td>\n'
	# table header
	html += '<table class="summary">\n<tr>\n'
	html += th.format("Test #")
	if stampcolumns:
		html += th.format("Hostname")
		html += th.format("Kernel Version")
		html += th.format("Suspend Mode")
		html += th.format("Test Time")
	html += th.format("Suspend Time")
	html += th.format("Resume Time")
	html += th.format("Detail")
	html += '</tr>\n'
	# test data, 1 row per test
	sTimeAvg = 0.0
	rTimeAvg = 0.0
	num = 1
	for data in testruns:
		# data.end is the end of post_resume
		resumeEnd = data.dmesg['resume_complete']['end']
		# alternate row shading via the "alt" css class
		if num % 2 == 1:
			html += '<tr class="alt">\n'
		else:
			html += '<tr>\n'
		# test num
		html += td.format("test %d" % num)
		num += 1
		if stampcolumns:
			# host name
			val = "unknown"
			if('host' in data.stamp):
				val = data.stamp['host']
			html += td.format(val)
			# host kernel
			val = "unknown"
			if('kernel' in data.stamp):
				val = data.stamp['kernel']
			html += td.format(val)
			# suspend mode
			val = "unknown"
			if('mode' in data.stamp):
				val = data.stamp['mode']
			html += td.format(val)
			# test time
			val = "unknown"
			if('time' in data.stamp):
				val = data.stamp['time']
			html += td.format(val)
		# suspend time (seconds converted to ms)
		sTime = (data.tSuspended - data.start)*1000
		sTimeAvg += sTime
		html += td.format("%3.3f ms" % sTime)
		# resume time (seconds converted to ms)
		rTime = (resumeEnd - data.tResumed)*1000
		rTimeAvg += rTime
		html += td.format("%3.3f ms" % rTime)
		# link to the output html
		html += tdlink.format(data.outfile)
		html += '</tr>\n'
	# last line: test average
	if(count > 0):
		sTimeAvg /= count
		rTimeAvg /= count
	html += '<tr class="avg">\n'
	html += td.format('Average')	# name
	if stampcolumns:
		html += td.format('')		# host
		html += td.format('')		# kernel
		html += td.format('')		# mode
		html += td.format('')		# time
	html += td.format("%3.3f ms" % sTimeAvg)	# suspend time
	html += td.format("%3.3f ms" % rTimeAvg)	# resume time
	html += td.format('')			# output link
	html += '</tr>\n'
	# flush the data to file
	hf.write(html+'</table>\n')
	hf.write('</body>\n</html>\n')
	hf.close()
# Function: createHTML
# Description:
# Create the output html file from the resident test data
# Arguments:
# testruns: array of Data objects from parseKernelLog or parseTraceLog
# Output:
# True if the html file was created, false if it failed
def createHTML(testruns):
	"""Create the output html file from the resident test data.

	Builds the device timeline (phases, devices, trace events), the time
	scale, the phase legend, the per-test device-detail section, and the
	optional ftrace callgraph section, then writes everything to
	sysvals.htmlfile.

	Arguments:
		testruns: array of Data objects from parseKernelLog or parseTraceLog
	Output:
		True if the html file was created, false if it failed
	"""
	global sysvals
	# shift all timestamps so the last run's suspend point is time zero
	for data in testruns:
		data.normalizeTime(testruns[-1].tSuspended)

	# x2changes tweaks button text/positioning when two test runs are shown
	x2changes = ['', 'absolute']
	if len(testruns) > 1:
		x2changes = ['1', 'relative']
	# html function templates
	headline_stamp = '<div class="stamp">{0} {1} {2} {3}</div>\n'
	html_devlist1 = '<button id="devlist1" class="devlist" style="float:left;">Device Detail%s</button>' % x2changes[0]
	html_zoombox = '<center><button id="zoomin">ZOOM IN</button><button id="zoomout">ZOOM OUT</button><button id="zoomdef">ZOOM 1:1</button></center>\n'
	html_devlist2 = '<button id="devlist2" class="devlist" style="float:right;">Device Detail2</button>\n'
	html_timeline = '<div id="dmesgzoombox" class="zoombox">\n<div id="{0}" class="timeline" style="height:{1}px">\n'
	html_device = '<div id="{0}" title="{1}" class="thread" style="left:{2}%;top:{3}%;height:{4}%;width:{5}%;">{6}</div>\n'
	html_traceevent = '<div title="{0}" class="traceevent" style="left:{1}%;top:{2}%;height:{3}%;width:{4}%;border:1px solid {5};background-color:{5}">{6}</div>\n'
	html_phase = '<div class="phase" style="left:{0}%;width:{1}%;top:{2}%;height:{3}%;background-color:{4}">{5}</div>\n'
	html_phaselet = '<div id="{0}" class="phaselet" style="left:{1}%;width:{2}%;background-color:{3}"></div>\n'
	html_legend = '<div class="square" style="left:{0}%;background-color:{1}">&nbsp;{2}</div>\n'
	html_timetotal = '<table class="time1">\n<tr>'\
		'<td class="green">{2} Suspend Time: <b>{0} ms</b></td>'\
		'<td class="yellow">{2} Resume Time: <b>{1} ms</b></td>'\
		'</tr>\n</table>\n'
	html_timetotal2 = '<table class="time1">\n<tr>'\
		'<td class="green">{3} Suspend Time: <b>{0} ms</b></td>'\
		'<td class="gray">'+sysvals.suspendmode+' time: <b>{1} ms</b></td>'\
		'<td class="yellow">{3} Resume Time: <b>{2} ms</b></td>'\
		'</tr>\n</table>\n'
	html_timegroups = '<table class="time2">\n<tr>'\
		'<td class="green">{4}Kernel Suspend: {0} ms</td>'\
		'<td class="purple">{4}Firmware Suspend: {1} ms</td>'\
		'<td class="purple">{4}Firmware Resume: {2} ms</td>'\
		'<td class="yellow">{4}Kernel Resume: {3} ms</td>'\
		'</tr>\n</table>\n'

	# device timeline
	vprint('Creating Device Timeline...')
	devtl = Timeline()

	# Generate the header for this timeline
	textnum = ['First', 'Second']
	for data in testruns:
		tTotal = data.end - data.start
		tEnd = data.dmesg['resume_complete']['end']
		if(tTotal == 0):
			print('ERROR: No timeline data')
			sys.exit()
		if(data.tLow > 0):
			low_time = '%.0f'%(data.tLow*1000)
		if data.fwValid:
			# firmware timing available: include it in the totals
			# (fwSuspend/fwResume are in ns, hence the 1e6 divide to ms)
			suspend_time = '%.0f'%((data.tSuspended-data.start)*1000 + \
				(data.fwSuspend/1000000.0))
			resume_time = '%.0f'%((tEnd-data.tSuspended)*1000 + \
				(data.fwResume/1000000.0))
			testdesc1 = 'Total'
			testdesc2 = ''
			if(len(testruns) > 1):
				testdesc1 = testdesc2 = textnum[data.testnumber]
				testdesc2 += ' '
			if(data.tLow == 0):
				thtml = html_timetotal.format(suspend_time, \
					resume_time, testdesc1)
			else:
				thtml = html_timetotal2.format(suspend_time, low_time, \
					resume_time, testdesc1)
			devtl.html['timeline'] += thtml
			sktime = '%.3f'%((data.dmesg['suspend_machine']['end'] - \
				data.getStart())*1000)
			sftime = '%.3f'%(data.fwSuspend / 1000000.0)
			rftime = '%.3f'%(data.fwResume / 1000000.0)
			rktime = '%.3f'%((data.getEnd() - \
				data.dmesg['resume_machine']['start'])*1000)
			devtl.html['timeline'] += html_timegroups.format(sktime, \
				sftime, rftime, rktime, testdesc2)
		else:
			# kernel-only timing
			suspend_time = '%.0f'%((data.tSuspended-data.start)*1000)
			resume_time = '%.0f'%((tEnd-data.tSuspended)*1000)
			testdesc = 'Kernel'
			if(len(testruns) > 1):
				testdesc = textnum[data.testnumber]+' '+testdesc
			if(data.tLow == 0):
				thtml = html_timetotal.format(suspend_time, \
					resume_time, testdesc)
			else:
				thtml = html_timetotal2.format(suspend_time, low_time, \
					resume_time, testdesc)
			devtl.html['timeline'] += thtml

	# time scale for potentially multiple datasets
	t0 = testruns[0].start
	tMax = testruns[-1].end
	tSuspended = testruns[-1].tSuspended
	tTotal = tMax - t0

	# determine the maximum number of rows we need to draw
	timelinerows = 0
	for data in testruns:
		for phase in data.dmesg:
			# NOTE: "list" shadows the builtin here (pre-existing style)
			list = data.dmesg[phase]['list']
			rows = setTimelineRows(list, list)
			data.dmesg[phase]['row'] = rows
			if(rows > timelinerows):
				timelinerows = rows

	# calculate the timeline height and create bounding box, add buttons
	devtl.setRows(timelinerows + 1)
	devtl.html['timeline'] += html_devlist1
	if len(testruns) > 1:
		devtl.html['timeline'] += html_devlist2
	devtl.html['timeline'] += html_zoombox
	devtl.html['timeline'] += html_timeline.format('dmesg', devtl.height)

	# draw the colored boxes for each of the phases
	for data in testruns:
		for b in data.dmesg:
			phase = data.dmesg[b]
			length = phase['end']-phase['start']
			left = '%.3f' % (((phase['start']-t0)*100.0)/tTotal)
			width = '%.3f' % ((length*100.0)/tTotal)
			devtl.html['timeline'] += html_phase.format(left, width, \
				'%.3f'%devtl.scaleH, '%.3f'%(100-devtl.scaleH), \
				data.dmesg[b]['color'], '')

	# draw the time scale, try to make the number of labels readable
	devtl.html['scale'] = createTimeScale(t0, tMax, tSuspended)
	devtl.html['timeline'] += devtl.html['scale']
	# draw one box per device/action inside its phase row
	for data in testruns:
		for b in data.dmesg:
			phaselist = data.dmesg[b]['list']
			for d in phaselist:
				name = d
				drv = ''
				dev = phaselist[d]
				if(d in sysvals.altdevname):
					name = sysvals.altdevname[d]
				if('drv' in dev and dev['drv']):
					drv = ' {%s}' % dev['drv']
				height = (100.0 - devtl.scaleH)/data.dmesg[b]['row']
				top = '%.3f' % ((dev['row']*height) + devtl.scaleH)
				left = '%.3f' % (((dev['start']-t0)*100)/tTotal)
				width = '%.3f' % (((dev['end']-dev['start'])*100)/tTotal)
				length = ' (%0.3f ms) ' % ((dev['end']-dev['start'])*1000)
				color = 'rgba(204,204,204,0.5)'
				devtl.html['timeline'] += html_device.format(dev['id'], \
					d+drv+length+b, left, top, '%.3f'%height, width, name+drv)

	# draw any trace events found
	for data in testruns:
		for b in data.dmesg:
			phaselist = data.dmesg[b]['list']
			for name in phaselist:
				dev = phaselist[name]
				if('traceevents' in dev):
					vprint('Debug trace events found for device %s' % name)
					vprint('%20s %20s %10s %8s' % ('action', \
						'name', 'time(ms)', 'length(ms)'))
					for e in dev['traceevents']:
						vprint('%20s %20s %10.3f %8.3f' % (e.action, \
							e.name, e.time*1000, e.length*1000))
						height = (100.0 - devtl.scaleH)/data.dmesg[b]['row']
						top = '%.3f' % ((dev['row']*height) + devtl.scaleH)
						left = '%.3f' % (((e.time-t0)*100)/tTotal)
						width = '%.3f' % (e.length*100/tTotal)
						color = 'rgba(204,204,204,0.5)'
						devtl.html['timeline'] += \
							html_traceevent.format(e.action+' '+e.name, \
								left, top, '%.3f'%height, \
								width, e.color, '')

	# timeline is finished
	devtl.html['timeline'] += '</div>\n</div>\n'

	# draw a legend which describes the phases by color
	data = testruns[-1]
	devtl.html['legend'] = '<div class="legend">\n'
	pdelta = 100.0/len(data.phases)
	pmargin = pdelta / 4.0
	for phase in data.phases:
		order = '%.2f' % ((data.dmesg[phase]['order'] * pdelta) + pmargin)
		# NOTE(review): string.replace() exists only in python 2; on
		# python 3 this would need phase.replace('_', ' ')
		name = string.replace(phase, '_', ' ')
		devtl.html['legend'] += html_legend.format(order, \
			data.dmesg[phase]['color'], name)
	devtl.html['legend'] += '</div>\n'

	hf = open(sysvals.htmlfile, 'w')
	# thread_height feeds the css below; per-device heights are set
	# inline by html_device, so this stays 0 here
	thread_height = 0

	# write the html header first (html head, css code, up to body start)
	html_header = '<!DOCTYPE html>\n<html>\n<head>\n\
	<meta http-equiv="content-type" content="text/html; charset=UTF-8">\n\
	<title>AnalyzeSuspend</title>\n\
	<style type=\'text/css\'>\n\
		body {overflow-y: scroll;}\n\
		.stamp {width: 100%;text-align:center;background-color:gray;line-height:30px;color:white;font: 25px Arial;}\n\
		.callgraph {margin-top: 30px;box-shadow: 5px 5px 20px black;}\n\
		.callgraph article * {padding-left: 28px;}\n\
		h1 {color:black;font: bold 30px Times;}\n\
		t0 {color:black;font: bold 30px Times;}\n\
		t1 {color:black;font: 30px Times;}\n\
		t2 {color:black;font: 25px Times;}\n\
		t3 {color:black;font: 20px Times;white-space:nowrap;}\n\
		t4 {color:black;font: bold 30px Times;line-height:60px;white-space:nowrap;}\n\
		table {width:100%;}\n\
		.gray {background-color:rgba(80,80,80,0.1);}\n\
		.green {background-color:rgba(204,255,204,0.4);}\n\
		.purple {background-color:rgba(128,0,128,0.2);}\n\
		.yellow {background-color:rgba(255,255,204,0.4);}\n\
		.time1 {font: 22px Arial;border:1px solid;}\n\
		.time2 {font: 15px Arial;border-bottom:1px solid;border-left:1px solid;border-right:1px solid;}\n\
		td {text-align: center;}\n\
		r {color:#500000;font:15px Tahoma;}\n\
		n {color:#505050;font:15px Tahoma;}\n\
		.tdhl {color: red;}\n\
		.hide {display: none;}\n\
		.pf {display: none;}\n\
		.pf:checked + label {background: url(\'data:image/svg+xml;utf,<?xml version="1.0" standalone="no"?><svg xmlns="http://www.w3.org/2000/svg" height="18" width="18" version="1.1"><circle cx="9" cy="9" r="8" stroke="black" stroke-width="1" fill="white"/><rect x="4" y="8" width="10" height="2" style="fill:black;stroke-width:0"/><rect x="8" y="4" width="2" height="10" style="fill:black;stroke-width:0"/></svg>\') no-repeat left center;}\n\
		.pf:not(:checked) ~ label {background: url(\'data:image/svg+xml;utf,<?xml version="1.0" standalone="no"?><svg xmlns="http://www.w3.org/2000/svg" height="18" width="18" version="1.1"><circle cx="9" cy="9" r="8" stroke="black" stroke-width="1" fill="white"/><rect x="4" y="8" width="10" height="2" style="fill:black;stroke-width:0"/></svg>\') no-repeat left center;}\n\
		.pf:checked ~ *:not(:nth-child(2)) {display: none;}\n\
		.zoombox {position: relative; width: 100%; overflow-x: scroll;}\n\
		.timeline {position: relative; font-size: 14px;cursor: pointer;width: 100%; overflow: hidden; background-color:#dddddd;}\n\
		.thread {position: absolute; height: '+'%.3f'%thread_height+'%; overflow: hidden; line-height: 30px; border:1px solid;text-align:center;white-space:nowrap;background-color:rgba(204,204,204,0.5);}\n\
		.thread:hover {background-color:white;border:1px solid red;z-index:10;}\n\
		.hover {background-color:white;border:1px solid red;z-index:10;}\n\
		.traceevent {position: absolute;opacity: 0.3;height: '+'%.3f'%thread_height+'%;width:0;overflow:hidden;line-height:30px;text-align:center;white-space:nowrap;}\n\
		.phase {position: absolute;overflow: hidden;border:0px;text-align:center;}\n\
		.phaselet {position:absolute;overflow:hidden;border:0px;text-align:center;height:100px;font-size:24px;}\n\
		.t {position:absolute;top:0%;height:100%;border-right:1px solid black;}\n\
		.legend {position: relative; width: 100%; height: 40px; text-align: center;margin-bottom:20px}\n\
		.legend .square {position:absolute;top:10px; width: 0px;height: 20px;border:1px solid;padding-left:20px;}\n\
		button {height:40px;width:200px;margin-bottom:20px;margin-top:20px;font-size:24px;}\n\
		.devlist {position:'+x2changes[1]+';width:190px;}\n\
		#devicedetail {height:100px;box-shadow: 5px 5px 20px black;}\n\
	</style>\n</head>\n<body>\n'
	hf.write(html_header)

	# write the test title and general info header
	if(sysvals.stamp['time'] != ""):
		hf.write(headline_stamp.format(sysvals.stamp['host'],
			sysvals.stamp['kernel'], sysvals.stamp['mode'], \
			sysvals.stamp['time']))

	# write the device timeline
	hf.write(devtl.html['timeline'])
	hf.write(devtl.html['legend'])
	hf.write('<div id="devicedetailtitle"></div>\n')
	hf.write('<div id="devicedetail" style="display:none;">\n')
	# draw the colored boxes for the device detail section
	for data in testruns:
		hf.write('<div id="devicedetail%d">\n' % data.testnumber)
		for b in data.phases:
			phase = data.dmesg[b]
			length = phase['end']-phase['start']
			left = '%.3f' % (((phase['start']-t0)*100.0)/tTotal)
			width = '%.3f' % ((length*100.0)/tTotal)
			hf.write(html_phaselet.format(b, left, width, \
				data.dmesg[b]['color']))
		hf.write('</div>\n')
	hf.write('</div>\n')

	# write the ftrace data (callgraph)
	data = testruns[-1]
	if(sysvals.usecallgraph):
		hf.write('<section id="callgraphs" class="callgraph">\n')
		# write out the ftrace data converted to html
		html_func_top = '<article id="{0}" class="atop" style="background-color:{1}">\n<input type="checkbox" class="pf" id="f{2}" checked/><label for="f{2}">{3} {4}</label>\n'
		html_func_start = '<article>\n<input type="checkbox" class="pf" id="f{0}" checked/><label for="f{0}">{1} {2}</label>\n'
		html_func_end = '</article>\n'
		html_func_leaf = '<article>{0} {1}</article>\n'
		num = 0
		for p in data.phases:
			list = data.dmesg[p]['list']
			for devname in data.sortedDevices(p):
				if('ftrace' not in list[devname]):
					continue
				name = devname
				if(devname in sysvals.altdevname):
					name = sysvals.altdevname[devname]
				devid = list[devname]['id']
				cg = list[devname]['ftrace']
				flen = '<r>(%.3f ms @ %.3f to %.3f)</r>' % \
					((cg.end - cg.start)*1000, cg.start*1000, cg.end*1000)
				hf.write(html_func_top.format(devid, data.dmesg[p]['color'], \
					num, name+' '+p, flen))
				num += 1
				# each fcall opens an article, each freturn closes one;
				# a call that immediately returns is a leaf
				for line in cg.list:
					if(line.length < 0.000000001):
						flen = ''
					else:
						flen = '<n>(%.3f ms @ %.3f)</n>' % (line.length*1000, \
							line.time*1000)
					if(line.freturn and line.fcall):
						hf.write(html_func_leaf.format(line.name, flen))
					elif(line.freturn):
						hf.write(html_func_end)
					else:
						hf.write(html_func_start.format(num, line.name, flen))
						num += 1
				hf.write(html_func_end)
		hf.write('\n\n </section>\n')
	# write the footer and close
	addScriptCode(hf, testruns)
	hf.write('</body>\n</html>\n')
	hf.close()
	return True
# Function: addScriptCode
# Description:
# Adds the javascript code to the output html
# Arguments:
# hf: the open html file pointer
# testruns: array of Data objects from parseKernelLog or parseTraceLog
def addScriptCode(hf, testruns):
	"""Adds the javascript code to the output html.

	The embedded script implements timeline zooming, device hover
	highlighting, the per-device detail panel, callgraph filtering, and
	the popup device-topology windows.

	Arguments:
		hf: the open html file pointer
		testruns: array of Data objects from parseKernelLog or parseTraceLog
	"""
	# timeline bounds in ms relative to the last run's suspend point
	t0 = (testruns[0].start - testruns[-1].tSuspended) * 1000
	tMax = (testruns[-1].end - testruns[-1].tSuspended) * 1000
	# create an array in javascript memory with the device details
	detail = '	var devtable = [];\n'
	for data in testruns:
		topo = data.deviceTopology()
		detail += '	devtable[%d] = "%s";\n' % (data.testnumber, topo)
	detail += '	var bounds = [%f,%f];\n' % (t0, tMax)
	# add the code which will manipulate the data in the browser
	script_code = \
	'<script type="text/javascript">\n'+detail+\
	'	function zoomTimeline() {\n'\
	'		var timescale = document.getElementById("timescale");\n'\
	'		var dmesg = document.getElementById("dmesg");\n'\
	'		var zoombox = document.getElementById("dmesgzoombox");\n'\
	'		var val = parseFloat(dmesg.style.width);\n'\
	'		var newval = 100;\n'\
	'		var sh = window.outerWidth / 2;\n'\
	'		if(this.id == "zoomin") {\n'\
	'			newval = val * 1.2;\n'\
	'			if(newval > 40000) newval = 40000;\n'\
	'			dmesg.style.width = newval+"%";\n'\
	'			zoombox.scrollLeft = ((zoombox.scrollLeft + sh) * newval / val) - sh;\n'\
	'		} else if (this.id == "zoomout") {\n'\
	'			newval = val / 1.2;\n'\
	'			if(newval < 100) newval = 100;\n'\
	'			dmesg.style.width = newval+"%";\n'\
	'			zoombox.scrollLeft = ((zoombox.scrollLeft + sh) * newval / val) - sh;\n'\
	'		} else {\n'\
	'			zoombox.scrollLeft = 0;\n'\
	'			dmesg.style.width = "100%";\n'\
	'		}\n'\
	'		var html = "";\n'\
	'		var t0 = bounds[0];\n'\
	'		var tMax = bounds[1];\n'\
	'		var tTotal = tMax - t0;\n'\
	'		var wTotal = tTotal * 100.0 / newval;\n'\
	'		for(var tS = 1000; (wTotal / tS) < 3; tS /= 10);\n'\
	'		if(tS < 1) tS = 1;\n'\
	'		for(var s = ((t0 / tS)|0) * tS; s < tMax; s += tS) {\n'\
	'			var pos = (tMax - s) * 100.0 / tTotal;\n'\
	'			var name = (s == 0)?"S/R":(s+"ms");\n'\
	'			html += "<div class=\\"t\\" style=\\"right:"+pos+"%\\">"+name+"</div>";\n'\
	'		}\n'\
	'		timescale.innerHTML = html;\n'\
	'	}\n'\
	'	function deviceHover() {\n'\
	'		var name = this.title.slice(0, this.title.indexOf(" ("));\n'\
	'		var dmesg = document.getElementById("dmesg");\n'\
	'		var dev = dmesg.getElementsByClassName("thread");\n'\
	'		var cpu = -1;\n'\
	'		if(name.match("CPU_ON\[[0-9]*\]"))\n'\
	'			cpu = parseInt(name.slice(7));\n'\
	'		else if(name.match("CPU_OFF\[[0-9]*\]"))\n'\
	'			cpu = parseInt(name.slice(8));\n'\
	'		for (var i = 0; i < dev.length; i++) {\n'\
	'			dname = dev[i].title.slice(0, dev[i].title.indexOf(" ("));\n'\
	'			if((cpu >= 0 && dname.match("CPU_O[NF]*\\\[*"+cpu+"\\\]")) ||\n'\
	'				(name == dname))\n'\
	'			{\n'\
	'				dev[i].className = "thread hover";\n'\
	'			} else {\n'\
	'				dev[i].className = "thread";\n'\
	'			}\n'\
	'		}\n'\
	'	}\n'\
	'	function deviceUnhover() {\n'\
	'		var dmesg = document.getElementById("dmesg");\n'\
	'		var dev = dmesg.getElementsByClassName("thread");\n'\
	'		for (var i = 0; i < dev.length; i++) {\n'\
	'			dev[i].className = "thread";\n'\
	'		}\n'\
	'	}\n'\
	'	function deviceTitle(title, total, cpu) {\n'\
	'		var prefix = "Total";\n'\
	'		if(total.length > 3) {\n'\
	'			prefix = "Average";\n'\
	'			total[1] = (total[1]+total[3])/2;\n'\
	'			total[2] = (total[2]+total[4])/2;\n'\
	'		}\n'\
	'		var devtitle = document.getElementById("devicedetailtitle");\n'\
	'		var name = title.slice(0, title.indexOf(" "));\n'\
	'		if(cpu >= 0) name = "CPU"+cpu;\n'\
	'		var driver = "";\n'\
	'		var tS = "<t2>(</t2>";\n'\
	'		var tR = "<t2>)</t2>";\n'\
	'		if(total[1] > 0)\n'\
	'			tS = "<t2>("+prefix+" Suspend:</t2><t0> "+total[1].toFixed(3)+" ms</t0> ";\n'\
	'		if(total[2] > 0)\n'\
	'			tR = " <t2>"+prefix+" Resume:</t2><t0> "+total[2].toFixed(3)+" ms<t2>)</t2></t0>";\n'\
	'		var s = title.indexOf("{");\n'\
	'		var e = title.indexOf("}");\n'\
	'		if((s >= 0) && (e >= 0))\n'\
	'			driver = title.slice(s+1, e) + " <t1>@</t1> ";\n'\
	'		if(total[1] > 0 && total[2] > 0)\n'\
	'			devtitle.innerHTML = "<t0>"+driver+name+"</t0> "+tS+tR;\n'\
	'		else\n'\
	'			devtitle.innerHTML = "<t0>"+title+"</t0>";\n'\
	'		return name;\n'\
	'	}\n'\
	'	function deviceDetail() {\n'\
	'		var devinfo = document.getElementById("devicedetail");\n'\
	'		devinfo.style.display = "block";\n'\
	'		var name = this.title.slice(0, this.title.indexOf(" ("));\n'\
	'		var cpu = -1;\n'\
	'		if(name.match("CPU_ON\[[0-9]*\]"))\n'\
	'			cpu = parseInt(name.slice(7));\n'\
	'		else if(name.match("CPU_OFF\[[0-9]*\]"))\n'\
	'			cpu = parseInt(name.slice(8));\n'\
	'		var dmesg = document.getElementById("dmesg");\n'\
	'		var dev = dmesg.getElementsByClassName("thread");\n'\
	'		var idlist = [];\n'\
	'		var pdata = [[]];\n'\
	'		var pd = pdata[0];\n'\
	'		var total = [0.0, 0.0, 0.0];\n'\
	'		for (var i = 0; i < dev.length; i++) {\n'\
	'			dname = dev[i].title.slice(0, dev[i].title.indexOf(" ("));\n'\
	'			if((cpu >= 0 && dname.match("CPU_O[NF]*\\\[*"+cpu+"\\\]")) ||\n'\
	'				(name == dname))\n'\
	'			{\n'\
	'				idlist[idlist.length] = dev[i].id;\n'\
	'				var tidx = 1;\n'\
	'				if(dev[i].id[0] == "a") {\n'\
	'					pd = pdata[0];\n'\
	'				} else {\n'\
	'					if(pdata.length == 1) pdata[1] = [];\n'\
	'					if(total.length == 3) total[3]=total[4]=0.0;\n'\
	'					pd = pdata[1];\n'\
	'					tidx = 3;\n'\
	'				}\n'\
	'				var info = dev[i].title.split(" ");\n'\
	'				var pname = info[info.length-1];\n'\
	'				pd[pname] = parseFloat(info[info.length-3].slice(1));\n'\
	'				total[0] += pd[pname];\n'\
	'				if(pname.indexOf("suspend") >= 0)\n'\
	'					total[tidx] += pd[pname];\n'\
	'				else\n'\
	'					total[tidx+1] += pd[pname];\n'\
	'			}\n'\
	'		}\n'\
	'		var devname = deviceTitle(this.title, total, cpu);\n'\
	'		var left = 0.0;\n'\
	'		for (var t = 0; t < pdata.length; t++) {\n'\
	'			pd = pdata[t];\n'\
	'			devinfo = document.getElementById("devicedetail"+t);\n'\
	'			var phases = devinfo.getElementsByClassName("phaselet");\n'\
	'			for (var i = 0; i < phases.length; i++) {\n'\
	'				if(phases[i].id in pd) {\n'\
	'					var w = 100.0*pd[phases[i].id]/total[0];\n'\
	'					var fs = 32;\n'\
	'					if(w < 8) fs = 4*w | 0;\n'\
	'					var fs2 = fs*3/4;\n'\
	'					phases[i].style.width = w+"%";\n'\
	'					phases[i].style.left = left+"%";\n'\
	'					phases[i].title = phases[i].id+" "+pd[phases[i].id]+" ms";\n'\
	'					left += w;\n'\
	'					var time = "<t4 style=\\"font-size:"+fs+"px\\">"+pd[phases[i].id]+" ms<br></t4>";\n'\
	'					var pname = "<t3 style=\\"font-size:"+fs2+"px\\">"+phases[i].id.replace("_", " ")+"</t3>";\n'\
	'					phases[i].innerHTML = time+pname;\n'\
	'				} else {\n'\
	'					phases[i].style.width = "0%";\n'\
	'					phases[i].style.left = left+"%";\n'\
	'				}\n'\
	'			}\n'\
	'		}\n'\
	'		var cglist = document.getElementById("callgraphs");\n'\
	'		if(!cglist) return;\n'\
	'		var cg = cglist.getElementsByClassName("atop");\n'\
	'		for (var i = 0; i < cg.length; i++) {\n'\
	'			if(idlist.indexOf(cg[i].id) >= 0) {\n'\
	'				cg[i].style.display = "block";\n'\
	'			} else {\n'\
	'				cg[i].style.display = "none";\n'\
	'			}\n'\
	'		}\n'\
	'	}\n'\
	'	function devListWindow(e) {\n'\
	'		var sx = e.clientX;\n'\
	'		if(sx > window.innerWidth - 440)\n'\
	'			sx = window.innerWidth - 440;\n'\
	'		var cfg="top="+e.screenY+", left="+sx+", width=440, height=720, scrollbars=yes";\n'\
	'		var win = window.open("", "_blank", cfg);\n'\
	'		if(window.chrome) win.moveBy(sx, 0);\n'\
	'		var html = "<title>"+e.target.innerHTML+"</title>"+\n'\
	'			"<style type=\\"text/css\\">"+\n'\
	'			"   ul {list-style-type:circle;padding-left:10px;margin-left:10px;}"+\n'\
	'			"</style>"\n'\
	'		var dt = devtable[0];\n'\
	'		if(e.target.id != "devlist1")\n'\
	'			dt = devtable[1];\n'\
	'		win.document.write(html+dt);\n'\
	'	}\n'\
	'	window.addEventListener("load", function () {\n'\
	'		var dmesg = document.getElementById("dmesg");\n'\
	'		dmesg.style.width = "100%"\n'\
	'		document.getElementById("zoomin").onclick = zoomTimeline;\n'\
	'		document.getElementById("zoomout").onclick = zoomTimeline;\n'\
	'		document.getElementById("zoomdef").onclick = zoomTimeline;\n'\
	'		var devlist = document.getElementsByClassName("devlist");\n'\
	'		for (var i = 0; i < devlist.length; i++)\n'\
	'			devlist[i].onclick = devListWindow;\n'\
	'		var dev = dmesg.getElementsByClassName("thread");\n'\
	'		for (var i = 0; i < dev.length; i++) {\n'\
	'			dev[i].onclick = deviceDetail;\n'\
	'			dev[i].onmouseover = deviceHover;\n'\
	'			dev[i].onmouseout = deviceUnhover;\n'\
	'		}\n'\
	'		zoomTimeline();\n'\
	'	});\n'\
	'</script>\n'
	hf.write(script_code);
# Function: executeSuspend
# Description:
# Execute system suspend through the sysfs interface, then copy the output
# dmesg and ftrace files to the test output directory.
def executeSuspend():
	"""Execute system suspend through the sysfs interface, then copy the
	output dmesg and ftrace files to the test output directory.

	Runs sysvals.execcount suspend/resume cycles; callgraph tracing (when
	enabled) is only turned on for the second run. Requires root (writes
	to the power state file and the ftrace control files).
	"""
	global sysvals
	detectUSB(False)
	t0 = time.time()*1000
	tp = sysvals.tpath
	# execute however many s/r runs requested
	for count in range(1,sysvals.execcount+1):
		# clear the kernel ring buffer just as we start
		os.system('dmesg -C')
		# enable callgraph ftrace only for the second run
		if(sysvals.usecallgraph and count == 2):
			# set trace type
			os.system('echo function_graph > '+tp+'current_tracer')
			os.system('echo "" > '+tp+'set_ftrace_filter')
			# set trace format options
			os.system('echo funcgraph-abstime > '+tp+'trace_options')
			os.system('echo funcgraph-proc > '+tp+'trace_options')
			# focus only on device suspend and resume
			os.system('cat '+tp+'available_filter_functions | '+\
				'grep dpm_run_callback > '+tp+'set_graph_function')
		# if this is test2 and there's a delay, start here
		if(count > 1 and sysvals.x2delay > 0):
			# busy-wait (1ms granularity) until x2delay ms have elapsed
			# since the previous resume
			tN = time.time()*1000
			while (tN - t0) < sysvals.x2delay:
				tN = time.time()*1000
				time.sleep(0.001)
		# start ftrace
		if(sysvals.usecallgraph or sysvals.usetraceevents):
			print('START TRACING')
			os.system('echo 1 > '+tp+'tracing_on')
		# initiate suspend
		if(sysvals.usecallgraph or sysvals.usetraceevents):
			os.system('echo SUSPEND START > '+tp+'trace_marker')
		if(sysvals.rtcwake):
			print('SUSPEND START')
			print('will autoresume in %d seconds' % sysvals.rtcwaketime)
			sysvals.rtcWakeAlarm()
		else:
			print('SUSPEND START (press a key to resume)')
		pf = open(sysvals.powerfile, 'w')
		pf.write(sysvals.suspendmode)
		# execution will pause here
		pf.close()
		# t0 is reused as the resume timestamp for the next run's x2delay
		t0 = time.time()*1000
		# return from suspend
		print('RESUME COMPLETE')
		if(sysvals.usecallgraph or sysvals.usetraceevents):
			os.system('echo RESUME COMPLETE > '+tp+'trace_marker')
	# see if there's firmware timing data to be had
	t = sysvals.postresumetime
	if(t > 0):
		print('Waiting %d seconds for POST-RESUME trace events...' % t)
		time.sleep(t)
	# stop ftrace
	if(sysvals.usecallgraph or sysvals.usetraceevents):
		os.system('echo 0 > '+tp+'tracing_on')
		print('CAPTURING TRACE')
		writeDatafileHeader(sysvals.ftracefile)
		os.system('cat '+tp+'trace >> '+sysvals.ftracefile)
		os.system('echo "" > '+tp+'trace')
	# grab a copy of the dmesg output
	print('CAPTURING DMESG')
	writeDatafileHeader(sysvals.dmesgfile)
	os.system('dmesg -c >> '+sysvals.dmesgfile)
def writeDatafileHeader(filename):
	"""Append the test stamp header to an output data file.

	Writes the teststamp line, then optional firmware suspend/resume
	timing (when getFPDT finds it) and the post-resume wait time as
	comment lines.

	Arguments:
		filename: path of the dmesg/ftrace output file to append to
	"""
	global sysvals
	fw = getFPDT(False)
	prt = sysvals.postresumetime
	# context manager guarantees the file is closed even if a write fails
	with open(filename, 'a') as fp:
		fp.write(sysvals.teststamp+'\n')
		if(fw):
			fp.write('# fwsuspend %u fwresume %u\n' % (fw[0], fw[1]))
		if(prt > 0):
			fp.write('# post resume time %u\n' % prt)
# Function: executeAndroidSuspend
# Description:
# Execute system suspend through the sysfs interface
# on a remote android device, then transfer the output
# dmesg and ftrace files to the local output directory.
def executeAndroidSuspend():
	"""Execute system suspend through the sysfs interface on a remote
	android device, then transfer the output dmesg and ftrace files to
	the local output directory.

	All device interaction goes through the adb binary in sysvals.adb.
	"""
	global sysvals
	# check to see if the display is currently off
	tp = sysvals.tpath
	out = os.popen(sysvals.adb+\
		' shell dumpsys power | grep mScreenOn').read().strip()
	# if so we need to turn it on so we can issue a new suspend
	if(out.endswith('false')):
		print('Waking the device up for the test...')
		# send the KEYPAD_POWER keyevent to wake it up
		os.system(sysvals.adb+' shell input keyevent 26')
		# wait a few seconds so the user can see the device wake up
		time.sleep(3)
	# execute however many s/r runs requested
	for count in range(1,sysvals.execcount+1):
		# clear the kernel ring buffer just as we start
		os.system(sysvals.adb+' shell dmesg -c > /dev/null 2>&1')
		# start ftrace
		if(sysvals.usetraceevents):
			print('START TRACING')
			os.system(sysvals.adb+" shell 'echo 1 > "+tp+"tracing_on'")
	# initiate suspend
	# NOTE(review): a second execcount loop follows the prep loop above,
	# so the buffer clear/trace start runs back-to-back before any
	# suspend happens — looks suspicious, confirm the intended structure
	for count in range(1,sysvals.execcount+1):
		if(sysvals.usetraceevents):
			os.system(sysvals.adb+\
				" shell 'echo SUSPEND START > "+tp+"trace_marker'")
		print('SUSPEND START (press a key on the device to resume)')
		os.system(sysvals.adb+" shell 'echo "+sysvals.suspendmode+\
			" > "+sysvals.powerfile+"'")
		# execution will pause here, then adb will exit
		while(True):
			# poll with a trivial shell command until adb answers again
			check = os.popen(sysvals.adb+\
				' shell pwd 2>/dev/null').read().strip()
			if(len(check) > 0):
				break
			time.sleep(1)
		if(sysvals.usetraceevents):
			os.system(sysvals.adb+" shell 'echo RESUME COMPLETE > "+tp+\
				"trace_marker'")
		# return from suspend
		print('RESUME COMPLETE')
	# stop ftrace
	if(sysvals.usetraceevents):
		os.system(sysvals.adb+" shell 'echo 0 > "+tp+"tracing_on'")
		print('CAPTURING TRACE')
		os.system('echo "'+sysvals.teststamp+'" > '+sysvals.ftracefile)
		os.system(sysvals.adb+' shell cat '+tp+\
			'trace >> '+sysvals.ftracefile)
	# grab a copy of the dmesg output
	print('CAPTURING DMESG')
	os.system('echo "'+sysvals.teststamp+'" > '+sysvals.dmesgfile)
	os.system(sysvals.adb+' shell dmesg >> '+sysvals.dmesgfile)
# Function: setUSBDevicesAuto
# Description:
# Set the autosuspend control parameter of all USB devices to auto
# This can be dangerous, so use at your own risk, most devices are set
# to always-on since the kernel can't determine if the device can
# properly autosuspend
def setUSBDevicesAuto():
global sysvals
rootCheck()
for dirname, dirnames, filenames in os.walk('/sys/devices'):
if(re.match('.*/usb[0-9]*.*', dirname) and
'idVendor' in filenames and 'idProduct' in filenames):
os.system('echo auto > %s/power/control' % dirname)
name = dirname.split('/')[-1]
desc = os.popen('cat %s/product 2>/dev/null' % \
dirname).read().replace('\n', '')
ctrl = os.popen('cat %s/power/control 2>/dev/null' % \
dirname).read().replace('\n', '')
print('control is %s for %6s: %s' % (ctrl, name, desc))
# Function: yesno
# Description:
#	 Map a known parameter value string onto a single-letter flag
# Output:
#	 'Y', 'N', or ' ' if the value is unknown
def yesno(val):
	# translation table: positive states -> Y, negative states -> N
	flags = {
		'auto': 'Y', 'enabled': 'Y', 'active': 'Y', '1': 'Y',
		'on': 'N', 'disabled': 'N', 'suspended': 'N',
		'forbidden': 'N', 'unsupported': 'N',
	}
	return flags.get(val, ' ')
# Function: ms2nice
# Description:
#	 Print out a very concise time string in minutes and seconds
# Output:
#	 The time string, e.g. "1901m16s"
def ms2nice(val):
	try:
		total_ms = int(val)
	except:
		# unparsable input yields the numeric placeholder 0.0
		return 0.0
	# floor-divide to whole seconds, then split into minutes/seconds
	minutes, seconds = divmod(total_ms // 1000, 60)
	return '%3dm%2ds' % (minutes, seconds)
# Function: detectUSB
# Description:
#	 Detect all the USB hosts and devices currently connected and add
#	 a list of USB device names to sysvals for better timeline readability
# Arguments:
#	 output: True to output the info to stdout, False otherwise
def detectUSB(output):
	global sysvals
	# identity attributes read from each usb device directory
	field = {'idVendor':'', 'idProduct':'', 'product':'', 'speed':''}
	# power management attributes read from each device's power/ directory
	power = {'async':'', 'autosuspend':'', 'autosuspend_delay_ms':'',
		'control':'', 'persist':'', 'runtime_enabled':'',
		'runtime_status':'', 'runtime_usage':'',
		'runtime_active_time':'',
		'runtime_suspended_time':'',
		'active_duration':'',
		'connected_duration':''}
	if(output):
		print('LEGEND')
		print('---------------------------------------------------------------------------------------------')
		print(' A = async/sync PM queue Y/N D = autosuspend delay (seconds)')
		print(' S = autosuspend Y/N rACTIVE = runtime active (min/sec)')
		print(' P = persist across suspend Y/N rSUSPEN = runtime suspend (min/sec)')
		print(' E = runtime suspend enabled/forbidden Y/N ACTIVE = active duration (min/sec)')
		print(' R = runtime status active/suspended Y/N CONNECT = connected duration (min/sec)')
		print(' U = runtime usage count')
		print('---------------------------------------------------------------------------------------------')
		print(' NAME ID DESCRIPTION SPEED A S P E R U D rACTIVE rSUSPEN ACTIVE CONNECT')
		print('---------------------------------------------------------------------------------------------')
	# walk sysfs looking for usb device nodes (they carry idVendor/idProduct)
	for dirname, dirnames, filenames in os.walk('/sys/devices'):
		if(re.match('.*/usb[0-9]*.*', dirname) and
			'idVendor' in filenames and 'idProduct' in filenames):
			for i in field:
				field[i] = os.popen('cat %s/%s 2>/dev/null' % \
					(dirname, i)).read().replace('\n', '')
			name = dirname.split('/')[-1]
			# register a human readable alias for this device name,
			# preferring the product string if one is available
			if(len(field['product']) > 0):
				sysvals.altdevname[name] = \
					'%s [%s]' % (field['product'], name)
			else:
				sysvals.altdevname[name] = \
					'%s:%s [%s]' % (field['idVendor'], \
						field['idProduct'], name)
			if(output):
				for i in power:
					power[i] = os.popen('cat %s/power/%s 2>/dev/null' % \
						(dirname, i)).read().replace('\n', '')
				# hubs (usbN) are left-aligned, devices right-aligned
				if(re.match('usb[0-9]*', name)):
					first = '%-8s' % name
				else:
					first = '%8s' % name
				print('%s [%s:%s] %-20s %-4s %1s %1s %1s %1s %1s %1s %1s %s %s %s %s' % \
					(first, field['idVendor'], field['idProduct'], \
					field['product'][0:20], field['speed'], \
					yesno(power['async']), \
					yesno(power['control']), \
					yesno(power['persist']), \
					yesno(power['runtime_enabled']), \
					yesno(power['runtime_status']), \
					power['runtime_usage'], \
					power['autosuspend'], \
					ms2nice(power['runtime_active_time']), \
					ms2nice(power['runtime_suspended_time']), \
					ms2nice(power['active_duration']), \
					ms2nice(power['connected_duration'])))
# Function: getModes
# Description:
#	 Determine the supported power modes on this system
# Output:
#	 A string list of the available modes
def getModes():
	global sysvals
	modes = ''
	if(not sysvals.android):
		if(os.path.exists(sysvals.powerfile)):
			fp = open(sysvals.powerfile, 'r')
			# the file is a whitespace separated list, e.g.
			# "freeze standby mem"; str.split() replaces the
			# deprecated string.split() (removed in python 3)
			modes = fp.read().split()
			fp.close()
	else:
		# on android, read the power state file through adb
		line = os.popen(sysvals.adb+' shell cat '+\
			sysvals.powerfile).read().strip()
		modes = line.split()
	return modes
# Function: getFPDT
# Description:
#	 Read the acpi bios tables and pull out FPDT, the firmware data
# Arguments:
#	 output: True to output the info to stdout, False otherwise
# Output:
#	 [s3 suspend time, s3 resume time] in ns, or False on failure
def getFPDT(output):
	global sysvals
	rectype = {}
	rectype[0] = 'Firmware Basic Boot Performance Record'
	rectype[1] = 'S3 Performance Table Record'
	prectype = {}
	prectype[0] = 'Basic S3 Resume Performance Record'
	prectype[1] = 'Basic S3 Suspend Performance Record'
	rootCheck()
	# verify the FPDT table and physical memory are present and readable
	if(not os.path.exists(sysvals.fpdtpath)):
		if(output):
			doError('file doesnt exist: %s' % sysvals.fpdtpath, False)
		return False
	if(not os.access(sysvals.fpdtpath, os.R_OK)):
		if(output):
			doError('file isnt readable: %s' % sysvals.fpdtpath, False)
		return False
	if(not os.path.exists(sysvals.mempath)):
		if(output):
			doError('file doesnt exist: %s' % sysvals.mempath, False)
		return False
	if(not os.access(sysvals.mempath, os.R_OK)):
		if(output):
			doError('file isnt readable: %s' % sysvals.mempath, False)
		return False
	fp = open(sysvals.fpdtpath, 'rb')
	buf = fp.read()
	fp.close()
	# an FPDT table has a 36 byte ACPI header
	if(len(buf) < 36):
		if(output):
			doError('Invalid FPDT table data, should '+\
				'be at least 36 bytes', False)
		return False
	table = struct.unpack('4sIBB6s8sI4sI', buf[0:36])
	if(output):
		print('')
		print('Firmware Performance Data Table (%s)' % table[0])
		print(' Signature : %s' % table[0])
		print(' Table Length : %u' % table[1])
		print(' Revision : %u' % table[2])
		print(' Checksum : 0x%x' % table[3])
		print(' OEM ID : %s' % table[4])
		print(' OEM Table ID : %s' % table[5])
		print(' OEM Revision : %u' % table[6])
		print(' Creator ID : %s' % table[7])
		print(' Creator Revision : 0x%x' % table[8])
		print('')
	if(table[0] != 'FPDT'):
		if(output):
			# BUGFIX: doError requires the "help" argument; the
			# original call omitted it and raised a TypeError
			doError('Invalid FPDT table', False)
		return False
	if(len(buf) <= 36):
		return False
	i = 0
	fwData = [0, 0]
	records = buf[36:]
	fp = open(sysvals.mempath, 'rb')
	# walk the variable-length records that follow the header
	while(i < len(records)):
		header = struct.unpack('HBB', records[i:i+4])
		if(header[1] == 0):
			# zero-length record: cannot advance, bail out
			break
		if(header[0] not in rectype):
			# BUGFIX: skip unknown records by their length; the
			# original bare "continue" never advanced i and looped forever
			i += header[1]
			continue
		if(header[1] != 16):
			i += header[1]
			continue
		addr = struct.unpack('Q', records[i+8:i+16])[0]
		try:
			fp.seek(addr)
			first = fp.read(8)
		except:
			doError('Bad address 0x%x in %s' % (addr, sysvals.mempath), False)
		rechead = struct.unpack('4sI', first)
		recdata = fp.read(rechead[1]-8)
		if(rechead[0] == 'FBPT'):
			record = struct.unpack('HBBIQQQQQ', recdata)
			if(output):
				print('%s (%s)' % (rectype[header[0]], rechead[0]))
				print(' Reset END : %u ns' % record[4])
				print(' OS Loader LoadImage Start : %u ns' % record[5])
				print(' OS Loader StartImage Start : %u ns' % record[6])
				print(' ExitBootServices Entry : %u ns' % record[7])
				print(' ExitBootServices Exit : %u ns' % record[8])
		elif(rechead[0] == 'S3PT'):
			if(output):
				print('%s (%s)' % (rectype[header[0]], rechead[0]))
			j = 0
			# walk the S3 performance sub-records
			while(j < len(recdata)):
				prechead = struct.unpack('HBB', recdata[j:j+4])
				if(prechead[1] == 0):
					# zero-length sub-record: bail out
					break
				if(prechead[0] not in prectype):
					# BUGFIX: advance past unknown sub-records;
					# the original bare "continue" looped forever
					j += prechead[1]
					continue
				if(prechead[0] == 0):
					record = struct.unpack('IIQQ', recdata[j:j+prechead[1]])
					fwData[1] = record[2]
					if(output):
						print(' %s' % prectype[prechead[0]])
						print(' Resume Count : %u' % \
							record[1])
						print(' FullResume : %u ns' % \
							record[2])
						print(' AverageResume : %u ns' % \
							record[3])
				elif(prechead[0] == 1):
					record = struct.unpack('QQ', recdata[j+4:j+prechead[1]])
					fwData[0] = record[1] - record[0]
					if(output):
						print(' %s' % prectype[prechead[0]])
						print(' SuspendStart : %u ns' % \
							record[0])
						print(' SuspendEnd : %u ns' % \
							record[1])
						print(' SuspendTime : %u ns' % \
							fwData[0])
				j += prechead[1]
		if(output):
			print('')
		i += header[1]
	fp.close()
	return fwData
# Function: statusCheck
# Description:
#	 Verify that the requested command and options will work, and
#	 print the results to the terminal
# Output:
#	 True if the test will work, False if not
def statusCheck():
	global sysvals
	status = True
	if(sysvals.android):
		print('Checking the android system ...')
	else:
		print('Checking this system (%s)...' % platform.node())
	# check if adb is connected to a device
	if(sysvals.android):
		res = 'NO'
		out = os.popen(sysvals.adb+' get-state').read().strip()
		if(out == 'device'):
			res = 'YES'
		print(' is android device connected: %s' % res)
		if(res != 'YES'):
			print(' Please connect the device before using this tool')
			return False
	# check we have root access
	res = 'NO (No features of this tool will work!)'
	if(sysvals.android):
		out = os.popen(sysvals.adb+' shell id').read().strip()
		if('root' in out):
			res = 'YES'
	else:
		if(os.environ['USER'] == 'root'):
			res = 'YES'
	print(' have root access: %s' % res)
	if(res != 'YES'):
		if(sysvals.android):
			print(' Try running "adb root" to restart the daemon as root')
		else:
			print(' Try running this script with sudo')
		return False
	# check sysfs is mounted
	res = 'NO (No features of this tool will work!)'
	if(sysvals.android):
		out = os.popen(sysvals.adb+' shell ls '+\
			sysvals.powerfile).read().strip()
		if(out == sysvals.powerfile):
			res = 'YES'
	else:
		if(os.path.exists(sysvals.powerfile)):
			res = 'YES'
	print(' is sysfs mounted: %s' % res)
	if(res != 'YES'):
		return False
	# check target mode is a valid mode
	res = 'NO'
	modes = getModes()
	if(sysvals.suspendmode in modes):
		res = 'YES'
	else:
		status = False
	print(' is "%s" a valid power mode: %s' % (sysvals.suspendmode, res))
	if(res == 'NO'):
		print(' valid power modes are: %s' % modes)
		print(' please choose one with -m')
	# check if the tool can unlock the device
	if(sysvals.android):
		res = 'YES'
		out1 = os.popen(sysvals.adb+\
			' shell dumpsys power | grep mScreenOn').read().strip()
		out2 = os.popen(sysvals.adb+\
			' shell input').read().strip()
		# both the screen state query and the input tool must work
		if(not out1.startswith('mScreenOn') or not out2.startswith('usage')):
			res = 'NO (wake the android device up before running the test)'
		print(' can I unlock the screen: %s' % res)
	# check if ftrace is available
	res = 'NO'
	ftgood = verifyFtrace()
	if(ftgood):
		res = 'YES'
	elif(sysvals.usecallgraph):
		# ftrace is mandatory when callgraphs were requested
		status = False
	print(' is ftrace supported: %s' % res)
	# what data source are we using
	res = 'DMESG'
	if(ftgood):
		# probe which of the required trace events are exposed; only a
		# full set allows running from ftrace data alone
		sysvals.usetraceeventsonly = True
		sysvals.usetraceevents = False
		for e in sysvals.traceevents:
			check = False
			if(sysvals.android):
				out = os.popen(sysvals.adb+' shell ls -d '+\
					sysvals.epath+e).read().strip()
				if(out == sysvals.epath+e):
					check = True
			else:
				if(os.path.exists(sysvals.epath+e)):
					check = True
			if(not check):
				sysvals.usetraceeventsonly = False
			if(e == 'suspend_resume' and check):
				sysvals.usetraceevents = True
		if(sysvals.usetraceevents and sysvals.usetraceeventsonly):
			res = 'FTRACE (all trace events found)'
		elif(sysvals.usetraceevents):
			res = 'DMESG and FTRACE (suspend_resume trace event found)'
	print(' timeline data source: %s' % res)
	# check if rtcwake
	res = 'NO'
	if(sysvals.rtcpath != ''):
		res = 'YES'
	elif(sysvals.rtcwake):
		# rtcwake was requested but no rtc device is available
		status = False
	print(' is rtcwake supported: %s' % res)
	return status
# Function: doError
# Description:
#	 generic error function for catastrophic failures
# Arguments:
#	 msg: the error message to print
#	 help: True if printHelp should be called after, False otherwise
def doError(msg, help):
	if(help == True):
		printHelp()
	# format the message inside the call: the original
	# "print('ERROR: %s\n') % msg" only worked because of the
	# python 2 print statement and breaks under python 3
	print('ERROR: %s\n' % msg)
	sys.exit()
# Function: doWarning
# Description:
#	 generic warning function for non-catastrophic anomalies
# Arguments:
#	 msg: the warning message to print
#	 file: If not empty, a filename to request be sent to the owner for debug
def doWarning(msg, file):
	# format the message inside the call: "print('...') % msg" only
	# worked via the python 2 print statement and breaks under python 3
	print('/* %s */' % msg)
	if(file):
		print('/* For a fix, please send this'+\
			' %s file to <todd.e.brandt@intel.com> */' % file)
# Function: rootCheck
# Description:
#	 quick check to see if we have root access
def rootCheck():
	user = os.environ['USER']
	if(user != 'root'):
		doError('This script must be run as root', False)
# Function: getArgInt
# Description:
#	 pull out an integer argument from the command line with checks
# Arguments:
#	 name: the option name, used in error messages
#	 args: the argument iterator to pull the value from
#	 min/max: the inclusive range the value must fall in
# Output:
#	 the validated integer value (exits via doError on failure)
def getArgInt(name, args, min, max):
	try:
		# builtin next() works on python 2.6+ and python 3,
		# unlike the python-2-only args.next() method
		arg = next(args)
	except:
		doError(name+': no argument supplied', True)
	try:
		val = int(arg)
	except:
		doError(name+': non-integer value given', True)
	if(val < min or val > max):
		doError(name+': value should be between %d and %d' % (min, max), True)
	return val
# Function: rerunTest
# Description:
#	 generate an output from an existing set of ftrace/dmesg logs
def rerunTest():
	global sysvals
	# scan the supplied ftrace log to learn which trace events it contains
	if(sysvals.ftracefile != ''):
		doesTraceLogHaveTraceEvents()
	if(sysvals.dmesgfile == '' and not sysvals.usetraceeventsonly):
		doError('recreating this html output '+\
			'requires a dmesg file', False)
	sysvals.setOutputFile()
	vprint('Output file: %s' % sysvals.htmlfile)
	print('PROCESSING DATA')
	# kernels >= 3.15 put all timing data in ftrace; older ones need dmesg
	if(sysvals.usetraceeventsonly):
		testruns = parseTraceLog()
	else:
		testruns = loadKernelLog()
		for data in testruns:
			parseKernelLog(data)
		# enrich the dmesg timeline with whatever ftrace data exists
		if(sysvals.ftracefile != ''):
			appendIncompleteTraceLog(testruns)
	createHTML(testruns)
# Function: runTest
# Description:
#	 execute a suspend/resume, gather the logs, and generate the output
# Arguments:
#	 subdir: the directory in which to create the test output files
def runTest(subdir):
	global sysvals
	# prepare for the test
	if(not sysvals.android):
		initFtrace()
	else:
		initFtraceAndroid()
	sysvals.initTestOutput(subdir)
	vprint('Output files:\n %s' % sysvals.dmesgfile)
	if(sysvals.usecallgraph or
		sysvals.usetraceevents or
		sysvals.usetraceeventsonly):
		vprint(' %s' % sysvals.ftracefile)
	vprint(' %s' % sysvals.htmlfile)
	# execute the test
	if(not sysvals.android):
		executeSuspend()
	else:
		executeAndroidSuspend()
	# analyze the data and create the html output
	print('PROCESSING DATA')
	if(sysvals.usetraceeventsonly):
		# data for kernels 3.15 or newer is entirely in ftrace
		testruns = parseTraceLog()
	else:
		# data for kernels older than 3.15 is primarily in dmesg
		testruns = loadKernelLog()
		for data in testruns:
			parseKernelLog(data)
		if(sysvals.usecallgraph or sysvals.usetraceevents):
			appendIncompleteTraceLog(testruns)
	createHTML(testruns)
# Function: runSummary
# Description:
#	 create a summary of tests in a sub-directory
# Arguments:
#	 subdir: the directory to scan recursively for test output files
#	 output: True to print progress info to stdout, False otherwise
def runSummary(subdir, output):
	global sysvals
	# get a list of ftrace output files
	files = []
	for dirname, dirnames, filenames in os.walk(subdir):
		for filename in filenames:
			if(re.match('.*_ftrace.txt', filename)):
				files.append("%s/%s" % (dirname, filename))
	# process the files in order and get an array of data objects
	testruns = []
	for file in sorted(files):
		if output:
			print("Test found in %s" % os.path.dirname(file))
		sysvals.ftracefile = file
		# the matching dmesg log lives next to the ftrace log
		sysvals.dmesgfile = file.replace('_ftrace.txt', '_dmesg.txt')
		doesTraceLogHaveTraceEvents()
		sysvals.usecallgraph = False
		if not sysvals.usetraceeventsonly:
			# older-kernel data: the dmesg log is required
			if(not os.path.exists(sysvals.dmesgfile)):
				print("Skipping %s: not a valid test input" % file)
				continue
			else:
				if output:
					f = os.path.basename(sysvals.ftracefile)
					d = os.path.basename(sysvals.dmesgfile)
					print("\tInput files: %s and %s" % (f, d))
				testdata = loadKernelLog()
				data = testdata[0]
				parseKernelLog(data)
				testdata = [data]
				appendIncompleteTraceLog(testdata)
		else:
			if output:
				print("\tInput file: %s" % os.path.basename(sysvals.ftracefile))
			testdata = parseTraceLog()
			data = testdata[0]
		# align the timeline on the suspend point and link to its html
		data.normalizeTime(data.tSuspended)
		link = file.replace(subdir+'/', '').replace('_ftrace.txt', '.html')
		data.outfile = link
		testruns.append(data)
	createHTMLSummarySimple(testruns, subdir+'/summary.html')
# Function: printHelp
# Description:
#	 print out the help text
# Output:
#	 always True
def printHelp():
	global sysvals
	modes = getModes()
	print('')
	print('AnalyzeSuspend v%.1f' % sysvals.version)
	print('Usage: sudo analyze_suspend.py <options>')
	print('')
	print('Description:')
	print(' This tool is designed to assist kernel and OS developers in optimizing')
	print(' their linux stack\'s suspend/resume time. Using a kernel image built')
	print(' with a few extra options enabled, the tool will execute a suspend and')
	print(' capture dmesg and ftrace data until resume is complete. This data is')
	print(' transformed into a device timeline and an optional callgraph to give')
	print(' a detailed view of which devices/subsystems are taking the most')
	print(' time in suspend/resume.')
	print('')
	print(' Generates output files in subdirectory: suspend-mmddyy-HHMMSS')
	print(' HTML output: <hostname>_<mode>.html')
	print(' raw dmesg output: <hostname>_<mode>_dmesg.txt')
	print(' raw ftrace output: <hostname>_<mode>_ftrace.txt')
	print('')
	print('Options:')
	print(' [general]')
	print(' -h Print this help text')
	print(' -v Print the current tool version')
	print(' -verbose Print extra information during execution and analysis')
	print(' -status Test to see if the system is enabled to run this tool')
	print(' -modes List available suspend modes')
	# BUGFIX: format inside the call; "print('...') % (...)" relied on
	# the python 2 print statement and breaks under python 3
	print(' -m mode Mode to initiate for suspend %s (default: %s)' % (modes, sysvals.suspendmode))
	print(' -rtcwake t Use rtcwake to autoresume after <t> seconds (default: disabled)')
	print(' [advanced]')
	print(' -f Use ftrace to create device callgraphs (default: disabled)')
	print(' -filter "d1 d2 ..." Filter out all but this list of dev names')
	print(' -x2 Run two suspend/resumes back to back (default: disabled)')
	print(' -x2delay t Minimum millisecond delay <t> between the two test runs (default: 0 ms)')
	print(' -postres t Time after resume completion to wait for post-resume events (default: 0 S)')
	print(' -multi n d Execute <n> consecutive tests at <d> seconds intervals. The outputs will')
	print(' be created in a new subdirectory with a summary page.')
	print(' [utilities]')
	print(' -fpdt Print out the contents of the ACPI Firmware Performance Data Table')
	print(' -usbtopo Print out the current USB topology with power info')
	print(' -usbauto Enable autosuspend for all connected USB devices')
	print(' [android testing]')
	print(' -adb binary Use the given adb binary to run the test on an android device.')
	print(' The device should already be connected and with root access.')
	print(' Commands will be executed on the device using "adb shell"')
	print(' [re-analyze data from previous runs]')
	print(' -ftrace ftracefile Create HTML output using ftrace input')
	print(' -dmesg dmesgfile Create HTML output using dmesg (not needed for kernel >= 3.15)')
	print(' -summary directory Create a summary of all test in this dir')
	print('')
	return True
# ----------------- MAIN --------------------
# exec start (skipped if script is loaded as library)
if __name__ == '__main__':
	cmd = ''
	cmdarg = ''
	multitest = {'run': False, 'count': 0, 'delay': 0}
	# loop through the command line arguments
	args = iter(sys.argv[1:])
	for arg in args:
		if(arg == '-m'):
			try:
				# next(args) replaces the python-2-only args.next()
				val = next(args)
			except:
				doError('No mode supplied', True)
			sysvals.suspendmode = val
		elif(arg == '-adb'):
			try:
				val = next(args)
			except:
				doError('No adb binary supplied', True)
			if(not os.path.exists(val)):
				doError('file doesnt exist: %s' % val, False)
			if(not os.access(val, os.X_OK)):
				doError('file isnt executable: %s' % val, False)
			try:
				check = os.popen(val+' version').read().strip()
			except:
				doError('adb version failed to execute', False)
			if(not re.match('Android Debug Bridge .*', check)):
				doError('adb version failed to execute', False)
			sysvals.adb = val
			sysvals.android = True
		elif(arg == '-x2'):
			if(sysvals.postresumetime > 0):
				doError('-x2 is not compatible with -postres', False)
			sysvals.execcount = 2
		elif(arg == '-x2delay'):
			sysvals.x2delay = getArgInt('-x2delay', args, 0, 60000)
		elif(arg == '-postres'):
			if(sysvals.execcount != 1):
				doError('-x2 is not compatible with -postres', False)
			sysvals.postresumetime = getArgInt('-postres', args, 0, 3600)
		elif(arg == '-f'):
			sysvals.usecallgraph = True
		elif(arg == '-modes'):
			cmd = 'modes'
		elif(arg == '-fpdt'):
			cmd = 'fpdt'
		elif(arg == '-usbtopo'):
			cmd = 'usbtopo'
		elif(arg == '-usbauto'):
			cmd = 'usbauto'
		elif(arg == '-status'):
			cmd = 'status'
		elif(arg == '-verbose'):
			sysvals.verbose = True
		elif(arg == '-v'):
			print("Version %.1f" % sysvals.version)
			sys.exit()
		elif(arg == '-rtcwake'):
			sysvals.rtcwake = True
			sysvals.rtcwaketime = getArgInt('-rtcwake', args, 0, 3600)
		elif(arg == '-multi'):
			multitest['run'] = True
			multitest['count'] = getArgInt('-multi n (exec count)', args, 2, 1000000)
			multitest['delay'] = getArgInt('-multi d (delay between tests)', args, 0, 3600)
		elif(arg == '-dmesg'):
			try:
				val = next(args)
			except:
				doError('No dmesg file supplied', True)
			sysvals.notestrun = True
			sysvals.dmesgfile = val
			if(os.path.exists(sysvals.dmesgfile) == False):
				doError('%s doesnt exist' % sysvals.dmesgfile, False)
		elif(arg == '-ftrace'):
			try:
				val = next(args)
			except:
				doError('No ftrace file supplied', True)
			sysvals.notestrun = True
			sysvals.usecallgraph = True
			sysvals.ftracefile = val
			if(os.path.exists(sysvals.ftracefile) == False):
				doError('%s doesnt exist' % sysvals.ftracefile, False)
		elif(arg == '-summary'):
			try:
				val = next(args)
			except:
				doError('No directory supplied', True)
			cmd = 'summary'
			cmdarg = val
			sysvals.notestrun = True
			if(os.path.isdir(val) == False):
				doError('%s isnt accesible' % val, False)
		elif(arg == '-filter'):
			try:
				val = next(args)
			except:
				doError('No devnames supplied', True)
			sysvals.setDeviceFilter(val)
		elif(arg == '-h'):
			printHelp()
			sys.exit()
		else:
			doError('Invalid argument: '+arg, True)
	# just run a utility command and exit
	if(cmd != ''):
		if(cmd == 'status'):
			statusCheck()
		elif(cmd == 'fpdt'):
			if(sysvals.android):
				doError('cannot read FPDT on android device', False)
			getFPDT(True)
		elif(cmd == 'usbtopo'):
			if(sysvals.android):
				doError('cannot read USB topology '+\
					'on an android device', False)
			detectUSB(True)
		elif(cmd == 'modes'):
			modes = getModes()
			# BUGFIX: use the print function form; the bare python 2
			# "print modes" statement is a syntax error in python 3
			print(modes)
		elif(cmd == 'usbauto'):
			setUSBDevicesAuto()
		elif(cmd == 'summary'):
			print("Generating a summary of folder \"%s\"" % cmdarg)
			runSummary(cmdarg, True)
		sys.exit()
	# run test on android device
	if(sysvals.android):
		if(sysvals.usecallgraph):
			doError('ftrace (-f) is not yet supported '+\
				'in the android kernel', False)
		if(sysvals.notestrun):
			doError('cannot analyze test files on the '+\
				'android device', False)
	# if instructed, re-analyze existing data files
	if(sysvals.notestrun):
		rerunTest()
		sys.exit()
	# verify that we can run a test
	if(not statusCheck()):
		print('Check FAILED, aborting the test run!')
		sys.exit()
	if multitest['run']:
		# run multiple tests in a separate subdirectory
		s = 'x%d' % multitest['count']
		subdir = datetime.now().strftime('suspend-'+s+'-%m%d%y-%H%M%S')
		os.mkdir(subdir)
		for i in range(multitest['count']):
			if(i != 0):
				print('Waiting %d seconds...' % (multitest['delay']))
				time.sleep(multitest['delay'])
			print('TEST (%d/%d) START' % (i+1, multitest['count']))
			runTest(subdir)
			print('TEST (%d/%d) COMPLETE' % (i+1, multitest['count']))
		runSummary(subdir, False)
	else:
		# run the test in the current directory
		runTest(".")
| gpl-2.0 |
Aegeaner/spark | python/pyspark/testing/utils.py | 1 | 3566 | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import glob
import os
import struct
import sys
import unittest
from pyspark import SparkContext, SparkConf
# Optional-dependency feature flags: tests that need SciPy/NumPy check
# these booleans and skip themselves when the library is unavailable.
have_scipy = False
have_numpy = False
try:
    import scipy.sparse
    have_scipy = True
except:
    # No SciPy, but that's okay, we'll skip those tests
    pass
try:
    import numpy as np
    have_numpy = True
except:
    # No NumPy, but that's okay, we'll skip those tests
    pass
# Root of the Spark installation, used to locate jars and test resources.
SPARK_HOME = os.environ["SPARK_HOME"]
def read_int(b):
    """Decode a 4-byte big-endian signed integer from *b*."""
    (value,) = struct.unpack("!i", b)
    return value
def write_int(i):
    """Encode *i* as a 4-byte big-endian signed integer."""
    packed = struct.pack("!i", i)
    return packed
class QuietTest(object):
    """Context manager that silences Spark's log4j output for a code block.

    On entry the JVM root logger is raised to FATAL; on exit the previous
    level is restored, even if the block raised an exception.
    """
    def __init__(self, sc):
        # handle to the JVM-side log4j package of this SparkContext
        self.log4j = sc._jvm.org.apache.log4j
    def __enter__(self):
        # remember the current root level so __exit__ can restore it
        self.old_level = self.log4j.LogManager.getRootLogger().getLevel()
        self.log4j.LogManager.getRootLogger().setLevel(self.log4j.Level.FATAL)
    def __exit__(self, exc_type, exc_val, exc_tb):
        self.log4j.LogManager.getRootLogger().setLevel(self.old_level)
class PySparkTestCase(unittest.TestCase):
    """Base test case that creates a fresh local SparkContext per test."""
    def setUp(self):
        # save sys.path because creating a SparkContext can modify it
        self._old_sys_path = list(sys.path)
        class_name = self.__class__.__name__
        self.sc = SparkContext('local[4]', class_name)
    def tearDown(self):
        self.sc.stop()
        # restore the interpreter path exactly as it was before setUp
        sys.path = self._old_sys_path
class ReusedPySparkTestCase(unittest.TestCase):
    """Base test case that shares one SparkContext across the whole class."""
    @classmethod
    def conf(cls):
        """
        Override this in subclasses to supply a more specific conf
        """
        return SparkConf()
    @classmethod
    def setUpClass(cls):
        # one context for every test in the class; cheaper than per-test
        cls.sc = SparkContext('local[4]', cls.__name__, conf=cls.conf())
    @classmethod
    def tearDownClass(cls):
        cls.sc.stop()
class ByteArrayOutput(object):
    """Minimal file-like sink that accumulates written bytes in memory."""
    def __init__(self):
        self.buffer = bytearray()
    def write(self, b):
        # extend() on a bytearray is equivalent to += for bytes input
        self.buffer.extend(b)
    def close(self):
        # nothing to release; present only for file-object compatibility
        pass
def search_jar(project_relative_path, jar_name_prefix):
    """Locate the single artifact jar for a project under SPARK_HOME.

    Searches both the sbt (target/scala-*) and maven (target/) output
    directories, skipping javadoc/sources/tests jars.  Returns the jar
    path, None when nothing matches, or raises when the match is ambiguous.
    """
    project_full_path = os.path.join(
        os.environ["SPARK_HOME"], project_relative_path)
    # We should ignore the following jars
    ignored_jar_suffixes = ("javadoc.jar", "sources.jar", "test-sources.jar", "tests.jar")
    # sbt and maven place artifacts in different target subdirectories,
    # so both locations must be searched (sbt first, then maven)
    candidate_patterns = [
        os.path.join(project_full_path, "target/scala-*/%s*.jar" % jar_name_prefix),
        os.path.join(project_full_path, "target/%s*.jar" % jar_name_prefix),
    ]
    jar_paths = []
    for pattern in candidate_patterns:
        jar_paths.extend(glob.glob(pattern))
    jars = [jar for jar in jar_paths if not jar.endswith(ignored_jar_suffixes)]
    if not jars:
        return None
    if len(jars) > 1:
        raise Exception("Found multiple JARs: %s; please remove all but one" % (", ".join(jars)))
    return jars[0]
| apache-2.0 |
Aorjoa/aiyara-ceph-dash | .tox/flake8/lib/python2.7/site-packages/pip/_vendor/requests/packages/urllib3/util/connection.py | 353 | 3380 | from __future__ import absolute_import
import socket
try:
from select import poll, POLLIN
except ImportError: # `poll` doesn't exist on OSX and other platforms
poll = False
try:
from select import select
except ImportError: # `select` doesn't exist on AppEngine.
select = False
def is_connection_dropped(conn):  # Platform-specific
    """
    Returns True if the connection is dropped and should be closed.

    :param conn:
        :class:`httplib.HTTPConnection` object.

    Note: For platforms like AppEngine, this will always return ``False`` to
    let the platform handle connection recycling transparently for us.
    """
    sock = getattr(conn, 'sock', False)
    if sock is False:  # Platform-specific: AppEngine
        return False
    if sock is None:  # Connection already closed (such as by httplib).
        return True
    if poll:
        # Preferred path: poll is available on this platform.
        pollster = poll()
        pollster.register(sock, POLLIN)
        for (fno, ev) in pollster.poll(0.0):
            if fno == sock.fileno():
                # Either data is buffered (bad), or the connection is dropped.
                return True
        # nothing readable: connection looks healthy (implicit falsy result)
        return None
    if not select:  # Platform-specific: AppEngine
        return False
    try:
        return select([sock], [], [], 0.0)[0]
    except socket.error:
        return True
# This function is copied from socket.py in the Python 2.7 standard
# library test suite. Added to its signature is only `socket_options`.
def create_connection(address, timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
                      source_address=None, socket_options=None):
    """Connect to *address* and return the socket object.

    Convenience function. Connect to *address* (a 2-tuple ``(host,
    port)``) and return the socket object. Passing the optional
    *timeout* parameter will set the timeout on the socket instance
    before attempting to connect. If no *timeout* is supplied, the
    global default timeout setting returned by :func:`getdefaulttimeout`
    is used. If *source_address* is set it must be a tuple of (host, port)
    for the socket to bind as a source address before making the connection.
    An host of '' or port 0 tells the OS to use the default.
    """
    host, port = address
    if host.startswith('['):
        # strip the brackets from an IPv6 literal such as "[::1]"
        host = host.strip('[]')
    err = None
    # try every address the resolver returns until one connects
    for res in socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM):
        af, socktype, proto, canonname, sa = res
        sock = None
        try:
            sock = socket.socket(af, socktype, proto)
            # If provided, set socket level options before connecting.
            # This is the only addition urllib3 makes to this function.
            _set_socket_options(sock, socket_options)
            if timeout is not socket._GLOBAL_DEFAULT_TIMEOUT:
                sock.settimeout(timeout)
            if source_address:
                sock.bind(source_address)
            sock.connect(sa)
            return sock
        except socket.error as e:
            # remember the last failure and try the next resolved address
            err = e
            if sock is not None:
                sock.close()
                sock = None
    # every candidate address failed: re-raise the last error seen
    if err is not None:
        raise err
    raise socket.error("getaddrinfo returns an empty list")
def _set_socket_options(sock, options):
if options is None:
return
for opt in options:
sock.setsockopt(*opt)
| bsd-2-clause |
lfrdm/medpy | bin/medpy_intensity_range_standardization.py | 2 | 9361 | #!/usr/bin/python
"""
Standardizes the intensity range / profile of a number of similar images.
Copyright (C) 2013 Oskar Maier
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
# build-in modules
import os
import pickle
import argparse
import logging
# third-party modules
import numpy
# path changes
# own modules
from medpy.core import Logger
from medpy.core.exceptions import ArgumentError
from medpy.io import load, save
from medpy.utilities.argparseu import sequenceOfIntegersGeAscendingStrict
from medpy.filter import IntensityRangeStandardization
# information
__author__ = "Oskar Maier"
__version__ = "r0.1.0, 2013-10-11"
__email__ = "oskar.maier@googlemail.com"
__status__ = "Release"
__description__ = """
Standardizes the intensity range / profile of a number of similar images.
Takes a number of images that display the same scene (most commonly MRI volumes of the
same body region) and learns an average intensity range model from these. This model can
then be used to transfer the training image set and other, formerly unseen images, to the
learned average intensity range. Such prepared, these images display the same intensity
profiles for the same structures.
The employed algorithm guarantees a lossless intensity transformation and throws an
exception, should the model require re-training.
The application requires the supplied images to be stripped of their background. This can
either be done by setting a threshold value or by supplying a foreground-mask for each
image.
The script provide two mutually exclusive functionalities:
(1) the training of a new model and an optional application to the training images
(2) the application of a stored model to a number of images
Depending on the application, different arguments are require that are reflected by the
argument grouping.
The implementation is based on:
[1] Nyul, L.G.; Udupa, J.K.; Xuan Zhang, "New variants of a method of MRI scale
standardization," Medical Imaging, IEEE Transactions on , vol.19, no.2, pp.143-150,
Feb. 2000
For more details on the algorithm, see the medpy.filter.IntensityRangeStandardization class.
Copyright (C) 2013 Oskar Maier
This program comes with ABSOLUTELY NO WARRANTY; This is free software,
and you are welcome to redistribute it under certain conditions; see
the LICENSE file or <http://www.gnu.org/licenses/> for details.
"""
# code
def main():
    """Entry point: train and/or apply an intensity standardization model.

    Depending on the validated arguments, either loads a pickled
    IntensityRangeStandardization model and applies it (application mode),
    or trains a new model from the input images and saves it (training
    mode). Optionally writes the intensity-transformed images back to disk.
    """
    args = getArguments(getParser())

    # prepare logger
    logger = Logger.getInstance()
    if args.debug:
        logger.setLevel(logging.DEBUG)
    elif args.verbose:
        logger.setLevel(logging.INFO)

    # loading input images (as image, header pairs)
    images = []
    headers = []
    for image_name in args.images:
        i, h = load(image_name)
        images.append(i)
        headers.append(h)

    # loading binary foreground masks if supplied, else create masks from
    # the threshold value
    if args.masks:
        # FIX: the numpy.bool alias is deprecated (removed in NumPy 1.24);
        # the builtin bool is the equivalent dtype on all NumPy versions.
        masks = [load(mask_name)[0].astype(bool) for mask_name in args.masks]
    else:
        masks = [i > args.threshold for i in images]

    # if in application mode, load the supplied model and apply it to the images
    if args.lmodel:
        logger.info('Loading the model and transforming images...')
        # FIX: pickle streams are binary; the file must be opened in 'rb'
        # mode (text mode breaks under Python 3 and on Windows). This also
        # mirrors the 'wb' mode used when the model is saved below.
        with open(args.lmodel, 'rb') as f:
            trained_model = pickle.load(f)
        if not isinstance(trained_model, IntensityRangeStandardization):
            raise ArgumentError('{} does not seem to be a valid pickled instance of an IntensityRangeStandardization object'.format(args.lmodel))
        transformed_images = [trained_model.transform(i[m]) for i, m in zip(images, masks)]
    # if in training mode, train the model, apply it to the images and save it
    else:
        logger.info('Training the average intensity model...')
        irs = IntensityRangeStandardization()
        trained_model, transformed_images = irs.train_transform([i[m] for i, m in zip(images, masks)])
        logger.info('Saving the trained model as {}...'.format(args.smodel))
        with open(args.smodel, 'wb') as f:
            pickle.dump(trained_model, f)

    # save the transformed images
    if args.simages:
        logger.info('Saving intensity transformed images to {}...'.format(args.simages))
        for ti, i, m, h, image_name in zip(transformed_images, images, masks, headers, args.images):
            # write the transformed foreground voxels back into the original
            # image so background values are preserved
            i[m] = ti
            save(i, '{}/{}'.format(args.simages, image_name.split('/')[-1]), h, args.force)
    logger.info('Terminated.')
def getArguments(parser):
    "Provides additional validation of the arguments collected by argparse."
    args = parser.parse_args()

    # check mutually exclusive and required arguments: exactly one of the
    # two modes (training vs. application) has to be selected
    if args.lmodel and args.smodel:
        parser.error('only one of --load-model and --save-model can be supplied, as they decide on whether to apply the application or the training mode')
    if not args.lmodel and not args.smodel:
        parser.error('exactly one of --load-model or --save-model has to be supplied')

    # application mode: the model file must exist and an output directory
    # for the transformed images is mandatory
    if args.lmodel:
        if not os.path.isfile(args.lmodel):
            parser.error('the supplied model file {} does not exist'.format(args.lmodel))
        if not args.simages:
            parser.error('--save-images must be supplied when running the application mode')

    # training mode: 'L2'/'L3'/'L4' are symbolic landmark presets; any other
    # value must parse as a strictly ascending sequence of integers
    if args.smodel:
        if not args.landmarkp in ('L2', 'L3', 'L4'):
            args.landmarkp = sequenceOfIntegersGeAscendingStrict(args.landmarkp)
        if not 'auto' == args.stdspace:
            args.stdspace = sequenceOfIntegersGeAscendingStrict(args.stdspace)
        if not args.force and os.path.isfile(args.smodel):
            parser.error('the target model file {} already exists'.format(args.smodel))

    # others
    if args.simages:
        if not os.path.isdir(args.simages):
            parser.error('--save-images must be a valid directory')
    if args.masks and len(args.masks) != len(args.images):
        parser.error('the same number of masks must be passed to --masks as images have been supplied')

    return args
def getParser():
    "Creates and returns the argparse parser object."
    parser = argparse.ArgumentParser(description=__description__, formatter_class=argparse.RawDescriptionHelpFormatter)
    parser.add_argument('images', nargs='+', help='The images used for training (in the learning case) or to transform (in the transformation case)')

    # arguments specific to the application mode (--load-model)
    apply_group = parser.add_argument_group('apply an existing model')
    apply_group.add_argument('--load-model', dest='lmodel', default=False, help='Location of the pickled intensity range model to load. Activated application mode.')

    # arguments specific to the training mode (--save-model)
    train_group = parser.add_argument_group('train a new model and save and/or apply it')
    train_group.add_argument('--save-model', dest='smodel', default=False, help='Save the trained model under this name as a pickled object (should end in .pkl). Activates training mode.')
    train_group.add_argument('--cutoffp', dest='cutoffp', type=sequenceOfIntegersGeAscendingStrict, default='1,99', help='Colon-separated lower and upper cut-off percentile values to exclude intensity outliers during the model training.')
    train_group.add_argument('--landmarkp', dest='landmarkp', default='L4', help='The landmark percentiles, based on which to train the model. Can be L2, L3, L4 or a colon-separated, ordered list of percentiles.')
    train_group.add_argument('--stdspace', dest='stdspace', default='auto', help='Two colon-separated intensity values to roughly define the average intensity space to learn. In most cases should be left set to \'auto\'')

    # arguments used by both modes
    shared_group = parser.add_argument_group('shared arguments')
    shared_group.add_argument('--save-images', dest='simages', default=False, help='Save the transformed images under this location. Required for the application mode, optional for the learning mode.')
    shared_group.add_argument('--threshold', type=float, default=0, help='All voxel with an intensity > threshold are considered as foreground. Supply either this or a mask for each image.')
    shared_group.add_argument('--masks', nargs='+', help='A number of binary foreground mask, one for each image. Alternative to supplying a threshold. Overrides the threshold parameter if supplied.')

    parser.add_argument('-v', '--verbose', dest='verbose', action='store_true', help='Verbose output')
    parser.add_argument('-d', '--debug', dest='debug', action='store_true', help='Display debug information.')
    parser.add_argument('-f', '--force', dest='force', action='store_true', help='Overwrite existing files (both model and images)')
    return parser
if __name__ == "__main__":
main() | gpl-3.0 |
ldts/zephyr | arch/x86/gen_gdt.py | 2 | 6755 | #!/usr/bin/env python3
#
# Copyright (c) 2017 Intel Corporation
#
# SPDX-License-Identifier: Apache-2.0
"""Generate a Global Descriptor Table (GDT) for x86 CPUs.
For additional detail on GDT and x86 memory management, please
consult the IA Architecture SW Developer Manual, vol. 3.
This script accepts as input the zephyr_prebuilt.elf binary,
which is a link of the Zephyr kernel without various build-time
generated data structures (such as the GDT) inserted into it.
This kernel image has been properly padded such that inserting
these data structures will not disturb the memory addresses of
other symbols.
The output is a GDT whose contents depend on the kernel
configuration. With no memory protection features enabled,
we generate flat 32-bit code and data segments. If hardware-
based stack overflow protection or userspace is enabled,
we additionally create descriptors for the main and double-
fault IA tasks, needed for userspace privilege elevation and
double-fault handling. If userspace is enabled, we also create
flat code/data segments for ring 3 execution.
"""
import argparse
import sys
import struct
import os
import elftools
from distutils.version import LooseVersion
from elftools.elf.elffile import ELFFile
from elftools.elf.sections import SymbolTableSection
# pyelftools gained APIs this script relies on in 0.24; bail out early with a
# clear message instead of failing later with an obscure AttributeError.
if LooseVersion(elftools.__version__) < LooseVersion('0.24'):
    sys.stderr.write("pyelftools is out of date, need version 0.24 or later\n")
    sys.exit(1)
def debug(text):
    """Emit *text* on stdout, prefixed with the program name, in verbose mode."""
    if args.verbose:
        prog = os.path.basename(sys.argv[0])
        sys.stdout.write(prog + ": " + text + "\n")
def error(text):
    """Report *text* on stderr, prefixed with the program name, and exit(1)."""
    prog = os.path.basename(sys.argv[0])
    sys.stderr.write(prog + ": " + text + "\n")
    sys.exit(1)
# little-endian pseudo descriptor layout: 16-bit limit, 32-bit base address,
# plus 16 bits of zero padding
gdt_pd_fmt = "<HIH"

FLAGS_GRAN = 1 << 7  # page granularity
ACCESS_EX = 1 << 3   # executable
ACCESS_DC = 1 << 2   # direction/conforming
ACCESS_RW = 1 << 1   # read or write permission
# The GDT pseudo descriptor is only 6 bytes, but we emit it in place of the
# (unused) zero descriptor, so struct.pack hands back a full 8-byte slot.
def create_gdt_pseudo_desc(addr, size):
    """Pack the pseudo descriptor for a GDT at *addr* spanning *size* bytes."""
    debug("create pseudo decriptor: %x %x" % (addr, size))
    # The architecture encodes the limit field as size - 1.
    limit = size - 1
    return struct.pack(gdt_pd_fmt, limit, addr, 0)
# Limit argument always in bytes
def chop_base_limit(base, limit):
    """Split *base* and *limit* into the fragments a GDT entry stores."""
    return (
        base & 0xFFFF,          # base bits 0..15
        (base >> 16) & 0xFF,    # base bits 16..23
        (base >> 24) & 0xFF,    # base bits 24..31
        limit & 0xFFFF,         # limit bits 0..15
        (limit >> 16) & 0xF,    # limit bits 16..19
    )
gdt_ent_fmt = "<HHBBBB"
def create_code_data_entry(base, limit, dpl, flags, access):
    """Pack a code or data segment descriptor.

    *base* and *limit* define the segment extent, *dpl* the privilege
    level; *flags* and *access* carry the caller-supplied FLAGS_* and
    ACCESS_* bits, which are merged here with the bits common to every
    code/data segment.
    """
    debug("create code or data entry: %x %x %x %x %x" %
          (base, limit, dpl, flags, access))

    base_lo, base_mid, base_hi, limit_lo, limit_hi = chop_base_limit(base,
                                                                     limit)
    # Fixed bits: present (bit 7), 32-bit protected mode (size, bit 6 of
    # flags), code/data descriptor type (bit 4), and accessed already set to
    # 1 so the CPU never has to write the bit back -- that prevents faults
    # when the GDT lives in ROM, and the OS never reads this bit anyway.
    access = access | (1 << 7) | (dpl << 5) | (1 << 4) | 1
    flags = flags | (1 << 6) | limit_hi

    return struct.pack(gdt_ent_fmt, limit_lo, base_lo, base_mid,
                       access, flags, base_hi)
def create_tss_entry(base, limit, dpl):
    """Pack a 32-bit TSS (task state segment) descriptor."""
    debug("create TSS entry: %x %x %x" % (base, limit, dpl))

    base_lo, base_mid, base_hi, limit_lo, limit_hi = chop_base_limit(base,
                                                                     limit)
    # System descriptor type 0x9 = available (non-busy) 32-bit TSS, combined
    # with present (bit 7) and the requested privilege level.
    type_byte = (1 << 7) | (dpl << 5) | 0x9
    # Byte granularity (gran = 0): the TSS limit is small enough as-is.
    flags = limit_hi

    return struct.pack(gdt_ent_fmt, limit_lo, base_lo, base_mid,
                       type_byte, flags, base_hi)
def get_symbols(obj):
    """Return a name -> address mapping for the first symbol table in *obj*."""
    for section in obj.iter_sections():
        if not isinstance(section, SymbolTableSection):
            continue
        table = {}
        for sym in section.iter_symbols():
            table[sym.name] = sym.entry.st_value
        return table

    raise LookupError("Could not find symbol table")
def parse_args():
    """Parse the command line into the module-global ``args`` namespace."""
    global args

    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter)

    parser.add_argument("-k", "--kernel", required=True,
                        help="Zephyr kernel image")
    parser.add_argument("-v", "--verbose", action="store_true",
                        help="Print extra debugging information")
    parser.add_argument("-o", "--output-gdt", required=True,
                        help="output GDT binary")
    args = parser.parse_args()
    # The build system can force verbosity through the environment as well.
    if "VERBOSE" in os.environ:
        args.verbose = 1
def main():
    """Read the prebuilt kernel ELF and emit the GDT binary blob."""
    parse_args()

    with open(args.kernel, "rb") as fp:
        kernel = ELFFile(fp)
        syms = get_symbols(kernel)

    # NOTE: use-cases are extremely limited; we always have a basic flat
    # code/data segments. If we are doing stack protection, we are going to
    # have two TSS to manage the main task and the special task for double
    # fault exception handling
    if "CONFIG_USERSPACE" in syms:
        num_entries = 7
    elif "CONFIG_HW_STACK_PROTECTION" in syms:
        num_entries = 5
    else:
        num_entries = 3

    gdt_base = syms["_gdt"]

    with open(args.output_gdt, "wb") as fp:
        # The pseudo descriptor is stuffed into the NULL descriptor
        # since the CPU never looks at it
        fp.write(create_gdt_pseudo_desc(gdt_base, num_entries * 8))

        # Selector 0x08: code descriptor
        fp.write(create_code_data_entry(0, 0xFFFFF, 0,
                                        FLAGS_GRAN, ACCESS_EX | ACCESS_RW))

        # Selector 0x10: data descriptor
        fp.write(create_code_data_entry(0, 0xFFFFF, 0,
                                        FLAGS_GRAN, ACCESS_RW))

        if num_entries >= 5:
            main_tss = syms["_main_tss"]
            df_tss = syms["_df_tss"]

            # Selector 0x18: main TSS (0x67 = minimal legal TSS limit)
            fp.write(create_tss_entry(main_tss, 0x67, 0))

            # Selector 0x20: double-fault TSS
            fp.write(create_tss_entry(df_tss, 0x67, 0))

        if num_entries == 7:
            # Selector 0x28: code descriptor, dpl = 3 (user mode)
            fp.write(create_code_data_entry(0, 0xFFFFF, 3,
                                            FLAGS_GRAN, ACCESS_EX | ACCESS_RW))

            # Selector 0x30: data descriptor, dpl = 3 (user mode)
            fp.write(create_code_data_entry(0, 0xFFFFF, 3,
                                            FLAGS_GRAN, ACCESS_RW))
# Script entry point.
if __name__ == "__main__":
    main()
| apache-2.0 |
rahulraj/web_projects | assignment2/src/photogallery/generator/galleryitemfactory.py | 1 | 6059 | import os
import re
import os.path
from iptcinfo import IPTCInfo
from galleryitem import JpegPicture, JpegDirectory, directory_name_to_html_file_name
from ..utils.inject import assign_injectables
def is_jpeg_file(file_name):
    """
    Determine if a file is labeled as a JPEG.
    Args:
      file_name the name of the file.
    Returns:
      True if the file ends with .jpg.
    """
    return file_is_of_type(file_name, 'jpg')


def is_css_file(file_name):
    """
    Determine if a file is labeled as CSS.
    Args:
      file_name the name of the file.
    Returns:
      True if the file ends with .css.
    """
    return file_is_of_type(file_name, 'css')


def is_js_file(file_name):
    """
    Determine if a file is labeled as JavaScript.
    Args:
      file_name the name of the file.
    Returns:
      True if the file ends with .js.
    """
    return file_is_of_type(file_name, 'js')


def file_is_of_type(file_name, extension):
    """
    Return whether a file is of a certain type.
    Args:
      file_name the name of the file to test.
      extension the extension (without the dot) the file name must end with.
    Returns:
      True if file_name ends with '.' + extension.
    """
    # FIX: the pattern is now anchored at the end of the name. The previous
    # unanchored search also matched names such as 'photo.jpg.bak' (for
    # 'jpg') or 'app.json' (for 'js'), contradicting the documented
    # "ends with" behavior. re.escape guards against extensions that happen
    # to contain regex metacharacters.
    type_re = re.compile(r'\.%s$' % re.escape(extension))
    return type_re.search(file_name) is not None
class GalleryItemFactory(object):
    """
    Class to bootstrap the application by reading the disk and
    creating GalleryItems from the existing JPEGs and subdirectories.
    """
    def __init__(self, lookup_table, should_prompt,
                 iptc_info_constructor=IPTCInfo,
                 list_directory=os.listdir, is_directory=os.path.isdir):
        """
        Constructor for GalleryItemFactory
        Args:
          lookup_table the lookup_table that the files use to search IPTCInfo.data.
          should_prompt whether the program should prompt the user for directory
            names.
          iptc_info_constructor the constructor for IPTCInfo objects that the files
            will use to lookup metadata (defaults to IPTCInfo).
          list_directory the function that takes a path and lists the files in it,
            defaults to os.listdir
          is_directory a function that takes a file name and returns true if it
            is a directory (defaults to os.path.isdir).
        """
        # Binds every constructor argument as a same-named attribute on self.
        assign_injectables(self, locals())

    def create_directory(self, path, parent_path=None):
        """
        Creates a JpegDirectory object with the appropriate GalleryItems
        Args:
          path the path to the directory that the JPEGs are stored in.
          parent_path the directory one level up of path; if we are creating
            a subdirectory this will be used to populate back_href.
            It can be None if we are creating the top-most directory.
        Returns:
          A JpegDirectory containing GalleryItems wrapped around all the appropriate
          contents of the directory referred to by path.
        Raises:
          Any exception thrown when trying to extract IPTC information from a JPEG
          file. See the documentation of try_create_jpeg_picture for details.
        """
        file_names = self.list_directory(path)
        jpeg_names = filter(is_jpeg_file, file_names)
        path_contents = []
        for name in jpeg_names:
            # try_create_jpeg_picture returns None for unreadable files;
            # those are silently skipped.
            maybe_jpeg_picture = self.try_create_jpeg_picture(path, name)
            if maybe_jpeg_picture is not None:
                path_contents.append(maybe_jpeg_picture)
        subdirectories = self.create_subdirectories(file_names, path)
        path_contents.extend(subdirectories)
        back_href = self.maybe_get_back_href(parent_path)
        return JpegDirectory(path, path_contents, self.should_prompt,
                             back_href=back_href)

    def try_create_jpeg_picture(self, path, name):
        """
        Given a path and the name of a file ending in .jpg, tries to create
        a JpegPicture object out of it.
        Args:
          path the path to the directory the file is in.
          name the name of the file.
        Returns:
          A JpegPicture object, if creating it was successful. None if creating
          the JpegPicture failed for some reason that does not warrant crashing
          the program.
        Raises:
          Any exception raised when trying to extract IPTC information from the
          JPEG, that is not an IOError or an exception with the message
          'No IPTC data found.' In those two cases, simply skips the file and
          prints a message saying so.
        """
        full_jpeg_name = os.path.join(path, name)
        try:
            return JpegPicture(name,
                               directory_name_to_html_file_name(path),
                               self.iptc_info_constructor(full_jpeg_name),
                               self.lookup_table)
        except IOError:
            print "I was unable to open the file ", name, " for some reason"
            print "Maybe it's corrupted?"
            print "Skipping it..."
            return None
        except Exception as possible_iptc_exception:
            # IPTCInfo raises a plain Exception when the file has no IPTC
            # block, so the only way to recognize that case is by matching
            # its message text.
            if str(possible_iptc_exception) == 'No IPTC data found.':
                print "I was unable to get IPTC data from the file %s" % name
                print "Skipping it..."
                return None
            else:
                raise possible_iptc_exception # Some other exception

    def maybe_get_back_href(self, path):
        """
        Given a nullable path name, turns it into a href that can be used
        to write an anchor tag pointing to a HTML file. If path
        is None, propagates the None by returning it.
        Args:
          path the path name, or None if it is not applicable.
        """
        if path is None:
            return None
        else:
            return directory_name_to_html_file_name(path)

    def create_subdirectories(self, file_names, path):
        """
        Helper method to find the subdirectories of path and create JpegDirectories
        for them, fully initializing their contents too.
        Args:
          file_names the names of the files in path.
          path the root directory path to process.
        """
        full_file_names = [os.path.join(path, name) for name in file_names]
        directory_names = filter(self.is_directory, full_file_names)
        # Recurses via create_directory, so the whole tree below path is built.
        jpeg_directories = [self.create_directory(directory_name, parent_path=path) \
                            for directory_name in directory_names]
        return jpeg_directories
| mit |
littlstar/chromium.src | tools/deep_memory_profiler/accumulate.py | 100 | 9536 | #!/usr/bin/env python
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# A script to accumulate values from the 'dmprof cat' command into CSV or else.
#
# Usage:
# ./accumulate.py -f <format> -t <template-name> < input.json > output
#
# <format> is one of "csv", "json", and "tree". If "csv" or "json" is given,
# accumulate.py dumps a similar file to "dmprof csv|json". If "tree" is given,
# accumulate.py dumps a human-readable breakdown tree.
#
# <template-name> is a label in templates.json.
import datetime
import json
import logging
import optparse
import sys
from lib.ordered_dict import OrderedDict
LOGGER = logging.getLogger('dmprof-accumulate')
def visit_in_template(template, snapshot, depth):
    """Visits all categories via a given template.
    This function is not used. It's a sample function to traverse a template.
    """
    world = template[0]      # name of the world the template applies to
    breakdown = template[1]  # breakdown name within that world
    rules = template[2]      # rule name -> sub-template mapping
    for rule, _ in snapshot[world]['breakdown'][breakdown].iteritems():
        print (' ' * depth) + rule
        # Recurse only into rules that have a sub-template.
        if rule in rules:
            visit_in_template(rules[rule], snapshot, depth + 1)
def accumulate(template, snapshot, units_dict, target_units):
    """Accumulates units in a JSON |snapshot| with applying a given |template|.
    Args:
        template: A template tree included in a dmprof cat JSON file.
        snapshot: A snapshot in a dmprof cat JSON file.
        units_dict: A dict of units in worlds.
        target_units: A list of unit ids which are a target of this accumulation.
    Returns:
        A (category_tree, total, remainder_units) triple: the accumulated
        breakdown tree, the total size accounted for, and the unit ids in
        target_units that no rule matched.
    """
    world = template[0]
    breakdown = template[1]
    rules = template[2]
    remainder_units = target_units.copy()
    category_tree = OrderedDict()
    total = 0

    for rule, match in snapshot[world]['breakdown'][breakdown].iteritems():
        if 'hidden' in match and match['hidden']:
            continue
        matched_units = set(match['units']).intersection(target_units)
        # Sum the sizes of all units this rule matched.
        subtotal = 0
        for unit_id in matched_units:
            subtotal += units_dict[world][unit_id]
        total += subtotal
        remainder_units = remainder_units.difference(matched_units)
        if rule not in rules:
            # A category matched with |rule| is a leaf of the breakdown tree.
            # It is NOT broken down more.
            category_tree[rule] = subtotal
            continue

        # A category matched with |rule| is broken down more.
        subtemplate = rules[rule]
        subworld = subtemplate[0]
        subbreakdown = subtemplate[1]

        if subworld == world:
            # Break down in the same world: consider units.
            category_tree[rule], accounted_total, subremainder_units = accumulate(
                subtemplate, snapshot, units_dict, matched_units)
            subremainder_total = 0
            if subremainder_units:
                for unit_id in subremainder_units:
                    subremainder_total += units_dict[world][unit_id]
                # The unmatched leftover is recorded under the None key.
                category_tree[rule][None] = subremainder_total
            if subtotal != accounted_total + subremainder_total:
                print >> sys.stderr, (
                    'WARNING: Sum of %s:%s is different from %s by %d bytes.' % (
                        subworld, subbreakdown, rule,
                        subtotal - (accounted_total + subremainder_total)))
        else:
            # Break down in a different world: consider only the total size.
            category_tree[rule], accounted_total, _ = accumulate(
                subtemplate, snapshot, units_dict, set(units_dict[subworld].keys()))
            if subtotal >= accounted_total:
                category_tree[rule][None] = subtotal - accounted_total
            else:
                print >> sys.stderr, (
                    'WARNING: Sum of %s:%s is larger than %s by %d bytes.' % (
                        subworld, subbreakdown, rule, accounted_total - subtotal))
                print >> sys.stderr, (
                    'WARNING: Assuming remainder of %s is 0.' % rule)
                category_tree[rule][None] = 0

    return category_tree, total, remainder_units
def flatten(category_tree, header=''):
    """Flattens a category tree into a flat list of (label, size) pairs.

    Nested labels are joined with '>'; the None key (remainder) is shown
    as 'remaining'.
    """
    result = []
    for rule, sub in category_tree.iteritems():
        if not rule:
            # The None key holds the size not accounted for by any rule.
            rule = 'remaining'
        if header:
            flattened_rule = header + '>' + rule
        else:
            flattened_rule = rule
        if isinstance(sub, dict) or isinstance(sub, OrderedDict):
            result.extend(flatten(sub, flattened_rule))
        else:
            result.append((flattened_rule, sub))
    return result
def print_category_tree(category_tree, output, depth=0):
    """Prints a category tree in a human-readable format.

    Inner nodes are printed as 'label:' followed by their children indented
    one level deeper; leaves are printed as 'label: size'.
    """
    for label in category_tree:
        print >> output, (' ' * depth),
        if (isinstance(category_tree[label], dict) or
            isinstance(category_tree[label], OrderedDict)):
            print >> output, '%s:' % label
            print_category_tree(category_tree[label], output, depth + 1)
        else:
            print >> output, '%s: %d' % (label, category_tree[label])
def flatten_all_category_trees(category_trees):
    """Flatten every snapshot's category tree.

    Returns a (labels, table) pair: the set of all labels seen across the
    snapshots, and one OrderedDict of label -> size per snapshot.
    """
    labels = set()
    table = []
    for tree in category_trees:
        row = OrderedDict(flatten(tree))
        labels.update(row)
        table.append(row)
    return labels, table
def output_csv(output, category_trees, data, first_time, output_exponent):
    """Writes the accumulated snapshots as CSV to *output*.

    The first column is the snapshot time in seconds relative to
    *first_time*; the remaining columns are the sorted category labels,
    scaled by *output_exponent* (B, K or M).
    """
    flattened_labels, flattened_table = flatten_all_category_trees(category_trees)

    sorted_flattened_labels = sorted(flattened_labels)
    print >> output, ','.join(['second'] + sorted_flattened_labels)
    for index, row in enumerate(flattened_table):
        values = [str(data['snapshots'][index]['time'] - first_time)]
        for label in sorted_flattened_labels:
            if label in row:
                # Scale bytes down to kilo- or megabytes if requested.
                divisor = 1
                if output_exponent.upper() == 'K':
                    divisor = 1024.0
                elif output_exponent.upper() == 'M':
                    divisor = 1024.0 * 1024.0
                values.append(str(row[label] / divisor))
            else:
                # A label absent from this snapshot contributes zero.
                values.append('0')
        print >> output, ','.join(values)
def output_json(output, category_trees, data, first_time, template_label):
    """Dump the accumulated snapshots as a dmprof-style JSON document."""
    labels, table = flatten_all_category_trees(category_trees)

    snapshots = []
    for index, row in enumerate(table):
        snapshot_time = data['snapshots'][index]['time']
        entry = row.copy()
        entry['second'] = snapshot_time - first_time
        entry['dump_time'] = datetime.datetime.fromtimestamp(
            snapshot_time).strftime('%Y-%m-%d %H:%M:%S')
        snapshots.append(entry)

    policy = {
        'legends': sorted(labels),
        'snapshots': snapshots
    }
    json_root = {
        'version': 'JSON_DEEP_2',
        'policies': {template_label: policy}
    }
    json.dump(json_root, output, indent=2, sort_keys=True)
def output_tree(output, category_trees):
    """Prints every snapshot's category tree in human-readable form."""
    for index, category_tree in enumerate(category_trees):
        print >> output, '< Snapshot #%d >' % index
        print_category_tree(category_tree, output, 1)
        print >> output, ''
def do_main(cat_input, output, template_label, output_format, output_exponent):
    """Does the main work: accumulate for every snapshot and print a result."""
    if output_format not in ['csv', 'json', 'tree']:
        raise NotImplementedError('The output format \"%s\" is not implemented.' %
                                  output_format)

    if output_exponent.upper() not in ['B', 'K', 'M']:
        raise NotImplementedError('The exponent \"%s\" is not implemented.' %
                                  output_exponent)

    # object_pairs_hook keeps the template/rule ordering from the input file.
    data = json.loads(cat_input.read(), object_pairs_hook=OrderedDict)
    templates = data['templates']

    if not template_label:
        template_label = data['default_template']
    if template_label not in templates:
        LOGGER.error('A template \'%s\' is not found.' % template_label)
        return

    template = templates[template_label]

    category_trees = []
    # The first snapshot's timestamp is the origin for the 'second' column.
    first_time = None

    for snapshot in data['snapshots']:
        if not first_time:
            first_time = snapshot['time']

        # Build a per-world map of unit id -> size for this snapshot.
        units = {}
        for world_name in snapshot['worlds']:
            world_units = {}
            for unit_id, sizes in snapshot['worlds'][world_name]['units'].iteritems():
                world_units[int(unit_id)] = sizes[0]
            units[world_name] = world_units

        # Start the accumulation with every unit of the template's root world.
        category_tree, _, _ = accumulate(
            template, snapshot['worlds'], units, set(units[template[0]].keys()))
        category_trees.append(category_tree)

    if output_format == 'csv':
        output_csv(output, category_trees, data, first_time, output_exponent)
    elif output_format == 'json':
        output_json(output, category_trees, data, first_time, template_label)
    elif output_format == 'tree':
        output_tree(output, category_trees)
def main():
    """Configure logging, parse the command line and run the accumulation."""
    LOGGER.setLevel(logging.DEBUG)
    stream_handler = logging.StreamHandler()
    stream_handler.setLevel(logging.INFO)
    stream_handler.setFormatter(logging.Formatter('%(message)s'))
    LOGGER.addHandler(stream_handler)

    parser = optparse.OptionParser()
    parser.add_option('-t', '--template', dest='template',
                      metavar='TEMPLATE',
                      help='Apply TEMPLATE to list up.')
    parser.add_option('-f', '--format', dest='format', default='csv',
                      help='Specify the output format: csv, json or tree.')
    parser.add_option('-e', '--exponent', dest='exponent', default='M',
                      help='Specify B (bytes), K (kilobytes) or M (megabytes).')
    options, _ = parser.parse_args(sys.argv)

    # Read the 'dmprof cat' JSON from stdin and write the result to stdout.
    do_main(sys.stdin, sys.stdout,
            options.template, options.format, options.exponent)
# Script entry point; exits with main()'s return value.
if __name__ == '__main__':
    sys.exit(main())
| bsd-3-clause |
anntzer/scikit-learn | examples/model_selection/plot_confusion_matrix.py | 8 | 2074 | """
================
Confusion matrix
================
Example of confusion matrix usage to evaluate the quality
of the output of a classifier on the iris data set. The
diagonal elements represent the number of points for which
the predicted label is equal to the true label, while
off-diagonal elements are those that are mislabeled by the
classifier. The higher the diagonal values of the confusion
matrix the better, indicating many correct predictions.
The figures show the confusion matrix with and without
normalization by class support size (number of elements
in each class). This kind of normalization can be
interesting in case of class imbalance to have a more
visual interpretation of which class is being misclassified.
Here the results are not as good as they could be as our
choice for the regularization parameter C was not the best.
In real life applications this parameter is usually chosen
using :ref:`grid_search`.
"""
print(__doc__)
import numpy as np
import matplotlib.pyplot as plt
from sklearn import svm, datasets
from sklearn.model_selection import train_test_split
from sklearn.metrics import ConfusionMatrixDisplay
# import some data to play with
iris = datasets.load_iris()
X = iris.data
y = iris.target
class_names = iris.target_names
# Split the data into a training set and a test set
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0)
# Run classifier, using a model that is too regularized (C too low) to see
# the impact on the results
classifier = svm.SVC(kernel='linear', C=0.01).fit(X_train, y_train)
np.set_printoptions(precision=2)
# Plot non-normalized confusion matrix
titles_options = [("Confusion matrix, without normalization", None),
("Normalized confusion matrix", 'true')]
for title, normalize in titles_options:
disp = ConfusionMatrixDisplay.from_estimator(
classifier, X_test, y_test, display_labels=class_names,
cmap=plt.cm.Blues, normalize=normalize
)
disp.ax_.set_title(title)
print(title)
print(disp.confusion_matrix)
plt.show()
| bsd-3-clause |
FreeAgent/djangoappengine-starter | django/contrib/gis/geos/collections.py | 311 | 4663 | """
This module houses the Geometry Collection objects:
GeometryCollection, MultiPoint, MultiLineString, and MultiPolygon
"""
from ctypes import c_int, c_uint, byref
from django.contrib.gis.geos.error import GEOSException, GEOSIndexError
from django.contrib.gis.geos.geometry import GEOSGeometry
from django.contrib.gis.geos.libgeos import get_pointer_arr, GEOM_PTR, GEOS_PREPARE
from django.contrib.gis.geos.linestring import LineString, LinearRing
from django.contrib.gis.geos.point import Point
from django.contrib.gis.geos.polygon import Polygon
from django.contrib.gis.geos import prototypes as capi
class GeometryCollection(GEOSGeometry):
    """A GEOS geometry that holds an ordered sequence of other geometries."""
    _typeid = 7

    def __init__(self, *args, **kwargs):
        "Initializes a Geometry Collection from a sequence of Geometry objects."
        # Checking the arguments
        if not args:
            raise TypeError('Must provide at least one Geometry to initialize %s.' % self.__class__.__name__)

        if len(args) == 1:
            # If only one geometry provided or a list of geometries is provided
            # in the first argument.
            if isinstance(args[0], (tuple, list)):
                init_geoms = args[0]
            else:
                init_geoms = args
        else:
            init_geoms = args

        # Ensuring that only the permitted geometries are allowed in this collection
        # this is moved to list mixin super class
        self._check_allowed(init_geoms)

        # Creating the geometry pointer array.
        collection = self._create_collection(len(init_geoms), iter(init_geoms))

        super(GeometryCollection, self).__init__(collection, **kwargs)

    def __iter__(self):
        "Iterates over each Geometry in the Collection."
        for i in xrange(len(self)):
            yield self[i]

    def __len__(self):
        "Returns the number of geometries in this Collection."
        return self.num_geom

    ### Methods for compatibility with ListMixin ###
    def _create_collection(self, length, items):
        # Creating the geometry pointer array.
        geoms = get_pointer_arr(length)
        for i, g in enumerate(items):
            # this is a little sloppy, but makes life easier
            # allow GEOSGeometry types (python wrappers) or pointer types
            geoms[i] = capi.geom_clone(getattr(g, 'ptr', g))

        return capi.create_collection(c_int(self._typeid), byref(geoms), c_uint(length))

    def _get_single_internal(self, index):
        # Returns the raw GEOS pointer of the index-th member (not cloned).
        return capi.get_geomn(self.ptr, index)

    def _get_single_external(self, index):
        "Returns the Geometry from this Collection at the given index (0-based)."
        # Checking the index and returning the corresponding GEOS geometry.
        return GEOSGeometry(capi.geom_clone(self._get_single_internal(index)), srid=self.srid)

    def _set_list(self, length, items):
        "Create a new collection, and destroy the contents of the previous pointer."
        prev_ptr = self.ptr
        srid = self.srid
        self.ptr = self._create_collection(length, items)
        # Re-apply the SRID, which is not carried over by the new collection.
        if srid: self.srid = srid
        capi.destroy_geom(prev_ptr)

    _set_single = GEOSGeometry._set_single_rebuild
    _assign_extended_slice = GEOSGeometry._assign_extended_slice_rebuild

    @property
    def kml(self):
        "Returns the KML for this Geometry Collection."
        return '<MultiGeometry>%s</MultiGeometry>' % ''.join([g.kml for g in self])

    @property
    def tuple(self):
        "Returns a tuple of all the coordinates in this Geometry Collection"
        return tuple([g.tuple for g in self])
    coords = tuple
# MultiPoint, MultiLineString, and MultiPolygon class definitions.
class MultiPoint(GeometryCollection):
    "A collection that may only contain Point geometries."
    _allowed = Point
    _typeid = 4
class MultiLineString(GeometryCollection):
    "A collection restricted to LineString and LinearRing geometries."
    _allowed = (LineString, LinearRing)
    _typeid = 5

    @property
    def merged(self):
        """
        Returns a LineString representing the line merge of this
        MultiLineString.
        """
        return self._topology(capi.geos_linemerge(self.ptr))
class MultiPolygon(GeometryCollection):
    "A collection restricted to Polygon geometries."
    _allowed = Polygon
    _typeid = 6

    @property
    def cascaded_union(self):
        "Returns a cascaded union of this MultiPolygon."
        # Cascaded union was added in GEOS 3.1; GEOS_PREPARE flags that API.
        if GEOS_PREPARE:
            return GEOSGeometry(capi.geos_cascaded_union(self.ptr), self.srid)
        else:
            raise GEOSException('The cascaded union operation requires GEOS 3.1+.')
# Setting the allowed member types here, since GeometryCollection is defined
# before its subclasses (a forward reference is not possible at class body time).
GeometryCollection._allowed = (Point, LineString, LinearRing, Polygon, MultiPoint, MultiLineString, MultiPolygon)
| bsd-3-clause |
rwl/muntjac | muntjac/addon/invient/demo/invient_demo_win.py | 1 | 154853 | # @INVIENT_COPYRIGHT@
# @MUNTJAC_LICENSE@
"""Window for Invient charts demo."""
from StringIO \
import StringIO
from random \
import random
from threading \
import Thread
from time \
import sleep
from muntjac.addon.invient.invient_charts_util \
import getDate
from datetime \
import datetime
from muntjac.util \
import totalseconds, OrderedSet
from muntjac.api \
import TextArea, VerticalLayout, HorizontalLayout, Label, \
HorizontalSplitPanel, Window, Tree, Alignment, Button, GridLayout, \
ProgressIndicator
from muntjac.ui \
import button
from muntjac.data.property \
import IValueChangeListener
from muntjac.data.util.hierarchical_container \
import HierarchicalContainer
from muntjac.terminal.sizeable \
import ISizeable
from muntjac.addon.invient.invient_charts \
import ChartZoomListener, DateTimePoint, InvientCharts, DateTimeSeries, \
SeriesType, XYSeries, DecimalPoint, PointClickListener, \
ChartSVGAvailableListener, ChartClickListener, ChartResetZoomListener, \
SeriesClickListerner, SeriesHideListerner, SeriesShowListerner, \
SeriesLegendItemClickListerner, PointRemoveListener, PointSelectListener, \
PointUnselectListener, PieChartLegendItemClickListener
from muntjac.addon.invient.invient_charts_config \
import DateTimePlotBand, DateTimeRange, InvientChartsConfig, Margin, \
DateTimeAxis, NumberYAxis, AxisTitle, LineConfig, SymbolMarker, \
MarkerState, ZoomType, YAxisDataLabel, Grid, AreaConfig, SeriesState, \
CategoryAxis, NumberPlotLine, Legend, Layout, Position, HorzAlign, \
VertAlign, NumberValue, NumberXAxis, ScatterConfig, DataLabel, \
SeriesConfig, Stacking, AxisTitleAlign, BarConfig, Tooltip, ColumnConfig, \
XAxisDataLabel, Spacing, Tick, TickmarkPlacement, Symbol, NumberPlotBand, \
NumberRange, AreaSplineConfig, PieConfig, PieDataLabel, PointConfig, \
SplineConfig, ImageMarker, MinorGrid, PlotLabel, ChartLabel, \
ChartLabelItem, DashStyle
from muntjac.addon.invient.color \
import RGBA, RGB
from muntjac.addon.invient.gradient \
import LinearColorStop, LinearGradient
def timestamp(dt):
    """Convert a naive ``datetime`` into milliseconds since the Unix epoch.

    Highcharts expects datetime axis values as integral milliseconds.

    Fixes/changes relative to the original:
    * ``int()`` replaces ``long()`` so the helper also runs on Python 3
      (on Python 2 the result is promoted to ``long`` automatically once
      it exceeds ``sys.maxint``, so behavior is unchanged).
    * the epoch-seconds computation is inlined from plain ``timedelta``
      attributes instead of ``muntjac.util.totalseconds``, keeping the
      helper standard-library only (``timedelta.total_seconds()`` itself
      is unavailable before Python 2.7, hence the manual formula).
    """
    delta = dt - datetime(1970, 1, 1)
    seconds = delta.days * 86400 + delta.seconds + delta.microseconds / 1e6
    return int(seconds * 1e03)  # ms
class InvientChartsDemoWin(Window):
    """Demo window: a navigation tree of chart types on the left, the
    currently selected Invient chart plus an event log on the right."""

    # Item property id used for captions in the navigation tree.
    _TREE_ITEM_CAPTION_PROP_ID = 'ChartType'

    # Separator between series-type and chart-name in tree item ids.
    _SEPARATOR = '|'
def __init__(self):
    """Build the window shell: title bar, a split panel with the chart
    navigation tree (left) and the chart area (right), and the shared
    read-only event log."""
    super(InvientChartsDemoWin, self).__init__()
    self._eventLog = TextArea()
    # Assume GAE until attach() queries the application; background
    # threads are skipped while this flag is True (see showChart()).
    self._isAppRunningOnGAE = True
    mainLayout = VerticalLayout()
    self.setContent(mainLayout)
    self.setSizeFull()
    mainLayout.setSizeFull()
    self.setCaption('Invient Charts')
    # Top info bar holding the application title.
    infoBar = HorizontalLayout()
    mainLayout.addComponent(infoBar)
    infoBar.setHeight('50px')
    infoBar.setWidth('100%')
    lblAppTitle = Label('Demo Gallery for Invient Charts')
    lblAppTitle.setSizeFull()
    lblAppTitle.setStyleName('v-label-app-title')
    infoBar.addComponent(lblAppTitle)
    # Main area: navigation tree on the left, charts on the right.
    self._mainSplit = HorizontalSplitPanel()
    self._mainSplit.setSizeFull()
    mainLayout.addComponent(self._mainSplit)
    mainLayout.setExpandRatio(self._mainSplit, 1)
    self._leftLayout = VerticalLayout()
    self._leftLayout.setSpacing(True)
    self._mainSplit.setFirstComponent(self._leftLayout)
    self._rightLayout = VerticalLayout()
    self._rightLayout.setSpacing(True)
    self._rightLayout.setMargin(True)
    self._mainSplit.setSecondComponent(self._rightLayout)
    self._mainSplit.setSplitPosition(200, ISizeable.UNITS_PIXELS)
    self._navTree = self.createChartsTree()
    self._leftLayout.addComponent(self._navTree)
    # Read-only log of chart/series/point events raised by the demos.
    self._eventLog.setReadOnly(True)
    self._eventLog.setStyleName('v-textarea-chart-events-log')
    self._eventLog.setSizeFull()
    self._eventLog.setHeight('200px')
    self.setTheme('chartdemo')
    # Fixed date window used by the master/detail timeseries demo.
    self._masterChartMinDate = self.getDateZeroTime(2006, 1, 1)
    self._masterChartMaxDate = self.getDateZeroTime(2008, 12, 31)
    self._detailChartPointStartDate = self.getDateZeroTime(2008, 8, 1)
    # State for the self-updating spline demo and the scatter demo data.
    self._splineThread = None
    self._indicator = None
    self._scatterMaleData = None
    self._scatterFemaleData = None
def attach(self):
    """On attach, cache the GAE flag from the application and pre-select
    the basic line chart so the screen opens with content."""
    super(InvientChartsDemoWin, self).attach()
    app = self.getInvientChartsDemoApp()
    self._isAppRunningOnGAE = app.isAppRunningOnGAE()
    # Selecting the tree item fires the selection listener, which in
    # turn builds and shows the chart.
    initial_item_id = (DemoSeriesType.LINE.getName()
                       + self._SEPARATOR
                       + ChartName.BASIC.getName())
    self._navTree.select(initial_item_id)
def isAppRunningOnGAE(self):
    """Return True when the demo runs on Google App Engine; thread-based
    features are skipped in that case (see showChart())."""
    return self._isAppRunningOnGAE
def getInvientChartsDemoApp(self):
    """Typed convenience wrapper around getApplication()."""
    return self.getApplication()
def showChart(self, demoSeriesTypeName, chartNameString):
    """Dispatch a navigation-tree selection to the matching demo builder.

    :param demoSeriesTypeName: name of a ``DemoSeriesType`` member.
    :param chartNameString:    name of a ``ChartName`` member.
    """
    if not self._isAppRunningOnGAE:
        # The self-updating spline demo runs a background thread; it is
        # only ever started off GAE, so only stop it off GAE.
        self.stopSplineSelfUpdateThread()
    demoSeriesType = self.getDemoSeriesType(demoSeriesTypeName)
    chartName = self.getChartName(chartNameString)
    if demoSeriesType is not None and chartName is not None:
        if demoSeriesType == DemoSeriesType.COMBINATION:
            if chartName == ChartName.COMBINATION_COLUMN_LINE_AND_PIE:
                self.showCombination()
            elif chartName == ChartName.SCATTER_WITH_REGRESSION_LINE:
                self.showCombinationScatterWithRegressionLine()
            elif chartName == ChartName.MULTIPLE_AXES:
                self.showCombinationMultipleAxes()
        elif demoSeriesType == DemoSeriesType.LINE:
            if chartName == ChartName.BASIC:
                self.showLine()
            elif chartName == ChartName.CLICK_TO_ADD_POINT:
                self.showClickToAddPoint()
            elif chartName == ChartName.WITH_DATA_LABELS:
                self.showLineWithDataLabels()
            elif chartName == ChartName.TIMESERIES_ZOOMABLE:
                self.showTimeSeriesZoomable()
            elif chartName == ChartName.MASTER_DETAIL:
                self.showMasterDetail()
        elif demoSeriesType == DemoSeriesType.BAR:
            if chartName == ChartName.BASIC:
                self.showBarBasic()
            elif chartName == ChartName.STACKED:
                self.showBarStacked()
            elif chartName == ChartName.WITH_NEGATIVE_STACK:
                self.showBarWithNegStack()
        elif demoSeriesType == DemoSeriesType.COLUMN:
            if chartName == ChartName.BASIC:
                self.showColumnBasic()
            elif chartName == ChartName.WITH_NEGATIVE_VALUES:
                self.showColumnWithNegValues()
            elif chartName == ChartName.STACKED:
                self.showColumnStacked()
            elif chartName == ChartName.STACKED_AND_GROUPED:
                self.showColumnStackedAndGrouped()
            elif chartName == ChartName.STACKED_PERCENT:
                self.showColumnStackedPercent()
            elif chartName == ChartName.WITH_ROTATED_LABELS:
                self.showColumnWithRotatedLabels()
        elif demoSeriesType == DemoSeriesType.AREA:
            if chartName == ChartName.BASIC:
                self.showAreaBasic()
            elif chartName == ChartName.WITH_NEGATIVE_VALUES:
                self.showAreaWithNegValues()
            elif chartName == ChartName.STACKED:
                self.showAreaStacked()
            elif chartName == ChartName.PERCENTAGE:
                self.showAreaPercent()
            elif chartName == ChartName.INVERTED_AXES:
                self.showAreaInvertedAxes()
            elif chartName == ChartName.WITH_MISSING_POINTS:
                self.showAreaWithMissingPoints()
        elif demoSeriesType == DemoSeriesType.AREASPLINE:
            if chartName == ChartName.BASIC:
                self.showAreaSpline()
        elif demoSeriesType == DemoSeriesType.PIE:
            if chartName == ChartName.BASIC:
                self.showPie()
            elif chartName == ChartName.WITH_LEGEND:
                self.showPieWithLegend()
            elif chartName == ChartName.DONUT:
                self.showDonut()
        elif demoSeriesType == DemoSeriesType.SCATTER:
            if chartName == ChartName.BASIC:
                self.showScatter()
        elif demoSeriesType == DemoSeriesType.SPLINE:
            if chartName == ChartName.BASIC:
                self.showSpline()
            elif chartName == ChartName.WITH_PLOTBANDS:
                self.showSplineWithPlotBands()
            elif chartName == ChartName.WITH_SYMBOLS:
                self.showSplineWithSymbol()
            elif chartName == ChartName.UPDATING_EACH_SECOND:
                self.showSplineUpdatingEachSecond()
        else:
            # NOTE(review): indentation was reconstructed from a flattened
            # source; this branch is read as the fallback for a resolved
            # but unsupported series type -- confirm against upstream.
            self.getApplication().getMainWindow().showNotification(
                'Error occurred during chart processing! Try again!!!')
    else:
        # One of the two names failed to resolve to its enum member.
        self.getApplication().getMainWindow().showNotification(
            'Error occurred during chart processing! Try again!!!')
def showMasterDetail(self):
    """Build the master/detail timeseries pair and wire zoom syncing."""
    master = self.getMasterChart()
    detail = self.getDetailChart(master)
    # Zoom selections on the master chart reload the detail chart.
    master.addListener(MasterChartZoomListener(self, master, detail))
    # Master goes in bare (no separators/log hooks); detail gets the
    # standard decorations.
    self.addChart(master, False, False, False, False)
    self.addChart(detail, True, True, False)
def getDetailChart(self, masterChart):
    """Build the zoomed-in detail chart for the master/detail demo.

    Seeds its line series with the master chart's points dated on or
    after ``_detailChartPointStartDate``.
    """
    detailChartConfig = InvientChartsConfig()
    detailChartConfig.getGeneralChartConfig().setMargin(Margin())
    detailChartConfig.getGeneralChartConfig().getMargin().setBottom(120)
    detailChartConfig.getGeneralChartConfig().getMargin().setLeft(50)
    detailChartConfig.getGeneralChartConfig().getMargin().setRight(20)
    detailChartConfig.getGeneralChartConfig().setReflow(False)
    detailChartConfig.getCredit().setEnabled(False)
    detailChartConfig.getTitle().setText(
        'Historical USD to EUR Exchange Rate')
    detailChartConfig.getSubtitle().setText(
        'Select an area by dragging across the lower chart')
    detailXAxis = DateTimeAxis()
    detailXAxes = OrderedSet()
    detailXAxes.add(detailXAxis)
    detailChartConfig.setXAxes(detailXAxes)
    detailYAxis = NumberYAxis()
    detailYAxis.setTitle(AxisTitle(''))
    detailYAxes = OrderedSet()
    detailYAxes.add(detailYAxis)
    detailChartConfig.setYAxes(detailYAxes)
    # Shared tooltip: one box covering all series at the hovered x.
    detailChartConfig.getTooltip().setFormatterJsFunc(
        'function() {'
        + ' var point = this.points[0];'
        + ' return \'<b>\'+ point.series.name +\'</b><br/>\' + '
        + ' $wnd.Highcharts.dateFormat(\'%A %B %e %Y\', this.x) + \':<br/>\' + '
        + ' \'1 USD = \'+ $wnd.Highcharts.numberFormat(point.y, 2) +\' EUR\';'
        + '}')
    detailChartConfig.getTooltip().setShared(True)
    detailChartConfig.getLegend().setEnabled(False)
    # Point markers hidden by default, shown (radius 3) on hover.
    lineCfg = LineConfig()
    marker = SymbolMarker(False)
    lineCfg.setMarker(marker)
    marker.setHoverState(MarkerState())
    marker.getHoverState().setEnabled(True)
    marker.getHoverState().setRadius(3)
    detailChartConfig.addSeriesConfig(lineCfg)
    detailChart = InvientCharts(detailChartConfig)
    # Line instance configuration: one point per day starting at the
    # detail window's first date.
    lineSeriesCfg = LineConfig()
    start = timestamp(self._detailChartPointStartDate)
    lineSeriesCfg.setPointStart(start)
    lineSeriesCfg.setPointInterval(24 * 3600 * 1000.0)
    lineSeriesCfg.setColor(RGB(69, 114, 167))
    detailSeries = DateTimeSeries(detailChart, 'USD to EUR',
        SeriesType.LINE, lineSeriesCfg)
    detailPoints = OrderedSet()
    masterChartSeries = masterChart.getSeries('USD to EUR')
    # Copy only the master points inside the detail date window.
    for point in masterChartSeries.getPoints():
        if (timestamp(point.getX()) >=
                timestamp(self._detailChartPointStartDate)):
            detailPoints.add(DateTimePoint(detailSeries, point.getY()))
    detailSeries.setSeriesPoints(detailPoints)
    detailChart.addSeries(detailSeries)
    return detailChart
def getMasterChart(self):
    """Build the overview (master) chart for the master/detail demo.

    A short, non-interactive area chart acting purely as a range
    selector: x-zoom is handled server-side and the region before the
    detail window is greyed out with a plot band.
    """
    chartConfig = InvientChartsConfig()
    chartConfig.getGeneralChartConfig().setReflow(False)
    chartConfig.getGeneralChartConfig().setBorderWidth(0)
    chartConfig.getGeneralChartConfig().setMargin(Margin())
    chartConfig.getGeneralChartConfig().getMargin().setLeft(50)
    chartConfig.getGeneralChartConfig().getMargin().setRight(20)
    # Zoom along x only; processed on the server (no client-side zoom).
    chartConfig.getGeneralChartConfig().setZoomType(ZoomType.X)
    chartConfig.getGeneralChartConfig().setClientZoom(False)
    chartConfig.getGeneralChartConfig().setHeight(80)
    chartConfig.getTitle().setText('')
    xAxis = DateTimeAxis()
    xAxis.setShowLastLabel(True)
    # Smallest selectable range: 14 days, in milliseconds.
    xAxis.setMaxZoom(14 * 24 * 3600 * 1000.0)
    # Translucent grey band over the dates before the detail window.
    plotBand = DateTimePlotBand('mask-before')
    plotBand.setRange(DateTimeRange(self._masterChartMinDate,
        self._detailChartPointStartDate))
    plotBand.setColor(RGBA(0, 0, 0, 0.2))
    xAxis.addPlotBand(plotBand)
    xAxis.setTitle(AxisTitle(''))
    xAxes = set()
    xAxes.add(xAxis)
    chartConfig.setXAxes(xAxes)
    yAxis = NumberYAxis()
    yAxis.setShowFirstLabel(False)
    yAxis.setMin(0.6)
    yAxis.setGrid(Grid())
    yAxis.getGrid().setLineWidth(0)
    yAxis.setLabel(YAxisDataLabel(False))
    yAxis.setTitle(AxisTitle(''))
    yAxes = set()
    yAxes.add(yAxis)
    chartConfig.setYAxes(yAxes)
    # Returning false from the formatter suppresses the tooltip.
    chartConfig.getTooltip().setFormatterJsFunc(
        'function() { return false; }')
    chartConfig.getLegend().setEnabled(False)
    chartConfig.getCredit().setEnabled(False)
    # Plot options: fade the area fill to transparent, hide markers and
    # disable mouse tracking -- the chart is only a range selector.
    areaCfg = AreaConfig()
    colorStops = list()
    colorStops.append(LinearColorStop(0, RGB(69, 114, 167)))
    colorStops.append(LinearColorStop(1, RGBA(0, 0, 0, 0)))
    # Fill color
    areaCfg.setFillColor(LinearGradient(0, 0, 0, 70, colorStops))
    areaCfg.setLineWidth(1)
    areaCfg.setMarker(SymbolMarker(False))
    areaCfg.setShadow(False)
    areaCfg.setEnableMouseTracking(False)
    areaCfg.setHoverState(SeriesState())
    areaCfg.getHoverState().setLineWidth(1)
    chartConfig.addSeriesConfig(areaCfg)
    chart = InvientCharts(chartConfig)
    # Provide methods to set pointInterval and pointStart and delegate
    # call to SeriesConfig
    seriesDataCfg = AreaConfig()
    seriesDataCfg.setPointInterval(24 * 3600.0 * 1000)
    start = timestamp(self._masterChartMinDate)
    seriesDataCfg.setPointStart(start)
    masterChartSeries = DateTimeSeries(chart, 'USD to EUR',
        SeriesType.AREA, seriesDataCfg)
    masterChartSeries.setSeriesPoints(self.getMasterDetailData(
        masterChartSeries))
    chart.addSeries(masterChartSeries)
    return chart
def showLine(self):
    """Basic line chart: monthly average temperatures for four cities.

    Fix: the original called ``chartConfig.getTitle().setX(-20)`` twice
    (immediately before and again after setting the title text); the
    redundant second call is removed.  Series creation is also folded
    into a single data-driven loop.
    """
    chartConfig = InvientChartsConfig()
    chartConfig.getGeneralChartConfig().setType(SeriesType.LINE)
    chartConfig.getGeneralChartConfig().setMargin(Margin())
    chartConfig.getGeneralChartConfig().getMargin().setRight(130)
    chartConfig.getGeneralChartConfig().getMargin().setBottom(25)
    chartConfig.getTitle().setX(-20)
    chartConfig.getTitle().setText('Monthly Average Temperature')
    chartConfig.getSubtitle().setText('Source: WorldClimate.com')
    categoryAxis = CategoryAxis()
    categoryAxis.setCategories(['Jan', 'Feb', 'Mar', 'Apr', 'May',
        'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'])
    xAxesSet = set()
    xAxesSet.add(categoryAxis)
    chartConfig.setXAxes(xAxesSet)
    numberYAxis = NumberYAxis()
    # \u2103 is the degree-Celsius sign; pre-encoded for the Py2 client.
    numberYAxis.setTitle(AxisTitle(u'Temperature (\u2103)'.encode('utf-8')))
    # Horizontal reference line at 0 degrees.
    plotLine = NumberPlotLine('TempAt0')
    plotLine.setValue(NumberValue(0.0))
    plotLine.setWidth(1)
    plotLine.setZIndex(1)
    plotLine.setColor(RGB(128, 128, 128))
    numberYAxis.addPlotLine(plotLine)
    yAxesSet = set()
    yAxesSet.add(numberYAxis)
    chartConfig.setYAxes(yAxesSet)
    legend = Legend()
    legend.setLayout(Layout.VERTICAL)
    legendPos = Position()
    legendPos.setAlign(HorzAlign.RIGHT)
    legendPos.setVertAlign(VertAlign.TOP)
    legendPos.setX(-10)
    legendPos.setY(100)
    legend.setPosition(legendPos)
    legend.setBorderWidth(0)
    chartConfig.setLegend(legend)
    # Series data label formatter
    lineCfg = LineConfig()
    chartConfig.addSeriesConfig(lineCfg)
    # Tooltip formatter
    chartConfig.getTooltip().setFormatterJsFunc(
        'function() { '
        + u' return \'<b>\' + this.series.name + \'</b><br/>\' + this.x + \': \'+ this.y +\'\u2103\''.encode('utf-8')
        + '}')
    chart = InvientCharts(chartConfig)
    # One series per city; each list holds 12 monthly averages.
    for city, temps in (
            ('Tokyo', [7.0, 6.9, 9.5, 14.5, 18.2, 21.5, 25.2, 26.5,
                       23.3, 18.3, 13.9, 9.6]),
            ('New York', [-0.2, 0.8, 5.7, 11.3, 17.0, 22.0, 24.8, 24.1,
                          20.1, 14.1, 8.6, 2.5]),
            ('Berlin', [-0.9, 0.6, 3.5, 8.4, 13.5, 17.0, 18.6, 17.9,
                        14.3, 9.0, 3.9, 1.0]),
            ('London', [3.9, 4.2, 5.7, 8.5, 11.9, 15.2, 17.0, 16.6,
                        14.2, 10.3, 6.6, 4.8])):
        seriesData = XYSeries(city)
        seriesData.setSeriesPoints(self.getPoints(seriesData, temps))
        chart.addSeries(seriesData)
    self.addChart(chart)
def showClickToAddPoint(self):
    """Scatter chart where clicking the plot area adds a point and
    clicking an existing point removes it (via server-side listeners)."""
    chartConfig = InvientChartsConfig()
    chartConfig.getGeneralChartConfig().setType(SeriesType.SCATTER)
    chartConfig.getGeneralChartConfig().setMargin(Margin(70, 50, 60, 80))
    chartConfig.getTitle().setText('User supplied data')
    chartConfig.getSubtitle().setText('Click the plot area to add a '
        'point. Click a point to remove it.')
    xAxis = NumberXAxis()
    # Padding keeps newly added points away from the plot edges.
    xAxis.setMinPadding(0.2)
    xAxis.setMaxPadding(0.2)
    xAxis.setMaxZoom(60)
    xAxes = set()
    xAxes.add(xAxis)
    chartConfig.setXAxes(xAxes)
    yAxis = NumberYAxis()
    yAxis.setTitle(AxisTitle('Value'))
    yAxis.setMinPadding(0.2)
    yAxis.setMaxPadding(0.2)
    yAxis.setMaxZoom(60)
    # Reference line at y = 0.
    plotLine = NumberPlotLine('At0')
    plotLine.setValue(NumberValue(0.0))
    plotLine.setWidth(1)
    plotLine.setColor(RGB(128, 128, 128))
    yAxis.addPlotLine(plotLine)
    yAxes = set()
    yAxes.add(yAxis)
    chartConfig.setYAxes(yAxes)
    chartConfig.getLegend().setEnabled(False)
    scatterCfg = ScatterConfig()
    scatterCfg.setLineWidth(1)
    chartConfig.addSeriesConfig(scatterCfg)
    # chart data
    chart = InvientCharts(chartConfig)
    seriesData = XYSeries('User Supplied Data')
    seriesData.addPoint(DecimalPoint(seriesData, 20, 20))
    seriesData.addPoint(DecimalPoint(seriesData, 80, 80))
    chart.addSeries(seriesData)
    # Chart-area clicks add a point; clicking a point removes it.  The
    # empty list registers the point-click listener for all series.
    l = AddPointChartClickListener(self)
    chart.addListener(l)
    l = AddPointClickListener(self)
    chart.addListener(l, [])
    self.addChart(chart, False, False)
def showLineWithDataLabels(self):
    """Line chart with inline data labels; tooltip and mouse tracking
    are disabled since the labels already show every value."""
    chartConfig = InvientChartsConfig()
    chartConfig.getGeneralChartConfig().setMargin(Margin())
    chartConfig.getTitle().setText('Monthly Average Temperature')
    chartConfig.getSubtitle().setText('Source: WorldClimate.com')
    categoryAxis = CategoryAxis()
    categoryAxis.setCategories(['Jan', 'Feb', 'Mar', 'Apr', 'May',
        'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'])
    xAxesSet = set()
    xAxesSet.add(categoryAxis)
    chartConfig.setXAxes(xAxesSet)
    numberYAxis = NumberYAxis()
    # \u2103 is the degree-Celsius sign; pre-encoded for the Py2 client.
    numberYAxis.setTitle(AxisTitle(u'Temperature (\u2103)'.encode('utf-8')))
    yAxesSet = set()
    yAxesSet.add(numberYAxis)
    chartConfig.setYAxes(yAxesSet)
    chartConfig.getTooltip().setEnabled(False)
    # Series data label formatter
    lineCfg = LineConfig()
    lineCfg.setDataLabel(DataLabel())
    lineCfg.getDataLabel().setEnabled(True)
    lineCfg.setEnableMouseTracking(False)
    chartConfig.addSeriesConfig(lineCfg)
    chart = InvientCharts(chartConfig)
    seriesData = XYSeries('Tokyo')
    seriesData.setSeriesPoints(self.getPoints(seriesData, [7.0, 6.9, 9.5,
        14.5, 18.4, 21.5, 25.2, 26.5, 23.3, 18.3, 13.9, 9.6]))
    chart.addSeries(seriesData)
    seriesData = XYSeries('London')
    seriesData.setSeriesPoints(self.getPoints(seriesData, [3.9, 4.2, 5.7,
        8.5, 11.9, 15.2, 17.0, 16.6, 14.2, 10.3, 6.6, 4.8]))
    chart.addSeries(seriesData)
    self.addChart(chart)
def showBarStacked(self):
    """Horizontal bar chart with normally stacked series."""
    config = InvientChartsConfig()
    config.getGeneralChartConfig().setType(SeriesType.BAR)
    config.getTitle().setText('Stacked bar chart')
    fruitAxis = CategoryAxis()
    fruitAxis.setCategories(['Apples', 'Oranges', 'Pears', 'Grapes',
                             'Bananas'])
    xAxes = set()
    xAxes.add(fruitAxis)
    config.setXAxes(xAxes)
    totalAxis = NumberYAxis()
    totalAxis.setMin(0.0)
    totalAxis.setTitle(AxisTitle('Total fruit consumption'))
    yAxes = set()
    yAxes.add(totalAxis)
    config.setYAxes(yAxes)
    # Reversed legend so legend order matches the stacking order.
    legend = Legend()
    legend.setBackgroundColor(RGB(255, 255, 255))
    legend.setReversed(True)
    config.setLegend(legend)
    config.getTooltip().setFormatterJsFunc(
        'function() {'
        + ' return \'\'+ this.series.name +\': \'+ this.y +\'\'; '
        + '}')
    stackedCfg = SeriesConfig()
    stackedCfg.setStacking(Stacking.NORMAL)
    config.addSeriesConfig(stackedCfg)
    chart = InvientCharts(config)
    # One stacked series per person.
    for person, counts in (('John', [5, 3, 4, 7, 2]),
                           ('Jane', [2, 2, 3, 2, 1]),
                           ('Joe', [3, 4, 4, 2, 5])):
        series = XYSeries(person)
        series.setSeriesPoints(self.getPoints(series, counts))
        chart.addSeries(series)
    self.addChart(chart)
def showBarBasic(self):
    """Basic horizontal bar chart with data labels and a floating
    legend inside the plot area."""
    chartConfig = InvientChartsConfig()
    chartConfig.getGeneralChartConfig().setType(SeriesType.BAR)
    chartConfig.getTitle().setText('Historic World Population by Region')
    chartConfig.getSubtitle().setText('Source: Wikipedia.org')
    xAxisMain = CategoryAxis()
    categories = ['Africa', 'America', 'Asia', 'Europe', 'Oceania']
    xAxisMain.setCategories(categories)
    xAxesSet = set()
    xAxesSet.add(xAxisMain)
    chartConfig.setXAxes(xAxesSet)
    yAxis = NumberYAxis()
    yAxis.setMin(0.0)
    yAxis.setTitle(AxisTitle('Population (millions)'))
    # Axis title placed near the far end of the axis.
    yAxis.getTitle().setAlign(AxisTitleAlign.HIGH)
    yAxesSet = set()
    yAxesSet.add(yAxis)
    chartConfig.setYAxes(yAxesSet)
    chartConfig.getTooltip().setFormatterJsFunc(
        'function() {'
        + ' return \'\' + this.series.name +\': \'+ this.y +\' millions\';'
        + '}')
    barCfg = BarConfig()
    barCfg.setDataLabel(DataLabel())
    chartConfig.addSeriesConfig(barCfg)
    # Floating legend box overlapping the plot area.
    legend = Legend()
    legend.setLayout(Layout.VERTICAL)
    legend.setPosition(Position())
    legend.getPosition().setAlign(HorzAlign.RIGHT)
    legend.getPosition().setVertAlign(VertAlign.TOP)
    legend.getPosition().setX(-100)
    legend.getPosition().setY(100)
    legend.setFloating(True)
    legend.setBorderWidth(1)
    legend.setBackgroundColor(RGB(255, 255, 255))
    legend.setShadow(True)
    chartConfig.setLegend(legend)
    chartConfig.getCredit().setEnabled(False)
    chart = InvientCharts(chartConfig)
    seriesData = XYSeries('Year 1800')
    seriesData.setSeriesPoints(self.getPoints(seriesData,
        [107, 31, 635, 203, 2]))
    chart.addSeries(seriesData)
    seriesData = XYSeries('Year 1900')
    seriesData.setSeriesPoints(self.getPoints(seriesData,
        [133, 156, 947, 408, 6]))
    chart.addSeries(seriesData)
    seriesData = XYSeries('Year 2008')
    seriesData.setSeriesPoints(self.getPoints(seriesData,
        [973, 914, 4054, 732, 34]))
    chart.addSeries(seriesData)
    self.addChart(chart)
def showBarWithNegStack(self):
    """Population pyramid: the male series is negated so the two stacked
    bar series extend in opposite directions around zero.

    Fixes two defects in the original:
    * the configured ``Tooltip`` was created but never attached to the
      chart config, so its formatter had no effect -- it is now passed
      to ``chartConfig.setTooltip()`` (matching showColumnWithNegValues);
    * ``Highcharts.numberFormat`` lacked the ``$wnd.`` prefix used by
      every other formatter in this class, which is required to reach
      the host page's Highcharts object from GWT.
    """
    chartConfig = InvientChartsConfig()
    chartConfig.getGeneralChartConfig().setType(SeriesType.BAR)
    chartConfig.getTitle().setText(
        'Population pyramid for Germany, midyear 2010')
    chartConfig.getSubtitle().setText('Source: www.census.gov')
    xAxisMain = CategoryAxis()
    categories = ['0-4', '5-9', '10-14', '15-19', '20-24', '25-29',
                  '30-34', '35-39', '40-44', '45-49', '50-54', '55-59',
                  '60-64', '65-69', '70-74', '75-79', '80-84', '85-89',
                  '90-94', '95-99', '100 +']
    xAxisMain.setCategories(categories)
    xAxisMain.setReversed(False)
    xAxesSet = set()
    xAxesSet.add(xAxisMain)
    # Mirrored category axis on the opposite side, linked to the main one.
    xAxis = CategoryAxis()
    xAxis.setCategories(categories)
    xAxis.setOpposite(True)
    xAxis.setReversed(False)
    xAxis.setLinkedTo(xAxisMain)
    xAxesSet.add(xAxis)
    chartConfig.setXAxes(xAxesSet)
    yAxis = NumberYAxis()
    yAxis.setTitle(AxisTitle(''))
    yAxis.setMin(-4000000.0)
    yAxis.setMax(4000000.0)
    yAxis.setLabel(YAxisDataLabel())
    # Show absolute values in millions on both sides of zero.
    yAxis.getLabel().setFormatterJsFunc(
        'function() {'
        + ' return (Math.abs(this.value) / 1000000) + \'M\';'
        + ' }')
    yAxesSet = set()
    yAxesSet.add(yAxis)
    chartConfig.setYAxes(yAxesSet)
    tooltip = Tooltip()
    tooltip.setFormatterJsFunc(
        'function() {'
        + ' return \'<b>\'+ this.series.name +\', age \'+ this.point.category +\'</b><br/>\' + '
        + ' \'Population: \'+ $wnd.Highcharts.numberFormat(Math.abs(this.point.y), 0); '
        + '}')
    # BUGFIX: actually attach the tooltip (the original dropped it).
    chartConfig.setTooltip(tooltip)
    series = SeriesConfig()
    series.setStacking(Stacking.NORMAL)
    chartConfig.addSeriesConfig(series)
    chart = InvientCharts(chartConfig)
    # Male counts are negative so they stack leftwards from zero.
    seriesData = XYSeries('Male')
    seriesData.setSeriesPoints(self.getPoints(seriesData,
        [-1746181, -1884428, -2089758, -2222362, -2537431, -2507081,
         -2443179, -2664537, -3556505, -3680231, -3143062, -2721122,
         -2229181, -2227768, -2176300, -1329968, -836804, -354784,
         -90569, -28367, -3878]))
    chart.addSeries(seriesData)
    seriesData = XYSeries('Female')
    seriesData.setSeriesPoints(self.getPoints(seriesData,
        [1656154, 1787564, 1981671, 2108575, 2403438, 2366003,
         2301402, 2519874, 3360596, 3493473, 3050775, 2759560,
         2304444, 2426504, 2568938, 1785638, 1447162, 1005011,
         330870, 130632, 21208]))
    chart.addSeries(seriesData)
    self.addChart(chart)
def showColumnBasic(self):
    """Basic column chart: monthly average rainfall for four cities."""
    chartConfig = InvientChartsConfig()
    chartConfig.getGeneralChartConfig().setType(SeriesType.COLUMN)
    chartConfig.getTitle().setText('Monthly Average Rainfall')
    chartConfig.getSubtitle().setText('Source: WorldClimate.com')
    xAxis = CategoryAxis()
    xAxis.setCategories(['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun',
        'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'])
    xAxesSet = set()
    xAxesSet.add(xAxis)
    chartConfig.setXAxes(xAxesSet)
    yAxis = NumberYAxis()
    yAxis.setMin(0.0)
    yAxis.setTitle(AxisTitle('Rainfall (mm)'))
    yAxesSet = set()
    yAxesSet.add(yAxis)
    chartConfig.setYAxes(yAxesSet)
    # Floating legend placed inside the upper-left of the plot area.
    legend = Legend()
    legend.setFloating(True)
    legend.setLayout(Layout.VERTICAL)
    legend.setPosition(Position())
    legend.getPosition().setAlign(HorzAlign.LEFT)
    legend.getPosition().setVertAlign(VertAlign.TOP)
    legend.getPosition().setX(100)
    legend.getPosition().setY(70)
    legend.setShadow(True)
    legend.setBackgroundColor(RGB(255, 255, 255))
    chartConfig.setLegend(legend)
    chartConfig.getTooltip().setFormatterJsFunc(
        'function() {'
        + ' return \'\' + this.x +\': \'+ this.y +\' mm\'; '
        + '}')
    colCfg = ColumnConfig()
    colCfg.setPointPadding(0.2)
    colCfg.setBorderWidth(0)
    chartConfig.addSeriesConfig(colCfg)
    chart = InvientCharts(chartConfig)
    seriesData = XYSeries('Tokyo')
    seriesData.setSeriesPoints(self.getPoints(seriesData,
        [49.9, 71.5, 106.4, 129.2, 144.0, 176.0, 135.6, 148.5, 216.4,
         194.1, 95.6, 54.4]))
    chart.addSeries(seriesData)
    seriesData = XYSeries('New York')
    seriesData.setSeriesPoints(self.getPoints(seriesData,
        [83.6, 78.8, 98.5, 93.4, 106.0, 84.5, 105.0, 104.3, 91.2,
         83.5, 106.6, 92.3]))
    chart.addSeries(seriesData)
    seriesData = XYSeries('London')
    seriesData.setSeriesPoints(self.getPoints(seriesData,
        [48.9, 38.8, 39.3, 41.4, 47.0, 48.3, 59.0, 59.6, 52.4, 65.2,
         59.3, 51.2]))
    chart.addSeries(seriesData)
    seriesData = XYSeries('Berlin')
    seriesData.setSeriesPoints(self.getPoints(seriesData, [42.4, 33.2,
        34.5, 39.7, 52.6, 75.5, 57.4, 60.4, 47.6, 39.1, 46.8, 51.1]))
    chart.addSeries(seriesData)
    self.addChart(chart)
def showColumnWithNegValues(self):
    """Column chart whose series contain negative values."""
    config = InvientChartsConfig()
    config.getGeneralChartConfig().setType(SeriesType.COLUMN)
    config.getTitle().setText('Column chart with negative values')
    fruitAxis = CategoryAxis()
    fruitAxis.setCategories(['Apples', 'Oranges', 'Pears', 'Grapes',
                             'Bananas'])
    xAxes = set()
    xAxes.add(fruitAxis)
    config.setXAxes(xAxes)
    tooltip = Tooltip()
    tooltip.setFormatterJsFunc(
        'function() {'
        + ' return \'\' + this.series.name +\': \'+ this.y +\'\'; '
        + '}')
    config.setTooltip(tooltip)
    config.getCredit().setEnabled(False)
    chart = InvientCharts(config)
    # One series per person; Jane and Joe include negative counts.
    for person, counts in (('John', [5, 3, 4, 7, 2]),
                           ('Jane', [2, -2, -3, 2, 1]),
                           ('Joe', [3, 4, 4, -2, 5])):
        series = XYSeries(person)
        series.setSeriesPoints(self.getPoints(series, counts))
        chart.addSeries(series)
    self.addChart(chart)
def showColumnStacked(self):
    """Column chart with normally stacked series and a floating legend."""
    chartConfig = InvientChartsConfig()
    chartConfig.getGeneralChartConfig().setType(SeriesType.COLUMN)
    chartConfig.getTitle().setText('Stacked column chart')
    xAxis = CategoryAxis()
    xAxis.setCategories(['Apples', 'Oranges', 'Pears', 'Grapes', 'Bananas'])
    xAxesSet = set()
    xAxesSet.add(xAxis)
    chartConfig.setXAxes(xAxesSet)
    yAxis = NumberYAxis()
    yAxis.setMin(0.0)
    yAxis.setTitle(AxisTitle('Total fruit consumption'))
    yAxesSet = set()
    yAxesSet.add(yAxis)
    chartConfig.setYAxes(yAxesSet)
    # Floating legend box inside the plot area (upper right).
    legend = Legend()
    legend.setPosition(Position())
    legend.getPosition().setAlign(HorzAlign.RIGHT)
    legend.getPosition().setVertAlign(VertAlign.TOP)
    legend.getPosition().setX(-100)
    legend.getPosition().setY(20)
    legend.setFloating(True)
    legend.setBackgroundColor(RGB(255, 255, 255))
    legend.setBorderWidth(1)
    legend.setShadow(True)
    chartConfig.setLegend(legend)
    # Tooltip shows the hovered segment plus the stack total.
    chartConfig.getTooltip().setFormatterJsFunc(
        'function() {'
        + ' return \'<b>\'+ this.x +\'</b><br/>\'+ this.series.name +\': \'+ this.y +\'<br/>\'+'
        + ' \'Total: \'+ this.point.stackTotal; '
        + '}')
    colCfg = ColumnConfig()
    colCfg.setStacking(Stacking.NORMAL)
    chartConfig.addSeriesConfig(colCfg)
    chart = InvientCharts(chartConfig)
    seriesData = XYSeries('John')
    seriesData.setSeriesPoints(self.getPoints(seriesData, [5, 3, 4, 7, 2]))
    chart.addSeries(seriesData)
    seriesData = XYSeries('Jane')
    seriesData.setSeriesPoints(self.getPoints(seriesData, [2, 2, 3, 2, 1]))
    chart.addSeries(seriesData)
    seriesData = XYSeries('Joe')
    seriesData.setSeriesPoints(self.getPoints(seriesData, [3, 4, 4, 2, 5]))
    chart.addSeries(seriesData)
    self.addChart(chart)
def showColumnStackedAndGrouped(self):
    """Stacked columns grouped into 'male' and 'female' stacks."""
    config = InvientChartsConfig()
    config.getGeneralChartConfig().setType(SeriesType.COLUMN)
    # NOTE: 'consumtion' typo is in the original demo title; kept as-is.
    config.getTitle().setText(
        'Total fruit consumtion, grouped by gender')
    fruitAxis = CategoryAxis()
    fruitAxis.setCategories(['Apples', 'Oranges', 'Pears', 'Grapes',
                             'Bananas'])
    xAxes = set()
    xAxes.add(fruitAxis)
    config.setXAxes(xAxes)
    countAxis = NumberYAxis()
    countAxis.setAllowDecimals(False)
    countAxis.setMin(0.0)
    countAxis.setTitle(AxisTitle('Number of fruits'))
    yAxes = set()
    yAxes.add(countAxis)
    config.setYAxes(yAxes)
    stackedCfg = ColumnConfig()
    stackedCfg.setStacking(Stacking.NORMAL)
    config.addSeriesConfig(stackedCfg)
    config.getTooltip().setFormatterJsFunc(
        'function() {'
        + ' return \'<b>\'+ this.x +\'</b><br/>\'+ this.series.name +\': \'+ this.y +\'<br/>\'+ \'Total: \'+ this.point.stackTotal;'
        + '}')
    chart = InvientCharts(config)
    # Each series carries its stack name so columns group by gender.
    for person, counts, stack in (
            ('John', [5, 3, 4, 7, 2], 'male'),
            ('Joe', [3, 4, 4, 2, 5], 'male'),
            ('Jane', [2, 5, 6, 2, 1], 'female'),
            ('Janet', [3, 0, 4, 4, 3], 'female')):
        series = XYSeries(person)
        series.setSeriesPoints(self.getPoints(series, counts))
        series.setStack(stack)
        chart.addSeries(series)
    self.addChart(chart)
def showColumnStackedPercent(self):
    """Columns stacked to 100%, each segment shown as a percentage."""
    config = InvientChartsConfig()
    config.getGeneralChartConfig().setType(SeriesType.COLUMN)
    config.getTitle().setText('Stacked column chart')
    fruitAxis = CategoryAxis()
    fruitAxis.setCategories(['Apples', 'Oranges', 'Pears', 'Grapes',
                             'Bananas'])
    xAxes = set()
    xAxes.add(fruitAxis)
    config.setXAxes(xAxes)
    totalAxis = NumberYAxis()
    totalAxis.setMin(0.0)
    totalAxis.setTitle(AxisTitle('Total fruit consumption'))
    yAxes = set()
    yAxes.add(totalAxis)
    config.setYAxes(yAxes)
    pctCfg = ColumnConfig()
    pctCfg.setStacking(Stacking.PERCENT)
    config.addSeriesConfig(pctCfg)
    config.getTooltip().setFormatterJsFunc(
        'function() {'
        + ' return \'\' + this.series.name +\': \'+ this.y +\' (\'+ Math.round(this.percentage) +\'%)\'; '
        + '}')
    chart = InvientCharts(config)
    for person, counts in (('John', [5, 3, 4, 7, 2]),
                           ('Joe', [3, 4, 4, 2, 5]),
                           ('Jane', [2, 2, 3, 2, 1])):
        series = XYSeries(person)
        series.setSeriesPoints(self.getPoints(series, counts))
        chart.addSeries(series)
    self.addChart(chart)
def showColumnWithRotatedLabels(self):
    """Single-series column chart with rotated category labels and
    rotated in-column data labels."""
    chartConfig = InvientChartsConfig()
    chartConfig.getGeneralChartConfig().setType(SeriesType.COLUMN)
    chartConfig.getGeneralChartConfig().setMargin(Margin())
    chartConfig.getGeneralChartConfig().getMargin().setTop(50)
    chartConfig.getGeneralChartConfig().getMargin().setRight(50)
    # Extra bottom margin leaves room for the rotated city labels.
    chartConfig.getGeneralChartConfig().getMargin().setBottom(100)
    chartConfig.getGeneralChartConfig().getMargin().setLeft(80)
    chartConfig.getTitle().setText('World\'s largest cities per 2008')
    xAxis = CategoryAxis()
    xAxis.setCategories(['Tokyo', 'Jakarta', 'New York', 'Seoul',
        'Manila', 'Mumbai', 'Sao Paulo', 'Mexico City', 'Dehli',
        'Osaka', 'Cairo', 'Kolkata', 'Los Angeles', 'Shanghai',
        'Moscow', 'Beijing', 'Buenos Aires', 'Guangzhou',
        'Shenzhen', 'Istanbul'])
    # Slant the category labels 45 degrees so they do not overlap.
    xAxis.setLabel(XAxisDataLabel())
    xAxis.getLabel().setRotation(-45)
    xAxis.getLabel().setAlign(HorzAlign.RIGHT)
    xAxis.getLabel().setStyle('{ font: \'normal 13px Verdana, sans-serif\' }')
    xAxesSet = set()
    xAxesSet.add(xAxis)
    chartConfig.setXAxes(xAxesSet)
    yAxis = NumberYAxis()
    yAxis.setMin(0.0)
    yAxis.setTitle(AxisTitle('Population (millions)'))
    yAxesSet = set()
    yAxesSet.add(yAxis)
    chartConfig.setYAxes(yAxesSet)
    chartConfig.setLegend(Legend(False))
    chartConfig.getTooltip().setFormatterJsFunc(
        'function() {'
        + ' return \'<b>\'+ this.x +\'</b><br/>\'+ \'Population in 2008: \'+ $wnd.Highcharts.numberFormat(this.y, 1) + '
        + ' \' millions\' '
        + '}')
    chart = InvientCharts(chartConfig)
    # White data labels drawn vertically inside each column.
    colCfg = ColumnConfig()
    colCfg.setDataLabel(DataLabel())
    colCfg.getDataLabel().setRotation(-90)
    colCfg.getDataLabel().setAlign(HorzAlign.RIGHT)
    colCfg.getDataLabel().setX(-3)
    colCfg.getDataLabel().setY(10)
    colCfg.getDataLabel().setColor(RGB(255, 255, 255))
    colCfg.getDataLabel().setFormatterJsFunc('function() {'
        + ' return this.y; '
        + '}')
    colCfg.getDataLabel().setStyle(
        ' { font: \'normal 13px Verdana, sans-serif\' } ')
    seriesData = XYSeries('Population', colCfg)
    seriesData.setSeriesPoints(self.getPoints(seriesData,
        [34.4, 21.8, 20.1, 20, 19.6, 19.5, 19.1, 18.4, 18, 17.3,
         16.8, 15, 14.7, 14.5, 13.3, 12.8, 12.4, 11.8, 11.7, 11.2]))
    chart.addSeries(seriesData)
    self.addChart(chart)
def showAreaWithNegValues(self):
    """Render a basic area chart whose series contain negative values."""
    config = InvientChartsConfig()
    config.getGeneralChartConfig().setType(SeriesType.AREA)
    config.getTitle().setText('Area chart with negative values')
    fruitAxis = CategoryAxis()
    fruitAxis.setCategories(['Apples', 'Oranges', 'Pears',
            'Grapes', 'Bananas'])
    config.setXAxes(set([fruitAxis]))
    config.getCredit().setEnabled(False)
    chart = InvientCharts(config)
    # One area series per person; insertion order fixes legend order.
    for name, values in (('John', [5, 3, 4, 7, 2]),
                         ('Jane', [2, -2, -3, 2, 1]),
                         ('Joe', [3, 4, 4, -2, 5])):
        s = XYSeries(name)
        s.setSeriesPoints(self.getPoints(s, values))
        chart.addSeries(s)
    self.addChart(chart)
def showAreaInvertedAxes(self):
    """Area chart with inverted axes and a floating in-plot legend."""
    cfg = InvientChartsConfig()
    cfg.getGeneralChartConfig().setType(SeriesType.AREA)
    cfg.getGeneralChartConfig().setInverted(True)
    cfg.getTitle().setText(
        'Average fruit consumption during one week')
    cfg.getSubtitle().setStyle(
        '{ position: \'absolute\', right: \'0px\', bottom: \'10px\'}')
    # Floating legend pinned inside the plot, top-right region.
    legend = Legend()
    legend.setFloating(True)
    legend.setLayout(Layout.VERTICAL)
    legendPos = Position()
    legend.setPosition(legendPos)
    legendPos.setAlign(HorzAlign.RIGHT)
    legendPos.setVertAlign(VertAlign.TOP)
    legendPos.setX(-150)
    legendPos.setY(100)
    legend.setBorderWidth(1)
    legend.setBackgroundColor(RGB(255, 255, 255))
    cfg.setLegend(legend)
    weekdayAxis = CategoryAxis()
    weekdayAxis.setCategories(['Monday', 'Tuesday', 'Wednesday', 'Thursday',
            'Friday', 'Saturday', 'Sunday'])
    cfg.setXAxes(set([weekdayAxis]))
    unitsAxis = NumberYAxis()
    unitsAxis.setTitle(AxisTitle('Number of units'))
    unitsAxis.setMin(0.0)
    unitsAxis.setLabel(YAxisDataLabel())
    unitsAxis.getLabel().setFormatterJsFunc(
        'function() {' + ' return this.value; ' + '}')
    cfg.setYAxes(set([unitsAxis]))
    cfg.getTooltip().setFormatterJsFunc('function() {'
        + ' return \'\' + this.x + \': \' + this.y; '
        + '}')
    areaCfg = AreaConfig()
    areaCfg.setFillOpacity(0.5)
    cfg.addSeriesConfig(areaCfg)
    chart = InvientCharts(cfg)
    # Two series, one per person.
    for name, values in (('John', [3, 4, 3, 5, 4, 10, 12]),
                         ('Jane', [1, 3, 4, 3, 3, 5, 4])):
        s = XYSeries(name)
        s.setSeriesPoints(self.getPoints(s, values))
        chart.addSeries(s)
    self.addChart(chart)
def showAreaWithMissingPoints(self):
    """Area chart where one series carries a missing (value-less) point.

    Jane's fourth point (the 'Bananas' category) is a DecimalPoint with
    no value, which renders as a gap in her area.
    """
    chartConfig = InvientChartsConfig()
    chartConfig.getGeneralChartConfig().setType(SeriesType.AREA)
    chartConfig.getGeneralChartConfig().setSpacing(Spacing())
    chartConfig.getGeneralChartConfig().getSpacing().setBottom(30)
    chartConfig.getTitle().setText('Fruit consumption *')
    # Floating subtitle acts as a footnote in the bottom-right corner.
    chartConfig.getSubtitle().setText(
        '* Jane\'s banana consumption is unknown')
    chartConfig.getSubtitle().setFloating(True)
    chartConfig.getSubtitle().setAlign(HorzAlign.RIGHT)
    chartConfig.getSubtitle().setVertAlign(VertAlign.BOTTOM)
    chartConfig.getSubtitle().setY(15)
    # Floating legend positioned inside the plot area.
    legend = Legend()
    legend.setFloating(True)
    legend.setLayout(Layout.VERTICAL)
    legend.setPosition(Position())
    legend.getPosition().setAlign(HorzAlign.LEFT)
    legend.getPosition().setVertAlign(VertAlign.TOP)
    legend.getPosition().setX(150)
    legend.getPosition().setY(100)
    legend.setBorderWidth(1)
    legend.setBackgroundColor(RGB(255, 255, 255))
    chartConfig.setLegend(legend)
    xAxis = CategoryAxis()
    xAxis.setCategories(['Apples', 'Pears', 'Oranges', 'Bananas',
        'Grapes', 'Plums', 'Strawberries', 'Raspberries'])
    xAxesSet = set()
    xAxesSet.add(xAxis)
    chartConfig.setXAxes(xAxesSet)
    yAxis = NumberYAxis()
    yAxis.setTitle(AxisTitle('Y-Axis'))
    yAxis.setLabel(YAxisDataLabel())
    yAxis.getLabel().setFormatterJsFunc(
        'function() {'
        + ' return this.value; '
        + '}')
    yAxesSet = set()
    yAxesSet.add(yAxis)
    chartConfig.setYAxes(yAxesSet)
    chartConfig.getTooltip().setFormatterJsFunc(
        'function() {'
        + ' return \'<b>\'+ this.series.name +\'</b><br/>\'+ this.x +\': \'+ this.y;'
        + '}')
    chartConfig.getCredit().setEnabled(False)
    areaCfg = AreaConfig()
    areaCfg.setFillOpacity(0.5)
    chartConfig.addSeriesConfig(areaCfg)
    chart = InvientCharts(chartConfig)
    series = XYSeries('John')
    series.setSeriesPoints(self.getPoints(series, [0, 1, 4, 4, 5, 2, 3, 7]))
    chart.addSeries(series)
    series = XYSeries('Jane')
    # NOTE(review): a whole list is passed to addPoint here, while other
    # call sites pass a single point — presumably addPoint accepts an
    # iterable; confirm against the XYSeries API. The value-less
    # DecimalPoint(series) is the missing 'Bananas' figure.
    series.addPoint([DecimalPoint(series, 1.0), DecimalPoint(series, 0.0),
        DecimalPoint(series, 3.0), DecimalPoint(series),
        DecimalPoint(series, 3.0), DecimalPoint(series, 1.0),
        DecimalPoint(series, 2.0), DecimalPoint(series, 1.0)])
    chart.addSeries(series)
    self.addChart(chart)
def showAreaStacked(self):
    """Stacked area chart of world population by region (absolute values)."""
    cfg = InvientChartsConfig()
    cfg.getGeneralChartConfig().setType(SeriesType.AREA)
    cfg.getTitle().setText('Historic and Estimated Worldwide '
        'Population Growth by Region')
    cfg.getSubtitle().setText('Source: Wikipedia.org')
    yearAxis = CategoryAxis()
    yearAxis.setCategories(['1750', '1800', '1850', '1900', '1950',
        '1999', '2050'])
    tick = Tick()
    tick.setPlacement(TickmarkPlacement.ON)
    yearAxis.setTick(tick)
    cfg.setXAxes(set([yearAxis]))
    billionsAxis = NumberYAxis()
    billionsAxis.setTitle(AxisTitle('Billions'))
    billionsAxis.setLabel(YAxisDataLabel())
    # Data is in millions; the label formatter displays billions.
    billionsAxis.getLabel().setFormatterJsFunc('function() {'
        + ' return this.value / 1000; '
        + '}')
    cfg.setYAxes(set([billionsAxis]))
    cfg.getTooltip().setFormatterJsFunc('function() {'
        + ' return \'\'+ this.x +\': \'+ $wnd.Highcharts.numberFormat(this.y, 0, \',\') +\' millions\';'
        + '}')
    areaCfg = AreaConfig()
    areaCfg.setStacking(Stacking.NORMAL)
    areaCfg.setLineColor(RGB(102, 102, 102))
    areaCfg.setLineWidth(1)
    marker = SymbolMarker()
    marker.setLineColor(RGB(102, 102, 102))
    marker.setLineWidth(1)
    areaCfg.setMarker(marker)
    cfg.addSeriesConfig(areaCfg)
    chart = InvientCharts(cfg)
    # One stacked series per region, in legend order.
    for region, values in (
            ('Asia', [502, 635, 809, 947, 1402, 3634, 5268]),
            ('Africa', [106, 107, 111, 133, 221, 767, 1766]),
            ('Europe', [163, 203, 276, 408, 547, 729, 628]),
            ('America', [18, 31, 54, 156, 339, 818, 1201]),
            ('Oceania', [2, 2, 2, 6, 13, 30, 46])):
        s = XYSeries(region)
        s.setSeriesPoints(self.getPoints(s, values))
        chart.addSeries(s)
    self.addChart(chart)
def showAreaPercent(self):
    """Percent-stacked area chart of population share by region."""
    cfg = InvientChartsConfig()
    cfg.getGeneralChartConfig().setType(SeriesType.AREA)
    cfg.getTitle().setText('Historic and Estimated Worldwide '
        'Population Distribution by Region')
    cfg.getSubtitle().setText('Source: Wikipedia.org')
    yearAxis = CategoryAxis()
    yearAxis.setCategories(['1750', '1800', '1850', '1900', '1950',
        '1999', '2050'])
    tick = Tick()
    tick.setPlacement(TickmarkPlacement.ON)
    yearAxis.setTick(tick)
    cfg.setXAxes(set([yearAxis]))
    percentAxis = NumberYAxis()
    percentAxis.setTitle(AxisTitle('Percent'))
    cfg.setYAxes(set([percentAxis]))
    cfg.getTooltip().setFormatterJsFunc(
        'function() {'
        + ' return \'\' + this.x +\': \' + $wnd.Highcharts.numberFormat(this.percentage, 1) + '
        + ' \'% (\'+ $wnd.Highcharts.numberFormat(this.y, 0, \',\') +\' millions)\'; '
        + '}')
    areaCfg = AreaConfig()
    areaCfg.setStacking(Stacking.PERCENT)
    areaCfg.setLineColor(RGB(255, 255, 255))
    areaCfg.setLineWidth(1)
    marker = SymbolMarker()
    marker.setLineColor(RGB(255, 255, 255))
    marker.setLineWidth(1)
    areaCfg.setMarker(marker)
    cfg.addSeriesConfig(areaCfg)
    chart = InvientCharts(cfg)
    # Same data as the absolute-value chart; stacking mode normalizes it.
    for region, values in (
            ('Asia', [502, 635, 809, 947, 1402, 3634, 5268]),
            ('Africa', [106, 107, 111, 133, 221, 767, 1766]),
            ('Europe', [163, 203, 276, 408, 547, 729, 628]),
            ('America', [18, 31, 54, 156, 339, 818, 1201]),
            ('Oceania', [2, 2, 2, 6, 13, 30, 46])):
        s = XYSeries(region)
        s.setSeriesPoints(self.getPoints(s, values))
        chart.addSeries(s)
    self.addChart(chart)
def showAreaBasic(self):
    """Basic area chart: US vs USSR/Russia nuclear stockpiles over time.

    Each series is padded with leading value-less points so its area only
    starts at the year the country first had warheads; x values start at
    1940 with an implicit one-per-step interval.
    """
    chartConfig = InvientChartsConfig()
    chartConfig.getGeneralChartConfig().setType(SeriesType.AREA)
    chartConfig.getTitle().setText('US and USSR nuclear stockpiles')
    chartConfig.getSubtitle().setText(
        'Source: <a href=\'http://thebulletin.metapress.com/content/c4120650912x74k7/fulltext.pdf\'>thebulletin.metapress.com</a>')
    xAxis = NumberXAxis()
    xAxis.setLabel(XAxisDataLabel())
    xAxis.getLabel().setFormatterJsFunc(
        'function() {'
        + ' return this.value;'
        + '}')
    xAxesSet = set()
    xAxesSet.add(xAxis)
    chartConfig.setXAxes(xAxesSet)
    yAxis = NumberYAxis()
    yAxis.setTitle(AxisTitle('Nuclear weapon states'))
    yAxis.setLabel(YAxisDataLabel())
    # Display warhead counts in thousands, e.g. 30000 -> '30k'.
    yAxis.getLabel().setFormatterJsFunc(
        'function() {'
        + ' return this.value / 1000 +\'k\';'
        + '}')
    yAxesSet = set()
    yAxesSet.add(yAxis)
    chartConfig.setYAxes(yAxesSet)
    chartConfig.getTooltip().setFormatterJsFunc(
        'function() {'
        + ' return this.series.name +\' produced <b>\'+'
        + ' $wnd.Highcharts.numberFormat(this.y, 0) +\'</b><br/>warheads in \'+ this.x;'
        + '}')
    # Shared series defaults: markers hidden except on hover.
    areaCfg = AreaConfig()
    areaCfg.setPointStart(1940.0)
    marker = SymbolMarker()
    areaCfg.setMarker(marker)
    marker.setEnabled(False)
    marker.setSymbol(Symbol.CIRCLE)
    marker.setRadius(2)
    marker.setHoverState(MarkerState(True))
    chartConfig.addSeriesConfig(areaCfg)
    chart = InvientCharts(chartConfig)
    # Series -
    usaAreaCfg = AreaConfig()
    usaAreaCfg.setPointStart(1940.0)
    series = XYSeries('USA', usaAreaCfg)
    points = set()
    # Five leading null points: no US warheads before 1945.
    self.addNullPoints(points, series, 5)
    points = points.union(self.getPoints(series,
        [6, 11, 32, 110, 235, 369, 640, 1005, 1436, 2063, 3057, 4618,
        6444, 9822, 15468, 20434, 24126, 27387, 29459, 31056, 31982,
        32040, 31233, 29224, 27342, 26662, 26956, 27912, 28999,
        28965, 27826, 25579, 25722, 24826, 24605, 24304, 23464, 23708,
        24099, 24357, 24237, 24401, 24344, 23586, 22380, 21004, 17287,
        14747, 13076, 12555, 12144, 11009, 10950, 10871, 10824, 10577,
        10527, 10475, 10421, 10358, 10295, 10104]))
    series.setSeriesPoints(points)
    chart.addSeries(series)
    russiaAreaCfg = AreaConfig()
    russiaAreaCfg.setPointStart(1940.0)
    series = XYSeries('USSR/Russia', russiaAreaCfg)
    points = set()
    # Ten leading null points: no Soviet warheads before 1950.
    self.addNullPoints(points, series, 10)
    points = points.union(self.getPoints(series,
        [5, 25, 50, 120, 150, 200, 426, 660, 869, 1060, 1605, 2471,
        3322, 4238, 5221, 6129, 7089, 8339, 9399, 10538, 11643,
        13092, 14478, 15915, 17385, 19055, 21205, 23044, 25393,
        27935, 30062, 32049, 33952, 35804, 37431, 39197, 45000,
        43000, 41000, 39000, 37000, 35000, 33000, 31000, 29000,
        27000, 25000, 24000, 23000, 22000, 21000, 20000, 19000,
        18000, 18000, 17000, 16000]))
    series.setSeriesPoints(points)
    chart.addSeries(series)
    self.addChart(chart)
def addNullPoints(self, points, series, howManyNullPoints):
    """Add *howManyNullPoints* value-less (null) points of *series* to the
    *points* set."""
    points.update(DecimalPoint(series) for _ in range(howManyNullPoints))
def showAreaSpline(self):
    """Area-spline chart with a plot band highlighting the weekend."""
    cfg = InvientChartsConfig()
    cfg.getGeneralChartConfig().setType(SeriesType.AREASPLINE)
    cfg.getTitle().setText('Average fruit consumption during '
        'one week')
    legend = Legend()
    legend.setLayout(Layout.VERTICAL)
    legendPos = Position()
    legendPos.setAlign(HorzAlign.LEFT)
    legendPos.setVertAlign(VertAlign.TOP)
    legendPos.setX(150)
    legendPos.setY(100)
    legend.setPosition(legendPos)
    legend.setFloating(True)
    legend.setBorderWidth(1)
    legend.setBackgroundColor(RGB(255, 255, 255))
    cfg.setLegend(legend)
    dayAxis = CategoryAxis()
    dayAxis.setCategories(['Monday', 'Tuesday', 'Wednesday', 'Thursday',
        'Friday', 'Saturday', 'Sunday'])
    # Shade the Saturday/Sunday categories with a light blue band.
    weekendBand = NumberPlotBand('sat-sun')
    weekendBand.setRange(NumberRange(4.6, 6.5))
    weekendBand.setColor(RGBA(68, 170, 213, 0.2))
    dayAxis.addPlotBand(weekendBand)
    cfg.setXAxes(set([dayAxis]))
    fruitAxis = NumberYAxis()
    fruitAxis.setTitle(AxisTitle('Fruit units'))
    cfg.setYAxes(set([fruitAxis]))
    cfg.getCredit().setEnabled(False)
    areaSplineCfg = AreaSplineConfig()
    areaSplineCfg.setFillOpacity(0.5)
    cfg.addSeriesConfig(areaSplineCfg)
    chart = InvientCharts(cfg)
    for name, values in (('John', [3, 4, 3, 5, 4, 10, 12]),
                         ('Jane', [1, 3, 4, 3, 3, 5, 4])):
        s = XYSeries(name)
        s.setSeriesPoints(self.getPoints(s, values))
        chart.addSeries(s)
    self.addChart(chart)
def showPieWithLegend(self):
    """Pie chart whose slices appear in the legend; Chrome starts selected."""
    cfg = InvientChartsConfig()
    cfg.getGeneralChartConfig().setType(SeriesType.PIE)
    cfg.getTitle().setText('Browser market shares at a specific website, 2010')
    cfg.getTooltip().setFormatterJsFunc(
        'function() {'
        + ' return \'<b>\'+ this.point.name +\'</b>: \'+ this.y +\' %\'; '
        + '}')
    pieCfg = PieConfig()
    pieCfg.setAllowPointSelect(True)
    pieCfg.setCursor('pointer')
    pieCfg.setDataLabel(PieDataLabel(False))
    pieCfg.setShowInLegend(True)
    cfg.addSeriesConfig(pieCfg)
    chart = InvientCharts(cfg)
    browserShare = XYSeries('Browser Share')
    slices = set()
    # (name, share %, initially sliced out of the pie?)
    for name, share, sliced in (('Firefox', 45.0, False),
            ('IE', 26.8, False), ('Chrome', 12.8, True),
            ('Safari', 8.5, False), ('Opera', 6.2, False),
            ('Others', 0.7, False)):
        if sliced:
            slices.add(DecimalPoint(browserShare, name, share,
                PointConfig(True)))
        else:
            slices.add(DecimalPoint(browserShare, name, share))
    browserShare.setSeriesPoints(slices)
    chart.addSeries(browserShare)
    self.addChart(chart)
def showDonut(self):
    """Donut chart: inner ring is 2008 browser share, outer ring is 2010."""
    cfg = InvientChartsConfig()
    cfg.getGeneralChartConfig().setType(SeriesType.PIE)
    margin = Margin()
    cfg.getGeneralChartConfig().setMargin(margin)
    margin.setTop(50)
    margin.setRight(0)
    margin.setBottom(0)
    margin.setLeft(0)
    cfg.getTitle().setText(
        'Browser market shares at a specific website')
    cfg.getSubtitle().setText(
        'Inner circle: 2008, outer circle: 2010')
    cfg.getTooltip().setFormatterJsFunc(
        'function() {'
        + ' return \'<b>\'+ this.series.name +\'</b><br/>\'+ '
        + ' this.point.name +\': \'+ this.y +\' %\'; '
        + '}')
    chart = InvientCharts(cfg)
    # Inner ring (2008): smaller inner radius, data labels suppressed.
    innerCfg = PieConfig()
    innerCfg.setInnerSize(65)
    innerCfg.setDataLabel(PieDataLabel(False))
    inner = XYSeries('2008', SeriesType.PIE, innerCfg)
    innerPoints = set()
    for name, share, color in (('Firefox', 44.2, RGB(69, 114, 167)),
            ('IE', 46.6, RGB(170, 70, 67)),
            ('Chrome', 3.1, RGB(137, 165, 78)),
            ('Safari', 2.7, RGB(128, 105, 155)),
            ('Opera', 2.3, RGB(128, 105, 155)),
            ('Mozilla', 0.4, RGB(219, 132, 61))):
        innerPoints.add(self.getPointWithColor(inner, name, share, color))
    inner.setSeriesPoints(innerPoints)
    chart.addSeries(inner)
    # Outer ring (2010): larger inner radius, labels with connectors.
    outerCfg = PieConfig()
    outerCfg.setInnerSize(150)
    outerCfg.setDataLabel(PieDataLabel())
    outerCfg.setColor(RGB(0, 0, 0))
    outerCfg.getDataLabel().setConnectorColor(RGB(0, 0, 0))
    outer = XYSeries('2010', SeriesType.PIE, outerCfg)
    outerPoints = set()
    for name, share, color in (('Firefox', 45.0, RGB(69, 114, 167)),
            ('IE', 26.8, RGB(170, 70, 67)),
            ('Chrome', 12.8, RGB(137, 165, 78)),
            ('Safari', 8.5, RGB(128, 105, 155)),
            ('Opera', 6.2, RGB(128, 105, 155)),
            ('Mozilla', 0.2, RGB(219, 132, 61))):
        outerPoints.add(self.getPointWithColor(outer, name, share, color))
    outer.setSeriesPoints(outerPoints)
    chart.addSeries(outer)
    self.addChart(chart)
def getPointWithColor(self, series, name, y, color):
    """Return a named DecimalPoint of *series* whose config uses *color*."""
    coloredPoint = DecimalPoint(series, name, y)
    coloredPoint.setConfig(PointConfig(color))
    return coloredPoint
def showPie(self):
    """Pie chart with data labels; the Chrome slice starts selected."""
    cfg = InvientChartsConfig()
    cfg.getGeneralChartConfig().setType(SeriesType.PIE)
    cfg.getTitle().setText('Browser market shares at a specific '
        'website, 2010')
    pieCfg = PieConfig()
    pieCfg.setAllowPointSelect(True)
    pieCfg.setCursor('pointer')
    pieCfg.setDataLabel(PieDataLabel())
    pieCfg.getDataLabel().setEnabled(True)
    pieCfg.getDataLabel().setFormatterJsFunc(
        'function() {'
        + ' return \'<b>\'+ this.point.name +\'</b>: \'+ this.y +\' %\';'
        + '}')
    pieCfg.getDataLabel().setConnectorColor(RGB(0, 0, 0))
    cfg.addSeriesConfig(pieCfg)
    chart = InvientCharts(cfg)
    browserShare = XYSeries('Browser Share')
    slices = set()
    # (name, share %, initially sliced out of the pie?)
    for name, share, sliced in (('Firefox', 45.0, False),
            ('IE', 26.8, False), ('Chrome', 12.8, True),
            ('Safari', 8.5, False), ('Opera', 6.2, False),
            ('Others', 0.7, False)):
        if sliced:
            slices.add(DecimalPoint(browserShare, name, share,
                PointConfig(True)))
        else:
            slices.add(DecimalPoint(browserShare, name, share))
    browserShare.setSeriesPoints(slices)
    chart.addSeries(browserShare)
    self.addChart(chart)
def showScatter(self):
    """Scatter plot of height vs weight, one series per gender, xy-zoom."""
    cfg = InvientChartsConfig()
    cfg.getGeneralChartConfig().setType(SeriesType.SCATTER)
    cfg.getGeneralChartConfig().setZoomType(ZoomType.XY)
    cfg.getTitle().setText(
        'Height Versus Weight of Individuals by Gender')
    cfg.getSubtitle().setText('Source: Heinz 2003')
    cfg.getTooltip().setFormatterJsFunc(
        'function() {'
        + ' return \'\' + this.x + \' cm, \' + this.y + \' kg\'; '
        + '}')
    heightAxis = NumberXAxis()
    heightAxis.setTitle(AxisTitle('Height (cm)'))
    heightAxis.setStartOnTick(True)
    heightAxis.setEndOnTick(True)
    heightAxis.setShowLastLabel(True)
    cfg.setXAxes(set([heightAxis]))
    weightAxis = NumberYAxis()
    weightAxis.setTitle(AxisTitle('Weight (kg)'))
    cfg.setYAxes(set([weightAxis]))
    legend = Legend()
    legend.setLayout(Layout.VERTICAL)
    legendPos = Position()
    legendPos.setAlign(HorzAlign.LEFT)
    legendPos.setVertAlign(VertAlign.TOP)
    legendPos.setX(100)
    legendPos.setY(70)
    legend.setPosition(legendPos)
    legend.setFloating(True)
    legend.setBorderWidth(1)
    legend.setBackgroundColor(RGB(255, 255, 255))
    cfg.setLegend(legend)
    # Shared marker defaults: 5px symbol with a grey hover outline.
    baseScatterCfg = ScatterConfig()
    marker = SymbolMarker(5)
    baseScatterCfg.setMarker(marker)
    marker.setHoverState(MarkerState())
    marker.getHoverState().setEnabled(True)
    marker.getHoverState().setLineColor(RGB(100, 100, 100))
    cfg.addSeriesConfig(baseScatterCfg)
    chart = InvientCharts(cfg)
    femaleCfg = ScatterConfig()
    femaleCfg.setColor(RGBA(223, 83, 83, 0.5))
    female = XYSeries('Female', femaleCfg)
    female.setSeriesPoints(self.getScatterFemalePoints(female))
    chart.addSeries(female)
    maleCfg = ScatterConfig()
    maleCfg.setColor(RGBA(119, 152, 191, 0.5))
    male = XYSeries('Male', maleCfg)
    male.setSeriesPoints(self.getScatterMalePoints(male))
    chart.addSeries(male)
    self.addChart(chart)
def showCombinationScatterWithRegressionLine(self):
    """Scatter observations overlaid with a straight regression line."""
    cfg = InvientChartsConfig()
    cfg.getTitle().setText('Scatter plot with regression line')
    xAxis = NumberXAxis()
    xAxis.setMin(-0.5)
    xAxis.setMax(5.5)
    cfg.setXAxes(set([xAxis]))
    yAxis = NumberYAxis()
    yAxis.setMin(0.0)
    cfg.setYAxes(set([yAxis]))
    chart = InvientCharts(cfg)
    # Regression line: just two endpoints, markers/hover disabled.
    lineCfg = LineConfig()
    lineCfg.setMarker(SymbolMarker(False))
    lineCfg.setHoverState(SeriesState())
    lineCfg.getHoverState().setLineWidth(0)
    regression = XYSeries('Regression Line', lineCfg)
    regression.setType(SeriesType.LINE)
    regression.setSeriesPoints(self.getPoints(regression,
        [[0, 1.11], [5, 4.51]]))
    chart.addSeries(regression)
    # Observed data points.
    scatterCfg = ScatterConfig()
    scatterCfg.setMarker(SymbolMarker(4))
    observations = XYSeries('Observations', scatterCfg)
    observations.setType(SeriesType.SCATTER)
    observations.setSeriesPoints(self.getPoints(observations,
        [1, 1.5, 2.8, 3.5, 3.9, 4.2]))
    chart.addSeries(observations)
    self.addChart(chart)
def showSpline(self):
    """Inverted spline chart: atmosphere temperature by altitude."""
    chartConfig = InvientChartsConfig()
    chartConfig.getGeneralChartConfig().setType(SeriesType.SPLINE)
    chartConfig.getGeneralChartConfig().setInverted(True)
    chartConfig.getGeneralChartConfig().setWidth(500)
    chartConfig.getTitle().setText('Atmosphere Temperature by Altitude')
    chartConfig.getSubtitle().setText(
        'According to the Standard Atmosphere Model')
    xAxis = NumberXAxis()
    xAxis.setReversed(False)
    xAxis.setTitle(AxisTitle('Altitude'))
    xAxis.setLabel(XAxisDataLabel())
    xAxis.getLabel().setFormatterJsFunc(
        'function() {'
        + ' return this.value +\'km\';'
        + '}')
    xAxis.setMaxPadding(0.05)
    xAxis.setShowLastLabel(True)
    xAxesSet = set()
    xAxesSet.add(xAxis)
    chartConfig.setXAxes(xAxesSet)
    yAxis = NumberYAxis()
    yAxis.setTitle(AxisTitle('Temperature'))
    yAxis.setLineWidth(2)
    yAxis.setLabel(YAxisDataLabel())
    # NOTE(review): encoding the unicode fragment (u'\u2103', the
    # degree-Celsius sign) to UTF-8 bytes and concatenating with str is a
    # Python 2-only idiom; under Python 3, str + bytes raises TypeError.
    yAxis.getLabel().setFormatterJsFunc(
        'function() {'
        + u' return this.value + \'\u2103\';'.encode('utf-8')
        + '}')
    yAxesSet = set()
    yAxesSet.add(yAxis)
    chartConfig.setYAxes(yAxesSet)
    tooltip = Tooltip()
    # Same Python 2-only str/bytes concatenation as above.
    tooltip.setFormatterJsFunc(
        'function() {'
        + u' return \'\' + this.x +\' km: \'+ this.y +\'\u2103\';'.encode('utf-8')
        + '}')
    chartConfig.setTooltip(tooltip)
    legend = Legend()
    legend.setEnabled(False)
    chartConfig.setLegend(legend)
    splineCfg = SplineConfig()
    splineCfg.setMarker(SymbolMarker(True))
    chartConfig.addSeriesConfig(splineCfg)
    chart = InvientCharts(chartConfig)
    series = XYSeries('Temperature')
    # Data points are [altitude km, temperature] pairs.
    series.setSeriesPoints(self.getPoints(series,
        [[0, 15], [10, -50], [20, -56.5], [30, -46.5], [40, -22.1],
        [50, -2.5], [60, -27.7], [70, -55.7], [80, -76.5]]))
    chart.addSeries(series)
    self.addChart(chart)
def showSplineWithSymbol(self):
    """Spline chart of monthly average temperatures with custom markers.

    Tokyo's peak month and London's lowest month carry image markers
    (sun/snow icons) instead of the default symbol markers.
    """
    chartConfig = InvientChartsConfig()
    chartConfig.getGeneralChartConfig().setType(SeriesType.SPLINE)
    chartConfig.getTitle().setText('Monthly Average Temperature')
    chartConfig.getSubtitle().setText('Source: WorldClimate.com')
    xAxis = CategoryAxis()
    xAxis.setCategories(['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun',
        'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'])
    xAxesSet = set()
    xAxesSet.add(xAxis)
    chartConfig.setXAxes(xAxesSet)
    yAxis = NumberYAxis()
    yAxis.setTitle(AxisTitle('Temperature'))
    yAxis.setLabel(YAxisDataLabel())
    # NOTE(review): u'...'.encode('utf-8') concatenated with str is a
    # Python 2-only idiom; under Python 3, str + bytes raises TypeError.
    yAxis.getLabel().setFormatterJsFunc(
        'function() {' +
        u' return this.value + \'\u2103\';'.encode('utf-8') +
        '}')
    yAxesSet = set()
    yAxesSet.add(yAxis)
    chartConfig.setYAxes(yAxesSet)
    tooltip = Tooltip()
    tooltip.setCrosshairs(True)
    tooltip.setShared(True)
    chartConfig.setTooltip(tooltip)
    # Default marker shared by all series: 4px symbol with grey outline.
    splineCfg = SplineConfig()
    symbolMarker = SymbolMarker(True)
    symbolMarker.setRadius(4)
    symbolMarker.setLineColor(RGB(102, 102, 102))
    symbolMarker.setLineWidth(1)
    splineCfg.setMarker(symbolMarker)
    chartConfig.addSeriesConfig(splineCfg)
    chart = InvientCharts(chartConfig)
    splineCfg = SplineConfig()
    splineCfg.setMarker(SymbolMarker(Symbol.SQUARE))
    series = XYSeries('Tokyo', splineCfg)
    # First seven months set in bulk; the rest are added one by one so the
    # 26.5 peak can carry its image marker.
    series.setSeriesPoints(self.getPoints(series,
        [7.0, 6.9, 9.5, 14.5, 18.2, 21.5, 25.2]))
    config = PointConfig(ImageMarker('/graphics/sun.png'))
    highest = DecimalPoint(series, 26.5, config)
    series.addPoint(highest)
    series.addPoint(DecimalPoint(series, 23.3))
    series.addPoint(DecimalPoint(series, 18.3))
    series.addPoint(DecimalPoint(series, 13.9))
    series.addPoint(DecimalPoint(series, 9.6))
    chart.addSeries(series)
    splineCfg = SplineConfig()
    splineCfg.setMarker(SymbolMarker(Symbol.DIAMOND))
    series = XYSeries('London', splineCfg)
    # London's first (lowest) point gets the snow icon marker.
    config = PointConfig(ImageMarker('/graphics/snow.png'))
    lowest = DecimalPoint(series, 3.9, config)
    series.addPoint(lowest)
    series.addPoint(DecimalPoint(series, 4.2))
    series.addPoint(DecimalPoint(series, 5.7))
    series.addPoint(DecimalPoint(series, 8.5))
    series.addPoint(DecimalPoint(series, 11.9))
    series.addPoint(DecimalPoint(series, 15.2))
    series.addPoint(DecimalPoint(series, 17.0))
    series.addPoint(DecimalPoint(series, 16.6))
    series.addPoint(DecimalPoint(series, 14.2))
    series.addPoint(DecimalPoint(series, 10.3))
    series.addPoint(DecimalPoint(series, 6.6))
    series.addPoint(DecimalPoint(series, 4.8))
    chart.addSeries(series)
    self.addChart(chart)
def showSplineUpdatingEachSecond(self):
    """Self-updating spline chart fed with random values.

    A background thread (SelfUpdateSplineThread) pushes new points; on
    Google App Engine, where threads are unavailable, a notification is
    shown instead and the chart stays static.
    """
    chartConfig = InvientChartsConfig()
    chartConfig.getGeneralChartConfig().setType(SeriesType.SPLINE)
    chartConfig.getGeneralChartConfig().setMargin(Margin())
    chartConfig.getGeneralChartConfig().getMargin().setRight(10)
    chartConfig.getTitle().setText('Live random data')
    xAxis = DateTimeAxis()
    xAxis.setTick(Tick())
    xAxis.getTick().setPixelInterval(150)
    xAxes = set()
    xAxes.add(xAxis)
    chartConfig.setXAxes(xAxes)
    yAxis = NumberYAxis()
    yAxis.setTitle(AxisTitle('Value'))
    # Grey horizontal reference line at y = 0.
    plotLine = NumberPlotLine('LineAt0')
    yAxis.addPlotLine(plotLine)
    plotLine.setValue(NumberValue(0.0))
    plotLine.setWidth(1)
    plotLine.setColor(RGB(128, 128, 128))
    yAxes = set()
    yAxes.add(yAxis)
    chartConfig.setYAxes(yAxes)
    chartConfig.getTooltip().setFormatterJsFunc(
        'function() {'
        + ' return \'<b>\'+ this.series.name +\'</b><br/>\'+ '
        + ' $wnd.Highcharts.dateFormat(\'%Y-%m-%d %H:%M:%S\', this.x) +\'<br/>\'+ '
        + ' $wnd.Highcharts.numberFormat(this.y, 2);'
        + '}')
    chartConfig.getLegend().setEnabled(False)
    chart = InvientCharts(chartConfig)
    seriesData = DateTimeSeries(chart, 'Random Data', True)
    points = set()
    dtNow = datetime.now()
    # Add random data: seed the chart with 19 points covering the 19
    # seconds preceding "now".
    for cnt in range(-19, 0):
        points.add(DateTimePoint(seriesData,
            self.getUpdatedDate(dtNow, cnt), random()))
    seriesData.setSeriesPoints(points)
    chart.addSeries(seriesData)
    self.addChart(chart, False, False, False)
    # Invisible progress indicator whose polling keeps the UI refreshing
    # once per second while the background thread adds points.
    self._indicator = ProgressIndicator(0.2)
    self._indicator.setPollingInterval(1000)
    self._indicator.setStyleName('i-progressindicator-invisible')
    self._rightLayout.addComponent(self._indicator)
    if not self.isAppRunningOnGAE():
        self._splineThread = SelfUpdateSplineThread(chart)
        self._splineThread.start()
    else:
        self.getApplication().getMainWindow().showNotification(
            'This chart does not auto-update because Google App '
            'Engine does not support threads.')
def stopSplineSelfUpdateThread(self):
    """Stop the live-update thread (if any) and disable its indicator."""
    if self._splineThread is None:
        return
    self._splineThread.stopUpdating()
    self._indicator.setEnabled(False)
    self.getApplication().notifyAll()
@classmethod
def getUpdatedDate(cls, dt, seconds):
    """Return *dt* shifted by *seconds*, via its epoch-seconds value."""
    return datetime.fromtimestamp(getDate(dt) + seconds)
def showSplineWithPlotBands(self):
    """Spline chart of wind speed at two locations with Beaufort-scale
    plot bands on the y-axis.

    Improvement: the original built its seven plot bands with seven
    near-identical copy-pasted statement blocks; they are now produced by
    a single data-driven loop (identical bands, same insertion order).
    """
    chartConfig = InvientChartsConfig()
    chartConfig.getGeneralChartConfig().setType(SeriesType.SPLINE)
    chartConfig.getTitle().setText('Wind speed during two days')
    # NOTE(review): subtitle says October but pointStart below uses
    # month 8 -- possibly a 0-based-month carry-over from the Java
    # original; confirm getPointStartDate's month convention.
    chartConfig.getSubtitle().setText('October 6th and 7th 2009 at two '
        'locations in Vik i Sogn, Norway')
    chartConfig.getTooltip().setFormatterJsFunc(
        'function() {'
        + ' return \'\' + $wnd.Highcharts.dateFormat(\'%e. %b %Y, %H:00\', this.x) +\': \'+ this.y +\' m/s\'; '
        + '}')
    xAxis = DateTimeAxis()
    xAxesSet = set()
    xAxesSet.add(xAxis)
    chartConfig.setXAxes(xAxesSet)
    yAxis = NumberYAxis()
    yAxis.setTitle(AxisTitle('Wind speed (m/s)'))
    yAxis.setMin(0.0)
    yAxis.setMinorGrid(MinorGrid())
    yAxis.getMinorGrid().setLineWidth(0)
    yAxis.setGrid(Grid())
    yAxis.getGrid().setLineWidth(0)
    # Beaufort-scale bands as (name, lower m/s, upper m/s); every other
    # band is tinted light blue, the rest are fully transparent.
    beaufortBands = (
        ('Light air', 0.3, 1.5),
        ('Light breeze', 1.5, 3.3),
        ('Gentle breeze', 3.3, 5.5),
        ('Moderate breeze', 5.5, 8.0),
        ('Fresh breeze', 8.0, 11.0),
        ('Strong breeze', 11.0, 14.0),
        ('High wind', 14.0, 15.0),
    )
    for idx, (bandName, lower, upper) in enumerate(beaufortBands):
        numberBand = NumberPlotBand(bandName)
        numberBand.setRange(NumberRange(lower, upper))
        if idx % 2 == 0:
            numberBand.setColor(RGBA(68, 170, 213, 0.1))
        else:
            numberBand.setColor(RGBA(0, 0, 0, 0.0))
        numberBand.setLabel(PlotLabel(bandName))
        numberBand.getLabel().setStyle('{ color: \'#606060\' }')
        yAxis.getPlotBands().add(numberBand)
    yAxesSet = set()
    yAxesSet.add(yAxis)
    chartConfig.setYAxes(yAxesSet)
    # Shared series config: thick line, hidden circle markers that appear
    # on hover.
    splineCfg = SplineConfig()
    splineCfg.setLineWidth(4)
    splineCfg.setHoverState(SeriesState())
    splineCfg.getHoverState().setLineWidth(5)
    symbolMarker = SymbolMarker(False)
    splineCfg.setMarker(symbolMarker)
    symbolMarker.setSymbol(Symbol.CIRCLE)
    symbolMarker.setHoverState(MarkerState())
    symbolMarker.getHoverState().setEnabled(True)
    symbolMarker.getHoverState().setRadius(5)
    symbolMarker.getHoverState().setLineWidth(1)
    splineCfg.setPointStart(self.getPointStartDate(2009, 8, 6))
    splineCfg.setPointInterval(3600.0 * 1000.0)  # one hour in milliseconds
    chartConfig.addSeriesConfig(splineCfg)
    chart = InvientCharts(chartConfig)
    series = DateTimeSeries(chart, 'Hestavollane', splineCfg, True)
    series.setSeriesPoints(self.getDateTimePoints(series,
        [4.3, 5.1, 4.3, 5.2, 5.4, 4.7, 3.5, 4.1, 5.6, 7.4, 6.9, 7.1,
        7.9, 7.9, 7.5, 6.7, 7.7, 7.7, 7.4, 7.0, 7.1, 5.8, 5.9, 7.4,
        8.2, 8.5, 9.4, 8.1, 10.9, 10.4, 10.9, 12.4, 12.1, 9.5, 7.5,
        7.1, 7.5, 8.1, 6.8, 3.4, 2.1, 1.9, 2.8, 2.9, 1.3, 4.4, 4.2,
        3.0, 3.0]))
    chart.addSeries(series)
    series = DateTimeSeries(chart, 'Voll', splineCfg, True)
    series.setSeriesPoints(self.getDateTimePoints(series,
        [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.1, 0.0, 0.3, 0.0,
        0.0, 0.4, 0.0, 0.1, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
        0.0, 0.6, 1.2, 1.7, 0.7, 2.9, 4.1, 2.6, 3.7, 3.9, 1.7, 2.3,
        3.0, 3.3, 4.8, 5.0, 4.8, 5.0, 3.2, 2.0, 0.9, 0.4, 0.3, 0.5,
        0.4]))
    chart.addSeries(series)
    self.addChart(chart)
def showCombination(self):
    """Combination chart: grouped columns, a spline of averages, and an
    inset pie of per-person totals.

    Fix: the original constructed a Tooltip with a custom formatter but
    never attached it to the chart config, leaving the formatter dead
    code; it is now registered via chartConfig.setTooltip().
    """
    chartConfig = InvientChartsConfig()
    chartConfig.getTitle().setText('Combination chart')
    tooltip = Tooltip()
    tooltip.setFormatterJsFunc(
        'function() {'
        + ' if (this.point.name) { // the pie chart '
        + ' return this.point.name +\': \'+ this.y +\' fruits\'; '
        + ' } else {'
        + ' return this.x +\': \'+ this.y; '
        + ' } '
        + '}')
    # BUGFIX: register the tooltip (previously built but never attached).
    chartConfig.setTooltip(tooltip)
    xAxis = CategoryAxis()
    xAxis.setCategories(['Apples', 'Oranges', 'Pears', 'Bananas', 'Plums'])
    xAxesSet = set()
    xAxesSet.add(xAxis)
    chartConfig.setXAxes(xAxesSet)
    yAxis = NumberYAxis()
    yAxis.setAllowDecimals(False)
    yAxesSet = set()
    yAxesSet.add(yAxis)
    chartConfig.setYAxes(yAxesSet)
    chart = InvientCharts(chartConfig)
    # Column series: per-person fruit consumption.
    for name, values in (('Jane', [3, 2, 1, 3, 4]),
                         ('John', [2, 3, 5, 7, 6]),
                         ('Joe', [4, 3, 3, 9, 0])):
        seriesData = XYSeries(name, SeriesType.COLUMN)
        seriesData.setSeriesPoints(self.getPoints(seriesData, values))
        chart.addSeries(seriesData)
    # Spline of the per-fruit averages across the three people.
    seriesData = XYSeries('Average', SeriesType.SPLINE)
    seriesData.setSeriesPoints(self.getPoints(seriesData,
        [3, 2.67, 3, 6.33, 3.33]))
    chart.addSeries(seriesData)
    # Series Total consumption: inset pie, colored to match the columns.
    pieCfg = PieConfig()
    pieCfg.setCenterX(100)
    pieCfg.setCenterY(80)
    pieCfg.setSize(100)
    pieCfg.setShowInLegend(False)
    pieCfg.setDataLabel(PieDataLabel())
    pieCfg.getDataLabel().setEnabled(False)
    totalConsumpSeriesData = XYSeries('Total consumption',
        SeriesType.PIE, pieCfg)
    for name, total, color in (('Jane', 13, RGB(69, 114, 167)),
                               ('John', 23, RGB(170, 70, 67)),
                               ('Joe', 19, RGB(137, 165, 78))):
        config = PointConfig(color)
        point = DecimalPoint(totalConsumpSeriesData, name, total, config)
        totalConsumpSeriesData.addPoint(point)
    chartLabel = ChartLabel()
    chartLabel.addLabel(ChartLabelItem('Total fruit consumption',
        '{ left: \'40px\', top: \'8px\', color: \'black\' }'))
    chartConfig.setChartLabel(chartLabel)
    chart.addSeries(totalConsumpSeriesData)
    self.addChart(chart)
    def showCombinationMultipleAxes(self):
        """Demo: Tokyo weather combination chart with three y-axes.

        Rainfall is a column series, sea-level pressure and temperature
        are splines; rainfall and pressure are explicitly bound to their
        own y-axes.
        """
        chartConfig = InvientChartsConfig()
        chartConfig.getTitle().setText(
            'Average Monthly Weather Data for Tokyo')
        chartConfig.getSubtitle().setText('Source: WorldClimate.com')
        # Tooltip formatter picks the unit by the hovered series name.
        # NOTE(review): the u'...'.encode('utf-8') terms mix byte strings
        # into str concatenation (\u2103 is the Celsius sign) — this is a
        # Python 2 idiom and would raise TypeError on Python 3.
        chartConfig.getTooltip().setFormatterJsFunc(
            'function() {'
            + ' var unit = { '
            + ' \'Rainfall\': \'mm\','
            + u' \'Temperature\': \'\u2103\','.encode('utf-8')
            + ' \'Sea-Level Pressure\': \'mb\''
            + ' }[this.series.name];'
            + ' return \'\' + this.x + \': \' + this.y + \' \' + unit; '
            + '}')
        legend = Legend()
        legend.setLayout(Layout.VERTICAL)
        legend.setPosition(Position())
        legend.getPosition().setAlign(HorzAlign.LEFT)
        legend.getPosition().setVertAlign(VertAlign.TOP)
        legend.getPosition().setX(120)
        legend.getPosition().setY(80)
        legend.setFloating(True)
        legend.setBackgroundColor(RGB(255, 255, 255))
        chartConfig.setLegend(legend)
        xAxis = CategoryAxis()
        xAxis.setCategories(['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun',
            'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'])
        xAxesSet = set()
        xAxesSet.add(xAxis)
        chartConfig.setXAxes(xAxesSet)
        # Multiple axes
        temperatureAxis = NumberYAxis()
        temperatureAxis.setAllowDecimals(False)
        temperatureAxis.setLabel(YAxisDataLabel())
        temperatureAxis.getLabel().setFormatterJsFunc(
            'function() {'
            + u' return this.value +\'\u2103\'; '.encode('utf-8')
            + '}')
        temperatureAxis.getLabel().setStyle('{ color: \'#89A54E\' }')
        temperatureAxis.setTitle(AxisTitle('Temperature'))
        temperatureAxis.getTitle().setStyle(' { color: \'#89A54E\' }')
        temperatureAxis.setOpposite(True)
        yAxesSet = set()
        yAxesSet.add(temperatureAxis)
        # secondary y-axis
        rainfallAxis = NumberYAxis()
        rainfallAxis.setGrid(Grid())
        rainfallAxis.getGrid().setLineWidth(0)
        rainfallAxis.setTitle(AxisTitle('Rainfall'))
        rainfallAxis.getTitle().setStyle(' { color: \'#4572A7\' }')
        rainfallAxis.setLabel(YAxisDataLabel())
        rainfallAxis.getLabel().setStyle('{ color: \'#4572A7\' }')
        rainfallAxis.getLabel().setFormatterJsFunc(
            'function() {'
            + ' return this.value +\' mm\'; '
            + '}')
        yAxesSet.add(rainfallAxis)
        # tertiary y-axis
        sealevelPressureAxis = NumberYAxis()
        sealevelPressureAxis.setGrid(Grid())
        sealevelPressureAxis.getGrid().setLineWidth(0)
        sealevelPressureAxis.setTitle(AxisTitle('Sea-Level Pressure'))
        sealevelPressureAxis.getTitle().setStyle(' { color: \'#AA4643\' }')
        sealevelPressureAxis.setLabel(YAxisDataLabel())
        sealevelPressureAxis.getLabel().setStyle('{ color: \'#AA4643\' }')
        sealevelPressureAxis.getLabel().setFormatterJsFunc(
            'function() {'
            + ' return this.value +\' mb\'; '
            + '}')
        sealevelPressureAxis.setOpposite(True)
        yAxesSet.add(sealevelPressureAxis)
        # NOTE(review): yAxesSet is a plain (unordered) set — the axis
        # ordering sent to the client is not guaranteed; verify the
        # config object re-orders axes deterministically.
        chartConfig.setYAxes(yAxesSet)
        chart = InvientCharts(chartConfig)
        # Configuration of Rainfall series
        colCfg = ColumnConfig()
        colCfg.setColor(RGB(69, 114, 167))
        # Rainfall series
        rainfallSeriesData = XYSeries('Rainfall', SeriesType.COLUMN, colCfg)
        rainfallSeriesData.setSeriesPoints(self.getPoints(rainfallSeriesData,
            [49.9, 71.5, 106.4, 129.2, 144.0, 176.0, 135.6, 148.5, 216.4,
            194.1, 95.6, 54.4]))
        rainfallSeriesData.setYAxis(rainfallAxis)
        chart.addSeries(rainfallSeriesData)
        # Configuration of Sealevel series
        seaLevelSplineCfg = SplineConfig()
        seaLevelSplineCfg.setColor(RGB(170, 70, 67))
        seaLevelSplineCfg.setMarker(SymbolMarker(False))
        seaLevelSplineCfg.setDashStyle(DashStyle.SHORT_DOT)
        # Sealevel series
        seaLevelSeriesData = XYSeries('Sea-Level Pressure', SeriesType.SPLINE,
            seaLevelSplineCfg)
        seaLevelSeriesData.setSeriesPoints(self.getPoints(seaLevelSeriesData,
            [1016, 1016, 1015.9, 1015.5, 1012.3, 1009.5, 1009.6, 1010.2,
            1013.1, 1016.9, 1018.2, 1016.7]))
        seaLevelSeriesData.setYAxis(sealevelPressureAxis)
        chart.addSeries(seaLevelSeriesData)
        # Configuration of Temperature series
        tempSplineCfg = SplineConfig()
        tempSplineCfg.setColor(RGB(137, 165, 78))
        # Temperature series
        # NOTE(review): unlike the other two series, no setYAxis() call —
        # temperature presumably falls back to the default axis; confirm
        # this matches the intended axis binding.
        tempSeriesData = XYSeries('Temperature', SeriesType.SPLINE,
            tempSplineCfg)
        tempSeriesData.setSeriesPoints(self.getPoints(tempSeriesData,
            [7.0, 6.9, 9.5, 14.5, 18.2, 21.5, 25.2, 26.5, 23.3, 18.3,
            13.9, 9.6]))
        chart.addSeries(tempSeriesData)
        self.addChart(chart)
def showTimeSeriesZoomable(self):
chartConfig = InvientChartsConfig()
chartConfig.getGeneralChartConfig().setZoomType(ZoomType.X)
chartConfig.getGeneralChartConfig().setSpacing(Spacing())
chartConfig.getGeneralChartConfig().getSpacing().setRight(20)
chartConfig.getSubtitle().setText(
'Click and drag in the plot area to zoom in')
xAxis = DateTimeAxis()
xAxis.setMaxZoom(14 * 24 * 3600 * 1000.0)
xAxesSet = set()
xAxesSet.add(xAxis)
chartConfig.setXAxes(xAxesSet)
yAxis = NumberYAxis()
yAxis.setTitle(AxisTitle('Exchange rate'))
yAxis.setMin(0.6)
yAxis.setStartOnTick(True)
yAxis.setShowFirstLabel(False)
yAxesSet = set()
yAxesSet.add(yAxis)
chartConfig.setYAxes(yAxesSet)
chartConfig.getTooltip().setShared(True)
chartConfig.getLegend().setEnabled(False)
# Set plot options
areaCfg = AreaConfig()
colorStops = list()
colorStops.append(LinearColorStop(0, RGB(69, 114, 167)))
colorStops.append(LinearColorStop(1, RGBA(2, 0, 0, 0)))
# Fill color
areaCfg.setFillColor(LinearGradient(0, 0, 0, 300, colorStops))
areaCfg.setLineWidth(1)
areaCfg.setShadow(False)
areaCfg.setHoverState(SeriesState())
areaCfg.getHoverState().setLineWidth(1)
marker = SymbolMarker(False)
areaCfg.setMarker(marker)
marker.setHoverState(MarkerState())
marker.getHoverState().setEnabled(True)
marker.getHoverState().setRadius(5)
chartConfig.addSeriesConfig(areaCfg)
chart = InvientCharts(chartConfig)
# Area configuration
serieaAreaCfg = AreaConfig()
serieaAreaCfg.setPointStart(self.getPointStartDate(2006, 1, 1))
serieaAreaCfg.setPointInterval(24 * 3600 * 1000.0)
# Series
dateTimeSeries = DateTimeSeries(chart, 'USD to EUR', SeriesType.AREA,
serieaAreaCfg)
points = self.getDateTimeSeriesPoints(dateTimeSeries)
dateTimeSeries.addPoint(points)
chart.addSeries(dateTimeSeries)
self.addChart(chart)
def addChart(self, chart, isPrepend=False, isRegisterEvents=True,
isRegisterSVGEvent=True, isSetHeight=True):
if isRegisterEvents:
self.registerEvents(chart)
chart.setSizeFull()
chart.setStyleName('v-chart-min-width')
if isSetHeight:
chart.setHeight('410px')
if isPrepend:
self._rightLayout.setStyleName('v-chart-master-detail')
self._rightLayout.addComponentAsFirst(chart)
else:
self._rightLayout.removeStyleName('v-chart-master-detail')
self.emptyEventLog()
self._rightLayout.removeAllComponents()
# Add chart
self._rightLayout.addComponent(chart)
# Add "Get SVG" button and register SVG available event
if isRegisterSVGEvent:
self.registerSVGAndPrintEvent(chart)
# Server events log
lbl = Label('Events received by the server:')
self._rightLayout.addComponent(lbl)
self._rightLayout.addComponent(self._eventLog)
def registerSVGAndPrintEvent(self, chart):
gridLayout = GridLayout(2, 1)
gridLayout.setWidth('100%')
gridLayout.setSpacing(True)
svgBtn = Button('Get SVG')
gridLayout.addComponent(svgBtn)
gridLayout.setComponentAlignment(svgBtn, Alignment.MIDDLE_RIGHT)
printBtn = Button('Print')
gridLayout.addComponent(printBtn)
gridLayout.setComponentAlignment(printBtn, Alignment.MIDDLE_LEFT)
self._rightLayout.addComponent(gridLayout)
l = GetSvgClickListener(self, chart)
svgBtn.addListener(l, button.IClickListener)
l = PrintClickListener(chart)
printBtn.addListener(l, button.IClickListener)
def registerEvents(self, chart):
l = DemoChartClickListener(self)
chart.addListener(l)
if chart.getConfig().getGeneralChartConfig().getZoomType() is not None:
l = DemoChartZoomListener(self)
chart.addListener(l)
l = DemoChartResetZoomListener(self)
chart.addListener(l)
l = DemoSeriesClickListerner(self)
chart.addListener(l, [])
l = DemoSeriesHideListerner(self)
chart.addListener(l, [])
l = DemoSeriesShowListerner(self)
chart.addListener(l, [])
l = DemoSeriesLegendItemClickListerner(self)
chart.addListener(l, [])
l = DemoPointClickListener(self)
chart.addListener(l, [])
l = DemoPointRemoveListener(self)
chart.addListener(l, [])
l = DemoPointSelectListener(self)
chart.addListener(l, [])
l = DemoPointUnselectListener(self)
chart.addListener(l, [])
l = DemoPieChartLegendItemClickListener(self)
chart.addListener(l)
@classmethod
def getPointStartDate(cls, year, month, day):
dt = datetime(year, month, day)
return long(totalseconds(dt - datetime(1970, 1, 1)) * 1e03)
@classmethod
def getDateZeroTime(cls, year, month, day):
return datetime(year, month, day)
# @classmethod
# def setZeroTime(cls, cal):
# cal.set(Calendar.HOUR, 0)
# cal.set(Calendar.MINUTE, 0)
# cal.set(Calendar.SECOND, 0)
# cal.set(Calendar.MILLISECOND, 0)
def getDateTimePoints(self, series, values):
points = OrderedSet()
for value in values:
points.add(DateTimePoint(series, value))
return points
@classmethod
def getPoints(cls, series, values):
if len(values) > 0 and isinstance(values[0], (float, int)):
points = OrderedSet()
for value in values:
points.add(DecimalPoint(series, value))
return points
else:
points = OrderedSet()
for value in values:
y = None
if len(value) == 0:
continue
if len(value) == 2:
x = value[0]
y = value[1]
else:
x = value[0]
points.add(DecimalPoint(series, x, y))
return points
@classmethod
def getFormattedTimestamp(cls, dt):
if dt is None:
return None
fmt = '%y/%m/%d %H:%M:%S'
return dt.strftime(fmt)
@classmethod
def getCurrFormattedTimestamp(cls):
return cls.getFormattedTimestamp(datetime.now())
def getChartName(self, chartNameString):
for chartName in ChartName.values():
if chartNameString.lower() == chartName.getName().lower():
return chartName
return None
def getDemoSeriesType(self, demoSeriesTypeName):
for demoSeriesType in DemoSeriesType.values():
if demoSeriesTypeName.lower() == demoSeriesType.getName().lower():
return demoSeriesType
return None
def createChartsTree(self):
tree = Tree('Chart Type')
tree.setContainerDataSource(self.getContainer())
tree.setImmediate(True)
tree.setItemCaptionPropertyId(self._TREE_ITEM_CAPTION_PROP_ID)
tree.setItemCaptionMode(Tree.ITEM_CAPTION_MODE_PROPERTY)
tree.setNullSelectionAllowed(False)
for Id in tree.rootItemIds():
tree.expandItemsRecursively(Id)
l = ChartTypeChangeListener(self, tree)
tree.addListener(l, IValueChangeListener)
return tree
def showChartInstancesForSeriesType(self, demoSeriesTypeName):
self._rightLayout.removeAllComponents()
demoCharts = self.getDemoCharts(self.getDemoSeriesType(
demoSeriesTypeName))
for chartName in demoCharts:
l = SeriesTypeClickListener(self)
btn = Button(chartName.getName(), l)
self._rightLayout.addComponent(btn)
btn.setWidth('200px')
def getContainer(self):
container = HierarchicalContainer()
container.addContainerProperty(self._TREE_ITEM_CAPTION_PROP_ID, str,'')
for demoSeriesType in DemoSeriesType.values():
itemId = demoSeriesType.getName()
item = container.addItem(itemId)
item.getItemProperty(self._TREE_ITEM_CAPTION_PROP_ID).setValue(
demoSeriesType.getName())
container.setChildrenAllowed(itemId, True)
# add child
self.addChartNamesForSeriesType(container, itemId, demoSeriesType)
return container
def addChartNamesForSeriesType(self, container, parentId, demoSeriesType):
for chartName in self.getDemoCharts(demoSeriesType):
childItemId = (demoSeriesType.getName() + self._SEPARATOR
+ chartName.getName())
childItem = container.addItem(childItemId)
childItem.getItemProperty(
self._TREE_ITEM_CAPTION_PROP_ID).setValue(
chartName.getName())
container.setParent(childItemId, parentId)
container.setChildrenAllowed(childItemId, False)
def getDemoCharts(self, demoSeriesType):
chartNames = list()
if demoSeriesType == DemoSeriesType.LINE:
chartNames.append(ChartName.BASIC)
chartNames.append(ChartName.WITH_DATA_LABELS)
chartNames.append(ChartName.TIMESERIES_ZOOMABLE)
chartNames.append(ChartName.MASTER_DETAIL)
chartNames.append(ChartName.CLICK_TO_ADD_POINT)
elif demoSeriesType == DemoSeriesType.BAR:
chartNames.append(ChartName.BASIC)
chartNames.append(ChartName.STACKED)
chartNames.append(ChartName.WITH_NEGATIVE_STACK)
elif demoSeriesType == DemoSeriesType.COLUMN:
chartNames.append(ChartName.BASIC)
chartNames.append(ChartName.WITH_NEGATIVE_VALUES)
chartNames.append(ChartName.STACKED)
chartNames.append(ChartName.STACKED_AND_GROUPED)
chartNames.append(ChartName.STACKED_PERCENT)
chartNames.append(ChartName.WITH_ROTATED_LABELS)
elif demoSeriesType == DemoSeriesType.AREA:
chartNames.append(ChartName.BASIC)
chartNames.append(ChartName.WITH_NEGATIVE_VALUES)
chartNames.append(ChartName.STACKED)
chartNames.append(ChartName.PERCENTAGE)
chartNames.append(ChartName.WITH_MISSING_POINTS)
chartNames.append(ChartName.INVERTED_AXES)
elif demoSeriesType == DemoSeriesType.AREASPLINE:
chartNames.append(ChartName.BASIC)
elif demoSeriesType == DemoSeriesType.PIE:
chartNames.append(ChartName.BASIC)
chartNames.append(ChartName.WITH_LEGEND)
chartNames.append(ChartName.DONUT)
elif demoSeriesType == DemoSeriesType.SCATTER:
chartNames.append(ChartName.BASIC)
elif demoSeriesType == DemoSeriesType.SPLINE:
chartNames.append(ChartName.BASIC)
chartNames.append(ChartName.WITH_PLOTBANDS)
chartNames.append(ChartName.WITH_SYMBOLS)
chartNames.append(ChartName.UPDATING_EACH_SECOND)
elif demoSeriesType == DemoSeriesType.COMBINATION:
chartNames.append(ChartName.COMBINATION_COLUMN_LINE_AND_PIE)
chartNames.append(ChartName.SCATTER_WITH_REGRESSION_LINE)
chartNames.append(ChartName.MULTIPLE_AXES)
return chartNames
    def logEventInfo(self, *args):
        """Log a chart event to the on-screen event log.

        Java-style overload dispatch on argument count (and types):

        - (eventInfo): append ``eventInfo`` to the log.
        - (eventInfo, isAppend): prepend a timestamped line, or clear the
          log when ``isAppend`` is False.
        - (eventName, seriesName): log a series-level event.
        - (eventName, xMin, xMax, yMin, yMax) with float args: log axis
          extremes after a zoom.
        - (eventName, xAxisPos, yAxisPos, mouseX, mouseY): log a chart
          click position.
        - (eventName, seriesName, category, x, y): log a point event.
        - (eventName, seriesName, category, x, y, mouseX, mouseY): log a
          point event including the mouse position.

        Raises ValueError for an unsupported argument count.
        """
        nargs = len(args)
        if nargs == 1:
            eventInfo, = args
            self.logEventInfo(eventInfo, True)
        elif nargs == 2:
            if isinstance(args[1], bool):
                eventInfo, isAppend = args
                # The log widget is kept read-only between updates.
                self._eventLog.setReadOnly(False)
                if isAppend:
                    # Newest entries go on top, prefixed with a timestamp.
                    self._eventLog.setValue('['
                        + self.getCurrFormattedTimestamp() + '] '
                        + eventInfo + '\n'
                        + self._eventLog.getValue())
                else:
                    self._eventLog.setValue('')
                self._eventLog.setReadOnly(True)
            else:
                eventName, seriesName = args
                sb = ''
                sb += '[' + eventName + ']'
                sb += ' series -> ' + seriesName
                self.logEventInfo(sb)
        elif nargs == 5:
            # NOTE(review): float checks distinguish zoom extremes from
            # click positions — integer axis values would fall through to
            # the click branch; confirm callers always pass floats.
            if isinstance(args[1], float):
                if isinstance(args[3], float):
                    eventName, xAxisMin, xAxisMax, yAxisMin, yAxisMax = args
                    sb = ''
                    sb += '[' + eventName + ']'
                    sb += ', xAxisMin -> ' + str(xAxisMin)
                    sb += ', xAxisMax -> ' + str(xAxisMax)
                    sb += ', yAxisMin -> ' + str(yAxisMin)
                    sb += ', yAxisMax -> ' + str(yAxisMax)
                    self.logEventInfo(sb)
                else:
                    eventName, xAxisPos, yAxisPos, mouseX, mouseY = args
                    sb = ''
                    sb += '[' + eventName + ']'
                    sb += ', xAxisPos -> ' + str(xAxisPos)
                    sb += ', yAxisPos -> ' + str(yAxisPos)
                    sb += ', mouseX -> ' + str(mouseX)
                    sb += ', mouseY -> ' + str(mouseY)
                    self.logEventInfo(sb)
            else:
                # NOTE(review): both arms of this datetime check are
                # identical — likely a leftover of the Java overload on
                # Date vs. Number x-values.
                if isinstance(args[3], datetime):
                    eventName, seriesName, category, x, y = args
                    self.logEventInfo(eventName, seriesName, category,
                        x, y, None, None)
                else:
                    eventName, seriesName, category, x, y = args
                    self.logEventInfo(eventName, seriesName, category,
                        x, y, None, None)
        elif nargs == 7:
            # NOTE(review): as above, the datetime/else arms are identical.
            if isinstance(args[3], datetime):
                eventName, seriesName, category, x, y, mouseX, mouseY = args
                self.logStringEventInfo(eventName, seriesName, category,
                    str(x) if x is not None else None,
                    str(y) if y is not None else None,
                    str(mouseX) if mouseX is not None else None,
                    str(mouseY) if mouseY is not None else None)
            else:
                eventName, seriesName, category, x, y, mouseX, mouseY = args
                self.logStringEventInfo(eventName, seriesName, category,
                    str(x) if x is not None else None,
                    str(y) if y is not None else None,
                    str(mouseX) if mouseX is not None else None,
                    str(mouseY) if mouseY is not None else None)
        else:
            raise ValueError
def logStringEventInfo(self, eventName, seriesName, category, x, y,
mouseX, mouseY):
sb = StringIO()
sb.write('[' + eventName + ']')
sb.write(' series -> ' + seriesName)
if category is not None and len(category) > 0:
sb.write(', category -> ' + category)
if x is not None:
sb.write(', x -> ' + str(x))
if y is not None:
sb.write(', y -> ' + str(y))
if mouseX is not None:
sb.write(', mouseX -> ' + str(mouseX))
if mouseY is not None:
sb.write(', mouseY -> ' + str(mouseY))
self.logEventInfo(sb.getvalue())
sb.close()
    def emptyEventLog(self):
        """Clear the server-side event log (isAppend=False wipes it)."""
        self.logEventInfo('', False)
    def getScatterFemalePoints(self, series):
        """Return cached scatter points of [height cm, weight kg] samples
        (female data set from the Highcharts scatter demo).

        Built once and cached in ``self._scatterFemaleData``.
        NOTE(review): the cache ignores *series* — points stay bound to
        the series passed on the first call; verify callers reuse one
        series instance.
        """
        if self._scatterFemaleData is not None:
            return self._scatterFemaleData
        # Initialize data
        self._scatterFemaleData = self.getPoints(series,
            [[161.2, 51.6],
            [167.5, 59.0], [159.5, 49.2],
            [157.0, 63.0], [155.8, 53.6],
            [170.0, 59.0], [159.1, 47.6],
            [166.0, 69.8], [176.2, 66.8],
            [160.2, 75.2], [172.5, 55.2],
            [170.9, 54.2], [172.9, 62.5],
            [153.4, 42.0], [160.0, 50.0],
            [147.2, 49.8], [168.2, 49.2],
            [175.0, 73.2], [157.0, 47.8],
            [167.6, 68.8], [159.5, 50.6],
            [175.0, 82.5], [166.8, 57.2],
            [176.5, 87.8], [170.2, 72.8],
            [174.0, 54.5], [173.0, 59.8],
            [179.9, 67.3], [170.5, 67.8],
            [160.0, 47.0], [154.4, 46.2],
            [162.0, 55.0], [176.5, 83.0],
            [160.0, 54.4], [152.0, 45.8],
            [162.1, 53.6], [170.0, 73.2],
            [160.2, 52.1], [161.3, 67.9],
            [166.4, 56.6], [168.9, 62.3],
            [163.8, 58.5], [167.6, 54.5],
            [160.0, 50.2], [161.3, 60.3],
            [167.6, 58.3], [165.1, 56.2],
            [160.0, 50.2], [170.0, 72.9],
            [157.5, 59.8], [167.6, 61.0],
            [160.7, 69.1], [163.2, 55.9],
            [152.4, 46.5], [157.5, 54.3],
            [168.3, 54.8], [180.3, 60.7],
            [165.5, 60.0], [165.0, 62.0],
            [164.5, 60.3], [156.0, 52.7],
            [160.0, 74.3], [163.0, 62.0],
            [165.7, 73.1], [161.0, 80.0],
            [162.0, 54.7], [166.0, 53.2],
            [174.0, 75.7], [172.7, 61.1],
            [167.6, 55.7], [151.1, 48.7],
            [164.5, 52.3], [163.5, 50.0],
            [152.0, 59.3], [169.0, 62.5],
            [164.0, 55.7], [161.2, 54.8],
            [155.0, 45.9], [170.0, 70.6],
            [176.2, 67.2], [170.0, 69.4],
            [162.5, 58.2], [170.3, 64.8],
            [164.1, 71.6], [169.5, 52.8],
            [163.2, 59.8], [154.5, 49.0],
            [159.8, 50.0], [173.2, 69.2],
            [170.0, 55.9], [161.4, 63.4],
            [169.0, 58.2], [166.2, 58.6],
            [159.4, 45.7], [162.5, 52.2],
            [159.0, 48.6], [162.8, 57.8],
            [159.0, 55.6], [179.8, 66.8],
            [162.9, 59.4], [161.0, 53.6],
            [151.1, 73.2], [168.2, 53.4],
            [168.9, 69.0], [173.2, 58.4],
            [171.8, 56.2], [178.0, 70.6],
            [164.3, 59.8], [163.0, 72.0],
            [168.5, 65.2], [166.8, 56.6],
            [172.7, 105.2], [163.5, 51.8],
            [169.4, 63.4], [167.8, 59.0],
            [159.5, 47.6], [167.6, 63.0],
            [161.2, 55.2], [160.0, 45.0],
            [163.2, 54.0], [162.2, 50.2],
            [161.3, 60.2], [149.5, 44.8],
            [157.5, 58.8], [163.2, 56.4],
            [172.7, 62.0], [155.0, 49.2],
            [156.5, 67.2], [164.0, 53.8],
            [160.9, 54.4], [162.8, 58.0],
            [167.0, 59.8], [160.0, 54.8],
            [160.0, 43.2], [168.9, 60.5],
            [158.2, 46.4], [156.0, 64.4],
            [160.0, 48.8], [167.1, 62.2],
            [158.0, 55.5], [167.6, 57.8],
            [156.0, 54.6], [162.1, 59.2],
            [173.4, 52.7], [159.8, 53.2],
            [170.5, 64.5], [159.2, 51.8],
            [157.5, 56.0], [161.3, 63.6],
            [162.6, 63.2], [160.0, 59.5],
            [168.9, 56.8], [165.1, 64.1],
            [162.6, 50.0], [165.1, 72.3],
            [166.4, 55.0], [160.0, 55.9],
            [152.4, 60.4], [170.2, 69.1],
            [162.6, 84.5], [170.2, 55.9],
            [158.8, 55.5], [172.7, 69.5],
            [167.6, 76.4], [162.6, 61.4],
            [167.6, 65.9], [156.2, 58.6],
            [175.2, 66.8], [172.1, 56.6],
            [162.6, 58.6], [160.0, 55.9],
            [165.1, 59.1], [182.9, 81.8],
            [166.4, 70.7], [165.1, 56.8],
            [177.8, 60.0], [165.1, 58.2],
            [175.3, 72.7], [154.9, 54.1],
            [158.8, 49.1], [172.7, 75.9],
            [168.9, 55.0], [161.3, 57.3],
            [167.6, 55.0], [165.1, 65.5],
            [175.3, 65.5], [157.5, 48.6],
            [163.8, 58.6], [167.6, 63.6],
            [165.1, 55.2], [165.1, 62.7],
            [168.9, 56.6], [162.6, 53.9],
            [164.5, 63.2], [176.5, 73.6],
            [168.9, 62.0], [175.3, 63.6],
            [159.4, 53.2], [160.0, 53.4],
            [170.2, 55.0], [162.6, 70.5],
            [167.6, 54.5], [162.6, 54.5],
            [160.7, 55.9], [160.0, 59.0],
            [157.5, 63.6], [162.6, 54.5],
            [152.4, 47.3], [170.2, 67.7],
            [165.1, 80.9], [172.7, 70.5],
            [165.1, 60.9], [170.2, 63.6],
            [170.2, 54.5], [170.2, 59.1],
            [161.3, 70.5], [167.6, 52.7],
            [167.6, 62.7], [165.1, 86.3],
            [162.6, 66.4], [152.4, 67.3],
            [168.9, 63.0], [170.2, 73.6],
            [175.2, 62.3], [175.2, 57.7],
            [160.0, 55.4], [165.1, 104.1],
            [174.0, 55.5], [170.2, 77.3],
            [160.0, 80.5], [167.6, 64.5],
            [167.6, 72.3], [167.6, 61.4],
            [154.9, 58.2], [162.6, 81.8],
            [175.3, 63.6], [171.4, 53.4],
            [157.5, 54.5], [165.1, 53.6],
            [160.0, 60.0], [174.0, 73.6],
            [162.6, 61.4], [174.0, 55.5],
            [162.6, 63.6], [161.3, 60.9],
            [156.2, 60.0], [149.9, 46.8],
            [169.5, 57.3], [160.0, 64.1],
            [175.3, 63.6], [169.5, 67.3],
            [160.0, 75.5], [172.7, 68.2],
            [162.6, 61.4], [157.5, 76.8],
            [176.5, 71.8], [164.4, 55.5],
            [160.7, 48.6], [174.0, 66.4],
            [163.8, 67.3]])
        return self._scatterFemaleData
    def getScatterMalePoints(self, series):
        """Return cached scatter points of [height cm, weight kg] samples
        (male data set from the Highcharts scatter demo).

        Built once and cached in ``self._scatterMaleData``.
        NOTE(review): as with the female data, the cache ignores *series*
        after the first call.
        """
        if self._scatterMaleData is not None:
            return self._scatterMaleData
        self._scatterMaleData = self.getPoints(series,
            [[174.0, 65.6],
            [175.3, 71.8], [193.5, 80.7],
            [186.5, 72.6], [187.2, 78.8],
            [181.5, 74.8], [184.0, 86.4],
            [184.5, 78.4], [175.0, 62.0],
            [184.0, 81.6], [180.0, 76.6],
            [177.8, 83.6], [192.0, 90.0],
            [176.0, 74.6], [174.0, 71.0],
            [184.0, 79.6], [192.7, 93.8],
            [171.5, 70.0], [173.0, 72.4],
            [176.0, 85.9], [176.0, 78.8],
            [180.5, 77.8], [172.7, 66.2],
            [176.0, 86.4], [173.5, 81.8],
            [178.0, 89.6], [180.3, 82.8],
            [180.3, 76.4], [164.5, 63.2],
            [173.0, 60.9], [183.5, 74.8],
            [175.5, 70.0], [188.0, 72.4],
            [189.2, 84.1], [172.8, 69.1],
            [170.0, 59.5], [182.0, 67.2],
            [170.0, 61.3], [177.8, 68.6],
            [184.2, 80.1], [186.7, 87.8],
            [171.4, 84.7], [172.7, 73.4],
            [175.3, 72.1], [180.3, 82.6],
            [182.9, 88.7], [188.0, 84.1],
            [177.2, 94.1], [172.1, 74.9],
            [167.0, 59.1], [169.5, 75.6],
            [174.0, 86.2], [172.7, 75.3],
            [182.2, 87.1], [164.1, 55.2],
            [163.0, 57.0], [171.5, 61.4],
            [184.2, 76.8], [174.0, 86.8],
            [174.0, 72.2], [177.0, 71.6],
            [186.0, 84.8], [167.0, 68.2],
            [171.8, 66.1], [182.0, 72.0],
            [167.0, 64.6], [177.8, 74.8],
            [164.5, 70.0], [192.0, 101.6],
            [175.5, 63.2], [171.2, 79.1],
            [181.6, 78.9], [167.4, 67.7],
            [181.1, 66.0], [177.0, 68.2],
            [174.5, 63.9], [177.5, 72.0],
            [170.5, 56.8], [182.4, 74.5],
            [197.1, 90.9], [180.1, 93.0],
            [175.5, 80.9], [180.6, 72.7],
            [184.4, 68.0], [175.5, 70.9],
            [180.6, 72.5], [177.0, 72.5],
            [177.1, 83.4], [181.6, 75.5],
            [176.5, 73.0], [175.0, 70.2],
            [174.0, 73.4], [165.1, 70.5],
            [177.0, 68.9], [192.0, 102.3],
            [176.5, 68.4], [169.4, 65.9],
            [182.1, 75.7], [179.8, 84.5],
            [175.3, 87.7], [184.9, 86.4],
            [177.3, 73.2], [167.4, 53.9],
            [178.1, 72.0], [168.9, 55.5],
            [157.2, 58.4], [180.3, 83.2],
            [170.2, 72.7], [177.8, 64.1],
            [172.7, 72.3], [165.1, 65.0],
            [186.7, 86.4], [165.1, 65.0],
            [174.0, 88.6], [175.3, 84.1],
            [185.4, 66.8], [177.8, 75.5],
            [180.3, 93.2], [180.3, 82.7],
            [177.8, 58.0], [177.8, 79.5],
            [177.8, 78.6], [177.8, 71.8],
            [177.8, 116.4], [163.8, 72.2],
            [188.0, 83.6], [198.1, 85.5],
            [175.3, 90.9], [166.4, 85.9],
            [190.5, 89.1], [166.4, 75.0],
            [177.8, 77.7], [179.7, 86.4],
            [172.7, 90.9], [190.5, 73.6],
            [185.4, 76.4], [168.9, 69.1],
            [167.6, 84.5], [175.3, 64.5],
            [170.2, 69.1], [190.5, 108.6],
            [177.8, 86.4], [190.5, 80.9],
            [177.8, 87.7], [184.2, 94.5],
            [176.5, 80.2], [177.8, 72.0],
            [180.3, 71.4], [171.4, 72.7],
            [172.7, 84.1], [172.7, 76.8],
            [177.8, 63.6], [177.8, 80.9],
            [182.9, 80.9], [170.2, 85.5],
            [167.6, 68.6], [175.3, 67.7],
            [165.1, 66.4], [185.4, 102.3],
            [181.6, 70.5], [172.7, 95.9],
            [190.5, 84.1], [179.1, 87.3],
            [175.3, 71.8], [170.2, 65.9],
            [193.0, 95.9], [171.4, 91.4],
            [177.8, 81.8], [177.8, 96.8],
            [167.6, 69.1], [167.6, 82.7],
            [180.3, 75.5], [182.9, 79.5],
            [176.5, 73.6], [186.7, 91.8],
            [188.0, 84.1], [188.0, 85.9],
            [177.8, 81.8], [174.0, 82.5],
            [177.8, 80.5], [171.4, 70.0],
            [185.4, 81.8], [185.4, 84.1],
            [188.0, 90.5], [188.0, 91.4],
            [182.9, 89.1], [176.5, 85.0],
            [175.3, 69.1], [175.3, 73.6],
            [188.0, 80.5], [188.0, 82.7],
            [175.3, 86.4], [170.5, 67.7],
            [179.1, 92.7], [177.8, 93.6],
            [175.3, 70.9], [182.9, 75.0],
            [170.8, 93.2], [188.0, 93.2],
            [180.3, 77.7], [177.8, 61.4],
            [185.4, 94.1], [168.9, 75.0],
            [185.4, 83.6], [180.3, 85.5],
            [174.0, 73.9], [167.6, 66.8],
            [182.9, 87.3], [160.0, 72.3],
            [180.3, 88.6], [167.6, 75.5],
            [186.7, 101.4], [175.3, 91.1],
            [175.3, 67.3], [175.9, 77.7],
            [175.3, 81.8], [179.1, 75.5],
            [181.6, 84.5], [177.8, 76.6],
            [182.9, 85.0], [177.8, 102.5],
            [184.2, 77.3], [179.1, 71.8],
            [176.5, 87.9], [188.0, 94.3],
            [174.0, 70.9], [167.6, 64.5],
            [170.2, 77.3], [167.6, 72.3],
            [188.0, 87.3], [174.0, 80.0],
            [176.5, 82.3], [180.3, 73.6],
            [167.6, 74.1], [188.0, 85.9],
            [180.3, 73.2], [167.6, 76.3],
            [183.0, 65.9], [183.0, 90.9],
            [179.1, 89.1], [170.2, 62.3],
            [177.8, 82.7], [179.1, 79.1],
            [190.5, 98.2], [177.8, 84.1],
            [180.3, 83.2], [180.3, 83.2]])
        return self._scatterMaleData
    def getDateTimeSeriesPoints(self, series):
        """Return the daily USD-to-EUR exchange-rate values (the data set
        from the Highcharts time-series demo) wrapped as DateTimePoints
        bound to *series*.
        """
        return self.getDateTimePoints(series, [0.8446, 0.8445, 0.8444, 0.8451,
            0.8418, 0.8264, 0.8258, 0.8232, 0.8233, 0.8258, 0.8283, 0.8278,
            0.8256, 0.8292, 0.8239, 0.8239, 0.8245, 0.8265, 0.8261, 0.8269,
            0.8273, 0.8244, 0.8244, 0.8172, 0.8139, 0.8146, 0.8164, 0.82,
            0.8269, 0.8269, 0.8269, 0.8258, 0.8247, 0.8286, 0.8289, 0.8316,
            0.832, 0.8333, 0.8352, 0.8357, 0.8355, 0.8354, 0.8403, 0.8403,
            0.8406, 0.8403, 0.8396, 0.8418, 0.8409, 0.8384, 0.8386, 0.8372,
            0.839, 0.84, 0.8389, 0.84, 0.8423, 0.8423, 0.8435, 0.8422,
            0.838, 0.8373, 0.8316, 0.8303, 0.8303, 0.8302, 0.8369, 0.84,
            0.8385, 0.84, 0.8401, 0.8402, 0.8381, 0.8351, 0.8314, 0.8273,
            0.8213, 0.8207, 0.8207, 0.8215, 0.8242, 0.8273, 0.8301, 0.8346,
            0.8312, 0.8312, 0.8312, 0.8306, 0.8327, 0.8282, 0.824, 0.8255,
            0.8256, 0.8273, 0.8209, 0.8151, 0.8149, 0.8213, 0.8273, 0.8273,
            0.8261, 0.8252, 0.824, 0.8262, 0.8258, 0.8261, 0.826, 0.8199,
            0.8153, 0.8097, 0.8101, 0.8119, 0.8107, 0.8105, 0.8084, 0.8069,
            0.8047, 0.8023, 0.7965, 0.7919, 0.7921, 0.7922, 0.7934, 0.7918,
            0.7915, 0.787, 0.7861, 0.7861, 0.7853, 0.7867, 0.7827, 0.7834,
            0.7766, 0.7751, 0.7739, 0.7767, 0.7802, 0.7788, 0.7828, 0.7816,
            0.7829, 0.783, 0.7829, 0.7781, 0.7811, 0.7831, 0.7826, 0.7855,
            0.7855, 0.7845, 0.7798, 0.7777, 0.7822, 0.7785, 0.7744, 0.7743,
            0.7726, 0.7766, 0.7806, 0.785, 0.7907, 0.7912, 0.7913, 0.7931,
            0.7952, 0.7951, 0.7928, 0.791, 0.7913, 0.7912, 0.7941, 0.7953,
            0.7921, 0.7919, 0.7968, 0.7999, 0.7999, 0.7974, 0.7942, 0.796,
            0.7969, 0.7862, 0.7821, 0.7821, 0.7821, 0.7811, 0.7833, 0.7849,
            0.7819, 0.7809, 0.7809, 0.7827, 0.7848, 0.785, 0.7873, 0.7894,
            0.7907, 0.7909, 0.7947, 0.7987, 0.799, 0.7927, 0.79, 0.7878,
            0.7878, 0.7907, 0.7922, 0.7937, 0.786, 0.787, 0.7838, 0.7838,
            0.7837, 0.7836, 0.7806, 0.7825, 0.7798, 0.777, 0.777, 0.7772,
            0.7793, 0.7788, 0.7785, 0.7832, 0.7865, 0.7865, 0.7853, 0.7847,
            0.7809, 0.778, 0.7799, 0.78, 0.7801, 0.7765, 0.7785, 0.7811,
            0.782, 0.7835, 0.7845, 0.7844, 0.782, 0.7811, 0.7795, 0.7794,
            0.7806, 0.7794, 0.7794, 0.7778, 0.7793, 0.7808, 0.7824, 0.787,
            0.7894, 0.7893, 0.7882, 0.7871, 0.7882, 0.7871, 0.7878, 0.79,
            0.7901, 0.7898, 0.7879, 0.7886, 0.7858, 0.7814, 0.7825, 0.7826,
            0.7826, 0.786, 0.7878, 0.7868, 0.7883, 0.7893, 0.7892, 0.7876,
            0.785, 0.787, 0.7873, 0.7901, 0.7936, 0.7939, 0.7938, 0.7956,
            0.7975, 0.7978, 0.7972, 0.7995, 0.7995, 0.7994, 0.7976, 0.7977,
            0.796, 0.7922, 0.7928, 0.7929, 0.7948, 0.797, 0.7953, 0.7907,
            0.7872, 0.7852, 0.7852, 0.786, 0.7862, 0.7836, 0.7837, 0.784,
            0.7867, 0.7867, 0.7869, 0.7837, 0.7827, 0.7825, 0.7779, 0.7791,
            0.779, 0.7787, 0.78, 0.7807, 0.7803, 0.7817, 0.7799, 0.7799,
            0.7795, 0.7801, 0.7765, 0.7725, 0.7683, 0.7641, 0.7639, 0.7616,
            0.7608, 0.759, 0.7582, 0.7539, 0.75, 0.75, 0.7507, 0.7505,
            0.7516, 0.7522, 0.7531, 0.7577, 0.7577, 0.7582, 0.755, 0.7542,
            0.7576, 0.7616, 0.7648, 0.7648, 0.7641, 0.7614, 0.757, 0.7587,
            0.7588, 0.762, 0.762, 0.7617, 0.7618, 0.7615, 0.7612, 0.7596,
            0.758, 0.758, 0.758, 0.7547, 0.7549, 0.7613, 0.7655, 0.7693,
            0.7694, 0.7688, 0.7678, 0.7708, 0.7727, 0.7749, 0.7741, 0.7741,
            0.7732, 0.7727, 0.7737, 0.7724, 0.7712, 0.772, 0.7721, 0.7717,
            0.7704, 0.769, 0.7711, 0.774, 0.7745, 0.7745, 0.774, 0.7716,
            0.7713, 0.7678, 0.7688, 0.7718, 0.7718, 0.7728, 0.7729, 0.7698,
            0.7685, 0.7681, 0.769, 0.769, 0.7698, 0.7699, 0.7651, 0.7613,
            0.7616, 0.7614, 0.7614, 0.7607, 0.7602, 0.7611, 0.7622, 0.7615,
            0.7598, 0.7598, 0.7592, 0.7573, 0.7566, 0.7567, 0.7591, 0.7582,
            0.7585, 0.7613, 0.7631, 0.7615, 0.76, 0.7613, 0.7627, 0.7627,
            0.7608, 0.7583, 0.7575, 0.7562, 0.752, 0.7512, 0.7512, 0.7517,
            0.752, 0.7511, 0.748, 0.7509, 0.7531, 0.7531, 0.7527, 0.7498,
            0.7493, 0.7504, 0.75, 0.7491, 0.7491, 0.7485, 0.7484, 0.7492,
            0.7471, 0.7459, 0.7477, 0.7477, 0.7483, 0.7458, 0.7448, 0.743,
            0.7399, 0.7395, 0.7395, 0.7378, 0.7382, 0.7362, 0.7355, 0.7348,
            0.7361, 0.7361, 0.7365, 0.7362, 0.7331, 0.7339, 0.7344, 0.7327,
            0.7327, 0.7336, 0.7333, 0.7359, 0.7359, 0.7372, 0.736, 0.736,
            0.735, 0.7365, 0.7384, 0.7395, 0.7413, 0.7397, 0.7396, 0.7385,
            0.7378, 0.7366, 0.74, 0.7411, 0.7406, 0.7405, 0.7414, 0.7431,
            0.7431, 0.7438, 0.7443, 0.7443, 0.7443, 0.7434, 0.7429, 0.7442,
            0.744, 0.7439, 0.7437, 0.7437, 0.7429, 0.7403, 0.7399, 0.7418,
            0.7468, 0.748, 0.748, 0.749, 0.7494, 0.7522, 0.7515, 0.7502,
            0.7472, 0.7472, 0.7462, 0.7455, 0.7449, 0.7467, 0.7458, 0.7427,
            0.7427, 0.743, 0.7429, 0.744, 0.743, 0.7422, 0.7388, 0.7388,
            0.7369, 0.7345, 0.7345, 0.7345, 0.7352, 0.7341, 0.7341, 0.734,
            0.7324, 0.7272, 0.7264, 0.7255, 0.7258, 0.7258, 0.7256, 0.7257,
            0.7247, 0.7243, 0.7244, 0.7235, 0.7235, 0.7235, 0.7235, 0.7262,
            0.7288, 0.7301, 0.7337, 0.7337, 0.7324, 0.7297, 0.7317, 0.7315,
            0.7288, 0.7263, 0.7263, 0.7242, 0.7253, 0.7264, 0.727, 0.7312,
            0.7305, 0.7305, 0.7318, 0.7358, 0.7409, 0.7454, 0.7437, 0.7424,
            0.7424, 0.7415, 0.7419, 0.7414, 0.7377, 0.7355, 0.7315, 0.7315,
            0.732, 0.7332, 0.7346, 0.7328, 0.7323, 0.734, 0.734, 0.7336,
            0.7351, 0.7346, 0.7321, 0.7294, 0.7266, 0.7266, 0.7254, 0.7242,
            0.7213, 0.7197, 0.7209, 0.721, 0.721, 0.721, 0.7209, 0.7159,
            0.7133, 0.7105, 0.7099, 0.7099, 0.7093, 0.7093, 0.7076, 0.707,
            0.7049, 0.7012, 0.7011, 0.7019, 0.7046, 0.7063, 0.7089, 0.7077,
            0.7077, 0.7077, 0.7091, 0.7118, 0.7079, 0.7053, 0.705, 0.7055,
            0.7055, 0.7045, 0.7051, 0.7051, 0.7017, 0.7, 0.6995, 0.6994,
            0.7014, 0.7036, 0.7021, 0.7002, 0.6967, 0.695, 0.695, 0.6939,
            0.694, 0.6922, 0.6919, 0.6914, 0.6894, 0.6891, 0.6904, 0.689,
            0.6834, 0.6823, 0.6807, 0.6815, 0.6815, 0.6847, 0.6859, 0.6822,
            0.6827, 0.6837, 0.6823, 0.6822, 0.6822, 0.6792, 0.6746, 0.6735,
            0.6731, 0.6742, 0.6744, 0.6739, 0.6731, 0.6761, 0.6761, 0.6785,
            0.6818, 0.6836, 0.6823, 0.6805, 0.6793, 0.6849, 0.6833, 0.6825,
            0.6825, 0.6816, 0.6799, 0.6813, 0.6809, 0.6868, 0.6933, 0.6933,
            0.6945, 0.6944, 0.6946, 0.6964, 0.6965, 0.6956, 0.6956, 0.695,
            0.6948, 0.6928, 0.6887, 0.6824, 0.6794, 0.6794, 0.6803, 0.6855,
            0.6824, 0.6791, 0.6783, 0.6785, 0.6785, 0.6797, 0.68, 0.6803,
            0.6805, 0.676, 0.677, 0.677, 0.6736, 0.6726, 0.6764, 0.6821,
            0.6831, 0.6842, 0.6842, 0.6887, 0.6903, 0.6848, 0.6824, 0.6788,
            0.6814, 0.6814, 0.6797, 0.6769, 0.6765, 0.6733, 0.6729, 0.6758,
            0.6758, 0.675, 0.678, 0.6833, 0.6856, 0.6903, 0.6896, 0.6896,
            0.6882, 0.6879, 0.6862, 0.6852, 0.6823, 0.6813, 0.6813, 0.6822,
            0.6802, 0.6802, 0.6784, 0.6748, 0.6747, 0.6747, 0.6748, 0.6733,
            0.665, 0.6611, 0.6583, 0.659, 0.659, 0.6581, 0.6578, 0.6574,
            0.6532, 0.6502, 0.6514, 0.6514, 0.6507, 0.651, 0.6489, 0.6424,
            0.6406, 0.6382, 0.6382, 0.6341, 0.6344, 0.6378, 0.6439, 0.6478,
            0.6481, 0.6481, 0.6494, 0.6438, 0.6377, 0.6329, 0.6336, 0.6333,
            0.6333, 0.633, 0.6371, 0.6403, 0.6396, 0.6364, 0.6356, 0.6356,
            0.6368, 0.6357, 0.6354, 0.632, 0.6332, 0.6328, 0.6331, 0.6342,
            0.6321, 0.6302, 0.6278, 0.6308, 0.6324, 0.6324, 0.6307, 0.6277,
            0.6269, 0.6335, 0.6392, 0.64, 0.6401, 0.6396, 0.6407, 0.6423,
            0.6429, 0.6472, 0.6485, 0.6486, 0.6467, 0.6444, 0.6467, 0.6509,
            0.6478, 0.6461, 0.6461, 0.6468, 0.6449, 0.647, 0.6461, 0.6452,
            0.6422, 0.6422, 0.6425, 0.6414, 0.6366, 0.6346, 0.635, 0.6346,
            0.6346, 0.6343, 0.6346, 0.6379, 0.6416, 0.6442, 0.6431, 0.6431,
            0.6435, 0.644, 0.6473, 0.6469, 0.6386, 0.6356, 0.634, 0.6346,
            0.643, 0.6452, 0.6467, 0.6506, 0.6504, 0.6503, 0.6481, 0.6451,
            0.645, 0.6441, 0.6414, 0.6409, 0.6409, 0.6428, 0.6431, 0.6418,
            0.6371, 0.6349, 0.6333, 0.6334, 0.6338, 0.6342, 0.632, 0.6318,
            0.637, 0.6368, 0.6368, 0.6383, 0.6371, 0.6371, 0.6355, 0.632,
            0.6277, 0.6276, 0.6291, 0.6274, 0.6293, 0.6311, 0.631, 0.6312,
            0.6312, 0.6304, 0.6294, 0.6348, 0.6378, 0.6368, 0.6368, 0.6368,
            0.636, 0.637, 0.6418, 0.6411, 0.6435, 0.6427, 0.6427, 0.6419,
            0.6446, 0.6468, 0.6487, 0.6594, 0.6666, 0.6666, 0.6678, 0.6712,
            0.6705, 0.6718, 0.6784, 0.6811, 0.6811, 0.6794, 0.6804, 0.6781,
            0.6756, 0.6735, 0.6763, 0.6762, 0.6777, 0.6815, 0.6802, 0.678,
            0.6796, 0.6817, 0.6817, 0.6832, 0.6877, 0.6912, 0.6914, 0.7009,
            0.7012, 0.701, 0.7005, 0.7076, 0.7087, 0.717, 0.7105, 0.7031,
            0.7029, 0.7006, 0.7035, 0.7045, 0.6956, 0.6988, 0.6915, 0.6914,
            0.6859, 0.6778, 0.6815, 0.6815, 0.6843, 0.6846, 0.6846, 0.6923,
            0.6997, 0.7098, 0.7188, 0.7232, 0.7262, 0.7266, 0.7359, 0.7368,
            0.7337, 0.7317, 0.7387, 0.7467, 0.7461, 0.7366, 0.7319, 0.7361,
            0.7437, 0.7432, 0.7461, 0.7461, 0.7454, 0.7549, 0.7742, 0.7801,
            0.7903, 0.7876, 0.7928, 0.7991, 0.8007, 0.7823, 0.7661, 0.785,
            0.7863, 0.7862, 0.7821, 0.7858, 0.7731, 0.7779, 0.7844, 0.7866,
            0.7864, 0.7788, 0.7875, 0.7971, 0.8004, 0.7857, 0.7932, 0.7938,
            0.7927, 0.7918, 0.7919, 0.7989, 0.7988, 0.7949, 0.7948, 0.7882,
            0.7745, 0.771, 0.775, 0.7791, 0.7882, 0.7882, 0.7899, 0.7905,
            0.7889, 0.7879, 0.7855, 0.7866, 0.7865, 0.7795, 0.7758, 0.7717,
            0.761, 0.7497, 0.7471, 0.7473, 0.7407, 0.7288, 0.7074, 0.6927,
            0.7083, 0.7191, 0.719, 0.7153, 0.7156, 0.7158, 0.714, 0.7119,
            0.7129, 0.7129, 0.7049, 0.7095])
def getMasterDetailData(self, series):
    """Return the demo dataset for the master-detail chart.

    The literal list is a fixed series of USD-to-EUR exchange-rate samples
    (same data as the time-series demo); it is wrapped into DateTimePoint
    objects by getDateTimePoints (defined elsewhere in this file).

    @param series: the series the generated points will belong to.
    @return: whatever getDateTimePoints returns for this data
             (presumably a collection of DateTimePoint -- confirm there).
    """
    return self.getDateTimePoints(series, [0.8446, 0.8445, 0.8444, 0.8451,
        0.8418, 0.8264, 0.8258, 0.8232, 0.8233, 0.8258, 0.8283, 0.8278,
        0.8256, 0.8292, 0.8239, 0.8239, 0.8245, 0.8265, 0.8261, 0.8269,
        0.8273, 0.8244, 0.8244, 0.8172, 0.8139, 0.8146, 0.8164, 0.82,
        0.8269, 0.8269, 0.8269, 0.8258, 0.8247, 0.8286, 0.8289, 0.8316,
        0.832, 0.8333, 0.8352, 0.8357, 0.8355, 0.8354, 0.8403, 0.8403,
        0.8406, 0.8403, 0.8396, 0.8418, 0.8409, 0.8384, 0.8386, 0.8372,
        0.839, 0.84, 0.8389, 0.84, 0.8423, 0.8423, 0.8435, 0.8422,
        0.838, 0.8373, 0.8316, 0.8303, 0.8303, 0.8302, 0.8369, 0.84,
        0.8385, 0.84, 0.8401, 0.8402, 0.8381, 0.8351, 0.8314, 0.8273,
        0.8213, 0.8207, 0.8207, 0.8215, 0.8242, 0.8273, 0.8301, 0.8346,
        0.8312, 0.8312, 0.8312, 0.8306, 0.8327, 0.8282, 0.824, 0.8255,
        0.8256, 0.8273, 0.8209, 0.8151, 0.8149, 0.8213, 0.8273, 0.8273,
        0.8261, 0.8252, 0.824, 0.8262, 0.8258, 0.8261, 0.826, 0.8199,
        0.8153, 0.8097, 0.8101, 0.8119, 0.8107, 0.8105, 0.8084, 0.8069,
        0.8047, 0.8023, 0.7965, 0.7919, 0.7921, 0.7922, 0.7934, 0.7918,
        0.7915, 0.787, 0.7861, 0.7861, 0.7853, 0.7867, 0.7827, 0.7834,
        0.7766, 0.7751, 0.7739, 0.7767, 0.7802, 0.7788, 0.7828, 0.7816,
        0.7829, 0.783, 0.7829, 0.7781, 0.7811, 0.7831, 0.7826, 0.7855,
        0.7855, 0.7845, 0.7798, 0.7777, 0.7822, 0.7785, 0.7744, 0.7743,
        0.7726, 0.7766, 0.7806, 0.785, 0.7907, 0.7912, 0.7913, 0.7931,
        0.7952, 0.7951, 0.7928, 0.791, 0.7913, 0.7912, 0.7941, 0.7953,
        0.7921, 0.7919, 0.7968, 0.7999, 0.7999, 0.7974, 0.7942, 0.796,
        0.7969, 0.7862, 0.7821, 0.7821, 0.7821, 0.7811, 0.7833, 0.7849,
        0.7819, 0.7809, 0.7809, 0.7827, 0.7848, 0.785, 0.7873, 0.7894,
        0.7907, 0.7909, 0.7947, 0.7987, 0.799, 0.7927, 0.79, 0.7878,
        0.7878, 0.7907, 0.7922, 0.7937, 0.786, 0.787, 0.7838, 0.7838,
        0.7837, 0.7836, 0.7806, 0.7825, 0.7798, 0.777, 0.777, 0.7772,
        0.7793, 0.7788, 0.7785, 0.7832, 0.7865, 0.7865, 0.7853, 0.7847,
        0.7809, 0.778, 0.7799, 0.78, 0.7801, 0.7765, 0.7785, 0.7811,
        0.782, 0.7835, 0.7845, 0.7844, 0.782, 0.7811, 0.7795, 0.7794,
        0.7806, 0.7794, 0.7794, 0.7778, 0.7793, 0.7808, 0.7824, 0.787,
        0.7894, 0.7893, 0.7882, 0.7871, 0.7882, 0.7871, 0.7878, 0.79,
        0.7901, 0.7898, 0.7879, 0.7886, 0.7858, 0.7814, 0.7825, 0.7826,
        0.7826, 0.786, 0.7878, 0.7868, 0.7883, 0.7893, 0.7892, 0.7876,
        0.785, 0.787, 0.7873, 0.7901, 0.7936, 0.7939, 0.7938, 0.7956,
        0.7975, 0.7978, 0.7972, 0.7995, 0.7995, 0.7994, 0.7976, 0.7977,
        0.796, 0.7922, 0.7928, 0.7929, 0.7948, 0.797, 0.7953, 0.7907,
        0.7872, 0.7852, 0.7852, 0.786, 0.7862, 0.7836, 0.7837, 0.784,
        0.7867, 0.7867, 0.7869, 0.7837, 0.7827, 0.7825, 0.7779, 0.7791,
        0.779, 0.7787, 0.78, 0.7807, 0.7803, 0.7817, 0.7799, 0.7799,
        0.7795, 0.7801, 0.7765, 0.7725, 0.7683, 0.7641, 0.7639, 0.7616,
        0.7608, 0.759, 0.7582, 0.7539, 0.75, 0.75, 0.7507, 0.7505,
        0.7516, 0.7522, 0.7531, 0.7577, 0.7577, 0.7582, 0.755, 0.7542,
        0.7576, 0.7616, 0.7648, 0.7648, 0.7641, 0.7614, 0.757, 0.7587,
        0.7588, 0.762, 0.762, 0.7617, 0.7618, 0.7615, 0.7612, 0.7596,
        0.758, 0.758, 0.758, 0.7547, 0.7549, 0.7613, 0.7655, 0.7693,
        0.7694, 0.7688, 0.7678, 0.7708, 0.7727, 0.7749, 0.7741, 0.7741,
        0.7732, 0.7727, 0.7737, 0.7724, 0.7712, 0.772, 0.7721, 0.7717,
        0.7704, 0.769, 0.7711, 0.774, 0.7745, 0.7745, 0.774, 0.7716,
        0.7713, 0.7678, 0.7688, 0.7718, 0.7718, 0.7728, 0.7729, 0.7698,
        0.7685, 0.7681, 0.769, 0.769, 0.7698, 0.7699, 0.7651, 0.7613,
        0.7616, 0.7614, 0.7614, 0.7607, 0.7602, 0.7611, 0.7622, 0.7615,
        0.7598, 0.7598, 0.7592, 0.7573, 0.7566, 0.7567, 0.7591, 0.7582,
        0.7585, 0.7613, 0.7631, 0.7615, 0.76, 0.7613, 0.7627, 0.7627,
        0.7608, 0.7583, 0.7575, 0.7562, 0.752, 0.7512, 0.7512, 0.7517,
        0.752, 0.7511, 0.748, 0.7509, 0.7531, 0.7531, 0.7527, 0.7498,
        0.7493, 0.7504, 0.75, 0.7491, 0.7491, 0.7485, 0.7484, 0.7492,
        0.7471, 0.7459, 0.7477, 0.7477, 0.7483, 0.7458, 0.7448, 0.743,
        0.7399, 0.7395, 0.7395, 0.7378, 0.7382, 0.7362, 0.7355, 0.7348,
        0.7361, 0.7361, 0.7365, 0.7362, 0.7331, 0.7339, 0.7344, 0.7327,
        0.7327, 0.7336, 0.7333, 0.7359, 0.7359, 0.7372, 0.736, 0.736,
        0.735, 0.7365, 0.7384, 0.7395, 0.7413, 0.7397, 0.7396, 0.7385,
        0.7378, 0.7366, 0.74, 0.7411, 0.7406, 0.7405, 0.7414, 0.7431,
        0.7431, 0.7438, 0.7443, 0.7443, 0.7443, 0.7434, 0.7429, 0.7442,
        0.744, 0.7439, 0.7437, 0.7437, 0.7429, 0.7403, 0.7399, 0.7418,
        0.7468, 0.748, 0.748, 0.749, 0.7494, 0.7522, 0.7515, 0.7502,
        0.7472, 0.7472, 0.7462, 0.7455, 0.7449, 0.7467, 0.7458, 0.7427,
        0.7427, 0.743, 0.7429, 0.744, 0.743, 0.7422, 0.7388, 0.7388,
        0.7369, 0.7345, 0.7345, 0.7345, 0.7352, 0.7341, 0.7341, 0.734,
        0.7324, 0.7272, 0.7264, 0.7255, 0.7258, 0.7258, 0.7256, 0.7257,
        0.7247, 0.7243, 0.7244, 0.7235, 0.7235, 0.7235, 0.7235, 0.7262,
        0.7288, 0.7301, 0.7337, 0.7337, 0.7324, 0.7297, 0.7317, 0.7315,
        0.7288, 0.7263, 0.7263, 0.7242, 0.7253, 0.7264, 0.727, 0.7312,
        0.7305, 0.7305, 0.7318, 0.7358, 0.7409, 0.7454, 0.7437, 0.7424,
        0.7424, 0.7415, 0.7419, 0.7414, 0.7377, 0.7355, 0.7315, 0.7315,
        0.732, 0.7332, 0.7346, 0.7328, 0.7323, 0.734, 0.734, 0.7336,
        0.7351, 0.7346, 0.7321, 0.7294, 0.7266, 0.7266, 0.7254, 0.7242,
        0.7213, 0.7197, 0.7209, 0.721, 0.721, 0.721, 0.7209, 0.7159,
        0.7133, 0.7105, 0.7099, 0.7099, 0.7093, 0.7093, 0.7076, 0.707,
        0.7049, 0.7012, 0.7011, 0.7019, 0.7046, 0.7063, 0.7089, 0.7077,
        0.7077, 0.7077, 0.7091, 0.7118, 0.7079, 0.7053, 0.705, 0.7055,
        0.7055, 0.7045, 0.7051, 0.7051, 0.7017, 0.7, 0.6995, 0.6994,
        0.7014, 0.7036, 0.7021, 0.7002, 0.6967, 0.695, 0.695, 0.6939,
        0.694, 0.6922, 0.6919, 0.6914, 0.6894, 0.6891, 0.6904, 0.689,
        0.6834, 0.6823, 0.6807, 0.6815, 0.6815, 0.6847, 0.6859, 0.6822,
        0.6827, 0.6837, 0.6823, 0.6822, 0.6822, 0.6792, 0.6746, 0.6735,
        0.6731, 0.6742, 0.6744, 0.6739, 0.6731, 0.6761, 0.6761, 0.6785,
        0.6818, 0.6836, 0.6823, 0.6805, 0.6793, 0.6849, 0.6833, 0.6825,
        0.6825, 0.6816, 0.6799, 0.6813, 0.6809, 0.6868, 0.6933, 0.6933,
        0.6945, 0.6944, 0.6946, 0.6964, 0.6965, 0.6956, 0.6956, 0.695,
        0.6948, 0.6928, 0.6887, 0.6824, 0.6794, 0.6794, 0.6803, 0.6855,
        0.6824, 0.6791, 0.6783, 0.6785, 0.6785, 0.6797, 0.68, 0.6803,
        0.6805, 0.676, 0.677, 0.677, 0.6736, 0.6726, 0.6764, 0.6821,
        0.6831, 0.6842, 0.6842, 0.6887, 0.6903, 0.6848, 0.6824, 0.6788,
        0.6814, 0.6814, 0.6797, 0.6769, 0.6765, 0.6733, 0.6729, 0.6758,
        0.6758, 0.675, 0.678, 0.6833, 0.6856, 0.6903, 0.6896, 0.6896,
        0.6882, 0.6879, 0.6862, 0.6852, 0.6823, 0.6813, 0.6813, 0.6822,
        0.6802, 0.6802, 0.6784, 0.6748, 0.6747, 0.6747, 0.6748, 0.6733,
        0.665, 0.6611, 0.6583, 0.659, 0.659, 0.6581, 0.6578, 0.6574,
        0.6532, 0.6502, 0.6514, 0.6514, 0.6507, 0.651, 0.6489, 0.6424,
        0.6406, 0.6382, 0.6382, 0.6341, 0.6344, 0.6378, 0.6439, 0.6478,
        0.6481, 0.6481, 0.6494, 0.6438, 0.6377, 0.6329, 0.6336, 0.6333,
        0.6333, 0.633, 0.6371, 0.6403, 0.6396, 0.6364, 0.6356, 0.6356,
        0.6368, 0.6357, 0.6354, 0.632, 0.6332, 0.6328, 0.6331, 0.6342,
        0.6321, 0.6302, 0.6278, 0.6308, 0.6324, 0.6324, 0.6307, 0.6277,
        0.6269, 0.6335, 0.6392, 0.64, 0.6401, 0.6396, 0.6407, 0.6423,
        0.6429, 0.6472, 0.6485, 0.6486, 0.6467, 0.6444, 0.6467, 0.6509,
        0.6478, 0.6461, 0.6461, 0.6468, 0.6449, 0.647, 0.6461, 0.6452,
        0.6422, 0.6422, 0.6425, 0.6414, 0.6366, 0.6346, 0.635, 0.6346,
        0.6346, 0.6343, 0.6346, 0.6379, 0.6416, 0.6442, 0.6431, 0.6431,
        0.6435, 0.644, 0.6473, 0.6469, 0.6386, 0.6356, 0.634, 0.6346,
        0.643, 0.6452, 0.6467, 0.6506, 0.6504, 0.6503, 0.6481, 0.6451,
        0.645, 0.6441, 0.6414, 0.6409, 0.6409, 0.6428, 0.6431, 0.6418,
        0.6371, 0.6349, 0.6333, 0.6334, 0.6338, 0.6342, 0.632, 0.6318,
        0.637, 0.6368, 0.6368, 0.6383, 0.6371, 0.6371, 0.6355, 0.632,
        0.6277, 0.6276, 0.6291, 0.6274, 0.6293, 0.6311, 0.631, 0.6312,
        0.6312, 0.6304, 0.6294, 0.6348, 0.6378, 0.6368, 0.6368, 0.6368,
        0.636, 0.637, 0.6418, 0.6411, 0.6435, 0.6427, 0.6427, 0.6419,
        0.6446, 0.6468, 0.6487, 0.6594, 0.6666, 0.6666, 0.6678, 0.6712,
        0.6705, 0.6718, 0.6784, 0.6811, 0.6811, 0.6794, 0.6804, 0.6781,
        0.6756, 0.6735, 0.6763, 0.6762, 0.6777, 0.6815, 0.6802, 0.678,
        0.6796, 0.6817, 0.6817, 0.6832, 0.6877, 0.6912, 0.6914, 0.7009,
        0.7012, 0.701, 0.7005, 0.7076, 0.7087, 0.717, 0.7105, 0.7031,
        0.7029, 0.7006, 0.7035, 0.7045, 0.6956, 0.6988, 0.6915, 0.6914,
        0.6859, 0.6778, 0.6815, 0.6815, 0.6843, 0.6846, 0.6846, 0.6923,
        0.6997, 0.7098, 0.7188, 0.7232, 0.7262, 0.7266, 0.7359, 0.7368,
        0.7337, 0.7317, 0.7387, 0.7467, 0.7461, 0.7366, 0.7319, 0.7361,
        0.7437, 0.7432, 0.7461, 0.7461, 0.7454, 0.7549, 0.7742, 0.7801,
        0.7903, 0.7876, 0.7928, 0.7991, 0.8007, 0.7823, 0.7661, 0.785,
        0.7863, 0.7862, 0.7821, 0.7858, 0.7731, 0.7779, 0.7844, 0.7866,
        0.7864, 0.7788, 0.7875, 0.7971, 0.8004, 0.7857, 0.7932, 0.7938,
        0.7927, 0.7918, 0.7919, 0.7989, 0.7988, 0.7949, 0.7948, 0.7882,
        0.7745, 0.771, 0.775, 0.7791, 0.7882, 0.7882, 0.7899, 0.7905,
        0.7889, 0.7879, 0.7855, 0.7866, 0.7865, 0.7795, 0.7758, 0.7717,
        0.761, 0.7497, 0.7471, 0.7473, 0.7407, 0.7288, 0.7074, 0.6927,
        0.7083, 0.7191, 0.719, 0.7153, 0.7156, 0.7158, 0.714, 0.7119,
        0.7129, 0.7129, 0.7049, 0.7095])
class MasterChartZoomListener(ChartZoomListener):
    """Keeps the detail chart and the master chart's plot bands in sync
    with the zoom selection made on the master chart.
    """

    def __init__(self, window, masterChart, detailChart):
        # window exposes _masterChartMinDate/_masterChartMaxDate below.
        self._window = window
        self._masterChart = masterChart
        self._detailChart = detailChart

    def chartZoom(self, chartZoomEvent):
        """Rebuild the detail series from the zoomed x-range and shade the
        regions of the master chart that fall outside the selection.

        Note: statement order matters here -- the detail series is removed,
        repopulated and re-added before refresh() is called.
        """
        # chartZoomEvent.getChartArea().get
        masterChartSeries = self._masterChart.getSeries('USD to EUR')
        # Zoomed x-range boundaries (epoch milliseconds, per the /1e03
        # conversion below).
        min_ = chartZoomEvent.getChartArea().getxAxisMin()
        max_ = chartZoomEvent.getChartArea().getxAxisMax()
        detailPoints = set()
        detailChartSeries = self._detailChart.getSeries('USD to EUR')
        self._detailChart.removeSeries(detailChartSeries)
        # Copy every master point strictly inside the zoom window into the
        # detail series (boundary points are excluded).
        for point in masterChartSeries.getPoints():
            if (timestamp(point.getX()) > min_
                    and timestamp(point.getX()) < max_):
                dtp = DateTimePoint(detailChartSeries,
                        point.getX(), point.getY())
                detailPoints.add(dtp)
        # Update series with new points
        detailChartSeries.setSeriesPoints(detailPoints)
        self._detailChart.addSeries(detailChartSeries)
        self._detailChart.refresh()
        # Update plotbands
        masterDateTimeAxis = iter(self._masterChart.getConfig().getXAxes()).next() # FIXME: iterator
        # Replace the 'mask-before' band: shades from the overall start of
        # the master data up to the selection start.
        masterDateTimeAxis.removePlotBand('mask-before')
        plotBandBefore = DateTimePlotBand('mask-before')
        plotBandBefore.setRange(DateTimeRange(self._window._masterChartMinDate,
                datetime.fromtimestamp(min_ / 1e03)))
        plotBandBefore.setColor(RGBA(0, 0, 0, 0.2))
        masterDateTimeAxis.addPlotBand(plotBandBefore)
        # Replace the 'mask-after' band: shades from the selection end to
        # the overall end of the master data.
        masterDateTimeAxis.removePlotBand('mask-after')
        plotBandAfter = DateTimePlotBand('mask-after')
        plotBandAfter.setRange(DateTimeRange(
                datetime.fromtimestamp(max_ / 1e03),
                self._window._masterChartMaxDate))
        plotBandAfter.setColor(RGBA(0, 0, 0, 0.2))
        masterDateTimeAxis.addPlotBand(plotBandAfter)
        self._masterChart.refresh()
class AddPointChartClickListener(ChartClickListener):
    """Adds a point to the 'User Supplied Data' series wherever the
    chart background is clicked, after logging the click.
    """

    def __init__(self, window):
        self._window = window

    def chartClick(self, chartClickEvent):
        clicked = chartClickEvent.getPoint()
        mouse = chartClickEvent.getMousePosition()
        # Log first, then mutate the series.
        self._window.logEventInfo('chartClick',
                clicked.getX(),
                clicked.getY(),
                mouse.getMouseX(),
                mouse.getMouseY())
        series = chartClickEvent.getChart().getSeries('User Supplied Data')
        series.addPoint(DecimalPoint(series, clicked.getX(), clicked.getY()))
class AddPointClickListener(PointClickListener):
    """Removes a clicked point from the 'User Supplied Data' series,
    as long as at least one point would remain afterwards.
    """

    def __init__(self, window):
        self._window = window

    def pointClick(self, pointClickEvent):
        clicked = pointClickEvent.getPoint()
        mouse = pointClickEvent.getMousePosition()
        self._window.logEventInfo('pointClick',
                clicked.getSeries().getName(),
                pointClickEvent.getCategory(),
                clicked.getX(),
                clicked.getY(),
                mouse.getMouseX(),
                mouse.getMouseY())
        series = pointClickEvent.getChart().getSeries('User Supplied Data')
        # Never delete the last remaining point.
        if len(series.getPoints()) > 1:
            series.removePoint(clicked)
class SelfUpdateSplineThread(Thread):
def __init__(self, chart):
super(SelfUpdateSplineThread, self).__init__()
self._chart = chart
self._keepUpdating = True ## FIXME: volatile
def stopUpdating(self):
self._keepUpdating = False
print 'stopUpdating ' + self._keepUpdating
def keepUpdating(self):
return self._keepUpdating
def run(self):
while self.keepUpdating():
# Sleep for 1 second
try:
sleep(1000)
except KeyboardInterrupt, e:
print ('InterruptedException occured. Exception message '
+ str(e))
seriesData = self._chart.getSeries('Random Data')
seriesData.addPoint(DateTimePoint(seriesData, datetime(),
random()), True)
print 'Inside run() keepUpdating ' + self._keepUpdating
class GetSvgClickListener(button.IClickListener):
    """On each button click, registers a listener that will log the
    chart's SVG once it becomes available.
    """

    def __init__(self, window, chart):
        self._window = window
        self._chart = chart

    def buttonClick(self, event):
        # A fresh listener is attached on every click, mirroring the
        # original behavior.
        self._chart.addListener(DemoChartSVGAvailableListener(self._window))
class DemoChartSVGAvailableListener(ChartSVGAvailableListener):
    """Logs the chart's SVG markup when the server makes it available."""

    def __init__(self, window):
        self._window = window

    def svgAvailable(self, chartSVGAvailableEvent):
        message = '[svgAvailable]' + ' svg -> ' + chartSVGAvailableEvent.getSVG()
        self._window.logEventInfo(message)
class PrintClickListener(button.IClickListener):
    """Triggers the chart's client-side print action on button click."""

    def __init__(self, chart):
        self._chart = chart

    def buttonClick(self, event):
        # print_() is the Python-safe rename of the chart's print method.
        self._chart.print_()
class DemoChartClickListener(ChartClickListener):
    """Logs chart-background clicks (axis values plus mouse pixels)."""

    def __init__(self, window):
        self._window = window

    def chartClick(self, chartClickEvent):
        clicked = chartClickEvent.getPoint()
        mouse = chartClickEvent.getMousePosition()
        self._window.logEventInfo('chartClick',
                clicked.getX(),
                clicked.getY(),
                mouse.getMouseX(),
                mouse.getMouseY())
class DemoChartZoomListener(ChartZoomListener):
    """Logs the axis extents of a zoom selection on the chart."""

    def __init__(self, window):
        self._window = window

    def chartZoom(self, chartZoomEvent):
        area = chartZoomEvent.getChartArea()
        self._window.logEventInfo('chartSelection',
                area.getxAxisMin(),
                area.getxAxisMax(),
                area.getyAxisMin(),
                area.getyAxisMax())
class DemoChartResetZoomListener(ChartResetZoomListener):
    """Logs when the user resets a chart zoom selection."""

    def __init__(self, window):
        self._window = window

    def chartResetZoom(self, chartResetZoomEvent):
        self._window.logEventInfo('[chartSelectionReset]')
class DemoSeriesClickListerner(SeriesClickListerner):
    """Logs series-click events to the demo window's event log."""

    def __init__(self, window):
        self._window = window

    def seriesClick(self, seriesClickEvent):
        """Log the clicked series name, nearest point and mouse position.

        Defect fixed: the original branched on isinstance(nearestPoint,
        DecimalPoint) but both branches were byte-identical, so the dead
        check has been removed; behavior is unchanged.
        """
        EVENT_NAME = 'seriesClick'
        nearest = seriesClickEvent.getNearestPoint()
        mouse = seriesClickEvent.getMousePosition()
        self._window.logEventInfo(EVENT_NAME,
                seriesClickEvent.getSeries().getName(),
                None,
                nearest.getX(),
                nearest.getY(),
                mouse.getMouseX(),
                mouse.getMouseY())
class DemoSeriesHideListerner(SeriesHideListerner):
    """Logs when a series is hidden via the chart legend."""

    def __init__(self, window):
        self._window = window

    def seriesHide(self, seriesHideEvent):
        name = seriesHideEvent.getSeries().getName()
        self._window.logEventInfo('seriesHide', name)
class DemoSeriesShowListerner(SeriesShowListerner):
    """Logs when a series is re-shown via the chart legend."""

    def __init__(self, window):
        self._window = window

    def seriesShow(self, seriesShowEvent):
        name = seriesShowEvent.getSeries().getName()
        self._window.logEventInfo('seriesShow', name)
class DemoSeriesLegendItemClickListerner(SeriesLegendItemClickListerner):
    """Logs clicks on a series' legend item."""

    def __init__(self, window):
        self._window = window

    def seriesLegendItemClick(self, seriesLegendItemClickEvent):
        name = seriesLegendItemClickEvent.getSeries().getName()
        self._window.logEventInfo('seriesLegendItemClick', name)
class DemoPointClickListener(PointClickListener):
    """Logs point-click events to the demo window's event log."""

    def __init__(self, window):
        self._window = window

    def pointClick(self, pointClickEvent):
        """Log series name, category, point coordinates and mouse position.

        Defect fixed: the original isinstance(point, DecimalPoint) check
        guarded two byte-identical branches; the dead conditional has been
        removed with no behavior change.
        """
        EVENT_NAME = 'pointClick'
        point = pointClickEvent.getPoint()
        mouse = pointClickEvent.getMousePosition()
        self._window.logEventInfo(EVENT_NAME,
                point.getSeries().getName(),
                pointClickEvent.getCategory(),
                point.getX(),
                point.getY(),
                mouse.getMouseX(),
                mouse.getMouseY())
class DemoPointRemoveListener(PointRemoveListener):
    """Logs point-remove events to the demo window's event log."""

    def __init__(self, window):
        self._window = window

    def pointRemove(self, pointRemoveEvent):
        """Log the removed point's series, category and coordinates.

        Defect fixed: both branches of the original isinstance(point,
        DecimalPoint) check were identical, so the check was dead code and
        has been removed; behavior is unchanged.
        """
        EVENT_NAME = 'pointRemove'
        point = pointRemoveEvent.getPoint()
        self._window.logEventInfo(EVENT_NAME,
                point.getSeries().getName(),
                pointRemoveEvent.getCategory(),
                point.getX(),
                point.getY())
class DemoPointSelectListener(PointSelectListener):
    """Logs point-selection events to the demo window's event log."""

    def __init__(self, window):
        self._window = window

    def pointSelected(self, pointSelectEvent):
        """Log the selected point's series, category and coordinates.

        Defect fixed: the original isinstance(point, DecimalPoint) branch
        duplicated its else branch exactly; the dead conditional is gone.
        """
        EVENT_NAME = 'pointSelected'
        point = pointSelectEvent.getPoint()
        self._window.logEventInfo(EVENT_NAME,
                point.getSeries().getName(),
                pointSelectEvent.getCategory(),
                point.getX(),
                point.getY())
class DemoPointUnselectListener(PointUnselectListener):
    """Logs point-unselection events to the demo window's event log."""

    def __init__(self, window):
        self._window = window

    def pointUnSelect(self, pointUnSelectEvent):
        """Log the unselected point's series, category and coordinates.

        Defect fixed: the original isinstance(point, DecimalPoint) check
        selected between two byte-identical branches; it has been removed
        with no behavior change.
        """
        EVENT_NAME = 'pointUnSelected'
        point = pointUnSelectEvent.getPoint()
        self._window.logEventInfo(EVENT_NAME,
                point.getSeries().getName(),
                pointUnSelectEvent.getCategory(),
                point.getX(),
                point.getY())
class DemoPieChartLegendItemClickListener(PieChartLegendItemClickListener):
    """Logs pie-chart legend-item clicks for DecimalPoint slices only;
    clicks whose point is not a DecimalPoint are silently ignored, matching
    the original behavior.
    """

    def __init__(self, window):
        self._window = window

    def legendItemClick(self, legendItemClickEvent):
        point = legendItemClickEvent.getPoint()
        if not isinstance(point, DecimalPoint):
            return
        self._window.logEventInfo('pieLegendItemClick',
                point.getSeries().getName(),
                None,
                point.getX(),
                point.getY())
class ChartName(object):
    """Enum-style registry of display names for the demo charts.

    The class-level members start out as None and are replaced with
    ChartName instances right after the class definition.
    """

    BASIC = None
    DONUT = None
    CLICK_TO_ADD_POINT = None
    MASTER_DETAIL = None
    TIMESERIES_ZOOMABLE = None
    WITH_DATA_LABELS = None
    STACKED = None
    WITH_NEGATIVE_STACK = None
    WITH_NEGATIVE_VALUES = None
    STACKED_AND_GROUPED = None
    STACKED_PERCENT = None
    WITH_ROTATED_LABELS = None
    WITH_MISSING_POINTS = None
    INVERTED_AXES = None
    WITH_LEGEND = None
    WITH_PLOTBANDS = None
    WITH_SYMBOLS = None
    UPDATING_EACH_SECOND = None
    COMBINATION_COLUMN_LINE_AND_PIE = None
    PERCENTAGE = None
    SCATTER_WITH_REGRESSION_LINE = None
    MULTIPLE_AXES = None

    def __init__(self, name):
        self._name = name

    def getName(self):
        """Return the human-readable chart name."""
        return self._name

    @classmethod
    def values(cls):
        """Return every member, in declaration order, as a list."""
        members = (cls.BASIC, cls.DONUT, cls.CLICK_TO_ADD_POINT,
                cls.MASTER_DETAIL, cls.TIMESERIES_ZOOMABLE,
                cls.WITH_DATA_LABELS, cls.STACKED, cls.WITH_NEGATIVE_STACK,
                cls.WITH_NEGATIVE_VALUES, cls.STACKED_AND_GROUPED,
                cls.STACKED_PERCENT, cls.WITH_ROTATED_LABELS,
                cls.WITH_MISSING_POINTS, cls.INVERTED_AXES, cls.WITH_LEGEND,
                cls.WITH_PLOTBANDS, cls.WITH_SYMBOLS,
                cls.UPDATING_EACH_SECOND,
                cls.COMBINATION_COLUMN_LINE_AND_PIE, cls.PERCENTAGE,
                cls.SCATTER_WITH_REGRESSION_LINE, cls.MULTIPLE_AXES)
        return list(members)
# Populate the ChartName "enum": replace the None placeholders declared on
# the class with real ChartName instances (one per demo chart).
ChartName.BASIC = ChartName('Basic')
ChartName.DONUT = ChartName('Donut')
ChartName.CLICK_TO_ADD_POINT = ChartName('Click to add a point')
ChartName.MASTER_DETAIL = ChartName('Master-detail')
ChartName.TIMESERIES_ZOOMABLE = ChartName('Time series, zoomable')
ChartName.WITH_DATA_LABELS = ChartName('With data labels')
ChartName.STACKED = ChartName('Stacked')
ChartName.WITH_NEGATIVE_STACK = ChartName('With negative stack')
ChartName.WITH_NEGATIVE_VALUES = ChartName('With negative values')
ChartName.STACKED_AND_GROUPED = ChartName('Stacked and grouped')
ChartName.STACKED_PERCENT = ChartName('Stacked percentage')
ChartName.WITH_ROTATED_LABELS = ChartName('With rotated labels')
ChartName.WITH_MISSING_POINTS = ChartName('With missing points')
ChartName.INVERTED_AXES = ChartName('Inverted axes')
ChartName.WITH_LEGEND = ChartName('With legend')
ChartName.WITH_PLOTBANDS = ChartName('With plot bands')
ChartName.WITH_SYMBOLS = ChartName('With symbols')
ChartName.UPDATING_EACH_SECOND = ChartName('Updating each second')
ChartName.COMBINATION_COLUMN_LINE_AND_PIE = ChartName('Column, spline and pie')
ChartName.PERCENTAGE = ChartName('Percentage')
ChartName.SCATTER_WITH_REGRESSION_LINE = ChartName('Scatter with regression line')
ChartName.MULTIPLE_AXES = ChartName('Multiple axes')
class DemoSeriesType(object):
    """Enum-style pairing of an underlying SeriesType with its demo label.

    The class-level members start as None and are populated with instances
    immediately after the class definition.
    """

    LINE = None
    SPLINE = None
    SCATTER = None
    AREA = None
    AREASPLINE = None
    BAR = None
    COLUMN = None
    PIE = None
    COMBINATION = None

    def __init__(self, seriesType, name):
        self._seriesType = seriesType
        self._name = name

    def getSeriesType(self):
        """Return the wrapped SeriesType value."""
        return self._seriesType

    def getName(self):
        """Return the display label for this series type."""
        return self._name

    @classmethod
    def values(cls):
        """Return every member, in declaration order, as a list."""
        members = (cls.LINE, cls.SPLINE, cls.SCATTER, cls.AREA,
                cls.AREASPLINE, cls.BAR, cls.COLUMN, cls.PIE,
                cls.COMBINATION)
        return list(members)
# Populate the DemoSeriesType "enum": bind each placeholder to a real
# instance pairing the framework SeriesType with its demo display label.
DemoSeriesType.LINE = DemoSeriesType(SeriesType.LINE, 'Line')
DemoSeriesType.SPLINE = DemoSeriesType(SeriesType.SPLINE, 'Spline')
DemoSeriesType.SCATTER = DemoSeriesType(SeriesType.SCATTER, 'Scatter')
DemoSeriesType.AREA = DemoSeriesType(SeriesType.AREA, 'Area - Line')
DemoSeriesType.AREASPLINE = DemoSeriesType(SeriesType.AREASPLINE, 'Area - Spline')
DemoSeriesType.BAR = DemoSeriesType(SeriesType.BAR, 'Bar')
DemoSeriesType.COLUMN = DemoSeriesType(SeriesType.COLUMN, 'Column')
DemoSeriesType.PIE = DemoSeriesType(SeriesType.PIE, 'Pie')
# Note: COMBINATION maps to SeriesType.COMMONSERIES, not a COMBINATION
# member -- intentional, per the framework's naming.
DemoSeriesType.COMBINATION = DemoSeriesType(SeriesType.COMMONSERIES, 'Combination')
class ChartTypeChangeListener(IValueChangeListener):
def __init__(self, window, tree):
self._window = window
self._tree = tree
def valueChange(self, event):
# try:
selectedId = event.getProperty().getValue()
if self._tree.getParent(selectedId) is not None:
parentId = self._tree.getParent(selectedId)
demoSeriesTypeName = self._tree.getContainerProperty(parentId,
self._window._TREE_ITEM_CAPTION_PROP_ID).getValue()
seriesInstanceName = self._tree.getContainerProperty(selectedId,
self._window._TREE_ITEM_CAPTION_PROP_ID).getValue()
print ('parent : ' + demoSeriesTypeName
+ ', selected : ' + seriesInstanceName)
self._window.showChart(demoSeriesTypeName, seriesInstanceName)
else:
demoSeriesTypeName = self._tree.getContainerProperty(selectedId,
self._window._TREE_ITEM_CAPTION_PROP_ID).getValue()
print 'Selected ' + demoSeriesTypeName
self._window.showChartInstancesForSeriesType(demoSeriesTypeName)
# except Exception, e:
# e.printStackTrace()
class SeriesTypeClickListener(button.IClickListener):
    """Selects the tree node matching the clicked button's caption."""
    def __init__(self, window):
        self._window = window
    def buttonClick(self, event):
        # NOTE(review): self.demoSeriesTypeName is never assigned anywhere in
        # this class, so this raises AttributeError unless a caller injects
        # the attribute externally -- looks like a porting bug; confirm
        # whether it should come from self._window instead.
        self._window._navTree.select(self.demoSeriesTypeName
                + self._window._SEPARATOR + event.getButton().getCaption())
| apache-2.0 |
jeffzheng1/tensorflow | tensorflow/contrib/learn/python/learn/utils/export_test.py | 6 | 5141 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for export tools."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import random
import six
import tempfile
import numpy as np
import tensorflow as tf
from tensorflow.contrib import learn
from tensorflow.contrib.learn.python.learn.utils import export
from tensorflow.contrib.session_bundle import manifest_pb2
class ExportTest(tf.test.TestCase):
  """Tests that ExportMonitor writes exports and the expected signatures."""
  def _get_default_signature(self, export_meta_filename):
    """Gets the default signature from the export.meta file."""
    with tf.Session():
      save = tf.train.import_meta_graph(export_meta_filename)
      meta_graph_def = save.export_meta_graph()
      collection_def = meta_graph_def.collection_def
      # Exactly one serving signature is expected in the exported graph.
      signatures_any = collection_def['serving_signatures'].any_list.value
      self.assertEquals(len(signatures_any), 1)
      signatures = manifest_pb2.Signatures()
      signatures_any[0].Unpack(signatures)
      default_signature = signatures.default_signature
      return default_signature
  def testExportMonitor_EstimatorProvidesSignature(self):
    """No signature_fn given: the estimator supplies a regression signature."""
    random.seed(42)
    # y = 2x + 3: a simple linear target the regressor can fit quickly.
    x = np.random.rand(1000)
    y = 2 * x + 3
    cont_features = [tf.contrib.layers.real_valued_column('', dimension=1)]
    regressor = learn.LinearRegressor(feature_columns=cont_features)
    export_dir = tempfile.mkdtemp() + 'export/'
    export_monitor = learn.monitors.ExportMonitor(
        every_n_steps=1, export_dir=export_dir, exports_to_keep=2)
    regressor.fit(x, y, steps=10,
                  monitors=[export_monitor])
    self.assertTrue(tf.gfile.Exists(export_dir))
    # Only the written checkpoints are exported.
    self.assertTrue(tf.gfile.Exists(export_dir + '00000001/export'))
    self.assertTrue(tf.gfile.Exists(export_dir + '00000010/export'))
    self.assertEquals(export_monitor.last_export_dir,
                      six.b(os.path.join(export_dir, '00000010')))
    # Validate the signature
    signature = self._get_default_signature(export_dir + '00000010/export.meta')
    self.assertTrue(signature.HasField('regression_signature'))
  def testExportMonitor(self):
    """Explicit generic_signature_fn: export carries a generic signature."""
    random.seed(42)
    x = np.random.rand(1000)
    y = 2 * x + 3
    cont_features = [tf.contrib.layers.real_valued_column('', dimension=1)]
    regressor = learn.LinearRegressor(feature_columns=cont_features)
    export_dir = tempfile.mkdtemp() + 'export/'
    export_monitor = learn.monitors.ExportMonitor(
        every_n_steps=1, export_dir=export_dir, exports_to_keep=2,
        signature_fn=export.generic_signature_fn)
    regressor.fit(x, y, steps=10,
                  monitors=[export_monitor])
    self.assertTrue(tf.gfile.Exists(export_dir))
    # Only the written checkpoints are exported.
    self.assertTrue(tf.gfile.Exists(export_dir + '00000001/export'))
    self.assertTrue(tf.gfile.Exists(export_dir + '00000010/export'))
    self.assertEquals(export_monitor.last_export_dir,
                      six.b(os.path.join(export_dir, '00000010')))
    # Validate the signature
    signature = self._get_default_signature(export_dir + '00000010/export.meta')
    self.assertTrue(signature.HasField('generic_signature'))
  def testExportMonitorRegressionSignature(self):
    """Custom signature_fn built from the session_bundle exporter helper."""
    def _regression_signature(examples, unused_features, predictions):
      signatures = {}
      signatures['regression'] = (
          tf.contrib.session_bundle.exporter.regression_signature(examples,
                                                                  predictions))
      return signatures['regression'], signatures
    random.seed(42)
    x = np.random.rand(1000)
    y = 2 * x + 3
    cont_features = [tf.contrib.layers.real_valued_column('', dimension=1)]
    regressor = learn.LinearRegressor(feature_columns=cont_features)
    export_dir = tempfile.mkdtemp() + 'export/'
    export_monitor = learn.monitors.ExportMonitor(
        every_n_steps=1,
        export_dir=export_dir,
        exports_to_keep=1,
        signature_fn=_regression_signature)
    regressor.fit(x, y, steps=10, monitors=[export_monitor])
    self.assertTrue(tf.gfile.Exists(export_dir))
    # exports_to_keep=1: the oldest export must have been pruned.
    self.assertFalse(tf.gfile.Exists(export_dir + '00000000/export'))
    self.assertTrue(tf.gfile.Exists(export_dir + '00000010/export'))
    # Validate the signature
    signature = self._get_default_signature(export_dir + '00000010/export.meta')
    self.assertTrue(signature.HasField('regression_signature'))
# Run the test suite when this file is executed directly.
if __name__ == '__main__':
  tf.test.main()
| apache-2.0 |
GdZ/scriptfile | software/googleAppEngine/lib/django_1_3/django/contrib/gis/gdal/prototypes/ds.py | 311 | 4244 | """
This module houses the ctypes function prototypes for OGR DataSource
related data structures. OGR_Dr_*, OGR_DS_*, OGR_L_*, OGR_F_*,
OGR_Fld_* routines are relevant here.
"""
from ctypes import c_char_p, c_double, c_int, c_long, c_void_p, POINTER
from django.contrib.gis.gdal.envelope import OGREnvelope
from django.contrib.gis.gdal.libgdal import lgdal
from django.contrib.gis.gdal.prototypes.generation import \
const_string_output, double_output, geom_output, int_output, \
srs_output, void_output, voidptr_output
c_int_p = POINTER(c_int) # shortcut type
### Driver Routines ###
register_all = void_output(lgdal.OGRRegisterAll, [], errcheck=False)
cleanup_all = void_output(lgdal.OGRCleanupAll, [], errcheck=False)
get_driver = voidptr_output(lgdal.OGRGetDriver, [c_int])
get_driver_by_name = voidptr_output(lgdal.OGRGetDriverByName, [c_char_p])
get_driver_count = int_output(lgdal.OGRGetDriverCount, [])
get_driver_name = const_string_output(lgdal.OGR_Dr_GetName, [c_void_p])
### DataSource ###
open_ds = voidptr_output(lgdal.OGROpen, [c_char_p, c_int, POINTER(c_void_p)])
destroy_ds = void_output(lgdal.OGR_DS_Destroy, [c_void_p], errcheck=False)
release_ds = void_output(lgdal.OGRReleaseDataSource, [c_void_p])
get_ds_name = const_string_output(lgdal.OGR_DS_GetName, [c_void_p])
get_layer = voidptr_output(lgdal.OGR_DS_GetLayer, [c_void_p, c_int])
get_layer_by_name = voidptr_output(lgdal.OGR_DS_GetLayerByName, [c_void_p, c_char_p])
get_layer_count = int_output(lgdal.OGR_DS_GetLayerCount, [c_void_p])
### Layer Routines ###
get_extent = void_output(lgdal.OGR_L_GetExtent, [c_void_p, POINTER(OGREnvelope), c_int])
get_feature = voidptr_output(lgdal.OGR_L_GetFeature, [c_void_p, c_long])
get_feature_count = int_output(lgdal.OGR_L_GetFeatureCount, [c_void_p, c_int])
get_layer_defn = voidptr_output(lgdal.OGR_L_GetLayerDefn, [c_void_p])
get_layer_srs = srs_output(lgdal.OGR_L_GetSpatialRef, [c_void_p])
get_next_feature = voidptr_output(lgdal.OGR_L_GetNextFeature, [c_void_p])
reset_reading = void_output(lgdal.OGR_L_ResetReading, [c_void_p], errcheck=False)
test_capability = int_output(lgdal.OGR_L_TestCapability, [c_void_p, c_char_p])
get_spatial_filter = geom_output(lgdal.OGR_L_GetSpatialFilter, [c_void_p])
set_spatial_filter = void_output(lgdal.OGR_L_SetSpatialFilter, [c_void_p, c_void_p], errcheck=False)
set_spatial_filter_rect = void_output(lgdal.OGR_L_SetSpatialFilterRect, [c_void_p, c_double, c_double, c_double, c_double], errcheck=False)
### Feature Definition Routines ###
get_fd_geom_type = int_output(lgdal.OGR_FD_GetGeomType, [c_void_p])
get_fd_name = const_string_output(lgdal.OGR_FD_GetName, [c_void_p])
# NOTE: get_feat_name is a second binding of the same OGR_FD_GetName entry
# point as get_fd_name above; both are kept for backwards compatibility.
get_feat_name = const_string_output(lgdal.OGR_FD_GetName, [c_void_p])
get_field_count = int_output(lgdal.OGR_FD_GetFieldCount, [c_void_p])
get_field_defn = voidptr_output(lgdal.OGR_FD_GetFieldDefn, [c_void_p, c_int])
### Feature Routines ###
clone_feature = voidptr_output(lgdal.OGR_F_Clone, [c_void_p])
destroy_feature = void_output(lgdal.OGR_F_Destroy, [c_void_p], errcheck=False)
feature_equal = int_output(lgdal.OGR_F_Equal, [c_void_p, c_void_p])
get_feat_geom_ref = geom_output(lgdal.OGR_F_GetGeometryRef, [c_void_p])
get_feat_field_count = int_output(lgdal.OGR_F_GetFieldCount, [c_void_p])
get_feat_field_defn = voidptr_output(lgdal.OGR_F_GetFieldDefnRef, [c_void_p, c_int])
get_fid = int_output(lgdal.OGR_F_GetFID, [c_void_p])
# The six c_int_p arguments receive year/month/day/hour/minute/second
# (plus the TZ flag) by reference, per the OGR C API.
get_field_as_datetime = int_output(lgdal.OGR_F_GetFieldAsDateTime, [c_void_p, c_int, c_int_p, c_int_p, c_int_p, c_int_p, c_int_p, c_int_p])
get_field_as_double = double_output(lgdal.OGR_F_GetFieldAsDouble, [c_void_p, c_int])
get_field_as_integer = int_output(lgdal.OGR_F_GetFieldAsInteger, [c_void_p, c_int])
get_field_as_string = const_string_output(lgdal.OGR_F_GetFieldAsString, [c_void_p, c_int])
get_field_index = int_output(lgdal.OGR_F_GetFieldIndex, [c_void_p, c_char_p])
### Field Routines ###
get_field_name = const_string_output(lgdal.OGR_Fld_GetNameRef, [c_void_p])
get_field_precision = int_output(lgdal.OGR_Fld_GetPrecision, [c_void_p])
get_field_type = int_output(lgdal.OGR_Fld_GetType, [c_void_p])
get_field_type_name = const_string_output(lgdal.OGR_GetFieldTypeName, [c_int])
get_field_width = int_output(lgdal.OGR_Fld_GetWidth, [c_void_p])
| mit |
geminy/aidear | oss/qt/qt-everywhere-opensource-src-5.9.0/qtwebengine/src/3rdparty/chromium/third_party/protobuf/python/google/protobuf/internal/containers.py | 36 | 20173 | # Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# https://developers.google.com/protocol-buffers/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Contains container classes to represent different protocol buffer types.
This file defines container classes which represent categories of protocol
buffer field types which need extra maintenance. Currently these categories
are:
- Repeated scalar fields - These are all repeated fields which aren't
composite (e.g. they are of simple types like int32, string, etc).
- Repeated composite fields - Repeated fields which are composite. This
includes groups and nested messages.
"""
__author__ = 'petar@google.com (Petar Petrov)'
import collections
import sys
if sys.version_info[0] < 3:
  # We would use collections.MutableMapping all the time, but in Python 2 it
  # doesn't define __slots__.  This causes two significant problems:
  #
  # 1. we can't disallow arbitrary attribute assignment, even if our derived
  #    classes *do* define __slots__.
  #
  # 2. we can't safely derive a C type from it without __slots__ defined (the
  #    interpreter expects to find a dict at tp_dictoffset, which we can't
  #    robustly provide).  And we don't want an instance dict anyway.
  #
  # So this is the Python 2.7 definition of Mapping/MutableMapping functions
  # verbatim, except that:
  # 1. We declare __slots__.
  # 2. We don't declare this as a virtual base class.  The classes defined
  #    in collections are the interesting base classes, not us.
  #
  # Note: deriving from object is critical.  It is the only thing that makes
  # this a true type, allowing us to derive from it in C++ cleanly and making
  # __slots__ properly disallow arbitrary element assignment.

  class Mapping(object):
    """Read-only mapping mixin; subclasses supply __getitem__/__iter__/__len__."""

    __slots__ = ()

    def get(self, key, default=None):
      """Returns self[key] if present, otherwise *default* (never raises)."""
      try:
        return self[key]
      except KeyError:
        return default

    def __contains__(self, key):
      # Membership is defined purely in terms of __getitem__.
      try:
        self[key]
      except KeyError:
        return False
      else:
        return True

    def iterkeys(self):
      return iter(self)

    def itervalues(self):
      for key in self:
        yield self[key]

    def iteritems(self):
      for key in self:
        yield (key, self[key])

    def keys(self):
      return list(self)

    def items(self):
      return [(key, self[key]) for key in self]

    def values(self):
      return [self[key] for key in self]

    # Mappings are not hashable by default, but subclasses can change this
    __hash__ = None

    def __eq__(self, other):
      if not isinstance(other, collections.Mapping):
        return NotImplemented
      return dict(self.items()) == dict(other.items())

    def __ne__(self, other):
      return not (self == other)

  class MutableMapping(Mapping):
    """Adds mutators on top of Mapping; subclasses supply __setitem__/__delitem__."""

    __slots__ = ()

    # Unique sentinel object so that callers may pass default=None to pop()
    # and have it treated as a real default rather than "no default".
    __marker = object()

    def pop(self, key, default=__marker):
      try:
        value = self[key]
      except KeyError:
        if default is self.__marker:
          raise
        return default
      else:
        del self[key]
        return value

    def popitem(self):
      try:
        key = next(iter(self))
      except StopIteration:
        # Translate "empty mapping" into the dict-compatible exception.
        raise KeyError
      value = self[key]
      del self[key]
      return key, value

    def clear(self):
      # Repeatedly popitem() until the mapping reports itself empty.
      try:
        while True:
          self.popitem()
      except KeyError:
        pass

    def update(*args, **kwds):
      # 'self' is taken positionally from *args so that a caller may pass a
      # keyword argument literally named 'self' through **kwds.
      if len(args) > 2:
        raise TypeError("update() takes at most 2 positional "
                        "arguments ({} given)".format(len(args)))
      elif not args:
        raise TypeError("update() takes at least 1 argument (0 given)")
      self = args[0]
      other = args[1] if len(args) >= 2 else ()

      if isinstance(other, Mapping):
        for key in other:
          self[key] = other[key]
      elif hasattr(other, "keys"):
        for key in other.keys():
          self[key] = other[key]
      else:
        # Fall back to treating 'other' as an iterable of (key, value) pairs.
        for key, value in other:
          self[key] = value
      for key, value in kwds.items():
        self[key] = value

    def setdefault(self, key, default=None):
      try:
        return self[key]
      except KeyError:
        self[key] = default
      return default

  # Register as virtual subclasses so isinstance() checks against the
  # collections ABCs still succeed despite not inheriting from them.
  collections.Mapping.register(Mapping)
  collections.MutableMapping.register(MutableMapping)

else:
  # In Python 3 we can just use MutableMapping directly, because it defines
  # __slots__.
  MutableMapping = collections.MutableMapping
class BaseContainer(object):

  """Shared plumbing for the repeated-field container types.

  Owns the backing list of values and the listener that must be notified
  whenever the container is mutated.
  """

  # Minimizes memory usage and disallows assignment to other attributes.
  __slots__ = ['_message_listener', '_values']

  def __init__(self, message_listener):
    """
    Args:
      message_listener: A MessageListener implementation.
        The RepeatedScalarFieldContainer will call this object's
        Modified() method when it is modified.
    """
    self._message_listener = message_listener
    self._values = []

  def __getitem__(self, key):
    """Retrieves the item stored under the given index or slice."""
    return self._values[key]

  def __len__(self):
    """Returns the number of elements currently held."""
    return len(self._values)

  def __repr__(self):
    return repr(self._values)

  def __ne__(self, other):
    """Inverse of __eq__, which concrete subclasses are expected to define."""
    return not self == other

  def __hash__(self):
    raise TypeError('unhashable object')

  def sort(self, *args, **kwargs):
    """Sorts the underlying values in place, like list.sort()."""
    # Keep supporting the legacy 'sort_function' keyword by remapping it onto
    # Python 2's cmp= parameter.  EAFP keeps the common path branch-free.
    try:
      kwargs['cmp'] = kwargs.pop('sort_function')
    except KeyError:
      pass
    self._values.sort(*args, **kwargs)
class RepeatedScalarFieldContainer(BaseContainer):

  """Simple, type-checked, list-like container for holding repeated scalars."""

  # Disallows assignment to other attributes.
  __slots__ = ['_type_checker']

  def __init__(self, message_listener, type_checker):
    """
    Args:
      message_listener: A MessageListener implementation.
        The RepeatedScalarFieldContainer will call this object's
        Modified() method when it is modified.
      type_checker: A type_checkers.ValueChecker instance to run on elements
        inserted into this container.
    """
    super(RepeatedScalarFieldContainer, self).__init__(message_listener)
    self._type_checker = type_checker

  def append(self, value):
    """Appends an item to the list. Similar to list.append()."""
    self._values.append(self._type_checker.CheckValue(value))
    # Skip redundant notifications while the listener already knows it's dirty.
    if not self._message_listener.dirty:
      self._message_listener.Modified()

  def insert(self, key, value):
    """Inserts the item at the specified position. Similar to list.insert()."""
    self._values.insert(key, self._type_checker.CheckValue(value))
    if not self._message_listener.dirty:
      self._message_listener.Modified()

  def extend(self, elem_seq):
    """Extends by appending the given iterable. Similar to list.extend()."""
    if elem_seq is None:
      return
    try:
      elem_seq_iter = iter(elem_seq)
    except TypeError:
      if not elem_seq:
        # silently ignore falsy inputs :-/.
        # TODO(ptucker): Deprecate this behavior. b/18413862
        return
      raise

    # Type-check every element *before* touching self._values so a failed
    # check leaves the container unchanged.
    new_values = [self._type_checker.CheckValue(elem) for elem in elem_seq_iter]
    if new_values:
      self._values.extend(new_values)
      self._message_listener.Modified()

  def MergeFrom(self, other):
    """Appends the contents of another repeated field of the same type to this
    one. We do not check the types of the individual fields.
    """
    self._values.extend(other._values)
    self._message_listener.Modified()

  def remove(self, elem):
    """Removes an item from the list. Similar to list.remove()."""
    self._values.remove(elem)
    self._message_listener.Modified()

  def pop(self, key=-1):
    """Removes and returns an item at a given index. Similar to list.pop()."""
    value = self._values[key]
    # Delegate so the listener notification happens in exactly one place.
    self.__delitem__(key)
    return value

  def __setitem__(self, key, value):
    """Sets the item on the specified position."""
    if isinstance(key, slice):  # PY3
      # Python 3 delivers slice assignment here; route it to __setslice__
      # (which Python 2 calls directly) so both versions share one code path.
      if key.step is not None:
        raise ValueError('Extended slices not supported')
      self.__setslice__(key.start, key.stop, value)
    else:
      self._values[key] = self._type_checker.CheckValue(value)
      self._message_listener.Modified()

  def __getslice__(self, start, stop):
    """Retrieves the subset of items from between the specified indices."""
    return self._values[start:stop]

  def __setslice__(self, start, stop, values):
    """Sets the subset of items from between the specified indices."""
    new_values = []
    for value in values:
      new_values.append(self._type_checker.CheckValue(value))
    self._values[start:stop] = new_values
    self._message_listener.Modified()

  def __delitem__(self, key):
    """Deletes the item at the specified position."""
    del self._values[key]
    self._message_listener.Modified()

  def __delslice__(self, start, stop):
    """Deletes the subset of items from between the specified indices."""
    del self._values[start:stop]
    self._message_listener.Modified()

  def __eq__(self, other):
    """Compares the current instance with another one."""
    if self is other:
      return True
    # Special case for the same type which should be common and fast.
    if isinstance(other, self.__class__):
      return other._values == self._values
    # We are presumably comparing against some other sequence type.
    return other == self._values

# Register as a virtual MutableSequence so isinstance() checks succeed for
# every container deriving from BaseContainer.
collections.MutableSequence.register(BaseContainer)
class RepeatedCompositeFieldContainer(BaseContainer):

  """Simple, list-like container for holding repeated composite fields."""

  # Disallows assignment to other attributes.
  __slots__ = ['_message_descriptor']

  def __init__(self, message_listener, message_descriptor):
    """
    Note that we pass in a descriptor instead of the generated class directly,
    since at the time we construct a _RepeatedCompositeFieldContainer we
    haven't yet necessarily initialized the type that will be contained in the
    container.

    Args:
      message_listener: A MessageListener implementation.
        The RepeatedCompositeFieldContainer will call this object's
        Modified() method when it is modified.
      message_descriptor: A Descriptor instance describing the protocol type
        that should be present in this container.  We'll use the
        _concrete_class field of this descriptor when the client calls add().
    """
    super(RepeatedCompositeFieldContainer, self).__init__(message_listener)
    self._message_descriptor = message_descriptor

  def add(self, **kwargs):
    """Adds a new element at the end of the list and returns it. Keyword
    arguments may be used to initialize the element.
    """
    new_element = self._message_descriptor._concrete_class(**kwargs)
    # Wire the child to the same listener so nested mutations propagate up.
    new_element._SetListener(self._message_listener)
    self._values.append(new_element)
    if not self._message_listener.dirty:
      self._message_listener.Modified()
    return new_element

  def extend(self, elem_seq):
    """Extends by appending the given sequence of elements of the same type
    as this one, copying each individual message.
    """
    # Hoist attribute lookups out of the loop.
    message_class = self._message_descriptor._concrete_class
    listener = self._message_listener
    values = self._values
    for message in elem_seq:
      new_element = message_class()
      new_element._SetListener(listener)
      new_element.MergeFrom(message)
      values.append(new_element)
    listener.Modified()

  def MergeFrom(self, other):
    """Appends the contents of another repeated field of the same type to this
    one, copying each individual message.
    """
    self.extend(other._values)

  def remove(self, elem):
    """Removes an item from the list. Similar to list.remove()."""
    self._values.remove(elem)
    self._message_listener.Modified()

  def pop(self, key=-1):
    """Removes and returns an item at a given index. Similar to list.pop()."""
    value = self._values[key]
    self.__delitem__(key)
    return value

  def __getslice__(self, start, stop):
    """Retrieves the subset of items from between the specified indices."""
    return self._values[start:stop]

  def __delitem__(self, key):
    """Deletes the item at the specified position."""
    del self._values[key]
    self._message_listener.Modified()

  def __delslice__(self, start, stop):
    """Deletes the subset of items from between the specified indices."""
    del self._values[start:stop]
    self._message_listener.Modified()

  def __eq__(self, other):
    """Compares the current instance with another one."""
    if self is other:
      return True
    # Unlike the scalar container, comparing against arbitrary sequences is
    # rejected outright rather than falling back to list comparison.
    if not isinstance(other, self.__class__):
      raise TypeError('Can only compare repeated composite fields against '
                      'other repeated composite fields.')
    return self._values == other._values
class ScalarMap(MutableMapping):

  """Type-checked, dict-like container mapping scalar keys to scalar values."""

  # Keep instances small and forbid setting any other attribute.
  __slots__ = ['_key_checker', '_value_checker', '_values', '_message_listener']

  def __init__(self, message_listener, key_checker, value_checker):
    """
    Args:
      message_listener: A MessageListener implementation.
        The ScalarMap will call this object's Modified() method when it
        is modified.
      key_checker: A type_checkers.ValueChecker instance to run on keys
        inserted into this container.
      value_checker: A type_checkers.ValueChecker instance to run on values
        inserted into this container.
    """
    self._message_listener = message_listener
    self._key_checker = key_checker
    self._value_checker = value_checker
    self._values = {}

  def __getitem__(self, key):
    # defaultdict-style access: a miss type-checks the key and materializes
    # the default value under it.  Note: no Modified() notification here.
    if key in self._values:
      return self._values[key]
    checked_key = self._key_checker.CheckValue(key)
    default_value = self._value_checker.DefaultValue()
    self._values[checked_key] = default_value
    return default_value

  def __contains__(self, item):
    # We check the key's type to match the strong-typing flavor of the API,
    # which also mirrors the behavior of the C++ implementation.
    self._key_checker.CheckValue(item)
    return item in self._values

  # Overridden explicitly because the defaultdict-like __getitem__ above
  # would make the inherited implementation insert the key as a side effect.
  def get(self, key, default=None):
    return self[key] if key in self else default

  def __setitem__(self, key, value):
    new_key = self._key_checker.CheckValue(key)
    new_value = self._value_checker.CheckValue(value)
    self._values[new_key] = new_value
    self._message_listener.Modified()

  def __delitem__(self, key):
    del self._values[key]
    self._message_listener.Modified()

  def __len__(self):
    return len(self._values)

  def __iter__(self):
    return iter(self._values)

  def __repr__(self):
    return repr(self._values)

  def MergeFrom(self, other):
    """Copies every entry of *other* into this map, overwriting duplicates."""
    self._values.update(other._values)
    self._message_listener.Modified()

  def InvalidateIterators(self):
    # It appears that the only way to reliably invalidate iterators to
    # self._values is to ensure that its size changes.
    stale = self._values
    self._values = stale.copy()
    stale[None] = None

  # Defined in the abstract base, but doing it directly is much cheaper.
  def clear(self):
    self._values.clear()
    self._message_listener.Modified()
class MessageMap(MutableMapping):

  """Simple, type-checked, dict-like container with submessage values."""

  # Disallows assignment to other attributes.
  __slots__ = ['_key_checker', '_values', '_message_listener',
               '_message_descriptor']

  def __init__(self, message_listener, message_descriptor, key_checker):
    """
    Args:
      message_listener: A MessageListener implementation.
        The MessageMap will call this object's Modified() method when it
        is modified.
      message_descriptor: A Descriptor instance describing the protocol type
        of the values; its _concrete_class is instantiated on first access
        of a missing key.
      key_checker: A type_checkers.ValueChecker instance to run on keys
        inserted into this container.
    """
    self._message_listener = message_listener
    self._message_descriptor = message_descriptor
    self._key_checker = key_checker
    self._values = {}

  def __getitem__(self, key):
    try:
      return self._values[key]
    except KeyError:
      # defaultdict-like: first access of a missing key creates, registers
      # and stores a new empty submessage (and *does* notify the listener,
      # unlike ScalarMap.__getitem__).
      key = self._key_checker.CheckValue(key)
      new_element = self._message_descriptor._concrete_class()
      new_element._SetListener(self._message_listener)
      self._values[key] = new_element
      self._message_listener.Modified()

      return new_element

  def get_or_create(self, key):
    """get_or_create() is an alias for getitem (ie. map[key]).

    Args:
      key: The key to get or create in the map.

    This is useful in cases where you want to be explicit that the call is
    mutating the map.  This can avoid lint errors for statements like this
    that otherwise would appear to be pointless statements:

      msg.my_map[key]
    """
    return self[key]

  # We need to override this explicitly, because our defaultdict-like behavior
  # will make the default implementation (from our base class) always insert
  # the key.
  def get(self, key, default=None):
    if key in self:
      return self[key]
    else:
      return default

  def __contains__(self, item):
    # Note: no key type check here, unlike ScalarMap.__contains__.
    return item in self._values

  def __setitem__(self, key, value):
    raise ValueError('May not set values directly, call my_map[key].foo = 5')

  def __delitem__(self, key):
    del self._values[key]
    self._message_listener.Modified()

  def __len__(self):
    return len(self._values)

  def __iter__(self):
    return iter(self._values)

  def __repr__(self):
    return repr(self._values)

  def MergeFrom(self, other):
    for key in other:
      self[key].MergeFrom(other[key])
    # self._message_listener.Modified() not required here, because
    # mutations to submessages already propagate.

  def InvalidateIterators(self):
    # It appears that the only way to reliably invalidate iterators to
    # self._values is to ensure that its size changes.
    original = self._values
    self._values = original.copy()
    original[None] = None

  # This is defined in the abstract base, but we can do it much more cheaply.
  def clear(self):
    self._values.clear()
    self._message_listener.Modified()
| gpl-3.0 |
paypal/keystone | keystone/middleware/auth_token.py | 6 | 1278 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010-2012 OpenStack LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
The actual code for auth_token has now been moved python-keystoneclient. It is
imported back here to ensure backward combatibility for old paste.ini files
that might still refer to here as opposed to keystoneclient
"""
from keystoneclient.middleware import auth_token as client_auth_token
will_expire_soon = client_auth_token.will_expire_soon
InvalidUserToken = client_auth_token.InvalidUserToken
ServiceError = client_auth_token.ServiceError
ConfigurationError = client_auth_token.ConfigurationError
AuthProtocol = client_auth_token.AuthProtocol
filter_factory = client_auth_token.filter_factory
app_factory = client_auth_token.app_factory
| apache-2.0 |
mjtamlyn/archery-scoring | scores/migrations/0001_initial.py | 1 | 2398 | # -*- coding: utf-8 -*-
from django.db import models, migrations
class Migration(migrations.Migration):
    """Initial schema for the scores app: Arrow, Dozen and Score models."""

    # TargetAllocation lives in the entries app, so its initial migration
    # must be applied before this one.
    dependencies = [
        ('entries', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='Arrow',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('arrow_value', models.PositiveIntegerField()),
                ('arrow_of_round', models.PositiveIntegerField()),
                ('is_x', models.BooleanField(default=False)),
            ],
            options={
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='Dozen',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('total', models.PositiveIntegerField()),
                ('dozen', models.PositiveIntegerField()),
            ],
            options={
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='Score',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('score', models.PositiveIntegerField(default=0, db_index=True)),
                ('hits', models.PositiveIntegerField(default=0)),
                ('golds', models.PositiveIntegerField(default=0)),
                ('xs', models.PositiveIntegerField(default=0)),
                ('alteration', models.IntegerField(default=0)),
                ('retired', models.BooleanField(default=False)),
                ('disqualified', models.BooleanField(default=False)),
                ('target', models.OneToOneField(to='entries.TargetAllocation', on_delete=models.CASCADE)),
            ],
            options={
            },
            bases=(models.Model,),
        ),
        # The ForeignKey columns are added after all three models exist so
        # that scores.Score can be referenced.
        migrations.AddField(
            model_name='dozen',
            name='score',
            field=models.ForeignKey(to='scores.Score', on_delete=models.CASCADE),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='arrow',
            name='score',
            field=models.ForeignKey(to='scores.Score', on_delete=models.CASCADE),
            preserve_default=True,
        ),
    ]
| bsd-3-clause |
wangyj1/depdep | lib/core/configparser.py | 8 | 5921 | try:
import sys
from xml.etree import ElementTree
except ImportError,e:
import sys
sys.stdout.write("%s\n" %e)
sys.exit(1)
class ConfigParser:
    """Parses the scanner's XML configuration file into a flat dict.

    The parsed configuration is cached in the class attribute ``result`` so
    repeated calls to parse() do not re-read or re-parse the file.
    """

    # Class-level cache shared by all callers; populated on first parse().
    result = {}

    @staticmethod
    def _join_node_texts(item, xpath, separator):
        """Joins the stripped text of every *xpath* child of *item*.

        Returns "" when there are no matching nodes.
        """
        return separator.join(
            node.text.strip() for node in item.findall(xpath))

    @staticmethod
    def _parse_keywords(item, xpath):
        """Builds {keyword_text: [case, type, description]} from keyword nodes.

        Bug fix: the previous implementation reused one list object for every
        keyword, so all keywords ended up sharing a single accumulating list.
        Each keyword now gets its own attribute list.
        """
        keywords = {}
        for node in item.findall(xpath):
            keywords[node.text.strip()] = [
                node.get('case').strip(),
                node.get('type').strip(),
                node.get('description').strip(),
            ]
        return keywords

    @staticmethod
    def parse(config_file):
        """Parses *config_file* and returns the configuration dict.

        Raises whatever the underlying file access or XML parsing raises
        (e.g. IOError/OSError, ElementTree.ParseError).
        """
        if ConfigParser.result:
            # Already parsed once; return the cached configuration.
            return ConfigParser.result

        with open(config_file, 'rt') as f:
            tree = ElementTree.parse(f)

        # tree.iter() replaces the long-deprecated getiterator(), which was
        # removed in Python 3.9; iter() exists since Python 2.7.
        for item in tree.iter('target'):
            ConfigParser.result["include_ip"] = ConfigParser._join_node_texts(
                item, 'include-ip', ' ')
            ConfigParser.result["exclude_ip"] = ConfigParser._join_node_texts(
                item, 'exclude-ip', ',')

        for item in tree.iter('scanning'):
            ConfigParser.result["scanning_thread"] = (
                item.find("thread").text.strip())

        for item in tree.iter('settings'):
            ConfigParser.result["max_filesize"] = (
                item.find('max_filesize').text.strip())
            ConfigParser.result["output_file"] = (
                item.find('output_file').text.strip())

            # Credentials are optional; default to empty strings when the
            # nodes are absent or empty.
            username = ""
            password = ""
            domain = ""
            if (item.find('credentials/username') is not None
                    and item.find('credentials/password') is not None
                    and item.find('credentials/domain') is not None):
                if item.find('credentials/username').text is not None:
                    username = item.find('credentials/username').text.strip()
                if item.find('credentials/password').text is not None:
                    password = item.find('credentials/password').text.strip()
                if item.find('credentials/domain').text is not None:
                    domain = item.find('credentials/domain').text.strip()
            ConfigParser.result["username"] = username
            ConfigParser.result["password"] = password
            ConfigParser.result["domain"] = domain

        for item in tree.iter('content'):
            ConfigParser.result["content_thread"] = (
                item.find('thread').text.strip())
            ConfigParser.result["include_type"] = ConfigParser._join_node_texts(
                item, 'filetype/include-type', ',')
            ConfigParser.result["exclude_type"] = ConfigParser._join_node_texts(
                item, 'filetype/exclude-type', ',')
            ConfigParser.result["filename_keyword_name"] = (
                ConfigParser._parse_keywords(item, 'filename/keyword'))
            ConfigParser.result["filecontent_keyword_name"] = (
                ConfigParser._parse_keywords(item, 'filecontent/keyword'))

        return ConfigParser.result
bkahlert/seqan-research | raw/workshop13/workshop2013-data-20130926/trunk/extras/apps/gustaf/tests/run_tests.py | 4 | 8141 | #!/usr/bin/env python
"""Execute the tests for program gustaf.
The golden test outputs are generated by the script generate_outputs.sh.
You have to give the root paths to the source and the binaries as arguments to
the program. These are the paths to the directory that contains the 'projects'
directory.
Usage: run_tests.py SOURCE_ROOT_PATH BINARY_ROOT_PATH
"""
import logging
import os.path
import sys
# Automagically add util/py_lib to PYTHONPATH environment variable.
path = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..',
'..', '..', 'util', 'py_lib'))
sys.path.insert(0, path)
import seqan.app_tests as app_tests
def main(source_base, binary_base):
    """Main entry point of the script.

    Builds the gustaf test configurations, runs each one and returns a
    truthy value when at least one test failed (Python 2 script).
    """
    print 'Executing test for gustaf'
    print '==============================='
    print

    # Helper that resolves paths below the tests directory and the temp dir.
    ph = app_tests.TestPathHelper(
        source_base, binary_base,
        'extras/apps/gustaf/tests')  # tests dir

    # ============================================================
    # Auto-detect the binary path.
    # ============================================================

    path_to_program = app_tests.autolocateBinary(
        binary_base, 'bin', 'gustaf')

    # ============================================================
    # Built TestConf list.
    # ============================================================

    # Build list with TestConf objects, analoguely to how the output
    # was generated in generate_outputs.sh.
    conf_list = []

    # We prepare a list of transforms to apply to the output files.  This is
    # used to strip the input/output paths from the programs' output to
    # make it more canonical and host independent.
    ph.outFile('-')  # To ensure that the out path is set.
    transforms = [
        app_tests.ReplaceTransform(
            os.path.join(ph.source_base_path,
                         'extras/apps/gustaf/tests') + os.sep,
            '', right=True),
        app_tests.ReplaceTransform(ph.temp_dir + os.sep, '', right=True),
        app_tests.NormalizeScientificExponentsTransform(),
    ]

    # ============================================================
    # Adeno Tests
    # ============================================================

    # ============================================================
    # Sanity check with default values and empty output file
    # ============================================================

    conf = app_tests.TestConf(
        program=path_to_program,
        redir_stdout=ph.outFile('st2_l100.stdout'),
        redir_stderr=ph.outFile('st2_l100.stderr'),
        args=[ph.inFile('adeno.fa'),
              ph.inFile('adeno_modified_reads.fa'),
              '-gff', ph.outFile('st2_l100.gff'),
              '-vcf', ph.outFile('st2_l100.vcf'),
              ],
        to_diff=[(ph.inFile('st2_l100.vcf'),
                  ph.outFile('st2_l100.vcf'),
                  transforms),
                 (ph.inFile('st2_l100.gff'),
                  ph.outFile('st2_l100.gff'))])
    conf_list.append(conf)
    # Command that generated the golden output:
    #out="st2_l100"
    #${GUSTAF} adeno.fa adeno_modified_reads.fa -gff ${out}.gff -vcf ${out}.vcf > ${out}.stdout 2> ${out}.stderr

    # ============================================================
    # -st 1 -l 30
    # ============================================================

    conf = app_tests.TestConf(
        program=path_to_program,
        redir_stdout=ph.outFile('st1_l30.stdout'),
        redir_stderr=ph.outFile('st1_l30.stderr'),
        args=[ph.inFile('adeno.fa'),
              ph.inFile('adeno_modified_reads.fa'),
              '-gff', ph.outFile('st1_l30.gff'),
              '-vcf', ph.outFile('st1_l30.vcf'),
              '-st', str(1),
              '-l', str(30),
              ],
        to_diff=[(ph.inFile('st1_l30.vcf'),
                  ph.outFile('st1_l30.vcf'),
                  transforms),
                 (ph.inFile('st1_l30.gff'),
                  ph.outFile('st1_l30.gff'))])
    conf_list.append(conf)
    #out="st1_l30"
    #${GUSTAF} adeno.fa adeno_modified_reads.fa -st 1 -l 30 -gff ${out}.gff -vcf ${out}.vcf > ${out}.stdout 2> ${out}.stderr

    # ============================================================
    # -st 1 -m stellar.gff
    # ============================================================

    conf = app_tests.TestConf(
        program=path_to_program,
        redir_stdout=ph.outFile('st1_l30_m.stdout'),
        redir_stderr=ph.outFile('st1_l30_m.stderr'),
        args=[ph.inFile('adeno.fa'),
              ph.inFile('adeno_modified_reads.fa'),
              '-m', ph.inFile('stellar.gff'),
              '-gff', ph.outFile('st1_l30_m.gff'),
              '-vcf', ph.outFile('st1_l30_m.vcf'),
              '-st', str(1),
              ],
        to_diff=[(ph.inFile('st1_l30_m.vcf'),
                  ph.outFile('st1_l30_m.vcf'),
                  transforms),
                 (ph.inFile('st1_l30_m.gff'),
                  ph.outFile('st1_l30_m.gff'))])
    conf_list.append(conf)
    #out="st1_l30_m"
    #${GUSTAF} adeno.fa adeno_modified_reads.fa -st 1 -m stellar.gff -gff ${out}.gff -vcf ${out}.vcf > ${out}.stdout 2> ${out}.stderr

    # ============================================================
    # -st 1 -l 30 -ith 5
    # ============================================================

    # NOTE(review): this test diffs against st1_l30_m.vcf rather than
    # st1_l30_ith5.vcf -- looks like a copy-paste; confirm against upstream.
    conf = app_tests.TestConf(
        program=path_to_program,
        redir_stdout=ph.outFile('st1_l30_ith5.stdout'),
        redir_stderr=ph.outFile('st1_l30_ith5.stderr'),
        args=[ph.inFile('adeno.fa'),
              ph.inFile('adeno_modified_reads.fa'),
              '-gff', ph.outFile('st1_l30_ith5.gff'),
              '-vcf', ph.outFile('st1_l30_ith5.vcf'),
              '-st', str(1),
              '-l', str(30),
              '-ith', str(5),
              ],
        to_diff=[(ph.inFile('st1_l30_m.vcf'),
                  ph.outFile('st1_l30_m.vcf'),
                  transforms),
                 (ph.inFile('st1_l30_ith5.gff'),
                  ph.outFile('st1_l30_ith5.gff'))])
    conf_list.append(conf)
    #out="st1_l30_ith5"
    #${GUSTAF} adeno.fa adeno_modified_reads.fa -st 1 -l 30 -ith 5 -gff ${out}.gff -vcf ${out}.vcf > ${out}.stdout 2> ${out}.stderr

    # ============================================================
    # -st 1 -l 30 -gth 3
    # ============================================================

    # NOTE(review): same st1_l30_m.vcf diff target as above; confirm upstream.
    conf = app_tests.TestConf(
        program=path_to_program,
        redir_stdout=ph.outFile('st1_l30_gth3.stdout'),
        redir_stderr=ph.outFile('st1_l30_gth3.stderr'),
        args=[ph.inFile('adeno.fa'),
              ph.inFile('adeno_modified_reads.fa'),
              '-gff', ph.outFile('st1_l30_gth3.gff'),
              '-vcf', ph.outFile('st1_l30_gth3.vcf'),
              '-st', str(1),
              '-l', str(30),
              '-gth', str(3),
              ],
        to_diff=[(ph.inFile('st1_l30_m.vcf'),
                  ph.outFile('st1_l30_m.vcf'),
                  transforms),
                 (ph.inFile('st1_l30_gth3.gff'),
                  ph.outFile('st1_l30_gth3.gff'))])
    conf_list.append(conf)
    #out="st1_l30_gth3"
    #${GUSTAF} adeno.fa adeno_modified_reads.fa -st 1 -l 30 -gth 3 -gff ${out}.gff -vcf ${out}.vcf > ${out}.stdout 2> ${out}.stderr

    # ============================================================
    # Execute the tests.
    # ============================================================
    failures = 0
    for conf in conf_list:
        res = app_tests.runTest(conf)
        # Output to the user.
        print ' '.join(['gustaf'] + conf.args),
        if res:
            print 'OK'
        else:
            failures += 1
            print 'FAILED'

    # Cleanup.
    ph.deleteTempDir()

    print '=============================='
    print '     total tests: %d' % len(conf_list)
    print '    failed tests: %d' % failures
    print 'successful tests: %d' % (len(conf_list) - failures)
    print '=============================='

    # Compute and return return code.
    return failures != 0


if __name__ == '__main__':
    sys.exit(app_tests.main(main))
| mit |
gemrb/gemrb | gemrb/GUIScripts/bg1/ImportFile.py | 1 | 2330 | # GemRB - Infinity Engine Emulator
# Copyright (C) 2003 The GemRB Project
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
#
#character generation, import (GUICG20)
import GemRB
from GUIDefines import *
import GUICommon
import CharGenCommon
#import from a character sheet
# Module-level state shared by the window callbacks below.
ImportWindow = 0
TextAreaControl = 0


def OnLoad():
    """Creates the character-import window (GUICG window 20) and wires up
    its controls and event handlers."""
    global ImportWindow, TextAreaControl

    ImportWindow = GemRB.LoadWindow(20, "GUICG")

    # Static help/description text area.
    TextAreaControl = ImportWindow.GetControl(4)
    TextAreaControl.SetText(10963)

    # List of exported character (CHR) files to choose from.
    TextAreaControl = ImportWindow.GetControl(2)
    TextAreaControl.ListResources(CHR_EXPORTS)

    # Done stays disabled until a character is selected (see SelectPress).
    DoneButton = ImportWindow.GetControl(0)
    DoneButton.SetText(11973)
    DoneButton.SetState(IE_GUI_BUTTON_DISABLED)

    CancelButton = ImportWindow.GetControl(1)
    CancelButton.SetText(13727)

    DoneButton.SetEvent(IE_GUI_BUTTON_ON_PRESS, DonePress)
    CancelButton.SetEvent(IE_GUI_BUTTON_ON_PRESS, CancelPress)
    TextAreaControl.SetEvent(IE_GUI_TEXTAREA_ON_SELECT, SelectPress)
    ImportWindow.ShowModal(MODAL_SHADOW_NONE)
    return
def SelectPress():
    """Enables the Done button once a character file has been selected."""
    DoneButton = ImportWindow.GetControl(0)
    DoneButton.SetState(IE_GUI_BUTTON_ENABLED)
    return
def DonePress():
    """Imports the selected character into the chosen slot and continues
    character generation at the appearance step."""
    ImportWindow.Close()
    FileName = TextAreaControl.QueryText()
    Slot = GemRB.GetVar("Slot")
    # 0x8000 flags the slot for import from a CHR file -- TODO confirm
    # against the CreatePlayer API.
    GemRB.CreatePlayer(FileName, Slot | 0x8000, 1)
    # Publish the imported character's name and portraits as tokens for
    # the following chargen screens.
    GemRB.SetToken("CHARNAME", GemRB.GetPlayerName(Slot))
    GemRB.SetToken("SmallPortrait", GemRB.GetPlayerPortrait(Slot, 1)["ResRef"])
    GemRB.SetToken("LargePortrait", GemRB.GetPlayerPortrait(Slot, 0)["ResRef"])
    GemRB.SetVar("ImportedChar", 1)
    CharGenCommon.jumpTo("appearance")
    return
def CancelPress():
    """Closes the import window and returns to the previous screen."""
    ImportWindow.Close()
    GemRB.SetNextScript(GemRB.GetToken("NextScript"))
    return
| gpl-2.0 |
pgmillon/ansible | lib/ansible/modules/web_infrastructure/sophos_utm/utm_proxy_auth_profile.py | 36 | 12351 | #!/usr/bin/python
# Copyright: (c) 2018, Stephan Schwarz <stearz@gmx.de>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'
}
DOCUMENTATION = """
---
module: utm_proxy_auth_profile
author:
- Stephan Schwarz (@stearz)
short_description: create, update or destroy reverse_proxy auth_profile entry in Sophos UTM
description:
- Create, update or destroy a reverse_proxy auth_profile entry in SOPHOS UTM.
- This module needs to have the REST Ability of the UTM to be activated.
version_added: "2.8"
options:
name:
description:
- The name of the object. Will be used to identify the entry
required: true
aaa:
description:
- List of references to utm_aaa objects (allowed users or groups)
required: true
basic_prompt:
description:
- The message in the basic authentication prompt
required: true
backend_mode:
description:
- Specifies if the backend server needs authentication ([Basic|None])
default: None
choices:
- Basic
- None
backend_strip_basic_auth:
description:
- Should the login data be stripped when proxying the request to the backend host
type: bool
default: True
choices:
- True
- False
backend_user_prefix:
description:
- Prefix string to prepend to the username for backend authentication
default: ""
backend_user_suffix:
description:
- Suffix string to append to the username for backend authentication
default: ""
comment:
description:
- Optional comment string
default: ""
frontend_cookie:
description:
- Frontend cookie name
frontend_cookie_secret:
description:
- Frontend cookie secret
frontend_form:
description:
- Frontend authentication form name
frontend_form_template:
description:
- Frontend authentication form template
default: ""
frontend_login:
description:
- Frontend login name
frontend_logout:
description:
- Frontend logout name
frontend_mode:
description:
- Frontend authentication mode (Form|Basic)
default: Basic
choices:
- Basic
- Form
frontend_realm:
description:
- Frontend authentication realm
frontend_session_allow_persistency:
description:
- Allow session persistency
type: bool
default: False
choices:
- True
- False
frontend_session_lifetime:
description:
- session lifetime
required: true
frontend_session_lifetime_limited:
description:
- Specifies if limitation of session lifetime is active
type: bool
default: True
choices:
- True
- False
frontend_session_lifetime_scope:
description:
- scope for frontend_session_lifetime (days|hours|minutes)
default: hours
choices:
- days
- hours
- minutes
frontend_session_timeout:
description:
- session timeout
required: true
frontend_session_timeout_enabled:
description:
- Specifies if session timeout is active
type: bool
default: True
choices:
- True
- False
frontend_session_timeout_scope:
description:
- scope for frontend_session_timeout (days|hours|minutes)
default: minutes
choices:
- days
- hours
- minutes
logout_delegation_urls:
description:
- List of logout URLs that logouts are delegated to
default: []
logout_mode:
description:
- Mode of logout (None|Delegation)
default: None
choices:
- None
- Delegation
redirect_to_requested_url:
description:
- Should a redirect to the requested URL be made
type: bool
default: False
choices:
- True
- False
extends_documentation_fragment:
- utm
"""
EXAMPLES = """
- name: Create UTM proxy_auth_profile
utm_proxy_auth_profile:
utm_host: sophos.host.name
utm_token: abcdefghijklmno1234
name: TestAuthProfileEntry
aaa: [REF_OBJECT_STRING,REF_ANOTHEROBJECT_STRING]
basic_prompt: "Authentication required: Please login"
frontend_session_lifetime: 1
frontend_session_timeout: 1
state: present
- name: Remove UTM proxy_auth_profile
utm_proxy_auth_profile:
utm_host: sophos.host.name
utm_token: abcdefghijklmno1234
name: TestAuthProfileEntry
state: absent
- name: Read UTM proxy_auth_profile
utm_proxy_auth_profile:
utm_host: sophos.host.name
utm_token: abcdefghijklmno1234
name: TestAuthProfileEntry
state: info
"""
RETURN = """
result:
description: The utm object that was created
returned: success
type: complex
contains:
_ref:
description: The reference name of the object
type: string
_locked:
description: Whether or not the object is currently locked
type: boolean
_type:
description: The type of the object
type: string
name:
description: The name of the object
type: string
aaa:
description: List of references to utm_aaa objects (allowed users or groups)
type: list
basic_prompt:
description: The message in the basic authentication prompt
type: string
backend_mode:
description: Specifies if the backend server needs authentication ([Basic|None])
type: string
backend_strip_basic_auth:
description: Should the login data be stripped when proxying the request to the backend host
type: boolean
backend_user_prefix:
description: Prefix string to prepend to the username for backend authentication
type: string
backend_user_suffix:
description: Suffix string to append to the username for backend authentication
type: string
comment:
description: Optional comment string
type: string
frontend_cookie:
description: Frontend cookie name
type: string
frontend_cookie_secret:
description: Frontend cookie secret
type: string
frontend_form:
description: Frontend authentication form name
type: string
frontend_form_template:
description: Frontend authentication form template
type: string
frontend_login:
description: Frontend login name
type: string
frontend_logout:
description: Frontend logout name
type: string
frontend_mode:
description: Frontend authentication mode (Form|Basic)
type: string
frontend_realm:
description: Frontend authentication realm
type: string
frontend_session_allow_persistency:
description: Allow session persistency
type: boolean
frontend_session_lifetime:
description: session lifetime
type: integer
frontend_session_lifetime_limited:
description: Specifies if limitation of session lifetime is active
type: boolean
frontend_session_lifetime_scope:
description: scope for frontend_session_lifetime (days|hours|minutes)
type: string
frontend_session_timeout:
description: session timeout
type: integer
frontend_session_timeout_enabled:
description: Specifies if session timeout is active
type: boolean
frontend_session_timeout_scope:
description: scope for frontend_session_timeout (days|hours|minutes)
type: string
logout_delegation_urls:
description: List of logout URLs that logouts are delegated to
type: list
logout_mode:
description: Mode of logout (None|Delegation)
type: string
redirect_to_requested_url:
description: Should a redirect to the requested URL be made
type: boolean
"""
from ansible.module_utils.utm_utils import UTM, UTMModule
from ansible.module_utils._text import to_native
def main():
    """Create, update, remove or query a UTM reverse_proxy auth_profile.

    All REST handling, change detection, check mode and result reporting is
    delegated to the shared ``UTM`` helper; this function only declares the
    module's argument spec and the keys that participate in change detection.
    """
    endpoint = "reverse_proxy/auth_profile"
    # Attributes compared against the existing object to decide whether an
    # update must be pushed to the UTM ('name' identifies the object and is
    # therefore not listed).
    key_to_check_for_changes = ["aaa", "basic_prompt", "backend_mode", "backend_strip_basic_auth",
                                "backend_user_prefix", "backend_user_suffix", "comment", "frontend_cookie",
                                "frontend_cookie_secret", "frontend_form", "frontend_form_template",
                                "frontend_login", "frontend_logout", "frontend_mode", "frontend_realm",
                                "frontend_session_allow_persistency", "frontend_session_lifetime",
                                "frontend_session_lifetime_limited", "frontend_session_lifetime_scope",
                                "frontend_session_timeout", "frontend_session_timeout_enabled",
                                "frontend_session_timeout_scope", "logout_delegation_urls", "logout_mode",
                                "redirect_to_requested_url"]
    module = UTMModule(
        argument_spec=dict(
            name=dict(type='str', required=True),
            aaa=dict(type='list', elements='str', required=True),
            basic_prompt=dict(type='str', required=True),
            backend_mode=dict(type='str', required=False, default="None", choices=['Basic', 'None']),
            # BUGFIX: 'choices' removed from all type='bool' options below.
            # AnsibleModule validates 'choices' against the raw user input
            # *before* boolean coercion, so legitimate values such as
            # "yes"/"no" or "true"/"false" were rejected by
            # choices=[True, False].
            backend_strip_basic_auth=dict(type='bool', required=False, default=True),
            backend_user_prefix=dict(type='str', required=False, default=""),
            backend_user_suffix=dict(type='str', required=False, default=""),
            comment=dict(type='str', required=False, default=""),
            frontend_cookie=dict(type='str', required=False),
            # no_log prevents the cookie secret from appearing in logs and
            # in the recorded module invocation.
            frontend_cookie_secret=dict(type='str', required=False, no_log=True),
            frontend_form=dict(type='str', required=False),
            frontend_form_template=dict(type='str', required=False, default=""),
            frontend_login=dict(type='str', required=False),
            frontend_logout=dict(type='str', required=False),
            frontend_mode=dict(type='str', required=False, default="Basic", choices=['Basic', 'Form']),
            frontend_realm=dict(type='str', required=False),
            frontend_session_allow_persistency=dict(type='bool', required=False, default=False),
            frontend_session_lifetime=dict(type='int', required=True),
            frontend_session_lifetime_limited=dict(type='bool', required=False, default=True),
            frontend_session_lifetime_scope=dict(type='str', required=False, default="hours", choices=['days', 'hours', 'minutes']),
            frontend_session_timeout=dict(type='int', required=True),
            frontend_session_timeout_enabled=dict(type='bool', required=False, default=True),
            frontend_session_timeout_scope=dict(type='str', required=False, default="minutes", choices=['days', 'hours', 'minutes']),
            logout_delegation_urls=dict(type='list', elements='str', required=False, default=[]),
            logout_mode=dict(type='str', required=False, default="None", choices=['None', 'Delegation']),
            redirect_to_requested_url=dict(type='bool', required=False, default=False)
        )
    )
    try:
        UTM(module, endpoint, key_to_check_for_changes).execute()
    except Exception as e:
        # Surface any failure from the REST layer as a regular module
        # failure instead of a traceback.
        module.fail_json(msg=to_native(e))


if __name__ == '__main__':
    main()
| gpl-3.0 |
w1ll1am23/home-assistant | homeassistant/components/modbus/modbus.py | 2 | 8002 | """Support for Modbus."""
import logging
import threading
from pymodbus.client.sync import ModbusSerialClient, ModbusTcpClient, ModbusUdpClient
from pymodbus.transaction import ModbusRtuFramer
from homeassistant.const import (
CONF_BINARY_SENSORS,
CONF_COVERS,
CONF_DELAY,
CONF_HOST,
CONF_METHOD,
CONF_NAME,
CONF_PORT,
CONF_SENSORS,
CONF_SWITCHES,
CONF_TIMEOUT,
CONF_TYPE,
EVENT_HOMEASSISTANT_STOP,
)
from homeassistant.helpers.discovery import load_platform
from .const import (
ATTR_ADDRESS,
ATTR_HUB,
ATTR_STATE,
ATTR_UNIT,
ATTR_VALUE,
CONF_BAUDRATE,
CONF_BINARY_SENSOR,
CONF_BYTESIZE,
CONF_CLIMATE,
CONF_CLIMATES,
CONF_COVER,
CONF_PARITY,
CONF_SENSOR,
CONF_STOPBITS,
CONF_SWITCH,
MODBUS_DOMAIN as DOMAIN,
SERVICE_WRITE_COIL,
SERVICE_WRITE_REGISTER,
)
_LOGGER = logging.getLogger(__name__)
def modbus_setup(
    hass, config, service_write_register_schema, service_write_coil_schema
):
    """Set up Modbus component.

    Creates one ModbusHub per configured hub, loads the platforms declared
    under each hub, and registers the write_register/write_coil services
    plus a shutdown hook that closes every client.
    """
    hass.data[DOMAIN] = hub_collect = {}

    for conf_hub in config[DOMAIN]:
        hub_collect[conf_hub[CONF_NAME]] = ModbusHub(conf_hub)

        # modbus needs to be activated before components are loaded
        # to avoid a racing problem
        hub_collect[conf_hub[CONF_NAME]].setup()

        # load platforms
        for component, conf_key in (
            (CONF_CLIMATE, CONF_CLIMATES),
            (CONF_COVER, CONF_COVERS),
            (CONF_BINARY_SENSOR, CONF_BINARY_SENSORS),
            (CONF_SENSOR, CONF_SENSORS),
            (CONF_SWITCH, CONF_SWITCHES),
        ):
            if conf_key in conf_hub:
                load_platform(hass, component, DOMAIN, conf_hub, config)

    def stop_modbus(event):
        """Stop Modbus service."""
        for client in hub_collect.values():
            client.close()

    def write_register(service):
        """Write Modbus registers."""
        # Service data may arrive as strings; go through float so both
        # "1" and "1.0" style inputs are accepted before truncating.
        unit = int(float(service.data[ATTR_UNIT]))
        address = int(float(service.data[ATTR_ADDRESS]))
        value = service.data[ATTR_VALUE]
        client_name = service.data[ATTR_HUB]
        if isinstance(value, list):
            # A list of values maps to a multi-register write.
            hub_collect[client_name].write_registers(
                unit, address, [int(float(i)) for i in value]
            )
        else:
            hub_collect[client_name].write_register(unit, address, int(float(value)))

    def write_coil(service):
        """Write Modbus coil."""
        unit = service.data[ATTR_UNIT]
        address = service.data[ATTR_ADDRESS]
        state = service.data[ATTR_STATE]
        client_name = service.data[ATTR_HUB]
        # A list of states maps to a multi-coil write.
        if isinstance(state, list):
            hub_collect[client_name].write_coils(unit, address, state)
        else:
            hub_collect[client_name].write_coil(unit, address, state)

    # register function to gracefully stop modbus
    hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, stop_modbus)

    # Register services for modbus
    hass.services.register(
        DOMAIN,
        SERVICE_WRITE_REGISTER,
        write_register,
        schema=service_write_register_schema,
    )
    hass.services.register(
        DOMAIN, SERVICE_WRITE_COIL, write_coil, schema=service_write_coil_schema
    )
    return True
class ModbusHub:
    """Thread-safe wrapper around a single pymodbus client.

    Every read/write grabs an internal lock, so the hub can be shared
    between platform entities that poll concurrently.
    """

    def __init__(self, client_config):
        """Store the hub configuration; the client is created in setup()."""
        cfg = client_config

        # generic configuration shared by all connection types
        self._client = None
        self._lock = threading.Lock()
        self._config_name = cfg[CONF_NAME]
        self._config_type = cfg[CONF_TYPE]
        self._config_port = cfg[CONF_PORT]
        self._config_timeout = cfg[CONF_TIMEOUT]
        self._config_delay = 0

        if self._config_type == "serial":
            # serial-line specific configuration
            self._config_method = cfg[CONF_METHOD]
            self._config_baudrate = cfg[CONF_BAUDRATE]
            self._config_stopbits = cfg[CONF_STOPBITS]
            self._config_bytesize = cfg[CONF_BYTESIZE]
            self._config_parity = cfg[CONF_PARITY]
        else:
            # network (tcp/udp/rtuovertcp) specific configuration
            self._config_host = cfg[CONF_HOST]
            self._config_delay = cfg[CONF_DELAY]
            if self._config_delay > 0:
                _LOGGER.warning(
                    "Parameter delay is accepted but not used in this version"
                )

    @property
    def name(self):
        """Return the name of this hub."""
        return self._config_name

    def setup(self):
        """Create the pymodbus client matching the configured type."""
        kind = self._config_type
        if kind == "serial":
            self._client = ModbusSerialClient(
                method=self._config_method,
                port=self._config_port,
                baudrate=self._config_baudrate,
                stopbits=self._config_stopbits,
                bytesize=self._config_bytesize,
                parity=self._config_parity,
                timeout=self._config_timeout,
                retry_on_empty=True,
            )
        elif kind == "rtuovertcp":
            # RTU framing tunneled over a TCP connection.
            self._client = ModbusTcpClient(
                host=self._config_host,
                port=self._config_port,
                framer=ModbusRtuFramer,
                timeout=self._config_timeout,
            )
        elif kind == "tcp":
            self._client = ModbusTcpClient(
                host=self._config_host,
                port=self._config_port,
                timeout=self._config_timeout,
            )
        elif kind == "udp":
            self._client = ModbusUdpClient(
                host=self._config_host,
                port=self._config_port,
                timeout=self._config_timeout,
            )

        # Connect device
        self.connect()

    def close(self):
        """Disconnect client."""
        with self._lock:
            self._client.close()

    def connect(self):
        """Connect client."""
        with self._lock:
            self._client.connect()

    @staticmethod
    def _unit_kwargs(unit):
        """Translate a slave unit id into pymodbus keyword arguments."""
        # A zero/None unit falls back to the pymodbus default unit id.
        return {"unit": unit} if unit else {}

    def read_coils(self, unit, address, count):
        """Read coils."""
        with self._lock:
            return self._client.read_coils(
                address, count, **self._unit_kwargs(unit)
            )

    def read_discrete_inputs(self, unit, address, count):
        """Read discrete inputs."""
        with self._lock:
            return self._client.read_discrete_inputs(
                address, count, **self._unit_kwargs(unit)
            )

    def read_input_registers(self, unit, address, count):
        """Read input registers."""
        with self._lock:
            return self._client.read_input_registers(
                address, count, **self._unit_kwargs(unit)
            )

    def read_holding_registers(self, unit, address, count):
        """Read holding registers."""
        with self._lock:
            return self._client.read_holding_registers(
                address, count, **self._unit_kwargs(unit)
            )

    def write_coil(self, unit, address, value):
        """Write a single coil."""
        with self._lock:
            self._client.write_coil(address, value, **self._unit_kwargs(unit))

    def write_coils(self, unit, address, value):
        """Write multiple coils."""
        with self._lock:
            self._client.write_coils(address, value, **self._unit_kwargs(unit))

    def write_register(self, unit, address, value):
        """Write a single register."""
        with self._lock:
            self._client.write_register(address, value, **self._unit_kwargs(unit))

    def write_registers(self, unit, address, values):
        """Write multiple registers."""
        with self._lock:
            self._client.write_registers(address, values, **self._unit_kwargs(unit))
| apache-2.0 |
ChrisAntaki/phantomjs | src/breakpad/src/tools/gyp/test/dependencies/gyptest-lib-only.py | 151 | 1091 | #!/usr/bin/env python
# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verify that a link time only dependency will get pulled into the set of built
targets, even if no executable uses it.
"""
import TestGyp
test = TestGyp.TestGyp()

# Generate build files and build everything; per the module docstring, the
# link-time-only dependency 'b' should be pulled into the built targets even
# though no executable links against it.
test.run_gyp('lib_only.gyp')

test.build('lib_only.gyp', test.ALL)

# Make doesn't put static libs in a common 'lib' directory, like it does with
# shared libs, so check in the obj path corresponding to the source path.
test.built_file_must_exist('a', type=test.STATIC_LIB, libdir='obj.target')

# TODO(bradnelson/mark):
# On linux and windows a library target will at least pull its link dependencies
# into the generated sln/_main.scons, since not doing so confuses users.
# This is not currently implemented on mac, which has the opposite behavior.
if test.format == 'xcode':
  test.built_file_must_not_exist('b', type=test.STATIC_LIB)
else:
  test.built_file_must_exist('b', type=test.STATIC_LIB, libdir='obj.target/b')

test.pass_test()
| bsd-3-clause |
datafolklabs/cement | cement/core/extension.py | 1 | 3997 | """Cement core extensions module."""
import sys
from abc import abstractmethod
from ..core import exc
from ..core.interface import Interface
from ..core.handler import Handler
from ..utils.misc import minimal_logger
LOG = minimal_logger(__name__)
class ExtensionInterface(Interface):

    """
    This class defines the Extension Interface. Handlers that implement this
    interface must provide the methods and attributes defined below. In
    general, most implementations should sub-class from the provided
    :class:`ExtensionHandler` base class as a starting point.
    """

    class Meta:

        """Interface meta-data."""

        #: The string identifier of the interface.
        interface = 'extension'

    @abstractmethod
    def load_extension(self, ext_module):
        """
        Load an extension whose module is ``ext_module``. For example,
        ``cement.ext.ext_json``.

        Args:
            ext_module (str): The name of the extension to load

        """
        pass  # pragma: no cover

    @abstractmethod
    def load_extensions(self, ext_list):
        """
        Load all extensions from ``ext_list``.

        Args:
            ext_list (list): A list of extension modules to load. For example:
                ``['cement.ext.ext_json', 'cement.ext.ext_logging']``

        """
        pass  # pragma: no cover
class ExtensionHandler(ExtensionInterface, Handler):

    """
    Default implementation of the Extension Interface.  It loads framework
    extensions by importing their modules and invoking the module-level
    ``load()`` hook when one is defined.  Custom extension handlers should
    sub-class from here, or ensure that their implementation meets the
    requirements of this base class.
    """

    class Meta:

        """
        Handler meta-data (can be passed as keyword arguments to the parent
        class).
        """

        #: The string identifier of the handler.
        label = 'cement'

    def __init__(self, **kw):
        super().__init__(**kw)
        self.app = None
        self._loaded_extensions = []

    def get_loaded_extensions(self):
        """
        Get all loaded extensions.

        Returns:
            list: A list of loaded extensions.

        """
        return self._loaded_extensions

    def list(self):
        """
        Synonymous with ``get_loaded_extensions()``.

        Returns:
            list: A list of loaded extensions.

        """
        return self._loaded_extensions

    def load_extension(self, ext_module):
        """
        Import the extension module named ``ext_module`` and register it as
        loaded.

        Args:
            ext_module (str): The extension module name. For example:
                ``cement.ext.ext_logging``.

        Raises:
            cement.core.exc.FrameworkError: Raised if ``ext_module`` can not
                be loaded.

        """
        # Short names are resolved against the builtin extension namespace.
        if '.' not in ext_module:
            ext_module = 'cement.ext.ext_%s' % ext_module

        if ext_module in self._loaded_extensions:
            LOG.debug("framework extension '%s' already loaded" % ext_module)
            return

        LOG.debug("loading the '%s' framework extension" % ext_module)
        try:
            if ext_module not in sys.modules:
                __import__(ext_module, globals(), locals(), [], 0)

            module = sys.modules[ext_module]
            if hasattr(module, 'load'):
                module.load(self.app)

            if ext_module not in self._loaded_extensions:
                self._loaded_extensions.append(ext_module)
        except ImportError as e:
            raise exc.FrameworkError(e.args[0])

    def load_extensions(self, ext_list):
        """
        Load every extension in ``ext_list`` via ``load_extension()``.

        Args:
            ext_list (list): A list of extension module names (str).

        """
        for ext_module in ext_list:
            self.load_extension(ext_module)
| bsd-3-clause |
shipci/boto | boto/manage/__init__.py | 271 | 1108 | # Copyright (c) 2006-2009 Mitch Garnaat http://garnaat.org/
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
| mit |
RydrDojo/Ridr | pylotVenv/lib/python2.7/site-packages/sqlalchemy/dialects/mysql/mysqlconnector.py | 59 | 5323 | # mysql/mysqlconnector.py
# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""
.. dialect:: mysql+mysqlconnector
:name: MySQL Connector/Python
:dbapi: myconnpy
:connectstring: mysql+mysqlconnector://<user>:<password>@\
<host>[:<port>]/<dbname>
:url: http://dev.mysql.com/downloads/connector/python/
Unicode
-------
Please see :ref:`mysql_unicode` for current recommendations on unicode
handling.
"""
from .base import (MySQLDialect, MySQLExecutionContext,
MySQLCompiler, MySQLIdentifierPreparer,
BIT)
from ... import util
import re
class MySQLExecutionContext_mysqlconnector(MySQLExecutionContext):
    """Execution context adjustments for the mysqlconnector driver."""

    def get_lastrowid(self):
        # mysql-connector exposes the last insert id directly on the cursor.
        return self.cursor.lastrowid
class MySQLCompiler_mysqlconnector(MySQLCompiler):
    """Statement compiler that doubles '%' for older mysqlconnector drivers.

    Pre-2.0 drivers on Python 2 treat '%' in statement text as a pyformat
    parameter marker, so literal percent signs must be escaped as '%%'.
    """

    def visit_mod_binary(self, binary, operator, **kw):
        # The modulo operator itself is a literal '%' and needs escaping.
        op = " %% " if self.dialect._mysqlconnector_double_percents else " % "
        return self.process(binary.left, **kw) + op + \
            self.process(binary.right, **kw)

    def post_process_text(self, text):
        if self.dialect._mysqlconnector_double_percents:
            text = text.replace('%', '%%')
        return text

    def escape_literal_column(self, text):
        if self.dialect._mysqlconnector_double_percents:
            text = text.replace('%', '%%')
        return text
class MySQLIdentifierPreparer_mysqlconnector(MySQLIdentifierPreparer):
    """Identifier preparer that doubles '%' for older mysqlconnector."""

    def _escape_identifier(self, value):
        value = value.replace(self.escape_quote, self.escape_to_quote)
        if not self.dialect._mysqlconnector_double_percents:
            return value
        # Pre-2.0 drivers on Python 2 interpret '%' as a format marker.
        return value.replace("%", "%%")
class _myconnpyBIT(BIT):
    """BIT variant for mysql-connector, which converts bits natively."""

    def result_processor(self, dialect, coltype):
        # The driver already returns Python values for BIT columns, so no
        # additional result processing is required.
        return None
class MySQLDialect_mysqlconnector(MySQLDialect):
    """MySQL dialect for the MySQL Connector/Python (mysqlconnector) DBAPI."""

    driver = 'mysqlconnector'

    supports_unicode_binds = True

    supports_sane_rowcount = True
    supports_sane_multi_rowcount = True

    supports_native_decimal = True

    default_paramstyle = 'format'
    execution_ctx_cls = MySQLExecutionContext_mysqlconnector
    statement_compiler = MySQLCompiler_mysqlconnector

    preparer = MySQLIdentifierPreparer_mysqlconnector

    # Use the driver-aware BIT type that skips redundant bit conversion.
    colspecs = util.update_copy(
        MySQLDialect.colspecs,
        {
            BIT: _myconnpyBIT,
        }
    )

    @util.memoized_property
    def supports_unicode_statements(self):
        # Unicode statements are safe on Python 3, or with driver >= 2.0.
        return util.py3k or self._mysqlconnector_version_info > (2, 0)

    @classmethod
    def dbapi(cls):
        # Imported lazily so this module can load without the driver present.
        from mysql import connector
        return connector

    def create_connect_args(self, url):
        """Translate a SQLAlchemy URL into mysql-connector connect() args."""
        opts = url.translate_connect_args(username='user')

        opts.update(url.query)

        # Query-string values arrive as strings; coerce known booleans.
        util.coerce_kw_type(opts, 'buffered', bool)
        util.coerce_kw_type(opts, 'raise_on_warnings', bool)

        # unfortunately, MySQL/connector python refuses to release a
        # cursor without reading fully, so non-buffered isn't an option
        opts.setdefault('buffered', True)

        # FOUND_ROWS must be set in ClientFlag to enable
        # supports_sane_rowcount.
        if self.dbapi is not None:
            try:
                from mysql.connector.constants import ClientFlag
                client_flags = opts.get(
                    'client_flags', ClientFlag.get_default())
                client_flags |= ClientFlag.FOUND_ROWS
                opts['client_flags'] = client_flags
            except Exception:
                # Older drivers may lack ClientFlag; proceed without it.
                pass
        return [[], opts]

    @util.memoized_property
    def _mysqlconnector_version_info(self):
        # Parse the driver's __version__ (e.g. "2.1.3") into an int tuple.
        # Returns None when the driver or its version is unavailable.
        if self.dbapi and hasattr(self.dbapi, '__version__'):
            m = re.match(r'(\d+)\.(\d+)(?:\.(\d+))?',
                         self.dbapi.__version__)
            if m:
                return tuple(
                    int(x)
                    for x in m.group(1, 2, 3)
                    if x is not None)

    @util.memoized_property
    def _mysqlconnector_double_percents(self):
        # Only Python 2 with a pre-2.0 driver needs '%' doubled in SQL text.
        return not util.py3k and self._mysqlconnector_version_info < (2, 0)

    def _get_server_version_info(self, connection):
        dbapi_con = connection.connection
        version = dbapi_con.get_server_version()
        return tuple(version)

    def _detect_charset(self, connection):
        return connection.connection.charset

    def _extract_error_code(self, exception):
        return exception.errno

    def is_disconnect(self, e, connection, cursor):
        # Driver error codes covering lost-connection / server-gone states.
        errnos = (2006, 2013, 2014, 2045, 2055, 2048)
        exceptions = (self.dbapi.OperationalError, self.dbapi.InterfaceError)
        if isinstance(e, exceptions):
            return e.errno in errnos or \
                "MySQL Connection not available." in str(e)
        else:
            return False

    def _compat_fetchall(self, rp, charset=None):
        # mysql-connector needs no charset post-processing on fetch.
        return rp.fetchall()

    def _compat_fetchone(self, rp, charset=None):
        return rp.fetchone()
dialect = MySQLDialect_mysqlconnector
| mit |
androidarmv6/android_external_chromium_org | tools/telemetry/telemetry/core/backends/chrome/chrome_browser_options.py | 23 | 1115 | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.core import browser_options
from telemetry.core.backends.chrome import cros_interface
def CreateChromeBrowserOptions(br_options):
  """Wrap |br_options| in the ChromeOS-specific subclass when appropriate."""
  browser_type = br_options.browser_type

  # Unit tests construct options without a browser type; leave them as-is.
  if not browser_type:
    return br_options

  is_cros = (cros_interface.IsRunningOnCrosDevice() or
             browser_type.startswith('cros'))
  if is_cros:
    return CrosBrowserOptions(br_options)
  return br_options
class ChromeBrowserOptions(browser_options.BrowserOptions):
  """Chrome-specific browser options."""

  def __init__(self, br_options):
    super(ChromeBrowserOptions, self).__init__()
    # Copy to self.
    # Shallow-copies every attribute of the generic BrowserOptions instance
    # onto this object, so the subclass is a drop-in replacement for it.
    self.__dict__.update(br_options.__dict__)
class CrosBrowserOptions(ChromeBrowserOptions):
  """ChromeOS-specific browser options."""

  def __init__(self, br_options):
    super(CrosBrowserOptions, self).__init__(br_options)
    # Create a browser with oobe property.
    self.create_browser_with_oobe = False
    # ChromeOS browsers log in automatically by default.
    self.auto_login = True
| bsd-3-clause |
lupyuen/RaspberryPiImage | home/pi/GrovePi/Software/Python/others/temboo/Library/Stripe/Plans/DeletePlan.py | 5 | 3090 | # -*- coding: utf-8 -*-
###############################################################################
#
# DeletePlan
# Deletes a specified plan.
#
# Python versions 2.6, 2.7, 3.x
#
# Copyright 2014, Temboo Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
#
#
###############################################################################
from temboo.core.choreography import Choreography
from temboo.core.choreography import InputSet
from temboo.core.choreography import ResultSet
from temboo.core.choreography import ChoreographyExecution
import json
class DeletePlan(Choreography):
    # Machine-generated Temboo Choreo wrapper for Stripe's "delete plan" API.

    def __init__(self, temboo_session):
        """
        Create a new instance of the DeletePlan Choreo. A TembooSession object, containing a valid
        set of Temboo credentials, must be supplied.
        """
        super(DeletePlan, self).__init__(temboo_session, '/Library/Stripe/Plans/DeletePlan')

    def new_input_set(self):
        # Factory for the input container used to parameterize an execution.
        return DeletePlanInputSet()

    def _make_result_set(self, result, path):
        # Internal factory invoked by the base class to wrap raw results.
        return DeletePlanResultSet(result, path)

    def _make_execution(self, session, exec_id, path):
        # Internal factory for an asynchronous execution handle.
        return DeletePlanChoreographyExecution(session, exec_id, path)
class DeletePlanInputSet(InputSet):
    """
    An InputSet with methods appropriate for specifying the inputs to the DeletePlan
    Choreo. The InputSet object is used to specify input parameters when executing this Choreo.
    """
    # Input names mirror the Choreo specification on the Temboo service.

    def set_APIKey(self, value):
        """
        Set the value of the APIKey input for this Choreo. ((required, string) The API Key provided by Stripe)
        """
        super(DeletePlanInputSet, self)._set_input('APIKey', value)

    def set_PlanID(self, value):
        """
        Set the value of the PlanID input for this Choreo. ((required, string) The unique identifier of the plan you want to delete)
        """
        super(DeletePlanInputSet, self)._set_input('PlanID', value)
class DeletePlanResultSet(ResultSet):
    """
    A ResultSet with methods tailored to the values returned by the DeletePlan Choreo.
    The ResultSet object is used to retrieve the results of a Choreo execution.
    """

    def getJSONFromString(self, str):
        # NOTE(review): the parameter name 'str' shadows the builtin type;
        # kept as-is because this file is machine-generated and the
        # positional signature must not change.
        return json.loads(str)

    def get_Response(self):
        """
        Retrieve the value for the "Response" output from this Choreo execution. ((json) The response from Stripe)
        """
        return self._output.get('Response', None)
class DeletePlanChoreographyExecution(ChoreographyExecution):
    # Execution handle for asynchronous runs of the DeletePlan Choreo.

    def _make_result_set(self, response, path):
        # Wrap the raw response in the Choreo-specific result set.
        return DeletePlanResultSet(response, path)
| apache-2.0 |
abseil/abseil-py | absl/testing/flagsaver.py | 3 | 6577 | # Copyright 2017 The Abseil Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Decorator and context manager for saving and restoring flag values.
There are many ways to save and restore. Always use the most convenient method
for a given use case.
Here are examples of each method. They all call do_stuff() while FLAGS.someflag
is temporarily set to 'foo'.
from absl.testing import flagsaver
# Use a decorator which can optionally override flags via arguments.
@flagsaver.flagsaver(someflag='foo')
def some_func():
do_stuff()
# Use a decorator which can optionally override flags with flagholders.
@flagsaver.flagsaver((module.FOO_FLAG, 'foo'), (other_mod.BAR_FLAG, 23))
def some_func():
do_stuff()
# Use a decorator which does not override flags itself.
@flagsaver.flagsaver
def some_func():
FLAGS.someflag = 'foo'
do_stuff()
# Use a context manager which can optionally override flags via arguments.
with flagsaver.flagsaver(someflag='foo'):
do_stuff()
# Save and restore the flag values yourself.
saved_flag_values = flagsaver.save_flag_values()
try:
FLAGS.someflag = 'foo'
do_stuff()
finally:
flagsaver.restore_flag_values(saved_flag_values)
We save and restore a shallow copy of each Flag object's __dict__ attribute.
This preserves all attributes of the flag, such as whether or not it was
overridden from its default value.
WARNING: Currently a flag that is saved and then deleted cannot be restored. An
exception will be raised. However if you *add* a flag after saving flag values,
and then restore flag values, the added flag will be deleted with no errors.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import functools
import inspect
from absl import flags
FLAGS = flags.FLAGS
def flagsaver(*args, **kwargs):
  """The main flagsaver interface. See module doc for usage."""
  if not args:
    return _FlagOverrider(**kwargs)

  # Bare-decorator form: `@flagsaver` applied directly to a function
  # instead of `@flagsaver(...)`.
  first = args[0]
  if len(args) == 1 and callable(first):
    if kwargs:
      raise ValueError(
          "It's invalid to specify both positional and keyword parameters.")
    if inspect.isclass(first):
      raise TypeError('@flagsaver.flagsaver cannot be applied to a class.')
    return _wrap(first, {})

  # Otherwise every positional argument must be a (FlagHolder, value)
  # pair; fold them into kwargs, keyed by the flag's name.
  for pair in args:
    if not isinstance(pair, tuple) or len(pair) != 2:
      raise ValueError('Expected (FlagHolder, value) pair, found %r' % (pair,))
    holder, value = pair
    if not isinstance(holder, flags.FlagHolder):
      raise ValueError('Expected (FlagHolder, value) pair, found %r' % (pair,))
    if holder.name in kwargs:
      raise ValueError('Cannot set --%s multiple times' % holder.name)
    kwargs[holder.name] = value

  return _FlagOverrider(**kwargs)
def save_flag_values(flag_values=FLAGS):
  """Snapshots the current state of every flag in ``flag_values``.

  Args:
    flag_values: FlagValues, the FlagValues instance with which the flag will
      be saved. This should almost never need to be overridden.

  Returns:
    Dictionary mapping flag names to shallow copies of the corresponding
    flag's ``__dict__`` (see ``_copy_flag_dict``), e.g. {'key': value_dict}.
  """
  snapshot = {}
  for flag_name in flag_values:
    snapshot[flag_name] = _copy_flag_dict(flag_values[flag_name])
  return snapshot
def restore_flag_values(saved_flag_values, flag_values=FLAGS):
  """Restores flag values based on the dictionary of flag values.

  Flags that were defined after the snapshot was taken are deleted; every
  other flag gets its saved ``__dict__`` back.

  Args:
    saved_flag_values: {'flag_name': value_dict, ...}
    flag_values: FlagValues, the FlagValues instance from which the flag will
      be restored. This should almost never need to be overridden.
  """
  for flag_name in list(flag_values):
    saved = saved_flag_values.get(flag_name)
    if saved is None:
      # No saved __dict__ for this flag, so it was added after the snapshot
      # was taken: remove it entirely.
      delattr(flag_values, flag_name)
      continue
    flag = flag_values[flag_name]
    if flag.value != saved['_value']:
      flag.value = saved['_value']  # Ensure C++ value is set.
    flag.__dict__ = saved
def _wrap(func, overrides):
  """Creates a wrapper function that saves/restores flag values.

  Args:
    func: function object - This will be called between saving flags and
      restoring flags.
    overrides: {str: object} - Flag names mapped to their values. These flags
      will be set after saving the original flag state.

  Returns:
    return value from func()
  """

  @functools.wraps(func)
  def _wrapped(*args, **kwargs):
    # Delegate all save/override/restore bookkeeping to the context
    # manager so the decorator and `with` forms behave identically.
    with _FlagOverrider(**overrides):
      return func(*args, **kwargs)

  return _wrapped
class _FlagOverrider(object):
  """Overrides flags for the duration of the decorated function call.

  It also restores all original values of flags after decorated method
  completes.
  """

  def __init__(self, **overrides):
    # Flag names mapped to the values they should temporarily take.
    self._overrides = overrides
    # Populated on __enter__ with the pre-override flag state.
    self._saved_state = None

  def __call__(self, func):
    # Used as a decorator: wrap the function so each call runs inside
    # this context manager. Classes are rejected because restoring flags
    # around a constructor is almost never what the caller intended.
    if inspect.isclass(func):
      raise TypeError('flagsaver cannot be applied to a class.')
    return _wrap(func, self._overrides)

  def __enter__(self):
    self._saved_state = save_flag_values(FLAGS)
    try:
      FLAGS._set_attributes(**self._overrides)
    except:
      # A flag validator may reject an override; undo any partial
      # changes before propagating the error.
      restore_flag_values(self._saved_state, FLAGS)
      raise

  def __exit__(self, exc_type, exc_value, traceback):
    restore_flag_values(self._saved_state, FLAGS)
def _copy_flag_dict(flag):
"""Returns a copy of the flag object's __dict__.
It's mostly a shallow copy of the __dict__, except it also does a shallow
copy of the validator list.
Args:
flag: flags.Flag, the flag to copy.
Returns:
A copy of the flag object's __dict__.
"""
copy = flag.__dict__.copy()
copy['_value'] = flag.value # Ensure correct restore for C++ flags.
copy['validators'] = list(flag.validators)
return copy
| apache-2.0 |
Curious72/sympy | sympy/polys/domains/old_polynomialring.py | 68 | 13856 | """Implementation of :class:`PolynomialRing` class. """
from __future__ import print_function, division
from sympy.polys.domains.ring import Ring
from sympy.polys.domains.compositedomain import CompositeDomain
from sympy.polys.domains.characteristiczero import CharacteristicZero
from sympy.polys.domains.old_fractionfield import FractionField
from sympy.polys.polyclasses import DMP, DMF
from sympy.polys.polyerrors import (GeneratorsNeeded, PolynomialError,
CoercionFailed, ExactQuotientFailed, NotReversible)
from sympy.polys.polyutils import dict_from_basic, basic_from_dict, _dict_reorder
from sympy.polys.orderings import monomial_key, build_product_order
from sympy.polys.agca.modules import FreeModulePolyRing
from sympy.core.compatibility import iterable, range
from sympy.utilities import public
# XXX why does this derive from CharacteristicZero???

@public
class PolynomialRingBase(Ring, CharacteristicZero, CompositeDomain):
    """
    Base class for generalized polynomial rings.

    This base class should be used for uniform access to generalized polynomial
    rings. Subclasses only supply information about the element storage etc.

    Do not instantiate.
    """

    has_assoc_Ring = True
    has_assoc_Field = True

    # Monomial order used when the caller does not supply ``order=...``.
    default_order = "grevlex"

    def __init__(self, dom, *gens, **opts):
        """Create a polynomial ring over ground domain ``dom`` in ``gens``.

        Raises GeneratorsNeeded when no generators are given.
        """
        if not gens:
            raise GeneratorsNeeded("generators not specified")

        # dtype (DMP or DMF, set by the subclass) uses "level" = ngens - 1.
        lev = len(gens) - 1
        self.ngens = len(gens)

        self.zero = self.dtype.zero(lev, dom, ring=self)
        self.one = self.dtype.one(lev, dom, ring=self)

        self.domain = self.dom = dom
        self.symbols = self.gens = gens
        # NOTE 'order' may not be set if inject was called through CompositeDomain
        self.order = opts.get('order', monomial_key(self.default_order))

    def new(self, element):
        """Wrap ``element`` in this ring's dtype (DMP or DMF)."""
        return self.dtype(element, self.dom, len(self.gens) - 1, ring=self)

    def __str__(self):
        """Return e.g. ``ZZ[x,y]``, appending the order when non-default."""
        s_order = str(self.order)
        orderstr = (
            " order=" + s_order) if s_order != self.default_order else ""
        return str(self.dom) + '[' + ','.join(map(str, self.gens)) + orderstr + ']'

    def __hash__(self):
        # Hash on the same data __eq__ compares, so equal rings hash equal.
        return hash((self.__class__.__name__, self.dtype, self.dom,
                     self.gens, self.order))

    def __eq__(self, other):
        """Returns `True` if two domains are equivalent. """
        return isinstance(other, PolynomialRingBase) and \
            self.dtype == other.dtype and self.dom == other.dom and \
            self.gens == other.gens and self.order == other.order

    def from_ZZ_python(K1, a, K0):
        """Convert a Python `int` object to `dtype`. """
        return K1(K1.dom.convert(a, K0))

    def from_QQ_python(K1, a, K0):
        """Convert a Python `Fraction` object to `dtype`. """
        return K1(K1.dom.convert(a, K0))

    def from_ZZ_gmpy(K1, a, K0):
        """Convert a GMPY `mpz` object to `dtype`. """
        return K1(K1.dom.convert(a, K0))

    def from_QQ_gmpy(K1, a, K0):
        """Convert a GMPY `mpq` object to `dtype`. """
        return K1(K1.dom.convert(a, K0))

    def from_RealField(K1, a, K0):
        """Convert a mpmath `mpf` object to `dtype`. """
        return K1(K1.dom.convert(a, K0))

    def from_AlgebraicField(K1, a, K0):
        """Convert a `ANP` object to `dtype`. """
        # Only possible when the ground domains agree; otherwise falls
        # through and returns None, which signals conversion failure.
        if K1.dom == K0:
            return K1(a)

    def from_GlobalPolynomialRing(K1, a, K0):
        """Convert a `DMP` object to `dtype`. """
        if K1.gens == K0.gens:
            if K1.dom == K0.dom:
                return K1(a.rep)  # set the correct ring
            else:
                return K1(a.convert(K1.dom).rep)
        else:
            # Generator sets differ: reorder the monomials to match K1's
            # generators, converting coefficients if the domains differ too.
            monoms, coeffs = _dict_reorder(a.to_dict(), K0.gens, K1.gens)

            if K1.dom != K0.dom:
                coeffs = [ K1.dom.convert(c, K0.dom) for c in coeffs ]

            return K1(dict(zip(monoms, coeffs)))

    def get_field(self):
        """Returns a field associated with `self`. """
        return FractionField(self.dom, *self.gens)

    def poly_ring(self, *gens):
        """Returns a polynomial ring, i.e. `K[X]`. """
        raise NotImplementedError('nested domains not allowed')

    def frac_field(self, *gens):
        """Returns a fraction field, i.e. `K(X)`. """
        raise NotImplementedError('nested domains not allowed')

    def revert(self, a):
        """Return the multiplicative inverse of ``a`` if it is a unit.

        Raises NotReversible when ``a`` cannot be inverted in this ring.
        """
        try:
            return 1/a
        except (ExactQuotientFailed, ZeroDivisionError):
            raise NotReversible('%s is not a unit' % a)

    def gcdex(self, a, b):
        """Extended GCD of `a` and `b`. """
        return a.gcdex(b)

    def gcd(self, a, b):
        """Returns GCD of `a` and `b`. """
        return a.gcd(b)

    def lcm(self, a, b):
        """Returns LCM of `a` and `b`. """
        return a.lcm(b)

    def factorial(self, a):
        """Returns factorial of `a`. """
        return self.dtype(self.dom.factorial(a))

    def _vector_to_sdm(self, v, order):
        """
        For internal use by the modules class.

        Convert an iterable of elements of this ring into a sparse distributed
        module element.
        """
        raise NotImplementedError

    def _sdm_to_dics(self, s, n):
        """Helper for _sdm_to_vector."""
        from sympy.polys.distributedmodules import sdm_to_dict
        dic = sdm_to_dict(s)
        res = [{} for _ in range(n)]
        for k, v in dic.items():
            # First entry of the key selects the vector component; the
            # remainder is the monomial exponent tuple inside it.
            res[k[0]][k[1:]] = v
        return res

    def _sdm_to_vector(self, s, n):
        """
        For internal use by the modules class.

        Convert a sparse distributed module into a list of length ``n``.

        >>> from sympy import QQ, ilex
        >>> from sympy.abc import x, y
        >>> R = QQ.old_poly_ring(x, y, order=ilex)
        >>> L = [((1, 1, 1), QQ(1)), ((0, 1, 0), QQ(1)), ((0, 0, 1), QQ(2))]
        >>> R._sdm_to_vector(L, 2)
        [x + 2*y, x*y]
        """
        dics = self._sdm_to_dics(s, n)
        # NOTE this works for global and local rings!
        return [self(x) for x in dics]

    def free_module(self, rank):
        """
        Generate a free module of rank ``rank`` over ``self``.

        >>> from sympy.abc import x
        >>> from sympy import QQ
        >>> QQ.old_poly_ring(x).free_module(2)
        QQ[x]**2
        """
        return FreeModulePolyRing(self, rank)
def _vector_to_sdm_helper(v, order):
    """Helper method for common code in Global and Local poly rings.

    Builds a sparse distributed module element from the iterable of ring
    elements ``v``, under the monomial order ``order``.
    """
    from sympy.polys.distributedmodules import sdm_from_dict
    # Prefix every monomial key with the index of the vector component it
    # came from, then let sdm_from_dict sort the merged dictionary.
    flat = {
        (idx,) + monom: coeff
        for idx, elem in enumerate(v)
        for monom, coeff in elem.to_dict().items()
    }
    return sdm_from_dict(flat, order)
@public
class GlobalPolynomialRing(PolynomialRingBase):
    """A true polynomial ring, with objects DMP. """

    # Flags consulted by generic Domain dispatch code elsewhere in polys.
    is_PolynomialRing = is_Poly = True
    # Elements are dense polynomials (no denominators).
    dtype = DMP

    def from_FractionField(K1, a, K0):
        """
        Convert a ``DMF`` object to ``DMP``.

        Examples
        ========

        >>> from sympy.polys.polyclasses import DMP, DMF
        >>> from sympy.polys.domains import ZZ
        >>> from sympy.abc import x

        >>> f = DMF(([ZZ(1), ZZ(1)], [ZZ(1)]), ZZ)
        >>> K = ZZ.old_frac_field(x)

        >>> F = ZZ.old_poly_ring(x).from_FractionField(f, K)

        >>> F == DMP([ZZ(1), ZZ(1)], ZZ)
        True
        >>> type(F)
        <class 'sympy.polys.polyclasses.DMP'>

        """
        # Only fractions with unit denominator are actual polynomials;
        # otherwise this falls through and returns None (conversion failed).
        if a.denom().is_one:
            return K1.from_GlobalPolynomialRing(a.numer(), K0)

    def to_sympy(self, a):
        """Convert `a` to a SymPy object. """
        return basic_from_dict(a.to_sympy_dict(), *self.gens)

    def from_sympy(self, a):
        """Convert SymPy's expression to `dtype`. """
        try:
            rep, _ = dict_from_basic(a, gens=self.gens)
        except PolynomialError:
            raise CoercionFailed("can't convert %s to type %s" % (a, self))

        # Convert each coefficient from SymPy into the ground domain.
        for k, v in rep.items():
            rep[k] = self.dom.from_sympy(v)

        return self(rep)

    def is_positive(self, a):
        """Returns True if `LC(a)` is positive. """
        return self.dom.is_positive(a.LC())

    def is_negative(self, a):
        """Returns True if `LC(a)` is negative. """
        return self.dom.is_negative(a.LC())

    def is_nonpositive(self, a):
        """Returns True if `LC(a)` is non-positive. """
        return self.dom.is_nonpositive(a.LC())

    def is_nonnegative(self, a):
        """Returns True if `LC(a)` is non-negative. """
        return self.dom.is_nonnegative(a.LC())

    def _vector_to_sdm(self, v, order):
        """
        Convert an iterable of ring elements into a sparse distributed module.

        >>> from sympy import lex, QQ
        >>> from sympy.abc import x, y
        >>> R = QQ.old_poly_ring(x, y)
        >>> f = R.convert(x + 2*y)
        >>> g = R.convert(x * y)
        >>> R._vector_to_sdm([f, g], lex)
        [((1, 1, 1), 1), ((0, 1, 0), 1), ((0, 0, 1), 2)]
        """
        return _vector_to_sdm_helper(v, order)
class GeneralizedPolynomialRing(PolynomialRingBase):
    """A generalized polynomial ring, with objects DMF. """

    # Elements are fractions; membership requires a unit denominator.
    dtype = DMF

    def new(self, a):
        """Construct an element of `self` domain from `a`.

        Raises CoercionFailed when the denominator of ``a`` is not a unit
        of this ring (i.e. its leading monomial under ``self.order`` is
        not the constant monomial).
        """
        res = self.dtype(a, self.dom, len(self.gens) - 1, ring=self)

        # make sure res is actually in our ring
        if res.denom().terms(order=self.order)[0][0] != (0,)*len(self.gens):
            from sympy.printing.str import sstr
            raise CoercionFailed("denominator %s not allowed in %s"
                                 % (sstr(res), self))
        return res

    def __contains__(self, a):
        """Return True if ``a`` can be coerced and has a unit denominator."""
        try:
            a = self.convert(a)
        except CoercionFailed:
            return False
        # Leading monomial of the denominator must be 1 for membership.
        return a.denom().terms(order=self.order)[0][0] == (0,)*len(self.gens)

    def from_FractionField(K1, a, K0):
        # Go through the associated fraction field first, then rebuild
        # the element from its numerator/denominator pair (this re-checks
        # the unit-denominator condition via `new`).
        dmf = K1.get_field().from_FractionField(a, K0)
        return K1((dmf.num, dmf.den))

    def to_sympy(self, a):
        """Convert `a` to a SymPy object. """
        return (basic_from_dict(a.numer().to_sympy_dict(), *self.gens) /
                basic_from_dict(a.denom().to_sympy_dict(), *self.gens))

    def from_sympy(self, a):
        """Convert SymPy's expression to `dtype`. """
        p, q = a.as_numer_denom()

        # Convert numerator and denominator coefficients separately into
        # the ground domain, then cancel common factors.
        num, _ = dict_from_basic(p, gens=self.gens)
        den, _ = dict_from_basic(q, gens=self.gens)

        for k, v in num.items():
            num[k] = self.dom.from_sympy(v)

        for k, v in den.items():
            den[k] = self.dom.from_sympy(v)

        return self((num, den)).cancel()

    def _vector_to_sdm(self, v, order):
        """
        Turn an iterable into a sparse distributed module.

        Note that the vector is multiplied by a unit first to make all entries
        polynomials.

        >>> from sympy import ilex, QQ
        >>> from sympy.abc import x, y
        >>> R = QQ.old_poly_ring(x, y, order=ilex)
        >>> f = R.convert((x + 2*y) / (1 + x))
        >>> g = R.convert(x * y)
        >>> R._vector_to_sdm([f, g], ilex)
        [((0, 0, 1), 2), ((0, 1, 0), 1), ((1, 1, 1), 1), ((1,
          2, 1), 1)]
        """
        # NOTE this is quite inefficient...
        # Clear all denominators by multiplying through with their product
        # (a unit in this ring), so the helper sees genuine polynomials.
        u = self.one.numer()
        for x in v:
            u *= x.denom()
        return _vector_to_sdm_helper([x.numer()*u/x.denom() for x in v], order)
@public
def PolynomialRing(dom, *gens, **opts):
    r"""
    Create a generalized multivariate polynomial ring.

    A generalized polynomial ring is defined by a ground field `K`, a set
    of generators (typically `x_1, \dots, x_n`) and a monomial order `<`.
    The monomial order can be global, local or mixed. In any case it induces
    a total ordering on the monomials, and there exists for every (non-zero)
    polynomial `f \in K[x_1, \dots, x_n]` a well-defined "leading monomial"
    `LM(f) = LM(f, >)`. One can then define a multiplicative subset
    `S = S_> = \{f \in K[x_1, \dots, x_n] | LM(f) = 1\}`. The generalized
    polynomial ring corresponding to the monomial order is
    `R = S^{-1}K[x_1, \dots, x_n]`.

    If `>` is a so-called global order, that is `1` is the smallest monomial,
    then we just have `S = K` and `R = K[x_1, \dots, x_n]`.

    Examples
    ========

    A few examples may make this clearer.

    >>> from sympy.abc import x, y
    >>> from sympy import QQ

    Our first ring uses global lexicographic order.

    >>> R1 = QQ.old_poly_ring(x, y, order=(("lex", x, y),))

    The second ring uses local lexicographic order. Note that when using a
    single (non-product) order, you can just specify the name and omit the
    variables:

    >>> R2 = QQ.old_poly_ring(x, y, order="ilex")

    The third and fourth rings use a mixed orders:

    >>> o1 = (("ilex", x), ("lex", y))
    >>> o2 = (("lex", x), ("ilex", y))
    >>> R3 = QQ.old_poly_ring(x, y, order=o1)
    >>> R4 = QQ.old_poly_ring(x, y, order=o2)

    We will investigate what elements of `K(x, y)` are contained in the various
    rings.

    >>> L = [x, 1/x, y/(1 + x), 1/(1 + y), 1/(1 + x*y)]
    >>> test = lambda R: [f in R for f in L]

    The first ring is just `K[x, y]`:

    >>> test(R1)
    [True, False, False, False, False]

    The second ring is R1 localised at the maximal ideal (x, y):

    >>> test(R2)
    [True, False, True, True, True]

    The third ring is R1 localised at the prime ideal (x):

    >>> test(R3)
    [True, False, True, False, True]

    Finally the fourth ring is R1 localised at `S = K[x, y] \setminus yK[y]`:

    >>> test(R4)
    [True, False, False, True, False]
    """
    # Normalise the requested monomial order: an iterable describes a
    # product (block) order over the generators; anything else is a name
    # or key function understood by monomial_key.
    requested = opts.get("order", GeneralizedPolynomialRing.default_order)
    if iterable(requested):
        requested = build_product_order(requested, gens)
    requested = monomial_key(requested)
    opts['order'] = requested

    # A global order yields a true polynomial ring; any local or mixed
    # order requires the localised (generalized) ring.
    ring_cls = GlobalPolynomialRing if requested.is_global else GeneralizedPolynomialRing
    return ring_cls(dom, *gens, **opts)
| bsd-3-clause |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.