repo_name stringlengths 5 100 | path stringlengths 4 375 | copies stringclasses 991 values | size stringlengths 4 7 | content stringlengths 666 1M | license stringclasses 15 values |
|---|---|---|---|---|---|
codepython/Diamond | src/collectors/diskusage/diskusage.py | 17 | 10774 | # coding=utf-8
"""
Collect IO Stats
Note: You may need to artificially generate some IO load on a disk/partition
before graphite will generate the metrics.
* http://www.kernel.org/doc/Documentation/iostats.txt
#### Dependencies
* /proc/diskstats
"""
import diamond.collector
import diamond.convertor
import time
import os
import re
try:
import psutil
except ImportError:
psutil = None
class DiskUsageCollector(diamond.collector.Collector):
    """Collects disk IO statistics from /proc/diskstats (or psutil)."""

    # Rollover ceilings for the /proc/diskstats counters.  These fields are
    # unsigned 32-bit values on the kernel side and wrap at 2**32 - 1;
    # derivative() uses the ceiling to compute correct deltas across a wrap.
    # NOTE(review): per-byte-unit maxima for the sector-derived counters are
    # added to this dict dynamically during collect().
    MAX_VALUES = {
        'reads': 4294967295,
        'reads_merged': 4294967295,
        'reads_milliseconds': 4294967295,
        'writes': 4294967295,
        'writes_merged': 4294967295,
        'writes_milliseconds': 4294967295,
        'io_milliseconds': 4294967295,
        'io_milliseconds_weighted': 4294967295
    }

    # Wall-clock timestamp of the previous collect() run (None until the
    # first collection); used to derive the true elapsed interval.
    LastCollectTime = None
def get_default_config_help(self):
    """Return the help text for this collector's configuration options."""
    help_text = super(DiskUsageCollector, self).get_default_config_help()
    extra_options = {
        'devices': "A regex of which devices to gather metrics for."
                   " Defaults to md, sd, xvd, disk, and dm devices",
        'sector_size': 'The size to use to calculate sector usage',
        'send_zero': 'Send io data even when there is no io',
    }
    help_text.update(extra_options)
    return help_text
def get_default_config(self):
    """
    Returns the default collector settings

    Returns:
        dict: base collector config extended with the metric path, the
        device-matching regex, the sector size and the send_zero flag.
    """
    config = super(DiskUsageCollector, self).get_default_config()
    config.update({
        'path': 'iostat',
        # Match physical drives (Windows), md/sd/(x)vd devices, OS X
        # disks and device-mapper nodes.  The last alternative uses a
        # raw string: '\-' is an invalid escape sequence in a normal
        # string literal (DeprecationWarning, SyntaxWarning in newer
        # Pythons); the raw form keeps the runtime value identical.
        'devices': ('PhysicalDrive[0-9]+$'
                    '|md[0-9]+$'
                    '|sd[a-z]+[0-9]*$'
                    '|x?vd[a-z]+[0-9]*$'
                    '|disk[0-9]+$'
                    r'|dm\-[0-9]+$'),
        'sector_size': 512,
        'send_zero': False,
    })
    return config
def get_disk_statistics(self):
    """
    Create a map of disks in the machine.

    http://www.kernel.org/doc/Documentation/iostats.txt

    Returns:
        (major, minor) -> DiskStatistics(device, ...)

    Side effect: sets self.proc_diskstats so collect() knows whether the
    merged-request counters (only present in /proc/diskstats) exist.
    """
    result = {}

    if os.access('/proc/diskstats', os.R_OK):
        # Linux path: parse /proc/diskstats directly.
        self.proc_diskstats = True
        fp = open('/proc/diskstats')

        try:
            for line in fp:
                try:
                    columns = line.split()
                    # On early linux v2.6 versions, partitions have only 4
                    # output fields not 11. From linux 2.6.25 partitions
                    # have the full stats set.
                    if len(columns) < 14:
                        continue

                    major = int(columns[0])
                    minor = int(columns[1])
                    device = columns[2]

                    # RAM disks and loopback devices carry no useful IO
                    # statistics; skip them.
                    if (device.startswith('ram')
                            or device.startswith('loop')):
                        continue

                    # Field order follows Documentation/iostats.txt.
                    result[(major, minor)] = {
                        'device': device,
                        'reads': float(columns[3]),
                        'reads_merged': float(columns[4]),
                        'reads_sectors': float(columns[5]),
                        'reads_milliseconds': float(columns[6]),
                        'writes': float(columns[7]),
                        'writes_merged': float(columns[8]),
                        'writes_sectors': float(columns[9]),
                        'writes_milliseconds': float(columns[10]),
                        'io_in_progress': float(columns[11]),
                        'io_milliseconds': float(columns[12]),
                        'io_milliseconds_weighted': float(columns[13])
                    }
                except ValueError:
                    # Malformed line (e.g. header noise) -- skip it.
                    continue
        finally:
            fp.close()
    else:
        # Non-Linux path: fall back to psutil's per-disk counters.
        self.proc_diskstats = False
        if not psutil:
            self.log.error('Unable to import psutil')
            return None

        disks = psutil.disk_io_counters(True)
        for disk in disks:
            # psutil exposes no (major, minor); synthesize a unique key.
            # Sector counts are derived from byte counts; psutil has no
            # merged-request or in-progress figures.
            result[(0, len(result))] = {
                'device': disk,
                'reads': disks[disk].read_count,
                'reads_sectors': (disks[disk].read_bytes
                                  / int(self.config['sector_size'])),
                'reads_milliseconds': disks[disk].read_time,
                'writes': disks[disk].write_count,
                'writes_sectors': (disks[disk].write_bytes
                                   / int(self.config['sector_size'])),
                'writes_milliseconds': disks[disk].write_time,
                'io_milliseconds':
                disks[disk].read_time + disks[disk].write_time,
                'io_milliseconds_weighted':
                disks[disk].read_time + disks[disk].write_time
            }

    return result
def collect(self):
    """Gather per-device IO statistics and publish derived metrics
    (rates, await, utilisation, iostat-style figures)."""

    # Handle collection time intervals correctly: prefer the measured
    # elapsed time over the configured interval.
    CollectTime = time.time()
    time_delta = float(self.config['interval'])
    if self.LastCollectTime:
        time_delta = CollectTime - self.LastCollectTime
    if not time_delta:
        time_delta = float(self.config['interval'])
    self.LastCollectTime = CollectTime

    exp = self.config['devices']
    reg = re.compile(exp)

    results = self.get_disk_statistics()
    if not results:
        self.log.error('No diskspace metrics retrieved')
        return None

    for key, info in results.iteritems():
        metrics = {}
        name = info['device']
        if not reg.match(name):
            continue

        # NOTE(review): the inner loop deliberately reuses (shadows) the
        # name `key`; `oldkey` preserves the raw counter name across the
        # per-byte-unit iterations.
        for key, value in info.iteritems():
            if key == 'device':
                continue

            oldkey = key

            for unit in self.config['byte_unit']:
                key = oldkey

                if key.endswith('sectors'):
                    # Convert sector counts into the requested byte unit
                    # and record the rollover ceiling for that unit.
                    key = key.replace('sectors', unit)
                    value /= (1024 / int(self.config['sector_size']))
                    value = diamond.convertor.binary.convert(value=value,
                                                             oldUnit='kB',
                                                             newUnit=unit)
                    self.MAX_VALUES[key] = diamond.convertor.binary.convert(
                        value=diamond.collector.MAX_COUNTER,
                        oldUnit='byte',
                        newUnit=unit)

                metric_name = '.'.join([info['device'], key])
                # io_in_progress is a point in time counter, !derivative
                if key != 'io_in_progress':
                    metric_value = self.derivative(
                        metric_name,
                        value,
                        self.MAX_VALUES[key],
                        time_delta=False)
                else:
                    metric_value = value

                metrics[key] = metric_value

        # Merged-request counters only exist in /proc/diskstats.
        if self.proc_diskstats:
            metrics['read_requests_merged_per_second'] = (
                metrics['reads_merged'] / time_delta)
            metrics['write_requests_merged_per_second'] = (
                metrics['writes_merged'] / time_delta)

        metrics['reads_per_second'] = metrics['reads'] / time_delta
        metrics['writes_per_second'] = metrics['writes'] / time_delta

        for unit in self.config['byte_unit']:
            metric_name = 'read_%s_per_second' % unit
            key = 'reads_%s' % unit
            metrics[metric_name] = metrics[key] / time_delta

            metric_name = 'write_%s_per_second' % unit
            key = 'writes_%s' % unit
            metrics[metric_name] = metrics[key] / time_delta

            # Set to zero so the nodes are valid even if we have 0 io for
            # the metric duration
            metric_name = 'average_request_size_%s' % unit
            metrics[metric_name] = 0

        metrics['io'] = metrics['reads'] + metrics['writes']

        # iostat-style figures: avgqu-sz and %util.
        metrics['average_queue_length'] = (
            metrics['io_milliseconds_weighted']
            / time_delta
            / 1000.0)

        metrics['util_percentage'] = (metrics['io_milliseconds']
                                      / time_delta
                                      / 10.0)

        # Per-direction average wait (ms per request); 0 when idle.
        if metrics['reads'] > 0:
            metrics['read_await'] = (
                metrics['reads_milliseconds'] / metrics['reads'])
        else:
            metrics['read_await'] = 0

        if metrics['writes'] > 0:
            metrics['write_await'] = (
                metrics['writes_milliseconds'] / metrics['writes'])
        else:
            metrics['write_await'] = 0

        for unit in self.config['byte_unit']:
            rkey = 'reads_%s' % unit
            wkey = 'writes_%s' % unit
            metric_name = 'average_request_size_%s' % unit
            if (metrics['io'] > 0):
                metrics[metric_name] = (
                    metrics[rkey] + metrics[wkey]) / metrics['io']
            else:
                metrics[metric_name] = 0

        metrics['iops'] = metrics['io'] / time_delta

        if (metrics['io'] > 0):
            metrics['service_time'] = (
                metrics['io_milliseconds'] / metrics['io'])
            metrics['await'] = (
                metrics['reads_milliseconds']
                + metrics['writes_milliseconds']) / metrics['io']
        else:
            metrics['service_time'] = 0
            metrics['await'] = 0

        # http://www.scribd.com/doc/15013525
        # Page 28
        metrics['concurrent_io'] = (metrics['reads_per_second']
                                    + metrics['writes_per_second']
                                    ) * (metrics['service_time']
                                         / 1000.0)

        # Only publish when we have io figures
        if (metrics['io'] > 0 or self.config['send_zero']):
            for key in metrics:
                # Device names may contain '/' (e.g. cciss); sanitize for
                # the metric path.
                metric_name = '.'.join([info['device'], key]).replace(
                    '/', '_')
                self.publish(metric_name, metrics[key])
| mit |
bzero/arctic | arctic/scripts/arctic_create_user.py | 3 | 2398 | import argparse
import base64
from pymongo import MongoClient
import uuid
import logging
from ..hooks import get_mongodb_uri
from .utils import do_db_auth
from arctic.arctic import Arctic
logger = logging.getLogger(__name__)
def main():
    """CLI entry point: create each listed user's personal Arctic mongo
    database, or grant a user (read or write) access to an existing one.

    Connects with admin credentials, then for every user generates (or
    uses the supplied) password and registers it on the target database.
    """
    usage = """arctic_create_user --host research [--db mongoose_user] [--write] user
Creates the user's personal Arctic mongo database
Or add a user to an existing Mongo Database.
"""
    parser = argparse.ArgumentParser(usage=usage)
    parser.add_argument("--host", default='localhost', help="Hostname, or clustername. Default: localhost")
    parser.add_argument("--db", default=None, help="Database to add user on. Default: mongoose_<user>")
    parser.add_argument("--password", default=None, help="Password. Default: random")
    parser.add_argument("--write", action='store_true', default=False, help="Used for granting write access to someone else's DB")
    parser.add_argument("users", nargs='+', help="Users to add.")

    args = parser.parse_args()

    c = MongoClient(get_mongodb_uri(args.host))

    # Authenticate (against the target DB if given, else 'admin') before
    # attempting any changes.
    if not do_db_auth(args.host, c, args.db if args.db else 'admin'):
        logger.error("Failed to authenticate to '%s'. Check your admin password!" % (args.host))
        return

    for user in args.users:
        write_access = args.write
        p = args.password
        if p is None:
            # Random 12-character password derived from a UUID; '/' is
            # stripped so the value is URI-safe.  NOTE(review): this is
            # Python 2 code -- b64encode returns str there; under Python 3
            # the str argument to bytes.replace would fail.
            p = base64.b64encode(uuid.uuid4().bytes).replace('/', '')[:12]
        db = args.db
        if not db:
            # Users always have write access to their database
            write_access = True
            db = Arctic.DB_PREFIX + '_' + user

        # Add the user to the database
        c[db].add_user(user, p, read_only=not write_access)

        logger.info("Granted: {user} [{permission}] to {db}".format(user=user,
                                                                    permission='WRITE' if write_access else 'READ',
                                                                    db=db))
        logger.info("User creds: {db}/{user}/{password}".format(user=user,
                                                                host=args.host,
                                                                db=db,
                                                                password=p,
                                                                ))
if __name__ == '__main__':
main()
| lgpl-2.1 |
TheTimmy/spack | var/spack/repos/builtin.mock/packages/build-error/package.py | 3 | 2155 | ##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class BuildError(Package):
    """This package has an install method that fails in a build script.

    Mock package used by Spack's test suite to exercise the error path
    where a build tool (here, a fake autoconf `configure`) exits non-zero.
    """

    homepage = "http://www.example.com/trivial_install"
    url = "http://www.unit-test-should-replace-this-url/trivial_install-1.0.tar.gz"

    version('1.0', 'foobarbaz')

    def install(self, spec, prefix):
        # Write a stand-in `configure` script that mimics real autoconf
        # output and then always fails with exit status 1, so the caller's
        # error handling (output capture, exit code) can be tested.
        with open('configure', 'w') as f:
            f.write("""#!/bin/sh\n
echo 'checking build system type... x86_64-apple-darwin16.6.0'
echo 'checking host system type... x86_64-apple-darwin16.6.0'
echo 'checking for gcc... /Users/gamblin2/src/spack/lib/spack/env/clang/clang'
echo 'checking whether the C compiler works... yes'
echo 'checking for C compiler default output file name... a.out'
echo 'checking for suffix of executables...'
echo 'configure: error: in /path/to/some/file:'
echo 'configure: error: cannot run C compiled programs.'
exit 1
""")
        # `configure` is the Spack-provided executable wrapper; running it
        # executes the failing script above.
        configure()
| lgpl-2.1 |
she11c0de/cubes | tests/backends/sql/aggregates.py | 1 | 2325 | # -*- coding=utf -*-
import unittest
from sqlalchemy import create_engine, MetaData, Table, Integer, String, Column
from cubes import *
from cubes.errors import *
from ...common import CubesTestCaseBase
from json import dumps
def printable(obj):
    """Serialize *obj* to a human-readable JSON string (4-space indent)."""
    rendered = dumps(obj, indent=4)
    return rendered
class AggregatesTestCase(CubesTestCaseBase):
    """Tests for SQL-backend aggregate handling (functions, explicit
    aggregates, post-calculation measures)."""

    # In-memory SQLite engine for the fixture star schema.
    sql_engine = "sqlite:///"

    def setUp(self):
        """Create the `facts` table, load fixture rows and build a
        workspace from the aggregates.json model."""
        super(AggregatesTestCase, self).setUp()

        self.facts = Table("facts", self.metadata,
                           Column("id", Integer),
                           Column("year", Integer),
                           Column("amount", Integer),
                           Column("price", Integer),
                           Column("discount", Integer)
                           )
        self.metadata.create_all()

        # Rows: (id, year, amount, price, discount) -- four years of data,
        # amounts doubling 1/2/4/8 within each year.
        data = [
            ( 1, 2010, 1, 100, 0),
            ( 2, 2010, 2, 200, 10),
            ( 3, 2010, 4, 300, 0),
            ( 4, 2010, 8, 400, 20),
            ( 5, 2011, 1, 500, 0),
            ( 6, 2011, 2, 600, 40),
            ( 7, 2011, 4, 700, 0),
            ( 8, 2011, 8, 800, 80),
            ( 9, 2012, 1, 100, 0),
            (10, 2012, 2, 200, 0),
            (11, 2012, 4, 300, 0),
            (12, 2012, 8, 400, 10),
            (13, 2013, 1, 500, 0),
            (14, 2013, 2, 600, 0),
            (15, 2013, 4, 700, 0),
            (16, 2013, 8, 800, 20),
        ]

        self.load_data(self.facts, data)
        self.workspace = self.create_workspace(model="aggregates.json")

    def test_unknown_function(self):
        """A cube declaring an unknown aggregate function must raise
        ArgumentError when aggregated."""
        browser = self.workspace.browser("unknown_function")
        with self.assertRaisesRegex(ArgumentError, "Unknown.*function"):
            browser.aggregate()

    def test_explicit(self):
        """Explicit aggregates (amount_sum, count) match the fixture data."""
        browser = self.workspace.browser("default")
        result = browser.aggregate()
        summary = result.summary
        # sum(amount) = 4 * (1+2+4+8) = 60 over 16 rows.
        self.assertEqual(60, summary["amount_sum"])
        self.assertEqual(16, summary["count"])

    def test_post_calculation(self):
        """Post-calculated measures (e.g. simple moving average) appear as
        additional keys in drilled-down cells."""
        browser = self.workspace.browser("postcalc_in_measure")

        result = browser.aggregate(drilldown=["year"])
        cells = list(result.cells)
        aggregates = sorted(cells[0].keys())
        self.assertSequenceEqual(['amount_sma', 'amount_sum', 'count', 'year'],
                                 aggregates)
| mit |
zhouzhenghui/python-for-android | python-build/python-libs/gdata/build/lib/gdata/Crypto/Util/test.py | 228 | 18297 | #
# test.py : Functions used for testing the modules
#
# Part of the Python Cryptography Toolkit
#
# Distribute and use freely; there are no restrictions on further
# dissemination and usage except those imposed by the laws of your
# country of residence. This software is provided "as is" without
# warranty of fitness for use or suitability for any purpose, express
# or implied. Use at your own risk or not at all.
#
__revision__ = "$Id: test.py,v 1.16 2004/08/13 22:24:18 akuchling Exp $"
import binascii
import string
import testdata
from Crypto.Cipher import *
def die(string):
    """Report a test failure.  Intentionally does not abort, so the
    remaining tests keep running (the exit call is commented out)."""
    import sys
    print '***ERROR: ', string
    # sys.exit(0)    # Will default to continuing onward...
def print_timing (size, delta, verbose):
    """Print throughput as K/sec given *size* (in K) processed over
    *delta* seconds; silent unless *verbose* is set."""
    if verbose:
        if delta == 0:
            # Timer resolution too coarse to measure this run.
            print 'Unable to measure time -- elapsed time too small'
        else:
            print '%.2f K/sec' % (size/delta)
def exerciseBlockCipher(cipher, verbose):
    """Exercise the named block-cipher module in every chaining mode.

    Round-trips 128K of data through ECB, CFB, CBC, PGP, OFB and CTR
    modes (timing each), then checks IV reset handling and keyword
    arguments.  Returns the cipher module, or None if unavailable.
    """
    import string, time
    try:
        # Cipher modules were star-imported from Crypto.Cipher; look the
        # requested one up by name.
        ciph = eval(cipher)
    except NameError:
        print cipher, 'module not available'
        return None
    print cipher+ ':'
    str='1'                 # Build 128K of test data
    for i in xrange(0, 17):
        str=str+str
    if ciph.key_size==0: ciph.key_size=16
    # Truncate password/IV material to the module's key and block sizes.
    password = 'password12345678Extra text for password'[0:ciph.key_size]
    IV = 'Test IV Test IV Test IV Test'[0:ciph.block_size]

    # --- ECB: several short round-trips, then a timed bulk run.
    if verbose: print ' ECB mode:',
    obj=ciph.new(password, ciph.MODE_ECB)
    if obj.block_size != ciph.block_size:
        die("Module and cipher object block_size don't match")
    text='1234567812345678'[0:ciph.block_size]
    c=obj.encrypt(text)
    if (obj.decrypt(c)!=text): die('Error encrypting "'+text+'"')
    text='KuchlingKuchling'[0:ciph.block_size]
    c=obj.encrypt(text)
    if (obj.decrypt(c)!=text): die('Error encrypting "'+text+'"')
    text='NotTodayNotEver!'[0:ciph.block_size]
    c=obj.encrypt(text)
    if (obj.decrypt(c)!=text): die('Error encrypting "'+text+'"')
    start=time.time()
    s=obj.encrypt(str)
    s2=obj.decrypt(s)
    end=time.time()
    if (str!=s2):
        die('Error in resulting plaintext from ECB mode')
    print_timing(256, end-start, verbose)
    del obj

    # --- CFB: only 64K (CFB is byte-oriented and slower).
    if verbose: print ' CFB mode:',
    obj1=ciph.new(password, ciph.MODE_CFB, IV)
    obj2=ciph.new(password, ciph.MODE_CFB, IV)
    start=time.time()
    ciphertext=obj1.encrypt(str[0:65536])
    plaintext=obj2.decrypt(ciphertext)
    end=time.time()
    if (plaintext!=str[0:65536]):
        die('Error in resulting plaintext from CFB mode')
    print_timing(64, end-start, verbose)
    del obj1, obj2

    # --- CBC round-trip with separate encrypt/decrypt objects.
    if verbose: print ' CBC mode:',
    obj1=ciph.new(password, ciph.MODE_CBC, IV)
    obj2=ciph.new(password, ciph.MODE_CBC, IV)
    start=time.time()
    ciphertext=obj1.encrypt(str)
    plaintext=obj2.decrypt(ciphertext)
    end=time.time()
    if (plaintext!=str):
        die('Error in resulting plaintext from CBC mode')
    print_timing(256, end-start, verbose)
    del obj1, obj2

    # --- PGP variant of CFB.
    if verbose: print ' PGP mode:',
    obj1=ciph.new(password, ciph.MODE_PGP, IV)
    obj2=ciph.new(password, ciph.MODE_PGP, IV)
    start=time.time()
    ciphertext=obj1.encrypt(str)
    plaintext=obj2.decrypt(ciphertext)
    end=time.time()
    if (plaintext!=str):
        die('Error in resulting plaintext from PGP mode')
    print_timing(256, end-start, verbose)
    del obj1, obj2

    # --- OFB keystream mode.
    if verbose: print ' OFB mode:',
    obj1=ciph.new(password, ciph.MODE_OFB, IV)
    obj2=ciph.new(password, ciph.MODE_OFB, IV)
    start=time.time()
    ciphertext=obj1.encrypt(str)
    plaintext=obj2.decrypt(ciphertext)
    end=time.time()
    if (plaintext!=str):
        die('Error in resulting plaintext from OFB mode')
    print_timing(256, end-start, verbose)
    del obj1, obj2

    # CTR mode requires a callable producing each counter block; this
    # trivial (constant) counter suffices for a round-trip test.
    def counter(length=ciph.block_size):
        return length * 'a'

    if verbose: print ' CTR mode:',
    obj1=ciph.new(password, ciph.MODE_CTR, counter=counter)
    obj2=ciph.new(password, ciph.MODE_CTR, counter=counter)
    start=time.time()
    ciphertext=obj1.encrypt(str)
    plaintext=obj2.decrypt(ciphertext)
    end=time.time()
    if (plaintext!=str):
        die('Error in resulting plaintext from CTR mode')
    print_timing(256, end-start, verbose)
    del obj1, obj2

    # Test the IV handling: resetting .IV must reproduce the same
    # ciphertext for the same plaintext.
    if verbose: print ' Testing IV handling'
    obj1=ciph.new(password, ciph.MODE_CBC, IV)
    plaintext='Test'*(ciph.block_size/4)*3
    ciphertext1=obj1.encrypt(plaintext)
    obj1.IV=IV
    ciphertext2=obj1.encrypt(plaintext)
    if ciphertext1!=ciphertext2:
        die('Error in setting IV')

    # Test keyword arguments (all accepted spellings of new()).
    obj1=ciph.new(key=password)
    obj1=ciph.new(password, mode=ciph.MODE_CBC)
    obj1=ciph.new(mode=ciph.MODE_CBC, key=password)
    obj1=ciph.new(IV=IV, mode=ciph.MODE_CBC, key=password)

    return ciph
def exerciseStreamCipher(cipher, verbose):
    """Exercise the named stream-cipher module: round-trip a few short
    strings plus a timed 128K bulk run.  Returns the module or None."""
    import string, time
    try:
        # Stream-cipher modules were star-imported from Crypto.Cipher.
        ciph = eval(cipher)
    except (NameError):
        print cipher, 'module not available'
        return None
    print cipher + ':',
    str='1'                 # Build 128K of test data
    for i in xrange(0, 17):
        str=str+str
    # XOR reports key_size 0 (variable); use 16 bytes in that case.
    key_size = ciph.key_size or 16
    password = 'password12345678Extra text for password'[0:key_size]

    obj1=ciph.new(password)
    obj2=ciph.new(password)
    if obj1.block_size != ciph.block_size:
        die("Module and cipher object block_size don't match")
    if obj1.key_size != ciph.key_size:
        die("Module and cipher object key_size don't match")

    # Stream ciphers are stateful: obj1 encrypts, obj2 decrypts in step.
    text='1234567812345678Python'
    c=obj1.encrypt(text)
    if (obj2.decrypt(c)!=text): die('Error encrypting "'+text+'"')
    text='B1FF I2 A R3A11Y |<00L D00D!!!!!'
    c=obj1.encrypt(text)
    if (obj2.decrypt(c)!=text): die('Error encrypting "'+text+'"')
    text='SpamSpamSpamSpamSpamSpamSpamSpamSpam'
    c=obj1.encrypt(text)
    if (obj2.decrypt(c)!=text): die('Error encrypting "'+text+'"')
    start=time.time()
    s=obj1.encrypt(str)
    str=obj2.decrypt(s)
    end=time.time()
    print_timing(256, end-start, verbose)
    del obj1, obj2

    return ciph
def TestStreamModules(args=['arc4', 'XOR'], verbose=1):
    """Run the stream-cipher self-tests for the modules named in *args*,
    then verify each against its known-answer vectors in testdata.

    NOTE(review): the mutable default argument is part of the public API;
    it is never mutated here (map() returns a new list).
    """
    import sys, string
    args=map(string.lower, args)

    if 'arc4' in args:
        # Test ARC4 stream cipher
        arc4=exerciseStreamCipher('ARC4', verbose)
        if (arc4!=None):
            # Known-answer tests: (key, plaintext, ciphertext) hex triples.
            for entry in testdata.arc4:
                key,plain,cipher=entry
                key=binascii.a2b_hex(key)
                plain=binascii.a2b_hex(plain)
                cipher=binascii.a2b_hex(cipher)
                obj=arc4.new(key)
                ciphertext=obj.encrypt(plain)
                if (ciphertext!=cipher):
                    die('ARC4 failed on entry '+`entry`)

    if 'xor' in args:
        # Test XOR stream cipher
        XOR=exerciseStreamCipher('XOR', verbose)
        if (XOR!=None):
            for entry in testdata.xor:
                key,plain,cipher=entry
                key=binascii.a2b_hex(key)
                plain=binascii.a2b_hex(plain)
                cipher=binascii.a2b_hex(cipher)
                obj=XOR.new(key)
                ciphertext=obj.encrypt(plain)
                if (ciphertext!=cipher):
                    die('XOR failed on entry '+`entry`)
def TestBlockModules(args=['aes', 'arc2', 'des', 'blowfish', 'cast', 'des3',
                           'idea', 'rc5'],
                     verbose=1):
    """Run the block-cipher self-tests for the modules named in *args*,
    then verify each against its known-answer vectors in testdata.

    NOTE(review): the mutable default argument is part of the public API;
    it is never mutated here (map() returns a new list).
    """
    import string
    args=map(string.lower, args)
    if 'aes' in args:
        ciph=exerciseBlockCipher('AES', verbose)        # AES
        if (ciph!=None):
            if verbose: print ' Verifying against test suite...'
            # ECB known-answer vectors: (key, plaintext, ciphertext) hex.
            for entry in testdata.aes:
                key,plain,cipher=entry
                key=binascii.a2b_hex(key)
                plain=binascii.a2b_hex(plain)
                cipher=binascii.a2b_hex(cipher)
                obj=ciph.new(key, ciph.MODE_ECB)
                ciphertext=obj.encrypt(plain)
                if (ciphertext!=cipher):
                    die('AES failed on entry '+`entry`)
                    # Dump the bad ciphertext for diagnosis.
                    for i in ciphertext:
                        if verbose: print hex(ord(i)),
                    if verbose: print
            # Mode vectors additionally carry the mode and extra kwargs.
            for entry in testdata.aes_modes:
                mode, key, plain, cipher, kw = entry
                key=binascii.a2b_hex(key)
                plain=binascii.a2b_hex(plain)
                cipher=binascii.a2b_hex(cipher)
                obj=ciph.new(key, mode, **kw)
                obj2=ciph.new(key, mode, **kw)
                ciphertext=obj.encrypt(plain)
                if (ciphertext!=cipher):
                    die('AES encrypt failed on entry '+`entry`)
                    for i in ciphertext:
                        if verbose: print hex(ord(i)),
                    if verbose: print
                plain2=obj2.decrypt(ciphertext)
                if plain2!=plain:
                    die('AES decrypt failed on entry '+`entry`)
                    for i in plain2:
                        if verbose: print hex(ord(i)),
                    if verbose: print

    if 'arc2' in args:
        ciph=exerciseBlockCipher('ARC2', verbose)        # Alleged RC2
        if (ciph!=None):
            if verbose: print ' Verifying against test suite...'
            for entry in testdata.arc2:
                key,plain,cipher=entry
                key=binascii.a2b_hex(key)
                plain=binascii.a2b_hex(plain)
                cipher=binascii.a2b_hex(cipher)
                obj=ciph.new(key, ciph.MODE_ECB)
                ciphertext=obj.encrypt(plain)
                if (ciphertext!=cipher):
                    die('ARC2 failed on entry '+`entry`)
                    for i in ciphertext:
                        if verbose: print hex(ord(i)),
                    print

    if 'blowfish' in args:
        ciph=exerciseBlockCipher('Blowfish',verbose)# Bruce Schneier's Blowfish cipher
        if (ciph!=None):
            if verbose: print ' Verifying against test suite...'
            for entry in testdata.blowfish:
                key,plain,cipher=entry
                key=binascii.a2b_hex(key)
                plain=binascii.a2b_hex(plain)
                cipher=binascii.a2b_hex(cipher)
                obj=ciph.new(key, ciph.MODE_ECB)
                ciphertext=obj.encrypt(plain)
                if (ciphertext!=cipher):
                    die('Blowfish failed on entry '+`entry`)
                    for i in ciphertext:
                        if verbose: print hex(ord(i)),
                    if verbose: print

    if 'cast' in args:
        ciph=exerciseBlockCipher('CAST', verbose)        # CAST-128
        if (ciph!=None):
            if verbose: print ' Verifying against test suite...'
            for entry in testdata.cast:
                key,plain,cipher=entry
                key=binascii.a2b_hex(key)
                plain=binascii.a2b_hex(plain)
                cipher=binascii.a2b_hex(cipher)
                obj=ciph.new(key, ciph.MODE_ECB)
                ciphertext=obj.encrypt(plain)
                if (ciphertext!=cipher):
                    die('CAST failed on entry '+`entry`)
                    for i in ciphertext:
                        if verbose: print hex(ord(i)),
                    if verbose: print

            if 0:
                # The full-maintenance test; it requires 4 million encryptions,
                # and correspondingly is quite time-consuming.  I've disabled
                # it; it's faster to compile block/cast.c with -DTEST and run
                # the resulting program.
                a = b = '\x01\x23\x45\x67\x12\x34\x56\x78\x23\x45\x67\x89\x34\x56\x78\x9A'

                for i in range(0, 1000000):
                    obj = cast.new(b, cast.MODE_ECB)
                    a = obj.encrypt(a[:8]) + obj.encrypt(a[-8:])
                    obj = cast.new(a, cast.MODE_ECB)
                    b = obj.encrypt(b[:8]) + obj.encrypt(b[-8:])

                if a!="\xEE\xA9\xD0\xA2\x49\xFD\x3B\xA6\xB3\x43\x6F\xB8\x9D\x6D\xCA\x92":
                    if verbose: print 'CAST test failed: value of "a" doesn\'t match'
                if b!="\xB2\xC9\x5E\xB0\x0C\x31\xAD\x71\x80\xAC\x05\xB8\xE8\x3D\x69\x6E":
                    if verbose: print 'CAST test failed: value of "b" doesn\'t match'

    if 'des' in args:
        # Test/benchmark DES block cipher
        des=exerciseBlockCipher('DES', verbose)
        if (des!=None):
            # Various tests taken from the DES library packaged with Kerberos V4
            obj=des.new(binascii.a2b_hex('0123456789abcdef'), des.MODE_ECB)
            s=obj.encrypt('Now is t')
            if (s!=binascii.a2b_hex('3fa40e8a984d4815')):
                die('DES fails test 1')
            obj=des.new(binascii.a2b_hex('08192a3b4c5d6e7f'), des.MODE_ECB)
            s=obj.encrypt('\000\000\000\000\000\000\000\000')
            if (s!=binascii.a2b_hex('25ddac3e96176467')):
                die('DES fails test 2')
            obj=des.new(binascii.a2b_hex('0123456789abcdef'), des.MODE_CBC,
                        binascii.a2b_hex('1234567890abcdef'))
            s=obj.encrypt("Now is the time for all ")
            if (s!=binascii.a2b_hex('e5c7cdde872bf27c43e934008c389c0f683788499a7c05f6')):
                die('DES fails test 3')
            obj=des.new(binascii.a2b_hex('0123456789abcdef'), des.MODE_CBC,
                        binascii.a2b_hex('fedcba9876543210'))
            s=obj.encrypt("7654321 Now is the time for \000\000\000\000")
            if (s!=binascii.a2b_hex("ccd173ffab2039f4acd8aefddfd8a1eb468e91157888ba681d269397f7fe62b4")):
                die('DES fails test 4')
            del obj,s

            # R. Rivest's test: see http://theory.lcs.mit.edu/~rivest/destest.txt
            # Alternately encrypt/decrypt 16 times; the fixed point is known.
            x=binascii.a2b_hex('9474B8E8C73BCA7D')
            for i in range(0, 16):
                obj=des.new(x, des.MODE_ECB)
                if (i & 1): x=obj.decrypt(x)
                else: x=obj.encrypt(x)
            if x!=binascii.a2b_hex('1B1A2DDB4C642438'):
                die("DES fails Rivest's test")

            if verbose: print ' Verifying against test suite...'
            for entry in testdata.des:
                key,plain,cipher=entry
                key=binascii.a2b_hex(key)
                plain=binascii.a2b_hex(plain)
                cipher=binascii.a2b_hex(cipher)
                obj=des.new(key, des.MODE_ECB)
                ciphertext=obj.encrypt(plain)
                if (ciphertext!=cipher):
                    die('DES failed on entry '+`entry`)
            # CBC vectors also carry an IV.
            for entry in testdata.des_cbc:
                key, iv, plain, cipher=entry
                key, iv, cipher=binascii.a2b_hex(key),binascii.a2b_hex(iv),binascii.a2b_hex(cipher)
                obj1=des.new(key, des.MODE_CBC, iv)
                obj2=des.new(key, des.MODE_CBC, iv)
                ciphertext=obj1.encrypt(plain)
                if (ciphertext!=cipher):
                    die('DES CBC mode failed on entry '+`entry`)

    if 'des3' in args:
        ciph=exerciseBlockCipher('DES3', verbose)        # Triple DES
        if (ciph!=None):
            if verbose: print ' Verifying against test suite...'
            for entry in testdata.des3:
                key,plain,cipher=entry
                key=binascii.a2b_hex(key)
                plain=binascii.a2b_hex(plain)
                cipher=binascii.a2b_hex(cipher)
                obj=ciph.new(key, ciph.MODE_ECB)
                ciphertext=obj.encrypt(plain)
                if (ciphertext!=cipher):
                    die('DES3 failed on entry '+`entry`)
                    for i in ciphertext:
                        if verbose: print hex(ord(i)),
                    if verbose: print
            for entry in testdata.des3_cbc:
                key, iv, plain, cipher=entry
                key, iv, cipher=binascii.a2b_hex(key),binascii.a2b_hex(iv),binascii.a2b_hex(cipher)
                obj1=ciph.new(key, ciph.MODE_CBC, iv)
                obj2=ciph.new(key, ciph.MODE_CBC, iv)
                ciphertext=obj1.encrypt(plain)
                if (ciphertext!=cipher):
                    die('DES3 CBC mode failed on entry '+`entry`)

    if 'idea' in args:
        ciph=exerciseBlockCipher('IDEA', verbose)       # IDEA block cipher
        if (ciph!=None):
            if verbose: print ' Verifying against test suite...'
            for entry in testdata.idea:
                key,plain,cipher=entry
                key=binascii.a2b_hex(key)
                plain=binascii.a2b_hex(plain)
                cipher=binascii.a2b_hex(cipher)
                obj=ciph.new(key, ciph.MODE_ECB)
                ciphertext=obj.encrypt(plain)
                if (ciphertext!=cipher):
                    die('IDEA failed on entry '+`entry`)

    if 'rc5' in args:
        # Ronald Rivest's RC5 algorithm
        ciph=exerciseBlockCipher('RC5', verbose)
        if (ciph!=None):
            if verbose: print ' Verifying against test suite...'
            for entry in testdata.rc5:
                key,plain,cipher=entry
                key=binascii.a2b_hex(key)
                plain=binascii.a2b_hex(plain)
                cipher=binascii.a2b_hex(cipher)
                # First 3 key bytes encode the RC5 parameters
                # (version, word size, rounds); the rest is the key.
                obj=ciph.new(key[4:], ciph.MODE_ECB,
                             version =ord(key[0]),
                             word_size=ord(key[1]),
                             rounds  =ord(key[2]) )
                ciphertext=obj.encrypt(plain)
                if (ciphertext!=cipher):
                    die('RC5 failed on entry '+`entry`)
                    for i in ciphertext:
                        if verbose: print hex(ord(i)),
                    if verbose: print
| apache-2.0 |
sivaprakashniet/push_pull | p2p/lib/python2.7/site-packages/django/core/management/commands/flush.py | 82 | 5016 | from __future__ import unicode_literals
import sys
from importlib import import_module
from django.apps import apps
from django.core.management import call_command
from django.core.management.base import BaseCommand, CommandError
from django.core.management.color import no_style
from django.core.management.sql import emit_post_migrate_signal, sql_flush
from django.db import DEFAULT_DB_ALIAS, connections, router, transaction
from django.utils import six
from django.utils.six.moves import input
class Command(BaseCommand):
    """`manage.py flush`: empty every Django-managed table on a database."""

    help = ('Removes ALL DATA from the database, including data added during '
            'migrations. Unmigrated apps will also have their initial_data '
            'fixture reloaded. Does not achieve a "fresh install" state.')

    def add_arguments(self, parser):
        """Register the command-line options accepted by flush."""
        parser.add_argument('--noinput', action='store_false', dest='interactive', default=True,
            help='Tells Django to NOT prompt the user for input of any kind.')
        parser.add_argument('--database', action='store', dest='database',
            default=DEFAULT_DB_ALIAS,
            help='Nominates a database to flush. Defaults to the "default" database.')
        parser.add_argument('--no-initial-data', action='store_false',
            dest='load_initial_data', default=True,
            help='Tells Django not to load any initial data after database synchronization.')

    def handle(self, **options):
        """Confirm (unless --noinput), run the SQL flush statements in a
        transaction, emit post-migrate signals and reload initial data."""
        database = options.get('database')
        connection = connections[database]
        verbosity = options.get('verbosity')
        interactive = options.get('interactive')
        # The following are stealth options used by Django's internals.
        reset_sequences = options.get('reset_sequences', True)
        allow_cascade = options.get('allow_cascade', False)
        inhibit_post_migrate = options.get('inhibit_post_migrate', False)

        self.style = no_style()

        # Import the 'management' module within each installed app, to register
        # dispatcher events.
        for app_config in apps.get_app_configs():
            try:
                import_module('.management', app_config.name)
            except ImportError:
                # App has no management module -- nothing to register.
                pass

        sql_list = sql_flush(self.style, connection, only_django=True,
                             reset_sequences=reset_sequences,
                             allow_cascade=allow_cascade)

        if interactive:
            confirm = input("""You have requested a flush of the database.
This will IRREVERSIBLY DESTROY all data currently in the %r database,
and return each table to an empty state.
Are you sure you want to do this?
Type 'yes' to continue, or 'no' to cancel: """ % connection.settings_dict['NAME'])
        else:
            confirm = 'yes'

        if confirm == 'yes':
            try:
                # Run all flush statements atomically; use a savepoint only
                # if the backend can roll back DDL.
                with transaction.atomic(using=database,
                                        savepoint=connection.features.can_rollback_ddl):
                    with connection.cursor() as cursor:
                        for sql in sql_list:
                            cursor.execute(sql)
            except Exception as e:
                new_msg = (
                    "Database %s couldn't be flushed. Possible reasons:\n"
                    "  * The database isn't running or isn't configured correctly.\n"
                    "  * At least one of the expected database tables doesn't exist.\n"
                    "  * The SQL was invalid.\n"
                    "Hint: Look at the output of 'django-admin sqlflush'. "
                    "That's the SQL this command wasn't able to run.\n"
                    "The full error: %s") % (connection.settings_dict['NAME'], e)
                # Re-raise as CommandError, preserving the original traceback.
                six.reraise(CommandError, CommandError(new_msg), sys.exc_info()[2])

            if not inhibit_post_migrate:
                self.emit_post_migrate(verbosity, interactive, database)

            # Reinstall the initial_data fixture.
            if options.get('load_initial_data'):
                # Reinstall the initial_data fixture for apps without migrations.
                from django.db.migrations.executor import MigrationExecutor
                executor = MigrationExecutor(connection)
                app_options = options.copy()
                for app_label in executor.loader.unmigrated_apps:
                    app_options['app_label'] = app_label
                    call_command('loaddata', 'initial_data', **app_options)
        else:
            self.stdout.write("Flush cancelled.\n")

    @staticmethod
    def emit_post_migrate(verbosity, interactive, database):
        # Emit the post migrate signal. This allows individual applications to
        # respond as if the database had been migrated from scratch.
        all_models = []
        for app_config in apps.get_app_configs():
            all_models.extend(router.get_migratable_models(app_config, database, include_auto_created=True))
        emit_post_migrate_signal(set(all_models), verbosity, interactive, database)
| bsd-3-clause |
Akasurde/ansible | lib/ansible/module_utils/common/text/formatters.py | 68 | 3865 | # -*- coding: utf-8 -*-
# Copyright (c) 2019 Ansible Project
# Simplified BSD License (see licenses/simplified_bsd.txt or https://opensource.org/licenses/BSD-2-Clause)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import re
from ansible.module_utils.six import iteritems
SIZE_RANGES = {
'Y': 1 << 80,
'Z': 1 << 70,
'E': 1 << 60,
'P': 1 << 50,
'T': 1 << 40,
'G': 1 << 30,
'M': 1 << 20,
'K': 1 << 10,
'B': 1,
}
def lenient_lowercase(lst):
    """Lowercase elements of a list.

    If an element is not a string, pass it through untouched.
    """
    def _maybe_lower(item):
        # Anything exposing .lower() (i.e. strings) is lowercased;
        # everything else is passed through as-is.
        try:
            return item.lower()
        except AttributeError:
            return item

    return [_maybe_lower(item) for item in lst]
def human_to_bytes(number, default_unit=None, isbits=False):
    """Convert number in string format into bytes (ex: '2K' => 2048) or using unit argument.
    example: human_to_bytes('10M') <=> human_to_bytes(10, 'M').
    When isbits is False (default), converts bytes from a human-readable format to integer.
    example: human_to_bytes('1MB') returns 1048576 (int).
    The function expects 'B' (uppercase) as a byte identifier passed
    as a part of 'name' param string or 'unit', e.g. 'MB'/'KB'/etc.
    (except when the identifier is single 'b', it is perceived as a byte identifier too).
    if 'Mb'/'Kb'/... is passed, the ValueError will be raised.
    When isbits is True, converts bits from a human-readable format to integer.
    example: human_to_bytes('1Mb', isbits=True) returns 1048576 (int) -
    string bits representation was passed and return as a number or bits.
    The function expects 'b' (lowercase) as a bit identifier, e.g. 'Mb'/'Kb'/etc.
    if 'MB'/'KB'/... is passed, the ValueError will be raised.
    """
    # Split input into an optional decimal number and an optional alphabetic
    # unit suffix ('10M' -> ('10', 'M'); plain '10' -> ('10', None)).
    m = re.search(r'^\s*(\d*\.?\d*)\s*([A-Za-z]+)?', str(number), flags=re.IGNORECASE)
    if m is None:
        raise ValueError("human_to_bytes() can't interpret following string: %s" % str(number))
    try:
        num = float(m.group(1))
    except Exception:
        # float('') raises ValueError when no digits were present in the input.
        raise ValueError("human_to_bytes() can't interpret following number: %s (original input string: %s)" % (m.group(1), number))
    unit = m.group(2)
    if unit is None:
        unit = default_unit
    if unit is None:
        ''' No unit given, returning raw number '''
        return int(round(num))
    # Only the first character of the unit selects the multiplier ('MB' -> 'M').
    range_key = unit[0].upper()
    try:
        limit = SIZE_RANGES[range_key]
    except Exception:
        raise ValueError("human_to_bytes() failed to convert %s (unit = %s). The suffix must be one of %s" % (number, unit, ", ".join(SIZE_RANGES.keys())))
    # default value
    unit_class = 'B'
    unit_class_name = 'byte'
    # handling bits case
    if isbits:
        unit_class = 'b'
        unit_class_name = 'bit'
    # check unit value if more than one character (KB, MB)
    if len(unit) > 1:
        expect_message = 'expect %s%s or %s' % (range_key, unit_class, range_key)
        if range_key == 'B':
            expect_message = 'expect %s or %s' % (unit_class, unit_class_name)
        # Full words like '1 megabyte' are accepted; otherwise the second
        # character must match the expected byte/bit identifier exactly.
        if unit_class_name in unit.lower():
            pass
        elif unit[1] != unit_class:
            raise ValueError("human_to_bytes() failed to convert %s. Value is not a valid string (%s)" % (number, expect_message))
    return int(round(num * limit))
def bytes_to_human(size, isbits=False, unit=None):
    """Convert a byte (or bit) count into a human-readable string.

    example: bytes_to_human(1048576) returns '1.00 MB'.

    :arg size: number of bytes (or bits when ``isbits=True``) to convert.
    :kwarg isbits: when True label the result in bits ('Kb') instead of bytes ('KB').
    :kwarg unit: optional prefix character ('K', 'M', ...) forcing the output
        unit; when None the largest prefix not exceeding ``size`` is chosen.
    :returns: formatted string such as '1.00 KB' or '512.00 Bytes'.
    """
    base = 'Bytes'
    if isbits:
        base = 'bits'
    suffix = ''
    # dict.items() behaves identically on Python 2 and 3 here; the
    # six.iteritems compat shim is unnecessary for a one-shot sorted() pass.
    for suffix, limit in sorted(SIZE_RANGES.items(), key=lambda item: -item[1]):
        if (unit is None and size >= limit) or unit is not None and unit.upper() == suffix[0]:
            break
    # 'B' (limit == 1) gets the full word ('Bytes'/'bits'); larger prefixes
    # get the base initial appended ('K' -> 'KB' or 'Kb').
    if limit != 1:
        suffix += base[0]
    else:
        suffix = base
    return '%.2f %s' % (size / limit, suffix)
| gpl-3.0 |
Smiljanic/espresso | testsuite/python/nonBondedInteractions.py | 3 | 5356 | #
# Copyright (C) 2013,2014,2015,2016 The ESPResSo project
#
# This file is part of ESPResSo.
#
# ESPResSo is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ESPResSo is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Tests particle property setters/getters
import unittest as ut
import espressomd
import numpy as np
from espressomd.interactions import LennardJonesInteraction
if "LENNARD_JONES_GENERIC" in espressomd.features():
from espressomd.interactions import GenericLennardJonesInteraction
class Non_bonded_interactionsTests(ut.TestCase):
    """Round-trip tests: non-bonded interaction parameters set on the core
    must be read back unchanged."""
    # def __init__(self,particleId):
    # self.pid=particleId
    # Handle to espresso system
    es = espressomd.System()
    def intersMatch(self, inType, outType, inParams, outParams):
        """Check, if the interaction type set and gotten back as well as the bond
        parameters set and gotten back match. Only check keys present in
        inParams.
        """
        if inType != outType:
            print("Type mismatch:", inType, outType)
            return False
        for k in inParams.keys():
            if k not in outParams:
                print(k, "missing from returned parameters")
                return False
            if outParams[k] != inParams[k]:
                print("Mismatch in parameter ", k, inParams[k], outParams[k])
                return False
        return True
    # NOTE: no `self` on purpose — this is called as a plain function at
    # class-definition time to generate the test_* methods below.
    def generateTestForNon_bonded_interaction(_partType1, _partType2, _interClass, _params, _interName):
        """Generates test cases for checking interaction parameters set and gotten back
        from Es actually match. Only keys which are present in _params are checked
        1st and 2nd arg: Particle type ids to check on
        3rd: Class of the interaction to test, ie.e, FeneBond, HarmonicBond
        4th: Interaction parameters as dictionary, i.e., {"k"=1.,"r_0"=0.
        5th: Name of the interaction property to set (i.e. "lennardJones")
        """
        partType1 = _partType1
        partType2 = _partType2
        interClass = _interClass
        params = _params
        interName = _interName
        def func(self):
            # This code is run at the execution of the generated function.
            # It will use the state of the variables in the outer function,
            # which was there, when the outer function was called
            # Set parameters
            getattr(self.es.non_bonded_inter[partType1, partType2], interName).set_params(
                **params)
            # Read them out again
            outInter = getattr(
                self.es.non_bonded_inter[partType1, partType2], interName)
            outParams = outInter.get_params()
            self.assertTrue(self.intersMatch(interClass, type(outInter), params, outParams), interClass(
                **params).type_name() + ": value set and value gotten back differ for particle types " + str(partType1) + " and " + str(partType2) + ": " + params.__str__() + " vs. " + outParams.__str__())
        return func
    test_lj1 = generateTestForNon_bonded_interaction(
        0, 0, LennardJonesInteraction,
        {"epsilon": 1., "sigma": 2., "cutoff": 3.,
         "shift": 4., "offset": 5., "min": 7.},
        "lennard_jones")
    test_lj2 = generateTestForNon_bonded_interaction(
        0, 0, LennardJonesInteraction,
        {"epsilon": 1.3, "sigma": 2.2, "cutoff": 3.4,
         "shift": 4.1, "offset": 5.1, "min": 7.1},
        "lennard_jones")
    test_lj3 = generateTestForNon_bonded_interaction(
        0, 0, LennardJonesInteraction,
        {"epsilon": 1.3, "sigma": 2.2, "cutoff": 3.4,
         "shift": 4.1, "offset": 5.1, "min": 7.1},
        "lennard_jones")
    # Generic LJ tests only exist when the feature was compiled in.
    if "LENNARD_JONES_GENERIC" in espressomd.features():
        test_ljgen1 = generateTestForNon_bonded_interaction(
            0, 0, GenericLennardJonesInteraction,
            {"epsilon": 1., "sigma": 2., "cutoff": 3., "shift": 4., "offset": 5.,
             "e1": 7, "e2": 8, "b1": 9., "b2": 10.},
            "generic_lennard_jones")
        test_ljgen2 = generateTestForNon_bonded_interaction(
            0, 0, GenericLennardJonesInteraction,
            {"epsilon": 1.1, "sigma": 2.1, "cutoff": 3.1, "shift": 4.1, "offset": 5.1,
             "e1": 71, "e2": 81, "b1": 9.1, "b2": 10.1},
            "generic_lennard_jones")
        test_ljgen3 = generateTestForNon_bonded_interaction(
            0, 0, GenericLennardJonesInteraction,
            {"epsilon": 1.2, "sigma": 2.2, "cutoff": 3.2, "shift": 4.2, "offset": 5.2,
             "e1": 72, "e2": 82, "b1": 9.2, "b2": 10.2},
            "generic_lennard_jones")
    def test_forcecap(self):
        # Force cap setter/getter round-trip on the global interaction handle.
        self.es.non_bonded_inter.set_force_cap(17.5)
        self.assertEqual(self.es.non_bonded_inter.get_force_cap(), 17.5)
if __name__ == "__main__":
    # Print compiled-in features before running the suite to aid debugging.
    print("Features: ", espressomd.features())
    ut.main()
| gpl-3.0 |
m3brown/collab | core/taggit/tests/tests.py | 5 | 28834 | from unittest import TestCase as UnitTestCase
import django
from django.conf import settings
from django.core.exceptions import ValidationError
from django.db import connection
from django.test import TestCase, TransactionTestCase
from django.test.utils import override_settings
from core.taggit.managers import TaggableManager
from core.taggit.models import Tag, TaggedItem, TagCategory
from .forms import (FoodForm, DirectFoodForm, CustomPKFoodForm,
OfficialFoodForm)
from .models import (Food, Pet, HousePet, DirectFood, DirectPet,
DirectHousePet, TaggedPet, CustomPKFood, CustomPKPet, CustomPKHousePet,
TaggedCustomPKPet, OfficialFood, OfficialPet, OfficialHousePet,
OfficialThroughModel, OfficialTag, Photo, Movie, Article)
from core.taggit.utils import parse_tags, edit_string_for_tags, add_tags
from django.contrib.auth import get_user_model
import random
import string
class BaseTaggingTest(object):
    """Shared fixture helpers and assertions for the taggit test cases."""
    def random_user(self):
        # NOTE(review): string.lowercase exists only on Python 2; this file
        # appears Python 2-only (see `unicode` usage below).
        return get_user_model().objects.create_user(
            ''.join(random.choice(string.lowercase) for _ in range(12)))
    def random_tag_category(self):
        # Build a two-word category with a matching hyphenated slug.
        start = ''.join(random.choice(string.lowercase) for _ in range(12))
        end = ''.join(random.choice(string.lowercase) for _ in range(12))
        name = '%s %s' % (start, end)
        slug = '%s-%s' % (start, end)
        tag_category = TagCategory(name=name, slug=slug)
        tag_category.save()
        return tag_category
    def assert_tags_equal(self, qs, tags, sort=True, attr="name"):
        # Compare a tag queryset to a plain list via the given attribute.
        # Relies on Python 2 map() returning a list (sortable in place).
        got = map(lambda tag: getattr(tag, attr), qs)
        if sort:
            got.sort()
            tags.sort()
        self.assertEqual(got, tags)
    def assert_tagged_items_equal(self, qs, tag_item_pairs):
        # Compare TaggedItem rows to (object name, tag name) pairs, in order.
        got = map(lambda ti: (ti.content_object.name, ti.tag.name), qs)
        self.assertEqual(got, tag_item_pairs)
    def assert_num_queries(self, n, f, *args, **kwargs):
        # Count SQL queries issued by f; DEBUG must be on for Django to record them.
        original_DEBUG = settings.DEBUG
        settings.DEBUG = True
        current = len(connection.queries)
        try:
            f(*args, **kwargs)
            self.assertEqual(
                len(connection.queries) - current,
                n,
            )
        finally:
            settings.DEBUG = original_DEBUG
    def _get_form_str(self, form_str):
        # Django >= 1.3 wraps help text in a span; older versions do not.
        if django.VERSION >= (1, 3):
            form_str %= {
                "help_start": '<span class="helptext">',
                "help_stop": "</span>"
            }
        else:
            form_str %= {
                "help_start": "",
                "help_stop": ""
            }
        return form_str
    def assert_form_renders(self, form, html):
        self.assertEqual(str(form), self._get_form_str(html))
class BaseTaggingTestCase(TestCase, BaseTaggingTest):
    """Tagging helpers mixed into a standard (transaction-wrapped) TestCase."""
    pass
class BaseTaggingTransactionTestCase(TransactionTestCase, BaseTaggingTest):
    """Tagging helpers mixed into a TransactionTestCase (real commits)."""
    pass
class TagModelTestCase(BaseTaggingTransactionTestCase):
    """Tests for Tag model creation, updating and slug generation."""
    food_model = Food
    tag_model = Tag
    def test_unique_slug(self):
        # Tags differing only in case must still get distinct slugs.
        apple = self.food_model.objects.create(name="apple")
        apple.tags.add("Red", "red")
    def test_update(self):
        special = self.tag_model.objects.create(name="special")
        special.save()
    def test_add(self):
        apple = self.food_model.objects.create(name="apple")
        yummy = self.tag_model.objects.create(name="yummy")
        apple.tags.add(yummy)
    def test_slugify(self):
        # Duplicate slugs get a numeric suffix ("-1").
        a = Article.objects.create(title="django-taggit 1.0 Released")
        a.tags.add("awesome", "release", "AWESOME")
        self.assert_tags_equal(a.tags.all(), [
            "category-awesome",
            "category-release",
            "category-awesome-1"
        ], attr="slug")
class TagModelDirectTestCase(TagModelTestCase):
    """Same model tests run against the direct (non-generic FK) models."""
    food_model = DirectFood
    tag_model = Tag
class TagModelCustomPKTestCase(TagModelTestCase):
    """Same model tests run against models with a custom primary key."""
    food_model = CustomPKFood
    tag_model = Tag
class TagModelOfficialTestCase(TagModelTestCase):
    """Same model tests run against the custom through-model variants."""
    food_model = OfficialFood
    tag_model = OfficialTag
class TagUtilTestCase(BaseTaggingTestCase):
    """Tests for the add_tags() utility: tag/TaggedItem creation and
    deduplication across categories, creators and content types."""
    def test_add_tags_util(self):
        food_model = Food
        apple = food_model.objects.create(name="apple")
        self.assertEqual(list(apple.tags.all()), [])
        self.assertEqual(list(food_model.tags.all()), [])
        user1 = self.random_user()
        category1 = self.random_tag_category()
        add_tags(apple, 'green', category1.slug, user1, 'food')
        self.assert_tags_equal(apple.tags.all(), ['green'])
        self.assert_tags_equal(food_model.tags.all(), ['green'])
        self.assert_tagged_items_equal(TaggedItem.objects.filter(content_type__name='food'),
                                       [('apple','green')])
        self.assert_tagged_items_equal(TaggedItem.objects.filter(tag_category=category1),
                                       [('apple','green')])
        self.assert_tagged_items_equal(TaggedItem.objects.filter(tag_creator=user1),
                                       [('apple','green')])
        # test null category
        user2 = self.random_user()
        add_tags(apple, 'yellow', None, user2, 'food')
        self.assert_tags_equal(apple.tags.all(), ['green', 'yellow'])
        self.assert_tags_equal(food_model.tags.all(), ['green', 'yellow'])
        self.assert_tagged_items_equal(TaggedItem.objects.filter(content_type__name='food'),
                                       [('apple','green'), ('apple','yellow')])
        self.assert_tagged_items_equal(TaggedItem.objects.filter(tag_category=None),
                                       [('apple','yellow')])
        self.assert_tagged_items_equal(TaggedItem.objects.filter(tag_creator=user2),
                                       [('apple','yellow')])
        # test null user
        category2 = self.random_tag_category()
        add_tags(apple, 'red', category2.slug, None, 'food')
        self.assert_tags_equal(apple.tags.all(), ['green', 'yellow', 'red'])
        self.assert_tags_equal(food_model.tags.all(), ['green', 'yellow', 'red'])
        self.assert_tagged_items_equal(TaggedItem.objects.filter(content_type__name='food'),
                                       [('apple','green'), ('apple','yellow'), ('apple','red')])
        self.assert_tagged_items_equal(TaggedItem.objects.filter(tag_category=category2),
                                       [('apple','red')])
        self.assert_tagged_items_equal(TaggedItem.objects.filter(tag_creator=None),
                                       [('apple','red')])
        # test null content_type
        self.assertRaises(IndexError, add_tags, apple, 'red', category2.slug, user2, 'bad_content_type')
        self.assertRaises(IndexError, add_tags, apple, 'red', category2.slug, user2, None)
    def test_add_tags_duplicate_tag(self):
        """
        add_tags function should only create a Tag if it doesn't already exist
        """
        food_model = Food
        apple = food_model.objects.create(name="apple")
        self.assertEqual(list(apple.tags.all()), [])
        self.assertEqual(list(food_model.tags.all()), [])
        user1 = self.random_user()
        category1 = self.random_tag_category()
        add_tags(apple, 'green', category1.slug, user1, 'food')
        self.assert_tags_equal(apple.tags.all(), ['green'])
        self.assert_tags_equal(food_model.tags.all(), ['green'])
        # test exact duplicate
        add_tags(apple, 'green', category1.slug, user1, 'food')
        self.assert_tags_equal(apple.tags.all(), ['green'])
        self.assert_tags_equal(food_model.tags.all(), ['green'])
        # test duplicate with different category/user/content_type
        user2 = self.random_user()
        category2 = self.random_tag_category()
        food_model2 = Pet
        food_model2.objects.create(name="apple")
        add_tags(apple, 'green', category2.slug, user2, 'pet')
        self.assert_tags_equal(apple.tags.all(), ['green'])
        self.assert_tags_equal(food_model.tags.all(), ['green'])
    def test_add_tags_new_taggeditem(self):
        """
        add_tags function should only create a Tag if it doesn't already exist
        """
        food_model = Food
        apple = food_model.objects.create(name="apple")
        self.assertEqual(list(apple.tags.all()), [])
        self.assertEqual(list(food_model.tags.all()), [])
        user1 = self.random_user()
        category1 = self.random_tag_category()
        add_tags(apple, 'green', category1.slug, user1, 'food')
        self.assert_tags_equal(apple.tags.all(), ['green'])
        self.assert_tags_equal(food_model.tags.all(), ['green'])
        # test existing tag with different taggeditem
        orange = food_model.objects.create(name="orange")
        add_tags(orange, 'green', category1.slug, user1, 'food')
        self.assert_tags_equal(apple.tags.all(), ['green'])
        self.assert_tags_equal(orange.tags.all(), ['green'])
        self.assert_tags_equal(food_model.tags.all(), ['green'])
        self.assert_tagged_items_equal(TaggedItem.objects.filter(content_type__name='food'),
                                       [('apple','green'),('orange','green')])
        self.assert_tagged_items_equal(TaggedItem.objects.filter(tag_category=category1),
                                       [('apple','green'),('orange','green')])
        self.assert_tagged_items_equal(TaggedItem.objects.filter(tag_creator=user1),
                                       [('apple','green'),('orange','green')])
    def test_add_tags_duplicate_taggeditem(self):
        """
        add_tags function should only create a TaggedItem if it doesn't
        already exist
        """
        food_model = Food
        apple = food_model.objects.create(name="apple")
        self.assertEqual(list(apple.tags.all()), [])
        self.assertEqual(list(food_model.tags.all()), [])
        user1 = self.random_user()
        category1 = self.random_tag_category()
        add_tags(apple, 'green', category1.slug, user1, 'food')
        self.assert_tags_equal(apple.tags.all(), ['green'])
        self.assert_tags_equal(food_model.tags.all(), ['green'])
        self.assert_tagged_items_equal(TaggedItem.objects.filter(content_type__name='food'),
                                       [('apple','green')])
        self.assert_tagged_items_equal(TaggedItem.objects.filter(tag_category=category1),
                                       [('apple','green')])
        self.assert_tagged_items_equal(TaggedItem.objects.filter(tag_creator=user1),
                                       [('apple','green')])
        # test exact duplicate
        add_tags(apple, 'green', category1.slug, user1, 'food')
        self.assert_tagged_items_equal(TaggedItem.objects.filter(content_type__name='food'),
                                       [('apple','green')])
        self.assert_tagged_items_equal(TaggedItem.objects.filter(tag_category=category1),
                                       [('apple','green')])
        self.assert_tagged_items_equal(TaggedItem.objects.filter(tag_creator=user1),
                                       [('apple','green')])
        # test duplicate with different category
        category2 = self.random_tag_category()
        add_tags(apple, 'green', category2.slug, user1, 'food')
        self.assert_tagged_items_equal(TaggedItem.objects.filter(content_type__name='food'),
                                       [('apple','green'), ('apple','green')])
        self.assert_tagged_items_equal(TaggedItem.objects.filter(tag_category=category2),
                                       [('apple','green')])
        self.assert_tagged_items_equal(TaggedItem.objects.filter(tag_creator=user1),
                                       [('apple','green'),('apple','green')])
        # test duplicate with different content_type
        food_model2 = Pet
        food_model2.objects.create(name="apple", id=apple.id)
        add_tags(apple, 'green', category2.slug, user1, 'pet')
        self.assert_tagged_items_equal(TaggedItem.objects.filter(content_type__name='pet'),
                                       [('apple','green')])
        self.assert_tagged_items_equal(TaggedItem.objects.filter(tag_category=category2),
                                       [('apple','green'),('apple','green')])
        self.assert_tagged_items_equal(TaggedItem.objects.filter(tag_creator=user1),
                                       [('apple','green'),('apple','green'),('apple','green')])
        # test duplicate with different creator
        user2 = self.random_user()
        add_tags(apple, 'green', category2.slug, user2, 'food')
        self.assert_tagged_items_equal(TaggedItem.objects.filter(content_type__name='pet'),
                                       [('apple','green')])
        self.assert_tagged_items_equal(TaggedItem.objects.filter(tag_category=category2),
                                       [('apple','green'), ('apple','green')])
        self.assert_tagged_items_equal(TaggedItem.objects.filter(tag_creator=user2),
                                       [])
        # ensure the old user didn't change either
        self.assert_tagged_items_equal(TaggedItem.objects.filter(tag_creator=user1),
                                       [('apple','green'),('apple','green'),('apple','green')])
class TaggableManagerTestCase(BaseTaggingTestCase):
    """Tests for TaggableManager: add/remove, lookups, similarity and
    deletion behavior. Subclasses swap in alternate model variants."""
    food_model = Food
    pet_model = Pet
    housepet_model = HousePet
    taggeditem_model = TaggedItem
    tag_model = Tag
    def test_add_tag(self):
        apple = self.food_model.objects.create(name="apple")
        self.assertEqual(list(apple.tags.all()), [])
        self.assertEqual(list(self.food_model.tags.all()), [])
        apple.tags.add('green')
        self.assert_tags_equal(apple.tags.all(), ['green'])
        self.assert_tags_equal(self.food_model.tags.all(), ['green'])
        pear = self.food_model.objects.create(name="pear")
        pear.tags.add('green')
        self.assert_tags_equal(pear.tags.all(), ['green'])
        self.assert_tags_equal(self.food_model.tags.all(), ['green'])
        apple.tags.add('red')
        self.assert_tags_equal(apple.tags.all(), ['green', 'red'])
        self.assert_tags_equal(self.food_model.tags.all(), ['green', 'red'])
        # most_common() returns tags ordered by usage count.
        self.assert_tags_equal(
            self.food_model.tags.most_common(),
            ['green', 'red'],
            sort=False
        )
        apple.tags.remove('green')
        self.assert_tags_equal(apple.tags.all(), ['red'])
        self.assert_tags_equal(self.food_model.tags.all(), ['green', 'red'])
        tag = self.tag_model.objects.create(name="delicious")
        apple.tags.add(tag)
        self.assert_tags_equal(apple.tags.all(), ["red", "delicious"])
        apple.delete()
        self.assert_tags_equal(self.food_model.tags.all(), ["green"])
    @override_settings(TAGGIT_FORCE_LOWERCASE=False)
    def test_add_mixed_case_tags(self):
        apple = self.food_model.objects.create(name="apple")
        apple.tags.add('red')
        apple.tags.add('Marlene')
        self.assert_tags_equal(apple.tags.all(), ["red", "Marlene"])
    @override_settings(TAGGIT_FORCE_LOWERCASE=True)
    def test_add_mixed_case_tags_with_lowercase_forced(self):
        apple = self.food_model.objects.create(name="apple")
        apple.tags.add('red')
        apple.tags.add('Marlene')
        self.assert_tags_equal(apple.tags.all(), ["red", "marlene"])
    def test_require_pk(self):
        # Tagging an unsaved instance (no pk) must fail.
        food_instance = self.food_model()
        self.assertRaises(ValueError, lambda: food_instance.tags.all())
    def test_delete_obj(self):
        apple = self.food_model.objects.create(name="apple")
        apple.tags.add("red")
        self.assert_tags_equal(apple.tags.all(), ["red"])
        strawberry = self.food_model.objects.create(name="strawberry")
        strawberry.tags.add("red")
        apple.delete()
        self.assert_tags_equal(strawberry.tags.all(), ["red"])
    def test_delete_bulk(self):
        apple = self.food_model.objects.create(name="apple")
        # Same pk in a different model: bulk delete must not remove its tags.
        kitty = self.pet_model.objects.create(pk=apple.pk, name="kitty")
        apple.tags.add("red", "delicious", "fruit")
        kitty.tags.add("feline")
        self.food_model.objects.all().delete()
        self.assert_tags_equal(kitty.tags.all(), ["feline"])
    def test_lookup_by_tag(self):
        apple = self.food_model.objects.create(name="apple")
        apple.tags.add("red", "green")
        pear = self.food_model.objects.create(name="pear")
        pear.tags.add("green")
        self.assertEqual(
            list(self.food_model.objects.filter(tags__name__in=["red"])),
            [apple]
        )
        self.assertEqual(
            list(self.food_model.objects.filter(tags__name__in=["green"])),
            [apple, pear]
        )
        kitty = self.pet_model.objects.create(name="kitty")
        kitty.tags.add("fuzzy", "red")
        dog = self.pet_model.objects.create(name="dog")
        dog.tags.add("woof", "red")
        self.assertEqual(
            list(self.food_model.objects.filter(
                tags__name__in=["red"]).distinct()),
            [apple]
        )
        tag = self.tag_model.objects.get(name="woof")
        self.assertEqual(
            list(self.pet_model.objects.filter(tags__in=[tag])), [dog])
        cat = self.housepet_model.objects.create(name="cat", trained=True)
        cat.tags.add("fuzzy")
        self.assertEqual(
            map(lambda o: o.pk, self.pet_model.objects.filter(
                tags__name__in=["fuzzy"])).sort(),
            [kitty.pk, cat.pk].sort()
        )
    def test_exclude(self):
        apple = self.food_model.objects.create(name="apple")
        apple.tags.add("red", "green", "delicious")
        pear = self.food_model.objects.create(name="pear")
        pear.tags.add("green", "delicious")
        guava = self.food_model.objects.create(name="guava")
        self.assertEqual(
            map(lambda o: o.pk, self.food_model.objects.exclude(
                tags__name__in=["red"])).sort(),
            [pear.pk, guava.pk].sort(),
        )
    def test_similarity_by_tag(self):
        """Test that pears are more similar to apples than watermelons"""
        apple = self.food_model.objects.create(name="apple")
        apple.tags.add("green", "juicy", "small", "sour")
        pear = self.food_model.objects.create(name="pear")
        pear.tags.add("green", "juicy", "small", "sweet")
        watermelon = self.food_model.objects.create(name="watermelon")
        watermelon.tags.add("green", "juicy", "large", "sweet")
        similar_objs = apple.tags.similar_objects()
        self.assertEqual(similar_objs, [pear, watermelon])
        self.assertEqual(map(lambda x: x.similar_tags, similar_objs), [3, 2])
    def test_tag_reuse(self):
        apple = self.food_model.objects.create(name="apple")
        apple.tags.add("juicy", "juicy")
        self.assert_tags_equal(apple.tags.all(), ['juicy'])
    def test_query_traverse(self):
        spot = self.pet_model.objects.create(name='Spot')
        spike = self.pet_model.objects.create(name='Spike')
        spot.tags.add('scary')
        spike.tags.add('fluffy')
        lookup_kwargs = {
            '%s__name' % self.pet_model._meta.module_name: 'Spot'
        }
        self.assert_tags_equal(
            self.tag_model.objects.filter(**lookup_kwargs),
            ['scary']
        )
    def test_taggeditem_unicode(self):
        ross = self.pet_model.objects.create(name="ross")
        # I keep Ross Perot for a pet, what's it to you?
        ross.tags.add("president")
        self.assertEqual(
            unicode(self.taggeditem_model.objects.all()[0]),
            "ross tagged with president"
        )
    def test_abstract_subclasses(self):
        p = Photo.objects.create()
        p.tags.add("outdoors", "pretty")
        self.assert_tags_equal(
            p.tags.all(),
            ["outdoors", "pretty"]
        )
        m = Movie.objects.create()
        m.tags.add("hd")
        self.assert_tags_equal(
            m.tags.all(),
            ["hd"],
        )
class TaggableManagerDirectTestCase(TaggableManagerTestCase):
    """Manager tests against the direct (non-generic FK) model variants."""
    food_model = DirectFood
    pet_model = DirectPet
    housepet_model = DirectHousePet
    taggeditem_model = TaggedPet
class TaggableManagerCustomPKTestCase(TaggableManagerTestCase):
    """Manager tests against models with a custom (char) primary key."""
    food_model = CustomPKFood
    pet_model = CustomPKPet
    housepet_model = CustomPKHousePet
    taggeditem_model = TaggedCustomPKPet
    def test_require_pk(self):
        # TODO with a charfield pk, pk is never None, so taggit has no way to
        # tell if the instance is saved or not
        pass
class TaggableManagerOfficialTestCase(TaggableManagerTestCase):
    """Manager tests against the custom through-model variants, plus the
    extra fields those through models expose."""
    food_model = OfficialFood
    pet_model = OfficialPet
    housepet_model = OfficialHousePet
    taggeditem_model = OfficialThroughModel
    tag_model = OfficialTag
    def test_extra_fields(self):
        self.tag_model.objects.create(name="red")
        self.tag_model.objects.create(name="delicious", official=True)
        apple = self.food_model.objects.create(name="apple")
        apple.tags.add("delicious", "red")
        pear = self.food_model.objects.create(name="Pear")
        pear.tags.add("delicious")
        # Filtering on the custom 'official' field must traverse correctly.
        self.assertEqual(
            map(lambda o: o.pk, self.food_model.objects.filter(
                tags__official=False)),
            [apple.pk],
        )
class TaggableFormTestCase(BaseTaggingTestCase):
    """Tests for ModelForm integration: rendering, saving and the tag
    edit-string quoting rules (commas and spaces force quoting)."""
    form_class = FoodForm
    food_model = Food
    def test_form(self):
        self.assertEqual(self.form_class.base_fields.keys(), ['name', 'tags'])
        f = self.form_class({'name': 'apple', 'tags': 'green, red, yummy'})
        self.assert_form_renders(f, """<tr><th><label for="id_name">Name:</label></th><td><input id="id_name" maxlength="50" name="name" type="text" value="apple" /></td></tr>
<tr><th><label for="id_tags">Tags:</label></th><td><input id="id_tags" name="tags" type="text" value="green, red, yummy" /><br />%(help_start)sA comma-separated list of tags.%(help_stop)s</td></tr>""")
        f.save()
        apple = self.food_model.objects.get(name='apple')
        self.assert_tags_equal(apple.tags.all(), ['green', 'red', 'yummy'])
        f = self.form_class({'name': 'apple', 'tags':
                             'green, red, yummy, delicious'}, instance=apple)
        f.save()
        apple = self.food_model.objects.get(name='apple')
        self.assert_tags_equal(
            apple.tags.all(), ['green', 'red', 'yummy', 'delicious'])
        self.assertEqual(self.food_model.objects.count(), 1)
        f = self.form_class({"name": "raspberry"})
        self.assertFalse(f.is_valid())
        f = self.form_class(instance=apple)
        self.assert_form_renders(f, """<tr><th><label for="id_name">Name:</label></th><td><input id="id_name" maxlength="50" name="name" type="text" value="apple" /></td></tr>
<tr><th><label for="id_tags">Tags:</label></th><td><input id="id_tags" name="tags" type="text" value="delicious, green, red, yummy" /><br />%(help_start)sA comma-separated list of tags.%(help_stop)s</td></tr>""")
        apple.tags.add('has,comma')
        f = self.form_class(instance=apple)
        self.assert_form_renders(f, """<tr><th><label for="id_name">Name:</label></th><td><input id="id_name" maxlength="50" name="name" type="text" value="apple" /></td></tr>
<tr><th><label for="id_tags">Tags:</label></th><td><input id="id_tags" name="tags" type="text" value="&quot;has,comma&quot;, delicious, green, red, yummy" /><br />%(help_start)sA comma-separated list of tags.%(help_stop)s</td></tr>""")
        apple.tags.add('has space')
        f = self.form_class(instance=apple)
        self.assert_form_renders(f, """<tr><th><label for="id_name">Name:</label></th><td><input id="id_name" maxlength="50" name="name" type="text" value="apple" /></td></tr>
<tr><th><label for="id_tags">Tags:</label></th><td><input id="id_tags" name="tags" type="text" value="&quot;has space&quot;, &quot;has,comma&quot;, delicious, green, red, yummy" /><br />%(help_start)sA comma-separated list of tags.%(help_stop)s</td></tr>""")
    def test_formfield(self):
        tm = TaggableManager(verbose_name='categories',
                             help_text='Add some categories', blank=True)
        ff = tm.formfield()
        self.assertEqual(ff.label, 'Categories')
        self.assertEqual(ff.help_text, u'Add some categories')
        self.assertEqual(ff.required, False)
        self.assertEqual(ff.clean(""), [])
        tm = TaggableManager()
        ff = tm.formfield()
        self.assertRaises(ValidationError, ff.clean, "")
class TaggableFormDirectTestCase(TaggableFormTestCase):
    """Form tests against the direct (non-generic FK) model variants."""
    form_class = DirectFoodForm
    food_model = DirectFood
class TaggableFormCustomPKTestCase(TaggableFormTestCase):
    """Form tests against models with a custom primary key."""
    form_class = CustomPKFoodForm
    food_model = CustomPKFood
class TaggableFormOfficialTestCase(TaggableFormTestCase):
    """Form tests against the custom through-model variants."""
    form_class = OfficialFoodForm
    food_model = OfficialFood
class TagStringParseTestCase(UnitTestCase):
    """
    Ported from Jonathan Buchanan's `django-tagging
    <http://django-tagging.googlecode.com/>`_
    """
    def test_with_simple_space_delimited_tags(self):
        """
        Test with simple space-delimited tags.
        """
        self.assertEqual(parse_tags('one'), [u'one'])
        self.assertEqual(parse_tags('one two'), [u'one', u'two'])
        self.assertEqual(
            parse_tags('one two three'), [u'one', u'three', u'two'])
        self.assertEqual(parse_tags('one one two two'), [u'one', u'two'])
    def test_with_comma_delimited_multiple_words(self):
        """
        Test with comma-delimited multiple words.
        An unquoted comma in the input will trigger this.
        """
        self.assertEqual(parse_tags(',one'), [u'one'])
        self.assertEqual(parse_tags(',one two'), [u'one two'])
        self.assertEqual(parse_tags(',one two three'), [u'one two three'])
        self.assertEqual(parse_tags('a-one, a-two and a-three'),
                         [u'a-one', u'a-two and a-three'])
    def test_with_double_quoted_multiple_words(self):
        """
        Test with double-quoted multiple words.
        A completed quote will trigger this. Unclosed quotes are ignored.
        """
        self.assertEqual(parse_tags('"one'), [u'one'])
        self.assertEqual(parse_tags('"one two'), [u'one', u'two'])
        self.assertEqual(
            parse_tags('"one two three'), [u'one', u'three', u'two'])
        self.assertEqual(parse_tags('"one two"'), [u'one two'])
        self.assertEqual(parse_tags('a-one "a-two and a-three"'),
                         [u'a-one', u'a-two and a-three'])
    def test_with_no_loose_commas(self):
        """
        Test with no loose commas -- split on spaces.
        """
        self.assertEqual(
            parse_tags('one two "thr,ee"'), [u'one', u'thr,ee', u'two'])
    def test_with_loose_commas(self):
        """
        Loose commas - split on commas
        """
        self.assertEqual(
            parse_tags('"one", two three'), [u'one', u'two three'])
    def test_tags_with_double_quotes_can_contain_commas(self):
        """
        Double quotes can contain commas
        """
        self.assertEqual(parse_tags('a-one "a-two, and a-three"'),
                         [u'a-one', u'a-two, and a-three'])
        self.assertEqual(parse_tags('"two", one, one, two, "one"'),
                         [u'one', u'two'])
    def test_with_naughty_input(self):
        """
        Test with naughty input.
        """
        # Bad users! Naughty users!
        self.assertEqual(parse_tags(None), [])
        self.assertEqual(parse_tags(''), [])
        self.assertEqual(parse_tags('"'), [])
        self.assertEqual(parse_tags('""'), [])
        self.assertEqual(parse_tags('"' * 7), [])
        self.assertEqual(parse_tags(',,,,,,'), [])
        self.assertEqual(parse_tags('",",",",",",","'), [u','])
        self.assertEqual(parse_tags('a-one "a-two" and "a-three'),
                         [u'a-one', u'a-three', u'a-two', u'and'])
    def test_recreation_of_tag_list_string_representations(self):
        # Round-trip: edit_string_for_tags must quote names that contain
        # spaces or commas so parse_tags can reconstruct them.
        plain = Tag.objects.create(name='plain')
        spaces = Tag.objects.create(name='spa ces')
        comma = Tag.objects.create(name='com,ma')
        self.assertEqual(edit_string_for_tags([plain]), u'plain')
        self.assertEqual(
            edit_string_for_tags([plain, spaces]), u'"spa ces", plain')
        self.assertEqual(edit_string_for_tags(
            [plain, spaces, comma]), u'"com,ma", "spa ces", plain')
        self.assertEqual(
            edit_string_for_tags([plain, comma]), u'"com,ma", plain')
        self.assertEqual(
            edit_string_for_tags([comma, spaces]), u'"com,ma", "spa ces"')
| cc0-1.0 |
tensorflow/text | tensorflow_text/python/ops/sentence_breaking_ops.py | 1 | 7117 | # coding=utf-8
# Copyright 2021 TF.Text Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Break sentence ops."""
import abc
from tensorflow.python.ops.ragged import ragged_tensor
from tensorflow.python.util import deprecation
from tensorflow.python.framework import load_library
from tensorflow.python.platform import resource_loader
gen_sentence_breaking_ops = load_library.load_op_library(resource_loader.get_path_to_datafile('_sentence_breaking_ops.so'))
class SentenceBreaker(object):
  """An abstract base class for sentence breaker implementations.

  Subclasses must override `break_sentences`; this base only defines the
  interface.
  """
  @abc.abstractmethod
  def break_sentences(self, input):  # pylint: disable=redefined-builtin
    """Splits `input` into sentences.
    Args:
      input: A string `Tensor` of shape [batch] with a batch of documents.
    Returns:
      A string `RaggedTensor` of shape [batch, (num_sentences)] with each input
      broken up into its constituent sentences.
    """
    raise NotImplementedError()
class SentenceBreakerWithOffsets(SentenceBreaker, abc.ABC):
  """An abstract base class for sentence breakers that support offsets.

  In addition to the inherited `break_sentences`, subclasses must implement
  `break_sentences_with_offsets`.  `abc.ABC` is mixed in so the
  `@abc.abstractmethod` decorator below is enforced even when the base class
  does not use the `ABCMeta` metaclass.
  """

  @abc.abstractmethod
  def break_sentences_with_offsets(self, input):  # pylint: disable=redefined-builtin
    """Splits `input` into sentences and returns the starting & ending offsets.

    Args:
      input: A string `Tensor` of shape [batch] with a batch of documents.

    Returns:
      A tuple of (sentences, begin_offset, end_offset) where:

      sentences: A string `RaggedTensor` of shape [batch, (num_sentences)] with
        each input broken up into its constituent sentences.
      begin_offset: A int64 `RaggedTensor` of shape [batch, (num_sentences)]
        where each entry is the inclusive beginning byte offset of a sentence.
      end_offset: A int64 `RaggedTensor` of shape [batch, (num_sentences)]
        where each entry is the exclusive ending byte offset of a sentence.
    """
    raise NotImplementedError()
@deprecation.deprecated(None,
                        "Deprecated, use 'StateBasedSentenceBreaker' instead.")
def sentence_fragments(token_word,
                       token_starts,
                       token_ends,
                       token_properties,
                       input_encoding='UTF-8',
                       errors='replace',
                       replacement_char=0xFFFD,
                       replace_control_characters=False):
  """Find the sentence fragments in a given text.

  A sentence fragment is a potential next sentence determined using
  deterministic heuristics based on punctuation, capitalization, and similar
  text attributes.

  NOTE: This op is deprecated. Use `StateBasedSentenceBreaker` instead.

  Args:
    token_word: A Tensor (w/ rank=2) or a RaggedTensor (w/ ragged_rank=1)
      containing the token strings.
    token_starts: A Tensor (w/ rank=2) or a RaggedTensor (w/ ragged_rank=1)
      containing offsets where the token starts.
    token_ends: A Tensor (w/ rank=2) or a RaggedTensor (w/ ragged_rank=1)
      containing offsets where the token ends.
    token_properties: A Tensor (w/ rank=2) or a RaggedTensor (w/ ragged_rank=1)
      containing a bitmask.

      The values of the bitmask are:

      * 0x01 (ILL_FORMED) - Text is ill-formed: typically applies to all
        tokens of a paragraph that is too short or lacks terminal punctuation.
      * 0x02 (HEADING)
      * 0x04 (BOLD)
      * 0x10 (UNDERLINED)
      * 0x20 (LIST)
      * 0x40 (TITLE)
      * 0x80 (EMOTICON)
      * 0x100 (ACRONYM) - Token was identified as an acronym. Period-,
        hyphen-, and space-separated acronyms: "U.S.", "U-S", and "U S".
      * 0x200 (HYPERLINK) - Indicates that the token (or part of the token) is
        covered by at least one hyperlink.
    input_encoding: String name for the unicode encoding that should be used to
      decode each string.
    errors: Specifies the response when an input string can't be converted
      using the indicated encoding. One of:

      * `'strict'`: Raise an exception for any illegal substrings.
      * `'replace'`: Replace illegal substrings with `replacement_char`.
      * `'ignore'`: Skip illegal substrings.
    replacement_char: The replacement codepoint to be used in place of invalid
      substrings in `input` when `errors='replace'`; and in place of C0 control
      characters in `input` when `replace_control_characters=True`.
    replace_control_characters: Whether to replace the C0 control characters
      `(U+0000 - U+001F)` with the `replacement_char`.

  Returns:
    A RaggedTensor of `fragment_start`, `fragment_end`, `fragment_properties`
    and `terminal_punc_token`.

    `fragment_properties` is an int32 bitmask whose values may contain:

    * 1 = fragment ends with terminal punctuation
    * 2 = fragment ends with multiple terminal punctuations (e.g.
      "She said what?!")
    * 3 = Has close parenthesis (e.g. "Mushrooms (they're fungi).")
    * 4 = Has sentential close parenthesis (e.g. "(Mushrooms are fungi!)")

    `terminal_punc_token` is a RaggedTensor containing the index of terminal
    punctuation token immediately following the last word in the fragment
    -- or index of the last word itself, if it's an acronym (since acronyms
    include the terminal punctuation). index of the terminal punctuation
    token.
  """  # pylint: disable=pointless-string-statement
  # Canonicalize all four token inputs to RaggedTensors so their flat values
  # and row partitions can be handed to the underlying kernel.
  if not isinstance(token_starts, ragged_tensor.RaggedTensor):
    token_starts = ragged_tensor.RaggedTensor.from_tensor(token_starts)
  if not isinstance(token_ends, ragged_tensor.RaggedTensor):
    token_ends = ragged_tensor.RaggedTensor.from_tensor(token_ends)
  if not isinstance(token_word, ragged_tensor.RaggedTensor):
    token_word = ragged_tensor.RaggedTensor.from_tensor(token_word)
  if not isinstance(token_properties, ragged_tensor.RaggedTensor):
    token_properties = ragged_tensor.RaggedTensor.from_tensor(token_properties)
  # The kernel works on flat (batch-major) values plus a row_lengths vector.
  # NOTE(review): only token_starts' row_lengths is passed, so all four
  # inputs are assumed to share the same row partition -- confirm at callers.
  fragment = gen_sentence_breaking_ops.sentence_fragments(
      errors=errors,
      replacement_char=replacement_char,
      replace_control_characters=replace_control_characters,
      input_encoding=input_encoding,
      row_lengths=token_starts.row_lengths(),
      token_start=token_starts.flat_values,
      token_end=token_ends.flat_values,
      token_word=token_word.flat_values,
      token_properties=token_properties.flat_values)
  # Re-wrap each flat output with the row lengths returned by the kernel.
  start, end, properties, terminal_punc_token, row_lengths = fragment
  return tuple(
      ragged_tensor.RaggedTensor.from_row_lengths(value, row_lengths)
      for value in [start, end, properties, terminal_punc_token])
| apache-2.0 |
AOSP-S4-KK/platform_external_chromium_org | tools/telemetry/telemetry/core/platform/profiler/netlog_profiler.py | 26 | 1525 | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import tempfile

from telemetry.core.platform import profiler
class NetLogProfiler(profiler.Profiler):
  """Profiler that captures Chrome's network log (chrome://net-internals).

  Launches the browser with --log-net-log pointed at a temp file (or a fixed
  path on Android) and collects that file when profiling stops.
  """

  _NET_LOG_ARG = '--log-net-log='

  @classmethod
  def name(cls):
    # Identifier used to select this profiler from the command line.
    return 'netlog'

  @classmethod
  def is_supported(cls, browser_type):
    # Not supported on Chrome OS browsers.
    return not browser_type.startswith('cros')

  @classmethod
  def CustomizeBrowserOptions(cls, browser_type, options):
    """Adds the --log-net-log browser argument pointing at the dump file."""
    if browser_type.startswith('android'):
      # The browser runs on the device, so the log must be written there.
      dump_file = '/sdcard/net-internals-profile.json'
    else:
      # Bug fix: mkstemp() returns an *open* OS file descriptor as well as
      # the path; close it so it is not leaked (the browser reopens the file
      # by path itself).
      fd, dump_file = tempfile.mkstemp()
      os.close(fd)
    options.AppendExtraBrowserArgs([cls._NET_LOG_ARG + dump_file])

  def CollectProfile(self):
    """Returns a list with the path to the collected net-log file."""
    # Recover the output filename from the browser argument added in
    # CustomizeBrowserOptions.  Initialize first so a missing argument fails
    # the assert below instead of raising NameError.
    output_file = None
    for arg in self._browser_backend.browser_options.extra_browser_args:
      if arg.startswith(self._NET_LOG_ARG):
        output_file = arg[len(self._NET_LOG_ARG):]
    assert output_file, 'no %s argument found' % self._NET_LOG_ARG
    # On Android pull the output file to the host.
    if self._platform_backend.GetOSName() == 'android':
      host_output_file = '%s.json' % self._output_path
      # NOTE(review): the chained .Adb().Adb() is kept from the original but
      # looks suspicious -- verify against the adb_commands API.
      self._browser_backend.adb.Adb().Adb().Pull(output_file, host_output_file)
      # Clean the device.
      self._browser_backend.adb.Adb().RunShellCommand('rm %s' % output_file)
      output_file = host_output_file
    print('Net-internals log saved as %s' % output_file)
    print('To view, open in chrome://net-internals')
    return [output_file]
| bsd-3-clause |
mjirayu/sit_academy | lms/envs/content.py | 168 | 1088 | """
These are debug machines used for content creators, so they're kind of a cross
between dev machines and AWS machines.
"""
# We intentionally define lots of variables that aren't used, and
# want to import all variables from base settings files
# pylint: disable=wildcard-import, unused-wildcard-import
from .aws import *
DEBUG = True
TEMPLATE_DEBUG = True
# Dump outgoing mail to the console instead of sending it.
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
################################ DEBUG TOOLBAR #################################
# Enable django-debug-toolbar with its standard panel set.
INSTALLED_APPS += ('debug_toolbar',)
MIDDLEWARE_CLASSES += ('debug_toolbar.middleware.DebugToolbarMiddleware',)
DEBUG_TOOLBAR_PANELS = (
    'debug_toolbar.panels.versions.VersionsPanel',
    'debug_toolbar.panels.timer.TimerPanel',
    'debug_toolbar.panels.settings.SettingsPanel',
    'debug_toolbar.panels.headers.HeadersPanel',
    'debug_toolbar.panels.request.RequestPanel',
    'debug_toolbar.panels.sql.SQLPanel',
    'debug_toolbar.panels.signals.SignalsPanel',
    'debug_toolbar.panels.logging.LoggingPanel',
    'debug_toolbar.panels.profiling.ProfilingPanel',
)
| agpl-3.0 |
UavLabsColombia/pibotballs | src/OtrosArchivos/pibotballs.py | 2 | 9675 | # Este codigo es una implementacion que se realiza con las librerias de vision por computador
# disponibles bajo python para el control de robots Recoge Bolas.
#
# Realizado por: Estudiantes de la universidad del valle.
# Heberth Alexander Ardila Cuellar / heberthardila@gmail.com / 3128204694 / uavlabs.org
# Jaime Andres Ortiz Aranda / jaime-aranda@outlook.com / 3023476635 ###
# Juan Sebastian Bolivar Rivera / sebasbr_1031@hotmail.com / 3157634355 /
#
# Este software se encuentra bajo la licencia GPLv3 disponible sobre este repositorio, sientase libre de modificarlo
# ajustarlo y redistribuirlo manteniendo la licencia y los autores
#
# Sobre los siguientes diagramas, se describen la conexiones que se ralizan sobre los sensores y el sistema
#
######################################
# Motoreductores DC 12v
# Motor_izquierdo_adelante, motor_izquierdo_atras, motor_derecho_adelante, motor_derecho_atras:
# _Front_
# _________________
# --------------------------------
#
# M1 =In1,In2 M2=In1,In2
# GPIO.M1= 23,29,31 GPIO.M2= 37,35,33
#
#
# M3=In3,In4 M4= In3,In4
# GPIO.M3 = 19,21,7 GPIO.M4= 15,13,11
#
# --------------------------------
######################################
#
######################################
##Controladores L298N
## Se utilizaran dos controladores para los motores
# Cont1, Cont2:
# (EnA)
# (In1)
# (In2)
# (In3)
# (In4)
# (EnB)
######################################
#
######################################
##Sensor Sonar HC SR04 Utilizado para calcular el promedio de la distancia a un objeto.
#sonar_izquierdo, sonar_derecho, sonar_frente_izquierdo, sonar_frente_derecho, sonar_frente
#(Trig)
#(Echo)
######################################
#
######################################
##Conexiones con RaspberryPi
# Conexiones USB:
# Conectaremos por USB la camara SJCAM M10, el cual sera utiizada
# para trabajar con las librerias de Vision Artificial
## Pines de conexion para la raspberry pi 3 en modo BOARD
# (01) 3.3V (02) 5v input/ouput
# (03) Free (04) 5v input/ouput
# (05) Free (06) GND
# (07) Cont1.M3.In2* (08) sonar_frente_derecho.trig* // ok
# (09) GND (10) sonar_frente_derecho.echo* // ok
# (11) Cont1.M4.In3* (12) sonar_izquierdo.trig // ok
# (13) Cont1.M4.In4* (14) GND
# (15) Cont1.M4.EnB* (16) sonar_izquierdo.echo // ok
# (17) 3.3V (18) sonar_frente.trig // ok
# (19) Cont1.M3.EnA* def (20) Tierra
# (21) Cont1.M3.In1* def (22) sonar_frente_echo // ok
# (23) Cont2.M1.EnA* //ok (24) sonar_derecho.trig* // ok
# (25) GND (26) sonar_derecho.echo* // ok
# (27) N/C (28) N/C
# (29) Cont2.M1.In1* // ok (30) Tierra
# (31) Cont2.M1.In2* // ok (32) Servo motor recogedor
# (33) Cont2.M2.In3* // ok (34) Tierra
# (35) Cont2.M2.In4* // ok (36) sonar_frente_izquierdo.trig // ok
# (37) Cont2.M2.EnB* // ok (38) sonar_frente_izquierdo.echo // ok
# (39) GND (40) Boton de Reset
#
#######################################
# System library (argv / exit).
import sys
# from hcsr04sensor import sensor
# GPIO library for the Raspberry Pi pins.
import RPi.GPIO as GPIO
# Timing helpers.
import time
# Threading support (imported for future multi-threaded sensing).
from threading import Thread
# SimpleCV computer-vision library (the star-import also brings in cv2).
from SimpleCV import *
# Load dependencies and configuration.
# Pin numbering mode: BOARD (header positions) rather than BCM numbers.
GPIO.setmode(GPIO.BOARD)
## Module-level state shared by the functions below.
# Motor pin assignments:
# 0 = motor1A 1=motor1B 2=motor2A 3=motor2B
channel = [11, 12, 13, 15, 16, 18]
# channel=[13,15]
# Installed OpenCV version (cv2 comes from the SimpleCV star-import).
cversion = cv2.__version__
##
##cont=0
# Screen coordinates and radius of the last circular object detected.
xcord = 0
ycord = 0
radiopelota = 0
# Startup banner.
print "Iniciando el software para el control del robot...."
print "Info de la PI"
print GPIO.RPI_INFO
# Python version.
print "Version de python:", sys.version
# Report the configured pin-numbering mode (10 = BOARD, 11 = BCM).
mode = GPIO.getmode()
if (mode == 10):
    print "modo de la tarjeta:", mode, "(BOARD)"
if (mode == 11):
    print "modo de la tarjeta:", mode, "(BCM)"
# Silence pin re-configuration warnings.
GPIO.setwarnings(False)
# Configure the motor-control pins as outputs and report each one
# (gpio_function: 0 = output, 1 = input).
GPIO.setup(channel[0], GPIO.OUT)
print "Puerto:", channel[0], "Estado:", GPIO.gpio_function(channel[0])
GPIO.setup(channel[1], GPIO.OUT)
print "Puerto:", channel[1], "Estado:", GPIO.gpio_function(channel[1])
GPIO.setup(channel[2], GPIO.OUT)
print "Puerto:", channel[2], "Estado:", GPIO.gpio_function(channel[2])
GPIO.setup(channel[3], GPIO.OUT)
print "Puerto:", channel[3], "Estado:", GPIO.gpio_function(channel[3])
GPIO.setup(channel[4], GPIO.OUT)
print "Puerto:", channel[4], "Estado:", GPIO.gpio_function(channel[4])
GPIO.setup(channel[5], GPIO.OUT)
print "Puerto:", channel[5], "Estado:", GPIO.gpio_function(channel[5])
## Start the camera.
print "Iniciando SimpleCV y camara.."
cam = SimpleCV.Camera(0)
print "Camara OK"
## Motor movements.
# The following functions set the robot's spin direction: forward,
# backward, right, left, stop.
def izquierda():
    # Turn left: one direction input high, the other low.
    print "Izquierda"
    GPIO.output(channel[3], GPIO.HIGH)
    GPIO.output(channel[2], GPIO.LOW)
def derecha():
    # Turn right: mirror image of izquierda().
    print "Derecha"
    GPIO.output(channel[2], GPIO.HIGH)
    GPIO.output(channel[3], GPIO.LOW)
def adelante():
    # Forward.
    # NOTE(review): this drives channel[0] LOW / channel[1] HIGH, exactly the
    # same pin states as atras() below, so forward and backward are currently
    # identical -- one of them is probably inverted.  Verify on hardware.
    print "Adelante"
    GPIO.output(channel[0], GPIO.LOW)
    GPIO.output(channel[1], GPIO.HIGH)
    # GPIO.output(channel[0], GPIO.LOW)
def atras():
    # Backward (see the NOTE on adelante()).
    print "Atras"
    GPIO.output(channel[1], GPIO.HIGH)
    GPIO.output(channel[0], GPIO.LOW)
def stop():
    # Stop: drive the enable pins (channel[4], channel[5]) LOW.
    print "Stop"
    GPIO.output(channel[4], GPIO.LOW)
    GPIO.output(channel[5], GPIO.LOW)
## Sonars available to the robot.
# Trigger / echo BOARD pin numbers for the HC-SR04 sensors; index i of both
# lists refers to the same physical sensor.
sonar_trig = [21, 23, 25, 27]
sonar_echo = [22, 24, 26, 28]
## HC-SR04 ultrasonic driver: returns the distance (in cm) to an obstacle.
def dist_objeto(trig, echo):
    """Measure the distance to an obstacle with an HC-SR04 sensor.

    Args:
        trig: BOARD pin number wired to the sensor's trigger input.
        echo: BOARD pin number wired to the sensor's echo output.

    Returns:
        Distance to the nearest obstacle in centimetres (float).
    """
    print("Distancia en proceso de calculo...")
    # 0 means "no reading yet".
    distancia = 0
    # Configure the trigger as output and the echo as input.
    GPIO.setup(trig, GPIO.OUT)
    GPIO.setup(echo, GPIO.IN)
    # Hold the trigger low briefly so consecutive pings do not interfere.
    GPIO.output(trig, GPIO.LOW)
    time.sleep(2 * 10 ** -6)
    # Emit the 10-microsecond trigger pulse.
    GPIO.output(trig, GPIO.HIGH)
    time.sleep(10 * 10 ** -6)
    GPIO.output(trig, GPIO.LOW)
    print("No llega senial")
    # Bug fix: initialize both timestamps so they are defined even when the
    # echo line is already high (previously the first loop body never ran and
    # pulse_start was unbound -> UnboundLocalError).
    pulse_start = time.time()
    pulse_end = pulse_start
    # Wait for the echo pulse to start; the last assignment before the line
    # goes high is the pulse start time.
    while GPIO.input(echo) == 0:
        pulse_start = time.time()
    print("llegando senial")
    # Wait for the echo pulse to end.
    # NOTE(review): both loops spin forever if the sensor never answers --
    # consider adding a timeout before relying on this in the field.
    while GPIO.input(echo) == 1:
        pulse_end = time.time()
    duracion_pulso = pulse_end - pulse_start
    # Speed of sound ~34300 cm/s; halve for the round trip.
    distancia = (duracion_pulso * 34300) / 2
    return distancia
## Samples the camera, looks for a circular blob, and publishes its screen
## coordinates and radius through the module globals xcord/ycord/radiopelota.
def hubicar_pelota():
    tiempo_inicial = time.time()
    print "Hubicando pelota.."
    # NOTE(review): range(1) makes this loop run exactly once; it looks like
    # a leftover from a multi-sample version.
    for i in range(1):
        global xcord
        global ycord
        global radiopelota
        # Reset the outputs; they stay 0 when no ball is found.
        xcord = 0
        ycord = 0
        radiopelota = 0
        img = cam.getImage().flipHorizontal()
        # Distance-from-white image, dilated, then stretched to segment the
        # bright regions.
        dist = img.colorDistance(SimpleCV.Color.WHITE).dilate(2)
        segmented = dist.stretch(230, 255)
        blobs = segmented.findBlobs()
        if blobs:
            # Keep only blobs that are roughly circular.
            circles = blobs.filter([b.isCircle(0.3) for b in blobs])
            if circles:
                # print "X:",circles[-1].x , "Y:", circles[-1].y, "Radio:", circles[-1].radius()
                # circles[-1] is presumably the largest candidate -- confirm
                # SimpleCV's findBlobs() sort order.
                xcord = circles[-1].x
                ycord = circles[-1].y
                radiopelota = circles[-1].radius()
    tiempo_final = time.time()
    print "Tiempo ejecucion:", tiempo_final - tiempo_inicial
    print "Xcord:", xcord, "Ycord:", ycord, "Radio", radiopelota
def donde_ir():
    # Placeholder for the navigation step once the ball has been reached.
    print "Llengo a la pelota"
def determinar_obstaculos():
    # Placeholder: meant to read the sonar distances and flag obstacles.
    print "Determinando obtaculos..."
# Obstacle detection will take the distances from the sonars.
# The next function reads a digital input (the start/stop push button),
# stopping or starting the system.
def pulsador():
    """Read the run/stop push button; 1 = run (hard-wired placeholder)."""
    return 1
## Top-level sensing pass over all the robot's sensors.
def sensar():
    # while True:
    # Order in which the system senses; sensor priority.
    hubicar_pelota()
    determinar_obstaculos()
def cerrar_conexion():
    # Release the GPIO pins and terminate the process cleanly.
    print " "
    print "Limpiando puerto GPIO..."
    GPIO.cleanup()
    print "Saliendo..."
    sys.exit(0)
def run():
    # Movement-logic entry point: currently just triggers a sensing pass.
    print "Logica de movimiento..."
    sensar()
# The core starts every execution cycle and checks all sensor states.
def core():
    # global cont
    # cont = cont + 1
    # pulsador() is a placeholder that always returns 1, so run() always
    # executes; once a real button is wired, 0 stops the motors.
    if (pulsador() == 0):
        stop()
    if (pulsador() == 1):
        run()
    # print "Cont:", cont
# Main entry point: loop forever until Ctrl-C, then release the GPIO pins.
try:
    while 1:
        core()
        # time.sleep(1)
except KeyboardInterrupt:
    pass
cerrar_conexion()
| gpl-3.0 |
Roboauto/Udacity3 | scripts/splitBag.py | 1 | 1659 | #!/usr/bin/python
import sys, math
import rosbag
# Topics that will remain in output bags; all other topics are dropped.
allowed_topics = ["/center_camera/image_color/compressed", "/center_camera/image_color",
                  "/vehicle/gps/fix"]
def getDigitCount(number):
    """Character width of the highest file number, used for zero-padding:
    25 resulting bags -> 2, 300 bags -> 3, 8 bags -> 1."""
    as_text = str(number)
    return len(as_text)
def getName(path):
    """Strip the final dot-suffix from *path*
    ('/home/robo/Overcast.bag' -> '/home/robo/Overcast')."""
    # rsplit keeps everything before the last '.'; with no '.' the whole
    # path is returned unchanged.
    return path.rsplit('.', 1)[0]
def splitBag(path, part_duration):
    """Split the bag at *path* into consecutive parts of *part_duration*
    seconds each, keeping only the topics listed in `allowed_topics`."""
    bag = rosbag.Bag(path, 'r')
    # Floor the start time so message timestamps map cleanly to part indexes.
    start = int(math.floor(bag.get_start_time()))
    end = bag.get_end_time()
    total_duration = end - start
    part_count = int(math.ceil(total_duration / part_duration)) #how many bags will be produced
    digitCount = getDigitCount(part_count + 1)
    # Open every output bag up front; messages are routed by timestamp below.
    opened_bags = []
    name = getName(path)
    for i in range(part_count):
        opened_bags.append(rosbag.Bag(name + str(i + 1).zfill(digitCount) + ".bag", 'w'))
    # Progress is reported in 5% steps; start below zero so 0% prints too.
    last_percentage = -5;
    for topic, msg, t in bag.read_messages(topics=allowed_topics):
        # Route each message by its header timestamp, not the record time t.
        index = int((msg.header.stamp.secs - start) / part_duration)
        opened_bags[index].write(topic, msg, t=t)
        # total_duration is a float (get_end_time returns float), so this
        # division is a true division even on Python 2.
        percentage = int(((msg.header.stamp.secs - start) / total_duration) * 100);
        if percentage >= last_percentage + 5:
            print(str(percentage) + " %")
            last_percentage = percentage
    for b in opened_bags:
        b.close()
    bag.close();
if __name__ == "__main__":
    path = sys.argv[1] #where the bag file is
    duration = int(sys.argv[2]) #how long should one bag be (in seconds)
splitBag(path, duration) | gpl-3.0 |
rspousta/sony_xperia_m | tools/perf/scripts/python/syscall-counts.py | 11181 | 1522 | # system call counts
# (c) 2010, Tom Zanussi <tzanussi@gmail.com>
# Licensed under the terms of the GNU GPL License version 2
#
# Displays system-wide system call totals, broken down by syscall.
# If a [comm] arg is specified, only syscalls called by [comm] are displayed.
import os
import sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from Util import syscall_name
usage = "perf script -s syscall-counts.py [comm]\n";
# Optional [comm] argument restricts counting to one process name.
for_comm = None
if len(sys.argv) > 2:
	sys.exit(usage)
if len(sys.argv) > 1:
	for_comm = sys.argv[1]
# syscall id -> hit count; autodict comes from the perf Core helpers.
syscalls = autodict()
def trace_begin():
	# Invoked by perf once before event processing starts.
	print "Press control+C to stop and show the summary"
def trace_end():
	# Invoked by perf after the last event: print the summary table.
	print_syscall_totals()
def raw_syscalls__sys_enter(event_name, context, common_cpu,
	common_secs, common_nsecs, common_pid, common_comm,
	id, args):
	# One callback per sys_enter tracepoint hit; count it unless filtering
	# on a specific command name.
	if for_comm is not None:
		if common_comm != for_comm:
			return
	try:
		syscalls[id] += 1
	except TypeError:
		# First hit for this id: the autodict entry is not yet an int.
		syscalls[id] = 1
def print_syscall_totals():
	if for_comm is not None:
		print "\nsyscall events for %s:\n\n" % (for_comm),
	else:
		print "\nsyscall events:\n\n",
	print "%-40s %10s\n" % ("event", "count"),
	print "%-40s %10s\n" % ("----------------------------------------", \
		"-----------"),
	# Sort by count, descending (Python 2 tuple-unpacking lambda).
	for id, val in sorted(syscalls.iteritems(), key = lambda(k, v): (v, k), \
		reverse = True):
		print "%-40s %10d\n" % (syscall_name(id), val),
| gpl-2.0 |
typesupply/defconAppKit | Lib/defconAppKit/representationFactories/__init__.py | 1 | 1688 | from defcon import Glyph, Image, registerRepresentationFactory
from defconAppKit.representationFactories.nsBezierPathFactory import NSBezierPathFactory
from defconAppKit.representationFactories.glyphCellFactory import GlyphCellFactory
from defconAppKit.representationFactories.glyphCellDetailFactory import GlyphCellDetailFactory
from defconAppKit.representationFactories.glyphViewFactories import NoComponentsNSBezierPathFactory,\
OnlyComponentsNSBezierPathFactory, OutlineInformationFactory, NSImageFactory
from defconAppKit.representationFactories.menuImageFactory import MenuImageRepresentationFactory
# Representation name -> (factory callable, destructive notification names).
# A None notification list means the default destructive notifications apply.
_glyphFactories = {
    "defconAppKit.NSBezierPath": (NSBezierPathFactory, None),
    "defconAppKit.NoComponentsNSBezierPath": (NoComponentsNSBezierPathFactory, None),
    "defconAppKit.OnlyComponentsNSBezierPath": (OnlyComponentsNSBezierPathFactory, None),
    "defconAppKit.GlyphCell": (GlyphCellFactory, None),
    "defconAppKit.GlyphCellDetail": (GlyphCellDetailFactory, None),
    "defconAppKit.OutlineInformation": (OutlineInformationFactory, None),
    "defconAppKit.MenuImage": (MenuImageRepresentationFactory, None),
}
_imageFactories = {
    "defconAppKit.NSImage": (NSImageFactory, ["Image.FileNameChanged", "Image.ColorChanged", "Image.ImageDataChanged"])
}
def registerAllFactories():
    """Register every defconAppKit glyph and image representation factory."""
    registrations = ((Glyph, _glyphFactories), (Image, _imageFactories))
    for owner, factories in registrations:
        for name, (factory, destructiveNotifications) in factories.items():
            registerRepresentationFactory(
                owner, name, factory,
                destructiveNotifications=destructiveNotifications)
| mit |
arpith/zulip | zerver/lib/parallel.py | 18 | 2115 | from __future__ import absolute_import
from __future__ import print_function
from typing import Any, Generator, Iterable, Tuple
import os
import pty
import sys
import errno
def run_parallel(job, data, threads=6):
    # type: (Any, Iterable[Any], int) -> Generator[Tuple[int, Any], None, None]
    """Fork one child per item (at most `threads` in flight), run job(item)
    in each child, and yield (exit_status, item) as children finish.  Stops
    launching new work after the first nonzero status."""
    pids = {} # type: Dict[int, Any]
    def wait_for_one():
        # type: () -> Tuple[int, Any]
        # Reap any child; loop because os.wait() can return a pid that is
        # not one of ours (pids.pop raises KeyError in that case).
        while True:
            try:
                (pid, status) = os.wait()
                return status, pids.pop(pid)
            except KeyError:
                pass
    for item in data:
        pid = os.fork()
        if pid == 0:
            # Child process: detach stdin so the job cannot block on the
            # parent's terminal, then exit with the job's return code.
            sys.stdin.close()
            try:
                os.close(pty.STDIN_FILENO)
            except OSError as e:
                if e.errno != errno.EBADF:
                    raise
            sys.stdin = open("/dev/null", "r") # type: ignore # py2 and py3 stubs are not compatible
            os._exit(job(item))
        pids[pid] = item
        # Parent: once `threads` children are in flight, wait for one to
        # finish before forking the next.
        threads = threads - 1
        if threads == 0:
            (status, item) = wait_for_one()
            threads += 1
            yield (status, item)
            if status != 0:
                # Stop if any error occurred
                break
    # Drain the remaining children until ECHILD says none are left.
    while True:
        try:
            (status, item) = wait_for_one()
            yield (status, item)
        except OSError as e:
            if e.errno == errno.ECHILD:
                break
            else:
                raise
if __name__ == "__main__":
    # run some unit tests
    import time
    jobs = [10, 19, 18, 6, 14, 12, 8, 2, 1, 13, 3, 17, 9, 11, 5, 16, 7, 15, 4]
    expected_output = [6, 10, 12, 2, 1, 14, 8, 3, 18, 19, 5, 9, 13, 11, 4, 7, 17, 16, 15]
    def wait_and_print(x):
        # type: (int) -> int
        # Sleep proportional to x so the completion order is deterministic.
        time.sleep(x * 0.1)
        return 0
    output = []
    for (status, job) in run_parallel(wait_and_print, jobs):
        output.append(job)
    # With the default 6 workers, completion order matches expected_output.
    if output == expected_output:
        print("Successfully passed test!")
    else:
        print("Failed test!")
        print(jobs)
        print(expected_output)
        print(output)
| apache-2.0 |
bspline/etcd | Godeps/_workspace/src/github.com/ugorji/go/codec/test.py | 670 | 3808 | #!/usr/bin/env python
# This will create golden files in a directory passed to it.
# A Test calls this internally to create the golden files
# So it can process them (so we don't have to checkin the files).
# Ensure msgpack-python and cbor are installed first, using:
# pip install --user msgpack-python
# pip install --user cbor
import cbor, msgpack, msgpackrpc, sys, os, threading
def get_test_data_list():
    """Return the fixture values that get serialized to the golden files:
    every primitive type, the primitive list itself as a nested element,
    and a few composite maps."""
    primitives = [
        -8,
        -1616,
        -32323232,
        -6464646464646464,
        192,
        1616,
        32323232,
        6464646464646464,
        192,
        -3232.0,
        -6464646464.0,
        3232.0,
        6464646464.0,
        False,
        True,
        None,
        u"someday",
        u"",
        u"bytestring",
        1328176922000002000,
        -2206187877999998000,
        270,
        -2013855847999995777,
        #-6795364578871345152,
    ]
    composites = [
        {"true": True,
         "false": False},
        {"true": "True",
         "false": False,
         "uint16(1616)": 1616},
        {"list": [1616, 32323232, True, -3232.0,
                  {"TRUE": True, "FALSE": False}, [True, False]],
         "int32": 32323232, "bool": True,
         "LONG STRING": "123456789012345678901234567890123456789012345678901234567890",
         "SHORT STRING": "1234567890"},
        {True: "true", 8: False, "false": 0},
    ]
    # Primitives first, then the primitive list nested as one element,
    # then the composite maps.
    return primitives + [primitives] + composites
def build_test_data(destdir):
    # Serialize each fixture value with both codecs and write the results
    # as numbered .golden files under destdir for the Go tests to read back.
    l = get_test_data_list()
    for i in range(len(l)):
        # packer = msgpack.Packer()
        serialized = msgpack.dumps(l[i])
        f = open(os.path.join(destdir, str(i) + '.msgpack.golden'), 'wb')
        f.write(serialized)
        f.close()
        serialized = cbor.dumps(l[i])
        f = open(os.path.join(destdir, str(i) + '.cbor.golden'), 'wb')
        f.write(serialized)
        f.close()
def doRpcServer(port, stopTimeSec):
    # Serve two echo endpoints over msgpack-rpc on localhost:port.
    class EchoHandler(object):
        def Echo123(self, msg1, msg2, msg3):
            return ("1:%s 2:%s 3:%s" % (msg1, msg2, msg3))
        def EchoStruct(self, msg):
            return ("%s" % msg)
    addr = msgpackrpc.Address('localhost', port)
    server = msgpackrpc.Server(EchoHandler())
    server.listen(addr)
    # run thread to stop it after stopTimeSec seconds if > 0
    if stopTimeSec > 0:
        def myStopRpcServer():
            server.stop()
        t = threading.Timer(stopTimeSec, myStopRpcServer)
        t.start()
    server.start()
def doRpcClientToPythonSvc(port):
    # Exercise the Python echo server started by doRpcServer().
    address = msgpackrpc.Address('localhost', port)
    client = msgpackrpc.Client(address, unpack_encoding='utf-8')
    print client.call("Echo123", "A1", "B2", "C3")
    print client.call("EchoStruct", {"A" :"Aa", "B":"Bb", "C":"Cc"})
def doRpcClientToGoSvc(port):
    # print ">>>> port: ", port, " <<<<<"
    # Same calls against the Go server; Go exposes the handlers under
    # TestRpcInt and takes positional args as a single list.
    address = msgpackrpc.Address('localhost', port)
    client = msgpackrpc.Client(address, unpack_encoding='utf-8')
    print client.call("TestRpcInt.Echo123", ["A1", "B2", "C3"])
    print client.call("TestRpcInt.EchoStruct", {"A" :"Aa", "B":"Bb", "C":"Cc"})
def doMain(args):
    # Dispatch on the first CLI argument; see the usage string below.
    if len(args) == 2 and args[0] == "testdata":
        build_test_data(args[1])
    elif len(args) == 3 and args[0] == "rpc-server":
        doRpcServer(int(args[1]), int(args[2]))
    elif len(args) == 2 and args[0] == "rpc-client-python-service":
        doRpcClientToPythonSvc(int(args[1]))
    elif len(args) == 2 and args[0] == "rpc-client-go-service":
        doRpcClientToGoSvc(int(args[1]))
    else:
        print("Usage: test.py " +
              "[testdata|rpc-server|rpc-client-python-service|rpc-client-go-service] ...")
if __name__ == "__main__":
    doMain(sys.argv[1:])
| apache-2.0 |
vitan/hue | desktop/core/ext-py/Django-1.6.10/tests/inline_formsets/tests.py | 49 | 6672 | from __future__ import absolute_import, unicode_literals
from django.forms.models import inlineformset_factory
from django.test import TestCase, skipUnlessDBFeature
from django.utils import six
from .models import Poet, Poem, School, Parent, Child
class DeletionTests(TestCase):
def test_deletion(self):
PoemFormSet = inlineformset_factory(Poet, Poem, can_delete=True, fields="__all__")
poet = Poet.objects.create(name='test')
poem = poet.poem_set.create(name='test poem')
data = {
'poem_set-TOTAL_FORMS': '1',
'poem_set-INITIAL_FORMS': '1',
'poem_set-MAX_NUM_FORMS': '0',
'poem_set-0-id': str(poem.pk),
'poem_set-0-poet': str(poet.pk),
'poem_set-0-name': 'test',
'poem_set-0-DELETE': 'on',
}
formset = PoemFormSet(data, instance=poet)
formset.save()
self.assertTrue(formset.is_valid())
self.assertEqual(Poem.objects.count(), 0)
def test_add_form_deletion_when_invalid(self):
"""
Make sure that an add form that is filled out, but marked for deletion
doesn't cause validation errors.
"""
PoemFormSet = inlineformset_factory(Poet, Poem, can_delete=True, fields="__all__")
poet = Poet.objects.create(name='test')
data = {
'poem_set-TOTAL_FORMS': '1',
'poem_set-INITIAL_FORMS': '0',
'poem_set-MAX_NUM_FORMS': '0',
'poem_set-0-id': '',
'poem_set-0-poem': '1',
'poem_set-0-name': 'x' * 1000,
}
formset = PoemFormSet(data, instance=poet)
# Make sure this form doesn't pass validation.
self.assertEqual(formset.is_valid(), False)
self.assertEqual(Poem.objects.count(), 0)
# Then make sure that it *does* pass validation and delete the object,
# even though the data isn't actually valid.
data['poem_set-0-DELETE'] = 'on'
formset = PoemFormSet(data, instance=poet)
self.assertEqual(formset.is_valid(), True)
formset.save()
self.assertEqual(Poem.objects.count(), 0)
def test_change_form_deletion_when_invalid(self):
"""
Make sure that a change form that is filled out, but marked for deletion
doesn't cause validation errors.
"""
PoemFormSet = inlineformset_factory(Poet, Poem, can_delete=True, fields="__all__")
poet = Poet.objects.create(name='test')
poem = poet.poem_set.create(name='test poem')
data = {
'poem_set-TOTAL_FORMS': '1',
'poem_set-INITIAL_FORMS': '1',
'poem_set-MAX_NUM_FORMS': '0',
'poem_set-0-id': six.text_type(poem.id),
'poem_set-0-poem': six.text_type(poem.id),
'poem_set-0-name': 'x' * 1000,
}
formset = PoemFormSet(data, instance=poet)
# Make sure this form doesn't pass validation.
self.assertEqual(formset.is_valid(), False)
self.assertEqual(Poem.objects.count(), 1)
# Then make sure that it *does* pass validation and delete the object,
# even though the data isn't actually valid.
data['poem_set-0-DELETE'] = 'on'
formset = PoemFormSet(data, instance=poet)
self.assertEqual(formset.is_valid(), True)
formset.save()
self.assertEqual(Poem.objects.count(), 0)
def test_save_new(self):
"""
Make sure inlineformsets respect commit=False
regression for #10750
"""
# exclude some required field from the forms
ChildFormSet = inlineformset_factory(School, Child, exclude=['father', 'mother'])
school = School.objects.create(name='test')
mother = Parent.objects.create(name='mother')
father = Parent.objects.create(name='father')
data = {
'child_set-TOTAL_FORMS': '1',
'child_set-INITIAL_FORMS': '0',
'child_set-MAX_NUM_FORMS': '0',
'child_set-0-name': 'child',
}
formset = ChildFormSet(data, instance=school)
self.assertEqual(formset.is_valid(), True)
objects = formset.save(commit=False)
for obj in objects:
obj.mother = mother
obj.father = father
obj.save()
self.assertEqual(school.child_set.count(), 1)
class InlineFormsetFactoryTest(TestCase):
    def test_inline_formset_factory(self):
        """
        These should both work without a problem.
        """
        inlineformset_factory(Parent, Child, fk_name='mother', fields="__all__")
        inlineformset_factory(Parent, Child, fk_name='father', fields="__all__")

    def test_exception_on_unspecified_foreign_key(self):
        """
        Child has two ForeignKeys to Parent, so if we don't specify which one
        to use for the inline formset, we should get an exception.
        """
        six.assertRaisesRegex(self, Exception,
            "<class 'inline_formsets.models.Child'> has more than 1 ForeignKey to <class 'inline_formsets.models.Parent'>",
            inlineformset_factory, Parent, Child
        )

    def test_fk_name_not_foreign_key_field_from_child(self):
        """
        If we specify fk_name, but it isn't a ForeignKey from the child model
        to the parent model, we should get an exception.
        """
        # Bug fix: this previously called self.assertRaises(Exception, <str>, ...),
        # placing the expected message in the *callable* position.  Calling a
        # str raises TypeError, which Exception also catches, so the test
        # passed vacuously without ever checking the message.  Use
        # six.assertRaisesRegex as the sibling tests do.
        six.assertRaisesRegex(self, Exception,
            "fk_name 'school' is not a ForeignKey to <class 'inline_formsets.models.Parent'>",
            inlineformset_factory, Parent, Child, fk_name='school'
        )

    def test_non_foreign_key_field(self):
        """
        If the field specified in fk_name is not a ForeignKey, we should get an
        exception.
        """
        six.assertRaisesRegex(self, Exception,
            "<class 'inline_formsets.models.Child'> has no field named 'test'",
            inlineformset_factory, Parent, Child, fk_name='test'
        )

    def test_any_iterable_allowed_as_argument_to_exclude(self):
        # Regression test for #9171.
        inlineformset_factory(
            Parent, Child, exclude=['school'], fk_name='mother'
        )
        inlineformset_factory(
            Parent, Child, exclude=('school',), fk_name='mother'
        )

    @skipUnlessDBFeature('allows_primary_key_0')
    def test_zero_primary_key(self):
        # Regression test for #21472
        poet = Poet.objects.create(id=0, name='test')
        poem = poet.poem_set.create(name='test poem')
        PoemFormSet = inlineformset_factory(Poet, Poem, fields="__all__", extra=0)
        formset = PoemFormSet(None, instance=poet)
        self.assertEqual(len(formset.forms), 1)
| apache-2.0 |
lmprice/ansible | lib/ansible/modules/packaging/os/apk.py | 82 | 11119 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2015, Kevin Brebanov <https://github.com/kbrebanov>
# Based on pacman (Afterburn <https://github.com/afterburn>, Aaron Bull Schaefer <aaron@elasticdog.com>)
# and apt (Matthew Williams <matthew@flowroute.com>) modules.
#
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['stableinterface'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: apk
short_description: Manages apk packages
description:
- Manages I(apk) packages for Alpine Linux.
author: "Kevin Brebanov (@kbrebanov)"
version_added: "2.0"
options:
available:
description:
- During upgrade, reset versioned world dependencies and change logic to prefer replacing or downgrading packages (instead of holding them)
if the currently installed package is no longer available from any repository.
type: bool
default: 'no'
version_added: "2.4"
name:
description:
- A package name, like C(foo), or multiple packages, like C(foo, bar).
repository:
description:
- A package repository or multiple repositories.
Unlike with the underlying apk command, this list will override the system repositories rather than supplement them.
version_added: "2.4"
state:
description:
- Indicates the desired package(s) state.
- C(present) ensures the package(s) is/are present.
- C(absent) ensures the package(s) is/are absent.
- C(latest) ensures the package(s) is/are present and the latest version(s).
default: present
choices: [ "present", "absent", "latest" ]
update_cache:
description:
- Update repository indexes. Can be run with other steps or on it's own.
type: bool
default: 'no'
upgrade:
description:
- Upgrade all installed packages to their latest version.
type: bool
default: 'no'
notes:
- '"name" and "upgrade" are mutually exclusive.'
- When used with a `loop:` each package will be processed individually, it is much more efficient to pass the list directly to the `name` option.
'''
EXAMPLES = '''
# Update repositories and install "foo" package
- apk:
name: foo
update_cache: yes
# Update repositories and install "foo" and "bar" packages
- apk:
name: foo,bar
update_cache: yes
# Remove "foo" package
- apk:
name: foo
state: absent
# Remove "foo" and "bar" packages
- apk:
name: foo,bar
state: absent
# Install the package "foo"
- apk:
name: foo
state: present
# Install the packages "foo" and "bar"
- apk:
name: foo,bar
state: present
# Update repositories and update package "foo" to latest version
- apk:
name: foo
state: latest
update_cache: yes
# Update repositories and update packages "foo" and "bar" to latest versions
- apk:
name: foo,bar
state: latest
update_cache: yes
# Update all installed packages to the latest versions
- apk:
upgrade: yes
# Upgrade / replace / downgrade / uninstall all installed packages to the latest versions available
- apk:
available: yes
upgrade: yes
# Update repositories as a separate step
- apk:
update_cache: yes
# Install package from a specific repository
- apk:
name: foo
state: latest
update_cache: yes
repository: http://dl-3.alpinelinux.org/alpine/edge/main
'''
RETURN = '''
packages:
description: a list of packages that have been changed
returned: when packages have changed
type: list
sample: ['package', 'other-package']
'''
import re
# Import module snippets.
from ansible.module_utils.basic import AnsibleModule
def parse_for_packages(stdout):
    """Extract package names from apk's progress output.

    Progress lines look like ``(1/3) Installing foo (1.0-r0)``; the token
    after the action word is the package name.
    """
    pattern = re.compile(r'^\(\d+/\d+\)\s+\S+\s+(\S+)')
    matches = (pattern.search(line) for line in stdout.split('\n'))
    return [match.group(1) for match in matches if match]
def update_package_db(module, exit):
    """Refresh the apk repository indexes.

    Fails the module on a non-zero exit code.  When *exit* is true the
    module exits successfully right away (used when the task only asked
    for a cache update); otherwise returns True so the caller continues.
    """
    rc, stdout, stderr = module.run_command("%s update" % (APK_PATH), check_rc=False)
    if rc != 0:
        module.fail_json(msg="could not update package db", stdout=stdout, stderr=stderr)
    if not exit:
        return True
    module.exit_json(changed=True, msg='updated repository indexes', stdout=stdout, stderr=stderr)
def query_toplevel(module, name):
    """Return True if *name* is a top-level (world) package.

    /etc/apk/world lists top-level packages separated by whitespace;
    entries may carry a repository pin (@) or version constraint (=<>~)
    suffix, or start with a negation (!), so match the bare name with an
    optional suffix.
    """
    pattern = re.compile(r'^' + re.escape(name) + r'([@=<>~].+)?$')
    with open('/etc/apk/world') as world:
        entries = world.read().split()
    return any(pattern.search(entry) for entry in entries)
def query_package(module, name):
    """Return True if package *name* is currently installed."""
    cmd = "%s -v info --installed %s" % (APK_PATH, name)
    rc, stdout, stderr = module.run_command(cmd, check_rc=False)
    # apk exits 0 only when the package is installed; collapse the
    # if/return-True/return-False ladder into the boolean itself.
    return rc == 0
def query_latest(module, name):
    """Return True unless apk reports a newer version of *name* is available.

    ``apk version`` prints ``<installed> <op> <candidate>``; the operator
    is ``<`` exactly when an upgrade exists.
    """
    rc, stdout, stderr = module.run_command("%s version %s" % (APK_PATH, name), check_rc=False)
    search_pattern = r"(%s)-[\d\.\w]+-[\d\w]+\s+(.)\s+[\d\.\w]+-[\d\w]+\s+" % (re.escape(name))
    match = re.search(search_pattern, stdout)
    if match is None:
        return True
    return match.group(2) != "<"
def query_virtual(module, name):
    """Return True if *name* is a virtual meta package."""
    cmd = "%s -v info --description %s" % (APK_PATH, name)
    rc, stdout, stderr = module.run_command(cmd, check_rc=False)
    search_pattern = r"^%s: virtual meta package" % (re.escape(name))
    # re.search returns a match object or None; compare against None to
    # keep the True/False return values instead of an if/else ladder.
    return re.search(search_pattern, stdout) is not None
def get_dependencies(module, name):
    """Return the list of packages *name* depends on.

    The first token of ``apk -v info --depends`` output is the queried
    package itself, so everything after it is a dependency.  Slicing an
    empty or single-token list naturally yields [], which makes the
    original ``len() > 1`` guard redundant.
    """
    cmd = "%s -v info --depends %s" % (APK_PATH, name)
    rc, stdout, stderr = module.run_command(cmd, check_rc=False)
    return stdout.split()[1:]
def upgrade_packages(module, available):
    """Upgrade all installed packages to their latest versions.

    Honours check mode via ``--simulate``.  When *available* is true,
    ``--available`` lets apk replace/downgrade packages whose installed
    version is no longer in any repository.  Always exits the module.
    """
    cmd = "%s upgrade" % (APK_PATH)
    if module.check_mode:
        cmd += " --simulate"
    if available:
        cmd = "%s --available" % cmd
    rc, stdout, stderr = module.run_command(cmd, check_rc=False)
    changed_packages = parse_for_packages(stdout)
    if rc != 0:
        module.fail_json(msg="failed to upgrade packages", stdout=stdout, stderr=stderr, packages=changed_packages)
    # apk prints a line starting with "OK" when nothing needed upgrading.
    if re.search(r'^OK', stdout):
        module.exit_json(changed=False, msg="packages already upgraded", stdout=stdout, stderr=stderr, packages=changed_packages)
    module.exit_json(changed=True, msg="upgraded packages", stdout=stdout, stderr=stderr, packages=changed_packages)
def install_packages(module, names, state):
    """Install the given packages, upgrading them when *state* is 'latest'.

    Virtual meta packages are expanded to their dependencies, each of
    which is considered for upgrade individually.  Exits the module with
    the outcome in every case.
    """
    to_install = []
    to_upgrade = []
    for name in names:
        if query_virtual(module, name):
            # A virtual package is never installed directly; upgrade its
            # dependencies individually when asked for the latest versions.
            for dependency in get_dependencies(module, name):
                if state == 'latest' and not query_latest(module, dependency):
                    to_upgrade.append(dependency)
        elif not query_toplevel(module, name):
            to_install.append(name)
        elif state == 'latest' and not query_latest(module, name):
            to_upgrade.append(name)
    if not to_install and not to_upgrade:
        module.exit_json(changed=False, msg="package(s) already installed")
    packages = " ".join(to_install + to_upgrade)
    # Build "apk add[ --upgrade][ --simulate] <packages>".
    flags = ""
    if to_upgrade:
        flags += " --upgrade"
    if module.check_mode:
        flags += " --simulate"
    cmd = "%s add%s %s" % (APK_PATH, flags, packages)
    rc, stdout, stderr = module.run_command(cmd, check_rc=False)
    packagelist = parse_for_packages(stdout)
    if rc != 0:
        module.fail_json(msg="failed to install %s" % (packages), stdout=stdout, stderr=stderr, packages=packagelist)
    module.exit_json(changed=True, msg="installed %s package(s)" % (packages), stdout=stdout, stderr=stderr, packages=packagelist)
def remove_packages(module, names):
    """Remove the given packages, honouring check mode.

    Exits the module with the outcome; fails when a package survives the
    removal (e.g. because another package still depends on it).
    """
    installed = [name for name in names if query_package(module, name)]
    if not installed:
        module.exit_json(changed=False, msg="package(s) already removed")
    names = " ".join(installed)
    if module.check_mode:
        cmd = "%s del --purge --simulate %s" % (APK_PATH, names)
    else:
        cmd = "%s del --purge %s" % (APK_PATH, names)
    rc, stdout, stderr = module.run_command(cmd, check_rc=False)
    packagelist = parse_for_packages(stdout)
    # Check to see if packages are still present because of dependencies.
    # Bug fix: skip this verification in check mode -- --simulate leaves
    # every package installed, so the loop would force rc = 1 and make
    # every check-mode removal falsely report failure.
    if not module.check_mode:
        for name in installed:
            if query_package(module, name):
                rc = 1
                break
    if rc != 0:
        module.fail_json(msg="failed to remove %s package(s)" % (names), stdout=stdout, stderr=stderr, packages=packagelist)
    module.exit_json(changed=True, msg="removed %s package(s)" % (names), stdout=stdout, stderr=stderr, packages=packagelist)
# ==========================================
# Main control flow.
def main():
    """Module entry point: parse parameters and dispatch to the apk actions."""
    module = AnsibleModule(
        argument_spec=dict(
            state=dict(default='present', choices=['present', 'installed', 'absent', 'removed', 'latest']),
            name=dict(type='list'),
            repository=dict(type='list'),
            update_cache=dict(default='no', type='bool'),
            upgrade=dict(default='no', type='bool'),
            available=dict(default='no', type='bool'),
        ),
        required_one_of=[['name', 'update_cache', 'upgrade']],
        mutually_exclusive=[['name', 'upgrade']],
        supports_check_mode=True
    )
    # Force a C locale so the stdout we parse is stable across systems.
    module.run_command_environ_update = dict(LANG='C', LC_ALL='C', LC_MESSAGES='C', LC_CTYPE='C')
    global APK_PATH
    APK_PATH = module.get_bin_path('apk', required=True)
    p = module.params
    # Fold the requested repositories into every apk invocation,
    # overriding the system repositories entirely.
    if p['repository']:
        for repo in p['repository']:
            APK_PATH = "%s --repository %s --repositories-file /dev/null" % (APK_PATH, repo)
    # Collapse the state aliases onto their canonical names.
    state_aliases = {'installed': 'present', 'removed': 'absent'}
    p['state'] = state_aliases.get(p['state'], p['state'])
    if p['update_cache']:
        # Exit right after the update when it is the only action requested.
        update_package_db(module, not p['name'] and not p['upgrade'])
    if p['upgrade']:
        upgrade_packages(module, p['available'])
    if p['state'] in ('present', 'latest'):
        install_packages(module, p['name'], p['state'])
    elif p['state'] == 'absent':
        remove_packages(module, p['name'])
if __name__ == '__main__':
main()
| gpl-3.0 |
Socrate2803/plugin.video.netflixbmc | resources/lib/pyOpenSSL/pyOpenSSL-0.13-py2.6-win32/OpenSSL/test/test_crypto.py | 70 | 101757 | # Copyright (c) Jean-Paul Calderone
# See LICENSE file for details.
"""
Unit tests for L{OpenSSL.crypto}.
"""
from unittest import main
import os, re
from subprocess import PIPE, Popen
from datetime import datetime, timedelta
from OpenSSL.crypto import TYPE_RSA, TYPE_DSA, Error, PKey, PKeyType
from OpenSSL.crypto import X509, X509Type, X509Name, X509NameType
from OpenSSL.crypto import X509Req, X509ReqType
from OpenSSL.crypto import X509Extension, X509ExtensionType
from OpenSSL.crypto import load_certificate, load_privatekey
from OpenSSL.crypto import FILETYPE_PEM, FILETYPE_ASN1, FILETYPE_TEXT
from OpenSSL.crypto import dump_certificate, load_certificate_request
from OpenSSL.crypto import dump_certificate_request, dump_privatekey
from OpenSSL.crypto import PKCS7Type, load_pkcs7_data
from OpenSSL.crypto import PKCS12, PKCS12Type, load_pkcs12
from OpenSSL.crypto import CRL, Revoked, load_crl
from OpenSSL.crypto import NetscapeSPKI, NetscapeSPKIType
from OpenSSL.crypto import sign, verify
from OpenSSL.test.util import TestCase, bytes, b
def normalize_certificate_pem(pem):
    """Round-trip a certificate PEM through OpenSSL to canonicalize its form."""
    cert = load_certificate(FILETYPE_PEM, pem)
    return dump_certificate(FILETYPE_PEM, cert)
def normalize_privatekey_pem(pem):
    """Round-trip a private-key PEM through OpenSSL to canonicalize its form."""
    key = load_privatekey(FILETYPE_PEM, pem)
    return dump_privatekey(FILETYPE_PEM, key)
root_cert_pem = b("""-----BEGIN CERTIFICATE-----
MIIC7TCCAlagAwIBAgIIPQzE4MbeufQwDQYJKoZIhvcNAQEFBQAwWDELMAkGA1UE
BhMCVVMxCzAJBgNVBAgTAklMMRAwDgYDVQQHEwdDaGljYWdvMRAwDgYDVQQKEwdU
ZXN0aW5nMRgwFgYDVQQDEw9UZXN0aW5nIFJvb3QgQ0EwIhgPMjAwOTAzMjUxMjM2
NThaGA8yMDE3MDYxMTEyMzY1OFowWDELMAkGA1UEBhMCVVMxCzAJBgNVBAgTAklM
MRAwDgYDVQQHEwdDaGljYWdvMRAwDgYDVQQKEwdUZXN0aW5nMRgwFgYDVQQDEw9U
ZXN0aW5nIFJvb3QgQ0EwgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBAPmaQumL
urpE527uSEHdL1pqcDRmWzu+98Y6YHzT/J7KWEamyMCNZ6fRW1JCR782UQ8a07fy
2xXsKy4WdKaxyG8CcatwmXvpvRQ44dSANMihHELpANTdyVp6DCysED6wkQFurHlF
1dshEaJw8b/ypDhmbVIo6Ci1xvCJqivbLFnbAgMBAAGjgbswgbgwHQYDVR0OBBYE
FINVdy1eIfFJDAkk51QJEo3IfgSuMIGIBgNVHSMEgYAwfoAUg1V3LV4h8UkMCSTn
VAkSjch+BK6hXKRaMFgxCzAJBgNVBAYTAlVTMQswCQYDVQQIEwJJTDEQMA4GA1UE
BxMHQ2hpY2FnbzEQMA4GA1UEChMHVGVzdGluZzEYMBYGA1UEAxMPVGVzdGluZyBS
b290IENBggg9DMTgxt659DAMBgNVHRMEBTADAQH/MA0GCSqGSIb3DQEBBQUAA4GB
AGGCDazMJGoWNBpc03u6+smc95dEead2KlZXBATOdFT1VesY3+nUOqZhEhTGlDMi
hkgaZnzoIq/Uamidegk4hirsCT/R+6vsKAAxNTcBjUeZjlykCJWy5ojShGftXIKY
w/njVbKMXrvc83qmTdGl3TAM0fxQIpqgcglFLveEBgzn
-----END CERTIFICATE-----
""")
root_key_pem = b("""-----BEGIN RSA PRIVATE KEY-----
MIICXQIBAAKBgQD5mkLpi7q6ROdu7khB3S9aanA0Zls7vvfGOmB80/yeylhGpsjA
jWen0VtSQke/NlEPGtO38tsV7CsuFnSmschvAnGrcJl76b0UOOHUgDTIoRxC6QDU
3claegwsrBA+sJEBbqx5RdXbIRGicPG/8qQ4Zm1SKOgotcbwiaor2yxZ2wIDAQAB
AoGBAPCgMpmLxzwDaUmcFbTJUvlLW1hoxNNYSu2jIZm1k/hRAcE60JYwvBkgz3UB
yMEh0AtLxYe0bFk6EHah11tMUPgscbCq73snJ++8koUw+csk22G65hOs51bVb7Aa
6JBe67oLzdtvgCUFAA2qfrKzWRZzAdhUirQUZgySZk+Xq1pBAkEA/kZG0A6roTSM
BVnx7LnPfsycKUsTumorpXiylZJjTi9XtmzxhrYN6wgZlDOOwOLgSQhszGpxVoMD
u3gByT1b2QJBAPtL3mSKdvwRu/+40zaZLwvSJRxaj0mcE4BJOS6Oqs/hS1xRlrNk
PpQ7WJ4yM6ZOLnXzm2mKyxm50Mv64109FtMCQQDOqS2KkjHaLowTGVxwC0DijMfr
I9Lf8sSQk32J5VWCySWf5gGTfEnpmUa41gKTMJIbqZZLucNuDcOtzUaeWZlZAkA8
ttXigLnCqR486JDPTi9ZscoZkZ+w7y6e/hH8t6d5Vjt48JVyfjPIaJY+km58LcN3
6AWSeGAdtRFHVzR7oHjVAkB4hutvxiOeiIVQNBhM6RSI9aBPMI21DoX2JRoxvNW2
cbvAhow217X9V0dVerEOKxnNYspXRrh36h7k4mQA+sDq
-----END RSA PRIVATE KEY-----
""")
server_cert_pem = b("""-----BEGIN CERTIFICATE-----
MIICKDCCAZGgAwIBAgIJAJn/HpR21r/8MA0GCSqGSIb3DQEBBQUAMFgxCzAJBgNV
BAYTAlVTMQswCQYDVQQIEwJJTDEQMA4GA1UEBxMHQ2hpY2FnbzEQMA4GA1UEChMH
VGVzdGluZzEYMBYGA1UEAxMPVGVzdGluZyBSb290IENBMCIYDzIwMDkwMzI1MTIz
NzUzWhgPMjAxNzA2MTExMjM3NTNaMBgxFjAUBgNVBAMTDWxvdmVseSBzZXJ2ZXIw
gZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBAL6m+G653V0tpBC/OKl22VxOi2Cv
lK4TYu9LHSDP9uDVTe7V5D5Tl6qzFoRRx5pfmnkqT5B+W9byp2NU3FC5hLm5zSAr
b45meUhjEJ/ifkZgbNUjHdBIGP9MAQUHZa5WKdkGIJvGAvs8UzUqlr4TBWQIB24+
lJ+Ukk/CRgasrYwdAgMBAAGjNjA0MB0GA1UdDgQWBBS4kC7Ij0W1TZXZqXQFAM2e
gKEG2DATBgNVHSUEDDAKBggrBgEFBQcDATANBgkqhkiG9w0BAQUFAAOBgQBh30Li
dJ+NlxIOx5343WqIBka3UbsOb2kxWrbkVCrvRapCMLCASO4FqiKWM+L0VDBprqIp
2mgpFQ6FHpoIENGvJhdEKpptQ5i7KaGhnDNTfdy3x1+h852G99f1iyj0RmbuFcM8
uzujnS8YXWvM7DM1Ilozk4MzPug8jzFp5uhKCQ==
-----END CERTIFICATE-----
""")
server_key_pem = normalize_privatekey_pem(b("""-----BEGIN RSA PRIVATE KEY-----
MIICWwIBAAKBgQC+pvhuud1dLaQQvzipdtlcTotgr5SuE2LvSx0gz/bg1U3u1eQ+
U5eqsxaEUceaX5p5Kk+QflvW8qdjVNxQuYS5uc0gK2+OZnlIYxCf4n5GYGzVIx3Q
SBj/TAEFB2WuVinZBiCbxgL7PFM1Kpa+EwVkCAduPpSflJJPwkYGrK2MHQIDAQAB
AoGAbwuZ0AR6JveahBaczjfnSpiFHf+mve2UxoQdpyr6ROJ4zg/PLW5K/KXrC48G
j6f3tXMrfKHcpEoZrQWUfYBRCUsGD5DCazEhD8zlxEHahIsqpwA0WWssJA2VOLEN
j6DuV2pCFbw67rfTBkTSo32ahfXxEKev5KswZk0JIzH3ooECQQDgzS9AI89h0gs8
Dt+1m11Rzqo3vZML7ZIyGApUzVan+a7hbc33nbGRkAXjHaUBJO31it/H6dTO+uwX
msWwNG5ZAkEA2RyFKs5xR5USTFaKLWCgpH/ydV96KPOpBND7TKQx62snDenFNNbn
FwwOhpahld+vqhYk+pfuWWUpQciE+Bu7ZQJASjfT4sQv4qbbKK/scePicnDdx9th
4e1EeB9xwb+tXXXUo/6Bor/AcUNwfiQ6Zt9PZOK9sR3lMZSsP7rMi7kzuQJABie6
1sXXjFH7nNJvRG4S39cIxq8YRYTy68II/dlB2QzGpKxV/POCxbJ/zu0CU79tuYK7
NaeNCFfH3aeTrX0LyQJAMBWjWmeKM2G2sCExheeQK0ROnaBC8itCECD4Jsve4nqf
r50+LF74iLXFwqysVCebPKMOpDWp/qQ1BbJQIPs7/A==
-----END RSA PRIVATE KEY-----
"""))
client_cert_pem = b("""-----BEGIN CERTIFICATE-----
MIICJjCCAY+gAwIBAgIJAKxpFI5lODkjMA0GCSqGSIb3DQEBBQUAMFgxCzAJBgNV
BAYTAlVTMQswCQYDVQQIEwJJTDEQMA4GA1UEBxMHQ2hpY2FnbzEQMA4GA1UEChMH
VGVzdGluZzEYMBYGA1UEAxMPVGVzdGluZyBSb290IENBMCIYDzIwMDkwMzI1MTIz
ODA1WhgPMjAxNzA2MTExMjM4MDVaMBYxFDASBgNVBAMTC3VnbHkgY2xpZW50MIGf
MA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDAZh/SRtNm5ntMT4qb6YzEpTroMlq2
rn+GrRHRiZ+xkCw/CGNhbtPir7/QxaUj26BSmQrHw1bGKEbPsWiW7bdXSespl+xK
iku4G/KvnnmWdeJHqsiXeUZtqurMELcPQAw9xPHEuhqqUJvvEoMTsnCEqGM+7Dtb
oCRajYyHfluARQIDAQABozYwNDAdBgNVHQ4EFgQUNQB+qkaOaEVecf1J3TTUtAff
0fAwEwYDVR0lBAwwCgYIKwYBBQUHAwIwDQYJKoZIhvcNAQEFBQADgYEAyv/Jh7gM
Q3OHvmsFEEvRI+hsW8y66zK4K5de239Y44iZrFYkt7Q5nBPMEWDj4F2hLYWL/qtI
9Zdr0U4UDCU9SmmGYh4o7R4TZ5pGFvBYvjhHbkSFYFQXZxKUi+WUxplP6I0wr2KJ
PSTJCjJOn3xo2NTKRgV1gaoTf2EhL+RG8TQ=
-----END CERTIFICATE-----
""")
client_key_pem = normalize_privatekey_pem(b("""-----BEGIN RSA PRIVATE KEY-----
MIICXgIBAAKBgQDAZh/SRtNm5ntMT4qb6YzEpTroMlq2rn+GrRHRiZ+xkCw/CGNh
btPir7/QxaUj26BSmQrHw1bGKEbPsWiW7bdXSespl+xKiku4G/KvnnmWdeJHqsiX
eUZtqurMELcPQAw9xPHEuhqqUJvvEoMTsnCEqGM+7DtboCRajYyHfluARQIDAQAB
AoGATkZ+NceY5Glqyl4mD06SdcKfV65814vg2EL7V9t8+/mi9rYL8KztSXGlQWPX
zuHgtRoMl78yQ4ZJYOBVo+nsx8KZNRCEBlE19bamSbQLCeQMenWnpeYyQUZ908gF
h6L9qsFVJepgA9RDgAjyDoS5CaWCdCCPCH2lDkdcqC54SVUCQQDseuduc4wi8h4t
V8AahUn9fn9gYfhoNuM0gdguTA0nPLVWz4hy1yJiWYQe0H7NLNNTmCKiLQaJpAbb
TC6vE8C7AkEA0Ee8CMJUc20BnGEmxwgWcVuqFWaKCo8jTH1X38FlATUsyR3krjW2
dL3yDD9NwHxsYP7nTKp/U8MV7U9IBn4y/wJBAJl7H0/BcLeRmuJk7IqJ7b635iYB
D/9beFUw3MUXmQXZUfyYz39xf6CDZsu1GEdEC5haykeln3Of4M9d/4Kj+FcCQQCY
si6xwT7GzMDkk/ko684AV3KPc/h6G0yGtFIrMg7J3uExpR/VdH2KgwMkZXisSMvw
JJEQjOMCVsEJlRk54WWjAkEAzoZNH6UhDdBK5F38rVt/y4SEHgbSfJHIAmPS32Kq
f6GGcfNpip0Uk7q7udTKuX7Q/buZi/C4YW7u3VKAquv9NA==
-----END RSA PRIVATE KEY-----
"""))
cleartextCertificatePEM = b("""-----BEGIN CERTIFICATE-----
MIIC7TCCAlagAwIBAgIIPQzE4MbeufQwDQYJKoZIhvcNAQEFBQAwWDELMAkGA1UE
BhMCVVMxCzAJBgNVBAgTAklMMRAwDgYDVQQHEwdDaGljYWdvMRAwDgYDVQQKEwdU
ZXN0aW5nMRgwFgYDVQQDEw9UZXN0aW5nIFJvb3QgQ0EwIhgPMjAwOTAzMjUxMjM2
NThaGA8yMDE3MDYxMTEyMzY1OFowWDELMAkGA1UEBhMCVVMxCzAJBgNVBAgTAklM
MRAwDgYDVQQHEwdDaGljYWdvMRAwDgYDVQQKEwdUZXN0aW5nMRgwFgYDVQQDEw9U
ZXN0aW5nIFJvb3QgQ0EwgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBAPmaQumL
urpE527uSEHdL1pqcDRmWzu+98Y6YHzT/J7KWEamyMCNZ6fRW1JCR782UQ8a07fy
2xXsKy4WdKaxyG8CcatwmXvpvRQ44dSANMihHELpANTdyVp6DCysED6wkQFurHlF
1dshEaJw8b/ypDhmbVIo6Ci1xvCJqivbLFnbAgMBAAGjgbswgbgwHQYDVR0OBBYE
FINVdy1eIfFJDAkk51QJEo3IfgSuMIGIBgNVHSMEgYAwfoAUg1V3LV4h8UkMCSTn
VAkSjch+BK6hXKRaMFgxCzAJBgNVBAYTAlVTMQswCQYDVQQIEwJJTDEQMA4GA1UE
BxMHQ2hpY2FnbzEQMA4GA1UEChMHVGVzdGluZzEYMBYGA1UEAxMPVGVzdGluZyBS
b290IENBggg9DMTgxt659DAMBgNVHRMEBTADAQH/MA0GCSqGSIb3DQEBBQUAA4GB
AGGCDazMJGoWNBpc03u6+smc95dEead2KlZXBATOdFT1VesY3+nUOqZhEhTGlDMi
hkgaZnzoIq/Uamidegk4hirsCT/R+6vsKAAxNTcBjUeZjlykCJWy5ojShGftXIKY
w/njVbKMXrvc83qmTdGl3TAM0fxQIpqgcglFLveEBgzn
-----END CERTIFICATE-----
""")
cleartextPrivateKeyPEM = normalize_privatekey_pem(b("""\
-----BEGIN RSA PRIVATE KEY-----
MIICXQIBAAKBgQD5mkLpi7q6ROdu7khB3S9aanA0Zls7vvfGOmB80/yeylhGpsjA
jWen0VtSQke/NlEPGtO38tsV7CsuFnSmschvAnGrcJl76b0UOOHUgDTIoRxC6QDU
3claegwsrBA+sJEBbqx5RdXbIRGicPG/8qQ4Zm1SKOgotcbwiaor2yxZ2wIDAQAB
AoGBAPCgMpmLxzwDaUmcFbTJUvlLW1hoxNNYSu2jIZm1k/hRAcE60JYwvBkgz3UB
yMEh0AtLxYe0bFk6EHah11tMUPgscbCq73snJ++8koUw+csk22G65hOs51bVb7Aa
6JBe67oLzdtvgCUFAA2qfrKzWRZzAdhUirQUZgySZk+Xq1pBAkEA/kZG0A6roTSM
BVnx7LnPfsycKUsTumorpXiylZJjTi9XtmzxhrYN6wgZlDOOwOLgSQhszGpxVoMD
u3gByT1b2QJBAPtL3mSKdvwRu/+40zaZLwvSJRxaj0mcE4BJOS6Oqs/hS1xRlrNk
PpQ7WJ4yM6ZOLnXzm2mKyxm50Mv64109FtMCQQDOqS2KkjHaLowTGVxwC0DijMfr
I9Lf8sSQk32J5VWCySWf5gGTfEnpmUa41gKTMJIbqZZLucNuDcOtzUaeWZlZAkA8
ttXigLnCqR486JDPTi9ZscoZkZ+w7y6e/hH8t6d5Vjt48JVyfjPIaJY+km58LcN3
6AWSeGAdtRFHVzR7oHjVAkB4hutvxiOeiIVQNBhM6RSI9aBPMI21DoX2JRoxvNW2
cbvAhow217X9V0dVerEOKxnNYspXRrh36h7k4mQA+sDq
-----END RSA PRIVATE KEY-----
"""))
cleartextCertificateRequestPEM = b("""-----BEGIN CERTIFICATE REQUEST-----
MIIBnjCCAQcCAQAwXjELMAkGA1UEBhMCVVMxCzAJBgNVBAgTAklMMRAwDgYDVQQH
EwdDaGljYWdvMRcwFQYDVQQKEw5NeSBDb21wYW55IEx0ZDEXMBUGA1UEAxMORnJl
ZGVyaWNrIERlYW4wgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBANp6Y17WzKSw
BsUWkXdqg6tnXy8H8hA1msCMWpc+/2KJ4mbv5NyD6UD+/SqagQqulPbF/DFea9nA
E0zhmHJELcM8gUTIlXv/cgDWnmK4xj8YkjVUiCdqKRAKeuzLG1pGmwwF5lGeJpXN
xQn5ecR0UYSOWj6TTGXB9VyUMQzCClcBAgMBAAGgADANBgkqhkiG9w0BAQUFAAOB
gQAAJGuF/R/GGbeC7FbFW+aJgr9ee0Xbl6nlhu7pTe67k+iiKT2dsl2ti68MVTnu
Vrb3HUNqOkiwsJf6kCtq5oPn3QVYzTa76Dt2y3Rtzv6boRSlmlfrgS92GNma8JfR
oICQk3nAudi6zl1Dix3BCv1pUp5KMtGn3MeDEi6QFGy2rA==
-----END CERTIFICATE REQUEST-----
""")
encryptedPrivateKeyPEM = b("""-----BEGIN RSA PRIVATE KEY-----
Proc-Type: 4,ENCRYPTED
DEK-Info: DES-EDE3-CBC,9573604A18579E9E
SHOho56WxDkT0ht10UTeKc0F5u8cqIa01kzFAmETw0MAs8ezYtK15NPdCXUm3X/2
a17G7LSF5bkxOgZ7vpXyMzun/owrj7CzvLxyncyEFZWvtvzaAhPhvTJtTIB3kf8B
8+qRcpTGK7NgXEgYBW5bj1y4qZkD4zCL9o9NQzsKI3Ie8i0239jsDOWR38AxjXBH
mGwAQ4Z6ZN5dnmM4fhMIWsmFf19sNyAML4gHenQCHhmXbjXeVq47aC2ProInJbrm
+00TcisbAQ40V9aehVbcDKtS4ZbMVDwncAjpXpcncC54G76N6j7F7wL7L/FuXa3A
fvSVy9n2VfF/pJ3kYSflLHH2G/DFxjF7dl0GxhKPxJjp3IJi9VtuvmN9R2jZWLQF
tfC8dXgy/P9CfFQhlinqBTEwgH0oZ/d4k4NVFDSdEMaSdmBAjlHpc+Vfdty3HVnV
rKXj//wslsFNm9kIwJGIgKUa/n2jsOiydrsk1mgH7SmNCb3YHgZhbbnq0qLat/HC
gHDt3FHpNQ31QzzL3yrenFB2L9osIsnRsDTPFNi4RX4SpDgNroxOQmyzCCV6H+d4
o1mcnNiZSdxLZxVKccq0AfRpHqpPAFnJcQHP6xyT9MZp6fBa0XkxDnt9kNU8H3Qw
7SJWZ69VXjBUzMlQViLuaWMgTnL+ZVyFZf9hTF7U/ef4HMLMAVNdiaGG+G+AjCV/
MbzjS007Oe4qqBnCWaFPSnJX6uLApeTbqAxAeyCql56ULW5x6vDMNC3dwjvS/CEh
11n8RkgFIQA0AhuKSIg3CbuartRsJnWOLwgLTzsrKYL4yRog1RJrtw==
-----END RSA PRIVATE KEY-----
""")
encryptedPrivateKeyPEMPassphrase = b("foobar")
# Some PKCS#7 stuff. Generated with the openssl command line:
#
# openssl crl2pkcs7 -inform pem -outform pem -certfile s.pem -nocrl
#
# with a certificate and key (but the key should be irrelevant) in s.pem
pkcs7Data = b("""\
-----BEGIN PKCS7-----
MIIDNwYJKoZIhvcNAQcCoIIDKDCCAyQCAQExADALBgkqhkiG9w0BBwGgggMKMIID
BjCCAm+gAwIBAgIBATANBgkqhkiG9w0BAQQFADB7MQswCQYDVQQGEwJTRzERMA8G
A1UEChMITTJDcnlwdG8xFDASBgNVBAsTC00yQ3J5cHRvIENBMSQwIgYDVQQDExtN
MkNyeXB0byBDZXJ0aWZpY2F0ZSBNYXN0ZXIxHTAbBgkqhkiG9w0BCQEWDm5ncHNA
cG9zdDEuY29tMB4XDTAwMDkxMDA5NTEzMFoXDTAyMDkxMDA5NTEzMFowUzELMAkG
A1UEBhMCU0cxETAPBgNVBAoTCE0yQ3J5cHRvMRIwEAYDVQQDEwlsb2NhbGhvc3Qx
HTAbBgkqhkiG9w0BCQEWDm5ncHNAcG9zdDEuY29tMFwwDQYJKoZIhvcNAQEBBQAD
SwAwSAJBAKy+e3dulvXzV7zoTZWc5TzgApr8DmeQHTYC8ydfzH7EECe4R1Xh5kwI
zOuuFfn178FBiS84gngaNcrFi0Z5fAkCAwEAAaOCAQQwggEAMAkGA1UdEwQCMAAw
LAYJYIZIAYb4QgENBB8WHU9wZW5TU0wgR2VuZXJhdGVkIENlcnRpZmljYXRlMB0G
A1UdDgQWBBTPhIKSvnsmYsBVNWjj0m3M2z0qVTCBpQYDVR0jBIGdMIGagBT7hyNp
65w6kxXlxb8pUU/+7Sg4AaF/pH0wezELMAkGA1UEBhMCU0cxETAPBgNVBAoTCE0y
Q3J5cHRvMRQwEgYDVQQLEwtNMkNyeXB0byBDQTEkMCIGA1UEAxMbTTJDcnlwdG8g
Q2VydGlmaWNhdGUgTWFzdGVyMR0wGwYJKoZIhvcNAQkBFg5uZ3BzQHBvc3QxLmNv
bYIBADANBgkqhkiG9w0BAQQFAAOBgQA7/CqT6PoHycTdhEStWNZde7M/2Yc6BoJu
VwnW8YxGO8Sn6UJ4FeffZNcYZddSDKosw8LtPOeWoK3JINjAk5jiPQ2cww++7QGG
/g5NDjxFZNDJP1dGiLAxPW6JXwov4v0FmdzfLOZ01jDcgQQZqEpYlgpuI5JEWUQ9
Ho4EzbYCOaEAMQA=
-----END PKCS7-----
""")
crlData = b("""\
-----BEGIN X509 CRL-----
MIIBWzCBxTANBgkqhkiG9w0BAQQFADBYMQswCQYDVQQGEwJVUzELMAkGA1UECBMC
SUwxEDAOBgNVBAcTB0NoaWNhZ28xEDAOBgNVBAoTB1Rlc3RpbmcxGDAWBgNVBAMT
D1Rlc3RpbmcgUm9vdCBDQRcNMDkwNzI2MDQzNDU2WhcNMTIwOTI3MDI0MTUyWjA8
MBUCAgOrGA8yMDA5MDcyNTIzMzQ1NlowIwICAQAYDzIwMDkwNzI1MjMzNDU2WjAM
MAoGA1UdFQQDCgEEMA0GCSqGSIb3DQEBBAUAA4GBAEBt7xTs2htdD3d4ErrcGAw1
4dKcVnIWTutoI7xxen26Wwvh8VCsT7i/UeP+rBl9rC/kfjWjzQk3/zleaarGTpBT
0yp4HXRFFoRhhSE/hP+eteaPXRgrsNRLHe9ZDd69wmh7J1wMDb0m81RG7kqcbsid
vrzEeLDRiiPl92dyyWmu
-----END X509 CRL-----
""")
# A broken RSA private key which can be used to test the error path through
# PKey.check.
inconsistentPrivateKeyPEM = b("""-----BEGIN RSA PRIVATE KEY-----
MIIBPAIBAAJBAKy+e3dulvXzV7zoTZWc5TzgApr8DmeQHTYC8ydfzH7EECe4R1Xh
5kwIzOuuFfn178FBiS84gngaNcrFi0Z5fAkCAwEaAQJBAIqm/bz4NA1H++Vx5Ewx
OcKp3w19QSaZAwlGRtsUxrP7436QjnREM3Bm8ygU11BjkPVmtrKm6AayQfCHqJoT
zIECIQDW0BoMoL0HOYM/mrTLhaykYAVqgIeJsPjvkEhTFXWBuQIhAM3deFAvWNu4
nklUQ37XsCT2c9tmNt1LAT+slG2JOTTRAiAuXDtC/m3NYVwyHfFm+zKHRzHkClk2
HjubeEgjpj32AQIhAJqMGTaZVOwevTXvvHwNeH+vRWsAYU/gbx+OQB+7VOcBAiEA
oolb6NMg/R3enNPvS1O4UU1H8wpaF77L4yiSWlE0p4w=
-----END RSA PRIVATE KEY-----
""")
class X509ExtTests(TestCase):
    """
    Tests for L{OpenSSL.crypto.X509Extension}.
    """

    def setUp(self):
        """
        Create a new private key and start a certificate request (for a test
        method to finish in one way or another).
        """
        # Basic setup stuff to generate a certificate
        self.pkey = PKey()
        self.pkey.generate_key(TYPE_RSA, 384)
        self.req = X509Req()
        self.req.set_pubkey(self.pkey)
        # Authority good you have.
        self.req.get_subject().commonName = "Yoda root CA"
        self.x509 = X509()
        self.subject = self.x509.get_subject()
        self.subject.commonName = self.req.get_subject().commonName
        self.x509.set_issuer(self.subject)
        self.x509.set_pubkey(self.pkey)
        now = b(datetime.now().strftime("%Y%m%d%H%M%SZ"))
        expire = b((datetime.now() + timedelta(days=100)).strftime("%Y%m%d%H%M%SZ"))
        self.x509.set_notBefore(now)
        self.x509.set_notAfter(expire)

    def test_str(self):
        """
        The string representation of L{X509Extension} instances as returned by
        C{str} includes stuff.
        """
        # This isn't necessarily the best string representation.  Perhaps it
        # will be changed/improved in the future.
        # Fix: use assertEqual -- assertEquals is a deprecated alias.
        self.assertEqual(
            str(X509Extension(b('basicConstraints'), True, b('CA:false'))),
            'CA:FALSE')

    def test_type(self):
        """
        L{X509Extension} and L{X509ExtensionType} refer to the same type object
        and can be used to create instances of that type.
        """
        self.assertIdentical(X509Extension, X509ExtensionType)
        self.assertConsistentType(
            X509Extension,
            'X509Extension', b('basicConstraints'), True, b('CA:true'))

    def test_construction(self):
        """
        L{X509Extension} accepts an extension type name, a critical flag,
        and an extension value and returns an L{X509ExtensionType} instance.
        """
        basic = X509Extension(b('basicConstraints'), True, b('CA:true'))
        self.assertTrue(
            isinstance(basic, X509ExtensionType),
            "%r is of type %r, should be %r" % (
                basic, type(basic), X509ExtensionType))
        comment = X509Extension(
            b('nsComment'), False, b('pyOpenSSL unit test'))
        self.assertTrue(
            isinstance(comment, X509ExtensionType),
            "%r is of type %r, should be %r" % (
                comment, type(comment), X509ExtensionType))

    def test_invalid_extension(self):
        """
        L{X509Extension} raises something if it is passed a bad extension
        name or value.
        """
        self.assertRaises(
            Error, X509Extension, b('thisIsMadeUp'), False, b('hi'))
        self.assertRaises(
            Error, X509Extension, b('basicConstraints'), False, b('blah blah'))
        # Exercise a weird one (an extension which uses the r2i method).  This
        # exercises the codepath that requires a non-NULL ctx to be passed to
        # X509V3_EXT_nconf.  It can't work now because we provide no
        # configuration database.  It might be made to work in the future.
        self.assertRaises(
            Error, X509Extension, b('proxyCertInfo'), True,
            b('language:id-ppl-anyLanguage,pathlen:1,policy:text:AB'))

    def test_get_critical(self):
        """
        L{X509ExtensionType.get_critical} returns the value of the
        extension's critical flag.
        """
        ext = X509Extension(b('basicConstraints'), True, b('CA:true'))
        self.assertTrue(ext.get_critical())
        ext = X509Extension(b('basicConstraints'), False, b('CA:true'))
        self.assertFalse(ext.get_critical())

    def test_get_short_name(self):
        """
        L{X509ExtensionType.get_short_name} returns a string giving the short
        type name of the extension.
        """
        ext = X509Extension(b('basicConstraints'), True, b('CA:true'))
        self.assertEqual(ext.get_short_name(), b('basicConstraints'))
        ext = X509Extension(b('nsComment'), True, b('foo bar'))
        self.assertEqual(ext.get_short_name(), b('nsComment'))

    def test_get_data(self):
        """
        L{X509Extension.get_data} returns a string giving the data of the
        extension.
        """
        ext = X509Extension(b('basicConstraints'), True, b('CA:true'))
        # Expect to get back the DER encoded form of CA:true.
        self.assertEqual(ext.get_data(), b('0\x03\x01\x01\xff'))

    def test_get_data_wrong_args(self):
        """
        L{X509Extension.get_data} raises L{TypeError} if passed any arguments.
        """
        ext = X509Extension(b('basicConstraints'), True, b('CA:true'))
        self.assertRaises(TypeError, ext.get_data, None)
        self.assertRaises(TypeError, ext.get_data, "foo")
        self.assertRaises(TypeError, ext.get_data, 7)

    def test_unused_subject(self):
        """
        The C{subject} parameter to L{X509Extension} may be provided for an
        extension which does not use it and is ignored in this case.
        """
        ext1 = X509Extension(
            b('basicConstraints'), False, b('CA:TRUE'), subject=self.x509)
        self.x509.add_extensions([ext1])
        self.x509.sign(self.pkey, 'sha1')
        # This is a little lame.  Can we think of a better way?
        text = dump_certificate(FILETYPE_TEXT, self.x509)
        self.assertTrue(b('X509v3 Basic Constraints:') in text)
        self.assertTrue(b('CA:TRUE') in text)

    def test_subject(self):
        """
        If an extension requires a subject, the C{subject} parameter to
        L{X509Extension} provides its value.
        """
        ext3 = X509Extension(
            b('subjectKeyIdentifier'), False, b('hash'), subject=self.x509)
        self.x509.add_extensions([ext3])
        self.x509.sign(self.pkey, 'sha1')
        text = dump_certificate(FILETYPE_TEXT, self.x509)
        self.assertTrue(b('X509v3 Subject Key Identifier:') in text)

    def test_missing_subject(self):
        """
        If an extension requires a subject and the C{subject} parameter is
        given no value, something happens.
        """
        self.assertRaises(
            Error, X509Extension, b('subjectKeyIdentifier'), False, b('hash'))

    def test_invalid_subject(self):
        """
        If the C{subject} parameter is given a value which is not an L{X509}
        instance, L{TypeError} is raised.
        """
        for badObj in [True, object(), "hello", [], self]:
            self.assertRaises(
                TypeError,
                X509Extension,
                'basicConstraints', False, 'CA:TRUE', subject=badObj)

    def test_unused_issuer(self):
        """
        The C{issuer} parameter to L{X509Extension} may be provided for an
        extension which does not use it and is ignored in this case.
        """
        ext1 = X509Extension(
            b('basicConstraints'), False, b('CA:TRUE'), issuer=self.x509)
        self.x509.add_extensions([ext1])
        self.x509.sign(self.pkey, 'sha1')
        text = dump_certificate(FILETYPE_TEXT, self.x509)
        self.assertTrue(b('X509v3 Basic Constraints:') in text)
        self.assertTrue(b('CA:TRUE') in text)

    def test_issuer(self):
        """
        If an extension requires a issuer, the C{issuer} parameter to
        L{X509Extension} provides its value.
        """
        ext2 = X509Extension(
            b('authorityKeyIdentifier'), False, b('issuer:always'),
            issuer=self.x509)
        self.x509.add_extensions([ext2])
        self.x509.sign(self.pkey, 'sha1')
        text = dump_certificate(FILETYPE_TEXT, self.x509)
        self.assertTrue(b('X509v3 Authority Key Identifier:') in text)
        self.assertTrue(b('DirName:/CN=Yoda root CA') in text)

    def test_missing_issuer(self):
        """
        If an extension requires an issue and the C{issuer} parameter is given
        no value, something happens.
        """
        self.assertRaises(
            Error,
            X509Extension,
            b('authorityKeyIdentifier'), False,
            b('keyid:always,issuer:always'))

    def test_invalid_issuer(self):
        """
        If the C{issuer} parameter is given a value which is not an L{X509}
        instance, L{TypeError} is raised.
        """
        for badObj in [True, object(), "hello", [], self]:
            self.assertRaises(
                TypeError,
                X509Extension,
                'authorityKeyIdentifier', False, 'keyid:always,issuer:always',
                issuer=badObj)
class PKeyTests(TestCase):
    """
    Unit tests for L{OpenSSL.crypto.PKey}.
    """

    def test_type(self):
        """
        L{PKey} and L{PKeyType} refer to the same type object and can be used
        to create instances of that type.
        """
        self.assertIdentical(PKey, PKeyType)
        self.assertConsistentType(PKey, 'PKey')

    def test_construction(self):
        """
        L{PKey} takes no arguments and returns a new L{PKey} instance.
        """
        self.assertRaises(TypeError, PKey, None)
        key = PKey()
        self.assertTrue(
            isinstance(key, PKeyType),
            "%r is of type %r, should be %r" % (key, type(key), PKeyType))

    def test_pregeneration(self):
        """
        L{PKeyType.bits} and L{PKeyType.type} return C{0} before the key is
        generated.  L{PKeyType.check} raises L{TypeError} before the key is
        generated.
        """
        key = PKey()
        self.assertEqual(key.type(), 0)
        self.assertEqual(key.bits(), 0)
        self.assertRaises(TypeError, key.check)

    def test_failedGeneration(self):
        """
        L{PKeyType.generate_key} takes two arguments, the first giving the key
        type as one of L{TYPE_RSA} or L{TYPE_DSA} and the second giving the
        number of bits to generate.  If an invalid type is specified or
        generation fails, L{Error} is raised.  If an invalid number of bits is
        specified, L{ValueError} or L{Error} is raised.
        """
        key = PKey()
        self.assertRaises(TypeError, key.generate_key)
        self.assertRaises(TypeError, key.generate_key, 1, 2, 3)
        self.assertRaises(TypeError, key.generate_key, "foo", "bar")
        self.assertRaises(Error, key.generate_key, -1, 0)
        self.assertRaises(ValueError, key.generate_key, TYPE_RSA, -1)
        self.assertRaises(ValueError, key.generate_key, TYPE_RSA, 0)
        # XXX RSA generation for small values of bits is fairly buggy in a wide
        # range of OpenSSL versions.  I need to figure out what the safe lower
        # bound for a reasonable number of OpenSSL versions is and explicitly
        # check for that in the wrapper.  The failure behavior is typically an
        # infinite loop inside OpenSSL.
        # self.assertRaises(Error, key.generate_key, TYPE_RSA, 2)
        # XXX DSA generation seems happy with any number of bits.  The DSS
        # says bits must be between 512 and 1024 inclusive.  OpenSSL's DSA
        # generator doesn't seem to care about the upper limit at all.  For
        # the lower limit, it uses 512 if anything smaller is specified.
        # So, it doesn't seem possible to make generate_key fail for
        # TYPE_DSA with a bits argument which is at least an int.
        # self.assertRaises(Error, key.generate_key, TYPE_DSA, -7)

    def test_rsaGeneration(self):
        """
        L{PKeyType.generate_key} generates an RSA key when passed
        L{TYPE_RSA} as a type and a reasonable number of bits.
        """
        # 128 bits is far below any secure size, but it keeps key
        # generation fast for a unit test.
        bits = 128
        key = PKey()
        key.generate_key(TYPE_RSA, bits)
        self.assertEqual(key.type(), TYPE_RSA)
        self.assertEqual(key.bits(), bits)
        self.assertTrue(key.check())

    def test_dsaGeneration(self):
        """
        L{PKeyType.generate_key} generates a DSA key when passed
        L{TYPE_DSA} as a type and a reasonable number of bits.
        """
        # 512 is a magic number.  The DSS (Digital Signature Standard)
        # allows a minimum of 512 bits for DSA.  DSA_generate_parameters
        # will silently promote any value below 512 to 512.
        bits = 512
        key = PKey()
        key.generate_key(TYPE_DSA, bits)
        self.assertEqual(key.type(), TYPE_DSA)
        self.assertEqual(key.bits(), bits)
        # NOTE(review): check raising TypeError here suggests it is not
        # implemented for DSA keys in this pyOpenSSL version -- confirm
        # against the crypto module before relying on this.
        self.assertRaises(TypeError, key.check)

    def test_regeneration(self):
        """
        L{PKeyType.generate_key} can be called multiple times on the same
        key to generate new keys.
        """
        key = PKey()
        for type, bits in [(TYPE_RSA, 512), (TYPE_DSA, 576)]:
            key.generate_key(type, bits)
            self.assertEqual(key.type(), type)
            self.assertEqual(key.bits(), bits)

    def test_inconsistentKey(self):
        """
        L{PKeyType.check} returns C{False} if the key is not consistent.
        """
        key = load_privatekey(FILETYPE_PEM, inconsistentPrivateKeyPEM)
        self.assertRaises(Error, key.check)

    def test_check_wrong_args(self):
        """
        L{PKeyType.check} raises L{TypeError} if called with any arguments.
        """
        self.assertRaises(TypeError, PKey().check, None)
        self.assertRaises(TypeError, PKey().check, object())
        self.assertRaises(TypeError, PKey().check, 1)
class X509NameTests(TestCase):
    """
    Unit tests for L{OpenSSL.crypto.X509Name}.
    """
    def _x509name(self, **attrs):
        """
        Create a fresh X509Name and set the given keyword arguments as
        attributes on it, in a deterministic order.
        """
        # XXX There's no other way to get a new X509Name yet.
        name = X509().get_subject()
        attrs = list(attrs.items())
        # Make the order stable - order matters!  Sorting on the attribute
        # value keeps the insertion order independent of keyword ordering.
        def key(attr):
            return attr[1]
        attrs.sort(key=key)
        for k, v in attrs:
            setattr(name, k, v)
        return name
    def test_type(self):
        """
        The type of X509Name objects is L{X509NameType}.
        """
        self.assertIdentical(X509Name, X509NameType)
        self.assertEqual(X509NameType.__name__, 'X509Name')
        self.assertTrue(isinstance(X509NameType, type))
        name = self._x509name()
        self.assertTrue(
            isinstance(name, X509NameType),
            "%r is of type %r, should be %r" % (
                name, type(name), X509NameType))
    def test_onlyStringAttributes(self):
        """
        Attempting to set a non-L{str} attribute name on an L{X509NameType}
        instance causes L{TypeError} to be raised.
        """
        name = self._x509name()
        # Beyond these cases, you may also think that unicode should be
        # rejected. Sorry, you're wrong. unicode is automatically converted to
        # str outside of the control of X509Name, so there's no way to reject
        # it.
        self.assertRaises(TypeError, setattr, name, None, "hello")
        self.assertRaises(TypeError, setattr, name, 30, "hello")
        # Even a str subclass is rejected as an attribute name.
        class evil(str):
            pass
        self.assertRaises(TypeError, setattr, name, evil(), "hello")
    def test_setInvalidAttribute(self):
        """
        Attempting to set any attribute name on an L{X509NameType} instance for
        which no corresponding NID is defined causes L{AttributeError} to be
        raised.
        """
        name = self._x509name()
        self.assertRaises(AttributeError, setattr, name, "no such thing", None)
    def test_attributes(self):
        """
        L{X509NameType} instances have attributes for each standard (?)
        X509Name field.
        """
        name = self._x509name()
        # A write through either the long name or the short alias must be
        # visible through both.
        name.commonName = "foo"
        self.assertEqual(name.commonName, "foo")
        self.assertEqual(name.CN, "foo")
        name.CN = "baz"
        self.assertEqual(name.commonName, "baz")
        self.assertEqual(name.CN, "baz")
        name.commonName = "bar"
        self.assertEqual(name.commonName, "bar")
        self.assertEqual(name.CN, "bar")
        name.CN = "quux"
        self.assertEqual(name.commonName, "quux")
        self.assertEqual(name.CN, "quux")
    def test_copy(self):
        """
        L{X509Name} creates a new L{X509NameType} instance with all the same
        attributes as an existing L{X509NameType} instance when called with
        one.
        """
        name = self._x509name(commonName="foo", emailAddress="bar@example.com")
        copy = X509Name(name)
        self.assertEqual(copy.commonName, "foo")
        self.assertEqual(copy.emailAddress, "bar@example.com")
        # Mutate the copy and ensure the original is unmodified.
        copy.commonName = "baz"
        self.assertEqual(name.commonName, "foo")
        # Mutate the original and ensure the copy is unmodified.
        name.emailAddress = "quux@example.com"
        self.assertEqual(copy.emailAddress, "bar@example.com")
    def test_repr(self):
        """
        L{repr} passed an L{X509NameType} instance should return a string
        containing a description of the type and the NIDs which have been set
        on it.
        """
        name = self._x509name(commonName="foo", emailAddress="bar")
        self.assertEqual(
            repr(name),
            "<X509Name object '/emailAddress=bar/CN=foo'>")
    def test_comparison(self):
        """
        L{X509NameType} instances should compare based on their NIDs.
        """
        # Helper: assert both directions of == and != for a pair.
        def _equality(a, b, assertTrue, assertFalse):
            assertTrue(a == b, "(%r == %r) --> False" % (a, b))
            assertFalse(a != b)
            assertTrue(b == a)
            assertFalse(b != a)
        def assertEqual(a, b):
            _equality(a, b, self.assertTrue, self.assertFalse)
        # Instances compare equal to themselves.
        name = self._x509name()
        assertEqual(name, name)
        # Empty instances should compare equal to each other.
        assertEqual(self._x509name(), self._x509name())
        # Instances with equal NIDs should compare equal to each other.
        assertEqual(self._x509name(commonName="foo"),
                    self._x509name(commonName="foo"))
        # Instance with equal NIDs set using different aliases should compare
        # equal to each other.
        assertEqual(self._x509name(commonName="foo"),
                    self._x509name(CN="foo"))
        # Instances with more than one NID with the same values should compare
        # equal to each other.
        assertEqual(self._x509name(CN="foo", organizationalUnitName="bar"),
                    self._x509name(commonName="foo", OU="bar"))
        def assertNotEqual(a, b):
            _equality(a, b, self.assertFalse, self.assertTrue)
        # Instances with different values for the same NID should not compare
        # equal to each other.
        assertNotEqual(self._x509name(CN="foo"),
                       self._x509name(CN="bar"))
        # Instances with different NIDs should not compare equal to each other.
        assertNotEqual(self._x509name(CN="foo"),
                       self._x509name(OU="foo"))
        # Helper: assert the full set of ordering operators for a < b.
        def _inequality(a, b, assertTrue, assertFalse):
            assertTrue(a < b)
            assertTrue(a <= b)
            assertTrue(b > a)
            assertTrue(b >= a)
            assertFalse(a > b)
            assertFalse(a >= b)
            assertFalse(b < a)
            assertFalse(b <= a)
        def assertLessThan(a, b):
            _inequality(a, b, self.assertTrue, self.assertFalse)
        # An X509Name with a NID with a value which sorts less than the value
        # of the same NID on another X509Name compares less than the other
        # X509Name.
        assertLessThan(self._x509name(CN="abc"),
                       self._x509name(CN="def"))
        def assertGreaterThan(a, b):
            _inequality(a, b, self.assertFalse, self.assertTrue)
        # An X509Name with a NID with a value which sorts greater than the
        # value of the same NID on another X509Name compares greater than the
        # other X509Name.
        assertGreaterThan(self._x509name(CN="def"),
                          self._x509name(CN="abc"))
    def test_hash(self):
        """
        L{X509Name.hash} returns an integer hash based on the value of the
        name.
        """
        a = self._x509name(CN="foo")
        b = self._x509name(CN="foo")
        self.assertEqual(a.hash(), b.hash())
        a.CN = "bar"
        self.assertNotEqual(a.hash(), b.hash())
    def test_der(self):
        """
        L{X509Name.der} returns the DER encoded form of the name.
        """
        a = self._x509name(CN="foo", C="US")
        self.assertEqual(
            a.der(),
            b('0\x1b1\x0b0\t\x06\x03U\x04\x06\x13\x02US'
              '1\x0c0\n\x06\x03U\x04\x03\x13\x03foo'))
    def test_get_components(self):
        """
        L{X509Name.get_components} returns a C{list} of two-tuples of C{str}
        giving the NIDs and associated values which make up the name.
        """
        a = self._x509name()
        self.assertEqual(a.get_components(), [])
        a.CN = "foo"
        self.assertEqual(a.get_components(), [(b("CN"), b("foo"))])
        a.organizationalUnitName = "bar"
        self.assertEqual(
            a.get_components(),
            [(b("CN"), b("foo")), (b("OU"), b("bar"))])
class _PKeyInteractionTestsMixin:
    """
    Tests which involve another thing and a PKey.
    """
    def signable(self):
        """
        Return something with a C{set_pubkey}, C{get_pubkey}, and C{sign} method.
        """
        raise NotImplementedError()
    def test_signWithUngenerated(self):
        """
        L{X509Req.sign} raises L{ValueError} when passed a L{PKey} with no parts.
        """
        request = self.signable()
        key = PKey()
        self.assertRaises(ValueError, request.sign, key, 'MD5')
    def test_signWithPublicKey(self):
        """
        L{X509Req.sign} raises L{ValueError} when passed a L{PKey} with no
        private part as the signing key.
        """
        request = self.signable()
        key = PKey()
        key.generate_key(TYPE_RSA, 512)
        request.set_pubkey(key)
        # get_pubkey returns only the public half of the key just set.
        pub = request.get_pubkey()
        self.assertRaises(ValueError, request.sign, pub, 'MD5')
    def test_signWithUnknownDigest(self):
        """
        L{X509Req.sign} raises L{ValueError} when passed a digest name which is
        not known.
        """
        request = self.signable()
        key = PKey()
        key.generate_key(TYPE_RSA, 512)
        self.assertRaises(ValueError, request.sign, key, "monkeys")
    def test_sign(self):
        """
        L{X509Req.sign} succeeds when passed a private key object and a valid
        digest function. C{X509Req.verify} can be used to check the signature.
        """
        request = self.signable()
        key = PKey()
        key.generate_key(TYPE_RSA, 512)
        request.set_pubkey(key)
        request.sign(key, 'MD5')
        # If the type has a verify method, cover that too.
        if getattr(request, 'verify', None) is not None:
            pub = request.get_pubkey()
            self.assertTrue(request.verify(pub))
            # Make another key that won't verify.
            key = PKey()
            key.generate_key(TYPE_RSA, 512)
            self.assertRaises(Error, request.verify, key)
class X509ReqTests(TestCase, _PKeyInteractionTestsMixin):
    """
    Tests for L{OpenSSL.crypto.X509Req}.
    """
    def signable(self):
        """
        Provide a fresh L{X509Req} for the signing tests in the mixin.
        """
        return X509Req()
    def test_type(self):
        """
        L{X509Req} and L{X509ReqType} name the same type object, and that
        type can be instantiated.
        """
        self.assertIdentical(X509Req, X509ReqType)
        self.assertConsistentType(X509Req, 'X509Req')
    def test_construction(self):
        """
        Calling L{X509Req} with no arguments produces an L{X509ReqType}
        instance.
        """
        req = X509Req()
        self.assertTrue(
            isinstance(req, X509ReqType),
            "%r is of type %r, should be %r" % (req, type(req), X509ReqType))
    def test_version(self):
        """
        The X.509 version of a request starts at 0; it can be changed with
        L{X509ReqType.set_version} and read back with
        L{X509ReqType.get_version}.
        """
        req = X509Req()
        self.assertEqual(req.get_version(), 0)
        for version in (1, 3):
            req.set_version(version)
            self.assertEqual(req.get_version(), version)
    def test_version_wrong_args(self):
        """
        L{X509ReqType.set_version} raises L{TypeError} for a wrong number of
        arguments or a non-C{int} argument. L{X509ReqType.get_version} raises
        L{TypeError} when given any argument.
        """
        req = X509Req()
        self.assertRaises(TypeError, req.set_version)
        self.assertRaises(TypeError, req.set_version, "foo")
        self.assertRaises(TypeError, req.set_version, 1, 2)
        self.assertRaises(TypeError, req.get_version, None)
    def test_get_subject(self):
        """
        L{X509ReqType.get_subject} returns an L{X509Name} tied to the
        request's subject which remains usable even after the request object
        itself has been discarded.
        """
        req = X509Req()
        subject = req.get_subject()
        self.assertTrue(
            isinstance(subject, X509NameType),
            "%r is of type %r, should be %r" % (subject, type(subject), X509NameType))
        subject.commonName = "foo"
        self.assertEqual(req.get_subject().commonName, "foo")
        # The name object must keep the underlying request data alive.
        del req
        subject.commonName = "bar"
        self.assertEqual(subject.commonName, "bar")
    def test_get_subject_wrong_args(self):
        """
        L{X509ReqType.get_subject} raises L{TypeError} when given any
        argument.
        """
        req = X509Req()
        self.assertRaises(TypeError, req.get_subject, None)
    def test_add_extensions(self):
        """
        A C{list} of L{X509Extension} instances can be attached to a request
        with L{X509Req.add_extensions}.
        """
        req = X509Req()
        req.add_extensions([
            X509Extension(b('basicConstraints'), True, b('CA:false'))])
        # XXX Add get_extensions so the rest of this unit test can be written.
    def test_add_extensions_wrong_args(self):
        """
        L{X509Req.add_extensions} raises L{TypeError} for a wrong number of
        arguments or a non-C{list} argument, and L{ValueError} when the list
        holds anything other than L{X509Extension} instances.
        """
        req = X509Req()
        self.assertRaises(TypeError, req.add_extensions)
        self.assertRaises(TypeError, req.add_extensions, object())
        self.assertRaises(ValueError, req.add_extensions, [object()])
        self.assertRaises(TypeError, req.add_extensions, [], None)
class X509Tests(TestCase, _PKeyInteractionTestsMixin):
    """
    Tests for L{OpenSSL.crypto.X509}.
    """
    # Certificate followed by its private key, both cleartext PEM.
    pemData = cleartextCertificatePEM + cleartextPrivateKeyPEM
    extpem = """
-----BEGIN CERTIFICATE-----
MIIC3jCCAkegAwIBAgIJAJHFjlcCgnQzMA0GCSqGSIb3DQEBBQUAMEcxCzAJBgNV
BAYTAlNFMRUwEwYDVQQIEwxXZXN0ZXJib3R0b20xEjAQBgNVBAoTCUNhdGFsb2dp
eDENMAsGA1UEAxMEUm9vdDAeFw0wODA0MjIxNDQ1MzhaFw0wOTA0MjIxNDQ1Mzha
MFQxCzAJBgNVBAYTAlNFMQswCQYDVQQIEwJXQjEUMBIGA1UEChMLT3Blbk1ldGFk
aXIxIjAgBgNVBAMTGW5vZGUxLm9tMi5vcGVubWV0YWRpci5vcmcwgZ8wDQYJKoZI
hvcNAQEBBQADgY0AMIGJAoGBAPIcQMrwbk2nESF/0JKibj9i1x95XYAOwP+LarwT
Op4EQbdlI9SY+uqYqlERhF19w7CS+S6oyqx0DRZSk4Y9dZ9j9/xgm2u/f136YS1u
zgYFPvfUs6PqYLPSM8Bw+SjJ+7+2+TN+Tkiof9WP1cMjodQwOmdsiRbR0/J7+b1B
hec1AgMBAAGjgcQwgcEwCQYDVR0TBAIwADAsBglghkgBhvhCAQ0EHxYdT3BlblNT
TCBHZW5lcmF0ZWQgQ2VydGlmaWNhdGUwHQYDVR0OBBYEFIdHsBcMVVMbAO7j6NCj
03HgLnHaMB8GA1UdIwQYMBaAFL2h9Bf9Mre4vTdOiHTGAt7BRY/8MEYGA1UdEQQ/
MD2CDSouZXhhbXBsZS5vcmeCESoub20yLmV4bWFwbGUuY29thwSC7wgKgRNvbTJA
b3Blbm1ldGFkaXIub3JnMA0GCSqGSIb3DQEBBQUAA4GBALd7WdXkp2KvZ7/PuWZA
MPlIxyjS+Ly11+BNE0xGQRp9Wz+2lABtpgNqssvU156+HkKd02rGheb2tj7MX9hG
uZzbwDAZzJPjzDQDD7d3cWsrVcfIdqVU7epHqIadnOF+X0ghJ39pAm6VVadnSXCt
WpOdIpB8KksUTCzV591Nr1wd
-----END CERTIFICATE-----
"""
    def signable(self):
        """
        Create and return a new L{X509}.
        """
        return X509()
    def test_type(self):
        """
        L{X509} and L{X509Type} refer to the same type object and can be used
        to create instances of that type.
        """
        self.assertIdentical(X509, X509Type)
        self.assertConsistentType(X509, 'X509')
    def test_construction(self):
        """
        L{X509} takes no arguments and returns an instance of L{X509Type}.
        """
        certificate = X509()
        self.assertTrue(
            isinstance(certificate, X509Type),
            "%r is of type %r, should be %r" % (certificate,
                                                type(certificate),
                                                X509Type))
        self.assertEqual(type(X509Type).__name__, 'type')
        self.assertEqual(type(certificate).__name__, 'X509')
        self.assertEqual(type(certificate), X509Type)
        self.assertEqual(type(certificate), X509)
    def test_get_version_wrong_args(self):
        """
        L{X509.get_version} raises L{TypeError} if invoked with any arguments.
        """
        cert = X509()
        self.assertRaises(TypeError, cert.get_version, None)
    def test_set_version_wrong_args(self):
        """
        L{X509.set_version} raises L{TypeError} if invoked with the wrong number
        of arguments or an argument not of type C{int}.
        """
        cert = X509()
        self.assertRaises(TypeError, cert.set_version)
        self.assertRaises(TypeError, cert.set_version, None)
        self.assertRaises(TypeError, cert.set_version, 1, None)
    def test_version(self):
        """
        L{X509.set_version} sets the certificate version number.
        L{X509.get_version} retrieves it.
        """
        cert = X509()
        cert.set_version(1234)
        self.assertEquals(cert.get_version(), 1234)
    def test_get_serial_number_wrong_args(self):
        """
        L{X509.get_serial_number} raises L{TypeError} if invoked with any
        arguments.
        """
        cert = X509()
        self.assertRaises(TypeError, cert.get_serial_number, None)
    def test_serial_number(self):
        """
        The serial number of an L{X509Type} can be retrieved and modified with
        L{X509Type.get_serial_number} and L{X509Type.set_serial_number}.
        """
        certificate = X509()
        self.assertRaises(TypeError, certificate.set_serial_number)
        self.assertRaises(TypeError, certificate.set_serial_number, 1, 2)
        self.assertRaises(TypeError, certificate.set_serial_number, "1")
        self.assertRaises(TypeError, certificate.set_serial_number, 5.5)
        self.assertEqual(certificate.get_serial_number(), 0)
        certificate.set_serial_number(1)
        self.assertEqual(certificate.get_serial_number(), 1)
        # Serial numbers beyond the machine word size must round-trip too.
        certificate.set_serial_number(2 ** 32 + 1)
        self.assertEqual(certificate.get_serial_number(), 2 ** 32 + 1)
        certificate.set_serial_number(2 ** 64 + 1)
        self.assertEqual(certificate.get_serial_number(), 2 ** 64 + 1)
        certificate.set_serial_number(2 ** 128 + 1)
        self.assertEqual(certificate.get_serial_number(), 2 ** 128 + 1)
    def _setBoundTest(self, which):
        """
        Shared helper for the validity-bound tests: the C{set_not<which>}
        method takes a string in the format of an ASN1 GENERALIZEDTIME and
        sets that end of the certificate's validity period to it, readable
        back via C{get_not<which>}.
        """
        certificate = X509()
        set = getattr(certificate, 'set_not' + which)
        get = getattr(certificate, 'get_not' + which)
        # Starts with no value.
        self.assertEqual(get(), None)
        # GMT (Or is it UTC?) -exarkun
        when = b("20040203040506Z")
        set(when)
        self.assertEqual(get(), when)
        # A plus five hours and thirty minutes offset
        when = b("20040203040506+0530")
        set(when)
        self.assertEqual(get(), when)
        # A minus one hour fifteen minutes offset
        when = b("20040203040506-0115")
        set(when)
        self.assertEqual(get(), when)
        # An invalid string results in a ValueError
        self.assertRaises(ValueError, set, b("foo bar"))
        # The wrong number of arguments results in a TypeError.
        self.assertRaises(TypeError, set)
        self.assertRaises(TypeError, set, b("20040203040506Z"), b("20040203040506Z"))
        self.assertRaises(TypeError, get, b("foo bar"))
    # XXX ASN1_TIME (not GENERALIZEDTIME)
    def test_set_notBefore(self):
        """
        L{X509Type.set_notBefore} takes a string in the format of an ASN1
        GENERALIZEDTIME and sets the beginning of the certificate's validity
        period to it.
        """
        self._setBoundTest("Before")
    def test_set_notAfter(self):
        """
        L{X509Type.set_notAfter} takes a string in the format of an ASN1
        GENERALIZEDTIME and sets the end of the certificate's validity period
        to it.
        """
        self._setBoundTest("After")
    def test_get_notBefore(self):
        """
        L{X509Type.get_notBefore} returns a string in the format of an ASN1
        GENERALIZEDTIME even for certificates which store it as UTCTIME
        internally.
        """
        cert = load_certificate(FILETYPE_PEM, self.pemData)
        self.assertEqual(cert.get_notBefore(), b("20090325123658Z"))
    def test_get_notAfter(self):
        """
        L{X509Type.get_notAfter} returns a string in the format of an ASN1
        GENERALIZEDTIME even for certificates which store it as UTCTIME
        internally.
        """
        cert = load_certificate(FILETYPE_PEM, self.pemData)
        self.assertEqual(cert.get_notAfter(), b("20170611123658Z"))
    def test_gmtime_adj_notBefore_wrong_args(self):
        """
        L{X509Type.gmtime_adj_notBefore} raises L{TypeError} if called with the
        wrong number of arguments or a non-C{int} argument.
        """
        cert = X509()
        self.assertRaises(TypeError, cert.gmtime_adj_notBefore)
        self.assertRaises(TypeError, cert.gmtime_adj_notBefore, None)
        self.assertRaises(TypeError, cert.gmtime_adj_notBefore, 123, None)
    def test_gmtime_adj_notBefore(self):
        """
        L{X509Type.gmtime_adj_notBefore} changes the not-before timestamp to be
        the current time plus the number of seconds passed in.
        """
        cert = load_certificate(FILETYPE_PEM, self.pemData)
        # NOTE(review): ``now`` is computed before the adjustment; this can
        # race across a second boundary and fail spuriously.
        now = datetime.utcnow() + timedelta(seconds=100)
        cert.gmtime_adj_notBefore(100)
        self.assertEqual(cert.get_notBefore(), b(now.strftime("%Y%m%d%H%M%SZ")))
    def test_gmtime_adj_notAfter_wrong_args(self):
        """
        L{X509Type.gmtime_adj_notAfter} raises L{TypeError} if called with the
        wrong number of arguments or a non-C{int} argument.
        """
        cert = X509()
        self.assertRaises(TypeError, cert.gmtime_adj_notAfter)
        self.assertRaises(TypeError, cert.gmtime_adj_notAfter, None)
        self.assertRaises(TypeError, cert.gmtime_adj_notAfter, 123, None)
    def test_gmtime_adj_notAfter(self):
        """
        L{X509Type.gmtime_adj_notAfter} changes the not-after timestamp to be
        the current time plus the number of seconds passed in.
        """
        cert = load_certificate(FILETYPE_PEM, self.pemData)
        # NOTE(review): same potential second-boundary race as the
        # notBefore variant above.
        now = datetime.utcnow() + timedelta(seconds=100)
        cert.gmtime_adj_notAfter(100)
        self.assertEqual(cert.get_notAfter(), b(now.strftime("%Y%m%d%H%M%SZ")))
    def test_has_expired_wrong_args(self):
        """
        L{X509Type.has_expired} raises L{TypeError} if called with any
        arguments.
        """
        cert = X509()
        self.assertRaises(TypeError, cert.has_expired, None)
    def test_has_expired(self):
        """
        L{X509Type.has_expired} returns C{True} if the certificate's not-after
        time is in the past.
        """
        cert = X509()
        cert.gmtime_adj_notAfter(-1)
        self.assertTrue(cert.has_expired())
    def test_has_not_expired(self):
        """
        L{X509Type.has_expired} returns C{False} if the certificate's not-after
        time is in the future.
        """
        cert = X509()
        cert.gmtime_adj_notAfter(2)
        self.assertFalse(cert.has_expired())
    def test_digest(self):
        """
        L{X509.digest} returns a string giving ":"-separated hex-encoded words
        of the digest of the certificate.
        """
        cert = X509()
        # NOTE(review): assumes the MD5 digest of an empty X509's encoding is
        # stable across OpenSSL versions - confirm.
        self.assertEqual(
            cert.digest("md5"),
            b("A8:EB:07:F8:53:25:0A:F2:56:05:C5:A5:C4:C4:C7:15"))
    def _extcert(self, pkey, extensions):
        """
        Helper: build a certificate carrying the given extensions and
        round-trip it through PEM so the extensions are actually encoded
        and re-parsed.
        """
        cert = X509()
        cert.set_pubkey(pkey)
        cert.get_subject().commonName = "Unit Tests"
        cert.get_issuer().commonName = "Unit Tests"
        when = b(datetime.now().strftime("%Y%m%d%H%M%SZ"))
        cert.set_notBefore(when)
        cert.set_notAfter(when)
        cert.add_extensions(extensions)
        return load_certificate(
            FILETYPE_PEM, dump_certificate(FILETYPE_PEM, cert))
    def test_extension_count(self):
        """
        L{X509.get_extension_count} returns the number of extensions that are
        present in the certificate.
        """
        pkey = load_privatekey(FILETYPE_PEM, client_key_pem)
        ca = X509Extension(b('basicConstraints'), True, b('CA:FALSE'))
        key = X509Extension(b('keyUsage'), True, b('digitalSignature'))
        subjectAltName = X509Extension(
            b('subjectAltName'), True, b('DNS:example.com'))
        # Try a certificate with no extensions at all.
        c = self._extcert(pkey, [])
        self.assertEqual(c.get_extension_count(), 0)
        # And a certificate with one
        c = self._extcert(pkey, [ca])
        self.assertEqual(c.get_extension_count(), 1)
        # And a certificate with several
        c = self._extcert(pkey, [ca, key, subjectAltName])
        self.assertEqual(c.get_extension_count(), 3)
    def test_get_extension(self):
        """
        L{X509.get_extension} takes an integer and returns an L{X509Extension}
        corresponding to the extension at that index.
        """
        pkey = load_privatekey(FILETYPE_PEM, client_key_pem)
        ca = X509Extension(b('basicConstraints'), True, b('CA:FALSE'))
        key = X509Extension(b('keyUsage'), True, b('digitalSignature'))
        subjectAltName = X509Extension(
            b('subjectAltName'), False, b('DNS:example.com'))
        cert = self._extcert(pkey, [ca, key, subjectAltName])
        # Extensions come back in the order they were added, with their
        # critical flags intact.
        ext = cert.get_extension(0)
        self.assertTrue(isinstance(ext, X509Extension))
        self.assertTrue(ext.get_critical())
        self.assertEqual(ext.get_short_name(), b('basicConstraints'))
        ext = cert.get_extension(1)
        self.assertTrue(isinstance(ext, X509Extension))
        self.assertTrue(ext.get_critical())
        self.assertEqual(ext.get_short_name(), b('keyUsage'))
        ext = cert.get_extension(2)
        self.assertTrue(isinstance(ext, X509Extension))
        self.assertFalse(ext.get_critical())
        self.assertEqual(ext.get_short_name(), b('subjectAltName'))
        # Out-of-range indexes raise IndexError; non-int raises TypeError.
        self.assertRaises(IndexError, cert.get_extension, -1)
        self.assertRaises(IndexError, cert.get_extension, 4)
        self.assertRaises(TypeError, cert.get_extension, "hello")
    def test_invalid_digest_algorithm(self):
        """
        L{X509.digest} raises L{ValueError} if called with an unrecognized hash
        algorithm.
        """
        cert = X509()
        self.assertRaises(ValueError, cert.digest, "monkeys")
    def test_get_subject_wrong_args(self):
        """
        L{X509.get_subject} raises L{TypeError} if called with any arguments.
        """
        cert = X509()
        self.assertRaises(TypeError, cert.get_subject, None)
    def test_get_subject(self):
        """
        L{X509.get_subject} returns an L{X509Name} instance.
        """
        cert = load_certificate(FILETYPE_PEM, self.pemData)
        subj = cert.get_subject()
        self.assertTrue(isinstance(subj, X509Name))
        self.assertEquals(
            subj.get_components(),
            [(b('C'), b('US')), (b('ST'), b('IL')), (b('L'), b('Chicago')),
             (b('O'), b('Testing')), (b('CN'), b('Testing Root CA'))])
    def test_set_subject_wrong_args(self):
        """
        L{X509.set_subject} raises a L{TypeError} if called with the wrong
        number of arguments or an argument not of type L{X509Name}.
        """
        cert = X509()
        self.assertRaises(TypeError, cert.set_subject)
        self.assertRaises(TypeError, cert.set_subject, None)
        self.assertRaises(TypeError, cert.set_subject, cert.get_subject(), None)
    def test_set_subject(self):
        """
        L{X509.set_subject} changes the subject of the certificate to the one
        passed in.
        """
        cert = X509()
        name = cert.get_subject()
        name.C = 'AU'
        name.O = 'Unit Tests'
        cert.set_subject(name)
        self.assertEquals(
            cert.get_subject().get_components(),
            [(b('C'), b('AU')), (b('O'), b('Unit Tests'))])
    def test_get_issuer_wrong_args(self):
        """
        L{X509.get_issuer} raises L{TypeError} if called with any arguments.
        """
        cert = X509()
        self.assertRaises(TypeError, cert.get_issuer, None)
    def test_get_issuer(self):
        """
        L{X509.get_issuer} returns an L{X509Name} instance.
        """
        cert = load_certificate(FILETYPE_PEM, self.pemData)
        subj = cert.get_issuer()
        self.assertTrue(isinstance(subj, X509Name))
        comp = subj.get_components()
        self.assertEquals(
            comp,
            [(b('C'), b('US')), (b('ST'), b('IL')), (b('L'), b('Chicago')),
             (b('O'), b('Testing')), (b('CN'), b('Testing Root CA'))])
    def test_set_issuer_wrong_args(self):
        """
        L{X509.set_issuer} raises a L{TypeError} if called with the wrong
        number of arguments or an argument not of type L{X509Name}.
        """
        cert = X509()
        self.assertRaises(TypeError, cert.set_issuer)
        self.assertRaises(TypeError, cert.set_issuer, None)
        self.assertRaises(TypeError, cert.set_issuer, cert.get_issuer(), None)
    def test_set_issuer(self):
        """
        L{X509.set_issuer} changes the issuer of the certificate to the one
        passed in.
        """
        cert = X509()
        name = cert.get_issuer()
        name.C = 'AU'
        name.O = 'Unit Tests'
        cert.set_issuer(name)
        self.assertEquals(
            cert.get_issuer().get_components(),
            [(b('C'), b('AU')), (b('O'), b('Unit Tests'))])
    def test_get_pubkey_uninitialized(self):
        """
        When called on a certificate with no public key, L{X509.get_pubkey}
        raises L{OpenSSL.crypto.Error}.
        """
        cert = X509()
        self.assertRaises(Error, cert.get_pubkey)
    def test_subject_name_hash_wrong_args(self):
        """
        L{X509.subject_name_hash} raises L{TypeError} if called with any
        arguments.
        """
        cert = X509()
        self.assertRaises(TypeError, cert.subject_name_hash, None)
    def test_subject_name_hash(self):
        """
        L{X509.subject_name_hash} returns the hash of the certificate's subject
        name.
        """
        cert = load_certificate(FILETYPE_PEM, self.pemData)
        # The hash algorithm changed between OpenSSL releases, so either
        # value is acceptable.
        self.assertIn(
            cert.subject_name_hash(),
            [3350047874, # OpenSSL 0.9.8, MD5
             3278919224, # OpenSSL 1.0.0, SHA1
             ])
    def test_get_signature_algorithm(self):
        """
        L{X509Type.get_signature_algorithm} returns a string naming the
        algorithm used to sign the certificate.
        """
        cert = load_certificate(FILETYPE_PEM, self.pemData)
        self.assertEqual(
            b("sha1WithRSAEncryption"), cert.get_signature_algorithm())
    def test_get_undefined_signature_algorithm(self):
        """
        L{X509Type.get_signature_algorithm} raises L{ValueError} if the
        signature algorithm is undefined or unknown.
        """
        # This certificate has been modified to indicate a bogus OID in the
        # signature algorithm field so that OpenSSL does not recognize it.
        certPEM = """\
-----BEGIN CERTIFICATE-----
MIIC/zCCAmigAwIBAgIBATAGBgJ8BQUAMHsxCzAJBgNVBAYTAlNHMREwDwYDVQQK
EwhNMkNyeXB0bzEUMBIGA1UECxMLTTJDcnlwdG8gQ0ExJDAiBgNVBAMTG00yQ3J5
cHRvIENlcnRpZmljYXRlIE1hc3RlcjEdMBsGCSqGSIb3DQEJARYObmdwc0Bwb3N0
MS5jb20wHhcNMDAwOTEwMDk1MTMwWhcNMDIwOTEwMDk1MTMwWjBTMQswCQYDVQQG
EwJTRzERMA8GA1UEChMITTJDcnlwdG8xEjAQBgNVBAMTCWxvY2FsaG9zdDEdMBsG
CSqGSIb3DQEJARYObmdwc0Bwb3N0MS5jb20wXDANBgkqhkiG9w0BAQEFAANLADBI
AkEArL57d26W9fNXvOhNlZzlPOACmvwOZ5AdNgLzJ1/MfsQQJ7hHVeHmTAjM664V
+fXvwUGJLziCeBo1ysWLRnl8CQIDAQABo4IBBDCCAQAwCQYDVR0TBAIwADAsBglg
hkgBhvhCAQ0EHxYdT3BlblNTTCBHZW5lcmF0ZWQgQ2VydGlmaWNhdGUwHQYDVR0O
BBYEFM+EgpK+eyZiwFU1aOPSbczbPSpVMIGlBgNVHSMEgZ0wgZqAFPuHI2nrnDqT
FeXFvylRT/7tKDgBoX+kfTB7MQswCQYDVQQGEwJTRzERMA8GA1UEChMITTJDcnlw
dG8xFDASBgNVBAsTC00yQ3J5cHRvIENBMSQwIgYDVQQDExtNMkNyeXB0byBDZXJ0
aWZpY2F0ZSBNYXN0ZXIxHTAbBgkqhkiG9w0BCQEWDm5ncHNAcG9zdDEuY29tggEA
MA0GCSqGSIb3DQEBBAUAA4GBADv8KpPo+gfJxN2ERK1Y1l17sz/ZhzoGgm5XCdbx
jEY7xKfpQngV599k1xhl11IMqizDwu0855agrckg2MCTmOI9DZzDD77tAYb+Dk0O
PEVk0Mk/V0aIsDE9bolfCi/i/QWZ3N8s5nTWMNyBBBmoSliWCm4jkkRZRD0ejgTN
tgI5
-----END CERTIFICATE-----
"""
        cert = load_certificate(FILETYPE_PEM, certPEM)
        self.assertRaises(ValueError, cert.get_signature_algorithm)
class PKCS12Tests(TestCase):
"""
Test for L{OpenSSL.crypto.PKCS12} and L{OpenSSL.crypto.load_pkcs12}.
"""
pemData = cleartextCertificatePEM + cleartextPrivateKeyPEM
def test_type(self):
"""
L{PKCS12Type} is a type object.
"""
self.assertIdentical(PKCS12, PKCS12Type)
self.assertConsistentType(PKCS12, 'PKCS12')
def test_empty_construction(self):
"""
L{PKCS12} returns a new instance of L{PKCS12} with no certificate,
private key, CA certificates, or friendly name.
"""
p12 = PKCS12()
self.assertEqual(None, p12.get_certificate())
self.assertEqual(None, p12.get_privatekey())
self.assertEqual(None, p12.get_ca_certificates())
self.assertEqual(None, p12.get_friendlyname())
def test_type_errors(self):
"""
The L{PKCS12} setter functions (C{set_certificate}, C{set_privatekey},
C{set_ca_certificates}, and C{set_friendlyname}) raise L{TypeError}
when passed objects of types other than those expected.
"""
p12 = PKCS12()
self.assertRaises(TypeError, p12.set_certificate, 3)
self.assertRaises(TypeError, p12.set_certificate, PKey())
self.assertRaises(TypeError, p12.set_certificate, X509)
self.assertRaises(TypeError, p12.set_privatekey, 3)
self.assertRaises(TypeError, p12.set_privatekey, 'legbone')
self.assertRaises(TypeError, p12.set_privatekey, X509())
self.assertRaises(TypeError, p12.set_ca_certificates, 3)
self.assertRaises(TypeError, p12.set_ca_certificates, X509())
self.assertRaises(TypeError, p12.set_ca_certificates, (3, 4))
self.assertRaises(TypeError, p12.set_ca_certificates, ( PKey(), ))
self.assertRaises(TypeError, p12.set_friendlyname, 6)
self.assertRaises(TypeError, p12.set_friendlyname, ('foo', 'bar'))
    def test_key_only(self):
        """
        A L{PKCS12} with only a private key can be exported using
        L{PKCS12.export} and loaded again using L{load_pkcs12}.
        """
        passwd = 'blah'
        p12 = PKCS12()
        pkey = load_privatekey(FILETYPE_PEM, cleartextPrivateKeyPEM)
        p12.set_privatekey(pkey)
        self.assertEqual(None, p12.get_certificate())
        self.assertEqual(pkey, p12.get_privatekey())
        try:
            dumped_p12 = p12.export(passphrase=passwd, iter=2, maciter=3)
        except Error:
            # Some versions of OpenSSL will throw an exception
            # for this nearly useless PKCS12 we tried to generate:
            # [('PKCS12 routines', 'PKCS12_create', 'invalid null argument')]
            # Treat that as acceptable and end the test early.
            return
        p12 = load_pkcs12(dumped_p12, passwd)
        self.assertEqual(None, p12.get_ca_certificates())
        self.assertEqual(None, p12.get_certificate())
        # OpenSSL fails to bring the key back to us. So sad. Perhaps in the
        # future this will be improved.
        self.assertTrue(isinstance(p12.get_privatekey(), (PKey, type(None))))
    def test_cert_only(self):
        """
        A L{PKCS12} with only a certificate can be exported using
        L{PKCS12.export} and loaded again using L{load_pkcs12}.
        """
        passwd = 'blah'
        p12 = PKCS12()
        cert = load_certificate(FILETYPE_PEM, cleartextCertificatePEM)
        p12.set_certificate(cert)
        self.assertEqual(cert, p12.get_certificate())
        self.assertEqual(None, p12.get_privatekey())
        try:
            dumped_p12 = p12.export(passphrase=passwd, iter=2, maciter=3)
        except Error:
            # Some versions of OpenSSL will throw an exception
            # for this nearly useless PKCS12 we tried to generate:
            # [('PKCS12 routines', 'PKCS12_create', 'invalid null argument')]
            # Treat that as acceptable and end the test early.
            return
        p12 = load_pkcs12(dumped_p12, passwd)
        self.assertEqual(None, p12.get_privatekey())
        # OpenSSL fails to bring the cert back to us. Groany mcgroan.
        self.assertTrue(isinstance(p12.get_certificate(), (X509, type(None))))
        # Oh ho. It puts the certificate into the ca certificates list, in
        # fact. Totally bogus, I would think. Nevertheless, let's exploit
        # that to check to see if it reconstructed the certificate we expected
        # it to. At some point, hopefully this will change so that
        # p12.get_certificate() is actually what returns the loaded
        # certificate.
        self.assertEqual(
            cleartextCertificatePEM,
            dump_certificate(FILETYPE_PEM, p12.get_ca_certificates()[0]))
def gen_pkcs12(self, cert_pem=None, key_pem=None, ca_pem=None, friendly_name=None):
"""
Generate a PKCS12 object with components from PEM. Verify that the set
functions return None.
"""
p12 = PKCS12()
if cert_pem:
ret = p12.set_certificate(load_certificate(FILETYPE_PEM, cert_pem))
self.assertEqual(ret, None)
if key_pem:
ret = p12.set_privatekey(load_privatekey(FILETYPE_PEM, key_pem))
self.assertEqual(ret, None)
if ca_pem:
ret = p12.set_ca_certificates((load_certificate(FILETYPE_PEM, ca_pem),))
self.assertEqual(ret, None)
if friendly_name:
ret = p12.set_friendlyname(friendly_name)
self.assertEqual(ret, None)
return p12
def check_recovery(self, p12_str, key=None, cert=None, ca=None, passwd='',
extra=()):
"""
Use openssl program to confirm three components are recoverable from a
PKCS12 string.
"""
if key:
recovered_key = _runopenssl(
p12_str, "pkcs12", '-nocerts', '-nodes', '-passin',
'pass:' + passwd, *extra)
self.assertEqual(recovered_key[-len(key):], key)
if cert:
recovered_cert = _runopenssl(
p12_str, "pkcs12", '-clcerts', '-nodes', '-passin',
'pass:' + passwd, '-nokeys', *extra)
self.assertEqual(recovered_cert[-len(cert):], cert)
if ca:
recovered_cert = _runopenssl(
p12_str, "pkcs12", '-cacerts', '-nodes', '-passin',
'pass:' + passwd, '-nokeys', *extra)
self.assertEqual(recovered_cert[-len(ca):], ca)
    def test_load_pkcs12(self):
        """
        A PKCS12 string generated using the openssl command line can be loaded
        with L{load_pkcs12} and its components extracted and examined.
        """
        passwd = 'whatever'
        pem = client_key_pem + client_cert_pem
        # Build the PKCS12 blob with the command line tool rather than this
        # library, so loading it exercises interoperability.
        p12_str = _runopenssl(
            pem, "pkcs12", '-export', '-clcerts', '-passout', 'pass:' + passwd)
        p12 = load_pkcs12(p12_str, passwd)
        # verify
        self.assertTrue(isinstance(p12, PKCS12))
        cert_pem = dump_certificate(FILETYPE_PEM, p12.get_certificate())
        self.assertEqual(cert_pem, client_cert_pem)
        key_pem = dump_privatekey(FILETYPE_PEM, p12.get_privatekey())
        self.assertEqual(key_pem, client_key_pem)
        # No CA certificates were supplied, so none should come back.
        self.assertEqual(None, p12.get_ca_certificates())
def test_load_pkcs12_garbage(self):
"""
L{load_pkcs12} raises L{OpenSSL.crypto.Error} when passed a string
which is not a PKCS12 dump.
"""
passwd = 'whatever'
e = self.assertRaises(Error, load_pkcs12, 'fruit loops', passwd)
self.assertEqual( e.args[0][0][0], 'asn1 encoding routines')
self.assertEqual( len(e.args[0][0]), 3)
    def test_replace(self):
        """
        L{PKCS12.set_certificate} replaces the certificate in a PKCS12 cluster.
        L{PKCS12.set_privatekey} replaces the private key.
        L{PKCS12.set_ca_certificates} replaces the CA certificates.
        """
        p12 = self.gen_pkcs12(client_cert_pem, client_key_pem, root_cert_pem)
        p12.set_certificate(load_certificate(FILETYPE_PEM, server_cert_pem))
        p12.set_privatekey(load_privatekey(FILETYPE_PEM, server_key_pem))
        root_cert = load_certificate(FILETYPE_PEM, root_cert_pem)
        client_cert = load_certificate(FILETYPE_PEM, client_cert_pem)
        p12.set_ca_certificates([root_cert]) # not a tuple
        self.assertEqual(1, len(p12.get_ca_certificates()))
        self.assertEqual(root_cert, p12.get_ca_certificates()[0])
        # Replacing the CA list wholesale preserves the given order.
        p12.set_ca_certificates([client_cert, root_cert])
        self.assertEqual(2, len(p12.get_ca_certificates()))
        self.assertEqual(client_cert, p12.get_ca_certificates()[0])
        self.assertEqual(root_cert, p12.get_ca_certificates()[1])
    def test_friendly_name(self):
        """
        The I{friendlyName} of a PKCS12 can be set and retrieved via
        L{PKCS12.get_friendlyname} and L{PKCS12_set_friendlyname}, and a
        L{PKCS12} with a friendly name set can be dumped with L{PKCS12.export}.
        """
        passwd = 'Dogmeat[]{}!@#$%^&*()~`?/.,<>-_+=";:'
        p12 = self.gen_pkcs12(server_cert_pem, server_key_pem, root_cert_pem)
        # Try a plain name, no name at all, and one of punctuation only; each
        # must survive an export/load round trip.
        for friendly_name in [b('Serverlicious'), None, b('###')]:
            p12.set_friendlyname(friendly_name)
            self.assertEqual(p12.get_friendlyname(), friendly_name)
            dumped_p12 = p12.export(passphrase=passwd, iter=2, maciter=3)
            reloaded_p12 = load_pkcs12(dumped_p12, passwd)
            self.assertEqual(
                p12.get_friendlyname(), reloaded_p12.get_friendlyname())
            # We would use the openssl program to confirm the friendly
            # name, but it is not possible. The pkcs12 command
            # does not store the friendly name in the cert's
            # alias, which we could then extract.
            self.check_recovery(
                dumped_p12, key=server_key_pem, cert=server_cert_pem,
                ca=root_cert_pem, passwd=passwd)
def test_various_empty_passphrases(self):
"""
Test that missing, None, and '' passphrases are identical for PKCS12
export.
"""
p12 = self.gen_pkcs12(client_cert_pem, client_key_pem, root_cert_pem)
passwd = ''
dumped_p12_empty = p12.export(iter=2, maciter=0, passphrase=passwd)
dumped_p12_none = p12.export(iter=3, maciter=2, passphrase=None)
dumped_p12_nopw = p12.export(iter=9, maciter=4)
for dumped_p12 in [dumped_p12_empty, dumped_p12_none, dumped_p12_nopw]:
self.check_recovery(
dumped_p12, key=client_key_pem, cert=client_cert_pem,
ca=root_cert_pem, passwd=passwd)
def test_removing_ca_cert(self):
"""
Passing C{None} to L{PKCS12.set_ca_certificates} removes all CA
certificates.
"""
p12 = self.gen_pkcs12(server_cert_pem, server_key_pem, root_cert_pem)
p12.set_ca_certificates(None)
self.assertEqual(None, p12.get_ca_certificates())
    def test_export_without_mac(self):
        """
        Exporting a PKCS12 with a C{maciter} of C{-1} excludes the MAC
        entirely.
        """
        passwd = 'Lake Michigan'
        p12 = self.gen_pkcs12(server_cert_pem, server_key_pem, root_cert_pem)
        dumped_p12 = p12.export(maciter=-1, passphrase=passwd, iter=2)
        # -nomacver stops the openssl tool from insisting on the MAC that we
        # deliberately left out.
        self.check_recovery(
            dumped_p12, key=server_key_pem, cert=server_cert_pem,
            passwd=passwd, extra=('-nomacver',))
    def test_load_without_mac(self):
        """
        Loading a PKCS12 without a MAC does something other than crash.
        """
        passwd = 'Lake Michigan'
        p12 = self.gen_pkcs12(server_cert_pem, server_key_pem, root_cert_pem)
        dumped_p12 = p12.export(maciter=-1, passphrase=passwd, iter=2)
        try:
            recovered_p12 = load_pkcs12(dumped_p12, passwd)
            # The person who generated this PKCS12 should be flogged,
            # or better yet we should have a means to determine
            # whether a PKCS12 had a MAC that was verified.
            # Anyway, libopenssl chooses to allow it, so the
            # pyopenssl binding does as well.
            self.assertTrue(isinstance(recovered_p12, PKCS12))
        except Error:
            # Failing here with an exception is preferred as some openssl
            # versions do.
            pass
def test_zero_len_list_for_ca(self):
"""
A PKCS12 with an empty CA certificates list can be exported.
"""
passwd = 'Hobie 18'
p12 = self.gen_pkcs12(server_cert_pem, server_key_pem)
p12.set_ca_certificates([])
self.assertEqual((), p12.get_ca_certificates())
dumped_p12 = p12.export(passphrase=passwd, iter=3)
self.check_recovery(
dumped_p12, key=server_key_pem, cert=server_cert_pem,
passwd=passwd)
def test_export_without_args(self):
"""
All the arguments to L{PKCS12.export} are optional.
"""
p12 = self.gen_pkcs12(server_cert_pem, server_key_pem, root_cert_pem)
dumped_p12 = p12.export() # no args
self.check_recovery(
dumped_p12, key=server_key_pem, cert=server_cert_pem, passwd='')
    def test_key_cert_mismatch(self):
        """
        L{PKCS12.export} raises an exception when a key and certificate
        mismatch.
        """
        # The server certificate is paired with the client key, which does
        # not correspond to it, so export must fail.
        p12 = self.gen_pkcs12(server_cert_pem, client_key_pem, root_cert_pem)
        self.assertRaises(Error, p12.export)
# These quoting functions taken directly from Twisted's twisted.python.win32.
_cmdLineQuoteRe = re.compile(r'(\\*)"')
_cmdLineQuoteRe2 = re.compile(r'(\\+)\Z')
def cmdLineQuote(s):
    """
    Internal method for quoting a single command-line argument.

    @type s: C{str}
    @param s: A single unquoted string to quote for something that is
        expecting cmd.exe-style quoting

    @rtype: C{str}
    @return: A cmd.exe-style quoted string

    @see: U{http://www.perlmonks.org/?node_id=764004}
    """
    # First double any backslashes immediately preceding a double quote and
    # escape the quote itself; then double any trailing run of backslashes,
    # which would otherwise escape the closing quote we add.
    escaped = _cmdLineQuoteRe.sub(r'\1\1\\"', s)
    escaped = _cmdLineQuoteRe2.sub(r"\1\1", escaped)
    return '"%s"' % (escaped,)
def quoteArguments(arguments):
    """
    Quote an iterable of command-line arguments for passing to CreateProcess
    or a similar API.  This allows the list passed to
    C{reactor.spawnProcess} to match the child process's C{sys.argv}
    properly.

    @type arguments: C{iterable} of C{str}
    @param arguments: An iterable of unquoted arguments to quote

    @rtype: C{str}
    @return: A space-delimited string containing quoted versions of
        L{arguments}
    """
    return ' '.join(cmdLineQuote(argument) for argument in arguments)
def _runopenssl(pem, *args):
    """
    Run the command line openssl tool with the given arguments and write
    the given PEM to its stdin.  Not safe for quotes.

    @param pem: Data to feed to the child process's standard input.
    @param args: Command line arguments for openssl (the subcommand first).
    @return: Everything the child process wrote to standard output.
    """
    if os.name == 'posix':
        command = "openssl " + " ".join([
                "'%s'" % (arg.replace("'", "'\\''"),) for arg in args])
    else:
        command = "openssl " + quoteArguments(args)
    proc = Popen(command, shell=True, stdin=PIPE, stdout=PIPE)
    # Use communicate() rather than write/close/read: writing all of stdin
    # before reading any stdout can deadlock once the child fills the
    # stdout pipe buffer, and communicate() also closes both pipes for us.
    output, _ = proc.communicate(pem)
    return output
class FunctionTests(TestCase):
    """
    Tests for free-functions in the L{OpenSSL.crypto} module.
    """
    def test_load_privatekey_invalid_format(self):
        """
        L{load_privatekey} raises L{ValueError} if passed an unknown filetype.
        """
        self.assertRaises(ValueError, load_privatekey, 100, root_key_pem)
    def test_load_privatekey_invalid_passphrase_type(self):
        """
        L{load_privatekey} raises L{TypeError} if passed a passphrase that is
        neither a c{str} nor a callable.
        """
        self.assertRaises(
            TypeError,
            load_privatekey,
            FILETYPE_PEM, encryptedPrivateKeyPEMPassphrase, object())
    def test_load_privatekey_wrong_args(self):
        """
        L{load_privatekey} raises L{TypeError} if called with the wrong number
        of arguments.
        """
        self.assertRaises(TypeError, load_privatekey)
    def test_load_privatekey_wrongPassphrase(self):
        """
        L{load_privatekey} raises L{OpenSSL.crypto.Error} when it is passed an
        encrypted PEM and an incorrect passphrase.
        """
        self.assertRaises(
            Error,
            load_privatekey, FILETYPE_PEM, encryptedPrivateKeyPEM, b("quack"))
    def test_load_privatekey_passphrase(self):
        """
        L{load_privatekey} can create a L{PKey} object from an encrypted PEM
        string if given the passphrase.
        """
        key = load_privatekey(
            FILETYPE_PEM, encryptedPrivateKeyPEM,
            encryptedPrivateKeyPEMPassphrase)
        self.assertTrue(isinstance(key, PKeyType))
    def test_load_privatekey_wrongPassphraseCallback(self):
        """
        L{load_privatekey} raises L{OpenSSL.crypto.Error} when it is passed an
        encrypted PEM and a passphrase callback which returns an incorrect
        passphrase.
        """
        called = []
        def cb(*a):
            called.append(None)
            return "quack"
        self.assertRaises(
            Error,
            load_privatekey, FILETYPE_PEM, encryptedPrivateKeyPEM, cb)
        # The callback must actually have been consulted before the failure.
        self.assertTrue(called)
    def test_load_privatekey_passphraseCallback(self):
        """
        L{load_privatekey} can create a L{PKey} object from an encrypted PEM
        string if given a passphrase callback which returns the correct
        password.
        """
        called = []
        def cb(writing):
            called.append(writing)
            return encryptedPrivateKeyPEMPassphrase
        key = load_privatekey(FILETYPE_PEM, encryptedPrivateKeyPEM, cb)
        self.assertTrue(isinstance(key, PKeyType))
        # The callback is told whether it is invoked for writing; loading is
        # a read, so it should have been called exactly once with False.
        self.assertEqual(called, [False])
    def test_load_privatekey_passphrase_exception(self):
        """
        An exception raised by the passphrase callback passed to
        L{load_privatekey} causes L{OpenSSL.crypto.Error} to be raised.
        This isn't as nice as just letting the exception pass through. The
        behavior might be changed to that eventually.
        """
        def broken(ignored):
            raise RuntimeError("This is not working.")
        self.assertRaises(
            Error,
            load_privatekey,
            FILETYPE_PEM, encryptedPrivateKeyPEM, broken)
    def test_dump_privatekey_wrong_args(self):
        """
        L{dump_privatekey} raises L{TypeError} if called with the wrong number
        of arguments.
        """
        self.assertRaises(TypeError, dump_privatekey)
    def test_dump_privatekey_unknown_cipher(self):
        """
        L{dump_privatekey} raises L{ValueError} if called with an unrecognized
        cipher name.
        """
        key = PKey()
        key.generate_key(TYPE_RSA, 512)
        self.assertRaises(
            ValueError, dump_privatekey,
            FILETYPE_PEM, key, "zippers", "passphrase")
    def test_dump_privatekey_invalid_passphrase_type(self):
        """
        L{dump_privatekey} raises L{TypeError} if called with a passphrase which
        is neither a C{str} nor a callable.
        """
        key = PKey()
        key.generate_key(TYPE_RSA, 512)
        self.assertRaises(
            TypeError,
            dump_privatekey, FILETYPE_PEM, key, "blowfish", object())
    def test_dump_privatekey_invalid_filetype(self):
        """
        L{dump_privatekey} raises L{ValueError} if called with an unrecognized
        filetype.
        """
        key = PKey()
        key.generate_key(TYPE_RSA, 512)
        self.assertRaises(ValueError, dump_privatekey, 100, key)
    def test_dump_privatekey_passphrase(self):
        """
        L{dump_privatekey} writes an encrypted PEM when given a passphrase.
        """
        passphrase = b("foo")
        key = load_privatekey(FILETYPE_PEM, cleartextPrivateKeyPEM)
        pem = dump_privatekey(FILETYPE_PEM, key, "blowfish", passphrase)
        self.assertTrue(isinstance(pem, bytes))
        # Round trip: the encrypted PEM must load back into an equivalent key.
        loadedKey = load_privatekey(FILETYPE_PEM, pem, passphrase)
        self.assertTrue(isinstance(loadedKey, PKeyType))
        self.assertEqual(loadedKey.type(), key.type())
        self.assertEqual(loadedKey.bits(), key.bits())
    def test_dump_certificate(self):
        """
        L{dump_certificate} writes PEM, DER, and text.
        """
        pemData = cleartextCertificatePEM + cleartextPrivateKeyPEM
        cert = load_certificate(FILETYPE_PEM, pemData)
        dumped_pem = dump_certificate(FILETYPE_PEM, cert)
        self.assertEqual(dumped_pem, cleartextCertificatePEM)
        dumped_der = dump_certificate(FILETYPE_ASN1, cert)
        # The openssl command line tool provides the reference DER and text
        # renderings to compare against.
        good_der = _runopenssl(dumped_pem, "x509", "-outform", "DER")
        self.assertEqual(dumped_der, good_der)
        cert2 = load_certificate(FILETYPE_ASN1, dumped_der)
        dumped_pem2 = dump_certificate(FILETYPE_PEM, cert2)
        self.assertEqual(dumped_pem2, cleartextCertificatePEM)
        dumped_text = dump_certificate(FILETYPE_TEXT, cert)
        good_text = _runopenssl(dumped_pem, "x509", "-noout", "-text")
        self.assertEqual(dumped_text, good_text)
    def test_dump_privatekey(self):
        """
        L{dump_privatekey} writes a PEM, DER, and text.
        """
        key = load_privatekey(FILETYPE_PEM, cleartextPrivateKeyPEM)
        self.assertTrue(key.check())
        dumped_pem = dump_privatekey(FILETYPE_PEM, key)
        self.assertEqual(dumped_pem, cleartextPrivateKeyPEM)
        dumped_der = dump_privatekey(FILETYPE_ASN1, key)
        # XXX This OpenSSL call writes "writing RSA key" to standard out. Sad.
        good_der = _runopenssl(dumped_pem, "rsa", "-outform", "DER")
        self.assertEqual(dumped_der, good_der)
        key2 = load_privatekey(FILETYPE_ASN1, dumped_der)
        dumped_pem2 = dump_privatekey(FILETYPE_PEM, key2)
        self.assertEqual(dumped_pem2, cleartextPrivateKeyPEM)
        dumped_text = dump_privatekey(FILETYPE_TEXT, key)
        good_text = _runopenssl(dumped_pem, "rsa", "-noout", "-text")
        self.assertEqual(dumped_text, good_text)
    def test_dump_certificate_request(self):
        """
        L{dump_certificate_request} writes a PEM, DER, and text.
        """
        req = load_certificate_request(FILETYPE_PEM, cleartextCertificateRequestPEM)
        dumped_pem = dump_certificate_request(FILETYPE_PEM, req)
        self.assertEqual(dumped_pem, cleartextCertificateRequestPEM)
        dumped_der = dump_certificate_request(FILETYPE_ASN1, req)
        good_der = _runopenssl(dumped_pem, "req", "-outform", "DER")
        self.assertEqual(dumped_der, good_der)
        req2 = load_certificate_request(FILETYPE_ASN1, dumped_der)
        dumped_pem2 = dump_certificate_request(FILETYPE_PEM, req2)
        self.assertEqual(dumped_pem2, cleartextCertificateRequestPEM)
        dumped_text = dump_certificate_request(FILETYPE_TEXT, req)
        good_text = _runopenssl(dumped_pem, "req", "-noout", "-text")
        self.assertEqual(dumped_text, good_text)
        # An unrecognized file type is rejected with ValueError.
        self.assertRaises(ValueError, dump_certificate_request, 100, req)
    def test_dump_privatekey_passphraseCallback(self):
        """
        L{dump_privatekey} writes an encrypted PEM when given a callback which
        returns the correct passphrase.
        """
        passphrase = b("foo")
        called = []
        def cb(writing):
            called.append(writing)
            return passphrase
        key = load_privatekey(FILETYPE_PEM, cleartextPrivateKeyPEM)
        pem = dump_privatekey(FILETYPE_PEM, key, "blowfish", cb)
        self.assertTrue(isinstance(pem, bytes))
        # Dumping is a write, so the callback is invoked once with True.
        self.assertEqual(called, [True])
        loadedKey = load_privatekey(FILETYPE_PEM, pem, passphrase)
        self.assertTrue(isinstance(loadedKey, PKeyType))
        self.assertEqual(loadedKey.type(), key.type())
        self.assertEqual(loadedKey.bits(), key.bits())
    def test_load_pkcs7_data(self):
        """
        L{load_pkcs7_data} accepts a PKCS#7 string and returns an instance of
        L{PKCS7Type}.
        """
        pkcs7 = load_pkcs7_data(FILETYPE_PEM, pkcs7Data)
        self.assertTrue(isinstance(pkcs7, PKCS7Type))
class PKCS7Tests(TestCase):
    """
    Tests for L{PKCS7Type}.
    """
    def test_type(self):
        """
        L{PKCS7Type} is a type object.
        """
        self.assertTrue(isinstance(PKCS7Type, type))
        self.assertEqual(PKCS7Type.__name__, 'PKCS7')
        # XXX This doesn't currently work.
        # self.assertIdentical(PKCS7, PKCS7Type)
    # XXX Opposite results for all these following methods
    def test_type_is_signed_wrong_args(self):
        """
        L{PKCS7Type.type_is_signed} raises L{TypeError} if called with any
        arguments.
        """
        pkcs7 = load_pkcs7_data(FILETYPE_PEM, pkcs7Data)
        self.assertRaises(TypeError, pkcs7.type_is_signed, None)
    def test_type_is_signed(self):
        """
        L{PKCS7Type.type_is_signed} returns C{True} if the PKCS7 object is of
        the type I{signed}.
        """
        pkcs7 = load_pkcs7_data(FILETYPE_PEM, pkcs7Data)
        self.assertTrue(pkcs7.type_is_signed())
    def test_type_is_enveloped_wrong_args(self):
        """
        L{PKCS7Type.type_is_enveloped} raises L{TypeError} if called with any
        arguments.
        """
        pkcs7 = load_pkcs7_data(FILETYPE_PEM, pkcs7Data)
        self.assertRaises(TypeError, pkcs7.type_is_enveloped, None)
    def test_type_is_enveloped(self):
        """
        L{PKCS7Type.type_is_enveloped} returns C{False} if the PKCS7 object is
        not of the type I{enveloped}.
        """
        pkcs7 = load_pkcs7_data(FILETYPE_PEM, pkcs7Data)
        self.assertFalse(pkcs7.type_is_enveloped())
    def test_type_is_signedAndEnveloped_wrong_args(self):
        """
        L{PKCS7Type.type_is_signedAndEnveloped} raises L{TypeError} if called
        with any arguments.
        """
        pkcs7 = load_pkcs7_data(FILETYPE_PEM, pkcs7Data)
        self.assertRaises(TypeError, pkcs7.type_is_signedAndEnveloped, None)
    def test_type_is_signedAndEnveloped(self):
        """
        L{PKCS7Type.type_is_signedAndEnveloped} returns C{False} if the PKCS7
        object is not of the type I{signed and enveloped}.
        """
        pkcs7 = load_pkcs7_data(FILETYPE_PEM, pkcs7Data)
        self.assertFalse(pkcs7.type_is_signedAndEnveloped())
    def test_type_is_data(self):
        """
        L{PKCS7Type.type_is_data} returns C{False} if the PKCS7 object is not of
        the type data.
        """
        pkcs7 = load_pkcs7_data(FILETYPE_PEM, pkcs7Data)
        self.assertFalse(pkcs7.type_is_data())
    def test_type_is_data_wrong_args(self):
        """
        L{PKCS7Type.type_is_data} raises L{TypeError} if called with any
        arguments.
        """
        pkcs7 = load_pkcs7_data(FILETYPE_PEM, pkcs7Data)
        self.assertRaises(TypeError, pkcs7.type_is_data, None)
    def test_get_type_name_wrong_args(self):
        """
        L{PKCS7Type.get_type_name} raises L{TypeError} if called with any
        arguments.
        """
        pkcs7 = load_pkcs7_data(FILETYPE_PEM, pkcs7Data)
        self.assertRaises(TypeError, pkcs7.get_type_name, None)
    def test_get_type_name(self):
        """
        L{PKCS7Type.get_type_name} returns a C{str} giving the type name.
        """
        pkcs7 = load_pkcs7_data(FILETYPE_PEM, pkcs7Data)
        # assertEqual, not the deprecated assertEquals alias, for
        # consistency with the rest of this module.
        self.assertEqual(pkcs7.get_type_name(), b('pkcs7-signedData'))
    def test_attribute(self):
        """
        If an attribute other than one of the methods tested here is accessed on
        an instance of L{PKCS7Type}, L{AttributeError} is raised.
        """
        pkcs7 = load_pkcs7_data(FILETYPE_PEM, pkcs7Data)
        self.assertRaises(AttributeError, getattr, pkcs7, "foo")
class NetscapeSPKITests(TestCase, _PKeyInteractionTestsMixin):
    """
    Tests for L{OpenSSL.crypto.NetscapeSPKI}.
    """
    def signable(self):
        """
        Return a new L{NetscapeSPKI} for use with signing tests.
        """
        return NetscapeSPKI()
    def test_type(self):
        """
        L{NetscapeSPKI} and L{NetscapeSPKIType} refer to the same type object
        and can be used to create instances of that type.
        """
        self.assertIdentical(NetscapeSPKI, NetscapeSPKIType)
        self.assertConsistentType(NetscapeSPKI, 'NetscapeSPKI')
    def test_construction(self):
        """
        L{NetscapeSPKI} returns an instance of L{NetscapeSPKIType}.
        """
        nspki = NetscapeSPKI()
        self.assertTrue(isinstance(nspki, NetscapeSPKIType))
    def test_invalid_attribute(self):
        """
        Accessing a non-existent attribute of a L{NetscapeSPKI} instance causes
        an L{AttributeError} to be raised.
        """
        nspki = NetscapeSPKI()
        self.assertRaises(AttributeError, lambda: nspki.foo)
    def test_b64_encode(self):
        """
        L{NetscapeSPKI.b64_encode} encodes the certificate to a base64 blob.
        """
        nspki = NetscapeSPKI()
        blob = nspki.b64_encode()
        # Only the result type is pinned down here; the exact encoding of an
        # empty SPKI is not asserted.
        self.assertTrue(isinstance(blob, bytes))
class RevokedTests(TestCase):
    """
    Tests for L{OpenSSL.crypto.Revoked}
    """
    def test_construction(self):
        """
        Confirm we can create L{OpenSSL.crypto.Revoked}. Check
        that it is empty.
        """
        revoked = Revoked()
        self.assertTrue(isinstance(revoked, Revoked))
        self.assertEqual(type(revoked), Revoked)
        self.assertEqual(revoked.get_serial(), b('00'))
        self.assertEqual(revoked.get_rev_date(), None)
        self.assertEqual(revoked.get_reason(), None)
    def test_construction_wrong_args(self):
        """
        Calling L{OpenSSL.crypto.Revoked} with any arguments results
        in a L{TypeError} being raised.
        """
        self.assertRaises(TypeError, Revoked, None)
        self.assertRaises(TypeError, Revoked, 1)
        self.assertRaises(TypeError, Revoked, "foo")
    def test_serial(self):
        """
        Confirm we can set and get serial numbers from
        L{OpenSSL.crypto.Revoked}. Confirm errors are handled
        with grace.
        """
        revoked = Revoked()
        ret = revoked.set_serial(b('10b'))
        self.assertEqual(ret, None)
        ser = revoked.get_serial()
        # Serials come back normalized to upper-case, even-length hex.
        self.assertEqual(ser, b('010B'))
        revoked.set_serial(b('31ppp')) # a type error would be nice
        ser = revoked.get_serial()
        # Parsing stops at the first non-hex character.
        self.assertEqual(ser, b('31'))
        self.assertRaises(ValueError, revoked.set_serial, b('pqrst'))
        self.assertRaises(TypeError, revoked.set_serial, 100)
        self.assertRaises(TypeError, revoked.get_serial, 1)
        self.assertRaises(TypeError, revoked.get_serial, None)
        self.assertRaises(TypeError, revoked.get_serial, "")
    def test_date(self):
        """
        Confirm we can set and get revocation dates from
        L{OpenSSL.crypto.Revoked}. Confirm errors are handled
        with grace.
        """
        revoked = Revoked()
        date = revoked.get_rev_date()
        self.assertEqual(date, None)
        now = b(datetime.now().strftime("%Y%m%d%H%M%SZ"))
        ret = revoked.set_rev_date(now)
        self.assertEqual(ret, None)
        date = revoked.get_rev_date()
        self.assertEqual(date, now)
    def test_reason(self):
        """
        Confirm we can set and get revocation reasons from
        L{OpenSSL.crypto.Revoked}. The "get" need to work
        as "set". Likewise, each reason of all_reasons() must work.
        """
        revoked = Revoked()
        for r in revoked.all_reasons():
            for x in range(2):
                ret = revoked.set_reason(r)
                self.assertEqual(ret, None)
                reason = revoked.get_reason()
                # Compare ignoring case and embedded spaces since the
                # returned reason string is normalized.
                self.assertEqual(
                    reason.lower().replace(b(' '), b('')),
                    r.lower().replace(b(' '), b('')))
                r = reason # again with the resp of get
        revoked.set_reason(None)
        self.assertEqual(revoked.get_reason(), None)
    def test_set_reason_wrong_arguments(self):
        """
        Calling L{OpenSSL.crypto.Revoked.set_reason} with other than
        one argument, or an argument which isn't a valid reason,
        results in L{TypeError} or L{ValueError} being raised.
        """
        revoked = Revoked()
        self.assertRaises(TypeError, revoked.set_reason, 100)
        self.assertRaises(ValueError, revoked.set_reason, b('blue'))
    def test_get_reason_wrong_arguments(self):
        """
        Calling L{OpenSSL.crypto.Revoked.get_reason} with any
        arguments results in L{TypeError} being raised.
        """
        revoked = Revoked()
        self.assertRaises(TypeError, revoked.get_reason, None)
        self.assertRaises(TypeError, revoked.get_reason, 1)
        self.assertRaises(TypeError, revoked.get_reason, "foo")
class CRLTests(TestCase):
    """
    Tests for L{OpenSSL.crypto.CRL}
    """
    # Shared signing fixtures, parsed once at class-definition time.
    cert = load_certificate(FILETYPE_PEM, cleartextCertificatePEM)
    pkey = load_privatekey(FILETYPE_PEM, cleartextPrivateKeyPEM)
    def test_construction(self):
        """
        Confirm we can create L{OpenSSL.crypto.CRL}. Check
        that it is empty
        """
        crl = CRL()
        self.assertTrue( isinstance(crl, CRL) )
        self.assertEqual(crl.get_revoked(), None)
    def test_construction_wrong_args(self):
        """
        Calling L{OpenSSL.crypto.CRL} with any number of arguments
        results in a L{TypeError} being raised.
        """
        self.assertRaises(TypeError, CRL, 1)
        self.assertRaises(TypeError, CRL, "")
        self.assertRaises(TypeError, CRL, None)
    def test_export(self):
        """
        Use python to create a simple CRL with a revocation, and export
        the CRL in formats of PEM, DER and text. Those outputs are verified
        with the openssl program.
        """
        crl = CRL()
        revoked = Revoked()
        now = b(datetime.now().strftime("%Y%m%d%H%M%SZ"))
        revoked.set_rev_date(now)
        revoked.set_serial(b('3ab'))
        # The mixed-case reason exercises case-insensitive reason parsing.
        revoked.set_reason(b('sUpErSeDEd'))
        crl.add_revoked(revoked)
        # PEM format
        dumped_crl = crl.export(self.cert, self.pkey, days=20)
        text = _runopenssl(dumped_crl, "crl", "-noout", "-text")
        # .index raises ValueError (failing the test) if the substring is
        # missing from openssl's rendering.
        text.index(b('Serial Number: 03AB'))
        text.index(b('Superseded'))
        text.index(b('Issuer: /C=US/ST=IL/L=Chicago/O=Testing/CN=Testing Root CA'))
        # DER format
        dumped_crl = crl.export(self.cert, self.pkey, FILETYPE_ASN1)
        text = _runopenssl(dumped_crl, "crl", "-noout", "-text", "-inform", "DER")
        text.index(b('Serial Number: 03AB'))
        text.index(b('Superseded'))
        text.index(b('Issuer: /C=US/ST=IL/L=Chicago/O=Testing/CN=Testing Root CA'))
        # text format
        dumped_text = crl.export(self.cert, self.pkey, type=FILETYPE_TEXT)
        self.assertEqual(text, dumped_text)
    def test_add_revoked_keyword(self):
        """
        L{OpenSSL.CRL.add_revoked} accepts its single argument as the
        I{revoked} keyword argument.
        """
        crl = CRL()
        revoked = Revoked()
        crl.add_revoked(revoked=revoked)
        self.assertTrue(isinstance(crl.get_revoked()[0], Revoked))
    def test_export_wrong_args(self):
        """
        Calling L{OpenSSL.CRL.export} with fewer than two or more than
        four arguments, or with arguments other than the certificate,
        private key, integer file type, and integer number of days it
        expects, results in a L{TypeError} being raised.
        """
        crl = CRL()
        self.assertRaises(TypeError, crl.export)
        self.assertRaises(TypeError, crl.export, self.cert)
        self.assertRaises(TypeError, crl.export, self.cert, self.pkey, FILETYPE_PEM, 10, "foo")
        self.assertRaises(TypeError, crl.export, None, self.pkey, FILETYPE_PEM, 10)
        self.assertRaises(TypeError, crl.export, self.cert, None, FILETYPE_PEM, 10)
        self.assertRaises(TypeError, crl.export, self.cert, self.pkey, None, 10)
        self.assertRaises(TypeError, crl.export, self.cert, FILETYPE_PEM, None)
    def test_export_unknown_filetype(self):
        """
        Calling L{OpenSSL.CRL.export} with a file type other than
        L{FILETYPE_PEM}, L{FILETYPE_ASN1}, or L{FILETYPE_TEXT} results
        in a L{ValueError} being raised.
        """
        crl = CRL()
        self.assertRaises(ValueError, crl.export, self.cert, self.pkey, 100, 10)
    def test_get_revoked(self):
        """
        Use python to create a simple CRL with two revocations.
        Get back the L{Revoked} using L{OpenSSL.CRL.get_revoked} and
        verify them.
        """
        crl = CRL()
        revoked = Revoked()
        now = b(datetime.now().strftime("%Y%m%d%H%M%SZ"))
        revoked.set_rev_date(now)
        revoked.set_serial(b('3ab'))
        crl.add_revoked(revoked)
        # add_revoked copies its argument: mutating the same Revoked and
        # adding it again yields a second, distinct entry (asserted below).
        revoked.set_serial(b('100'))
        revoked.set_reason(b('sUpErSeDEd'))
        crl.add_revoked(revoked)
        revs = crl.get_revoked()
        self.assertEqual(len(revs), 2)
        self.assertEqual(type(revs[0]), Revoked)
        self.assertEqual(type(revs[1]), Revoked)
        self.assertEqual(revs[0].get_serial(), b('03AB'))
        self.assertEqual(revs[1].get_serial(), b('0100'))
        self.assertEqual(revs[0].get_rev_date(), now)
        self.assertEqual(revs[1].get_rev_date(), now)
    def test_get_revoked_wrong_args(self):
        """
        Calling L{OpenSSL.CRL.get_revoked} with any arguments results
        in a L{TypeError} being raised.
        """
        crl = CRL()
        self.assertRaises(TypeError, crl.get_revoked, None)
        self.assertRaises(TypeError, crl.get_revoked, 1)
        self.assertRaises(TypeError, crl.get_revoked, "")
        self.assertRaises(TypeError, crl.get_revoked, "", 1, None)
    def test_add_revoked_wrong_args(self):
        """
        Calling L{OpenSSL.CRL.add_revoked} with other than one
        argument results in a L{TypeError} being raised.
        """
        crl = CRL()
        self.assertRaises(TypeError, crl.add_revoked)
        self.assertRaises(TypeError, crl.add_revoked, 1, 2)
        self.assertRaises(TypeError, crl.add_revoked, "foo", "bar")
    def test_load_crl(self):
        """
        Load a known CRL and inspect its revocations. Both
        PEM and DER formats are loaded.
        """
        crl = load_crl(FILETYPE_PEM, crlData)
        revs = crl.get_revoked()
        self.assertEqual(len(revs), 2)
        self.assertEqual(revs[0].get_serial(), b('03AB'))
        self.assertEqual(revs[0].get_reason(), None)
        self.assertEqual(revs[1].get_serial(), b('0100'))
        self.assertEqual(revs[1].get_reason(), b('Superseded'))
        # Convert the same CRL to DER with the command line tool and check
        # that loading that form yields the same revocations.
        der = _runopenssl(crlData, "crl", "-outform", "DER")
        crl = load_crl(FILETYPE_ASN1, der)
        revs = crl.get_revoked()
        self.assertEqual(len(revs), 2)
        self.assertEqual(revs[0].get_serial(), b('03AB'))
        self.assertEqual(revs[0].get_reason(), None)
        self.assertEqual(revs[1].get_serial(), b('0100'))
        self.assertEqual(revs[1].get_reason(), b('Superseded'))
    def test_load_crl_wrong_args(self):
        """
        Calling L{OpenSSL.crypto.load_crl} with other than two
        arguments results in a L{TypeError} being raised.
        """
        self.assertRaises(TypeError, load_crl)
        self.assertRaises(TypeError, load_crl, FILETYPE_PEM)
        self.assertRaises(TypeError, load_crl, FILETYPE_PEM, crlData, None)
    def test_load_crl_bad_filetype(self):
        """
        Calling L{OpenSSL.crypto.load_crl} with an unknown file type
        raises a L{ValueError}.
        """
        self.assertRaises(ValueError, load_crl, 100, crlData)
    def test_load_crl_bad_data(self):
        """
        Calling L{OpenSSL.crypto.load_crl} with file data which can't
        be loaded raises a L{OpenSSL.crypto.Error}.
        """
        self.assertRaises(Error, load_crl, FILETYPE_PEM, "hello, world")
class SignVerifyTests(TestCase):
    """
    Tests for L{OpenSSL.crypto.sign} and L{OpenSSL.crypto.verify}.
    """
    def test_sign_verify(self):
        """
        L{sign} generates a cryptographic signature which L{verify} can check.
        """
        content = b(
            "It was a bright cold day in April, and the clocks were striking "
            "thirteen. Winston Smith, his chin nuzzled into his breast in an "
            "effort to escape the vile wind, slipped quickly through the "
            "glass doors of Victory Mansions, though not quickly enough to "
            "prevent a swirl of gritty dust from entering along with him.")
        # sign the content with this private key
        priv_key = load_privatekey(FILETYPE_PEM, root_key_pem)
        # verify the content with this cert
        good_cert = load_certificate(FILETYPE_PEM, root_cert_pem)
        # certificate unrelated to priv_key, used to trigger an error
        bad_cert = load_certificate(FILETYPE_PEM, server_cert_pem)
        # Exercise more than one digest so the digest argument is known to
        # be honored, not ignored.
        for digest in ['md5', 'sha1']:
            sig = sign(priv_key, content, digest)
            # Verify the signature of content, will throw an exception if error.
            verify(good_cert, sig, content, digest)
            # This should fail because the certificate doesn't match the
            # private key that was used to sign the content.
            self.assertRaises(Error, verify, bad_cert, sig, content, digest)
            # This should fail because we've "tainted" the content after
            # signing it.
            self.assertRaises(
                Error, verify,
                good_cert, sig, content + b("tainted"), digest)
        # test that unknown digest types fail
        self.assertRaises(
            ValueError, sign, priv_key, content, "strange-digest")
        self.assertRaises(
            ValueError, verify, good_cert, sig, content, "strange-digest")
    def test_sign_nulls(self):
        """
        L{sign} produces a signature for a string with embedded nulls.
        """
        content = b("Watch out! \0 Did you see it?")
        priv_key = load_privatekey(FILETYPE_PEM, root_key_pem)
        good_cert = load_certificate(FILETYPE_PEM, root_cert_pem)
        sig = sign(priv_key, content, "sha1")
        verify(good_cert, sig, content, "sha1")
# Run the full test suite when this module is executed as a script.
if __name__ == '__main__':
    main()
| gpl-2.0 |
blaquee/volatility | volatility/renderers/xlsx.py | 13 | 1857 | from volatility import debug
from volatility.renderers.basic import Renderer
__author__ = "gleeda"
try:
from openpyxl.workbook import Workbook
from openpyxl.writer.excel import ExcelWriter
from openpyxl.cell import get_column_letter
from openpyxl.styles import Color, Fill, Style, PatternFill, Border, Side, Alignment, Protection, Font
from openpyxl.cell import Cell
from openpyxl import load_workbook
has_openpyxl = True
except ImportError:
has_openpyxl = False
class XLSXRenderer(Renderer):
    """
    Render a TreeGrid into an .xlsx workbook using openpyxl: one header row
    of column names followed by one worksheet row per grid node.
    """
    def __init__(self, renderers_func, config):
        # Fail early with an install hint when openpyxl is unavailable.
        if not has_openpyxl:
            debug.error("You must install OpenPyxl 2.1.2 for xlsx format:\n\thttps://pypi.python.org/pypi/openpyxl")
        self._config = config
        self._columns = None
        # Callable mapping the grid's columns to per-cell text renderers;
        # resolved lazily in render() once the columns are known.
        self._text_cell_renderers_func = renderers_func
        self._text_cell_renderers = None
        # NOTE(review): optimized_write streams rows, so written cells
        # cannot be revisited; also a deprecated flag in newer openpyxl
        # releases -- confirm against the pinned 2.1.2 version.
        self._wb = Workbook(optimized_write = True)
        self._ws = self._wb.create_sheet()
    def description(self):
        """Return the header row: one entry per column name."""
        output = []
        for column in self._columns:
            output.append((column.name))
        return output
    def _add_row(self, node, data):
        """
        Visitor callback for grid.visit: assign this node the next row
        index and append its values as a worksheet row.
        """
        accumulator = data
        # Next free row index is one past the largest assigned so far
        # (seeded with {None: 0} by render()).
        accumulator[node] = max(accumulator.values()) + 1
        # node.values presumably holds the row's cell values in column
        # order -- from the TreeGrid contract; verify against callers.
        self._ws.append(list(node.values))
        return accumulator
    def render(self, outfd, grid):
        """Renders the TreeGrid in data out to the output file from the config options"""
        if not self._config.OUTPUT_FILE:
            debug.error("Please specify a valid output file using --output-file")
        self._columns = grid.columns
        self._text_cell_renderers = self._text_cell_renderers_func(self._columns)
        self._ws.append(self.description())
        grid.visit(None, self._add_row, {None: 0})
        self._wb.save(filename = self._config.OUTPUT_FILE)
| gpl-2.0 |
openhatch/oh-mainline | vendor/packages/gdata/samples/oauth/oauth_on_appengine/appengine_utilities/rotmodel.py | 131 | 2149 | """
Copyright (c) 2008, appengine-utilities project
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
- Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
- Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
- Neither the name of the appengine-utilities project nor the names of its
contributors may be used to endorse or promote products derived from this
software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
from google.appengine.ext import db
class ROTModel(db.Model):
    """Retry-on-timeout model.

    Overrides db.Model.put() so that a datastore timeout is retried up to
    three times, maximizing the chance the data makes it into the datastore.
    If every attempt times out, db.Timeout is raised for the caller to
    handle.
    """

    def put(self):
        # Try the write up to three times before giving up.
        for _attempt in range(3):
            try:
                return db.Model.put(self)
            except db.Timeout:
                pass
        raise db.Timeout()
| agpl-3.0 |
hellofreedom/ansible-modules-core | cloud/rackspace/rax_identity.py | 150 | 3026 | #!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# This is a DOCUMENTATION stub specific to this module, it extends
# a documentation fragment located in ansible.utils.module_docs_fragments
DOCUMENTATION = '''
---
module: rax_identity
short_description: Load Rackspace Cloud Identity
description:
- Verifies Rackspace Cloud credentials and returns identity information
version_added: "1.5"
options:
state:
description:
- Indicate desired state of the resource
choices: ['present', 'absent']
default: present
author:
- "Christopher H. Laco (@claco)"
- "Matt Martz (@sivel)"
extends_documentation_fragment: rackspace.openstack
'''
EXAMPLES = '''
- name: Load Rackspace Cloud Identity
gather_facts: False
hosts: local
connection: local
tasks:
- name: Load Identity
local_action:
module: rax_identity
credentials: ~/.raxpub
region: DFW
register: rackspace_identity
'''
try:
import pyrax
HAS_PYRAX = True
except ImportError:
HAS_PYRAX = False
def cloud_identity(module, state, identity):
    """Report the verified Rackspace identity and exit the Ansible module.

    :param module: the AnsibleModule instance (used for exit/fail).
    :param state: requested state; only 'present' is supported.
    :param identity: an authenticated (or not) pyrax identity object.
    Calls module.exit_json / module.fail_json, so it never returns normally.
    """
    instance = dict(
        authenticated=identity.authenticated,
        credentials=identity._creds_file
    )
    changed = False

    instance.update(rax_to_dict(identity))
    # Materialize the keys: on Python 3, dict.keys() returns a view object
    # that is not JSON-serializable when Ansible emits the result.
    instance['services'] = list(instance.get('services', {}).keys())

    if state == 'present':
        if not identity.authenticated:
            module.fail_json(msg='Credentials could not be verified!')

        module.exit_json(changed=changed, identity=instance)
def main():
    """Entry point: validate credentials and emit identity facts."""
    # Base Rackspace argument spec plus this module's own options.
    argument_spec = rax_argument_spec()
    argument_spec.update(
        dict(
            state=dict(default='present', choices=['present'])
        )
    )

    module = AnsibleModule(
        argument_spec=argument_spec,
        required_together=rax_required_together()
    )

    if not HAS_PYRAX:
        module.fail_json(msg='pyrax is required for this module')

    state = module.params.get('state')

    # Authenticates against Rackspace; populates pyrax.identity on success.
    setup_rax_module(module, pyrax)

    if not pyrax.identity:
        module.fail_json(msg='Failed to instantiate client. This '
                             'typically indicates an invalid region or an '
                             'incorrectly capitalized region name.')

    cloud_identity(module, state, pyrax.identity)
# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.rax import *
# invoke the module
main()
| gpl-3.0 |
pscottdevos/pyfilesystem | fs/tests/test_mountfs.py | 13 | 3012 | from fs.mountfs import MountFS
from fs.memoryfs import MemoryFS
import unittest
class TestMountFS(unittest.TestCase):
    """Tests for MountFS: auto-close semantics, file mounts, empty state.

    Uses assertTrue/assertFalse rather than the deprecated ``assert_``
    alias (removed from unittest in modern Python versions).
    """

    def test_auto_close(self):
        """Test MountFS auto close is working"""
        multi_fs = MountFS()
        m1 = MemoryFS()
        m2 = MemoryFS()
        multi_fs.mount('/m1', m1)
        multi_fs.mount('/m2', m2)
        self.assertTrue(not m1.closed)
        self.assertTrue(not m2.closed)
        # Closing the mount fs should close the mounted filesystems too.
        multi_fs.close()
        self.assertTrue(m1.closed)
        self.assertTrue(m2.closed)

    def test_no_auto_close(self):
        """Test MountFS auto close can be disabled"""
        multi_fs = MountFS(auto_close=False)
        m1 = MemoryFS()
        m2 = MemoryFS()
        multi_fs.mount('/m1', m1)
        multi_fs.mount('/m2', m2)
        self.assertTrue(not m1.closed)
        self.assertTrue(not m2.closed)
        # With auto_close disabled the mounted filesystems stay open.
        multi_fs.close()
        self.assertTrue(not m1.closed)
        self.assertTrue(not m2.closed)

    def test_mountfile(self):
        """Test mounting a file"""
        quote = b"""If you wish to make an apple pie from scratch, you must first invent the universe."""
        mem_fs = MemoryFS()
        mem_fs.makedir('foo')
        mem_fs.setcontents('foo/bar.txt', quote)
        foo_dir = mem_fs.opendir('foo')

        mount_fs = MountFS()
        mount_fs.mountfile('bar.txt', foo_dir.open, foo_dir.getinfo)

        self.assertTrue(mount_fs.isdir('/'))
        self.assertTrue(mount_fs.isdir('./'))
        self.assertTrue(mount_fs.isdir(''))

        # Check we can see the mounted file in the dir list
        self.assertEqual(mount_fs.listdir(), ["bar.txt"])
        self.assertTrue(not mount_fs.exists('nobodyhere.txt'))
        self.assertTrue(mount_fs.exists('bar.txt'))
        self.assertTrue(mount_fs.isfile('bar.txt'))
        self.assertTrue(not mount_fs.isdir('bar.txt'))

        # Check open and getinfo callables
        self.assertEqual(mount_fs.getcontents('bar.txt'), quote)
        self.assertEqual(mount_fs.getsize('bar.txt'), len(quote))

        # Check changes are written back
        mem_fs.setcontents('foo/bar.txt', 'baz')
        self.assertEqual(mount_fs.getcontents('bar.txt'), b'baz')
        self.assertEqual(mount_fs.getsize('bar.txt'), len('baz'))

        # Check changes are written to the original fs
        self.assertEqual(mem_fs.getcontents('foo/bar.txt'), b'baz')
        self.assertEqual(mem_fs.getsize('foo/bar.txt'), len('baz'))

        # Check unmount
        self.assertTrue(mount_fs.unmount("bar.txt"))
        self.assertEqual(mount_fs.listdir(), [])
        self.assertTrue(not mount_fs.exists('bar.txt'))

        # Check unmounting a second time is a null op, and returns False
        self.assertFalse(mount_fs.unmount("bar.txt"))

    def test_empty(self):
        """Test MountFS with nothing mounted."""
        mount_fs = MountFS()
        self.assertEqual(mount_fs.getinfo(''), {})
        self.assertEqual(mount_fs.getxattr('', 'yo'), None)
        self.assertEqual(mount_fs.listdir(), [])
        self.assertEqual(list(mount_fs.ilistdir()), [])
| bsd-3-clause |
sabiodelhielo/rancher-validation | lib/kubectl_client.py | 1 | 8663 | import os
import json
import time
import subprocess
DEBUG = os.environ.get('DEBUG', 'false')
CONFORMANCE_YAML = ("tests/kubernetes_conformance/resources/k8s_ymls/"
"sonobuoy-conformance.yaml")
class KubectlClient(object):
    """Thin wrapper that shells out to the ``kubectl`` binary.

    Every call is built as a command string and executed via
    ``run_command``. The kubeconfig path must be assigned through the
    ``kube_config_path`` property before issuing any command.
    """

    def __init__(self):
        # Path to the kubeconfig file; must be set before use.
        self._kube_config_path = None
        # Hide verbose output unless the DEBUG env var is 'true'.
        self._hide = False if DEBUG.lower() == 'true' else True

    @property
    def kube_config_path(self):
        return self._kube_config_path

    @kube_config_path.setter
    def kube_config_path(self, value):
        self._kube_config_path = value

    @staticmethod
    def _load_json(output):
        # kubectl may legitimately print nothing (e.g. for deletes).
        if output == '':
            return None
        return json.loads(output)

    def _default_output_json(self, **cli_options):
        """
        Adds --output=json to options
        Does not override if output is passed in!
        """
        if 'output' not in list(cli_options.keys()):
            cli_options['output'] = 'json'
        return cli_options

    def _cli_options(self, **kwargs):
        """
        Pass through any kubectl option
        A couple of exceptions for the keyword args mapping to the
        cli options names:
        1) if option flag has a '-', replace with '_'
        i.e. '--all-namespaces' can be passed in all_namespaces=True
        2) reserved words:
        For cli option: 'as' => 'as_user'
        """
        command_options = ""
        for k, v in kwargs.items():
            # Do not include values that are none
            if v is None:
                continue
            # reserved word
            k = 'as' if k == 'as_user' else k
            # k = 'all' if k == 'all_' else k
            if v is False or v is True:
                # kubectl expects lowercase true/false, not Python's True/False.
                value = str(v).lower()
            else:
                value = v
            command_options += " --{}={}".format(k.replace('_', '-'), value)
        return command_options

    def execute_kubectl_cmd(self, cmd, json_out=True):
        # Legacy entry point: takes a pre-built option string in `cmd`.
        command = 'kubectl --kubeconfig {0} {1}'.format(
            self.kube_config_path, cmd)
        if json_out:
            command += ' -o json'
        print("Running kubectl command: {}".format(command))
        start_time = time.time()
        result = self.run_command(command)
        end_time = time.time()
        print('Run time for command {0}: {1} seconds'.format(
            command, end_time - start_time))
        return result

    def execute_kubectl(self, cmd, **cli_options):
        # Preferred entry point: options are passed as keyword args.
        # always add kubeconfig
        cli_options['kubeconfig'] = self.kube_config_path
        command = 'kubectl {0}{1}'.format(
            cmd, self._cli_options(**cli_options))
        print("Running kubectl command: {}".format(command))
        start_time = time.time()
        result = self.run_command(command)
        end_time = time.time()
        print('Run time for command {0}: {1} seconds'.format(
            command, end_time - start_time))
        return result

    def exec_cmd(self, pod, cmd, namespace):
        """Run `cmd` inside `pod` via `kubectl exec`; returns raw output."""
        result = self.execute_kubectl_cmd(
            'exec {0} --namespace={1} -- {2}'.format(pod, namespace, cmd),
            json_out=False)
        return result

    def logs(self, pod='', **cli_options):
        """Fetch logs for a pod (or per selector via cli_options)."""
        command = 'logs {0}'.format(pod) if pod else "logs"
        result = self.execute_kubectl(command, **cli_options)
        return result

    def cp_from_pod(self, pod, namespace, path_in_pod, local_path):
        """Copy a file out of a pod to the local filesystem."""
        command = "cp {}/{}:{} {}".format(
            namespace, pod, path_in_pod, local_path)
        return self.execute_kubectl(command)

    def list_namespaces(self):
        """Return the names of all namespaces in the cluster."""
        ns = self.get_resource("namespace")
        return [n['metadata']['name'] for n in ns['items']]

    def get_nodes(self):
        """Return the parsed `kubectl get nodes` payload."""
        nodes = self.get_resource("nodes")
        return nodes

    def create_ns(self, namespace):
        """Create a namespace and assert it reaches the Active phase."""
        self.create_resource("namespace", namespace)
        # Verify namespace is created
        ns = self.get_resource("namespace", name=namespace)
        assert ns["metadata"]["name"] == namespace
        assert ns["status"]["phase"] == "Active"
        return ns

    def run(self, name, **cli_options):
        """`kubectl run <name>` with pass-through options."""
        command = "run {0}".format(name)
        result = self.execute_kubectl(command, **cli_options)
        return result

    def create_resourse_from_yml(self, file_yml, namespace=None):
        # NOTE(review): "resourse" is a typo kept for API compatibility.
        cmd = "create -f {0}".format(file_yml)
        if namespace:
            cmd += ' --namespace={0}'.format(namespace)
        return self.execute_kubectl_cmd(cmd)

    def delete_resourse_from_yml(self, file_yml, namespace=None):
        # NOTE(review): "resourse" is a typo kept for API compatibility.
        cmd = "delete -f {0}".format(file_yml)
        if namespace:
            cmd += ' --namespace={0}'.format(namespace)
        return self.execute_kubectl_cmd(cmd, json_out=False)

    def create_resource(self, resource, name=None, **cli_options):
        """Create a resource and return the parsed JSON response."""
        cli_options = self._default_output_json(**cli_options)
        command = "create {0}".format(resource)
        if name:
            command += ' {0}'.format(name)
        result = self.execute_kubectl(command, **cli_options)
        return self._load_json(result)

    def get_resource(self, resource, name=None, **cli_options):
        """Get a resource (optionally by name) as parsed JSON."""
        cli_options = self._default_output_json(**cli_options)
        command = "get {0}".format(resource)
        if name:
            command += ' {0}'.format(name)
        result = self.execute_kubectl(command, **cli_options)
        return self._load_json(result)

    def delete_resourse(self, resource, name=None, **cli_options):
        # NOTE(review): "resourse" is a typo kept for API compatibility.
        command = "delete {0}".format(resource)
        if name:
            command += ' {0}'.format(name)
        return self.execute_kubectl(command, **cli_options)

    def wait_for_pods(self, number_of_pods=1, state='Running', **cli_options):
        """Poll until `number_of_pods` pods reach `state`; 300s timeout."""
        start_time = int(time.time())
        while True:
            pods = self.get_resource('pods', **cli_options)
            print("pods:")
            print(pods)
            print (len(pods['items']))
            if len(pods['items']) == number_of_pods:
                running_pods = 0
                for pod in pods['items']:
                    print (pod['status']['phase'])
                    if pod['status']['phase'] != state:
                        print("Pod '{0}' not {1} is {2}!".format(
                            pod['metadata']['name'], state,
                            pod['status']['phase']))
                        break
                    else:
                        running_pods += 1
                if running_pods == number_of_pods:
                    return pods
            if int(time.time()) - start_time > 300:
                # Build a name -> phase summary for the timeout message.
                pod_states = {}
                for p in pods.get('items', []):
                    pod_states[p['metadata']['name']] = p['status']['phase']
                raise Exception(
                    'Timeout Exception: pods did not start\n'
                    'Expect number of pods {0} vs number of pods found {1}:\n'
                    'Pod states: {2}'.format(
                        number_of_pods, len(pod_states), pod_states))
            time.sleep(5)

    def wait_for_pod(self, name, state='Running', **cli_options):
        """
        If a pod name is known, wait for pod to start
        """
        start_time = int(time.time())
        while True:
            pod = self.get_resource('pod', name=name, **cli_options)
            if pod['status']['phase'] != state:
                print("Pod '{0}' not {1} is {2}!".format(
                    pod['metadata']['name'], state, pod['status']['phase']))
            else:
                # Grace period so dependent containers can settle.
                time.sleep(15)
                return pod
            if int(time.time()) - start_time > 300:
                raise Exception(
                    'Timeout Exception: pod {} did not start\n'.format(name))
            time.sleep(5)

    def apply_conformance_tests(self):
        """Apply the sonobuoy conformance manifest; asserts success."""
        command = "apply -f {0}".format(CONFORMANCE_YAML)
        result = self.execute_kubectl_cmd(command)
        assert result.ok, (
            "Failed to apply sonobuoy-conformance.yaml.\nCommand: '{0}'\n"
            "stdout: {1}\nstderr:{2}\n".format(
                command, result.stdout, result.stderr))
        return result

    def run_command(self, command):
        # shell=True: `command` is a fully formed kubectl command line.
        return subprocess.check_output(command, shell=True, text=True)

    def run_command_with_stderr(self, command):
        try:
            output = subprocess.check_output(command, shell=True,
                                             stderr=subprocess.PIPE)
            returncode = 0
        except subprocess.CalledProcessError as e:
            output = e.output
            returncode = e.returncode
        print(returncode)
        # NOTE(review): `output` and `returncode` are computed but never
        # returned — callers currently receive None. Confirm whether a
        # `return output, returncode` was intended here.
| apache-2.0 |
ericmjl/influenza-reassortment | source_pair.py | 1 | 4298 | import networkx as nx
import numpy as np
import pickle as pkl
import pandas as pd
import tables as tb
import sys
from itertools import combinations
class SourcePairSearcher(object):
    """
    SourcePairSearcher

    Identifies, for one sink isolate, the pair of earlier source isolates
    whose combined per-segment PWI (pairwise identity) is maximal over all
    2-way splits of the 8 influenza segments, then records the result as
    'reassortant' edges in the graph.
    """

    def __init__(self, handle, isolate_num, segment_stores):
        super(SourcePairSearcher, self).__init__()
        self.handle = handle
        # Open access the list of isolates for which source pairs are to
        # be found. Pickles must be read in binary mode ('rb'): text mode
        # fails on Python 3 and corrupts pickle protocol >= 1 data.
        with open('{0} Isolates for Source Pair Search.pkllist'.format(self.handle), 'rb') as f:
            self.sink = pkl.load(f)[isolate_num]
        print(self.sink)
        self.isolate_num = isolate_num
        self.G = nx.read_gpickle('{0} Initialized Graph.pkl'.format(self.handle))
        self.older_nodes = []
        self.segment_stores = segment_stores
        self.maxpwi = 0
        self.sources = dict()

    def run(self):
        """Search every 2-way segment split, keep the best-scoring sources,
        add the resulting edges and save the trimmed graph."""
        self.get_nodes_earlier_in_time()
        for n in range(1, 5):
            for comb in self.segment_combinations(n):
                print('Currently on combination:')
                print('{0}'.format(comb))
                comb1 = self.sum_subset_segment_pwis(comb[0])
                comb2 = self.sum_subset_segment_pwis(comb[1])
                sumpwi = comb1.max() + comb2.max()
                print("Sum PWI: {0}".format(sumpwi))
                # Only splits that can beat (or tie) the current best
                # are worth inspecting further.
                if sumpwi >= self.maxpwi:
                    filtered1 = comb1[comb1 == comb1.max()]
                    filtered2 = comb2[comb2 == comb2.max()]
                    # Strictly better (and not NaN): replace the sources.
                    if sumpwi > self.maxpwi and not np.isnan(sumpwi):
                        self.sources = dict()
                        self.maxpwi = sumpwi
                        self.sources[comb[0]] = [i for i in filtered1.index]
                        self.sources[comb[1]] = [i for i in filtered2.index]
                        print(self.maxpwi)
        self.add_edges()
        self.extract_nodes()
        self.save_graph()

    def add_edges(self):
        """Add one 'reassortant' edge per identified source isolate."""
        for segs, isolates in self.sources.items():
            for source in isolates:
                d = {'edge_type':'reassortant', 'pwi':self.maxpwi, 'segments':dict()}
                for s in segs:
                    d['segments'][s] = None
                self.G.add_edge(source, self.sink, attr_dict=d)

    def save_graph(self):
        """Persist the (trimmed) graph for this isolate."""
        nx.write_gpickle(self.G, 'reassortant_edges/{0} Reassortant Edges {1}.pkl'.format(self.handle, self.isolate_num))

    def extract_nodes(self):
        """Trim the graph down to nodes that participate in an edge."""
        nodes_to_extract = set()
        for n1, n2 in self.G.edges():
            nodes_to_extract.add(n1)
            nodes_to_extract.add(n2)
        self.G = self.G.subgraph(nodes_to_extract)

    def get_segment_store(self, segment):
        """
        This helper function gets the particular store from the hdf5 set of stores.
        """
        self.segment_stores[segment] = pd.read_hdf('{0} Segment Affmats.h5'.format(self.handle), key='segment{0}'.format(segment))

    def segment_combinations(self, n):
        """
        Here:
        n = number of segments from first source.
        Therefore, logically:
        !n = complement of segments from second source.
        """
        segments = set(range(1,9))
        return [(tuple(set(i)), tuple(segments.difference(i))) for i in combinations(segments, n)]

    def get_nodes_earlier_in_time(self):
        """Cache all nodes collected strictly before the sink isolate."""
        print('Getting earlier nodes...')
        isolate_date = self.G.node[self.sink]['collection_date']
        self.older_nodes = [n for n, d in self.G.nodes(data=True) if d['collection_date'] < isolate_date]

    def get_col(self, segment):
        """
        Gets the column of PWIs for the sink as a Pandas dataframe, filtered only to
        older nodes.
        """
        df = self.segment_stores[segment].loc[self.older_nodes,self.sink]
        print(df)
        return df

    def sum_subset_segment_pwis(self, segments):
        """
        Returns the summed PWIs for a given subset of segments
        """
        sumpwis = None
        for i, segment in enumerate(segments):
            pwis = self.get_col(segment)
            if i == 0:
                sumpwis = pwis
            if i > 0:
                sumpwis = sumpwis + pwis
        return sumpwis
if __name__ == '__main__':
    # CLI: <handle> <start isolate index> <end isolate index>
    handle = sys.argv[1]
    start = int(sys.argv[2])
    end = int(sys.argv[3])

    def get_segment_store(segment):
        """
        This helper function gets the particular store from the hdf5 set of stores.
        """
        return pd.read_hdf('{0} Segment Affmats.h5'.format(handle), key='segment{0}'.format(segment))

    # Load all 8 segment affinity matrices once, shared across searches.
    segment_stores = dict()
    for segment in range(1,9):
        print('Getting segment {0} store'.format(segment))
        segment_stores[segment] = get_segment_store(segment)

    # Run one source-pair search per isolate in the requested range.
    for i in range(start, end):
        sps = SourcePairSearcher(handle, i, segment_stores)
        sps.run()
tectronics/prometeo-erp | core/auth/views.py | 3 | 6081 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""This file is part of the prometeo project.
This program is free software: you can redistribute it and/or modify it
under the terms of the GNU Lesser General Public License as published by the
Free Software Foundation, either version 3 of the License, or (at your
option) any later version.
This program is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
more details.
You should have received a copy of the GNU Lesser General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>
"""
__author__ = 'Emanuele Bertoldi <emanuele.bertoldi@gmail.com>'
__copyright__ = 'Copyright (c) 2011 Emanuele Bertoldi'
__version__ = '0.0.5'
from django.shortcuts import render_to_response, get_object_or_404
from django.utils.translation import ugettext_lazy as _
from django.views.generic import list_detail, create_update
from django.views.generic.simple import redirect_to
from django.core.urlresolvers import reverse
from django.template import RequestContext
from django.contrib.auth import logout
from django.contrib.comments.models import *
from django.contrib import messages
from django.conf import settings
from prometeo.core.auth.decorators import obj_permission_required as permission_required
from prometeo.core.views import filtered_list_detail, set_language
from models import *
from forms import *
def _adapt_form(request, form):
if not request.user.has_perm('auth.change_group'):
del form.fields['groups']
if not request.user.has_perm('auth.change_permission'):
del form.fields['user_permissions']
if not request.user.is_superuser:
del form.fields['is_staff']
del form.fields['is_active']
del form.fields['is_superuser']
def _get_user(request, *args, **kwargs):
    """Resolve the User addressed by the ``username`` URL kwarg (404 if missing)."""
    return get_object_or_404(User, username=kwargs.get('username', None))
def _get_comment(request, *args, **kwargs):
    """Resolve the Comment addressed by the ``id`` URL kwarg on the current site."""
    comment_id = kwargs.get('id', None)
    return get_object_or_404(Comment, id=comment_id, site__pk=settings.SITE_ID)
def user_logged(request):
    """Sets the language selected by the logged user.
    """
    # Activate the language stored on the user's profile for this session.
    lang = request.user.get_profile().language
    return set_language(request, lang)
@permission_required('auth.view_user')
def user_list(request, page=0, paginate_by=10, **kwargs):
    """Displays the list of all active users.
    """
    # Delegates filtering/pagination to the shared filtered_list_detail view.
    return filtered_list_detail(
        request,
        MyUser.objects.all(),
        fields=['username', 'first_name', 'last_name', 'is_active', 'is_staff', 'is_superuser', 'last_login'],
        paginate_by=paginate_by,
        page=page,
        template_name='auth/user_list.html',
        **kwargs
    )
@permission_required('auth.view_user', _get_user)
def user_detail(request, username, **kwargs):
    """Displays a user's profile.
    """
    # The full queryset is also passed in extra_context so the template
    # can render navigation across users.
    object_list = MyUser.objects.all()
    return list_detail.object_detail(
        request,
        slug=username,
        slug_field='username',
        queryset=object_list,
        template_name='auth/user_detail.html',
        extra_context={'object_list': object_list},
        **kwargs
    )
@permission_required('auth.add_user')
def user_add(request, **kwargs):
    """Adds a new user's profile.
    """
    user = User(is_active=True)
    if request.method == 'POST':
        form = UserEditForm(request.POST, instance=user)
        # Hide fields the requesting user may not edit.
        _adapt_form(request, form)
        if form.is_valid():
            form.save()
            messages.success(request, _("The user was created successfully."))
            return redirect_to(request, url=user.get_absolute_url())
    else:
        form = UserEditForm(instance=user)
        _adapt_form(request, form)
    # Invalid POST or initial GET: re-render the edit form.
    return render_to_response('auth/user_edit.html', RequestContext(request, {'form': form, 'object': user}))
@permission_required('auth.change_user', _get_user)
def user_edit(request, username, **kwargs):
    """Edits a user's profile.
    """
    user = get_object_or_404(User, username=username)
    if request.method == 'POST':
        form = UserEditForm(request.POST, instance=user)
        # Hide fields the requesting user may not edit.
        _adapt_form(request, form)
        pform = UserProfileForm(request.POST, instance=user.get_profile())
        if form.is_valid() and pform.is_valid():
            user = form.save()
            profile = pform.save()
            # If the user edited their own profile, switch the active
            # session language to the newly chosen one.
            if request.user == user:
                set_language(request, profile.language)
            messages.success(request, _("The user was updated successfully."))
            return redirect_to(request, url=user.get_absolute_url())
    else:
        form = UserEditForm(instance=user)
        _adapt_form(request, form)
        pform = UserProfileForm(instance=user.get_profile())
    # Invalid POST or initial GET: re-render both forms.
    return render_to_response('auth/user_edit.html', RequestContext(request, {'form': form, 'pform': pform, 'object': user}))
@permission_required('auth.delete_user', _get_user)
def user_delete(request, username, **kwargs):
    """Deletes a user's profile.
    """
    user = get_object_or_404(User, username=username)
    # Self-deletion: end the session before the account disappears.
    if request.method == 'POST' and user == request.user:
        logout(request)
    return create_update.delete_object(
        request,
        model=User,
        slug=user.username,
        slug_field='username',
        post_delete_redirect='/users/',
        template_name='auth/user_delete.html',
        **kwargs
    )
@permission_required('comments.delete_comment', _get_comment)
def comment_delete(request, id, **kwargs):
    """Deletes a user's comment.
    """
    comment = get_object_or_404(Comment, id=id, site__pk=settings.SITE_ID)
    # After deletion, redirect back to the object the comment was attached to.
    return create_update.delete_object(
        request,
        model=Comment,
        object_id=id,
        post_delete_redirect=comment.content_object.get_absolute_url(),
        template_name='auth/comment_delete.html',
        **kwargs
    )
| lgpl-3.0 |
metsarono/dotfiles | linux/.config/sublime-text-2/Packages/Default/detect_indentation.py | 10 | 2837 | import sublime, sublime_plugin
from functools import partial
class DetectIndentationCommand(sublime_plugin.TextCommand):
    """Examines the contents of the buffer to determine the indentation
    settings."""

    def run(self, edit, show_message = True, threshold = 10):
        # Sample at most the first 16 KiB of the buffer.
        sample = self.view.substr(sublime.Region(0, min(self.view.size(), 2**14)))
        starts_with_tab = 0
        spaces_list = []
        indented_lines = 0
        for line in sample.split("\n"):
            if not line: continue
            if line[0] == "\t":
                starts_with_tab += 1
                indented_lines += 1
            elif line.startswith(' '):
                # Count leading spaces on this line.
                spaces = 0
                for ch in line:
                    if ch == ' ': spaces += 1
                    else: break
                # Ignore single-space indents and all-whitespace lines.
                if spaces > 1 and spaces != len(line):
                    indented_lines += 1
                    spaces_list.append(spaces)

        # evidence[i]: fraction of sampled lines that must be divisible by
        # indent width i before we accept i as the tab size.
        evidence = [1.0, 1.0, 0.8, 0.9, 0.8, 0.9, 0.9, 0.95, 1.0]

        # Only decide when there is enough indented material to judge.
        if indented_lines >= threshold:
            if len(spaces_list) > starts_with_tab:
                # First pass: exact multiples of the candidate width,
                # trying wider indents first so 8 beats 4 beats 2.
                for indent in xrange(8, 1, -1):
                    same_indent = filter(lambda x: x % indent == 0, spaces_list)
                    if len(same_indent) >= evidence[indent] * len(spaces_list):
                        if show_message:
                            sublime.status_message("Detect Indentation: Setting indentation to "
                                + str(indent) + " spaces")
                        self.view.settings().set('translate_tabs_to_spaces', True)
                        self.view.settings().set('tab_size', indent)
                        return
                # Second pass (even widths only): tolerate off-by-one
                # indents, e.g. continuation lines indented one extra space.
                for indent in xrange(8, 1, -2):
                    same_indent = filter(lambda x: x % indent == 0 or x % indent == 1, spaces_list)
                    if len(same_indent) >= evidence[indent] * len(spaces_list):
                        if show_message:
                            sublime.status_message("Detect Indentation: Setting indentation to "
                                + str(indent) + " spaces")
                        self.view.settings().set('translate_tabs_to_spaces', True)
                        self.view.settings().set('tab_size', indent)
                        return
            elif starts_with_tab >= 0.8 * indented_lines:
                # A clear majority of indented lines start with a tab.
                if show_message:
                    sublime.status_message("Detect Indentation: Setting indentation to tabs")
                self.view.settings().set('translate_tabs_to_spaces', False)
class DetectIndentationEventListener(sublime_plugin.EventListener):
    """Run indentation detection automatically when a file is loaded."""

    def on_load(self, view):
        if view.settings().get('detect_indentation'):
            # Only show the status message for views attached to a window.
            view.run_command('detect_indentation',
                             {'show_message': view.window() != None})
| gpl-3.0 |
ntoll/Pcode | rope/contrib/generate.py | 91 | 13249 | import rope.base.evaluate
from rope.base import change, pyobjects, exceptions, pynames, worder, codeanalyze
from rope.refactor import sourceutils, importutils, functionutils, suites
def create_generate(kind, project, resource, offset):
"""A factory for creating `Generate` objects
`kind` can be 'variable', 'function', 'class', 'module' or
'package'.
"""
generate = eval('Generate' + kind.title())
return generate(project, resource, offset)
def create_module(project, name, sourcefolder=None):
    """Creates a module and returns a `rope.base.resources.File`

    `name` is a dotted path; intermediate components are resolved as
    existing packages under `sourcefolder` (default: project root).
    """
    if sourcefolder is None:
        sourcefolder = project.root
    parts = name.split('.')
    parent = sourcefolder
    # Walk down the existing package hierarchy to the target folder.
    for part in parts[:-1]:
        parent = parent.get_child(part)
    return parent.create_file(parts[-1] + '.py')
def create_package(project, name, sourcefolder=None):
    """Creates a package and returns a `rope.base.resources.Folder`

    `name` is a dotted path; intermediate components are resolved as
    existing packages under `sourcefolder` (default: project root).
    An empty `__init__.py` is created inside the new package folder.
    """
    if sourcefolder is None:
        sourcefolder = project.root
    parts = name.split('.')
    parent = sourcefolder
    # Walk down the existing package hierarchy to the parent folder.
    for part in parts[:-1]:
        parent = parent.get_child(part)
    made_packages = parent.create_folder(parts[-1])
    made_packages.create_file('__init__.py')
    return made_packages
class _Generate(object):
    """Base class for the Generate* refactorings.

    Subclasses provide `_get_element` (the source text to insert) and
    `_get_element_kind` (a label used in change descriptions), and may
    override `_generate_info` to customize scope analysis.
    """

    def __init__(self, project, resource, offset):
        self.project = project
        self.resource = resource
        self.info = self._generate_info(project, resource, offset)
        self.name = self.info.get_name()
        self._check_exceptional_conditions()

    def _generate_info(self, project, resource, offset):
        return _GenerationInfo(project.pycore, resource, offset)

    def _check_exceptional_conditions(self):
        # Refuse to generate over an existing element or when the target
        # scope cannot be determined from the offset.
        if self.info.element_already_exists():
            raise exceptions.RefactoringError(
                'Element <%s> already exists.' % self.name)
        if not self.info.primary_is_found():
            raise exceptions.RefactoringError(
                'Cannot determine the scope <%s> should be defined in.' % self.name)

    def get_changes(self):
        """Build the ChangeSet that inserts the generated element."""
        changes = change.ChangeSet('Generate %s <%s>' %
                                   (self._get_element_kind(), self.name))
        indents = self.info.get_scope_indents()
        blanks = self.info.get_blank_lines()
        # Indent the stub for its destination scope and pad with the
        # required blank lines before/after.
        base_definition = sourceutils.fix_indentation(self._get_element(), indents)
        definition = '\n' * blanks[0] + base_definition + '\n' * blanks[1]

        resource = self.info.get_insertion_resource()
        start, end = self.info.get_insertion_offsets()

        collector = codeanalyze.ChangeCollector(resource.read())
        collector.add_change(start, end, definition)
        changes.add_change(change.ChangeContents(
            resource, collector.get_changed()))
        return changes

    def get_location(self):
        """Return (resource, lineno) where the element will be inserted."""
        return (self.info.get_insertion_resource(),
                self.info.get_insertion_lineno())

    def _get_element_kind(self):
        raise NotImplementedError()

    def _get_element(self):
        raise NotImplementedError()
class GenerateFunction(_Generate):
    """Generate a function or method stub at the inferred scope."""

    def _generate_info(self, project, resource, offset):
        # Function generation needs call-site analysis (args, self, etc.).
        return _FunctionGenerationInfo(project.pycore, resource, offset)

    def _get_element(self):
        decorator = ''
        args = []
        if self.info.is_static_method():
            decorator = '@staticmethod\n'
        # Methods, constructors and instance-call stubs take `self` first.
        if self.info.is_method() or self.info.is_constructor() or \
           self.info.is_instance():
            args.append('self')
        args.extend(self.info.get_passed_args())
        definition = '%sdef %s(%s):\n pass\n' % (decorator, self.name,
                                                 ', '.join(args))
        return definition

    def _get_element_kind(self):
        return 'Function'
class GenerateVariable(_Generate):
    """Generate a variable stub initialized to None."""

    def _get_element(self):
        # The inserted stub text: "<name> = None".
        return '%s = None\n' % self.name

    def _get_element_kind(self):
        return 'Variable'
class GenerateClass(_Generate):
    """Generate an empty new-style class stub."""

    def _get_element(self):
        # The inserted stub text: an empty class body.
        return 'class %s(object):\n pass\n' % self.name

    def _get_element_kind(self):
        return 'Class'
class GenerateModule(_Generate):
    """Generate a new module file and import it from the source module."""

    def get_changes(self):
        package = self.info.get_package()
        changes = change.ChangeSet('Generate Module <%s>' % self.name)
        new_resource = self.project.get_file('%s/%s.py' % (package.path, self.name))
        if new_resource.exists():
            raise exceptions.RefactoringError(
                'Module <%s> already exists' % new_resource.path)
        changes.add_change(change.CreateResource(new_resource))
        # Also add an import of the new module to the originating module.
        changes.add_change(_add_import_to_module(
            self.project.pycore, self.resource, new_resource))
        return changes

    def get_location(self):
        package = self.info.get_package()
        return (package.get_child('%s.py' % self.name) , 1)
class GeneratePackage(_Generate):
    """Generate a new package (folder + __init__.py) and import it."""

    def get_changes(self):
        package = self.info.get_package()
        changes = change.ChangeSet('Generate Package <%s>' % self.name)
        new_resource = self.project.get_folder('%s/%s' % (package.path, self.name))
        if new_resource.exists():
            raise exceptions.RefactoringError(
                'Package <%s> already exists' % new_resource.path)
        changes.add_change(change.CreateResource(new_resource))
        # Also add an import of the new package to the originating module.
        changes.add_change(_add_import_to_module(
            self.project.pycore, self.resource, new_resource))
        # A package needs an (empty) __init__.py inside the new folder.
        child = self.project.get_folder(package.path + '/' + self.name)
        changes.add_change(change.CreateFile(child, '__init__.py'))
        return changes

    def get_location(self):
        package = self.info.get_package()
        child = package.get_child(self.name)
        return (child.get_child('__init__.py') , 1)
def _add_import_to_module(pycore, resource, imported):
    """Return a ChangeContents that adds `import <imported>` to `resource`."""
    pymodule = pycore.resource_to_pyobject(resource)
    import_tools = importutils.ImportTools(pycore)
    module_imports = import_tools.module_imports(pymodule)
    # Build a plain `import <module>` statement (no alias).
    module_name = pycore.modname(imported)
    new_import = importutils.NormalImport(((module_name, None), ))
    module_imports.add_import(new_import)
    return change.ChangeContents(resource, module_imports.get_changed_source())
class _GenerationInfo(object):
    """Collects the information needed to generate a new element for the
    name at `offset` in `resource`.

    Key attributes after construction:
      - source_scope: the scope the name is *used* in
      - goal_scope: the scope the new element should be *defined* in
      - goal_pymodule: the module object containing goal_scope
    """

    def __init__(self, pycore, resource, offset):
        self.pycore = pycore
        self.resource = resource
        self.offset = offset
        self.source_pymodule = self.pycore.resource_to_pyobject(resource)
        finder = rope.base.evaluate.ScopeNameFinder(self.source_pymodule)
        # primary: the object before a dot (if any); pyname: the name itself.
        self.primary, self.pyname = finder.get_primary_and_pyname_at(offset)
        self._init_fields()

    def _init_fields(self):
        # Derived fields; split out so subclasses inherit the same setup.
        self.source_scope = self._get_source_scope()
        self.goal_scope = self._get_goal_scope()
        self.goal_pymodule = self._get_goal_module(self.goal_scope)

    def _get_goal_scope(self):
        # No primary (plain name): define the element where it is used.
        if self.primary is None:
            return self._get_source_scope()
        pyobject = self.primary.get_object()
        if isinstance(pyobject, pyobjects.PyDefinedObject):
            return pyobject.get_scope()
        elif isinstance(pyobject.get_type(), pyobjects.PyClass):
            # Attribute access on an instance: define inside its class.
            return pyobject.get_type().get_scope()
        # NOTE: implicitly returns None for any other kind of primary;
        # primary_is_found() relies on that.

    def _get_goal_module(self, scope):
        if scope is None:
            return
        # Walk up to the outermost (module) scope.
        while scope.parent is not None:
            scope = scope.parent
        return scope.pyobject

    def _get_source_scope(self):
        module_scope = self.source_pymodule.get_scope()
        lineno = self.source_pymodule.lines.get_line_number(self.offset)
        return module_scope.get_inner_scope_for_line(lineno)

    def get_insertion_lineno(self):
        """Return the line in goal_pymodule where the element is inserted."""
        lines = self.goal_pymodule.lines
        if self.goal_scope == self.source_scope:
            # Insert right before the logical line that uses the name,
            # walking out of suites until the indentation matches the
            # target scope's body indentation.
            line_finder = self.goal_pymodule.logical_lines
            lineno = lines.get_line_number(self.offset)
            lineno = line_finder.logical_line_in(lineno)[0]
            root = suites.ast_suite_tree(self.goal_scope.pyobject.get_ast())
            suite = root.find_suite(lineno)
            indents = sourceutils.get_indents(lines, lineno)
            while self.get_scope_indents() < indents:
                lineno = suite.get_start()
                indents = sourceutils.get_indents(lines, lineno)
                suite = suite.parent
            return lineno
        else:
            # Different scope: append after its end (clamped to file length).
            return min(self.goal_scope.get_end() + 1, lines.length())

    def get_insertion_resource(self):
        return self.goal_pymodule.get_resource()

    def get_insertion_offsets(self):
        """Return (start, end) character offsets to replace with new code."""
        if self.goal_scope.get_kind() == 'Class':
            # A bare `pass` body is replaced entirely.
            start, end = sourceutils.get_body_region(self.goal_scope.pyobject)
            if self.goal_pymodule.source_code[start:end].strip() == 'pass':
                return start, end
        lines = self.goal_pymodule.lines
        start = lines.get_line_start(self.get_insertion_lineno())
        return (start, start)

    def get_scope_indents(self):
        if self.goal_scope.get_kind() == 'Module':
            return 0
        # One level (4 spaces) deeper than the scope's definition line.
        return sourceutils.get_indents(self.goal_pymodule.lines,
                                       self.goal_scope.get_start()) + 4

    def get_blank_lines(self):
        """Return a (before, after) pair of blank-line counts for the
        generated element, depending on the kind of the goal scope."""
        if self.goal_scope.get_kind() == 'Module':
            base_blanks = 2
            if self.goal_pymodule.source_code.strip() == '':
                base_blanks = 0
        if self.goal_scope.get_kind() == 'Class':
            base_blanks = 1
        if self.goal_scope.get_kind() == 'Function':
            base_blanks = 0
        if self.goal_scope == self.source_scope:
            return (0, base_blanks)
        return (base_blanks, 0)

    def get_package(self):
        """Return the package to create a module/package in.

        :raises exceptions.RefactoringError: if the primary is not a package
        """
        primary = self.primary
        if self.primary is None:
            # No dotted prefix: default to the first source folder.
            return self.pycore.get_source_folders()[0]
        if isinstance(primary.get_object(), pyobjects.PyPackage):
            return primary.get_object().get_resource()
        raise exceptions.RefactoringError(
            'A module/package can be only created in a package.')

    def primary_is_found(self):
        # True when _get_goal_scope resolved a place to generate into.
        return self.goal_scope is not None

    def element_already_exists(self):
        if self.pyname is None or isinstance(self.pyname, pynames.UnboundName):
            return False
        return self.get_name() in self.goal_scope.get_defined_names()

    def get_name(self):
        return worder.get_name_at(self.resource, self.offset)
class _FunctionGenerationInfo(_GenerationInfo):
    """Generation info specialized for generating functions and methods."""

    def _get_goal_scope(self):
        if self.is_constructor():
            # Generating `__init__`: define inside the class itself.
            return self.pyname.get_object().get_scope()
        if self.is_instance():
            # Calling an instance: generate `__call__` inside its class.
            return self.pyname.get_object().get_type().get_scope()
        if self.primary is None:
            return self._get_source_scope()
        pyobject = self.primary.get_object()
        if isinstance(pyobject, pyobjects.PyDefinedObject):
            return pyobject.get_scope()
        elif isinstance(pyobject.get_type(), pyobjects.PyClass):
            return pyobject.get_type().get_scope()

    def element_already_exists(self):
        if self.pyname is None or isinstance(self.pyname, pynames.UnboundName):
            return False
        return self.get_name() in self.goal_scope.get_defined_names()

    def is_static_method(self):
        # Primary is the class itself (Class.name access).
        return self.primary is not None and \
            isinstance(self.primary.get_object(), pyobjects.PyClass)

    def is_method(self):
        # Primary is an instance of a class (instance.name access).
        return self.primary is not None and \
            isinstance(self.primary.get_object().get_type(), pyobjects.PyClass)

    def is_constructor(self):
        # The name itself resolves to a class: generating its constructor.
        return self.pyname is not None and \
            isinstance(self.pyname.get_object(), pyobjects.PyClass)

    def is_instance(self):
        if self.pyname is None:
            return False
        pyobject = self.pyname.get_object()
        return isinstance(pyobject.get_type(), pyobjects.PyClass)

    def get_name(self):
        if self.is_constructor():
            return '__init__'
        if self.is_instance():
            return '__call__'
        return worder.get_name_at(self.resource, self.offset)

    def get_passed_args(self):
        """Return parameter names for the generated function, derived from
        the call at `offset`: identifier-looking positional arguments keep
        their name, other expressions become ``argN``, and keyword
        arguments contribute their keyword name."""
        result = []
        source = self.source_pymodule.source_code
        finder = worder.Worder(source)
        if finder.is_a_function_being_called(self.offset):
            start, end = finder.get_primary_range(self.offset)
            parens_start, parens_end = finder.get_word_parens_range(end - 1)
            call = source[start:parens_end]
            parser = functionutils._FunctionParser(call, False)
            args, keywords = parser.get_parameters()
            for arg in args:
                if self._is_id(arg):
                    result.append(arg)
                else:
                    result.append('arg%d' % len(result))
            for name, value in keywords:
                result.append(name)
        return result

    def _is_id(self, arg):
        # True if `arg` looks like a plain identifier: letters, digits and
        # underscores only, not starting with a digit.
        # NOTE(review): raises IndexError for an empty string -- presumably
        # the call parser never yields one; confirm upstream.
        def id_or_underline(c):
            return c.isalpha() or c == '_'
        for c in arg:
            if not id_or_underline(c) and not c.isdigit():
                return False
        return id_or_underline(arg[0])
| gpl-3.0 |
matthiasdiener/spack | var/spack/repos/builtin/packages/jsoncpp/package.py | 4 | 2139 | ##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Jsoncpp(CMakePackage):
    """JsonCpp is a C++ library that allows manipulating JSON values,
    including serialization and deserialization to and from strings.
    It can also preserve existing comment in unserialization/serialization
    steps, making it a convenient format to store user input files."""

    homepage = "https://github.com/open-source-parsers/jsoncpp"
    url = "https://github.com/open-source-parsers/jsoncpp/archive/1.7.3.tar.gz"

    # Known release and its md5 checksum.
    version('1.7.3', 'aff6bfb5b81d9a28785429faa45839c5')

    variant('build_type', default='RelWithDebInfo',
            description='The build type to build',
            values=('Debug', 'Release', 'RelWithDebInfo',
                    'MinSizeRel', 'Coverage'))

    depends_on('cmake@3.1:', type='build')
    # Python is only needed when running the package's test suite.
    depends_on('python', type='test')

    def cmake_args(self):
        # Build a shared library instead of the default static one.
        return ['-DBUILD_SHARED_LIBS=ON']
| lgpl-2.1 |
rzambre/servo | tests/wpt/web-platform-tests/tools/pytest/doc/en/example/nonpython/conftest.py | 202 | 1337 | # content of conftest.py
import pytest
def pytest_collect_file(parent, path):
    """Collect ``test*.yml`` files as YamlFile nodes; ignore everything else."""
    is_yaml_test = path.ext == ".yml" and path.basename.startswith("test")
    return YamlFile(path, parent) if is_yaml_test else None
class YamlFile(pytest.File):
    """Collector that turns each top-level YAML mapping entry into a test."""

    def collect(self):
        import yaml  # we need a yaml parser, e.g. PyYAML
        document = yaml.safe_load(self.fspath.open())
        for item_name, item_spec in document.items():
            yield YamlItem(item_name, self, item_spec)
class YamlItem(pytest.Item):
    """A single test case built from one YAML mapping entry."""

    def __init__(self, name, parent, spec):
        super(YamlItem, self).__init__(name, parent)
        self.spec = spec

    def runtest(self):
        # some custom test execution (dumb example follows)
        for key, expected in self.spec.items():
            if key != expected:
                raise YamlException(self, key, expected)

    def repr_failure(self, excinfo):
        """ called when self.runtest() raises an exception. """
        if not isinstance(excinfo.value, YamlException):
            return None
        report_lines = [
            "usecase execution failed",
            " spec failed: %r: %r" % excinfo.value.args[1:3],
            " no further details known at this point.",
        ]
        return "\n".join(report_lines)

    def reportinfo(self):
        return (self.fspath, 0, "usecase: %s" % self.name)
class YamlException(Exception):
    """Custom exception for error reporting; raised with (item, key, value)."""
| mpl-2.0 |
raymondxyang/tensorflow | tensorflow/contrib/learn/python/learn/session_run_hook.py | 144 | 1204 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""This file is deprecated. Use tensorflow.python.training.session_run_hook."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.training import session_run_hook
# pylint: disable=invalid-name
# Backwards-compatible aliases: re-export the canonical classes from
# tensorflow.python.training.session_run_hook under this deprecated path.
SessionRunHook = session_run_hook.SessionRunHook
SessionRunArgs = session_run_hook.SessionRunArgs
SessionRunContext = session_run_hook.SessionRunContext
SessionRunValues = session_run_hook.SessionRunValues
# pylint: enable=invalid-name
| apache-2.0 |
seojungmin/peloton | script/formatting/formatter.py | 1 | 7020 | #!/usr/bin/env python
# encoding: utf-8
## ==============================================
## GOAL : Format code, Update headers
## ==============================================
import argparse
import logging
import os
import re
import sys
import datetime
import subprocess
## ==============================================
## CONFIGURATION
## ==============================================
# NOTE: absolute path to peloton directory is calculated from current directory
# directory structure: peloton/scripts/formatting/<this_file>
# PELOTON_DIR needs to be redefined if the directory structure is changed
CODE_SOURCE_DIR = os.path.abspath(os.path.dirname(__file__))
PELOTON_DIR = reduce(os.path.join, [CODE_SOURCE_DIR, os.path.pardir, os.path.pardir])
#other directory paths used are relative to peloton_dir
PELOTON_SRC_DIR = os.path.join(PELOTON_DIR, "src")
PELOTON_TESTS_DIR = os.path.join(PELOTON_DIR, "test")
# DEFAULT DIRS
DEFAULT_DIRS = []
DEFAULT_DIRS.append(PELOTON_SRC_DIR)
DEFAULT_DIRS.append(PELOTON_TESTS_DIR)
CLANG_FORMAT = "clang-format-3.6"
CLANG_FORMAT_FILE = os.path.join(PELOTON_DIR, ".clang-format")
## ==============================================
## HEADER CONFIGURATION
## ==============================================
#header framework, dynamic information will be added inside function
header_comment_line_1 = "//===----------------------------------------------------------------------===//\n"
header_comment_line_1 += "//\n"
header_comment_line_1 += "// Peloton\n"
header_comment_line_2 = "//\n"
header_comment_line_3 = "// "
header_comment_line_4 = "//\n"
header_comment_line_5 = "// Identification: "
header_comment_line_6 = "//\n"
header_comment_line_7 = "// Copyright (c) 2015-%d, Carnegie Mellon University Database Group\n" % datetime.datetime.now().year
header_comment_line_8 = "//\n"
header_comment_line_9 = "//===----------------------------------------------------------------------===//\n\n"
header_comment_1 = header_comment_line_1 + header_comment_line_2
header_comment_3 = header_comment_line_4
header_comment_5 = header_comment_line_6 + header_comment_line_7 + header_comment_line_8 \
+ header_comment_line_9
#regular expresseion used to track header
header_regex = re.compile("((\/\/===-*===\/\/\n(\/\/.*\n)*\/\/===-*===\/\/[\n]*)\n\n)*")
## ==============================================
## LOGGING CONFIGURATION
## ==============================================
LOG = logging.getLogger(__name__)
LOG_handler = logging.StreamHandler()
LOG_formatter = logging.Formatter(
fmt='%(asctime)s [%(funcName)s:%(lineno)03d] %(levelname)-5s: %(message)s',
datefmt='%m-%d-%Y %H:%M:%S'
)
LOG_handler.setFormatter(LOG_formatter)
LOG.addHandler(LOG_handler)
LOG.setLevel(logging.INFO)
## ==============================================
## UTILITY FUNCTION DEFINITIONS
## ==============================================

# format the file passed as argument
def format_file(file_path, update_header, clang_format_code):
    """Format a single source file in place.

    If `update_header` is set, strip any existing Peloton header comment
    and prepend a freshly generated one; otherwise, if `clang_format_code`
    is set, run clang-format (style read from .clang-format) on the file.

    :param file_path: path of the file to process
    :param update_header: bool, rewrite the license/identification header
    :param clang_format_code: bool, apply clang-format to the file
    """
    file_name = os.path.basename(file_path)
    abs_path = os.path.abspath(file_path)
    rel_path_from_peloton_dir = os.path.relpath(abs_path, PELOTON_DIR)

    with open(file_path, "r+") as fd:
        file_data = fd.read()

        if update_header:
            # strip old header if it exists
            header_match = header_regex.match(file_data)
            if header_match is not None:
                LOG.info("Strip header from %s", file_name)
                header_comment = header_match.group()
                LOG.debug("Header comment : %s", header_comment)
                file_data = file_data.replace(header_comment, "")

            # add new header (file name and repo-relative path are dynamic)
            LOG.info("Add header to %s", file_name)
            header_comment_2 = header_comment_line_3 + file_name + "\n"
            header_comment_4 = header_comment_line_5 + rel_path_from_peloton_dir + "\n"
            header_comment = header_comment_1 + header_comment_2 + header_comment_3 \
                + header_comment_4 + header_comment_5

            file_data = header_comment + file_data
            # rewrite the whole file from the beginning with the new contents
            fd.seek(0, 0)
            fd.truncate()
            fd.write(file_data)

        elif clang_format_code:
            try:
                formatting_command = CLANG_FORMAT + " -style=file -i " + file_path
                LOG.info(formatting_command)
                subprocess.call([CLANG_FORMAT, "-style=file", "-i", file_path])
            except OSError:
                # unused exception variable removed; clang-format binary missing
                LOG.error("clang-format seems not installed")
                exit("clang-format seems not installed")
    # NOTE: the redundant fd.close() after the `with` block was removed --
    # the context manager already closes the file.
# END FORMAT_FILE(FILE_NAME)
# format all the files in the dir passed as argument
def format_dir(dir_path, update_header, clang_format_code):
    """Recursively apply format_file to every .h/.cpp file under dir_path."""
    for subdir, dirs, files in os.walk(dir_path):
        for file_name in files:
            candidate = subdir + os.path.sep + file_name
            if candidate.endswith((".h", ".cpp")):
                format_file(candidate, update_header, clang_format_code)
# END FORMAT_DIR(DIR_PATH)
## ==============================================
## Main Function
## ==============================================

if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Update headers and/or format source code')

    parser.add_argument("-u", "--update-header", help='Action: Update existing headers or add new ones', action='store_true')
    parser.add_argument("-c", "--clang-format-code", help='Action: Apply clang-format to source code', action='store_true')
    parser.add_argument("-f", "--staged-files", help='Action: Apply the selected action(s) to all staged files (git)', action='store_true')
    parser.add_argument('paths', metavar='PATH', type=str, nargs='*',
                        help='Files or directories to (recursively) apply the actions to')

    args = parser.parse_args()

    if args.staged_files:
        # `git diff --name-only` prints one path per line; use splitlines()
        # instead of split() so paths containing spaces are not broken apart.
        targets = [os.path.abspath(os.path.join(PELOTON_DIR, f))
                   for f in subprocess.check_output(
                       ["git", "diff", "--name-only", "HEAD", "--cached",
                        "--diff-filter=d"]).splitlines()]
        if not targets:
            LOG.error("no staged files or not calling from a repository -- exiting")
            sys.exit("no staged files or not calling from a repository")
    elif not args.paths:
        LOG.error("no files or directories given -- exiting")
        sys.exit("no files or directories given")
    else:
        targets = args.paths

    for x in targets:
        if os.path.isfile(x):
            LOG.info("Scanning file: " + x)
            format_file(x, args.update_header, args.clang_format_code)
        elif os.path.isdir(x):
            LOG.info("Scanning directory " + x)
            format_dir(x, args.update_header, args.clang_format_code)
    ## FOR
## IF
| apache-2.0 |
catapult-project/catapult-csm | third_party/gsutil/third_party/boto/boto/beanstalk/response.py | 153 | 28051 | """Classify responses from layer1 and strict type values."""
from datetime import datetime
from boto.compat import six
class BaseObject(object):
    """Shared repr machinery for all response wrapper objects."""

    def __repr__(self):
        # Render as: ClassName{ attr: value, attr: value }
        parts = [key + ': ' + self._repr_by_type(value)
                 for key, value in six.iteritems(self.__dict__)]
        return self.__class__.__name__ + '{ ' + ', '.join(parts) + ' }'

    def _repr_by_type(self, value):
        # Everything is either a 'Response', 'list', or 'None/str/int/bool'.
        if isinstance(value, Response):
            return repr(value)
        if isinstance(value, list):
            return self._repr_list(value)
        return str(value)

    def _repr_list(self, array):
        # Render as: [ a, b, c ] (or [] when empty), matching the
        # historical trailing-comma-to-space formatting.
        body = ''.join(' ' + self._repr_by_type(item) + ',' for item in array)
        if body:
            body = body[:-1] + ' '
        return '[' + body + ']'
class Response(BaseObject):
    """Base wrapper for every layer1 response; extracts ResponseMetadata."""

    def __init__(self, response):
        super(Response, self).__init__()
        metadata = response['ResponseMetadata']
        self.response_metadata = ResponseMetadata(metadata) if metadata else None


class ResponseMetadata(BaseObject):
    """Holds the AWS request id of a response."""

    def __init__(self, response):
        super(ResponseMetadata, self).__init__()
        self.request_id = str(response['RequestId'])
# Typed wrappers over the raw JSON dicts returned by layer1. Each __init__
# copies the fields it knows about, coercing with str()/int()/bool() and
# converting epoch timestamps via datetime.fromtimestamp (local time).
class ApplicationDescription(BaseObject):
    """Typed view of an application description dict."""
    def __init__(self, response):
        super(ApplicationDescription, self).__init__()
        self.application_name = str(response['ApplicationName'])
        self.configuration_templates = []
        if response['ConfigurationTemplates']:
            for member in response['ConfigurationTemplates']:
                configuration_template = str(member)
                self.configuration_templates.append(configuration_template)
        self.date_created = datetime.fromtimestamp(response['DateCreated'])
        self.date_updated = datetime.fromtimestamp(response['DateUpdated'])
        self.description = str(response['Description'])
        self.versions = []
        if response['Versions']:
            for member in response['Versions']:
                version = str(member)
                self.versions.append(version)

class ApplicationVersionDescription(BaseObject):
    """Typed view of an application version description dict."""
    def __init__(self, response):
        super(ApplicationVersionDescription, self).__init__()
        self.application_name = str(response['ApplicationName'])
        self.date_created = datetime.fromtimestamp(response['DateCreated'])
        self.date_updated = datetime.fromtimestamp(response['DateUpdated'])
        self.description = str(response['Description'])
        if response['SourceBundle']:
            self.source_bundle = S3Location(response['SourceBundle'])
        else:
            self.source_bundle = None
        self.version_label = str(response['VersionLabel'])

class AutoScalingGroup(BaseObject):
    """Name of an auto scaling group attached to an environment."""
    def __init__(self, response):
        super(AutoScalingGroup, self).__init__()
        self.name = str(response['Name'])

class ConfigurationOptionDescription(BaseObject):
    """Typed view of a configuration option; numeric limits and the regex
    restriction are None when absent from the response."""
    def __init__(self, response):
        super(ConfigurationOptionDescription, self).__init__()
        self.change_severity = str(response['ChangeSeverity'])
        self.default_value = str(response['DefaultValue'])
        self.max_length = int(response['MaxLength']) if response['MaxLength'] else None
        self.max_value = int(response['MaxValue']) if response['MaxValue'] else None
        self.min_value = int(response['MinValue']) if response['MinValue'] else None
        self.name = str(response['Name'])
        self.namespace = str(response['Namespace'])
        if response['Regex']:
            self.regex = OptionRestrictionRegex(response['Regex'])
        else:
            self.regex = None
        self.user_defined = str(response['UserDefined'])
        self.value_options = []
        if response['ValueOptions']:
            for member in response['ValueOptions']:
                value_option = str(member)
                self.value_options.append(value_option)
        self.value_type = str(response['ValueType'])

class ConfigurationOptionSetting(BaseObject):
    """A single (namespace, option_name, value) setting."""
    def __init__(self, response):
        super(ConfigurationOptionSetting, self).__init__()
        self.namespace = str(response['Namespace'])
        self.option_name = str(response['OptionName'])
        self.value = str(response['Value'])

class ConfigurationSettingsDescription(BaseObject):
    """Typed view of a configuration settings description dict."""
    def __init__(self, response):
        super(ConfigurationSettingsDescription, self).__init__()
        self.application_name = str(response['ApplicationName'])
        self.date_created = datetime.fromtimestamp(response['DateCreated'])
        self.date_updated = datetime.fromtimestamp(response['DateUpdated'])
        self.deployment_status = str(response['DeploymentStatus'])
        self.description = str(response['Description'])
        self.environment_name = str(response['EnvironmentName'])
        self.option_settings = []
        if response['OptionSettings']:
            for member in response['OptionSettings']:
                option_setting = ConfigurationOptionSetting(member)
                self.option_settings.append(option_setting)
        self.solution_stack_name = str(response['SolutionStackName'])
        self.template_name = str(response['TemplateName'])
class EnvironmentDescription(BaseObject):
    """Typed view of an environment description dict."""
    def __init__(self, response):
        super(EnvironmentDescription, self).__init__()
        self.application_name = str(response['ApplicationName'])
        self.cname = str(response['CNAME'])
        self.date_created = datetime.fromtimestamp(response['DateCreated'])
        self.date_updated = datetime.fromtimestamp(response['DateUpdated'])
        self.description = str(response['Description'])
        self.endpoint_url = str(response['EndpointURL'])
        self.environment_id = str(response['EnvironmentId'])
        self.environment_name = str(response['EnvironmentName'])
        self.health = str(response['Health'])
        if response['Resources']:
            self.resources = EnvironmentResourcesDescription(response['Resources'])
        else:
            self.resources = None
        self.solution_stack_name = str(response['SolutionStackName'])
        self.status = str(response['Status'])
        self.template_name = str(response['TemplateName'])
        self.version_label = str(response['VersionLabel'])

class EnvironmentInfoDescription(BaseObject):
    """Typed view of a retrieved environment info record."""
    def __init__(self, response):
        super(EnvironmentInfoDescription, self).__init__()
        self.ec2_instance_id = str(response['Ec2InstanceId'])
        self.info_type = str(response['InfoType'])
        self.message = str(response['Message'])
        self.sample_timestamp = datetime.fromtimestamp(response['SampleTimestamp'])

class EnvironmentResourceDescription(BaseObject):
    """Lists of AWS resources (ASGs, instances, LCs, ELBs, triggers)
    attached to an environment."""
    def __init__(self, response):
        super(EnvironmentResourceDescription, self).__init__()
        self.auto_scaling_groups = []
        if response['AutoScalingGroups']:
            for member in response['AutoScalingGroups']:
                auto_scaling_group = AutoScalingGroup(member)
                self.auto_scaling_groups.append(auto_scaling_group)
        self.environment_name = str(response['EnvironmentName'])
        self.instances = []
        if response['Instances']:
            for member in response['Instances']:
                instance = Instance(member)
                self.instances.append(instance)
        self.launch_configurations = []
        if response['LaunchConfigurations']:
            for member in response['LaunchConfigurations']:
                launch_configuration = LaunchConfiguration(member)
                self.launch_configurations.append(launch_configuration)
        self.load_balancers = []
        if response['LoadBalancers']:
            for member in response['LoadBalancers']:
                load_balancer = LoadBalancer(member)
                self.load_balancers.append(load_balancer)
        self.triggers = []
        if response['Triggers']:
            for member in response['Triggers']:
                trigger = Trigger(member)
                self.triggers.append(trigger)

class EnvironmentResourcesDescription(BaseObject):
    """Wrapper holding the (optional) load balancer description."""
    def __init__(self, response):
        super(EnvironmentResourcesDescription, self).__init__()
        if response['LoadBalancer']:
            self.load_balancer = LoadBalancerDescription(response['LoadBalancer'])
        else:
            self.load_balancer = None

class EventDescription(BaseObject):
    """Typed view of a single event record."""
    def __init__(self, response):
        super(EventDescription, self).__init__()
        self.application_name = str(response['ApplicationName'])
        self.environment_name = str(response['EnvironmentName'])
        self.event_date = datetime.fromtimestamp(response['EventDate'])
        self.message = str(response['Message'])
        self.request_id = str(response['RequestId'])
        self.severity = str(response['Severity'])
        self.template_name = str(response['TemplateName'])
        self.version_label = str(response['VersionLabel'])
class Instance(BaseObject):
    """EC2 instance id attached to an environment."""
    def __init__(self, response):
        super(Instance, self).__init__()
        self.id = str(response['Id'])

class LaunchConfiguration(BaseObject):
    """Launch configuration name attached to an environment."""
    def __init__(self, response):
        super(LaunchConfiguration, self).__init__()
        self.name = str(response['Name'])

class Listener(BaseObject):
    """Load balancer listener; port is None when absent."""
    def __init__(self, response):
        super(Listener, self).__init__()
        self.port = int(response['Port']) if response['Port'] else None
        self.protocol = str(response['Protocol'])

class LoadBalancer(BaseObject):
    """Load balancer name attached to an environment."""
    def __init__(self, response):
        super(LoadBalancer, self).__init__()
        self.name = str(response['Name'])

class LoadBalancerDescription(BaseObject):
    """Load balancer domain, name and listener list."""
    def __init__(self, response):
        super(LoadBalancerDescription, self).__init__()
        self.domain = str(response['Domain'])
        self.listeners = []
        if response['Listeners']:
            for member in response['Listeners']:
                listener = Listener(member)
                self.listeners.append(listener)
        self.load_balancer_name = str(response['LoadBalancerName'])

class OptionRestrictionRegex(BaseObject):
    """Regex restriction (label + pattern) on a configuration option."""
    def __init__(self, response):
        super(OptionRestrictionRegex, self).__init__()
        # NOTE(review): unlike the other wrappers these values are not
        # coerced with str() -- presumably already strings; confirm
        # before relying on the type.
        self.label = response['Label']
        self.pattern = response['Pattern']

class SolutionStackDescription(BaseObject):
    """Solution stack name plus its permitted source bundle file types."""
    def __init__(self, response):
        super(SolutionStackDescription, self).__init__()
        self.permitted_file_types = []
        if response['PermittedFileTypes']:
            for member in response['PermittedFileTypes']:
                permitted_file_type = str(member)
                self.permitted_file_types.append(permitted_file_type)
        self.solution_stack_name = str(response['SolutionStackName'])

class S3Location(BaseObject):
    """S3 bucket/key pair locating a source bundle."""
    def __init__(self, response):
        super(S3Location, self).__init__()
        self.s3_bucket = str(response['S3Bucket'])
        self.s3_key = str(response['S3Key'])

class Trigger(BaseObject):
    """Trigger name attached to an environment."""
    def __init__(self, response):
        super(Trigger, self).__init__()
        self.name = str(response['Name'])

class ValidationMessage(BaseObject):
    """A single option-validation message from the service."""
    def __init__(self, response):
        super(ValidationMessage, self).__init__()
        self.message = str(response['Message'])
        self.namespace = str(response['Namespace'])
        self.option_name = str(response['OptionName'])
        self.severity = str(response['Severity'])
# These are the response objects layer2 uses, one for each layer1 api call.
# Each one unwraps the '<Op>Response' envelope (handing its metadata to the
# Response base class), then copies fields out of the '<Op>Result' dict.
class CheckDNSAvailabilityResponse(Response):
    def __init__(self, response):
        response = response['CheckDNSAvailabilityResponse']
        super(CheckDNSAvailabilityResponse, self).__init__(response)
        response = response['CheckDNSAvailabilityResult']
        self.fully_qualified_cname = str(response['FullyQualifiedCNAME'])
        self.available = bool(response['Available'])

# Our naming convention produces this class name but api names it with more
# capitals.
class CheckDnsAvailabilityResponse(CheckDNSAvailabilityResponse): pass

class CreateApplicationResponse(Response):
    def __init__(self, response):
        response = response['CreateApplicationResponse']
        super(CreateApplicationResponse, self).__init__(response)
        response = response['CreateApplicationResult']
        if response['Application']:
            self.application = ApplicationDescription(response['Application'])
        else:
            self.application = None

class CreateApplicationVersionResponse(Response):
    def __init__(self, response):
        response = response['CreateApplicationVersionResponse']
        super(CreateApplicationVersionResponse, self).__init__(response)
        response = response['CreateApplicationVersionResult']
        if response['ApplicationVersion']:
            self.application_version = ApplicationVersionDescription(response['ApplicationVersion'])
        else:
            self.application_version = None

class CreateConfigurationTemplateResponse(Response):
    def __init__(self, response):
        response = response['CreateConfigurationTemplateResponse']
        super(CreateConfigurationTemplateResponse, self).__init__(response)
        response = response['CreateConfigurationTemplateResult']
        self.application_name = str(response['ApplicationName'])
        self.date_created = datetime.fromtimestamp(response['DateCreated'])
        self.date_updated = datetime.fromtimestamp(response['DateUpdated'])
        self.deployment_status = str(response['DeploymentStatus'])
        self.description = str(response['Description'])
        self.environment_name = str(response['EnvironmentName'])
        self.option_settings = []
        if response['OptionSettings']:
            for member in response['OptionSettings']:
                option_setting = ConfigurationOptionSetting(member)
                self.option_settings.append(option_setting)
        self.solution_stack_name = str(response['SolutionStackName'])
        self.template_name = str(response['TemplateName'])
class CreateEnvironmentResponse(Response):
def __init__(self, response):
response = response['CreateEnvironmentResponse']
super(CreateEnvironmentResponse, self).__init__(response)
response = response['CreateEnvironmentResult']
self.application_name = str(response['ApplicationName'])
self.cname = str(response['CNAME'])
self.date_created = datetime.fromtimestamp(response['DateCreated'])
self.date_updated = datetime.fromtimestamp(response['DateUpdated'])
self.description = str(response['Description'])
self.endpoint_url = str(response['EndpointURL'])
self.environment_id = str(response['EnvironmentId'])
self.environment_name = str(response['EnvironmentName'])
self.health = str(response['Health'])
if response['Resources']:
self.resources = EnvironmentResourcesDescription(response['Resources'])
else:
self.resources = None
self.solution_stack_name = str(response['SolutionStackName'])
self.status = str(response['Status'])
self.template_name = str(response['TemplateName'])
self.version_label = str(response['VersionLabel'])
class CreateStorageLocationResponse(Response):
def __init__(self, response):
response = response['CreateStorageLocationResponse']
super(CreateStorageLocationResponse, self).__init__(response)
response = response['CreateStorageLocationResult']
self.s3_bucket = str(response['S3Bucket'])
class DeleteApplicationResponse(Response):
def __init__(self, response):
response = response['DeleteApplicationResponse']
super(DeleteApplicationResponse, self).__init__(response)
class DeleteApplicationVersionResponse(Response):
def __init__(self, response):
response = response['DeleteApplicationVersionResponse']
super(DeleteApplicationVersionResponse, self).__init__(response)
class DeleteConfigurationTemplateResponse(Response):
def __init__(self, response):
response = response['DeleteConfigurationTemplateResponse']
super(DeleteConfigurationTemplateResponse, self).__init__(response)
class DeleteEnvironmentConfigurationResponse(Response):
def __init__(self, response):
response = response['DeleteEnvironmentConfigurationResponse']
super(DeleteEnvironmentConfigurationResponse, self).__init__(response)
class DescribeApplicationVersionsResponse(Response):
def __init__(self, response):
response = response['DescribeApplicationVersionsResponse']
super(DescribeApplicationVersionsResponse, self).__init__(response)
response = response['DescribeApplicationVersionsResult']
self.application_versions = []
if response['ApplicationVersions']:
for member in response['ApplicationVersions']:
application_version = ApplicationVersionDescription(member)
self.application_versions.append(application_version)
class DescribeApplicationsResponse(Response):
def __init__(self, response):
response = response['DescribeApplicationsResponse']
super(DescribeApplicationsResponse, self).__init__(response)
response = response['DescribeApplicationsResult']
self.applications = []
if response['Applications']:
for member in response['Applications']:
application = ApplicationDescription(member)
self.applications.append(application)
class DescribeConfigurationOptionsResponse(Response):
def __init__(self, response):
response = response['DescribeConfigurationOptionsResponse']
super(DescribeConfigurationOptionsResponse, self).__init__(response)
response = response['DescribeConfigurationOptionsResult']
self.options = []
if response['Options']:
for member in response['Options']:
option = ConfigurationOptionDescription(member)
self.options.append(option)
self.solution_stack_name = str(response['SolutionStackName'])
class DescribeConfigurationSettingsResponse(Response):
    """Response wrapper for DescribeConfigurationSettings.

    Exposes ``configuration_settings``, a list of
    ConfigurationSettingsDescription objects.
    """

    def __init__(self, response):
        body = response['DescribeConfigurationSettingsResponse']
        super(DescribeConfigurationSettingsResponse, self).__init__(body)
        result = body['DescribeConfigurationSettingsResult']
        members = result['ConfigurationSettings'] or []
        self.configuration_settings = [ConfigurationSettingsDescription(m)
                                       for m in members]
class DescribeEnvironmentResourcesResponse(Response):
    """Response wrapper for DescribeEnvironmentResources.

    Exposes ``environment_resources`` (an EnvironmentResourceDescription)
    or None when the result carries no resources.
    """

    def __init__(self, response):
        body = response['DescribeEnvironmentResourcesResponse']
        super(DescribeEnvironmentResourcesResponse, self).__init__(body)
        result = body['DescribeEnvironmentResourcesResult']
        resources = result['EnvironmentResources']
        self.environment_resources = (
            EnvironmentResourceDescription(resources) if resources else None)
class DescribeEnvironmentsResponse(Response):
    """Response wrapper for DescribeEnvironments.

    Exposes ``environments``, a list of EnvironmentDescription objects.
    """

    def __init__(self, response):
        body = response['DescribeEnvironmentsResponse']
        super(DescribeEnvironmentsResponse, self).__init__(body)
        result = body['DescribeEnvironmentsResult']
        members = result['Environments'] or []
        self.environments = [EnvironmentDescription(m) for m in members]
class DescribeEventsResponse(Response):
    """Response wrapper for DescribeEvents.

    Attributes
    ----------
    events : list of EventDescription
        The events returned in this page of results.
    next_token : str
        Pagination token for fetching the next page of events.
    next_tokent : str
        Deprecated, misspelled alias of ``next_token``; kept so existing
        callers do not break.
    """

    def __init__(self, response):
        body = response['DescribeEventsResponse']
        super(DescribeEventsResponse, self).__init__(body)
        result = body['DescribeEventsResult']
        self.events = []
        if result['Events']:
            for member in result['Events']:
                self.events.append(EventDescription(member))
        # Historical typo preserved for backward compatibility; new code
        # should read ``next_token``.
        self.next_tokent = str(result['NextToken'])
        self.next_token = self.next_tokent
class ListAvailableSolutionStacksResponse(Response):
    """Response wrapper for ListAvailableSolutionStacks.

    Exposes ``solution_stack_details`` (SolutionStackDescription objects)
    and ``solution_stacks`` (plain stack-name strings).
    """

    def __init__(self, response):
        body = response['ListAvailableSolutionStacksResponse']
        super(ListAvailableSolutionStacksResponse, self).__init__(body)
        result = body['ListAvailableSolutionStacksResult']
        details = result['SolutionStackDetails'] or []
        self.solution_stack_details = [SolutionStackDescription(d)
                                       for d in details]
        stacks = result['SolutionStacks'] or []
        self.solution_stacks = [str(s) for s in stacks]
class RebuildEnvironmentResponse(Response):
    """Response wrapper for the RebuildEnvironment operation."""

    def __init__(self, response):
        body = response['RebuildEnvironmentResponse']
        super(RebuildEnvironmentResponse, self).__init__(body)
class RequestEnvironmentInfoResponse(Response):
    """Response wrapper for the RequestEnvironmentInfo operation."""

    def __init__(self, response):
        body = response['RequestEnvironmentInfoResponse']
        super(RequestEnvironmentInfoResponse, self).__init__(body)
class RestartAppServerResponse(Response):
    """Response wrapper for the RestartAppServer operation."""

    def __init__(self, response):
        body = response['RestartAppServerResponse']
        super(RestartAppServerResponse, self).__init__(body)
class RetrieveEnvironmentInfoResponse(Response):
    """Response wrapper for RetrieveEnvironmentInfo.

    Exposes ``environment_info``, a list of EnvironmentInfoDescription
    objects.
    """

    def __init__(self, response):
        body = response['RetrieveEnvironmentInfoResponse']
        super(RetrieveEnvironmentInfoResponse, self).__init__(body)
        result = body['RetrieveEnvironmentInfoResult']
        members = result['EnvironmentInfo'] or []
        self.environment_info = [EnvironmentInfoDescription(m)
                                 for m in members]
class SwapEnvironmentCNAMEsResponse(Response):
    """Response wrapper for the SwapEnvironmentCNAMEs operation."""

    def __init__(self, response):
        body = response['SwapEnvironmentCNAMEsResponse']
        super(SwapEnvironmentCNAMEsResponse, self).__init__(body)
# Backwards-compatible alias preserving the older "Cnames" capitalization.
class SwapEnvironmentCnamesResponse(SwapEnvironmentCNAMEsResponse): pass
class TerminateEnvironmentResponse(Response):
    """Response wrapper for TerminateEnvironment.

    Mirrors the terminated environment's final description as attributes
    (name, CNAME, dates, status, etc.).
    """

    def __init__(self, response):
        body = response['TerminateEnvironmentResponse']
        super(TerminateEnvironmentResponse, self).__init__(body)
        result = body['TerminateEnvironmentResult']
        self.application_name = str(result['ApplicationName'])
        self.cname = str(result['CNAME'])
        self.date_created = datetime.fromtimestamp(result['DateCreated'])
        self.date_updated = datetime.fromtimestamp(result['DateUpdated'])
        self.description = str(result['Description'])
        self.endpoint_url = str(result['EndpointURL'])
        self.environment_id = str(result['EnvironmentId'])
        self.environment_name = str(result['EnvironmentName'])
        self.health = str(result['Health'])
        resources = result['Resources']
        self.resources = (EnvironmentResourcesDescription(resources)
                          if resources else None)
        self.solution_stack_name = str(result['SolutionStackName'])
        self.status = str(result['Status'])
        self.template_name = str(result['TemplateName'])
        self.version_label = str(result['VersionLabel'])
class UpdateApplicationResponse(Response):
    """Response wrapper for UpdateApplication.

    Exposes ``application`` (an ApplicationDescription) or None.
    """

    def __init__(self, response):
        body = response['UpdateApplicationResponse']
        super(UpdateApplicationResponse, self).__init__(body)
        result = body['UpdateApplicationResult']
        app = result['Application']
        self.application = ApplicationDescription(app) if app else None
class UpdateApplicationVersionResponse(Response):
    """Response wrapper for UpdateApplicationVersion.

    Exposes ``application_version`` (an ApplicationVersionDescription)
    or None.
    """

    def __init__(self, response):
        body = response['UpdateApplicationVersionResponse']
        super(UpdateApplicationVersionResponse, self).__init__(body)
        result = body['UpdateApplicationVersionResult']
        version = result['ApplicationVersion']
        self.application_version = (ApplicationVersionDescription(version)
                                    if version else None)
class UpdateConfigurationTemplateResponse(Response):
    """Response wrapper for UpdateConfigurationTemplate.

    Mirrors the updated template's description, including its
    ``option_settings`` list.
    """

    def __init__(self, response):
        body = response['UpdateConfigurationTemplateResponse']
        super(UpdateConfigurationTemplateResponse, self).__init__(body)
        result = body['UpdateConfigurationTemplateResult']
        self.application_name = str(result['ApplicationName'])
        self.date_created = datetime.fromtimestamp(result['DateCreated'])
        self.date_updated = datetime.fromtimestamp(result['DateUpdated'])
        self.deployment_status = str(result['DeploymentStatus'])
        self.description = str(result['Description'])
        self.environment_name = str(result['EnvironmentName'])
        members = result['OptionSettings'] or []
        self.option_settings = [ConfigurationOptionSetting(m)
                                for m in members]
        self.solution_stack_name = str(result['SolutionStackName'])
        self.template_name = str(result['TemplateName'])
class UpdateEnvironmentResponse(Response):
    """Response wrapper for UpdateEnvironment.

    Mirrors the updated environment's description as attributes
    (name, CNAME, dates, status, etc.).
    """

    def __init__(self, response):
        body = response['UpdateEnvironmentResponse']
        super(UpdateEnvironmentResponse, self).__init__(body)
        result = body['UpdateEnvironmentResult']
        self.application_name = str(result['ApplicationName'])
        self.cname = str(result['CNAME'])
        self.date_created = datetime.fromtimestamp(result['DateCreated'])
        self.date_updated = datetime.fromtimestamp(result['DateUpdated'])
        self.description = str(result['Description'])
        self.endpoint_url = str(result['EndpointURL'])
        self.environment_id = str(result['EnvironmentId'])
        self.environment_name = str(result['EnvironmentName'])
        self.health = str(result['Health'])
        resources = result['Resources']
        self.resources = (EnvironmentResourcesDescription(resources)
                          if resources else None)
        self.solution_stack_name = str(result['SolutionStackName'])
        self.status = str(result['Status'])
        self.template_name = str(result['TemplateName'])
        self.version_label = str(result['VersionLabel'])
class ValidateConfigurationSettingsResponse(Response):
    """Response wrapper for ValidateConfigurationSettings.

    Exposes ``messages``, a list of ValidationMessage objects.
    """

    def __init__(self, response):
        body = response['ValidateConfigurationSettingsResponse']
        super(ValidateConfigurationSettingsResponse, self).__init__(body)
        result = body['ValidateConfigurationSettingsResult']
        members = result['Messages'] or []
        self.messages = [ValidationMessage(m) for m in members]
| bsd-3-clause |
sorgerlab/indra | indra/assemblers/pysb/bmi_wrapper.py | 6 | 13862 | """This module allows creating a Basic Modeling Interface (BMI) model from
and automatically assembled PySB model. The BMI model can be instantiated
within a simulation workflow system where it is simulated together
with other models."""
import os
import copy
import numpy
import pickle
import textwrap
from lxml import etree
from pysb.bng import generate_equations
from pysb.simulator import ScipyOdeSimulator
class BMIModel(object):
    """This class represents a BMI model wrapping a model assembled by INDRA.

    Parameters
    ----------
    model : pysb.Model
        A PySB model assembled by INDRA to be wrapped in BMI.
    inputs : Optional[list[str]]
        A list of variable names that are considered to be inputs to the
        model meaning that they are read from other models. Note that
        designating a variable as input means that it must be provided by
        another component during the simulation.
    stop_time : int
        The stopping time for this model, controlling the time units up to
        which the model is simulated.
    outside_name_map : dict
        A dictionary mapping outside variables names to inside variable names
        (i.e. ones that are in the wrapped model)
    """
    def __init__(self, model, inputs=None, stop_time=1000,
                 outside_name_map=None):
        self.model = model
        generate_equations(model)

        self.inputs = inputs if inputs else []
        self.stop_time = stop_time
        self.outside_name_map = outside_name_map if outside_name_map else {}

        self.dt = numpy.array(10.0)
        self.units = 'seconds'
        self.sim = None
        self.attributes = copy.copy(default_attributes)
        # Map each species' first monomer name to its index in the model's
        # species list, which is also its index into the state vector.
        self.species_name_map = {}
        for idx, species in enumerate(self.model.species):
            monomer = species.monomer_patterns[0].monomer
            self.species_name_map[monomer.name] = idx
        self.input_vars = self._get_input_vars()
        # These attributes are related to the simulation state
        self.state = numpy.array([100.0 for s in self.species_name_map.keys()])
        self.time = numpy.array(0.0)
        self.status = 'start'
        # Record copies here: self.time is a 0-d array that update() mutates
        # in place via +=, so appending the live references would make this
        # initial entry silently track the current time instead of t=0.
        self.time_course = [(self.time.copy(), self.state.copy())]
        # EMELI needs a DONE attribute
        self.DONE = False

    def _get_input_vars(self):
        """Return the list of input variable names for this model."""
        return self.inputs
        # The code below attempts to discover input variables, it is currently
        # inactive but could be made optional later
        # species_is_obj = {s: False for s in self.species_name_map.keys()}
        # for ann in self.model.annotations:
        #    if ann.predicate == 'rule_has_object':
        #        species_is_obj[ann.object] = True
        # # Return all the variables that aren't objects in a rule
        # input_vars = [s for s, tf in species_is_obj.items() if not tf]
        # return input_vars

    # Simulation functions
    def initialize(self, cfg_file=None, mode=None):
        """Initialize the model for simulation, possibly given a config file.

        Parameters
        ----------
        cfg_file : Optional[str]
            The name of the configuration file to load, optional.
        mode : Optional[str]
            Unused; accepted for interface compatibility with the caller.
        """
        self.sim = ScipyOdeSimulator(self.model)
        self.state = numpy.array(copy.copy(self.sim.initials)[0])
        self.time = numpy.array(0.0)
        self.status = 'initialized'

    def update(self, dt=None):
        """Simulate the model for a given time interval.

        Parameters
        ----------
        dt : Optional[float]
            The time step to simulate, if None, the default built-in time
            step is used.
        """
        # EMELI passes dt = -1 so we need to handle that here
        dt = dt if (dt is not None and dt > 0) else self.dt
        tspan = [0, dt]
        # Run simulation with initials set to current state
        res = self.sim.run(tspan=tspan, initials=self.state)
        # Set the state based on the result here
        self.state = res.species[-1]
        self.time += dt
        if self.time > self.stop_time:
            self.DONE = True
        # Progress trace for interactive runs.
        print((self.time, self.state))
        self.time_course.append((self.time.copy(), self.state.copy()))

    def finalize(self):
        """Finish the simulation and clean up resources as needed."""
        self.status = 'finalized'

    # Setter functions for state variables
    def set_value(self, var_name, value):
        """Set the value of a given variable to a given value.

        Parameters
        ----------
        var_name : str
            The name of the variable in the model whose value should be set.
        value : float
            The value the variable should be set to
        """
        if var_name in self.outside_name_map:
            var_name = self.outside_name_map[var_name]
            # Debug trace of the incoming (rescaled) value.
            print('%s=%.5f' % (var_name, 1e9*value))
            # HACK: ad hoc unit rescaling specific to the Precipitation
            # input from the coupled component -- TODO confirm the factor
            # against the providing model's units.
            if var_name == 'Precipitation':
                value = 1e9*value
        species_idx = self.species_name_map[var_name]
        self.state[species_idx] = value

    def set_values(self, var_name, value):
        """Set the value of a given variable to a given value.

        Alias of :meth:`set_value`, provided because the BMI caller may use
        the plural form.

        Parameters
        ----------
        var_name : str
            The name of the variable in the model whose value should be set.
        value : float
            The value the variable should be set to
        """
        self.set_value(var_name, value)

    # Getter functions for state
    def get_value(self, var_name):
        """Return the value of a given variable.

        Parameters
        ----------
        var_name : str
            The name of the variable whose value should be returned

        Returns
        -------
        value : float
            The value of the given variable in the current state
        """
        if var_name in self.outside_name_map:
            var_name = self.outside_name_map[var_name]
        species_idx = self.species_name_map[var_name]
        return self.state[species_idx]

    def get_values(self, var_name):
        """Return the value of a given variable.

        Alias of :meth:`get_value`, provided because the BMI caller may use
        the plural form.

        Parameters
        ----------
        var_name : str
            The name of the variable whose value should be returned

        Returns
        -------
        value : float
            The value of the given variable in the current state
        """
        return self.get_value(var_name)

    def get_status(self):
        """Return the current status of the model."""
        return self.status

    # Getter functions for basic properties
    def get_attribute(self, att_name):
        """Return the value of a given attribute.

        Attributes include: model_name, version, author_name, grid_type,
        time_step_type, step_method, time_units

        Parameters
        ----------
        att_name : str
            The name of the attribute whose value should be returned.

        Returns
        -------
        value : str
            The value of the attribute
        """
        return self.attributes.get(att_name)

    def get_input_var_names(self):
        """Return a list of variables names that can be set as input.

        Returns
        -------
        var_names : list[str]
            A list of variable names that can be set from the outside
        """
        in_vars = copy.copy(self.input_vars)
        for idx, var in enumerate(in_vars):
            if self._map_in_out(var) is not None:
                in_vars[idx] = self._map_in_out(var)
        return in_vars

    def get_output_var_names(self):
        """Return a list of variables names that can be read as output.

        Returns
        -------
        var_names : list[str]
            A list of variable names that can be read from the outside
        """
        # Return all the variables that aren't input variables
        all_vars = list(self.species_name_map.keys())
        output_vars = list(set(all_vars) - set(self.input_vars))
        # Re-map to outside var names if needed
        for idx, var in enumerate(output_vars):
            if self._map_in_out(var) is not None:
                output_vars[idx] = self._map_in_out(var)
        return output_vars

    def get_var_name(self, var_name):
        """Return the internal variable name given an outside variable name.

        Parameters
        ----------
        var_name : str
            The name of the outside variable to map

        Returns
        -------
        internal_var_name : str
            The internal name of the corresponding variable
        """
        return self._map_out_in(var_name)

    def get_var_units(self, var_name):
        """Return the units of a given variable.

        Parameters
        ----------
        var_name : str
            The name of the variable whose units should be returned

        Returns
        -------
        unit : str
            The units of the variable
        """
        return '1'

    def get_var_type(self, var_name):
        """Return the type of a given variable.

        Parameters
        ----------
        var_name : str
            The name of the variable whose type should be returned

        Returns
        -------
        unit : str
            The type of the variable as a string
        """
        return 'float64'

    def get_var_rank(self, var_name):
        """Return the matrix rank of the given variable.

        Parameters
        ----------
        var_name : str
            The name of the variable whose rank should be returned

        Returns
        -------
        rank : int
            The dimensionality of the variable, 0 for scalar, 1 for vector,
            etc.
        """
        return numpy.int16(0)

    def get_start_time(self):
        """Return the initial time point of the model.

        Returns
        -------
        start_time : float
            The initial time point of the model.
        """
        return 0.0

    def get_current_time(self):
        """Return the current time point that the model is at during simulation

        Returns
        -------
        time : float
            The current time point
        """
        return self.time

    def get_time_step(self):
        """Return the time step associated with model simulation.

        Returns
        -------
        dt : float
            The time step for model simulation
        """
        return self.dt

    def get_time_units(self):
        """Return the time units of the model simulation.

        Returns
        -------
        units : str
            The time unit of simulation as a string
        """
        return self.units

    def make_repository_component(self):
        """Return an XML string representing this BMI in a workflow.

        This description is required by EMELI to discover and load models.

        Returns
        -------
        xml : str
            String serialized XML representation of the component in the
            model repository.
        """
        component = etree.Element('component')

        comp_name = etree.Element('comp_name')
        comp_name.text = self.model.name
        component.append(comp_name)

        mod_path = etree.Element('module_path')
        mod_path.text = os.getcwd()
        component.append(mod_path)

        mod_name = etree.Element('module_name')
        mod_name.text = self.model.name
        component.append(mod_name)

        class_name = etree.Element('class_name')
        class_name.text = 'model_class'
        component.append(class_name)

        model_name = etree.Element('model_name')
        model_name.text = self.model.name
        component.append(model_name)

        lang = etree.Element('language')
        lang.text = 'python'
        component.append(lang)

        ver = etree.Element('version')
        ver.text = self.get_attribute('version')
        component.append(ver)

        au = etree.Element('author')
        au.text = self.get_attribute('author_name')
        component.append(au)

        hu = etree.Element('help_url')
        hu.text = 'http://github.com/sorgerlab/indra'
        component.append(hu)

        for tag in ('cfg_template', 'time_step_type', 'time_units',
                    'grid_type', 'description', 'comp_type', 'uses_types'):
            elem = etree.Element(tag)
            elem.text = tag
            component.append(elem)

        return etree.tounicode(component, pretty_print=True)

    def export_into_python(self):
        """Write the model into a pickle and create a module that loads it.

        The model basically exports itself as a pickle file and a Python
        file is then written which loads the pickle file. This allows
        importing the model in the simulation workflow.
        """
        pkl_path = self.model.name + '.pkl'
        with open(pkl_path, 'wb') as fh:
            pickle.dump(self, fh, protocol=2)
        py_str = """
            import pickle
            with open('%s', 'rb') as fh:
                model_class = pickle.load(fh)
            """ % os.path.abspath(pkl_path)
        py_str = textwrap.dedent(py_str)
        py_path = self.model.name + '.py'
        with open(py_path, 'w') as fh:
            fh.write(py_str)

    def _map_out_in(self, outside_var_name):
        """Return the internal name of a variable mapped from outside."""
        return self.outside_name_map.get(outside_var_name)

    def _map_in_out(self, inside_var_name):
        """Return the external name of a variable mapped from inside."""
        for out_name, in_name in self.outside_name_map.items():
            if inside_var_name == in_name:
                return out_name
        return None
# Default metadata reported through BMIModel.get_attribute; each instance
# gets its own shallow copy in BMIModel.__init__.
default_attributes = dict(
    model_name='indra_model',
    version='1.0',
    author_name='Benjamin M. Gyori',
    grid_type='none',
    time_step_type='fixed',
    step_method='explicit',
    time_units='seconds',
)
| bsd-2-clause |
nanolearningllc/edx-platform-cypress | pavelib/paver_tests/test_paver_get_quality_reports.py | 117 | 1523 | """
Tests to ensure only the report files we want are returned as part of run_quality.
"""
import unittest
from mock import patch
import pavelib.quality
class TestGetReportFiles(unittest.TestCase):
    """
    Ensure only the report files we want are returned as part of run_quality.
    """

    @patch('os.walk')
    def test_get_pylint_reports(self, walk_mock):
        # Two directories, each holding one pylint report.
        walk_mock.return_value = iter([
            ('/foo', ('',), ('pylint.report',)),
            ('/bar', ('/baz',), ('pylint.report',)),
        ])
        found_reports = pavelib.quality.get_violations_reports("pylint")
        self.assertEqual(len(found_reports), 2)

    @patch('os.walk')
    def test_get_pep8_reports(self, walk_mock):
        # Two directories, each holding one pep8 report.
        walk_mock.return_value = iter([
            ('/foo', ('',), ('pep8.report',)),
            ('/bar', ('/baz',), ('pep8.report',)),
        ])
        found_reports = pavelib.quality.get_violations_reports("pep8")
        self.assertEqual(len(found_reports), 2)

    @patch('os.walk')
    def test_get_pep8_reports_noisy(self, walk_mock):
        """ Several conditions: different report types, different files, multiple files """
        # Only the two pep8 reports should be returned; pylint reports and
        # unrelated files must be filtered out.
        walk_mock.return_value = iter([
            ('/foo', ('',), ('pep8.report',)),
            ('/fooz', ('/ball',), ('pylint.report',)),
            ('/fooz', ('/ball',), ('non.report',)),
            ('/fooz', ('/ball',), ('lms.xml',)),
            ('/bar', ('/baz',), ('pep8.report',)),
        ])
        found_reports = pavelib.quality.get_violations_reports("pep8")
        self.assertEqual(len(found_reports), 2)
| agpl-3.0 |
mnmnc/campephilus | modules/plotter/plot.py | 1 | 2808 | import matplotlib.pyplot as plt
class Plot:
    """Thin convenience wrapper around matplotlib.pyplot for simple plots."""

    # Named marker styles mapped to matplotlib format characters; unknown
    # names fall back to a circle.
    _MARKERS = {
        "circle": "o",
        "pixel": ",",
        "point": ".",
        "x": "x",
        "line": "-",
        "triangle": "^",
    }

    def save(self, destination_filename="plotted.png", width=10, height=10, local_dpi=100):
        """Resize the current figure (inches) and write it to disk."""
        figure = plt.gcf()
        figure.set_size_inches(width, height)
        plt.savefig(destination_filename, dpi=local_dpi)

    def plot(self, xlist, ylist, marker_style='circle', def_color="r", def_alpha=0.5, mylinewidth=2.0):
        """Plot the points with a named marker style and color.

        ``mylinewidth`` only applies to the "line" style.
        """
        symbol = self._MARKERS.get(marker_style, "o")
        if symbol == "-":
            plt.plot(xlist, ylist, def_color + symbol, alpha=def_alpha,
                     linewidth=mylinewidth)
        else:
            plt.plot(xlist, ylist, def_color + symbol, alpha=def_alpha)

    def plot_with_sizes(self, xlist, ylist, sizes, marker_style='circle', def_color="r", def_alpha=0.5):
        """Scatter-plot points with per-point sizes.

        NOTE(review): every marker_style branch issued the identical
        scatter call, so ``marker_style`` and ``def_color`` are currently
        ignored; the branches were collapsed without changing behavior.
        """
        plt.scatter(xlist, ylist, s=sizes, alpha=def_alpha)

    def set_label(self, axis_name, label):
        """Label the "x" or "y" axis; print an error for other names."""
        setter = {"x": plt.xlabel, "y": plt.ylabel}.get(axis_name)
        if setter is None:
            print("[ERR] Unknown label", label)
        else:
            setter(label)

    def set_title(self, title="Title", size=12):
        """Set the figure title in Courier New at the given point size."""
        title_font = {'fontname': 'Courier New', 'fontsize': size}
        plt.title(title, **title_font)

    def set_text(self, x=0, y=0, text="Text missing"):
        """Place free text at (x, y) in data coordinates."""
        plt.text(x, y, text)

    def set_axis_limit(self, min_x=0, max_x=100, min_y=0, max_y=100):
        """Fix the visible data ranges of both axes."""
        plt.axis([min_x, max_x, min_y, max_y])

    def set_note(self, x, y, text_x, text_y, text):
        """
        x,y - pointed end
        text_x, text_y - location of the text
        """
        plt.annotate(text, xy=(x, y),
                     xytext=(text_x, text_y),
                     arrowprops=dict(
                         facecolor='black',
                         shrink=0.08,
                         width=1.0,
                         headwidth=5.0,
                         alpha=0.3
                     )
                     )

    def clear_plot(self):
        """Clear the current figure."""
        plt.clf()
def main():
    """Demo entry point: draw a blue line plot and save it as an image."""
    out_image = "D:\\out.png"
    pl = Plot()
    # NOTE(review): only two positional args are passed here, so this sets
    # min_x=10, max_x=10 (a degenerate x-range) while y keeps its defaults;
    # confirm the intended limits.
    pl.set_axis_limit(10, 10)
    pl.plot([1, 4, 9, 16], [1, 2, 3, 4], "line", "b", 0.4)
    pl.save(out_image)
if __name__ == "__main__":
main() | apache-2.0 |
FrancoisRheaultUS/dipy | dipy/align/tests/test_crosscorr.py | 19 | 7954 | import numpy as np
from numpy.testing import assert_array_almost_equal
from dipy.align import floating
from dipy.align import crosscorr as cc
def test_cc_factors_2d():
    r"""Cross-check the optimized 2D CC factors against the reference.

    The optimized implementation must agree with the direct (not optimized,
    but less error prone) implementation for several window radii.
    """
    static = np.arange(20 * 20, dtype=floating).reshape(20, 20)
    moving = np.arange(20 * 20, dtype=floating)[::-1].reshape(20, 20)
    static /= static.max()
    moving /= moving.max()
    for radius in (0, 1, 3, 6):
        fast = np.asarray(cc.precompute_cc_factors_2d(static, moving, radius))
        reference = np.asarray(
            cc.precompute_cc_factors_2d_test(static, moving, radius))
        assert_array_almost_equal(fast, reference)
def test_cc_factors_3d():
    r"""Cross-check the optimized 3D CC factors against the reference.

    The optimized implementation must agree with the direct (not optimized,
    but less error prone) implementation for several window radii.
    """
    static = np.arange(20 * 20 * 20, dtype=floating).reshape(20, 20, 20)
    moving = np.arange(20 * 20 * 20, dtype=floating)[::-1].reshape(20, 20, 20)
    static /= static.max()
    moving /= moving.max()
    for radius in (0, 1, 3, 6):
        fast = np.asarray(cc.precompute_cc_factors_3d(static, moving, radius))
        reference = np.asarray(
            cc.precompute_cc_factors_3d_test(static, moving, radius))
        assert_array_almost_equal(fast, reference, decimal=5)
def test_compute_cc_steps_2d():
    """Check the 2D CC forward/backward steps against closed-form values.

    Builds two synthetic scalar images F and G with known gradients (plus
    seeded noise), precomputes the CC factors, and compares the optimized
    step functions with the analytic gradient expression. The zeroed
    borders of ``expected`` mirror the boundary handling of the optimized
    code for each radius.

    NOTE: the np.random.ranf calls must stay in this exact order after the
    seed, otherwise the noise realizations (and thus the expected values)
    change.
    """
    # Select arbitrary images' shape (same shape for both images)
    sh = (32, 32)
    radius = 2
    # Select arbitrary centers
    c_f = (np.asarray(sh)/2) + 1.25
    c_g = c_f + 2.5
    # Compute the identity vector field I(x) = x in R^2
    x_0 = np.asarray(range(sh[0]))
    x_1 = np.asarray(range(sh[1]))
    X = np.ndarray(sh + (2,), dtype=np.float64)
    O = np.ones(sh)
    X[..., 0] = x_0[:, None] * O
    X[..., 1] = x_1[None, :] * O
    # Compute the gradient fields of F and G
    np.random.seed(1147572)
    gradF = np.array(X - c_f, dtype=floating)
    gradG = np.array(X - c_g, dtype=floating)
    sz = np.size(gradF)
    Fnoise = np.random.ranf(sz).reshape(gradF.shape) * gradF.max() * 0.1
    Fnoise = Fnoise.astype(floating)
    gradF += Fnoise
    sz = np.size(gradG)
    Gnoise = np.random.ranf(sz).reshape(gradG.shape) * gradG.max() * 0.1
    Gnoise = Gnoise.astype(floating)
    gradG += Gnoise
    # F and G are the squared-norm potentials of the gradient fields.
    sq_norm_grad_G = np.sum(gradG**2, -1)
    F = np.array(0.5*np.sum(gradF**2, -1), dtype=floating)
    G = np.array(0.5*sq_norm_grad_G, dtype=floating)
    Fnoise = np.random.ranf(np.size(F)).reshape(F.shape) * F.max() * 0.1
    Fnoise = Fnoise.astype(floating)
    F += Fnoise
    Gnoise = np.random.ranf(np.size(G)).reshape(G.shape) * G.max() * 0.1
    Gnoise = Gnoise.astype(floating)
    G += Gnoise
    # precompute the cross correlation factors
    factors = cc.precompute_cc_factors_2d_test(F, G, radius)
    factors = np.array(factors, dtype=floating)
    # test the forward step against the exact expression
    I = factors[..., 0]
    J = factors[..., 1]
    sfm = factors[..., 2]
    sff = factors[..., 3]
    smm = factors[..., 4]
    expected = np.ndarray(shape=sh + (2,), dtype=floating)
    factor = (-2.0 * sfm / (sff * smm)) * (J - (sfm / sff) * I)
    expected[..., 0] = factor * gradF[..., 0]
    factor = (-2.0 * sfm / (sff * smm)) * (J - (sfm / sff) * I)
    expected[..., 1] = factor * gradF[..., 1]
    actual, energy = cc.compute_cc_forward_step_2d(gradF, factors, 0)
    assert_array_almost_equal(actual, expected)
    for radius in range(1, 5):
        # The optimized code zeroes a border of the given radius; the
        # zeroing accumulates across iterations, matching its behavior.
        expected[:radius, ...] = 0
        expected[:, :radius, ...] = 0
        expected[-radius::, ...] = 0
        expected[:, -radius::, ...] = 0
        actual, energy = cc.compute_cc_forward_step_2d(gradF, factors, radius)
        assert_array_almost_equal(actual, expected)
    # test the backward step against the exact expression
    factor = (-2.0 * sfm / (sff * smm)) * (I - (sfm / smm) * J)
    expected[..., 0] = factor * gradG[..., 0]
    factor = (-2.0 * sfm / (sff * smm)) * (I - (sfm / smm) * J)
    expected[..., 1] = factor * gradG[..., 1]
    actual, energy = cc.compute_cc_backward_step_2d(gradG, factors, 0)
    assert_array_almost_equal(actual, expected)
    for radius in range(1, 5):
        expected[:radius, ...] = 0
        expected[:, :radius, ...] = 0
        expected[-radius::, ...] = 0
        expected[:, -radius::, ...] = 0
        actual, energy = cc.compute_cc_backward_step_2d(gradG, factors, radius)
        assert_array_almost_equal(actual, expected)
def test_compute_cc_steps_3d():
    """Check the 3D CC forward/backward steps against closed-form values.

    Same construction as the 2D variant, extended to three axes: synthetic
    volumes with known gradients plus seeded noise, compared against the
    analytic gradient expression, with borders zeroed per radius.

    NOTE: the np.random.ranf calls must stay in this exact order after the
    seed, otherwise the noise realizations (and thus the expected values)
    change.
    """
    sh = (32, 32, 32)
    radius = 2
    # Select arbitrary centers
    c_f = (np.asarray(sh)/2) + 1.25
    c_g = c_f + 2.5
    # Compute the identity vector field I(x) = x in R^3
    x_0 = np.asarray(range(sh[0]))
    x_1 = np.asarray(range(sh[1]))
    x_2 = np.asarray(range(sh[2]))
    X = np.ndarray(sh + (3,), dtype=np.float64)
    O = np.ones(sh)
    X[..., 0] = x_0[:, None, None] * O
    X[..., 1] = x_1[None, :, None] * O
    X[..., 2] = x_2[None, None, :] * O
    # Compute the gradient fields of F and G
    np.random.seed(12465825)
    gradF = np.array(X - c_f, dtype=floating)
    gradG = np.array(X - c_g, dtype=floating)
    sz = np.size(gradF)
    Fnoise = np.random.ranf(sz).reshape(gradF.shape) * gradF.max() * 0.1
    Fnoise = Fnoise.astype(floating)
    gradF += Fnoise
    sz = np.size(gradG)
    Gnoise = np.random.ranf(sz).reshape(gradG.shape) * gradG.max() * 0.1
    Gnoise = Gnoise.astype(floating)
    gradG += Gnoise
    # F and G are the squared-norm potentials of the gradient fields.
    sq_norm_grad_G = np.sum(gradG**2, -1)
    F = np.array(0.5*np.sum(gradF**2, -1), dtype=floating)
    G = np.array(0.5*sq_norm_grad_G, dtype=floating)
    Fnoise = np.random.ranf(np.size(F)).reshape(F.shape) * F.max() * 0.1
    Fnoise = Fnoise.astype(floating)
    F += Fnoise
    Gnoise = np.random.ranf(np.size(G)).reshape(G.shape) * G.max() * 0.1
    Gnoise = Gnoise.astype(floating)
    G += Gnoise
    # precompute the cross correlation factors
    factors = cc.precompute_cc_factors_3d_test(F, G, radius)
    factors = np.array(factors, dtype=floating)
    # test the forward step against the exact expression
    I = factors[..., 0]
    J = factors[..., 1]
    sfm = factors[..., 2]
    sff = factors[..., 3]
    smm = factors[..., 4]
    expected = np.ndarray(shape=sh + (3,), dtype=floating)
    factor = (-2.0 * sfm / (sff * smm)) * (J - (sfm / sff) * I)
    expected[..., 0] = factor * gradF[..., 0]
    expected[..., 1] = factor * gradF[..., 1]
    expected[..., 2] = factor * gradF[..., 2]
    actual, energy = cc.compute_cc_forward_step_3d(gradF, factors, 0)
    assert_array_almost_equal(actual, expected)
    for radius in range(1, 5):
        # The optimized code zeroes a border of the given radius; the
        # zeroing accumulates across iterations, matching its behavior.
        expected[:radius, ...] = 0
        expected[:, :radius, ...] = 0
        expected[:, :, :radius, :] = 0
        expected[-radius::, ...] = 0
        expected[:, -radius::, ...] = 0
        expected[:, :, -radius::, ...] = 0
        actual, energy = cc.compute_cc_forward_step_3d(gradF, factors, radius)
        assert_array_almost_equal(actual, expected)
    # test the backward step against the exact expression
    factor = (-2.0 * sfm / (sff * smm)) * (I - (sfm / smm) * J)
    expected[..., 0] = factor * gradG[..., 0]
    expected[..., 1] = factor * gradG[..., 1]
    expected[..., 2] = factor * gradG[..., 2]
    actual, energy = cc.compute_cc_backward_step_3d(gradG, factors, 0)
    assert_array_almost_equal(actual, expected)
    for radius in range(1, 5):
        expected[:radius, ...] = 0
        expected[:, :radius, ...] = 0
        expected[:, :, :radius, :] = 0
        expected[-radius::, ...] = 0
        expected[:, -radius::, ...] = 0
        expected[:, :, -radius::, ...] = 0
        actual, energy = cc.compute_cc_backward_step_3d(gradG, factors, radius)
        assert_array_almost_equal(actual, expected)
if __name__ == '__main__':
    # Run the whole suite directly (without a test runner) when this
    # module is executed as a script.
    test_cc_factors_2d()
    test_cc_factors_3d()
    test_compute_cc_steps_2d()
    test_compute_cc_steps_3d()
| bsd-3-clause |
miketamis/CouchPotatoServer | libs/dateutil/parser.py | 103 | 33736 | # -*- coding:iso-8859-1 -*-
"""
Copyright (c) 2003-2007 Gustavo Niemeyer <gustavo@niemeyer.net>
This module offers extensions to the standard Python
datetime module.
"""
from __future__ import unicode_literals
__license__ = "Simplified BSD"
import datetime
import string
import time
import collections
# Both branches of the historical cStringIO fallback import the identical
# name, so the dead try/except is reduced to a plain import.
from io import StringIO
from six import text_type, binary_type, integer_types
from . import relativedelta
from . import tz
__all__ = ["parse", "parserinfo"]
# Some pointers:
#
# http://www.cl.cam.ac.uk/~mgk25/iso-time.html
# http://www.iso.ch/iso/en/prods-services/popstds/datesandtime.html
# http://www.w3.org/TR/NOTE-datetime
# http://ringmaster.arc.nasa.gov/tools/time_formats.html
# http://search.cpan.org/author/MUIR/Time-modules-2003.0211/lib/Time/ParseDate.pm
# http://stein.cshl.org/jade/distrib/docs/java.text.SimpleDateFormat.html
class _timelex(object):
    """Lexer that splits a date/time string into word, number, whitespace
    and symbol tokens for the parser to interpret.

    Tokens are produced by a small character-driven state machine: runs of
    letters and runs of digits are grouped, and dotted sequences such as
    '1.5' or 'a.m.' are kept together when they form a single decimal
    number or abbreviation.
    """

    def __init__(self, instream):
        # Accept either a text string or a file-like object; strings are
        # wrapped so everything below can use the read(1) interface.
        if isinstance(instream, text_type):
            instream = StringIO(instream)
        self.instream = instream
        # Characters allowed inside a "word" token; includes Latin-1
        # accented letters so month/day names in several European languages
        # lex as single words. (The 'fe' ordering is as in upstream;
        # membership tests are unaffected.)
        self.wordchars = ('abcdfeghijklmnopqrstuvwxyz'
                          'ABCDEFGHIJKLMNOPQRSTUVWXYZ_'
                          'ßàáâãäåæçèéêëìíîïðñòóôõöøùúûüýþÿ'
                          'ÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖØÙÚÛÜÝÞ')
        self.numchars = '0123456789'
        self.whitespace = ' \t\r\n'
        # Pushback stacks: characters read past the current token, and
        # tokens split off a dotted sequence awaiting return.
        self.charstack = []
        self.tokenstack = []
        self.eof = False

    def get_token(self):
        """Return the next token, or None once the input is exhausted.

        States: None (start), 'a' (word), '0' (number), 'a.' (dotted
        word/abbreviation), '0.' (dotted/decimal number).
        """
        # Serve any token previously split off a dotted sequence first.
        if self.tokenstack:
            return self.tokenstack.pop(0)
        seenletters = False
        token = None
        state = None
        wordchars = self.wordchars
        numchars = self.numchars
        whitespace = self.whitespace
        while not self.eof:
            # Prefer pushed-back characters over fresh input.
            if self.charstack:
                nextchar = self.charstack.pop(0)
            else:
                nextchar = self.instream.read(1)
                # Skip embedded NUL bytes.
                while nextchar == '\x00':
                    nextchar = self.instream.read(1)
            if not nextchar:
                self.eof = True
                break
            elif not state:
                # First character decides the token type.
                token = nextchar
                if nextchar in wordchars:
                    state = 'a'
                elif nextchar in numchars:
                    state = '0'
                elif nextchar in whitespace:
                    # All whitespace collapses to a single-space token.
                    token = ' '
                    break  # emit token
                else:
                    break  # emit token
            elif state == 'a':
                # Inside a word; '.' may start an abbreviation ('a.m.').
                seenletters = True
                if nextchar in wordchars:
                    token += nextchar
                elif nextchar == '.':
                    token += nextchar
                    state = 'a.'
                else:
                    self.charstack.append(nextchar)
                    break  # emit token
            elif state == '0':
                # Inside a number; '.' may start a decimal fraction.
                if nextchar in numchars:
                    token += nextchar
                elif nextchar == '.':
                    token += nextchar
                    state = '0.'
                else:
                    self.charstack.append(nextchar)
                    break  # emit token
            elif state == 'a.':
                # Dotted word; a digit right after a dot switches it to a
                # dotted number.
                seenletters = True
                if nextchar == '.' or nextchar in wordchars:
                    token += nextchar
                elif nextchar in numchars and token[-1] == '.':
                    token += nextchar
                    state = '0.'
                else:
                    self.charstack.append(nextchar)
                    break  # emit token
            elif state == '0.':
                # Dotted number; a letter right after a dot switches it
                # back to a dotted word.
                if nextchar == '.' or nextchar in numchars:
                    token += nextchar
                elif nextchar in wordchars and token[-1] == '.':
                    token += nextchar
                    state = 'a.'
                else:
                    self.charstack.append(nextchar)
                    break  # emit token
        # A dotted sequence that cannot be a single decimal number (it has
        # letters, more than one dot, or a trailing dot) is split back into
        # its components, which are queued on tokenstack.
        if (state in ('a.', '0.') and
            (seenletters or token.count('.') > 1 or token[-1] == '.')):
            l = token.split('.')
            token = l[0]
            for tok in l[1:]:
                self.tokenstack.append('.')
                if tok:
                    self.tokenstack.append(tok)
        return token

    def __iter__(self):
        return self

    def __next__(self):
        token = self.get_token()
        if token is None:
            raise StopIteration
        return token

    def next(self):
        return self.__next__()  # Python 2.x support

    def split(cls, s):
        # Tokenize the whole string in one shot.
        return list(cls(s))
    split = classmethod(split)
class _resultbase(object):
def __init__(self):
for attr in self.__slots__:
setattr(self, attr, None)
def _repr(self, classname):
l = []
for attr in self.__slots__:
value = getattr(self, attr)
if value is not None:
l.append("%s=%s" % (attr, repr(value)))
return "%s(%s)" % (classname, ", ".join(l))
def __repr__(self):
return self._repr(self.__class__.__name__)
class parserinfo(object):
    """Vocabulary and policy used by ``parser``.

    Holds the month/weekday names, am/pm markers, filler ("jump") words
    and timezone abbreviations that drive parsing, plus the ``dayfirst``
    / ``yearfirst`` ambiguity-resolution flags.
    """

    # m from a.m/p.m, t from ISO T separator
    JUMP = [" ", ".", ",", ";", "-", "/", "'",
            "at", "on", "and", "ad", "m", "t", "of",
            "st", "nd", "rd", "th"]

    WEEKDAYS = [("Mon", "Monday"),
                ("Tue", "Tuesday"),
                ("Wed", "Wednesday"),
                ("Thu", "Thursday"),
                ("Fri", "Friday"),
                ("Sat", "Saturday"),
                ("Sun", "Sunday")]
    MONTHS = [("Jan", "January"),
              ("Feb", "February"),
              ("Mar", "March"),
              ("Apr", "April"),
              ("May", "May"),
              ("Jun", "June"),
              ("Jul", "July"),
              ("Aug", "August"),
              ("Sep", "Sept", "September"),
              ("Oct", "October"),
              ("Nov", "November"),
              ("Dec", "December")]
    HMS = [("h", "hour", "hours"),
           ("m", "minute", "minutes"),
           ("s", "second", "seconds")]
    AMPM = [("am", "a"),
            ("pm", "p")]
    UTCZONE = ["UTC", "GMT", "Z"]
    PERTAIN = ["of"]
    TZOFFSET = {}

    def __init__(self, dayfirst = False, yearfirst = False):
        self._jump = self._convert(self.JUMP)
        self._weekdays = self._convert(self.WEEKDAYS)
        self._months = self._convert(self.MONTHS)
        self._hms = self._convert(self.HMS)
        self._ampm = self._convert(self.AMPM)
        self._utczone = self._convert(self.UTCZONE)
        self._pertain = self._convert(self.PERTAIN)

        self.dayfirst = dayfirst
        self.yearfirst = yearfirst

        self._year = time.localtime().tm_year
        self._century = self._year // 100 * 100

    def _convert(self, lst):
        # Build a lowercase name -> index lookup; tuple entries are
        # aliases that all map to the same index.
        dct = {}
        for index, names in enumerate(lst):
            if not isinstance(names, tuple):
                names = (names,)
            for name in names:
                dct[name.lower()] = index
        return dct

    def jump(self, name):
        """True if *name* is a filler word to be skipped."""
        return name.lower() in self._jump

    def weekday(self, name):
        """Weekday index (Mon=0) or None; needs at least 3 characters."""
        if len(name) >= 3:
            return self._weekdays.get(name.lower())
        return None

    def month(self, name):
        """Month number (Jan=1) or None; needs at least 3 characters."""
        if len(name) >= 3:
            idx = self._months.get(name.lower())
            if idx is not None:
                return idx + 1
        return None

    def hms(self, name):
        """0/1/2 for an hour/minute/second marker, or None."""
        return self._hms.get(name.lower())

    def ampm(self, name):
        """0 for am, 1 for pm, or None."""
        return self._ampm.get(name.lower())

    def pertain(self, name):
        """True for linking words like "of" (as in "Jan of 01")."""
        return name.lower() in self._pertain

    def utczone(self, name):
        """True if *name* (case-insensitive) denotes UTC."""
        return name.lower() in self._utczone

    def tzoffset(self, name):
        # NOTE: the membership test here is case-sensitive against the
        # lowercased keys, matching the original behaviour.
        return 0 if name in self._utczone else self.TZOFFSET.get(name)

    def convertyear(self, year):
        # Map two-digit years into the century window centred on the
        # current year (+/- 50 years).
        if year < 100:
            year += self._century
            if abs(year - self._year) >= 50:
                year += 100 if year < self._year else -100
        return year

    def validate(self, res):
        # move to info
        if res.year is not None:
            res.year = self.convertyear(res.year)
        if (res.tzoffset == 0 and not res.tzname) or res.tzname == 'Z':
            res.tzname = "UTC"
            res.tzoffset = 0
        elif res.tzoffset != 0 and res.tzname and self.utczone(res.tzname):
            res.tzoffset = 0
        return True
class parser(object):
    """Date/time string parser driven by a ``parserinfo`` vocabulary."""

    def __init__(self, info = None):
        self.info = info or parserinfo()

    def parse(self, timestr, default = None,
              ignoretz = False, tzinfos = None,
              **kwargs):
        """Parse *timestr* and return a ``datetime.datetime``.

        Fields that cannot be extracted from the string are taken from
        *default* (today at midnight when not supplied).  *tzinfos* may be
        a mapping or a callable resolving timezone names to a tzinfo, a tz
        string, or an integer offset.  Raises ValueError when the string
        cannot be parsed or a tzinfos entry has an unsupported type.
        """
        if not default:
            default = datetime.datetime.now().replace(hour = 0, minute = 0,
                                                      second = 0, microsecond = 0)
        res = self._parse(timestr, **kwargs)
        if res is None:
            raise ValueError("unknown string format")
        repl = {}
        for attr in ["year", "month", "day", "hour",
                     "minute", "second", "microsecond"]:
            value = getattr(res, attr)
            if value is not None:
                repl[attr] = value
        ret = default.replace(**repl)
        if res.weekday is not None and not res.day:
            ret = ret + relativedelta.relativedelta(weekday = res.weekday)
        if not ignoretz:
            if isinstance(tzinfos, collections.Callable) or tzinfos and res.tzname in tzinfos:
                if isinstance(tzinfos, collections.Callable):
                    tzdata = tzinfos(res.tzname, res.tzoffset)
                else:
                    tzdata = tzinfos.get(res.tzname)
                if isinstance(tzdata, datetime.tzinfo):
                    tzinfo = tzdata
                elif isinstance(tzdata, text_type):
                    tzinfo = tz.tzstr(tzdata)
                elif isinstance(tzdata, integer_types):
                    tzinfo = tz.tzoffset(res.tzname, tzdata)
                else:
                    raise ValueError("offset must be tzinfo subclass, "
                                     "tz string, or int offset")
                ret = ret.replace(tzinfo = tzinfo)
            elif res.tzname and res.tzname in time.tzname:
                ret = ret.replace(tzinfo = tz.tzlocal())
            elif res.tzoffset == 0:
                ret = ret.replace(tzinfo = tz.tzutc())
            elif res.tzoffset:
                ret = ret.replace(tzinfo = tz.tzoffset(res.tzname, res.tzoffset))
        return ret

    class _result(_resultbase):
        __slots__ = ["year", "month", "day", "weekday",
                     "hour", "minute", "second", "microsecond",
                     "tzname", "tzoffset"]

    def _parse(self, timestr, dayfirst = None, yearfirst = None, fuzzy = False):
        """Tokenize *timestr* and return a populated ``_result``, or None.

        Any IndexError/ValueError/AssertionError raised while consuming
        tokens is treated as "unparseable" and yields None.
        """
        info = self.info
        if dayfirst is None:
            dayfirst = info.dayfirst
        if yearfirst is None:
            yearfirst = info.yearfirst
        res = self._result()
        l = _timelex.split(timestr)
        try:
            # year/month/day list
            ymd = []
            # Index of the month string in ymd
            mstridx = -1
            len_l = len(l)
            i = 0
            while i < len_l:
                # Check if it's a number
                try:
                    value_repr = l[i]
                    value = float(value_repr)
                except ValueError:
                    value = None
                if value is not None:
                    # Token is a number
                    len_li = len(l[i])
                    i += 1
                    if (len(ymd) == 3 and len_li in (2, 4)
                            and (i >= len_l or (l[i] != ':' and
                                                info.hms(l[i]) is None))):
                        # 19990101T23[59]
                        s = l[i - 1]
                        res.hour = int(s[:2])
                        if len_li == 4:
                            res.minute = int(s[2:])
                    elif len_li == 6 or (len_li > 6 and l[i - 1].find('.') == 6):
                        # YYMMDD or HHMMSS[.ss]
                        s = l[i - 1]
                        if not ymd and l[i - 1].find('.') == -1:
                            ymd.append(info.convertyear(int(s[:2])))
                            ymd.append(int(s[2:4]))
                            ymd.append(int(s[4:]))
                        else:
                            # 19990101T235959[.59]
                            res.hour = int(s[:2])
                            res.minute = int(s[2:4])
                            res.second, res.microsecond = _parsems(s[4:])
                    elif len_li == 8:
                        # YYYYMMDD
                        s = l[i - 1]
                        ymd.append(int(s[:4]))
                        ymd.append(int(s[4:6]))
                        ymd.append(int(s[6:]))
                    elif len_li in (12, 14):
                        # YYYYMMDDhhmm[ss]
                        s = l[i - 1]
                        ymd.append(int(s[:4]))
                        ymd.append(int(s[4:6]))
                        ymd.append(int(s[6:8]))
                        res.hour = int(s[8:10])
                        res.minute = int(s[10:12])
                        if len_li == 14:
                            res.second = int(s[12:])
                    elif ((i < len_l and info.hms(l[i]) is not None) or
                          (i + 1 < len_l and l[i] == ' ' and
                           info.hms(l[i + 1]) is not None)):
                        # HH[ ]h or MM[ ]m or SS[.ss][ ]s
                        if l[i] == ' ':
                            i += 1
                        idx = info.hms(l[i])
                        while True:
                            if idx == 0:
                                res.hour = int(value)
                                if value % 1:
                                    res.minute = int(60 * (value % 1))
                            elif idx == 1:
                                res.minute = int(value)
                                if value % 1:
                                    res.second = int(60 * (value % 1))
                            elif idx == 2:
                                res.second, res.microsecond = \
                                    _parsems(value_repr)
                            i += 1
                            if i >= len_l or idx == 2:
                                break
                            # 12h00
                            try:
                                value_repr = l[i]
                                value = float(value_repr)
                            except ValueError:
                                break
                            else:
                                i += 1
                                idx += 1
                                if i < len_l:
                                    newidx = info.hms(l[i])
                                    if newidx is not None:
                                        idx = newidx
                    elif i == len_l and l[i - 2] == ' ' and info.hms(l[i - 3]) is not None:
                        # X h MM or X m SS
                        idx = info.hms(l[i - 3]) + 1
                        if idx == 1:
                            res.minute = int(value)
                            if value % 1:
                                res.second = int(60 * (value % 1))
                        elif idx == 2:
                            res.second, res.microsecond = \
                                _parsems(value_repr)
                            i += 1
                    elif i + 1 < len_l and l[i] == ':':
                        # HH:MM[:SS[.ss]]
                        res.hour = int(value)
                        i += 1
                        value = float(l[i])
                        res.minute = int(value)
                        if value % 1:
                            res.second = int(60 * (value % 1))
                        i += 1
                        if i < len_l and l[i] == ':':
                            res.second, res.microsecond = _parsems(l[i + 1])
                            i += 2
                    elif i < len_l and l[i] in ('-', '/', '.'):
                        sep = l[i]
                        ymd.append(int(value))
                        i += 1
                        if i < len_l and not info.jump(l[i]):
                            try:
                                # 01-01[-01]
                                ymd.append(int(l[i]))
                            except ValueError:
                                # 01-Jan[-01]
                                value = info.month(l[i])
                                if value is not None:
                                    ymd.append(value)
                                    assert mstridx == -1
                                    mstridx = len(ymd) - 1
                                else:
                                    return None
                            i += 1
                            if i < len_l and l[i] == sep:
                                # We have three members
                                i += 1
                                value = info.month(l[i])
                                if value is not None:
                                    ymd.append(value)
                                    # BUGFIX: assert *before* recording the
                                    # month index.  The original set mstridx
                                    # first, so the assertion always failed
                                    # here (ymd already has three members),
                                    # the AssertionError was swallowed below,
                                    # and any "NN-NN-MonthName" date parsed
                                    # as None.  Asserting first matches the
                                    # parallel code above and makes the
                                    # mstridx == 2 handling reachable.
                                    assert mstridx == -1
                                    mstridx = len(ymd) - 1
                                else:
                                    ymd.append(int(l[i]))
                                i += 1
                    elif i >= len_l or info.jump(l[i]):
                        if i + 1 < len_l and info.ampm(l[i + 1]) is not None:
                            # 12 am
                            res.hour = int(value)
                            if res.hour < 12 and info.ampm(l[i + 1]) == 1:
                                res.hour += 12
                            elif res.hour == 12 and info.ampm(l[i + 1]) == 0:
                                res.hour = 0
                            i += 1
                        else:
                            # Year, month or day
                            ymd.append(int(value))
                        i += 1
                    elif info.ampm(l[i]) is not None:
                        # 12am
                        res.hour = int(value)
                        if res.hour < 12 and info.ampm(l[i]) == 1:
                            res.hour += 12
                        elif res.hour == 12 and info.ampm(l[i]) == 0:
                            res.hour = 0
                        i += 1
                    elif not fuzzy:
                        return None
                    else:
                        i += 1
                    continue
                # Check weekday
                value = info.weekday(l[i])
                if value is not None:
                    res.weekday = value
                    i += 1
                    continue
                # Check month name
                value = info.month(l[i])
                if value is not None:
                    ymd.append(value)
                    assert mstridx == -1
                    mstridx = len(ymd) - 1
                    i += 1
                    if i < len_l:
                        if l[i] in ('-', '/'):
                            # Jan-01[-99]
                            sep = l[i]
                            i += 1
                            ymd.append(int(l[i]))
                            i += 1
                            if i < len_l and l[i] == sep:
                                # Jan-01-99
                                i += 1
                                ymd.append(int(l[i]))
                                i += 1
                        elif (i + 3 < len_l and l[i] == l[i + 2] == ' '
                              and info.pertain(l[i + 1])):
                            # Jan of 01
                            # In this case, 01 is clearly year
                            try:
                                value = int(l[i + 3])
                            except ValueError:
                                # Wrong guess
                                pass
                            else:
                                # Convert it here to become unambiguous
                                ymd.append(info.convertyear(value))
                            i += 4
                    continue
                # Check am/pm
                value = info.ampm(l[i])
                if value is not None:
                    if value == 1 and res.hour < 12:
                        res.hour += 12
                    elif value == 0 and res.hour == 12:
                        res.hour = 0
                    i += 1
                    continue
                # Check for a timezone name
                if (res.hour is not None and len(l[i]) <= 5 and
                        res.tzname is None and res.tzoffset is None and
                        not [x for x in l[i] if x not in string.ascii_uppercase]):
                    res.tzname = l[i]
                    res.tzoffset = info.tzoffset(res.tzname)
                    i += 1
                    # Check for something like GMT+3, or BRST+3. Notice
                    # that it doesn't mean "I am 3 hours after GMT", but
                    # "my time +3 is GMT". If found, we reverse the
                    # logic so that timezone parsing code will get it
                    # right.
                    if i < len_l and l[i] in ('+', '-'):
                        l[i] = ('+', '-')[l[i] == '+']
                        res.tzoffset = None
                        if info.utczone(res.tzname):
                            # With something like GMT+3, the timezone
                            # is *not* GMT.
                            res.tzname = None
                    continue
                # Check for a numbered timezone
                if res.hour is not None and l[i] in ('+', '-'):
                    signal = (-1, 1)[l[i] == '+']
                    i += 1
                    len_li = len(l[i])
                    if len_li == 4:
                        # -0300
                        res.tzoffset = int(l[i][:2]) * 3600 + int(l[i][2:]) * 60
                    elif i + 1 < len_l and l[i + 1] == ':':
                        # -03:00
                        res.tzoffset = int(l[i]) * 3600 + int(l[i + 2]) * 60
                        i += 2
                    elif len_li <= 2:
                        # -[0]3
                        res.tzoffset = int(l[i][:2]) * 3600
                    else:
                        return None
                    i += 1
                    res.tzoffset *= signal
                    # Look for a timezone name between parenthesis
                    if (i + 3 < len_l and
                            info.jump(l[i]) and l[i + 1] == '(' and l[i + 3] == ')' and
                            3 <= len(l[i + 2]) <= 5 and
                            not [x for x in l[i + 2]
                                 if x not in string.ascii_uppercase]):
                        # -0300 (BRST)
                        res.tzname = l[i + 2]
                        i += 4
                    continue
                # Check jumps
                if not (info.jump(l[i]) or fuzzy):
                    return None
                i += 1
            # Process year/month/day
            len_ymd = len(ymd)
            if len_ymd > 3:
                # More than three members!?
                return None
            elif len_ymd == 1 or (mstridx != -1 and len_ymd == 2):
                # One member, or two members with a month string
                if mstridx != -1:
                    res.month = ymd[mstridx]
                    del ymd[mstridx]
                if len_ymd > 1 or mstridx == -1:
                    if ymd[0] > 31:
                        res.year = ymd[0]
                    else:
                        res.day = ymd[0]
            elif len_ymd == 2:
                # Two members with numbers
                if ymd[0] > 31:
                    # 99-01
                    res.year, res.month = ymd
                elif ymd[1] > 31:
                    # 01-99
                    res.month, res.year = ymd
                elif dayfirst and ymd[1] <= 12:
                    # 13-01
                    res.day, res.month = ymd
                else:
                    # 01-13
                    res.month, res.day = ymd
            if len_ymd == 3:
                # Three members
                if mstridx == 0:
                    res.month, res.day, res.year = ymd
                elif mstridx == 1:
                    if ymd[0] > 31 or (yearfirst and ymd[2] <= 31):
                        # 99-Jan-01
                        res.year, res.month, res.day = ymd
                    else:
                        # 01-Jan-01
                        # Give precedence to day-first, since
                        # two-digit years is usually hand-written.
                        res.day, res.month, res.year = ymd
                elif mstridx == 2:
                    # WTF!?
                    if ymd[1] > 31:
                        # 01-99-Jan
                        res.day, res.year, res.month = ymd
                    else:
                        # 99-01-Jan
                        res.year, res.day, res.month = ymd
                else:
                    if ymd[0] > 31 or \
                       (yearfirst and ymd[1] <= 12 and ymd[2] <= 31):
                        # 99-01-01
                        res.year, res.month, res.day = ymd
                    elif ymd[0] > 12 or (dayfirst and ymd[1] <= 12):
                        # 13-01-01
                        res.day, res.month, res.year = ymd
                    else:
                        # 01-13-01
                        res.month, res.day, res.year = ymd
        except (IndexError, ValueError, AssertionError):
            return None
        if not info.validate(res):
            return None
        return res
# Shared parser instance used when no custom parserinfo is supplied.
DEFAULTPARSER = parser()


def parse(timestr, parserinfo = None, **kwargs):
    """Parse *timestr* into a datetime using ``parser``.

    Bytes input is decoded first: Python 2.x datetimes stringify to bytes
    while 3.x produces unicode, and internally only unicode is handled.
    """
    if isinstance(timestr, binary_type):
        timestr = timestr.decode()
    chosen = parser(parserinfo) if parserinfo else DEFAULTPARSER
    return chosen.parse(timestr, **kwargs)
class _tzparser(object):
    """Parser for POSIX-style TZ environment strings such as
    ``EST5EDT,M3.2.0,M11.1.0`` or ``GMT0BST,3,0,30,3600,10,0,26,7200``.

    ``parse`` returns a ``_result`` with the standard/DST abbreviations
    and offsets plus the DST start/end rules, or None on failure.
    """

    class _result(_resultbase):
        __slots__ = ["stdabbr", "stdoffset", "dstabbr", "dstoffset",
                     "start", "end"]

        class _attr(_resultbase):
            # One DST transition rule (start or end of DST).
            __slots__ = ["month", "week", "weekday",
                         "yday", "jyday", "day", "time"]

        def __repr__(self):
            return self._repr("")

        def __init__(self):
            _resultbase.__init__(self)
            self.start = self._attr()
            self.end = self._attr()

    def parse(self, tzstr):
        """Parse *tzstr* and return a ``_result`` or None."""
        res = self._result()
        l = _timelex.split(tzstr)
        try:
            len_l = len(l)
            i = 0
            # First consume the abbreviation/offset pairs: std name, std
            # offset, then optionally dst name and dst offset.
            while i < len_l:
                # BRST+3[BRDT[+2]]
                j = i
                while j < len_l and not [x for x in l[j]
                                         if x in "0123456789:,-+"]:
                    j += 1
                if j != i:
                    if not res.stdabbr:
                        offattr = "stdoffset"
                        res.stdabbr = "".join(l[i:j])
                    else:
                        offattr = "dstoffset"
                        res.dstabbr = "".join(l[i:j])
                    i = j
                    if (i < len_l and
                            (l[i] in ('+', '-') or l[i][0] in "0123456789")):
                        if l[i] in ('+', '-'):
                            # Yes, that's right. See the TZ variable
                            # documentation.
                            signal = (1, -1)[l[i] == '+']
                            i += 1
                        else:
                            signal = -1
                        len_li = len(l[i])
                        if len_li == 4:
                            # -0300
                            setattr(res, offattr,
                                    (int(l[i][:2]) * 3600 + int(l[i][2:]) * 60) * signal)
                        elif i + 1 < len_l and l[i + 1] == ':':
                            # -03:00
                            setattr(res, offattr,
                                    (int(l[i]) * 3600 + int(l[i + 2]) * 60) * signal)
                            i += 2
                        elif len_li <= 2:
                            # -[0]3
                            setattr(res, offattr,
                                    int(l[i][:2]) * 3600 * signal)
                        else:
                            return None
                        i += 1
                    if res.dstabbr:
                        break
                else:
                    break
            if i < len_l:
                # Normalise ';' rule separators to ',' before the asserts.
                for j in range(i, len_l):
                    if l[j] == ';':
                        l[j] = ','
                assert l[i] == ','
                i += 1
            if i >= len_l:
                pass
            elif (8 <= l.count(',') <= 9 and
                  not [y for x in l[i:] if x != ','
                       for y in x if y not in "0123456789"]):
                # Purely numeric rule format:
                # GMT0BST,3,0,30,3600,10,0,26,7200[,3600]
                for x in (res.start, res.end):
                    x.month = int(l[i])
                    i += 2
                    if l[i] == '-':
                        value = int(l[i + 1]) * -1
                        i += 1
                    else:
                        value = int(l[i])
                    i += 2
                    if value:
                        x.week = value
                        x.weekday = (int(l[i]) - 1) % 7
                    else:
                        x.day = int(l[i])
                    i += 2
                    x.time = int(l[i])
                    i += 2
                if i < len_l:
                    # Optional trailing explicit DST offset.
                    if l[i] in ('-', '+'):
                        signal = (-1, 1)[l[i] == "+"]
                        i += 1
                    else:
                        signal = 1
                    res.dstoffset = (res.stdoffset + int(l[i])) * signal
            elif (l.count(',') == 2 and l[i:].count('/') <= 2 and
                  not [y for x in l[i:] if x not in (',', '/', 'J', 'M',
                                                     '.', '-', ':')
                       for y in x if y not in "0123456789"]):
                # POSIX rule format: Jn, n or Mm.w.d, optionally /time.
                for x in (res.start, res.end):
                    if l[i] == 'J':
                        # non-leap year day (1 based)
                        i += 1
                        x.jyday = int(l[i])
                    elif l[i] == 'M':
                        # month[-.]week[-.]weekday
                        i += 1
                        x.month = int(l[i])
                        i += 1
                        assert l[i] in ('-', '.')
                        i += 1
                        x.week = int(l[i])
                        if x.week == 5:
                            x.week = -1
                        i += 1
                        assert l[i] in ('-', '.')
                        i += 1
                        x.weekday = (int(l[i]) - 1) % 7
                    else:
                        # year day (zero based)
                        x.yday = int(l[i]) + 1
                    i += 1
                    if i < len_l and l[i] == '/':
                        i += 1
                        # start time
                        len_li = len(l[i])
                        if len_li == 4:
                            # -0300
                            x.time = (int(l[i][:2]) * 3600 + int(l[i][2:]) * 60)
                        elif i + 1 < len_l and l[i + 1] == ':':
                            # -03:00
                            x.time = int(l[i]) * 3600 + int(l[i + 2]) * 60
                            i += 2
                            if i + 1 < len_l and l[i + 1] == ':':
                                i += 2
                                x.time += int(l[i])
                        elif len_li <= 2:
                            # -[0]3
                            x.time = (int(l[i][:2]) * 3600)
                        else:
                            return None
                        i += 1
                    assert i == len_l or l[i] == ','
                    i += 1
                assert i >= len_l
        except (IndexError, ValueError, AssertionError):
            # Any structural problem means the TZ string is unparseable.
            return None
        return res
# Shared module-level _tzparser instance.
DEFAULTTZPARSER = _tzparser()


def _parsetz(tzstr):
    # Convenience wrapper: parse a TZ-style string with the default parser.
    return DEFAULTTZPARSER.parse(tzstr)
def _parsems(value):
"""Parse a I[.F] seconds value into (seconds, microseconds)."""
if "." not in value:
return int(value), 0
else:
i, f = value.split(".")
return int(i), int(f.ljust(6, "0")[:6])
# vim:ts=4:sw=4:et
| gpl-3.0 |
apollo13/ansible | test/integration/targets/incidental_script_inventory_vmware_inventory/vmware_inventory.py | 19 | 30336 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (C): 2017, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# Requirements
# - pyvmomi >= 6.0.0.2016.4
# TODO:
# * more jq examples
# * optional folder hierarchy
"""
$ jq '._meta.hostvars[].config' data.json | head
{
"alternateguestname": "",
"instanceuuid": "5035a5cd-b8e8-d717-e133-2d383eb0d675",
"memoryhotaddenabled": false,
"guestfullname": "Red Hat Enterprise Linux 7 (64-bit)",
"changeversion": "2016-05-16T18:43:14.977925Z",
"uuid": "4235fc97-5ddb-7a17-193b-9a3ac97dc7b4",
"cpuhotremoveenabled": false,
"vpmcenabled": false,
"firmware": "bios",
"""
from __future__ import print_function
import atexit
import datetime
import itertools
import json
import os
import re
import ssl
import sys
import uuid
from time import time
from jinja2 import Environment
from ansible.module_utils.six import integer_types, PY3
from ansible.module_utils.six.moves import configparser
try:
import argparse
except ImportError:
sys.exit('Error: This inventory script required "argparse" python module. Please install it or upgrade to python-2.7')
try:
from pyVmomi import vim, vmodl
from pyVim.connect import SmartConnect, Disconnect
except ImportError:
sys.exit("ERROR: This inventory script required 'pyVmomi' Python module, it was not able to load it")
def regex_match(s, pattern):
    '''Custom filter for regex matching'''
    # True exactly when the pattern matches at the start of the string.
    return re.compile(pattern).match(s) is not None
def select_chain_match(inlist, key, pattern):
    '''Get a key from a list of dicts, squash values to a single list, then filter'''
    compiled = re.compile(pattern)
    flattened = itertools.chain.from_iterable(entry[key] for entry in inlist)
    return [item for item in flattened if compiled.match(item)]
class VMwareMissingHostException(Exception):
    """Exception type for hosts missing from the VMware inventory.

    NOTE(review): no raise sites are visible in this chunk; semantics
    inferred from the name — confirm against callers.
    """
    pass
class VMWareInventory(object):
__name__ = 'VMWareInventory'
guest_props = False
instances = []
debug = False
load_dumpfile = None
write_dumpfile = None
maxlevel = 1
lowerkeys = True
config = None
cache_max_age = None
cache_path_cache = None
cache_path_index = None
cache_dir = None
server = None
port = None
username = None
password = None
validate_certs = True
host_filters = []
skip_keys = []
groupby_patterns = []
groupby_custom_field_excludes = []
safe_types = [bool, str, float, None] + list(integer_types)
iter_types = [dict, list]
bad_types = ['Array', 'disabledMethod', 'declaredAlarmState']
vimTableMaxDepth = {
"vim.HostSystem": 2,
"vim.VirtualMachine": 2,
}
custom_fields = {}
# use jinja environments to allow for custom filters
env = Environment()
env.filters['regex_match'] = regex_match
env.filters['select_chain_match'] = select_chain_match
# translation table for attributes to fetch for known vim types
vimTable = {
vim.Datastore: ['_moId', 'name'],
vim.ResourcePool: ['_moId', 'name'],
vim.HostSystem: ['_moId', 'name'],
}
@staticmethod
def _empty_inventory():
return {"_meta": {"hostvars": {}}}
def __init__(self, load=True):
self.inventory = VMWareInventory._empty_inventory()
if load:
# Read settings and parse CLI arguments
self.parse_cli_args()
self.read_settings()
# Check the cache
cache_valid = self.is_cache_valid()
# Handle Cache
if self.args.refresh_cache or not cache_valid:
self.do_api_calls_update_cache()
else:
self.debugl('loading inventory from cache')
self.inventory = self.get_inventory_from_cache()
def debugl(self, text):
if self.args.debug:
try:
text = str(text)
except UnicodeEncodeError:
text = text.encode('utf-8')
print('%s %s' % (datetime.datetime.now(), text))
def show(self):
# Data to print
self.debugl('dumping results')
data_to_print = None
if self.args.host:
data_to_print = self.get_host_info(self.args.host)
elif self.args.list:
# Display list of instances for inventory
data_to_print = self.inventory
return json.dumps(data_to_print, indent=2)
def is_cache_valid(self):
''' Determines if the cache files have expired, or if it is still valid '''
valid = False
if os.path.isfile(self.cache_path_cache):
mod_time = os.path.getmtime(self.cache_path_cache)
current_time = time()
if (mod_time + self.cache_max_age) > current_time:
valid = True
return valid
def do_api_calls_update_cache(self):
''' Get instances and cache the data '''
self.inventory = self.instances_to_inventory(self.get_instances())
self.write_to_cache(self.inventory)
def write_to_cache(self, data):
''' Dump inventory to json file '''
with open(self.cache_path_cache, 'w') as f:
f.write(json.dumps(data, indent=2))
def get_inventory_from_cache(self):
''' Read in jsonified inventory '''
jdata = None
with open(self.cache_path_cache, 'r') as f:
jdata = f.read()
return json.loads(jdata)
def read_settings(self):
''' Reads the settings from the vmware_inventory.ini file '''
scriptbasename = __file__
scriptbasename = os.path.basename(scriptbasename)
scriptbasename = scriptbasename.replace('.py', '')
defaults = {'vmware': {
'server': '',
'port': 443,
'username': '',
'password': '',
'validate_certs': True,
'ini_path': os.path.join(os.path.dirname(__file__), '%s.ini' % scriptbasename),
'cache_name': 'ansible-vmware',
'cache_path': '~/.ansible/tmp',
'cache_max_age': 3600,
'max_object_level': 1,
'skip_keys': 'declaredalarmstate,'
'disabledmethod,'
'dynamicproperty,'
'dynamictype,'
'environmentbrowser,'
'managedby,'
'parent,'
'childtype,'
'resourceconfig',
'alias_pattern': '{{ config.name + "_" + config.uuid }}',
'host_pattern': '{{ guest.ipaddress }}',
'host_filters': '{{ runtime.powerstate == "poweredOn" }}',
'groupby_patterns': '{{ guest.guestid }},{{ "templates" if config.template else "guests"}}',
'lower_var_keys': True,
'custom_field_group_prefix': 'vmware_tag_',
'groupby_custom_field_excludes': '',
'groupby_custom_field': False}
}
if PY3:
config = configparser.ConfigParser()
else:
config = configparser.SafeConfigParser()
# where is the config?
vmware_ini_path = os.environ.get('VMWARE_INI_PATH', defaults['vmware']['ini_path'])
vmware_ini_path = os.path.expanduser(os.path.expandvars(vmware_ini_path))
config.read(vmware_ini_path)
if 'vmware' not in config.sections():
config.add_section('vmware')
# apply defaults
for k, v in defaults['vmware'].items():
if not config.has_option('vmware', k):
config.set('vmware', k, str(v))
# where is the cache?
self.cache_dir = os.path.expanduser(config.get('vmware', 'cache_path'))
if self.cache_dir and not os.path.exists(self.cache_dir):
os.makedirs(self.cache_dir)
# set the cache filename and max age
cache_name = config.get('vmware', 'cache_name')
self.cache_path_cache = self.cache_dir + "/%s.cache" % cache_name
self.debugl('cache path is %s' % self.cache_path_cache)
self.cache_max_age = int(config.getint('vmware', 'cache_max_age'))
# mark the connection info
self.server = os.environ.get('VMWARE_SERVER', config.get('vmware', 'server'))
self.debugl('server is %s' % self.server)
self.port = int(os.environ.get('VMWARE_PORT', config.get('vmware', 'port')))
self.username = os.environ.get('VMWARE_USERNAME', config.get('vmware', 'username'))
self.debugl('username is %s' % self.username)
self.password = os.environ.get('VMWARE_PASSWORD', config.get('vmware', 'password', raw=True))
self.validate_certs = os.environ.get('VMWARE_VALIDATE_CERTS', config.get('vmware', 'validate_certs'))
if self.validate_certs in ['no', 'false', 'False', False]:
self.validate_certs = False
self.debugl('cert validation is %s' % self.validate_certs)
# behavior control
self.maxlevel = int(config.get('vmware', 'max_object_level'))
self.debugl('max object level is %s' % self.maxlevel)
self.lowerkeys = config.get('vmware', 'lower_var_keys')
if type(self.lowerkeys) != bool:
if str(self.lowerkeys).lower() in ['yes', 'true', '1']:
self.lowerkeys = True
else:
self.lowerkeys = False
self.debugl('lower keys is %s' % self.lowerkeys)
self.skip_keys = list(config.get('vmware', 'skip_keys').split(','))
self.debugl('skip keys is %s' % self.skip_keys)
temp_host_filters = list(config.get('vmware', 'host_filters').split('}},'))
for host_filter in temp_host_filters:
host_filter = host_filter.rstrip()
if host_filter != "":
if not host_filter.endswith("}}"):
host_filter += "}}"
self.host_filters.append(host_filter)
self.debugl('host filters are %s' % self.host_filters)
temp_groupby_patterns = list(config.get('vmware', 'groupby_patterns').split('}},'))
for groupby_pattern in temp_groupby_patterns:
groupby_pattern = groupby_pattern.rstrip()
if groupby_pattern != "":
if not groupby_pattern.endswith("}}"):
groupby_pattern += "}}"
self.groupby_patterns.append(groupby_pattern)
self.debugl('groupby patterns are %s' % self.groupby_patterns)
temp_groupby_custom_field_excludes = config.get('vmware', 'groupby_custom_field_excludes')
self.groupby_custom_field_excludes = [x.strip('"') for x in [y.strip("'") for y in temp_groupby_custom_field_excludes.split(",")]]
self.debugl('groupby exclude strings are %s' % self.groupby_custom_field_excludes)
# Special feature to disable the brute force serialization of the
# virtual machine objects. The key name for these properties does not
# matter because the values are just items for a larger list.
if config.has_section('properties'):
self.guest_props = []
for prop in config.items('properties'):
self.guest_props.append(prop[1])
# save the config
self.config = config
def parse_cli_args(self):
''' Command line argument processing '''
parser = argparse.ArgumentParser(description='Produce an Ansible Inventory file based on PyVmomi')
parser.add_argument('--debug', action='store_true', default=False,
help='show debug info')
parser.add_argument('--list', action='store_true', default=True,
help='List instances (default: True)')
parser.add_argument('--host', action='store',
help='Get all the variables about a specific instance')
parser.add_argument('--refresh-cache', action='store_true', default=False,
help='Force refresh of cache by making API requests to VSphere (default: False - use cache files)')
parser.add_argument('--max-instances', default=None, type=int,
help='maximum number of instances to retrieve')
self.args = parser.parse_args()
def get_instances(self):
''' Get a list of vm instances with pyvmomi '''
kwargs = {'host': self.server,
'user': self.username,
'pwd': self.password,
'port': int(self.port)}
if self.validate_certs and hasattr(ssl, 'SSLContext'):
context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
context.verify_mode = ssl.CERT_REQUIRED
context.check_hostname = True
kwargs['sslContext'] = context
elif self.validate_certs and not hasattr(ssl, 'SSLContext'):
sys.exit('pyVim does not support changing verification mode with python < 2.7.9. Either update '
'python or use validate_certs=false.')
elif not self.validate_certs and hasattr(ssl, 'SSLContext'):
context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
context.verify_mode = ssl.CERT_NONE
context.check_hostname = False
kwargs['sslContext'] = context
elif not self.validate_certs and not hasattr(ssl, 'SSLContext'):
# Python 2.7.9 < or RHEL/CentOS 7.4 <
pass
return self._get_instances(kwargs)
def _get_instances(self, inkwargs):
''' Make API calls '''
instances = []
si = None
try:
si = SmartConnect(**inkwargs)
except ssl.SSLError as connection_error:
if '[SSL: CERTIFICATE_VERIFY_FAILED]' in str(connection_error) and self.validate_certs:
sys.exit("Unable to connect to ESXi server due to %s, "
"please specify validate_certs=False and try again" % connection_error)
except Exception as exc:
self.debugl("Unable to connect to ESXi server due to %s" % exc)
sys.exit("Unable to connect to ESXi server due to %s" % exc)
self.debugl('retrieving all instances')
if not si:
sys.exit("Could not connect to the specified host using specified "
"username and password")
atexit.register(Disconnect, si)
content = si.RetrieveContent()
# Create a search container for virtualmachines
self.debugl('creating containerview for virtualmachines')
container = content.rootFolder
viewType = [vim.VirtualMachine]
recursive = True
containerView = content.viewManager.CreateContainerView(container, viewType, recursive)
children = containerView.view
for child in children:
# If requested, limit the total number of instances
if self.args.max_instances:
if len(instances) >= self.args.max_instances:
break
instances.append(child)
self.debugl("%s total instances in container view" % len(instances))
if self.args.host:
instances = [x for x in instances if x.name == self.args.host]
instance_tuples = []
for instance in instances:
if self.guest_props:
ifacts = self.facts_from_proplist(instance)
else:
ifacts = self.facts_from_vobj(instance)
instance_tuples.append((instance, ifacts))
self.debugl('facts collected for all instances')
try:
cfm = content.customFieldsManager
if cfm is not None and cfm.field:
for f in cfm.field:
if not f.managedObjectType or f.managedObjectType == vim.VirtualMachine:
self.custom_fields[f.key] = f.name
self.debugl('%d custom fields collected' % len(self.custom_fields))
except vmodl.RuntimeFault as exc:
self.debugl("Unable to gather custom fields due to %s" % exc.msg)
except IndexError as exc:
self.debugl("Unable to gather custom fields due to %s" % exc)
return instance_tuples
    def instances_to_inventory(self, instances):
        ''' Convert a list of vm objects into a json compliant inventory '''
        self.debugl('re-indexing instances based on ini settings')
        inventory = VMWareInventory._empty_inventory()
        inventory['all'] = {}
        inventory['all']['hosts'] = []
        for idx, instance in enumerate(instances):
            # make a unique id for this object to avoid vmware's
            # numerous uuid's which aren't all unique.
            thisid = str(uuid.uuid4())
            idata = instance[1]

            # Put it in the inventory
            inventory['all']['hosts'].append(thisid)
            inventory['_meta']['hostvars'][thisid] = idata.copy()
            inventory['_meta']['hostvars'][thisid]['ansible_uuid'] = thisid

        # Make a map of the uuid to the alias the user wants
        name_mapping = self.create_template_mapping(
            inventory,
            self.config.get('vmware', 'alias_pattern')
        )

        # Make a map of the uuid to the ssh hostname the user wants
        host_mapping = self.create_template_mapping(
            inventory,
            self.config.get('vmware', 'host_pattern')
        )

        # Reset the inventory keys: replace the random uuid key with the
        # user-chosen alias and attach the ssh address for each host.
        for k, v in name_mapping.items():

            # Hosts without a resolvable ssh address are left under their uuid key.
            if not host_mapping or k not in host_mapping:
                continue

            # set ansible_host (2.x)
            try:
                inventory['_meta']['hostvars'][k]['ansible_host'] = host_mapping[k]
                # 1.9.x backwards compliance
                inventory['_meta']['hostvars'][k]['ansible_ssh_host'] = host_mapping[k]
            except Exception:
                continue

            if k == v:
                continue

            # add new key
            inventory['all']['hosts'].append(v)
            inventory['_meta']['hostvars'][v] = inventory['_meta']['hostvars'][k]

            # cleanup old key
            inventory['all']['hosts'].remove(k)
            inventory['_meta']['hostvars'].pop(k, None)

        self.debugl('pre-filtered hosts:')
        for i in inventory['all']['hosts']:
            self.debugl('  * %s' % i)

        # Apply host filters: each filter is a jinja expression that must
        # render truthy for the host to stay in the inventory.
        for hf in self.host_filters:
            if not hf:
                continue
            self.debugl('filter: %s' % hf)
            filter_map = self.create_template_mapping(inventory, hf, dtype='boolean')
            for k, v in filter_map.items():
                if not v:
                    # delete this host
                    inventory['all']['hosts'].remove(k)
                    inventory['_meta']['hostvars'].pop(k, None)

        self.debugl('post-filter hosts:')
        for i in inventory['all']['hosts']:
            self.debugl('  * %s' % i)

        # Create groups from each groupby pattern; the rendered value is the group name.
        for gbp in self.groupby_patterns:
            groupby_map = self.create_template_mapping(inventory, gbp)
            for k, v in groupby_map.items():
                if v not in inventory:
                    inventory[v] = {}
                    inventory[v]['hosts'] = []
                if k not in inventory[v]['hosts']:
                    inventory[v]['hosts'].append(k)

        # Optionally create additional groups from vSphere custom field values
        # (comma separated values fan out into one group per value).
        if self.config.get('vmware', 'groupby_custom_field'):
            for k, v in inventory['_meta']['hostvars'].items():
                if 'customvalue' in v:
                    for tv in v['customvalue']:
                        newkey = None
                        field_name = self.custom_fields[tv['key']] if tv['key'] in self.custom_fields else tv['key']
                        if field_name in self.groupby_custom_field_excludes:
                            continue
                        values = []
                        # NOTE(review): under Python 3 ``map`` is lazy; it is only
                        # iterated once below, so behavior is unchanged.
                        keylist = map(lambda x: x.strip(), tv['value'].split(','))
                        for kl in keylist:
                            try:
                                newkey = "%s%s_%s" % (self.config.get('vmware', 'custom_field_group_prefix'), str(field_name), kl)
                                newkey = newkey.strip()
                            except Exception as e:
                                self.debugl(e)
                            values.append(newkey)
                        for tag in values:
                            if not tag:
                                continue
                            if tag not in inventory:
                                inventory[tag] = {}
                                inventory[tag]['hosts'] = []
                            if k not in inventory[tag]['hosts']:
                                inventory[tag]['hosts'].append(k)

        return inventory
def create_template_mapping(self, inventory, pattern, dtype='string'):
''' Return a hash of uuid to templated string from pattern '''
mapping = {}
for k, v in inventory['_meta']['hostvars'].items():
t = self.env.from_string(pattern)
newkey = None
try:
newkey = t.render(v)
newkey = newkey.strip()
except Exception as e:
self.debugl(e)
if not newkey:
continue
elif dtype == 'integer':
newkey = int(newkey)
elif dtype == 'boolean':
if newkey.lower() == 'false':
newkey = False
elif newkey.lower() == 'true':
newkey = True
elif dtype == 'string':
pass
mapping[k] = newkey
return mapping
    def facts_from_proplist(self, vm):
        '''Get specific properties instead of serializing everything'''
        rdata = {}
        for prop in self.guest_props:
            self.debugl('getting %s property for %s' % (prop, vm.name))
            key = prop
            if self.lowerkeys:
                key = key.lower()

            if '.' not in prop:
                # props without periods are direct attributes of the parent
                vm_property = getattr(vm, prop)
                if isinstance(vm_property, vim.CustomFieldsManager.Value.Array):
                    # Flatten the custom-field array into plain key/value dicts.
                    temp_vm_property = []
                    for vm_prop in vm_property:
                        temp_vm_property.append({'key': vm_prop.key,
                                                 'value': vm_prop.value})
                    rdata[key] = temp_vm_property
                else:
                    rdata[key] = vm_property
            else:
                # props with periods are subkeys of parent attributes
                parts = prop.split('.')
                total = len(parts) - 1

                # pointer to the current object
                val = None
                # pointer to the current result key
                lastref = rdata

                for idx, x in enumerate(parts):
                    if isinstance(val, dict):
                        # Already-serialized level: descend by dict key,
                        # trying the exact name first, then lowercase.
                        if x in val:
                            val = val.get(x)
                        elif x.lower() in val:
                            val = val.get(x.lower())
                    else:
                        # if the val wasn't set yet, get it from the parent
                        if not val:
                            try:
                                val = getattr(vm, x)
                            except AttributeError as e:
                                self.debugl(e)
                        else:
                            # in a subkey, get the subprop from the previous attrib
                            try:
                                val = getattr(val, x)
                            except AttributeError as e:
                                self.debugl(e)

                        # make sure it serializes
                        val = self._process_object_types(val)

                    # lowercase keys if requested
                    if self.lowerkeys:
                        x = x.lower()

                    # change the pointer or set the final value
                    if idx != total:
                        if x not in lastref:
                            lastref[x] = {}
                        lastref = lastref[x]
                    else:
                        lastref[x] = val

        if self.args.debug:
            self.debugl("For %s" % vm.name)
            for key in list(rdata.keys()):
                if isinstance(rdata[key], dict):
                    for ikey in list(rdata[key].keys()):
                        self.debugl("Property '%s.%s' has value '%s'" % (key, ikey, rdata[key][ikey]))
                else:
                    self.debugl("Property '%s' has value '%s'" % (key, rdata[key]))
        return rdata
    def facts_from_vobj(self, vobj, level=0):
        ''' Traverse a VM object and return a json compliant data structure '''

        # pyvmomi objects are not yet serializable, but may be one day ...
        # https://github.com/vmware/pyvmomi/issues/21

        # WARNING:
        # Accessing an object attribute will trigger a SOAP call to the remote.
        # Increasing the attributes collected or the depth of recursion greatly
        # increases runtime duration and potentially memory+network utilization.

        if level == 0:
            try:
                self.debugl("get facts for %s" % vobj.name)
            except Exception as e:
                self.debugl(e)

        rdata = {}

        # Build the list of attributes to serialize: public, not on the
        # bad-types blacklist, and not explicitly skipped by the user config.
        methods = dir(vobj)
        methods = [str(x) for x in methods if not x.startswith('_')]
        methods = [x for x in methods if x not in self.bad_types]
        methods = [x for x in methods if not x.lower() in self.skip_keys]
        methods = sorted(methods)

        for method in methods:
            # Attempt to get the method, skip on fail
            try:
                methodToCall = getattr(vobj, method)
            except Exception as e:
                continue

            # Skip callable methods
            if callable(methodToCall):
                continue

            if self.lowerkeys:
                method = method.lower()

            # Recursively serialize the attribute value.
            rdata[method] = self._process_object_types(
                methodToCall,
                thisvm=vobj,
                inkey=method,
            )

        return rdata
    def _process_object_types(self, vobj, thisvm=None, inkey='', level=0):
        ''' Serialize an object '''
        rdata = {}

        # Depth limit for specific pyvmomi types (avoids runaway recursion).
        if type(vobj).__name__ in self.vimTableMaxDepth and level >= self.vimTableMaxDepth[type(vobj).__name__]:
            return rdata

        if vobj is None:
            rdata = None
        elif type(vobj) in self.vimTable:
            # Known vim type: copy only the whitelisted attributes.
            rdata = {}
            for key in self.vimTable[type(vobj)]:
                try:
                    rdata[key] = getattr(vobj, key)
                except Exception as e:
                    self.debugl(e)
        elif issubclass(type(vobj), str) or isinstance(vobj, str):
            if vobj.isalnum():
                rdata = vobj
            else:
                # Round-trip through utf-8 to normalize the string type.
                rdata = vobj.encode('utf-8').decode('utf-8')
        elif issubclass(type(vobj), bool) or isinstance(vobj, bool):
            rdata = vobj
        elif issubclass(type(vobj), integer_types) or isinstance(vobj, integer_types):
            rdata = vobj
        elif issubclass(type(vobj), float) or isinstance(vobj, float):
            rdata = vobj
        elif issubclass(type(vobj), list) or issubclass(type(vobj), tuple):
            # Serialize each element, indexing the key path as inkey[N].
            rdata = []
            try:
                vobj = sorted(vobj)
            except Exception:
                pass

            for idv, vii in enumerate(vobj):
                if level + 1 <= self.maxlevel:
                    vid = self._process_object_types(
                        vii,
                        thisvm=thisvm,
                        inkey=inkey + '[' + str(idv) + ']',
                        level=(level + 1)
                    )

                    # NOTE(review): falsy serialized values (0, '', {}) are
                    # dropped from lists here — confirm this is intended.
                    if vid:
                        rdata.append(vid)

        elif issubclass(type(vobj), dict):
            pass

        elif issubclass(type(vobj), object):
            # Arbitrary object: serialize its public, non-callable attributes.
            methods = dir(vobj)
            methods = [str(x) for x in methods if not x.startswith('_')]
            methods = [x for x in methods if x not in self.bad_types]
            methods = [x for x in methods if not inkey + '.' + x.lower() in self.skip_keys]
            methods = sorted(methods)

            for method in methods:
                # Attempt to get the method, skip on fail
                try:
                    methodToCall = getattr(vobj, method)
                except Exception as e:
                    continue

                if callable(methodToCall):
                    continue

                if self.lowerkeys:
                    method = method.lower()
                if level + 1 <= self.maxlevel:
                    try:
                        rdata[method] = self._process_object_types(
                            methodToCall,
                            thisvm=thisvm,
                            inkey=inkey + '.' + method,
                            level=(level + 1)
                        )
                    except vim.fault.NoPermission:
                        self.debugl("Skipping method %s (NoPermission)" % method)
        else:
            pass

        return rdata
def get_host_info(self, host):
''' Return hostvars for a single host '''
if host in self.inventory['_meta']['hostvars']:
return self.inventory['_meta']['hostvars'][host]
elif self.args.host and self.inventory['_meta']['hostvars']:
match = None
for k, v in self.inventory['_meta']['hostvars'].items():
if self.inventory['_meta']['hostvars'][k]['name'] == self.args.host:
match = k
break
if match:
return self.inventory['_meta']['hostvars'][match]
else:
raise VMwareMissingHostException('%s not found' % host)
else:
raise VMwareMissingHostException('%s not found' % host)
if __name__ == "__main__":
    # Run the script: build the inventory and print it as JSON.
    print(VMWareInventory().show())
| gpl-3.0 |
lidan-fnst/samba | third_party/waf/wafadmin/Tools/xlcxx.py | 46 | 2067 | #!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006 (ita)
# Ralf Habacker, 2006 (rh)
# Yinon Ehrlich, 2009
# Michael Kuhn, 2009
import os, sys
import Configure, Options, Utils
import ccroot, ar
from Configure import conftest
@conftest
def find_xlcxx(conf):
    """Locate the IBM XL C++ compiler and record it in the build environment."""
    # Prefer the thread-safe driver (xlc++_r) over the plain one.
    compiler = conf.find_program(['xlc++_r', 'xlc++'], var='CXX', mandatory=True)
    compiler = conf.cmd_to_list(compiler)
    conf.env.CXX_NAME = 'xlc++'
    conf.env.CXX = compiler
@conftest
def find_cpp(conf):
    """Record a C preprocessor, taken from conf.env or the OS environment."""
    env = conf.env
    preprocessor = None
    if env['CPP']:
        preprocessor = env['CPP']
    elif 'CPP' in conf.environ:
        preprocessor = conf.environ.get('CPP')
    #if not cpp: cpp = v['CXX']
    env['CPP'] = preprocessor
@conftest
def xlcxx_common_flags(conf):
    """Populate the default xlc++ compile/link flags and templates in conf.env."""
    v = conf.env

    # CPPFLAGS CXXDEFINES _CXXINCFLAGS _CXXDEFFLAGS
    v['CXXFLAGS_DEBUG'] = ['-g']
    v['CXXFLAGS_RELEASE'] = ['-O2']

    v['CXX_SRC_F'] = ''
    v['CXX_TGT_F'] = ['-c', '-o', ''] # shell hack for -MD
    v['CPPPATH_ST'] = '-I%s' # template for adding include paths

    # linker
    if not v['LINK_CXX']: v['LINK_CXX'] = v['CXX']
    v['CXXLNK_SRC_F'] = ''
    v['CXXLNK_TGT_F'] = ['-o', ''] # shell hack for -MD

    v['LIB_ST'] = '-l%s' # template for adding libs
    v['LIBPATH_ST'] = '-L%s' # template for adding libpaths
    v['STATICLIB_ST'] = '-l%s'
    v['STATICLIBPATH_ST'] = '-L%s'
    v['RPATH_ST'] = '-Wl,-rpath,%s'
    v['CXXDEFINES_ST'] = '-D%s'

    v['SONAME_ST'] = ''
    v['SHLIB_MARKER'] = ''
    v['STATICLIB_MARKER'] = ''
    v['FULLSTATIC_MARKER'] = '-static'

    # program
    v['program_LINKFLAGS'] = ['-Wl,-brtl']
    v['program_PATTERN'] = '%s'

    # shared library
    v['shlib_CXXFLAGS'] = ['-fPIC', '-DPIC'] # avoid using -DPIC, -fPIC aleady defines the __PIC__ macro
    v['shlib_LINKFLAGS'] = ['-G', '-Wl,-brtl,-bexpfull']
    v['shlib_PATTERN'] = 'lib%s.so'

    # static lib
    v['staticlib_LINKFLAGS'] = ''
    v['staticlib_PATTERN'] = 'lib%s.a'
def detect(conf):
    # waf configuration entry point: locate the toolchain, then initialise
    # the common flags and generic C++ tool hooks.
    conf.find_xlcxx()
    conf.find_cpp()
    conf.find_ar()
    conf.xlcxx_common_flags()
    conf.cxx_load_tools()
    conf.cxx_add_flags()
    conf.link_add_flags()
| gpl-3.0 |
michael-lazar/praw3 | praw/settings.py | 2 | 1778 | # This file is part of PRAW.
#
# PRAW is free software: you can redistribute it and/or modify it under the
# terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# PRAW is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# PRAW. If not, see <http://www.gnu.org/licenses/>.
"""Provides the code to load PRAW's configuration file `praw.ini`."""
from __future__ import print_function, unicode_literals
import os
import sys
from six.moves import configparser
def _load_configuration():
"""Attempt to load settings from various praw.ini files."""
config = configparser.RawConfigParser()
module_dir = os.path.dirname(sys.modules[__name__].__file__)
if 'APPDATA' in os.environ: # Windows
os_config_path = os.environ['APPDATA']
elif 'XDG_CONFIG_HOME' in os.environ: # Modern Linux
os_config_path = os.environ['XDG_CONFIG_HOME']
elif 'HOME' in os.environ: # Legacy Linux
os_config_path = os.path.join(os.environ['HOME'], '.config')
else:
os_config_path = None
locations = [os.path.join(module_dir, 'praw.ini'), 'praw.ini']
if os_config_path is not None:
locations.insert(1, os.path.join(os_config_path, 'praw.ini'))
if not config.read(locations):
raise Exception('Could not find config file in any of: {0}'
.format(locations))
return config
# Load the configuration eagerly at import time, then drop the helper so
# the module only exposes the parsed CONFIG object.
CONFIG = _load_configuration()
del _load_configuration
| gpl-3.0 |
tsufiev/horizon | horizon/test/tests/views.py | 69 | 3172 | # Copyright 2015 Cisco Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from horizon.test import helpers as test
from horizon import views
from django import forms
from django.test import client
from django.utils.translation import ugettext_lazy as _
from django.views import generic
# Arbitrary placeholder name available to the tests in this module.
FAKENAME = "FakeName"
class ViewData(object):
    """Mixin providing a fixed template name and a sample object in context."""
    template_name = 'fake'

    def get_context_data(self, **kwargs):
        context = super(ViewData, self).get_context_data(**kwargs)
        context['object'] = {'name': 'myName'}
        return context
class PageWithNoTitle(ViewData, views.HorizonTemplateView):
    """Template view without an explicit page_title."""
    pass
class PageWithTitle(ViewData, views.HorizonTemplateView):
    # Static page title.
    page_title = "A Title"
class PageWithTitleData(ViewData, views.HorizonTemplateView):
    # Page title containing a template expression rendered against the context.
    page_title = "A Title: {{ object.name }}"
class FormWithTitle(ViewData, views.HorizonFormView):
    """Form view whose page title is rendered against the context."""
    form_class = forms.Form
    page_title = "A Title: {{ object.name }}"
class ViewWithTitle(views.PageTitleMixin, generic.TemplateView):
    # Plain (non-lazy) page title.
    page_title = "Fake"
class ViewWithTransTitle(views.PageTitleMixin, generic.TemplateView):
    # Lazily translated page title (ugettext_lazy proxy).
    page_title = _("Fake")
class PageTitleTests(test.TestCase):
    """Exercise PageTitleMixin and the page_title handling of Horizon views."""

    def setUp(self):
        super(PageTitleTests, self).setUp()
        self.request = client.RequestFactory().get('fake')

    def _dispatch(self, viewClass):
        # Helper: instantiate the view class and dispatch the fake request.
        p = viewClass()
        p.request = self.request
        return p.dispatch(self.request)

    def test_render_context_with_title(self):
        tm = ViewWithTitle()
        context = tm.render_context_with_title({})
        self.assertEqual("Fake", context['page_title'])

    def test_render_context_with_title_override(self):
        # An explicit page_title in the context wins over the class attribute.
        tm = ViewWithTitle()
        context = tm.render_context_with_title({'page_title': "ekaF"})
        self.assertEqual("ekaF", context['page_title'])

    def test_render_context_with_title_lazy_translations(self):
        # Lazy translation proxies must render as plain strings.
        tm = ViewWithTransTitle()
        context = tm.render_context_with_title({})
        self.assertEqual("Fake", context['page_title'])

    def test_no_title_set(self):
        res = self._dispatch(PageWithNoTitle)
        self.assertEqual("", res.context_data['page_title'])

    def test_title_set(self):
        res = self._dispatch(PageWithTitle)
        self.assertEqual("A Title", res.context_data['page_title'])

    def test_title_with_data(self):
        # page_title may itself be a template rendered against the context.
        res = self._dispatch(PageWithTitleData)
        self.assertEqual("A Title: myName", res.context_data['page_title'])

    def test_form_with_title(self):
        res = self._dispatch(FormWithTitle)
        self.assertEqual("A Title: myName", res.context_data['page_title'])
| apache-2.0 |
Ditmar/plugin.video.pelisalacarta | platformcode/xbmc/download_and_play.py | 17 | 13664 | # -*- coding: utf-8 -*-
#------------------------------------------------------------
# Download and play
#------------------------------------------------------------
# License: GPL (http://www.gnu.org/licenses/gpl-3.0.html)
#------------------------------------------------------------
# Based on code from the Mega add-on (xbmchub.com)
#---------------------------------------------------------------------------
import os
import sys
import re
import urlparse
import urllib
import urllib2
import locale
import threading
import time
import socket
import xbmc
import xbmcgui
from core import config
from core import logger
from core import downloadtools
# Download a file and start playing while downloading
def download_and_play(url, file_name, download_path):
    # Start the background download thread.
    logger.info("[download_and_play.py] Active threads "+str(threading.active_count()))
    logger.info("[download_and_play.py] "+repr(threading.enumerate()))
    logger.info("[download_and_play.py] Starting download thread...")
    download_thread = DownloadThread(url,file_name,download_path)
    download_thread.start()
    logger.info("[download_and_play.py] Download thread started")
    logger.info("[download_and_play.py] Active threads "+str(threading.active_count()))
    logger.info("[download_and_play.py] "+repr(threading.enumerate()))

    # Wait for the user to close the progress dialog (or the download to end).
    logger.info("[download_and_play.py] Waiting...")

    while True:
        cancelled=False
        dialog = xbmcgui.DialogProgress()
        dialog.create('Descargando...', 'Cierra esta ventana para empezar la reproducción')
        dialog.update(0)

        while not cancelled and download_thread.is_alive():
            dialog.update( download_thread.get_progress() , "Cancela esta ventana para empezar la reproducción", "Velocidad: "+str(int(download_thread.get_speed()/1024))+" KB/s "+str(download_thread.get_actual_size())+"MB de "+str(download_thread.get_total_size())+"MB" , "Tiempo restante: "+str( downloadtools.sec_to_hms(download_thread.get_remaining_time())) )
            xbmc.sleep(1000)

            if dialog.iscanceled():
                cancelled=True
                break

        dialog.close()
        logger.info("[download_and_play.py] End of waiting")

        # Start the player on the partially downloaded file.
        player = CustomPlayer()
        player.set_download_thread(download_thread)
        player.PlayStream( download_thread.get_file_name() )

        # Playback finished: decide whether to loop (resume playback) or exit.
        logger.info("[download_and_play.py] Fin de reproducción")
        if player.is_stopped():
            logger.info("[download_and_play.py] Terminado por el usuario")
            break
        else:
            if not download_thread.is_alive():
                logger.info("[download_and_play.py] La descarga ha terminado")
                break
            else:
                logger.info("[download_and_play.py] Continua la descarga")

    # When the player ends, stop any download still running.
    logger.info("[download_and_play.py] Download thread alive="+str(download_thread.is_alive()))
    if download_thread.is_alive():
        logger.info("[download_and_play.py] Killing download thread")
        download_thread.force_stop()
class CustomPlayer(xbmc.Player):
    """xbmc.Player that polls playback progress and stops the paired
    download thread when the user stops playback."""

    def __init__( self, *args, **kwargs ):
        logger.info("CustomPlayer.__init__")
        self.actualtime=0
        self.totaltime=0
        self.stopped=False
        xbmc.Player.__init__( self )

    def PlayStream(self, url):
        # Blocks while the stream is playing, sampling position every 3 seconds.
        logger.info("CustomPlayer.PlayStream url="+url)
        self.play(url)
        self.actualtime=0
        self.url=url
        while self.isPlaying():
            self.actualtime = self.getTime()
            self.totaltime = self.getTotalTime()
            logger.info("CustomPlayer.PlayStream actualtime="+str(self.actualtime)+" totaltime="+str(self.totaltime))
            xbmc.sleep(3000)

    def set_download_thread(self,download_thread):
        logger.info("CustomPlayer.set_download_thread")
        self.download_thread = download_thread

    def force_stop_download_thread(self):
        # Ask the paired download thread to abort, if it is still running.
        logger.info("CustomPlayer.force_stop_download_thread")
        if self.download_thread.is_alive():
            logger.info("CustomPlayer.force_stop_download_thread Killing download thread")
            self.download_thread.force_stop()
        #while self.download_thread.is_alive():
        #    xbmc.sleep(1000)

    def onPlayBackStarted(self):
        logger.info("CustomPlayer.onPlayBackStarted PLAYBACK STARTED")

    def onPlayBackEnded(self):
        logger.info("CustomPlayer.onPlayBackEnded PLAYBACK ENDED")

    def onPlayBackStopped(self):
        # User stopped playback: remember it and abort the background download.
        logger.info("CustomPlayer.onPlayBackStopped PLAYBACK STOPPED")
        self.stopped=True
        self.force_stop_download_thread()

    def is_stopped(self):
        return self.stopped
# Download in background
class DownloadThread(threading.Thread):
    """Background thread that downloads ``url`` into ``download_path`` and
    exposes progress/speed counters for the UI to poll."""

    def __init__(self, url, file_name, download_path):
        # BUG FIX: the original logged ``repr(file)`` — the ``file`` builtin —
        # instead of the actual ``file_name`` argument.
        logger.info("DownloadThread.__init__ "+repr(file_name))
        self.url = url
        self.download_path = download_path
        self.file_name = os.path.join( download_path , file_name )
        self.progress = 0
        # Sentinel file whose presence tells the download loop to abort.
        self.force_stop_file_name = os.path.join( self.download_path , "force_stop.tmp" )
        self.velocidad = 0      # current speed, bytes per second
        self.tiempofalta = 0    # estimated remaining time, seconds
        self.actual_size = 0    # downloaded size so far, MB
        self.total_size = 0     # total remote size, MB
        # Remove a stale sentinel left over from a previous run.
        if os.path.exists(self.force_stop_file_name):
            os.remove(self.force_stop_file_name)
        threading.Thread.__init__(self)
    def run(self):
        # Thread entry point: choose the downloader that matches the URL.
        logger.info("DownloadThread.run Download starts...")
        if "megacrypter.com" in self.url:
            self.download_file_megacrypter()
        else:
            self.download_file()
        logger.info("DownloadThread.run Download ends")
def force_stop(self):
logger.info("DownloadThread.force_stop...")
force_stop_file = open( self.force_stop_file_name , "w" )
force_stop_file.write("0")
force_stop_file.close()
def get_progress(self):
return self.progress;
def get_file_name(self):
return self.file_name
def get_speed(self):
return self.velocidad
def get_remaining_time(self):
return self.tiempofalta
def get_actual_size(self):
return self.actual_size
def get_total_size(self):
return self.total_size
    def download_file_megacrypter(self):
        # Delegates megacrypter.com links to the bundled megacrypter.sh script,
        # temporarily switching the working directory to the tools folder.
        logger.info("DownloadThread.download_file Megacrypter downloader")
        comando = "./megacrypter.sh"
        logger.info("DownloadThread.download_file comando="+comando)

        oldcwd = os.getcwd()
        logger.info("DownloadThread.download_file oldcwd="+oldcwd)
        cwd = os.path.join( config.get_runtime_path() , "tools")
        logger.info("DownloadThread.download_file cwd="+cwd)
        os.chdir(cwd)
        logger.info("DownloadThread.download_file directory changed to="+os.getcwd())

        logger.info("DownloadThread.download_file destino="+self.download_path)

        import subprocess
        # SECURITY NOTE(review): self.url and self.download_path are spliced
        # into a shell command; a crafted URL could inject shell syntax.
        # Prefer the commented subprocess.Popen list form below.
        os.system( comando+" '"+self.url+ "' \"" + self.download_path+"\"" )
        #p = subprocess.Popen([comando , self.url , self.download_path], cwd=cwd, stdout=subprocess.PIPE , stderr=subprocess.PIPE )
        #out, err = p.communicate()
        #logger.info("DownloadThread.download_file out="+out)

        os.chdir(oldcwd)
    def download_file(self):
        # Direct HTTP download (Python 2 urllib2): streams the remote file to
        # disk in 100 KB blocks while updating the progress/speed counters.
        logger.info("DownloadThread.download_file Direct download")

        headers=[]

        # Make sure the target file name is legal for the filesystem
        logger.info("DownloadThread.download_file nombrefichero="+self.file_name)
        self.file_name = xbmc.makeLegalFilename(self.file_name)
        logger.info("DownloadThread.download_file nombrefichero="+self.file_name)
        logger.info("DownloadThread.download_file url="+self.url)

        # Create the destination file
        existSize = 0
        f = open(self.file_name, 'wb')
        grabado = 0

        # Filenium login and password
        # http://abcd%40gmail.com:mipass@filenium.com/get/Oi8vd3d3/LmZpbGVz/ZXJ2ZS5j/b20vZmls/ZS9kTnBL/dm11/b0/?.zip
        if "filenium" in self.url:
            from servers import filenium
            self.url , authorization_header = filenium.extract_authorization_header(self.url)
            headers.append( [ "Authorization", authorization_header ] )

        # Interpret headers embedded in the URL, XBMC-style ("url|Header=value&...")
        if "|" in self.url:
            additional_headers = self.url.split("|")[1]
            if "&" in additional_headers:
                additional_headers = additional_headers.split("&")
            else:
                additional_headers = [ additional_headers ]

            for additional_header in additional_headers:
                logger.info("DownloadThread.download_file additional_header: "+additional_header)
                name = re.findall( "(.*?)=.*?" , additional_header )[0]
                value = urllib.unquote_plus(re.findall( ".*?=(.*?)$" , additional_header )[0])
                headers.append( [ name,value ] )

            self.url = self.url.split("|")[0]
            logger.info("DownloadThread.download_file url="+self.url)

        # Socket timeout: 60 seconds
        socket.setdefaulttimeout(60)

        # Build the request and add the headers
        h=urllib2.HTTPHandler(debuglevel=0)
        request = urllib2.Request(self.url)
        for header in headers:
            logger.info("DownloadThread.download_file Header="+header[0]+": "+header[1])
            request.add_header(header[0],header[1])

        # Issue the request
        opener = urllib2.build_opener(h)
        urllib2.install_opener(opener)
        try:
            connexion = opener.open(request)
        except urllib2.HTTPError,e:
            logger.info("DownloadThread.download_file error %d (%s) al abrir la url %s" % (e.code,e.msg,self.url))
            #print e.code
            #print e.msg
            #print e.hdrs
            #print e.fp
            f.close()

            # Error 416 means the requested range is beyond EOF => file already complete
            if e.code==416:
                return 0
            else:
                return -2

        try:
            totalfichero = int(connexion.headers["Content-Length"])
        except:
            totalfichero = 1
        self.total_size = int(float(totalfichero) / float(1024*1024))
        logger.info("Content-Length=%s" % totalfichero)

        blocksize = 100*1024
        bloqueleido = connexion.read(blocksize)
        logger.info("DownloadThread.download_file Iniciando descarga del fichero, bloqueleido=%s" % len(bloqueleido))

        maxreintentos = 10

        while len(bloqueleido)>0:
            try:
                # A force_stop sentinel file aborts the download between blocks
                if os.path.exists(self.force_stop_file_name):
                    logger.info("DownloadThread.download_file Detectado fichero force_stop, se interrumpe la descarga")
                    f.close()
                    xbmc.executebuiltin((u'XBMC.Notification("Cancelado", "Descarga en segundo plano cancelada", 300)'))
                    return

                # Write the block that was read
                #try:
                #    import xbmcvfs
                #    f.write( bloqueleido )
                #except:
                f.write(bloqueleido)
                grabado = grabado + len(bloqueleido)
                logger.info("DownloadThread.download_file grabado=%d de %d" % (grabado,totalfichero) )
                percent = int(float(grabado)*100/float(totalfichero))
                self.progress=percent;
                totalmb = float(float(totalfichero)/(1024*1024))
                descargadosmb = float(float(grabado)/(1024*1024))
                self.actual_size = int(descargadosmb)

                # Read the next block, retrying so a single timeout does not stop everything
                reintentos = 0
                while reintentos <= maxreintentos:
                    try:
                        before = time.time()
                        bloqueleido = connexion.read(blocksize)
                        after = time.time()
                        if (after - before) > 0:
                            # Update speed (bytes/s) and remaining-time estimate
                            self.velocidad=len(bloqueleido)/((after - before))
                            falta=totalfichero-grabado
                            if self.velocidad>0:
                                self.tiempofalta=falta/self.velocidad
                            else:
                                self.tiempofalta=0
                        break
                    except:
                        reintentos = reintentos + 1
                        logger.info("DownloadThread.download_file ERROR en la descarga del bloque, reintento %d" % reintentos)
                        for line in sys.exc_info():
                            logger.error( "%s" % line )

                # Too many failed retries: give up on the download
                if reintentos > maxreintentos:
                    logger.info("DownloadThread.download_file ERROR en la descarga del fichero")
                    f.close()
                    return -2
            except:
                # Unexpected error: log the full traceback and abort
                import traceback,sys
                from pprint import pprint
                exc_type, exc_value, exc_tb = sys.exc_info()
                lines = traceback.format_exception(exc_type, exc_value, exc_tb)
                for line in lines:
                    line_splits = line.split("\n")
                    for line_split in line_splits:
                        logger.error(line_split)
                f.close()
                return -2
return | gpl-3.0 |
GISAElkartea/amv2 | antxetamedia/blobs/models.py | 1 | 4103 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, transaction
from django.core.exceptions import ValidationError
from django.contrib.contenttypes.fields import GenericForeignKey
from django.utils.six import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
from django.utils import timezone
@python_2_unicode_compatible
class Account(models.Model):
    """Credentials for a remote storage account used for blob uploads."""

    class Meta:
        verbose_name = _('Account')
        verbose_name_plural = _('Accounts')

    # Human-readable label for the account.
    name = models.CharField(_('Name'), max_length=64)
    username = models.CharField(_('Username'), max_length=256)
    # NOTE(review): the password is stored in clear text — confirm intended.
    password = models.CharField(_('Password'), max_length=256)

    def __str__(self):
        return self.name
class BlobQuerySet(models.QuerySet):
    """Custom queryset helpers for Blob."""

    def with_content(self):
        # Keep only blobs that have a non-empty remote link or local file.
        query = ((models.Q(remote__isnull=False) & ~models.Q(remote='')) |
                 (models.Q(local__isnull=False) & ~models.Q(local='')))
        return self.filter(query)
@python_2_unicode_compatible
class Blob(models.Model):
    """An audio file attached (via a generic relation) to any content object.

    The file lives locally until uploaded, after which only the remote
    link is kept (see is_uploaded / link).
    """
    objects = BlobQuerySet.as_manager()

    class Meta:
        ordering = ['-created']
        verbose_name = _('Audio blob')
        verbose_name_plural = _('Audio blobs')

    # Generic foreign key to the object this blob belongs to.
    content_type = models.ForeignKey('contenttypes.contenttype')
    object_id = models.PositiveIntegerField()
    content_object = GenericForeignKey('content_type', 'object_id')

    created = models.DateTimeField(_('Created'), auto_now_add=True, editable=False)
    # Ordering position among sibling blobs of the same content object.
    position = models.PositiveIntegerField(_('Position'), default=0)
    local = models.FileField(_('Local file'), upload_to='blobs', null=True, blank=True, max_length=512,
                             help_text=_("If set, the file will be uploaded to the remote storage and the link will "
                                         "be set at the remote field."))
    remote = models.CharField(_('Remote file'), max_length=512, null=True, blank=True)
    account = models.ForeignKey(Account, verbose_name=_('Account'))

    def __str__(self):
        return '{self.content_object} - #{self.position}'.format(self=self)

    @property
    def is_uploaded(self):
        # Uploaded means: no local copy remains and a remote link exists.
        return bool(not self.local and self.remote)

    @property
    def link(self):
        # Prefer the local file URL while it still exists; otherwise the remote link.
        if self.local:
            return self.local.url
        if self.remote:
            return self.remote
        return ''

    def clean(self):
        # Model-level validation: at least one content source is required.
        if not self.local and not self.remote:
            raise ValidationError(_("Blobs should have either a local or a remote file."))
@python_2_unicode_compatible
class BlobUpload(models.Model):
    """Tracks one attempt to upload a Blob's local file to remote storage."""

    # Upload lifecycle states.
    PENDING = 0
    UPLOADING = 1
    SUCCEEDED = 2
    FAILED = 3
    STATES = [
        (PENDING, _('Pending')),
        (UPLOADING, _('Uploading')),
        (SUCCEEDED, _('Succeeded')),
        (FAILED, _('Failed')),
    ]

    class Meta:
        verbose_name = _('Blob upload')
        verbose_name_plural = _('Blob uploads')

    blob = models.ForeignKey(Blob, verbose_name=_('Blob'))
    state = models.PositiveSmallIntegerField(_('State'), choices=STATES, default=PENDING)
    started = models.DateTimeField(_('Start time'), null=True, blank=True)
    ended = models.DateTimeField(_('End time'), null=True, blank=True)
    traceback = models.TextField(_('Traceback'), blank=True)

    def __str__(self):
        return _('{blob} upload').format(blob=self.blob)

    def has_succeeded(self):
        # True/False once finished; None while still pending or uploading.
        if self.state in (self.SUCCEEDED, self.FAILED):
            return self.state == self.SUCCEEDED
        return None

    def is_uploading(self):
        # Mark the upload as started right now.
        self.started = timezone.now()
        self.state = self.UPLOADING
        self.save()

    def is_successful(self, remote):
        # Atomically record success and swap the blob's local file for the
        # remote link (the local copy is deleted).
        with transaction.atomic():
            self.ended = timezone.now()
            self.state = self.SUCCEEDED
            self.save()
            self.blob.remote = remote
            self.blob.local.delete()
            self.blob.save()

    def is_unsuccessful(self, traceback=None):
        # Record failure, keeping the traceback for later inspection.
        self.ended = timezone.now()
        self.state = self.FAILED
        self.traceback = traceback
        self.save()
| agpl-3.0 |
docker-infra/ansible-modules-core | cloud/amazon/iam.py | 29 | 28918 | #!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: iam
short_description: Manage IAM users, groups, roles and keys
description:
- Allows for the management of IAM users, groups, roles and access keys.
version_added: "2.0"
options:
iam_type:
description:
- Type of IAM resource
required: true
default: null
choices: [ "user", "group", "role"]
name:
description:
- Name of IAM resource to create or identify
required: true
new_name:
description:
- When state is update, will replace name with new_name on IAM resource
required: false
default: null
new_path:
description:
- When state is update, will replace the path with new_path on the IAM resource
required: false
default: null
state:
description:
- Whether to create, delete or update the IAM resource. Note, roles cannot be updated.
required: true
default: null
choices: [ "present", "absent", "update" ]
path:
description:
- When creating or updating, specify the desired path of the resource. If state is present, it will replace the current path to match what is passed in when they do not match.
required: false
default: "/"
access_key_state:
description:
- When type is user, it creates, removes, deactivates or activates a user's access key(s). Note that actions apply only to keys specified.
required: false
default: null
choices: [ "create", "remove", "active", "inactive"]
key_count:
description:
- When access_key_state is create it will ensure this quantity of keys are present. Defaults to 1.
required: false
default: '1'
access_key_ids:
description:
- A list of the keys that you want impacted by the access_key_state paramter.
groups:
description:
- A list of groups the user should belong to. When update, will gracefully remove groups not listed.
required: false
default: null
password:
description:
- When type is user and state is present, define the users login password. Also works with update. Note that always returns changed.
required: false
default: null
update_password:
required: false
default: always
choices: ['always', 'on_create']
description:
- C(always) will update passwords if they differ. C(on_create) will only set the password for newly created users.
aws_secret_key:
description:
- AWS secret key. If not set then the value of the AWS_SECRET_KEY environment variable is used.
required: false
default: null
aliases: [ 'ec2_secret_key', 'secret_key' ]
aws_access_key:
description:
- AWS access key. If not set then the value of the AWS_ACCESS_KEY environment variable is used.
required: false
default: null
aliases: [ 'ec2_access_key', 'access_key' ]
notes:
- 'Currently boto does not support the removal of Managed Policies, the module will error out if your user/group/role has managed policies when you try to do state=absent. They will need to be removed manually.'
author:
- "Jonathan I. Davila (@defionscode)"
- "Paul Seiffert (@seiffert)"
extends_documentation_fragment: aws
'''
EXAMPLES = '''
# Basic user creation example
tasks:
- name: Create two new IAM users with API keys
iam:
iam_type: user
name: "{{ item }}"
state: present
password: "{{ temp_pass }}"
access_key_state: create
with_items:
- jcleese
- mpython
# Advanced example, create two new groups and add the pre-existing user
# jdavila to both groups.
task:
- name: Create Two Groups, Mario and Luigi
iam:
iam_type: group
name: "{{ item }}"
state: present
with_items:
- Mario
- Luigi
register: new_groups
- name:
iam:
iam_type: user
name: jdavila
state: update
groups: "{{ item.created_group.group_name }}"
with_items: new_groups.results
'''
import json
import itertools
import sys
try:
import boto
import boto.iam
import boto.ec2
HAS_BOTO = True
except ImportError:
HAS_BOTO = False
def boto_exception(err):
    '''generic error message handler'''
    # Boto errors expose their text under different attribute names
    # depending on the exception class; probe them in order.
    if hasattr(err, 'error_message'):
        return err.error_message
    if hasattr(err, 'message'):
        return err.message
    return '%s: %s' % (Exception, err)
def create_user(module, iam, name, pwd, path, key_state, key_count):
    """Create an IAM user, optionally with a login profile and access keys.

    Returns a tuple ``(user_info, changed)`` where ``user_info`` holds the
    created user metadata, the login profile result and any created keys.
    """
    key_qty = 0
    keys = []
    try:
        user_meta = iam.create_user(
            name, path).create_user_response.create_user_result.user
        changed = True
        if pwd is not None:
            pwd = iam.create_login_profile(name, pwd)
        if key_state in ['create']:
            if key_count:
                # Create as many access keys as requested.
                while key_count > key_qty:
                    keys.append(iam.create_access_key(
                        user_name=name).create_access_key_response.
                        create_access_key_result.
                        access_key)
                    key_qty += 1
            else:
                keys = None
    # 'as' form works on Python 2.6+ and 3.x; the old comma form is a
    # SyntaxError on Python 3.
    except boto.exception.BotoServerError as err:
        module.fail_json(changed=False, msg=str(err))
    else:
        user_info = dict(created_user=user_meta, password=pwd, access_keys=keys)
    return (user_info, changed)
def delete_user(module, iam, name):
    """Delete an IAM user, removing its access keys and inline policies first.

    Returns ``(deletion_metadata, name, changed)``.
    """
    # BUG FIX: `changed` was previously unbound on the error paths below,
    # so fail_json(changed=changed, ...) raised a NameError instead of
    # reporting the real AWS error.
    changed = False
    try:
        current_keys = [ck['access_key_id'] for ck in
                        iam.get_all_access_keys(name).list_access_keys_result.access_key_metadata]
        for key in current_keys:
            iam.delete_access_key(key, name)
        del_meta = iam.delete_user(name).delete_user_response
    except boto.exception.BotoServerError as err:
        error_msg = boto_exception(err)
        if ('must detach all policies first') in error_msg:
            # Inline policies block deletion: drop them all and retry once.
            for policy in iam.get_all_user_policies(name).list_user_policies_result.policy_names:
                iam.delete_user_policy(name, policy)
            try:
                del_meta = iam.delete_user(name)
            except boto.exception.BotoServerError as err:
                error_msg = boto_exception(err)
                if ('must detach all policies first') in error_msg:
                    # Managed (attached) policies cannot be removed via boto.
                    module.fail_json(changed=changed, msg="All inline polices have been removed. Though it appears"
                                     "that %s has Managed Polices. This is not "
                                     "currently supported by boto. Please detach the polices "
                                     "through the console and try again." % name)
                else:
                    module.fail_json(changed=changed, msg=str(err))
            else:
                changed = True
                return del_meta, name, changed
        # NOTE(review): non-policy errors fall through and the function returns
        # None implicitly — preserved from the original; confirm intended.
    else:
        changed = True
        return del_meta, name, changed
def update_user(module, iam, name, new_name, new_path, key_state, key_count, keys, pwd, updated):
    """Update an existing IAM user: rename/re-path, login password, access keys.

    Returns ``(name_change, updated_key_list, changed)`` where
    ``updated_key_list`` maps access-key ids to their final status.
    The only code change versus the original is the Python 2.6+/3-compatible
    ``except ... as err`` syntax (the comma form is a SyntaxError on Python 3).
    """
    changed = False
    name_change = False
    if updated and new_name:
        name = new_name
    try:
        current_keys, status = \
            [ck['access_key_id'] for ck in
             iam.get_all_access_keys(name).list_access_keys_result.access_key_metadata],\
            [ck['status'] for ck in
             iam.get_all_access_keys(name).list_access_keys_result.access_key_metadata]
        key_qty = len(current_keys)
    except boto.exception.BotoServerError as err:
        error_msg = boto_exception(err)
        if 'cannot be found' in error_msg and updated:
            # The user was already renamed: look the keys up under the new name.
            current_keys, status = \
                [ck['access_key_id'] for ck in
                 iam.get_all_access_keys(new_name).list_access_keys_result.access_key_metadata],\
                [ck['status'] for ck in
                 iam.get_all_access_keys(new_name).list_access_keys_result.access_key_metadata]
            name = new_name
        else:
            module.fail_json(changed=False, msg=str(err))

    updated_key_list = {}

    if new_name or new_path:
        c_path = iam.get_user(name).get_user_result.user['path']
        if (name != new_name) or (c_path != new_path):
            changed = True
            try:
                if not updated:
                    user = iam.update_user(
                        name, new_user_name=new_name, new_path=new_path).update_user_response.response_metadata
                else:
                    user = iam.update_user(
                        name, new_path=new_path).update_user_response.response_metadata
                user['updates'] = dict(
                    old_username=name, new_username=new_name, old_path=c_path, new_path=new_path)
            except boto.exception.BotoServerError as err:
                error_msg = boto_exception(err)
                module.fail_json(changed=False, msg=str(err))
            else:
                if not updated:
                    name_change = True

    if pwd:
        try:
            iam.update_login_profile(name, pwd)
            changed = True
        except boto.exception.BotoServerError:
            # No login profile yet: create one instead.
            try:
                iam.create_login_profile(name, pwd)
                changed = True
            except boto.exception.BotoServerError as err:
                error_msg = boto_exception(str(err))
                if 'Password does not conform to the account password policy' in error_msg:
                    module.fail_json(changed=False, msg="Passsword doesn't conform to policy")
                else:
                    module.fail_json(msg=error_msg)
    else:
        # No password supplied: revoke console access if a profile exists.
        try:
            iam.delete_login_profile(name)
            changed = True
        except boto.exception.BotoServerError:
            pass

    if key_state == 'create':
        try:
            # Top up the user's access keys to the requested count.
            while key_count > key_qty:
                new_key = iam.create_access_key(
                    user_name=name).create_access_key_response.create_access_key_result.access_key
                key_qty += 1
                changed = True
        except boto.exception.BotoServerError as err:
            module.fail_json(changed=False, msg=str(err))

    if keys and key_state:
        for access_key in keys:
            if access_key in current_keys:
                for current_key, current_key_state in zip(current_keys, status):
                    if key_state != current_key_state.lower():
                        try:
                            iam.update_access_key(
                                access_key, key_state.capitalize(), user_name=name)
                        except boto.exception.BotoServerError as err:
                            module.fail_json(changed=False, msg=str(err))
                        else:
                            changed = True
                if key_state == 'remove':
                    try:
                        iam.delete_access_key(access_key, user_name=name)
                    except boto.exception.BotoServerError as err:
                        module.fail_json(changed=False, msg=str(err))
                    else:
                        changed = True

    try:
        # Re-read the keys so callers see the post-update state.
        final_keys, final_key_status = \
            [ck['access_key_id'] for ck in
             iam.get_all_access_keys(name).
             list_access_keys_result.
             access_key_metadata],\
            [ck['status'] for ck in
             iam.get_all_access_keys(name).
             list_access_keys_result.
             access_key_metadata]
    except boto.exception.BotoServerError as err:
        module.fail_json(changed=changed, msg=str(err))

    for fk, fks in zip(final_keys, final_key_status):
        updated_key_list.update({fk: fks})

    return name_change, updated_key_list, changed
def set_users_groups(module, iam, name, groups, updated=None,
                     new_name=None):
    """ Sets groups for a user, will purge groups not explictly passed, while
        retaining pre-existing groups that also are in the new list.
    """
    changed = False

    if updated:
        name = new_name

    try:
        orig_users_groups = [og['group_name'] for og in iam.get_groups_for_user(
            name).list_groups_for_user_result.groups]
        remove_groups = [
            rg for rg in frozenset(orig_users_groups).difference(groups)]
        new_groups = [
            ng for ng in frozenset(groups).difference(orig_users_groups)]
    # Python 2.6+/3-compatible exception syntax (was the py2-only comma form).
    except boto.exception.BotoServerError as err:
        module.fail_json(changed=changed, msg=str(err))
    else:
        if len(orig_users_groups) > 0:
            # Reconcile: add the missing groups, purge the unwanted ones.
            for new in new_groups:
                iam.add_user_to_group(new, name)
            for rm in remove_groups:
                iam.remove_user_from_group(rm, name)
        else:
            for group in groups:
                try:
                    iam.add_user_to_group(group, name)
                except boto.exception.BotoServerError as err:
                    error_msg = boto_exception(err)
                    if ('The group with name %s cannot be found.' % group) in error_msg:
                        module.fail_json(changed=False, msg="Group %s doesn't exist" % group)

    if len(remove_groups) > 0 or len(new_groups) > 0:
        changed = True

    return (groups, changed)
def create_group(module=None, iam=None, name=None, path=None):
    """Create an IAM group; returns ``(name, changed)``."""
    changed = False
    try:
        iam.create_group(
            name, path).create_group_response.create_group_result.group
    # Python 2.6+/3-compatible exception syntax (was the py2-only comma form).
    except boto.exception.BotoServerError as err:
        module.fail_json(changed=changed, msg=str(err))
    else:
        changed = True
    return name, changed
def delete_group(module=None, iam=None, name=None):
    """Delete an IAM group, stripping inline policies if they block deletion.

    Returns ``(changed, name)``.
    """
    changed = False
    try:
        iam.delete_group(name)
    # Python 2.6+/3-compatible exception syntax (was the py2-only comma form).
    except boto.exception.BotoServerError as err:
        error_msg = boto_exception(err)
        if ('must detach all policies first') in error_msg:
            # Inline policies prevent deletion: remove them and retry once.
            for policy in iam.get_all_group_policies(name).list_group_policies_result.policy_names:
                iam.delete_group_policy(name, policy)
            try:
                iam.delete_group(name)
            except boto.exception.BotoServerError as err:
                error_msg = boto_exception(err)
                if ('must detach all policies first') in error_msg:
                    # Managed (attached) policies cannot be removed via boto.
                    module.fail_json(changed=changed, msg="All inline polices have been removed. Though it appears"
                                     "that %s has Managed Polices. This is not "
                                     "currently supported by boto. Please detach the polices "
                                     "through the console and try again." % name)
                else:
                    module.fail_json(changed=changed, msg=str(err))
            else:
                changed = True
        # NOTE(review): other deletion errors are swallowed here and the
        # function reports changed=False — preserved from the original.
    else:
        changed = True
    return changed, name
def update_group(module=None, iam=None, name=None, new_name=None, new_path=None):
    """Rename and/or re-path an IAM group.

    Returns ``(changed, name, new_path, current_group_path)``.
    """
    changed = False
    try:
        current_group_path = iam.get_group(
            name).get_group_response.get_group_result.group['path']
        if new_path:
            if current_group_path != new_path:
                iam.update_group(name, new_path=new_path)
                changed = True
        if new_name:
            if name != new_name:
                iam.update_group(name, new_group_name=new_name, new_path=new_path)
                changed = True
                name = new_name
    # Python 2.6+/3-compatible exception syntax (was the py2-only comma form).
    except boto.exception.BotoServerError as err:
        module.fail_json(changed=changed, msg=str(err))
    return changed, name, new_path, current_group_path
def create_role(module, iam, name, path, role_list, prof_list):
    """Create an IAM role (and matching instance profile) when missing.

    Returns ``(changed, updated_role_list)``.
    """
    changed = False
    try:
        if name not in role_list:
            changed = True
            iam.create_role(
                name, path=path).create_role_response.create_role_result.role.role_name

            if name not in prof_list:
                # Keep the instance profile in lock-step with the new role.
                iam.create_instance_profile(name, path=path)
                iam.add_role_to_instance_profile(name, name)
    # Python 2.6+/3-compatible exception syntax (was the py2-only comma form).
    except boto.exception.BotoServerError as err:
        module.fail_json(changed=changed, msg=str(err))
    else:
        updated_role_list = [rl['role_name'] for rl in iam.list_roles().list_roles_response.
                             list_roles_result.roles]
    return changed, updated_role_list
def delete_role(module, iam, name, role_list, prof_list):
    """Delete an IAM role and its instance profile, stripping inline policies.

    Returns ``(changed, updated_role_list)``.
    """
    changed = False
    try:
        if name in role_list:
            # A role cannot be deleted while attached to instance profiles.
            cur_ins_prof = [rp['instance_profile_name'] for rp in
                            iam.list_instance_profiles_for_role(name).
                            list_instance_profiles_for_role_result.
                            instance_profiles]
            for profile in cur_ins_prof:
                iam.remove_role_from_instance_profile(profile, name)
            try:
                iam.delete_role(name)
            # Python 2.6+/3-compatible syntax (was the py2-only comma form).
            except boto.exception.BotoServerError as err:
                error_msg = boto_exception(err)
                if ('must detach all policies first') in error_msg:
                    for policy in iam.list_role_policies(name).list_role_policies_result.policy_names:
                        iam.delete_role_policy(name, policy)
                    try:
                        iam.delete_role(name)
                    except boto.exception.BotoServerError as err:
                        error_msg = boto_exception(err)
                        if ('must detach all policies first') in error_msg:
                            # Managed (attached) policies cannot be removed via boto.
                            module.fail_json(changed=changed, msg="All inline polices have been removed. Though it appears"
                                             "that %s has Managed Polices. This is not "
                                             "currently supported by boto. Please detach the polices "
                                             "through the console and try again." % name)
                        else:
                            module.fail_json(changed=changed, msg=str(err))
                    else:
                        changed = True
            else:
                changed = True

        # The instance profile may exist even when the role does not.
        for prof in prof_list:
            if name == prof:
                iam.delete_instance_profile(name)
    except boto.exception.BotoServerError as err:
        module.fail_json(changed=changed, msg=str(err))
    else:
        updated_role_list = [rl['role_name'] for rl in iam.list_roles().list_roles_response.
                             list_roles_result.roles]
    return changed, updated_role_list
def main():
    """Entry point: create/update/delete IAM users, groups and roles."""
    argument_spec = ec2_argument_spec()
    argument_spec.update(dict(
        iam_type=dict(
            default=None, required=True, choices=['user', 'group', 'role']),
        groups=dict(type='list', default=None, required=False),
        state=dict(
            default=None, required=True, choices=['present', 'absent', 'update']),
        password=dict(default=None, required=False, no_log=True),
        update_password=dict(default='always', required=False, choices=['always', 'on_create']),
        access_key_state=dict(default=None, required=False, choices=[
            'active', 'inactive', 'create', 'remove',
            'Active', 'Inactive', 'Create', 'Remove']),
        access_key_ids=dict(type='list', default=None, required=False),
        key_count=dict(type='int', default=1, required=False),
        name=dict(default=None, required=False),
        new_name=dict(default=None, required=False),
        path=dict(default='/', required=False),
        new_path=dict(default=None, required=False)
    )
    )

    module = AnsibleModule(
        argument_spec=argument_spec,
        mutually_exclusive=[],
    )

    if not HAS_BOTO:
        module.fail_json(msg='This module requires boto, please install it')

    state = module.params.get('state').lower()
    iam_type = module.params.get('iam_type').lower()
    groups = module.params.get('groups')
    name = module.params.get('name')
    new_name = module.params.get('new_name')
    password = module.params.get('password')
    update_pw = module.params.get('update_password')
    path = module.params.get('path')
    new_path = module.params.get('new_path')
    key_count = module.params.get('key_count')
    key_state = module.params.get('access_key_state')
    # BUG FIX: key_ids must be read before the validation below that uses it;
    # previously it was assigned afterwards, so requesting 'active'/'inactive'
    # crashed with a NameError instead of producing the intended error message.
    key_ids = module.params.get('access_key_ids')

    if key_state:
        key_state = key_state.lower()
        if any([n in key_state for n in ['active', 'inactive']]) and not key_ids:
            module.fail_json(changed=False, msg="At least one access key has to be defined in order"
                                                " to use 'active' or 'inactive'")

    if iam_type == 'user' and module.params.get('password') is not None:
        pwd = module.params.get('password')
    elif iam_type != 'user' and module.params.get('password') is not None:
        module.fail_json(msg="a password is being specified when the iam_type "
                             "is not user. Check parameters")
    else:
        pwd = None
    # BUG FIX: this check previously read the non-existent 'access_key_id'
    # parameter (always None), so keys passed for a group/role went undetected.
    if iam_type != 'user' and (module.params.get('access_key_state') is not None or
                               module.params.get('access_key_ids') is not None):
        module.fail_json(msg="the IAM type must be user, when IAM access keys "
                             "are being modified. Check parameters")

    if iam_type == 'role' and state == 'update':
        module.fail_json(changed=False, msg="iam_type: role, cannot currently be updated, "
                                            "please specificy present or absent")

    region, ec2_url, aws_connect_kwargs = get_aws_connection_info(module)

    try:
        iam = boto.iam.connection.IAMConnection(**aws_connect_kwargs)
    # Python 2.6+/3-compatible exception syntax (was the py2-only comma form).
    except boto.exception.NoAuthHandlerFound as e:
        module.fail_json(msg=str(e))

    result = {}
    changed = False

    # Snapshot current account state so existence checks below are cheap.
    orig_group_list = [gl['group_name'] for gl in iam.get_all_groups().
                       list_groups_result.
                       groups]

    orig_user_list = [ul['user_name'] for ul in iam.get_all_users().
                      list_users_result.
                      users]

    orig_role_list = [rl['role_name'] for rl in iam.list_roles().list_roles_response.
                      list_roles_result.
                      roles]

    orig_prof_list = [ap['instance_profile_name'] for ap in iam.list_instance_profiles().
                      list_instance_profiles_response.
                      list_instance_profiles_result.
                      instance_profiles]

    if iam_type == 'user':
        been_updated = False
        user_groups = None
        user_exists = any([n in [name, new_name] for n in orig_user_list])
        if user_exists:
            current_path = iam.get_user(name).get_user_result.user['path']
            if not new_path and current_path != path:
                new_path = path
                path = current_path

        if state == 'present' and not user_exists and not new_name:
            (meta, changed) = create_user(
                module, iam, name, password, path, key_state, key_count)
            keys = iam.get_all_access_keys(name).list_access_keys_result.\
                access_key_metadata
            if groups:
                (user_groups, changed) = set_users_groups(
                    module, iam, name, groups, been_updated, new_name)
            module.exit_json(
                user_meta=meta, groups=user_groups, keys=keys, changed=changed)

        elif state in ['present', 'update'] and user_exists:
            if update_pw == 'on_create':
                password = None
            if name not in orig_user_list and new_name in orig_user_list:
                been_updated = True
            # BUG FIX: orig_name must always be bound — some exit branches
            # below reference it even when update_user reports no rename.
            orig_name = name
            name_change, key_list, user_changed = update_user(
                module, iam, name, new_name, new_path, key_state, key_count, key_ids, password, been_updated)
            if name_change and new_name:
                name = new_name
            if groups:
                user_groups, groups_changed = set_users_groups(
                    module, iam, name, groups, been_updated, new_name)
                if groups_changed == user_changed:
                    changed = groups_changed
                else:
                    changed = True
            else:
                changed = user_changed
            if new_name and new_path:
                module.exit_json(changed=changed, groups=user_groups, old_user_name=orig_name,
                                 new_user_name=new_name, old_path=path, new_path=new_path, keys=key_list)
            elif new_name and not new_path and not been_updated:
                module.exit_json(
                    changed=changed, groups=user_groups, old_user_name=orig_name, new_user_name=new_name, keys=key_list)
            elif new_name and not new_path and been_updated:
                module.exit_json(
                    changed=changed, groups=user_groups, user_name=new_name, keys=key_list, key_state=key_state)
            elif not new_name and new_path:
                module.exit_json(
                    changed=changed, groups=user_groups, user_name=name, old_path=path, new_path=new_path, keys=key_list)
            else:
                module.exit_json(
                    changed=changed, groups=user_groups, user_name=name, keys=key_list)

        elif state == 'update' and not user_exists:
            module.fail_json(
                msg="The user %s does not exit. No update made." % name)

        elif state == 'absent':
            if name in orig_user_list:
                # Detach from all groups first, then remove keys and the user.
                set_users_groups(module, iam, name, '')
                del_meta, name, changed = delete_user(module, iam, name)
                module.exit_json(
                    deletion_meta=del_meta, deleted_user=name, changed=changed)
            else:
                module.exit_json(
                    changed=False, msg="User %s is already absent from your AWS IAM users" % name)

    elif iam_type == 'group':
        group_exists = name in orig_group_list

        if state == 'present' and not group_exists:
            new_group, changed = create_group(iam=iam, name=name, path=path)
            module.exit_json(changed=changed, group_name=new_group)
        elif state in ['present', 'update'] and group_exists:
            changed, updated_name, updated_path, cur_path = update_group(
                iam=iam, name=name, new_name=new_name, new_path=new_path)

            if new_path and new_name:
                module.exit_json(changed=changed, old_group_name=name,
                                 new_group_name=updated_name, old_path=cur_path,
                                 new_group_path=updated_path)

            if new_path and not new_name:
                module.exit_json(changed=changed, group_name=name,
                                 old_path=cur_path,
                                 new_group_path=updated_path)

            if not new_path and new_name:
                module.exit_json(changed=changed, old_group_name=name,
                                 new_group_name=updated_name, group_path=cur_path)

            if not new_path and not new_name:
                module.exit_json(
                    changed=changed, group_name=name, group_path=cur_path)
        elif state == 'update' and not group_exists:
            module.fail_json(
                changed=changed, msg="Update Failed. Group %s doesn't seem to exit!" % name)
        elif state == 'absent':
            if name in orig_group_list:
                # BUG FIX: delete_group returns (changed, name); the original
                # unpacked the tuple in the opposite order, reporting the group
                # name as `changed` and the boolean as the deleted group.
                changed, removed_group = delete_group(iam=iam, name=name)
                module.exit_json(changed=changed, delete_group=removed_group)
            else:
                module.exit_json(changed=changed, msg="Group already absent")

    elif iam_type == 'role':
        role_list = []
        if state == 'present':
            changed, role_list = create_role(
                module, iam, name, path, orig_role_list, orig_prof_list)
        elif state == 'absent':
            changed, role_list = delete_role(
                module, iam, name, orig_role_list, orig_prof_list)
        elif state == 'update':
            module.fail_json(
                changed=False, msg='Role update not currently supported by boto.')
        module.exit_json(changed=changed, roles=role_list)
from ansible.module_utils.basic import *
from ansible.module_utils.ec2 import *
main()
| gpl-3.0 |
kjbracey-arm/mbed | targets/TARGET_Cypress/TARGET_PSOC6/sb-tools/imgtool/imgtool.py | 7 | 6464 | #! /usr/bin/env python3
#
# Copyright 2017 Linaro Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import click
import getpass
from imgtool import keys
from imgtool import image
from imgtool.version import decode_version
def gen_rsa2048(keyfile, passwd):
    """Generate an RSA-2048 keypair and write the private key to *keyfile*."""
    keypair = keys.RSA2048.generate()
    keypair.export_private(path=keyfile, passwd=passwd)
def gen_ecdsa_p256(keyfile, passwd):
    """Generate an ECDSA P-256 keypair and write the private key to *keyfile*."""
    keypair = keys.ECDSA256P1.generate()
    keypair.export_private(keyfile, passwd=passwd)
def gen_ecdsa_p224(keyfile, passwd):
    """Placeholder: ECDSA P-224 key generation is not implemented yet."""
    print("TODO: p-224 not yet implemented")
# Output languages supported by the `getpub` command.
valid_langs = ['c', 'rust']

# Maps the --type option of `keygen` to its key-generator function.
keygens = {
    'rsa-2048': gen_rsa2048,
    'ecdsa-p256': gen_ecdsa_p256,
    'ecdsa-p224': gen_ecdsa_p224,
}
def load_key(keyfile):
    """Return the key stored in *keyfile*, prompting for a passphrase if needed."""
    # TODO: better handling of invalid pass-phrase
    loaded = keys.load(keyfile)
    if loaded is None:
        # First attempt failed: the key file is passphrase-protected.
        secret = getpass.getpass("Enter key passphrase: ").encode('utf-8')
        loaded = keys.load(keyfile, secret)
    return loaded
def get_password():
    """Prompt twice for a new passphrase until both entries match."""
    while True:
        first = getpass.getpass("Enter key passphrase: ")
        second = getpass.getpass("Reenter passphrase: ")
        if first == second:
            # Password must be bytes; UTF-8 keeps the encoding consistent.
            return first.encode('utf-8')
        print("Passwords do not match, try again")
@click.option('-p', '--password', is_flag=True,
              help='Prompt for password to protect key')
@click.option('-t', '--type', metavar='type', required=True,
              type=click.Choice(keygens.keys()))
@click.option('-k', '--key', metavar='filename', required=True)
@click.command(help='Generate pub/private keypair')
def keygen(type, key, password):
    # Resolve the passphrase first, then dispatch to the generator for `type`.
    if password:
        passphrase = get_password()
    else:
        passphrase = None
    keygens[type](key, passphrase)
@click.option('-l', '--lang', metavar='lang', default=valid_langs[0],
              type=click.Choice(valid_langs))
@click.option('-k', '--key', metavar='filename', required=True)
@click.command(help='Get public key from keypair')
def getpub(key, lang):
    # Emit the public half of the keypair as source code for `lang`.
    loaded = load_key(key)
    if loaded is None:
        print("Invalid passphrase")
        return
    if lang == 'c':
        loaded.emit_c()
    elif lang == 'rust':
        loaded.emit_rust()
    else:
        raise ValueError("BUG: should never get here!")
def validate_version(ctx, param, value):
    """Click callback: reject version strings that decode_version cannot parse."""
    try:
        decode_version(value)
    except ValueError as e:
        raise click.BadParameter("{}".format(e))
    return value
class BasedIntParamType(click.ParamType):
    """Click parameter type accepting decimal, 0x-prefixed hex, and
    0-prefixed octal integers."""
    name = 'integer'

    def convert(self, value, param, ctx):
        try:
            text = value
            if text[:2].lower() == '0x':
                return int(text[2:], 16)
            if text[:1] == '0':
                return int(text, 8)
            return int(text, 10)
        except ValueError:
            self.fail('%s is not a valid integer' % value, param, ctx)
def load_data_from_file(filename):
    """Read and return the entire contents of *filename* as bytes."""
    # A context manager guarantees the handle is closed even if read() raises
    # (the original leaked the file object on a failed read).
    with open(filename, 'rb') as file_obj:
        return file_obj.read()
# Build, optionally sign, and write out a firmware image.
# INFILE is the raw binary; OUTFILE receives the wrapped image.
@click.argument('outfile')
@click.argument('infile')
@click.option('--overwrite-only', default=False, is_flag=True,
              help='Use overwrite-only instead of swap upgrades')
@click.option('-M', '--max-sectors', type=int,
              help='When padding allow for this amount of sectors (defaults to 128)')
@click.option('--pad', default=False, is_flag=True,
              help='Pad image to --slot-size bytes, adding trailer magic')
@click.option('-S', '--slot-size', type=BasedIntParamType(), required=True,
              help='Size of the slot where the image will be written')
@click.option('--pad-header', default=False, is_flag=True,
              help='Add --header-size zeroed bytes at the beginning of the image')
@click.option('-H', '--header-size', type=BasedIntParamType(), required=True)
@click.option('-v', '--version', callback=validate_version, required=True)
@click.option('--align', type=click.Choice(['1', '2', '4', '8']),
              required=True)
@click.option('-k', '--key', metavar='filename')
@click.option('-a', '--aes-header-file', default=None, metavar='filename')
@click.option('--image-id', required=True, type=int, help='Image ID')
@click.option('--rollback_counter', default=None, type=int, help='Rollback monotonic counter value')
@click.command(help='Create a signed or unsigned image')
def sign(key, align, version, header_size, pad_header, slot_size, pad,
         max_sectors, overwrite_only, aes_header_file, image_id, rollback_counter, infile, outfile):
    # Optional AES header blob is read up front and handed to the image builder.
    if aes_header_file is not None:
        aes_header = load_data_from_file(aes_header_file)
    else:
        aes_header = None
    img = image.Image.load(infile, version=decode_version(version),
                           header_size=header_size, pad_header=pad_header,
                           pad=pad, align=int(align), slot_size=slot_size,
                           max_sectors=max_sectors,
                           overwrite_only=overwrite_only, aes_header_data=aes_header,
                           image_id=image_id, rollback_counter=rollback_counter)
    # Without -k/--key the image is produced unsigned (img.sign(None)).
    key = load_key(key) if key else None
    img.sign(key)
    if pad:
        img.pad_to(slot_size)
    img.save(outfile)
class AliasesGroup(click.Group):
    """Click group that resolves legacy alias names to real commands."""

    _aliases = {
        "create": "sign",
    }

    def list_commands(self, ctx):
        # Show aliases alongside the real commands in --help output.
        return sorted(list(self.commands) + list(self._aliases))

    def get_command(self, ctx, cmd_name):
        found = click.Group.get_command(self, ctx, cmd_name)
        if found is None and cmd_name in self._aliases:
            found = click.Group.get_command(self, ctx, self._aliases[cmd_name])
        return found
# Top-level command group; subcommands are registered below.
@click.command(cls=AliasesGroup,
               context_settings=dict(help_option_names=['-h', '--help']))
def imgtool():
    pass


imgtool.add_command(keygen)
imgtool.add_command(getpub)
imgtool.add_command(sign)


if __name__ == '__main__':
    imgtool()
| apache-2.0 |
detiber/ansible | lib/ansible/modules/cloud/ovirt/ovirt_nics_facts.py | 26 | 3930 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2016 Red Hat, Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: ovirt_nics_facts
short_description: Retrieve facts about one or more oVirt virtual machine network interfaces
author: "Ondra Machacek (@machacekondra)"
version_added: "2.3"
description:
- "Retrieve facts about one or more oVirt virtual machine network interfaces."
notes:
- "This module creates a new top-level C(ovirt_nics) fact, which
contains a list of NICs."
options:
vm:
description:
- "Name of the VM where NIC is attached."
required: true
name:
description:
- "Name of the NIC, can be used as glob expression."
extends_documentation_fragment: ovirt_facts
'''
EXAMPLES = '''
# Examples don't contain auth parameter for simplicity,
# look at ovirt_auth module to see how to reuse authentication:
# Gather facts about all NICs which names start with C(eth) for VM named C(centos7):
- ovirt_nics_facts:
vm: centos7
name: eth*
- debug:
var: ovirt_nics
'''
RETURN = '''
ovirt_nics:
description: "List of dictionaries describing the network interfaces. NIC attribues are mapped to dictionary keys,
all NICs attributes can be found at following url: https://ovirt.example.com/ovirt-engine/api/model#types/nic."
returned: On success.
type: list
'''
import fnmatch
import traceback
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.ovirt import (
check_sdk,
create_connection,
get_dict_of_struct,
ovirt_facts_full_argument_spec,
search_by_name,
)
def main():
    """Gather oVirt VM NIC facts and exit via AnsibleModule."""
    # Build the module arguments on top of the common oVirt facts spec.
    argument_spec = ovirt_facts_full_argument_spec(
        vm=dict(required=True),
        name=dict(default=None),
    )
    module = AnsibleModule(argument_spec)
    check_sdk(module)

    try:
        auth = module.params.pop('auth')
        connection = create_connection(auth)
        vms_service = connection.system_service().vms_service()
        vm_name = module.params['vm']
        vm = search_by_name(vms_service, vm_name)
        if vm is None:
            raise Exception("VM '%s' was not found." % vm_name)

        nics_service = vms_service.service(vm.id).nics_service()
        if module.params['name']:
            # Filter NICs by the glob pattern given in `name`.
            nics = [
                e for e in nics_service.list()
                if fnmatch.fnmatch(e.name, module.params['name'])
            ]
        else:
            nics = nics_service.list()
        module.exit_json(
            changed=False,
            ansible_facts=dict(
                ovirt_nics=[
                    get_dict_of_struct(
                        struct=c,
                        connection=connection,
                        fetch_nested=module.params.get('fetch_nested'),
                        attributes=module.params.get('nested_attributes'),
                    ) for c in nics
                ],
            ),
        )
    except Exception as e:
        module.fail_json(msg=str(e), exception=traceback.format_exc())
    finally:
        # Log out only when we authenticated with credentials (no caller token).
        # NOTE(review): if create_connection itself raises, `connection` is
        # unbound here and this line raises NameError — confirm upstream.
        connection.close(logout=auth.get('token') is None)
if __name__ == '__main__':
main()
| gpl-3.0 |
tatiana/invesalius | invesalius/style.py | 1 | 3570 | #--------------------------------------------------------------------------
# Software: InVesalius - Software de Reconstrucao 3D de Imagens Medicas
# Copyright: (C) 2001 Centro de Pesquisas Renato Archer
# Homepage: http://www.softwarepublico.gov.br
# Contact: invesalius@cti.gov.br
# License: GNU - GPL 2 (LICENSE.txt/LICENCA.txt)
#--------------------------------------------------------------------------
# Este programa e software livre; voce pode redistribui-lo e/ou
# modifica-lo sob os termos da Licenca Publica Geral GNU, conforme
# publicada pela Free Software Foundation; de acordo com a versao 2
# da Licenca.
#
# Este programa eh distribuido na expectativa de ser util, mas SEM
# QUALQUER GARANTIA; sem mesmo a garantia implicita de
# COMERCIALIZACAO ou de ADEQUACAO A QUALQUER PROPOSITO EM
# PARTICULAR. Consulte a Licenca Publica Geral GNU para obter mais
# detalhes.
#--------------------------------------------------------------------------
import wx.lib.pubsub as ps
# mode.py
# to be instanced inside Controller (control.py)
# IMPORTANT: When adding a new state, remember o insert it into LEVEL
# dictionary
# RULE:
# default is the only level 0
# states controlled somehow by taskmenu are level 1
# states controlled by toolbar are level 2
#LEVEL = {SLICE_STATE_DEFAULT: 0,
# SLICE_STATE_EDITOR: 1,
# SLICE_STATE_WL: 2,
# SLICE_STATE_SPIN: 2,
# SLICE_STATE_ZOOM: 2,
# SLICE_STATE_ZOOM_SL: 2}
#----------------------
# TODO: Add to viewer_slice.py:
#ps.Publisher().subscribe(self.OnSetMode, 'Set slice mode')
#def OnSetMode(self, pubsub_evt):
# mode = pubsub_evt.data
# according to mode, set cursor, interaction, etc
#----------------------
# TODO: Add GUI classes (frame, tasks related to slice, toolbar):
# always bind to this class (regarding slice mode) and not to
# viewer_slice directly
# example - pseudo code
#def OnToggleButtonSpin(self, evt)
# if evt.toggle: # doesn't exist, just to illustrate
# ps.Publisher().sendMessage('Enable mode', const.SLICE_STATE_ZOOM)
# else:
# ps.Publisher().subscribe('Disable mode', const.SLICE_STATE_ZOOM)
#----------------------
import constants as const
class StyleStateManager(object):
    """Keeps at most one interaction state per priority level.

    Doesn't need to be a singleton; it is instantiated inside the Controller
    (e.g. ``self.slice_mode = StyleStateManager()``). The *active* state is
    always the one stored at the highest occupied level, as defined by
    ``const.STYLE_LEVEL``.
    """

    def __init__(self):
        # Maps level (int) -> state constant. The default state (level 0 per
        # const.STYLE_LEVEL) is pushed immediately so the stack is never
        # empty and there is always a state to fall back to.
        self.stack = {}
        self.stack[const.STYLE_LEVEL[const.STATE_DEFAULT]] = \
            const.STATE_DEFAULT

    def AddState(self, state):
        """Push *state* at its configured level and return the active state.

        A state already stored at the same level is replaced. The returned
        state may differ from *state* when a higher level is occupied.
        """
        level = const.STYLE_LEVEL[state]
        self.stack[level] = state
        return self.stack[max(self.stack)]

    def RemoveState(self, state):
        """Remove *state* from the stack (if stacked) and return the active state.

        Behaviour only changes for callers when the removed state was the
        highest-level one; in every case the state now occupying the highest
        level is returned.
        """
        level = const.STYLE_LEVEL[state]
        # No-op when the state is not currently stacked.
        self.stack.pop(level, None)
        return self.stack[max(self.stack)]
| gpl-2.0 |
CapOM/ChromiumGStreamerBackend | tools/copyright_scanner/copyright_scanner.py | 13 | 16467 | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Utilities for scanning source files to determine code authorship.
"""
import itertools
def ForwardSlashesToOsPathSeps(input_api, path):
  """Rewrites a '/'-separated path using the local OS path separator.

  Used when paths come from outside and follow UNIX conventions. Only
  works for relative paths!

  Args:
    input_api: InputAPI, as in presubmit scripts.
    path: The relative path to convert.

  Returns:
    The equivalent path joined with OS-specific separators.
  """
  components = path.split('/')
  return input_api.os_path.join(*components)
def FindFiles(input_api, root_dir, start_paths_list, excluded_dirs_list):
  """Similar to UNIX utility find(1), searches for files in the directories.
  Automatically leaves out only source code files and excludes third_party
  directories.

  Args:
    input_api: InputAPI, as in presubmit scripts.
    root_dir: The root directory, to which all other paths are relative.
    start_paths_list: The list of paths to start search from. Each path can
      be a file or a directory.
    excluded_dirs_list: The list of directories to skip.

  Returns:
    The list of source code files found, relative to |root_dir|.
  """
  excluded_dirs_list = [d for d in excluded_dirs_list if not 'third_party' in d]
  # Using a common pattern for third-partyies makes the ignore regexp shorter
  excluded_dirs_list.append('third_party')

  path_join = input_api.os_path.join
  EXTRA_EXCLUDED_DIRS = [
    # VCS dirs
    path_join('.git'),
    path_join('.svn'),
    # Build output
    path_join('out', 'Debug'),
    path_join('out', 'Release'),
    # 'Copyright' appears in license agreements
    path_join('chrome', 'app', 'resources'),
    # Quickoffice js files from internal src used on buildbots.
    # crbug.com/350472.
    path_join('chrome', 'browser', 'resources', 'chromeos', 'quickoffice'),
    # This is a test output directory
    path_join('chrome', 'tools', 'test', 'reference_build'),
    # blink style copy right headers.
    path_join('content', 'shell', 'renderer', 'test_runner'),
    # blink style copy right headers.
    path_join('content', 'shell', 'tools', 'plugin'),
    # This is tests directory, doesn't exist in the snapshot
    path_join('content', 'test', 'data'),
    # This is a tests directory that doesn't exist in the shipped product.
    path_join('gin', 'test'),
    # This is a test output directory
    path_join('data', 'dom_perf'),
    # This is a tests directory that doesn't exist in the shipped product.
    path_join('tools', 'perf', 'page_sets'),
    path_join('tools', 'perf', 'page_sets', 'tough_animation_cases'),
    # Histogram tools, doesn't exist in the snapshot
    path_join('tools', 'histograms'),
    # Swarming tools, doesn't exist in the snapshot
    path_join('tools', 'swarming_client'),
    # ARM sysroot, doesn't exist in the snapshot
    path_join('build', 'linux', 'debian_wheezy_arm-sysroot'),
    # Old location (TODO(sbc): Remove this once it no longer exists on any bots)
    path_join('chrome', 'installer', 'linux', 'debian_wheezy_arm-sysroot'),
    # Data is not part of open source chromium, but are included on some bots.
    path_join('data'),
    # This is not part of open source chromium, but are included on some bots.
    path_join('skia', 'tools', 'clusterfuzz-data'),
    # Not shipped, only relates to Chrome for Android, but not to WebView
    path_join('clank'),
  ]
  excluded_dirs_list.extend(EXTRA_EXCLUDED_DIRS)

  # Surround the directory names with OS path separators.
  dirs_blacklist = [path_join('.', d, '')[1:] for d in excluded_dirs_list if d]
  def IsBlacklistedDir(d):
    # Substring test: a path is excluded when it contains any blacklisted
    # directory component anywhere in it.
    for item in dirs_blacklist:
      if item in d:
        return True
    return False

  # Whitelist of source-code file extensions; everything else is ignored.
  files_whitelist_re = input_api.re.compile(
    r'\.(asm|c(c|pp|xx)?|h(h|pp|xx)?|p(l|m)|xs|sh|php|py(|x)'
    '|rb|idl|java|el|sc(i|e)|cs|pas|inc|js|pac|html|dtd|xsl|mod|mm?'
    '|tex|mli?)$')
  files = []

  base_path_len = len(root_dir)
  for path in start_paths_list:
    full_path = path_join(root_dir, path)
    if input_api.os_path.isfile(full_path):
      if files_whitelist_re.search(path) and \
          not IsBlacklistedDir(full_path[base_path_len:]):  # Keep '/' prefix.
        files.append(path)
    else:
      for dirpath, dirnames, filenames in input_api.os_walk(full_path):
        # Remove excluded subdirs for faster scanning.
        for item in dirnames[:]:
          if IsBlacklistedDir(
              path_join(dirpath, item)[base_path_len + 1:]):
            dirnames.remove(item)
        for filename in filenames:
          filepath = \
              path_join(dirpath, filename)[base_path_len + 1:]
          if files_whitelist_re.search(filepath) and \
              not IsBlacklistedDir(filepath):
            files.append(filepath)
  return files
class _GeneratedFilesDetector(object):
  """Recognizes auto-generated files from their header text."""

  GENERATED_FILE = 'GENERATED FILE'
  NO_COPYRIGHT = '*No copyright*'

  def __init__(self, input_api):
    # Python triple-quoted strings are stripped before scanning, so that
    # "generated" markers quoted inside docstrings do not count.
    self.python_multiline_string_double_re = \
      input_api.re.compile(r'"""[^"]*(?:"""|$)', flags=input_api.re.MULTILINE)
    self.python_multiline_string_single_re = \
      input_api.re.compile(r"'''[^']*(?:'''|$)", flags=input_api.re.MULTILINE)
    self.automatically_generated_re = input_api.re.compile(
      r'(All changes made in this file will be lost'
      '|DO NOT (EDIT|delete this file)'
      '|Generated (at|automatically|data)'
      '|Automatically generated'
      '|\Wgenerated\s+(?:\w+\s+)*file\W)', flags=input_api.re.IGNORECASE)

  def IsGeneratedFile(self, header):
    """Heuristically decides whether |header| belongs to a generated file."""
    header = header.upper()
    if '"""' in header:
      header = self.python_multiline_string_double_re.sub('', header)
    if "'''" in header:
      header = self.python_multiline_string_single_re.sub('', header)
    # Cheap substring probes first; the regexp only runs after a probe hits.
    if 'ALL CHANGES MADE IN THIS FILE WILL BE LOST' in header:
      return True
    probe_hit = ('DO NOT EDIT' in header or 'DO NOT DELETE' in header or
                 'GENERATED' in header)
    if probe_hit:
      return self.automatically_generated_re.search(header)
    return False
class _CopyrightsScanner(object):
  """Extracts copyright attributions from source text, one line at a time."""

  @staticmethod
  def StaticInit(input_api):
    """Compiles the shared regexps once; must be called before instantiating."""
    # Matches C string literals so copyright-looking text inside them is
    # ignored by MatchLine.
    _CopyrightsScanner._c_comment_re = \
        input_api.re.compile(r'''"[^"\\]*(?:\\.[^"\\]*)*"''')
    _CopyrightsScanner._copyright_indicator = \
        r'(?:copyright|copr\.|\xc2\xa9|\(c\))'
    _CopyrightsScanner._full_copyright_indicator_re = input_api.re.compile(
        r'(?:\W|^)' + _CopyrightsScanner._copyright_indicator + \
        r'(?::\s*|\s+)(\w.*)$', input_api.re.IGNORECASE)
    # Words after the indicator that show it is NOT an authorship statement
    # (e.g. "copyright notice", "copyright information").
    _CopyrightsScanner._copyright_disindicator_re = input_api.re.compile(
        r'\s*\b(?:info(?:rmation)?|notice|and|or)\b', input_api.re.IGNORECASE)

  def __init__(self, input_api):
    # '(c)' is only treated as a copyright sign when it does not look like
    # the third item of an '(a) ... (b) ... (c)' list inside C++ comments;
    # remember where '(a)' and '(b)' were last seen.
    self.max_line_numbers_proximity = 3
    self.last_a_item_line_number = -200
    self.last_b_item_line_number = -100
    self.re = input_api.re

  def _CloseLineNumbers(self, a, b):
    """Returns True when line |b| precedes line |a| within the proximity window."""
    return 0 <= a - b <= self.max_line_numbers_proximity

  def MatchLine(self, line_number, line):
    """Returns the prettified copyright holder found on |line|, or None.

    |line_number| is used to track '(a)'/'(b)' list items so a '(c)' list
    item is not mistaken for a copyright sign.
    """
    if '"' in line:
      # Drop C string literals so their contents cannot trigger a match.
      line = _CopyrightsScanner._c_comment_re.sub('', line)
    upcase_line = line.upper()
    # Record '(a)' and '(b)' last occurences in C++ comments.
    # This is to filter out '(c)' used as a list item inside C++ comments.
    # E.g. "// blah-blah (a) blah\n// blah-blah (b) and (c) blah"
    cpp_comment_idx = upcase_line.find('//')
    if cpp_comment_idx != -1:
      if upcase_line.find('(A)') > cpp_comment_idx:
        self.last_a_item_line_number = line_number
      if upcase_line.find('(B)') > cpp_comment_idx:
        self.last_b_item_line_number = line_number
    # Fast bailout, uses the same patterns as _copyright_indicator regexp.
    if not 'COPYRIGHT' in upcase_line and not 'COPR.' in upcase_line \
        and not '\xc2\xa9' in upcase_line:
      c_item_index = upcase_line.find('(C)')
      if c_item_index == -1:
        return None
      if c_item_index > cpp_comment_idx and \
          self._CloseLineNumbers(line_number,
                                 self.last_b_item_line_number) and \
          self._CloseLineNumbers(self.last_b_item_line_number,
                                 self.last_a_item_line_number):
        return None
    copyr = None
    m = _CopyrightsScanner._full_copyright_indicator_re.search(line)
    if m and \
        not _CopyrightsScanner._copyright_disindicator_re.match(m.group(1)):
      copyr = m.group(0)
      # Prettify the authorship string.
      # BUG FIX: the pattern used to be r'([,.])?\s*$/' -- the stray '/'
      # after the '$' anchor (a Perl-ism) made it unmatchable, so trailing
      # commas/periods were never stripped as intended.
      copyr = self.re.sub(r'([,.])?\s*$', '', copyr)
      copyr = self.re.sub(
          _CopyrightsScanner._copyright_indicator, '', copyr, \
          flags=self.re.IGNORECASE)
      copyr = self.re.sub(r'^\s+', '', copyr)
      copyr = self.re.sub(r'\s{2,}', ' ', copyr)
      copyr = self.re.sub(r'\\@', '@', copyr)
    return copyr
def FindCopyrights(input_api, root_dir, files_to_scan):
  """Determines code authorship, and finds generated files.

  Args:
    input_api: InputAPI, as in presubmit scripts.
    root_dir: The root directory, to which all other paths are relative.
    files_to_scan: The list of file names to scan.

  Returns:
    The list of copyrights associated with each of the files given.
    If a file is generated, the corresponding list consists of a single
    entry -- the 'GENERATED FILE' string. If a file has no copyright info,
    the corresponding list contains the '*No copyright*' string.
  """
  detector = _GeneratedFilesDetector(input_api)
  _CopyrightsScanner.StaticInit(input_api)
  copyrights = []
  for file_name in files_to_scan:
    scanner = _CopyrightsScanner(input_api)
    contents = input_api.ReadFile(
        input_api.os_path.join(root_dir, file_name), 'r')
    header_lines = []
    found = []
    # Only the first 25 lines participate in generated-file detection;
    # copyright lines are collected from the whole file.
    for line_number, text in enumerate(contents.split('\n'), start=1):
      if line_number <= 25:
        header_lines.append(text)
      match = scanner.MatchLine(line_number, text)
      if match:
        found.append(match)
    if detector.IsGeneratedFile('\n'.join(header_lines)):
      copyrights.append([_GeneratedFilesDetector.GENERATED_FILE])
    elif found:
      copyrights.append(found)
    else:
      copyrights.append([_GeneratedFilesDetector.NO_COPYRIGHT])
  return copyrights
def FindCopyrightViolations(input_api, root_dir, files_to_scan):
  """Looks for files that do not belong exclusively to the Chromium Authors.

  Args:
    input_api: InputAPI, as in presubmit scripts.
    root_dir: The root directory, to which all other paths are relative.
    files_to_scan: The list of file names to scan.

  Returns:
    The list of file names that contain non-Chromium copyrights.
  """
  copyrights = FindCopyrights(input_api, root_dir, files_to_scan)
  allowed_copyrights_re = input_api.re.compile(
      r'^(?:20[0-9][0-9](?:-20[0-9][0-9])? The Chromium Authors\. '
      'All rights reserved.*)$')
  offending_files = []
  for file_name, file_copyrights in itertools.izip(files_to_scan, copyrights):
    # Generated files and files without any copyright line are fine.
    if file_copyrights[0] in (_GeneratedFilesDetector.GENERATED_FILE,
                              _GeneratedFilesDetector.NO_COPYRIGHT):
      continue
    # A single non-Chromium attribution makes the file offending.
    if not all(allowed_copyrights_re.match(c) for c in file_copyrights):
      offending_files.append(input_api.os_path.normpath(file_name))
  return offending_files
def _GetWhitelistFileName(input_api):
  """Returns the repo-relative path of the third-party files whitelist."""
  path_join = input_api.os_path.join
  return path_join(
      'tools', 'copyright_scanner', 'third_party_files_whitelist.txt')
def _ProcessWhitelistedFilesList(input_api, lines):
  """Parses whitelist lines into OS-native relative paths.

  Each line's leading non-space, non-comment token is taken as a path;
  blank lines and '#'-comment lines yield nothing.
  """
  whitelisted_files = []
  for line in lines:
    m = input_api.re.match(r'([^#\s]+)', line)
    if not m:
      continue
    whitelisted_files.append(
        ForwardSlashesToOsPathSeps(input_api, m.group(1)))
  return whitelisted_files
def LoadWhitelistedFilesList(input_api):
  """Loads and parses the 3rd party code whitelist file.

  input_api: InputAPI of presubmit scripts.

  Returns:
    The list of files.
  """
  whitelist_path = input_api.os_path.join(
      input_api.change.RepositoryRoot(), _GetWhitelistFileName(input_api))
  contents = input_api.ReadFile(whitelist_path, 'rb')
  return _ProcessWhitelistedFilesList(input_api, contents.splitlines())
def AnalyzeScanResults(input_api, whitelisted_files, offending_files):
  """Compares whitelist contents with the results of file scanning.

  input_api: InputAPI of presubmit scripts.
  whitelisted_files: Whitelisted files list.
  offending_files: Files that contain 3rd party code.

  Returns:
    A triplet of "unknown", "missing", and "stale" file lists.
    "Unknown" are files that contain 3rd party code but are not whitelisted.
    "Missing" are files that are whitelisted but do not actually exist.
    "Stale" are files that are whitelisted unnecessarily.
  """
  whitelisted = set(whitelisted_files)
  offending = set(offending_files)
  unknown = offending - whitelisted
  repo_root = input_api.change.RepositoryRoot()
  missing = [f for f in whitelisted_files
             if not input_api.os_path.isfile(
                 input_api.os_path.join(repo_root, f))]
  stale = whitelisted - offending - set(missing)
  return (list(unknown), missing, list(stale))
def _GetDeletedContents(affected_file):
  """Returns a list of all deleted lines.

  AffectedFile class from presubmit_support is lacking this functionality.
  """
  diff_lines = affected_file.GenerateScmDiff().splitlines()
  # A single leading '-' marks a removed line; '--' would be the '---'
  # old-file header, which must not be treated as content.
  return [l[1:] for l in diff_lines
          if l.startswith('-') and not l.startswith('--')]
def _DoScanAtPresubmit(input_api, whitelisted_files, files_to_check):
  """Runs the scanner over |files_to_check| and classifies the results."""
  # We pass an empty 'known third-party' dirs list here. Since this is a
  # patch for the Chromium src tree, it must contain properly licensed
  # Chromium code. Any third-party code must be put into a directory named
  # 'third_party', and such dirs are automatically excluded by FindFiles.
  repo_root = input_api.change.RepositoryRoot()
  files_to_scan = FindFiles(input_api, repo_root, files_to_check, [])
  offending_files = FindCopyrightViolations(input_api, repo_root,
                                            files_to_scan)
  return AnalyzeScanResults(input_api, whitelisted_files, offending_files)
def ScanAtPresubmit(input_api, output_api):
  """Invoked at change presubmit time. Verifies that updated non third-party
  code doesn't contain external copyrighted code.

  input_api: InputAPI of presubmit scripts.
  output_api: OutputAPI of presubmit scripts.
  """
  check_paths = set([])
  removed_paths = set([])
  whitelist_touched = False
  whitelist_name = _GetWhitelistFileName(input_api)
  for affected in input_api.AffectedFiles():
    if affected.LocalPath() == whitelist_name:
      # Entries deleted from the whitelist must be re-scanned.
      whitelist_touched = True
      removed_paths |= set(_ProcessWhitelistedFilesList(
          input_api, _GetDeletedContents(affected)))
      continue
    if affected.Action() == 'D':
      removed_paths.add(affected.LocalPath())
    else:
      check_paths.add(affected.LocalPath())
  whitelisted = set(LoadWhitelistedFilesList(input_api))
  if whitelist_touched:
    # Need to re-check the entire contents of the whitelist file.
    # Also add files removed from the whitelist. If the file has indeed
    # been deleted, the scanner will not complain.
    check_paths |= whitelisted | removed_paths
  else:
    # Only the whitelist entries relevant to this change matter.
    whitelisted &= check_paths | removed_paths
  (unknown_files, missing_files, stale_files) = _DoScanAtPresubmit(
      input_api, list(whitelisted), list(check_paths))
  results = []
  if unknown_files:
    results.append(output_api.PresubmitError(
        'The following files contain a third-party license but are not in ' \
        'a listed third-party directory and are not whitelisted. You must ' \
        'add the following files to the whitelist file %s\n' \
        '(Note that if the code you are adding does not actually contain ' \
        'any third-party code, it may contain the word "copyright", which ' \
        'should be masked out, e.g. by writing it as "copy-right"):' \
        '' % _GetWhitelistFileName(input_api),
        sorted(unknown_files)))
  if missing_files:
    results.append(output_api.PresubmitPromptWarning(
        'The following files are whitelisted in %s, ' \
        'but do not exist or not files:' % _GetWhitelistFileName(input_api),
        sorted(missing_files)))
  if stale_files:
    results.append(output_api.PresubmitPromptWarning(
        'The following files are whitelisted unnecessarily. You must ' \
        'remove the following files from the whitelist file ' \
        '%s:' % _GetWhitelistFileName(input_api),
        sorted(stale_files)))
  return results
| bsd-3-clause |
pgmillon/ansible | lib/ansible/module_utils/facts/network/dragonfly.py | 232 | 1202 | # This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.module_utils.facts.network.base import NetworkCollector
from ansible.module_utils.facts.network.generic_bsd import GenericBsdIfconfigNetwork
class DragonFlyNetwork(GenericBsdIfconfigNetwork):
    """
    This is the DragonFly Network Class.
    It uses the GenericBsdIfconfigNetwork unchanged.
    """
    # Platform string matched by the fact-collection machinery to select
    # this subclass on DragonFly BSD hosts.
    platform = 'DragonFly'
class DragonFlyNetworkCollector(NetworkCollector):
    # Exposes DragonFlyNetwork through the common NetworkCollector
    # interface used by the facts subsystem.
    _fact_class = DragonFlyNetwork
    _platform = 'DragonFly'
| gpl-3.0 |
dhenrygithub/QGIS | python/ext-libs/future/past/builtins/misc.py | 62 | 2500 | from __future__ import unicode_literals
import sys
import inspect
from collections import Mapping
from future.utils import PY3, exec_
if PY3:
    import builtins

    def apply(f, *args, **kw):
        # Python 2's apply(): call *f* with the given positional and
        # keyword arguments.
        return f(*args, **kw)

    from past.builtins import str as oldstr

    def chr(i):
        """
        Return a byte-string of one character with ordinal i; 0 <= i <= 256
        """
        return oldstr(bytes((i,)))

    def cmp(x, y):
        """
        cmp(x, y) -> integer

        Return negative if x<y, zero if x==y, positive if x>y.
        """
        return (x > y) - (x < y)

    from sys import intern

    def oct(number):
        """oct(number) -> string

        Return the octal representation of an integer
        """
        # Python 2 spells octal literals as '0123'; builtins.oct on
        # Python 3 yields '0o123', so the prefix is rewritten.
        return '0' + builtins.oct(number)[2:]

    raw_input = input
    from imp import reload
    unicode = str
    unichr = chr
    xrange = range
else:
    import __builtin__
    # On Python 2 every name is simply an alias for the real builtin.
    apply = __builtin__.apply
    chr = __builtin__.chr
    cmp = __builtin__.cmp
    execfile = __builtin__.execfile
    intern = __builtin__.intern
    oct = __builtin__.oct
    raw_input = __builtin__.raw_input
    reload = __builtin__.reload
    unicode = __builtin__.unicode
    unichr = __builtin__.unichr
    xrange = __builtin__.xrange
if PY3:
    def execfile(filename, myglobals=None, mylocals=None):
        """
        Read and execute a Python script from a file in the given namespaces.
        The globals and locals are dictionaries, defaulting to the current
        globals and locals. If only globals is given, locals defaults to it.
        """
        if myglobals is None:
            # There seems to be no alternative to frame hacking here.
            caller_frame = inspect.stack()[1]
            myglobals = caller_frame[0].f_globals
            mylocals = caller_frame[0].f_locals
        elif mylocals is None:
            # Only if myglobals is given do we set mylocals to it.
            mylocals = myglobals
        if not isinstance(myglobals, Mapping):
            raise TypeError('globals must be a mapping')
        if not isinstance(mylocals, Mapping):
            raise TypeError('locals must be a mapping')
        # BUG FIX: the mode used to be "rbU", which raises
        # ValueError on Python 3 ('U' cannot be combined with 'b'),
        # making this PY3-only code path unusable. Plain binary mode is
        # correct: compile() accepts bytes source directly.
        with open(filename, "rb") as fin:
            source = fin.read()
        code = compile(source, filename, "exec")
        exec_(code, myglobals, mylocals)
if PY3:
    # Export only the compatibility shims defined above; on Python 2 the
    # real builtins are already available everywhere, so nothing is exported.
    __all__ = ['apply', 'chr', 'cmp', 'execfile', 'intern', 'raw_input',
               'reload', 'unichr', 'unicode', 'xrange']
else:
    __all__ = []
| gpl-2.0 |
guewen/OpenUpgrade | addons/product_extended/wizard/__init__.py | 374 | 1078 | # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>). All Rights Reserved
# $Id$
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import wizard_price
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
SKIRT/PTS | modeling/build/representations/generator.py | 1 | 7835 | #!/usr/bin/env python
# -*- coding: utf8 -*-
# *****************************************************************
# ** PTS -- Python Toolkit for working with SKIRT **
# ** © Astronomical Observatory, Ghent University **
# *****************************************************************
## \package pts.modeling.build.representations.generator Contains the RepresentationsGenerator class.
# -----------------------------------------------------------------
# Ensure Python 3 compatibility
from __future__ import absolute_import, division, print_function
# Import standard modules
from collections import OrderedDict
# Import astronomical modules
from astropy.units import dimensionless_angles
# Import the relevant PTS classes and modules
from ..component import BuildComponent
from ...component.galaxy import GalaxyModelingComponent
from ....core.prep.dustgrids import DustGridGenerator
from ....core.basics.log import log
from ....core.basics.range import QuantityRange, RealRange
from .galaxy import GalaxyRepresentationBuilder
from ....core.tools import time
from ....core.tools import tables
from ....core.tools import filesystem as fs
from ....core.prep.templates import get_pan_template
from ....core.advanced.dustgridtool import generate_grid
from ....core.simulation.grids import load_grid
# -----------------------------------------------------------------
class RepresentationGenerator(BuildComponent, GalaxyModelingComponent):

    """
    Generates multiple representations of one model definition: a series of
    dust grids is produced by a DustGridGenerator, and a full representation
    is built for each grid.
    """

    def __init__(self, *args, **kwargs):

        """
        The constructor: initializes the base components and the attributes
        that are filled in during setup() and the generation steps.
        :param kwargs:
        :return:
        """

        # Call the constructor of the base class
        #super(RepresentationGenerator, self).__init__(*args, **kwargs)
        BuildComponent.__init__(self, no_config=True)
        GalaxyModelingComponent.__init__(self, *args, **kwargs)

        # The model definition (set in setup)
        self.definition = None

        # The dust grid generator (set in setup)
        self.dg_generator = None

        # A unique name for this representation generation event
        self.event_name = None

        # The generated representations, mapped by representation name
        self.representations = OrderedDict()

    # -----------------------------------------------------------------

    def _run(self, **kwargs):

        """
        Runs the generation pipeline: create the dust grids, build one
        representation per grid, and write the results.
        :param kwargs:
        :return:
        """

        # 2. Create the dust grids
        self.create_dust_grids()

        # 3. Build the representations
        self.build_representations()

        # 5. Writing
        self.write()

    # -----------------------------------------------------------------

    def setup(self, **kwargs):

        """
        Sets up the component: loads the model definition, creates the dust
        grid generator and picks a unique event name.
        :param kwargs:
        :return:
        """

        # Call the setup function of the base class
        #super(RepresentationGenerator, self).setup(**kwargs)
        BuildComponent.setup(self, **kwargs)
        GalaxyModelingComponent.setup(self, **kwargs)

        # Create the model definition
        self.definition = self.get_model_definition(self.config.model_name)

        # Create the DustGridGenerator
        self.dg_generator = DustGridGenerator()

        # Set the event name
        self.event_name = time.unique_name("generator")

    # -----------------------------------------------------------------

    def create_dust_grids(self):

        """
        Configures the dust grid generator from the model definition and
        truncation ellipse, and generates config.nrepresentations grids.
        :return:
        """

        # Inform the user
        log.info("Creating the grids ...")

        # Calculate the major radius of the truncation ellipse in physical coordinates (pc)
        semimajor_angular = self.truncation_ellipse.semimajor  # semimajor axis length of the sky ellipse
        radius_physical = (semimajor_angular * self.galaxy_distance).to("pc", equivalencies=dimensionless_angles())

        # Get the pixelscale in physical units
        pixelscale_angular = self.definition.basic_maps_minimum_average_pixelscale.to("deg")
        #pixelscale_angular = self.reference_wcs.average_pixelscale.to("deg") # in deg
        pixelscale = (pixelscale_angular * self.galaxy_distance).to("pc", equivalencies=dimensionless_angles())

        # BINTREE: (smallest_cell_pixels, min_level, max_mass_fraction)
        # Low-resolution: 10., 6, 1e-5
        # High-resolution: 0.5, 9, 0.5e-6

        # OCTTREE:
        # Low-resolution: 10., 2, 1e-5
        # High-resolution: 0.5, 3, 0.5e-6

        # Because we (currently) can't position the grid exactly as the 2D pixels (rotation etc.),
        # take half of the pixel size to avoid too much interpolation
        min_scale = self.config.dg.scale_range.min * pixelscale
        max_scale = self.config.dg.scale_range.max * pixelscale
        scale_range = QuantityRange(min_scale, max_scale, invert=True)

        # The range of the max mass fraction
        mass_fraction_range = RealRange(self.config.dg.mass_fraction_range.min, self.config.dg.mass_fraction_range.max, invert=True) # must be inverted

        # Set fixed grid properties
        self.dg_generator.grid_type = self.config.dg.grid_type # set grid type
        self.dg_generator.x_radius = radius_physical
        self.dg_generator.y_radius = radius_physical
        self.dg_generator.z_radius = self.definition.dust_scaleheight * self.config.dg.scale_heights

        # Set options
        self.dg_generator.show = False
        self.dg_generator.write = False

        # Set the range of the minimum tree level
        if self.config.dg.grid_type == "bintree": level_range = self.config.dg.bintree_level_range # 6 to 9
        elif self.config.dg.grid_type == "octtree": level_range = self.config.dg.octtree_level_range # 2 to 3
        else: level_range = None

        # Generate the dust grids
        self.dg_generator.run(scale_range=scale_range, level_range=level_range, mass_fraction_range=mass_fraction_range, ngrids=self.config.nrepresentations)

    # -----------------------------------------------------------------

    def build_representations(self):

        """
        Builds one representation per generated dust grid, named 'grid0',
        'grid1', ... in generation order.
        :return:
        """

        # Inform the user
        log.info("Building the representations ...")

        # Loop over the dust grids
        for index, grid in enumerate(self.dg_generator.grids):

            # Create builder
            builder = GalaxyRepresentationBuilder(cwd=self.config.path)

            # Determine a name for this representation
            name = "grid" + str(index)

            # Set name
            builder.config.name = name

            # Set model name
            builder.config.model_name = self.config.model_name

            # Set option to calculate the quality of the dust grid
            builder.config.check_dust_grid_quality = self.config.check_dust_grid_quality

            # Build, passing the dust grid that has been created
            builder.run(dust_grid=grid)

            # Set the path for this representation
            self.representations[name] = builder.representation

    # -----------------------------------------------------------------

    def write(self):

        """
        Writes the output of the generation procedure.
        :return:
        """

        # Inform the user
        log.info("Writing ...")

        # Write the dust grid table
        self.write_dust_grid_table()

    # -----------------------------------------------------------------

    def write_dust_grid_table(self):

        """
        Writes the table of generated dust grids, tagged with the unique
        event name, to the representations directory.
        :return:
        """

        # Inform the user
        log.info("Writing the dust grid table ...")

        # Determine the path
        path = fs.join(self.representations_path, self.event_name + "_dustgrids.dat")

        # Write the dust grids table
        tables.write(self.dg_generator.table, path)
| agpl-3.0 |
rainaashutosh/MyTestRekall | rekall-core/rekall/plugins/windows/registry/printkey_test.py | 8 | 1195 | # Rekall Memory Forensics
#
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Authors:
# Michael Cohen <scudette@gmail.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or (at
# your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
"""Tests for the printkey plugin."""
from rekall import testlib
class TestRegDump(testlib.HashChecker):
    """Test dumping of registry hives."""
    # HashChecker runs the command line and compares hashes of the dumped
    # files against stored baselines; %(tempdir)s is filled in by the
    # test harness.
    PARAMETERS = dict(commandline="regdump --dump_dir %(tempdir)s")
class TestHiveDump(testlib.SimpleTestCase):
    # Dumps the keys of a single hive selected by regex; SimpleTestCase
    # compares the plugin's rendered output with a stored baseline.
    PARAMETERS = dict(
        commandline="hivedump --hive_regex system32.config.default",
    )
| gpl-2.0 |
wangmingjob/OnlineJudge | announcement/serializers.py | 6 | 1082 | # coding=utf-8
from rest_framework import serializers
from account.models import User
from .models import Announcement
class CreateAnnouncementSerializer(serializers.Serializer):
    # Input serializer for creating an announcement.
    title = serializers.CharField(max_length=50)
    content = serializers.CharField(max_length=10000)
    # Whether the announcement is visible to everyone rather than to
    # specific groups.
    is_global = serializers.BooleanField()
    # Group IDs the announcement targets; presumably only consulted when
    # is_global is False -- confirm in the consuming view.
    groups = serializers.ListField(child=serializers.IntegerField(), required=False, default=[])
class AnnouncementSerializer(serializers.ModelSerializer):
    class UserSerializer(serializers.ModelSerializer):
        # Nested representation of the author: username only.
        class Meta:
            model = User
            fields = ["username"]

    created_by = UserSerializer()

    class Meta:
        model = Announcement
        # NOTE(review): no explicit `fields`/`exclude` -- all model fields
        # are serialized; newer DRF versions require the declaration.
class EditAnnouncementSerializer(serializers.Serializer):
    # Input serializer for editing an existing announcement, identified
    # by its primary key.
    id = serializers.IntegerField()
    title = serializers.CharField(max_length=50)
    content = serializers.CharField(max_length=10000)
    visible = serializers.BooleanField()
    is_global = serializers.BooleanField()
    # Group IDs the announcement targets; presumably only consulted when
    # is_global is False -- confirm in the consuming view.
    groups = serializers.ListField(child=serializers.IntegerField(), required=False, default=[])
| mit |
jwlawson/tensorflow | tensorflow/contrib/data/python/kernel_tests/dataset_constructor_op_test.py | 9 | 33826 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for the experimental input pipeline ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import threading
import numpy as np
from tensorflow.contrib.data.python.kernel_tests import dataset_serialization_test_base
from tensorflow.contrib.data.python.ops import batching
from tensorflow.contrib.data.python.ops import dataset_ops
from tensorflow.core.protobuf import config_pb2
from tensorflow.python.client import session
from tensorflow.python.data.util import nest
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors
from tensorflow.python.framework import ops
from tensorflow.python.framework import sparse_tensor
from tensorflow.python.framework import tensor_shape
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import resource_variable_ops
from tensorflow.python.platform import test
class DatasetConstructorTest(test.TestCase):
  """Tests for the `Dataset` constructors (`from_tensors` and friends)."""

  def testFromTensors(self):
    """Test a dataset that represents a single tuple of tensors."""
    components = (np.array(1), np.array([1, 2, 3]), np.array(37.0))
    iterator = (dataset_ops.Dataset.from_tensors(components)
                .make_initializable_iterator())
    init_op = iterator.initializer
    get_next = iterator.get_next()
    # Statically-inferred output shapes must match the numpy inputs.
    self.assertEqual([c.shape for c in components],
                     [t.shape for t in get_next])
    with self.test_session() as sess:
      sess.run(init_op)
      results = sess.run(get_next)
      for component, result_component in zip(components, results):
        self.assertAllEqual(component, result_component)
      # A single-element dataset is exhausted after one get_next.
      with self.assertRaises(errors.OutOfRangeError):
        sess.run(get_next)
  def assertSparseValuesEqual(self, a, b):
    """Asserts two `SparseTensorValue`s have identical contents."""
    self.assertAllEqual(a.indices, b.indices)
    self.assertAllEqual(a.values, b.values)
    self.assertAllEqual(a.dense_shape, b.dense_shape)
  def testFromTensorsSparse(self):
    """Test a dataset that represents a single tuple of sparse tensors."""
    components = (sparse_tensor.SparseTensorValue(
        indices=np.array([[0]]),
        values=np.array([0]),
        dense_shape=np.array([1])),
                  sparse_tensor.SparseTensorValue(
                      indices=np.array([[0, 0], [1, 1]]),
                      values=np.array([-1, 1]),
                      dense_shape=np.array([2, 2])))
    iterator = (
        dataset_ops.Dataset.from_tensors(components)
        .make_initializable_iterator())
    init_op = iterator.initializer
    get_next = iterator.get_next()
    # For sparse components the inferred output shape is the dense shape.
    self.assertEqual(
        [tensor_shape.TensorShape(c.dense_shape) for c in components],
        [shape for shape in iterator.output_shapes])
    with self.test_session() as sess:
      sess.run(init_op)
      results = sess.run(get_next)
      for component, result_component in zip(components, results):
        self.assertSparseValuesEqual(component, result_component)
      with self.assertRaises(errors.OutOfRangeError):
        sess.run(get_next)
  def testFromTensorsMixed(self):
    """Test a dataset of one tuple mixing dense and sparse tensors."""
    components = (np.array(1), np.array([1, 2, 3]), np.array(37.0),
                  sparse_tensor.SparseTensorValue(
                      indices=np.array([[0]]),
                      values=np.array([0]),
                      dense_shape=np.array([1])),
                  sparse_tensor.SparseTensorValue(
                      indices=np.array([[0, 0], [1, 1]]),
                      values=np.array([-1, 1]),
                      dense_shape=np.array([2, 2])))
    iterator = (
        dataset_ops.Dataset.from_tensors(components)
        .make_initializable_iterator())
    init_op = iterator.initializer
    get_next = iterator.get_next()
    # Sparse components report their dense shape; dense ones their own shape.
    self.assertEqual([
        tensor_shape.TensorShape(c.dense_shape)
        if sparse_tensor.is_sparse(c) else c.shape for c in components
    ], [shape for shape in iterator.output_shapes])
    with self.test_session() as sess:
      sess.run(init_op)
      results = sess.run(get_next)
      for component, result_component in zip(components, results):
        if sparse_tensor.is_sparse(component):
          self.assertSparseValuesEqual(component, result_component)
        else:
          self.assertAllEqual(component, result_component)
      with self.assertRaises(errors.OutOfRangeError):
        sess.run(get_next)
  def testFromTensorSlices(self):
    """Test a dataset that represents the slices from a tuple of tensors."""
    components = (
        np.tile(np.array([[1], [2], [3], [4]]), 20), np.tile(
            np.array([[12], [13], [14], [15]]), 22),
        np.array([37.0, 38.0, 39.0, 40.0])
    )
    iterator = (dataset_ops.Dataset.from_tensor_slices(components)
                .make_initializable_iterator())
    init_op = iterator.initializer
    get_next = iterator.get_next()
    # Slicing drops the leading (batch) dimension of each component.
    self.assertEqual([c.shape[1:] for c in components],
                     [t.shape for t in get_next])
    with self.test_session() as sess:
      sess.run(init_op)
      for i in range(4):
        results = sess.run(get_next)
        for component, result_component in zip(components, results):
          self.assertAllEqual(component[i], result_component)
      with self.assertRaises(errors.OutOfRangeError):
        sess.run(get_next)
  def testFromTensorSlicesSparse(self):
    """Test a dataset of slices from a tuple of sparse tensors."""
    components = (sparse_tensor.SparseTensorValue(
        indices=np.array([[0, 0], [1, 0], [2, 0]]),
        values=np.array([0, 0, 0]),
        dense_shape=np.array([3, 1])),
                  sparse_tensor.SparseTensorValue(
                      indices=np.array([[0, 0], [1, 1], [2, 2]]),
                      values=np.array([1, 2, 3]),
                      dense_shape=np.array([3, 3])))
    iterator = (
        dataset_ops.Dataset.from_tensor_slices(components)
        .make_initializable_iterator())
    init_op = iterator.initializer
    get_next = iterator.get_next()
    # Each slice drops the leading dimension of the dense shape.
    self.assertEqual(
        [tensor_shape.TensorShape(c.dense_shape[1:]) for c in components],
        [shape for shape in iterator.output_shapes])
    with self.test_session() as sess:
      sess.run(init_op)
      # Expected per-row slices of the two sparse components above.
      expected = [
          (sparse_tensor.SparseTensorValue(
              indices=np.array([[0]]),
              values=np.array([0]),
              dense_shape=np.array([1])),
           sparse_tensor.SparseTensorValue(
               indices=np.array([[0]]),
               values=np.array([1]),
               dense_shape=np.array([3]))),
          (sparse_tensor.SparseTensorValue(
              indices=np.array([[0]]),
              values=np.array([0]),
              dense_shape=np.array([1])),
           sparse_tensor.SparseTensorValue(
               indices=np.array([[1]]),
               values=np.array([2]),
               dense_shape=np.array([3]))),
          (sparse_tensor.SparseTensorValue(
              indices=np.array([[0]]),
              values=np.array([0]),
              dense_shape=np.array([1])),
           sparse_tensor.SparseTensorValue(
               indices=np.array([[2]]),
               values=np.array([3]),
               dense_shape=np.array([3]))),
      ]
      for i in range(3):
        results = sess.run(get_next)
        for component, result_component in zip(expected[i], results):
          self.assertSparseValuesEqual(component, result_component)
      with self.assertRaises(errors.OutOfRangeError):
        sess.run(get_next)
def testFromTensorSlicesMixed(self):
"""Test an dataset that represents the slices from a tuple of tensors."""
components = (np.tile(np.array([[1], [2], [3]]), 20),
np.tile(np.array([[12], [13], [14]]), 22),
np.array([37.0, 38.0, 39.0]),
sparse_tensor.SparseTensorValue(
indices=np.array([[0, 0], [1, 0], [2, 0]]),
values=np.array([0, 0, 0]),
dense_shape=np.array([3, 1])),
sparse_tensor.SparseTensorValue(
indices=np.array([[0, 0], [1, 1], [2, 2]]),
values=np.array([1, 2, 3]),
dense_shape=np.array([3, 3])))
iterator = (
dataset_ops.Dataset.from_tensor_slices(components)
.make_initializable_iterator())
init_op = iterator.initializer
get_next = iterator.get_next()
self.assertEqual([
tensor_shape.TensorShape(c.dense_shape[1:])
if sparse_tensor.is_sparse(c) else c.shape[1:] for c in components
], [shape for shape in iterator.output_shapes])
with self.test_session() as sess:
sess.run(init_op)
expected = [
(sparse_tensor.SparseTensorValue(
indices=np.array([[0]]),
values=np.array([0]),
dense_shape=np.array([1])),
sparse_tensor.SparseTensorValue(
indices=np.array([[0]]),
values=np.array([1]),
dense_shape=np.array([3]))),
(sparse_tensor.SparseTensorValue(
indices=np.array([[0]]),
values=np.array([0]),
dense_shape=np.array([1])),
sparse_tensor.SparseTensorValue(
indices=np.array([[1]]),
values=np.array([2]),
dense_shape=np.array([3]))),
(sparse_tensor.SparseTensorValue(
indices=np.array([[0]]),
values=np.array([0]),
dense_shape=np.array([1])),
sparse_tensor.SparseTensorValue(
indices=np.array([[2]]),
values=np.array([3]),
dense_shape=np.array([3]))),
]
for i in range(3):
results = sess.run(get_next)
for component, result_component in zip(
(zip(*components[:3])[i] + expected[i]), results):
if sparse_tensor.is_sparse(component):
self.assertSparseValuesEqual(component, result_component)
else:
self.assertAllEqual(component, result_component)
with self.assertRaises(errors.OutOfRangeError):
sess.run(get_next)
  def testFromTensorSlicesWithDict(self):
    """Checks slicing a dict of components into per-element dicts."""
    components = {"foo": [1, 2, 3], "bar": [[4.0], [5.0], [6.0]]}
    iterator = (dataset_ops.Dataset.from_tensor_slices(components)
                .make_initializable_iterator())
    init_op = iterator.initializer
    get_next = iterator.get_next()
    self.assertEqual(dtypes.int32, iterator.output_types["foo"])
    self.assertEqual(dtypes.float32, iterator.output_types["bar"])
    self.assertEqual((), iterator.output_shapes["foo"])
    self.assertEqual((1,), iterator.output_shapes["bar"])
    with self.test_session() as sess:
      sess.run(init_op)
      for i in range(3):
        results = sess.run(get_next)
        self.assertEqual(components["foo"][i], results["foo"])
        self.assertEqual(components["bar"][i], results["bar"])
      with self.assertRaises(errors.OutOfRangeError):
        sess.run(get_next)
  def testFromSparseTensorSlices(self):
    """Test a dataset based on slices of a `tf.SparseTensor`."""
    st = array_ops.sparse_placeholder(dtypes.float64)
    iterator = (dataset_ops.Dataset.from_sparse_tensor_slices(st)
                .make_initializable_iterator())
    init_op = iterator.initializer
    get_next = sparse_tensor.SparseTensor(*iterator.get_next())
    with self.test_session() as sess:
      slices = [[1., 2., 3.], [1.], [1.], [1., 2.], [], [1., 2.], [], [], []]
      # Test with sparse tensor in the appropriate order.
      indices = np.array(
          [[i, j] for i in range(len(slices)) for j in range(len(slices[i]))])
      values = np.array([val for s in slices for val in s])
      # The second dimension is one larger than the longest slice, so the
      # last column of every row is empty.
      dense_shape = np.array([len(slices), max(len(s) for s in slices) + 1])
      sparse_feed = sparse_tensor.SparseTensorValue(indices, values,
                                                    dense_shape)
      sess.run(init_op, feed_dict={st: sparse_feed})
      for i, s in enumerate(slices):
        results = sess.run(get_next)
        self.assertAllEqual(s, results.values)
        expected_indices = np.array(
            [[j] for j in range(len(slices[i]))]).reshape([-1, 1])
        self.assertAllEqual(expected_indices, results.indices)
        self.assertAllEqual(dense_shape[1:], results.dense_shape)
      with self.assertRaises(errors.OutOfRangeError):
        sess.run(get_next)
      # Test with sparse tensor in the reverse order, which is not
      # currently supported.
      reverse_order_indices = indices[::-1, :]
      reverse_order_values = values[::-1]
      sparse_feed = sparse_tensor.SparseTensorValue(
          reverse_order_indices, reverse_order_values, dense_shape)
      with self.assertRaises(errors.UnimplementedError):
        sess.run(init_op, feed_dict={st: sparse_feed})
      # Test with an empty sparse tensor.
      empty_indices = np.empty((0, 4), dtype=np.int64)
      empty_values = np.empty((0,), dtype=np.float64)
      empty_dense_shape = [0, 4, 37, 9]
      sparse_feed = sparse_tensor.SparseTensorValue(empty_indices, empty_values,
                                                    empty_dense_shape)
      sess.run(init_op, feed_dict={st: sparse_feed})
      with self.assertRaises(errors.OutOfRangeError):
        sess.run(get_next)
  # pylint: disable=g-long-lambda,unnecessary-lambda
  def testNestedStructure(self):
    """Checks output_types/output_shapes propagate through transformations."""
    components = (np.array([1, 2, 3]), (np.array([4., 5.]), np.array([6., 7.])),
                  np.array([8, 9, 10]))
    dataset = dataset_ops.Dataset.from_tensors(components)
    self.assertEquals((dtypes.int64, (dtypes.float64, dtypes.float64),
                       dtypes.int64), dataset.output_types)
    self.assertEquals(([3], ([2], [2]), [3]), dataset.output_shapes)
    # Each structure-preserving transformation must keep types and shapes.
    dataset = dataset.shuffle(10, 10)
    self.assertEquals((dtypes.int64, (dtypes.float64, dtypes.float64),
                       dtypes.int64), dataset.output_types)
    self.assertEquals(([3], ([2], [2]), [3]), dataset.output_shapes)
    dataset = dataset.repeat(-1)
    self.assertEquals((dtypes.int64, (dtypes.float64, dtypes.float64),
                       dtypes.int64), dataset.output_types)
    self.assertEquals(([3], ([2], [2]), [3]), dataset.output_shapes)
    dataset = dataset.filter(lambda x, y, z: True)
    self.assertEquals((dtypes.int64, (dtypes.float64, dtypes.float64),
                       dtypes.int64), dataset.output_types)
    self.assertEquals(([3], ([2], [2]), [3]), dataset.output_shapes)
    dataset = dataset.take(5)
    self.assertEquals((dtypes.int64, (dtypes.float64, dtypes.float64),
                       dtypes.int64), dataset.output_types)
    self.assertEquals(([3], ([2], [2]), [3]), dataset.output_shapes)
    # map/flat_map may restructure the element: ((int, int), (float, float)).
    dataset = dataset.map(lambda x, y, z: ((x, z), (y[0], y[1])))
    self.assertEquals(((dtypes.int64, dtypes.int64),
                       (dtypes.float64, dtypes.float64)), dataset.output_types)
    self.assertEquals((([3], [3]), ([2], [2])), dataset.output_shapes)
    dataset = dataset.flat_map(
        lambda x, y: dataset_ops.Dataset.from_tensors(((x[0], x[1]),
                                                       (y[0], y[1])))
    )
    self.assertEquals(((dtypes.int64, dtypes.int64),
                       (dtypes.float64, dtypes.float64)), dataset.output_types)
    self.assertEquals((([3], [3]), ([2], [2])), dataset.output_shapes)
    # Batching prepends an unknown (None) leading dimension.
    dataset = dataset.batch(32)
    self.assertEquals(((dtypes.int64, dtypes.int64),
                       (dtypes.float64, dtypes.float64)), dataset.output_types)
    self.assertEquals((([None, 3], [None, 3]), ([None, 2], [None, 2])),
                      nest.pack_sequence_as(dataset.output_shapes, [
                          s.as_list()
                          for s in nest.flatten(dataset.output_shapes)
                      ]))
    iterator = dataset.make_one_shot_iterator()
    (w, x), (y, z) = iterator.get_next()
    self.assertEquals(dtypes.int64, w.dtype)
    self.assertEquals(dtypes.int64, x.dtype)
    self.assertEquals(dtypes.float64, y.dtype)
    self.assertEquals(dtypes.float64, z.dtype)
    self.assertEquals([None, 3], w.shape.as_list())
    self.assertEquals([None, 3], x.shape.as_list())
    self.assertEquals([None, 2], y.shape.as_list())
    self.assertEquals([None, 2], z.shape.as_list())
    iterator = dataset.make_initializable_iterator()
    (w, x), (y, z) = iterator.get_next()
    self.assertEquals(dtypes.int64, w.dtype)
    self.assertEquals(dtypes.int64, x.dtype)
    self.assertEquals(dtypes.float64, y.dtype)
    self.assertEquals(dtypes.float64, z.dtype)
    self.assertEquals([None, 3], w.shape.as_list())
    self.assertEquals([None, 3], x.shape.as_list())
    self.assertEquals([None, 2], y.shape.as_list())
    self.assertEquals([None, 2], z.shape.as_list())
    # Define a separate set of components with matching leading
    # dimension for the from-slices constructor.
    components_for_slices = (np.array([1, 2, 3]), (np.array(
        [4., 5., 6.]), np.array([7., 8., 9.])), np.array([10, 11, 12]))
    dataset = dataset_ops.Dataset.from_tensor_slices(components_for_slices)
    self.assertEquals((dtypes.int64, (dtypes.float64, dtypes.float64),
                       dtypes.int64), dataset.output_types)
    self.assertEquals(([], ([], []), []), dataset.output_shapes)
  def testNestedDict(self):
    """Checks type/shape inference for a nested dict of components."""
    components = {"a": {"aa": 1, "ab": [2.0, 2.0]}, "b": [3, 3, 3]}
    dataset = dataset_ops.Dataset.from_tensors(components)
    self.assertEquals(dtypes.int32, dataset.output_types["a"]["aa"])
    self.assertEquals(dtypes.float32, dataset.output_types["a"]["ab"])
    self.assertEquals(dtypes.int32, dataset.output_types["b"])
    self.assertEquals([], dataset.output_shapes["a"]["aa"])
    self.assertEquals([2], dataset.output_shapes["a"]["ab"])
    self.assertEquals([3], dataset.output_shapes["b"])
  def testNonSequenceNestedStructure(self):
    """Checks a bare ndarray component (no tuple/dict nesting) propagates."""
    components = np.array([1, 2, 3])
    dataset = dataset_ops.Dataset.from_tensors(components)
    self.assertEquals(dtypes.int64, dataset.output_types)
    self.assertEquals([3], dataset.output_shapes)
    dataset = dataset.filter(
        lambda x: math_ops.reduce_all(math_ops.equal(x, components)))
    self.assertEquals(dtypes.int64, dataset.output_types)
    self.assertEquals([3], dataset.output_shapes)
    dataset = dataset.map(lambda x: array_ops.stack([x, x]))
    self.assertEquals(dtypes.int64, dataset.output_types)
    self.assertEquals([2, 3], dataset.output_shapes)
    dataset = dataset.flat_map(
        lambda x: dataset_ops.Dataset.from_tensor_slices(x))
    self.assertEquals(dtypes.int64, dataset.output_types)
    self.assertEquals([3], dataset.output_shapes)
    iterator = dataset.make_one_shot_iterator()
    get_next = iterator.get_next()
    self.assertEquals(dtypes.int64, get_next.dtype)
    self.assertEquals([3], get_next.shape)
  def _testFromGenerator(self, generator, elem_sequence, num_repeats):
    """Runs `generator` through an initializable iterator and verifies that
    `elem_sequence` is produced `num_repeats` times, twice over (to check
    reinitialization)."""
    iterator = (
        dataset_ops.Dataset.from_generator(generator, output_types=dtypes.int64)
        .repeat(num_repeats)
        .prefetch(5)
        .make_initializable_iterator())
    init_op = iterator.initializer
    get_next = iterator.get_next()
    with self.test_session() as sess:
      for _ in range(2):  # Run twice to test reinitialization.
        sess.run(init_op)
        for _ in range(num_repeats):
          for elem in elem_sequence:
            self.assertAllEqual(elem, sess.run(get_next))
        with self.assertRaises(errors.OutOfRangeError):
          sess.run(get_next)
  def _testFromGeneratorOneShot(self, generator, elem_sequence, num_repeats):
    """Like `_testFromGenerator` but with a one-shot (single-pass) iterator."""
    iterator = (
        dataset_ops.Dataset.from_generator(generator, output_types=dtypes.int64)
        .repeat(num_repeats)
        .prefetch(5)
        .make_one_shot_iterator())
    get_next = iterator.get_next()
    with self.test_session() as sess:
      for _ in range(num_repeats):
        for elem in elem_sequence:
          self.assertAllEqual(elem, sess.run(get_next))
      with self.assertRaises(errors.OutOfRangeError):
        sess.run(get_next)
def testFromGeneratorUsingFunction(self):
def generator():
for i in range(1, 100):
yield [i] * i
elem_sequence = list(generator())
self._testFromGenerator(generator, elem_sequence, 1)
self._testFromGenerator(generator, elem_sequence, 5)
self._testFromGeneratorOneShot(generator, elem_sequence, 1)
self._testFromGeneratorOneShot(generator, elem_sequence, 5)
def testFromGeneratorUsingList(self):
generator = lambda: [[i] * i for i in range(1, 100)]
elem_sequence = list(generator())
self._testFromGenerator(generator, elem_sequence, 1)
self._testFromGenerator(generator, elem_sequence, 5)
  def testFromGeneratorUsingNdarray(self):
    """Checks `Dataset.from_generator` when the callable returns an ndarray."""
    generator = lambda: np.arange(100, dtype=np.int64)
    elem_sequence = list(generator())
    self._testFromGenerator(generator, elem_sequence, 1)
    self._testFromGenerator(generator, elem_sequence, 5)
  def testFromGeneratorUsingGeneratorExpression(self):
    """Checks `Dataset.from_generator` with a lambda-wrapped genexpr."""
    # NOTE(mrry): Generator *expressions* are not repeatable (or in
    # general reusable), because they eagerly evaluate the `for`
    # expression as `iter(range(1, 100))` and discard the means of
    # reconstructing `range(1, 100)`. Wrapping the generator
    # expression in a `lambda` makes it repeatable.
    generator = lambda: ([i] * i for i in range(1, 100))
    elem_sequence = list(generator())
    self._testFromGenerator(generator, elem_sequence, 1)
    self._testFromGenerator(generator, elem_sequence, 5)
  def testFromMultipleConcurrentGenerators(self):
    """Exercises many simultaneously-active generator-backed iterators."""
    num_inner_repeats = 5
    num_outer_repeats = 100
    def generator():
      for i in range(1, 10):
        yield ([i] * i, [i, i ** 2, i ** 3])
    input_list = list(generator())
    # The interleave transformation is essentially a flat map that
    # draws from multiple input datasets concurrently (in a cyclic
    # fashion). By placing `Dataset.from_generator()` inside an
    # interleave, we test its behavior when multiple iterators are
    # active at the same time; by additionally prefetching inside the
    # interleave, we create the possibility of parallel (modulo GIL)
    # invocations to several iterators created by the same dataset.
    def interleave_fn(_):
      return (dataset_ops.Dataset.from_generator(
          generator, output_types=(dtypes.int64, dtypes.int64),
          output_shapes=([None], [3]))
              .repeat(num_inner_repeats).prefetch(5))
    iterator = (
        dataset_ops.Dataset.range(num_outer_repeats)
        .interleave(interleave_fn, cycle_length=10,
                    block_length=len(input_list))
        .make_initializable_iterator())
    init_op = iterator.initializer
    get_next = iterator.get_next()
    with self.test_session() as sess:
      sess.run(init_op)
      for _ in range(num_inner_repeats * num_outer_repeats):
        for elem in input_list:
          val0, val1 = sess.run(get_next)
          self.assertAllEqual(elem[0], val0)
          self.assertAllEqual(elem[1], val1)
      with self.assertRaises(errors.OutOfRangeError):
        sess.run(get_next)
  def testFromGeneratorsRunningInParallel(self):
    """Proves several generator-backed iterators can be in flight at once."""
    num_parallel_iterators = 3
    # Define shared state that multiple iterator instances will access to
    # demonstrate their concurrent activity.
    lock = threading.Lock()
    condition = threading.Condition(lock)
    next_ticket = [0]  # GUARDED_BY(lock)
    def generator():
      # NOTE(mrry): We yield one element before the barrier, because
      # the current implementation of `Dataset.interleave()` must
      # fetch one element from each incoming dataset to start the
      # prefetching.
      yield 0
      # Define a barrier that `num_parallel_iterators` iterators must enter
      # before any can proceed. Demonstrates that multiple iterators may be
      # active at the same time.
      condition.acquire()
      ticket = next_ticket[0]
      next_ticket[0] += 1
      if ticket == num_parallel_iterators - 1:
        # The last iterator to join the barrier notifies the others.
        condition.notify_all()
      else:
        # Wait until the last iterator enters the barrier.
        while next_ticket[0] < num_parallel_iterators:
          condition.wait()
      condition.release()
      yield 1
    # As in `testFromMultipleConcurrentGenerators()`, we use a combination of
    # `Dataset.interleave()` and `Dataset.prefetch()` to cause multiple
    # iterators to be active concurrently.
    def interleave_fn(_):
      return dataset_ops.Dataset.from_generator(
          generator, output_types=dtypes.int64, output_shapes=[]).prefetch(2)
    iterator = (
        dataset_ops.Dataset.range(num_parallel_iterators)
        .interleave(
            interleave_fn, cycle_length=num_parallel_iterators, block_length=1)
        .make_initializable_iterator())
    init_op = iterator.initializer
    get_next = iterator.get_next()
    with self.test_session() as sess:
      sess.run(init_op)
      for elem in [0, 1]:
        for _ in range(num_parallel_iterators):
          self.assertAllEqual(elem, sess.run(get_next))
      with self.assertRaises(errors.OutOfRangeError):
        sess.run(get_next)
  def testFromGeneratorImplicitConversion(self):
    """Checks yielded values are converted to each requested integer dtype."""
    def generator():
      yield [1]
      yield [2]
      yield [3]
    for dtype in [dtypes.int8, dtypes.int32, dtypes.int64]:
      iterator = (dataset_ops.Dataset.from_generator(
          generator, output_types=dtype, output_shapes=[1])
                  .make_initializable_iterator())
      init_op = iterator.initializer
      get_next = iterator.get_next()
      self.assertEqual(dtype, get_next.dtype)
      with self.test_session() as sess:
        sess.run(init_op)
        for expected in [[1], [2], [3]]:
          next_val = sess.run(get_next)
          self.assertEqual(dtype.as_numpy_dtype, next_val.dtype)
          self.assertAllEqual(expected, next_val)
        with self.assertRaises(errors.OutOfRangeError):
          sess.run(get_next)
  def testFromGeneratorTypeError(self):
    """A non-convertible element raises in-stream without killing the
    iterator; subsequent elements are still produced."""
    def generator():
      yield np.array([1, 2, 3], dtype=np.int64)
      yield np.array([4, 5, 6], dtype=np.int64)
      yield "ERROR"
      yield np.array([7, 8, 9], dtype=np.int64)
    iterator = (dataset_ops.Dataset.from_generator(
        generator, output_types=dtypes.int64, output_shapes=[3])
                .make_initializable_iterator())
    init_op = iterator.initializer
    get_next = iterator.get_next()
    with self.test_session() as sess:
      sess.run(init_op)
      self.assertAllEqual([1, 2, 3], sess.run(get_next))
      self.assertAllEqual([4, 5, 6], sess.run(get_next))
      # NOTE(review): "invalid literal for long()" is CPython 2's integer
      # conversion message; Python 3 phrases it differently — confirm which
      # runtime this regex is meant to match.
      with self.assertRaisesOpError(r"invalid literal for long\(\)"):
        sess.run(get_next)
      self.assertAllEqual([7, 8, 9], sess.run(get_next))
      with self.assertRaises(errors.OutOfRangeError):
        sess.run(get_next)
  def testFromGeneratorShapeError(self):
    """A wrongly-shaped element raises in-stream; later elements still
    arrive."""
    def generator():
      yield np.array([1, 2, 3], dtype=np.int64)
      yield np.array([4, 5, 6], dtype=np.int64)
      yield np.array([7, 8, 9, 10], dtype=np.int64)
      yield np.array([11, 12, 13], dtype=np.int64)
    iterator = (dataset_ops.Dataset.from_generator(
        generator, output_types=dtypes.int64, output_shapes=[3])
                .make_initializable_iterator())
    init_op = iterator.initializer
    get_next = iterator.get_next()
    with self.test_session() as sess:
      sess.run(init_op)
      self.assertAllEqual([1, 2, 3], sess.run(get_next))
      self.assertAllEqual([4, 5, 6], sess.run(get_next))
      with self.assertRaisesOpError(r"element of shape \(3,\) was expected"):
        sess.run(get_next)
      self.assertAllEqual([11, 12, 13], sess.run(get_next))
      with self.assertRaises(errors.OutOfRangeError):
        sess.run(get_next)
  def testSplitPipelineFailsWithPlacementError(self):
    """A pipeline touching resources on two devices must fail placement."""
    with session.Session(
        target="",
        config=config_pb2.ConfigProto(device_count={"CPU": 2})) as sess:
      dataset = dataset_ops.Dataset.from_tensors(0)
      # Define a pipeline that attempts to use variables on two
      # different devices.
      #
      # Initialize the variables before creating the iterator, to avoid the
      # placement algorithm overriding the DT_RESOURCE colocation constraints.
      with ops.device("/cpu:0"):
        var_0 = resource_variable_ops.ResourceVariable(initial_value=0)
        dataset = dataset.map(lambda x: x + var_0.read_value())
        sess.run(var_0.initializer)
      with ops.device("/cpu:1"):
        var_1 = resource_variable_ops.ResourceVariable(initial_value=0)
        dataset = dataset.map(lambda x: x + var_1.read_value())
        sess.run(var_1.initializer)
      iterator = dataset.make_initializable_iterator()
      with self.assertRaisesRegexp(
          errors.InvalidArgumentError,
          "Trying to access resource located in device"):
        sess.run(iterator.initializer)
  def testRestructureDataset(self):
    """Checks `_RestructuredDataset` accepts only compatible types/shapes."""
    components = (array_ops.placeholder(dtypes.int32),
                  (array_ops.placeholder(dtypes.int32, shape=[None]),
                   array_ops.placeholder(dtypes.int32, shape=[20, 30])))
    dataset = dataset_ops.Dataset.from_tensors(components)
    i32 = dtypes.int32
    test_cases = [((i32, i32, i32), None),
                  (((i32, i32), i32), None),
                  ((i32, i32, i32), (None, None, None)),
                  ((i32, i32, i32), ([17], [17], [20, 30]))]
    for new_types, new_shape_lists in test_cases:
      # pylint: disable=protected-access
      new = batching._RestructuredDataset(dataset, new_types, new_shape_lists)
      # pylint: enable=protected-access
      self.assertEqual(new_types, new.output_types)
      if new_shape_lists is not None:
        for expected_shape_list, shape in zip(
            nest.flatten(new_shape_lists), nest.flatten(new.output_shapes)):
          if expected_shape_list is None:
            self.assertIs(None, shape.ndims)
          else:
            self.assertEqual(expected_shape_list, shape.as_list())
    # Incompatible dtypes, arities, or shapes must be rejected.
    fail_cases = [((i32, dtypes.int64, i32), None),
                  ((i32, i32, i32, i32), None),
                  ((i32, i32, i32), ((None, None), None)),
                  ((i32, i32, i32), (None, None, None, None)),
                  ((i32, i32, i32), (None, [None], [21, 30]))]
    for new_types, new_shape_lists in fail_cases:
      with self.assertRaises(ValueError):
        # pylint: disable=protected-access
        new = batching._RestructuredDataset(dataset, new_types, new_shape_lists)
        # pylint: enable=protected-access
class DatasetConstructorSerializationTest(
    dataset_serialization_test_base.DatasetSerializationTestBase):
  """Checkpoint/restore (serialization) tests for the dataset constructors."""

  def _build_tensor_dataset(self, variable_array):
    # One varying component plus two fixed ones.
    components = (variable_array, np.array([1, 2, 3]), np.array(37.0))
    return dataset_ops.Dataset.from_tensors(components)

  def testFromTensorsCore(self):
    # Equal length components
    arr = np.array(1)
    num_outputs = 1
    diff_arr = np.array(2)
    self.run_core_tests(lambda: self._build_tensor_dataset(arr),
                        lambda: self._build_tensor_dataset(diff_arr),
                        num_outputs)

  def _build_tensor_slices_dataset(self, components):
    return dataset_ops.Dataset.from_tensor_slices(components)

  def testFromTensorSlicesCore(self):
    # Equal length components
    components = (np.tile(np.array([[1], [2], [3], [4]]), 20),
                  np.tile(np.array([[12], [13], [14], [15]]), 22),
                  np.array([37.0, 38.0, 39.0, 40.0]))
    diff_comp = (np.tile(np.array([[1], [2], [3], [4]]), 20),
                 np.tile(np.array([[5], [6], [7], [8]]), 22),
                 np.array([1.0, 2.0, 3.0, 4.0]))
    dict_components = {"foo": [1, 2, 3], "bar": [[4.0], [5.0], [6.0]]}
    self.run_core_tests(lambda: self._build_tensor_slices_dataset(components),
                        lambda: self._build_tensor_slices_dataset(diff_comp), 4)
    self.run_core_tests(
        lambda: self._build_tensor_slices_dataset(dict_components), None, 3)

  def _build_sparse_tensor_slice_dataset(self, slices):
    # Convert the ragged python lists into a single SparseTensor.
    indices = np.array(
        [[i, j] for i in range(len(slices)) for j in range(len(slices[i]))],
        dtype=np.int64)
    values = np.array([val for s in slices for val in s], dtype=np.float64)
    dense_shape = np.array(
        [len(slices), max(len(s) for s in slices) + 1], dtype=np.int64)
    sparse_components = sparse_tensor.SparseTensor(indices, values, dense_shape)
    return dataset_ops.Dataset.from_sparse_tensor_slices(sparse_components)

  def testFromSparseTensorSlicesCore(self):
    slices = [[1., 2., 3.], [1.], [1.], [1., 2.], [], [1., 2.], [], [], []]
    diff_slices = [[1., 2.], [2.], [2., 3., 4.], [], [], []]
    self.run_core_tests(
        lambda: self._build_sparse_tensor_slice_dataset(slices),
        lambda: self._build_sparse_tensor_slice_dataset(diff_slices),
        9,
        sparse_tensors=True)
# Entry point: run all tests in this module.
if __name__ == "__main__":
  test.main()
| apache-2.0 |
Cue/scales | src/greplin/scales/bottlehandler.py | 3 | 1595 |
from six import StringIO
from greplin import scales
from greplin.scales import formats, util
from bottle import abort, request, response, run, Bottle
import functools
def bottlestats(server_name, path=''):
  """Renders a GET request, by showing this nodes stats and children.

  Args:
    server_name: Name displayed in the HTML header.
    path: Slash-separated path selecting a subtree of the stats.
  Returns:
    The rendered stats page (HTML or JSON) as a string.
  """
  path = path.lstrip('/')
  parts = path.split('/')
  if not parts[0]:
    parts = parts[1:]
  stat_dict = util.lookup(scales.getStats(), parts)
  if stat_dict is None:
    # bottle.abort raises HTTPError, so nothing after it runs; the
    # unreachable `return` that used to follow has been removed.
    abort(404, "Not Found")

  output = StringIO()
  output_format = request.query.get('format', 'html')
  query = request.query.get('query', None)
  # Set content_type before writing in every branch, for consistency.
  if output_format == 'json':
    response.content_type = "application/json"
    formats.jsonFormat(output, stat_dict, query)
  elif output_format == 'prettyjson':
    response.content_type = "application/json"
    formats.jsonFormat(output, stat_dict, query, pretty=True)
  else:
    response.content_type = "text/html"
    formats.htmlHeader(output, '/' + path, server_name, query)
    formats.htmlFormat(output, tuple(parts), stat_dict, query)
  return output.getvalue()
def register_stats_handler(app, server_name, prefix='/status/'):
  """Register the stats handler with a Bottle app, serving routes
  with a given prefix. The prefix defaults to '/status/', which is
  generally what you want."""
  if not prefix.endswith('/'):
    prefix += '/'
  handler = functools.partial(bottlestats, server_name)
  # Serve both the subtree root and arbitrary nested stat paths.
  app.get(prefix, callback=handler)
  app.get(prefix + '<path:path>', callback=handler)
| apache-2.0 |
lesteve/sphinx-gallery | examples/plot_function_identifier.py | 1 | 1731 | # -*- coding: utf-8 -*-
"""
Identifying function names in a script
======================================
This demonstrates how Sphinx-Gallery identifies function names to figure out
which functions are called in the script and to which module do they belong.
"""
# Code source: Óscar Nájera
# License: BSD 3 clause
import os  # noqa, analysis:ignore
import matplotlib.pyplot as plt
from sphinx_gallery.backreferences import identify_names

# Use the stdlib `os` module's own source file as the script to analyze.
filename = os.__file__.replace('.pyc', '.py')
names = identify_names(filename)
figheight = len(names) + .5
fontsize = 20

###############################################################################
# Sphinx-Gallery examines both the executed code itself, as well as the
# documentation blocks (such as this one, or the top-level one),
# to find backreferences. This means that by writing :obj:`numpy.sin`
# and :obj:`numpy.exp` here, a backreference will be created even though
# they are not explicitly used in the code. This is useful in particular when
# functions return classes -- if you add them to the documented blocks of
# examples that use them, they will be shown in the backreferences.

# One row per identified name: the name on the left, an arrow box with the
# module it was resolved to on the right.
fig = plt.figure(figsize=(7.5, 8))
for i, (name, obj) in enumerate(names.items()):
    fig.text(0.55, (float(len(names)) - 0.5 - i) / figheight,
             name,
             ha="right",
             size=fontsize,
             transform=fig.transFigure,
             bbox=dict(boxstyle='square', fc="w", ec="k"))
    fig.text(0.6, (float(len(names)) - 0.5 - i) / figheight,
             obj["module"],
             ha="left",
             size=fontsize,
             transform=fig.transFigure,
             bbox=dict(boxstyle='larrow', fc="w", ec="k"))
#
plt.draw()
plt.show()
| bsd-3-clause |
mscuthbert/abjad | abjad/tools/pitchtools/NamedPitch.py | 2 | 53176 | # -*- encoding: utf-8 -*-
import collections
import math
import numbers
from abjad.tools import mathtools
from abjad.tools import stringtools
from abjad.tools.pitchtools.Pitch import Pitch
class NamedPitch(Pitch):
'''A named pitch.
.. container:: example
**Example 1.** Initializes from pitch name:
::
>>> pitch = NamedPitch("cs''")
>>> show(pitch) # doctest: +SKIP
.. container:: example
**Example 2.** Initializes from pitch-class / octave string:
::
>>> pitch = NamedPitch('C#5')
>>> show(pitch) # doctest: +SKIP
'''
### CLASS VARIABLES ###
__slots__ = (
'_alteration_in_semitones',
'_diatonic_pitch_class_number',
'_octave_number',
)
### INITIALIZER ###
    def __init__(self, *args):
        """Initializes a named pitch from one of several argument forms:
        a pitch number, another pitch (named or numbered), a pitch class,
        a pitch name, a pitch-class/octave string, a (pitch-class, octave)
        pair, or no arguments (defaulting to c in octave 4).
        """
        from abjad.tools import pitchtools
        # A single non-string iterable argument is unpacked, so that
        # NamedPitch(('c', 4)) behaves like NamedPitch('c', 4).
        if (args and
            isinstance(args[0], collections.Iterable) and
            not stringtools.is_string(args[0]) and
            len(args) == 1):
            args = args[0]
        if len(args) == 1:
            if isinstance(args[0], (int, float)):
                arg = mathtools.integer_equivalent_number_to_integer(
                    float(args[0]))
                self._initialize_by_pitch_number(arg)
            elif isinstance(args[0], type(self)):
                self._initialize_by_named_pitch(*args)
            elif isinstance(args[0], pitchtools.NumberedPitch):
                self._initialize_by_pitch_number(
                    args[0].pitch_number)
            elif isinstance(args[0], pitchtools.PitchClass):
                # A bare pitch class gets a default octave of 4.
                self._initialize_by_named_pitch_class_and_octave_number(
                    pitchtools.NamedPitchClass(args[0]), 4)
            elif hasattr(args[0], 'named_pitch'):
                self._initialize_by_named_pitch(args[0].named_pitch)
            elif self.is_pitch_class_octave_number_string(args[0]):
                self._initialize_by_pitch_class_octave_number_string(*args)
            elif isinstance(args[0], str):
                self._initialize_by_pitch_name(*args)
            else:
                message = 'can not initialize {} from {!r}.'
                message = message.format(type(self).__name__, args)
                raise ValueError(message)
        elif len(args) == 2:
            if isinstance(args[0], str):
                self._initialize_by_pitch_class_name_and_octave_number(*args)
            elif isinstance(args[0], pitchtools.NamedPitchClass):
                self._initialize_by_named_pitch_class_and_octave_number(*args)
            elif isinstance(args[0], (int, float)):
                if isinstance(args[1], str):
                    self._initialize_by_pitch_number_and_diatonic_pitch_class_name(
                        *args)
                elif isinstance(args[1], pitchtools.NamedPitchClass):
                    self._initialize_by_pitch_number_and_named_pitch_class(*args)
                else:
                    raise TypeError
            else:
                message = 'can not initialize {}: {!r}.'
                message = message.format(type(self).__name__, args)
                raise ValueError(message)
        elif len(args) == 0:
            # Default pitch: c in octave 4.
            self._initialize_by_pitch_class_name_and_octave_number('c', 4)
        else:
            message = 'can not initialize {}: {!r}.'
            message = message.format(type(self).__name__, args)
            raise ValueError(message)
### SPECIAL METHODS ###
def __add__(self, interval):
r'''Adds named pitch to `interval`.
.. container:: example
**Example 1.** Adds an ascending major second to C#5:
::
>>> NamedPitch("cs''") + pitchtools.NamedInterval('+M2')
NamedPitch("ds''")
.. container:: example
**Example 2.** Adds a descending major second to C#5:
::
>>> NamedPitch("cs''") + pitchtools.NamedInterval('-M2')
NamedPitch("b'")
Returns new named pitch.
'''
from abjad.tools import pitchtools
interval = pitchtools.NamedInterval(interval)
return pitchtools.transpose_pitch_carrier_by_interval(self, interval)
def __copy__(self, *args):
r'''Copies named pitch.
.. container:: example
**Example 1.** Copies C#5:
::
>>> import copy
>>> copy.copy(NamedPitch("cs''"))
NamedPitch("cs''")
.. container:: example
**Example 2.** Copies Db5:
::
>>> copy.copy(NamedPitch("df''"))
NamedPitch("df''")
Returns new named pitch.
'''
return type(self)(self)
def __eq__(self, arg):
r'''Is true when `arg` is a named pitch equal to this named pitch.
Otherwise false.
.. container:: example
**Example 1.** C#5 equals C#5:
::
>>> NamedPitch('C#5') == NamedPitch("cs''")
True
.. container:: example
**Example 2.** C#5 does not equal Db5:
::
>>> NamedPitch('C#5') == NamedPitch('Db5')
False
Returns boolean.
'''
try:
arg = type(self)(arg)
if str(self) == str(arg):
return True
return False
except (TypeError, ValueError):
return False
def __float__(self):
r'''Changes named pitch to float.
.. container:: example
**Example 1.** Changes C#5 to float:
::
>>> float(NamedPitch('C#5'))
13.0
.. container:: example
**Example 2.** Changes Ctqs5 to float:
::
>>> float(NamedPitch('C#+5'))
13.5
Returns float.
'''
return float(self.pitch_number)
    def __ge__(self, arg):
        r'''Is true when named pitch is greater than or equal to `arg`.
        Otherwise false.

        Returns boolean.
        '''
        from abjad.tools import pitchtools
        if isinstance(arg, type(self)):
            # Order first by diatonic pitch number, then break ties by
            # accidental alteration (so C# >= C natural, etc.).
            return self.diatonic_pitch_number > arg.diatonic_pitch_number or \
                (self.diatonic_pitch_number == arg.diatonic_pitch_number and
                self.alteration_in_semitones >= arg.alteration_in_semitones)
        elif isinstance(arg, pitchtools.PitchRange):
            # A pitch is >= a range only when at or above the range's top.
            return self >= arg.stop_pitch
        else:
            # Fall back to coercing `arg` to a named pitch; incomparable
            # values yield False rather than raising.
            try:
                arg = type(self)(arg)
                return self.__ge__(arg)
            except (TypeError, ValueError):
                pass
        return False
def __getnewargs__(self):
r'''Gets new arguments.
Returns tuple.
'''
return (self.pitch_name,)
    def __gt__(self, arg):
        r'''Is true when named pitch is greater than `arg`. Otherwise false.

        Returns boolean.
        '''
        from abjad.tools import pitchtools
        if isinstance(arg, type(self)):
            # Order first by diatonic pitch number; ties broken strictly
            # by accidental alteration.
            return (self.diatonic_pitch_number > arg.diatonic_pitch_number or
                (self.diatonic_pitch_number == arg.diatonic_pitch_number and
                self.alteration_in_semitones > arg.alteration_in_semitones))
        elif isinstance(arg, pitchtools.PitchRange):
            # A pitch is > a range only when strictly above the range's top.
            return self > arg.stop_pitch
        else:
            # Coerce `arg` to a named pitch; incomparable values yield
            # False rather than raising.
            try:
                arg = type(self)(arg)
                return self.__gt__(arg)
            except (TypeError, ValueError):
                pass
        return False
def __hash__(self):
r'''Required to be explicitly re-defined on Python 3 if
__eq__ changes.
Returns integer.
'''
return super(NamedPitch, self).__hash__()
def __int__(self):
r'''Changes named pitch to integer.
.. container:: example
**Example 1.** Changes C#5 to integer:
::
>>> int(NamedPitch('C#5'))
13
.. container:: example
**Example 2.** Changes Db5 to integer:
::
>>> int(NamedPitch('Db5'))
13
Returns integer.
'''
if not mathtools.is_integer_equivalent_number(self.pitch_number):
raise TypeError
return int(self.pitch_number)
    def __le__(self, arg):
        r'''Is true when named pitch is less than or equal to `arg`. Otherwise
        false.

        Returns boolean.
        '''
        from abjad.tools import pitchtools
        if isinstance(arg, type(self)):
            # Compare diatonic pitch numbers first; on a tie, compare
            # accidental alterations; full ties are equal (<= holds).
            if not self.diatonic_pitch_number == arg.diatonic_pitch_number:
                return self.diatonic_pitch_number <= arg.diatonic_pitch_number
            if not self.alteration_in_semitones == arg.alteration_in_semitones:
                return self.alteration_in_semitones <= \
                    arg.alteration_in_semitones
            return True
        elif isinstance(arg, pitchtools.PitchRange):
            # A pitch is <= a range only when at or below the range's bottom.
            return self <= arg.start_pitch
        else:
            # Coerce `arg` to a named pitch; incomparable values yield
            # False rather than raising.
            try:
                arg = type(self)(arg)
                return self.__le__(arg)
            except (TypeError, ValueError):
                pass
        return False
    def __lt__(self, arg):
        r'''Is true when named pitch is less than `arg`. Otherwise false.

        Returns boolean.
        '''
        from abjad.tools import pitchtools
        if isinstance(arg, type(self)):
            # Order first by diatonic pitch number; ties broken strictly
            # by accidental alteration.
            return (self.diatonic_pitch_number < arg.diatonic_pitch_number or
                (self.diatonic_pitch_number == arg.diatonic_pitch_number and
                self.alteration_in_semitones < arg.alteration_in_semitones))
        elif isinstance(arg, pitchtools.PitchRange):
            # A pitch is < a range only when strictly below the range's
            # bottom.
            return self < arg.start_pitch
        elif arg is None:
            # NOTE(review): any pitch sorts before None by design here —
            # the other comparisons do not special-case None.
            return True
        else:
            # Coerce `arg` to a named pitch; incomparable values yield
            # False rather than raising.
            try:
                arg = type(self)(arg)
                return self.__lt__(arg)
            except (TypeError, ValueError):
                pass
        return False
def __ne__(self, arg):
r'''Is true when named pitch does not equal `arg`. Otherwise false.
.. container:: example
**Example 1.** C#5 is not equal to D#5:
::
>>> NamedPitch("cs''") != NamedPitch("ds''")
True
.. container:: example
**Example 2.** C#5 is equal to C#5:
::
>>> NamedPitch("cs''") != NamedPitch("cs''")
False
Returns boolean.
'''
return not self == arg
def __str__(self):
r'''Gets string representation of named pitch.
.. container:: example
**Example 1.** Gets string representation of C#5:
::
>>> str(NamedPitch("cs''"))
"cs''"
.. container:: example
**Example 2.** Gets string representation of Db5:
::
>>> str(NamedPitch("df''"))
"df''"
Returns string.
'''
return self.pitch_name
    def __sub__(self, arg):
        r'''Subtracts `arg` from named pitch.

        ::

            >>> NamedPitch("cs''") - NamedPitch("b'")
            NamedInterval('-M2')

        ::

            >>> NamedPitch("cs''") - NamedPitch("fs''")
            NamedInterval('+P4')

        Returns named interval when `arg` is a pitch; otherwise treats
        `arg` as an interval and returns a new named pitch transposed
        down by it.
        '''
        from abjad.tools import pitchtools
        if isinstance(arg, type(self)):
            # pitch - pitch -> interval between them.
            return pitchtools.NamedInterval.from_pitch_carriers(
                self, arg)
        else:
            # pitch - interval -> pitch transposed by the negated interval.
            interval = arg
            return pitchtools.transpose_pitch_carrier_by_interval(
                self, -interval)
### PRIVATE PROPERTIES ###
    @property
    def _lilypond_format(self):
        # LilyPond input for a named pitch is simply its pitch name.
        return str(self)
    @property
    def _storage_format_specification(self):
        # Storage format reconstructs the pitch from its name as a single
        # positional argument, rendered on one line (is_indented=False).
        from abjad.tools import systemtools
        return systemtools.StorageFormatSpecification(
            self,
            is_indented=False,
            positional_argument_values=(
                self.pitch_name,
                )
            )
### PRIVATE METHODS ###
def _initialize_by_named_pitch(self, named_pitch):
self._alteration_in_semitones = named_pitch._alteration_in_semitones
self._diatonic_pitch_class_number = \
named_pitch.diatonic_pitch_class_number
self._octave_number = named_pitch.octave_number
def _initialize_by_named_pitch_class_and_octave_number(
self, named_pitch_class, octave_number):
self._alteration_in_semitones = \
named_pitch_class._alteration_in_semitones
self._diatonic_pitch_class_number = \
named_pitch_class._diatonic_pitch_class_number
self._octave_number = int(octave_number)
def _initialize_by_pitch_class_name_and_octave_number(
self, pitch_class_name, octave_number):
from abjad.tools import pitchtools
named_pitch_class = pitchtools.NamedPitchClass(pitch_class_name)
self._initialize_by_named_pitch_class_and_octave_number(
named_pitch_class, octave_number)
def _initialize_by_pitch_class_name_octave_number_pair(self, pair):
pitch_class_name, octave_number = pair
self._initialize_by_pitch_class_name_and_octave_number(
pitch_class_name, octave_number)
    def _initialize_by_pitch_class_octave_number_string(
        self, pitch_class_octave_number_string):
        # Initializes from a string such as 'C#5'. The class-level regex
        # splits it into (letter, accidental, octave) groups; the octave is
        # group index 2. NamedPitchClass itself parses the pitch-class part.
        from abjad.tools import pitchtools
        groups = self._pitch_class_octave_number_regex.match(
            pitch_class_octave_number_string).groups()
        named_pitch_class = pitchtools.NamedPitchClass(
            pitch_class_octave_number_string)
        octave_number = int(groups[2])
        self._initialize_by_named_pitch_class_and_octave_number(
            named_pitch_class, octave_number)
def _initialize_by_pitch_name(self, pitch_string):
from abjad.tools import pitchtools
named_pitch_class = pitchtools.NamedPitchClass(pitch_string)
octave_number = pitchtools.Octave.from_pitch_name(
pitch_string).octave_number
self._initialize_by_named_pitch_class_and_octave_number(
named_pitch_class, octave_number)
def _initialize_by_pitch_number(self, pitch_number):
from abjad.tools import pitchtools
named_pitch_class = pitchtools.NamedPitchClass(pitch_number)
octave_number = pitch_number // 12 + 4
self._initialize_by_named_pitch_class_and_octave_number(
named_pitch_class, octave_number)
    def _initialize_by_pitch_number_and_diatonic_pitch_class_name(
        self, pitch_number, diatonic_pitch_class_name):
        # Initializes from a chromatic pitch number spelled on a given
        # letter name: _spell_pitch_number chooses the accidental and
        # octave needed so that letter + accidental sounds at pitch_number.
        from abjad.tools import pitchtools
        accidental, octave_number = self._spell_pitch_number(
            pitch_number,
            diatonic_pitch_class_name,
            )
        pitch_class_name = diatonic_pitch_class_name + \
            accidental.abbreviation
        named_pitch_class = pitchtools.NamedPitchClass(pitch_class_name)
        self._initialize_by_named_pitch_class_and_octave_number(
            named_pitch_class, octave_number)
def _initialize_by_pitch_number_and_named_pitch_class(
self, pitch_number, named_pitch_class):
diatonic_pitch_class_name = named_pitch_class.diatonic_pitch_class_name
self._initialize_by_pitch_number_and_diatonic_pitch_class_name(
pitch_number, diatonic_pitch_class_name)
@staticmethod
def _spell_pitch_number(pitch_number, diatonic_pitch_class_name):
from abjad.tools import pitchtools
# check input
if not isinstance(pitch_number, (int, float)):
raise TypeError
if not isinstance(diatonic_pitch_class_name, str):
raise TypeError
if not diatonic_pitch_class_name in ['c', 'd', 'e', 'f', 'g', 'a', 'b']:
raise ValueError
# find accidental semitones
pc = pitchtools.PitchClass._diatonic_pitch_class_name_to_pitch_class_number[
diatonic_pitch_class_name]
nearest_neighbor = pitchtools.transpose_pitch_class_number_to_pitch_number_neighbor(
pitch_number, pc)
semitones = pitch_number - nearest_neighbor
# find accidental alphabetic string
abbreviation = pitchtools.Accidental._semitones_to_abbreviation[
semitones]
accidental = pitchtools.Accidental(abbreviation)
# find octave
octave_number = int(math.floor((pitch_number - semitones) / 12)) + 4
# return accidental and octave
return accidental, octave_number
### PUBLIC METHODS ###
def apply_accidental(self, accidental=None):
'''Applies `accidental` to named pitch.
.. container:: example
**Example 1.** Applies sharp to C#5:
::
>>> NamedPitch("cs''").apply_accidental('s')
NamedPitch("css''")
.. container:: example
**Example 2.** Applies sharp to Db5:
::
>>> NamedPitch("df''").apply_accidental('s')
NamedPitch("d''")
Returns new named pitch.
'''
from abjad.tools import pitchtools
accidental = pitchtools.Accidental(accidental)
new_accidental = self.accidental + accidental
new_name = self.diatonic_pitch_class_name
new_name += new_accidental.abbreviation
return type(self)(new_name, self.octave_number)
    def invert(self, axis=None):
        r'''Inverts named pitch around `axis`.

        ::

            >>> NamedPitch("d'").invert("c'")
            NamedPitch('bf')

        ::

            >>> NamedPitch('bf').invert("c'")
            NamedPitch("d'")

        ::

            >>> NamedPitch("d'").invert()
            NamedPitch('bf')

        ::

            >>> NamedPitch("d'").invert('a')
            NamedPitch('e')

        Interprets none-valued `axis` equal to middle C.

        Returns new named pitch.
        '''
        # Delegate to the base-class implementation, which handles the
        # default axis and the reflection arithmetic.
        return Pitch.invert(self, axis=axis)
def multiply(self, n=1):
r'''Multiplies pitch-class of named pitch by `n` while maintaining
octave of named pitch.
.. container:: example
**Example 1.** Multiplies D2 by 3:
::
>>> NamedPitch('d,').multiply(3)
NamedPitch('fs,')
.. container:: example
**Example 2.** Multiplies D2 by 4:
::
>>> NamedPitch('d,').multiply(4)
NamedPitch('af,')
Returns new named pitch.
'''
pitch_class_number = (self.pitch_class_number * n) % 12
octave_floor = (self.octave_number - 4) * 12
return type(self)(pitch_class_number + octave_floor)
def respell_with_flats(self):
r'''Respells named pitch with flats.
.. container:: example
**Example 1.** Respells C#5 with flats:
::
>>> NamedPitch("cs''").respell_with_flats()
NamedPitch("df''")
.. container:: example
**Example 2.** Respells Db5 with flats:
::
>>> NamedPitch("df''").respell_with_flats()
NamedPitch("df''")
Returns new named pitch.
'''
from abjad.tools import pitchtools
class_ = pitchtools.PitchClass
octave = pitchtools.Octave.from_pitch_number(
self.numbered_pitch.pitch_number)
name = class_._pitch_class_number_to_pitch_class_name_with_flats[
self.pitch_class_number]
pitch = type(self)(name, octave.octave_number)
return pitch
def respell_with_sharps(self):
r'''Respells named pitch with sharps.
.. container:: example
**Example 1.** Respells Db5 with sharps:
::
>>> NamedPitch("df''").respell_with_sharps()
NamedPitch("cs''")
.. container:: example
**Example 2.** Respells C#5 with sharps:
::
>>> NamedPitch("cs''").respell_with_sharps()
NamedPitch("cs''")
Returns new named pitch.
'''
from abjad.tools import pitchtools
class_ = pitchtools.PitchClass
octave = pitchtools.Octave.from_pitch_number(
self.numbered_pitch.pitch_number)
name = class_._pitch_class_number_to_pitch_class_name_with_sharps[
self.pitch_class_number]
pitch = type(self)(name, octave.octave_number)
return pitch
def to_staff_position(self, clef=None):
r'''Changes named pitch to staff position with optional `clef`.
.. container:: example
**Example 1.** Changes C#5 to absolute staff position:
::
>>> NamedPitch('C#5').to_staff_position()
StaffPosition(number=7)
.. container:: example
**Example 2.** Changes C#5 to treble staff position:
::
>>> NamedPitch('C#5').to_staff_position(clef=Clef('treble'))
StaffPosition(number=1)
.. container:: example
**Example 3.** Changes C#5 to bass staff position:
::
>>> NamedPitch('C#5').to_staff_position(clef=Clef('bass'))
StaffPosition(number=13)
.. container:: example
**Example 4.** Marks up absolute staff position of many pitches:
::
>>> staff = Staff("g16 a b c' d' e' f' g' a' b' c'' d'' e'' f'' g'' a''")
>>> for note in staff:
... staff_position = note.written_pitch.to_staff_position()
... markup = Markup(staff_position.number)
... attach(markup, note)
...
>>> override(staff).text_script.staff_padding = 5
>>> show(staff) # doctest: +SKIP
.. doctest::
>>> f(staff)
\new Staff \with {
\override TextScript #'staff-padding = #5
} {
g16 - \markup { -3 }
a16 - \markup { -2 }
b16 - \markup { -1 }
c'16 - \markup { 0 }
d'16 - \markup { 1 }
e'16 - \markup { 2 }
f'16 - \markup { 3 }
g'16 - \markup { 4 }
a'16 - \markup { 5 }
b'16 - \markup { 6 }
c''16 - \markup { 7 }
d''16 - \markup { 8 }
e''16 - \markup { 9 }
f''16 - \markup { 10 }
g''16 - \markup { 11 }
a''16 - \markup { 12 }
}
.. container:: example
**Example 5.** Marks up treble staff position of many pitches:
::
>>> staff = Staff("g16 a b c' d' e' f' g' a' b' c'' d'' e'' f'' g'' a''")
>>> clef = Clef('treble')
>>> for note in staff:
... staff_position = note.written_pitch.to_staff_position(
... clef=clef
... )
... markup = Markup(staff_position.number)
... attach(markup, note)
...
>>> override(staff).text_script.staff_padding = 5
>>> show(staff) # doctest: +SKIP
.. doctest::
>>> f(staff)
\new Staff \with {
\override TextScript #'staff-padding = #5
} {
g16 - \markup { -9 }
a16 - \markup { -8 }
b16 - \markup { -7 }
c'16 - \markup { -6 }
d'16 - \markup { -5 }
e'16 - \markup { -4 }
f'16 - \markup { -3 }
g'16 - \markup { -2 }
a'16 - \markup { -1 }
b'16 - \markup { 0 }
c''16 - \markup { 1 }
d''16 - \markup { 2 }
e''16 - \markup { 3 }
f''16 - \markup { 4 }
g''16 - \markup { 5 }
a''16 - \markup { 6 }
}
.. container:: example
**Example 6.** Marks up bass staff position of many pitches:
::
>>> staff = Staff("g,16 a, b, c d e f g a b c' d' e' f' g' a'")
>>> clef = Clef('bass')
>>> attach(clef, staff)
>>> for note in staff:
... staff_position = note.written_pitch.to_staff_position(
... clef=clef
... )
... markup = Markup(staff_position.number)
... attach(markup, note)
...
>>> override(staff).text_script.staff_padding = 5
>>> show(staff) # doctest: +SKIP
.. doctest::
>>> f(staff)
\new Staff \with {
\override TextScript #'staff-padding = #5
} {
\clef "bass"
g,16 - \markup { -4 }
a,16 - \markup { -3 }
b,16 - \markup { -2 }
c16 - \markup { -1 }
d16 - \markup { 0 }
e16 - \markup { 1 }
f16 - \markup { 2 }
g16 - \markup { 3 }
a16 - \markup { 4 }
b16 - \markup { 5 }
c'16 - \markup { 6 }
d'16 - \markup { 7 }
e'16 - \markup { 8 }
f'16 - \markup { 9 }
g'16 - \markup { 10 }
a'16 - \markup { 11 }
}
Returns staff position.
'''
from abjad.tools import pitchtools
staff_position_number = self.diatonic_pitch_number
if clef is not None:
staff_position_number += clef.middle_c_position.number
staff_position = pitchtools.StaffPosition(staff_position_number)
return staff_position
def transpose(self, expr):
r'''Transposes named pitch by `expr`.
.. container:: example
**Example 1.** Transposes C4 up a minor second:
::
>>> NamedPitch("c'").transpose('m2')
NamedPitch("df'")
.. container:: example
**Example 2.** Transposes C4 down a major second:
::
>>> NamedPitch("c'").transpose('-M2')
NamedPitch('bf')
Returns new named pitch.
'''
from abjad.tools import pitchtools
named_interval = pitchtools.NamedInterval(expr)
transposed_pitch = pitchtools.transpose_pitch_carrier_by_interval(
self, named_interval)
return type(self)(transposed_pitch)
### PUBLIC PROPERTIES ###
@property
def accidental(self):
r'''Gets accidental of named pitch.
.. container:: example
**Example 1.** Gets accidental of C#5:
::
>>> NamedPitch("cs''").accidental
Accidental('s')
.. container:: example
**Example 2.** Gets accidental of C5:
::
>>> NamedPitch("c''").accidental
Accidental('')
Returns accidental.
'''
from abjad.tools import pitchtools
return pitchtools.Accidental(self._alteration_in_semitones)
@property
def alteration_in_semitones(self):
r'''Gets alteration of named pitch in semitones.
.. container:: example
**Example 1.** Gets alteration of C#5 in semitones:
::
>>> NamedPitch("cs''").alteration_in_semitones
1
.. container:: example
**Example 2.** Gets alteration of Ctqs5 in semitones:
::
>>> NamedPitch("ctqs''").alteration_in_semitones
1.5
Returns integer or float.
'''
return self._alteration_in_semitones
@property
def diatonic_pitch_class_name(self):
r'''Gets diatonic pitch-class name of named pitch.
.. container:: example
**Example 1.** Gets diatonic pitch-class name of C#5:
::
>>> NamedPitch("cs''").diatonic_pitch_class_name
'c'
.. container:: example
**Example 2.** Gets diatonic pitch-class names of many pitches:
::
>>> staff = Staff("g16 a b c' d' e' f' g' a' b' c'' d'' e'' f'' g'' a''")
>>> for note in staff:
... name = note.written_pitch.diatonic_pitch_class_name
... markup = Markup(name)
... attach(markup, note)
...
>>> override(staff).text_script.staff_padding = 5
>>> show(staff) # doctest: +SKIP
.. doctest::
>>> f(staff)
\new Staff \with {
\override TextScript #'staff-padding = #5
} {
g16 - \markup { g }
a16 - \markup { a }
b16 - \markup { b }
c'16 - \markup { c }
d'16 - \markup { d }
e'16 - \markup { e }
f'16 - \markup { f }
g'16 - \markup { g }
a'16 - \markup { a }
b'16 - \markup { b }
c''16 - \markup { c }
d''16 - \markup { d }
e''16 - \markup { e }
f''16 - \markup { f }
g''16 - \markup { g }
a''16 - \markup { a }
}
Returns string.
'''
from abjad.tools import pitchtools
class_ = pitchtools.PitchClass
return class_._diatonic_pitch_class_number_to_diatonic_pitch_class_name[
self._diatonic_pitch_class_number]
@property
def diatonic_pitch_class_number(self):
r'''Gets diatonic pitch-class number of named pitch.
.. container:: example
**Example 1.** Gets diatonic pitch-class number of C#5:
::
>>> NamedPitch("cs''").diatonic_pitch_class_number
0
.. container:: example
**Example 2.** Gets diatonic pitch-class numbers of many pitches:
::
>>> staff = Staff("g16 a b c' d' e' f' g' a' b' c'' d'' e'' f'' g'' a''")
>>> for note in staff:
... number = note.written_pitch.diatonic_pitch_class_number
... markup = Markup(number)
... attach(markup, note)
...
>>> override(staff).text_script.staff_padding = 5
>>> show(staff) # doctest: +SKIP
.. doctest::
>>> f(staff)
\new Staff \with {
\override TextScript #'staff-padding = #5
} {
g16 - \markup { 4 }
a16 - \markup { 5 }
b16 - \markup { 6 }
c'16 - \markup { 0 }
d'16 - \markup { 1 }
e'16 - \markup { 2 }
f'16 - \markup { 3 }
g'16 - \markup { 4 }
a'16 - \markup { 5 }
b'16 - \markup { 6 }
c''16 - \markup { 0 }
d''16 - \markup { 1 }
e''16 - \markup { 2 }
f''16 - \markup { 3 }
g''16 - \markup { 4 }
a''16 - \markup { 5 }
}
Returns integer.
'''
return self._diatonic_pitch_class_number
@property
def diatonic_pitch_name(self):
r'''Gets diatonic pitch name of named pitch.
.. container:: example
**Example 1.** Gets diatonic pitch name of C#5:
::
>>> NamedPitch("cs''").diatonic_pitch_name
"c''"
.. container:: example
**Example 2.** Gets diatonic pitch names of many pitches:
::
>>> staff = Staff("g16 a b c' d' e' f' g' a' b' c'' d'' e'' f'' g'' a''")
>>> for note in staff:
... name = note.written_pitch.diatonic_pitch_name
... markup = Markup(name)
... attach(markup, note)
...
>>> override(staff).text_script.staff_padding = 5
>>> show(staff) # doctest: +SKIP
.. doctest::
>>> f(staff)
\new Staff \with {
\override TextScript #'staff-padding = #5
} {
g16 - \markup { g }
a16 - \markup { a }
b16 - \markup { b }
c'16 - \markup { c' }
d'16 - \markup { d' }
e'16 - \markup { e' }
f'16 - \markup { f' }
g'16 - \markup { g' }
a'16 - \markup { a' }
b'16 - \markup { b' }
c''16 - \markup { c'' }
d''16 - \markup { d'' }
e''16 - \markup { e'' }
f''16 - \markup { f'' }
g''16 - \markup { g'' }
a''16 - \markup { a'' }
}
Returns string.
'''
return '{}{}'.format(
self.diatonic_pitch_class_name,
self.octave.octave_tick_string,
)
@property
def diatonic_pitch_number(self):
r'''Gets diatonic pitch number of named pitch.
.. container:: example
**Example 1.** Gets diatonic pitch number of C#5:
::
>>> NamedPitch("cs''").diatonic_pitch_number
7
.. container:: example
**Example 2.** Gets diatonic pitch numbers of many pitches:
::
>>> staff = Staff("g16 a b c' d' e' f' g' a' b' c'' d'' e'' f'' g'' a''")
>>> for note in staff:
... number = note.written_pitch.diatonic_pitch_number
... markup = Markup(number)
... attach(markup, note)
...
>>> override(staff).text_script.staff_padding = 5
>>> show(staff) # doctest: +SKIP
.. doctest::
>>> f(staff)
\new Staff \with {
\override TextScript #'staff-padding = #5
} {
g16 - \markup { -3 }
a16 - \markup { -2 }
b16 - \markup { -1 }
c'16 - \markup { 0 }
d'16 - \markup { 1 }
e'16 - \markup { 2 }
f'16 - \markup { 3 }
g'16 - \markup { 4 }
a'16 - \markup { 5 }
b'16 - \markup { 6 }
c''16 - \markup { 7 }
d''16 - \markup { 8 }
e''16 - \markup { 9 }
f''16 - \markup { 10 }
g''16 - \markup { 11 }
a''16 - \markup { 12 }
}
Returns integer.
'''
diatonic_pitch_number = 7 * (self.octave_number - 4)
diatonic_pitch_number += self.diatonic_pitch_class_number
return diatonic_pitch_number
@property
def named_pitch(self):
r'''Gets new named pitch.
.. container:: example
**Example 1.** Gets new named pitch from C#5:
::
>>> NamedPitch("cs''").named_pitch
NamedPitch("cs''")
.. container:: example
**Example 2.** Gets new named pitch from Db5:
::
>>> NamedPitch("df''").named_pitch
NamedPitch("df''")
Returns new named pitch.
'''
return type(self)(self)
@property
def named_pitch_class(self):
r'''Gets named pitch-class of named pitch.
.. container:: example
**Example 1.** Gets named pitch-class of C#5:
::
>>> NamedPitch("cs''").named_pitch_class
NamedPitchClass('cs')
.. container:: example
**Example 2.** Gets named pitch-class of Db5:
::
>>> NamedPitch("df''").named_pitch_class
NamedPitchClass('df')
Returns named pitch-class.
'''
from abjad.tools import pitchtools
return pitchtools.NamedPitchClass(self)
@property
def numbered_pitch(self):
r'''Gets numbered pitch corresponding to named pitch.
.. container:: example
**Example 1.** Gets numbered pitch corresponding to C#5:
::
>>> NamedPitch("cs''").numbered_pitch
NumberedPitch(13)
.. container:: example
**Example 2.** Gets numbered pitch corresponding to Db5:
::
>>> NamedPitch("df''").numbered_pitch
NumberedPitch(13)
Returns numbered pitch.
'''
from abjad.tools import pitchtools
return pitchtools.NumberedPitch(self)
@property
def numbered_pitch_class(self):
r'''Gets numbered pitch-class corresponding to named pitch.
.. container:: example
**Example 1.** Gets numbered pitch-class corresponding to C#5:
::
>>> NamedPitch("cs''").numbered_pitch_class
NumberedPitchClass(1)
.. container:: example
**Example 2.** Gets numbered pitch-class corresponding to Db5:
::
>>> NamedPitch("df''").numbered_pitch_class
NumberedPitchClass(1)
Returns numbered pitch-class.
'''
from abjad.tools import pitchtools
return pitchtools.NumberedPitchClass(self)
@property
def octave(self):
r'''Gets octave of named pitch.
.. container:: example
**Example 1.** Gets octave of C#5:
::
>>> NamedPitch("cs''").octave
Octave(5)
.. container:: example
**Example 2.** Gets octave of Db5:
::
>>> NamedPitch("df''").octave
Octave(5)
Returns octave.
'''
from abjad.tools import pitchtools
return pitchtools.Octave(self._octave_number)
@property
def octave_number(self):
r'''Gets octave number of named pitch.
.. container:: example
**Example 1.** Gets octave number of C#5:
::
>>> NamedPitch("cs''").octave_number
5
.. container:: example
**Example 2.** Gets octave number of Db5:
::
>>> NamedPitch("df''").octave_number
5
Returns integer.
'''
return int(self._octave_number)
@property
def pitch_class_name(self):
r'''Gets pitch-class name of named pitch.
.. container:: example
**Example 1.** Gets pitch-class name of C#5:
::
>>> NamedPitch("cs''").pitch_class_name
'cs'
.. container:: example
**Example 2.** Gets pitch-class name of Db5:
::
>>> NamedPitch("df''").pitch_class_name
'df'
Returns string.
'''
from abjad.tools import pitchtools
return '{}{}'.format(
self.diatonic_pitch_class_name,
pitchtools.Accidental._semitones_to_abbreviation[
self._alteration_in_semitones],
)
@property
def pitch_class_number(self):
r'''Gets pitch-class number of named pitch.
.. container:: example
**Example 1.** Gets pitch-class number of C#5:
::
>>> NamedPitch("cs''").pitch_class_number
1
.. container:: example
**Example 2.** Gets pitch-class number of Ctqs5:
::
>>> NamedPitch("ctqs''").pitch_class_number
1.5
Returns integer or float.
'''
from abjad.tools import pitchtools
class_ = pitchtools.PitchClass
return (class_._diatonic_pitch_class_number_to_pitch_class_number[
self._diatonic_pitch_class_number] + \
self._alteration_in_semitones) % 12
@property
def pitch_class_octave_label(self):
r'''Gets pitch-class / octave label of named pitch.
.. container:: example
**Example 1.** Gets pitch-class / octave label of C#5:
::
>>> NamedPitch("cs''").pitch_class_octave_label
'C#5'
.. container:: example
**Example 2.** Gets pitch-class / octave label of Ctqs5:
::
>>> NamedPitch("ctqs''").pitch_class_octave_label
'C#+5'
Returns string.
'''
return '{}{}{}'.format(
self.diatonic_pitch_class_name.upper(),
self.accidental.symbolic_string,
self.octave_number,
)
@property
def pitch_name(self):
r'''Gets pitch name of named pitch.
.. container:: example
**Example 1.** Gets pitch name of C#5:
::
>>> NamedPitch("cs''").pitch_name
"cs''"
.. container:: example
**Example 2.** Gets pitch name of Ctqs5:
::
>>> NamedPitch("ctqs''").pitch_name
"ctqs''"
Returns string.
'''
return '{}{}'.format(
self.pitch_class_name,
self.octave.octave_tick_string,
)
@property
def pitch_number(self):
r'''Gets pitch number of named pitch.
.. container:: example
**Example 1.** Gets pitch number of C#5:
::
>>> NamedPitch("cs''").pitch_number
13
.. container:: example
**Example 2.** Gets pitch number of Cbb5:
::
>>> NamedPitch("cff''").pitch_number
10
Returns integer or float.
'''
from abjad.tools import pitchtools
pitch_class_number = pitchtools.PitchClass._diatonic_pitch_class_number_to_pitch_class_number[
self.diatonic_pitch_class_number]
pitch_number = pitch_class_number + 12 * (self.octave_number - 4)
pitch_number += self.alteration_in_semitones
return pitch_number
### PUBLIC METHODS ###
@staticmethod
def from_pitch_carrier(pitch_carrier):
r'''Initializes named pitch from `pitch_carrier`.
.. container:: example
**Example 1.** Initializes named pitch from named pitch:
::
>>> pitch = NamedPitch('df', 5)
>>> NamedPitch.from_pitch_carrier(pitch)
NamedPitch("df''")
.. container:: example
**Example 2.** Initializes named pitch from note:
::
>>> note = Note("df''4")
>>> NamedPitch.from_pitch_carrier(note)
NamedPitch("df''")
.. container:: example
**Example 3.** Initializes named pitch from note head:
::
>>> note = Note("df''4")
>>> NamedPitch.from_pitch_carrier(note.note_head)
NamedPitch("df''")
.. container:: example
**Example 4.** Initializes named pitch from chord:
::
>>> chord = Chord("<df''>4")
>>> NamedPitch.from_pitch_carrier(chord)
NamedPitch("df''")
.. container:: example
**Example 5.** Initializes named pitch from integer:
::
>>> NamedPitch.from_pitch_carrier(13)
NamedPitch("cs''")
.. container:: example
**Example 6.** Initializes named pitch from numbered pitch-class:
::
>>> pitch_class = pitchtools.NumberedPitchClass(7)
>>> NamedPitch.from_pitch_carrier(pitch_class)
NamedPitch("g'")
Raises value error when `pitch_carrier` carries no pitch.
Raises value error when `pitch_carrier` carries more than one pitch.
Returns new named pitch.
'''
from abjad.tools import pitchtools
from abjad.tools import scoretools
if isinstance(pitch_carrier, pitchtools.NamedPitch):
return pitch_carrier
elif isinstance(pitch_carrier, pitchtools.NumberedPitch):
return pitchtools.NamedPitch(pitch_carrier)
elif isinstance(pitch_carrier, numbers.Number):
return pitchtools.NamedPitch(pitch_carrier)
elif isinstance(pitch_carrier, scoretools.Note):
pitch = pitch_carrier.written_pitch
if pitch is not None:
return NamedPitch.from_pitch_carrier(pitch)
else:
message = 'no pitch found on {!r}.'
message = message.format(pitch_carrier)
raise ValueError(message)
elif isinstance(pitch_carrier, scoretools.NoteHead):
pitch = pitch_carrier.written_pitch
if pitch is not None:
return NamedPitch.from_pitch_carrier(pitch)
else:
message = 'no pitch found on {!r}.'
message = message.format(pitch_carrier)
raise ValueError(message)
elif isinstance(pitch_carrier, scoretools.Chord):
pitches = pitch_carrier.written_pitches
if len(pitches) == 0:
message = 'no pitch found on {!r}.'
message = message.format(pitch_carrier)
raise ValueError(message)
elif len(pitches) == 1:
return NamedPitch.from_pitch_carrier(pitches[0])
else:
message = 'multiple pitches found on {!r}.'
message = message.format(pitch_carrier)
raise ValueError(message)
elif isinstance(pitch_carrier, pitchtools.NumberedPitchClass):
named_pitch_class = pitch_carrier.named_pitch_class
named_pitch = pitchtools.NamedPitch(named_pitch_class)
return named_pitch
else:
message = 'pitch carrier {!r} must be'
message += ' pitch, note, note head or chord.'
message = message.format(pitch_carrier)
raise TypeError(message)
@staticmethod
def from_staff_position(staff_position, clef=None):
r'''Initializes named pitch from `staff_position` and optional `clef`.
.. container:: example
**Example 1.** Initializes notes from absolute staff positions:
::
>>> for n in range(-6, 6):
... staff_position = pitchtools.StaffPosition(n)
... pitch = NamedPitch.from_staff_position(staff_position)
... message = '{!s}\t{}'.format(staff_position, pitch)
... print(message)
StaffPosition(-6) d
StaffPosition(-5) e
StaffPosition(-4) f
StaffPosition(-3) g
StaffPosition(-2) a
StaffPosition(-1) b
StaffPosition(0) c'
StaffPosition(1) d'
StaffPosition(2) e'
StaffPosition(3) f'
StaffPosition(4) g'
StaffPosition(5) a'
.. container:: example
**Example 2.** Initializes notes inside treble staff from staff
positions:
::
>>> clef = Clef('treble')
>>> for n in range(-6, 6):
... staff_position = pitchtools.StaffPosition(n)
... pitch = NamedPitch.from_staff_position(
... staff_position,
... clef=clef,
... )
... message = '{!s}\t{}'.format(staff_position, pitch)
... print(message)
StaffPosition(-6) c'
StaffPosition(-5) d'
StaffPosition(-4) e'
StaffPosition(-3) f'
StaffPosition(-2) g'
StaffPosition(-1) a'
StaffPosition(0) b'
StaffPosition(1) c''
StaffPosition(2) d''
StaffPosition(3) e''
StaffPosition(4) f''
StaffPosition(5) g''
.. container:: example
**Example 3.** Initializes notes inside bass staff from staff
positions:
::
>>> clef = Clef('bass')
>>> for n in range(-6, 6):
... staff_position = pitchtools.StaffPosition(n)
... pitch = NamedPitch.from_staff_position(
... staff_position,
... clef=clef,
... )
... message = '{!s}\t{}'.format(staff_position, pitch)
... print(message)
StaffPosition(-6) e,
StaffPosition(-5) f,
StaffPosition(-4) g,
StaffPosition(-3) a,
StaffPosition(-2) b,
StaffPosition(-1) c
StaffPosition(0) d
StaffPosition(1) e
StaffPosition(2) f
StaffPosition(3) g
StaffPosition(4) a
StaffPosition(5) b
.. container:: example
**Example 4.** Initializes notes inside alto staff from staff
positions:
::
>>> clef = Clef('alto')
>>> for n in range(-6, 6):
... staff_position = pitchtools.StaffPosition(n)
... pitch = NamedPitch.from_staff_position(
... staff_position,
... clef=clef,
... )
... message = '{!s}\t{}'.format(staff_position, pitch)
... print(message)
StaffPosition(-6) d
StaffPosition(-5) e
StaffPosition(-4) f
StaffPosition(-3) g
StaffPosition(-2) a
StaffPosition(-1) b
StaffPosition(0) c'
StaffPosition(1) d'
StaffPosition(2) e'
StaffPosition(3) f'
StaffPosition(4) g'
StaffPosition(5) a'
Returns new named pitch.
'''
from abjad.tools import pitchtools
if not isinstance(staff_position, pitchtools.StaffPosition):
staff_position = pitchtools.StaffPosition(staff_position)
if clef is not None:
offset_staff_position_number = staff_position.number
offset_staff_position_number -= clef.middle_c_position.number
offset_staff_position = pitchtools.StaffPosition(
offset_staff_position_number)
else:
offset_staff_position = staff_position
octave_number = offset_staff_position.number // 7 + 4
diatonic_pitch_class_number = offset_staff_position.number % 7
pitch_class_number = pitchtools.PitchClass._diatonic_pitch_class_number_to_pitch_class_number[
diatonic_pitch_class_number]
pitch_number = 12 * (octave_number - 4)
pitch_number += pitch_class_number
named_pitch = NamedPitch(pitch_number)
return named_pitch | gpl-3.0 |
ksrajkumar/openerp-6.1 | openerp/addons/l10n_syscohada/__openerp__.py | 6 | 1893 | # -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2010-2011 BAAMTU SARL (<http://www.baamtu.sn>).
# contact: leadsn@baamtu.com
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
"name" : "OHADA - Accounting",
"version" : "1.0",
"author" : "Baamtu Senegal",
"category" : "Localization/Account Charts",
"description": """This module implements the accounting chart for OHADA area.
It allows any company or association to manage its financial accounting.
Countries that use OHADA are the following:
Benin, Burkina Faso, Cameroon, Central African Republic, Comoros, Congo,
Ivory Coast, Gabon, Guinea, Guinea Bissau,
Equatorial Guinea, Mali, Niger, Replica of Democratic Congo, Senegal, Chad, Togo.
""",
"website": "http://www.baamtu.com",
"depends" : ["account", "base_vat"],
"demo_xml" : [],
"init_xml":[],
"update_xml" : ["l10n_syscohada_data.xml","l10n_syscohada_wizard.xml"],
"auto_install": False,
"installable": True,
'certificate': '0074187989333',
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
laiqiqi886/kbengine | kbe/src/lib/python/Lib/test/test_bytes.py | 60 | 55390 | """Unit tests for the bytes and bytearray types.
XXX This is a mess. Common tests should be moved to buffer_tests.py,
which itself ought to be unified with string_tests.py (and the latter
should be modernized).
"""
import os
import re
import sys
import copy
import functools
import pickle
import tempfile
import unittest
import test.support
import test.string_tests
import test.buffer_tests
if sys.flags.bytes_warning:
def check_bytes_warnings(func):
@functools.wraps(func)
def wrapper(*args, **kw):
with test.support.check_warnings(('', BytesWarning)):
return func(*args, **kw)
return wrapper
else:
# no-op
def check_bytes_warnings(func):
return func
class Indexable:
def __init__(self, value=0):
self.value = value
def __index__(self):
return self.value
class BaseBytesTest:
def test_basics(self):
b = self.type2test()
self.assertEqual(type(b), self.type2test)
self.assertEqual(b.__class__, self.type2test)
def test_copy(self):
a = self.type2test(b"abcd")
for copy_method in (copy.copy, copy.deepcopy):
b = copy_method(a)
self.assertEqual(a, b)
self.assertEqual(type(a), type(b))
def test_empty_sequence(self):
b = self.type2test()
self.assertEqual(len(b), 0)
self.assertRaises(IndexError, lambda: b[0])
self.assertRaises(IndexError, lambda: b[1])
self.assertRaises(IndexError, lambda: b[sys.maxsize])
self.assertRaises(IndexError, lambda: b[sys.maxsize+1])
self.assertRaises(IndexError, lambda: b[10**100])
self.assertRaises(IndexError, lambda: b[-1])
self.assertRaises(IndexError, lambda: b[-2])
self.assertRaises(IndexError, lambda: b[-sys.maxsize])
self.assertRaises(IndexError, lambda: b[-sys.maxsize-1])
self.assertRaises(IndexError, lambda: b[-sys.maxsize-2])
self.assertRaises(IndexError, lambda: b[-10**100])
def test_from_list(self):
ints = list(range(256))
b = self.type2test(i for i in ints)
self.assertEqual(len(b), 256)
self.assertEqual(list(b), ints)
def test_from_index(self):
b = self.type2test([Indexable(), Indexable(1), Indexable(254),
Indexable(255)])
self.assertEqual(list(b), [0, 1, 254, 255])
self.assertRaises(ValueError, self.type2test, [Indexable(-1)])
self.assertRaises(ValueError, self.type2test, [Indexable(256)])
def test_from_ssize(self):
self.assertEqual(self.type2test(0), b'')
self.assertEqual(self.type2test(1), b'\x00')
self.assertEqual(self.type2test(5), b'\x00\x00\x00\x00\x00')
self.assertRaises(ValueError, self.type2test, -1)
self.assertEqual(self.type2test('0', 'ascii'), b'0')
self.assertEqual(self.type2test(b'0'), b'0')
self.assertRaises(OverflowError, self.type2test, sys.maxsize + 1)
def test_constructor_type_errors(self):
self.assertRaises(TypeError, self.type2test, 0.0)
class C:
pass
self.assertRaises(TypeError, self.type2test, ["0"])
self.assertRaises(TypeError, self.type2test, [0.0])
self.assertRaises(TypeError, self.type2test, [None])
self.assertRaises(TypeError, self.type2test, [C()])
def test_constructor_value_errors(self):
self.assertRaises(ValueError, self.type2test, [-1])
self.assertRaises(ValueError, self.type2test, [-sys.maxsize])
self.assertRaises(ValueError, self.type2test, [-sys.maxsize-1])
self.assertRaises(ValueError, self.type2test, [-sys.maxsize-2])
self.assertRaises(ValueError, self.type2test, [-10**100])
self.assertRaises(ValueError, self.type2test, [256])
self.assertRaises(ValueError, self.type2test, [257])
self.assertRaises(ValueError, self.type2test, [sys.maxsize])
self.assertRaises(ValueError, self.type2test, [sys.maxsize+1])
self.assertRaises(ValueError, self.type2test, [10**100])
def test_compare(self):
b1 = self.type2test([1, 2, 3])
b2 = self.type2test([1, 2, 3])
b3 = self.type2test([1, 3])
self.assertEqual(b1, b2)
self.assertTrue(b2 != b3)
self.assertTrue(b1 <= b2)
self.assertTrue(b1 <= b3)
self.assertTrue(b1 < b3)
self.assertTrue(b1 >= b2)
self.assertTrue(b3 >= b2)
self.assertTrue(b3 > b2)
self.assertFalse(b1 != b2)
self.assertFalse(b2 == b3)
self.assertFalse(b1 > b2)
self.assertFalse(b1 > b3)
self.assertFalse(b1 >= b3)
self.assertFalse(b1 < b2)
self.assertFalse(b3 < b2)
self.assertFalse(b3 <= b2)
@check_bytes_warnings
def test_compare_to_str(self):
# Byte comparisons with unicode should always fail!
# Test this for all expected byte orders and Unicode character
# sizes.
self.assertEqual(self.type2test(b"\0a\0b\0c") == "abc", False)
self.assertEqual(self.type2test(b"\0\0\0a\0\0\0b\0\0\0c") == "abc",
False)
self.assertEqual(self.type2test(b"a\0b\0c\0") == "abc", False)
self.assertEqual(self.type2test(b"a\0\0\0b\0\0\0c\0\0\0") == "abc",
False)
self.assertEqual(self.type2test() == str(), False)
self.assertEqual(self.type2test() != str(), True)
def test_reversed(self):
input = list(map(ord, "Hello"))
b = self.type2test(input)
output = list(reversed(b))
input.reverse()
self.assertEqual(output, input)
def test_getslice(self):
def by(s):
return self.type2test(map(ord, s))
b = by("Hello, world")
self.assertEqual(b[:5], by("Hello"))
self.assertEqual(b[1:5], by("ello"))
self.assertEqual(b[5:7], by(", "))
self.assertEqual(b[7:], by("world"))
self.assertEqual(b[7:12], by("world"))
self.assertEqual(b[7:100], by("world"))
self.assertEqual(b[:-7], by("Hello"))
self.assertEqual(b[-11:-7], by("ello"))
self.assertEqual(b[-7:-5], by(", "))
self.assertEqual(b[-5:], by("world"))
self.assertEqual(b[-5:12], by("world"))
self.assertEqual(b[-5:100], by("world"))
self.assertEqual(b[-100:5], by("Hello"))
def test_extended_getslice(self):
# Test extended slicing by comparing with list slicing.
L = list(range(255))
b = self.type2test(L)
indices = (0, None, 1, 3, 19, 100, -1, -2, -31, -100)
for start in indices:
for stop in indices:
# Skip step 0 (invalid)
for step in indices[1:]:
self.assertEqual(b[start:stop:step], self.type2test(L[start:stop:step]))
def test_encoding(self):
sample = "Hello world\n\u1234\u5678\u9abc"
for enc in ("utf-8", "utf-16"):
b = self.type2test(sample, enc)
self.assertEqual(b, self.type2test(sample.encode(enc)))
self.assertRaises(UnicodeEncodeError, self.type2test, sample, "latin-1")
b = self.type2test(sample, "latin-1", "ignore")
self.assertEqual(b, self.type2test(sample[:-3], "utf-8"))
def test_decode(self):
sample = "Hello world\n\u1234\u5678\u9abc\def0\def0"
for enc in ("utf-8", "utf-16"):
b = self.type2test(sample, enc)
self.assertEqual(b.decode(enc), sample)
sample = "Hello world\n\x80\x81\xfe\xff"
b = self.type2test(sample, "latin-1")
self.assertRaises(UnicodeDecodeError, b.decode, "utf-8")
self.assertEqual(b.decode("utf-8", "ignore"), "Hello world\n")
self.assertEqual(b.decode(errors="ignore", encoding="utf-8"),
"Hello world\n")
# Default encoding is utf-8
self.assertEqual(self.type2test(b'\xe2\x98\x83').decode(), '\u2603')
def test_from_int(self):
b = self.type2test(0)
self.assertEqual(b, self.type2test())
b = self.type2test(10)
self.assertEqual(b, self.type2test([0]*10))
b = self.type2test(10000)
self.assertEqual(b, self.type2test([0]*10000))
def test_concat(self):
b1 = self.type2test(b"abc")
b2 = self.type2test(b"def")
self.assertEqual(b1 + b2, b"abcdef")
self.assertEqual(b1 + bytes(b"def"), b"abcdef")
self.assertEqual(bytes(b"def") + b1, b"defabc")
self.assertRaises(TypeError, lambda: b1 + "def")
self.assertRaises(TypeError, lambda: "abc" + b2)
def test_repeat(self):
for b in b"abc", self.type2test(b"abc"):
self.assertEqual(b * 3, b"abcabcabc")
self.assertEqual(b * 0, b"")
self.assertEqual(b * -1, b"")
self.assertRaises(TypeError, lambda: b * 3.14)
self.assertRaises(TypeError, lambda: 3.14 * b)
# XXX Shouldn't bytes and bytearray agree on what to raise?
with self.assertRaises((OverflowError, MemoryError)):
c = b * sys.maxsize
with self.assertRaises((OverflowError, MemoryError)):
b *= sys.maxsize
def test_repeat_1char(self):
self.assertEqual(self.type2test(b'x')*100, self.type2test([ord('x')]*100))
def test_contains(self):
b = self.type2test(b"abc")
self.assertIn(ord('a'), b)
self.assertIn(int(ord('a')), b)
self.assertNotIn(200, b)
self.assertRaises(ValueError, lambda: 300 in b)
self.assertRaises(ValueError, lambda: -1 in b)
self.assertRaises(TypeError, lambda: None in b)
self.assertRaises(TypeError, lambda: float(ord('a')) in b)
self.assertRaises(TypeError, lambda: "a" in b)
for f in bytes, bytearray:
self.assertIn(f(b""), b)
self.assertIn(f(b"a"), b)
self.assertIn(f(b"b"), b)
self.assertIn(f(b"c"), b)
self.assertIn(f(b"ab"), b)
self.assertIn(f(b"bc"), b)
self.assertIn(f(b"abc"), b)
self.assertNotIn(f(b"ac"), b)
self.assertNotIn(f(b"d"), b)
self.assertNotIn(f(b"dab"), b)
self.assertNotIn(f(b"abd"), b)
def test_fromhex(self):
self.assertRaises(TypeError, self.type2test.fromhex)
self.assertRaises(TypeError, self.type2test.fromhex, 1)
self.assertEqual(self.type2test.fromhex(''), self.type2test())
b = bytearray([0x1a, 0x2b, 0x30])
self.assertEqual(self.type2test.fromhex('1a2B30'), b)
self.assertEqual(self.type2test.fromhex(' 1A 2B 30 '), b)
self.assertEqual(self.type2test.fromhex('0000'), b'\0\0')
self.assertRaises(TypeError, self.type2test.fromhex, b'1B')
self.assertRaises(ValueError, self.type2test.fromhex, 'a')
self.assertRaises(ValueError, self.type2test.fromhex, 'rt')
self.assertRaises(ValueError, self.type2test.fromhex, '1a b cd')
self.assertRaises(ValueError, self.type2test.fromhex, '\x00')
self.assertRaises(ValueError, self.type2test.fromhex, '12 \x00 34')
def test_join(self):
self.assertEqual(self.type2test(b"").join([]), b"")
self.assertEqual(self.type2test(b"").join([b""]), b"")
for lst in [[b"abc"], [b"a", b"bc"], [b"ab", b"c"], [b"a", b"b", b"c"]]:
lst = list(map(self.type2test, lst))
self.assertEqual(self.type2test(b"").join(lst), b"abc")
self.assertEqual(self.type2test(b"").join(tuple(lst)), b"abc")
self.assertEqual(self.type2test(b"").join(iter(lst)), b"abc")
dot_join = self.type2test(b".:").join
self.assertEqual(dot_join([b"ab", b"cd"]), b"ab.:cd")
self.assertEqual(dot_join([memoryview(b"ab"), b"cd"]), b"ab.:cd")
self.assertEqual(dot_join([b"ab", memoryview(b"cd")]), b"ab.:cd")
self.assertEqual(dot_join([bytearray(b"ab"), b"cd"]), b"ab.:cd")
self.assertEqual(dot_join([b"ab", bytearray(b"cd")]), b"ab.:cd")
# Stress it with many items
seq = [b"abc"] * 1000
expected = b"abc" + b".:abc" * 999
self.assertEqual(dot_join(seq), expected)
# Error handling and cleanup when some item in the middle of the
# sequence has the wrong type.
with self.assertRaises(TypeError):
dot_join([bytearray(b"ab"), "cd", b"ef"])
with self.assertRaises(TypeError):
dot_join([memoryview(b"ab"), "cd", b"ef"])
def test_count(self):
b = self.type2test(b'mississippi')
i = 105
p = 112
w = 119
self.assertEqual(b.count(b'i'), 4)
self.assertEqual(b.count(b'ss'), 2)
self.assertEqual(b.count(b'w'), 0)
self.assertEqual(b.count(i), 4)
self.assertEqual(b.count(w), 0)
self.assertEqual(b.count(b'i', 6), 2)
self.assertEqual(b.count(b'p', 6), 2)
self.assertEqual(b.count(b'i', 1, 3), 1)
self.assertEqual(b.count(b'p', 7, 9), 1)
self.assertEqual(b.count(i, 6), 2)
self.assertEqual(b.count(p, 6), 2)
self.assertEqual(b.count(i, 1, 3), 1)
self.assertEqual(b.count(p, 7, 9), 1)
def test_startswith(self):
b = self.type2test(b'hello')
self.assertFalse(self.type2test().startswith(b"anything"))
self.assertTrue(b.startswith(b"hello"))
self.assertTrue(b.startswith(b"hel"))
self.assertTrue(b.startswith(b"h"))
self.assertFalse(b.startswith(b"hellow"))
self.assertFalse(b.startswith(b"ha"))
with self.assertRaises(TypeError) as cm:
b.startswith([b'h'])
exc = str(cm.exception)
self.assertIn('bytes', exc)
self.assertIn('tuple', exc)
def test_endswith(self):
b = self.type2test(b'hello')
self.assertFalse(bytearray().endswith(b"anything"))
self.assertTrue(b.endswith(b"hello"))
self.assertTrue(b.endswith(b"llo"))
self.assertTrue(b.endswith(b"o"))
self.assertFalse(b.endswith(b"whello"))
self.assertFalse(b.endswith(b"no"))
with self.assertRaises(TypeError) as cm:
b.endswith([b'o'])
exc = str(cm.exception)
self.assertIn('bytes', exc)
self.assertIn('tuple', exc)
def test_find(self):
b = self.type2test(b'mississippi')
i = 105
w = 119
self.assertEqual(b.find(b'ss'), 2)
self.assertEqual(b.find(b'w'), -1)
self.assertEqual(b.find(b'mississippian'), -1)
self.assertEqual(b.find(i), 1)
self.assertEqual(b.find(w), -1)
self.assertEqual(b.find(b'ss', 3), 5)
self.assertEqual(b.find(b'ss', 1, 7), 2)
self.assertEqual(b.find(b'ss', 1, 3), -1)
self.assertEqual(b.find(i, 6), 7)
self.assertEqual(b.find(i, 1, 3), 1)
self.assertEqual(b.find(w, 1, 3), -1)
for index in (-1, 256, sys.maxsize + 1):
self.assertRaisesRegex(
ValueError, r'byte must be in range\(0, 256\)',
b.find, index)
def test_rfind(self):
b = self.type2test(b'mississippi')
i = 105
w = 119
self.assertEqual(b.rfind(b'ss'), 5)
self.assertEqual(b.rfind(b'w'), -1)
self.assertEqual(b.rfind(b'mississippian'), -1)
self.assertEqual(b.rfind(i), 10)
self.assertEqual(b.rfind(w), -1)
self.assertEqual(b.rfind(b'ss', 3), 5)
self.assertEqual(b.rfind(b'ss', 0, 6), 2)
self.assertEqual(b.rfind(i, 1, 3), 1)
self.assertEqual(b.rfind(i, 3, 9), 7)
self.assertEqual(b.rfind(w, 1, 3), -1)
def test_index(self):
b = self.type2test(b'mississippi')
i = 105
w = 119
self.assertEqual(b.index(b'ss'), 2)
self.assertRaises(ValueError, b.index, b'w')
self.assertRaises(ValueError, b.index, b'mississippian')
self.assertEqual(b.index(i), 1)
self.assertRaises(ValueError, b.index, w)
self.assertEqual(b.index(b'ss', 3), 5)
self.assertEqual(b.index(b'ss', 1, 7), 2)
self.assertRaises(ValueError, b.index, b'ss', 1, 3)
self.assertEqual(b.index(i, 6), 7)
self.assertEqual(b.index(i, 1, 3), 1)
self.assertRaises(ValueError, b.index, w, 1, 3)
def test_rindex(self):
b = self.type2test(b'mississippi')
i = 105
w = 119
self.assertEqual(b.rindex(b'ss'), 5)
self.assertRaises(ValueError, b.rindex, b'w')
self.assertRaises(ValueError, b.rindex, b'mississippian')
self.assertEqual(b.rindex(i), 10)
self.assertRaises(ValueError, b.rindex, w)
self.assertEqual(b.rindex(b'ss', 3), 5)
self.assertEqual(b.rindex(b'ss', 0, 6), 2)
self.assertEqual(b.rindex(i, 1, 3), 1)
self.assertEqual(b.rindex(i, 3, 9), 7)
self.assertRaises(ValueError, b.rindex, w, 1, 3)
def test_replace(self):
b = self.type2test(b'mississippi')
self.assertEqual(b.replace(b'i', b'a'), b'massassappa')
self.assertEqual(b.replace(b'ss', b'x'), b'mixixippi')
def test_split(self):
b = self.type2test(b'mississippi')
self.assertEqual(b.split(b'i'), [b'm', b'ss', b'ss', b'pp', b''])
self.assertEqual(b.split(b'ss'), [b'mi', b'i', b'ippi'])
self.assertEqual(b.split(b'w'), [b])
# with keyword args
b = self.type2test(b'a|b|c|d')
self.assertEqual(b.split(sep=b'|'), [b'a', b'b', b'c', b'd'])
self.assertEqual(b.split(b'|', maxsplit=1), [b'a', b'b|c|d'])
self.assertEqual(b.split(sep=b'|', maxsplit=1), [b'a', b'b|c|d'])
self.assertEqual(b.split(maxsplit=1, sep=b'|'), [b'a', b'b|c|d'])
b = self.type2test(b'a b c d')
self.assertEqual(b.split(maxsplit=1), [b'a', b'b c d'])
def test_split_whitespace(self):
for b in (b' arf barf ', b'arf\tbarf', b'arf\nbarf', b'arf\rbarf',
b'arf\fbarf', b'arf\vbarf'):
b = self.type2test(b)
self.assertEqual(b.split(), [b'arf', b'barf'])
self.assertEqual(b.split(None), [b'arf', b'barf'])
self.assertEqual(b.split(None, 2), [b'arf', b'barf'])
for b in (b'a\x1Cb', b'a\x1Db', b'a\x1Eb', b'a\x1Fb'):
b = self.type2test(b)
self.assertEqual(b.split(), [b])
self.assertEqual(self.type2test(b' a bb c ').split(None, 0), [b'a bb c '])
self.assertEqual(self.type2test(b' a bb c ').split(None, 1), [b'a', b'bb c '])
self.assertEqual(self.type2test(b' a bb c ').split(None, 2), [b'a', b'bb', b'c '])
self.assertEqual(self.type2test(b' a bb c ').split(None, 3), [b'a', b'bb', b'c'])
def test_split_string_error(self):
self.assertRaises(TypeError, self.type2test(b'a b').split, ' ')
def test_split_unicodewhitespace(self):
b = self.type2test(b"\x09\x0A\x0B\x0C\x0D\x1C\x1D\x1E\x1F")
self.assertEqual(b.split(), [b'\x1c\x1d\x1e\x1f'])
def test_rsplit(self):
b = self.type2test(b'mississippi')
self.assertEqual(b.rsplit(b'i'), [b'm', b'ss', b'ss', b'pp', b''])
self.assertEqual(b.rsplit(b'ss'), [b'mi', b'i', b'ippi'])
self.assertEqual(b.rsplit(b'w'), [b])
# with keyword args
b = self.type2test(b'a|b|c|d')
self.assertEqual(b.rsplit(sep=b'|'), [b'a', b'b', b'c', b'd'])
self.assertEqual(b.rsplit(b'|', maxsplit=1), [b'a|b|c', b'd'])
self.assertEqual(b.rsplit(sep=b'|', maxsplit=1), [b'a|b|c', b'd'])
self.assertEqual(b.rsplit(maxsplit=1, sep=b'|'), [b'a|b|c', b'd'])
b = self.type2test(b'a b c d')
self.assertEqual(b.rsplit(maxsplit=1), [b'a b c', b'd'])
def test_rsplit_whitespace(self):
for b in (b' arf barf ', b'arf\tbarf', b'arf\nbarf', b'arf\rbarf',
b'arf\fbarf', b'arf\vbarf'):
b = self.type2test(b)
self.assertEqual(b.rsplit(), [b'arf', b'barf'])
self.assertEqual(b.rsplit(None), [b'arf', b'barf'])
self.assertEqual(b.rsplit(None, 2), [b'arf', b'barf'])
self.assertEqual(self.type2test(b' a bb c ').rsplit(None, 0), [b' a bb c'])
self.assertEqual(self.type2test(b' a bb c ').rsplit(None, 1), [b' a bb', b'c'])
self.assertEqual(self.type2test(b' a bb c ').rsplit(None, 2), [b' a', b'bb', b'c'])
self.assertEqual(self.type2test(b' a bb c ').rsplit(None, 3), [b'a', b'bb', b'c'])
def test_rsplit_string_error(self):
self.assertRaises(TypeError, self.type2test(b'a b').rsplit, ' ')
def test_rsplit_unicodewhitespace(self):
b = self.type2test(b"\x09\x0A\x0B\x0C\x0D\x1C\x1D\x1E\x1F")
self.assertEqual(b.rsplit(), [b'\x1c\x1d\x1e\x1f'])
def test_partition(self):
b = self.type2test(b'mississippi')
self.assertEqual(b.partition(b'ss'), (b'mi', b'ss', b'issippi'))
self.assertEqual(b.partition(b'w'), (b'mississippi', b'', b''))
def test_rpartition(self):
b = self.type2test(b'mississippi')
self.assertEqual(b.rpartition(b'ss'), (b'missi', b'ss', b'ippi'))
self.assertEqual(b.rpartition(b'i'), (b'mississipp', b'i', b''))
self.assertEqual(b.rpartition(b'w'), (b'', b'', b'mississippi'))
def test_pickling(self):
for proto in range(pickle.HIGHEST_PROTOCOL + 1):
for b in b"", b"a", b"abc", b"\xffab\x80", b"\0\0\377\0\0":
b = self.type2test(b)
ps = pickle.dumps(b, proto)
q = pickle.loads(ps)
self.assertEqual(b, q)
def test_iterator_pickling(self):
for b in b"", b"a", b"abc", b"\xffab\x80", b"\0\0\377\0\0":
it = itorg = iter(self.type2test(b))
data = list(self.type2test(b))
d = pickle.dumps(it)
it = pickle.loads(d)
self.assertEqual(type(itorg), type(it))
self.assertEqual(list(it), data)
it = pickle.loads(d)
try:
next(it)
except StopIteration:
continue
d = pickle.dumps(it)
it = pickle.loads(d)
self.assertEqual(list(it), data[1:])
def test_strip(self):
b = self.type2test(b'mississippi')
self.assertEqual(b.strip(b'i'), b'mississipp')
self.assertEqual(b.strip(b'm'), b'ississippi')
self.assertEqual(b.strip(b'pi'), b'mississ')
self.assertEqual(b.strip(b'im'), b'ssissipp')
self.assertEqual(b.strip(b'pim'), b'ssiss')
self.assertEqual(b.strip(b), b'')
def test_lstrip(self):
b = self.type2test(b'mississippi')
self.assertEqual(b.lstrip(b'i'), b'mississippi')
self.assertEqual(b.lstrip(b'm'), b'ississippi')
self.assertEqual(b.lstrip(b'pi'), b'mississippi')
self.assertEqual(b.lstrip(b'im'), b'ssissippi')
self.assertEqual(b.lstrip(b'pim'), b'ssissippi')
def test_rstrip(self):
b = self.type2test(b'mississippi')
self.assertEqual(b.rstrip(b'i'), b'mississipp')
self.assertEqual(b.rstrip(b'm'), b'mississippi')
self.assertEqual(b.rstrip(b'pi'), b'mississ')
self.assertEqual(b.rstrip(b'im'), b'mississipp')
self.assertEqual(b.rstrip(b'pim'), b'mississ')
def test_strip_whitespace(self):
b = self.type2test(b' \t\n\r\f\vabc \t\n\r\f\v')
self.assertEqual(b.strip(), b'abc')
self.assertEqual(b.lstrip(), b'abc \t\n\r\f\v')
self.assertEqual(b.rstrip(), b' \t\n\r\f\vabc')
def test_strip_bytearray(self):
self.assertEqual(self.type2test(b'abc').strip(memoryview(b'ac')), b'b')
self.assertEqual(self.type2test(b'abc').lstrip(memoryview(b'ac')), b'bc')
self.assertEqual(self.type2test(b'abc').rstrip(memoryview(b'ac')), b'ab')
def test_strip_string_error(self):
self.assertRaises(TypeError, self.type2test(b'abc').strip, 'b')
self.assertRaises(TypeError, self.type2test(b'abc').lstrip, 'b')
self.assertRaises(TypeError, self.type2test(b'abc').rstrip, 'b')
def test_center(self):
# Fill character can be either bytes or bytearray (issue 12380)
b = self.type2test(b'abc')
for fill_type in (bytes, bytearray):
self.assertEqual(b.center(7, fill_type(b'-')),
self.type2test(b'--abc--'))
def test_ljust(self):
# Fill character can be either bytes or bytearray (issue 12380)
b = self.type2test(b'abc')
for fill_type in (bytes, bytearray):
self.assertEqual(b.ljust(7, fill_type(b'-')),
self.type2test(b'abc----'))
def test_rjust(self):
# Fill character can be either bytes or bytearray (issue 12380)
b = self.type2test(b'abc')
for fill_type in (bytes, bytearray):
self.assertEqual(b.rjust(7, fill_type(b'-')),
self.type2test(b'----abc'))
def test_ord(self):
b = self.type2test(b'\0A\x7f\x80\xff')
self.assertEqual([ord(b[i:i+1]) for i in range(len(b))],
[0, 65, 127, 128, 255])
def test_maketrans(self):
transtable = b'\000\001\002\003\004\005\006\007\010\011\012\013\014\015\016\017\020\021\022\023\024\025\026\027\030\031\032\033\034\035\036\037 !"#$%&\'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`xyzdefghijklmnopqrstuvwxyz{|}~\177\200\201\202\203\204\205\206\207\210\211\212\213\214\215\216\217\220\221\222\223\224\225\226\227\230\231\232\233\234\235\236\237\240\241\242\243\244\245\246\247\250\251\252\253\254\255\256\257\260\261\262\263\264\265\266\267\270\271\272\273\274\275\276\277\300\301\302\303\304\305\306\307\310\311\312\313\314\315\316\317\320\321\322\323\324\325\326\327\330\331\332\333\334\335\336\337\340\341\342\343\344\345\346\347\350\351\352\353\354\355\356\357\360\361\362\363\364\365\366\367\370\371\372\373\374\375\376\377'
self.assertEqual(self.type2test.maketrans(b'abc', b'xyz'), transtable)
transtable = b'\000\001\002\003\004\005\006\007\010\011\012\013\014\015\016\017\020\021\022\023\024\025\026\027\030\031\032\033\034\035\036\037 !"#$%&\'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~\177\200\201\202\203\204\205\206\207\210\211\212\213\214\215\216\217\220\221\222\223\224\225\226\227\230\231\232\233\234\235\236\237\240\241\242\243\244\245\246\247\250\251\252\253\254\255\256\257\260\261\262\263\264\265\266\267\270\271\272\273\274\275\276\277\300\301\302\303\304\305\306\307\310\311\312\313\314\315\316\317\320\321\322\323\324\325\326\327\330\331\332\333\334\335\336\337\340\341\342\343\344\345\346\347\350\351\352\353\354\355\356\357\360\361\362\363\364\365\366\367\370\371\372\373\374xyz'
self.assertEqual(self.type2test.maketrans(b'\375\376\377', b'xyz'), transtable)
self.assertRaises(ValueError, self.type2test.maketrans, b'abc', b'xyzq')
self.assertRaises(TypeError, self.type2test.maketrans, 'abc', 'def')
def test_none_arguments(self):
# issue 11828
b = self.type2test(b'hello')
l = self.type2test(b'l')
h = self.type2test(b'h')
x = self.type2test(b'x')
o = self.type2test(b'o')
self.assertEqual(2, b.find(l, None))
self.assertEqual(3, b.find(l, -2, None))
self.assertEqual(2, b.find(l, None, -2))
self.assertEqual(0, b.find(h, None, None))
self.assertEqual(3, b.rfind(l, None))
self.assertEqual(3, b.rfind(l, -2, None))
self.assertEqual(2, b.rfind(l, None, -2))
self.assertEqual(0, b.rfind(h, None, None))
self.assertEqual(2, b.index(l, None))
self.assertEqual(3, b.index(l, -2, None))
self.assertEqual(2, b.index(l, None, -2))
self.assertEqual(0, b.index(h, None, None))
self.assertEqual(3, b.rindex(l, None))
self.assertEqual(3, b.rindex(l, -2, None))
self.assertEqual(2, b.rindex(l, None, -2))
self.assertEqual(0, b.rindex(h, None, None))
self.assertEqual(2, b.count(l, None))
self.assertEqual(1, b.count(l, -2, None))
self.assertEqual(1, b.count(l, None, -2))
self.assertEqual(0, b.count(x, None, None))
self.assertEqual(True, b.endswith(o, None))
self.assertEqual(True, b.endswith(o, -2, None))
self.assertEqual(True, b.endswith(l, None, -2))
self.assertEqual(False, b.endswith(x, None, None))
self.assertEqual(True, b.startswith(h, None))
self.assertEqual(True, b.startswith(l, -2, None))
self.assertEqual(True, b.startswith(h, None, -2))
self.assertEqual(False, b.startswith(x, None, None))
def test_integer_arguments_out_of_byte_range(self):
b = self.type2test(b'hello')
for method in (b.count, b.find, b.index, b.rfind, b.rindex):
self.assertRaises(ValueError, method, -1)
self.assertRaises(ValueError, method, 256)
self.assertRaises(ValueError, method, 9999)
def test_find_etc_raise_correct_error_messages(self):
# issue 11828
b = self.type2test(b'hello')
x = self.type2test(b'x')
self.assertRaisesRegex(TypeError, r'\bfind\b', b.find,
x, None, None, None)
self.assertRaisesRegex(TypeError, r'\brfind\b', b.rfind,
x, None, None, None)
self.assertRaisesRegex(TypeError, r'\bindex\b', b.index,
x, None, None, None)
self.assertRaisesRegex(TypeError, r'\brindex\b', b.rindex,
x, None, None, None)
self.assertRaisesRegex(TypeError, r'\bcount\b', b.count,
x, None, None, None)
self.assertRaisesRegex(TypeError, r'\bstartswith\b', b.startswith,
x, None, None, None)
self.assertRaisesRegex(TypeError, r'\bendswith\b', b.endswith,
x, None, None, None)
class BytesTest(BaseBytesTest, unittest.TestCase):
type2test = bytes
def test_buffer_is_readonly(self):
fd = os.open(__file__, os.O_RDONLY)
with open(fd, "rb", buffering=0) as f:
self.assertRaises(TypeError, f.readinto, b"")
def test_custom(self):
class A:
def __bytes__(self):
return b'abc'
self.assertEqual(bytes(A()), b'abc')
class A: pass
self.assertRaises(TypeError, bytes, A())
class A:
def __bytes__(self):
return None
self.assertRaises(TypeError, bytes, A())
class A:
def __bytes__(self):
return b'a'
def __index__(self):
return 42
self.assertEqual(bytes(A()), b'a')
# Test PyBytes_FromFormat()
def test_from_format(self):
test.support.import_module('ctypes')
from ctypes import pythonapi, py_object, c_int, c_char_p
PyBytes_FromFormat = pythonapi.PyBytes_FromFormat
PyBytes_FromFormat.restype = py_object
self.assertEqual(PyBytes_FromFormat(b'format'),
b'format')
self.assertEqual(PyBytes_FromFormat(b'%'), b'%')
self.assertEqual(PyBytes_FromFormat(b'%%'), b'%')
self.assertEqual(PyBytes_FromFormat(b'%%s'), b'%s')
self.assertEqual(PyBytes_FromFormat(b'[%%]'), b'[%]')
self.assertEqual(PyBytes_FromFormat(b'%%%c', c_int(ord('_'))), b'%_')
self.assertEqual(PyBytes_FromFormat(b'c:%c', c_int(255)),
b'c:\xff')
self.assertEqual(PyBytes_FromFormat(b's:%s', c_char_p(b'cstr')),
b's:cstr')
# Issue #19969
self.assertRaises(OverflowError,
PyBytes_FromFormat, b'%c', c_int(-1))
self.assertRaises(OverflowError,
PyBytes_FromFormat, b'%c', c_int(256))
class ByteArrayTest(BaseBytesTest, unittest.TestCase):
type2test = bytearray
def test_nohash(self):
self.assertRaises(TypeError, hash, bytearray())
def test_bytearray_api(self):
short_sample = b"Hello world\n"
sample = short_sample + b"\0"*(20 - len(short_sample))
tfn = tempfile.mktemp()
try:
# Prepare
with open(tfn, "wb") as f:
f.write(short_sample)
# Test readinto
with open(tfn, "rb") as f:
b = bytearray(20)
n = f.readinto(b)
self.assertEqual(n, len(short_sample))
self.assertEqual(list(b), list(sample))
# Test writing in binary mode
with open(tfn, "wb") as f:
f.write(b)
with open(tfn, "rb") as f:
self.assertEqual(f.read(), sample)
# Text mode is ambiguous; don't test
finally:
try:
os.remove(tfn)
except OSError:
pass
def test_reverse(self):
b = bytearray(b'hello')
self.assertEqual(b.reverse(), None)
self.assertEqual(b, b'olleh')
b = bytearray(b'hello1') # test even number of items
b.reverse()
self.assertEqual(b, b'1olleh')
b = bytearray()
b.reverse()
self.assertFalse(b)
    def test_clear(self):
        """bytearray.clear() empties the array in place."""
        b = bytearray(b'python')
        b.clear()
        self.assertEqual(b, b'')
        # Clearing an already-empty bytearray is a no-op.
        b = bytearray(b'')
        b.clear()
        self.assertEqual(b, b'')
        # The array stays fully usable after clear().
        b = bytearray(b'')
        b.append(ord('r'))
        b.clear()
        b.append(ord('p'))
        self.assertEqual(b, b'p')
    def test_copy(self):
        """bytearray.copy() returns an equal but independent bytearray."""
        b = bytearray(b'abc')
        bb = b.copy()
        self.assertEqual(bb, b'abc')
        b = bytearray(b'')
        bb = b.copy()
        self.assertEqual(bb, b'')
        # test that it's indeed a copy and not a reference
        b = bytearray(b'abc')
        bb = b.copy()
        self.assertEqual(b, bb)
        self.assertIsNot(b, bb)
        # Mutating the copy must not affect the original.
        bb.append(ord('d'))
        self.assertEqual(bb, b'abcd')
        self.assertEqual(b, b'abc')
def test_regexps(self):
def by(s):
return bytearray(map(ord, s))
b = by("Hello, world")
self.assertEqual(re.findall(br"\w+", b), [by("Hello"), by("world")])
    def test_setitem(self):
        """Single-position assignment of ints and __index__-able objects."""
        b = bytearray([1, 2, 3])
        b[1] = 100
        self.assertEqual(b, bytearray([1, 100, 3]))
        b[-1] = 200
        self.assertEqual(b, bytearray([1, 100, 200]))
        # Any object supporting __index__ is accepted as a byte value.
        b[0] = Indexable(10)
        self.assertEqual(b, bytearray([10, 100, 200]))
        # Out-of-range indexes raise IndexError...
        try:
            b[3] = 0
            self.fail("Didn't raise IndexError")
        except IndexError:
            pass
        try:
            b[-10] = 0
            self.fail("Didn't raise IndexError")
        except IndexError:
            pass
        # ...out-of-range byte values raise ValueError...
        try:
            b[0] = 256
            self.fail("Didn't raise ValueError")
        except ValueError:
            pass
        try:
            b[0] = Indexable(-1)
            self.fail("Didn't raise ValueError")
        except ValueError:
            pass
        # ...and non-integers raise TypeError.
        try:
            b[0] = None
            self.fail("Didn't raise TypeError")
        except TypeError:
            pass
    def test_delitem(self):
        """del of single items: first, last and middle positions."""
        b = bytearray(range(10))
        del b[0]
        self.assertEqual(b, bytearray(range(1, 10)))
        del b[-1]
        self.assertEqual(b, bytearray(range(1, 9)))
        del b[4]
        self.assertEqual(b, bytearray([1, 2, 3, 4, 6, 7, 8]))
    def test_setslice(self):
        """Plain slice assignment: replace, delete, insert, grow, shrink."""
        b = bytearray(range(10))
        self.assertEqual(list(b), list(range(10)))
        # Same-length replacement.
        b[0:5] = bytearray([1, 1, 1, 1, 1])
        self.assertEqual(b, bytearray([1, 1, 1, 1, 1, 5, 6, 7, 8, 9]))
        # Slice deletion.
        del b[0:-5]
        self.assertEqual(b, bytearray([5, 6, 7, 8, 9]))
        # Insertion through an empty target slice.
        b[0:0] = bytearray([0, 1, 2, 3, 4])
        self.assertEqual(b, bytearray(range(10)))
        # Shrinking replacement with negative bounds.
        b[-7:-3] = bytearray([100, 101])
        self.assertEqual(b, bytearray([0, 1, 2, 100, 101, 7, 8, 9]))
        # Growing replacement from a plain list of ints.
        b[3:5] = [3, 4, 5, 6]
        self.assertEqual(b, bytearray(range(10)))
        # start > stop behaves like insertion at start.
        b[3:0] = [42, 42, 42]
        self.assertEqual(b, bytearray([0, 1, 2, 42, 42, 42, 3, 4, 5, 6, 7, 8, 9]))
        # bytes and memoryview sources are accepted too.
        b[3:] = b'foo'
        self.assertEqual(b, bytearray([0, 1, 2, 102, 111, 111]))
        b[:3] = memoryview(b'foo')
        self.assertEqual(b, bytearray([102, 111, 111, 102, 111, 111]))
        b[3:4] = []
        self.assertEqual(b, bytearray([102, 111, 111, 111, 111]))
        # Non-iterables and wrongly-typed elements raise TypeError...
        for elem in [5, -5, 0, int(10e20), 'str', 2.3,
                     ['a', 'b'], [b'a', b'b'], [[]]]:
            with self.assertRaises(TypeError):
                b[3:4] = elem
        # ...while out-of-range byte values raise ValueError.
        for elem in [[254, 255, 256], [-256, 9000]]:
            with self.assertRaises(ValueError):
                b[3:4] = elem
def test_setslice_extend(self):
# Exercise the resizing logic (see issue #19087)
b = bytearray(range(100))
self.assertEqual(list(b), list(range(100)))
del b[:10]
self.assertEqual(list(b), list(range(10, 100)))
b.extend(range(100, 110))
self.assertEqual(list(b), list(range(10, 110)))
    def test_extended_set_del_slice(self):
        """Stepped slice assignment/deletion must match list semantics."""
        # A spread of small, huge, negative and None indices.
        indices = (0, None, 1, 3, 19, 300, 1<<333, -1, -2, -31, -300)
        for start in indices:
            for stop in indices:
                # Skip invalid step 0
                for step in indices[1:]:
                    L = list(range(255))
                    b = bytearray(L)
                    # Make sure we have a slice of exactly the right length,
                    # but with different data.
                    data = L[start:stop:step]
                    data.reverse()
                    # Apply the same operation to the reference list and the
                    # bytearray; the results must stay identical.
                    L[start:stop:step] = data
                    b[start:stop:step] = data
                    self.assertEqual(b, bytearray(L))
                    del L[start:stop:step]
                    del b[start:stop:step]
                    self.assertEqual(b, bytearray(L))
    def test_setslice_trap(self):
        # This test verifies that we correctly handle assigning self
        # to a slice of self (the old Lambert Meertens trap).
        b = bytearray(range(256))
        b[8:] = b
        # The tail must be replaced by a snapshot of the original value.
        self.assertEqual(b, bytearray(list(range(8)) + list(range(256))))
    def test_iconcat(self):
        """+= concatenates in place, keeping object identity."""
        b = bytearray(b"abc")
        b1 = b
        b += b"def"
        self.assertEqual(b, b"abcdef")
        self.assertEqual(b, b1)
        # In-place concat must not rebind to a new object.
        self.assertTrue(b is b1)
        b += b"xyz"
        self.assertEqual(b, b"abcdefxyz")
        # Mixing in str must fail, not silently coerce.
        try:
            b += ""
        except TypeError:
            pass
        else:
            self.fail("bytes += unicode didn't raise TypeError")
    def test_irepeat(self):
        """*= repeats in place, keeping object identity."""
        b = bytearray(b"abc")
        b1 = b
        b *= 3
        self.assertEqual(b, b"abcabcabc")
        self.assertEqual(b, b1)
        self.assertTrue(b is b1)
    def test_irepeat_1char(self):
        # Same as test_irepeat but exercising the single-byte case.
        b = bytearray(b"x")
        b1 = b
        b *= 100
        self.assertEqual(b, b"x"*100)
        self.assertEqual(b, b1)
        self.assertTrue(b is b1)
    def test_alloc(self):
        """__alloc__() reports a plausible allocation size (>= len)."""
        b = bytearray()
        alloc = b.__alloc__()
        self.assertTrue(alloc >= 0)
        seq = [alloc]
        for i in range(100):
            b += b"x"
            alloc = b.__alloc__()
            # Allocation can never be smaller than the current length.
            self.assertTrue(alloc >= len(b))
            if alloc not in seq:
                seq.append(alloc)
    def test_extend(self):
        """extend() accepts any iterable of ints, including lazy ones."""
        orig = b'hello'
        a = bytearray(orig)
        # Extending with itself must not loop forever or corrupt data.
        a.extend(a)
        self.assertEqual(a, orig + orig)
        self.assertEqual(a[5:], orig)
        a = bytearray(b'')
        # Test iterators that don't have a __length_hint__
        a.extend(map(int, orig * 25))
        a.extend(int(x) for x in orig * 25)
        self.assertEqual(a, orig * 50)
        self.assertEqual(a[-5:], orig)
        a = bytearray(b'')
        a.extend(iter(map(int, orig * 50)))
        self.assertEqual(a, orig * 50)
        self.assertEqual(a[-5:], orig)
        a = bytearray(b'')
        a.extend(list(map(int, orig * 50)))
        self.assertEqual(a, orig * 50)
        self.assertEqual(a[-5:], orig)
        a = bytearray(b'')
        # Out-of-range values fail, leaving the array unmodified
        # (len stays 0, checked below).
        self.assertRaises(ValueError, a.extend, [0, 1, 2, 256])
        self.assertRaises(ValueError, a.extend, [0, 1, 2, -1])
        self.assertEqual(len(a), 0)
        a = bytearray(b'')
        # __index__-able objects are accepted as byte values.
        a.extend([Indexable(ord('a'))])
        self.assertEqual(a, b'a')
    def test_remove(self):
        """remove() deletes the first occurrence of a byte value."""
        b = bytearray(b'hello')
        b.remove(ord('l'))
        self.assertEqual(b, b'helo')
        b.remove(ord('l'))
        self.assertEqual(b, b'heo')
        # Missing values raise ValueError; non-ints raise TypeError.
        self.assertRaises(ValueError, lambda: b.remove(ord('l')))
        self.assertRaises(ValueError, lambda: b.remove(400))
        self.assertRaises(TypeError, lambda: b.remove('e'))
        # remove first and last
        b.remove(ord('o'))
        b.remove(ord('h'))
        self.assertEqual(b, b'e')
        # A one-byte bytes object is not an int.
        self.assertRaises(TypeError, lambda: b.remove(b'e'))
        b.remove(Indexable(ord('e')))
        self.assertEqual(b, b'')
def test_pop(self):
b = bytearray(b'world')
self.assertEqual(b.pop(), ord('d'))
self.assertEqual(b.pop(0), ord('w'))
self.assertEqual(b.pop(-2), ord('r'))
self.assertRaises(IndexError, lambda: b.pop(10))
self.assertRaises(IndexError, lambda: bytearray().pop())
# test for issue #6846
self.assertEqual(bytearray(b'\xff').pop(), 0xff)
def test_nosort(self):
self.assertRaises(AttributeError, lambda: bytearray().sort())
    def test_append(self):
        """append() adds a single byte value and returns None."""
        b = bytearray(b'hell')
        b.append(ord('o'))
        self.assertEqual(b, b'hello')
        self.assertEqual(b.append(100), None)
        b = bytearray()
        b.append(ord('A'))
        self.assertEqual(len(b), 1)
        # A one-byte bytes object is not a valid byte value.
        self.assertRaises(TypeError, lambda: b.append(b'o'))
        b = bytearray()
        # __index__-able objects are accepted.
        b.append(Indexable(ord('A')))
        self.assertEqual(b, b'A')
    def test_insert(self):
        """insert() clamps out-of-range indexes instead of raising."""
        b = bytearray(b'msssspp')
        b.insert(1, ord('i'))
        b.insert(4, ord('i'))
        b.insert(-2, ord('i'))
        b.insert(1000, ord('i'))   # clamped to the end
        self.assertEqual(b, b'mississippi')
        self.assertRaises(TypeError, lambda: b.insert(0, b'1'))
        b = bytearray()
        b.insert(0, Indexable(ord('A')))
        self.assertEqual(b, b'A')
def test_copied(self):
# Issue 4348. Make sure that operations that don't mutate the array
# copy the bytes.
b = bytearray(b'abc')
self.assertFalse(b is b.replace(b'abc', b'cde', 0))
t = bytearray([i for i in range(256)])
x = bytearray(b'')
self.assertFalse(x is x.translate(t))
    def test_partition_bytearray_doesnt_share_nullstring(self):
        """Empty tails from (r)partition must be distinct objects, since
        bytearrays are mutable."""
        a, b, c = bytearray(b"x").partition(b"y")
        self.assertEqual(b, b"")
        self.assertEqual(c, b"")
        self.assertTrue(b is not c)
        # Mutating one empty result must not leak into the other.
        b += b"!"
        self.assertEqual(c, b"")
        a, b, c = bytearray(b"x").partition(b"y")
        self.assertEqual(b, b"")
        self.assertEqual(c, b"")
        # Same for rpartition
        b, c, a = bytearray(b"x").rpartition(b"y")
        self.assertEqual(b, b"")
        self.assertEqual(c, b"")
        self.assertTrue(b is not c)
        b += b"!"
        self.assertEqual(c, b"")
        c, b, a = bytearray(b"x").rpartition(b"y")
        self.assertEqual(b, b"")
        self.assertEqual(c, b"")
    def test_resize_forbidden(self):
        # #4509: can't resize a bytearray when there are buffer exports, even
        # if it wouldn't reallocate the underlying buffer.
        # Furthermore, no destructive changes to the buffer may be applied
        # before raising the error.
        b = bytearray(range(10))
        v = memoryview(b)   # exports the buffer, pinning its size
        def resize(n):
            b[1:-1] = range(n + 1, 2*n - 1)
        # Same-size slice assignment is fine even while exported.
        resize(10)
        orig = b[:]
        # Growing, shrinking and clearing must all fail cleanly.
        self.assertRaises(BufferError, resize, 11)
        self.assertEqual(b, orig)
        self.assertRaises(BufferError, resize, 9)
        self.assertEqual(b, orig)
        self.assertRaises(BufferError, resize, 0)
        self.assertEqual(b, orig)
        # Other operations implying resize
        self.assertRaises(BufferError, b.pop, 0)
        self.assertEqual(b, orig)
        self.assertRaises(BufferError, b.remove, b[1])
        self.assertEqual(b, orig)
        def delitem():
            del b[1]
        self.assertRaises(BufferError, delitem)
        self.assertEqual(b, orig)
        # deleting a non-contiguous slice
        def delslice():
            b[1:-1:2] = b""
        self.assertRaises(BufferError, delslice)
        self.assertEqual(b, orig)
class AssortedBytesTest(unittest.TestCase):
#
# Test various combinations of bytes and bytearray
#
    @check_bytes_warnings
    def test_repr_str(self):
        """str() and repr() agree and use the b'...' literal notation."""
        for f in str, repr:
            self.assertEqual(f(bytearray()), "bytearray(b'')")
            self.assertEqual(f(bytearray([0])), "bytearray(b'\\x00')")
            self.assertEqual(f(bytearray([0, 1, 254, 255])),
                             "bytearray(b'\\x00\\x01\\xfe\\xff')")
            self.assertEqual(f(b"abc"), "b'abc'")
            # Quote style flips to double quotes when the value contains "'".
            self.assertEqual(f(b"'"), '''b"'"''') # '''
            self.assertEqual(f(b"'\""), r"""b'\'"'""") # '
def test_compare_bytes_to_bytearray(self):
self.assertEqual(b"abc" == bytes(b"abc"), True)
self.assertEqual(b"ab" != bytes(b"abc"), True)
self.assertEqual(b"ab" <= bytes(b"abc"), True)
self.assertEqual(b"ab" < bytes(b"abc"), True)
self.assertEqual(b"abc" >= bytes(b"ab"), True)
self.assertEqual(b"abc" > bytes(b"ab"), True)
self.assertEqual(b"abc" != bytes(b"abc"), False)
self.assertEqual(b"ab" == bytes(b"abc"), False)
self.assertEqual(b"ab" > bytes(b"abc"), False)
self.assertEqual(b"ab" >= bytes(b"abc"), False)
self.assertEqual(b"abc" < bytes(b"ab"), False)
self.assertEqual(b"abc" <= bytes(b"ab"), False)
self.assertEqual(bytes(b"abc") == b"abc", True)
self.assertEqual(bytes(b"ab") != b"abc", True)
self.assertEqual(bytes(b"ab") <= b"abc", True)
self.assertEqual(bytes(b"ab") < b"abc", True)
self.assertEqual(bytes(b"abc") >= b"ab", True)
self.assertEqual(bytes(b"abc") > b"ab", True)
self.assertEqual(bytes(b"abc") != b"abc", False)
self.assertEqual(bytes(b"ab") == b"abc", False)
self.assertEqual(bytes(b"ab") > b"abc", False)
self.assertEqual(bytes(b"ab") >= b"abc", False)
self.assertEqual(bytes(b"abc") < b"ab", False)
self.assertEqual(bytes(b"abc") <= b"ab", False)
    @test.support.requires_docstrings
    def test_doc(self):
        # Both builtin types must carry a docstring starting with their name.
        self.assertIsNotNone(bytearray.__doc__)
        self.assertTrue(bytearray.__doc__.startswith("bytearray("), bytearray.__doc__)
        self.assertIsNotNone(bytes.__doc__)
        self.assertTrue(bytes.__doc__.startswith("bytes("), bytes.__doc__)
    def test_from_bytearray(self):
        # bytearray can be constructed from a memoryview over bytes.
        sample = bytes(b"Hello world\n\x80\x81\xfe\xff")
        buf = memoryview(sample)
        b = bytearray(buf)
        self.assertEqual(b, bytearray(sample))
    @check_bytes_warnings
    def test_to_str(self):
        # str() of bytes/bytearray yields the repr text (the decorator
        # presumably handles the BytesWarning raised under -b; confirm
        # against the helper's definition).
        self.assertEqual(str(b''), "b''")
        self.assertEqual(str(b'x'), "b'x'")
        self.assertEqual(str(b'\x80'), "b'\\x80'")
        self.assertEqual(str(bytearray(b'')), "bytearray(b'')")
        self.assertEqual(str(bytearray(b'x')), "bytearray(b'x')")
        self.assertEqual(str(bytearray(b'\x80')), "bytearray(b'\\x80')")
    def test_literal(self):
        """bytes literals equal the latin-1 encoding of the matching str
        literal; non-ASCII characters in a bytes literal are rejected."""
        tests = [
            (b"Wonderful spam", "Wonderful spam"),
            (br"Wonderful spam too", "Wonderful spam too"),
            (b"\xaa\x00\000\200", "\xaa\x00\000\200"),
            (br"\xaa\x00\000\200", r"\xaa\x00\000\200"),
        ]
        for b, s in tests:
            self.assertEqual(b, bytearray(s, 'latin-1'))
        # Characters > 127 inside a bytes literal are a SyntaxError.
        for c in range(128, 256):
            self.assertRaises(SyntaxError, eval,
                              'b"%s"' % chr(c))
def test_translate(self):
b = b'hello'
ba = bytearray(b)
rosetta = bytearray(range(0, 256))
rosetta[ord('o')] = ord('e')
c = b.translate(rosetta, b'l')
self.assertEqual(b, b'hello')
self.assertEqual(c, b'hee')
c = ba.translate(rosetta, b'l')
self.assertEqual(ba, b'hello')
self.assertEqual(c, b'hee')
c = b.translate(None, b'e')
self.assertEqual(c, b'hllo')
c = ba.translate(None, b'e')
self.assertEqual(c, b'hllo')
self.assertRaises(TypeError, b.translate, None, None)
self.assertRaises(TypeError, ba.translate, None, None)
    def test_split_bytearray(self):
        # split() accepts any bytes-like separator, e.g. a memoryview.
        self.assertEqual(b'a b'.split(memoryview(b' ')), [b'a', b'b'])
    def test_rsplit_bytearray(self):
        # Same for rsplit().
        self.assertEqual(b'a b'.rsplit(memoryview(b' ')), [b'a', b'b'])
    def test_return_self(self):
        # bytearray.replace must always return a new bytearray
        b = bytearray()
        self.assertFalse(b.replace(b'', b'') is b)
    def test_compare(self):
        """bytes/str comparisons must emit BytesWarning when -bb is set."""
        if sys.flags.bytes_warning:
            # Each comparison below must trigger exactly this warning.
            def bytes_warning():
                return test.support.check_warnings(('', BytesWarning))
            with bytes_warning():
                b'' == ''
            with bytes_warning():
                b'' != ''
            with bytes_warning():
                bytearray(b'') == ''
            with bytes_warning():
                bytearray(b'') != ''
        else:
            self.skipTest("BytesWarning is needed for this test: use -bb option")
# Optimizations:
# __iter__? (optimization)
# __reversed__? (optimization)
# XXX More string methods? (Those that don't use character properties)
# There are tests in string_tests.py that are more
# comprehensive for things like split, partition, etc.
# Unfortunately they are all bundled with tests that
# are not appropriate for bytes
# I've started porting some of those into bytearray_tests.py, we should port
# the rest that make sense (the code can be cleaned up to use modern
# unittest methods at the same time).
class BytearrayPEP3137Test(unittest.TestCase,
                           test.buffer_tests.MixinBytesBufferCommonTests):
    """PEP 3137: non-mutating methods of bytearray must return copies."""
    def marshal(self, x):
        # Hook used by the mixin to build the concrete type under test.
        return bytearray(x)
    def test_returns_new_copy(self):
        val = self.marshal(b'1234')
        # On immutable types these MAY return a reference to themselves
        # but on mutable types like bytearray they MUST return a new copy.
        for methname in ('zfill', 'rjust', 'ljust', 'center'):
            method = getattr(val, methname)
            newval = method(3)
            self.assertEqual(val, newval)
            self.assertTrue(val is not newval,
                            methname+' returned self on a mutable object')
        for expr in ('val.split()[0]', 'val.rsplit()[0]',
                     'val.partition(b".")[0]', 'val.rpartition(b".")[2]',
                     'val.splitlines()[0]', 'val.replace(b"", b"")'):
            newval = eval(expr)
            self.assertEqual(val, newval)
            self.assertTrue(val is not newval,
                            expr+' returned val on a mutable object')
        # join() over a single-item list must also produce a copy.
        sep = self.marshal(b'')
        newval = sep.join([val])
        self.assertEqual(val, newval)
        self.assertIsNot(val, newval)
class FixedStringTest(test.string_tests.BaseTest):
    """Adapter that runs the generic string tests against bytes types."""
    def fixtype(self, obj):
        # The generic tests supply str fixtures; encode them for bytes types.
        if isinstance(obj, str):
            return obj.encode("utf-8")
        return super().fixtype(obj)
    # Currently the bytes containment testing uses a single integer
    # value. This may not be the final design, but until then the
    # bytes section with in a bytes containment not valid
    def test_contains(self):
        pass   # disabled for bytes types (overrides the base test)
    def test_expandtabs(self):
        pass   # disabled for bytes types (overrides the base test)
    def test_upper(self):
        pass   # disabled for bytes types (overrides the base test)
    def test_lower(self):
        pass   # disabled for bytes types (overrides the base test)
class ByteArrayAsStringTest(FixedStringTest, unittest.TestCase):
    # Run the shared string tests with bytearray as the concrete type.
    type2test = bytearray
    contains_bytes = True
class BytesAsStringTest(FixedStringTest, unittest.TestCase):
    # Run the shared string tests with bytes as the concrete type.
    type2test = bytes
    contains_bytes = True
class SubclassTest:
    """Shared checks for subclasses of bytes/bytearray.

    Concrete test classes provide type2test (the base type) and
    subclass2test (the subclass under test).
    """
    def test_basic(self):
        self.assertTrue(issubclass(self.subclass2test, self.type2test))
        self.assertIsInstance(self.subclass2test(), self.type2test)
        a, b = b"abcd", b"efgh"
        _a, _b = self.subclass2test(a), self.subclass2test(b)
        # test comparison operators with subclass instances
        self.assertTrue(_a == _a)
        self.assertTrue(_a != _b)
        self.assertTrue(_a < _b)
        self.assertTrue(_a <= _b)
        self.assertTrue(_b >= _a)
        self.assertTrue(_b > _a)
        self.assertTrue(_a is not a)
        # test concat of subclass instances
        self.assertEqual(a + b, _a + _b)
        self.assertEqual(a + b, a + _b)
        self.assertEqual(a + b, _a + b)
        # test repeat
        self.assertTrue(a*5 == _a*5)
    def test_join(self):
        # Make sure join returns a NEW object for single item sequences
        # involving a subclass.
        # Make sure that it is of the appropriate type.
        s1 = self.subclass2test(b"abcd")
        s2 = self.type2test().join([s1])
        self.assertTrue(s1 is not s2)
        self.assertTrue(type(s2) is self.type2test, type(s2))
        # Test reverse, calling join on subclass
        s3 = s1.join([b"abcd"])
        self.assertTrue(type(s3) is self.type2test)
    def test_pickle(self):
        # Pickling must preserve type, value and instance attributes
        # across every supported protocol.
        a = self.subclass2test(b"abcd")
        a.x = 10
        a.y = self.subclass2test(b"efgh")
        for proto in range(pickle.HIGHEST_PROTOCOL + 1):
            b = pickle.loads(pickle.dumps(a, proto))
            self.assertNotEqual(id(a), id(b))
            self.assertEqual(a, b)
            self.assertEqual(a.x, b.x)
            self.assertEqual(a.y, b.y)
            self.assertEqual(type(a), type(b))
            self.assertEqual(type(a.y), type(b.y))
    def test_copy(self):
        # copy.copy/deepcopy must preserve type, value and attributes.
        a = self.subclass2test(b"abcd")
        a.x = 10
        a.y = self.subclass2test(b"efgh")
        for copy_method in (copy.copy, copy.deepcopy):
            b = copy_method(a)
            self.assertNotEqual(id(a), id(b))
            self.assertEqual(a, b)
            self.assertEqual(a.x, b.x)
            self.assertEqual(a.y, b.y)
            self.assertEqual(type(a), type(b))
            self.assertEqual(type(a.y), type(b.y))
class ByteArraySubclass(bytearray):
    # Trivial subclass used as a fixture by the SubclassTest classes.
    pass
class BytesSubclass(bytes):
    # Trivial subclass used as a fixture by the SubclassTest classes.
    pass
class ByteArraySubclassTest(SubclassTest, unittest.TestCase):
    type2test = bytearray
    subclass2test = ByteArraySubclass
    def test_init_override(self):
        # A subclass may prepend extra __init__ parameters; the extra
        # leading argument must not disturb bytearray construction.
        class subclass(bytearray):
            def __init__(me, newarg=1, *args, **kwargs):
                bytearray.__init__(me, *args, **kwargs)
        x = subclass(4, b"abcd")
        x = subclass(4, source=b"abcd")
        self.assertEqual(x, b"abcd")
        x = subclass(newarg=4, source=b"abcd")
        self.assertEqual(x, b"abcd")
class BytesSubclassTest(SubclassTest, unittest.TestCase):
    type2test = bytes
    subclass2test = BytesSubclass
if __name__ == "__main__":
unittest.main()
| lgpl-3.0 |
lxsmnv/spark | examples/src/main/python/ml/standard_scaler_example.py | 128 | 1594 | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import print_function
# $example on$
from pyspark.ml.feature import StandardScaler
# $example off$
from pyspark.sql import SparkSession
if __name__ == "__main__":
    # Entry point: obtain (or create) a SparkSession for this example.
    spark = SparkSession\
        .builder\
        .appName("StandardScalerExample")\
        .getOrCreate()

    # $example on$
    # Load the bundled sample data in libsvm format.
    dataFrame = spark.read.format("libsvm").load("data/mllib/sample_libsvm_data.txt")
    # Scale to unit standard deviation without centering the data.
    scaler = StandardScaler(inputCol="features", outputCol="scaledFeatures",
                            withStd=True, withMean=False)

    # Compute summary statistics by fitting the StandardScaler
    scalerModel = scaler.fit(dataFrame)

    # Normalize each feature to have unit standard deviation.
    scaledData = scalerModel.transform(dataFrame)
    scaledData.show()
    # $example off$

    spark.stop()
| apache-2.0 |
Drgravy/kernel_htc_m7 | tools/perf/util/setup.py | 4998 | 1330 | #!/usr/bin/python2
from distutils.core import setup, Extension
from os import getenv
from distutils.command.build_ext import build_ext as _build_ext
from distutils.command.install_lib import install_lib as _install_lib
class build_ext(_build_ext):
    # Redirect the extension build into the directories exported by the
    # caller via PYTHON_EXTBUILD_LIB / PYTHON_EXTBUILD_TMP (read below).
    def finalize_options(self):
        _build_ext.finalize_options(self)
        self.build_lib = build_lib
        self.build_temp = build_tmp
class install_lib(_install_lib):
    # Install straight out of the redirected build directory.
    def finalize_options(self):
        _install_lib.finalize_options(self)
        self.build_dir = build_lib
# Base C flags, extended with the caller's CFLAGS from the environment.
cflags = ['-fno-strict-aliasing', '-Wno-write-strings']
cflags += getenv('CFLAGS', '').split()

# Output directories chosen by the caller (see build_ext/install_lib above).
build_lib = getenv('PYTHON_EXTBUILD_LIB')
build_tmp = getenv('PYTHON_EXTBUILD_TMP')

# Read the C source list from util/python-ext-sources, skipping blank
# lines and '#' comments.  Note: file() is Python 2 only — this script
# declares #!/usr/bin/python2.
ext_sources = [f.strip() for f in file('util/python-ext-sources')
               if len(f.strip()) > 0 and f[0] != '#']

perf = Extension('perf',
                 sources = ext_sources,
                 include_dirs = ['util/include'],
                 extra_compile_args = cflags,
                 )

setup(name='perf',
      version='0.1',
      description='Interface with the Linux profiling infrastructure',
      author='Arnaldo Carvalho de Melo',
      author_email='acme@redhat.com',
      license='GPLv2',
      url='http://perf.wiki.kernel.org',
      ext_modules=[perf],
      cmdclass={'build_ext': build_ext, 'install_lib': install_lib})
| gpl-2.0 |
gimoh/ansible-modules-core | database/postgresql/postgresql_privs.py | 41 | 23600 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = """
---
module: postgresql_privs
version_added: "1.2"
short_description: Grant or revoke privileges on PostgreSQL database objects.
description:
- Grant or revoke privileges on PostgreSQL database objects.
- This module is basically a wrapper around most of the functionality of
PostgreSQL's GRANT and REVOKE statements with detection of changes
(GRANT/REVOKE I(privs) ON I(type) I(objs) TO/FROM I(roles))
options:
database:
description:
- Name of database to connect to.
- 'Alias: I(db)'
required: yes
state:
description:
- If C(present), the specified privileges are granted, if C(absent) they
are revoked.
required: no
default: present
choices: [present, absent]
privs:
description:
- Comma separated list of privileges to grant/revoke.
- 'Alias: I(priv)'
required: no
type:
description:
- Type of database object to set privileges on.
required: no
default: table
choices: [table, sequence, function, database,
schema, language, tablespace, group]
objs:
description:
- Comma separated list of database objects to set privileges on.
- If I(type) is C(table) or C(sequence), the special value
C(ALL_IN_SCHEMA) can be provided instead to specify all database
objects of type I(type) in the schema specified via I(schema). (This
also works with PostgreSQL < 9.0.)
- If I(type) is C(database), this parameter can be omitted, in which case
privileges are set for the database specified via I(database).
- 'If I(type) is I(function), colons (":") in object names will be
replaced with commas (needed to specify function signatures, see
examples)'
- 'Alias: I(obj)'
required: no
schema:
description:
- Schema that contains the database objects specified via I(objs).
- May only be provided if I(type) is C(table), C(sequence) or
C(function). Defaults to C(public) in these cases.
required: no
roles:
description:
- Comma separated list of role (user/group) names to set permissions for.
- The special value C(PUBLIC) can be provided instead to set permissions
for the implicitly defined PUBLIC group.
- 'Alias: I(role)'
required: yes
grant_option:
description:
- Whether C(role) may grant/revoke the specified privileges/group
memberships to others.
- Set to C(no) to revoke GRANT OPTION, leave unspecified to
make no changes.
- I(grant_option) only has an effect if I(state) is C(present).
- 'Alias: I(admin_option)'
required: no
choices: ['yes', 'no']
host:
description:
- Database host address. If unspecified, connect via Unix socket.
- 'Alias: I(login_host)'
default: null
required: no
port:
description:
- Database port to connect to.
required: no
default: 5432
unix_socket:
description:
- Path to a Unix domain socket for local connections.
- 'Alias: I(login_unix_socket)'
required: false
default: null
login:
description:
- The username to authenticate with.
- 'Alias: I(login_user)'
default: postgres
password:
description:
- The password to authenticate with.
- 'Alias: I(login_password))'
default: null
required: no
notes:
- Default authentication assumes that postgresql_privs is run by the
C(postgres) user on the remote host. (Ansible's C(user) or C(sudo-user)).
- This module requires Python package I(psycopg2) to be installed on the
remote host. In the default case of the remote host also being the
PostgreSQL server, PostgreSQL has to be installed there as well, obviously.
For Debian/Ubuntu-based systems, install packages I(postgresql) and
I(python-psycopg2).
- Parameters that accept comma separated lists (I(privs), I(objs), I(roles))
have singular alias names (I(priv), I(obj), I(role)).
- To revoke only C(GRANT OPTION) for a specific object, set I(state) to
C(present) and I(grant_option) to C(no) (see examples).
- Note that when revoking privileges from a role R, this role may still have
access via privileges granted to any role R is a member of including
C(PUBLIC).
- Note that when revoking privileges from a role R, you do so as the user
specified via I(login). If R has been granted the same privileges by
another user also, R can still access database objects via these privileges.
- When revoking privileges, C(RESTRICT) is assumed (see PostgreSQL docs).
requirements: [psycopg2]
author: Bernhard Weitzhofer
"""
EXAMPLES = """
# On database "library":
# GRANT SELECT, INSERT, UPDATE ON TABLE public.books, public.authors
# TO librarian, reader WITH GRANT OPTION
- postgresql_privs: >
database=library
state=present
privs=SELECT,INSERT,UPDATE
type=table
objs=books,authors
schema=public
roles=librarian,reader
grant_option=yes
# Same as above leveraging default values:
- postgresql_privs: >
db=library
privs=SELECT,INSERT,UPDATE
objs=books,authors
roles=librarian,reader
grant_option=yes
# REVOKE GRANT OPTION FOR INSERT ON TABLE books FROM reader
# Note that role "reader" will be *granted* INSERT privilege itself if this
# isn't already the case (since state=present).
- postgresql_privs: >
db=library
state=present
priv=INSERT
obj=books
role=reader
grant_option=no
# REVOKE INSERT, UPDATE ON ALL TABLES IN SCHEMA public FROM reader
# "public" is the default schema. This also works for PostgreSQL 8.x.
- postgresql_privs: >
db=library
state=absent
privs=INSERT,UPDATE
objs=ALL_IN_SCHEMA
role=reader
# GRANT ALL PRIVILEGES ON SCHEMA public, math TO librarian
- postgresql_privs: >
db=library
privs=ALL
type=schema
objs=public,math
role=librarian
# GRANT ALL PRIVILEGES ON FUNCTION math.add(int, int) TO librarian, reader
# Note the separation of arguments with colons.
- postgresql_privs: >
db=library
privs=ALL
type=function
obj=add(int:int)
schema=math
roles=librarian,reader
# GRANT librarian, reader TO alice, bob WITH ADMIN OPTION
# Note that group role memberships apply cluster-wide and therefore are not
# restricted to database "library" here.
- postgresql_privs: >
db=library
type=group
objs=librarian,reader
roles=alice,bob
admin_option=yes
# GRANT ALL PRIVILEGES ON DATABASE library TO librarian
# Note that here "db=postgres" specifies the database to connect to, not the
# database to grant privileges on (which is specified via the "objs" param)
- postgresql_privs: >
db=postgres
privs=ALL
type=database
obj=library
role=librarian
# GRANT ALL PRIVILEGES ON DATABASE library TO librarian
# If objs is omitted for type "database", it defaults to the database
# to which the connection is established
- postgresql_privs: >
db=library
privs=ALL
type=database
role=librarian
"""
try:
import psycopg2
import psycopg2.extensions
except ImportError:
psycopg2 = None
# Privilege keywords accepted by PostgreSQL's GRANT/REVOKE statements.
# (The original listed 'USAGE' twice; a frozenset de-duplicates anyway,
# so removing the duplicate changes nothing at runtime.)
VALID_PRIVS = frozenset(('SELECT', 'INSERT', 'UPDATE', 'DELETE', 'TRUNCATE',
                         'REFERENCES', 'TRIGGER', 'CREATE', 'CONNECT',
                         'TEMPORARY', 'TEMP', 'EXECUTE', 'USAGE', 'ALL'))
class Error(Exception):
    """Base class for errors raised by this module (e.g. missing schema)."""
    pass
# We don't have functools.partial in Python < 2.5
def partial(f, *args, **kwargs):
    """Partial function application.

    Returns a callable g such that g(*a, **kw) invokes f with the
    bind-time positional args first, and with keyword arguments merged
    from the bind-time kwargs, overridden by call-time ones.
    """
    def g(*g_args, **g_kwargs):
        new_kwargs = kwargs.copy()
        new_kwargs.update(g_kwargs)
        # Bug fix: pass the merged dict.  The original passed **g_kwargs,
        # silently discarding the keyword arguments bound at partial() time.
        return f(*(args + g_args), **new_kwargs)
    g.f = f
    g.args = args
    g.kwargs = kwargs
    return g
class Connection(object):
"""Wrapper around a psycopg2 connection with some convenience methods"""
    def __init__(self, params):
        """Open a psycopg2 connection from the module parameters.

        Only parameters that were actually supplied are forwarded; empty
        strings are omitted so psycopg2/libpq defaults apply.
        """
        self.database = params.database
        # To use defaults values, keyword arguments must be absent, so
        # check which values are empty and don't include in the **kw
        # dictionary
        params_map = {
            "host":"host",
            "login":"user",
            "password":"password",
            "port":"port",
            "database": "database",
        }
        kw = dict( (params_map[k], getattr(params, k)) for k in params_map
                   if getattr(params, k) != '' )
        # If a unix_socket is specified, incorporate it here.
        # NOTE(review): this relies on libpq interpreting a path-like
        # "host" value as a socket directory — confirm against psycopg2.
        is_localhost = "host" not in kw or kw["host"] == "" or kw["host"] == "localhost"
        if is_localhost and params.unix_socket != "":
            kw["host"] = params.unix_socket
        self.connection = psycopg2.connect(**kw)
        self.cursor = self.connection.cursor()
    def commit(self):
        # Commit the current transaction on the wrapped connection.
        self.connection.commit()
    def rollback(self):
        # Discard the current transaction on the wrapped connection.
        self.connection.rollback()
    @property
    def encoding(self):
        """Connection encoding in Python-compatible form"""
        return psycopg2.extensions.encodings[self.connection.encoding]
    ### Methods for querying database objects

    # PostgreSQL < 9.0 doesn't support "ALL TABLES IN SCHEMA schema"-like
    # phrases in GRANT or REVOKE statements, therefore alternative methods are
    # provided here.

    def schema_exists(self, schema):
        """Return True if a schema with the given name exists."""
        query = """SELECT count(*)
                   FROM pg_catalog.pg_namespace WHERE nspname = %s"""
        self.cursor.execute(query, (schema,))
        return self.cursor.fetchone()[0] > 0

    def get_all_tables_in_schema(self, schema):
        """Return names of all ordinary tables (relkind 'r') in schema.

        Raises Error if the schema does not exist."""
        if not self.schema_exists(schema):
            raise Error('Schema "%s" does not exist.' % schema)
        query = """SELECT relname
                   FROM pg_catalog.pg_class c
                   JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace
                   WHERE nspname = %s AND relkind = 'r'"""
        self.cursor.execute(query, (schema,))
        return [t[0] for t in self.cursor.fetchall()]

    def get_all_sequences_in_schema(self, schema):
        """Return names of all sequences (relkind 'S') in schema.

        Raises Error if the schema does not exist."""
        if not self.schema_exists(schema):
            raise Error('Schema "%s" does not exist.' % schema)
        query = """SELECT relname
                   FROM pg_catalog.pg_class c
                   JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace
                   WHERE nspname = %s AND relkind = 'S'"""
        self.cursor.execute(query, (schema,))
        return [t[0] for t in self.cursor.fetchall()]
### Methods for getting access control lists and group membership info
# To determine whether anything has changed after granting/revoking
# privileges, we compare the access control lists of the specified database
# objects before and afterwards. Python's list/string comparison should
# suffice for change detection, we should not actually have to parse ACLs.
# The same should apply to group membership information.
def get_table_acls(self, schema, tables):
query = """SELECT relacl
FROM pg_catalog.pg_class c
JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace
WHERE nspname = %s AND relkind = 'r' AND relname = ANY (%s)
ORDER BY relname"""
self.cursor.execute(query, (schema, tables))
return [t[0] for t in self.cursor.fetchall()]
def get_sequence_acls(self, schema, sequences):
query = """SELECT relacl
FROM pg_catalog.pg_class c
JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace
WHERE nspname = %s AND relkind = 'S' AND relname = ANY (%s)
ORDER BY relname"""
self.cursor.execute(query, (schema, sequences))
return [t[0] for t in self.cursor.fetchall()]
def get_function_acls(self, schema, function_signatures):
funcnames = [f.split('(', 1)[0] for f in function_signatures]
query = """SELECT proacl
FROM pg_catalog.pg_proc p
JOIN pg_catalog.pg_namespace n ON n.oid = p.pronamespace
WHERE nspname = %s AND proname = ANY (%s)
ORDER BY proname, proargtypes"""
self.cursor.execute(query, (schema, funcnames))
return [t[0] for t in self.cursor.fetchall()]
def get_schema_acls(self, schemas):
query = """SELECT nspacl FROM pg_catalog.pg_namespace
WHERE nspname = ANY (%s) ORDER BY nspname"""
self.cursor.execute(query, (schemas,))
return [t[0] for t in self.cursor.fetchall()]
def get_language_acls(self, languages):
query = """SELECT lanacl FROM pg_catalog.pg_language
WHERE lanname = ANY (%s) ORDER BY lanname"""
self.cursor.execute(query, (languages,))
return [t[0] for t in self.cursor.fetchall()]
def get_tablespace_acls(self, tablespaces):
    """Return the ACLs of the named tablespaces, ordered by name."""
    sql = """SELECT spcacl FROM pg_catalog.pg_tablespace
               WHERE spcname = ANY (%s) ORDER BY spcname"""
    self.cursor.execute(sql, (tablespaces,))
    rows = self.cursor.fetchall()
    return [row[0] for row in rows]
def get_database_acls(self, databases):
    """Return the ACLs of the named databases, ordered by database name."""
    query = """SELECT datacl FROM pg_catalog.pg_database
               WHERE datname = ANY (%s) ORDER BY datname"""
    self.cursor.execute(query, (databases,))
    collected = []
    for row in self.cursor.fetchall():
        collected.append(row[0])
    return collected
def get_group_memberships(self, groups):
    """Return (roleid, grantor, member, admin_option) rows for the groups.

    Rows are returned raw (no unpacking), ordered deterministically so
    before/after snapshots can be compared for change detection.
    """
    sql = """SELECT roleid, grantor, member, admin_option
               FROM pg_catalog.pg_auth_members am
               JOIN pg_catalog.pg_roles r ON r.oid = am.roleid
               WHERE r.rolname = ANY(%s)
               ORDER BY roleid, grantor, member"""
    self.cursor.execute(sql, (groups,))
    rows = self.cursor.fetchall()
    return rows
### Manipulating privileges
def manipulate_privs(self, obj_type, privs, objs, roles,
                     state, grant_option, schema_qualifier=None):
    """Manipulate database object privileges.

    :param obj_type: Type of database object to grant/revoke
                     privileges for.
    :param privs: Either a list of privileges to grant/revoke
                  or None if type is "group".
    :param objs: List of database objects to grant/revoke
                 privileges for.
    :param roles: Either a list of role names or "PUBLIC"
                  for the implicitly defined "PUBLIC" group
    :param state: "present" to grant privileges, "absent" to revoke.
    :param grant_option: Only for state "present": If True, set
                         grant/admin option. If False, revoke it.
                         If None, don't change grant option.
    :param schema_qualifier: Some object types ("TABLE", "SEQUENCE",
                             "FUNCTION") must be qualified by schema.
                             Ignored for other Types.
    :returns: True if the ACLs / group memberships differ before and
              after the statements ran, False otherwise. Change
              detection compares catalog snapshots, not statement
              success.
    """
    # get_status: function to get current status
    if obj_type == 'table':
        get_status = partial(self.get_table_acls, schema_qualifier)
    elif obj_type == 'sequence':
        get_status = partial(self.get_sequence_acls, schema_qualifier)
    elif obj_type == 'function':
        get_status = partial(self.get_function_acls, schema_qualifier)
    elif obj_type == 'schema':
        get_status = self.get_schema_acls
    elif obj_type == 'language':
        get_status = self.get_language_acls
    elif obj_type == 'tablespace':
        get_status = self.get_tablespace_acls
    elif obj_type == 'database':
        get_status = self.get_database_acls
    elif obj_type == 'group':
        get_status = self.get_group_memberships
    else:
        raise Error('Unsupported database object type "%s".' % obj_type)

    # Return False (nothing has changed) if there are no objs to work on.
    if not objs:
        return False

    # obj_ids: quoted db object identifiers (sometimes schema-qualified)
    if obj_type == 'function':
        obj_ids = []
        for obj in objs:
            try:
                f, args = obj.split('(', 1)
            except ValueError:
                # Fix: catch only the unpacking failure (signature without
                # an argument list); the previous bare "except:" also
                # swallowed unrelated errors such as KeyboardInterrupt.
                raise Error('Illegal function signature: "%s".' % obj)
            obj_ids.append('"%s"."%s"(%s' % (schema_qualifier, f, args))
    elif obj_type in ['table', 'sequence']:
        obj_ids = ['"%s"."%s"' % (schema_qualifier, o) for o in objs]
    else:
        obj_ids = ['"%s"' % o for o in objs]

    # set_what: SQL-fragment specifying what to set for the target roles:
    # Either group membership or privileges on objects of a certain type
    if obj_type == 'group':
        set_what = ','.join(pg_quote_identifier(i, 'role') for i in obj_ids)
    else:
        # function types are already quoted above
        if obj_type != 'function':
            obj_ids = [pg_quote_identifier(i, 'table') for i in obj_ids]
        # Note: obj_type has been checked against a set of string literals
        # and privs was escaped when it was parsed
        set_what = '%s ON %s %s' % (','.join(privs), obj_type,
                                    ','.join(obj_ids))

    # for_whom: SQL-fragment specifying for whom to set the above
    if roles == 'PUBLIC':
        for_whom = 'PUBLIC'
    else:
        for_whom = ','.join(pg_quote_identifier(r, 'role') for r in roles)

    status_before = get_status(objs)
    if state == 'present':
        if grant_option:
            if obj_type == 'group':
                query = 'GRANT %s TO %s WITH ADMIN OPTION'
            else:
                query = 'GRANT %s TO %s WITH GRANT OPTION'
        else:
            query = 'GRANT %s TO %s'
        self.cursor.execute(query % (set_what, for_whom))

        # Only revoke GRANT/ADMIN OPTION if grant_option actually is False.
        # (None means "leave the option untouched", so identity matters:
        # "is False" instead of "== False".)
        if grant_option is False:
            if obj_type == 'group':
                query = 'REVOKE ADMIN OPTION FOR %s FROM %s'
            else:
                query = 'REVOKE GRANT OPTION FOR %s FROM %s'
            self.cursor.execute(query % (set_what, for_whom))
    else:
        query = 'REVOKE %s FROM %s'
        self.cursor.execute(query % (set_what, for_whom))
    status_after = get_status(objs)
    return status_before != status_after
def main():
    # NOTE: this module uses Python 2 "except X, e" syntax throughout;
    # it predates Python 3 support.
    module = AnsibleModule(
        argument_spec = dict(
            database=dict(required=True, aliases=['db']),
            state=dict(default='present', choices=['present', 'absent']),
            privs=dict(required=False, aliases=['priv']),
            # "type" drives which of the other arguments are
            # required/allowed; validated below.
            type=dict(default='table',
                      choices=['table',
                               'sequence',
                               'function',
                               'database',
                               'schema',
                               'language',
                               'tablespace',
                               'group']),
            objs=dict(required=False, aliases=['obj']),
            schema=dict(required=False),
            roles=dict(required=True, aliases=['role']),
            grant_option=dict(required=False, type='bool',
                              aliases=['admin_option']),
            host=dict(default='', aliases=['login_host']),
            port=dict(type='int', default=5432),
            unix_socket=dict(default='', aliases=['login_unix_socket']),
            login=dict(default='postgres', aliases=['login_user']),
            password=dict(default='', aliases=['login_password'])
        ),
        supports_check_mode = True
    )

    # Create type object as namespace for module params
    # (attribute access p.foo instead of module.params['foo'])
    p = type('Params', (), module.params)

    # param "schema": default, allowed depends on param "type"
    if p.type in ['table', 'sequence', 'function']:
        p.schema = p.schema or 'public'
    elif p.schema:
        module.fail_json(msg='Argument "schema" is not allowed '
                             'for type "%s".' % p.type)

    # param "objs": default, required depends on param "type"
    if p.type == 'database':
        p.objs = p.objs or p.database
    elif not p.objs:
        module.fail_json(msg='Argument "objs" is required '
                             'for type "%s".' % p.type)

    # param "privs": allowed, required depends on param "type"
    if p.type == 'group':
        if p.privs:
            module.fail_json(msg='Argument "privs" is not allowed '
                                 'for type "group".')
    elif not p.privs:
        module.fail_json(msg='Argument "privs" is required '
                             'for type "%s".' % p.type)

    # Connect to Database
    if not psycopg2:
        module.fail_json(msg='Python module "psycopg2" must be installed.')
    try:
        conn = Connection(p)
    except psycopg2.Error, e:
        module.fail_json(msg='Could not connect to database: %s' % e)

    try:
        # privs: normalized to an upper-case frozenset and checked
        # against the module-level whitelist.
        if p.privs:
            privs = frozenset(pr.upper() for pr in p.privs.split(','))
            if not privs.issubset(VALID_PRIVS):
                module.fail_json(msg='Invalid privileges specified: %s' % privs.difference(VALID_PRIVS))
        else:
            privs = None

        # objs: "ALL_IN_SCHEMA" expands to every table/sequence in the
        # schema; anything else is a comma-separated list.
        if p.type == 'table' and p.objs == 'ALL_IN_SCHEMA':
            objs = conn.get_all_tables_in_schema(p.schema)
        elif p.type == 'sequence' and p.objs == 'ALL_IN_SCHEMA':
            objs = conn.get_all_sequences_in_schema(p.schema)
        else:
            objs = p.objs.split(',')

        # function signatures are encoded using ':' to separate args
        # (commas would clash with the list separator above)
        if p.type == 'function':
            objs = [obj.replace(':', ',') for obj in objs]

        # roles: "PUBLIC" is passed through as the special keyword.
        if p.roles == 'PUBLIC':
            roles = 'PUBLIC'
        else:
            roles = p.roles.split(',')

        changed = conn.manipulate_privs(
            obj_type = p.type,
            privs = privs,
            objs = objs,
            roles = roles,
            state = p.state,
            grant_option = p.grant_option,
            schema_qualifier=p.schema
        )

    except Error, e:
        conn.rollback()
        module.fail_json(msg=e.message)

    except psycopg2.Error, e:
        conn.rollback()
        # psycopg2 errors come in connection encoding, reencode
        msg = e.message.decode(conn.encoding).encode(sys.getdefaultencoding(),
                                                     'replace')
        module.fail_json(msg=msg)

    # In check mode nothing is committed, so the transaction is thrown
    # away; "changed" still reflects what would have happened.
    if module.check_mode:
        conn.rollback()
    else:
        conn.commit()
    module.exit_json(changed=changed)
# import module snippets
# (Ansible convention: module_utils wildcard imports go at the bottom of
# the module file, after all definitions.)
from ansible.module_utils.basic import *
from ansible.module_utils.database import *

# Run the module entry point only when executed directly.
if __name__ == '__main__':
    main()
| gpl-3.0 |
philanthropy-u/edx-platform | openedx/tests/xblock_integration/xblock_testcase.py | 3 | 20496 | """This file defines a testing framework for XBlocks. This framework
is designed to be independent of the edx-platform, to allow:
1. The tests to move into the XBlock repositories.
2. The tests to work in xblock-sdk and other runtimes.
This is a prototype. We reserve the right to change the APIs at any
point, and expect to do so a few times before freezing.
At this point, we support:
1. Python unit testing
2. Event publish testing
3. Testing multiple students
4. Testing multiple XBlocks on the same page.
We have spec'ed out how to do acceptance testing, but have not
implemented it yet. We have not spec'ed out JavaScript testing,
but believe it is important.
We do not intend to spec out XBlock/edx-platform integration testing
in the immediate future. This is best built as traditional
edx-platform tests for now.
We also do not plan to work on regression testing (taking live data
and replaying it) for now, but also believe it is important to do so
either in this framework or another.
Our next steps would be to:
* Finish this framework
* Have an appropriate test to make sure those tests are likely
running for standard XBlocks (e.g. assert those entry points
exist)
* Move more blocks out of the platform, and more tests into the
blocks themselves.
"""
from __future__ import print_function
import collections
import HTMLParser
import json
import sys
import unittest
from datetime import datetime, timedelta
import mock
import pytz
from bs4 import BeautifulSoup
from django.conf import settings
from django.urls import reverse
from xblock.plugin import Plugin
import lms.djangoapps.lms_xblock.runtime
from lms.djangoapps.courseware.tests.helpers import LoginEnrollmentTestCase
from xmodule.modulestore.tests.django_utils import SharedModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory, ItemFactory
class XBlockEventTestMixin(object):
    """Mixin for easily verifying that events were published during a
    test.

    To do:
    * Evaluate patching runtime.emit instead of log_event
    * Evaluate using @mulby's event compare library

    By design, we capture all published events. We provide two functions:
    1. assert_no_events_published verifies that no events of a
       given search specification were published.
    2. assert_event_published verifies that an event of a given search
       specification was published.

    The Mongo/bok_choy event tests in cohorts have nice exemplars for
    how such functionality might look.

    In the future, we would like to expand both search
    specifications. This is built in the edX event tracking acceptance
    tests, but is built on top of Mongo. We would also like to have
    nice error messages. This is in the edX event tracking tests, but
    would require a bit of work to tease out of the platform and make
    work in this context. We would also like to provide access to
    events for downstream consumers.

    Good things to look at as developing the code:
    * Gabe's library for parsing events. This is nice.
    * Bok choy has a nice Mongo search for events in the cohorts test
      case. It is a little slow for the general case.
    * This is originally based on a cleanup of the EventTestMixin. We
      could work to converge those in some sensible way.
    """
    def setUp(self):
        """
        We patch runtime.publish to capture all XBlock events sent during
        the test.

        This is a little bit ugly -- it's all dynamic -- so we patch
        __init__ for the system runtime to capture the
        dynamically-created publish, and catch whatever is being
        passed into it.
        """
        super(XBlockEventTestMixin, self).setUp()
        # Keep a reference to the real __init__ so the patched version
        # can delegate to it after wrapping the publish callback.
        saved_init = lms.djangoapps.lms_xblock.runtime.LmsModuleSystem.__init__

        def patched_init(runtime_self, **kwargs):
            """
            Swap out publish in the __init__
            """
            old_publish = kwargs["publish"]

            def publish(block, event_type, event):
                """
                Log the event, and call the original publish
                """
                # Record first, then forward, so the real side effects
                # still happen.
                self.events.append({"event": event, "event_type": event_type})
                old_publish(block, event_type, event)
            kwargs['publish'] = publish
            return saved_init(runtime_self, **kwargs)

        self.events = []
        lms_sys = "lms.djangoapps.lms_xblock.runtime.LmsModuleSystem.__init__"
        patcher = mock.patch(lms_sys, patched_init)
        patcher.start()
        # Unpatch automatically, even if the test body fails.
        self.addCleanup(patcher.stop)

    def assert_no_events_published(self, event_type):
        """
        Ensures no events of a given type were published since the last
        event related assertion.

        We are relatively specific since things like implicit HTTP
        events almost always do get omitted, and new event types get
        added all the time. This is not useful without a filter.
        """
        for event in self.events:
            self.assertNotEqual(event['event_type'], event_type)

    def assert_event_published(self, event_type, event_fields=None):
        """
        Verify that an event was published with the given parameters.

        We can verify that specific event fields are set using the
        optional search parameter. Matching is subset-based: an event
        matches if every key in event_fields is present with an equal
        value; extra fields on the event are ignored.
        """
        if not event_fields:
            event_fields = {}
        for event in self.events:
            if event['event_type'] == event_type:
                found = True
                for field in event_fields:
                    if field not in event['event']:
                        found = False
                    elif event_fields[field] != event['event'][field]:
                        found = False
                if found:
                    return
        # Nothing matched: this assertIn is guaranteed to fail, but it
        # produces a readable message listing the captured events.
        self.assertIn({'event_type': event_type,
                       'event': event_fields},
                      self.events)

    def reset_published_events(self):
        """
        Reset the mock tracker in order to forget about old events.
        """
        self.events = []
class GradePublishTestMixin(object):
    '''
    This checks whether a grading event was correctly published. This
    puts basic plumbing in place, but we would like to:

    * Add search parameters. Is it for the right block? The right user? This
      only handles the case of one block/one user right now.
    * Check end-to-end. We would like to see grades in the database, not just
      look for emission. Looking for emission may still be helpful if there
      are multiple events in a test.

    This is a bit of work since we need to do a lot of translation
    between XBlock and edx-platform identifiers (e.g. url_name and
    usage key).

    We could also use the runtime.publish logic above, now that we have it.
    '''
    def setUp(self):
        '''
        Hot-patch the grading emission system to capture grading events.
        '''
        super(GradePublishTestMixin, self).setUp()

        def capture_score(user_id, usage_key, score, max_score):
            '''
            Hot-patch which stores scores in a local array instead of the
            database.

            Note that to make this generic, we'd need to do both.
            '''
            self.scores.append({'student': user_id,
                                'usage': usage_key,
                                'score': score,
                                'max_score': max_score})
            # Shim a return time, defaults to 1 hour before now
            # (the real set_score returns a timestamp; callers only need
            # something plausible here).
            return datetime.now().replace(tzinfo=pytz.UTC) - timedelta(hours=1)

        self.scores = []
        patcher = mock.patch("lms.djangoapps.grades.signals.handlers.set_score", capture_score)
        patcher.start()
        # Remove the patch automatically when the test ends.
        self.addCleanup(patcher.stop)

    def assert_grade(self, grade):
        '''
        Confirm that the last grade set was equal to grade.

        HACK: In the future, this should take a user ID and a block url_name.
        '''
        self.assertEqual(grade, self.scores[-1]['score'])
class XBlockScenarioTestCaseMixin(object):
    '''
    This allows us to have test cases defined in JSON today, and in OLX
    someday.

    Until we do OLX, we're very restrictive in structure. One block
    per sequence, essentially.
    '''
    @classmethod
    def setUpClass(cls):
        """
        Create a set of pages with XBlocks on them. For now, we restrict
        ourselves to one block per learning sequence.

        Reads cls.test_configuration (a list of chapter dicts, each with
        'urlname' and 'xblocks') and populates cls.xblocks (urlname ->
        block) and cls.scenario_urls (chapter urlname -> courseware URL).
        """
        super(XBlockScenarioTestCaseMixin, cls).setUpClass()

        cls.course = CourseFactory.create(
            display_name='XBlock_Test_Course'
        )
        cls.scenario_urls = {}
        cls.xblocks = {}
        # Batch the modulestore writes; signals are suppressed during setup.
        with cls.store.bulk_operations(cls.course.id, emit_signals=False):
            for chapter_config in cls.test_configuration:
                # One chapter -> one sequential -> one vertical per entry;
                # display names are derived from the scenario urlname.
                chapter = ItemFactory.create(
                    parent=cls.course,
                    display_name="ch_" + chapter_config['urlname'],
                    category='chapter'
                )
                section = ItemFactory.create(
                    parent=chapter,
                    display_name="sec_" + chapter_config['urlname'],
                    category='sequential'
                )
                unit = ItemFactory.create(
                    parent=section,
                    display_name='unit_' + chapter_config['urlname'],
                    category='vertical'
                )
                if len(chapter_config['xblocks']) > 1:
                    raise NotImplementedError(
                        """We only support one block per page. """
                        """We will do more with OLX+learning """
                        """sequence cleanups."""
                    )
                for xblock_config in chapter_config['xblocks']:
                    xblock = ItemFactory.create(
                        parent=unit,
                        category=xblock_config['blocktype'],
                        display_name=xblock_config['urlname'],
                        **xblock_config.get("parameters", {})
                    )
                    cls.xblocks[xblock_config['urlname']] = xblock

                scenario_url = unicode(reverse(
                    'courseware_section',
                    kwargs={
                        'course_id': unicode(cls.course.id),
                        'chapter': "ch_" + chapter_config['urlname'],
                        'section': "sec_" + chapter_config['urlname']
                    }
                ))
                cls.scenario_urls[chapter_config['urlname']] = scenario_url
class XBlockStudentTestCaseMixin(object):
    '''
    Creates a default set of students for XBlock tests
    '''
    # Predefined accounts; extra ones are auto-generated on demand by
    # select_student().
    student_list = [
        {'email': 'alice@test.edx.org', 'password': 'foo'},
        {'email': 'bob@test.edx.org', 'password': 'foo'},
        {'email': 'eve@test.edx.org', 'password': 'foo'},
    ]

    def setUp(self):
        """
        Create users accounts. The first three, we give helpful names
        to. If there are any more, we auto-generate number IDs. We
        intentionally use slightly different conventions for different
        users, so we exercise more corner cases, but we could
        standardize if this is more hassle than it's worth.
        """
        super(XBlockStudentTestCaseMixin, self).setUp()
        for idx, student in enumerate(self.student_list):
            username = "u{}".format(idx)
            self._enroll_user(username, student['email'], student['password'])
        self.select_student(0)

    def _enroll_user(self, username, email, password):
        '''
        Create and activate a user account.
        '''
        self.create_account(username, email, password)
        self.activate_user(email)
        self.login(email, password)
        self.enroll(self.course, verify=True)

    def select_student(self, user_id):
        """
        Select a current user account.

        Auto-creates any missing accounts up to and including index
        *user_id*, then logs in as that student.
        """
        # If we don't have enough users, add a few more...
        # Fix: the range must run through user_id itself (user_id + 1);
        # previously it stopped one short, so selecting the first index
        # past the predefined list raised IndexError below.
        for newuser_id in range(len(self.student_list), user_id + 1):
            username = "user_{i}".format(i=newuser_id)
            email = "user_{i}@example.edx.org".format(i=newuser_id)
            password = "12345"
            self._enroll_user(username, email, password)
            self.student_list.append({'email': email, 'password': password})

        email = self.student_list[user_id]['email']
        password = self.student_list[user_id]['password']

        # ... and log in as the appropriate user
        self.login(email, password)
class XBlockTestCase(XBlockStudentTestCaseMixin,
                     XBlockScenarioTestCaseMixin,
                     XBlockEventTestMixin,
                     GradePublishTestMixin,
                     SharedModuleStoreTestCase,
                     LoginEnrollmentTestCase,
                     Plugin):
    """
    Class for all XBlock-internal test cases (as opposed to
    integration tests).
    """
    test_configuration = None  # Children must override this!

    entry_point = 'xblock.test.v0'

    @classmethod
    def setUpClass(cls):
        '''
        Unless overridden, we create two student users and one staff
        user. We create the course hierarchy based on the OLX defined
        in the XBlock test class. Until we can deal with OLX, that
        actually will come from a list.
        '''
        # Nose runs setUpClass methods even if a class decorator says to skip
        # the class: https://github.com/nose-devs/nose/issues/946
        # So, skip the test class here if we are not in the LMS.
        if settings.ROOT_URLCONF != 'lms.urls':
            raise unittest.SkipTest('Test only valid in lms')
        super(XBlockTestCase, cls).setUpClass()

    def get_handler_url(self, handler, xblock_name=None):
        """
        Get url for the specified xblock handler
        """
        # NOTE(review): this hard-codes the 'done' block type when building
        # the usage key, unlike _get_handler_url below which resolves the
        # type from test_configuration. Confirm whether any caller relies
        # on this before consolidating the two.
        return reverse('xblock_handler', kwargs={
            'course_id': unicode(self.course.id),
            'usage_id': unicode(
                self.course.id.make_usage_key('done', xblock_name)
            ),
            'handler': handler,
            'suffix': ''
        })

    def ajax(self, function, block_urlname, json_data):
        '''
        Call a json_handler in the XBlock. Return the response as
        an object containing response code and JSON.
        '''
        url = self._get_handler_url(function, block_urlname)
        resp = self.client.post(url, json.dumps(json_data), '')
        # NOTE(review): attributes are assigned on the namedtuple *class*
        # rather than an instance; it works, but a new type is created on
        # every call.
        ajax_response = collections.namedtuple('AjaxResponse',
                                               ['data', 'status_code'])
        try:
            ajax_response.data = json.loads(resp.content)
        except ValueError:
            # Dump enough context to diagnose non-JSON responses
            # (typically a login redirect) before re-raising.
            print("Invalid JSON response")
            print("(Often a redirect if e.g. not logged in)")
            print("Could not load JSON from AJAX call", file=sys.stderr)
            print("Status:", resp.status_code, file=sys.stderr)
            print("URL:", url, file=sys.stderr)
            print("Block", block_urlname, file=sys.stderr)
            print("Response", repr(resp.content), file=sys.stderr)
            raise
        ajax_response.status_code = resp.status_code
        return ajax_response

    def _get_handler_url(self, handler, xblock_name=None):
        """
        Get url for the specified xblock handler
        """
        # Resolve the block type from the scenario configuration.
        # NOTE(review): if no entry matches xblock_name, xblock_type stays
        # None and the usage key is built with a None type — confirm that
        # is never reachable in practice.
        xblock_type = None
        for scenario in self.test_configuration:
            for block in scenario["xblocks"]:
                if block["urlname"] == xblock_name:
                    xblock_type = block["blocktype"]

        key = unicode(self.course.id.make_usage_key(xblock_type, xblock_name))
        return reverse('xblock_handler', kwargs={
            'course_id': unicode(self.course.id),
            'usage_id': key,
            'handler': handler,
            'suffix': ''
        })

    def extract_block_html(self, content, urlname):
        '''This will extract the HTML of a rendered XBlock from a
        page. This should be simple. This should just be (in lxml):
            usage_id = self.xblocks[block_urlname].scope_ids.usage_id
            encoded_id = usage_id.replace(";_", "/")
        Followed by:
            page_xml = defusedxml.ElementTree.parse(StringIO.StringIO(response_content))
            page_xml.find("//[@data-usage-id={usage}]".format(usage=encoded_id))
        or
            soup_html = BeautifulSoup(response_content, 'html.parser')
            soup_html.find(**{"data-usage-id": encoded_id})

        Why isn't it? Well, the blocks are stored in a rather funky
        way in learning sequences. Ugh. Easy enough, populate the
        course with just verticals. Well, that doesn't work
        either. The whole test infrastructure populates courses with
        Studio AJAX calls, and Studio has broken support for anything
        other than course/sequence/vertical/block.

        So until we either fix Studio to support most course
        structures, fix learning sequences to not have HTML-in-JS
        (which causes many other problems as well -- including
        user-facing bugs), or fix the test infrastructure to
        create courses from OLX, we're stuck with this little hack.
        '''
        usage_id = self.xblocks[urlname].scope_ids.usage_id
        # First, we get out our <div>
        # NOTE(review): no explicit parser is passed to BeautifulSoup here,
        # so the parser chosen depends on what is installed.
        soup_html = BeautifulSoup(content)
        xblock_html = unicode(soup_html.find(id="seq_contents_0"))
        # Now, we get out the text of the <div>
        try:
            escaped_html = xblock_html.split('<')[1].split('>')[1]
        except IndexError:
            # Dump each intermediate slicing step so the failing stage of
            # the extraction hack is visible in the test output.
            print("XBlock page could not render", file=sys.stderr)
            print("(Often, a redirect if e.g. not logged in)", file=sys.stderr)
            print("URL Name:", repr(urlname), file=sys.stderr)
            print("Usage ID", repr(usage_id), file=sys.stderr)
            print("Content", repr(content), file=sys.stderr)
            print("Split 1", repr(xblock_html.split('<')), file=sys.stderr)
            print("Dice 1:", repr(xblock_html.split('<')[1]), file=sys.stderr)
            print("Split 2", repr(xblock_html.split('<')[1].split('>')), file=sys.stderr)
            print("Dice 2", repr(xblock_html.split('<')[1].split('>')[1]), file=sys.stderr)
            raise
        # Finally, we unescape the contents
        decoded_html = HTMLParser.HTMLParser().unescape(escaped_html).strip()
        return decoded_html

    def render_block(self, block_urlname):
        '''
        Return a rendering of the XBlock.

        We should include data, but with a selector dropping
        the rest of the HTML around the block.
        '''
        section = self._containing_section(block_urlname)
        # Same namedtuple-as-class pattern as in ajax() above.
        html_response = collections.namedtuple('HtmlResponse',
                                               ['status_code',
                                                'content',
                                                'debug'])
        url = self.scenario_urls[section]
        response = self.client.get(url)
        html_response.status_code = response.status_code
        response_content = response.content.decode('utf-8')
        html_response.content = self.extract_block_html(
            response_content,
            block_urlname
        )
        # We return a little bit of metadata helpful for debugging.
        # What is in this is not a defined part of the API contract.
        html_response.debug = {'url': url,
                               'section': section,
                               'block_urlname': block_urlname}
        return html_response

    def _containing_section(self, block_urlname):
        '''
        For a given block, return the parent section
        '''
        for section in self.test_configuration:
            blocks = section["xblocks"]
            for block in blocks:
                if block['urlname'] == block_urlname:
                    return section['urlname']
        raise Exception("Block not found " + block_urlname)

    def assertXBlockScreenshot(self, block_urlname, rendering=None):
        '''
        As in Bok Choi, but instead of a CSS selector, we pass a
        block_id. We may want to be able to pass an optional selector
        for picking a subelement of the block.

        This confirms status code, and that the screenshot is
        identical.

        To do: Implement
        '''
        raise NotImplementedError("We need Ben's help to finish this")
| agpl-3.0 |
beni55/scrapy | scrapy/utils/serialize.py | 138 | 1141 | import json
import datetime
import decimal
from twisted.internet import defer
from scrapy.http import Request, Response
from scrapy.item import BaseItem
class ScrapyJSONEncoder(json.JSONEncoder):
    """JSON encoder that also serializes Scrapy and Twisted objects.

    Dates/times become formatted strings, Decimals and Deferreds become
    their str() form, items become dicts, and requests/responses become
    short descriptive strings. Everything else falls through to the
    base encoder (which raises TypeError).
    """

    DATE_FORMAT = "%Y-%m-%d"
    TIME_FORMAT = "%H:%M:%S"

    def default(self, o):
        # datetime.datetime is a subclass of datetime.date, so it must
        # be tested before the plain date branch.
        if isinstance(o, datetime.datetime):
            fmt = "%s %s" % (self.DATE_FORMAT, self.TIME_FORMAT)
            return o.strftime(fmt)
        if isinstance(o, datetime.date):
            return o.strftime(self.DATE_FORMAT)
        if isinstance(o, datetime.time):
            return o.strftime(self.TIME_FORMAT)
        if isinstance(o, decimal.Decimal):
            return str(o)
        if isinstance(o, defer.Deferred):
            return str(o)
        if isinstance(o, BaseItem):
            return dict(o)
        if isinstance(o, Request):
            return "<%s %s %s>" % (type(o).__name__, o.method, o.url)
        if isinstance(o, Response):
            return "<%s %s %s>" % (type(o).__name__, o.status, o.url)
        return super(ScrapyJSONEncoder, self).default(o)
class ScrapyJSONDecoder(json.JSONDecoder):
    """Decoder counterpart of ScrapyJSONEncoder.

    Currently identical to json.JSONDecoder; it exists so callers can
    reference a Scrapy-specific decoder type symmetrically with the
    encoder above.
    """
    pass
| bsd-3-clause |
liqd/adhocracy3.mercator | src/adhocracy_meinberlin/adhocracy_meinberlin/sheets/test_burgerhaushalt.py | 2 | 2073 | import colander
from pyramid import testing
from pytest import mark
from pytest import fixture
from pytest import raises
class TestProposalSheet:
    """Tests for the Burgerhaushalt proposal resource sheet."""

    @fixture
    def meta(self):
        # Sheet metadata under test; imported inside the fixture rather
        # than at module level.
        from .burgerhaushalt import proposal_meta
        return proposal_meta

    def test_create(self, meta, context):
        from zope.interface.verify import verifyObject
        from adhocracy_core.interfaces import IResourceSheet
        from .burgerhaushalt import IProposal
        from .burgerhaushalt import ProposalSchema
        inst = meta.sheet_class(meta, context, None)
        # The sheet both declares and actually satisfies IResourceSheet,
        # and is wired to the expected isheet/schema pair.
        assert IResourceSheet.providedBy(inst)
        assert verifyObject(IResourceSheet, inst)
        assert inst.meta.isheet == IProposal
        assert inst.meta.schema_class == ProposalSchema

    def test_get_empty(self, meta, context):
        inst = meta.sheet_class(meta, context, None)
        # Defaults for a freshly created sheet with no stored data.
        wanted = {'budget': None,
                  'location_text': '',
                  }
        assert inst.get() == wanted

    @mark.usefixtures('integration')
    def test_includeme_register_sheet(self, meta, registry):
        # After full config inclusion the sheet is retrievable from the
        # content registry for a resource providing its isheet.
        context = testing.DummyResource(__provides__=meta.isheet)
        assert registry.content.get_sheet(context, meta.isheet)
class TestProposalSchema:
    """Tests for the Burgerhaushalt proposal colander schema."""

    @fixture
    def inst(self):
        from .burgerhaushalt import ProposalSchema
        return ProposalSchema()

    def test_create(self, inst):
        # budget: optional, non-negative; location_text capped at 100 chars.
        assert inst['budget'].validator.min == 0
        assert inst['budget'].required is False
        assert inst['location_text'].validator.max == 100

    # NOTE(review): "emtpy" typo in the two method names below predates
    # this pass; renaming would change the test IDs.
    def test_serialize_emtpy(self, inst):
        assert inst.serialize() == {'budget': None,
                                    'location_text': ''}

    def test_deserialize_emtpy(self, inst):
        assert inst.deserialize({}) == {}

    def test_deserialize_budget_none(self, inst):
        # An explicit None budget is dropped, same as an absent key.
        assert inst.deserialize({'budget': None}) == {}

    def test_deserialize_budget_positive_int(self, inst):
        from decimal import Decimal
        # Integer input is coerced to Decimal on deserialization.
        assert inst.deserialize({'budget': 1}) == {'budget': Decimal(1)}
| agpl-3.0 |
dhermes/google-cloud-python | datastore/google/cloud/datastore_v1/gapic/datastore_client_config.py | 4 | 2171 | config = {
"interfaces": {
"google.datastore.v1.Datastore": {
"retry_codes": {
"idempotent": ["DEADLINE_EXCEEDED", "UNAVAILABLE"],
"non_idempotent": [],
},
"retry_params": {
"default": {
"initial_retry_delay_millis": 100,
"retry_delay_multiplier": 1.3,
"max_retry_delay_millis": 60000,
"initial_rpc_timeout_millis": 60000,
"rpc_timeout_multiplier": 1.0,
"max_rpc_timeout_millis": 60000,
"total_timeout_millis": 600000,
}
},
"methods": {
"Lookup": {
"timeout_millis": 60000,
"retry_codes_name": "idempotent",
"retry_params_name": "default",
},
"RunQuery": {
"timeout_millis": 60000,
"retry_codes_name": "idempotent",
"retry_params_name": "default",
},
"BeginTransaction": {
"timeout_millis": 60000,
"retry_codes_name": "non_idempotent",
"retry_params_name": "default",
},
"Commit": {
"timeout_millis": 60000,
"retry_codes_name": "non_idempotent",
"retry_params_name": "default",
},
"Rollback": {
"timeout_millis": 60000,
"retry_codes_name": "non_idempotent",
"retry_params_name": "default",
},
"AllocateIds": {
"timeout_millis": 60000,
"retry_codes_name": "non_idempotent",
"retry_params_name": "default",
},
"ReserveIds": {
"timeout_millis": 60000,
"retry_codes_name": "idempotent",
"retry_params_name": "default",
},
},
}
}
}
| apache-2.0 |
salguarnieri/intellij-community | python/lib/Lib/encodings/cp1251.py | 593 | 13617 | """ Python Character Mapping Codec cp1251 generated from 'MAPPINGS/VENDORS/MICSFT/WINDOWS/CP1251.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
    # Stateless whole-string codec: both directions delegate to the
    # C-level charmap helpers using this module's mapping tables.

    def encode(self,input,errors='strict'):
        return codecs.charmap_encode(input,errors,encoding_table)

    def decode(self,input,errors='strict'):
        return codecs.charmap_decode(input,errors,decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
    # charmap encoding is stateless per character, so `final` is ignored
    # and only the encoded bytes ([0]) are returned.
    def encode(self, input, final=False):
        return codecs.charmap_encode(input,self.errors,encoding_table)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
    # Single-byte charset: each input byte maps independently, so no
    # state is carried between calls and `final` is ignored.
    def decode(self, input, final=False):
        return codecs.charmap_decode(input,self.errors,decoding_table)[0]
class StreamWriter(Codec,codecs.StreamWriter):
    # All behaviour is inherited: encoding from Codec, stream handling
    # from codecs.StreamWriter.
    pass
class StreamReader(Codec,codecs.StreamReader):
    # All behaviour is inherited: decoding from Codec, stream handling
    # from codecs.StreamReader.
    pass
### encodings module API
def getregentry():
    """Return the CodecInfo record that registers this module as 'cp1251'."""
    # Build one Codec instance and hand both of its bound methods to the
    # registry entry, instead of constructing the codec twice.
    codec = Codec()
    return codecs.CodecInfo(
        name='cp1251',
        encode=codec.encode,
        decode=codec.decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamreader=StreamReader,
        streamwriter=StreamWriter,
    )
### Decoding Table
decoding_table = (
    # Machine-generated by gencodec.py -- do not edit by hand.
    # 256 implicitly-concatenated one-character strings: decoding_table[b]
    # is the Unicode character that cp1251 byte value b decodes to.
    u'\x00' # 0x00 -> NULL
    u'\x01' # 0x01 -> START OF HEADING
    u'\x02' # 0x02 -> START OF TEXT
    u'\x03' # 0x03 -> END OF TEXT
    u'\x04' # 0x04 -> END OF TRANSMISSION
    u'\x05' # 0x05 -> ENQUIRY
    u'\x06' # 0x06 -> ACKNOWLEDGE
    u'\x07' # 0x07 -> BELL
    u'\x08' # 0x08 -> BACKSPACE
    u'\t' # 0x09 -> HORIZONTAL TABULATION
    u'\n' # 0x0A -> LINE FEED
    u'\x0b' # 0x0B -> VERTICAL TABULATION
    u'\x0c' # 0x0C -> FORM FEED
    u'\r' # 0x0D -> CARRIAGE RETURN
    u'\x0e' # 0x0E -> SHIFT OUT
    u'\x0f' # 0x0F -> SHIFT IN
    u'\x10' # 0x10 -> DATA LINK ESCAPE
    u'\x11' # 0x11 -> DEVICE CONTROL ONE
    u'\x12' # 0x12 -> DEVICE CONTROL TWO
    u'\x13' # 0x13 -> DEVICE CONTROL THREE
    u'\x14' # 0x14 -> DEVICE CONTROL FOUR
    u'\x15' # 0x15 -> NEGATIVE ACKNOWLEDGE
    u'\x16' # 0x16 -> SYNCHRONOUS IDLE
    u'\x17' # 0x17 -> END OF TRANSMISSION BLOCK
    u'\x18' # 0x18 -> CANCEL
    u'\x19' # 0x19 -> END OF MEDIUM
    u'\x1a' # 0x1A -> SUBSTITUTE
    u'\x1b' # 0x1B -> ESCAPE
    u'\x1c' # 0x1C -> FILE SEPARATOR
    u'\x1d' # 0x1D -> GROUP SEPARATOR
    u'\x1e' # 0x1E -> RECORD SEPARATOR
    u'\x1f' # 0x1F -> UNIT SEPARATOR
    u' ' # 0x20 -> SPACE
    u'!' # 0x21 -> EXCLAMATION MARK
    u'"' # 0x22 -> QUOTATION MARK
    u'#' # 0x23 -> NUMBER SIGN
    u'$' # 0x24 -> DOLLAR SIGN
    u'%' # 0x25 -> PERCENT SIGN
    u'&' # 0x26 -> AMPERSAND
    u"'" # 0x27 -> APOSTROPHE
    u'(' # 0x28 -> LEFT PARENTHESIS
    u')' # 0x29 -> RIGHT PARENTHESIS
    u'*' # 0x2A -> ASTERISK
    u'+' # 0x2B -> PLUS SIGN
    u',' # 0x2C -> COMMA
    u'-' # 0x2D -> HYPHEN-MINUS
    u'.' # 0x2E -> FULL STOP
    u'/' # 0x2F -> SOLIDUS
    u'0' # 0x30 -> DIGIT ZERO
    u'1' # 0x31 -> DIGIT ONE
    u'2' # 0x32 -> DIGIT TWO
    u'3' # 0x33 -> DIGIT THREE
    u'4' # 0x34 -> DIGIT FOUR
    u'5' # 0x35 -> DIGIT FIVE
    u'6' # 0x36 -> DIGIT SIX
    u'7' # 0x37 -> DIGIT SEVEN
    u'8' # 0x38 -> DIGIT EIGHT
    u'9' # 0x39 -> DIGIT NINE
    u':' # 0x3A -> COLON
    u';' # 0x3B -> SEMICOLON
    u'<' # 0x3C -> LESS-THAN SIGN
    u'=' # 0x3D -> EQUALS SIGN
    u'>' # 0x3E -> GREATER-THAN SIGN
    u'?' # 0x3F -> QUESTION MARK
    u'@' # 0x40 -> COMMERCIAL AT
    u'A' # 0x41 -> LATIN CAPITAL LETTER A
    u'B' # 0x42 -> LATIN CAPITAL LETTER B
    u'C' # 0x43 -> LATIN CAPITAL LETTER C
    u'D' # 0x44 -> LATIN CAPITAL LETTER D
    u'E' # 0x45 -> LATIN CAPITAL LETTER E
    u'F' # 0x46 -> LATIN CAPITAL LETTER F
    u'G' # 0x47 -> LATIN CAPITAL LETTER G
    u'H' # 0x48 -> LATIN CAPITAL LETTER H
    u'I' # 0x49 -> LATIN CAPITAL LETTER I
    u'J' # 0x4A -> LATIN CAPITAL LETTER J
    u'K' # 0x4B -> LATIN CAPITAL LETTER K
    u'L' # 0x4C -> LATIN CAPITAL LETTER L
    u'M' # 0x4D -> LATIN CAPITAL LETTER M
    u'N' # 0x4E -> LATIN CAPITAL LETTER N
    u'O' # 0x4F -> LATIN CAPITAL LETTER O
    u'P' # 0x50 -> LATIN CAPITAL LETTER P
    u'Q' # 0x51 -> LATIN CAPITAL LETTER Q
    u'R' # 0x52 -> LATIN CAPITAL LETTER R
    u'S' # 0x53 -> LATIN CAPITAL LETTER S
    u'T' # 0x54 -> LATIN CAPITAL LETTER T
    u'U' # 0x55 -> LATIN CAPITAL LETTER U
    u'V' # 0x56 -> LATIN CAPITAL LETTER V
    u'W' # 0x57 -> LATIN CAPITAL LETTER W
    u'X' # 0x58 -> LATIN CAPITAL LETTER X
    u'Y' # 0x59 -> LATIN CAPITAL LETTER Y
    u'Z' # 0x5A -> LATIN CAPITAL LETTER Z
    u'[' # 0x5B -> LEFT SQUARE BRACKET
    u'\\' # 0x5C -> REVERSE SOLIDUS
    u']' # 0x5D -> RIGHT SQUARE BRACKET
    u'^' # 0x5E -> CIRCUMFLEX ACCENT
    u'_' # 0x5F -> LOW LINE
    u'`' # 0x60 -> GRAVE ACCENT
    u'a' # 0x61 -> LATIN SMALL LETTER A
    u'b' # 0x62 -> LATIN SMALL LETTER B
    u'c' # 0x63 -> LATIN SMALL LETTER C
    u'd' # 0x64 -> LATIN SMALL LETTER D
    u'e' # 0x65 -> LATIN SMALL LETTER E
    u'f' # 0x66 -> LATIN SMALL LETTER F
    u'g' # 0x67 -> LATIN SMALL LETTER G
    u'h' # 0x68 -> LATIN SMALL LETTER H
    u'i' # 0x69 -> LATIN SMALL LETTER I
    u'j' # 0x6A -> LATIN SMALL LETTER J
    u'k' # 0x6B -> LATIN SMALL LETTER K
    u'l' # 0x6C -> LATIN SMALL LETTER L
    u'm' # 0x6D -> LATIN SMALL LETTER M
    u'n' # 0x6E -> LATIN SMALL LETTER N
    u'o' # 0x6F -> LATIN SMALL LETTER O
    u'p' # 0x70 -> LATIN SMALL LETTER P
    u'q' # 0x71 -> LATIN SMALL LETTER Q
    u'r' # 0x72 -> LATIN SMALL LETTER R
    u's' # 0x73 -> LATIN SMALL LETTER S
    u't' # 0x74 -> LATIN SMALL LETTER T
    u'u' # 0x75 -> LATIN SMALL LETTER U
    u'v' # 0x76 -> LATIN SMALL LETTER V
    u'w' # 0x77 -> LATIN SMALL LETTER W
    u'x' # 0x78 -> LATIN SMALL LETTER X
    u'y' # 0x79 -> LATIN SMALL LETTER Y
    u'z' # 0x7A -> LATIN SMALL LETTER Z
    u'{' # 0x7B -> LEFT CURLY BRACKET
    u'|' # 0x7C -> VERTICAL LINE
    u'}' # 0x7D -> RIGHT CURLY BRACKET
    u'~' # 0x7E -> TILDE
    u'\x7f' # 0x7F -> DELETE
    # Bytes 0x80-0xFF carry the Windows-1251 Cyrillic repertoire.
    u'\u0402' # 0x80 -> CYRILLIC CAPITAL LETTER DJE
    u'\u0403' # 0x81 -> CYRILLIC CAPITAL LETTER GJE
    u'\u201a' # 0x82 -> SINGLE LOW-9 QUOTATION MARK
    u'\u0453' # 0x83 -> CYRILLIC SMALL LETTER GJE
    u'\u201e' # 0x84 -> DOUBLE LOW-9 QUOTATION MARK
    u'\u2026' # 0x85 -> HORIZONTAL ELLIPSIS
    u'\u2020' # 0x86 -> DAGGER
    u'\u2021' # 0x87 -> DOUBLE DAGGER
    u'\u20ac' # 0x88 -> EURO SIGN
    u'\u2030' # 0x89 -> PER MILLE SIGN
    u'\u0409' # 0x8A -> CYRILLIC CAPITAL LETTER LJE
    u'\u2039' # 0x8B -> SINGLE LEFT-POINTING ANGLE QUOTATION MARK
    u'\u040a' # 0x8C -> CYRILLIC CAPITAL LETTER NJE
    u'\u040c' # 0x8D -> CYRILLIC CAPITAL LETTER KJE
    u'\u040b' # 0x8E -> CYRILLIC CAPITAL LETTER TSHE
    u'\u040f' # 0x8F -> CYRILLIC CAPITAL LETTER DZHE
    u'\u0452' # 0x90 -> CYRILLIC SMALL LETTER DJE
    u'\u2018' # 0x91 -> LEFT SINGLE QUOTATION MARK
    u'\u2019' # 0x92 -> RIGHT SINGLE QUOTATION MARK
    u'\u201c' # 0x93 -> LEFT DOUBLE QUOTATION MARK
    u'\u201d' # 0x94 -> RIGHT DOUBLE QUOTATION MARK
    u'\u2022' # 0x95 -> BULLET
    u'\u2013' # 0x96 -> EN DASH
    u'\u2014' # 0x97 -> EM DASH
    u'\ufffe' # 0x98 -> UNDEFINED
    u'\u2122' # 0x99 -> TRADE MARK SIGN
    u'\u0459' # 0x9A -> CYRILLIC SMALL LETTER LJE
    u'\u203a' # 0x9B -> SINGLE RIGHT-POINTING ANGLE QUOTATION MARK
    u'\u045a' # 0x9C -> CYRILLIC SMALL LETTER NJE
    u'\u045c' # 0x9D -> CYRILLIC SMALL LETTER KJE
    u'\u045b' # 0x9E -> CYRILLIC SMALL LETTER TSHE
    u'\u045f' # 0x9F -> CYRILLIC SMALL LETTER DZHE
    u'\xa0' # 0xA0 -> NO-BREAK SPACE
    u'\u040e' # 0xA1 -> CYRILLIC CAPITAL LETTER SHORT U
    u'\u045e' # 0xA2 -> CYRILLIC SMALL LETTER SHORT U
    u'\u0408' # 0xA3 -> CYRILLIC CAPITAL LETTER JE
    u'\xa4' # 0xA4 -> CURRENCY SIGN
    u'\u0490' # 0xA5 -> CYRILLIC CAPITAL LETTER GHE WITH UPTURN
    u'\xa6' # 0xA6 -> BROKEN BAR
    u'\xa7' # 0xA7 -> SECTION SIGN
    u'\u0401' # 0xA8 -> CYRILLIC CAPITAL LETTER IO
    u'\xa9' # 0xA9 -> COPYRIGHT SIGN
    u'\u0404' # 0xAA -> CYRILLIC CAPITAL LETTER UKRAINIAN IE
    u'\xab' # 0xAB -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
    u'\xac' # 0xAC -> NOT SIGN
    u'\xad' # 0xAD -> SOFT HYPHEN
    u'\xae' # 0xAE -> REGISTERED SIGN
    u'\u0407' # 0xAF -> CYRILLIC CAPITAL LETTER YI
    u'\xb0' # 0xB0 -> DEGREE SIGN
    u'\xb1' # 0xB1 -> PLUS-MINUS SIGN
    u'\u0406' # 0xB2 -> CYRILLIC CAPITAL LETTER BYELORUSSIAN-UKRAINIAN I
    u'\u0456' # 0xB3 -> CYRILLIC SMALL LETTER BYELORUSSIAN-UKRAINIAN I
    u'\u0491' # 0xB4 -> CYRILLIC SMALL LETTER GHE WITH UPTURN
    u'\xb5' # 0xB5 -> MICRO SIGN
    u'\xb6' # 0xB6 -> PILCROW SIGN
    u'\xb7' # 0xB7 -> MIDDLE DOT
    u'\u0451' # 0xB8 -> CYRILLIC SMALL LETTER IO
    u'\u2116' # 0xB9 -> NUMERO SIGN
    u'\u0454' # 0xBA -> CYRILLIC SMALL LETTER UKRAINIAN IE
    u'\xbb' # 0xBB -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
    u'\u0458' # 0xBC -> CYRILLIC SMALL LETTER JE
    u'\u0405' # 0xBD -> CYRILLIC CAPITAL LETTER DZE
    u'\u0455' # 0xBE -> CYRILLIC SMALL LETTER DZE
    u'\u0457' # 0xBF -> CYRILLIC SMALL LETTER YI
    u'\u0410' # 0xC0 -> CYRILLIC CAPITAL LETTER A
    u'\u0411' # 0xC1 -> CYRILLIC CAPITAL LETTER BE
    u'\u0412' # 0xC2 -> CYRILLIC CAPITAL LETTER VE
    u'\u0413' # 0xC3 -> CYRILLIC CAPITAL LETTER GHE
    u'\u0414' # 0xC4 -> CYRILLIC CAPITAL LETTER DE
    u'\u0415' # 0xC5 -> CYRILLIC CAPITAL LETTER IE
    u'\u0416' # 0xC6 -> CYRILLIC CAPITAL LETTER ZHE
    u'\u0417' # 0xC7 -> CYRILLIC CAPITAL LETTER ZE
    u'\u0418' # 0xC8 -> CYRILLIC CAPITAL LETTER I
    u'\u0419' # 0xC9 -> CYRILLIC CAPITAL LETTER SHORT I
    u'\u041a' # 0xCA -> CYRILLIC CAPITAL LETTER KA
    u'\u041b' # 0xCB -> CYRILLIC CAPITAL LETTER EL
    u'\u041c' # 0xCC -> CYRILLIC CAPITAL LETTER EM
    u'\u041d' # 0xCD -> CYRILLIC CAPITAL LETTER EN
    u'\u041e' # 0xCE -> CYRILLIC CAPITAL LETTER O
    u'\u041f' # 0xCF -> CYRILLIC CAPITAL LETTER PE
    u'\u0420' # 0xD0 -> CYRILLIC CAPITAL LETTER ER
    u'\u0421' # 0xD1 -> CYRILLIC CAPITAL LETTER ES
    u'\u0422' # 0xD2 -> CYRILLIC CAPITAL LETTER TE
    u'\u0423' # 0xD3 -> CYRILLIC CAPITAL LETTER U
    u'\u0424' # 0xD4 -> CYRILLIC CAPITAL LETTER EF
    u'\u0425' # 0xD5 -> CYRILLIC CAPITAL LETTER HA
    u'\u0426' # 0xD6 -> CYRILLIC CAPITAL LETTER TSE
    u'\u0427' # 0xD7 -> CYRILLIC CAPITAL LETTER CHE
    u'\u0428' # 0xD8 -> CYRILLIC CAPITAL LETTER SHA
    u'\u0429' # 0xD9 -> CYRILLIC CAPITAL LETTER SHCHA
    u'\u042a' # 0xDA -> CYRILLIC CAPITAL LETTER HARD SIGN
    u'\u042b' # 0xDB -> CYRILLIC CAPITAL LETTER YERU
    u'\u042c' # 0xDC -> CYRILLIC CAPITAL LETTER SOFT SIGN
    u'\u042d' # 0xDD -> CYRILLIC CAPITAL LETTER E
    u'\u042e' # 0xDE -> CYRILLIC CAPITAL LETTER YU
    u'\u042f' # 0xDF -> CYRILLIC CAPITAL LETTER YA
    u'\u0430' # 0xE0 -> CYRILLIC SMALL LETTER A
    u'\u0431' # 0xE1 -> CYRILLIC SMALL LETTER BE
    u'\u0432' # 0xE2 -> CYRILLIC SMALL LETTER VE
    u'\u0433' # 0xE3 -> CYRILLIC SMALL LETTER GHE
    u'\u0434' # 0xE4 -> CYRILLIC SMALL LETTER DE
    u'\u0435' # 0xE5 -> CYRILLIC SMALL LETTER IE
    u'\u0436' # 0xE6 -> CYRILLIC SMALL LETTER ZHE
    u'\u0437' # 0xE7 -> CYRILLIC SMALL LETTER ZE
    u'\u0438' # 0xE8 -> CYRILLIC SMALL LETTER I
    u'\u0439' # 0xE9 -> CYRILLIC SMALL LETTER SHORT I
    u'\u043a' # 0xEA -> CYRILLIC SMALL LETTER KA
    u'\u043b' # 0xEB -> CYRILLIC SMALL LETTER EL
    u'\u043c' # 0xEC -> CYRILLIC SMALL LETTER EM
    u'\u043d' # 0xED -> CYRILLIC SMALL LETTER EN
    u'\u043e' # 0xEE -> CYRILLIC SMALL LETTER O
    u'\u043f' # 0xEF -> CYRILLIC SMALL LETTER PE
    u'\u0440' # 0xF0 -> CYRILLIC SMALL LETTER ER
    u'\u0441' # 0xF1 -> CYRILLIC SMALL LETTER ES
    u'\u0442' # 0xF2 -> CYRILLIC SMALL LETTER TE
    u'\u0443' # 0xF3 -> CYRILLIC SMALL LETTER U
    u'\u0444' # 0xF4 -> CYRILLIC SMALL LETTER EF
    u'\u0445' # 0xF5 -> CYRILLIC SMALL LETTER HA
    u'\u0446' # 0xF6 -> CYRILLIC SMALL LETTER TSE
    u'\u0447' # 0xF7 -> CYRILLIC SMALL LETTER CHE
    u'\u0448' # 0xF8 -> CYRILLIC SMALL LETTER SHA
    u'\u0449' # 0xF9 -> CYRILLIC SMALL LETTER SHCHA
    u'\u044a' # 0xFA -> CYRILLIC SMALL LETTER HARD SIGN
    u'\u044b' # 0xFB -> CYRILLIC SMALL LETTER YERU
    u'\u044c' # 0xFC -> CYRILLIC SMALL LETTER SOFT SIGN
    u'\u044d' # 0xFD -> CYRILLIC SMALL LETTER E
    u'\u044e' # 0xFE -> CYRILLIC SMALL LETTER YU
    u'\u044f' # 0xFF -> CYRILLIC SMALL LETTER YA
)
### Encoding table
# Inverse mapping (character -> byte) derived from decoding_table by the
# codecs helper; used by Codec.encode above.
encoding_table = codecs.charmap_build(decoding_table)
| apache-2.0 |
edwinmosong/twitcher | twitcher/channels.py | 1 | 1230 | """
channels.py
Contains class and implementation of a TwitchTV channel.
"""
class Channel(object):
    """Wraps the decoded JSON dict for a TwitchTV channel and exposes its
    common fields as attributes.

    Missing keys come back as None from dict.get(); fields that are wrapped
    in str() therefore become the string 'None' when absent — this preserves
    the original behaviour of the class.
    """

    def __init__(self, channel_data):
        """Build a Channel from *channel_data*, the decoded API response dict.

        Raises AssertionError when *channel_data* is None (kept as
        AssertionError for backward compatibility with existing callers).
        """
        if channel_data is None:
            raise AssertionError('received empty channel_data')
        self._channel_data = channel_data
        self.display_name = str(self._channel_data.get('display_name'))
        self._links = self._channel_data.get('_links')
        self.teams = self._channel_data.get('teams')
        self.created_at = self._channel_data.get('created_at')
        self.logo = str(self._channel_data.get('logo'))
        self.updated_at = self._channel_data.get('updated_at')
        self.mature = self._channel_data.get('mature')
        self.video_banner = str(self._channel_data.get('video_banner'))
        self._id = str(self._channel_data.get('_id'))
        self.background = str(self._channel_data.get('background'))
        self.banner = str(self._channel_data.get('banner'))
        self.url = str(self._channel_data.get('url'))
        self.game = str(self._channel_data.get('game'))
        # Bug fix: the original called .strip('\n') unconditionally, which
        # raised AttributeError whenever 'status' was absent or None in the
        # payload; every other field tolerates missing keys.
        raw_status = self._channel_data.get('status')
        self.status = raw_status.strip('\n') if raw_status is not None else None
| mit |
BryceSchroeder/delvmod | examples/delvpack.py | 1 | 2123 | #!/usr/bin/env python
# Copyright 2015 Bryce Schroeder, www.bryce.pw, bryce.schroeder@gmail.com
# Wiki: http://www.ferazelhosting.net/wiki/delv
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Please do not make trouble for me or the Technical Documentation Project by
# using this software to create versions of the "Cythera Data" file which
# have bypassed registration checks.
# Also, remember that the "Cythera Data" file is copyrighted by Ambrosia and
# /or Glenn Andreas, and publishing modified versions without their permission
# would violate that copyright.
#
# "Cythera" and "Delver" are trademarks of either Glenn Andreas or
# Ambrosia Software, Inc.
from __future__ import absolute_import, division, print_function, unicode_literals
import delv
import delv.archive
import sys
# Help text printed verbatim when the script is invoked with too few
# arguments; it doubles as the script's documentation.
USAGE = '''Usage: ./delvpack.py src dest
Packs or unpacks Delver archives. If dest is an existing directory,
it will unpack src into it. Otherwise, it writes a new Delver Archive
at dest. src itself can be either an archive or a directory containing
an unpacked Delver Archive.
Note that when copying archives, it produces an archive containing the
same data, not a copy like you'd get with cp; in particular, delv does
not leave cruft in the archive like DelvEd does, and so if you copy
"Cythera Data" the resulting copy will be smaller (but still fully
functional.)
'''
# Require both src and dest (argv[0] is the script name itself).
if len(sys.argv)<3:
    print(USAGE, file=sys.stderr)
    sys.exit(-1)
# Load src and write it out to dest.
# NOTE(review): per the usage text above, Scenario() accepts either an
# archive or an unpacked directory and to_path() packs/unpacks depending on
# dest — the delv.archive API itself is not visible here; confirm upstream.
delv.archive.Scenario(sys.argv[1]).to_path(sys.argv[2])
| gpl-3.0 |
CommonsDev/dataserver | projectsheet/migrations/0009_add_field_ProjectSheetQuestionAnswer_selected_choices_id.py | 1 | 12794 | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South migration 0009: add the nullable JSON field
    ProjectSheetQuestionAnswer.selected_choices_id.
    """

    def forwards(self, orm):
        # Adding field 'ProjectSheetQuestionAnswer.selected_choices_id'
        db.add_column(u'projectsheet_projectsheetquestionanswer', 'selected_choices_id',
                      self.gf('jsonfield.fields.JSONField')(default=None, null=True, blank=True),
                      keep_default=False)

    def backwards(self, orm):
        # Deleting field 'ProjectSheetQuestionAnswer.selected_choices_id'
        db.delete_column(u'projectsheet_projectsheetquestionanswer', 'selected_choices_id')

    # Frozen ORM snapshot generated by South: describes the state of every
    # referenced model at this point in migration history. Machine-generated;
    # do not edit by hand.
    models = {
        u'auth.group': {
            'Meta': {'object_name': 'Group'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        u'auth.permission': {
            'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        u'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        u'bucket.bucket': {
            'Meta': {'object_name': 'Bucket'},
            'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'buckets_created'", 'to': u"orm['auth.User']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '200'})
        },
        u'bucket.bucketfile': {
            'Meta': {'object_name': 'BucketFile'},
            'being_edited_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'editor_of'", 'null': 'True', 'to': u"orm['auth.User']"}),
            'bucket': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'files'", 'to': u"orm['bucket.Bucket']"}),
            'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            'file': ('django.db.models.fields.files.FileField', [], {'max_length': '255'}),
            'filename': ('django.db.models.fields.CharField', [], {'max_length': '2048', 'null': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'thumbnail_url': ('django.db.models.fields.CharField', [], {'max_length': '2048'}),
            'updated_on': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'uploaded_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'uploader_of'", 'to': u"orm['auth.User']"}),
            'uploaded_on': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'})
        },
        u'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        u'projects.project': {
            'Meta': {'object_name': 'Project'},
            'baseline': ('django.db.models.fields.CharField', [], {'max_length': '250', 'null': 'True', 'blank': 'True'}),
            'begin_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
            'created_on': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'end_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': u"orm['auth.Group']", 'null': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'location': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['scout.Place']", 'null': 'True', 'blank': 'True'}),
            'progress': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['projects.ProjectProgress']", 'null': 'True', 'blank': 'True'}),
            'slug': ('autoslug.fields.AutoSlugField', [], {'unique': 'True', 'max_length': '50', 'populate_from': 'None', 'unique_with': '()'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'website': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'})
        },
        u'projects.projectprogress': {
            'Meta': {'ordering': "['order']", 'object_name': 'ProjectProgress'},
            'description': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
            'icon': ('django.db.models.fields.files.ImageField', [], {'max_length': '100'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'label': ('django.db.models.fields.CharField', [], {'max_length': '30'}),
            'order': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
            'progress_range': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['projects.ProjectProgressRange']"})
        },
        u'projects.projectprogressrange': {
            'Meta': {'object_name': 'ProjectProgressRange'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'slug': ('autoslug.fields.AutoSlugField', [], {'unique': 'True', 'max_length': '50', 'populate_from': "'name'", 'unique_with': '()'})
        },
        u'projectsheet.projectsheet': {
            'Meta': {'object_name': 'ProjectSheet'},
            'bucket': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['bucket.Bucket']", 'null': 'True', 'blank': 'True'}),
            'cover': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['bucket.BucketFile']", 'null': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'project': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['projects.Project']", 'unique': 'True'}),
            'template': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['projectsheet.ProjectSheetTemplate']"}),
            'videos': ('jsonfield.fields.JSONField', [], {'default': 'None', 'null': 'True', 'blank': 'True'})
        },
        u'projectsheet.projectsheetquestion': {
            'Meta': {'ordering': "('order',)", 'object_name': 'ProjectSheetQuestion'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'order': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
            'template': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'questions'", 'to': u"orm['projectsheet.ProjectSheetTemplate']"}),
            'text': ('django.db.models.fields.CharField', [], {'max_length': '255'})
        },
        u'projectsheet.projectsheetquestionanswer': {
            'Meta': {'ordering': "('question__order',)", 'object_name': 'ProjectSheetQuestionAnswer'},
            'answer': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'projectsheet': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'question_answers'", 'to': u"orm['projectsheet.ProjectSheet']"}),
            'question': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'answers'", 'to': u"orm['projectsheet.ProjectSheetQuestion']"}),
            'selected_choices_id': ('jsonfield.fields.JSONField', [], {'default': 'None', 'null': 'True', 'blank': 'True'})
        },
        u'projectsheet.projectsheettemplate': {
            'Meta': {'object_name': 'ProjectSheetTemplate'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'shortdesc': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
            'slug': ('autoslug.fields.AutoSlugField', [], {'unique': 'True', 'max_length': '50', 'populate_from': "'name'", 'unique_with': '()'})
        },
        u'projectsheet.questionchoice': {
            'Meta': {'object_name': 'QuestionChoice'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'question': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'choices'", 'to': u"orm['projectsheet.ProjectSheetQuestion']"}),
            'text': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
            'value': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
        },
        u'scout.place': {
            'Meta': {'object_name': 'Place'},
            'address': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'place'", 'to': u"orm['scout.PostalAddress']"}),
            'geo': ('django.contrib.gis.db.models.fields.PointField', [], {}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
        },
        u'scout.postaladdress': {
            'Meta': {'object_name': 'PostalAddress'},
            'address_locality': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
            'address_region': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
            'country': ('django.db.models.fields.CharField', [], {'max_length': '2'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'post_office_box_number': ('django.db.models.fields.CharField', [], {'max_length': '20', 'blank': 'True'}),
            'postal_code': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'street_address': ('django.db.models.fields.TextField', [], {'blank': 'True'})
        }
    }

    complete_apps = ['projectsheet']
CiviWiki/OpenCiviWiki | project/api/migrations/0036_auto_20210526_1834.py | 1 | 1398 | # Generated by Django 2.2.16 on 2021-05-26 13:04
from django.db import migrations
import taggit.managers
class Migration(migrations.Migration):
    """Auto-generated migration (2021-05-26).

    Removes Account.interests, Civi.hashtags and Thread.hashtags, and adds a
    django-taggit TaggableManager named 'tags' to each of the three models.
    """

    dependencies = [
        # taggit's tables must exist before TaggableManager fields are added.
        ('taggit', '0003_taggeditem_add_unique_index'),
        ('api', '0035_auto_20210526_0303'),
    ]

    operations = [
        # Drop the old per-model tag fields...
        migrations.RemoveField(
            model_name='account',
            name='interests',
        ),
        migrations.RemoveField(
            model_name='civi',
            name='hashtags',
        ),
        migrations.RemoveField(
            model_name='thread',
            name='hashtags',
        ),
        # ...and replace each with a unified taggit-backed 'tags' manager.
        migrations.AddField(
            model_name='account',
            name='tags',
            field=taggit.managers.TaggableManager(help_text='A comma-separated list of tags.', through='taggit.TaggedItem', to='taggit.Tag', verbose_name='Tags'),
        ),
        migrations.AddField(
            model_name='civi',
            name='tags',
            field=taggit.managers.TaggableManager(help_text='A comma-separated list of tags.', through='taggit.TaggedItem', to='taggit.Tag', verbose_name='Tags'),
        ),
        migrations.AddField(
            model_name='thread',
            name='tags',
            field=taggit.managers.TaggableManager(help_text='A comma-separated list of tags.', through='taggit.TaggedItem', to='taggit.Tag', verbose_name='Tags'),
        ),
    ]
| agpl-3.0 |
krosaen/ml-study | kaggle/forest-cover-type-prediction/preprocess.py | 1 | 1087 | from sklearn.preprocessing import StandardScaler
import functools
import operator
def make_preprocessor(td, column_summary):
    """Build a preprocessing function fitted on the training data *td*.

    td: DataFrame of training rows; the StandardScaler is fit once on its
        quantitative columns so every later frame is scaled consistently.
    column_summary: dict with
        'quantitative': list of quantitative column names, and
        'categorical': mapping of categorical column name -> list of values;
        each (c, v) pair corresponds to a binary indicator column '{c}{v}'.

    Returns (preprocess, column_summary), where preprocess(df) returns the
    transformed frame (df is also modified in place).
    """
    # It's important to scale consistently for all preprocessing based on
    # consistent scaling, so we fit once here and keep ahold of the scaler
    # for all future scaling.
    stdsc = StandardScaler()
    stdsc.fit(td[column_summary['quantitative']])

    def scale_q(df, column_summary):
        # Standardize quantitative columns with the scaler fit on td.
        df[column_summary['quantitative']] = stdsc.transform(df[column_summary['quantitative']])
        return df, column_summary

    def scale_binary_c(df, column_summary):
        # Remap one-hot indicator columns from {1, 0} to {1, -1}.
        # Bug fix: flattening with a nested comprehension replaces
        # functools.reduce(operator.add, ...), which raised TypeError when
        # 'categorical' was empty (reduce over an empty sequence has no
        # initial value); the result is identical for non-empty input.
        all_binary_cs = ['{}{}'.format(c, v)
                         for c, vs in column_summary['categorical'].items()
                         for v in vs]
        df[all_binary_cs] = df[all_binary_cs].applymap(lambda x: 1 if x == 1 else -1)
        return df, column_summary

    def preprocess(df):
        # Apply each transformation in order, threading the summary through.
        fns = [scale_q, scale_binary_c]
        cs = column_summary
        for fn in fns:
            df, cs = fn(df, cs)
        return df

    return preprocess, column_summary
| mit |
coberger/DIRAC | Resources/Storage/GFAL2_SRM2Storage.py | 2 | 6711 | """ :mod: GFAL2_SRM2Storage
=================
.. module: python
:synopsis: SRM2 module based on the GFAL2_StorageBase class.
"""
from types import StringType, ListType
# from DIRAC
from DIRAC.Resources.Storage.GFAL2_StorageBase import GFAL2_StorageBase
from DIRAC import gLogger, gConfig, S_OK, S_ERROR
from DIRAC.Resources.Utilities import checkArgumentFormat
__RCSID__ = "$Id$"
class GFAL2_SRM2Storage( GFAL2_StorageBase ):
  """ SRM2 SE class that inherits from GFAL2StorageBase.

      Adds SRM-specific handling of the gfal2 options: operation timeout,
      space token description and the list of tURL protocols. Methods that
      need non-default options set them, call the base-class implementation,
      and then restore the defaults.
  """

  def __init__( self, storageName, parameters ):
    """ Initialize the plugin and apply the default SRM gfal2 options. """
    self.log = gLogger.getSubLogger( "GFAL2_SRM2Storage", True )
    self.log.debug( "GFAL2_SRM2Storage.__init__: Initializing object" )
    GFAL2_StorageBase.__init__( self, storageName, parameters )
    self.pluginName = 'GFAL2_SRM2'

    # ##
    #   Setting the default SRM parameters here. For methods where this
    #   is not the default there is a method defined in this class, setting
    #   the proper values and then calling the base class method.
    # ##
    self.gfal2.set_opt_integer( "SRM PLUGIN", "OPERATION_TIMEOUT", self.gfal2Timeout )
    self.gfal2.set_opt_string( "SRM PLUGIN", "SPACETOKENDESC", self.spaceToken )
    self.gfal2.set_opt_string_list( "SRM PLUGIN", "TURL_PROTOCOLS", self.defaultLocalProtocols )

  def __setSRMOptionsToDefault( self ):
    ''' Resetting the SRM options back to default (timeout, space token and
        tURL protocol list), undoing any per-call overrides.
    '''
    self.gfal2.set_opt_integer( "SRM PLUGIN", "OPERATION_TIMEOUT", self.gfal2Timeout )
    self.gfal2.set_opt_string( "SRM PLUGIN", "SPACETOKENDESC", self.spaceToken )
    self.gfal2.set_opt_string_list( "SRM PLUGIN", "TURL_PROTOCOLS", self.defaultLocalProtocols )

  def _getExtendedAttributes( self, path, protocols = False ):
    ''' Changing the TURL_PROTOCOLS option for SRM in case we ask for a specific
        protocol, then delegating to the base class and restoring the defaults.

        :param self: self reference
        :param str path: path on the storage
        :param str protocols: a list of protocols
        :return S_OK( attributeDict ) if successful. Where the keys of the dict are the attributes
                and values the respective values
    '''
    if protocols:
      # Override the tURL protocol list just for this call.
      self.gfal2.set_opt_string_list( "SRM PLUGIN", "TURL_PROTOCOLS", protocols )
    res = GFAL2_StorageBase._getExtendedAttributes( self, path )
    # Restore defaults regardless of the outcome of the base-class call.
    self.__setSRMOptionsToDefault()
    return res

  def getTransportURL( self, path, protocols = False ):
    """ obtain the tURLs for the supplied path and protocols

    :param self: self reference
    :param str path: path on storage
    :param mixed protocols: protocols to use
    :returns Failed dict {path : error message}
             Successful dict {path : transport url}
             S_ERROR in case of argument problems
    """
    res = checkArgumentFormat( path )
    if not res['OK']:
      return res
    urls = res['Value']

    self.log.debug( 'GFAL2_SRM2Storage.getTransportURL: Attempting to retrieve tURL for %s paths' % len( urls ) )

    failed = {}
    successful = {}

    # Normalize the protocols argument into a list (Python 2 types module).
    if not protocols:
      protocols = self.__getProtocols()
      if not protocols['OK']:
        return protocols
      listProtocols = protocols['Value']
    elif type( protocols ) == StringType:
      listProtocols = [protocols]
    elif type( protocols ) == ListType:
      listProtocols = protocols
    else:
      return S_ERROR( "getTransportURL: Must supply desired protocols to this plug-in." )

    # If the native protocol of this storage is acceptable, the input URLs
    # are already valid tURLs - no lookup needed.
    if self.protocolParameters['Protocol'] in listProtocols:
      successful = {}
      failed = {}
      for url in urls:
        if self.isURL( url )['Value']:
          successful[url] = url
        else:
          failed[url] = 'getTransportURL: Failed to obtain turls.'

      return S_OK( {'Successful' : successful, 'Failed' : failed} )

    # Otherwise resolve each path individually through gfal2.
    for url in urls:
      res = self.__getSingleTransportURL( url, listProtocols )
      self.log.debug( 'res = %s' % res )

      if not res['OK']:
        failed[url] = res['Message']
      else:
        successful[url] = res['Value']

    return S_OK( { 'Failed' : failed, 'Successful' : successful } )

  def __getSingleTransportURL( self, path, protocols = False ):
    """ Get the tURL from path with getxattr from gfal2

    :param self: self reference
    :param str path: path on the storage
    :returns S_OK( Transport_URL ) in case of success
             S_ERROR( errStr ) in case of a failure
    """
    self.log.debug( 'GFAL2_SRM2Storage.__getSingleTransportURL: trying to retrieve tURL for %s' % path )
    if protocols:
      res = self._getExtendedAttributes( path, protocols )
    else:
      res = self._getExtendedAttributes( path )
    if res['OK']:
      attributeDict = res['Value']
      # 'user.replicas' is the extended attribute we are interested in
      if 'user.replicas' in attributeDict.keys():
        turl = attributeDict['user.replicas']
        return S_OK( turl )
      else:
        errStr = 'GFAL2_SRM2Storage.__getSingleTransportURL: Extended attribute tURL is not set.'
        self.log.debug( errStr )
        return S_ERROR( errStr )
    else:
      errStr = 'GFAL2_SRM2Storage.__getSingleTransportURL: %s' % res['Message']
      return S_ERROR( errStr )

  def __getProtocols( self ):
    """ returns list of protocols to use at a given site

    :warn: priority is given to a protocols list defined in the CS
           (configuration service); otherwise the global defaults are used.

    :param self: self reference
    """
    sections = gConfig.getSections( '/Resources/StorageElements/%s/' % ( self.name ) )
    self.log.debug( "GFAL2_SRM2Storage.__getProtocols: Trying to get protocols for storage %s." % self.name )
    if not sections['OK']:
      return sections

    protocolsList = []
    # Look for the CS section describing this plugin's protocol and take its
    # site-specific ProtocolsList override, if any.
    for section in sections['Value']:
      path = '/Resources/StorageElements/%s/%s/ProtocolName' % ( self.name, section )
      if gConfig.getValue( path, '' ) == self.protocol:
        protPath = '/Resources/StorageElements/%s/%s/ProtocolsList' % ( self.name, section )
        siteProtocols = gConfig.getValue( protPath, [] )
        if siteProtocols:
          self.log.debug( 'GFAL2_SRM2Storage.__getProtocols: Found SE protocols list to override defaults:', ', '.join( siteProtocols, ) )
          protocolsList = siteProtocols

    if not protocolsList:
      self.log.debug( "GFAL2_SRM2Storage.__getProtocols: No protocols provided, using the default protocols." )
      protocolsList = gConfig.getValue( '/Resources/StorageElements/DefaultProtocols', [] )
      self.log.debug( 'GFAL2_SRM2Storage.__getProtocols: protocolList = %s' % protocolsList )

    # if there is even no default protocol
    if not protocolsList:
      return S_ERROR( "GFAL2_SRM2Storage.__getProtocols: No local protocols defined and no defaults found." )

    return S_OK( protocolsList )
| gpl-3.0 |
ArcherCraftStore/ArcherVMPeridot | Python/Lib/idlelib/ParenMatch.py | 158 | 6627 | """ParenMatch -- An IDLE extension for parenthesis matching.
When you hit a right paren, the cursor should move briefly to the left
paren. Paren here is used generically; the matching applies to
parentheses, square brackets, and curly braces.
"""
from idlelib.HyperParser import HyperParser
from idlelib.configHandler import idleConf
_openers = {')':'(',']':'[','}':'{'}
CHECK_DELAY = 100 # milliseconds
class ParenMatch:
    """Highlight matching parentheses

    There are three supported styles of paren matching, based loosely
    on the Emacs options. The style is selected based on the
    HILITE_STYLE attribute; it can be changed using the set_style
    method.

    The supported styles are:

    default -- When a right paren is typed, highlight the matching
        left paren for 1/2 sec.

    expression -- When a right paren is typed, highlight the entire
        expression from the left paren to the right paren.

    TODO:
        - extend IDLE with configuration dialog to change options
        - implement rest of Emacs highlight styles (see below)
        - print mismatch warning in IDLE status window

    Note: In Emacs, there are several styles of highlight where the
    matching paren is highlighted whenever the cursor is immediately
    to the right of a right paren. I don't know how to do that in Tk,
    so I haven't bothered.
    """
    menudefs = [
        ('edit', [
            ("Show surrounding parens", "<<flash-paren>>"),
        ])
    ]
    # User-configurable options, read once at class-definition time from
    # the IDLE configuration.
    STYLE = idleConf.GetOption('extensions','ParenMatch','style',
            default='expression')
    FLASH_DELAY = idleConf.GetOption('extensions','ParenMatch','flash-delay',
            type='int',default=500)
    HILITE_CONFIG = idleConf.GetHighlight(idleConf.CurrentTheme(),'hilite')
    BELL = idleConf.GetOption('extensions','ParenMatch','bell',
            type='bool',default=1)
    RESTORE_VIRTUAL_EVENT_NAME = "<<parenmatch-check-restore>>"
    # We want the restore event be called before the usual return and
    # backspace events.
    RESTORE_SEQUENCES = ("<KeyPress>", "<ButtonPress>",
                         "<Key-Return>", "<Key-BackSpace>")
    def __init__(self, editwin):
        self.editwin = editwin
        self.text = editwin.text
        # Bind the check-restore event to the function restore_event,
        # so that we can then use activate_restore (which calls event_add)
        # and deactivate_restore (which calls event_delete).
        editwin.text.bind(self.RESTORE_VIRTUAL_EVENT_NAME,
                          self.restore_event)
        # counter is bumped to invalidate stale timers; see
        # handle_restore_timer().
        self.counter = 0
        self.is_restore_active = 0
        self.set_style(self.STYLE)
    def activate_restore(self):
        "Start routing key/button presses to the restore event (idempotent)."
        if not self.is_restore_active:
            for seq in self.RESTORE_SEQUENCES:
                self.text.event_add(self.RESTORE_VIRTUAL_EVENT_NAME, seq)
            self.is_restore_active = True
    def deactivate_restore(self):
        "Stop routing key/button presses to the restore event (idempotent)."
        if self.is_restore_active:
            for seq in self.RESTORE_SEQUENCES:
                self.text.event_delete(self.RESTORE_VIRTUAL_EVENT_NAME, seq)
            self.is_restore_active = False
    def set_style(self, style):
        "Select the tagging and timeout strategies for the given style name."
        self.STYLE = style
        if style == "default":
            self.create_tag = self.create_tag_default
            self.set_timeout = self.set_timeout_last
        elif style == "expression":
            self.create_tag = self.create_tag_expression
            self.set_timeout = self.set_timeout_none
    def flash_paren_event(self, event):
        "Handle <<flash-paren>>: highlight the brackets around the cursor."
        indices = HyperParser(self.editwin, "insert").get_surrounding_brackets()
        if indices is None:
            self.warn_mismatched()
            return
        self.activate_restore()
        self.create_tag(indices)
        self.set_timeout_last()
    def paren_closed_event(self, event):
        "Handle a typed closer: highlight back to its matching opener."
        # If it was a shortcut and not really a closing paren, quit.
        closer = self.text.get("insert-1c")
        if closer not in _openers:
            return
        hp = HyperParser(self.editwin, "insert-1c")
        if not hp.is_in_code():
            return
        indices = hp.get_surrounding_brackets(_openers[closer], True)
        if indices is None:
            self.warn_mismatched()
            return
        self.activate_restore()
        self.create_tag(indices)
        self.set_timeout()
    def restore_event(self, event=None):
        "Remove the highlight and cancel any pending timer."
        self.text.tag_delete("paren")
        self.deactivate_restore()
        self.counter += 1   # disable the last timer, if there is one.
    def handle_restore_timer(self, timer_count):
        # Only restore if no newer timer has been started since.
        if timer_count == self.counter:
            self.restore_event()
    def warn_mismatched(self):
        # No matching bracket found; optionally ring the bell.
        if self.BELL:
            self.text.bell()
    # any one of the create_tag_XXX methods can be used depending on
    # the style
    def create_tag_default(self, indices):
        """Highlight the single paren that matches"""
        self.text.tag_add("paren", indices[0])
        self.text.tag_config("paren", self.HILITE_CONFIG)
    def create_tag_expression(self, indices):
        """Highlight the entire expression"""
        if self.text.get(indices[1]) in (')', ']', '}'):
            rightindex = indices[1]+"+1c"
        else:
            rightindex = indices[1]
        self.text.tag_add("paren", indices[0], rightindex)
        self.text.tag_config("paren", self.HILITE_CONFIG)
    # any one of the set_timeout_XXX methods can be used depending on
    # the style
    def set_timeout_none(self):
        """Highlight will remain until user input turns it off
        or the insert has moved"""
        # After CHECK_DELAY, call a function which disables the "paren" tag
        # if the event is for the most recent timer and the insert has changed,
        # or schedules another call for itself.
        self.counter += 1
        def callme(callme, self=self, c=self.counter,
                   index=self.text.index("insert")):
            if index != self.text.index("insert"):
                self.handle_restore_timer(c)
            else:
                self.editwin.text_frame.after(CHECK_DELAY, callme, callme)
        self.editwin.text_frame.after(CHECK_DELAY, callme, callme)
    def set_timeout_last(self):
        """The last highlight created will be removed after .5 sec"""
        # associate a counter with an event; only disable the "paren"
        # tag if the event is for the most recent timer.
        self.counter += 1
        self.editwin.text_frame.after(self.FLASH_DELAY,
                                      lambda self=self, c=self.counter: \
                                      self.handle_restore_timer(c))
| apache-2.0 |
sbellem/django | tests/deprecation/tests.py | 199 | 7253 | from __future__ import unicode_literals
import os
import unittest
import warnings
from django.test import SimpleTestCase
from django.test.utils import reset_warning_registry
from django.utils import six
from django.utils.deprecation import RenameMethodsBase
from django.utils.encoding import force_text
class RenameManagerMethods(RenameMethodsBase):
    # Declarative mapping consumed by RenameMethodsBase: defining or calling
    # `old` warns (DeprecationWarning) in favor of `new`.
    renamed_methods = (
        ('old', 'new', DeprecationWarning),
    )
class RenameMethodsTests(SimpleTestCase):
    """
    Tests the `RenameMethodsBase` type introduced to rename `get_query_set`
    to `get_queryset` across the code base following #15363.
    """
    def test_class_definition_warnings(self):
        """
        Ensure a warning is raised upon class definition to suggest renaming
        the faulty method.
        """
        reset_warning_registry()
        with warnings.catch_warnings(record=True) as recorded:
            warnings.simplefilter('always')
            class Manager(six.with_metaclass(RenameManagerMethods)):
                def old(self):
                    pass
            self.assertEqual(len(recorded), 1)
            msg = str(recorded[0].message)
            self.assertEqual(msg,
                '`Manager.old` method should be renamed `new`.')
    def test_get_new_defined(self):
        """
        Ensure `old` complains and not `new` when only `new` is defined.
        """
        with warnings.catch_warnings(record=True) as recorded:
            # Ignore the class-definition warning; record only call warnings.
            warnings.simplefilter('ignore')
            class Manager(six.with_metaclass(RenameManagerMethods)):
                def new(self):
                    pass
            warnings.simplefilter('always')
            manager = Manager()
            manager.new()
            self.assertEqual(len(recorded), 0)
            manager.old()
            self.assertEqual(len(recorded), 1)
            msg = str(recorded.pop().message)
            self.assertEqual(msg,
                '`Manager.old` is deprecated, use `new` instead.')
    def test_get_old_defined(self):
        """
        Ensure `old` complains when only `old` is defined.
        """
        with warnings.catch_warnings(record=True) as recorded:
            warnings.simplefilter('ignore')
            class Manager(six.with_metaclass(RenameManagerMethods)):
                def old(self):
                    pass
            warnings.simplefilter('always')
            manager = Manager()
            manager.new()
            self.assertEqual(len(recorded), 0)
            manager.old()
            self.assertEqual(len(recorded), 1)
            msg = str(recorded.pop().message)
            self.assertEqual(msg,
                '`Manager.old` is deprecated, use `new` instead.')
    def test_deprecated_subclass_renamed(self):
        """
        Ensure the correct warnings are raised when a class that didn't rename
        `old` subclass one that did.
        """
        with warnings.catch_warnings(record=True) as recorded:
            warnings.simplefilter('ignore')
            class Renamed(six.with_metaclass(RenameManagerMethods)):
                def new(self):
                    pass
            class Deprecated(Renamed):
                def old(self):
                    super(Deprecated, self).old()
            warnings.simplefilter('always')
            deprecated = Deprecated()
            deprecated.new()
            self.assertEqual(len(recorded), 1)
            msg = str(recorded.pop().message)
            self.assertEqual(msg,
                '`Renamed.old` is deprecated, use `new` instead.')
            # Reset the record before exercising the deprecated path.
            recorded[:] = []
            deprecated.old()
            self.assertEqual(len(recorded), 2)
            msgs = [str(warning.message) for warning in recorded]
            self.assertEqual(msgs, [
                '`Deprecated.old` is deprecated, use `new` instead.',
                '`Renamed.old` is deprecated, use `new` instead.',
            ])
    def test_renamed_subclass_deprecated(self):
        """
        Ensure the correct warnings are raised when a class that renamed
        `old` subclass one that didn't.
        """
        with warnings.catch_warnings(record=True) as recorded:
            warnings.simplefilter('ignore')
            class Deprecated(six.with_metaclass(RenameManagerMethods)):
                def old(self):
                    pass
            class Renamed(Deprecated):
                def new(self):
                    super(Renamed, self).new()
            warnings.simplefilter('always')
            renamed = Renamed()
            renamed.new()
            self.assertEqual(len(recorded), 0)
            renamed.old()
            self.assertEqual(len(recorded), 1)
            msg = str(recorded.pop().message)
            self.assertEqual(msg,
                '`Renamed.old` is deprecated, use `new` instead.')
    def test_deprecated_subclass_renamed_and_mixins(self):
        """
        Ensure the correct warnings are raised when a subclass inherit from a
        class that renamed `old` and mixins that may or may not have renamed
        `new`.
        """
        with warnings.catch_warnings(record=True) as recorded:
            warnings.simplefilter('ignore')
            class Renamed(six.with_metaclass(RenameManagerMethods)):
                def new(self):
                    pass
            class RenamedMixin(object):
                def new(self):
                    super(RenamedMixin, self).new()
            class DeprecatedMixin(object):
                def old(self):
                    super(DeprecatedMixin, self).old()
            class Deprecated(DeprecatedMixin, RenamedMixin, Renamed):
                pass
            warnings.simplefilter('always')
            deprecated = Deprecated()
            deprecated.new()
            self.assertEqual(len(recorded), 1)
            msg = str(recorded.pop().message)
            self.assertEqual(msg,
                '`RenamedMixin.old` is deprecated, use `new` instead.')
            deprecated.old()
            self.assertEqual(len(recorded), 2)
            msgs = [str(warning.message) for warning in recorded]
            self.assertEqual(msgs, [
                '`DeprecatedMixin.old` is deprecated, use `new` instead.',
                '`RenamedMixin.old` is deprecated, use `new` instead.',
            ])
class DeprecatingSimpleTestCaseUrls(unittest.TestCase):
    def test_deprecation(self):
        """
        Ensure the correct warning is raised when SimpleTestCase.urls is used.
        """
        class TempTestCase(SimpleTestCase):
            urls = 'tests.urls'
            def test(self):
                pass
        with warnings.catch_warnings(record=True) as recorded:
            warnings.filterwarnings('always')
            suite = unittest.TestLoader().loadTestsFromTestCase(TempTestCase)
            # Run the suite with output discarded; only the warning matters.
            with open(os.devnull, 'w') as devnull:
                unittest.TextTestRunner(stream=devnull, verbosity=2).run(suite)
            msg = force_text(recorded.pop().message)
            self.assertEqual(msg,
                "SimpleTestCase.urls is deprecated and will be removed in "
                "Django 1.10. Use @override_settings(ROOT_URLCONF=...) "
                "in TempTestCase instead.")
| bsd-3-clause |
ravenac95/PyYAML | tests/lib/test_yaml_ext.py | 57 | 10646 |
import _yaml, yaml
import types, pprint
# Keep references to the pure-Python implementations so that _tear_down()
# can restore them after _set_up() patches in the C-extension variants.
yaml.PyBaseLoader = yaml.BaseLoader
yaml.PySafeLoader = yaml.SafeLoader
yaml.PyLoader = yaml.Loader
yaml.PyBaseDumper = yaml.BaseDumper
yaml.PySafeDumper = yaml.SafeDumper
yaml.PyDumper = yaml.Dumper
# Wrappers around the module-level API that default to the libyaml (C)
# Loader/Dumper classes; the originals are saved for restoration.
old_scan = yaml.scan
def new_scan(stream, Loader=yaml.CLoader):
    return old_scan(stream, Loader)
old_parse = yaml.parse
def new_parse(stream, Loader=yaml.CLoader):
    return old_parse(stream, Loader)
old_compose = yaml.compose
def new_compose(stream, Loader=yaml.CLoader):
    return old_compose(stream, Loader)
old_compose_all = yaml.compose_all
def new_compose_all(stream, Loader=yaml.CLoader):
    return old_compose_all(stream, Loader)
old_load = yaml.load
def new_load(stream, Loader=yaml.CLoader):
    return old_load(stream, Loader)
old_load_all = yaml.load_all
def new_load_all(stream, Loader=yaml.CLoader):
    return old_load_all(stream, Loader)
old_safe_load = yaml.safe_load
def new_safe_load(stream):
    return old_load(stream, yaml.CSafeLoader)
old_safe_load_all = yaml.safe_load_all
def new_safe_load_all(stream):
    return old_load_all(stream, yaml.CSafeLoader)
old_emit = yaml.emit
def new_emit(events, stream=None, Dumper=yaml.CDumper, **kwds):
    return old_emit(events, stream, Dumper, **kwds)
old_serialize = yaml.serialize
def new_serialize(node, stream=None, Dumper=yaml.CDumper, **kwds):
    # stream defaults to None to mirror yaml.serialize() and the sibling
    # new_serialize_all() wrapper (previously it was a required argument).
    return old_serialize(node, stream, Dumper, **kwds)
# Dump-side wrappers: same pattern, defaulting to the C Dumper classes.
old_serialize_all = yaml.serialize_all
def new_serialize_all(nodes, stream=None, Dumper=yaml.CDumper, **kwds):
    return old_serialize_all(nodes, stream, Dumper, **kwds)
old_dump = yaml.dump
def new_dump(data, stream=None, Dumper=yaml.CDumper, **kwds):
    return old_dump(data, stream, Dumper, **kwds)
old_dump_all = yaml.dump_all
def new_dump_all(documents, stream=None, Dumper=yaml.CDumper, **kwds):
    return old_dump_all(documents, stream, Dumper, **kwds)
old_safe_dump = yaml.safe_dump
def new_safe_dump(data, stream=None, **kwds):
    return old_dump(data, stream, yaml.CSafeDumper, **kwds)
old_safe_dump_all = yaml.safe_dump_all
def new_safe_dump_all(documents, stream=None, **kwds):
    return old_dump_all(documents, stream, yaml.CSafeDumper, **kwds)
def _set_up():
    """Monkey-patch the yaml module so its public API uses the C (libyaml)
    classes and the new_* wrappers defined above."""
    for cls in ('BaseLoader', 'SafeLoader', 'Loader',
                'BaseDumper', 'SafeDumper', 'Dumper'):
        setattr(yaml, cls, getattr(yaml, 'C' + cls))
    for func in ('scan', 'parse', 'compose', 'compose_all',
                 'load', 'load_all', 'safe_load', 'safe_load_all',
                 'emit', 'serialize', 'serialize_all',
                 'dump', 'dump_all', 'safe_dump', 'safe_dump_all'):
        setattr(yaml, func, globals()['new_' + func])
def _tear_down():
    """Restore the pure-Python yaml implementation saved at import time,
    undoing the patching performed by _set_up()."""
    for cls in ('BaseLoader', 'SafeLoader', 'Loader',
                'BaseDumper', 'SafeDumper', 'Dumper'):
        setattr(yaml, cls, getattr(yaml, 'Py' + cls))
    for func in ('scan', 'parse', 'compose', 'compose_all',
                 'load', 'load_all', 'safe_load', 'safe_load_all',
                 'emit', 'serialize', 'serialize_all',
                 'dump', 'dump_all', 'safe_dump', 'safe_dump_all'):
        setattr(yaml, func, globals()['old_' + func])
def test_c_version(verbose=False):
    # The _yaml extension must report a consistent version tuple and string.
    if verbose:
        print _yaml.get_version()
        print _yaml.get_version_string()
    assert ("%s.%s.%s" % _yaml.get_version()) == _yaml.get_version_string(), \
            (_yaml.get_version(), _yaml.get_version_string())
def _compare_scanners(py_data, c_data, verbose):
    # Tokenize the same input with the pure-Python and the C scanner and
    # check that the token streams agree in class, value and source marks.
    py_tokens = list(yaml.scan(py_data, Loader=yaml.PyLoader))
    c_tokens = []
    try:
        for token in yaml.scan(c_data, Loader=yaml.CLoader):
            c_tokens.append(token)
        assert len(py_tokens) == len(c_tokens), (len(py_tokens), len(c_tokens))
        for py_token, c_token in zip(py_tokens, c_tokens):
            assert py_token.__class__ == c_token.__class__, (py_token, c_token)
            if hasattr(py_token, 'value'):
                assert py_token.value == c_token.value, (py_token, c_token)
            # Marks are not compared for the stream-end token.
            if isinstance(py_token, yaml.StreamEndToken):
                continue
            py_start = (py_token.start_mark.index, py_token.start_mark.line, py_token.start_mark.column)
            py_end = (py_token.end_mark.index, py_token.end_mark.line, py_token.end_mark.column)
            c_start = (c_token.start_mark.index, c_token.start_mark.line, c_token.start_mark.column)
            c_end = (c_token.end_mark.index, c_token.end_mark.line, c_token.end_mark.column)
            assert py_start == c_start, (py_start, c_start)
            assert py_end == c_end, (py_end, c_end)
    finally:
        if verbose:
            print "PY_TOKENS:"
            pprint.pprint(py_tokens)
            print "C_TOKENS:"
            pprint.pprint(c_tokens)
def test_c_scanner(data_filename, canonical_filename, verbose=False):
    # Compare scanners on both file objects and in-memory strings.
    _compare_scanners(open(data_filename, 'rb'),
            open(data_filename, 'rb'), verbose)
    _compare_scanners(open(data_filename, 'rb').read(),
            open(data_filename, 'rb').read(), verbose)
    _compare_scanners(open(canonical_filename, 'rb'),
            open(canonical_filename, 'rb'), verbose)
    _compare_scanners(open(canonical_filename, 'rb').read(),
            open(canonical_filename, 'rb').read(), verbose)
# Markers consumed by the test harness (fixture extensions / skip rules).
test_c_scanner.unittest = ['.data', '.canonical']
test_c_scanner.skip = ['.skip-ext']
def _compare_parsers(py_data, c_data, verbose):
    # Parse the same input with the pure-Python and the C parser and check
    # the event streams agree attribute-by-attribute.
    py_events = list(yaml.parse(py_data, Loader=yaml.PyLoader))
    c_events = []
    try:
        for event in yaml.parse(c_data, Loader=yaml.CLoader):
            c_events.append(event)
        assert len(py_events) == len(c_events), (len(py_events), len(c_events))
        for py_event, c_event in zip(py_events, c_events):
            for attribute in ['__class__', 'anchor', 'tag', 'implicit',
                                'value', 'explicit', 'version', 'tags']:
                py_value = getattr(py_event, attribute, None)
                c_value = getattr(c_event, attribute, None)
                assert py_value == c_value, (py_event, c_event, attribute)
    finally:
        if verbose:
            print "PY_EVENTS:"
            pprint.pprint(py_events)
            print "C_EVENTS:"
            pprint.pprint(c_events)
def test_c_parser(data_filename, canonical_filename, verbose=False):
    # Compare parsers on both file objects and in-memory strings.
    _compare_parsers(open(data_filename, 'rb'),
            open(data_filename, 'rb'), verbose)
    _compare_parsers(open(data_filename, 'rb').read(),
            open(data_filename, 'rb').read(), verbose)
    _compare_parsers(open(canonical_filename, 'rb'),
            open(canonical_filename, 'rb'), verbose)
    _compare_parsers(open(canonical_filename, 'rb').read(),
            open(canonical_filename, 'rb').read(), verbose)
# Markers consumed by the test harness (fixture extensions / skip rules).
test_c_parser.unittest = ['.data', '.canonical']
test_c_parser.skip = ['.skip-ext']
def _compare_emitters(data, verbose):
    # Emit events with the C emitter, then re-parse the output with both
    # parsers and check all three event streams agree (modulo default tags
    # and the 'explicit' flag, which the emitter may normalize).
    events = list(yaml.parse(data, Loader=yaml.PyLoader))
    c_data = yaml.emit(events, Dumper=yaml.CDumper)
    if verbose:
        print c_data
    py_events = list(yaml.parse(c_data, Loader=yaml.PyLoader))
    c_events = list(yaml.parse(c_data, Loader=yaml.CLoader))
    try:
        assert len(events) == len(py_events), (len(events), len(py_events))
        assert len(events) == len(c_events), (len(events), len(c_events))
        for event, py_event, c_event in zip(events, py_events, c_events):
            for attribute in ['__class__', 'anchor', 'tag', 'implicit',
                                'value', 'explicit', 'version', 'tags']:
                value = getattr(event, attribute, None)
                py_value = getattr(py_event, attribute, None)
                c_value = getattr(c_event, attribute, None)
                # The non-specific tags None and '!' are interchangeable.
                if attribute == 'tag' and value in [None, u'!'] \
                        and py_value in [None, u'!'] and c_value in [None, u'!']:
                    continue
                if attribute == 'explicit' and (py_value or c_value):
                    continue
                assert value == py_value, (event, py_event, attribute)
                assert value == c_value, (event, c_event, attribute)
    finally:
        if verbose:
            print "EVENTS:"
            pprint.pprint(events)
            print "PY_EVENTS:"
            pprint.pprint(py_events)
            print "C_EVENTS:"
            pprint.pprint(c_events)
def test_c_emitter(data_filename, canonical_filename, verbose=False):
    # Round-trip both the data file and its canonical form.
    _compare_emitters(open(data_filename, 'rb').read(), verbose)
    _compare_emitters(open(canonical_filename, 'rb').read(), verbose)
# Markers consumed by the test harness (fixture extensions / skip rules).
test_c_emitter.unittest = ['.data', '.canonical']
test_c_emitter.skip = ['.skip-ext']
def wrap_ext_function(function):
    # Wrap a test function so the C implementation is patched in for the
    # duration of the call and restored afterwards, and rename it *_ext.
    def wrapper(*args, **kwds):
        _set_up()
        try:
            function(*args, **kwds)
        finally:
            _tear_down()
    try:
        wrapper.func_name = '%s_ext' % function.func_name
    except TypeError:
        # func_name may be read-only on some Python builds.
        pass
    wrapper.unittest_name = '%s_ext' % function.func_name
    wrapper.unittest = function.unittest
    wrapper.skip = getattr(function, 'skip', [])+['.skip-ext']
    return wrapper
def wrap_ext(collections):
    # Create *_ext variants of every unittest-marked function found in the
    # given modules (or dicts) and publish them in this module's globals.
    functions = []
    if not isinstance(collections, list):
        collections = [collections]
    for collection in collections:
        if not isinstance(collection, dict):
            collection = vars(collection)
        keys = collection.keys()
        keys.sort()
        for key in keys:
            value = collection[key]
            if isinstance(value, types.FunctionType) and hasattr(value, 'unittest'):
                functions.append(wrap_ext_function(value))
    for function in functions:
        # Guard against name collisions with existing module-level tests.
        assert function.unittest_name not in globals()
        globals()[function.unittest_name] = function
# Import the pure-Python test modules and re-export *_ext variants of all
# of their unittest-marked functions so the test runner picks them up.
import test_tokens, test_structure, test_errors, test_resolver, test_constructor, \
        test_emitter, test_representer, test_recursive, test_input_output
wrap_ext([test_tokens, test_structure, test_errors, test_resolver, test_constructor,
    test_emitter, test_representer, test_recursive, test_input_output])
if __name__ == '__main__':
    import test_appliance
    test_appliance.run(globals())
| mit |
jounex/hue | desktop/core/ext-py/Django-1.6.10/tests/requests/tests.py | 47 | 31824 | # -*- encoding: utf-8 -*-
from __future__ import unicode_literals
import time
import warnings
from datetime import datetime, timedelta
from io import BytesIO
from django.db import connection, connections, DEFAULT_DB_ALIAS
from django.core import signals
from django.core.exceptions import SuspiciousOperation
from django.core.handlers.wsgi import WSGIRequest, LimitedStream
from django.http import HttpRequest, HttpResponse, parse_cookie, build_request_repr, UnreadablePostError
from django.test import SimpleTestCase, TransactionTestCase
from django.test.client import FakePayload
from django.test.utils import override_settings, str_prefix
from django.utils import six
from django.utils.unittest import skipIf
from django.utils.http import cookie_date, urlencode
from django.utils.six.moves.urllib.parse import urlencode as original_urlencode
from django.utils.timezone import utc
class RequestsTests(SimpleTestCase):
    def test_httprequest(self):
        # A freshly constructed HttpRequest exposes empty GET/POST/COOKIES/META.
        request = HttpRequest()
        self.assertEqual(list(request.GET.keys()), [])
        self.assertEqual(list(request.POST.keys()), [])
        self.assertEqual(list(request.COOKIES.keys()), [])
        self.assertEqual(list(request.META.keys()), [])
    def test_httprequest_repr(self):
        # repr() includes path plus the four mappings; build_request_repr()
        # matches it and honors the *_override keyword arguments.
        request = HttpRequest()
        request.path = '/somepath/'
        request.GET = {'get-key': 'get-value'}
        request.POST = {'post-key': 'post-value'}
        request.COOKIES = {'post-key': 'post-value'}
        request.META = {'post-key': 'post-value'}
        self.assertEqual(repr(request), str_prefix("<HttpRequest\npath:/somepath/,\nGET:{%(_)s'get-key': %(_)s'get-value'},\nPOST:{%(_)s'post-key': %(_)s'post-value'},\nCOOKIES:{%(_)s'post-key': %(_)s'post-value'},\nMETA:{%(_)s'post-key': %(_)s'post-value'}>"))
        self.assertEqual(build_request_repr(request), repr(request))
        self.assertEqual(build_request_repr(request, path_override='/otherpath/', GET_override={'a': 'b'}, POST_override={'c': 'd'}, COOKIES_override={'e': 'f'}, META_override={'g': 'h'}),
            str_prefix("<HttpRequest\npath:/otherpath/,\nGET:{%(_)s'a': %(_)s'b'},\nPOST:{%(_)s'c': %(_)s'd'},\nCOOKIES:{%(_)s'e': %(_)s'f'},\nMETA:{%(_)s'g': %(_)s'h'}>"))
    def test_wsgirequest(self):
        # A minimal WSGI environ yields empty request data and a normalized
        # META (SCRIPT_NAME is added with an empty value).
        request = WSGIRequest({'PATH_INFO': 'bogus', 'REQUEST_METHOD': 'bogus', 'wsgi.input': BytesIO(b'')})
        self.assertEqual(list(request.GET.keys()), [])
        self.assertEqual(list(request.POST.keys()), [])
        self.assertEqual(list(request.COOKIES.keys()), [])
        self.assertEqual(set(request.META.keys()), set(['PATH_INFO', 'REQUEST_METHOD', 'SCRIPT_NAME', 'wsgi.input']))
        self.assertEqual(request.META['PATH_INFO'], 'bogus')
        self.assertEqual(request.META['REQUEST_METHOD'], 'bogus')
        self.assertEqual(request.META['SCRIPT_NAME'], '')
    def test_wsgirequest_with_script_name(self):
        """
        Ensure that the request's path is correctly assembled, regardless of
        whether or not the SCRIPT_NAME has a trailing slash.
        Refs #20169.
        """
        # With trailing slash
        request = WSGIRequest({'PATH_INFO': '/somepath/', 'SCRIPT_NAME': '/PREFIX/', 'REQUEST_METHOD': 'get', 'wsgi.input': BytesIO(b'')})
        self.assertEqual(request.path, '/PREFIX/somepath/')
        # Without trailing slash
        request = WSGIRequest({'PATH_INFO': '/somepath/', 'SCRIPT_NAME': '/PREFIX', 'REQUEST_METHOD': 'get', 'wsgi.input': BytesIO(b'')})
        self.assertEqual(request.path, '/PREFIX/somepath/')
    def test_wsgirequest_with_force_script_name(self):
        """
        Ensure that the FORCE_SCRIPT_NAME setting takes precedence over the
        request's SCRIPT_NAME environment parameter.
        Refs #20169.
        """
        with override_settings(FORCE_SCRIPT_NAME='/FORCED_PREFIX/'):
            request = WSGIRequest({'PATH_INFO': '/somepath/', 'SCRIPT_NAME': '/PREFIX/', 'REQUEST_METHOD': 'get', 'wsgi.input': BytesIO(b'')})
            self.assertEqual(request.path, '/FORCED_PREFIX/somepath/')
    def test_wsgirequest_path_with_force_script_name_trailing_slash(self):
        """
        Ensure that the request's path is correctly assembled, regardless of
        whether or not the FORCE_SCRIPT_NAME setting has a trailing slash.
        Refs #20169.
        """
        # With trailing slash
        with override_settings(FORCE_SCRIPT_NAME='/FORCED_PREFIX/'):
            request = WSGIRequest({'PATH_INFO': '/somepath/', 'REQUEST_METHOD': 'get', 'wsgi.input': BytesIO(b'')})
            self.assertEqual(request.path, '/FORCED_PREFIX/somepath/')
        # Without trailing slash
        with override_settings(FORCE_SCRIPT_NAME='/FORCED_PREFIX'):
            request = WSGIRequest({'PATH_INFO': '/somepath/', 'REQUEST_METHOD': 'get', 'wsgi.input': BytesIO(b'')})
            self.assertEqual(request.path, '/FORCED_PREFIX/somepath/')
    def test_wsgirequest_repr(self):
        # Same repr contract as HttpRequest, for the WSGI subclass.
        request = WSGIRequest({'PATH_INFO': '/somepath/', 'REQUEST_METHOD': 'get', 'wsgi.input': BytesIO(b'')})
        request.GET = {'get-key': 'get-value'}
        request.POST = {'post-key': 'post-value'}
        request.COOKIES = {'post-key': 'post-value'}
        request.META = {'post-key': 'post-value'}
        self.assertEqual(repr(request), str_prefix("<WSGIRequest\npath:/somepath/,\nGET:{%(_)s'get-key': %(_)s'get-value'},\nPOST:{%(_)s'post-key': %(_)s'post-value'},\nCOOKIES:{%(_)s'post-key': %(_)s'post-value'},\nMETA:{%(_)s'post-key': %(_)s'post-value'}>"))
        self.assertEqual(build_request_repr(request), repr(request))
        self.assertEqual(build_request_repr(request, path_override='/otherpath/', GET_override={'a': 'b'}, POST_override={'c': 'd'}, COOKIES_override={'e': 'f'}, META_override={'g': 'h'}),
            str_prefix("<WSGIRequest\npath:/otherpath/,\nGET:{%(_)s'a': %(_)s'b'},\nPOST:{%(_)s'c': %(_)s'd'},\nCOOKIES:{%(_)s'e': %(_)s'f'},\nMETA:{%(_)s'g': %(_)s'h'}>"))
    def test_wsgirequest_path_info(self):
        # Non-ASCII PATH_INFO must round-trip correctly through the WSGI
        # environ encoding rules (utf-8 bytes, latin-1 native string on PY3).
        def wsgi_str(path_info):
            path_info = path_info.encode('utf-8')  # Actual URL sent by the browser (bytestring)
            if six.PY3:
                path_info = path_info.decode('iso-8859-1')  # Value in the WSGI environ dict (native string)
            return path_info
        # Regression for #19468
        request = WSGIRequest({'PATH_INFO': wsgi_str("/سلام/"), 'REQUEST_METHOD': 'get', 'wsgi.input': BytesIO(b'')})
        self.assertEqual(request.path, "/سلام/")
    def test_parse_cookie(self):
        # Cookies with invalid names are dropped rather than raising.
        self.assertEqual(parse_cookie('invalid@key=true'), {})
    def test_httprequest_location(self):
        # build_absolute_uri() passes absolute locations through unchanged and
        # prefixes scheme/host for server-relative ones (even with colons).
        request = HttpRequest()
        self.assertEqual(request.build_absolute_uri(location="https://www.example.com/asdf"),
            'https://www.example.com/asdf')
        request.get_host = lambda: 'www.example.com'
        request.path = ''
        self.assertEqual(request.build_absolute_uri(location="/path/with:colons"),
            'http://www.example.com/path/with:colons')
    @override_settings(
        USE_X_FORWARDED_HOST=False,
        ALLOWED_HOSTS=[
            'forward.com', 'example.com', 'internal.com', '12.34.56.78',
            '[2001:19f0:feee::dead:beef:cafe]', 'xn--4ca9at.com',
            '.multitenant.com', 'INSENSITIVE.com',
        ])
    def test_http_get_host(self):
        """get_host() validates against ALLOWED_HOSTS and ignores the
        X-Forwarded-Host header when USE_X_FORWARDED_HOST is off."""
        # Check if X_FORWARDED_HOST is provided.
        request = HttpRequest()
        request.META = {
            'HTTP_X_FORWARDED_HOST': 'forward.com',
            'HTTP_HOST': 'example.com',
            'SERVER_NAME': 'internal.com',
            'SERVER_PORT': 80,
        }
        # X_FORWARDED_HOST is ignored.
        self.assertEqual(request.get_host(), 'example.com')
        # Check if X_FORWARDED_HOST isn't provided.
        request = HttpRequest()
        request.META = {
            'HTTP_HOST': 'example.com',
            'SERVER_NAME': 'internal.com',
            'SERVER_PORT': 80,
        }
        self.assertEqual(request.get_host(), 'example.com')
        # Check if HTTP_HOST isn't provided.
        request = HttpRequest()
        request.META = {
            'SERVER_NAME': 'internal.com',
            'SERVER_PORT': 80,
        }
        self.assertEqual(request.get_host(), 'internal.com')
        # Check if HTTP_HOST isn't provided, and we're on a nonstandard port
        request = HttpRequest()
        request.META = {
            'SERVER_NAME': 'internal.com',
            'SERVER_PORT': 8042,
        }
        self.assertEqual(request.get_host(), 'internal.com:8042')
        # Poisoned host headers are rejected as suspicious
        legit_hosts = [
            'example.com',
            'example.com:80',
            '12.34.56.78',
            '12.34.56.78:443',
            '[2001:19f0:feee::dead:beef:cafe]',
            '[2001:19f0:feee::dead:beef:cafe]:8080',
            'xn--4ca9at.com', # Punnycode for öäü.com
            'anything.multitenant.com',
            'multitenant.com',
            'insensitive.com',
        ]
        poisoned_hosts = [
            'example.com@evil.tld',
            'example.com:dr.frankenstein@evil.tld',
            'example.com:dr.frankenstein@evil.tld:80',
            'example.com:80/badpath',
            'example.com: recovermypassword.com',
            'other.com', # not in ALLOWED_HOSTS
        ]
        for host in legit_hosts:
            request = HttpRequest()
            request.META = {
                'HTTP_HOST': host,
            }
            request.get_host()
        for host in poisoned_hosts:
            with self.assertRaises(SuspiciousOperation):
                request = HttpRequest()
                request.META = {
                    'HTTP_HOST': host,
                }
                request.get_host()
    @override_settings(USE_X_FORWARDED_HOST=True, ALLOWED_HOSTS=['*'])
    def test_http_get_host_with_x_forwarded_host(self):
        """With USE_X_FORWARDED_HOST on, get_host() prefers the
        X-Forwarded-Host header but still rejects poisoned values."""
        # Check if X_FORWARDED_HOST is provided.
        request = HttpRequest()
        request.META = {
            'HTTP_X_FORWARDED_HOST': 'forward.com',
            'HTTP_HOST': 'example.com',
            'SERVER_NAME': 'internal.com',
            'SERVER_PORT': 80,
        }
        # X_FORWARDED_HOST is obeyed.
        self.assertEqual(request.get_host(), 'forward.com')
        # Check if X_FORWARDED_HOST isn't provided.
        request = HttpRequest()
        request.META = {
            'HTTP_HOST': 'example.com',
            'SERVER_NAME': 'internal.com',
            'SERVER_PORT': 80,
        }
        self.assertEqual(request.get_host(), 'example.com')
        # Check if HTTP_HOST isn't provided.
        request = HttpRequest()
        request.META = {
            'SERVER_NAME': 'internal.com',
            'SERVER_PORT': 80,
        }
        self.assertEqual(request.get_host(), 'internal.com')
        # Check if HTTP_HOST isn't provided, and we're on a nonstandard port
        request = HttpRequest()
        request.META = {
            'SERVER_NAME': 'internal.com',
            'SERVER_PORT': 8042,
        }
        self.assertEqual(request.get_host(), 'internal.com:8042')
        # Poisoned host headers are rejected as suspicious
        legit_hosts = [
            'example.com',
            'example.com:80',
            '12.34.56.78',
            '12.34.56.78:443',
            '[2001:19f0:feee::dead:beef:cafe]',
            '[2001:19f0:feee::dead:beef:cafe]:8080',
            'xn--4ca9at.com', # Punnycode for öäü.com
        ]
        poisoned_hosts = [
            'example.com@evil.tld',
            'example.com:dr.frankenstein@evil.tld',
            'example.com:dr.frankenstein@evil.tld:80',
            'example.com:80/badpath',
            'example.com: recovermypassword.com',
        ]
        for host in legit_hosts:
            request = HttpRequest()
            request.META = {
                'HTTP_HOST': host,
            }
            request.get_host()
        for host in poisoned_hosts:
            with self.assertRaises(SuspiciousOperation):
                request = HttpRequest()
                request.META = {
                    'HTTP_HOST': host,
                }
                request.get_host()
    @override_settings(DEBUG=True, ALLOWED_HOSTS=[])
    def test_host_validation_disabled_in_debug_mode(self):
        """If ALLOWED_HOSTS is empty and DEBUG is True, all hosts pass."""
        request = HttpRequest()
        request.META = {
            'HTTP_HOST': 'example.com',
        }
        self.assertEqual(request.get_host(), 'example.com')
    @override_settings(ALLOWED_HOSTS=[])
    def test_get_host_suggestion_of_allowed_host(self):
        """get_host() makes helpful suggestions if a valid-looking host is not in ALLOWED_HOSTS."""
        msg_invalid_host = "Invalid HTTP_HOST header: %r."
        msg_suggestion = msg_invalid_host + "You may need to add %r to ALLOWED_HOSTS."
        for host in [ # Valid-looking hosts
            'example.com',
            '12.34.56.78',
            '[2001:19f0:feee::dead:beef:cafe]',
            'xn--4ca9at.com', # Punnycode for öäü.com
        ]:
            request = HttpRequest()
            request.META = {'HTTP_HOST': host}
            self.assertRaisesMessage(
                SuspiciousOperation,
                msg_suggestion % (host, host),
                request.get_host
            )
        for domain, port in [ # Valid-looking hosts with a port number
            ('example.com', 80),
            ('12.34.56.78', 443),
            ('[2001:19f0:feee::dead:beef:cafe]', 8080),
        ]:
            # The suggestion strips the port from the suggested entry.
            host = '%s:%s' % (domain, port)
            request = HttpRequest()
            request.META = {'HTTP_HOST': host}
            self.assertRaisesMessage(
                SuspiciousOperation,
                msg_suggestion % (host, domain),
                request.get_host
            )
        for host in [ # Invalid hosts
            'example.com@evil.tld',
            'example.com:dr.frankenstein@evil.tld',
            'example.com:dr.frankenstein@evil.tld:80',
            'example.com:80/badpath',
            'example.com: recovermypassword.com',
        ]:
            request = HttpRequest()
            request.META = {'HTTP_HOST': host}
            self.assertRaisesMessage(
                SuspiciousOperation,
                msg_invalid_host % host,
                request.get_host
            )
    def test_near_expiration(self):
        "Cookie will expire when a near expiration time is provided"
        response = HttpResponse()
        # There is a timing weakness in this test; The
        # expected result for max-age requires that there be
        # a very slight difference between the evaluated expiration
        # time, and the time evaluated in set_cookie(). If this
        # difference doesn't exist, the cookie time will be
        # 1 second larger. To avoid the problem, put in a quick sleep,
        # which guarantees that there will be a time difference.
        expires = datetime.utcnow() + timedelta(seconds=10)
        time.sleep(0.001)
        response.set_cookie('datetime', expires=expires)
        datetime_cookie = response.cookies['datetime']
        # set_cookie() derives max-age from the expires datetime.
        self.assertEqual(datetime_cookie['max-age'], 10)
    def test_aware_expiration(self):
        "Cookie accepts an aware datetime as expiration time"
        response = HttpResponse()
        # Make the naive utcnow() timezone-aware (UTC) before passing it in.
        expires = (datetime.utcnow() + timedelta(seconds=10)).replace(tzinfo=utc)
        # Tiny sleep so the two time evaluations differ (see test_near_expiration).
        time.sleep(0.001)
        response.set_cookie('datetime', expires=expires)
        datetime_cookie = response.cookies['datetime']
        self.assertEqual(datetime_cookie['max-age'], 10)
def test_far_expiration(self):
"Cookie will expire when an distant expiration time is provided"
response = HttpResponse()
response.set_cookie('datetime', expires=datetime(2028, 1, 1, 4, 5, 6))
datetime_cookie = response.cookies['datetime']
self.assertEqual(datetime_cookie['expires'], 'Sat, 01-Jan-2028 04:05:06 GMT')
    def test_max_age_expiration(self):
        "Cookie will expire if max_age is provided"
        response = HttpResponse()
        response.set_cookie('max_age', max_age=10)
        max_age_cookie = response.cookies['max_age']
        self.assertEqual(max_age_cookie['max-age'], 10)
        # NOTE(review): potential flake -- time.time() here is evaluated
        # slightly after the one inside set_cookie(); if the clock ticks over
        # a second boundary in between, the expires strings differ. Confirm
        # whether this should be tolerant of a 1s delta.
        self.assertEqual(max_age_cookie['expires'], cookie_date(time.time()+10))
def test_httponly_cookie(self):
response = HttpResponse()
response.set_cookie('example', httponly=True)
example_cookie = response.cookies['example']
# A compat cookie may be in use -- check that it has worked
# both as an output string, and using the cookie attributes
self.assertTrue('; httponly' in str(example_cookie))
self.assertTrue(example_cookie['httponly'])
    def test_limited_stream(self):
        """LimitedStream caps read()/readline() at its declared length and
        keeps the remaining-byte count consistent across mixed calls."""
        # Read all of a limited stream
        stream = LimitedStream(BytesIO(b'test'), 2)
        self.assertEqual(stream.read(), b'te')
        # Reading again returns nothing.
        self.assertEqual(stream.read(), b'')
        # Read a number of characters greater than the stream has to offer
        stream = LimitedStream(BytesIO(b'test'), 2)
        self.assertEqual(stream.read(5), b'te')
        # Reading again returns nothing.
        self.assertEqual(stream.readline(5), b'')
        # Read sequentially from a stream
        stream = LimitedStream(BytesIO(b'12345678'), 8)
        self.assertEqual(stream.read(5), b'12345')
        self.assertEqual(stream.read(5), b'678')
        # Reading again returns nothing.
        self.assertEqual(stream.readline(5), b'')
        # Read lines from a stream
        stream = LimitedStream(BytesIO(b'1234\n5678\nabcd\nefgh\nijkl'), 24)
        # Read a full line, unconditionally
        self.assertEqual(stream.readline(), b'1234\n')
        # Read a number of characters less than a line
        self.assertEqual(stream.readline(2), b'56')
        # Read the rest of the partial line
        self.assertEqual(stream.readline(), b'78\n')
        # Read a full line, with a character limit greater than the line length
        self.assertEqual(stream.readline(6), b'abcd\n')
        # Read the next line, deliberately terminated at the line end
        self.assertEqual(stream.readline(4), b'efgh')
        # Read the next line... just the line end
        self.assertEqual(stream.readline(), b'\n')
        # Read everything else.
        self.assertEqual(stream.readline(), b'ijkl')
        # Regression for #15018
        # If a stream contains a newline, but the provided length
        # is less than the number of provided characters, the newline
        # doesn't reset the available character count
        stream = LimitedStream(BytesIO(b'1234\nabcdef'), 9)
        self.assertEqual(stream.readline(10), b'1234\n')
        self.assertEqual(stream.readline(3), b'abc')
        # Now expire the available characters
        self.assertEqual(stream.readline(3), b'd')
        # Reading again returns nothing.
        self.assertEqual(stream.readline(2), b'')
        # Same test, but with read, not readline.
        stream = LimitedStream(BytesIO(b'1234\nabcdef'), 9)
        self.assertEqual(stream.read(6), b'1234\na')
        self.assertEqual(stream.read(2), b'bc')
        self.assertEqual(stream.read(2), b'd')
        self.assertEqual(stream.read(2), b'')
        self.assertEqual(stream.read(), b'')
def test_stream(self):
payload = FakePayload('name=value')
request = WSGIRequest({'REQUEST_METHOD': 'POST',
'CONTENT_TYPE': 'application/x-www-form-urlencoded',
'CONTENT_LENGTH': len(payload),
'wsgi.input': payload})
self.assertEqual(request.read(), b'name=value')
    def test_read_after_value(self):
        """
        Reading from request is allowed after accessing request contents as
        POST or body.
        """
        payload = FakePayload('name=value')
        request = WSGIRequest({'REQUEST_METHOD': 'POST',
                               'CONTENT_TYPE': 'application/x-www-form-urlencoded',
                               'CONTENT_LENGTH': len(payload),
                               'wsgi.input': payload})
        self.assertEqual(request.POST, {'name': ['value']})
        self.assertEqual(request.body, b'name=value')
        # read() still returns the full body even after POST/body were accessed.
        self.assertEqual(request.read(), b'name=value')
    def test_value_after_read(self):
        """
        Construction of POST or body is not allowed after reading
        from request.
        """
        payload = FakePayload('name=value')
        request = WSGIRequest({'REQUEST_METHOD': 'POST',
                               'CONTENT_TYPE': 'application/x-www-form-urlencoded',
                               'CONTENT_LENGTH': len(payload),
                               'wsgi.input': payload})
        # Partially consume the stream first.
        self.assertEqual(request.read(2), b'na')
        # body can no longer be reconstructed...
        self.assertRaises(Exception, lambda: request.body)
        # ...and POST degrades to an empty dict rather than raising.
        self.assertEqual(request.POST, {})
    def test_non_ascii_POST(self):
        """Non-ASCII (UTF-8 encoded) form data round-trips through POST."""
        payload = FakePayload(urlencode({'key': 'España'}))
        request = WSGIRequest({
            'REQUEST_METHOD': 'POST',
            'CONTENT_LENGTH': len(payload),
            'CONTENT_TYPE': 'application/x-www-form-urlencoded',
            'wsgi.input': payload,
        })
        self.assertEqual(request.POST, {'key': ['España']})
    def test_alternate_charset_POST(self):
        """
        Test a POST with non-utf-8 payload encoding.
        """
        # Payload bytes are latin-1; the charset parameter in CONTENT_TYPE
        # tells the parser how to decode them back to text.
        payload = FakePayload(original_urlencode({'key': 'España'.encode('latin-1')}))
        request = WSGIRequest({
            'REQUEST_METHOD': 'POST',
            'CONTENT_LENGTH': len(payload),
            'CONTENT_TYPE': 'application/x-www-form-urlencoded; charset=iso-8859-1',
            'wsgi.input': payload,
        })
        self.assertEqual(request.POST, {'key': ['España']})
    def test_body_after_POST_multipart_form_data(self):
        """
        Reading body after parsing multipart/form-data is not allowed
        """
        # Because multipart is used for large amounts of data i.e. file uploads,
        # we don't want the data held in memory twice, and we don't want to
        # silence the error by setting body = '' either.
        payload = FakePayload("\r\n".join([
                '--boundary',
                'Content-Disposition: form-data; name="name"',
                '',
                'value',
                '--boundary--'
                '']))
        request = WSGIRequest({'REQUEST_METHOD': 'POST',
                               'CONTENT_TYPE': 'multipart/form-data; boundary=boundary',
                               'CONTENT_LENGTH': len(payload),
                               'wsgi.input': payload})
        self.assertEqual(request.POST, {'name': ['value']})
        # The stream was consumed by the multipart parse; body must raise.
        self.assertRaises(Exception, lambda: request.body)
    def test_body_after_POST_multipart_related(self):
        """
        Reading body after parsing multipart that isn't form-data is allowed
        """
        # Ticket #9054
        # There are cases in which the multipart data is related instead of
        # being a binary upload, in which case it should still be accessible
        # via body.
        payload_data = b"\r\n".join([
                b'--boundary',
                b'Content-ID: id; name="name"',
                b'',
                b'value',
                b'--boundary--'
                b''])
        payload = FakePayload(payload_data)
        request = WSGIRequest({'REQUEST_METHOD': 'POST',
                               'CONTENT_TYPE': 'multipart/related; boundary=boundary',
                               'CONTENT_LENGTH': len(payload),
                               'wsgi.input': payload})
        # multipart/related is not form data, so POST stays empty...
        self.assertEqual(request.POST, {})
        # ...and the raw body remains readable.
        self.assertEqual(request.body, payload_data)
    def test_POST_multipart_with_content_length_zero(self):
        """
        Multipart POST requests with Content-Length >= 0 are valid and need to be handled.
        """
        # According to:
        # http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.13
        # Every request.POST with Content-Length >= 0 is a valid request,
        # this test ensures that we handle Content-Length == 0.
        payload = FakePayload("\r\n".join([
                '--boundary',
                'Content-Disposition: form-data; name="name"',
                '',
                'value',
                '--boundary--'
                '']))
        request = WSGIRequest({'REQUEST_METHOD': 'POST',
                               'CONTENT_TYPE': 'multipart/form-data; boundary=boundary',
                               'CONTENT_LENGTH': 0,
                               'wsgi.input': payload})
        # With a zero Content-Length nothing is parsed, but nothing raises.
        self.assertEqual(request.POST, {})
    def test_POST_binary_only(self):
        """A binary (non-form) POST leaves POST/FILES empty but keeps body."""
        payload = b'\r\n\x01\x00\x00\x00ab\x00\x00\xcd\xcc,@'
        environ = {'REQUEST_METHOD': 'POST',
                   'CONTENT_TYPE': 'application/octet-stream',
                   'CONTENT_LENGTH': len(payload),
                   'wsgi.input': BytesIO(payload)}
        request = WSGIRequest(environ)
        self.assertEqual(request.POST, {})
        self.assertEqual(request.FILES, {})
        self.assertEqual(request.body, payload)
        # Same test without specifying content-type
        environ.update({'CONTENT_TYPE': '', 'wsgi.input': BytesIO(payload)})
        request = WSGIRequest(environ)
        self.assertEqual(request.POST, {})
        self.assertEqual(request.FILES, {})
        self.assertEqual(request.body, payload)
def test_read_by_lines(self):
payload = FakePayload('name=value')
request = WSGIRequest({'REQUEST_METHOD': 'POST',
'CONTENT_TYPE': 'application/x-www-form-urlencoded',
'CONTENT_LENGTH': len(payload),
'wsgi.input': payload})
self.assertEqual(list(request), [b'name=value'])
    def test_POST_after_body_read(self):
        """
        POST should be populated even if body is read first
        """
        payload = FakePayload('name=value')
        request = WSGIRequest({'REQUEST_METHOD': 'POST',
                               'CONTENT_TYPE': 'application/x-www-form-urlencoded',
                               'CONTENT_LENGTH': len(payload),
                               'wsgi.input': payload})
        # Deliberately access body first; the value itself is unused.
        raw_data = request.body
        self.assertEqual(request.POST, {'name': ['value']})
    def test_POST_after_body_read_and_stream_read(self):
        """
        POST should be populated even if body is read first, and then
        the stream is read second.
        """
        payload = FakePayload('name=value')
        request = WSGIRequest({'REQUEST_METHOD': 'POST',
                               'CONTENT_TYPE': 'application/x-www-form-urlencoded',
                               'CONTENT_LENGTH': len(payload),
                               'wsgi.input': payload})
        # Access body first (value unused), then partially consume the stream.
        raw_data = request.body
        self.assertEqual(request.read(1), b'n')
        self.assertEqual(request.POST, {'name': ['value']})
    def test_POST_after_body_read_and_stream_read_multipart(self):
        """
        POST should be populated even if body is read first, and then
        the stream is read second. Using multipart/form-data instead of urlencoded.
        """
        payload = FakePayload("\r\n".join([
                '--boundary',
                'Content-Disposition: form-data; name="name"',
                '',
                'value',
                '--boundary--'
                '']))
        request = WSGIRequest({'REQUEST_METHOD': 'POST',
                               'CONTENT_TYPE': 'multipart/form-data; boundary=boundary',
                               'CONTENT_LENGTH': len(payload),
                               'wsgi.input': payload})
        # Access body first (value unused) so it is cached.
        raw_data = request.body
        # Consume enough data to mess up the parsing:
        self.assertEqual(request.read(13), b'--boundary\r\nC')
        self.assertEqual(request.POST, {'name': ['value']})
def test_POST_connection_error(self):
"""
If wsgi.input.read() raises an exception while trying to read() the
POST, the exception should be identifiable (not a generic IOError).
"""
class ExplodingBytesIO(BytesIO):
def read(self, len=0):
raise IOError("kaboom!")
payload = b'name=value'
request = WSGIRequest({'REQUEST_METHOD': 'POST',
'CONTENT_TYPE': 'application/x-www-form-urlencoded',
'CONTENT_LENGTH': len(payload),
'wsgi.input': ExplodingBytesIO(payload)})
with self.assertRaises(UnreadablePostError):
request.body
def test_FILES_connection_error(self):
"""
If wsgi.input.read() raises an exception while trying to read() the
FILES, the exception should be identifiable (not a generic IOError).
"""
class ExplodingBytesIO(BytesIO):
def read(self, len=0):
raise IOError("kaboom!")
payload = b'x'
request = WSGIRequest({'REQUEST_METHOD': 'POST',
'CONTENT_TYPE': 'multipart/form-data; boundary=foo_',
'CONTENT_LENGTH': len(payload),
'wsgi.input': ExplodingBytesIO(payload)})
with self.assertRaises(UnreadablePostError):
request.FILES
@skipIf(connection.vendor == 'sqlite'
        and connection.settings_dict['TEST_NAME'] in (None, '', ':memory:'),
        "Cannot establish two connections to an in-memory SQLite database.")
class DatabaseConnectionHandlingTests(TransactionTestCase):
    """Checks that the request_finished signal cleans up DB connection state."""
    available_apps = []
    def setUp(self):
        # Use a temporary connection to avoid messing with the main one.
        self._old_default_connection = connections['default']
        del connections['default']
    def tearDown(self):
        try:
            connections['default'].close()
        finally:
            # Always restore the original connection, even if close() fails.
            connections['default'] = self._old_default_connection
    def test_request_finished_db_state(self):
        # Force closing connection on request end
        connection.settings_dict['CONN_MAX_AGE'] = 0
        # The GET below will not succeed, but it will give a response with
        # defined ._handler_class. That is needed for sending the
        # request_finished signal.
        response = self.client.get('/')
        # Make sure there is an open connection
        connection.cursor()
        connection.enter_transaction_management()
        signals.request_finished.send(sender=response._handler_class)
        # The signal handler must have unwound the transaction state.
        self.assertEqual(len(connection.transaction_state), 0)
    def test_request_finished_failed_connection(self):
        # Force closing connection on request end
        connection.settings_dict['CONN_MAX_AGE'] = 0
        connection.enter_transaction_management()
        connection.set_dirty()
        # Test that the rollback doesn't succeed (for example network failure
        # could cause this).
        def fail_horribly():
            raise Exception("Horrible failure!")
        connection._rollback = fail_horribly
        try:
            with self.assertRaises(Exception):
                signals.request_finished.send(sender=self.__class__)
            # The connection's state wasn't cleaned up
            self.assertEqual(len(connection.transaction_state), 1)
        finally:
            del connection._rollback
        # The connection will be cleaned on next request where the conn
        # works again.
        signals.request_finished.send(sender=self.__class__)
        self.assertEqual(len(connection.transaction_state), 0)
| apache-2.0 |
amsn/amsn2 | amsn2/ui/front_ends/qt4/common.py | 2 | 1673 | from amsn2.views import StringView, MenuItemView
from PyQt4 import Qt
from PyQt4 import QtCore
from PyQt4 import QtGui
def create_menu_items_from_view(menu, items):
    """Populate a Qt menu from a list of MenuItemView items.

    ``menu`` is normally a QMenu/QMenuBar; when recursing for a
    RADIOBUTTONGROUP it is a QActionGroup (both provide ``addAction()``).
    """
    # TODO: images, for now only basic representation
    for item in items:
        if item.type is MenuItemView.COMMAND:
            action = QtGui.QAction(item.label, menu)
            QtCore.QObject.connect(action, QtCore.SIGNAL("triggered()"), item.command)
            menu.addAction(action)
        elif item.type is MenuItemView.CASCADE_MENU:
            submenu = QtGui.QMenu(item.label, menu)
            create_menu_items_from_view(submenu, item.items)
            menu.addMenu(submenu)
        elif item.type is MenuItemView.SEPARATOR:
            menu.addSeparator()
        elif item.type is MenuItemView.CHECKBUTTON:
            action = QtGui.QAction(item.label, menu)
            action.setCheckable(True)
            if item.checkbox: #TODO : isn't it checkbox_value instead of checkbox ? By the way the MenuItemView constructor doesn't store the checkbox_value passed to it
                action.setChecked(True)
            QtCore.QObject.connect(action, QtCore.SIGNAL("triggered()"), item.command)
            menu.addAction(action)
        elif item.type is MenuItemView.RADIOBUTTON:
            action = QtGui.QAction(item.label, menu)
            action.setCheckable(True)
            if item.checkbox:
                action.setChecked(True)
            QtCore.QObject.connect(action, QtCore.SIGNAL("triggered()"), item.command)
            # Bug fix: the action was created and wired but never attached,
            # so radio buttons (and radio groups, which recurse into this
            # branch with a QActionGroup) always ended up empty.
            menu.addAction(action)
        elif item.type is MenuItemView.RADIOBUTTONGROUP:
            group = QtGui.QActionGroup(menu)
            create_menu_items_from_view(group, item.items)
            # Bug fix: QWidget.addActions() takes a list of QActions, not the
            # QActionGroup object itself.
            menu.addActions(group.actions())
| gpl-2.0 |
dtebbs/gyp | pylib/gyp/MSVSSettings_test.py | 1 | 65822 | #!/usr/bin/python
# Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Unit tests for the MSVSSettings.py file."""
import StringIO
import unittest
import gyp.MSVSSettings as MSVSSettings
class TestSequenceFunctions(unittest.TestCase):
  def setUp(self):
    # Capture warnings in an in-memory buffer so each test can inspect them.
    self.stderr = StringIO.StringIO()
def _ExpectedWarnings(self, expected):
"""Compares recorded lines to expected warnings."""
self.stderr.seek(0)
actual = self.stderr.read().split('\n')
actual = [line for line in actual if line]
self.assertEqual(sorted(expected), sorted(actual))
  def testValidateMSVSSettings_tool_names(self):
    """Tests that only MSVS tool names are allowed."""
    # 'foo' is unknown and 'ClCompile' is an MSBuild (not MSVS) tool name;
    # both must be flagged, the valid VC* tools must not.
    MSVSSettings.ValidateMSVSSettings(
        {'VCCLCompilerTool': {},
         'VCLinkerTool': {},
         'VCMIDLTool': {},
         'foo': {},
         'VCResourceCompilerTool': {},
         'VCLibrarianTool': {},
         'VCManifestTool': {},
         'ClCompile': {}},
        self.stderr)
    self._ExpectedWarnings([
        'Warning: unrecognized tool foo',
        'Warning: unrecognized tool ClCompile'])
  def testValidateMSVSSettings_settings(self):
    """Tests that invalid MSVS settings produce the expected warnings."""
    # The fixture below is mostly-valid settings for each MSVS tool, with a
    # handful of deliberately bad entries (unknown names, out-of-range enum
    # indices, non-bool/非-int strings) that must each yield one warning.
    MSVSSettings.ValidateMSVSSettings(
        {'VCCLCompilerTool': {
            'AdditionalIncludeDirectories': 'folder1;folder2',
            'AdditionalOptions': ['string1', 'string2'],
            'AdditionalUsingDirectories': 'folder1;folder2',
            'AssemblerListingLocation': 'a_file_name',
            'AssemblerOutput': '0',
            'BasicRuntimeChecks': '5',
            'BrowseInformation': 'fdkslj',
            'BrowseInformationFile': 'a_file_name',
            'BufferSecurityCheck': 'true',
            'CallingConvention': '-1',
            'CompileAs': '1',
            'DebugInformationFormat': '2',
            'DefaultCharIsUnsigned': 'true',
            'Detect64BitPortabilityProblems': 'true',
            'DisableLanguageExtensions': 'true',
            'DisableSpecificWarnings': 'string1;string2',
            'EnableEnhancedInstructionSet': '1',
            'EnableFiberSafeOptimizations': 'true',
            'EnableFunctionLevelLinking': 'true',
            'EnableIntrinsicFunctions': 'true',
            'EnablePREfast': 'true',
            'Enableprefast': 'bogus',
            'ErrorReporting': '1',
            'ExceptionHandling': '1',
            'ExpandAttributedSource': 'true',
            'FavorSizeOrSpeed': '1',
            'FloatingPointExceptions': 'true',
            'FloatingPointModel': '1',
            'ForceConformanceInForLoopScope': 'true',
            'ForcedIncludeFiles': 'file1;file2',
            'ForcedUsingFiles': 'file1;file2',
            'GeneratePreprocessedFile': '1',
            'GenerateXMLDocumentationFiles': 'true',
            'IgnoreStandardIncludePath': 'true',
            'InlineFunctionExpansion': '1',
            'KeepComments': 'true',
            'MinimalRebuild': 'true',
            'ObjectFile': 'a_file_name',
            'OmitDefaultLibName': 'true',
            'OmitFramePointers': 'true',
            'OpenMP': 'true',
            'Optimization': '1',
            'PrecompiledHeaderFile': 'a_file_name',
            'PrecompiledHeaderThrough': 'a_file_name',
            'PreprocessorDefinitions': 'string1;string2',
            'ProgramDataBaseFileName': 'a_file_name',
            'RuntimeLibrary': '1',
            'RuntimeTypeInfo': 'true',
            'ShowIncludes': 'true',
            'SmallerTypeCheck': 'true',
            'StringPooling': 'true',
            'StructMemberAlignment': '1',
            'SuppressStartupBanner': 'true',
            'TreatWChar_tAsBuiltInType': 'true',
            'UndefineAllPreprocessorDefinitions': 'true',
            'UndefinePreprocessorDefinitions': 'string1;string2',
            'UseFullPaths': 'true',
            'UsePrecompiledHeader': '1',
            'UseUnicodeResponseFiles': 'true',
            'WarnAsError': 'true',
            'WarningLevel': '1',
            'WholeProgramOptimization': 'true',
            'XMLDocumentationFileName': 'a_file_name',
            'ZZXYZ': 'bogus'},
         'VCLinkerTool': {
            'AdditionalDependencies': 'file1;file2',
            'AdditionalLibraryDirectories': 'folder1;folder2',
            'AdditionalManifestDependencies': 'file1;file2',
            'AdditionalOptions': 'a string1',
            'AddModuleNamesToAssembly': 'file1;file2',
            'AllowIsolation': 'true',
            'AssemblyDebug': '2',
            'AssemblyLinkResource': 'file1;file2',
            'BaseAddress': 'a string1',
            'CLRImageType': '2',
            'CLRThreadAttribute': '2',
            'CLRUnmanagedCodeCheck': 'true',
            'DataExecutionPrevention': '2',
            'DelayLoadDLLs': 'file1;file2',
            'DelaySign': 'true',
            'Driver': '2',
            'EmbedManagedResourceFile': 'file1;file2',
            'EnableCOMDATFolding': '2',
            'EnableUAC': 'true',
            'EntryPointSymbol': 'a string1',
            'ErrorReporting': '2',
            'FixedBaseAddress': '2',
            'ForceSymbolReferences': 'file1;file2',
            'FunctionOrder': 'a_file_name',
            'GenerateDebugInformation': 'true',
            'GenerateManifest': 'true',
            'GenerateMapFile': 'true',
            'HeapCommitSize': 'a string1',
            'HeapReserveSize': 'a string1',
            'IgnoreAllDefaultLibraries': 'true',
            'IgnoreDefaultLibraryNames': 'file1;file2',
            'IgnoreEmbeddedIDL': 'true',
            'IgnoreImportLibrary': 'true',
            'ImportLibrary': 'a_file_name',
            'KeyContainer': 'a_file_name',
            'KeyFile': 'a_file_name',
            'LargeAddressAware': '2',
            'LinkIncremental': '2',
            'LinkLibraryDependencies': 'true',
            'LinkTimeCodeGeneration': '2',
            'ManifestFile': 'a_file_name',
            'MapExports': 'true',
            'MapFileName': 'a_file_name',
            'MergedIDLBaseFileName': 'a_file_name',
            'MergeSections': 'a string1',
            'MidlCommandFile': 'a_file_name',
            'ModuleDefinitionFile': 'a_file_name',
            'OptimizeForWindows98': '1',
            'OptimizeReferences': '2',
            'OutputFile': 'a_file_name',
            'PerUserRedirection': 'true',
            'Profile': 'true',
            'ProfileGuidedDatabase': 'a_file_name',
            'ProgramDatabaseFile': 'a_file_name',
            'RandomizedBaseAddress': '2',
            'RegisterOutput': 'true',
            'ResourceOnlyDLL': 'true',
            'SetChecksum': 'true',
            'ShowProgress': '2',
            'StackCommitSize': 'a string1',
            'StackReserveSize': 'a string1',
            'StripPrivateSymbols': 'a_file_name',
            'SubSystem': '2',
            'SupportUnloadOfDelayLoadedDLL': 'true',
            'SuppressStartupBanner': 'true',
            'SwapRunFromCD': 'true',
            'SwapRunFromNet': 'true',
            'TargetMachine': '2',
            'TerminalServerAware': '2',
            'TurnOffAssemblyGeneration': 'true',
            'TypeLibraryFile': 'a_file_name',
            'TypeLibraryResourceID': '33',
            'UACExecutionLevel': '2',
            'UACUIAccess': 'true',
            'UseLibraryDependencyInputs': 'true',
            'UseUnicodeResponseFiles': 'true',
            'Version': 'a string1'},
         'VCMIDLTool': {
            'AdditionalIncludeDirectories': 'folder1;folder2',
            'AdditionalOptions': 'a string1',
            'CPreprocessOptions': 'a string1',
            'DefaultCharType': '1',
            'DLLDataFileName': 'a_file_name',
            'EnableErrorChecks': '1',
            'ErrorCheckAllocations': 'true',
            'ErrorCheckBounds': 'true',
            'ErrorCheckEnumRange': 'true',
            'ErrorCheckRefPointers': 'true',
            'ErrorCheckStubData': 'true',
            'GenerateStublessProxies': 'true',
            'GenerateTypeLibrary': 'true',
            'HeaderFileName': 'a_file_name',
            'IgnoreStandardIncludePath': 'true',
            'InterfaceIdentifierFileName': 'a_file_name',
            'MkTypLibCompatible': 'true',
            'notgood': 'bogus',
            'OutputDirectory': 'a string1',
            'PreprocessorDefinitions': 'string1;string2',
            'ProxyFileName': 'a_file_name',
            'RedirectOutputAndErrors': 'a_file_name',
            'StructMemberAlignment': '1',
            'SuppressStartupBanner': 'true',
            'TargetEnvironment': '1',
            'TypeLibraryName': 'a_file_name',
            'UndefinePreprocessorDefinitions': 'string1;string2',
            'ValidateParameters': 'true',
            'WarnAsError': 'true',
            'WarningLevel': '1'},
         'VCResourceCompilerTool': {
            'AdditionalOptions': 'a string1',
            'AdditionalIncludeDirectories': 'folder1;folder2',
            'Culture': '1003',
            'IgnoreStandardIncludePath': 'true',
            'notgood2': 'bogus',
            'PreprocessorDefinitions': 'string1;string2',
            'ResourceOutputFileName': 'a string1',
            'ShowProgress': 'true',
            'SuppressStartupBanner': 'true',
            'UndefinePreprocessorDefinitions': 'string1;string2'},
         'VCLibrarianTool': {
            'AdditionalDependencies': 'file1;file2',
            'AdditionalLibraryDirectories': 'folder1;folder2',
            'AdditionalOptions': 'a string1',
            'ExportNamedFunctions': 'string1;string2',
            'ForceSymbolReferences': 'a string1',
            'IgnoreAllDefaultLibraries': 'true',
            'IgnoreSpecificDefaultLibraries': 'file1;file2',
            'LinkLibraryDependencies': 'true',
            'ModuleDefinitionFile': 'a_file_name',
            'OutputFile': 'a_file_name',
            'SuppressStartupBanner': 'true',
            'UseUnicodeResponseFiles': 'true'},
         'VCManifestTool': {
            'AdditionalManifestFiles': 'file1;file2',
            'AdditionalOptions': 'a string1',
            'AssemblyIdentity': 'a string1',
            'ComponentFileName': 'a_file_name',
            'DependencyInformationFile': 'a_file_name',
            'GenerateCatalogFiles': 'true',
            'InputResourceManifests': 'a string1',
            'ManifestResourceFile': 'a_file_name',
            'OutputManifestFile': 'a_file_name',
            'RegistrarScriptFile': 'a_file_name',
            'ReplacementsFile': 'a_file_name',
            'SuppressStartupBanner': 'true',
            'TypeLibraryFile': 'a_file_name',
            'UpdateFileHashes': 'truel',
            'UpdateFileHashesSearchPath': 'a_file_name',
            'UseFAT32Workaround': 'true',
            'UseUnicodeResponseFiles': 'true',
            'VerboseOutput': 'true'}},
        self.stderr)
    # One warning per bad entry above, nothing for the valid ones.
    self._ExpectedWarnings([
        'Warning: for VCCLCompilerTool/BasicRuntimeChecks, '
        'index value (5) not in expected range [0, 4)',
        'Warning: for VCCLCompilerTool/BrowseInformation, '
        "invalid literal for int() with base 10: 'fdkslj'",
        'Warning: for VCCLCompilerTool/CallingConvention, '
        'index value (-1) not in expected range [0, 3)',
        'Warning: for VCCLCompilerTool/DebugInformationFormat, '
        'converted value for 2 not specified.',
        'Warning: unrecognized setting VCCLCompilerTool/Enableprefast',
        'Warning: unrecognized setting VCCLCompilerTool/ZZXYZ',
        'Warning: for VCLinkerTool/TargetMachine, '
        'converted value for 2 not specified.',
        'Warning: unrecognized setting VCMIDLTool/notgood',
        'Warning: unrecognized setting VCResourceCompilerTool/notgood2',
        'Warning: for VCManifestTool/UpdateFileHashes, '
        "expected bool; got 'truel'"
        ''])
  def testValidateMSBuildSettings_settings(self):
    """Tests that invalid MSBuild settings produce the expected warnings."""
    # Mostly-valid settings for each MSBuild tool, seeded with bad entries
    # (unknown names, bad enum values, non-bool strings) that must each
    # yield exactly one warning.
    MSVSSettings.ValidateMSBuildSettings(
        {'ClCompile': {
            'AdditionalIncludeDirectories': 'folder1;folder2',
            'AdditionalOptions': ['string1', 'string2'],
            'AdditionalUsingDirectories': 'folder1;folder2',
            'AssemblerListingLocation': 'a_file_name',
            'AssemblerOutput': 'NoListing',
            'BasicRuntimeChecks': 'StackFrameRuntimeCheck',
            'BrowseInformation': 'false',
            'BrowseInformationFile': 'a_file_name',
            'BufferSecurityCheck': 'true',
            'BuildingInIDE': 'true',
            'CallingConvention': 'Cdecl',
            'CompileAs': 'CompileAsC',
            'CompileAsManaged': 'Pure',
            'CreateHotpatchableImage': 'true',
            'DebugInformationFormat': 'ProgramDatabase',
            'DisableLanguageExtensions': 'true',
            'DisableSpecificWarnings': 'string1;string2',
            'EnableEnhancedInstructionSet': 'StreamingSIMDExtensions',
            'EnableFiberSafeOptimizations': 'true',
            'EnablePREfast': 'true',
            'Enableprefast': 'bogus',
            'ErrorReporting': 'Prompt',
            'ExceptionHandling': 'SyncCThrow',
            'ExpandAttributedSource': 'true',
            'FavorSizeOrSpeed': 'Neither',
            'FloatingPointExceptions': 'true',
            'FloatingPointModel': 'Precise',
            'ForceConformanceInForLoopScope': 'true',
            'ForcedIncludeFiles': 'file1;file2',
            'ForcedUsingFiles': 'file1;file2',
            'FunctionLevelLinking': 'false',
            'GenerateXMLDocumentationFiles': 'true',
            'IgnoreStandardIncludePath': 'true',
            'InlineFunctionExpansion': 'OnlyExplicitInline',
            'IntrinsicFunctions': 'false',
            'MinimalRebuild': 'true',
            'MultiProcessorCompilation': 'true',
            'ObjectFileName': 'a_file_name',
            'OmitDefaultLibName': 'true',
            'OmitFramePointers': 'true',
            'OpenMPSupport': 'true',
            'Optimization': 'Disabled',
            'PrecompiledHeader': 'NotUsing',
            'PrecompiledHeaderFile': 'a_file_name',
            'PrecompiledHeaderOutputFile': 'a_file_name',
            'PreprocessKeepComments': 'true',
            'PreprocessorDefinitions': 'string1;string2',
            'PreprocessOutputPath': 'a string1',
            'PreprocessSuppressLineNumbers': 'false',
            'PreprocessToFile': 'false',
            'ProcessorNumber': '33',
            'ProgramDataBaseFileName': 'a_file_name',
            'RuntimeLibrary': 'MultiThreaded',
            'RuntimeTypeInfo': 'true',
            'ShowIncludes': 'true',
            'SmallerTypeCheck': 'true',
            'StringPooling': 'true',
            'StructMemberAlignment': '1Byte',
            'SuppressStartupBanner': 'true',
            'TrackerLogDirectory': 'a_folder',
            'TreatSpecificWarningsAsErrors': 'string1;string2',
            'TreatWarningAsError': 'true',
            'TreatWChar_tAsBuiltInType': 'true',
            'UndefineAllPreprocessorDefinitions': 'true',
            'UndefinePreprocessorDefinitions': 'string1;string2',
            'UseFullPaths': 'true',
            'UseUnicodeForAssemblerListing': 'true',
            'WarningLevel': 'TurnOffAllWarnings',
            'WholeProgramOptimization': 'true',
            'XMLDocumentationFileName': 'a_file_name',
            'ZZXYZ': 'bogus'},
         'Link': {
            'AdditionalDependencies': 'file1;file2',
            'AdditionalLibraryDirectories': 'folder1;folder2',
            'AdditionalManifestDependencies': 'file1;file2',
            'AdditionalOptions': 'a string1',
            'AddModuleNamesToAssembly': 'file1;file2',
            'AllowIsolation': 'true',
            'AssemblyDebug': '',
            'AssemblyLinkResource': 'file1;file2',
            'BaseAddress': 'a string1',
            'BuildingInIDE': 'true',
            'CLRImageType': 'ForceIJWImage',
            'CLRSupportLastError': 'Enabled',
            'CLRThreadAttribute': 'MTAThreadingAttribute',
            'CLRUnmanagedCodeCheck': 'true',
            'CreateHotPatchableImage': 'X86Image',
            'DataExecutionPrevention': 'false',
            'DelayLoadDLLs': 'file1;file2',
            'DelaySign': 'true',
            'Driver': 'NotSet',
            'EmbedManagedResourceFile': 'file1;file2',
            'EnableCOMDATFolding': 'false',
            'EnableUAC': 'true',
            'EntryPointSymbol': 'a string1',
            'FixedBaseAddress': 'false',
            'ForceFileOutput': 'Enabled',
            'ForceSymbolReferences': 'file1;file2',
            'FunctionOrder': 'a_file_name',
            'GenerateDebugInformation': 'true',
            'GenerateMapFile': 'true',
            'HeapCommitSize': 'a string1',
            'HeapReserveSize': 'a string1',
            'IgnoreAllDefaultLibraries': 'true',
            'IgnoreEmbeddedIDL': 'true',
            'IgnoreSpecificDefaultLibraries': 'a_file_list',
            'ImageHasSafeExceptionHandlers': 'true',
            'ImportLibrary': 'a_file_name',
            'KeyContainer': 'a_file_name',
            'KeyFile': 'a_file_name',
            'LargeAddressAware': 'false',
            'LinkDLL': 'true',
            'LinkErrorReporting': 'SendErrorReport',
            'LinkStatus': 'true',
            'LinkTimeCodeGeneration': 'UseLinkTimeCodeGeneration',
            'ManifestFile': 'a_file_name',
            'MapExports': 'true',
            'MapFileName': 'a_file_name',
            'MergedIDLBaseFileName': 'a_file_name',
            'MergeSections': 'a string1',
            'MidlCommandFile': 'a_file_name',
            'MinimumRequiredVersion': 'a string1',
            'ModuleDefinitionFile': 'a_file_name',
            'MSDOSStubFileName': 'a_file_name',
            'NoEntryPoint': 'true',
            'OptimizeReferences': 'false',
            'OutputFile': 'a_file_name',
            'PerUserRedirection': 'true',
            'PreventDllBinding': 'true',
            'Profile': 'true',
            'ProfileGuidedDatabase': 'a_file_name',
            'ProgramDatabaseFile': 'a_file_name',
            'RandomizedBaseAddress': 'false',
            'RegisterOutput': 'true',
            'SectionAlignment': '33',
            'SetChecksum': 'true',
            'ShowProgress': 'LinkVerboseREF',
            'SpecifySectionAttributes': 'a string1',
            'StackCommitSize': 'a string1',
            'StackReserveSize': 'a string1',
            'StripPrivateSymbols': 'a_file_name',
            'SubSystem': 'Console',
            'SupportNobindOfDelayLoadedDLL': 'true',
            'SupportUnloadOfDelayLoadedDLL': 'true',
            'SuppressStartupBanner': 'true',
            'SwapRunFromCD': 'true',
            'SwapRunFromNET': 'true',
            'TargetMachine': 'MachineX86',
            'TerminalServerAware': 'false',
            'TrackerLogDirectory': 'a_folder',
            'TreatLinkerWarningAsErrors': 'true',
            'TurnOffAssemblyGeneration': 'true',
            'TypeLibraryFile': 'a_file_name',
            'TypeLibraryResourceID': '33',
            'UACExecutionLevel': 'AsInvoker',
            'UACUIAccess': 'true',
            'Version': 'a string1'},
         'ResourceCompile': {
            'AdditionalIncludeDirectories': 'folder1;folder2',
            'AdditionalOptions': 'a string1',
            'Culture': '0x236',
            'IgnoreStandardIncludePath': 'true',
            'NullTerminateStrings': 'true',
            'PreprocessorDefinitions': 'string1;string2',
            'ResourceOutputFileName': 'a string1',
            'ShowProgress': 'true',
            'SuppressStartupBanner': 'true',
            'TrackerLogDirectory': 'a_folder',
            'UndefinePreprocessorDefinitions': 'string1;string2'},
         'Midl': {
            'AdditionalIncludeDirectories': 'folder1;folder2',
            'AdditionalOptions': 'a string1',
            'ApplicationConfigurationMode': 'true',
            'ClientStubFile': 'a_file_name',
            'CPreprocessOptions': 'a string1',
            'DefaultCharType': 'Signed',
            'DllDataFileName': 'a_file_name',
            'EnableErrorChecks': 'EnableCustom',
            'ErrorCheckAllocations': 'true',
            'ErrorCheckBounds': 'true',
            'ErrorCheckEnumRange': 'true',
            'ErrorCheckRefPointers': 'true',
            'ErrorCheckStubData': 'true',
            'GenerateClientFiles': 'Stub',
            'GenerateServerFiles': 'None',
            'GenerateStublessProxies': 'true',
            'GenerateTypeLibrary': 'true',
            'HeaderFileName': 'a_file_name',
            'IgnoreStandardIncludePath': 'true',
            'InterfaceIdentifierFileName': 'a_file_name',
            'LocaleID': '33',
            'MkTypLibCompatible': 'true',
            'OutputDirectory': 'a string1',
            'PreprocessorDefinitions': 'string1;string2',
            'ProxyFileName': 'a_file_name',
            'RedirectOutputAndErrors': 'a_file_name',
            'ServerStubFile': 'a_file_name',
            'StructMemberAlignment': 'NotSet',
            'SuppressCompilerWarnings': 'true',
            'SuppressStartupBanner': 'true',
            'TargetEnvironment': 'Itanium',
            'TrackerLogDirectory': 'a_folder',
            'TypeLibFormat': 'NewFormat',
            'TypeLibraryName': 'a_file_name',
            'UndefinePreprocessorDefinitions': 'string1;string2',
            'ValidateAllParameters': 'true',
            'WarnAsError': 'true',
            'WarningLevel': '1'},
         'Lib': {
            'AdditionalDependencies': 'file1;file2',
            'AdditionalLibraryDirectories': 'folder1;folder2',
            'AdditionalOptions': 'a string1',
            'DisplayLibrary': 'a string1',
            'ErrorReporting': 'PromptImmediately',
            'ExportNamedFunctions': 'string1;string2',
            'ForceSymbolReferences': 'a string1',
            'IgnoreAllDefaultLibraries': 'true',
            'IgnoreSpecificDefaultLibraries': 'file1;file2',
            'LinkTimeCodeGeneration': 'true',
            'MinimumRequiredVersion': 'a string1',
            'ModuleDefinitionFile': 'a_file_name',
            'Name': 'a_file_name',
            'OutputFile': 'a_file_name',
            'RemoveObjects': 'file1;file2',
            'SubSystem': 'Console',
            'SuppressStartupBanner': 'true',
            'TargetMachine': 'MachineX86i',
            'TrackerLogDirectory': 'a_folder',
            'TreatLibWarningAsErrors': 'true',
            'UseUnicodeResponseFiles': 'true',
            'Verbose': 'true'},
         'Mt': {
            'AdditionalManifestFiles': 'file1;file2',
            'AdditionalOptions': 'a string1',
            'AssemblyIdentity': 'a string1',
            'ComponentFileName': 'a_file_name',
            'EnableDPIAwareness': 'fal',
            'GenerateCatalogFiles': 'truel',
            'GenerateCategoryTags': 'true',
            'InputResourceManifests': 'a string1',
            'ManifestFromManagedAssembly': 'a_file_name',
            'notgood3': 'bogus',
            'OutputManifestFile': 'a_file_name',
            'OutputResourceManifests': 'a string1',
            'RegistrarScriptFile': 'a_file_name',
            'ReplacementsFile': 'a_file_name',
            'SuppressDependencyElement': 'true',
            'SuppressStartupBanner': 'true',
            'TrackerLogDirectory': 'a_folder',
            'TypeLibraryFile': 'a_file_name',
            'UpdateFileHashes': 'true',
            'UpdateFileHashesSearchPath': 'a_file_name',
            'VerboseOutput': 'true'},
         'ProjectReference': {
            'LinkLibraryDependencies': 'true',
            'UseLibraryDependencyInputs': 'true'},
         'ManifestResourceCompile': {
            'ResourceOutputFileName': 'a_file_name'},
         '': {
            'EmbedManifest': 'true',
            'GenerateManifest': 'true',
            'IgnoreImportLibrary': 'true',
            'LinkIncremental': 'false'}},
        self.stderr)
    # One warning per bad entry above, nothing for the valid ones.
    self._ExpectedWarnings([
        'Warning: unrecognized setting ClCompile/Enableprefast',
        'Warning: unrecognized setting ClCompile/ZZXYZ',
        'Warning: unrecognized setting Mt/notgood3',
        "Warning: for Mt/GenerateCatalogFiles, expected bool; got 'truel'",
        'Warning: for Lib/TargetMachine, unrecognized enumerated value '
        'MachineX86i',
        "Warning: for Mt/EnableDPIAwareness, expected bool; got 'fal'"])
def testConvertToMSBuildSettings_empty(self):
  """An empty MSVS settings dict converts to an empty MSBuild dict."""
  converted = MSVSSettings.ConvertToMSBuildSettings({}, self.stderr)
  self.assertEqual({}, converted)
  # No settings were supplied, so no warnings should have been emitted.
  self._ExpectedWarnings([])
def testConvertToMSBuildSettings_minimal(self):
  """A handful of representative MSVS settings map to their MSBuild forms."""
  # Source settings use the VS2008 tool names and numeric enum encodings.
  source = {
      'VCCLCompilerTool': {
          'AdditionalIncludeDirectories': 'dir1',
          'AdditionalOptions': '/foo',
          'BasicRuntimeChecks': '0',
      },
      'VCLinkerTool': {
          'LinkTimeCodeGeneration': '1',
          'ErrorReporting': '1',
          'DataExecutionPrevention': '2',
      },
  }
  # The converter renames the tools (ClCompile/Link) and turns numeric
  # enum indices into the symbolic MSBuild values.
  expected = {
      'ClCompile': {
          'AdditionalIncludeDirectories': 'dir1',
          'AdditionalOptions': '/foo',
          'BasicRuntimeChecks': 'Default',
      },
      'Link': {
          'LinkTimeCodeGeneration': 'UseLinkTimeCodeGeneration',
          'LinkErrorReporting': 'PromptImmediately',
          'DataExecutionPrevention': 'true',
      },
  }
  converted = MSVSSettings.ConvertToMSBuildSettings(source, self.stderr)
  self.assertEqual(expected, converted)
  self._ExpectedWarnings([])
def testConvertToMSBuildSettings_warnings(self):
  """Out-of-range MSVS enum values are dropped and reported as warnings."""
  source = {
      'VCCLCompilerTool': {
          'AdditionalIncludeDirectories': '1',
          'AdditionalOptions': '2',
          # These are incorrect values:
          'BasicRuntimeChecks': '12',
          'BrowseInformation': '21',
          'UsePrecompiledHeader': '13',
          'GeneratePreprocessedFile': '14'},
      'VCLinkerTool': {
          # These are incorrect values:
          'Driver': '10',
          'LinkTimeCodeGeneration': '31',
          'ErrorReporting': '21',
          'FixedBaseAddress': '6'},
      'VCResourceCompilerTool': {
          # Custom
          'Culture': '1003'}}
  # Only the valid settings survive the conversion; each invalid one is
  # omitted from the output and produces a warning instead.
  expected = {
      'ClCompile': {
          'AdditionalIncludeDirectories': '1',
          'AdditionalOptions': '2'},
      'Link': {},
      'ResourceCompile': {
          # Custom
          'Culture': '0x03eb'}}
  converted = MSVSSettings.ConvertToMSBuildSettings(source, self.stderr)
  self.assertEqual(expected, converted)
  self._ExpectedWarnings([
      'Warning: while converting VCCLCompilerTool/BasicRuntimeChecks to '
      'MSBuild, index value (12) not in expected range [0, 4)',
      'Warning: while converting VCCLCompilerTool/BrowseInformation to '
      'MSBuild, index value (21) not in expected range [0, 3)',
      'Warning: while converting VCCLCompilerTool/UsePrecompiledHeader to '
      'MSBuild, index value (13) not in expected range [0, 3)',
      'Warning: while converting VCCLCompilerTool/GeneratePreprocessedFile to '
      'MSBuild, value must be one of [0, 1, 2]; got 14',
      'Warning: while converting VCLinkerTool/Driver to '
      'MSBuild, index value (10) not in expected range [0, 4)',
      'Warning: while converting VCLinkerTool/LinkTimeCodeGeneration to '
      'MSBuild, index value (31) not in expected range [0, 5)',
      'Warning: while converting VCLinkerTool/ErrorReporting to '
      'MSBuild, index value (21) not in expected range [0, 3)',
      'Warning: while converting VCLinkerTool/FixedBaseAddress to '
      'MSBuild, index value (6) not in expected range [0, 3)',
      ])
def testConvertToMSBuildSettings_full_synthetic(self):
  """Tests conversion of all the MSBuild settings.

  Every convertible setting of every VS2008 tool section is populated with
  a synthetic value, converted, and compared against the full expected
  MSBuild (VS2010) output.  A clean run must produce zero warnings.
  """
  # MSVS (VS2008 .vcproj) tool sections, keyed by tool name.  Enumerated
  # settings use the numeric index encoding of the old format.
  msvs_settings = {
      'VCCLCompilerTool': {
          'AdditionalIncludeDirectories': 'folder1;folder2;folder3',
          'AdditionalOptions': 'a_string',
          'AdditionalUsingDirectories': 'folder1;folder2;folder3',
          'AssemblerListingLocation': 'a_file_name',
          'AssemblerOutput': '0',
          'BasicRuntimeChecks': '1',
          'BrowseInformation': '2',
          'BrowseInformationFile': 'a_file_name',
          'BufferSecurityCheck': 'true',
          'CallingConvention': '0',
          'CompileAs': '1',
          'DebugInformationFormat': '4',
          'DefaultCharIsUnsigned': 'true',
          'Detect64BitPortabilityProblems': 'true',
          'DisableLanguageExtensions': 'true',
          'DisableSpecificWarnings': 'd1;d2;d3',
          'EnableEnhancedInstructionSet': '0',
          'EnableFiberSafeOptimizations': 'true',
          'EnableFunctionLevelLinking': 'true',
          'EnableIntrinsicFunctions': 'true',
          'EnablePREfast': 'true',
          'ErrorReporting': '1',
          'ExceptionHandling': '2',
          'ExpandAttributedSource': 'true',
          'FavorSizeOrSpeed': '0',
          'FloatingPointExceptions': 'true',
          'FloatingPointModel': '1',
          'ForceConformanceInForLoopScope': 'true',
          'ForcedIncludeFiles': 'file1;file2;file3',
          'ForcedUsingFiles': 'file1;file2;file3',
          'GeneratePreprocessedFile': '1',
          'GenerateXMLDocumentationFiles': 'true',
          'IgnoreStandardIncludePath': 'true',
          'InlineFunctionExpansion': '2',
          'KeepComments': 'true',
          'MinimalRebuild': 'true',
          'ObjectFile': 'a_file_name',
          'OmitDefaultLibName': 'true',
          'OmitFramePointers': 'true',
          'OpenMP': 'true',
          'Optimization': '3',
          'PrecompiledHeaderFile': 'a_file_name',
          'PrecompiledHeaderThrough': 'a_file_name',
          'PreprocessorDefinitions': 'd1;d2;d3',
          'ProgramDataBaseFileName': 'a_file_name',
          'RuntimeLibrary': '0',
          'RuntimeTypeInfo': 'true',
          'ShowIncludes': 'true',
          'SmallerTypeCheck': 'true',
          'StringPooling': 'true',
          'StructMemberAlignment': '1',
          'SuppressStartupBanner': 'true',
          'TreatWChar_tAsBuiltInType': 'true',
          'UndefineAllPreprocessorDefinitions': 'true',
          'UndefinePreprocessorDefinitions': 'd1;d2;d3',
          'UseFullPaths': 'true',
          'UsePrecompiledHeader': '1',
          'UseUnicodeResponseFiles': 'true',
          'WarnAsError': 'true',
          'WarningLevel': '2',
          'WholeProgramOptimization': 'true',
          'XMLDocumentationFileName': 'a_file_name'},
      'VCLinkerTool': {
          'AdditionalDependencies': 'file1;file2;file3',
          'AdditionalLibraryDirectories': 'folder1;folder2;folder3',
          'AdditionalLibraryDirectories_excluded': 'folder1;folder2;folder3',
          'AdditionalManifestDependencies': 'file1;file2;file3',
          'AdditionalOptions': 'a_string',
          'AddModuleNamesToAssembly': 'file1;file2;file3',
          'AllowIsolation': 'true',
          'AssemblyDebug': '0',
          'AssemblyLinkResource': 'file1;file2;file3',
          'BaseAddress': 'a_string',
          'CLRImageType': '1',
          'CLRThreadAttribute': '2',
          'CLRUnmanagedCodeCheck': 'true',
          'DataExecutionPrevention': '0',
          'DelayLoadDLLs': 'file1;file2;file3',
          'DelaySign': 'true',
          'Driver': '1',
          'EmbedManagedResourceFile': 'file1;file2;file3',
          'EnableCOMDATFolding': '0',
          'EnableUAC': 'true',
          'EntryPointSymbol': 'a_string',
          'ErrorReporting': '0',
          'FixedBaseAddress': '1',
          'ForceSymbolReferences': 'file1;file2;file3',
          'FunctionOrder': 'a_file_name',
          'GenerateDebugInformation': 'true',
          'GenerateManifest': 'true',
          'GenerateMapFile': 'true',
          'HeapCommitSize': 'a_string',
          'HeapReserveSize': 'a_string',
          'IgnoreAllDefaultLibraries': 'true',
          'IgnoreDefaultLibraryNames': 'file1;file2;file3',
          'IgnoreEmbeddedIDL': 'true',
          'IgnoreImportLibrary': 'true',
          'ImportLibrary': 'a_file_name',
          'KeyContainer': 'a_file_name',
          'KeyFile': 'a_file_name',
          'LargeAddressAware': '2',
          'LinkIncremental': '1',
          'LinkLibraryDependencies': 'true',
          'LinkTimeCodeGeneration': '2',
          'ManifestFile': 'a_file_name',
          'MapExports': 'true',
          'MapFileName': 'a_file_name',
          'MergedIDLBaseFileName': 'a_file_name',
          'MergeSections': 'a_string',
          'MidlCommandFile': 'a_file_name',
          'ModuleDefinitionFile': 'a_file_name',
          'OptimizeForWindows98': '1',
          'OptimizeReferences': '0',
          'OutputFile': 'a_file_name',
          'PerUserRedirection': 'true',
          'Profile': 'true',
          'ProfileGuidedDatabase': 'a_file_name',
          'ProgramDatabaseFile': 'a_file_name',
          'RandomizedBaseAddress': '1',
          'RegisterOutput': 'true',
          'ResourceOnlyDLL': 'true',
          'SetChecksum': 'true',
          'ShowProgress': '0',
          'StackCommitSize': 'a_string',
          'StackReserveSize': 'a_string',
          'StripPrivateSymbols': 'a_file_name',
          'SubSystem': '2',
          'SupportUnloadOfDelayLoadedDLL': 'true',
          'SuppressStartupBanner': 'true',
          'SwapRunFromCD': 'true',
          'SwapRunFromNet': 'true',
          'TargetMachine': '3',
          'TerminalServerAware': '2',
          'TurnOffAssemblyGeneration': 'true',
          'TypeLibraryFile': 'a_file_name',
          'TypeLibraryResourceID': '33',
          'UACExecutionLevel': '1',
          'UACUIAccess': 'true',
          'UseLibraryDependencyInputs': 'false',
          'UseUnicodeResponseFiles': 'true',
          'Version': 'a_string'},
      'VCResourceCompilerTool': {
          'AdditionalIncludeDirectories': 'folder1;folder2;folder3',
          'AdditionalOptions': 'a_string',
          'Culture': '1003',
          'IgnoreStandardIncludePath': 'true',
          'PreprocessorDefinitions': 'd1;d2;d3',
          'ResourceOutputFileName': 'a_string',
          'ShowProgress': 'true',
          'SuppressStartupBanner': 'true',
          'UndefinePreprocessorDefinitions': 'd1;d2;d3'},
      'VCMIDLTool': {
          'AdditionalIncludeDirectories': 'folder1;folder2;folder3',
          'AdditionalOptions': 'a_string',
          'CPreprocessOptions': 'a_string',
          'DefaultCharType': '0',
          'DLLDataFileName': 'a_file_name',
          'EnableErrorChecks': '2',
          'ErrorCheckAllocations': 'true',
          'ErrorCheckBounds': 'true',
          'ErrorCheckEnumRange': 'true',
          'ErrorCheckRefPointers': 'true',
          'ErrorCheckStubData': 'true',
          'GenerateStublessProxies': 'true',
          'GenerateTypeLibrary': 'true',
          'HeaderFileName': 'a_file_name',
          'IgnoreStandardIncludePath': 'true',
          'InterfaceIdentifierFileName': 'a_file_name',
          'MkTypLibCompatible': 'true',
          'OutputDirectory': 'a_string',
          'PreprocessorDefinitions': 'd1;d2;d3',
          'ProxyFileName': 'a_file_name',
          'RedirectOutputAndErrors': 'a_file_name',
          'StructMemberAlignment': '3',
          'SuppressStartupBanner': 'true',
          'TargetEnvironment': '1',
          'TypeLibraryName': 'a_file_name',
          'UndefinePreprocessorDefinitions': 'd1;d2;d3',
          'ValidateParameters': 'true',
          'WarnAsError': 'true',
          'WarningLevel': '4'},
      'VCLibrarianTool': {
          'AdditionalDependencies': 'file1;file2;file3',
          'AdditionalLibraryDirectories': 'folder1;folder2;folder3',
          'AdditionalLibraryDirectories_excluded': 'folder1;folder2;folder3',
          'AdditionalOptions': 'a_string',
          'ExportNamedFunctions': 'd1;d2;d3',
          'ForceSymbolReferences': 'a_string',
          'IgnoreAllDefaultLibraries': 'true',
          'IgnoreSpecificDefaultLibraries': 'file1;file2;file3',
          'LinkLibraryDependencies': 'true',
          'ModuleDefinitionFile': 'a_file_name',
          'OutputFile': 'a_file_name',
          'SuppressStartupBanner': 'true',
          'UseUnicodeResponseFiles': 'true'},
      'VCManifestTool': {
          'AdditionalManifestFiles': 'file1;file2;file3',
          'AdditionalOptions': 'a_string',
          'AssemblyIdentity': 'a_string',
          'ComponentFileName': 'a_file_name',
          'DependencyInformationFile': 'a_file_name',
          'EmbedManifest': 'true',
          'GenerateCatalogFiles': 'true',
          'InputResourceManifests': 'a_string',
          'ManifestResourceFile': 'my_name',
          'OutputManifestFile': 'a_file_name',
          'RegistrarScriptFile': 'a_file_name',
          'ReplacementsFile': 'a_file_name',
          'SuppressStartupBanner': 'true',
          'TypeLibraryFile': 'a_file_name',
          'UpdateFileHashes': 'true',
          'UpdateFileHashesSearchPath': 'a_file_name',
          'UseFAT32Workaround': 'true',
          'UseUnicodeResponseFiles': 'true',
          'VerboseOutput': 'true'}}
  # Expected MSBuild (.vcxproj) output.  Note the tool renames
  # (VCCLCompilerTool -> ClCompile, VCLinkerTool -> Link, ...), the
  # symbolic enum values, several setting renames (e.g. ObjectFile ->
  # ObjectFileName), and the settings that migrate to other sections
  # (ProjectReference, ManifestResourceCompile, and the '' project-level
  # section).
  expected_msbuild_settings = {
      'ClCompile': {
          'AdditionalIncludeDirectories': 'folder1;folder2;folder3',
          'AdditionalOptions': 'a_string /J',
          'AdditionalUsingDirectories': 'folder1;folder2;folder3',
          'AssemblerListingLocation': 'a_file_name',
          'AssemblerOutput': 'NoListing',
          'BasicRuntimeChecks': 'StackFrameRuntimeCheck',
          'BrowseInformation': 'true',
          'BrowseInformationFile': 'a_file_name',
          'BufferSecurityCheck': 'true',
          'CallingConvention': 'Cdecl',
          'CompileAs': 'CompileAsC',
          'DebugInformationFormat': 'EditAndContinue',
          'DisableLanguageExtensions': 'true',
          'DisableSpecificWarnings': 'd1;d2;d3',
          'EnableEnhancedInstructionSet': 'NotSet',
          'EnableFiberSafeOptimizations': 'true',
          'EnablePREfast': 'true',
          'ErrorReporting': 'Prompt',
          'ExceptionHandling': 'Async',
          'ExpandAttributedSource': 'true',
          'FavorSizeOrSpeed': 'Neither',
          'FloatingPointExceptions': 'true',
          'FloatingPointModel': 'Strict',
          'ForceConformanceInForLoopScope': 'true',
          'ForcedIncludeFiles': 'file1;file2;file3',
          'ForcedUsingFiles': 'file1;file2;file3',
          'FunctionLevelLinking': 'true',
          'GenerateXMLDocumentationFiles': 'true',
          'IgnoreStandardIncludePath': 'true',
          'InlineFunctionExpansion': 'AnySuitable',
          'IntrinsicFunctions': 'true',
          'MinimalRebuild': 'true',
          'ObjectFileName': 'a_file_name',
          'OmitDefaultLibName': 'true',
          'OmitFramePointers': 'true',
          'OpenMPSupport': 'true',
          'Optimization': 'Full',
          'PrecompiledHeader': 'Create',
          'PrecompiledHeaderFile': 'a_file_name',
          'PrecompiledHeaderOutputFile': 'a_file_name',
          'PreprocessKeepComments': 'true',
          'PreprocessorDefinitions': 'd1;d2;d3',
          'PreprocessSuppressLineNumbers': 'false',
          'PreprocessToFile': 'true',
          'ProgramDataBaseFileName': 'a_file_name',
          'RuntimeLibrary': 'MultiThreaded',
          'RuntimeTypeInfo': 'true',
          'ShowIncludes': 'true',
          'SmallerTypeCheck': 'true',
          'StringPooling': 'true',
          'StructMemberAlignment': '1Byte',
          'SuppressStartupBanner': 'true',
          'TreatWarningAsError': 'true',
          'TreatWChar_tAsBuiltInType': 'true',
          'UndefineAllPreprocessorDefinitions': 'true',
          'UndefinePreprocessorDefinitions': 'd1;d2;d3',
          'UseFullPaths': 'true',
          'WarningLevel': 'Level2',
          'WholeProgramOptimization': 'true',
          'XMLDocumentationFileName': 'a_file_name'},
      'Link': {
          'AdditionalDependencies': 'file1;file2;file3',
          'AdditionalLibraryDirectories': 'folder1;folder2;folder3',
          'AdditionalManifestDependencies': 'file1;file2;file3',
          'AdditionalOptions': 'a_string',
          'AddModuleNamesToAssembly': 'file1;file2;file3',
          'AllowIsolation': 'true',
          'AssemblyDebug': '',
          'AssemblyLinkResource': 'file1;file2;file3',
          'BaseAddress': 'a_string',
          'CLRImageType': 'ForceIJWImage',
          'CLRThreadAttribute': 'STAThreadingAttribute',
          'CLRUnmanagedCodeCheck': 'true',
          'DataExecutionPrevention': '',
          'DelayLoadDLLs': 'file1;file2;file3',
          'DelaySign': 'true',
          'Driver': 'Driver',
          'EmbedManagedResourceFile': 'file1;file2;file3',
          'EnableCOMDATFolding': '',
          'EnableUAC': 'true',
          'EntryPointSymbol': 'a_string',
          'FixedBaseAddress': 'false',
          'ForceSymbolReferences': 'file1;file2;file3',
          'FunctionOrder': 'a_file_name',
          'GenerateDebugInformation': 'true',
          'GenerateMapFile': 'true',
          'HeapCommitSize': 'a_string',
          'HeapReserveSize': 'a_string',
          'IgnoreAllDefaultLibraries': 'true',
          'IgnoreEmbeddedIDL': 'true',
          'IgnoreSpecificDefaultLibraries': 'file1;file2;file3',
          'ImportLibrary': 'a_file_name',
          'KeyContainer': 'a_file_name',
          'KeyFile': 'a_file_name',
          'LargeAddressAware': 'true',
          'LinkErrorReporting': 'NoErrorReport',
          'LinkTimeCodeGeneration': 'PGInstrument',
          'ManifestFile': 'a_file_name',
          'MapExports': 'true',
          'MapFileName': 'a_file_name',
          'MergedIDLBaseFileName': 'a_file_name',
          'MergeSections': 'a_string',
          'MidlCommandFile': 'a_file_name',
          'ModuleDefinitionFile': 'a_file_name',
          'NoEntryPoint': 'true',
          'OptimizeReferences': '',
          'OutputFile': 'a_file_name',
          'PerUserRedirection': 'true',
          'Profile': 'true',
          'ProfileGuidedDatabase': 'a_file_name',
          'ProgramDatabaseFile': 'a_file_name',
          'RandomizedBaseAddress': 'false',
          'RegisterOutput': 'true',
          'SetChecksum': 'true',
          'ShowProgress': 'NotSet',
          'StackCommitSize': 'a_string',
          'StackReserveSize': 'a_string',
          'StripPrivateSymbols': 'a_file_name',
          'SubSystem': 'Windows',
          'SupportUnloadOfDelayLoadedDLL': 'true',
          'SuppressStartupBanner': 'true',
          'SwapRunFromCD': 'true',
          'SwapRunFromNET': 'true',
          'TargetMachine': 'MachineARM',
          'TerminalServerAware': 'true',
          'TurnOffAssemblyGeneration': 'true',
          'TypeLibraryFile': 'a_file_name',
          'TypeLibraryResourceID': '33',
          'UACExecutionLevel': 'HighestAvailable',
          'UACUIAccess': 'true',
          'Version': 'a_string'},
      'ResourceCompile': {
          'AdditionalIncludeDirectories': 'folder1;folder2;folder3',
          'AdditionalOptions': 'a_string',
          'Culture': '0x03eb',
          'IgnoreStandardIncludePath': 'true',
          'PreprocessorDefinitions': 'd1;d2;d3',
          'ResourceOutputFileName': 'a_string',
          'ShowProgress': 'true',
          'SuppressStartupBanner': 'true',
          'UndefinePreprocessorDefinitions': 'd1;d2;d3'},
      'Midl': {
          'AdditionalIncludeDirectories': 'folder1;folder2;folder3',
          'AdditionalOptions': 'a_string',
          'CPreprocessOptions': 'a_string',
          'DefaultCharType': 'Unsigned',
          'DllDataFileName': 'a_file_name',
          'EnableErrorChecks': 'All',
          'ErrorCheckAllocations': 'true',
          'ErrorCheckBounds': 'true',
          'ErrorCheckEnumRange': 'true',
          'ErrorCheckRefPointers': 'true',
          'ErrorCheckStubData': 'true',
          'GenerateStublessProxies': 'true',
          'GenerateTypeLibrary': 'true',
          'HeaderFileName': 'a_file_name',
          'IgnoreStandardIncludePath': 'true',
          'InterfaceIdentifierFileName': 'a_file_name',
          'MkTypLibCompatible': 'true',
          'OutputDirectory': 'a_string',
          'PreprocessorDefinitions': 'd1;d2;d3',
          'ProxyFileName': 'a_file_name',
          'RedirectOutputAndErrors': 'a_file_name',
          'StructMemberAlignment': '4',
          'SuppressStartupBanner': 'true',
          'TargetEnvironment': 'Win32',
          'TypeLibraryName': 'a_file_name',
          'UndefinePreprocessorDefinitions': 'd1;d2;d3',
          'ValidateAllParameters': 'true',
          'WarnAsError': 'true',
          'WarningLevel': '4'},
      'Lib': {
          'AdditionalDependencies': 'file1;file2;file3',
          'AdditionalLibraryDirectories': 'folder1;folder2;folder3',
          'AdditionalOptions': 'a_string',
          'ExportNamedFunctions': 'd1;d2;d3',
          'ForceSymbolReferences': 'a_string',
          'IgnoreAllDefaultLibraries': 'true',
          'IgnoreSpecificDefaultLibraries': 'file1;file2;file3',
          'ModuleDefinitionFile': 'a_file_name',
          'OutputFile': 'a_file_name',
          'SuppressStartupBanner': 'true',
          'UseUnicodeResponseFiles': 'true'},
      'Mt': {
          'AdditionalManifestFiles': 'file1;file2;file3',
          'AdditionalOptions': 'a_string',
          'AssemblyIdentity': 'a_string',
          'ComponentFileName': 'a_file_name',
          'GenerateCatalogFiles': 'true',
          'InputResourceManifests': 'a_string',
          'OutputManifestFile': 'a_file_name',
          'RegistrarScriptFile': 'a_file_name',
          'ReplacementsFile': 'a_file_name',
          'SuppressStartupBanner': 'true',
          'TypeLibraryFile': 'a_file_name',
          'UpdateFileHashes': 'true',
          'UpdateFileHashesSearchPath': 'a_file_name',
          'VerboseOutput': 'true'},
      'ManifestResourceCompile': {
          'ResourceOutputFileName': 'my_name'},
      'ProjectReference': {
          'LinkLibraryDependencies': 'true',
          'UseLibraryDependencyInputs': 'false'},
      '': {
          'EmbedManifest': 'true',
          'GenerateManifest': 'true',
          'IgnoreImportLibrary': 'true',
          'LinkIncremental': 'false'}}
  actual_msbuild_settings = MSVSSettings.ConvertToMSBuildSettings(
      msvs_settings,
      self.stderr)
  self.assertEqual(expected_msbuild_settings, actual_msbuild_settings)
  # All input values were valid, so the conversion must be warning-free.
  self._ExpectedWarnings([])
def testConvertToMSBuildSettings_actual(self):
  """Tests the conversion of an actual project.

  A VS2008 project with most of the options defined was created through the
  VS2008 IDE. It was then converted to VS2010. The tool settings found in
  the .vcproj and .vcxproj files were converted to the two dictionaries
  msvs_settings and expected_msbuild_settings.

  Note that for many settings, the VS2010 converter adds macros like
  %(AdditionalIncludeDirectories) to make sure than inherited values are
  included. Since the Gyp projects we generate do not use inheritance,
  we removed these macros. They were:
      ClCompile:
          AdditionalIncludeDirectories: ';%(AdditionalIncludeDirectories)'
          AdditionalOptions: ' %(AdditionalOptions)'
          AdditionalUsingDirectories: ';%(AdditionalUsingDirectories)'
          DisableSpecificWarnings: ';%(DisableSpecificWarnings)',
          ForcedIncludeFiles: ';%(ForcedIncludeFiles)',
          ForcedUsingFiles: ';%(ForcedUsingFiles)',
          PreprocessorDefinitions: ';%(PreprocessorDefinitions)',
          UndefinePreprocessorDefinitions:
              ';%(UndefinePreprocessorDefinitions)',
      Link:
          AdditionalDependencies: ';%(AdditionalDependencies)',
          AdditionalLibraryDirectories: ';%(AdditionalLibraryDirectories)',
          AdditionalManifestDependencies:
              ';%(AdditionalManifestDependencies)',
          AdditionalOptions: ' %(AdditionalOptions)',
          AddModuleNamesToAssembly: ';%(AddModuleNamesToAssembly)',
          AssemblyLinkResource: ';%(AssemblyLinkResource)',
          DelayLoadDLLs: ';%(DelayLoadDLLs)',
          EmbedManagedResourceFile: ';%(EmbedManagedResourceFile)',
          ForceSymbolReferences: ';%(ForceSymbolReferences)',
          IgnoreSpecificDefaultLibraries:
              ';%(IgnoreSpecificDefaultLibraries)',
      ResourceCompile:
          AdditionalIncludeDirectories: ';%(AdditionalIncludeDirectories)',
          AdditionalOptions: ' %(AdditionalOptions)',
          PreprocessorDefinitions: ';%(PreprocessorDefinitions)',
      Mt:
          AdditionalManifestFiles: ';%(AdditionalManifestFiles)',
          AdditionalOptions: ' %(AdditionalOptions)',
          InputResourceManifests: ';%(InputResourceManifests)',
  """
  # Settings as read from the VS2008 .vcproj file.
  msvs_settings = {
      'VCCLCompilerTool': {
          'AdditionalIncludeDirectories': 'dir1',
          'AdditionalOptions': '/more',
          'AdditionalUsingDirectories': 'test',
          'AssemblerListingLocation': '$(IntDir)\\a',
          'AssemblerOutput': '1',
          'BasicRuntimeChecks': '3',
          'BrowseInformation': '1',
          'BrowseInformationFile': '$(IntDir)\\e',
          'BufferSecurityCheck': 'false',
          'CallingConvention': '1',
          'CompileAs': '1',
          'DebugInformationFormat': '4',
          'DefaultCharIsUnsigned': 'true',
          'Detect64BitPortabilityProblems': 'true',
          'DisableLanguageExtensions': 'true',
          'DisableSpecificWarnings': 'abc',
          'EnableEnhancedInstructionSet': '1',
          'EnableFiberSafeOptimizations': 'true',
          'EnableFunctionLevelLinking': 'true',
          'EnableIntrinsicFunctions': 'true',
          'EnablePREfast': 'true',
          'ErrorReporting': '2',
          'ExceptionHandling': '2',
          'ExpandAttributedSource': 'true',
          'FavorSizeOrSpeed': '2',
          'FloatingPointExceptions': 'true',
          'FloatingPointModel': '1',
          'ForceConformanceInForLoopScope': 'false',
          'ForcedIncludeFiles': 'def',
          'ForcedUsingFiles': 'ge',
          'GeneratePreprocessedFile': '2',
          'GenerateXMLDocumentationFiles': 'true',
          'IgnoreStandardIncludePath': 'true',
          'InlineFunctionExpansion': '1',
          'KeepComments': 'true',
          'MinimalRebuild': 'true',
          'ObjectFile': '$(IntDir)\\b',
          'OmitDefaultLibName': 'true',
          'OmitFramePointers': 'true',
          'OpenMP': 'true',
          'Optimization': '3',
          'PrecompiledHeaderFile': '$(IntDir)\\$(TargetName).pche',
          'PrecompiledHeaderThrough': 'StdAfx.hd',
          'PreprocessorDefinitions': 'WIN32;_DEBUG;_CONSOLE',
          'ProgramDataBaseFileName': '$(IntDir)\\vc90b.pdb',
          'RuntimeLibrary': '3',
          'RuntimeTypeInfo': 'false',
          'ShowIncludes': 'true',
          'SmallerTypeCheck': 'true',
          'StringPooling': 'true',
          'StructMemberAlignment': '3',
          'SuppressStartupBanner': 'false',
          'TreatWChar_tAsBuiltInType': 'false',
          'UndefineAllPreprocessorDefinitions': 'true',
          'UndefinePreprocessorDefinitions': 'wer',
          'UseFullPaths': 'true',
          'UsePrecompiledHeader': '0',
          'UseUnicodeResponseFiles': 'false',
          'WarnAsError': 'true',
          'WarningLevel': '3',
          'WholeProgramOptimization': 'true',
          'XMLDocumentationFileName': '$(IntDir)\\c'},
      'VCLinkerTool': {
          'AdditionalDependencies': 'zx',
          'AdditionalLibraryDirectories': 'asd',
          'AdditionalManifestDependencies': 's2',
          'AdditionalOptions': '/mor2',
          'AddModuleNamesToAssembly': 'd1',
          'AllowIsolation': 'false',
          'AssemblyDebug': '1',
          'AssemblyLinkResource': 'd5',
          'BaseAddress': '23423',
          'CLRImageType': '3',
          'CLRThreadAttribute': '1',
          'CLRUnmanagedCodeCheck': 'true',
          'DataExecutionPrevention': '0',
          'DelayLoadDLLs': 'd4',
          'DelaySign': 'true',
          'Driver': '2',
          'EmbedManagedResourceFile': 'd2',
          'EnableCOMDATFolding': '1',
          'EnableUAC': 'false',
          'EntryPointSymbol': 'f5',
          'ErrorReporting': '2',
          'FixedBaseAddress': '1',
          'ForceSymbolReferences': 'd3',
          'FunctionOrder': 'fssdfsd',
          'GenerateDebugInformation': 'true',
          'GenerateManifest': 'false',
          'GenerateMapFile': 'true',
          'HeapCommitSize': '13',
          'HeapReserveSize': '12',
          'IgnoreAllDefaultLibraries': 'true',
          'IgnoreDefaultLibraryNames': 'flob;flok',
          'IgnoreEmbeddedIDL': 'true',
          'IgnoreImportLibrary': 'true',
          'ImportLibrary': 'f4',
          'KeyContainer': 'f7',
          'KeyFile': 'f6',
          'LargeAddressAware': '2',
          'LinkIncremental': '0',
          'LinkLibraryDependencies': 'false',
          'LinkTimeCodeGeneration': '1',
          'ManifestFile':
              '$(IntDir)\\$(TargetFileName).2intermediate.manifest',
          'MapExports': 'true',
          'MapFileName': 'd5',
          'MergedIDLBaseFileName': 'f2',
          'MergeSections': 'f5',
          'MidlCommandFile': 'f1',
          'ModuleDefinitionFile': 'sdsd',
          'OptimizeForWindows98': '2',
          'OptimizeReferences': '2',
          'OutputFile': '$(OutDir)\\$(ProjectName)2.exe',
          'PerUserRedirection': 'true',
          'Profile': 'true',
          'ProfileGuidedDatabase': '$(TargetDir)$(TargetName).pgdd',
          'ProgramDatabaseFile': 'Flob.pdb',
          'RandomizedBaseAddress': '1',
          'RegisterOutput': 'true',
          'ResourceOnlyDLL': 'true',
          'SetChecksum': 'false',
          'ShowProgress': '1',
          'StackCommitSize': '15',
          'StackReserveSize': '14',
          'StripPrivateSymbols': 'd3',
          'SubSystem': '1',
          'SupportUnloadOfDelayLoadedDLL': 'true',
          'SuppressStartupBanner': 'false',
          'SwapRunFromCD': 'true',
          'SwapRunFromNet': 'true',
          'TargetMachine': '1',
          'TerminalServerAware': '1',
          'TurnOffAssemblyGeneration': 'true',
          'TypeLibraryFile': 'f3',
          'TypeLibraryResourceID': '12',
          'UACExecutionLevel': '2',
          'UACUIAccess': 'true',
          'UseLibraryDependencyInputs': 'true',
          'UseUnicodeResponseFiles': 'false',
          'Version': '333'},
      'VCResourceCompilerTool': {
          'AdditionalIncludeDirectories': 'f3',
          'AdditionalOptions': '/more3',
          'Culture': '3084',
          'IgnoreStandardIncludePath': 'true',
          'PreprocessorDefinitions': '_UNICODE;UNICODE2',
          'ResourceOutputFileName': '$(IntDir)/$(InputName)3.res',
          'ShowProgress': 'true'},
      'VCManifestTool': {
          'AdditionalManifestFiles': 'sfsdfsd',
          'AdditionalOptions': 'afdsdafsd',
          'AssemblyIdentity': 'sddfdsadfsa',
          'ComponentFileName': 'fsdfds',
          'DependencyInformationFile': '$(IntDir)\\mt.depdfd',
          'EmbedManifest': 'false',
          'GenerateCatalogFiles': 'true',
          'InputResourceManifests': 'asfsfdafs',
          'ManifestResourceFile':
              '$(IntDir)\\$(TargetFileName).embed.manifest.resfdsf',
          'OutputManifestFile': '$(TargetPath).manifestdfs',
          'RegistrarScriptFile': 'sdfsfd',
          'ReplacementsFile': 'sdffsd',
          'SuppressStartupBanner': 'false',
          'TypeLibraryFile': 'sfsd',
          'UpdateFileHashes': 'true',
          'UpdateFileHashesSearchPath': 'sfsd',
          'UseFAT32Workaround': 'true',
          'UseUnicodeResponseFiles': 'false',
          'VerboseOutput': 'true'}}
  # Settings as the VS2010 converter produced them (minus the inherited
  # value macros listed in the docstring above).
  expected_msbuild_settings = {
      'ClCompile': {
          'AdditionalIncludeDirectories': 'dir1',
          'AdditionalOptions': '/more /J',
          'AdditionalUsingDirectories': 'test',
          'AssemblerListingLocation': '$(IntDir)a',
          'AssemblerOutput': 'AssemblyCode',
          'BasicRuntimeChecks': 'EnableFastChecks',
          'BrowseInformation': 'true',
          'BrowseInformationFile': '$(IntDir)e',
          'BufferSecurityCheck': 'false',
          'CallingConvention': 'FastCall',
          'CompileAs': 'CompileAsC',
          'DebugInformationFormat': 'EditAndContinue',
          'DisableLanguageExtensions': 'true',
          'DisableSpecificWarnings': 'abc',
          'EnableEnhancedInstructionSet': 'StreamingSIMDExtensions',
          'EnableFiberSafeOptimizations': 'true',
          'EnablePREfast': 'true',
          'ErrorReporting': 'Queue',
          'ExceptionHandling': 'Async',
          'ExpandAttributedSource': 'true',
          'FavorSizeOrSpeed': 'Size',
          'FloatingPointExceptions': 'true',
          'FloatingPointModel': 'Strict',
          'ForceConformanceInForLoopScope': 'false',
          'ForcedIncludeFiles': 'def',
          'ForcedUsingFiles': 'ge',
          'FunctionLevelLinking': 'true',
          'GenerateXMLDocumentationFiles': 'true',
          'IgnoreStandardIncludePath': 'true',
          'InlineFunctionExpansion': 'OnlyExplicitInline',
          'IntrinsicFunctions': 'true',
          'MinimalRebuild': 'true',
          'ObjectFileName': '$(IntDir)b',
          'OmitDefaultLibName': 'true',
          'OmitFramePointers': 'true',
          'OpenMPSupport': 'true',
          'Optimization': 'Full',
          'PrecompiledHeader': 'NotUsing',  # Actual conversion gives ''
          'PrecompiledHeaderFile': 'StdAfx.hd',
          'PrecompiledHeaderOutputFile': '$(IntDir)$(TargetName).pche',
          'PreprocessKeepComments': 'true',
          'PreprocessorDefinitions': 'WIN32;_DEBUG;_CONSOLE',
          'PreprocessSuppressLineNumbers': 'true',
          'PreprocessToFile': 'true',
          'ProgramDataBaseFileName': '$(IntDir)vc90b.pdb',
          'RuntimeLibrary': 'MultiThreadedDebugDLL',
          'RuntimeTypeInfo': 'false',
          'ShowIncludes': 'true',
          'SmallerTypeCheck': 'true',
          'StringPooling': 'true',
          'StructMemberAlignment': '4Bytes',
          'SuppressStartupBanner': 'false',
          'TreatWarningAsError': 'true',
          'TreatWChar_tAsBuiltInType': 'false',
          'UndefineAllPreprocessorDefinitions': 'true',
          'UndefinePreprocessorDefinitions': 'wer',
          'UseFullPaths': 'true',
          'WarningLevel': 'Level3',
          'WholeProgramOptimization': 'true',
          'XMLDocumentationFileName': '$(IntDir)c'},
      'Link': {
          'AdditionalDependencies': 'zx',
          'AdditionalLibraryDirectories': 'asd',
          'AdditionalManifestDependencies': 's2',
          'AdditionalOptions': '/mor2',
          'AddModuleNamesToAssembly': 'd1',
          'AllowIsolation': 'false',
          'AssemblyDebug': 'true',
          'AssemblyLinkResource': 'd5',
          'BaseAddress': '23423',
          'CLRImageType': 'ForceSafeILImage',
          'CLRThreadAttribute': 'MTAThreadingAttribute',
          'CLRUnmanagedCodeCheck': 'true',
          'DataExecutionPrevention': '',
          'DelayLoadDLLs': 'd4',
          'DelaySign': 'true',
          'Driver': 'UpOnly',
          'EmbedManagedResourceFile': 'd2',
          'EnableCOMDATFolding': 'false',
          'EnableUAC': 'false',
          'EntryPointSymbol': 'f5',
          'FixedBaseAddress': 'false',
          'ForceSymbolReferences': 'd3',
          'FunctionOrder': 'fssdfsd',
          'GenerateDebugInformation': 'true',
          'GenerateMapFile': 'true',
          'HeapCommitSize': '13',
          'HeapReserveSize': '12',
          'IgnoreAllDefaultLibraries': 'true',
          'IgnoreEmbeddedIDL': 'true',
          'IgnoreSpecificDefaultLibraries': 'flob;flok',
          'ImportLibrary': 'f4',
          'KeyContainer': 'f7',
          'KeyFile': 'f6',
          'LargeAddressAware': 'true',
          'LinkErrorReporting': 'QueueForNextLogin',
          'LinkTimeCodeGeneration': 'UseLinkTimeCodeGeneration',
          'ManifestFile': '$(IntDir)$(TargetFileName).2intermediate.manifest',
          'MapExports': 'true',
          'MapFileName': 'd5',
          'MergedIDLBaseFileName': 'f2',
          'MergeSections': 'f5',
          'MidlCommandFile': 'f1',
          'ModuleDefinitionFile': 'sdsd',
          'NoEntryPoint': 'true',
          'OptimizeReferences': 'true',
          'OutputFile': '$(OutDir)$(ProjectName)2.exe',
          'PerUserRedirection': 'true',
          'Profile': 'true',
          'ProfileGuidedDatabase': '$(TargetDir)$(TargetName).pgdd',
          'ProgramDatabaseFile': 'Flob.pdb',
          'RandomizedBaseAddress': 'false',
          'RegisterOutput': 'true',
          'SetChecksum': 'false',
          'ShowProgress': 'LinkVerbose',
          'StackCommitSize': '15',
          'StackReserveSize': '14',
          'StripPrivateSymbols': 'd3',
          'SubSystem': 'Console',
          'SupportUnloadOfDelayLoadedDLL': 'true',
          'SuppressStartupBanner': 'false',
          'SwapRunFromCD': 'true',
          'SwapRunFromNET': 'true',
          'TargetMachine': 'MachineX86',
          'TerminalServerAware': 'false',
          'TurnOffAssemblyGeneration': 'true',
          'TypeLibraryFile': 'f3',
          'TypeLibraryResourceID': '12',
          'UACExecutionLevel': 'RequireAdministrator',
          'UACUIAccess': 'true',
          'Version': '333'},
      'ResourceCompile': {
          'AdditionalIncludeDirectories': 'f3',
          'AdditionalOptions': '/more3',
          'Culture': '0x0c0c',
          'IgnoreStandardIncludePath': 'true',
          'PreprocessorDefinitions': '_UNICODE;UNICODE2',
          'ResourceOutputFileName': '$(IntDir)%(Filename)3.res',
          'ShowProgress': 'true'},
      'Mt': {
          'AdditionalManifestFiles': 'sfsdfsd',
          'AdditionalOptions': 'afdsdafsd',
          'AssemblyIdentity': 'sddfdsadfsa',
          'ComponentFileName': 'fsdfds',
          'GenerateCatalogFiles': 'true',
          'InputResourceManifests': 'asfsfdafs',
          'OutputManifestFile': '$(TargetPath).manifestdfs',
          'RegistrarScriptFile': 'sdfsfd',
          'ReplacementsFile': 'sdffsd',
          'SuppressStartupBanner': 'false',
          'TypeLibraryFile': 'sfsd',
          'UpdateFileHashes': 'true',
          'UpdateFileHashesSearchPath': 'sfsd',
          'VerboseOutput': 'true'},
      'ProjectReference': {
          'LinkLibraryDependencies': 'false',
          'UseLibraryDependencyInputs': 'true'},
      '': {
          'EmbedManifest': 'false',
          'GenerateManifest': 'false',
          'IgnoreImportLibrary': 'true',
          'LinkIncremental': ''
          },
      'ManifestResourceCompile': {
          'ResourceOutputFileName':
              '$(IntDir)$(TargetFileName).embed.manifest.resfdsf'}
      }
  actual_msbuild_settings = MSVSSettings.ConvertToMSBuildSettings(
      msvs_settings,
      self.stderr)
  self.assertEqual(expected_msbuild_settings, actual_msbuild_settings)
  # Every setting in this real-world project converts cleanly.
  self._ExpectedWarnings([])
# Run the unit tests in this module when it is executed directly.
if __name__ == '__main__':
  unittest.main()
| bsd-3-clause |
aleju/cat-face-locator | apply_convnet.py | 1 | 3844 | # -*- coding: utf-8 -*-
"""
File to apply the trained ConvNet model to a number of images.
It will use the ConvNet to locate cat faces in the images and mark them.
It is expected that each image contains exactly one cat (i.e. a face will be
extracted out of each image, even if there is no cat).
If an image contains multiple cats, only one face will be extracted.
Usage:
python train.py
python apply_convnet.py
"""
from __future__ import division, print_function
from dataset import Dataset
import os
import re
import numpy as np
import argparse
import random
from scipy import ndimage
from scipy import misc
from train_convnet import MODEL_IMAGE_HEIGHT, MODEL_IMAGE_WIDTH, \
BATCH_SIZE, SAVE_WEIGHTS_CHECKPOINT_FILEPATH, \
create_model, draw_predicted_rectangle
from keras.optimizers import Adam
np.random.seed(42)
random.seed(42)
CURRENT_DIR = os.path.dirname(os.path.realpath(__file__))
OUT_SCALE = 64 # scale (height, width) of each saved image
def main():
    """Locate cat faces in all images of a directory and save marked copies.

    Steps:
      * Parse CLI arguments (--images, --weights, --output).
      * Load every .jpg in the input directory, resized to the model input size.
      * Run the trained ConvNet to predict one face rectangle per image.
      * Draw each predicted rectangle and save the image to the output dir.
    """
    parser = argparse.ArgumentParser(description="Apply a trained cat face locator " \
                                                 "model to images.")
    parser.add_argument("--images", required=True, help="Directory containing images to analyze.")
    parser.add_argument("--weights", required=False, default=SAVE_WEIGHTS_CHECKPOINT_FILEPATH,
                        help="Filepath to the weights of the model.")
    parser.add_argument("--output", required=False, default=os.path.join(CURRENT_DIR, "predictions"),
                        help="Filepath to the directory in which to save the output.")
    args = parser.parse_args()

    # make sure the output directory exists, otherwise misc.imsave() below
    # fails with an IOError on the very first image
    if not os.path.isdir(args.output):
        os.makedirs(args.output)

    # load images
    filepaths = get_image_filepaths([args.images])
    filenames = [os.path.basename(fp) for fp in filepaths]  # will be used during saving
    nb_images = len(filepaths)
    X = np.zeros((nb_images, MODEL_IMAGE_HEIGHT, MODEL_IMAGE_WIDTH, 3), dtype=np.float32)
    for i, fp in enumerate(filepaths):
        image = ndimage.imread(fp, mode="RGB")
        image = misc.imresize(image, (MODEL_IMAGE_HEIGHT, MODEL_IMAGE_WIDTH))
        X[i] = image / 255.0
    # convert to channels-first layout (N, 3, H, W) expected by the model
    X = np.rollaxis(X, 3, 1)

    # assure that dataset is not empty
    print("Found %d images..." % (X.shape[0],))
    assert X.shape[0] > 0, "The dataset appears to be empty (shape of X: %s)." % (X.shape,)

    # create model
    model = create_model(MODEL_IMAGE_HEIGHT, MODEL_IMAGE_WIDTH, "mse", Adam())
    model.load_weights(args.weights)

    # predict positions of faces
    preds = model.predict(X, batch_size=BATCH_SIZE)

    # Draw predicted rectangles and save
    print("Saving images...")
    for idx, (y, x, half_height, half_width) in enumerate(preds):
        img = draw_predicted_rectangle(X[idx], y, x, half_height, half_width)
        filepath = os.path.join(args.output, filenames[idx])
        misc.imsave(filepath, img)
def get_image_filepaths(dirs):
    """Collect the filepaths of all .jpg images in the given directories.

    :param dirs: list of directory paths (strings)
    :returns: list of full filepaths (strings), in directory-listing order
    """
    jpg_pattern = re.compile(r".*\.jpg$")
    filepaths = []
    for directory in dirs:
        for name in os.listdir(directory):
            full_path = os.path.join(directory, name)
            if os.path.isfile(full_path) and jpg_pattern.match(full_path):
                filepaths.append(full_path)
    return filepaths
if __name__ == "__main__":
main()
| mit |
karimone/django_test | django_project/polls/migrations/0001_initial.py | 1 | 1230 | # -*- coding: utf-8 -*-
# Generated by Django 1.10.4 on 2017-01-02 07:13
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated by Django's makemigrations (Django 1.10.4); avoid
    # hand-editing the operations once this migration has been applied.

    # First migration of the 'polls' app; no prior migration state.
    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Choice',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('choice_text', models.CharField(max_length=200)),
                ('votes', models.IntegerField(default=0)),
            ],
        ),
        migrations.CreateModel(
            name='Question',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('question_text', models.CharField(max_length=200)),
                ('pub_date', models.DateTimeField(verbose_name='date published')),
            ],
        ),
        migrations.AddField(
            model_name='choice',
            name='question',
            # Deleting a Question cascades to its Choices.
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='polls.Question'),
        ),
    ]
| gpl-3.0 |
psywhale/pyswitchvox | switchvox_common.py | 1 | 2137 | """
Switchvox common methods
"""
import urllib2
try:
import json
except ImportError:
import simplejson as json
def switchvox_request(username, password, json_req, hostname):
    """POST a JSON request body to the Switchvox API and return the raw
    response body.

    Uses HTTP digest authentication against https://<hostname>/json.

    :param username: API username
    :param password: API password
    :param json_req: already-serialized JSON request string
    :param hostname: Switchvox server hostname
    :returns: response body as a string
    """
    url = "https://%s" % hostname
    # create a password manager for digest auth
    passManager = urllib2.HTTPPasswordMgrWithDefaultRealm()
    passManager.add_password(None, url, username, password)
    # setup auth handler and opener
    authHandler = urllib2.HTTPDigestAuthHandler(passManager)
    urlOpener = urllib2.build_opener(authHandler)
    # Set default HTTP headers on the opener.  The correct attribute is
    # 'addheaders' (a list of (name, value) tuples); the previous code
    # assigned a dict to a non-existent 'add_headers' attribute, which
    # urllib2 silently ignored, so no custom headers were ever sent.
    urlOpener.addheaders = [('Host', hostname),
                            ('Content-Type', 'text/json'),
                            ('Content-Length', str(len(json_req)))]
    # send request and return the response body
    req = urlOpener.open(url + "/json", data=json_req)
    return req.read()
def get_errors(response):
    """Return the list of error dicts contained in a Switchvox response.

    The server nests errors under response['response']['errors']['error'];
    that slot holds a single dict for one error or a list of dicts for
    several.  Always returns a flat list (empty when there are no errors).

    example output:
    [{'code': 10011, 'message': 'Missing required parameter (dial_as_account_id)'}]
    [{"code": 10010, "message": "Invalid extension (abc). ..."},
     {"code": 10010, "message": "Invalid extension (def). ..."}]
    """
    # No 'error' slot at all means a clean response.
    try:
        error = response['response']['errors']['error']
    except KeyError:
        return []
    # A single error arrives as a dict; several arrive as a list of dicts.
    if hasattr(error, 'items'):
        return [error]
    return list(error)
def request_form(method, parameters=None):
    """Serialize a Switchvox API call into its JSON request envelope.

    :param method: API method name string
    :param parameters: optional mapping of call parameters (copied, never
        mutated); a missing/empty value becomes an empty dict
    :returns: JSON string of the request envelope
    """
    payload = dict(parameters) if parameters else {}
    envelope = {"request": {
        "version": "17487",
        "method": method,
        "parameters": payload}}
    return json.dumps(envelope)
| bsd-3-clause |
yinwenpeng/rescale | examples/kinships.py | 9 | 2823 | #!/usr/bin/env python
import logging
logging.basicConfig(level=logging.INFO)
_log = logging.getLogger('Example Kinships')
import numpy as np
from numpy import dot, array, zeros, setdiff1d
from numpy.linalg import norm
from numpy.random import shuffle
from scipy.io.matlab import loadmat
from scipy.sparse import lil_matrix
from sklearn.metrics import precision_recall_curve, auc
from rescal import rescal_als
def predict_rescal_als(T):
    """Factorize the relational tensor with RESCAL-ALS and reconstruct it.

    :param T: list of (n x n) sparse frontal slices, one per relation
    :returns: dense (n x n x len(T)) array where slice k is A * R_k * A^T,
        i.e. the predicted score for every (subject, object, relation) triple
    """
    A, R, _, _, _ = rescal_als(
        T, 100, init='nvecs', conv=1e-3,
        lambda_A=10, lambda_R=10
    )
    n = A.shape[0]
    P = zeros((n, n, len(R)))
    for k, Rk in enumerate(R):
        P[:, :, k] = A.dot(Rk).dot(A.T)
    return P
def normalize_predictions(P, e, k):
    """Normalize each score vector P[a, b, :k] to unit Euclidean length, in place.

    Vectorized replacement for the original O(e*e) Python double loop: every
    fiber with a non-zero norm is divided by its norm and rounded to 3
    decimals (rounding speeds up the later AUC-PR computation).

    :param P: dense prediction tensor of shape (>=e, >=e, >=k), modified in place
    :param e: number of entities (rows/columns to normalize)
    :param k: number of relation slices included in each norm
    :returns: P (the same array object, for chaining)
    """
    fibers = P[:e, :e, :k]  # basic-slice view: writes below go through to P
    norms = np.sqrt((fibers ** 2).sum(axis=2))
    nonzero = norms != 0
    # round values for faster computation of AUC-PR
    fibers[nonzero] = np.round_(
        fibers[nonzero] / norms[nonzero][:, None], decimals=3)
    return P
def innerfold(T, mask_idx, target_idx, e, k, sz):
    """Run one cross-validation fold and return its AUC-PR score.

    Masks the tensor cells at ``mask_idx`` (flat indices into the e x e x k
    tensor), refits the model on the masked copy, and scores the predictions
    at ``target_idx`` against the module-level GROUND_TRUTH array.
    NOTE(review): ``sz`` is unused here; kept for call-site compatibility.
    """
    # Work on copies so the caller's slices survive across folds.
    Tc = [Ti.copy() for Ti in T]
    mask_idx = np.unravel_index(mask_idx, (e, e, k))
    target_idx = np.unravel_index(target_idx, (e, e, k))
    # set values to be predicted to zero
    for i in range(len(mask_idx[0])):
        Tc[mask_idx[2][i]][mask_idx[0][i], mask_idx[1][i]] = 0
    # predict unknown values
    P = predict_rescal_als(Tc)
    P = normalize_predictions(P, e, k)
    # compute area under precision recall curve
    prec, recall, _ = precision_recall_curve(GROUND_TRUTH[target_idx], P[target_idx])
    return auc(recall, prec)
if __name__ == '__main__':
    # load data
    mat = loadmat('data/alyawarradata.mat')
    K = array(mat['Rs'], np.float32)
    e, k = K.shape[0], K.shape[2]  # number of entities, number of relations
    SZ = e * e * k  # total number of tensor cells
    # copy ground truth before preprocessing
    GROUND_TRUTH = K.copy()
    # construct array for rescal: one sparse frontal slice per relation
    T = [lil_matrix(K[:, :, i]) for i in range(k)]
    _log.info('Datasize: %d x %d x %d | No. of classes: %d' % (
        T[0].shape + (len(T),) + (k,))
    )
    # Do cross-validation
    FOLDS = 10
    IDX = list(range(SZ))
    shuffle(IDX)
    fsz = int(SZ / FOLDS)  # cells per fold
    offset = 0
    AUC_train = zeros(FOLDS)
    AUC_test = zeros(FOLDS)
    for f in range(FOLDS):
        # test fold is a contiguous slice of the shuffled indices; the train
        # measurement uses an equally-sized random sample of the remainder
        idx_test = IDX[offset:offset + fsz]
        idx_train = setdiff1d(IDX, idx_test)
        shuffle(idx_train)
        idx_train = idx_train[:fsz].tolist()
        _log.info('Train Fold %d' % f)
        AUC_train[f] = innerfold(T, idx_train + idx_test, idx_train, e, k, SZ)
        _log.info('Test Fold %d' % f)
        AUC_test[f] = innerfold(T, idx_test, idx_test, e, k, SZ)
        offset += fsz
    _log.info('AUC-PR Test Mean / Std: %f / %f' % (AUC_test.mean(), AUC_test.std()))
    _log.info('AUC-PR Train Mean / Std: %f / %f' % (AUC_train.mean(), AUC_train.std()))
| gpl-3.0 |
Patzifist/report-engine | modules/clients/python-client/reclient.py | 4 | 12648 | """ ReportEngineClient
"""
import traceback,logging,os,re
import requests,json,StringIO
import jprops
import datetime,pytz
HEADERS = {'Content-Type':'application/json','accept':'application/json'}
TEST_STATUSES = ['Passed', 'Failed', 'Skipped', 'Running', 'NoStatus']
LOG_LEVELS=['ALL','DEBUG', 'INFO', 'WARNING', 'ERROR','CRITICAL', 'DEFAULT']
def _now():
    """Return the current UTC time as 'YYYY-MM-DDTHH:MM:SS.mmm'.

    The fractional part is milliseconds (3 digits), derived from the
    microsecond field.
    """
    now = datetime.datetime.now(pytz.utc)
    strnow = now.strftime('%Y-%m-%dT%H:%M:%S')
    # Zero-pad microseconds to their full 6 digits before truncating to
    # milliseconds.  The old '%03d' padding turned e.g. 45 microseconds
    # into '045', i.e. it reported 45 milliseconds.
    msec = '%06d' % now.microsecond
    return "%s.%s" % (strnow, msec[:3])
class ReportEngineClient():
    """
    ReportEngineClient enables you to push test results to report-engine server. Client is configured
    via property file (to keep compatibility with java report-engine client)
    There is an expected workflow with this client:
    1. Create an instance of ReportEngineClient
    2. Configure logging and add ReportEngineClient.getLogHandler()
    3. Insert a test suite
    3a. Insert a test group
    4. Insert test (if no test group was inserted, 'default' is inserted instead)
    5. Set test as finished
    6. update test suite
    ReportEngineClient remembers the test suite, test group and test that was inserted last, so when
    ReportEngineLogHandler starts submitting log records, the client knows which test they belong to.
    """
    def __init__(self,config_file=None,config_file_orig=None):
        """ Creates new instance of report-engine client
        You can specify 2 config files. 'config_file' is a main config. It can reference 'config_file_orig'
        within a 'ORIGINAL.FILE' config property. Original (base) file keeps default values that get overridden
        by values in 'config_file'. If you pass non-null 'config_file_orig' it will be used as original config and
        'ORIGINAL.FILE' option within 'config_file' will be ignored.
        :Parameters:
            config_file : string
                File or URL to main config file (java properties format)
            config_file_orig : string
                File or URL to base (original) config file (java properties format)
        """
        self.log = logging.getLogger()
        self.config = None
        self.logHandler = ReportEngineLogHandler(self)
        config = self.read_config_file(config_file)
        if config:
            orig_config = None
            if config_file_orig: # read original (base) file from passed parameter
                orig_config = self.read_config_file(config_file_orig)
            elif config.has_key('ORIGINAL.FILE') and len(config['ORIGINAL.FILE'].strip()) > 0: # or check ORIGINAL.FILE property
                orig_config = self.read_config_file(config['ORIGINAL.FILE'])
            if orig_config: # override values in original config
                orig_config.update(config)
                config = orig_config
        elif config_file_orig:
            raise Exception('Failed to configure ReportEngineClient: invalid original config')
        self.config = config
        if not self.config:
            raise Exception('Failed to configure ReportEngineClient: no config file')
        # check config keys
        for key in ['REPORT.ENGINE.TEST.REFERENCE','REPORT.ENGINE.WATCH.LOGGER','REPORT.ENGINE.TEST.BUILD.VERSION.REFF','REPORT.ENGINE.SERVER.REST.URL','TEST.SUITE.NAME','REPORT.ENGINE.LOGGER.LEVEL']:
            if not self.config.has_key(key):
                raise Exception('Failed to configure ReportEngineClient, missing %s property' % key)
        # REST endpoint root for all test-result resources
        self.url = config['REPORT.ENGINE.SERVER.REST.URL'].rstrip('/')+'/testresults/'
        # helper dictionary to store test-related information
        # (keys used: testSuiteId, testGroupId, testCaseId, suiteName)
        self.status = {}
        # retrieve new ID for this testsuite from ReportEngine server
        try:
            self.status['testSuiteId'] = int(self.get('testsuiteid').text)
        except requests.exceptions.RequestException:
            self.config = None
            raise Exception('Unable to connect to report-engine server %s' % config['REPORT.ENGINE.SERVER.REST.URL'])
        if not self.config['REPORT.ENGINE.LOGGER.LEVEL'] in LOG_LEVELS:
            raise Exception('Invalid %s=%s allowed options are %s' %
                ('REPORT.ENGINE.LOGGER.LEVEL',self.config['REPORT.ENGINE.LOGGER.LEVEL'],str(LOG_LEVELS)))
        self.logHandler.reportLevel = self.config['REPORT.ENGINE.LOGGER.LEVEL']
        if len(self.config['REPORT.ENGINE.TEST.REFERENCE']) == 0:
            raise Exception('Configuration property REPORT.ENGINE.TEST.REFERENCE must not be empty')
        if len(self.config['TEST.SUITE.NAME']) > 0:
            self.status['suiteName'] = self.config['TEST.SUITE.NAME']
        else:
            self.status['suiteName'] = self.config['REPORT.ENGINE.TEST.REFERENCE']
    def debug(self,record):
        """Debugging hook; intentionally a no-op (dev tracing kept below,
        commented out).
        """
        pass
        #if type(record) == requests.Response:
        #    record = 'Status: %d Response: %s' % (record.status_code,record.text)
        #with open('/tmp/test.log','a') as fp:
        #    fp.write('Client DEBUG: '+str(record)+'\n')
    def insertSuite(self,name=None):
        """Inserts a new test suite to report-engine server
        :Parameters:
            name : string
                Name of testsuite, if None 'TEST.SUITE.NAME' or 'REPORT.ENGINE.TEST.REFERENCE' config option is used
        """
        name = name or self.status['suiteName']
        data = {'id':self.status['testSuiteId'],
                'testStatus':'Running',
                'remoteStartTime':_now(),
                'testSuiteName':name,
                'testReference':self.config['REPORT.ENGINE.TEST.REFERENCE']}
        r = self.post('testsuite',data)
        self.debug(r)
    def updateSuite(self,name=None,status=''):
        """Updates existing testsuite to report-engine server
        :Parameters:
            name : string
                Name of testsuite, if None 'TEST.SUITE.NAME' or 'REPORT.ENGINE.TEST.REFERENCE' config option is used
            status : string
                Test suite status, allowed values are 'Running','Completed'
        """
        name = name or self.status['suiteName']
        data = {'id':self.status['testSuiteId'],
                'testStatus':status,
                'testSuiteName':name,
                'testBuild':os.getenv(self.config['REPORT.ENGINE.TEST.BUILD.VERSION.REFF'],''),
                'testReference':self.config['REPORT.ENGINE.TEST.REFERENCE']}
        r = self.put('testsuite',data)
        self.debug(r)
    def insertTestGroup(self,name):
        """ Inserts a new test group to report-engine server
        :Parameters:
            name : string
                Name of test-group
        """
        data = {'testSuiteId':self.status['testSuiteId'],'testGroup':name,'remoteTime':_now()}
        r = self.post('testgroup',data)
        self.debug(r)
        # remember the server-assigned group id for subsequent test cases
        self.status['testGroupId'] = r.json()['id']
    def insertTest(self,name):
        """ Inserts a new test case to report-engine server. This registers
        test case on server
        :Parameters:
            name : string
                Name of test case
        """
        if not self.status.has_key('testGroupId'):
            self.insertTestGroup('default')
        if self.status.has_key('testCaseId'):
            # test was probably skipped (setTestFinished was not called after insertTest)
            self.setTestFinished(name,'Skipped')
        data = {'testSuiteId':self.status['testSuiteId'],
                'testGroupId':self.status['testGroupId'],
                'testName':name,
                'testResult':'Running',
                'remoteStartTime':_now()
                }
        r = self.post('testcase',data)
        self.debug(r)
        self.status['testCaseId'] = r.json()['id']
    def setTestFinished(self,name,status):
        """ Sets current test (previously added by `insertTest`) as finished
        :Parameters:
            name : string
                Name of test case
            status : string
                test case status, available options are 'Passed','Failed','Skipped'
        """
        if not self.status.has_key('testCaseId'):
            self.insertTest(name)
        if not status in TEST_STATUSES:
            raise Exception('Invalid test status, possible values are : '+str(TEST_STATUSES))
        data = {'testSuiteId':self.status['testSuiteId'],
                'testGroupId':self.status['testGroupId'],
                'id':self.status['testCaseId'],
                'testName':name,
                'testResult':status,
                'remoteEndTime':_now()
                }
        r = self.put('testcase',data)
        # clearing testCaseId marks that no test is currently running
        del self.status['testCaseId']
        self.debug(r)
    def addLogMessage(self,record):
        """ Inserts a new log record to report-engine server
        Note: it's not intended to be used by clients, but by :class: `ReportEngineLogHandler`
        :Parameters:
            record : logging.LogRecord
        """
        if not (self.status.has_key('testGroupId') and self.status.has_key('testCaseId')):
            # do not proceed when we are not in the middle of test
            return
        if record.name.find('requests.packages') >= 0:
            m = re.search('^(http://|https://)(?P<host>[^:/]+).*$',self.url)
            if m:
                host = m.group('host')
                if record.msg.find(host) >= 0 or record.msg.find('/resteasy/testresults/') > 0:
                    # do not log messages about requests going to report-engine server
                    return
        data = {'testSuiteId':self.status['testSuiteId'],
                'testGroupId':self.status['testGroupId'],
                'testCaseId':self.status['testCaseId'],
                'sequenceNumber':0,
                'logTime':_now(),
                'logLevel':record.levelname,
                'className':record.module,
                'methodName':record.funcName,
                'message':str(record.msg)}
        r = self.post('testlog',data)
        self.debug(r)
    def _formatErr(self, err):
        """
        Formats error (a tuple (exctype, value, tb)) into a traceback string.
        Returns None when err is falsy.
        """
        if err:
            exctype, value, tb = err
            return ''.join(traceback.format_exception(exctype, value, tb))
    def post(self,resource, data):
        # POST JSON-serialized data to <url>/<resource>
        self.debug(data)
        return requests.post(self.url+resource,headers = HEADERS,data=json.dumps(data))
    def put(self, resource, data):
        # PUT JSON-serialized data to <url>/<resource>
        self.debug(data)
        return requests.put(self.url+resource,headers = HEADERS,data=json.dumps(data))
    def get(self, resource):
        # GET <url>/<resource> with a 5 second timeout
        return requests.get(self.url+resource,headers = HEADERS,timeout=5)
    def read_config_file(self,config):
        """Reads config file or URL and parses its content
        :Parameters:
            config: string
                File path or URL to config file
        """
        if not config:
            return
        fp = None
        if config.find('http') == 0:
            data = str(requests.get(config).text)
            fp = StringIO.StringIO(data)
        else:
            fp = open(config,'rb')
        if fp:
            props = jprops.load_properties(fp)
            fp.close()
            # NOTE(review): this map() builds a stripped copy that is then
            # discarded - the returned props are NOT actually stripped.
            map(lambda x: x.strip(),props)
            return props
    def getLogHandler(self):
        """
        returns :class: `ReportEngineLogHandler` or :class: `logging.NullHandler`
        based on configuration (property 'REPORT.ENGINE.WATCH.LOGGER'=true|false)
        """
        if self.config and self.config['REPORT.ENGINE.WATCH.LOGGER'].lower() == 'true':
            return self.logHandler
        return logging.NullHandler()
class ReportEngineLogHandler(logging.Handler):
    """Logging handler that forwards emitted records to a ReportEngineClient.

    Records are forwarded unconditionally when the configured report level
    is 'ALL' or 'DEFAULT'; otherwise only records at or above that level
    (compared via the LOG_LEVELS ordering) are forwarded.
    """
    def __init__(self, client):
        """Create a handler bound to *client* (a ReportEngineClient).

        Note: not intended to be instantiated directly by users; the
        client creates its own handler.
        """
        logging.Handler.__init__(self)
        self.client = client
        self.reportLevel = 'ALL'
    def emit(self, record):
        level = self.reportLevel
        forward = level in ('ALL', 'DEFAULT')
        if not forward:
            # record.levelno is 10/20/30/... -> index into LOG_LEVELS
            forward = LOG_LEVELS.index(level) <= int(record.levelno / 10)
        if forward:
            self.client.addLogMessage(record)
| gpl-3.0 |
noba3/KoTos | addons/script.cu.lrclyrics/resources/lib/mutagen/easyid3.py | 16 | 14244 | # Simpler (but far more limited) API for ID3 editing
# Copyright 2006 Joe Wreschnig
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of version 2 of the GNU General Public License as
# published by the Free Software Foundation.
"""Easier access to ID3 tags.
EasyID3 is a wrapper around mutagen.id3.ID3 to make ID3 tags appear
more like Vorbis or APEv2 tags.
"""
import mutagen.id3
from mutagen import Metadata
from mutagen._util import DictMixin, dict_match
from mutagen.id3 import ID3, error, delete, ID3FileType
__all__ = ['EasyID3', 'Open', 'delete']
class EasyID3KeyError(KeyError, ValueError, error):
    """Raised when trying to get/set an invalid key.
    Subclasses both KeyError and ValueError for API compatibility,
    catching KeyError is preferred.
    """
    # Intentionally empty: the multiple inheritance is the whole point.
class EasyID3(DictMixin, Metadata):
    """A file with an ID3 tag.
    Like Vorbis comments, EasyID3 keys are case-insensitive ASCII
    strings. Only a subset of ID3 frames are supported by default. Use
    EasyID3.RegisterKey and its wrappers to support more.
    You can also set the GetFallback, SetFallback, and DeleteFallback
    to generic key getter/setter/deleter functions, which are called
    if no specific handler is registered for a key. Additionally,
    ListFallback can be used to supply an arbitrary list of extra
    keys. These can be set on EasyID3 or on individual instances after
    creation.
    To use an EasyID3 class with mutagen.mp3.MP3::
        from mutagen.mp3 import EasyMP3 as MP3
        MP3(filename)
    Because many of the attributes are constructed on the fly, things
    like the following will not work::
        ezid3["performer"].append("Joe")
    Instead, you must do::
        values = ezid3["performer"]
        values.append("Joe")
        ezid3["performer"] = values
    """
    # Class-level handler registries shared by all instances; populated
    # via RegisterKey() and its wrappers below.
    Set = {}
    Get = {}
    Delete = {}
    List = {}
    # For compatibility.
    valid_keys = Get
    # Optional catch-all handlers consulted when no specific key matches.
    GetFallback = None
    SetFallback = None
    DeleteFallback = None
    ListFallback = None
    @classmethod
    def RegisterKey(cls, key,
                    getter=None, setter=None, deleter=None, lister=None):
        """Register a new key mapping.
        A key mapping is four functions, a getter, setter, deleter,
        and lister. The key may be either a string or a glob pattern.
        The getter, deleter, and lister receive an ID3 instance and
        the requested key name. The setter also receives the desired
        value, which will be a list of strings.
        The getter, setter, and deleter are used to implement __getitem__,
        __setitem__, and __delitem__.
        The lister is used to implement keys(). It should return a
        list of keys that are actually in the ID3 instance, provided
        by its associated getter.
        """
        key = key.lower()
        if getter is not None:
            cls.Get[key] = getter
        if setter is not None:
            cls.Set[key] = setter
        if deleter is not None:
            cls.Delete[key] = deleter
        if lister is not None:
            cls.List[key] = lister
    @classmethod
    def RegisterTextKey(cls, key, frameid):
        """Register a text key.
        If the key you need to register is a simple one-to-one mapping
        of ID3 frame name to EasyID3 key, then you can use this
        function::
            EasyID3.RegisterTextKey("title", "TIT2")
        """
        def getter(id3, key):
            return list(id3[frameid])
        def setter(id3, key, value):
            try:
                frame = id3[frameid]
            except KeyError:
                id3.add(mutagen.id3.Frames[frameid](encoding=3, text=value))
            else:
                frame.encoding = 3
                frame.text = value
        def deleter(id3, key):
            del(id3[frameid])
        cls.RegisterKey(key, getter, setter, deleter)
    @classmethod
    def RegisterTXXXKey(cls, key, desc):
        """Register a user-defined text frame key.
        Some ID3 tags are stored in TXXX frames, which allow a
        freeform 'description' which acts as a subkey,
        e.g. TXXX:BARCODE.::
            EasyID3.RegisterTXXXKey('barcode', 'BARCODE').
        """
        frameid = "TXXX:" + desc
        def getter(id3, key):
            return list(id3[frameid])
        def setter(id3, key, value):
            try:
                frame = id3[frameid]
            except KeyError:
                enc = 0
                # Store 8859-1 if we can, per MusicBrainz spec.
                for v in value:
                    if v and max(v) > u'\x7f':
                        enc = 3
                id3.add(mutagen.id3.TXXX(encoding=enc, text=value, desc=desc))
            else:
                frame.text = value
        def deleter(id3, key):
            del(id3[frameid])
        cls.RegisterKey(key, getter, setter, deleter)
    def __init__(self, filename=None):
        # All real tag storage is delegated to a private mutagen.id3.ID3.
        self.__id3 = ID3()
        if filename is not None:
            self.load(filename)
    # Delegate file operations straight to the wrapped ID3 object.
    load = property(lambda s: s.__id3.load,
                    lambda s, v: setattr(s.__id3, 'load', v))
    save = property(lambda s: s.__id3.save,
                    lambda s, v: setattr(s.__id3, 'save', v))
    delete = property(lambda s: s.__id3.delete,
                      lambda s, v: setattr(s.__id3, 'delete', v))
    filename = property(lambda s: s.__id3.filename,
                        lambda s, fn: setattr(s.__id3, 'filename', fn))
    # NOTE(review): the size setter assigns `s` (the instance) rather than
    # `fn` - looks like a bug; confirm against upstream mutagen before use.
    size = property(lambda s: s.__id3.size,
                    lambda s, fn: setattr(s.__id3, 'size', s))
    def __getitem__(self, key):
        # Dispatch to the registered getter (or the fallback) for the key.
        key = key.lower()
        func = dict_match(self.Get, key, self.GetFallback)
        if func is not None:
            return func(self.__id3, key)
        else:
            raise EasyID3KeyError("%r is not a valid key" % key)
    def __setitem__(self, key, value):
        # A bare string is promoted to a one-element list before dispatch.
        key = key.lower()
        if isinstance(value, basestring):
            value = [value]
        func = dict_match(self.Set, key, self.SetFallback)
        if func is not None:
            return func(self.__id3, key, value)
        else:
            raise EasyID3KeyError("%r is not a valid key" % key)
    def __delitem__(self, key):
        key = key.lower()
        func = dict_match(self.Delete, key, self.DeleteFallback)
        if func is not None:
            return func(self.__id3, key)
        else:
            raise EasyID3KeyError("%r is not a valid key" % key)
    def keys(self):
        # Keys with a registered lister enumerate themselves; other
        # registered keys are probed via `in self` (i.e. their getter).
        keys = []
        for key in self.Get.keys():
            if key in self.List:
                keys.extend(self.List[key](self.__id3, key))
            elif key in self:
                keys.append(key)
        if self.ListFallback is not None:
            keys.extend(self.ListFallback(self.__id3, ""))
        return keys
    def pprint(self):
        """Return tag key=value pairs, one per line, sorted by key."""
        strings = []
        for key in sorted(self.keys()):
            values = self[key]
            for value in values:
                strings.append("%s=%s" % (key, value))
        return "\n".join(strings)
# Backwards-compatible alias: mutagen tag modules conventionally expose `Open`.
Open = EasyID3
def genre_get(id3, key):
    """Return the list of genres stored in the TCON frame."""
    frame = id3["TCON"]
    return frame.genres
def genre_set(id3, key, value):
    """Replace the genre list, creating a UTF-8 TCON frame if needed."""
    try:
        frame = id3["TCON"]
    except KeyError:
        id3.add(mutagen.id3.TCON(encoding=3, text=value))
    else:
        frame.encoding = 3
        frame.genres = value
def genre_delete(id3, key):
    """Remove the TCON (genre) frame entirely."""
    del(id3["TCON"])
def date_get(id3, key):
    """Return each TDRC (recording time) timestamp as a string."""
    return [stamp.text for stamp in id3["TDRC"].text]
def date_set(id3, key, value):
    """Overwrite the TDRC (recording time) frame with *value* (UTF-8)."""
    id3.add(mutagen.id3.TDRC(encoding=3, text=value))
def date_delete(id3, key):
    """Remove the TDRC (recording time) frame entirely."""
    del(id3["TDRC"])
def performer_get(id3, key):
    """Return all people credited with the role encoded in *key*.

    *key* has the form 'performer:<role>'; raises KeyError when the TMCL
    frame is missing or nobody holds that role.
    """
    role = key.split(":", 1)[1]
    try:
        mcl = id3["TMCL"]
    except KeyError:
        raise KeyError(key)
    matches = [person for r, person in mcl.people if r == role]
    if not matches:
        raise KeyError(key)
    return matches
def performer_set(id3, key, value):
    """Replace everyone credited with the role encoded in *key* by *value*.

    Creates a UTF-8 TMCL frame when none exists; entries for other roles
    are preserved and the new entries are appended after them.
    """
    role = key.split(":", 1)[1]
    try:
        mcl = id3["TMCL"]
    except KeyError:
        mcl = mutagen.id3.TMCL(encoding=3, people=[])
        id3.add(mcl)
    mcl.encoding = 3
    kept = [pair for pair in mcl.people if pair[0] != role]
    kept.extend((role, person) for person in value)
    mcl.people = kept
def performer_delete(id3, key):
    """Drop everyone credited with the role encoded in *key*.

    Raises KeyError when the TMCL frame is missing or nobody held the
    role; deletes the whole frame when it becomes empty.
    """
    role = key.split(":", 1)[1]
    try:
        mcl = id3["TMCL"]
    except KeyError:
        raise KeyError(key)
    remaining = [pair for pair in mcl.people if pair[0] != role]
    if len(remaining) == len(mcl.people):
        raise KeyError(key)
    if remaining:
        mcl.people = remaining
    else:
        del id3["TMCL"]
def performer_list(id3, key):
    """List every distinct 'performer:<role>' key present in TMCL.

    Returns an empty list when the frame is missing.
    """
    try:
        mcl = id3["TMCL"]
    except KeyError:
        return []
    return list({"performer:" + pair[0] for pair in mcl.people})
def musicbrainz_trackid_get(id3, key):
    """Return the MusicBrainz track ID stored in the UFID frame."""
    return [id3["UFID:http://musicbrainz.org"].data.decode('ascii')]
def musicbrainz_trackid_set(id3, key, value):
    """Store a single MusicBrainz track ID in a UFID frame.

    Raises ValueError when more than one ID is given.
    """
    if len(value) != 1:
        raise ValueError("only one track ID may be set per song")
    # UFID data is raw bytes; track IDs are ASCII UUIDs.
    value = value[0].encode('ascii')
    try:
        frame = id3["UFID:http://musicbrainz.org"]
    except KeyError:
        frame = mutagen.id3.UFID(owner="http://musicbrainz.org", data=value)
        id3.add(frame)
    else:
        frame.data = value
def musicbrainz_trackid_delete(id3, key):
    """Remove the MusicBrainz UFID frame entirely."""
    del(id3["UFID:http://musicbrainz.org"])
def website_get(id3, key):
    """Return the URL of every WOAR (official artist webpage) frame.

    Raises EasyID3KeyError when no WOAR frames are present.
    """
    frames = id3.getall("WOAR")
    if not frames:
        raise EasyID3KeyError(key)
    return [frame.url for frame in frames]
def website_set(id3, key, value):
    """Replace all WOAR frames with one frame per URL in *value*."""
    id3.delall("WOAR")
    for v in value:
        id3.add(mutagen.id3.WOAR(url=v))
def website_delete(id3, key):
    """Remove every WOAR (official artist webpage) frame."""
    id3.delall("WOAR")
def gain_get(id3, key):
    """Read the replaygain gain stored in the matching RVA2 frame.

    *key* looks like 'replaygain_<channel>_gain'; returns e.g. ['+2.000000 dB'].
    """
    desc = key[11:-5]  # strip 'replaygain_' prefix and '_gain' suffix
    try:
        frame = id3["RVA2:" + desc]
    except KeyError:
        raise EasyID3KeyError(key)
    return [u"%+f dB" % frame.gain]
def gain_set(id3, key, value):
    """Set the replaygain gain for the channel encoded in *key*.

    Expects exactly one value such as '+2.5 dB'; creates the RVA2 frame
    when it does not exist yet.
    """
    if len(value) != 1:
        # %-interpolate the message; passing `value` as a second ValueError
        # argument left the %r placeholder literally unformatted.
        raise ValueError(
            "there must be exactly one gain value, not %r." % (value,))
    gain = float(value[0].split()[0])
    try:
        frame = id3["RVA2:" + key[11:-5]]
    except KeyError:
        frame = mutagen.id3.RVA2(desc=key[11:-5], gain=0, peak=0, channel=1)
        id3.add(frame)
    frame.gain = gain
def gain_delete(id3, key):
    """Drop the stored gain; keep the frame only while it still holds a peak.

    Missing frames are ignored.
    """
    desc = key[11:-5]
    try:
        frame = id3["RVA2:" + desc]
    except KeyError:
        return
    if frame.peak:
        frame.gain = 0.0
    else:
        del id3["RVA2:" + desc]
def peak_get(id3, key):
    """Read the replaygain peak stored in the matching RVA2 frame.

    *key* looks like 'replaygain_<channel>_peak'; returns e.g. ['0.500000'].
    """
    desc = key[11:-5]  # strip 'replaygain_' prefix and '_peak' suffix
    try:
        frame = id3["RVA2:" + desc]
    except KeyError:
        raise EasyID3KeyError(key)
    return [u"%f" % frame.peak]
def peak_set(id3, key, value):
    """Set the replaygain peak for the channel encoded in *key*.

    Expects exactly one numeric string in [0, 2); creates the RVA2 frame
    when it does not exist yet.
    """
    if len(value) != 1:
        # %-interpolate the message; passing `value` as a second ValueError
        # argument left the %r placeholder literally unformatted.
        raise ValueError(
            "there must be exactly one peak value, not %r." % (value,))
    peak = float(value[0])
    if peak >= 2 or peak < 0:
        raise ValueError("peak must be => 0 and < 2.")
    try:
        frame = id3["RVA2:" + key[11:-5]]
    except KeyError:
        frame = mutagen.id3.RVA2(desc=key[11:-5], gain=0, peak=0, channel=1)
        id3.add(frame)
    frame.peak = peak
def peak_delete(id3, key):
    """Drop the stored peak; keep the frame only while it still holds a gain.

    Missing frames are ignored.
    """
    desc = key[11:-5]
    try:
        frame = id3["RVA2:" + desc]
    except KeyError:
        return
    if frame.gain:
        frame.peak = 0.0
    else:
        del id3["RVA2:" + desc]
def peakgain_list(id3, key):
    """List the replaygain gain/peak key names for every RVA2 frame present."""
    keys = []
    for frame in id3.getall("RVA2"):
        desc = frame.desc
        keys.extend(["replaygain_%s_gain" % desc,
                     "replaygain_%s_peak" % desc])
    return keys
# Simple one-to-one ID3 frame <-> EasyID3 key mappings.
for frameid, key in {
    "TALB": "album",
    "TBPM": "bpm",
    "TCMP": "compilation",  # iTunes extension
    "TCOM": "composer",
    "TCOP": "copyright",
    "TENC": "encodedby",
    "TEXT": "lyricist",
    "TLEN": "length",
    "TMED": "media",
    "TMOO": "mood",
    "TIT2": "title",
    "TIT3": "version",
    "TPE1": "artist",
    "TPE2": "performer",
    "TPE3": "conductor",
    "TPE4": "arranger",
    "TPOS": "discnumber",
    "TPUB": "organization",
    "TRCK": "tracknumber",
    "TOLY": "author",
    "TSO2": "albumartistsort",  # iTunes extension
    "TSOA": "albumsort",
    "TSOC": "composersort",  # iTunes extension
    "TSOP": "artistsort",
    "TSOT": "titlesort",
    "TSRC": "isrc",
    "TSST": "discsubtitle",
}.iteritems():
    EasyID3.RegisterTextKey(key, frameid)
# Register the hand-written handlers for keys that are not simple
# one-to-one text frame mappings.
EasyID3.RegisterKey("genre", genre_get, genre_set, genre_delete)
EasyID3.RegisterKey("date", date_get, date_set, date_delete)
EasyID3.RegisterKey(
    "performer:*", performer_get, performer_set, performer_delete,
    performer_list)
EasyID3.RegisterKey("musicbrainz_trackid", musicbrainz_trackid_get,
                    musicbrainz_trackid_set, musicbrainz_trackid_delete)
# Registered once; the previous duplicate registration of "website" was a
# no-op (RegisterKey simply overwrote the same entries) and was removed.
EasyID3.RegisterKey("website", website_get, website_set, website_delete)
EasyID3.RegisterKey(
    "replaygain_*_gain", gain_get, gain_set, gain_delete, peakgain_list)
EasyID3.RegisterKey("replaygain_*_peak", peak_get, peak_set, peak_delete)
# MusicBrainz-defined TXXX (user text) frame descriptions mapped to keys.
# At various times, information for this came from
# http://musicbrainz.org/docs/specs/metadata_tags.html
# http://bugs.musicbrainz.org/ticket/1383
# http://musicbrainz.org/doc/MusicBrainzTag
for desc, key in {
    u"MusicBrainz Artist Id": "musicbrainz_artistid",
    u"MusicBrainz Album Id": "musicbrainz_albumid",
    u"MusicBrainz Album Artist Id": "musicbrainz_albumartistid",
    u"MusicBrainz TRM Id": "musicbrainz_trmid",
    u"MusicIP PUID": "musicip_puid",
    u"MusicMagic Fingerprint": "musicip_fingerprint",
    u"MusicBrainz Album Status": "musicbrainz_albumstatus",
    u"MusicBrainz Album Type": "musicbrainz_albumtype",
    u"MusicBrainz Album Release Country": "releasecountry",
    u"MusicBrainz Disc Id": "musicbrainz_discid",
    u"ASIN": "asin",
    u"ALBUMARTISTSORT": "albumartistsort",
    u"BARCODE": "barcode",
}.iteritems():
    EasyID3.RegisterTXXXKey(key, desc)
class EasyID3FileType(ID3FileType):
    """Like ID3FileType, but uses EasyID3 for tags."""
    # Overrides the tag class instantiated by ID3FileType.
    ID3 = EasyID3
| gpl-2.0 |
sdcooke/django | django/contrib/gis/gdal/raster/band.py | 308 | 5382 | from ctypes import byref, c_int
from django.contrib.gis.gdal.base import GDALBase
from django.contrib.gis.gdal.error import GDALException
from django.contrib.gis.gdal.prototypes import raster as capi
from django.contrib.gis.shortcuts import numpy
from django.utils import six
from django.utils.encoding import force_text
from django.utils.six.moves import range
from .const import GDAL_INTEGER_TYPES, GDAL_PIXEL_TYPES, GDAL_TO_CTYPES
class GDALBand(GDALBase):
    """
    Wraps a GDAL raster band, needs to be obtained from a GDALRaster object.
    """
    def __init__(self, source, index):
        # Keep a reference to the owning raster so the dataset pointer
        # outlives this band wrapper; `index` is GDAL's 1-based band index.
        self.source = source
        self._ptr = capi.get_ds_raster_band(source._ptr, index)
    @property
    def description(self):
        """
        Returns the description string of the band.
        """
        return force_text(capi.get_band_description(self._ptr))
    @property
    def width(self):
        """
        Width (X axis) in pixels of the band.
        """
        return capi.get_band_xsize(self._ptr)
    @property
    def height(self):
        """
        Height (Y axis) in pixels of the band.
        """
        return capi.get_band_ysize(self._ptr)
    @property
    def pixel_count(self):
        """
        Returns the total number of pixels in this band.
        """
        return self.width * self.height
    @property
    def min(self):
        """
        Returns the minimum pixel value for this band.
        """
        return capi.get_band_minimum(self._ptr, byref(c_int()))
    @property
    def max(self):
        """
        Returns the maximum pixel value for this band.
        """
        return capi.get_band_maximum(self._ptr, byref(c_int()))
    @property
    def nodata_value(self):
        """
        Returns the nodata value for this band, or None if it isn't set.
        """
        # Get value and nodata exists flag
        nodata_exists = c_int()
        value = capi.get_band_nodata_value(self._ptr, nodata_exists)
        if not nodata_exists:
            value = None
        # If the pixeltype is an integer, convert to int
        elif self.datatype() in GDAL_INTEGER_TYPES:
            value = int(value)
        return value
    @nodata_value.setter
    def nodata_value(self, value):
        """
        Sets the nodata value for this band.
        """
        if not isinstance(value, (int, float)):
            raise ValueError('Nodata value must be numeric.')
        capi.set_band_nodata_value(self._ptr, value)
        # Persist the change to the underlying dataset immediately.
        self.source._flush()
    def datatype(self, as_string=False):
        """
        Returns the GDAL Pixel Datatype for this band.
        """
        dtype = capi.get_band_datatype(self._ptr)
        if as_string:
            dtype = GDAL_PIXEL_TYPES[dtype]
        return dtype
    def data(self, data=None, offset=None, size=None, as_memoryview=False):
        """
        Reads or writes pixel values for this band. Blocks of data can
        be accessed by specifying the width, height and offset of the
        desired block. The same specification can be used to update
        parts of a raster by providing an array of values.
        Allowed input data types are bytes, memoryview, list, tuple, and array.
        """
        if not offset:
            offset = (0, 0)
        if not size:
            # Default to everything from the offset to the band edge.
            size = (self.width - offset[0], self.height - offset[1])
        if any(x <= 0 for x in size):
            raise ValueError('Offset too big for this raster.')
        if size[0] > self.width or size[1] > self.height:
            raise ValueError('Size is larger than raster.')
        # Create ctypes type array generator
        ctypes_array = GDAL_TO_CTYPES[self.datatype()] * (size[0] * size[1])
        if data is None:
            # Set read mode
            access_flag = 0
            # Prepare empty ctypes array
            data_array = ctypes_array()
        else:
            # Set write mode
            access_flag = 1
            # Instantiate ctypes array holding the input data
            if isinstance(data, (bytes, six.memoryview)) or (numpy and isinstance(data, numpy.ndarray)):
                data_array = ctypes_array.from_buffer_copy(data)
            else:
                data_array = ctypes_array(*data)
        # Access band
        capi.band_io(self._ptr, access_flag, offset[0], offset[1],
                     size[0], size[1], byref(data_array), size[0],
                     size[1], self.datatype(), 0, 0)
        # Return data as numpy array if possible, otherwise as list
        if data is None:
            if as_memoryview:
                return memoryview(data_array)
            elif numpy:
                return numpy.frombuffer(
                    data_array, dtype=numpy.dtype(data_array)).reshape(size)
            else:
                return list(data_array)
        else:
            self.source._flush()
class BandList(list):
    """
    Sequence-like accessor for the bands of a GDALRaster.

    Band objects are created lazily; GDAL band indices are 1-based while
    this list exposes the usual 0-based Python indexing.
    """
    def __init__(self, source):
        # The raster that owns the underlying GDAL band handles.
        self.source = source
        super(BandList, self).__init__()

    def __iter__(self):
        # Yield a fresh GDALBand wrapper for each 1-based band index.
        band_count = len(self)
        for band_index in range(1, band_count + 1):
            yield GDALBand(self.source, band_index)

    def __len__(self):
        # The count is queried from the C API each time, never cached.
        return capi.get_ds_raster_count(self.source._ptr)

    def __getitem__(self, index):
        try:
            return GDALBand(self.source, index + 1)
        except GDALException:
            raise GDALException('Unable to get band index %d' % index)
| bsd-3-clause |
AkhilHector/Rex.Inc | .env/lib/python2.7/site-packages/werkzeug/wsgi.py | 82 | 38011 | # -*- coding: utf-8 -*-
"""
werkzeug.wsgi
~~~~~~~~~~~~~
This module implements WSGI related helpers.
:copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
import re
import os
import posixpath
import mimetypes
from itertools import chain
from zlib import adler32
from time import time, mktime
from datetime import datetime
from functools import partial, update_wrapper
from werkzeug._compat import iteritems, text_type, string_types, \
implements_iterator, make_literal_wrapper, to_unicode, to_bytes, \
wsgi_get_bytes, try_coerce_native, PY2
from werkzeug._internal import _empty_stream, _encode_idna
from werkzeug.http import is_resource_modified, http_date
from werkzeug.urls import uri_to_iri, url_quote, url_parse, url_join
from werkzeug.filesystem import get_filesystem_encoding
def responder(f):
    """Marks a function as responder.  Decorate a function with it and it
    will automatically call the return value as WSGI application.

    Example::

        @responder
        def application(environ, start_response):
            return Response('Hello World!')
    """
    def wrapper(*args):
        # ``f`` produces a WSGI application; invoke it immediately with
        # the trailing (environ, start_response) pair of this call.
        application = f(*args)
        return application(*args[-2:])
    return update_wrapper(wrapper, f)
def get_current_url(environ, root_only=False, strip_querystring=False,
                    host_only=False, trusted_hosts=None):
    """A handy helper function that recreates the full URL as IRI for the
    current request or parts of it.  Here an example:

    >>> from werkzeug.test import create_environ
    >>> env = create_environ("/?param=foo", "http://localhost/script")
    >>> get_current_url(env)
    'http://localhost/script/?param=foo'
    >>> get_current_url(env, root_only=True)
    'http://localhost/script/'
    >>> get_current_url(env, host_only=True)
    'http://localhost/'
    >>> get_current_url(env, strip_querystring=True)
    'http://localhost/script/'

    Optionally, this verifies that the host is in a list of trusted hosts.
    If the host is not in there it will raise a
    :exc:`~werkzeug.exceptions.SecurityError`.

    Note that the string returned might contain unicode characters as the
    representation is an IRI not an URI.  If you need an ASCII only
    representation you can use the :func:`~werkzeug.urls.iri_to_uri`
    function:

    >>> from werkzeug.urls import iri_to_uri
    >>> iri_to_uri(get_current_url(env))
    'http://localhost/script/?param=foo'

    :param environ: the WSGI environment to get the current URL from.
    :param root_only: set `True` if you only want the root URL.
    :param strip_querystring: set to `True` if you don't want the querystring.
    :param host_only: set to `True` if the host URL should be returned.
    :param trusted_hosts: a list of trusted hosts, see :func:`host_is_trusted`
                          for more information.
    """
    # Build the URL piecewise; ``cat`` is just a bound append for brevity.
    tmp = [environ['wsgi.url_scheme'], '://', get_host(environ, trusted_hosts)]
    cat = tmp.append
    if host_only:
        return uri_to_iri(''.join(tmp) + '/')
    # SCRIPT_NAME must not end in '/' so PATH_INFO can be appended cleanly.
    cat(url_quote(wsgi_get_bytes(environ.get('SCRIPT_NAME', ''))).rstrip('/'))
    cat('/')
    if not root_only:
        cat(url_quote(wsgi_get_bytes(environ.get('PATH_INFO', '')).lstrip(b'/')))
        if not strip_querystring:
            qs = get_query_string(environ)
            if qs:
                cat('?' + qs)
    return uri_to_iri(''.join(tmp))
def host_is_trusted(hostname, trusted_list):
    """Checks if a host is trusted against a list.  This also takes care
    of port normalization.

    .. versionadded:: 0.9

    :param hostname: the hostname to check
    :param trusted_list: a list of hostnames to check against.  If a
                         hostname starts with a dot it will match against
                         all subdomains as well.
    """
    if not hostname:
        return False

    if isinstance(trusted_list, string_types):
        trusted_list = [trusted_list]

    def _normalize(host):
        # Strip an optional port before IDNA-encoding the name.
        if ':' in host:
            host = host.rsplit(':', 1)[0]
        return _encode_idna(host)

    try:
        hostname = _normalize(hostname)
    except UnicodeError:
        return False

    for candidate in trusted_list:
        # A leading dot means "this domain plus every subdomain of it".
        suffix_match = candidate.startswith('.')
        if suffix_match:
            candidate = candidate[1:]
        try:
            candidate = _normalize(candidate)
        except UnicodeError:
            return False
        if candidate == hostname:
            return True
        if suffix_match and hostname.endswith('.' + candidate):
            return True
    return False
def get_host(environ, trusted_hosts=None):
    """Return the real host for the given WSGI environment.  The lookup
    order is the `X-Forwarded-Host` header, then the `Host` header, and
    finally the `SERVER_NAME` environment variable; the first one found
    wins.  Optionally the resulting host is validated against a list of
    trusted hosts; an untrusted host raises a
    :exc:`~werkzeug.exceptions.SecurityError`.

    :param environ: the WSGI environment to get the host of.
    :param trusted_hosts: a list of trusted hosts, see :func:`host_is_trusted`
                          for more information.
    """
    forwarded = environ.get('HTTP_X_FORWARDED_HOST')
    if forwarded is not None:
        # Proxies may send a comma separated chain; the first entry is
        # the client-facing host.
        host = forwarded.split(',', 1)[0].strip()
    elif 'HTTP_HOST' in environ:
        host = environ['HTTP_HOST']
    else:
        host = environ['SERVER_NAME']
        # Append a non-default port only on the SERVER_NAME fallback; the
        # Host headers already carry the port when needed.
        scheme_and_port = (environ['wsgi.url_scheme'], environ['SERVER_PORT'])
        if scheme_and_port not in (('https', '443'), ('http', '80')):
            host = '%s:%s' % (host, environ['SERVER_PORT'])
    if trusted_hosts is not None and not host_is_trusted(host, trusted_hosts):
        from werkzeug.exceptions import SecurityError
        raise SecurityError('Host "%s" is not trusted' % host)
    return host
def get_content_length(environ):
    """Returns the content length from the WSGI environment as
    integer.  If it's not available `None` is returned.

    .. versionadded:: 0.9

    :param environ: the WSGI environ to fetch the content length from.
    """
    raw = environ.get('CONTENT_LENGTH')
    if raw is None:
        return None
    try:
        length = int(raw)
    except (ValueError, TypeError):
        return None
    # Negative lengths are nonsensical; clamp them to zero.
    return length if length >= 0 else 0
def get_input_stream(environ, safe_fallback=True):
    """Returns the input stream from the WSGI environment and wraps it
    in the most sensible way possible.  The stream returned is not the
    raw WSGI stream in most cases but one that is safe to read from
    without taking into account the content length.

    .. versionadded:: 0.9

    :param environ: the WSGI environ to fetch the stream from.
    :param safe_fallback: indicates whether the function should use an empty
                          stream as safe fallback or just return the original
                          WSGI input stream if it can't wrap it safely.  The
                          default is to return an empty string in those cases.
    """
    stream = environ['wsgi.input']
    content_length = get_content_length(environ)

    # A wsgi extension that tells us if the input is terminated.  In
    # that case we return the stream unchanged as we know we can safely
    # read it until the end.
    if environ.get('wsgi.input_terminated'):
        return stream

    # If we don't have a content length we fall back to an empty stream
    # in case of a safe fallback, otherwise we return the stream unchanged.
    # The non-safe fallback is not recommended but might be useful in
    # some situations.
    if content_length is None:
        # ``x and a or b`` is the pre-conditional-expression idiom; it is
        # safe here because ``_empty_stream`` is truthy.
        return safe_fallback and _empty_stream or stream

    # Otherwise limit the stream to the content length
    return LimitedStream(stream, content_length)
def get_query_string(environ):
    """Returns the `QUERY_STRING` from the WSGI environment.  This also
    performs the WSGI decoding dance on Python 3 environments so a native
    string comes back; the result is restricted to ASCII characters.

    .. versionadded:: 0.9

    :param environ: the WSGI environment object to get the query string from.
    """
    raw_qs = wsgi_get_bytes(environ.get('QUERY_STRING', ''))
    # QUERY_STRING should be ASCII safe, but some browsers (notably old
    # IE) send unicode; percent-quote anything outside the allowed set.
    quoted = url_quote(raw_qs, safe=':&%=+$!*\'(),')
    return try_coerce_native(quoted)
def get_path_info(environ, charset='utf-8', errors='replace'):
    """Returns the `PATH_INFO` from the WSGI environment, properly decoded
    (including the Python 3 WSGI decoding dance).  When `charset` is
    `None` the raw bytestring is returned instead.

    .. versionadded:: 0.9

    :param environ: the WSGI environment object to get the path from.
    :param charset: the charset for the path info, or `None` if no
                    decoding should be performed.
    :param errors: the decoding error handling.
    """
    raw_path = wsgi_get_bytes(environ.get('PATH_INFO', ''))
    return to_unicode(raw_path, charset, errors, allow_none_charset=True)
def get_script_name(environ, charset='utf-8', errors='replace'):
    """Returns the `SCRIPT_NAME` from the WSGI environment, properly
    decoded (including the Python 3 WSGI decoding dance).  When `charset`
    is `None` the raw bytestring is returned instead.

    .. versionadded:: 0.9

    :param environ: the WSGI environment object to get the path from.
    :param charset: the charset for the path, or `None` if no
                    decoding should be performed.
    :param errors: the decoding error handling.
    """
    raw_name = wsgi_get_bytes(environ.get('SCRIPT_NAME', ''))
    return to_unicode(raw_name, charset, errors, allow_none_charset=True)
def pop_path_info(environ, charset='utf-8', errors='replace'):
    """Removes and returns the next segment of `PATH_INFO`, pushing it onto
    `SCRIPT_NAME`.  Returns `None` if there is nothing left on `PATH_INFO`.

    If the `charset` is set to `None` a bytestring is returned.

    If there are empty segments (``'/foo//bar``) these are ignored but
    properly pushed to the `SCRIPT_NAME`:

    >>> env = {'SCRIPT_NAME': '/foo', 'PATH_INFO': '/a/b'}
    >>> pop_path_info(env)
    'a'
    >>> env['SCRIPT_NAME']
    '/foo/a'
    >>> pop_path_info(env)
    'b'
    >>> env['SCRIPT_NAME']
    '/foo/a/b'

    .. versionadded:: 0.5

    .. versionchanged:: 0.9
       The path is now decoded and a charset and encoding
       parameter can be provided.

    :param environ: the WSGI environment that is modified.
    """
    path = environ.get('PATH_INFO')
    if not path:
        return None

    script_name = environ.get('SCRIPT_NAME', '')

    # shift multiple leading slashes over to SCRIPT_NAME so no path
    # information is lost by the lstrip below
    old_path = path
    path = path.lstrip('/')
    if path != old_path:
        script_name += '/' * (len(old_path) - len(path))

    if '/' not in path:
        # Last segment: PATH_INFO is exhausted after this pop.
        environ['PATH_INFO'] = ''
        environ['SCRIPT_NAME'] = script_name + path
        rv = wsgi_get_bytes(path)
    else:
        segment, path = path.split('/', 1)
        environ['PATH_INFO'] = '/' + path
        environ['SCRIPT_NAME'] = script_name + segment
        rv = wsgi_get_bytes(segment)

    return to_unicode(rv, charset, errors, allow_none_charset=True)
def peek_path_info(environ, charset='utf-8', errors='replace'):
    """Returns the next segment on the `PATH_INFO` or `None` if there
    is none.  Works like :func:`pop_path_info` without modifying the
    environment:

    >>> env = {'SCRIPT_NAME': '/foo', 'PATH_INFO': '/a/b'}
    >>> peek_path_info(env)
    'a'
    >>> peek_path_info(env)
    'a'

    If the `charset` is set to `None` a bytestring is returned.

    .. versionadded:: 0.5

    .. versionchanged:: 0.9
       The path is now decoded and a charset and encoding
       parameter can be provided.

    :param environ: the WSGI environment that is checked.
    """
    # Only the first segment matters, so split at most once.
    path = environ.get('PATH_INFO', '').lstrip('/')
    segments = path.split('/', 1)
    if segments:
        head = wsgi_get_bytes(segments[0])
        return to_unicode(head, charset, errors, allow_none_charset=True)
def extract_path_info(environ_or_baseurl, path_or_url, charset='utf-8',
                      errors='replace', collapse_http_schemes=True):
    """Extracts the path info from the given URL (or WSGI environment) and
    path.  The path info returned is a unicode string, not a bytestring
    suitable for a WSGI environment.  The URLs might also be IRIs.

    If the path info could not be determined, `None` is returned.

    Some examples:

    >>> extract_path_info('http://example.com/app', '/app/hello')
    u'/hello'
    >>> extract_path_info('http://example.com/app',
    ...                   'https://example.com/app/hello')
    u'/hello'
    >>> extract_path_info('http://example.com/app',
    ...                   'https://example.com/app/hello',
    ...                   collapse_http_schemes=False) is None
    True

    Instead of providing a base URL you can also pass a WSGI environment.

    .. versionadded:: 0.6

    :param environ_or_baseurl: a WSGI environment dict, a base URL or
                               base IRI.  This is the root of the
                               application.
    :param path_or_url: an absolute path from the server root, a
                        relative path (in which case it's the path info)
                        or a full URL.  Also accepts IRIs and unicode
                        parameters.
    :param charset: the charset for byte data in URLs
    :param errors: the error handling on decode
    :param collapse_http_schemes: if set to `False` the algorithm does
                                  not assume that http and https on the
                                  same server point to the same
                                  resource.
    """
    def _normalize_netloc(scheme, netloc):
        # Drop userinfo, then drop the port when it is the scheme default
        # so that 'host:80' and 'host' compare equal for http.
        parts = netloc.split(u'@', 1)[-1].split(u':', 1)
        if len(parts) == 2:
            netloc, port = parts
            if (scheme == u'http' and port == u'80') or \
               (scheme == u'https' and port == u'443'):
                port = None
        else:
            netloc = parts[0]
            port = None
        if port is not None:
            netloc += u':' + port
        return netloc

    # make sure whatever we are working on is a IRI and parse it
    path = uri_to_iri(path_or_url, charset, errors)
    if isinstance(environ_or_baseurl, dict):
        environ_or_baseurl = get_current_url(environ_or_baseurl,
                                             root_only=True)
    base_iri = uri_to_iri(environ_or_baseurl, charset, errors)
    base_scheme, base_netloc, base_path = url_parse(base_iri)[:3]
    cur_scheme, cur_netloc, cur_path, = \
        url_parse(url_join(base_iri, path))[:3]

    # normalize the network location
    base_netloc = _normalize_netloc(base_scheme, base_netloc)
    cur_netloc = _normalize_netloc(cur_scheme, cur_netloc)

    # is that IRI even on a known HTTP scheme?
    if collapse_http_schemes:
        for scheme in base_scheme, cur_scheme:
            if scheme not in (u'http', u'https'):
                return None
    else:
        if not (base_scheme in (u'http', u'https') and
                base_scheme == cur_scheme):
            return None

    # are the netlocs compatible?
    if base_netloc != cur_netloc:
        return None

    # are we below the application path?
    base_path = base_path.rstrip(u'/')
    if not cur_path.startswith(base_path):
        return None

    return u'/' + cur_path[len(base_path):].lstrip(u'/')
class SharedDataMiddleware(object):

    """A WSGI middleware that provides static content for development
    environments or simple server setups. Usage is quite simple::

        import os
        from werkzeug.wsgi import SharedDataMiddleware

        app = SharedDataMiddleware(app, {
            '/shared': os.path.join(os.path.dirname(__file__), 'shared')
        })

    The contents of the folder ``./shared`` will now be available on
    ``http://example.com/shared/``.  This is pretty useful during development
    because a standalone media server is not required.  One can also mount
    files on the root folder and still continue to use the application because
    the shared data middleware forwards all unhandled requests to the
    application, even if the requests are below one of the shared folders.

    If `pkg_resources` is available you can also tell the middleware to serve
    files from package data::

        app = SharedDataMiddleware(app, {
            '/shared': ('myapplication', 'shared_files')
        })

    This will then serve the ``shared_files`` folder in the `myapplication`
    Python package.

    The optional `disallow` parameter can be a list of :func:`~fnmatch.fnmatch`
    rules for files that are not accessible from the web.  If `cache` is set to
    `False` no caching headers are sent.

    Currently the middleware does not support non ASCII filenames.  If the
    encoding on the file system happens to be the encoding of the URI it may
    work but this could also be by accident.  We strongly suggest using ASCII
    only file names for static files.

    The middleware will guess the mimetype using the Python `mimetype`
    module.  If it's unable to figure out the charset it will fall back
    to `fallback_mimetype`.

    .. versionchanged:: 0.5
       The cache timeout is configurable now.

    .. versionadded:: 0.6
       The `fallback_mimetype` parameter was added.

    :param app: the application to wrap.  If you don't want to wrap an
                application you can pass it :exc:`NotFound`.
    :param exports: a dict of exported files and folders.
    :param disallow: a list of :func:`~fnmatch.fnmatch` rules.
    :param fallback_mimetype: the fallback mimetype for unknown files.
    :param cache: enable or disable caching headers.
    :param cache_timeout: the cache timeout in seconds for the headers.
    """

    def __init__(self, app, exports, disallow=None, cache=True,
                 cache_timeout=60 * 60 * 12, fallback_mimetype='text/plain'):
        self.app = app
        self.exports = {}
        self.cache = cache
        self.cache_timeout = cache_timeout
        # Each export value becomes a loader callable: tuples are package
        # data, strings are files or directories on disk.
        for key, value in iteritems(exports):
            if isinstance(value, tuple):
                loader = self.get_package_loader(*value)
            elif isinstance(value, string_types):
                if os.path.isfile(value):
                    loader = self.get_file_loader(value)
                else:
                    loader = self.get_directory_loader(value)
            else:
                raise TypeError('unknown def %r' % value)
            self.exports[key] = loader
        if disallow is not None:
            from fnmatch import fnmatch
            self.is_allowed = lambda x: not fnmatch(x, disallow)
        self.fallback_mimetype = fallback_mimetype

    def is_allowed(self, filename):
        """Subclasses can override this method to disallow the access to
        certain files.  However by providing `disallow` in the constructor
        this method is overwritten.
        """
        return True

    def _opener(self, filename):
        # Deferred open: the returned callable yields
        # (file object, mtime as datetime, size in bytes).
        return lambda: (
            open(filename, 'rb'),
            datetime.utcfromtimestamp(os.path.getmtime(filename)),
            int(os.path.getsize(filename))
        )

    def get_file_loader(self, filename):
        return lambda x: (os.path.basename(filename), self._opener(filename))

    def get_package_loader(self, package, package_path):
        from pkg_resources import DefaultProvider, ResourceManager, \
            get_provider
        # Non-filesystem providers (e.g. zipped eggs) have no mtime, so
        # the middleware construction time is used instead.
        loadtime = datetime.utcnow()
        provider = get_provider(package)
        manager = ResourceManager()
        filesystem_bound = isinstance(provider, DefaultProvider)

        def loader(path):
            if path is None:
                return None, None
            path = posixpath.join(package_path, path)
            if not provider.has_resource(path):
                return None, None
            basename = posixpath.basename(path)
            if filesystem_bound:
                return basename, self._opener(
                    provider.get_resource_filename(manager, path))
            return basename, lambda: (
                provider.get_resource_stream(manager, path),
                loadtime,
                0
            )
        return loader

    def get_directory_loader(self, directory):
        def loader(path):
            if path is not None:
                path = os.path.join(directory, path)
            else:
                path = directory
            if os.path.isfile(path):
                return os.path.basename(path), self._opener(path)
            return None, None
        return loader

    def generate_etag(self, mtime, file_size, real_filename):
        if not isinstance(real_filename, bytes):
            real_filename = real_filename.encode(get_filesystem_encoding())
        # Weak fingerprint from mtime, size and an adler32 of the name.
        return 'wzsdm-%d-%s-%s' % (
            mktime(mtime.timetuple()),
            file_size,
            adler32(real_filename) & 0xffffffff
        )

    def __call__(self, environ, start_response):
        cleaned_path = get_path_info(environ)
        if PY2:
            cleaned_path = cleaned_path.encode(get_filesystem_encoding())
        # sanitize the path for non unix systems
        cleaned_path = cleaned_path.strip('/')
        for sep in os.sep, os.altsep:
            if sep and sep != '/':
                cleaned_path = cleaned_path.replace(sep, '/')
        # Drop empty segments and '..' to prevent directory traversal.
        path = '/' + '/'.join(x for x in cleaned_path.split('/')
                              if x and x != '..')
        file_loader = None
        for search_path, loader in iteritems(self.exports):
            if search_path == path:
                real_filename, file_loader = loader(None)
                if file_loader is not None:
                    break
            if not search_path.endswith('/'):
                search_path += '/'
            if path.startswith(search_path):
                real_filename, file_loader = loader(path[len(search_path):])
                if file_loader is not None:
                    break
        if file_loader is None or not self.is_allowed(real_filename):
            # Nothing exported matches; fall through to the wrapped app.
            return self.app(environ, start_response)

        guessed_type = mimetypes.guess_type(real_filename)
        mime_type = guessed_type[0] or self.fallback_mimetype
        f, mtime, file_size = file_loader()

        headers = [('Date', http_date())]
        if self.cache:
            timeout = self.cache_timeout
            etag = self.generate_etag(mtime, file_size, real_filename)
            headers += [
                ('Etag', '"%s"' % etag),
                ('Cache-Control', 'max-age=%d, public' % timeout)
            ]
            if not is_resource_modified(environ, etag, last_modified=mtime):
                f.close()
                start_response('304 Not Modified', headers)
                return []
            headers.append(('Expires', http_date(time() + timeout)))
        else:
            headers.append(('Cache-Control', 'public'))

        headers.extend((
            ('Content-Type', mime_type),
            ('Content-Length', str(file_size)),
            ('Last-Modified', http_date(mtime))
        ))
        start_response('200 OK', headers)
        return wrap_file(environ, f)
class DispatcherMiddleware(object):

    """Allows one to mount middlewares or applications in a WSGI application.
    This is useful if you want to combine multiple WSGI applications::

        app = DispatcherMiddleware(app, {
            '/app2':        app2,
            '/app3':        app3
        })
    """

    def __init__(self, app, mounts=None):
        self.app = app
        self.mounts = mounts or {}

    def __call__(self, environ, start_response):
        # Peel path segments off the right end of PATH_INFO until a
        # mounted prefix matches; whatever was peeled becomes the new
        # PATH_INFO for the dispatched application.
        script = environ.get('PATH_INFO', '')
        path_info = ''
        app = None
        while '/' in script:
            if script in self.mounts:
                app = self.mounts[script]
                break
            script, tail = script.rsplit('/', 1)
            path_info = '/%s%s' % (tail, path_info)
        if app is None:
            # No prefix matched; use the default app (or a mount that is
            # registered for the remaining script, e.g. '').
            app = self.mounts.get(script, self.app)
        original_script_name = environ.get('SCRIPT_NAME', '')
        environ['SCRIPT_NAME'] = original_script_name + script
        environ['PATH_INFO'] = path_info
        return app(environ, start_response)
@implements_iterator
class ClosingIterator(object):

    """The WSGI specification requires that all middlewares and gateways
    respect the `close` callback of an iterator.  Because it is useful to add
    another close action to a returned iterator and adding a custom iterator
    is a boring task this class can be used for that::

        return ClosingIterator(app(environ, start_response), [cleanup_session,
                                                              cleanup_locals])

    If there is just one close function it can be passed instead of the list.

    A closing iterator is not needed if the application uses response objects
    and finishes the processing if the response is started::

        try:
            return response(environ, start_response)
        finally:
            cleanup_session()
            cleanup_locals()
    """

    def __init__(self, iterable, callbacks=None):
        iterator = iter(iterable)
        self._next = partial(next, iterator)

        # Normalize ``callbacks`` into a private list: accept nothing,
        # a single callable, or any iterable of callables.
        if callbacks is None:
            cleanups = []
        elif callable(callbacks):
            cleanups = [callbacks]
        else:
            cleanups = list(callbacks)

        # The wrapped iterator's own close (if any) runs first.
        iterator_close = getattr(iterator, 'close', None)
        if iterator_close:
            cleanups.insert(0, iterator_close)
        self._callbacks = cleanups

    def __iter__(self):
        return self

    def __next__(self):
        return self._next()

    def close(self):
        for cleanup in self._callbacks:
            cleanup()
def wrap_file(environ, file, buffer_size=8192):
"""Wraps a file. This uses the WSGI server's file wrapper if available
or otherwise the generic :class:`FileWrapper`.
.. versionadded:: 0.5
If the file wrapper from the WSGI server is used it's important to not
iterate over it from inside the application but to pass it through
unchanged. If you want to pass out a file wrapper inside a response
object you have to set :attr:`~BaseResponse.direct_passthrough` to `True`.
More information about file wrappers are available in :pep:`333`.
:param file: a :class:`file`-like object with a :meth:`~file.read` method.
:param buffer_size: number of bytes for one iteration.
"""
return environ.get('wsgi.file_wrapper', FileWrapper)(file, buffer_size)
@implements_iterator
class FileWrapper(object):

    """This class can be used to convert a :class:`file`-like object into
    an iterable.  It yields `buffer_size` blocks until the file is fully
    read.

    You should not use this class directly but rather use the
    :func:`wrap_file` function that uses the WSGI server's file wrapper
    support if it's available.

    .. versionadded:: 0.5

    If you're using this object together with a :class:`BaseResponse` you have
    to use the `direct_passthrough` mode.

    :param file: a :class:`file`-like object with a :meth:`~file.read` method.
    :param buffer_size: number of bytes for one iteration.
    """

    def __init__(self, file, buffer_size=8192):
        self.file = file
        self.buffer_size = buffer_size

    def close(self):
        # Only delegate when the underlying object supports closing.
        if hasattr(self.file, 'close'):
            self.file.close()

    def __iter__(self):
        return self

    def __next__(self):
        chunk = self.file.read(self.buffer_size)
        if not chunk:
            raise StopIteration()
        return chunk
def _make_chunk_iter(stream, limit, buffer_size):
    """Helper for the line and chunk iter functions."""
    if isinstance(stream, (bytes, bytearray, text_type)):
        raise TypeError('Passed a string or byte object instead of '
                        'true iterator or stream.')
    if not hasattr(stream, 'read'):
        # Already an iterable of chunks; forward only the non-empty ones.
        for chunk in stream:
            if chunk:
                yield chunk
        return
    # Cap reads at ``limit`` unless the stream already enforces one.
    if not isinstance(stream, LimitedStream) and limit is not None:
        stream = LimitedStream(stream, limit)
    reader = stream.read
    while True:
        chunk = reader(buffer_size)
        if not chunk:
            return
        yield chunk
def make_line_iter(stream, limit=None, buffer_size=10 * 1024):
    """Safely iterates line-based over an input stream.  If the input stream
    is not a :class:`LimitedStream` the `limit` parameter is mandatory.

    This uses the stream's :meth:`~file.read` method internally as opposite
    to the :meth:`~file.readline` method that is unsafe and can only be used
    in violation of the WSGI specification.  The same problem applies to the
    `__iter__` function of the input stream which calls :meth:`~file.readline`
    without arguments.

    If you need line-by-line processing it's strongly recommended to iterate
    over the input stream using this helper function.

    .. versionchanged:: 0.8
       This function now ensures that the limit was reached.

    .. versionadded:: 0.9
       added support for iterators as input stream.

    :param stream: the stream or iterate to iterate over.
    :param limit: the limit in bytes for the stream.  (Usually
                  content length.  Not necessary if the `stream`
                  is a :class:`LimitedStream`.
    :param buffer_size: The optional buffer size.
    """
    _iter = _make_chunk_iter(stream, limit, buffer_size)

    first_item = next(_iter, '')
    if not first_item:
        return

    # The first chunk decides whether we work on text or bytes; ``s``
    # coerces literals to the matching type.
    s = make_literal_wrapper(first_item)
    empty = s('')
    cr = s('\r')
    lf = s('\n')
    crlf = s('\r\n')

    _iter = chain((first_item,), _iter)

    def _iter_basic_lines():
        # Yield newline-terminated lines, buffering partial lines across
        # chunk boundaries.
        _join = empty.join
        buffer = []
        while 1:
            new_data = next(_iter, '')
            if not new_data:
                break
            new_buf = []
            for item in chain(buffer, new_data.splitlines(True)):
                new_buf.append(item)
                if item and item[-1:] in crlf:
                    yield _join(new_buf)
                    new_buf = []
            buffer = new_buf
        if buffer:
            yield _join(buffer)

    # This hackery is necessary to merge 'foo\r' and '\n' into one item
    # of 'foo\r\n' if we were unlucky and we hit a chunk boundary.
    previous = empty
    for item in _iter_basic_lines():
        if item == lf and previous[-1:] == cr:
            previous += item
            item = empty
        if previous:
            yield previous
        previous = item
    if previous:
        yield previous
def make_chunk_iter(stream, separator, limit=None, buffer_size=10 * 1024):
    """Works like :func:`make_line_iter` but accepts a separator
    which divides chunks.  If you want newline based processing
    you should use :func:`make_line_iter` instead as it
    supports arbitrary newline markers.

    .. versionadded:: 0.8

    .. versionadded:: 0.9
       added support for iterators as input stream.

    :param stream: the stream or iterate to iterate over.
    :param separator: the separator that divides chunks.
    :param limit: the limit in bytes for the stream.  (Usually
                  content length.  Not necessary if the `stream`
                  is otherwise already limited).
    :param buffer_size: The optional buffer size.
    """
    _iter = _make_chunk_iter(stream, limit, buffer_size)

    first_item = next(_iter, '')
    if not first_item:
        return

    _iter = chain((first_item,), _iter)
    # The first chunk decides whether the separator, splitter and joiner
    # operate on text or on bytes.
    if isinstance(first_item, text_type):
        separator = to_unicode(separator)
        _split = re.compile(r'(%s)' % re.escape(separator)).split
        _join = u''.join
    else:
        separator = to_bytes(separator)
        _split = re.compile(b'(' + re.escape(separator) + b')').split
        _join = b''.join

    # ``buffer`` holds the pieces of the current (still unterminated)
    # chunk across read boundaries.
    buffer = []
    while 1:
        new_data = next(_iter, '')
        if not new_data:
            break
        chunks = _split(new_data)
        new_buf = []
        for item in chain(buffer, chunks):
            if item == separator:
                yield _join(new_buf)
                new_buf = []
            else:
                new_buf.append(item)
        buffer = new_buf
    if buffer:
        yield _join(buffer)
@implements_iterator
class LimitedStream(object):
"""Wraps a stream so that it doesn't read more than n bytes. If the
stream is exhausted and the caller tries to get more bytes from it
:func:`on_exhausted` is called which by default returns an empty
string. The return value of that function is forwarded
to the reader function. So if it returns an empty string
:meth:`read` will return an empty string as well.
The limit however must never be higher than what the stream can
output. Otherwise :meth:`readlines` will try to read past the
limit.
.. admonition:: Note on WSGI compliance
calls to :meth:`readline` and :meth:`readlines` are not
WSGI compliant because it passes a size argument to the
readline methods. Unfortunately the WSGI PEP is not safely
implementable without a size argument to :meth:`readline`
because there is no EOF marker in the stream. As a result
of that the use of :meth:`readline` is discouraged.
For the same reason iterating over the :class:`LimitedStream`
is not portable. It internally calls :meth:`readline`.
We strongly suggest using :meth:`read` only or using the
:func:`make_line_iter` which safely iterates line-based
over a WSGI input stream.
:param stream: the stream to wrap.
:param limit: the limit for the stream, must not be longer than
what the string can provide if the stream does not
end with `EOF` (like `wsgi.input`)
"""
def __init__(self, stream, limit):
self._read = stream.read
self._readline = stream.readline
self._pos = 0
self.limit = limit
def __iter__(self):
return self
@property
def is_exhausted(self):
"""If the stream is exhausted this attribute is `True`."""
return self._pos >= self.limit
def on_exhausted(self):
"""This is called when the stream tries to read past the limit.
The return value of this function is returned from the reading
function.
"""
# Read null bytes from the stream so that we get the
# correct end of stream marker.
return self._read(0)
def on_disconnect(self):
"""What should happen if a disconnect is detected? The return
value of this function is returned from read functions in case
the client went away. By default a
:exc:`~werkzeug.exceptions.ClientDisconnected` exception is raised.
"""
from werkzeug.exceptions import ClientDisconnected
raise ClientDisconnected()
def exhaust(self, chunk_size=1024 * 64):
"""Exhaust the stream. This consumes all the data left until the
limit is reached.
:param chunk_size: the size for a chunk. It will read the chunk
until the stream is exhausted and throw away
the results.
"""
to_read = self.limit - self._pos
chunk = chunk_size
while to_read > 0:
chunk = min(to_read, chunk)
self.read(chunk)
to_read -= chunk
def read(self, size=None):
"""Read `size` bytes or if size is not provided everything is read.
:param size: the number of bytes read.
"""
if self._pos >= self.limit:
return self.on_exhausted()
if size is None or size == -1: # -1 is for consistence with file
size = self.limit
to_read = min(self.limit - self._pos, size)
try:
read = self._read(to_read)
except (IOError, ValueError):
return self.on_disconnect()
if to_read and len(read) != to_read:
return self.on_disconnect()
self._pos += len(read)
return read
    def readline(self, size=None):
        """Reads one line from the stream."""
        if self._pos >= self.limit:
            return self.on_exhausted()
        if size is None:
            # Without an explicit size, read at most up to the limit.
            size = self.limit - self._pos
        else:
            size = min(size, self.limit - self._pos)
        try:
            line = self._readline(size)
        except (ValueError, IOError):
            return self.on_disconnect()
        if size and not line:
            # A positive request that yields nothing means the client
            # disconnected before the limit was reached.
            return self.on_disconnect()
        self._pos += len(line)
        return line
    def readlines(self, size=None):
        """Reads a file into a list of strings.  It calls :meth:`readline`
        until the file is read to the end.  It does support the optional
        `size` argument if the underlaying stream supports it for
        `readline`.
        """
        last_pos = self._pos
        result = []
        if size is not None:
            # `end` bounds the absolute position we will read to.
            end = min(self.limit, last_pos + size)
        else:
            end = self.limit
        while 1:
            if size is not None:
                # NOTE(review): `last_pos` is resynced to `self._pos` at the
                # bottom of the loop, so this adjustment is zero on every
                # iteration — the `end` check below is what actually bounds
                # the read.  Verify this accounting is intentional.
                size -= last_pos - self._pos
            if self._pos >= end:
                break
            result.append(self.readline(size))
            if size is not None:
                last_pos = self._pos
        return result
    def tell(self):
        """Returns the position of the stream.

        .. versionadded:: 0.9
        """
        # Position is tracked locally, not delegated to the wrapped stream.
        return self._pos
    def __next__(self):
        line = self.readline()
        if not line:
            # readline() returns an empty string at/after the limit;
            # translate that into end of iteration.
            raise StopIteration()
        return line
| mit |
def-/commandergenius | project/jni/python/src/Tools/scripts/which.py | 100 | 1631 | #! /usr/bin/env python
# Variant of "which".
# On stderr, near and total misses are reported.
# '-l<flags>' argument adds ls -l<flags> of each file found.
import sys
# Drop the script's own directory from the import path so modules sitting
# next to this script cannot shadow standard-library modules.
if sys.path[0] in (".", ""): del sys.path[0]
# NOTE: `sys` is imported a second time here; harmless but redundant.
import sys, os
from stat import *
def msg(text):
    """Write *text* to stderr, followed by a newline.

    Renamed the parameter from ``str`` to ``text``: the old name shadowed
    the ``str`` builtin inside the function body.
    """
    sys.stderr.write(text + '\n')
def main():
    # Python 2 script: uses the `print` statement and the 0111 octal
    # literal (0o111, the three execute permission bits).
    pathlist = os.environ['PATH'].split(os.pathsep)
    sts = 0
    longlist = ''
    # Optional leading '-l<flags>' argument: run `ls -l<flags>` on each hit.
    if sys.argv[1:] and sys.argv[1][:2] == '-l':
        longlist = sys.argv[1]
        del sys.argv[1]
    for prog in sys.argv[1:]:
        # (st_mode, st_ino, st_dev) of the first executable match; used to
        # tell hard links / duplicates ("same as:") from distinct files
        # later on the PATH ("also:").
        ident = ()
        for dir in pathlist:
            filename = os.path.join(dir, prog)
            try:
                st = os.stat(filename)
            except os.error:
                # No entry in this PATH directory; keep scanning.
                continue
            if not S_ISREG(st[ST_MODE]):
                msg(filename + ': not a disk file')
            else:
                mode = S_IMODE(st[ST_MODE])
                if mode & 0111:
                    if not ident:
                        # First hit: report it on stdout.
                        print filename
                        ident = st[:3]
                    else:
                        if st[:3] == ident:
                            s = 'same as: '
                        else:
                            s = 'also: '
                        msg(s + filename)
                else:
                    msg(filename + ': not executable')
            if longlist:
                # NOTE(review): shell command built by string concatenation;
                # paths with spaces/metacharacters will break or be unsafe.
                sts = os.system('ls ' + longlist + ' ' + filename)
                if sts: msg('"ls -l" exit status: ' + repr(sts))
        if not ident:
            msg(prog + ': not found')
            sts = 1
    sys.exit(sts)
if __name__ == '__main__':
main()
| lgpl-2.1 |
thaumos/ansible | lib/ansible/modules/storage/netapp/na_ontap_broadcast_domain.py | 15 | 18575 | #!/usr/bin/python
# (c) 2018-2019, NetApp, Inc
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'certified'}
DOCUMENTATION = '''
module: na_ontap_broadcast_domain
short_description: NetApp ONTAP manage broadcast domains.
extends_documentation_fragment:
- netapp.na_ontap
version_added: '2.6'
author: NetApp Ansible Team (@carchi8py) <ng-ansibleteam@netapp.com>
description:
- Modify a ONTAP broadcast domain.
options:
state:
description:
- Whether the specified broadcast domain should exist or not.
choices: ['present', 'absent']
default: present
name:
description:
- Specify the broadcast domain name.
required: true
aliases:
- broadcast_domain
from_name:
description:
- Specify the broadcast domain name to be split into new broadcast domain.
version_added: "2.8"
mtu:
description:
- Specify the required mtu for the broadcast domain.
ipspace:
description:
- Specify the required ipspace for the broadcast domain.
- A domain ipspace can not be modified after the domain has been created.
ports:
description:
- Specify the ports associated with this broadcast domain. Should be comma separated.
- It represents the expected state of a list of ports at any time.
- Add a port if it is specified in expected state but not in current state.
- Delete a port if it is specified in current state but not in expected state.
- For split action, it represents the ports to be split from current broadcast domain and added to the new broadcast domain.
    - If all ports are removed or split from a broadcast domain, the broadcast domain will be deleted automatically.
'''
EXAMPLES = """
- name: create broadcast domain
na_ontap_broadcast_domain:
state: present
username: "{{ netapp_username }}"
password: "{{ netapp_password }}"
hostname: "{{ netapp_hostname }}"
name: ansible_domain
mtu: 1000
ipspace: Default
ports: ["khutton-vsim1:e0d-12", "khutton-vsim1:e0d-13"]
- name: modify broadcast domain
na_ontap_broadcast_domain:
state: present
username: "{{ netapp_username }}"
password: "{{ netapp_password }}"
hostname: "{{ netapp_hostname }}"
name: ansible_domain
mtu: 1100
ipspace: Default
ports: ["khutton-vsim1:e0d-12", "khutton-vsim1:e0d-13"]
- name: split broadcast domain
na_ontap_broadcast_domain:
state: present
username: "{{ netapp_username }}"
password: "{{ netapp_password }}"
hostname: "{{ netapp_hostname }}"
from_name: ansible_domain
name: new_ansible_domain
mtu: 1200
ipspace: Default
ports: khutton-vsim1:e0d-12
- name: delete broadcast domain
na_ontap_broadcast_domain:
state: absent
username: "{{ netapp_username }}"
password: "{{ netapp_password }}"
hostname: "{{ netapp_hostname }}"
name: ansible_domain
ipspace: Default
"""
RETURN = """
"""
import traceback
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils._text import to_native
import ansible.module_utils.netapp as netapp_utils
from ansible.module_utils.netapp_module import NetAppModule
HAS_NETAPP_LIB = netapp_utils.has_netapp_lib()
class NetAppOntapBroadcastDomain(object):
    """
    Create, Modifies and Destroys a Broadcast domain
    """
    def __init__(self):
        """
        Initialize the ONTAP Broadcast Domain class
        """
        # Module-specific options layered on top of the shared NetApp
        # connection options (hostname/username/password, ...).
        self.argument_spec = netapp_utils.na_ontap_host_argument_spec()
        self.argument_spec.update(dict(
            state=dict(required=False, choices=['present', 'absent'], default='present'),
            name=dict(required=True, type='str', aliases=["broadcast_domain"]),
            ipspace=dict(required=False, type='str'),
            mtu=dict(required=False, type='str'),
            ports=dict(required=False, type='list'),
            from_name=dict(required=False, type='str'),
        ))
        self.module = AnsibleModule(
            argument_spec=self.argument_spec,
            supports_check_mode=True
        )
        self.na_helper = NetAppModule()
        self.parameters = self.na_helper.set_parameters(self.module.params)
        # ZAPI access requires netapp-lib; abort early when it is missing.
        if HAS_NETAPP_LIB is False:
            self.module.fail_json(msg="the python NetApp-Lib module is required")
        else:
            self.server = netapp_utils.setup_na_ontap_zapi(module=self.module)
        return

    def get_broadcast_domain(self, broadcast_domain=None):
        """
        Return details about the broadcast domain
        :param broadcast_domain: specific broadcast domain to get.
        :return: Details about the broadcast domain. None if not found.
        :rtype: dict
        """
        if broadcast_domain is None:
            broadcast_domain = self.parameters['name']
        # Build an exact-match get-iter query on the domain name.
        domain_get_iter = netapp_utils.zapi.NaElement('net-port-broadcast-domain-get-iter')
        broadcast_domain_info = netapp_utils.zapi.NaElement('net-port-broadcast-domain-info')
        broadcast_domain_info.add_new_child('broadcast-domain', broadcast_domain)
        query = netapp_utils.zapi.NaElement('query')
        query.add_child_elem(broadcast_domain_info)
        domain_get_iter.add_child_elem(query)
        result = self.server.invoke_successfully(domain_get_iter, True)
        domain_exists = None
        # check if broadcast_domain exists
        if result.get_child_by_name('num-records') and \
                int(result.get_child_content('num-records')) == 1:
            domain_info = result.get_child_by_name('attributes-list').\
                get_child_by_name('net-port-broadcast-domain-info')
            domain_name = domain_info.get_child_content('broadcast-domain')
            domain_mtu = domain_info.get_child_content('mtu')
            domain_ipspace = domain_info.get_child_content('ipspace')
            domain_ports = domain_info.get_child_by_name('ports')
            # A domain may legitimately have no ports assigned.
            if domain_ports is not None:
                ports = [port.get_child_content('port') for port in domain_ports.get_children()]
            else:
                ports = []
            domain_exists = {
                'domain-name': domain_name,
                'mtu': domain_mtu,
                'ipspace': domain_ipspace,
                'ports': ports
            }
        return domain_exists

    def create_broadcast_domain(self):
        """
        Creates a new broadcast domain
        """
        domain_obj = netapp_utils.zapi.NaElement('net-port-broadcast-domain-create')
        domain_obj.add_new_child("broadcast-domain", self.parameters['name'])
        # ipspace, mtu and ports are all optional at creation time.
        if self.parameters.get('ipspace'):
            domain_obj.add_new_child("ipspace", self.parameters['ipspace'])
        if self.parameters.get('mtu'):
            domain_obj.add_new_child("mtu", self.parameters['mtu'])
        if self.parameters.get('ports'):
            ports_obj = netapp_utils.zapi.NaElement('ports')
            domain_obj.add_child_elem(ports_obj)
            for port in self.parameters['ports']:
                ports_obj.add_new_child('net-qualified-port-name', port)
        try:
            self.server.invoke_successfully(domain_obj, True)
        except netapp_utils.zapi.NaApiError as error:
            self.module.fail_json(msg='Error creating broadcast domain %s: %s' %
                                  (self.parameters['name'], to_native(error)),
                                  exception=traceback.format_exc())

    def delete_broadcast_domain(self, broadcast_domain=None):
        """
        Deletes a broadcast domain
        :param broadcast_domain: name of the domain to delete; defaults to
                                 the module's `name` parameter.
        """
        if broadcast_domain is None:
            broadcast_domain = self.parameters['name']
        domain_obj = netapp_utils.zapi.NaElement('net-port-broadcast-domain-destroy')
        domain_obj.add_new_child("broadcast-domain", broadcast_domain)
        if self.parameters.get('ipspace'):
            domain_obj.add_new_child("ipspace", self.parameters['ipspace'])
        try:
            self.server.invoke_successfully(domain_obj, True)
        except netapp_utils.zapi.NaApiError as error:
            self.module.fail_json(msg='Error deleting broadcast domain %s: %s' %
                                  (broadcast_domain, to_native(error)),
                                  exception=traceback.format_exc())

    def modify_broadcast_domain(self):
        """
        Modifies ipspace and mtu options of a broadcast domain
        """
        domain_obj = netapp_utils.zapi.NaElement('net-port-broadcast-domain-modify')
        domain_obj.add_new_child("broadcast-domain", self.parameters['name'])
        if self.parameters.get('mtu'):
            domain_obj.add_new_child("mtu", self.parameters['mtu'])
        # ipspace here only scopes which domain is modified; changing the
        # ipspace itself is rejected earlier in get_modify_attributes().
        if self.parameters.get('ipspace'):
            domain_obj.add_new_child("ipspace", self.parameters['ipspace'])
        try:
            self.server.invoke_successfully(domain_obj, True)
        except netapp_utils.zapi.NaApiError as error:
            self.module.fail_json(msg='Error modifying broadcast domain %s: %s' %
                                  (self.parameters['name'], to_native(error)),
                                  exception=traceback.format_exc())

    def split_broadcast_domain(self):
        """
        split broadcast domain
        """
        # Move the listed ports out of `from_name` into a new domain `name`.
        domain_obj = netapp_utils.zapi.NaElement('net-port-broadcast-domain-split')
        domain_obj.add_new_child("broadcast-domain", self.parameters['from_name'])
        domain_obj.add_new_child("new-broadcast-domain", self.parameters['name'])
        if self.parameters.get('ports'):
            ports_obj = netapp_utils.zapi.NaElement('ports')
            domain_obj.add_child_elem(ports_obj)
            for port in self.parameters['ports']:
                ports_obj.add_new_child('net-qualified-port-name', port)
        if self.parameters.get('ipspace'):
            domain_obj.add_new_child("ipspace", self.parameters['ipspace'])
        try:
            self.server.invoke_successfully(domain_obj, True)
        except netapp_utils.zapi.NaApiError as error:
            self.module.fail_json(msg='Error splitting broadcast domain %s: %s' %
                                  (self.parameters['name'], to_native(error)),
                                  exception=traceback.format_exc())
        # If the split emptied the source domain, remove it (matches the
        # documented behaviour: a domain with no ports is deleted).
        if len(self.get_broadcast_domain_ports(self.parameters['from_name'])) == 0:
            self.delete_broadcast_domain(self.parameters['from_name'])

    def modify_redirect(self, modify):
        """
        Dispatch each modified attribute to the matching modify method.
        :param modify: modify attributes.
        """
        for attribute in modify.keys():
            if attribute == 'mtu':
                self.modify_broadcast_domain()
            if attribute == 'ports':
                self.modify_broadcast_domain_ports()

    def get_modify_attributes(self, current, split):
        """
        :param current: current state.
        :param split: True or False of split action.
        :return: list of modified attributes.
        """
        modify = None
        if self.parameters['state'] == 'present':
            # split already handled ipspace and ports.
            if self.parameters.get('from_name'):
                # Compare against the source domain when splitting.
                current = self.get_broadcast_domain(self.parameters['from_name'])
                if split:
                    modify = self.na_helper.get_modified_attributes(current, self.parameters)
                    # ipspace and ports are handled by the split call itself,
                    # so they must not be reported as modifications here.
                    if modify.get('ipspace'):
                        del modify['ipspace']
                    if modify.get('ports'):
                        del modify['ports']
            # ipspace can not be modified.
            else:
                modify = self.na_helper.get_modified_attributes(current, self.parameters)
                if modify.get('ipspace'):
                    self.module.fail_json(msg='A domain ipspace can not be modified after the domain has been created.',
                                          exception=traceback.format_exc())
        return modify

    def modify_broadcast_domain_ports(self):
        """
        compare current and desire ports. Call add or remove ports methods if needed.
        :return: None.
        """
        current_ports = self.get_broadcast_domain_ports()
        expect_ports = self.parameters['ports']
        # if want to remove all ports, simply delete the broadcast domain.
        if len(expect_ports) == 0:
            self.delete_broadcast_domain()
            return
        # Set difference in both directions gives the add/remove lists.
        ports_to_remove = list(set(current_ports) - set(expect_ports))
        ports_to_add = list(set(expect_ports) - set(current_ports))

        if len(ports_to_add) > 0:
            self.add_broadcast_domain_ports(ports_to_add)

        if len(ports_to_remove) > 0:
            self.delete_broadcast_domain_ports(ports_to_remove)

    def add_broadcast_domain_ports(self, ports):
        """
        Creates new broadcast domain ports
        :param ports: qualified port names to add to the domain.
        """
        domain_obj = netapp_utils.zapi.NaElement('net-port-broadcast-domain-add-ports')
        domain_obj.add_new_child("broadcast-domain", self.parameters['name'])
        if self.parameters.get('ipspace'):
            domain_obj.add_new_child("ipspace", self.parameters['ipspace'])
        if ports:
            ports_obj = netapp_utils.zapi.NaElement('ports')
            domain_obj.add_child_elem(ports_obj)
            for port in ports:
                ports_obj.add_new_child('net-qualified-port-name', port)
        try:
            self.server.invoke_successfully(domain_obj, True)
            return True
        except netapp_utils.zapi.NaApiError as error:
            self.module.fail_json(msg='Error creating port for broadcast domain %s: %s' %
                                  (self.parameters['name'], to_native(error)),
                                  exception=traceback.format_exc())

    def delete_broadcast_domain_ports(self, ports):
        """
        Deletes broadcast domain ports
        :param: ports to be deleted.
        """
        domain_obj = netapp_utils.zapi.NaElement('net-port-broadcast-domain-remove-ports')
        domain_obj.add_new_child("broadcast-domain", self.parameters['name'])
        if self.parameters.get('ipspace'):
            domain_obj.add_new_child("ipspace", self.parameters['ipspace'])
        if ports:
            ports_obj = netapp_utils.zapi.NaElement('ports')
            domain_obj.add_child_elem(ports_obj)
            for port in ports:
                ports_obj.add_new_child('net-qualified-port-name', port)
        try:
            self.server.invoke_successfully(domain_obj, True)
            return True
        except netapp_utils.zapi.NaApiError as error:
            self.module.fail_json(msg='Error deleting port for broadcast domain %s: %s' %
                                  (self.parameters['name'], to_native(error)),
                                  exception=traceback.format_exc())

    def get_broadcast_domain_ports(self, broadcast_domain=None):
        """
        Return details about the broadcast domain ports.
        :return: Details about the broadcast domain ports. None if not found.
        :rtype: list
        """
        if broadcast_domain is None:
            broadcast_domain = self.parameters['name']
        domain_get_iter = netapp_utils.zapi.NaElement('net-port-broadcast-domain-get-iter')
        broadcast_domain_info = netapp_utils.zapi.NaElement('net-port-broadcast-domain-info')
        broadcast_domain_info.add_new_child('broadcast-domain', broadcast_domain)
        query = netapp_utils.zapi.NaElement('query')
        query.add_child_elem(broadcast_domain_info)
        domain_get_iter.add_child_elem(query)
        result = self.server.invoke_successfully(domain_get_iter, True)
        ports = []
        if result.get_child_by_name('num-records') and \
                int(result.get_child_content('num-records')) == 1:
            domain_info = result.get_child_by_name('attributes-list').get_child_by_name('net-port-broadcast-domain-info')
            domain_ports = domain_info.get_child_by_name('ports')
            if domain_ports is not None:
                ports = [port.get_child_content('port') for port in domain_ports.get_children()]
        return ports

    def apply(self):
        """
        Run Module based on play book
        """
        self.asup_log_for_cserver("na_ontap_broadcast_domain")
        current = self.get_broadcast_domain()
        cd_action, split = None, None
        cd_action = self.na_helper.get_cd_action(current, self.parameters)

        if cd_action == 'create':
            # either create new domain or split domain.
            if self.parameters.get('from_name'):
                # A split is really a "rename" of ports: the source domain
                # must exist and the target domain must not.
                split = self.na_helper.is_rename_action(self.get_broadcast_domain(self.parameters['from_name']), current)
                if split is None:
                    self.module.fail_json(msg='A domain can not be split if it does not exist.',
                                          exception=traceback.format_exc())
                if split:
                    cd_action = None
        modify = self.get_modify_attributes(current, split)
        if self.na_helper.changed:
            if self.module.check_mode:
                # check mode: report the pending change without applying it.
                pass
            else:
                if split:
                    self.split_broadcast_domain()
                if cd_action == 'create':
                    self.create_broadcast_domain()
                elif cd_action == 'delete':
                    self.delete_broadcast_domain()
                elif modify:
                    self.modify_redirect(modify)
        self.module.exit_json(changed=self.na_helper.changed)

    def asup_log_for_cserver(self, event_name):
        """
        Fetch admin vserver for the given cluster
        Create and Autosupport log event with the given module name
        :param event_name: Name of the event log
        :return: None
        """
        results = netapp_utils.get_cserver(self.server)
        cserver = netapp_utils.setup_na_ontap_zapi(module=self.module, vserver=results)
        netapp_utils.ems_log_event(event_name, cserver)
def main():
    """Entry point: build the broadcast-domain module object and run it."""
    NetAppOntapBroadcastDomain().apply()
if __name__ == '__main__':
main()
| gpl-3.0 |
whiteear/newrelic-plugin-agent | newrelic_plugin_agent/plugins/apache_httpd.py | 7 | 5670 | """
ApacheHTTPD Support
"""
import logging
import re
from newrelic_plugin_agent.plugins import base
LOGGER = logging.getLogger(__name__)
# Parses "Key: value" lines from Apache mod_status output, one per line.
# NOTE(review): the "{1}" inside each character class matches the literal
# characters '{', '1', '}' — not a repetition count.  Presumably
# unintentional but harmless; confirm before "fixing" the pattern.
PATTERN = re.compile(r'^([\w\s{1}]+):\s([\d\.{1}]+)', re.M)
class ApacheHTTPD(base.HTTPStatsPlugin):
    """Collect metrics from Apache HTTPd's mod_status ``?auto`` page.

    Each entry in :attr:`KEYS` maps a mod_status field (or scoreboard
    character) to a New Relic metric label; ``type`` selects gauge vs
    derive recording and ``suffix`` provides the unit string.
    """

    DEFAULT_QUERY = 'auto'
    GUID = 'com.meetme.newrelic_apache_httpd_agent'
    KEYS = {'Total Accesses': {'type': '',
                               'label': 'Totals/Requests',
                               'suffix': 'requests'},
            'BusyWorkers': {'type': 'gauge',
                            'label': 'Workers/Busy',
                            'suffix': 'workers'},
            'Total kBytes': {'type': '',
                             'label': 'Totals/Bytes Sent',
                             'suffix': 'kb'},
            'BytesPerSec': {'type': 'gauge',
                            'label': 'Bytes/Per Second',
                            'suffix': 'bytes/sec'},
            'BytesPerReq': {'type': 'gauge',
                            'label': 'Requests/Average Payload Size',
                            'suffix': 'bytes'},
            'IdleWorkers': {'type': 'gauge', 'label': 'Workers/Idle',
                            'suffix': 'workers'},
            'CPULoad': {'type': 'gauge', 'label': 'CPU Load',
                        'suffix': 'processes'},
            'ReqPerSec': {'type': 'gauge', 'label': 'Requests/Velocity',
                          'suffix': 'requests/sec'},
            'Uptime': {'type': 'gauge', 'label': 'Uptime', 'suffix': 'sec'},
            'ConnsTotal': {'type': 'gauge', 'label': 'Connections/Total', 'suffix': 'conns'},
            'ConnsAsyncWriting': {'type': 'gauge', 'label': 'Connections/AsyncWriting', 'suffix': 'conns'},
            'ConnsAsyncKeepAlive': {'type': 'gauge', 'label': 'Connections/AsyncKeepAlive', 'suffix': 'conns'},
            'ConnsAsyncClosing': {'type': 'gauge', 'label': 'Connections/AsyncClosing', 'suffix': 'conns'},
            '_': {'type': 'gauge', 'label': 'Scoreboard/Waiting For Conn', 'suffix': 'slots'},
            'S': {'type': 'gauge', 'label': 'Scoreboard/Starting Up', 'suffix': 'slots'},
            'R': {'type': 'gauge', 'label': 'Scoreboard/Reading Request', 'suffix': 'slots'},
            'W': {'type': 'gauge', 'label': 'Scoreboard/Sending Reply', 'suffix': 'slots'},
            'K': {'type': 'gauge', 'label': 'Scoreboard/Keepalive Read', 'suffix': 'slots'},
            'D': {'type': 'gauge', 'label': 'Scoreboard/DNS Lookup', 'suffix': 'slots'},
            'C': {'type': 'gauge', 'label': 'Scoreboard/Closing Conn', 'suffix': 'slots'},
            'L': {'type': 'gauge', 'label': 'Scoreboard/Logging', 'suffix': 'slots'},
            'G': {'type': 'gauge', 'label': 'Scoreboard/Gracefully Finishing', 'suffix': 'slots'},
            'I': {'type': 'gauge', 'label': 'Scoreboard/Idle Cleanup', 'suffix': 'slots'},
            '.': {'type': 'gauge', 'label': 'Scoreboard/Open Slot', 'suffix': 'slots'}}

    def error_message(self):
        """Log a hint when no stats could be parsed from the status page."""
        LOGGER.error('Could not match any of the stats, please make ensure '
                     'Apache HTTPd is configured correctly. If you report '
                     'this as a bug, please include the full output of the '
                     'status page from %s in your ticket', self.stats_url)

    def get_scoreboard(self, data):
        """Count scoreboard slot states from the status page output.

        :param str data: raw mod_status output
        :rtype: dict
        """
        keys = ['_', 'S', 'R', 'W', 'K', 'D', 'C', 'L', 'G', 'I', '.']
        # Start every known slot state at zero so absent states still report.
        score_out = dict.fromkeys(keys, 0)
        for line in data.splitlines():
            if line.find('Scoreboard') != -1:
                scoreboard = line.replace('Scoreboard: ', '')
                for slot_state in scoreboard:
                    score_out[slot_state] += 1
        return score_out

    def _record(self, key, value):
        """Record one stat under its mapped label, as gauge or derive.

        Unmapped keys are logged at debug level and skipped.
        """
        if key in self.KEYS:
            if self.KEYS[key].get('type') == 'gauge':
                self.add_gauge_value(self.KEYS[key]['label'],
                                     self.KEYS[key].get('suffix', ''),
                                     value)
            else:
                self.add_derive_value(self.KEYS[key]['label'],
                                      self.KEYS[key].get('suffix', ''),
                                      value)
        else:
            LOGGER.debug('Found unmapped key/value pair: %s = %s',
                         key, value)

    def add_datapoints(self, stats):
        """Add all of the data points for a node

        :param str stats: The stats content from Apache as a string
        """
        matches = PATTERN.findall(stats or '')
        for key, value in matches:
            # Values arrive as strings; prefer int, fall back to float,
            # and default to 0 for anything unparsable.
            try:
                value = int(value)
            except ValueError:
                try:
                    value = float(value)
                except ValueError:
                    value = 0
            self._record(key, value)
        # .items() instead of the Python-2-only .iteritems(); behaviour is
        # identical on Python 2 and also works on Python 3.
        for key, value in self.get_scoreboard(stats).items():
            self._record(key, value)
| bsd-3-clause |
kronat/ns-3-dev-git | bindings/python/ns3modulegen-modular.py | 107 | 4822 | from __future__ import print_function
import warnings
import sys
import os
import pybindgen.settings
from pybindgen import Module, FileCodeSink, param, retval, cppclass, typehandlers
from pybindgen.module import MultiSectionFactory
import ns3modulegen_core_customizations
import logging
# Select the std::map-based wrapper registry implementation for pybindgen.
pybindgen.settings.wrapper_registry = pybindgen.settings.StdMapWrapperRegistry
import traceback
class ErrorHandler(pybindgen.settings.ErrorHandler):
    """Turn pybindgen wrapper-generation errors into warnings.

    When the failing wrapper's definition stack includes the apidefs
    file, the warning is attributed to that file and line; otherwise a
    plain warning is emitted.
    """

    def __init__(self, apidefs_file):
        self.apidefs_file = apidefs_file

    def handle_error(self, wrapper, exception, traceback_):
        frames = getattr(wrapper, 'stack_where_defined', [])
        frames.reverse()
        message = "exception %r in wrapper %s" % (exception, wrapper)
        matched = False
        for frame in frames:
            if frame[0] == self.apidefs_file:
                # frame is (filename, lineno, ...) — attribute the warning
                # to the apidefs source location.
                warnings.warn_explicit(message, Warning, frame[0], frame[1])
                matched = True
                break
        if not matched:
            warnings.warn(message)
        # Returning True tells pybindgen the error was handled; generation
        # continues with the wrapper skipped.
        return True
# NOTE(review): this eval()s an environment variable; the expected values are
# "True"/"False" set by the build system.  eval on environment input is risky
# if the variable can be attacker-controlled — confirm it is only ever set by
# the waf build scripts.
pybindgen.settings.gcc_rtti_abi_complete = bool(eval(os.environ["GCC_RTTI_ABI_COMPLETE"]))
class MyMultiSectionFactory(MultiSectionFactory):
    """Collapse all pybindgen output sections into one .cc file plus a
    shared "ns3module.h" header written next to it.
    """
    def __init__(self, main_file_name):
        super(MyMultiSectionFactory, self).__init__()
        self.main_file_name = main_file_name
        # Both sinks are opened here and stay open until close() is called.
        self.main_sink = FileCodeSink(open(main_file_name, "wt"))
        self.header_name = "ns3module.h"
        # The common header is created in the same directory as the main file.
        header_file_name = os.path.join(os.path.dirname(self.main_file_name), self.header_name)
        self.header_sink = FileCodeSink(open(header_file_name, "wt"))
    def get_section_code_sink(self, section_name):
        # Every named section is routed to the single main sink.
        return self.main_sink
    def get_main_code_sink(self):
        return self.main_sink
    def get_common_header_code_sink(self):
        return self.header_sink
    def get_common_header_include(self):
        # How generated code should #include the common header.
        return '"%s"' % self.header_name
    def close(self):
        self.header_sink.file.close()
        self.main_sink.file.close()
def main(argv):
logging.basicConfig()
logging.getLogger("pybindgen.typehandlers").setLevel(logging.DEBUG)
module_abs_src_path, target, extension_name, output_cc_file_name = argv[1:]
module_name = os.path.basename(module_abs_src_path)
out = MyMultiSectionFactory(output_cc_file_name)
sys.path.insert(0, os.path.join(module_abs_src_path, "bindings"))
try:
module_apidefs = __import__("modulegen__%s" % target)
del sys.modules["modulegen__%s" % target]
try:
module_customization = __import__("modulegen_customizations")
del sys.modules["modulegen_customizations"]
except ImportError:
module_customization = object()
try:
from callbacks_list import callback_classes
except ImportError as ex:
print("***************", repr(ex), file=sys.stderr)
callback_classes = []
else:
print(">>>>>>>>>>>>>>>>", repr(callback_classes), file=sys.stderr)
finally:
sys.path.pop(0)
apidefs_file, dummy = os.path.splitext(module_apidefs.__file__)
apidefs_file += '.py'
pybindgen.settings.error_handler = ErrorHandler(apidefs_file)
root_module = module_apidefs.module_init()
root_module.set_name(extension_name)
root_module.add_include('"ns3/%s-module.h"' % module_name)
ns3modulegen_core_customizations.add_std_ios_openmode(root_module)
# -----------
module_apidefs.register_types(root_module)
if hasattr(module_customization, 'post_register_types'):
module_customization.post_register_types(root_module)
# register Callback<...> type handlers
ns3modulegen_core_customizations.register_callback_classes(root_module.after_forward_declarations,
callback_classes)
# -----------
module_apidefs.register_methods(root_module)
if hasattr(module_customization, 'post_register_methods'):
module_customization.post_register_methods(root_module)
ns3modulegen_core_customizations.Object_customizations(root_module)
ns3modulegen_core_customizations.Attribute_customizations(root_module)
ns3modulegen_core_customizations.generate_callback_classes(root_module,
callback_classes)
# -----------
module_apidefs.register_functions(root_module)
if hasattr(module_customization, 'post_register_functions'):
module_customization.post_register_functions(root_module)
# -----------
root_module.generate(out)
if __name__ == '__main__':
    # `sys` is already imported at the top of this file; the redundant
    # function-local re-import was removed.
    main(sys.argv)
| gpl-2.0 |
cluck/freeipa | ipatests/test_xmlrpc/test_selinuxusermap_plugin.py | 2 | 29885 | # Authors:
# Rob Crittenden <rcritten@redhat.com>
#
# Copyright (C) 2011 Red Hat
# see file 'COPYING' for use and warranty information
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Test the `ipalib/plugins/selinuxusermap.py` module.
"""
from ipalib import api, errors
from ipatests.test_xmlrpc import objectclasses
from xmlrpc_test import Declarative, fuzzy_digits, fuzzy_uuid
from ipapython.dn import DN
from ipatests.util import Fuzzy
from ipatests.test_xmlrpc.test_user_plugin import get_user_result
# Fixture names shared by the declarative tests below.
rule1 = u'selinuxrule1'
selinuxuser1 = u'guest_u:s0'
selinuxuser2 = u'xguest_u:s0'
user1 = u'tuser1'
group1 = u'testgroup1'
host1 = u'testhost1.%s' % api.env.domain
hostdn1 = DN(('fqdn', host1), ('cn', 'computers'), ('cn', 'accounts'),
             api.env.basedn)
hbacrule1 = u'testhbacrule1'
hbacrule2 = u'testhbacrule12'

# Fuzzy matchers for DNs containing a generated ipauniqueid (UUID).
# Note (?i) at the beginning of the regexp is the ignore case flag
fuzzy_selinuxusermapdn = Fuzzy(
    '(?i)ipauniqueid=[0-9a-f]{8}-[0-9a-f]{4}'
    '-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12},%s,%s'
    % (api.env.container_selinux, api.env.basedn)
)
fuzzy_hbacruledn = Fuzzy(
    '(?i)ipauniqueid=[0-9a-f]{8}-[0-9a-f]{4}'
    '-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12},%s,%s'
    % (api.env.container_hbac, api.env.basedn)
)

# NOTE(review): this issues a live API call at import time; module import
# therefore requires a configured/connected IPA environment.
allow_all_rule_dn = api.Command['hbacrule_show'](u'allow_all')['result']['dn']
class test_selinuxusermap(Declarative):
cleanup_commands = [
('selinuxusermap_del', [rule1], {}),
('group_del', [group1], {}),
('user_del', [user1], {}),
('host_del', [host1], {}),
('hbacrule_del', [hbacrule1], {}),
('hbacrule_del', [hbacrule2], {}),
]
tests = [
dict(
desc='Try to retrieve non-existent %r' % rule1,
command=('selinuxusermap_show', [rule1], {}),
expected=errors.NotFound(
reason=u'%s: SELinux User Map rule not found' % rule1),
),
dict(
desc='Try to update non-existent %r' % rule1,
command=('selinuxusermap_mod', [rule1], dict(description=u'Foo')),
expected=errors.NotFound(
reason=u'%s: SELinux User Map rule not found' % rule1),
),
dict(
desc='Try to delete non-existent %r' % rule1,
command=('selinuxusermap_del', [rule1], {}),
expected=errors.NotFound(
reason=u'%s: SELinux User Map rule not found' % rule1),
),
dict(
desc='Create rule %r' % rule1,
command=(
'selinuxusermap_add', [rule1],
dict(ipaselinuxuser=selinuxuser1)
),
expected=dict(
value=rule1,
summary=u'Added SELinux User Map "%s"' % rule1,
result=dict(
cn=[rule1],
ipaselinuxuser=[selinuxuser1],
objectclass=objectclasses.selinuxusermap,
ipauniqueid=[fuzzy_uuid],
ipaenabledflag=[u'TRUE'],
dn=fuzzy_selinuxusermapdn,
),
),
),
dict(
desc='Try to create duplicate %r' % rule1,
command=(
'selinuxusermap_add', [rule1],
dict(ipaselinuxuser=selinuxuser1)
),
expected=errors.DuplicateEntry(message=u'SELinux User Map rule ' +
u'with name "%s" already exists' % rule1),
),
dict(
desc='Retrieve rule %r' % rule1,
command=('selinuxusermap_show', [rule1], {}),
expected=dict(
value=rule1,
summary=None,
result=dict(
cn=[rule1],
ipaselinuxuser=[selinuxuser1],
ipaenabledflag=[u'TRUE'],
dn=fuzzy_selinuxusermapdn,
),
),
),
dict(
desc='Update rule %r' % rule1,
command=(
'selinuxusermap_mod', [rule1],
dict(ipaselinuxuser=selinuxuser2)
),
expected=dict(
result=dict(
cn=[rule1],
ipaselinuxuser=[selinuxuser2],
ipaenabledflag=[u'TRUE'],
),
summary=u'Modified SELinux User Map "%s"' % rule1,
value=rule1,
),
),
dict(
desc='Retrieve %r to verify update' % rule1,
command=('selinuxusermap_show', [rule1], {}),
expected=dict(
value=rule1,
result=dict(
cn=[rule1],
ipaselinuxuser=[selinuxuser2],
ipaenabledflag=[u'TRUE'],
dn=fuzzy_selinuxusermapdn,
),
summary=None,
),
),
dict(
desc='Search for rule %r' % rule1,
command=('selinuxusermap_find', [], dict(cn=rule1)),
expected=dict(
count=1,
truncated=False,
result=[
dict(
cn=[rule1],
ipaselinuxuser=[selinuxuser2],
ipaenabledflag=[u'TRUE'],
dn=fuzzy_selinuxusermapdn,
),
],
summary=u'1 SELinux User Map matched',
),
),
###############
# Create additional entries needed for testing
dict(
desc='Create %r' % user1,
command=(
'user_add', [], dict(givenname=u'Test', sn=u'User1')
),
expected=dict(
value=user1,
summary=u'Added user "%s"' % user1,
result=get_user_result(user1, u'Test', u'User1', 'add'),
),
),
dict(
desc='Create group %r' % group1,
command=(
'group_add', [group1], dict(description=u'Test desc 1')
),
expected=dict(
value=group1,
summary=u'Added group "%s"' % group1,
result=dict(
cn=[group1],
description=[u'Test desc 1'],
gidnumber=[fuzzy_digits],
objectclass=objectclasses.group + [u'posixgroup'],
ipauniqueid=[fuzzy_uuid],
dn=DN(('cn', group1), ('cn', 'groups'), ('cn', 'accounts'),
api.env.basedn),
),
),
),
dict(
desc='Add member %r to %r' % (user1, group1),
command=(
'group_add_member', [group1], dict(user=user1)
),
expected=dict(
completed=1,
failed=dict(
member=dict(
group=tuple(),
user=tuple(),
),
),
result={
'dn': DN(('cn', group1), ('cn', 'groups'),
('cn', 'accounts'), api.env.basedn),
'member_user': (user1,),
'gidnumber': [fuzzy_digits],
'cn': [group1],
'description': [u'Test desc 1'],
},
),
),
dict(
desc='Create host %r' % host1,
command=('host_add', [host1],
dict(
description=u'Test host 1',
l=u'Undisclosed location 1',
force=True,
),
),
expected=dict(
value=host1,
summary=u'Added host "%s"' % host1,
result=dict(
dn=hostdn1,
fqdn=[host1],
description=[u'Test host 1'],
l=[u'Undisclosed location 1'],
krbprincipalname=[u'host/%s@%s' % (host1, api.env.realm)],
objectclass=objectclasses.host,
ipauniqueid=[fuzzy_uuid],
managedby_host=[host1],
has_keytab=False,
has_password=False,
),
),
),
dict(
desc='Create HBAC rule %r' % hbacrule1,
command=(
'hbacrule_add', [hbacrule1], {}
),
expected=dict(
value=hbacrule1,
summary=u'Added HBAC rule "%s"' % hbacrule1,
result=dict(
cn=[hbacrule1],
objectclass=objectclasses.hbacrule,
ipauniqueid=[fuzzy_uuid],
accessruletype=[u'allow'],
ipaenabledflag=[u'TRUE'],
dn=fuzzy_hbacruledn,
),
),
),
dict(
desc='Create HBAC rule %r' % hbacrule2,
command=(
'hbacrule_add', [hbacrule2], {}
),
expected=dict(
value=hbacrule2,
summary=u'Added HBAC rule "%s"' % hbacrule2,
result=dict(
cn=[hbacrule2],
objectclass=objectclasses.hbacrule,
ipauniqueid=[fuzzy_uuid],
accessruletype=[u'allow'],
ipaenabledflag=[u'TRUE'],
dn=fuzzy_hbacruledn,
),
),
),
###############
# Fill out rule with members and/or pointers to HBAC rules
dict(
desc='Add user to %r' % rule1,
command=('selinuxusermap_add_user', [rule1], dict(user=user1)),
expected=dict(
failed=dict(memberuser=dict(group=[], user=[])),
completed=1,
result=dict(
cn=[rule1],
ipaselinuxuser=[selinuxuser2],
ipaenabledflag=[u'TRUE'],
memberuser_user=[user1],
dn=fuzzy_selinuxusermapdn,
),
)
),
dict(
desc='Add non-existent user to %r' % rule1,
command=('selinuxusermap_add_user', [rule1],
dict(user=u'notfound')),
expected=dict(
failed=dict(
memberuser=dict(group=[],
user=[(u'notfound', u'no such entry')])
),
completed=0,
result=dict(
cn=[rule1],
ipaselinuxuser=[selinuxuser2],
ipaenabledflag=[u'TRUE'],
memberuser_user=[user1],
dn=fuzzy_selinuxusermapdn,
),
)
),
dict(
desc='Remove user from %r' % rule1,
command=('selinuxusermap_remove_user', [rule1], dict(user=user1)),
expected=dict(
failed=dict(memberuser=dict(group=[], user=[])),
completed=1,
result=dict(
cn=[rule1],
ipaselinuxuser=[selinuxuser2],
ipaenabledflag=[u'TRUE'],
dn=fuzzy_selinuxusermapdn,
),
)
),
dict(
desc='Remove non-existent user to %r' % rule1,
command=('selinuxusermap_remove_user', [rule1],
dict(user=u'notfound')),
expected=dict(
failed=dict(
memberuser=dict(group=[],
user=[(u'notfound', u'This entry is not a member')]
)
),
completed=0,
result=dict(
cn=[rule1],
ipaselinuxuser=[selinuxuser2],
ipaenabledflag=[u'TRUE'],
dn=fuzzy_selinuxusermapdn,
),
)
),
dict(
desc='Add group to %r' % rule1,
command=('selinuxusermap_add_user', [rule1], dict(group=group1)),
expected=dict(
failed=dict(memberuser=dict(group=[], user=[])),
completed=1,
result=dict(
cn=[rule1],
ipaselinuxuser=[selinuxuser2],
ipaenabledflag=[u'TRUE'],
memberuser_group=[group1],
dn=fuzzy_selinuxusermapdn,
),
)
),
dict(
desc='Add host to %r' % rule1,
command=('selinuxusermap_add_host', [rule1], dict(host=host1)),
expected=dict(
failed=dict(memberhost=dict(hostgroup=[], host=[])),
completed=1,
result=dict(
cn=[rule1],
ipaselinuxuser=[selinuxuser2],
ipaenabledflag=[u'TRUE'],
memberhost_host=[host1],
memberuser_group=[group1],
dn=fuzzy_selinuxusermapdn,
),
)
),
###############
# Test enabling and disabling
dict(
desc='Disable %r' % rule1,
command=('selinuxusermap_disable', [rule1], {}),
expected=dict(
result=True,
value=rule1,
summary=u'Disabled SELinux User Map "%s"' % rule1,
)
),
dict(
desc='Disable %r again' % rule1,
command=('selinuxusermap_disable', [rule1], {}),
expected=errors.AlreadyInactive(),
),
dict(
desc='Enable %r' % rule1,
command=('selinuxusermap_enable', [rule1], {}),
expected=dict(
result=True,
value=rule1,
summary=u'Enabled SELinux User Map "%s"' % rule1,
)
),
dict(
desc='Re-enable %r again' % rule1,
command=('selinuxusermap_enable', [rule1], {}),
expected=errors.AlreadyActive(),
),
# Point to an HBAC Rule
dict(
desc='Add an HBAC rule to %r that has other members' % rule1,
command=(
'selinuxusermap_mod', [rule1], dict(seealso=hbacrule1)
),
expected=errors.MutuallyExclusiveError(
reason=u'HBAC rule and local members cannot both be set'),
),
dict(
desc='Remove host from %r' % rule1,
command=('selinuxusermap_remove_host', [rule1], dict(host=host1)),
expected=dict(
failed=dict(memberhost=dict(hostgroup=[], host=[])),
completed=1,
result=dict(
cn=[rule1],
ipaselinuxuser=[selinuxuser2],
ipaenabledflag=[u'TRUE'],
memberuser_group=[group1],
dn=fuzzy_selinuxusermapdn,
),
)
),
dict(
desc='Remove group from %r' % rule1,
command=('selinuxusermap_remove_user', [rule1],
dict(group=group1)),
expected=dict(
failed=dict(memberuser=dict(group=[], user=[])),
completed=1,
result=dict(
cn=[rule1],
ipaselinuxuser=[selinuxuser2],
ipaenabledflag=[u'TRUE'],
dn=fuzzy_selinuxusermapdn,
),
)
),
dict(
desc='Add non-existent HBAC rule to %r' % rule1,
command=(
'selinuxusermap_mod', [rule1], dict(seealso=u'notfound')
),
expected=errors.NotFound(
reason=u'HBAC rule notfound not found'),
),
dict(
desc='Add an HBAC rule to %r' % rule1,
command=(
'selinuxusermap_mod', [rule1], dict(seealso=hbacrule1)
),
expected=dict(
result=dict(
cn=[rule1],
ipaselinuxuser=[selinuxuser2],
ipaenabledflag=[u'TRUE'],
seealso=hbacrule1,
),
summary=u'Modified SELinux User Map "%s"' % rule1,
value=rule1,
),
),
dict(
desc='Add user to %r that has HBAC' % rule1,
command=('selinuxusermap_add_user', [rule1], dict(user=user1)),
expected=errors.MutuallyExclusiveError(
reason=u'HBAC rule and local members cannot both be set'),
),
dict(
desc='Add host to %r that has HBAC' % rule1,
command=('selinuxusermap_add_host', [rule1], dict(host=host1)),
expected=errors.MutuallyExclusiveError(
reason=u'HBAC rule and local members cannot both be set'),
),
dict(
desc='Try to delete HBAC rule pointed to by %r' % rule1,
command=('hbacrule_del', [hbacrule1], {}),
expected=errors.DependentEntry(key=hbacrule1,
label=u'SELinux User Map', dependent=rule1)
),
# This tests selinuxusermap-find --hbacrule=<foo> returns an
# exact match
dict(
desc='Try to delete similarly named HBAC rule %r' % hbacrule2,
command=('hbacrule_del', [hbacrule2], {}),
expected=dict(
result=dict(failed=[]),
value=[hbacrule2],
summary=u'Deleted HBAC rule "%s"' % hbacrule2,
)
),
# Test clean up
dict(
desc='Delete %r' % rule1,
command=('selinuxusermap_del', [rule1], {}),
expected=dict(
result=dict(failed=[]),
value=[rule1],
summary=u'Deleted SELinux User Map "%s"' % rule1,
)
),
dict(
desc='Try to delete non-existent %r' % rule1,
command=('selinuxusermap_del', [rule1], {}),
expected=errors.NotFound(
reason=u'%s: SELinux User Map rule not found' % rule1),
),
# Some negative tests
dict(
desc='Create rule with unknown user %r' % rule1,
command=(
'selinuxusermap_add', [rule1],
dict(ipaselinuxuser=u'notfound:s0:c0')
),
expected=errors.NotFound(reason=u'SELinux user notfound:s0:c0 ' +
u'not found in ordering list (in config)'),
),
dict(
desc='Create rule with invalid user bad+user',
command=(
'selinuxusermap_add', [rule1], dict(ipaselinuxuser=u'bad+user')
),
expected=errors.ValidationError(name='selinuxuser',
error=u'Invalid SELinux user name, only a-Z and _ are allowed'
),
),
dict(
desc='Create rule with invalid MCS xguest_u:s999',
command=(
'selinuxusermap_add', [rule1],
dict(ipaselinuxuser=u'xguest_u:s999')
),
expected=errors.ValidationError(name='selinuxuser',
error=u'Invalid MLS value, must match s[0-15](-s[0-15])'),
),
dict(
desc='Create rule with invalid MLS xguest_u:s0:p88',
command=(
'selinuxusermap_add', [rule1],
dict(ipaselinuxuser=u'xguest_u:s0:p88')
),
expected=errors.ValidationError(name='selinuxuser',
error=u'Invalid MCS value, must match c[0-1023].c[0-1023] ' +
u'and/or c[0-1023]-c[0-c0123]'),
),
dict(
desc='Create rule with invalid MLS xguest_u:s0:c0.c1028',
command=(
'selinuxusermap_add', [rule1],
dict(ipaselinuxuser=u'xguest_u:s0-s0:c0.c1028')
),
expected=errors.ValidationError(name='selinuxuser',
error=u'Invalid MCS value, must match c[0-1023].c[0-1023] ' +
u'and/or c[0-1023]-c[0-c0123]'),
),
dict(
desc='Create rule with invalid user via setattr',
command=(
'selinuxusermap_mod', [rule1],
dict(setattr=u'ipaselinuxuser=deny')
),
expected=errors.ValidationError(name='ipaselinuxuser',
error=u'Invalid MLS value, must match s[0-15](-s[0-15])'),
),
dict(
desc='Create rule with both --hbacrule and --usercat set',
command=(
'selinuxusermap_add', [rule1],
dict(ipaselinuxuser=selinuxuser1,
seealso=hbacrule1,
usercategory=u'all')
),
expected=errors.MutuallyExclusiveError(
reason=u'HBAC rule and local members cannot both be set'),
),
dict(
desc='Create rule with both --hbacrule and --hostcat set',
command=(
'selinuxusermap_add', [rule1],
dict(ipaselinuxuser=selinuxuser1,
seealso=hbacrule1,
hostcategory=u'all')
),
expected=errors.MutuallyExclusiveError(
reason=u'HBAC rule and local members cannot both be set'),
),
dict(
desc='Create rule with both --hbacrule '
'and --usercat set via setattr',
command=(
'selinuxusermap_add', [rule1],
dict(ipaselinuxuser=selinuxuser1,
seealso=hbacrule1,
setattr=u'usercategory=all')
),
expected=errors.MutuallyExclusiveError(
reason=u'HBAC rule and local members cannot both be set'),
),
dict(
desc='Create rule with both --hbacrule '
'and --hostcat set via setattr',
command=(
'selinuxusermap_add', [rule1],
dict(ipaselinuxuser=selinuxuser1,
seealso=hbacrule1,
setattr=u'hostcategory=all')
),
expected=errors.MutuallyExclusiveError(
reason=u'HBAC rule and local members cannot both be set'),
),
dict(
desc='Create rule %r with --hbacrule' % rule1,
command=(
'selinuxusermap_add', [rule1],
dict(ipaselinuxuser=selinuxuser1, seealso=hbacrule1)
),
expected=dict(
value=rule1,
summary=u'Added SELinux User Map "%s"' % rule1,
result=dict(
cn=[rule1],
ipaselinuxuser=[selinuxuser1],
objectclass=objectclasses.selinuxusermap,
ipauniqueid=[fuzzy_uuid],
ipaenabledflag=[u'TRUE'],
dn=fuzzy_selinuxusermapdn,
seealso=hbacrule1
),
),
),
dict(
desc='Add an --usercat to %r that has HBAC set' % rule1,
command=(
'selinuxusermap_mod', [rule1], dict(usercategory=u'all')
),
expected=errors.MutuallyExclusiveError(
reason=u'HBAC rule and local members cannot both be set'),
),
dict(
desc='Add an --hostcat to %r that has HBAC set' % rule1,
command=(
'selinuxusermap_mod', [rule1], dict(hostcategory=u'all')
),
expected=errors.MutuallyExclusiveError(
reason=u'HBAC rule and local members cannot both be set'),
),
dict(
desc='Add an usercat via setattr to %r that has HBAC set' % rule1,
command=(
'selinuxusermap_mod', [rule1],
dict(setattr=u'usercategory=all')
),
expected=errors.MutuallyExclusiveError(
reason=u'HBAC rule and local members cannot both be set'),
),
dict(
desc='Add an hostcat via setattr to %r that has HBAC set' % rule1,
command=(
'selinuxusermap_mod', [rule1],
dict(setattr=u'hostcategory=all')
),
expected=errors.MutuallyExclusiveError(
reason=u'HBAC rule and local members cannot both be set'),
),
dict(
desc='Delete %r' % rule1,
command=('selinuxusermap_del', [rule1], {}),
expected=dict(
result=dict(failed=[]),
value=[rule1],
summary=u'Deleted SELinux User Map "%s"' % rule1,
)
),
dict(
desc='Create rule %r with usercat and hostcat set' % rule1,
command=(
'selinuxusermap_add', [rule1],
dict(ipaselinuxuser=selinuxuser1,
usercategory=u'all',
hostcategory=u'all')
),
expected=dict(
value=rule1,
summary=u'Added SELinux User Map "%s"' % rule1,
result=dict(
cn=[rule1],
ipaselinuxuser=[selinuxuser1],
objectclass=objectclasses.selinuxusermap,
ipauniqueid=[fuzzy_uuid],
ipaenabledflag=[u'TRUE'],
dn=fuzzy_selinuxusermapdn,
usercategory=[u'all'],
hostcategory=[u'all']
),
),
),
dict(
desc='Add HBAC rule to %r that has usercat and hostcat' % rule1,
command=(
'selinuxusermap_mod', [rule1], dict(seealso=hbacrule1)
),
expected=errors.MutuallyExclusiveError(
reason=u'HBAC rule and local members cannot both be set'),
),
dict(
desc='Delete %r' % rule1,
command=('selinuxusermap_del', [rule1], {}),
expected=dict(
result=dict(failed=[]),
value=[rule1],
summary=u'Deleted SELinux User Map "%s"' % rule1,
)
),
dict(
desc='Create rule %r' % rule1,
command=(
'selinuxusermap_add', [rule1],
dict(ipaselinuxuser=selinuxuser1)
),
expected=dict(
value=rule1,
summary=u'Added SELinux User Map "%s"' % rule1,
result=dict(
cn=[rule1],
ipaselinuxuser=[selinuxuser1],
objectclass=objectclasses.selinuxusermap,
ipauniqueid=[fuzzy_uuid],
ipaenabledflag=[u'TRUE'],
dn=fuzzy_selinuxusermapdn,
),
),
),
dict(
desc='Add HBAC rule, hostcat and usercat to %r' % rule1,
command=(
'selinuxusermap_mod', [rule1],
dict(seealso=hbacrule1,
usercategory=u'all',
hostcategory=u'all')
),
expected=errors.MutuallyExclusiveError(
reason=u'HBAC rule and local members cannot both be set'),
),
dict(
desc='Delete %r' % rule1,
command=('selinuxusermap_del', [rule1], {}),
expected=dict(
result=dict(failed=[]),
value=[rule1],
summary=u'Deleted SELinux User Map "%s"' % rule1,
)
),
dict(
desc='Create rule %r with '
'--setattr=seealso=<allow_all rule DN>' % rule1,
command=(
'selinuxusermap_add',
[rule1],
dict(ipaselinuxuser=selinuxuser1,
setattr=u'seealso=%s' % allow_all_rule_dn)
),
expected=dict(
value=rule1,
summary=u'Added SELinux User Map "%s"' % rule1,
result=dict(
cn=[rule1],
ipaselinuxuser=[selinuxuser1],
objectclass=objectclasses.selinuxusermap,
ipauniqueid=[fuzzy_uuid],
ipaenabledflag=[u'TRUE'],
dn=fuzzy_selinuxusermapdn,
seealso=u'allow_all',
),
),
),
dict(
desc='Delete %r' % rule1,
command=('selinuxusermap_del', [rule1], {}),
expected=dict(
result=dict(failed=[]),
value=[rule1],
summary=u'Deleted SELinux User Map "%s"' % rule1,
)
),
]
| gpl-3.0 |
havogt/serialbox2 | src/serialbox-python/sdb/sdbgui/popupaboutwidget.py | 2 | 2905 | #!/usr/bin/python3
# -*- coding: utf-8 -*-
##===-----------------------------------------------------------------------------*- Python -*-===##
##
## S E R I A L B O X
##
## This file is distributed under terms of BSD license.
## See LICENSE.txt for more information.
##
##===------------------------------------------------------------------------------------------===##
from PyQt5.QtCore import QT_VERSION_STR, Qt
from PyQt5.QtWidgets import QLabel, QVBoxLayout, QHBoxLayout, QPushButton, QSizePolicy
from sdbcore.logger import Logger
from sdbcore.version import Version
from sdbgui.pixmap import Pixmap
from sdbgui.popupwidget import PopupWidget
class PopupAboutWidget(PopupWidget):
    """Modal "About sdb" popup showing the logo plus version and license
    information for sdb and the libraries it depends on."""

    def __init__(self, parent):
        """Build the widget hierarchy and immediately display the popup."""
        super().__init__(parent)
        Logger.info("Showing about message box")
        self.setWindowTitle("About sdb")

        # Logo image, scaled to the popup's current geometry.
        logo = Pixmap("logo.png")
        scaled_logo = logo.scaled(self.geometry().height(),
                                  self.geometry().width(),
                                  Qt.KeepAspectRatio)
        self.__widget_label_image = QLabel()
        self.__widget_label_image.setPixmap(scaled_logo)

        # Version/license text, one entry per displayed line.
        about_lines = [
            "",
            "sdb (%s)" % Version().sdb_version(),
            "Serialbox (%s)" % Version().serialbox_version(),
            "numpy (%s)" % Version().numpy_version(),
            "matplotlib (%s)" % Version().matplotlib_version(),
            "PyQt5 (%s)" % QT_VERSION_STR,
            "IPython (%s)" % Version().ipython_version(),
            "",
            "Copyright (c) 2016-2017, Fabian Thuering",
            "",
            "All rights reserved.",
            "",
            "The program is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE "
            "WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.",
            "",
        ]
        self.__widget_label_about_txt = QLabel()
        self.__widget_label_about_txt.setText("\n".join(about_lines))
        self.__widget_label_about_txt.setWordWrap(True)

        # Top row: logo pushed to the left.
        image_row = QHBoxLayout()
        image_row.addWidget(self.__widget_label_image)
        image_row.addStretch(1)

        # Bottom row: close button pushed to the right.
        button_row = QHBoxLayout()
        button_row.addStretch(1)
        dismiss_button = QPushButton("Cancel")
        dismiss_button.clicked.connect(self.close)
        button_row.addWidget(dismiss_button)

        main_layout = QVBoxLayout()
        main_layout.addLayout(image_row)
        main_layout.addWidget(self.__widget_label_about_txt)
        main_layout.addLayout(button_row)

        self.setSizePolicy(QSizePolicy.Minimum, QSizePolicy.Minimum)
        self.setLayout(main_layout)
        self.show()

    def keyPressEvent(self, QKeyEvent):
        """Close the popup when Escape is pressed."""
        if QKeyEvent.key() == Qt.Key_Escape:
            Logger.info("Closing about message box")
            self.close()
| bsd-2-clause |
ossdemura/django-miniblog | src/Lib/site-packages/django/contrib/admindocs/views.py | 39 | 17893 | import inspect
import os
from importlib import import_module
from django.apps import apps
from django.conf import settings
from django.contrib import admin
from django.contrib.admin.views.decorators import staff_member_required
from django.contrib.admindocs import utils
from django.contrib.admindocs.utils import (
replace_named_groups, replace_unnamed_groups,
)
from django.core.exceptions import ImproperlyConfigured, ViewDoesNotExist
from django.db import models
from django.http import Http404
from django.template.engine import Engine
from django.urls import get_mod_func, get_resolver, get_urlconf, reverse
from django.utils import six
from django.utils.decorators import method_decorator
from django.utils.inspect import (
func_accepts_kwargs, func_accepts_var_args, func_has_no_args,
get_func_full_args,
)
from django.utils.translation import ugettext as _
from django.views.generic import TemplateView
# Exclude methods starting with these strings from documentation
MODEL_METHODS_EXCLUDE = ('_', 'add_', 'delete', 'save', 'set_')
class BaseAdminDocsView(TemplateView):
    """
    Base view for admindocs views.

    Restricts access to staff members and falls back to an error template
    when docutils is not installed.
    """
    @method_decorator(staff_member_required)
    def dispatch(self, request, *args, **kwargs):
        if utils.docutils_is_available:
            return super(BaseAdminDocsView, self).dispatch(request, *args, **kwargs)
        # Display an error message for people without docutils
        self.template_name = 'admin_doc/missing_docutils.html'
        return self.render_to_response(admin.site.each_context(request))

    def get_context_data(self, **kwargs):
        # Expose the admin index URL plus the standard admin context.
        kwargs['root_path'] = reverse('admin:index')
        kwargs.update(admin.site.each_context(self.request))
        return super(BaseAdminDocsView, self).get_context_data(**kwargs)
class BookmarkletsView(BaseAdminDocsView):
    """Page listing the admindocs bookmarklets."""
    template_name = 'admin_doc/bookmarklets.html'

    def get_context_data(self, **kwargs):
        context = super(BookmarkletsView, self).get_context_data(**kwargs)
        # Absolute URL of the admin index, for use inside the bookmarklets.
        context['admin_url'] = "%s://%s%s" % (
            self.request.scheme, self.request.get_host(), context['root_path'])
        return context
class TemplateTagIndexView(BaseAdminDocsView):
    """Index of every registered template tag, with parsed docstrings."""
    template_name = 'admin_doc/template_tag_index.html'

    def get_context_data(self, **kwargs):
        tags = []
        try:
            engine = Engine.get_default()
        except ImproperlyConfigured:
            # Non-trivial TEMPLATES settings aren't supported (#24125).
            pass
        else:
            # Built-in libraries first (with an empty module name), then the
            # application libraries in sorted order.
            libraries = [('', lib) for lib in engine.template_builtins]
            libraries += sorted(engine.template_libraries.items())
            for module_name, library in libraries:
                for tag_name, tag_func in library.tags.items():
                    title, body, metadata = utils.parse_docstring(tag_func.__doc__)
                    # Common docutils reference for everything parsed from
                    # this tag's docstring.
                    ref = _('tag:') + tag_name
                    if title:
                        title = utils.parse_rst(title, 'tag', ref)
                    if body:
                        body = utils.parse_rst(body, 'tag', ref)
                    for key in metadata:
                        metadata[key] = utils.parse_rst(metadata[key], 'tag', ref)
                    tags.append({
                        'name': tag_name,
                        'title': title,
                        'body': body,
                        'meta': metadata,
                        'library': module_name.split('.')[-1],
                    })
        kwargs.update({'tags': tags})
        return super(TemplateTagIndexView, self).get_context_data(**kwargs)
class TemplateFilterIndexView(BaseAdminDocsView):
    """Index of every registered template filter, with parsed docstrings."""
    template_name = 'admin_doc/template_filter_index.html'

    def get_context_data(self, **kwargs):
        filters = []
        try:
            engine = Engine.get_default()
        except ImproperlyConfigured:
            # Non-trivial TEMPLATES settings aren't supported (#24125).
            pass
        else:
            # Built-in libraries first (with an empty module name), then the
            # application libraries in sorted order.
            libraries = [('', lib) for lib in engine.template_builtins]
            libraries += sorted(engine.template_libraries.items())
            for module_name, library in libraries:
                for filter_name, filter_func in library.filters.items():
                    title, body, metadata = utils.parse_docstring(filter_func.__doc__)
                    # Common docutils reference for everything parsed from
                    # this filter's docstring.
                    ref = _('filter:') + filter_name
                    if title:
                        title = utils.parse_rst(title, 'filter', ref)
                    if body:
                        body = utils.parse_rst(body, 'filter', ref)
                    for key in metadata:
                        metadata[key] = utils.parse_rst(metadata[key], 'filter', ref)
                    filters.append({
                        'name': filter_name,
                        'title': title,
                        'body': body,
                        'meta': metadata,
                        'library': module_name.split('.')[-1],
                    })
        kwargs.update({'filters': filters})
        return super(TemplateFilterIndexView, self).get_context_data(**kwargs)
class ViewIndexView(BaseAdminDocsView):
    """Index of all views reachable from the root URLconf."""
    template_name = 'admin_doc/view_index.html'

    @staticmethod
    def _get_full_name(func):
        """Return the dotted path of *func*: module plus (qualified) name."""
        mod_name = func.__module__
        if six.PY3:
            return '%s.%s' % (mod_name, func.__qualname__)
        # PY2 does not support __qualname__
        return '%s.%s' % (mod_name,
                          getattr(func, '__name__', func.__class__.__name__))

    def get_context_data(self, **kwargs):
        urlconf = import_module(settings.ROOT_URLCONF)
        views = [
            {
                'full_name': self._get_full_name(func),
                'url': simplify_regex(regex),
                'url_name': ':'.join((namespace or []) + (name and [name] or [])),
                'namespace': ':'.join((namespace or [])),
                'name': name,
            }
            for func, regex, namespace, name
            in extract_views_from_urlpatterns(urlconf.urlpatterns)
        ]
        kwargs.update({'views': views})
        return super(ViewIndexView, self).get_context_data(**kwargs)
class ViewDetailView(BaseAdminDocsView):
    """Documentation page for a single view, looked up by dotted path."""
    template_name = 'admin_doc/view_detail.html'

    @staticmethod
    def _get_view_func(view):
        """Resolve the dotted path *view* to the view callable.

        Returns the callable when it can be imported, or None when the
        path cannot be resolved (see the AttributeError note below).
        Also implicitly returns None when the URL resolver does not
        recognize *view* as a registered callback.
        """
        urlconf = get_urlconf()
        if get_resolver(urlconf)._is_callback(view):
            mod, func = get_mod_func(view)
            try:
                # Separate the module and function, e.g.
                # 'mymodule.views.myview' -> 'mymodule.views', 'myview').
                return getattr(import_module(mod), func)
            except ImportError:
                # Import may fail because view contains a class name, e.g.
                # 'mymodule.views.ViewContainer.my_view', so mod takes the form
                # 'mymodule.views.ViewContainer'. Parse it again to separate
                # the module and class.
                mod, klass = get_mod_func(mod)
                return getattr(getattr(import_module(mod), klass), func)
            except AttributeError:
                # PY2 generates incorrect paths for views that are methods,
                # e.g. 'mymodule.views.ViewContainer.my_view' will be
                # listed as 'mymodule.views.my_view' because the class name
                # can't be detected. This causes an AttributeError when
                # trying to resolve the view.
                return None

    def get_context_data(self, **kwargs):
        """Parse the resolved view's docstring into title/body/metadata
        for the template; raise Http404 when the view can't be found."""
        view = self.kwargs['view']
        view_func = self._get_view_func(view)
        if view_func is None:
            raise Http404
        title, body, metadata = utils.parse_docstring(view_func.__doc__)
        if title:
            title = utils.parse_rst(title, 'view', _('view:') + view)
        if body:
            body = utils.parse_rst(body, 'view', _('view:') + view)
        for key in metadata:
            # NOTE(review): the 'model' default-role here differs from the
            # 'view' role used for title/body above — confirm intentional.
            metadata[key] = utils.parse_rst(metadata[key], 'model', _('view:') + view)
        kwargs.update({
            'name': view,
            'summary': title,
            'body': body,
            'meta': metadata,
        })
        return super(ViewDetailView, self).get_context_data(**kwargs)
class ModelIndexView(BaseAdminDocsView):
    """Index page listing the ``_meta`` of every installed model."""
    template_name = 'admin_doc/model_index.html'

    def get_context_data(self, **kwargs):
        kwargs.update({'models': [model._meta for model in apps.get_models()]})
        return super(ModelIndexView, self).get_context_data(**kwargs)
class ModelDetailView(BaseAdminDocsView):
    """Documentation page for a single model: its docstring, fields,
    argument-less methods, and related-object accessors."""
    template_name = 'admin_doc/model_detail.html'

    def get_context_data(self, **kwargs):
        """Build the context for the model-detail template.

        Reads ``app_label`` and ``model_name`` from ``self.kwargs``;
        raises Http404 when either cannot be resolved.
        """
        model_name = self.kwargs['model_name']
        # Get the model class.
        try:
            app_config = apps.get_app_config(self.kwargs['app_label'])
        except LookupError:
            raise Http404(_("App %(app_label)r not found") % self.kwargs)
        try:
            model = app_config.get_model(model_name)
        except LookupError:
            raise Http404(_("Model %(model_name)r not found in app %(app_label)r") % self.kwargs)

        opts = model._meta

        title, body, metadata = utils.parse_docstring(model.__doc__)
        if title:
            title = utils.parse_rst(title, 'model', _('model:') + model_name)
        if body:
            body = utils.parse_rst(body, 'model', _('model:') + model_name)

        # Gather fields/field descriptions.
        fields = []
        for field in opts.fields:
            # ForeignKey is a special case since the field will actually be a
            # descriptor that returns the other object
            if isinstance(field, models.ForeignKey):
                data_type = field.remote_field.model.__name__
                app_label = field.remote_field.model._meta.app_label
                verbose = utils.parse_rst(
                    (_("the related `%(app_label)s.%(data_type)s` object") % {
                        'app_label': app_label, 'data_type': data_type,
                    }),
                    'model',
                    _('model:') + data_type,
                )
            else:
                data_type = get_readable_field_data_type(field)
                verbose = field.verbose_name
            fields.append({
                'name': field.name,
                'data_type': data_type,
                'verbose': verbose or '',
                'help_text': field.help_text,
            })

        # Gather many-to-many fields. Each m2m manager is documented via its
        # template-accessible `.all` and `.count` attributes.
        for field in opts.many_to_many:
            data_type = field.remote_field.model.__name__
            app_label = field.remote_field.model._meta.app_label
            verbose = _("related `%(app_label)s.%(object_name)s` objects") % {
                'app_label': app_label,
                'object_name': data_type,
            }
            fields.append({
                'name': "%s.all" % field.name,
                'data_type': 'List',
                'verbose': utils.parse_rst(_("all %s") % verbose, 'model', _('model:') + opts.model_name),
            })
            fields.append({
                'name': "%s.count" % field.name,
                'data_type': 'Integer',
                'verbose': utils.parse_rst(_("number of %s") % verbose, 'model', _('model:') + opts.model_name),
            })

        methods = []
        # Gather model methods.
        for func_name, func in model.__dict__.items():
            if inspect.isfunction(func):
                # Skip private helpers and the standard mutating methods.
                # str.startswith accepts a tuple of prefixes; this replaces
                # the original raise/except StopIteration "labeled break"
                # hack, which is fragile (fatal inside a generator under
                # PEP 479) and harder to read.
                if func_name.startswith(MODEL_METHODS_EXCLUDE):
                    continue
                verbose = func.__doc__
                if verbose:
                    verbose = utils.parse_rst(utils.trim_docstring(verbose), 'model', _('model:') + opts.model_name)
                # If a method has no arguments, show it as a 'field', otherwise
                # as a 'method with arguments'.
                if func_has_no_args(func) and not func_accepts_kwargs(func) and not func_accepts_var_args(func):
                    fields.append({
                        'name': func_name,
                        'data_type': get_return_data_type(func_name),
                        'verbose': verbose or '',
                    })
                else:
                    arguments = get_func_full_args(func)
                    # Join arguments with ', ' and in case of default value,
                    # join it with '='. Use repr() so that strings will be
                    # correctly displayed.
                    print_arguments = ', '.join([
                        '='.join(list(arg_el[:1]) + [repr(el) for el in arg_el[1:]])
                        for arg_el in arguments
                    ])
                    methods.append({
                        'name': func_name,
                        'arguments': print_arguments,
                        'verbose': verbose or '',
                    })

        # Gather related objects (reverse accessors), again exposed through
        # their `.all` and `.count` attributes.
        for rel in opts.related_objects:
            verbose = _("related `%(app_label)s.%(object_name)s` objects") % {
                'app_label': rel.related_model._meta.app_label,
                'object_name': rel.related_model._meta.object_name,
            }
            accessor = rel.get_accessor_name()
            fields.append({
                'name': "%s.all" % accessor,
                'data_type': 'List',
                'verbose': utils.parse_rst(_("all %s") % verbose, 'model', _('model:') + opts.model_name),
            })
            fields.append({
                'name': "%s.count" % accessor,
                'data_type': 'Integer',
                'verbose': utils.parse_rst(_("number of %s") % verbose, 'model', _('model:') + opts.model_name),
            })

        kwargs.update({
            'name': '%s.%s' % (opts.app_label, opts.object_name),
            'summary': title,
            'description': body,
            'fields': fields,
            'methods': methods,
        })
        return super(ModelDetailView, self).get_context_data(**kwargs)
class TemplateDetailView(BaseAdminDocsView):
    """Documentation page showing, for each configured template directory,
    whether the requested template exists there and its contents."""
    template_name = 'admin_doc/template_detail.html'

    def get_context_data(self, **kwargs):
        """Build one entry per template directory for the given template
        path (``self.kwargs['template']``)."""
        template = self.kwargs['template']
        templates = []
        try:
            default_engine = Engine.get_default()
        except ImproperlyConfigured:
            # Non-trivial TEMPLATES settings aren't supported (#24125).
            pass
        else:
            # This doesn't account for template loaders (#24128).
            for index, directory in enumerate(default_engine.dirs):
                template_file = os.path.join(directory, template)
                # Check existence exactly once so the reported 'exists' flag
                # and the contents we read cannot disagree (the original
                # code stat()ed the file twice, leaving a window for the two
                # to diverge).
                exists = os.path.exists(template_file)
                if exists:
                    with open(template_file) as f:
                        template_contents = f.read()
                else:
                    template_contents = ''
                templates.append({
                    'file': template_file,
                    'exists': exists,
                    'contents': template_contents,
                    'order': index,
                })
        kwargs.update({
            'name': template,
            'templates': templates,
        })
        return super(TemplateDetailView, self).get_context_data(**kwargs)
####################
# Helper functions #
####################
def get_return_data_type(func_name):
    """Guess a display data type from an accessor-style function name.

    ``get_*_list`` names map to 'List', ``get_*_count`` to 'Integer';
    everything else yields an empty string.
    """
    if func_name.startswith('get_'):
        for suffix, data_type in (('_list', 'List'), ('_count', 'Integer')):
            if func_name.endswith(suffix):
                return data_type
    return ''
def get_readable_field_data_type(field):
    """Return a human-readable description of *field*'s type, if it exists.

    Field descriptions may embed ``%(attr)s`` placeholders, which are
    interpolated against the values of ``field.__dict__`` before being
    returned.
    """
    template = field.description
    return template % field.__dict__
def extract_views_from_urlpatterns(urlpatterns, base='', namespace=None):
    """
    Return a list of views from a list of urlpatterns.

    Each object in the returned list is a two-tuple: (view_func, regex)
    """
    collected = []
    for pattern in urlpatterns:
        if hasattr(pattern, 'url_patterns'):
            # A resolver: recurse into its sub-patterns, accumulating the
            # regex prefix and the namespace chain.
            try:
                sub_patterns = pattern.url_patterns
            except ImportError:
                continue
            sub_namespace = (namespace or []) + (pattern.namespace and [pattern.namespace] or [])
            collected.extend(extract_views_from_urlpatterns(
                sub_patterns,
                base + pattern.regex.pattern,
                sub_namespace,
            ))
        elif hasattr(pattern, 'callback'):
            # A leaf pattern with an attached view callable.
            try:
                collected.append((pattern.callback, base + pattern.regex.pattern,
                                  namespace, pattern.name))
            except ViewDoesNotExist:
                continue
        else:
            raise TypeError(_("%s does not appear to be a urlpattern object") % pattern)
    return collected
def simplify_regex(pattern):
    r"""
    Clean up urlpattern regexes into something more readable by humans. For
    example, turn "^(?P<sport_slug>\w+)/athletes/(?P<athlete_slug>\w+)/$"
    into "/<sport_slug>/athletes/<athlete_slug>/".
    """
    # Collapse named groups first, then unnamed ones, into <placeholder> form.
    pattern = replace_unnamed_groups(replace_named_groups(pattern))
    # clean up any outstanding regex-y characters.
    for token in ('^', '$', '?'):
        pattern = pattern.replace(token, '')
    return pattern if pattern.startswith('/') else '/' + pattern
| mit |
akolpakov/django-media-manager | example/example/wsgi.py | 111 | 1422 | """
WSGI config for example project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
# We defer to a DJANGO_SETTINGS_MODULE already in the environment. This breaks
# if running multiple sites in the same mod_wsgi process. To fix this, use
# mod_wsgi daemon mode with each site in its own daemon process, or use
# os.environ["DJANGO_SETTINGS_MODULE"] = "example.settings"
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "example.settings")
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
| bsd-3-clause |
Moriadry/tensorflow | tensorflow/python/ops/distributions/beta.py | 73 | 13214 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""The Beta distribution class."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import check_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn
from tensorflow.python.ops import random_ops
from tensorflow.python.ops.distributions import distribution
from tensorflow.python.ops.distributions import kullback_leibler
from tensorflow.python.ops.distributions import util as distribution_util
__all__ = [
"Beta",
"BetaWithSoftplusConcentration",
]
_beta_sample_note = """Note: `x` must have dtype `self.dtype` and be in
`[0, 1].` It must have a shape compatible with `self.batch_shape()`."""
class Beta(distribution.Distribution):
  """Beta distribution.

  The Beta distribution is defined over the `(0, 1)` interval using parameters
  `concentration1` (aka "alpha") and `concentration0` (aka "beta").

  #### Mathematical Details

  The probability density function (pdf) is,

  ```none
  pdf(x; alpha, beta) = x**(alpha - 1) (1 - x)**(beta - 1) / Z
  Z = Gamma(alpha) Gamma(beta) / Gamma(alpha + beta)
  ```

  where:

  * `concentration1 = alpha`,
  * `concentration0 = beta`,
  * `Z` is the normalization constant, and,
  * `Gamma` is the [gamma function](
    https://en.wikipedia.org/wiki/Gamma_function).

  The concentration parameters represent mean total counts of a `1` or a `0`,
  i.e.,

  ```none
  concentration1 = alpha = mean * total_concentration
  concentration0 = beta = (1. - mean) * total_concentration
  ```

  where `mean` in `(0, 1)` and `total_concentration` is a positive real number
  representing a mean `total_count = concentration1 + concentration0`.

  Distribution parameters are automatically broadcast in all functions; see
  examples for details.

  #### Examples

  ```python
  # Create a batch of three Beta distributions.
  alpha = [1, 2, 3]
  beta = [1, 2, 3]
  dist = Beta(alpha, beta)

  dist.sample([4, 5])  # Shape [4, 5, 3]

  # `x` has three batch entries, each with two samples.
  x = [[.1, .4, .5],
       [.2, .3, .5]]
  # Calculate the probability of each pair of samples under the corresponding
  # distribution in `dist`.
  dist.prob(x)         # Shape [2, 3]
  ```

  ```python
  # Create batch_shape=[2, 3] via parameter broadcast:
  alpha = [[1.], [2]]      # Shape [2, 1]
  beta = [3., 4, 5]        # Shape [3]
  dist = Beta(alpha, beta)

  # alpha broadcast as: [[1., 1, 1,],
  #                      [2, 2, 2]]
  # beta broadcast as:  [[3., 4, 5],
  #                      [3, 4, 5]]
  # batch_Shape [2, 3]
  dist.sample([4, 5])  # Shape [4, 5, 2, 3]

  x = [.2, .3, .5]
  # x will be broadcast as [[.2, .3, .5],
  #                         [.2, .3, .5]],
  # thus matching batch_shape [2, 3].
  dist.prob(x)         # Shape [2, 3]
  ```

  """

  def __init__(self,
               concentration1=None,
               concentration0=None,
               validate_args=False,
               allow_nan_stats=True,
               name="Beta"):
    """Initialize a batch of Beta distributions.

    Args:
      concentration1: Positive floating-point `Tensor` indicating mean
        number of successes; aka "alpha". Implies `self.dtype` and
        `self.batch_shape`, i.e.,
        `concentration1.shape = [N1, N2, ..., Nm] = self.batch_shape`.
      concentration0: Positive floating-point `Tensor` indicating mean
        number of failures; aka "beta". Otherwise has same semantics as
        `concentration1`.
      validate_args: Python `bool`, default `False`. When `True` distribution
        parameters are checked for validity despite possibly degrading runtime
        performance. When `False` invalid inputs may silently render incorrect
        outputs.
      allow_nan_stats: Python `bool`, default `True`. When `True`, statistics
        (e.g., mean, mode, variance) use the value "`NaN`" to indicate the
        result is undefined. When `False`, an exception is raised if one or
        more of the statistic's batch members are undefined.
      name: Python `str` name prefixed to Ops created by this class.
    """
    # `locals()` must be captured first, before any other local variable is
    # bound, so that `parameters` reflects exactly the constructor arguments.
    parameters = locals()
    with ops.name_scope(name, values=[concentration1, concentration0]):
      # When `validate_args` is True, each parameter tensor is gated on a
      # positivity assertion; otherwise it is used as-is.
      self._concentration1 = self._maybe_assert_valid_concentration(
          ops.convert_to_tensor(concentration1, name="concentration1"),
          validate_args)
      self._concentration0 = self._maybe_assert_valid_concentration(
          ops.convert_to_tensor(concentration0, name="concentration0"),
          validate_args)
      # Both parameters must share one float dtype; this also fixes
      # `self.dtype` via `total_concentration` below.
      check_ops.assert_same_float_dtype([
          self._concentration1, self._concentration0])
      # alpha + beta; its shape is the broadcast batch shape of the two
      # parameters and is reused by the shape/statistics methods.
      self._total_concentration = self._concentration1 + self._concentration0
    super(Beta, self).__init__(
        dtype=self._total_concentration.dtype,
        validate_args=validate_args,
        allow_nan_stats=allow_nan_stats,
        # Sampling is via two Gamma draws (see `_sample_n`), which is not a
        # reparameterized path.
        reparameterization_type=distribution.NOT_REPARAMETERIZED,
        parameters=parameters,
        graph_parents=[self._concentration1,
                       self._concentration0,
                       self._total_concentration],
        name=name)

  @staticmethod
  def _param_shapes(sample_shape):
    # Both concentration parameters have the same (batch) shape.
    return dict(zip(
        ["concentration1", "concentration0"],
        [ops.convert_to_tensor(sample_shape, dtype=dtypes.int32)] * 2))

  @property
  def concentration1(self):
    """Concentration parameter associated with a `1` outcome."""
    return self._concentration1

  @property
  def concentration0(self):
    """Concentration parameter associated with a `0` outcome."""
    return self._concentration0

  @property
  def total_concentration(self):
    """Sum of concentration parameters."""
    return self._total_concentration

  def _batch_shape_tensor(self):
    # `total_concentration` already carries the broadcast of both parameters.
    return array_ops.shape(self.total_concentration)

  def _batch_shape(self):
    return self.total_concentration.get_shape()

  def _event_shape_tensor(self):
    # Scalar event: each sample is a single number in (0, 1).
    return constant_op.constant([], dtype=dtypes.int32)

  def _event_shape(self):
    return tensor_shape.scalar()

  def _sample_n(self, n, seed=None):
    # Broadcast each parameter up to the full batch shape so the two Gamma
    # draws below have matching shapes.
    expanded_concentration1 = array_ops.ones_like(
        self.total_concentration, dtype=self.dtype) * self.concentration1
    expanded_concentration0 = array_ops.ones_like(
        self.total_concentration, dtype=self.dtype) * self.concentration0
    # Beta via the Gamma-ratio construction: if X ~ Gamma(alpha, 1) and
    # Y ~ Gamma(beta, 1) are independent, then X / (X + Y) ~ Beta(alpha, beta).
    gamma1_sample = random_ops.random_gamma(
        shape=[n],
        alpha=expanded_concentration1,
        dtype=self.dtype,
        seed=seed)
    gamma2_sample = random_ops.random_gamma(
        shape=[n],
        alpha=expanded_concentration0,
        # Derive a distinct seed so the two Gamma streams are independent.
        seed=distribution_util.gen_new_seed(seed, "beta"))
    beta_sample = gamma1_sample / (gamma1_sample + gamma2_sample)
    return beta_sample

  @distribution_util.AppendDocstring(_beta_sample_note)
  def _log_prob(self, x):
    return self._log_unnormalized_prob(x) - self._log_normalization()

  @distribution_util.AppendDocstring(_beta_sample_note)
  def _prob(self, x):
    return math_ops.exp(self._log_prob(x))

  @distribution_util.AppendDocstring(_beta_sample_note)
  def _log_cdf(self, x):
    return math_ops.log(self._cdf(x))

  @distribution_util.AppendDocstring(_beta_sample_note)
  def _cdf(self, x):
    # Regularized incomplete beta function I_x(alpha, beta).
    return math_ops.betainc(self.concentration1, self.concentration0, x)

  def _log_unnormalized_prob(self, x):
    x = self._maybe_assert_valid_sample(x)
    # (alpha - 1) log(x) + (beta - 1) log(1 - x); `log1p(-x)` is used for
    # accuracy when x is near 0.
    return ((self.concentration1 - 1.) * math_ops.log(x)
            + (self.concentration0 - 1.) * math_ops.log1p(-x))

  def _log_normalization(self):
    # log Beta(alpha, beta) = lgamma(alpha) + lgamma(beta) -
    # lgamma(alpha + beta).
    return (math_ops.lgamma(self.concentration1)
            + math_ops.lgamma(self.concentration0)
            - math_ops.lgamma(self.total_concentration))

  def _entropy(self):
    # H = log B(a, b) - (a-1) digamma(a) - (b-1) digamma(b)
    #     + (a+b-2) digamma(a+b)
    return (
        self._log_normalization()
        - (self.concentration1 - 1.) * math_ops.digamma(self.concentration1)
        - (self.concentration0 - 1.) * math_ops.digamma(self.concentration0)
        + ((self.total_concentration - 2.) *
           math_ops.digamma(self.total_concentration)))

  def _mean(self):
    # alpha / (alpha + beta)
    return self._concentration1 / self._total_concentration

  def _variance(self):
    # mean (1 - mean) / (1 + alpha + beta)
    return self._mean() * (1. - self._mean()) / (1. + self.total_concentration)

  @distribution_util.AppendDocstring(
      """Note: The mode is undefined when `concentration1 <= 1` or
      `concentration0 <= 1`. If `self.allow_nan_stats` is `True`, `NaN`
      is used for undefined modes. If `self.allow_nan_stats` is `False` an
      exception is raised when one or more modes are undefined.""")
  def _mode(self):
    # mode = (alpha - 1) / (alpha + beta - 2), valid only when alpha > 1
    # and beta > 1.
    mode = (self.concentration1 - 1.) / (self.total_concentration - 2.)
    if self.allow_nan_stats:
      # Mask undefined batch members with NaN instead of raising.
      nan = array_ops.fill(
          self.batch_shape_tensor(),
          np.array(np.nan, dtype=self.dtype.as_numpy_dtype()),
          name="nan")
      is_defined = math_ops.logical_and(self.concentration1 > 1.,
                                        self.concentration0 > 1.)
      return array_ops.where(is_defined, mode, nan)
    # allow_nan_stats=False: fail fast at runtime if any mode is undefined.
    return control_flow_ops.with_dependencies([
        check_ops.assert_less(
            array_ops.ones([], dtype=self.dtype),
            self.concentration1,
            message="Mode undefined for concentration1 <= 1."),
        check_ops.assert_less(
            array_ops.ones([], dtype=self.dtype),
            self.concentration0,
            message="Mode undefined for concentration0 <= 1.")
    ], mode)

  def _maybe_assert_valid_concentration(self, concentration, validate_args):
    """Checks the validity of a concentration parameter."""
    if not validate_args:
      return concentration
    return control_flow_ops.with_dependencies([
        check_ops.assert_positive(
            concentration,
            message="Concentration parameter must be positive."),
    ], concentration)

  def _maybe_assert_valid_sample(self, x):
    """Checks the validity of a sample."""
    if not self.validate_args:
      return x
    # NOTE(review): both checks are strict inequalities, so valid samples lie
    # in the open interval (0, 1), despite the "no larger than" wording.
    return control_flow_ops.with_dependencies([
        check_ops.assert_positive(
            x,
            message="sample must be positive"),
        check_ops.assert_less(
            x, array_ops.ones([], self.dtype),
            message="sample must be no larger than `1`."),
    ], x)
class BetaWithSoftplusConcentration(Beta):
  """Beta with softplus transform of `concentration1` and `concentration0`.

  The `softplus` transform (`log(1 + exp(x))`) maps any real-valued input to
  a strictly positive value, so callers may pass unconstrained tensors for
  both concentration parameters.
  """

  def __init__(self,
               concentration1,
               concentration0,
               validate_args=False,
               allow_nan_stats=True,
               name="BetaWithSoftplusConcentration"):
    """Initialize a batch of Beta distributions from unconstrained inputs.

    Args:
      concentration1: Real-valued `Tensor`; `softplus(concentration1)` is
        used as the base class's "alpha" parameter.
      concentration0: Real-valued `Tensor`; `softplus(concentration0)` is
        used as the base class's "beta" parameter.
      validate_args: Python `bool`, passed through to `Beta`.
      allow_nan_stats: Python `bool`, passed through to `Beta`.
      name: Python `str` name prefixed to Ops created by this class.
    """
    # Capture constructor arguments before any other local is created.
    parameters = locals()
    with ops.name_scope(name, values=[concentration1,
                                      concentration0]) as ns:
      super(BetaWithSoftplusConcentration, self).__init__(
          concentration1=nn.softplus(concentration1,
                                     name="softplus_concentration1"),
          concentration0=nn.softplus(concentration0,
                                     name="softplus_concentration0"),
          validate_args=validate_args,
          allow_nan_stats=allow_nan_stats,
          name=ns)
    # Overwrite what the base class recorded so `self.parameters` reflects
    # the *pre*-softplus constructor inputs.
    self._parameters = parameters
@kullback_leibler.RegisterKL(Beta, Beta)
def _kl_beta_beta(d1, d2, name=None):
  """Calculate the batchwise KL divergence KL(d1 || d2) with d1 and d2 Beta.

  Uses the closed form:

  ```none
  KL = log B(a2, b2) - log B(a1, b1)
       - (a2 - a1) digamma(a1)
       - (b2 - b1) digamma(b1)
       + (a2 + b2 - a1 - b1) digamma(a1 + b1)
  ```

  Args:
    d1: instance of a Beta distribution object.
    d2: instance of a Beta distribution object.
    name: (optional) Name to use for created operations.
      default is "kl_beta_beta".

  Returns:
    Batchwise KL(d1 || d2)
  """
  with ops.name_scope(name, "kl_beta_beta", values=[
      d1.concentration1,
      d1.concentration0,
      d1.total_concentration,
      d2.concentration1,
      d2.concentration0,
      d2.total_concentration,
  ]):
    # Difference of log-normalizers: log B(a2, b2) - log B(a1, b1).
    # pylint: disable=protected-access
    log_norm_delta = d2._log_normalization() - d1._log_normalization()
    # pylint: enable=protected-access
    # Linear terms, each weighted by a digamma evaluated at d1's parameters.
    c1_term = math_ops.digamma(d1.concentration1) * (
        d2.concentration1 - d1.concentration1)
    c0_term = math_ops.digamma(d1.concentration0) * (
        d2.concentration0 - d1.concentration0)
    total_term = math_ops.digamma(d1.total_concentration) * (
        d2.total_concentration - d1.total_concentration)
    return log_norm_delta - c1_term - c0_term + total_term
| apache-2.0 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.